Mirror of https://github.com/NixOS/nix, synced 2025-07-06 21:41:48 +02:00

Commit d002324f1b: Merge remote-tracking branch 'origin/master' into nix-copy-gc

298 changed files with 1709 additions and 5686 deletions
@@ -41,7 +41,7 @@ INPUT = \
@src@/src/libutil-c \
@src@/src/libexpr-c \
@src@/src/libstore-c \
@src@/doc/external-api/README.md
@src@/src/external-api-docs/README.md

FILE_PATTERNS = nix_api_*.h *.md

@@ -55,6 +55,8 @@ EXCLUDE_PATTERNS = *_internal.h
GENERATE_TREEVIEW = YES
OPTIMIZE_OUTPUT_FOR_C = YES

USE_MDFILE_AS_MAINPAGE = doc/external-api/README.md
USE_MDFILE_AS_MAINPAGE = @src@/src/external-api-docs/README.md

WARN_IF_UNDOCUMENTED = NO
WARN_IF_INCOMPLETE_DOC = NO
QUIET = YES

@@ -43,8 +43,8 @@ INPUT = \
@src@/libexpr/flake \
@src@/libexpr-tests \
@src@/libexpr-tests/value \
@src@/libexpr-test-support/test \
@src@/libexpr-test-support/test/value \
@src@/libexpr-test-support/tests \
@src@/libexpr-test-support/tests/value \
@src@/libexpr/value \
@src@/libfetchers \
@src@/libmain \

@@ -52,10 +52,11 @@ INPUT = \
@src@/libstore/build \
@src@/libstore/builtins \
@src@/libstore-tests \
@src@/libstore-test-support/test \
@src@/libstore-test-support/tests \
@src@/libutil \
@src@/libutil/args \
@src@/libutil-tests \
@src@/libutil-test-support/test \
@src@/libutil-test-support/tests \
@src@/nix \
@src@/nix-env \
@src@/nix-store

@@ -83,7 +84,9 @@ EXPAND_ONLY_PREDEF = YES
# RECURSIVE has no effect here.
# This tag requires that the tag SEARCH_INCLUDES is set to YES.

INCLUDE_PATH =
INCLUDE_PATH = \
@BUILD_ROOT@/src/libexpr/libnixexpr.so.p \
@BUILD_ROOT@/src/nix/nix.p \

# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then this
# tag can be used to specify a list of macro names that should be expanded. The

@@ -96,7 +99,18 @@ EXPAND_AS_DEFINED = \
DECLARE_COMMON_SERIALISER \
DECLARE_WORKER_SERIALISER \
DECLARE_SERVE_SERIALISER \
LENGTH_PREFIXED_PROTO_HELPER
LENGTH_PREFIXED_PROTO_HELPER \
LENGTH_PREFIXED_PROTO_HELPER_X \
WORKER_USE_LENGTH_PREFIX_SERIALISER \
WORKER_USE_LENGTH_PREFIX_SERIALISER_COMMA \
SERVE_USE_LENGTH_PREFIX_SERIALISER \
SERVE_USE_LENGTH_PREFIX_SERIALISER_COMMA \
COMMON_METHODS \
JSON_IMPL \
MakeBinOp

PREDEFINED = DOXYGEN_SKIP

WARN_IF_UNDOCUMENTED = NO
WARN_IF_INCOMPLETE_DOC = NO
QUIET = YES

@@ -12,6 +12,7 @@ doxygen_cfg = configure_file(
configuration : {
'PROJECT_NUMBER': meson.project_version(),
'OUTPUT_DIRECTORY' : meson.current_build_dir(),
'BUILD_ROOT' : meson.build_root(),
'src' : fs.parent(fs.parent(meson.project_source_root())) / 'src',
},
)

@@ -29,13 +29,13 @@ EvalSettings evalSettings {
{
{
"flake",
[](ref<Store> store, std::string_view rest) {
[](EvalState & state, std::string_view rest) {
experimentalFeatureSettings.require(Xp::Flakes);
// FIXME `parseFlakeRef` should take a `std::string_view`.
auto flakeRef = parseFlakeRef(fetchSettings, std::string { rest }, {}, true, false);
debug("fetching flake search path element '%s''", rest);
auto storePath = flakeRef.resolve(store).fetchTree(store).first;
return store->toRealPath(storePath);
auto storePath = flakeRef.resolve(state.store).fetchTree(state.store).first;
return state.rootPath(state.store->toRealPath(storePath));
},
},
},

@@ -32,16 +32,6 @@ InstallableDerivedPath InstallableDerivedPath::parse(
// store path.
[&](const ExtendedOutputsSpec::Default &) -> DerivedPath {
auto storePath = store->followLinksToStorePath(prefix);
// Remove this prior to stabilizing the new CLI.
if (storePath.isDerivation()) {
auto oldDerivedPath = DerivedPath::Built {
.drvPath = makeConstantStorePathRef(storePath),
.outputs = OutputsSpec::All { },
};
warn(
"The interpretation of store paths arguments ending in `.drv` recently changed. If this command is now failing try again with '%s'",
oldDerivedPath.to_string(*store));
};
return DerivedPath::Opaque {
.path = std::move(storePath),
};

@@ -26,7 +26,7 @@ struct ExtraPathInfoFlake : ExtraPathInfoValue
Flake flake;

ExtraPathInfoFlake(Value && v, Flake && f)
: ExtraPathInfoValue(std::move(v)), flake(f)
: ExtraPathInfoValue(std::move(v)), flake(std::move(f))
{ }
};

@@ -59,7 +59,7 @@ struct ExtraPathInfoValue : ExtraPathInfo
Value value;

ExtraPathInfoValue(Value && v)
: value(v)
: value(std::move(v))
{ }

virtual ~ExtraPathInfoValue() = default;

@@ -857,6 +857,7 @@ std::vector<FlakeRef> RawInstallablesCommand::getFlakeRefsForCompletion()
{
applyDefaultInstallables(rawInstallables);
std::vector<FlakeRef> res;
res.reserve(rawInstallables.size());
for (auto i : rawInstallables)
res.push_back(parseFlakeRefWithFragment(
fetchSettings,

@@ -1,15 +0,0 @@
libraries += libcmd

libcmd_NAME = libnixcmd

libcmd_DIR := $(d)

libcmd_SOURCES := $(wildcard $(d)/*.cc)

libcmd_CXXFLAGS += $(INCLUDE_libutil) $(INCLUDE_libstore) $(INCLUDE_libfetchers) $(INCLUDE_libexpr) $(INCLUDE_libflake) $(INCLUDE_libmain)

libcmd_LDFLAGS = $(EDITLINE_LIBS) $(LOWDOWN_LIBS) $(THREAD_LDFLAGS)

libcmd_LIBS = libutil libstore libfetchers libflake libexpr libmain

$(eval $(call install-file-in, $(buildprefix)$(d)/nix-cmd.pc, $(libdir)/pkgconfig, 0644))

@@ -1,9 +0,0 @@
prefix=@prefix@
libdir=@libdir@
includedir=@includedir@

Name: Nix
Description: Nix Package Manager
Version: @PACKAGE_VERSION@
Libs: -L${libdir} -lnixcmd
Cflags: -I${includedir}/nix -std=c++2a

@@ -1,25 +0,0 @@
libraries += libexprc

libexprc_NAME = libnixexprc

libexprc_DIR := $(d)

libexprc_SOURCES := \
$(wildcard $(d)/*.cc) \

# Not just for this library itself, but also for downstream libraries using this library

INCLUDE_libexprc := -I $(d)
libexprc_CXXFLAGS += $(INCLUDE_libutil) $(INCLUDE_libutilc) \
$(INCLUDE_libfetchers) \
$(INCLUDE_libstore) $(INCLUDE_libstorec) \
$(INCLUDE_libexpr) $(INCLUDE_libexprc)

libexprc_LIBS = libutil libutilc libstore libstorec libfetchers libexpr

libexprc_LDFLAGS += $(THREAD_LDFLAGS)

$(eval $(call install-file-in, $(d)/nix-expr-c.pc, $(libdir)/pkgconfig, 0644))

libexprc_FORCE_INSTALL := 1

@@ -75,6 +75,7 @@ headers = [config_h] + files(
headers += files('nix_api_expr_internal.h')

subdir('build-utils-meson/export-all-symbols')
subdir('build-utils-meson/windows-version')

this_library = library(
'nixexprc',

@@ -1,10 +0,0 @@
prefix=@prefix@
libdir=@libdir@
includedir=@includedir@

Name: Nix
Description: Nix Language Evaluator - C API
Version: @PACKAGE_VERSION@
Requires: nix-store-c
Libs: -L${libdir} -lnixexprc
Cflags: -I${includedir}/nix

@@ -67,7 +67,7 @@ nix_err nix_value_call_multi(nix_c_context * context, EvalState * state, nix_val
if (context)
context->last_err_code = NIX_OK;
try {
state->state.callFunction(fn->value, nargs, (nix::Value * *)args, value->value, nix::noPos);
state->state.callFunction(fn->value, {(nix::Value * *) args, nargs}, value->value, nix::noPos);
state->state.forceValue(value->value, nix::noPos);
}
NIXC_CATCH_ERRS

@@ -129,7 +129,7 @@ nix_err nix_value_call_multi(
* @param[in] state The state of the evaluation.
* @param[out] value The result of the function call.
* @param[in] fn The Nix function to call.
* @param[in] args The arguments to pass to the function.
* @param[in] ... The arguments to pass to the function.
*
* @see nix_value_call_multi
*/

@@ -77,8 +77,7 @@ typedef struct ExternalValue ExternalValue;
*/
typedef struct nix_realised_string nix_realised_string;

/** @defgroup primops
* @brief Create your own primops
/** @defgroup primops Adding primops
* @{
*/
/** @brief Function pointer for primops

@@ -252,7 +251,7 @@ int64_t nix_get_int(nix_c_context * context, const nix_value * value);
* @param[in] value Nix value to inspect
* @return reference to external, NULL in case of error
*/
ExternalValue * nix_get_external(nix_c_context * context, nix_value *);
ExternalValue * nix_get_external(nix_c_context * context, nix_value * value);

/** @brief Get the ix'th element of a list
*

@@ -423,7 +422,7 @@ nix_list_builder_insert(nix_c_context * context, ListBuilder * list_builder, uns
/** @brief Free a list builder
*
* Does not fail.
* @param[in] builder the builder to free
* @param[in] list_builder The builder to free.
*/
void nix_list_builder_free(ListBuilder * list_builder);

@@ -1,23 +0,0 @@
libraries += libexpr-test-support

libexpr-test-support_NAME = libnixexpr-test-support

libexpr-test-support_DIR := $(d)

ifeq ($(INSTALL_UNIT_TESTS), yes)
libexpr-test-support_INSTALL_DIR := $(checklibdir)
else
libexpr-test-support_INSTALL_DIR :=
endif

libexpr-test-support_SOURCES := \
$(wildcard $(d)/tests/*.cc) \
$(wildcard $(d)/tests/value/*.cc)

libexpr-test-support_CXXFLAGS += $(libexpr-tests_EXTRA_INCLUDES)

libexpr-test-support_LIBS = \
libstore-test-support libutil-test-support \
libexpr libstore libutil

libexpr-test-support_LDFLAGS := $(THREAD_LDFLAGS) -lrapidcheck

@@ -56,6 +56,7 @@ headers = files(
)

subdir('build-utils-meson/export-all-symbols')
subdir('build-utils-meson/windows-version')

this_library = library(
'nix-expr-test-support',

@@ -40,6 +40,12 @@ namespace nix {
return v;
}

Value * maybeThunk(std::string input, bool forceValue = true) {
Expr * e = state.parseExprFromString(input, state.rootPath(CanonPath::root));
assert(e);
return e->maybeThunk(state, state.baseEnv);
}

Symbol createSymbol(const char * value) {
return state.symbols.create(value);
}

@@ -138,4 +138,27 @@ TEST(nix_isAllowedURI, non_scheme_colon) {
ASSERT_FALSE(isAllowedURI("https://foo/bar:baz", allowed));
}

} // namespace nix
class EvalStateTest : public LibExprTest {};

TEST_F(EvalStateTest, getBuiltins_ok) {
auto evaled = maybeThunk("builtins");
auto & builtins = state.getBuiltins();
ASSERT_TRUE(builtins.type() == nAttrs);
ASSERT_EQ(evaled, &builtins);
}

TEST_F(EvalStateTest, getBuiltin_ok) {
auto & builtin = state.getBuiltin("toString");
ASSERT_TRUE(builtin.type() == nFunction);
// FIXME
// auto evaled = maybeThunk("builtins.toString");
// ASSERT_EQ(evaled, &builtin);
auto & builtin2 = state.getBuiltin("true");
ASSERT_EQ(state.forceBool(builtin2, noPos, "in unit test"), true);
}

TEST_F(EvalStateTest, getBuiltin_fail) {
ASSERT_THROW(state.getBuiltin("nonexistent"), EvalError);
}

} // namespace nix

@@ -1,45 +0,0 @@
check: libexpr-tests_RUN

programs += libexpr-tests

libexpr-tests_NAME := libnixexpr-tests

libexpr-tests_ENV := _NIX_TEST_UNIT_DATA=$(d)/data GTEST_OUTPUT=xml:$$testresults/libexpr-tests.xml

libexpr-tests_DIR := $(d)

ifeq ($(INSTALL_UNIT_TESTS), yes)
libexpr-tests_INSTALL_DIR := $(checkbindir)
else
libexpr-tests_INSTALL_DIR :=
endif

libexpr-tests_SOURCES := \
$(wildcard $(d)/*.cc) \
$(wildcard $(d)/value/*.cc) \
$(wildcard $(d)/flake/*.cc)

libexpr-tests_EXTRA_INCLUDES = \
-I src/libexpr-test-support \
-I src/libstore-test-support \
-I src/libutil-test-support \
$(INCLUDE_libexpr) \
$(INCLUDE_libexprc) \
$(INCLUDE_libfetchers) \
$(INCLUDE_libstore) \
$(INCLUDE_libstorec) \
$(INCLUDE_libutil) \
$(INCLUDE_libutilc)

libexpr-tests_CXXFLAGS += $(libexpr-tests_EXTRA_INCLUDES)

libexpr-tests_LIBS = \
libexpr-test-support libstore-test-support libutil-test-support \
libexpr libexprc libfetchers libstore libstorec libutil libutilc

libexpr-tests_LDFLAGS := -lrapidcheck $(GTEST_LIBS) -lgmock

ifdef HOST_WINDOWS
# Increase the default reserved stack size to 65 MB so Nix doesn't run out of space
libexpr-tests_LDFLAGS += -Wl,--stack,$(shell echo $$((65 * 1024 * 1024)))
endif

@@ -28,6 +28,7 @@ subdir('build-utils-meson/subprojects')
subdir('build-utils-meson/threads')

subdir('build-utils-meson/export-all-symbols')
subdir('build-utils-meson/windows-version')

rapidcheck = dependency('rapidcheck')
deps_private += rapidcheck

@@ -10,6 +10,9 @@ lockFileStr:
# unlocked trees.
overrides:

# This is `prim_fetchFinalTree`.
fetchTreeFinal:

let

lockFile = builtins.fromJSON lockFileStr;

@@ -44,7 +47,8 @@ let
overrides.${key}.sourceInfo
else
# FIXME: remove obsolete node.info.
fetchTree (node.info or {} // removeAttrs node.locked ["dir"]);
# Note: lock file entries are always final.
fetchTreeFinal (node.info or {} // removeAttrs node.locked ["dir"]);

subdir = overrides.${key}.dir or node.locked.dir or "";

@@ -87,11 +87,15 @@ void EvalState::forceValue(Value & v, const PosIdx pos)
{
if (v.isThunk()) {
Env * env = v.payload.thunk.env;
assert(env || v.isBlackhole());
Expr * expr = v.payload.thunk.expr;
try {
v.mkBlackhole();
//checkInterrupt();
expr->eval(*this, *env, v);
if (env) [[likely]]
expr->eval(*this, *env, v);
else
ExprBlackHole::throwInfiniteRecursionError(*this, v);
} catch (...) {
v.mkThunk(env, expr);
tryFixupBlackHolePos(v, pos);

@@ -3,10 +3,11 @@

#include "config.hh"
#include "ref.hh"
#include "source-path.hh"

namespace nix {

class Store;
class EvalState;

struct EvalSettings : Config
{

@@ -18,11 +19,8 @@ struct EvalSettings : Config
*
* The return value is (a) whether the entry was valid, and, if so,
* what does it map to.
*
* @todo Return (`std::optional` of) `SourceAccssor` or something
* more structured instead of mere `std::string`?
*/
using LookupPathHook = std::optional<std::string>(ref<Store> store, std::string_view);
using LookupPathHook = std::optional<SourcePath>(EvalState & state, std::string_view);

/**
* Map from "scheme" to a `LookupPathHook`.
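As an aside, a lookup-path hook under the new signature would look roughly like the following sketch; the "example" scheme name, the hook body, and registering it via `lookupPathHooks.emplace` are illustrative assumptions rather than code from this commit. The hook now receives the `EvalState` and returns a `SourcePath` instead of a plain string:

    settings.lookupPathHooks.emplace(
        "example",
        [](EvalState & state, std::string_view rest) -> std::optional<SourcePath> {
            // Map "example:<rest>" to a path rooted in the evaluator's file system.
            return state.rootPath(CanonPath("/" + std::string(rest)));
        });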
@@ -448,7 +448,7 @@ void EvalState::addConstant(const std::string & name, Value * v, Constant info)
/* Install value the base environment. */
staticBaseEnv->vars.emplace_back(symbols.create(name), baseEnvDispl);
baseEnv.values[baseEnvDispl++] = v;
baseEnv.values[0]->payload.attrs->push_back(Attr(symbols.create(name2), v));
getBuiltins().payload.attrs->push_back(Attr(symbols.create(name2), v));
}
}

@@ -510,16 +510,32 @@ Value * EvalState::addPrimOp(PrimOp && primOp)

Value * v = allocValue();
v->mkPrimOp(new PrimOp(primOp));
staticBaseEnv->vars.emplace_back(envName, baseEnvDispl);
baseEnv.values[baseEnvDispl++] = v;
baseEnv.values[0]->payload.attrs->push_back(Attr(symbols.create(primOp.name), v));

if (primOp.internal)
internalPrimOps.emplace(primOp.name, v);
else {
staticBaseEnv->vars.emplace_back(envName, baseEnvDispl);
baseEnv.values[baseEnvDispl++] = v;
getBuiltins().payload.attrs->push_back(Attr(symbols.create(primOp.name), v));
}

return v;
}

Value & EvalState::getBuiltins()
{
return *baseEnv.values[0];
}

Value & EvalState::getBuiltin(const std::string & name)
{
return *baseEnv.values[0]->attrs()->find(symbols.create(name))->value;
auto it = getBuiltins().attrs()->get(symbols.create(name));
if (it)
return *it->value;
else
error<EvalError>("builtin '%1%' not found", name).debugThrow();
}
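For orientation, a minimal usage sketch of the two new accessors, mirroring the unit tests added earlier in this commit (`state` is an `EvalState`):

    Value & builtins = state.getBuiltins();            // the `builtins` attribute set
    assert(builtins.type() == nAttrs);

    Value & toString = state.getBuiltin("toString");   // same value as `builtins.toString`
    assert(toString.type() == nFunction);

    // An unknown name now throws an EvalError instead of dereferencing a missing attribute:
    // state.getBuiltin("nonexistent");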
@@ -582,14 +598,14 @@ std::optional<EvalState::Doc> EvalState::getDoc(Value & v)
if (isFunctor(v)) {
try {
Value & functor = *v.attrs()->find(sFunctor)->value;
Value * vp = &v;
Value * vp[] = {&v};
Value partiallyApplied;
// The first paramater is not user-provided, and may be
// handled by code that is opaque to the user, like lib.const = x: y: y;
// So preferably we show docs that are relevant to the
// "partially applied" function returned by e.g. `const`.
// We apply the first argument:
callFunction(functor, 1, &vp, partiallyApplied, noPos);
callFunction(functor, vp, partiallyApplied, noPos);
auto _level = addCallDepth(noPos);
return getDoc(partiallyApplied);
}

@@ -1454,7 +1470,7 @@ void ExprLambda::eval(EvalState & state, Env & env, Value & v)
v.mkLambda(&env, this);
}

void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value & vRes, const PosIdx pos)
void EvalState::callFunction(Value & fun, std::span<Value *> args, Value & vRes, const PosIdx pos)
{
auto _level = addCallDepth(pos);

@@ -1469,16 +1485,16 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value &
auto makeAppChain = [&]()
{
vRes = vCur;
for (size_t i = 0; i < nrArgs; ++i) {
for (auto arg : args) {
auto fun2 = allocValue();
*fun2 = vRes;
vRes.mkPrimOpApp(fun2, args[i]);
vRes.mkPrimOpApp(fun2, arg);
}
};

const Attr * functor;

while (nrArgs > 0) {
while (args.size() > 0) {

if (vCur.isLambda()) {

@@ -1581,15 +1597,14 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value &
throw;
}

nrArgs--;
args += 1;
args = args.subspan(1);
}

else if (vCur.isPrimOp()) {

size_t argsLeft = vCur.primOp()->arity;

if (nrArgs < argsLeft) {
if (args.size() < argsLeft) {
/* We don't have enough arguments, so create a tPrimOpApp chain. */
makeAppChain();
return;

@@ -1601,15 +1616,14 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value &
if (countCalls) primOpCalls[fn->name]++;

try {
fn->fun(*this, vCur.determinePos(noPos), args, vCur);
fn->fun(*this, vCur.determinePos(noPos), args.data(), vCur);
} catch (Error & e) {
if (fn->addTrace)
addErrorTrace(e, pos, "while calling the '%1%' builtin", fn->name);
throw;
}

nrArgs -= argsLeft;
args += argsLeft;
args = args.subspan(argsLeft);
}
}

@@ -1625,7 +1639,7 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value &
auto arity = primOp->primOp()->arity;
auto argsLeft = arity - argsDone;

if (nrArgs < argsLeft) {
if (args.size() < argsLeft) {
/* We still don't have enough arguments, so extend the tPrimOpApp chain. */
makeAppChain();
return;

@@ -1657,8 +1671,7 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value &
throw;
}

nrArgs -= argsLeft;
args += argsLeft;
args = args.subspan(argsLeft);
}
}

@@ -1669,13 +1682,12 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value &
Value * args2[] = {allocValue(), args[0]};
*args2[0] = vCur;
try {
callFunction(*functor->value, 2, args2, vCur, functor->pos);
callFunction(*functor->value, args2, vCur, functor->pos);
} catch (Error & e) {
e.addTrace(positions[pos], "while calling a functor (an attribute set with a '__functor' attribute)");
throw;
}
nrArgs--;
args++;
args = args.subspan(1);
}

else

@@ -1718,7 +1730,7 @@ void ExprCall::eval(EvalState & state, Env & env, Value & v)
for (size_t i = 0; i < args.size(); ++i)
vArgs[i] = args[i]->maybeThunk(state, env);

state.callFunction(vFun, args.size(), vArgs.data(), v, pos);
state.callFunction(vFun, vArgs, v, pos);
}

@@ -1730,7 +1742,7 @@ void EvalState::incrFunctionCall(ExprLambda * fun)
}

void EvalState::autoCallFunction(Bindings & args, Value & fun, Value & res)
void EvalState::autoCallFunction(const Bindings & args, Value & fun, Value & res)
{
auto pos = fun.determinePos(noPos);

@@ -2040,9 +2052,12 @@ void ExprPos::eval(EvalState & state, Env & env, Value & v)
state.mkPos(v, pos);
}

void ExprBlackHole::eval(EvalState & state, Env & env, Value & v)
void ExprBlackHole::eval(EvalState & state, [[maybe_unused]] Env & env, Value & v)
{
throwInfiniteRecursionError(state, v);
}

[[gnu::noinline]] [[noreturn]] void ExprBlackHole::throwInfiniteRecursionError(EvalState & state, Value &v) {
state.error<InfiniteRecursionError>("infinite recursion encountered")
.atPos(v.determinePos(noPos))
.debugThrow();

@@ -3023,8 +3038,8 @@ SourcePath EvalState::findFile(const LookupPath & lookupPath, const std::string_
if (!rOpt) continue;
auto r = *rOpt;

Path res = suffix == "" ? r : concatStrings(r, "/", suffix);
if (pathExists(res)) return rootPath(CanonPath(canonPath(res)));
auto res = (r / CanonPath(suffix)).resolveSymlinks();
if (res.pathExists()) return res;
}

if (hasPrefix(path, "nix/"))

@@ -3039,13 +3054,13 @@ SourcePath EvalState::findFile(const LookupPath & lookupPath, const std::string_
}

std::optional<std::string> EvalState::resolveLookupPathPath(const LookupPath::Path & value0, bool initAccessControl)
std::optional<SourcePath> EvalState::resolveLookupPathPath(const LookupPath::Path & value0, bool initAccessControl)
{
auto & value = value0.s;
auto i = lookupPathResolved.find(value);
if (i != lookupPathResolved.end()) return i->second;

auto finish = [&](std::string res) {
auto finish = [&](SourcePath res) {
debug("resolved search path element '%s' to '%s'", value, res);
lookupPathResolved.emplace(value, res);
return res;

@@ -3058,7 +3073,7 @@ std::optional<std::string> EvalState::resolveLookupPathPath(const LookupPath::Pa
fetchSettings,
EvalSettings::resolvePseudoUrl(value));
auto storePath = fetchToStore(*store, SourcePath(accessor), FetchMode::Copy);
return finish(store->toRealPath(storePath));
return finish(rootPath(store->toRealPath(storePath)));
} catch (Error & e) {
logWarning({
.msg = HintFmt("Nix search path entry '%1%' cannot be downloaded, ignoring", value)

@@ -3070,29 +3085,29 @@ std::optional<std::string> EvalState::resolveLookupPathPath(const LookupPath::Pa
auto scheme = value.substr(0, colPos);
auto rest = value.substr(colPos + 1);
if (auto * hook = get(settings.lookupPathHooks, scheme)) {
auto res = (*hook)(store, rest);
auto res = (*hook)(*this, rest);
if (res)
return finish(std::move(*res));
}
}

{
auto path = absPath(value);
auto path = rootPath(value);

/* Allow access to paths in the search path. */
if (initAccessControl) {
allowPath(path);
if (store->isInStore(path)) {
allowPath(path.path.abs());
if (store->isInStore(path.path.abs())) {
try {
StorePathSet closure;
store->computeFSClosure(store->toStorePath(path).first, closure);
store->computeFSClosure(store->toStorePath(path.path.abs()).first, closure);
for (auto & p : closure)
allowPath(p);
} catch (InvalidPath &) { }
}
}

if (pathExists(path))
if (path.pathExists())
return finish(std::move(path));
else {
logWarning({

@@ -3103,7 +3118,6 @@ std::optional<std::string> EvalState::resolveLookupPathPath(const LookupPath::Pa

debug("failed to resolve search path element '%s'", value);
return std::nullopt;

}

@@ -91,7 +91,7 @@ struct PrimOp
const char * doc = nullptr;

/**
* Add a trace item, `while calling the '<name>' builtin`
* Add a trace item, while calling the `<name>` builtin.
*
* This is used to remove the redundant item for `builtins.addErrorContext`.
*/

@@ -107,6 +107,11 @@ struct PrimOp
*/
std::optional<ExperimentalFeature> experimentalFeature;

/**
* If true, this primop is not exposed to the user.
*/
bool internal = false;

/**
* Validity check to be performed by functions that introduce primops,
* such as RegisterPrimOp() and Value::mkPrimOp().

@@ -342,7 +347,7 @@ private:

LookupPath lookupPath;

std::map<std::string, std::optional<std::string>> lookupPathResolved;
std::map<std::string, std::optional<SourcePath>> lookupPathResolved;

/**
* Cache used by prim_match().

@@ -447,9 +452,9 @@ public:
*
* If the specified search path element is a URI, download it.
*
* If it is not found, return `std::nullopt`
* If it is not found, return `std::nullopt`.
*/
std::optional<std::string> resolveLookupPathPath(
std::optional<SourcePath> resolveLookupPathPath(
const LookupPath::Path & elem,
bool initAccessControl = false);

@@ -591,6 +596,11 @@ public:
*/
std::shared_ptr<StaticEnv> staticBaseEnv; // !!! should be private

/**
* Internal primops not exposed to the user.
*/
std::unordered_map<std::string, Value *, std::hash<std::string>, std::equal_to<std::string>, traceable_allocator<std::pair<const std::string, Value *>>> internalPrimOps;

/**
* Name and documentation about every constant.
*

@@ -613,8 +623,19 @@ private:

public:

/**
* Retrieve a specific builtin, equivalent to evaluating `builtins.${name}`.
* @param name The attribute name of the builtin to retrieve.
* @throws EvalError if the builtin does not exist.
*/
Value & getBuiltin(const std::string & name);

/**
* Retrieve the `builtins` attrset, equivalent to evaluating the reference `builtins`.
* Always returns an attribute set value.
*/
Value & getBuiltins();

struct Doc
{
Pos pos;

@@ -680,20 +701,19 @@ public:

bool isFunctor(Value & fun);

// FIXME: use std::span
void callFunction(Value & fun, size_t nrArgs, Value * * args, Value & vRes, const PosIdx pos);
void callFunction(Value & fun, std::span<Value *> args, Value & vRes, const PosIdx pos);

void callFunction(Value & fun, Value & arg, Value & vRes, const PosIdx pos)
{
Value * args[] = {&arg};
callFunction(fun, 1, args, vRes, pos);
callFunction(fun, args, vRes, pos);
}

/**
* Automatically call a function for which each argument has a
* default value or has a binding in the `args` map.
*/
void autoCallFunction(Bindings & args, Value & fun, Value & res);
void autoCallFunction(const Bindings & args, Value & fun, Value & res);

/**
* Allocation primitives.
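Callers of the new `std::span`-based `callFunction` simply pass an array, as the one-argument overload above already does; a sketch, assuming `fun`, `arg1`, `arg2` and `pos` are in scope:

    Value * args[] = {&arg1, &arg2};
    Value result;
    state.callFunction(fun, args, result, pos);   // the C array converts implicitly to std::span<Value *>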
@@ -799,7 +819,6 @@ public:
bool callPathFilter(
Value * filterFun,
const SourcePath & path,
std::string_view pathArg,
PosIdx pos);

DocComment getDocCommentForPos(PosIdx pos);

@@ -1,50 +0,0 @@
libraries += libexpr

libexpr_NAME = libnixexpr

libexpr_DIR := $(d)

libexpr_SOURCES := \
$(wildcard $(d)/*.cc) \
$(wildcard $(d)/value/*.cc) \
$(wildcard $(d)/primops/*.cc) \
$(d)/lexer-tab.cc \
$(d)/parser-tab.cc
# Not just for this library itself, but also for downstream libraries using this library

INCLUDE_libexpr := -I $(d)

libexpr_CXXFLAGS += \
$(INCLUDE_libutil) $(INCLUDE_libstore) $(INCLUDE_libfetchers) $(INCLUDE_libexpr) \
-DGC_THREADS

libexpr_LIBS = libutil libstore libfetchers

libexpr_LDFLAGS += -lboost_context $(THREAD_LDFLAGS)
ifdef HOST_LINUX
libexpr_LDFLAGS += -ldl
endif

# The dependency on libgc must be propagated (i.e. meaning that
# programs/libraries that use libexpr must explicitly pass -lgc),
# because inline functions in libexpr's header files call libgc.
libexpr_LDFLAGS_PROPAGATED = $(BDW_GC_LIBS)

libexpr_ORDER_AFTER := $(d)/parser-tab.cc $(d)/parser-tab.hh $(d)/lexer-tab.cc $(d)/lexer-tab.hh

$(d)/parser-tab.cc $(d)/parser-tab.hh: $(d)/parser.y
$(trace-gen) bison -v -o $(libexpr_DIR)/parser-tab.cc $< -d

$(d)/lexer-tab.cc $(d)/lexer-tab.hh: $(d)/lexer.l
$(trace-gen) flex --outfile $(libexpr_DIR)/lexer-tab.cc --header-file=$(libexpr_DIR)/lexer-tab.hh $<

clean-files += $(d)/parser-tab.cc $(d)/parser-tab.hh $(d)/lexer-tab.cc $(d)/lexer-tab.hh

$(eval $(call install-file-in, $(buildprefix)$(d)/nix-expr.pc, $(libdir)/pkgconfig, 0644))

$(foreach i, $(wildcard src/libexpr/value/*.hh), \
$(eval $(call install-file-in, $(i), $(includedir)/nix/value, 0644)))

$(d)/primops.cc: $(d)/imported-drv-to-derivation.nix.gen.hh

$(d)/eval.cc: $(d)/primops/derivation.nix.gen.hh $(d)/fetchurl.nix.gen.hh $(d)/call-flake.nix.gen.hh

@@ -1,10 +0,0 @@
prefix=@prefix@
libdir=@libdir@
includedir=@includedir@

Name: Nix
Description: Nix Package Manager
Version: @PACKAGE_VERSION@
Requires: nix-store bdw-gc
Libs: -L${libdir} -lnixexpr
Cflags: -I${includedir}/nix -std=c++2a

@@ -206,7 +206,7 @@ struct ExprSelect : Expr
/**
* Evaluate the `a.b.c` part of `a.b.c.d`. This exists mostly for the purpose of :doc in the repl.
*
* @param[out] v The attribute set that should contain the last attribute name (if it exists).
* @param[out] attrs The attribute set that should contain the last attribute name (if it exists).
* @return The last attribute name in `attrPath`
*
* @note This does *not* evaluate the final attribute, and does not fail if that's the only attribute that does not exist.

@@ -468,6 +468,7 @@ struct ExprBlackHole : Expr
void show(const SymbolTable & symbols, std::ostream & str) const override {}
void eval(EvalState & state, Env & env, Value & v) override;
void bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env) override {}
[[noreturn]] static void throwInfiniteRecursionError(EvalState & state, Value & v);
};

extern ExprBlackHole eBlackHole;

@@ -71,6 +71,10 @@ mkMesonLibrary (finalAttrs: {
nix-util
nix-store
nix-fetchers
] ++ finalAttrs.passthru.externalPropagatedBuildInputs;

# Hack for sake of the dev shell
passthru.externalPropagatedBuildInputs = [
boost
nlohmann_json
] ++ lib.optional enableGC boehmgc;

@@ -91,6 +91,7 @@ StringMap EvalState::realiseContext(const NixStringContext & context, StorePathS

/* Build/substitute the context. */
std::vector<DerivedPath> buildReqs;
buildReqs.reserve(drvs.size());
for (auto & d : drvs) buildReqs.emplace_back(DerivedPath { d });
buildStore->buildPaths(buildReqs, bmNormal, store);

@@ -723,7 +724,7 @@ static void prim_genericClosure(EvalState & state, const PosIdx pos, Value * * a

/* Call the `operator' function with `e' as argument. */
Value newElements;
state.callFunction(*op->value, 1, &e, newElements, noPos);
state.callFunction(*op->value, {&e, 1}, newElements, noPos);
state.forceList(newElements, noPos, "while evaluating the return value of the `operator` passed to builtins.genericClosure");

/* Add the values returned by the operator to the work set. */

@@ -1602,7 +1603,8 @@ static RegisterPrimOp primop_placeholder({
*************************************************************/

/* Convert the argument to a path. !!! obsolete? */
/* Convert the argument to a path and then to a string (confusing,
eh?). !!! obsolete? */
static void prim_toPath(EvalState & state, const PosIdx pos, Value * * args, Value & v)
{
NixStringContext context;

@@ -2436,7 +2438,6 @@ static RegisterPrimOp primop_toFile({
bool EvalState::callPathFilter(
Value * filterFun,
const SourcePath & path,
std::string_view pathArg,
PosIdx pos)
{
auto st = path.lstat();

@@ -2444,12 +2445,12 @@ bool EvalState::callPathFilter(
/* Call the filter function. The first argument is the path, the
second is a string indicating the type of the file. */
Value arg1;
arg1.mkString(pathArg);
arg1.mkString(path.path.abs());

// assert that type is not "unknown"
Value * args []{&arg1, fileTypeToString(*this, st.type)};
Value res;
callFunction(*filterFun, 2, args, res, pos);
callFunction(*filterFun, args, res, pos);

return forceBool(res, pos, "while evaluating the return value of the path filter function");
}

@@ -2487,7 +2488,7 @@ static void addPath(
if (filterFun)
filter = std::make_unique<PathFilter>([&](const Path & p) {
auto p2 = CanonPath(p);
return state.callPathFilter(filterFun, {path.accessor, p2}, p2.abs(), pos);
return state.callPathFilter(filterFun, {path.accessor, p2}, pos);
});

std::optional<StorePath> expectedStorePath;

@@ -2613,13 +2614,13 @@ static void prim_path(EvalState & state, const PosIdx pos, Value * * args, Value
expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the `sha256` attribute passed to builtins.path"), HashAlgorithm::SHA256);
else
state.error<EvalError>(
"unsupported argument '%1%' to 'addPath'",
"unsupported argument '%1%' to 'builtins.path'",
state.symbols[attr.name]
).atPos(attr.pos).debugThrow();
}
if (!path)
state.error<EvalError>(
"missing required 'path' attribute in the first argument to builtins.path"
"missing required 'path' attribute in the first argument to 'builtins.path'"
).atPos(pos).debugThrow();
if (name.empty())
name = path->baseName();

@@ -3486,7 +3487,7 @@ static void prim_foldlStrict(EvalState & state, const PosIdx pos, Value * * args
for (auto [n, elem] : enumerate(args[2]->listItems())) {
Value * vs []{vCur, elem};
vCur = n == args[2]->listSize() - 1 ? &v : state.allocValue();
state.callFunction(*args[0], 2, vs, *vCur, pos);
state.callFunction(*args[0], vs, *vCur, pos);
}
state.forceValue(v, pos);
} else {

@@ -3636,7 +3637,7 @@ static void prim_sort(EvalState & state, const PosIdx pos, Value * * args, Value

Value * vs[] = {a, b};
Value vBool;
state.callFunction(*args[0], 2, vs, vBool, noPos);
state.callFunction(*args[0], vs, vBool, noPos);
return state.forceBool(vBool, pos, "while evaluating the return value of the sorting function passed to builtins.sort");
};

@@ -4936,7 +4937,7 @@ void EvalState::createBaseEnv()

/* Now that we've added all primops, sort the `builtins' set,
because attribute lookups expect it to be sorted. */
baseEnv.values[0]->payload.attrs->sort();
getBuiltins().payload.attrs->sort();

staticBaseEnv->sort();

@@ -78,6 +78,7 @@ struct FetchTreeParams {
bool emptyRevFallback = false;
bool allowNameArgument = false;
bool isFetchGit = false;
bool isFinal = false;
};

static void fetchTree(

@@ -195,6 +196,13 @@ static void fetchTree(

state.checkURI(input.toURLString());

if (params.isFinal) {
input.attrs.insert_or_assign("__final", Explicit<bool>(true));
} else {
if (input.isFinal())
throw Error("input '%s' is not allowed to use the '__final' attribute", input.to_string());
}

auto [storePath, input2] = input.fetchToStore(state.store);

state.allowPath(storePath);

@@ -431,6 +439,18 @@ static RegisterPrimOp primop_fetchTree({
.experimentalFeature = Xp::FetchTree,
});

void prim_fetchFinalTree(EvalState & state, const PosIdx pos, Value * * args, Value & v)
{
fetchTree(state, pos, args, v, {.isFinal = true});
}

static RegisterPrimOp primop_fetchFinalTree({
.name = "fetchFinalTree",
.args = {"input"},
.fun = prim_fetchFinalTree,
.internal = true,
});

static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v,
const std::string & who, bool unpack, std::string name)
{
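The `.internal = true` flag used by `fetchFinalTree` above keeps a primop out of the user-visible `builtins` set; `EvalState::addPrimOp` (changed earlier in this commit) stores such primops in `internalPrimOps` instead. A sketch of registering another internal primop, where the name and the identity implementation are made up purely for illustration:

    static void prim_exampleInternal(EvalState & state, const PosIdx pos, Value * * args, Value & v)
    {
        state.forceValue(*args[0], pos);
        v = *args[0];   // identity, purely illustrative
    }

    static RegisterPrimOp primop_exampleInternal({
        .name = "exampleInternal",
        .args = {"x"},
        .fun = prim_exampleInternal,
        .internal = true,
    });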
@@ -141,7 +141,9 @@ public:
Value * * elems;
ListBuilder(EvalState & state, size_t size);

ListBuilder(ListBuilder && x)
// NOTE: Can be noexcept because we are just copying integral values and
// raw pointers.
ListBuilder(ListBuilder && x) noexcept
: size(x.size)
, inlineElems{x.inlineElems[0], x.inlineElems[1]}
, elems(size <= 2 ? inlineElems : x.elems)

@@ -28,7 +28,7 @@ struct NixStringContextElem {
/**
* Plain opaque path to some store object.
*
* Encoded as just the path: ‘<path>’.
* Encoded as just the path: `<path>`.
*/
using Opaque = SingleDerivedPath::Opaque;

@@ -39,7 +39,7 @@ struct NixStringContextElem {
* also all outputs of all derivations in that closure (including the
* root derivation).
*
* Encoded in the form ‘=<drvPath>’.
* Encoded in the form `=<drvPath>`.
*/
struct DrvDeep {
StorePath drvPath;

@@ -50,7 +50,7 @@ struct NixStringContextElem {
/**
* Derivation output.
*
* Encoded in the form ‘!<output>!<drvPath>’.
* Encoded in the form `!<output>!<drvPath>`.
*/
using Built = SingleDerivedPath::Built;

@@ -68,9 +68,9 @@ struct NixStringContextElem {

/**
* Decode a context string, one of:
* - ‘<path>’
* - ‘=<path>’
* - ‘!<name>!<path>’
* - `<path>`
* - `=<path>`
* - `!<name>!<path>`
*
* @param xpSettings Stop-gap to avoid globals during unit tests.
*/

@@ -1,37 +0,0 @@
check: libfetchers-tests_RUN

programs += libfetchers-tests

libfetchers-tests_NAME = libnixfetchers-tests

libfetchers-tests_ENV := _NIX_TEST_UNIT_DATA=$(d)/data GTEST_OUTPUT=xml:$$testresults/libfetchers-tests.xml

libfetchers-tests_DIR := $(d)

ifeq ($(INSTALL_UNIT_TESTS), yes)
libfetchers-tests_INSTALL_DIR := $(checkbindir)
else
libfetchers-tests_INSTALL_DIR :=
endif

libfetchers-tests_SOURCES := $(wildcard $(d)/*.cc)

libfetchers-tests_EXTRA_INCLUDES = \
-I src/libstore-test-support \
-I src/libutil-test-support \
$(INCLUDE_libfetchers) \
$(INCLUDE_libstore) \
$(INCLUDE_libutil)

libfetchers-tests_CXXFLAGS += $(libfetchers-tests_EXTRA_INCLUDES)

libfetchers-tests_LIBS = \
libstore-test-support libutil-test-support \
libfetchers libstore libutil

libfetchers-tests_LDFLAGS := -lrapidcheck $(GTEST_LIBS) $(LIBGIT2_LIBS)

ifdef HOST_WINDOWS
# Increase the default reserved stack size to 65 MB so Nix doesn't run out of space
libfetchers-tests_LDFLAGS += -Wl,--stack,$(shell echo $$((65 * 1024 * 1024)))
endif

@@ -27,6 +27,7 @@ subdir('build-utils-meson/subprojects')
subdir('build-utils-meson/threads')

subdir('build-utils-meson/export-all-symbols')
subdir('build-utils-meson/windows-version')

rapidcheck = dependency('rapidcheck')
deps_private += rapidcheck

@@ -44,6 +44,8 @@ StorePath fetchToStore(
: store.addToStore(
name, path, method, HashAlgorithm::SHA256, {}, filter2, repair);

debug(mode == FetchMode::DryRun ? "hashed '%s'" : "copied '%s' to '%s'", path, store.printStorePath(storePath));

if (cacheKey && mode == FetchMode::Copy)
fetchers::getCache()->upsert(*cacheKey, store, {}, storePath);

@@ -3,6 +3,7 @@
#include "source-path.hh"
#include "fetch-to-store.hh"
#include "json-utils.hh"
#include "store-path-accessor.hh"

#include <nlohmann/json.hpp>

@@ -100,7 +101,7 @@ Input Input::fromAttrs(const Settings & settings, Attrs && attrs)
auto allowedAttrs = inputScheme->allowedAttrs();

for (auto & [name, _] : attrs)
if (name != "type" && allowedAttrs.count(name) == 0)
if (name != "type" && name != "__final" && allowedAttrs.count(name) == 0)
throw Error("input attribute '%s' not supported by scheme '%s'", name, schemeName);

auto res = inputScheme->inputFromAttrs(settings, attrs);

@@ -145,6 +146,11 @@ bool Input::isLocked() const
return scheme && scheme->isLocked(*this);
}

bool Input::isFinal() const
{
return maybeGetBoolAttr(attrs, "__final").value_or(false);
}

Attrs Input::toAttrs() const
{
return attrs;

@@ -172,16 +178,24 @@ std::pair<StorePath, Input> Input::fetchToStore(ref<Store> store) const

auto [storePath, input] = [&]() -> std::pair<StorePath, Input> {
try {
auto [accessor, final] = getAccessorUnchecked(store);
auto [accessor, result] = getAccessorUnchecked(store);

auto storePath = nix::fetchToStore(*store, SourcePath(accessor), FetchMode::Copy, final.getName());
auto storePath = nix::fetchToStore(*store, SourcePath(accessor), FetchMode::Copy, result.getName());

auto narHash = store->queryPathInfo(storePath)->narHash;
final.attrs.insert_or_assign("narHash", narHash.to_string(HashFormat::SRI, true));
result.attrs.insert_or_assign("narHash", narHash.to_string(HashFormat::SRI, true));

scheme->checkLocks(*this, final);
// FIXME: we would like to mark inputs as final in
// getAccessorUnchecked(), but then we can't add
// narHash. Or maybe narHash should be excluded from the
// concept of "final" inputs?
result.attrs.insert_or_assign("__final", Explicit<bool>(true));

return {storePath, final};
assert(result.isFinal());

checkLocks(*this, result);

return {storePath, result};
} catch (Error & e) {
e.addTrace({}, "while fetching the input '%s'", to_string());
throw;

@@ -191,13 +205,40 @@ std::pair<StorePath, Input> Input::fetchToStore(ref<Store> store) const
return {std::move(storePath), input};
}

void InputScheme::checkLocks(const Input & specified, const Input & final) const
void Input::checkLocks(Input specified, Input & result)
{
/* If the original input is final, then we just return the
original attributes, dropping any new fields returned by the
fetcher. However, any fields that are in both the specified and
result input must be identical. */
if (specified.isFinal()) {

/* Backwards compatibility hack: we had some lock files in the
past that 'narHash' fields with incorrect base-64
formatting (lacking the trailing '=', e.g. 'sha256-ri...Mw'
instead of ''sha256-ri...Mw='). So fix that. */
if (auto prevNarHash = specified.getNarHash())
specified.attrs.insert_or_assign("narHash", prevNarHash->to_string(HashFormat::SRI, true));

for (auto & field : specified.attrs) {
auto field2 = result.attrs.find(field.first);
if (field2 != result.attrs.end() && field.second != field2->second)
throw Error("mismatch in field '%s' of input '%s', got '%s'",
field.first,
attrsToJSON(specified.attrs),
attrsToJSON(result.attrs));
}

result.attrs = specified.attrs;

return;
}

if (auto prevNarHash = specified.getNarHash()) {
if (final.getNarHash() != prevNarHash) {
if (final.getNarHash())
if (result.getNarHash() != prevNarHash) {
if (result.getNarHash())
throw Error((unsigned int) 102, "NAR hash mismatch in input '%s', expected '%s' but got '%s'",
specified.to_string(), prevNarHash->to_string(HashFormat::SRI, true), final.getNarHash()->to_string(HashFormat::SRI, true));
specified.to_string(), prevNarHash->to_string(HashFormat::SRI, true), result.getNarHash()->to_string(HashFormat::SRI, true));
else
throw Error((unsigned int) 102, "NAR hash mismatch in input '%s', expected '%s' but got none",
specified.to_string(), prevNarHash->to_string(HashFormat::SRI, true));

@@ -205,32 +246,32 @@ void InputScheme::checkLocks(const Input & specified, const Input & final) const
}

if (auto prevLastModified = specified.getLastModified()) {
if (final.getLastModified() != prevLastModified)
throw Error("'lastModified' attribute mismatch in input '%s', expected %d",
final.to_string(), *prevLastModified);
if (result.getLastModified() != prevLastModified)
throw Error("'lastModified' attribute mismatch in input '%s', expected %d, got %d",
result.to_string(), *prevLastModified, result.getLastModified().value_or(-1));
}

if (auto prevRev = specified.getRev()) {
if (final.getRev() != prevRev)
if (result.getRev() != prevRev)
throw Error("'rev' attribute mismatch in input '%s', expected %s",
final.to_string(), prevRev->gitRev());
result.to_string(), prevRev->gitRev());
}

if (auto prevRevCount = specified.getRevCount()) {
if (final.getRevCount() != prevRevCount)
if (result.getRevCount() != prevRevCount)
throw Error("'revCount' attribute mismatch in input '%s', expected %d",
final.to_string(), *prevRevCount);
result.to_string(), *prevRevCount);
}
}

std::pair<ref<SourceAccessor>, Input> Input::getAccessor(ref<Store> store) const
{
try {
auto [accessor, final] = getAccessorUnchecked(store);
auto [accessor, result] = getAccessorUnchecked(store);

scheme->checkLocks(*this, final);
checkLocks(*this, result);

return {accessor, std::move(final)};
return {accessor, std::move(result)};
} catch (Error & e) {
e.addTrace({}, "while fetching the input '%s'", to_string());
throw;

@@ -244,12 +285,42 @@ std::pair<ref<SourceAccessor>, Input> Input::getAccessorUnchecked(ref<Store> sto
if (!scheme)
throw Error("cannot fetch unsupported input '%s'", attrsToJSON(toAttrs()));

auto [accessor, final] = scheme->getAccessor(store, *this);
/* The tree may already be in the Nix store, or it could be
substituted (which is often faster than fetching from the
original source). So check that. We only do this for final
inputs, otherwise there is a risk that we don't return the
same attributes (like `lastModified`) that the "real" fetcher
would return.

FIXME: add a setting to disable this.
FIXME: substituting may be slower than fetching normally,
e.g. for fetchers like Git that are incremental!
*/
if (isFinal() && getNarHash()) {
try {
auto storePath = computeStorePath(*store);

store->ensurePath(storePath);

debug("using substituted/cached input '%s' in '%s'",
to_string(), store->printStorePath(storePath));

auto accessor = makeStorePathAccessor(store, storePath);

accessor->fingerprint = scheme->getFingerprint(store, *this);

return {accessor, *this};
} catch (Error & e) {
debug("substitution of input '%s' failed: %s", to_string(), e.what());
}
}

auto [accessor, result] = scheme->getAccessor(store, *this);

assert(!accessor->fingerprint);
accessor->fingerprint = scheme->getFingerprint(store, final);
accessor->fingerprint = scheme->getFingerprint(store, result);

return {accessor, std::move(final)};
return {accessor, std::move(result)};
}

Input Input::applyOverrides(

@@ -404,7 +475,10 @@ namespace nlohmann {

using namespace nix;

fetchers::PublicKey adl_serializer<fetchers::PublicKey>::from_json(const json & json) {
#ifndef DOXYGEN_SKIP

fetchers::PublicKey adl_serializer<fetchers::PublicKey>::from_json(const json & json)
{
fetchers::PublicKey res = { };
if (auto type = optionalValueAt(json, "type"))
res.type = getString(*type);

@@ -414,9 +488,12 @@ fetchers::PublicKey adl_serializer<fetchers::PublicKey>::from_json(const json &
return res;
}

void adl_serializer<fetchers::PublicKey>::to_json(json & json, fetchers::PublicKey p) {
void adl_serializer<fetchers::PublicKey>::to_json(json & json, fetchers::PublicKey p)
{
json["type"] = p.type;
json["key"] = p.key;
}

#endif

}

@@ -78,17 +78,30 @@ public:
Attrs toAttrs() const;

/**
* Check whether this is a "direct" input, that is, not
* Return whether this is a "direct" input, that is, not
* one that goes through a registry.
*/
bool isDirect() const;

/**
* Check whether this is a "locked" input, that is,
* one that contains a commit hash or content hash.
* Return whether this is a "locked" input, that is, it has
* attributes like a Git revision or NAR hash that uniquely
* identify its contents.
*/
bool isLocked() const;

/**
* Return whether this is a "final" input, meaning that fetching
* it will not add, remove or change any attributes. (See
* `checkLocks()` for the semantics.) Only "final" inputs can be
* substituted from a binary cache.
*
* The "final" state is denoted by the presence of an attribute
* `__final = true`. This attribute is currently undocumented and
* for internal use only.
*/
bool isFinal() const;

bool operator ==(const Input & other) const noexcept;

bool contains(const Input & other) const;

@@ -99,6 +112,19 @@ public:
*/
std::pair<StorePath, Input> fetchToStore(ref<Store> store) const;

/**
* Check the locking attributes in `result` against
* `specified`. E.g. if `specified` has a `rev` attribute, then
* `result` must have the same `rev` attribute. Throw an exception
* if there is a mismatch.
*
* If `specified` is marked final (i.e. has the `__final`
* attribute), then the intersection of attributes in `specified`
* and `result` must be equal, and `final.attrs` is set to
* `specified.attrs` (i.e. we discard any new attributes).
*/
static void checkLocks(Input specified, Input & result);

/**
* Return a `SourceAccessor` that allows access to files in the
* input without copying it to the store. Also return a possibly

@@ -144,6 +170,10 @@ public:
/**
* For locked inputs, return a string that uniquely specifies the
* content of the input (typically a commit hash or content hash).
*
* Only known-equivalent inputs should return the same fingerprint.
*
* This is not a stable identifier between Nix versions, but not guaranteed to change either.
*/
std::optional<std::string> getFingerprint(ref<Store> store) const;
};

@@ -215,31 +245,11 @@ struct InputScheme
virtual bool isDirect(const Input & input) const
{ return true; }

/**
* A sufficiently unique string that can be used as a cache key to identify the `input`.
*
* Only known-equivalent inputs should return the same fingerprint.
*
* This is not a stable identifier between Nix versions, but not guaranteed to change either.
*/
virtual std::optional<std::string> getFingerprint(ref<Store> store, const Input & input) const
{ return std::nullopt; }

/**
* Return `true` if this input is considered "locked", i.e. it has
* attributes like a Git revision or NAR hash that uniquely
* identify its contents.
*/
virtual bool isLocked(const Input & input) const
{ return false; }

/**
* Check the locking attributes in `final` against
* `specified`. E.g. if `specified` has a `rev` attribute, then
* `final` must have the same `rev` attribute. Throw an exception
* if there is a mismatch.
*/
virtual void checkLocks(const Input & specified, const Input & final) const;
};

void registerInputScheme(std::shared_ptr<InputScheme> && fetcher);
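To illustrate how the `__final` machinery is meant to be used, here is a hypothetical caller; the `settings`, `lockedAttrs` and `store` variables are assumptions for the sketch. An input reconstructed from a lock-file entry carries `__final = true`, and fetching it re-checks the locked attributes:

    auto locked = fetchers::Input::fromAttrs(settings, std::move(lockedAttrs));
    assert(locked.isFinal());

    auto [storePath, result] = locked.fetchToStore(store);
    // fetchToStore() runs Input::checkLocks(locked, result): every attribute present in
    // both inputs (rev, narHash, lastModified, ...) must agree, and the result keeps
    // exactly the attributes of the locked input, so it can also be substituted from a
    // binary cache via its narHash.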
@@ -217,8 +217,12 @@ static void initRepoAtomically(std::filesystem::path &path, bool bare) {
    try {
        std::filesystem::rename(tmpDir, path);
    } catch (std::filesystem::filesystem_error & e) {
        if (e.code() == std::errc::file_exists) // Someone might race us to create the repository.
        // Someone may race us to create the repository.
        if (e.code() == std::errc::file_exists
            // std::filesystem::rename may also try to remove `path`, in which case the error code is:
            || e.code() == std::errc::directory_not_empty) {
            return;
        }
        else
            throw SysError("moving temporary git repository from %s to %s", tmpDir, path);
    }
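For context, the pattern being patched is "prepare in a temporary directory, publish with a single rename, and tolerate losing the race". A stripped-down sketch of that idiom, independent of libgit2 and of the surrounding Nix code (all names here are hypothetical):

    #include <filesystem>
    #include <system_error>

    namespace fs = std::filesystem;

    // Publish `tmpDir` as `path` atomically; if another process got there
    // first, treat that as success rather than an error.
    void publishAtomically(const fs::path & tmpDir, const fs::path & path)
    {
        std::error_code ec;
        fs::rename(tmpDir, path, ec);
        if (!ec) return;
        // Another process may have created `path` already; depending on the
        // platform, rename then fails with either of these codes.
        if (ec == std::errc::file_exists || ec == std::errc::directory_not_empty) {
            fs::remove_all(tmpDir); // clean up our losing attempt
            return;
        }
        throw fs::filesystem_error("failed to publish directory", tmpDir, path, ec);
    }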
@ -1,17 +0,0 @@
|
|||
libraries += libfetchers
|
||||
|
||||
libfetchers_NAME = libnixfetchers
|
||||
|
||||
libfetchers_DIR := $(d)
|
||||
|
||||
libfetchers_SOURCES := $(wildcard $(d)/*.cc)
|
||||
|
||||
# Not just for this library itself, but also for downstream libraries using this library
|
||||
|
||||
INCLUDE_libfetchers := -I $(d)
|
||||
|
||||
libfetchers_CXXFLAGS += $(INCLUDE_libutil) $(INCLUDE_libstore) $(INCLUDE_libfetchers)
|
||||
|
||||
libfetchers_LDFLAGS += $(THREAD_LDFLAGS) $(LIBGIT2_LIBS) -larchive
|
||||
|
||||
libfetchers_LIBS = libutil libstore
|
|
@ -52,15 +52,15 @@ sources = files(
|
|||
'fetch-to-store.cc',
|
||||
'fetchers.cc',
|
||||
'filtering-source-accessor.cc',
|
||||
'git.cc',
|
||||
'git-utils.cc',
|
||||
'git.cc',
|
||||
'github.cc',
|
||||
'indirect.cc',
|
||||
'mercurial.cc',
|
||||
'mounted-source-accessor.cc',
|
||||
'path.cc',
|
||||
'store-path-accessor.cc',
|
||||
'registry.cc',
|
||||
'store-path-accessor.cc',
|
||||
'tarball.cc',
|
||||
)
|
||||
|
||||
|
@ -71,10 +71,10 @@ headers = files(
|
|||
'cache.hh',
|
||||
'fetch-settings.hh',
|
||||
'fetch-to-store.hh',
|
||||
'fetchers.hh',
|
||||
'filtering-source-accessor.hh',
|
||||
'git-utils.hh',
|
||||
'mounted-source-accessor.hh',
|
||||
'fetchers.hh',
|
||||
'registry.hh',
|
||||
'store-path-accessor.hh',
|
||||
'tarball.hh',
|
||||
|
|
|
@ -72,6 +72,7 @@ struct PathInputScheme : InputScheme
|
|||
auto query = attrsToQuery(input.attrs);
|
||||
query.erase("path");
|
||||
query.erase("type");
|
||||
query.erase("__final");
|
||||
return ParsedURL {
|
||||
.scheme = "path",
|
||||
.path = getStrAttr(input.attrs, "path"),
|
||||
|
|
|
@ -1,43 +0,0 @@
|
|||
check: libflake-tests_RUN
|
||||
|
||||
programs += libflake-tests
|
||||
|
||||
libflake-tests_NAME := libnixflake-tests
|
||||
|
||||
libflake-tests_ENV := _NIX_TEST_UNIT_DATA=$(d)/data GTEST_OUTPUT=xml:$$testresults/libflake-tests.xml
|
||||
|
||||
libflake-tests_DIR := $(d)
|
||||
|
||||
ifeq ($(INSTALL_UNIT_TESTS), yes)
|
||||
libflake-tests_INSTALL_DIR := $(checkbindir)
|
||||
else
|
||||
libflake-tests_INSTALL_DIR :=
|
||||
endif
|
||||
|
||||
libflake-tests_SOURCES := \
|
||||
$(wildcard $(d)/*.cc) \
|
||||
$(wildcard $(d)/value/*.cc) \
|
||||
$(wildcard $(d)/flake/*.cc)
|
||||
|
||||
libflake-tests_EXTRA_INCLUDES = \
|
||||
-I src/libflake-test-support \
|
||||
-I src/libstore-test-support \
|
||||
-I src/libutil-test-support \
|
||||
$(INCLUDE_libflake) \
|
||||
$(INCLUDE_libexpr) \
|
||||
$(INCLUDE_libfetchers) \
|
||||
$(INCLUDE_libstore) \
|
||||
$(INCLUDE_libutil) \
|
||||
|
||||
libflake-tests_CXXFLAGS += $(libflake-tests_EXTRA_INCLUDES)
|
||||
|
||||
libflake-tests_LIBS = \
|
||||
libexpr-test-support libstore-test-support libutil-test-support \
|
||||
libflake libexpr libfetchers libstore libutil
|
||||
|
||||
libflake-tests_LDFLAGS := -lrapidcheck $(GTEST_LIBS) -lgmock
|
||||
|
||||
ifdef HOST_WINDOWS
|
||||
# Increase the default reserved stack size to 65 MB so Nix doesn't run out of space
|
||||
libflake-tests_LDFLAGS += -Wl,--stack,$(shell echo $$((65 * 1024 * 1024)))
|
||||
endif
|
|
@ -27,6 +27,7 @@ subdir('build-utils-meson/subprojects')
|
|||
subdir('build-utils-meson/threads')
|
||||
|
||||
subdir('build-utils-meson/export-all-symbols')
|
||||
subdir('build-utils-meson/windows-version')
|
||||
|
||||
rapidcheck = dependency('rapidcheck')
|
||||
deps_private += rapidcheck
|
||||
|
|
|
@ -85,7 +85,6 @@ static void forceTrivialValue(EvalState & state, Value & value, const PosIdx pos
|
|||
state.forceValue(value, pos);
|
||||
}
|
||||
|
||||
|
||||
static void expectType(EvalState & state, ValueType type,
|
||||
Value & value, const PosIdx pos)
|
||||
{
|
||||
|
@ -810,12 +809,14 @@ void callFlake(EvalState & state,
|
|||
auto vCallFlake = state.allocValue();
|
||||
state.evalFile(state.callFlakeInternal, *vCallFlake);
|
||||
|
||||
auto vTmp1 = state.allocValue();
|
||||
auto vLocks = state.allocValue();
|
||||
vLocks->mkString(lockFileStr);
|
||||
state.callFunction(*vCallFlake, *vLocks, *vTmp1, noPos);
|
||||
|
||||
state.callFunction(*vTmp1, vOverrides, vRes, noPos);
|
||||
auto vFetchFinalTree = get(state.internalPrimOps, "fetchFinalTree");
|
||||
assert(vFetchFinalTree);
|
||||
|
||||
Value * args[] = {vLocks, &vOverrides, *vFetchFinalTree};
|
||||
state.callFunction(*vCallFlake, args, vRes, noPos);
|
||||
}
|
||||
|
||||
void initLib(const Settings & settings)
|
||||
|
|
|
@@ -234,4 +234,11 @@ void emitTreeAttrs(
    bool emptyRevFallback = false,
    bool forceDirty = false);

/**
 * An internal builtin similar to `fetchTree`, except that it
 * always treats the input as final (i.e. no attributes can be
 * added/removed/changed).
 */
void prim_fetchFinalTree(EvalState & state, const PosIdx pos, Value * * args, Value & v);

}
@ -46,6 +46,10 @@ LockedNode::LockedNode(
|
|||
if (!lockedRef.input.isLocked())
|
||||
throw Error("lock file contains unlocked input '%s'",
|
||||
fetchers::attrsToJSON(lockedRef.input.toAttrs()));
|
||||
|
||||
// For backward compatibility, lock file entries are implicitly final.
|
||||
assert(!lockedRef.input.attrs.contains("__final"));
|
||||
lockedRef.input.attrs.insert_or_assign("__final", Explicit<bool>(true));
|
||||
}
|
||||
|
||||
StorePath LockedNode::computeStorePath(Store & store) const
|
||||
|
@ -53,7 +57,6 @@ StorePath LockedNode::computeStorePath(Store & store) const
|
|||
return lockedRef.input.computeStorePath(store);
|
||||
}
|
||||
|
||||
|
||||
static std::shared_ptr<Node> doFind(const ref<Node> & root, const InputPath & path, std::vector<InputPath> & visited)
|
||||
{
|
||||
auto pos = root;
|
||||
|
@ -191,6 +194,11 @@ std::pair<nlohmann::json, LockFile::KeyMap> LockFile::toJSON() const
|
|||
if (auto lockedNode = node.dynamic_pointer_cast<const LockedNode>()) {
|
||||
n["original"] = fetchers::attrsToJSON(lockedNode->originalRef.toAttrs());
|
||||
n["locked"] = fetchers::attrsToJSON(lockedNode->lockedRef.toAttrs());
|
||||
/* For backward compatibility, omit the "__final"
|
||||
attribute. We never allow non-final inputs in lock files
|
||||
anyway. */
|
||||
assert(lockedNode->lockedRef.input.isFinal());
|
||||
n["locked"].erase("__final");
|
||||
if (!lockedNode->isFlake)
|
||||
n["flake"] = false;
|
||||
}
|
||||
|
@@ -239,7 +247,7 @@ std::optional<FlakeRef> LockFile::isUnlocked() const
    for (auto & i : nodes) {
        if (i == ref<const Node>(root)) continue;
        auto node = i.dynamic_pointer_cast<const LockedNode>();
        if (node && !node->lockedRef.input.isLocked())
        if (node && (!node->lockedRef.input.isLocked() || !node->lockedRef.input.isFinal()))
            return node->lockedRef;
    }

@@ -68,8 +68,8 @@ struct LockFile
    std::pair<std::string, KeyMap> to_string() const;

    /**
     * Check whether this lock file has any unlocked inputs. If so,
     * return one.
     * Check whether this lock file has any unlocked or non-final
     * inputs. If so, return one.
     */
    std::optional<FlakeRef> isUnlocked() const;
@ -1,10 +0,0 @@
|
|||
prefix=@prefix@
|
||||
libdir=@libdir@
|
||||
includedir=@includedir@
|
||||
|
||||
Name: Nix
|
||||
Description: Nix Package Manager
|
||||
Version: @PACKAGE_VERSION@
|
||||
Requires: nix-util nix-store nix-expr
|
||||
Libs: -L${libdir} -lnixflake
|
||||
Cflags: -I${includedir}/nix -std=c++2a
|
|
@ -1,22 +0,0 @@
|
|||
libraries += libflake
|
||||
|
||||
libflake_NAME = libnixflake
|
||||
|
||||
libflake_DIR := $(d)
|
||||
|
||||
libflake_SOURCES := $(wildcard $(d)/*.cc $(d)/flake/*.cc)
|
||||
|
||||
# Not just for this library itself, but also for downstream libraries using this library
|
||||
|
||||
INCLUDE_libflake := -I $(d)
|
||||
|
||||
libflake_CXXFLAGS += $(INCLUDE_libutil) $(INCLUDE_libstore) $(INCLUDE_libfetchers) $(INCLUDE_libexpr) $(INCLUDE_libflake)
|
||||
|
||||
libflake_LDFLAGS += $(THREAD_LDFLAGS)
|
||||
|
||||
libflake_LIBS = libutil libstore libfetchers libexpr
|
||||
|
||||
$(eval $(call install-file-in, $(buildprefix)$(d)/flake/nix-flake.pc, $(libdir)/pkgconfig, 0644))
|
||||
|
||||
$(foreach i, $(wildcard src/libflake/flake/*.hh), \
|
||||
$(eval $(call install-file-in, $(i), $(includedir)/nix/flake, 0644)))
|
|
@ -68,6 +68,7 @@ headers = [config_h] + files(
|
|||
)
|
||||
|
||||
subdir('build-utils-meson/export-all-symbols')
|
||||
subdir('build-utils-meson/windows-version')
|
||||
|
||||
this_library = library(
|
||||
'nixmainc',
|
||||
|
|
|
@@ -17,7 +17,9 @@ MixCommonArgs::MixCommonArgs(const std::string & programName)
        .shortName = 'v',
        .description = "Increase the logging verbosity level.",
        .category = loggingCategory,
        .handler = {[]() { verbosity = (Verbosity) (verbosity + 1); }},
        .handler = {[]() {
            verbosity = (Verbosity) std::min<std::underlying_type_t<Verbosity>>(verbosity + 1, lvlVomit);
        }},
    });

    addFlag({
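The change above caps an enum counter that is bumped once per `-v` flag. A tiny sketch of the same clamping idea, with a made-up `Verbosity` enum rather than Nix's real one:

    #include <algorithm>
    #include <type_traits>

    enum Verbosity { lvlError, lvlWarn, lvlInfo, lvlDebug, lvlVomit };

    // Bump the level by one, but never past the last defined value, by doing
    // the arithmetic in the enum's underlying integer type.
    Verbosity bump(Verbosity v)
    {
        using U = std::underlying_type_t<Verbosity>;
        return static_cast<Verbosity>(std::min<U>(static_cast<U>(v) + 1, lvlVomit));
    }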
@ -1,22 +0,0 @@
|
|||
libraries += libmain
|
||||
|
||||
libmain_NAME = libnixmain
|
||||
|
||||
libmain_DIR := $(d)
|
||||
|
||||
libmain_SOURCES := $(wildcard $(d)/*.cc)
|
||||
ifdef HOST_UNIX
|
||||
libmain_SOURCES += $(wildcard $(d)/unix/*.cc)
|
||||
endif
|
||||
|
||||
INCLUDE_libmain := -I $(d)
|
||||
|
||||
libmain_CXXFLAGS += $(INCLUDE_libutil) $(INCLUDE_libstore) $(INCLUDE_libmain)
|
||||
|
||||
libmain_LDFLAGS += $(OPENSSL_LIBS)
|
||||
|
||||
libmain_LIBS = libstore libutil
|
||||
|
||||
libmain_ALLOW_UNDEFINED = 1
|
||||
|
||||
$(eval $(call install-file-in, $(buildprefix)$(d)/nix-main.pc, $(libdir)/pkgconfig, 0644))
|
|
@ -1,9 +0,0 @@
|
|||
prefix=@prefix@
|
||||
libdir=@libdir@
|
||||
includedir=@includedir@
|
||||
|
||||
Name: Nix
|
||||
Description: Nix Package Manager
|
||||
Version: @PACKAGE_VERSION@
|
||||
Libs: -L${libdir} -lnixmain
|
||||
Cflags: -I${includedir}/nix -std=c++2a
|
|
@ -543,7 +543,7 @@ public:
|
|||
auto state(state_.lock());
|
||||
if (!state->active) return {};
|
||||
std::cerr << fmt("\r\e[K%s ", msg);
|
||||
auto s = trim(readLine(STDIN_FILENO));
|
||||
auto s = trim(readLine(getStandardInput(), true));
|
||||
if (s.size() != 1) return {};
|
||||
draw(*state);
|
||||
return s[0];
|
||||
|
|
|
@ -1,21 +0,0 @@
|
|||
libraries += libstorec
|
||||
|
||||
libstorec_NAME = libnixstorec
|
||||
|
||||
libstorec_DIR := $(d)
|
||||
|
||||
libstorec_SOURCES := $(wildcard $(d)/*.cc)
|
||||
|
||||
libstorec_LIBS = libutil libstore libutilc
|
||||
|
||||
libstorec_LDFLAGS += $(THREAD_LDFLAGS)
|
||||
|
||||
# Not just for this library itself, but also for downstream libraries using this library
|
||||
|
||||
INCLUDE_libstorec := -I $(d)
|
||||
libstorec_CXXFLAGS += $(INCLUDE_libutil) $(INCLUDE_libutilc) \
|
||||
$(INCLUDE_libstore) $(INCLUDE_libstorec)
|
||||
|
||||
$(eval $(call install-file-in, $(d)/nix-store-c.pc, $(libdir)/pkgconfig, 0644))
|
||||
|
||||
libstorec_FORCE_INSTALL := 1
|
|
@ -67,6 +67,7 @@ headers = [config_h] + files(
|
|||
headers += files('nix_api_store_internal.h')
|
||||
|
||||
subdir('build-utils-meson/export-all-symbols')
|
||||
subdir('build-utils-meson/windows-version')
|
||||
|
||||
this_library = library(
|
||||
'nixstorec',
|
||||
|
|
|
@ -1,9 +0,0 @@
|
|||
prefix=@prefix@
|
||||
libdir=@libdir@
|
||||
includedir=@includedir@
|
||||
|
||||
Name: Nix
|
||||
Description: Nix Store - C API
|
||||
Version: @PACKAGE_VERSION@
|
||||
Libs: -L${libdir} -lnixstorec -lnixutilc
|
||||
Cflags: -I${includedir}/nix
|
|
@ -166,7 +166,7 @@ nix_store_get_version(nix_c_context * context, Store * store, nix_get_string_cal
|
|||
*
|
||||
* @param[out] context Optional, stores error information
|
||||
* @param[in] srcStore nix source store reference
|
||||
* @param[in] srcStore nix destination store reference
|
||||
* @param[in] dstStore nix destination store reference
|
||||
* @param[in] path Path to copy
|
||||
*/
|
||||
nix_err nix_store_copy_closure(nix_c_context * context, Store * srcStore, Store * dstStore, StorePath * path);
|
||||
|
|
|
@ -1,21 +0,0 @@
|
|||
libraries += libstore-test-support
|
||||
|
||||
libstore-test-support_NAME = libnixstore-test-support
|
||||
|
||||
libstore-test-support_DIR := $(d)
|
||||
|
||||
ifeq ($(INSTALL_UNIT_TESTS), yes)
|
||||
libstore-test-support_INSTALL_DIR := $(checklibdir)
|
||||
else
|
||||
libstore-test-support_INSTALL_DIR :=
|
||||
endif
|
||||
|
||||
libstore-test-support_SOURCES := $(wildcard $(d)/tests/*.cc)
|
||||
|
||||
libstore-test-support_CXXFLAGS += $(libstore-tests_EXTRA_INCLUDES)
|
||||
|
||||
libstore-test-support_LIBS = \
|
||||
libutil-test-support \
|
||||
libstore libutil
|
||||
|
||||
libstore-test-support_LDFLAGS := $(THREAD_LDFLAGS) -lrapidcheck
|
|
@ -58,6 +58,7 @@ headers = files(
|
|||
)
|
||||
|
||||
subdir('build-utils-meson/export-all-symbols')
|
||||
subdir('build-utils-meson/windows-version')
|
||||
|
||||
this_library = library(
|
||||
'nix-store-test-support',
|
||||
|
|
|
@ -7,7 +7,7 @@
|
|||
namespace rc {
|
||||
using namespace nix;
|
||||
|
||||
Gen<DerivedPath::Opaque> Arbitrary<DerivedPath::Opaque>::arbitrary()
|
||||
Gen<SingleDerivedPath::Opaque> Arbitrary<SingleDerivedPath::Opaque>::arbitrary()
|
||||
{
|
||||
return gen::just(DerivedPath::Opaque {
|
||||
.path = *gen::arbitrary<StorePath>(),
|
||||
|
|
|
@ -1,38 +0,0 @@
|
|||
check: libstore-tests_RUN
|
||||
|
||||
programs += libstore-tests
|
||||
|
||||
libstore-tests_NAME = libnixstore-tests
|
||||
|
||||
libstore-tests_ENV := _NIX_TEST_UNIT_DATA=$(d)/data GTEST_OUTPUT=xml:$$testresults/libstore-tests.xml
|
||||
|
||||
libstore-tests_DIR := $(d)
|
||||
|
||||
ifeq ($(INSTALL_UNIT_TESTS), yes)
|
||||
libstore-tests_INSTALL_DIR := $(checkbindir)
|
||||
else
|
||||
libstore-tests_INSTALL_DIR :=
|
||||
endif
|
||||
|
||||
libstore-tests_SOURCES := $(wildcard $(d)/*.cc)
|
||||
|
||||
libstore-tests_EXTRA_INCLUDES = \
|
||||
-I src/libstore-test-support \
|
||||
-I src/libutil-test-support \
|
||||
$(INCLUDE_libstore) \
|
||||
$(INCLUDE_libstorec) \
|
||||
$(INCLUDE_libutil) \
|
||||
$(INCLUDE_libutilc)
|
||||
|
||||
libstore-tests_CXXFLAGS += $(libstore-tests_EXTRA_INCLUDES)
|
||||
|
||||
libstore-tests_LIBS = \
|
||||
libstore-test-support libutil-test-support \
|
||||
libstore libstorec libutil libutilc
|
||||
|
||||
libstore-tests_LDFLAGS := -lrapidcheck $(GTEST_LIBS)
|
||||
|
||||
ifdef HOST_WINDOWS
|
||||
# Increase the default reserved stack size to 65 MB so Nix doesn't run out of space
|
||||
libstore-tests_LDFLAGS += -Wl,--stack,$(shell echo $$((65 * 1024 * 1024)))
|
||||
endif
|
|
@ -28,6 +28,7 @@ subdir('build-utils-meson/subprojects')
|
|||
subdir('build-utils-meson/threads')
|
||||
|
||||
subdir('build-utils-meson/export-all-symbols')
|
||||
subdir('build-utils-meson/windows-version')
|
||||
|
||||
sqlite = dependency('sqlite3', 'sqlite', version : '>=3.6.19')
|
||||
deps_private += sqlite
|
||||
|
|
|
@ -38,15 +38,19 @@ mkMesonExecutable (finalAttrs: {
|
|||
(fileset.fileFilter (file: file.hasExt "hh") ./.)
|
||||
];
|
||||
|
||||
buildInputs = [
|
||||
nix-store
|
||||
nix-store-c
|
||||
nix-store-test-support
|
||||
# Hack for sake of the dev shell
|
||||
passthru.externalBuildInputs = [
|
||||
sqlite
|
||||
rapidcheck
|
||||
gtest
|
||||
];
|
||||
|
||||
buildInputs = finalAttrs.passthru.externalBuildInputs ++ [
|
||||
nix-store
|
||||
nix-store-c
|
||||
nix-store-test-support
|
||||
];
|
||||
|
||||
preConfigure =
|
||||
# "Inline" .version so it's not a symlink, and includes the suffix.
|
||||
# Do the meson utils, without modification.
|
||||
|
|
|
@ -38,6 +38,8 @@ VERSIONED_CHARACTERIZATION_TEST(
|
|||
"oh no \0\0\0 what was that!",
|
||||
}))
|
||||
|
||||
#ifndef DOXYGEN_SKIP
|
||||
|
||||
VERSIONED_CHARACTERIZATION_TEST(
|
||||
ServeProtoTest,
|
||||
storePath,
|
||||
|
@ -84,6 +86,8 @@ VERSIONED_CHARACTERIZATION_TEST(
|
|||
},
|
||||
}))
|
||||
|
||||
#endif
|
||||
|
||||
VERSIONED_CHARACTERIZATION_TEST(
|
||||
ServeProtoTest,
|
||||
realisation,
|
||||
|
@@ -459,21 +463,14 @@ TEST_F(ServeProtoTest, handshake_client_truncated_replay_throws)
    CharacterizationTest::readTest("handshake-to-client", [&](std::string toClientLog) {
        for (size_t len = 0; len < toClientLog.size(); ++len) {
            NullBufferedSink nullSink;
            StringSource in {
                // truncate
                toClientLog.substr(0, len)
            };
            auto substring = toClientLog.substr(0, len);
            StringSource in{substring};
            if (len < 8) {
                EXPECT_THROW(
                    ServeProto::BasicClientConnection::handshake(
                        nullSink, in, defaultVersion, "blah"),
                    EndOfFile);
                    ServeProto::BasicClientConnection::handshake(nullSink, in, defaultVersion, "blah"), EndOfFile);
            } else {
                // Not sure why cannot keep on checking for `EndOfFile`.
                EXPECT_THROW(
                    ServeProto::BasicClientConnection::handshake(
                        nullSink, in, defaultVersion, "blah"),
                    Error);
                EXPECT_THROW(ServeProto::BasicClientConnection::handshake(nullSink, in, defaultVersion, "blah"), Error);
            }
        }
    });
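The reformatted test above follows a general pattern: replay every prefix of a recorded protocol transcript and assert that the parser fails cleanly rather than hanging or reading past the end. A self-contained GTest-style sketch of that pattern, with a toy parser standing in for the real handshake code (the magic string and the 8-byte threshold are invented for illustration):

    #include <gtest/gtest.h>
    #include <stdexcept>
    #include <string>

    // Toy stand-in for the real handshake parser: insists on an 8-byte magic
    // before accepting anything.
    static void parseHandshake(const std::string & wire)
    {
        if (wire.size() < 8 || wire.substr(0, 8) != "NIXMAGIC")
            throw std::runtime_error("truncated or corrupt handshake");
    }

    TEST(Handshake, TruncatedReplayThrows)
    {
        const std::string recorded = "NIXMAGIC...rest of a valid transcript...";
        // Replay every proper prefix of the magic; each one must be rejected.
        for (size_t len = 0; len < 8; ++len)
            EXPECT_THROW(parseHandshake(recorded.substr(0, len)), std::runtime_error);
    }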
@ -39,6 +39,8 @@ VERSIONED_CHARACTERIZATION_TEST(
|
|||
"oh no \0\0\0 what was that!",
|
||||
}))
|
||||
|
||||
#ifndef DOXYGEN_SKIP
|
||||
|
||||
VERSIONED_CHARACTERIZATION_TEST(
|
||||
WorkerProtoTest,
|
||||
storePath,
|
||||
|
@ -69,6 +71,8 @@ VERSIONED_CHARACTERIZATION_TEST(
|
|||
},
|
||||
}))
|
||||
|
||||
#endif
|
||||
|
||||
VERSIONED_CHARACTERIZATION_TEST(
|
||||
WorkerProtoTest,
|
||||
derivedPath_1_29,
|
||||
|
@ -725,21 +729,14 @@ TEST_F(WorkerProtoTest, handshake_client_truncated_replay_throws)
|
|||
CharacterizationTest::readTest("handshake-to-client", [&](std::string toClientLog) {
|
||||
for (size_t len = 0; len < toClientLog.size(); ++len) {
|
||||
NullBufferedSink nullSink;
|
||||
StringSource in {
|
||||
// truncate
|
||||
toClientLog.substr(0, len)
|
||||
};
|
||||
auto substring = toClientLog.substr(0, len);
|
||||
StringSource in{substring};
|
||||
if (len < 8) {
|
||||
EXPECT_THROW(
|
||||
WorkerProto::BasicClientConnection::handshake(
|
||||
nullSink, in, defaultVersion, {}),
|
||||
EndOfFile);
|
||||
WorkerProto::BasicClientConnection::handshake(nullSink, in, defaultVersion, {}), EndOfFile);
|
||||
} else {
|
||||
// Not sure why cannot keep on checking for `EndOfFile`.
|
||||
EXPECT_THROW(
|
||||
WorkerProto::BasicClientConnection::handshake(
|
||||
nullSink, in, defaultVersion, {}),
|
||||
Error);
|
||||
EXPECT_THROW(WorkerProto::BasicClientConnection::handshake(nullSink, in, defaultVersion, {}), Error);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
|
|
@@ -991,7 +991,10 @@ Goal::Co DerivationGoal::buildDone()
        auto nixLogCommand = experimentalFeatureSettings.isEnabled(Xp::NixCommand)
            ? "nix log"
            : "nix-store -l";
        msg += fmt("For full logs, run '" ANSI_BOLD "%s %s" ANSI_NORMAL "'.",
        // The command is on a separate line for easy copying, such as with triple click.
        // This message will be indented elsewhere, so removing the indentation before the
        // command will not put it at the start of the line unfortunately.
        msg += fmt("For full logs, run:\n  " ANSI_BOLD "%s %s" ANSI_NORMAL,
            nixLogCommand,
            worker.store.printStorePath(drvPath));
    }
@ -66,6 +66,7 @@ std::vector<KeyedBuildResult> Store::buildPathsWithResults(
|
|||
worker.run(goals);
|
||||
|
||||
std::vector<KeyedBuildResult> results;
|
||||
results.reserve(state.size());
|
||||
|
||||
for (auto & [req, goalPtr] : state)
|
||||
results.emplace_back(KeyedBuildResult {
|
||||
|
|
|
@ -107,7 +107,7 @@ protected:
|
|||
public:
|
||||
|
||||
/**
|
||||
* Suspend our goal and wait until we get @ref work()-ed again.
|
||||
* Suspend our goal and wait until we get `work`-ed again.
|
||||
* `co_await`-able by @ref Co.
|
||||
*/
|
||||
struct Suspend {};
|
||||
|
@ -192,7 +192,7 @@ public:
|
|||
|
||||
bool await_ready() { return false; };
|
||||
/**
|
||||
* When we `co_await` another @ref Co-returning coroutine,
|
||||
* When we `co_await` another `Co`-returning coroutine,
|
||||
* we tell the caller of `caller_coroutine.resume()` to switch to our coroutine (@ref handle).
|
||||
* To make sure we return to the original coroutine, we set it as the continuation of our
|
||||
* coroutine. In @ref promise_type::final_awaiter we check if it's set and if so we return to it.
|
||||
|
@ -208,7 +208,7 @@ public:
|
|||
};
|
||||
|
||||
/**
|
||||
* Used on initial suspend, does the same as @ref std::suspend_always,
|
||||
* Used on initial suspend, does the same as `std::suspend_always`,
|
||||
* but asserts that everything has been set correctly.
|
||||
*/
|
||||
struct InitialSuspend {
|
||||
|
@ -269,8 +269,8 @@ public:
|
|||
};
|
||||
|
||||
/**
|
||||
* Called by compiler generated code to construct the @ref Co
|
||||
* that is returned from a @ref Co-returning coroutine.
|
||||
* Called by compiler generated code to construct the `Co`
|
||||
* that is returned from a `Co`-returning coroutine.
|
||||
*/
|
||||
Co get_return_object();
|
||||
|
||||
|
|
|
@ -208,7 +208,7 @@ public:
|
|||
const OutputsSpec & wantedOutputs, BuildMode buildMode = bmNormal);
|
||||
|
||||
/**
|
||||
* @ref SubstitutionGoal "substitution goal"
|
||||
* @ref PathSubstitutionGoal "substitution goal"
|
||||
*/
|
||||
std::shared_ptr<PathSubstitutionGoal> makePathSubstitutionGoal(const StorePath & storePath, RepairFlag repair = NoRepair, std::optional<ContentAddress> ca = std::nullopt);
|
||||
std::shared_ptr<DrvOutputSubstitutionGoal> makeDrvOutputSubstitutionGoal(const DrvOutput & id, RepairFlag repair = NoRepair, std::optional<ContentAddress> ca = std::nullopt);
|
||||
|
|
|
@@ -97,8 +97,9 @@ struct ContentAddressMethod
     * were ingested, with the fixed output case not prefixed for back
     * compat.
     *
     * @param [in] m A string that should begin with the prefix.
     * @param [out] m The remainder of the string after the prefix.
     * @param m A string that should begin with the
     * prefix. On return, the remainder of the string after the
     * prefix.
     */
    static ContentAddressMethod parsePrefix(std::string_view & m);

@@ -139,14 +140,14 @@ struct ContentAddressMethod
/**
 * We've accumulated several types of content-addressed paths over the
 * years; fixed-output derivations support multiple hash algorithms and
 * serialisation methods (flat file vs NAR). Thus, ‘ca’ has one of the
 * serialisation methods (flat file vs NAR). Thus, `ca` has one of the
 * following forms:
 *
 * - `TextIngestionMethod`:
 *   ‘text:sha256:<sha256 hash of file contents>’
 *   `text:sha256:<sha256 hash of file contents>`
 *
 * - `FixedIngestionMethod`:
 *   ‘fixed:<r?>:<hash algorithm>:<hash of file contents>’
 *   `fixed:<r?>:<hash algorithm>:<hash of file contents>`
 */
struct ContentAddress
{
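The clarified comment describes an in/out `std::string_view` parameter: the parser consumes a recognized prefix and leaves the remainder in place for the caller. A minimal sketch of that calling convention (not the real `parsePrefix` logic, and with a hypothetical helper name):

    #include <string_view>

    // Consume `prefix` from the front of `s` if present, advancing `s` past it.
    // Returns false and leaves `s` untouched otherwise.
    static bool splitPrefix(std::string_view & s, std::string_view prefix)
    {
        if (s.substr(0, prefix.size()) != prefix) return false;
        s.remove_prefix(prefix.size());
        return true;
    }

    // Usage: after the call, `rest` only contains what follows the prefix.
    //   std::string_view rest = "fixed:r:sha256:abc";
    //   if (splitPrefix(rest, "fixed:")) { /* rest == "r:sha256:abc" */ }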
@ -1017,29 +1017,31 @@ std::string hashPlaceholder(const OutputNameView outputName)
|
|||
return "/" + hashString(HashAlgorithm::SHA256, concatStrings("nix-output:", outputName)).to_string(HashFormat::Nix32, false);
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
static void rewriteDerivation(Store & store, BasicDerivation & drv, const StringMap & rewrites)
|
||||
void BasicDerivation::applyRewrites(const StringMap & rewrites)
|
||||
{
|
||||
debug("Rewriting the derivation");
|
||||
if (rewrites.empty()) return;
|
||||
|
||||
for (auto & rewrite : rewrites) {
|
||||
debug("rewriting the derivation");
|
||||
|
||||
for (auto & rewrite : rewrites)
|
||||
debug("rewriting %s as %s", rewrite.first, rewrite.second);
|
||||
}
|
||||
|
||||
drv.builder = rewriteStrings(drv.builder, rewrites);
|
||||
for (auto & arg : drv.args) {
|
||||
builder = rewriteStrings(builder, rewrites);
|
||||
for (auto & arg : args)
|
||||
arg = rewriteStrings(arg, rewrites);
|
||||
}
|
||||
|
||||
StringPairs newEnv;
|
||||
for (auto & envVar : drv.env) {
|
||||
for (auto & envVar : env) {
|
||||
auto envName = rewriteStrings(envVar.first, rewrites);
|
||||
auto envValue = rewriteStrings(envVar.second, rewrites);
|
||||
newEnv.emplace(envName, envValue);
|
||||
}
|
||||
drv.env = newEnv;
|
||||
env = std::move(newEnv);
|
||||
}
|
||||
|
||||
static void rewriteDerivation(Store & store, BasicDerivation & drv, const StringMap & rewrites)
|
||||
{
|
||||
drv.applyRewrites(rewrites);
|
||||
|
||||
auto hashModulo = hashDerivationModulo(store, Derivation(drv), true);
|
||||
for (auto & [outputName, output] : drv.outputs) {
|
||||
|
|
|
@ -298,6 +298,10 @@ struct BasicDerivation
|
|||
std::string name;
|
||||
|
||||
BasicDerivation() = default;
|
||||
BasicDerivation(BasicDerivation &&) = default;
|
||||
BasicDerivation(const BasicDerivation &) = default;
|
||||
BasicDerivation& operator=(BasicDerivation &&) = default;
|
||||
BasicDerivation& operator=(const BasicDerivation &) = default;
|
||||
virtual ~BasicDerivation() { };
|
||||
|
||||
bool isBuiltin() const;
|
||||
|
@ -321,6 +325,12 @@ struct BasicDerivation
|
|||
|
||||
static std::string_view nameFromPath(const StorePath & storePath);
|
||||
|
||||
/**
|
||||
* Apply string rewrites to the `env`, `args` and `builder`
|
||||
* fields.
|
||||
*/
|
||||
void applyRewrites(const StringMap & rewrites);
|
||||
|
||||
bool operator == (const BasicDerivation &) const = default;
|
||||
// TODO libc++ 16 (used by darwin) missing `std::map::operator <=>`, can't do yet.
|
||||
//auto operator <=> (const BasicDerivation &) const = default;
|
||||
|
|
|
@@ -153,7 +153,7 @@ struct curlFileTransfer : public FileTransfer
        template<class T>
        void fail(T && e)
        {
            failEx(std::make_exception_ptr(std::move(e)));
            failEx(std::make_exception_ptr(std::forward<T>(e)));
        }

        LambdaSink finalSink;
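The one-line change above matters because `T &&` in this template is a forwarding reference: the argument may be an lvalue, and `std::move` would unconditionally turn it into an rvalue and move from the caller's object. A small self-contained illustration of the difference, with a hypothetical wrapper function:

    #include <exception>
    #include <stdexcept>
    #include <utility>

    template<class T>
    std::exception_ptr wrap(T && e)
    {
        // std::forward keeps lvalues as lvalues (copied) and rvalues as
        // rvalues (moved); std::move here would silently gut a caller's lvalue.
        return std::make_exception_ptr(std::forward<T>(e));
    }

    int main()
    {
        std::runtime_error err("kept intact");
        auto p1 = wrap(err);                         // copies: err stays usable
        auto p2 = wrap(std::runtime_error("temp"));  // moves the temporary
        (void) p1; (void) p2;
    }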
@@ -4,6 +4,7 @@
#include "finally.hh"
#include "unix-domain-socket.hh"
#include "signals.hh"
#include "posix-fs-canonicalise.hh"

#if !defined(__linux__)
// For shelling out to lsof

@@ -763,13 +764,18 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results)
                }
            }
        }

        for (auto & path : topoSortPaths(visited)) {
            if (!dead.insert(path).second) continue;
            if (shouldDelete) {
                invalidatePathChecked(path);
                deleteFromStore(path.to_string());
                referrersCache.erase(path);
                try {
                    invalidatePathChecked(path);
                    deleteFromStore(path.to_string());
                    referrersCache.erase(path);
                } catch (PathInUse & e) {
                    // If we end up here, it's likely a new occurrence
                    // of https://github.com/NixOS/nix/issues/11923
                    printError("BUG: %s", e.what());
                }
            }
        }
    };
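The garbage-collection change switches from "any failure aborts the sweep" to "a known, narrow failure is reported and the sweep continues". A generic sketch of that best-effort loop shape, with placeholder types instead of the real store API (the `PathInUse` and `deletePath` names here are illustrative):

    #include <cstdio>
    #include <stdexcept>
    #include <string>
    #include <vector>

    struct PathInUse : std::runtime_error
    {
        using std::runtime_error::runtime_error;
    };

    // Hypothetical deleter; here it just pretends some paths are still held.
    static void deletePath(const std::string & path)
    {
        if (path.size() % 2 == 1)
            throw PathInUse("still referenced: " + path);
    }

    // Best-effort sweep: the expected, recoverable failure is reported and
    // skipped instead of aborting the whole collection.
    static void sweep(const std::vector<std::string> & deadPaths)
    {
        for (auto & path : deadPaths) {
            try {
                deletePath(path);
            } catch (PathInUse & e) {
                std::fprintf(stderr, "skipping: %s\n", e.what());
            }
        }
    }

    int main()
    {
        sweep({"/store/aaaa-foo", "/store/bbb-bar"});
    }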
@ -1,6 +1,8 @@
|
|||
#pragma once
|
||||
/**
|
||||
* @file Reusable serialisers for serialization container types in a
|
||||
* @file
|
||||
*
|
||||
* Reusable serialisers for serialization container types in a
|
||||
* length-prefixed manner.
|
||||
*
|
||||
* Used by both the Worker and Serve protocols.
|
||||
|
@ -28,25 +30,22 @@ struct StoreDirConfig;
|
|||
template<class Inner, typename T>
|
||||
struct LengthPrefixedProtoHelper;
|
||||
|
||||
/*!
|
||||
* \typedef LengthPrefixedProtoHelper::S
|
||||
*
|
||||
* Read this as simply `using S = Inner::Serialise;`.
|
||||
*
|
||||
* It would be nice to use that directly, but C++ doesn't seem to allow
|
||||
* it. The `typename` keyword needed to refer to `Inner` seems to greedy
|
||||
* (low precedence), and then C++ complains that `Serialise` is not a
|
||||
* type parameter but a real type.
|
||||
*
|
||||
* Making this `S` alias seems to be the only way to avoid these issues.
|
||||
*/
|
||||
|
||||
#define LENGTH_PREFIXED_PROTO_HELPER(Inner, T) \
|
||||
struct LengthPrefixedProtoHelper< Inner, T > \
|
||||
{ \
|
||||
static T read(const StoreDirConfig & store, typename Inner::ReadConn conn); \
|
||||
static void write(const StoreDirConfig & store, typename Inner::WriteConn conn, const T & str); \
|
||||
private: \
|
||||
/*! \
|
||||
* Read this as simply `using S = Inner::Serialise;`. \
|
||||
* \
|
||||
* It would be nice to use that directly, but C++ doesn't seem to allow \
|
||||
* it. The `typename` keyword needed to refer to `Inner` seems to greedy \
|
||||
* (low precedence), and then C++ complains that `Serialise` is not a \
|
||||
* type parameter but a real type. \
|
||||
* \
|
||||
* Making this `S` alias seems to be the only way to avoid these issues. \
|
||||
*/ \
|
||||
template<typename U> using S = typename Inner::template Serialise<U>; \
|
||||
}
|
||||
|
||||
|
@ -60,9 +59,8 @@ template<class Inner, typename... Ts>
|
|||
LENGTH_PREFIXED_PROTO_HELPER(Inner, std::tuple<Ts...>);
|
||||
|
||||
template<class Inner, typename K, typename V>
|
||||
#define _X std::map<K, V>
|
||||
LENGTH_PREFIXED_PROTO_HELPER(Inner, _X);
|
||||
#undef _X
|
||||
#define LENGTH_PREFIXED_PROTO_HELPER_X std::map<K, V>
|
||||
LENGTH_PREFIXED_PROTO_HELPER(Inner, LENGTH_PREFIXED_PROTO_HELPER_X);
|
||||
|
||||
template<class Inner, typename T>
|
||||
std::vector<T>
|
||||
|
|
|
@ -1,103 +0,0 @@
|
|||
libraries += libstore
|
||||
|
||||
libstore_NAME = libnixstore
|
||||
|
||||
libstore_DIR := $(d)
|
||||
|
||||
libstore_SOURCES := $(wildcard $(d)/*.cc $(d)/builtins/*.cc $(d)/build/*.cc)
|
||||
ifdef HOST_UNIX
|
||||
libstore_SOURCES += $(wildcard $(d)/unix/*.cc $(d)/unix/build/*.cc)
|
||||
endif
|
||||
ifdef HOST_LINUX
|
||||
libstore_SOURCES += $(wildcard $(d)/linux/*.cc)
|
||||
endif
|
||||
ifdef HOST_WINDOWS
|
||||
libstore_SOURCES += $(wildcard $(d)/windows/*.cc)
|
||||
endif
|
||||
|
||||
libstore_LIBS = libutil
|
||||
|
||||
libstore_LDFLAGS += $(SQLITE3_LIBS) $(LIBCURL_LIBS) $(THREAD_LDFLAGS)
|
||||
ifdef HOST_LINUX
|
||||
libstore_LDFLAGS += -ldl
|
||||
endif
|
||||
ifdef HOST_WINDOWS
|
||||
libstore_LDFLAGS += -lws2_32
|
||||
endif
|
||||
|
||||
$(foreach file,$(libstore_FILES),$(eval $(call install-data-in,$(d)/$(file),$(datadir)/nix/sandbox)))
|
||||
|
||||
ifeq ($(ENABLE_S3), 1)
|
||||
libstore_LDFLAGS += -laws-cpp-sdk-transfer -laws-cpp-sdk-s3 -laws-cpp-sdk-core -laws-crt-cpp
|
||||
endif
|
||||
|
||||
ifdef HOST_SOLARIS
|
||||
libstore_LDFLAGS += -lsocket
|
||||
endif
|
||||
|
||||
ifeq ($(HAVE_SECCOMP), 1)
|
||||
libstore_LDFLAGS += $(LIBSECCOMP_LIBS)
|
||||
endif
|
||||
|
||||
# Not just for this library itself, but also for downstream libraries using this library
|
||||
|
||||
INCLUDE_libstore := -I $(d) -I $(d)/build
|
||||
ifdef HOST_UNIX
|
||||
INCLUDE_libstore += -I $(d)/unix -I $(d)/unix/build
|
||||
endif
|
||||
ifdef HOST_LINUX
|
||||
INCLUDE_libstore += -I $(d)/linux
|
||||
endif
|
||||
ifdef HOST_WINDOWS
|
||||
INCLUDE_libstore += -I $(d)/windows
|
||||
endif
|
||||
|
||||
ifdef HOST_WINDOWS
|
||||
NIX_ROOT = N:\\\\
|
||||
else
|
||||
NIX_ROOT =
|
||||
endif
|
||||
|
||||
# Prefix all but `NIX_STORE_DIR`, since we aren't doing a local store
|
||||
# yet so a "logical" store dir that is the same as unix is preferred.
|
||||
#
|
||||
# Also, it keeps the unit tests working.
|
||||
|
||||
libstore_CXXFLAGS += \
|
||||
$(INCLUDE_libutil) $(INCLUDE_libstore) $(INCLUDE_libstore) \
|
||||
-DNIX_PREFIX=\"$(NIX_ROOT)$(prefix)\" \
|
||||
-DNIX_STORE_DIR=\"$(storedir)\" \
|
||||
-DNIX_DATA_DIR=\"$(NIX_ROOT)$(datadir)\" \
|
||||
-DNIX_STATE_DIR=\"$(NIX_ROOT)$(localstatedir)/nix\" \
|
||||
-DNIX_LOG_DIR=\"$(NIX_ROOT)$(localstatedir)/log/nix\" \
|
||||
-DNIX_CONF_DIR=\"$(NIX_ROOT)$(sysconfdir)/nix\" \
|
||||
-DNIX_MAN_DIR=\"$(NIX_ROOT)$(mandir)\" \
|
||||
-DLSOF=\"$(NIX_ROOT)$(lsof)\"
|
||||
|
||||
ifeq ($(embedded_sandbox_shell),yes)
|
||||
libstore_CXXFLAGS += -DSANDBOX_SHELL=\"__embedded_sandbox_shell__\"
|
||||
|
||||
$(d)/unix/build/local-derivation-goal.cc: $(d)/unix/embedded-sandbox-shell.gen.hh
|
||||
|
||||
$(d)/unix/embedded-sandbox-shell.gen.hh: $(sandbox_shell)
|
||||
$(trace-gen) hexdump -v -e '1/1 "0x%x," "\n"' < $< > $@.tmp
|
||||
@mv $@.tmp $@
|
||||
else
|
||||
ifneq ($(sandbox_shell),)
|
||||
libstore_CXXFLAGS += -DSANDBOX_SHELL="\"$(sandbox_shell)\""
|
||||
endif
|
||||
endif
|
||||
|
||||
$(d)/local-store.cc: $(d)/schema.sql.gen.hh $(d)/ca-specific-schema.sql.gen.hh
|
||||
|
||||
$(d)/unix/build.cc:
|
||||
|
||||
clean-files += $(d)/schema.sql.gen.hh $(d)/ca-specific-schema.sql.gen.hh
|
||||
|
||||
$(eval $(call install-file-in, $(buildprefix)$(d)/nix-store.pc, $(libdir)/pkgconfig, 0644))
|
||||
|
||||
$(foreach i, $(wildcard src/libstore/builtins/*.hh), \
|
||||
$(eval $(call install-file-in, $(i), $(includedir)/nix/builtins, 0644)))
|
||||
|
||||
$(foreach i, $(wildcard src/libstore/build/*.hh), \
|
||||
$(eval $(call install-file-in, $(i), $(includedir)/nix/build, 0644)))
|
|
@ -33,7 +33,7 @@ Machine::Machine(
|
|||
systemTypes(systemTypes),
|
||||
sshKey(sshKey),
|
||||
maxJobs(maxJobs),
|
||||
speedFactor(speedFactor == 0.0f ? 1.0f : std::move(speedFactor)),
|
||||
speedFactor(speedFactor == 0.0f ? 1.0f : speedFactor),
|
||||
supportedFeatures(supportedFeatures),
|
||||
mandatoryFeatures(mandatoryFeatures),
|
||||
sshPublicHostKey(sshPublicHostKey)
|
||||
|
|
|
@@ -32,12 +32,12 @@ struct Machine {

    /**
     * @return Whether `features` is a subset of the union of `supportedFeatures` and
     * `mandatoryFeatures`
     * `mandatoryFeatures`.
     */
    bool allSupported(const std::set<std::string> & features) const;

    /**
     * @return @Whether `mandatoryFeatures` is a subset of `features`
     * @return Whether `mandatoryFeatures` is a subset of `features`.
     */
    bool mandatoryMet(const std::set<std::string> & features) const;
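Both predicates documented above are plain subset tests over string sets. With sorted containers such as `std::set`, `std::includes` expresses this directly; a sketch with hypothetical free functions (the real ones are methods on `Machine`):

    #include <algorithm>
    #include <set>
    #include <string>

    using Features = std::set<std::string>;

    // "all supported": every requested feature is in supported ∪ mandatory.
    static bool allSupported(const Features & supported, const Features & mandatory,
                             const Features & requested)
    {
        Features u = supported;
        u.insert(mandatory.begin(), mandatory.end());
        return std::includes(u.begin(), u.end(), requested.begin(), requested.end());
    }

    // "mandatory met": every mandatory feature appears among the requested ones.
    static bool mandatoryMet(const Features & mandatory, const Features & requested)
    {
        return std::includes(requested.begin(), requested.end(),
                             mandatory.begin(), mandatory.end());
    }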
@ -34,6 +34,8 @@ subdir('build-utils-meson/subprojects')
|
|||
run_command('ln', '-s',
|
||||
meson.project_build_root() / '__nothing_link_target',
|
||||
meson.project_build_root() / '__nothing_symlink',
|
||||
# native doesn't allow dangling symlinks, which the tests require
|
||||
env : { 'MSYS' : 'winsymlinks:lnk' },
|
||||
check : true,
|
||||
)
|
||||
can_link_symlink = run_command('ln',
|
||||
|
@ -74,6 +76,12 @@ if host_machine.system() == 'darwin'
|
|||
deps_other += [sandbox]
|
||||
endif
|
||||
|
||||
if host_machine.system() == 'windows'
|
||||
wsock32 = cxx.find_library('wsock32')
|
||||
deps_other += [wsock32]
|
||||
endif
|
||||
|
||||
subdir('build-utils-meson/libatomic')
|
||||
subdir('build-utils-meson/threads')
|
||||
|
||||
boost = dependency(
|
||||
|
@ -410,6 +418,7 @@ foreach name, value : cpp_str_defines
|
|||
endforeach
|
||||
|
||||
subdir('build-utils-meson/export-all-symbols')
|
||||
subdir('build-utils-meson/windows-version')
|
||||
|
||||
this_library = library(
|
||||
'nixstore',
|
||||
|
|
|
@ -1,10 +0,0 @@
|
|||
prefix=@prefix@
|
||||
libdir=@libdir@
|
||||
includedir=@includedir@
|
||||
|
||||
Name: Nix
|
||||
Description: Nix Package Manager
|
||||
Version: @PACKAGE_VERSION@
|
||||
Requires: nix-util
|
||||
Libs: -L${libdir} -lnixstore
|
||||
Cflags: -I${includedir}/nix -std=c++2a
|
|
@ -153,7 +153,10 @@ namespace nlohmann {
|
|||
|
||||
using namespace nix;
|
||||
|
||||
OutputsSpec adl_serializer<OutputsSpec>::from_json(const json & json) {
|
||||
#ifndef DOXYGEN_SKIP
|
||||
|
||||
OutputsSpec adl_serializer<OutputsSpec>::from_json(const json & json)
|
||||
{
|
||||
auto names = json.get<StringSet>();
|
||||
if (names == StringSet({"*"}))
|
||||
return OutputsSpec::All {};
|
||||
|
@ -161,7 +164,8 @@ OutputsSpec adl_serializer<OutputsSpec>::from_json(const json & json) {
|
|||
return OutputsSpec::Names { std::move(names) };
|
||||
}
|
||||
|
||||
void adl_serializer<OutputsSpec>::to_json(json & json, OutputsSpec t) {
|
||||
void adl_serializer<OutputsSpec>::to_json(json & json, OutputsSpec t)
|
||||
{
|
||||
std::visit(overloaded {
|
||||
[&](const OutputsSpec::All &) {
|
||||
json = std::vector<std::string>({"*"});
|
||||
|
@ -172,8 +176,8 @@ void adl_serializer<OutputsSpec>::to_json(json & json, OutputsSpec t) {
|
|||
}, t.raw);
|
||||
}
|
||||
|
||||
|
||||
ExtendedOutputsSpec adl_serializer<ExtendedOutputsSpec>::from_json(const json & json) {
|
||||
ExtendedOutputsSpec adl_serializer<ExtendedOutputsSpec>::from_json(const json & json)
|
||||
{
|
||||
if (json.is_null())
|
||||
return ExtendedOutputsSpec::Default {};
|
||||
else {
|
||||
|
@ -181,7 +185,8 @@ ExtendedOutputsSpec adl_serializer<ExtendedOutputsSpec>::from_json(const json &
|
|||
}
|
||||
}
|
||||
|
||||
void adl_serializer<ExtendedOutputsSpec>::to_json(json & json, ExtendedOutputsSpec t) {
|
||||
void adl_serializer<ExtendedOutputsSpec>::to_json(json & json, ExtendedOutputsSpec t)
|
||||
{
|
||||
std::visit(overloaded {
|
||||
[&](const ExtendedOutputsSpec::Default &) {
|
||||
json = nullptr;
|
||||
|
@ -192,4 +197,6 @@ void adl_serializer<ExtendedOutputsSpec>::to_json(json & json, ExtendedOutputsSp
|
|||
}, t.raw);
|
||||
}
|
||||
|
||||
#endif
|
||||
|
||||
}
|
||||
|
|
|
@@ -176,17 +176,18 @@ struct ValidPathInfo : UnkeyedValidPathInfo {
     */
    Strings shortRefs() const;

    ValidPathInfo(const ValidPathInfo & other) = default;

    ValidPathInfo(StorePath && path, UnkeyedValidPathInfo info) : UnkeyedValidPathInfo(info), path(std::move(path)) { };
    ValidPathInfo(const StorePath & path, UnkeyedValidPathInfo info) : UnkeyedValidPathInfo(info), path(path) { };

    ValidPathInfo(const Store & store,
        std::string_view name, ContentAddressWithReferences && ca, Hash narHash);

    virtual ~ValidPathInfo() { }
};

static_assert(std::is_move_assignable_v<ValidPathInfo>);
static_assert(std::is_copy_assignable_v<ValidPathInfo>);
static_assert(std::is_copy_constructible_v<ValidPathInfo>);
static_assert(std::is_move_constructible_v<ValidPathInfo>);

using ValidPathInfos = std::map<StorePath, ValidPathInfo>;

}
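The `static_assert`s added here pin down that `ValidPathInfo` stays copyable and movable even as constructors are added or removed. The same guard works for any value type; a tiny self-contained example with an invented `Record` type:

    #include <string>
    #include <type_traits>
    #include <utility>

    struct Record
    {
        std::string name;
        int size = 0;

        // User-declared constructors do not suppress the copy/move special
        // members here; the asserts below would flag it if a later change did.
        Record() = default;
        Record(std::string name, int size) : name(std::move(name)), size(size) {}
    };

    static_assert(std::is_copy_constructible_v<Record>);
    static_assert(std::is_copy_assignable_v<Record>);
    static_assert(std::is_move_constructible_v<Record>);
    static_assert(std::is_move_assignable_v<Record>);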
@ -37,6 +37,7 @@ DerivedPath StorePathWithOutputs::toDerivedPath() const
|
|||
std::vector<DerivedPath> toDerivedPaths(const std::vector<StorePathWithOutputs> ss)
|
||||
{
|
||||
std::vector<DerivedPath> reqs;
|
||||
reqs.reserve(ss.size());
|
||||
for (auto & s : ss) reqs.push_back(s.toDerivedPath());
|
||||
return reqs;
|
||||
}
|
||||
|
|
|
@ -81,7 +81,7 @@ typedef std::set<StorePath> StorePathSet;
|
|||
typedef std::vector<StorePath> StorePaths;
|
||||
|
||||
/**
|
||||
* The file extension of \ref Derivation derivations when serialized
|
||||
* The file extension of \ref nix::Derivation derivations when serialized
|
||||
* into store objects.
|
||||
*/
|
||||
constexpr std::string_view drvExtension = ".drv";
|
||||
|
|
|
@ -1,6 +1,8 @@
|
|||
#pragma once
|
||||
/**
|
||||
* @file Implementation of Profiles.
|
||||
* @file
|
||||
*
|
||||
* Implementation of Profiles.
|
||||
*
|
||||
* See the manual for additional information.
|
||||
*/
|
||||
|
|
|
@ -40,7 +40,7 @@ struct RemoteStore::ConnectionHandle
|
|||
: handle(std::move(handle))
|
||||
{ }
|
||||
|
||||
ConnectionHandle(ConnectionHandle && h)
|
||||
ConnectionHandle(ConnectionHandle && h) noexcept
|
||||
: handle(std::move(h.handle))
|
||||
{ }
|
||||
|
||||
|
|
|
@@ -29,11 +29,10 @@ SERVE_USE_LENGTH_PREFIX_SERIALISER(template<typename T>, std::vector<T>)
SERVE_USE_LENGTH_PREFIX_SERIALISER(template<typename T>, std::set<T>)
SERVE_USE_LENGTH_PREFIX_SERIALISER(template<typename... Ts>, std::tuple<Ts...>)

#define COMMA_ ,
#define SERVE_USE_LENGTH_PREFIX_SERIALISER_COMMA ,
SERVE_USE_LENGTH_PREFIX_SERIALISER(
    template<typename K COMMA_ typename V>,
    std::map<K COMMA_ V>)
#undef COMMA_
    template<typename K SERVE_USE_LENGTH_PREFIX_SERIALISER_COMMA typename V>,
    std::map<K SERVE_USE_LENGTH_PREFIX_SERIALISER_COMMA V>)

/**
 * Use `CommonProto` where possible.
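The `*_COMMA` macros exist because the preprocessor splits macro arguments on every top-level comma, so a type like `std::map<K, V>` cannot be passed to a fixed-arity macro directly. A self-contained illustration of the problem and of this workaround, using an invented `DECLARE_HOLDER` macro:

    #include <map>
    #include <vector>

    // A macro that expects exactly two arguments: a name and a type.
    #define DECLARE_HOLDER(Name, T) \
        struct Name { T value; }

    // Fine: no top-level comma inside the type argument.
    DECLARE_HOLDER(VecHolder, std::vector<int>);

    // DECLARE_HOLDER(MapHolder, std::map<int, int>);  // error: three arguments

    // The workaround used above: hide the comma behind a macro that is only
    // expanded after argument splitting has already happened.
    #define COMMA ,
    DECLARE_HOLDER(MapHolder, std::map<int COMMA int>);
    #undef COMMA

    int main()
    {
        MapHolder h;
        h.value[1] = 2;
        return h.value.size() == 1 ? 0 : 1;
    }

Giving the helper macro a unique, library-prefixed name (as the hunk does) avoids collisions when several headers play the same trick.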
@@ -42,7 +42,8 @@ struct SQLite
    SQLite(const Path & path, SQLiteOpenMode mode = SQLiteOpenMode::Normal);
    SQLite(const SQLite & from) = delete;
    SQLite& operator = (const SQLite & from) = delete;
    SQLite& operator = (SQLite && from) { db = from.db; from.db = 0; return *this; }
    // NOTE: This is noexcept since we are only copying and assigning raw pointers.
    SQLite& operator = (SQLite && from) noexcept { db = from.db; from.db = 0; return *this; }
    ~SQLite();
    operator sqlite3 * () { return db; }
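Marking the move assignment `noexcept` is not just documentation: standard containers prefer copying over a potentially throwing move during reallocation (when a copy exists at all), and type-trait checks can rely on it. A sketch of the handle-wrapper pattern with hypothetical names (ownership and cleanup omitted for brevity):

    #include <type_traits>
    #include <utility>

    // Minimal move-only wrapper around a raw handle.
    struct Handle
    {
        void * raw = nullptr;

        Handle() = default;
        Handle(const Handle &) = delete;
        Handle & operator=(const Handle &) = delete;

        // Swapping raw pointers cannot throw, so advertise that.
        Handle(Handle && other) noexcept : raw(std::exchange(other.raw, nullptr)) {}
        Handle & operator=(Handle && other) noexcept
        {
            std::swap(raw, other.raw);
            return *this;
        }
    };

    static_assert(std::is_nothrow_move_constructible_v<Handle>);
    static_assert(std::is_nothrow_move_assignable_v<Handle>);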
@ -59,7 +59,7 @@ public:
|
|||
/**
|
||||
* @param command The command (arg vector) to execute.
|
||||
*
|
||||
* @param extraSShArgs Extra args to pass to SSH (not the command to
|
||||
* @param extraSshArgs Extra arguments to pass to SSH (not the command to
|
||||
* execute). Will not be used when "fake SSHing" to the local
|
||||
* machine.
|
||||
*/
|
||||
|
|
|
@ -260,11 +260,11 @@ public:
|
|||
|
||||
/**
|
||||
* Query the set of all valid paths. Note that for some store
|
||||
* backends, the name part of store paths may be replaced by 'x'
|
||||
* (i.e. you'll get /nix/store/<hash>-x rather than
|
||||
* /nix/store/<hash>-<name>). Use queryPathInfo() to obtain the
|
||||
* backends, the name part of store paths may be replaced by `x`
|
||||
* (i.e. you'll get `/nix/store/<hash>-x` rather than
|
||||
* `/nix/store/<hash>-<name>`). Use queryPathInfo() to obtain the
|
||||
* full store path. FIXME: should return a set of
|
||||
* std::variant<StorePath, HashPart> to get rid of this hack.
|
||||
* `std::variant<StorePath, HashPart>` to get rid of this hack.
|
||||
*/
|
||||
virtual StorePathSet queryAllValidPaths()
|
||||
{ unsupported("queryAllValidPaths"); }
|
||||
|
|
|
@ -59,20 +59,20 @@ struct StoreDirConfig : public Config
|
|||
std::string showPaths(const StorePathSet & paths);
|
||||
|
||||
/**
|
||||
* @return true if ‘path’ is in the Nix store (but not the Nix
|
||||
* @return true if *path* is in the Nix store (but not the Nix
|
||||
* store itself).
|
||||
*/
|
||||
bool isInStore(PathView path) const;
|
||||
|
||||
/**
|
||||
* @return true if ‘path’ is a store path, i.e. a direct child of the
|
||||
* @return true if *path* is a store path, i.e. a direct child of the
|
||||
* Nix store.
|
||||
*/
|
||||
bool isStorePath(std::string_view path) const;
|
||||
|
||||
/**
|
||||
* Split a path like /nix/store/<hash>-<name>/<bla> into
|
||||
* /nix/store/<hash>-<name> and /<bla>.
|
||||
* Split a path like `/nix/store/<hash>-<name>/<bla>` into
|
||||
* `/nix/store/<hash>-<name>` and `/<bla>`.
|
||||
*/
|
||||
std::pair<StorePath, Path> toStorePath(PathView path) const;
|
||||
|
||||
|
|
|
@ -13,31 +13,31 @@ namespace nix {
|
|||
*
|
||||
* Supported values are:
|
||||
*
|
||||
* - ‘local’: The Nix store in /nix/store and database in
|
||||
* - `local`: The Nix store in /nix/store and database in
|
||||
* /nix/var/nix/db, accessed directly.
|
||||
*
|
||||
* - ‘daemon’: The Nix store accessed via a Unix domain socket
|
||||
* - `daemon`: The Nix store accessed via a Unix domain socket
|
||||
* connection to nix-daemon.
|
||||
*
|
||||
* - ‘unix://<path>’: The Nix store accessed via a Unix domain socket
|
||||
* connection to nix-daemon, with the socket located at <path>.
|
||||
* - `unix://<path>`: The Nix store accessed via a Unix domain socket
|
||||
* connection to nix-daemon, with the socket located at `<path>`.
|
||||
*
|
||||
* - ‘auto’ or ‘’: Equivalent to ‘local’ or ‘daemon’ depending on
|
||||
* - `auto` or ``: Equivalent to `local` or `daemon` depending on
|
||||
* whether the user has write access to the local Nix
|
||||
* store/database.
|
||||
*
|
||||
* - ‘file://<path>’: A binary cache stored in <path>.
|
||||
* - `file://<path>`: A binary cache stored in `<path>`.
|
||||
*
|
||||
* - ‘https://<path>’: A binary cache accessed via HTTP.
|
||||
* - `https://<path>`: A binary cache accessed via HTTP.
|
||||
*
|
||||
* - ‘s3://<path>’: A writable binary cache stored on Amazon's Simple
|
||||
* - `s3://<path>`: A writable binary cache stored on Amazon's Simple
|
||||
* Storage Service.
|
||||
*
|
||||
* - ‘ssh://[user@]<host>’: A remote Nix store accessed by running
|
||||
* ‘nix-store --serve’ via SSH.
|
||||
* - `ssh://[user@]<host>`: A remote Nix store accessed by running
|
||||
* `nix-store --serve` via SSH.
|
||||
*
|
||||
* You can pass parameters to the store type by appending
|
||||
* ‘?key=value&key=value&...’ to the URI.
|
||||
* `?key=value&key=value&...` to the URI.
|
||||
*/
|
||||
struct StoreReference
|
||||
{
|
||||
|
|
|
@ -78,7 +78,7 @@ struct WorkerProto::BasicClientConnection : WorkerProto::BasicConnection
|
|||
/**
|
||||
* Establishes connection, negotiating version.
|
||||
*
|
||||
* @return the minimum version supported by both sides and the set
|
||||
* @return The minimum version supported by both sides and the set
|
||||
* of protocol features supported by both sides.
|
||||
*
|
||||
* @param to Taken by reference to allow for various error handling
|
||||
|
@ -87,9 +87,9 @@ struct WorkerProto::BasicClientConnection : WorkerProto::BasicConnection
|
|||
* @param from Taken by reference to allow for various error
|
||||
* handling mechanisms.
|
||||
*
|
||||
* @param localVersion Our version which is sent over
|
||||
* @param localVersion Our version which is sent over.
|
||||
*
|
||||
* @param features The protocol features that we support
|
||||
* @param supportedFeatures The protocol features that we support.
|
||||
*/
|
||||
// FIXME: this should probably be a constructor.
|
||||
static std::tuple<Version, std::set<Feature>> handshake(
|
||||
|
@ -141,7 +141,7 @@ struct WorkerProto::BasicServerConnection : WorkerProto::BasicConnection
|
|||
/**
|
||||
* Establishes connection, negotiating version.
|
||||
*
|
||||
* @return the version provided by the other side of the
|
||||
* @return The version provided by the other side of the
|
||||
* connection.
|
||||
*
|
||||
* @param to Taken by reference to allow for various error handling
|
||||
|
@ -150,9 +150,9 @@ struct WorkerProto::BasicServerConnection : WorkerProto::BasicConnection
|
|||
* @param from Taken by reference to allow for various error
|
||||
* handling mechanisms.
|
||||
*
|
||||
* @param localVersion Our version which is sent over
|
||||
* @param localVersion Our version which is sent over.
|
||||
*
|
||||
* @param features The protocol features that we support
|
||||
* @param supportedFeatures The protocol features that we support.
|
||||
*/
|
||||
// FIXME: this should probably be a constructor.
|
||||
static std::tuple<Version, std::set<Feature>> handshake(
|
||||
|
|
|
@ -29,11 +29,10 @@ WORKER_USE_LENGTH_PREFIX_SERIALISER(template<typename T>, std::vector<T>)
|
|||
WORKER_USE_LENGTH_PREFIX_SERIALISER(template<typename T>, std::set<T>)
|
||||
WORKER_USE_LENGTH_PREFIX_SERIALISER(template<typename... Ts>, std::tuple<Ts...>)
|
||||
|
||||
#define COMMA_ ,
|
||||
#define WORKER_USE_LENGTH_PREFIX_SERIALISER_COMMA ,
|
||||
WORKER_USE_LENGTH_PREFIX_SERIALISER(
|
||||
template<typename K COMMA_ typename V>,
|
||||
std::map<K COMMA_ V>)
|
||||
#undef COMMA_
|
||||
template<typename K WORKER_USE_LENGTH_PREFIX_SERIALISER_COMMA typename V>,
|
||||
std::map<K WORKER_USE_LENGTH_PREFIX_SERIALISER_COMMA V>)
|
||||
|
||||
/**
|
||||
* Use `CommonProto` where possible.
|
||||
|
|
|
@ -1,18 +0,0 @@
|
|||
libraries += libutilc
|
||||
|
||||
libutilc_NAME = libnixutilc
|
||||
|
||||
libutilc_DIR := $(d)
|
||||
|
||||
libutilc_SOURCES := $(wildcard $(d)/*.cc)
|
||||
|
||||
# Not just for this library itself, but also for downstream libraries using this library
|
||||
|
||||
INCLUDE_libutilc := -I $(d)
|
||||
libutilc_CXXFLAGS += $(INCLUDE_libutil) $(INCLUDE_libutilc)
|
||||
|
||||
libutilc_LIBS = libutil
|
||||
|
||||
libutilc_LDFLAGS += $(THREAD_LDFLAGS)
|
||||
|
||||
libutilc_FORCE_INSTALL := 1
|
|
@ -63,6 +63,7 @@ headers = [config_h] + files(
|
|||
headers += files('nix_api_util_internal.h')
|
||||
|
||||
subdir('build-utils-meson/export-all-symbols')
|
||||
subdir('build-utils-meson/windows-version')
|
||||
|
||||
this_library = library(
|
||||
'nixutilc',
|
||||
|
|
Some files were not shown because too many files have changed in this diff.