
Merge remote-tracking branch 'origin/master' into tarball-cache

Eelco Dolstra 2023-12-14 13:31:29 +01:00
commit 1fce12ec32
364 changed files with 5262 additions and 3101 deletions


@ -80,7 +80,7 @@ SingleDerivedPath SingleBuiltPath::discardOutputPath() const
);
}
nlohmann::json BuiltPath::Built::toJSON(const Store & store) const
nlohmann::json BuiltPath::Built::toJSON(const StoreDirConfig & store) const
{
nlohmann::json res;
res["drvPath"] = drvPath->toJSON(store);
@ -90,7 +90,7 @@ nlohmann::json BuiltPath::Built::toJSON(const Store & store) const
return res;
}
nlohmann::json SingleBuiltPath::Built::toJSON(const Store & store) const
nlohmann::json SingleBuiltPath::Built::toJSON(const StoreDirConfig & store) const
{
nlohmann::json res;
res["drvPath"] = drvPath->toJSON(store);
@ -100,14 +100,14 @@ nlohmann::json SingleBuiltPath::Built::toJSON(const Store & store) const
return res;
}
nlohmann::json SingleBuiltPath::toJSON(const Store & store) const
nlohmann::json SingleBuiltPath::toJSON(const StoreDirConfig & store) const
{
return std::visit([&](const auto & buildable) {
return buildable.toJSON(store);
}, raw());
}
nlohmann::json BuiltPath::toJSON(const Store & store) const
nlohmann::json BuiltPath::toJSON(const StoreDirConfig & store) const
{
return std::visit([&](const auto & buildable) {
return buildable.toJSON(store);


@ -14,9 +14,9 @@ struct SingleBuiltPathBuilt {
SingleDerivedPathBuilt discardOutputPath() const;
std::string to_string(const Store & store) const;
static SingleBuiltPathBuilt parse(const Store & store, std::string_view, std::string_view);
nlohmann::json toJSON(const Store & store) const;
std::string to_string(const StoreDirConfig & store) const;
static SingleBuiltPathBuilt parse(const StoreDirConfig & store, std::string_view, std::string_view);
nlohmann::json toJSON(const StoreDirConfig & store) const;
DECLARE_CMP(SingleBuiltPathBuilt);
};
@ -41,8 +41,8 @@ struct SingleBuiltPath : _SingleBuiltPathRaw {
SingleDerivedPath discardOutputPath() const;
static SingleBuiltPath parse(const Store & store, std::string_view);
nlohmann::json toJSON(const Store & store) const;
static SingleBuiltPath parse(const StoreDirConfig & store, std::string_view);
nlohmann::json toJSON(const StoreDirConfig & store) const;
};
static inline ref<SingleBuiltPath> staticDrv(StorePath drvPath)
@ -59,9 +59,9 @@ struct BuiltPathBuilt {
ref<SingleBuiltPath> drvPath;
std::map<std::string, StorePath> outputs;
std::string to_string(const Store & store) const;
static BuiltPathBuilt parse(const Store & store, std::string_view, std::string_view);
nlohmann::json toJSON(const Store & store) const;
std::string to_string(const StoreDirConfig & store) const;
static BuiltPathBuilt parse(const StoreDirConfig & store, std::string_view, std::string_view);
nlohmann::json toJSON(const StoreDirConfig & store) const;
DECLARE_CMP(BuiltPathBuilt);
};
@ -89,7 +89,7 @@ struct BuiltPath : _BuiltPathRaw {
StorePathSet outPaths() const;
RealisedPath::Set toRealisedPaths(Store & store) const;
nlohmann::json toJSON(const Store & store) const;
nlohmann::json toJSON(const StoreDirConfig & store) const;
};
typedef std::vector<BuiltPath> BuiltPaths;


@ -1,4 +1,5 @@
#include "command.hh"
#include "markdown.hh"
#include "store-api.hh"
#include "local-fs-store.hh"
#include "derivations.hh"
@ -34,6 +35,19 @@ nlohmann::json NixMultiCommand::toJSON()
return MultiCommand::toJSON();
}
void NixMultiCommand::run()
{
if (!command) {
std::set<std::string> subCommandTextLines;
for (auto & [name, _] : commands)
subCommandTextLines.insert(fmt("- `%s`", name));
std::string markdownError = fmt("`nix %s` requires a sub-command. Available sub-commands:\n\n%s\n",
commandName, concatStringsSep("\n", subCommandTextLines));
throw UsageError(renderMarkdownToTerminal(markdownError));
}
command->second->run();
}
StoreCommand::StoreCommand()
{
}


@ -26,9 +26,13 @@ static constexpr Command::Category catNixInstallation = 102;
static constexpr auto installablesCategory = "Options that change the interpretation of [installables](@docroot@/command-ref/new-cli/nix.md#installables)";
struct NixMultiCommand : virtual MultiCommand, virtual Command
struct NixMultiCommand : MultiCommand, virtual Command
{
nlohmann::json toJSON() override;
using MultiCommand::MultiCommand;
virtual void run() override;
};
// For the overloaded run methods


@ -141,7 +141,7 @@ MixEvalArgs::MixEvalArgs()
.longName = "eval-store",
.description =
R"(
The [URL of the Nix store](@docroot@/command-ref/new-cli/nix3-help-stores.md#store-url-format)
The [URL of the Nix store](@docroot@/store/types/index.md#store-url-format)
to use for evaluation, i.e. to store derivations (`.drv` files) and inputs referenced by them.
)",
.category = category,


@ -260,9 +260,10 @@ void SourceExprCommand::completeInstallable(AddCompletions & completions, std::s
evalSettings.pureEval = false;
auto state = getEvalState();
Expr *e = state->parseExprFromFile(
resolveExprPath(state->checkSourcePath(lookupFileArg(*state, *file)))
);
auto e =
state->parseExprFromFile(
resolveExprPath(
lookupFileArg(*state, *file)));
Value root;
state->eval(e, root);


@ -12,4 +12,4 @@ libcmd_LDFLAGS = $(EDITLINE_LIBS) $(LOWDOWN_LIBS) -pthread
libcmd_LIBS = libstore libutil libexpr libmain libfetchers
$(eval $(call install-file-in, $(d)/nix-cmd.pc, $(libdir)/pkgconfig, 0644))
$(eval $(call install-file-in, $(buildprefix)$(d)/nix-cmd.pc, $(libdir)/pkgconfig, 0644))


@ -43,7 +43,6 @@ extern "C" {
#include "finally.hh"
#include "markdown.hh"
#include "local-fs-store.hh"
#include "progress-bar.hh"
#include "print.hh"
#if HAVE_BOEHMGC
@ -262,13 +261,11 @@ void NixRepl::mainLoop()
rl_set_list_possib_func(listPossibleCallback);
#endif
/* Stop the progress bar because it interferes with the display of
the repl. */
stopProgressBar();
std::string input;
while (true) {
// Hide the progress bar while waiting for user input, so that it won't interfere.
logger->pause();
// When continuing input from previous lines, don't print a prompt, just align to the same
// number of chars as the prompt.
if (!getLine(input, input.empty() ? "nix-repl> " : " ")) {
@ -278,6 +275,7 @@ void NixRepl::mainLoop()
logger->cout("");
break;
}
logger->resume();
try {
if (!removeWhitespace(input).empty() && !processLine(input)) return;
} catch (ParseError & e) {


@ -22,7 +22,7 @@ struct AttrDb
{
std::atomic_bool failed{false};
const Store & cfg;
const StoreDirConfig & cfg;
struct State
{
@ -39,7 +39,7 @@ struct AttrDb
SymbolTable & symbols;
AttrDb(
const Store & cfg,
const StoreDirConfig & cfg,
const Hash & fingerprint,
SymbolTable & symbols)
: cfg(cfg)
@ -323,7 +323,7 @@ struct AttrDb
};
static std::shared_ptr<AttrDb> makeAttrDb(
const Store & cfg,
const StoreDirConfig & cfg,
const Hash & fingerprint,
SymbolTable & symbols)
{


@ -103,8 +103,10 @@ void EvalState::forceValue(Value & v, Callable getPos)
throw;
}
}
else if (v.isApp())
callFunction(*v.app.left, *v.app.right, v, noPos);
else if (v.isApp()) {
PosIdx pos = getPos();
callFunction(*v.app.left, *v.app.right, v, pos);
}
else if (v.isBlackhole())
error("infinite recursion encountered").atPos(getPos()).template debugThrow<EvalError>();
}
@ -121,9 +123,9 @@ template <typename Callable>
[[gnu::always_inline]]
inline void EvalState::forceAttrs(Value & v, Callable getPos, std::string_view errorCtx)
{
forceValue(v, noPos);
PosIdx pos = getPos();
forceValue(v, pos);
if (v.type() != nAttrs) {
PosIdx pos = getPos();
error("value is %1% while a set was expected", showType(v)).withTrace(pos, errorCtx).debugThrow<TypeError>();
}
}
@ -132,7 +134,7 @@ inline void EvalState::forceAttrs(Value & v, Callable getPos, std::string_view e
[[gnu::always_inline]]
inline void EvalState::forceList(Value & v, const PosIdx pos, std::string_view errorCtx)
{
forceValue(v, noPos);
forceValue(v, pos);
if (!v.isList()) {
error("value is %1% while a list was expected", showType(v)).withTrace(pos, errorCtx).debugThrow<TypeError>();
}


@ -68,6 +68,11 @@ struct EvalSettings : Config
evaluation mode. For example, when set to
`https://github.com/NixOS`, builtin functions such as `fetchGit` are
allowed to access `https://github.com/NixOS/patchelf.git`.
Access is granted when
- the URI is equal to the prefix,
- or the URI is a subpath of the prefix,
- or the prefix is a URI scheme ended by a colon `:` and the URI has the same scheme.
)"};
Setting<bool> traceFunctionCalls{this, false, "trace-function-calls",


@ -14,8 +14,11 @@
#include "profiles.hh"
#include "print.hh"
#include "fs-input-accessor.hh"
#include "filtering-input-accessor.hh"
#include "memory-input-accessor.hh"
#include "signals.hh"
#include "gc-small-vector.hh"
#include "url.hh"
#include <algorithm>
#include <chrono>
@ -31,6 +34,7 @@
#include <sys/resource.h>
#include <nlohmann/json.hpp>
#include <boost/container/small_vector.hpp>
#if HAVE_BOEHMGC
@ -342,7 +346,7 @@ static Symbol getName(const AttrName & name, EvalState & state, Env & env)
} else {
Value nameValue;
name.expr->eval(state, env, nameValue);
state.forceStringNoCtx(nameValue, noPos, "while evaluating an attribute name");
state.forceStringNoCtx(nameValue, name.expr->getPos(), "while evaluating an attribute name");
return state.symbols.create(nameValue.string_view());
}
}
@ -507,7 +511,16 @@ EvalState::EvalState(
, sOutputSpecified(symbols.create("outputSpecified"))
, repair(NoRepair)
, emptyBindings(0)
, rootFS(makeFSInputAccessor(CanonPath::root))
, rootFS(
evalSettings.restrictEval || evalSettings.pureEval
? ref<InputAccessor>(AllowListInputAccessor::create(makeFSInputAccessor(CanonPath::root), {},
[](const CanonPath & path) -> RestrictedPathError {
auto modeInformation = evalSettings.pureEval
? "in pure evaluation mode (use '--impure' to override)"
: "in restricted mode";
throw RestrictedPathError("access to absolute path '%1%' is forbidden %2%", path, modeInformation);
}))
: makeFSInputAccessor(CanonPath::root))
, corepkgsFS(makeMemoryInputAccessor())
, internalFS(makeMemoryInputAccessor())
, derivationInternal{corepkgsFS->addFile(
@ -549,28 +562,10 @@ EvalState::EvalState(
searchPath.elements.emplace_back(SearchPath::Elem::parse(i));
}
if (evalSettings.restrictEval || evalSettings.pureEval) {
allowedPaths = PathSet();
for (auto & i : searchPath.elements) {
auto r = resolveSearchPathPath(i.path);
if (!r) continue;
auto path = std::move(*r);
if (store->isInStore(path)) {
try {
StorePathSet closure;
store->computeFSClosure(store->toStorePath(path).first, closure);
for (auto & path : closure)
allowPath(path);
} catch (InvalidPath &) {
allowPath(path);
}
} else
allowPath(path);
}
}
/* Allow access to all paths in the search path. */
if (rootFS.dynamic_pointer_cast<AllowListInputAccessor>())
for (auto & i : searchPath.elements)
resolveSearchPathPath(i.path, true);
corepkgsFS->addFile(
CanonPath("fetchurl.nix"),
@ -588,14 +583,14 @@ EvalState::~EvalState()
void EvalState::allowPath(const Path & path)
{
if (allowedPaths)
allowedPaths->insert(path);
if (auto rootFS2 = rootFS.dynamic_pointer_cast<AllowListInputAccessor>())
rootFS2->allowPath(CanonPath(path));
}
void EvalState::allowPath(const StorePath & storePath)
{
if (allowedPaths)
allowedPaths->insert(store->toRealPath(storePath));
if (auto rootFS2 = rootFS.dynamic_pointer_cast<AllowListInputAccessor>())
rootFS2->allowPath(CanonPath(store->toRealPath(storePath)));
}
void EvalState::allowAndSetStorePathString(const StorePath & storePath, Value & v)
@ -605,79 +600,57 @@ void EvalState::allowAndSetStorePathString(const StorePath & storePath, Value &
mkStorePathString(storePath, v);
}
SourcePath EvalState::checkSourcePath(const SourcePath & path_)
inline static bool isJustSchemePrefix(std::string_view prefix)
{
// Don't check non-rootFS accessors, they're in a different namespace.
if (path_.accessor != ref<InputAccessor>(rootFS)) return path_;
if (!allowedPaths) return path_;
auto i = resolvedPaths.find(path_.path.abs());
if (i != resolvedPaths.end())
return i->second;
bool found = false;
/* First canonicalize the path without symlinks, so we make sure an
* attacker can't append ../../... to a path that would be in allowedPaths
* and thus leak symlink targets.
*/
Path abspath = canonPath(path_.path.abs());
for (auto & i : *allowedPaths) {
if (isDirOrInDir(abspath, i)) {
found = true;
break;
}
}
if (!found) {
auto modeInformation = evalSettings.pureEval
? "in pure eval mode (use '--impure' to override)"
: "in restricted mode";
throw RestrictedPathError("access to absolute path '%1%' is forbidden %2%", abspath, modeInformation);
}
/* Resolve symlinks. */
debug("checking access to '%s'", abspath);
SourcePath path = rootPath(CanonPath(canonPath(abspath, true)));
for (auto & i : *allowedPaths) {
if (isDirOrInDir(path.path.abs(), i)) {
resolvedPaths.insert_or_assign(path_.path.abs(), path);
return path;
}
}
throw RestrictedPathError("access to canonical path '%1%' is forbidden in restricted mode", path);
return
!prefix.empty()
&& prefix[prefix.size() - 1] == ':'
&& isValidSchemeName(prefix.substr(0, prefix.size() - 1));
}
bool isAllowedURI(std::string_view uri, const Strings & allowedUris)
{
/* 'uri' should be equal to a prefix, or in a subdirectory of a
prefix. Thus, the prefix https://github.co does not permit
access to https://github.com. */
for (auto & prefix : allowedUris) {
if (uri == prefix
// Allow access to subdirectories of the prefix.
|| (uri.size() > prefix.size()
&& prefix.size() > 0
&& hasPrefix(uri, prefix)
&& (
// Allow access to subdirectories of the prefix.
prefix[prefix.size() - 1] == '/'
|| uri[prefix.size()] == '/'
// Allow access to whole schemes
|| isJustSchemePrefix(prefix)
)
))
return true;
}
return false;
}
void EvalState::checkURI(const std::string & uri)
{
if (!evalSettings.restrictEval) return;
/* 'uri' should be equal to a prefix, or in a subdirectory of a
prefix. Thus, the prefix https://github.co does not permit
access to https://github.com. Note: this allows 'http://' and
'https://' as prefixes for any http/https URI. */
for (auto & prefix : evalSettings.allowedUris.get())
if (uri == prefix ||
(uri.size() > prefix.size()
&& prefix.size() > 0
&& hasPrefix(uri, prefix)
&& (prefix[prefix.size() - 1] == '/' || uri[prefix.size()] == '/')))
return;
if (isAllowedURI(uri, evalSettings.allowedUris.get())) return;
/* If the URI is a path, then check it against allowedPaths as
well. */
if (hasPrefix(uri, "/")) {
checkSourcePath(rootPath(CanonPath(uri)));
if (auto rootFS2 = rootFS.dynamic_pointer_cast<AllowListInputAccessor>())
rootFS2->checkAccess(CanonPath(uri));
return;
}
if (hasPrefix(uri, "file://")) {
checkSourcePath(rootPath(CanonPath(std::string(uri, 7))));
if (auto rootFS2 = rootFS.dynamic_pointer_cast<AllowListInputAccessor>())
rootFS2->checkAccess(CanonPath(uri.substr(7)));
return;
}
@ -1179,10 +1152,8 @@ Value * ExprPath::maybeThunk(EvalState & state, Env & env)
}
void EvalState::evalFile(const SourcePath & path_, Value & v, bool mustBeTrivial)
void EvalState::evalFile(const SourcePath & path, Value & v, bool mustBeTrivial)
{
auto path = checkSourcePath(path_);
FileEvalCache::iterator i;
if ((i = fileEvalCache.find(path)) != fileEvalCache.end()) {
v = i->second;
@ -1203,7 +1174,7 @@ void EvalState::evalFile(const SourcePath & path_, Value & v, bool mustBeTrivial
e = j->second;
if (!e)
e = parseExprFromFile(checkSourcePath(resolvedPath));
e = parseExprFromFile(resolvedPath);
fileParseCache[resolvedPath] = e;
@ -1512,7 +1483,7 @@ void ExprOpHasAttr::eval(EvalState & state, Env & env, Value & v)
e->eval(state, env, vTmp);
for (auto & i : attrPath) {
state.forceValue(*vAttrs, noPos);
state.forceValue(*vAttrs, getPos());
Bindings::iterator j;
auto name = getName(i, state, env);
if (vAttrs->type() != nAttrs ||
@ -1681,7 +1652,7 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value &
if (countCalls) primOpCalls[name]++;
try {
vCur.primOp->fun(*this, noPos, args, vCur);
vCur.primOp->fun(*this, vCur.determinePos(noPos), args, vCur);
} catch (Error & e) {
addErrorTrace(e, pos, "while calling the '%1%' builtin", name);
throw;
@ -1712,7 +1683,7 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value &
/* We have all the arguments, so call the primop with
the previous and new arguments. */
Value * vArgs[arity];
Value * vArgs[maxPrimOpArity];
auto n = argsDone;
for (Value * arg = &vCur; arg->isPrimOpApp(); arg = arg->primOpApp.left)
vArgs[--n] = arg->primOpApp.right;
@ -1729,7 +1700,7 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value &
// 1. Unify this and above code. Heavily redundant.
// 2. Create a fake env (arg1, arg2, etc.) and a fake expr (arg1: arg2: etc: builtins.name arg1 arg2 etc)
// so the debugger allows to inspect the wrong parameters passed to the builtin.
primOp->primOp->fun(*this, noPos, vArgs, vCur);
primOp->primOp->fun(*this, vCur.determinePos(noPos), vArgs, vCur);
} catch (Error & e) {
addErrorTrace(e, pos, "while calling the '%1%' builtin", name);
throw;
@ -1775,11 +1746,11 @@ void ExprCall::eval(EvalState & state, Env & env, Value & v)
// 4: about 60
// 5: under 10
// This excluded attrset lambdas (`{...}:`). Contributions of mixed lambdas appears insignificant at ~150 total.
Value * vArgs[args.size()];
SmallValueVector<4> vArgs(args.size());
for (size_t i = 0; i < args.size(); ++i)
vArgs[i] = args[i]->maybeThunk(state, env);
state.callFunction(vFun, args.size(), vArgs, v, pos);
state.callFunction(vFun, args.size(), vArgs.data(), v, pos);
}
@ -1837,7 +1808,7 @@ https://nixos.org/manual/nix/stable/language/constructs.html#functions.)", symbo
}
}
callFunction(fun, allocValue()->mkAttrs(attrs), res, noPos);
callFunction(fun, allocValue()->mkAttrs(attrs), res, pos);
}
@ -1873,7 +1844,7 @@ void ExprAssert::eval(EvalState & state, Env & env, Value & v)
void ExprOpNot::eval(EvalState & state, Env & env, Value & v)
{
v.mkBool(!state.evalBool(env, e, noPos, "in the argument of the not operator")); // XXX: FIXME: !
v.mkBool(!state.evalBool(env, e, getPos(), "in the argument of the not operator")); // XXX: FIXME: !
}
@ -2018,8 +1989,9 @@ void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v)
return result;
};
Value values[es->size()];
Value * vTmpP = values;
// List of returned strings. References to these Values must NOT be persisted.
SmallTemporaryValueVector<conservativeStackReservation> values(es->size());
Value * vTmpP = values.data();
for (auto & [i_pos, i] : *es) {
Value & vTmp = *vTmpP++;
@ -2313,7 +2285,7 @@ BackedStringView EvalState::coerceToString(
std::string result;
for (auto [n, v2] : enumerate(v.listItems())) {
try {
result += *coerceToString(noPos, *v2, context,
result += *coerceToString(pos, *v2, context,
"while evaluating one element of the list",
coerceMore, copyToStore, canonicalizePath);
} catch (Error & e) {
@ -2460,8 +2432,8 @@ SingleDerivedPath EvalState::coerceToSingleDerivedPath(const PosIdx pos, Value &
bool EvalState::eqValues(Value & v1, Value & v2, const PosIdx pos, std::string_view errorCtx)
{
forceValue(v1, noPos);
forceValue(v2, noPos);
forceValue(v1, pos);
forceValue(v2, pos);
/* !!! Hack to support some old broken code that relies on pointer
equality tests between sets. (Specifically, builderDefs calls


@ -30,7 +30,6 @@ class EvalState;
class StorePath;
struct SingleDerivedPath;
enum RepairFlag : bool;
struct FSInputAccessor;
struct MemoryInputAccessor;
@ -217,18 +216,12 @@ public:
*/
RepairFlag repair;
/**
* The allowed filesystem paths in restricted or pure evaluation
* mode.
*/
std::optional<PathSet> allowedPaths;
Bindings emptyBindings;
/**
* The accessor for the root filesystem.
*/
const ref<FSInputAccessor> rootFS;
const ref<InputAccessor> rootFS;
/**
* The in-memory filesystem for <nix/...> paths.
@ -342,11 +335,6 @@ private:
std::map<std::string, std::optional<std::string>> searchPathResolved;
/**
* Cache used by checkSourcePath().
*/
std::unordered_map<Path, SourcePath> resolvedPaths;
/**
* Cache used by prim_match().
*/
@ -396,12 +384,6 @@ public:
*/
void allowAndSetStorePathString(const StorePath & storePath, Value & v);
/**
* Check whether access to a path is allowed and throw an error if
* not. Otherwise return the canonicalised path.
*/
SourcePath checkSourcePath(const SourcePath & path);
void checkURI(const std::string & uri);
/**
@ -445,13 +427,15 @@ public:
SourcePath findFile(const SearchPath & searchPath, const std::string_view path, const PosIdx pos = noPos);
/**
* Try to resolve a search path value (not the optional key part)
* Try to resolve a search path value (not the optional key part).
*
* If the specified search path element is a URI, download it.
*
* If it is not found, return `std::nullopt`
*/
std::optional<std::string> resolveSearchPathPath(const SearchPath::Path & path);
std::optional<std::string> resolveSearchPathPath(
const SearchPath::Path & elem,
bool initAccessControl = false);
/**
* Evaluate an expression to normal form
@ -756,6 +740,13 @@ public:
*/
[[nodiscard]] StringMap realiseContext(const NixStringContext & context);
/* Call the binary path filter predicate used by builtins.path etc. */
bool callPathFilter(
Value * filterFun,
const SourcePath & path,
std::string_view pathArg,
PosIdx pos);
private:
/**
@ -841,6 +832,11 @@ std::string showType(const Value & v);
*/
SourcePath resolveExprPath(SourcePath path);
/**
* Whether a URI is allowed, assuming restrictEval is enabled
*/
bool isAllowedURI(std::string_view uri, const Strings & allowedPaths);
struct InvalidPathError : EvalError
{
Path path;


@ -904,7 +904,7 @@ Fingerprint LockedFlake::getFingerprint() const
// FIXME: as an optimization, if the flake contains a lock file
// and we haven't changed it, then it's sufficient to use
// flake.sourceInfo.storePath for the fingerprint.
return hashString(htSHA256,
return hashString(HashAlgorithm::SHA256,
fmt("%s;%s;%d;%d;%s",
flake.storePath.to_string(),
flake.lockedRef.subdir,

View file

@ -90,7 +90,7 @@ std::pair<FlakeRef, std::string> parsePathFlakeRefWithFragment(
fragment = percentDecode(url.substr(fragmentStart+1));
}
if (pathEnd != std::string::npos && fragmentStart != std::string::npos) {
query = decodeQuery(url.substr(pathEnd+1, fragmentStart));
query = decodeQuery(url.substr(pathEnd+1, fragmentStart-pathEnd-1));
}
if (baseDir) {


@ -0,0 +1,42 @@
#pragma once
#include <boost/container/small_vector.hpp>
#if HAVE_BOEHMGC
#include <gc/gc.h>
#include <gc/gc_cpp.h>
#include <gc/gc_allocator.h>
#endif
namespace nix {
struct Value;
/**
* A GC-compatible vector that may use a reserved portion of `nItems` on the stack instead of allocating on the heap.
*/
#if HAVE_BOEHMGC
template <typename T, size_t nItems>
using SmallVector = boost::container::small_vector<T, nItems, traceable_allocator<T>>;
#else
template <typename T, size_t nItems>
using SmallVector = boost::container::small_vector<T, nItems>;
#endif
/**
* A vector of value pointers. See `SmallVector`.
*/
template <size_t nItems>
using SmallValueVector = SmallVector<Value *, nItems>;
/**
* A vector of values that must not be referenced after the vector is destroyed.
*
* See also `SmallValueVector`.
*/
template <size_t nItems>
using SmallTemporaryValueVector = SmallVector<Value, nItems>;
}
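These aliases replace stack-allocated variable-length arrays such as `Value * vArgs[args.size()]` used elsewhere in the evaluator. Below is a minimal standalone sketch (not part of the commit) of the underlying `boost::container::small_vector` behaviour; the Boehm GC allocator branch is omitted, plain `int` stands in for `nix::Value`, and Boost.Container is assumed to be installed. The printed capacities depend on the Boost implementation.

```cpp
#include <boost/container/small_vector.hpp>
#include <cstddef>
#include <iostream>

// Same shape as the non-GC branch of the header above.
template <typename T, std::size_t nItems>
using SmallVector = boost::container::small_vector<T, nItems>;

int main()
{
    // Up to 4 elements live in the in-object buffer; growing past that point
    // switches to a heap allocation instead of relying on a VLA.
    SmallVector<int, 4> v(3, 0);
    std::cout << "inline capacity: " << v.capacity() << "\n";

    for (int i = 0; i < 10; ++i)
        v.push_back(i);
    std::cout << "after growth: size=" << v.size()
              << " capacity=" << v.capacity() << "\n";

    // data() yields a contiguous T*, which is how the patched ExprCall::eval
    // hands the argument array to callFunction() via vArgs.data().
    int * raw = v.data();
    std::cout << "first element via raw pointer: " << raw[0] << "\n";
}
```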


@ -16,7 +16,7 @@ libexpr_CXXFLAGS += -I src/libutil -I src/libstore -I src/libfetchers -I src/lib
libexpr_LIBS = libutil libstore libfetchers
libexpr_LDFLAGS += -lboost_context -lboost_regex -pthread
libexpr_LDFLAGS += -lboost_context -pthread
ifdef HOST_LINUX
libexpr_LDFLAGS += -ldl
endif
@ -36,7 +36,7 @@ $(d)/lexer-tab.cc $(d)/lexer-tab.hh: $(d)/lexer.l
clean-files += $(d)/parser-tab.cc $(d)/parser-tab.hh $(d)/lexer-tab.cc $(d)/lexer-tab.hh
$(eval $(call install-file-in, $(d)/nix-expr.pc, $(libdir)/pkgconfig, 0644))
$(eval $(call install-file-in, $(buildprefix)$(d)/nix-expr.pc, $(libdir)/pkgconfig, 0644))
$(foreach i, $(wildcard src/libexpr/value/*.hh), \
$(eval $(call install-file-in, $(i), $(includedir)/nix/value, 0644)))
@ -47,4 +47,4 @@ $(d)/primops.cc: $(d)/imported-drv-to-derivation.nix.gen.hh
$(d)/eval.cc: $(d)/primops/derivation.nix.gen.hh $(d)/fetchurl.nix.gen.hh $(d)/flake/call-flake.nix.gen.hh
src/libexpr/primops/fromTOML.o: ERROR_SWITCH_ENUM =
$(buildprefix)src/libexpr/primops/fromTOML.o: ERROR_SWITCH_ENUM =


@ -405,6 +405,7 @@ struct ExprOpNot : Expr
{
Expr * e;
ExprOpNot(Expr * e) : e(e) { };
PosIdx getPos() const override { return e->getPos(); }
COMMON_METHODS
};


@ -692,16 +692,17 @@ SourcePath resolveExprPath(SourcePath path)
/* If `path' is a symlink, follow it. This is so that relative
path references work. */
while (true) {
while (!path.path.isRoot()) {
// Basic cycle/depth limit to avoid infinite loops.
if (++followCount >= maxFollow)
throw Error("too many symbolic links encountered while traversing the path '%s'", path);
if (path.lstat().type != InputAccessor::tSymlink) break;
path = {path.accessor, CanonPath(path.readLink(), path.path.parent().value_or(CanonPath::root))};
auto p = path.parent().resolveSymlinks() + path.baseName();
if (p.lstat().type != InputAccessor::tSymlink) break;
path = {path.accessor, CanonPath(p.readLink(), path.path.parent().value_or(CanonPath::root))};
}
/* If `path' refers to a directory, append `/default.nix'. */
if (path.lstat().type == InputAccessor::tDirectory)
if (path.resolveSymlinks().lstat().type == InputAccessor::tDirectory)
return path + "default.nix";
return path;
@ -716,7 +717,7 @@ Expr * EvalState::parseExprFromFile(const SourcePath & path)
Expr * EvalState::parseExprFromFile(const SourcePath & path, std::shared_ptr<StaticEnv> & staticEnv)
{
auto buffer = path.readFile();
auto buffer = path.resolveSymlinks().readFile();
// readFile has hopefully left some extra space for terminators
buffer.append("\0\0", 2);
return parse(buffer.data(), buffer.size(), Pos::Origin(path), path.parent(), staticEnv);
@ -783,7 +784,7 @@ SourcePath EvalState::findFile(const SearchPath & searchPath, const std::string_
}
std::optional<std::string> EvalState::resolveSearchPathPath(const SearchPath::Path & value0)
std::optional<std::string> EvalState::resolveSearchPathPath(const SearchPath::Path & value0, bool initAccessControl)
{
auto & value = value0.s;
auto i = searchPathResolved.find(value);
@ -800,7 +801,6 @@ std::optional<std::string> EvalState::resolveSearchPathPath(const SearchPath::Pa
logWarning({
.msg = hintfmt("Nix search path entry '%1%' cannot be downloaded, ignoring", value)
});
res = std::nullopt;
}
}
@ -814,6 +814,20 @@ std::optional<std::string> EvalState::resolveSearchPathPath(const SearchPath::Pa
else {
auto path = absPath(value);
/* Allow access to paths in the search path. */
if (initAccessControl) {
allowPath(path);
if (store->isInStore(path)) {
try {
StorePathSet closure;
store->computeFSClosure(store->toStorePath(path).first, closure);
for (auto & p : closure)
allowPath(p);
} catch (InvalidPath &) { }
}
}
if (pathExists(path))
res = { path };
else {
@ -829,7 +843,7 @@ std::optional<std::string> EvalState::resolveSearchPathPath(const SearchPath::Pa
else
debug("failed to resolve search path element '%s'", value);
searchPathResolved[value] = res;
searchPathResolved.emplace(value, res);
return res;
}


@ -4,6 +4,7 @@
#include "eval-inline.hh"
#include "eval.hh"
#include "eval-settings.hh"
#include "gc-small-vector.hh"
#include "globals.hh"
#include "json-to-value.hh"
#include "names.hh"
@ -14,9 +15,9 @@
#include "value-to-json.hh"
#include "value-to-xml.hh"
#include "primops.hh"
#include "fs-input-accessor.hh"
#include <boost/container/small_vector.hpp>
#include <boost/regex.hpp>
#include <nlohmann/json.hpp>
#include <sys/types.h>
@ -25,6 +26,7 @@
#include <algorithm>
#include <cstring>
#include <regex>
#include <dlfcn.h>
#include <cmath>
@ -89,9 +91,8 @@ StringMap EvalState::realiseContext(const NixStringContext & context)
for (auto & [outputName, outputPath] : outputs) {
/* Add the output of this derivation to the allowed
paths. */
if (allowedPaths) {
allowPath(outputPath);
}
allowPath(store->toRealPath(outputPath));
/* Get all the output paths corresponding to the placeholders we had */
if (experimentalFeatureSettings.isEnabled(Xp::CaDerivations)) {
res.insert_or_assign(
@ -109,27 +110,19 @@ StringMap EvalState::realiseContext(const NixStringContext & context)
return res;
}
struct RealisePathFlags {
// Whether to check that the path is allowed in pure eval mode
bool checkForPureEval = true;
};
static SourcePath realisePath(EvalState & state, const PosIdx pos, Value & v, const RealisePathFlags flags = {})
static SourcePath realisePath(EvalState & state, const PosIdx pos, Value & v, bool resolveSymlinks = true)
{
NixStringContext context;
auto path = state.coerceToPath(noPos, v, context, "while realising the context of a path");
try {
if (!context.empty()) {
if (!context.empty() && path.accessor == state.rootFS) {
auto rewrites = state.realiseContext(context);
auto realPath = state.toRealPath(rewriteStrings(path.path.abs(), rewrites), context);
return {path.accessor, CanonPath(realPath)};
path = {path.accessor, CanonPath(realPath)};
}
return flags.checkForPureEval
? state.checkSourcePath(path)
: path;
return resolveSymlinks ? path.resolveSymlinks() : path;
} catch (Error & e) {
e.addTrace(state.positions[pos], "while realising the context of path '%s'", path);
throw;
@ -169,7 +162,7 @@ static void mkOutputString(
argument. */
static void import(EvalState & state, const PosIdx pos, Value & vPath, Value * vScope, Value & v)
{
auto path = realisePath(state, pos, vPath);
auto path = realisePath(state, pos, vPath, false);
auto path2 = path.path.abs();
// FIXME
@ -1316,7 +1309,7 @@ drvName, Bindings * attrs, Value & v)
.errPos = state.positions[noPos]
}));
auto h = newHashAllowEmpty(*outputHash, parseHashTypeOpt(outputHashAlgo));
auto h = newHashAllowEmpty(*outputHash, parseHashAlgoOpt(outputHashAlgo));
auto method = ingestionMethod.value_or(FileIngestionMethod::Flat);
@ -1338,7 +1331,7 @@ drvName, Bindings * attrs, Value & v)
.errPos = state.positions[noPos]
});
auto ht = parseHashTypeOpt(outputHashAlgo).value_or(htSHA256);
auto ha = parseHashAlgoOpt(outputHashAlgo).value_or(HashAlgorithm::SHA256);
auto method = ingestionMethod.value_or(FileIngestionMethod::Recursive);
for (auto & i : outputs) {
@ -1347,13 +1340,13 @@ drvName, Bindings * attrs, Value & v)
drv.outputs.insert_or_assign(i,
DerivationOutput::Impure {
.method = method,
.hashType = ht,
.hashAlgo = ha,
});
else
drv.outputs.insert_or_assign(i,
DerivationOutput::CAFloating {
.method = method,
.hashType = ht,
.hashAlgo = ha,
});
}
}
@ -1492,7 +1485,7 @@ static void prim_storePath(EvalState & state, const PosIdx pos, Value * * args,
}));
NixStringContext context;
auto path = state.checkSourcePath(state.coerceToPath(pos, *args[0], context, "while evaluating the first argument passed to 'builtins.storePath'")).path;
auto path = state.coerceToPath(pos, *args[0], context, "while evaluating the first argument passed to 'builtins.storePath'").path;
/* Resolve symlinks in path, unless path itself is a symlink
directly in the store. The latter condition is necessary so
e.g. nix-push does the right thing. */
@ -1532,29 +1525,19 @@ static RegisterPrimOp primop_storePath({
static void prim_pathExists(EvalState & state, const PosIdx pos, Value * * args, Value & v)
{
auto & arg = *args[0];
/* We don't check the path right now, because we don't want to
throw if the path isn't allowed, but just return false (and we
can't just catch the exception here because we still want to
throw if something in the evaluation of `arg` tries to
access an unauthorized path). */
auto path = realisePath(state, pos, arg, { .checkForPureEval = false });
/* SourcePath doesn't know about trailing slash. */
auto mustBeDir = arg.type() == nString
&& (arg.string_view().ends_with("/")
|| arg.string_view().ends_with("/."));
try {
auto checked = state.checkSourcePath(path);
auto st = checked.maybeLstat();
auto & arg = *args[0];
auto path = realisePath(state, pos, arg);
/* SourcePath doesn't know about trailing slash. */
auto mustBeDir = arg.type() == nString
&& (arg.string_view().ends_with("/")
|| arg.string_view().ends_with("/."));
auto st = path.maybeLstat();
auto exists = st && (!mustBeDir || st->type == SourceAccessor::tDirectory);
v.mkBool(exists);
} catch (SysError & e) {
/* Don't give away info from errors while canonicalising
path in restricted mode. */
v.mkBool(false);
} catch (RestrictedPathError & e) {
v.mkBool(false);
}
@ -1698,7 +1681,7 @@ static void prim_findFile(EvalState & state, const PosIdx pos, Value * * args, V
auto path = state.forceStringNoCtx(*args[1], pos, "while evaluating the second argument passed to builtins.findFile");
v.mkPath(state.checkSourcePath(state.findFile(searchPath, path, pos)));
v.mkPath(state.findFile(searchPath, path, pos));
}
static RegisterPrimOp primop_findFile(PrimOp {
@ -1753,17 +1736,17 @@ static RegisterPrimOp primop_findFile(PrimOp {
/* Return the cryptographic hash of a file in base-16. */
static void prim_hashFile(EvalState & state, const PosIdx pos, Value * * args, Value & v)
{
auto type = state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.hashFile");
std::optional<HashType> ht = parseHashType(type);
if (!ht)
auto algo = state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.hashFile");
std::optional<HashAlgorithm> ha = parseHashAlgo(algo);
if (!ha)
state.debugThrowLastTrace(Error({
.msg = hintfmt("unknown hash type '%1%'", type),
.msg = hintfmt("unknown hash algo '%1%'", algo),
.errPos = state.positions[pos]
}));
auto path = realisePath(state, pos, *args[1]);
v.mkString(hashString(*ht, path.readFile()).to_string(HashFormat::Base16, false));
v.mkString(hashString(*ha, path.readFile()).to_string(HashFormat::Base16, false));
}
static RegisterPrimOp primop_hashFile({
@ -1788,7 +1771,7 @@ static std::string_view fileTypeToString(InputAccessor::Type type)
static void prim_readFileType(EvalState & state, const PosIdx pos, Value * * args, Value & v)
{
auto path = realisePath(state, pos, *args[0]);
auto path = realisePath(state, pos, *args[0], false);
/* Retrieve the directory entry type and stringize it. */
v.mkString(fileTypeToString(path.lstat().type));
}
@ -2177,11 +2160,35 @@ static RegisterPrimOp primop_toFile({
.fun = prim_toFile,
});
bool EvalState::callPathFilter(
Value * filterFun,
const SourcePath & path,
std::string_view pathArg,
PosIdx pos)
{
auto st = path.lstat();
/* Call the filter function. The first argument is the path, the
second is a string indicating the type of the file. */
Value arg1;
arg1.mkString(pathArg);
Value arg2;
// assert that type is not "unknown"
arg2.mkString(fileTypeToString(st.type));
Value * args []{&arg1, &arg2};
Value res;
callFunction(*filterFun, 2, args, res, pos);
return forceBool(res, pos, "while evaluating the return value of the path filter function");
}
static void addPath(
EvalState & state,
const PosIdx pos,
std::string_view name,
Path path,
SourcePath path,
Value * filterFun,
FileIngestionMethod method,
const std::optional<Hash> expectedHash,
@ -2189,48 +2196,29 @@ static void addPath(
const NixStringContext & context)
{
try {
// FIXME: handle CA derivation outputs (where path needs to
// be rewritten to the actual output).
auto rewrites = state.realiseContext(context);
path = state.toRealPath(rewriteStrings(path, rewrites), context);
StorePathSet refs;
if (state.store->isInStore(path)) {
if (path.accessor == state.rootFS && state.store->isInStore(path.path.abs())) {
// FIXME: handle CA derivation outputs (where path needs to
// be rewritten to the actual output).
auto rewrites = state.realiseContext(context);
path = {state.rootFS, CanonPath(state.toRealPath(rewriteStrings(path.path.abs(), rewrites), context))};
try {
auto [storePath, subPath] = state.store->toStorePath(path);
auto [storePath, subPath] = state.store->toStorePath(path.path.abs());
// FIXME: we should scanForReferences on the path before adding it
refs = state.store->queryPathInfo(storePath)->references;
path = state.store->toRealPath(storePath) + subPath;
path = {state.rootFS, CanonPath(state.store->toRealPath(storePath) + subPath)};
} catch (Error &) { // FIXME: should be InvalidPathError
}
}
path = evalSettings.pureEval && expectedHash
? path
: state.checkSourcePath(state.rootPath(CanonPath(path))).path.abs();
PathFilter filter = filterFun ? ([&](const Path & path) {
auto st = lstat(path);
/* Call the filter function. The first argument is the path,
the second is a string indicating the type of the file. */
Value arg1;
arg1.mkString(path);
Value arg2;
arg2.mkString(
S_ISREG(st.st_mode) ? "regular" :
S_ISDIR(st.st_mode) ? "directory" :
S_ISLNK(st.st_mode) ? "symlink" :
"unknown" /* not supported, will fail! */);
Value * args []{&arg1, &arg2};
Value res;
state.callFunction(*filterFun, 2, args, res, pos);
return state.forceBool(res, pos, "while evaluating the return value of the path filter function");
}) : defaultPathFilter;
std::unique_ptr<PathFilter> filter;
if (filterFun)
filter = std::make_unique<PathFilter>([&](const Path & p) {
auto p2 = CanonPath(p);
return state.callPathFilter(filterFun, {path.accessor, p2}, p2.abs(), pos);
});
std::optional<StorePath> expectedStorePath;
if (expectedHash)
@ -2241,7 +2229,7 @@ static void addPath(
});
if (!expectedHash || !state.store->isValidPath(*expectedStorePath)) {
auto dstPath = state.rootPath(CanonPath(path)).fetchToStore(state.store, name, method, &filter, state.repair);
auto dstPath = path.fetchToStore(state.store, name, method, filter.get(), state.repair);
if (expectedHash && expectedStorePath != dstPath)
state.debugThrowLastTrace(Error("store path mismatch in (possibly filtered) path added from '%s'", path));
state.allowAndSetStorePathString(dstPath, v);
@ -2260,7 +2248,8 @@ static void prim_filterSource(EvalState & state, const PosIdx pos, Value * * arg
auto path = state.coerceToPath(pos, *args[1], context,
"while evaluating the second argument (the path to filter) passed to 'builtins.filterSource'");
state.forceFunction(*args[0], pos, "while evaluating the first argument passed to builtins.filterSource");
addPath(state, pos, path.baseName(), path.path.abs(), args[0], FileIngestionMethod::Recursive, std::nullopt, v, context);
addPath(state, pos, path.baseName(), path, args[0], FileIngestionMethod::Recursive, std::nullopt, v, context);
}
static RegisterPrimOp primop_filterSource({
@ -2340,7 +2329,7 @@ static void prim_path(EvalState & state, const PosIdx pos, Value * * args, Value
else if (n == "recursive")
method = FileIngestionMethod { state.forceBool(*attr.value, attr.pos, "while evaluating the `recursive` attribute passed to builtins.path") };
else if (n == "sha256")
expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the `sha256` attribute passed to builtins.path"), htSHA256);
expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the `sha256` attribute passed to builtins.path"), HashAlgorithm::SHA256);
else
state.debugThrowLastTrace(EvalError({
.msg = hintfmt("unsupported argument '%1%' to 'addPath'", state.symbols[attr.name]),
@ -2355,7 +2344,7 @@ static void prim_path(EvalState & state, const PosIdx pos, Value * * args, Value
if (name.empty())
name = path->baseName();
addPath(state, pos, name, path->path.abs(), filterFun, method, expectedHash, v, context);
addPath(state, pos, name, *path, filterFun, method, expectedHash, v, context);
}
static RegisterPrimOp primop_path({
@ -2729,7 +2718,7 @@ static void prim_catAttrs(EvalState & state, const PosIdx pos, Value * * args, V
auto attrName = state.symbols.create(state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.catAttrs"));
state.forceList(*args[1], pos, "while evaluating the second argument passed to builtins.catAttrs");
Value * res[args[1]->listSize()];
SmallValueVector<nonRecursiveStackReservation> res(args[1]->listSize());
size_t found = 0;
for (auto v2 : args[1]->listItems()) {
@ -3064,8 +3053,7 @@ static void prim_filter(EvalState & state, const PosIdx pos, Value * * args, Val
state.forceFunction(*args[0], pos, "while evaluating the first argument passed to builtins.filter");
// FIXME: putting this on the stack is risky.
Value * vs[args[1]->listSize()];
SmallValueVector<nonRecursiveStackReservation> vs(args[1]->listSize());
size_t k = 0;
bool same = true;
@ -3461,7 +3449,8 @@ static void prim_concatMap(EvalState & state, const PosIdx pos, Value * * args,
state.forceList(*args[1], pos, "while evaluating the second argument passed to builtins.concatMap");
auto nrLists = args[1]->listSize();
Value lists[nrLists];
// List of returned lists before concatenation. References to these Values must NOT be persisted.
SmallTemporaryValueVector<conservativeStackReservation> lists(nrLists);
size_t len = 0;
for (unsigned int n = 0; n < nrLists; ++n) {
@ -3765,18 +3754,18 @@ static RegisterPrimOp primop_stringLength({
/* Return the cryptographic hash of a string in base-16. */
static void prim_hashString(EvalState & state, const PosIdx pos, Value * * args, Value & v)
{
auto type = state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.hashString");
std::optional<HashType> ht = parseHashType(type);
if (!ht)
auto algo = state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.hashString");
std::optional<HashAlgorithm> ha = parseHashAlgo(algo);
if (!ha)
state.debugThrowLastTrace(Error({
.msg = hintfmt("unknown hash type '%1%'", type),
.msg = hintfmt("unknown hash algo '%1%'", algo),
.errPos = state.positions[pos]
}));
NixStringContext context; // discarded
auto s = state.forceString(*args[1], context, pos, "while evaluating the second argument passed to builtins.hashString");
v.mkString(hashString(*ht, s).to_string(HashFormat::Base16, false));
v.mkString(hashString(*ha, s).to_string(HashFormat::Base16, false));
}
static RegisterPrimOp primop_hashString({
@ -3799,15 +3788,15 @@ static void prim_convertHash(EvalState & state, const PosIdx pos, Value * * args
auto hash = state.forceStringNoCtx(*iteratorHash->value, pos, "while evaluating the attribute 'hash'");
Bindings::iterator iteratorHashAlgo = inputAttrs->find(state.symbols.create("hashAlgo"));
std::optional<HashType> ht = std::nullopt;
std::optional<HashAlgorithm> ha = std::nullopt;
if (iteratorHashAlgo != inputAttrs->end()) {
ht = parseHashType(state.forceStringNoCtx(*iteratorHashAlgo->value, pos, "while evaluating the attribute 'hashAlgo'"));
ha = parseHashAlgo(state.forceStringNoCtx(*iteratorHashAlgo->value, pos, "while evaluating the attribute 'hashAlgo'"));
}
Bindings::iterator iteratorToHashFormat = getAttr(state, state.symbols.create("toHashFormat"), args[0]->attrs, "while locating the attribute 'toHashFormat'");
HashFormat hf = parseHashFormat(state.forceStringNoCtx(*iteratorToHashFormat->value, pos, "while evaluating the attribute 'toHashFormat'"));
v.mkString(Hash::parseAny(hash, ht).to_string(hf, hf == HashFormat::SRI));
v.mkString(Hash::parseAny(hash, ha).to_string(hf, hf == HashFormat::SRI));
}
static RegisterPrimOp primop_convertHash({
@ -3836,7 +3825,8 @@ static RegisterPrimOp primop_convertHash({
The format of the resulting hash. Must be one of
- `"base16"`
- `"base32"`
- `"nix32"`
- `"base32"` (deprecated alias for `"nix32"`)
- `"base64"`
- `"sri"`
@ -3885,30 +3875,19 @@ static RegisterPrimOp primop_convertHash({
.fun = prim_convertHash,
});
// regex aliases, switch between boost and std
using regex = boost::regex;
using regex_error = boost::regex_error;
using cmatch = boost::cmatch;
using cregex_iterator = boost::cregex_iterator;
namespace regex_constants = boost::regex_constants;
// overloaded function alias
constexpr auto regex_match = [] (auto &&...args) {
return boost::regex_match(std::forward<decltype(args)>(args)...);
};
struct RegexCache
{
// TODO use C++20 transparent comparison when available
std::unordered_map<std::string_view, regex> cache;
std::unordered_map<std::string_view, std::regex> cache;
std::list<std::string> keys;
regex get(std::string_view re)
std::regex get(std::string_view re)
{
auto it = cache.find(re);
if (it != cache.end())
return it->second;
keys.emplace_back(re);
return cache.emplace(keys.back(), regex(keys.back(), regex::extended)).first->second;
return cache.emplace(keys.back(), std::regex(keys.back(), std::regex::extended)).first->second;
}
};
@ -3928,8 +3907,8 @@ void prim_match(EvalState & state, const PosIdx pos, Value * * args, Value & v)
NixStringContext context;
const auto str = state.forceString(*args[1], context, pos, "while evaluating the second argument passed to builtins.match");
cmatch match;
if (!regex_match(str.begin(), str.end(), match, regex)) {
std::cmatch match;
if (!std::regex_match(str.begin(), str.end(), match, regex)) {
v.mkNull();
return;
}
@ -3944,8 +3923,8 @@ void prim_match(EvalState & state, const PosIdx pos, Value * * args, Value & v)
(v.listElems()[i] = state.allocValue())->mkString(match[i + 1].str());
}
} catch (regex_error & e) {
if (e.code() == regex_constants::error_space) {
} catch (std::regex_error & e) {
if (e.code() == std::regex_constants::error_space) {
// limit is _GLIBCXX_REGEX_STATE_LIMIT for libstdc++
state.debugThrowLastTrace(EvalError({
.msg = hintfmt("memory limit exceeded by regular expression '%s'", re),
@ -4008,8 +3987,8 @@ void prim_split(EvalState & state, const PosIdx pos, Value * * args, Value & v)
NixStringContext context;
const auto str = state.forceString(*args[1], context, pos, "while evaluating the second argument passed to builtins.split");
auto begin = cregex_iterator(str.begin(), str.end(), regex);
auto end = cregex_iterator();
auto begin = std::cregex_iterator(str.begin(), str.end(), regex);
auto end = std::cregex_iterator();
// Any matches results are surrounded by non-matching results.
const size_t len = std::distance(begin, end);
@ -4048,8 +4027,8 @@ void prim_split(EvalState & state, const PosIdx pos, Value * * args, Value & v)
assert(idx == 2 * len + 1);
} catch (regex_error & e) {
if (e.code() == regex_constants::error_space) {
} catch (std::regex_error & e) {
if (e.code() == std::regex_constants::error_space) {
// limit is _GLIBCXX_REGEX_STATE_LIMIT for libstdc++
state.debugThrowLastTrace(EvalError({
.msg = hintfmt("memory limit exceeded by regular expression '%s'", re),
@ -4410,7 +4389,7 @@ void EvalState::createBaseEnv()
addConstant("__currentSystem", v, {
.type = nString,
.doc = R"(
The value of the [`system` configuration option](@docroot@/command-ref/conf-file.md#conf-pure-eval).
The value of the [`system` configuration option](@docroot@/command-ref/conf-file.md#conf-system).
It can be used to set the `system` attribute for [`builtins.derivation`](@docroot@/language/derivations.md) such that the resulting derivation can be built on the same system that evaluates the Nix expression:
@ -4459,7 +4438,7 @@ void EvalState::createBaseEnv()
.doc = R"(
Logical file system location of the [Nix store](@docroot@/glossary.md#gloss-store) currently in use.
This value is determined by the `store` parameter in [Store URLs](@docroot@/command-ref/new-cli/nix3-help-stores.md):
This value is determined by the `store` parameter in [Store URLs](@docroot@/store/types/index.md#store-url-format):
```shell-session
$ nix-instantiate --store 'dummy://?store=/blah' --eval --expr builtins.storeDir


@ -1,5 +1,6 @@
#include "primops.hh"
#include "store-api.hh"
#include "realisation.hh"
#include "make-content-addressed.hh"
#include "url.hh"


@ -31,7 +31,7 @@ static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value * * a
// be both a revision or a branch/tag name.
auto value = state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the `rev` attribute passed to builtins.fetchMercurial");
if (std::regex_match(value.begin(), value.end(), revRegex))
rev = Hash::parseAny(value, htSHA1);
rev = Hash::parseAny(value, HashAlgorithm::SHA1);
else
ref = value;
}
@ -79,7 +79,7 @@ static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value * * a
attrs2.alloc("branch").mkString(*input2.getRef());
// Backward compatibility: set 'rev' to
// 0000000000000000000000000000000000000000 for a dirty tree.
auto rev2 = input2.getRev().value_or(Hash(htSHA1));
auto rev2 = input2.getRev().value_or(Hash(HashAlgorithm::SHA1));
attrs2.alloc("rev").mkString(rev2.gitRev());
attrs2.alloc("shortRev").mkString(rev2.gitRev().substr(0, 12));
if (auto revCount = input2.getRevCount())


@ -46,7 +46,7 @@ void emitTreeAttrs(
attrs.alloc("shortRev").mkString(rev->gitShortRev());
} else if (emptyRevFallback) {
// Backwards compat for `builtins.fetchGit`: dirty repos return an empty sha1 as rev
auto emptyHash = Hash(htSHA1);
auto emptyHash = Hash(HashAlgorithm::SHA1);
attrs.alloc("rev").mkString(emptyHash.gitRev());
attrs.alloc("shortRev").mkString(emptyHash.gitShortRev());
}
@ -187,45 +187,215 @@ static RegisterPrimOp primop_fetchTree({
.name = "fetchTree",
.args = {"input"},
.doc = R"(
Fetch a source tree or a plain file using one of the supported backends.
*input* must be a [flake reference](@docroot@/command-ref/new-cli/nix3-flake.md#flake-references), either in attribute set representation or in the URL-like syntax.
The input should be "locked", that is, it should contain a commit hash or content hash unless impure evaluation (`--impure`) is enabled.
Fetch a file system tree or a plain file using one of the supported backends and return an attribute set with:
> **Note**
- the resulting fixed-output [store path](@docroot@/glossary.md#gloss-store-path)
- the corresponding [NAR](@docroot@/glossary.md#gloss-nar) hash
- backend-specific metadata (currently not documented). <!-- TODO: document output attributes -->
*input* must be an attribute set with the following attributes:
- `type` (String, required)
One of the [supported source types](#source-types).
This determines other required and allowed input attributes.
- `narHash` (String, optional)
The `narHash` parameter can be used to substitute the source of the tree.
It also allows for verification of tree contents that may not be provided by the underlying transfer mechanism.
If `narHash` is set, the source is first looked up in the Nix store and [substituters](@docroot@/command-ref/conf-file.md#conf-substituters), and only fetched if not available.
A subset of the output attributes of `fetchTree` can be re-used for subsequent calls to `fetchTree` to produce the same result again.
That is, `fetchTree` is idempotent.
Downloads are cached in `$XDG_CACHE_HOME/nix`.
The remote source will be fetched from the network if both are true:
- A NAR hash is supplied and the corresponding store path is not [valid](@docroot@/glossary.md#gloss-validity), that is, not available in the store
> **Note**
>
> [Substituters](@docroot@/command-ref/conf-file.md#conf-substituters) are not used in fetching.
- There is no cache entry or the cache entry is older than [`tarball-ttl`](@docroot@/command-ref/conf-file.md#conf-tarball-ttl)
## Source types
The following source types and associated input attributes are supported.
<!-- TODO: It would be soooo much more predictable to work with (and
document) if `fetchTree` were a curried call with the first parameter for
`type` or an attribute like `builtins.fetchTree.git`! -->
- `"file"`
Place a plain file into the Nix store.
This is similar to [`builtins.fetchurl`](@docroot@/language/builtins.md#builtins-fetchurl).
- `url` (String, required)
Supported protocols:
- `https`
> **Example**
>
> ```nix
> fetchTree {
> type = "file";
> url = "https://example.com/index.html";
> }
> ```
- `http`
Insecure HTTP transfer for legacy sources.
> **Warning**
>
> HTTP performs no encryption or authentication.
> Use a `narHash` known in advance to ensure the output has expected contents.
- `file`
A file on the local file system.
> **Example**
>
> ```nix
> fetchTree {
> type = "file";
> url = "file:///home/eelco/nix/README.md";
> }
> ```
- `"tarball"`
Download a tar archive and extract it into the Nix store.
This has the same underlying implementation as [`builtins.fetchTarball`](@docroot@/language/builtins.md#builtins-fetchTarball).
- `url` (String, required)
> **Example**
>
> ```nix
> fetchTree {
> type = "tarball";
> url = "https://github.com/NixOS/nixpkgs/tarball/nixpkgs-23.11";
> }
> ```
- `"git"`
Fetch a Git tree and copy it to the Nix store.
This is similar to [`builtins.fetchGit`](@docroot@/language/builtins.md#builtins-fetchGit).
- `url` (String, required)
The URL formats supported are the same as for Git itself.
> **Example**
>
> ```nix
> fetchTree {
> type = "git";
> url = "git@github.com:NixOS/nixpkgs.git";
> }
> ```
> **Note**
>
> If the URL points to a local directory, and no `ref` or `rev` is given, Nix will only consider files added to the Git index, as listed by `git ls-files` but use the *current file contents* of the Git working directory.
- `ref` (String, optional)
A [Git reference](https://git-scm.com/book/en/v2/Git-Internals-Git-References), such as a branch or tag name.
Default: `"HEAD"`
- `rev` (String, optional)
A Git revision; a commit hash.
Default: the tip of `ref`
- `shallow` (Bool, optional)
Make a shallow clone when fetching the Git tree.
Default: `false`
- `submodules` (Bool, optional)
Also fetch submodules if available.
Default: `false`
- `allRefs` (Bool, optional)
If set to `true`, always fetch the entire repository, even if the latest commit is still in the cache.
Otherwise, only the latest commit is fetched if it is not already cached.
Default: `false`
- `lastModified` (Integer, optional)
Unix timestamp of the fetched commit.
If set, pass through the value to the output attribute set.
Otherwise, generated from the fetched Git tree.
- `revCount` (Integer, optional)
Number of revisions in the history of the Git repository before the fetched commit.
If set, pass through the value to the output attribute set.
Otherwise, generated from the fetched Git tree.
The following input types are still subject to change:
- `"path"`
- `"github"`
- `"gitlab"`
- `"sourcehut"`
- `"mercurial"`
*input* can also be a [URL-like reference](@docroot@/command-ref/new-cli/nix3-flake.md#flake-references).
The additional input types and the URL-like syntax require the [`flakes` experimental feature](@docroot@/contributing/experimental-features.md#xp-feature-flakes) to be enabled.
> **Example**
>
> The URL-like syntax requires the [`flakes` experimental feature](@docroot@/contributing/experimental-features.md#xp-feature-flakes) to be enabled.
> Fetch a GitHub repository using the attribute set representation:
>
> ```nix
> builtins.fetchTree {
> type = "github";
> owner = "NixOS";
> repo = "nixpkgs";
> rev = "ae2e6b3958682513d28f7d633734571fb18285dd";
> }
> ```
>
> This evaluates to the following attribute set:
>
> ```nix
> {
> lastModified = 1686503798;
> lastModifiedDate = "20230611171638";
> narHash = "sha256-rA9RqKP9OlBrgGCPvfd5HVAXDOy8k2SmPtB/ijShNXc=";
> outPath = "/nix/store/l5m6qlvfs9sdw14ja3qbzpglcjlb6j1x-source";
> rev = "ae2e6b3958682513d28f7d633734571fb18285dd";
> shortRev = "ae2e6b3";
> }
> ```
Here are some examples of how to use `fetchTree`:
- Fetch a GitHub repository using the attribute set representation:
```nix
builtins.fetchTree {
type = "github";
owner = "NixOS";
repo = "nixpkgs";
rev = "ae2e6b3958682513d28f7d633734571fb18285dd";
}
```
This evaluates to the following attribute set:
```
{
lastModified = 1686503798;
lastModifiedDate = "20230611171638";
narHash = "sha256-rA9RqKP9OlBrgGCPvfd5HVAXDOy8k2SmPtB/ijShNXc=";
outPath = "/nix/store/l5m6qlvfs9sdw14ja3qbzpglcjlb6j1x-source";
rev = "ae2e6b3958682513d28f7d633734571fb18285dd";
shortRev = "ae2e6b3";
}
```
- Fetch the same GitHub repository using the URL-like syntax:
```
builtins.fetchTree "github:NixOS/nixpkgs/ae2e6b3958682513d28f7d633734571fb18285dd"
```
> **Example**
>
> Fetch the same GitHub repository using the URL-like syntax:
>
> ```nix
> builtins.fetchTree "github:NixOS/nixpkgs/ae2e6b3958682513d28f7d633734571fb18285dd"
> ```
)",
.fun = prim_fetchTree,
.experimentalFeature = Xp::FetchTree,
@ -246,7 +416,7 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v
if (n == "url")
url = state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the url we should fetch");
else if (n == "sha256")
expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the sha256 of the content we should fetch"), htSHA256);
expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the sha256 of the content we should fetch"), HashAlgorithm::SHA256);
else if (n == "name")
name = state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the name of the content we should fetch");
else
@ -276,7 +446,7 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v
state.debugThrowLastTrace(EvalError("in pure evaluation mode, '%s' requires a 'sha256' argument", who));
// early exit if pinned and already in the store
if (expectedHash && expectedHash->type == htSHA256) {
if (expectedHash && expectedHash->algo == HashAlgorithm::SHA256) {
auto expectedPath = state.store->makeFixedOutputPath(
name,
FixedOutputInfo {
@ -301,10 +471,10 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v
if (expectedHash) {
auto hash = unpack
? state.store->queryPathInfo(storePath)->narHash
: hashFile(htSHA256, state.store->toRealPath(storePath));
: hashFile(HashAlgorithm::SHA256, state.store->toRealPath(storePath));
if (hash != *expectedHash)
state.debugThrowLastTrace(EvalError((unsigned int) 102, "hash mismatch in file downloaded from '%s':\n specified: %s\n got: %s",
*url, expectedHash->to_string(HashFormat::Base32, true), hash.to_string(HashFormat::Base32, true)));
*url, expectedHash->to_string(HashFormat::Nix32, true), hash.to_string(HashFormat::Nix32, true)));
}
state.allowAndSetStorePathString(storePath, v);
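For context (an editorial note, not part of the diff): this shared `fetch` helper backs `builtins.fetchurl` and `builtins.fetchTarball`, and the hunk above is the hash-pinning path. When a `sha256` is supplied, evaluation can skip the download entirely if a matching fixed-output path already exists; otherwise the downloaded (or unpacked) content must hash to the given value, and Nix reports both the specified and actual hashes on mismatch. A minimal sketch of such a pinned call, with placeholder URL and hash:

```nix
builtins.fetchTarball {
  url = "https://example.org/source.tar.gz";  # placeholder URL
  # placeholder hash (52-character base32); the real value is reported on mismatch
  sha256 = "0000000000000000000000000000000000000000000000000000";
}
```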

View file

@ -1,68 +0,0 @@
#include <nlohmann/json.hpp>
#include <gtest/gtest.h>
#include <rapidcheck/gtest.h>
#include "tests/derived-path.hh"
#include "tests/libexpr.hh"
namespace nix {
// Testing of trivial expressions
class DerivedPathExpressionTest : public LibExprTest {};
// FIXME: `RC_GTEST_FIXTURE_PROP` isn't calling `SetUpTestSuite` because it is
// not a real fixture.
//
// See https://github.com/emil-e/rapidcheck/blob/master/doc/gtest.md#rc_gtest_fixture_propfixture-name-args
TEST_F(DerivedPathExpressionTest, force_init)
{
}
#ifndef COVERAGE
RC_GTEST_FIXTURE_PROP(
DerivedPathExpressionTest,
prop_opaque_path_round_trip,
(const SingleDerivedPath::Opaque & o))
{
auto * v = state.allocValue();
state.mkStorePathString(o.path, *v);
auto d = state.coerceToSingleDerivedPath(noPos, *v, "");
RC_ASSERT(SingleDerivedPath { o } == d);
}
// TODO use DerivedPath::Built for parameter once it supports a single output
// path only.
RC_GTEST_FIXTURE_PROP(
DerivedPathExpressionTest,
prop_derived_path_built_placeholder_round_trip,
(const SingleDerivedPath::Built & b))
{
/**
* We set these in tests rather than the regular globals so we don't have
* to worry about race conditions if the tests run concurrently.
*/
ExperimentalFeatureSettings mockXpSettings;
mockXpSettings.set("experimental-features", "ca-derivations");
auto * v = state.allocValue();
state.mkOutputString(*v, b, std::nullopt, mockXpSettings);
auto [d, _] = state.coerceToSingleDerivedPathUnchecked(noPos, *v, "");
RC_ASSERT(SingleDerivedPath { b } == d);
}
RC_GTEST_FIXTURE_PROP(
DerivedPathExpressionTest,
prop_derived_path_built_out_path_round_trip,
(const SingleDerivedPath::Built & b, const StorePath & outPath))
{
auto * v = state.allocValue();
state.mkOutputString(*v, b, outPath);
auto [d, _] = state.coerceToSingleDerivedPathUnchecked(noPos, *v, "");
RC_ASSERT(SingleDerivedPath { b } == d);
}
#endif
} /* namespace nix */

File diff suppressed because it is too large

View file

@ -1,22 +0,0 @@
#include <gtest/gtest.h>
#include "flake/flakeref.hh"
namespace nix {
/* ----------- tests for flake/flakeref.hh --------------------------------------------------*/
/* ----------------------------------------------------------------------------
* to_string
* --------------------------------------------------------------------------*/
TEST(to_string, doesntReencodeUrl) {
auto s = "http://localhost:8181/test/+3d.tar.gz";
auto flakeref = parseFlakeRef(s);
auto parsed = flakeref.to_string();
auto expected = "http://localhost:8181/test/%2B3d.tar.gz";
ASSERT_EQ(parsed, expected);
}
}

View file

@ -1,68 +0,0 @@
#include "tests/libexpr.hh"
#include "value-to-json.hh"
namespace nix {
// Testing the conversion to JSON
class JSONValueTest : public LibExprTest {
protected:
std::string getJSONValue(Value& value) {
std::stringstream ss;
NixStringContext ps;
printValueAsJSON(state, true, value, noPos, ss, ps);
return ss.str();
}
};
TEST_F(JSONValueTest, null) {
Value v;
v.mkNull();
ASSERT_EQ(getJSONValue(v), "null");
}
TEST_F(JSONValueTest, BoolFalse) {
Value v;
v.mkBool(false);
ASSERT_EQ(getJSONValue(v),"false");
}
TEST_F(JSONValueTest, BoolTrue) {
Value v;
v.mkBool(true);
ASSERT_EQ(getJSONValue(v), "true");
}
TEST_F(JSONValueTest, IntPositive) {
Value v;
v.mkInt(100);
ASSERT_EQ(getJSONValue(v), "100");
}
TEST_F(JSONValueTest, IntNegative) {
Value v;
v.mkInt(-100);
ASSERT_EQ(getJSONValue(v), "-100");
}
TEST_F(JSONValueTest, String) {
Value v;
v.mkString("test");
ASSERT_EQ(getJSONValue(v), "\"test\"");
}
TEST_F(JSONValueTest, StringQuotes) {
Value v;
v.mkString("test\"");
ASSERT_EQ(getJSONValue(v), "\"test\\\"\"");
}
// The dummy store doesn't support writing files. Fails with this exception message:
// C++ exception with description "error: operation 'addToStoreFromDump' is
// not supported by store 'dummy'" thrown in the test body.
TEST_F(JSONValueTest, DISABLED_Path) {
Value v;
v.mkPath(state.rootPath(CanonPath("/test")));
ASSERT_EQ(getJSONValue(v), "\"/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x\"");
}
} /* namespace nix */

View file

@ -1,143 +0,0 @@
#pragma once
///@file
#include <gtest/gtest.h>
#include <gmock/gmock.h>
#include "value.hh"
#include "nixexpr.hh"
#include "eval.hh"
#include "eval-inline.hh"
#include "store-api.hh"
#include "tests/libstore.hh"
namespace nix {
class LibExprTest : public LibStoreTest {
public:
static void SetUpTestSuite() {
LibStoreTest::SetUpTestSuite();
initGC();
}
protected:
LibExprTest()
: LibStoreTest()
, state({}, store)
{
}
Value eval(std::string input, bool forceValue = true) {
Value v;
Expr * e = state.parseExprFromString(input, state.rootPath(CanonPath::root));
assert(e);
state.eval(e, v);
if (forceValue)
state.forceValue(v, noPos);
return v;
}
Symbol createSymbol(const char * value) {
return state.symbols.create(value);
}
EvalState state;
};
MATCHER(IsListType, "") {
return arg != nList;
}
MATCHER(IsList, "") {
return arg.type() == nList;
}
MATCHER(IsString, "") {
return arg.type() == nString;
}
MATCHER(IsNull, "") {
return arg.type() == nNull;
}
MATCHER(IsThunk, "") {
return arg.type() == nThunk;
}
MATCHER(IsAttrs, "") {
return arg.type() == nAttrs;
}
MATCHER_P(IsStringEq, s, fmt("The string is equal to \"%1%\"", s)) {
if (arg.type() != nString) {
return false;
}
return std::string_view(arg.c_str()) == s;
}
MATCHER_P(IsIntEq, v, fmt("The string is equal to \"%1%\"", v)) {
if (arg.type() != nInt) {
return false;
}
return arg.integer == v;
}
MATCHER_P(IsFloatEq, v, fmt("The float is equal to \"%1%\"", v)) {
if (arg.type() != nFloat) {
return false;
}
return arg.fpoint == v;
}
MATCHER(IsTrue, "") {
if (arg.type() != nBool) {
return false;
}
return arg.boolean == true;
}
MATCHER(IsFalse, "") {
if (arg.type() != nBool) {
return false;
}
return arg.boolean == false;
}
MATCHER_P(IsPathEq, p, fmt("Is a path equal to \"%1%\"", p)) {
if (arg.type() != nPath) {
*result_listener << "Expected a path got " << arg.type();
return false;
} else {
auto path = arg.path();
if (path.path != CanonPath(p)) {
*result_listener << "Expected a path that equals \"" << p << "\" but got: " << path.path;
return false;
}
}
return true;
}
MATCHER_P(IsListOfSize, n, fmt("Is a list of size [%1%]", n)) {
if (arg.type() != nList) {
*result_listener << "Expected list got " << arg.type();
return false;
} else if (arg.listSize() != (size_t)n) {
*result_listener << "Expected as list of size " << n << " got " << arg.listSize();
return false;
}
return true;
}
MATCHER_P(IsAttrsOfSize, n, fmt("Is a set of size [%1%]", n)) {
if (arg.type() != nAttrs) {
*result_listener << "Expected set got " << arg.type();
return false;
} else if (arg.attrs->size() != (size_t)n) {
*result_listener << "Expected a set with " << n << " attributes but got " << arg.attrs->size();
return false;
}
return true;
}
} /* namespace nix */

View file

@ -1,23 +0,0 @@
check: libexpr-tests_RUN
programs += libexpr-tests
libexpr-tests_NAME := libnixexpr-tests
libexpr-tests_DIR := $(d)
ifeq ($(INSTALL_UNIT_TESTS), yes)
libexpr-tests_INSTALL_DIR := $(checkbindir)
else
libexpr-tests_INSTALL_DIR :=
endif
libexpr-tests_SOURCES := \
$(wildcard $(d)/*.cc) \
$(wildcard $(d)/value/*.cc)
libexpr-tests_CXXFLAGS += -I src/libexpr -I src/libutil -I src/libstore -I src/libexpr/tests -I src/libfetchers
libexpr-tests_LIBS = libstore-tests libutils-tests libexpr libutil libstore libfetchers
libexpr-tests_LDFLAGS := -lrapidcheck $(GTEST_LIBS) -lgmock

View file

@ -1,832 +0,0 @@
#include <gmock/gmock.h>
#include <gtest/gtest.h>
#include "tests/libexpr.hh"
namespace nix {
class CaptureLogger : public Logger
{
std::ostringstream oss;
public:
CaptureLogger() {}
std::string get() const {
return oss.str();
}
void log(Verbosity lvl, std::string_view s) override {
oss << s << std::endl;
}
void logEI(const ErrorInfo & ei) override {
showErrorInfo(oss, ei, loggerSettings.showTrace.get());
}
};
class CaptureLogging {
Logger * oldLogger;
std::unique_ptr<CaptureLogger> tempLogger;
public:
CaptureLogging() : tempLogger(std::make_unique<CaptureLogger>()) {
oldLogger = logger;
logger = tempLogger.get();
}
~CaptureLogging() {
logger = oldLogger;
}
std::string get() const {
return tempLogger->get();
}
};
// Testing eval of PrimOp's
class PrimOpTest : public LibExprTest {};
TEST_F(PrimOpTest, throw) {
ASSERT_THROW(eval("throw \"foo\""), ThrownError);
}
TEST_F(PrimOpTest, abort) {
ASSERT_THROW(eval("abort \"abort\""), Abort);
}
TEST_F(PrimOpTest, ceil) {
auto v = eval("builtins.ceil 1.9");
ASSERT_THAT(v, IsIntEq(2));
}
TEST_F(PrimOpTest, floor) {
auto v = eval("builtins.floor 1.9");
ASSERT_THAT(v, IsIntEq(1));
}
TEST_F(PrimOpTest, tryEvalFailure) {
auto v = eval("builtins.tryEval (throw \"\")");
ASSERT_THAT(v, IsAttrsOfSize(2));
auto s = createSymbol("success");
auto p = v.attrs->get(s);
ASSERT_NE(p, nullptr);
ASSERT_THAT(*p->value, IsFalse());
}
TEST_F(PrimOpTest, tryEvalSuccess) {
auto v = eval("builtins.tryEval 123");
ASSERT_THAT(v, IsAttrs());
auto s = createSymbol("success");
auto p = v.attrs->get(s);
ASSERT_NE(p, nullptr);
ASSERT_THAT(*p->value, IsTrue());
s = createSymbol("value");
p = v.attrs->get(s);
ASSERT_NE(p, nullptr);
ASSERT_THAT(*p->value, IsIntEq(123));
}
TEST_F(PrimOpTest, getEnv) {
setenv("_NIX_UNIT_TEST_ENV_VALUE", "test value", 1);
auto v = eval("builtins.getEnv \"_NIX_UNIT_TEST_ENV_VALUE\"");
ASSERT_THAT(v, IsStringEq("test value"));
}
TEST_F(PrimOpTest, seq) {
ASSERT_THROW(eval("let x = throw \"test\"; in builtins.seq x { }"), ThrownError);
}
TEST_F(PrimOpTest, seqNotDeep) {
auto v = eval("let x = { z = throw \"test\"; }; in builtins.seq x { }");
ASSERT_THAT(v, IsAttrs());
}
TEST_F(PrimOpTest, deepSeq) {
ASSERT_THROW(eval("let x = { z = throw \"test\"; }; in builtins.deepSeq x { }"), ThrownError);
}
TEST_F(PrimOpTest, trace) {
CaptureLogging l;
auto v = eval("builtins.trace \"test string 123\" 123");
ASSERT_THAT(v, IsIntEq(123));
auto text = l.get();
ASSERT_NE(text.find("test string 123"), std::string::npos);
}
TEST_F(PrimOpTest, placeholder) {
auto v = eval("builtins.placeholder \"out\"");
ASSERT_THAT(v, IsStringEq("/1rz4g4znpzjwh1xymhjpm42vipw92pr73vdgl6xs1hycac8kf2n9"));
}
TEST_F(PrimOpTest, baseNameOf) {
auto v = eval("builtins.baseNameOf /some/path");
ASSERT_THAT(v, IsStringEq("path"));
}
TEST_F(PrimOpTest, dirOf) {
auto v = eval("builtins.dirOf /some/path");
ASSERT_THAT(v, IsPathEq("/some"));
}
TEST_F(PrimOpTest, attrValues) {
auto v = eval("builtins.attrValues { x = \"foo\"; a = 1; }");
ASSERT_THAT(v, IsListOfSize(2));
ASSERT_THAT(*v.listElems()[0], IsIntEq(1));
ASSERT_THAT(*v.listElems()[1], IsStringEq("foo"));
}
TEST_F(PrimOpTest, getAttr) {
auto v = eval("builtins.getAttr \"x\" { x = \"foo\"; }");
ASSERT_THAT(v, IsStringEq("foo"));
}
TEST_F(PrimOpTest, getAttrNotFound) {
// FIXME: TypeError is really bad here, also the error wording is worse
// than on Nix <=2.3
ASSERT_THROW(eval("builtins.getAttr \"y\" { }"), TypeError);
}
TEST_F(PrimOpTest, unsafeGetAttrPos) {
// The `y` attribute is at position
const char* expr = "builtins.unsafeGetAttrPos \"y\" { y = \"x\"; }";
auto v = eval(expr);
ASSERT_THAT(v, IsNull());
}
TEST_F(PrimOpTest, hasAttr) {
auto v = eval("builtins.hasAttr \"x\" { x = 1; }");
ASSERT_THAT(v, IsTrue());
}
TEST_F(PrimOpTest, hasAttrNotFound) {
auto v = eval("builtins.hasAttr \"x\" { }");
ASSERT_THAT(v, IsFalse());
}
TEST_F(PrimOpTest, isAttrs) {
auto v = eval("builtins.isAttrs {}");
ASSERT_THAT(v, IsTrue());
}
TEST_F(PrimOpTest, isAttrsFalse) {
auto v = eval("builtins.isAttrs null");
ASSERT_THAT(v, IsFalse());
}
TEST_F(PrimOpTest, removeAttrs) {
auto v = eval("builtins.removeAttrs { x = 1; } [\"x\"]");
ASSERT_THAT(v, IsAttrsOfSize(0));
}
TEST_F(PrimOpTest, removeAttrsRetains) {
auto v = eval("builtins.removeAttrs { x = 1; y = 2; } [\"x\"]");
ASSERT_THAT(v, IsAttrsOfSize(1));
ASSERT_NE(v.attrs->find(createSymbol("y")), nullptr);
}
TEST_F(PrimOpTest, listToAttrsEmptyList) {
auto v = eval("builtins.listToAttrs []");
ASSERT_THAT(v, IsAttrsOfSize(0));
ASSERT_EQ(v.type(), nAttrs);
ASSERT_EQ(v.attrs->size(), 0);
}
TEST_F(PrimOpTest, listToAttrsNotFieldName) {
ASSERT_THROW(eval("builtins.listToAttrs [{}]"), Error);
}
TEST_F(PrimOpTest, listToAttrs) {
auto v = eval("builtins.listToAttrs [ { name = \"key\"; value = 123; } ]");
ASSERT_THAT(v, IsAttrsOfSize(1));
auto key = v.attrs->find(createSymbol("key"));
ASSERT_NE(key, nullptr);
ASSERT_THAT(*key->value, IsIntEq(123));
}
TEST_F(PrimOpTest, intersectAttrs) {
auto v = eval("builtins.intersectAttrs { a = 1; b = 2; } { b = 3; c = 4; }");
ASSERT_THAT(v, IsAttrsOfSize(1));
auto b = v.attrs->find(createSymbol("b"));
ASSERT_NE(b, nullptr);
ASSERT_THAT(*b->value, IsIntEq(3));
}
TEST_F(PrimOpTest, catAttrs) {
auto v = eval("builtins.catAttrs \"a\" [{a = 1;} {b = 0;} {a = 2;}]");
ASSERT_THAT(v, IsListOfSize(2));
ASSERT_THAT(*v.listElems()[0], IsIntEq(1));
ASSERT_THAT(*v.listElems()[1], IsIntEq(2));
}
TEST_F(PrimOpTest, functionArgs) {
auto v = eval("builtins.functionArgs ({ x, y ? 123}: 1)");
ASSERT_THAT(v, IsAttrsOfSize(2));
auto x = v.attrs->find(createSymbol("x"));
ASSERT_NE(x, nullptr);
ASSERT_THAT(*x->value, IsFalse());
auto y = v.attrs->find(createSymbol("y"));
ASSERT_NE(y, nullptr);
ASSERT_THAT(*y->value, IsTrue());
}
TEST_F(PrimOpTest, mapAttrs) {
auto v = eval("builtins.mapAttrs (name: value: value * 10) { a = 1; b = 2; }");
ASSERT_THAT(v, IsAttrsOfSize(2));
auto a = v.attrs->find(createSymbol("a"));
ASSERT_NE(a, nullptr);
ASSERT_THAT(*a->value, IsThunk());
state.forceValue(*a->value, noPos);
ASSERT_THAT(*a->value, IsIntEq(10));
auto b = v.attrs->find(createSymbol("b"));
ASSERT_NE(b, nullptr);
ASSERT_THAT(*b->value, IsThunk());
state.forceValue(*b->value, noPos);
ASSERT_THAT(*b->value, IsIntEq(20));
}
TEST_F(PrimOpTest, isList) {
auto v = eval("builtins.isList []");
ASSERT_THAT(v, IsTrue());
}
TEST_F(PrimOpTest, isListFalse) {
auto v = eval("builtins.isList null");
ASSERT_THAT(v, IsFalse());
}
TEST_F(PrimOpTest, elemtAt) {
auto v = eval("builtins.elemAt [0 1 2 3] 3");
ASSERT_THAT(v, IsIntEq(3));
}
TEST_F(PrimOpTest, elemtAtOutOfBounds) {
ASSERT_THROW(eval("builtins.elemAt [0 1 2 3] 5"), Error);
}
TEST_F(PrimOpTest, head) {
auto v = eval("builtins.head [ 3 2 1 0 ]");
ASSERT_THAT(v, IsIntEq(3));
}
TEST_F(PrimOpTest, headEmpty) {
ASSERT_THROW(eval("builtins.head [ ]"), Error);
}
TEST_F(PrimOpTest, headWrongType) {
ASSERT_THROW(eval("builtins.head { }"), Error);
}
TEST_F(PrimOpTest, tail) {
auto v = eval("builtins.tail [ 3 2 1 0 ]");
ASSERT_THAT(v, IsListOfSize(3));
for (const auto [n, elem] : enumerate(v.listItems()))
ASSERT_THAT(*elem, IsIntEq(2 - static_cast<int>(n)));
}
TEST_F(PrimOpTest, tailEmpty) {
ASSERT_THROW(eval("builtins.tail []"), Error);
}
TEST_F(PrimOpTest, map) {
auto v = eval("map (x: \"foo\" + x) [ \"bar\" \"bla\" \"abc\" ]");
ASSERT_THAT(v, IsListOfSize(3));
auto elem = v.listElems()[0];
ASSERT_THAT(*elem, IsThunk());
state.forceValue(*elem, noPos);
ASSERT_THAT(*elem, IsStringEq("foobar"));
elem = v.listElems()[1];
ASSERT_THAT(*elem, IsThunk());
state.forceValue(*elem, noPos);
ASSERT_THAT(*elem, IsStringEq("foobla"));
elem = v.listElems()[2];
ASSERT_THAT(*elem, IsThunk());
state.forceValue(*elem, noPos);
ASSERT_THAT(*elem, IsStringEq("fooabc"));
}
TEST_F(PrimOpTest, filter) {
auto v = eval("builtins.filter (x: x == 2) [ 3 2 3 2 3 2 ]");
ASSERT_THAT(v, IsListOfSize(3));
for (const auto elem : v.listItems())
ASSERT_THAT(*elem, IsIntEq(2));
}
TEST_F(PrimOpTest, elemTrue) {
auto v = eval("builtins.elem 3 [ 1 2 3 4 5 ]");
ASSERT_THAT(v, IsTrue());
}
TEST_F(PrimOpTest, elemFalse) {
auto v = eval("builtins.elem 6 [ 1 2 3 4 5 ]");
ASSERT_THAT(v, IsFalse());
}
TEST_F(PrimOpTest, concatLists) {
auto v = eval("builtins.concatLists [[1 2] [3 4]]");
ASSERT_THAT(v, IsListOfSize(4));
for (const auto [i, elem] : enumerate(v.listItems()))
ASSERT_THAT(*elem, IsIntEq(static_cast<int>(i)+1));
}
TEST_F(PrimOpTest, length) {
auto v = eval("builtins.length [ 1 2 3 ]");
ASSERT_THAT(v, IsIntEq(3));
}
TEST_F(PrimOpTest, foldStrict) {
auto v = eval("builtins.foldl' (a: b: a + b) 0 [1 2 3]");
ASSERT_THAT(v, IsIntEq(6));
}
TEST_F(PrimOpTest, anyTrue) {
auto v = eval("builtins.any (x: x == 2) [ 1 2 3 ]");
ASSERT_THAT(v, IsTrue());
}
TEST_F(PrimOpTest, anyFalse) {
auto v = eval("builtins.any (x: x == 5) [ 1 2 3 ]");
ASSERT_THAT(v, IsFalse());
}
TEST_F(PrimOpTest, allTrue) {
auto v = eval("builtins.all (x: x > 0) [ 1 2 3 ]");
ASSERT_THAT(v, IsTrue());
}
TEST_F(PrimOpTest, allFalse) {
auto v = eval("builtins.all (x: x <= 0) [ 1 2 3 ]");
ASSERT_THAT(v, IsFalse());
}
TEST_F(PrimOpTest, genList) {
auto v = eval("builtins.genList (x: x + 1) 3");
ASSERT_EQ(v.type(), nList);
ASSERT_EQ(v.listSize(), 3);
for (const auto [i, elem] : enumerate(v.listItems())) {
ASSERT_THAT(*elem, IsThunk());
state.forceValue(*elem, noPos);
ASSERT_THAT(*elem, IsIntEq(static_cast<int>(i)+1));
}
}
TEST_F(PrimOpTest, sortLessThan) {
auto v = eval("builtins.sort builtins.lessThan [ 483 249 526 147 42 77 ]");
ASSERT_EQ(v.type(), nList);
ASSERT_EQ(v.listSize(), 6);
const std::vector<int> numbers = { 42, 77, 147, 249, 483, 526 };
for (const auto [n, elem] : enumerate(v.listItems()))
ASSERT_THAT(*elem, IsIntEq(numbers[n]));
}
TEST_F(PrimOpTest, partition) {
auto v = eval("builtins.partition (x: x > 10) [1 23 9 3 42]");
ASSERT_THAT(v, IsAttrsOfSize(2));
auto right = v.attrs->get(createSymbol("right"));
ASSERT_NE(right, nullptr);
ASSERT_THAT(*right->value, IsListOfSize(2));
ASSERT_THAT(*right->value->listElems()[0], IsIntEq(23));
ASSERT_THAT(*right->value->listElems()[1], IsIntEq(42));
auto wrong = v.attrs->get(createSymbol("wrong"));
ASSERT_NE(wrong, nullptr);
ASSERT_EQ(wrong->value->type(), nList);
ASSERT_EQ(wrong->value->listSize(), 3);
ASSERT_THAT(*wrong->value, IsListOfSize(3));
ASSERT_THAT(*wrong->value->listElems()[0], IsIntEq(1));
ASSERT_THAT(*wrong->value->listElems()[1], IsIntEq(9));
ASSERT_THAT(*wrong->value->listElems()[2], IsIntEq(3));
}
TEST_F(PrimOpTest, concatMap) {
auto v = eval("builtins.concatMap (x: x ++ [0]) [ [1 2] [3 4] ]");
ASSERT_EQ(v.type(), nList);
ASSERT_EQ(v.listSize(), 6);
const std::vector<int> numbers = { 1, 2, 0, 3, 4, 0 };
for (const auto [n, elem] : enumerate(v.listItems()))
ASSERT_THAT(*elem, IsIntEq(numbers[n]));
}
TEST_F(PrimOpTest, addInt) {
auto v = eval("builtins.add 3 5");
ASSERT_THAT(v, IsIntEq(8));
}
TEST_F(PrimOpTest, addFloat) {
auto v = eval("builtins.add 3.0 5.0");
ASSERT_THAT(v, IsFloatEq(8.0));
}
TEST_F(PrimOpTest, addFloatToInt) {
auto v = eval("builtins.add 3.0 5");
ASSERT_THAT(v, IsFloatEq(8.0));
v = eval("builtins.add 3 5.0");
ASSERT_THAT(v, IsFloatEq(8.0));
}
TEST_F(PrimOpTest, subInt) {
auto v = eval("builtins.sub 5 2");
ASSERT_THAT(v, IsIntEq(3));
}
TEST_F(PrimOpTest, subFloat) {
auto v = eval("builtins.sub 5.0 2.0");
ASSERT_THAT(v, IsFloatEq(3.0));
}
TEST_F(PrimOpTest, subFloatFromInt) {
auto v = eval("builtins.sub 5.0 2");
ASSERT_THAT(v, IsFloatEq(3.0));
v = eval("builtins.sub 4 2.0");
ASSERT_THAT(v, IsFloatEq(2.0));
}
TEST_F(PrimOpTest, mulInt) {
auto v = eval("builtins.mul 3 5");
ASSERT_THAT(v, IsIntEq(15));
}
TEST_F(PrimOpTest, mulFloat) {
auto v = eval("builtins.mul 3.0 5.0");
ASSERT_THAT(v, IsFloatEq(15.0));
}
TEST_F(PrimOpTest, mulFloatMixed) {
auto v = eval("builtins.mul 3 5.0");
ASSERT_THAT(v, IsFloatEq(15.0));
v = eval("builtins.mul 2.0 5");
ASSERT_THAT(v, IsFloatEq(10.0));
}
TEST_F(PrimOpTest, divInt) {
auto v = eval("builtins.div 5 (-1)");
ASSERT_THAT(v, IsIntEq(-5));
}
TEST_F(PrimOpTest, divIntZero) {
ASSERT_THROW(eval("builtins.div 5 0"), EvalError);
}
TEST_F(PrimOpTest, divFloat) {
auto v = eval("builtins.div 5.0 (-1)");
ASSERT_THAT(v, IsFloatEq(-5.0));
}
TEST_F(PrimOpTest, divFloatZero) {
ASSERT_THROW(eval("builtins.div 5.0 0.0"), EvalError);
}
TEST_F(PrimOpTest, bitOr) {
auto v = eval("builtins.bitOr 1 2");
ASSERT_THAT(v, IsIntEq(3));
}
TEST_F(PrimOpTest, bitXor) {
auto v = eval("builtins.bitXor 3 2");
ASSERT_THAT(v, IsIntEq(1));
}
TEST_F(PrimOpTest, lessThanFalse) {
auto v = eval("builtins.lessThan 3 1");
ASSERT_THAT(v, IsFalse());
}
TEST_F(PrimOpTest, lessThanTrue) {
auto v = eval("builtins.lessThan 1 3");
ASSERT_THAT(v, IsTrue());
}
TEST_F(PrimOpTest, toStringAttrsThrows) {
ASSERT_THROW(eval("builtins.toString {}"), EvalError);
}
TEST_F(PrimOpTest, toStringLambdaThrows) {
ASSERT_THROW(eval("builtins.toString (x: x)"), EvalError);
}
class ToStringPrimOpTest :
public PrimOpTest,
public testing::WithParamInterface<std::tuple<std::string, std::string_view>>
{};
TEST_P(ToStringPrimOpTest, toString) {
const auto [input, output] = GetParam();
auto v = eval(input);
ASSERT_THAT(v, IsStringEq(output));
}
#define CASE(input, output) (std::make_tuple(std::string_view("builtins.toString " input), std::string_view(output)))
INSTANTIATE_TEST_SUITE_P(
toString,
ToStringPrimOpTest,
testing::Values(
CASE(R"("foo")", "foo"),
CASE(R"(1)", "1"),
CASE(R"([1 2 3])", "1 2 3"),
CASE(R"(.123)", "0.123000"),
CASE(R"(true)", "1"),
CASE(R"(false)", ""),
CASE(R"(null)", ""),
CASE(R"({ v = "bar"; __toString = self: self.v; })", "bar"),
CASE(R"({ v = "bar"; __toString = self: self.v; outPath = "foo"; })", "bar"),
CASE(R"({ outPath = "foo"; })", "foo"),
CASE(R"(./test)", "/test")
)
);
#undef CASE
TEST_F(PrimOpTest, substring){
auto v = eval("builtins.substring 0 3 \"nixos\"");
ASSERT_THAT(v, IsStringEq("nix"));
}
TEST_F(PrimOpTest, substringSmallerString){
auto v = eval("builtins.substring 0 3 \"n\"");
ASSERT_THAT(v, IsStringEq("n"));
}
TEST_F(PrimOpTest, substringEmptyString){
auto v = eval("builtins.substring 1 3 \"\"");
ASSERT_THAT(v, IsStringEq(""));
}
TEST_F(PrimOpTest, stringLength) {
auto v = eval("builtins.stringLength \"123\"");
ASSERT_THAT(v, IsIntEq(3));
}
TEST_F(PrimOpTest, hashStringMd5) {
auto v = eval("builtins.hashString \"md5\" \"asdf\"");
ASSERT_THAT(v, IsStringEq("912ec803b2ce49e4a541068d495ab570"));
}
TEST_F(PrimOpTest, hashStringSha1) {
auto v = eval("builtins.hashString \"sha1\" \"asdf\"");
ASSERT_THAT(v, IsStringEq("3da541559918a808c2402bba5012f6c60b27661c"));
}
TEST_F(PrimOpTest, hashStringSha256) {
auto v = eval("builtins.hashString \"sha256\" \"asdf\"");
ASSERT_THAT(v, IsStringEq("f0e4c2f76c58916ec258f246851bea091d14d4247a2fc3e18694461b1816e13b"));
}
TEST_F(PrimOpTest, hashStringSha512) {
auto v = eval("builtins.hashString \"sha512\" \"asdf\"");
ASSERT_THAT(v, IsStringEq("401b09eab3c013d4ca54922bb802bec8fd5318192b0a75f201d8b3727429080fb337591abd3e44453b954555b7a0812e1081c39b740293f765eae731f5a65ed1"));
}
TEST_F(PrimOpTest, hashStringInvalidHashType) {
ASSERT_THROW(eval("builtins.hashString \"foobar\" \"asdf\""), Error);
}
TEST_F(PrimOpTest, nixPath) {
auto v = eval("builtins.nixPath");
ASSERT_EQ(v.type(), nList);
// We can't test much more as currently the EvalSettings are a global
// that we can't easily swap / replace
}
TEST_F(PrimOpTest, langVersion) {
auto v = eval("builtins.langVersion");
ASSERT_EQ(v.type(), nInt);
}
TEST_F(PrimOpTest, storeDir) {
auto v = eval("builtins.storeDir");
ASSERT_THAT(v, IsStringEq(settings.nixStore));
}
TEST_F(PrimOpTest, nixVersion) {
auto v = eval("builtins.nixVersion");
ASSERT_THAT(v, IsStringEq(nixVersion));
}
TEST_F(PrimOpTest, currentSystem) {
auto v = eval("builtins.currentSystem");
ASSERT_THAT(v, IsStringEq(settings.thisSystem.get()));
}
TEST_F(PrimOpTest, derivation) {
auto v = eval("derivation");
ASSERT_EQ(v.type(), nFunction);
ASSERT_TRUE(v.isLambda());
ASSERT_NE(v.lambda.fun, nullptr);
ASSERT_TRUE(v.lambda.fun->hasFormals());
}
TEST_F(PrimOpTest, currentTime) {
auto v = eval("builtins.currentTime");
ASSERT_EQ(v.type(), nInt);
ASSERT_TRUE(v.integer > 0);
}
TEST_F(PrimOpTest, splitVersion) {
auto v = eval("builtins.splitVersion \"1.2.3git\"");
ASSERT_THAT(v, IsListOfSize(4));
const std::vector<std::string_view> strings = { "1", "2", "3", "git" };
for (const auto [n, p] : enumerate(v.listItems()))
ASSERT_THAT(*p, IsStringEq(strings[n]));
}
class CompareVersionsPrimOpTest :
public PrimOpTest,
public testing::WithParamInterface<std::tuple<std::string, const int>>
{};
TEST_P(CompareVersionsPrimOpTest, compareVersions) {
auto [expression, expectation] = GetParam();
auto v = eval(expression);
ASSERT_THAT(v, IsIntEq(expectation));
}
#define CASE(a, b, expected) (std::make_tuple("builtins.compareVersions \"" #a "\" \"" #b "\"", expected))
INSTANTIATE_TEST_SUITE_P(
compareVersions,
CompareVersionsPrimOpTest,
testing::Values(
// The first two are weird cases. Intuition tells they should
// be the same but they aren't.
CASE(1.0, 1.0.0, -1),
CASE(1.0.0, 1.0, 1),
// the following are from the nix-env manual:
CASE(1.0, 2.3, -1),
CASE(2.1, 2.3, -1),
CASE(2.3, 2.3, 0),
CASE(2.5, 2.3, 1),
CASE(3.1, 2.3, 1),
CASE(2.3.1, 2.3, 1),
CASE(2.3.1, 2.3a, 1),
CASE(2.3pre1, 2.3, -1),
CASE(2.3pre3, 2.3pre12, -1),
CASE(2.3a, 2.3c, -1),
CASE(2.3pre1, 2.3c, -1),
CASE(2.3pre1, 2.3q, -1)
)
);
#undef CASE
class ParseDrvNamePrimOpTest :
public PrimOpTest,
public testing::WithParamInterface<std::tuple<std::string, std::string_view, std::string_view>>
{};
TEST_P(ParseDrvNamePrimOpTest, parseDrvName) {
auto [input, expectedName, expectedVersion] = GetParam();
const auto expr = fmt("builtins.parseDrvName \"%1%\"", input);
auto v = eval(expr);
ASSERT_THAT(v, IsAttrsOfSize(2));
auto name = v.attrs->find(createSymbol("name"));
ASSERT_TRUE(name);
ASSERT_THAT(*name->value, IsStringEq(expectedName));
auto version = v.attrs->find(createSymbol("version"));
ASSERT_TRUE(version);
ASSERT_THAT(*version->value, IsStringEq(expectedVersion));
}
INSTANTIATE_TEST_SUITE_P(
parseDrvName,
ParseDrvNamePrimOpTest,
testing::Values(
std::make_tuple("nix-0.12pre12876", "nix", "0.12pre12876"),
std::make_tuple("a-b-c-1234pre5+git", "a-b-c", "1234pre5+git")
)
);
TEST_F(PrimOpTest, replaceStrings) {
// FIXME: add a test that verifies the string context is as expected
auto v = eval("builtins.replaceStrings [\"oo\" \"a\"] [\"a\" \"i\"] \"foobar\"");
ASSERT_EQ(v.type(), nString);
ASSERT_EQ(v.string_view(), "fabir");
}
TEST_F(PrimOpTest, concatStringsSep) {
// FIXME: add a test that verifies the string context is as expected
auto v = eval("builtins.concatStringsSep \"%\" [\"foo\" \"bar\" \"baz\"]");
ASSERT_EQ(v.type(), nString);
ASSERT_EQ(v.string_view(), "foo%bar%baz");
}
TEST_F(PrimOpTest, split1) {
// v = [ "" [ "a" ] "c" ]
auto v = eval("builtins.split \"(a)b\" \"abc\"");
ASSERT_THAT(v, IsListOfSize(3));
ASSERT_THAT(*v.listElems()[0], IsStringEq(""));
ASSERT_THAT(*v.listElems()[1], IsListOfSize(1));
ASSERT_THAT(*v.listElems()[1]->listElems()[0], IsStringEq("a"));
ASSERT_THAT(*v.listElems()[2], IsStringEq("c"));
}
TEST_F(PrimOpTest, split2) {
// v is expected to be a list [ "" [ "a" ] "b" [ "c"] "" ]
auto v = eval("builtins.split \"([ac])\" \"abc\"");
ASSERT_THAT(v, IsListOfSize(5));
ASSERT_THAT(*v.listElems()[0], IsStringEq(""));
ASSERT_THAT(*v.listElems()[1], IsListOfSize(1));
ASSERT_THAT(*v.listElems()[1]->listElems()[0], IsStringEq("a"));
ASSERT_THAT(*v.listElems()[2], IsStringEq("b"));
ASSERT_THAT(*v.listElems()[3], IsListOfSize(1));
ASSERT_THAT(*v.listElems()[3]->listElems()[0], IsStringEq("c"));
ASSERT_THAT(*v.listElems()[4], IsStringEq(""));
}
TEST_F(PrimOpTest, split3) {
auto v = eval("builtins.split \"(a)|(c)\" \"abc\"");
ASSERT_THAT(v, IsListOfSize(5));
// First list element
ASSERT_THAT(*v.listElems()[0], IsStringEq(""));
// 2nd list element is a list [ "" null ]
ASSERT_THAT(*v.listElems()[1], IsListOfSize(2));
ASSERT_THAT(*v.listElems()[1]->listElems()[0], IsStringEq("a"));
ASSERT_THAT(*v.listElems()[1]->listElems()[1], IsNull());
// 3rd element
ASSERT_THAT(*v.listElems()[2], IsStringEq("b"));
// 4th element is a list: [ null "c" ]
ASSERT_THAT(*v.listElems()[3], IsListOfSize(2));
ASSERT_THAT(*v.listElems()[3]->listElems()[0], IsNull());
ASSERT_THAT(*v.listElems()[3]->listElems()[1], IsStringEq("c"));
// 5th element is the empty string
ASSERT_THAT(*v.listElems()[4], IsStringEq(""));
}
TEST_F(PrimOpTest, split4) {
auto v = eval("builtins.split \"([[:upper:]]+)\" \" FOO \"");
ASSERT_THAT(v, IsListOfSize(3));
auto first = v.listElems()[0];
auto second = v.listElems()[1];
auto third = v.listElems()[2];
ASSERT_THAT(*first, IsStringEq(" "));
ASSERT_THAT(*second, IsListOfSize(1));
ASSERT_THAT(*second->listElems()[0], IsStringEq("FOO"));
ASSERT_THAT(*third, IsStringEq(" "));
}
TEST_F(PrimOpTest, match1) {
auto v = eval("builtins.match \"ab\" \"abc\"");
ASSERT_THAT(v, IsNull());
}
TEST_F(PrimOpTest, match2) {
auto v = eval("builtins.match \"abc\" \"abc\"");
ASSERT_THAT(v, IsListOfSize(0));
}
TEST_F(PrimOpTest, match3) {
auto v = eval("builtins.match \"a(b)(c)\" \"abc\"");
ASSERT_THAT(v, IsListOfSize(2));
ASSERT_THAT(*v.listElems()[0], IsStringEq("b"));
ASSERT_THAT(*v.listElems()[1], IsStringEq("c"));
}
TEST_F(PrimOpTest, match4) {
auto v = eval("builtins.match \"[[:space:]]+([[:upper:]]+)[[:space:]]+\" \" FOO \"");
ASSERT_THAT(v, IsListOfSize(1));
ASSERT_THAT(*v.listElems()[0], IsStringEq("FOO"));
}
TEST_F(PrimOpTest, attrNames) {
auto v = eval("builtins.attrNames { x = 1; y = 2; z = 3; a = 2; }");
ASSERT_THAT(v, IsListOfSize(4));
// ensure that the list is sorted
const std::vector<std::string_view> expected { "a", "x", "y", "z" };
for (const auto [n, elem] : enumerate(v.listItems()))
ASSERT_THAT(*elem, IsStringEq(expected[n]));
}
TEST_F(PrimOpTest, genericClosure_not_strict) {
// Operator should not be used when startSet is empty
auto v = eval("builtins.genericClosure { startSet = []; }");
ASSERT_THAT(v, IsListOfSize(0));
}
} /* namespace nix */

View file

@ -1,90 +0,0 @@
#include <gtest/gtest.h>
#include <gmock/gmock.h>
#include "search-path.hh"
namespace nix {
TEST(SearchPathElem, parse_justPath) {
ASSERT_EQ(
SearchPath::Elem::parse("foo"),
(SearchPath::Elem {
.prefix = SearchPath::Prefix { .s = "" },
.path = SearchPath::Path { .s = "foo" },
}));
}
TEST(SearchPathElem, parse_emptyPrefix) {
ASSERT_EQ(
SearchPath::Elem::parse("=foo"),
(SearchPath::Elem {
.prefix = SearchPath::Prefix { .s = "" },
.path = SearchPath::Path { .s = "foo" },
}));
}
TEST(SearchPathElem, parse_oneEq) {
ASSERT_EQ(
SearchPath::Elem::parse("foo=bar"),
(SearchPath::Elem {
.prefix = SearchPath::Prefix { .s = "foo" },
.path = SearchPath::Path { .s = "bar" },
}));
}
TEST(SearchPathElem, parse_twoEqs) {
ASSERT_EQ(
SearchPath::Elem::parse("foo=bar=baz"),
(SearchPath::Elem {
.prefix = SearchPath::Prefix { .s = "foo" },
.path = SearchPath::Path { .s = "bar=baz" },
}));
}
TEST(SearchPathElem, suffixIfPotentialMatch_justPath) {
SearchPath::Prefix prefix { .s = "" };
ASSERT_EQ(prefix.suffixIfPotentialMatch("any/thing"), std::optional { "any/thing" });
}
TEST(SearchPathElem, suffixIfPotentialMatch_misleadingPrefix1) {
SearchPath::Prefix prefix { .s = "foo" };
ASSERT_EQ(prefix.suffixIfPotentialMatch("fooX"), std::nullopt);
}
TEST(SearchPathElem, suffixIfPotentialMatch_misleadingPrefix2) {
SearchPath::Prefix prefix { .s = "foo" };
ASSERT_EQ(prefix.suffixIfPotentialMatch("fooX/bar"), std::nullopt);
}
TEST(SearchPathElem, suffixIfPotentialMatch_partialPrefix) {
SearchPath::Prefix prefix { .s = "fooX" };
ASSERT_EQ(prefix.suffixIfPotentialMatch("foo"), std::nullopt);
}
TEST(SearchPathElem, suffixIfPotentialMatch_exactPrefix) {
SearchPath::Prefix prefix { .s = "foo" };
ASSERT_EQ(prefix.suffixIfPotentialMatch("foo"), std::optional { "" });
}
TEST(SearchPathElem, suffixIfPotentialMatch_multiKey) {
SearchPath::Prefix prefix { .s = "foo/bar" };
ASSERT_EQ(prefix.suffixIfPotentialMatch("foo/bar/baz"), std::optional { "baz" });
}
TEST(SearchPathElem, suffixIfPotentialMatch_trailingSlash) {
SearchPath::Prefix prefix { .s = "foo" };
ASSERT_EQ(prefix.suffixIfPotentialMatch("foo/"), std::optional { "" });
}
TEST(SearchPathElem, suffixIfPotentialMatch_trailingDoubleSlash) {
SearchPath::Prefix prefix { .s = "foo" };
ASSERT_EQ(prefix.suffixIfPotentialMatch("foo//"), std::optional { "/" });
}
TEST(SearchPathElem, suffixIfPotentialMatch_trailingPath) {
SearchPath::Prefix prefix { .s = "foo" };
ASSERT_EQ(prefix.suffixIfPotentialMatch("foo/bar/baz"), std::optional { "bar/baz" });
}
}

View file

@ -1,196 +0,0 @@
#include "tests/libexpr.hh"
namespace nix {
// Testing of trivial expressions
class TrivialExpressionTest : public LibExprTest {};
TEST_F(TrivialExpressionTest, true) {
auto v = eval("true");
ASSERT_THAT(v, IsTrue());
}
TEST_F(TrivialExpressionTest, false) {
auto v = eval("false");
ASSERT_THAT(v, IsFalse());
}
TEST_F(TrivialExpressionTest, null) {
auto v = eval("null");
ASSERT_THAT(v, IsNull());
}
TEST_F(TrivialExpressionTest, 1) {
auto v = eval("1");
ASSERT_THAT(v, IsIntEq(1));
}
TEST_F(TrivialExpressionTest, 1plus1) {
auto v = eval("1+1");
ASSERT_THAT(v, IsIntEq(2));
}
TEST_F(TrivialExpressionTest, minus1) {
auto v = eval("-1");
ASSERT_THAT(v, IsIntEq(-1));
}
TEST_F(TrivialExpressionTest, 1minus1) {
auto v = eval("1-1");
ASSERT_THAT(v, IsIntEq(0));
}
TEST_F(TrivialExpressionTest, lambdaAdd) {
auto v = eval("let add = a: b: a + b; in add 1 2");
ASSERT_THAT(v, IsIntEq(3));
}
TEST_F(TrivialExpressionTest, list) {
auto v = eval("[]");
ASSERT_THAT(v, IsListOfSize(0));
}
TEST_F(TrivialExpressionTest, attrs) {
auto v = eval("{}");
ASSERT_THAT(v, IsAttrsOfSize(0));
}
TEST_F(TrivialExpressionTest, float) {
auto v = eval("1.234");
ASSERT_THAT(v, IsFloatEq(1.234));
}
TEST_F(TrivialExpressionTest, updateAttrs) {
auto v = eval("{ a = 1; } // { b = 2; a = 3; }");
ASSERT_THAT(v, IsAttrsOfSize(2));
auto a = v.attrs->find(createSymbol("a"));
ASSERT_NE(a, nullptr);
ASSERT_THAT(*a->value, IsIntEq(3));
auto b = v.attrs->find(createSymbol("b"));
ASSERT_NE(b, nullptr);
ASSERT_THAT(*b->value, IsIntEq(2));
}
TEST_F(TrivialExpressionTest, hasAttrOpFalse) {
auto v = eval("{} ? a");
ASSERT_THAT(v, IsFalse());
}
TEST_F(TrivialExpressionTest, hasAttrOpTrue) {
auto v = eval("{ a = 123; } ? a");
ASSERT_THAT(v, IsTrue());
}
TEST_F(TrivialExpressionTest, withFound) {
auto v = eval("with { a = 23; }; a");
ASSERT_THAT(v, IsIntEq(23));
}
TEST_F(TrivialExpressionTest, withNotFound) {
ASSERT_THROW(eval("with {}; a"), Error);
}
TEST_F(TrivialExpressionTest, withOverride) {
auto v = eval("with { a = 23; }; with { a = 42; }; a");
ASSERT_THAT(v, IsIntEq(42));
}
TEST_F(TrivialExpressionTest, letOverWith) {
auto v = eval("let a = 23; in with { a = 1; }; a");
ASSERT_THAT(v, IsIntEq(23));
}
TEST_F(TrivialExpressionTest, multipleLet) {
auto v = eval("let a = 23; in let a = 42; in a");
ASSERT_THAT(v, IsIntEq(42));
}
TEST_F(TrivialExpressionTest, defaultFunctionArgs) {
auto v = eval("({ a ? 123 }: a) {}");
ASSERT_THAT(v, IsIntEq(123));
}
TEST_F(TrivialExpressionTest, defaultFunctionArgsOverride) {
auto v = eval("({ a ? 123 }: a) { a = 5; }");
ASSERT_THAT(v, IsIntEq(5));
}
TEST_F(TrivialExpressionTest, defaultFunctionArgsCaptureBack) {
auto v = eval("({ a ? 123 }@args: args) {}");
ASSERT_THAT(v, IsAttrsOfSize(0));
}
TEST_F(TrivialExpressionTest, defaultFunctionArgsCaptureFront) {
auto v = eval("(args@{ a ? 123 }: args) {}");
ASSERT_THAT(v, IsAttrsOfSize(0));
}
TEST_F(TrivialExpressionTest, assertThrows) {
ASSERT_THROW(eval("let x = arg: assert arg == 1; 123; in x 2"), Error);
}
TEST_F(TrivialExpressionTest, assertPassed) {
auto v = eval("let x = arg: assert arg == 1; 123; in x 1");
ASSERT_THAT(v, IsIntEq(123));
}
class AttrSetMergeTrvialExpressionTest :
public TrivialExpressionTest,
public testing::WithParamInterface<const char*>
{};
TEST_P(AttrSetMergeTrvialExpressionTest, attrsetMergeLazy) {
// Usually Nix rejects duplicate keys in an attrset, but it does allow them
// if the values are attribute sets containing disjoint sets of keys.
// The below is equivalent to `{a.b = 1; a.c = 2; }`.
// The attribute set `a` will be a Thunk at first as the attributes
// have to be merged (or otherwise computed) and that is done in a lazy
// manner.
auto expr = GetParam();
auto v = eval(expr);
ASSERT_THAT(v, IsAttrsOfSize(1));
auto a = v.attrs->find(createSymbol("a"));
ASSERT_NE(a, nullptr);
ASSERT_THAT(*a->value, IsThunk());
state.forceValue(*a->value, noPos);
ASSERT_THAT(*a->value, IsAttrsOfSize(2));
auto b = a->value->attrs->find(createSymbol("b"));
ASSERT_NE(b, nullptr);
ASSERT_THAT(*b->value, IsIntEq(1));
auto c = a->value->attrs->find(createSymbol("c"));
ASSERT_NE(c, nullptr);
ASSERT_THAT(*c->value, IsIntEq(2));
}
INSTANTIATE_TEST_SUITE_P(
attrsetMergeLazy,
AttrSetMergeTrvialExpressionTest,
testing::Values(
"{ a.b = 1; a.c = 2; }",
"{ a = { b = 1; }; a = { c = 2; }; }"
)
);
TEST_F(TrivialExpressionTest, functor) {
auto v = eval("{ __functor = self: arg: self.v + arg; v = 10; } 5");
ASSERT_THAT(v, IsIntEq(15));
}
TEST_F(TrivialExpressionTest, bindOr) {
auto v = eval("{ or = 1; }");
ASSERT_THAT(v, IsAttrsOfSize(1));
auto b = v.attrs->find(createSymbol("or"));
ASSERT_NE(b, nullptr);
ASSERT_THAT(*b->value, IsIntEq(1));
}
TEST_F(TrivialExpressionTest, orCantBeUsed) {
ASSERT_THROW(eval("let or = 1; in or"), Error);
}
} /* namespace nix */

View file

@ -1,162 +0,0 @@
#include <nlohmann/json.hpp>
#include <gtest/gtest.h>
#include <rapidcheck/gtest.h>
#include "tests/path.hh"
#include "tests/libexpr.hh"
#include "tests/value/context.hh"
namespace nix {
// Test a few cases of invalid string context elements.
TEST(NixStringContextElemTest, empty_invalid) {
EXPECT_THROW(
NixStringContextElem::parse(""),
BadNixStringContextElem);
}
TEST(NixStringContextElemTest, single_bang_invalid) {
EXPECT_THROW(
NixStringContextElem::parse("!"),
BadNixStringContextElem);
}
TEST(NixStringContextElemTest, double_bang_invalid) {
EXPECT_THROW(
NixStringContextElem::parse("!!/"),
BadStorePath);
}
TEST(NixStringContextElemTest, eq_slash_invalid) {
EXPECT_THROW(
NixStringContextElem::parse("=/"),
BadStorePath);
}
TEST(NixStringContextElemTest, slash_invalid) {
EXPECT_THROW(
NixStringContextElem::parse("/"),
BadStorePath);
}
/**
* Round trip (string <-> data structure) test for
* `NixStringContextElem::Opaque`.
*/
TEST(NixStringContextElemTest, opaque) {
std::string_view opaque = "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x";
auto elem = NixStringContextElem::parse(opaque);
auto * p = std::get_if<NixStringContextElem::Opaque>(&elem.raw);
ASSERT_TRUE(p);
ASSERT_EQ(p->path, StorePath { opaque });
ASSERT_EQ(elem.to_string(), opaque);
}
/**
* Round trip (string <-> data structure) test for
* `NixStringContextElem::DrvDeep`.
*/
TEST(NixStringContextElemTest, drvDeep) {
std::string_view drvDeep = "=g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x.drv";
auto elem = NixStringContextElem::parse(drvDeep);
auto * p = std::get_if<NixStringContextElem::DrvDeep>(&elem.raw);
ASSERT_TRUE(p);
ASSERT_EQ(p->drvPath, StorePath { drvDeep.substr(1) });
ASSERT_EQ(elem.to_string(), drvDeep);
}
/**
* Round trip (string <-> data structure) test for a simpler
* `NixStringContextElem::Built`.
*/
TEST(NixStringContextElemTest, built_opaque) {
std::string_view built = "!foo!g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x.drv";
auto elem = NixStringContextElem::parse(built);
auto * p = std::get_if<NixStringContextElem::Built>(&elem.raw);
ASSERT_TRUE(p);
ASSERT_EQ(p->output, "foo");
ASSERT_EQ(*p->drvPath, ((SingleDerivedPath) SingleDerivedPath::Opaque {
.path = StorePath { built.substr(5) },
}));
ASSERT_EQ(elem.to_string(), built);
}
/**
* Round trip (string <-> data structure) test for a more complex,
* inductive `NixStringContextElem::Built`.
*/
TEST(NixStringContextElemTest, built_built) {
/**
* We set these in tests rather than the regular globals so we don't have
* to worry about race conditions if the tests run concurrently.
*/
ExperimentalFeatureSettings mockXpSettings;
mockXpSettings.set("experimental-features", "dynamic-derivations ca-derivations");
std::string_view built = "!foo!bar!g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x.drv";
auto elem = NixStringContextElem::parse(built, mockXpSettings);
auto * p = std::get_if<NixStringContextElem::Built>(&elem.raw);
ASSERT_TRUE(p);
ASSERT_EQ(p->output, "foo");
auto * drvPath = std::get_if<SingleDerivedPath::Built>(&*p->drvPath);
ASSERT_TRUE(drvPath);
ASSERT_EQ(drvPath->output, "bar");
ASSERT_EQ(*drvPath->drvPath, ((SingleDerivedPath) SingleDerivedPath::Opaque {
.path = StorePath { built.substr(9) },
}));
ASSERT_EQ(elem.to_string(), built);
}
/**
* Without the right experimental features enabled, we cannot parse a
* complex inductive string context element.
*/
TEST(NixStringContextElemTest, built_built_xp) {
ASSERT_THROW(
NixStringContextElem::parse("!foo!bar!g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x.drv"), MissingExperimentalFeature);
}
}
namespace rc {
using namespace nix;
Gen<NixStringContextElem::DrvDeep> Arbitrary<NixStringContextElem::DrvDeep>::arbitrary()
{
return gen::just(NixStringContextElem::DrvDeep {
.drvPath = *gen::arbitrary<StorePath>(),
});
}
Gen<NixStringContextElem> Arbitrary<NixStringContextElem>::arbitrary()
{
switch (*gen::inRange<uint8_t>(0, std::variant_size_v<NixStringContextElem::Raw>)) {
case 0:
return gen::just<NixStringContextElem>(*gen::arbitrary<NixStringContextElem::Opaque>());
case 1:
return gen::just<NixStringContextElem>(*gen::arbitrary<NixStringContextElem::DrvDeep>());
case 2:
return gen::just<NixStringContextElem>(*gen::arbitrary<NixStringContextElem::Built>());
default:
assert(false);
}
}
}
namespace nix {
#ifndef COVERAGE
RC_GTEST_PROP(
NixStringContextElemTest,
prop_round_rip,
(const NixStringContextElem & o))
{
RC_ASSERT(o == NixStringContextElem::parse(o.to_string()));
}
#endif
}

View file

@ -1,31 +0,0 @@
#pragma once
///@file
#include <rapidcheck/gen/Arbitrary.h>
#include <value/context.hh>
namespace rc {
using namespace nix;
template<>
struct Arbitrary<NixStringContextElem::Opaque> {
static Gen<NixStringContextElem::Opaque> arbitrary();
};
template<>
struct Arbitrary<NixStringContextElem::Built> {
static Gen<NixStringContextElem::Built> arbitrary();
};
template<>
struct Arbitrary<NixStringContextElem::DrvDeep> {
static Gen<NixStringContextElem::DrvDeep> arbitrary();
};
template<>
struct Arbitrary<NixStringContextElem> {
static Gen<NixStringContextElem> arbitrary();
};
}

View file

@ -1,237 +0,0 @@
#include "tests/libexpr.hh"
#include "value.hh"
namespace nix {
using namespace testing;
struct ValuePrintingTests : LibExprTest
{
template<class... A>
void test(Value v, std::string_view expected, A... args)
{
std::stringstream out;
v.print(state.symbols, out, args...);
ASSERT_EQ(out.str(), expected);
}
};
TEST_F(ValuePrintingTests, tInt)
{
Value vInt;
vInt.mkInt(10);
test(vInt, "10");
}
TEST_F(ValuePrintingTests, tBool)
{
Value vBool;
vBool.mkBool(true);
test(vBool, "true");
}
TEST_F(ValuePrintingTests, tString)
{
Value vString;
vString.mkString("some-string");
test(vString, "\"some-string\"");
}
TEST_F(ValuePrintingTests, tPath)
{
Value vPath;
vPath.mkString("/foo");
test(vPath, "\"/foo\"");
}
TEST_F(ValuePrintingTests, tNull)
{
Value vNull;
vNull.mkNull();
test(vNull, "null");
}
TEST_F(ValuePrintingTests, tAttrs)
{
Value vOne;
vOne.mkInt(1);
Value vTwo;
vTwo.mkInt(2);
BindingsBuilder builder(state, state.allocBindings(10));
builder.insert(state.symbols.create("one"), &vOne);
builder.insert(state.symbols.create("two"), &vTwo);
Value vAttrs;
vAttrs.mkAttrs(builder.finish());
test(vAttrs, "{ one = 1; two = 2; }");
}
TEST_F(ValuePrintingTests, tList)
{
Value vOne;
vOne.mkInt(1);
Value vTwo;
vTwo.mkInt(2);
Value vList;
state.mkList(vList, 5);
vList.bigList.elems[0] = &vOne;
vList.bigList.elems[1] = &vTwo;
vList.bigList.size = 3;
test(vList, "[ 1 2 (nullptr) ]");
}
TEST_F(ValuePrintingTests, vThunk)
{
Value vThunk;
vThunk.mkThunk(nullptr, nullptr);
test(vThunk, "<CODE>");
}
TEST_F(ValuePrintingTests, vApp)
{
Value vApp;
vApp.mkApp(nullptr, nullptr);
test(vApp, "<CODE>");
}
TEST_F(ValuePrintingTests, vLambda)
{
Value vLambda;
vLambda.mkLambda(nullptr, nullptr);
test(vLambda, "<LAMBDA>");
}
TEST_F(ValuePrintingTests, vPrimOp)
{
Value vPrimOp;
PrimOp primOp{};
vPrimOp.mkPrimOp(&primOp);
test(vPrimOp, "<PRIMOP>");
}
TEST_F(ValuePrintingTests, vPrimOpApp)
{
Value vPrimOpApp;
vPrimOpApp.mkPrimOpApp(nullptr, nullptr);
test(vPrimOpApp, "<PRIMOP-APP>");
}
TEST_F(ValuePrintingTests, vExternal)
{
struct MyExternal : ExternalValueBase
{
public:
std::string showType() const override
{
return "";
}
std::string typeOf() const override
{
return "";
}
virtual std::ostream & print(std::ostream & str) const override
{
str << "testing-external!";
return str;
}
} myExternal;
Value vExternal;
vExternal.mkExternal(&myExternal);
test(vExternal, "testing-external!");
}
TEST_F(ValuePrintingTests, vFloat)
{
Value vFloat;
vFloat.mkFloat(2.0);
test(vFloat, "2");
}
TEST_F(ValuePrintingTests, vBlackhole)
{
Value vBlackhole;
vBlackhole.mkBlackhole();
test(vBlackhole, "«potential infinite recursion»");
}
TEST_F(ValuePrintingTests, depthAttrs)
{
Value vOne;
vOne.mkInt(1);
Value vTwo;
vTwo.mkInt(2);
BindingsBuilder builder(state, state.allocBindings(10));
builder.insert(state.symbols.create("one"), &vOne);
builder.insert(state.symbols.create("two"), &vTwo);
Value vAttrs;
vAttrs.mkAttrs(builder.finish());
BindingsBuilder builder2(state, state.allocBindings(10));
builder2.insert(state.symbols.create("one"), &vOne);
builder2.insert(state.symbols.create("two"), &vTwo);
builder2.insert(state.symbols.create("nested"), &vAttrs);
Value vNested;
vNested.mkAttrs(builder2.finish());
test(vNested, "{ nested = «too deep»; one = «too deep»; two = «too deep»; }", false, 1);
test(vNested, "{ nested = { one = «too deep»; two = «too deep»; }; one = 1; two = 2; }", false, 2);
test(vNested, "{ nested = { one = 1; two = 2; }; one = 1; two = 2; }", false, 3);
test(vNested, "{ nested = { one = 1; two = 2; }; one = 1; two = 2; }", false, 4);
}
TEST_F(ValuePrintingTests, depthList)
{
Value vOne;
vOne.mkInt(1);
Value vTwo;
vTwo.mkInt(2);
BindingsBuilder builder(state, state.allocBindings(10));
builder.insert(state.symbols.create("one"), &vOne);
builder.insert(state.symbols.create("two"), &vTwo);
Value vAttrs;
vAttrs.mkAttrs(builder.finish());
BindingsBuilder builder2(state, state.allocBindings(10));
builder2.insert(state.symbols.create("one"), &vOne);
builder2.insert(state.symbols.create("two"), &vTwo);
builder2.insert(state.symbols.create("nested"), &vAttrs);
Value vNested;
vNested.mkAttrs(builder2.finish());
Value vList;
state.mkList(vList, 5);
vList.bigList.elems[0] = &vOne;
vList.bigList.elems[1] = &vTwo;
vList.bigList.elems[2] = &vNested;
vList.bigList.size = 3;
test(vList, "[ «too deep» «too deep» «too deep» ]", false, 1);
test(vList, "[ 1 2 { nested = «too deep»; one = «too deep»; two = «too deep»; } ]", false, 2);
test(vList, "[ 1 2 { nested = { one = «too deep»; two = «too deep»; }; one = 1; two = 2; } ]", false, 3);
test(vList, "[ 1 2 { nested = { one = 1; two = 2; }; one = 1; two = 2; } ]", false, 4);
test(vList, "[ 1 2 { nested = { one = 1; two = 2; }; one = 1; two = 2; } ]", false, 5);
}
} // namespace nix

View file

@ -67,7 +67,6 @@ class Symbol;
class PosIdx;
struct Pos;
class StorePath;
class Store;
class EvalState;
class XMLWriter;
@ -424,10 +423,9 @@ public:
SourcePath path() const
{
assert(internalType == tPath);
return SourcePath {
.accessor = ref(_path.accessor->shared_from_this()),
.path = CanonPath(CanonPath::unchecked_t(), _path.path)
};
return SourcePath(
ref(_path.accessor->shared_from_this()),
CanonPath(CanonPath::unchecked_t(), _path.path));
}
std::string_view string_view() const

View file

@ -106,7 +106,7 @@ std::map<std::string, std::string> attrsToQuery(const Attrs & attrs)
Hash getRevAttr(const Attrs & attrs, const std::string & name)
{
return Hash::parseAny(getStrAttr(attrs, name), htSHA1);
return Hash::parseAny(getStrAttr(attrs, name), HashAlgorithm::SHA1);
}
}

View file

@ -289,8 +289,8 @@ std::string Input::getType() const
std::optional<Hash> Input::getNarHash() const
{
if (auto s = maybeGetStrAttr(attrs, "narHash")) {
auto hash = s->empty() ? Hash(htSHA256) : Hash::parseSRI(*s);
if (hash.type != htSHA256)
auto hash = s->empty() ? Hash(HashAlgorithm::SHA256) : Hash::parseSRI(*s);
if (hash.algo != HashAlgorithm::SHA256)
throw UsageError("narHash must use SHA-256");
return hash;
}
@ -314,7 +314,7 @@ std::optional<Hash> Input::getRev() const
} catch (BadHash &e) {
// Default to sha1 for backwards compatibility with existing
// usages (e.g. `builtins.fetchTree` calls or flake inputs).
hash = Hash::parseAny(*s, htSHA1);
hash = Hash::parseAny(*s, HashAlgorithm::SHA1);
}
}
@ -374,7 +374,7 @@ void InputScheme::clone(const Input & input, const Path & destDir) const
std::pair<StorePath, Input> InputScheme::fetch(ref<Store> store, const Input & input)
{
auto [accessor, input2] = getAccessor(store, input);
auto storePath = accessor->root().fetchToStore(store, input2.getName());
auto storePath = SourcePath(accessor).fetchToStore(store, input2.getName());
return {storePath, input2};
}

View file

@ -0,0 +1,83 @@
#include "filtering-input-accessor.hh"
namespace nix {
std::string FilteringInputAccessor::readFile(const CanonPath & path)
{
checkAccess(path);
return next->readFile(prefix + path);
}
bool FilteringInputAccessor::pathExists(const CanonPath & path)
{
return isAllowed(path) && next->pathExists(prefix + path);
}
std::optional<InputAccessor::Stat> FilteringInputAccessor::maybeLstat(const CanonPath & path)
{
checkAccess(path);
return next->maybeLstat(prefix + path);
}
InputAccessor::DirEntries FilteringInputAccessor::readDirectory(const CanonPath & path)
{
checkAccess(path);
DirEntries entries;
for (auto & entry : next->readDirectory(prefix + path)) {
if (isAllowed(path + entry.first))
entries.insert(std::move(entry));
}
return entries;
}
std::string FilteringInputAccessor::readLink(const CanonPath & path)
{
checkAccess(path);
return next->readLink(prefix + path);
}
std::string FilteringInputAccessor::showPath(const CanonPath & path)
{
return next->showPath(prefix + path);
}
void FilteringInputAccessor::checkAccess(const CanonPath & path)
{
if (!isAllowed(path))
throw makeNotAllowedError
? makeNotAllowedError(path)
: RestrictedPathError("access to path '%s' is forbidden", showPath(path));
}
struct AllowListInputAccessorImpl : AllowListInputAccessor
{
std::set<CanonPath> allowedPaths;
AllowListInputAccessorImpl(
ref<InputAccessor> next,
std::set<CanonPath> && allowedPaths,
MakeNotAllowedError && makeNotAllowedError)
: AllowListInputAccessor(SourcePath(next), std::move(makeNotAllowedError))
, allowedPaths(std::move(allowedPaths))
{ }
bool isAllowed(const CanonPath & path) override
{
return path.isAllowed(allowedPaths);
}
void allowPath(CanonPath path) override
{
allowedPaths.insert(std::move(path));
}
};
ref<AllowListInputAccessor> AllowListInputAccessor::create(
ref<InputAccessor> next,
std::set<CanonPath> && allowedPaths,
MakeNotAllowedError && makeNotAllowedError)
{
return make_ref<AllowListInputAccessorImpl>(next, std::move(allowedPaths), std::move(makeNotAllowedError));
}
}

View file

@ -0,0 +1,73 @@
#pragma once
#include "input-accessor.hh"
namespace nix {
/**
* A function that should throw an exception of type
* `RestrictedPathError` explaining that access to `path` is
* forbidden.
*/
typedef std::function<RestrictedPathError(const CanonPath & path)> MakeNotAllowedError;
/**
* An abstract wrapping `InputAccessor` that performs access
* control. Subclasses should override `isAllowed()` to implement an
* access control policy. The error message is customized at construction.
*/
struct FilteringInputAccessor : InputAccessor
{
ref<InputAccessor> next;
CanonPath prefix;
MakeNotAllowedError makeNotAllowedError;
FilteringInputAccessor(const SourcePath & src, MakeNotAllowedError && makeNotAllowedError)
: next(src.accessor)
, prefix(src.path)
, makeNotAllowedError(std::move(makeNotAllowedError))
{ }
std::string readFile(const CanonPath & path) override;
bool pathExists(const CanonPath & path) override;
std::optional<Stat> maybeLstat(const CanonPath & path) override;
DirEntries readDirectory(const CanonPath & path) override;
std::string readLink(const CanonPath & path) override;
std::string showPath(const CanonPath & path) override;
/**
* Call `makeNotAllowedError` to throw a `RestrictedPathError`
* exception if `isAllowed()` returns `false` for `path`.
*/
void checkAccess(const CanonPath & path);
/**
* Return `true` iff access to path is allowed.
*/
virtual bool isAllowed(const CanonPath & path) = 0;
};
/**
* A wrapping `InputAccessor` that checks paths against an allow-list.
*/
struct AllowListInputAccessor : public FilteringInputAccessor
{
/**
* Grant access to the specified path.
*/
virtual void allowPath(CanonPath path) = 0;
static ref<AllowListInputAccessor> create(
ref<InputAccessor> next,
std::set<CanonPath> && allowedPaths,
MakeNotAllowedError && makeNotAllowedError);
using FilteringInputAccessor::FilteringInputAccessor;
};
}

View file

@ -4,19 +4,12 @@
namespace nix {
struct FSInputAccessorImpl : FSInputAccessor, PosixSourceAccessor
struct FSInputAccessor : InputAccessor, PosixSourceAccessor
{
CanonPath root;
std::optional<std::set<CanonPath>> allowedPaths;
MakeNotAllowedError makeNotAllowedError;
FSInputAccessorImpl(
const CanonPath & root,
std::optional<std::set<CanonPath>> && allowedPaths,
MakeNotAllowedError && makeNotAllowedError)
FSInputAccessor(const CanonPath & root)
: root(root)
, allowedPaths(std::move(allowedPaths))
, makeNotAllowedError(std::move(makeNotAllowedError))
{
displayPrefix = root.isRoot() ? "" : root.abs();
}
@ -27,39 +20,30 @@ struct FSInputAccessorImpl : FSInputAccessor, PosixSourceAccessor
std::function<void(uint64_t)> sizeCallback) override
{
auto absPath = makeAbsPath(path);
checkAllowed(absPath);
PosixSourceAccessor::readFile(absPath, sink, sizeCallback);
}
bool pathExists(const CanonPath & path) override
{
auto absPath = makeAbsPath(path);
return isAllowed(absPath) && PosixSourceAccessor::pathExists(absPath);
return PosixSourceAccessor::pathExists(makeAbsPath(path));
}
std::optional<Stat> maybeLstat(const CanonPath & path) override
{
auto absPath = makeAbsPath(path);
checkAllowed(absPath);
return PosixSourceAccessor::maybeLstat(absPath);
return PosixSourceAccessor::maybeLstat(makeAbsPath(path));
}
DirEntries readDirectory(const CanonPath & path) override
{
auto absPath = makeAbsPath(path);
checkAllowed(absPath);
DirEntries res;
for (auto & entry : PosixSourceAccessor::readDirectory(absPath))
if (isAllowed(absPath + entry.first))
res.emplace(entry);
for (auto & entry : PosixSourceAccessor::readDirectory(makeAbsPath(path)))
res.emplace(entry);
return res;
}
std::string readLink(const CanonPath & path) override
{
auto absPath = makeAbsPath(path);
checkAllowed(absPath);
return PosixSourceAccessor::readLink(absPath);
return PosixSourceAccessor::readLink(makeAbsPath(path));
}
CanonPath makeAbsPath(const CanonPath & path)
@ -67,59 +51,22 @@ struct FSInputAccessorImpl : FSInputAccessor, PosixSourceAccessor
return root + path;
}
void checkAllowed(const CanonPath & absPath) override
{
if (!isAllowed(absPath))
throw makeNotAllowedError
? makeNotAllowedError(absPath)
: RestrictedPathError("access to path '%s' is forbidden", absPath);
}
bool isAllowed(const CanonPath & absPath)
{
if (!absPath.isWithin(root))
return false;
if (allowedPaths) {
auto p = absPath.removePrefix(root);
if (!p.isAllowed(*allowedPaths))
return false;
}
return true;
}
void allowPath(CanonPath path) override
{
if (allowedPaths)
allowedPaths->insert(std::move(path));
}
bool hasAccessControl() override
{
return (bool) allowedPaths;
}
std::optional<CanonPath> getPhysicalPath(const CanonPath & path) override
{
return makeAbsPath(path);
}
};
ref<FSInputAccessor> makeFSInputAccessor(
const CanonPath & root,
std::optional<std::set<CanonPath>> && allowedPaths,
MakeNotAllowedError && makeNotAllowedError)
ref<InputAccessor> makeFSInputAccessor(const CanonPath & root)
{
return make_ref<FSInputAccessorImpl>(root, std::move(allowedPaths), std::move(makeNotAllowedError));
return make_ref<FSInputAccessor>(root);
}
ref<FSInputAccessor> makeStorePathAccessor(
ref<InputAccessor> makeStorePathAccessor(
ref<Store> store,
const StorePath & storePath,
MakeNotAllowedError && makeNotAllowedError)
const StorePath & storePath)
{
return makeFSInputAccessor(CanonPath(store->toRealPath(storePath)), {}, std::move(makeNotAllowedError));
return makeFSInputAccessor(CanonPath(store->toRealPath(storePath)));
}
SourcePath getUnfilteredRootPath(CanonPath path)

View file

@ -7,26 +7,12 @@ namespace nix {
class StorePath;
class Store;
struct FSInputAccessor : InputAccessor
{
virtual void checkAllowed(const CanonPath & absPath) = 0;
ref<InputAccessor> makeFSInputAccessor(
const CanonPath & root);
virtual void allowPath(CanonPath path) = 0;
virtual bool hasAccessControl() = 0;
};
typedef std::function<RestrictedPathError(const CanonPath & path)> MakeNotAllowedError;
ref<FSInputAccessor> makeFSInputAccessor(
const CanonPath & root,
std::optional<std::set<CanonPath>> && allowedPaths = {},
MakeNotAllowedError && makeNotAllowedError = {});
ref<FSInputAccessor> makeStorePathAccessor(
ref<InputAccessor> makeStorePathAccessor(
ref<Store> store,
const StorePath & storePath,
MakeNotAllowedError && makeNotAllowedError = {});
const StorePath & storePath);
SourcePath getUnfilteredRootPath(CanonPath path);
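
With this change makeFSInputAccessor and makeStorePathAccessor no longer take an allow-list or a MakeNotAllowedError callback; they simply expose the tree under the given root, and path filtering is layered on separately via the filtering accessor added earlier in this commit. A minimal caller sketch under the new signatures (the wrapper functions and header names here are illustrative, not part of the change):

// Sketch only: relies on the post-change signatures shown in these hunks.
#include "fs-input-accessor.hh"
#include "store-api.hh"

using namespace nix;

ref<InputAccessor> openRepoRoot(const CanonPath & repoRoot)
{
    // No allow-list and no error callback any more: the accessor exposes
    // everything under `repoRoot`.
    return makeFSInputAccessor(repoRoot);
}

ref<InputAccessor> openStorePath(ref<Store> store, const StorePath & path)
{
    // Likewise, the store-path variant now only needs the store and the path.
    return makeStorePathAccessor(store, path);
}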

View file

@ -94,7 +94,7 @@ Hash toHash(const git_oid & oid)
#ifdef GIT_EXPERIMENTAL_SHA256
assert(oid.type == GIT_OID_SHA1);
#endif
Hash hash(htSHA1);
Hash hash(HashAlgorithm::SHA1);
memcpy(hash.hash, oid.id, hash.hashSize);
return hash;
}
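
The hunks in this commit repeatedly apply one rename: the HashType enum becomes HashAlgorithm (htSHA1/htSHA256 become HashAlgorithm::SHA1/SHA256, hash.type becomes hash.algo), and the base-32 rendering is now spelled HashFormat::Nix32. A small sketch of the renamed hashing helpers, using only calls visible in this diff (the header name and the wrapper functions are assumptions):

// Illustrative only; mirrors hashString / Hash::parseAny / to_string as used
// in the hunks of this commit.
#include "hash.hh"

using namespace nix;

std::string cacheKeyFor(std::string_view url)
{
    // htSHA256 -> HashAlgorithm::SHA256, HashFormat::Base32 -> HashFormat::Nix32
    return hashString(HashAlgorithm::SHA256, url)
        .to_string(HashFormat::Nix32, false);
}

Hash parseGitRev(const std::string & rev)
{
    // Git revisions remain SHA-1; only the enum spelling changed.
    return Hash::parseAny(rev, HashAlgorithm::SHA1);
}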
@ -594,7 +594,7 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
std::string re = R"(Good "git" signature for \* with .* key SHA256:[)";
for (const fetchers::PublicKey & k : publicKeys){
// Calculate sha256 fingerprint from public key and escape the regex symbol '+' to match the key literally
auto fingerprint = trim(hashString(htSHA256, base64Decode(k.key)).to_string(nix::HashFormat::Base64, false), "=");
auto fingerprint = trim(hashString(HashAlgorithm::SHA256, base64Decode(k.key)).to_string(nix::HashFormat::Base64, false), "=");
auto escaped_fingerprint = std::regex_replace(fingerprint, std::regex("\\+"), "\\+" );
re += "(" + escaped_fingerprint + ")";
}
@ -612,7 +612,7 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
fetchers::Attrs cacheKey({{"_what", "treeHashToNarHash"}, {"treeHash", treeHash.gitRev()}});
if (auto res = fetchers::getCache()->lookup(cacheKey))
return Hash::parseAny(fetchers::getStrAttr(*res, "narHash"), htSHA256);
return Hash::parseAny(fetchers::getStrAttr(*res, "narHash"), HashAlgorithm::SHA256);
auto narHash = accessor->hashPath(CanonPath::root);
@ -725,7 +725,7 @@ struct GitInputAccessor : InputAccessor
return toHash(*git_tree_entry_id(entry));
}
std::map<CanonPath, TreeEntry> lookupCache;
std::unordered_map<CanonPath, TreeEntry> lookupCache;
/* Recursively look up 'path' relative to the root. */
git_tree_entry * lookup(const CanonPath & path)

View file

@ -9,6 +9,7 @@
#include "processes.hh"
#include "git.hh"
#include "fs-input-accessor.hh"
#include "filtering-input-accessor.hh"
#include "mounted-input-accessor.hh"
#include "git-utils.hh"
#include "logging.hh"
@ -52,7 +53,7 @@ bool touchCacheFile(const Path & path, time_t touch_time)
Path getCachePath(std::string_view key)
{
return getCacheDir() + "/nix/gitv3/" +
hashString(htSHA256, key).to_string(HashFormat::Base32, false);
hashString(HashAlgorithm::SHA256, key).to_string(HashFormat::Nix32, false);
}
// Returns the name of the HEAD branch.
@ -369,7 +370,7 @@ struct GitInputScheme : InputScheme
{
auto checkHashType = [&](const std::optional<Hash> & hash)
{
if (hash.has_value() && !(hash->type == htSHA1 || hash->type == htSHA256))
if (hash.has_value() && !(hash->algo == HashAlgorithm::SHA1 || hash->algo == HashAlgorithm::SHA256))
throw Error("Hash '%s' is not supported by Git. Supported types are sha1 and sha256.", hash->to_string(HashFormat::Base16, true));
};
@ -559,7 +560,7 @@ struct GitInputScheme : InputScheme
repoInfo.url
);
} else
input.attrs.insert_or_assign("rev", Hash::parseAny(chomp(readFile(localRefFile)), htSHA1).gitRev());
input.attrs.insert_or_assign("rev", Hash::parseAny(chomp(readFile(localRefFile)), HashAlgorithm::SHA1).gitRev());
// cache dir lock is removed at scope end; we will only use read-only operations on specific revisions in the remainder
}
@ -639,7 +640,10 @@ struct GitInputScheme : InputScheme
repoInfo.workdirInfo.files.insert(submodule.path);
ref<InputAccessor> accessor =
makeFSInputAccessor(CanonPath(repoInfo.url), repoInfo.workdirInfo.files, makeNotAllowedError(repoInfo.url));
AllowListInputAccessor::create(
makeFSInputAccessor(CanonPath(repoInfo.url)),
std::move(repoInfo.workdirInfo.files),
makeNotAllowedError(repoInfo.url));
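
Access control that previously lived inside the FS accessor is now composed explicitly: an unrestricted makeFSInputAccessor wrapped by AllowListInputAccessor::create with the set of tracked work-tree files and an error factory. A sketch of that wrapping pattern, with the create() signature assumed from the call above and MakeNotAllowedError assumed to be the callback type moved into filtering-input-accessor.hh:

// Hypothetical helper showing the composition used above.
#include <set>
#include "fs-input-accessor.hh"
#include "filtering-input-accessor.hh"

using namespace nix;

ref<InputAccessor> restrictedWorkdirAccessor(
    const CanonPath & repoRoot,
    std::set<CanonPath> trackedFiles,
    MakeNotAllowedError onForbidden)
{
    // Only paths in `trackedFiles` are readable; anything else triggers the
    // error produced by `onForbidden`.
    return AllowListInputAccessor::create(
        makeFSInputAccessor(repoRoot),
        std::move(trackedFiles),
        std::move(onForbidden));
}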
/* If the repo has submodules, return a mounted input accessor
consisting of the accessor for the top-level repo and the

View file

@ -43,7 +43,7 @@ struct GitArchiveInputScheme : InputScheme
auto size = path.size();
if (size == 3) {
if (std::regex_match(path[2], revRegex))
rev = Hash::parseAny(path[2], htSHA1);
rev = Hash::parseAny(path[2], HashAlgorithm::SHA1);
else if (std::regex_match(path[2], refRegex))
ref = path[2];
else
@ -69,7 +69,7 @@ struct GitArchiveInputScheme : InputScheme
if (name == "rev") {
if (rev)
throw BadURL("URL '%s' contains multiple commit hashes", url.url);
rev = Hash::parseAny(value, htSHA1);
rev = Hash::parseAny(value, HashAlgorithm::SHA1);
}
else if (name == "ref") {
if (!std::regex_match(value, refRegex))
@ -323,9 +323,10 @@ struct GitHubInputScheme : GitArchiveInputScheme
readFile(
store->toRealPath(
downloadFile(store, url, "source", false, headers).storePath)));
return RefInfo {
.rev = Hash::parseAny(std::string { json["sha"] }, htSHA1),
.treeHash = Hash::parseAny(std::string { json["commit"]["tree"]["sha"] }, htSHA1)
.rev = Hash::parseAny(std::string { json["sha"] }, HashAlgorithm::SHA1),
.treeHash = Hash::parseAny(std::string { json["commit"]["tree"]["sha"] }, HashAlgorithm::SHA1)
};
}
@ -396,8 +397,9 @@ struct GitLabInputScheme : GitArchiveInputScheme
readFile(
store->toRealPath(
downloadFile(store, url, "source", false, headers).storePath)));
return RefInfo {
.rev = Hash::parseAny(std::string(json[0]["id"]), htSHA1)
.rev = Hash::parseAny(std::string(json[0]["id"]), HashAlgorithm::SHA1)
};
}
@ -489,7 +491,7 @@ struct SourceHutInputScheme : GitArchiveInputScheme
throw BadURL("in '%d', couldn't find ref '%d'", input.to_string(), ref);
return RefInfo {
.rev = Hash::parseAny(*id, htSHA1)
.rev = Hash::parseAny(*id, HashAlgorithm::SHA1)
};
}

View file

@ -20,7 +20,7 @@ struct IndirectInputScheme : InputScheme
if (path.size() == 1) {
} else if (path.size() == 2) {
if (std::regex_match(path[1], revRegex))
rev = Hash::parseAny(path[1], htSHA1);
rev = Hash::parseAny(path[1], HashAlgorithm::SHA1);
else if (std::regex_match(path[1], refRegex))
ref = path[1];
else
@ -31,7 +31,7 @@ struct IndirectInputScheme : InputScheme
ref = path[1];
if (!std::regex_match(path[2], revRegex))
throw BadURL("in flake URL '%s', '%s' is not a commit hash", url.url, path[2]);
rev = Hash::parseAny(path[2], htSHA1);
rev = Hash::parseAny(path[2], HashAlgorithm::SHA1);
} else
throw BadURL("GitHub URL '%s' is invalid", url.url);

View file

@ -44,8 +44,8 @@ StorePath InputAccessor::fetchToStore(
auto storePath =
settings.readOnlyMode
? store->computeStorePathFromDump(*source, name, method, htSHA256).first
: store->addToStoreFromDump(*source, name, method, htSHA256, repair);
? store->computeStorePathFromDump(*source, name, method, HashAlgorithm::SHA256).first
: store->addToStoreFromDump(*source, name, method, HashAlgorithm::SHA256, repair);
if (cacheKey)
fetchers::getCache()->add(store, *cacheKey, {}, storePath, true);
@ -53,11 +53,6 @@ StorePath InputAccessor::fetchToStore(
return storePath;
}
SourcePath InputAccessor::root()
{
return {ref(shared_from_this()), CanonPath::root};
}
std::ostream & operator << (std::ostream & str, const SourcePath & path)
{
str << path.to_string();
@ -88,7 +83,7 @@ SourcePath SourcePath::parent() const
SourcePath SourcePath::resolveSymlinks() const
{
auto res = accessor->root();
auto res = SourcePath(accessor);
int linksAllowed = 1024;

View file

@ -36,8 +36,6 @@ struct InputAccessor : virtual SourceAccessor, std::enable_shared_from_this<Inpu
FileIngestionMethod method = FileIngestionMethod::Recursive,
PathFilter * filter = nullptr,
RepairFlag repair = NoRepair);
SourcePath root();
};
/**
@ -51,6 +49,11 @@ struct SourcePath
ref<InputAccessor> accessor;
CanonPath path;
SourcePath(ref<InputAccessor> accessor, CanonPath path = CanonPath::root)
: accessor(std::move(accessor))
, path(std::move(path))
{ }
std::string_view baseName() const;
/**
@ -127,7 +130,7 @@ struct SourcePath
{ return accessor->getPhysicalPath(path); }
std::string to_string() const
{ return path.abs(); }
{ return accessor->showPath(path); }
/**
* Append a `CanonPath` to this path.

View file

@ -210,7 +210,7 @@ struct MercurialInputScheme : InputScheme
return files.count(file);
};
auto storePath = store->addToStore(input.getName(), actualPath, FileIngestionMethod::Recursive, htSHA256, filter);
auto storePath = store->addToStore(input.getName(), actualPath, FileIngestionMethod::Recursive, HashAlgorithm::SHA256, filter);
return {std::move(storePath), input};
}
@ -220,7 +220,7 @@ struct MercurialInputScheme : InputScheme
auto checkHashType = [&](const std::optional<Hash> & hash)
{
if (hash.has_value() && hash->type != htSHA1)
if (hash.has_value() && hash->algo != HashAlgorithm::SHA1)
throw Error("Hash '%s' is not supported by Mercurial. Only sha1 is supported.", hash->to_string(HashFormat::Base16, true));
};
@ -260,14 +260,14 @@ struct MercurialInputScheme : InputScheme
});
if (auto res = getCache()->lookup(store, unlockedAttrs)) {
auto rev2 = Hash::parseAny(getStrAttr(res->first, "rev"), htSHA1);
auto rev2 = Hash::parseAny(getStrAttr(res->first, "rev"), HashAlgorithm::SHA1);
if (!input.getRev() || input.getRev() == rev2) {
input.attrs.insert_or_assign("rev", rev2.gitRev());
return makeResult(res->first, std::move(res->second));
}
}
Path cacheDir = fmt("%s/nix/hg/%s", getCacheDir(), hashString(htSHA256, actualUrl).to_string(HashFormat::Base32, false));
Path cacheDir = fmt("%s/nix/hg/%s", getCacheDir(), hashString(HashAlgorithm::SHA256, actualUrl).to_string(HashFormat::Nix32, false));
/* If this is a commit hash that we already have, we don't
have to pull again. */
@ -301,7 +301,7 @@ struct MercurialInputScheme : InputScheme
runHg({ "log", "-R", cacheDir, "-r", revOrRef, "--template", "{node} {rev} {branch}" }));
assert(tokens.size() == 3);
input.attrs.insert_or_assign("rev", Hash::parseAny(tokens[0], htSHA1).gitRev());
input.attrs.insert_or_assign("rev", Hash::parseAny(tokens[0], HashAlgorithm::SHA1).gitRev());
auto revCount = std::stoull(tokens[1]);
input.attrs.insert_or_assign("ref", tokens[2]);

View file

@ -73,7 +73,7 @@ DownloadFileResult downloadFile(
} else {
StringSink sink;
dumpString(res.data, sink);
auto hash = hashString(htSHA256, res.data);
auto hash = hashString(HashAlgorithm::SHA256, res.data);
ValidPathInfo info {
*store,
name,
@ -82,7 +82,7 @@ DownloadFileResult downloadFile(
.hash = hash,
.references = {},
},
hashString(htSHA256, sink.s),
hashString(HashAlgorithm::SHA256, sink.s),
};
info.narSize = sink.s.size();
auto source = StringSource { sink.s };
@ -156,7 +156,7 @@ DownloadTarballResult downloadTarball(
throw nix::Error("tarball '%s' contains an unexpected number of top-level files", url);
auto topDir = tmpDir + "/" + members.begin()->name;
lastModified = lstat(topDir).st_mtime;
unpackedStorePath = store->addToStore(name, topDir, FileIngestionMethod::Recursive, htSHA256, defaultPathFilter, NoRepair);
unpackedStorePath = store->addToStore(name, topDir, FileIngestionMethod::Recursive, HashAlgorithm::SHA256, defaultPathFilter, NoRepair);
}
Attrs infoAttrs({

View file

@ -14,4 +14,4 @@ libmain_LIBS = libstore libutil
libmain_ALLOW_UNDEFINED = 1
$(eval $(call install-file-in, $(d)/nix-main.pc, $(libdir)/pkgconfig, 0644))
$(eval $(call install-file-in, $(buildprefix)$(d)/nix-main.pc, $(libdir)/pkgconfig, 0644))

View file

@ -143,9 +143,9 @@ ref<const ValidPathInfo> BinaryCacheStore::addToStoreCommon(
/* Read the NAR simultaneously into a CompressionSink+FileSink (to
write the compressed NAR to disk), into a HashSink (to get the
NAR hash), and into a NarAccessor (to get the NAR listing). */
HashSink fileHashSink { htSHA256 };
HashSink fileHashSink { HashAlgorithm::SHA256 };
std::shared_ptr<SourceAccessor> narAccessor;
HashSink narHashSink { htSHA256 };
HashSink narHashSink { HashAlgorithm::SHA256 };
{
FdSink fileSink(fdTemp.get());
TeeSink teeSinkCompressed { fileSink, fileHashSink };
@ -165,8 +165,8 @@ ref<const ValidPathInfo> BinaryCacheStore::addToStoreCommon(
auto [fileHash, fileSize] = fileHashSink.finish();
narInfo->fileHash = fileHash;
narInfo->fileSize = fileSize;
narInfo->url = "nar/" + narInfo->fileHash->to_string(HashFormat::Base32, false) + ".nar"
+ (compression == "xz" ? ".xz" :
narInfo->url = "nar/" + narInfo->fileHash->to_string(HashFormat::Nix32, false) + ".nar"
+ (compression == "xz" ? ".xz" :
compression == "bzip2" ? ".bz2" :
compression == "zstd" ? ".zst" :
compression == "lzip" ? ".lzip" :
@ -301,9 +301,9 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource
}
StorePath BinaryCacheStore::addToStoreFromDump(Source & dump, std::string_view name,
FileIngestionMethod method, HashType hashAlgo, RepairFlag repair, const StorePathSet & references)
FileIngestionMethod method, HashAlgorithm hashAlgo, RepairFlag repair, const StorePathSet & references)
{
if (method != FileIngestionMethod::Recursive || hashAlgo != htSHA256)
if (method != FileIngestionMethod::Recursive || hashAlgo != HashAlgorithm::SHA256)
unsupported("addToStoreFromDump");
return addToStoreCommon(dump, repair, CheckSigs, [&](HashResult nar) {
ValidPathInfo info {
@ -399,13 +399,13 @@ void BinaryCacheStore::queryPathInfoUncached(const StorePath & storePath,
}
StorePath BinaryCacheStore::addToStore(
std::string_view name,
const Path & srcPath,
FileIngestionMethod method,
HashType hashAlgo,
PathFilter & filter,
RepairFlag repair,
const StorePathSet & references)
std::string_view name,
const Path & srcPath,
FileIngestionMethod method,
HashAlgorithm hashAlgo,
PathFilter & filter,
RepairFlag repair,
const StorePathSet & references)
{
/* FIXME: Make BinaryCacheStore::addToStoreCommon support
non-recursive+sha256 so we can just use the default
@ -448,7 +448,7 @@ StorePath BinaryCacheStore::addTextToStore(
const StorePathSet & references,
RepairFlag repair)
{
auto textHash = hashString(htSHA256, s);
auto textHash = hashString(HashAlgorithm::SHA256, s);
auto path = makeTextPath(name, TextInfo { { textHash }, references });
if (!repair && isValidPath(path))

View file

@ -124,16 +124,16 @@ public:
RepairFlag repair, CheckSigsFlag checkSigs) override;
StorePath addToStoreFromDump(Source & dump, std::string_view name,
FileIngestionMethod method, HashType hashAlgo, RepairFlag repair, const StorePathSet & references) override;
FileIngestionMethod method, HashAlgorithm hashAlgo, RepairFlag repair, const StorePathSet & references) override;
StorePath addToStore(
std::string_view name,
const Path & srcPath,
FileIngestionMethod method,
HashType hashAlgo,
PathFilter & filter,
RepairFlag repair,
const StorePathSet & references) override;
std::string_view name,
const Path & srcPath,
FileIngestionMethod method,
HashAlgorithm hashAlgo,
PathFilter & filter,
RepairFlag repair,
const StorePathSet & references) override;
StorePath addTextToStore(
std::string_view name,

View file

@ -196,10 +196,19 @@ void DerivationGoal::loadDerivation()
things being garbage collected while we're busy. */
worker.evalStore.addTempRoot(drvPath);
assert(worker.evalStore.isValidPath(drvPath));
/* Get the derivation. It is probably in the eval store, but it might be in the main store:
/* Get the derivation. */
drv = std::make_unique<Derivation>(worker.evalStore.readDerivation(drvPath));
- Resolved derivations are resolved against main store realisations, and so must be stored there.
- Dynamic derivations are built, and so are found in the main store.
*/
for (auto * drvStore : { &worker.evalStore, &worker.store }) {
if (drvStore->isValidPath(drvPath)) {
drv = std::make_unique<Derivation>(drvStore->readDerivation(drvPath));
break;
}
}
assert(drv);
haveDerivation();
}
@ -401,11 +410,15 @@ void DerivationGoal::gaveUpOnSubstitution()
}
/* Copy the input sources from the eval store to the build
store. */
store.
Note that some inputs might not be in the eval store because they
are (resolved) derivation outputs in a resolved derivation. */
if (&worker.evalStore != &worker.store) {
RealisedPath::Set inputSrcs;
for (auto & i : drv->inputSrcs)
inputSrcs.insert(i);
if (worker.evalStore.isValidPath(i))
inputSrcs.insert(i);
copyClosure(worker.evalStore, worker.store, inputSrcs);
}
@ -453,7 +466,7 @@ void DerivationGoal::repairClosure()
std::map<StorePath, StorePath> outputsToDrv;
for (auto & i : inputClosure)
if (i.isDerivation()) {
auto depOutputs = worker.store.queryPartialDerivationOutputMap(i);
auto depOutputs = worker.store.queryPartialDerivationOutputMap(i, &worker.evalStore);
for (auto & j : depOutputs)
if (j.second)
outputsToDrv.insert_or_assign(*j.second, i);
@ -558,7 +571,7 @@ void DerivationGoal::inputsRealised()
inputDrvOutputs statefully, sometimes it gets out of sync with
the real source of truth (store). So we query the store
directly if there's a problem. */
attempt = fullDrv.tryResolve(worker.store);
attempt = fullDrv.tryResolve(worker.store, &worker.evalStore);
}
assert(attempt);
Derivation drvResolved { std::move(*attempt) };
@ -604,7 +617,13 @@ void DerivationGoal::inputsRealised()
return *outPath;
}
else {
auto outMap = worker.evalStore.queryDerivationOutputMap(depDrvPath);
auto outMap = [&]{
for (auto * drvStore : { &worker.evalStore, &worker.store })
if (drvStore->isValidPath(depDrvPath))
return worker.store.queryDerivationOutputMap(depDrvPath, drvStore);
assert(false);
}();
auto outMapPath = outMap.find(outputName);
if (outMapPath == outMap.end()) {
throw Error(
@ -1085,8 +1104,12 @@ void DerivationGoal::resolvedFinished()
auto newRealisation = realisation;
newRealisation.id = DrvOutput { initialOutput->outputHash, outputName };
newRealisation.signatures.clear();
if (!drv->type().isFixed())
newRealisation.dependentRealisations = drvOutputReferences(worker.store, *drv, realisation.outPath);
if (!drv->type().isFixed()) {
auto & drvStore = worker.evalStore.isValidPath(drvPath)
? worker.evalStore
: worker.store;
newRealisation.dependentRealisations = drvOutputReferences(worker.store, *drv, realisation.outPath, &drvStore);
}
signRealisation(newRealisation);
worker.store.registerDrvOutput(newRealisation);
}
@ -1379,7 +1402,10 @@ std::map<std::string, std::optional<StorePath>> DerivationGoal::queryPartialDeri
res.insert_or_assign(name, output.path(worker.store, drv->name, name));
return res;
} else {
return worker.store.queryPartialDerivationOutputMap(drvPath);
for (auto * drvStore : { &worker.evalStore, &worker.store })
if (drvStore->isValidPath(drvPath))
return worker.store.queryPartialDerivationOutputMap(drvPath, drvStore);
assert(false);
}
}
@ -1392,7 +1418,10 @@ OutputPathMap DerivationGoal::queryDerivationOutputMap()
res.insert_or_assign(name, *output.second);
return res;
} else {
return worker.store.queryDerivationOutputMap(drvPath);
for (auto * drvStore : { &worker.evalStore, &worker.store })
if (drvStore->isValidPath(drvPath))
return worker.store.queryDerivationOutputMap(drvPath, drvStore);
assert(false);
}
}
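
Several of the hunks above repeat one pattern: look the derivation (or its output map) up in the eval store first and fall back to the main store, since resolved and dynamic derivations end up in the latter. A hypothetical helper capturing that pattern (not part of the actual change):

#include "store-api.hh"

using namespace nix;

// Returns whichever of the two stores actually holds the .drv file.
static Store & storeContaining(Store & evalStore, Store & mainStore, const StorePath & drvPath)
{
    for (auto * drvStore : { &evalStore, &mainStore })
        if (drvStore->isValidPath(drvPath))
            return *drvStore;
    throw Error("derivation '%s' is present in neither the eval store nor the main store",
        mainStore.printStorePath(drvPath));
}

A caller could then write storeContaining(worker.evalStore, worker.store, drvPath).readDerivation(drvPath) instead of repeating the loop at each site.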

View file

@ -1066,8 +1066,8 @@ void LocalDerivationGoal::initTmpDir() {
if (passAsFile.find(i.first) == passAsFile.end()) {
env[i.first] = i.second;
} else {
auto hash = hashString(htSHA256, i.first);
std::string fn = ".attr-" + hash.to_string(HashFormat::Base32, false);
auto hash = hashString(HashAlgorithm::SHA256, i.first);
std::string fn = ".attr-" + hash.to_string(HashFormat::Nix32, false);
Path p = tmpDir + "/" + fn;
writeFile(p, rewriteStrings(i.second, inputRewrites));
chownToBuilder(p);
@ -1290,13 +1290,13 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual In
{ throw Error("queryPathFromHashPart"); }
StorePath addToStore(
std::string_view name,
const Path & srcPath,
FileIngestionMethod method,
HashType hashAlgo,
PathFilter & filter,
RepairFlag repair,
const StorePathSet & references) override
std::string_view name,
const Path & srcPath,
FileIngestionMethod method,
HashAlgorithm hashAlgo,
PathFilter & filter,
RepairFlag repair,
const StorePathSet & references) override
{ throw Error("addToStore"); }
void addToStore(const ValidPathInfo & info, Source & narSource,
@ -1318,12 +1318,12 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual In
}
StorePath addToStoreFromDump(
Source & dump,
std::string_view name,
FileIngestionMethod method,
HashType hashAlgo,
RepairFlag repair,
const StorePathSet & references) override
Source & dump,
std::string_view name,
FileIngestionMethod method,
HashAlgorithm hashAlgo,
RepairFlag repair,
const StorePathSet & references) override
{
auto path = next->addToStoreFromDump(dump, name, method, hashAlgo, repair, references);
goal.addDependency(path);
@ -2466,7 +2466,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
rewriteOutput(outputRewrites);
/* FIXME optimize and deduplicate with addToStore */
std::string oldHashPart { scratchPath->hashPart() };
HashModuloSink caSink { outputHash.hashType, oldHashPart };
HashModuloSink caSink {outputHash.hashAlgo, oldHashPart };
std::visit(overloaded {
[&](const TextIngestionMethod &) {
readFile(actualPath, caSink);
@ -2511,7 +2511,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
std::string(newInfo0.path.hashPart())}});
}
HashResult narHashAndSize = hashPath(htSHA256, actualPath);
HashResult narHashAndSize = hashPath(HashAlgorithm::SHA256, actualPath);
newInfo0.narHash = narHashAndSize.first;
newInfo0.narSize = narHashAndSize.second;
@ -2531,7 +2531,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
std::string { scratchPath->hashPart() },
std::string { requiredFinalPath.hashPart() });
rewriteOutput(outputRewrites);
auto narHashAndSize = hashPath(htSHA256, actualPath);
auto narHashAndSize = hashPath(HashAlgorithm::SHA256, actualPath);
ValidPathInfo newInfo0 { requiredFinalPath, narHashAndSize.first };
newInfo0.narSize = narHashAndSize.second;
auto refs = rewriteRefs();
@ -2546,7 +2546,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
auto newInfo0 = newInfoFromCA(DerivationOutput::CAFloating {
.method = dof.ca.method,
.hashType = wanted.type,
.hashAlgo = wanted.algo,
});
/* Check wanted hash */
@ -2583,7 +2583,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
[&](const DerivationOutput::Impure & doi) {
return newInfoFromCA(DerivationOutput::CAFloating {
.method = doi.method,
.hashType = doi.hashType,
.hashAlgo = doi.hashAlgo,
});
},
@ -2945,7 +2945,7 @@ StorePath LocalDerivationGoal::makeFallbackPath(OutputNameView outputName)
{
return worker.store.makeStorePath(
"rewrite:" + std::string(drvPath.to_string()) + ":name:" + std::string(outputName),
Hash(htSHA256), outputPathName(drv->name, outputName));
Hash(HashAlgorithm::SHA256), outputPathName(drv->name, outputName));
}
@ -2953,7 +2953,7 @@ StorePath LocalDerivationGoal::makeFallbackPath(const StorePath & path)
{
return worker.store.makeStorePath(
"rewrite:" + std::string(drvPath.to_string()) + ":" + std::string(path.to_string()),
Hash(htSHA256), path.name());
Hash(HashAlgorithm::SHA256), path.name());
}

View file

@ -519,8 +519,8 @@ bool Worker::pathContentsGood(const StorePath & path)
if (!pathExists(store.printStorePath(path)))
res = false;
else {
HashResult current = hashPath(info->narHash.type, store.printStorePath(path));
Hash nullHash(htSHA256);
HashResult current = hashPath(info->narHash.algo, store.printStorePath(path));
Hash nullHash(HashAlgorithm::SHA256);
res = info->narHash == nullHash || info->narHash == current.first;
}
pathContentsGoodCache.insert_or_assign(path, res);
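
The check above now reads the algorithm off the recorded hash (narHash.algo) instead of the old narHash.type field. A minimal sketch of the same verification step, assuming hashPath keeps the (hash, size) HashResult return shown here; the helper itself is illustrative:

#include "hash.hh"

using namespace nix;

// Re-hash the on-disk path with the algorithm the recorded NAR hash used and
// compare; a mismatch means the path contents are no longer good.
bool narHashStillMatches(const Hash & recordedNarHash, const Path & realPath)
{
    HashResult current = hashPath(recordedNarHash.algo, realPath);
    return recordedNarHash == current.first;
}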

View file

@ -1,4 +1,5 @@
#include "buildenv.hh"
#include "derivations.hh"
#include <sys/stat.h>
#include <sys/types.h>

View file

@ -1,7 +1,6 @@
#pragma once
///@file
#include "derivations.hh"
#include "store-api.hh"
namespace nix {

View file

@ -63,9 +63,9 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData)
for (auto hashedMirror : settings.hashedMirrors.get())
try {
if (!hasSuffix(hashedMirror, "/")) hashedMirror += '/';
std::optional<HashType> ht = parseHashTypeOpt(getAttr("outputHashAlgo"));
std::optional<HashAlgorithm> ht = parseHashAlgoOpt(getAttr("outputHashAlgo"));
Hash h = newHashAllowEmpty(getAttr("outputHash"), ht);
fetch(hashedMirror + printHashType(h.type) + "/" + h.to_string(HashFormat::Base16, false));
fetch(hashedMirror + printHashAlgo(h.algo) + "/" + h.to_string(HashFormat::Base16, false));
return;
} catch (Error & e) {
debug(e.what());
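
The hashed-mirror lookup above goes through the renamed helpers parseHashAlgoOpt, newHashAllowEmpty and printHashAlgo. A sketch of just the URL construction; the function below is illustrative and the header name is an assumption:

#include "hash.hh"

using namespace nix;

std::string hashedMirrorUrl(
    std::string mirror,
    const std::string & outputHashAlgo,
    const std::string & outputHash)
{
    if (mirror.empty() || mirror.back() != '/') mirror += '/';
    // parseHashAlgoOpt yields std::nullopt when the attribute does not name a
    // known algorithm, letting newHashAllowEmpty work it out from the hash
    // string itself.
    std::optional<HashAlgorithm> algo = parseHashAlgoOpt(outputHashAlgo);
    Hash h = newHashAllowEmpty(outputHash, algo);
    return mirror + printHashAlgo(h.algo) + "/" + h.to_string(HashFormat::Base16, false);
}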

View file

@ -16,11 +16,11 @@ namespace nix {
/* protocol-agnostic templates */
#define COMMON_USE_LENGTH_PREFIX_SERIALISER(TEMPLATE, T) \
TEMPLATE T CommonProto::Serialise< T >::read(const Store & store, CommonProto::ReadConn conn) \
TEMPLATE T CommonProto::Serialise< T >::read(const StoreDirConfig & store, CommonProto::ReadConn conn) \
{ \
return LengthPrefixedProtoHelper<CommonProto, T >::read(store, conn); \
} \
TEMPLATE void CommonProto::Serialise< T >::write(const Store & store, CommonProto::WriteConn conn, const T & t) \
TEMPLATE void CommonProto::Serialise< T >::write(const StoreDirConfig & store, CommonProto::WriteConn conn, const T & t) \
{ \
LengthPrefixedProtoHelper<CommonProto, T >::write(store, conn, t); \
}

View file

@ -13,40 +13,40 @@ namespace nix {
/* protocol-agnostic definitions */
std::string CommonProto::Serialise<std::string>::read(const Store & store, CommonProto::ReadConn conn)
std::string CommonProto::Serialise<std::string>::read(const StoreDirConfig & store, CommonProto::ReadConn conn)
{
return readString(conn.from);
}
void CommonProto::Serialise<std::string>::write(const Store & store, CommonProto::WriteConn conn, const std::string & str)
void CommonProto::Serialise<std::string>::write(const StoreDirConfig & store, CommonProto::WriteConn conn, const std::string & str)
{
conn.to << str;
}
StorePath CommonProto::Serialise<StorePath>::read(const Store & store, CommonProto::ReadConn conn)
StorePath CommonProto::Serialise<StorePath>::read(const StoreDirConfig & store, CommonProto::ReadConn conn)
{
return store.parseStorePath(readString(conn.from));
}
void CommonProto::Serialise<StorePath>::write(const Store & store, CommonProto::WriteConn conn, const StorePath & storePath)
void CommonProto::Serialise<StorePath>::write(const StoreDirConfig & store, CommonProto::WriteConn conn, const StorePath & storePath)
{
conn.to << store.printStorePath(storePath);
}
ContentAddress CommonProto::Serialise<ContentAddress>::read(const Store & store, CommonProto::ReadConn conn)
ContentAddress CommonProto::Serialise<ContentAddress>::read(const StoreDirConfig & store, CommonProto::ReadConn conn)
{
return ContentAddress::parse(readString(conn.from));
}
void CommonProto::Serialise<ContentAddress>::write(const Store & store, CommonProto::WriteConn conn, const ContentAddress & ca)
void CommonProto::Serialise<ContentAddress>::write(const StoreDirConfig & store, CommonProto::WriteConn conn, const ContentAddress & ca)
{
conn.to << renderContentAddress(ca);
}
Realisation CommonProto::Serialise<Realisation>::read(const Store & store, CommonProto::ReadConn conn)
Realisation CommonProto::Serialise<Realisation>::read(const StoreDirConfig & store, CommonProto::ReadConn conn)
{
std::string rawInput = readString(conn.from);
return Realisation::fromJSON(
@ -55,41 +55,41 @@ Realisation CommonProto::Serialise<Realisation>::read(const Store & store, Commo
);
}
void CommonProto::Serialise<Realisation>::write(const Store & store, CommonProto::WriteConn conn, const Realisation & realisation)
void CommonProto::Serialise<Realisation>::write(const StoreDirConfig & store, CommonProto::WriteConn conn, const Realisation & realisation)
{
conn.to << realisation.toJSON().dump();
}
DrvOutput CommonProto::Serialise<DrvOutput>::read(const Store & store, CommonProto::ReadConn conn)
DrvOutput CommonProto::Serialise<DrvOutput>::read(const StoreDirConfig & store, CommonProto::ReadConn conn)
{
return DrvOutput::parse(readString(conn.from));
}
void CommonProto::Serialise<DrvOutput>::write(const Store & store, CommonProto::WriteConn conn, const DrvOutput & drvOutput)
void CommonProto::Serialise<DrvOutput>::write(const StoreDirConfig & store, CommonProto::WriteConn conn, const DrvOutput & drvOutput)
{
conn.to << drvOutput.to_string();
}
std::optional<StorePath> CommonProto::Serialise<std::optional<StorePath>>::read(const Store & store, CommonProto::ReadConn conn)
std::optional<StorePath> CommonProto::Serialise<std::optional<StorePath>>::read(const StoreDirConfig & store, CommonProto::ReadConn conn)
{
auto s = readString(conn.from);
return s == "" ? std::optional<StorePath> {} : store.parseStorePath(s);
}
void CommonProto::Serialise<std::optional<StorePath>>::write(const Store & store, CommonProto::WriteConn conn, const std::optional<StorePath> & storePathOpt)
void CommonProto::Serialise<std::optional<StorePath>>::write(const StoreDirConfig & store, CommonProto::WriteConn conn, const std::optional<StorePath> & storePathOpt)
{
conn.to << (storePathOpt ? store.printStorePath(*storePathOpt) : "");
}
std::optional<ContentAddress> CommonProto::Serialise<std::optional<ContentAddress>>::read(const Store & store, CommonProto::ReadConn conn)
std::optional<ContentAddress> CommonProto::Serialise<std::optional<ContentAddress>>::read(const StoreDirConfig & store, CommonProto::ReadConn conn)
{
return ContentAddress::parseOpt(readString(conn.from));
}
void CommonProto::Serialise<std::optional<ContentAddress>>::write(const Store & store, CommonProto::WriteConn conn, const std::optional<ContentAddress> & caOpt)
void CommonProto::Serialise<std::optional<ContentAddress>>::write(const StoreDirConfig & store, CommonProto::WriteConn conn, const std::optional<ContentAddress> & caOpt)
{
conn.to << (caOpt ? renderContentAddress(*caOpt) : "");
}
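
All of these serialisers now take a StoreDirConfig rather than a full Store, since printing and parsing store paths only needs the store directory. A hypothetical serialiser for a new type, following the same shape; the type, its wire format, and the extra header names are invented for illustration:

#include "common-protocol.hh"
#include "serialise.hh"
#include "store-api.hh"

namespace nix {

struct ExamplePathPair { StorePath first, second; };

template<>
DECLARE_COMMON_SERIALISER(ExamplePathPair);

ExamplePathPair CommonProto::Serialise<ExamplePathPair>::read(const StoreDirConfig & store, CommonProto::ReadConn conn)
{
    // Only the store directory is needed to parse a path, hence StoreDirConfig.
    auto a = store.parseStorePath(readString(conn.from));
    auto b = store.parseStorePath(readString(conn.from));
    return {std::move(a), std::move(b)};
}

void CommonProto::Serialise<ExamplePathPair>::write(const StoreDirConfig & store, CommonProto::WriteConn conn, const ExamplePathPair & p)
{
    conn.to << store.printStorePath(p.first) << store.printStorePath(p.second);
}

}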

View file

@ -5,7 +5,7 @@
namespace nix {
class Store;
struct StoreDirConfig;
struct Source;
// items being serialized
@ -48,7 +48,7 @@ struct CommonProto
* infer the type instead of having to write it down explicitly.
*/
template<typename T>
static void write(const Store & store, WriteConn conn, const T & t)
static void write(const StoreDirConfig & store, WriteConn conn, const T & t)
{
CommonProto::Serialise<T>::write(store, conn, t);
}
@ -57,8 +57,8 @@ struct CommonProto
#define DECLARE_COMMON_SERIALISER(T) \
struct CommonProto::Serialise< T > \
{ \
static T read(const Store & store, CommonProto::ReadConn conn); \
static void write(const Store & store, CommonProto::WriteConn conn, const T & str); \
static T read(const StoreDirConfig & store, CommonProto::ReadConn conn); \
static void write(const StoreDirConfig & store, CommonProto::WriteConn conn, const T & str); \
}
template<>

View file

@ -38,14 +38,14 @@ ContentAddressMethod ContentAddressMethod::parsePrefix(std::string_view & m)
return FileIngestionMethod::Flat;
}
std::string ContentAddressMethod::render(HashType ht) const
std::string ContentAddressMethod::render(HashAlgorithm ha) const
{
return std::visit(overloaded {
[&](const TextIngestionMethod & th) {
return std::string{"text:"} + printHashType(ht);
return std::string{"text:"} + printHashAlgo(ha);
},
[&](const FileIngestionMethod & fim) {
return "fixed:" + makeFileIngestionPrefix(fim) + printHashType(ht);
return "fixed:" + makeFileIngestionPrefix(fim) + printHashAlgo(ha);
}
}, raw);
}
@ -61,13 +61,13 @@ std::string ContentAddress::render() const
+ makeFileIngestionPrefix(method);
},
}, method.raw)
+ this->hash.to_string(HashFormat::Base32, true);
+ this->hash.to_string(HashFormat::Nix32, true);
}
/**
* Parses content address strings up to the hash.
*/
static std::pair<ContentAddressMethod, HashType> parseContentAddressMethodPrefix(std::string_view & rest)
static std::pair<ContentAddressMethod, HashAlgorithm> parseContentAddressMethodPrefix(std::string_view & rest)
{
std::string_view wholeInput { rest };
@ -83,27 +83,27 @@ static std::pair<ContentAddressMethod, HashType> parseContentAddressMethodPrefix
auto hashTypeRaw = splitPrefixTo(rest, ':');
if (!hashTypeRaw)
throw UsageError("content address hash must be in form '<algo>:<hash>', but found: %s", wholeInput);
HashType hashType = parseHashType(*hashTypeRaw);
return hashType;
HashAlgorithm hashAlgo = parseHashAlgo(*hashTypeRaw);
return hashAlgo;
};
// Switch on prefix
if (prefix == "text") {
// No parsing of the ingestion method, "text" only support flat.
HashType hashType = parseHashType_();
HashAlgorithm hashAlgo = parseHashType_();
return {
TextIngestionMethod {},
std::move(hashType),
std::move(hashAlgo),
};
} else if (prefix == "fixed") {
// Parse method
auto method = FileIngestionMethod::Flat;
if (splitPrefix(rest, "r:"))
method = FileIngestionMethod::Recursive;
HashType hashType = parseHashType_();
HashAlgorithm hashAlgo = parseHashType_();
return {
std::move(method),
std::move(hashType),
std::move(hashAlgo),
};
} else
throw UsageError("content address prefix '%s' is unrecognized. Recogonized prefixes are 'text' or 'fixed'", prefix);
@ -113,15 +113,15 @@ ContentAddress ContentAddress::parse(std::string_view rawCa)
{
auto rest = rawCa;
auto [caMethod, hashType] = parseContentAddressMethodPrefix(rest);
auto [caMethod, hashAlgo] = parseContentAddressMethodPrefix(rest);
return ContentAddress {
.method = std::move(caMethod),
.hash = Hash::parseNonSRIUnprefixed(rest, hashType),
.hash = Hash::parseNonSRIUnprefixed(rest, hashAlgo),
};
}
std::pair<ContentAddressMethod, HashType> ContentAddressMethod::parse(std::string_view caMethod)
std::pair<ContentAddressMethod, HashAlgorithm> ContentAddressMethod::parse(std::string_view caMethod)
{
std::string asPrefix = std::string{caMethod} + ":";
// parseContentAddressMethodPrefix takes its argument by reference
@ -144,7 +144,7 @@ std::string renderContentAddress(std::optional<ContentAddress> ca)
std::string ContentAddress::printMethodAlgo() const
{
return method.renderPrefix()
+ printHashType(hash.type);
+ printHashAlgo(hash.algo);
}
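
ContentAddressMethod::parse now hands back a (method, HashAlgorithm) pair and render() takes the algorithm explicitly. A small round-trip sketch based on the signatures in this hunk:

#include "content-address.hh"

using namespace nix;

void contentAddressMethodRoundTrip()
{
    // "fixed:r:sha256" parses to FileIngestionMethod::Recursive plus SHA-256...
    auto [method, algo] = ContentAddressMethod::parse("fixed:r:sha256");

    // ...and render() is the rough inverse, producing "fixed:r:sha256" again.
    std::string rendered = method.render(algo);
}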
bool StoreReferences::empty() const

View file

@ -94,7 +94,7 @@ struct ContentAddressMethod
/**
* Parse a content addressing method and hash type.
*/
static std::pair<ContentAddressMethod, HashType> parse(std::string_view rawCaMethod);
static std::pair<ContentAddressMethod, HashAlgorithm> parse(std::string_view rawCaMethod);
/**
* Render a content addressing method and hash type in a
@ -102,7 +102,7 @@ struct ContentAddressMethod
*
* The rough inverse of `parse()`.
*/
std::string render(HashType ht) const;
std::string render(HashAlgorithm ha) const;
};

View file

@ -400,22 +400,22 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
logger->startWork();
auto pathInfo = [&]() {
// NB: FramedSource must be out of scope before logger->stopWork();
auto [contentAddressMethod, hashType_] = ContentAddressMethod::parse(camStr);
auto hashType = hashType_; // work around clang bug
auto [contentAddressMethod, hashAlgo_] = ContentAddressMethod::parse(camStr);
auto hashAlgo = hashAlgo_; // work around clang bug
FramedSource source(from);
// TODO this is essentially RemoteStore::addCAToStore. Move it up to Store.
return std::visit(overloaded {
[&](const TextIngestionMethod &) {
if (hashType != htSHA256)
if (hashAlgo != HashAlgorithm::SHA256)
throw UnimplementedError("When adding text-hashed data called '%s', only SHA-256 is supported but '%s' was given",
name, printHashType(hashType));
name, printHashAlgo(hashAlgo));
// We could stream this by changing Store
std::string contents = source.drain();
auto path = store->addTextToStore(name, contents, refs, repair);
return store->queryPathInfo(path);
},
[&](const FileIngestionMethod & fim) {
auto path = store->addToStoreFromDump(source, name, fim, hashType, repair, refs);
auto path = store->addToStoreFromDump(source, name, fim, hashAlgo, repair, refs);
return store->queryPathInfo(path);
},
}, contentAddressMethod.raw);
@ -424,7 +424,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
WorkerProto::Serialise<ValidPathInfo>::write(*store, wconn, *pathInfo);
} else {
HashType hashAlgo;
HashAlgorithm hashAlgo;
std::string baseName;
FileIngestionMethod method;
{
@ -440,7 +440,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
hashAlgoRaw = "sha256";
method = FileIngestionMethod::Recursive;
}
hashAlgo = parseHashType(hashAlgoRaw);
hashAlgo = parseHashAlgo(hashAlgoRaw);
}
auto dumpSource = sinkToSource([&](Sink & saved) {
@ -574,6 +574,15 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
case WorkerProto::Op::BuildDerivation: {
auto drvPath = store->parseStorePath(readString(from));
BasicDerivation drv;
/*
* Note: unlike wopEnsurePath, this operation reads a
* derivation-to-be-realized from the client with
* readDerivation(Source,Store) rather than reading it from
* the local store with Store::readDerivation(). Since the
* derivation-to-be-realized is not registered in the store
* it cannot be trusted that its outPath was calculated
* correctly.
*/
readDerivation(from, *store, drv, Derivation::nameFromPath(drvPath));
BuildMode buildMode = (BuildMode) readInt(from);
logger->startWork();
@ -883,7 +892,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
bool repair, dontCheckSigs;
auto path = store->parseStorePath(readString(from));
auto deriver = readString(from);
auto narHash = Hash::parseAny(readString(from), htSHA256);
auto narHash = Hash::parseAny(readString(from), HashAlgorithm::SHA256);
ValidPathInfo info { path, narHash };
if (deriver != "")
info.deriver = store->parseStorePath(deriver);

View file

@ -11,7 +11,7 @@
namespace nix {
std::optional<StorePath> DerivationOutput::path(const Store & store, std::string_view drvName, OutputNameView outputName) const
std::optional<StorePath> DerivationOutput::path(const StoreDirConfig & store, std::string_view drvName, OutputNameView outputName) const
{
return std::visit(overloaded {
[](const DerivationOutput::InputAddressed & doi) -> std::optional<StorePath> {
@ -35,7 +35,7 @@ std::optional<StorePath> DerivationOutput::path(const Store & store, std::string
}
StorePath DerivationOutput::CAFixed::path(const Store & store, std::string_view drvName, OutputNameView outputName) const
StorePath DerivationOutput::CAFixed::path(const StoreDirConfig & store, std::string_view drvName, OutputNameView outputName) const
{
return store.makeFixedOutputPathFromCA(
outputPathName(drvName, outputName),
@ -214,26 +214,26 @@ static StringSet parseStrings(std::istream & str, bool arePaths)
static DerivationOutput parseDerivationOutput(
const Store & store,
std::string_view pathS, std::string_view hashAlgo, std::string_view hashS,
const StoreDirConfig & store,
std::string_view pathS, std::string_view hashAlgoStr, std::string_view hashS,
const ExperimentalFeatureSettings & xpSettings)
{
if (hashAlgo != "") {
ContentAddressMethod method = ContentAddressMethod::parsePrefix(hashAlgo);
if (hashAlgoStr != "") {
ContentAddressMethod method = ContentAddressMethod::parsePrefix(hashAlgoStr);
if (method == TextIngestionMethod {})
xpSettings.require(Xp::DynamicDerivations);
const auto hashType = parseHashType(hashAlgo);
const auto hashAlgo = parseHashAlgo(hashAlgoStr);
if (hashS == "impure") {
xpSettings.require(Xp::ImpureDerivations);
if (pathS != "")
throw FormatError("impure derivation output should not specify output path");
return DerivationOutput::Impure {
.method = std::move(method),
.hashType = std::move(hashType),
.hashAlgo = std::move(hashAlgo),
};
} else if (hashS != "") {
validatePath(pathS);
auto hash = Hash::parseNonSRIUnprefixed(hashS, hashType);
auto hash = Hash::parseNonSRIUnprefixed(hashS, hashAlgo);
return DerivationOutput::CAFixed {
.ca = ContentAddress {
.method = std::move(method),
@ -246,7 +246,7 @@ static DerivationOutput parseDerivationOutput(
throw FormatError("content-addressed derivation output should not specify output path");
return DerivationOutput::CAFloating {
.method = std::move(method),
.hashType = std::move(hashType),
.hashAlgo = std::move(hashAlgo),
};
}
} else {
@ -261,7 +261,7 @@ static DerivationOutput parseDerivationOutput(
}
static DerivationOutput parseDerivationOutput(
const Store & store, std::istringstream & str,
const StoreDirConfig & store, std::istringstream & str,
const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings)
{
expect(str, ","); const auto pathS = parseString(str);
@ -290,7 +290,7 @@ enum struct DerivationATermVersion {
};
static DerivedPathMap<StringSet>::ChildNode parseDerivedPathMapNode(
const Store & store,
const StoreDirConfig & store,
std::istringstream & str,
DerivationATermVersion version)
{
@ -337,7 +337,7 @@ static DerivedPathMap<StringSet>::ChildNode parseDerivedPathMapNode(
Derivation parseDerivation(
const Store & store, std::string && s, std::string_view name,
const StoreDirConfig & store, std::string && s, std::string_view name,
const ExperimentalFeatureSettings & xpSettings)
{
Derivation drv;
@ -470,7 +470,7 @@ static void printUnquotedStrings(std::string & res, ForwardIterator i, ForwardIt
}
static void unparseDerivedPathMapNode(const Store & store, std::string & s, const DerivedPathMap<StringSet>::ChildNode & node)
static void unparseDerivedPathMapNode(const StoreDirConfig & store, std::string & s, const DerivedPathMap<StringSet>::ChildNode & node)
{
s += ',';
if (node.childMap.empty()) {
@ -511,7 +511,7 @@ static bool hasDynamicDrvDep(const Derivation & drv)
}
std::string Derivation::unparse(const Store & store, bool maskOutputs,
std::string Derivation::unparse(const StoreDirConfig & store, bool maskOutputs,
DerivedPathMap<StringSet>::ChildNode::Map * actualInputs) const
{
std::string s;
@ -547,7 +547,7 @@ std::string Derivation::unparse(const Store & store, bool maskOutputs,
},
[&](const DerivationOutput::CAFloating & dof) {
s += ','; printUnquotedString(s, "");
s += ','; printUnquotedString(s, dof.method.renderPrefix() + printHashType(dof.hashType));
s += ','; printUnquotedString(s, dof.method.renderPrefix() + printHashAlgo(dof.hashAlgo));
s += ','; printUnquotedString(s, "");
},
[&](const DerivationOutput::Deferred &) {
@ -558,7 +558,7 @@ std::string Derivation::unparse(const Store & store, bool maskOutputs,
[&](const DerivationOutput::Impure & doi) {
// FIXME
s += ','; printUnquotedString(s, "");
s += ','; printUnquotedString(s, doi.method.renderPrefix() + printHashType(doi.hashType));
s += ','; printUnquotedString(s, doi.method.renderPrefix() + printHashAlgo(doi.hashAlgo));
s += ','; printUnquotedString(s, "impure");
}
}, i.second.raw);
@ -631,7 +631,7 @@ DerivationType BasicDerivation::type() const
floatingCAOutputs,
deferredIAOutputs,
impureOutputs;
std::optional<HashType> floatingHashType;
std::optional<HashAlgorithm> floatingHashAlgo;
for (auto & i : outputs) {
std::visit(overloaded {
@ -643,10 +643,10 @@ DerivationType BasicDerivation::type() const
},
[&](const DerivationOutput::CAFloating & dof) {
floatingCAOutputs.insert(i.first);
if (!floatingHashType) {
floatingHashType = dof.hashType;
if (!floatingHashAlgo) {
floatingHashAlgo = dof.hashAlgo;
} else {
if (*floatingHashType != dof.hashType)
if (*floatingHashAlgo != dof.hashAlgo)
throw Error("all floating outputs must use the same hash type");
}
},
@ -774,7 +774,7 @@ DrvHash hashDerivationModulo(Store & store, const Derivation & drv, bool maskOut
std::map<std::string, Hash> outputHashes;
for (const auto & i : drv.outputs) {
auto & dof = std::get<DerivationOutput::CAFixed>(i.second.raw);
auto hash = hashString(htSHA256, "fixed:out:"
auto hash = hashString(HashAlgorithm::SHA256, "fixed:out:"
+ dof.ca.printMethodAlgo() + ":"
+ dof.ca.hash.to_string(HashFormat::Base16, false) + ":"
+ store.printStorePath(dof.path(store, drv.name, i.first)));
@ -825,7 +825,7 @@ DrvHash hashDerivationModulo(Store & store, const Derivation & drv, bool maskOut
}
}
auto hash = hashString(htSHA256, drv.unparse(store, maskOutputs, &inputs2));
auto hash = hashString(HashAlgorithm::SHA256, drv.unparse(store, maskOutputs, &inputs2));
std::map<std::string, Hash> outputHashes;
for (const auto & [outputName, _] : drv.outputs) {
@ -845,7 +845,7 @@ std::map<std::string, Hash> staticOutputHashes(Store & store, const Derivation &
}
static DerivationOutput readDerivationOutput(Source & in, const Store & store)
static DerivationOutput readDerivationOutput(Source & in, const StoreDirConfig & store)
{
const auto pathS = readString(in);
const auto hashAlgo = readString(in);
@ -862,7 +862,7 @@ StringSet BasicDerivation::outputNames() const
return names;
}
DerivationOutputsAndOptPaths BasicDerivation::outputsAndOptPaths(const Store & store) const
DerivationOutputsAndOptPaths BasicDerivation::outputsAndOptPaths(const StoreDirConfig & store) const
{
DerivationOutputsAndOptPaths outsAndOptPaths;
for (auto & [outputName, output] : outputs)
@ -884,7 +884,7 @@ std::string_view BasicDerivation::nameFromPath(const StorePath & drvPath)
}
Source & readDerivation(Source & in, const Store & store, BasicDerivation & drv, std::string_view name)
Source & readDerivation(Source & in, const StoreDirConfig & store, BasicDerivation & drv, std::string_view name)
{
drv.name = name;
@ -912,7 +912,7 @@ Source & readDerivation(Source & in, const Store & store, BasicDerivation & drv,
}
void writeDerivation(Sink & out, const Store & store, const BasicDerivation & drv)
void writeDerivation(Sink & out, const StoreDirConfig & store, const BasicDerivation & drv)
{
out << drv.outputs.size();
for (auto & i : drv.outputs) {
@ -930,7 +930,7 @@ void writeDerivation(Sink & out, const Store & store, const BasicDerivation & dr
},
[&](const DerivationOutput::CAFloating & dof) {
out << ""
<< (dof.method.renderPrefix() + printHashType(dof.hashType))
<< (dof.method.renderPrefix() + printHashAlgo(dof.hashAlgo))
<< "";
},
[&](const DerivationOutput::Deferred &) {
@ -940,7 +940,7 @@ void writeDerivation(Sink & out, const Store & store, const BasicDerivation & dr
},
[&](const DerivationOutput::Impure & doi) {
out << ""
<< (doi.method.renderPrefix() + printHashType(doi.hashType))
<< (doi.method.renderPrefix() + printHashAlgo(doi.hashAlgo))
<< "impure";
},
}, i.second.raw);
@ -958,7 +958,7 @@ void writeDerivation(Sink & out, const Store & store, const BasicDerivation & dr
std::string hashPlaceholder(const OutputNameView outputName)
{
// FIXME: memoize?
return "/" + hashString(htSHA256, concatStrings("nix-output:", outputName)).to_string(HashFormat::Base32, false);
return "/" + hashString(HashAlgorithm::SHA256, concatStrings("nix-output:", outputName)).to_string(HashFormat::Nix32, false);
}
@ -1002,13 +1002,13 @@ static void rewriteDerivation(Store & store, BasicDerivation & drv, const String
}
std::optional<BasicDerivation> Derivation::tryResolve(Store & store) const
std::optional<BasicDerivation> Derivation::tryResolve(Store & store, Store * evalStore) const
{
std::map<std::pair<StorePath, std::string>, StorePath> inputDrvOutputs;
std::function<void(const StorePath &, const DerivedPathMap<StringSet>::ChildNode &)> accum;
accum = [&](auto & inputDrv, auto & node) {
for (auto & [outputName, outputPath] : store.queryPartialDerivationOutputMap(inputDrv)) {
for (auto & [outputName, outputPath] : store.queryPartialDerivationOutputMap(inputDrv, evalStore)) {
if (outputPath) {
inputDrvOutputs.insert_or_assign({inputDrv, outputName}, *outputPath);
if (auto p = get(node.childMap, outputName))
@ -1150,10 +1150,10 @@ void Derivation::checkInvariants(Store & store, const StorePath & drvPath) const
}
const Hash impureOutputHash = hashString(htSHA256, "impure");
const Hash impureOutputHash = hashString(HashAlgorithm::SHA256, "impure");
nlohmann::json DerivationOutput::toJSON(
const Store & store, std::string_view drvName, OutputNameView outputName) const
const StoreDirConfig & store, std::string_view drvName, OutputNameView outputName) const
{
nlohmann::json res = nlohmann::json::object();
std::visit(overloaded {
@ -1167,11 +1167,11 @@ nlohmann::json DerivationOutput::toJSON(
// FIXME print refs?
},
[&](const DerivationOutput::CAFloating & dof) {
res["hashAlgo"] = dof.method.renderPrefix() + printHashType(dof.hashType);
res["hashAlgo"] = dof.method.renderPrefix() + printHashAlgo(dof.hashAlgo);
},
[&](const DerivationOutput::Deferred &) {},
[&](const DerivationOutput::Impure & doi) {
res["hashAlgo"] = doi.method.renderPrefix() + printHashType(doi.hashType);
res["hashAlgo"] = doi.method.renderPrefix() + printHashAlgo(doi.hashAlgo);
res["impure"] = true;
},
}, raw);
@ -1180,7 +1180,7 @@ nlohmann::json DerivationOutput::toJSON(
DerivationOutput DerivationOutput::fromJSON(
const Store & store, std::string_view drvName, OutputNameView outputName,
const StoreDirConfig & store, std::string_view drvName, OutputNameView outputName,
const nlohmann::json & _json,
const ExperimentalFeatureSettings & xpSettings)
{
@ -1191,15 +1191,15 @@ DerivationOutput DerivationOutput::fromJSON(
for (const auto & [key, _] : json)
keys.insert(key);
auto methodAlgo = [&]() -> std::pair<ContentAddressMethod, HashType> {
std::string hashAlgo = json["hashAlgo"];
auto methodAlgo = [&]() -> std::pair<ContentAddressMethod, HashAlgorithm> {
std::string hashAlgoStr = json["hashAlgo"];
// remaining to parse, will be mutated by parsers
std::string_view s = hashAlgo;
std::string_view s = hashAlgoStr;
ContentAddressMethod method = ContentAddressMethod::parsePrefix(s);
if (method == TextIngestionMethod {})
xpSettings.require(Xp::DynamicDerivations);
auto hashType = parseHashType(s);
return { std::move(method), std::move(hashType) };
auto hashAlgo = parseHashAlgo(s);
return { std::move(method), std::move(hashAlgo) };
};
if (keys == (std::set<std::string_view> { "path" })) {
@ -1209,11 +1209,11 @@ DerivationOutput DerivationOutput::fromJSON(
}
else if (keys == (std::set<std::string_view> { "path", "hashAlgo", "hash" })) {
auto [method, hashType] = methodAlgo();
auto [method, hashAlgo] = methodAlgo();
auto dof = DerivationOutput::CAFixed {
.ca = ContentAddress {
.method = std::move(method),
.hash = Hash::parseNonSRIUnprefixed((std::string) json["hash"], hashType),
.hash = Hash::parseNonSRIUnprefixed((std::string) json["hash"], hashAlgo),
},
};
if (dof.path(store, drvName, outputName) != store.parseStorePath((std::string) json["path"]))
@ -1223,10 +1223,10 @@ DerivationOutput DerivationOutput::fromJSON(
else if (keys == (std::set<std::string_view> { "hashAlgo" })) {
xpSettings.require(Xp::CaDerivations);
auto [method, hashType] = methodAlgo();
auto [method, hashAlgo] = methodAlgo();
return DerivationOutput::CAFloating {
.method = std::move(method),
.hashType = std::move(hashType),
.hashAlgo = std::move(hashAlgo),
};
}
@ -1236,10 +1236,10 @@ DerivationOutput DerivationOutput::fromJSON(
else if (keys == (std::set<std::string_view> { "hashAlgo", "impure" })) {
xpSettings.require(Xp::ImpureDerivations);
auto [method, hashType] = methodAlgo();
auto [method, hashAlgo] = methodAlgo();
return DerivationOutput::Impure {
.method = std::move(method),
.hashType = hashType,
.hashAlgo = hashAlgo,
};
}
@ -1249,7 +1249,7 @@ DerivationOutput DerivationOutput::fromJSON(
}
nlohmann::json Derivation::toJSON(const Store & store) const
nlohmann::json Derivation::toJSON(const StoreDirConfig & store) const
{
nlohmann::json res = nlohmann::json::object();
@ -1302,7 +1302,7 @@ nlohmann::json Derivation::toJSON(const Store & store) const
Derivation Derivation::fromJSON(
const Store & store,
const StoreDirConfig & store,
const nlohmann::json & json,
const ExperimentalFeatureSettings & xpSettings)
{

View file

@ -17,7 +17,7 @@
namespace nix {
class Store;
struct StoreDirConfig;
/* Abstract syntax of derivations. */
@ -55,7 +55,7 @@ struct DerivationOutput
* @param drvName The name of the derivation this is an output of, without the `.drv`.
* @param outputName The name of this output.
*/
StorePath path(const Store & store, std::string_view drvName, OutputNameView outputName) const;
StorePath path(const StoreDirConfig & store, std::string_view drvName, OutputNameView outputName) const;
GENERATE_CMP(CAFixed, me->ca);
};
@ -75,9 +75,9 @@ struct DerivationOutput
/**
* How the serialization will be hashed
*/
HashType hashType;
HashAlgorithm hashAlgo;
GENERATE_CMP(CAFloating, me->method, me->hashType);
GENERATE_CMP(CAFloating, me->method, me->hashAlgo);
};
/**
@ -102,9 +102,9 @@ struct DerivationOutput
/**
* How the serialization will be hashed
*/
HashType hashType;
HashAlgorithm hashAlgo;
GENERATE_CMP(Impure, me->method, me->hashType);
GENERATE_CMP(Impure, me->method, me->hashAlgo);
};
typedef std::variant<
@ -132,17 +132,17 @@ struct DerivationOutput
* the safer interface provided by
* BasicDerivation::outputsAndOptPaths
*/
std::optional<StorePath> path(const Store & store, std::string_view drvName, OutputNameView outputName) const;
std::optional<StorePath> path(const StoreDirConfig & store, std::string_view drvName, OutputNameView outputName) const;
nlohmann::json toJSON(
const Store & store,
const StoreDirConfig & store,
std::string_view drvName,
OutputNameView outputName) const;
/**
* @param xpSettings Stop-gap to avoid globals during unit tests.
*/
static DerivationOutput fromJSON(
const Store & store,
const StoreDirConfig & store,
std::string_view drvName,
OutputNameView outputName,
const nlohmann::json & json,
@ -304,7 +304,7 @@ struct BasicDerivation
* augmented with knowledge of the Store paths they would be written
* into.
*/
DerivationOutputsAndOptPaths outputsAndOptPaths(const Store & store) const;
DerivationOutputsAndOptPaths outputsAndOptPaths(const StoreDirConfig & store) const;
static std::string_view nameFromPath(const StorePath & storePath);
@ -318,6 +318,8 @@ struct BasicDerivation
me->name);
};
class Store;
struct Derivation : BasicDerivation
{
/**
@ -328,7 +330,7 @@ struct Derivation : BasicDerivation
/**
* Print a derivation.
*/
std::string unparse(const Store & store, bool maskOutputs,
std::string unparse(const StoreDirConfig & store, bool maskOutputs,
DerivedPathMap<StringSet>::ChildNode::Map * actualInputs = nullptr) const;
/**
@ -340,7 +342,7 @@ struct Derivation : BasicDerivation
* 2. Input placeholders are replaced with realized input store
* paths.
*/
std::optional<BasicDerivation> tryResolve(Store & store) const;
std::optional<BasicDerivation> tryResolve(Store & store, Store * evalStore = nullptr) const;
/**
* Like the above, but instead of querying the Nix database for
@ -365,9 +367,9 @@ struct Derivation : BasicDerivation
Derivation(const BasicDerivation & bd) : BasicDerivation(bd) { }
Derivation(BasicDerivation && bd) : BasicDerivation(std::move(bd)) { }
nlohmann::json toJSON(const Store & store) const;
nlohmann::json toJSON(const StoreDirConfig & store) const;
static Derivation fromJSON(
const Store & store,
const StoreDirConfig & store,
const nlohmann::json & json,
const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
@ -391,7 +393,7 @@ StorePath writeDerivation(Store & store,
* Read a derivation from a file.
*/
Derivation parseDerivation(
const Store & store,
const StoreDirConfig & store,
std::string && s,
std::string_view name,
const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
@ -493,8 +495,8 @@ extern Sync<DrvHashes> drvHashes;
struct Source;
struct Sink;
Source & readDerivation(Source & in, const Store & store, BasicDerivation & drv, std::string_view name);
void writeDerivation(Sink & out, const Store & store, const BasicDerivation & drv);
Source & readDerivation(Source & in, const StoreDirConfig & store, BasicDerivation & drv, std::string_view name);
void writeDerivation(Sink & out, const StoreDirConfig & store, const BasicDerivation & drv);
/**
* This creates an opaque and almost certainly unique string

View file

@ -1,4 +1,5 @@
#include "derived-path.hh"
#include "derivations.hh"
#include "store-api.hh"
#include <nlohmann/json.hpp>
@ -32,7 +33,7 @@ CMP(SingleDerivedPath, DerivedPathBuilt, outputs)
#undef CMP
#undef CMP_ONE
nlohmann::json DerivedPath::Opaque::toJSON(const Store & store) const
nlohmann::json DerivedPath::Opaque::toJSON(const StoreDirConfig & store) const
{
return store.printStorePath(path);
}
@ -86,50 +87,50 @@ nlohmann::json DerivedPath::toJSON(Store & store) const
}, raw());
}
std::string DerivedPath::Opaque::to_string(const Store & store) const
std::string DerivedPath::Opaque::to_string(const StoreDirConfig & store) const
{
return store.printStorePath(path);
}
std::string SingleDerivedPath::Built::to_string(const Store & store) const
std::string SingleDerivedPath::Built::to_string(const StoreDirConfig & store) const
{
return drvPath->to_string(store) + "^" + output;
}
std::string SingleDerivedPath::Built::to_string_legacy(const Store & store) const
std::string SingleDerivedPath::Built::to_string_legacy(const StoreDirConfig & store) const
{
return drvPath->to_string(store) + "!" + output;
}
std::string DerivedPath::Built::to_string(const Store & store) const
std::string DerivedPath::Built::to_string(const StoreDirConfig & store) const
{
return drvPath->to_string(store)
+ '^'
+ outputs.to_string();
}
std::string DerivedPath::Built::to_string_legacy(const Store & store) const
std::string DerivedPath::Built::to_string_legacy(const StoreDirConfig & store) const
{
return drvPath->to_string_legacy(store)
+ "!"
+ outputs.to_string();
}
std::string SingleDerivedPath::to_string(const Store & store) const
std::string SingleDerivedPath::to_string(const StoreDirConfig & store) const
{
return std::visit(
[&](const auto & req) { return req.to_string(store); },
raw());
}
std::string DerivedPath::to_string(const Store & store) const
std::string DerivedPath::to_string(const StoreDirConfig & store) const
{
return std::visit(
[&](const auto & req) { return req.to_string(store); },
raw());
}
std::string SingleDerivedPath::to_string_legacy(const Store & store) const
std::string SingleDerivedPath::to_string_legacy(const StoreDirConfig & store) const
{
return std::visit(overloaded {
[&](const SingleDerivedPath::Built & req) { return req.to_string_legacy(store); },
@ -137,7 +138,7 @@ std::string SingleDerivedPath::to_string_legacy(const Store & store) const
}, this->raw());
}
std::string DerivedPath::to_string_legacy(const Store & store) const
std::string DerivedPath::to_string_legacy(const StoreDirConfig & store) const
{
return std::visit(overloaded {
[&](const DerivedPath::Built & req) { return req.to_string_legacy(store); },
@ -146,7 +147,7 @@ std::string DerivedPath::to_string_legacy(const Store & store) const
}
DerivedPath::Opaque DerivedPath::Opaque::parse(const Store & store, std::string_view s)
DerivedPath::Opaque DerivedPath::Opaque::parse(const StoreDirConfig & store, std::string_view s)
{
return {store.parseStorePath(s)};
}
@ -166,7 +167,7 @@ void drvRequireExperiment(
}
SingleDerivedPath::Built SingleDerivedPath::Built::parse(
const Store & store, ref<SingleDerivedPath> drv,
const StoreDirConfig & store, ref<SingleDerivedPath> drv,
OutputNameView output,
const ExperimentalFeatureSettings & xpSettings)
{
@ -178,7 +179,7 @@ SingleDerivedPath::Built SingleDerivedPath::Built::parse(
}
DerivedPath::Built DerivedPath::Built::parse(
const Store & store, ref<SingleDerivedPath> drv,
const StoreDirConfig & store, ref<SingleDerivedPath> drv,
OutputNameView outputsS,
const ExperimentalFeatureSettings & xpSettings)
{
@ -190,7 +191,7 @@ DerivedPath::Built DerivedPath::Built::parse(
}
static SingleDerivedPath parseWithSingle(
const Store & store, std::string_view s, std::string_view separator,
const StoreDirConfig & store, std::string_view s, std::string_view separator,
const ExperimentalFeatureSettings & xpSettings)
{
size_t n = s.rfind(separator);
@ -207,7 +208,7 @@ static SingleDerivedPath parseWithSingle(
}
SingleDerivedPath SingleDerivedPath::parse(
const Store & store,
const StoreDirConfig & store,
std::string_view s,
const ExperimentalFeatureSettings & xpSettings)
{
@ -215,7 +216,7 @@ SingleDerivedPath SingleDerivedPath::parse(
}
SingleDerivedPath SingleDerivedPath::parseLegacy(
const Store & store,
const StoreDirConfig & store,
std::string_view s,
const ExperimentalFeatureSettings & xpSettings)
{
@ -223,7 +224,7 @@ SingleDerivedPath SingleDerivedPath::parseLegacy(
}
static DerivedPath parseWith(
const Store & store, std::string_view s, std::string_view separator,
const StoreDirConfig & store, std::string_view s, std::string_view separator,
const ExperimentalFeatureSettings & xpSettings)
{
size_t n = s.rfind(separator);
@ -240,7 +241,7 @@ static DerivedPath parseWith(
}
DerivedPath DerivedPath::parse(
const Store & store,
const StoreDirConfig & store,
std::string_view s,
const ExperimentalFeatureSettings & xpSettings)
{
@ -248,7 +249,7 @@ DerivedPath DerivedPath::parse(
}
DerivedPath DerivedPath::parseLegacy(
const Store & store,
const StoreDirConfig & store,
std::string_view s,
const ExperimentalFeatureSettings & xpSettings)
{

View file

@ -12,6 +12,9 @@
namespace nix {
struct StoreDirConfig;
// TODO stop needing this, `toJSON` below should be pure
class Store;
/**
@ -24,9 +27,9 @@ class Store;
struct DerivedPathOpaque {
StorePath path;
std::string to_string(const Store & store) const;
static DerivedPathOpaque parse(const Store & store, std::string_view);
nlohmann::json toJSON(const Store & store) const;
std::string to_string(const StoreDirConfig & store) const;
static DerivedPathOpaque parse(const StoreDirConfig & store, std::string_view);
nlohmann::json toJSON(const StoreDirConfig & store) const;
GENERATE_CMP(DerivedPathOpaque, me->path);
};
@ -59,18 +62,18 @@ struct SingleDerivedPathBuilt {
/**
* Uses `^` as the separator
*/
std::string to_string(const Store & store) const;
std::string to_string(const StoreDirConfig & store) const;
/**
* Uses `!` as the separator
*/
std::string to_string_legacy(const Store & store) const;
std::string to_string_legacy(const StoreDirConfig & store) const;
/**
* The caller splits on the separator, so it works for both variants.
*
* @param xpSettings Stop-gap to avoid globals during unit tests.
*/
static SingleDerivedPathBuilt parse(
const Store & store, ref<SingleDerivedPath> drvPath,
const StoreDirConfig & store, ref<SingleDerivedPath> drvPath,
OutputNameView outputs,
const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
nlohmann::json toJSON(Store & store) const;
@ -120,18 +123,18 @@ struct SingleDerivedPath : _SingleDerivedPathRaw {
/**
* Uses `^` as the separator
*/
std::string to_string(const Store & store) const;
std::string to_string(const StoreDirConfig & store) const;
/**
* Uses `!` as the separator
*/
std::string to_string_legacy(const Store & store) const;
std::string to_string_legacy(const StoreDirConfig & store) const;
/**
* Uses `^` as the separator
*
* @param xpSettings Stop-gap to avoid globals during unit tests.
*/
static SingleDerivedPath parse(
const Store & store,
const StoreDirConfig & store,
std::string_view,
const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
/**
@ -140,7 +143,7 @@ struct SingleDerivedPath : _SingleDerivedPathRaw {
* @param xpSettings Stop-gap to avoid globals during unit tests.
*/
static SingleDerivedPath parseLegacy(
const Store & store,
const StoreDirConfig & store,
std::string_view,
const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
nlohmann::json toJSON(Store & store) const;
@ -182,18 +185,18 @@ struct DerivedPathBuilt {
/**
* Uses `^` as the separator
*/
std::string to_string(const Store & store) const;
std::string to_string(const StoreDirConfig & store) const;
/**
* Uses `!` as the separator
*/
std::string to_string_legacy(const Store & store) const;
std::string to_string_legacy(const StoreDirConfig & store) const;
/**
* The caller splits on the separator, so it works for both variants.
*
* @param xpSettings Stop-gap to avoid globals during unit tests.
*/
static DerivedPathBuilt parse(
const Store & store, ref<SingleDerivedPath>,
const StoreDirConfig & store, ref<SingleDerivedPath>,
std::string_view,
const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
nlohmann::json toJSON(Store & store) const;
@ -242,18 +245,18 @@ struct DerivedPath : _DerivedPathRaw {
/**
* Uses `^` as the separator
*/
std::string to_string(const Store & store) const;
std::string to_string(const StoreDirConfig & store) const;
/**
* Uses `!` as the separator
*/
std::string to_string_legacy(const Store & store) const;
std::string to_string_legacy(const StoreDirConfig & store) const;
/**
* Uses `^` as the separator
*
* @param xpSettings Stop-gap to avoid globals during unit tests.
*/
static DerivedPath parse(
const Store & store,
const StoreDirConfig & store,
std::string_view,
const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
/**
@ -262,7 +265,7 @@ struct DerivedPath : _DerivedPathRaw {
* @param xpSettings Stop-gap to avoid globals during unit tests.
*/
static DerivedPath parseLegacy(
const Store & store,
const StoreDirConfig & store,
std::string_view,
const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);

View file

@ -5,7 +5,7 @@ namespace nix {
std::string DownstreamPlaceholder::render() const
{
return "/" + hash.to_string(HashFormat::Base32, false);
return "/" + hash.to_string(HashFormat::Nix32, false);
}
@ -19,7 +19,7 @@ DownstreamPlaceholder DownstreamPlaceholder::unknownCaOutput(
auto drvName = drvNameWithExtension.substr(0, drvNameWithExtension.size() - 4);
auto clearText = "nix-upstream-output:" + std::string { drvPath.hashPart() } + ":" + outputPathName(drvName, outputName);
return DownstreamPlaceholder {
hashString(htSHA256, clearText)
hashString(HashAlgorithm::SHA256, clearText)
};
}
@ -31,10 +31,10 @@ DownstreamPlaceholder DownstreamPlaceholder::unknownDerivation(
xpSettings.require(Xp::DynamicDerivations);
auto compressed = compressHash(placeholder.hash, 20);
auto clearText = "nix-computed-output:"
+ compressed.to_string(HashFormat::Base32, false)
+ compressed.to_string(HashFormat::Nix32, false)
+ ":" + std::string { outputName };
return DownstreamPlaceholder {
hashString(htSHA256, clearText)
hashString(HashAlgorithm::SHA256, clearText)
};
}

View file

@ -30,7 +30,7 @@ void Store::exportPath(const StorePath & path, Sink & sink)
{
auto info = queryPathInfo(path);
HashSink hashSink(htSHA256);
HashSink hashSink(HashAlgorithm::SHA256);
TeeSink teeSink(sink, hashSink);
narFromPath(path, teeSink);
@ -39,9 +39,9 @@ void Store::exportPath(const StorePath & path, Sink & sink)
filesystem corruption from spreading to other machines.
Don't complain if the stored hash is zero (unknown). */
Hash hash = hashSink.currentHash().first;
if (hash != info->narHash && info->narHash != Hash(info->narHash.type))
if (hash != info->narHash && info->narHash != Hash(info->narHash.algo))
throw Error("hash of path '%s' has changed from '%s' to '%s'!",
printStorePath(path), info->narHash.to_string(HashFormat::Base32, true), hash.to_string(HashFormat::Base32, true));
printStorePath(path), info->narHash.to_string(HashFormat::Nix32, true), hash.to_string(HashFormat::Nix32, true));
teeSink
<< exportMagic
@ -79,7 +79,7 @@ StorePaths Store::importPaths(Source & source, CheckSigsFlag checkSigs)
auto references = CommonProto::Serialise<StorePathSet>::read(*this,
CommonProto::ReadConn { .from = source });
auto deriver = readString(source);
auto narHash = hashString(htSHA256, saved.s);
auto narHash = hashString(HashAlgorithm::SHA256, saved.s);
ValidPathInfo info { path, narHash };
if (deriver != "")

View file

@ -50,7 +50,7 @@ static void makeSymlink(const Path & link, const Path & target)
void LocalStore::addIndirectRoot(const Path & path)
{
std::string hash = hashString(htSHA1, path).to_string(HashFormat::Base32, false);
std::string hash = hashString(HashAlgorithm::SHA1, path).to_string(HashFormat::Nix32, false);
Path realRoot = canonPath(fmt("%1%/%2%/auto/%3%", stateDir, gcRootsDir, hash));
makeSymlink(realRoot, path);
}

View file

@ -116,7 +116,14 @@ Settings::Settings()
void loadConfFile()
{
globalConfig.applyConfigFile(settings.nixConfDir + "/nix.conf");
auto applyConfigFile = [&](const Path & path) {
try {
std::string contents = readFile(path);
globalConfig.applyConfig(contents, path);
} catch (SysError &) { }
};
applyConfigFile(settings.nixConfDir + "/nix.conf");
/* We only want to send overrides to the daemon, i.e. stuff from
~/.nix/nix.conf or the command line. */
@ -124,7 +131,7 @@ void loadConfFile()
auto files = settings.nixUserConfFiles;
for (auto file = files.rbegin(); file != files.rend(); file++) {
globalConfig.applyConfigFile(*file);
applyConfigFile(*file);
}
auto nixConfEnv = getEnv("NIX_CONFIG");
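The new applyConfigFile lambda above takes over from globalConfig.applyConfigFile: it reads each candidate file itself, applies the contents via applyConfig, and silently ignores files that cannot be read (such as a missing nix.conf). A minimal, self-contained sketch of that pattern, with hypothetical stand-ins for globalConfig and the config locations:

#include <fstream>
#include <iostream>
#include <sstream>
#include <string>
#include <vector>

// Stand-in for globalConfig.applyConfig(contents, path).
static void applyConfig(const std::string & contents, const std::string & path)
{
    std::cout << "applying " << contents.size() << " bytes from " << path << "\n";
}

int main()
{
    // Hypothetical config locations, lowest priority first.
    std::vector<std::string> files = {
        "/etc/nix/nix.conf",
        "/home/alice/.config/nix/nix.conf",
    };

    for (const auto & path : files) {
        std::ifstream in(path);
        if (!in) continue;                  // missing or unreadable file: skip, like the SysError catch
        std::ostringstream buf;
        buf << in.rdbuf();                  // slurp the whole file
        applyConfig(buf.str(), path);
    }
}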

View file

@ -117,10 +117,11 @@ public:
Setting<std::string> storeUri{this, getEnv("NIX_REMOTE").value_or("auto"), "store",
R"(
The [URL of the Nix store](@docroot@/command-ref/new-cli/nix3-help-stores.md#store-url-format)
The [URL of the Nix store](@docroot@/store/types/index.md#store-url-format)
to use for most operations.
See [`nix help-stores`](@docroot@/command-ref/new-cli/nix3-help-stores.md)
for supported store types and settings.
See the
[Store Types](@docroot@/store/types/index.md)
section of the manual for supported store types and settings.
)"};
Setting<bool> keepFailed{this, false, "keep-failed",
@ -200,7 +201,7 @@ public:
Nix will only build a given [derivation](@docroot@/language/derivations.md) locally when its `system` attribute equals any of the values specified here or in [`extra-platforms`](#conf-extra-platforms).
The default value is set when Nix itself is compiled for the system it will run on.
The following system types are widely used, as [Nix is actively supported on these platforms](@docroot@/contributing/hacking.md#platforms):
The following system types are widely used, as Nix is actively supported on these platforms:
- `x86_64-linux`
- `x86_64-darwin`
@ -267,21 +268,16 @@ public:
Setting<bool> alwaysAllowSubstitutes{
this, false, "always-allow-substitutes",
R"(
If set to `true`, Nix will ignore the `allowSubstitutes` attribute in
derivations and always attempt to use available substituters.
For more information on `allowSubstitutes`, see [the manual chapter on advanced attributes](../language/advanced-attributes.md).
If set to `true`, Nix will ignore the [`allowSubstitutes`](@docroot@/language/advanced-attributes.md) attribute in derivations and always attempt to use [available substituters](#conf-substituters).
)"};
Setting<bool> buildersUseSubstitutes{
this, false, "builders-use-substitutes",
R"(
If set to `true`, Nix will instruct remote build machines to use
their own binary substitutes if available. In practical terms, this
means that remote hosts will fetch as many build dependencies as
possible from their own substitutes (e.g, from `cache.nixos.org`),
instead of waiting for this host to upload them all. This can
drastically reduce build times if the network connection between
this computer and the remote build host is slow.
If set to `true`, Nix will instruct [remote build machines](#conf-builders) to use their own [`substituters`](#conf-substituters) if available.
It means that remote build hosts will fetch as many dependencies as possible from their own substituters (e.g., from `cache.nixos.org`) instead of waiting for the local machine to upload them all.
This can drastically reduce build times if the network connection between the local machine and the remote build host is slow.
)"};
Setting<off_t> reservedSize{this, 8 * 1024 * 1024, "gc-reserved-space",
@ -759,8 +755,8 @@ public:
Strings{"https://cache.nixos.org/"},
"substituters",
R"(
A list of [URLs of Nix stores](@docroot@/command-ref/new-cli/nix3-help-stores.md#store-url-format) to be used as substituters, separated by whitespace.
A substituter is an additional [store]{@docroot@/glossary.md##gloss-store} from which Nix can obtain [store objects](@docroot@/glossary.md#gloss-store-object) instead of building them.
A list of [URLs of Nix stores](@docroot@/store/types/index.md#store-url-format) to be used as substituters, separated by whitespace.
A substituter is an additional [store](@docroot@/glossary.md#gloss-store) from which Nix can obtain [store objects](@docroot@/glossary.md#gloss-store-object) instead of building them.
Substituters are tried based on their priority value, which each substituter can set independently.
Lower value means higher priority.
@ -778,7 +774,7 @@ public:
Setting<StringSet> trustedSubstituters{
this, {}, "trusted-substituters",
R"(
A list of [Nix store URLs](@docroot@/command-ref/new-cli/nix3-help-stores.md#store-url-format), separated by whitespace.
A list of [Nix store URLs](@docroot@/store/types/index.md#store-url-format), separated by whitespace.
These are not used by default, but users of the Nix daemon can enable them by specifying [`substituters`](#conf-substituters).
Unprivileged users (those set in only [`allowed-users`](#conf-allowed-users) but not [`trusted-users`](#conf-trusted-users)) can pass as `substituters` only those URLs listed in `trusted-substituters`.

View file

@ -1,3 +1,4 @@
#include "legacy-ssh-store.hh"
#include "ssh-store-config.hh"
#include "archive.hh"
#include "pool.hh"
@ -13,432 +14,355 @@
namespace nix {
struct LegacySSHStoreConfig : virtual CommonSSHStoreConfig
std::string LegacySSHStoreConfig::doc()
{
using CommonSSHStoreConfig::CommonSSHStoreConfig;
return
#include "legacy-ssh-store.md"
;
}
const Setting<Path> remoteProgram{this, "nix-store", "remote-program",
"Path to the `nix-store` executable on the remote machine."};
const Setting<int> maxConnections{this, 1, "max-connections",
"Maximum number of concurrent SSH connections."};
const std::string name() override { return "SSH Store"; }
std::string doc() override
{
return
#include "legacy-ssh-store.md"
;
}
};
struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Store
struct LegacySSHStore::Connection
{
// Hack for getting remote build log output.
// Intentionally not in `LegacySSHStoreConfig` so that it doesn't appear in
// the documentation
const Setting<int> logFD{this, -1, "log-fd", "file descriptor to which SSH's stderr is connected"};
struct Connection
{
std::unique_ptr<SSHMaster::Connection> sshConn;
FdSink to;
FdSource from;
ServeProto::Version remoteVersion;
bool good = true;
/**
* Coercion to `ServeProto::ReadConn`. This makes it easy to use the
* factored out serve protocol serialisers with a
* `LegacySSHStore::Connection`.
*
* The serve protocol connection types are unidirectional, unlike
* this type.
*/
operator ServeProto::ReadConn ()
{
return ServeProto::ReadConn {
.from = from,
.version = remoteVersion,
};
}
/*
* Coercion to `ServeProto::WriteConn`. This makes it easy to use the
* factored out serve protocol serialisers with a
* `LegacySSHStore::Connection`.
*
* The serve protocol connection types are unidirectional, unlike
* this type.
*/
operator ServeProto::WriteConn ()
{
return ServeProto::WriteConn {
.to = to,
.version = remoteVersion,
};
}
};
std::string host;
ref<Pool<Connection>> connections;
SSHMaster master;
static std::set<std::string> uriSchemes() { return {"ssh"}; }
LegacySSHStore(const std::string & scheme, const std::string & host, const Params & params)
: StoreConfig(params)
, CommonSSHStoreConfig(params)
, LegacySSHStoreConfig(params)
, Store(params)
, host(host)
, connections(make_ref<Pool<Connection>>(
std::max(1, (int) maxConnections),
[this]() { return openConnection(); },
[](const ref<Connection> & r) { return r->good; }
))
, master(
host,
sshKey,
sshPublicHostKey,
// Use SSH master only if using more than 1 connection.
connections->capacity() > 1,
compress,
logFD)
{
}
ref<Connection> openConnection()
{
auto conn = make_ref<Connection>();
conn->sshConn = master.startCommand(
fmt("%s --serve --write", remoteProgram)
+ (remoteStore.get() == "" ? "" : " --store " + shellEscape(remoteStore.get())));
conn->to = FdSink(conn->sshConn->in.get());
conn->from = FdSource(conn->sshConn->out.get());
try {
conn->to << SERVE_MAGIC_1 << SERVE_PROTOCOL_VERSION;
conn->to.flush();
StringSink saved;
try {
TeeSource tee(conn->from, saved);
unsigned int magic = readInt(tee);
if (magic != SERVE_MAGIC_2)
throw Error("'nix-store --serve' protocol mismatch from '%s'", host);
} catch (SerialisationError & e) {
/* In case the other side is waiting for our input,
close it. */
conn->sshConn->in.close();
auto msg = conn->from.drain();
throw Error("'nix-store --serve' protocol mismatch from '%s', got '%s'",
host, chomp(saved.s + msg));
}
conn->remoteVersion = readInt(conn->from);
if (GET_PROTOCOL_MAJOR(conn->remoteVersion) != 0x200)
throw Error("unsupported 'nix-store --serve' protocol version on '%s'", host);
} catch (EndOfFile & e) {
throw Error("cannot connect to '%1%'", host);
}
return conn;
};
std::string getUri() override
{
return *uriSchemes().begin() + "://" + host;
}
void queryPathInfoUncached(const StorePath & path,
Callback<std::shared_ptr<const ValidPathInfo>> callback) noexcept override
{
try {
auto conn(connections->get());
/* No longer support missing NAR hash */
assert(GET_PROTOCOL_MINOR(conn->remoteVersion) >= 4);
debug("querying remote host '%s' for info on '%s'", host, printStorePath(path));
conn->to << ServeProto::Command::QueryPathInfos << PathSet{printStorePath(path)};
conn->to.flush();
auto p = readString(conn->from);
if (p.empty()) return callback(nullptr);
auto path2 = parseStorePath(p);
assert(path == path2);
/* Hash will be set below. FIXME construct ValidPathInfo at end. */
auto info = std::make_shared<ValidPathInfo>(path, Hash::dummy);
auto deriver = readString(conn->from);
if (deriver != "")
info->deriver = parseStorePath(deriver);
info->references = ServeProto::Serialise<StorePathSet>::read(*this, *conn);
readLongLong(conn->from); // download size
info->narSize = readLongLong(conn->from);
{
auto s = readString(conn->from);
if (s == "")
throw Error("NAR hash is now mandatory");
info->narHash = Hash::parseAnyPrefixed(s);
}
info->ca = ContentAddress::parseOpt(readString(conn->from));
info->sigs = readStrings<StringSet>(conn->from);
auto s = readString(conn->from);
assert(s == "");
callback(std::move(info));
} catch (...) { callback.rethrow(); }
}
void addToStore(const ValidPathInfo & info, Source & source,
RepairFlag repair, CheckSigsFlag checkSigs) override
{
debug("adding path '%s' to remote host '%s'", printStorePath(info.path), host);
auto conn(connections->get());
if (GET_PROTOCOL_MINOR(conn->remoteVersion) >= 5) {
conn->to
<< ServeProto::Command::AddToStoreNar
<< printStorePath(info.path)
<< (info.deriver ? printStorePath(*info.deriver) : "")
<< info.narHash.to_string(HashFormat::Base16, false);
ServeProto::write(*this, *conn, info.references);
conn->to
<< info.registrationTime
<< info.narSize
<< info.ultimate
<< info.sigs
<< renderContentAddress(info.ca);
try {
copyNAR(source, conn->to);
} catch (...) {
conn->good = false;
throw;
}
conn->to.flush();
} else {
conn->to
<< ServeProto::Command::ImportPaths
<< 1;
try {
copyNAR(source, conn->to);
} catch (...) {
conn->good = false;
throw;
}
conn->to
<< exportMagic
<< printStorePath(info.path);
ServeProto::write(*this, *conn, info.references);
conn->to
<< (info.deriver ? printStorePath(*info.deriver) : "")
<< 0
<< 0;
conn->to.flush();
}
if (readInt(conn->from) != 1)
throw Error("failed to add path '%s' to remote host '%s'", printStorePath(info.path), host);
}
void narFromPath(const StorePath & path, Sink & sink) override
{
auto conn(connections->get());
conn->to << ServeProto::Command::DumpStorePath << printStorePath(path);
conn->to.flush();
copyNAR(conn->from, sink);
}
std::optional<StorePath> queryPathFromHashPart(const std::string & hashPart) override
{ unsupported("queryPathFromHashPart"); }
StorePath addToStore(
std::string_view name,
const Path & srcPath,
FileIngestionMethod method,
HashType hashAlgo,
PathFilter & filter,
RepairFlag repair,
const StorePathSet & references) override
{ unsupported("addToStore"); }
StorePath addTextToStore(
std::string_view name,
std::string_view s,
const StorePathSet & references,
RepairFlag repair) override
{ unsupported("addTextToStore"); }
private:
void putBuildSettings(Connection & conn)
{
conn.to
<< settings.maxSilentTime
<< settings.buildTimeout;
if (GET_PROTOCOL_MINOR(conn.remoteVersion) >= 2)
conn.to
<< settings.maxLogSize;
if (GET_PROTOCOL_MINOR(conn.remoteVersion) >= 3)
conn.to
<< 0 // buildRepeat hasn't worked for ages anyway
<< 0;
if (GET_PROTOCOL_MINOR(conn.remoteVersion) >= 7) {
conn.to << ((int) settings.keepFailed);
}
}
public:
BuildResult buildDerivation(const StorePath & drvPath, const BasicDerivation & drv,
BuildMode buildMode) override
{
auto conn(connections->get());
conn->to
<< ServeProto::Command::BuildDerivation
<< printStorePath(drvPath);
writeDerivation(conn->to, *this, drv);
putBuildSettings(*conn);
conn->to.flush();
return ServeProto::Serialise<BuildResult>::read(*this, *conn);
}
void buildPaths(const std::vector<DerivedPath> & drvPaths, BuildMode buildMode, std::shared_ptr<Store> evalStore) override
{
if (evalStore && evalStore.get() != this)
throw Error("building on an SSH store is incompatible with '--eval-store'");
auto conn(connections->get());
conn->to << ServeProto::Command::BuildPaths;
Strings ss;
for (auto & p : drvPaths) {
auto sOrDrvPath = StorePathWithOutputs::tryFromDerivedPath(p);
std::visit(overloaded {
[&](const StorePathWithOutputs & s) {
ss.push_back(s.to_string(*this));
},
[&](const StorePath & drvPath) {
throw Error("wanted to fetch '%s' but the legacy ssh protocol doesn't support merely substituting drv files via the build paths command. It would build them instead. Try using ssh-ng://", printStorePath(drvPath));
},
[&](std::monostate) {
throw Error("wanted build derivation that is itself a build product, but the legacy ssh protocol doesn't support that. Try using ssh-ng://");
},
}, sOrDrvPath);
}
conn->to << ss;
putBuildSettings(*conn);
conn->to.flush();
BuildResult result;
result.status = (BuildResult::Status) readInt(conn->from);
if (!result.success()) {
conn->from >> result.errorMsg;
throw Error(result.status, result.errorMsg);
}
}
void ensurePath(const StorePath & path) override
{ unsupported("ensurePath"); }
virtual ref<SourceAccessor> getFSAccessor(bool requireValidPath) override
{ unsupported("getFSAccessor"); }
std::unique_ptr<SSHMaster::Connection> sshConn;
FdSink to;
FdSource from;
ServeProto::Version remoteVersion;
bool good = true;
/**
* The default instance would schedule the work on the client side, but
* for consistency with `buildPaths` and `buildDerivation` it should happen
* on the remote side.
* Coercion to `ServeProto::ReadConn`. This makes it easy to use the
* factored out serve protocol serialisers with a
* `LegacySSHStore::Connection`.
*
* We make this fail for now so we can implement this properly later
* without it being a breaking change.
* The serve protocol connection types are unidirectional, unlike
* this type.
*/
void repairPath(const StorePath & path) override
{ unsupported("repairPath"); }
void computeFSClosure(const StorePathSet & paths,
StorePathSet & out, bool flipDirection = false,
bool includeOutputs = false, bool includeDerivers = false) override
operator ServeProto::ReadConn ()
{
if (flipDirection || includeDerivers) {
Store::computeFSClosure(paths, out, flipDirection, includeOutputs, includeDerivers);
return;
}
auto conn(connections->get());
conn->to
<< ServeProto::Command::QueryClosure
<< includeOutputs;
ServeProto::write(*this, *conn, paths);
conn->to.flush();
for (auto & i : ServeProto::Serialise<StorePathSet>::read(*this, *conn))
out.insert(i);
return ServeProto::ReadConn {
.from = from,
.version = remoteVersion,
};
}
StorePathSet queryValidPaths(const StorePathSet & paths,
SubstituteFlag maybeSubstitute = NoSubstitute) override
{
auto conn(connections->get());
conn->to
<< ServeProto::Command::QueryValidPaths
<< false // lock
<< maybeSubstitute;
ServeProto::write(*this, *conn, paths);
conn->to.flush();
return ServeProto::Serialise<StorePathSet>::read(*this, *conn);
}
void connect() override
{
auto conn(connections->get());
}
unsigned int getProtocol() override
{
auto conn(connections->get());
return conn->remoteVersion;
}
/**
* The legacy ssh protocol doesn't support checking for trusted-user.
* Try using ssh-ng:// instead if you want to know.
/*
* Coercion to `ServeProto::WriteConn`. This makes it easy to use the
* factored out serve protocol serialisers with a
* `LegacySSHStore::Connection`.
*
* The serve protocol connection types are unidirectional, unlike
* this type.
*/
std::optional<TrustedFlag> isTrustedClient() override
operator ServeProto::WriteConn ()
{
return std::nullopt;
return ServeProto::WriteConn {
.to = to,
.version = remoteVersion,
};
}
void queryRealisationUncached(const DrvOutput &,
Callback<std::shared_ptr<const Realisation>> callback) noexcept override
// TODO: Implement
{ unsupported("queryRealisation"); }
};
LegacySSHStore::LegacySSHStore(const std::string & scheme, const std::string & host, const Params & params)
: StoreConfig(params)
, CommonSSHStoreConfig(params)
, LegacySSHStoreConfig(params)
, Store(params)
, host(host)
, connections(make_ref<Pool<Connection>>(
std::max(1, (int) maxConnections),
[this]() { return openConnection(); },
[](const ref<Connection> & r) { return r->good; }
))
, master(
host,
sshKey,
sshPublicHostKey,
// Use SSH master only if using more than 1 connection.
connections->capacity() > 1,
compress,
logFD)
{
}
ref<LegacySSHStore::Connection> LegacySSHStore::openConnection()
{
auto conn = make_ref<Connection>();
conn->sshConn = master.startCommand(
fmt("%s --serve --write", remoteProgram)
+ (remoteStore.get() == "" ? "" : " --store " + shellEscape(remoteStore.get())));
conn->to = FdSink(conn->sshConn->in.get());
conn->from = FdSource(conn->sshConn->out.get());
try {
conn->to << SERVE_MAGIC_1 << SERVE_PROTOCOL_VERSION;
conn->to.flush();
StringSink saved;
try {
TeeSource tee(conn->from, saved);
unsigned int magic = readInt(tee);
if (magic != SERVE_MAGIC_2)
throw Error("'nix-store --serve' protocol mismatch from '%s'", host);
} catch (SerialisationError & e) {
/* In case the other side is waiting for our input,
close it. */
conn->sshConn->in.close();
auto msg = conn->from.drain();
throw Error("'nix-store --serve' protocol mismatch from '%s', got '%s'",
host, chomp(saved.s + msg));
}
conn->remoteVersion = readInt(conn->from);
if (GET_PROTOCOL_MAJOR(conn->remoteVersion) != 0x200)
throw Error("unsupported 'nix-store --serve' protocol version on '%s'", host);
} catch (EndOfFile & e) {
throw Error("cannot connect to '%1%'", host);
}
return conn;
};
std::string LegacySSHStore::getUri()
{
return *uriSchemes().begin() + "://" + host;
}
void LegacySSHStore::queryPathInfoUncached(const StorePath & path,
Callback<std::shared_ptr<const ValidPathInfo>> callback) noexcept
{
try {
auto conn(connections->get());
/* No longer support missing NAR hash */
assert(GET_PROTOCOL_MINOR(conn->remoteVersion) >= 4);
debug("querying remote host '%s' for info on '%s'", host, printStorePath(path));
conn->to << ServeProto::Command::QueryPathInfos << PathSet{printStorePath(path)};
conn->to.flush();
auto p = readString(conn->from);
if (p.empty()) return callback(nullptr);
auto path2 = parseStorePath(p);
assert(path == path2);
auto info = std::make_shared<ValidPathInfo>(
path,
ServeProto::Serialise<UnkeyedValidPathInfo>::read(*this, *conn));
if (info->narHash == Hash::dummy)
throw Error("NAR hash is now mandatory");
auto s = readString(conn->from);
assert(s == "");
callback(std::move(info));
} catch (...) { callback.rethrow(); }
}
void LegacySSHStore::addToStore(const ValidPathInfo & info, Source & source,
RepairFlag repair, CheckSigsFlag checkSigs)
{
debug("adding path '%s' to remote host '%s'", printStorePath(info.path), host);
auto conn(connections->get());
if (GET_PROTOCOL_MINOR(conn->remoteVersion) >= 5) {
conn->to
<< ServeProto::Command::AddToStoreNar
<< printStorePath(info.path)
<< (info.deriver ? printStorePath(*info.deriver) : "")
<< info.narHash.to_string(HashFormat::Base16, false);
ServeProto::write(*this, *conn, info.references);
conn->to
<< info.registrationTime
<< info.narSize
<< info.ultimate
<< info.sigs
<< renderContentAddress(info.ca);
try {
copyNAR(source, conn->to);
} catch (...) {
conn->good = false;
throw;
}
conn->to.flush();
} else {
conn->to
<< ServeProto::Command::ImportPaths
<< 1;
try {
copyNAR(source, conn->to);
} catch (...) {
conn->good = false;
throw;
}
conn->to
<< exportMagic
<< printStorePath(info.path);
ServeProto::write(*this, *conn, info.references);
conn->to
<< (info.deriver ? printStorePath(*info.deriver) : "")
<< 0
<< 0;
conn->to.flush();
}
if (readInt(conn->from) != 1)
throw Error("failed to add path '%s' to remote host '%s'", printStorePath(info.path), host);
}
void LegacySSHStore::narFromPath(const StorePath & path, Sink & sink)
{
auto conn(connections->get());
conn->to << ServeProto::Command::DumpStorePath << printStorePath(path);
conn->to.flush();
copyNAR(conn->from, sink);
}
void LegacySSHStore::putBuildSettings(Connection & conn)
{
ServeProto::write(*this, conn, ServeProto::BuildOptions {
.maxSilentTime = settings.maxSilentTime,
.buildTimeout = settings.buildTimeout,
.maxLogSize = settings.maxLogSize,
.nrRepeats = 0, // buildRepeat hasn't worked for ages anyway
.enforceDeterminism = 0,
.keepFailed = settings.keepFailed,
});
}
BuildResult LegacySSHStore::buildDerivation(const StorePath & drvPath, const BasicDerivation & drv,
BuildMode buildMode)
{
auto conn(connections->get());
conn->to
<< ServeProto::Command::BuildDerivation
<< printStorePath(drvPath);
writeDerivation(conn->to, *this, drv);
putBuildSettings(*conn);
conn->to.flush();
return ServeProto::Serialise<BuildResult>::read(*this, *conn);
}
void LegacySSHStore::buildPaths(const std::vector<DerivedPath> & drvPaths, BuildMode buildMode, std::shared_ptr<Store> evalStore)
{
if (evalStore && evalStore.get() != this)
throw Error("building on an SSH store is incompatible with '--eval-store'");
auto conn(connections->get());
conn->to << ServeProto::Command::BuildPaths;
Strings ss;
for (auto & p : drvPaths) {
auto sOrDrvPath = StorePathWithOutputs::tryFromDerivedPath(p);
std::visit(overloaded {
[&](const StorePathWithOutputs & s) {
ss.push_back(s.to_string(*this));
},
[&](const StorePath & drvPath) {
throw Error("wanted to fetch '%s' but the legacy ssh protocol doesn't support merely substituting drv files via the build paths command. It would build them instead. Try using ssh-ng://", printStorePath(drvPath));
},
[&](std::monostate) {
throw Error("wanted build derivation that is itself a build product, but the legacy ssh protocol doesn't support that. Try using ssh-ng://");
},
}, sOrDrvPath);
}
conn->to << ss;
putBuildSettings(*conn);
conn->to.flush();
BuildResult result;
result.status = (BuildResult::Status) readInt(conn->from);
if (!result.success()) {
conn->from >> result.errorMsg;
throw Error(result.status, result.errorMsg);
}
}
void LegacySSHStore::computeFSClosure(const StorePathSet & paths,
StorePathSet & out, bool flipDirection,
bool includeOutputs, bool includeDerivers)
{
if (flipDirection || includeDerivers) {
Store::computeFSClosure(paths, out, flipDirection, includeOutputs, includeDerivers);
return;
}
auto conn(connections->get());
conn->to
<< ServeProto::Command::QueryClosure
<< includeOutputs;
ServeProto::write(*this, *conn, paths);
conn->to.flush();
for (auto & i : ServeProto::Serialise<StorePathSet>::read(*this, *conn))
out.insert(i);
}
StorePathSet LegacySSHStore::queryValidPaths(const StorePathSet & paths,
SubstituteFlag maybeSubstitute)
{
auto conn(connections->get());
conn->to
<< ServeProto::Command::QueryValidPaths
<< false // lock
<< maybeSubstitute;
ServeProto::write(*this, *conn, paths);
conn->to.flush();
return ServeProto::Serialise<StorePathSet>::read(*this, *conn);
}
void LegacySSHStore::connect()
{
auto conn(connections->get());
}
unsigned int LegacySSHStore::getProtocol()
{
auto conn(connections->get());
return conn->remoteVersion;
}
/**
* The legacy ssh protocol doesn't support checking for trusted-user.
* Try using ssh-ng:// instead if you want to know.
*/
std::optional<TrustedFlag> isTrustedClient()
{
return std::nullopt;
}
static RegisterStoreImplementation<LegacySSHStore, LegacySSHStoreConfig> regLegacySSHStore;
}

View file

@ -0,0 +1,132 @@
#pragma once
///@file
#include "ssh-store-config.hh"
#include "store-api.hh"
#include "ssh.hh"
#include "callback.hh"
#include "pool.hh"
namespace nix {
struct LegacySSHStoreConfig : virtual CommonSSHStoreConfig
{
using CommonSSHStoreConfig::CommonSSHStoreConfig;
const Setting<Path> remoteProgram{this, "nix-store", "remote-program",
"Path to the `nix-store` executable on the remote machine."};
const Setting<int> maxConnections{this, 1, "max-connections",
"Maximum number of concurrent SSH connections."};
const std::string name() override { return "SSH Store"; }
std::string doc() override;
};
struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Store
{
// Hack for getting remote build log output.
// Intentionally not in `LegacySSHStoreConfig` so that it doesn't appear in
// the documentation
const Setting<int> logFD{this, -1, "log-fd", "file descriptor to which SSH's stderr is connected"};
struct Connection;
std::string host;
ref<Pool<Connection>> connections;
SSHMaster master;
static std::set<std::string> uriSchemes() { return {"ssh"}; }
LegacySSHStore(const std::string & scheme, const std::string & host, const Params & params);
ref<Connection> openConnection();
std::string getUri() override;
void queryPathInfoUncached(const StorePath & path,
Callback<std::shared_ptr<const ValidPathInfo>> callback) noexcept override;
void addToStore(const ValidPathInfo & info, Source & source,
RepairFlag repair, CheckSigsFlag checkSigs) override;
void narFromPath(const StorePath & path, Sink & sink) override;
std::optional<StorePath> queryPathFromHashPart(const std::string & hashPart) override
{ unsupported("queryPathFromHashPart"); }
StorePath addToStore(
std::string_view name,
const Path & srcPath,
FileIngestionMethod method,
HashAlgorithm hashAlgo,
PathFilter & filter,
RepairFlag repair,
const StorePathSet & references) override
{ unsupported("addToStore"); }
StorePath addTextToStore(
std::string_view name,
std::string_view s,
const StorePathSet & references,
RepairFlag repair) override
{ unsupported("addTextToStore"); }
private:
void putBuildSettings(Connection & conn);
public:
BuildResult buildDerivation(const StorePath & drvPath, const BasicDerivation & drv,
BuildMode buildMode) override;
void buildPaths(const std::vector<DerivedPath> & drvPaths, BuildMode buildMode, std::shared_ptr<Store> evalStore) override;
void ensurePath(const StorePath & path) override
{ unsupported("ensurePath"); }
virtual ref<SourceAccessor> getFSAccessor(bool requireValidPath) override
{ unsupported("getFSAccessor"); }
/**
* The default instance would schedule the work on the client side, but
* for consistency with `buildPaths` and `buildDerivation` it should happen
* on the remote side.
*
* We make this fail for now so we can implement this properly later
* without it being a breaking change.
*/
void repairPath(const StorePath & path) override
{ unsupported("repairPath"); }
void computeFSClosure(const StorePathSet & paths,
StorePathSet & out, bool flipDirection = false,
bool includeOutputs = false, bool includeDerivers = false) override;
StorePathSet queryValidPaths(const StorePathSet & paths,
SubstituteFlag maybeSubstitute = NoSubstitute) override;
void connect() override;
unsigned int getProtocol() override;
/**
* The legacy ssh protocol doesn't support checking for trusted-user.
* Try using ssh-ng:// instead if you want to know.
*/
std::optional<TrustedFlag> isTrustedClient() override
{
return std::nullopt;
}
void queryRealisationUncached(const DrvOutput &,
Callback<std::shared_ptr<const Realisation>> callback) noexcept override
// TODO: Implement
{ unsupported("queryRealisation"); }
};
}
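LegacySSHStore::Connection, declared here and defined in the .cc above, converts implicitly to ServeProto::ReadConn and ServeProto::WriteConn so that the factored-out serve protocol serialisers only ever see the unidirectional view they need. A self-contained illustration of that design choice, using hypothetical stream-backed stand-ins rather than the real protocol types:

#include <iostream>
#include <sstream>
#include <string>

struct ReadConn  { std::istream & from; };   // read-only view of a connection
struct WriteConn { std::ostream & to; };     // write-only view of a connection

struct Connection
{
    std::istream & from;
    std::ostream & to;

    // Implicit coercions, mirroring LegacySSHStore::Connection above.
    operator ReadConn ()  { return ReadConn  { .from = from }; }
    operator WriteConn () { return WriteConn { .to = to }; }
};

// Serialisers only take the view they actually use.
static void writeGreeting(WriteConn conn) { conn.to << "hello\n"; }
static std::string readGreeting(ReadConn conn)
{
    std::string s;
    std::getline(conn.from, s);
    return s;
}

int main()
{
    std::stringstream wire;                      // fake bidirectional channel
    Connection conn { .from = wire, .to = wire };

    writeGreeting(conn);                         // Connection -> WriteConn
    std::cout << readGreeting(conn) << "\n";     // Connection -> ReadConn
}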

View file

@ -10,7 +10,7 @@
namespace nix {
class Store;
struct StoreDirConfig;
/**
* Reusable serialisers for serialization container types in a
@ -44,8 +44,8 @@ struct LengthPrefixedProtoHelper;
#define LENGTH_PREFIXED_PROTO_HELPER(Inner, T) \
struct LengthPrefixedProtoHelper< Inner, T > \
{ \
static T read(const Store & store, typename Inner::ReadConn conn); \
static void write(const Store & store, typename Inner::WriteConn conn, const T & str); \
static T read(const StoreDirConfig & store, typename Inner::ReadConn conn); \
static void write(const StoreDirConfig & store, typename Inner::WriteConn conn, const T & str); \
private: \
template<typename U> using S = typename Inner::template Serialise<U>; \
}
@ -67,7 +67,7 @@ LENGTH_PREFIXED_PROTO_HELPER(Inner, _X);
template<class Inner, typename T>
std::vector<T>
LengthPrefixedProtoHelper<Inner, std::vector<T>>::read(
const Store & store, typename Inner::ReadConn conn)
const StoreDirConfig & store, typename Inner::ReadConn conn)
{
std::vector<T> resSet;
auto size = readNum<size_t>(conn.from);
@ -80,7 +80,7 @@ LengthPrefixedProtoHelper<Inner, std::vector<T>>::read(
template<class Inner, typename T>
void
LengthPrefixedProtoHelper<Inner, std::vector<T>>::write(
const Store & store, typename Inner::WriteConn conn, const std::vector<T> & resSet)
const StoreDirConfig & store, typename Inner::WriteConn conn, const std::vector<T> & resSet)
{
conn.to << resSet.size();
for (auto & key : resSet) {
@ -91,7 +91,7 @@ LengthPrefixedProtoHelper<Inner, std::vector<T>>::write(
template<class Inner, typename T>
std::set<T>
LengthPrefixedProtoHelper<Inner, std::set<T>>::read(
const Store & store, typename Inner::ReadConn conn)
const StoreDirConfig & store, typename Inner::ReadConn conn)
{
std::set<T> resSet;
auto size = readNum<size_t>(conn.from);
@ -104,7 +104,7 @@ LengthPrefixedProtoHelper<Inner, std::set<T>>::read(
template<class Inner, typename T>
void
LengthPrefixedProtoHelper<Inner, std::set<T>>::write(
const Store & store, typename Inner::WriteConn conn, const std::set<T> & resSet)
const StoreDirConfig & store, typename Inner::WriteConn conn, const std::set<T> & resSet)
{
conn.to << resSet.size();
for (auto & key : resSet) {
@ -115,7 +115,7 @@ LengthPrefixedProtoHelper<Inner, std::set<T>>::write(
template<class Inner, typename K, typename V>
std::map<K, V>
LengthPrefixedProtoHelper<Inner, std::map<K, V>>::read(
const Store & store, typename Inner::ReadConn conn)
const StoreDirConfig & store, typename Inner::ReadConn conn)
{
std::map<K, V> resMap;
auto size = readNum<size_t>(conn.from);
@ -130,7 +130,7 @@ LengthPrefixedProtoHelper<Inner, std::map<K, V>>::read(
template<class Inner, typename K, typename V>
void
LengthPrefixedProtoHelper<Inner, std::map<K, V>>::write(
const Store & store, typename Inner::WriteConn conn, const std::map<K, V> & resMap)
const StoreDirConfig & store, typename Inner::WriteConn conn, const std::map<K, V> & resMap)
{
conn.to << resMap.size();
for (auto & i : resMap) {
@ -142,7 +142,7 @@ LengthPrefixedProtoHelper<Inner, std::map<K, V>>::write(
template<class Inner, typename... Ts>
std::tuple<Ts...>
LengthPrefixedProtoHelper<Inner, std::tuple<Ts...>>::read(
const Store & store, typename Inner::ReadConn conn)
const StoreDirConfig & store, typename Inner::ReadConn conn)
{
return std::tuple<Ts...> {
S<Ts>::read(store, conn)...,
@ -152,7 +152,7 @@ LengthPrefixedProtoHelper<Inner, std::tuple<Ts...>>::read(
template<class Inner, typename... Ts>
void
LengthPrefixedProtoHelper<Inner, std::tuple<Ts...>>::write(
const Store & store, typename Inner::WriteConn conn, const std::tuple<Ts...> & res)
const StoreDirConfig & store, typename Inner::WriteConn conn, const std::tuple<Ts...> & res)
{
std::apply([&]<typename... Us>(const Us &... args) {
(S<Us>::write(store, conn, args), ...);
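All of these helpers implement one scheme: a container is sent as its element count followed by each element in turn, and read back the same way. A self-contained sketch of such length-prefixed framing (a hypothetical wire format, not the actual serve/worker protocol encoding):

#include <cstdint>
#include <iostream>
#include <sstream>
#include <string>
#include <vector>

static void writeString(std::ostream & to, const std::string & s)
{
    uint64_t n = s.size();
    to.write(reinterpret_cast<const char *>(&n), sizeof n);
    to.write(s.data(), s.size());
}

static std::string readString(std::istream & from)
{
    uint64_t n = 0;
    from.read(reinterpret_cast<char *>(&n), sizeof n);
    std::string s(n, '\0');
    from.read(s.data(), n);
    return s;
}

static void writeStrings(std::ostream & to, const std::vector<std::string> & xs)
{
    uint64_t n = xs.size();
    to.write(reinterpret_cast<const char *>(&n), sizeof n);   // length prefix
    for (auto & x : xs) writeString(to, x);
}

static std::vector<std::string> readStrings(std::istream & from)
{
    uint64_t n = 0;
    from.read(reinterpret_cast<char *>(&n), sizeof n);        // length prefix
    std::vector<std::string> xs;
    for (uint64_t i = 0; i < n; ++i) xs.push_back(readString(from));
    return xs;
}

int main()
{
    std::stringstream wire;
    writeStrings(wire, {"foo", "bar"});
    for (auto & s : readStrings(wire)) std::cout << s << "\n";
}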

View file

@ -4,6 +4,7 @@
#include "pathlocks.hh"
#include "worker-protocol.hh"
#include "derivations.hh"
#include "realisation.hh"
#include "nar-info.hh"
#include "references.hh"
#include "callback.hh"
@ -34,7 +35,6 @@
#include <sys/statvfs.h>
#include <sys/mount.h>
#include <sys/ioctl.h>
#include <sys/xattr.h>
#endif
#ifdef __CYGWIN__
@ -955,7 +955,7 @@ void LocalStore::registerValidPaths(const ValidPathInfos & infos)
StorePathSet paths;
for (auto & [_, i] : infos) {
assert(i.narHash.type == htSHA256);
assert(i.narHash.algo == HashAlgorithm::SHA256);
if (isValidPath_(*state, i.path))
updatePathInfo(*state, i);
else
@ -1069,7 +1069,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
/* While restoring the path from the NAR, compute the hash
of the NAR. */
HashSink hashSink(htSHA256);
HashSink hashSink(HashAlgorithm::SHA256);
TeeSource wrapperSource { source, hashSink };
@ -1080,7 +1080,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
if (hashResult.first != info.narHash)
throw Error("hash mismatch importing path '%s';\n specified: %s\n got: %s",
printStorePath(info.path), info.narHash.to_string(HashFormat::Base32, true), hashResult.first.to_string(HashFormat::Base32, true));
printStorePath(info.path), info.narHash.to_string(HashFormat::Nix32, true), hashResult.first.to_string(HashFormat::Nix32, true));
if (hashResult.second != info.narSize)
throw Error("size mismatch importing path '%s';\n specified: %s\n got: %s",
@ -1090,14 +1090,14 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
auto & specified = *info.ca;
auto actualHash = hashCAPath(
specified.method,
specified.hash.type,
specified.hash.algo,
info.path
);
if (specified.hash != actualHash.hash) {
throw Error("ca hash mismatch importing path '%s';\n specified: %s\n got: %s",
printStorePath(info.path),
specified.hash.to_string(HashFormat::Base32, true),
actualHash.hash.to_string(HashFormat::Base32, true));
specified.hash.to_string(HashFormat::Nix32, true),
actualHash.hash.to_string(HashFormat::Nix32, true));
}
}
@ -1116,7 +1116,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
StorePath LocalStore::addToStoreFromDump(Source & source0, std::string_view name,
FileIngestionMethod method, HashType hashAlgo, RepairFlag repair, const StorePathSet & references)
FileIngestionMethod method, HashAlgorithm hashAlgo, RepairFlag repair, const StorePathSet & references)
{
/* For computing the store path. */
auto hashSink = std::make_unique<HashSink>(hashAlgo);
@ -1220,8 +1220,8 @@ StorePath LocalStore::addToStoreFromDump(Source & source0, std::string_view name
/* For computing the nar hash. In recursive SHA-256 mode, this
is the same as the store hash, so no need to do it again. */
auto narHash = std::pair { hash, size };
if (method != FileIngestionMethod::Recursive || hashAlgo != htSHA256) {
HashSink narSink { htSHA256 };
if (method != FileIngestionMethod::Recursive || hashAlgo != HashAlgorithm::SHA256) {
HashSink narSink { HashAlgorithm::SHA256 };
dumpPath(realPath, narSink);
narHash = narSink.finish();
}
@ -1252,7 +1252,7 @@ StorePath LocalStore::addTextToStore(
std::string_view s,
const StorePathSet & references, RepairFlag repair)
{
auto hash = hashString(htSHA256, s);
auto hash = hashString(HashAlgorithm::SHA256, s);
auto dstPath = makeTextPath(name, TextInfo {
.hash = hash,
.references = references,
@ -1278,7 +1278,7 @@ StorePath LocalStore::addTextToStore(
StringSink sink;
dumpString(s, sink);
auto narHash = hashString(htSHA256, sink.s);
auto narHash = hashString(HashAlgorithm::SHA256, sink.s);
optimisePath(realPath, repair);
@ -1389,7 +1389,7 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
for (auto & link : readDirectory(linksDir)) {
printMsg(lvlTalkative, "checking contents of '%s'", link.name);
Path linkPath = linksDir + "/" + link.name;
std::string hash = hashPath(htSHA256, linkPath).first.to_string(HashFormat::Base32, false);
std::string hash = hashPath(HashAlgorithm::SHA256, linkPath).first.to_string(HashFormat::Nix32, false);
if (hash != link.name) {
printError("link '%s' was modified! expected hash '%s', got '%s'",
linkPath, link.name, hash);
@ -1406,7 +1406,7 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
printInfo("checking store hashes...");
Hash nullHash(htSHA256);
Hash nullHash(HashAlgorithm::SHA256);
for (auto & i : validPaths) {
try {
@ -1415,14 +1415,14 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
/* Check the content hash (optionally - slow). */
printMsg(lvlTalkative, "checking contents of '%s'", printStorePath(i));
auto hashSink = HashSink(info->narHash.type);
auto hashSink = HashSink(info->narHash.algo);
dumpPath(Store::toRealPath(i), hashSink);
auto current = hashSink.finish();
if (info->narHash != nullHash && info->narHash != current.first) {
printError("path '%s' was modified! expected hash '%s', got '%s'",
printStorePath(i), info->narHash.to_string(HashFormat::Base32, true), current.first.to_string(HashFormat::Base32, true));
printStorePath(i), info->narHash.to_string(HashFormat::Nix32, true), current.first.to_string(HashFormat::Nix32, true));
if (repair) repairPath(i); else errors = true;
} else {
@ -1697,20 +1697,20 @@ void LocalStore::queryRealisationUncached(const DrvOutput & id,
}
ContentAddress LocalStore::hashCAPath(
const ContentAddressMethod & method, const HashType & hashType,
const ContentAddressMethod & method, const HashAlgorithm & hashAlgo,
const StorePath & path)
{
return hashCAPath(method, hashType, Store::toRealPath(path), path.hashPart());
return hashCAPath(method, hashAlgo, Store::toRealPath(path), path.hashPart());
}
ContentAddress LocalStore::hashCAPath(
const ContentAddressMethod & method,
const HashType & hashType,
const HashAlgorithm & hashAlgo,
const Path & path,
const std::string_view pathHash
)
{
HashModuloSink caSink ( hashType, std::string(pathHash) );
HashModuloSink caSink ( hashAlgo, std::string(pathHash) );
std::visit(overloaded {
[&](const TextIngestionMethod &) {
readFile(path, caSink);

View file

@ -178,7 +178,7 @@ public:
RepairFlag repair, CheckSigsFlag checkSigs) override;
StorePath addToStoreFromDump(Source & dump, std::string_view name,
FileIngestionMethod method, HashType hashAlgo, RepairFlag repair, const StorePathSet & references) override;
FileIngestionMethod method, HashAlgorithm hashAlgo, RepairFlag repair, const StorePathSet & references) override;
StorePath addTextToStore(
std::string_view name,
@ -353,12 +353,12 @@ private:
// XXX: Make a generic `Store` method
ContentAddress hashCAPath(
const ContentAddressMethod & method,
const HashType & hashType,
const HashAlgorithm & hashAlgo,
const StorePath & path);
ContentAddress hashCAPath(
const ContentAddressMethod & method,
const HashType & hashType,
const HashAlgorithm & hashAlgo,
const Path & path,
const std::string_view pathHash
);

View file

@ -59,7 +59,7 @@ $(d)/build.cc:
clean-files += $(d)/schema.sql.gen.hh $(d)/ca-specific-schema.sql.gen.hh
$(eval $(call install-file-in, $(d)/nix-store.pc, $(libdir)/pkgconfig, 0644))
$(eval $(call install-file-in, $(buildprefix)$(d)/nix-store.pc, $(libdir)/pkgconfig, 0644))
$(foreach i, $(wildcard src/libstore/builtins/*.hh), \
$(eval $(call install-file-in, $(i), $(includedir)/nix/builtins, 0644)))

View file

@ -43,7 +43,7 @@ std::map<StorePath, StorePath> makeContentAddressed(
sink.s = rewriteStrings(sink.s, rewrites);
HashModuloSink hashModuloSink(htSHA256, oldHashPart);
HashModuloSink hashModuloSink(HashAlgorithm::SHA256, oldHashPart);
hashModuloSink(sink.s);
auto narModuloHash = hashModuloSink.finish().first;
@ -66,7 +66,7 @@ std::map<StorePath, StorePath> makeContentAddressed(
rsink2(sink.s);
rsink2.flush();
info.narHash = hashString(htSHA256, sink2.s);
info.narHash = hashString(HashAlgorithm::SHA256, sink2.s);
info.narSize = sink.s.size();
StringSource source(sink2.s);

View file

@ -4,6 +4,7 @@
#include "local-store.hh"
#include "store-api.hh"
#include "thread-pool.hh"
#include "realisation.hh"
#include "topo-sort.hh"
#include "callback.hh"
#include "closure.hh"
@ -330,8 +331,11 @@ std::map<DrvOutput, StorePath> drvOutputReferences(
std::map<DrvOutput, StorePath> drvOutputReferences(
Store & store,
const Derivation & drv,
const StorePath & outputPath)
const StorePath & outputPath,
Store * evalStore_)
{
auto & evalStore = evalStore_ ? *evalStore_ : store;
std::set<Realisation> inputRealisations;
std::function<void(const StorePath &, const DerivedPathMap<StringSet>::ChildNode &)> accumRealisations;
@ -339,7 +343,7 @@ std::map<DrvOutput, StorePath> drvOutputReferences(
accumRealisations = [&](const StorePath & inputDrv, const DerivedPathMap<StringSet>::ChildNode & inputNode) {
if (!inputNode.value.empty()) {
auto outputHashes =
staticOutputHashes(store, store.readDerivation(inputDrv));
staticOutputHashes(evalStore, evalStore.readDerivation(inputDrv));
for (const auto & outputName : inputNode.value) {
auto outputHash = get(outputHashes, outputName);
if (!outputHash)
@ -361,7 +365,7 @@ std::map<DrvOutput, StorePath> drvOutputReferences(
SingleDerivedPath next = SingleDerivedPath::Built { d, outputName };
accumRealisations(
// TODO deep resolutions for dynamic derivations, issue #8947, would go here.
resolveDerivedPath(store, next),
resolveDerivedPath(store, next, evalStore_),
childNode);
}
}

View file

@ -333,9 +333,9 @@ public:
(std::string(info->path.name()))
(narInfo ? narInfo->url : "", narInfo != 0)
(narInfo ? narInfo->compression : "", narInfo != 0)
(narInfo && narInfo->fileHash ? narInfo->fileHash->to_string(HashFormat::Base32, true) : "", narInfo && narInfo->fileHash)
(narInfo && narInfo->fileHash ? narInfo->fileHash->to_string(HashFormat::Nix32, true) : "", narInfo && narInfo->fileHash)
(narInfo ? narInfo->fileSize : 0, narInfo != 0 && narInfo->fileSize)
(info->narHash.to_string(HashFormat::Base32, true))
(info->narHash.to_string(HashFormat::Nix32, true))
(info->narSize)
(concatStringsSep(" ", info->shortRefs()))
(info->deriver ? std::string(info->deriver->to_string()) : "", (bool) info->deriver)

View file

@ -113,11 +113,11 @@ std::string NarInfo::to_string(const Store & store) const
res += "URL: " + url + "\n";
assert(compression != "");
res += "Compression: " + compression + "\n";
assert(fileHash && fileHash->type == htSHA256);
res += "FileHash: " + fileHash->to_string(HashFormat::Base32, true) + "\n";
assert(fileHash && fileHash->algo == HashAlgorithm::SHA256);
res += "FileHash: " + fileHash->to_string(HashFormat::Nix32, true) + "\n";
res += "FileSize: " + std::to_string(fileSize) + "\n";
assert(narHash.type == htSHA256);
res += "NarHash: " + narHash.to_string(HashFormat::Base32, true) + "\n";
assert(narHash.algo == HashAlgorithm::SHA256);
res += "NarHash: " + narHash.to_string(HashFormat::Nix32, true) + "\n";
res += "NarSize: " + std::to_string(narSize) + "\n";
res += "References: " + concatStringsSep(" ", shortRefs()) + "\n";

View file

@ -146,17 +146,17 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
Also note that if `path' is a symlink, then we're hashing the
contents of the symlink (i.e. the result of readlink()), not
the contents of the target (which may not even exist). */
Hash hash = hashPath(htSHA256, path).first;
debug("'%1%' has hash '%2%'", path, hash.to_string(HashFormat::Base32, true));
Hash hash = hashPath(HashAlgorithm::SHA256, path).first;
debug("'%1%' has hash '%2%'", path, hash.to_string(HashFormat::Nix32, true));
/* Check if this is a known hash. */
Path linkPath = linksDir + "/" + hash.to_string(HashFormat::Base32, false);
Path linkPath = linksDir + "/" + hash.to_string(HashFormat::Nix32, false);
/* Maybe delete the link, if it has been corrupted. */
if (pathExists(linkPath)) {
auto stLink = lstat(linkPath);
if (st.st_size != stLink.st_size
|| (repair && hash != hashPath(htSHA256, linkPath).first))
|| (repair && hash != hashPath(HashAlgorithm::SHA256, linkPath).first))
{
// XXX: Consider overwriting linkPath with our valid version.
warn("removing corrupted link '%s'", linkPath);

View file

@ -146,7 +146,7 @@ static nlohmann::json pathInfoToJSON(
auto info = store.queryPathInfo(storePath);
auto & jsonPath = jsonList.emplace_back(
info->toJSON(store, false, HashFormat::Base32));
info->toJSON(store, false, HashFormat::Nix32));
// Add the path to the object whose metadata we are including.
jsonPath["path"] = store.printStorePath(storePath);

View file

@ -31,9 +31,9 @@ std::string ValidPathInfo::fingerprint(const Store & store) const
throw Error("cannot calculate fingerprint of path '%s' because its size is not known",
store.printStorePath(path));
return
"1;" + store.printStorePath(path) + ";"
+ narHash.to_string(HashFormat::Base32, true) + ";"
+ std::to_string(narSize) + ";"
"1;" + store.printStorePath(path) + ";"
+ narHash.to_string(HashFormat::Nix32, true) + ";"
+ std::to_string(narSize) + ";"
+ concatStringsSep(",", store.printStorePathSet(references));
}

View file

@ -49,7 +49,7 @@ std::pair<StorePathSet, HashResult> scanForReferences(
const std::string & path,
const StorePathSet & refs)
{
HashSink hashSink { htSHA256 };
HashSink hashSink { HashAlgorithm::SHA256 };
auto found = scanForReferences(hashSink, path, refs);
auto hash = hashSink.finish();
return std::pair<StorePathSet, HashResult>(found, hash);

View file

@ -5,7 +5,7 @@
namespace nix {
std::string StorePathWithOutputs::to_string(const Store & store) const
std::string StorePathWithOutputs::to_string(const StoreDirConfig & store) const
{
return outputs.empty()
? store.printStorePath(path)
@ -85,7 +85,7 @@ std::pair<std::string_view, StringSet> parsePathWithOutputs(std::string_view s)
}
StorePathWithOutputs parsePathWithOutputs(const Store & store, std::string_view pathWithOutputs)
StorePathWithOutputs parsePathWithOutputs(const StoreDirConfig & store, std::string_view pathWithOutputs)
{
auto [path, outputs] = parsePathWithOutputs(pathWithOutputs);
return StorePathWithOutputs { store.parseStorePath(path), std::move(outputs) };

View file

@ -6,6 +6,8 @@
namespace nix {
struct StoreDirConfig;
/**
* This is a deprecated old type just for use by the old CLI, and older
* versions of the RPC protocols. In new code don't use it; you want
@ -19,7 +21,7 @@ struct StorePathWithOutputs
StorePath path;
std::set<std::string> outputs;
std::string to_string(const Store & store) const;
std::string to_string(const StoreDirConfig & store) const;
DerivedPath toDerivedPath() const;
@ -32,14 +34,14 @@ std::vector<DerivedPath> toDerivedPaths(const std::vector<StorePathWithOutputs>)
std::pair<std::string_view, StringSet> parsePathWithOutputs(std::string_view s);
class Store;
/**
* Split a string specifying a derivation and a set of outputs
* (/nix/store/hash-foo!out1,out2,...) into the derivation path
* and the outputs.
*/
StorePathWithOutputs parsePathWithOutputs(const Store & store, std::string_view pathWithOutputs);
StorePathWithOutputs parsePathWithOutputs(const StoreDirConfig & store, std::string_view pathWithOutputs);
class Store;
StorePathWithOutputs followLinksToStorePathWithOutputs(const Store & store, std::string_view pathWithOutputs);
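As a rough illustration of the overloads declared here (demoSplit and the literal spec string are hypothetical, and <hash> is a stand-in for a real store hash):
// The string-level helper splits a "path!out1,out2" spec without needing a
// store; the StoreDirConfig overload additionally parses the path part.
void demoSplit()
{
    auto [pathPart, outputNames] = parsePathWithOutputs("/nix/store/<hash>-hello!out,dev");
    // pathPart    == "/nix/store/<hash>-hello"
    // outputNames == StringSet { "out", "dev" }
}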

View file

@ -1,4 +1,4 @@
#include "store-api.hh"
#include "store-dir-config.hh"
#include <sodium.h>
@ -35,7 +35,7 @@ StorePath::StorePath(std::string_view _baseName)
}
StorePath::StorePath(const Hash & hash, std::string_view _name)
: baseName((hash.to_string(HashFormat::Base32, false) + "-").append(std::string(_name)))
: baseName((hash.to_string(HashFormat::Nix32, false) + "-").append(std::string(_name)))
{
checkName(baseName, name());
}
@ -49,12 +49,12 @@ StorePath StorePath::dummy("ffffffffffffffffffffffffffffffff-x");
StorePath StorePath::random(std::string_view name)
{
Hash hash(htSHA1);
Hash hash(HashAlgorithm::SHA1);
randombytes_buf(hash.hash, hash.hashSize);
return StorePath(hash, name);
}
StorePath Store::parseStorePath(std::string_view path) const
StorePath StoreDirConfig::parseStorePath(std::string_view path) const
{
auto p = canonPath(std::string(path));
if (dirOf(p) != storeDir)
@ -62,7 +62,7 @@ StorePath Store::parseStorePath(std::string_view path) const
return StorePath(baseNameOf(p));
}
std::optional<StorePath> Store::maybeParseStorePath(std::string_view path) const
std::optional<StorePath> StoreDirConfig::maybeParseStorePath(std::string_view path) const
{
try {
return parseStorePath(path);
@ -71,24 +71,24 @@ std::optional<StorePath> Store::maybeParseStorePath(std::string_view path) const
}
}
bool Store::isStorePath(std::string_view path) const
bool StoreDirConfig::isStorePath(std::string_view path) const
{
return (bool) maybeParseStorePath(path);
}
StorePathSet Store::parseStorePathSet(const PathSet & paths) const
StorePathSet StoreDirConfig::parseStorePathSet(const PathSet & paths) const
{
StorePathSet res;
for (auto & i : paths) res.insert(parseStorePath(i));
return res;
}
std::string Store::printStorePath(const StorePath & path) const
std::string StoreDirConfig::printStorePath(const StorePath & path) const
{
return (storeDir + "/").append(path.to_string());
}
PathSet Store::printStorePathSet(const StorePathSet & paths) const
PathSet StoreDirConfig::printStorePathSet(const StorePathSet & paths) const
{
PathSet res;
for (auto & i : paths) res.insert(printStorePath(i));

View file

@ -1,4 +1,6 @@
#include <sys/xattr.h>
#if HAVE_SYS_XATTR_H
# include <sys/xattr.h>
#endif
#include "posix-fs-canonicalise.hh"
#include "file-system.hh"
@ -76,7 +78,7 @@ static void canonicalisePathMetaData_(
if (!(S_ISREG(st.st_mode) || S_ISDIR(st.st_mode) || S_ISLNK(st.st_mode)))
throw Error("file '%1%' has an unsupported type", path);
#if __linux__
#ifdef HAVE_SYS_XATTR_H
/* Remove extended attributes / ACLs. */
ssize_t eaSize = llistxattr(path.c_str(), nullptr, 0);

View file

@ -20,7 +20,7 @@
namespace nix {
/* TODO: Separate these store impls into different files, give them better names */
/* TODO: Separate these store types into different files, give them better names */
RemoteStore::RemoteStore(const Params & params)
: RemoteStoreConfig(params)
, Store(params)
@ -225,7 +225,7 @@ StorePathSet RemoteStore::queryValidPaths(const StorePathSet & paths, Substitute
conn->to << WorkerProto::Op::QueryValidPaths;
WorkerProto::write(*this, *conn, paths);
if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 27) {
conn->to << (settings.buildersUseSubstitutes ? 1 : 0);
conn->to << maybeSubstitute;
}
conn.processStderr();
return WorkerProto::Serialise<StorePathSet>::read(*this, *conn);
@ -417,12 +417,12 @@ std::optional<StorePath> RemoteStore::queryPathFromHashPart(const std::string &
ref<const ValidPathInfo> RemoteStore::addCAToStore(
Source & dump,
std::string_view name,
ContentAddressMethod caMethod,
HashType hashType,
const StorePathSet & references,
RepairFlag repair)
Source & dump,
std::string_view name,
ContentAddressMethod caMethod,
HashAlgorithm hashAlgo,
const StorePathSet & references,
RepairFlag repair)
{
std::optional<ConnectionHandle> conn_(getConnection());
auto & conn = *conn_;
@ -432,7 +432,7 @@ ref<const ValidPathInfo> RemoteStore::addCAToStore(
conn->to
<< WorkerProto::Op::AddToStore
<< name
<< caMethod.render(hashType);
<< caMethod.render(hashAlgo);
WorkerProto::write(*this, *conn, references);
conn->to << repair;
@ -453,9 +453,9 @@ ref<const ValidPathInfo> RemoteStore::addCAToStore(
std::visit(overloaded {
[&](const TextIngestionMethod & thm) -> void {
if (hashType != htSHA256)
if (hashAlgo != HashAlgorithm::SHA256)
throw UnimplementedError("When adding text-hashed data called '%s', only SHA-256 is supported but '%s' was given",
name, printHashType(hashType));
name, printHashAlgo(hashAlgo));
std::string s = dump.drain();
conn->to << WorkerProto::Op::AddTextToStore << name << s;
WorkerProto::write(*this, *conn, references);
@ -465,9 +465,9 @@ ref<const ValidPathInfo> RemoteStore::addCAToStore(
conn->to
<< WorkerProto::Op::AddToStore
<< name
<< ((hashType == htSHA256 && fim == FileIngestionMethod::Recursive) ? 0 : 1) /* backwards compatibility hack */
<< ((hashAlgo == HashAlgorithm::SHA256 && fim == FileIngestionMethod::Recursive) ? 0 : 1) /* backwards compatibility hack */
<< (fim == FileIngestionMethod::Recursive ? 1 : 0)
<< printHashType(hashType);
<< printHashAlgo(hashAlgo);
try {
conn->to.written = 0;
@ -503,9 +503,9 @@ ref<const ValidPathInfo> RemoteStore::addCAToStore(
StorePath RemoteStore::addToStoreFromDump(Source & dump, std::string_view name,
FileIngestionMethod method, HashType hashType, RepairFlag repair, const StorePathSet & references)
FileIngestionMethod method, HashAlgorithm hashAlgo, RepairFlag repair, const StorePathSet & references)
{
return addCAToStore(dump, name, method, hashType, references, repair)->path;
return addCAToStore(dump, name, method, hashAlgo, references, repair)->path;
}
@ -610,7 +610,7 @@ StorePath RemoteStore::addTextToStore(
RepairFlag repair)
{
StringSource source(s);
return addCAToStore(source, name, TextIngestionMethod {}, htSHA256, references, repair)->path;
return addCAToStore(source, name, TextIngestionMethod {}, HashAlgorithm::SHA256, references, repair)->path;
}
void RemoteStore::registerDrvOutput(const Realisation & info)
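For the updated addCAToStore signature (HashType is now HashAlgorithm), a hedged sketch of the text-ingestion case, mirroring how addTextToStore drives it above; addTextViaCA and its arguments are hypothetical.
// Add a small text blob, content-addressed, over the daemon connection.
// Only SHA-256 is accepted for TextIngestionMethod, per the check above.
StorePath addTextViaCA(RemoteStore & store, std::string_view name, std::string_view contents)
{
    StringSource source(contents);
    return store.addCAToStore(source, name, TextIngestionMethod {},
        HashAlgorithm::SHA256, StorePathSet {}, NoRepair)->path;
}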

View file

@ -74,18 +74,18 @@ public:
* Add a content-addressable store path. `dump` will be drained.
*/
ref<const ValidPathInfo> addCAToStore(
Source & dump,
std::string_view name,
ContentAddressMethod caMethod,
HashType hashType,
const StorePathSet & references,
RepairFlag repair);
Source & dump,
std::string_view name,
ContentAddressMethod caMethod,
HashAlgorithm hashAlgo,
const StorePathSet & references,
RepairFlag repair);
/**
* Add a content-addressable store path. Does not support references. `dump` will be drained.
*/
StorePath addToStoreFromDump(Source & dump, std::string_view name,
FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair, const StorePathSet & references = StorePathSet()) override;
FileIngestionMethod method = FileIngestionMethod::Recursive, HashAlgorithm hashAlgo = HashAlgorithm::SHA256, RepairFlag repair = NoRepair, const StorePathSet & references = StorePathSet()) override;
void addToStore(const ValidPathInfo & info, Source & nar,
RepairFlag repair, CheckSigsFlag checkSigs) override;

View file

@ -16,11 +16,11 @@ namespace nix {
/* protocol-agnostic templates */
#define SERVE_USE_LENGTH_PREFIX_SERIALISER(TEMPLATE, T) \
TEMPLATE T ServeProto::Serialise< T >::read(const Store & store, ServeProto::ReadConn conn) \
TEMPLATE T ServeProto::Serialise< T >::read(const StoreDirConfig & store, ServeProto::ReadConn conn) \
{ \
return LengthPrefixedProtoHelper<ServeProto, T >::read(store, conn); \
} \
TEMPLATE void ServeProto::Serialise< T >::write(const Store & store, ServeProto::WriteConn conn, const T & t) \
TEMPLATE void ServeProto::Serialise< T >::write(const StoreDirConfig & store, ServeProto::WriteConn conn, const T & t) \
{ \
LengthPrefixedProtoHelper<ServeProto, T >::write(store, conn, t); \
}
@ -41,12 +41,12 @@ SERVE_USE_LENGTH_PREFIX_SERIALISER(
template<typename T>
struct ServeProto::Serialise
{
static T read(const Store & store, ServeProto::ReadConn conn)
static T read(const StoreDirConfig & store, ServeProto::ReadConn conn)
{
return CommonProto::Serialise<T>::read(store,
CommonProto::ReadConn { .from = conn.from });
}
static void write(const Store & store, ServeProto::WriteConn conn, const T & t)
static void write(const StoreDirConfig & store, ServeProto::WriteConn conn, const T & t)
{
CommonProto::Serialise<T>::write(store,
CommonProto::WriteConn { .to = conn.to },

View file

@ -5,6 +5,7 @@
#include "serve-protocol.hh"
#include "serve-protocol-impl.hh"
#include "archive.hh"
#include "path-info.hh"
#include <nlohmann/json.hpp>
@ -12,7 +13,7 @@ namespace nix {
/* protocol-specific definitions */
BuildResult ServeProto::Serialise<BuildResult>::read(const Store & store, ServeProto::ReadConn conn)
BuildResult ServeProto::Serialise<BuildResult>::read(const StoreDirConfig & store, ServeProto::ReadConn conn)
{
BuildResult status;
status.status = (BuildResult::Status) readInt(conn.from);
@ -34,7 +35,7 @@ BuildResult ServeProto::Serialise<BuildResult>::read(const Store & store, ServeP
return status;
}
void ServeProto::Serialise<BuildResult>::write(const Store & store, ServeProto::WriteConn conn, const BuildResult & status)
void ServeProto::Serialise<BuildResult>::write(const StoreDirConfig & store, ServeProto::WriteConn conn, const BuildResult & status)
{
conn.to
<< status.status
@ -54,4 +55,83 @@ void ServeProto::Serialise<BuildResult>::write(const Store & store, ServeProto::
}
}
UnkeyedValidPathInfo ServeProto::Serialise<UnkeyedValidPathInfo>::read(const StoreDirConfig & store, ReadConn conn)
{
/* Hash should be set below unless very old `nix-store --serve`.
Caller should assert that it did set it. */
UnkeyedValidPathInfo info { Hash::dummy };
auto deriver = readString(conn.from);
if (deriver != "")
info.deriver = store.parseStorePath(deriver);
info.references = ServeProto::Serialise<StorePathSet>::read(store, conn);
readLongLong(conn.from); // download size, unused
info.narSize = readLongLong(conn.from);
if (GET_PROTOCOL_MINOR(conn.version) >= 4) {
auto s = readString(conn.from);
if (!s.empty())
info.narHash = Hash::parseAnyPrefixed(s);
info.ca = ContentAddress::parseOpt(readString(conn.from));
info.sigs = readStrings<StringSet>(conn.from);
}
return info;
}
void ServeProto::Serialise<UnkeyedValidPathInfo>::write(const StoreDirConfig & store, WriteConn conn, const UnkeyedValidPathInfo & info)
{
conn.to
<< (info.deriver ? store.printStorePath(*info.deriver) : "");
ServeProto::write(store, conn, info.references);
// !!! Maybe we want compression?
conn.to
<< info.narSize // downloadSize, lie a little
<< info.narSize;
if (GET_PROTOCOL_MINOR(conn.version) >= 4)
conn.to
<< info.narHash.to_string(HashFormat::Nix32, true)
<< renderContentAddress(info.ca)
<< info.sigs;
}
ServeProto::BuildOptions ServeProto::Serialise<ServeProto::BuildOptions>::read(const StoreDirConfig & store, ReadConn conn)
{
BuildOptions options;
options.maxSilentTime = readInt(conn.from);
options.buildTimeout = readInt(conn.from);
if (GET_PROTOCOL_MINOR(conn.version) >= 2)
options.maxLogSize = readNum<unsigned long>(conn.from);
if (GET_PROTOCOL_MINOR(conn.version) >= 3) {
options.nrRepeats = readInt(conn.from);
options.enforceDeterminism = readInt(conn.from);
}
if (GET_PROTOCOL_MINOR(conn.version) >= 7) {
options.keepFailed = (bool) readInt(conn.from);
}
return options;
}
void ServeProto::Serialise<ServeProto::BuildOptions>::write(const StoreDirConfig & store, WriteConn conn, const ServeProto::BuildOptions & options)
{
conn.to
<< options.maxSilentTime
<< options.buildTimeout;
if (GET_PROTOCOL_MINOR(conn.version) >= 2)
conn.to
<< options.maxLogSize;
if (GET_PROTOCOL_MINOR(conn.version) >= 3)
conn.to
<< options.nrRepeats
<< options.enforceDeterminism;
if (GET_PROTOCOL_MINOR(conn.version) >= 7) {
conn.to << ((int) options.keepFailed);
}
}
}

View file

@ -13,11 +13,12 @@ namespace nix {
#define GET_PROTOCOL_MINOR(x) ((x) & 0x00ff)
class Store;
struct StoreDirConfig;
struct Source;
// items being serialised
struct BuildResult;
struct UnkeyedValidPathInfo;
/**
@ -72,8 +73,8 @@ struct ServeProto
// See `worker-protocol.hh` for a longer explanation.
#if 0
{
static T read(const Store & store, ReadConn conn);
static void write(const Store & store, WriteConn conn, const T & t);
static T read(const StoreDirConfig & store, ReadConn conn);
static void write(const StoreDirConfig & store, WriteConn conn, const T & t);
};
#endif
@ -82,10 +83,17 @@ struct ServeProto
* infer the type instead of having to write it down explicitly.
*/
template<typename T>
static void write(const Store & store, WriteConn conn, const T & t)
static void write(const StoreDirConfig & store, WriteConn conn, const T & t)
{
ServeProto::Serialise<T>::write(store, conn, t);
}
/**
* Options for building shared between
* `ServeProto::Command::BuildPaths` and
* `ServeProto::Command::BuildDerivation`.
*/
struct BuildOptions;
};
enum struct ServeProto::Command : uint64_t
@ -101,6 +109,22 @@ enum struct ServeProto::Command : uint64_t
AddToStoreNar = 9,
};
struct ServeProto::BuildOptions {
/**
* Default value in this and every other field is so tests pass when
* testing older deserialisers which do not set all the fields.
*/
time_t maxSilentTime = -1;
time_t buildTimeout = -1;
size_t maxLogSize = -1;
size_t nrRepeats = -1;
bool enforceDeterminism = -1;
bool keepFailed = -1;
bool operator == (const ServeProto::BuildOptions &) const = default;
};
/**
* Convenience for sending operation codes.
*
@ -135,12 +159,16 @@ inline std::ostream & operator << (std::ostream & s, ServeProto::Command op)
#define DECLARE_SERVE_SERIALISER(T) \
struct ServeProto::Serialise< T > \
{ \
static T read(const Store & store, ServeProto::ReadConn conn); \
static void write(const Store & store, ServeProto::WriteConn conn, const T & t); \
static T read(const StoreDirConfig & store, ServeProto::ReadConn conn); \
static void write(const StoreDirConfig & store, ServeProto::WriteConn conn, const T & t); \
};
template<>
DECLARE_SERVE_SERIALISER(BuildResult);
template<>
DECLARE_SERVE_SERIALISER(UnkeyedValidPathInfo);
template<>
DECLARE_SERVE_SERIALISER(ServeProto::BuildOptions);
template<typename T>
DECLARE_SERVE_SERIALISER(std::vector<T>);
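The new BuildOptions aggregate collects per-build settings that were previously written field by field; a sketch of filling it in before handing it to the generic ServeProto::write helper (the values and the commented-out conn are hypothetical):
ServeProto::BuildOptions options;
options.maxSilentTime = 3600;              // abort after an hour of silence
options.buildTimeout  = 24 * 3600;         // overall timeout in seconds
options.maxLogSize    = 64 * 1024 * 1024;  // only sent for protocol minor >= 2
options.keepFailed    = true;              // only sent for protocol minor >= 7
// ServeProto::write(store, conn, options); // dispatches to the serialiser declared above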

View file

@ -20,7 +20,7 @@ struct CommonSSHStoreConfig : virtual StoreConfig
const Setting<std::string> remoteStore{this, "", "remote-store",
R"(
[Store URL](@docroot@/command-ref/new-cli/nix3-help-stores.md#store-url-format)
[Store URL](@docroot@/store/types/index.md#store-url-format)
to be used on the remote machine. The default is `auto`
(i.e. use the Nix daemon or `/nix/store` directly).
)"};

View file

@ -1,6 +1,8 @@
#include "crypto.hh"
#include "source-accessor.hh"
#include "globals.hh"
#include "derived-path.hh"
#include "realisation.hh"
#include "derivations.hh"
#include "store-api.hh"
#include "util.hh"
@ -25,13 +27,13 @@ using json = nlohmann::json;
namespace nix {
bool Store::isInStore(PathView path) const
bool StoreDirConfig::isInStore(PathView path) const
{
return isInDir(path, storeDir);
}
std::pair<StorePath, Path> Store::toStorePath(PathView path) const
std::pair<StorePath, Path> StoreDirConfig::toStorePath(PathView path) const
{
if (!isInStore(path))
throw Error("path '%1%' is not in the Nix store", path);
@ -145,25 +147,25 @@ StorePath Store::followLinksToStorePath(std::string_view path) const
*/
StorePath Store::makeStorePath(std::string_view type,
StorePath StoreDirConfig::makeStorePath(std::string_view type,
std::string_view hash, std::string_view name) const
{
/* e.g., "source:sha256:1abc...:/nix/store:foo.tar.gz" */
auto s = std::string(type) + ":" + std::string(hash)
+ ":" + storeDir + ":" + std::string(name);
auto h = compressHash(hashString(htSHA256, s), 20);
auto h = compressHash(hashString(HashAlgorithm::SHA256, s), 20);
return StorePath(h, name);
}
StorePath Store::makeStorePath(std::string_view type,
StorePath StoreDirConfig::makeStorePath(std::string_view type,
const Hash & hash, std::string_view name) const
{
return makeStorePath(type, hash.to_string(HashFormat::Base16, true), name);
}
StorePath Store::makeOutputPath(std::string_view id,
StorePath StoreDirConfig::makeOutputPath(std::string_view id,
const Hash & hash, std::string_view name) const
{
return makeStorePath("output:" + std::string { id }, hash, outputPathName(name, id));
@ -174,7 +176,7 @@ StorePath Store::makeOutputPath(std::string_view id,
hacky, but we can't put them in, say, <s2> (per the grammar above)
since that would be ambiguous. */
static std::string makeType(
const Store & store,
const StoreDirConfig & store,
std::string && type,
const StoreReferences & references)
{
@ -187,14 +189,14 @@ static std::string makeType(
}
StorePath Store::makeFixedOutputPath(std::string_view name, const FixedOutputInfo & info) const
StorePath StoreDirConfig::makeFixedOutputPath(std::string_view name, const FixedOutputInfo & info) const
{
if (info.hash.type == htSHA256 && info.method == FileIngestionMethod::Recursive) {
if (info.hash.algo == HashAlgorithm::SHA256 && info.method == FileIngestionMethod::Recursive) {
return makeStorePath(makeType(*this, "source", info.references), info.hash, name);
} else {
assert(info.references.size() == 0);
return makeStorePath("output:out",
hashString(htSHA256,
hashString(HashAlgorithm::SHA256,
"fixed:out:"
+ makeFileIngestionPrefix(info.method)
+ info.hash.to_string(HashFormat::Base16, true) + ":"),
@ -203,9 +205,9 @@ StorePath Store::makeFixedOutputPath(std::string_view name, const FixedOutputInf
}
StorePath Store::makeTextPath(std::string_view name, const TextInfo & info) const
StorePath StoreDirConfig::makeTextPath(std::string_view name, const TextInfo & info) const
{
assert(info.hash.type == htSHA256);
assert(info.hash.algo == HashAlgorithm::SHA256);
return makeStorePath(
makeType(*this, "text", StoreReferences {
.others = info.references,
@ -216,7 +218,7 @@ StorePath Store::makeTextPath(std::string_view name, const TextInfo & info) cons
}
StorePath Store::makeFixedOutputPathFromCA(std::string_view name, const ContentAddressWithReferences & ca) const
StorePath StoreDirConfig::makeFixedOutputPathFromCA(std::string_view name, const ContentAddressWithReferences & ca) const
{
// New template
return std::visit(overloaded {
@ -230,12 +232,12 @@ StorePath Store::makeFixedOutputPathFromCA(std::string_view name, const ContentA
}
std::pair<StorePath, Hash> Store::computeStorePathFromDump(
Source & dump,
std::string_view name,
FileIngestionMethod method,
HashType hashAlgo,
const StorePathSet & references) const
std::pair<StorePath, Hash> StoreDirConfig::computeStorePathFromDump(
Source & dump,
std::string_view name,
FileIngestionMethod method,
HashAlgorithm hashAlgo,
const StorePathSet & references) const
{
HashSink sink(hashAlgo);
dump.drainInto(sink);
@ -249,26 +251,26 @@ std::pair<StorePath, Hash> Store::computeStorePathFromDump(
}
StorePath Store::computeStorePathForText(
StorePath StoreDirConfig::computeStorePathForText(
std::string_view name,
std::string_view s,
const StorePathSet & references) const
{
return makeTextPath(name, TextInfo {
.hash = hashString(htSHA256, s),
.hash = hashString(HashAlgorithm::SHA256, s),
.references = references,
});
}
StorePath Store::addToStore(
std::string_view name,
const Path & _srcPath,
FileIngestionMethod method,
HashType hashAlgo,
PathFilter & filter,
RepairFlag repair,
const StorePathSet & references)
std::string_view name,
const Path & _srcPath,
FileIngestionMethod method,
HashAlgorithm hashAlgo,
PathFilter & filter,
RepairFlag repair,
const StorePathSet & references)
{
Path srcPath(absPath(_srcPath));
auto source = sinkToSource([&](Sink & sink) {
@ -403,10 +405,10 @@ digraph graphname {
}
*/
ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath,
FileIngestionMethod method, HashType hashAlgo,
std::optional<Hash> expectedCAHash)
FileIngestionMethod method, HashAlgorithm hashAlgo,
std::optional<Hash> expectedCAHash)
{
HashSink narHashSink { htSHA256 };
HashSink narHashSink { HashAlgorithm::SHA256 };
HashSink caHashSink { hashAlgo };
/* Note that fileSink and unusualHashTee must be mutually exclusive, since
@ -415,7 +417,7 @@ ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath,
RegularFileSink fileSink { caHashSink };
TeeSink unusualHashTee { narHashSink, caHashSink };
auto & narSink = method == FileIngestionMethod::Recursive && hashAlgo != htSHA256
auto & narSink = method == FileIngestionMethod::Recursive && hashAlgo != HashAlgorithm::SHA256
? static_cast<Sink &>(unusualHashTee)
: narHashSink;
@ -443,7 +445,7 @@ ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath,
finish. */
auto [narHash, narSize] = narHashSink.finish();
auto hash = method == FileIngestionMethod::Recursive && hashAlgo == htSHA256
auto hash = method == FileIngestionMethod::Recursive && hashAlgo == HashAlgorithm::SHA256
? narHash
: caHashSink.finish().first;
@ -545,8 +547,8 @@ std::map<std::string, std::optional<StorePath>> Store::queryPartialDerivationOut
return outputs;
}
OutputPathMap Store::queryDerivationOutputMap(const StorePath & path) {
auto resp = queryPartialDerivationOutputMap(path);
OutputPathMap Store::queryDerivationOutputMap(const StorePath & path, Store * evalStore) {
auto resp = queryPartialDerivationOutputMap(path, evalStore);
OutputPathMap result;
for (auto & [outName, optOutPath] : resp) {
if (!optOutPath)
@ -1203,7 +1205,7 @@ std::optional<ValidPathInfo> decodeValidPathInfo(const Store & store, std::istre
if (!hashGiven) {
std::string s;
getline(str, s);
auto narHash = Hash::parseAny(s, htSHA256);
auto narHash = Hash::parseAny(s, HashAlgorithm::SHA256);
getline(str, s);
auto narSize = string2Int<uint64_t>(s);
if (!narSize) throw Error("number expected");
@ -1227,7 +1229,7 @@ std::optional<ValidPathInfo> decodeValidPathInfo(const Store & store, std::istre
}
std::string Store::showPaths(const StorePathSet & paths)
std::string StoreDirConfig::showPaths(const StorePathSet & paths)
{
std::string s;
for (auto & i : paths) {

View file

@ -1,8 +1,6 @@
#pragma once
///@file
#include "nar-info.hh"
#include "realisation.hh"
#include "path.hh"
#include "derived-path.hh"
#include "hash.hh"
@ -14,6 +12,7 @@
#include "config.hh"
#include "path-info.hh"
#include "repair-flag.hh"
#include "store-dir-config.hh"
#include <nlohmann/json_fwd.hpp>
#include <atomic>
@ -30,7 +29,7 @@
namespace nix {
/**
* About the class hierarchy of the store implementations:
* About the class hierarchy of the store types:
*
* Each store type `Foo` consists of two classes:
*
@ -64,12 +63,16 @@ MakeError(InvalidPath, Error);
MakeError(Unsupported, Error);
MakeError(SubstituteGone, Error);
MakeError(SubstituterDisabled, Error);
MakeError(BadStorePath, Error);
MakeError(InvalidStoreURI, Error);
struct Realisation;
struct RealisedPath;
struct DrvOutput;
struct BasicDerivation;
struct Derivation;
struct SourceAccessor;
class NarInfoDiskCache;
class Store;
@ -96,11 +99,11 @@ struct KeyedBuildResult;
typedef std::map<StorePath, std::optional<ContentAddress>> StorePathCAMap;
struct StoreConfig : public Config
struct StoreConfig : public StoreDirConfig
{
typedef std::map<std::string, std::string> Params;
using Config::Config;
using StoreDirConfig::StoreDirConfig;
StoreConfig() = delete;
@ -130,15 +133,6 @@ struct StoreConfig : public Config
return std::nullopt;
}
const PathSetting storeDir_{this, settings.nixStore,
"store",
R"(
Logical location of the Nix store, usually
`/nix/store`. Note that you can only copy store paths
between stores if they have the same `store` setting.
)"};
const Path storeDir = storeDir_;
const Setting<int> pathInfoCacheSize{this, 65536, "path-info-cache-size",
"Size of the in-memory store path metadata cache."};
@ -226,45 +220,6 @@ public:
virtual std::string getUri() = 0;
StorePath parseStorePath(std::string_view path) const;
std::optional<StorePath> maybeParseStorePath(std::string_view path) const;
std::string printStorePath(const StorePath & path) const;
/**
* Deprecated
*
* \todo remove
*/
StorePathSet parseStorePathSet(const PathSet & paths) const;
PathSet printStorePathSet(const StorePathSet & path) const;
/**
* Display a set of paths in human-readable form (i.e., between quotes
* and separated by commas).
*/
std::string showPaths(const StorePathSet & paths);
/**
* @return true if path is in the Nix store (but not the Nix
* store itself).
*/
bool isInStore(PathView path) const;
/**
* @return true if path is a store path, i.e. a direct child of the
* Nix store.
*/
bool isStorePath(std::string_view path) const;
/**
* Split a path like /nix/store/<hash>-<name>/<bla> into
* /nix/store/<hash>-<name> and /<bla>.
*/
std::pair<StorePath, Path> toStorePath(PathView path) const;
/**
* Follow symlinks until we end up with a path in the Nix store.
*/
@ -276,55 +231,6 @@ public:
*/
StorePath followLinksToStorePath(std::string_view path) const;
/**
* Constructs a unique store path name.
*/
StorePath makeStorePath(std::string_view type,
std::string_view hash, std::string_view name) const;
StorePath makeStorePath(std::string_view type,
const Hash & hash, std::string_view name) const;
StorePath makeOutputPath(std::string_view id,
const Hash & hash, std::string_view name) const;
StorePath makeFixedOutputPath(std::string_view name, const FixedOutputInfo & info) const;
StorePath makeTextPath(std::string_view name, const TextInfo & info) const;
StorePath makeFixedOutputPathFromCA(std::string_view name, const ContentAddressWithReferences & ca) const;
/**
* Read-only variant of addToStoreFromDump(). It returns the store
* path to which a NAR or flat file would be written.
*/
std::pair<StorePath, Hash> computeStorePathFromDump(
Source & dump,
std::string_view name,
FileIngestionMethod method = FileIngestionMethod::Recursive,
HashType hashAlgo = htSHA256,
const StorePathSet & references = {}) const;
/**
* Preparatory part of addTextToStore().
*
* !!! Computation of the path should take the references given to
* addTextToStore() into account, otherwise we have a (relatively
* minor) security hole: a caller can register a source file with
* bogus references. If there are too many references, the path may
* not be garbage collected when it has to be (not really a problem,
* the caller could create a root anyway), or it may be garbage
* collected when it shouldn't be (more serious).
*
* Hashing the references would solve this (bogus references would
* simply yield a different store path, so other users wouldn't be
* affected), but it has some backwards compatibility issues (the
* hashing scheme changes), so I'm not doing that for now.
*/
StorePath computeStorePathForText(
std::string_view name,
std::string_view s,
const StorePathSet & references) const;
/**
* Check whether a path is valid.
*/
@ -463,7 +369,7 @@ public:
* Query the mapping outputName=>outputPath for the given derivation.
* Assume every output has a mapping and throw an exception otherwise.
*/
OutputPathMap queryDerivationOutputMap(const StorePath & path);
OutputPathMap queryDerivationOutputMap(const StorePath & path, Store * evalStore = nullptr);
/**
* Query the full store path given the hash part of a valid store
@ -521,13 +427,13 @@ public:
* libutil/archive.hh).
*/
virtual StorePath addToStore(
std::string_view name,
const Path & srcPath,
FileIngestionMethod method = FileIngestionMethod::Recursive,
HashType hashAlgo = htSHA256,
PathFilter & filter = defaultPathFilter,
RepairFlag repair = NoRepair,
const StorePathSet & references = StorePathSet());
std::string_view name,
const Path & srcPath,
FileIngestionMethod method = FileIngestionMethod::Recursive,
HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
PathFilter & filter = defaultPathFilter,
RepairFlag repair = NoRepair,
const StorePathSet & references = StorePathSet());
/**
* Copy the contents of a path to the store and register the
@ -535,8 +441,8 @@ public:
* memory.
*/
ValidPathInfo addToStoreSlow(std::string_view name, const Path & srcPath,
FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256,
std::optional<Hash> expectedCAHash = {});
FileIngestionMethod method = FileIngestionMethod::Recursive, HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
std::optional<Hash> expectedCAHash = {});
/**
* Like addToStore(), but the contents of the path are contained
@ -548,8 +454,8 @@ public:
* \todo remove?
*/
virtual StorePath addToStoreFromDump(Source & dump, std::string_view name,
FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair,
const StorePathSet & references = StorePathSet())
FileIngestionMethod method = FileIngestionMethod::Recursive, HashAlgorithm hashAlgo = HashAlgorithm::SHA256, RepairFlag repair = NoRepair,
const StorePathSet & references = StorePathSet())
{ unsupported("addToStoreFromDump"); }
/**
@ -888,7 +794,7 @@ void copyStorePath(
*/
std::map<StorePath, StorePath> copyPaths(
Store & srcStore, Store & dstStore,
const RealisedPath::Set &,
const std::set<RealisedPath> &,
RepairFlag repair = NoRepair,
CheckSigsFlag checkSigs = CheckSigs,
SubstituteFlag substitute = NoSubstitute);
@ -905,7 +811,7 @@ std::map<StorePath, StorePath> copyPaths(
*/
void copyClosure(
Store & srcStore, Store & dstStore,
const RealisedPath::Set & paths,
const std::set<RealisedPath> & paths,
RepairFlag repair = NoRepair,
CheckSigsFlag checkSigs = CheckSigs,
SubstituteFlag substitute = NoSubstitute);
@ -962,7 +868,7 @@ OutputPathMap resolveDerivedPath(Store &, const DerivedPath::Built &, Store * ev
* - ssh://[user@]<host>: A remote Nix store accessed by running
* nix-store --serve via SSH.
*
* You can pass parameters to the store implementation by appending
* You can pass parameters to the store type by appending
* ?key=value&key=value&... to the URI.
*/
ref<Store> openStore(const std::string & uri = settings.storeUri.get(),
@ -1037,6 +943,7 @@ const ContentAddress * getDerivationCA(const BasicDerivation & drv);
std::map<DrvOutput, StorePath> drvOutputReferences(
Store & store,
const Derivation & drv,
const StorePath & outputPath);
const StorePath & outputPath,
Store * evalStore = nullptr);
}
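The signatures gaining an optional evalStore parameter can now resolve derivation outputs against a separate evaluation store; a brief sketch (resolveOutputs and the two store references are hypothetical):
// Resolve all output paths of a derivation, consulting evalStore for the
// derivation itself; passing nullptr (the default) keeps the old behaviour.
OutputPathMap resolveOutputs(Store & buildStore, Store & evalStore, const StorePath & drvPath)
{
    return buildStore.queryDerivationOutputMap(drvPath, &evalStore);
}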

Some files were not shown because too many files have changed in this diff.