mirror of https://github.com/NixOS/nix synced 2025-07-01 16:41:47 +02:00

Merge remote-tracking branch 'upstream/master' into libgit2

This commit is contained in:
John Ericson 2023-11-20 08:29:27 -05:00
commit 4ab27e5595
67 changed files with 1356 additions and 336 deletions

View file

@ -1,6 +1,7 @@
#include "eval.hh"
#include "eval-settings.hh"
#include "hash.hh"
#include "primops.hh"
#include "types.hh"
#include "util.hh"
#include "store-api.hh"
@ -722,6 +723,23 @@ void EvalState::addConstant(const std::string & name, Value * v, Constant info)
}
void PrimOp::check()
{
if (arity > maxPrimOpArity) {
throw Error("primop arity must not exceed %1%", maxPrimOpArity);
}
}
void Value::mkPrimOp(PrimOp * p)
{
p->check();
clearValue();
internalType = tPrimOp;
primOp = p;
}
Value * EvalState::addPrimOp(PrimOp && primOp)
{
/* Hack to make constants lazy: turn them into an application of
@ -1748,6 +1766,12 @@ void ExprCall::eval(EvalState & state, Env & env, Value & v)
Value vFun;
fun->eval(state, env, vFun);
// Empirical arity of Nixpkgs lambdas by regex e.g. ([a-zA-Z]+:(\s|(/\*.*\/)|(#.*\n))*){5}
// 2: over 4000
// 3: about 300
// 4: about 60
// 5: under 10
// This excluded attrset lambdas (`{...}:`). The contribution of mixed lambdas appears insignificant at ~150 total.
Value * vArgs[args.size()];
for (size_t i = 0; i < args.size(); ++i)
vArgs[i] = args[i]->maybeThunk(state, env);

View file

@ -18,6 +18,12 @@
namespace nix {
/**
* We put a limit on primop arity because it lets us use a fixed size array on
* the stack. 8 is already an impractical number of arguments. Use an attrset
* argument for such overly complicated functions.
*/
constexpr size_t maxPrimOpArity = 8;
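As an editorial sketch of what the new check buys (not part of the diff; only `arity`, `maxPrimOpArity`, `Value::mkPrimOp()` and the error text come from this commit):

```cpp
// Sketch only: a primop declared with more than maxPrimOpArity (8)
// positional arguments is now rejected as soon as it becomes a Value,
// instead of silently overrunning the fixed-size argument array.
PrimOp tooWide;
tooWide.arity = maxPrimOpArity + 1;

Value v;
v.mkPrimOp(&tooWide);   // throws: "primop arity must not exceed 8"
```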
class Store;
class EvalState;
@ -71,6 +77,12 @@ struct PrimOp
* Optional experimental for this to be gated on.
*/
std::optional<ExperimentalFeature> experimentalFeature;
/**
* Validity check to be performed by functions that introduce primops,
* such as RegisterPrimOp() and Value::mkPrimOp().
*/
void check();
};
/**
@ -827,7 +839,7 @@ std::string showType(const Value & v);
/**
* If `path` refers to a directory, then append "/default.nix".
*/
SourcePath resolveExprPath(const SourcePath & path);
SourcePath resolveExprPath(SourcePath path);
struct InvalidPathError : EvalError
{

View file

@ -43,7 +43,9 @@ $(foreach i, $(wildcard src/libexpr/value/*.hh), \
$(foreach i, $(wildcard src/libexpr/flake/*.hh), \
$(eval $(call install-file-in, $(i), $(includedir)/nix/flake, 0644)))
$(d)/primops.cc: $(d)/imported-drv-to-derivation.nix.gen.hh $(d)/primops/derivation.nix.gen.hh $(d)/fetchurl.nix.gen.hh
$(d)/primops.cc: $(d)/imported-drv-to-derivation.nix.gen.hh
$(d)/eval.cc: $(d)/primops/derivation.nix.gen.hh $(d)/fetchurl.nix.gen.hh
$(d)/flake/flake.cc: $(d)/flake/call-flake.nix.gen.hh

View file

@ -686,17 +686,25 @@ Expr * EvalState::parse(
}
SourcePath resolveExprPath(const SourcePath & path)
SourcePath resolveExprPath(SourcePath path)
{
unsigned int followCount = 0, maxFollow = 1024;
/* If `path' is a symlink, follow it. This is so that relative
path references work. */
auto path2 = path.resolveSymlinks();
while (true) {
// Basic cycle/depth limit to avoid infinite loops.
if (++followCount >= maxFollow)
throw Error("too many symbolic links encountered while traversing the path '%s'", path);
if (path.lstat().type != InputAccessor::tSymlink) break;
path = {path.accessor, CanonPath(path.readLink(), path.path.parent().value_or(CanonPath::root))};
}
/* If `path' refers to a directory, append `/default.nix'. */
if (path2.lstat().type == InputAccessor::tDirectory)
return path2 + "default.nix";
if (path.lstat().type == InputAccessor::tDirectory)
return path + "default.nix";
return path2;
return path;
}

View file

@ -29,7 +29,6 @@
#include <cmath>
namespace nix {
@ -2375,7 +2374,7 @@ static RegisterPrimOp primop_path({
like `@`.
- filter\
A function of the type expected by `builtins.filterSource`,
A function of the type expected by [`builtins.filterSource`](#builtins-filterSource),
with the same semantics.
- recursive\
@ -2550,6 +2549,7 @@ static void prim_removeAttrs(EvalState & state, const PosIdx pos, Value * * args
/* Get the attribute names to be removed.
We keep them as Attrs instead of Symbols so std::set_difference
can be used to remove them from attrs[0]. */
// 64: large enough to fit the attributes of a derivation
boost::container::small_vector<Attr, 64> names;
names.reserve(args[1]->listSize());
for (auto elem : args[1]->listItems()) {
@ -2730,7 +2730,7 @@ static void prim_catAttrs(EvalState & state, const PosIdx pos, Value * * args, V
state.forceList(*args[1], pos, "while evaluating the second argument passed to builtins.catAttrs");
Value * res[args[1]->listSize()];
unsigned int found = 0;
size_t found = 0;
for (auto v2 : args[1]->listItems()) {
state.forceAttrs(*v2, pos, "while evaluating an element in the list passed as second argument to builtins.catAttrs");
@ -3066,7 +3066,7 @@ static void prim_filter(EvalState & state, const PosIdx pos, Value * * args, Val
// FIXME: putting this on the stack is risky.
Value * vs[args[1]->listSize()];
unsigned int k = 0;
size_t k = 0;
bool same = true;
for (unsigned int n = 0; n < args[1]->listSize(); ++n) {
@ -3191,10 +3191,14 @@ static void anyOrAll(bool any, EvalState & state, const PosIdx pos, Value * * ar
state.forceFunction(*args[0], pos, std::string("while evaluating the first argument passed to builtins.") + (any ? "any" : "all"));
state.forceList(*args[1], pos, std::string("while evaluating the second argument passed to builtins.") + (any ? "any" : "all"));
std::string_view errorCtx = any
? "while evaluating the return value of the function passed to builtins.any"
: "while evaluating the return value of the function passed to builtins.all";
Value vTmp;
for (auto elem : args[1]->listItems()) {
state.callFunction(*args[0], *elem, vTmp, pos);
bool res = state.forceBool(vTmp, pos, std::string("while evaluating the return value of the function passed to builtins.") + (any ? "any" : "all"));
bool res = state.forceBool(vTmp, pos, errorCtx);
if (res == any) {
v.mkBool(any);
return;
@ -3456,7 +3460,7 @@ static void prim_concatMap(EvalState & state, const PosIdx pos, Value * * args,
for (unsigned int n = 0; n < nrLists; ++n) {
Value * vElem = args[1]->listElems()[n];
state.callFunction(*args[0], *vElem, lists[n], pos);
state.forceList(lists[n], lists[n].determinePos(args[0]->determinePos(pos)), "while evaluating the return value of the function passed to buitlins.concatMap");
state.forceList(lists[n], lists[n].determinePos(args[0]->determinePos(pos)), "while evaluating the return value of the function passed to builtins.concatMap");
len += lists[n].listSize();
}

View file

@ -8,6 +8,22 @@
namespace nix {
/**
* For functions where we do not expect deep recursion, we can use a sizable
* part of the stack as free allocation space.
*
* Note: this is expected to be multiplied by sizeof(Value), or about 24 bytes.
*/
constexpr size_t nonRecursiveStackReservation = 128;
/**
* Functions that may be applied to self-similar inputs, such as concatMap on a
* tree, should reserve a smaller part of the stack for allocation.
*
* Note: this is expected to be multiplied by sizeof(Value), or about 24 bytes.
*/
constexpr size_t conservativeStackReservation = 16;
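A sketch of the intended usage pattern (the surrounding primop code is assumed; only the constants and the `small_vector` technique appear in this commit):

```cpp
// Sketch only: keep up to nonRecursiveStackReservation elements on the
// stack; larger lists spill to the heap instead of growing the stack.
boost::container::small_vector<Value *, nonRecursiveStackReservation> vals;
vals.reserve(args[1]->listSize());
for (auto elem : args[1]->listItems())
    vals.push_back(elem);
```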
struct RegisterPrimOp
{
typedef std::vector<PrimOp> PrimOps;

View file

@ -906,12 +906,12 @@ namespace nix {
ASSERT_TRACE2("concatMap (x: 1) [ \"foo\" ] # TODO",
TypeError,
hintfmt("value is %s while a list was expected", "an integer"),
hintfmt("while evaluating the return value of the function passed to buitlins.concatMap"));
hintfmt("while evaluating the return value of the function passed to builtins.concatMap"));
ASSERT_TRACE2("concatMap (x: \"foo\") [ 1 2 ] # TODO",
TypeError,
hintfmt("value is %s while a list was expected", "a string"),
hintfmt("while evaluating the return value of the function passed to buitlins.concatMap"));
hintfmt("while evaluating the return value of the function passed to builtins.concatMap"));
}

View file

@ -6,7 +6,11 @@ libexpr-tests_NAME := libnixexpr-tests
libexpr-tests_DIR := $(d)
libexpr-tests_INSTALL_DIR :=
ifeq ($(INSTALL_UNIT_TESTS), yes)
libexpr-tests_INSTALL_DIR := $(checkbindir)
else
libexpr-tests_INSTALL_DIR :=
endif
libexpr-tests_SOURCES := \
$(wildcard $(d)/*.cc) \

View file

@ -114,7 +114,8 @@ TEST_F(ValuePrintingTests, vLambda)
TEST_F(ValuePrintingTests, vPrimOp)
{
Value vPrimOp;
vPrimOp.mkPrimOp(nullptr);
PrimOp primOp{};
vPrimOp.mkPrimOp(&primOp);
test(vPrimOp, "<PRIMOP>");
}

View file

@ -3,6 +3,7 @@
#include <cassert>
#include <climits>
#include <span>
#include "symbol-table.hh"
#include "value/context.hh"
@ -158,42 +159,60 @@ public:
inline bool isPrimOp() const { return internalType == tPrimOp; };
inline bool isPrimOpApp() const { return internalType == tPrimOpApp; };
/**
* Strings in the evaluator carry a so-called `context` which
* is a list of strings representing store paths. This is to
* allow users to write things like
*
* "--with-freetype2-library=" + freetype + "/lib"
*
* where `freetype` is a derivation (or a source to be copied
* to the store). If we just concatenated the strings without
* keeping track of the referenced store paths, then if the
* string is used as a derivation attribute, the derivation
* will not have the correct dependencies in its inputDrvs and
* inputSrcs.
* The semantics of the context is as follows: when a string
* with context C is used as a derivation attribute, then the
* derivations in C will be added to the inputDrvs of the
* derivation, and the other store paths in C will be added to
* the inputSrcs of the derivations.
* For canonicity, the store paths should be in sorted order.
*/
struct StringWithContext {
const char * c_str;
const char * * context; // must be in sorted order
};
struct Path {
InputAccessor * accessor;
const char * path;
};
struct ClosureThunk {
Env * env;
Expr * expr;
};
struct FunctionApplicationThunk {
Value * left, * right;
};
struct Lambda {
Env * env;
ExprLambda * fun;
};
union
{
NixInt integer;
bool boolean;
/**
* Strings in the evaluator carry a so-called `context` which
* is a list of strings representing store paths. This is to
* allow users to write things like
* "--with-freetype2-library=" + freetype + "/lib"
* where `freetype` is a derivation (or a source to be copied
* to the store). If we just concatenated the strings without
* keeping track of the referenced store paths, then if the
* string is used as a derivation attribute, the derivation
* will not have the correct dependencies in its inputDrvs and
* inputSrcs.
* The semantics of the context is as follows: when a string
* with context C is used as a derivation attribute, then the
* derivations in C will be added to the inputDrvs of the
* derivation, and the other store paths in C will be added to
* the inputSrcs of the derivations.
* For canonicity, the store paths should be in sorted order.
*/
struct {
const char * c_str;
const char * * context; // must be in sorted order
} string;
StringWithContext string;
struct {
InputAccessor * accessor;
const char * path;
} _path;
Path _path;
Bindings * attrs;
struct {
@ -201,21 +220,11 @@ public:
Value * * elems;
} bigList;
Value * smallList[2];
struct {
Env * env;
Expr * expr;
} thunk;
struct {
Value * left, * right;
} app;
struct {
Env * env;
ExprLambda * fun;
} lambda;
ClosureThunk thunk;
FunctionApplicationThunk app;
Lambda lambda;
PrimOp * primOp;
struct {
Value * left, * right;
} primOpApp;
FunctionApplicationThunk primOpApp;
ExternalValueBase * external;
NixFloat fpoint;
};
@ -354,13 +363,7 @@ public:
// Value will be overridden anyways
}
inline void mkPrimOp(PrimOp * p)
{
clearValue();
internalType = tPrimOp;
primOp = p;
}
void mkPrimOp(PrimOp * p);
inline void mkPrimOpApp(Value * l, Value * r)
{
@ -393,7 +396,13 @@ public:
return internalType == tList1 || internalType == tList2 ? smallList : bigList.elems;
}
const Value * const * listElems() const
std::span<Value * const> listItems() const
{
assert(isList());
return std::span<Value * const>(listElems(), listSize());
}
Value * const * listElems() const
{
return internalType == tList1 || internalType == tList2 ? smallList : bigList.elems;
}
@ -412,34 +421,6 @@ public:
*/
bool isTrivial() const;
auto listItems()
{
struct ListIterable
{
typedef Value * const * iterator;
iterator _begin, _end;
iterator begin() const { return _begin; }
iterator end() const { return _end; }
};
assert(isList());
auto begin = listElems();
return ListIterable { begin, begin + listSize() };
}
auto listItems() const
{
struct ConstListIterable
{
typedef const Value * const * iterator;
iterator _begin, _end;
iterator begin() const { return _begin; }
iterator end() const { return _end; }
};
assert(isList());
auto begin = listElems();
return ConstListIterable { begin, begin + listSize() };
}
SourcePath path() const
{
assert(internalType == tPath);

View file

@ -1317,9 +1317,26 @@ void DerivationGoal::handleChildOutput(int fd, std::string_view data)
auto s = handleJSONLogMessage(*json, worker.act, hook->activities, true);
// ensure that logs from a builder using `ssh-ng://` as protocol
// are also available to `nix log`.
if (s && !isWrittenToLog && logSink && (*json)["type"] == resBuildLogLine) {
auto f = (*json)["fields"];
(*logSink)((f.size() > 0 ? f.at(0).get<std::string>() : "") + "\n");
if (s && !isWrittenToLog && logSink) {
const auto type = (*json)["type"];
const auto fields = (*json)["fields"];
if (type == resBuildLogLine) {
(*logSink)((fields.size() > 0 ? fields[0].get<std::string>() : "") + "\n");
} else if (type == resSetPhase && ! fields.is_null()) {
const auto phase = fields[0];
if (! phase.is_null()) {
// nixpkgs' stdenv produces lines in the log to signal
// phase changes.
// We want to get the same lines in case of remote builds.
// The format is:
// @nix { "action": "setPhase", "phase": "$curPhase" }
const auto logLine = nlohmann::json::object({
{"action", "setPhase"},
{"phase", phase}
});
(*logSink)("@nix " + logLine.dump(-1, ' ', false, nlohmann::json::error_handler_t::replace) + "\n");
}
}
}
}
currentHookLine.clear();
@ -1474,6 +1491,7 @@ void DerivationGoal::done(
SingleDrvOutputs builtOutputs,
std::optional<Error> ex)
{
outputLocks.unlock();
buildResult.status = status;
if (ex)
buildResult.errorMsg = fmt("%s", normaltxt(ex->info().msg));

View file

@ -652,8 +652,8 @@ void LocalDerivationGoal::startBuilder()
#if __linux__
/* Create a temporary directory in which we set up the chroot
environment using bind-mounts. We put it in the Nix store
to ensure that we can create hard-links to non-directory
inputs in the fake Nix store in the chroot (see below). */
so that the build outputs can be moved efficiently from the
chroot to their final location. */
chrootRootDir = worker.store.Store::toRealPath(drvPath) + ".chroot";
deletePath(chrootRootDir);

View file

@ -151,11 +151,10 @@ StorePath writeDerivation(Store & store,
/* Read string `s' from stream `str'. */
static void expect(std::istream & str, std::string_view s)
{
char s2[s.size()];
str.read(s2, s.size());
std::string_view s2View { s2, s.size() };
if (s2View != s)
throw FormatError("expected string '%s', got '%s'", s, s2View);
for (auto & c : s) {
if (str.get() != c)
throw FormatError("expected string '%1%'", s);
}
}

View file

@ -330,9 +330,7 @@ typedef std::unordered_map<Path, std::unordered_set<std::string>> UncheckedRoots
static void readProcLink(const std::string & file, UncheckedRoots & roots)
{
/* 64 is the starting buffer size gnu readlink uses... */
auto bufsiz = ssize_t{64};
try_again:
constexpr auto bufsiz = PATH_MAX;
char buf[bufsiz];
auto res = readlink(file.c_str(), buf, bufsiz);
if (res == -1) {
@ -341,10 +339,7 @@ try_again:
throw SysError("reading symlink");
}
if (res == bufsiz) {
if (SSIZE_MAX / 2 < bufsiz)
throw Error("stupidly long symlink");
bufsiz *= 2;
goto try_again;
throw Error("overly long symlink starting with '%1%'", std::string_view(buf, bufsiz));
}
if (res > 0 && buf[0] == '/')
roots[std::string(static_cast<char *>(buf), res)]

View file

@ -1084,6 +1084,16 @@ public:
true, // document default
Xp::ConfigurableImpureEnv
};
Setting<std::string> upgradeNixStorePathUrl{
this,
"https://github.com/NixOS/nixpkgs/raw/master/nixos/modules/installer/tools/nix-fallback-paths.nix",
"upgrade-nix-store-path-url",
R"(
Used by `nix upgrade-nix`, the URL of the file that contains the
store paths of the latest Nix release.
)"
};
};
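A sketch of overriding the new setting from `nix.conf` (the URL is purely illustrative); the `--nix-store-paths-url` flag shown later in this commit is its command-line counterpart:

```
upgrade-nix-store-path-url = https://example.org/custom-fallback-paths.nix
```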

View file

@ -6,7 +6,11 @@ libstore-tests-exe_NAME = libnixstore-tests
libstore-tests-exe_DIR := $(d)
libstore-tests-exe_INSTALL_DIR :=
ifeq ($(INSTALL_UNIT_TESTS), yes)
libstore-tests-exe_INSTALL_DIR := $(checkbindir)
else
libstore-tests-exe_INSTALL_DIR :=
endif
libstore-tests-exe_LIBS = libstore-tests
@ -18,7 +22,11 @@ libstore-tests_NAME = libnixstore-tests
libstore-tests_DIR := $(d)
libstore-tests_INSTALL_DIR :=
ifeq ($(INSTALL_UNIT_TESTS), yes)
libstore-tests_INSTALL_DIR := $(checklibdir)
else
libstore-tests_INSTALL_DIR :=
endif
libstore-tests_SOURCES := $(wildcard $(d)/*.cc)

View file

@ -97,6 +97,8 @@ struct Parser {
virtual void operator()(std::shared_ptr<Parser> & state, Strings & r) = 0;
Parser(std::string_view s) : remaining(s) {};
virtual ~Parser() { };
};
struct ParseQuoted : public Parser {

View file

@ -96,6 +96,14 @@ constexpr std::array<ExperimentalFeatureDetails, numXpFeatures> xpFeatureDetails
[`nix`](@docroot@/command-ref/new-cli/nix.md) for details.
)",
},
{
.tag = Xp::GitHashing,
.name = "git-hashing",
.description = R"(
Allow creating (content-addressed) store objects which are hashed via Git's hashing algorithm.
These store objects will not be understandable by older versions of Nix.
)",
},
{
.tag = Xp::RecursiveNix,
.name = "recursive-nix",

View file

@ -22,6 +22,7 @@ enum struct ExperimentalFeature
Flakes,
FetchTree,
NixCommand,
GitHashing,
RecursiveNix,
NoUrlLiterals,
FetchClosure,

View file

@ -1,9 +1,263 @@
#include "git.hh"
#include <cerrno>
#include <algorithm>
#include <vector>
#include <map>
#include <regex>
#include <strings.h> // for strcasecmp
#include "signals.hh"
#include "config.hh"
#include "hash.hh"
#include "posix-source-accessor.hh"
#include "git.hh"
#include "serialise.hh"
namespace nix::git {
using namespace nix;
using namespace std::string_literals;
std::optional<Mode> decodeMode(RawMode m) {
switch (m) {
case (RawMode) Mode::Directory:
case (RawMode) Mode::Executable:
case (RawMode) Mode::Regular:
case (RawMode) Mode::Symlink:
return (Mode) m;
default:
return std::nullopt;
}
}
static std::string getStringUntil(Source & source, char byte)
{
std::string s;
char n[1];
source(std::string_view { n, 1 });
while (*n != byte) {
s += *n;
source(std::string_view { n, 1 });
}
return s;
}
static std::string getString(Source & source, int n)
{
std::string v;
v.resize(n);
source(v);
return v;
}
void parse(
ParseSink & sink,
const Path & sinkPath,
Source & source,
std::function<SinkHook> hook,
const ExperimentalFeatureSettings & xpSettings)
{
xpSettings.require(Xp::GitHashing);
auto type = getString(source, 5);
if (type == "blob ") {
sink.createRegularFile(sinkPath);
unsigned long long size = std::stoi(getStringUntil(source, 0));
sink.preallocateContents(size);
unsigned long long left = size;
std::string buf;
buf.reserve(65536);
while (left) {
checkInterrupt();
buf.resize(std::min((unsigned long long)buf.capacity(), left));
source(buf);
sink.receiveContents(buf);
left -= buf.size();
}
} else if (type == "tree ") {
unsigned long long size = std::stoi(getStringUntil(source, 0));
unsigned long long left = size;
sink.createDirectory(sinkPath);
while (left) {
std::string perms = getStringUntil(source, ' ');
left -= perms.size();
left -= 1;
RawMode rawMode = std::stoi(perms, 0, 8);
auto modeOpt = decodeMode(rawMode);
if (!modeOpt)
throw Error("Unknown Git permission: %o", perms);
auto mode = std::move(*modeOpt);
std::string name = getStringUntil(source, '\0');
left -= name.size();
left -= 1;
std::string hashs = getString(source, 20);
left -= 20;
Hash hash(htSHA1);
std::copy(hashs.begin(), hashs.end(), hash.hash);
hook(name, TreeEntry {
.mode = mode,
.hash = hash,
});
if (mode == Mode::Executable)
sink.isExecutable();
}
} else throw Error("input doesn't look like a Git object");
}
std::optional<Mode> convertMode(SourceAccessor::Type type)
{
switch (type) {
case SourceAccessor::tSymlink: return Mode::Symlink;
case SourceAccessor::tRegular: return Mode::Regular;
case SourceAccessor::tDirectory: return Mode::Directory;
case SourceAccessor::tMisc: return std::nullopt;
default: abort();
}
}
void restore(ParseSink & sink, Source & source, std::function<RestoreHook> hook)
{
parse(sink, "", source, [&](Path name, TreeEntry entry) {
auto [accessor, from] = hook(entry.hash);
auto stat = accessor->lstat(from);
auto gotOpt = convertMode(stat.type);
if (!gotOpt)
throw Error("file '%s' (git hash %s) has an unsupported type",
from,
entry.hash.to_string(HashFormat::Base16, false));
auto & got = *gotOpt;
if (got != entry.mode)
throw Error("git mode of file '%s' (git hash %s) is %o but expected %o",
from,
entry.hash.to_string(HashFormat::Base16, false),
(RawMode) got,
(RawMode) entry.mode);
copyRecursive(
*accessor, from,
sink, name);
});
}
void dumpBlobPrefix(
uint64_t size, Sink & sink,
const ExperimentalFeatureSettings & xpSettings)
{
xpSettings.require(Xp::GitHashing);
auto s = fmt("blob %d\0"s, std::to_string(size));
sink(s);
}
void dumpTree(const Tree & entries, Sink & sink,
const ExperimentalFeatureSettings & xpSettings)
{
xpSettings.require(Xp::GitHashing);
std::string v1;
for (auto & [name, entry] : entries) {
auto name2 = name;
if (entry.mode == Mode::Directory) {
assert(name2.back() == '/');
name2.pop_back();
}
v1 += fmt("%o %s\0"s, static_cast<RawMode>(entry.mode), name2);
std::copy(entry.hash.hash, entry.hash.hash + entry.hash.hashSize, std::back_inserter(v1));
}
{
auto s = fmt("tree %d\0"s, v1.size());
sink(s);
}
sink(v1);
}
Mode dump(
SourceAccessor & accessor, const CanonPath & path,
Sink & sink,
std::function<DumpHook> hook,
PathFilter & filter,
const ExperimentalFeatureSettings & xpSettings)
{
auto st = accessor.lstat(path);
switch (st.type) {
case SourceAccessor::tRegular:
{
accessor.readFile(path, sink, [&](uint64_t size) {
dumpBlobPrefix(size, sink, xpSettings);
});
return st.isExecutable
? Mode::Executable
: Mode::Regular;
}
case SourceAccessor::tDirectory:
{
Tree entries;
for (auto & [name, _] : accessor.readDirectory(path)) {
auto child = path + name;
if (!filter(child.abs())) continue;
auto entry = hook(child);
auto name2 = name;
if (entry.mode == Mode::Directory)
name2 += "/";
entries.insert_or_assign(std::move(name2), std::move(entry));
}
dumpTree(entries, sink, xpSettings);
return Mode::Directory;
}
case SourceAccessor::tSymlink:
case SourceAccessor::tMisc:
default:
throw Error("file '%1%' has an unsupported type", path);
}
}
TreeEntry dumpHash(
HashType ht,
SourceAccessor & accessor, const CanonPath & path, PathFilter & filter)
{
std::function<DumpHook> hook;
hook = [&](const CanonPath & path) -> TreeEntry {
auto hashSink = HashSink(ht);
auto mode = dump(accessor, path, hashSink, hook, filter);
auto hash = hashSink.finish().first;
return {
.mode = mode,
.hash = hash,
};
};
return hook(path);
}
namespace nix {
namespace git {
std::optional<LsRemoteRefLine> parseLsRemoteLine(std::string_view line)
{
@ -22,4 +276,3 @@ std::optional<LsRemoteRefLine> parseLsRemoteLine(std::string_view line)
}
}
}

View file

@ -5,9 +5,127 @@
#include <string_view>
#include <optional>
namespace nix {
#include "types.hh"
#include "serialise.hh"
#include "hash.hh"
#include "source-accessor.hh"
#include "fs-sink.hh"
namespace git {
namespace nix::git {
using RawMode = uint32_t;
enum struct Mode : RawMode {
Directory = 0040000,
Executable = 0100755,
Regular = 0100644,
Symlink = 0120000,
};
std::optional<Mode> decodeMode(RawMode m);
/**
* An anonymous Git tree object entry (no name part).
*/
struct TreeEntry
{
Mode mode;
Hash hash;
GENERATE_CMP(TreeEntry, me->mode, me->hash);
};
/**
* A Git tree object, fully decoded and stored in memory.
*
* Directory names must end in a `/` for sake of sorting. See
* https://github.com/mirage/irmin/issues/352
*/
using Tree = std::map<std::string, TreeEntry>;
/**
* Callback for processing a child hash with `parse`
*
* The function should
*
* 1. Obtain the file system objects denoted by `gitHash`
*
* 2. Ensure they match `mode`
*
* 3. Feed them into the same sink `parse` was called with
*
* Implementations may seek to memoize resources (bandwidth, storage,
* etc.) for the same Git hash.
*/
using SinkHook = void(const Path & name, TreeEntry entry);
void parse(
ParseSink & sink, const Path & sinkPath,
Source & source,
std::function<SinkHook> hook,
const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
/**
* Assists with writing a `SinkHook` step (2).
*/
std::optional<Mode> convertMode(SourceAccessor::Type type);
/**
* Simplified version of `SinkHook` for `restore`.
*
* Given a `Hash`, return a `SourceAccessor` and `CanonPath` pointing to
* the file system object with that path.
*/
using RestoreHook = std::pair<SourceAccessor *, CanonPath>(Hash);
/**
* Wrapper around `parse` and `RestoreSink`
*/
void restore(ParseSink & sink, Source & source, std::function<RestoreHook> hook);
/**
* Dumps a single file to a sink
*
* @param xpSettings for testing purposes
*/
void dumpBlobPrefix(
uint64_t size, Sink & sink,
const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
/**
* Dumps a representation of a git tree to a sink
*/
void dumpTree(
const Tree & entries, Sink & sink,
const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
/**
* Callback for processing a child with `dump`
*
* The function should return the Git hash and mode of the file at the
* given path in the accessor passed to `dump`.
*
* Note that if the child is a directory, its children must in turn also be
* processed in order to compute this information.
*/
using DumpHook = TreeEntry(const CanonPath & path);
Mode dump(
SourceAccessor & accessor, const CanonPath & path,
Sink & sink,
std::function<DumpHook> hook,
PathFilter & filter = defaultPathFilter,
const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
/**
* Recursively dumps path, hashing as we go.
*
* A smaller wrapper around `dump`.
*/
TreeEntry dumpHash(
HashType ht,
SourceAccessor & accessor, const CanonPath & path,
PathFilter & filter = defaultPathFilter);
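A rough usage sketch of `dumpHash` (assumes the `git-hashing` experimental feature is enabled and that `accessor` is some `SourceAccessor`, e.g. a `PosixSourceAccessor`):

```cpp
// Sketch only: hash a file system tree the way Git would, without
// writing any loose objects to disk.
TreeEntry entry = git::dumpHash(htSHA1, accessor, CanonPath("/some/source/tree"));
auto gitHash = entry.hash.to_string(HashFormat::Base16, false); // 40-char SHA-1
```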
/**
* A line from the output of `git ls-remote --symref`.
@ -16,15 +134,17 @@ namespace git {
*
* - Symbolic references of the form
*
* ref: {target} {reference}
*
* where {target} is itself a reference and {reference} is optional
* ```
* ref: {target} {reference}
* ```
* where {target} is itself a reference and {reference} is optional
*
* - Object references of the form
*
* {target} {reference}
*
* where {target} is a commit id and {reference} is mandatory
* ```
* {target} {reference}
* ```
* where {target} is a commit id and {reference} is mandatory
*/
struct LsRemoteRefLine {
enum struct Kind {
@ -36,8 +156,9 @@ struct LsRemoteRefLine {
std::optional<std::string> reference;
};
/**
* Parse an `LsRemoteRefLine`
*/
std::optional<LsRemoteRefLine> parseLsRemoteLine(std::string_view line);
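A usage sketch, with inputs taken verbatim from the unit tests added later in this commit:

```cpp
auto sym = git::parseLsRemoteLine("ref: refs/head/main HEAD");
// sym->kind == LsRemoteRefLine::Kind::Symbolic,
// sym->target == "refs/head/main", sym->reference == "HEAD"

auto obj = git::parseLsRemoteLine("abc123 refs/head/main");
// obj->kind == LsRemoteRefLine::Kind::Object
```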
}
}

View file

@ -121,4 +121,60 @@ CanonPath MemorySourceAccessor::addFile(CanonPath path, std::string && contents)
return path;
}
using File = MemorySourceAccessor::File;
void MemorySink::createDirectory(const Path & path)
{
auto * f = dst.open(CanonPath{path}, File { File::Directory { } });
if (!f)
throw Error("file '%s' cannot be made because some parent file is not a directory", path);
if (!std::holds_alternative<File::Directory>(f->raw))
throw Error("file '%s' is not a directory", path);
};
void MemorySink::createRegularFile(const Path & path)
{
auto * f = dst.open(CanonPath{path}, File { File::Regular {} });
if (!f)
throw Error("file '%s' cannot be made because some parent file is not a directory", path);
if (!(r = std::get_if<File::Regular>(&f->raw)))
throw Error("file '%s' is not a regular file", path);
}
void MemorySink::closeRegularFile()
{
r = nullptr;
}
void MemorySink::isExecutable()
{
assert(r);
r->executable = true;
}
void MemorySink::preallocateContents(uint64_t len)
{
assert(r);
r->contents.reserve(len);
}
void MemorySink::receiveContents(std::string_view data)
{
assert(r);
r->contents += data;
}
void MemorySink::createSymlink(const Path & path, const std::string & target)
{
auto * f = dst.open(CanonPath{path}, File { File::Symlink { } });
if (!f)
throw Error("file '%s' cannot be made because some parent file is not a directory", path);
if (auto * s = std::get_if<File::Symlink>(&f->raw))
s->target = target;
else
throw Error("file '%s' is not a symbolic link", path);
}
}

View file

@ -1,4 +1,5 @@
#include "source-accessor.hh"
#include "fs-sink.hh"
#include "variant-wrapper.hh"
namespace nix {
@ -71,4 +72,28 @@ struct MemorySourceAccessor : virtual SourceAccessor
CanonPath addFile(CanonPath path, std::string && contents);
};
/**
* Write to a `MemorySourceAccessor` at the given path
*/
struct MemorySink : ParseSink
{
MemorySourceAccessor & dst;
MemorySink(MemorySourceAccessor & dst) : dst(dst) { }
void createDirectory(const Path & path) override;
void createRegularFile(const Path & path) override;
void receiveContents(std::string_view data) override;
void isExecutable() override;
void closeRegularFile() override;
void createSymlink(const Path & path, const std::string & target) override;
void preallocateContents(uint64_t size) override;
private:
MemorySourceAccessor::File::Regular * r;
};
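A sketch of the intended round trip, mirroring the new `both_roundrip` unit test (the encoded input and the globally enabled `git-hashing` feature are assumptions):

```cpp
// Sketch only: replay a serialised Git object into an in-memory file tree.
MemorySourceAccessor files;
MemorySink sink { files };
StringSource in { encodedGitObject };   // hypothetical encoded blob/tree
git::parse(sink, "", in, [](auto & name, auto entry) { /* resolve child objects here */ });
```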
}

View file

@ -74,6 +74,10 @@ void Source::operator () (char * data, size_t len)
}
}
void Source::operator () (std::string_view data)
{
(*this)((char *)data.data(), data.size());
}
void Source::drainInto(Sink & sink)
{

View file

@ -73,6 +73,7 @@ struct Source
* an error if it is not going to be available.
*/
void operator () (char * data, size_t len);
void operator () (std::string_view data);
/**
* Store up to len in the buffer pointed to by data, and

View file

@ -1,33 +1,236 @@
#include "git.hh"
#include <gtest/gtest.h>
#include "git.hh"
#include "memory-source-accessor.hh"
#include "tests/characterization.hh"
namespace nix {
TEST(GitLsRemote, parseSymrefLineWithReference) {
auto line = "ref: refs/head/main HEAD";
auto res = git::parseLsRemoteLine(line);
ASSERT_TRUE(res.has_value());
ASSERT_EQ(res->kind, git::LsRemoteRefLine::Kind::Symbolic);
ASSERT_EQ(res->target, "refs/head/main");
ASSERT_EQ(res->reference, "HEAD");
using namespace git;
class GitTest : public CharacterizationTest
{
Path unitTestData = getUnitTestData() + "/libutil/git";
public:
Path goldenMaster(std::string_view testStem) const override {
return unitTestData + "/" + testStem;
}
TEST(GitLsRemote, parseSymrefLineWithNoReference) {
auto line = "ref: refs/head/main";
auto res = git::parseLsRemoteLine(line);
ASSERT_TRUE(res.has_value());
ASSERT_EQ(res->kind, git::LsRemoteRefLine::Kind::Symbolic);
ASSERT_EQ(res->target, "refs/head/main");
ASSERT_EQ(res->reference, std::nullopt);
}
/**
* We set these in tests rather than the regular globals so we don't have
* to worry about race conditions if the tests run concurrently.
*/
ExperimentalFeatureSettings mockXpSettings;
TEST(GitLsRemote, parseObjectRefLine) {
auto line = "abc123 refs/head/main";
auto res = git::parseLsRemoteLine(line);
ASSERT_TRUE(res.has_value());
ASSERT_EQ(res->kind, git::LsRemoteRefLine::Kind::Object);
ASSERT_EQ(res->target, "abc123");
ASSERT_EQ(res->reference, "refs/head/main");
private:
void SetUp() override
{
mockXpSettings.set("experimental-features", "git-hashing");
}
};
TEST(GitMode, gitMode_directory) {
Mode m = Mode::Directory;
RawMode r = 0040000;
ASSERT_EQ(static_cast<RawMode>(m), r);
ASSERT_EQ(decodeMode(r), std::optional { m });
};
TEST(GitMode, gitMode_executable) {
Mode m = Mode::Executable;
RawMode r = 0100755;
ASSERT_EQ(static_cast<RawMode>(m), r);
ASSERT_EQ(decodeMode(r), std::optional { m });
};
TEST(GitMode, gitMode_regular) {
Mode m = Mode::Regular;
RawMode r = 0100644;
ASSERT_EQ(static_cast<RawMode>(m), r);
ASSERT_EQ(decodeMode(r), std::optional { m });
};
TEST(GitMode, gitMode_symlink) {
Mode m = Mode::Symlink;
RawMode r = 0120000;
ASSERT_EQ(static_cast<RawMode>(m), r);
ASSERT_EQ(decodeMode(r), std::optional { m });
};
TEST_F(GitTest, blob_read) {
readTest("hello-world-blob.bin", [&](const auto & encoded) {
StringSource in { encoded };
StringSink out;
RegularFileSink out2 { out };
parse(out2, "", in, [](auto &, auto) {}, mockXpSettings);
auto expected = readFile(goldenMaster("hello-world.bin"));
ASSERT_EQ(out.s, expected);
});
}
TEST_F(GitTest, blob_write) {
writeTest("hello-world-blob.bin", [&]() {
auto decoded = readFile(goldenMaster("hello-world.bin"));
StringSink s;
dumpBlobPrefix(decoded.size(), s, mockXpSettings);
s(decoded);
return s.s;
});
}
/**
* This data is for "shallow" tree tests. However, we use "real" hashes
* so that we can check our test data in the corresponding functional
* test (`git-hashing/unit-test-data`).
*/
const static Tree tree = {
{
"Foo",
{
.mode = Mode::Regular,
// hello world with special chars from above
.hash = Hash::parseAny("63ddb340119baf8492d2da53af47e8c7cfcd5eb2", htSHA1),
},
},
{
"bAr",
{
.mode = Mode::Executable,
// ditto
.hash = Hash::parseAny("63ddb340119baf8492d2da53af47e8c7cfcd5eb2", htSHA1),
},
},
{
"baZ/",
{
.mode = Mode::Directory,
// Empty directory hash
.hash = Hash::parseAny("4b825dc642cb6eb9a060e54bf8d69288fbee4904", htSHA1),
},
},
};
TEST_F(GitTest, tree_read) {
readTest("tree.bin", [&](const auto & encoded) {
StringSource in { encoded };
NullParseSink out;
Tree got;
parse(out, "", in, [&](auto & name, auto entry) {
auto name2 = name;
if (entry.mode == Mode::Directory)
name2 += '/';
got.insert_or_assign(name2, std::move(entry));
}, mockXpSettings);
ASSERT_EQ(got, tree);
});
}
TEST_F(GitTest, tree_write) {
writeTest("tree.bin", [&]() {
StringSink s;
dumpTree(tree, s, mockXpSettings);
return s.s;
});
}
TEST_F(GitTest, both_roundrip) {
using File = MemorySourceAccessor::File;
MemorySourceAccessor files;
files.root = File::Directory {
.contents {
{
"foo",
File::Regular {
.contents = "hello\n\0\n\tworld!",
},
},
{
"bar",
File::Directory {
.contents = {
{
"baz",
File::Regular {
.executable = true,
.contents = "good day,\n\0\n\tworld!",
},
},
},
},
},
},
};
std::map<Hash, std::string> cas;
std::function<DumpHook> dumpHook;
dumpHook = [&](const CanonPath & path) {
StringSink s;
HashSink hashSink { htSHA1 };
TeeSink s2 { s, hashSink };
auto mode = dump(
files, path, s2, dumpHook,
defaultPathFilter, mockXpSettings);
auto hash = hashSink.finish().first;
cas.insert_or_assign(hash, std::move(s.s));
return TreeEntry {
.mode = mode,
.hash = hash,
};
};
auto root = dumpHook(CanonPath::root);
MemorySourceAccessor files2;
MemorySink sinkFiles2 { files2 };
std::function<void(const Path, const Hash &)> mkSinkHook;
mkSinkHook = [&](const Path prefix, const Hash & hash) {
StringSource in { cas[hash] };
parse(sinkFiles2, prefix, in, [&](const Path & name, const auto & entry) {
mkSinkHook(prefix + "/" + name, entry.hash);
}, mockXpSettings);
};
mkSinkHook("", root.hash);
ASSERT_EQ(files, files2);
}
TEST(GitLsRemote, parseSymrefLineWithReference) {
auto line = "ref: refs/head/main HEAD";
auto res = parseLsRemoteLine(line);
ASSERT_TRUE(res.has_value());
ASSERT_EQ(res->kind, LsRemoteRefLine::Kind::Symbolic);
ASSERT_EQ(res->target, "refs/head/main");
ASSERT_EQ(res->reference, "HEAD");
}
TEST(GitLsRemote, parseSymrefLineWithNoReference) {
auto line = "ref: refs/head/main";
auto res = parseLsRemoteLine(line);
ASSERT_TRUE(res.has_value());
ASSERT_EQ(res->kind, LsRemoteRefLine::Kind::Symbolic);
ASSERT_EQ(res->target, "refs/head/main");
ASSERT_EQ(res->reference, std::nullopt);
}
TEST(GitLsRemote, parseObjectRefLine) {
auto line = "abc123 refs/head/main";
auto res = parseLsRemoteLine(line);
ASSERT_TRUE(res.has_value());
ASSERT_EQ(res->kind, LsRemoteRefLine::Kind::Object);
ASSERT_EQ(res->target, "abc123");
ASSERT_EQ(res->reference, "refs/head/main");
}
}

View file

@ -1,12 +1,16 @@
check: libutil-tests_RUN
check: libutil-tests-exe_RUN
programs += libutil-tests
programs += libutil-tests-exe
libutil-tests-exe_NAME = libnixutil-tests
libutil-tests-exe_DIR := $(d)
libutil-tests-exe_INSTALL_DIR :=
ifeq ($(INSTALL_UNIT_TESTS), yes)
libutil-tests-exe_INSTALL_DIR := $(checkbindir)
else
libutil-tests-exe_INSTALL_DIR :=
endif
libutil-tests-exe_LIBS = libutil-tests
@ -18,7 +22,11 @@ libutil-tests_NAME = libnixutil-tests
libutil-tests_DIR := $(d)
libutil-tests_INSTALL_DIR :=
ifeq ($(INSTALL_UNIT_TESTS), yes)
libutil-tests_INSTALL_DIR := $(checklibdir)
else
libutil-tests_INSTALL_DIR :=
endif
libutil-tests_SOURCES := $(wildcard $(d)/*.cc)
@ -27,3 +35,7 @@ libutil-tests_CXXFLAGS += -I src/libutil
libutil-tests_LIBS = libutil
libutil-tests_LDFLAGS := -lrapidcheck $(GTEST_LIBS)
check: unit-test-data/libutil/git/check-data.sh.test
$(eval $(call run-test,unit-test-data/libutil/git/check-data.sh))

View file

@ -172,7 +172,7 @@ static void loadSourceExpr(EvalState & state, const SourcePath & path, Value & v
directory). */
else if (st.type == InputAccessor::tDirectory) {
auto attrs = state.buildBindings(maxAttrs);
attrs.alloc("_combineChannels").mkList(0);
state.mkList(attrs.alloc("_combineChannels"), 0);
StringSet seen;
getAllExprs(state, path, seen, attrs);
v.mkAttrs(attrs);

View file

@ -1,28 +0,0 @@
R""(
# Description
Copy the regular file *path* to the Nix store, and print the resulting
store path on standard output.
> **Warning**
>
> The resulting store path is not registered as a garbage
> collector root, so it could be deleted before you have a
> chance to register it.
# Examples
Add a regular file to the store:
```console
# echo foo > bar
# nix store add-file ./bar
/nix/store/cbv2s4bsvzjri77s2gb8g8bpcb6dpa8w-bar
# cat /nix/store/cbv2s4bsvzjri77s2gb8g8bpcb6dpa8w-bar
foo
```
)""

View file

@ -5,11 +5,22 @@
using namespace nix;
static FileIngestionMethod parseIngestionMethod(std::string_view input)
{
if (input == "flat") {
return FileIngestionMethod::Flat;
} else if (input == "nar") {
return FileIngestionMethod::Recursive;
} else {
throw UsageError("Unknown hash mode '%s', expect `flat` or `nar`");
}
}
struct CmdAddToStore : MixDryRun, StoreCommand
{
Path path;
std::optional<std::string> namePart;
FileIngestionMethod ingestionMethod;
FileIngestionMethod ingestionMethod = FileIngestionMethod::Recursive;
CmdAddToStore()
{
@ -23,6 +34,23 @@ struct CmdAddToStore : MixDryRun, StoreCommand
.labels = {"name"},
.handler = {&namePart},
});
addFlag({
.longName = "mode",
.shortName = 'n',
.description = R"(
How to compute the hash of the input.
One of:
- `nar` (the default): Serialises the input as an archive (following the [_Nix Archive Format_](https://edolstra.github.io/pubs/phd-thesis.pdf#page=101)) and passes that to the hash function.
- `flat`: Assumes that the input is a single file and directly passes it to the hash function;
)",
.labels = {"hash-mode"},
.handler = {[this](std::string s) {
this->ingestionMethod = parseIngestionMethod(s);
}},
});
}
void run(ref<Store> store) override
@ -62,6 +90,22 @@ struct CmdAddToStore : MixDryRun, StoreCommand
}
};
struct CmdAdd : CmdAddToStore
{
std::string description() override
{
return "Add a file or directory to the Nix store";
}
std::string doc() override
{
return
#include "add.md"
;
}
};
struct CmdAddFile : CmdAddToStore
{
CmdAddFile()
@ -71,36 +115,18 @@ struct CmdAddFile : CmdAddToStore
std::string description() override
{
return "add a regular file to the Nix store";
}
std::string doc() override
{
return
#include "add-file.md"
;
return "Deprecated. Use [`nix store add --mode flat`](@docroot@/command-ref/new-cli/nix3-store-add.md) instead.";
}
};
struct CmdAddPath : CmdAddToStore
{
CmdAddPath()
{
ingestionMethod = FileIngestionMethod::Recursive;
}
std::string description() override
{
return "add a path to the Nix store";
}
std::string doc() override
{
return
#include "add-path.md"
;
return "Deprecated alias to [`nix store add`](@docroot@/command-ref/new-cli/nix3-store-add.md).";
}
};
static auto rCmdAddFile = registerCommand2<CmdAddFile>({"store", "add-file"});
static auto rCmdAddPath = registerCommand2<CmdAddPath>({"store", "add-path"});
static auto rCmdAdd = registerCommand2<CmdAdd>({"store", "add"});
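A rough usage sketch of the consolidated command (store-path output elided):

```console
# nix store add ./dir               # NAR (recursive) hashing, the default
# nix store add --mode flat ./bar   # flat hashing of a single regular file
```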

View file

@ -19,7 +19,7 @@ Add a directory to the store:
# mkdir dir
# echo foo > dir/bar
# nix store add-path ./dir
# nix store add ./dir
/nix/store/6pmjx56pm94n66n4qw1nff0y1crm8nqg-dir
# cat /nix/store/6pmjx56pm94n66n4qw1nff0y1crm8nqg-dir/bar

View file

@ -1,6 +1,7 @@
#include "command.hh"
#include "store-api.hh"
#include "nar-accessor.hh"
#include "progress-bar.hh"
using namespace nix;
@ -13,6 +14,7 @@ struct MixCat : virtual Args
auto st = accessor->lstat(CanonPath(path));
if (st.type != SourceAccessor::Type::tRegular)
throw Error("path '%1%' is not a regular file", path);
stopProgressBar();
writeFull(STDOUT_FILENO, accessor->readFile(CanonPath(path)));
}
};

View file

@ -61,4 +61,12 @@ struct CmdDumpPath2 : Command
}
};
static auto rDumpPath2 = registerCommand2<CmdDumpPath2>({"nar", "dump-path"});
struct CmdNarDumpPath : CmdDumpPath2 {
void run() override {
warn("'nix nar dump-path' is a deprecated alias for 'nix nar pack'");
CmdDumpPath2::run();
}
};
static auto rCmdNarPack = registerCommand2<CmdDumpPath2>({"nar", "pack"});
static auto rCmdNarDumpPath = registerCommand2<CmdNarDumpPath>({"nar", "dump-path"});

View file

@ -5,7 +5,7 @@ R""(
* To serialise directory `foo` as a NAR:
```console
# nix nar dump-path ./foo > foo.nar
# nix nar pack ./foo > foo.nar
```
# Description

View file

@ -3,6 +3,7 @@
#include "shared.hh"
#include "store-api.hh"
#include "thread-pool.hh"
#include "progress-bar.hh"
#include <atomic>
@ -174,6 +175,7 @@ struct CmdKeyGenerateSecret : Command
if (!keyName)
throw UsageError("required argument '--key-name' is missing");
stopProgressBar();
writeFull(STDOUT_FILENO, SecretKey::generate(*keyName).to_string());
}
};
@ -195,6 +197,7 @@ struct CmdKeyConvertSecretToPublic : Command
void run() override
{
SecretKey secretKey(drainFD(STDIN_FILENO));
stopProgressBar();
writeFull(STDOUT_FILENO, secretKey.toPublicKey().to_string());
}
};

View file

@ -14,7 +14,6 @@ using namespace nix;
struct CmdUpgradeNix : MixDryRun, StoreCommand
{
Path profileDir;
std::string storePathsUrl = "https://github.com/NixOS/nixpkgs/raw/master/nixos/modules/installer/tools/nix-fallback-paths.nix";
CmdUpgradeNix()
{
@ -30,7 +29,7 @@ struct CmdUpgradeNix : MixDryRun, StoreCommand
.longName = "nix-store-paths-url",
.description = "The URL of the file that contains the store paths of the latest Nix release.",
.labels = {"url"},
.handler = {&storePathsUrl}
.handler = {&(std::string&) settings.upgradeNixStorePathUrl}
});
}
@ -44,7 +43,7 @@ struct CmdUpgradeNix : MixDryRun, StoreCommand
std::string description() override
{
return "upgrade Nix to the stable version declared in Nixpkgs";
return "upgrade Nix to the latest stable version";
}
std::string doc() override
@ -145,7 +144,7 @@ struct CmdUpgradeNix : MixDryRun, StoreCommand
Activity act(*logger, lvlInfo, actUnknown, "querying latest Nix version");
// FIXME: use nixos.org?
auto req = FileTransferRequest(storePathsUrl);
auto req = FileTransferRequest((std::string&) settings.upgradeNixStorePathUrl);
auto res = getFileTransfer()->download(req);
auto state = std::make_unique<EvalState>(SearchPath{}, store);

View file

@ -16,8 +16,10 @@ R""(
# Description
This command upgrades Nix to the stable version declared in Nixpkgs.
This stable version is defined in [nix-fallback-paths.nix](https://github.com/NixOS/nixpkgs/raw/master/nixos/modules/installer/tools/nix-fallback-paths.nix)
This command upgrades Nix to the stable version.
By default, the latest stable version is defined by Nixpkgs, in
[nix-fallback-paths.nix](https://github.com/NixOS/nixpkgs/raw/master/nixos/modules/installer/tools/nix-fallback-paths.nix)
and updated manually. It may not always be the latest tagged release.
By default, it locates the directory containing the `nix` binary in the `$PATH`