mirror of https://github.com/NixOS/nix synced 2025-07-07 10:11:47 +02:00

Merge branch 'master' into overlayfs-store

This commit is contained in:
John Ericson 2024-03-18 16:41:29 -04:00
commit 18945e3f44
131 changed files with 1958 additions and 4473 deletions

View file

@ -20,7 +20,7 @@ MixEvalArgs::MixEvalArgs()
.description = "Pass the value *expr* as the argument *name* to Nix functions.",
.category = category,
.labels = {"name", "expr"},
.handler = {[&](std::string name, std::string expr) { autoArgs[name] = 'E' + expr; }}
.handler = {[&](std::string name, std::string expr) { autoArgs.insert_or_assign(name, AutoArg{AutoArgExpr(expr)}); }}
});
addFlag({
@ -28,7 +28,24 @@ MixEvalArgs::MixEvalArgs()
.description = "Pass the string *string* as the argument *name* to Nix functions.",
.category = category,
.labels = {"name", "string"},
.handler = {[&](std::string name, std::string s) { autoArgs[name] = 'S' + s; }},
.handler = {[&](std::string name, std::string s) { autoArgs.insert_or_assign(name, AutoArg{AutoArgString(s)}); }},
});
addFlag({
.longName = "arg-from-file",
.description = "Pass the contents of file *path* as the argument *name* to Nix functions.",
.category = category,
.labels = {"name", "path"},
.handler = {[&](std::string name, std::string path) { autoArgs.insert_or_assign(name, AutoArg{AutoArgFile(path)}); }},
.completer = completePath
});
addFlag({
.longName = "arg-from-stdin",
.description = "Pass the contents of stdin as the argument *name* to Nix functions.",
.category = category,
.labels = {"name"},
.handler = {[&](std::string name) { autoArgs.insert_or_assign(name, AutoArg{AutoArgStdin{}}); }},
});
addFlag({
@ -154,13 +171,23 @@ MixEvalArgs::MixEvalArgs()
Bindings * MixEvalArgs::getAutoArgs(EvalState & state)
{
auto res = state.buildBindings(autoArgs.size());
for (auto & i : autoArgs) {
for (auto & [name, arg] : autoArgs) {
auto v = state.allocValue();
if (i.second[0] == 'E')
state.mkThunk_(*v, state.parseExprFromString(i.second.substr(1), state.rootPath(".")));
else
v->mkString(((std::string_view) i.second).substr(1));
res.insert(state.symbols.create(i.first), v);
std::visit(overloaded {
[&](const AutoArgExpr & arg) {
state.mkThunk_(*v, state.parseExprFromString(arg.expr, state.rootPath(".")));
},
[&](const AutoArgString & arg) {
v->mkString(arg.s);
},
[&](const AutoArgFile & arg) {
v->mkString(readFile(arg.path));
},
[&](const AutoArgStdin & arg) {
v->mkString(readFile(STDIN_FILENO));
}
}, arg);
res.insert(state.symbols.create(name), v);
}
return res.finish();
}
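The `std::visit` calls above dispatch on the new `AutoArg` variant through the usual C++17 `overloaded` helper, which Nix presumably provides in a shared utility header. A minimal self-contained sketch of that idiom, with illustrative stand-in types rather than the real `AutoArg` alternatives:

```cpp
// Minimal sketch of the `overloaded` visitor idiom used above (stand-in types only).
#include <iostream>
#include <string>
#include <variant>

template<class... Ts> struct overloaded : Ts... { using Ts::operator()...; };
template<class... Ts> overloaded(Ts...) -> overloaded<Ts...>; // deduction guide (pre-C++20)

struct ArgExpr   { std::string expr; };
struct ArgString { std::string s; };
using Arg = std::variant<ArgExpr, ArgString>;

int main()
{
    Arg a = ArgString{"hello"};
    std::visit(overloaded {
        [](const ArgExpr & e)   { std::cout << "expr: "   << e.expr << "\n"; },
        [](const ArgString & s) { std::cout << "string: " << s.s    << "\n"; },
    }, a);
}
```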

View file

@ -6,6 +6,8 @@
#include "common-args.hh"
#include "search-path.hh"
#include <filesystem>
namespace nix {
class Store;
@ -26,7 +28,14 @@ struct MixEvalArgs : virtual Args, virtual MixRepair
std::optional<std::string> evalStoreUrl;
private:
std::map<std::string, std::string> autoArgs;
struct AutoArgExpr { std::string expr; };
struct AutoArgString { std::string s; };
struct AutoArgFile { std::filesystem::path path; };
struct AutoArgStdin { };
using AutoArg = std::variant<AutoArgExpr, AutoArgString, AutoArgFile, AutoArgStdin>;
std::map<std::string, AutoArg> autoArgs;
};
SourcePath lookupFileArg(EvalState & state, std::string_view s, const Path * baseDir = nullptr);

View file

@ -21,6 +21,7 @@
#include "url.hh"
#include "registry.hh"
#include "build-result.hh"
#include "fs-input-accessor.hh"
#include <regex>
#include <queue>
@ -146,7 +147,7 @@ MixFlakeOptions::MixFlakeOptions()
.category = category,
.labels = {"flake-lock-path"},
.handler = {[&](std::string lockFilePath) {
lockFlags.referenceLockFilePath = lockFilePath;
lockFlags.referenceLockFilePath = getUnfilteredRootPath(CanonPath(absPath(lockFilePath)));
}},
.completer = completePath
});
@ -442,10 +443,10 @@ ref<eval_cache::EvalCache> openEvalCache(
EvalState & state,
std::shared_ptr<flake::LockedFlake> lockedFlake)
{
auto fingerprint = lockedFlake->getFingerprint();
auto fingerprint = lockedFlake->getFingerprint(state.store);
return make_ref<nix::eval_cache::EvalCache>(
evalSettings.useEvalCache && evalSettings.pureEval
? std::optional { std::cref(fingerprint) }
? fingerprint
: std::nullopt,
state,
[&state, lockedFlake]()

View file

@ -123,7 +123,8 @@ struct NixRepl
.force = true,
.derivationPaths = true,
.maxDepth = maxDepth,
.prettyIndent = 2
.prettyIndent = 2,
.errors = ErrorPrintBehavior::ThrowTopLevel,
});
}
};
@ -336,13 +337,7 @@ ReplExitStatus NixRepl::mainLoop()
printMsg(lvlError, e.msg());
}
} catch (EvalError & e) {
// in debugger mode, an EvalError should trigger another repl session.
// when that session returns the exception will land here. No need to show it again;
// show the error for this repl session instead.
if (state->debugRepl && !state->debugTraces.empty())
showDebugTrace(std::cout, state->positions, state->debugTraces.front());
else
printMsg(lvlError, e.msg());
printMsg(lvlError, e.msg());
} catch (Error & e) {
printMsg(lvlError, e.msg());
} catch (Interrupted & e) {
@ -548,6 +543,7 @@ ProcessLineResult NixRepl::processLine(std::string line)
<< " :l, :load <path> Load Nix expression and add it to scope\n"
<< " :lf, :load-flake <ref> Load Nix flake and add it to scope\n"
<< " :p, :print <expr> Evaluate and print expression recursively\n"
<< " Strings are printed directly, without escaping.\n"
<< " :q, :quit Exit nix-repl\n"
<< " :r, :reload Reload all files\n"
<< " :sh <expr> Build dependencies of derivation, then start\n"
@ -755,7 +751,11 @@ ProcessLineResult NixRepl::processLine(std::string line)
else if (command == ":p" || command == ":print") {
Value v;
evalString(arg, v);
printValue(std::cout, v);
if (v.type() == nString) {
std::cout << v.string_view();
} else {
printValue(std::cout, v);
}
std::cout << std::endl;
}

View file

@ -21,11 +21,24 @@ struct EvalSettings : Config
Setting<Strings> nixPath{
this, getDefaultNixPath(), "nix-path",
R"(
List of directories to be searched for `<...>` file references
List of search paths to use for [lookup path](@docroot@/language/constructs/lookup-path.md) resolution.
This setting determines the value of
[`builtins.nixPath`](@docroot@/language/builtin-constants.md#builtins-nixPath) and can be used with [`builtins.findFile`](@docroot@/language/builtin-constants.md#builtins-findFile).
In particular, outside of [pure evaluation mode](#conf-pure-eval), this determines the value of
[`builtins.nixPath`](@docroot@/language/builtin-constants.md#builtins-nixPath).
)"};
The default value is
```
$HOME/.nix-defexpr/channels
nixpkgs=$NIX_STATE_DIR/profiles/per-user/root/channels/nixpkgs
$NIX_STATE_DIR/profiles/per-user/root/channels
```
It can be overridden with the [`NIX_PATH` environment variable](@docroot@/command-ref/env-common.md#env-NIX_PATH) or the [`-I` command line option](@docroot@/command-ref/opt-common.md#opt-I).
> **Note**
>
> If [pure evaluation](#conf-pure-eval) is enabled, `nixPath` evaluates to the empty list `[ ]`.
)", {}, false};
Setting<std::string> currentSystem{
this, "", "eval-system",
@ -55,8 +68,6 @@ struct EvalSettings : Config
[`builtins.nixPath`](@docroot@/language/builtin-constants.md#builtins-nixPath),
or to URIs outside of
[`allowed-uris`](@docroot@/command-ref/conf-file.md#conf-allowed-uris).
Also the default value for [`nix-path`](#conf-nix-path) is ignored, such that only explicitly set search path entries are taken into account.
)"};
Setting<bool> pureEval{this, false, "pure-eval",

View file

@ -762,10 +762,24 @@ std::unique_ptr<ValMap> mapStaticEnvBindings(const SymbolTable & st, const Stati
return vm;
}
/**
* Sets `inDebugger` to true on construction and false on destruction.
*/
class DebuggerGuard {
bool & inDebugger;
public:
DebuggerGuard(bool & inDebugger) : inDebugger(inDebugger) {
inDebugger = true;
}
~DebuggerGuard() {
inDebugger = false;
}
};
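A standalone toy showing why the flag is managed through an RAII guard rather than set and reset by hand: the destructor runs even when the debugger repl exits by throwing, so `inDebugger` cannot get stuck at `true` on an error path. This is an illustration, not Nix code:

```cpp
// Toy example of the RAII pattern above: the flag is reset on every exit path.
#include <cassert>
#include <stdexcept>

struct Guard {
    bool & flag;
    explicit Guard(bool & f) : flag(f) { flag = true; }
    ~Guard() { flag = false; }
};

int main()
{
    bool inDebugger = false;
    try {
        Guard guard(inDebugger);
        assert(inDebugger);
        throw std::runtime_error("debugger session aborted"); // simulated error path
    } catch (const std::exception &) {
        // ~Guard already ran while unwinding
    }
    assert(!inDebugger);
}
```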
void EvalState::runDebugRepl(const Error * error, const Env & env, const Expr & expr)
{
// double check we've got the debugRepl function pointer.
if (!debugRepl)
// Make sure we have a debugger to run and we're not already in a debugger.
if (!debugRepl || inDebugger)
return;
auto dts =
@ -792,6 +806,7 @@ void EvalState::runDebugRepl(const Error * error, const Env & env, const Expr &
auto se = getStaticEnv(expr);
if (se) {
auto vm = mapStaticEnvBindings(symbols, *se.get(), env);
DebuggerGuard _guard(inDebugger);
auto exitStatus = (debugRepl)(ref<EvalState>(shared_from_this()), *vm);
switch (exitStatus) {
case ReplExitStatus::QuitAll:
@ -934,12 +949,11 @@ void EvalState::mkThunk_(Value & v, Expr * expr)
void EvalState::mkPos(Value & v, PosIdx p)
{
auto pos = positions[p];
if (auto path = std::get_if<SourcePath>(&pos.origin)) {
auto origin = positions.originOf(p);
if (auto path = std::get_if<SourcePath>(&origin)) {
auto attrs = buildBindings(3);
attrs.alloc(sFile).mkString(path->path.abs());
attrs.alloc(sLine).mkInt(pos.line);
attrs.alloc(sColumn).mkInt(pos.column);
makePositionThunks(*this, p, attrs.alloc(sLine), attrs.alloc(sColumn));
v.mkAttrs(attrs);
} else
v.mkNull();
@ -2762,9 +2776,12 @@ Expr * EvalState::parseExprFromFile(const SourcePath & path, std::shared_ptr<Sta
Expr * EvalState::parseExprFromString(std::string s_, const SourcePath & basePath, std::shared_ptr<StaticEnv> & staticEnv)
{
auto s = make_ref<std::string>(std::move(s_));
s->append("\0\0", 2);
return parse(s->data(), s->size(), Pos::String{.source = s}, basePath, staticEnv);
// NOTE this method (and parseStdin) must take care to *fully copy* their input
// into their respective Pos::Origin until the parser stops overwriting its input
// data.
auto s = make_ref<std::string>(s_);
s_.append("\0\0", 2);
return parse(s_.data(), s_.size(), Pos::String{.source = s}, basePath, staticEnv);
}
@ -2776,12 +2793,15 @@ Expr * EvalState::parseExprFromString(std::string s, const SourcePath & basePath
Expr * EvalState::parseStdin()
{
// NOTE this method (and parseExprFromString) must take care to *fully copy* their
// input into their respective Pos::Origin until the parser stops overwriting its
// input data.
//Activity act(*logger, lvlTalkative, "parsing standard input");
auto buffer = drainFD(0);
// drainFD should have left some extra space for terminators
buffer.append("\0\0", 2);
auto s = make_ref<std::string>(std::move(buffer));
return parse(s->data(), s->size(), Pos::Stdin{.source = s}, rootPath("."), staticBaseEnv);
auto s = make_ref<std::string>(buffer);
return parse(buffer.data(), buffer.size(), Pos::Stdin{.source = s}, rootPath("."), staticBaseEnv);
}

View file

@ -153,6 +153,7 @@ struct DebugTrace {
bool isError;
};
class EvalState : public std::enable_shared_from_this<EvalState>
{
public:
@ -222,6 +223,7 @@ public:
*/
ReplExitStatus (* debugRepl)(ref<EvalState> es, const ValMap & extraEnv);
bool debugStop;
bool inDebugger = false;
int trylevel;
std::list<DebugTrace> debugTraces;
std::map<const Expr*, const std::shared_ptr<const StaticEnv>> exprEnvs;

View file

@ -139,7 +139,7 @@ static FlakeInput parseFlakeInput(EvalState & state,
attrs.emplace(state.symbols[attr.name], Explicit<bool> { attr.value->boolean });
break;
case nInt:
attrs.emplace(state.symbols[attr.name], (long unsigned int)attr.value->integer);
attrs.emplace(state.symbols[attr.name], (long unsigned int) attr.value->integer);
break;
default:
if (attr.name == state.symbols.create("publicKeys")) {
@ -202,43 +202,27 @@ static std::map<FlakeId, FlakeInput> parseFlakeInputs(
return inputs;
}
static Flake getFlake(
static Flake readFlake(
EvalState & state,
const FlakeRef & originalRef,
bool allowLookup,
FlakeCache & flakeCache,
InputPath lockRootPath)
const FlakeRef & resolvedRef,
const FlakeRef & lockedRef,
const SourcePath & rootDir,
const InputPath & lockRootPath)
{
auto [storePath, resolvedRef, lockedRef] = fetchOrSubstituteTree(
state, originalRef, allowLookup, flakeCache);
auto flakePath = rootDir / CanonPath(resolvedRef.subdir) / "flake.nix";
// We need to guard against symlink attacks, but before we start doing
// filesystem operations we should make sure there's a flake.nix in the
// first place.
auto unsafeFlakeDir = state.store->toRealPath(storePath) + "/" + lockedRef.subdir;
auto unsafeFlakeFile = unsafeFlakeDir + "/flake.nix";
if (!pathExists(unsafeFlakeFile))
throw Error("source tree referenced by '%s' does not contain a '%s/flake.nix' file", lockedRef, lockedRef.subdir);
// Guard against symlink attacks.
auto flakeDir = canonPath(unsafeFlakeDir, true);
auto flakeFile = canonPath(flakeDir + "/flake.nix", true);
if (!isInDir(flakeFile, state.store->toRealPath(storePath)))
throw Error("'flake.nix' file of flake '%s' escapes from '%s'",
lockedRef, state.store->printStorePath(storePath));
// NOTE evalFile forces vInfo to be an attrset because mustBeTrivial is true.
Value vInfo;
state.evalFile(flakePath, vInfo, true);
Flake flake {
.originalRef = originalRef,
.resolvedRef = resolvedRef,
.lockedRef = lockedRef,
.storePath = storePath,
.path = flakePath,
};
Value vInfo;
state.evalFile(state.rootPath(CanonPath(flakeFile)), vInfo, true); // FIXME: symlink attack
expectType(state, nAttrs, vInfo, state.positions.add({state.rootPath(CanonPath(flakeFile))}, 1, 1));
if (auto description = vInfo.attrs->get(state.sDescription)) {
expectType(state, nString, *description->value, description->pos);
flake.description = description->value->c_str();
@ -247,7 +231,7 @@ static Flake getFlake(
auto sInputs = state.symbols.create("inputs");
if (auto inputs = vInfo.attrs->get(sInputs))
flake.inputs = parseFlakeInputs(state, inputs->value, inputs->pos, flakeDir, lockRootPath);
flake.inputs = parseFlakeInputs(state, inputs->value, inputs->pos, flakePath.parent().path.abs(), lockRootPath); // FIXME
auto sOutputs = state.symbols.create("outputs");
@ -264,7 +248,7 @@ static Flake getFlake(
}
} else
throw Error("flake '%s' lacks attribute 'outputs'", lockedRef);
throw Error("flake '%s' lacks attribute 'outputs'", resolvedRef);
auto sNixConfig = state.symbols.create("nixConfig");
@ -281,7 +265,7 @@ static Flake getFlake(
NixStringContext emptyContext = {};
flake.config.settings.emplace(
state.symbols[setting.name],
state.coerceToString(setting.pos, *setting.value, emptyContext, "", false, true, true) .toOwned());
state.coerceToString(setting.pos, *setting.value, emptyContext, "", false, true, true).toOwned());
}
else if (setting.value->type() == nInt)
flake.config.settings.emplace(
@ -313,12 +297,25 @@ static Flake getFlake(
attr.name != sOutputs &&
attr.name != sNixConfig)
throw Error("flake '%s' has an unsupported attribute '%s', at %s",
lockedRef, state.symbols[attr.name], state.positions[attr.pos]);
resolvedRef, state.symbols[attr.name], state.positions[attr.pos]);
}
return flake;
}
static Flake getFlake(
EvalState & state,
const FlakeRef & originalRef,
bool allowLookup,
FlakeCache & flakeCache,
InputPath lockRootPath)
{
auto [storePath, resolvedRef, lockedRef] = fetchOrSubstituteTree(
state, originalRef, allowLookup, flakeCache);
return readFlake(state, originalRef, resolvedRef, lockedRef, state.rootPath(state.store->toRealPath(storePath)), lockRootPath);
}
Flake getFlake(EvalState & state, const FlakeRef & originalRef, bool allowLookup, FlakeCache & flakeCache)
{
return getFlake(state, originalRef, allowLookup, flakeCache, {});
@ -330,6 +327,13 @@ Flake getFlake(EvalState & state, const FlakeRef & originalRef, bool allowLookup
return getFlake(state, originalRef, allowLookup, flakeCache);
}
static LockFile readLockFile(const SourcePath & lockFilePath)
{
return lockFilePath.pathExists()
? LockFile(lockFilePath.readFile(), fmt("%s", lockFilePath))
: LockFile();
}
/* Compute an in-memory lock file for the specified top-level flake,
and optionally write it to file, if the flake is writable. */
LockedFlake lockFlake(
@ -355,17 +359,16 @@ LockedFlake lockFlake(
throw Error("reference lock file was provided, but the `allow-dirty` setting is set to false");
}
// FIXME: symlink attack
auto oldLockFile = LockFile::read(
auto oldLockFile = readLockFile(
lockFlags.referenceLockFilePath.value_or(
state.store->toRealPath(flake.storePath) + "/" + flake.lockedRef.subdir + "/flake.lock"));
flake.lockFilePath()));
debug("old lock file: %s", oldLockFile);
std::map<InputPath, FlakeInput> overrides;
std::set<InputPath> explicitCliOverrides;
std::set<InputPath> overridesUsed, updatesUsed;
std::map<ref<Node>, StorePath> nodePaths;
std::map<ref<Node>, SourcePath> nodePaths;
for (auto & i : lockFlags.inputOverrides) {
overrides.insert_or_assign(i.first, FlakeInput { .ref = i.second });
@ -538,7 +541,7 @@ LockedFlake lockFlake(
if (mustRefetch) {
auto inputFlake = getFlake(state, oldLock->lockedRef, false, flakeCache, inputPath);
nodePaths.emplace(childNode, inputFlake.storePath);
nodePaths.emplace(childNode, inputFlake.path.parent());
computeLocks(inputFlake.inputs, childNode, inputPath, oldLock, lockRootPath, parentPath, false);
} else {
computeLocks(fakeInputs, childNode, inputPath, oldLock, lockRootPath, parentPath, true);
@ -587,13 +590,12 @@ LockedFlake lockFlake(
flake. Also, unless we already have this flake
in the top-level lock file, use this flake's
own lock file. */
nodePaths.emplace(childNode, inputFlake.storePath);
nodePaths.emplace(childNode, inputFlake.path.parent());
computeLocks(
inputFlake.inputs, childNode, inputPath,
oldLock
? std::dynamic_pointer_cast<const Node>(oldLock)
: LockFile::read(
state.store->toRealPath(inputFlake.storePath) + "/" + inputFlake.lockedRef.subdir + "/flake.lock").root.get_ptr(),
: readLockFile(inputFlake.lockFilePath()).root.get_ptr(),
oldLock ? lockRootPath : inputPath,
localPath,
false);
@ -605,7 +607,7 @@ LockedFlake lockFlake(
auto childNode = make_ref<LockedNode>(lockedRef, ref, false);
nodePaths.emplace(childNode, storePath);
nodePaths.emplace(childNode, state.rootPath(state.store->toRealPath(storePath)));
node->inputs.insert_or_assign(id, childNode);
}
@ -619,9 +621,9 @@ LockedFlake lockFlake(
};
// Bring in the current ref for relative path resolution if we have it
auto parentPath = canonPath(state.store->toRealPath(flake.storePath) + "/" + flake.lockedRef.subdir, true);
auto parentPath = flake.path.parent().path.abs();
nodePaths.emplace(newLockFile.root, flake.storePath);
nodePaths.emplace(newLockFile.root, flake.path.parent());
computeLocks(
flake.inputs,
@ -746,13 +748,15 @@ void callFlake(EvalState & state,
auto overrides = state.buildBindings(lockedFlake.nodePaths.size());
for (auto & [node, storePath] : lockedFlake.nodePaths) {
for (auto & [node, sourcePath] : lockedFlake.nodePaths) {
auto override = state.buildBindings(2);
auto & vSourceInfo = override.alloc(state.symbols.create("sourceInfo"));
auto lockedNode = node.dynamic_pointer_cast<const LockedNode>();
auto [storePath, subdir] = state.store->toStorePath(sourcePath.path.abs());
emitTreeAttrs(
state,
storePath,
@ -766,7 +770,7 @@ void callFlake(EvalState & state,
override
.alloc(state.symbols.create("dir"))
.mkString(lockedNode ? lockedNode->lockedRef.subdir : lockedFlake.flake.lockedRef.subdir);
.mkString(CanonPath(subdir).rel());
overrides.alloc(state.symbols.create(key->second)).mkAttrs(override);
}
@ -921,18 +925,17 @@ static RegisterPrimOp r4({
}
Fingerprint LockedFlake::getFingerprint() const
std::optional<Fingerprint> LockedFlake::getFingerprint(ref<Store> store) const
{
if (lockFile.isUnlocked()) return std::nullopt;
auto fingerprint = flake.lockedRef.input.getFingerprint(store);
if (!fingerprint) return std::nullopt;
// FIXME: as an optimization, if the flake contains a lock file
// and we haven't changed it, then it's sufficient to use
// flake.sourceInfo.storePath for the fingerprint.
return hashString(HashAlgorithm::SHA256,
fmt("%s;%s;%d;%d;%s",
flake.storePath.to_string(),
flake.lockedRef.subdir,
flake.lockedRef.input.getRevCount().value_or(0),
flake.lockedRef.input.getLastModified().value_or(0),
lockFile));
return hashString(HashAlgorithm::SHA256, fmt("%s;%s;%s", *fingerprint, flake.lockedRef.subdir, lockFile));
}
Flake::~Flake() { }

View file

@ -77,18 +77,27 @@ struct Flake
* the specific local store result of invoking the fetcher
*/
FlakeRef lockedRef;
/**
* The path of `flake.nix`.
*/
SourcePath path;
/**
* pretend that 'lockedRef' is dirty
*/
bool forceDirty = false;
std::optional<std::string> description;
StorePath storePath;
FlakeInputs inputs;
/**
* 'nixConfig' attribute
*/
ConfigFile config;
~Flake();
SourcePath lockFilePath()
{
return path.parent() / "flake.lock";
}
};
Flake getFlake(EvalState & state, const FlakeRef & flakeRef, bool allowLookup);
@ -104,13 +113,13 @@ struct LockedFlake
LockFile lockFile;
/**
* Store paths of nodes that have been fetched in
* Source tree accessors for nodes that have been fetched in
* lockFlake(); in particular, the root node and the overriden
* inputs.
*/
std::map<ref<Node>, StorePath> nodePaths;
std::map<ref<Node>, SourcePath> nodePaths;
Fingerprint getFingerprint() const;
std::optional<Fingerprint> getFingerprint(ref<Store> store) const;
};
struct LockFlags
@ -165,7 +174,7 @@ struct LockFlags
/**
* The path to a lock file to read instead of the `flake.lock` file in the top-level flake
*/
std::optional<std::string> referenceLockFilePath;
std::optional<SourcePath> referenceLockFilePath;
/**
* The path to a lock file to write to instead of the `flake.lock` file in the top-level flake

View file

@ -102,6 +102,19 @@ std::pair<FlakeRef, std::string> parsePathFlakeRefWithFragment(
if (isFlake) {
if (!S_ISDIR(lstat(path).st_mode)) {
if (baseNameOf(path) == "flake.nix") {
// Be gentle with people who accidentally write `/foo/bar/flake.nix` instead of `/foo/bar`
warn(
"Path '%s' should point at the directory containing the 'flake.nix' file, not the file itself. "
"Pretending that you meant '%s'"
, path, dirOf(path));
path = dirOf(path);
} else {
throw BadURL("path '%s' is not a flake (because it's not a directory)", path);
}
}
if (!allowMissing && !pathExists(path + "/flake.nix")){
notice("path '%s' does not contain a 'flake.nix', searching up",path);
@ -124,9 +137,6 @@ std::pair<FlakeRef, std::string> parsePathFlakeRefWithFragment(
throw BadURL("could not find a flake.nix file");
}
if (!S_ISDIR(lstat(path).st_mode))
throw BadURL("path '%s' is not a flake (because it's not a directory)", path);
if (!allowMissing && !pathExists(path + "/flake.nix"))
throw BadURL("path '%s' is not a flake (because it doesn't contain a 'flake.nix' file)", path);
@ -274,7 +284,7 @@ FlakeRef FlakeRef::fromAttrs(const fetchers::Attrs & attrs)
std::pair<StorePath, FlakeRef> FlakeRef::fetchTree(ref<Store> store) const
{
auto [storePath, lockedInput] = input.fetch(store);
auto [storePath, lockedInput] = input.fetchToStore(store);
return {std::move(storePath), FlakeRef(std::move(lockedInput), subdir)};
}

View file

@ -84,8 +84,10 @@ std::shared_ptr<Node> LockFile::findInput(const InputPath & path)
return doFind(root, path, visited);
}
LockFile::LockFile(const nlohmann::json & json, const Path & path)
LockFile::LockFile(std::string_view contents, std::string_view path)
{
auto json = nlohmann::json::parse(contents);
auto version = json.value("version", 0);
if (version < 5 || version > 7)
throw Error("lock file '%s' has unsupported version %d", path, version);
@ -203,12 +205,6 @@ std::pair<std::string, LockFile::KeyMap> LockFile::to_string() const
return {json.dump(2), std::move(nodeKeys)};
}
LockFile LockFile::read(const Path & path)
{
if (!pathExists(path)) return LockFile();
return LockFile(nlohmann::json::parse(readFile(path)), path);
}
std::ostream & operator <<(std::ostream & stream, const LockFile & lockFile)
{
stream << lockFile.toJSON().first.dump(2);

View file

@ -55,7 +55,7 @@ struct LockFile
ref<Node> root = make_ref<Node>();
LockFile() {};
LockFile(const nlohmann::json & json, const Path & path);
LockFile(std::string_view contents, std::string_view path);
typedef std::map<ref<const Node>, std::string> KeyMap;
@ -63,8 +63,6 @@ struct LockFile
std::pair<std::string, KeyMap> to_string() const;
static LockFile read(const Path & path);
/**
* Check whether this lock file has any unlocked inputs. If so,
* return one.

View file

@ -33,33 +33,16 @@ namespace nix {
static void initLoc(YYLTYPE * loc)
{
loc->first_line = loc->last_line = 1;
loc->first_column = loc->last_column = 1;
loc->first_line = loc->last_line = 0;
loc->first_column = loc->last_column = 0;
}
static void adjustLoc(YYLTYPE * loc, const char * s, size_t len)
{
loc->stash();
loc->first_line = loc->last_line;
loc->first_column = loc->last_column;
for (size_t i = 0; i < len; i++) {
switch (*s++) {
case '\r':
if (*s == '\n') { /* cr/lf */
i++;
s++;
}
/* fall through */
case '\n':
++loc->last_line;
loc->last_column = 1;
break;
default:
++loc->last_column;
}
}
loc->last_column += len;
}

View file

@ -149,7 +149,10 @@ void ExprLambda::show(const SymbolTable & symbols, std::ostream & str) const
if (hasFormals()) {
str << "{ ";
bool first = true;
for (auto & i : formals->formals) {
// the natural Symbol ordering is by creation time, which can lead to the
// same expression being printed in two different ways depending on its
// context. always use lexicographic ordering to avoid this.
for (auto & i : formals->lexicographicOrder(symbols)) {
if (first) first = false; else str << ", ";
str << symbols[i.name];
if (i.def) {
@ -580,6 +583,39 @@ std::string ExprLambda::showNamePos(const EvalState & state) const
/* Position table. */
Pos PosTable::operator[](PosIdx p) const
{
auto origin = resolve(p);
if (!origin)
return {};
const auto offset = origin->offsetOf(p);
Pos result{0, 0, origin->origin};
auto lines = this->lines.lock();
auto linesForInput = (*lines)[origin->offset];
if (linesForInput.empty()) {
auto source = result.getSource().value_or("");
const char * begin = source.data();
for (Pos::LinesIterator it(source), end; it != end; it++)
linesForInput.push_back(it->data() - begin);
if (linesForInput.empty())
linesForInput.push_back(0);
}
// as above: the first line starts at byte 0 and is always present
auto lineStartOffset = std::prev(
std::upper_bound(linesForInput.begin(), linesForInput.end(), offset));
result.line = 1 + (lineStartOffset - linesForInput.begin());
result.column = 1 + (offset - *lineStartOffset);
return result;
}
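The same lookup as a standalone sketch: collect the byte offset of every line start (the first line always starts at byte 0), find the line for a position offset with `upper_bound`/`prev`, and take the column as the distance from that line start. Names below are illustrative, not from the diff:

```cpp
// Standalone sketch of the offset -> (line, column) computation above.
#include <algorithm>
#include <cassert>
#include <cstdint>
#include <string_view>
#include <utility>
#include <vector>

static std::pair<uint32_t, uint32_t> lineCol(std::string_view source, uint32_t offset)
{
    std::vector<uint32_t> lineStarts{0};                 // line 1 always starts at byte 0
    for (uint32_t i = 0; i < source.size(); ++i)
        if (source[i] == '\n')
            lineStarts.push_back(i + 1);
    // last line start <= offset, i.e. prev(first line start > offset)
    auto it = std::prev(std::upper_bound(lineStarts.begin(), lineStarts.end(), offset));
    return {uint32_t(1 + (it - lineStarts.begin())),     // 1-based line
            uint32_t(1 + (offset - *it))};               // 1-based column
}

int main()
{
    auto [line, column] = lineCol("ab\ncd", 3);          // offset 3 is the 'c'
    assert(line == 2 && column == 1);
}
```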
/* Symbol table. */
size_t SymbolTable::totalSize() const

View file

@ -7,7 +7,6 @@
#include "value.hh"
#include "symbol-table.hh"
#include "error.hh"
#include "chunked-vector.hh"
#include "position.hh"
#include "eval-error.hh"
#include "pos-idx.hh"

View file

@ -24,20 +24,15 @@ struct ParserLocation
int last_line, last_column;
// backup to recover from yyless(0)
int stashed_first_line, stashed_first_column;
int stashed_last_line, stashed_last_column;
int stashed_first_column, stashed_last_column;
void stash() {
stashed_first_line = first_line;
stashed_first_column = first_column;
stashed_last_line = last_line;
stashed_last_column = last_column;
}
void unstash() {
first_line = stashed_first_line;
first_column = stashed_first_column;
last_line = stashed_last_line;
last_column = stashed_last_column;
}
};
@ -276,7 +271,7 @@ inline Expr * ParserState::stripIndentation(const PosIdx pos,
inline PosIdx ParserState::at(const ParserLocation & loc)
{
return positions.add(origin, loc.first_line, loc.first_column);
return positions.add(origin, loc.first_column);
}
}

View file

@ -64,6 +64,10 @@ using namespace nix;
void yyerror(YYLTYPE * loc, yyscan_t scanner, ParserState * state, const char * error)
{
if (std::string_view(error).starts_with("syntax error, unexpected end of file")) {
loc->first_column = loc->last_column;
loc->first_line = loc->last_line;
}
throw ParseError({
.msg = HintFmt(error),
.pos = state->positions[state->at(*loc)]
@ -87,6 +91,7 @@ void yyerror(YYLTYPE * loc, yyscan_t scanner, ParserState * state, const char *
nix::StringToken uri;
nix::StringToken str;
std::vector<nix::AttrName> * attrNames;
std::vector<std::pair<nix::AttrName, nix::PosIdx>> * inheritAttrs;
std::vector<std::pair<nix::PosIdx, nix::Expr *>> * string_parts;
std::vector<std::pair<nix::PosIdx, std::variant<nix::Expr *, nix::StringToken>>> * ind_string_parts;
}
@ -97,7 +102,8 @@ void yyerror(YYLTYPE * loc, yyscan_t scanner, ParserState * state, const char *
%type <attrs> binds
%type <formals> formals
%type <formal> formal
%type <attrNames> attrs attrpath
%type <attrNames> attrpath
%type <inheritAttrs> attrs
%type <string_parts> string_parts_interpolated
%type <ind_string_parts> ind_string_parts
%type <e> path_start string_parts string_attr
@ -309,13 +315,12 @@ binds
: binds attrpath '=' expr ';' { $$ = $1; state->addAttr($$, std::move(*$2), $4, state->at(@2)); delete $2; }
| binds INHERIT attrs ';'
{ $$ = $1;
for (auto & i : *$3) {
for (auto & [i, iPos] : *$3) {
if ($$->attrs.find(i.symbol) != $$->attrs.end())
state->dupAttr(i.symbol, state->at(@3), $$->attrs[i.symbol].pos);
auto pos = state->at(@3);
state->dupAttr(i.symbol, iPos, $$->attrs[i.symbol].pos);
$$->attrs.emplace(
i.symbol,
ExprAttrs::AttrDef(new ExprVar(CUR_POS, i.symbol), pos, ExprAttrs::AttrDef::Kind::Inherited));
ExprAttrs::AttrDef(new ExprVar(iPos, i.symbol), iPos, ExprAttrs::AttrDef::Kind::Inherited));
}
delete $3;
}
@ -325,14 +330,14 @@ binds
$$->inheritFromExprs = std::make_unique<std::vector<Expr *>>();
$$->inheritFromExprs->push_back($4);
auto from = new nix::ExprInheritFrom(state->at(@4), $$->inheritFromExprs->size() - 1);
for (auto & i : *$6) {
for (auto & [i, iPos] : *$6) {
if ($$->attrs.find(i.symbol) != $$->attrs.end())
state->dupAttr(i.symbol, state->at(@6), $$->attrs[i.symbol].pos);
state->dupAttr(i.symbol, iPos, $$->attrs[i.symbol].pos);
$$->attrs.emplace(
i.symbol,
ExprAttrs::AttrDef(
new ExprSelect(CUR_POS, from, i.symbol),
state->at(@6),
new ExprSelect(iPos, from, i.symbol),
iPos,
ExprAttrs::AttrDef::Kind::InheritedFrom));
}
delete $6;
@ -341,12 +346,12 @@ binds
;
attrs
: attrs attr { $$ = $1; $1->push_back(AttrName(state->symbols.create($2))); }
: attrs attr { $$ = $1; $1->emplace_back(AttrName(state->symbols.create($2)), state->at(@2)); }
| attrs string_attr
{ $$ = $1;
ExprString * str = dynamic_cast<ExprString *>($2);
if (str) {
$$->push_back(AttrName(state->symbols.create(str->s)));
$$->emplace_back(AttrName(state->symbols.create(str->s)), state->at(@2));
delete str;
} else
throw ParseError({
@ -354,7 +359,7 @@ attrs
.pos = state->positions[state->at(@2)]
});
}
| { $$ = new AttrPath; }
| { $$ = new std::vector<std::pair<AttrName, PosIdx>>; }
;
attrpath
@ -433,7 +438,7 @@ Expr * parseExprFromBuf(
.symbols = symbols,
.positions = positions,
.basePath = basePath,
.origin = {origin},
.origin = positions.addOrigin(origin, length),
.rootFS = rootFS,
.s = astSymbols,
};

View file

@ -6,6 +6,7 @@ namespace nix {
class PosIdx
{
friend struct LazyPosAcessors;
friend class PosTable;
private:

View file

@ -7,6 +7,7 @@
#include "chunked-vector.hh"
#include "pos-idx.hh"
#include "position.hh"
#include "sync.hh"
namespace nix {
@ -17,66 +18,69 @@ public:
{
friend PosTable;
private:
// must always be invalid by default, add() replaces this with the actual value.
// subsequent add() calls use this index as a token to quickly check whether the
// current origins.back() can be reused or not.
mutable uint32_t idx = std::numeric_limits<uint32_t>::max();
uint32_t offset;
// Used for searching in PosTable::[].
explicit Origin(uint32_t idx)
: idx(idx)
, origin{std::monostate()}
{
}
Origin(Pos::Origin origin, uint32_t offset, size_t size):
offset(offset), origin(origin), size(size)
{}
public:
const Pos::Origin origin;
const size_t size;
Origin(Pos::Origin origin)
: origin(origin)
uint32_t offsetOf(PosIdx p) const
{
return p.id - 1 - offset;
}
};
struct Offset
{
uint32_t line, column;
};
private:
std::vector<Origin> origins;
ChunkedVector<Offset, 8192> offsets;
using Lines = std::vector<uint32_t>;
public:
PosTable()
: offsets(1024)
{
origins.reserve(1024);
}
std::map<uint32_t, Origin> origins;
mutable Sync<std::map<uint32_t, Lines>> lines;
PosIdx add(const Origin & origin, uint32_t line, uint32_t column)
const Origin * resolve(PosIdx p) const
{
const auto idx = offsets.add({line, column}).second;
if (origins.empty() || origins.back().idx != origin.idx) {
origin.idx = idx;
origins.push_back(origin);
}
return PosIdx(idx + 1);
}
if (p.id == 0)
return nullptr;
Pos operator[](PosIdx p) const
{
if (p.id == 0 || p.id > offsets.size())
return {};
const auto idx = p.id - 1;
/* we want the last key <= idx, so we'll take prev(first key > idx).
this is guaranteed to never rewind origin.begin because the first
key is always 0. */
const auto pastOrigin = std::upper_bound(
origins.begin(), origins.end(), Origin(idx), [](const auto & a, const auto & b) { return a.idx < b.idx; });
const auto origin = *std::prev(pastOrigin);
const auto offset = offsets[idx];
return {offset.line, offset.column, origin.origin};
this is guaranteed to never rewind origin.begin because the first
key is always 0. */
const auto pastOrigin = origins.upper_bound(idx);
return &std::prev(pastOrigin)->second;
}
public:
Origin addOrigin(Pos::Origin origin, size_t size)
{
uint32_t offset = 0;
if (auto it = origins.rbegin(); it != origins.rend())
offset = it->first + it->second.size;
// +1 because all PosIdx are offset by 1 to begin with, and
// another +1 to ensure that all origins can point to EOF, eg
// on (invalid) empty inputs.
if (2 + offset + size < offset)
return Origin{origin, offset, 0};
return origins.emplace(offset, Origin{origin, offset, size}).first->second;
}
PosIdx add(const Origin & origin, size_t offset)
{
if (offset > origin.size)
return PosIdx();
return PosIdx(1 + origin.offset + offset);
}
Pos operator[](PosIdx p) const;
Pos::Origin originOf(PosIdx p) const
{
if (auto o = resolve(p))
return o->origin;
return std::monostate{};
}
};
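A rough self-contained model of the id scheme declared above: `addOrigin` gives each input a contiguous id range starting at `1 + offset`, `add` is plain arithmetic on the byte offset, and resolving an id means finding the last origin whose base offset does not exceed it. All names below are illustrative, not the real classes:

```cpp
// Toy model of the offset-based PosIdx scheme above (illustrative only).
#include <cassert>
#include <cstdint>
#include <map>
#include <string>
#include <utility>

struct MiniPosTable
{
    struct Origin { uint32_t offset; size_t size; std::string name; };
    std::map<uint32_t, Origin> origins;                   // keyed by base offset

    Origin addOrigin(std::string name, size_t size)
    {
        uint32_t offset = 0;
        if (auto it = origins.rbegin(); it != origins.rend())
            offset = uint32_t(it->first + it->second.size); // append after the last origin
        return origins.emplace(offset, Origin{offset, size, std::move(name)}).first->second;
    }

    uint32_t add(const Origin & o, size_t byteOffset)      // "PosIdx" is 1 + base + offset
    {
        return uint32_t(1 + o.offset + byteOffset);
    }

    const Origin * resolve(uint32_t id) const
    {
        if (id == 0) return nullptr;                       // id 0 stays "no position"
        auto past = origins.upper_bound(id - 1);           // first origin starting after id
        return &std::prev(past)->second;
    }
};

int main()
{
    MiniPosTable t;
    auto a = t.addOrigin("a.nix", 10);
    auto b = t.addOrigin("b.nix", 5);
    assert(t.resolve(t.add(a, 3))->name == "a.nix");
    assert(t.resolve(t.add(b, 0))->name == "b.nix");
}
```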

View file

@ -1736,7 +1736,7 @@ static RegisterPrimOp primop_findFile(PrimOp {
- If the suffix is found inside that directory, then the entry is a match.
The combined absolute path of the directory (now downloaded if need be) and the suffix is returned.
[Lookup path](@docroot@/language/constructs/lookup-path.md) expressions can be [desugared](https://en.wikipedia.org/wiki/Syntactic_sugar) using this and [`builtins.nixPath`](@docroot@/language/builtin-constants.md#builtins-nixPath):
[Lookup path](@docroot@/language/constructs/lookup-path.md) expressions are [desugared](https://en.wikipedia.org/wiki/Syntactic_sugar) using this and [`builtins.nixPath`](@docroot@/language/builtin-constants.md#builtins-nixPath):
```nix
<nixpkgs>
@ -2524,6 +2524,54 @@ static RegisterPrimOp primop_unsafeGetAttrPos(PrimOp {
.fun = prim_unsafeGetAttrPos,
});
// access to exact position information (ie, line and column numbers) is deferred
// due to the cost associated with calculating that information and how rarely
// it is used in practice. this is achieved by creating thunks to otherwise
// inaccessible primops that are not exposed as __op or under builtins to turn
// the internal PosIdx back into a line and column number, respectively. exposing
// these primops in any way would at best be not useful and at worst create wildly
// indeterministic eval results depending on parse order of files.
//
// in a simpler world this would instead be implemented as another kind of thunk,
// but each type of thunk has an associated runtime cost in the current evaluator.
// as with black holes this cost is too high to justify another thunk type to check
// for in the very hot path that is forceValue.
static struct LazyPosAcessors {
PrimOp primop_lineOfPos{
.arity = 1,
.fun = [] (EvalState & state, PosIdx pos, Value * * args, Value & v) {
v.mkInt(state.positions[PosIdx(args[0]->integer)].line);
}
};
PrimOp primop_columnOfPos{
.arity = 1,
.fun = [] (EvalState & state, PosIdx pos, Value * * args, Value & v) {
v.mkInt(state.positions[PosIdx(args[0]->integer)].column);
}
};
Value lineOfPos, columnOfPos;
LazyPosAcessors()
{
lineOfPos.mkPrimOp(&primop_lineOfPos);
columnOfPos.mkPrimOp(&primop_columnOfPos);
}
void operator()(EvalState & state, const PosIdx pos, Value & line, Value & column)
{
Value * posV = state.allocValue();
posV->mkInt(pos.id);
line.mkApp(&lineOfPos, posV);
column.mkApp(&columnOfPos, posV);
}
} makeLazyPosAccessors;
void makePositionThunks(EvalState & state, const PosIdx pos, Value & line, Value & column)
{
makeLazyPosAccessors(state, pos, line, column);
}
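The mechanism above amounts to storing a cheap application of a hidden primop to the position id and only evaluating it when the attribute is actually forced. A language-agnostic toy of that deferral in C++ (not how the evaluator represents thunks, just the idea):

```cpp
// Toy illustration of deferring the offset -> line/column conversion until forced.
#include <cassert>
#include <functional>

struct LazyInt
{
    std::function<int()> thunk;                 // stands in for the primop application
    int force() const { return thunk(); }
};

int main()
{
    int conversions = 0;
    auto lineOfPos = [&conversions](int posId) { ++conversions; return posId / 1000; };

    LazyInt line{[&, posId = 42007] { return lineOfPos(posId); }};
    assert(conversions == 0);                   // nothing computed yet
    assert(line.force() == 42);                 // converted only on demand
    assert(conversions == 1);
}
```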
/* Dynamic version of the `?' operator. */
static void prim_hasAttr(EvalState & state, const PosIdx pos, Value * * args, Value & v)
{
@ -4522,11 +4570,9 @@ void EvalState::createBaseEnv()
addConstant("__nixPath", v, {
.type = nList,
.doc = R"(
List of search path entries used to resolve [lookup paths](@docroot@/language/constructs/lookup-path.md).
The value of the [`nix-path` configuration setting](@docroot@/command-ref/conf-file.md#conf-nix-path): a list of search path entries used to resolve [lookup paths](@docroot@/language/constructs/lookup-path.md).
Lookup path expressions can be
[desugared](https://en.wikipedia.org/wiki/Syntactic_sugar)
using this and
Lookup path expressions are [desugared](https://en.wikipedia.org/wiki/Syntactic_sugar) using this and
[`builtins.findFile`](./builtins.html#builtins-findFile):
```nix

View file

@ -51,4 +51,6 @@ void prim_importNative(EvalState & state, const PosIdx pos, Value * * args, Valu
*/
void prim_exec(EvalState & state, const PosIdx pos, Value * * args, Value & v);
void makePositionThunks(EvalState & state, const PosIdx pos, Value & line, Value & column);
}

View file

@ -137,14 +137,14 @@ static RegisterPrimOp primop_addDrvOutputDependencies({
.name = "__addDrvOutputDependencies",
.args = {"s"},
.doc = R"(
Create a copy of the given string where a single consant string context element is turned into a "derivation deep" string context element.
Create a copy of the given string where a single constant string context element is turned into a "derivation deep" string context element.
The store path that is the constant string context element should point to a valid derivation, and end in `.drv`.
The original string context element must not be empty or have multiple elements, and it must not have any other type of element other than a constant or derivation deep element.
The latter is supported so this function is idempotent.
This is the opposite of [`builtins.unsafeDiscardOutputDependency`](#builtins-addDrvOutputDependencies).
This is the opposite of [`builtins.unsafeDiscardOutputDependency`](#builtins-unsafeDiscardOutputDependency).
)",
.fun = prim_addDrvOutputDependencies
});
@ -246,7 +246,7 @@ static RegisterPrimOp primop_getContext({
/* Append the given context to a given string.
See the commentary above unsafeGetContext for details of the
See the commentary above getContext for details of the
context representation.
*/
static void prim_appendContext(EvalState & state, const PosIdx pos, Value * * args, Value & v)

View file

@ -64,8 +64,7 @@ static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value * * a
if (rev) attrs.insert_or_assign("rev", rev->gitRev());
auto input = fetchers::Input::fromAttrs(std::move(attrs));
// FIXME: use name
auto [storePath, input2] = input.fetch(state.store);
auto [storePath, input2] = input.fetchToStore(state.store);
auto attrs2 = state.buildBindings(8);
state.mkStorePathString(storePath, attrs2.alloc(state.sOutPath));

View file

@ -182,7 +182,7 @@ static void fetchTree(
state.checkURI(input.toURLString());
auto [storePath, input2] = input.fetch(state.store);
auto [storePath, input2] = input.fetchToStore(state.store);
state.allowPath(storePath);

View file

@ -8,6 +8,29 @@
namespace nix {
/**
* How errors should be handled when printing values.
*/
enum class ErrorPrintBehavior {
/**
* Print the first line of the error in brackets: `«error: oh no!»`
*/
Print,
/**
* Throw the error to the code that attempted to print the value, instead
of suppressing it.
*/
Throw,
/**
* Only throw the error if encountered at the top level of the expression.
*
* This will cause expressions like `builtins.throw "uh oh!"` to throw
* errors, but will print attribute sets and other nested structures
* containing values that error (like `nixpkgs`) normally.
*/
ThrowTopLevel,
};
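The `ThrowTopLevel` case is the one the repl's `:p` change earlier in this diff relies on: an error raised by the expression itself propagates, while errors buried inside an attrset or list are still rendered inline. A toy of the dispatch, mirroring the `depth == 0` check in the printer later in this diff (names are illustrative):

```cpp
// Toy model of the error-handling dispatch (mirrors the check in the printer).
#include <cassert>
#include <cstddef>

enum class ErrorPrintBehavior { Print, Throw, ThrowTopLevel };

static bool shouldRethrow(ErrorPrintBehavior behavior, std::size_t depth)
{
    return behavior == ErrorPrintBehavior::Throw
        || (behavior == ErrorPrintBehavior::ThrowTopLevel && depth == 0);
}

int main()
{
    assert(shouldRethrow(ErrorPrintBehavior::ThrowTopLevel, 0));   // `builtins.throw "x"` at top level
    assert(!shouldRethrow(ErrorPrintBehavior::ThrowTopLevel, 2));  // error nested inside an attrset
    assert(shouldRethrow(ErrorPrintBehavior::Throw, 2));           // always propagate
    assert(!shouldRethrow(ErrorPrintBehavior::Print, 0));          // always render «error: ...»
}
```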
/**
* Options for printing Nix values.
*/
@ -68,6 +91,11 @@ struct PrintOptions
*/
size_t prettyIndent = 0;
/**
* How to handle errors encountered while printing values.
*/
ErrorPrintBehavior errors = ErrorPrintBehavior::Print;
/**
* True if pretty-printing is enabled.
*/
@ -86,7 +114,7 @@ static PrintOptions errorPrintOptions = PrintOptions {
.maxDepth = 10,
.maxAttrs = 10,
.maxListItems = 10,
.maxStringLength = 1024
.maxStringLength = 1024,
};
}

View file

@ -271,25 +271,21 @@ private:
void printDerivation(Value & v)
{
try {
Bindings::iterator i = v.attrs->find(state.sDrvPath);
NixStringContext context;
std::string storePath;
if (i != v.attrs->end())
storePath = state.store->printStorePath(state.coerceToStorePath(i->pos, *i->value, context, "while evaluating the drvPath of a derivation"));
Bindings::iterator i = v.attrs->find(state.sDrvPath);
NixStringContext context;
std::string storePath;
if (i != v.attrs->end())
storePath = state.store->printStorePath(state.coerceToStorePath(i->pos, *i->value, context, "while evaluating the drvPath of a derivation"));
if (options.ansiColors)
output << ANSI_GREEN;
output << "«derivation";
if (!storePath.empty()) {
output << " " << storePath;
}
output << "»";
if (options.ansiColors)
output << ANSI_NORMAL;
} catch (Error & e) {
printError_(e);
if (options.ansiColors)
output << ANSI_GREEN;
output << "«derivation";
if (!storePath.empty()) {
output << " " << storePath;
}
output << "»";
if (options.ansiColors)
output << ANSI_NORMAL;
}
bool shouldPrettyPrintAttrs(AttrVec & v)
@ -510,64 +506,68 @@ private:
output.flush();
checkInterrupt();
if (options.force) {
try {
try {
if (options.force) {
state.forceValue(v, v.determinePos(noPos));
} catch (Error & e) {
printError_(e);
return;
}
}
switch (v.type()) {
switch (v.type()) {
case nInt:
printInt(v);
break;
case nInt:
printInt(v);
break;
case nFloat:
printFloat(v);
break;
case nFloat:
printFloat(v);
break;
case nBool:
printBool(v);
break;
case nBool:
printBool(v);
break;
case nString:
printString(v);
break;
case nString:
printString(v);
break;
case nPath:
printPath(v);
break;
case nPath:
printPath(v);
break;
case nNull:
printNull();
break;
case nNull:
printNull();
break;
case nAttrs:
printAttrs(v, depth);
break;
case nAttrs:
printAttrs(v, depth);
break;
case nList:
printList(v, depth);
break;
case nList:
printList(v, depth);
break;
case nFunction:
printFunction(v);
break;
case nFunction:
printFunction(v);
break;
case nThunk:
printThunk(v);
break;
case nThunk:
printThunk(v);
break;
case nExternal:
printExternal(v);
break;
case nExternal:
printExternal(v);
break;
default:
printUnknown();
break;
default:
printUnknown();
break;
}
} catch (Error & e) {
if (options.errors == ErrorPrintBehavior::Throw
|| (options.errors == ErrorPrintBehavior::ThrowTopLevel
&& depth == 0)) {
throw;
}
printError_(e);
}
}

View file

@ -161,7 +161,7 @@ bool Input::contains(const Input & other) const
return false;
}
std::pair<StorePath, Input> Input::fetch(ref<Store> store) const
std::pair<StorePath, Input> Input::fetchToStore(ref<Store> store) const
{
if (!scheme)
throw Error("cannot fetch unsupported input '%s'", attrsToJSON(toAttrs()));
@ -186,56 +186,85 @@ std::pair<StorePath, Input> Input::fetch(ref<Store> store) const
auto [storePath, input] = [&]() -> std::pair<StorePath, Input> {
try {
return scheme->fetch(store, *this);
auto [accessor, final] = getAccessorUnchecked(store);
auto storePath = nix::fetchToStore(*store, SourcePath(accessor), FetchMode::Copy, final.getName());
auto narHash = store->queryPathInfo(storePath)->narHash;
final.attrs.insert_or_assign("narHash", narHash.to_string(HashFormat::SRI, true));
scheme->checkLocks(*this, final);
return {storePath, final};
} catch (Error & e) {
e.addTrace({}, "while fetching the input '%s'", to_string());
throw;
}
}();
auto narHash = store->queryPathInfo(storePath)->narHash;
input.attrs.insert_or_assign("narHash", narHash.to_string(HashFormat::SRI, true));
if (auto prevNarHash = getNarHash()) {
if (narHash != *prevNarHash)
throw Error((unsigned int) 102, "NAR hash mismatch in input '%s' (%s), expected '%s', got '%s'",
to_string(),
store->printStorePath(storePath),
prevNarHash->to_string(HashFormat::SRI, true),
narHash.to_string(HashFormat::SRI, true));
}
if (auto prevLastModified = getLastModified()) {
if (input.getLastModified() != prevLastModified)
throw Error("'lastModified' attribute mismatch in input '%s', expected %d",
input.to_string(), *prevLastModified);
}
if (auto prevRev = getRev()) {
if (input.getRev() != prevRev)
throw Error("'rev' attribute mismatch in input '%s', expected %s",
input.to_string(), prevRev->gitRev());
}
if (auto prevRevCount = getRevCount()) {
if (input.getRevCount() != prevRevCount)
throw Error("'revCount' attribute mismatch in input '%s', expected %d",
input.to_string(), *prevRevCount);
}
return {std::move(storePath), input};
}
void InputScheme::checkLocks(const Input & specified, const Input & final) const
{
if (auto prevNarHash = specified.getNarHash()) {
if (final.getNarHash() != prevNarHash) {
if (final.getNarHash())
throw Error((unsigned int) 102, "NAR hash mismatch in input '%s', expected '%s' but got '%s'",
specified.to_string(), prevNarHash->to_string(HashFormat::SRI, true), final.getNarHash()->to_string(HashFormat::SRI, true));
else
throw Error((unsigned int) 102, "NAR hash mismatch in input '%s', expected '%s' but got none",
specified.to_string(), prevNarHash->to_string(HashFormat::SRI, true));
}
}
if (auto prevLastModified = specified.getLastModified()) {
if (final.getLastModified() != prevLastModified)
throw Error("'lastModified' attribute mismatch in input '%s', expected %d",
final.to_string(), *prevLastModified);
}
if (auto prevRev = specified.getRev()) {
if (final.getRev() != prevRev)
throw Error("'rev' attribute mismatch in input '%s', expected %s",
final.to_string(), prevRev->gitRev());
}
if (auto prevRevCount = specified.getRevCount()) {
if (final.getRevCount() != prevRevCount)
throw Error("'revCount' attribute mismatch in input '%s', expected %d",
final.to_string(), *prevRevCount);
}
}
std::pair<ref<InputAccessor>, Input> Input::getAccessor(ref<Store> store) const
{
try {
return scheme->getAccessor(store, *this);
auto [accessor, final] = getAccessorUnchecked(store);
scheme->checkLocks(*this, final);
return {accessor, std::move(final)};
} catch (Error & e) {
e.addTrace({}, "while fetching the input '%s'", to_string());
throw;
}
}
std::pair<ref<InputAccessor>, Input> Input::getAccessorUnchecked(ref<Store> store) const
{
// FIXME: cache the accessor
if (!scheme)
throw Error("cannot fetch unsupported input '%s'", attrsToJSON(toAttrs()));
auto [accessor, final] = scheme->getAccessor(store, *this);
accessor->fingerprint = scheme->getFingerprint(store, final);
return {accessor, std::move(final)};
}
Input Input::applyOverrides(
std::optional<std::string> ref,
std::optional<Hash> rev) const
@ -372,18 +401,6 @@ void InputScheme::clone(const Input & input, const Path & destDir) const
throw Error("do not know how to clone input '%s'", input.to_string());
}
std::pair<StorePath, Input> InputScheme::fetch(ref<Store> store, const Input & input)
{
auto [accessor, input2] = getAccessor(store, input);
auto storePath = fetchToStore(*store, SourcePath(accessor), FetchMode::Copy, input2.getName());
return {storePath, input2};
}
std::pair<ref<InputAccessor>, Input> InputScheme::getAccessor(ref<Store> store, const Input & input) const
{
throw UnimplementedError("InputScheme must implement fetch() or getAccessor()");
}
std::optional<ExperimentalFeature> InputScheme::experimentalFeature() const
{
return {};

View file

@ -80,10 +80,21 @@ public:
* Fetch the entire input into the Nix store, returning the
* location in the Nix store and the locked input.
*/
std::pair<StorePath, Input> fetch(ref<Store> store) const;
std::pair<StorePath, Input> fetchToStore(ref<Store> store) const;
/**
* Return an InputAccessor that allows access to files in the
* input without copying it to the store. Also return a possibly
* unlocked input.
*/
std::pair<ref<InputAccessor>, Input> getAccessor(ref<Store> store) const;
private:
std::pair<ref<InputAccessor>, Input> getAccessorUnchecked(ref<Store> store) const;
public:
Input applyOverrides(
std::optional<std::string> ref,
std::optional<Hash> rev) const;
@ -173,9 +184,7 @@ struct InputScheme
std::string_view contents,
std::optional<std::string> commitMsg) const;
virtual std::pair<StorePath, Input> fetch(ref<Store> store, const Input & input);
virtual std::pair<ref<InputAccessor>, Input> getAccessor(ref<Store> store, const Input & input) const;
virtual std::pair<ref<InputAccessor>, Input> getAccessor(ref<Store> store, const Input & input) const = 0;
/**
* Is this `InputScheme` part of an experimental feature?
@ -202,6 +211,14 @@ struct InputScheme
*/
virtual bool isLocked(const Input & input) const
{ return false; }
/**
* Check the locking attributes in `final` against
* `specified`. E.g. if `specified` has a `rev` attribute, then
* `final` must have the same `rev` attribute. Throw an exception
* if there is a mismatch.
*/
virtual void checkLocks(const Input & specified, const Input & final) const;
};
void registerInputScheme(std::shared_ptr<InputScheme> && fetcher);

View file

@ -761,8 +761,6 @@ struct GitInputScheme : InputScheme
? getAccessorFromCommit(store, repoInfo, std::move(input))
: getAccessorFromWorkdir(store, repoInfo, std::move(input));
accessor->fingerprint = final.getFingerprint(store);
return {accessor, std::move(final)};
}

View file

@ -98,6 +98,10 @@ struct GitArchiveInputScheme : InputScheme
if (ref) input.attrs.insert_or_assign("ref", *ref);
if (host_url) input.attrs.insert_or_assign("host", *host_url);
auto narHash = url.query.find("narHash");
if (narHash != url.query.end())
input.attrs.insert_or_assign("narHash", narHash->second);
return input;
}
@ -111,6 +115,7 @@ struct GitArchiveInputScheme : InputScheme
"narHash",
"lastModified",
"host",
"treeHash",
};
}
@ -134,10 +139,13 @@ struct GitArchiveInputScheme : InputScheme
assert(!(ref && rev));
if (ref) path += "/" + *ref;
if (rev) path += "/" + rev->to_string(HashFormat::Base16, false);
return ParsedURL {
auto url = ParsedURL {
.scheme = std::string { schemeName() },
.path = path,
};
if (auto narHash = input.getNarHash())
url.query.insert_or_assign("narHash", narHash->to_string(HashFormat::SRI, true));
return url;
}
Input applyOverrides(
@ -268,15 +276,15 @@ struct GitArchiveInputScheme : InputScheme
{
auto [input, tarballInfo] = downloadArchive(store, _input);
#if 0
input.attrs.insert_or_assign("treeHash", tarballInfo.treeHash.gitRev());
#endif
input.attrs.insert_or_assign("lastModified", uint64_t(tarballInfo.lastModified));
auto accessor = getTarballCache()->getAccessor(tarballInfo.treeHash, false);
accessor->setPathDisplay("«" + input.to_string() + "»");
accessor->fingerprint = input.getFingerprint(store);
return {accessor, input};
}

View file

@ -97,7 +97,7 @@ struct IndirectInputScheme : InputScheme
return input;
}
std::pair<StorePath, Input> fetch(ref<Store> store, const Input & input) override
std::pair<ref<InputAccessor>, Input> getAccessor(ref<Store> store, const Input & input) const override
{
throw Error("indirect input '%s' cannot be fetched directly", input.to_string());
}

View file

@ -6,8 +6,8 @@
#include "tarfile.hh"
#include "store-api.hh"
#include "url-parts.hh"
#include "fs-input-accessor.hh"
#include "posix-source-accessor.hh"
#include "fetch-settings.hh"
#include <sys/time.h>
@ -161,9 +161,9 @@ struct MercurialInputScheme : InputScheme
return {isLocal, isLocal ? url.path : url.base};
}
std::pair<StorePath, Input> fetch(ref<Store> store, const Input & _input) override
StorePath fetchToStore(ref<Store> store, Input & input) const
{
Input input(_input);
auto origRev = input.getRev();
auto name = input.getName();
@ -218,7 +218,7 @@ struct MercurialInputScheme : InputScheme
FileIngestionMethod::Recursive, HashAlgorithm::SHA256, {},
filter);
return {std::move(storePath), input};
return storePath;
}
}
@ -242,13 +242,12 @@ struct MercurialInputScheme : InputScheme
});
};
auto makeResult = [&](const Attrs & infoAttrs, StorePath && storePath)
-> std::pair<StorePath, Input>
auto makeResult = [&](const Attrs & infoAttrs, const StorePath & storePath) -> StorePath
{
assert(input.getRev());
assert(!_input.getRev() || _input.getRev() == input.getRev());
assert(!origRev || origRev == input.getRev());
input.attrs.insert_or_assign("revCount", getIntAttr(infoAttrs, "revCount"));
return {std::move(storePath), input};
return storePath;
};
if (input.getRev()) {
@ -329,7 +328,7 @@ struct MercurialInputScheme : InputScheme
{"revCount", (uint64_t) revCount},
});
if (!_input.getRev())
if (!origRev)
getCache()->add(
*store,
unlockedAttrs,
@ -347,6 +346,15 @@ struct MercurialInputScheme : InputScheme
return makeResult(infoAttrs, std::move(storePath));
}
std::pair<ref<InputAccessor>, Input> getAccessor(ref<Store> store, const Input & _input) const override
{
Input input(_input);
auto storePath = fetchToStore(store, input);
return {makeStorePathAccessor(store, storePath), input};
}
bool isLocked(const Input & input) const override
{
return (bool) input.getRev();

View file

@ -1,6 +1,8 @@
#include "fetchers.hh"
#include "store-api.hh"
#include "archive.hh"
#include "fs-input-accessor.hh"
#include "posix-source-accessor.hh"
namespace nix::fetchers {
@ -87,6 +89,15 @@ struct PathInputScheme : InputScheme
writeFile((CanonPath(getAbsPath(input)) / path).abs(), contents);
}
std::optional<std::string> isRelative(const Input & input) const
{
auto path = getStrAttr(input.attrs, "path");
if (hasPrefix(path, "/"))
return std::nullopt;
else
return path;
}
bool isLocked(const Input & input) const override
{
return (bool) input.getNarHash();
@ -102,7 +113,7 @@ struct PathInputScheme : InputScheme
throw Error("cannot fetch input '%s' because it uses a relative path", input.to_string());
}
std::pair<StorePath, Input> fetch(ref<Store> store, const Input & _input) override
std::pair<ref<InputAccessor>, Input> getAccessor(ref<Store> store, const Input & _input) const override
{
Input input(_input);
std::string absPath;
@ -144,7 +155,24 @@ struct PathInputScheme : InputScheme
}
input.attrs.insert_or_assign("lastModified", uint64_t(mtime));
return {std::move(*storePath), input};
return {makeStorePathAccessor(store, *storePath), std::move(input)};
}
std::optional<std::string> getFingerprint(ref<Store> store, const Input & input) const override
{
if (isRelative(input))
return std::nullopt;
/* If this path is in the Nix store, use the hash of the
store object and the subpath. */
auto path = getAbsPath(input);
try {
auto [storePath, subPath] = store->toStorePath(path.abs());
auto info = store->queryPathInfo(storePath);
return fmt("path:%s:%s", info->narHash.to_string(HashFormat::Base16, false), subPath);
} catch (Error &) {
return std::nullopt;
}
}
std::optional<ExperimentalFeature> experimentalFeature() const override

View file

@ -123,6 +123,11 @@ struct KeyedBuildResult : BuildResult
* The derivation we built or the store path we substituted.
*/
DerivedPath path;
// Hack to work around a gcc "may be used uninitialized" warning.
KeyedBuildResult(BuildResult res, DerivedPath path)
: BuildResult(std::move(res)), path(std::move(path))
{ }
};
}

View file

@ -2480,6 +2480,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
CanonPath { tmpDir + "/tmp" }).hash;
}
}
assert(false);
}();
ValidPathInfo newInfo0 {
@ -2543,6 +2544,12 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
[&](const DerivationOutput::CAFixed & dof) {
auto & wanted = dof.ca.hash;
// Replace the output by a fresh copy of itself to make sure
// that there's no stale file descriptor pointing to it
Path tmpOutput = actualPath + ".tmp";
copyFile(actualPath, tmpOutput, true);
renameFile(tmpOutput, actualPath);
auto newInfo0 = newInfoFromCA(DerivationOutput::CAFloating {
.method = dof.ca.method,
.hashAlgo = wanted.algo,

View file

@ -45,7 +45,7 @@ R""(
; allow it if the package explicitly asks for it.
(if (param "_ALLOW_LOCAL_NETWORKING")
(begin
(allow network* (local ip) (local tcp) (local udp))
(allow network* (remote ip "localhost:*"))
; Allow access to /etc/resolv.conf (which is a symlink to
; /private/var/run/resolv.conf).

View file

@ -415,6 +415,8 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
// Use NAR; Git is not a serialization method
dumpMethod = FileSerialisationMethod::Recursive;
break;
default:
assert(false);
}
// TODO these two steps are essentially RemoteStore::addCAToStore. Move it up to Store.
auto path = store->addToStoreFromDump(source, name, dumpMethod, contentAddressMethod, hashAlgo, refs, repair);

View file

@ -527,6 +527,8 @@ StorePath RemoteStore::addToStoreFromDump(
// Use NAR; Git is not a serialization method
fsm = FileSerialisationMethod::Recursive;
break;
default:
assert(false);
}
if (fsm != dumpMethod)
unsupported("RemoteStore::addToStoreFromDump doesn't support this `dumpMethod` `hashMethod` combination");

View file

@ -38,6 +38,11 @@ unsigned int getMaxCPU()
auto cpuMax = readFile(cpuFile);
auto cpuMaxParts = tokenizeString<std::vector<std::string>>(cpuMax, " \n");
if (cpuMaxParts.size() != 2) {
return 0;
}
auto quota = cpuMaxParts[0];
auto period = cpuMaxParts[1];
if (quota != "max")

View file

@ -1,4 +1,5 @@
#include "experimental-features.hh"
#include "fmt.hh"
#include "util.hh"
#include "nlohmann/json.hpp"
@ -10,6 +11,7 @@ struct ExperimentalFeatureDetails
ExperimentalFeature tag;
std::string_view name;
std::string_view description;
std::string_view trackingUrl;
};
/**
@ -35,6 +37,7 @@ constexpr std::array<ExperimentalFeatureDetails, numXpFeatures> xpFeatureDetails
[__contentAddressed](@docroot@/language/advanced-attributes.md#adv-attr-__contentAddressed)
for details.
)",
.trackingUrl = "https://github.com/NixOS/nix/milestone/35",
},
{
.tag = Xp::ImpureDerivations,
@ -65,6 +68,7 @@ constexpr std::array<ExperimentalFeatureDetails, numXpFeatures> xpFeatureDetails
This is a more explicit alternative to using [`builtins.currentTime`](@docroot@/language/builtin-constants.md#builtins-currentTime).
)",
.trackingUrl = "https://github.com/NixOS/nix/milestone/42",
},
{
.tag = Xp::Flakes,
@ -73,6 +77,7 @@ constexpr std::array<ExperimentalFeatureDetails, numXpFeatures> xpFeatureDetails
Enable flakes. See the manual entry for [`nix
flake`](@docroot@/command-ref/new-cli/nix3-flake.md) for details.
)",
.trackingUrl = "https://github.com/NixOS/nix/milestone/27",
},
{
.tag = Xp::FetchTree,
@ -86,6 +91,7 @@ constexpr std::array<ExperimentalFeatureDetails, numXpFeatures> xpFeatureDetails
Enabling just this feature serves as a "release candidate", allowing users to try it out in isolation.
)",
.trackingUrl = "https://github.com/NixOS/nix/milestone/31",
},
{
.tag = Xp::NixCommand,
@ -94,6 +100,7 @@ constexpr std::array<ExperimentalFeatureDetails, numXpFeatures> xpFeatureDetails
Enable the new `nix` subcommands. See the manual on
[`nix`](@docroot@/command-ref/new-cli/nix.md) for details.
)",
.trackingUrl = "https://github.com/NixOS/nix/milestone/28",
},
{
.tag = Xp::GitHashing,
@ -102,6 +109,7 @@ constexpr std::array<ExperimentalFeatureDetails, numXpFeatures> xpFeatureDetails
Allow creating (content-addressed) store objects which are hashed via Git's hashing algorithm.
These store objects will not be understandable by older versions of Nix.
)",
.trackingUrl = "https://github.com/NixOS/nix/milestone/41",
},
{
.tag = Xp::RecursiveNix,
@ -143,6 +151,7 @@ constexpr std::array<ExperimentalFeatureDetails, numXpFeatures> xpFeatureDetails
already in the build inputs or built by a previous recursive Nix
call.
)",
.trackingUrl = "https://github.com/NixOS/nix/milestone/47",
},
{
.tag = Xp::NoUrlLiterals,
@ -184,6 +193,7 @@ constexpr std::array<ExperimentalFeatureDetails, numXpFeatures> xpFeatureDetails
containing parameters have to be quoted anyway, and unquoted URLs
may confuse external tooling.
)",
.trackingUrl = "https://github.com/NixOS/nix/milestone/44",
},
{
.tag = Xp::FetchClosure,
@ -191,6 +201,7 @@ constexpr std::array<ExperimentalFeatureDetails, numXpFeatures> xpFeatureDetails
.description = R"(
Enable the use of the [`fetchClosure`](@docroot@/language/builtins.md#builtins-fetchClosure) built-in function in the Nix language.
)",
.trackingUrl = "https://github.com/NixOS/nix/milestone/40",
},
{
.tag = Xp::ReplFlake,
@ -200,6 +211,7 @@ constexpr std::array<ExperimentalFeatureDetails, numXpFeatures> xpFeatureDetails
Allow passing [installables](@docroot@/command-ref/new-cli/nix.md#installables) to `nix repl`, making its interface consistent with the other experimental commands.
)",
.trackingUrl = "https://github.com/NixOS/nix/milestone/32",
},
{
.tag = Xp::AutoAllocateUids,
@ -208,6 +220,7 @@ constexpr std::array<ExperimentalFeatureDetails, numXpFeatures> xpFeatureDetails
Allows Nix to automatically pick UIDs for builds, rather than creating
`nixbld*` user accounts. See the [`auto-allocate-uids`](@docroot@/command-ref/conf-file.md#conf-auto-allocate-uids) setting for details.
)",
.trackingUrl = "https://github.com/NixOS/nix/milestone/34",
},
{
.tag = Xp::Cgroups,
@ -216,6 +229,7 @@ constexpr std::array<ExperimentalFeatureDetails, numXpFeatures> xpFeatureDetails
Allows Nix to execute builds inside cgroups. See
the [`use-cgroups`](@docroot@/command-ref/conf-file.md#conf-use-cgroups) setting for details.
)",
.trackingUrl = "https://github.com/NixOS/nix/milestone/36",
},
{
.tag = Xp::DaemonTrustOverride,
@ -226,6 +240,7 @@ constexpr std::array<ExperimentalFeatureDetails, numXpFeatures> xpFeatureDetails
useful for various experiments with `nix-daemon --stdio`
networking.
)",
.trackingUrl = "https://github.com/NixOS/nix/milestone/38",
},
{
.tag = Xp::DynamicDerivations,
@ -239,6 +254,7 @@ constexpr std::array<ExperimentalFeatureDetails, numXpFeatures> xpFeatureDetails
- dependencies in derivations on the outputs of
derivations that are themselves derivations outputs.
)",
.trackingUrl = "https://github.com/NixOS/nix/milestone/39",
},
{
.tag = Xp::ParseTomlTimestamps,
@ -246,6 +262,7 @@ constexpr std::array<ExperimentalFeatureDetails, numXpFeatures> xpFeatureDetails
.description = R"(
Allow parsing of timestamps in builtins.fromTOML.
)",
.trackingUrl = "https://github.com/NixOS/nix/milestone/45",
},
{
.tag = Xp::ReadOnlyLocalStore,
@ -253,6 +270,7 @@ constexpr std::array<ExperimentalFeatureDetails, numXpFeatures> xpFeatureDetails
.description = R"(
Allow the use of the `read-only` parameter in [local store](@docroot@/store/types/local-store.md) URIs.
)",
.trackingUrl = "https://github.com/NixOS/nix/milestone/46",
},
{
.tag = Xp::LocalOverlayStore,
@ -260,7 +278,7 @@ constexpr std::array<ExperimentalFeatureDetails, numXpFeatures> xpFeatureDetails
.description = R"(
Allow the use of [local overlay store](@docroot@/command-ref/new-cli/nix3-help-stores.md#local-overlay-store).
)",
.trackingUrl = ""https://github.com/NixOS/nix/milestone/50",
.trackingUrl = "https://github.com/NixOS/nix/milestone/50",
},
{
.tag = Xp::ConfigurableImpureEnv,
@ -268,6 +286,7 @@ constexpr std::array<ExperimentalFeatureDetails, numXpFeatures> xpFeatureDetails
.description = R"(
Allow the use of the [impure-env](@docroot@/command-ref/conf-file.md#conf-impure-env) setting.
)",
.trackingUrl = "https://github.com/NixOS/nix/milestone/37",
},
{
.tag = Xp::MountedSSHStore,
@ -275,6 +294,7 @@ constexpr std::array<ExperimentalFeatureDetails, numXpFeatures> xpFeatureDetails
.description = R"(
Allow the use of the [`mounted SSH store`](@docroot@/command-ref/new-cli/nix3-help-stores.html#experimental-ssh-store-with-filesytem-mounted).
)",
.trackingUrl = "https://github.com/NixOS/nix/milestone/43",
},
{
.tag = Xp::VerifiedFetches,
@ -282,6 +302,7 @@ constexpr std::array<ExperimentalFeatureDetails, numXpFeatures> xpFeatureDetails
.description = R"(
Enables verification of git commit signatures through the [`fetchGit`](@docroot@/language/builtins.md#builtins-fetchGit) built-in.
)",
.trackingUrl = "https://github.com/NixOS/nix/milestone/48",
},
}};
@ -320,9 +341,12 @@ std::string_view showExperimentalFeature(const ExperimentalFeature tag)
nlohmann::json documentExperimentalFeatures()
{
StringMap res;
for (auto & xpFeature : xpFeatureDetails)
res[std::string { xpFeature.name }] =
trim(stripIndentation(xpFeature.description));
for (auto & xpFeature : xpFeatureDetails) {
std::stringstream docOss;
docOss << stripIndentation(xpFeature.description);
docOss << fmt("\nRefer to [%1% tracking issue](%2%) for feature tracking.", xpFeature.name, xpFeature.trackingUrl);
res[std::string{xpFeature.name}] = trim(docOss.str());
}
return (nlohmann::json) res;
}

View file

@ -123,7 +123,7 @@ Hash hashPath(
case FileIngestionMethod::Git:
return git::dumpHash(ht, accessor, path, filter).hash;
}
assert(false);
}
}

View file

@ -617,6 +617,11 @@ void copy(const fs::directory_entry & from, const fs::path & to, bool andDelete)
}
}
void copyFile(const Path & oldPath, const Path & newPath, bool andDelete)
{
return copy(fs::directory_entry(fs::path(oldPath)), fs::path(newPath), andDelete);
}
void renameFile(const Path & oldName, const Path & newName)
{
fs::rename(oldName, newName);

View file

@ -186,6 +186,13 @@ void renameFile(const Path & src, const Path & dst);
*/
void moveFile(const Path & src, const Path & dst);
/**
* Recursively copy the content of `oldPath` to `newPath`. If `andDelete` is
* `true`, then also remove `oldPath` (making this equivalent to `moveFile`, but
* with the guarantee that the destination will be fresh, with no stale inode
* or file descriptor pointing to it).
*/
void copyFile(const Path & oldPath, const Path & newPath, bool andDelete);
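
Combined with `renameFile`, the new helper enables the copy-then-rename pattern used in `registerOutputs()` above. A minimal sketch, with an illustrative name, assuming the declarations in this header are in scope:

```cpp
// Hypothetical helper: replace `path` with a fresh copy of itself, so that any
// stale file descriptor still pointing at the old inode no longer refers to the
// content we are about to trust. Mirrors the copyFile + renameFile sequence in
// the derivation-output hunk above.
static void refreshInode(const Path & path)
{
    Path tmp = path + ".tmp";
    copyFile(path, tmp, /* andDelete */ true);  // recursive copy, then remove the original
    renameFile(tmp, path);                      // move the fresh copy into place
}
```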
/**
* Automatic cleanup of resources.

View file

@ -8,7 +8,6 @@
namespace nix {
namespace {
/**
* A helper for writing `boost::format` expressions.
*
@ -35,14 +34,13 @@ inline void formatHelper(F & f, const T & x, const Args & ... args)
/**
* Set the correct exceptions for `fmt`.
*/
void setExceptions(boost::format & fmt)
inline void setExceptions(boost::format & fmt)
{
fmt.exceptions(
boost::io::all_error_bits ^
boost::io::too_many_args_bit ^
boost::io::too_few_args_bit);
}
}
/**
* A helper for writing a `boost::format` expression to a string.

View file

@ -56,31 +56,63 @@ void parseBlob(
FileSystemObjectSink & sink,
const Path & sinkPath,
Source & source,
bool executable,
BlobMode blobMode,
const ExperimentalFeatureSettings & xpSettings)
{
xpSettings.require(Xp::GitHashing);
sink.createRegularFile(sinkPath, [&](auto & crf) {
if (executable)
crf.isExecutable();
unsigned long long size = std::stoi(getStringUntil(source, 0));
unsigned long long size = std::stoi(getStringUntil(source, 0));
auto doRegularFile = [&](bool executable) {
sink.createRegularFile(sinkPath, [&](auto & crf) {
if (executable)
crf.isExecutable();
crf.preallocateContents(size);
crf.preallocateContents(size);
unsigned long long left = size;
std::string buf;
buf.reserve(65536);
unsigned long long left = size;
std::string buf;
buf.reserve(65536);
while (left) {
while (left) {
checkInterrupt();
buf.resize(std::min((unsigned long long)buf.capacity(), left));
source(buf);
crf(buf);
left -= buf.size();
}
});
};
switch (blobMode) {
case BlobMode::Regular:
doRegularFile(false);
break;
case BlobMode::Executable:
doRegularFile(true);
break;
case BlobMode::Symlink:
{
std::string target;
target.resize(size, '0');
target.reserve(size);
for (size_t n = 0; n < target.size();) {
checkInterrupt();
buf.resize(std::min((unsigned long long)buf.capacity(), left));
source(buf);
crf(buf);
left -= buf.size();
n += source.read(
const_cast<char *>(target.c_str()) + n,
target.size() - n);
}
});
sink.createSymlink(sinkPath, target);
break;
}
default:
assert(false);
}
}
void parseTree(
@ -142,7 +174,7 @@ void parse(
FileSystemObjectSink & sink,
const Path & sinkPath,
Source & source,
bool executable,
BlobMode rootModeIfBlob,
std::function<SinkHook> hook,
const ExperimentalFeatureSettings & xpSettings)
{
@ -152,7 +184,7 @@ void parse(
switch (type) {
case ObjectType::Blob:
parseBlob(sink, sinkPath, source, executable, xpSettings);
parseBlob(sink, sinkPath, source, rootModeIfBlob, xpSettings);
break;
case ObjectType::Tree:
parseTree(sink, sinkPath, source, hook, xpSettings);
@ -177,7 +209,7 @@ std::optional<Mode> convertMode(SourceAccessor::Type type)
void restore(FileSystemObjectSink & sink, Source & source, std::function<RestoreHook> hook)
{
parse(sink, "", source, false, [&](Path name, TreeEntry entry) {
parse(sink, "", source, BlobMode::Regular, [&](Path name, TreeEntry entry) {
auto [accessor, from] = hook(entry.hash);
auto stat = accessor->lstat(from);
auto gotOpt = convertMode(stat.type);
@ -275,6 +307,13 @@ Mode dump(
}
case SourceAccessor::tSymlink:
{
auto target = accessor.readLink(path);
dumpBlobPrefix(target.size(), sink, xpSettings);
sink(target);
return Mode::Symlink;
}
case SourceAccessor::tMisc:
default:
throw Error("file '%1%' has an unsupported type", path);

View file

@ -75,10 +75,23 @@ ObjectType parseObjectType(
Source & source,
const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
/**
* These 3 modes are represented by blob objects.
*
* Sometimes we need this information to disambiguate how a blob is
* being used to better match our own "file system object" data model.
*/
enum struct BlobMode : RawMode
{
Regular = static_cast<RawMode>(Mode::Regular),
Executable = static_cast<RawMode>(Mode::Executable),
Symlink = static_cast<RawMode>(Mode::Symlink),
};
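
For example, a caller walking a tree can derive the `BlobMode` for an entry directly from its git mode bits. A small sketch (hypothetical helper, not part of this header), assuming `<optional>` and the `Mode` enum from this header:

```cpp
// Map a dir-entry Mode onto the BlobMode to use when parsing the referenced
// blob; trees (and anything else that is not a blob) have no BlobMode.
inline std::optional<BlobMode> blobModeFor(Mode mode)
{
    switch (mode) {
    case Mode::Regular:    return BlobMode::Regular;
    case Mode::Executable: return BlobMode::Executable;
    case Mode::Symlink:    return BlobMode::Symlink;
    default:               return std::nullopt;
    }
}
```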
void parseBlob(
FileSystemObjectSink & sink, const Path & sinkPath,
Source & source,
bool executable,
BlobMode blobMode,
const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
void parseTree(
@ -89,11 +102,15 @@ void parseTree(
/**
* Helper putting the previous three `parse*` functions together.
*
* @rootModeIfBlob How to interpret a root blob, for which there is no
* disambiguating dir entry to answer that question. If the root is not
* a blob, this is ignored.
*/
void parse(
FileSystemObjectSink & sink, const Path & sinkPath,
Source & source,
bool executable,
BlobMode rootModeIfBlob,
std::function<SinkHook> hook,
const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
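
A minimal sketch of using `parse` to restore a single blob object, mirroring the call in `restore()` from the corresponding `.cc` hunk; the function name here is illustrative:

```cpp
// Restore one git object from `source` into `sink`. A root blob is written as a
// plain regular file; tree entries are rejected because this sketch provides no
// hook to resolve them.
inline void restoreSingleBlob(FileSystemObjectSink & sink, Source & source)
{
    parse(sink, "", source, BlobMode::Regular,
        [](Path name, TreeEntry) {
            throw Error("cannot restore tree entry '%s' without a resolver hook", name);
        });
}
```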

View file

@ -29,32 +29,17 @@ std::optional<LinesOfCode> Pos::getCodeLines() const
return std::nullopt;
if (auto source = getSource()) {
std::istringstream iss(*source);
// count the newlines.
int count = 0;
std::string curLine;
int pl = line - 1;
LinesIterator lines(*source), end;
LinesOfCode loc;
do {
std::getline(iss, curLine);
++count;
if (count < pl)
;
else if (count == pl) {
loc.prevLineOfCode = curLine;
} else if (count == pl + 1) {
loc.errLineOfCode = curLine;
} else if (count == pl + 2) {
loc.nextLineOfCode = curLine;
break;
}
if (!iss.good())
break;
} while (true);
if (line > 1)
std::advance(lines, line - 2);
if (lines != end && line > 1)
loc.prevLineOfCode = *lines++;
if (lines != end)
loc.errLineOfCode = *lines++;
if (lines != end)
loc.nextLineOfCode = *lines++;
return loc;
}
@ -109,4 +94,26 @@ std::ostream & operator<<(std::ostream & str, const Pos & pos)
return str;
}
void Pos::LinesIterator::bump(bool atFirst)
{
if (!atFirst) {
pastEnd = input.empty();
if (!input.empty() && input[0] == '\r')
input.remove_prefix(1);
if (!input.empty() && input[0] == '\n')
input.remove_prefix(1);
}
// nix line endings are not only \n as eg std::getline assumes, but also
// \r\n **and \r alone**. not treating them all the same causes error
// reports to not match with line numbers as the parser expects them.
auto eol = input.find_first_of("\r\n");
if (eol > input.size())
eol = input.size();
curLine = input.substr(0, eol);
input.remove_prefix(eol);
}
}
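
The `\r` / `\r\n` / `\n` handling above means all three line-ending conventions split identically. A small illustrative sketch (test-style code, not part of the change), assuming the `Pos` declaration from the corresponding header (next hunk) is in scope:

```cpp
#include <string_view>
#include <vector>

// Collect the lines of `src` using Pos::LinesIterator.
static std::vector<std::string_view> splitLines(std::string_view src)
{
    std::vector<std::string_view> out;
    for (Pos::LinesIterator it(src), end; it != end; ++it)
        out.push_back(*it);
    return out;
}

// splitLines("a\nb\nc"), splitLines("a\r\nb\r\nc") and splitLines("a\rb\rc")
// all yield {"a", "b", "c"}, so reported error locations match the parser's line count.
```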

View file

@ -67,6 +67,48 @@ struct Pos
bool operator==(const Pos & rhs) const = default;
bool operator!=(const Pos & rhs) const = default;
bool operator<(const Pos & rhs) const;
struct LinesIterator {
using difference_type = size_t;
using value_type = std::string_view;
using reference = const std::string_view &;
using pointer = const std::string_view *;
using iterator_category = std::input_iterator_tag;
LinesIterator(): pastEnd(true) {}
explicit LinesIterator(std::string_view input): input(input), pastEnd(input.empty()) {
if (!pastEnd)
bump(true);
}
LinesIterator & operator++() {
bump(false);
return *this;
}
LinesIterator operator++(int) {
auto result = *this;
++*this;
return result;
}
reference operator*() const { return curLine; }
pointer operator->() const { return &curLine; }
bool operator!=(const LinesIterator & other) const {
return !(*this == other);
}
bool operator==(const LinesIterator & other) const {
return (pastEnd && other.pastEnd)
|| (std::forward_as_tuple(input.size(), input.data())
== std::forward_as_tuple(other.input.size(), other.input.data()));
}
private:
std::string_view input, curLine;
bool pastEnd = false;
void bump(bool atFirst);
};
};
std::ostream & operator<<(std::ostream & str, const Pos & pos);

View file

@ -85,16 +85,20 @@ bool PosixSourceAccessor::pathExists(const CanonPath & path)
std::optional<struct stat> PosixSourceAccessor::cachedLstat(const CanonPath & path)
{
static Sync<std::unordered_map<CanonPath, std::optional<struct stat>>> _cache;
static Sync<std::unordered_map<Path, std::optional<struct stat>>> _cache;
// Note: we convert std::filesystem::path to Path because the
// former is not hashable on libc++.
Path absPath = makeAbsPath(path);
{
auto cache(_cache.lock());
auto i = cache->find(path);
auto i = cache->find(absPath);
if (i != cache->end()) return i->second;
}
std::optional<struct stat> st{std::in_place};
if (::lstat(makeAbsPath(path).c_str(), &*st)) {
if (::lstat(absPath.c_str(), &*st)) {
if (errno == ENOENT || errno == ENOTDIR)
st.reset();
else
@ -103,7 +107,7 @@ std::optional<struct stat> PosixSourceAccessor::cachedLstat(const CanonPath & pa
auto cache(_cache.lock());
if (cache->size() >= 16384) cache->clear();
cache->emplace(path, st);
cache->emplace(absPath, st);
return st;
}
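
The cache-key change boils down to hashing the string form of the path instead of `std::filesystem::path` itself. A tiny self-contained sketch of the pattern (illustrative only, not Nix API):

```cpp
#include <filesystem>
#include <string>
#include <unordered_map>

// Keying by std::string avoids relying on a std::hash specialization for
// std::filesystem::path, which (per the comment above) is not available on libc++.
std::unordered_map<std::string, bool> seen;

bool checkAndMark(const std::filesystem::path & p)
{
    auto [it, inserted] = seen.emplace(p.string(), true);
    return inserted;
}
```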

View file

@ -9,6 +9,10 @@
#include <sodium.h>
#ifdef NDEBUG
#error "Nix may not be built with assertions disabled (i.e. with -DNDEBUG)."
#endif
namespace nix {
void initLibUtil() {

View file

@ -11,6 +11,12 @@ R""(
Note the `file://` - without this, the destination is a chroot
store, not a binary cache.
* Copy all store paths from a local binary cache in `/tmp/cache` to the local store:
```console
# nix copy --all --from file:///tmp/cache
```
* Copy the entire current NixOS system closure to another machine via
SSH:

View file

@ -120,8 +120,17 @@ struct CmdEval : MixJSON, InstallableValueCommand, MixReadOnlyOption
}
else {
state->forceValueDeep(*v);
logger->cout("%s", ValuePrinter(*state, *v, PrintOptions { .force = true }));
logger->cout(
"%s",
ValuePrinter(
*state,
*v,
PrintOptions {
.force = true,
.derivationPaths = true
}
)
);
}
}
};

View file

@ -88,17 +88,19 @@ public:
expectArgs({
.label="inputs",
.optional=true,
.handler={[&](std::string inputToUpdate){
InputPath inputPath;
try {
inputPath = flake::parseInputPath(inputToUpdate);
} catch (Error & e) {
warn("Invalid flake input '%s'. To update a specific flake, use 'nix flake update --flake %s' instead.", inputToUpdate, inputToUpdate);
throw e;
.handler={[&](std::vector<std::string> inputsToUpdate){
for (auto inputToUpdate : inputsToUpdate) {
InputPath inputPath;
try {
inputPath = flake::parseInputPath(inputToUpdate);
} catch (Error & e) {
warn("Invalid flake input '%s'. To update a specific flake, use 'nix flake update --flake %s' instead.", inputToUpdate, inputToUpdate);
throw e;
}
if (lockFlags.inputUpdates.contains(inputPath))
warn("Input '%s' was specified multiple times. You may have done this by accident.");
lockFlags.inputUpdates.insert(inputPath);
}
if (lockFlags.inputUpdates.contains(inputPath))
warn("Input '%s' was specified multiple times. You may have done this by accident.");
lockFlags.inputUpdates.insert(inputPath);
}},
.completer = {[&](AddCompletions & completions, size_t, std::string_view prefix) {
completeFlakeInputPath(completions, getEvalState(), getFlakeRefsForCompletion(), prefix);
@ -205,6 +207,9 @@ struct CmdFlakeMetadata : FlakeCommand, MixJSON
auto lockedFlake = lockFlake();
auto & flake = lockedFlake.flake;
// Currently, all flakes are in the Nix store via the rootFS accessor.
auto storePath = store->printStorePath(store->toStorePath(flake.path.path.abs()).first);
if (json) {
nlohmann::json j;
if (flake.description)
@ -214,6 +219,8 @@ struct CmdFlakeMetadata : FlakeCommand, MixJSON
j["resolvedUrl"] = flake.resolvedRef.to_string();
j["resolved"] = fetchers::attrsToJSON(flake.resolvedRef.toAttrs());
j["url"] = flake.lockedRef.to_string(); // FIXME: rename to lockedUrl
// "locked" is a misnomer - this is the result of the
// attempt to lock.
j["locked"] = fetchers::attrsToJSON(flake.lockedRef.toAttrs());
if (auto rev = flake.lockedRef.input.getRev())
j["revision"] = rev->to_string(HashFormat::Base16, false);
@ -223,23 +230,24 @@ struct CmdFlakeMetadata : FlakeCommand, MixJSON
j["revCount"] = *revCount;
if (auto lastModified = flake.lockedRef.input.getLastModified())
j["lastModified"] = *lastModified;
j["path"] = store->printStorePath(flake.storePath);
j["path"] = storePath;
j["locks"] = lockedFlake.lockFile.toJSON().first;
logger->cout("%s", j.dump());
} else {
logger->cout(
ANSI_BOLD "Resolved URL:" ANSI_NORMAL " %s",
flake.resolvedRef.to_string());
logger->cout(
ANSI_BOLD "Locked URL:" ANSI_NORMAL " %s",
flake.lockedRef.to_string());
if (flake.lockedRef.input.isLocked())
logger->cout(
ANSI_BOLD "Locked URL:" ANSI_NORMAL " %s",
flake.lockedRef.to_string());
if (flake.description)
logger->cout(
ANSI_BOLD "Description:" ANSI_NORMAL " %s",
*flake.description);
logger->cout(
ANSI_BOLD "Path:" ANSI_NORMAL " %s",
store->printStorePath(flake.storePath));
storePath);
if (auto rev = flake.lockedRef.input.getRev())
logger->cout(
ANSI_BOLD "Revision:" ANSI_NORMAL " %s",
@ -474,6 +482,8 @@ struct CmdFlakeCheck : FlakeCommand
checkHydraJobs = [&](const std::string & attrPath, Value & v, const PosIdx pos) {
try {
Activity act(*logger, lvlInfo, actUnknown,
fmt("checking Hydra job '%s'", attrPath));
state->forceAttrs(v, pos, "");
if (state->isDerivation(v))
@ -1031,7 +1041,9 @@ struct CmdFlakeArchive : FlakeCommand, MixJSON, MixDryRun
StorePathSet sources;
sources.insert(flake.flake.storePath);
auto storePath = store->toStorePath(flake.flake.path.path.abs()).first;
sources.insert(storePath);
// FIXME: use graph output, handle cycles.
std::function<nlohmann::json(const Node & node)> traverse;
@ -1043,7 +1055,7 @@ struct CmdFlakeArchive : FlakeCommand, MixJSON, MixDryRun
auto storePath =
dryRun
? (*inputNode)->lockedRef.input.computeStorePath(*store)
: (*inputNode)->lockedRef.input.fetch(store).first;
: (*inputNode)->lockedRef.input.fetchToStore(store).first;
if (json) {
auto& jsonObj3 = jsonObj2[inputName];
jsonObj3["path"] = store->printStorePath(storePath);
@ -1060,7 +1072,7 @@ struct CmdFlakeArchive : FlakeCommand, MixJSON, MixDryRun
if (json) {
nlohmann::json jsonRoot = {
{"path", store->printStorePath(flake.flake.storePath)},
{"path", store->printStorePath(storePath)},
{"inputs", traverse(*flake.lockFile.root)},
};
logger->cout("%s", jsonRoot);

View file

@ -11,9 +11,16 @@ R""(
* Remove all packages:
```console
# nix profile remove '.*'
# nix profile remove --all
```
* Remove packages by regular expression:
```console
# nix profile remove --regex '.*vim.*'
```
* Remove a package by store path:
```console

View file

@ -6,7 +6,7 @@ R""(
reference:
```console
# nix profile upgrade '.*'
# nix profile upgrade --all
```
* Upgrade a specific package by name:
@ -15,6 +15,12 @@ R""(
# nix profile upgrade hello
```
* Upgrade all packages that include 'vim' in their name:
```console
# nix profile upgrade --regex '.*vim.*'
```
# Description
This command upgrades a previously installed package in a Nix profile,

View file

@ -222,6 +222,8 @@ struct ProfileManifest
es[name] = obj;
}
nlohmann::json json;
// Only increase this version with great care, as changing it can break fresh
// installs, like in https://github.com/NixOS/nix/issues/10109
json["version"] = 3;
json["elements"] = es;
return json;
@ -477,55 +479,151 @@ struct CmdProfileInstall : InstallablesCommand, MixDefaultProfile
}
};
class MixProfileElementMatchers : virtual Args
struct Matcher
{
std::vector<std::string> _matchers;
virtual ~Matcher() { }
virtual std::string getTitle() = 0;
virtual bool matches(const std::string & name, const ProfileElement & element) = 0;
};
struct RegexMatcher final : public Matcher
{
std::regex regex;
std::string pattern;
RegexMatcher(const std::string & pattern) : regex(pattern, std::regex::extended | std::regex::icase), pattern(pattern)
{ }
std::string getTitle() override
{
return fmt("Regex '%s'", pattern);
}
bool matches(const std::string & name, const ProfileElement & element) override
{
return std::regex_match(element.identifier(), regex);
}
};
struct StorePathMatcher final : public Matcher
{
nix::StorePath storePath;
StorePathMatcher(const nix::StorePath & storePath) : storePath(storePath)
{ }
std::string getTitle() override
{
return fmt("Store path '%s'", storePath.to_string());
}
bool matches(const std::string & name, const ProfileElement & element) override
{
return element.storePaths.count(storePath);
}
};
struct NameMatcher final : public Matcher
{
std::string name;
NameMatcher(const std::string & name) : name(name)
{ }
std::string getTitle() override
{
return fmt("Package name '%s'", name);
}
bool matches(const std::string & name, const ProfileElement & element) override
{
return name == this->name;
}
};
struct AllMatcher final : public Matcher
{
std::string getTitle() override
{
return "--all";
}
bool matches(const std::string & name, const ProfileElement & element) override
{
return true;
}
};
AllMatcher all;
class MixProfileElementMatchers : virtual Args, virtual StoreCommand
{
std::vector<ref<Matcher>> _matchers;
public:
MixProfileElementMatchers()
{
expectArgs("elements", &_matchers);
addFlag({
.longName = "all",
.description = "Match all packages in the profile.",
.handler = {[this]() {
_matchers.push_back(ref<AllMatcher>(std::shared_ptr<AllMatcher>(&all, [](AllMatcher*) {})));
}},
});
addFlag({
.longName = "regex",
.description = "A regular expression to match one or more packages in the profile.",
.labels = {"pattern"},
.handler = {[this](std::string arg) {
_matchers.push_back(make_ref<RegexMatcher>(arg));
}},
});
expectArgs({
.label = "elements",
.optional = true,
.handler = {[this](std::vector<std::string> args) {
for (auto & arg : args) {
if (auto n = string2Int<size_t>(arg)) {
throw Error("'nix profile' no longer supports indices ('%d')", *n);
} else if (getStore()->isStorePath(arg)) {
_matchers.push_back(make_ref<StorePathMatcher>(getStore()->parseStorePath(arg)));
} else {
_matchers.push_back(make_ref<NameMatcher>(arg));
}
}
}}
});
}
struct RegexPattern {
std::string pattern;
std::regex reg;
};
typedef std::variant<Path, RegexPattern> Matcher;
std::vector<Matcher> getMatchers(ref<Store> store)
{
std::vector<Matcher> res;
for (auto & s : _matchers) {
if (auto n = string2Int<size_t>(s))
throw Error("'nix profile' no longer supports indices ('%d')", *n);
else if (store->isStorePath(s))
res.push_back(s);
else
res.push_back(RegexPattern{s,std::regex(s, std::regex::extended | std::regex::icase)});
std::set<std::string> getMatchingElementNames(ProfileManifest & manifest) {
if (_matchers.empty()) {
throw UsageError("No packages specified.");
}
return res;
}
if (std::find_if(_matchers.begin(), _matchers.end(), [](const ref<Matcher> & m) { return m.dynamic_pointer_cast<AllMatcher>(); }) != _matchers.end() && _matchers.size() > 1) {
throw UsageError("--all cannot be used with package names or regular expressions.");
}
bool matches(
const Store & store,
const std::string & name,
const ProfileElement & element,
const std::vector<Matcher> & matchers)
{
for (auto & matcher : matchers) {
if (auto path = std::get_if<Path>(&matcher)) {
if (element.storePaths.count(store.parseStorePath(*path))) return true;
} else if (auto regex = std::get_if<RegexPattern>(&matcher)) {
if (std::regex_match(name, regex->reg))
return true;
if (manifest.elements.empty()) {
warn("There are no packages in the profile.");
return {};
}
std::set<std::string> result;
for (auto & matcher : _matchers) {
bool foundMatch = false;
for (auto & [name, element] : manifest.elements) {
if (matcher->matches(name, element)) {
result.insert(name);
foundMatch = true;
}
}
if (!foundMatch) {
warn("%s does not match any packages in the profile.", matcher->getTitle());
}
}
return false;
return result;
}
};
@ -547,16 +645,19 @@ struct CmdProfileRemove : virtual EvalCommand, MixDefaultProfile, MixProfileElem
{
ProfileManifest oldManifest(*getEvalState(), *profile);
auto matchers = getMatchers(store);
ProfileManifest newManifest = oldManifest;
ProfileManifest newManifest;
auto matchingElementNames = getMatchingElementNames(oldManifest);
for (auto & [name, element] : oldManifest.elements) {
if (!matches(*store, name, element, matchers)) {
newManifest.elements.insert_or_assign(name, std::move(element));
} else {
notice("removing '%s'", element.identifier());
}
if (matchingElementNames.empty()) {
warn ("No packages to remove. Use 'nix profile list' to see the current profile.");
return;
}
for (auto & name : matchingElementNames) {
auto & element = oldManifest.elements[name];
notice("removing '%s'", element.identifier());
newManifest.elements.erase(name);
}
auto removedCount = oldManifest.elements.size() - newManifest.elements.size();
@ -564,16 +665,6 @@ struct CmdProfileRemove : virtual EvalCommand, MixDefaultProfile, MixProfileElem
removedCount,
newManifest.elements.size());
if (removedCount == 0) {
for (auto matcher: matchers) {
if (const Path * path = std::get_if<Path>(&matcher)) {
warn("'%s' does not match any paths", *path);
} else if (const RegexPattern * regex = std::get_if<RegexPattern>(&matcher)) {
warn("'%s' does not match any packages", regex->pattern);
}
}
warn ("Use 'nix profile list' to see the current profile.");
}
updateProfile(newManifest.build(store));
}
};
@ -596,20 +687,20 @@ struct CmdProfileUpgrade : virtual SourceExprCommand, MixDefaultProfile, MixProf
{
ProfileManifest manifest(*getEvalState(), *profile);
auto matchers = getMatchers(store);
Installables installables;
std::vector<ProfileElement *> elems;
auto matchedCount = 0;
auto upgradedCount = 0;
for (auto & [name, element] : manifest.elements) {
if (!matches(*store, name, element, matchers)) {
continue;
}
auto matchingElementNames = getMatchingElementNames(manifest);
matchedCount++;
if (matchingElementNames.empty()) {
warn("No packages to upgrade. Use 'nix profile list' to see the current profile.");
return;
}
for (auto & name : matchingElementNames) {
auto & element = manifest.elements[name];
if (!element.source) {
warn(
@ -648,7 +739,9 @@ struct CmdProfileUpgrade : virtual SourceExprCommand, MixDefaultProfile, MixProf
assert(infop);
auto & info = *infop;
if (element.source->lockedRef == info.flake.lockedRef) continue;
if (info.flake.lockedRef.input.isLocked()
&& element.source->lockedRef == info.flake.lockedRef)
continue;
printInfo("upgrading '%s' from flake '%s' to '%s'",
element.source->attrPath, element.source->lockedRef, info.flake.lockedRef);
@ -665,18 +758,8 @@ struct CmdProfileUpgrade : virtual SourceExprCommand, MixDefaultProfile, MixProf
}
if (upgradedCount == 0) {
if (matchedCount == 0) {
for (auto & matcher : matchers) {
if (const Path * path = std::get_if<Path>(&matcher)) {
warn("'%s' does not match any paths", *path);
} else if (const RegexPattern * regex = std::get_if<RegexPattern>(&matcher)) {
warn("'%s' does not match any packages", regex->pattern);
}
}
} else {
warn("Found some packages but none of them could be upgraded.");
}
warn ("Use 'nix profile list' to see the current profile.");
warn("Found some packages but none of them could be upgraded.");
return;
}
auto builtPaths = builtPathsPerInstallable(

View file

@ -15,10 +15,10 @@ R""(
user flake:nixpkgs github:NixOS/nixpkgs/925b70cd964ceaedee26fde9b19cc4c4f081196a
```
and `nix flake info` will say:
and `nix flake metadata` will say:
```console
# nix flake info nixpkgs
# nix flake metadata nixpkgs
Resolved URL: github:NixOS/nixpkgs/925b70cd964ceaedee26fde9b19cc4c4f081196a
Locked URL: github:NixOS/nixpkgs/925b70cd964ceaedee26fde9b19cc4c4f081196a

View file

@ -188,7 +188,9 @@ struct CmdRegistryPin : RegistryCommand, EvalCommand
auto ref = parseFlakeRef(url);
auto lockedRef = parseFlakeRef(locked);
registry->remove(ref.input);
auto [tree, resolved] = lockedRef.resolve(store).input.fetch(store);
auto resolved = lockedRef.resolve(store).input.getAccessor(store).second;
if (!resolved.isLocked())
warn("flake '%s' is not locked", resolved.to_string());
fetchers::Attrs extraAttrs;
if (ref.subdir != "") extraAttrs["dir"] = ref.subdir;
registry->add(ref.input, resolved, extraAttrs);