Mirror of https://github.com/NixOS/nix, synced 2025-07-10 21:23:56 +02:00
Commit 31881d651a: Merge remote-tracking branch 'upstream/master' into overlayfs-store
378 changed files with 9552 additions and 4741 deletions
@@ -137,11 +137,8 @@ static int main_build_remote(int argc, char * * argv)
         for (auto & m : machines) {
             debug("considering building on remote machine '%s'", m.storeUri);

-            if (m.enabled
-                && (neededSystem == "builtin"
-                    || std::find(m.systemTypes.begin(),
-                        m.systemTypes.end(),
-                        neededSystem) != m.systemTypes.end()) &&
+            if (m.enabled &&
+                m.systemSupported(neededSystem) &&
                 m.allSupported(requiredFeatures) &&
                 m.mandatoryMet(requiredFeatures))
             {
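The new condition delegates the platform check to helper predicates on the machine type. As a rough sketch of what systemSupported could look like, assuming the machine keeps its platforms in a set of strings (field and container names here are illustrative, not taken from this diff):

    #include <set>
    #include <string>

    struct Machine
    {
        bool enabled = true;
        std::set<std::string> systemTypes;   // assumed container; the real type may differ

        // "builtin" derivations can run on any machine; otherwise the requested
        // platform must be one of the machine's advertised system types.
        bool systemSupported(const std::string & system) const
        {
            return system == "builtin" || systemTypes.count(system) > 0;
        }
    };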
@@ -214,7 +211,7 @@ static int main_build_remote(int argc, char * * argv)

         for (auto & m : machines)
             error
-                % concatStringsSep<std::vector<std::string>>(", ", m.systemTypes)
+                % concatStringsSep<StringSet>(", ", m.systemTypes)
                 % m.maxJobs
                 % concatStringsSep<StringSet>(", ", m.supportedFeatures)
                 % concatStringsSep<StringSet>(", ", m.mandatoryFeatures);
@@ -12,9 +12,9 @@ namespace nix {
     bool MY_TYPE ::operator COMPARATOR (const MY_TYPE & other) const \
     { \
         const MY_TYPE* me = this; \
-        auto fields1 = std::make_tuple<const CHILD_TYPE &, const FIELD_TYPE &>(*me->drvPath, me->FIELD); \
+        auto fields1 = std::tie(*me->drvPath, me->FIELD); \
         me = &other; \
-        auto fields2 = std::make_tuple<const CHILD_TYPE &, const FIELD_TYPE &>(*me->drvPath, me->FIELD); \
+        auto fields2 = std::tie(*me->drvPath, me->FIELD); \
         return fields1 COMPARATOR fields2; \
     }
 #define CMP(CHILD_TYPE, MY_TYPE, FIELD) \
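The switch from std::make_tuple with explicit reference types to std::tie is worth spelling out: std::tie builds a tuple of lvalue references, so comparing two such tuples compares the referenced fields lexicographically without copying them. A minimal self-contained sketch of the same idiom (the type and field names are invented for illustration, not taken from this diff):

    #include <string>
    #include <tuple>

    struct Key
    {
        std::string drvPath;   // illustrative fields, not the real members
        std::string output;

        bool operator<(const Key & other) const
        {
            // std::tie creates a tuple of references; tuple::operator< then
            // compares field by field, so no string copies are made.
            return std::tie(drvPath, output) < std::tie(other.drvPath, other.output);
        }
    };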
@@ -1,5 +1,6 @@
 #include "editor-for.hh"
+#include "environment-variables.hh"
 #include "source-path.hh"

 namespace nix {

@@ -2,7 +2,7 @@
 ///@file

 #include "types.hh"
-#include "input-accessor.hh"
+#include "source-path.hh"

 namespace nix {

@@ -58,22 +58,22 @@ DerivedPathsWithInfo InstallableAttrPath::toDerivedPaths()

     Bindings & autoArgs = *cmd.getAutoArgs(*state);

-    DrvInfos drvInfos;
-    getDerivations(*state, *v, "", autoArgs, drvInfos, false);
+    PackageInfos packageInfos;
+    getDerivations(*state, *v, "", autoArgs, packageInfos, false);

     // Backward compatibility hack: group results by drvPath. This
     // helps keep .all output together.
     std::map<StorePath, OutputsSpec> byDrvPath;

-    for (auto & drvInfo : drvInfos) {
-        auto drvPath = drvInfo.queryDrvPath();
+    for (auto & packageInfo : packageInfos) {
+        auto drvPath = packageInfo.queryDrvPath();
         if (!drvPath)
             throw Error("'%s' is not a derivation", what());

         auto newOutputs = std::visit(overloaded {
             [&](const ExtendedOutputsSpec::Default & d) -> OutputsSpec {
                 std::set<std::string> outputsToInstall;
-                for (auto & output : drvInfo.queryOutputs(false, true))
+                for (auto & output : packageInfo.queryOutputs(false, true))
                     outputsToInstall.insert(output.first);
                 return OutputsSpec::Names { std::move(outputsToInstall) };
             },
@@ -52,7 +52,7 @@ Value * InstallableFlake::getFlakeOutputs(EvalState & state, const flake::Locked
     auto aOutputs = vFlake->attrs->get(state.symbols.create("outputs"));
     assert(aOutputs);

-    state.forceValue(*aOutputs->value, [&]() { return aOutputs->value->determinePos(noPos); });
+    state.forceValue(*aOutputs->value, aOutputs->value->determinePos(noPos));

     return aOutputs->value;
 }
@@ -1,5 +1,6 @@
 #include "installable-value.hh"
 #include "eval-cache.hh"
+#include "fetch-to-store.hh"

 namespace nix {

@@ -44,7 +45,7 @@ ref<InstallableValue> InstallableValue::require(ref<Installable> installable)
 std::optional<DerivedPathWithInfo> InstallableValue::trySinglePathToDerivedPaths(Value & v, const PosIdx pos, std::string_view errorCtx)
 {
     if (v.type() == nPath) {
-        auto storePath = v.path().fetchToStore(state->store);
+        auto storePath = fetchToStore(*state->store, v.path());
         return {{
             .path = DerivedPath::Opaque {
                 .path = std::move(storePath),
@@ -6,7 +6,7 @@

 namespace nix {

-struct DrvInfo;
+struct PackageInfo;
 struct SourceExprCommand;

 namespace eval_cache { class EvalCache; class AttrCursor; }
@ -715,7 +715,7 @@ BuiltPaths Installable::toBuiltPaths(
|
|||
}
|
||||
}
|
||||
|
||||
StorePathSet Installable::toStorePaths(
|
||||
StorePathSet Installable::toStorePathSet(
|
||||
ref<Store> evalStore,
|
||||
ref<Store> store,
|
||||
Realise mode, OperateOn operateOn,
|
||||
|
@ -729,13 +729,27 @@ StorePathSet Installable::toStorePaths(
|
|||
return outPaths;
|
||||
}
|
||||
|
||||
StorePaths Installable::toStorePaths(
|
||||
ref<Store> evalStore,
|
||||
ref<Store> store,
|
||||
Realise mode, OperateOn operateOn,
|
||||
const Installables & installables)
|
||||
{
|
||||
StorePaths outPaths;
|
||||
for (auto & path : toBuiltPaths(evalStore, store, mode, operateOn, installables)) {
|
||||
auto thisOutPaths = path.outPaths();
|
||||
outPaths.insert(outPaths.end(), thisOutPaths.begin(), thisOutPaths.end());
|
||||
}
|
||||
return outPaths;
|
||||
}
|
||||
|
||||
StorePath Installable::toStorePath(
|
||||
ref<Store> evalStore,
|
||||
ref<Store> store,
|
||||
Realise mode, OperateOn operateOn,
|
||||
ref<Installable> installable)
|
||||
{
|
||||
auto paths = toStorePaths(evalStore, store, mode, operateOn, {installable});
|
||||
auto paths = toStorePathSet(evalStore, store, mode, operateOn, {installable});
|
||||
|
||||
if (paths.size() != 1)
|
||||
throw Error("argument '%s' should evaluate to one store path", installable->what());
|
||||
|
|
|
@ -12,7 +12,7 @@
|
|||
|
||||
namespace nix {
|
||||
|
||||
struct DrvInfo;
|
||||
struct PackageInfo;
|
||||
|
||||
enum class Realise {
|
||||
/**
|
||||
|
@ -165,7 +165,14 @@ struct Installable
|
|||
const Installables & installables,
|
||||
BuildMode bMode = bmNormal);
|
||||
|
||||
static std::set<StorePath> toStorePaths(
|
||||
static std::set<StorePath> toStorePathSet(
|
||||
ref<Store> evalStore,
|
||||
ref<Store> store,
|
||||
Realise mode,
|
||||
OperateOn operateOn,
|
||||
const Installables & installables);
|
||||
|
||||
static std::vector<StorePath> toStorePaths(
|
||||
ref<Store> evalStore,
|
||||
ref<Store> store,
|
||||
Realise mode,
|
||||
|
|
|
@ -8,7 +8,7 @@ libcmd_SOURCES := $(wildcard $(d)/*.cc)
|
|||
|
||||
libcmd_CXXFLAGS += -I src/libutil -I src/libstore -I src/libexpr -I src/libmain -I src/libfetchers
|
||||
|
||||
libcmd_LDFLAGS = $(EDITLINE_LIBS) $(LOWDOWN_LIBS) -pthread
|
||||
libcmd_LDFLAGS = $(EDITLINE_LIBS) $(LOWDOWN_LIBS) $(THREAD_LDFLAGS)
|
||||
|
||||
libcmd_LIBS = libstore libutil libexpr libmain libfetchers
|
||||
|
||||
|
|
|
@ -4,12 +4,15 @@
|
|||
#include "terminal.hh"
|
||||
|
||||
#include <sys/queue.h>
|
||||
#if HAVE_LOWDOWN
|
||||
#include <lowdown.h>
|
||||
#endif
|
||||
|
||||
namespace nix {
|
||||
|
||||
std::string renderMarkdownToTerminal(std::string_view markdown)
|
||||
{
|
||||
#if HAVE_LOWDOWN
|
||||
int windowWidth = getWindowSize().second;
|
||||
|
||||
struct lowdown_opts opts {
|
||||
|
@ -48,6 +51,9 @@ std::string renderMarkdownToTerminal(std::string_view markdown)
|
|||
throw Error("allocation error while rendering Markdown");
|
||||
|
||||
return filterANSIEscapes(std::string(buf->data, buf->size), !shouldANSI());
|
||||
#else
|
||||
return std::string(markdown);
|
||||
#endif
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
|
||||
#include <setjmp.h>
|
||||
|
||||
#ifdef READLINE
|
||||
#ifdef USE_READLINE
|
||||
#include <readline/history.h>
|
||||
#include <readline/readline.h>
|
||||
#else
|
||||
|
@ -93,9 +93,17 @@ struct NixRepl
|
|||
void evalString(std::string s, Value & v);
|
||||
void loadDebugTraceEnv(DebugTrace & dt);
|
||||
|
||||
typedef std::set<Value *> ValuesSeen;
|
||||
std::ostream & printValue(std::ostream & str, Value & v, unsigned int maxDepth);
|
||||
std::ostream & printValue(std::ostream & str, Value & v, unsigned int maxDepth, ValuesSeen & seen);
|
||||
void printValue(std::ostream & str,
|
||||
Value & v,
|
||||
unsigned int maxDepth = std::numeric_limits<unsigned int>::max())
|
||||
{
|
||||
::nix::printValue(*state, str, v, PrintOptions {
|
||||
.ansiColors = true,
|
||||
.force = true,
|
||||
.derivationPaths = true,
|
||||
.maxDepth = maxDepth
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
std::string removeWhitespace(std::string s)
|
||||
|
@ -112,7 +120,7 @@ NixRepl::NixRepl(const SearchPath & searchPath, nix::ref<Store> store, ref<EvalS
|
|||
: AbstractNixRepl(state)
|
||||
, debugTraceIndex(0)
|
||||
, getValues(getValues)
|
||||
, staticEnv(new StaticEnv(false, state->staticBaseEnv.get()))
|
||||
, staticEnv(new StaticEnv(nullptr, state->staticBaseEnv.get()))
|
||||
, historyFile(getDataDir() + "/nix/repl-history")
|
||||
{
|
||||
}
|
||||
|
@ -221,7 +229,7 @@ static std::ostream & showDebugTrace(std::ostream & out, const PosTable & positi
|
|||
// prefer direct pos, but if noPos then try the expr.
|
||||
auto pos = dt.pos
|
||||
? dt.pos
|
||||
: static_cast<std::shared_ptr<AbstractPos>>(positions[dt.expr.getPos() ? dt.expr.getPos() : noPos]);
|
||||
: positions[dt.expr.getPos() ? dt.expr.getPos() : noPos];
|
||||
|
||||
if (pos) {
|
||||
out << pos;
|
||||
|
@ -246,17 +254,17 @@ void NixRepl::mainLoop()
|
|||
rl_readline_name = "nix-repl";
|
||||
try {
|
||||
createDirs(dirOf(historyFile));
|
||||
} catch (SysError & e) {
|
||||
} catch (SystemError & e) {
|
||||
logWarning(e.info());
|
||||
}
|
||||
#ifndef READLINE
|
||||
#ifndef USE_READLINE
|
||||
el_hist_size = 1000;
|
||||
#endif
|
||||
read_history(historyFile.c_str());
|
||||
auto oldRepl = curRepl;
|
||||
curRepl = this;
|
||||
Finally restoreRepl([&] { curRepl = oldRepl; });
|
||||
#ifndef READLINE
|
||||
#ifndef USE_READLINE
|
||||
rl_set_complete_func(completionCallback);
|
||||
rl_set_list_possib_func(listPossibleCallback);
|
||||
#endif
|
||||
|
@ -442,10 +450,10 @@ static bool isVarName(std::string_view s)
|
|||
|
||||
|
||||
StorePath NixRepl::getDerivationPath(Value & v) {
|
||||
auto drvInfo = getDerivation(*state, v, false);
|
||||
if (!drvInfo)
|
||||
auto packageInfo = getDerivation(*state, v, false);
|
||||
if (!packageInfo)
|
||||
throw Error("expression does not evaluate to a derivation, so I can't build it");
|
||||
auto drvPath = drvInfo->queryDrvPath();
|
||||
auto drvPath = packageInfo->queryDrvPath();
|
||||
if (!drvPath)
|
||||
throw Error("expression did not evaluate to a valid derivation (no 'drvPath' attribute)");
|
||||
if (!state->store->isValidPath(*drvPath))
|
||||
|
@ -708,7 +716,8 @@ bool NixRepl::processLine(std::string line)
|
|||
else if (command == ":p" || command == ":print") {
|
||||
Value v;
|
||||
evalString(arg, v);
|
||||
printValue(std::cout, v, 1000000000) << std::endl;
|
||||
printValue(std::cout, v);
|
||||
std::cout << std::endl;
|
||||
}
|
||||
|
||||
else if (command == ":q" || command == ":quit") {
|
||||
|
@ -770,7 +779,8 @@ bool NixRepl::processLine(std::string line)
|
|||
} else {
|
||||
Value v;
|
||||
evalString(line, v);
|
||||
printValue(std::cout, v, 1) << std::endl;
|
||||
printValue(std::cout, v, 1);
|
||||
std::cout << std::endl;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -888,145 +898,7 @@ void NixRepl::evalString(std::string s, Value & v)
|
|||
{
|
||||
Expr * e = parseString(s);
|
||||
e->eval(*state, *env, v);
|
||||
state->forceValue(v, [&]() { return v.determinePos(noPos); });
|
||||
}
|
||||
|
||||
|
||||
std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int maxDepth)
|
||||
{
|
||||
ValuesSeen seen;
|
||||
return printValue(str, v, maxDepth, seen);
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
// FIXME: lot of cut&paste from Nix's eval.cc.
|
||||
std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int maxDepth, ValuesSeen & seen)
|
||||
{
|
||||
str.flush();
|
||||
checkInterrupt();
|
||||
|
||||
state->forceValue(v, [&]() { return v.determinePos(noPos); });
|
||||
|
||||
switch (v.type()) {
|
||||
|
||||
case nInt:
|
||||
str << ANSI_CYAN << v.integer << ANSI_NORMAL;
|
||||
break;
|
||||
|
||||
case nBool:
|
||||
str << ANSI_CYAN;
|
||||
printLiteralBool(str, v.boolean);
|
||||
str << ANSI_NORMAL;
|
||||
break;
|
||||
|
||||
case nString:
|
||||
str << ANSI_WARNING;
|
||||
printLiteralString(str, v.string_view());
|
||||
str << ANSI_NORMAL;
|
||||
break;
|
||||
|
||||
case nPath:
|
||||
str << ANSI_GREEN << v.path().to_string() << ANSI_NORMAL; // !!! escaping?
|
||||
break;
|
||||
|
||||
case nNull:
|
||||
str << ANSI_CYAN "null" ANSI_NORMAL;
|
||||
break;
|
||||
|
||||
case nAttrs: {
|
||||
seen.insert(&v);
|
||||
|
||||
bool isDrv = state->isDerivation(v);
|
||||
|
||||
if (isDrv) {
|
||||
str << "«derivation ";
|
||||
Bindings::iterator i = v.attrs->find(state->sDrvPath);
|
||||
NixStringContext context;
|
||||
if (i != v.attrs->end())
|
||||
str << state->store->printStorePath(state->coerceToStorePath(i->pos, *i->value, context, "while evaluating the drvPath of a derivation"));
|
||||
else
|
||||
str << "???";
|
||||
str << "»";
|
||||
}
|
||||
|
||||
else if (maxDepth > 0) {
|
||||
str << "{ ";
|
||||
|
||||
typedef std::map<std::string, Value *> Sorted;
|
||||
Sorted sorted;
|
||||
for (auto & i : *v.attrs)
|
||||
sorted.emplace(state->symbols[i.name], i.value);
|
||||
|
||||
for (auto & i : sorted) {
|
||||
printAttributeName(str, i.first);
|
||||
str << " = ";
|
||||
if (seen.count(i.second))
|
||||
str << "«repeated»";
|
||||
else
|
||||
try {
|
||||
printValue(str, *i.second, maxDepth - 1, seen);
|
||||
} catch (AssertionError & e) {
|
||||
str << ANSI_RED "«error: " << e.msg() << "»" ANSI_NORMAL;
|
||||
}
|
||||
str << "; ";
|
||||
}
|
||||
|
||||
str << "}";
|
||||
} else
|
||||
str << "{ ... }";
|
||||
|
||||
break;
|
||||
}
|
||||
|
||||
case nList:
|
||||
seen.insert(&v);
|
||||
|
||||
str << "[ ";
|
||||
if (maxDepth > 0)
|
||||
for (auto elem : v.listItems()) {
|
||||
if (seen.count(elem))
|
||||
str << "«repeated»";
|
||||
else
|
||||
try {
|
||||
printValue(str, *elem, maxDepth - 1, seen);
|
||||
} catch (AssertionError & e) {
|
||||
str << ANSI_RED "«error: " << e.msg() << "»" ANSI_NORMAL;
|
||||
}
|
||||
str << " ";
|
||||
}
|
||||
else
|
||||
str << "... ";
|
||||
str << "]";
|
||||
break;
|
||||
|
||||
case nFunction:
|
||||
if (v.isLambda()) {
|
||||
std::ostringstream s;
|
||||
s << state->positions[v.lambda.fun->pos];
|
||||
str << ANSI_BLUE "«lambda @ " << filterANSIEscapes(s.str()) << "»" ANSI_NORMAL;
|
||||
} else if (v.isPrimOp()) {
|
||||
str << ANSI_MAGENTA "«primop»" ANSI_NORMAL;
|
||||
} else if (v.isPrimOpApp()) {
|
||||
str << ANSI_BLUE "«primop-app»" ANSI_NORMAL;
|
||||
} else {
|
||||
abort();
|
||||
}
|
||||
break;
|
||||
|
||||
case nFloat:
|
||||
str << v.fpoint;
|
||||
break;
|
||||
|
||||
case nThunk:
|
||||
case nExternal:
|
||||
default:
|
||||
str << ANSI_RED "«unknown»" ANSI_NORMAL;
|
||||
break;
|
||||
}
|
||||
|
||||
return str;
|
||||
state->forceValue(v, v.determinePos(noPos));
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
#pragma once
|
||||
///@file
|
||||
|
||||
#include "print.hh"
|
||||
#include "eval.hh"
|
||||
|
||||
namespace nix {
|
||||
|
@ -73,8 +74,6 @@ Env & EvalState::allocEnv(size_t size)
|
|||
#endif
|
||||
env = (Env *) allocBytes(sizeof(Env) + size * sizeof(Value *));
|
||||
|
||||
env->type = Env::Plain;
|
||||
|
||||
/* We assume that env->values has been cleared by the allocator; maybeThunk() and lookupVar fromWith expect this. */
|
||||
|
||||
return *env;
|
||||
|
@ -83,13 +82,6 @@ Env & EvalState::allocEnv(size_t size)
|
|||
|
||||
[[gnu::always_inline]]
|
||||
void EvalState::forceValue(Value & v, const PosIdx pos)
|
||||
{
|
||||
forceValue(v, [&]() { return pos; });
|
||||
}
|
||||
|
||||
|
||||
template<typename Callable>
|
||||
void EvalState::forceValue(Value & v, Callable getPos)
|
||||
{
|
||||
if (v.isThunk()) {
|
||||
Env * env = v.thunk.env;
|
||||
|
@ -100,15 +92,12 @@ void EvalState::forceValue(Value & v, Callable getPos)
|
|||
expr->eval(*this, *env, v);
|
||||
} catch (...) {
|
||||
v.mkThunk(env, expr);
|
||||
tryFixupBlackHolePos(v, pos);
|
||||
throw;
|
||||
}
|
||||
}
|
||||
else if (v.isApp()) {
|
||||
PosIdx pos = getPos();
|
||||
else if (v.isApp())
|
||||
callFunction(*v.app.left, *v.app.right, v, pos);
|
||||
}
|
||||
else if (v.isBlackhole())
|
||||
error("infinite recursion encountered").atPos(getPos()).template debugThrow<EvalError>();
|
||||
}
|
||||
|
||||
|
||||
|
@ -126,7 +115,10 @@ inline void EvalState::forceAttrs(Value & v, Callable getPos, std::string_view e
|
|||
PosIdx pos = getPos();
|
||||
forceValue(v, pos);
|
||||
if (v.type() != nAttrs) {
|
||||
error("value is %1% while a set was expected", showType(v)).withTrace(pos, errorCtx).debugThrow<TypeError>();
|
||||
error("expected a set but found %1%: %2%",
|
||||
showType(v),
|
||||
ValuePrinter(*this, v, errorPrintOptions))
|
||||
.withTrace(pos, errorCtx).debugThrow<TypeError>();
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -136,7 +128,10 @@ inline void EvalState::forceList(Value & v, const PosIdx pos, std::string_view e
|
|||
{
|
||||
forceValue(v, pos);
|
||||
if (!v.isList()) {
|
||||
error("value is %1% while a list was expected", showType(v)).withTrace(pos, errorCtx).debugThrow<TypeError>();
|
||||
error("expected a list but found %1%: %2%",
|
||||
showType(v),
|
||||
ValuePrinter(*this, v, errorPrintOptions))
|
||||
.withTrace(pos, errorCtx).debugThrow<TypeError>();
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -89,6 +89,12 @@ std::string EvalSettings::resolvePseudoUrl(std::string_view url)
|
|||
return std::string(url);
|
||||
}
|
||||
|
||||
const std::string & EvalSettings::getCurrentSystem()
|
||||
{
|
||||
const auto & evalSystem = currentSystem.get();
|
||||
return evalSystem != "" ? evalSystem : settings.thisSystem.get();
|
||||
}
|
||||
|
||||
EvalSettings evalSettings;
|
||||
|
||||
static GlobalConfig::Register rEvalSettings(&evalSettings);
|
||||
|
|
|
@ -27,6 +27,26 @@ struct EvalSettings : Config
|
|||
[`builtins.nixPath`](@docroot@/language/builtin-constants.md#builtins-nixPath).
|
||||
)"};
|
||||
|
||||
Setting<std::string> currentSystem{
|
||||
this, "", "eval-system",
|
||||
R"(
|
||||
This option defines
|
||||
[`builtins.currentSystem`](@docroot@/language/builtin-constants.md#builtins-currentSystem)
|
||||
in the Nix language if it is set as a non-empty string.
|
||||
Otherwise, if it is defined as the empty string (the default), the value of the
|
||||
[`system` ](#conf-system)
|
||||
configuration setting is used instead.
|
||||
|
||||
Unlike `system`, this setting does not change what kind of derivations can be built locally.
|
||||
This is useful for evaluating Nix code on one system to produce derivations to be built on another type of system.
|
||||
)"};
|
||||
|
||||
/**
|
||||
* Implements the `eval-system` vs `system` defaulting logic
|
||||
* described for `eval-system`.
|
||||
*/
|
||||
const std::string & getCurrentSystem();
|
||||
|
||||
Setting<bool> restrictEval{
|
||||
this, false, "restrict-eval",
|
||||
R"(
|
||||
|
@ -68,6 +88,11 @@ struct EvalSettings : Config
|
|||
evaluation mode. For example, when set to
|
||||
`https://github.com/NixOS`, builtin functions such as `fetchGit` are
|
||||
allowed to access `https://github.com/NixOS/patchelf.git`.
|
||||
|
||||
Access is granted when
|
||||
- the URI is equal to the prefix,
|
||||
- or the URI is a subpath of the prefix,
|
||||
- or the prefix is a URI scheme ended by a colon `:` and the URI has the same scheme.
|
||||
)"};
|
||||
|
||||
Setting<bool> traceFunctionCalls{this, false, "trace-function-calls",
|
||||
|
@ -99,6 +124,9 @@ struct EvalSettings : Config
|
|||
|
||||
Setting<bool> traceVerbose{this, false, "trace-verbose",
|
||||
"Whether `builtins.traceVerbose` should trace its first argument when evaluated."};
|
||||
|
||||
Setting<unsigned int> maxCallDepth{this, 10000, "max-call-depth",
|
||||
"The maximum function call depth to allow before erroring."};
|
||||
};
|
||||
|
||||
extern EvalSettings evalSettings;
|
||||
|
|
|
@ -2,6 +2,7 @@
|
|||
#include "eval-settings.hh"
|
||||
#include "hash.hh"
|
||||
#include "primops.hh"
|
||||
#include "print-options.hh"
|
||||
#include "types.hh"
|
||||
#include "util.hh"
|
||||
#include "store-api.hh"
|
||||
|
@ -18,6 +19,11 @@
|
|||
#include "memory-input-accessor.hh"
|
||||
#include "signals.hh"
|
||||
#include "gc-small-vector.hh"
|
||||
#include "url.hh"
|
||||
#include "fetch-to-store.hh"
|
||||
#include "tarball.hh"
|
||||
#include "flake/flakeref.hh"
|
||||
#include "parser-tab.hh"
|
||||
|
||||
#include <algorithm>
|
||||
#include <chrono>
|
||||
|
@ -27,9 +33,9 @@
|
|||
#include <unistd.h>
|
||||
#include <sys/time.h>
|
||||
#include <sys/resource.h>
|
||||
#include <iostream>
|
||||
#include <fstream>
|
||||
#include <functional>
|
||||
#include <iostream>
|
||||
|
||||
#include <sys/resource.h>
|
||||
#include <nlohmann/json.hpp>
|
||||
|
@ -103,116 +109,23 @@ RootValue allocRootValue(Value * v)
|
|||
#endif
|
||||
}
|
||||
|
||||
void Value::print(const SymbolTable &symbols, std::ostream &str,
|
||||
std::set<const void *> *seen, int depth) const
|
||||
|
||||
{
|
||||
checkInterrupt();
|
||||
|
||||
if (depth <= 0) {
|
||||
str << "«too deep»";
|
||||
return;
|
||||
}
|
||||
switch (internalType) {
|
||||
case tInt:
|
||||
str << integer;
|
||||
break;
|
||||
case tBool:
|
||||
printLiteralBool(str, boolean);
|
||||
break;
|
||||
case tString:
|
||||
printLiteralString(str, string_view());
|
||||
break;
|
||||
case tPath:
|
||||
str << path().to_string(); // !!! escaping?
|
||||
break;
|
||||
case tNull:
|
||||
str << "null";
|
||||
break;
|
||||
case tAttrs: {
|
||||
if (seen && !attrs->empty() && !seen->insert(attrs).second)
|
||||
str << "«repeated»";
|
||||
else {
|
||||
str << "{ ";
|
||||
for (auto & i : attrs->lexicographicOrder(symbols)) {
|
||||
str << symbols[i->name] << " = ";
|
||||
i->value->print(symbols, str, seen, depth - 1);
|
||||
str << "; ";
|
||||
}
|
||||
str << "}";
|
||||
}
|
||||
break;
|
||||
}
|
||||
case tList1:
|
||||
case tList2:
|
||||
case tListN:
|
||||
if (seen && listSize() && !seen->insert(listElems()).second)
|
||||
str << "«repeated»";
|
||||
else {
|
||||
str << "[ ";
|
||||
for (auto v2 : listItems()) {
|
||||
if (v2)
|
||||
v2->print(symbols, str, seen, depth - 1);
|
||||
else
|
||||
str << "(nullptr)";
|
||||
str << " ";
|
||||
}
|
||||
str << "]";
|
||||
}
|
||||
break;
|
||||
case tThunk:
|
||||
case tApp:
|
||||
str << "<CODE>";
|
||||
break;
|
||||
case tLambda:
|
||||
str << "<LAMBDA>";
|
||||
break;
|
||||
case tPrimOp:
|
||||
str << "<PRIMOP>";
|
||||
break;
|
||||
case tPrimOpApp:
|
||||
str << "<PRIMOP-APP>";
|
||||
break;
|
||||
case tExternal:
|
||||
str << *external;
|
||||
break;
|
||||
case tFloat:
|
||||
str << fpoint;
|
||||
break;
|
||||
case tBlackhole:
|
||||
// Although we know for sure that it's going to be an infinite recursion
|
||||
// when this value is accessed _in the current context_, it's likely
|
||||
// that the user will misinterpret a simpler «infinite recursion» output
|
||||
// as a definitive statement about the value, while in fact it may be
|
||||
// a valid value after `builtins.trace` and perhaps some other steps
|
||||
// have completed.
|
||||
str << "«potential infinite recursion»";
|
||||
break;
|
||||
default:
|
||||
printError("Nix evaluator internal error: Value::print(): invalid value type %1%", internalType);
|
||||
abort();
|
||||
}
|
||||
}
|
||||
|
||||
void Value::print(const SymbolTable &symbols, std::ostream &str,
|
||||
bool showRepeated, int depth) const {
|
||||
std::set<const void *> seen;
|
||||
print(symbols, str, showRepeated ? nullptr : &seen, depth);
|
||||
}
|
||||
|
||||
// Pretty print types for assertion errors
|
||||
std::ostream & operator << (std::ostream & os, const ValueType t) {
|
||||
os << showType(t);
|
||||
return os;
|
||||
}
|
||||
|
||||
std::string printValue(const EvalState & state, const Value & v)
|
||||
std::string printValue(EvalState & state, Value & v)
|
||||
{
|
||||
std::ostringstream out;
|
||||
v.print(state.symbols, out);
|
||||
v.print(state, out);
|
||||
return out.str();
|
||||
}
|
||||
|
||||
void Value::print(EvalState & state, std::ostream & str, PrintOptions options)
|
||||
{
|
||||
printValue(state, str, *this, options);
|
||||
}
|
||||
|
||||
const Value * getPrimOp(const Value &v) {
|
||||
const Value * primOp = &v;
|
||||
|
@ -255,9 +168,8 @@ std::string showType(const Value & v)
|
|||
case tPrimOpApp:
|
||||
return fmt("the partially applied built-in function '%s'", std::string(getPrimOp(v)->primOp->name));
|
||||
case tExternal: return v.external->showType();
|
||||
case tThunk: return "a thunk";
|
||||
case tThunk: return v.isBlackhole() ? "a black hole" : "a thunk";
|
||||
case tApp: return "a function application";
|
||||
case tBlackhole: return "a black hole";
|
||||
default:
|
||||
return std::string(showType(v.type()));
|
||||
}
|
||||
|
@ -508,6 +420,16 @@ EvalState::EvalState(
|
|||
, sPath(symbols.create("path"))
|
||||
, sPrefix(symbols.create("prefix"))
|
||||
, sOutputSpecified(symbols.create("outputSpecified"))
|
||||
, exprSymbols{
|
||||
.sub = symbols.create("__sub"),
|
||||
.lessThan = symbols.create("__lessThan"),
|
||||
.mul = symbols.create("__mul"),
|
||||
.div = symbols.create("__div"),
|
||||
.or_ = symbols.create("or"),
|
||||
.findFile = symbols.create("__findFile"),
|
||||
.nixPath = symbols.create("__nixPath"),
|
||||
.body = symbols.create("body"),
|
||||
}
|
||||
, repair(NoRepair)
|
||||
, emptyBindings(0)
|
||||
, rootFS(
|
||||
|
@ -542,7 +464,7 @@ EvalState::EvalState(
|
|||
, env1AllocCache(std::allocate_shared<void *>(traceable_allocator<void *>(), nullptr))
|
||||
#endif
|
||||
, baseEnv(allocEnv(128))
|
||||
, staticBaseEnv{std::make_shared<StaticEnv>(false, nullptr)}
|
||||
, staticBaseEnv{std::make_shared<StaticEnv>(nullptr, nullptr)}
|
||||
{
|
||||
corepkgsFS->setPathDisplay("<nix", ">");
|
||||
internalFS->setPathDisplay("«nix-internal»", "");
|
||||
|
@ -553,6 +475,8 @@ EvalState::EvalState(
|
|||
|
||||
static_assert(sizeof(Env) <= 16, "environment must be <= 16 bytes");
|
||||
|
||||
vEmptyList.mkList(0);
|
||||
|
||||
/* Initialise the Nix expression search path. */
|
||||
if (!evalSettings.pureEval) {
|
||||
for (auto & i : _searchPath.elements)
|
||||
|
@ -599,21 +523,45 @@ void EvalState::allowAndSetStorePathString(const StorePath & storePath, Value &
|
|||
mkStorePathString(storePath, v);
|
||||
}
|
||||
|
||||
inline static bool isJustSchemePrefix(std::string_view prefix)
|
||||
{
|
||||
return
|
||||
!prefix.empty()
|
||||
&& prefix[prefix.size() - 1] == ':'
|
||||
&& isValidSchemeName(prefix.substr(0, prefix.size() - 1));
|
||||
}
|
||||
|
||||
bool isAllowedURI(std::string_view uri, const Strings & allowedUris)
|
||||
{
|
||||
/* 'uri' should be equal to a prefix, or in a subdirectory of a
|
||||
prefix. Thus, the prefix https://github.co does not permit
|
||||
access to https://github.com. */
|
||||
for (auto & prefix : allowedUris) {
|
||||
if (uri == prefix
|
||||
// Allow access to subdirectories of the prefix.
|
||||
|| (uri.size() > prefix.size()
|
||||
&& prefix.size() > 0
|
||||
&& hasPrefix(uri, prefix)
|
||||
&& (
|
||||
// Allow access to subdirectories of the prefix.
|
||||
prefix[prefix.size() - 1] == '/'
|
||||
|| uri[prefix.size()] == '/'
|
||||
|
||||
// Allow access to whole schemes
|
||||
|| isJustSchemePrefix(prefix)
|
||||
)
|
||||
))
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
void EvalState::checkURI(const std::string & uri)
|
||||
{
|
||||
if (!evalSettings.restrictEval) return;
|
||||
|
||||
/* 'uri' should be equal to a prefix, or in a subdirectory of a
|
||||
prefix. Thus, the prefix https://github.co does not permit
|
||||
access to https://github.com. Note: this allows 'http://' and
|
||||
'https://' as prefixes for any http/https URI. */
|
||||
for (auto & prefix : evalSettings.allowedUris.get())
|
||||
if (uri == prefix ||
|
||||
(uri.size() > prefix.size()
|
||||
&& prefix.size() > 0
|
||||
&& hasPrefix(uri, prefix)
|
||||
&& (prefix[prefix.size() - 1] == '/' || uri[prefix.size()] == '/')))
|
||||
return;
|
||||
if (isAllowedURI(uri, evalSettings.allowedUris.get())) return;
|
||||
|
||||
/* If the URI is a path, then check it against allowedPaths as
|
||||
well. */
|
||||
|
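The checkURI hunk above now defers to the new isAllowedURI helper, whose one addition over the old inline loop is that a prefix consisting of just a scheme (for example https:) grants access to every URI with that scheme. A simplified standalone restatement of the matching rules, leaving out the isValidSchemeName check and Nix's own string helpers:

    #include <string>
    #include <string_view>
    #include <vector>

    // Simplified: the real helper also validates that the text before ':' is a
    // syntactically valid scheme name.
    static bool isJustSchemePrefix(std::string_view prefix)
    {
        return !prefix.empty() && prefix.back() == ':';
    }

    bool isAllowedURI(std::string_view uri, const std::vector<std::string> & allowedUris)
    {
        for (auto & prefix : allowedUris) {
            if (uri == prefix) return true;                       // exact match
            if (uri.size() > prefix.size() && !prefix.empty()
                && uri.compare(0, prefix.size(), prefix) == 0
                && (prefix.back() == '/'                          // prefix names a directory
                    || uri[prefix.size()] == '/'                  // URI is a subpath of the prefix
                    || isJustSchemePrefix(prefix)))               // bare scheme, e.g. "https:"
                return true;
        }
        return false;
    }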
@ -682,6 +630,26 @@ void PrimOp::check()
|
|||
}
|
||||
|
||||
|
||||
std::ostream & operator<<(std::ostream & output, PrimOp & primOp)
|
||||
{
|
||||
output << "primop " << primOp.name;
|
||||
return output;
|
||||
}
|
||||
|
||||
|
||||
PrimOp * Value::primOpAppPrimOp() const
|
||||
{
|
||||
Value * left = primOpApp.left;
|
||||
while (left && !left->isPrimOp()) {
|
||||
left = left->primOpApp.left;
|
||||
}
|
||||
|
||||
if (!left)
|
||||
return nullptr;
|
||||
return left->primOp;
|
||||
}
|
||||
|
||||
|
||||
void Value::mkPrimOp(PrimOp * p)
|
||||
{
|
||||
p->check();
|
||||
|
@ -756,7 +724,7 @@ void printStaticEnvBindings(const SymbolTable & st, const StaticEnv & se)
|
|||
// just for the current level of Env, not the whole chain.
|
||||
void printWithBindings(const SymbolTable & st, const Env & env)
|
||||
{
|
||||
if (env.type == Env::HasWithAttrs) {
|
||||
if (!env.values[0]->isThunk()) {
|
||||
std::cout << "with: ";
|
||||
std::cout << ANSI_MAGENTA;
|
||||
Bindings::iterator j = env.values[0]->attrs->begin();
|
||||
|
@ -810,7 +778,7 @@ void mapStaticEnvBindings(const SymbolTable & st, const StaticEnv & se, const En
|
|||
if (env.up && se.up) {
|
||||
mapStaticEnvBindings(st, *se.up, *env.up, vm);
|
||||
|
||||
if (env.type == Env::HasWithAttrs) {
|
||||
if (!env.values[0]->isThunk()) {
|
||||
// add 'with' bindings.
|
||||
Bindings::iterator j = env.values[0]->attrs->begin();
|
||||
while (j != env.values[0]->attrs->end()) {
|
||||
|
@ -843,7 +811,7 @@ void EvalState::runDebugRepl(const Error * error, const Env & env, const Expr &
|
|||
? std::make_unique<DebugTraceStacker>(
|
||||
*this,
|
||||
DebugTrace {
|
||||
.pos = error->info().errPos ? error->info().errPos : static_cast<std::shared_ptr<AbstractPos>>(positions[expr.getPos()]),
|
||||
.pos = error->info().errPos ? error->info().errPos : positions[expr.getPos()],
|
||||
.expr = expr,
|
||||
.env = env,
|
||||
.hint = error->info().msg,
|
||||
|
@ -882,7 +850,7 @@ static std::unique_ptr<DebugTraceStacker> makeDebugTraceStacker(
|
|||
EvalState & state,
|
||||
Expr & expr,
|
||||
Env & env,
|
||||
std::shared_ptr<AbstractPos> && pos,
|
||||
std::shared_ptr<Pos> && pos,
|
||||
const char * s,
|
||||
const std::string & s2)
|
||||
{
|
||||
|
@ -948,22 +916,23 @@ inline Value * EvalState::lookupVar(Env * env, const ExprVar & var, bool noEval)
|
|||
|
||||
if (!var.fromWith) return env->values[var.displ];
|
||||
|
||||
// This early exit defeats the `maybeThunk` optimization for variables from `with`,
|
||||
// The added complexity of handling this appears to be similarly in cost, or
|
||||
// the cases where applicable were insignificant in the first place.
|
||||
if (noEval) return nullptr;
|
||||
|
||||
auto * fromWith = var.fromWith;
|
||||
while (1) {
|
||||
if (env->type == Env::HasWithExpr) {
|
||||
if (noEval) return 0;
|
||||
Value * v = allocValue();
|
||||
evalAttrs(*env->up, (Expr *) env->values[0], *v, noPos, "<borked>");
|
||||
env->values[0] = v;
|
||||
env->type = Env::HasWithAttrs;
|
||||
}
|
||||
forceAttrs(*env->values[0], fromWith->pos, "while evaluating the first subexpression of a with expression");
|
||||
Bindings::iterator j = env->values[0]->attrs->find(var.name);
|
||||
if (j != env->values[0]->attrs->end()) {
|
||||
if (countCalls) attrSelects[j->pos]++;
|
||||
return j->value;
|
||||
}
|
||||
if (!env->prevWith)
|
||||
if (!fromWith->parentWith)
|
||||
error("undefined variable '%1%'", symbols[var.name]).atPos(var.pos).withFrame(*env, var).debugThrow<UndefinedVarError>();
|
||||
for (size_t l = env->prevWith; l; --l, env = env->up) ;
|
||||
for (size_t l = fromWith->prevWith; l; --l, env = env->up) ;
|
||||
fromWith = fromWith->parentWith;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1159,7 +1128,7 @@ void EvalState::evalFile(const SourcePath & path, Value & v, bool mustBeTrivial)
|
|||
*this,
|
||||
*e,
|
||||
this->baseEnv,
|
||||
e->getPos() ? static_cast<std::shared_ptr<AbstractPos>>(positions[e->getPos()]) : nullptr,
|
||||
e->getPos() ? std::make_shared<Pos>(positions[e->getPos()]) : nullptr,
|
||||
"while evaluating the file '%1%':", resolvedPath.to_string())
|
||||
: nullptr;
|
||||
|
||||
|
@ -1198,7 +1167,10 @@ inline bool EvalState::evalBool(Env & env, Expr * e, const PosIdx pos, std::stri
|
|||
Value v;
|
||||
e->eval(*this, env, v);
|
||||
if (v.type() != nBool)
|
||||
error("value is %1% while a Boolean was expected", showType(v)).withFrame(env, *e).debugThrow<TypeError>();
|
||||
error("expected a Boolean but found %1%: %2%",
|
||||
showType(v),
|
||||
ValuePrinter(*this, v, errorPrintOptions))
|
||||
.withFrame(env, *e).debugThrow<TypeError>();
|
||||
return v.boolean;
|
||||
} catch (Error & e) {
|
||||
e.addTrace(positions[pos], errorCtx);
|
||||
|
@ -1212,7 +1184,10 @@ inline void EvalState::evalAttrs(Env & env, Expr * e, Value & v, const PosIdx po
|
|||
try {
|
||||
e->eval(*this, env, v);
|
||||
if (v.type() != nAttrs)
|
||||
error("value is %1% while a set was expected", showType(v)).withFrame(env, *e).debugThrow<TypeError>();
|
||||
error("expected a set but found %1%: %2%",
|
||||
showType(v),
|
||||
ValuePrinter(*this, v, errorPrintOptions))
|
||||
.withFrame(env, *e).debugThrow<TypeError>();
|
||||
} catch (Error & e) {
|
||||
e.addTrace(positions[pos], errorCtx);
|
||||
throw;
|
||||
|
@ -1359,6 +1334,15 @@ void ExprList::eval(EvalState & state, Env & env, Value & v)
|
|||
}
|
||||
|
||||
|
||||
Value * ExprList::maybeThunk(EvalState & state, Env & env)
|
||||
{
|
||||
if (elems.empty()) {
|
||||
return &state.vEmptyList;
|
||||
}
|
||||
return Expr::maybeThunk(state, env);
|
||||
}
|
||||
|
||||
|
||||
void ExprVar::eval(EvalState & state, Env & env, Value & v)
|
||||
{
|
||||
Value * v2 = state.lookupVar(&env, *this, false);
|
||||
|
@ -1480,9 +1464,27 @@ void ExprLambda::eval(EvalState & state, Env & env, Value & v)
|
|||
v.mkLambda(&env, this);
|
||||
}
|
||||
|
||||
namespace {
|
||||
/** Increments a count on construction and decrements on destruction.
|
||||
*/
|
||||
class CallDepth {
|
||||
size_t & count;
|
||||
public:
|
||||
CallDepth(size_t & count) : count(count) {
|
||||
++count;
|
||||
}
|
||||
~CallDepth() {
|
||||
--count;
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value & vRes, const PosIdx pos)
|
||||
{
|
||||
if (callDepth > evalSettings.maxCallDepth)
|
||||
error("stack overflow; max-call-depth exceeded").atPos(pos).template debugThrow<EvalError>();
|
||||
CallDepth _level(callDepth);
|
||||
|
||||
auto trace = evalSettings.traceFunctionCalls
|
||||
? std::make_unique<FunctionCallTrace>(positions[pos])
|
||||
: nullptr;
|
||||
|
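The CallDepth guard introduced above is plain RAII: the constructor bumps the shared counter when callFunction is entered and the destructor restores it on every exit path, including exceptions, so the depth cannot drift. A minimal standalone illustration of the same pattern (the limit of 100 is arbitrary for the example, not Nix's max-call-depth default):

    #include <cstddef>
    #include <stdexcept>

    // Increments a counter for the lifetime of a scope; the destructor runs on
    // normal return and on stack unwinding alike.
    class CallDepth {
        size_t & count;
    public:
        explicit CallDepth(size_t & count) : count(count) { ++count; }
        ~CallDepth() { --count; }
    };

    static size_t depth = 0;

    void recurse(int n)
    {
        if (depth > 100)                       // arbitrary limit for the example
            throw std::runtime_error("stack overflow; max depth exceeded");
        CallDepth guard(depth);                // released automatically on unwind
        if (n > 0) recurse(n - 1);
    }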
@ -1621,15 +1623,15 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value &
|
|||
return;
|
||||
} else {
|
||||
/* We have all the arguments, so call the primop. */
|
||||
auto name = vCur.primOp->name;
|
||||
auto * fn = vCur.primOp;
|
||||
|
||||
nrPrimOpCalls++;
|
||||
if (countCalls) primOpCalls[name]++;
|
||||
if (countCalls) primOpCalls[fn->name]++;
|
||||
|
||||
try {
|
||||
vCur.primOp->fun(*this, vCur.determinePos(noPos), args, vCur);
|
||||
fn->fun(*this, vCur.determinePos(noPos), args, vCur);
|
||||
} catch (Error & e) {
|
||||
addErrorTrace(e, pos, "while calling the '%1%' builtin", name);
|
||||
addErrorTrace(e, pos, "while calling the '%1%' builtin", fn->name);
|
||||
throw;
|
||||
}
|
||||
|
||||
|
@ -1666,18 +1668,18 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value &
|
|||
for (size_t i = 0; i < argsLeft; ++i)
|
||||
vArgs[argsDone + i] = args[i];
|
||||
|
||||
auto name = primOp->primOp->name;
|
||||
auto fn = primOp->primOp;
|
||||
nrPrimOpCalls++;
|
||||
if (countCalls) primOpCalls[name]++;
|
||||
if (countCalls) primOpCalls[fn->name]++;
|
||||
|
||||
try {
|
||||
// TODO:
|
||||
// 1. Unify this and above code. Heavily redundant.
|
||||
// 2. Create a fake env (arg1, arg2, etc.) and a fake expr (arg1: arg2: etc: builtins.name arg1 arg2 etc)
|
||||
// so the debugger allows to inspect the wrong parameters passed to the builtin.
|
||||
primOp->primOp->fun(*this, vCur.determinePos(noPos), vArgs, vCur);
|
||||
fn->fun(*this, vCur.determinePos(noPos), vArgs, vCur);
|
||||
} catch (Error & e) {
|
||||
addErrorTrace(e, pos, "while calling the '%1%' builtin", name);
|
||||
addErrorTrace(e, pos, "while calling the '%1%' builtin", fn->name);
|
||||
throw;
|
||||
}
|
||||
|
||||
|
@ -1703,7 +1705,11 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value &
|
|||
}
|
||||
|
||||
else
|
||||
error("attempt to call something which is not a function but %1%", showType(vCur)).atPos(pos).debugThrow<TypeError>();
|
||||
error("attempt to call something which is not a function but %1%: %2%",
|
||||
showType(vCur),
|
||||
ValuePrinter(*this, vCur, errorPrintOptions))
|
||||
.atPos(pos)
|
||||
.debugThrow<TypeError>();
|
||||
}
|
||||
|
||||
vRes = vCur;
|
||||
|
@ -1791,9 +1797,7 @@ void ExprWith::eval(EvalState & state, Env & env, Value & v)
|
|||
{
|
||||
Env & env2(state.allocEnv(1));
|
||||
env2.up = &env;
|
||||
env2.prevWith = prevWith;
|
||||
env2.type = Env::HasWithExpr;
|
||||
env2.values[0] = (Value *) attrs;
|
||||
env2.values[0] = attrs->maybeThunk(state, env);
|
||||
|
||||
body->eval(state, env2, v);
|
||||
}
|
||||
|
@ -2031,6 +2035,29 @@ void ExprPos::eval(EvalState & state, Env & env, Value & v)
|
|||
}
|
||||
|
||||
|
||||
void ExprBlackHole::eval(EvalState & state, Env & env, Value & v)
|
||||
{
|
||||
state.error("infinite recursion encountered")
|
||||
.debugThrow<InfiniteRecursionError>();
|
||||
}
|
||||
|
||||
// always force this to be separate, otherwise forceValue may inline it and take
|
||||
// a massive perf hit
|
||||
[[gnu::noinline]]
|
||||
void EvalState::tryFixupBlackHolePos(Value & v, PosIdx pos)
|
||||
{
|
||||
if (!v.isBlackhole())
|
||||
return;
|
||||
auto e = std::current_exception();
|
||||
try {
|
||||
std::rethrow_exception(e);
|
||||
} catch (InfiniteRecursionError & e) {
|
||||
e.err.errPos = positions[pos];
|
||||
} catch (...) {
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
void EvalState::forceValueDeep(Value & v)
|
||||
{
|
||||
std::set<const Value *> seen;
|
||||
|
@ -2040,7 +2067,7 @@ void EvalState::forceValueDeep(Value & v)
|
|||
recurse = [&](Value & v) {
|
||||
if (!seen.insert(&v).second) return;
|
||||
|
||||
forceValue(v, [&]() { return v.determinePos(noPos); });
|
||||
forceValue(v, v.determinePos(noPos));
|
||||
|
||||
if (v.type() == nAttrs) {
|
||||
for (auto & i : *v.attrs)
|
||||
|
@ -2073,7 +2100,10 @@ NixInt EvalState::forceInt(Value & v, const PosIdx pos, std::string_view errorCt
|
|||
try {
|
||||
forceValue(v, pos);
|
||||
if (v.type() != nInt)
|
||||
error("value is %1% while an integer was expected", showType(v)).debugThrow<TypeError>();
|
||||
error("expected an integer but found %1%: %2%",
|
||||
showType(v),
|
||||
ValuePrinter(*this, v, errorPrintOptions))
|
||||
.debugThrow<TypeError>();
|
||||
return v.integer;
|
||||
} catch (Error & e) {
|
||||
e.addTrace(positions[pos], errorCtx);
|
||||
|
@ -2089,7 +2119,10 @@ NixFloat EvalState::forceFloat(Value & v, const PosIdx pos, std::string_view err
|
|||
if (v.type() == nInt)
|
||||
return v.integer;
|
||||
else if (v.type() != nFloat)
|
||||
error("value is %1% while a float was expected", showType(v)).debugThrow<TypeError>();
|
||||
error("expected a float but found %1%: %2%",
|
||||
showType(v),
|
||||
ValuePrinter(*this, v, errorPrintOptions))
|
||||
.debugThrow<TypeError>();
|
||||
return v.fpoint;
|
||||
} catch (Error & e) {
|
||||
e.addTrace(positions[pos], errorCtx);
|
||||
|
@ -2103,7 +2136,10 @@ bool EvalState::forceBool(Value & v, const PosIdx pos, std::string_view errorCtx
|
|||
try {
|
||||
forceValue(v, pos);
|
||||
if (v.type() != nBool)
|
||||
error("value is %1% while a Boolean was expected", showType(v)).debugThrow<TypeError>();
|
||||
error("expected a Boolean but found %1%: %2%",
|
||||
showType(v),
|
||||
ValuePrinter(*this, v, errorPrintOptions))
|
||||
.debugThrow<TypeError>();
|
||||
return v.boolean;
|
||||
} catch (Error & e) {
|
||||
e.addTrace(positions[pos], errorCtx);
|
||||
|
@ -2123,7 +2159,10 @@ void EvalState::forceFunction(Value & v, const PosIdx pos, std::string_view erro
|
|||
try {
|
||||
forceValue(v, pos);
|
||||
if (v.type() != nFunction && !isFunctor(v))
|
||||
error("value is %1% while a function was expected", showType(v)).debugThrow<TypeError>();
|
||||
error("expected a function but found %1%: %2%",
|
||||
showType(v),
|
||||
ValuePrinter(*this, v, errorPrintOptions))
|
||||
.debugThrow<TypeError>();
|
||||
} catch (Error & e) {
|
||||
e.addTrace(positions[pos], errorCtx);
|
||||
throw;
|
||||
|
@ -2136,7 +2175,10 @@ std::string_view EvalState::forceString(Value & v, const PosIdx pos, std::string
|
|||
try {
|
||||
forceValue(v, pos);
|
||||
if (v.type() != nString)
|
||||
error("value is %1% while a string was expected", showType(v)).debugThrow<TypeError>();
|
||||
error("expected a string but found %1%: %2%",
|
||||
showType(v),
|
||||
ValuePrinter(*this, v, errorPrintOptions))
|
||||
.debugThrow<TypeError>();
|
||||
return v.string_view();
|
||||
} catch (Error & e) {
|
||||
e.addTrace(positions[pos], errorCtx);
|
||||
|
@ -2230,7 +2272,9 @@ BackedStringView EvalState::coerceToString(
|
|||
return std::move(*maybeString);
|
||||
auto i = v.attrs->find(sOutPath);
|
||||
if (i == v.attrs->end()) {
|
||||
error("cannot coerce %1% to a string", showType(v))
|
||||
error("cannot coerce %1% to a string: %2%",
|
||||
showType(v),
|
||||
ValuePrinter(*this, v, errorPrintOptions))
|
||||
.withTrace(pos, errorCtx)
|
||||
.debugThrow<TypeError>();
|
||||
}
|
||||
|
@ -2276,7 +2320,9 @@ BackedStringView EvalState::coerceToString(
|
|||
}
|
||||
}
|
||||
|
||||
error("cannot coerce %1% to a string", showType(v))
|
||||
error("cannot coerce %1% to a string: %2%",
|
||||
showType(v),
|
||||
ValuePrinter(*this, v, errorPrintOptions))
|
||||
.withTrace(pos, errorCtx)
|
||||
.debugThrow<TypeError>();
|
||||
}
|
||||
|
@ -2292,7 +2338,7 @@ StorePath EvalState::copyPathToStore(NixStringContext & context, const SourcePat
|
|||
auto dstPath = i != srcToStore.end()
|
||||
? i->second
|
||||
: [&]() {
|
||||
auto dstPath = path.fetchToStore(store, path.baseName(), FileIngestionMethod::Recursive, nullptr, repair);
|
||||
auto dstPath = fetchToStore(*store, path.resolveSymlinks(), path.baseName(), FileIngestionMethod::Recursive, nullptr, repair);
|
||||
allowPath(dstPath);
|
||||
srcToStore.insert_or_assign(path, dstPath);
|
||||
printMsg(lvlChatty, "copied source '%1%' -> '%2%'", path, store->printStorePath(dstPath));
|
||||
|
@ -2432,7 +2478,7 @@ bool EvalState::eqValues(Value & v1, Value & v2, const PosIdx pos, std::string_v
|
|||
return v1.boolean == v2.boolean;
|
||||
|
||||
case nString:
|
||||
return v1.string_view().compare(v2.string_view()) == 0;
|
||||
return strcmp(v1.c_str(), v2.c_str()) == 0;
|
||||
|
||||
case nPath:
|
||||
return
|
||||
|
@ -2633,10 +2679,187 @@ void EvalState::printStatistics()
|
|||
}
|
||||
|
||||
|
||||
SourcePath resolveExprPath(SourcePath path)
|
||||
{
|
||||
unsigned int followCount = 0, maxFollow = 1024;
|
||||
|
||||
/* If `path' is a symlink, follow it. This is so that relative
|
||||
path references work. */
|
||||
while (!path.path.isRoot()) {
|
||||
// Basic cycle/depth limit to avoid infinite loops.
|
||||
if (++followCount >= maxFollow)
|
||||
throw Error("too many symbolic links encountered while traversing the path '%s'", path);
|
||||
auto p = path.parent().resolveSymlinks() + path.baseName();
|
||||
if (p.lstat().type != InputAccessor::tSymlink) break;
|
||||
path = {path.accessor, CanonPath(p.readLink(), path.path.parent().value_or(CanonPath::root))};
|
||||
}
|
||||
|
||||
/* If `path' refers to a directory, append `/default.nix'. */
|
||||
if (path.resolveSymlinks().lstat().type == InputAccessor::tDirectory)
|
||||
return path + "default.nix";
|
||||
|
||||
return path;
|
||||
}
|
||||
|
||||
|
||||
Expr * EvalState::parseExprFromFile(const SourcePath & path)
|
||||
{
|
||||
return parseExprFromFile(path, staticBaseEnv);
|
||||
}
|
||||
|
||||
|
||||
Expr * EvalState::parseExprFromFile(const SourcePath & path, std::shared_ptr<StaticEnv> & staticEnv)
|
||||
{
|
||||
auto buffer = path.resolveSymlinks().readFile();
|
||||
// readFile hopefully have left some extra space for terminators
|
||||
buffer.append("\0\0", 2);
|
||||
return parse(buffer.data(), buffer.size(), Pos::Origin(path), path.parent(), staticEnv);
|
||||
}
|
||||
|
||||
|
||||
Expr * EvalState::parseExprFromString(std::string s_, const SourcePath & basePath, std::shared_ptr<StaticEnv> & staticEnv)
|
||||
{
|
||||
auto s = make_ref<std::string>(std::move(s_));
|
||||
s->append("\0\0", 2);
|
||||
return parse(s->data(), s->size(), Pos::String{.source = s}, basePath, staticEnv);
|
||||
}
|
||||
|
||||
|
||||
Expr * EvalState::parseExprFromString(std::string s, const SourcePath & basePath)
|
||||
{
|
||||
return parseExprFromString(std::move(s), basePath, staticBaseEnv);
|
||||
}
|
||||
|
||||
|
||||
Expr * EvalState::parseStdin()
|
||||
{
|
||||
//Activity act(*logger, lvlTalkative, "parsing standard input");
|
||||
auto buffer = drainFD(0);
|
||||
// drainFD should have left some extra space for terminators
|
||||
buffer.append("\0\0", 2);
|
||||
auto s = make_ref<std::string>(std::move(buffer));
|
||||
return parse(s->data(), s->size(), Pos::Stdin{.source = s}, rootPath(CanonPath::fromCwd()), staticBaseEnv);
|
||||
}
|
||||
|
||||
|
||||
SourcePath EvalState::findFile(const std::string_view path)
|
||||
{
|
||||
return findFile(searchPath, path);
|
||||
}
|
||||
|
||||
|
||||
SourcePath EvalState::findFile(const SearchPath & searchPath, const std::string_view path, const PosIdx pos)
|
||||
{
|
||||
for (auto & i : searchPath.elements) {
|
||||
auto suffixOpt = i.prefix.suffixIfPotentialMatch(path);
|
||||
|
||||
if (!suffixOpt) continue;
|
||||
auto suffix = *suffixOpt;
|
||||
|
||||
auto rOpt = resolveSearchPathPath(i.path);
|
||||
if (!rOpt) continue;
|
||||
auto r = *rOpt;
|
||||
|
||||
Path res = suffix == "" ? r : concatStrings(r, "/", suffix);
|
||||
if (pathExists(res)) return rootPath(CanonPath(canonPath(res)));
|
||||
}
|
||||
|
||||
if (hasPrefix(path, "nix/"))
|
||||
return {corepkgsFS, CanonPath(path.substr(3))};
|
||||
|
||||
debugThrow(ThrownError({
|
||||
.msg = hintfmt(evalSettings.pureEval
|
||||
? "cannot look up '<%s>' in pure evaluation mode (use '--impure' to override)"
|
||||
: "file '%s' was not found in the Nix search path (add it using $NIX_PATH or -I)",
|
||||
path),
|
||||
.errPos = positions[pos]
|
||||
}), 0, 0);
|
||||
}
|
||||
|
||||
|
||||
std::optional<std::string> EvalState::resolveSearchPathPath(const SearchPath::Path & value0, bool initAccessControl)
|
||||
{
|
||||
auto & value = value0.s;
|
||||
auto i = searchPathResolved.find(value);
|
||||
if (i != searchPathResolved.end()) return i->second;
|
||||
|
||||
std::optional<std::string> res;
|
||||
|
||||
if (EvalSettings::isPseudoUrl(value)) {
|
||||
try {
|
||||
auto storePath = fetchers::downloadTarball(
|
||||
store, EvalSettings::resolvePseudoUrl(value), "source", false).storePath;
|
||||
res = { store->toRealPath(storePath) };
|
||||
} catch (FileTransferError & e) {
|
||||
logWarning({
|
||||
.msg = hintfmt("Nix search path entry '%1%' cannot be downloaded, ignoring", value)
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
else if (hasPrefix(value, "flake:")) {
|
||||
experimentalFeatureSettings.require(Xp::Flakes);
|
||||
auto flakeRef = parseFlakeRef(value.substr(6), {}, true, false);
|
||||
debug("fetching flake search path element '%s''", value);
|
||||
auto storePath = flakeRef.resolve(store).fetchTree(store).first;
|
||||
res = { store->toRealPath(storePath) };
|
||||
}
|
||||
|
||||
else {
|
||||
auto path = absPath(value);
|
||||
|
||||
/* Allow access to paths in the search path. */
|
||||
if (initAccessControl) {
|
||||
allowPath(path);
|
||||
if (store->isInStore(path)) {
|
||||
try {
|
||||
StorePathSet closure;
|
||||
store->computeFSClosure(store->toStorePath(path).first, closure);
|
||||
for (auto & p : closure)
|
||||
allowPath(p);
|
||||
} catch (InvalidPath &) { }
|
||||
}
|
||||
}
|
||||
|
||||
if (pathExists(path))
|
||||
res = { path };
|
||||
else {
|
||||
logWarning({
|
||||
.msg = hintfmt("Nix search path entry '%1%' does not exist, ignoring", value)
|
||||
});
|
||||
res = std::nullopt;
|
||||
}
|
||||
}
|
||||
|
||||
if (res)
|
||||
debug("resolved search path element '%s' to '%s'", value, *res);
|
||||
else
|
||||
debug("failed to resolve search path element '%s'", value);
|
||||
|
||||
searchPathResolved.emplace(value, res);
|
||||
return res;
|
||||
}
|
||||
|
||||
|
||||
Expr * EvalState::parse(
|
||||
char * text,
|
||||
size_t length,
|
||||
Pos::Origin origin,
|
||||
const SourcePath & basePath,
|
||||
std::shared_ptr<StaticEnv> & staticEnv)
|
||||
{
|
||||
auto result = parseExprFromBuf(text, length, origin, basePath, symbols, positions, rootFS, exprSymbols);
|
||||
|
||||
result->bindVars(*this, staticEnv);
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
|
||||
std::string ExternalValueBase::coerceToString(const Pos & pos, NixStringContext & context, bool copyMore, bool copyToStore) const
|
||||
{
|
||||
throw TypeError({
|
||||
.msg = hintfmt("cannot coerce %1% to a string", showType())
|
||||
.msg = hintfmt("cannot coerce %1% to a string: %2%", showType(), *this)
|
||||
});
|
||||
}
|
||||
|
||||
|
|
|
@ -84,6 +84,8 @@ struct PrimOp
|
|||
void check();
|
||||
};
|
||||
|
||||
std::ostream & operator<<(std::ostream & output, PrimOp & primOp);
|
||||
|
||||
/**
|
||||
* Info about a constant
|
||||
*/
|
||||
|
@ -116,11 +118,6 @@ struct Constant
|
|||
struct Env
|
||||
{
|
||||
Env * up;
|
||||
/**
|
||||
* Number of of levels up to next `with` environment
|
||||
*/
|
||||
unsigned short prevWith:14;
|
||||
enum { Plain = 0, HasWithExpr, HasWithAttrs } type:2;
|
||||
Value * values[0];
|
||||
};
|
||||
|
||||
|
@ -132,7 +129,7 @@ std::unique_ptr<ValMap> mapStaticEnvBindings(const SymbolTable & st, const Stati
|
|||
void copyContext(const Value & v, NixStringContext & context);
|
||||
|
||||
|
||||
std::string printValue(const EvalState & state, const Value & v);
|
||||
std::string printValue(EvalState & state, Value & v);
|
||||
std::ostream & operator << (std::ostream & os, const ValueType t);
|
||||
|
||||
|
||||
|
@ -147,7 +144,7 @@ struct RegexCache;
|
|||
std::shared_ptr<RegexCache> makeRegexCache();
|
||||
|
||||
struct DebugTrace {
|
||||
std::shared_ptr<AbstractPos> pos;
|
||||
std::shared_ptr<Pos> pos;
|
||||
const Expr & expr;
|
||||
const Env & env;
|
||||
hintformat hint;
|
||||
|
@ -210,6 +207,8 @@ public:
|
|||
sPrefix,
|
||||
sOutputSpecified;
|
||||
|
||||
const Expr::AstSymbols exprSymbols;
|
||||
|
||||
/**
|
||||
* If set, force copying files to the Nix store even if they
|
||||
* already exist there.
|
||||
|
@ -218,6 +217,11 @@ public:
|
|||
|
||||
Bindings emptyBindings;
|
||||
|
||||
/**
|
||||
* Empty list constant.
|
||||
*/
|
||||
Value vEmptyList;
|
||||
|
||||
/**
|
||||
* The accessor for the root filesystem.
|
||||
*/
|
||||
|
@ -335,11 +339,6 @@ private:
|
|||
|
||||
std::map<std::string, std::optional<std::string>> searchPathResolved;
|
||||
|
||||
/**
|
||||
* Cache used by checkSourcePath().
|
||||
*/
|
||||
std::unordered_map<Path, SourcePath> resolvedPaths;
|
||||
|
||||
/**
|
||||
* Cache used by prim_match().
|
||||
*/
|
||||
|
@ -465,8 +464,7 @@ public:
|
|||
*/
|
||||
inline void forceValue(Value & v, const PosIdx pos);
|
||||
|
||||
template <typename Callable>
|
||||
inline void forceValue(Value & v, Callable getPos);
|
||||
void tryFixupBlackHolePos(Value & v, PosIdx pos);
|
||||
|
||||
/**
|
||||
* Force a value, then recursively force list elements and
|
||||
|
@ -628,6 +626,11 @@ private:
|
|||
const SourcePath & basePath,
|
||||
std::shared_ptr<StaticEnv> & staticEnv);
|
||||
|
||||
/**
|
||||
* Current Nix call stack depth, used with `max-call-depth` setting to throw stack overflow hopefully before we run out of system stack.
|
||||
*/
|
||||
size_t callDepth = 0;
|
||||
|
||||
public:
|
||||
|
||||
/**
|
||||
|
@ -837,6 +840,11 @@ std::string showType(const Value & v);
|
|||
*/
|
||||
SourcePath resolveExprPath(SourcePath path);
|
||||
|
||||
/**
|
||||
* Whether a URI is allowed, assuming restrictEval is enabled
|
||||
*/
|
||||
bool isAllowedURI(std::string_view uri, const Strings & allowedPaths);
|
||||
|
||||
struct InvalidPathError : EvalError
|
||||
{
|
||||
Path path;
|
||||
|
|
|
@ -90,7 +90,7 @@ std::pair<FlakeRef, std::string> parsePathFlakeRefWithFragment(
|
|||
fragment = percentDecode(url.substr(fragmentStart+1));
|
||||
}
|
||||
if (pathEnd != std::string::npos && fragmentStart != std::string::npos) {
|
||||
query = decodeQuery(url.substr(pathEnd+1, fragmentStart));
|
||||
query = decodeQuery(url.substr(pathEnd+1, fragmentStart-pathEnd-1));
|
||||
}
|
||||
|
||||
if (baseDir) {
|
||||
|
@ -190,7 +190,7 @@ std::optional<std::pair<FlakeRef, std::string>> parseFlakeIdRef(
|
|||
|
||||
static std::regex flakeRegex(
|
||||
"((" + flakeIdRegexS + ")(?:/(?:" + refAndOrRevRegex + "))?)"
|
||||
+ "(?:#(" + queryRegex + "))?",
|
||||
+ "(?:#(" + fragmentRegex + "))?",
|
||||
std::regex::ECMAScript);
|
||||
|
||||
if (std::regex_match(url, match, flakeRegex)) {
|
||||
|
|
src/libexpr/flake/url-name.cc (new file, 48 lines)
@@ -0,0 +1,48 @@
+#include "url-name.hh"
+#include <regex>
+#include <iostream>
+
+namespace nix {
+
+static const std::string attributeNamePattern("[a-zA-Z0-9_-]+");
+static const std::regex lastAttributeRegex("(?:" + attributeNamePattern + "\\.)*(?!default)(" + attributeNamePattern +")(\\^.*)?");
+static const std::string pathSegmentPattern("[a-zA-Z0-9_-]+");
+static const std::regex lastPathSegmentRegex(".*/(" + pathSegmentPattern +")");
+static const std::regex secondPathSegmentRegex("(?:" + pathSegmentPattern + ")/(" + pathSegmentPattern +")(?:/.*)?");
+static const std::regex gitProviderRegex("github|gitlab|sourcehut");
+static const std::regex gitSchemeRegex("git($|\\+.*)");
+static const std::regex defaultOutputRegex(".*\\.default($|\\^.*)");
+
+std::optional<std::string> getNameFromURL(const ParsedURL & url)
+{
+    std::smatch match;
+
+    /* If there is a dir= argument, use its value */
+    if (url.query.count("dir") > 0)
+        return url.query.at("dir");
+
+    /* If the fragment isn't a "default" and contains two attribute elements, use the last one */
+    if (std::regex_match(url.fragment, match, lastAttributeRegex))
+        return match.str(1);
+
+    /* If this is a github/gitlab/sourcehut flake, use the repo name */
+    if (std::regex_match(url.scheme, gitProviderRegex) && std::regex_match(url.path, match, secondPathSegmentRegex))
+        return match.str(1);
+
+    /* If it is a regular git flake, use the directory name */
+    if (std::regex_match(url.scheme, gitSchemeRegex) && std::regex_match(url.path, match, lastPathSegmentRegex))
+        return match.str(1);
+
+    /* If everything failed but there is a non-default fragment, use it in full */
+    if (!url.fragment.empty() && !std::regex_match(url.fragment, defaultOutputRegex))
+        return url.fragment;
+
+    /* If there is no fragment, take the last element of the path */
+    if (std::regex_match(url.path, match, lastPathSegmentRegex))
+        return match.str(1);
+
+    /* If even that didn't work, the URL does not contain enough info to determine a useful name */
+    return {};
+}
+
+}
src/libexpr/flake/url-name.hh (new file, 20 lines)
@@ -0,0 +1,20 @@
+#include "url.hh"
+#include "url-parts.hh"
+#include "util.hh"
+#include "split.hh"
+
+namespace nix {
+
+/**
+ * Try to extract a reasonably unique and meaningful, human-readable
+ * name of a flake output from a parsed URL.
+ * When nullopt is returned, the callsite should use information available
+ * to it outside of the URL to determine a useful name.
+ * This is a heuristic approach intended for user interfaces.
+ * @return nullopt if the extracted name is not useful to identify a
+ * flake output, for example because it is empty or "default".
+ * Otherwise returns the extracted name.
+ */
+std::optional<std::string> getNameFromURL(const ParsedURL & url);
+
+}
@ -11,13 +11,13 @@
|
|||
namespace nix {
|
||||
|
||||
|
||||
DrvInfo::DrvInfo(EvalState & state, std::string attrPath, Bindings * attrs)
|
||||
PackageInfo::PackageInfo(EvalState & state, std::string attrPath, Bindings * attrs)
|
||||
: state(&state), attrs(attrs), attrPath(std::move(attrPath))
|
||||
{
|
||||
}
|
||||
|
||||
|
||||
DrvInfo::DrvInfo(EvalState & state, ref<Store> store, const std::string & drvPathWithOutputs)
|
||||
PackageInfo::PackageInfo(EvalState & state, ref<Store> store, const std::string & drvPathWithOutputs)
|
||||
: state(&state), attrs(nullptr), attrPath("")
|
||||
{
|
||||
auto [drvPath, selectedOutputs] = parsePathWithOutputs(*store, drvPathWithOutputs);
|
||||
|
@ -45,7 +45,7 @@ DrvInfo::DrvInfo(EvalState & state, ref<Store> store, const std::string & drvPat
|
|||
}
|
||||
|
||||
|
||||
std::string DrvInfo::queryName() const
|
||||
std::string PackageInfo::queryName() const
|
||||
{
|
||||
if (name == "" && attrs) {
|
||||
auto i = attrs->find(state->sName);
|
||||
|
@ -56,7 +56,7 @@ std::string DrvInfo::queryName() const
|
|||
}
|
||||
|
||||
|
||||
std::string DrvInfo::querySystem() const
|
||||
std::string PackageInfo::querySystem() const
|
||||
{
|
||||
if (system == "" && attrs) {
|
||||
auto i = attrs->find(state->sSystem);
|
||||
|
@ -66,7 +66,7 @@ std::string DrvInfo::querySystem() const
|
|||
}
|
||||
|
||||
|
||||
std::optional<StorePath> DrvInfo::queryDrvPath() const
|
||||
std::optional<StorePath> PackageInfo::queryDrvPath() const
|
||||
{
|
||||
if (!drvPath && attrs) {
|
||||
Bindings::iterator i = attrs->find(state->sDrvPath);
|
||||
|
@ -80,7 +80,7 @@ std::optional<StorePath> DrvInfo::queryDrvPath() const
|
|||
}
|
||||
|
||||
|
||||
StorePath DrvInfo::requireDrvPath() const
|
||||
StorePath PackageInfo::requireDrvPath() const
|
||||
{
|
||||
if (auto drvPath = queryDrvPath())
|
||||
return *drvPath;
|
||||
|
@ -88,7 +88,7 @@ StorePath DrvInfo::requireDrvPath() const
|
|||
}
|
||||
|
||||
|
||||
StorePath DrvInfo::queryOutPath() const
|
||||
StorePath PackageInfo::queryOutPath() const
|
||||
{
|
||||
if (!outPath && attrs) {
|
||||
Bindings::iterator i = attrs->find(state->sOutPath);
|
||||
|
@ -102,7 +102,7 @@ StorePath DrvInfo::queryOutPath() const
|
|||
}
|
||||
|
||||
|
||||
DrvInfo::Outputs DrvInfo::queryOutputs(bool withPaths, bool onlyOutputsToInstall)
|
||||
PackageInfo::Outputs PackageInfo::queryOutputs(bool withPaths, bool onlyOutputsToInstall)
|
||||
{
|
||||
if (outputs.empty()) {
|
||||
/* Get the ‘outputs’ list. */
|
||||
|
@ -164,7 +164,7 @@ DrvInfo::Outputs DrvInfo::queryOutputs(bool withPaths, bool onlyOutputsToInstall
|
|||
}
|
||||
|
||||
|
||||
std::string DrvInfo::queryOutputName() const
|
||||
std::string PackageInfo::queryOutputName() const
|
||||
{
|
||||
if (outputName == "" && attrs) {
|
||||
Bindings::iterator i = attrs->find(state->sOutputName);
|
||||
|
@ -174,7 +174,7 @@ std::string DrvInfo::queryOutputName() const
|
|||
}
|
||||
|
||||
|
||||
Bindings * DrvInfo::getMeta()
|
||||
Bindings * PackageInfo::getMeta()
|
||||
{
|
||||
if (meta) return meta;
|
||||
if (!attrs) return 0;
|
||||
|
@ -186,7 +186,7 @@ Bindings * DrvInfo::getMeta()
|
|||
}
|
||||
|
||||
|
||||
StringSet DrvInfo::queryMetaNames()
|
||||
StringSet PackageInfo::queryMetaNames()
|
||||
{
|
||||
StringSet res;
|
||||
if (!getMeta()) return res;
|
||||
|
@ -196,9 +196,9 @@ StringSet DrvInfo::queryMetaNames()
|
|||
}
|
||||
|
||||
|
||||
bool DrvInfo::checkMeta(Value & v)
|
||||
bool PackageInfo::checkMeta(Value & v)
|
||||
{
|
||||
state->forceValue(v, [&]() { return v.determinePos(noPos); });
|
||||
state->forceValue(v, v.determinePos(noPos));
|
||||
if (v.type() == nList) {
|
||||
for (auto elem : v.listItems())
|
||||
if (!checkMeta(*elem)) return false;
|
||||
|
@ -216,7 +216,7 @@ bool DrvInfo::checkMeta(Value & v)
|
|||
}
|
||||
|
||||
|
||||
Value * DrvInfo::queryMeta(const std::string & name)
|
||||
Value * PackageInfo::queryMeta(const std::string & name)
|
||||
{
|
||||
if (!getMeta()) return 0;
|
||||
Bindings::iterator a = meta->find(state->symbols.create(name));
|
||||
|
@ -225,7 +225,7 @@ Value * DrvInfo::queryMeta(const std::string & name)
|
|||
}
|
||||
|
||||
|
||||
std::string DrvInfo::queryMetaString(const std::string & name)
|
||||
std::string PackageInfo::queryMetaString(const std::string & name)
|
||||
{
|
||||
Value * v = queryMeta(name);
|
||||
if (!v || v->type() != nString) return "";
|
||||
|
@ -233,7 +233,7 @@ std::string DrvInfo::queryMetaString(const std::string & name)
|
|||
}
|
||||
|
||||
|
||||
NixInt DrvInfo::queryMetaInt(const std::string & name, NixInt def)
|
||||
NixInt PackageInfo::queryMetaInt(const std::string & name, NixInt def)
|
||||
{
|
||||
Value * v = queryMeta(name);
|
||||
if (!v) return def;
|
||||
|
@ -247,7 +247,7 @@ NixInt DrvInfo::queryMetaInt(const std::string & name, NixInt def)
|
|||
return def;
|
||||
}
|
||||
|
||||
NixFloat DrvInfo::queryMetaFloat(const std::string & name, NixFloat def)
|
||||
NixFloat PackageInfo::queryMetaFloat(const std::string & name, NixFloat def)
|
||||
{
|
||||
Value * v = queryMeta(name);
|
||||
if (!v) return def;
|
||||
|
@ -262,7 +262,7 @@ NixFloat DrvInfo::queryMetaFloat(const std::string & name, NixFloat def)
|
|||
}
|
||||
|
||||
|
||||
bool DrvInfo::queryMetaBool(const std::string & name, bool def)
|
||||
bool PackageInfo::queryMetaBool(const std::string & name, bool def)
|
||||
{
|
||||
Value * v = queryMeta(name);
|
||||
if (!v) return def;
|
||||
|
@ -277,7 +277,7 @@ bool DrvInfo::queryMetaBool(const std::string & name, bool def)
|
|||
}
|
||||
|
||||
|
||||
void DrvInfo::setMeta(const std::string & name, Value * v)
|
||||
void PackageInfo::setMeta(const std::string & name, Value * v)
|
||||
{
|
||||
getMeta();
|
||||
auto attrs = state->buildBindings(1 + (meta ? meta->size() : 0));
|
||||
|
@ -300,18 +300,18 @@ typedef std::set<Bindings *> Done;
|
|||
The result boolean indicates whether it makes sense
|
||||
for the caller to recursively search for derivations in `v'. */
|
||||
static bool getDerivation(EvalState & state, Value & v,
|
||||
const std::string & attrPath, DrvInfos & drvs, Done & done,
|
||||
const std::string & attrPath, PackageInfos & drvs, Done & done,
|
||||
bool ignoreAssertionFailures)
|
||||
{
|
||||
try {
|
||||
state.forceValue(v, [&]() { return v.determinePos(noPos); });
|
||||
state.forceValue(v, v.determinePos(noPos));
|
||||
if (!state.isDerivation(v)) return true;
|
||||
|
||||
/* Remove spurious duplicates (e.g., a set like `rec { x =
|
||||
derivation {...}; y = x;}'. */
|
||||
if (!done.insert(v.attrs).second) return false;
|
||||
|
||||
DrvInfo drv(state, attrPath, v.attrs);
|
||||
PackageInfo drv(state, attrPath, v.attrs);
|
||||
|
||||
drv.queryName();
|
||||
|
||||
|
@ -326,11 +326,11 @@ static bool getDerivation(EvalState & state, Value & v,
|
|||
}
|
||||
|
||||
|
||||
std::optional<DrvInfo> getDerivation(EvalState & state, Value & v,
|
||||
std::optional<PackageInfo> getDerivation(EvalState & state, Value & v,
|
||||
bool ignoreAssertionFailures)
|
||||
{
|
||||
Done done;
|
||||
DrvInfos drvs;
|
||||
PackageInfos drvs;
|
||||
getDerivation(state, v, "", drvs, done, ignoreAssertionFailures);
|
||||
if (drvs.size() != 1) return {};
|
||||
return std::move(drvs.front());
|
||||
|
@ -348,7 +348,7 @@ static std::regex attrRegex("[A-Za-z_][A-Za-z0-9-_+]*");
|
|||
|
||||
static void getDerivations(EvalState & state, Value & vIn,
|
||||
const std::string & pathPrefix, Bindings & autoArgs,
|
||||
DrvInfos & drvs, Done & done,
|
||||
PackageInfos & drvs, Done & done,
|
||||
bool ignoreAssertionFailures)
|
||||
{
|
||||
Value v;
|
||||
|
@ -401,7 +401,7 @@ static void getDerivations(EvalState & state, Value & vIn,
|
|||
|
||||
|
||||
void getDerivations(EvalState & state, Value & v, const std::string & pathPrefix,
|
||||
Bindings & autoArgs, DrvInfos & drvs, bool ignoreAssertionFailures)
|
||||
Bindings & autoArgs, PackageInfos & drvs, bool ignoreAssertionFailures)
|
||||
{
|
||||
Done done;
|
||||
getDerivations(state, v, pathPrefix, autoArgs, drvs, done, ignoreAssertionFailures);
|
||||
|
|
|
@@ -10,8 +10,10 @@
 namespace nix {


-struct DrvInfo
+/**
+ * A "parsed" package attribute set.
+ */
+struct PackageInfo
 {
 public:
     typedef std::map<std::string, std::optional<StorePath>> Outputs;
@@ -43,9 +45,9 @@ public:
      */
     std::string attrPath;

-    DrvInfo(EvalState & state) : state(&state) { };
-    DrvInfo(EvalState & state, std::string attrPath, Bindings * attrs);
-    DrvInfo(EvalState & state, ref<Store> store, const std::string & drvPathWithOutputs);
+    PackageInfo(EvalState & state) : state(&state) { };
+    PackageInfo(EvalState & state, std::string attrPath, Bindings * attrs);
+    PackageInfo(EvalState & state, ref<Store> store, const std::string & drvPathWithOutputs);

     std::string queryName() const;
     std::string querySystem() const;
@@ -82,21 +84,21 @@ public:


 #if HAVE_BOEHMGC
-typedef std::list<DrvInfo, traceable_allocator<DrvInfo>> DrvInfos;
+typedef std::list<PackageInfo, traceable_allocator<PackageInfo>> PackageInfos;
 #else
-typedef std::list<DrvInfo> DrvInfos;
+typedef std::list<PackageInfo> PackageInfos;
 #endif


 /**
- * If value `v` denotes a derivation, return a DrvInfo object
+ * If value `v` denotes a derivation, return a PackageInfo object
  * describing it. Otherwise return nothing.
  */
-std::optional<DrvInfo> getDerivation(EvalState & state,
+std::optional<PackageInfo> getDerivation(EvalState & state,
     Value & v, bool ignoreAssertionFailures);

 void getDerivations(EvalState & state, Value & v, const std::string & pathPrefix,
-    Bindings & autoArgs, DrvInfos & drvs,
+    Bindings & autoArgs, PackageInfos & drvs,
     bool ignoreAssertionFailures);

|
@ -1,4 +1,5 @@
|
|||
%option reentrant bison-bridge bison-locations
|
||||
%option align
|
||||
%option noyywrap
|
||||
%option never-interactive
|
||||
%option stack
|
||||
|
@ -28,15 +29,7 @@ using namespace nix;
|
|||
|
||||
namespace nix {
|
||||
|
||||
static inline PosIdx makeCurPos(const YYLTYPE & loc, ParseData * data)
|
||||
{
|
||||
return data->state.positions.add(data->origin, loc.first_line, loc.first_column);
|
||||
}
|
||||
|
||||
#define CUR_POS makeCurPos(*yylloc, data)
|
||||
|
||||
// backup to recover from yyless(0)
|
||||
thread_local YYLTYPE prev_yylloc;
|
||||
#define CUR_POS state->at(*yylloc)
|
||||
|
||||
static void initLoc(YYLTYPE * loc)
|
||||
{
|
||||
|
@ -46,7 +39,7 @@ static void initLoc(YYLTYPE * loc)
|
|||
|
||||
static void adjustLoc(YYLTYPE * loc, const char * s, size_t len)
|
||||
{
|
||||
prev_yylloc = *loc;
|
||||
loc->stash();
|
||||
|
||||
loc->first_line = loc->last_line;
|
||||
loc->first_column = loc->last_column;
|
||||
|
@ -132,7 +125,7 @@ else { return ELSE; }
|
|||
assert { return ASSERT; }
|
||||
with { return WITH; }
|
||||
let { return LET; }
|
||||
in { return IN; }
|
||||
in { return IN_KW; }
|
||||
rec { return REC; }
|
||||
inherit { return INHERIT; }
|
||||
or { return OR_KW; }
|
||||
|
@ -155,19 +148,19 @@ or { return OR_KW; }
|
|||
} catch (const boost::bad_lexical_cast &) {
|
||||
throw ParseError({
|
||||
.msg = hintfmt("invalid integer '%1%'", yytext),
|
||||
.errPos = data->state.positions[CUR_POS],
|
||||
.errPos = state->positions[CUR_POS],
|
||||
});
|
||||
}
|
||||
return INT;
|
||||
return INT_LIT;
|
||||
}
|
||||
{FLOAT} { errno = 0;
|
||||
yylval->nf = strtod(yytext, 0);
|
||||
if (errno != 0)
|
||||
throw ParseError({
|
||||
.msg = hintfmt("invalid float '%1%'", yytext),
|
||||
.errPos = data->state.positions[CUR_POS],
|
||||
.errPos = state->positions[CUR_POS],
|
||||
});
|
||||
return FLOAT;
|
||||
return FLOAT_LIT;
|
||||
}
|
||||
|
||||
\$\{ { PUSH_STATE(DEFAULT); return DOLLAR_CURLY; }
|
||||
|
@ -188,7 +181,7 @@ or { return OR_KW; }
|
|||
/* It is impossible to match strings ending with '$' with one
|
||||
regex because trailing contexts are only valid at the end
|
||||
of a rule. (A sane but undocumented limitation.) */
|
||||
yylval->str = unescapeStr(data->symbols, yytext, yyleng);
|
||||
yylval->str = unescapeStr(state->symbols, yytext, yyleng);
|
||||
return STR;
|
||||
}
|
||||
<STRING>\$\{ { PUSH_STATE(DEFAULT); return DOLLAR_CURLY; }
|
||||
|
@ -216,7 +209,7 @@ or { return OR_KW; }
|
|||
return IND_STR;
|
||||
}
|
||||
<IND_STRING>\'\'\\{ANY} {
|
||||
yylval->str = unescapeStr(data->symbols, yytext + 2, yyleng - 2);
|
||||
yylval->str = unescapeStr(state->symbols, yytext + 2, yyleng - 2);
|
||||
return IND_STR;
|
||||
}
|
||||
<IND_STRING>\$\{ { PUSH_STATE(DEFAULT); return DOLLAR_CURLY; }
|
||||
|
@ -230,7 +223,7 @@ or { return OR_KW; }
|
|||
{HPATH_START}\$\{ {
|
||||
PUSH_STATE(PATH_START);
|
||||
yyless(0);
|
||||
*yylloc = prev_yylloc;
|
||||
yylloc->unstash();
|
||||
}
|
||||
|
||||
<PATH_START>{PATH_SEG} {
|
||||
|
@ -286,7 +279,7 @@ or { return OR_KW; }
|
|||
context (it may be ')', ';', or something of that sort) */
|
||||
POP_STATE();
|
||||
yyless(0);
|
||||
*yylloc = prev_yylloc;
|
||||
yylloc->unstash();
|
||||
return PATH_END;
|
||||
}
|
||||
|
||||
|
@ -294,7 +287,7 @@ or { return OR_KW; }
|
|||
<INPATH_SLASH><<EOF>> {
|
||||
throw ParseError({
|
||||
.msg = hintfmt("path has a trailing slash"),
|
||||
.errPos = data->state.positions[CUR_POS],
|
||||
.errPos = state->positions[CUR_POS],
|
||||
});
|
||||
}
|
||||
|
||||
|
|
|
@ -16,9 +16,9 @@ libexpr_CXXFLAGS += -I src/libutil -I src/libstore -I src/libfetchers -I src/lib
|
|||
|
||||
libexpr_LIBS = libutil libstore libfetchers
|
||||
|
||||
libexpr_LDFLAGS += -lboost_context -pthread
|
||||
libexpr_LDFLAGS += -lboost_context $(THREAD_LDFLAGS)
|
||||
ifdef HOST_LINUX
|
||||
libexpr_LDFLAGS += -ldl
|
||||
libexpr_LDFLAGS += -ldl
|
||||
endif
|
||||
|
||||
# The dependency on libgc must be propagated (i.e. meaning that
|
||||
|
|
|
@ -9,57 +9,9 @@
|
|||
|
||||
namespace nix {
|
||||
|
||||
struct PosAdapter : AbstractPos
|
||||
{
|
||||
Pos::Origin origin;
|
||||
unsigned long Expr::nrExprs = 0;
|
||||
|
||||
PosAdapter(Pos::Origin origin)
|
||||
: origin(std::move(origin))
|
||||
{
|
||||
}
|
||||
|
||||
std::optional<std::string> getSource() const override
|
||||
{
|
||||
return std::visit(overloaded {
|
||||
[](const Pos::none_tag &) -> std::optional<std::string> {
|
||||
return std::nullopt;
|
||||
},
|
||||
[](const Pos::Stdin & s) -> std::optional<std::string> {
|
||||
// Get rid of the null terminators added by the parser.
|
||||
return std::string(s.source->c_str());
|
||||
},
|
||||
[](const Pos::String & s) -> std::optional<std::string> {
|
||||
// Get rid of the null terminators added by the parser.
|
||||
return std::string(s.source->c_str());
|
||||
},
|
||||
[](const SourcePath & path) -> std::optional<std::string> {
|
||||
try {
|
||||
return path.readFile();
|
||||
} catch (Error &) {
|
||||
return std::nullopt;
|
||||
}
|
||||
}
|
||||
}, origin);
|
||||
}
|
||||
|
||||
void print(std::ostream & out) const override
|
||||
{
|
||||
std::visit(overloaded {
|
||||
[&](const Pos::none_tag &) { out << "«none»"; },
|
||||
[&](const Pos::Stdin &) { out << "«stdin»"; },
|
||||
[&](const Pos::String & s) { out << "«string»"; },
|
||||
[&](const SourcePath & path) { out << path; }
|
||||
}, origin);
|
||||
}
|
||||
};
|
||||
|
||||
Pos::operator std::shared_ptr<AbstractPos>() const
|
||||
{
|
||||
auto pos = std::make_shared<PosAdapter>(origin);
|
||||
pos->line = line;
|
||||
pos->column = column;
|
||||
return pos;
|
||||
}
|
||||
ExprBlackHole eBlackHole;
|
||||
|
||||
// FIXME: remove, because *symbols* are abstract and do not have a single
|
||||
// textual representation; see printIdentifier()
|
||||
|
@ -266,17 +218,6 @@ void ExprPos::show(const SymbolTable & symbols, std::ostream & str) const
|
|||
}
|
||||
|
||||
|
||||
std::ostream & operator << (std::ostream & str, const Pos & pos)
|
||||
{
|
||||
if (auto pos2 = (std::shared_ptr<AbstractPos>) pos) {
|
||||
str << *pos2;
|
||||
} else
|
||||
str << "undefined position";
|
||||
|
||||
return str;
|
||||
}
|
||||
|
||||
|
||||
std::string showAttrPath(const SymbolTable & symbols, const AttrPath & attrPath)
|
||||
{
|
||||
std::ostringstream out;
|
||||
|
@ -331,6 +272,8 @@ void ExprVar::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> &
|
|||
if (es.debugRepl)
|
||||
es.exprEnvs.insert(std::make_pair(this, env));
|
||||
|
||||
fromWith = nullptr;
|
||||
|
||||
/* Check whether the variable appears in the environment. If so,
|
||||
set its level and displacement. */
|
||||
const StaticEnv * curEnv;
|
||||
|
@ -342,7 +285,6 @@ void ExprVar::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> &
|
|||
} else {
|
||||
auto i = curEnv->find(name);
|
||||
if (i != curEnv->vars.end()) {
|
||||
fromWith = false;
|
||||
this->level = level;
|
||||
displ = i->second;
|
||||
return;
|
||||
|
@ -358,7 +300,8 @@ void ExprVar::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> &
|
|||
.msg = hintfmt("undefined variable '%1%'", es.symbols[name]),
|
||||
.errPos = es.positions[pos]
|
||||
});
|
||||
fromWith = true;
|
||||
for (auto * e = env.get(); e && !fromWith; e = e->up)
|
||||
fromWith = e->isWith;
|
||||
this->level = withLevel;
|
||||
}
|
||||
|
||||
|
@ -391,7 +334,7 @@ void ExprAttrs::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv>
|
|||
es.exprEnvs.insert(std::make_pair(this, env));
|
||||
|
||||
if (recursive) {
|
||||
auto newEnv = std::make_shared<StaticEnv>(false, env.get(), recursive ? attrs.size() : 0);
|
||||
auto newEnv = std::make_shared<StaticEnv>(nullptr, env.get(), recursive ? attrs.size() : 0);
|
||||
|
||||
Displacement displ = 0;
|
||||
for (auto & i : attrs)
|
||||
|
@ -433,7 +376,7 @@ void ExprLambda::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv>
|
|||
es.exprEnvs.insert(std::make_pair(this, env));
|
||||
|
||||
auto newEnv = std::make_shared<StaticEnv>(
|
||||
false, env.get(),
|
||||
nullptr, env.get(),
|
||||
(hasFormals() ? formals->formals.size() : 0) +
|
||||
(!arg ? 0 : 1));
|
||||
|
||||
|
@ -469,7 +412,7 @@ void ExprLet::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> &
|
|||
if (es.debugRepl)
|
||||
es.exprEnvs.insert(std::make_pair(this, env));
|
||||
|
||||
auto newEnv = std::make_shared<StaticEnv>(false, env.get(), attrs->attrs.size());
|
||||
auto newEnv = std::make_shared<StaticEnv>(nullptr, env.get(), attrs->attrs.size());
|
||||
|
||||
Displacement displ = 0;
|
||||
for (auto & i : attrs->attrs)
|
||||
|
@ -488,6 +431,10 @@ void ExprWith::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> &
|
|||
if (es.debugRepl)
|
||||
es.exprEnvs.insert(std::make_pair(this, env));
|
||||
|
||||
parentWith = nullptr;
|
||||
for (auto * e = env.get(); e && !parentWith; e = e->up)
|
||||
parentWith = e->isWith;
|
||||
|
||||
/* Does this `with' have an enclosing `with'? If so, record its
|
||||
level so that `lookupVar' can look up variables in the previous
|
||||
`with' if this one doesn't contain the desired attribute. */
|
||||
|
@ -504,7 +451,7 @@ void ExprWith::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> &
|
|||
es.exprEnvs.insert(std::make_pair(this, env));
|
||||
|
||||
attrs->bindVars(es, env);
|
||||
auto newEnv = std::make_shared<StaticEnv>(true, env.get());
|
||||
auto newEnv = std::make_shared<StaticEnv>(this, env.get());
|
||||
body->bindVars(es, newEnv);
|
||||
}
|
||||
|
||||
|
|
|
@ -8,6 +8,7 @@
|
|||
#include "symbol-table.hh"
|
||||
#include "error.hh"
|
||||
#include "chunked-vector.hh"
|
||||
#include "position.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
|
@ -21,25 +22,11 @@ MakeError(TypeError, EvalError);
|
|||
MakeError(UndefinedVarError, Error);
|
||||
MakeError(MissingArgumentError, EvalError);
|
||||
|
||||
/**
|
||||
* Position objects.
|
||||
*/
|
||||
struct Pos
|
||||
class InfiniteRecursionError : public EvalError
|
||||
{
|
||||
uint32_t line;
|
||||
uint32_t column;
|
||||
|
||||
struct none_tag { };
|
||||
struct Stdin { ref<std::string> source; };
|
||||
struct String { ref<std::string> source; };
|
||||
|
||||
typedef std::variant<none_tag, Stdin, String, SourcePath> Origin;
|
||||
|
||||
Origin origin;
|
||||
|
||||
explicit operator bool() const { return line > 0; }
|
||||
|
||||
operator std::shared_ptr<AbstractPos>() const;
|
||||
friend class EvalState;
|
||||
public:
|
||||
using EvalError::EvalError;
|
||||
};
|
||||
|
||||
class PosIdx {
|
||||
|
@ -74,7 +61,7 @@ public:
|
|||
mutable uint32_t idx = std::numeric_limits<uint32_t>::max();
|
||||
|
||||
// Used for searching in PosTable::[].
|
||||
explicit Origin(uint32_t idx): idx(idx), origin{Pos::none_tag()} {}
|
||||
explicit Origin(uint32_t idx): idx(idx), origin{std::monostate()} {}
|
||||
|
||||
public:
|
||||
const Pos::Origin origin;
|
||||
|
@ -125,12 +112,11 @@ public:
|
|||
|
||||
inline PosIdx noPos = {};
|
||||
|
||||
std::ostream & operator << (std::ostream & str, const Pos & pos);
|
||||
|
||||
|
||||
struct Env;
|
||||
struct Value;
|
||||
class EvalState;
|
||||
struct ExprWith;
|
||||
struct StaticEnv;
|
||||
|
||||
|
||||
|
@ -154,6 +140,11 @@ std::string showAttrPath(const SymbolTable & symbols, const AttrPath & attrPath)
|
|||
|
||||
struct Expr
|
||||
{
|
||||
struct AstSymbols {
|
||||
Symbol sub, lessThan, mul, div, or_, findFile, nixPath, body;
|
||||
};
|
||||
|
||||
|
||||
static unsigned long nrExprs;
|
||||
Expr() {
|
||||
nrExprs++;
|
||||
|
@ -219,8 +210,11 @@ struct ExprVar : Expr
|
|||
Symbol name;
|
||||
|
||||
/* Whether the variable comes from an environment (e.g. a rec, let
|
||||
or function argument) or from a "with". */
|
||||
bool fromWith;
|
||||
or function argument) or from a "with".
|
||||
|
||||
`nullptr`: Not from a `with`.
|
||||
Valid pointer: the nearest, innermost `with` expression to query first. */
|
||||
ExprWith * fromWith;
|
||||
|
||||
/* In the former case, the value is obtained by going `level`
|
||||
levels up from the current environment and getting the
|
||||
|
@ -292,6 +286,7 @@ struct ExprList : Expr
|
|||
std::vector<Expr *> elems;
|
||||
ExprList() { };
|
||||
COMMON_METHODS
|
||||
Value * maybeThunk(EvalState & state, Env & env) override;
|
||||
|
||||
PosIdx getPos() const override
|
||||
{
|
||||
|
@ -378,6 +373,7 @@ struct ExprWith : Expr
|
|||
PosIdx pos;
|
||||
Expr * attrs, * body;
|
||||
size_t prevWith;
|
||||
ExprWith * parentWith;
|
||||
ExprWith(const PosIdx & pos, Expr * attrs, Expr * body) : pos(pos), attrs(attrs), body(body) { };
|
||||
PosIdx getPos() const override { return pos; }
|
||||
COMMON_METHODS
|
||||
|
@@ -455,20 +451,30 @@ struct ExprPos
     COMMON_METHODS
 };

+/* only used to mark thunks as black holes. */
+struct ExprBlackHole : Expr
+{
+    void show(const SymbolTable & symbols, std::ostream & str) const override {}
+    void eval(EvalState & state, Env & env, Value & v) override;
+    void bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env) override {}
+};
+
+extern ExprBlackHole eBlackHole;
+

 /* Static environments are used to map variable names onto (level,
    displacement) pairs used to obtain the value of the variable at
    runtime. */
 struct StaticEnv
 {
-    bool isWith;
+    ExprWith * isWith;
     const StaticEnv * up;

     // Note: these must be in sorted order.
     typedef std::vector<std::pair<Symbol, Displacement>> Vars;
     Vars vars;

-    StaticEnv(bool isWith, const StaticEnv * up, size_t expectedSize = 0) : isWith(isWith), up(up) {
+    StaticEnv(ExprWith * isWith, const StaticEnv * up, size_t expectedSize = 0) : isWith(isWith), up(up) {
         vars.reserve(expectedSize);
     };

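As a rough illustration of the comment above, here is a minimal, self-contained model (not Nix's actual lookup code; MiniStaticEnv and lookup are hypothetical names) of resolving a variable name against a chain of static environments into a (level, displacement) pair:

#include <optional>
#include <string>
#include <utility>
#include <vector>

struct MiniStaticEnv
{
    const MiniStaticEnv * up = nullptr;     // enclosing environment
    std::vector<std::string> vars;          // stands in for the sorted Symbol -> Displacement table
};

// level = how many environments to walk up, displacement = index within that environment.
static std::optional<std::pair<size_t, size_t>>
lookup(const MiniStaticEnv * env, const std::string & name)
{
    for (size_t level = 0; env; env = env->up, ++level)
        for (size_t displ = 0; displ < env->vars.size(); ++displ)
            if (env->vars[displ] == name)
                return std::make_pair(level, displ);
    return std::nullopt;  // not statically bound (e.g. resolved via `with` at runtime, or undefined)
}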
src/libexpr/parser-state.hh (new file, 271 lines)
@ -0,0 +1,271 @@
|
|||
#pragma once
|
||||
///@file
|
||||
|
||||
#include "eval.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
/**
|
||||
* @note Storing a C-style `char *` and `size_t` allows us to avoid
|
||||
* having to define the special members that using string_view here
|
||||
* would implicitly delete.
|
||||
*/
|
||||
struct StringToken
|
||||
{
|
||||
const char * p;
|
||||
size_t l;
|
||||
bool hasIndentation;
|
||||
operator std::string_view() const { return {p, l}; }
|
||||
};
|
||||
|
||||
struct ParserLocation
|
||||
{
|
||||
int first_line, first_column;
|
||||
int last_line, last_column;
|
||||
|
||||
// backup to recover from yyless(0)
|
||||
int stashed_first_line, stashed_first_column;
|
||||
int stashed_last_line, stashed_last_column;
|
||||
|
||||
void stash() {
|
||||
stashed_first_line = first_line;
|
||||
stashed_first_column = first_column;
|
||||
stashed_last_line = last_line;
|
||||
stashed_last_column = last_column;
|
||||
}
|
||||
|
||||
void unstash() {
|
||||
first_line = stashed_first_line;
|
||||
first_column = stashed_first_column;
|
||||
last_line = stashed_last_line;
|
||||
last_column = stashed_last_column;
|
||||
}
|
||||
};
|
||||
|
||||
struct ParserState
|
||||
{
|
||||
SymbolTable & symbols;
|
||||
PosTable & positions;
|
||||
Expr * result;
|
||||
SourcePath basePath;
|
||||
PosTable::Origin origin;
|
||||
const ref<InputAccessor> rootFS;
|
||||
const Expr::AstSymbols & s;
|
||||
|
||||
void dupAttr(const AttrPath & attrPath, const PosIdx pos, const PosIdx prevPos);
|
||||
void dupAttr(Symbol attr, const PosIdx pos, const PosIdx prevPos);
|
||||
void addAttr(ExprAttrs * attrs, AttrPath && attrPath, Expr * e, const PosIdx pos);
|
||||
Formals * validateFormals(Formals * formals, PosIdx pos = noPos, Symbol arg = {});
|
||||
Expr * stripIndentation(const PosIdx pos,
|
||||
std::vector<std::pair<PosIdx, std::variant<Expr *, StringToken>>> && es);
|
||||
PosIdx at(const ParserLocation & loc);
|
||||
};
|
||||
|
||||
inline void ParserState::dupAttr(const AttrPath & attrPath, const PosIdx pos, const PosIdx prevPos)
|
||||
{
|
||||
throw ParseError({
|
||||
.msg = hintfmt("attribute '%1%' already defined at %2%",
|
||||
showAttrPath(symbols, attrPath), positions[prevPos]),
|
||||
.errPos = positions[pos]
|
||||
});
|
||||
}
|
||||
|
||||
inline void ParserState::dupAttr(Symbol attr, const PosIdx pos, const PosIdx prevPos)
|
||||
{
|
||||
throw ParseError({
|
||||
.msg = hintfmt("attribute '%1%' already defined at %2%", symbols[attr], positions[prevPos]),
|
||||
.errPos = positions[pos]
|
||||
});
|
||||
}
|
||||
|
||||
inline void ParserState::addAttr(ExprAttrs * attrs, AttrPath && attrPath, Expr * e, const PosIdx pos)
|
||||
{
|
||||
AttrPath::iterator i;
|
||||
// All attrpaths have at least one attr
|
||||
assert(!attrPath.empty());
|
||||
// Checking attrPath validity.
|
||||
// ===========================
|
||||
for (i = attrPath.begin(); i + 1 < attrPath.end(); i++) {
|
||||
if (i->symbol) {
|
||||
ExprAttrs::AttrDefs::iterator j = attrs->attrs.find(i->symbol);
|
||||
if (j != attrs->attrs.end()) {
|
||||
if (!j->second.inherited) {
|
||||
ExprAttrs * attrs2 = dynamic_cast<ExprAttrs *>(j->second.e);
|
||||
if (!attrs2) dupAttr(attrPath, pos, j->second.pos);
|
||||
attrs = attrs2;
|
||||
} else
|
||||
dupAttr(attrPath, pos, j->second.pos);
|
||||
} else {
|
||||
ExprAttrs * nested = new ExprAttrs;
|
||||
attrs->attrs[i->symbol] = ExprAttrs::AttrDef(nested, pos);
|
||||
attrs = nested;
|
||||
}
|
||||
} else {
|
||||
ExprAttrs *nested = new ExprAttrs;
|
||||
attrs->dynamicAttrs.push_back(ExprAttrs::DynamicAttrDef(i->expr, nested, pos));
|
||||
attrs = nested;
|
||||
}
|
||||
}
|
||||
// Expr insertion.
|
||||
// ==========================
|
||||
if (i->symbol) {
|
||||
ExprAttrs::AttrDefs::iterator j = attrs->attrs.find(i->symbol);
|
||||
if (j != attrs->attrs.end()) {
|
||||
// This attr path is already defined. However, if both
|
||||
// e and the expr pointed by the attr path are two attribute sets,
|
||||
// we want to merge them.
|
||||
// Otherwise, throw an error.
|
||||
auto ae = dynamic_cast<ExprAttrs *>(e);
|
||||
auto jAttrs = dynamic_cast<ExprAttrs *>(j->second.e);
|
||||
if (jAttrs && ae) {
|
||||
for (auto & ad : ae->attrs) {
|
||||
auto j2 = jAttrs->attrs.find(ad.first);
|
||||
if (j2 != jAttrs->attrs.end()) // Attr already defined in iAttrs, error.
|
||||
dupAttr(ad.first, j2->second.pos, ad.second.pos);
|
||||
jAttrs->attrs.emplace(ad.first, ad.second);
|
||||
}
|
||||
jAttrs->dynamicAttrs.insert(jAttrs->dynamicAttrs.end(), ae->dynamicAttrs.begin(), ae->dynamicAttrs.end());
|
||||
} else {
|
||||
dupAttr(attrPath, pos, j->second.pos);
|
||||
}
|
||||
} else {
|
||||
// This attr path is not defined. Let's create it.
|
||||
attrs->attrs.emplace(i->symbol, ExprAttrs::AttrDef(e, pos));
|
||||
e->setName(i->symbol);
|
||||
}
|
||||
} else {
|
||||
attrs->dynamicAttrs.push_back(ExprAttrs::DynamicAttrDef(i->expr, e, pos));
|
||||
}
|
||||
}
|
||||
|
||||
inline Formals * ParserState::validateFormals(Formals * formals, PosIdx pos, Symbol arg)
|
||||
{
|
||||
std::sort(formals->formals.begin(), formals->formals.end(),
|
||||
[] (const auto & a, const auto & b) {
|
||||
return std::tie(a.name, a.pos) < std::tie(b.name, b.pos);
|
||||
});
|
||||
|
||||
std::optional<std::pair<Symbol, PosIdx>> duplicate;
|
||||
for (size_t i = 0; i + 1 < formals->formals.size(); i++) {
|
||||
if (formals->formals[i].name != formals->formals[i + 1].name)
|
||||
continue;
|
||||
std::pair thisDup{formals->formals[i].name, formals->formals[i + 1].pos};
|
||||
duplicate = std::min(thisDup, duplicate.value_or(thisDup));
|
||||
}
|
||||
if (duplicate)
|
||||
throw ParseError({
|
||||
.msg = hintfmt("duplicate formal function argument '%1%'", symbols[duplicate->first]),
|
||||
.errPos = positions[duplicate->second]
|
||||
});
|
||||
|
||||
if (arg && formals->has(arg))
|
||||
throw ParseError({
|
||||
.msg = hintfmt("duplicate formal function argument '%1%'", symbols[arg]),
|
||||
.errPos = positions[pos]
|
||||
});
|
||||
|
||||
return formals;
|
||||
}
|
||||
|
||||
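validateFormals above relies on the fact that, once the formals are sorted, any duplicate names must sit in adjacent positions. A standalone sketch of that idea using plain strings instead of the actual Symbol/PosIdx types (findDuplicate is a hypothetical helper):

#include <algorithm>
#include <optional>
#include <string>
#include <vector>

std::optional<std::string> findDuplicate(std::vector<std::string> names)
{
    // Sort a copy, then scan adjacent pairs: equal neighbours mean a duplicate.
    std::sort(names.begin(), names.end());
    for (size_t i = 0; i + 1 < names.size(); ++i)
        if (names[i] == names[i + 1])
            return names[i];
    return std::nullopt;
}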
inline Expr * ParserState::stripIndentation(const PosIdx pos,
|
||||
std::vector<std::pair<PosIdx, std::variant<Expr *, StringToken>>> && es)
|
||||
{
|
||||
if (es.empty()) return new ExprString("");
|
||||
|
||||
/* Figure out the minimum indentation. Note that by design
|
||||
whitespace-only final lines are not taken into account. (So
|
||||
the " " in "\n ''" is ignored, but the " " in "\n foo''" is.) */
|
||||
bool atStartOfLine = true; /* = seen only whitespace in the current line */
|
||||
size_t minIndent = 1000000;
|
||||
size_t curIndent = 0;
|
||||
for (auto & [i_pos, i] : es) {
|
||||
auto * str = std::get_if<StringToken>(&i);
|
||||
if (!str || !str->hasIndentation) {
|
||||
/* Anti-quotations and escaped characters end the current start-of-line whitespace. */
|
||||
if (atStartOfLine) {
|
||||
atStartOfLine = false;
|
||||
if (curIndent < minIndent) minIndent = curIndent;
|
||||
}
|
||||
continue;
|
||||
}
|
||||
for (size_t j = 0; j < str->l; ++j) {
|
||||
if (atStartOfLine) {
|
||||
if (str->p[j] == ' ')
|
||||
curIndent++;
|
||||
else if (str->p[j] == '\n') {
|
||||
/* Empty line, doesn't influence minimum
|
||||
indentation. */
|
||||
curIndent = 0;
|
||||
} else {
|
||||
atStartOfLine = false;
|
||||
if (curIndent < minIndent) minIndent = curIndent;
|
||||
}
|
||||
} else if (str->p[j] == '\n') {
|
||||
atStartOfLine = true;
|
||||
curIndent = 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/* Strip spaces from each line. */
|
||||
auto * es2 = new std::vector<std::pair<PosIdx, Expr *>>;
|
||||
atStartOfLine = true;
|
||||
size_t curDropped = 0;
|
||||
size_t n = es.size();
|
||||
auto i = es.begin();
|
||||
const auto trimExpr = [&] (Expr * e) {
|
||||
atStartOfLine = false;
|
||||
curDropped = 0;
|
||||
es2->emplace_back(i->first, e);
|
||||
};
|
||||
const auto trimString = [&] (const StringToken & t) {
|
||||
std::string s2;
|
||||
for (size_t j = 0; j < t.l; ++j) {
|
||||
if (atStartOfLine) {
|
||||
if (t.p[j] == ' ') {
|
||||
if (curDropped++ >= minIndent)
|
||||
s2 += t.p[j];
|
||||
}
|
||||
else if (t.p[j] == '\n') {
|
||||
curDropped = 0;
|
||||
s2 += t.p[j];
|
||||
} else {
|
||||
atStartOfLine = false;
|
||||
curDropped = 0;
|
||||
s2 += t.p[j];
|
||||
}
|
||||
} else {
|
||||
s2 += t.p[j];
|
||||
if (t.p[j] == '\n') atStartOfLine = true;
|
||||
}
|
||||
}
|
||||
|
||||
/* Remove the last line if it is empty and consists only of
|
||||
spaces. */
|
||||
if (n == 1) {
|
||||
std::string::size_type p = s2.find_last_of('\n');
|
||||
if (p != std::string::npos && s2.find_first_not_of(' ', p + 1) == std::string::npos)
|
||||
s2 = std::string(s2, 0, p + 1);
|
||||
}
|
||||
|
||||
es2->emplace_back(i->first, new ExprString(std::move(s2)));
|
||||
};
|
||||
for (; i != es.end(); ++i, --n) {
|
||||
std::visit(overloaded { trimExpr, trimString }, i->second);
|
||||
}
|
||||
|
||||
/* If this is a single string, then don't do a concatenation. */
|
||||
if (es2->size() == 1 && dynamic_cast<ExprString *>((*es2)[0].second)) {
|
||||
auto *const result = (*es2)[0].second;
|
||||
delete es2;
|
||||
return result;
|
||||
}
|
||||
return new ExprConcatStrings(pos, true, es2);
|
||||
}
|
||||
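The minimum-indentation rule used by stripIndentation can be demonstrated on a plain string. This is a simplified, standalone sketch (std-only; stripCommonIndent is a hypothetical name, and it ignores the interpolation and trailing-line details handled above): it finds the smallest leading-space count across non-blank lines and strips that many spaces from every line.

#include <algorithm>
#include <iostream>
#include <sstream>
#include <string>
#include <vector>

std::string stripCommonIndent(const std::string & s)
{
    std::istringstream in(s);
    std::vector<std::string> lines;
    std::string line;
    size_t minIndent = std::string::npos;
    while (std::getline(in, line)) {
        lines.push_back(line);
        auto firstNonSpace = line.find_first_not_of(' ');
        if (firstNonSpace != std::string::npos)   // whitespace-only lines don't count
            minIndent = std::min(minIndent, firstNonSpace);
    }
    std::string out;
    for (auto & l : lines)
        out += (l.size() > minIndent ? l.substr(minIndent) : std::string()) + "\n";
    return out;
}

int main()
{
    std::cout << stripCommonIndent("  foo\n    bar\n  baz\n");
    // prints:
    // foo
    //   bar
    // baz
}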
|
||||
inline PosIdx ParserState::at(const ParserLocation & loc)
|
||||
{
|
||||
return positions.add(origin, loc.first_line, loc.first_column);
|
||||
}
|
||||
|
||||
}
|
|
@ -5,9 +5,9 @@
|
|||
%defines
|
||||
/* %no-lines */
|
||||
%parse-param { void * scanner }
|
||||
%parse-param { nix::ParseData * data }
|
||||
%parse-param { nix::ParserState * state }
|
||||
%lex-param { void * scanner }
|
||||
%lex-param { nix::ParseData * data }
|
||||
%lex-param { nix::ParserState * state }
|
||||
%expect 1
|
||||
%expect-rr 1
|
||||
|
||||
|
@ -18,6 +18,7 @@
|
|||
|
||||
#include <variant>
|
||||
|
||||
#include "finally.hh"
|
||||
#include "util.hh"
|
||||
#include "users.hh"
|
||||
|
||||
|
@ -25,38 +26,26 @@
|
|||
#include "eval.hh"
|
||||
#include "eval-settings.hh"
|
||||
#include "globals.hh"
|
||||
#include "parser-state.hh"
|
||||
|
||||
#define YYLTYPE ::nix::ParserLocation
|
||||
#define YY_DECL int yylex \
|
||||
(YYSTYPE * yylval_param, YYLTYPE * yylloc_param, yyscan_t yyscanner, nix::ParserState * state)
|
||||
|
||||
namespace nix {
|
||||
|
||||
struct ParseData
|
||||
{
|
||||
EvalState & state;
|
||||
SymbolTable & symbols;
|
||||
Expr * result;
|
||||
SourcePath basePath;
|
||||
PosTable::Origin origin;
|
||||
std::optional<ErrorInfo> error;
|
||||
};
|
||||
|
||||
struct ParserFormals {
|
||||
std::vector<Formal> formals;
|
||||
bool ellipsis = false;
|
||||
};
|
||||
Expr * parseExprFromBuf(
|
||||
char * text,
|
||||
size_t length,
|
||||
Pos::Origin origin,
|
||||
const SourcePath & basePath,
|
||||
SymbolTable & symbols,
|
||||
PosTable & positions,
|
||||
const ref<InputAccessor> rootFS,
|
||||
const Expr::AstSymbols & astSymbols);
|
||||
|
||||
}
|
||||
|
||||
// using C a struct allows us to avoid having to define the special
|
||||
// members that using string_view here would implicitly delete.
|
||||
struct StringToken {
|
||||
const char * p;
|
||||
size_t l;
|
||||
bool hasIndentation;
|
||||
operator std::string_view() const { return {p, l}; }
|
||||
};
|
||||
|
||||
#define YY_DECL int yylex \
|
||||
(YYSTYPE * yylval_param, YYLTYPE * yylloc_param, yyscan_t yyscanner, nix::ParseData * data)
|
||||
|
||||
#endif
|
||||
|
||||
}
|
||||
|
@ -70,240 +59,15 @@ YY_DECL;
|
|||
|
||||
using namespace nix;
|
||||
|
||||
|
||||
namespace nix {
|
||||
#define CUR_POS state->at(*yylocp)
|
||||
|
||||
|
||||
static void dupAttr(const EvalState & state, const AttrPath & attrPath, const PosIdx pos, const PosIdx prevPos)
|
||||
void yyerror(YYLTYPE * loc, yyscan_t scanner, ParserState * state, const char * error)
|
||||
{
|
||||
throw ParseError({
|
||||
.msg = hintfmt("attribute '%1%' already defined at %2%",
|
||||
showAttrPath(state.symbols, attrPath), state.positions[prevPos]),
|
||||
.errPos = state.positions[pos]
|
||||
});
|
||||
}
|
||||
|
||||
static void dupAttr(const EvalState & state, Symbol attr, const PosIdx pos, const PosIdx prevPos)
|
||||
{
|
||||
throw ParseError({
|
||||
.msg = hintfmt("attribute '%1%' already defined at %2%", state.symbols[attr], state.positions[prevPos]),
|
||||
.errPos = state.positions[pos]
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
static void addAttr(ExprAttrs * attrs, AttrPath && attrPath,
|
||||
Expr * e, const PosIdx pos, const nix::EvalState & state)
|
||||
{
|
||||
AttrPath::iterator i;
|
||||
// All attrpaths have at least one attr
|
||||
assert(!attrPath.empty());
|
||||
// Checking attrPath validity.
|
||||
// ===========================
|
||||
for (i = attrPath.begin(); i + 1 < attrPath.end(); i++) {
|
||||
if (i->symbol) {
|
||||
ExprAttrs::AttrDefs::iterator j = attrs->attrs.find(i->symbol);
|
||||
if (j != attrs->attrs.end()) {
|
||||
if (!j->second.inherited) {
|
||||
ExprAttrs * attrs2 = dynamic_cast<ExprAttrs *>(j->second.e);
|
||||
if (!attrs2) dupAttr(state, attrPath, pos, j->second.pos);
|
||||
attrs = attrs2;
|
||||
} else
|
||||
dupAttr(state, attrPath, pos, j->second.pos);
|
||||
} else {
|
||||
ExprAttrs * nested = new ExprAttrs;
|
||||
attrs->attrs[i->symbol] = ExprAttrs::AttrDef(nested, pos);
|
||||
attrs = nested;
|
||||
}
|
||||
} else {
|
||||
ExprAttrs *nested = new ExprAttrs;
|
||||
attrs->dynamicAttrs.push_back(ExprAttrs::DynamicAttrDef(i->expr, nested, pos));
|
||||
attrs = nested;
|
||||
}
|
||||
}
|
||||
// Expr insertion.
|
||||
// ==========================
|
||||
if (i->symbol) {
|
||||
ExprAttrs::AttrDefs::iterator j = attrs->attrs.find(i->symbol);
|
||||
if (j != attrs->attrs.end()) {
|
||||
// This attr path is already defined. However, if both
|
||||
// e and the expr pointed by the attr path are two attribute sets,
|
||||
// we want to merge them.
|
||||
// Otherwise, throw an error.
|
||||
auto ae = dynamic_cast<ExprAttrs *>(e);
|
||||
auto jAttrs = dynamic_cast<ExprAttrs *>(j->second.e);
|
||||
if (jAttrs && ae) {
|
||||
for (auto & ad : ae->attrs) {
|
||||
auto j2 = jAttrs->attrs.find(ad.first);
|
||||
if (j2 != jAttrs->attrs.end()) // Attr already defined in iAttrs, error.
|
||||
dupAttr(state, ad.first, j2->second.pos, ad.second.pos);
|
||||
jAttrs->attrs.emplace(ad.first, ad.second);
|
||||
}
|
||||
jAttrs->dynamicAttrs.insert(jAttrs->dynamicAttrs.end(), ae->dynamicAttrs.begin(), ae->dynamicAttrs.end());
|
||||
} else {
|
||||
dupAttr(state, attrPath, pos, j->second.pos);
|
||||
}
|
||||
} else {
|
||||
// This attr path is not defined. Let's create it.
|
||||
attrs->attrs.emplace(i->symbol, ExprAttrs::AttrDef(e, pos));
|
||||
e->setName(i->symbol);
|
||||
}
|
||||
} else {
|
||||
attrs->dynamicAttrs.push_back(ExprAttrs::DynamicAttrDef(i->expr, e, pos));
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
static Formals * toFormals(ParseData & data, ParserFormals * formals,
|
||||
PosIdx pos = noPos, Symbol arg = {})
|
||||
{
|
||||
std::sort(formals->formals.begin(), formals->formals.end(),
|
||||
[] (const auto & a, const auto & b) {
|
||||
return std::tie(a.name, a.pos) < std::tie(b.name, b.pos);
|
||||
});
|
||||
|
||||
std::optional<std::pair<Symbol, PosIdx>> duplicate;
|
||||
for (size_t i = 0; i + 1 < formals->formals.size(); i++) {
|
||||
if (formals->formals[i].name != formals->formals[i + 1].name)
|
||||
continue;
|
||||
std::pair thisDup{formals->formals[i].name, formals->formals[i + 1].pos};
|
||||
duplicate = std::min(thisDup, duplicate.value_or(thisDup));
|
||||
}
|
||||
if (duplicate)
|
||||
throw ParseError({
|
||||
.msg = hintfmt("duplicate formal function argument '%1%'", data.symbols[duplicate->first]),
|
||||
.errPos = data.state.positions[duplicate->second]
|
||||
});
|
||||
|
||||
Formals result;
|
||||
result.ellipsis = formals->ellipsis;
|
||||
result.formals = std::move(formals->formals);
|
||||
|
||||
if (arg && result.has(arg))
|
||||
throw ParseError({
|
||||
.msg = hintfmt("duplicate formal function argument '%1%'", data.symbols[arg]),
|
||||
.errPos = data.state.positions[pos]
|
||||
});
|
||||
|
||||
delete formals;
|
||||
return new Formals(std::move(result));
|
||||
}
|
||||
|
||||
|
||||
static Expr * stripIndentation(const PosIdx pos, SymbolTable & symbols,
|
||||
std::vector<std::pair<PosIdx, std::variant<Expr *, StringToken>>> && es)
|
||||
{
|
||||
if (es.empty()) return new ExprString("");
|
||||
|
||||
/* Figure out the minimum indentation. Note that by design
|
||||
whitespace-only final lines are not taken into account. (So
|
||||
the " " in "\n ''" is ignored, but the " " in "\n foo''" is.) */
|
||||
bool atStartOfLine = true; /* = seen only whitespace in the current line */
|
||||
size_t minIndent = 1000000;
|
||||
size_t curIndent = 0;
|
||||
for (auto & [i_pos, i] : es) {
|
||||
auto * str = std::get_if<StringToken>(&i);
|
||||
if (!str || !str->hasIndentation) {
|
||||
/* Anti-quotations and escaped characters end the current start-of-line whitespace. */
|
||||
if (atStartOfLine) {
|
||||
atStartOfLine = false;
|
||||
if (curIndent < minIndent) minIndent = curIndent;
|
||||
}
|
||||
continue;
|
||||
}
|
||||
for (size_t j = 0; j < str->l; ++j) {
|
||||
if (atStartOfLine) {
|
||||
if (str->p[j] == ' ')
|
||||
curIndent++;
|
||||
else if (str->p[j] == '\n') {
|
||||
/* Empty line, doesn't influence minimum
|
||||
indentation. */
|
||||
curIndent = 0;
|
||||
} else {
|
||||
atStartOfLine = false;
|
||||
if (curIndent < minIndent) minIndent = curIndent;
|
||||
}
|
||||
} else if (str->p[j] == '\n') {
|
||||
atStartOfLine = true;
|
||||
curIndent = 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/* Strip spaces from each line. */
|
||||
auto * es2 = new std::vector<std::pair<PosIdx, Expr *>>;
|
||||
atStartOfLine = true;
|
||||
size_t curDropped = 0;
|
||||
size_t n = es.size();
|
||||
auto i = es.begin();
|
||||
const auto trimExpr = [&] (Expr * e) {
|
||||
atStartOfLine = false;
|
||||
curDropped = 0;
|
||||
es2->emplace_back(i->first, e);
|
||||
};
|
||||
const auto trimString = [&] (const StringToken & t) {
|
||||
std::string s2;
|
||||
for (size_t j = 0; j < t.l; ++j) {
|
||||
if (atStartOfLine) {
|
||||
if (t.p[j] == ' ') {
|
||||
if (curDropped++ >= minIndent)
|
||||
s2 += t.p[j];
|
||||
}
|
||||
else if (t.p[j] == '\n') {
|
||||
curDropped = 0;
|
||||
s2 += t.p[j];
|
||||
} else {
|
||||
atStartOfLine = false;
|
||||
curDropped = 0;
|
||||
s2 += t.p[j];
|
||||
}
|
||||
} else {
|
||||
s2 += t.p[j];
|
||||
if (t.p[j] == '\n') atStartOfLine = true;
|
||||
}
|
||||
}
|
||||
|
||||
/* Remove the last line if it is empty and consists only of
|
||||
spaces. */
|
||||
if (n == 1) {
|
||||
std::string::size_type p = s2.find_last_of('\n');
|
||||
if (p != std::string::npos && s2.find_first_not_of(' ', p + 1) == std::string::npos)
|
||||
s2 = std::string(s2, 0, p + 1);
|
||||
}
|
||||
|
||||
es2->emplace_back(i->first, new ExprString(std::move(s2)));
|
||||
};
|
||||
for (; i != es.end(); ++i, --n) {
|
||||
std::visit(overloaded { trimExpr, trimString }, i->second);
|
||||
}
|
||||
|
||||
/* If this is a single string, then don't do a concatenation. */
|
||||
if (es2->size() == 1 && dynamic_cast<ExprString *>((*es2)[0].second)) {
|
||||
auto *const result = (*es2)[0].second;
|
||||
delete es2;
|
||||
return result;
|
||||
}
|
||||
return new ExprConcatStrings(pos, true, es2);
|
||||
}
|
||||
|
||||
|
||||
static inline PosIdx makeCurPos(const YYLTYPE & loc, ParseData * data)
|
||||
{
|
||||
return data->state.positions.add(data->origin, loc.first_line, loc.first_column);
|
||||
}
|
||||
|
||||
#define CUR_POS makeCurPos(*yylocp, data)
|
||||
|
||||
|
||||
}
|
||||
|
||||
|
||||
void yyerror(YYLTYPE * loc, yyscan_t scanner, ParseData * data, const char * error)
|
||||
{
|
||||
data->error = {
|
||||
.msg = hintfmt(error),
|
||||
.errPos = data->state.positions[makeCurPos(*loc, data)]
|
||||
};
|
||||
.errPos = state->positions[state->at(*loc)]
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
|
@ -314,17 +78,17 @@ void yyerror(YYLTYPE * loc, yyscan_t scanner, ParseData * data, const char * err
|
|||
nix::Expr * e;
|
||||
nix::ExprList * list;
|
||||
nix::ExprAttrs * attrs;
|
||||
nix::ParserFormals * formals;
|
||||
nix::Formals * formals;
|
||||
nix::Formal * formal;
|
||||
nix::NixInt n;
|
||||
nix::NixFloat nf;
|
||||
StringToken id; // !!! -> Symbol
|
||||
StringToken path;
|
||||
StringToken uri;
|
||||
StringToken str;
|
||||
nix::StringToken id; // !!! -> Symbol
|
||||
nix::StringToken path;
|
||||
nix::StringToken uri;
|
||||
nix::StringToken str;
|
||||
std::vector<nix::AttrName> * attrNames;
|
||||
std::vector<std::pair<nix::PosIdx, nix::Expr *>> * string_parts;
|
||||
std::vector<std::pair<nix::PosIdx, std::variant<nix::Expr *, StringToken>>> * ind_string_parts;
|
||||
std::vector<std::pair<nix::PosIdx, std::variant<nix::Expr *, nix::StringToken>>> * ind_string_parts;
|
||||
}
|
||||
|
||||
%type <e> start expr expr_function expr_if expr_op
|
||||
|
@ -340,11 +104,11 @@ void yyerror(YYLTYPE * loc, yyscan_t scanner, ParseData * data, const char * err
|
|||
%type <id> attr
|
||||
%token <id> ID
|
||||
%token <str> STR IND_STR
|
||||
%token <n> INT
|
||||
%token <nf> FLOAT
|
||||
%token <n> INT_LIT
|
||||
%token <nf> FLOAT_LIT
|
||||
%token <path> PATH HPATH SPATH PATH_END
|
||||
%token <uri> URI
|
||||
%token IF THEN ELSE ASSERT WITH LET IN REC INHERIT EQ NEQ AND OR IMPL OR_KW
|
||||
%token IF THEN ELSE ASSERT WITH LET IN_KW REC INHERIT EQ NEQ AND OR IMPL OR_KW
|
||||
%token DOLLAR_CURLY /* == ${ */
|
||||
%token IND_STRING_OPEN IND_STRING_CLOSE
|
||||
%token ELLIPSIS
|
||||
|
@ -364,34 +128,34 @@ void yyerror(YYLTYPE * loc, yyscan_t scanner, ParseData * data, const char * err
|
|||
|
||||
%%
|
||||
|
||||
start: expr { data->result = $1; };
|
||||
start: expr { state->result = $1; };
|
||||
|
||||
expr: expr_function;
|
||||
|
||||
expr_function
|
||||
: ID ':' expr_function
|
||||
{ $$ = new ExprLambda(CUR_POS, data->symbols.create($1), 0, $3); }
|
||||
{ $$ = new ExprLambda(CUR_POS, state->symbols.create($1), 0, $3); }
|
||||
| '{' formals '}' ':' expr_function
|
||||
{ $$ = new ExprLambda(CUR_POS, toFormals(*data, $2), $5); }
|
||||
{ $$ = new ExprLambda(CUR_POS, state->validateFormals($2), $5); }
|
||||
| '{' formals '}' '@' ID ':' expr_function
|
||||
{
|
||||
auto arg = data->symbols.create($5);
|
||||
$$ = new ExprLambda(CUR_POS, arg, toFormals(*data, $2, CUR_POS, arg), $7);
|
||||
auto arg = state->symbols.create($5);
|
||||
$$ = new ExprLambda(CUR_POS, arg, state->validateFormals($2, CUR_POS, arg), $7);
|
||||
}
|
||||
| ID '@' '{' formals '}' ':' expr_function
|
||||
{
|
||||
auto arg = data->symbols.create($1);
|
||||
$$ = new ExprLambda(CUR_POS, arg, toFormals(*data, $4, CUR_POS, arg), $7);
|
||||
auto arg = state->symbols.create($1);
|
||||
$$ = new ExprLambda(CUR_POS, arg, state->validateFormals($4, CUR_POS, arg), $7);
|
||||
}
|
||||
| ASSERT expr ';' expr_function
|
||||
{ $$ = new ExprAssert(CUR_POS, $2, $4); }
|
||||
| WITH expr ';' expr_function
|
||||
{ $$ = new ExprWith(CUR_POS, $2, $4); }
|
||||
| LET binds IN expr_function
|
||||
| LET binds IN_KW expr_function
|
||||
{ if (!$2->dynamicAttrs.empty())
|
||||
throw ParseError({
|
||||
.msg = hintfmt("dynamic attributes not allowed in let"),
|
||||
.errPos = data->state.positions[CUR_POS]
|
||||
.errPos = state->positions[CUR_POS]
|
||||
});
|
||||
$$ = new ExprLet($2, $4);
|
||||
}
|
||||
|
@ -405,24 +169,24 @@ expr_if
|
|||
|
||||
expr_op
|
||||
: '!' expr_op %prec NOT { $$ = new ExprOpNot($2); }
|
||||
| '-' expr_op %prec NEGATE { $$ = new ExprCall(CUR_POS, new ExprVar(data->symbols.create("__sub")), {new ExprInt(0), $2}); }
|
||||
| '-' expr_op %prec NEGATE { $$ = new ExprCall(CUR_POS, new ExprVar(state->s.sub), {new ExprInt(0), $2}); }
|
||||
| expr_op EQ expr_op { $$ = new ExprOpEq($1, $3); }
|
||||
| expr_op NEQ expr_op { $$ = new ExprOpNEq($1, $3); }
|
||||
| expr_op '<' expr_op { $$ = new ExprCall(makeCurPos(@2, data), new ExprVar(data->symbols.create("__lessThan")), {$1, $3}); }
|
||||
| expr_op LEQ expr_op { $$ = new ExprOpNot(new ExprCall(makeCurPos(@2, data), new ExprVar(data->symbols.create("__lessThan")), {$3, $1})); }
|
||||
| expr_op '>' expr_op { $$ = new ExprCall(makeCurPos(@2, data), new ExprVar(data->symbols.create("__lessThan")), {$3, $1}); }
|
||||
| expr_op GEQ expr_op { $$ = new ExprOpNot(new ExprCall(makeCurPos(@2, data), new ExprVar(data->symbols.create("__lessThan")), {$1, $3})); }
|
||||
| expr_op AND expr_op { $$ = new ExprOpAnd(makeCurPos(@2, data), $1, $3); }
|
||||
| expr_op OR expr_op { $$ = new ExprOpOr(makeCurPos(@2, data), $1, $3); }
|
||||
| expr_op IMPL expr_op { $$ = new ExprOpImpl(makeCurPos(@2, data), $1, $3); }
|
||||
| expr_op UPDATE expr_op { $$ = new ExprOpUpdate(makeCurPos(@2, data), $1, $3); }
|
||||
| expr_op '<' expr_op { $$ = new ExprCall(state->at(@2), new ExprVar(state->s.lessThan), {$1, $3}); }
|
||||
| expr_op LEQ expr_op { $$ = new ExprOpNot(new ExprCall(state->at(@2), new ExprVar(state->s.lessThan), {$3, $1})); }
|
||||
| expr_op '>' expr_op { $$ = new ExprCall(state->at(@2), new ExprVar(state->s.lessThan), {$3, $1}); }
|
||||
| expr_op GEQ expr_op { $$ = new ExprOpNot(new ExprCall(state->at(@2), new ExprVar(state->s.lessThan), {$1, $3})); }
|
||||
| expr_op AND expr_op { $$ = new ExprOpAnd(state->at(@2), $1, $3); }
|
||||
| expr_op OR expr_op { $$ = new ExprOpOr(state->at(@2), $1, $3); }
|
||||
| expr_op IMPL expr_op { $$ = new ExprOpImpl(state->at(@2), $1, $3); }
|
||||
| expr_op UPDATE expr_op { $$ = new ExprOpUpdate(state->at(@2), $1, $3); }
|
||||
| expr_op '?' attrpath { $$ = new ExprOpHasAttr($1, std::move(*$3)); delete $3; }
|
||||
| expr_op '+' expr_op
|
||||
{ $$ = new ExprConcatStrings(makeCurPos(@2, data), false, new std::vector<std::pair<PosIdx, Expr *> >({{makeCurPos(@1, data), $1}, {makeCurPos(@3, data), $3}})); }
|
||||
| expr_op '-' expr_op { $$ = new ExprCall(makeCurPos(@2, data), new ExprVar(data->symbols.create("__sub")), {$1, $3}); }
|
||||
| expr_op '*' expr_op { $$ = new ExprCall(makeCurPos(@2, data), new ExprVar(data->symbols.create("__mul")), {$1, $3}); }
|
||||
| expr_op '/' expr_op { $$ = new ExprCall(makeCurPos(@2, data), new ExprVar(data->symbols.create("__div")), {$1, $3}); }
|
||||
| expr_op CONCAT expr_op { $$ = new ExprOpConcatLists(makeCurPos(@2, data), $1, $3); }
|
||||
{ $$ = new ExprConcatStrings(state->at(@2), false, new std::vector<std::pair<PosIdx, Expr *> >({{state->at(@1), $1}, {state->at(@3), $3}})); }
|
||||
| expr_op '-' expr_op { $$ = new ExprCall(state->at(@2), new ExprVar(state->s.sub), {$1, $3}); }
|
||||
| expr_op '*' expr_op { $$ = new ExprCall(state->at(@2), new ExprVar(state->s.mul), {$1, $3}); }
|
||||
| expr_op '/' expr_op { $$ = new ExprCall(state->at(@2), new ExprVar(state->s.div), {$1, $3}); }
|
||||
| expr_op CONCAT expr_op { $$ = new ExprOpConcatLists(state->at(@2), $1, $3); }
|
||||
| expr_app
|
||||
;
|
||||
|
||||
|
@ -445,7 +209,7 @@ expr_select
|
|||
| /* Backwards compatibility: because Nixpkgs has a rarely used
|
||||
function named ‘or’, allow stuff like ‘map or [...]’. */
|
||||
expr_simple OR_KW
|
||||
{ $$ = new ExprCall(CUR_POS, $1, {new ExprVar(CUR_POS, data->symbols.create("or"))}); }
|
||||
{ $$ = new ExprCall(CUR_POS, $1, {new ExprVar(CUR_POS, state->s.or_)}); }
|
||||
| expr_simple
|
||||
;
|
||||
|
||||
|
@ -455,25 +219,25 @@ expr_simple
|
|||
if ($1.l == s.size() && strncmp($1.p, s.data(), s.size()) == 0)
|
||||
$$ = new ExprPos(CUR_POS);
|
||||
else
|
||||
$$ = new ExprVar(CUR_POS, data->symbols.create($1));
|
||||
$$ = new ExprVar(CUR_POS, state->symbols.create($1));
|
||||
}
|
||||
| INT { $$ = new ExprInt($1); }
|
||||
| FLOAT { $$ = new ExprFloat($1); }
|
||||
| INT_LIT { $$ = new ExprInt($1); }
|
||||
| FLOAT_LIT { $$ = new ExprFloat($1); }
|
||||
| '"' string_parts '"' { $$ = $2; }
|
||||
| IND_STRING_OPEN ind_string_parts IND_STRING_CLOSE {
|
||||
$$ = stripIndentation(CUR_POS, data->symbols, std::move(*$2));
|
||||
$$ = state->stripIndentation(CUR_POS, std::move(*$2));
|
||||
delete $2;
|
||||
}
|
||||
| path_start PATH_END
|
||||
| path_start string_parts_interpolated PATH_END {
|
||||
$2->insert($2->begin(), {makeCurPos(@1, data), $1});
|
||||
$2->insert($2->begin(), {state->at(@1), $1});
|
||||
$$ = new ExprConcatStrings(CUR_POS, false, $2);
|
||||
}
|
||||
| SPATH {
|
||||
std::string path($1.p + 1, $1.l - 2);
|
||||
$$ = new ExprCall(CUR_POS,
|
||||
new ExprVar(data->symbols.create("__findFile")),
|
||||
{new ExprVar(data->symbols.create("__nixPath")),
|
||||
new ExprVar(state->s.findFile),
|
||||
{new ExprVar(state->s.nixPath),
|
||||
new ExprString(std::move(path))});
|
||||
}
|
||||
| URI {
|
||||
|
@ -481,7 +245,7 @@ expr_simple
|
|||
if (noURLLiterals)
|
||||
throw ParseError({
|
||||
.msg = hintfmt("URL literals are disabled"),
|
||||
.errPos = data->state.positions[CUR_POS]
|
||||
.errPos = state->positions[CUR_POS]
|
||||
});
|
||||
$$ = new ExprString(std::string($1));
|
||||
}
|
||||
|
@ -489,7 +253,7 @@ expr_simple
|
|||
/* Let expressions `let {..., body = ...}' are just desugared
|
||||
into `(rec {..., body = ...}).body'. */
|
||||
| LET '{' binds '}'
|
||||
{ $3->recursive = true; $$ = new ExprSelect(noPos, $3, data->symbols.create("body")); }
|
||||
{ $3->recursive = true; $$ = new ExprSelect(noPos, $3, state->s.body); }
|
||||
| REC '{' binds '}'
|
||||
{ $3->recursive = true; $$ = $3; }
|
||||
| '{' binds '}'
|
||||
|
@ -505,23 +269,23 @@ string_parts
|
|||
|
||||
string_parts_interpolated
|
||||
: string_parts_interpolated STR
|
||||
{ $$ = $1; $1->emplace_back(makeCurPos(@2, data), new ExprString(std::string($2))); }
|
||||
| string_parts_interpolated DOLLAR_CURLY expr '}' { $$ = $1; $1->emplace_back(makeCurPos(@2, data), $3); }
|
||||
| DOLLAR_CURLY expr '}' { $$ = new std::vector<std::pair<PosIdx, Expr *>>; $$->emplace_back(makeCurPos(@1, data), $2); }
|
||||
{ $$ = $1; $1->emplace_back(state->at(@2), new ExprString(std::string($2))); }
|
||||
| string_parts_interpolated DOLLAR_CURLY expr '}' { $$ = $1; $1->emplace_back(state->at(@2), $3); }
|
||||
| DOLLAR_CURLY expr '}' { $$ = new std::vector<std::pair<PosIdx, Expr *>>; $$->emplace_back(state->at(@1), $2); }
|
||||
| STR DOLLAR_CURLY expr '}' {
|
||||
$$ = new std::vector<std::pair<PosIdx, Expr *>>;
|
||||
$$->emplace_back(makeCurPos(@1, data), new ExprString(std::string($1)));
|
||||
$$->emplace_back(makeCurPos(@2, data), $3);
|
||||
$$->emplace_back(state->at(@1), new ExprString(std::string($1)));
|
||||
$$->emplace_back(state->at(@2), $3);
|
||||
}
|
||||
;
|
||||
|
||||
path_start
|
||||
: PATH {
|
||||
Path path(absPath({$1.p, $1.l}, data->basePath.path.abs()));
|
||||
Path path(absPath({$1.p, $1.l}, state->basePath.path.abs()));
|
||||
/* add back in the trailing '/' to the first segment */
|
||||
if ($1.p[$1.l-1] == '/' && $1.l > 1)
|
||||
path += "/";
|
||||
$$ = new ExprPath(ref<InputAccessor>(data->state.rootFS), std::move(path));
|
||||
$$ = new ExprPath(ref<InputAccessor>(state->rootFS), std::move(path));
|
||||
}
|
||||
| HPATH {
|
||||
if (evalSettings.pureEval) {
|
||||
|
@ -531,24 +295,24 @@ path_start
|
|||
);
|
||||
}
|
||||
Path path(getHome() + std::string($1.p + 1, $1.l - 1));
|
||||
$$ = new ExprPath(ref<InputAccessor>(data->state.rootFS), std::move(path));
|
||||
$$ = new ExprPath(ref<InputAccessor>(state->rootFS), std::move(path));
|
||||
}
|
||||
;
|
||||
|
||||
ind_string_parts
|
||||
: ind_string_parts IND_STR { $$ = $1; $1->emplace_back(makeCurPos(@2, data), $2); }
|
||||
| ind_string_parts DOLLAR_CURLY expr '}' { $$ = $1; $1->emplace_back(makeCurPos(@2, data), $3); }
|
||||
: ind_string_parts IND_STR { $$ = $1; $1->emplace_back(state->at(@2), $2); }
|
||||
| ind_string_parts DOLLAR_CURLY expr '}' { $$ = $1; $1->emplace_back(state->at(@2), $3); }
|
||||
| { $$ = new std::vector<std::pair<PosIdx, std::variant<Expr *, StringToken>>>; }
|
||||
;
|
||||
|
||||
binds
|
||||
: binds attrpath '=' expr ';' { $$ = $1; addAttr($$, std::move(*$2), $4, makeCurPos(@2, data), data->state); delete $2; }
|
||||
: binds attrpath '=' expr ';' { $$ = $1; state->addAttr($$, std::move(*$2), $4, state->at(@2)); delete $2; }
|
||||
| binds INHERIT attrs ';'
|
||||
{ $$ = $1;
|
||||
for (auto & i : *$3) {
|
||||
if ($$->attrs.find(i.symbol) != $$->attrs.end())
|
||||
dupAttr(data->state, i.symbol, makeCurPos(@3, data), $$->attrs[i.symbol].pos);
|
||||
auto pos = makeCurPos(@3, data);
|
||||
state->dupAttr(i.symbol, state->at(@3), $$->attrs[i.symbol].pos);
|
||||
auto pos = state->at(@3);
|
||||
$$->attrs.emplace(i.symbol, ExprAttrs::AttrDef(new ExprVar(CUR_POS, i.symbol), pos, true));
|
||||
}
|
||||
delete $3;
|
||||
|
@ -558,48 +322,48 @@ binds
|
|||
/* !!! Should ensure sharing of the expression in $4. */
|
||||
for (auto & i : *$6) {
|
||||
if ($$->attrs.find(i.symbol) != $$->attrs.end())
|
||||
dupAttr(data->state, i.symbol, makeCurPos(@6, data), $$->attrs[i.symbol].pos);
|
||||
$$->attrs.emplace(i.symbol, ExprAttrs::AttrDef(new ExprSelect(CUR_POS, $4, i.symbol), makeCurPos(@6, data)));
|
||||
state->dupAttr(i.symbol, state->at(@6), $$->attrs[i.symbol].pos);
|
||||
$$->attrs.emplace(i.symbol, ExprAttrs::AttrDef(new ExprSelect(CUR_POS, $4, i.symbol), state->at(@6)));
|
||||
}
|
||||
delete $6;
|
||||
}
|
||||
| { $$ = new ExprAttrs(makeCurPos(@0, data)); }
|
||||
| { $$ = new ExprAttrs(state->at(@0)); }
|
||||
;
|
||||
|
||||
attrs
|
||||
: attrs attr { $$ = $1; $1->push_back(AttrName(data->symbols.create($2))); }
|
||||
: attrs attr { $$ = $1; $1->push_back(AttrName(state->symbols.create($2))); }
|
||||
| attrs string_attr
|
||||
{ $$ = $1;
|
||||
ExprString * str = dynamic_cast<ExprString *>($2);
|
||||
if (str) {
|
||||
$$->push_back(AttrName(data->symbols.create(str->s)));
|
||||
$$->push_back(AttrName(state->symbols.create(str->s)));
|
||||
delete str;
|
||||
} else
|
||||
throw ParseError({
|
||||
.msg = hintfmt("dynamic attributes not allowed in inherit"),
|
||||
.errPos = data->state.positions[makeCurPos(@2, data)]
|
||||
.errPos = state->positions[state->at(@2)]
|
||||
});
|
||||
}
|
||||
| { $$ = new AttrPath; }
|
||||
;
|
||||
|
||||
attrpath
|
||||
: attrpath '.' attr { $$ = $1; $1->push_back(AttrName(data->symbols.create($3))); }
|
||||
: attrpath '.' attr { $$ = $1; $1->push_back(AttrName(state->symbols.create($3))); }
|
||||
| attrpath '.' string_attr
|
||||
{ $$ = $1;
|
||||
ExprString * str = dynamic_cast<ExprString *>($3);
|
||||
if (str) {
|
||||
$$->push_back(AttrName(data->symbols.create(str->s)));
|
||||
$$->push_back(AttrName(state->symbols.create(str->s)));
|
||||
delete str;
|
||||
} else
|
||||
$$->push_back(AttrName($3));
|
||||
}
|
||||
| attr { $$ = new std::vector<AttrName>; $$->push_back(AttrName(data->symbols.create($1))); }
|
||||
| attr { $$ = new std::vector<AttrName>; $$->push_back(AttrName(state->symbols.create($1))); }
|
||||
| string_attr
|
||||
{ $$ = new std::vector<AttrName>;
|
||||
ExprString *str = dynamic_cast<ExprString *>($1);
|
||||
if (str) {
|
||||
$$->push_back(AttrName(data->symbols.create(str->s)));
|
||||
$$->push_back(AttrName(state->symbols.create(str->s)));
|
||||
delete str;
|
||||
} else
|
||||
$$->push_back(AttrName($1));
|
||||
|
@ -625,226 +389,52 @@ formals
|
|||
: formal ',' formals
|
||||
{ $$ = $3; $$->formals.emplace_back(*$1); delete $1; }
|
||||
| formal
|
||||
{ $$ = new ParserFormals; $$->formals.emplace_back(*$1); $$->ellipsis = false; delete $1; }
|
||||
{ $$ = new Formals; $$->formals.emplace_back(*$1); $$->ellipsis = false; delete $1; }
|
||||
|
|
||||
{ $$ = new ParserFormals; $$->ellipsis = false; }
|
||||
{ $$ = new Formals; $$->ellipsis = false; }
|
||||
| ELLIPSIS
|
||||
{ $$ = new ParserFormals; $$->ellipsis = true; }
|
||||
{ $$ = new Formals; $$->ellipsis = true; }
|
||||
;
|
||||
|
||||
formal
|
||||
: ID { $$ = new Formal{CUR_POS, data->symbols.create($1), 0}; }
|
||||
| ID '?' expr { $$ = new Formal{CUR_POS, data->symbols.create($1), $3}; }
|
||||
: ID { $$ = new Formal{CUR_POS, state->symbols.create($1), 0}; }
|
||||
| ID '?' expr { $$ = new Formal{CUR_POS, state->symbols.create($1), $3}; }
|
||||
;
|
||||
|
||||
%%
|
||||
|
||||
|
||||
#include <sys/types.h>
|
||||
#include <sys/stat.h>
|
||||
#include <fcntl.h>
|
||||
#include <unistd.h>
|
||||
|
||||
#include "eval.hh"
|
||||
#include "filetransfer.hh"
|
||||
#include "tarball.hh"
|
||||
#include "store-api.hh"
|
||||
#include "flake/flake.hh"
|
||||
#include "fs-input-accessor.hh"
|
||||
#include "memory-input-accessor.hh"
|
||||
|
||||
|
||||
namespace nix {
|
||||
|
||||
unsigned long Expr::nrExprs = 0;
|
||||
|
||||
Expr * EvalState::parse(
|
||||
Expr * parseExprFromBuf(
|
||||
char * text,
|
||||
size_t length,
|
||||
Pos::Origin origin,
|
||||
const SourcePath & basePath,
|
||||
std::shared_ptr<StaticEnv> & staticEnv)
|
||||
SymbolTable & symbols,
|
||||
PosTable & positions,
|
||||
const ref<InputAccessor> rootFS,
|
||||
const Expr::AstSymbols & astSymbols)
|
||||
{
|
||||
yyscan_t scanner;
|
||||
ParseData data {
|
||||
.state = *this,
|
||||
ParserState state {
|
||||
.symbols = symbols,
|
||||
.positions = positions,
|
||||
.basePath = basePath,
|
||||
.origin = {origin},
|
||||
.rootFS = rootFS,
|
||||
.s = astSymbols,
|
||||
};
|
||||
|
||||
yylex_init(&scanner);
|
||||
Finally _destroy([&] { yylex_destroy(scanner); });
|
||||
|
||||
yy_scan_buffer(text, length, scanner);
|
||||
int res = yyparse(scanner, &data);
|
||||
yylex_destroy(scanner);
|
||||
yyparse(scanner, &state);
|
||||
|
||||
if (res) throw ParseError(data.error.value());
|
||||
|
||||
data.result->bindVars(*this, staticEnv);
|
||||
|
||||
return data.result;
|
||||
}
|
||||
|
||||
|
||||
SourcePath resolveExprPath(SourcePath path)
|
||||
{
|
||||
unsigned int followCount = 0, maxFollow = 1024;
|
||||
|
||||
/* If `path' is a symlink, follow it. This is so that relative
|
||||
path references work. */
|
||||
while (!path.path.isRoot()) {
|
||||
// Basic cycle/depth limit to avoid infinite loops.
|
||||
if (++followCount >= maxFollow)
|
||||
throw Error("too many symbolic links encountered while traversing the path '%s'", path);
|
||||
auto p = path.parent().resolveSymlinks() + path.baseName();
|
||||
if (p.lstat().type != InputAccessor::tSymlink) break;
|
||||
path = {path.accessor, CanonPath(p.readLink(), path.path.parent().value_or(CanonPath::root))};
|
||||
}
|
||||
|
||||
/* If `path' refers to a directory, append `/default.nix'. */
|
||||
if (path.resolveSymlinks().lstat().type == InputAccessor::tDirectory)
|
||||
return path + "default.nix";
|
||||
|
||||
return path;
|
||||
}
|
||||
|
||||
|
||||
Expr * EvalState::parseExprFromFile(const SourcePath & path)
|
||||
{
|
||||
return parseExprFromFile(path, staticBaseEnv);
|
||||
}
|
||||
|
||||
|
||||
Expr * EvalState::parseExprFromFile(const SourcePath & path, std::shared_ptr<StaticEnv> & staticEnv)
|
||||
{
|
||||
auto buffer = path.resolveSymlinks().readFile();
|
||||
// readFile should have left some extra space for terminators
|
||||
buffer.append("\0\0", 2);
|
||||
return parse(buffer.data(), buffer.size(), Pos::Origin(path), path.parent(), staticEnv);
|
||||
}
|
||||
|
||||
|
||||
Expr * EvalState::parseExprFromString(std::string s_, const SourcePath & basePath, std::shared_ptr<StaticEnv> & staticEnv)
|
||||
{
|
||||
auto s = make_ref<std::string>(std::move(s_));
|
||||
s->append("\0\0", 2);
|
||||
return parse(s->data(), s->size(), Pos::String{.source = s}, basePath, staticEnv);
|
||||
}
|
||||
|
||||
|
||||
Expr * EvalState::parseExprFromString(std::string s, const SourcePath & basePath)
|
||||
{
|
||||
return parseExprFromString(std::move(s), basePath, staticBaseEnv);
|
||||
}
|
||||
|
||||
|
||||
Expr * EvalState::parseStdin()
|
||||
{
|
||||
//Activity act(*logger, lvlTalkative, "parsing standard input");
|
||||
auto buffer = drainFD(0);
|
||||
// drainFD should have left some extra space for terminators
|
||||
buffer.append("\0\0", 2);
|
||||
auto s = make_ref<std::string>(std::move(buffer));
|
||||
return parse(s->data(), s->size(), Pos::Stdin{.source = s}, rootPath(CanonPath::fromCwd()), staticBaseEnv);
|
||||
}
|
||||
|
||||
|
||||
SourcePath EvalState::findFile(const std::string_view path)
|
||||
{
|
||||
return findFile(searchPath, path);
|
||||
}
|
||||
|
||||
|
||||
SourcePath EvalState::findFile(const SearchPath & searchPath, const std::string_view path, const PosIdx pos)
|
||||
{
|
||||
for (auto & i : searchPath.elements) {
|
||||
auto suffixOpt = i.prefix.suffixIfPotentialMatch(path);
|
||||
|
||||
if (!suffixOpt) continue;
|
||||
auto suffix = *suffixOpt;
|
||||
|
||||
auto rOpt = resolveSearchPathPath(i.path);
|
||||
if (!rOpt) continue;
|
||||
auto r = *rOpt;
|
||||
|
||||
Path res = suffix == "" ? r : concatStrings(r, "/", suffix);
|
||||
if (pathExists(res)) return rootPath(CanonPath(canonPath(res)));
|
||||
}
|
||||
|
||||
if (hasPrefix(path, "nix/"))
|
||||
return {corepkgsFS, CanonPath(path.substr(3))};
|
||||
|
||||
debugThrow(ThrownError({
|
||||
.msg = hintfmt(evalSettings.pureEval
|
||||
? "cannot look up '<%s>' in pure evaluation mode (use '--impure' to override)"
|
||||
: "file '%s' was not found in the Nix search path (add it using $NIX_PATH or -I)",
|
||||
path),
|
||||
.errPos = positions[pos]
|
||||
}), 0, 0);
|
||||
}
|
||||
|
||||
|
||||
std::optional<std::string> EvalState::resolveSearchPathPath(const SearchPath::Path & value0, bool initAccessControl)
|
||||
{
|
||||
auto & value = value0.s;
|
||||
auto i = searchPathResolved.find(value);
|
||||
if (i != searchPathResolved.end()) return i->second;
|
||||
|
||||
std::optional<std::string> res;
|
||||
|
||||
if (EvalSettings::isPseudoUrl(value)) {
|
||||
try {
|
||||
auto storePath = fetchers::downloadTarball(
|
||||
store, EvalSettings::resolvePseudoUrl(value), "source", false).storePath;
|
||||
res = { store->toRealPath(storePath) };
|
||||
} catch (FileTransferError & e) {
|
||||
logWarning({
|
||||
.msg = hintfmt("Nix search path entry '%1%' cannot be downloaded, ignoring", value)
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
else if (hasPrefix(value, "flake:")) {
|
||||
experimentalFeatureSettings.require(Xp::Flakes);
|
||||
auto flakeRef = parseFlakeRef(value.substr(6), {}, true, false);
|
||||
debug("fetching flake search path element '%s''", value);
|
||||
auto storePath = flakeRef.resolve(store).fetchTree(store).first;
|
||||
res = { store->toRealPath(storePath) };
|
||||
}
|
||||
|
||||
else {
|
||||
auto path = absPath(value);
|
||||
|
||||
/* Allow access to paths in the search path. */
|
||||
if (initAccessControl) {
|
||||
allowPath(path);
|
||||
if (store->isInStore(path)) {
|
||||
try {
|
||||
StorePathSet closure;
|
||||
store->computeFSClosure(store->toStorePath(path).first, closure);
|
||||
for (auto & p : closure)
|
||||
allowPath(p);
|
||||
} catch (InvalidPath &) { }
|
||||
}
|
||||
}
|
||||
|
||||
if (pathExists(path))
|
||||
res = { path };
|
||||
else {
|
||||
logWarning({
|
||||
.msg = hintfmt("Nix search path entry '%1%' does not exist, ignoring", value)
|
||||
});
|
||||
res = std::nullopt;
|
||||
}
|
||||
}
|
||||
|
||||
if (res)
|
||||
debug("resolved search path element '%s' to '%s'", value, *res);
|
||||
else
|
||||
debug("failed to resolve search path element '%s'", value);
|
||||
|
||||
searchPathResolved.emplace(value, res);
|
||||
return res;
|
||||
return state.result;
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -16,6 +16,7 @@
|
|||
#include "value-to-xml.hh"
|
||||
#include "primops.hh"
|
||||
#include "fs-input-accessor.hh"
|
||||
#include "fetch-to-store.hh"
|
||||
|
||||
#include <boost/container/small_vector.hpp>
|
||||
#include <nlohmann/json.hpp>
|
||||
|
@ -84,14 +85,14 @@ StringMap EvalState::realiseContext(const NixStringContext & context)
|
|||
/* Build/substitute the context. */
|
||||
std::vector<DerivedPath> buildReqs;
|
||||
for (auto & d : drvs) buildReqs.emplace_back(DerivedPath { d });
|
||||
store->buildPaths(buildReqs);
|
||||
buildStore->buildPaths(buildReqs, bmNormal, store);
|
||||
|
||||
StorePathSet outputsToCopyAndAllow;
|
||||
|
||||
for (auto & drv : drvs) {
|
||||
auto outputs = resolveDerivedPath(*store, drv);
|
||||
auto outputs = resolveDerivedPath(*buildStore, drv, &*store);
|
||||
for (auto & [outputName, outputPath] : outputs) {
|
||||
/* Add the output of this derivation to the allowed
|
||||
paths. */
|
||||
allowPath(store->toRealPath(outputPath));
|
||||
outputsToCopyAndAllow.insert(outputPath);
|
||||
|
||||
/* Get all the output paths corresponding to the placeholders we had */
|
||||
if (experimentalFeatureSettings.isEnabled(Xp::CaDerivations)) {
|
||||
|
@ -101,12 +102,19 @@ StringMap EvalState::realiseContext(const NixStringContext & context)
|
|||
.drvPath = drv.drvPath,
|
||||
.output = outputName,
|
||||
}).render(),
|
||||
store->printStorePath(outputPath)
|
||||
buildStore->printStorePath(outputPath)
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (store != buildStore) copyClosure(*buildStore, *store, outputsToCopyAndAllow);
|
||||
for (auto & outputPath : outputsToCopyAndAllow) {
|
||||
/* Add the output of this derivation to the allowed
|
||||
paths. */
|
||||
allowPath(outputPath);
|
||||
}
|
||||
|
||||
return res;
|
||||
}
|
||||
|
||||
|
@ -214,7 +222,7 @@ static void import(EvalState & state, const PosIdx pos, Value & vPath, Value * v
|
|||
Env * env = &state.allocEnv(vScope->attrs->size());
|
||||
env->up = &state.baseEnv;
|
||||
|
||||
auto staticEnv = std::make_shared<StaticEnv>(false, state.staticBaseEnv.get(), vScope->attrs->size());
|
||||
auto staticEnv = std::make_shared<StaticEnv>(nullptr, state.staticBaseEnv.get(), vScope->attrs->size());
|
||||
|
||||
unsigned int displ = 0;
|
||||
for (auto & attr : *vScope->attrs) {
|
||||
|
@ -438,9 +446,7 @@ static RegisterPrimOp primop_isNull({
|
|||
.doc = R"(
|
||||
Return `true` if *e* evaluates to `null`, and `false` otherwise.
|
||||
|
||||
> **Warning**
|
||||
>
|
||||
> This function is *deprecated*; just write `e == null` instead.
|
||||
This is equivalent to `e == null`.
|
||||
)",
|
||||
.fun = prim_isNull,
|
||||
});
|
||||
|
@ -586,7 +592,7 @@ struct CompareValues
|
|||
case nFloat:
|
||||
return v1->fpoint < v2->fpoint;
|
||||
case nString:
|
||||
return v1->string_view().compare(v2->string_view()) < 0;
|
||||
return strcmp(v1->c_str(), v2->c_str()) < 0;
|
||||
case nPath:
|
||||
// Note: we don't take the accessor into account
|
||||
// since it's not obvious how to compare them in a
|
||||
|
@ -991,7 +997,7 @@ static void prim_trace(EvalState & state, const PosIdx pos, Value * * args, Valu
|
|||
if (args[0]->type() == nString)
|
||||
printError("trace: %1%", args[0]->string_view());
|
||||
else
|
||||
printError("trace: %1%", printValue(state, *args[0]));
|
||||
printError("trace: %1%", ValuePrinter(state, *args[0]));
|
||||
state.forceValue(*args[1], pos);
|
||||
v = *args[1];
|
||||
}
|
||||
|
@ -1872,7 +1878,7 @@ static RegisterPrimOp primop_outputOf({
|
|||
For instance,
|
||||
```nix
|
||||
builtins.outputOf
|
||||
(builtins.outputOf myDrv "out)
|
||||
(builtins.outputOf myDrv "out")
|
||||
"out"
|
||||
```
|
||||
will return a placeholder for the output of the output of `myDrv`.
|
||||
|
@ -2072,8 +2078,14 @@ static void prim_toFile(EvalState & state, const PosIdx pos, Value * * args, Val
|
|||
}
|
||||
|
||||
auto storePath = settings.readOnlyMode
|
||||
? state.store->computeStorePathForText(name, contents, refs)
|
||||
: state.store->addTextToStore(name, contents, refs, state.repair);
|
||||
? state.store->makeFixedOutputPathFromCA(name, TextInfo {
|
||||
.hash = hashString(HashAlgorithm::SHA256, contents),
|
||||
.references = std::move(refs),
|
||||
})
|
||||
: ({
|
||||
StringSource s { contents };
|
||||
state.store->addToStoreFromDump(s, name, TextIngestionMethod {}, HashAlgorithm::SHA256, refs, state.repair);
|
||||
});
|
||||
|
||||
/* Note: we don't need to add `context' to the context of the
|
||||
result, since `storePath' itself has references to the paths
|
||||
|
@ -2229,7 +2241,7 @@ static void addPath(
|
|||
});
|
||||
|
||||
if (!expectedHash || !state.store->isValidPath(*expectedStorePath)) {
|
||||
auto dstPath = path.fetchToStore(state.store, name, method, filter.get(), state.repair);
|
||||
auto dstPath = fetchToStore(*state.store, path.resolveSymlinks(), name, method, filter.get(), state.repair);
|
||||
if (expectedHash && expectedStorePath != dstPath)
|
||||
state.debugThrowLastTrace(Error("store path mismatch in (possibly filtered) path added from '%s'", path));
|
||||
state.allowAndSetStorePathString(dstPath, v);
|
||||
|
@ -2401,7 +2413,7 @@ static void prim_attrNames(EvalState & state, const PosIdx pos, Value * * args,
|
|||
(v.listElems()[n++] = state.allocValue())->mkString(state.symbols[i.name]);
|
||||
|
||||
std::sort(v.listElems(), v.listElems() + n,
|
||||
[](Value * v1, Value * v2) { return v1->string_view().compare(v2->string_view()) < 0; });
|
||||
[](Value * v1, Value * v2) { return strcmp(v1->c_str(), v2->c_str()) < 0; });
|
||||
}
|
||||
|
||||
static RegisterPrimOp primop_attrNames({
|
||||
|
@ -3700,9 +3712,6 @@ static RegisterPrimOp primop_toString({
|
|||
static void prim_substring(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
{
|
||||
int start = state.forceInt(*args[0], pos, "while evaluating the first argument (the start offset) passed to builtins.substring");
|
||||
int len = state.forceInt(*args[1], pos, "while evaluating the second argument (the substring length) passed to builtins.substring");
|
||||
NixStringContext context;
|
||||
auto s = state.coerceToString(pos, *args[2], context, "while evaluating the third argument (the string) passed to builtins.substring");
|
||||
|
||||
if (start < 0)
|
||||
state.debugThrowLastTrace(EvalError({
|
||||
|
@ -3710,6 +3719,22 @@ static void prim_substring(EvalState & state, const PosIdx pos, Value * * args,
|
|||
.errPos = state.positions[pos]
|
||||
}));
|
||||
|
||||
|
||||
int len = state.forceInt(*args[1], pos, "while evaluating the second argument (the substring length) passed to builtins.substring");
|
||||
|
||||
// Special-case on empty substring to avoid O(n) strlen
|
||||
// This allows for the use of empty substrings to efficiently capture string context
|
||||
if (len == 0) {
|
||||
state.forceValue(*args[2], pos);
|
||||
if (args[2]->type() == nString) {
|
||||
v.mkString("", args[2]->context());
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
NixStringContext context;
|
||||
auto s = state.coerceToString(pos, *args[2], context, "while evaluating the third argument (the string) passed to builtins.substring");
|
||||
|
||||
v.mkString((unsigned int) start >= s->size() ? "" : s->substr(start, len), context);
|
||||
}
|
||||
|
||||
|
@ -4383,13 +4408,16 @@ void EvalState::createBaseEnv()
|
|||
.impureOnly = true,
|
||||
});
|
||||
|
||||
if (!evalSettings.pureEval) {
|
||||
v.mkString(settings.thisSystem.get());
|
||||
}
|
||||
if (!evalSettings.pureEval)
|
||||
v.mkString(evalSettings.getCurrentSystem());
|
||||
addConstant("__currentSystem", v, {
|
||||
.type = nString,
|
||||
.doc = R"(
|
||||
The value of the [`system` configuration option](@docroot@/command-ref/conf-file.md#conf-system).
|
||||
The value of the
|
||||
[`eval-system`](@docroot@/command-ref/conf-file.md#conf-eval-system)
|
||||
or else
|
||||
[`system`](@docroot@/command-ref/conf-file.md#conf-system)
|
||||
configuration option.
|
||||
|
||||
It can be used to set the `system` attribute for [`builtins.derivation`](@docroot@/language/derivations.md) such that the resulting derivation can be built on the same system that evaluates the Nix expression:
|
||||
|
||||
|
|
|
@ -1,3 +1,4 @@
|
|||
#include "libfetchers/attrs.hh"
|
||||
#include "primops.hh"
|
||||
#include "eval-inline.hh"
|
||||
#include "eval-settings.hh"
|
||||
|
@ -25,7 +26,7 @@ void emitTreeAttrs(
|
|||
{
|
||||
assert(input.isLocked());
|
||||
|
||||
auto attrs = state.buildBindings(10);
|
||||
auto attrs = state.buildBindings(100);
|
||||
|
||||
state.mkStorePathString(storePath, attrs.alloc(state.sOutPath));
|
||||
|
||||
|
@ -135,6 +136,10 @@ static void fetchTree(
|
|||
state.symbols[attr.name], showType(*attr.value)));
|
||||
}
|
||||
|
||||
if (params.isFetchGit && !attrs.contains("exportIgnore") && (!attrs.contains("submodules") || !*fetchers::maybeGetBoolAttr(attrs, "submodules"))) {
|
||||
attrs.emplace("exportIgnore", Explicit<bool>{true});
|
||||
}
|
||||
|
||||
if (!params.allowNameArgument)
|
||||
if (auto nameIter = attrs.find("name"); nameIter != attrs.end())
|
||||
state.debugThrowLastTrace(EvalError({
|
||||
|
@ -152,6 +157,9 @@ static void fetchTree(
|
|||
fetchers::Attrs attrs;
|
||||
attrs.emplace("type", "git");
|
||||
attrs.emplace("url", fixGitURL(url));
|
||||
if (!attrs.contains("exportIgnore") && (!attrs.contains("submodules") || !*fetchers::maybeGetBoolAttr(attrs, "submodules"))) {
|
||||
attrs.emplace("exportIgnore", Explicit<bool>{true});
|
||||
}
|
||||
input = fetchers::Input::fromAttrs(std::move(attrs));
|
||||
} else {
|
||||
if (!experimentalFeatureSettings.isEnabled(Xp::Flakes))
|
||||
|
@ -166,8 +174,12 @@ static void fetchTree(
|
|||
if (!evalSettings.pureEval && !input.isDirect() && experimentalFeatureSettings.isEnabled(Xp::Flakes))
|
||||
input = lookupInRegistries(state.store, input).first;
|
||||
|
||||
if (evalSettings.pureEval && !input.isLocked())
|
||||
state.debugThrowLastTrace(EvalError("in pure evaluation mode, 'fetchTree' requires a locked input, at %s", state.positions[pos]));
|
||||
if (evalSettings.pureEval && !input.isLocked()) {
|
||||
if (params.isFetchGit)
|
||||
state.debugThrowLastTrace(EvalError("in pure evaluation mode, 'fetchGit' requires a locked input, at %s", state.positions[pos]));
|
||||
else
|
||||
state.debugThrowLastTrace(EvalError("in pure evaluation mode, 'fetchTree' requires a locked input, at %s", state.positions[pos]));
|
||||
}
|
||||
|
||||
state.checkURI(input.toURLString());
|
||||
|
||||
|
@ -593,10 +605,16 @@ static RegisterPrimOp primop_fetchGit({
|
|||
|
||||
A Boolean parameter that specifies whether submodules should be checked out.
|
||||
|
||||
- `exportIgnore` (default: `true`)
|
||||
|
||||
A Boolean parameter that specifies whether `export-ignore` from `.gitattributes` should be applied.
|
||||
This approximates part of the `git archive` behavior.
|
||||
|
||||
Enabling this option is not recommended because it is unknown whether the Git developers commit to the reproducibility of `export-ignore` in newer Git versions.
|
||||
|
||||
- `shallow` (default: `false`)
|
||||
|
||||
A Boolean parameter that specifies whether fetching from a shallow remote repository is allowed.
|
||||
This still performs a full clone of what is available on the remote.
|
||||
Make a shallow clone when fetching the Git tree.
|
||||
|
||||
- `allRefs`
|
||||
|
||||
|
|
101
src/libexpr/print-ambiguous.cc
Normal file
|
@ -0,0 +1,101 @@
|
|||
#include "print-ambiguous.hh"
|
||||
#include "print.hh"
|
||||
#include "signals.hh"
|
||||
#include "eval.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
// See: https://github.com/NixOS/nix/issues/9730
|
||||
void printAmbiguous(
|
||||
Value &v,
|
||||
const SymbolTable &symbols,
|
||||
std::ostream &str,
|
||||
std::set<const void *> *seen,
|
||||
int depth)
|
||||
{
|
||||
checkInterrupt();
|
||||
|
||||
if (depth <= 0) {
|
||||
str << "«too deep»";
|
||||
return;
|
||||
}
|
||||
switch (v.type()) {
|
||||
case nInt:
|
||||
str << v.integer;
|
||||
break;
|
||||
case nBool:
|
||||
printLiteralBool(str, v.boolean);
|
||||
break;
|
||||
case nString:
|
||||
printLiteralString(str, v.string_view());
|
||||
break;
|
||||
case nPath:
|
||||
str << v.path().to_string(); // !!! escaping?
|
||||
break;
|
||||
case nNull:
|
||||
str << "null";
|
||||
break;
|
||||
case nAttrs: {
|
||||
if (seen && !v.attrs->empty() && !seen->insert(v.attrs).second)
|
||||
str << "«repeated»";
|
||||
else {
|
||||
str << "{ ";
|
||||
for (auto & i : v.attrs->lexicographicOrder(symbols)) {
|
||||
str << symbols[i->name] << " = ";
|
||||
printAmbiguous(*i->value, symbols, str, seen, depth - 1);
|
||||
str << "; ";
|
||||
}
|
||||
str << "}";
|
||||
}
|
||||
break;
|
||||
}
|
||||
case nList:
|
||||
if (seen && v.listSize() && !seen->insert(v.listElems()).second)
|
||||
str << "«repeated»";
|
||||
else {
|
||||
str << "[ ";
|
||||
for (auto v2 : v.listItems()) {
|
||||
if (v2)
|
||||
printAmbiguous(*v2, symbols, str, seen, depth - 1);
|
||||
else
|
||||
str << "(nullptr)";
|
||||
str << " ";
|
||||
}
|
||||
str << "]";
|
||||
}
|
||||
break;
|
||||
case nThunk:
|
||||
if (!v.isBlackhole()) {
|
||||
str << "<CODE>";
|
||||
} else {
|
||||
// Although we know for sure that it's going to be an infinite recursion
|
||||
// when this value is accessed _in the current context_, it's likely
|
||||
// that the user will misinterpret a simpler «infinite recursion» output
|
||||
// as a definitive statement about the value, while in fact it may be
|
||||
// a valid value after `builtins.trace` and perhaps some other steps
|
||||
// have completed.
|
||||
str << "«potential infinite recursion»";
|
||||
}
|
||||
break;
|
||||
case nFunction:
|
||||
if (v.isLambda()) {
|
||||
str << "<LAMBDA>";
|
||||
} else if (v.isPrimOp()) {
|
||||
str << "<PRIMOP>";
|
||||
} else if (v.isPrimOpApp()) {
|
||||
str << "<PRIMOP-APP>";
|
||||
}
|
||||
break;
|
||||
case nExternal:
|
||||
str << *v.external;
|
||||
break;
|
||||
case nFloat:
|
||||
str << v.fpoint;
|
||||
break;
|
||||
default:
|
||||
printError("Nix evaluator internal error: printAmbiguous: invalid value type");
|
||||
abort();
|
||||
}
|
||||
}
|
||||
|
||||
}
|
24
src/libexpr/print-ambiguous.hh
Normal file
|
@ -0,0 +1,24 @@
|
|||
#pragma once
|
||||
|
||||
#include "value.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
/**
|
||||
* Print a value in the deprecated format used by `nix-instantiate --eval` and
|
||||
* `nix-env` (for manifests).
|
||||
*
|
||||
* This output can't be changed because it's part of the `nix-instantiate` API,
|
||||
* but it produces ambiguous output; unevaluated thunks and lambdas (and a few
|
||||
* other types) are printed as Nix path syntax like `<CODE>`.
|
||||
*
|
||||
* See: https://github.com/NixOS/nix/issues/9730
|
||||
*/
|
||||
void printAmbiguous(
|
||||
Value &v,
|
||||
const SymbolTable &symbols,
|
||||
std::ostream &str,
|
||||
std::set<const void *> *seen,
|
||||
int depth);
|
||||
|
||||
}
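As a hedged illustration of the declaration above (not part of the upstream change), a minimal sketch of how a legacy caller might invoke `printAmbiguous`; the `dumpManifestValue` helper name and the unbounded depth are assumptions.

```cpp
#include "print-ambiguous.hh"

#include <iostream>
#include <limits>
#include <set>

// Hypothetical helper: print a value in the legacy ambiguous format,
// tracking repeated attrsets/lists via `seen`, without a depth limit.
void dumpManifestValue(nix::Value & v, const nix::SymbolTable & symbols)
{
    std::set<const void *> seen;
    nix::printAmbiguous(v, symbols, std::cout, &seen, std::numeric_limits<int>::max());
}
```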
|
70
src/libexpr/print-options.hh
Normal file
|
@ -0,0 +1,70 @@
|
|||
#pragma once
|
||||
/**
|
||||
* @file
|
||||
* @brief Options for printing Nix values.
|
||||
*/
|
||||
|
||||
#include <limits>
|
||||
|
||||
namespace nix {
|
||||
|
||||
/**
|
||||
* Options for printing Nix values.
|
||||
*/
|
||||
struct PrintOptions
|
||||
{
|
||||
/**
|
||||
* If true, output ANSI color sequences.
|
||||
*/
|
||||
bool ansiColors = false;
|
||||
/**
|
||||
* If true, force values.
|
||||
*/
|
||||
bool force = false;
|
||||
/**
|
||||
* If true and `force` is set, print derivations as
|
||||
* `«derivation /nix/store/...»` instead of as attribute sets.
|
||||
*/
|
||||
bool derivationPaths = false;
|
||||
/**
|
||||
* If true, track which values have been printed and skip them on
|
||||
* subsequent encounters. Useful for self-referential values.
|
||||
*/
|
||||
bool trackRepeated = true;
|
||||
/**
|
||||
* Maximum depth to evaluate to.
|
||||
*/
|
||||
size_t maxDepth = std::numeric_limits<size_t>::max();
|
||||
/**
|
||||
* Maximum number of attributes in attribute sets to print.
|
||||
*
|
||||
* Note that this is a limit for the entire print invocation, not for each
|
||||
* attribute set encountered.
|
||||
*/
|
||||
size_t maxAttrs = std::numeric_limits<size_t>::max();
|
||||
/**
|
||||
* Maximum number of list items to print.
|
||||
*
|
||||
* Note that this is a limit for the entire print invocation, not for each
|
||||
* list encountered.
|
||||
*/
|
||||
size_t maxListItems = std::numeric_limits<size_t>::max();
|
||||
/**
|
||||
* Maximum string length to print.
|
||||
*/
|
||||
size_t maxStringLength = std::numeric_limits<size_t>::max();
|
||||
};
|
||||
|
||||
/**
|
||||
* `PrintOptions` for unknown and therefore potentially large values in error messages,
|
||||
* to avoid printing "too much" output.
|
||||
*/
|
||||
static PrintOptions errorPrintOptions = PrintOptions {
|
||||
.ansiColors = true,
|
||||
.maxDepth = 10,
|
||||
.maxAttrs = 10,
|
||||
.maxListItems = 10,
|
||||
.maxStringLength = 1024
|
||||
};
|
||||
|
||||
}
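A minimal sketch of building another `PrintOptions` value alongside `errorPrintOptions`; the `verbosePrintOptions` name and the chosen limits are assumptions, with fields given in declaration order as designated initializers require.

```cpp
#include "print-options.hh"

// Hypothetical options set: colored, forcing evaluation, with modest limits.
static nix::PrintOptions verbosePrintOptions {
    .ansiColors = true,
    .force = true,
    .derivationPaths = true,
    .maxDepth = 4,
    .maxListItems = 32,
    .maxStringLength = 256,
};
```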
|
|
@ -1,24 +1,67 @@
|
|||
#include "print.hh"
|
||||
#include <limits>
|
||||
#include <unordered_set>
|
||||
|
||||
#include "print.hh"
|
||||
#include "ansicolor.hh"
|
||||
#include "signals.hh"
|
||||
#include "store-api.hh"
|
||||
#include "terminal.hh"
|
||||
#include "english.hh"
|
||||
#include "eval.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
std::ostream &
|
||||
printLiteralString(std::ostream & str, const std::string_view string)
|
||||
void printElided(
|
||||
std::ostream & output,
|
||||
unsigned int value,
|
||||
const std::string_view single,
|
||||
const std::string_view plural,
|
||||
bool ansiColors)
|
||||
{
|
||||
if (ansiColors)
|
||||
output << ANSI_FAINT;
|
||||
output << "«";
|
||||
pluralize(output, value, single, plural);
|
||||
output << " elided»";
|
||||
if (ansiColors)
|
||||
output << ANSI_NORMAL;
|
||||
}
|
||||
|
||||
|
||||
std::ostream &
|
||||
printLiteralString(std::ostream & str, const std::string_view string, size_t maxLength, bool ansiColors)
|
||||
{
|
||||
size_t charsPrinted = 0;
|
||||
if (ansiColors)
|
||||
str << ANSI_MAGENTA;
|
||||
str << "\"";
|
||||
for (auto i = string.begin(); i != string.end(); ++i) {
|
||||
if (charsPrinted >= maxLength) {
|
||||
str << "\" ";
|
||||
printElided(str, string.length() - charsPrinted, "byte", "bytes", ansiColors);
|
||||
return str;
|
||||
}
|
||||
|
||||
if (*i == '\"' || *i == '\\') str << "\\" << *i;
|
||||
else if (*i == '\n') str << "\\n";
|
||||
else if (*i == '\r') str << "\\r";
|
||||
else if (*i == '\t') str << "\\t";
|
||||
else if (*i == '$' && *(i+1) == '{') str << "\\" << *i;
|
||||
else str << *i;
|
||||
charsPrinted++;
|
||||
}
|
||||
str << "\"";
|
||||
if (ansiColors)
|
||||
str << ANSI_NORMAL;
|
||||
return str;
|
||||
}
|
||||
|
||||
std::ostream &
|
||||
printLiteralString(std::ostream & str, const std::string_view string)
|
||||
{
|
||||
return printLiteralString(str, string, std::numeric_limits<size_t>::max(), false);
|
||||
}
|
||||
|
||||
std::ostream &
|
||||
printLiteralBool(std::ostream & str, bool boolean)
|
||||
{
|
||||
|
@ -90,5 +133,382 @@ printAttributeName(std::ostream & str, std::string_view name) {
|
|||
return str;
|
||||
}
|
||||
|
||||
bool isImportantAttrName(const std::string& attrName)
|
||||
{
|
||||
return attrName == "type" || attrName == "_type";
|
||||
}
|
||||
|
||||
typedef std::pair<std::string, Value *> AttrPair;
|
||||
|
||||
struct ImportantFirstAttrNameCmp
|
||||
{
|
||||
|
||||
bool operator()(const AttrPair& lhs, const AttrPair& rhs) const
|
||||
{
|
||||
auto lhsIsImportant = isImportantAttrName(lhs.first);
|
||||
auto rhsIsImportant = isImportantAttrName(rhs.first);
|
||||
return std::forward_as_tuple(!lhsIsImportant, lhs.first)
|
||||
< std::forward_as_tuple(!rhsIsImportant, rhs.first);
|
||||
}
|
||||
};
|
||||
|
||||
typedef std::set<Value *> ValuesSeen;
|
||||
|
||||
class Printer
|
||||
{
|
||||
private:
|
||||
std::ostream & output;
|
||||
EvalState & state;
|
||||
PrintOptions options;
|
||||
std::optional<ValuesSeen> seen;
|
||||
size_t attrsPrinted = 0;
|
||||
size_t listItemsPrinted = 0;
|
||||
|
||||
void printRepeated()
|
||||
{
|
||||
if (options.ansiColors)
|
||||
output << ANSI_MAGENTA;
|
||||
output << "«repeated»";
|
||||
if (options.ansiColors)
|
||||
output << ANSI_NORMAL;
|
||||
}
|
||||
|
||||
void printNullptr()
|
||||
{
|
||||
if (options.ansiColors)
|
||||
output << ANSI_MAGENTA;
|
||||
output << "«nullptr»";
|
||||
if (options.ansiColors)
|
||||
output << ANSI_NORMAL;
|
||||
}
|
||||
|
||||
void printElided(unsigned int value, const std::string_view single, const std::string_view plural)
|
||||
{
|
||||
::nix::printElided(output, value, single, plural, options.ansiColors);
|
||||
}
|
||||
|
||||
void printInt(Value & v)
|
||||
{
|
||||
if (options.ansiColors)
|
||||
output << ANSI_CYAN;
|
||||
output << v.integer;
|
||||
if (options.ansiColors)
|
||||
output << ANSI_NORMAL;
|
||||
}
|
||||
|
||||
void printFloat(Value & v)
|
||||
{
|
||||
if (options.ansiColors)
|
||||
output << ANSI_CYAN;
|
||||
output << v.fpoint;
|
||||
if (options.ansiColors)
|
||||
output << ANSI_NORMAL;
|
||||
}
|
||||
|
||||
void printBool(Value & v)
|
||||
{
|
||||
if (options.ansiColors)
|
||||
output << ANSI_CYAN;
|
||||
printLiteralBool(output, v.boolean);
|
||||
if (options.ansiColors)
|
||||
output << ANSI_NORMAL;
|
||||
}
|
||||
|
||||
void printString(Value & v)
|
||||
{
|
||||
printLiteralString(output, v.string_view(), options.maxStringLength, options.ansiColors);
|
||||
}
|
||||
|
||||
void printPath(Value & v)
|
||||
{
|
||||
if (options.ansiColors)
|
||||
output << ANSI_GREEN;
|
||||
output << v.path().to_string(); // !!! escaping?
|
||||
if (options.ansiColors)
|
||||
output << ANSI_NORMAL;
|
||||
}
|
||||
|
||||
void printNull()
|
||||
{
|
||||
if (options.ansiColors)
|
||||
output << ANSI_CYAN;
|
||||
output << "null";
|
||||
if (options.ansiColors)
|
||||
output << ANSI_NORMAL;
|
||||
}
|
||||
|
||||
void printDerivation(Value & v)
|
||||
{
|
||||
try {
|
||||
Bindings::iterator i = v.attrs->find(state.sDrvPath);
|
||||
NixStringContext context;
|
||||
std::string storePath;
|
||||
if (i != v.attrs->end())
|
||||
storePath = state.store->printStorePath(state.coerceToStorePath(i->pos, *i->value, context, "while evaluating the drvPath of a derivation"));
|
||||
|
||||
if (options.ansiColors)
|
||||
output << ANSI_GREEN;
|
||||
output << "«derivation";
|
||||
if (!storePath.empty()) {
|
||||
output << " " << storePath;
|
||||
}
|
||||
output << "»";
|
||||
if (options.ansiColors)
|
||||
output << ANSI_NORMAL;
|
||||
} catch (BaseError & e) {
|
||||
printError_(e);
|
||||
}
|
||||
}
|
||||
|
||||
void printAttrs(Value & v, size_t depth)
|
||||
{
|
||||
if (seen && !seen->insert(&v).second) {
|
||||
printRepeated();
|
||||
return;
|
||||
}
|
||||
|
||||
if (options.force && options.derivationPaths && state.isDerivation(v)) {
|
||||
printDerivation(v);
|
||||
} else if (depth < options.maxDepth) {
|
||||
output << "{ ";
|
||||
|
||||
std::vector<std::pair<std::string, Value *>> sorted;
|
||||
for (auto & i : *v.attrs)
|
||||
sorted.emplace_back(std::pair(state.symbols[i.name], i.value));
|
||||
|
||||
if (options.maxAttrs == std::numeric_limits<size_t>::max())
|
||||
std::sort(sorted.begin(), sorted.end());
|
||||
else
|
||||
std::sort(sorted.begin(), sorted.end(), ImportantFirstAttrNameCmp());
|
||||
|
||||
for (auto & i : sorted) {
|
||||
if (attrsPrinted >= options.maxAttrs) {
|
||||
printElided(sorted.size() - attrsPrinted, "attribute", "attributes");
|
||||
break;
|
||||
}
|
||||
|
||||
printAttributeName(output, i.first);
|
||||
output << " = ";
|
||||
print(*i.second, depth + 1);
|
||||
output << "; ";
|
||||
attrsPrinted++;
|
||||
}
|
||||
|
||||
output << "}";
|
||||
} else
|
||||
output << "{ ... }";
|
||||
}
|
||||
|
||||
void printList(Value & v, size_t depth)
|
||||
{
|
||||
if (seen && v.listSize() && !seen->insert(&v).second) {
|
||||
printRepeated();
|
||||
return;
|
||||
}
|
||||
|
||||
output << "[ ";
|
||||
if (depth < options.maxDepth) {
|
||||
for (auto elem : v.listItems()) {
|
||||
if (listItemsPrinted >= options.maxListItems) {
|
||||
printElided(v.listSize() - listItemsPrinted, "item", "items");
|
||||
break;
|
||||
}
|
||||
|
||||
if (elem) {
|
||||
print(*elem, depth + 1);
|
||||
} else {
|
||||
printNullptr();
|
||||
}
|
||||
output << " ";
|
||||
listItemsPrinted++;
|
||||
}
|
||||
}
|
||||
else
|
||||
output << "... ";
|
||||
output << "]";
|
||||
}
|
||||
|
||||
void printFunction(Value & v)
|
||||
{
|
||||
if (options.ansiColors)
|
||||
output << ANSI_BLUE;
|
||||
output << "«";
|
||||
|
||||
if (v.isLambda()) {
|
||||
output << "lambda";
|
||||
if (v.lambda.fun) {
|
||||
if (v.lambda.fun->name) {
|
||||
output << " " << state.symbols[v.lambda.fun->name];
|
||||
}
|
||||
|
||||
std::ostringstream s;
|
||||
s << state.positions[v.lambda.fun->pos];
|
||||
output << " @ " << filterANSIEscapes(s.str());
|
||||
}
|
||||
} else if (v.isPrimOp()) {
|
||||
if (v.primOp)
|
||||
output << *v.primOp;
|
||||
else
|
||||
output << "primop";
|
||||
} else if (v.isPrimOpApp()) {
|
||||
output << "partially applied ";
|
||||
auto primOp = v.primOpAppPrimOp();
|
||||
if (primOp)
|
||||
output << *primOp;
|
||||
else
|
||||
output << "primop";
|
||||
} else {
|
||||
abort();
|
||||
}
|
||||
|
||||
output << "»";
|
||||
if (options.ansiColors)
|
||||
output << ANSI_NORMAL;
|
||||
}
|
||||
|
||||
void printThunk(Value & v)
|
||||
{
|
||||
if (v.isBlackhole()) {
|
||||
// Although we know for sure that it's going to be an infinite recursion
|
||||
// when this value is accessed _in the current context_, it's likely
|
||||
// that the user will misinterpret a simpler «infinite recursion» output
|
||||
// as a definitive statement about the value, while in fact it may be
|
||||
// a valid value after `builtins.trace` and perhaps some other steps
|
||||
// have completed.
|
||||
if (options.ansiColors)
|
||||
output << ANSI_RED;
|
||||
output << "«potential infinite recursion»";
|
||||
if (options.ansiColors)
|
||||
output << ANSI_NORMAL;
|
||||
} else if (v.isThunk() || v.isApp()) {
|
||||
if (options.ansiColors)
|
||||
output << ANSI_MAGENTA;
|
||||
output << "«thunk»";
|
||||
if (options.ansiColors)
|
||||
output << ANSI_NORMAL;
|
||||
} else {
|
||||
abort();
|
||||
}
|
||||
}
|
||||
|
||||
void printExternal(Value & v)
|
||||
{
|
||||
v.external->print(output);
|
||||
}
|
||||
|
||||
void printUnknown()
|
||||
{
|
||||
if (options.ansiColors)
|
||||
output << ANSI_RED;
|
||||
output << "«unknown»";
|
||||
if (options.ansiColors)
|
||||
output << ANSI_NORMAL;
|
||||
}
|
||||
|
||||
void printError_(BaseError & e)
|
||||
{
|
||||
if (options.ansiColors)
|
||||
output << ANSI_RED;
|
||||
output << "«" << e.msg() << "»";
|
||||
if (options.ansiColors)
|
||||
output << ANSI_NORMAL;
|
||||
}
|
||||
|
||||
void print(Value & v, size_t depth)
|
||||
{
|
||||
output.flush();
|
||||
checkInterrupt();
|
||||
|
||||
if (options.force) {
|
||||
try {
|
||||
state.forceValue(v, v.determinePos(noPos));
|
||||
} catch (BaseError & e) {
|
||||
printError_(e);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
switch (v.type()) {
|
||||
|
||||
case nInt:
|
||||
printInt(v);
|
||||
break;
|
||||
|
||||
case nFloat:
|
||||
printFloat(v);
|
||||
break;
|
||||
|
||||
case nBool:
|
||||
printBool(v);
|
||||
break;
|
||||
|
||||
case nString:
|
||||
printString(v);
|
||||
break;
|
||||
|
||||
case nPath:
|
||||
printPath(v);
|
||||
break;
|
||||
|
||||
case nNull:
|
||||
printNull();
|
||||
break;
|
||||
|
||||
case nAttrs:
|
||||
printAttrs(v, depth);
|
||||
break;
|
||||
|
||||
case nList:
|
||||
printList(v, depth);
|
||||
break;
|
||||
|
||||
case nFunction:
|
||||
printFunction(v);
|
||||
break;
|
||||
|
||||
case nThunk:
|
||||
printThunk(v);
|
||||
break;
|
||||
|
||||
case nExternal:
|
||||
printExternal(v);
|
||||
break;
|
||||
|
||||
default:
|
||||
printUnknown();
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
public:
|
||||
Printer(std::ostream & output, EvalState & state, PrintOptions options)
|
||||
: output(output), state(state), options(options) { }
|
||||
|
||||
void print(Value & v)
|
||||
{
|
||||
attrsPrinted = 0;
|
||||
listItemsPrinted = 0;
|
||||
|
||||
if (options.trackRepeated) {
|
||||
seen.emplace();
|
||||
} else {
|
||||
seen.reset();
|
||||
}
|
||||
|
||||
ValuesSeen seen;
|
||||
print(v, 0);
|
||||
}
|
||||
};
|
||||
|
||||
void printValue(EvalState & state, std::ostream & output, Value & v, PrintOptions options)
|
||||
{
|
||||
Printer(output, state, options).print(v);
|
||||
}
|
||||
|
||||
std::ostream & operator<<(std::ostream & output, const ValuePrinter & printer)
|
||||
{
|
||||
printValue(printer.state, output, printer.value, printer.options);
|
||||
return output;
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -9,46 +9,73 @@
|
|||
|
||||
#include <iostream>
|
||||
|
||||
#include "print-options.hh"
|
||||
|
||||
namespace nix {
|
||||
/**
|
||||
* Print a string as a Nix string literal.
|
||||
*
|
||||
* Quotes and fairly minimal escaping are added.
|
||||
*
|
||||
* @param s The logical string
|
||||
*/
|
||||
std::ostream & printLiteralString(std::ostream & o, std::string_view s);
|
||||
inline std::ostream & printLiteralString(std::ostream & o, const char * s) {
|
||||
return printLiteralString(o, std::string_view(s));
|
||||
}
|
||||
inline std::ostream & printLiteralString(std::ostream & o, const std::string & s) {
|
||||
return printLiteralString(o, std::string_view(s));
|
||||
}
|
||||
|
||||
/** Print `true` or `false`. */
|
||||
std::ostream & printLiteralBool(std::ostream & o, bool b);
|
||||
class EvalState;
|
||||
struct Value;
|
||||
|
||||
/**
|
||||
* Print a string as an attribute name in the Nix expression language syntax.
|
||||
*
|
||||
* Prints a quoted string if necessary.
|
||||
*/
|
||||
std::ostream & printAttributeName(std::ostream & o, std::string_view s);
|
||||
|
||||
/**
|
||||
 * Returns `true` if a string is a reserved keyword which requires quotation
|
||||
* when printing attribute set field names.
|
||||
*/
|
||||
bool isReservedKeyword(const std::string_view str);
|
||||
|
||||
/**
|
||||
* Print a string as an identifier in the Nix expression language syntax.
|
||||
*
|
||||
* FIXME: "identifier" is ambiguous. Identifiers do not have a single
|
||||
* textual representation. They can be used in variable references,
|
||||
* let bindings, left-hand sides or attribute names in a select
|
||||
* expression, or something else entirely, like JSON. Use one of the
|
||||
* `print*` functions instead.
|
||||
*/
|
||||
std::ostream & printIdentifier(std::ostream & o, std::string_view s);
|
||||
/**
|
||||
* Print a string as a Nix string literal.
|
||||
*
|
||||
* Quotes and fairly minimal escaping are added.
|
||||
*
|
||||
* @param o The output stream to print to
|
||||
* @param s The logical string
|
||||
*/
|
||||
std::ostream & printLiteralString(std::ostream & o, std::string_view s);
|
||||
inline std::ostream & printLiteralString(std::ostream & o, const char * s) {
|
||||
return printLiteralString(o, std::string_view(s));
|
||||
}
|
||||
inline std::ostream & printLiteralString(std::ostream & o, const std::string & s) {
|
||||
return printLiteralString(o, std::string_view(s));
|
||||
}
|
||||
|
||||
/** Print `true` or `false`. */
|
||||
std::ostream & printLiteralBool(std::ostream & o, bool b);
|
||||
|
||||
/**
|
||||
* Print a string as an attribute name in the Nix expression language syntax.
|
||||
*
|
||||
* Prints a quoted string if necessary.
|
||||
*/
|
||||
std::ostream & printAttributeName(std::ostream & o, std::string_view s);
|
||||
|
||||
/**
|
||||
 * Returns `true` if a string is a reserved keyword which requires quotation
|
||||
* when printing attribute set field names.
|
||||
*/
|
||||
bool isReservedKeyword(const std::string_view str);
|
||||
|
||||
/**
|
||||
* Print a string as an identifier in the Nix expression language syntax.
|
||||
*
|
||||
* FIXME: "identifier" is ambiguous. Identifiers do not have a single
|
||||
* textual representation. They can be used in variable references,
|
||||
* let bindings, left-hand sides or attribute names in a select
|
||||
* expression, or something else entirely, like JSON. Use one of the
|
||||
* `print*` functions instead.
|
||||
*/
|
||||
std::ostream & printIdentifier(std::ostream & o, std::string_view s);
|
||||
|
||||
void printValue(EvalState & state, std::ostream & str, Value & v, PrintOptions options = PrintOptions {});
|
||||
|
||||
/**
|
||||
* A partially-applied form of `printValue` which can be formatted using `<<`
|
||||
* without allocating an intermediate string.
|
||||
*/
|
||||
class ValuePrinter {
|
||||
friend std::ostream & operator << (std::ostream & output, const ValuePrinter & printer);
|
||||
private:
|
||||
EvalState & state;
|
||||
Value & value;
|
||||
PrintOptions options;
|
||||
|
||||
public:
|
||||
ValuePrinter(EvalState & state, Value & value, PrintOptions options = PrintOptions {})
|
||||
: state(state), value(value), options(options) { }
|
||||
};
|
||||
|
||||
std::ostream & operator<<(std::ostream & output, const ValuePrinter & printer);
|
||||
}
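A small usage sketch of `ValuePrinter` as declared above, streaming a value the way `prim_trace` does earlier in this diff instead of building an intermediate string; the `logValue` wrapper is hypothetical.

```cpp
#include "print.hh"
#include "print-options.hh"

#include <ostream>

// Hypothetical wrapper: stream a value with the size-limited error options.
std::ostream & logValue(std::ostream & out, nix::EvalState & state, nix::Value & v)
{
    return out << nix::ValuePrinter(state, v, nix::errorPrintOptions);
}
```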
|
||||
|
|
|
@ -8,6 +8,8 @@
|
|||
#include "symbol-table.hh"
|
||||
#include "value/context.hh"
|
||||
#include "input-accessor.hh"
|
||||
#include "source-path.hh"
|
||||
#include "print-options.hh"
|
||||
|
||||
#if HAVE_BOEHMGC
|
||||
#include <gc/gc_allocator.h>
|
||||
|
@ -32,7 +34,6 @@ typedef enum {
|
|||
tThunk,
|
||||
tApp,
|
||||
tLambda,
|
||||
tBlackhole,
|
||||
tPrimOp,
|
||||
tPrimOpApp,
|
||||
tExternal,
|
||||
|
@ -62,6 +63,7 @@ class Bindings;
|
|||
struct Env;
|
||||
struct Expr;
|
||||
struct ExprLambda;
|
||||
struct ExprBlackHole;
|
||||
struct PrimOp;
|
||||
class Symbol;
|
||||
class PosIdx;
|
||||
|
@ -69,7 +71,7 @@ struct Pos;
|
|||
class StorePath;
|
||||
class EvalState;
|
||||
class XMLWriter;
|
||||
|
||||
class Printer;
|
||||
|
||||
typedef int64_t NixInt;
|
||||
typedef double NixFloat;
|
||||
|
@ -81,6 +83,7 @@ typedef double NixFloat;
|
|||
class ExternalValueBase
|
||||
{
|
||||
friend std::ostream & operator << (std::ostream & str, const ExternalValueBase & v);
|
||||
friend class Printer;
|
||||
protected:
|
||||
/**
|
||||
* Print out the value
|
||||
|
@ -138,11 +141,9 @@ private:
|
|||
|
||||
friend std::string showType(const Value & v);
|
||||
|
||||
void print(const SymbolTable &symbols, std::ostream &str, std::set<const void *> *seen, int depth) const;
|
||||
|
||||
public:
|
||||
|
||||
void print(const SymbolTable &symbols, std::ostream &str, bool showRepeated = false, int depth = INT_MAX) const;
|
||||
void print(EvalState &state, std::ostream &str, PrintOptions options = PrintOptions {});
|
||||
|
||||
// Functions needed to distinguish the type
|
||||
// These should be removed eventually, by putting the functionality that's
|
||||
|
@ -151,7 +152,7 @@ public:
|
|||
// type() == nThunk
|
||||
inline bool isThunk() const { return internalType == tThunk; };
|
||||
inline bool isApp() const { return internalType == tApp; };
|
||||
inline bool isBlackhole() const { return internalType == tBlackhole; };
|
||||
inline bool isBlackhole() const;
|
||||
|
||||
// type() == nFunction
|
||||
inline bool isLambda() const { return internalType == tLambda; };
|
||||
|
@ -248,7 +249,7 @@ public:
|
|||
case tLambda: case tPrimOp: case tPrimOpApp: return nFunction;
|
||||
case tExternal: return nExternal;
|
||||
case tFloat: return nFloat;
|
||||
case tThunk: case tApp: case tBlackhole: return nThunk;
|
||||
case tThunk: case tApp: return nThunk;
|
||||
}
|
||||
if (invalidIsThunk)
|
||||
return nThunk;
|
||||
|
@ -356,21 +357,22 @@ public:
|
|||
lambda.fun = f;
|
||||
}
|
||||
|
||||
inline void mkBlackhole()
|
||||
{
|
||||
internalType = tBlackhole;
|
||||
// Value will be overridden anyways
|
||||
}
|
||||
inline void mkBlackhole();
|
||||
|
||||
void mkPrimOp(PrimOp * p);
|
||||
|
||||
inline void mkPrimOpApp(Value * l, Value * r)
|
||||
{
|
||||
internalType = tPrimOpApp;
|
||||
app.left = l;
|
||||
app.right = r;
|
||||
primOpApp.left = l;
|
||||
primOpApp.right = r;
|
||||
}
|
||||
|
||||
/**
|
||||
* For a `tPrimOpApp` value, get the original `PrimOp` value.
|
||||
*/
|
||||
PrimOp * primOpAppPrimOp() const;
|
||||
|
||||
inline void mkExternal(ExternalValueBase * e)
|
||||
{
|
||||
clearValue();
|
||||
|
@ -447,6 +449,20 @@ public:
|
|||
};
|
||||
|
||||
|
||||
extern ExprBlackHole eBlackHole;
|
||||
|
||||
bool Value::isBlackhole() const
|
||||
{
|
||||
return internalType == tThunk && thunk.expr == (Expr*) &eBlackHole;
|
||||
}
|
||||
|
||||
void Value::mkBlackhole()
|
||||
{
|
||||
internalType = tThunk;
|
||||
thunk.expr = (Expr*) &eBlackHole;
|
||||
}
|
||||
|
||||
|
||||
#if HAVE_BOEHMGC
|
||||
typedef std::vector<Value *, traceable_allocator<Value *>> ValueVector;
|
||||
typedef std::map<Symbol, Value *, std::less<Symbol>, traceable_allocator<std::pair<const Symbol, Value *>>> ValueMap;
|
||||
|
|
|
@ -106,7 +106,7 @@ struct CacheImpl : Cache
|
|||
}
|
||||
|
||||
void add(
|
||||
ref<Store> store,
|
||||
Store & store,
|
||||
const Attrs & inAttrs,
|
||||
const Attrs & infoAttrs,
|
||||
const StorePath & storePath,
|
||||
|
@ -115,13 +115,13 @@ struct CacheImpl : Cache
|
|||
_state.lock()->add.use()
|
||||
(attrsToJSON(inAttrs).dump())
|
||||
(attrsToJSON(infoAttrs).dump())
|
||||
(store->printStorePath(storePath))
|
||||
(store.printStorePath(storePath))
|
||||
(locked)
|
||||
(time(0)).exec();
|
||||
}
|
||||
|
||||
std::optional<std::pair<Attrs, StorePath>> lookup(
|
||||
ref<Store> store,
|
||||
Store & store,
|
||||
const Attrs & inAttrs) override
|
||||
{
|
||||
if (auto res = lookupExpired(store, inAttrs)) {
|
||||
|
@ -134,7 +134,7 @@ struct CacheImpl : Cache
|
|||
}
|
||||
|
||||
std::optional<Result> lookupExpired(
|
||||
ref<Store> store,
|
||||
Store & store,
|
||||
const Attrs & inAttrs) override
|
||||
{
|
||||
auto state(_state.lock());
|
||||
|
@ -148,19 +148,19 @@ struct CacheImpl : Cache
|
|||
}
|
||||
|
||||
auto infoJSON = stmt.getStr(0);
|
||||
auto storePath = store->parseStorePath(stmt.getStr(1));
|
||||
auto storePath = store.parseStorePath(stmt.getStr(1));
|
||||
auto locked = stmt.getInt(2) != 0;
|
||||
auto timestamp = stmt.getInt(3);
|
||||
|
||||
store->addTempRoot(storePath);
|
||||
if (!store->isValidPath(storePath)) {
|
||||
store.addTempRoot(storePath);
|
||||
if (!store.isValidPath(storePath)) {
|
||||
// FIXME: we could try to substitute 'storePath'.
|
||||
debug("ignoring disappeared cache entry '%s'", inAttrsJSON);
|
||||
return {};
|
||||
}
|
||||
|
||||
debug("using cache entry '%s' -> '%s', '%s'",
|
||||
inAttrsJSON, infoJSON, store->printStorePath(storePath));
|
||||
inAttrsJSON, infoJSON, store.printStorePath(storePath));
|
||||
|
||||
return Result {
|
||||
.expired = !locked && (settings.tarballTtl.get() == 0 || timestamp + settings.tarballTtl < time(0)),
|
||||
|
|
|
@ -50,14 +50,14 @@ struct Cache
|
|||
|
||||
/* Old cache for things that have a store path. */
|
||||
virtual void add(
|
||||
ref<Store> store,
|
||||
Store & store,
|
||||
const Attrs & inAttrs,
|
||||
const Attrs & infoAttrs,
|
||||
const StorePath & storePath,
|
||||
bool locked) = 0;
|
||||
|
||||
virtual std::optional<std::pair<Attrs, StorePath>> lookup(
|
||||
ref<Store> store,
|
||||
Store & store,
|
||||
const Attrs & inAttrs) = 0;
|
||||
|
||||
struct Result
|
||||
|
@ -68,7 +68,7 @@ struct Cache
|
|||
};
|
||||
|
||||
virtual std::optional<Result> lookupExpired(
|
||||
ref<Store> store,
|
||||
Store & store,
|
||||
const Attrs & inAttrs) = 0;
|
||||
};
|
||||
|
||||
|
|
68
src/libfetchers/fetch-to-store.cc
Normal file
|
@ -0,0 +1,68 @@
|
|||
#include "fetch-to-store.hh"
|
||||
#include "fetchers.hh"
|
||||
#include "cache.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
StorePath fetchToStore(
|
||||
Store & store,
|
||||
const SourcePath & path,
|
||||
std::string_view name,
|
||||
ContentAddressMethod method,
|
||||
PathFilter * filter,
|
||||
RepairFlag repair)
|
||||
{
|
||||
// FIXME: add an optimisation for the case where the accessor is
|
||||
// an FSInputAccessor pointing to a store path.
|
||||
|
||||
std::optional<fetchers::Attrs> cacheKey;
|
||||
|
||||
if (!filter && path.accessor->fingerprint) {
|
||||
cacheKey = fetchers::Attrs{
|
||||
{"_what", "fetchToStore"},
|
||||
{"store", store.storeDir},
|
||||
{"name", std::string(name)},
|
||||
{"fingerprint", *path.accessor->fingerprint},
|
||||
{
|
||||
"method",
|
||||
std::visit(overloaded {
|
||||
[](const TextIngestionMethod &) {
|
||||
return "text";
|
||||
},
|
||||
[](const FileIngestionMethod & fim) {
|
||||
switch (fim) {
|
||||
case FileIngestionMethod::Flat: return "flat";
|
||||
case FileIngestionMethod::Recursive: return "nar";
|
||||
default: assert(false);
|
||||
}
|
||||
},
|
||||
}, method.raw),
|
||||
},
|
||||
{"path", path.path.abs()}
|
||||
};
|
||||
if (auto res = fetchers::getCache()->lookup(store, *cacheKey)) {
|
||||
debug("store path cache hit for '%s'", path);
|
||||
return res->second;
|
||||
}
|
||||
} else
|
||||
debug("source path '%s' is uncacheable", path);
|
||||
|
||||
Activity act(*logger, lvlChatty, actUnknown, fmt("copying '%s' to the store", path));
|
||||
|
||||
auto filter2 = filter ? *filter : defaultPathFilter;
|
||||
|
||||
auto storePath =
|
||||
settings.readOnlyMode
|
||||
? store.computeStorePath(
|
||||
name, *path.accessor, path.path, method, HashAlgorithm::SHA256, {}, filter2).first
|
||||
: store.addToStore(
|
||||
name, *path.accessor, path.path, method, HashAlgorithm::SHA256, {}, filter2, repair);
|
||||
|
||||
if (cacheKey)
|
||||
fetchers::getCache()->add(store, *cacheKey, {}, storePath, true);
|
||||
|
||||
return storePath;
|
||||
}
|
||||
|
||||
|
||||
}
|
22
src/libfetchers/fetch-to-store.hh
Normal file
|
@ -0,0 +1,22 @@
|
|||
#pragma once
|
||||
|
||||
#include "source-path.hh"
|
||||
#include "store-api.hh"
|
||||
#include "file-system.hh"
|
||||
#include "repair-flag.hh"
|
||||
#include "file-content-address.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
/**
|
||||
* Copy the `path` to the Nix store.
|
||||
*/
|
||||
StorePath fetchToStore(
|
||||
Store & store,
|
||||
const SourcePath & path,
|
||||
std::string_view name = "source",
|
||||
ContentAddressMethod method = FileIngestionMethod::Recursive,
|
||||
PathFilter * filter = nullptr,
|
||||
RepairFlag repair = NoRepair);
|
||||
|
||||
}
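A hedged caller sketch for the declaration above: copy a source path into the store with the default recursive (NAR) ingestion; the `importSource` name is an assumption.

```cpp
#include "fetch-to-store.hh"

// Hypothetical call site: ingest `src` under the name "source" using the
// defaults declared above (recursive ingestion, no filter, no repair).
nix::StorePath importSource(nix::Store & store, const nix::SourcePath & src)
{
    return nix::fetchToStore(store, src, "source");
}
```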
|
|
@ -1,6 +1,8 @@
|
|||
#include "fetchers.hh"
|
||||
#include "store-api.hh"
|
||||
#include "input-accessor.hh"
|
||||
#include "source-path.hh"
|
||||
#include "fetch-to-store.hh"
|
||||
|
||||
#include <nlohmann/json.hpp>
|
||||
|
||||
|
@ -374,7 +376,7 @@ void InputScheme::clone(const Input & input, const Path & destDir) const
|
|||
std::pair<StorePath, Input> InputScheme::fetch(ref<Store> store, const Input & input)
|
||||
{
|
||||
auto [accessor, input2] = getAccessor(store, input);
|
||||
auto storePath = SourcePath(accessor).fetchToStore(store, input2.getName());
|
||||
auto storePath = fetchToStore(*store, SourcePath(accessor), input2.getName());
|
||||
return {storePath, input2};
|
||||
}
|
||||
|
||||
|
|
|
@ -187,6 +187,13 @@ struct InputScheme
|
|||
virtual bool isDirect(const Input & input) const
|
||||
{ return true; }
|
||||
|
||||
/**
|
||||
* A sufficiently unique string that can be used as a cache key to identify the `input`.
|
||||
*
|
||||
* Only known-equivalent inputs should return the same fingerprint.
|
||||
*
|
||||
     * This is not a stable identifier across Nix versions, but it is not guaranteed to change either.
|
||||
*/
|
||||
virtual std::optional<std::string> getFingerprint(ref<Store> store, const Input & input) const
|
||||
{ return std::nullopt; }
|
||||
};
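To make the fingerprint contract concrete, a sketch of a possible override in a concrete scheme (assuming, as the Git scheme does, that `Input::getRev()` and `Hash::gitRev()` are available); only a locked revision yields a cache key, and the remaining `InputScheme` methods are omitted for brevity.

```cpp
#include "fetchers.hh"

// Hypothetical scheme: known-equivalent inputs (same locked revision) share a
// fingerprint; anything unlocked opts out of the cache.
struct ExampleInputScheme : nix::fetchers::InputScheme
{
    std::optional<std::string> getFingerprint(
        nix::ref<nix::Store> store, const nix::fetchers::Input & input) const override
    {
        if (auto rev = input.getRev())
            return rev->gitRev();
        return std::nullopt;
    }
};
```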
|
||||
|
|
|
@ -80,4 +80,13 @@ ref<AllowListInputAccessor> AllowListInputAccessor::create(
|
|||
return make_ref<AllowListInputAccessorImpl>(next, std::move(allowedPaths), std::move(makeNotAllowedError));
|
||||
}
|
||||
|
||||
bool CachingFilteringInputAccessor::isAllowed(const CanonPath & path)
|
||||
{
|
||||
auto i = cache.find(path);
|
||||
if (i != cache.end()) return i->second;
|
||||
auto res = isAllowedUncached(path);
|
||||
cache.emplace(path, res);
|
||||
return res;
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -1,11 +1,12 @@
|
|||
#pragma once
|
||||
|
||||
#include "input-accessor.hh"
|
||||
#include "source-path.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
/**
|
||||
* A function that should throw an exception of type
|
||||
* A function that returns an exception of type
|
||||
* `RestrictedPathError` explaining that access to `path` is
|
||||
* forbidden.
|
||||
*/
|
||||
|
@ -70,4 +71,18 @@ struct AllowListInputAccessor : public FilteringInputAccessor
|
|||
using FilteringInputAccessor::FilteringInputAccessor;
|
||||
};
|
||||
|
||||
/**
|
||||
* A wrapping `InputAccessor` mix-in where `isAllowed()` caches the result of virtual `isAllowedUncached()`.
|
||||
*/
|
||||
struct CachingFilteringInputAccessor : FilteringInputAccessor
|
||||
{
|
||||
std::map<CanonPath, bool> cache;
|
||||
|
||||
using FilteringInputAccessor::FilteringInputAccessor;
|
||||
|
||||
bool isAllowed(const CanonPath & path) override;
|
||||
|
||||
virtual bool isAllowedUncached(const CanonPath & path) = 0;
|
||||
};
|
||||
|
||||
}
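A sketch of a subclass built on the caching mix-in above; the `PredicateFilteringInputAccessor` name and its `predicate` member are assumptions. The expensive check lives in `isAllowedUncached()`, and the base class memoises the result per path.

```cpp
#include "filtering-input-accessor.hh"

#include <functional>

// Hypothetical filter: delegates to an arbitrary predicate; results are
// cached per CanonPath by CachingFilteringInputAccessor::isAllowed().
struct PredicateFilteringInputAccessor : nix::CachingFilteringInputAccessor
{
    std::function<bool(const nix::CanonPath &)> predicate;

    using CachingFilteringInputAccessor::CachingFilteringInputAccessor;

    bool isAllowedUncached(const nix::CanonPath & path) override
    {
        return predicate && predicate(path);
    }
};
```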
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
#pragma once
|
||||
|
||||
#include "input-accessor.hh"
|
||||
#include "source-path.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
|
|
|
@ -1,5 +1,7 @@
|
|||
#include "git-utils.hh"
|
||||
#include "fs-input-accessor.hh"
|
||||
#include "input-accessor.hh"
|
||||
#include "filtering-input-accessor.hh"
|
||||
#include "cache.hh"
|
||||
#include "finally.hh"
|
||||
#include "processes.hh"
|
||||
|
@ -7,6 +9,7 @@
|
|||
|
||||
#include <boost/core/span.hpp>
|
||||
|
||||
#include <git2/attr.h>
|
||||
#include <git2/blob.h>
|
||||
#include <git2/commit.h>
|
||||
#include <git2/config.h>
|
||||
|
@ -21,6 +24,7 @@
|
|||
#include <git2/submodule.h>
|
||||
#include <git2/tree.h>
|
||||
|
||||
#include <iostream>
|
||||
#include <unordered_set>
|
||||
#include <queue>
|
||||
#include <regex>
|
||||
|
@ -50,6 +54,8 @@ bool operator == (const git_oid & oid1, const git_oid & oid2)
|
|||
|
||||
namespace nix {
|
||||
|
||||
struct GitInputAccessor;
|
||||
|
||||
// Some wrapper types that ensure that the git_*_free functions get called.
|
||||
template<auto del>
|
||||
struct Deleter
|
||||
|
@ -133,6 +139,7 @@ T peelObject(git_repository * repo, git_object * obj, git_object_t type)
|
|||
|
||||
struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
|
||||
{
|
||||
/** Location of the repository on disk. */
|
||||
CanonPath path;
|
||||
Repository repo;
|
||||
|
||||
|
@ -307,7 +314,7 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
|
|||
return std::nullopt;
|
||||
}
|
||||
|
||||
std::vector<std::tuple<Submodule, Hash>> getSubmodules(const Hash & rev) override;
|
||||
std::vector<std::tuple<Submodule, Hash>> getSubmodules(const Hash & rev, bool exportIgnore) override;
|
||||
|
||||
std::string resolveSubmoduleUrl(
|
||||
const std::string & url,
|
||||
|
@ -340,7 +347,14 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
|
|||
return true;
|
||||
}
|
||||
|
||||
ref<InputAccessor> getAccessor(const Hash & rev) override;
|
||||
/**
|
||||
* A 'GitInputAccessor' with no regard for export-ignore or any other transformations.
|
||||
*/
|
||||
ref<GitInputAccessor> getRawAccessor(const Hash & rev);
|
||||
|
||||
ref<InputAccessor> getAccessor(const Hash & rev, bool exportIgnore) override;
|
||||
|
||||
ref<InputAccessor> getAccessor(const WorkdirInfo & wd, bool exportIgnore, MakeNotAllowedError e) override;
|
||||
|
||||
static int sidebandProgressCallback(const char * str, int len, void * payload)
|
||||
{
|
||||
|
@ -369,27 +383,27 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
|
|||
{
|
||||
Activity act(*logger, lvlTalkative, actFetchTree, fmt("fetching Git repository '%s'", url));
|
||||
|
||||
Remote remote;
|
||||
// TODO: implement git-credential helper support (preferably via libgit2, which as of 2024-01 does not support that)
|
||||
// then use code that was removed in this commit (see blame)
|
||||
|
||||
if (git_remote_create_anonymous(Setter(remote), *this, url.c_str()))
|
||||
throw Error("cannot create Git remote '%s': %s", url, git_error_last()->message);
|
||||
auto dir = this->path;
|
||||
Strings gitArgs;
|
||||
if (shallow) {
|
||||
gitArgs = { "-C", dir.abs(), "fetch", "--quiet", "--force", "--depth", "1", "--", url, refspec };
|
||||
}
|
||||
else {
|
||||
gitArgs = { "-C", dir.abs(), "fetch", "--quiet", "--force", "--", url, refspec };
|
||||
}
|
||||
|
||||
char * refspecs[] = {(char *) refspec.c_str()};
|
||||
git_strarray refspecs2 {
|
||||
.strings = refspecs,
|
||||
.count = 1
|
||||
};
|
||||
|
||||
git_fetch_options opts = GIT_FETCH_OPTIONS_INIT;
|
||||
// FIXME: for some reason, shallow fetching over ssh barfs
|
||||
// with "could not read from remote repository".
|
||||
opts.depth = shallow && parseURL(url).scheme != "ssh" ? 1 : GIT_FETCH_DEPTH_FULL;
|
||||
opts.callbacks.payload = &act;
|
||||
opts.callbacks.sideband_progress = sidebandProgressCallback;
|
||||
opts.callbacks.transfer_progress = transferProgressCallback;
|
||||
|
||||
if (git_remote_fetch(remote.get(), &refspecs2, &opts, nullptr))
|
||||
throw Error("fetching '%s' from '%s': %s", refspec, url, git_error_last()->message);
|
||||
runProgram(RunOptions {
|
||||
.program = "git",
|
||||
.searchPath = true,
|
||||
// FIXME: git stderr messes up our progress indicator, so
|
||||
// we're using --quiet for now. Should process its stderr.
|
||||
.args = gitArgs,
|
||||
.input = {},
|
||||
.isInteractive = true
|
||||
});
|
||||
}
|
||||
|
||||
void verifyCommit(
|
||||
|
@ -456,6 +470,9 @@ ref<GitRepo> GitRepo::openRepo(const CanonPath & path, bool create, bool bare)
|
|||
return make_ref<GitRepoImpl>(path, create, bare);
|
||||
}
|
||||
|
||||
/**
|
||||
* Raw git tree input accessor.
|
||||
*/
|
||||
struct GitInputAccessor : InputAccessor
|
||||
{
|
||||
ref<GitRepoImpl> repo;
|
||||
|
@ -644,17 +661,114 @@ struct GitInputAccessor : InputAccessor
|
|||
}
|
||||
};
|
||||
|
||||
ref<InputAccessor> GitRepoImpl::getAccessor(const Hash & rev)
|
||||
struct GitExportIgnoreInputAccessor : CachingFilteringInputAccessor {
|
||||
ref<GitRepoImpl> repo;
|
||||
std::optional<Hash> rev;
|
||||
|
||||
GitExportIgnoreInputAccessor(ref<GitRepoImpl> repo, ref<InputAccessor> next, std::optional<Hash> rev)
|
||||
: CachingFilteringInputAccessor(next, [&](const CanonPath & path) {
|
||||
return RestrictedPathError(fmt("'%s' does not exist because it was fetched with exportIgnore enabled", path));
|
||||
})
|
||||
, repo(repo)
|
||||
, rev(rev)
|
||||
{ }
|
||||
|
||||
bool gitAttrGet(const CanonPath & path, const char * attrName, const char * & valueOut)
|
||||
{
|
||||
const char * pathCStr = path.rel_c_str();
|
||||
|
||||
if (rev) {
|
||||
git_attr_options opts = GIT_ATTR_OPTIONS_INIT;
|
||||
opts.attr_commit_id = hashToOID(*rev);
|
||||
// TODO: test that gitattributes from global and system are not used
|
||||
// (i.e. more or less: the user's home directory and /etc, both of them!)
|
||||
opts.flags = GIT_ATTR_CHECK_INCLUDE_COMMIT | GIT_ATTR_CHECK_NO_SYSTEM;
|
||||
return git_attr_get_ext(
|
||||
&valueOut,
|
||||
*repo,
|
||||
&opts,
|
||||
pathCStr,
|
||||
attrName
|
||||
);
|
||||
}
|
||||
else {
|
||||
return git_attr_get(
|
||||
&valueOut,
|
||||
*repo,
|
||||
GIT_ATTR_CHECK_INDEX_ONLY | GIT_ATTR_CHECK_NO_SYSTEM,
|
||||
pathCStr,
|
||||
attrName);
|
||||
}
|
||||
}
|
||||
|
||||
bool isExportIgnored(const CanonPath & path)
|
||||
{
|
||||
const char *exportIgnoreEntry = nullptr;
|
||||
|
||||
// GIT_ATTR_CHECK_INDEX_ONLY:
|
||||
// > It will use index only for creating archives or for a bare repo
|
||||
// > (if an index has been specified for the bare repo).
|
||||
// -- https://github.com/libgit2/libgit2/blob/HEAD/include/git2/attr.h#L113C62-L115C48
|
||||
if (gitAttrGet(path, "export-ignore", exportIgnoreEntry)) {
|
||||
if (git_error_last()->klass == GIT_ENOTFOUND)
|
||||
return false;
|
||||
else
|
||||
throw Error("looking up '%s': %s", showPath(path), git_error_last()->message);
|
||||
}
|
||||
else {
|
||||
// Official git will silently reject export-ignore lines that have
|
||||
// values. We do the same.
|
||||
return GIT_ATTR_IS_TRUE(exportIgnoreEntry);
|
||||
}
|
||||
}
|
||||
|
||||
bool isAllowedUncached(const CanonPath & path) override
|
||||
{
|
||||
return !isExportIgnored(path);
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
ref<GitInputAccessor> GitRepoImpl::getRawAccessor(const Hash & rev)
|
||||
{
|
||||
return make_ref<GitInputAccessor>(ref<GitRepoImpl>(shared_from_this()), rev);
|
||||
auto self = ref<GitRepoImpl>(shared_from_this());
|
||||
return make_ref<GitInputAccessor>(self, rev);
|
||||
}
|
||||
|
||||
std::vector<std::tuple<GitRepoImpl::Submodule, Hash>> GitRepoImpl::getSubmodules(const Hash & rev)
|
||||
ref<InputAccessor> GitRepoImpl::getAccessor(const Hash & rev, bool exportIgnore)
|
||||
{
|
||||
auto self = ref<GitRepoImpl>(shared_from_this());
|
||||
ref<GitInputAccessor> rawGitAccessor = getRawAccessor(rev);
|
||||
if (exportIgnore) {
|
||||
return make_ref<GitExportIgnoreInputAccessor>(self, rawGitAccessor, rev);
|
||||
}
|
||||
else {
|
||||
return rawGitAccessor;
|
||||
}
|
||||
}
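Usage sketch (editorial, not from the commit): callers now pick per fetch whether export-ignore filtering applies; `repo` and `rev` are assumed to be in scope.

auto plain    = repo->getAccessor(rev, /* exportIgnore */ false); // raw tree accessor
auto filtered = repo->getAccessor(rev, /* exportIgnore */ true);  // export-ignored paths act as absent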
|
||||
|
||||
ref<InputAccessor> GitRepoImpl::getAccessor(const WorkdirInfo & wd, bool exportIgnore, MakeNotAllowedError makeNotAllowedError)
|
||||
{
|
||||
auto self = ref<GitRepoImpl>(shared_from_this());
|
||||
ref<InputAccessor> fileAccessor =
|
||||
AllowListInputAccessor::create(
|
||||
makeFSInputAccessor(path),
|
||||
std::set<CanonPath> { wd.files },
|
||||
std::move(makeNotAllowedError));
|
||||
if (exportIgnore) {
|
||||
return make_ref<GitExportIgnoreInputAccessor>(self, fileAccessor, std::nullopt);
|
||||
}
|
||||
else {
|
||||
return fileAccessor;
|
||||
}
|
||||
}
|
||||
|
||||
std::vector<std::tuple<GitRepoImpl::Submodule, Hash>> GitRepoImpl::getSubmodules(const Hash & rev, bool exportIgnore)
|
||||
{
|
||||
/* Read the .gitmodules files from this revision. */
|
||||
CanonPath modulesFile(".gitmodules");
|
||||
|
||||
auto accessor = getAccessor(rev);
|
||||
auto accessor = getAccessor(rev, exportIgnore);
|
||||
if (!accessor->pathExists(modulesFile)) return {};
|
||||
|
||||
/* Parse it and get the revision of each submodule. */
|
||||
|
@ -665,8 +779,10 @@ std::vector<std::tuple<GitRepoImpl::Submodule, Hash>> GitRepoImpl::getSubmodules
|
|||
|
||||
std::vector<std::tuple<Submodule, Hash>> result;
|
||||
|
||||
auto rawAccessor = getRawAccessor(rev);
|
||||
|
||||
for (auto & submodule : parseSubmodules(CanonPath(pathTemp))) {
|
||||
auto rev = accessor.dynamic_pointer_cast<GitInputAccessor>()->getSubmoduleRev(submodule.path);
|
||||
auto rev = rawAccessor->getSubmoduleRev(submodule.path);
|
||||
result.push_back({std::move(submodule), rev});
|
||||
}
|
||||
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
#pragma once
|
||||
|
||||
#include "filtering-input-accessor.hh"
|
||||
#include "input-accessor.hh"
|
||||
|
||||
namespace nix {
|
||||
|
@ -57,7 +58,7 @@ struct GitRepo
|
|||
* Return the submodules of this repo at the indicated revision,
|
||||
* along with the revision of each submodule.
|
||||
*/
|
||||
virtual std::vector<std::tuple<Submodule, Hash>> getSubmodules(const Hash & rev) = 0;
|
||||
virtual std::vector<std::tuple<Submodule, Hash>> getSubmodules(const Hash & rev, bool exportIgnore) = 0;
|
||||
|
||||
virtual std::string resolveSubmoduleUrl(
|
||||
const std::string & url,
|
||||
|
@ -71,7 +72,9 @@ struct GitRepo
|
|||
|
||||
virtual bool hasObject(const Hash & oid) = 0;
|
||||
|
||||
virtual ref<InputAccessor> getAccessor(const Hash & rev) = 0;
|
||||
virtual ref<InputAccessor> getAccessor(const Hash & rev, bool exportIgnore) = 0;
|
||||
|
||||
virtual ref<InputAccessor> getAccessor(const WorkdirInfo & wd, bool exportIgnore, MakeNotAllowedError makeNotAllowedError) = 0;
|
||||
|
||||
virtual void fetch(
|
||||
const std::string & url,
|
||||
|
|
|
@ -1,3 +1,4 @@
|
|||
#include "error.hh"
|
||||
#include "fetchers.hh"
|
||||
#include "users.hh"
|
||||
#include "cache.hh"
|
||||
|
@ -9,7 +10,6 @@
|
|||
#include "processes.hh"
|
||||
#include "git.hh"
|
||||
#include "fs-input-accessor.hh"
|
||||
#include "filtering-input-accessor.hh"
|
||||
#include "mounted-input-accessor.hh"
|
||||
#include "git-utils.hh"
|
||||
#include "logging.hh"
|
||||
|
@ -50,10 +50,12 @@ bool touchCacheFile(const Path & path, time_t touch_time)
|
|||
return lutimes(path.c_str(), times) == 0;
|
||||
}
|
||||
|
||||
Path getCachePath(std::string_view key)
|
||||
Path getCachePath(std::string_view key, bool shallow)
|
||||
{
|
||||
return getCacheDir() + "/nix/gitv3/" +
|
||||
hashString(HashAlgorithm::SHA256, key).to_string(HashFormat::Nix32, false);
|
||||
return getCacheDir()
|
||||
+ "/nix/gitv3/"
|
||||
+ hashString(HashAlgorithm::SHA256, key).to_string(HashFormat::Nix32, false)
|
||||
+ (shallow ? "-shallow" : "");
|
||||
}
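Side note (not in the diff): the effect is that shallow and full fetches of the same URL never share a cache directory.

// Illustrative only; the URL is made up.
Path full    = getCachePath("https://example.org/repo.git", false);
Path shallow = getCachePath("https://example.org/repo.git", true);
// `shallow` equals `full` plus a "-shallow" suffix, so the two never mix state.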
|
||||
|
||||
// Returns the name of the HEAD branch.
|
||||
|
@ -92,7 +94,8 @@ std::optional<std::string> readHead(const Path & path)
|
|||
// Persist the HEAD ref from the remote repo in the local cached repo.
|
||||
bool storeCachedHead(const std::string & actualUrl, const std::string & headRef)
|
||||
{
|
||||
Path cacheDir = getCachePath(actualUrl);
|
||||
// set shallow=false as HEAD will never be queried for a shallow repo
|
||||
Path cacheDir = getCachePath(actualUrl, false);
|
||||
try {
|
||||
runProgram("git", true, { "-C", cacheDir, "--git-dir", ".", "symbolic-ref", "--", "HEAD", headRef });
|
||||
} catch (ExecError &e) {
|
||||
|
@ -107,7 +110,8 @@ std::optional<std::string> readHeadCached(const std::string & actualUrl)
|
|||
{
|
||||
// Create a cache path to store the branch of the HEAD ref. Append something
|
||||
// in front of the URL to prevent collision with the repository itself.
|
||||
Path cacheDir = getCachePath(actualUrl);
|
||||
// set shallow=false as HEAD will never be queried for a shallow repo
|
||||
Path cacheDir = getCachePath(actualUrl, false);
|
||||
Path headRefFile = cacheDir + "/HEAD";
|
||||
|
||||
time_t now = time(0);
|
||||
|
@ -174,7 +178,7 @@ struct GitInputScheme : InputScheme
|
|||
for (auto & [name, value] : url.query) {
|
||||
if (name == "rev" || name == "ref" || name == "keytype" || name == "publicKey" || name == "publicKeys")
|
||||
attrs.emplace(name, value);
|
||||
else if (name == "shallow" || name == "submodules" || name == "allRefs" || name == "verifyCommit")
|
||||
else if (name == "shallow" || name == "submodules" || name == "exportIgnore" || name == "allRefs" || name == "verifyCommit")
|
||||
attrs.emplace(name, Explicit<bool> { value == "1" });
|
||||
else
|
||||
url2.query.emplace(name, value);
|
||||
|
@ -199,6 +203,7 @@ struct GitInputScheme : InputScheme
|
|||
"rev",
|
||||
"shallow",
|
||||
"submodules",
|
||||
"exportIgnore",
|
||||
"lastModified",
|
||||
"revCount",
|
||||
"narHash",
|
||||
|
@ -250,6 +255,8 @@ struct GitInputScheme : InputScheme
|
|||
url.query.insert_or_assign("shallow", "1");
|
||||
if (getSubmodulesAttr(input))
|
||||
url.query.insert_or_assign("submodules", "1");
|
||||
if (maybeGetBoolAttr(input.attrs, "exportIgnore").value_or(false))
|
||||
url.query.insert_or_assign("exportIgnore", "1");
|
||||
if (maybeGetBoolAttr(input.attrs, "verifyCommit").value_or(false))
|
||||
url.query.insert_or_assign("verifyCommit", "1");
|
||||
auto publicKeys = getPublicKeys(input.attrs);
|
||||
|
@ -314,15 +321,26 @@ struct GitInputScheme : InputScheme
|
|||
|
||||
writeFile((CanonPath(repoInfo.url) + path).abs(), contents);
|
||||
|
||||
runProgram("git", true,
|
||||
{ "-C", repoInfo.url, "--git-dir", repoInfo.gitDir, "add", "--intent-to-add", "--", std::string(path.rel()) });
|
||||
auto result = runProgram(RunOptions {
|
||||
.program = "git",
|
||||
.args = {"-C", repoInfo.url, "--git-dir", repoInfo.gitDir, "check-ignore", "--quiet", std::string(path.rel())},
|
||||
});
|
||||
auto exitCode = WEXITSTATUS(result.first);
|
||||
|
||||
// Pause the logger to allow for user input (such as a gpg passphrase) in `git commit`
|
||||
logger->pause();
|
||||
Finally restoreLogger([]() { logger->resume(); });
|
||||
if (commitMsg)
|
||||
if (exitCode != 0) {
|
||||
// The path is not `.gitignore`d, so we can add the file.
|
||||
runProgram("git", true,
|
||||
{ "-C", repoInfo.url, "--git-dir", repoInfo.gitDir, "commit", std::string(path.rel()), "-m", *commitMsg });
|
||||
{ "-C", repoInfo.url, "--git-dir", repoInfo.gitDir, "add", "--intent-to-add", "--", std::string(path.rel()) });
|
||||
|
||||
|
||||
if (commitMsg) {
|
||||
// Pause the logger to allow for user input (such as a gpg passphrase) in `git commit`
|
||||
logger->pause();
|
||||
Finally restoreLogger([]() { logger->resume(); });
|
||||
runProgram("git", true,
|
||||
{ "-C", repoInfo.url, "--git-dir", repoInfo.gitDir, "commit", std::string(path.rel()), "-m", *commitMsg });
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
struct RepoInfo
|
||||
|
@ -361,6 +379,11 @@ struct GitInputScheme : InputScheme
|
|||
return maybeGetBoolAttr(input.attrs, "submodules").value_or(false);
|
||||
}
|
||||
|
||||
bool getExportIgnoreAttr(const Input & input) const
|
||||
{
|
||||
return maybeGetBoolAttr(input.attrs, "exportIgnore").value_or(false);
|
||||
}
|
||||
|
||||
bool getAllRefsAttr(const Input & input) const
|
||||
{
|
||||
return maybeGetBoolAttr(input.attrs, "allRefs").value_or(false);
|
||||
|
@ -368,14 +391,14 @@ struct GitInputScheme : InputScheme
|
|||
|
||||
RepoInfo getRepoInfo(const Input & input) const
|
||||
{
|
||||
auto checkHashType = [&](const std::optional<Hash> & hash)
|
||||
auto checkHashAlgorithm = [&](const std::optional<Hash> & hash)
|
||||
{
|
||||
if (hash.has_value() && !(hash->algo == HashAlgorithm::SHA1 || hash->algo == HashAlgorithm::SHA256))
|
||||
throw Error("Hash '%s' is not supported by Git. Supported types are sha1 and sha256.", hash->to_string(HashFormat::Base16, true));
|
||||
};
|
||||
|
||||
if (auto rev = input.getRev())
|
||||
checkHashType(rev);
|
||||
checkHashAlgorithm(rev);
|
||||
|
||||
RepoInfo repoInfo;
|
||||
|
||||
|
@ -489,7 +512,7 @@ struct GitInputScheme : InputScheme
|
|||
if (!input.getRev())
|
||||
input.attrs.insert_or_assign("rev", GitRepo::openRepo(CanonPath(repoDir))->resolveRef(ref).gitRev());
|
||||
} else {
|
||||
Path cacheDir = getCachePath(repoInfo.url);
|
||||
Path cacheDir = getCachePath(repoInfo.url, getShallowAttr(input));
|
||||
repoDir = cacheDir;
|
||||
repoInfo.gitDir = ".";
|
||||
|
||||
|
@ -589,7 +612,8 @@ struct GitInputScheme : InputScheme
|
|||
|
||||
verifyCommit(input, repo);
|
||||
|
||||
auto accessor = repo->getAccessor(rev);
|
||||
bool exportIgnore = getExportIgnoreAttr(input);
|
||||
auto accessor = repo->getAccessor(rev, exportIgnore);
|
||||
|
||||
accessor->setPathDisplay("«" + input.to_string() + "»");
|
||||
|
||||
|
@ -599,7 +623,7 @@ struct GitInputScheme : InputScheme
|
|||
if (getSubmodulesAttr(input)) {
|
||||
std::map<CanonPath, nix::ref<InputAccessor>> mounts;
|
||||
|
||||
for (auto & [submodule, submoduleRev] : repo->getSubmodules(rev)) {
|
||||
for (auto & [submodule, submoduleRev] : repo->getSubmodules(rev, exportIgnore)) {
|
||||
auto resolved = repo->resolveSubmoduleUrl(submodule.url, repoInfo.url);
|
||||
debug("Git submodule %s: %s %s %s -> %s",
|
||||
submodule.path, submodule.url, submodule.branch, submoduleRev.gitRev(), resolved);
|
||||
|
@ -609,6 +633,7 @@ struct GitInputScheme : InputScheme
|
|||
if (submodule.branch != "")
|
||||
attrs.insert_or_assign("ref", submodule.branch);
|
||||
attrs.insert_or_assign("rev", submoduleRev.gitRev());
|
||||
attrs.insert_or_assign("exportIgnore", Explicit<bool>{ exportIgnore });
|
||||
auto submoduleInput = fetchers::Input::fromAttrs(std::move(attrs));
|
||||
auto [submoduleAccessor, submoduleInput2] =
|
||||
submoduleInput.getAccessor(store);
|
||||
|
@ -639,10 +664,13 @@ struct GitInputScheme : InputScheme
|
|||
for (auto & submodule : repoInfo.workdirInfo.submodules)
|
||||
repoInfo.workdirInfo.files.insert(submodule.path);
|
||||
|
||||
auto repo = GitRepo::openRepo(CanonPath(repoInfo.url), false, false);
|
||||
|
||||
auto exportIgnore = getExportIgnoreAttr(input);
|
||||
|
||||
ref<InputAccessor> accessor =
|
||||
AllowListInputAccessor::create(
|
||||
makeFSInputAccessor(CanonPath(repoInfo.url)),
|
||||
std::move(repoInfo.workdirInfo.files),
|
||||
repo->getAccessor(repoInfo.workdirInfo,
|
||||
exportIgnore,
|
||||
makeNotAllowedError(repoInfo.url));
|
||||
|
||||
/* If the repo has submodules, return a mounted input accessor
|
||||
|
@ -656,6 +684,8 @@ struct GitInputScheme : InputScheme
|
|||
fetchers::Attrs attrs;
|
||||
attrs.insert_or_assign("type", "git");
|
||||
attrs.insert_or_assign("url", submodulePath.abs());
|
||||
attrs.insert_or_assign("exportIgnore", Explicit<bool>{ exportIgnore });
|
||||
|
||||
auto submoduleInput = fetchers::Input::fromAttrs(std::move(attrs));
|
||||
auto [submoduleAccessor, submoduleInput2] =
|
||||
submoduleInput.getAccessor(store);
|
||||
|
@ -714,6 +744,16 @@ struct GitInputScheme : InputScheme
|
|||
|
||||
auto repoInfo = getRepoInfo(input);
|
||||
|
||||
if (getExportIgnoreAttr(input)
|
||||
&& getSubmodulesAttr(input)) {
|
||||
/* In this situation, we don't have a git CLI behavior that we can copy.
|
||||
`git archive` does not support submodules, so it is unclear whether
|
||||
rules from the parent should affect the submodule or not.
|
||||
If git eventually implements this, Nix will need to match its
|
||||
behavior. */
|
||||
throw UnimplementedError("exportIgnore and submodules are not supported together yet");
|
||||
}
|
||||
|
||||
auto [accessor, final] =
|
||||
input.getRef() || input.getRev() || !repoInfo.isLocal
|
||||
? getAccessorFromCommit(store, repoInfo, std::move(input))
|
||||
|
@ -727,7 +767,7 @@ struct GitInputScheme : InputScheme
|
|||
std::optional<std::string> getFingerprint(ref<Store> store, const Input & input) const override
|
||||
{
|
||||
if (auto rev = input.getRev())
|
||||
return rev->gitRev() + (getSubmodulesAttr(input) ? ";s" : "");
|
||||
return rev->gitRev() + (getSubmodulesAttr(input) ? ";s" : "") + (getExportIgnoreAttr(input) ? ";e" : "");
|
||||
else
|
||||
return std::nullopt;
|
||||
}
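Side note (editorial): the fingerprint is the locked revision plus flag markers, e.g. (hash shortened and made up):

// "1f2e9ab"      plain
// "1f2e9ab;s"    submodules = true
// "1f2e9ab;s;e"  submodules = true and exportIgnore = true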
|
||||
|
|
|
@ -201,7 +201,7 @@ struct GitArchiveInputScheme : InputScheme
|
|||
{"rev", rev->gitRev()},
|
||||
});
|
||||
|
||||
if (auto res = getCache()->lookup(store, lockedAttrs)) {
|
||||
if (auto res = getCache()->lookup(*store, lockedAttrs)) {
|
||||
input.attrs.insert_or_assign("lastModified", getIntAttr(res->first, "lastModified"));
|
||||
return {std::move(res->second), input};
|
||||
}
|
||||
|
@ -213,7 +213,7 @@ struct GitArchiveInputScheme : InputScheme
|
|||
input.attrs.insert_or_assign("lastModified", uint64_t(result.lastModified));
|
||||
|
||||
getCache()->add(
|
||||
store,
|
||||
*store,
|
||||
lockedAttrs,
|
||||
{
|
||||
{"rev", rev->gitRev()},
|
||||
|
|
|
@ -1,118 +0,0 @@
|
|||
#include "input-accessor.hh"
|
||||
#include "store-api.hh"
|
||||
#include "cache.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
StorePath InputAccessor::fetchToStore(
|
||||
ref<Store> store,
|
||||
const CanonPath & path,
|
||||
std::string_view name,
|
||||
FileIngestionMethod method,
|
||||
PathFilter * filter,
|
||||
RepairFlag repair)
|
||||
{
|
||||
// FIXME: add an optimisation for the case where the accessor is
|
||||
// an FSInputAccessor pointing to a store path.
|
||||
|
||||
std::optional<fetchers::Attrs> cacheKey;
|
||||
|
||||
if (!filter && fingerprint) {
|
||||
cacheKey = fetchers::Attrs{
|
||||
{"_what", "fetchToStore"},
|
||||
{"store", store->storeDir},
|
||||
{"name", std::string(name)},
|
||||
{"fingerprint", *fingerprint},
|
||||
{"method", (uint8_t) method},
|
||||
{"path", path.abs()}
|
||||
};
|
||||
if (auto res = fetchers::getCache()->lookup(store, *cacheKey)) {
|
||||
debug("store path cache hit for '%s'", showPath(path));
|
||||
return res->second;
|
||||
}
|
||||
} else
|
||||
debug("source path '%s' is uncacheable", showPath(path));
|
||||
|
||||
Activity act(*logger, lvlChatty, actUnknown, fmt("copying '%s' to the store", showPath(path)));
|
||||
|
||||
auto source = sinkToSource([&](Sink & sink) {
|
||||
if (method == FileIngestionMethod::Recursive)
|
||||
dumpPath(path, sink, filter ? *filter : defaultPathFilter);
|
||||
else
|
||||
readFile(path, sink);
|
||||
});
|
||||
|
||||
auto storePath =
|
||||
settings.readOnlyMode
|
||||
? store->computeStorePathFromDump(*source, name, method, HashAlgorithm::SHA256).first
|
||||
: store->addToStoreFromDump(*source, name, method, HashAlgorithm::SHA256, repair);
|
||||
|
||||
if (cacheKey)
|
||||
fetchers::getCache()->add(store, *cacheKey, {}, storePath, true);
|
||||
|
||||
return storePath;
|
||||
}
|
||||
|
||||
std::ostream & operator << (std::ostream & str, const SourcePath & path)
|
||||
{
|
||||
str << path.to_string();
|
||||
return str;
|
||||
}
|
||||
|
||||
StorePath SourcePath::fetchToStore(
|
||||
ref<Store> store,
|
||||
std::string_view name,
|
||||
FileIngestionMethod method,
|
||||
PathFilter * filter,
|
||||
RepairFlag repair) const
|
||||
{
|
||||
return accessor->fetchToStore(store, path, name, method, filter, repair);
|
||||
}
|
||||
|
||||
std::string_view SourcePath::baseName() const
|
||||
{
|
||||
return path.baseName().value_or("source");
|
||||
}
|
||||
|
||||
SourcePath SourcePath::parent() const
|
||||
{
|
||||
auto p = path.parent();
|
||||
assert(p);
|
||||
return {accessor, std::move(*p)};
|
||||
}
|
||||
|
||||
SourcePath SourcePath::resolveSymlinks() const
|
||||
{
|
||||
auto res = SourcePath(accessor);
|
||||
|
||||
int linksAllowed = 1024;
|
||||
|
||||
std::list<std::string> todo;
|
||||
for (auto & c : path)
|
||||
todo.push_back(std::string(c));
|
||||
|
||||
while (!todo.empty()) {
|
||||
auto c = *todo.begin();
|
||||
todo.pop_front();
|
||||
if (c == "" || c == ".")
|
||||
;
|
||||
else if (c == "..")
|
||||
res.path.pop();
|
||||
else {
|
||||
res.path.push(c);
|
||||
if (auto st = res.maybeLstat(); st && st->type == InputAccessor::tSymlink) {
|
||||
if (!linksAllowed--)
|
||||
throw Error("infinite symlink recursion in path '%s'", path);
|
||||
auto target = res.readLink();
|
||||
res.path.pop();
|
||||
if (hasPrefix(target, "/"))
|
||||
res.path = CanonPath::root;
|
||||
todo.splice(todo.begin(), tokenizeString<std::list<std::string>>(target, "/"));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return res;
|
||||
}
|
||||
|
||||
}
|
|
@ -1,174 +0,0 @@
|
|||
#pragma once
|
||||
///@file
|
||||
|
||||
#include "source-accessor.hh"
|
||||
#include "ref.hh"
|
||||
#include "types.hh"
|
||||
#include "file-system.hh"
|
||||
#include "repair-flag.hh"
|
||||
#include "content-address.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
MakeError(RestrictedPathError, Error);
|
||||
|
||||
struct SourcePath;
|
||||
class StorePath;
|
||||
class Store;
|
||||
|
||||
struct InputAccessor : virtual SourceAccessor, std::enable_shared_from_this<InputAccessor>
|
||||
{
|
||||
std::optional<std::string> fingerprint;
|
||||
|
||||
/**
|
||||
* Return the maximum last-modified time of the files in this
|
||||
* tree, if available.
|
||||
*/
|
||||
virtual std::optional<time_t> getLastModified()
|
||||
{
|
||||
return std::nullopt;
|
||||
}
|
||||
|
||||
StorePath fetchToStore(
|
||||
ref<Store> store,
|
||||
const CanonPath & path,
|
||||
std::string_view name = "source",
|
||||
FileIngestionMethod method = FileIngestionMethod::Recursive,
|
||||
PathFilter * filter = nullptr,
|
||||
RepairFlag repair = NoRepair);
|
||||
};
|
||||
|
||||
/**
|
||||
* An abstraction for accessing source files during
|
||||
* evaluation. Currently, it's just a wrapper around `CanonPath` that
|
||||
* accesses files in the regular filesystem, but in the future it will
|
||||
* support fetching files in other ways.
|
||||
*/
|
||||
struct SourcePath
|
||||
{
|
||||
ref<InputAccessor> accessor;
|
||||
CanonPath path;
|
||||
|
||||
SourcePath(ref<InputAccessor> accessor, CanonPath path = CanonPath::root)
|
||||
: accessor(std::move(accessor))
|
||||
, path(std::move(path))
|
||||
{ }
|
||||
|
||||
std::string_view baseName() const;
|
||||
|
||||
/**
|
||||
* Construct the parent of this `SourcePath`. Aborts if `this`
|
||||
* denotes the root.
|
||||
*/
|
||||
SourcePath parent() const;
|
||||
|
||||
/**
|
||||
* If this `SourcePath` denotes a regular file (not a symlink),
|
||||
* return its contents; otherwise throw an error.
|
||||
*/
|
||||
std::string readFile() const
|
||||
{ return accessor->readFile(path); }
|
||||
|
||||
/**
|
||||
* Return whether this `SourcePath` denotes a file (of any type)
|
||||
* that exists
|
||||
*/
|
||||
bool pathExists() const
|
||||
{ return accessor->pathExists(path); }
|
||||
|
||||
/**
|
||||
* Return stats about this `SourcePath`, or throw an exception if
|
||||
* it doesn't exist.
|
||||
*/
|
||||
InputAccessor::Stat lstat() const
|
||||
{ return accessor->lstat(path); }
|
||||
|
||||
/**
|
||||
* Return stats about this `SourcePath`, or std::nullopt if it
|
||||
* doesn't exist.
|
||||
*/
|
||||
std::optional<InputAccessor::Stat> maybeLstat() const
|
||||
{ return accessor->maybeLstat(path); }
|
||||
|
||||
/**
|
||||
* If this `SourcePath` denotes a directory (not a symlink),
|
||||
* return its directory entries; otherwise throw an error.
|
||||
*/
|
||||
InputAccessor::DirEntries readDirectory() const
|
||||
{ return accessor->readDirectory(path); }
|
||||
|
||||
/**
|
||||
* If this `SourcePath` denotes a symlink, return its target;
|
||||
* otherwise throw an error.
|
||||
*/
|
||||
std::string readLink() const
|
||||
{ return accessor->readLink(path); }
|
||||
|
||||
/**
|
||||
* Dump this `SourcePath` to `sink` as a NAR archive.
|
||||
*/
|
||||
void dumpPath(
|
||||
Sink & sink,
|
||||
PathFilter & filter = defaultPathFilter) const
|
||||
{ return accessor->dumpPath(path, sink, filter); }
|
||||
|
||||
/**
|
||||
* Copy this `SourcePath` to the Nix store.
|
||||
*/
|
||||
StorePath fetchToStore(
|
||||
ref<Store> store,
|
||||
std::string_view name = "source",
|
||||
FileIngestionMethod method = FileIngestionMethod::Recursive,
|
||||
PathFilter * filter = nullptr,
|
||||
RepairFlag repair = NoRepair) const;
|
||||
|
||||
/**
|
||||
* Return the location of this path in the "real" filesystem, if
|
||||
* it has a physical location.
|
||||
*/
|
||||
std::optional<CanonPath> getPhysicalPath() const
|
||||
{ return accessor->getPhysicalPath(path); }
|
||||
|
||||
std::string to_string() const
|
||||
{ return path.abs(); }
|
||||
|
||||
/**
|
||||
* Append a `CanonPath` to this path.
|
||||
*/
|
||||
SourcePath operator + (const CanonPath & x) const
|
||||
{ return {accessor, path + x}; }
|
||||
|
||||
/**
|
||||
* Append a single component `c` to this path. `c` must not
|
||||
* contain a slash. A slash is implicitly added between this path
|
||||
* and `c`.
|
||||
*/
|
||||
SourcePath operator + (std::string_view c) const
|
||||
{ return {accessor, path + c}; }
|
||||
|
||||
bool operator == (const SourcePath & x) const
|
||||
{
|
||||
return std::tie(accessor, path) == std::tie(x.accessor, x.path);
|
||||
}
|
||||
|
||||
bool operator != (const SourcePath & x) const
|
||||
{
|
||||
return std::tie(accessor, path) != std::tie(x.accessor, x.path);
|
||||
}
|
||||
|
||||
bool operator < (const SourcePath & x) const
|
||||
{
|
||||
return std::tie(accessor, path) < std::tie(x.accessor, x.path);
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolve any symlinks in this `SourcePath` (including its
|
||||
* parents). The result is a `SourcePath` in which no element is a
|
||||
* symlink.
|
||||
*/
|
||||
SourcePath resolveSymlinks() const;
|
||||
};
|
||||
|
||||
std::ostream & operator << (std::ostream & str, const SourcePath & path);
|
||||
|
||||
}
|
|
@ -8,6 +8,6 @@ libfetchers_SOURCES := $(wildcard $(d)/*.cc)
|
|||
|
||||
libfetchers_CXXFLAGS += -I src/libutil -I src/libstore
|
||||
|
||||
libfetchers_LDFLAGS += -pthread $(LIBGIT2_LIBS) -larchive
|
||||
libfetchers_LDFLAGS += $(THREAD_LDFLAGS) $(LIBGIT2_LIBS) -larchive
|
||||
|
||||
libfetchers_LIBS = libutil libstore
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
#include "memory-input-accessor.hh"
|
||||
#include "memory-source-accessor.hh"
|
||||
#include "source-path.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
#include "input-accessor.hh"
|
||||
#include "source-path.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
|
|
|
@ -6,6 +6,7 @@
|
|||
#include "tarfile.hh"
|
||||
#include "store-api.hh"
|
||||
#include "url-parts.hh"
|
||||
#include "posix-source-accessor.hh"
|
||||
|
||||
#include "fetch-settings.hh"
|
||||
|
||||
|
@ -210,7 +211,12 @@ struct MercurialInputScheme : InputScheme
|
|||
return files.count(file);
|
||||
};
|
||||
|
||||
auto storePath = store->addToStore(input.getName(), actualPath, FileIngestionMethod::Recursive, HashAlgorithm::SHA256, filter);
|
||||
PosixSourceAccessor accessor;
|
||||
auto storePath = store->addToStore(
|
||||
input.getName(),
|
||||
accessor, CanonPath { actualPath },
|
||||
FileIngestionMethod::Recursive, HashAlgorithm::SHA256, {},
|
||||
filter);
|
||||
|
||||
return {std::move(storePath), input};
|
||||
}
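Aside (not part of the diff): the accessor-based addToStore overload that the fetchers migrate to has this call shape; the path is illustrative and the trailing arguments (references, filter, repair) keep their defaults.

PosixSourceAccessor accessor;
auto storePath = store->addToStore(
    "source",
    accessor, CanonPath("/tmp/checkout"),
    FileIngestionMethod::Recursive, HashAlgorithm::SHA256);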
|
||||
|
@ -218,7 +224,7 @@ struct MercurialInputScheme : InputScheme
|
|||
|
||||
if (!input.getRef()) input.attrs.insert_or_assign("ref", "default");
|
||||
|
||||
auto checkHashType = [&](const std::optional<Hash> & hash)
|
||||
auto checkHashAlgorithm = [&](const std::optional<Hash> & hash)
|
||||
{
|
||||
if (hash.has_value() && hash->algo != HashAlgorithm::SHA1)
|
||||
throw Error("Hash '%s' is not supported by Mercurial. Only sha1 is supported.", hash->to_string(HashFormat::Base16, true));
|
||||
|
@ -227,7 +233,7 @@ struct MercurialInputScheme : InputScheme
|
|||
|
||||
auto getLockedAttrs = [&]()
|
||||
{
|
||||
checkHashType(input.getRev());
|
||||
checkHashAlgorithm(input.getRev());
|
||||
|
||||
return Attrs({
|
||||
{"type", "hg"},
|
||||
|
@ -246,7 +252,7 @@ struct MercurialInputScheme : InputScheme
|
|||
};
|
||||
|
||||
if (input.getRev()) {
|
||||
if (auto res = getCache()->lookup(store, getLockedAttrs()))
|
||||
if (auto res = getCache()->lookup(*store, getLockedAttrs()))
|
||||
return makeResult(res->first, std::move(res->second));
|
||||
}
|
||||
|
||||
|
@ -259,7 +265,7 @@ struct MercurialInputScheme : InputScheme
|
|||
{"ref", *input.getRef()},
|
||||
});
|
||||
|
||||
if (auto res = getCache()->lookup(store, unlockedAttrs)) {
|
||||
if (auto res = getCache()->lookup(*store, unlockedAttrs)) {
|
||||
auto rev2 = Hash::parseAny(getStrAttr(res->first, "rev"), HashAlgorithm::SHA1);
|
||||
if (!input.getRev() || input.getRev() == rev2) {
|
||||
input.attrs.insert_or_assign("rev", rev2.gitRev());
|
||||
|
@ -305,7 +311,7 @@ struct MercurialInputScheme : InputScheme
|
|||
auto revCount = std::stoull(tokens[1]);
|
||||
input.attrs.insert_or_assign("ref", tokens[2]);
|
||||
|
||||
if (auto res = getCache()->lookup(store, getLockedAttrs()))
|
||||
if (auto res = getCache()->lookup(*store, getLockedAttrs()))
|
||||
return makeResult(res->first, std::move(res->second));
|
||||
|
||||
Path tmpDir = createTempDir();
|
||||
|
@ -315,7 +321,8 @@ struct MercurialInputScheme : InputScheme
|
|||
|
||||
deletePath(tmpDir + "/.hg_archival.txt");
|
||||
|
||||
auto storePath = store->addToStore(name, tmpDir);
|
||||
PosixSourceAccessor accessor;
|
||||
auto storePath = store->addToStore(name, accessor, CanonPath { tmpDir });
|
||||
|
||||
Attrs infoAttrs({
|
||||
{"rev", input.getRev()->gitRev()},
|
||||
|
@ -324,14 +331,14 @@ struct MercurialInputScheme : InputScheme
|
|||
|
||||
if (!_input.getRev())
|
||||
getCache()->add(
|
||||
store,
|
||||
*store,
|
||||
unlockedAttrs,
|
||||
infoAttrs,
|
||||
storePath,
|
||||
false);
|
||||
|
||||
getCache()->add(
|
||||
store,
|
||||
*store,
|
||||
getLockedAttrs(),
|
||||
infoAttrs,
|
||||
storePath,
|
||||
|
|
|
@ -8,6 +8,7 @@
|
|||
#include "tarfile.hh"
|
||||
#include "types.hh"
|
||||
#include "split.hh"
|
||||
#include "posix-source-accessor.hh"
|
||||
|
||||
namespace nix::fetchers {
|
||||
|
||||
|
@ -26,7 +27,7 @@ DownloadFileResult downloadFile(
|
|||
{"name", name},
|
||||
});
|
||||
|
||||
auto cached = getCache()->lookupExpired(store, inAttrs);
|
||||
auto cached = getCache()->lookupExpired(*store, inAttrs);
|
||||
|
||||
auto useCached = [&]() -> DownloadFileResult
|
||||
{
|
||||
|
@ -91,7 +92,7 @@ DownloadFileResult downloadFile(
|
|||
}
|
||||
|
||||
getCache()->add(
|
||||
store,
|
||||
*store,
|
||||
inAttrs,
|
||||
infoAttrs,
|
||||
*storePath,
|
||||
|
@ -99,7 +100,7 @@ DownloadFileResult downloadFile(
|
|||
|
||||
if (url != res.effectiveUri)
|
||||
getCache()->add(
|
||||
store,
|
||||
*store,
|
||||
{
|
||||
{"type", "file"},
|
||||
{"url", res.effectiveUri},
|
||||
|
@ -130,7 +131,7 @@ DownloadTarballResult downloadTarball(
|
|||
{"name", name},
|
||||
});
|
||||
|
||||
auto cached = getCache()->lookupExpired(store, inAttrs);
|
||||
auto cached = getCache()->lookupExpired(*store, inAttrs);
|
||||
|
||||
if (cached && !cached->expired)
|
||||
return {
|
||||
|
@ -156,7 +157,8 @@ DownloadTarballResult downloadTarball(
|
|||
throw nix::Error("tarball '%s' contains an unexpected number of top-level files", url);
|
||||
auto topDir = tmpDir + "/" + members.begin()->name;
|
||||
lastModified = lstat(topDir).st_mtime;
|
||||
unpackedStorePath = store->addToStore(name, topDir, FileIngestionMethod::Recursive, HashAlgorithm::SHA256, defaultPathFilter, NoRepair);
|
||||
PosixSourceAccessor accessor;
|
||||
unpackedStorePath = store->addToStore(name, accessor, CanonPath { topDir }, FileIngestionMethod::Recursive, HashAlgorithm::SHA256, {}, defaultPathFilter, NoRepair);
|
||||
}
|
||||
|
||||
Attrs infoAttrs({
|
||||
|
@ -168,7 +170,7 @@ DownloadTarballResult downloadTarball(
|
|||
infoAttrs.emplace("immutableUrl", *res.immutableUrl);
|
||||
|
||||
getCache()->add(
|
||||
store,
|
||||
*store,
|
||||
inAttrs,
|
||||
infoAttrs,
|
||||
*unpackedStorePath,
|
||||
|
|
|
@ -12,6 +12,7 @@
|
|||
#include "thread-pool.hh"
|
||||
#include "callback.hh"
|
||||
#include "signals.hh"
|
||||
#include "archive.hh"
|
||||
|
||||
#include <chrono>
|
||||
#include <future>
|
||||
|
@ -27,7 +28,8 @@ BinaryCacheStore::BinaryCacheStore(const Params & params)
|
|||
, Store(params)
|
||||
{
|
||||
if (secretKeyFile != "")
|
||||
secretKey = std::unique_ptr<SecretKey>(new SecretKey(readFile(secretKeyFile)));
|
||||
signer = std::make_unique<LocalSigner>(
|
||||
SecretKey { readFile(secretKeyFile) });
|
||||
|
||||
StringSink sink;
|
||||
sink << narVersionMagic1;
|
||||
|
@ -273,7 +275,7 @@ ref<const ValidPathInfo> BinaryCacheStore::addToStoreCommon(
|
|||
stats.narWriteCompressionTimeMs += duration;
|
||||
|
||||
/* Atomically write the NAR info file. */
|
||||
if (secretKey) narInfo->sign(*this, *secretKey);
|
||||
if (signer) narInfo->sign(*this, *signer);
|
||||
|
||||
writeNarInfo(narInfo);
|
||||
|
||||
|
@ -300,24 +302,60 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource
|
|||
}});
|
||||
}
|
||||
|
||||
StorePath BinaryCacheStore::addToStoreFromDump(Source & dump, std::string_view name,
|
||||
FileIngestionMethod method, HashAlgorithm hashAlgo, RepairFlag repair, const StorePathSet & references)
|
||||
StorePath BinaryCacheStore::addToStoreFromDump(
|
||||
Source & dump,
|
||||
std::string_view name,
|
||||
ContentAddressMethod method,
|
||||
HashAlgorithm hashAlgo,
|
||||
const StorePathSet & references,
|
||||
RepairFlag repair)
|
||||
{
|
||||
if (method != FileIngestionMethod::Recursive || hashAlgo != HashAlgorithm::SHA256)
|
||||
unsupported("addToStoreFromDump");
|
||||
return addToStoreCommon(dump, repair, CheckSigs, [&](HashResult nar) {
|
||||
std::optional<Hash> caHash;
|
||||
std::string nar;
|
||||
|
||||
if (auto * dump2p = dynamic_cast<StringSource *>(&dump)) {
|
||||
auto & dump2 = *dump2p;
|
||||
// Hack, this gives us a "replayable" source so we can compute
|
||||
// multiple hashes more easily.
|
||||
caHash = hashString(HashAlgorithm::SHA256, dump2.s);
|
||||
switch (method.getFileIngestionMethod()) {
|
||||
case FileIngestionMethod::Recursive:
|
||||
// The dump is already NAR in this case, just use it.
|
||||
nar = dump2.s;
|
||||
break;
|
||||
case FileIngestionMethod::Flat:
|
||||
// The dump is Flat, so we need to convert it to NAR with a
|
||||
// single file.
|
||||
StringSink s;
|
||||
dumpString(dump2.s, s);
|
||||
nar = std::move(s.s);
|
||||
break;
|
||||
}
|
||||
} else {
|
||||
// Otherwise, we have to do the same hashing as NAR so our single
|
||||
// hash will suffice for both purposes.
|
||||
if (method != FileIngestionMethod::Recursive || hashAlgo != HashAlgorithm::SHA256)
|
||||
unsupported("addToStoreFromDump");
|
||||
}
|
||||
StringSource narDump { nar };
|
||||
|
||||
// Use `narDump` if we wrote to `nar`.
|
||||
Source & narDump2 = nar.size() > 0
|
||||
? static_cast<Source &>(narDump)
|
||||
: dump;
|
||||
|
||||
return addToStoreCommon(narDump2, repair, CheckSigs, [&](HashResult nar) {
|
||||
ValidPathInfo info {
|
||||
*this,
|
||||
name,
|
||||
FixedOutputInfo {
|
||||
.method = method,
|
||||
.hash = nar.first,
|
||||
.references = {
|
||||
ContentAddressWithReferences::fromParts(
|
||||
method,
|
||||
caHash ? *caHash : nar.first,
|
||||
{
|
||||
.others = references,
|
||||
// caller is not capable of creating a self-reference, because this is content-addressed without modulus
|
||||
.self = false,
|
||||
},
|
||||
},
|
||||
}),
|
||||
nar.first,
|
||||
};
|
||||
info.narSize = nar.second;
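Context note (editorial): the Flat branch above leans on dumpString() producing a NAR that contains exactly one regular file, roughly:

// Minimal sketch of the Flat -> NAR conversion used in addToStoreFromDump().
StringSink s;
dumpString(dump2.s, s);        // serialise the raw bytes as a single-file NAR
StringSource narDump { s.s };  // replayable source handed to addToStoreCommon()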
|
||||
|
@ -399,72 +437,36 @@ void BinaryCacheStore::queryPathInfoUncached(const StorePath & storePath,
|
|||
}
|
||||
|
||||
StorePath BinaryCacheStore::addToStore(
|
||||
std::string_view name,
|
||||
const Path & srcPath,
|
||||
FileIngestionMethod method,
|
||||
HashAlgorithm hashAlgo,
|
||||
PathFilter & filter,
|
||||
RepairFlag repair,
|
||||
const StorePathSet & references)
|
||||
std::string_view name,
|
||||
SourceAccessor & accessor,
|
||||
const CanonPath & path,
|
||||
ContentAddressMethod method,
|
||||
HashAlgorithm hashAlgo,
|
||||
const StorePathSet & references,
|
||||
PathFilter & filter,
|
||||
RepairFlag repair)
|
||||
{
|
||||
/* FIXME: Make BinaryCacheStore::addToStoreCommon support
|
||||
non-recursive+sha256 so we can just use the default
|
||||
implementation of this method in terms of addToStoreFromDump. */
|
||||
|
||||
HashSink sink { hashAlgo };
|
||||
if (method == FileIngestionMethod::Recursive) {
|
||||
dumpPath(srcPath, sink, filter);
|
||||
} else {
|
||||
readFile(srcPath, sink);
|
||||
}
|
||||
auto h = sink.finish().first;
|
||||
auto h = hashPath(accessor, path, method.getFileIngestionMethod(), hashAlgo, filter).first;
|
||||
|
||||
auto source = sinkToSource([&](Sink & sink) {
|
||||
dumpPath(srcPath, sink, filter);
|
||||
accessor.dumpPath(path, sink, filter);
|
||||
});
|
||||
return addToStoreCommon(*source, repair, CheckSigs, [&](HashResult nar) {
|
||||
ValidPathInfo info {
|
||||
*this,
|
||||
name,
|
||||
FixedOutputInfo {
|
||||
.method = method,
|
||||
.hash = h,
|
||||
.references = {
|
||||
ContentAddressWithReferences::fromParts(
|
||||
method,
|
||||
h,
|
||||
{
|
||||
.others = references,
|
||||
// caller is not capable of creating a self-reference, because this is content-addressed without modulus
|
||||
.self = false,
|
||||
},
|
||||
},
|
||||
nar.first,
|
||||
};
|
||||
info.narSize = nar.second;
|
||||
return info;
|
||||
})->path;
|
||||
}
|
||||
|
||||
StorePath BinaryCacheStore::addTextToStore(
|
||||
std::string_view name,
|
||||
std::string_view s,
|
||||
const StorePathSet & references,
|
||||
RepairFlag repair)
|
||||
{
|
||||
auto textHash = hashString(HashAlgorithm::SHA256, s);
|
||||
auto path = makeTextPath(name, TextInfo { { textHash }, references });
|
||||
|
||||
if (!repair && isValidPath(path))
|
||||
return path;
|
||||
|
||||
StringSink sink;
|
||||
dumpString(s, sink);
|
||||
StringSource source(sink.s);
|
||||
return addToStoreCommon(source, repair, CheckSigs, [&](HashResult nar) {
|
||||
ValidPathInfo info {
|
||||
*this,
|
||||
std::string { name },
|
||||
TextInfo {
|
||||
.hash = textHash,
|
||||
.references = references,
|
||||
},
|
||||
}),
|
||||
nar.first,
|
||||
};
|
||||
info.narSize = nar.second;
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
#pragma once
|
||||
///@file
|
||||
|
||||
#include "crypto.hh"
|
||||
#include "signature/local-keys.hh"
|
||||
#include "store-api.hh"
|
||||
#include "log-store.hh"
|
||||
|
||||
|
@ -57,8 +57,7 @@ class BinaryCacheStore : public virtual BinaryCacheStoreConfig,
|
|||
{
|
||||
|
||||
private:
|
||||
|
||||
std::unique_ptr<SecretKey> secretKey;
|
||||
std::unique_ptr<Signer> signer;
|
||||
|
||||
protected:
|
||||
|
||||
|
@ -123,22 +122,22 @@ public:
|
|||
void addToStore(const ValidPathInfo & info, Source & narSource,
|
||||
RepairFlag repair, CheckSigsFlag checkSigs) override;
|
||||
|
||||
StorePath addToStoreFromDump(Source & dump, std::string_view name,
|
||||
FileIngestionMethod method, HashAlgorithm hashAlgo, RepairFlag repair, const StorePathSet & references) override;
|
||||
StorePath addToStoreFromDump(
|
||||
Source & dump,
|
||||
std::string_view name,
|
||||
ContentAddressMethod method,
|
||||
HashAlgorithm hashAlgo,
|
||||
const StorePathSet & references,
|
||||
RepairFlag repair) override;
|
||||
|
||||
StorePath addToStore(
|
||||
std::string_view name,
|
||||
const Path & srcPath,
|
||||
FileIngestionMethod method,
|
||||
HashAlgorithm hashAlgo,
|
||||
PathFilter & filter,
|
||||
RepairFlag repair,
|
||||
const StorePathSet & references) override;
|
||||
|
||||
StorePath addTextToStore(
|
||||
std::string_view name,
|
||||
std::string_view s,
|
||||
SourceAccessor & accessor,
|
||||
const CanonPath & srcPath,
|
||||
ContentAddressMethod method,
|
||||
HashAlgorithm hashAlgo,
|
||||
const StorePathSet & references,
|
||||
PathFilter & filter,
|
||||
RepairFlag repair) override;
|
||||
|
||||
void registerDrvOutput(const Realisation & info) override;
|
||||
|
|
|
@ -196,10 +196,19 @@ void DerivationGoal::loadDerivation()
|
|||
things being garbage collected while we're busy. */
|
||||
worker.evalStore.addTempRoot(drvPath);
|
||||
|
||||
assert(worker.evalStore.isValidPath(drvPath));
|
||||
/* Get the derivation. It is probably in the eval store, but it might be in the main store:
|
||||
|
||||
/* Get the derivation. */
|
||||
drv = std::make_unique<Derivation>(worker.evalStore.readDerivation(drvPath));
|
||||
- Resolved derivations are resolved against main store realisations, and so must be stored there.
|
||||
|
||||
- Dynamic derivations are built, and so are found in the main store.
|
||||
*/
|
||||
for (auto * drvStore : { &worker.evalStore, &worker.store }) {
|
||||
if (drvStore->isValidPath(drvPath)) {
|
||||
drv = std::make_unique<Derivation>(drvStore->readDerivation(drvPath));
|
||||
break;
|
||||
}
|
||||
}
|
||||
assert(drv);
|
||||
|
||||
haveDerivation();
|
||||
}
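Editorial sketch: the eval-store-then-main-store lookup used here recurs throughout this file; as a hypothetical helper (not in the commit) it amounts to:

// Prefer the eval store, fall back to the main store; the caller handles the nullptr case.
static Store * storeContaining(Store & evalStore, Store & mainStore, const StorePath & path)
{
    for (auto * s : { &evalStore, &mainStore })
        if (s->isValidPath(path))
            return s;
    return nullptr;
}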
|
||||
|
@ -214,7 +223,7 @@ void DerivationGoal::haveDerivation()
|
|||
if (!drv->type().hasKnownOutputPaths())
|
||||
experimentalFeatureSettings.require(Xp::CaDerivations);
|
||||
|
||||
if (!drv->type().isPure()) {
|
||||
if (drv->type().isImpure()) {
|
||||
experimentalFeatureSettings.require(Xp::ImpureDerivations);
|
||||
|
||||
for (auto & [outputName, output] : drv->outputs) {
|
||||
|
@ -295,7 +304,7 @@ void DerivationGoal::outputsSubstitutionTried()
|
|||
{
|
||||
trace("all outputs substituted (maybe)");
|
||||
|
||||
assert(drv->type().isPure());
|
||||
assert(!drv->type().isImpure());
|
||||
|
||||
if (nrFailed > 0 && nrFailed > nrNoSubstituters + nrIncompleteClosure && !settings.tryFallback) {
|
||||
done(BuildResult::TransientFailure, {},
|
||||
|
@ -388,9 +397,9 @@ void DerivationGoal::gaveUpOnSubstitution()
|
|||
for (const auto & [inputDrvPath, inputNode] : dynamic_cast<Derivation *>(drv.get())->inputDrvs.map) {
|
||||
/* Ensure that pure, non-fixed-output derivations don't
|
||||
depend on impure derivations. */
|
||||
if (experimentalFeatureSettings.isEnabled(Xp::ImpureDerivations) && drv->type().isPure() && !drv->type().isFixed()) {
|
||||
if (experimentalFeatureSettings.isEnabled(Xp::ImpureDerivations) && !drv->type().isImpure() && !drv->type().isFixed()) {
|
||||
auto inputDrv = worker.evalStore.readDerivation(inputDrvPath);
|
||||
if (!inputDrv.type().isPure())
|
||||
if (inputDrv.type().isImpure())
|
||||
throw Error("pure derivation '%s' depends on impure derivation '%s'",
|
||||
worker.store.printStorePath(drvPath),
|
||||
worker.store.printStorePath(inputDrvPath));
|
||||
|
@ -401,11 +410,15 @@ void DerivationGoal::gaveUpOnSubstitution()
|
|||
}
|
||||
|
||||
/* Copy the input sources from the eval store to the build
|
||||
store. */
|
||||
store.
|
||||
|
||||
Note that some inputs might not be in the eval store because they
|
||||
are (resolved) derivation outputs in a resolved derivation. */
|
||||
if (&worker.evalStore != &worker.store) {
|
||||
RealisedPath::Set inputSrcs;
|
||||
for (auto & i : drv->inputSrcs)
|
||||
inputSrcs.insert(i);
|
||||
if (worker.evalStore.isValidPath(i))
|
||||
inputSrcs.insert(i);
|
||||
copyClosure(worker.evalStore, worker.store, inputSrcs);
|
||||
}
|
||||
|
||||
|
@ -426,7 +439,7 @@ void DerivationGoal::gaveUpOnSubstitution()
|
|||
|
||||
void DerivationGoal::repairClosure()
|
||||
{
|
||||
assert(drv->type().isPure());
|
||||
assert(!drv->type().isImpure());
|
||||
|
||||
/* If we're repairing, we now know that our own outputs are valid.
|
||||
Now check whether the other paths in the outputs closure are
|
||||
|
@ -453,7 +466,7 @@ void DerivationGoal::repairClosure()
|
|||
std::map<StorePath, StorePath> outputsToDrv;
|
||||
for (auto & i : inputClosure)
|
||||
if (i.isDerivation()) {
|
||||
auto depOutputs = worker.store.queryPartialDerivationOutputMap(i);
|
||||
auto depOutputs = worker.store.queryPartialDerivationOutputMap(i, &worker.evalStore);
|
||||
for (auto & j : depOutputs)
|
||||
if (j.second)
|
||||
outputsToDrv.insert_or_assign(*j.second, i);
|
||||
|
@ -604,7 +617,13 @@ void DerivationGoal::inputsRealised()
|
|||
return *outPath;
|
||||
}
|
||||
else {
|
||||
auto outMap = worker.evalStore.queryDerivationOutputMap(depDrvPath);
|
||||
auto outMap = [&]{
|
||||
for (auto * drvStore : { &worker.evalStore, &worker.store })
|
||||
if (drvStore->isValidPath(depDrvPath))
|
||||
return worker.store.queryDerivationOutputMap(depDrvPath, drvStore);
|
||||
assert(false);
|
||||
}();
|
||||
|
||||
auto outMapPath = outMap.find(outputName);
|
||||
if (outMapPath == outMap.end()) {
|
||||
throw Error(
|
||||
|
@ -1081,12 +1100,16 @@ void DerivationGoal::resolvedFinished()
|
|||
worker.store.printStorePath(resolvedDrvGoal->drvPath), outputName);
|
||||
}();
|
||||
|
||||
if (drv->type().isPure()) {
|
||||
if (!drv->type().isImpure()) {
|
||||
auto newRealisation = realisation;
|
||||
newRealisation.id = DrvOutput { initialOutput->outputHash, outputName };
|
||||
newRealisation.signatures.clear();
|
||||
if (!drv->type().isFixed())
|
||||
newRealisation.dependentRealisations = drvOutputReferences(worker.store, *drv, realisation.outPath);
|
||||
if (!drv->type().isFixed()) {
|
||||
auto & drvStore = worker.evalStore.isValidPath(drvPath)
|
||||
? worker.evalStore
|
||||
: worker.store;
|
||||
newRealisation.dependentRealisations = drvOutputReferences(worker.store, *drv, realisation.outPath, &drvStore);
|
||||
}
|
||||
signRealisation(newRealisation);
|
||||
worker.store.registerDrvOutput(newRealisation);
|
||||
}
|
||||
|
@ -1372,34 +1395,40 @@ void DerivationGoal::flushLine()
|
|||
|
||||
std::map<std::string, std::optional<StorePath>> DerivationGoal::queryPartialDerivationOutputMap()
|
||||
{
|
||||
assert(drv->type().isPure());
|
||||
assert(!drv->type().isImpure());
|
||||
if (!useDerivation || drv->type().hasKnownOutputPaths()) {
|
||||
std::map<std::string, std::optional<StorePath>> res;
|
||||
for (auto & [name, output] : drv->outputs)
|
||||
res.insert_or_assign(name, output.path(worker.store, drv->name, name));
|
||||
return res;
|
||||
} else {
|
||||
return worker.store.queryPartialDerivationOutputMap(drvPath);
|
||||
for (auto * drvStore : { &worker.evalStore, &worker.store })
|
||||
if (drvStore->isValidPath(drvPath))
|
||||
return worker.store.queryPartialDerivationOutputMap(drvPath, drvStore);
|
||||
assert(false);
|
||||
}
|
||||
}
|
||||
|
||||
OutputPathMap DerivationGoal::queryDerivationOutputMap()
|
||||
{
|
||||
assert(drv->type().isPure());
|
||||
assert(!drv->type().isImpure());
|
||||
if (!useDerivation || drv->type().hasKnownOutputPaths()) {
|
||||
OutputPathMap res;
|
||||
for (auto & [name, output] : drv->outputsAndOptPaths(worker.store))
|
||||
res.insert_or_assign(name, *output.second);
|
||||
return res;
|
||||
} else {
|
||||
return worker.store.queryDerivationOutputMap(drvPath);
|
||||
for (auto * drvStore : { &worker.evalStore, &worker.store })
|
||||
if (drvStore->isValidPath(drvPath))
|
||||
return worker.store.queryDerivationOutputMap(drvPath, drvStore);
|
||||
assert(false);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
std::pair<bool, SingleDrvOutputs> DerivationGoal::checkPathValidity()
|
||||
{
|
||||
if (!drv->type().isPure()) return { false, {} };
|
||||
if (drv->type().isImpure()) return { false, {} };
|
||||
|
||||
bool checkHash = buildMode == bmRepair;
|
||||
auto wantedOutputsLeft = std::visit(overloaded {
|
||||
|
|
|
@ -26,9 +26,9 @@ void Store::buildPaths(const std::vector<DerivedPath> & reqs, BuildMode buildMod
|
|||
}
|
||||
if (i->exitCode != Goal::ecSuccess) {
|
||||
if (auto i2 = dynamic_cast<DerivationGoal *>(i.get()))
|
||||
failed.insert(std::string { i2->drvPath.to_string() });
|
||||
failed.insert(printStorePath(i2->drvPath));
|
||||
else if (auto i2 = dynamic_cast<PathSubstitutionGoal *>(i.get()))
|
||||
failed.insert(std::string { i2->storePath.to_string()});
|
||||
failed.insert(printStorePath(i2->storePath));
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -20,6 +20,7 @@
|
|||
#include "child.hh"
|
||||
#include "unix-domain-socket.hh"
|
||||
#include "posix-fs-canonicalise.hh"
|
||||
#include "posix-source-accessor.hh"
|
||||
|
||||
#include <regex>
|
||||
#include <queue>
|
||||
|
@ -1290,13 +1291,14 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual In
|
|||
{ throw Error("queryPathFromHashPart"); }
|
||||
|
||||
StorePath addToStore(
|
||||
std::string_view name,
|
||||
const Path & srcPath,
|
||||
FileIngestionMethod method,
|
||||
HashAlgorithm hashAlgo,
|
||||
PathFilter & filter,
|
||||
RepairFlag repair,
|
||||
const StorePathSet & references) override
|
||||
std::string_view name,
|
||||
SourceAccessor & accessor,
|
||||
const CanonPath & srcPath,
|
||||
ContentAddressMethod method,
|
||||
HashAlgorithm hashAlgo,
|
||||
const StorePathSet & references,
|
||||
PathFilter & filter,
|
||||
RepairFlag repair) override
|
||||
{ throw Error("addToStore"); }
|
||||
|
||||
void addToStore(const ValidPathInfo & info, Source & narSource,
|
||||
|
@ -1306,26 +1308,15 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual In
|
|||
goal.addDependency(info.path);
|
||||
}
|
||||
|
||||
StorePath addTextToStore(
|
||||
std::string_view name,
|
||||
std::string_view s,
|
||||
const StorePathSet & references,
|
||||
RepairFlag repair = NoRepair) override
|
||||
{
|
||||
auto path = next->addTextToStore(name, s, references, repair);
|
||||
goal.addDependency(path);
|
||||
return path;
|
||||
}
|
||||
|
||||
StorePath addToStoreFromDump(
|
||||
Source & dump,
|
||||
std::string_view name,
|
||||
FileIngestionMethod method,
|
||||
HashAlgorithm hashAlgo,
|
||||
RepairFlag repair,
|
||||
const StorePathSet & references) override
|
||||
Source & dump,
|
||||
std::string_view name,
|
||||
ContentAddressMethod method,
|
||||
HashAlgorithm hashAlgo,
|
||||
const StorePathSet & references,
|
||||
RepairFlag repair) override
|
||||
{
|
||||
auto path = next->addToStoreFromDump(dump, name, method, hashAlgo, repair, references);
|
||||
auto path = next->addToStoreFromDump(dump, name, method, hashAlgo, references, repair);
|
||||
goal.addDependency(path);
|
||||
return path;
|
||||
}
|
||||
|
@ -1504,7 +1495,7 @@ void LocalDerivationGoal::startDaemon()
|
|||
daemon::processConnection(store, from, to,
|
||||
NotTrusted, daemon::Recursive);
|
||||
debug("terminated daemon connection");
|
||||
} catch (SysError &) {
|
||||
} catch (SystemError &) {
|
||||
ignoreException();
|
||||
}
|
||||
});
|
||||
|
@ -1716,7 +1707,7 @@ void LocalDerivationGoal::runChild()
|
|||
try {
|
||||
if (drv->isBuiltin() && drv->builder == "builtin:fetchurl")
|
||||
netrcData = readFile(settings.netrcFile);
|
||||
} catch (SysError &) { }
|
||||
} catch (SystemError &) { }
|
||||
|
||||
#if __linux__
|
||||
if (useChroot) {
|
||||
|
@ -2453,8 +2444,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
|
|||
throw BuildError(
|
||||
"output path %1% without valid stats info",
|
||||
actualPath);
|
||||
if (outputHash.method == ContentAddressMethod { FileIngestionMethod::Flat } ||
|
||||
outputHash.method == ContentAddressMethod { TextIngestionMethod {} })
|
||||
if (outputHash.method.getFileIngestionMethod() == FileIngestionMethod::Flat)
|
||||
{
|
||||
/* The output path should be a regular file without execute permission. */
|
||||
if (!S_ISREG(st->st_mode) || (st->st_mode & S_IXUSR) != 0)
|
||||
|
@ -2466,38 +2456,23 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
|
|||
rewriteOutput(outputRewrites);
|
||||
/* FIXME optimize and deduplicate with addToStore */
|
||||
std::string oldHashPart { scratchPath->hashPart() };
|
||||
HashModuloSink caSink {outputHash.hashAlgo, oldHashPart };
|
||||
std::visit(overloaded {
|
||||
[&](const TextIngestionMethod &) {
|
||||
readFile(actualPath, caSink);
|
||||
},
|
||||
[&](const FileIngestionMethod & m2) {
|
||||
switch (m2) {
|
||||
case FileIngestionMethod::Recursive:
|
||||
dumpPath(actualPath, caSink);
|
||||
break;
|
||||
case FileIngestionMethod::Flat:
|
||||
readFile(actualPath, caSink);
|
||||
break;
|
||||
}
|
||||
},
|
||||
}, outputHash.method.raw);
|
||||
auto got = caSink.finish().first;
|
||||
auto got = ({
|
||||
HashModuloSink caSink { outputHash.hashAlgo, oldHashPart };
|
||||
PosixSourceAccessor accessor;
|
||||
dumpPath(
|
||||
accessor, CanonPath { actualPath },
|
||||
caSink,
|
||||
outputHash.method.getFileIngestionMethod());
|
||||
caSink.finish().first;
|
||||
});
|
||||
|
||||
auto optCA = ContentAddressWithReferences::fromPartsOpt(
|
||||
outputHash.method,
|
||||
std::move(got),
|
||||
rewriteRefs());
|
||||
if (!optCA) {
|
||||
// TODO track distinct failure modes separately (at the time of
|
||||
// writing there is just one but `nullopt` is unclear) so this
|
||||
// message can't get out of sync.
|
||||
throw BuildError("output path '%s' has illegal content address, probably a spurious self-reference with text hashing");
|
||||
}
|
||||
ValidPathInfo newInfo0 {
|
||||
worker.store,
|
||||
outputPathName(drv->name, outputName),
|
||||
std::move(*optCA),
|
||||
ContentAddressWithReferences::fromParts(
|
||||
outputHash.method,
|
||||
std::move(got),
|
||||
rewriteRefs()),
|
||||
Hash::dummy,
|
||||
};
|
||||
if (*scratchPath != newInfo0.path) {
|
||||
|
@ -2511,9 +2486,14 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
|
|||
std::string(newInfo0.path.hashPart())}});
|
||||
}
|
||||
|
||||
HashResult narHashAndSize = hashPath(HashAlgorithm::SHA256, actualPath);
|
||||
newInfo0.narHash = narHashAndSize.first;
|
||||
newInfo0.narSize = narHashAndSize.second;
|
||||
{
|
||||
PosixSourceAccessor accessor;
|
||||
HashResult narHashAndSize = hashPath(
|
||||
accessor, CanonPath { actualPath },
|
||||
FileIngestionMethod::Recursive, HashAlgorithm::SHA256);
|
||||
newInfo0.narHash = narHashAndSize.first;
|
||||
newInfo0.narSize = narHashAndSize.second;
|
||||
}
|
||||
|
||||
assert(newInfo0.ca);
|
||||
return newInfo0;
|
||||
|
@ -2531,7 +2511,10 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
|
|||
std::string { scratchPath->hashPart() },
|
||||
std::string { requiredFinalPath.hashPart() });
|
||||
rewriteOutput(outputRewrites);
|
||||
auto narHashAndSize = hashPath(HashAlgorithm::SHA256, actualPath);
|
||||
PosixSourceAccessor accessor;
|
||||
HashResult narHashAndSize = hashPath(
|
||||
accessor, CanonPath { actualPath },
|
||||
FileIngestionMethod::Recursive, HashAlgorithm::SHA256);
|
||||
ValidPathInfo newInfo0 { requiredFinalPath, narHashAndSize.first };
|
||||
newInfo0.narSize = narHashAndSize.second;
|
||||
auto refs = rewriteRefs();
|
||||
|
@ -2741,7 +2724,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
|
|||
.outPath = newInfo.path
|
||||
};
|
||||
if (experimentalFeatureSettings.isEnabled(Xp::CaDerivations)
|
||||
&& drv->type().isPure())
|
||||
&& !drv->type().isImpure())
|
||||
{
|
||||
signRealisation(thisRealisation);
|
||||
worker.store.registerDrvOutput(thisRealisation);
|
||||
|
|
|
@ -68,6 +68,7 @@ R""(
|
|||
(allow file*
|
||||
(literal "/dev/null")
|
||||
(literal "/dev/random")
|
||||
(literal "/dev/stderr")
|
||||
(literal "/dev/stdin")
|
||||
(literal "/dev/stdout")
|
||||
(literal "/dev/tty")
|
||||
|
|
|
@ -2,6 +2,7 @@
|
|||
#include "substitution-goal.hh"
|
||||
#include "nar-info.hh"
|
||||
#include "finally.hh"
|
||||
#include "signals.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
|
@ -217,6 +218,8 @@ void PathSubstitutionGoal::tryToRun()
|
|||
|
||||
thr = std::thread([this]() {
|
||||
try {
|
||||
ReceiveInterrupts receiveInterrupts;
|
||||
|
||||
/* Wake up the worker loop when we're done. */
|
||||
Finally updateStats([this]() { outPipe.writeSide.close(); });
|
||||
|
||||
|
|
|
@ -251,7 +251,7 @@ void Worker::childTerminated(Goal * goal, bool wakeSleepers)
|
|||
|
||||
void Worker::waitForBuildSlot(GoalPtr goal)
|
||||
{
|
||||
debug("wait for build slot");
|
||||
goal->trace("wait for build slot");
|
||||
bool isSubstitutionGoal = goal->jobCategory() == JobCategory::Substitution;
|
||||
if ((!isSubstitutionGoal && getNrLocalBuilds() < settings.maxBuildJobs) ||
|
||||
(isSubstitutionGoal && getNrSubstitutions() < settings.maxSubstitutionJobs))
|
||||
|
@ -449,7 +449,7 @@ void Worker::waitForInput()
|
|||
} else {
|
||||
printMsg(lvlVomit, "%1%: read %2% bytes",
|
||||
goal->getName(), rd);
|
||||
std::string data((char *) buffer.data(), rd);
|
||||
std::string_view data((char *) buffer.data(), rd);
|
||||
j->lastOutput = after;
|
||||
goal->handleChildOutput(k, data);
|
||||
}
|
||||
|
@ -519,7 +519,9 @@ bool Worker::pathContentsGood(const StorePath & path)
|
|||
if (!pathExists(store.printStorePath(path)))
|
||||
res = false;
|
||||
else {
|
||||
HashResult current = hashPath(info->narHash.algo, store.printStorePath(path));
|
||||
HashResult current = hashPath(
|
||||
*store.getFSAccessor(), CanonPath { store.printStorePath(path) },
|
||||
FileIngestionMethod::Recursive, info->narHash.algo);
|
||||
Hash nullHash(HashAlgorithm::SHA256);
|
||||
res = info->narHash == nullHash || info->narHash == current.first;
|
||||
}
|
||||
|
|
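As a rough sketch of the new hashPath call shape used above (the store path literal and variable names are illustrative, not taken from this change):

    // Sketch: recompute a NAR-style hash the way pathContentsGood now does.
    PosixSourceAccessor accessor;
    HashResult current = hashPath(
        accessor, CanonPath { "/nix/store/aaaa-example" },   // illustrative path
        FileIngestionMethod::Recursive, HashAlgorithm::SHA256);
    // current.first is the Hash, current.second the NAR size.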
|
@ -116,7 +116,7 @@ private:
|
|||
WeakGoals waitingForAWhile;
|
||||
|
||||
/**
|
||||
* Last time the goals in `waitingForAWhile` where woken up.
|
||||
* Last time the goals in `waitingForAWhile` were woken up.
|
||||
*/
|
||||
steady_time_point lastWokenUp;
|
||||
|
||||
|
|
|
@ -5,6 +5,9 @@
|
|||
|
||||
namespace nix {
|
||||
|
||||
/**
|
||||
* Think of this as a "store level package attrset", but stripped down to no more than the needs of buildenv.
|
||||
*/
|
||||
struct Package {
|
||||
Path path;
|
||||
bool active;
|
||||
|
|
|
@ -50,6 +50,18 @@ std::string ContentAddressMethod::render(HashAlgorithm ha) const
|
|||
}, raw);
|
||||
}
|
||||
|
||||
FileIngestionMethod ContentAddressMethod::getFileIngestionMethod() const
|
||||
{
|
||||
return std::visit(overloaded {
|
||||
[&](const TextIngestionMethod & th) {
|
||||
return FileIngestionMethod::Flat;
|
||||
},
|
||||
[&](const FileIngestionMethod & fim) {
|
||||
return fim;
|
||||
}
|
||||
}, raw);
|
||||
}
|
||||
|
||||
std::string ContentAddress::render() const
|
||||
{
|
||||
return std::visit(overloaded {
|
||||
|
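A short caller-side sketch of the new accessor, assuming a ContentAddressMethod value as in the code above:

    // Sketch: text and flat ingestion both hash the plain file contents, so a
    // caller only needs to distinguish Flat from Recursive at the file level.
    ContentAddressMethod method { TextIngestionMethod {} };
    switch (method.getFileIngestionMethod()) {
    case FileIngestionMethod::Flat:
        /* hash the file contents as-is */ break;
    case FileIngestionMethod::Recursive:
        /* hash the NAR serialisation */ break;
    }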
@ -79,7 +91,7 @@ static std::pair<ContentAddressMethod, HashAlgorithm> parseContentAddressMethodP
|
|||
prefix = *optPrefix;
|
||||
}
|
||||
|
||||
auto parseHashType_ = [&](){
|
||||
auto parseHashAlgorithm_ = [&](){
|
||||
auto hashTypeRaw = splitPrefixTo(rest, ':');
|
||||
if (!hashTypeRaw)
|
||||
throw UsageError("content address hash must be in form '<algo>:<hash>', but found: %s", wholeInput);
|
||||
|
@ -90,7 +102,7 @@ static std::pair<ContentAddressMethod, HashAlgorithm> parseContentAddressMethodP
|
|||
// Switch on prefix
|
||||
if (prefix == "text") {
|
||||
// No parsing of the ingestion method, "text" only supports flat.
|
||||
HashAlgorithm hashAlgo = parseHashType_();
|
||||
HashAlgorithm hashAlgo = parseHashAlgorithm_();
|
||||
return {
|
||||
TextIngestionMethod {},
|
||||
std::move(hashAlgo),
|
||||
|
@ -100,7 +112,7 @@ static std::pair<ContentAddressMethod, HashAlgorithm> parseContentAddressMethodP
|
|||
auto method = FileIngestionMethod::Flat;
|
||||
if (splitPrefix(rest, "r:"))
|
||||
method = FileIngestionMethod::Recursive;
|
||||
HashAlgorithm hashAlgo = parseHashType_();
|
||||
HashAlgorithm hashAlgo = parseHashAlgorithm_();
|
||||
return {
|
||||
std::move(method),
|
||||
std::move(hashAlgo),
|
||||
|
@ -176,13 +188,13 @@ ContentAddressWithReferences ContentAddressWithReferences::withoutRefs(const Con
|
|||
}, ca.method.raw);
|
||||
}
|
||||
|
||||
std::optional<ContentAddressWithReferences> ContentAddressWithReferences::fromPartsOpt(
|
||||
ContentAddressMethod method, Hash hash, StoreReferences refs) noexcept
|
||||
ContentAddressWithReferences ContentAddressWithReferences::fromParts(
|
||||
ContentAddressMethod method, Hash hash, StoreReferences refs)
|
||||
{
|
||||
return std::visit(overloaded {
|
||||
[&](TextIngestionMethod _) -> std::optional<ContentAddressWithReferences> {
|
||||
[&](TextIngestionMethod _) -> ContentAddressWithReferences {
|
||||
if (refs.self)
|
||||
return std::nullopt;
|
||||
throw Error("self-reference not allowed with text hashing");
|
||||
return ContentAddressWithReferences {
|
||||
TextInfo {
|
||||
.hash = std::move(hash),
|
||||
|
@ -190,7 +202,7 @@ std::optional<ContentAddressWithReferences> ContentAddressWithReferences::fromPa
|
|||
}
|
||||
};
|
||||
},
|
||||
[&](FileIngestionMethod m2) -> std::optional<ContentAddressWithReferences> {
|
||||
[&](FileIngestionMethod m2) -> ContentAddressWithReferences {
|
||||
return ContentAddressWithReferences {
|
||||
FixedOutputInfo {
|
||||
.method = m2,
|
||||
|
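A hedged sketch of the new error behaviour of fromParts (the hash value and reference set below are placeholders):

    // Sketch: fromParts now throws instead of returning std::optional, so the
    // invalid self-reference-with-text-hashing case surfaces as an Error.
    try {
        auto ca = ContentAddressWithReferences::fromParts(
            TextIngestionMethod {},
            hash,                                           // placeholder Hash
            StoreReferences { .others = {}, .self = true });
    } catch (Error &) {
        // "self-reference not allowed with text hashing"
    }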
|
|
@ -4,6 +4,7 @@
|
|||
#include <variant>
|
||||
#include "hash.hh"
|
||||
#include "path.hh"
|
||||
#include "file-content-address.hh"
|
||||
#include "comparator.hh"
|
||||
#include "variant-wrapper.hh"
|
||||
|
||||
|
@ -31,22 +32,6 @@ namespace nix {
|
|||
*/
|
||||
struct TextIngestionMethod : std::monostate { };
|
||||
|
||||
/**
|
||||
* An enumeration of the main ways we can serialize file system
|
||||
* objects.
|
||||
*/
|
||||
enum struct FileIngestionMethod : uint8_t {
|
||||
/**
|
||||
* Flat-file hashing. Directly ingest the contents of a single file
|
||||
*/
|
||||
Flat = 0,
|
||||
/**
|
||||
* Recursive (or NAR) hashing. Serializes the file-system object in Nix
|
||||
* Archive format and ingest that
|
||||
*/
|
||||
Recursive = 1
|
||||
};
|
||||
|
||||
/**
|
||||
* Compute the prefix to the hash algorithm which indicates how the
|
||||
* files were ingested.
|
||||
|
@ -54,7 +39,7 @@ enum struct FileIngestionMethod : uint8_t {
|
|||
std::string makeFileIngestionPrefix(FileIngestionMethod m);
|
||||
|
||||
/**
|
||||
* An enumeration of all the ways we can serialize file system objects.
|
||||
* An enumeration of all the ways we can content-address store objects.
|
||||
*
|
||||
* Just the type of a content address. Combine with the hash itself, and
|
||||
* we have a `ContentAddress` as defined below. Combine that, in turn,
|
||||
|
@ -102,7 +87,15 @@ struct ContentAddressMethod
|
|||
*
|
||||
* The rough inverse of `parse()`.
|
||||
*/
|
||||
std::string render(HashAlgorithm ha) const;
|
||||
std::string render(HashAlgorithm ht) const;
|
||||
|
||||
/**
|
||||
* Get the underlying way to content-address file system objects.
|
||||
*
|
||||
* Different ways of hashing store objects may use the same method
|
||||
* for hashing file system objects.
|
||||
*/
|
||||
FileIngestionMethod getFileIngestionMethod() const;
|
||||
};
|
||||
|
||||
|
||||
|
@ -116,11 +109,11 @@ struct ContentAddressMethod
|
|||
* serialisation methods (flat file vs NAR). Thus, ‘ca’ has one of the
|
||||
* following forms:
|
||||
*
|
||||
* - ‘text:sha256:<sha256 hash of file contents>’: For paths
|
||||
* computed by Store::makeTextPath() / Store::addTextToStore().
|
||||
* - `TextIngestionMethod`:
|
||||
* ‘text:sha256:<sha256 hash of file contents>’
|
||||
*
|
||||
* - ‘fixed:<r?>:<ht>:<h>’: For paths computed by
|
||||
* Store::makeFixedOutputPath() / Store::addToStore().
|
||||
* - `FixedIngestionMethod`:
|
||||
* ‘fixed:<r?>:<hash type>:<hash of file contents>’
|
||||
*/
|
||||
struct ContentAddress
|
||||
{
|
||||
|
@ -266,11 +259,12 @@ struct ContentAddressWithReferences
|
|||
*
|
||||
* @param refs References to other store objects or oneself.
|
||||
*
|
||||
* Do note that not all combinations are supported; `nullopt` is
|
||||
* returns for invalid combinations.
|
||||
* @note Not all combinations are supported. This is a
|
||||
* *partial function* and exceptions will be thrown for invalid
|
||||
* combinations.
|
||||
*/
|
||||
static std::optional<ContentAddressWithReferences> fromPartsOpt(
|
||||
ContentAddressMethod method, Hash hash, StoreReferences refs) noexcept;
|
||||
static ContentAddressWithReferences fromParts(
|
||||
ContentAddressMethod method, Hash hash, StoreReferences refs);
|
||||
|
||||
ContentAddressMethod getMethod() const;
|
||||
|
||||
|
|
|
@ -403,22 +403,9 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
|
|||
auto [contentAddressMethod, hashAlgo_] = ContentAddressMethod::parse(camStr);
|
||||
auto hashAlgo = hashAlgo_; // work around clang bug
|
||||
FramedSource source(from);
|
||||
// TODO this is essentially RemoteStore::addCAToStore. Move it up to Store.
|
||||
return std::visit(overloaded {
|
||||
[&](const TextIngestionMethod &) {
|
||||
if (hashAlgo != HashAlgorithm::SHA256)
|
||||
throw UnimplementedError("When adding text-hashed data called '%s', only SHA-256 is supported but '%s' was given",
|
||||
name, printHashAlgo(hashAlgo));
|
||||
// We could stream this by changing Store
|
||||
std::string contents = source.drain();
|
||||
auto path = store->addTextToStore(name, contents, refs, repair);
|
||||
return store->queryPathInfo(path);
|
||||
},
|
||||
[&](const FileIngestionMethod & fim) {
|
||||
auto path = store->addToStoreFromDump(source, name, fim, hashAlgo, repair, refs);
|
||||
return store->queryPathInfo(path);
|
||||
},
|
||||
}, contentAddressMethod.raw);
|
||||
// TODO these two steps are essentially RemoteStore::addCAToStore. Move it up to Store.
|
||||
auto path = store->addToStoreFromDump(source, name, contentAddressMethod, hashAlgo, refs, repair);
|
||||
return store->queryPathInfo(path);
|
||||
}();
|
||||
logger->stopWork();
|
||||
|
||||
|
@ -454,7 +441,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
|
|||
eagerly consume the entire stream it's given, past the
|
||||
length of the Nar. */
|
||||
TeeSource savedNARSource(from, saved);
|
||||
NullParseSink sink; /* just parse the NAR */
|
||||
NullFileSystemObjectSink sink; /* just parse the NAR */
|
||||
parseDump(sink, savedNARSource);
|
||||
} else {
|
||||
/* Incrementally parse the NAR file, stripping the
|
||||
|
@ -496,7 +483,10 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
|
|||
std::string s = readString(from);
|
||||
auto refs = WorkerProto::Serialise<StorePathSet>::read(*store, rconn);
|
||||
logger->startWork();
|
||||
auto path = store->addTextToStore(suffix, s, refs, NoRepair);
|
||||
auto path = ({
|
||||
StringSource source { s };
|
||||
store->addToStoreFromDump(source, suffix, TextIngestionMethod {}, HashAlgorithm::SHA256, refs, NoRepair);
|
||||
});
|
||||
logger->stopWork();
|
||||
to << store->printStorePath(path);
|
||||
break;
|
||||
|
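The same migration pattern, written out as a stand-alone sketch (store, s, name and refs are assumed to be in scope):

    // Sketch: what used to be addTextToStore(name, s, refs, repair) becomes an
    // in-memory dump added with a text ingestion method.
    StringSource source { s };
    auto path = store->addToStoreFromDump(
        source, name,
        TextIngestionMethod {}, HashAlgorithm::SHA256,
        refs, NoRepair);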
@ -923,7 +913,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
|
|||
source = std::make_unique<TunnelSource>(from, to);
|
||||
else {
|
||||
TeeSource tee { from, saved };
|
||||
NullParseSink ether;
|
||||
NullFileSystemObjectSink ether;
|
||||
parseDump(ether, tee);
|
||||
source = std::make_unique<StringSource>(saved.s);
|
||||
}
|
||||
|
|
|
@ -2,6 +2,7 @@
|
|||
#include "downstream-placeholder.hh"
|
||||
#include "store-api.hh"
|
||||
#include "globals.hh"
|
||||
#include "types.hh"
|
||||
#include "util.hh"
|
||||
#include "split.hh"
|
||||
#include "common-protocol.hh"
|
||||
|
@ -109,17 +110,17 @@ bool DerivationType::isSandboxed() const
|
|||
}
|
||||
|
||||
|
||||
bool DerivationType::isPure() const
|
||||
bool DerivationType::isImpure() const
|
||||
{
|
||||
return std::visit(overloaded {
|
||||
[](const InputAddressed & ia) {
|
||||
return true;
|
||||
return false;
|
||||
},
|
||||
[](const ContentAddressed & ca) {
|
||||
return true;
|
||||
return false;
|
||||
},
|
||||
[](const Impure &) {
|
||||
return false;
|
||||
return true;
|
||||
},
|
||||
}, raw);
|
||||
}
|
||||
|
@ -143,36 +144,89 @@ StorePath writeDerivation(Store & store,
|
|||
auto suffix = std::string(drv.name) + drvExtension;
|
||||
auto contents = drv.unparse(store, false);
|
||||
return readOnly || settings.readOnlyMode
|
||||
? store.computeStorePathForText(suffix, contents, references)
|
||||
: store.addTextToStore(suffix, contents, references, repair);
|
||||
? store.makeFixedOutputPathFromCA(suffix, TextInfo {
|
||||
.hash = hashString(HashAlgorithm::SHA256, contents),
|
||||
.references = std::move(references),
|
||||
})
|
||||
: ({
|
||||
StringSource s { contents };
|
||||
store.addToStoreFromDump(s, suffix, TextIngestionMethod {}, HashAlgorithm::SHA256, references, repair);
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
namespace {
|
||||
/**
|
||||
* This mimics std::istream to some extent. We use this much smaller implementation
|
||||
* instead of plain istreams because the sentry object overhead is too high.
|
||||
*/
|
||||
struct StringViewStream {
|
||||
std::string_view remaining;
|
||||
|
||||
int peek() const {
|
||||
return remaining.empty() ? EOF : remaining[0];
|
||||
}
|
||||
|
||||
int get() {
|
||||
if (remaining.empty()) return EOF;
|
||||
char c = remaining[0];
|
||||
remaining.remove_prefix(1);
|
||||
return c;
|
||||
}
|
||||
};
|
||||
|
||||
constexpr struct Escapes {
|
||||
char map[256];
|
||||
constexpr Escapes() {
|
||||
for (int i = 0; i < 256; i++) map[i] = (char) (unsigned char) i;
|
||||
map[(int) (unsigned char) 'n'] = '\n';
|
||||
map[(int) (unsigned char) 'r'] = '\r';
|
||||
map[(int) (unsigned char) 't'] = '\t';
|
||||
}
|
||||
char operator[](char c) const { return map[(unsigned char) c]; }
|
||||
} escapes;
|
||||
}
|
||||
|
||||
|
||||
/* Read string `s' from stream `str'. */
|
||||
static void expect(std::istream & str, std::string_view s)
|
||||
static void expect(StringViewStream & str, std::string_view s)
|
||||
{
|
||||
for (auto & c : s) {
|
||||
if (str.get() != c)
|
||||
throw FormatError("expected string '%1%'", s);
|
||||
}
|
||||
if (!str.remaining.starts_with(s))
|
||||
throw FormatError("expected string '%1%'", s);
|
||||
str.remaining.remove_prefix(s.size());
|
||||
}
|
||||
|
||||
|
||||
/* Read a C-style string from stream `str'. */
|
||||
static std::string parseString(std::istream & str)
|
||||
static BackedStringView parseString(StringViewStream & str)
|
||||
{
|
||||
std::string res;
|
||||
expect(str, "\"");
|
||||
int c;
|
||||
while ((c = str.get()) != '"')
|
||||
if (c == '\\') {
|
||||
c = str.get();
|
||||
if (c == 'n') res += '\n';
|
||||
else if (c == 'r') res += '\r';
|
||||
else if (c == 't') res += '\t';
|
||||
else res += c;
|
||||
auto c = str.remaining.begin(), end = str.remaining.end();
|
||||
bool escaped = false;
|
||||
for (; c != end && *c != '"'; c++) {
|
||||
if (*c == '\\') {
|
||||
c++;
|
||||
if (c == end)
|
||||
throw FormatError("unterminated string in derivation");
|
||||
escaped = true;
|
||||
}
|
||||
else res += c;
|
||||
}
|
||||
|
||||
const auto contentLen = c - str.remaining.begin();
|
||||
const auto content = str.remaining.substr(0, contentLen);
|
||||
str.remaining.remove_prefix(contentLen + 1);
|
||||
|
||||
if (!escaped)
|
||||
return content;
|
||||
|
||||
std::string res;
|
||||
res.reserve(content.size());
|
||||
for (c = content.begin(), end = content.end(); c != end; c++)
|
||||
if (*c == '\\') {
|
||||
c++;
|
||||
res += escapes[*c];
|
||||
}
|
||||
else res += *c;
|
||||
return res;
|
||||
}
|
||||
|
||||
|
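A minimal usage sketch of the new view-based parser input (the input literal is illustrative, not a valid derivation):

    // Sketch: peek()/get() mirror the small std::istream subset the ATerm
    // parser needs, without istream's per-operation sentry overhead.
    std::string_view input = "Derive([...])";   // illustrative only
    StringViewStream str { input };
    if (str.peek() == 'D')
        expect(str, "D");   // consumes the prefix or throws FormatError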
@ -181,15 +235,15 @@ static void validatePath(std::string_view s) {
|
|||
throw FormatError("bad path '%1%' in derivation", s);
|
||||
}
|
||||
|
||||
static Path parsePath(std::istream & str)
|
||||
static BackedStringView parsePath(StringViewStream & str)
|
||||
{
|
||||
auto s = parseString(str);
|
||||
validatePath(s);
|
||||
validatePath(*s);
|
||||
return s;
|
||||
}
|
||||
|
||||
|
||||
static bool endOfList(std::istream & str)
|
||||
static bool endOfList(StringViewStream & str)
|
||||
{
|
||||
if (str.peek() == ',') {
|
||||
str.get();
|
||||
|
@ -203,12 +257,12 @@ static bool endOfList(std::istream & str)
|
|||
}
|
||||
|
||||
|
||||
static StringSet parseStrings(std::istream & str, bool arePaths)
|
||||
static StringSet parseStrings(StringViewStream & str, bool arePaths)
|
||||
{
|
||||
StringSet res;
|
||||
expect(str, "[");
|
||||
while (!endOfList(str))
|
||||
res.insert(arePaths ? parsePath(str) : parseString(str));
|
||||
res.insert((arePaths ? parsePath(str) : parseString(str)).toOwned());
|
||||
return res;
|
||||
}
|
||||
|
||||
|
@ -261,7 +315,7 @@ static DerivationOutput parseDerivationOutput(
|
|||
}
|
||||
|
||||
static DerivationOutput parseDerivationOutput(
|
||||
const StoreDirConfig & store, std::istringstream & str,
|
||||
const StoreDirConfig & store, StringViewStream & str,
|
||||
const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings)
|
||||
{
|
||||
expect(str, ","); const auto pathS = parseString(str);
|
||||
|
@ -269,7 +323,7 @@ static DerivationOutput parseDerivationOutput(
|
|||
expect(str, ","); const auto hash = parseString(str);
|
||||
expect(str, ")");
|
||||
|
||||
return parseDerivationOutput(store, pathS, hashAlgo, hash, xpSettings);
|
||||
return parseDerivationOutput(store, *pathS, *hashAlgo, *hash, xpSettings);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -291,7 +345,7 @@ enum struct DerivationATermVersion {
|
|||
|
||||
static DerivedPathMap<StringSet>::ChildNode parseDerivedPathMapNode(
|
||||
const StoreDirConfig & store,
|
||||
std::istringstream & str,
|
||||
StringViewStream & str,
|
||||
DerivationATermVersion version)
|
||||
{
|
||||
DerivedPathMap<StringSet>::ChildNode node;
|
||||
|
@ -317,7 +371,7 @@ static DerivedPathMap<StringSet>::ChildNode parseDerivedPathMapNode(
|
|||
expect(str, ",[");
|
||||
while (!endOfList(str)) {
|
||||
expect(str, "(");
|
||||
auto outputName = parseString(str);
|
||||
auto outputName = parseString(str).toOwned();
|
||||
expect(str, ",");
|
||||
node.childMap.insert_or_assign(outputName, parseDerivedPathMapNode(store, str, version));
|
||||
expect(str, ")");
|
||||
|
@ -343,7 +397,7 @@ Derivation parseDerivation(
|
|||
Derivation drv;
|
||||
drv.name = name;
|
||||
|
||||
std::istringstream str(std::move(s));
|
||||
StringViewStream str{s};
|
||||
expect(str, "D");
|
||||
DerivationATermVersion version;
|
||||
switch (str.peek()) {
|
||||
|
@ -354,12 +408,12 @@ Derivation parseDerivation(
|
|||
case 'r': {
|
||||
expect(str, "rvWithVersion(");
|
||||
auto versionS = parseString(str);
|
||||
if (versionS == "xp-dyn-drv") {
|
||||
if (*versionS == "xp-dyn-drv") {
|
||||
// Only version we have so far
|
||||
version = DerivationATermVersion::DynamicDerivations;
|
||||
xpSettings.require(Xp::DynamicDerivations);
|
||||
} else {
|
||||
throw FormatError("Unknown derivation ATerm format version '%s'", versionS);
|
||||
throw FormatError("Unknown derivation ATerm format version '%s'", *versionS);
|
||||
}
|
||||
expect(str, ",");
|
||||
break;
|
||||
|
@ -371,7 +425,7 @@ Derivation parseDerivation(
|
|||
/* Parse the list of outputs. */
|
||||
expect(str, "[");
|
||||
while (!endOfList(str)) {
|
||||
expect(str, "("); std::string id = parseString(str);
|
||||
expect(str, "("); std::string id = parseString(str).toOwned();
|
||||
auto output = parseDerivationOutput(store, str, xpSettings);
|
||||
drv.outputs.emplace(std::move(id), std::move(output));
|
||||
}
|
||||
|
@ -380,28 +434,28 @@ Derivation parseDerivation(
|
|||
expect(str, ",[");
|
||||
while (!endOfList(str)) {
|
||||
expect(str, "(");
|
||||
Path drvPath = parsePath(str);
|
||||
auto drvPath = parsePath(str);
|
||||
expect(str, ",");
|
||||
drv.inputDrvs.map.insert_or_assign(store.parseStorePath(drvPath), parseDerivedPathMapNode(store, str, version));
|
||||
drv.inputDrvs.map.insert_or_assign(store.parseStorePath(*drvPath), parseDerivedPathMapNode(store, str, version));
|
||||
expect(str, ")");
|
||||
}
|
||||
|
||||
expect(str, ","); drv.inputSrcs = store.parseStorePathSet(parseStrings(str, true));
|
||||
expect(str, ","); drv.platform = parseString(str);
|
||||
expect(str, ","); drv.builder = parseString(str);
|
||||
expect(str, ","); drv.platform = parseString(str).toOwned();
|
||||
expect(str, ","); drv.builder = parseString(str).toOwned();
|
||||
|
||||
/* Parse the builder arguments. */
|
||||
expect(str, ",[");
|
||||
while (!endOfList(str))
|
||||
drv.args.push_back(parseString(str));
|
||||
drv.args.push_back(parseString(str).toOwned());
|
||||
|
||||
/* Parse the environment variables. */
|
||||
expect(str, ",[");
|
||||
while (!endOfList(str)) {
|
||||
expect(str, "("); auto name = parseString(str);
|
||||
expect(str, ","); auto value = parseString(str);
|
||||
expect(str, "("); auto name = parseString(str).toOwned();
|
||||
expect(str, ","); auto value = parseString(str).toOwned();
|
||||
expect(str, ")");
|
||||
drv.env[name] = value;
|
||||
drv.env.insert_or_assign(std::move(name), std::move(value));
|
||||
}
|
||||
|
||||
expect(str, ")");
|
||||
|
@ -786,7 +840,7 @@ DrvHash hashDerivationModulo(Store & store, const Derivation & drv, bool maskOut
|
|||
};
|
||||
}
|
||||
|
||||
if (!type.isPure()) {
|
||||
if (type.isImpure()) {
|
||||
std::map<std::string, Hash> outputHashes;
|
||||
for (const auto & [outputName, _] : drv.outputs)
|
||||
outputHashes.insert_or_assign(outputName, impureOutputHash);
|
||||
|
|
|
@ -253,12 +253,17 @@ struct DerivationType {
|
|||
bool isSandboxed() const;
|
||||
|
||||
/**
|
||||
* Whether the derivation is expected to produce the same result
|
||||
* every time, and therefore it only needs to be built once. This is
|
||||
* only false for derivations that have the attribute '__impure =
|
||||
* Whether the derivation is expected to produce a different result
|
||||
* every time, and therefore it needs to be rebuilt every time. This is
|
||||
* only true for derivations that have the attribute '__impure =
|
||||
* true'.
|
||||
*
|
||||
* Non-impure derivations can still behave impurely, to the degree permitted
|
||||
* by the sandbox. Hence why this method isn't `isPure`: impure derivations
|
||||
* are not the negation of pure derivations. Purity cannot be ascertained
|
||||
* except by rather heavy tools.
|
||||
*/
|
||||
bool isPure() const;
|
||||
bool isImpure() const;
|
||||
|
||||
/**
|
||||
* Does the derivation know its own output paths?
|
||||
|
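A call-site sketch of the renamed predicate (assuming a DerivationType value named type, as used elsewhere in this change):

    // Sketch: former `!type.isPure()` checks now read positively.
    if (type.isImpure()) {
        // derivation carries __impure = true; outputs are never assumed stable
    }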
|
|
@ -12,9 +12,9 @@ namespace nix {
|
|||
bool MY_TYPE ::operator COMPARATOR (const MY_TYPE & other) const \
|
||||
{ \
|
||||
const MY_TYPE* me = this; \
|
||||
auto fields1 = std::make_tuple<const CHILD_TYPE &, const FIELD_TYPE &>(*me->drvPath, me->FIELD); \
|
||||
auto fields1 = std::tie(*me->drvPath, me->FIELD); \
|
||||
me = &other; \
|
||||
auto fields2 = std::make_tuple<const CHILD_TYPE &, const FIELD_TYPE &>(*me->drvPath, me->FIELD); \
|
||||
auto fields2 = std::tie(*me->drvPath, me->FIELD); \
|
||||
return fields1 COMPARATOR fields2; \
|
||||
}
|
||||
#define CMP(CHILD_TYPE, MY_TYPE, FIELD) \
|
||||
|
@ -22,13 +22,9 @@ namespace nix {
|
|||
CMP_ONE(CHILD_TYPE, MY_TYPE, FIELD, !=) \
|
||||
CMP_ONE(CHILD_TYPE, MY_TYPE, FIELD, <)
|
||||
|
||||
#define FIELD_TYPE std::string
|
||||
CMP(SingleDerivedPath, SingleDerivedPathBuilt, output)
|
||||
#undef FIELD_TYPE
|
||||
|
||||
#define FIELD_TYPE OutputsSpec
|
||||
CMP(SingleDerivedPath, DerivedPathBuilt, outputs)
|
||||
#undef FIELD_TYPE
|
||||
|
||||
#undef CMP
|
||||
#undef CMP_ONE
|
||||
|
|
|
@ -58,12 +58,14 @@ struct DummyStore : public virtual DummyStoreConfig, public virtual Store
|
|||
RepairFlag repair, CheckSigsFlag checkSigs) override
|
||||
{ unsupported("addToStore"); }
|
||||
|
||||
StorePath addTextToStore(
|
||||
virtual StorePath addToStoreFromDump(
|
||||
Source & dump,
|
||||
std::string_view name,
|
||||
std::string_view s,
|
||||
const StorePathSet & references,
|
||||
RepairFlag repair) override
|
||||
{ unsupported("addTextToStore"); }
|
||||
ContentAddressMethod method = FileIngestionMethod::Recursive,
|
||||
HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
|
||||
const StorePathSet & references = StorePathSet(),
|
||||
RepairFlag repair = NoRepair) override
|
||||
{ unsupported("addToStore"); }
|
||||
|
||||
void narFromPath(const StorePath & path, Sink & sink) override
|
||||
{ unsupported("narFromPath"); }
|
||||
|
|
|
@ -65,7 +65,7 @@ StorePaths Store::importPaths(Source & source, CheckSigsFlag checkSigs)
|
|||
/* Extract the NAR from the source. */
|
||||
StringSink saved;
|
||||
TeeSource tee { source, saved };
|
||||
NullParseSink ether;
|
||||
NullFileSystemObjectSink ether;
|
||||
parseDump(ether, tee);
|
||||
|
||||
uint32_t magic = readInt(source);
|
||||
|
|
|
@ -149,11 +149,12 @@ void LocalStore::addTempRoot(const StorePath & path)
|
|||
try {
|
||||
nix::connect(fdRootsSocket->get(), socketPath);
|
||||
} catch (SysError & e) {
|
||||
/* The garbage collector may have exited, so we need to
|
||||
restart. */
|
||||
if (e.errNo == ECONNREFUSED) {
|
||||
debug("GC socket connection refused");
|
||||
/* The garbage collector may have exited or not
|
||||
created the socket yet, so we need to restart. */
|
||||
if (e.errNo == ECONNREFUSED || e.errNo == ENOENT) {
|
||||
debug("GC socket connection refused: %s", e.msg());
|
||||
fdRootsSocket->close();
|
||||
std::this_thread::sleep_for(std::chrono::milliseconds(100));
|
||||
goto restart;
|
||||
}
|
||||
throw;
|
||||
|
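The retry above, restated as a loop-shaped sketch of the same control flow (the goto-based original is what actually ships; socketPath and the fd wrapper are assumed to be in scope):

    // Sketch: keep retrying while the collector's socket is refusing or absent.
    for (;;) {
        try {
            nix::connect(fdRootsSocket->get(), socketPath);
            break;
        } catch (SysError & e) {
            if (e.errNo != ECONNREFUSED && e.errNo != ENOENT) throw;
            debug("GC socket connection refused: %s", e.msg());
            fdRootsSocket->close();
            std::this_thread::sleep_for(std::chrono::milliseconds(100));
            // the socket fd is reopened before the next attempt (elided here)
        }
    }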
@ -413,7 +414,7 @@ void LocalStore::findRuntimeRoots(Roots & roots, bool censor)
|
|||
auto env_end = std::sregex_iterator{};
|
||||
for (auto i = std::sregex_iterator{envString.begin(), envString.end(), storePathRegex}; i != env_end; ++i)
|
||||
unchecked[i->str()].emplace(envFile);
|
||||
} catch (SysError & e) {
|
||||
} catch (SystemError & e) {
|
||||
if (errno == ENOENT || errno == EACCES || errno == ESRCH)
|
||||
continue;
|
||||
throw;
|
||||
|
@ -509,6 +510,11 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results)
|
|||
auto fdGCLock = openGCLock();
|
||||
FdLock gcLock(fdGCLock.get(), ltWrite, true, "waiting for the big garbage collector lock...");
|
||||
|
||||
/* Synchronisation point to test ENOENT handling in
|
||||
addTempRoot(), see tests/gc-non-blocking.sh. */
|
||||
if (auto p = getEnv("_NIX_TEST_GC_SYNC_1"))
|
||||
readFile(*p);
|
||||
|
||||
/* Start the server for receiving new roots. */
|
||||
auto socketPath = stateDir.get() + gcSocketPath;
|
||||
createDirs(dirOf(socketPath));
|
||||
|
@ -632,6 +638,10 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results)
|
|||
roots.insert(root.first);
|
||||
}
|
||||
|
||||
/* Synchronisation point for testing, see tests/functional/gc-non-blocking.sh. */
|
||||
if (auto p = getEnv("_NIX_TEST_GC_SYNC_2"))
|
||||
readFile(*p);
|
||||
|
||||
/* Helper function that deletes a path from the store and throws
|
||||
GCLimitReached if we've deleted enough garbage. */
|
||||
auto deleteFromStore = [&](std::string_view baseName)
|
||||
|
@ -779,10 +789,6 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results)
|
|||
}
|
||||
};
|
||||
|
||||
/* Synchronisation point for testing, see tests/functional/gc-concurrent.sh. */
|
||||
if (auto p = getEnv("_NIX_TEST_GC_SYNC"))
|
||||
readFile(*p);
|
||||
|
||||
/* Either delete all garbage paths, or just the specified
|
||||
paths (for gcDeleteSpecific). */
|
||||
if (options.action == GCOptions::gcDeleteSpecific) {
|
||||
|
|
|
@ -15,8 +15,6 @@
|
|||
|
||||
#include <nlohmann/json.hpp>
|
||||
|
||||
#include <sodium/core.h>
|
||||
|
||||
#ifdef __GLIBC__
|
||||
# include <gnu/lib-names.h>
|
||||
# include <nss.h>
|
||||
|
@ -120,7 +118,7 @@ void loadConfFile()
|
|||
try {
|
||||
std::string contents = readFile(path);
|
||||
globalConfig.applyConfig(contents, path);
|
||||
} catch (SysError &) { }
|
||||
} catch (SystemError &) { }
|
||||
};
|
||||
|
||||
applyConfigFile(settings.nixConfDir + "/nix.conf");
|
||||
|
@ -409,9 +407,6 @@ void initLibStore() {
|
|||
|
||||
initLibUtil();
|
||||
|
||||
if (sodium_init() == -1)
|
||||
throw Error("could not initialise libsodium");
|
||||
|
||||
loadConfFile();
|
||||
|
||||
preloadNSS();
|
||||
|
|
|
@ -144,20 +144,25 @@ public:
|
|||
*/
|
||||
bool verboseBuild = true;
|
||||
|
||||
Setting<size_t> logLines{this, 10, "log-lines",
|
||||
Setting<size_t> logLines{this, 25, "log-lines",
|
||||
"The number of lines of the tail of "
|
||||
"the log to show if a build fails."};
|
||||
|
||||
MaxBuildJobsSetting maxBuildJobs{
|
||||
this, 1, "max-jobs",
|
||||
R"(
|
||||
This option defines the maximum number of jobs that Nix will try to
|
||||
build in parallel. The default is `1`. The special value `auto`
|
||||
causes Nix to use the number of CPUs in your system. `0` is useful
|
||||
when using remote builders to prevent any local builds (except for
|
||||
`preferLocalBuild` derivation attribute which executes locally
|
||||
regardless). It can be overridden using the `--max-jobs` (`-j`)
|
||||
command line switch.
|
||||
Maximum number of jobs that Nix will try to build locally in parallel.
|
||||
|
||||
The special value `auto` causes Nix to use the number of CPUs in your system.
|
||||
Use `0` to disable local builds and directly use the remote machines specified in [`builders`](#conf-builders).
|
||||
This will not affect derivations that have [`preferLocalBuild = true`](@docroot@/language/advanced-attributes.md#adv-attr-preferLocalBuild), which are always built locally.
|
||||
|
||||
> **Note**
|
||||
>
|
||||
> The number of CPU cores to use for each build job is independently determined by the [`cores`](#conf-cores) setting.
|
||||
|
||||
<!-- TODO(@fricklerhandwerk): would be good to have those shorthands for common options as part of the specification -->
|
||||
The setting can be overridden using the `--max-jobs` (`-j`) command line switch.
|
||||
)",
|
||||
{"build-max-jobs"}};
|
||||
|
||||
|
@ -214,7 +219,11 @@ public:
|
|||
In general, you do not have to modify this setting.
|
||||
While you can force Nix to run a Darwin-specific `builder` executable on a Linux machine, the result would obviously be wrong.
|
||||
|
||||
This value is available in the Nix language as [`builtins.currentSystem`](@docroot@/language/builtin-constants.md#builtins-currentSystem).
|
||||
This value is available in the Nix language as
|
||||
[`builtins.currentSystem`](@docroot@/language/builtin-constants.md#builtins-currentSystem)
|
||||
if the
|
||||
[`eval-system`](#conf-eval-system)
|
||||
configuration option is set as the empty string.
|
||||
)"};
|
||||
|
||||
Setting<time_t> maxSilentTime{
|
||||
|
@ -268,21 +277,16 @@ public:
|
|||
Setting<bool> alwaysAllowSubstitutes{
|
||||
this, false, "always-allow-substitutes",
|
||||
R"(
|
||||
If set to `true`, Nix will ignore the `allowSubstitutes` attribute in
|
||||
derivations and always attempt to use available substituters.
|
||||
For more information on `allowSubstitutes`, see [the manual chapter on advanced attributes](../language/advanced-attributes.md).
|
||||
If set to `true`, Nix will ignore the [`allowSubstitutes`](@docroot@/language/advanced-attributes.md) attribute in derivations and always attempt to use [available substituters](#conf-substituters).
|
||||
)"};
|
||||
|
||||
Setting<bool> buildersUseSubstitutes{
|
||||
this, false, "builders-use-substitutes",
|
||||
R"(
|
||||
If set to `true`, Nix will instruct remote build machines to use
|
||||
their own binary substitutes if available. In practical terms, this
|
||||
means that remote hosts will fetch as many build dependencies as
|
||||
possible from their own substitutes (e.g, from `cache.nixos.org`),
|
||||
instead of waiting for this host to upload them all. This can
|
||||
drastically reduce build times if the network connection between
|
||||
this computer and the remote build host is slow.
|
||||
If set to `true`, Nix will instruct [remote build machines](#conf-builders) to use their own [`substituters`](#conf-substituters) if available.
|
||||
|
||||
This means that remote build hosts will fetch as many dependencies as possible from their own substituters (e.g., from `cache.nixos.org`) instead of waiting for the local machine to upload them all.
|
||||
This can drastically reduce build times if the network connection between the local machine and the remote build host is slow.
|
||||
)"};
|
||||
|
||||
Setting<off_t> reservedSize{this, 8 * 1024 * 1024, "gc-reserved-space",
|
||||
|
@ -632,11 +636,11 @@ public:
|
|||
|
||||
At least one of the following conditions must be met
|
||||
for Nix to accept copying a store object from another
|
||||
Nix store (such as a substituter):
|
||||
Nix store (such as a [substituter](#conf-substituters)):
|
||||
|
||||
- the store object has been signed using a key in the trusted keys list
|
||||
- the [`require-sigs`](#conf-require-sigs) option has been set to `false`
|
||||
- the store object is [output-addressed](@docroot@/glossary.md#gloss-output-addressed-store-object)
|
||||
- the store object is [content-addressed](@docroot@/glossary.md#gloss-content-addressed-store-object)
|
||||
)",
|
||||
{"binary-cache-public-keys"}};
|
||||
|
||||
|
@ -947,7 +951,9 @@ public:
|
|||
may be useful in certain scenarios (e.g. to spin up containers or
|
||||
set up userspace network interfaces in tests).
|
||||
)"};
|
||||
#endif
|
||||
|
||||
#if HAVE_ACL_SUPPORT
|
||||
Setting<StringSet> ignoredAcls{
|
||||
this, {"security.selinux", "system.nfs4_acl", "security.csm"}, "ignored-acls",
|
||||
R"(
|
||||
|
|
31
src/libstore/keys.cc
Normal file
|
@ -0,0 +1,31 @@
|
|||
#include "file-system.hh"
|
||||
#include "globals.hh"
|
||||
#include "keys.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
PublicKeys getDefaultPublicKeys()
|
||||
{
|
||||
PublicKeys publicKeys;
|
||||
|
||||
// FIXME: filter duplicates
|
||||
|
||||
for (auto s : settings.trustedPublicKeys.get()) {
|
||||
PublicKey key(s);
|
||||
publicKeys.emplace(key.name, key);
|
||||
}
|
||||
|
||||
for (auto secretKeyFile : settings.secretKeyFiles.get()) {
|
||||
try {
|
||||
SecretKey secretKey(readFile(secretKeyFile));
|
||||
publicKeys.emplace(secretKey.name, secretKey.toPublicKey());
|
||||
} catch (SystemError & e) {
|
||||
/* Ignore unreadable key files. That's normal in a
|
||||
multi-user installation. */
|
||||
}
|
||||
}
|
||||
|
||||
return publicKeys;
|
||||
}
|
||||
|
||||
}
|
10
src/libstore/keys.hh
Normal file
|
@ -0,0 +1,10 @@
|
|||
#pragma once
|
||||
///@file
|
||||
|
||||
#include "signature/local-keys.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
PublicKeys getDefaultPublicKeys();
|
||||
|
||||
}
|
|
@ -22,45 +22,10 @@ std::string LegacySSHStoreConfig::doc()
|
|||
}
|
||||
|
||||
|
||||
struct LegacySSHStore::Connection
|
||||
struct LegacySSHStore::Connection : public ServeProto::BasicClientConnection
|
||||
{
|
||||
std::unique_ptr<SSHMaster::Connection> sshConn;
|
||||
FdSink to;
|
||||
FdSource from;
|
||||
ServeProto::Version remoteVersion;
|
||||
bool good = true;
|
||||
|
||||
/**
|
||||
* Coercion to `ServeProto::ReadConn`. This makes it easy to use the
|
||||
* factored out serve protocol serializers with a
|
||||
* `LegacySSHStore::Connection`.
|
||||
*
|
||||
* The serve protocol connection types are unidirectional, unlike
|
||||
* this type.
|
||||
*/
|
||||
operator ServeProto::ReadConn ()
|
||||
{
|
||||
return ServeProto::ReadConn {
|
||||
.from = from,
|
||||
.version = remoteVersion,
|
||||
};
|
||||
}
|
||||
|
||||
/*
|
||||
* Coercion to `ServeProto::WriteConn`. This makes it easy to use the
|
||||
* factored out serve protocol serializers with a
|
||||
* `LegacySSHStore::Connection`.
|
||||
*
|
||||
* The serve protocol connection types are unidirectional, unlike
|
||||
* this type.
|
||||
*/
|
||||
operator ServeProto::WriteConn ()
|
||||
{
|
||||
return ServeProto::WriteConn {
|
||||
.to = to,
|
||||
.version = remoteVersion,
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
|
@ -90,34 +55,31 @@ LegacySSHStore::LegacySSHStore(const std::string & scheme, const std::string & h
|
|||
ref<LegacySSHStore::Connection> LegacySSHStore::openConnection()
|
||||
{
|
||||
auto conn = make_ref<Connection>();
|
||||
conn->sshConn = master.startCommand(
|
||||
fmt("%s --serve --write", remoteProgram)
|
||||
+ (remoteStore.get() == "" ? "" : " --store " + shellEscape(remoteStore.get())));
|
||||
Strings command = remoteProgram.get();
|
||||
command.push_back("--serve");
|
||||
command.push_back("--write");
|
||||
if (remoteStore.get() != "") {
|
||||
command.push_back("--store");
|
||||
command.push_back(remoteStore.get());
|
||||
}
|
||||
conn->sshConn = master.startCommand(std::move(command));
|
||||
conn->to = FdSink(conn->sshConn->in.get());
|
||||
conn->from = FdSource(conn->sshConn->out.get());
|
||||
|
||||
StringSink saved;
|
||||
TeeSource tee(conn->from, saved);
|
||||
try {
|
||||
conn->to << SERVE_MAGIC_1 << SERVE_PROTOCOL_VERSION;
|
||||
conn->to.flush();
|
||||
|
||||
StringSink saved;
|
||||
try {
|
||||
TeeSource tee(conn->from, saved);
|
||||
unsigned int magic = readInt(tee);
|
||||
if (magic != SERVE_MAGIC_2)
|
||||
throw Error("'nix-store --serve' protocol mismatch from '%s'", host);
|
||||
} catch (SerialisationError & e) {
|
||||
/* In case the other side is waiting for our input,
|
||||
close it. */
|
||||
conn->sshConn->in.close();
|
||||
auto msg = conn->from.drain();
|
||||
throw Error("'nix-store --serve' protocol mismatch from '%s', got '%s'",
|
||||
host, chomp(saved.s + msg));
|
||||
conn->remoteVersion = ServeProto::BasicClientConnection::handshake(
|
||||
conn->to, tee, SERVE_PROTOCOL_VERSION, host);
|
||||
} catch (SerialisationError & e) {
|
||||
// in.close(): Don't let the remote block on us not writing.
|
||||
conn->sshConn->in.close();
|
||||
{
|
||||
NullSink nullSink;
|
||||
conn->from.drainInto(nullSink);
|
||||
}
|
||||
conn->remoteVersion = readInt(conn->from);
|
||||
if (GET_PROTOCOL_MAJOR(conn->remoteVersion) != 0x200)
|
||||
throw Error("unsupported 'nix-store --serve' protocol version on '%s'", host);
|
||||
|
||||
throw Error("'nix-store --serve' protocol mismatch from '%s', got '%s'",
|
||||
host, chomp(saved.s));
|
||||
} catch (EndOfFile & e) {
|
||||
throw Error("cannot connect to '%1%'", host);
|
||||
}
|
||||
|
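For orientation, the factored-out handshake performs roughly what the removed inline code did (sketch reconstructed from the lines deleted above, not the helper's exact body):

    // Sketch: exchange magic numbers, then agree on a serve protocol version.
    to << SERVE_MAGIC_1 << SERVE_PROTOCOL_VERSION;
    to.flush();
    if (readInt(from) != SERVE_MAGIC_2)
        throw Error("'nix-store --serve' protocol mismatch from '%s'", host);
    ServeProto::Version remoteVersion = readInt(from);
    if (GET_PROTOCOL_MAJOR(remoteVersion) != 0x200)
        throw Error("unsupported 'nix-store --serve' protocol version on '%s'", host);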
@ -232,16 +194,16 @@ void LegacySSHStore::narFromPath(const StorePath & path, Sink & sink)
|
|||
}
|
||||
|
||||
|
||||
void LegacySSHStore::putBuildSettings(Connection & conn)
|
||||
static ServeProto::BuildOptions buildSettings()
|
||||
{
|
||||
ServeProto::write(*this, conn, ServeProto::BuildOptions {
|
||||
return {
|
||||
.maxSilentTime = settings.maxSilentTime,
|
||||
.buildTimeout = settings.buildTimeout,
|
||||
.maxLogSize = settings.maxLogSize,
|
||||
.nrRepeats = 0, // buildRepeat hasn't worked for ages anyway
|
||||
.enforceDeterminism = 0,
|
||||
.keepFailed = settings.keepFailed,
|
||||
});
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
|
@ -250,14 +212,7 @@ BuildResult LegacySSHStore::buildDerivation(const StorePath & drvPath, const Bas
|
|||
{
|
||||
auto conn(connections->get());
|
||||
|
||||
conn->to
|
||||
<< ServeProto::Command::BuildDerivation
|
||||
<< printStorePath(drvPath);
|
||||
writeDerivation(conn->to, *this, drv);
|
||||
|
||||
putBuildSettings(*conn);
|
||||
|
||||
conn->to.flush();
|
||||
conn->putBuildDerivationRequest(*this, drvPath, drv, buildSettings());
|
||||
|
||||
return ServeProto::Serialise<BuildResult>::read(*this, *conn);
|
||||
}
|
||||
|
@ -288,7 +243,7 @@ void LegacySSHStore::buildPaths(const std::vector<DerivedPath> & drvPaths, Build
|
|||
}
|
||||
conn->to << ss;
|
||||
|
||||
putBuildSettings(*conn);
|
||||
ServeProto::write(*this, *conn, buildSettings());
|
||||
|
||||
conn->to.flush();
|
||||
|
||||
|
@ -328,15 +283,8 @@ StorePathSet LegacySSHStore::queryValidPaths(const StorePathSet & paths,
|
|||
SubstituteFlag maybeSubstitute)
|
||||
{
|
||||
auto conn(connections->get());
|
||||
|
||||
conn->to
|
||||
<< ServeProto::Command::QueryValidPaths
|
||||
<< false // lock
|
||||
<< maybeSubstitute;
|
||||
ServeProto::write(*this, *conn, paths);
|
||||
conn->to.flush();
|
||||
|
||||
return ServeProto::Serialise<StorePathSet>::read(*this, *conn);
|
||||
return conn->queryValidPaths(*this,
|
||||
false, paths, maybeSubstitute);
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -13,7 +13,7 @@ struct LegacySSHStoreConfig : virtual CommonSSHStoreConfig
|
|||
{
|
||||
using CommonSSHStoreConfig::CommonSSHStoreConfig;
|
||||
|
||||
const Setting<Path> remoteProgram{this, "nix-store", "remote-program",
|
||||
const Setting<Strings> remoteProgram{this, {"nix-store"}, "remote-program",
|
||||
"Path to the `nix-store` executable on the remote machine."};
|
||||
|
||||
const Setting<int> maxConnections{this, 1, "max-connections",
|
||||
|
@ -59,25 +59,24 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor
|
|||
{ unsupported("queryPathFromHashPart"); }
|
||||
|
||||
StorePath addToStore(
|
||||
std::string_view name,
|
||||
const Path & srcPath,
|
||||
FileIngestionMethod method,
|
||||
HashAlgorithm hashAlgo,
|
||||
PathFilter & filter,
|
||||
RepairFlag repair,
|
||||
const StorePathSet & references) override
|
||||
std::string_view name,
|
||||
SourceAccessor & accessor,
|
||||
const CanonPath & srcPath,
|
||||
ContentAddressMethod method,
|
||||
HashAlgorithm hashAlgo,
|
||||
const StorePathSet & references,
|
||||
PathFilter & filter,
|
||||
RepairFlag repair) override
|
||||
{ unsupported("addToStore"); }
|
||||
|
||||
StorePath addTextToStore(
|
||||
virtual StorePath addToStoreFromDump(
|
||||
Source & dump,
|
||||
std::string_view name,
|
||||
std::string_view s,
|
||||
const StorePathSet & references,
|
||||
RepairFlag repair) override
|
||||
{ unsupported("addTextToStore"); }
|
||||
|
||||
private:
|
||||
|
||||
void putBuildSettings(Connection & conn);
|
||||
ContentAddressMethod method = FileIngestionMethod::Recursive,
|
||||
HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
|
||||
const StorePathSet & references = StorePathSet(),
|
||||
RepairFlag repair = NoRepair) override
|
||||
{ unsupported("addToStore"); }
|
||||
|
||||
public:
|
||||
|
||||
|
|
|
@ -13,11 +13,15 @@
|
|||
#include "compression.hh"
|
||||
#include "signals.hh"
|
||||
#include "posix-fs-canonicalise.hh"
|
||||
#include "posix-source-accessor.hh"
|
||||
#include "keys.hh"
|
||||
|
||||
#include <iostream>
|
||||
#include <algorithm>
|
||||
#include <cstring>
|
||||
|
||||
#include <memory>
|
||||
#include <new>
|
||||
#include <sys/types.h>
|
||||
#include <sys/stat.h>
|
||||
#include <sys/select.h>
|
||||
|
@ -272,7 +276,7 @@ LocalStore::LocalStore(const Params & params)
|
|||
[[gnu::unused]] auto res2 = ftruncate(fd.get(), settings.reservedSize);
|
||||
}
|
||||
}
|
||||
} catch (SysError & e) { /* don't care about errors */
|
||||
} catch (SystemError & e) { /* don't care about errors */
|
||||
}
|
||||
|
||||
/* Acquire the big fat lock in shared mode to make sure that no
|
||||
|
@ -1050,8 +1054,12 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
|
|||
bool narRead = false;
|
||||
Finally cleanup = [&]() {
|
||||
if (!narRead) {
|
||||
NullParseSink sink;
|
||||
parseDump(sink, source);
|
||||
NullFileSystemObjectSink sink;
|
||||
try {
|
||||
parseDump(sink, source);
|
||||
} catch (...) {
|
||||
ignoreException();
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -1094,11 +1102,22 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
|
|||
|
||||
if (info.ca) {
|
||||
auto & specified = *info.ca;
|
||||
auto actualHash = hashCAPath(
|
||||
specified.method,
|
||||
specified.hash.algo,
|
||||
info.path
|
||||
);
|
||||
auto actualHash = ({
|
||||
HashModuloSink caSink {
|
||||
specified.hash.algo,
|
||||
std::string { info.path.hashPart() },
|
||||
};
|
||||
PosixSourceAccessor accessor;
|
||||
dumpPath(
|
||||
*getFSAccessor(false),
|
||||
CanonPath { printStorePath(info.path) },
|
||||
caSink,
|
||||
specified.method.getFileIngestionMethod());
|
||||
ContentAddress {
|
||||
.method = specified.method,
|
||||
.hash = caSink.finish().first,
|
||||
};
|
||||
});
|
||||
if (specified.hash != actualHash.hash) {
|
||||
throw Error("ca hash mismatch importing path '%s';\n specified: %s\n got: %s",
|
||||
printStorePath(info.path),
|
||||
|
@ -1121,8 +1140,13 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
|
|||
}
|
||||
|
||||
|
||||
StorePath LocalStore::addToStoreFromDump(Source & source0, std::string_view name,
|
||||
FileIngestionMethod method, HashAlgorithm hashAlgo, RepairFlag repair, const StorePathSet & references)
|
||||
StorePath LocalStore::addToStoreFromDump(
|
||||
Source & source0,
|
||||
std::string_view name,
|
||||
ContentAddressMethod method,
|
||||
HashAlgorithm hashAlgo,
|
||||
const StorePathSet & references,
|
||||
RepairFlag repair)
|
||||
{
|
||||
/* For computing the store path. */
|
||||
auto hashSink = std::make_unique<HashSink>(hashAlgo);
|
||||
|
@ -1136,7 +1160,11 @@ StorePath LocalStore::addToStoreFromDump(Source & source0, std::string_view name
|
|||
path. */
|
||||
bool inMemory = false;
|
||||
|
||||
std::string dump;
|
||||
struct Free {
|
||||
void operator()(void* v) { free(v); }
|
||||
};
|
||||
std::unique_ptr<char, Free> dumpBuffer(nullptr);
|
||||
std::string_view dump;
|
||||
|
||||
/* Fill out buffer, and decide whether we are working strictly in
|
||||
memory based on whether we break out because the buffer is full
|
||||
|
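The ownership dance around realloc above can be isolated into a small self-contained sketch (names are illustrative):

    #include <cstdlib>
    #include <memory>
    #include <new>

    // Sketch: a malloc'd block owned via unique_ptr with a free()-based deleter,
    // grown with realloc; release() comes first, because realloc has already
    // disposed of (or reused) the old block.
    struct GrowableBuffer {
        struct Free { void operator()(void * p) { std::free(p); } };
        std::unique_ptr<char, Free> data;
        size_t size = 0;

        void grow(size_t want) {
            if (auto tmp = std::realloc(data.get(), size + want)) {
                data.release();
                data.reset((char *) tmp);
                size += want;
            } else
                throw std::bad_alloc();
        }
    };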
@ -1145,13 +1173,18 @@ StorePath LocalStore::addToStoreFromDump(Source & source0, std::string_view name
|
|||
auto oldSize = dump.size();
|
||||
constexpr size_t chunkSize = 65536;
|
||||
auto want = std::min(chunkSize, settings.narBufferSize - oldSize);
|
||||
dump.resize(oldSize + want);
|
||||
if (auto tmp = realloc(dumpBuffer.get(), oldSize + want)) {
|
||||
dumpBuffer.release();
|
||||
dumpBuffer.reset((char*) tmp);
|
||||
} else {
|
||||
throw std::bad_alloc();
|
||||
}
|
||||
auto got = 0;
|
||||
Finally cleanup([&]() {
|
||||
dump.resize(oldSize + got);
|
||||
dump = {dumpBuffer.get(), dump.size() + got};
|
||||
});
|
||||
try {
|
||||
got = source.read(dump.data() + oldSize, want);
|
||||
got = source.read(dumpBuffer.get() + oldSize, want);
|
||||
} catch (EndOfFile &) {
|
||||
inMemory = true;
|
||||
break;
|
||||
|
@ -1172,25 +1205,22 @@ StorePath LocalStore::addToStoreFromDump(Source & source0, std::string_view name
|
|||
delTempDir = std::make_unique<AutoDelete>(tempDir);
|
||||
tempPath = tempDir + "/x";
|
||||
|
||||
if (method == FileIngestionMethod::Recursive)
|
||||
restorePath(tempPath, bothSource);
|
||||
else
|
||||
writeFile(tempPath, bothSource);
|
||||
restorePath(tempPath, bothSource, method.getFileIngestionMethod());
|
||||
|
||||
dump.clear();
|
||||
dumpBuffer.reset();
|
||||
dump = {};
|
||||
}
|
||||
|
||||
auto [hash, size] = hashSink->finish();
|
||||
|
||||
ContentAddressWithReferences desc = FixedOutputInfo {
|
||||
.method = method,
|
||||
.hash = hash,
|
||||
.references = {
|
||||
auto desc = ContentAddressWithReferences::fromParts(
|
||||
method,
|
||||
hash,
|
||||
{
|
||||
.others = references,
|
||||
// caller is not capable of creating a self-reference, because this is content-addressed without modulus
|
||||
.self = false,
|
||||
},
|
||||
};
|
||||
});
|
||||
|
||||
auto dstPath = makeFixedOutputPathFromCA(name, desc);
|
||||
|
||||
|
@ -1213,11 +1243,8 @@ StorePath LocalStore::addToStoreFromDump(Source & source0, std::string_view name
|
|||
|
||||
if (inMemory) {
|
||||
StringSource dumpSource { dump };
|
||||
/* Restore from the NAR in memory. */
|
||||
if (method == FileIngestionMethod::Recursive)
|
||||
restorePath(realPath, dumpSource);
|
||||
else
|
||||
writeFile(realPath, dumpSource);
|
||||
/* Restore from the buffer in memory. */
|
||||
restorePath(realPath, dumpSource, method.getFileIngestionMethod());
|
||||
} else {
|
||||
/* Move the temporary path we restored above. */
|
||||
moveFile(tempPath, realPath);
|
||||
|
@ -1253,58 +1280,6 @@ StorePath LocalStore::addToStoreFromDump(Source & source0, std::string_view name
|
|||
}
|
||||
|
||||
|
||||
StorePath LocalStore::addTextToStore(
|
||||
std::string_view name,
|
||||
std::string_view s,
|
||||
const StorePathSet & references, RepairFlag repair)
|
||||
{
|
||||
auto hash = hashString(HashAlgorithm::SHA256, s);
|
||||
auto dstPath = makeTextPath(name, TextInfo {
|
||||
.hash = hash,
|
||||
.references = references,
|
||||
});
|
||||
|
||||
addTempRoot(dstPath);
|
||||
|
||||
if (repair || !isValidPath(dstPath)) {
|
||||
|
||||
auto realPath = Store::toRealPath(dstPath);
|
||||
|
||||
PathLocks outputLock({realPath});
|
||||
|
||||
if (repair || !isValidPath(dstPath)) {
|
||||
|
||||
deletePath(realPath);
|
||||
|
||||
autoGC();
|
||||
|
||||
writeFile(realPath, s);
|
||||
|
||||
canonicalisePathMetaData(realPath, {});
|
||||
|
||||
StringSink sink;
|
||||
dumpString(s, sink);
|
||||
auto narHash = hashString(HashAlgorithm::SHA256, sink.s);
|
||||
|
||||
optimisePath(realPath, repair);
|
||||
|
||||
ValidPathInfo info { dstPath, narHash };
|
||||
info.narSize = sink.s.size();
|
||||
info.references = references;
|
||||
info.ca = {
|
||||
.method = TextIngestionMethod {},
|
||||
.hash = hash,
|
||||
};
|
||||
registerValidPath(info);
|
||||
}
|
||||
|
||||
outputLock.setDeletion(true);
|
||||
}
|
||||
|
||||
return dstPath;
|
||||
}
|
||||
|
||||
|
||||
/* Create a temporary directory in the store that won't be
|
||||
garbage-collected until the returned FD is closed. */
|
||||
std::pair<Path, AutoCloseFD> LocalStore::createTempDirInStore()
|
||||
|
@ -1367,7 +1342,10 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
|
|||
for (auto & link : readDirectory(linksDir)) {
|
||||
printMsg(lvlTalkative, "checking contents of '%s'", link.name);
|
||||
Path linkPath = linksDir + "/" + link.name;
|
||||
std::string hash = hashPath(HashAlgorithm::SHA256, linkPath).first.to_string(HashFormat::Nix32, false);
|
||||
PosixSourceAccessor accessor;
|
||||
std::string hash = hashPath(
|
||||
accessor, CanonPath { linkPath },
|
||||
FileIngestionMethod::Recursive, HashAlgorithm::SHA256).first.to_string(HashFormat::Nix32, false);
|
||||
if (hash != link.name) {
|
||||
printError("link '%s' was modified! expected hash '%s', got '%s'",
|
||||
linkPath, link.name, hash);
|
||||
|
@ -1623,7 +1601,8 @@ void LocalStore::signRealisation(Realisation & realisation)
|
|||
|
||||
for (auto & secretKeyFile : secretKeyFiles.get()) {
|
||||
SecretKey secretKey(readFile(secretKeyFile));
|
||||
realisation.sign(secretKey);
|
||||
LocalSigner signer(std::move(secretKey));
|
||||
realisation.sign(signer);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1635,7 +1614,8 @@ void LocalStore::signPathInfo(ValidPathInfo & info)
|
|||
|
||||
for (auto & secretKeyFile : secretKeyFiles.get()) {
|
||||
SecretKey secretKey(readFile(secretKeyFile));
|
||||
info.sign(*this, secretKey);
|
||||
LocalSigner signer(std::move(secretKey));
|
||||
info.sign(*this, signer);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1714,42 +1694,6 @@ void LocalStore::queryRealisationUncached(const DrvOutput & id,
|
|||
}
|
||||
}
|
||||
|
||||
ContentAddress LocalStore::hashCAPath(
|
||||
const ContentAddressMethod & method, const HashAlgorithm & hashAlgo,
|
||||
const StorePath & path)
|
||||
{
|
||||
return hashCAPath(method, hashAlgo, Store::toRealPath(path), path.hashPart());
|
||||
}
|
||||
|
||||
ContentAddress LocalStore::hashCAPath(
|
||||
const ContentAddressMethod & method,
|
||||
const HashAlgorithm & hashAlgo,
|
||||
const Path & path,
|
||||
const std::string_view pathHash
|
||||
)
|
||||
{
|
||||
HashModuloSink caSink ( hashAlgo, std::string(pathHash) );
|
||||
std::visit(overloaded {
|
||||
[&](const TextIngestionMethod &) {
|
||||
readFile(path, caSink);
|
||||
},
|
||||
[&](const FileIngestionMethod & m2) {
|
||||
switch (m2) {
|
||||
case FileIngestionMethod::Recursive:
|
||||
dumpPath(path, caSink);
|
||||
break;
|
||||
case FileIngestionMethod::Flat:
|
||||
readFile(path, caSink);
|
||||
break;
|
||||
}
|
||||
},
|
||||
}, method.raw);
|
||||
return ContentAddress {
|
||||
.method = method,
|
||||
.hash = caSink.finish().first,
|
||||
};
|
||||
}
|
||||
|
||||
void LocalStore::addBuildLog(const StorePath & drvPath, std::string_view log)
|
||||
{
|
||||
assert(drvPath.isDerivation());
|
||||
|
|
|
@ -177,12 +177,11 @@ public:
    void addToStore(const ValidPathInfo & info, Source & source,
        RepairFlag repair, CheckSigsFlag checkSigs) override;

    StorePath addToStoreFromDump(Source & dump, std::string_view name,
        FileIngestionMethod method, HashAlgorithm hashAlgo, RepairFlag repair, const StorePathSet & references) override;

    StorePath addTextToStore(
    StorePath addToStoreFromDump(
        Source & dump,
        std::string_view name,
        std::string_view s,
        ContentAddressMethod method,
        HashAlgorithm hashAlgo,
        const StorePathSet & references,
        RepairFlag repair) override;

@ -396,19 +395,6 @@ private:
    void signPathInfo(ValidPathInfo & info);
    void signRealisation(Realisation &);

    // XXX: Make a generic `Store` method
    ContentAddress hashCAPath(
        const ContentAddressMethod & method,
        const HashAlgorithm & hashAlgo,
        const StorePath & path);

    ContentAddress hashCAPath(
        const ContentAddressMethod & method,
        const HashAlgorithm & hashAlgo,
        const Path & path,
        const std::string_view pathHash
    );

    void addBuildLog(const StorePath & drvPath, std::string_view log) override;

    friend struct LocalDerivationGoal;
@ -8,7 +8,7 @@ libstore_SOURCES := $(wildcard $(d)/*.cc $(d)/builtins/*.cc $(d)/build/*.cc)

libstore_LIBS = libutil

libstore_LDFLAGS += $(SQLITE3_LIBS) $(LIBCURL_LIBS) $(SODIUM_LIBS) -pthread
libstore_LDFLAGS += $(SQLITE3_LIBS) $(LIBCURL_LIBS) $(THREAD_LDFLAGS)
ifdef HOST_LINUX
 libstore_LDFLAGS += -ldl
endif

@ -16,15 +16,15 @@ endif
$(foreach file,$(libstore_FILES),$(eval $(call install-data-in,$(d)/$(file),$(datadir)/nix/sandbox)))

ifeq ($(ENABLE_S3), 1)
	libstore_LDFLAGS += -laws-cpp-sdk-transfer -laws-cpp-sdk-s3 -laws-cpp-sdk-core -laws-crt-cpp
 libstore_LDFLAGS += -laws-cpp-sdk-transfer -laws-cpp-sdk-s3 -laws-cpp-sdk-core -laws-crt-cpp
endif

ifdef HOST_SOLARIS
	libstore_LDFLAGS += -lsocket
 libstore_LDFLAGS += -lsocket
endif

ifeq ($(HAVE_SECCOMP), 1)
	libstore_LDFLAGS += $(LIBSECCOMP_LIBS)
 libstore_LDFLAGS += $(LIBSECCOMP_LIBS)
endif

libstore_CXXFLAGS += \

@ -48,9 +48,9 @@ $(d)/embedded-sandbox-shell.gen.hh: $(sandbox_shell)
	$(trace-gen) hexdump -v -e '1/1 "0x%x," "\n"' < $< > $@.tmp
	@mv $@.tmp $@
else
ifneq ($(sandbox_shell),)
libstore_CXXFLAGS += -DSANDBOX_SHELL="\"$(sandbox_shell)\""
endif
ifneq ($(sandbox_shell),)
libstore_CXXFLAGS += -DSANDBOX_SHELL="\"$(sandbox_shell)\""
endif
endif

$(d)/local-store.cc: $(d)/schema.sql.gen.hh $(d)/ca-specific-schema.sql.gen.hh
@ -32,11 +32,19 @@ Machine::Machine(decltype(storeUri) storeUri,
    systemTypes(systemTypes),
    sshKey(sshKey),
    maxJobs(maxJobs),
    speedFactor(std::max(1U, speedFactor)),
    speedFactor(speedFactor == 0.0f ? 1.0f : std::move(speedFactor)),
    supportedFeatures(supportedFeatures),
    mandatoryFeatures(mandatoryFeatures),
    sshPublicHostKey(sshPublicHostKey)
{}
{
    if (speedFactor < 0.0)
        throw UsageError("speed factor must be >= 0");
}

bool Machine::systemSupported(const std::string & system) const
{
    return system == "builtin" || (systemTypes.count(system) > 0);
}

bool Machine::allSupported(const std::set<std::string> & features) const
{

@ -130,6 +138,14 @@ static Machine parseBuilderLine(const std::string & line)
        return result.value();
    };

    auto parseFloatField = [&](size_t fieldIndex) {
        const auto result = string2Int<float>(tokens[fieldIndex]);
        if (!result) {
            throw FormatError("bad machine specification: failed to convert column #%lu in a row: '%s' to 'float'", fieldIndex, line);
        }
        return result.value();
    };

    auto ensureBase64 = [&](size_t fieldIndex) {
        const auto & str = tokens[fieldIndex];
        try {

@ -145,10 +161,10 @@ static Machine parseBuilderLine(const std::string & line)

    return {
        tokens[0],
        isSet(1) ? tokenizeString<std::vector<std::string>>(tokens[1], ",") : std::vector<std::string>{settings.thisSystem},
        isSet(1) ? tokenizeString<std::set<std::string>>(tokens[1], ",") : std::set<std::string>{settings.thisSystem},
        isSet(2) ? tokens[2] : "",
        isSet(3) ? parseUnsignedIntField(3) : 1U,
        isSet(4) ? parseUnsignedIntField(4) : 1U,
        isSet(4) ? parseFloatField(4) : 1.0f,
        isSet(5) ? tokenizeString<std::set<std::string>>(tokens[5], ",") : std::set<std::string>{},
        isSet(6) ? tokenizeString<std::set<std::string>>(tokens[6], ",") : std::set<std::string>{},
        isSet(7) ? ensureBase64(7) : ""
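For reference, the columns parsed above map onto the Machine fields in order: tokens[0] is the store URI, tokens[1] the comma-separated system types, tokens[2] the SSH key, tokens[3] maxJobs, tokens[4] the speed factor (now a float), tokens[5] and tokens[6] the supported and mandatory features, and tokens[7] the base64 SSH host key. A small self-contained sketch of that layout, with a made-up machines line (toy code, not the real parseBuilderLine):

// Toy illustration of the column layout parsed above; the example line is
// made up and real parsing lives in parseBuilderLine.
#include <iostream>
#include <sstream>
#include <string>
#include <vector>

int main()
{
    std::string line =
        "ssh://builder@example.org x86_64-linux,i686-linux /etc/nix/key 8 2.5 kvm benchmark -";
    std::vector<std::string> tokens;
    std::istringstream in(line);
    for (std::string t; in >> t; ) tokens.push_back(t);
    // Column 4 is now parsed with parseFloatField, so fractional speed
    // factors such as 2.5 are accepted.
    float speedFactor = std::stof(tokens[4]);
    std::cout << "store URI: " << tokens[0]
              << ", speed factor: " << speedFactor << "\n";
}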
@ -10,17 +10,30 @@ class Store;
struct Machine {

    const std::string storeUri;
    const std::vector<std::string> systemTypes;
    const std::set<std::string> systemTypes;
    const std::string sshKey;
    const unsigned int maxJobs;
    const unsigned int speedFactor;
    const float speedFactor;
    const std::set<std::string> supportedFeatures;
    const std::set<std::string> mandatoryFeatures;
    const std::string sshPublicHostKey;
    bool enabled = true;

    /**
     * @return Whether `system` is either `"builtin"` or in
     * `systemTypes`.
     */
    bool systemSupported(const std::string & system) const;

    /**
     * @return Whether `features` is a subset of the union of `supportedFeatures` and
     * `mandatoryFeatures`
     */
    bool allSupported(const std::set<std::string> & features) const;

    /**
     * @return Whether `mandatoryFeatures` is a subset of `features`
     */
    bool mandatoryMet(const std::set<std::string> & features) const;

    Machine(decltype(storeUri) storeUri,
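A self-contained sketch of the two subset checks documented above, using std::includes (toy code, not the real Machine methods): allSupported asks whether every requested feature is in the union of supportedFeatures and mandatoryFeatures, and mandatoryMet asks whether every mandatory feature was requested.

// Toy model of the feature checks documented above.
#include <algorithm>
#include <iostream>
#include <set>
#include <string>

// Returns true when every element of `a` is also in `b` (both std::set,
// so the ranges are already sorted as std::includes requires).
static bool isSubset(const std::set<std::string> & a, const std::set<std::string> & b)
{
    return std::includes(b.begin(), b.end(), a.begin(), a.end());
}

int main()
{
    std::set<std::string> supported {"kvm"}, mandatory {"benchmark"};
    std::set<std::string> requested {"kvm", "benchmark"};

    std::set<std::string> allFeatures = supported;
    allFeatures.insert(mandatory.begin(), mandatory.end());

    std::cout << "allSupported: " << isSubset(requested, allFeatures) << "\n"; // prints 1
    std::cout << "mandatoryMet: " << isSubset(mandatory, requested)   << "\n"; // prints 1
}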
@ -331,8 +331,11 @@ std::map<DrvOutput, StorePath> drvOutputReferences(
std::map<DrvOutput, StorePath> drvOutputReferences(
    Store & store,
    const Derivation & drv,
    const StorePath & outputPath)
    const StorePath & outputPath,
    Store * evalStore_)
{
    auto & evalStore = evalStore_ ? *evalStore_ : store;

    std::set<Realisation> inputRealisations;

    std::function<void(const StorePath &, const DerivedPathMap<StringSet>::ChildNode &)> accumRealisations;

@ -340,7 +343,7 @@ std::map<DrvOutput, StorePath> drvOutputReferences(
    accumRealisations = [&](const StorePath & inputDrv, const DerivedPathMap<StringSet>::ChildNode & inputNode) {
        if (!inputNode.value.empty()) {
            auto outputHashes =
                staticOutputHashes(store, store.readDerivation(inputDrv));
                staticOutputHashes(evalStore, evalStore.readDerivation(inputDrv));
            for (const auto & outputName : inputNode.value) {
                auto outputHash = get(outputHashes, outputName);
                if (!outputHash)

@ -362,7 +365,7 @@ std::map<DrvOutput, StorePath> drvOutputReferences(
                SingleDerivedPath next = SingleDerivedPath::Built { d, outputName };
                accumRealisations(
                    // TODO deep resolutions for dynamic derivations, issue #8947, would go here.
                    resolveDerivedPath(store, next),
                    resolveDerivedPath(store, next, evalStore_),
                    childNode);
            }
        }
@ -19,6 +19,35 @@ struct NarMember
    std::map<std::string, NarMember> children;
};

struct NarMemberConstructor : CreateRegularFileSink
{
private:

    NarMember & narMember;

    uint64_t & pos;

public:

    NarMemberConstructor(NarMember & nm, uint64_t & pos)
        : narMember(nm), pos(pos)
    { }

    void isExecutable() override
    {
        narMember.stat.isExecutable = true;
    }

    void preallocateContents(uint64_t size) override
    {
        narMember.stat.fileSize = size;
        narMember.stat.narOffset = pos;
    }

    void operator () (std::string_view data) override
    { }
};

struct NarAccessor : public SourceAccessor
{
    std::optional<const std::string> nar;

@ -27,7 +56,7 @@ struct NarAccessor : public SourceAccessor

    NarMember root;

    struct NarIndexer : ParseSink, Source
    struct NarIndexer : FileSystemObjectSink, Source
    {
        NarAccessor & acc;
        Source & source;

@ -42,7 +71,7 @@ struct NarAccessor : public SourceAccessor
            : acc(acc), source(source)
        { }

        void createMember(const Path & path, NarMember member)
        NarMember & createMember(const Path & path, NarMember member)
        {
            size_t level = std::count(path.begin(), path.end(), '/');
            while (parents.size() > level) parents.pop();

@ -50,11 +79,14 @@ struct NarAccessor : public SourceAccessor
            if (parents.empty()) {
                acc.root = std::move(member);
                parents.push(&acc.root);
                return acc.root;
            } else {
                if (parents.top()->stat.type != Type::tDirectory)
                    throw Error("NAR file missing parent directory of path '%s'", path);
                auto result = parents.top()->children.emplace(baseNameOf(path), std::move(member));
                parents.push(&result.first->second);
                auto & ref = result.first->second;
                parents.push(&ref);
                return ref;
            }
        }

@ -68,34 +100,18 @@ struct NarAccessor : public SourceAccessor
            } });
        }

        void createRegularFile(const Path & path) override
        void createRegularFile(const Path & path, std::function<void(CreateRegularFileSink &)> func) override
        {
            createMember(path, NarMember{ .stat = {
            auto & nm = createMember(path, NarMember{ .stat = {
                .type = Type::tRegular,
                .fileSize = 0,
                .isExecutable = false,
                .narOffset = 0
            } });
            NarMemberConstructor nmc { nm, pos };
            func(nmc);
        }

        void closeRegularFile() override
        { }

        void isExecutable() override
        {
            parents.top()->stat.isExecutable = true;
        }

        void preallocateContents(uint64_t size) override
        {
            auto & st = parents.top()->stat;
            st.fileSize = size;
            st.narOffset = pos;
        }

        void receiveContents(std::string_view data) override
        { }

        void createSymlink(const Path & path, const std::string & target) override
        {
            createMember(path,
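The change above replaces the flat ParseSink callbacks (createRegularFile, isExecutable, preallocateContents, receiveContents, closeRegularFile) with a FileSystemObjectSink whose createRegularFile hands a per-file CreateRegularFileSink to a callback. A toy, self-contained model of that callback shape (the names mimic the diff but this is not the real Nix API):

// Toy model of callback-style regular-file creation: the caller fills in
// one file through a per-file sink instead of relying on create/close state
// kept in the parent sink.
#include <cstdint>
#include <functional>
#include <iostream>
#include <string>
#include <string_view>

struct CreateRegularFileSinkLike
{
    virtual void isExecutable() = 0;
    virtual void preallocateContents(uint64_t size) = 0;
    virtual void operator () (std::string_view data) = 0;
    virtual ~CreateRegularFileSinkLike() = default;
};

struct PrintingSink
{
    void createRegularFile(const std::string & path,
        std::function<void(CreateRegularFileSinkLike &)> fill)
    {
        struct Impl : CreateRegularFileSinkLike
        {
            std::string contents;
            bool executable = false;
            void isExecutable() override { executable = true; }
            void preallocateContents(uint64_t size) override { contents.reserve(size); }
            void operator () (std::string_view data) override { contents.append(data); }
        } impl;
        fill(impl);
        std::cout << path << ": " << impl.contents.size() << " bytes"
                  << (impl.executable ? " (executable)" : "") << "\n";
    }
};

int main()
{
    PrintingSink sink;
    sink.createRegularFile("/bin/hello", [](CreateRegularFileSinkLike & f) {
        f.isExecutable();
        f.preallocateContents(5);
        f("hello");
    });
}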
@ -2,6 +2,7 @@
#include "globals.hh"
#include "signals.hh"
#include "posix-fs-canonicalise.hh"
#include "posix-source-accessor.hh"

#include <cstdlib>
#include <cstring>

@ -146,7 +147,12 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
       Also note that if `path' is a symlink, then we're hashing the
       contents of the symlink (i.e. the result of readlink()), not
       the contents of the target (which may not even exist). */
    Hash hash = hashPath(HashAlgorithm::SHA256, path).first;
    Hash hash = ({
        PosixSourceAccessor accessor;
        hashPath(
            accessor, CanonPath { path },
            FileIngestionMethod::Recursive, HashAlgorithm::SHA256).first;
    });
    debug("'%1%' has hash '%2%'", path, hash.to_string(HashFormat::Nix32, true));

    /* Check if this is a known hash. */

@ -156,7 +162,12 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
    if (pathExists(linkPath)) {
        auto stLink = lstat(linkPath);
        if (st.st_size != stLink.st_size
            || (repair && hash != hashPath(HashAlgorithm::SHA256, linkPath).first))
            || (repair && hash != ({
                PosixSourceAccessor accessor;
                hashPath(
                    accessor, CanonPath { linkPath },
                    FileIngestionMethod::Recursive, HashAlgorithm::SHA256).first;
            })))
        {
            // XXX: Consider overwriting linkPath with our valid version.
            warn("removing corrupted link '%s'", linkPath);

@ -231,7 +242,7 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
        /* Atomically replace the old file with the new hard link. */
        try {
            renameFile(tempLink, path);
        } catch (SysError & e) {
        } catch (SystemError & e) {
            if (unlink(tempLink.c_str()) == -1)
                printError("unable to unlink '%1%'", tempLink);
            if (errno == EMLINK) {
@ -38,9 +38,9 @@ std::string ValidPathInfo::fingerprint(const Store & store) const
}


void ValidPathInfo::sign(const Store & store, const SecretKey & secretKey)
void ValidPathInfo::sign(const Store & store, const Signer & signer)
{
    sigs.insert(secretKey.signDetached(fingerprint(store)));
    sigs.insert(signer.signDetached(fingerprint(store)));
}

std::optional<ContentAddressWithReferences> ValidPathInfo::contentAddressWithReferences() const
@ -1,7 +1,7 @@
#pragma once
///@file

#include "crypto.hh"
#include "signature/signer.hh"
#include "path.hh"
#include "hash.hh"
#include "content-address.hh"

@ -107,7 +107,7 @@ struct ValidPathInfo : UnkeyedValidPathInfo {
     */
    std::string fingerprint(const Store & store) const;

    void sign(const Store & store, const SecretKey & secretKey);
    void sign(const Store & store, const Signer & signer);

    /**
     * @return The `ContentAddressWithReferences` that determines the
@ -3,6 +3,11 @@

namespace nix {

static constexpr std::string_view nameRegexStr = R"([0-9a-zA-Z\+\-_\?=][0-9a-zA-Z\+\-\._\?=]*)";

static constexpr std::string_view nameRegexStr =
    // This uses a negative lookahead: (?!\.\.?(-|$))
    // - deny ".", "..", or those strings followed by '-'
    // - when it's not those, start again at the start of the input and apply the next regex, which is [0-9a-zA-Z\+\-\._\?=]+
    R"((?!\.\.?(-|$))[0-9a-zA-Z\+\-\._\?=]+)";

}
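Compared with the old pattern, which simply forbade a leading '.', the new nameRegexStr admits names like ".foo" while still rejecting ".", ".." and anything whose first dash-separated component is one of those. A quick self-contained check (std::regex uses ECMAScript syntax, which supports the (?!...) lookahead; the pattern is copied from the diff above):

// Standalone check of the new name regex's edge cases.
#include <cassert>
#include <iostream>
#include <regex>

int main()
{
    std::regex nameRegex{R"((?!\.\.?(-|$))[0-9a-zA-Z\+\-\._\?=]+)"};

    assert(std::regex_match(".foo", nameRegex));    // leading dot is now allowed
    assert(std::regex_match("...", nameRegex));     // three dots are fine
    assert(!std::regex_match(".", nameRegex));      // "." rejected
    assert(!std::regex_match("..", nameRegex));     // ".." rejected
    assert(!std::regex_match(".-foo", nameRegex));  // "." followed by a dash rejected
    assert(!std::regex_match("..-foo", nameRegex)); // ".." followed by a dash rejected

    std::cout << "all checks passed\n";
}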
@ -1,7 +1,5 @@
#include "store-dir-config.hh"

#include <sodium.h>

namespace nix {

static void checkName(std::string_view path, std::string_view name)

@ -11,9 +9,20 @@ static void checkName(std::string_view path, std::string_view name)
    if (name.size() > StorePath::MaxPathLen)
        throw BadStorePath("store path '%s' has a name longer than %d characters",
            path, StorePath::MaxPathLen);
    if (name[0] == '.')
        throw BadStorePath("store path '%s' starts with illegal character '.'", path);
    // See nameRegexStr for the definition
    if (name[0] == '.') {
        // check against "." and "..", followed by end or dash
        if (name.size() == 1)
            throw BadStorePath("store path '%s' has invalid name '%s'", path, name);
        if (name[1] == '-')
            throw BadStorePath("store path '%s' has invalid name '%s': first dash-separated component must not be '%s'", path, name, ".");
        if (name[1] == '.') {
            if (name.size() == 2)
                throw BadStorePath("store path '%s' has invalid name '%s'", path, name);
            if (name[2] == '-')
                throw BadStorePath("store path '%s' has invalid name '%s': first dash-separated component must not be '%s'", path, name, "..");
        }
    }
    for (auto c : name)
        if (!((c >= '0' && c <= '9')
            || (c >= 'a' && c <= 'z')

@ -49,9 +58,7 @@ StorePath StorePath::dummy("ffffffffffffffffffffffffffffffff-x");

StorePath StorePath::random(std::string_view name)
{
    Hash hash(HashAlgorithm::SHA1);
    randombytes_buf(hash.hash, hash.hashSize);
    return StorePath(hash, name);
    return StorePath(Hash::random(HashAlgorithm::SHA1), name);
}

StorePath StoreDirConfig::parseStorePath(std::string_view path) const
@ -1,4 +1,4 @@
#if HAVE_SYS_XATTR_H
#if HAVE_ACL_SUPPORT
# include <sys/xattr.h>
#endif

@ -78,7 +78,7 @@ static void canonicalisePathMetaData_(
    if (!(S_ISREG(st.st_mode) || S_ISDIR(st.st_mode) || S_ISLNK(st.st_mode)))
        throw Error("file '%1%' has an unsupported type", path);

#ifdef HAVE_SYS_XATTR_H
#if HAVE_ACL_SUPPORT
    /* Remove extended attributes / ACLs. */
    ssize_t eaSize = llistxattr(path.c_str(), nullptr, 0);
@ -1,6 +1,7 @@
#include "realisation.hh"
#include "store-api.hh"
#include "closure.hh"
#include "signature/local-keys.hh"
#include <nlohmann/json.hpp>

namespace nix {

@ -113,9 +114,9 @@ std::string Realisation::fingerprint() const
    return serialized.dump();
}

void Realisation::sign(const SecretKey & secretKey)
void Realisation::sign(const Signer &signer)
{
    signatures.insert(secretKey.signDetached(fingerprint()));
    signatures.insert(signer.signDetached(fingerprint()));
}

bool Realisation::checkSignature(const PublicKeys & publicKeys, const std::string & sig) const
@ -8,7 +8,7 @@
#include "derived-path.hh"
#include <nlohmann/json_fwd.hpp>
#include "comparator.hh"
#include "crypto.hh"
#include "signature/signer.hh"

namespace nix {

@ -64,7 +64,7 @@ struct Realisation {
    static Realisation fromJSON(const nlohmann::json& json, const std::string& whence);

    std::string fingerprint() const;
    void sign(const SecretKey &);
    void sign(const Signer &);
    bool checkSignature(const PublicKeys & publicKeys, const std::string & sig) const;
    size_t checkSignatures(const PublicKeys & publicKeys) const;
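This change, together with the matching ones in path-info and the local store, only requires the signer to expose signDetached(), so the concrete key handling can sit behind the interface (for example a LocalSigner wrapping a SecretKey). A rough self-contained sketch of that shape (toy classes, not the real signature/signer.hh):

// Toy model of the Signer abstraction: callers depend only on signDetached(),
// so an in-memory key and, say, a remote signing service are interchangeable.
#include <iostream>
#include <string>
#include <string_view>

struct SignerLike
{
    virtual std::string signDetached(std::string_view fingerprint) const = 0;
    virtual ~SignerLike() = default;
};

struct FakeLocalSigner : SignerLike
{
    std::string keyName;
    explicit FakeLocalSigner(std::string name) : keyName(std::move(name)) {}
    std::string signDetached(std::string_view fingerprint) const override
    {
        // Real code signs with the secret key; here we just tag the input.
        return keyName + ":" + std::string(fingerprint);
    }
};

int main()
{
    FakeLocalSigner signer("cache.example.org-1");
    std::cout << signer.signDetached("example fingerprint") << "\n";
}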
@ -87,13 +87,13 @@ std::pair<ref<SourceAccessor>, CanonPath> RemoteFSAccessor::fetch(const CanonPat
            nars.emplace(storePath.hashPart(), narAccessor);
            return {narAccessor, restPath};

        } catch (SysError &) { }
        } catch (SystemError &) { }

        try {
            auto narAccessor = makeNarAccessor(nix::readFile(cacheFile));
            nars.emplace(storePath.hashPart(), narAccessor);
            return {narAccessor, restPath};
        } catch (SysError &) { }
        } catch (SystemError &) { }
    }

    StringSink sink;
@ -16,6 +16,8 @@
#include "logging.hh"
#include "callback.hh"
#include "filetransfer.hh"
#include "signals.hh"

#include <nlohmann/json.hpp>

namespace nix {

@ -65,6 +67,7 @@ void RemoteStore::initConnection(Connection & conn)
{
    /* Send the magic greeting, check for the reply. */
    try {
        conn.from.endOfFileError = "Nix daemon disconnected unexpectedly (maybe it crashed?)";
        conn.to << WORKER_MAGIC_1;
        conn.to.flush();
        StringSink saved;

@ -186,7 +189,7 @@ void RemoteStore::ConnectionHandle::processStderr(Sink * sink, Source * source,
            if (m.find("parsing derivation") != std::string::npos &&
                m.find("expected string") != std::string::npos &&
                m.find("Derive([") != std::string::npos)
                throw Error("%s, this might be because the daemon is too old to understand dependencies on dynamic derivations. Check to see if the raw dervation is in the form '%s'", std::move(m), "DrvWithVersion(..)");
                throw Error("%s, this might be because the daemon is too old to understand dependencies on dynamic derivations. Check to see if the raw derivation is in the form '%s'", std::move(m), "DrvWithVersion(..)");
        }
        throw;
    }

@ -225,7 +228,7 @@ StorePathSet RemoteStore::queryValidPaths(const StorePathSet & paths, Substitute
        conn->to << WorkerProto::Op::QueryValidPaths;
        WorkerProto::write(*this, *conn, paths);
        if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 27) {
            conn->to << (settings.buildersUseSubstitutes ? 1 : 0);
            conn->to << maybeSubstitute;
        }
        conn.processStderr();
        return WorkerProto::Serialise<StorePathSet>::read(*this, *conn);

@ -502,8 +505,13 @@ ref<const ValidPathInfo> RemoteStore::addCAToStore(
}


StorePath RemoteStore::addToStoreFromDump(Source & dump, std::string_view name,
    FileIngestionMethod method, HashAlgorithm hashAlgo, RepairFlag repair, const StorePathSet & references)
StorePath RemoteStore::addToStoreFromDump(
    Source & dump,
    std::string_view name,
    ContentAddressMethod method,
    HashAlgorithm hashAlgo,
    const StorePathSet & references,
    RepairFlag repair)
{
    return addCAToStore(dump, name, method, hashAlgo, references, repair)->path;
}

@ -603,16 +611,6 @@ void RemoteStore::addMultipleToStore(
}


StorePath RemoteStore::addTextToStore(
    std::string_view name,
    std::string_view s,
    const StorePathSet & references,
    RepairFlag repair)
{
    StringSource source(s);
    return addCAToStore(source, name, TextIngestionMethod {}, HashAlgorithm::SHA256, references, repair)->path;
}

void RemoteStore::registerDrvOutput(const Realisation & info)
{
    auto conn(getConnection());

@ -1071,6 +1069,7 @@ void RemoteStore::ConnectionHandle::withFramedSink(std::function<void(Sink & sin
    std::thread stderrThread([&]()
    {
        try {
            ReceiveInterrupts receiveInterrupts;
            processStderr(nullptr, nullptr, false);
        } catch (...) {
            ex = std::current_exception();
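With addTextToStore removed, adding text goes through the single addToStoreFromDump entry point, exactly as the deleted body above shows. A toy, self-contained illustration of the argument mapping (the structs are stand-ins, not the real Store API):

// Toy model of the migration: addTextToStore(name, s, references, repair)
// becomes addToStoreFromDump(source, name, TextIngestionMethod{}, SHA256,
// references, repair). The structs below only record arguments.
#include <iostream>
#include <set>
#include <string>

struct ToyStore
{
    std::string addToStoreFromDump(
        const std::string & dump,
        const std::string & name,
        const std::string & method,      // stands in for ContentAddressMethod
        const std::string & hashAlgo,    // stands in for HashAlgorithm
        const std::set<std::string> & references,
        bool repair)
    {
        return "added '" + name + "' (" + method + ", " + hashAlgo + ", "
            + std::to_string(references.size()) + " refs"
            + (repair ? ", repair" : "") + ")";
    }

    // What the removed addTextToStore did, expressed via the new entry point.
    std::string addTextToStore(
        const std::string & name,
        const std::string & s,
        const std::set<std::string> & references,
        bool repair)
    {
        return addToStoreFromDump(s, name, "text", "sha256", references, repair);
    }
};

int main()
{
    ToyStore store;
    std::cout << store.addTextToStore("hello", "Hello World", {}, false) << "\n";
}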
@ -82,10 +82,15 @@ public:
        RepairFlag repair);

    /**
     * Add a content-addressable store path. Does not support references. `dump` will be drained.
     * Add a content-addressable store path. `dump` will be drained.
     */
    StorePath addToStoreFromDump(Source & dump, std::string_view name,
        FileIngestionMethod method = FileIngestionMethod::Recursive, HashAlgorithm hashAlgo = HashAlgorithm::SHA256, RepairFlag repair = NoRepair, const StorePathSet & references = StorePathSet()) override;
    StorePath addToStoreFromDump(
        Source & dump,
        std::string_view name,
        ContentAddressMethod method = FileIngestionMethod::Recursive,
        HashAlgorithm hashAlgo = HashAlgorithm::SHA256,
        const StorePathSet & references = StorePathSet(),
        RepairFlag repair = NoRepair) override;

    void addToStore(const ValidPathInfo & info, Source & nar,
        RepairFlag repair, CheckSigsFlag checkSigs) override;

@ -101,12 +106,6 @@ public:
        RepairFlag repair,
        CheckSigsFlag checkSigs) override;

    StorePath addTextToStore(
        std::string_view name,
        std::string_view s,
        const StorePathSet & references,
        RepairFlag repair) override;

    void registerDrvOutput(const Realisation & info) override;

    void queryRealisationUncached(const DrvOutput &,