Merge remote-tracking branch 'upstream/master' into store-path-complete-construction
commit eb76b35efa
224 changed files with 4699 additions and 3115 deletions

@@ -137,11 +137,8 @@ static int main_build_remote(int argc, char * * argv)
         for (auto & m : machines) {
             debug("considering building on remote machine '%s'", m.storeUri);

-            if (m.enabled
-                && (neededSystem == "builtin"
-                    || std::find(m.systemTypes.begin(),
-                        m.systemTypes.end(),
-                        neededSystem) != m.systemTypes.end()) &&
+            if (m.enabled &&
+                m.systemSupported(neededSystem) &&
                 m.allSupported(requiredFeatures) &&
                 m.mandatoryMet(requiredFeatures))
             {
@@ -205,7 +202,7 @@ static int main_build_remote(int argc, char * * argv)
             else
                 drvstr = "<unknown>";

-            auto error = hintformat(errorText);
+            auto error = HintFmt(errorText);
             error
                 % drvstr
                 % neededSystem
@@ -214,7 +211,7 @@ static int main_build_remote(int argc, char * * argv)

         for (auto & m : machines)
             error
-                % concatStringsSep<std::vector<std::string>>(", ", m.systemTypes)
+                % concatStringsSep<StringSet>(", ", m.systemTypes)
                 % m.maxJobs
                 % concatStringsSep<StringSet>(", ", m.supportedFeatures)
                 % concatStringsSep<StringSet>(", ", m.mandatoryFeatures);

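For context, the inline condition removed above was folded into helper predicates on Machine. A minimal sketch of what systemSupported presumably checks, mirroring the deleted std::find logic (this is an inference for illustration, not the actual machines.cc implementation):

    // Sketch only: inferred from the inline check that this hunk removes.
    bool Machine::systemSupported(const std::string & system) const
    {
        return system == "builtin"
            || std::find(systemTypes.begin(), systemTypes.end(), system) != systemTypes.end();
    }
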
@@ -156,7 +156,7 @@ Bindings * MixEvalArgs::getAutoArgs(EvalState & state)
     for (auto & i : autoArgs) {
         auto v = state.allocValue();
         if (i.second[0] == 'E')
-            state.mkThunk_(*v, state.parseExprFromString(i.second.substr(1), state.rootPath(CanonPath::fromCwd())));
+            state.mkThunk_(*v, state.parseExprFromString(i.second.substr(1), state.rootPath(".")));
         else
             v->mkString(((std::string_view) i.second).substr(1));
         res.insert(state.symbols.create(i.first), v);
@@ -164,7 +164,7 @@ Bindings * MixEvalArgs::getAutoArgs(EvalState & state)
     return res.finish();
 }

-SourcePath lookupFileArg(EvalState & state, std::string_view s, CanonPath baseDir)
+SourcePath lookupFileArg(EvalState & state, std::string_view s, const Path * baseDir)
 {
     if (EvalSettings::isPseudoUrl(s)) {
         auto storePath = fetchers::downloadTarball(
@@ -185,7 +185,7 @@ SourcePath lookupFileArg(EvalState & state, std::string_view s, CanonPath baseDir)
     }

     else
-        return state.rootPath(CanonPath(s, baseDir));
+        return state.rootPath(baseDir ? absPath(s, *baseDir) : absPath(s));
 }

 }

@@ -29,6 +29,6 @@ private:
     std::map<std::string, std::string> autoArgs;
 };

-SourcePath lookupFileArg(EvalState & state, std::string_view s, CanonPath baseDir = CanonPath::fromCwd());
+SourcePath lookupFileArg(EvalState & state, std::string_view s, const Path * baseDir = nullptr);

 }

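To illustrate the new lookupFileArg signature (the file name here is hypothetical): with no base directory the string is resolved with absPath against the current working directory, and callers that do have a base directory pass its address, as installables.cc does in the hunk below.

    // Hypothetical call sites for the new default argument:
    auto p1 = lookupFileArg(state, "release.nix");          // baseDir == nullptr -> absPath(s)
    Path base = absPath(getCommandBaseDir());               // as in parseInstallables() below
    auto p2 = lookupFileArg(state, "release.nix", &base);   // resolved relative to base
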
@@ -17,7 +17,7 @@ Strings editorFor(const SourcePath & file, uint32_t line)
         editor.find("vim") != std::string::npos ||
         editor.find("kak") != std::string::npos))
         args.push_back(fmt("+%d", line));
-    args.push_back(path->abs());
+    args.push_back(path->string());
     return args;
 }

@@ -487,10 +487,11 @@ Installables SourceExprCommand::parseInstallables(
         state->eval(e, *vFile);
     }
     else if (file) {
-        state->evalFile(lookupFileArg(*state, *file, CanonPath::fromCwd(getCommandBaseDir())), *vFile);
+        auto dir = absPath(getCommandBaseDir());
+        state->evalFile(lookupFileArg(*state, *file, &dir), *vFile);
     }
     else {
-        CanonPath dir(CanonPath::fromCwd(getCommandBaseDir()));
+        Path dir = absPath(getCommandBaseDir());
         auto e = state->parseExprFromString(*expr, state->rootPath(dir));
         state->eval(e, *vFile);
     }

@@ -232,7 +232,7 @@ static std::ostream & showDebugTrace(std::ostream & out, const PosTable & positi
         : positions[dt.expr.getPos() ? dt.expr.getPos() : noPos];

     if (pos) {
-        out << pos;
+        out << *pos;
         if (auto loc = pos->getCodeLines()) {
             out << "\n";
             printCodeLines(out, "", *pos, *loc);
@@ -243,10 +243,19 @@ static std::ostream & showDebugTrace(std::ostream & out, const PosTable & positi
     return out;
 }

+static bool isFirstRepl = true;
+
 void NixRepl::mainLoop()
 {
     std::string error = ANSI_RED "error:" ANSI_NORMAL " ";
-    notice("Welcome to Nix " + nixVersion + ". Type :? for help.\n");
+    if (isFirstRepl) {
+        std::string_view debuggerNotice = "";
+        if (state->debugRepl) {
+            debuggerNotice = " debugger";
+        }
+        notice("Nix %1%%2%\nType :? for help.", nixVersion, debuggerNotice);
+    }
+
+    isFirstRepl = false;

     loadFiles();

@@ -422,8 +431,6 @@ StringSet NixRepl::completePrefix(const std::string & prefix)
             // Quietly ignore parse errors.
         } catch (EvalError & e) {
             // Quietly ignore evaluation errors.
-        } catch (UndefinedVarError & e) {
-            // Quietly ignore undefined variable errors.
         } catch (BadURL & e) {
             // Quietly ignore BadURL flake-related errors.
         }
@@ -890,7 +897,7 @@ void NixRepl::addVarToScope(const Symbol name, Value & v)

 Expr * NixRepl::parseString(std::string s)
 {
-    return state->parseExprFromString(std::move(s), state->rootPath(CanonPath::fromCwd()), staticEnv);
+    return state->parseExprFromString(std::move(s), state->rootPath("."), staticEnv);
 }


@@ -65,10 +65,10 @@ std::pair<Value *, PosIdx> findAlongAttrPath(EvalState & state, const std::strin
         if (!attrIndex) {

             if (v->type() != nAttrs)
-                throw TypeError(
+                state.error<TypeError>(
                     "the expression selected by the selection path '%1%' should be a set but is %2%",
                     attrPath,
-                    showType(*v));
+                    showType(*v)).debugThrow();
             if (attr.empty())
                 throw Error("empty attribute name in selection path '%1%'", attrPath);

@@ -88,10 +88,10 @@ std::pair<Value *, PosIdx> findAlongAttrPath(EvalState & state, const std::strin
         else {

             if (!v->isList())
-                throw TypeError(
+                state.error<TypeError>(
                     "the expression selected by the selection path '%1%' should be a list but is %2%",
                     attrPath,
-                    showType(*v));
+                    showType(*v)).debugThrow();
             if (*attrIndex >= v->listSize())
                 throw AttrPathNotFound("list index %1% in selection path '%2%' is out of range", *attrIndex, attrPath);

@@ -491,7 +491,7 @@ std::shared_ptr<AttrCursor> AttrCursor::maybeGetAttr(Symbol name, bool forceErro
                 if (forceErrors)
                     debug("reevaluating failed cached attribute '%s'", getAttrPathStr(name));
                 else
-                    throw CachedEvalError("cached failure of attribute '%s'", getAttrPathStr(name));
+                    throw CachedEvalError(root->state, "cached failure of attribute '%s'", getAttrPathStr(name));
             } else
                 return std::make_shared<AttrCursor>(root,
                     std::make_pair(shared_from_this(), name), nullptr, std::move(attr));
@@ -500,7 +500,7 @@ std::shared_ptr<AttrCursor> AttrCursor::maybeGetAttr(Symbol name, bool forceErro
             // evaluate to see whether 'name' exists
         } else
             return nullptr;
-            //throw TypeError("'%s' is not an attribute set", getAttrPathStr());
+            //error<TypeError>("'%s' is not an attribute set", getAttrPathStr()).debugThrow();
     }
 }

@@ -508,7 +508,7 @@ std::shared_ptr<AttrCursor> AttrCursor::maybeGetAttr(Symbol name, bool forceErro

     if (v.type() != nAttrs)
         return nullptr;
-        //throw TypeError("'%s' is not an attribute set", getAttrPathStr());
+        //error<TypeError>("'%s' is not an attribute set", getAttrPathStr()).debugThrow();

     auto attr = v.attrs->get(name);

@@ -574,14 +574,14 @@ std::string AttrCursor::getString()
             debug("using cached string attribute '%s'", getAttrPathStr());
             return s->first;
         } else
-            root->state.error("'%s' is not a string", getAttrPathStr()).debugThrow<TypeError>();
+            root->state.error<TypeError>("'%s' is not a string", getAttrPathStr()).debugThrow();
         }
     }

     auto & v = forceValue();

     if (v.type() != nString && v.type() != nPath)
-        root->state.error("'%s' is not a string but %s", getAttrPathStr()).debugThrow<TypeError>();
+        root->state.error<TypeError>("'%s' is not a string but %s", getAttrPathStr()).debugThrow();

     return v.type() == nString ? v.c_str() : v.path().to_string();
 }
@@ -616,7 +616,7 @@ string_t AttrCursor::getStringWithContext()
                 return *s;
             }
         } else
-            root->state.error("'%s' is not a string", getAttrPathStr()).debugThrow<TypeError>();
+            root->state.error<TypeError>("'%s' is not a string", getAttrPathStr()).debugThrow();
         }
     }

@@ -630,7 +630,7 @@ string_t AttrCursor::getStringWithContext()
     else if (v.type() == nPath)
         return {v.path().to_string(), {}};
     else
-        root->state.error("'%s' is not a string but %s", getAttrPathStr()).debugThrow<TypeError>();
+        root->state.error<TypeError>("'%s' is not a string but %s", getAttrPathStr()).debugThrow();
 }

 bool AttrCursor::getBool()
@@ -643,14 +643,14 @@ bool AttrCursor::getBool()
             debug("using cached Boolean attribute '%s'", getAttrPathStr());
             return *b;
         } else
-            root->state.error("'%s' is not a Boolean", getAttrPathStr()).debugThrow<TypeError>();
+            root->state.error<TypeError>("'%s' is not a Boolean", getAttrPathStr()).debugThrow();
         }
     }

     auto & v = forceValue();

     if (v.type() != nBool)
-        root->state.error("'%s' is not a Boolean", getAttrPathStr()).debugThrow<TypeError>();
+        root->state.error<TypeError>("'%s' is not a Boolean", getAttrPathStr()).debugThrow();

     return v.boolean;
 }
@@ -665,14 +665,14 @@ NixInt AttrCursor::getInt()
             debug("using cached integer attribute '%s'", getAttrPathStr());
             return i->x;
         } else
-            throw TypeError("'%s' is not an integer", getAttrPathStr());
+            root->state.error<TypeError>("'%s' is not an integer", getAttrPathStr()).debugThrow();
         }
     }

     auto & v = forceValue();

     if (v.type() != nInt)
-        throw TypeError("'%s' is not an integer", getAttrPathStr());
+        root->state.error<TypeError>("'%s' is not an integer", getAttrPathStr()).debugThrow();

     return v.integer;
 }
@@ -687,7 +687,7 @@ std::vector<std::string> AttrCursor::getListOfStrings()
             debug("using cached list of strings attribute '%s'", getAttrPathStr());
             return *l;
         } else
-            throw TypeError("'%s' is not a list of strings", getAttrPathStr());
+            root->state.error<TypeError>("'%s' is not a list of strings", getAttrPathStr()).debugThrow();
         }
     }

@@ -697,7 +697,7 @@ std::vector<std::string> AttrCursor::getListOfStrings()
     root->state.forceValue(v, noPos);

     if (v.type() != nList)
-        throw TypeError("'%s' is not a list", getAttrPathStr());
+        root->state.error<TypeError>("'%s' is not a list", getAttrPathStr()).debugThrow();

     std::vector<std::string> res;

@@ -720,14 +720,14 @@ std::vector<Symbol> AttrCursor::getAttrs()
             debug("using cached attrset attribute '%s'", getAttrPathStr());
             return *attrs;
         } else
-            root->state.error("'%s' is not an attribute set", getAttrPathStr()).debugThrow<TypeError>();
+            root->state.error<TypeError>("'%s' is not an attribute set", getAttrPathStr()).debugThrow();
         }
     }

     auto & v = forceValue();

     if (v.type() != nAttrs)
-        root->state.error("'%s' is not an attribute set", getAttrPathStr()).debugThrow<TypeError>();
+        root->state.error<TypeError>("'%s' is not an attribute set", getAttrPathStr()).debugThrow();

     std::vector<Symbol> attrs;
     for (auto & attr : *getValue().attrs)

src/libexpr/eval-error.cc (new file, 113 lines)
@@ -0,0 +1,113 @@
+#include "eval-error.hh"
+#include "eval.hh"
+#include "value.hh"
+
+namespace nix {
+
+template<class T>
+EvalErrorBuilder<T> & EvalErrorBuilder<T>::withExitStatus(unsigned int exitStatus)
+{
+    error.withExitStatus(exitStatus);
+    return *this;
+}
+
+template<class T>
+EvalErrorBuilder<T> & EvalErrorBuilder<T>::atPos(PosIdx pos)
+{
+    error.err.pos = error.state.positions[pos];
+    return *this;
+}
+
+template<class T>
+EvalErrorBuilder<T> & EvalErrorBuilder<T>::atPos(Value & value, PosIdx fallback)
+{
+    return atPos(value.determinePos(fallback));
+}
+
+template<class T>
+EvalErrorBuilder<T> & EvalErrorBuilder<T>::withTrace(PosIdx pos, const std::string_view text)
+{
+    error.err.traces.push_front(
+        Trace{.pos = error.state.positions[pos], .hint = HintFmt(std::string(text)), .frame = false});
+    return *this;
+}
+
+template<class T>
+EvalErrorBuilder<T> & EvalErrorBuilder<T>::withFrameTrace(PosIdx pos, const std::string_view text)
+{
+    error.err.traces.push_front(
+        Trace{.pos = error.state.positions[pos], .hint = HintFmt(std::string(text)), .frame = true});
+    return *this;
+}
+
+template<class T>
+EvalErrorBuilder<T> & EvalErrorBuilder<T>::withSuggestions(Suggestions & s)
+{
+    error.err.suggestions = s;
+    return *this;
+}
+
+template<class T>
+EvalErrorBuilder<T> & EvalErrorBuilder<T>::withFrame(const Env & env, const Expr & expr)
+{
+    // NOTE: This is abusing side-effects.
+    // TODO: check compatibility with nested debugger calls.
+    // TODO: What side-effects??
+    error.state.debugTraces.push_front(DebugTrace{
+        .pos = error.state.positions[expr.getPos()],
+        .expr = expr,
+        .env = env,
+        .hint = HintFmt("Fake frame for debugging purposes"),
+        .isError = true});
+    return *this;
+}
+
+template<class T>
+EvalErrorBuilder<T> & EvalErrorBuilder<T>::addTrace(PosIdx pos, HintFmt hint, bool frame)
+{
+    error.addTrace(error.state.positions[pos], hint, frame);
+    return *this;
+}
+
+template<class T>
+template<typename... Args>
+EvalErrorBuilder<T> &
+EvalErrorBuilder<T>::addTrace(PosIdx pos, std::string_view formatString, const Args &... formatArgs)
+{
+
+    addTrace(error.state.positions[pos], HintFmt(std::string(formatString), formatArgs...));
+    return *this;
+}
+
+template<class T>
+void EvalErrorBuilder<T>::debugThrow()
+{
+    if (error.state.debugRepl && !error.state.debugTraces.empty()) {
+        const DebugTrace & last = error.state.debugTraces.front();
+        const Env * env = &last.env;
+        const Expr * expr = &last.expr;
+        error.state.runDebugRepl(&error, *env, *expr);
+    }
+
+    // `EvalState` is the only class that can construct an `EvalErrorBuilder`,
+    // and it does so in dynamic storage. This is the final method called on
+    // any such instance and must delete itself before throwing the underlying
+    // error.
+    auto error = std::move(this->error);
+    delete this;
+
+    throw error;
+}
+
+template class EvalErrorBuilder<EvalError>;
+template class EvalErrorBuilder<AssertionError>;
+template class EvalErrorBuilder<ThrownError>;
+template class EvalErrorBuilder<Abort>;
+template class EvalErrorBuilder<TypeError>;
+template class EvalErrorBuilder<UndefinedVarError>;
+template class EvalErrorBuilder<MissingArgumentError>;
+template class EvalErrorBuilder<InfiniteRecursionError>;
+template class EvalErrorBuilder<CachedEvalError>;
+template class EvalErrorBuilder<InvalidPathError>;
+
+}
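The comments in debugThrow above describe the ownership contract: EvalState::error<T>() (see the eval.hh hunk later in this diff) allocates the builder with new and returns it by reference, and debugThrow() is the only place it is freed. A hedged sketch of what a complete use looks like from the caller's side (the message text is illustrative):

    // state.error<T>(...) heap-allocates an EvalErrorBuilder<T>;
    // debugThrow() moves the error out, deletes the builder, and throws it.
    state.error<TypeError>("example message").debugThrow();
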
src/libexpr/eval-error.hh (new file, 104 lines)
@@ -0,0 +1,104 @@
+#pragma once
+
+#include <algorithm>
+
+#include "error.hh"
+#include "pos-idx.hh"
+
+namespace nix {
+
+struct Env;
+struct Expr;
+struct Value;
+
+class EvalState;
+template<class T>
+class EvalErrorBuilder;
+
+class EvalError : public Error
+{
+    template<class T>
+    friend class EvalErrorBuilder;
+public:
+    EvalState & state;
+
+    EvalError(EvalState & state, ErrorInfo && errorInfo)
+        : Error(errorInfo)
+        , state(state)
+    {
+    }
+
+    template<typename... Args>
+    explicit EvalError(EvalState & state, const std::string & formatString, const Args &... formatArgs)
+        : Error(formatString, formatArgs...)
+        , state(state)
+    {
+    }
+};
+
+MakeError(ParseError, Error);
+MakeError(AssertionError, EvalError);
+MakeError(ThrownError, AssertionError);
+MakeError(Abort, EvalError);
+MakeError(TypeError, EvalError);
+MakeError(UndefinedVarError, EvalError);
+MakeError(MissingArgumentError, EvalError);
+MakeError(CachedEvalError, EvalError);
+MakeError(InfiniteRecursionError, EvalError);
+
+struct InvalidPathError : public EvalError
+{
+public:
+    Path path;
+    InvalidPathError(EvalState & state, const Path & path)
+        : EvalError(state, "path '%s' is not valid", path)
+    {
+    }
+};
+
+/**
+ * `EvalErrorBuilder`s may only be constructed by `EvalState`. The `debugThrow`
+ * method must be the final method in any such `EvalErrorBuilder` usage, and it
+ * handles deleting the object.
+ */
+template<class T>
+class EvalErrorBuilder final
+{
+    friend class EvalState;
+
+    template<typename... Args>
+    explicit EvalErrorBuilder(EvalState & state, const Args &... args)
+        : error(T(state, args...))
+    {
+    }
+
+public:
+    T error;
+
+    [[nodiscard, gnu::noinline]] EvalErrorBuilder<T> & withExitStatus(unsigned int exitStatus);
+
+    [[nodiscard, gnu::noinline]] EvalErrorBuilder<T> & atPos(PosIdx pos);
+
+    [[nodiscard, gnu::noinline]] EvalErrorBuilder<T> & atPos(Value & value, PosIdx fallback = noPos);
+
+    [[nodiscard, gnu::noinline]] EvalErrorBuilder<T> & withTrace(PosIdx pos, const std::string_view text);
+
+    [[nodiscard, gnu::noinline]] EvalErrorBuilder<T> & withFrameTrace(PosIdx pos, const std::string_view text);
+
+    [[nodiscard, gnu::noinline]] EvalErrorBuilder<T> & withSuggestions(Suggestions & s);
+
+    [[nodiscard, gnu::noinline]] EvalErrorBuilder<T> & withFrame(const Env & e, const Expr & ex);
+
+    [[nodiscard, gnu::noinline]] EvalErrorBuilder<T> & addTrace(PosIdx pos, HintFmt hint, bool frame = false);
+
+    template<typename... Args>
+    [[nodiscard, gnu::noinline]] EvalErrorBuilder<T> &
+    addTrace(PosIdx pos, std::string_view formatString, const Args &... formatArgs);
+
+    /**
+     * Delete the `EvalErrorBuilder` and throw the underlying exception.
+     */
+    [[gnu::noinline, gnu::noreturn]] void debugThrow();
+};
+
+}

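A call-site sketch of the fluent interface declared above, modeled on the call sites that appear throughout this diff; the variable names and message text are illustrative, and the snippet assumes it runs inside an EvalState member:

    // Illustrative call site: build the error, attach context, then throw.
    if (v.type() != nAttrs)
        error<TypeError>(
            "expected a set but found %1%: %2%",
            showType(v),
            ValuePrinter(*this, v, errorPrintOptions)
        ).atPos(pos).withFrame(env, expr).debugThrow();  // debugThrow() must come last
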
@ -1,7 +1,9 @@
|
|||
#pragma once
|
||||
///@file
|
||||
|
||||
#include "print.hh"
|
||||
#include "eval.hh"
|
||||
#include "eval-error.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
|
@ -114,7 +116,11 @@ inline void EvalState::forceAttrs(Value & v, Callable getPos, std::string_view e
|
|||
PosIdx pos = getPos();
|
||||
forceValue(v, pos);
|
||||
if (v.type() != nAttrs) {
|
||||
error("value is %1% while a set was expected", showType(v)).withTrace(pos, errorCtx).debugThrow<TypeError>();
|
||||
error<TypeError>(
|
||||
"expected a set but found %1%: %2%",
|
||||
showType(v),
|
||||
ValuePrinter(*this, v, errorPrintOptions)
|
||||
).withTrace(pos, errorCtx).debugThrow();
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -124,7 +130,11 @@ inline void EvalState::forceList(Value & v, const PosIdx pos, std::string_view e
|
|||
{
|
||||
forceValue(v, pos);
|
||||
if (!v.isList()) {
|
||||
error("value is %1% while a list was expected", showType(v)).withTrace(pos, errorCtx).debugThrow<TypeError>();
|
||||
error<TypeError>(
|
||||
"expected a list but found %1%: %2%",
|
||||
showType(v),
|
||||
ValuePrinter(*this, v, errorPrintOptions)
|
||||
).withTrace(pos, errorCtx).debugThrow();
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -2,6 +2,7 @@
|
|||
#include "eval-settings.hh"
|
||||
#include "hash.hh"
|
||||
#include "primops.hh"
|
||||
#include "print-options.hh"
|
||||
#include "types.hh"
|
||||
#include "util.hh"
|
||||
#include "store-api.hh"
|
||||
|
@ -20,6 +21,9 @@
|
|||
#include "gc-small-vector.hh"
|
||||
#include "url.hh"
|
||||
#include "fetch-to-store.hh"
|
||||
#include "tarball.hh"
|
||||
#include "flake/flakeref.hh"
|
||||
#include "parser-tab.hh"
|
||||
|
||||
#include <algorithm>
|
||||
#include <chrono>
|
||||
|
@ -29,9 +33,9 @@
|
|||
#include <unistd.h>
|
||||
#include <sys/time.h>
|
||||
#include <sys/resource.h>
|
||||
#include <iostream>
|
||||
#include <fstream>
|
||||
#include <functional>
|
||||
#include <iostream>
|
||||
|
||||
#include <sys/resource.h>
|
||||
#include <nlohmann/json.hpp>
|
||||
|
@ -335,46 +339,6 @@ void initGC()
|
|||
gcInitialised = true;
|
||||
}
|
||||
|
||||
|
||||
ErrorBuilder & ErrorBuilder::atPos(PosIdx pos)
|
||||
{
|
||||
info.errPos = state.positions[pos];
|
||||
return *this;
|
||||
}
|
||||
|
||||
ErrorBuilder & ErrorBuilder::withTrace(PosIdx pos, const std::string_view text)
|
||||
{
|
||||
info.traces.push_front(Trace{ .pos = state.positions[pos], .hint = hintformat(std::string(text)), .frame = false });
|
||||
return *this;
|
||||
}
|
||||
|
||||
ErrorBuilder & ErrorBuilder::withFrameTrace(PosIdx pos, const std::string_view text)
|
||||
{
|
||||
info.traces.push_front(Trace{ .pos = state.positions[pos], .hint = hintformat(std::string(text)), .frame = true });
|
||||
return *this;
|
||||
}
|
||||
|
||||
ErrorBuilder & ErrorBuilder::withSuggestions(Suggestions & s)
|
||||
{
|
||||
info.suggestions = s;
|
||||
return *this;
|
||||
}
|
||||
|
||||
ErrorBuilder & ErrorBuilder::withFrame(const Env & env, const Expr & expr)
|
||||
{
|
||||
// NOTE: This is abusing side-effects.
|
||||
// TODO: check compatibility with nested debugger calls.
|
||||
state.debugTraces.push_front(DebugTrace {
|
||||
.pos = nullptr,
|
||||
.expr = expr,
|
||||
.env = env,
|
||||
.hint = hintformat("Fake frame for debugging purposes"),
|
||||
.isError = true
|
||||
});
|
||||
return *this;
|
||||
}
|
||||
|
||||
|
||||
EvalState::EvalState(
|
||||
const SearchPath & _searchPath,
|
||||
ref<Store> store,
|
||||
|
@ -416,18 +380,28 @@ EvalState::EvalState(
|
|||
, sPath(symbols.create("path"))
|
||||
, sPrefix(symbols.create("prefix"))
|
||||
, sOutputSpecified(symbols.create("outputSpecified"))
|
||||
, exprSymbols{
|
||||
.sub = symbols.create("__sub"),
|
||||
.lessThan = symbols.create("__lessThan"),
|
||||
.mul = symbols.create("__mul"),
|
||||
.div = symbols.create("__div"),
|
||||
.or_ = symbols.create("or"),
|
||||
.findFile = symbols.create("__findFile"),
|
||||
.nixPath = symbols.create("__nixPath"),
|
||||
.body = symbols.create("body"),
|
||||
}
|
||||
, repair(NoRepair)
|
||||
, emptyBindings(0)
|
||||
, rootFS(
|
||||
evalSettings.restrictEval || evalSettings.pureEval
|
||||
? ref<InputAccessor>(AllowListInputAccessor::create(makeFSInputAccessor(CanonPath::root), {},
|
||||
? ref<InputAccessor>(AllowListInputAccessor::create(makeFSInputAccessor(), {},
|
||||
[](const CanonPath & path) -> RestrictedPathError {
|
||||
auto modeInformation = evalSettings.pureEval
|
||||
? "in pure evaluation mode (use '--impure' to override)"
|
||||
: "in restricted mode";
|
||||
throw RestrictedPathError("access to absolute path '%1%' is forbidden %2%", path, modeInformation);
|
||||
}))
|
||||
: makeFSInputAccessor(CanonPath::root))
|
||||
: makeFSInputAccessor())
|
||||
, corepkgsFS(makeMemoryInputAccessor())
|
||||
, internalFS(makeMemoryInputAccessor())
|
||||
, derivationInternal{corepkgsFS->addFile(
|
||||
|
@ -730,7 +704,8 @@ void printEnvBindings(const SymbolTable & st, const StaticEnv & se, const Env &
|
|||
if (se.up && env.up) {
|
||||
std::cout << "static: ";
|
||||
printStaticEnvBindings(st, se);
|
||||
printWithBindings(st, env);
|
||||
if (se.isWith)
|
||||
printWithBindings(st, env);
|
||||
std::cout << std::endl;
|
||||
printEnvBindings(st, *se.up, *env.up, ++lvl);
|
||||
} else {
|
||||
|
@ -742,7 +717,8 @@ void printEnvBindings(const SymbolTable & st, const StaticEnv & se, const Env &
|
|||
std::cout << st[i.first] << " ";
|
||||
std::cout << ANSI_NORMAL;
|
||||
std::cout << std::endl;
|
||||
printWithBindings(st, env); // probably nothing there for the top level.
|
||||
if (se.isWith)
|
||||
printWithBindings(st, env); // probably nothing there for the top level.
|
||||
std::cout << std::endl;
|
||||
|
||||
}
|
||||
|
@ -764,7 +740,7 @@ void mapStaticEnvBindings(const SymbolTable & st, const StaticEnv & se, const En
|
|||
if (env.up && se.up) {
|
||||
mapStaticEnvBindings(st, *se.up, *env.up, vm);
|
||||
|
||||
if (!env.values[0]->isThunk()) {
|
||||
if (se.isWith && !env.values[0]->isThunk()) {
|
||||
// add 'with' bindings.
|
||||
Bindings::iterator j = env.values[0]->attrs->begin();
|
||||
while (j != env.values[0]->attrs->end()) {
|
||||
|
@ -797,7 +773,7 @@ void EvalState::runDebugRepl(const Error * error, const Env & env, const Expr &
|
|||
? std::make_unique<DebugTraceStacker>(
|
||||
*this,
|
||||
DebugTrace {
|
||||
.pos = error->info().errPos ? error->info().errPos : positions[expr.getPos()],
|
||||
.pos = error->info().pos ? error->info().pos : positions[expr.getPos()],
|
||||
.expr = expr,
|
||||
.env = env,
|
||||
.hint = error->info().msg,
|
||||
|
@ -807,12 +783,10 @@ void EvalState::runDebugRepl(const Error * error, const Env & env, const Expr &
|
|||
|
||||
if (error)
|
||||
{
|
||||
printError("%s\n\n", error->what());
|
||||
printError("%s\n", error->what());
|
||||
|
||||
if (trylevel > 0 && error->info().level != lvlInfo)
|
||||
printError("This exception occurred in a 'tryEval' call. Use " ANSI_GREEN "--ignore-try" ANSI_NORMAL " to skip these.\n");
|
||||
|
||||
printError(ANSI_BOLD "Starting REPL to allow you to inspect the current state of the evaluator.\n" ANSI_NORMAL);
|
||||
}
|
||||
|
||||
auto se = getStaticEnv(expr);
|
||||
|
@ -829,23 +803,23 @@ void EvalState::addErrorTrace(Error & e, const char * s, const std::string & s2)
|
|||
|
||||
void EvalState::addErrorTrace(Error & e, const PosIdx pos, const char * s, const std::string & s2, bool frame) const
|
||||
{
|
||||
e.addTrace(positions[pos], hintfmt(s, s2), frame);
|
||||
e.addTrace(positions[pos], HintFmt(s, s2), frame);
|
||||
}
|
||||
|
||||
template<typename... Args>
|
||||
static std::unique_ptr<DebugTraceStacker> makeDebugTraceStacker(
|
||||
EvalState & state,
|
||||
Expr & expr,
|
||||
Env & env,
|
||||
std::shared_ptr<Pos> && pos,
|
||||
const char * s,
|
||||
const std::string & s2)
|
||||
const Args & ... formatArgs)
|
||||
{
|
||||
return std::make_unique<DebugTraceStacker>(state,
|
||||
DebugTrace {
|
||||
.pos = std::move(pos),
|
||||
.expr = expr,
|
||||
.env = env,
|
||||
.hint = hintfmt(s, s2),
|
||||
.hint = HintFmt(formatArgs...),
|
||||
.isError = false
|
||||
});
|
||||
}
|
||||
|
@ -916,7 +890,7 @@ inline Value * EvalState::lookupVar(Env * env, const ExprVar & var, bool noEval)
|
|||
return j->value;
|
||||
}
|
||||
if (!fromWith->parentWith)
|
||||
error("undefined variable '%1%'", symbols[var.name]).atPos(var.pos).withFrame(*env, var).debugThrow<UndefinedVarError>();
|
||||
error<UndefinedVarError>("undefined variable '%1%'", symbols[var.name]).atPos(var.pos).withFrame(*env, var).debugThrow();
|
||||
for (size_t l = fromWith->prevWith; l; --l, env = env->up) ;
|
||||
fromWith = fromWith->parentWith;
|
||||
}
|
||||
|
@ -1122,7 +1096,7 @@ void EvalState::evalFile(const SourcePath & path, Value & v, bool mustBeTrivial)
|
|||
// computation.
|
||||
if (mustBeTrivial &&
|
||||
!(dynamic_cast<ExprAttrs *>(e)))
|
||||
error("file '%s' must be an attribute set", path).debugThrow<EvalError>();
|
||||
error<EvalError>("file '%s' must be an attribute set", path).debugThrow();
|
||||
eval(e, v);
|
||||
} catch (Error & e) {
|
||||
addErrorTrace(e, "while evaluating the file '%1%':", resolvedPath.to_string());
|
||||
|
@ -1153,7 +1127,11 @@ inline bool EvalState::evalBool(Env & env, Expr * e, const PosIdx pos, std::stri
|
|||
Value v;
|
||||
e->eval(*this, env, v);
|
||||
if (v.type() != nBool)
|
||||
error("value is %1% while a Boolean was expected", showType(v)).withFrame(env, *e).debugThrow<TypeError>();
|
||||
error<TypeError>(
|
||||
"expected a Boolean but found %1%: %2%",
|
||||
showType(v),
|
||||
ValuePrinter(*this, v, errorPrintOptions)
|
||||
).atPos(pos).withFrame(env, *e).debugThrow();
|
||||
return v.boolean;
|
||||
} catch (Error & e) {
|
||||
e.addTrace(positions[pos], errorCtx);
|
||||
|
@ -1167,7 +1145,11 @@ inline void EvalState::evalAttrs(Env & env, Expr * e, Value & v, const PosIdx po
|
|||
try {
|
||||
e->eval(*this, env, v);
|
||||
if (v.type() != nAttrs)
|
||||
error("value is %1% while a set was expected", showType(v)).withFrame(env, *e).debugThrow<TypeError>();
|
||||
error<TypeError>(
|
||||
"expected a set but found %1%: %2%",
|
||||
showType(v),
|
||||
ValuePrinter(*this, v, errorPrintOptions)
|
||||
).withFrame(env, *e).debugThrow();
|
||||
} catch (Error & e) {
|
||||
e.addTrace(positions[pos], errorCtx);
|
||||
throw;
|
||||
|
@ -1276,7 +1258,7 @@ void ExprAttrs::eval(EvalState & state, Env & env, Value & v)
|
|||
auto nameSym = state.symbols.create(nameVal.string_view());
|
||||
Bindings::iterator j = v.attrs->find(nameSym);
|
||||
if (j != v.attrs->end())
|
||||
state.error("dynamic attribute '%1%' already defined at %2%", state.symbols[nameSym], state.positions[j->pos]).atPos(i.pos).withFrame(env, *this).debugThrow<EvalError>();
|
||||
state.error<EvalError>("dynamic attribute '%1%' already defined at %2%", state.symbols[nameSym], state.positions[j->pos]).atPos(i.pos).withFrame(env, *this).debugThrow();
|
||||
|
||||
i.valueExpr->setName(nameSym);
|
||||
/* Keep sorted order so find can catch duplicates */
|
||||
|
@ -1302,6 +1284,19 @@ void ExprLet::eval(EvalState & state, Env & env, Value & v)
|
|||
for (auto & i : attrs->attrs)
|
||||
env2.values[displ++] = i.second.e->maybeThunk(state, i.second.inherited ? env : env2);
|
||||
|
||||
auto dts = state.debugRepl
|
||||
? makeDebugTraceStacker(
|
||||
state,
|
||||
*this,
|
||||
env2,
|
||||
getPos()
|
||||
? std::make_shared<Pos>(state.positions[getPos()])
|
||||
: nullptr,
|
||||
"while evaluating a '%1%' expression",
|
||||
"let"
|
||||
)
|
||||
: nullptr;
|
||||
|
||||
body->eval(state, env2, v);
|
||||
}
|
||||
|
||||
|
@ -1388,8 +1383,8 @@ void ExprSelect::eval(EvalState & state, Env & env, Value & v)
|
|||
for (auto & attr : *vAttrs->attrs)
|
||||
allAttrNames.insert(state.symbols[attr.name]);
|
||||
auto suggestions = Suggestions::bestMatches(allAttrNames, state.symbols[name]);
|
||||
state.error("attribute '%1%' missing", state.symbols[name])
|
||||
.atPos(pos).withSuggestions(suggestions).withFrame(env, *this).debugThrow<EvalError>();
|
||||
state.error<EvalError>("attribute '%1%' missing", state.symbols[name])
|
||||
.atPos(pos).withSuggestions(suggestions).withFrame(env, *this).debugThrow();
|
||||
}
|
||||
}
|
||||
vAttrs = j->value;
|
||||
|
@ -1462,7 +1457,7 @@ public:
|
|||
void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value & vRes, const PosIdx pos)
|
||||
{
|
||||
if (callDepth > evalSettings.maxCallDepth)
|
||||
error("stack overflow; max-call-depth exceeded").atPos(pos).template debugThrow<EvalError>();
|
||||
error<EvalError>("stack overflow; max-call-depth exceeded").atPos(pos).debugThrow();
|
||||
CallDepth _level(callDepth);
|
||||
|
||||
auto trace = evalSettings.traceFunctionCalls
|
||||
|
@ -1520,13 +1515,13 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value &
|
|||
auto j = args[0]->attrs->get(i.name);
|
||||
if (!j) {
|
||||
if (!i.def) {
|
||||
error("function '%1%' called without required argument '%2%'",
|
||||
error<TypeError>("function '%1%' called without required argument '%2%'",
|
||||
(lambda.name ? std::string(symbols[lambda.name]) : "anonymous lambda"),
|
||||
symbols[i.name])
|
||||
.atPos(lambda.pos)
|
||||
.withTrace(pos, "from call site")
|
||||
.withFrame(*fun.lambda.env, lambda)
|
||||
.debugThrow<TypeError>();
|
||||
.debugThrow();
|
||||
}
|
||||
env2.values[displ++] = i.def->maybeThunk(*this, env2);
|
||||
} else {
|
||||
|
@ -1546,14 +1541,14 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value &
|
|||
for (auto & formal : lambda.formals->formals)
|
||||
formalNames.insert(symbols[formal.name]);
|
||||
auto suggestions = Suggestions::bestMatches(formalNames, symbols[i.name]);
|
||||
error("function '%1%' called with unexpected argument '%2%'",
|
||||
error<TypeError>("function '%1%' called with unexpected argument '%2%'",
|
||||
(lambda.name ? std::string(symbols[lambda.name]) : "anonymous lambda"),
|
||||
symbols[i.name])
|
||||
.atPos(lambda.pos)
|
||||
.withTrace(pos, "from call site")
|
||||
.withSuggestions(suggestions)
|
||||
.withFrame(*fun.lambda.env, lambda)
|
||||
.debugThrow<TypeError>();
|
||||
.debugThrow();
|
||||
}
|
||||
abort(); // can't happen
|
||||
}
|
||||
|
@ -1685,7 +1680,12 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value &
|
|||
}
|
||||
|
||||
else
|
||||
error("attempt to call something which is not a function but %1%", showType(vCur)).atPos(pos).debugThrow<TypeError>();
|
||||
error<TypeError>(
|
||||
"attempt to call something which is not a function but %1%: %2%",
|
||||
showType(vCur),
|
||||
ValuePrinter(*this, vCur, errorPrintOptions))
|
||||
.atPos(pos)
|
||||
.debugThrow();
|
||||
}
|
||||
|
||||
vRes = vCur;
|
||||
|
@ -1694,6 +1694,18 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value &
|
|||
|
||||
void ExprCall::eval(EvalState & state, Env & env, Value & v)
|
||||
{
|
||||
auto dts = state.debugRepl
|
||||
? makeDebugTraceStacker(
|
||||
state,
|
||||
*this,
|
||||
env,
|
||||
getPos()
|
||||
? std::make_shared<Pos>(state.positions[getPos()])
|
||||
: nullptr,
|
||||
"while calling a function"
|
||||
)
|
||||
: nullptr;
|
||||
|
||||
Value vFun;
|
||||
fun->eval(state, env, vFun);
|
||||
|
||||
|
@ -1755,12 +1767,12 @@ void EvalState::autoCallFunction(Bindings & args, Value & fun, Value & res)
|
|||
if (j != args.end()) {
|
||||
attrs.insert(*j);
|
||||
} else if (!i.def) {
|
||||
error(R"(cannot evaluate a function that has an argument without a value ('%1%')
|
||||
error<MissingArgumentError>(R"(cannot evaluate a function that has an argument without a value ('%1%')
|
||||
Nix attempted to evaluate a function as a top level expression; in
|
||||
this case it must have its arguments supplied either by default
|
||||
values, or passed explicitly with '--arg' or '--argstr'. See
|
||||
https://nixos.org/manual/nix/stable/language/constructs.html#functions.)", symbols[i.name])
|
||||
.atPos(i.pos).withFrame(*fun.lambda.env, *fun.lambda.fun).debugThrow<MissingArgumentError>();
|
||||
.atPos(i.pos).withFrame(*fun.lambda.env, *fun.lambda.fun).debugThrow();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1791,7 +1803,7 @@ void ExprAssert::eval(EvalState & state, Env & env, Value & v)
|
|||
if (!state.evalBool(env, cond, pos, "in the condition of the assert statement")) {
|
||||
std::ostringstream out;
|
||||
cond->show(state.symbols, out);
|
||||
state.error("assertion '%1%' failed", out.str()).atPos(pos).withFrame(env, *this).debugThrow<AssertionError>();
|
||||
state.error<AssertionError>("assertion '%1%' failed", out.str()).atPos(pos).withFrame(env, *this).debugThrow();
|
||||
}
|
||||
body->eval(state, env, v);
|
||||
}
|
||||
|
@ -1969,14 +1981,14 @@ void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v)
|
|||
nf = n;
|
||||
nf += vTmp.fpoint;
|
||||
} else
|
||||
state.error("cannot add %1% to an integer", showType(vTmp)).atPos(i_pos).withFrame(env, *this).debugThrow<EvalError>();
|
||||
state.error<EvalError>("cannot add %1% to an integer", showType(vTmp)).atPos(i_pos).withFrame(env, *this).debugThrow();
|
||||
} else if (firstType == nFloat) {
|
||||
if (vTmp.type() == nInt) {
|
||||
nf += vTmp.integer;
|
||||
} else if (vTmp.type() == nFloat) {
|
||||
nf += vTmp.fpoint;
|
||||
} else
|
||||
state.error("cannot add %1% to a float", showType(vTmp)).atPos(i_pos).withFrame(env, *this).debugThrow<EvalError>();
|
||||
state.error<EvalError>("cannot add %1% to a float", showType(vTmp)).atPos(i_pos).withFrame(env, *this).debugThrow();
|
||||
} else {
|
||||
if (s.empty()) s.reserve(es->size());
|
||||
/* skip canonization of first path, which would only be not
|
||||
|
@ -1998,7 +2010,7 @@ void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v)
|
|||
v.mkFloat(nf);
|
||||
else if (firstType == nPath) {
|
||||
if (!context.empty())
|
||||
state.error("a string that refers to a store path cannot be appended to a path").atPos(pos).withFrame(env, *this).debugThrow<EvalError>();
|
||||
state.error<EvalError>("a string that refers to a store path cannot be appended to a path").atPos(pos).withFrame(env, *this).debugThrow();
|
||||
v.mkPath(state.rootPath(CanonPath(canonPath(str()))));
|
||||
} else
|
||||
v.mkStringMove(c_str(), context);
|
||||
|
@ -2013,8 +2025,9 @@ void ExprPos::eval(EvalState & state, Env & env, Value & v)
|
|||
|
||||
void ExprBlackHole::eval(EvalState & state, Env & env, Value & v)
|
||||
{
|
||||
state.error("infinite recursion encountered")
|
||||
.debugThrow<InfiniteRecursionError>();
|
||||
state.error<InfiniteRecursionError>("infinite recursion encountered")
|
||||
.atPos(v.determinePos(noPos))
|
||||
.debugThrow();
|
||||
}
|
||||
|
||||
// always force this to be separate, otherwise forceValue may inline it and take
|
||||
|
@ -2028,7 +2041,7 @@ void EvalState::tryFixupBlackHolePos(Value & v, PosIdx pos)
|
|||
try {
|
||||
std::rethrow_exception(e);
|
||||
} catch (InfiniteRecursionError & e) {
|
||||
e.err.errPos = positions[pos];
|
||||
e.atPos(positions[pos]);
|
||||
} catch (...) {
|
||||
}
|
||||
}
|
||||
|
@ -2076,12 +2089,18 @@ NixInt EvalState::forceInt(Value & v, const PosIdx pos, std::string_view errorCt
|
|||
try {
|
||||
forceValue(v, pos);
|
||||
if (v.type() != nInt)
|
||||
error("value is %1% while an integer was expected", showType(v)).debugThrow<TypeError>();
|
||||
error<TypeError>(
|
||||
"expected an integer but found %1%: %2%",
|
||||
showType(v),
|
||||
ValuePrinter(*this, v, errorPrintOptions)
|
||||
).atPos(pos).debugThrow();
|
||||
return v.integer;
|
||||
} catch (Error & e) {
|
||||
e.addTrace(positions[pos], errorCtx);
|
||||
throw;
|
||||
}
|
||||
|
||||
return v.integer;
|
||||
}
|
||||
|
||||
|
||||
|
@ -2092,7 +2111,11 @@ NixFloat EvalState::forceFloat(Value & v, const PosIdx pos, std::string_view err
|
|||
if (v.type() == nInt)
|
||||
return v.integer;
|
||||
else if (v.type() != nFloat)
|
||||
error("value is %1% while a float was expected", showType(v)).debugThrow<TypeError>();
|
||||
error<TypeError>(
|
||||
"expected a float but found %1%: %2%",
|
||||
showType(v),
|
||||
ValuePrinter(*this, v, errorPrintOptions)
|
||||
).atPos(pos).debugThrow();
|
||||
return v.fpoint;
|
||||
} catch (Error & e) {
|
||||
e.addTrace(positions[pos], errorCtx);
|
||||
|
@ -2106,12 +2129,18 @@ bool EvalState::forceBool(Value & v, const PosIdx pos, std::string_view errorCtx
|
|||
try {
|
||||
forceValue(v, pos);
|
||||
if (v.type() != nBool)
|
||||
error("value is %1% while a Boolean was expected", showType(v)).debugThrow<TypeError>();
|
||||
error<TypeError>(
|
||||
"expected a Boolean but found %1%: %2%",
|
||||
showType(v),
|
||||
ValuePrinter(*this, v, errorPrintOptions)
|
||||
).atPos(pos).debugThrow();
|
||||
return v.boolean;
|
||||
} catch (Error & e) {
|
||||
e.addTrace(positions[pos], errorCtx);
|
||||
throw;
|
||||
}
|
||||
|
||||
return v.boolean;
|
||||
}
|
||||
|
||||
|
||||
|
@ -2126,7 +2155,11 @@ void EvalState::forceFunction(Value & v, const PosIdx pos, std::string_view erro
|
|||
try {
|
||||
forceValue(v, pos);
|
||||
if (v.type() != nFunction && !isFunctor(v))
|
||||
error("value is %1% while a function was expected", showType(v)).debugThrow<TypeError>();
|
||||
error<TypeError>(
|
||||
"expected a function but found %1%: %2%",
|
||||
showType(v),
|
||||
ValuePrinter(*this, v, errorPrintOptions)
|
||||
).atPos(pos).debugThrow();
|
||||
} catch (Error & e) {
|
||||
e.addTrace(positions[pos], errorCtx);
|
||||
throw;
|
||||
|
@ -2139,7 +2172,11 @@ std::string_view EvalState::forceString(Value & v, const PosIdx pos, std::string
|
|||
try {
|
||||
forceValue(v, pos);
|
||||
if (v.type() != nString)
|
||||
error("value is %1% while a string was expected", showType(v)).debugThrow<TypeError>();
|
||||
error<TypeError>(
|
||||
"expected a string but found %1%: %2%",
|
||||
showType(v),
|
||||
ValuePrinter(*this, v, errorPrintOptions)
|
||||
).atPos(pos).debugThrow();
|
||||
return v.string_view();
|
||||
} catch (Error & e) {
|
||||
e.addTrace(positions[pos], errorCtx);
|
||||
|
@ -2168,7 +2205,7 @@ std::string_view EvalState::forceStringNoCtx(Value & v, const PosIdx pos, std::s
|
|||
{
|
||||
auto s = forceString(v, pos, errorCtx);
|
||||
if (v.context()) {
|
||||
error("the string '%1%' is not allowed to refer to a store path (such as '%2%')", v.string_view(), v.context()[0]).withTrace(pos, errorCtx).debugThrow<EvalError>();
|
||||
error<EvalError>("the string '%1%' is not allowed to refer to a store path (such as '%2%')", v.string_view(), v.context()[0]).withTrace(pos, errorCtx).debugThrow();
|
||||
}
|
||||
return s;
|
||||
}
|
||||
|
@ -2233,9 +2270,13 @@ BackedStringView EvalState::coerceToString(
|
|||
return std::move(*maybeString);
|
||||
auto i = v.attrs->find(sOutPath);
|
||||
if (i == v.attrs->end()) {
|
||||
error("cannot coerce %1% to a string", showType(v))
|
||||
error<TypeError>(
|
||||
"cannot coerce %1% to a string: %2%",
|
||||
showType(v),
|
||||
ValuePrinter(*this, v, errorPrintOptions)
|
||||
)
|
||||
.withTrace(pos, errorCtx)
|
||||
.debugThrow<TypeError>();
|
||||
.debugThrow();
|
||||
}
|
||||
return coerceToString(pos, *i->value, context, errorCtx,
|
||||
coerceMore, copyToStore, canonicalizePath);
|
||||
|
@ -2243,7 +2284,7 @@ BackedStringView EvalState::coerceToString(
|
|||
|
||||
if (v.type() == nExternal) {
|
||||
try {
|
||||
return v.external->coerceToString(positions[pos], context, coerceMore, copyToStore);
|
||||
return v.external->coerceToString(*this, pos, context, coerceMore, copyToStore);
|
||||
} catch (Error & e) {
|
||||
e.addTrace(nullptr, errorCtx);
|
||||
throw;
|
||||
|
@ -2279,23 +2320,26 @@ BackedStringView EvalState::coerceToString(
|
|||
}
|
||||
}
|
||||
|
||||
error("cannot coerce %1% to a string", showType(v))
|
||||
error<TypeError>("cannot coerce %1% to a string: %2%",
|
||||
showType(v),
|
||||
ValuePrinter(*this, v, errorPrintOptions)
|
||||
)
|
||||
.withTrace(pos, errorCtx)
|
||||
.debugThrow<TypeError>();
|
||||
.debugThrow();
|
||||
}
|
||||
|
||||
|
||||
StorePath EvalState::copyPathToStore(NixStringContext & context, const SourcePath & path)
|
||||
{
|
||||
if (nix::isDerivation(path.path.abs()))
|
||||
error("file names are not allowed to end in '%1%'", drvExtension).debugThrow<EvalError>();
|
||||
error<EvalError>("file names are not allowed to end in '%1%'", drvExtension).debugThrow();
|
||||
|
||||
auto i = srcToStore.find(path);
|
||||
|
||||
auto dstPath = i != srcToStore.end()
|
||||
? i->second
|
||||
: [&]() {
|
||||
auto dstPath = fetchToStore(*store, path, path.baseName(), FileIngestionMethod::Recursive, nullptr, repair);
|
||||
auto dstPath = fetchToStore(*store, path.resolveSymlinks(), path.baseName(), FileIngestionMethod::Recursive, nullptr, repair);
|
||||
allowPath(dstPath);
|
||||
srcToStore.insert_or_assign(path, dstPath);
|
||||
printMsg(lvlChatty, "copied source '%1%' -> '%2%'", path, store->printStorePath(dstPath));
|
||||
|
@ -2337,7 +2381,7 @@ SourcePath EvalState::coerceToPath(const PosIdx pos, Value & v, NixStringContext
|
|||
relative to the root filesystem. */
|
||||
auto path = coerceToString(pos, v, context, errorCtx, false, false, true).toOwned();
|
||||
if (path == "" || path[0] != '/')
|
||||
error("string '%1%' doesn't represent an absolute path", path).withTrace(pos, errorCtx).debugThrow<EvalError>();
|
||||
error<EvalError>("string '%1%' doesn't represent an absolute path", path).withTrace(pos, errorCtx).debugThrow();
|
||||
return rootPath(CanonPath(path));
|
||||
}
|
||||
|
||||
|
@ -2347,7 +2391,7 @@ StorePath EvalState::coerceToStorePath(const PosIdx pos, Value & v, NixStringCon
|
|||
auto path = coerceToString(pos, v, context, errorCtx, false, false, true).toOwned();
|
||||
if (auto storePath = store->maybeParseStorePath(path))
|
||||
return *storePath;
|
||||
error("path '%1%' is not in the Nix store", path).withTrace(pos, errorCtx).debugThrow<EvalError>();
|
||||
error<EvalError>("path '%1%' is not in the Nix store", path).withTrace(pos, errorCtx).debugThrow();
|
||||
}
|
||||
|
||||
|
||||
|
@ -2357,18 +2401,18 @@ std::pair<SingleDerivedPath, std::string_view> EvalState::coerceToSingleDerivedP
|
|||
auto s = forceString(v, context, pos, errorCtx);
|
||||
auto csize = context.size();
|
||||
if (csize != 1)
|
||||
error(
|
||||
error<EvalError>(
|
||||
"string '%s' has %d entries in its context. It should only have exactly one entry",
|
||||
s, csize)
|
||||
.withTrace(pos, errorCtx).debugThrow<EvalError>();
|
||||
.withTrace(pos, errorCtx).debugThrow();
|
||||
auto derivedPath = std::visit(overloaded {
|
||||
[&](NixStringContextElem::Opaque && o) -> SingleDerivedPath {
|
||||
return std::move(o);
|
||||
},
|
||||
[&](NixStringContextElem::DrvDeep &&) -> SingleDerivedPath {
|
||||
error(
|
||||
error<EvalError>(
|
||||
"string '%s' has a context which refers to a complete source and binary closure. This is not supported at this time",
|
||||
s).withTrace(pos, errorCtx).debugThrow<EvalError>();
|
||||
s).withTrace(pos, errorCtx).debugThrow();
|
||||
},
|
||||
[&](NixStringContextElem::Built && b) -> SingleDerivedPath {
|
||||
return std::move(b);
|
||||
|
@ -2391,16 +2435,16 @@ SingleDerivedPath EvalState::coerceToSingleDerivedPath(const PosIdx pos, Value &
|
|||
error message. */
|
||||
std::visit(overloaded {
|
||||
[&](const SingleDerivedPath::Opaque & o) {
|
||||
error(
|
||||
error<EvalError>(
|
||||
"path string '%s' has context with the different path '%s'",
|
||||
s, sExpected)
|
||||
.withTrace(pos, errorCtx).debugThrow<EvalError>();
|
||||
.withTrace(pos, errorCtx).debugThrow();
|
||||
},
|
||||
[&](const SingleDerivedPath::Built & b) {
|
||||
error(
|
||||
error<EvalError>(
|
||||
"string '%s' has context with the output '%s' from derivation '%s', but the string is not the right placeholder for this derivation output. It should be '%s'",
|
||||
s, b.output, b.drvPath->to_string(*store), sExpected)
|
||||
.withTrace(pos, errorCtx).debugThrow<EvalError>();
|
||||
.withTrace(pos, errorCtx).debugThrow();
|
||||
}
|
||||
}, derivedPath.raw());
|
||||
}
|
||||
|
@ -2485,7 +2529,7 @@ bool EvalState::eqValues(Value & v1, Value & v2, const PosIdx pos, std::string_v
|
|||
|
||||
case nThunk: // Must not be left by forceValue
|
||||
default:
|
||||
error("cannot compare %1% with %2%", showType(v1), showType(v2)).withTrace(pos, errorCtx).debugThrow<EvalError>();
|
||||
error<EvalError>("cannot compare %1% with %2%", showType(v1), showType(v2)).withTrace(pos, errorCtx).debugThrow();
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -2636,11 +2680,187 @@ void EvalState::printStatistics()
|
|||
}
|
||||
|
||||
|
||||
std::string ExternalValueBase::coerceToString(const Pos & pos, NixStringContext & context, bool copyMore, bool copyToStore) const
|
||||
SourcePath resolveExprPath(SourcePath path)
|
||||
{
|
||||
throw TypeError({
|
||||
.msg = hintfmt("cannot coerce %1% to a string", showType())
|
||||
});
|
||||
unsigned int followCount = 0, maxFollow = 1024;
|
||||
|
||||
/* If `path' is a symlink, follow it. This is so that relative
|
||||
path references work. */
|
||||
while (!path.path.isRoot()) {
|
||||
// Basic cycle/depth limit to avoid infinite loops.
|
||||
if (++followCount >= maxFollow)
|
||||
throw Error("too many symbolic links encountered while traversing the path '%s'", path);
|
||||
auto p = path.parent().resolveSymlinks() / path.baseName();
|
||||
if (p.lstat().type != InputAccessor::tSymlink) break;
|
||||
path = {path.accessor, CanonPath(p.readLink(), path.path.parent().value_or(CanonPath::root))};
|
||||
}
|
||||
|
||||
/* If `path' refers to a directory, append `/default.nix'. */
|
||||
if (path.resolveSymlinks().lstat().type == InputAccessor::tDirectory)
|
||||
return path / "default.nix";
|
||||
|
||||
return path;
|
||||
}
|
||||
|
||||
|
||||
Expr * EvalState::parseExprFromFile(const SourcePath & path)
|
||||
{
|
||||
return parseExprFromFile(path, staticBaseEnv);
|
||||
}
|
||||
|
||||
|
||||
Expr * EvalState::parseExprFromFile(const SourcePath & path, std::shared_ptr<StaticEnv> & staticEnv)
|
||||
{
|
||||
auto buffer = path.resolveSymlinks().readFile();
|
||||
// readFile hopefully have left some extra space for terminators
|
||||
buffer.append("\0\0", 2);
|
||||
return parse(buffer.data(), buffer.size(), Pos::Origin(path), path.parent(), staticEnv);
|
||||
}
|
||||
|
||||
|
||||
Expr * EvalState::parseExprFromString(std::string s_, const SourcePath & basePath, std::shared_ptr<StaticEnv> & staticEnv)
|
||||
{
|
||||
auto s = make_ref<std::string>(std::move(s_));
|
||||
s->append("\0\0", 2);
|
||||
return parse(s->data(), s->size(), Pos::String{.source = s}, basePath, staticEnv);
|
||||
}
|
||||
|
||||
|
||||
Expr * EvalState::parseExprFromString(std::string s, const SourcePath & basePath)
|
||||
{
|
||||
return parseExprFromString(std::move(s), basePath, staticBaseEnv);
|
||||
}
|
||||
|
||||
|
||||
Expr * EvalState::parseStdin()
|
||||
{
|
||||
//Activity act(*logger, lvlTalkative, "parsing standard input");
|
||||
auto buffer = drainFD(0);
|
||||
// drainFD should have left some extra space for terminators
|
||||
buffer.append("\0\0", 2);
|
||||
auto s = make_ref<std::string>(std::move(buffer));
|
||||
return parse(s->data(), s->size(), Pos::Stdin{.source = s}, rootPath("."), staticBaseEnv);
|
||||
}
|
||||
|
||||
|
||||
SourcePath EvalState::findFile(const std::string_view path)
|
||||
{
|
||||
return findFile(searchPath, path);
|
||||
}
|
||||
|
||||
|
||||
SourcePath EvalState::findFile(const SearchPath & searchPath, const std::string_view path, const PosIdx pos)
|
||||
{
|
||||
for (auto & i : searchPath.elements) {
|
||||
auto suffixOpt = i.prefix.suffixIfPotentialMatch(path);
|
||||
|
||||
if (!suffixOpt) continue;
|
||||
auto suffix = *suffixOpt;
|
||||
|
||||
auto rOpt = resolveSearchPathPath(i.path);
|
||||
if (!rOpt) continue;
|
||||
auto r = *rOpt;
|
||||
|
||||
Path res = suffix == "" ? r : concatStrings(r, "/", suffix);
|
||||
if (pathExists(res)) return rootPath(CanonPath(canonPath(res)));
|
||||
}
|
||||
|
||||
if (hasPrefix(path, "nix/"))
|
||||
return {corepkgsFS, CanonPath(path.substr(3))};
|
||||
|
||||
error<ThrownError>(
|
||||
evalSettings.pureEval
|
||||
? "cannot look up '<%s>' in pure evaluation mode (use '--impure' to override)"
|
||||
: "file '%s' was not found in the Nix search path (add it using $NIX_PATH or -I)",
|
||||
path
|
||||
).atPos(pos).debugThrow();
|
||||
}
|
||||
|
||||
|
||||
std::optional<std::string> EvalState::resolveSearchPathPath(const SearchPath::Path & value0, bool initAccessControl)
|
||||
{
|
||||
auto & value = value0.s;
|
||||
auto i = searchPathResolved.find(value);
|
||||
if (i != searchPathResolved.end()) return i->second;
|
||||
|
||||
std::optional<std::string> res;
|
||||
|
||||
if (EvalSettings::isPseudoUrl(value)) {
|
||||
try {
|
||||
auto storePath = fetchers::downloadTarball(
|
||||
store, EvalSettings::resolvePseudoUrl(value), "source", false).storePath;
|
||||
res = { store->toRealPath(storePath) };
|
||||
} catch (FileTransferError & e) {
|
||||
logWarning({
|
||||
.msg = HintFmt("Nix search path entry '%1%' cannot be downloaded, ignoring", value)
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
else if (hasPrefix(value, "flake:")) {
|
||||
experimentalFeatureSettings.require(Xp::Flakes);
|
||||
auto flakeRef = parseFlakeRef(value.substr(6), {}, true, false);
|
||||
debug("fetching flake search path element '%s''", value);
|
||||
auto storePath = flakeRef.resolve(store).fetchTree(store).first;
|
||||
res = { store->toRealPath(storePath) };
|
||||
}
|
||||
|
||||
else {
|
||||
auto path = absPath(value);
|
||||
|
||||
/* Allow access to paths in the search path. */
|
||||
if (initAccessControl) {
|
||||
allowPath(path);
|
||||
if (store->isInStore(path)) {
|
||||
try {
|
||||
StorePathSet closure;
|
||||
store->computeFSClosure(store->toStorePath(path).first, closure);
|
||||
for (auto & p : closure)
|
||||
allowPath(p);
|
||||
} catch (InvalidPath &) { }
|
||||
}
|
||||
}
|
||||
|
||||
if (pathExists(path))
|
||||
res = { path };
|
||||
else {
|
||||
logWarning({
|
||||
.msg = HintFmt("Nix search path entry '%1%' does not exist, ignoring", value)
|
||||
});
|
||||
res = std::nullopt;
|
||||
}
|
||||
}
|
||||
|
||||
if (res)
|
||||
debug("resolved search path element '%s' to '%s'", value, *res);
|
||||
else
|
||||
debug("failed to resolve search path element '%s'", value);
|
||||
|
||||
searchPathResolved.emplace(value, res);
|
||||
return res;
|
||||
}
|
||||
|
||||
|
||||
Expr * EvalState::parse(
|
||||
char * text,
|
||||
size_t length,
|
||||
Pos::Origin origin,
|
||||
const SourcePath & basePath,
|
||||
std::shared_ptr<StaticEnv> & staticEnv)
|
||||
{
|
||||
auto result = parseExprFromBuf(text, length, origin, basePath, symbols, positions, rootFS, exprSymbols);
|
||||
|
||||
result->bindVars(*this, staticEnv);
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
|
||||
std::string ExternalValueBase::coerceToString(EvalState & state, const PosIdx & pos, NixStringContext & context, bool copyMore, bool copyToStore) const
|
||||
{
|
||||
state.error<TypeError>(
|
||||
"cannot coerce %1% to a string: %2%", showType(), *this
|
||||
).atPos(pos).debugThrow();
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -2,6 +2,7 @@
|
|||
///@file
|
||||
|
||||
#include "attr-set.hh"
|
||||
#include "eval-error.hh"
|
||||
#include "types.hh"
|
||||
#include "value.hh"
|
||||
#include "nixexpr.hh"
|
||||
|
@ -147,49 +148,10 @@ struct DebugTrace {
|
|||
std::shared_ptr<Pos> pos;
|
||||
const Expr & expr;
|
||||
const Env & env;
|
||||
hintformat hint;
|
||||
HintFmt hint;
|
||||
bool isError;
|
||||
};
|
||||
|
||||
void debugError(Error * e, Env & env, Expr & expr);
|
||||
|
||||
class ErrorBuilder
|
||||
{
|
||||
private:
|
||||
EvalState & state;
|
||||
ErrorInfo info;
|
||||
|
||||
ErrorBuilder(EvalState & s, ErrorInfo && i): state(s), info(i) { }
|
||||
|
||||
public:
|
||||
template<typename... Args>
|
||||
[[nodiscard, gnu::noinline]]
|
||||
static ErrorBuilder * create(EvalState & s, const Args & ... args)
|
||||
{
|
||||
return new ErrorBuilder(s, ErrorInfo { .msg = hintfmt(args...) });
|
||||
}
|
||||
|
||||
[[nodiscard, gnu::noinline]]
|
||||
ErrorBuilder & atPos(PosIdx pos);
|
||||
|
||||
[[nodiscard, gnu::noinline]]
|
||||
ErrorBuilder & withTrace(PosIdx pos, const std::string_view text);
|
||||
|
||||
[[nodiscard, gnu::noinline]]
|
||||
ErrorBuilder & withFrameTrace(PosIdx pos, const std::string_view text);
|
||||
|
||||
[[nodiscard, gnu::noinline]]
|
||||
ErrorBuilder & withSuggestions(Suggestions & s);
|
||||
|
||||
[[nodiscard, gnu::noinline]]
|
||||
ErrorBuilder & withFrame(const Env & e, const Expr & ex);
|
||||
|
||||
template<class ErrorType>
|
||||
[[gnu::noinline, gnu::noreturn]]
|
||||
void debugThrow();
|
||||
};
|
||||
|
||||
|
||||
class EvalState : public std::enable_shared_from_this<EvalState>
|
||||
{
|
||||
public:
|
||||
|
@ -207,6 +169,8 @@ public:
|
|||
sPrefix,
|
||||
sOutputSpecified;
|
||||
|
||||
const Expr::AstSymbols exprSymbols;
|
||||
|
||||
/**
|
||||
* If set, force copying files to the Nix store even if they
|
||||
* already exist there.
|
||||
|
@ -272,39 +236,11 @@ public:
|
|||
|
||||
void runDebugRepl(const Error * error, const Env & env, const Expr & expr);
|
||||
|
||||
template<class E>
|
||||
[[gnu::noinline, gnu::noreturn]]
|
||||
void debugThrowLastTrace(E && error)
|
||||
{
|
||||
debugThrow(error, nullptr, nullptr);
|
||||
}
|
||||
|
||||
template<class E>
|
||||
[[gnu::noinline, gnu::noreturn]]
|
||||
void debugThrow(E && error, const Env * env, const Expr * expr)
|
||||
{
|
||||
if (debugRepl && ((env && expr) || !debugTraces.empty())) {
|
||||
if (!env || !expr) {
|
||||
const DebugTrace & last = debugTraces.front();
|
||||
env = &last.env;
|
||||
expr = &last.expr;
|
||||
}
|
||||
runDebugRepl(&error, *env, *expr);
|
||||
}
|
||||
|
||||
throw std::move(error);
|
||||
}
|
||||
|
||||
// This is dangerous, but gets in line with the idea that error creation and
|
||||
// throwing should not allocate on the stack of hot functions.
|
||||
// as long as errors are immediately thrown, it works.
|
||||
ErrorBuilder * errorBuilder;
|
||||
|
||||
template<typename... Args>
|
||||
template<class T, typename... Args>
|
||||
[[nodiscard, gnu::noinline]]
|
||||
ErrorBuilder & error(const Args & ... args) {
|
||||
errorBuilder = ErrorBuilder::create(*this, args...);
|
||||
return *errorBuilder;
|
||||
EvalErrorBuilder<T> & error(const Args & ... args) {
|
||||
// `EvalErrorBuilder::debugThrow` performs the corresponding `delete`.
|
||||
return *new EvalErrorBuilder<T>(*this, args...);
|
||||
}
|
||||
|
||||
private:
|
||||
|
@ -370,6 +306,11 @@ public:
|
|||
*/
|
||||
SourcePath rootPath(CanonPath path);
|
||||
|
||||
/**
|
||||
* Variant which accepts relative paths too.
|
||||
*/
|
||||
SourcePath rootPath(PathView path);
|
||||
|
||||
/**
|
||||
* Allow access to a path.
|
||||
*/
|
||||
|
@ -843,22 +784,6 @@ SourcePath resolveExprPath(SourcePath path);
|
|||
*/
|
||||
bool isAllowedURI(std::string_view uri, const Strings & allowedPaths);
|
||||
|
||||
struct InvalidPathError : EvalError
|
||||
{
|
||||
Path path;
|
||||
InvalidPathError(const Path & path);
|
||||
#ifdef EXCEPTION_NEEDS_THROW_SPEC
|
||||
~InvalidPathError() throw () { };
|
||||
#endif
|
||||
};
|
||||
|
||||
template<class ErrorType>
|
||||
void ErrorBuilder::debugThrow()
|
||||
{
|
||||
// NOTE: We always use the -LastTrace version as we push the new trace in withFrame()
|
||||
state.debugThrowLastTrace(ErrorType(info));
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
#include "eval-inline.hh"
@ -147,15 +147,15 @@ static FlakeInput parseFlakeInput(EvalState & state,
|
|||
NixStringContext emptyContext = {};
|
||||
attrs.emplace(state.symbols[attr.name], printValueAsJSON(state, true, *attr.value, pos, emptyContext).dump());
|
||||
} else
|
||||
throw TypeError("flake input attribute '%s' is %s while a string, Boolean, or integer is expected",
|
||||
state.symbols[attr.name], showType(*attr.value));
|
||||
state.error<TypeError>("flake input attribute '%s' is %s while a string, Boolean, or integer is expected",
|
||||
state.symbols[attr.name], showType(*attr.value)).debugThrow();
|
||||
}
|
||||
#pragma GCC diagnostic pop
|
||||
}
|
||||
} catch (Error & e) {
|
||||
e.addTrace(
|
||||
state.positions[attr.pos],
|
||||
hintfmt("while evaluating flake attribute '%s'", state.symbols[attr.name]));
|
||||
HintFmt("while evaluating flake attribute '%s'", state.symbols[attr.name]));
|
||||
throw;
|
||||
}
|
||||
}
|
||||
|
@ -164,7 +164,7 @@ static FlakeInput parseFlakeInput(EvalState & state,
|
|||
try {
|
||||
input.ref = FlakeRef::fromAttrs(attrs);
|
||||
} catch (Error & e) {
|
||||
e.addTrace(state.positions[pos], hintfmt("while evaluating flake input"));
|
||||
e.addTrace(state.positions[pos], HintFmt("while evaluating flake input"));
|
||||
throw;
|
||||
}
|
||||
else {
|
||||
|
@ -295,15 +295,15 @@ static Flake getFlake(
|
|||
std::vector<std::string> ss;
|
||||
for (auto elem : setting.value->listItems()) {
|
||||
if (elem->type() != nString)
|
||||
throw TypeError("list element in flake configuration setting '%s' is %s while a string is expected",
|
||||
state.symbols[setting.name], showType(*setting.value));
|
||||
state.error<TypeError>("list element in flake configuration setting '%s' is %s while a string is expected",
|
||||
state.symbols[setting.name], showType(*setting.value)).debugThrow();
|
||||
ss.emplace_back(state.forceStringNoCtx(*elem, setting.pos, ""));
|
||||
}
|
||||
flake.config.settings.emplace(state.symbols[setting.name], ss);
|
||||
}
|
||||
else
|
||||
throw TypeError("flake configuration setting '%s' is %s",
|
||||
state.symbols[setting.name], showType(*setting.value));
|
||||
state.error<TypeError>("flake configuration setting '%s' is %s",
|
||||
state.symbols[setting.name], showType(*setting.value)).debugThrow();
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -865,11 +865,11 @@ static void prim_flakeRefToString(
|
|||
attrs.emplace(state.symbols[attr.name],
|
||||
std::string(attr.value->string_view()));
|
||||
} else {
|
||||
state.error(
|
||||
state.error<EvalError>(
|
||||
"flake reference attribute sets may only contain integers, Booleans, "
|
||||
"and strings, but attribute '%s' is %s",
|
||||
state.symbols[attr.name],
|
||||
showType(*attr.value)).debugThrow<EvalError>();
|
||||
showType(*attr.value)).debugThrow();
|
||||
}
|
||||
}
|
||||
auto flakeRef = FlakeRef::fromAttrs(attrs);

@ -49,7 +49,7 @@ std::string PackageInfo::queryName() const
{
if (name == "" && attrs) {
auto i = attrs->find(state->sName);
if (i == attrs->end()) throw TypeError("derivation name missing");
if (i == attrs->end()) state->error<TypeError>("derivation name missing").debugThrow();
name = state->forceStringNoCtx(*i->value, noPos, "while evaluating the 'name' attribute of a derivation");
}
return name;
@ -396,7 +396,8 @@ static void getDerivations(EvalState & state, Value & vIn,
}
}

else throw TypeError("expression does not evaluate to a derivation (or a set or list of those)");
else
state.error<TypeError>("expression does not evaluate to a derivation (or a set or list of those)").debugThrow();
}

@ -1,4 +1,6 @@
#include "json-to-value.hh"
#include "value.hh"
#include "eval.hh"

#include <variant>
#include <nlohmann/json.hpp>
@ -159,7 +161,7 @@ public:
}

bool parse_error(std::size_t, const std::string&, const nlohmann::detail::exception& ex) {
throw JSONParseError(ex.what());
throw JSONParseError("%s", ex.what());
}
};

@ -1,13 +1,16 @@
#pragma once
///@file

#include "eval.hh"
#include "error.hh"

#include <string>

namespace nix {

MakeError(JSONParseError, EvalError);
class EvalState;
struct Value;

MakeError(JSONParseError, Error);

void parseJSON(EvalState & state, const std::string_view & s, Value & v);

@ -29,12 +29,7 @@ using namespace nix;
|
|||
|
||||
namespace nix {
|
||||
|
||||
static inline PosIdx makeCurPos(const YYLTYPE & loc, ParseData * data)
|
||||
{
|
||||
return data->state.positions.add(data->origin, loc.first_line, loc.first_column);
|
||||
}
|
||||
|
||||
#define CUR_POS makeCurPos(*yylloc, data)
|
||||
#define CUR_POS state->at(*yylloc)
|
||||
|
||||
static void initLoc(YYLTYPE * loc)
|
||||
{
|
||||
|
@ -151,9 +146,9 @@ or { return OR_KW; }
|
|||
try {
|
||||
yylval->n = boost::lexical_cast<int64_t>(yytext);
|
||||
} catch (const boost::bad_lexical_cast &) {
|
||||
throw ParseError({
|
||||
.msg = hintfmt("invalid integer '%1%'", yytext),
|
||||
.errPos = data->state.positions[CUR_POS],
|
||||
throw ParseError(ErrorInfo{
|
||||
.msg = HintFmt("invalid integer '%1%'", yytext),
|
||||
.pos = state->positions[CUR_POS],
|
||||
});
|
||||
}
|
||||
return INT_LIT;
|
||||
|
@ -161,9 +156,9 @@ or { return OR_KW; }
|
|||
{FLOAT} { errno = 0;
|
||||
yylval->nf = strtod(yytext, 0);
|
||||
if (errno != 0)
|
||||
throw ParseError({
|
||||
.msg = hintfmt("invalid float '%1%'", yytext),
|
||||
.errPos = data->state.positions[CUR_POS],
|
||||
throw ParseError(ErrorInfo{
|
||||
.msg = HintFmt("invalid float '%1%'", yytext),
|
||||
.pos = state->positions[CUR_POS],
|
||||
});
|
||||
return FLOAT_LIT;
|
||||
}
|
||||
|
@ -186,7 +181,7 @@ or { return OR_KW; }
|
|||
/* It is impossible to match strings ending with '$' with one
|
||||
regex because trailing contexts are only valid at the end
|
||||
of a rule. (A sane but undocumented limitation.) */
|
||||
yylval->str = unescapeStr(data->symbols, yytext, yyleng);
|
||||
yylval->str = unescapeStr(state->symbols, yytext, yyleng);
|
||||
return STR;
|
||||
}
|
||||
<STRING>\$\{ { PUSH_STATE(DEFAULT); return DOLLAR_CURLY; }
|
||||
|
@ -214,7 +209,7 @@ or { return OR_KW; }
|
|||
return IND_STR;
|
||||
}
|
||||
<IND_STRING>\'\'\\{ANY} {
|
||||
yylval->str = unescapeStr(data->symbols, yytext + 2, yyleng - 2);
|
||||
yylval->str = unescapeStr(state->symbols, yytext + 2, yyleng - 2);
|
||||
return IND_STR;
|
||||
}
|
||||
<IND_STRING>\$\{ { PUSH_STATE(DEFAULT); return DOLLAR_CURLY; }
|
||||
|
@ -290,9 +285,9 @@ or { return OR_KW; }
|
|||
|
||||
<INPATH_SLASH>{ANY} |
|
||||
<INPATH_SLASH><<EOF>> {
|
||||
throw ParseError({
|
||||
.msg = hintfmt("path has a trailing slash"),
|
||||
.errPos = data->state.positions[CUR_POS],
|
||||
throw ParseError(ErrorInfo{
|
||||
.msg = HintFmt("path has a trailing slash"),
|
||||
.pos = state->positions[CUR_POS],
|
||||
});
|
||||
}
@ -9,6 +9,8 @@
|
|||
|
||||
namespace nix {
|
||||
|
||||
unsigned long Expr::nrExprs = 0;
|
||||
|
||||
ExprBlackHole eBlackHole;
|
||||
|
||||
// FIXME: remove, because *symbols* are abstract and do not have a single
|
||||
|
@ -294,10 +296,10 @@ void ExprVar::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> &
|
|||
enclosing `with'. If there is no `with', then we can issue an
|
||||
"undefined variable" error now. */
|
||||
if (withLevel == -1)
|
||||
throw UndefinedVarError({
|
||||
.msg = hintfmt("undefined variable '%1%'", es.symbols[name]),
|
||||
.errPos = es.positions[pos]
|
||||
});
|
||||
es.error<UndefinedVarError>(
|
||||
"undefined variable '%1%'",
|
||||
es.symbols[name]
|
||||
).atPos(pos).debugThrow();
|
||||
for (auto * e = env.get(); e && !fromWith; e = e->up)
|
||||
fromWith = e->isWith;
|
||||
this->level = withLevel;
|
||||
|
@ -407,9 +409,6 @@ void ExprCall::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> &
|
|||
|
||||
void ExprLet::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env)
|
||||
{
|
||||
if (es.debugRepl)
|
||||
es.exprEnvs.insert(std::make_pair(this, env));
|
||||
|
||||
auto newEnv = std::make_shared<StaticEnv>(nullptr, env.get(), attrs->attrs.size());
|
||||
|
||||
Displacement displ = 0;
|
||||
|
@ -421,6 +420,9 @@ void ExprLet::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> &
|
|||
for (auto & i : attrs->attrs)
|
||||
i.second.e->bindVars(es, i.second.inherited ? env : newEnv);
|
||||
|
||||
if (es.debugRepl)
|
||||
es.exprEnvs.insert(std::make_pair(this, newEnv));
|
||||
|
||||
body->bindVars(es, newEnv);
|
||||
}
|
||||
|
||||
|
@ -445,9 +447,6 @@ void ExprWith::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> &
|
|||
break;
|
||||
}
|
||||
|
||||
if (es.debugRepl)
|
||||
es.exprEnvs.insert(std::make_pair(this, env));
|
||||
|
||||
attrs->bindVars(es, env);
|
||||
auto newEnv = std::make_shared<StaticEnv>(this, env.get());
|
||||
body->bindVars(es, newEnv);
@ -9,110 +9,13 @@
|
|||
#include "error.hh"
|
||||
#include "chunked-vector.hh"
|
||||
#include "position.hh"
|
||||
#include "eval-error.hh"
|
||||
#include "pos-idx.hh"
|
||||
#include "pos-table.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
|
||||
MakeError(EvalError, Error);
|
||||
MakeError(ParseError, Error);
|
||||
MakeError(AssertionError, EvalError);
|
||||
MakeError(ThrownError, AssertionError);
|
||||
MakeError(Abort, EvalError);
|
||||
MakeError(TypeError, EvalError);
|
||||
MakeError(UndefinedVarError, Error);
|
||||
MakeError(MissingArgumentError, EvalError);
|
||||
|
||||
class InfiniteRecursionError : public EvalError
|
||||
{
|
||||
friend class EvalState;
|
||||
public:
|
||||
using EvalError::EvalError;
|
||||
};
|
||||
|
||||
class PosIdx {
|
||||
friend class PosTable;
|
||||
|
||||
private:
|
||||
uint32_t id;
|
||||
|
||||
explicit PosIdx(uint32_t id): id(id) {}
|
||||
|
||||
public:
|
||||
PosIdx() : id(0) {}
|
||||
|
||||
explicit operator bool() const { return id > 0; }
|
||||
|
||||
bool operator <(const PosIdx other) const { return id < other.id; }
|
||||
|
||||
bool operator ==(const PosIdx other) const { return id == other.id; }
|
||||
|
||||
bool operator !=(const PosIdx other) const { return id != other.id; }
|
||||
};
|
||||
|
||||
class PosTable
|
||||
{
|
||||
public:
|
||||
class Origin {
|
||||
friend PosTable;
|
||||
private:
|
||||
// must always be invalid by default, add() replaces this with the actual value.
|
||||
// subsequent add() calls use this index as a token to quickly check whether the
|
||||
// current origins.back() can be reused or not.
|
||||
mutable uint32_t idx = std::numeric_limits<uint32_t>::max();
|
||||
|
||||
// Used for searching in PosTable::[].
|
||||
explicit Origin(uint32_t idx): idx(idx), origin{std::monostate()} {}
|
||||
|
||||
public:
|
||||
const Pos::Origin origin;
|
||||
|
||||
Origin(Pos::Origin origin): origin(origin) {}
|
||||
};
|
||||
|
||||
struct Offset {
|
||||
uint32_t line, column;
|
||||
};
|
||||
|
||||
private:
|
||||
std::vector<Origin> origins;
|
||||
ChunkedVector<Offset, 8192> offsets;
|
||||
|
||||
public:
|
||||
PosTable(): offsets(1024)
|
||||
{
|
||||
origins.reserve(1024);
|
||||
}
|
||||
|
||||
PosIdx add(const Origin & origin, uint32_t line, uint32_t column)
|
||||
{
|
||||
const auto idx = offsets.add({line, column}).second;
|
||||
if (origins.empty() || origins.back().idx != origin.idx) {
|
||||
origin.idx = idx;
|
||||
origins.push_back(origin);
|
||||
}
|
||||
return PosIdx(idx + 1);
|
||||
}
|
||||
|
||||
Pos operator[](PosIdx p) const
|
||||
{
|
||||
if (p.id == 0 || p.id > offsets.size())
|
||||
return {};
|
||||
const auto idx = p.id - 1;
|
||||
/* we want the last key <= idx, so we'll take prev(first key > idx).
|
||||
this is guaranteed to never rewind origin.begin because the first
|
||||
key is always 0. */
|
||||
const auto pastOrigin = std::upper_bound(
|
||||
origins.begin(), origins.end(), Origin(idx),
|
||||
[] (const auto & a, const auto & b) { return a.idx < b.idx; });
|
||||
const auto origin = *std::prev(pastOrigin);
|
||||
const auto offset = offsets[idx];
|
||||
return {offset.line, offset.column, origin.origin};
|
||||
}
|
||||
};
|
||||
|
||||
inline PosIdx noPos = {};
|
||||
|
||||
|
||||
struct Env;
|
||||
struct Value;
|
||||
class EvalState;
|
||||
|
@ -140,6 +43,11 @@ std::string showAttrPath(const SymbolTable & symbols, const AttrPath & attrPath)
|
|||
|
||||
struct Expr
|
||||
{
|
||||
struct AstSymbols {
|
||||
Symbol sub, lessThan, mul, div, or_, findFile, nixPath, body;
|
||||
};
|
||||
|
||||
|
||||
static unsigned long nrExprs;
|
||||
Expr() {
|
||||
nrExprs++;
271
src/libexpr/parser-state.hh
Normal file
@ -0,0 +1,271 @@
|
|||
#pragma once
|
||||
///@file
|
||||
|
||||
#include "eval.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
/**
|
||||
* @note Storing a C-style `char *` and `size_t` allows us to avoid
|
||||
* having to define the special members that using string_view here
|
||||
* would implicitly delete.
|
||||
*/
|
||||
struct StringToken
|
||||
{
|
||||
const char * p;
|
||||
size_t l;
|
||||
bool hasIndentation;
|
||||
operator std::string_view() const { return {p, l}; }
|
||||
};
|
||||
|
||||
struct ParserLocation
|
||||
{
|
||||
int first_line, first_column;
|
||||
int last_line, last_column;
|
||||
|
||||
// backup to recover from yyless(0)
|
||||
int stashed_first_line, stashed_first_column;
|
||||
int stashed_last_line, stashed_last_column;
|
||||
|
||||
void stash() {
|
||||
stashed_first_line = first_line;
|
||||
stashed_first_column = first_column;
|
||||
stashed_last_line = last_line;
|
||||
stashed_last_column = last_column;
|
||||
}
|
||||
|
||||
void unstash() {
|
||||
first_line = stashed_first_line;
|
||||
first_column = stashed_first_column;
|
||||
last_line = stashed_last_line;
|
||||
last_column = stashed_last_column;
|
||||
}
|
||||
};
|
||||
|
||||
struct ParserState
|
||||
{
|
||||
SymbolTable & symbols;
|
||||
PosTable & positions;
|
||||
Expr * result;
|
||||
SourcePath basePath;
|
||||
PosTable::Origin origin;
|
||||
const ref<InputAccessor> rootFS;
|
||||
const Expr::AstSymbols & s;
|
||||
|
||||
void dupAttr(const AttrPath & attrPath, const PosIdx pos, const PosIdx prevPos);
|
||||
void dupAttr(Symbol attr, const PosIdx pos, const PosIdx prevPos);
|
||||
void addAttr(ExprAttrs * attrs, AttrPath && attrPath, Expr * e, const PosIdx pos);
|
||||
Formals * validateFormals(Formals * formals, PosIdx pos = noPos, Symbol arg = {});
|
||||
Expr * stripIndentation(const PosIdx pos,
|
||||
std::vector<std::pair<PosIdx, std::variant<Expr *, StringToken>>> && es);
|
||||
PosIdx at(const ParserLocation & loc);
|
||||
};
|
||||
|
||||
inline void ParserState::dupAttr(const AttrPath & attrPath, const PosIdx pos, const PosIdx prevPos)
|
||||
{
|
||||
throw ParseError({
|
||||
.msg = HintFmt("attribute '%1%' already defined at %2%",
|
||||
showAttrPath(symbols, attrPath), positions[prevPos]),
|
||||
.pos = positions[pos]
|
||||
});
|
||||
}
|
||||
|
||||
inline void ParserState::dupAttr(Symbol attr, const PosIdx pos, const PosIdx prevPos)
|
||||
{
|
||||
throw ParseError({
|
||||
.msg = HintFmt("attribute '%1%' already defined at %2%", symbols[attr], positions[prevPos]),
|
||||
.pos = positions[pos]
|
||||
});
|
||||
}
|
||||
|
||||
inline void ParserState::addAttr(ExprAttrs * attrs, AttrPath && attrPath, Expr * e, const PosIdx pos)
|
||||
{
|
||||
AttrPath::iterator i;
|
||||
// All attrpaths have at least one attr
|
||||
assert(!attrPath.empty());
|
||||
// Checking attrPath validity.
|
||||
// ===========================
|
||||
for (i = attrPath.begin(); i + 1 < attrPath.end(); i++) {
|
||||
if (i->symbol) {
|
||||
ExprAttrs::AttrDefs::iterator j = attrs->attrs.find(i->symbol);
|
||||
if (j != attrs->attrs.end()) {
|
||||
if (!j->second.inherited) {
|
||||
ExprAttrs * attrs2 = dynamic_cast<ExprAttrs *>(j->second.e);
|
||||
if (!attrs2) dupAttr(attrPath, pos, j->second.pos);
|
||||
attrs = attrs2;
|
||||
} else
|
||||
dupAttr(attrPath, pos, j->second.pos);
|
||||
} else {
|
||||
ExprAttrs * nested = new ExprAttrs;
|
||||
attrs->attrs[i->symbol] = ExprAttrs::AttrDef(nested, pos);
|
||||
attrs = nested;
|
||||
}
|
||||
} else {
|
||||
ExprAttrs *nested = new ExprAttrs;
|
||||
attrs->dynamicAttrs.push_back(ExprAttrs::DynamicAttrDef(i->expr, nested, pos));
|
||||
attrs = nested;
|
||||
}
|
||||
}
|
||||
// Expr insertion.
|
||||
// ==========================
|
||||
if (i->symbol) {
|
||||
ExprAttrs::AttrDefs::iterator j = attrs->attrs.find(i->symbol);
|
||||
if (j != attrs->attrs.end()) {
|
||||
// This attr path is already defined. However, if both
|
||||
// e and the expr pointed by the attr path are two attribute sets,
|
||||
// we want to merge them.
|
||||
// Otherwise, throw an error.
|
||||
auto ae = dynamic_cast<ExprAttrs *>(e);
|
||||
auto jAttrs = dynamic_cast<ExprAttrs *>(j->second.e);
|
||||
if (jAttrs && ae) {
|
||||
for (auto & ad : ae->attrs) {
|
||||
auto j2 = jAttrs->attrs.find(ad.first);
|
||||
if (j2 != jAttrs->attrs.end()) // Attr already defined in iAttrs, error.
|
||||
dupAttr(ad.first, j2->second.pos, ad.second.pos);
|
||||
jAttrs->attrs.emplace(ad.first, ad.second);
|
||||
}
|
||||
jAttrs->dynamicAttrs.insert(jAttrs->dynamicAttrs.end(), ae->dynamicAttrs.begin(), ae->dynamicAttrs.end());
|
||||
} else {
|
||||
dupAttr(attrPath, pos, j->second.pos);
|
||||
}
|
||||
} else {
|
||||
// This attr path is not defined. Let's create it.
|
||||
attrs->attrs.emplace(i->symbol, ExprAttrs::AttrDef(e, pos));
|
||||
e->setName(i->symbol);
|
||||
}
|
||||
} else {
|
||||
attrs->dynamicAttrs.push_back(ExprAttrs::DynamicAttrDef(i->expr, e, pos));
|
||||
}
|
||||
}
|
||||
|
||||
inline Formals * ParserState::validateFormals(Formals * formals, PosIdx pos, Symbol arg)
|
||||
{
|
||||
std::sort(formals->formals.begin(), formals->formals.end(),
|
||||
[] (const auto & a, const auto & b) {
|
||||
return std::tie(a.name, a.pos) < std::tie(b.name, b.pos);
|
||||
});
|
||||
|
||||
std::optional<std::pair<Symbol, PosIdx>> duplicate;
|
||||
for (size_t i = 0; i + 1 < formals->formals.size(); i++) {
|
||||
if (formals->formals[i].name != formals->formals[i + 1].name)
|
||||
continue;
|
||||
std::pair thisDup{formals->formals[i].name, formals->formals[i + 1].pos};
|
||||
duplicate = std::min(thisDup, duplicate.value_or(thisDup));
|
||||
}
|
||||
if (duplicate)
|
||||
throw ParseError({
|
||||
.msg = HintFmt("duplicate formal function argument '%1%'", symbols[duplicate->first]),
|
||||
.pos = positions[duplicate->second]
|
||||
});
|
||||
|
||||
if (arg && formals->has(arg))
|
||||
throw ParseError({
|
||||
.msg = HintFmt("duplicate formal function argument '%1%'", symbols[arg]),
|
||||
.pos = positions[pos]
|
||||
});
|
||||
|
||||
return formals;
|
||||
}
|
||||
|
||||
inline Expr * ParserState::stripIndentation(const PosIdx pos,
|
||||
std::vector<std::pair<PosIdx, std::variant<Expr *, StringToken>>> && es)
|
||||
{
|
||||
if (es.empty()) return new ExprString("");
|
||||
|
||||
/* Figure out the minimum indentation. Note that by design
|
||||
whitespace-only final lines are not taken into account. (So
|
||||
the " " in "\n ''" is ignored, but the " " in "\n foo''" is.) */
|
||||
bool atStartOfLine = true; /* = seen only whitespace in the current line */
|
||||
size_t minIndent = 1000000;
|
||||
size_t curIndent = 0;
|
||||
for (auto & [i_pos, i] : es) {
|
||||
auto * str = std::get_if<StringToken>(&i);
|
||||
if (!str || !str->hasIndentation) {
|
||||
/* Anti-quotations and escaped characters end the current start-of-line whitespace. */
|
||||
if (atStartOfLine) {
|
||||
atStartOfLine = false;
|
||||
if (curIndent < minIndent) minIndent = curIndent;
|
||||
}
|
||||
continue;
|
||||
}
|
||||
for (size_t j = 0; j < str->l; ++j) {
|
||||
if (atStartOfLine) {
|
||||
if (str->p[j] == ' ')
|
||||
curIndent++;
|
||||
else if (str->p[j] == '\n') {
|
||||
/* Empty line, doesn't influence minimum
|
||||
indentation. */
|
||||
curIndent = 0;
|
||||
} else {
|
||||
atStartOfLine = false;
|
||||
if (curIndent < minIndent) minIndent = curIndent;
|
||||
}
|
||||
} else if (str->p[j] == '\n') {
|
||||
atStartOfLine = true;
|
||||
curIndent = 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/* Strip spaces from each line. */
|
||||
auto * es2 = new std::vector<std::pair<PosIdx, Expr *>>;
|
||||
atStartOfLine = true;
|
||||
size_t curDropped = 0;
|
||||
size_t n = es.size();
|
||||
auto i = es.begin();
|
||||
const auto trimExpr = [&] (Expr * e) {
|
||||
atStartOfLine = false;
|
||||
curDropped = 0;
|
||||
es2->emplace_back(i->first, e);
|
||||
};
|
||||
const auto trimString = [&] (const StringToken & t) {
|
||||
std::string s2;
|
||||
for (size_t j = 0; j < t.l; ++j) {
|
||||
if (atStartOfLine) {
|
||||
if (t.p[j] == ' ') {
|
||||
if (curDropped++ >= minIndent)
|
||||
s2 += t.p[j];
|
||||
}
|
||||
else if (t.p[j] == '\n') {
|
||||
curDropped = 0;
|
||||
s2 += t.p[j];
|
||||
} else {
|
||||
atStartOfLine = false;
|
||||
curDropped = 0;
|
||||
s2 += t.p[j];
|
||||
}
|
||||
} else {
|
||||
s2 += t.p[j];
|
||||
if (t.p[j] == '\n') atStartOfLine = true;
|
||||
}
|
||||
}
|
||||
|
||||
/* Remove the last line if it is empty and consists only of
|
||||
spaces. */
|
||||
if (n == 1) {
|
||||
std::string::size_type p = s2.find_last_of('\n');
|
||||
if (p != std::string::npos && s2.find_first_not_of(' ', p + 1) == std::string::npos)
|
||||
s2 = std::string(s2, 0, p + 1);
|
||||
}
|
||||
|
||||
es2->emplace_back(i->first, new ExprString(std::move(s2)));
|
||||
};
|
||||
for (; i != es.end(); ++i, --n) {
|
||||
std::visit(overloaded { trimExpr, trimString }, i->second);
|
||||
}
|
||||
|
||||
/* If this is a single string, then don't do a concatenation. */
|
||||
if (es2->size() == 1 && dynamic_cast<ExprString *>((*es2)[0].second)) {
|
||||
auto *const result = (*es2)[0].second;
|
||||
delete es2;
|
||||
return result;
|
||||
}
|
||||
return new ExprConcatStrings(pos, true, es2);
|
||||
}
|
||||
|
||||
inline PosIdx ParserState::at(const ParserLocation & loc)
|
||||
{
|
||||
return positions.add(origin, loc.first_line, loc.first_column);
|
||||
}
|
||||
|
||||
}
|
|
@ -5,9 +5,9 @@
|
|||
%defines
|
||||
/* %no-lines */
|
||||
%parse-param { void * scanner }
|
||||
%parse-param { nix::ParseData * data }
|
||||
%parse-param { nix::ParserState * state }
|
||||
%lex-param { void * scanner }
|
||||
%lex-param { nix::ParseData * data }
|
||||
%lex-param { nix::ParserState * state }
|
||||
%expect 1
|
||||
%expect-rr 1
|
||||
|
||||
|
@ -18,6 +18,7 @@
|
|||
|
||||
#include <variant>
|
||||
|
||||
#include "finally.hh"
|
||||
#include "util.hh"
|
||||
#include "users.hh"
|
||||
|
||||
|
@ -25,63 +26,26 @@
|
|||
#include "eval.hh"
|
||||
#include "eval-settings.hh"
|
||||
#include "globals.hh"
|
||||
#include "parser-state.hh"
|
||||
|
||||
#define YYLTYPE ::nix::ParserLocation
|
||||
#define YY_DECL int yylex \
|
||||
(YYSTYPE * yylval_param, YYLTYPE * yylloc_param, yyscan_t yyscanner, nix::ParserState * state)
|
||||
|
||||
namespace nix {
|
||||
|
||||
#define YYLTYPE ::nix::ParserLocation
|
||||
struct ParserLocation
|
||||
{
|
||||
int first_line, first_column;
|
||||
int last_line, last_column;
|
||||
|
||||
// backup to recover from yyless(0)
|
||||
int stashed_first_line, stashed_first_column;
|
||||
int stashed_last_line, stashed_last_column;
|
||||
|
||||
void stash() {
|
||||
stashed_first_line = first_line;
|
||||
stashed_first_column = first_column;
|
||||
stashed_last_line = last_line;
|
||||
stashed_last_column = last_column;
|
||||
}
|
||||
|
||||
void unstash() {
|
||||
first_line = stashed_first_line;
|
||||
first_column = stashed_first_column;
|
||||
last_line = stashed_last_line;
|
||||
last_column = stashed_last_column;
|
||||
}
|
||||
};
|
||||
|
||||
struct ParseData
|
||||
{
|
||||
EvalState & state;
|
||||
SymbolTable & symbols;
|
||||
Expr * result;
|
||||
SourcePath basePath;
|
||||
PosTable::Origin origin;
|
||||
std::optional<ErrorInfo> error;
|
||||
};
|
||||
|
||||
struct ParserFormals {
|
||||
std::vector<Formal> formals;
|
||||
bool ellipsis = false;
|
||||
};
|
||||
Expr * parseExprFromBuf(
|
||||
char * text,
|
||||
size_t length,
|
||||
Pos::Origin origin,
|
||||
const SourcePath & basePath,
|
||||
SymbolTable & symbols,
|
||||
PosTable & positions,
|
||||
const ref<InputAccessor> rootFS,
|
||||
const Expr::AstSymbols & astSymbols);
|
||||
|
||||
}
|
||||
|
||||
// using a C struct allows us to avoid having to define the special
// members that using string_view here would implicitly delete.
|
||||
struct StringToken {
|
||||
const char * p;
|
||||
size_t l;
|
||||
bool hasIndentation;
|
||||
operator std::string_view() const { return {p, l}; }
|
||||
};
|
||||
|
||||
#define YY_DECL int yylex \
|
||||
(YYSTYPE * yylval_param, YYLTYPE * yylloc_param, yyscan_t yyscanner, nix::ParseData * data)
|
||||
|
||||
#endif
|
||||
|
||||
}
|
||||
|
@ -95,242 +59,17 @@ YY_DECL;
|
|||
|
||||
using namespace nix;
|
||||
|
||||
|
||||
namespace nix {
|
||||
#define CUR_POS state->at(*yylocp)
|
||||
|
||||
|
||||
static void dupAttr(const EvalState & state, const AttrPath & attrPath, const PosIdx pos, const PosIdx prevPos)
|
||||
void yyerror(YYLTYPE * loc, yyscan_t scanner, ParserState * state, const char * error)
|
||||
{
|
||||
throw ParseError({
|
||||
.msg = hintfmt("attribute '%1%' already defined at %2%",
|
||||
showAttrPath(state.symbols, attrPath), state.positions[prevPos]),
|
||||
.errPos = state.positions[pos]
|
||||
.msg = HintFmt(error),
|
||||
.pos = state->positions[state->at(*loc)]
|
||||
});
|
||||
}
|
||||
|
||||
static void dupAttr(const EvalState & state, Symbol attr, const PosIdx pos, const PosIdx prevPos)
|
||||
{
|
||||
throw ParseError({
|
||||
.msg = hintfmt("attribute '%1%' already defined at %2%", state.symbols[attr], state.positions[prevPos]),
|
||||
.errPos = state.positions[pos]
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
static void addAttr(ExprAttrs * attrs, AttrPath && attrPath,
|
||||
Expr * e, const PosIdx pos, const nix::EvalState & state)
|
||||
{
|
||||
AttrPath::iterator i;
|
||||
// All attrpaths have at least one attr
|
||||
assert(!attrPath.empty());
|
||||
// Checking attrPath validity.
|
||||
// ===========================
|
||||
for (i = attrPath.begin(); i + 1 < attrPath.end(); i++) {
|
||||
if (i->symbol) {
|
||||
ExprAttrs::AttrDefs::iterator j = attrs->attrs.find(i->symbol);
|
||||
if (j != attrs->attrs.end()) {
|
||||
if (!j->second.inherited) {
|
||||
ExprAttrs * attrs2 = dynamic_cast<ExprAttrs *>(j->second.e);
|
||||
if (!attrs2) dupAttr(state, attrPath, pos, j->second.pos);
|
||||
attrs = attrs2;
|
||||
} else
|
||||
dupAttr(state, attrPath, pos, j->second.pos);
|
||||
} else {
|
||||
ExprAttrs * nested = new ExprAttrs;
|
||||
attrs->attrs[i->symbol] = ExprAttrs::AttrDef(nested, pos);
|
||||
attrs = nested;
|
||||
}
|
||||
} else {
|
||||
ExprAttrs *nested = new ExprAttrs;
|
||||
attrs->dynamicAttrs.push_back(ExprAttrs::DynamicAttrDef(i->expr, nested, pos));
|
||||
attrs = nested;
|
||||
}
|
||||
}
|
||||
// Expr insertion.
|
||||
// ==========================
|
||||
if (i->symbol) {
|
||||
ExprAttrs::AttrDefs::iterator j = attrs->attrs.find(i->symbol);
|
||||
if (j != attrs->attrs.end()) {
|
||||
// This attr path is already defined. However, if both
|
||||
// e and the expr pointed by the attr path are two attribute sets,
|
||||
// we want to merge them.
|
||||
// Otherwise, throw an error.
|
||||
auto ae = dynamic_cast<ExprAttrs *>(e);
|
||||
auto jAttrs = dynamic_cast<ExprAttrs *>(j->second.e);
|
||||
if (jAttrs && ae) {
|
||||
for (auto & ad : ae->attrs) {
|
||||
auto j2 = jAttrs->attrs.find(ad.first);
|
||||
if (j2 != jAttrs->attrs.end()) // Attr already defined in iAttrs, error.
|
||||
dupAttr(state, ad.first, j2->second.pos, ad.second.pos);
|
||||
jAttrs->attrs.emplace(ad.first, ad.second);
|
||||
}
|
||||
jAttrs->dynamicAttrs.insert(jAttrs->dynamicAttrs.end(), ae->dynamicAttrs.begin(), ae->dynamicAttrs.end());
|
||||
} else {
|
||||
dupAttr(state, attrPath, pos, j->second.pos);
|
||||
}
|
||||
} else {
|
||||
// This attr path is not defined. Let's create it.
|
||||
attrs->attrs.emplace(i->symbol, ExprAttrs::AttrDef(e, pos));
|
||||
e->setName(i->symbol);
|
||||
}
|
||||
} else {
|
||||
attrs->dynamicAttrs.push_back(ExprAttrs::DynamicAttrDef(i->expr, e, pos));
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
static Formals * toFormals(ParseData & data, ParserFormals * formals,
|
||||
PosIdx pos = noPos, Symbol arg = {})
|
||||
{
|
||||
std::sort(formals->formals.begin(), formals->formals.end(),
|
||||
[] (const auto & a, const auto & b) {
|
||||
return std::tie(a.name, a.pos) < std::tie(b.name, b.pos);
|
||||
});
|
||||
|
||||
std::optional<std::pair<Symbol, PosIdx>> duplicate;
|
||||
for (size_t i = 0; i + 1 < formals->formals.size(); i++) {
|
||||
if (formals->formals[i].name != formals->formals[i + 1].name)
|
||||
continue;
|
||||
std::pair thisDup{formals->formals[i].name, formals->formals[i + 1].pos};
|
||||
duplicate = std::min(thisDup, duplicate.value_or(thisDup));
|
||||
}
|
||||
if (duplicate)
|
||||
throw ParseError({
|
||||
.msg = hintfmt("duplicate formal function argument '%1%'", data.symbols[duplicate->first]),
|
||||
.errPos = data.state.positions[duplicate->second]
|
||||
});
|
||||
|
||||
Formals result;
|
||||
result.ellipsis = formals->ellipsis;
|
||||
result.formals = std::move(formals->formals);
|
||||
|
||||
if (arg && result.has(arg))
|
||||
throw ParseError({
|
||||
.msg = hintfmt("duplicate formal function argument '%1%'", data.symbols[arg]),
|
||||
.errPos = data.state.positions[pos]
|
||||
});
|
||||
|
||||
delete formals;
|
||||
return new Formals(std::move(result));
|
||||
}
|
||||
|
||||
|
||||
static Expr * stripIndentation(const PosIdx pos, SymbolTable & symbols,
|
||||
std::vector<std::pair<PosIdx, std::variant<Expr *, StringToken>>> && es)
|
||||
{
|
||||
if (es.empty()) return new ExprString("");
|
||||
|
||||
/* Figure out the minimum indentation. Note that by design
|
||||
whitespace-only final lines are not taken into account. (So
|
||||
the " " in "\n ''" is ignored, but the " " in "\n foo''" is.) */
|
||||
bool atStartOfLine = true; /* = seen only whitespace in the current line */
|
||||
size_t minIndent = 1000000;
|
||||
size_t curIndent = 0;
|
||||
for (auto & [i_pos, i] : es) {
|
||||
auto * str = std::get_if<StringToken>(&i);
|
||||
if (!str || !str->hasIndentation) {
|
||||
/* Anti-quotations and escaped characters end the current start-of-line whitespace. */
|
||||
if (atStartOfLine) {
|
||||
atStartOfLine = false;
|
||||
if (curIndent < minIndent) minIndent = curIndent;
|
||||
}
|
||||
continue;
|
||||
}
|
||||
for (size_t j = 0; j < str->l; ++j) {
|
||||
if (atStartOfLine) {
|
||||
if (str->p[j] == ' ')
|
||||
curIndent++;
|
||||
else if (str->p[j] == '\n') {
|
||||
/* Empty line, doesn't influence minimum
|
||||
indentation. */
|
||||
curIndent = 0;
|
||||
} else {
|
||||
atStartOfLine = false;
|
||||
if (curIndent < minIndent) minIndent = curIndent;
|
||||
}
|
||||
} else if (str->p[j] == '\n') {
|
||||
atStartOfLine = true;
|
||||
curIndent = 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/* Strip spaces from each line. */
|
||||
auto * es2 = new std::vector<std::pair<PosIdx, Expr *>>;
|
||||
atStartOfLine = true;
|
||||
size_t curDropped = 0;
|
||||
size_t n = es.size();
|
||||
auto i = es.begin();
|
||||
const auto trimExpr = [&] (Expr * e) {
|
||||
atStartOfLine = false;
|
||||
curDropped = 0;
|
||||
es2->emplace_back(i->first, e);
|
||||
};
|
||||
const auto trimString = [&] (const StringToken & t) {
|
||||
std::string s2;
|
||||
for (size_t j = 0; j < t.l; ++j) {
|
||||
if (atStartOfLine) {
|
||||
if (t.p[j] == ' ') {
|
||||
if (curDropped++ >= minIndent)
|
||||
s2 += t.p[j];
|
||||
}
|
||||
else if (t.p[j] == '\n') {
|
||||
curDropped = 0;
|
||||
s2 += t.p[j];
|
||||
} else {
|
||||
atStartOfLine = false;
|
||||
curDropped = 0;
|
||||
s2 += t.p[j];
|
||||
}
|
||||
} else {
|
||||
s2 += t.p[j];
|
||||
if (t.p[j] == '\n') atStartOfLine = true;
|
||||
}
|
||||
}
|
||||
|
||||
/* Remove the last line if it is empty and consists only of
|
||||
spaces. */
|
||||
if (n == 1) {
|
||||
std::string::size_type p = s2.find_last_of('\n');
|
||||
if (p != std::string::npos && s2.find_first_not_of(' ', p + 1) == std::string::npos)
|
||||
s2 = std::string(s2, 0, p + 1);
|
||||
}
|
||||
|
||||
es2->emplace_back(i->first, new ExprString(std::move(s2)));
|
||||
};
|
||||
for (; i != es.end(); ++i, --n) {
|
||||
std::visit(overloaded { trimExpr, trimString }, i->second);
|
||||
}
|
||||
|
||||
/* If this is a single string, then don't do a concatenation. */
|
||||
if (es2->size() == 1 && dynamic_cast<ExprString *>((*es2)[0].second)) {
|
||||
auto *const result = (*es2)[0].second;
|
||||
delete es2;
|
||||
return result;
|
||||
}
|
||||
return new ExprConcatStrings(pos, true, es2);
|
||||
}
|
||||
|
||||
|
||||
static inline PosIdx makeCurPos(const YYLTYPE & loc, ParseData * data)
|
||||
{
|
||||
return data->state.positions.add(data->origin, loc.first_line, loc.first_column);
|
||||
}
|
||||
|
||||
#define CUR_POS makeCurPos(*yylocp, data)
|
||||
|
||||
|
||||
}
|
||||
|
||||
|
||||
void yyerror(YYLTYPE * loc, yyscan_t scanner, ParseData * data, const char * error)
|
||||
{
|
||||
data->error = {
|
||||
.msg = hintfmt(error),
|
||||
.errPos = data->state.positions[makeCurPos(*loc, data)]
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
%}
|
||||
|
||||
|
@ -339,17 +78,17 @@ void yyerror(YYLTYPE * loc, yyscan_t scanner, ParseData * data, const char * err
|
|||
nix::Expr * e;
|
||||
nix::ExprList * list;
|
||||
nix::ExprAttrs * attrs;
|
||||
nix::ParserFormals * formals;
|
||||
nix::Formals * formals;
|
||||
nix::Formal * formal;
|
||||
nix::NixInt n;
|
||||
nix::NixFloat nf;
|
||||
StringToken id; // !!! -> Symbol
|
||||
StringToken path;
|
||||
StringToken uri;
|
||||
StringToken str;
|
||||
nix::StringToken id; // !!! -> Symbol
|
||||
nix::StringToken path;
|
||||
nix::StringToken uri;
|
||||
nix::StringToken str;
|
||||
std::vector<nix::AttrName> * attrNames;
|
||||
std::vector<std::pair<nix::PosIdx, nix::Expr *>> * string_parts;
|
||||
std::vector<std::pair<nix::PosIdx, std::variant<nix::Expr *, StringToken>>> * ind_string_parts;
|
||||
std::vector<std::pair<nix::PosIdx, std::variant<nix::Expr *, nix::StringToken>>> * ind_string_parts;
|
||||
}
|
||||
|
||||
%type <e> start expr expr_function expr_if expr_op
|
||||
|
@ -389,24 +128,24 @@ void yyerror(YYLTYPE * loc, yyscan_t scanner, ParseData * data, const char * err
|
|||
|
||||
%%
|
||||
|
||||
start: expr { data->result = $1; };
|
||||
start: expr { state->result = $1; };
|
||||
|
||||
expr: expr_function;
|
||||
|
||||
expr_function
|
||||
: ID ':' expr_function
|
||||
{ $$ = new ExprLambda(CUR_POS, data->symbols.create($1), 0, $3); }
|
||||
{ $$ = new ExprLambda(CUR_POS, state->symbols.create($1), 0, $3); }
|
||||
| '{' formals '}' ':' expr_function
|
||||
{ $$ = new ExprLambda(CUR_POS, toFormals(*data, $2), $5); }
|
||||
{ $$ = new ExprLambda(CUR_POS, state->validateFormals($2), $5); }
|
||||
| '{' formals '}' '@' ID ':' expr_function
|
||||
{
|
||||
auto arg = data->symbols.create($5);
|
||||
$$ = new ExprLambda(CUR_POS, arg, toFormals(*data, $2, CUR_POS, arg), $7);
|
||||
auto arg = state->symbols.create($5);
|
||||
$$ = new ExprLambda(CUR_POS, arg, state->validateFormals($2, CUR_POS, arg), $7);
|
||||
}
|
||||
| ID '@' '{' formals '}' ':' expr_function
|
||||
{
|
||||
auto arg = data->symbols.create($1);
|
||||
$$ = new ExprLambda(CUR_POS, arg, toFormals(*data, $4, CUR_POS, arg), $7);
|
||||
auto arg = state->symbols.create($1);
|
||||
$$ = new ExprLambda(CUR_POS, arg, state->validateFormals($4, CUR_POS, arg), $7);
|
||||
}
|
||||
| ASSERT expr ';' expr_function
|
||||
{ $$ = new ExprAssert(CUR_POS, $2, $4); }
|
||||
|
@ -415,8 +154,8 @@ expr_function
|
|||
| LET binds IN_KW expr_function
|
||||
{ if (!$2->dynamicAttrs.empty())
|
||||
throw ParseError({
|
||||
.msg = hintfmt("dynamic attributes not allowed in let"),
|
||||
.errPos = data->state.positions[CUR_POS]
|
||||
.msg = HintFmt("dynamic attributes not allowed in let"),
|
||||
.pos = state->positions[CUR_POS]
|
||||
});
|
||||
$$ = new ExprLet($2, $4);
|
||||
}
|
||||
|
@ -430,24 +169,24 @@ expr_if
|
|||
|
||||
expr_op
|
||||
: '!' expr_op %prec NOT { $$ = new ExprOpNot($2); }
|
||||
| '-' expr_op %prec NEGATE { $$ = new ExprCall(CUR_POS, new ExprVar(data->symbols.create("__sub")), {new ExprInt(0), $2}); }
|
||||
| '-' expr_op %prec NEGATE { $$ = new ExprCall(CUR_POS, new ExprVar(state->s.sub), {new ExprInt(0), $2}); }
|
||||
| expr_op EQ expr_op { $$ = new ExprOpEq($1, $3); }
|
||||
| expr_op NEQ expr_op { $$ = new ExprOpNEq($1, $3); }
|
||||
| expr_op '<' expr_op { $$ = new ExprCall(makeCurPos(@2, data), new ExprVar(data->symbols.create("__lessThan")), {$1, $3}); }
|
||||
| expr_op LEQ expr_op { $$ = new ExprOpNot(new ExprCall(makeCurPos(@2, data), new ExprVar(data->symbols.create("__lessThan")), {$3, $1})); }
|
||||
| expr_op '>' expr_op { $$ = new ExprCall(makeCurPos(@2, data), new ExprVar(data->symbols.create("__lessThan")), {$3, $1}); }
|
||||
| expr_op GEQ expr_op { $$ = new ExprOpNot(new ExprCall(makeCurPos(@2, data), new ExprVar(data->symbols.create("__lessThan")), {$1, $3})); }
|
||||
| expr_op AND expr_op { $$ = new ExprOpAnd(makeCurPos(@2, data), $1, $3); }
|
||||
| expr_op OR expr_op { $$ = new ExprOpOr(makeCurPos(@2, data), $1, $3); }
|
||||
| expr_op IMPL expr_op { $$ = new ExprOpImpl(makeCurPos(@2, data), $1, $3); }
|
||||
| expr_op UPDATE expr_op { $$ = new ExprOpUpdate(makeCurPos(@2, data), $1, $3); }
|
||||
| expr_op '<' expr_op { $$ = new ExprCall(state->at(@2), new ExprVar(state->s.lessThan), {$1, $3}); }
|
||||
| expr_op LEQ expr_op { $$ = new ExprOpNot(new ExprCall(state->at(@2), new ExprVar(state->s.lessThan), {$3, $1})); }
|
||||
| expr_op '>' expr_op { $$ = new ExprCall(state->at(@2), new ExprVar(state->s.lessThan), {$3, $1}); }
|
||||
| expr_op GEQ expr_op { $$ = new ExprOpNot(new ExprCall(state->at(@2), new ExprVar(state->s.lessThan), {$1, $3})); }
|
||||
| expr_op AND expr_op { $$ = new ExprOpAnd(state->at(@2), $1, $3); }
|
||||
| expr_op OR expr_op { $$ = new ExprOpOr(state->at(@2), $1, $3); }
|
||||
| expr_op IMPL expr_op { $$ = new ExprOpImpl(state->at(@2), $1, $3); }
|
||||
| expr_op UPDATE expr_op { $$ = new ExprOpUpdate(state->at(@2), $1, $3); }
|
||||
| expr_op '?' attrpath { $$ = new ExprOpHasAttr($1, std::move(*$3)); delete $3; }
|
||||
| expr_op '+' expr_op
|
||||
{ $$ = new ExprConcatStrings(makeCurPos(@2, data), false, new std::vector<std::pair<PosIdx, Expr *> >({{makeCurPos(@1, data), $1}, {makeCurPos(@3, data), $3}})); }
|
||||
| expr_op '-' expr_op { $$ = new ExprCall(makeCurPos(@2, data), new ExprVar(data->symbols.create("__sub")), {$1, $3}); }
|
||||
| expr_op '*' expr_op { $$ = new ExprCall(makeCurPos(@2, data), new ExprVar(data->symbols.create("__mul")), {$1, $3}); }
|
||||
| expr_op '/' expr_op { $$ = new ExprCall(makeCurPos(@2, data), new ExprVar(data->symbols.create("__div")), {$1, $3}); }
|
||||
| expr_op CONCAT expr_op { $$ = new ExprOpConcatLists(makeCurPos(@2, data), $1, $3); }
|
||||
{ $$ = new ExprConcatStrings(state->at(@2), false, new std::vector<std::pair<PosIdx, Expr *> >({{state->at(@1), $1}, {state->at(@3), $3}})); }
|
||||
| expr_op '-' expr_op { $$ = new ExprCall(state->at(@2), new ExprVar(state->s.sub), {$1, $3}); }
|
||||
| expr_op '*' expr_op { $$ = new ExprCall(state->at(@2), new ExprVar(state->s.mul), {$1, $3}); }
|
||||
| expr_op '/' expr_op { $$ = new ExprCall(state->at(@2), new ExprVar(state->s.div), {$1, $3}); }
|
||||
| expr_op CONCAT expr_op { $$ = new ExprOpConcatLists(state->at(@2), $1, $3); }
|
||||
| expr_app
|
||||
;
|
||||
|
||||
|
@ -470,7 +209,7 @@ expr_select
|
|||
| /* Backwards compatibility: because Nixpkgs has a rarely used
|
||||
function named ‘or’, allow stuff like ‘map or [...]’. */
|
||||
expr_simple OR_KW
|
||||
{ $$ = new ExprCall(CUR_POS, $1, {new ExprVar(CUR_POS, data->symbols.create("or"))}); }
|
||||
{ $$ = new ExprCall(CUR_POS, $1, {new ExprVar(CUR_POS, state->s.or_)}); }
|
||||
| expr_simple
|
||||
;
|
||||
|
||||
|
@ -480,33 +219,33 @@ expr_simple
|
|||
if ($1.l == s.size() && strncmp($1.p, s.data(), s.size()) == 0)
|
||||
$$ = new ExprPos(CUR_POS);
|
||||
else
|
||||
$$ = new ExprVar(CUR_POS, data->symbols.create($1));
|
||||
$$ = new ExprVar(CUR_POS, state->symbols.create($1));
|
||||
}
|
||||
| INT_LIT { $$ = new ExprInt($1); }
|
||||
| FLOAT_LIT { $$ = new ExprFloat($1); }
|
||||
| '"' string_parts '"' { $$ = $2; }
|
||||
| IND_STRING_OPEN ind_string_parts IND_STRING_CLOSE {
|
||||
$$ = stripIndentation(CUR_POS, data->symbols, std::move(*$2));
|
||||
$$ = state->stripIndentation(CUR_POS, std::move(*$2));
|
||||
delete $2;
|
||||
}
|
||||
| path_start PATH_END
|
||||
| path_start string_parts_interpolated PATH_END {
|
||||
$2->insert($2->begin(), {makeCurPos(@1, data), $1});
|
||||
$2->insert($2->begin(), {state->at(@1), $1});
|
||||
$$ = new ExprConcatStrings(CUR_POS, false, $2);
|
||||
}
|
||||
| SPATH {
|
||||
std::string path($1.p + 1, $1.l - 2);
|
||||
$$ = new ExprCall(CUR_POS,
|
||||
new ExprVar(data->symbols.create("__findFile")),
|
||||
{new ExprVar(data->symbols.create("__nixPath")),
|
||||
new ExprVar(state->s.findFile),
|
||||
{new ExprVar(state->s.nixPath),
|
||||
new ExprString(std::move(path))});
|
||||
}
|
||||
| URI {
|
||||
static bool noURLLiterals = experimentalFeatureSettings.isEnabled(Xp::NoUrlLiterals);
|
||||
if (noURLLiterals)
|
||||
throw ParseError({
|
||||
.msg = hintfmt("URL literals are disabled"),
|
||||
.errPos = data->state.positions[CUR_POS]
|
||||
.msg = HintFmt("URL literals are disabled"),
|
||||
.pos = state->positions[CUR_POS]
|
||||
});
|
||||
$$ = new ExprString(std::string($1));
|
||||
}
|
||||
|
@ -514,7 +253,7 @@ expr_simple
|
|||
/* Let expressions `let {..., body = ...}' are just desugared
|
||||
into `(rec {..., body = ...}).body'. */
|
||||
| LET '{' binds '}'
|
||||
{ $3->recursive = true; $$ = new ExprSelect(noPos, $3, data->symbols.create("body")); }
|
||||
{ $3->recursive = true; $$ = new ExprSelect(noPos, $3, state->s.body); }
|
||||
| REC '{' binds '}'
|
||||
{ $3->recursive = true; $$ = $3; }
|
||||
| '{' binds '}'
|
||||
|
@ -530,23 +269,23 @@ string_parts
|
|||
|
||||
string_parts_interpolated
|
||||
: string_parts_interpolated STR
|
||||
{ $$ = $1; $1->emplace_back(makeCurPos(@2, data), new ExprString(std::string($2))); }
|
||||
| string_parts_interpolated DOLLAR_CURLY expr '}' { $$ = $1; $1->emplace_back(makeCurPos(@2, data), $3); }
|
||||
| DOLLAR_CURLY expr '}' { $$ = new std::vector<std::pair<PosIdx, Expr *>>; $$->emplace_back(makeCurPos(@1, data), $2); }
|
||||
{ $$ = $1; $1->emplace_back(state->at(@2), new ExprString(std::string($2))); }
|
||||
| string_parts_interpolated DOLLAR_CURLY expr '}' { $$ = $1; $1->emplace_back(state->at(@2), $3); }
|
||||
| DOLLAR_CURLY expr '}' { $$ = new std::vector<std::pair<PosIdx, Expr *>>; $$->emplace_back(state->at(@1), $2); }
|
||||
| STR DOLLAR_CURLY expr '}' {
|
||||
$$ = new std::vector<std::pair<PosIdx, Expr *>>;
|
||||
$$->emplace_back(makeCurPos(@1, data), new ExprString(std::string($1)));
|
||||
$$->emplace_back(makeCurPos(@2, data), $3);
|
||||
$$->emplace_back(state->at(@1), new ExprString(std::string($1)));
|
||||
$$->emplace_back(state->at(@2), $3);
|
||||
}
|
||||
;
|
||||
|
||||
path_start
|
||||
: PATH {
|
||||
Path path(absPath({$1.p, $1.l}, data->basePath.path.abs()));
|
||||
Path path(absPath({$1.p, $1.l}, state->basePath.path.abs()));
|
||||
/* add back in the trailing '/' to the first segment */
|
||||
if ($1.p[$1.l-1] == '/' && $1.l > 1)
|
||||
path += "/";
|
||||
$$ = new ExprPath(ref<InputAccessor>(data->state.rootFS), std::move(path));
|
||||
$$ = new ExprPath(ref<InputAccessor>(state->rootFS), std::move(path));
|
||||
}
|
||||
| HPATH {
|
||||
if (evalSettings.pureEval) {
|
||||
|
@ -556,24 +295,24 @@ path_start
|
|||
);
|
||||
}
|
||||
Path path(getHome() + std::string($1.p + 1, $1.l - 1));
|
||||
$$ = new ExprPath(ref<InputAccessor>(data->state.rootFS), std::move(path));
|
||||
$$ = new ExprPath(ref<InputAccessor>(state->rootFS), std::move(path));
|
||||
}
|
||||
;
|
||||
|
||||
ind_string_parts
|
||||
: ind_string_parts IND_STR { $$ = $1; $1->emplace_back(makeCurPos(@2, data), $2); }
|
||||
| ind_string_parts DOLLAR_CURLY expr '}' { $$ = $1; $1->emplace_back(makeCurPos(@2, data), $3); }
|
||||
: ind_string_parts IND_STR { $$ = $1; $1->emplace_back(state->at(@2), $2); }
|
||||
| ind_string_parts DOLLAR_CURLY expr '}' { $$ = $1; $1->emplace_back(state->at(@2), $3); }
|
||||
| { $$ = new std::vector<std::pair<PosIdx, std::variant<Expr *, StringToken>>>; }
|
||||
;
|
||||
|
||||
binds
|
||||
: binds attrpath '=' expr ';' { $$ = $1; addAttr($$, std::move(*$2), $4, makeCurPos(@2, data), data->state); delete $2; }
|
||||
: binds attrpath '=' expr ';' { $$ = $1; state->addAttr($$, std::move(*$2), $4, state->at(@2)); delete $2; }
|
||||
| binds INHERIT attrs ';'
|
||||
{ $$ = $1;
|
||||
for (auto & i : *$3) {
|
||||
if ($$->attrs.find(i.symbol) != $$->attrs.end())
|
||||
dupAttr(data->state, i.symbol, makeCurPos(@3, data), $$->attrs[i.symbol].pos);
|
||||
auto pos = makeCurPos(@3, data);
|
||||
state->dupAttr(i.symbol, state->at(@3), $$->attrs[i.symbol].pos);
|
||||
auto pos = state->at(@3);
|
||||
$$->attrs.emplace(i.symbol, ExprAttrs::AttrDef(new ExprVar(CUR_POS, i.symbol), pos, true));
|
||||
}
|
||||
delete $3;
|
||||
|
@ -583,48 +322,48 @@ binds
|
|||
/* !!! Should ensure sharing of the expression in $4. */
|
||||
for (auto & i : *$6) {
|
||||
if ($$->attrs.find(i.symbol) != $$->attrs.end())
|
||||
dupAttr(data->state, i.symbol, makeCurPos(@6, data), $$->attrs[i.symbol].pos);
|
||||
$$->attrs.emplace(i.symbol, ExprAttrs::AttrDef(new ExprSelect(CUR_POS, $4, i.symbol), makeCurPos(@6, data)));
|
||||
state->dupAttr(i.symbol, state->at(@6), $$->attrs[i.symbol].pos);
|
||||
$$->attrs.emplace(i.symbol, ExprAttrs::AttrDef(new ExprSelect(CUR_POS, $4, i.symbol), state->at(@6)));
|
||||
}
|
||||
delete $6;
|
||||
}
|
||||
| { $$ = new ExprAttrs(makeCurPos(@0, data)); }
|
||||
| { $$ = new ExprAttrs(state->at(@0)); }
|
||||
;
|
||||
|
||||
attrs
|
||||
: attrs attr { $$ = $1; $1->push_back(AttrName(data->symbols.create($2))); }
|
||||
: attrs attr { $$ = $1; $1->push_back(AttrName(state->symbols.create($2))); }
|
||||
| attrs string_attr
|
||||
{ $$ = $1;
|
||||
ExprString * str = dynamic_cast<ExprString *>($2);
|
||||
if (str) {
|
||||
$$->push_back(AttrName(data->symbols.create(str->s)));
|
||||
$$->push_back(AttrName(state->symbols.create(str->s)));
|
||||
delete str;
|
||||
} else
|
||||
throw ParseError({
|
||||
.msg = hintfmt("dynamic attributes not allowed in inherit"),
|
||||
.errPos = data->state.positions[makeCurPos(@2, data)]
|
||||
.msg = HintFmt("dynamic attributes not allowed in inherit"),
|
||||
.pos = state->positions[state->at(@2)]
|
||||
});
|
||||
}
|
||||
| { $$ = new AttrPath; }
|
||||
;
|
||||
|
||||
attrpath
|
||||
: attrpath '.' attr { $$ = $1; $1->push_back(AttrName(data->symbols.create($3))); }
|
||||
: attrpath '.' attr { $$ = $1; $1->push_back(AttrName(state->symbols.create($3))); }
|
||||
| attrpath '.' string_attr
|
||||
{ $$ = $1;
|
||||
ExprString * str = dynamic_cast<ExprString *>($3);
|
||||
if (str) {
|
||||
$$->push_back(AttrName(data->symbols.create(str->s)));
|
||||
$$->push_back(AttrName(state->symbols.create(str->s)));
|
||||
delete str;
|
||||
} else
|
||||
$$->push_back(AttrName($3));
|
||||
}
|
||||
| attr { $$ = new std::vector<AttrName>; $$->push_back(AttrName(data->symbols.create($1))); }
|
||||
| attr { $$ = new std::vector<AttrName>; $$->push_back(AttrName(state->symbols.create($1))); }
|
||||
| string_attr
|
||||
{ $$ = new std::vector<AttrName>;
|
||||
ExprString *str = dynamic_cast<ExprString *>($1);
|
||||
if (str) {
|
||||
$$->push_back(AttrName(data->symbols.create(str->s)));
|
||||
$$->push_back(AttrName(state->symbols.create(str->s)));
|
||||
delete str;
|
||||
} else
|
||||
$$->push_back(AttrName($1));
|
||||
|
@ -650,226 +389,52 @@ formals
|
|||
: formal ',' formals
|
||||
{ $$ = $3; $$->formals.emplace_back(*$1); delete $1; }
|
||||
| formal
|
||||
{ $$ = new ParserFormals; $$->formals.emplace_back(*$1); $$->ellipsis = false; delete $1; }
|
||||
{ $$ = new Formals; $$->formals.emplace_back(*$1); $$->ellipsis = false; delete $1; }
|
||||
|
|
||||
{ $$ = new ParserFormals; $$->ellipsis = false; }
|
||||
{ $$ = new Formals; $$->ellipsis = false; }
|
||||
| ELLIPSIS
|
||||
{ $$ = new ParserFormals; $$->ellipsis = true; }
|
||||
{ $$ = new Formals; $$->ellipsis = true; }
|
||||
;
|
||||
|
||||
formal
|
||||
: ID { $$ = new Formal{CUR_POS, data->symbols.create($1), 0}; }
|
||||
| ID '?' expr { $$ = new Formal{CUR_POS, data->symbols.create($1), $3}; }
|
||||
: ID { $$ = new Formal{CUR_POS, state->symbols.create($1), 0}; }
|
||||
| ID '?' expr { $$ = new Formal{CUR_POS, state->symbols.create($1), $3}; }
|
||||
;
|
||||
|
||||
%%
|
||||
|
||||
|
||||
#include <sys/types.h>
|
||||
#include <sys/stat.h>
|
||||
#include <fcntl.h>
|
||||
#include <unistd.h>
|
||||
|
||||
#include "eval.hh"
|
||||
#include "filetransfer.hh"
|
||||
#include "tarball.hh"
|
||||
#include "store-api.hh"
|
||||
#include "flake/flake.hh"
|
||||
#include "fs-input-accessor.hh"
|
||||
#include "memory-input-accessor.hh"
|
||||
|
||||
|
||||
namespace nix {
|
||||
|
||||
unsigned long Expr::nrExprs = 0;
|
||||
|
||||
Expr * EvalState::parse(
|
||||
Expr * parseExprFromBuf(
|
||||
char * text,
|
||||
size_t length,
|
||||
Pos::Origin origin,
|
||||
const SourcePath & basePath,
|
||||
std::shared_ptr<StaticEnv> & staticEnv)
|
||||
SymbolTable & symbols,
|
||||
PosTable & positions,
|
||||
const ref<InputAccessor> rootFS,
|
||||
const Expr::AstSymbols & astSymbols)
|
||||
{
|
||||
yyscan_t scanner;
|
||||
ParseData data {
|
||||
.state = *this,
|
||||
ParserState state {
|
||||
.symbols = symbols,
|
||||
.positions = positions,
|
||||
.basePath = basePath,
|
||||
.origin = {origin},
|
||||
.rootFS = rootFS,
|
||||
.s = astSymbols,
|
||||
};
|
||||
|
||||
yylex_init(&scanner);
|
||||
Finally _destroy([&] { yylex_destroy(scanner); });
|
||||
|
||||
yy_scan_buffer(text, length, scanner);
|
||||
int res = yyparse(scanner, &data);
|
||||
yylex_destroy(scanner);
|
||||
yyparse(scanner, &state);
|
||||
|
||||
if (res) throw ParseError(data.error.value());
|
||||
|
||||
data.result->bindVars(*this, staticEnv);
|
||||
|
||||
return data.result;
|
||||
}
|
||||
|
||||
|
||||
SourcePath resolveExprPath(SourcePath path)
|
||||
{
|
||||
unsigned int followCount = 0, maxFollow = 1024;
|
||||
|
||||
/* If `path' is a symlink, follow it. This is so that relative
|
||||
path references work. */
|
||||
while (!path.path.isRoot()) {
|
||||
// Basic cycle/depth limit to avoid infinite loops.
|
||||
if (++followCount >= maxFollow)
|
||||
throw Error("too many symbolic links encountered while traversing the path '%s'", path);
|
||||
auto p = path.parent().resolveSymlinks() + path.baseName();
|
||||
if (p.lstat().type != InputAccessor::tSymlink) break;
|
||||
path = {path.accessor, CanonPath(p.readLink(), path.path.parent().value_or(CanonPath::root))};
|
||||
}
|
||||
|
||||
/* If `path' refers to a directory, append `/default.nix'. */
|
||||
if (path.resolveSymlinks().lstat().type == InputAccessor::tDirectory)
|
||||
return path + "default.nix";
|
||||
|
||||
return path;
|
||||
}
|
||||
|
||||
|
||||
Expr * EvalState::parseExprFromFile(const SourcePath & path)
|
||||
{
|
||||
return parseExprFromFile(path, staticBaseEnv);
|
||||
}
|
||||
|
||||
|
||||
Expr * EvalState::parseExprFromFile(const SourcePath & path, std::shared_ptr<StaticEnv> & staticEnv)
|
||||
{
|
||||
auto buffer = path.resolveSymlinks().readFile();
|
||||
// readFile hopefully has left some extra space for terminators
|
||||
buffer.append("\0\0", 2);
|
||||
return parse(buffer.data(), buffer.size(), Pos::Origin(path), path.parent(), staticEnv);
|
||||
}
|
||||
|
||||
|
||||
Expr * EvalState::parseExprFromString(std::string s_, const SourcePath & basePath, std::shared_ptr<StaticEnv> & staticEnv)
|
||||
{
|
||||
auto s = make_ref<std::string>(std::move(s_));
|
||||
s->append("\0\0", 2);
|
||||
return parse(s->data(), s->size(), Pos::String{.source = s}, basePath, staticEnv);
|
||||
}
|
||||
|
||||
|
||||
Expr * EvalState::parseExprFromString(std::string s, const SourcePath & basePath)
|
||||
{
|
||||
return parseExprFromString(std::move(s), basePath, staticBaseEnv);
|
||||
}
|
||||
|
||||
|
||||
Expr * EvalState::parseStdin()
|
||||
{
|
||||
//Activity act(*logger, lvlTalkative, "parsing standard input");
|
||||
auto buffer = drainFD(0);
|
||||
// drainFD should have left some extra space for terminators
|
||||
buffer.append("\0\0", 2);
|
||||
auto s = make_ref<std::string>(std::move(buffer));
|
||||
return parse(s->data(), s->size(), Pos::Stdin{.source = s}, rootPath(CanonPath::fromCwd()), staticBaseEnv);
|
||||
}
|
||||
|
||||
|
||||
SourcePath EvalState::findFile(const std::string_view path)
|
||||
{
|
||||
return findFile(searchPath, path);
|
||||
}
|
||||
|
||||
|
||||
SourcePath EvalState::findFile(const SearchPath & searchPath, const std::string_view path, const PosIdx pos)
|
||||
{
|
||||
for (auto & i : searchPath.elements) {
|
||||
auto suffixOpt = i.prefix.suffixIfPotentialMatch(path);
|
||||
|
||||
if (!suffixOpt) continue;
|
||||
auto suffix = *suffixOpt;
|
||||
|
||||
auto rOpt = resolveSearchPathPath(i.path);
|
||||
if (!rOpt) continue;
|
||||
auto r = *rOpt;
|
||||
|
||||
Path res = suffix == "" ? r : concatStrings(r, "/", suffix);
|
||||
if (pathExists(res)) return rootPath(CanonPath(canonPath(res)));
|
||||
}
|
||||
|
||||
if (hasPrefix(path, "nix/"))
|
||||
return {corepkgsFS, CanonPath(path.substr(3))};
|
||||
|
||||
debugThrow(ThrownError({
|
||||
.msg = hintfmt(evalSettings.pureEval
|
||||
? "cannot look up '<%s>' in pure evaluation mode (use '--impure' to override)"
|
||||
: "file '%s' was not found in the Nix search path (add it using $NIX_PATH or -I)",
|
||||
path),
|
||||
.errPos = positions[pos]
|
||||
}), 0, 0);
|
||||
}
|
||||
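A brief usage sketch (the search-path entry name is assumed, not part of this change): looking up an angle-bracket path walks the entries above, matching on the prefix and appending the remainder.

    // Assuming the search path contains an entry with prefix "nixpkgs":
    SourcePath p = state.findFile("nixpkgs/lib");
    // -> <resolved nixpkgs>/lib, or a ThrownError if no entry matches.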
|
||||
|
||||
std::optional<std::string> EvalState::resolveSearchPathPath(const SearchPath::Path & value0, bool initAccessControl)
|
||||
{
|
||||
auto & value = value0.s;
|
||||
auto i = searchPathResolved.find(value);
|
||||
if (i != searchPathResolved.end()) return i->second;
|
||||
|
||||
std::optional<std::string> res;
|
||||
|
||||
if (EvalSettings::isPseudoUrl(value)) {
|
||||
try {
|
||||
auto storePath = fetchers::downloadTarball(
|
||||
store, EvalSettings::resolvePseudoUrl(value), "source", false).storePath;
|
||||
res = { store->toRealPath(storePath) };
|
||||
} catch (FileTransferError & e) {
|
||||
logWarning({
|
||||
.msg = hintfmt("Nix search path entry '%1%' cannot be downloaded, ignoring", value)
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
else if (hasPrefix(value, "flake:")) {
|
||||
experimentalFeatureSettings.require(Xp::Flakes);
|
||||
auto flakeRef = parseFlakeRef(value.substr(6), {}, true, false);
|
||||
debug("fetching flake search path element '%s''", value);
|
||||
auto storePath = flakeRef.resolve(store).fetchTree(store).first;
|
||||
res = { store->toRealPath(storePath) };
|
||||
}
|
||||
|
||||
else {
|
||||
auto path = absPath(value);
|
||||
|
||||
/* Allow access to paths in the search path. */
|
||||
if (initAccessControl) {
|
||||
allowPath(path);
|
||||
if (store->isInStore(path)) {
|
||||
try {
|
||||
StorePathSet closure;
|
||||
store->computeFSClosure(store->toStorePath(path).first, closure);
|
||||
for (auto & p : closure)
|
||||
allowPath(p);
|
||||
} catch (InvalidPath &) { }
|
||||
}
|
||||
}
|
||||
|
||||
if (pathExists(path))
|
||||
res = { path };
|
||||
else {
|
||||
logWarning({
|
||||
.msg = hintfmt("Nix search path entry '%1%' does not exist, ignoring", value)
|
||||
});
|
||||
res = std::nullopt;
|
||||
}
|
||||
}
|
||||
|
||||
if (res)
|
||||
debug("resolved search path element '%s' to '%s'", value, *res);
|
||||
else
|
||||
debug("failed to resolve search path element '%s'", value);
|
||||
|
||||
searchPathResolved.emplace(value, res);
|
||||
return res;
|
||||
}
|
||||
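For reference, the three entry forms handled above, shown with assumed values:

    //   -I nixpkgs=https://example.org/nixpkgs.tar.gz   -> tarball is downloaded to the store
    //   -I nixpkgs=flake:nixpkgs                        -> resolved via the flake registry
    //   -I nixpkgs=/path/to/nixpkgs                     -> used as-is if the path exists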
|
||||
|
||||
|
|
|
@ -1,5 +1,4 @@
|
|||
#include "eval.hh"
|
||||
#include "fs-input-accessor.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
|
@ -8,4 +7,9 @@ SourcePath EvalState::rootPath(CanonPath path)
|
|||
return {rootFS, std::move(path)};
|
||||
}
|
||||
|
||||
SourcePath EvalState::rootPath(PathView path)
|
||||
{
|
||||
return {rootFS, CanonPath(absPath(path))};
|
||||
}
|
||||
|
||||
}
|
||||
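A small sketch of the two overloads (paths are assumed): the CanonPath form takes an already-canonical absolute path, while the new PathView form absolutizes its argument against the current directory.

    auto a = state.rootPath(CanonPath("/etc/nixos"));   // already canonical
    auto b = state.rootPath("configuration.nix");       // becomes absPath("configuration.nix")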
|
|
src/libexpr/pos-idx.hh (new file, 48 lines)
|
@ -0,0 +1,48 @@
|
|||
#pragma once
|
||||
|
||||
#include <cinttypes>
|
||||
|
||||
namespace nix {
|
||||
|
||||
class PosIdx
|
||||
{
|
||||
friend class PosTable;
|
||||
|
||||
private:
|
||||
uint32_t id;
|
||||
|
||||
explicit PosIdx(uint32_t id)
|
||||
: id(id)
|
||||
{
|
||||
}
|
||||
|
||||
public:
|
||||
PosIdx()
|
||||
: id(0)
|
||||
{
|
||||
}
|
||||
|
||||
explicit operator bool() const
|
||||
{
|
||||
return id > 0;
|
||||
}
|
||||
|
||||
bool operator<(const PosIdx other) const
|
||||
{
|
||||
return id < other.id;
|
||||
}
|
||||
|
||||
bool operator==(const PosIdx other) const
|
||||
{
|
||||
return id == other.id;
|
||||
}
|
||||
|
||||
bool operator!=(const PosIdx other) const
|
||||
{
|
||||
return id != other.id;
|
||||
}
|
||||
};
|
||||
|
||||
inline PosIdx noPos = {};
|
||||
|
||||
}
|
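A short sketch of the intended semantics (the asserts are illustrative): a default-constructed PosIdx is the null position and compares equal to noPos.

    PosIdx p;              // id == 0
    assert(!p);            // explicit operator bool() yields false
    assert(p == noPos);    // noPos is just the default instance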
src/libexpr/pos-table.hh (new file, 83 lines)
|
@ -0,0 +1,83 @@
|
|||
#pragma once
|
||||
|
||||
#include <cinttypes>
|
||||
#include <numeric>
|
||||
#include <vector>
|
||||
|
||||
#include "chunked-vector.hh"
|
||||
#include "pos-idx.hh"
|
||||
#include "position.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
class PosTable
|
||||
{
|
||||
public:
|
||||
class Origin
|
||||
{
|
||||
friend PosTable;
|
||||
private:
|
||||
// must always be invalid by default, add() replaces this with the actual value.
|
||||
// subsequent add() calls use this index as a token to quickly check whether the
|
||||
// current origins.back() can be reused or not.
|
||||
mutable uint32_t idx = std::numeric_limits<uint32_t>::max();
|
||||
|
||||
// Used for searching in PosTable::[].
|
||||
explicit Origin(uint32_t idx)
|
||||
: idx(idx)
|
||||
, origin{std::monostate()}
|
||||
{
|
||||
}
|
||||
|
||||
public:
|
||||
const Pos::Origin origin;
|
||||
|
||||
Origin(Pos::Origin origin)
|
||||
: origin(origin)
|
||||
{
|
||||
}
|
||||
};
|
||||
|
||||
struct Offset
|
||||
{
|
||||
uint32_t line, column;
|
||||
};
|
||||
|
||||
private:
|
||||
std::vector<Origin> origins;
|
||||
ChunkedVector<Offset, 8192> offsets;
|
||||
|
||||
public:
|
||||
PosTable()
|
||||
: offsets(1024)
|
||||
{
|
||||
origins.reserve(1024);
|
||||
}
|
||||
|
||||
PosIdx add(const Origin & origin, uint32_t line, uint32_t column)
|
||||
{
|
||||
const auto idx = offsets.add({line, column}).second;
|
||||
if (origins.empty() || origins.back().idx != origin.idx) {
|
||||
origin.idx = idx;
|
||||
origins.push_back(origin);
|
||||
}
|
||||
return PosIdx(idx + 1);
|
||||
}
|
||||
|
||||
Pos operator[](PosIdx p) const
|
||||
{
|
||||
if (p.id == 0 || p.id > offsets.size())
|
||||
return {};
|
||||
const auto idx = p.id - 1;
|
||||
/* we want the last key <= idx, so we'll take prev(first key > idx).
|
||||
this is guaranteed to never rewind origin.begin because the first
|
||||
key is always 0. */
|
||||
const auto pastOrigin = std::upper_bound(
|
||||
origins.begin(), origins.end(), Origin(idx), [](const auto & a, const auto & b) { return a.idx < b.idx; });
|
||||
const auto origin = *std::prev(pastOrigin);
|
||||
const auto offset = offsets[idx];
|
||||
return {offset.line, offset.column, origin.origin};
|
||||
}
|
||||
};
|
||||
|
||||
}
|
|
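A usage sketch, under the assumption that Pos (from position.hh) exposes line and column members; the source string here is made up for illustration:

    PosTable positions;
    auto source = make_ref<std::string>("1 + 2");
    PosTable::Origin origin(Pos::Origin{Pos::String{.source = source}});
    PosIdx idx = positions.add(origin, 1, 5);   // line 1, column 5
    Pos pos = positions[idx];                   // pos.line == 1, pos.column == 5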
@ -39,10 +39,6 @@ namespace nix {
|
|||
* Miscellaneous
|
||||
*************************************************************/
|
||||
|
||||
|
||||
InvalidPathError::InvalidPathError(const Path & path) :
|
||||
EvalError("path '%s' is not valid", path), path(path) {}
|
||||
|
||||
StringMap EvalState::realiseContext(const NixStringContext & context)
|
||||
{
|
||||
std::vector<DerivedPath::Built> drvs;
|
||||
|
@ -51,7 +47,7 @@ StringMap EvalState::realiseContext(const NixStringContext & context)
|
|||
for (auto & c : context) {
|
||||
auto ensureValid = [&](const StorePath & p) {
|
||||
if (!store->isValidPath(p))
|
||||
debugThrowLastTrace(InvalidPathError(store->printStorePath(p)));
|
||||
error<InvalidPathError>(store->printStorePath(p)).debugThrow();
|
||||
};
|
||||
std::visit(overloaded {
|
||||
[&](const NixStringContextElem::Built & b) {
|
||||
|
@ -78,9 +74,10 @@ StringMap EvalState::realiseContext(const NixStringContext & context)
|
|||
if (drvs.empty()) return {};
|
||||
|
||||
if (!evalSettings.enableImportFromDerivation)
|
||||
debugThrowLastTrace(Error(
|
||||
error<EvalError>(
|
||||
"cannot build '%1%' during evaluation because the option 'allow-import-from-derivation' is disabled",
|
||||
drvs.begin()->to_string(*store)));
|
||||
drvs.begin()->to_string(*store)
|
||||
).debugThrow();
|
||||
|
||||
/* Build/substitute the context. */
|
||||
std::vector<DerivedPath> buildReqs;
|
||||
|
@ -112,7 +109,7 @@ StringMap EvalState::realiseContext(const NixStringContext & context)
|
|||
for (auto & outputPath : outputsToCopyAndAllow) {
|
||||
/* Add the output of this derivation to the allowed
|
||||
paths. */
|
||||
allowPath(store->toRealPath(outputPath));
|
||||
allowPath(outputPath);
|
||||
}
|
||||
|
||||
return res;
|
||||
|
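These realiseContext hunks also show the error-reporting migration that recurs throughout this commit; a condensed sketch of the new idiom (the message and names are placeholders):

    // Before: state.debugThrowLastTrace(EvalError({ .msg = hintfmt(...), .errPos = ... }));
    // After:
    state.error<EvalError>("something went wrong with '%s'", name)
        .atPos(pos)
        .debugThrow();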
@ -340,16 +337,16 @@ void prim_importNative(EvalState & state, const PosIdx pos, Value * * args, Valu
|
|||
|
||||
void *handle = dlopen(path.path.c_str(), RTLD_LAZY | RTLD_LOCAL);
|
||||
if (!handle)
|
||||
state.debugThrowLastTrace(EvalError("could not open '%1%': %2%", path, dlerror()));
|
||||
state.error<EvalError>("could not open '%1%': %2%", path, dlerror()).debugThrow();
|
||||
|
||||
dlerror();
|
||||
ValueInitializer func = (ValueInitializer) dlsym(handle, sym.c_str());
|
||||
if(!func) {
|
||||
char *message = dlerror();
|
||||
if (message)
|
||||
state.debugThrowLastTrace(EvalError("could not load symbol '%1%' from '%2%': %3%", sym, path, message));
|
||||
state.error<EvalError>("could not load symbol '%1%' from '%2%': %3%", sym, path, message).debugThrow();
|
||||
else
|
||||
state.debugThrowLastTrace(EvalError("symbol '%1%' from '%2%' resolved to NULL when a function pointer was expected", sym, path));
|
||||
state.error<EvalError>("symbol '%1%' from '%2%' resolved to NULL when a function pointer was expected", sym, path).debugThrow();
|
||||
}
|
||||
|
||||
(func)(state, v);
|
||||
|
@ -365,7 +362,7 @@ void prim_exec(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
|||
auto elems = args[0]->listElems();
|
||||
auto count = args[0]->listSize();
|
||||
if (count == 0)
|
||||
state.error("at least one argument to 'exec' required").atPos(pos).debugThrow<EvalError>();
|
||||
state.error<EvalError>("at least one argument to 'exec' required").atPos(pos).debugThrow();
|
||||
NixStringContext context;
|
||||
auto program = state.coerceToString(pos, *elems[0], context,
|
||||
"while evaluating the first element of the argument passed to builtins.exec",
|
||||
|
@ -380,7 +377,7 @@ void prim_exec(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
|||
try {
|
||||
auto _ = state.realiseContext(context); // FIXME: Handle CA derivations
|
||||
} catch (InvalidPathError & e) {
|
||||
state.error("cannot execute '%1%', since path '%2%' is not valid", program, e.path).atPos(pos).debugThrow<EvalError>();
|
||||
state.error<EvalError>("cannot execute '%1%', since path '%2%' is not valid", program, e.path).atPos(pos).debugThrow();
|
||||
}
|
||||
|
||||
auto output = runProgram(program, true, commandArgs);
|
||||
|
@ -582,7 +579,7 @@ struct CompareValues
|
|||
if (v1->type() == nInt && v2->type() == nFloat)
|
||||
return v1->integer < v2->fpoint;
|
||||
if (v1->type() != v2->type())
|
||||
state.error("cannot compare %s with %s", showType(*v1), showType(*v2)).debugThrow<EvalError>();
|
||||
state.error<EvalError>("cannot compare %s with %s", showType(*v1), showType(*v2)).debugThrow();
|
||||
// Allow selecting a subset of enum values
|
||||
#pragma GCC diagnostic push
|
||||
#pragma GCC diagnostic ignored "-Wswitch-enum"
|
||||
|
@ -610,7 +607,7 @@ struct CompareValues
|
|||
}
|
||||
}
|
||||
default:
|
||||
state.error("cannot compare %s with %s; values of that type are incomparable", showType(*v1), showType(*v2)).debugThrow<EvalError>();
|
||||
state.error<EvalError>("cannot compare %s with %s; values of that type are incomparable", showType(*v1), showType(*v2)).debugThrow();
|
||||
#pragma GCC diagnostic pop
|
||||
}
|
||||
} catch (Error & e) {
|
||||
|
@ -637,7 +634,7 @@ static Bindings::iterator getAttr(
|
|||
{
|
||||
Bindings::iterator value = attrSet->find(attrSym);
|
||||
if (value == attrSet->end()) {
|
||||
state.error("attribute '%s' missing", state.symbols[attrSym]).withTrace(noPos, errorCtx).debugThrow<TypeError>();
|
||||
state.error<TypeError>("attribute '%s' missing", state.symbols[attrSym]).withTrace(noPos, errorCtx).debugThrow();
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
@ -757,8 +754,8 @@ static RegisterPrimOp primop_break({
|
|||
if (state.debugRepl && !state.debugTraces.empty()) {
|
||||
auto error = Error(ErrorInfo {
|
||||
.level = lvlInfo,
|
||||
.msg = hintfmt("breakpoint reached"),
|
||||
.errPos = state.positions[pos],
|
||||
.msg = HintFmt("breakpoint reached"),
|
||||
.pos = state.positions[pos],
|
||||
});
|
||||
|
||||
auto & dt = state.debugTraces.front();
|
||||
|
@ -768,8 +765,8 @@ static RegisterPrimOp primop_break({
|
|||
// If the user elects to quit the repl, throw an exception.
|
||||
throw Error(ErrorInfo{
|
||||
.level = lvlInfo,
|
||||
.msg = hintfmt("quit the debugger"),
|
||||
.errPos = nullptr,
|
||||
.msg = HintFmt("quit the debugger"),
|
||||
.pos = nullptr,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
@ -790,7 +787,7 @@ static RegisterPrimOp primop_abort({
|
|||
NixStringContext context;
|
||||
auto s = state.coerceToString(pos, *args[0], context,
|
||||
"while evaluating the error message passed to builtins.abort").toOwned();
|
||||
state.debugThrowLastTrace(Abort("evaluation aborted with the following error message: '%1%'", s));
|
||||
state.error<Abort>("evaluation aborted with the following error message: '%1%'", s).debugThrow();
|
||||
}
|
||||
});
|
||||
|
||||
|
@ -809,7 +806,7 @@ static RegisterPrimOp primop_throw({
|
|||
NixStringContext context;
|
||||
auto s = state.coerceToString(pos, *args[0], context,
|
||||
"while evaluating the error message passed to builtin.throw").toOwned();
|
||||
state.debugThrowLastTrace(ThrownError(s));
|
||||
state.error<ThrownError>(s).debugThrow();
|
||||
}
|
||||
});
|
||||
|
||||
|
@ -823,7 +820,7 @@ static void prim_addErrorContext(EvalState & state, const PosIdx pos, Value * *
|
|||
auto message = state.coerceToString(pos, *args[0], context,
|
||||
"while evaluating the error message passed to builtins.addErrorContext",
|
||||
false, false).toOwned();
|
||||
e.addTrace(nullptr, hintfmt(message), true);
|
||||
e.addTrace(nullptr, HintFmt(message), true);
|
||||
throw;
|
||||
}
|
||||
}
|
||||
|
@ -997,7 +994,7 @@ static void prim_trace(EvalState & state, const PosIdx pos, Value * * args, Valu
|
|||
if (args[0]->type() == nString)
|
||||
printError("trace: %1%", args[0]->string_view());
|
||||
else
|
||||
printError("trace: %1%", printValue(state, *args[0]));
|
||||
printError("trace: %1%", ValuePrinter(state, *args[0]));
|
||||
state.forceValue(*args[1], pos);
|
||||
v = *args[1];
|
||||
}
|
||||
|
@ -1074,7 +1071,7 @@ static void prim_derivationStrict(EvalState & state, const PosIdx pos, Value * *
|
|||
* often results from the composition of several functions
|
||||
* (derivationStrict, derivation, mkDerivation, mkPythonModule, etc.)
|
||||
*/
|
||||
e.addTrace(nullptr, hintfmt(
|
||||
e.addTrace(nullptr, HintFmt(
|
||||
"while evaluating derivation '%s'\n"
|
||||
" whose name attribute is located at %s",
|
||||
drvName, pos), true);
|
||||
|
@ -1088,9 +1085,10 @@ drvName, Bindings * attrs, Value & v)
|
|||
/* Check whether attributes should be passed as a JSON file. */
|
||||
using nlohmann::json;
|
||||
std::optional<json> jsonObject;
|
||||
auto pos = v.determinePos(noPos);
|
||||
auto attr = attrs->find(state.sStructuredAttrs);
|
||||
if (attr != attrs->end() &&
|
||||
state.forceBool(*attr->value, noPos,
|
||||
state.forceBool(*attr->value, pos,
|
||||
"while evaluating the `__structuredAttrs` "
|
||||
"attribute passed to builtins.derivationStrict"))
|
||||
jsonObject = json::object();
|
||||
|
@ -1099,7 +1097,7 @@ drvName, Bindings * attrs, Value & v)
|
|||
bool ignoreNulls = false;
|
||||
attr = attrs->find(state.sIgnoreNulls);
|
||||
if (attr != attrs->end())
|
||||
ignoreNulls = state.forceBool(*attr->value, noPos, "while evaluating the `__ignoreNulls` attribute " "passed to builtins.derivationStrict");
|
||||
ignoreNulls = state.forceBool(*attr->value, pos, "while evaluating the `__ignoreNulls` attribute " "passed to builtins.derivationStrict");
|
||||
|
||||
/* Build the derivation expression by processing the attributes. */
|
||||
Derivation drv;
|
||||
|
@ -1128,37 +1126,33 @@ drvName, Bindings * attrs, Value & v)
|
|||
experimentalFeatureSettings.require(Xp::DynamicDerivations);
|
||||
ingestionMethod = TextIngestionMethod {};
|
||||
} else
|
||||
state.debugThrowLastTrace(EvalError({
|
||||
.msg = hintfmt("invalid value '%s' for 'outputHashMode' attribute", s),
|
||||
.errPos = state.positions[noPos]
|
||||
}));
|
||||
state.error<EvalError>(
|
||||
"invalid value '%s' for 'outputHashMode' attribute", s
|
||||
).atPos(v).debugThrow();
|
||||
};
|
||||
|
||||
auto handleOutputs = [&](const Strings & ss) {
|
||||
outputs.clear();
|
||||
for (auto & j : ss) {
|
||||
if (outputs.find(j) != outputs.end())
|
||||
state.debugThrowLastTrace(EvalError({
|
||||
.msg = hintfmt("duplicate derivation output '%1%'", j),
|
||||
.errPos = state.positions[noPos]
|
||||
}));
|
||||
state.error<EvalError>("duplicate derivation output '%1%'", j)
|
||||
.atPos(v)
|
||||
.debugThrow();
|
||||
/* !!! Check whether j is a valid attribute
|
||||
name. */
|
||||
/* Derivations cannot be named ‘drv’, because
|
||||
then we'd have an attribute ‘drvPath’ in
|
||||
the resulting set. */
|
||||
if (j == "drv")
|
||||
state.debugThrowLastTrace(EvalError({
|
||||
.msg = hintfmt("invalid derivation output name 'drv'" ),
|
||||
.errPos = state.positions[noPos]
|
||||
}));
|
||||
state.error<EvalError>("invalid derivation output name 'drv'")
|
||||
.atPos(v)
|
||||
.debugThrow();
|
||||
outputs.insert(j);
|
||||
}
|
||||
if (outputs.empty())
|
||||
state.debugThrowLastTrace(EvalError({
|
||||
.msg = hintfmt("derivation cannot have an empty set of outputs"),
|
||||
.errPos = state.positions[noPos]
|
||||
}));
|
||||
state.error<EvalError>("derivation cannot have an empty set of outputs")
|
||||
.atPos(v)
|
||||
.debugThrow();
|
||||
};
|
||||
|
||||
try {
|
||||
|
@ -1167,16 +1161,16 @@ drvName, Bindings * attrs, Value & v)
|
|||
const std::string_view context_below("");
|
||||
|
||||
if (ignoreNulls) {
|
||||
state.forceValue(*i->value, noPos);
|
||||
state.forceValue(*i->value, pos);
|
||||
if (i->value->type() == nNull) continue;
|
||||
}
|
||||
|
||||
if (i->name == state.sContentAddressed && state.forceBool(*i->value, noPos, context_below)) {
|
||||
if (i->name == state.sContentAddressed && state.forceBool(*i->value, pos, context_below)) {
|
||||
contentAddressed = true;
|
||||
experimentalFeatureSettings.require(Xp::CaDerivations);
|
||||
}
|
||||
|
||||
else if (i->name == state.sImpure && state.forceBool(*i->value, noPos, context_below)) {
|
||||
else if (i->name == state.sImpure && state.forceBool(*i->value, pos, context_below)) {
|
||||
isImpure = true;
|
||||
experimentalFeatureSettings.require(Xp::ImpureDerivations);
|
||||
}
|
||||
|
@ -1184,9 +1178,9 @@ drvName, Bindings * attrs, Value & v)
|
|||
/* The `args' attribute is special: it supplies the
|
||||
command-line arguments to the builder. */
|
||||
else if (i->name == state.sArgs) {
|
||||
state.forceList(*i->value, noPos, context_below);
|
||||
state.forceList(*i->value, pos, context_below);
|
||||
for (auto elem : i->value->listItems()) {
|
||||
auto s = state.coerceToString(noPos, *elem, context,
|
||||
auto s = state.coerceToString(pos, *elem, context,
|
||||
"while evaluating an element of the argument list",
|
||||
true).toOwned();
|
||||
drv.args.push_back(s);
|
||||
|
@ -1201,29 +1195,29 @@ drvName, Bindings * attrs, Value & v)
|
|||
|
||||
if (i->name == state.sStructuredAttrs) continue;
|
||||
|
||||
(*jsonObject)[key] = printValueAsJSON(state, true, *i->value, noPos, context);
|
||||
(*jsonObject)[key] = printValueAsJSON(state, true, *i->value, pos, context);
|
||||
|
||||
if (i->name == state.sBuilder)
|
||||
drv.builder = state.forceString(*i->value, context, noPos, context_below);
|
||||
drv.builder = state.forceString(*i->value, context, pos, context_below);
|
||||
else if (i->name == state.sSystem)
|
||||
drv.platform = state.forceStringNoCtx(*i->value, noPos, context_below);
|
||||
drv.platform = state.forceStringNoCtx(*i->value, pos, context_below);
|
||||
else if (i->name == state.sOutputHash)
|
||||
outputHash = state.forceStringNoCtx(*i->value, noPos, context_below);
|
||||
outputHash = state.forceStringNoCtx(*i->value, pos, context_below);
|
||||
else if (i->name == state.sOutputHashAlgo)
|
||||
outputHashAlgo = state.forceStringNoCtx(*i->value, noPos, context_below);
|
||||
outputHashAlgo = state.forceStringNoCtx(*i->value, pos, context_below);
|
||||
else if (i->name == state.sOutputHashMode)
|
||||
handleHashMode(state.forceStringNoCtx(*i->value, noPos, context_below));
|
||||
handleHashMode(state.forceStringNoCtx(*i->value, pos, context_below));
|
||||
else if (i->name == state.sOutputs) {
|
||||
/* Require ‘outputs’ to be a list of strings. */
|
||||
state.forceList(*i->value, noPos, context_below);
|
||||
state.forceList(*i->value, pos, context_below);
|
||||
Strings ss;
|
||||
for (auto elem : i->value->listItems())
|
||||
ss.emplace_back(state.forceStringNoCtx(*elem, noPos, context_below));
|
||||
ss.emplace_back(state.forceStringNoCtx(*elem, pos, context_below));
|
||||
handleOutputs(ss);
|
||||
}
|
||||
|
||||
} else {
|
||||
auto s = state.coerceToString(noPos, *i->value, context, context_below, true).toOwned();
|
||||
auto s = state.coerceToString(pos, *i->value, context, context_below, true).toOwned();
|
||||
drv.env.emplace(key, s);
|
||||
if (i->name == state.sBuilder) drv.builder = std::move(s);
|
||||
else if (i->name == state.sSystem) drv.platform = std::move(s);
|
||||
|
@ -1238,7 +1232,7 @@ drvName, Bindings * attrs, Value & v)
|
|||
|
||||
} catch (Error & e) {
|
||||
e.addTrace(state.positions[i->pos],
|
||||
hintfmt("while evaluating attribute '%1%' of derivation '%2%'", key, drvName),
|
||||
HintFmt("while evaluating attribute '%1%' of derivation '%2%'", key, drvName),
|
||||
true);
|
||||
throw;
|
||||
}
|
||||
|
@ -1281,16 +1275,14 @@ drvName, Bindings * attrs, Value & v)
|
|||
|
||||
/* Do we have all required attributes? */
|
||||
if (drv.builder == "")
|
||||
state.debugThrowLastTrace(EvalError({
|
||||
.msg = hintfmt("required attribute 'builder' missing"),
|
||||
.errPos = state.positions[noPos]
|
||||
}));
|
||||
state.error<EvalError>("required attribute 'builder' missing")
|
||||
.atPos(v)
|
||||
.debugThrow();
|
||||
|
||||
if (drv.platform == "")
|
||||
state.debugThrowLastTrace(EvalError({
|
||||
.msg = hintfmt("required attribute 'system' missing"),
|
||||
.errPos = state.positions[noPos]
|
||||
}));
|
||||
state.error<EvalError>("required attribute 'system' missing")
|
||||
.atPos(v)
|
||||
.debugThrow();
|
||||
|
||||
/* Check whether the derivation name is valid. */
|
||||
if (isDerivation(drvName) &&
|
||||
|
@ -1298,10 +1290,10 @@ drvName, Bindings * attrs, Value & v)
|
|||
outputs.size() == 1 &&
|
||||
*(outputs.begin()) == "out"))
|
||||
{
|
||||
state.debugThrowLastTrace(EvalError({
|
||||
.msg = hintfmt("derivation names are allowed to end in '%s' only if they produce a single derivation file", drvExtension),
|
||||
.errPos = state.positions[noPos]
|
||||
}));
|
||||
state.error<EvalError>(
|
||||
"derivation names are allowed to end in '%s' only if they produce a single derivation file",
|
||||
drvExtension
|
||||
).atPos(v).debugThrow();
|
||||
}
|
||||
|
||||
if (outputHash) {
|
||||
|
@ -1310,10 +1302,9 @@ drvName, Bindings * attrs, Value & v)
|
|||
Ignore `__contentAddressed` because fixed output derivations are
|
||||
already content addressed. */
|
||||
if (outputs.size() != 1 || *(outputs.begin()) != "out")
|
||||
state.debugThrowLastTrace(Error({
|
||||
.msg = hintfmt("multiple outputs are not supported in fixed-output derivations"),
|
||||
.errPos = state.positions[noPos]
|
||||
}));
|
||||
state.error<EvalError>(
|
||||
"multiple outputs are not supported in fixed-output derivations"
|
||||
).atPos(v).debugThrow();
|
||||
|
||||
auto h = newHashAllowEmpty(*outputHash, parseHashAlgoOpt(outputHashAlgo));
|
||||
|
||||
|
@ -1332,10 +1323,8 @@ drvName, Bindings * attrs, Value & v)
|
|||
|
||||
else if (contentAddressed || isImpure) {
|
||||
if (contentAddressed && isImpure)
|
||||
throw EvalError({
|
||||
.msg = hintfmt("derivation cannot be both content-addressed and impure"),
|
||||
.errPos = state.positions[noPos]
|
||||
});
|
||||
state.error<EvalError>("derivation cannot be both content-addressed and impure")
|
||||
.atPos(v).debugThrow();
|
||||
|
||||
auto ha = parseHashAlgoOpt(outputHashAlgo).value_or(HashAlgorithm::SHA256);
|
||||
auto method = ingestionMethod.value_or(FileIngestionMethod::Recursive);
|
||||
|
@ -1376,10 +1365,10 @@ drvName, Bindings * attrs, Value & v)
|
|||
for (auto & i : outputs) {
|
||||
auto h = get(hashModulo.hashes, i);
|
||||
if (!h)
|
||||
throw AssertionError({
|
||||
.msg = hintfmt("derivation produced no hash for output '%s'", i),
|
||||
.errPos = state.positions[noPos],
|
||||
});
|
||||
state.error<AssertionError>(
|
||||
"derivation produced no hash for output '%s'",
|
||||
i
|
||||
).atPos(v).debugThrow();
|
||||
auto outPath = state.store->makeOutputPath(i, *h, drvName);
|
||||
drv.env[i] = state.store->printStorePath(outPath);
|
||||
drv.outputs.insert_or_assign(
|
||||
|
@ -1485,10 +1474,10 @@ static RegisterPrimOp primop_toPath({
|
|||
static void prim_storePath(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
{
|
||||
if (evalSettings.pureEval)
|
||||
state.debugThrowLastTrace(EvalError({
|
||||
.msg = hintfmt("'%s' is not allowed in pure evaluation mode", "builtins.storePath"),
|
||||
.errPos = state.positions[pos]
|
||||
}));
|
||||
state.error<EvalError>(
|
||||
"'%s' is not allowed in pure evaluation mode",
|
||||
"builtins.storePath"
|
||||
).atPos(pos).debugThrow();
|
||||
|
||||
NixStringContext context;
|
||||
auto path = state.coerceToPath(pos, *args[0], context, "while evaluating the first argument passed to 'builtins.storePath'").path;
|
||||
|
@ -1498,10 +1487,8 @@ static void prim_storePath(EvalState & state, const PosIdx pos, Value * * args,
|
|||
if (!state.store->isStorePath(path.abs()))
|
||||
path = CanonPath(canonPath(path.abs(), true));
|
||||
if (!state.store->isInStore(path.abs()))
|
||||
state.debugThrowLastTrace(EvalError({
|
||||
.msg = hintfmt("path '%1%' is not in the Nix store", path),
|
||||
.errPos = state.positions[pos]
|
||||
}));
|
||||
state.error<EvalError>("path '%1%' is not in the Nix store", path)
|
||||
.atPos(pos).debugThrow();
|
||||
auto path2 = state.store->toStorePath(path.abs()).first;
|
||||
if (!settings.readOnlyMode)
|
||||
state.store->ensurePath(path2);
|
||||
|
@ -1616,7 +1603,10 @@ static void prim_readFile(EvalState & state, const PosIdx pos, Value * * args, V
|
|||
auto path = realisePath(state, pos, *args[0]);
|
||||
auto s = path.readFile();
|
||||
if (s.find((char) 0) != std::string::npos)
|
||||
state.debugThrowLastTrace(Error("the contents of the file '%1%' cannot be represented as a Nix string", path));
|
||||
state.error<EvalError>(
|
||||
"the contents of the file '%1%' cannot be represented as a Nix string",
|
||||
path
|
||||
).atPos(pos).debugThrow();
|
||||
StorePathSet refs;
|
||||
if (state.store->isInStore(path.path.abs())) {
|
||||
try {
|
||||
|
@ -1673,10 +1663,11 @@ static void prim_findFile(EvalState & state, const PosIdx pos, Value * * args, V
|
|||
auto rewrites = state.realiseContext(context);
|
||||
path = rewriteStrings(path, rewrites);
|
||||
} catch (InvalidPathError & e) {
|
||||
state.debugThrowLastTrace(EvalError({
|
||||
.msg = hintfmt("cannot find '%1%', since path '%2%' is not valid", path, e.path),
|
||||
.errPos = state.positions[pos]
|
||||
}));
|
||||
state.error<EvalError>(
|
||||
"cannot find '%1%', since path '%2%' is not valid",
|
||||
path,
|
||||
e.path
|
||||
).atPos(pos).debugThrow();
|
||||
}
|
||||
|
||||
searchPath.elements.emplace_back(SearchPath::Elem {
|
||||
|
@ -1745,10 +1736,7 @@ static void prim_hashFile(EvalState & state, const PosIdx pos, Value * * args, V
|
|||
auto algo = state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.hashFile");
|
||||
std::optional<HashAlgorithm> ha = parseHashAlgo(algo);
|
||||
if (!ha)
|
||||
state.debugThrowLastTrace(Error({
|
||||
.msg = hintfmt("unknown hash algo '%1%'", algo),
|
||||
.errPos = state.positions[pos]
|
||||
}));
|
||||
state.error<EvalError>("unknown hash algorithm '%1%'", algo).atPos(pos).debugThrow();
|
||||
|
||||
auto path = realisePath(state, pos, *args[1]);
|
||||
|
||||
|
@ -1816,7 +1804,7 @@ static void prim_readDir(EvalState & state, const PosIdx pos, Value * * args, Va
|
|||
// detailed node info quickly in this case we produce a thunk to
|
||||
// query the file type lazily.
|
||||
auto epath = state.allocValue();
|
||||
epath->mkPath(path + name);
|
||||
epath->mkPath(path / name);
|
||||
if (!readFileType)
|
||||
readFileType = &state.getBuiltin("readFileType");
|
||||
attr.mkApp(readFileType, epath);
|
||||
|
@ -1878,7 +1866,7 @@ static RegisterPrimOp primop_outputOf({
|
|||
For instance,
|
||||
```nix
|
||||
builtins.outputOf
|
||||
(builtins.outputOf myDrv "out)
|
||||
(builtins.outputOf myDrv "out")
|
||||
"out"
|
||||
```
|
||||
will return a placeholder for the output of the output of `myDrv`.
|
||||
|
@ -2068,13 +2056,12 @@ static void prim_toFile(EvalState & state, const PosIdx pos, Value * * args, Val
|
|||
if (auto p = std::get_if<NixStringContextElem::Opaque>(&c.raw))
|
||||
refs.insert(p->path);
|
||||
else
|
||||
state.debugThrowLastTrace(EvalError({
|
||||
.msg = hintfmt(
|
||||
"in 'toFile': the file named '%1%' must not contain a reference "
|
||||
"to a derivation but contains (%2%)",
|
||||
name, c.to_string()),
|
||||
.errPos = state.positions[pos]
|
||||
}));
|
||||
state.error<EvalError>(
|
||||
"files created by %1% may not reference derivations, but %2% references %3%",
|
||||
"builtins.toFile",
|
||||
name,
|
||||
c.to_string()
|
||||
).atPos(pos).debugThrow();
|
||||
}
|
||||
|
||||
auto storePath = settings.readOnlyMode
|
||||
|
@ -2241,9 +2228,12 @@ static void addPath(
|
|||
});
|
||||
|
||||
if (!expectedHash || !state.store->isValidPath(*expectedStorePath)) {
|
||||
auto dstPath = fetchToStore(*state.store, path, name, method, filter.get(), state.repair);
|
||||
auto dstPath = fetchToStore(*state.store, path.resolveSymlinks(), name, method, filter.get(), state.repair);
|
||||
if (expectedHash && expectedStorePath != dstPath)
|
||||
state.debugThrowLastTrace(Error("store path mismatch in (possibly filtered) path added from '%s'", path));
|
||||
state.error<EvalError>(
|
||||
"store path mismatch in (possibly filtered) path added from '%s'",
|
||||
path
|
||||
).atPos(pos).debugThrow();
|
||||
state.allowAndSetStorePathString(dstPath, v);
|
||||
} else
|
||||
state.allowAndSetStorePathString(*expectedStorePath, v);
|
||||
|
@ -2343,16 +2333,15 @@ static void prim_path(EvalState & state, const PosIdx pos, Value * * args, Value
|
|||
else if (n == "sha256")
|
||||
expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the `sha256` attribute passed to builtins.path"), HashAlgorithm::SHA256);
|
||||
else
|
||||
state.debugThrowLastTrace(EvalError({
|
||||
.msg = hintfmt("unsupported argument '%1%' to 'addPath'", state.symbols[attr.name]),
|
||||
.errPos = state.positions[attr.pos]
|
||||
}));
|
||||
state.error<EvalError>(
|
||||
"unsupported argument '%1%' to 'addPath'",
|
||||
state.symbols[attr.name]
|
||||
).atPos(attr.pos).debugThrow();
|
||||
}
|
||||
if (!path)
|
||||
state.debugThrowLastTrace(EvalError({
|
||||
.msg = hintfmt("missing required 'path' attribute in the first argument to builtins.path"),
|
||||
.errPos = state.positions[pos]
|
||||
}));
|
||||
state.error<EvalError>(
|
||||
"missing required 'path' attribute in the first argument to builtins.path"
|
||||
).atPos(pos).debugThrow();
|
||||
if (name.empty())
|
||||
name = path->baseName();
|
||||
|
||||
|
@ -2770,10 +2759,7 @@ static void prim_functionArgs(EvalState & state, const PosIdx pos, Value * * arg
|
|||
return;
|
||||
}
|
||||
if (!args[0]->isLambda())
|
||||
state.debugThrowLastTrace(TypeError({
|
||||
.msg = hintfmt("'functionArgs' requires a function"),
|
||||
.errPos = state.positions[pos]
|
||||
}));
|
||||
state.error<TypeError>("'functionArgs' requires a function").atPos(pos).debugThrow();
|
||||
|
||||
if (!args[0]->lambda.fun->hasFormals()) {
|
||||
v.mkAttrs(&state.emptyBindings);
|
||||
|
@ -2943,10 +2929,10 @@ static void elemAt(EvalState & state, const PosIdx pos, Value & list, int n, Val
|
|||
{
|
||||
state.forceList(list, pos, "while evaluating the first argument passed to builtins.elemAt");
|
||||
if (n < 0 || (unsigned int) n >= list.listSize())
|
||||
state.debugThrowLastTrace(Error({
|
||||
.msg = hintfmt("list index %1% is out of bounds", n),
|
||||
.errPos = state.positions[pos]
|
||||
}));
|
||||
state.error<EvalError>(
|
||||
"list index %1% is out of bounds",
|
||||
n
|
||||
).atPos(pos).debugThrow();
|
||||
state.forceValue(*list.listElems()[n], pos);
|
||||
v = *list.listElems()[n];
|
||||
}
|
||||
|
@ -2991,10 +2977,7 @@ static void prim_tail(EvalState & state, const PosIdx pos, Value * * args, Value
|
|||
{
|
||||
state.forceList(*args[0], pos, "while evaluating the first argument passed to builtins.tail");
|
||||
if (args[0]->listSize() == 0)
|
||||
state.debugThrowLastTrace(Error({
|
||||
.msg = hintfmt("'tail' called on an empty list"),
|
||||
.errPos = state.positions[pos]
|
||||
}));
|
||||
state.error<EvalError>("'tail' called on an empty list").atPos(pos).debugThrow();
|
||||
|
||||
state.mkList(v, args[0]->listSize() - 1);
|
||||
for (unsigned int n = 0; n < v.listSize(); ++n)
|
||||
|
@ -3251,7 +3234,7 @@ static void prim_genList(EvalState & state, const PosIdx pos, Value * * args, Va
|
|||
auto len = state.forceInt(*args[1], pos, "while evaluating the second argument passed to builtins.genList");
|
||||
|
||||
if (len < 0)
|
||||
state.error("cannot create list of size %1%", len).debugThrow<EvalError>();
|
||||
state.error<EvalError>("cannot create list of size %1%", len).atPos(pos).debugThrow();
|
||||
|
||||
// More strict than striclty (!) necessary, but acceptable
|
||||
// as evaluating map without accessing any values makes little sense.
|
||||
|
@ -3568,10 +3551,7 @@ static void prim_div(EvalState & state, const PosIdx pos, Value * * args, Value
|
|||
|
||||
NixFloat f2 = state.forceFloat(*args[1], pos, "while evaluating the second operand of the division");
|
||||
if (f2 == 0)
|
||||
state.debugThrowLastTrace(EvalError({
|
||||
.msg = hintfmt("division by zero"),
|
||||
.errPos = state.positions[pos]
|
||||
}));
|
||||
state.error<EvalError>("division by zero").atPos(pos).debugThrow();
|
||||
|
||||
if (args[0]->type() == nFloat || args[1]->type() == nFloat) {
|
||||
v.mkFloat(state.forceFloat(*args[0], pos, "while evaluating the first operand of the division") / f2);
|
||||
|
@ -3580,10 +3560,7 @@ static void prim_div(EvalState & state, const PosIdx pos, Value * * args, Value
|
|||
NixInt i2 = state.forceInt(*args[1], pos, "while evaluating the second operand of the division");
|
||||
/* Avoid division overflow as it might raise SIGFPE. */
|
||||
if (i1 == std::numeric_limits<NixInt>::min() && i2 == -1)
|
||||
state.debugThrowLastTrace(EvalError({
|
||||
.msg = hintfmt("overflow in integer division"),
|
||||
.errPos = state.positions[pos]
|
||||
}));
|
||||
state.error<EvalError>("overflow in integer division").atPos(pos).debugThrow();
|
||||
|
||||
v.mkInt(i1 / i2);
|
||||
}
|
||||
|
@ -3714,10 +3691,7 @@ static void prim_substring(EvalState & state, const PosIdx pos, Value * * args,
|
|||
int start = state.forceInt(*args[0], pos, "while evaluating the first argument (the start offset) passed to builtins.substring");
|
||||
|
||||
if (start < 0)
|
||||
state.debugThrowLastTrace(EvalError({
|
||||
.msg = hintfmt("negative start position in 'substring'"),
|
||||
.errPos = state.positions[pos]
|
||||
}));
|
||||
state.error<EvalError>("negative start position in 'substring'").atPos(pos).debugThrow();
|
||||
|
||||
|
||||
int len = state.forceInt(*args[1], pos, "while evaluating the second argument (the substring length) passed to builtins.substring");
|
||||
|
@ -3782,10 +3756,7 @@ static void prim_hashString(EvalState & state, const PosIdx pos, Value * * args,
|
|||
auto algo = state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.hashString");
|
||||
std::optional<HashAlgorithm> ha = parseHashAlgo(algo);
|
||||
if (!ha)
|
||||
state.debugThrowLastTrace(Error({
|
||||
.msg = hintfmt("unknown hash algo '%1%'", algo),
|
||||
.errPos = state.positions[pos]
|
||||
}));
|
||||
state.error<EvalError>("unknown hash algorithm '%1%'", algo).atPos(pos).debugThrow();
|
||||
|
||||
NixStringContext context; // discarded
|
||||
auto s = state.forceString(*args[1], context, pos, "while evaluating the second argument passed to builtins.hashString");
|
||||
|
@ -3951,15 +3922,13 @@ void prim_match(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
|||
} catch (std::regex_error & e) {
|
||||
if (e.code() == std::regex_constants::error_space) {
|
||||
// limit is _GLIBCXX_REGEX_STATE_LIMIT for libstdc++
|
||||
state.debugThrowLastTrace(EvalError({
|
||||
.msg = hintfmt("memory limit exceeded by regular expression '%s'", re),
|
||||
.errPos = state.positions[pos]
|
||||
}));
|
||||
state.error<EvalError>("memory limit exceeded by regular expression '%s'", re)
|
||||
.atPos(pos)
|
||||
.debugThrow();
|
||||
} else
|
||||
state.debugThrowLastTrace(EvalError({
|
||||
.msg = hintfmt("invalid regular expression '%s'", re),
|
||||
.errPos = state.positions[pos]
|
||||
}));
|
||||
state.error<EvalError>("invalid regular expression '%s'", re)
|
||||
.atPos(pos)
|
||||
.debugThrow();
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -4055,15 +4024,13 @@ void prim_split(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
|||
} catch (std::regex_error & e) {
|
||||
if (e.code() == std::regex_constants::error_space) {
|
||||
// limit is _GLIBCXX_REGEX_STATE_LIMIT for libstdc++
|
||||
state.debugThrowLastTrace(EvalError({
|
||||
.msg = hintfmt("memory limit exceeded by regular expression '%s'", re),
|
||||
.errPos = state.positions[pos]
|
||||
}));
|
||||
state.error<EvalError>("memory limit exceeded by regular expression '%s'", re)
|
||||
.atPos(pos)
|
||||
.debugThrow();
|
||||
} else
|
||||
state.debugThrowLastTrace(EvalError({
|
||||
.msg = hintfmt("invalid regular expression '%s'", re),
|
||||
.errPos = state.positions[pos]
|
||||
}));
|
||||
state.error<EvalError>("invalid regular expression '%s'", re)
|
||||
.atPos(pos)
|
||||
.debugThrow();
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -4139,7 +4106,9 @@ static void prim_replaceStrings(EvalState & state, const PosIdx pos, Value * * a
|
|||
state.forceList(*args[0], pos, "while evaluating the first argument passed to builtins.replaceStrings");
|
||||
state.forceList(*args[1], pos, "while evaluating the second argument passed to builtins.replaceStrings");
|
||||
if (args[0]->listSize() != args[1]->listSize())
|
||||
state.error("'from' and 'to' arguments passed to builtins.replaceStrings have different lengths").atPos(pos).debugThrow<EvalError>();
|
||||
state.error<EvalError>(
|
||||
"'from' and 'to' arguments passed to builtins.replaceStrings have different lengths"
|
||||
).atPos(pos).debugThrow();
|
||||
|
||||
std::vector<std::string> from;
|
||||
from.reserve(args[0]->listSize());
|
||||
|
|
|
@ -98,30 +98,30 @@ static void prim_addDrvOutputDependencies(EvalState & state, const PosIdx pos, V
|
|||
|
||||
auto contextSize = context.size();
|
||||
if (contextSize != 1) {
|
||||
throw EvalError({
|
||||
.msg = hintfmt("context of string '%s' must have exactly one element, but has %d", *s, contextSize),
|
||||
.errPos = state.positions[pos]
|
||||
});
|
||||
state.error<EvalError>(
|
||||
"context of string '%s' must have exactly one element, but has %d",
|
||||
*s,
|
||||
contextSize
|
||||
).atPos(pos).debugThrow();
|
||||
}
|
||||
NixStringContext context2 {
|
||||
(NixStringContextElem { std::visit(overloaded {
|
||||
[&](const NixStringContextElem::Opaque & c) -> NixStringContextElem::DrvDeep {
|
||||
if (!c.path.isDerivation()) {
|
||||
throw EvalError({
|
||||
.msg = hintfmt("path '%s' is not a derivation",
|
||||
state.store->printStorePath(c.path)),
|
||||
.errPos = state.positions[pos],
|
||||
});
|
||||
state.error<EvalError>(
|
||||
"path '%s' is not a derivation",
|
||||
state.store->printStorePath(c.path)
|
||||
).atPos(pos).debugThrow();
|
||||
}
|
||||
return NixStringContextElem::DrvDeep {
|
||||
.drvPath = c.path,
|
||||
};
|
||||
},
|
||||
[&](const NixStringContextElem::Built & c) -> NixStringContextElem::DrvDeep {
|
||||
throw EvalError({
|
||||
.msg = hintfmt("`addDrvOutputDependencies` can only act on derivations, not on a derivation output such as '%1%'", c.output),
|
||||
.errPos = state.positions[pos],
|
||||
});
|
||||
state.error<EvalError>(
|
||||
"`addDrvOutputDependencies` can only act on derivations, not on a derivation output such as '%1%'",
|
||||
c.output
|
||||
).atPos(pos).debugThrow();
|
||||
},
|
||||
[&](const NixStringContextElem::DrvDeep & c) -> NixStringContextElem::DrvDeep {
|
||||
/* Reuse original item because we want this to be idempotent. */
|
||||
|
@ -261,10 +261,10 @@ static void prim_appendContext(EvalState & state, const PosIdx pos, Value * * ar
|
|||
for (auto & i : *args[1]->attrs) {
|
||||
const auto & name = state.symbols[i.name];
|
||||
if (!state.store->isStorePath(name))
|
||||
throw EvalError({
|
||||
.msg = hintfmt("context key '%s' is not a store path", name),
|
||||
.errPos = state.positions[i.pos]
|
||||
});
|
||||
state.error<EvalError>(
|
||||
"context key '%s' is not a store path",
|
||||
name
|
||||
).atPos(i.pos).debugThrow();
|
||||
auto namePath = state.store->parseStorePath(name);
|
||||
if (!settings.readOnlyMode)
|
||||
state.store->ensurePath(namePath);
|
||||
|
@ -281,10 +281,10 @@ static void prim_appendContext(EvalState & state, const PosIdx pos, Value * * ar
|
|||
if (iter != i.value->attrs->end()) {
|
||||
if (state.forceBool(*iter->value, iter->pos, "while evaluating the `allOutputs` attribute of a string context")) {
|
||||
if (!isDerivation(name)) {
|
||||
throw EvalError({
|
||||
.msg = hintfmt("tried to add all-outputs context of %s, which is not a derivation, to a string", name),
|
||||
.errPos = state.positions[i.pos]
|
||||
});
|
||||
state.error<EvalError>(
|
||||
"tried to add all-outputs context of %s, which is not a derivation, to a string",
|
||||
name
|
||||
).atPos(i.pos).debugThrow();
|
||||
}
|
||||
context.emplace(NixStringContextElem::DrvDeep {
|
||||
.drvPath = namePath,
|
||||
|
@ -296,10 +296,10 @@ static void prim_appendContext(EvalState & state, const PosIdx pos, Value * * ar
|
|||
if (iter != i.value->attrs->end()) {
|
||||
state.forceList(*iter->value, iter->pos, "while evaluating the `outputs` attribute of a string context");
|
||||
if (iter->value->listSize() && !isDerivation(name)) {
|
||||
throw EvalError({
|
||||
.msg = hintfmt("tried to add derivation output context of %s, which is not a derivation, to a string", name),
|
||||
.errPos = state.positions[i.pos]
|
||||
});
|
||||
state.error<EvalError>(
|
||||
"tried to add derivation output context of %s, which is not a derivation, to a string",
|
||||
name
|
||||
).atPos(i.pos).debugThrow();
|
||||
}
|
||||
for (auto elem : iter->value->listItems()) {
|
||||
auto outputName = state.forceStringNoCtx(*elem, iter->pos, "while evaluating an output name within a string context");
|
||||
|
|
|
@ -23,20 +23,20 @@ static void runFetchClosureWithRewrite(EvalState & state, const PosIdx pos, Stor
|
|||
auto rewrittenPath = makeContentAddressed(fromStore, *state.store, fromPath);
|
||||
if (toPathMaybe && *toPathMaybe != rewrittenPath)
|
||||
throw Error({
|
||||
.msg = hintfmt("rewriting '%s' to content-addressed form yielded '%s', while '%s' was expected",
|
||||
.msg = HintFmt("rewriting '%s' to content-addressed form yielded '%s', while '%s' was expected",
|
||||
state.store->printStorePath(fromPath),
|
||||
state.store->printStorePath(rewrittenPath),
|
||||
state.store->printStorePath(*toPathMaybe)),
|
||||
.errPos = state.positions[pos]
|
||||
.pos = state.positions[pos]
|
||||
});
|
||||
if (!toPathMaybe)
|
||||
throw Error({
|
||||
.msg = hintfmt(
|
||||
.msg = HintFmt(
|
||||
"rewriting '%s' to content-addressed form yielded '%s'\n"
|
||||
"Use this value for the 'toPath' attribute passed to 'fetchClosure'",
|
||||
state.store->printStorePath(fromPath),
|
||||
state.store->printStorePath(rewrittenPath)),
|
||||
.errPos = state.positions[pos]
|
||||
.pos = state.positions[pos]
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -50,11 +50,11 @@ static void runFetchClosureWithRewrite(EvalState & state, const PosIdx pos, Stor
|
|||
// We don't perform the rewriting when outPath already exists, as an optimisation.
|
||||
// However, we can quickly detect a mistake if the toPath is input addressed.
|
||||
throw Error({
|
||||
.msg = hintfmt(
|
||||
.msg = HintFmt(
|
||||
"The 'toPath' value '%s' is input-addressed, so it can't possibly be the result of rewriting to a content-addressed path.\n\n"
|
||||
"Set 'toPath' to an empty string to make Nix report the correct content-addressed path.",
|
||||
state.store->printStorePath(toPath)),
|
||||
.errPos = state.positions[pos]
|
||||
.pos = state.positions[pos]
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -73,14 +73,14 @@ static void runFetchClosureWithContentAddressedPath(EvalState & state, const Pos
|
|||
|
||||
if (!info->isContentAddressed(*state.store)) {
|
||||
throw Error({
|
||||
.msg = hintfmt(
|
||||
.msg = HintFmt(
|
||||
"The 'fromPath' value '%s' is input-addressed, but 'inputAddressed' is set to 'false' (default).\n\n"
|
||||
"If you do intend to fetch an input-addressed store path, add\n\n"
|
||||
" inputAddressed = true;\n\n"
|
||||
"to the 'fetchClosure' arguments.\n\n"
|
||||
"Note that to ensure authenticity input-addressed store paths, users must configure a trusted binary cache public key on their systems. This is not needed for content-addressed paths.",
|
||||
state.store->printStorePath(fromPath)),
|
||||
.errPos = state.positions[pos]
|
||||
.pos = state.positions[pos]
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -99,11 +99,11 @@ static void runFetchClosureWithInputAddressedPath(EvalState & state, const PosId
|
|||
|
||||
if (info->isContentAddressed(*state.store)) {
|
||||
throw Error({
|
||||
.msg = hintfmt(
|
||||
.msg = HintFmt(
|
||||
"The store object referred to by 'fromPath' at '%s' is not input-addressed, but 'inputAddressed' is set to 'true'.\n\n"
|
||||
"Remove the 'inputAddressed' attribute (it defaults to 'false') to expect 'fromPath' to be content-addressed",
|
||||
state.store->printStorePath(fromPath)),
|
||||
.errPos = state.positions[pos]
|
||||
.pos = state.positions[pos]
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -153,15 +153,15 @@ static void prim_fetchClosure(EvalState & state, const PosIdx pos, Value * * arg
|
|||
|
||||
else
|
||||
throw Error({
|
||||
.msg = hintfmt("attribute '%s' isn't supported in call to 'fetchClosure'", attrName),
|
||||
.errPos = state.positions[pos]
|
||||
.msg = HintFmt("attribute '%s' isn't supported in call to 'fetchClosure'", attrName),
|
||||
.pos = state.positions[pos]
|
||||
});
|
||||
}
|
||||
|
||||
if (!fromPath)
|
||||
throw Error({
|
||||
.msg = hintfmt("attribute '%s' is missing in call to 'fetchClosure'", "fromPath"),
|
||||
.errPos = state.positions[pos]
|
||||
.msg = HintFmt("attribute '%s' is missing in call to 'fetchClosure'", "fromPath"),
|
||||
.pos = state.positions[pos]
|
||||
});
|
||||
|
||||
bool inputAddressed = inputAddressedMaybe.value_or(false);
|
||||
|
@ -169,17 +169,17 @@ static void prim_fetchClosure(EvalState & state, const PosIdx pos, Value * * arg
|
|||
if (inputAddressed) {
|
||||
if (toPath)
|
||||
throw Error({
|
||||
.msg = hintfmt("attribute '%s' is set to true, but '%s' is also set. Please remove one of them",
|
||||
.msg = HintFmt("attribute '%s' is set to true, but '%s' is also set. Please remove one of them",
|
||||
"inputAddressed",
|
||||
"toPath"),
|
||||
.errPos = state.positions[pos]
|
||||
.pos = state.positions[pos]
|
||||
});
|
||||
}
|
||||
|
||||
if (!fromStoreUrl)
|
||||
throw Error({
|
||||
.msg = hintfmt("attribute '%s' is missing in call to 'fetchClosure'", "fromStore"),
|
||||
.errPos = state.positions[pos]
|
||||
.msg = HintFmt("attribute '%s' is missing in call to 'fetchClosure'", "fromStore"),
|
||||
.pos = state.positions[pos]
|
||||
});
|
||||
|
||||
auto parsedURL = parseURL(*fromStoreUrl);
|
||||
|
@ -188,14 +188,14 @@ static void prim_fetchClosure(EvalState & state, const PosIdx pos, Value * * arg
|
|||
parsedURL.scheme != "https" &&
|
||||
!(getEnv("_NIX_IN_TEST").has_value() && parsedURL.scheme == "file"))
|
||||
throw Error({
|
||||
.msg = hintfmt("'fetchClosure' only supports http:// and https:// stores"),
|
||||
.errPos = state.positions[pos]
|
||||
.msg = HintFmt("'fetchClosure' only supports http:// and https:// stores"),
|
||||
.pos = state.positions[pos]
|
||||
});
|
||||
|
||||
if (!parsedURL.query.empty())
|
||||
throw Error({
|
||||
.msg = hintfmt("'fetchClosure' does not support URL query parameters (in '%s')", *fromStoreUrl),
|
||||
.errPos = state.positions[pos]
|
||||
.msg = HintFmt("'fetchClosure' does not support URL query parameters (in '%s')", *fromStoreUrl),
|
||||
.pos = state.positions[pos]
|
||||
});
|
||||
|
||||
auto fromStore = openStore(parsedURL.to_string());
|
||||
|
|
|
@ -38,17 +38,11 @@ static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value * * a
|
|||
else if (n == "name")
|
||||
name = state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the `name` attribute passed to builtins.fetchMercurial");
|
||||
else
|
||||
throw EvalError({
|
||||
.msg = hintfmt("unsupported argument '%s' to 'fetchMercurial'", state.symbols[attr.name]),
|
||||
.errPos = state.positions[attr.pos]
|
||||
});
|
||||
state.error<EvalError>("unsupported argument '%s' to 'fetchMercurial'", state.symbols[attr.name]).atPos(attr.pos).debugThrow();
|
||||
}
|
||||
|
||||
if (url.empty())
|
||||
throw EvalError({
|
||||
.msg = hintfmt("'url' argument required"),
|
||||
.errPos = state.positions[pos]
|
||||
});
|
||||
state.error<EvalError>("'url' argument required").atPos(pos).debugThrow();
|
||||
|
||||
} else
|
||||
url = state.coerceToString(pos, *args[0], context,
|
||||
|
|
|
@ -100,16 +100,14 @@ static void fetchTree(
|
|||
|
||||
if (auto aType = args[0]->attrs->get(state.sType)) {
|
||||
if (type)
|
||||
state.debugThrowLastTrace(EvalError({
|
||||
.msg = hintfmt("unexpected attribute 'type'"),
|
||||
.errPos = state.positions[pos]
|
||||
}));
|
||||
state.error<EvalError>(
|
||||
"unexpected attribute 'type'"
|
||||
).atPos(pos).debugThrow();
|
||||
type = state.forceStringNoCtx(*aType->value, aType->pos, "while evaluating the `type` attribute passed to builtins.fetchTree");
|
||||
} else if (!type)
|
||||
state.debugThrowLastTrace(EvalError({
|
||||
.msg = hintfmt("attribute 'type' is missing in call to 'fetchTree'"),
|
||||
.errPos = state.positions[pos]
|
||||
}));
|
||||
state.error<EvalError>(
|
||||
"attribute 'type' is missing in call to 'fetchTree'"
|
||||
).atPos(pos).debugThrow();
|
||||
|
||||
attrs.emplace("type", type.value());
|
||||
|
||||
|
@ -132,8 +130,8 @@ static void fetchTree(
|
|||
attrs.emplace(state.symbols[attr.name], printValueAsJSON(state, true, *attr.value, pos, context).dump());
|
||||
}
|
||||
else
|
||||
state.debugThrowLastTrace(TypeError("fetchTree argument '%s' is %s while a string, Boolean or integer is expected",
|
||||
state.symbols[attr.name], showType(*attr.value)));
|
||||
state.error<TypeError>("fetchTree argument '%s' is %s while a string, Boolean or integer is expected",
|
||||
state.symbols[attr.name], showType(*attr.value)).debugThrow();
|
||||
}
|
||||
|
||||
if (params.isFetchGit && !attrs.contains("exportIgnore") && (!attrs.contains("submodules") || !*fetchers::maybeGetBoolAttr(attrs, "submodules"))) {
|
||||
|
@ -142,10 +140,9 @@ static void fetchTree(
|
|||
|
||||
if (!params.allowNameArgument)
|
||||
if (auto nameIter = attrs.find("name"); nameIter != attrs.end())
|
||||
state.debugThrowLastTrace(EvalError({
|
||||
.msg = hintfmt("attribute 'name' isn’t supported in call to 'fetchTree'"),
|
||||
.errPos = state.positions[pos]
|
||||
}));
|
||||
state.error<EvalError>(
|
||||
"attribute 'name' isn’t supported in call to 'fetchTree'"
|
||||
).atPos(pos).debugThrow();
|
||||
|
||||
input = fetchers::Input::fromAttrs(std::move(attrs));
|
||||
} else {
|
||||
|
@ -163,10 +160,9 @@ static void fetchTree(
|
|||
input = fetchers::Input::fromAttrs(std::move(attrs));
|
||||
} else {
|
||||
if (!experimentalFeatureSettings.isEnabled(Xp::Flakes))
|
||||
state.debugThrowLastTrace(EvalError({
|
||||
.msg = hintfmt("passing a string argument to 'fetchTree' requires the 'flakes' experimental feature"),
|
||||
.errPos = state.positions[pos]
|
||||
}));
|
||||
state.error<EvalError>(
|
||||
"passing a string argument to 'fetchTree' requires the 'flakes' experimental feature"
|
||||
).atPos(pos).debugThrow();
|
||||
input = fetchers::Input::fromURL(url);
|
||||
}
|
||||
}
|
||||
|
@ -174,8 +170,16 @@ static void fetchTree(
|
|||
if (!evalSettings.pureEval && !input.isDirect() && experimentalFeatureSettings.isEnabled(Xp::Flakes))
|
||||
input = lookupInRegistries(state.store, input).first;
|
||||
|
||||
if (evalSettings.pureEval && !input.isLocked())
|
||||
state.debugThrowLastTrace(EvalError("in pure evaluation mode, 'fetchTree' requires a locked input, at %s", state.positions[pos]));
|
||||
if (evalSettings.pureEval && !input.isLocked()) {
|
||||
auto fetcher = "fetchTree";
|
||||
if (params.isFetchGit)
|
||||
fetcher = "fetchGit";
|
||||
|
||||
state.error<EvalError>(
|
||||
"in pure evaluation mode, %s requires a locked input",
|
||||
fetcher
|
||||
).atPos(pos).debugThrow();
|
||||
}
|
||||
|
||||
state.checkURI(input.toURLString());
|
||||
|
||||
|
@ -428,17 +432,13 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v
|
|||
else if (n == "name")
|
||||
name = state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the name of the content we should fetch");
|
||||
else
|
||||
state.debugThrowLastTrace(EvalError({
|
||||
.msg = hintfmt("unsupported argument '%s' to '%s'", n, who),
|
||||
.errPos = state.positions[attr.pos]
|
||||
}));
|
||||
state.error<EvalError>("unsupported argument '%s' to '%s'", n, who)
|
||||
.atPos(pos).debugThrow();
|
||||
}
|
||||
|
||||
if (!url)
|
||||
state.debugThrowLastTrace(EvalError({
|
||||
.msg = hintfmt("'url' argument required"),
|
||||
.errPos = state.positions[pos]
|
||||
}));
|
||||
state.error<EvalError>(
|
||||
"'url' argument required").atPos(pos).debugThrow();
|
||||
} else
|
||||
url = state.forceStringNoCtx(*args[0], pos, "while evaluating the url we should fetch");
|
||||
|
||||
|
@ -451,7 +451,7 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v
|
|||
name = baseNameOf(*url);
|
||||
|
||||
if (evalSettings.pureEval && !expectedHash)
|
||||
state.debugThrowLastTrace(EvalError("in pure evaluation mode, '%s' requires a 'sha256' argument", who));
|
||||
state.error<EvalError>("in pure evaluation mode, '%s' requires a 'sha256' argument", who).atPos(pos).debugThrow();
|
||||
|
||||
// early exit if pinned and already in the store
|
||||
if (expectedHash && expectedHash->algo == HashAlgorithm::SHA256) {
|
||||
|
@ -480,9 +480,15 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v
|
|||
auto hash = unpack
|
||||
? state.store->queryPathInfo(storePath)->narHash
|
||||
: hashFile(HashAlgorithm::SHA256, state.store->toRealPath(storePath));
|
||||
if (hash != *expectedHash)
|
||||
state.debugThrowLastTrace(EvalError((unsigned int) 102, "hash mismatch in file downloaded from '%s':\n specified: %s\n got: %s",
|
||||
*url, expectedHash->to_string(HashFormat::Nix32, true), hash.to_string(HashFormat::Nix32, true)));
|
||||
if (hash != *expectedHash) {
|
||||
state.error<EvalError>(
|
||||
"hash mismatch in file downloaded from '%s':\n specified: %s\n got: %s",
|
||||
*url,
|
||||
expectedHash->to_string(HashFormat::Nix32, true),
|
||||
hash.to_string(HashFormat::Nix32, true)
|
||||
).withExitStatus(102)
|
||||
.debugThrow();
|
||||
}
|
||||
}
|
||||
|
||||
state.allowAndSetStorePathString(storePath, v);
|
||||
|
@ -610,8 +616,7 @@ static RegisterPrimOp primop_fetchGit({
|
|||
|
||||
- `shallow` (default: `false`)
|
||||
|
||||
A Boolean parameter that specifies whether fetching from a shallow remote repository is allowed.
|
||||
This still performs a full clone of what is available on the remote.
|
||||
Make a shallow clone when fetching the Git tree.
|
||||
|
||||
- `allRefs`
|
||||
|
||||
|
|
|
@ -83,10 +83,7 @@ static void prim_fromTOML(EvalState & state, const PosIdx pos, Value * * args, V
|
|||
try {
|
||||
visit(val, toml::parse(tomlStream, "fromTOML" /* the "filename" */));
|
||||
} catch (std::exception & e) { // TODO: toml::syntax_error
|
||||
throw EvalError({
|
||||
.msg = hintfmt("while parsing a TOML string: %s", e.what()),
|
||||
.errPos = state.positions[pos]
|
||||
});
|
||||
state.error<EvalError>("while parsing TOML: %s", e.what()).atPos(pos).debugThrow();
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
#include "print-ambiguous.hh"
|
||||
#include "print.hh"
|
||||
#include "signals.hh"
|
||||
#include "eval.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
|
|
|
@ -36,11 +36,17 @@ struct PrintOptions
|
|||
*/
|
||||
size_t maxDepth = std::numeric_limits<size_t>::max();
|
||||
/**
|
||||
* Maximum number of attributes in an attribute set to print.
|
||||
* Maximum number of attributes in attribute sets to print.
|
||||
*
|
||||
* Note that this is a limit for the entire print invocation, not for each
|
||||
* attribute set encountered.
|
||||
*/
|
||||
size_t maxAttrs = std::numeric_limits<size_t>::max();
|
||||
/**
|
||||
* Maximum number of list items to print.
|
||||
*
|
||||
* Note that this is a limit for the entire print invocation, not for each
|
||||
* list encountered.
|
||||
*/
|
||||
size_t maxListItems = std::numeric_limits<size_t>::max();
|
||||
/**
|
||||
|
@ -49,4 +55,16 @@ struct PrintOptions
|
|||
size_t maxStringLength = std::numeric_limits<size_t>::max();
|
||||
};
|
||||
|
||||
/**
|
||||
* `PrintOptions` for unknown and therefore potentially large values in error messages,
|
||||
* to avoid printing "too much" output.
|
||||
*/
|
||||
static PrintOptions errorPrintOptions = PrintOptions {
|
||||
.ansiColors = true,
|
||||
.maxDepth = 10,
|
||||
.maxAttrs = 10,
|
||||
.maxListItems = 10,
|
||||
.maxStringLength = 1024
|
||||
};
|
||||
|
||||
}
|
||||
|
|
|
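A hedged usage sketch for the options documented above (the `state` and `v` variables are assumed to exist; the numbers are arbitrary). Note that `maxAttrs` and `maxListItems` budget the entire print invocation, not each nested collection:

    PrintOptions opts {
        .ansiColors = false,
        .maxDepth = 3,
        .maxAttrs = 10,       // global cap across the whole printout
        .maxListItems = 10,   // likewise global, not per list
        .maxStringLength = 256
    };
    printValue(state, std::cout, v, opts);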
@ -7,6 +7,7 @@
|
|||
#include "store-api.hh"
|
||||
#include "terminal.hh"
|
||||
#include "english.hh"
|
||||
#include "eval.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
|
@ -19,7 +20,7 @@ void printElided(
|
|||
{
|
||||
if (ansiColors)
|
||||
output << ANSI_FAINT;
|
||||
output << " «";
|
||||
output << "«";
|
||||
pluralize(output, value, single, plural);
|
||||
output << " elided»";
|
||||
if (ansiColors)
|
||||
|
@ -36,7 +37,7 @@ printLiteralString(std::ostream & str, const std::string_view string, size_t max
|
|||
str << "\"";
|
||||
for (auto i = string.begin(); i != string.end(); ++i) {
|
||||
if (charsPrinted >= maxLength) {
|
||||
str << "\"";
|
||||
str << "\" ";
|
||||
printElided(str, string.length() - charsPrinted, "byte", "bytes", ansiColors);
|
||||
return str;
|
||||
}
|
||||
|
@ -151,7 +152,7 @@ struct ImportantFirstAttrNameCmp
|
|||
}
|
||||
};
|
||||
|
||||
typedef std::set<Value *> ValuesSeen;
|
||||
typedef std::set<const void *> ValuesSeen;
|
||||
|
||||
class Printer
|
||||
{
|
||||
|
@ -160,6 +161,8 @@ private:
|
|||
EvalState & state;
|
||||
PrintOptions options;
|
||||
std::optional<ValuesSeen> seen;
|
||||
size_t attrsPrinted = 0;
|
||||
size_t listItemsPrinted = 0;
|
||||
|
||||
void printRepeated()
|
||||
{
|
||||
|
@ -252,14 +255,14 @@ private:
|
|||
output << "»";
|
||||
if (options.ansiColors)
|
||||
output << ANSI_NORMAL;
|
||||
} catch (BaseError & e) {
|
||||
} catch (Error & e) {
|
||||
printError_(e);
|
||||
}
|
||||
}
|
||||
|
||||
void printAttrs(Value & v, size_t depth)
|
||||
{
|
||||
if (seen && !seen->insert(&v).second) {
|
||||
if (seen && !seen->insert(v.attrs).second) {
|
||||
printRepeated();
|
||||
return;
|
||||
}
|
||||
|
@ -278,7 +281,6 @@ private:
|
|||
else
|
||||
std::sort(sorted.begin(), sorted.end(), ImportantFirstAttrNameCmp());
|
||||
|
||||
size_t attrsPrinted = 0;
|
||||
for (auto & i : sorted) {
|
||||
if (attrsPrinted >= options.maxAttrs) {
|
||||
printElided(sorted.size() - attrsPrinted, "attribute", "attributes");
|
||||
|
@ -306,7 +308,6 @@ private:
|
|||
|
||||
output << "[ ";
|
||||
if (depth < options.maxDepth) {
|
||||
size_t listItemsPrinted = 0;
|
||||
for (auto elem : v.listItems()) {
|
||||
if (listItemsPrinted >= options.maxListItems) {
|
||||
printElided(v.listSize() - listItemsPrinted, "item", "items");
|
||||
|
@ -404,11 +405,11 @@ private:
|
|||
output << ANSI_NORMAL;
|
||||
}
|
||||
|
||||
void printError_(BaseError & e)
|
||||
void printError_(Error & e)
|
||||
{
|
||||
if (options.ansiColors)
|
||||
output << ANSI_RED;
|
||||
output << "«" << e.msg() << "»";
|
||||
output << "«error: " << filterANSIEscapes(e.info().msg.str(), true) << "»";
|
||||
if (options.ansiColors)
|
||||
output << ANSI_NORMAL;
|
||||
}
|
||||
|
@ -421,7 +422,7 @@ private:
|
|||
if (options.force) {
|
||||
try {
|
||||
state.forceValue(v, v.determinePos(noPos));
|
||||
} catch (BaseError & e) {
|
||||
} catch (Error & e) {
|
||||
printError_(e);
|
||||
return;
|
||||
}
|
||||
|
@ -485,6 +486,9 @@ public:
|
|||
|
||||
void print(Value & v)
|
||||
{
|
||||
attrsPrinted = 0;
|
||||
listItemsPrinted = 0;
|
||||
|
||||
if (options.trackRepeated) {
|
||||
seen.emplace();
|
||||
} else {
|
||||
|
@ -501,4 +505,17 @@ void printValue(EvalState & state, std::ostream & output, Value & v, PrintOption
|
|||
Printer(output, state, options).print(v);
|
||||
}
|
||||
|
||||
std::ostream & operator<<(std::ostream & output, const ValuePrinter & printer)
|
||||
{
|
||||
printValue(printer.state, output, printer.value, printer.options);
|
||||
return output;
|
||||
}
|
||||
|
||||
template<>
|
||||
HintFmt & HintFmt::operator%(const ValuePrinter & value)
|
||||
{
|
||||
fmt % value;
|
||||
return *this;
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -9,11 +9,14 @@
|
|||
|
||||
#include <iostream>
|
||||
|
||||
#include "eval.hh"
|
||||
#include "fmt.hh"
|
||||
#include "print-options.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
class EvalState;
|
||||
struct Value;
|
||||
|
||||
/**
|
||||
* Print a string as a Nix string literal.
|
||||
*
|
||||
|
@ -59,4 +62,30 @@ std::ostream & printIdentifier(std::ostream & o, std::string_view s);
|
|||
|
||||
void printValue(EvalState & state, std::ostream & str, Value & v, PrintOptions options = PrintOptions {});
|
||||
|
||||
/**
|
||||
* A partially-applied form of `printValue` which can be formatted using `<<`
|
||||
* without allocating an intermediate string.
|
||||
*/
|
||||
class ValuePrinter {
|
||||
friend std::ostream & operator << (std::ostream & output, const ValuePrinter & printer);
|
||||
private:
|
||||
EvalState & state;
|
||||
Value & value;
|
||||
PrintOptions options;
|
||||
|
||||
public:
|
||||
ValuePrinter(EvalState & state, Value & value, PrintOptions options = PrintOptions {})
|
||||
: state(state), value(value), options(options) { }
|
||||
};
|
||||
|
||||
std::ostream & operator<<(std::ostream & output, const ValuePrinter & printer);
|
||||
|
||||
|
||||
/**
|
||||
* `ValuePrinter` does its own ANSI formatting, so we don't color it
|
||||
* magenta.
|
||||
*/
|
||||
template<>
|
||||
HintFmt & HintFmt::operator%(const ValuePrinter & value);
|
||||
|
||||
}
|
||||
|
|
|
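A brief, illustrative use of the helper declared above (`state` and `v` assumed): because `ValuePrinter` is rendered lazily through `operator<<`, no intermediate string is allocated, and the `HintFmt` specialization keeps the printed value from being colored like an ordinary `%s` argument:

    std::cerr << ValuePrinter(state, v) << "\n";   // stream the value directly
    auto hint = HintFmt("while evaluating %s", ValuePrinter(state, v, errorPrintOptions));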
@ -64,7 +64,7 @@ json printValueAsJSON(EvalState & state, bool strict,
|
|||
out[j] = printValueAsJSON(state, strict, *a.value, a.pos, context, copyToStore);
|
||||
} catch (Error & e) {
|
||||
e.addTrace(state.positions[a.pos],
|
||||
hintfmt("while evaluating attribute '%1%'", j));
|
||||
HintFmt("while evaluating attribute '%1%'", j));
|
||||
throw;
|
||||
}
|
||||
}
|
||||
|
@ -80,8 +80,8 @@ json printValueAsJSON(EvalState & state, bool strict,
|
|||
try {
|
||||
out.push_back(printValueAsJSON(state, strict, *elem, pos, context, copyToStore));
|
||||
} catch (Error & e) {
|
||||
e.addTrace({},
|
||||
hintfmt("while evaluating list element at index %1%", i));
|
||||
e.addTrace(state.positions[pos],
|
||||
HintFmt("while evaluating list element at index %1%", i));
|
||||
throw;
|
||||
}
|
||||
i++;
|
||||
|
@ -99,13 +99,12 @@ json printValueAsJSON(EvalState & state, bool strict,
|
|||
|
||||
case nThunk:
|
||||
case nFunction:
|
||||
auto e = TypeError({
|
||||
.msg = hintfmt("cannot convert %1% to JSON", showType(v)),
|
||||
.errPos = state.positions[v.determinePos(pos)]
|
||||
});
|
||||
e.addTrace(state.positions[pos], hintfmt("message for the trace"));
|
||||
state.debugThrowLastTrace(e);
|
||||
throw e;
|
||||
state.error<TypeError>(
|
||||
"cannot convert %1% to JSON",
|
||||
showType(v)
|
||||
)
|
||||
.atPos(v.determinePos(pos))
|
||||
.debugThrow();
|
||||
}
|
||||
return out;
|
||||
}
|
||||
|
@ -119,7 +118,8 @@ void printValueAsJSON(EvalState & state, bool strict,
|
|||
json ExternalValueBase::printValueAsJSON(EvalState & state, bool strict,
|
||||
NixStringContext & context, bool copyToStore) const
|
||||
{
|
||||
state.debugThrowLastTrace(TypeError("cannot convert %1% to JSON", showType()));
|
||||
state.error<TypeError>("cannot convert %1% to JSON", showType())
|
||||
.debugThrow();
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -105,7 +105,7 @@ class ExternalValueBase
|
|||
* Coerce the value to a string. Defaults to uncoercable, i.e. throws an
|
||||
* error.
|
||||
*/
|
||||
virtual std::string coerceToString(const Pos & pos, NixStringContext & context, bool copyMore, bool copyToStore) const;
|
||||
virtual std::string coerceToString(EvalState & state, const PosIdx & pos, NixStringContext & context, bool copyMore, bool copyToStore) const;
|
||||
|
||||
/**
|
||||
* Compare to another value of the same type. Defaults to uncomparable,
|
||||
|
|
|
@ -19,8 +19,8 @@ public:
|
|||
: Error("")
|
||||
{
|
||||
raw = raw_;
|
||||
auto hf = hintfmt(args...);
|
||||
err.msg = hintfmt("Bad String Context element: %1%: %2%", normaltxt(hf.str()), raw);
|
||||
auto hf = HintFmt(args...);
|
||||
err.msg = HintFmt("Bad String Context element: %1%: %2%", Uncolored(hf.str()), raw);
|
||||
}
|
||||
};
|
||||
|
||||
|
|
|
@ -5,26 +5,26 @@ namespace nix {
|
|||
std::string FilteringInputAccessor::readFile(const CanonPath & path)
|
||||
{
|
||||
checkAccess(path);
|
||||
return next->readFile(prefix + path);
|
||||
return next->readFile(prefix / path);
|
||||
}
|
||||
|
||||
bool FilteringInputAccessor::pathExists(const CanonPath & path)
|
||||
{
|
||||
return isAllowed(path) && next->pathExists(prefix + path);
|
||||
return isAllowed(path) && next->pathExists(prefix / path);
|
||||
}
|
||||
|
||||
std::optional<InputAccessor::Stat> FilteringInputAccessor::maybeLstat(const CanonPath & path)
|
||||
{
|
||||
checkAccess(path);
|
||||
return next->maybeLstat(prefix + path);
|
||||
return next->maybeLstat(prefix / path);
|
||||
}
|
||||
|
||||
InputAccessor::DirEntries FilteringInputAccessor::readDirectory(const CanonPath & path)
|
||||
{
|
||||
checkAccess(path);
|
||||
DirEntries entries;
|
||||
for (auto & entry : next->readDirectory(prefix + path)) {
|
||||
if (isAllowed(path + entry.first))
|
||||
for (auto & entry : next->readDirectory(prefix / path)) {
|
||||
if (isAllowed(path / entry.first))
|
||||
entries.insert(std::move(entry));
|
||||
}
|
||||
return entries;
|
||||
|
@ -33,12 +33,12 @@ InputAccessor::DirEntries FilteringInputAccessor::readDirectory(const CanonPath
|
|||
std::string FilteringInputAccessor::readLink(const CanonPath & path)
|
||||
{
|
||||
checkAccess(path);
|
||||
return next->readLink(prefix + path);
|
||||
return next->readLink(prefix / path);
|
||||
}
|
||||
|
||||
std::string FilteringInputAccessor::showPath(const CanonPath & path)
|
||||
{
|
||||
return next->showPath(prefix + path);
|
||||
return next->showPath(prefix / path);
|
||||
}
|
||||
|
||||
void FilteringInputAccessor::checkAccess(const CanonPath & path)
|
||||
|
|
|
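For readers skimming the `+` → `/` change in this file: `CanonPath` composition now reads like `std::filesystem::path`. A tiny sketch, assuming only the overloads exercised by the call sites above (the paths are made up):

    CanonPath prefix("/foo");
    std::string name = "baz.txt";
    auto p = prefix / CanonPath("bar") / name;   // "/foo/bar/baz.txt"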
@ -6,72 +6,30 @@ namespace nix {
|
|||
|
||||
struct FSInputAccessor : InputAccessor, PosixSourceAccessor
|
||||
{
|
||||
CanonPath root;
|
||||
|
||||
FSInputAccessor(const CanonPath & root)
|
||||
: root(root)
|
||||
{
|
||||
displayPrefix = root.isRoot() ? "" : root.abs();
|
||||
}
|
||||
|
||||
void readFile(
|
||||
const CanonPath & path,
|
||||
Sink & sink,
|
||||
std::function<void(uint64_t)> sizeCallback) override
|
||||
{
|
||||
auto absPath = makeAbsPath(path);
|
||||
PosixSourceAccessor::readFile(absPath, sink, sizeCallback);
|
||||
}
|
||||
|
||||
bool pathExists(const CanonPath & path) override
|
||||
{
|
||||
return PosixSourceAccessor::pathExists(makeAbsPath(path));
|
||||
}
|
||||
|
||||
std::optional<Stat> maybeLstat(const CanonPath & path) override
|
||||
{
|
||||
return PosixSourceAccessor::maybeLstat(makeAbsPath(path));
|
||||
}
|
||||
|
||||
DirEntries readDirectory(const CanonPath & path) override
|
||||
{
|
||||
DirEntries res;
|
||||
for (auto & entry : PosixSourceAccessor::readDirectory(makeAbsPath(path)))
|
||||
res.emplace(entry);
|
||||
return res;
|
||||
}
|
||||
|
||||
std::string readLink(const CanonPath & path) override
|
||||
{
|
||||
return PosixSourceAccessor::readLink(makeAbsPath(path));
|
||||
}
|
||||
|
||||
CanonPath makeAbsPath(const CanonPath & path)
|
||||
{
|
||||
return root + path;
|
||||
}
|
||||
|
||||
std::optional<CanonPath> getPhysicalPath(const CanonPath & path) override
|
||||
{
|
||||
return makeAbsPath(path);
|
||||
}
|
||||
using PosixSourceAccessor::PosixSourceAccessor;
|
||||
};
|
||||
|
||||
ref<InputAccessor> makeFSInputAccessor(const CanonPath & root)
|
||||
ref<InputAccessor> makeFSInputAccessor()
|
||||
{
|
||||
return make_ref<FSInputAccessor>(root);
|
||||
return make_ref<FSInputAccessor>();
|
||||
}
|
||||
|
||||
ref<InputAccessor> makeFSInputAccessor(std::filesystem::path root)
|
||||
{
|
||||
return make_ref<FSInputAccessor>(std::move(root));
|
||||
}
|
||||
|
||||
ref<InputAccessor> makeStorePathAccessor(
|
||||
ref<Store> store,
|
||||
const StorePath & storePath)
|
||||
{
|
||||
return makeFSInputAccessor(CanonPath(store->toRealPath(storePath)));
|
||||
// FIXME: should use `store->getFSAccessor()`
|
||||
return makeFSInputAccessor(std::filesystem::path { store->toRealPath(storePath) });
|
||||
}
|
||||
|
||||
SourcePath getUnfilteredRootPath(CanonPath path)
|
||||
{
|
||||
static auto rootFS = makeFSInputAccessor(CanonPath::root);
|
||||
static auto rootFS = makeFSInputAccessor();
|
||||
return {rootFS, path};
|
||||
}
|
||||
|
||||
|
|
|
@ -8,8 +8,9 @@ namespace nix {
|
|||
class StorePath;
|
||||
class Store;
|
||||
|
||||
ref<InputAccessor> makeFSInputAccessor(
|
||||
const CanonPath & root);
|
||||
ref<InputAccessor> makeFSInputAccessor();
|
||||
|
||||
ref<InputAccessor> makeFSInputAccessor(std::filesystem::path root);
|
||||
|
||||
ref<InputAccessor> makeStorePathAccessor(
|
||||
ref<Store> store,
|
||||
|
|
|
@ -139,15 +139,16 @@ T peelObject(git_repository * repo, git_object * obj, git_object_t type)
|
|||
|
||||
struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
|
||||
{
|
||||
CanonPath path;
|
||||
/** Location of the repository on disk. */
|
||||
std::filesystem::path path;
|
||||
Repository repo;
|
||||
|
||||
GitRepoImpl(CanonPath _path, bool create, bool bare)
|
||||
GitRepoImpl(std::filesystem::path _path, bool create, bool bare)
|
||||
: path(std::move(_path))
|
||||
{
|
||||
initLibGit2();
|
||||
|
||||
if (pathExists(path.abs())) {
|
||||
if (pathExists(path.native())) {
|
||||
if (git_repository_open(Setter(repo), path.c_str()))
|
||||
throw Error("opening Git repository '%s': %s", path, git_error_last()->message);
|
||||
} else {
|
||||
|
@ -220,10 +221,10 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
|
|||
return toHash(*oid);
|
||||
}
|
||||
|
||||
std::vector<Submodule> parseSubmodules(const CanonPath & configFile)
|
||||
std::vector<Submodule> parseSubmodules(const std::filesystem::path & configFile)
|
||||
{
|
||||
GitConfig config;
|
||||
if (git_config_open_ondisk(Setter(config), configFile.abs().c_str()))
|
||||
if (git_config_open_ondisk(Setter(config), configFile.c_str()))
|
||||
throw Error("parsing .gitmodules file: %s", git_error_last()->message);
|
||||
|
||||
ConfigIterator it;
|
||||
|
@ -294,8 +295,8 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
|
|||
throw Error("getting working directory status: %s", git_error_last()->message);
|
||||
|
||||
/* Get submodule info. */
|
||||
auto modulesFile = path + ".gitmodules";
|
||||
if (pathExists(modulesFile.abs()))
|
||||
auto modulesFile = path / ".gitmodules";
|
||||
if (pathExists(modulesFile))
|
||||
info.submodules = parseSubmodules(modulesFile);
|
||||
|
||||
return info;
|
||||
|
@ -382,27 +383,27 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
|
|||
{
|
||||
Activity act(*logger, lvlTalkative, actFetchTree, fmt("fetching Git repository '%s'", url));
|
||||
|
||||
Remote remote;
|
||||
// TODO: implement git-credential helper support (preferably via libgit2, which as of 2024-01 does not support that)
|
||||
// then use code that was removed in this commit (see blame)
|
||||
|
||||
if (git_remote_create_anonymous(Setter(remote), *this, url.c_str()))
|
||||
throw Error("cannot create Git remote '%s': %s", url, git_error_last()->message);
|
||||
auto dir = this->path;
|
||||
Strings gitArgs;
|
||||
if (shallow) {
|
||||
gitArgs = { "-C", dir, "fetch", "--quiet", "--force", "--depth", "1", "--", url, refspec };
|
||||
}
|
||||
else {
|
||||
gitArgs = { "-C", dir, "fetch", "--quiet", "--force", "--", url, refspec };
|
||||
}
|
||||
|
||||
char * refspecs[] = {(char *) refspec.c_str()};
|
||||
git_strarray refspecs2 {
|
||||
.strings = refspecs,
|
||||
.count = 1
|
||||
};
|
||||
|
||||
git_fetch_options opts = GIT_FETCH_OPTIONS_INIT;
|
||||
// FIXME: for some reason, shallow fetching over ssh barfs
|
||||
// with "could not read from remote repository".
|
||||
opts.depth = shallow && parseURL(url).scheme != "ssh" ? 1 : GIT_FETCH_DEPTH_FULL;
|
||||
opts.callbacks.payload = &act;
|
||||
opts.callbacks.sideband_progress = sidebandProgressCallback;
|
||||
opts.callbacks.transfer_progress = transferProgressCallback;
|
||||
|
||||
if (git_remote_fetch(remote.get(), &refspecs2, &opts, nullptr))
|
||||
throw Error("fetching '%s' from '%s': %s", refspec, url, git_error_last()->message);
|
||||
runProgram(RunOptions {
|
||||
.program = "git",
|
||||
.searchPath = true,
|
||||
// FIXME: git stderr messes up our progress indicator, so
|
||||
// we're using --quiet for now. Should process its stderr.
|
||||
.args = gitArgs,
|
||||
.input = {},
|
||||
.isInteractive = true
|
||||
});
|
||||
}
|
||||
|
||||
void verifyCommit(
|
||||
|
@ -437,7 +438,7 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
|
|||
.args = {
|
||||
"-c",
|
||||
"gpg.ssh.allowedSignersFile=" + allowedSignersFile,
|
||||
"-C", path.abs(),
|
||||
"-C", path,
|
||||
"verify-commit",
|
||||
rev.gitRev()
|
||||
},
|
||||
|
@ -464,7 +465,7 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
|
|||
}
|
||||
};
|
||||
|
||||
ref<GitRepo> GitRepo::openRepo(const CanonPath & path, bool create, bool bare)
|
||||
ref<GitRepo> GitRepo::openRepo(const std::filesystem::path & path, bool create, bool bare)
|
||||
{
|
||||
return make_ref<GitRepoImpl>(path, create, bare);
|
||||
}
|
||||
|
@ -780,7 +781,7 @@ std::vector<std::tuple<GitRepoImpl::Submodule, Hash>> GitRepoImpl::getSubmodules
|
|||
|
||||
auto rawAccessor = getRawAccessor(rev);
|
||||
|
||||
for (auto & submodule : parseSubmodules(CanonPath(pathTemp))) {
|
||||
for (auto & submodule : parseSubmodules(pathTemp)) {
|
||||
auto rev = rawAccessor->getSubmoduleRev(submodule.path);
|
||||
result.push_back({std::move(submodule), rev});
|
||||
}
|
||||
|
|
|
@ -12,7 +12,7 @@ struct GitRepo
|
|||
virtual ~GitRepo()
|
||||
{ }
|
||||
|
||||
static ref<GitRepo> openRepo(const CanonPath & path, bool create = false, bool bare = false);
|
||||
static ref<GitRepo> openRepo(const std::filesystem::path & path, bool create = false, bool bare = false);
|
||||
|
||||
virtual uint64_t getRevCount(const Hash & rev) = 0;
|
||||
|
||||
|
|
|
@ -50,10 +50,12 @@ bool touchCacheFile(const Path & path, time_t touch_time)
|
|||
return lutimes(path.c_str(), times) == 0;
|
||||
}
|
||||
|
||||
Path getCachePath(std::string_view key)
|
||||
Path getCachePath(std::string_view key, bool shallow)
|
||||
{
|
||||
return getCacheDir() + "/nix/gitv3/" +
|
||||
hashString(HashAlgorithm::SHA256, key).to_string(HashFormat::Nix32, false);
|
||||
return getCacheDir()
|
||||
+ "/nix/gitv3/"
|
||||
+ hashString(HashAlgorithm::SHA256, key).to_string(HashFormat::Nix32, false)
|
||||
+ (shallow ? "-shallow" : "");
|
||||
}
|
||||
|
||||
// Returns the name of the HEAD branch.
|
||||
|
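To make the new flag concrete: shallow and full fetches of the same URL now land in different cache directories, so a full clone is never served from a shallow cache. A hedged sketch (URL invented, hash abbreviated, `~/.cache` being the usual `getCacheDir()`):

    auto full    = getCachePath("https://example.org/repo.git", false);
    // -> ~/.cache/nix/gitv3/<sha256-in-nix32>
    auto shallow = getCachePath("https://example.org/repo.git", true);
    // -> ~/.cache/nix/gitv3/<sha256-in-nix32>-shallow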
@ -92,7 +94,8 @@ std::optional<std::string> readHead(const Path & path)
|
|||
// Persist the HEAD ref from the remote repo in the local cached repo.
|
||||
bool storeCachedHead(const std::string & actualUrl, const std::string & headRef)
|
||||
{
|
||||
Path cacheDir = getCachePath(actualUrl);
|
||||
// set shallow=false as HEAD will never be queried for a shallow repo
|
||||
Path cacheDir = getCachePath(actualUrl, false);
|
||||
try {
|
||||
runProgram("git", true, { "-C", cacheDir, "--git-dir", ".", "symbolic-ref", "--", "HEAD", headRef });
|
||||
} catch (ExecError &e) {
|
||||
|
@ -107,7 +110,8 @@ std::optional<std::string> readHeadCached(const std::string & actualUrl)
|
|||
{
|
||||
// Create a cache path to store the branch of the HEAD ref. Append something
|
||||
// in front of the URL to prevent collision with the repository itself.
|
||||
Path cacheDir = getCachePath(actualUrl);
|
||||
// set shallow=false as HEAD will never be queried for a shallow repo
|
||||
Path cacheDir = getCachePath(actualUrl, false);
|
||||
Path headRefFile = cacheDir + "/HEAD";
|
||||
|
||||
time_t now = time(0);
|
||||
|
@ -315,7 +319,7 @@ struct GitInputScheme : InputScheme
|
|||
if (!repoInfo.isLocal)
|
||||
throw Error("cannot commit '%s' to Git repository '%s' because it's not a working tree", path, input.to_string());
|
||||
|
||||
writeFile((CanonPath(repoInfo.url) + path).abs(), contents);
|
||||
writeFile((CanonPath(repoInfo.url) / path).abs(), contents);
|
||||
|
||||
auto result = runProgram(RunOptions {
|
||||
.program = "git",
|
||||
|
@ -411,7 +415,7 @@ struct GitInputScheme : InputScheme
|
|||
// If this is a local directory and no ref or revision is
|
||||
// given, then allow the use of an unclean working tree.
|
||||
if (!input.getRef() && !input.getRev() && repoInfo.isLocal)
|
||||
repoInfo.workdirInfo = GitRepo::openRepo(CanonPath(repoInfo.url))->getWorkdirInfo();
|
||||
repoInfo.workdirInfo = GitRepo::openRepo(repoInfo.url)->getWorkdirInfo();
|
||||
|
||||
return repoInfo;
|
||||
}
|
||||
|
@ -425,7 +429,7 @@ struct GitInputScheme : InputScheme
|
|||
if (auto res = cache->lookup(key))
|
||||
return getIntAttr(*res, "lastModified");
|
||||
|
||||
auto lastModified = GitRepo::openRepo(CanonPath(repoDir))->getLastModified(rev);
|
||||
auto lastModified = GitRepo::openRepo(repoDir)->getLastModified(rev);
|
||||
|
||||
cache->upsert(key, Attrs{{"lastModified", lastModified}});
|
||||
|
||||
|
@ -443,7 +447,7 @@ struct GitInputScheme : InputScheme
|
|||
|
||||
Activity act(*logger, lvlChatty, actUnknown, fmt("getting Git revision count of '%s'", repoInfo.url));
|
||||
|
||||
auto revCount = GitRepo::openRepo(CanonPath(repoDir))->getRevCount(rev);
|
||||
auto revCount = GitRepo::openRepo(repoDir)->getRevCount(rev);
|
||||
|
||||
cache->upsert(key, Attrs{{"revCount", revCount}});
|
||||
|
||||
|
@ -453,7 +457,7 @@ struct GitInputScheme : InputScheme
|
|||
std::string getDefaultRef(const RepoInfo & repoInfo) const
|
||||
{
|
||||
auto head = repoInfo.isLocal
|
||||
? GitRepo::openRepo(CanonPath(repoInfo.url))->getWorkdirRef()
|
||||
? GitRepo::openRepo(repoInfo.url)->getWorkdirRef()
|
||||
: readHeadCached(repoInfo.url);
|
||||
if (!head) {
|
||||
warn("could not read HEAD ref from repo at '%s', using 'master'", repoInfo.url);
|
||||
|
@ -506,16 +510,16 @@ struct GitInputScheme : InputScheme
|
|||
if (repoInfo.isLocal) {
|
||||
repoDir = repoInfo.url;
|
||||
if (!input.getRev())
|
||||
input.attrs.insert_or_assign("rev", GitRepo::openRepo(CanonPath(repoDir))->resolveRef(ref).gitRev());
|
||||
input.attrs.insert_or_assign("rev", GitRepo::openRepo(repoDir)->resolveRef(ref).gitRev());
|
||||
} else {
|
||||
Path cacheDir = getCachePath(repoInfo.url);
|
||||
Path cacheDir = getCachePath(repoInfo.url, getShallowAttr(input));
|
||||
repoDir = cacheDir;
|
||||
repoInfo.gitDir = ".";
|
||||
|
||||
createDirs(dirOf(cacheDir));
|
||||
PathLocks cacheDirLock({cacheDir});
|
||||
|
||||
auto repo = GitRepo::openRepo(CanonPath(cacheDir), true, true);
|
||||
auto repo = GitRepo::openRepo(cacheDir, true, true);
|
||||
|
||||
Path localRefFile =
|
||||
ref.compare(0, 5, "refs/") == 0
|
||||
|
@ -584,7 +588,7 @@ struct GitInputScheme : InputScheme
|
|||
// cache dir lock is removed at scope end; we will only use read-only operations on specific revisions in the remainder
|
||||
}
|
||||
|
||||
auto repo = GitRepo::openRepo(CanonPath(repoDir));
|
||||
auto repo = GitRepo::openRepo(repoDir);
|
||||
|
||||
auto isShallow = repo->isShallow();
|
||||
|
||||
|
@ -660,7 +664,7 @@ struct GitInputScheme : InputScheme
|
|||
for (auto & submodule : repoInfo.workdirInfo.submodules)
|
||||
repoInfo.workdirInfo.files.insert(submodule.path);
|
||||
|
||||
auto repo = GitRepo::openRepo(CanonPath(repoInfo.url), false, false);
|
||||
auto repo = GitRepo::openRepo(repoInfo.url, false, false);
|
||||
|
||||
auto exportIgnore = getExportIgnoreAttr(input);
|
||||
|
||||
|
@ -676,7 +680,7 @@ struct GitInputScheme : InputScheme
|
|||
std::map<CanonPath, nix::ref<InputAccessor>> mounts;
|
||||
|
||||
for (auto & submodule : repoInfo.workdirInfo.submodules) {
|
||||
auto submodulePath = CanonPath(repoInfo.url) + submodule.path;
|
||||
auto submodulePath = CanonPath(repoInfo.url) / submodule.path;
|
||||
fetchers::Attrs attrs;
|
||||
attrs.insert_or_assign("type", "git");
|
||||
attrs.insert_or_assign("url", submodulePath.abs());
|
||||
|
@ -699,7 +703,7 @@ struct GitInputScheme : InputScheme
|
|||
}
|
||||
|
||||
if (!repoInfo.workdirInfo.isDirty) {
|
||||
auto repo = GitRepo::openRepo(CanonPath(repoInfo.url));
|
||||
auto repo = GitRepo::openRepo(repoInfo.url);
|
||||
|
||||
if (auto ref = repo->getWorkdirRef())
|
||||
input.attrs.insert_or_assign("ref", *ref);
|
||||
|
|
|
@ -141,7 +141,7 @@ struct MercurialInputScheme : InputScheme
|
|||
if (!isLocal)
|
||||
throw Error("cannot commit '%s' to Mercurial repository '%s' because it's not a working tree", path, input.to_string());
|
||||
|
||||
auto absPath = CanonPath(repoPath) + path;
|
||||
auto absPath = CanonPath(repoPath) / path;
|
||||
|
||||
writeFile(absPath.abs(), contents);
|
||||
|
||||
|
|
|
@ -84,7 +84,7 @@ struct PathInputScheme : InputScheme
|
|||
std::string_view contents,
|
||||
std::optional<std::string> commitMsg) const override
|
||||
{
|
||||
writeFile((CanonPath(getAbsPath(input)) + path).abs(), contents);
|
||||
writeFile((CanonPath(getAbsPath(input)) / path).abs(), contents);
|
||||
}
|
||||
|
||||
CanonPath getAbsPath(const Input & input) const
|
||||
|
|
|
@ -340,7 +340,7 @@ int handleExceptions(const std::string & programName, std::function<void()> fun)
|
|||
return 1;
|
||||
} catch (BaseError & e) {
|
||||
logError(e.info());
|
||||
return e.status;
|
||||
return e.info().status;
|
||||
} catch (std::bad_alloc & e) {
|
||||
printError(error + "out of memory");
|
||||
return 1;
|
||||
|
|
|
@ -235,14 +235,14 @@ ref<const ValidPathInfo> BinaryCacheStore::addToStoreCommon(
|
|||
std::regex regex2("^[0-9a-f]{38}\\.debug$");
|
||||
|
||||
for (auto & [s1, _type] : narAccessor->readDirectory(buildIdDir)) {
|
||||
auto dir = buildIdDir + s1;
|
||||
auto dir = buildIdDir / s1;
|
||||
|
||||
if (narAccessor->lstat(dir).type != SourceAccessor::tDirectory
|
||||
|| !std::regex_match(s1, regex1))
|
||||
continue;
|
||||
|
||||
for (auto & [s2, _type] : narAccessor->readDirectory(dir)) {
|
||||
auto debugPath = dir + s2;
|
||||
auto debugPath = dir / s2;
|
||||
|
||||
if (narAccessor->lstat(debugPath).type != SourceAccessor::tRegular
|
||||
|| !std::regex_match(s2, regex2))
|
||||
|
|
|
@ -223,7 +223,7 @@ void DerivationGoal::haveDerivation()
|
|||
if (!drv->type().hasKnownOutputPaths())
|
||||
experimentalFeatureSettings.require(Xp::CaDerivations);
|
||||
|
||||
if (!drv->type().isPure()) {
|
||||
if (drv->type().isImpure()) {
|
||||
experimentalFeatureSettings.require(Xp::ImpureDerivations);
|
||||
|
||||
for (auto & [outputName, output] : drv->outputs) {
|
||||
|
@ -304,7 +304,7 @@ void DerivationGoal::outputsSubstitutionTried()
|
|||
{
|
||||
trace("all outputs substituted (maybe)");
|
||||
|
||||
assert(drv->type().isPure());
|
||||
assert(!drv->type().isImpure());
|
||||
|
||||
if (nrFailed > 0 && nrFailed > nrNoSubstituters + nrIncompleteClosure && !settings.tryFallback) {
|
||||
done(BuildResult::TransientFailure, {},
|
||||
|
@ -397,9 +397,9 @@ void DerivationGoal::gaveUpOnSubstitution()
|
|||
for (const auto & [inputDrvPath, inputNode] : dynamic_cast<Derivation *>(drv.get())->inputDrvs.map) {
|
||||
/* Ensure that pure, non-fixed-output derivations don't
|
||||
depend on impure derivations. */
|
||||
if (experimentalFeatureSettings.isEnabled(Xp::ImpureDerivations) && drv->type().isPure() && !drv->type().isFixed()) {
|
||||
if (experimentalFeatureSettings.isEnabled(Xp::ImpureDerivations) && !drv->type().isImpure() && !drv->type().isFixed()) {
|
||||
auto inputDrv = worker.evalStore.readDerivation(inputDrvPath);
|
||||
if (!inputDrv.type().isPure())
|
||||
if (inputDrv.type().isImpure())
|
||||
throw Error("pure derivation '%s' depends on impure derivation '%s'",
|
||||
worker.store.printStorePath(drvPath),
|
||||
worker.store.printStorePath(inputDrvPath));
|
||||
|
@ -439,7 +439,7 @@ void DerivationGoal::gaveUpOnSubstitution()
|
|||
|
||||
void DerivationGoal::repairClosure()
|
||||
{
|
||||
assert(drv->type().isPure());
|
||||
assert(!drv->type().isImpure());
|
||||
|
||||
/* If we're repairing, we now know that our own outputs are valid.
|
||||
Now check whether the other paths in the outputs closure are
|
||||
|
@ -708,7 +708,7 @@ void DerivationGoal::tryToBuild()
|
|||
if (!outputLocks.lockPaths(lockFiles, "", false)) {
|
||||
if (!actLock)
|
||||
actLock = std::make_unique<Activity>(*logger, lvlWarn, actBuildWaiting,
|
||||
fmt("waiting for lock on %s", yellowtxt(showPaths(lockFiles))));
|
||||
fmt("waiting for lock on %s", Magenta(showPaths(lockFiles))));
|
||||
worker.waitForAWhile(shared_from_this());
|
||||
return;
|
||||
}
|
||||
|
@ -762,7 +762,7 @@ void DerivationGoal::tryToBuild()
|
|||
the wake-up timeout expires. */
|
||||
if (!actLock)
|
||||
actLock = std::make_unique<Activity>(*logger, lvlWarn, actBuildWaiting,
|
||||
fmt("waiting for a machine to build '%s'", yellowtxt(worker.store.printStorePath(drvPath))));
|
||||
fmt("waiting for a machine to build '%s'", Magenta(worker.store.printStorePath(drvPath))));
|
||||
worker.waitForAWhile(shared_from_this());
|
||||
outputLocks.unlock();
|
||||
return;
|
||||
|
@ -891,7 +891,7 @@ void runPostBuildHook(
|
|||
if (hook == "")
|
||||
return;
|
||||
|
||||
Activity act(logger, lvlInfo, actPostBuildHook,
|
||||
Activity act(logger, lvlTalkative, actPostBuildHook,
|
||||
fmt("running post-build-hook '%s'", settings.postBuildHook),
|
||||
Logger::Fields{store.printStorePath(drvPath)});
|
||||
PushActivity pact(act.id);
|
||||
|
@ -987,7 +987,7 @@ void DerivationGoal::buildDone()
|
|||
diskFull |= cleanupDecideWhetherDiskFull();
|
||||
|
||||
auto msg = fmt("builder for '%s' %s",
|
||||
yellowtxt(worker.store.printStorePath(drvPath)),
|
||||
Magenta(worker.store.printStorePath(drvPath)),
|
||||
statusToString(status));
|
||||
|
||||
if (!logger->isVerbose() && !logTail.empty()) {
|
||||
|
@ -1100,7 +1100,7 @@ void DerivationGoal::resolvedFinished()
|
|||
worker.store.printStorePath(resolvedDrvGoal->drvPath), outputName);
|
||||
}();
|
||||
|
||||
if (drv->type().isPure()) {
|
||||
if (!drv->type().isImpure()) {
|
||||
auto newRealisation = realisation;
|
||||
newRealisation.id = DrvOutput { initialOutput->outputHash, outputName };
|
||||
newRealisation.signatures.clear();
|
||||
|
@ -1395,7 +1395,7 @@ void DerivationGoal::flushLine()
|
|||
|
||||
std::map<std::string, std::optional<StorePath>> DerivationGoal::queryPartialDerivationOutputMap()
|
||||
{
|
||||
assert(drv->type().isPure());
|
||||
assert(!drv->type().isImpure());
|
||||
if (!useDerivation || drv->type().hasKnownOutputPaths()) {
|
||||
std::map<std::string, std::optional<StorePath>> res;
|
||||
for (auto & [name, output] : drv->outputs)
|
||||
|
@ -1411,7 +1411,7 @@ std::map<std::string, std::optional<StorePath>> DerivationGoal::queryPartialDeri
|
|||
|
||||
OutputPathMap DerivationGoal::queryDerivationOutputMap()
|
||||
{
|
||||
assert(drv->type().isPure());
|
||||
assert(!drv->type().isImpure());
|
||||
if (!useDerivation || drv->type().hasKnownOutputPaths()) {
|
||||
OutputPathMap res;
|
||||
for (auto & [name, output] : drv->outputsAndOptPaths(worker.store))
|
||||
|
@ -1428,7 +1428,7 @@ OutputPathMap DerivationGoal::queryDerivationOutputMap()
|
|||
|
||||
std::pair<bool, SingleDrvOutputs> DerivationGoal::checkPathValidity()
|
||||
{
|
||||
if (!drv->type().isPure()) return { false, {} };
|
||||
if (drv->type().isImpure()) return { false, {} };
|
||||
|
||||
bool checkHash = buildMode == bmRepair;
|
||||
auto wantedOutputsLeft = std::visit(overloaded {
|
||||
|
@ -1523,7 +1523,7 @@ void DerivationGoal::done(
|
|||
outputLocks.unlock();
|
||||
buildResult.status = status;
|
||||
if (ex)
|
||||
buildResult.errorMsg = fmt("%s", normaltxt(ex->info().msg));
|
||||
buildResult.errorMsg = fmt("%s", Uncolored(ex->info().msg));
|
||||
if (buildResult.status == BuildResult::TimedOut)
|
||||
worker.timedOut = true;
|
||||
if (buildResult.status == BuildResult::PermanentFailure)
|
||||
|
|
|
@ -33,7 +33,7 @@ void Store::buildPaths(const std::vector<DerivedPath> & reqs, BuildMode buildMod
|
|||
}
|
||||
|
||||
if (failed.size() == 1 && ex) {
|
||||
ex->status = worker.failingExitStatus();
|
||||
ex->withExitStatus(worker.failingExitStatus());
|
||||
throw std::move(*ex);
|
||||
} else if (!failed.empty()) {
|
||||
if (ex) logError(ex->info());
|
||||
|
@ -104,7 +104,7 @@ void Store::ensurePath(const StorePath & path)
|
|||
|
||||
if (goal->exitCode != Goal::ecSuccess) {
|
||||
if (goal->ex) {
|
||||
goal->ex->status = worker.failingExitStatus();
|
||||
goal->ex->withExitStatus(worker.failingExitStatus());
|
||||
throw std::move(*goal->ex);
|
||||
} else
|
||||
throw Error(worker.failingExitStatus(), "path '%s' does not exist and cannot be created", printStorePath(path));
|
||||
|
|
|
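A note tying the status-handling hunks in this diff together (a reading of the change, not new API beyond what the hunks show): the exit status is now carried in `ErrorInfo`, set with the chainable `withExitStatus(...)` and read back via `info().status` instead of a public `status` member:

    ex->withExitStatus(worker.failingExitStatus());   // set (chainable)
    throw std::move(*ex);
    // ... and on the catching side, e.g. in handleExceptions():
    return e.info().status;                           // read (was: e.status)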
@ -92,7 +92,7 @@ void handleDiffHook(
|
|||
} catch (Error & error) {
|
||||
ErrorInfo ei = error.info();
|
||||
// FIXME: wrap errors.
|
||||
ei.msg = hintfmt("diff hook execution failed: %s", ei.msg.str());
|
||||
ei.msg = HintFmt("diff hook execution failed: %s", ei.msg.str());
|
||||
logError(ei);
|
||||
}
|
||||
}
|
||||
|
@ -232,7 +232,7 @@ void LocalDerivationGoal::tryLocalBuild()
|
|||
if (!buildUser) {
|
||||
if (!actLock)
|
||||
actLock = std::make_unique<Activity>(*logger, lvlWarn, actBuildWaiting,
|
||||
fmt("waiting for a free build user ID for '%s'", yellowtxt(worker.store.printStorePath(drvPath))));
|
||||
fmt("waiting for a free build user ID for '%s'", Magenta(worker.store.printStorePath(drvPath))));
|
||||
worker.waitForAWhile(shared_from_this());
|
||||
return;
|
||||
}
|
||||
|
@ -2724,7 +2724,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
|
|||
.outPath = newInfo.path
|
||||
};
|
||||
if (experimentalFeatureSettings.isEnabled(Xp::CaDerivations)
|
||||
&& drv->type().isPure())
|
||||
&& !drv->type().isImpure())
|
||||
{
|
||||
signRealisation(thisRealisation);
|
||||
worker.store.registerDrvOutput(thisRealisation);
|
||||
|
|
|
@ -251,7 +251,7 @@ void Worker::childTerminated(Goal * goal, bool wakeSleepers)
|
|||
|
||||
void Worker::waitForBuildSlot(GoalPtr goal)
|
||||
{
|
||||
debug("wait for build slot");
|
||||
goal->trace("wait for build slot");
|
||||
bool isSubstitutionGoal = goal->jobCategory() == JobCategory::Substitution;
|
||||
if ((!isSubstitutionGoal && getNrLocalBuilds() < settings.maxBuildJobs) ||
|
||||
(isSubstitutionGoal && getNrSubstitutions() < settings.maxSubstitutionJobs))
|
||||
|
|
|
@ -116,7 +116,7 @@ private:
|
|||
WeakGoals waitingForAWhile;
|
||||
|
||||
/**
|
||||
* Last time the goals in `waitingForAWhile` where woken up.
|
||||
* Last time the goals in `waitingForAWhile` were woken up.
|
||||
*/
|
||||
steady_time_point lastWokenUp;
|
||||
|
||||
|
|
|
@ -16,6 +16,14 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData)
|
|||
writeFile(settings.netrcFile, netrcData, 0600);
|
||||
}
|
||||
|
||||
auto out = get(drv.outputs, "out");
|
||||
if (!out)
|
||||
throw Error("'builtin:fetchurl' requires an 'out' output");
|
||||
|
||||
auto dof = std::get_if<DerivationOutput::CAFixed>(&out->raw);
|
||||
if (!dof)
|
||||
throw Error("'builtin:fetchurl' must be a fixed-output derivation");
|
||||
|
||||
auto getAttr = [&](const std::string & name) {
|
||||
auto i = drv.env.find(name);
|
||||
if (i == drv.env.end()) throw Error("attribute '%s' missing", name);
|
||||
|
@ -59,13 +67,11 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData)
|
|||
};
|
||||
|
||||
/* Try the hashed mirrors first. */
|
||||
if (getAttr("outputHashMode") == "flat")
|
||||
if (dof->ca.method.getFileIngestionMethod() == FileIngestionMethod::Flat)
|
||||
for (auto hashedMirror : settings.hashedMirrors.get())
|
||||
try {
|
||||
if (!hasSuffix(hashedMirror, "/")) hashedMirror += '/';
|
||||
std::optional<HashAlgorithm> ht = parseHashAlgoOpt(getAttr("outputHashAlgo"));
|
||||
Hash h = newHashAllowEmpty(getAttr("outputHash"), ht);
|
||||
fetch(hashedMirror + printHashAlgo(h.algo) + "/" + h.to_string(HashFormat::Base16, false));
|
||||
fetch(hashedMirror + printHashAlgo(dof->ca.hash.algo) + "/" + dof->ca.hash.to_string(HashFormat::Base16, false));
|
||||
return;
|
||||
} catch (Error & e) {
|
||||
debug(e.what());
|
||||
|
|
|
@ -119,7 +119,7 @@ struct TunnelLogger : public Logger
|
|||
if (GET_PROTOCOL_MINOR(clientVersion) >= 26) {
|
||||
to << STDERR_ERROR << *ex;
|
||||
} else {
|
||||
to << STDERR_ERROR << ex->what() << ex->status;
|
||||
to << STDERR_ERROR << ex->what() << ex->info().status;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -441,7 +441,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
|
|||
eagerly consume the entire stream it's given, past the
|
||||
length of the Nar. */
|
||||
TeeSource savedNARSource(from, saved);
|
||||
NullParseSink sink; /* just parse the NAR */
|
||||
NullFileSystemObjectSink sink; /* just parse the NAR */
|
||||
parseDump(sink, savedNARSource);
|
||||
} else {
|
||||
/* Incrementally parse the NAR file, stripping the
|
||||
|
@ -913,7 +913,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
|
|||
source = std::make_unique<TunnelSource>(from, to);
|
||||
else {
|
||||
TeeSource tee { from, saved };
|
||||
NullParseSink ether;
|
||||
NullFileSystemObjectSink ether;
|
||||
parseDump(ether, tee);
|
||||
source = std::make_unique<StringSource>(saved.s);
|
||||
}
|
||||
|
|
|
@ -110,17 +110,17 @@ bool DerivationType::isSandboxed() const
|
|||
}
|
||||
|
||||
|
||||
bool DerivationType::isPure() const
|
||||
bool DerivationType::isImpure() const
|
||||
{
|
||||
return std::visit(overloaded {
|
||||
[](const InputAddressed & ia) {
|
||||
return true;
|
||||
return false;
|
||||
},
|
||||
[](const ContentAddressed & ca) {
|
||||
return true;
|
||||
return false;
|
||||
},
|
||||
[](const Impure &) {
|
||||
return false;
|
||||
return true;
|
||||
},
|
||||
}, raw);
|
||||
}
|
||||
|
@ -840,7 +840,7 @@ DrvHash hashDerivationModulo(Store & store, const Derivation & drv, bool maskOut
|
|||
};
|
||||
}
|
||||
|
||||
if (!type.isPure()) {
|
||||
if (type.isImpure()) {
|
||||
std::map<std::string, Hash> outputHashes;
|
||||
for (const auto & [outputName, _] : drv.outputs)
|
||||
outputHashes.insert_or_assign(outputName, impureOutputHash);
|
||||
|
|
|
@ -253,12 +253,17 @@ struct DerivationType {
|
|||
bool isSandboxed() const;
|
||||
|
||||
/**
|
||||
* Whether the derivation is expected to produce the same result
|
||||
* every time, and therefore it only needs to be built once. This is
|
||||
* only false for derivations that have the attribute '__impure =
|
||||
* Whether the derivation is expected to produce a different result
|
||||
* every time, and therefore it needs to be rebuilt every time. This is
|
||||
* only true for derivations that have the attribute '__impure =
|
||||
* true'.
|
||||
*
|
||||
* Non-impure derivations can still behave impurely, to the degree permitted
|
||||
* by the sandbox. Hence why this method isn't `isPure`: impure derivations
|
||||
* are not the negation of pure derivations. Purity can not be ascertained
|
||||
* except by rather heavy tools.
|
||||
*/
|
||||
bool isPure() const;
|
||||
bool isImpure() const;
|
||||
|
||||
/**
|
||||
 * Does the derivation know its own output paths?
|
||||
|
|
|
@ -65,7 +65,7 @@ StorePaths Store::importPaths(Source & source, CheckSigsFlag checkSigs)
|
|||
/* Extract the NAR from the source. */
|
||||
StringSink saved;
|
||||
TeeSource tee { source, saved };
|
||||
NullParseSink ether;
|
||||
NullFileSystemObjectSink ether;
|
||||
parseDump(ether, tee);
|
||||
|
||||
uint32_t magic = readInt(source);
|
||||
|
|
|
@ -882,12 +882,12 @@ template<typename... Args>
|
|||
FileTransferError::FileTransferError(FileTransfer::Error error, std::optional<std::string> response, const Args & ... args)
|
||||
: Error(args...), error(error), response(response)
|
||||
{
|
||||
const auto hf = hintfmt(args...);
|
||||
const auto hf = HintFmt(args...);
|
||||
// FIXME: Due to https://github.com/NixOS/nix/issues/3841 we don't know how
|
||||
// to print different messages for different verbosity levels. For now
|
||||
// we add some heuristics for detecting when we want to show the response.
|
||||
if (response && (response->size() < 1024 || response->find("<html>") != std::string::npos))
|
||||
err.msg = hintfmt("%1%\n\nresponse body:\n\n%2%", normaltxt(hf.str()), chomp(*response));
|
||||
err.msg = HintFmt("%1%\n\nresponse body:\n\n%2%", Uncolored(hf.str()), chomp(*response));
|
||||
else
|
||||
err.msg = hf;
|
||||
}
|
||||
|
|
|
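For orientation on the formatting renames used here and elsewhere in this diff (`hintfmt` → `HintFmt`, `normaltxt` → `Uncolored`), a hedged sketch with invented variable names: `HintFmt` highlights its interpolated arguments, and `Uncolored` wraps text that should stay plain:

    auto hf = HintFmt("download of '%s' failed", url);            // url gets highlighted
    err.msg = HintFmt("%1%\n\n%2%", Uncolored(hf.str()), body);   // keep the prefix uncolored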
@ -151,13 +151,18 @@ public:
|
|||
MaxBuildJobsSetting maxBuildJobs{
|
||||
this, 1, "max-jobs",
|
||||
R"(
|
||||
This option defines the maximum number of jobs that Nix will try to
|
||||
build in parallel. The default is `1`. The special value `auto`
|
||||
causes Nix to use the number of CPUs in your system. `0` is useful
|
||||
when using remote builders to prevent any local builds (except for
|
||||
`preferLocalBuild` derivation attribute which executes locally
|
||||
regardless). It can be overridden using the `--max-jobs` (`-j`)
|
||||
command line switch.
|
||||
Maximum number of jobs that Nix will try to build locally in parallel.
|
||||
|
||||
The special value `auto` causes Nix to use the number of CPUs in your system.
|
||||
Use `0` to disable local builds and directly use the remote machines specified in [`builders`](#conf-builders).
|
||||
This will not affect derivations that have [`preferLocalBuild = true`](@docroot@/language/advanced-attributes.md#adv-attr-preferLocalBuild), which are always built locally.
|
||||
|
||||
> **Note**
|
||||
>
|
||||
> The number of CPU cores to use for each build job is independently determined by the [`cores`](#conf-cores) setting.
|
||||
|
||||
<!-- TODO(@fricklerhandwerk): would be good to have those shorthands for common options as part of the specification -->
|
||||
The setting can be overridden using the `--max-jobs` (`-j`) command line switch.
|
||||
)",
|
||||
{"build-max-jobs"}};
|
||||
|
||||
|
@ -635,7 +640,7 @@ public:
|
|||
|
||||
- the store object has been signed using a key in the trusted keys list
|
||||
- the [`require-sigs`](#conf-require-sigs) option has been set to `false`
|
||||
- the store object is [output-addressed](@docroot@/glossary.md#gloss-output-addressed-store-object)
|
||||
- the store object is [content-addressed](@docroot@/glossary.md#gloss-content-addressed-store-object)
|
||||
)",
|
||||
{"binary-cache-public-keys"}};
|
||||
|
||||
|
|
|
@ -22,45 +22,10 @@ std::string LegacySSHStoreConfig::doc()
|
|||
}
|
||||
|
||||
|
||||
struct LegacySSHStore::Connection
|
||||
struct LegacySSHStore::Connection : public ServeProto::BasicClientConnection
|
||||
{
|
||||
std::unique_ptr<SSHMaster::Connection> sshConn;
|
||||
FdSink to;
|
||||
FdSource from;
|
||||
ServeProto::Version remoteVersion;
|
||||
bool good = true;
|
||||
|
||||
/**
|
||||
* Coercion to `ServeProto::ReadConn`. This makes it easy to use the
|
||||
 * factored out serve protocol serializers with a
|
||||
* `LegacySSHStore::Connection`.
|
||||
*
|
||||
* The serve protocol connection types are unidirectional, unlike
|
||||
* this type.
|
||||
*/
|
||||
operator ServeProto::ReadConn ()
|
||||
{
|
||||
return ServeProto::ReadConn {
|
||||
.from = from,
|
||||
.version = remoteVersion,
|
||||
};
|
||||
}
|
||||
|
||||
/*
|
||||
* Coercion to `ServeProto::WriteConn`. This makes it easy to use the
|
||||
* factored out serve protocol searlizers with a
|
||||
* `LegacySSHStore::Connection`.
|
||||
*
|
||||
* The serve protocol connection types are unidirectional, unlike
|
||||
* this type.
|
||||
*/
|
||||
operator ServeProto::WriteConn ()
|
||||
{
|
||||
return ServeProto::WriteConn {
|
||||
.to = to,
|
||||
.version = remoteVersion,
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
|
@ -90,34 +55,31 @@ LegacySSHStore::LegacySSHStore(const std::string & scheme, const std::string & h
|
|||
ref<LegacySSHStore::Connection> LegacySSHStore::openConnection()
|
||||
{
|
||||
auto conn = make_ref<Connection>();
|
||||
conn->sshConn = master.startCommand(
|
||||
fmt("%s --serve --write", remoteProgram)
|
||||
+ (remoteStore.get() == "" ? "" : " --store " + shellEscape(remoteStore.get())));
|
||||
Strings command = remoteProgram.get();
|
||||
command.push_back("--serve");
|
||||
command.push_back("--write");
|
||||
if (remoteStore.get() != "") {
|
||||
command.push_back("--store");
|
||||
command.push_back(remoteStore.get());
|
||||
}
|
||||
conn->sshConn = master.startCommand(std::move(command));
|
||||
conn->to = FdSink(conn->sshConn->in.get());
|
||||
conn->from = FdSource(conn->sshConn->out.get());
|
||||
|
||||
StringSink saved;
|
||||
TeeSource tee(conn->from, saved);
|
||||
try {
|
||||
conn->to << SERVE_MAGIC_1 << SERVE_PROTOCOL_VERSION;
|
||||
conn->to.flush();
|
||||
|
||||
StringSink saved;
|
||||
try {
|
||||
TeeSource tee(conn->from, saved);
|
||||
unsigned int magic = readInt(tee);
|
||||
if (magic != SERVE_MAGIC_2)
|
||||
throw Error("'nix-store --serve' protocol mismatch from '%s'", host);
|
||||
} catch (SerialisationError & e) {
|
||||
/* In case the other side is waiting for our input,
|
||||
close it. */
|
||||
conn->sshConn->in.close();
|
||||
auto msg = conn->from.drain();
|
||||
throw Error("'nix-store --serve' protocol mismatch from '%s', got '%s'",
|
||||
host, chomp(saved.s + msg));
|
||||
conn->remoteVersion = ServeProto::BasicClientConnection::handshake(
|
||||
conn->to, tee, SERVE_PROTOCOL_VERSION, host);
|
||||
} catch (SerialisationError & e) {
|
||||
// in.close(): Don't let the remote block on us not writing.
|
||||
conn->sshConn->in.close();
|
||||
{
|
||||
NullSink nullSink;
|
||||
conn->from.drainInto(nullSink);
|
||||
}
|
||||
conn->remoteVersion = readInt(conn->from);
|
||||
if (GET_PROTOCOL_MAJOR(conn->remoteVersion) != 0x200)
|
||||
throw Error("unsupported 'nix-store --serve' protocol version on '%s'", host);
|
||||
|
||||
throw Error("'nix-store --serve' protocol mismatch from '%s', got '%s'",
|
||||
host, chomp(saved.s));
|
||||
} catch (EndOfFile & e) {
|
||||
throw Error("cannot connect to '%1%'", host);
|
||||
}
|
||||
|
@ -232,16 +194,16 @@ void LegacySSHStore::narFromPath(const StorePath & path, Sink & sink)
|
|||
}
|
||||
|
||||
|
||||
void LegacySSHStore::putBuildSettings(Connection & conn)
|
||||
static ServeProto::BuildOptions buildSettings()
|
||||
{
|
||||
ServeProto::write(*this, conn, ServeProto::BuildOptions {
|
||||
return {
|
||||
.maxSilentTime = settings.maxSilentTime,
|
||||
.buildTimeout = settings.buildTimeout,
|
||||
.maxLogSize = settings.maxLogSize,
|
||||
.nrRepeats = 0, // buildRepeat hasn't worked for ages anyway
|
||||
.enforceDeterminism = 0,
|
||||
.keepFailed = settings.keepFailed,
|
||||
});
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
|
@ -250,14 +212,7 @@ BuildResult LegacySSHStore::buildDerivation(const StorePath & drvPath, const Bas
|
|||
{
|
||||
auto conn(connections->get());
|
||||
|
||||
conn->to
|
||||
<< ServeProto::Command::BuildDerivation
|
||||
<< printStorePath(drvPath);
|
||||
writeDerivation(conn->to, *this, drv);
|
||||
|
||||
putBuildSettings(*conn);
|
||||
|
||||
conn->to.flush();
|
||||
conn->putBuildDerivationRequest(*this, drvPath, drv, buildSettings());
|
||||
|
||||
return ServeProto::Serialise<BuildResult>::read(*this, *conn);
|
||||
}
|
||||
|
@ -288,7 +243,7 @@ void LegacySSHStore::buildPaths(const std::vector<DerivedPath> & drvPaths, Build
|
|||
}
|
||||
conn->to << ss;
|
||||
|
||||
putBuildSettings(*conn);
|
||||
ServeProto::write(*this, *conn, buildSettings());
|
||||
|
||||
conn->to.flush();
|
||||
|
||||
|
@ -328,15 +283,8 @@ StorePathSet LegacySSHStore::queryValidPaths(const StorePathSet & paths,
|
|||
SubstituteFlag maybeSubstitute)
|
||||
{
|
||||
auto conn(connections->get());
|
||||
|
||||
conn->to
|
||||
<< ServeProto::Command::QueryValidPaths
|
||||
<< false // lock
|
||||
<< maybeSubstitute;
|
||||
ServeProto::write(*this, *conn, paths);
|
||||
conn->to.flush();
|
||||
|
||||
return ServeProto::Serialise<StorePathSet>::read(*this, *conn);
|
||||
return conn->queryValidPaths(*this,
|
||||
false, paths, maybeSubstitute);
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -13,7 +13,7 @@ struct LegacySSHStoreConfig : virtual CommonSSHStoreConfig
|
|||
{
|
||||
using CommonSSHStoreConfig::CommonSSHStoreConfig;
|
||||
|
||||
const Setting<Path> remoteProgram{this, "nix-store", "remote-program",
|
||||
const Setting<Strings> remoteProgram{this, {"nix-store"}, "remote-program",
|
||||
"Path to the `nix-store` executable on the remote machine."};
|
||||
|
||||
const Setting<int> maxConnections{this, 1, "max-connections",
|
||||
|
@ -78,10 +78,6 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor
|
|||
RepairFlag repair = NoRepair) override
|
||||
{ unsupported("addToStore"); }
|
||||
|
||||
private:
|
||||
|
||||
void putBuildSettings(Connection & conn);
|
||||
|
||||
public:
|
||||
|
||||
BuildResult buildDerivation(const StorePath & drvPath, const BasicDerivation & drv,
|
||||
|
|
|
@ -28,7 +28,7 @@ struct LocalStoreAccessor : PosixSourceAccessor
|
|||
auto [storePath, rest] = store->toStorePath(path.abs());
|
||||
if (requireValidPath && !store->isValidPath(storePath))
|
||||
throw InvalidPath("path '%1%' is not a valid store path", store->printStorePath(storePath));
|
||||
return CanonPath(store->getRealStoreDir()) + storePath.to_string() + CanonPath(rest);
|
||||
return CanonPath(store->getRealStoreDir()) / storePath.to_string() / CanonPath(rest);
|
||||
}
|
||||
|
||||
std::optional<Stat> maybeLstat(const CanonPath & path) override
|
||||
|
|
|
@ -1048,8 +1048,12 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
|
|||
bool narRead = false;
|
||||
Finally cleanup = [&]() {
|
||||
if (!narRead) {
|
||||
NullParseSink sink;
|
||||
parseDump(sink, source);
|
||||
NullFileSystemObjectSink sink;
|
||||
try {
|
||||
parseDump(sink, source);
|
||||
} catch (...) {
|
||||
ignoreException();
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
|
|
|
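Why the added try/catch above: the lambda runs from a `Finally`, usually while the stack is already unwinding, and an exception escaping there can terminate the process. A generic sketch of the pattern (the helper name is hypothetical):

    Finally cleanup = [&]() {
        try {
            drainRemainingInput();   // hypothetical stand-in for parseDump(sink, source)
        } catch (...) {
            ignoreException();       // never let cleanup code throw
        }
    };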
@ -32,11 +32,19 @@ Machine::Machine(decltype(storeUri) storeUri,
|
|||
systemTypes(systemTypes),
|
||||
sshKey(sshKey),
|
||||
maxJobs(maxJobs),
|
||||
speedFactor(std::max(1U, speedFactor)),
|
||||
speedFactor(speedFactor == 0.0f ? 1.0f : std::move(speedFactor)),
|
||||
supportedFeatures(supportedFeatures),
|
||||
mandatoryFeatures(mandatoryFeatures),
|
||||
sshPublicHostKey(sshPublicHostKey)
|
||||
{}
|
||||
{
|
||||
if (speedFactor < 0.0)
|
||||
throw UsageError("speed factor must be >= 0");
|
||||
}
|
||||
|
||||
bool Machine::systemSupported(const std::string & system) const
|
||||
{
|
||||
return system == "builtin" || (systemTypes.count(system) > 0);
|
||||
}
|
||||
|
||||
bool Machine::allSupported(const std::set<std::string> & features) const
|
||||
{
|
||||
|
@ -130,6 +138,14 @@ static Machine parseBuilderLine(const std::string & line)
|
|||
return result.value();
|
||||
};
|
||||
|
||||
auto parseFloatField = [&](size_t fieldIndex) {
|
||||
const auto result = string2Int<float>(tokens[fieldIndex]);
|
||||
if (!result) {
|
||||
throw FormatError("bad machine specification: failed to convert column #%lu in a row: '%s' to 'float'", fieldIndex, line);
|
||||
}
|
||||
return result.value();
|
||||
};
|
||||
|
||||
auto ensureBase64 = [&](size_t fieldIndex) {
|
||||
const auto & str = tokens[fieldIndex];
|
||||
try {
|
||||
|
@ -145,10 +161,10 @@ static Machine parseBuilderLine(const std::string & line)
|
|||
|
||||
return {
|
||||
tokens[0],
|
||||
isSet(1) ? tokenizeString<std::vector<std::string>>(tokens[1], ",") : std::vector<std::string>{settings.thisSystem},
|
||||
isSet(1) ? tokenizeString<std::set<std::string>>(tokens[1], ",") : std::set<std::string>{settings.thisSystem},
|
||||
isSet(2) ? tokens[2] : "",
|
||||
isSet(3) ? parseUnsignedIntField(3) : 1U,
|
||||
isSet(4) ? parseUnsignedIntField(4) : 1U,
|
||||
isSet(4) ? parseFloatField(4) : 1.0f,
|
||||
isSet(5) ? tokenizeString<std::set<std::string>>(tokens[5], ",") : std::set<std::string>{},
|
||||
isSet(6) ? tokenizeString<std::set<std::string>>(tokens[6], ",") : std::set<std::string>{},
|
||||
isSet(7) ? ensureBase64(7) : ""
|
||||
|
|
|
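Concretely, the speed-factor column of a `builders` entry is now parsed as a float instead of an unsigned int. An illustration against hypothetical builder lines (field order: URI, systems, SSH key, max-jobs, speed factor, ...; `parseBuilderLine` is file-local and is called here only for exposition):

    //   "ssh://mac x86_64-darwin - 2 0.5"  -> speedFactor 0.5
    //   "ssh://mac x86_64-darwin - 2 0"    -> speedFactor 1.0 (0 means "unspecified")
    //   "ssh://mac x86_64-darwin - 2 -1"   -> UsageError: speed factor must be >= 0
    auto m = parseBuilderLine("ssh://mac x86_64-darwin - 2 0.5");
    assert(m.speedFactor == 0.5f);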
@ -10,17 +10,30 @@ class Store;
|
|||
struct Machine {
|
||||
|
||||
const std::string storeUri;
|
||||
const std::vector<std::string> systemTypes;
|
||||
const std::set<std::string> systemTypes;
|
||||
const std::string sshKey;
|
||||
const unsigned int maxJobs;
|
||||
const unsigned int speedFactor;
|
||||
const float speedFactor;
|
||||
const std::set<std::string> supportedFeatures;
|
||||
const std::set<std::string> mandatoryFeatures;
|
||||
const std::string sshPublicHostKey;
|
||||
bool enabled = true;
|
||||
|
||||
/**
|
||||
* @return Whether `system` is either `"builtin"` or in
|
||||
* `systemTypes`.
|
||||
*/
|
||||
bool systemSupported(const std::string & system) const;
|
||||
|
||||
/**
|
||||
* @return Whether `features` is a subset of the union of `supportedFeatures` and
|
||||
* `mandatoryFeatures`
|
||||
*/
|
||||
bool allSupported(const std::set<std::string> & features) const;
|
||||
|
||||
/**
|
||||
 * @return Whether `mandatoryFeatures` is a subset of `features`
|
||||
*/
|
||||
bool mandatoryMet(const std::set<std::string> & features) const;
|
||||
|
||||
Machine(decltype(storeUri) storeUri,
|
||||
|
|
|
@ -19,6 +19,35 @@ struct NarMember
|
|||
std::map<std::string, NarMember> children;
|
||||
};
|
||||
|
||||
struct NarMemberConstructor : CreateRegularFileSink
|
||||
{
|
||||
private:
|
||||
|
||||
NarMember & narMember;
|
||||
|
||||
uint64_t & pos;
|
||||
|
||||
public:
|
||||
|
||||
NarMemberConstructor(NarMember & nm, uint64_t & pos)
|
||||
: narMember(nm), pos(pos)
|
||||
{ }
|
||||
|
||||
void isExecutable() override
|
||||
{
|
||||
narMember.stat.isExecutable = true;
|
||||
}
|
||||
|
||||
void preallocateContents(uint64_t size) override
|
||||
{
|
||||
narMember.stat.fileSize = size;
|
||||
narMember.stat.narOffset = pos;
|
||||
}
|
||||
|
||||
void operator () (std::string_view data) override
|
||||
{ }
|
||||
};
|
||||
|
||||
struct NarAccessor : public SourceAccessor
|
||||
{
|
||||
std::optional<const std::string> nar;
|
||||
|
@ -27,7 +56,7 @@ struct NarAccessor : public SourceAccessor
|
|||
|
||||
NarMember root;
|
||||
|
||||
struct NarIndexer : ParseSink, Source
|
||||
struct NarIndexer : FileSystemObjectSink, Source
|
||||
{
|
||||
NarAccessor & acc;
|
||||
Source & source;
|
||||
|
@ -42,7 +71,7 @@ struct NarAccessor : public SourceAccessor
|
|||
: acc(acc), source(source)
|
||||
{ }
|
||||
|
||||
void createMember(const Path & path, NarMember member)
|
||||
NarMember & createMember(const Path & path, NarMember member)
|
||||
{
|
||||
size_t level = std::count(path.begin(), path.end(), '/');
|
||||
while (parents.size() > level) parents.pop();
|
||||
|
@ -50,11 +79,14 @@ struct NarAccessor : public SourceAccessor
|
|||
if (parents.empty()) {
|
||||
acc.root = std::move(member);
|
||||
parents.push(&acc.root);
|
||||
return acc.root;
|
||||
} else {
|
||||
if (parents.top()->stat.type != Type::tDirectory)
|
||||
throw Error("NAR file missing parent directory of path '%s'", path);
|
||||
auto result = parents.top()->children.emplace(baseNameOf(path), std::move(member));
|
||||
parents.push(&result.first->second);
|
||||
auto & ref = result.first->second;
|
||||
parents.push(&ref);
|
||||
return ref;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -68,34 +100,18 @@ struct NarAccessor : public SourceAccessor
|
|||
} });
|
||||
}
|
||||
|
||||
void createRegularFile(const Path & path) override
|
||||
void createRegularFile(const Path & path, std::function<void(CreateRegularFileSink &)> func) override
|
||||
{
|
||||
createMember(path, NarMember{ .stat = {
|
||||
auto & nm = createMember(path, NarMember{ .stat = {
|
||||
.type = Type::tRegular,
|
||||
.fileSize = 0,
|
||||
.isExecutable = false,
|
||||
.narOffset = 0
|
||||
} });
|
||||
NarMemberConstructor nmc { nm, pos };
|
||||
func(nmc);
|
||||
}
|
||||
|
||||
void closeRegularFile() override
|
||||
{ }
|
||||
|
||||
void isExecutable() override
|
||||
{
|
||||
parents.top()->stat.isExecutable = true;
|
||||
}
|
||||
|
||||
void preallocateContents(uint64_t size) override
|
||||
{
|
||||
auto & st = parents.top()->stat;
|
||||
st.fileSize = size;
|
||||
st.narOffset = pos;
|
||||
}
|
||||
|
||||
void receiveContents(std::string_view data) override
|
||||
{ }
|
||||
|
||||
void createSymlink(const Path & path, const std::string & target) override
|
||||
{
|
||||
createMember(path,
|
||||
|
@ -261,7 +277,7 @@ json listNar(ref<SourceAccessor> accessor, const CanonPath & path, bool recurse)
|
|||
json &res2 = obj["entries"];
|
||||
for (const auto & [name, type] : accessor->readDirectory(path)) {
|
||||
if (recurse) {
|
||||
res2[name] = listNar(accessor, path + name, true);
|
||||
res2[name] = listNar(accessor, path / name, true);
|
||||
} else
|
||||
res2[name] = json::object();
|
||||
}
|
||||
|
|
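The accessor built from these pieces can then be walked with listNar, which now uses the `/` path concatenation introduced further down. A rough usage sketch, assuming makeNarAccessor is the existing factory from nar-accessor.hh and with error handling omitted:

#include <iostream>
#include "nar-accessor.hh"

using namespace nix;

void printNarListing(std::string nar)
{
    auto accessor = makeNarAccessor(std::move(nar));
    // Recurses over the whole tree; each directory entry is visited via
    // readDirectory() and concatenated with `path / name`.
    auto listing = listNar(accessor, CanonPath::root, /*recurse=*/true);
    std::cout << listing.dump(2) << std::endl;
}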
|
@ -3,6 +3,11 @@
|
|||
|
||||
namespace nix {
|
||||
|
||||
static constexpr std::string_view nameRegexStr = R"([0-9a-zA-Z\+\-_\?=][0-9a-zA-Z\+\-\._\?=]*)";
|
||||
|
||||
static constexpr std::string_view nameRegexStr =
|
||||
// This uses a negative lookahead: (?!\.\.?(-|$))
|
||||
// - deny ".", "..", or those strings followed by '-'
|
||||
// - when it's not those, start again at the start of the input and apply the next regex, which is [0-9a-zA-Z\+\-\._\?=]+
|
||||
R"((?!\.\.?(-|$))[0-9a-zA-Z\+\-\._\?=]+)";
|
||||
|
||||
}
|
||||
|
|
|
@ -9,9 +9,20 @@ static void checkName(std::string_view path, std::string_view name)
|
|||
if (name.size() > StorePath::MaxPathLen)
|
||||
throw BadStorePath("store path '%s' has a name longer than %d characters",
|
||||
path, StorePath::MaxPathLen);
|
||||
if (name[0] == '.')
|
||||
throw BadStorePath("store path '%s' starts with illegal character '.'", path);
|
||||
// See nameRegexStr for the definition
|
||||
if (name[0] == '.') {
|
||||
// check against "." and "..", followed by end or dash
|
||||
if (name.size() == 1)
|
||||
throw BadStorePath("store path '%s' has invalid name '%s'", path, name);
|
||||
if (name[1] == '-')
|
||||
throw BadStorePath("store path '%s' has invalid name '%s': first dash-separated component must not be '%s'", path, name, ".");
|
||||
if (name[1] == '.') {
|
||||
if (name.size() == 2)
|
||||
throw BadStorePath("store path '%s' has invalid name '%s'", path, name);
|
||||
if (name[2] == '-')
|
||||
throw BadStorePath("store path '%s' has invalid name '%s': first dash-separated component must not be '%s'", path, name, "..");
|
||||
}
|
||||
}
|
||||
for (auto c : name)
|
||||
if (!((c >= '0' && c <= '9')
|
||||
|| (c >= 'a' && c <= 'z')
|
||||
|
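Concretely, the lookahead plus the explicit checks above reject names whose first dash-separated component is "." or "..", while other leading dots remain legal. A sketch of the resulting behaviour through the public StorePath constructor; the 32-character hash part is filled with x's for readability and the rejection reasons are paraphrased from the messages in this diff.

#include "path.hh"

void nameCheckExamples()
{
    using nix::StorePath;
    StorePath ok1("xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx-foo");   // accepted
    StorePath ok2("xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx-.foo");  // accepted: a leading dot alone is fine
    // Each of these now throws BadStorePath:
    //   "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx-."       invalid name "."
    //   "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx-.."      invalid name ".."
    //   "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx-.-foo"   first component must not be "."
    //   "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx-..-bar"  first component must not be ".."
}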
|
69
src/libstore/serve-protocol-impl.cc
Normal file
|
@ -0,0 +1,69 @@
|
|||
#include "serve-protocol-impl.hh"
|
||||
#include "build-result.hh"
|
||||
#include "derivations.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
ServeProto::Version ServeProto::BasicClientConnection::handshake(
|
||||
BufferedSink & to,
|
||||
Source & from,
|
||||
ServeProto::Version localVersion,
|
||||
std::string_view host)
|
||||
{
|
||||
to << SERVE_MAGIC_1 << localVersion;
|
||||
to.flush();
|
||||
|
||||
unsigned int magic = readInt(from);
|
||||
if (magic != SERVE_MAGIC_2)
|
||||
throw Error("'nix-store --serve' protocol mismatch from '%s'", host);
|
||||
auto remoteVersion = readInt(from);
|
||||
if (GET_PROTOCOL_MAJOR(remoteVersion) != 0x200)
|
||||
throw Error("unsupported 'nix-store --serve' protocol version on '%s'", host);
|
||||
return remoteVersion;
|
||||
}
|
||||
|
||||
ServeProto::Version ServeProto::BasicServerConnection::handshake(
|
||||
BufferedSink & to,
|
||||
Source & from,
|
||||
ServeProto::Version localVersion)
|
||||
{
|
||||
unsigned int magic = readInt(from);
|
||||
if (magic != SERVE_MAGIC_1) throw Error("protocol mismatch");
|
||||
to << SERVE_MAGIC_2 << localVersion;
|
||||
to.flush();
|
||||
return readInt(from);
|
||||
}
|
||||
|
||||
|
||||
StorePathSet ServeProto::BasicClientConnection::queryValidPaths(
|
||||
const Store & store,
|
||||
bool lock, const StorePathSet & paths,
|
||||
SubstituteFlag maybeSubstitute)
|
||||
{
|
||||
to
|
||||
<< ServeProto::Command::QueryValidPaths
|
||||
<< lock
|
||||
<< maybeSubstitute;
|
||||
write(store, *this, paths);
|
||||
to.flush();
|
||||
|
||||
return Serialise<StorePathSet>::read(store, *this);
|
||||
}
|
||||
|
||||
|
||||
void ServeProto::BasicClientConnection::putBuildDerivationRequest(
|
||||
const Store & store,
|
||||
const StorePath & drvPath, const BasicDerivation & drv,
|
||||
const ServeProto::BuildOptions & options)
|
||||
{
|
||||
to
|
||||
<< ServeProto::Command::BuildDerivation
|
||||
<< store.printStorePath(drvPath);
|
||||
writeDerivation(to, store, drv);
|
||||
|
||||
ServeProto::write(store, *this, options);
|
||||
|
||||
to.flush();
|
||||
}
|
||||
|
||||
}
|
|
@ -10,6 +10,7 @@
|
|||
|
||||
#include "serve-protocol.hh"
|
||||
#include "length-prefixed-protocol-helper.hh"
|
||||
#include "store-api.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
|
@ -56,4 +57,101 @@ struct ServeProto::Serialise
|
|||
|
||||
/* protocol-specific templates */
|
||||
|
||||
struct ServeProto::BasicClientConnection
|
||||
{
|
||||
FdSink to;
|
||||
FdSource from;
|
||||
ServeProto::Version remoteVersion;
|
||||
|
||||
/**
|
||||
* Establishes connection, negotiating version.
|
||||
*
|
||||
* @return the version provided by the other side of the
|
||||
* connection.
|
||||
*
|
||||
* @param to Taken by reference to allow for various error handling
|
||||
* mechanisms.
|
||||
*
|
||||
* @param from Taken by reference to allow for various error
|
||||
* handling mechanisms.
|
||||
*
|
||||
* @param localVersion Our version which is sent over
|
||||
*
|
||||
* @param host Just used to add context to thrown exceptions.
|
||||
*/
|
||||
static ServeProto::Version handshake(
|
||||
BufferedSink & to,
|
||||
Source & from,
|
||||
ServeProto::Version localVersion,
|
||||
std::string_view host);
|
||||
|
||||
/**
|
||||
* Coercion to `ServeProto::ReadConn`. This makes it easy to use the
|
||||
* factored out serve protocol serializers with a
|
||||
* `LegacySSHStore::Connection`.
|
||||
*
|
||||
* The serve protocol connection types are unidirectional, unlike
|
||||
* this type.
|
||||
*/
|
||||
operator ServeProto::ReadConn ()
|
||||
{
|
||||
return ServeProto::ReadConn {
|
||||
.from = from,
|
||||
.version = remoteVersion,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Coercion to `ServeProto::WriteConn`. This makes it easy to use the
|
||||
* factored out serve protocol serializers with a
|
||||
* `LegacySSHStore::Connection`.
|
||||
*
|
||||
* The serve protocol connection types are unidirectional, unlike
|
||||
* this type.
|
||||
*/
|
||||
operator ServeProto::WriteConn ()
|
||||
{
|
||||
return ServeProto::WriteConn {
|
||||
.to = to,
|
||||
.version = remoteVersion,
|
||||
};
|
||||
}
|
||||
|
||||
StorePathSet queryValidPaths(
|
||||
const Store & remoteStore,
|
||||
bool lock, const StorePathSet & paths,
|
||||
SubstituteFlag maybeSubstitute);
|
||||
|
||||
/**
|
||||
* Just the request half, because Hydra may do other things between
|
||||
* issuing the request and reading the `BuildResult` response.
|
||||
*/
|
||||
void putBuildDerivationRequest(
|
||||
const Store & store,
|
||||
const StorePath & drvPath, const BasicDerivation & drv,
|
||||
const ServeProto::BuildOptions & options);
|
||||
};
|
||||
|
||||
struct ServeProto::BasicServerConnection
|
||||
{
|
||||
/**
|
||||
* Establishes connection, negotiating version.
|
||||
*
|
||||
* @return the version provided by the other side of the
|
||||
* connection.
|
||||
*
|
||||
* @param to Taken by reference to allow for various error handling
|
||||
* mechanisms.
|
||||
*
|
||||
* @param from Taken by reference to allow for various error
|
||||
* handling mechanisms.
|
||||
*
|
||||
* @param localVersion Our version which is sent over
|
||||
*/
|
||||
static ServeProto::Version handshake(
|
||||
BufferedSink & to,
|
||||
Source & from,
|
||||
ServeProto::Version localVersion);
|
||||
};
|
||||
|
||||
}
|
||||
|
|
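Putting the two halves together, a legacy-SSH style client could use the stripped-down connection like this. This is only a sketch: fdOut/fdIn are assumed to be pipes to a running `nix-store --serve`, and SERVE_PROTOCOL_VERSION is the local version constant from serve-protocol.hh.

#include "serve-protocol.hh"
#include "serve-protocol-impl.hh"
#include "store-api.hh"

using namespace nix;

StorePathSet queryOverServe(Store & store, int fdOut, int fdIn, const StorePathSet & paths)
{
    ServeProto::BasicClientConnection conn;
    conn.to = FdSink(fdOut);
    conn.from = FdSource(fdIn);
    conn.remoteVersion = ServeProto::BasicClientConnection::handshake(
        conn.to, conn.from, SERVE_PROTOCOL_VERSION, "example-host");
    // Ask which of `paths` the remote already has, without locking them
    // and without letting it substitute.
    return conn.queryValidPaths(store, /*lock=*/false, paths, NoSubstitute);
}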
|
@ -59,6 +59,14 @@ struct ServeProto
|
|||
Version version;
|
||||
};
|
||||
|
||||
/**
|
||||
* Stripped down serialization logic suitable for sharing with Hydra.
|
||||
*
|
||||
* @todo remove once Hydra uses Store abstraction consistently.
|
||||
*/
|
||||
struct BasicClientConnection;
|
||||
struct BasicServerConnection;
|
||||
|
||||
/**
|
||||
* Data type for canonical pairs of serialisers for the serve protocol.
|
||||
*
|
||||
|
|
|
@ -10,19 +10,19 @@
|
|||
|
||||
namespace nix {
|
||||
|
||||
SQLiteError::SQLiteError(const char *path, const char *errMsg, int errNo, int extendedErrNo, int offset, hintformat && hf)
|
||||
SQLiteError::SQLiteError(const char *path, const char *errMsg, int errNo, int extendedErrNo, int offset, HintFmt && hf)
|
||||
: Error(""), path(path), errMsg(errMsg), errNo(errNo), extendedErrNo(extendedErrNo), offset(offset)
|
||||
{
|
||||
auto offsetStr = (offset == -1) ? "" : "at offset " + std::to_string(offset) + ": ";
|
||||
err.msg = hintfmt("%s: %s%s, %s (in '%s')",
|
||||
normaltxt(hf.str()),
|
||||
err.msg = HintFmt("%s: %s%s, %s (in '%s')",
|
||||
Uncolored(hf.str()),
|
||||
offsetStr,
|
||||
sqlite3_errstr(extendedErrNo),
|
||||
errMsg,
|
||||
path ? path : "(in-memory)");
|
||||
}
|
||||
|
||||
[[noreturn]] void SQLiteError::throw_(sqlite3 * db, hintformat && hf)
|
||||
[[noreturn]] void SQLiteError::throw_(sqlite3 * db, HintFmt && hf)
|
||||
{
|
||||
int err = sqlite3_errcode(db);
|
||||
int exterr = sqlite3_extended_errcode(db);
|
||||
|
@ -33,7 +33,7 @@ SQLiteError::SQLiteError(const char *path, const char *errMsg, int errNo, int ex
|
|||
|
||||
if (err == SQLITE_BUSY || err == SQLITE_PROTOCOL) {
|
||||
auto exp = SQLiteBusy(path, errMsg, err, exterr, offset, std::move(hf));
|
||||
exp.err.msg = hintfmt(
|
||||
exp.err.msg = HintFmt(
|
||||
err == SQLITE_PROTOCOL
|
||||
? "SQLite database '%s' is busy (SQLITE_PROTOCOL)"
|
||||
: "SQLite database '%s' is busy",
|
||||
|
@ -249,7 +249,7 @@ void handleSQLiteBusy(const SQLiteBusy & e, time_t & nextWarning)
|
|||
if (now > nextWarning) {
|
||||
nextWarning = now + 10;
|
||||
logWarning({
|
||||
.msg = hintfmt(e.what())
|
||||
.msg = HintFmt(e.what())
|
||||
});
|
||||
}
|
||||
|
||||
|
|
|
@ -142,19 +142,19 @@ struct SQLiteError : Error
|
|||
|
||||
template<typename... Args>
|
||||
[[noreturn]] static void throw_(sqlite3 * db, const std::string & fs, const Args & ... args) {
|
||||
throw_(db, hintfmt(fs, args...));
|
||||
throw_(db, HintFmt(fs, args...));
|
||||
}
|
||||
|
||||
SQLiteError(const char *path, const char *errMsg, int errNo, int extendedErrNo, int offset, hintformat && hf);
|
||||
SQLiteError(const char *path, const char *errMsg, int errNo, int extendedErrNo, int offset, HintFmt && hf);
|
||||
|
||||
protected:
|
||||
|
||||
template<typename... Args>
|
||||
SQLiteError(const char *path, const char *errMsg, int errNo, int extendedErrNo, int offset, const std::string & fs, const Args & ... args)
|
||||
: SQLiteError(path, errNo, extendedErrNo, offset, hintfmt(fs, args...))
|
||||
: SQLiteError(path, errMsg, errNo, extendedErrNo, offset, HintFmt(fs, args...))
|
||||
{ }
|
||||
|
||||
[[noreturn]] static void throw_(sqlite3 * db, hintformat && hf);
|
||||
[[noreturn]] static void throw_(sqlite3 * db, HintFmt && hf);
|
||||
|
||||
};
|
||||
|
||||
|
|
|
@ -17,7 +17,7 @@ struct SSHStoreConfig : virtual RemoteStoreConfig, virtual CommonSSHStoreConfig
|
|||
using RemoteStoreConfig::RemoteStoreConfig;
|
||||
using CommonSSHStoreConfig::CommonSSHStoreConfig;
|
||||
|
||||
const Setting<Path> remoteProgram{this, "nix-daemon", "remote-program",
|
||||
const Setting<Strings> remoteProgram{this, {"nix-daemon"}, "remote-program",
|
||||
"Path to the `nix-daemon` executable on the remote machine."};
|
||||
|
||||
const std::string name() override { return "Experimental SSH Store"; }
|
||||
|
@ -212,14 +212,15 @@ public:
|
|||
ref<RemoteStore::Connection> SSHStore::openConnection()
|
||||
{
|
||||
auto conn = make_ref<Connection>();
|
||||
|
||||
std::string command = remoteProgram + " --stdio";
|
||||
if (remoteStore.get() != "")
|
||||
command += " --store " + shellEscape(remoteStore.get());
|
||||
for (auto & arg : extraRemoteProgramArgs)
|
||||
command += " " + shellEscape(arg);
|
||||
|
||||
conn->sshConn = master.startCommand(command);
|
||||
Strings command = remoteProgram.get();
|
||||
command.push_back("--stdio");
|
||||
if (remoteStore.get() != "") {
|
||||
command.push_back("--store");
|
||||
command.push_back(remoteStore.get());
|
||||
}
|
||||
command.insert(command.end(),
|
||||
extraRemoteProgramArgs.begin(), extraRemoteProgramArgs.end());
|
||||
conn->sshConn = master.startCommand(std::move(command));
|
||||
conn->to = FdSink(conn->sshConn->in.get());
|
||||
conn->from = FdSource(conn->sshConn->out.get());
|
||||
return conn;
|
||||
|
|
|
@ -52,7 +52,8 @@ bool SSHMaster::isMasterRunning() {
|
|||
return res.first == 0;
|
||||
}
|
||||
|
||||
std::unique_ptr<SSHMaster::Connection> SSHMaster::startCommand(const std::string & command)
|
||||
std::unique_ptr<SSHMaster::Connection> SSHMaster::startCommand(
|
||||
Strings && command, Strings && extraSshArgs)
|
||||
{
|
||||
Path socketPath = startMaster();
|
||||
|
||||
|
@ -84,18 +85,19 @@ std::unique_ptr<SSHMaster::Connection> SSHMaster::startCommand(const std::string
|
|||
|
||||
Strings args;
|
||||
|
||||
if (fakeSSH) {
|
||||
args = { "bash", "-c" };
|
||||
} else {
|
||||
if (!fakeSSH) {
|
||||
args = { "ssh", host.c_str(), "-x" };
|
||||
addCommonSSHOpts(args);
|
||||
if (socketPath != "")
|
||||
args.insert(args.end(), {"-S", socketPath});
|
||||
if (verbosity >= lvlChatty)
|
||||
args.push_back("-v");
|
||||
args.splice(args.end(), std::move(extraSshArgs));
|
||||
args.push_back("--");
|
||||
}
|
||||
|
||||
args.push_back(command);
|
||||
args.splice(args.end(), std::move(command));
|
||||
|
||||
execvp(args.begin()->c_str(), stringsToCharPtrs(args).data());
|
||||
|
||||
// could not exec ssh/bash
|
||||
|
|
|
@ -41,7 +41,16 @@ public:
|
|||
AutoCloseFD out, in;
|
||||
};
|
||||
|
||||
std::unique_ptr<Connection> startCommand(const std::string & command);
|
||||
/**
|
||||
* @param command The command (arg vector) to execute.
|
||||
*
|
||||
* @param extraSshArgs Extra args to pass to SSH (not the command to
|
||||
* execute). Will not be used when "fake SSHing" to the local
|
||||
* machine.
|
||||
*/
|
||||
std::unique_ptr<Connection> startCommand(
|
||||
Strings && command,
|
||||
Strings && extraSshArgs = {});
|
||||
|
||||
Path startMaster();
|
||||
};
|
||||
|
|
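With the string-concatenation variant gone, callers now build an argv-style list, as the SSHStore hunk above does. A minimal sketch, where `master` is an existing SSHMaster; the extra "-6" SSH flag and the store URI are only example values:

Strings command{"nix-daemon", "--stdio"};
command.push_back("--store");
command.push_back("ssh-ng://example");   // no shell quoting needed any more
auto conn = master.startCommand(std::move(command), Strings{"-6"});
// conn->in / conn->out are the remote process's stdin/stdout, as before.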
|
@ -352,12 +352,12 @@ ValidPathInfo Store::addToStoreSlow(
|
|||
information to narSink. */
|
||||
TeeSource tapped { *fileSource, narSink };
|
||||
|
||||
NullParseSink blank;
|
||||
NullFileSystemObjectSink blank;
|
||||
auto & parseSink = method.getFileIngestionMethod() == FileIngestionMethod::Flat
|
||||
? (ParseSink &) fileSink
|
||||
? (FileSystemObjectSink &) fileSink
|
||||
: method.getFileIngestionMethod() == FileIngestionMethod::Recursive
|
||||
? (ParseSink &) blank
|
||||
: (abort(), (ParseSink &)*(ParseSink *)nullptr); // handled both cases
|
||||
? (FileSystemObjectSink &) blank
|
||||
: (abort(), (FileSystemObjectSink &)*(FileSystemObjectSink *)nullptr); // handled both cases
|
||||
|
||||
/* The information that flows from tapped (besides being replicated in
|
||||
narSink), is now put in parseSink. */
|
||||
|
@ -614,38 +614,56 @@ static bool goodStorePath(const StorePath & expected, const StorePath & actual)
|
|||
}
|
||||
|
||||
|
||||
std::optional<std::shared_ptr<const ValidPathInfo>> Store::queryPathInfoFromClientCache(const StorePath & storePath)
|
||||
{
|
||||
auto hashPart = std::string(storePath.hashPart());
|
||||
|
||||
{
|
||||
auto res = state.lock()->pathInfoCache.get(std::string(storePath.to_string()));
|
||||
if (res && res->isKnownNow()) {
|
||||
stats.narInfoReadAverted++;
|
||||
if (res->didExist())
|
||||
return std::make_optional(res->value);
|
||||
else
|
||||
return std::make_optional(nullptr);
|
||||
}
|
||||
}
|
||||
|
||||
if (diskCache) {
|
||||
auto res = diskCache->lookupNarInfo(getUri(), hashPart);
|
||||
if (res.first != NarInfoDiskCache::oUnknown) {
|
||||
stats.narInfoReadAverted++;
|
||||
{
|
||||
auto state_(state.lock());
|
||||
state_->pathInfoCache.upsert(std::string(storePath.to_string()),
|
||||
res.first == NarInfoDiskCache::oInvalid ? PathInfoCacheValue{} : PathInfoCacheValue{ .value = res.second });
|
||||
if (res.first == NarInfoDiskCache::oInvalid ||
|
||||
!goodStorePath(storePath, res.second->path))
|
||||
return std::make_optional(nullptr);
|
||||
}
|
||||
assert(res.second);
|
||||
return std::make_optional(res.second);
|
||||
}
|
||||
}
|
||||
|
||||
return std::nullopt;
|
||||
}
|
||||
|
||||
|
||||
void Store::queryPathInfo(const StorePath & storePath,
|
||||
Callback<ref<const ValidPathInfo>> callback) noexcept
|
||||
{
|
||||
auto hashPart = std::string(storePath.hashPart());
|
||||
|
||||
try {
|
||||
{
|
||||
auto res = state.lock()->pathInfoCache.get(std::string(storePath.to_string()));
|
||||
if (res && res->isKnownNow()) {
|
||||
stats.narInfoReadAverted++;
|
||||
if (!res->didExist())
|
||||
throw InvalidPath("path '%s' is not valid", printStorePath(storePath));
|
||||
return callback(ref<const ValidPathInfo>(res->value));
|
||||
}
|
||||
auto r = queryPathInfoFromClientCache(storePath);
|
||||
if (r.has_value()) {
|
||||
std::shared_ptr<const ValidPathInfo> & info = *r;
|
||||
if (info)
|
||||
return callback(ref(info));
|
||||
else
|
||||
throw InvalidPath("path '%s' is not valid", printStorePath(storePath));
|
||||
}
|
||||
|
||||
if (diskCache) {
|
||||
auto res = diskCache->lookupNarInfo(getUri(), hashPart);
|
||||
if (res.first != NarInfoDiskCache::oUnknown) {
|
||||
stats.narInfoReadAverted++;
|
||||
{
|
||||
auto state_(state.lock());
|
||||
state_->pathInfoCache.upsert(std::string(storePath.to_string()),
|
||||
res.first == NarInfoDiskCache::oInvalid ? PathInfoCacheValue{} : PathInfoCacheValue{ .value = res.second });
|
||||
if (res.first == NarInfoDiskCache::oInvalid ||
|
||||
!goodStorePath(storePath, res.second->path))
|
||||
throw InvalidPath("path '%s' is not valid", printStorePath(storePath));
|
||||
}
|
||||
return callback(ref<const ValidPathInfo>(res.second));
|
||||
}
|
||||
}
|
||||
|
||||
} catch (...) { return callback.rethrow(); }
|
||||
|
||||
auto callbackPtr = std::make_shared<decltype(callback)>(std::move(callback));
|
||||
|
@ -757,7 +775,7 @@ void Store::substitutePaths(const StorePathSet & paths)
|
|||
if (!willSubstitute.empty())
|
||||
try {
|
||||
std::vector<DerivedPath> subs;
|
||||
for (auto & p : willSubstitute) subs.push_back(DerivedPath::Opaque{p});
|
||||
for (auto & p : willSubstitute) subs.emplace_back(DerivedPath::Opaque{p});
|
||||
buildPaths(subs);
|
||||
} catch (Error & e) {
|
||||
logWarning(e.info());
|
||||
|
@ -909,6 +927,11 @@ void copyStorePath(
|
|||
RepairFlag repair,
|
||||
CheckSigsFlag checkSigs)
|
||||
{
|
||||
/* Bail out early (before starting a download from srcStore) if
|
||||
dstStore already has this path. */
|
||||
if (!repair && dstStore.isValidPath(storePath))
|
||||
return;
|
||||
|
||||
auto srcUri = srcStore.getUri();
|
||||
auto dstUri = dstStore.getUri();
|
||||
auto storePathS = srcStore.printStorePath(storePath);
|
||||
|
|
|
@ -108,7 +108,7 @@ struct StoreConfig : public StoreDirConfig
|
|||
|
||||
StoreConfig() = delete;
|
||||
|
||||
StringSet getDefaultSystemFeatures();
|
||||
static StringSet getDefaultSystemFeatures();
|
||||
|
||||
virtual ~StoreConfig() { }
|
||||
|
||||
|
@ -282,6 +282,16 @@ public:
|
|||
void queryPathInfo(const StorePath & path,
|
||||
Callback<ref<const ValidPathInfo>> callback) noexcept;
|
||||
|
||||
/**
|
||||
* Version of queryPathInfo() that only queries the local narinfo cache and not
|
||||
* the actual store.
|
||||
*
|
||||
* @return `std::nullopt` if nothing is known about the path in the local narinfo cache.
|
||||
* @return `std::make_optional(nullptr)` if the path is known to not exist.
|
||||
* @return `std::make_optional(validPathInfo)` if the path is known to exist.
|
||||
*/
|
||||
std::optional<std::shared_ptr<const ValidPathInfo>> queryPathInfoFromClientCache(const StorePath & path);
|
||||
|
||||
/**
|
||||
* Query the information about a realisation.
|
||||
*/
|
||||
|
|
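A caller has to distinguish the three documented outcomes. A short sketch, using the existing printInfo and InvalidPath helpers; the messages are illustrative:

#include "store-api.hh"

using namespace nix;

void checkCacheOnly(Store & store, const StorePath & path)
{
    auto cached = store.queryPathInfoFromClientCache(path);
    if (!cached)
        printInfo("nothing cached locally for '%s'", store.printStorePath(path));
    else if (!*cached)
        throw InvalidPath("path '%s' is cached as invalid", store.printStorePath(path));
    else
        printInfo("cached narSize: %d", (*cached)->narSize);
}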
|
@ -77,20 +77,20 @@ void SourceAccessor::dumpPath(
|
|||
std::string name(i.first);
|
||||
size_t pos = i.first.find(caseHackSuffix);
|
||||
if (pos != std::string::npos) {
|
||||
debug("removing case hack suffix from '%s'", path + i.first);
|
||||
debug("removing case hack suffix from '%s'", path / i.first);
|
||||
name.erase(pos);
|
||||
}
|
||||
if (!unhacked.emplace(name, i.first).second)
|
||||
throw Error("file name collision in between '%s' and '%s'",
|
||||
(path + unhacked[name]),
|
||||
(path + i.first));
|
||||
(path / unhacked[name]),
|
||||
(path / i.first));
|
||||
} else
|
||||
unhacked.emplace(i.first, i.first);
|
||||
|
||||
for (auto & i : unhacked)
|
||||
if (filter((path + i.first).abs())) {
|
||||
if (filter((path / i.first).abs())) {
|
||||
sink << "entry" << "(" << "name" << i.first << "node";
|
||||
dump(path + i.second);
|
||||
dump(path / i.second);
|
||||
sink << ")";
|
||||
}
|
||||
}
|
||||
|
@ -110,8 +110,8 @@ void SourceAccessor::dumpPath(
|
|||
|
||||
time_t dumpPathAndGetMtime(const Path & path, Sink & sink, PathFilter & filter)
|
||||
{
|
||||
PosixSourceAccessor accessor;
|
||||
accessor.dumpPath(CanonPath::fromCwd(path), sink, filter);
|
||||
auto [accessor, canonPath] = PosixSourceAccessor::createAtRoot(path);
|
||||
accessor.dumpPath(canonPath, sink, filter);
|
||||
return accessor.mtime;
|
||||
}
|
||||
|
||||
|
@ -133,7 +133,7 @@ static SerialisationError badArchive(const std::string & s)
|
|||
}
|
||||
|
||||
|
||||
static void parseContents(ParseSink & sink, Source & source, const Path & path)
|
||||
static void parseContents(CreateRegularFileSink & sink, Source & source)
|
||||
{
|
||||
uint64_t size = readLongLong(source);
|
||||
|
||||
|
@ -147,7 +147,7 @@ static void parseContents(ParseSink & sink, Source & source, const Path & path)
|
|||
auto n = buf.size();
|
||||
if ((uint64_t)n > left) n = left;
|
||||
source(buf.data(), n);
|
||||
sink.receiveContents({buf.data(), n});
|
||||
sink({buf.data(), n});
|
||||
left -= n;
|
||||
}
|
||||
|
||||
|
@ -164,109 +164,121 @@ struct CaseInsensitiveCompare
|
|||
};
|
||||
|
||||
|
||||
static void parse(ParseSink & sink, Source & source, const Path & path)
|
||||
static void parse(FileSystemObjectSink & sink, Source & source, const Path & path)
|
||||
{
|
||||
std::string s;
|
||||
|
||||
s = readString(source);
|
||||
if (s != "(") throw badArchive("expected open tag");
|
||||
|
||||
enum { tpUnknown, tpRegular, tpDirectory, tpSymlink } type = tpUnknown;
|
||||
|
||||
std::map<Path, int, CaseInsensitiveCompare> names;
|
||||
|
||||
while (1) {
|
||||
auto getString = [&]() {
|
||||
checkInterrupt();
|
||||
return readString(source);
|
||||
};
|
||||
|
||||
s = readString(source);
|
||||
// For first iteration
|
||||
s = getString();
|
||||
|
||||
while (1) {
|
||||
|
||||
if (s == ")") {
|
||||
break;
|
||||
}
|
||||
|
||||
else if (s == "type") {
|
||||
if (type != tpUnknown)
|
||||
throw badArchive("multiple type fields");
|
||||
std::string t = readString(source);
|
||||
std::string t = getString();
|
||||
|
||||
if (t == "regular") {
|
||||
type = tpRegular;
|
||||
sink.createRegularFile(path);
|
||||
sink.createRegularFile(path, [&](auto & crf) {
|
||||
while (1) {
|
||||
s = getString();
|
||||
|
||||
if (s == "contents") {
|
||||
parseContents(crf, source);
|
||||
}
|
||||
|
||||
else if (s == "executable") {
|
||||
auto s2 = getString();
|
||||
if (s2 != "") throw badArchive("executable marker has non-empty value");
|
||||
crf.isExecutable();
|
||||
}
|
||||
|
||||
else break;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
else if (t == "directory") {
|
||||
sink.createDirectory(path);
|
||||
type = tpDirectory;
|
||||
|
||||
while (1) {
|
||||
s = getString();
|
||||
|
||||
if (s == "entry") {
|
||||
std::string name, prevName;
|
||||
|
||||
s = getString();
|
||||
if (s != "(") throw badArchive("expected open tag");
|
||||
|
||||
while (1) {
|
||||
s = getString();
|
||||
|
||||
if (s == ")") {
|
||||
break;
|
||||
} else if (s == "name") {
|
||||
name = getString();
|
||||
if (name.empty() || name == "." || name == ".." || name.find('/') != std::string::npos || name.find((char) 0) != std::string::npos)
|
||||
throw Error("NAR contains invalid file name '%1%'", name);
|
||||
if (name <= prevName)
|
||||
throw Error("NAR directory is not sorted");
|
||||
prevName = name;
|
||||
if (archiveSettings.useCaseHack) {
|
||||
auto i = names.find(name);
|
||||
if (i != names.end()) {
|
||||
debug("case collision between '%1%' and '%2%'", i->first, name);
|
||||
name += caseHackSuffix;
|
||||
name += std::to_string(++i->second);
|
||||
} else
|
||||
names[name] = 0;
|
||||
}
|
||||
} else if (s == "node") {
|
||||
if (name.empty()) throw badArchive("entry name missing");
|
||||
parse(sink, source, path + "/" + name);
|
||||
} else
|
||||
throw badArchive("unknown field " + s);
|
||||
}
|
||||
}
|
||||
|
||||
else break;
|
||||
}
|
||||
}
|
||||
|
||||
else if (t == "symlink") {
|
||||
type = tpSymlink;
|
||||
s = getString();
|
||||
|
||||
if (s != "target")
|
||||
throw badArchive("expected 'target' got " + s);
|
||||
|
||||
std::string target = getString();
|
||||
sink.createSymlink(path, target);
|
||||
|
||||
// for the next iteration
|
||||
s = getString();
|
||||
}
|
||||
|
||||
else throw badArchive("unknown file type " + t);
|
||||
|
||||
}
|
||||
|
||||
else if (s == "contents" && type == tpRegular) {
|
||||
parseContents(sink, source, path);
|
||||
sink.closeRegularFile();
|
||||
}
|
||||
|
||||
else if (s == "executable" && type == tpRegular) {
|
||||
auto s = readString(source);
|
||||
if (s != "") throw badArchive("executable marker has non-empty value");
|
||||
sink.isExecutable();
|
||||
}
|
||||
|
||||
else if (s == "entry" && type == tpDirectory) {
|
||||
std::string name, prevName;
|
||||
|
||||
s = readString(source);
|
||||
if (s != "(") throw badArchive("expected open tag");
|
||||
|
||||
while (1) {
|
||||
checkInterrupt();
|
||||
|
||||
s = readString(source);
|
||||
|
||||
if (s == ")") {
|
||||
break;
|
||||
} else if (s == "name") {
|
||||
name = readString(source);
|
||||
if (name.empty() || name == "." || name == ".." || name.find('/') != std::string::npos || name.find((char) 0) != std::string::npos)
|
||||
throw Error("NAR contains invalid file name '%1%'", name);
|
||||
if (name <= prevName)
|
||||
throw Error("NAR directory is not sorted");
|
||||
prevName = name;
|
||||
if (archiveSettings.useCaseHack) {
|
||||
auto i = names.find(name);
|
||||
if (i != names.end()) {
|
||||
debug("case collision between '%1%' and '%2%'", i->first, name);
|
||||
name += caseHackSuffix;
|
||||
name += std::to_string(++i->second);
|
||||
} else
|
||||
names[name] = 0;
|
||||
}
|
||||
} else if (s == "node") {
|
||||
if (name.empty()) throw badArchive("entry name missing");
|
||||
parse(sink, source, path + "/" + name);
|
||||
} else
|
||||
throw badArchive("unknown field " + s);
|
||||
}
|
||||
}
|
||||
|
||||
else if (s == "target" && type == tpSymlink) {
|
||||
std::string target = readString(source);
|
||||
sink.createSymlink(path, target);
|
||||
}
|
||||
|
||||
else
|
||||
throw badArchive("unknown field " + s);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
void parseDump(ParseSink & sink, Source & source)
|
||||
void parseDump(FileSystemObjectSink & sink, Source & source)
|
||||
{
|
||||
std::string version;
|
||||
try {
|
||||
|
@ -294,7 +306,7 @@ void copyNAR(Source & source, Sink & sink)
|
|||
// FIXME: if 'source' is the output of dumpPath() followed by EOF,
|
||||
// we should just forward all data directly without parsing.
|
||||
|
||||
NullParseSink parseSink; /* just parse the NAR */
|
||||
NullFileSystemObjectSink parseSink; /* just parse the NAR */
|
||||
|
||||
TeeSource wrapper { source, sink };
|
||||
|
||||
|
|
|
@ -73,7 +73,7 @@ time_t dumpPathAndGetMtime(const Path & path, Sink & sink,
|
|||
*/
|
||||
void dumpString(std::string_view s, Sink & sink);
|
||||
|
||||
void parseDump(ParseSink & sink, Source & source);
|
||||
void parseDump(FileSystemObjectSink & sink, Source & source);
|
||||
|
||||
void restorePath(const Path & path, Source & source);
|
||||
|
||||
|
|
|
@ -557,7 +557,7 @@ Args::Flag Args::Flag::mkHashFormatFlagWithDefault(std::string &&longName, HashF
|
|||
assert(*hf == nix::HashFormat::SRI);
|
||||
return Flag{
|
||||
.longName = std::move(longName),
|
||||
.description = "hash format ('base16', 'nix32', 'base64', 'sri'). Default: 'sri'",
|
||||
.description = "Hash format (`base16`, `nix32`, `base64`, `sri`). Default: `sri`.",
|
||||
.labels = {"hash-format"},
|
||||
.handler = {[hf](std::string s) {
|
||||
*hf = parseHashFormat(s);
|
||||
|
@ -569,7 +569,7 @@ Args::Flag Args::Flag::mkHashFormatFlagWithDefault(std::string &&longName, HashF
|
|||
Args::Flag Args::Flag::mkHashFormatOptFlag(std::string && longName, std::optional<HashFormat> * ohf) {
|
||||
return Flag{
|
||||
.longName = std::move(longName),
|
||||
.description = "hash format ('base16', 'nix32', 'base64', 'sri').",
|
||||
.description = "Hash format (`base16`, `nix32`, `base64`, `sri`).",
|
||||
.labels = {"hash-format"},
|
||||
.handler = {[ohf](std::string s) {
|
||||
*ohf = std::optional<HashFormat>{parseHashFormat(s)};
|
||||
|
@ -589,7 +589,7 @@ Args::Flag Args::Flag::mkHashAlgoFlag(std::string && longName, HashAlgorithm * h
|
|||
{
|
||||
return Flag{
|
||||
.longName = std::move(longName),
|
||||
.description = "hash algorithm ('md5', 'sha1', 'sha256', or 'sha512')",
|
||||
.description = "Hash algorithm (`md5`, `sha1`, `sha256`, or `sha512`).",
|
||||
.labels = {"hash-algo"},
|
||||
.handler = {[ha](std::string s) {
|
||||
*ha = parseHashAlgo(s);
|
||||
|
@ -602,7 +602,7 @@ Args::Flag Args::Flag::mkHashAlgoOptFlag(std::string && longName, std::optional<
|
|||
{
|
||||
return Flag{
|
||||
.longName = std::move(longName),
|
||||
.description = "hash algorithm ('md5', 'sha1', 'sha256', or 'sha512'). Optional as can also be gotten from SRI hash itself.",
|
||||
.description = "Hash algorithm (`md5`, `sha1`, `sha256`, or `sha512`). Can be omitted for SRI hashes.",
|
||||
.labels = {"hash-algo"},
|
||||
.handler = {[oha](std::string s) {
|
||||
*oha = std::optional<HashAlgorithm>{parseHashAlgo(s)};
|
||||
|
|
|
@ -177,7 +177,13 @@ protected:
|
|||
std::optional<ExperimentalFeature> experimentalFeature;
|
||||
|
||||
static Flag mkHashAlgoFlag(std::string && longName, HashAlgorithm * ha);
|
||||
static Flag mkHashAlgoFlag(HashAlgorithm * ha) {
|
||||
return mkHashAlgoFlag("hash-algo", ha);
|
||||
}
|
||||
static Flag mkHashAlgoOptFlag(std::string && longName, std::optional<HashAlgorithm> * oha);
|
||||
static Flag mkHashAlgoOptFlag(std::optional<HashAlgorithm> * oha) {
|
||||
return mkHashAlgoOptFlag("hash-algo", oha);
|
||||
}
|
||||
static Flag mkHashFormatFlagWithDefault(std::string && longName, HashFormat * hf);
|
||||
static Flag mkHashFormatOptFlag(std::string && longName, std::optional<HashFormat> * ohf);
|
||||
};
|
||||
|
|
|
@ -20,11 +20,6 @@ CanonPath::CanonPath(const std::vector<std::string> & elems)
|
|||
push(s);
|
||||
}
|
||||
|
||||
CanonPath CanonPath::fromCwd(std::string_view path)
|
||||
{
|
||||
return CanonPath(unchecked_t(), absPath(path));
|
||||
}
|
||||
|
||||
std::optional<CanonPath> CanonPath::parent() const
|
||||
{
|
||||
if (isRoot()) return std::nullopt;
|
||||
|
@ -63,7 +58,7 @@ void CanonPath::extend(const CanonPath & x)
|
|||
path += x.abs();
|
||||
}
|
||||
|
||||
CanonPath CanonPath::operator + (const CanonPath & x) const
|
||||
CanonPath CanonPath::operator / (const CanonPath & x) const
|
||||
{
|
||||
auto res = *this;
|
||||
res.extend(x);
|
||||
|
@ -78,7 +73,7 @@ void CanonPath::push(std::string_view c)
|
|||
path += c;
|
||||
}
|
||||
|
||||
CanonPath CanonPath::operator + (std::string_view c) const
|
||||
CanonPath CanonPath::operator / (std::string_view c) const
|
||||
{
|
||||
auto res = *this;
|
||||
res.push(c);
|
||||
|
|
|
@ -52,8 +52,6 @@ public:
|
|||
*/
|
||||
CanonPath(const std::vector<std::string> & elems);
|
||||
|
||||
static CanonPath fromCwd(std::string_view path = ".");
|
||||
|
||||
static CanonPath root;
|
||||
|
||||
/**
|
||||
|
@ -190,14 +188,14 @@ public:
|
|||
/**
|
||||
* Concatenate two paths.
|
||||
*/
|
||||
CanonPath operator + (const CanonPath & x) const;
|
||||
CanonPath operator / (const CanonPath & x) const;
|
||||
|
||||
/**
|
||||
* Add a path component to this one. It must not contain any slashes.
|
||||
*/
|
||||
void push(std::string_view c);
|
||||
|
||||
CanonPath operator + (std::string_view c) const;
|
||||
CanonPath operator / (std::string_view c) const;
|
||||
|
||||
/**
|
||||
* Check whether access to this path is allowed, which is the case
|
||||
|
|
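The renamed operators read naturally at call sites, which is what most of the `path + name` to `path / name` churn elsewhere in this commit is about. A tiny sketch:

#include <cassert>
#include "canon-path.hh"

void canonPathExample()
{
    using nix::CanonPath;
    CanonPath base("/nix/store");
    auto p = base / "abc" / "share";   // previously spelled with operator+
    assert(p.abs() == "/nix/store/abc/share");
}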
|
@ -1,3 +1,6 @@
|
|||
#include <algorithm>
|
||||
#include <cstring>
|
||||
|
||||
#include "current-process.hh"
|
||||
#include "namespaces.hh"
|
||||
#include "util.hh"
|
||||
|
@ -49,20 +52,27 @@ unsigned int getMaxCPU()
|
|||
//////////////////////////////////////////////////////////////////////
|
||||
|
||||
|
||||
#if __linux__
|
||||
rlim_t savedStackSize = 0;
|
||||
#endif
|
||||
|
||||
void setStackSize(size_t stackSize)
|
||||
void setStackSize(rlim_t stackSize)
|
||||
{
|
||||
#if __linux__
|
||||
struct rlimit limit;
|
||||
if (getrlimit(RLIMIT_STACK, &limit) == 0 && limit.rlim_cur < stackSize) {
|
||||
savedStackSize = limit.rlim_cur;
|
||||
limit.rlim_cur = stackSize;
|
||||
setrlimit(RLIMIT_STACK, &limit);
|
||||
limit.rlim_cur = std::min(stackSize, limit.rlim_max);
|
||||
if (setrlimit(RLIMIT_STACK, &limit) != 0) {
|
||||
logger->log(
|
||||
lvlError,
|
||||
HintFmt(
|
||||
"Failed to increase stack size from %1% to %2% (maximum allowed stack size: %3%): %4%",
|
||||
savedStackSize,
|
||||
stackSize,
|
||||
limit.rlim_max,
|
||||
std::strerror(errno)
|
||||
).str()
|
||||
);
|
||||
}
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
void restoreProcessContext(bool restoreMounts)
|
||||
|
@ -72,7 +82,6 @@ void restoreProcessContext(bool restoreMounts)
|
|||
restoreMountNamespace();
|
||||
}
|
||||
|
||||
#if __linux__
|
||||
if (savedStackSize) {
|
||||
struct rlimit limit;
|
||||
if (getrlimit(RLIMIT_STACK, &limit) == 0) {
|
||||
|
@ -80,7 +89,6 @@ void restoreProcessContext(bool restoreMounts)
|
|||
setrlimit(RLIMIT_STACK, &limit);
|
||||
}
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -2,6 +2,7 @@
|
|||
///@file
|
||||
|
||||
#include <optional>
|
||||
#include <sys/resource.h>
|
||||
|
||||
#include "types.hh"
|
||||
|
||||
|
@ -16,7 +17,7 @@ unsigned int getMaxCPU();
|
|||
/**
|
||||
* Change the stack size.
|
||||
*/
|
||||
void setStackSize(size_t stackSize);
|
||||
void setStackSize(rlim_t stackSize);
|
||||
|
||||
/**
|
||||
* Restore the original inherited Unix process context (such as signal
|
||||
|
|
|
@ -11,7 +11,7 @@
|
|||
|
||||
namespace nix {
|
||||
|
||||
void BaseError::addTrace(std::shared_ptr<Pos> && e, hintformat hint, bool frame)
|
||||
void BaseError::addTrace(std::shared_ptr<Pos> && e, HintFmt hint, bool frame)
|
||||
{
|
||||
err.traces.push_front(Trace { .pos = std::move(e), .hint = hint, .frame = frame });
|
||||
}
|
||||
|
@ -37,7 +37,7 @@ const std::string & BaseError::calcWhat() const
|
|||
|
||||
std::optional<std::string> ErrorInfo::programName = std::nullopt;
|
||||
|
||||
std::ostream & operator <<(std::ostream & os, const hintformat & hf)
|
||||
std::ostream & operator <<(std::ostream & os, const HintFmt & hf)
|
||||
{
|
||||
return os << hf.str();
|
||||
}
|
||||
|
@ -335,7 +335,7 @@ std::ostream & showErrorInfo(std::ostream & out, const ErrorInfo & einfo, bool s
|
|||
* try {
|
||||
* e->eval(*this, env, v);
|
||||
* if (v.type() != nAttrs)
|
||||
* throwTypeError("value is %1% while a set was expected", v);
|
||||
* error<TypeError>("expected a set but found %1%", v);
|
||||
* } catch (Error & e) {
|
||||
* e.addTrace(pos, errorCtx);
|
||||
* throw;
|
||||
|
@ -349,7 +349,7 @@ std::ostream & showErrorInfo(std::ostream & out, const ErrorInfo & einfo, bool s
|
|||
* e->eval(*this, env, v);
|
||||
* try {
|
||||
* if (v.type() != nAttrs)
|
||||
* throwTypeError("value is %1% while a set was expected", v);
|
||||
* error<TypeError>("expected a set but found %1%", v);
|
||||
* } catch (Error & e) {
|
||||
* e.addTrace(pos, errorCtx);
|
||||
* throw;
|
||||
|
@ -411,7 +411,7 @@ std::ostream & showErrorInfo(std::ostream & out, const ErrorInfo & einfo, bool s
|
|||
|
||||
oss << einfo.msg << "\n";
|
||||
|
||||
printPosMaybe(oss, "", einfo.errPos);
|
||||
printPosMaybe(oss, "", einfo.pos);
|
||||
|
||||
auto suggestions = einfo.suggestions.trim();
|
||||
if (!suggestions.suggestions.empty()) {
|
||||
|
|
|
@ -31,15 +31,6 @@
|
|||
#include <sys/stat.h>
|
||||
#include <fcntl.h>
|
||||
|
||||
/* Before 4.7, gcc's std::exception uses empty throw() specifiers for
|
||||
* its (virtual) destructor and what() in c++11 mode, in violation of spec
|
||||
*/
|
||||
#ifdef __GNUC__
|
||||
#if __GNUC__ < 4 || (__GNUC__ == 4 && __GNUC_MINOR__ < 7)
|
||||
#define EXCEPTION_NEEDS_THROW_SPEC
|
||||
#endif
|
||||
#endif
|
||||
|
||||
namespace nix {
|
||||
|
||||
|
||||
|
@ -72,7 +63,7 @@ void printCodeLines(std::ostream & out,
|
|||
|
||||
struct Trace {
|
||||
std::shared_ptr<Pos> pos;
|
||||
hintformat hint;
|
||||
HintFmt hint;
|
||||
bool frame;
|
||||
};
|
||||
|
||||
|
@ -83,10 +74,15 @@ inline bool operator>=(const Trace& lhs, const Trace& rhs);
|
|||
|
||||
struct ErrorInfo {
|
||||
Verbosity level;
|
||||
hintformat msg;
|
||||
std::shared_ptr<Pos> errPos;
|
||||
HintFmt msg;
|
||||
std::shared_ptr<Pos> pos;
|
||||
std::list<Trace> traces;
|
||||
|
||||
/**
|
||||
* Exit status.
|
||||
*/
|
||||
unsigned int status = 1;
|
||||
|
||||
Suggestions suggestions;
|
||||
|
||||
static std::optional<std::string> programName;
|
||||
|
@ -103,31 +99,34 @@ class BaseError : public std::exception
|
|||
protected:
|
||||
mutable ErrorInfo err;
|
||||
|
||||
/**
|
||||
* Cached formatted contents of `err.msg`.
|
||||
*/
|
||||
mutable std::optional<std::string> what_;
|
||||
/**
|
||||
* Format `err.msg` and set `what_` to the resulting value.
|
||||
*/
|
||||
const std::string & calcWhat() const;
|
||||
|
||||
public:
|
||||
unsigned int status = 1; // exit status
|
||||
|
||||
BaseError(const BaseError &) = default;
|
||||
|
||||
template<typename... Args>
|
||||
BaseError(unsigned int status, const Args & ... args)
|
||||
: err { .level = lvlError, .msg = hintfmt(args...) }
|
||||
, status(status)
|
||||
: err { .level = lvlError, .msg = HintFmt(args...), .status = status }
|
||||
{ }
|
||||
|
||||
template<typename... Args>
|
||||
explicit BaseError(const std::string & fs, const Args & ... args)
|
||||
: err { .level = lvlError, .msg = hintfmt(fs, args...) }
|
||||
: err { .level = lvlError, .msg = HintFmt(fs, args...) }
|
||||
{ }
|
||||
|
||||
template<typename... Args>
|
||||
BaseError(const Suggestions & sug, const Args & ... args)
|
||||
: err { .level = lvlError, .msg = hintfmt(args...), .suggestions = sug }
|
||||
: err { .level = lvlError, .msg = HintFmt(args...), .suggestions = sug }
|
||||
{ }
|
||||
|
||||
BaseError(hintformat hint)
|
||||
BaseError(HintFmt hint)
|
||||
: err { .level = lvlError, .msg = hint }
|
||||
{ }
|
||||
|
||||
|
@ -139,16 +138,19 @@ public:
|
|||
: err(e)
|
||||
{ }
|
||||
|
||||
#ifdef EXCEPTION_NEEDS_THROW_SPEC
|
||||
~BaseError() throw () { };
|
||||
const char * what() const throw () { return calcWhat().c_str(); }
|
||||
#else
|
||||
const char * what() const noexcept override { return calcWhat().c_str(); }
|
||||
#endif
|
||||
|
||||
const std::string & msg() const { return calcWhat(); }
|
||||
const ErrorInfo & info() const { calcWhat(); return err; }
|
||||
|
||||
void withExitStatus(unsigned int status)
|
||||
{
|
||||
err.status = status;
|
||||
}
|
||||
|
||||
void atPos(std::shared_ptr<Pos> pos) {
|
||||
err.pos = pos;
|
||||
}
|
||||
|
||||
void pushTrace(Trace trace)
|
||||
{
|
||||
err.traces.push_front(trace);
|
||||
|
@ -157,10 +159,10 @@ public:
|
|||
template<typename... Args>
|
||||
void addTrace(std::shared_ptr<Pos> && e, std::string_view fs, const Args & ... args)
|
||||
{
|
||||
addTrace(std::move(e), hintfmt(std::string(fs), args...));
|
||||
addTrace(std::move(e), HintFmt(std::string(fs), args...));
|
||||
}
|
||||
|
||||
void addTrace(std::shared_ptr<Pos> && e, hintformat hint, bool frame = false);
|
||||
void addTrace(std::shared_ptr<Pos> && e, HintFmt hint, bool frame = false);
|
||||
|
||||
bool hasTrace() const { return !err.traces.empty(); }
|
||||
|
||||
|
@ -212,8 +214,8 @@ public:
|
|||
SysError(int errNo, const Args & ... args)
|
||||
: SystemError(""), errNo(errNo)
|
||||
{
|
||||
auto hf = hintfmt(args...);
|
||||
err.msg = hintfmt("%1%: %2%", normaltxt(hf.str()), strerror(errNo));
|
||||
auto hf = HintFmt(args...);
|
||||
err.msg = HintFmt("%1%: %2%", Uncolored(hf.str()), strerror(errNo));
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
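Since the exit status now lives in ErrorInfo, both the status-taking constructor and withExitStatus() write the same field, and atPos() replaces direct assignment to the old errPos member. A sketch of how that looks from throwing code; the message text and status value are arbitrary:

#include <cassert>
#include "error.hh"

void statusExample()
{
    try {
        throw nix::Error(2, "build of '%s' failed", "example.drv");   // sets err.status = 2
    } catch (nix::BaseError & e) {
        e.addTrace(nullptr, "while doing something with '%s'", "example.drv");
        assert(e.info().status == 2);
    }
}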
|
@ -35,7 +35,7 @@ void dumpPath(
|
|||
/**
|
||||
* Restore a serialization of the given file system object.
|
||||
*
|
||||
* @TODO use an arbitrary `ParseSink`.
|
||||
* @TODO use an arbitrary `FileSystemObjectSink`.
|
||||
*/
|
||||
void restorePath(
|
||||
const Path & path,
|
||||
|
|
|
@ -25,7 +25,7 @@ Path absPath(PathView path, std::optional<PathView> dir, bool resolveSymlinks)
|
|||
{
|
||||
std::string scratch;
|
||||
|
||||
if (path[0] != '/') {
|
||||
if (path.empty() || path[0] != '/') {
|
||||
// In this case we need to call `canonPath` on a newly-created
|
||||
// string. We set `scratch` to that string first, and then set
|
||||
// `path` to `scratch`. This ensures the newly-created string
|
||||
|
|
|
@ -8,37 +8,64 @@
|
|||
|
||||
namespace nix {
|
||||
|
||||
|
||||
namespace {
|
||||
/**
|
||||
* Inherit some names from other namespaces for convenience.
|
||||
*/
|
||||
using boost::format;
|
||||
|
||||
|
||||
/**
|
||||
* A variadic template that does nothing. Useful to call a function
|
||||
* for all variadic arguments but ignoring the result.
|
||||
*/
|
||||
struct nop { template<typename... T> nop(T...) {} };
|
||||
|
||||
|
||||
/**
|
||||
* A helper for formatting strings. ‘fmt(format, a_0, ..., a_n)’ is
|
||||
* equivalent to ‘boost::format(format) % a_0 % ... %
|
||||
* ... a_n’. However, ‘fmt(s)’ is equivalent to ‘s’ (so no %-expansion
|
||||
* takes place).
|
||||
* A helper for writing `boost::format` expressions.
|
||||
*
|
||||
* These are equivalent:
|
||||
*
|
||||
* ```
|
||||
* formatHelper(formatter, a_0, ..., a_n)
|
||||
* formatter % a_0 % ... % a_n
|
||||
* ```
|
||||
*
|
||||
* With a single argument, `formatHelper(s)` is a no-op.
|
||||
*/
|
||||
template<class F>
|
||||
inline void formatHelper(F & f)
|
||||
{
|
||||
}
|
||||
{ }
|
||||
|
||||
template<class F, typename T, typename... Args>
|
||||
inline void formatHelper(F & f, const T & x, const Args & ... args)
|
||||
{
|
||||
// Interpolate one argument and then recurse.
|
||||
formatHelper(f % x, args...);
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the correct exceptions for `fmt`.
|
||||
*/
|
||||
void setExceptions(boost::format & fmt)
|
||||
{
|
||||
fmt.exceptions(
|
||||
boost::io::all_error_bits ^
|
||||
boost::io::too_many_args_bit ^
|
||||
boost::io::too_few_args_bit);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A helper for writing a `boost::format` expression to a string.
|
||||
*
|
||||
* These are (roughly) equivalent:
|
||||
*
|
||||
* ```
|
||||
* fmt(formatString, a_0, ..., a_n)
|
||||
* (boost::format(formatString) % a_0 % ... % a_n).str()
|
||||
* ```
|
||||
*
|
||||
* However, when called with a single argument, the string is returned
|
||||
* unchanged.
|
||||
*
|
||||
* If you write code like this:
|
||||
*
|
||||
* ```
|
||||
* std::cout << boost::format(stringFromUserInput) << std::endl;
|
||||
* ```
|
||||
*
|
||||
* And `stringFromUserInput` contains formatting placeholders like `%s`, then
|
||||
* the code will crash at runtime. `fmt` helps you avoid this pitfall.
|
||||
*/
|
||||
inline std::string fmt(const std::string & s)
|
||||
{
|
||||
return s;
|
||||
|
@ -58,68 +85,96 @@ template<typename... Args>
|
|||
inline std::string fmt(const std::string & fs, const Args & ... args)
|
||||
{
|
||||
boost::format f(fs);
|
||||
f.exceptions(boost::io::all_error_bits ^ boost::io::too_many_args_bit);
|
||||
setExceptions(f);
|
||||
formatHelper(f, args...);
|
||||
return f.str();
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------------
|
||||
// format function for hints in errors. same as fmt, except templated values
|
||||
// are always in yellow.
|
||||
|
||||
/**
|
||||
* Values wrapped in this struct are printed in magenta.
|
||||
*
|
||||
* By default, arguments to `HintFmt` are printed in magenta. To avoid this,
|
||||
* either wrap the argument in `Uncolored` or add a specialization of
|
||||
* `HintFmt::operator%`.
|
||||
*/
|
||||
template <class T>
|
||||
struct yellowtxt
|
||||
struct Magenta
|
||||
{
|
||||
yellowtxt(const T &s) : value(s) {}
|
||||
Magenta(const T &s) : value(s) {}
|
||||
const T & value;
|
||||
};
|
||||
|
||||
template <class T>
|
||||
std::ostream & operator<<(std::ostream & out, const yellowtxt<T> & y)
|
||||
std::ostream & operator<<(std::ostream & out, const Magenta<T> & y)
|
||||
{
|
||||
return out << ANSI_WARNING << y.value << ANSI_NORMAL;
|
||||
}
|
||||
|
||||
/**
|
||||
* Values wrapped in this class are printed without coloring.
|
||||
*
|
||||
* By default, arguments to `HintFmt` are printed in magenta (see `Magenta`).
|
||||
*/
|
||||
template <class T>
|
||||
struct normaltxt
|
||||
struct Uncolored
|
||||
{
|
||||
normaltxt(const T & s) : value(s) {}
|
||||
Uncolored(const T & s) : value(s) {}
|
||||
const T & value;
|
||||
};
|
||||
|
||||
template <class T>
|
||||
std::ostream & operator<<(std::ostream & out, const normaltxt<T> & y)
|
||||
std::ostream & operator<<(std::ostream & out, const Uncolored<T> & y)
|
||||
{
|
||||
return out << ANSI_NORMAL << y.value;
|
||||
}
|
||||
|
||||
class hintformat
|
||||
/**
|
||||
* A wrapper around `boost::format` which colors interpolated arguments in
|
||||
* magenta by default.
|
||||
*/
|
||||
class HintFmt
|
||||
{
|
||||
public:
|
||||
hintformat(const std::string & format) : fmt(format)
|
||||
{
|
||||
fmt.exceptions(boost::io::all_error_bits ^
|
||||
boost::io::too_many_args_bit ^
|
||||
boost::io::too_few_args_bit);
|
||||
}
|
||||
private:
|
||||
boost::format fmt;
|
||||
|
||||
hintformat(const hintformat & hf)
|
||||
public:
|
||||
/**
|
||||
* Format the given string literally, without interpolating format
|
||||
* placeholders.
|
||||
*/
|
||||
HintFmt(const std::string & literal)
|
||||
: HintFmt("%s", Uncolored(literal))
|
||||
{ }
|
||||
|
||||
/**
|
||||
* Interpolate the given arguments into the format string.
|
||||
*/
|
||||
template<typename... Args>
|
||||
HintFmt(const std::string & format, const Args & ... args)
|
||||
: HintFmt(boost::format(format), args...)
|
||||
{ }
|
||||
|
||||
HintFmt(const HintFmt & hf)
|
||||
: fmt(hf.fmt)
|
||||
{ }
|
||||
|
||||
hintformat(format && fmt)
|
||||
template<typename... Args>
|
||||
HintFmt(boost::format && fmt, const Args & ... args)
|
||||
: fmt(std::move(fmt))
|
||||
{ }
|
||||
{
|
||||
setExceptions(fmt);
|
||||
formatHelper(*this, args...);
|
||||
}
|
||||
|
||||
template<class T>
|
||||
hintformat & operator%(const T & value)
|
||||
HintFmt & operator%(const T & value)
|
||||
{
|
||||
fmt % yellowtxt(value);
|
||||
fmt % Magenta(value);
|
||||
return *this;
|
||||
}
|
||||
|
||||
template<class T>
|
||||
hintformat & operator%(const normaltxt<T> & value)
|
||||
HintFmt & operator%(const Uncolored<T> & value)
|
||||
{
|
||||
fmt % value.value;
|
||||
return *this;
|
||||
|
@ -129,25 +184,8 @@ public:
|
|||
{
|
||||
return fmt.str();
|
||||
}
|
||||
|
||||
private:
|
||||
format fmt;
|
||||
};
|
||||
|
||||
std::ostream & operator<<(std::ostream & os, const hintformat & hf);
|
||||
|
||||
template<typename... Args>
|
||||
inline hintformat hintfmt(const std::string & fs, const Args & ... args)
|
||||
{
|
||||
hintformat f(fs);
|
||||
formatHelper(f, args...);
|
||||
return f;
|
||||
}
|
||||
|
||||
inline hintformat hintfmt(const std::string & plain_string)
|
||||
{
|
||||
// we won't be receiving any args in this case, so just print the original string
|
||||
return hintfmt("%s", normaltxt(plain_string));
|
||||
}
|
||||
std::ostream & operator<<(std::ostream & os, const HintFmt & hf);
|
||||
|
||||
}
|
||||
|
|
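The renaming is mechanical at most call sites, but two behaviours are worth spelling out: interpolated arguments are colored unless wrapped in Uncolored, and the single-argument constructor now treats its input as a literal. A short sketch; the message text is arbitrary:

#include <cerrno>
#include <cstring>
#include "error.hh"
#include "logging.hh"

void hintFmtExample(const std::string & path)
{
    using namespace nix;
    // Interpolated values are wrapped in Magenta; Uncolored opts out.
    logWarning({ .msg = HintFmt("cannot open '%s': %s", path, Uncolored(strerror(errno))) });
    // A plain string is printed literally: '%' placeholders in it are not expanded.
    logWarning({ .msg = HintFmt(path) });
}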
|
@ -7,7 +7,7 @@ namespace nix {
|
|||
|
||||
void copyRecursive(
|
||||
SourceAccessor & accessor, const CanonPath & from,
|
||||
ParseSink & sink, const Path & to)
|
||||
FileSystemObjectSink & sink, const Path & to)
|
||||
{
|
||||
auto stat = accessor.lstat(from);
|
||||
|
||||
|
@ -19,16 +19,12 @@ void copyRecursive(
|
|||
|
||||
case SourceAccessor::tRegular:
|
||||
{
|
||||
sink.createRegularFile(to);
|
||||
if (stat.isExecutable)
|
||||
sink.isExecutable();
|
||||
LambdaSink sink2 {
|
||||
[&](auto d) {
|
||||
sink.receiveContents(d);
|
||||
}
|
||||
};
|
||||
accessor.readFile(from, sink2, [&](uint64_t size) {
|
||||
sink.preallocateContents(size);
|
||||
sink.createRegularFile(to, [&](CreateRegularFileSink & crf) {
|
||||
if (stat.isExecutable)
|
||||
crf.isExecutable();
|
||||
accessor.readFile(from, crf, [&](uint64_t size) {
|
||||
crf.preallocateContents(size);
|
||||
});
|
||||
});
|
||||
break;
|
||||
}
|
||||
|
@ -38,7 +34,7 @@ void copyRecursive(
|
|||
sink.createDirectory(to);
|
||||
for (auto & [name, _] : accessor.readDirectory(from)) {
|
||||
copyRecursive(
|
||||
accessor, from + name,
|
||||
accessor, from / name,
|
||||
sink, to + "/" + name);
|
||||
break;
|
||||
}
|
||||
|
@ -71,20 +67,24 @@ void RestoreSink::createDirectory(const Path & path)
|
|||
throw SysError("creating directory '%1%'", p);
|
||||
};
|
||||
|
||||
void RestoreSink::createRegularFile(const Path & path)
|
||||
struct RestoreRegularFile : CreateRegularFileSink {
|
||||
AutoCloseFD fd;
|
||||
|
||||
void operator () (std::string_view data) override;
|
||||
void isExecutable() override;
|
||||
void preallocateContents(uint64_t size) override;
|
||||
};
|
||||
|
||||
void RestoreSink::createRegularFile(const Path & path, std::function<void(CreateRegularFileSink &)> func)
|
||||
{
|
||||
Path p = dstPath + path;
|
||||
fd = open(p.c_str(), O_CREAT | O_EXCL | O_WRONLY | O_CLOEXEC, 0666);
|
||||
if (!fd) throw SysError("creating file '%1%'", p);
|
||||
RestoreRegularFile crf;
|
||||
crf.fd = open(p.c_str(), O_CREAT | O_EXCL | O_WRONLY | O_CLOEXEC, 0666);
|
||||
if (!crf.fd) throw SysError("creating file '%1%'", p);
|
||||
func(crf);
|
||||
}
|
||||
|
||||
void RestoreSink::closeRegularFile()
|
||||
{
|
||||
/* Call close explicitly to make sure the error is checked */
|
||||
fd.close();
|
||||
}
|
||||
|
||||
void RestoreSink::isExecutable()
|
||||
void RestoreRegularFile::isExecutable()
|
||||
{
|
||||
struct stat st;
|
||||
if (fstat(fd.get(), &st) == -1)
|
||||
|
@ -93,7 +93,7 @@ void RestoreSink::isExecutable()
|
|||
throw SysError("fchmod");
|
||||
}
|
||||
|
||||
void RestoreSink::preallocateContents(uint64_t len)
|
||||
void RestoreRegularFile::preallocateContents(uint64_t len)
|
||||
{
|
||||
if (!restoreSinkSettings.preallocateContents)
|
||||
return;
|
||||
|
@ -111,7 +111,7 @@ void RestoreSink::preallocateContents(uint64_t len)
|
|||
#endif
|
||||
}
|
||||
|
||||
void RestoreSink::receiveContents(std::string_view data)
|
||||
void RestoreRegularFile::operator () (std::string_view data)
|
||||
{
|
||||
writeFull(fd.get(), data);
|
||||
}
|
||||
|
@ -122,4 +122,32 @@ void RestoreSink::createSymlink(const Path & path, const std::string & target)
|
|||
nix::createSymlink(target, p);
|
||||
}
|
||||
|
||||
|
||||
void RegularFileSink::createRegularFile(const Path & path, std::function<void(CreateRegularFileSink &)> func)
|
||||
{
|
||||
struct CRF : CreateRegularFileSink {
|
||||
RegularFileSink & back;
|
||||
CRF(RegularFileSink & back) : back(back) {}
|
||||
void operator () (std::string_view data) override
|
||||
{
|
||||
back.sink(data);
|
||||
}
|
||||
void isExecutable() override {}
|
||||
} crf { *this };
|
||||
func(crf);
|
||||
}
|
||||
|
||||
|
||||
void NullFileSystemObjectSink::createRegularFile(const Path & path, std::function<void(CreateRegularFileSink &)> func)
|
||||
{
|
||||
struct : CreateRegularFileSink {
|
||||
void operator () (std::string_view data) override {}
|
||||
void isExecutable() override {}
|
||||
} crf;
|
||||
// Even though `NullFileSystemObjectSink` doesn't do anything, it's important
|
||||
// that we call the function, to e.g. advance the parser using this
|
||||
// sink.
|
||||
func(crf);
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -9,18 +9,13 @@
|
|||
namespace nix {
|
||||
|
||||
/**
|
||||
* \todo Fix this API, it sucks.
|
||||
* Actions on an open regular file in the process of creating it.
|
||||
*
|
||||
* See `FileSystemObjectSink::createRegularFile`.
|
||||
*/
|
||||
struct ParseSink
|
||||
struct CreateRegularFileSink : Sink
|
||||
{
|
||||
virtual void createDirectory(const Path & path) = 0;
|
||||
|
||||
virtual void createRegularFile(const Path & path) = 0;
|
||||
virtual void receiveContents(std::string_view data) = 0;
|
||||
virtual void isExecutable() = 0;
|
||||
virtual void closeRegularFile() = 0;
|
||||
|
||||
virtual void createSymlink(const Path & path, const std::string & target) = 0;
|
||||
|
||||
/**
|
||||
* An optimization. By default, do nothing.
|
||||
|
@ -28,46 +23,55 @@ struct ParseSink
|
|||
virtual void preallocateContents(uint64_t size) { };
|
||||
};
|
||||
|
||||
|
||||
struct FileSystemObjectSink
|
||||
{
|
||||
virtual void createDirectory(const Path & path) = 0;
|
||||
|
||||
/**
|
||||
* This function in general is not re-entrant. Only one file can be
|
||||
* written at a time.
|
||||
*/
|
||||
virtual void createRegularFile(
|
||||
const Path & path,
|
||||
std::function<void(CreateRegularFileSink &)>) = 0;
|
||||
|
||||
virtual void createSymlink(const Path & path, const std::string & target) = 0;
|
||||
};
|
||||
|
||||
/**
|
||||
* Recusively copy file system objects from the source into the sink.
|
||||
* Recursively copy file system objects from the source into the sink.
|
||||
*/
|
||||
void copyRecursive(
|
||||
SourceAccessor & accessor, const CanonPath & sourcePath,
|
||||
ParseSink & sink, const Path & destPath);
|
||||
FileSystemObjectSink & sink, const Path & destPath);
|
||||
|
||||
/**
|
||||
* Ignore everything and do nothing
|
||||
*/
|
||||
struct NullParseSink : ParseSink
|
||||
struct NullFileSystemObjectSink : FileSystemObjectSink
|
||||
{
|
||||
void createDirectory(const Path & path) override { }
|
||||
void receiveContents(std::string_view data) override { }
|
||||
void createSymlink(const Path & path, const std::string & target) override { }
|
||||
void createRegularFile(const Path & path) override { }
|
||||
void closeRegularFile() override { }
|
||||
void isExecutable() override { }
|
||||
void createRegularFile(
|
||||
const Path & path,
|
||||
std::function<void(CreateRegularFileSink &)>) override;
|
||||
};
|
||||
|
||||
/**
|
||||
* Write files at the given path
|
||||
*/
|
||||
struct RestoreSink : ParseSink
|
||||
struct RestoreSink : FileSystemObjectSink
|
||||
{
|
||||
Path dstPath;
|
||||
|
||||
void createDirectory(const Path & path) override;
|
||||
|
||||
void createRegularFile(const Path & path) override;
|
||||
void receiveContents(std::string_view data) override;
|
||||
void isExecutable() override;
|
||||
void closeRegularFile() override;
|
||||
void createRegularFile(
|
||||
const Path & path,
|
||||
std::function<void(CreateRegularFileSink &)>) override;
|
||||
|
||||
void createSymlink(const Path & path, const std::string & target) override;
|
||||
|
||||
void preallocateContents(uint64_t size) override;
|
||||
|
||||
private:
|
||||
AutoCloseFD fd;
|
||||
};
|
||||
|
||||
/**
|
||||
|
@ -75,7 +79,7 @@ private:
|
|||
* `receiveContents` to the underlying `Sink`. For anything but a single
|
||||
* file, set `regular = false` so the caller can fail accordingly.
|
||||
*/
|
||||
struct RegularFileSink : ParseSink
|
||||
struct RegularFileSink : FileSystemObjectSink
|
||||
{
|
||||
bool regular = true;
|
||||
Sink & sink;
|
||||
|
@@ -87,19 +91,14 @@ struct RegularFileSink : ParseSink
        regular = false;
    }

    void receiveContents(std::string_view data) override
    {
        sink(data);
    }

    void createSymlink(const Path & path, const std::string & target) override
    {
        regular = false;
    }

    void createRegularFile(const Path & path) override { }
    void closeRegularFile() override { }
    void isExecutable() override { }
    void createRegularFile(
        const Path & path,
        std::function<void(CreateRegularFileSink &)>) override;
};

}

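The hunks above split the old `ParseSink` into a `FileSystemObjectSink` plus a per-file `CreateRegularFileSink`: instead of the flat `createRegularFile`/`receiveContents`/`closeRegularFile` sequence, a file's contents and executable bit can only be written inside the callback passed to `createRegularFile`. Below is a minimal sketch of a custom sink against the new shape, using simplified stand-ins for the declarations above (the `CountingSink` name and its byte-counting behaviour are illustrative, not part of the patch):

#include <cstdint>
#include <functional>
#include <iostream>
#include <string>
#include <string_view>

// Simplified stand-ins for the interfaces declared in the hunk above
// (the real CreateRegularFileSink additionally derives from Sink).
using Path = std::string;

struct CreateRegularFileSink
{
    virtual void operator () (std::string_view data) = 0;
    virtual void isExecutable() = 0;
    virtual void preallocateContents(uint64_t size) { }
};

struct FileSystemObjectSink
{
    virtual void createDirectory(const Path & path) = 0;
    virtual void createRegularFile(
        const Path & path, std::function<void(CreateRegularFileSink &)> fn) = 0;
    virtual void createSymlink(const Path & path, const std::string & target) = 0;
};

// Hypothetical sink that only counts the bytes of each regular file.
struct CountingSink : FileSystemObjectSink
{
    void createDirectory(const Path & path) override { }
    void createSymlink(const Path & path, const std::string & target) override { }

    void createRegularFile(
        const Path & path, std::function<void(CreateRegularFileSink &)> fn) override
    {
        struct Counter : CreateRegularFileSink
        {
            uint64_t n = 0;
            void operator () (std::string_view data) override { n += data.size(); }
            void isExecutable() override { }
        } counter;
        fn(counter);            // the file is only writable while fn runs
        std::cout << path << ": " << counter.n << " bytes\n";
    }
};

Compared to the old interface, a caller can no longer forget `closeRegularFile`, and the per-file state lives for the duration of the callback rather than in the sink object.
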
@@ -52,24 +52,22 @@ static std::string getString(Source & source, int n)
    return v;
}


void parse(
    ParseSink & sink,
void parseBlob(
    FileSystemObjectSink & sink,
    const Path & sinkPath,
    Source & source,
    std::function<SinkHook> hook,
    bool executable,
    const ExperimentalFeatureSettings & xpSettings)
{
    xpSettings.require(Xp::GitHashing);

    auto type = getString(source, 5);

    if (type == "blob ") {
        sink.createRegularFile(sinkPath);
        sink.createRegularFile(sinkPath, [&](auto & crf) {
            if (executable)
                crf.isExecutable();

        unsigned long long size = std::stoi(getStringUntil(source, 0));

        sink.preallocateContents(size);
            crf.preallocateContents(size);

        unsigned long long left = size;
        std::string buf;
@@ -79,47 +77,91 @@ void parse(
            checkInterrupt();
            buf.resize(std::min((unsigned long long)buf.capacity(), left));
            source(buf);
            sink.receiveContents(buf);
            crf(buf);
            left -= buf.size();
        }
    });
}

void parseTree(
    FileSystemObjectSink & sink,
    const Path & sinkPath,
    Source & source,
    std::function<SinkHook> hook,
    const ExperimentalFeatureSettings & xpSettings)
{
    unsigned long long size = std::stoi(getStringUntil(source, 0));
    unsigned long long left = size;

    sink.createDirectory(sinkPath);

    while (left) {
        std::string perms = getStringUntil(source, ' ');
        left -= perms.size();
        left -= 1;

        RawMode rawMode = std::stoi(perms, 0, 8);
        auto modeOpt = decodeMode(rawMode);
        if (!modeOpt)
            throw Error("Unknown Git permission: %o", perms);
        auto mode = std::move(*modeOpt);

        std::string name = getStringUntil(source, '\0');
        left -= name.size();
        left -= 1;

        std::string hashs = getString(source, 20);
        left -= 20;

        Hash hash(HashAlgorithm::SHA1);
        std::copy(hashs.begin(), hashs.end(), hash.hash);

        hook(name, TreeEntry {
            .mode = mode,
            .hash = hash,
        });
    }
}

ObjectType parseObjectType(
    Source & source,
    const ExperimentalFeatureSettings & xpSettings)
{
    xpSettings.require(Xp::GitHashing);

    auto type = getString(source, 5);

    if (type == "blob ") {
        return ObjectType::Blob;
    } else if (type == "tree ") {
        unsigned long long size = std::stoi(getStringUntil(source, 0));
        unsigned long long left = size;

        sink.createDirectory(sinkPath);

        while (left) {
            std::string perms = getStringUntil(source, ' ');
            left -= perms.size();
            left -= 1;

            RawMode rawMode = std::stoi(perms, 0, 8);
            auto modeOpt = decodeMode(rawMode);
            if (!modeOpt)
                throw Error("Unknown Git permission: %o", perms);
            auto mode = std::move(*modeOpt);

            std::string name = getStringUntil(source, '\0');
            left -= name.size();
            left -= 1;

            std::string hashs = getString(source, 20);
            left -= 20;

            Hash hash(HashAlgorithm::SHA1);
            std::copy(hashs.begin(), hashs.end(), hash.hash);

            hook(name, TreeEntry {
                .mode = mode,
                .hash = hash,
            });

            if (mode == Mode::Executable)
                sink.isExecutable();
        }
        return ObjectType::Tree;
    } else throw Error("input doesn't look like a Git object");
}

void parse(
    FileSystemObjectSink & sink,
    const Path & sinkPath,
    Source & source,
    bool executable,
    std::function<SinkHook> hook,
    const ExperimentalFeatureSettings & xpSettings)
{
    xpSettings.require(Xp::GitHashing);

    auto type = parseObjectType(source, xpSettings);

    switch (type) {
    case ObjectType::Blob:
        parseBlob(sink, sinkPath, source, executable, xpSettings);
        break;
    case ObjectType::Tree:
        parseTree(sink, sinkPath, source, hook, xpSettings);
        break;
    default:
        assert(false);
    };
}


std::optional<Mode> convertMode(SourceAccessor::Type type)
{
@@ -133,9 +175,9 @@ std::optional<Mode> convertMode(SourceAccessor::Type type)
}


void restore(ParseSink & sink, Source & source, std::function<RestoreHook> hook)
void restore(FileSystemObjectSink & sink, Source & source, std::function<RestoreHook> hook)
{
    parse(sink, "", source, [&](Path name, TreeEntry entry) {
    parse(sink, "", source, false, [&](Path name, TreeEntry entry) {
        auto [accessor, from] = hook(entry.hash);
        auto stat = accessor->lstat(from);
        auto gotOpt = convertMode(stat.type);
@@ -217,7 +259,7 @@ Mode dump(
{
    Tree entries;
    for (auto & [name, _] : accessor.readDirectory(path)) {
        auto child = path + name;
        auto child = path / name;
        if (!filter(child.abs())) continue;

        auto entry = hook(child);

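For orientation, the tree entries that `parseTree` above consumes (and that `dump` produces) follow Git's raw tree encoding: an octal mode, a space, the entry name, a NUL byte, then the 20 raw SHA-1 bytes of the referenced object, repeated until `size` bytes have been read. A hedged sketch of the inverse direction, producing one such entry (the helper name is made up and not part of this patch):

#include <cstdint>
#include <cstdio>
#include <string>

// Illustrative only: serialize one Git tree entry in the form parseTree()
// reads back: "<octal mode> <name>\0<20 raw SHA-1 bytes>".
std::string makeTreeEntry(
    uint32_t rawMode,               // e.g. 0100644 for a regular file
    const std::string & name,       // entry name, must not contain '\0'
    const unsigned char hash[20])   // raw (not hex) SHA-1 of the referenced object
{
    char mode[16];
    std::snprintf(mode, sizeof mode, "%o", rawMode);  // Git stores the mode in octal ASCII
    std::string out;
    out += mode;
    out += ' ';
    out += name;
    out += '\0';
    out.append(reinterpret_cast<const char *>(hash), 20);
    return out;
}
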
@@ -13,12 +13,19 @@

namespace nix::git {

enum struct ObjectType {
    Blob,
    Tree,
    //Commit,
    //Tag,
};

using RawMode = uint32_t;

enum struct Mode : RawMode {
    Directory = 0040000,
    Executable = 0100755,
    Regular = 0100644,
    Executable = 0100755,
    Symlink = 0120000,
};

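The reordering of `Regular` and `Executable` above is cosmetic; the values themselves are Git's on-disk mode bits, which is why `parseTree` can feed the octal permission string straight into `decodeMode`. A sketch of what such a decoder could look like (the real `decodeMode` in this patch may differ in details):

#include <cstdint>
#include <optional>

using RawMode = uint32_t;

enum struct Mode : RawMode {
    Directory  = 0040000,
    Regular    = 0100644,
    Executable = 0100755,
    Symlink    = 0120000,
};

// Map a raw Git mode to the enum, rejecting anything unknown.
std::optional<Mode> decodeModeSketch(RawMode m)
{
    switch (m) {
    case (RawMode) Mode::Directory:
    case (RawMode) Mode::Regular:
    case (RawMode) Mode::Executable:
    case (RawMode) Mode::Symlink:
        return (Mode) m;
    default:
        return std::nullopt;
    }
}
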
@@ -59,9 +66,34 @@ using Tree = std::map<std::string, TreeEntry>;
 */
using SinkHook = void(const Path & name, TreeEntry entry);

void parse(
    ParseSink & sink, const Path & sinkPath,
/**
 * Parse the "blob " or "tree " prefix.
 *
 * @throws if prefix not recognized
 */
ObjectType parseObjectType(
    Source & source,
    const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);

void parseBlob(
    FileSystemObjectSink & sink, const Path & sinkPath,
    Source & source,
    bool executable,
    const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);

void parseTree(
    FileSystemObjectSink & sink, const Path & sinkPath,
    Source & source,
    std::function<SinkHook> hook,
    const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);

/**
 * Helper putting the previous three `parse*` functions together.
 */
void parse(
    FileSystemObjectSink & sink, const Path & sinkPath,
    Source & source,
    bool executable,
    std::function<SinkHook> hook,
    const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);

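With these declarations a caller can either drive the two-step form, `parseObjectType` followed by `parseBlob` or `parseTree`, or use the convenience `parse` overload declared last. A hedged sketch of the convenience path (the function name, destination path, and error text are illustrative, and it assumes the `git-hashing` experimental feature is enabled, since `parse` calls `xpSettings.require`):

#include "git.hh"
#include "fs-sink.hh"
#include "serialise.hh"

using namespace nix;

// Illustrative: unpack a single Git blob object from `source` onto disk.
void unpackBlob(Source & source)
{
    RestoreSink sink;
    sink.dstPath = "/tmp/out";   // hypothetical destination
    git::parse(
        sink, "", source,
        /* executable */ false,
        [](const Path & name, git::TreeEntry) {
            // A blob has no tree entries, so the hook should never fire here.
            throw Error("unexpected tree entry '%s'", name);
        });
}
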
@@ -81,7 +113,7 @@ using RestoreHook = std::pair<SourceAccessor *, CanonPath>(Hash);
/**
 * Wrapper around `parse` and `RestoreSink`
 */
void restore(ParseSink & sink, Source & source, std::function<RestoreHook> hook);
void restore(FileSystemObjectSink & sink, Source & source, std::function<RestoreHook> hook);

/**
 * Dumps a single file to a sink

@@ -199,7 +199,7 @@ struct JSONLogger : Logger {
        json["level"] = ei.level;
        json["msg"] = oss.str();
        json["raw_msg"] = ei.msg.str();
        to_json(json, ei.errPos);
        to_json(json, ei.pos);

        if (loggerSettings.showTrace.get() && !ei.traces.empty()) {
            nlohmann::json traces = nlohmann::json::array();

@@ -120,6 +120,17 @@ public:
    { }
};

/**
 * A variadic template that does nothing.
 *
 * Useful to call a function with each argument in a parameter pack.
 */
struct nop
{
    template<typename... T> nop(T...)
    { }
};

ActivityId getCurActivity();
void setCurActivity(const ActivityId activityId);

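The new `nop` helper gives a place to expand a parameter pack: each argument expression is evaluated as a constructor argument and then discarded. A small usage sketch (the `logAll` helper is illustrative, not from the patch):

#include <iostream>

// Same shape as the helper added above.
struct nop
{
    template<typename... T> nop(T...)
    { }
};

// Hypothetical helper: print every argument on its own line by expanding
// the pack inside a braced-init-list, which guarantees left-to-right order.
template<typename... Args>
void logAll(const Args &... args)
{
    nop{(std::cout << args << '\n', 0)...};
}

int main()
{
    logAll("starting", 42, 3.14);
    return 0;
}
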
@@ -134,36 +134,43 @@ void MemorySink::createDirectory(const Path & path)
        throw Error("file '%s' is not a directory", path);
};

void MemorySink::createRegularFile(const Path & path)
struct CreateMemoryRegularFile : CreateRegularFileSink {
    File::Regular & regularFile;

    CreateMemoryRegularFile(File::Regular & r)
        : regularFile(r)
    { }

    void operator () (std::string_view data) override;
    void isExecutable() override;
    void preallocateContents(uint64_t size) override;
};

void MemorySink::createRegularFile(const Path & path, std::function<void(CreateRegularFileSink &)> func)
{
    auto * f = dst.open(CanonPath{path}, File { File::Regular {} });
    if (!f)
        throw Error("file '%s' cannot be made because some parent file is not a directory", path);
    if (!(r = std::get_if<File::Regular>(&f->raw)))
    if (auto * rp = std::get_if<File::Regular>(&f->raw)) {
        CreateMemoryRegularFile crf { *rp };
        func(crf);
    } else
        throw Error("file '%s' is not a regular file", path);
}

void MemorySink::closeRegularFile()
void CreateMemoryRegularFile::isExecutable()
{
    r = nullptr;
    regularFile.executable = true;
}

void MemorySink::isExecutable()
void CreateMemoryRegularFile::preallocateContents(uint64_t len)
{
    assert(r);
    r->executable = true;
    regularFile.contents.reserve(len);
}

void MemorySink::preallocateContents(uint64_t len)
void CreateMemoryRegularFile::operator () (std::string_view data)
{
    assert(r);
    r->contents.reserve(len);
}

void MemorySink::receiveContents(std::string_view data)
{
    assert(r);
    r->contents += data;
    regularFile.contents += data;
}

void MemorySink::createSymlink(const Path & path, const std::string & target)

@@ -75,7 +75,7 @@ struct MemorySourceAccessor : virtual SourceAccessor
/**
 * Write to a `MemorySourceAccessor` at the given path
 */
struct MemorySink : ParseSink
struct MemorySink : FileSystemObjectSink
{
    MemorySourceAccessor & dst;

@@ -83,17 +83,11 @@ struct MemorySink : ParseSink

    void createDirectory(const Path & path) override;

    void createRegularFile(const Path & path) override;
    void receiveContents(std::string_view data) override;
    void isExecutable() override;
    void closeRegularFile() override;
    void createRegularFile(
        const Path & path,
        std::function<void(CreateRegularFileSink &)>) override;

    void createSymlink(const Path & path, const std::string & target) override;

    void preallocateContents(uint64_t size) override;

private:
    MemorySourceAccessor::File::Regular * r;
};

}

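With the two hunks above, `MemorySink` follows the same callback protocol: per-file state lives in a `CreateMemoryRegularFile` bound to the in-memory `File::Regular`, instead of the old `r` pointer that `closeRegularFile` had to reset. A hedged sketch of writing one file through the new interface (the path and contents are made up, and it assumes `MemorySink` keeps a constructor taking the target accessor by reference):

#include "memory-source-accessor.hh"

using namespace nix;

// Illustrative: put a single executable file into an in-memory accessor.
void populate(MemorySourceAccessor & accessor)
{
    MemorySink sink { accessor };
    sink.createDirectory("/");           // make sure the root is a directory
    sink.createRegularFile("/hello", [](CreateRegularFileSink & crf) {
        crf("#!/bin/sh\necho hello\n");  // contents are streamed while the file is open
        crf.isExecutable();              // sets File::Regular::executable
    });
}
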
Some files were not shown because too many files have changed in this diff.