Mirror of https://github.com/NixOS/nix (synced 2025-06-29 23:13:14 +02:00)

Commit e5c42bba9b
Merge remote-tracking branch 'upstream/master' into indexed-store-path-outputs

439 changed files with 24000 additions and 36411 deletions
@@ -14,10 +14,12 @@
 #include "pathlocks.hh"
 #include "globals.hh"
 #include "serialise.hh"
+#include "build-result.hh"
 #include "store-api.hh"
 #include "derivations.hh"
 #include "local-store.hh"
 #include "legacy.hh"
+#include "experimental-features.hh"

 using namespace nix;
 using std::cin;

@@ -31,7 +33,7 @@ std::string escapeUri(std::string uri)
     return uri;
 }

-static string currentLoad;
+static std::string currentLoad;

 static AutoCloseFD openSlotLock(const Machine & m, uint64_t slot)
 {

@@ -96,7 +98,7 @@ static int main_build_remote(int argc, char * * argv)
     }

     std::optional<StorePath> drvPath;
-    string storeUri;
+    std::string storeUri;

     while (true) {
@@ -130,11 +132,14 @@ static int main_build_remote(int argc, char * * argv)
        for (auto & m : machines) {
            debug("considering building on remote machine '%s'", m.storeUri);

-           if (m.enabled && std::find(m.systemTypes.begin(),
-                   m.systemTypes.end(),
-                   neededSystem) != m.systemTypes.end() &&
+           if (m.enabled
+               && (neededSystem == "builtin"
+                   || std::find(m.systemTypes.begin(),
+                       m.systemTypes.end(),
+                       neededSystem) != m.systemTypes.end()) &&
                m.allSupported(requiredFeatures) &&
-               m.mandatoryMet(requiredFeatures)) {
+               m.mandatoryMet(requiredFeatures))
+           {
                rightType = true;
                AutoCloseFD free;
                uint64_t load = 0;
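The hunk above relaxes the machine-selection condition: a derivation whose required system is "builtin" now matches any enabled machine, instead of only machines that explicitly advertise that system type. A minimal standalone sketch of the new predicate (the function and parameter names here are illustrative, not taken from the diff):

    #include <set>
    #include <string>

    // Sketch: does a machine accept a job that needs `neededSystem`?
    // `systemTypes` stands in for the machine's advertised system types.
    bool acceptsSystem(const std::set<std::string> & systemTypes,
                       const std::string & neededSystem)
    {
        // "builtin" derivations can run anywhere, so they match unconditionally.
        return neededSystem == "builtin"
            || systemTypes.count(neededSystem) > 0;
    }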
@@ -179,7 +184,7 @@ static int main_build_remote(int argc, char * * argv)
            else
            {
                // build the hint template.
-               string errorText =
+               std::string errorText =
                    "Failed to find a machine for remote build!\n"
                    "derivation: %s\nrequired (system, features): (%s, %s)";
                errorText += "\n%s available machines:";

@@ -189,7 +194,7 @@ static int main_build_remote(int argc, char * * argv)
                errorText += "\n(%s, %s, %s, %s)";

                // add the template values.
-               string drvstr;
+               std::string drvstr;
                if (drvPath.has_value())
                    drvstr = drvPath->to_string();
                else

@@ -204,7 +209,7 @@ static int main_build_remote(int argc, char * * argv)

                for (auto & m : machines)
                    error
-                       % concatStringsSep<vector<string>>(", ", m.systemTypes)
+                       % concatStringsSep<std::vector<std::string>>(", ", m.systemTypes)
                        % m.maxJobs
                        % concatStringsSep<StringSet>(", ", m.supportedFeatures)
                        % concatStringsSep<StringSet>(", ", m.mandatoryFeatures);
@@ -295,7 +300,7 @@ connected:

    std::set<Realisation> missingRealisations;
    StorePathSet missingPaths;
-   if (settings.isExperimentalFeatureEnabled("ca-derivations") && !derivationHasKnownOutputPaths(drv.type())) {
+   if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations) && !derivationHasKnownOutputPaths(drv.type())) {
        for (auto & outputName : wantedOutputs) {
            auto thisOutputHash = outputHashes.at(outputName);
            auto thisOutputId = DrvOutput{ thisOutputHash, outputName };

@@ -327,7 +332,7 @@ connected:
    for (auto & realisation : missingRealisations) {
        // Should hold, because if the feature isn't enabled the set
        // of missing realisations should be empty
-       settings.requireExperimentalFeature("ca-derivations");
+       settings.requireExperimentalFeature(Xp::CaDerivations);
        store->registerDrvOutput(realisation);
    }
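The two hunks above replace the stringly-typed feature name "ca-derivations" with the Xp::CaDerivations enumerator, so typos in feature names become compile errors. A simplified sketch of that enum-based pattern (this is an illustration, not the actual ExperimentalFeature implementation in libutil):

    #include <set>
    #include <stdexcept>

    // Simplified sketch: feature flags as an enum, with a settings object that
    // stores the enabled features in a set.
    enum class Xp { CaDerivations, Flakes };

    struct SettingsSketch
    {
        std::set<Xp> enabledFeatures;

        bool isExperimentalFeatureEnabled(Xp f) const
        {
            return enabledFeatures.count(f) > 0;
        }

        void requireExperimentalFeature(Xp f) const
        {
            if (!isExperimentalFeatureEnabled(f))
                throw std::runtime_error("experimental feature is disabled");
        }
    };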
@@ -1,18 +0,0 @@
-Copyright (c) 2014 Chase Geigle
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of
-this software and associated documentation files (the "Software"), to deal in
-the Software without restriction, including without limitation the rights to
-use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
-the Software, and to permit persons to whom the Software is furnished to do so,
-subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
-FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
-COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
-IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
-CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

(File diff suppressed because it is too large.)
@@ -54,6 +54,36 @@ void StoreCommand::run()
     run(getStore());
 }

+CopyCommand::CopyCommand()
+{
+    addFlag({
+        .longName = "from",
+        .description = "URL of the source Nix store.",
+        .labels = {"store-uri"},
+        .handler = {&srcUri},
+    });
+
+    addFlag({
+        .longName = "to",
+        .description = "URL of the destination Nix store.",
+        .labels = {"store-uri"},
+        .handler = {&dstUri},
+    });
+}
+
+ref<Store> CopyCommand::createStore()
+{
+    return srcUri.empty() ? StoreCommand::createStore() : openStore(srcUri);
+}
+
+ref<Store> CopyCommand::getDstStore()
+{
+    if (srcUri.empty() && dstUri.empty())
+        throw UsageError("you must pass '--from' and/or '--to'");
+
+    return dstUri.empty() ? openStore() : openStore(dstUri);
+}
+
 EvalCommand::EvalCommand()
 {
 }
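The new CopyCommand mixin gives any subcommand a --from/--to flag pair: createStore() resolves the source store (falling back to the default store) and getDstStore() resolves the destination. A hypothetical subcommand built on it might look like the sketch below; CmdCopySketch and doCopy are placeholders, and the rest of the Command interface is omitted:

    // Sketch only: not part of the diff.
    struct CmdCopySketch : CopyCommand
    {
        void run(ref<Store> srcStore) override
        {
            // createStore() already produced `srcStore` (the --from store, or
            // the default store when --from is omitted).
            auto dstStore = getDstStore();  // the --to store, or the default store
            // doCopy(srcStore, dstStore, ...);  // placeholder for the copy logic
        }
    };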
@@ -74,7 +104,15 @@ ref<Store> EvalCommand::getEvalStore()
 ref<EvalState> EvalCommand::getEvalState()
 {
     if (!evalState)
-        evalState = std::make_shared<EvalState>(searchPath, getEvalStore(), getStore());
+        evalState =
+            #if HAVE_BOEHMGC
+            std::allocate_shared<EvalState>(traceable_allocator<EvalState>(),
+                searchPath, getEvalStore(), getStore())
+            #else
+            std::make_shared<EvalState>(
+                searchPath, getEvalStore(), getStore())
+            #endif
+            ;
     return ref<EvalState>(evalState);
 }
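With this change the evaluator state is allocated through std::allocate_shared and Boehm GC's traceable_allocator when HAVE_BOEHMGC is set, so the shared_ptr control block and the EvalState object live in memory that the collector scans, keeping GC-managed pointers inside EvalState visible to it. A stripped-down sketch of the same pattern, independent of Nix (Widget is a placeholder type):

    #include <memory>
    #if HAVE_BOEHMGC
    #include <gc/gc_allocator.h>   // provides traceable_allocator<T>
    #endif

    struct Widget { int x = 0; };

    std::shared_ptr<Widget> makeWidget()
    {
    #if HAVE_BOEHMGC
        // Allocated via the Boehm GC allocator: the collector scans this block
        // for pointers into GC-managed memory.
        return std::allocate_shared<Widget>(traceable_allocator<Widget>());
    #else
        return std::make_shared<Widget>();
    #endif
    }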
@@ -115,7 +153,7 @@ void BuiltPathsCommand::run(ref<Store> store)
         for (auto & p : store->queryAllValidPaths())
             paths.push_back(BuiltPath::Opaque{p});
     } else {
-        paths = toBuiltPaths(getEvalStore(), store, realiseMode, operateOn, installables);
+        paths = Installable::toBuiltPaths(getEvalStore(), store, realiseMode, operateOn, installables);
         if (recursive) {
             // XXX: This only computes the store path closure, ignoring
             // intermediate realisations

@@ -203,10 +241,10 @@ void MixProfile::updateProfile(const BuiltPaths & buildables)

     for (auto & buildable : buildables) {
         std::visit(overloaded {
-            [&](BuiltPath::Opaque bo) {
+            [&](const BuiltPath::Opaque & bo) {
                 result.push_back(bo.path);
             },
-            [&](BuiltPath::Built bfd) {
+            [&](const BuiltPath::Built & bfd) {
                 for (auto & output : bfd.outputs) {
                     result.push_back(output.second);
                 }
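The second hunk above changes the visitor lambdas to take their BuiltPath alternatives by const reference, avoiding a copy per element. The `overloaded` helper used with std::visit is the standard C++17 trick for building an ad-hoc visitor out of lambdas; a self-contained sketch of it, unrelated to the Nix types:

    #include <iostream>
    #include <string>
    #include <variant>

    // The usual C++17 "overloaded" helper: inherit the call operator of every lambda.
    template<class... Ts> struct overloaded : Ts... { using Ts::operator()...; };
    template<class... Ts> overloaded(Ts...) -> overloaded<Ts...>;

    int main()
    {
        std::variant<int, std::string> v = std::string("store path");
        std::visit(overloaded {
            [](int i) { std::cout << "int: " << i << "\n"; },
            [](const std::string & s) { std::cout << "string: " << s << "\n"; },
        }, v);
    }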
@@ -5,7 +5,6 @@
 #include "common-eval-args.hh"
 #include "path.hh"
 #include "flake/lockfile.hh"
-#include "store-api.hh"

 #include <optional>

@@ -43,6 +42,19 @@ private:
     std::shared_ptr<Store> _store;
 };

+/* A command that copies something between `--from` and `--to`
+   stores. */
+struct CopyCommand : virtual StoreCommand
+{
+    std::string srcUri, dstUri;
+
+    CopyCommand();
+
+    ref<Store> createStore() override;
+
+    ref<Store> getDstStore();
+};
+
 struct EvalCommand : virtual StoreCommand, MixEvalArgs
 {
     EvalCommand();
@@ -69,14 +81,6 @@ struct MixFlakeOptions : virtual Args, EvalCommand
     { return {}; }
 };

-/* How to handle derivations in commands that operate on store paths. */
-enum class OperateOn {
-    /* Operate on the output path. */
-    Output,
-    /* Operate on the .drv path. */
-    Derivation
-};
-
 struct SourceExprCommand : virtual Args, MixFlakeOptions
 {
     std::optional<Path> file;

@@ -100,19 +104,6 @@ struct SourceExprCommand : virtual Args, MixFlakeOptions
     void completeInstallable(std::string_view prefix);
 };

-enum class Realise {
-    /* Build the derivation. Postcondition: the
-       derivation outputs exist. */
-    Outputs,
-    /* Don't build the derivation. Postcondition: the store derivation
-       exists. */
-    Derivation,
-    /* Evaluate in dry-run mode. Postcondition: nothing. */
-    // FIXME: currently unused, but could be revived if we can
-    // evaluate derivations in-memory.
-    Nothing
-};
-
 /* A command that operates on a list of "installables", which can be
    store paths, attribute paths, Nix expressions, etc. */
 struct InstallablesCommand : virtual Args, SourceExprCommand
@@ -225,38 +216,6 @@ static RegisterCommand registerCommand2(std::vector<std::string> && name)
     return RegisterCommand(std::move(name), [](){ return make_ref<T>(); });
 }

-BuiltPaths build(
-    ref<Store> evalStore,
-    ref<Store> store, Realise mode,
-    const std::vector<std::shared_ptr<Installable>> & installables,
-    BuildMode bMode = bmNormal);
-
-std::set<StorePath> toStorePaths(
-    ref<Store> evalStore,
-    ref<Store> store,
-    Realise mode,
-    OperateOn operateOn,
-    const std::vector<std::shared_ptr<Installable>> & installables);
-
-StorePath toStorePath(
-    ref<Store> evalStore,
-    ref<Store> store,
-    Realise mode,
-    OperateOn operateOn,
-    std::shared_ptr<Installable> installable);
-
-std::set<StorePath> toDerivations(
-    ref<Store> store,
-    const std::vector<std::shared_ptr<Installable>> & installables,
-    bool useDeriver = false);
-
-BuiltPaths toBuiltPaths(
-    ref<Store> evalStore,
-    ref<Store> store,
-    Realise mode,
-    OperateOn operateOn,
-    const std::vector<std::shared_ptr<Installable>> & installables);
-
 /* Helper function to generate args that invoke $EDITOR on
    filename:lineno. */
 Strings editorFor(const Pos & pos);
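These free-function declarations are removed because the helpers become static members of Installable (declared further down in this commit), so call sites now qualify them with the class name. For example, later hunks in this diff rewrite the calls along these lines (fragment only, arguments as they appear in the diff):

    // Call sites now spell out the owning class:
    auto paths = Installable::toBuiltPaths(
        getEvalStore(), store, realiseMode, operateOn, installables);

    auto drvPaths = Installable::toDerivations(store, installables, /* useDeriver */ true);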
@ -12,6 +12,7 @@
|
|||
#include "eval-cache.hh"
|
||||
#include "url.hh"
|
||||
#include "registry.hh"
|
||||
#include "build-result.hh"
|
||||
|
||||
#include <regex>
|
||||
#include <queue>
|
||||
|
@ -97,7 +98,7 @@ MixFlakeOptions::MixFlakeOptions()
|
|||
lockFlags.writeLockFile = false;
|
||||
lockFlags.inputOverrides.insert_or_assign(
|
||||
flake::parseInputPath(inputPath),
|
||||
parseFlakeRef(flakeRef, absPath(".")));
|
||||
parseFlakeRef(flakeRef, absPath("."), true));
|
||||
}}
|
||||
});
|
||||
|
||||
|
@ -158,7 +159,10 @@ SourceExprCommand::SourceExprCommand()
|
|||
|
||||
Strings SourceExprCommand::getDefaultFlakeAttrPaths()
|
||||
{
|
||||
return {"defaultPackage." + settings.thisSystem.get()};
|
||||
return {
|
||||
"packages." + settings.thisSystem.get() + ".default",
|
||||
"defaultPackage." + settings.thisSystem.get()
|
||||
};
|
||||
}
|
||||
|
||||
Strings SourceExprCommand::getDefaultFlakeAttrPathPrefixes()
|
||||
|
@ -191,18 +195,21 @@ void SourceExprCommand::completeInstallable(std::string_view prefix)
|
|||
auto sep = prefix_.rfind('.');
|
||||
std::string searchWord;
|
||||
if (sep != std::string::npos) {
|
||||
searchWord = prefix_.substr(sep, std::string::npos);
|
||||
searchWord = prefix_.substr(sep + 1, std::string::npos);
|
||||
prefix_ = prefix_.substr(0, sep);
|
||||
} else {
|
||||
searchWord = prefix_;
|
||||
prefix_ = "";
|
||||
}
|
||||
|
||||
Value &v1(*findAlongAttrPath(*state, prefix_, *autoArgs, root).first);
|
||||
state->forceValue(v1);
|
||||
auto [v, pos] = findAlongAttrPath(*state, prefix_, *autoArgs, root);
|
||||
Value &v1(*v);
|
||||
state->forceValue(v1, pos);
|
||||
Value v2;
|
||||
state->autoCallFunction(*autoArgs, v1, v2);
|
||||
|
||||
completionType = ctAttrs;
|
||||
|
||||
if (v2.type() == nAttrs) {
|
||||
for (auto & i : *v2.attrs) {
|
||||
std::string name = i.name;
|
||||
|
@ -232,7 +239,9 @@ void completeFlakeRefWithFragment(
|
|||
prefix. */
|
||||
try {
|
||||
auto hash = prefix.find('#');
|
||||
if (hash != std::string::npos) {
|
||||
if (hash == std::string::npos) {
|
||||
completeFlakeRef(evalState->store, prefix);
|
||||
} else {
|
||||
auto fragment = prefix.substr(hash + 1);
|
||||
auto flakeRefS = std::string(prefix.substr(0, hash));
|
||||
// FIXME: do tilde expansion.
|
||||
|
@ -248,6 +257,8 @@ void completeFlakeRefWithFragment(
|
|||
flake. */
|
||||
attrPathPrefixes.push_back("");
|
||||
|
||||
completionType = ctAttrs;
|
||||
|
||||
for (auto & attrPathPrefixS : attrPathPrefixes) {
|
||||
auto attrPathPrefix = parseAttrPath(*evalState, attrPathPrefixS);
|
||||
auto attrPathS = attrPathPrefixS + std::string(fragment);
|
||||
|
@ -262,9 +273,9 @@ void completeFlakeRefWithFragment(
|
|||
auto attr = root->findAlongAttrPath(attrPath);
|
||||
if (!attr) continue;
|
||||
|
||||
for (auto & attr2 : attr->getAttrs()) {
|
||||
for (auto & attr2 : (*attr)->getAttrs()) {
|
||||
if (hasPrefix(attr2, lastAttr)) {
|
||||
auto attrPath2 = attr->getAttrPath(attr2);
|
||||
auto attrPath2 = (*attr)->getAttrPath(attr2);
|
||||
/* Strip the attrpath prefix. */
|
||||
attrPath2.erase(attrPath2.begin(), attrPath2.begin() + attrPathPrefix.size());
|
||||
completions->add(flakeRefS + "#" + concatStringsSep(".", attrPath2));
|
||||
|
@ -285,12 +296,13 @@ void completeFlakeRefWithFragment(
|
|||
} catch (Error & e) {
|
||||
warn(e.msg());
|
||||
}
|
||||
|
||||
completeFlakeRef(evalState->store, prefix);
|
||||
}
|
||||
|
||||
void completeFlakeRef(ref<Store> store, std::string_view prefix)
|
||||
{
|
||||
if (!settings.isExperimentalFeatureEnabled(Xp::Flakes))
|
||||
return;
|
||||
|
||||
if (prefix == "")
|
||||
completions->add(".");
|
||||
|
||||
|
@ -338,6 +350,18 @@ Installable::getCursor(EvalState & state)
|
|||
return cursors[0];
|
||||
}
|
||||
|
||||
static StorePath getDeriver(
|
||||
ref<Store> store,
|
||||
const Installable & i,
|
||||
const StorePath & drvPath)
|
||||
{
|
||||
auto derivers = store->queryValidDerivers(drvPath);
|
||||
if (derivers.empty())
|
||||
throw Error("'%s' does not have a known deriver", i.what());
|
||||
// FIXME: use all derivers?
|
||||
return *derivers.begin();
|
||||
}
|
||||
|
||||
struct InstallableStorePath : Installable
|
||||
{
|
||||
ref<Store> store;
|
||||
|
@ -346,7 +370,7 @@ struct InstallableStorePath : Installable
|
|||
InstallableStorePath(ref<Store> store, StorePath && storePath)
|
||||
: store(store), storePath(std::move(storePath)) { }
|
||||
|
||||
std::string what() override { return store->printStorePath(storePath); }
|
||||
std::string what() const override { return store->printStorePath(storePath); }
|
||||
|
||||
DerivedPaths toDerivedPaths() override
|
||||
{
|
||||
|
@ -367,6 +391,15 @@ struct InstallableStorePath : Installable
|
|||
}
|
||||
}
|
||||
|
||||
StorePathSet toDrvPaths(ref<Store> store) override
|
||||
{
|
||||
if (storePath.isDerivation()) {
|
||||
return {storePath};
|
||||
} else {
|
||||
return {getDeriver(store, *this, storePath)};
|
||||
}
|
||||
}
|
||||
|
||||
std::optional<StorePath> getStorePath() override
|
||||
{
|
||||
return storePath;
|
||||
|
@ -382,7 +415,7 @@ struct InstallableIndexedStorePath : Installable
|
|||
: store(store), req(std::move(req))
|
||||
{ }
|
||||
|
||||
std::string what() override
|
||||
std::string what() const override
|
||||
{
|
||||
return req.to_string(*store);
|
||||
}
|
||||
|
@ -415,6 +448,14 @@ DerivedPaths InstallableValue::toDerivedPaths()
|
|||
return res;
|
||||
}
|
||||
|
||||
StorePathSet InstallableValue::toDrvPaths(ref<Store> store)
|
||||
{
|
||||
StorePathSet res;
|
||||
for (auto & drv : toDerivations())
|
||||
res.insert(drv.drvPath);
|
||||
return res;
|
||||
}
|
||||
|
||||
struct InstallableAttrPath : InstallableValue
|
||||
{
|
||||
SourceExprCommand & cmd;
|
||||
|
@ -425,12 +466,12 @@ struct InstallableAttrPath : InstallableValue
|
|||
: InstallableValue(state), cmd(cmd), v(allocRootValue(v)), attrPath(attrPath)
|
||||
{ }
|
||||
|
||||
std::string what() override { return attrPath; }
|
||||
std::string what() const override { return attrPath; }
|
||||
|
||||
std::pair<Value *, Pos> toValue(EvalState & state) override
|
||||
{
|
||||
auto [vRes, pos] = findAlongAttrPath(state, attrPath, *cmd.getAutoArgs(state), **v);
|
||||
state.forceValue(*vRes);
|
||||
state.forceValue(*vRes, pos);
|
||||
return {vRes, pos};
|
||||
}
|
||||
|
||||
|
@ -448,11 +489,10 @@ std::vector<InstallableValue::DerivationInfo> InstallableAttrPath::toDerivations
|
|||
|
||||
std::vector<DerivationInfo> res;
|
||||
for (auto & drvInfo : drvInfos) {
|
||||
res.push_back({
|
||||
state->store->parseStorePath(drvInfo.queryDrvPath()),
|
||||
state->store->maybeParseStorePath(drvInfo.queryOutPath()),
|
||||
drvInfo.queryOutputName()
|
||||
});
|
||||
auto drvPath = drvInfo.queryDrvPath();
|
||||
if (!drvPath)
|
||||
throw Error("'%s' is not a derivation", what());
|
||||
res.push_back({ *drvPath, drvInfo.queryOutputName() });
|
||||
}
|
||||
|
||||
return res;
|
||||
|
@ -480,7 +520,7 @@ Value * InstallableFlake::getFlakeOutputs(EvalState & state, const flake::Locked
|
|||
auto aOutputs = vFlake->attrs->get(state.symbols.create("outputs"));
|
||||
assert(aOutputs);
|
||||
|
||||
state.forceValue(*aOutputs->value);
|
||||
state.forceValue(*aOutputs->value, [&]() { return aOutputs->value->determinePos(noPos); });
|
||||
|
||||
return aOutputs->value;
|
||||
}
|
||||
|
@ -505,7 +545,7 @@ ref<eval_cache::EvalCache> openEvalCache(
|
|||
auto vFlake = state.allocValue();
|
||||
flake::callFlake(state, *lockedFlake, *vFlake);
|
||||
|
||||
state.forceAttrs(*vFlake);
|
||||
state.forceAttrs(*vFlake, noPos);
|
||||
|
||||
auto aOutputs = vFlake->attrs->get(state.symbols.create("outputs"));
|
||||
assert(aOutputs);
|
||||
|
@ -528,13 +568,14 @@ InstallableFlake::InstallableFlake(
|
|||
SourceExprCommand * cmd,
|
||||
ref<EvalState> state,
|
||||
FlakeRef && flakeRef,
|
||||
Strings && attrPaths,
|
||||
Strings && prefixes,
|
||||
std::string_view fragment,
|
||||
Strings attrPaths,
|
||||
Strings prefixes,
|
||||
const flake::LockFlags & lockFlags)
|
||||
: InstallableValue(state),
|
||||
flakeRef(flakeRef),
|
||||
attrPaths(attrPaths),
|
||||
prefixes(prefixes),
|
||||
attrPaths(fragment == "" ? attrPaths : Strings{(std::string) fragment}),
|
||||
prefixes(fragment == "" ? Strings{} : prefixes),
|
||||
lockFlags(lockFlags)
|
||||
{
|
||||
if (cmd && cmd->getAutoArgs(*state)->size())
|
||||
|
@ -548,29 +589,37 @@ std::tuple<std::string, FlakeRef, InstallableValue::DerivationInfo> InstallableF
|
|||
auto cache = openEvalCache(*state, lockedFlake);
|
||||
auto root = cache->getRoot();
|
||||
|
||||
Suggestions suggestions;
|
||||
|
||||
for (auto & attrPath : getActualAttrPaths()) {
|
||||
auto attr = root->findAlongAttrPath(
|
||||
debug("trying flake output attribute '%s'", attrPath);
|
||||
|
||||
auto attrOrSuggestions = root->findAlongAttrPath(
|
||||
parseAttrPath(*state, attrPath),
|
||||
true
|
||||
);
|
||||
|
||||
if (!attr) continue;
|
||||
if (!attrOrSuggestions) {
|
||||
suggestions += attrOrSuggestions.getSuggestions();
|
||||
continue;
|
||||
}
|
||||
|
||||
auto attr = *attrOrSuggestions;
|
||||
|
||||
if (!attr->isDerivation())
|
||||
throw Error("flake output attribute '%s' is not a derivation", attrPath);
|
||||
|
||||
auto drvPath = attr->forceDerivation();
|
||||
|
||||
auto drvInfo = DerivationInfo{
|
||||
auto drvInfo = DerivationInfo {
|
||||
std::move(drvPath),
|
||||
state->store->maybeParseStorePath(attr->getAttr(state->sOutPath)->getString()),
|
||||
attr->getAttr(state->sOutputName)->getString()
|
||||
};
|
||||
|
||||
return {attrPath, lockedFlake->flake.lockedRef, std::move(drvInfo)};
|
||||
}
|
||||
|
||||
throw Error("flake '%s' does not provide attribute %s",
|
||||
throw Error(suggestions, "flake '%s' does not provide attribute %s",
|
||||
flakeRef, showAttrPaths(getActualAttrPaths()));
|
||||
}
|
||||
|
||||
|
@ -589,17 +638,24 @@ std::pair<Value *, Pos> InstallableFlake::toValue(EvalState & state)
|
|||
|
||||
auto emptyArgs = state.allocBindings(0);
|
||||
|
||||
Suggestions suggestions;
|
||||
|
||||
for (auto & attrPath : getActualAttrPaths()) {
|
||||
try {
|
||||
auto [v, pos] = findAlongAttrPath(state, attrPath, *emptyArgs, *vOutputs);
|
||||
state.forceValue(*v);
|
||||
state.forceValue(*v, pos);
|
||||
return {v, pos};
|
||||
} catch (AttrPathNotFound & e) {
|
||||
suggestions += e.info().suggestions;
|
||||
}
|
||||
}
|
||||
|
||||
throw Error("flake '%s' does not provide attribute %s",
|
||||
flakeRef, showAttrPaths(getActualAttrPaths()));
|
||||
throw Error(
|
||||
suggestions,
|
||||
"flake '%s' does not provide attribute %s",
|
||||
flakeRef,
|
||||
showAttrPaths(getActualAttrPaths())
|
||||
);
|
||||
}
|
||||
|
||||
std::vector<std::pair<std::shared_ptr<eval_cache::AttrCursor>, std::string>>
|
||||
|
@ -614,7 +670,7 @@ InstallableFlake::getCursors(EvalState & state)
|
|||
|
||||
for (auto & attrPath : getActualAttrPaths()) {
|
||||
auto attr = root->findAlongAttrPath(parseAttrPath(state, attrPath));
|
||||
if (attr) res.push_back({attr, attrPath});
|
||||
if (attr) res.push_back({*attr, attrPath});
|
||||
}
|
||||
|
||||
return res;
|
||||
|
@ -704,7 +760,8 @@ std::vector<std::shared_ptr<Installable>> SourceExprCommand::parseInstallables(
|
|||
this,
|
||||
getEvalState(),
|
||||
std::move(flakeRef),
|
||||
fragment == "" ? getDefaultFlakeAttrPaths() : Strings{fragment},
|
||||
fragment,
|
||||
getDefaultFlakeAttrPaths(),
|
||||
getDefaultFlakeAttrPathPrefixes(),
|
||||
lockFlags));
|
||||
continue;
|
||||
|
@ -730,13 +787,13 @@ std::shared_ptr<Installable> SourceExprCommand::parseInstallable(
|
|||
BuiltPaths getBuiltPaths(ref<Store> evalStore, ref<Store> store, const DerivedPaths & hopefullyBuiltPaths)
|
||||
{
|
||||
BuiltPaths res;
|
||||
for (auto & b : hopefullyBuiltPaths)
|
||||
for (const auto & b : hopefullyBuiltPaths)
|
||||
std::visit(
|
||||
overloaded{
|
||||
[&](DerivedPath::Opaque bo) {
|
||||
[&](const DerivedPath::Opaque & bo) {
|
||||
res.push_back(BuiltPath::Opaque{bo.path});
|
||||
},
|
||||
[&](DerivedPath::Built bfd) {
|
||||
[&](const DerivedPath::Built & bfd) {
|
||||
OutputPathMap outputs;
|
||||
auto drv = evalStore->readDerivation(bfd.drvPath);
|
||||
auto outputHashes = staticOutputHashes(*evalStore, drv); // FIXME: expensive
|
||||
|
@ -746,8 +803,7 @@ BuiltPaths getBuiltPaths(ref<Store> evalStore, ref<Store> store, const DerivedPa
|
|||
throw Error(
|
||||
"the derivation '%s' doesn't have an output named '%s'",
|
||||
store->printStorePath(bfd.drvPath), output);
|
||||
if (settings.isExperimentalFeatureEnabled(
|
||||
"ca-derivations")) {
|
||||
if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations)) {
|
||||
auto outputId =
|
||||
DrvOutput{outputHashes.at(output), output};
|
||||
auto realisation =
|
||||
|
@ -776,7 +832,7 @@ BuiltPaths getBuiltPaths(ref<Store> evalStore, ref<Store> store, const DerivedPa
|
|||
return res;
|
||||
}
|
||||
|
||||
BuiltPaths build(
|
||||
BuiltPaths Installable::build(
|
||||
ref<Store> evalStore,
|
||||
ref<Store> store,
|
||||
Realise mode,
|
||||
|
@ -793,15 +849,39 @@ BuiltPaths build(
|
|||
pathsToBuild.insert(pathsToBuild.end(), b.begin(), b.end());
|
||||
}
|
||||
|
||||
if (mode == Realise::Nothing || mode == Realise::Derivation)
|
||||
switch (mode) {
|
||||
case Realise::Nothing:
|
||||
case Realise::Derivation:
|
||||
printMissing(store, pathsToBuild, lvlError);
|
||||
else if (mode == Realise::Outputs)
|
||||
store->buildPaths(pathsToBuild, bMode, evalStore);
|
||||
|
||||
return getBuiltPaths(evalStore, store, pathsToBuild);
|
||||
return getBuiltPaths(evalStore, store, pathsToBuild);
|
||||
case Realise::Outputs: {
|
||||
BuiltPaths res;
|
||||
for (auto & buildResult : store->buildPathsWithResults(pathsToBuild, bMode, evalStore)) {
|
||||
if (!buildResult.success())
|
||||
buildResult.rethrow();
|
||||
std::visit(overloaded {
|
||||
[&](const DerivedPath::Built & bfd) {
|
||||
std::map<std::string, StorePath> outputs;
|
||||
for (auto & path : buildResult.builtOutputs) {
|
||||
// Don't report unrequested outputs
|
||||
if (!wantOutput(path.first.outputName, bfd.outputs)) continue;
|
||||
outputs.emplace(path.first.outputName, path.second.outPath);
|
||||
}
|
||||
res.push_back(BuiltPath::Built { bfd.drvPath, outputs });
|
||||
},
|
||||
[&](const DerivedPath::Opaque & bo) {
|
||||
res.push_back(BuiltPath::Opaque { bo.path });
|
||||
},
|
||||
}, buildResult.path.raw());
|
||||
}
|
||||
return res;
|
||||
}
|
||||
default:
|
||||
assert(false);
|
||||
}
|
||||
}
|
||||
|
||||
BuiltPaths toBuiltPaths(
|
||||
BuiltPaths Installable::toBuiltPaths(
|
||||
ref<Store> evalStore,
|
||||
ref<Store> store,
|
||||
Realise mode,
|
||||
|
@ -809,19 +889,19 @@ BuiltPaths toBuiltPaths(
|
|||
const std::vector<std::shared_ptr<Installable>> & installables)
|
||||
{
|
||||
if (operateOn == OperateOn::Output)
|
||||
return build(evalStore, store, mode, installables);
|
||||
return Installable::build(evalStore, store, mode, installables);
|
||||
else {
|
||||
if (mode == Realise::Nothing)
|
||||
settings.readOnlyMode = true;
|
||||
|
||||
BuiltPaths res;
|
||||
for (auto & drvPath : toDerivations(store, installables, true))
|
||||
for (auto & drvPath : Installable::toDerivations(store, installables, true))
|
||||
res.push_back(BuiltPath::Opaque{drvPath});
|
||||
return res;
|
||||
}
|
||||
}
|
||||
|
||||
StorePathSet toStorePaths(
|
||||
StorePathSet Installable::toStorePaths(
|
||||
ref<Store> evalStore,
|
||||
ref<Store> store,
|
||||
Realise mode, OperateOn operateOn,
|
||||
|
@ -835,7 +915,7 @@ StorePathSet toStorePaths(
|
|||
return outPaths;
|
||||
}
|
||||
|
||||
StorePath toStorePath(
|
||||
StorePath Installable::toStorePath(
|
||||
ref<Store> evalStore,
|
||||
ref<Store> store,
|
||||
Realise mode, OperateOn operateOn,
|
||||
|
@ -849,26 +929,22 @@ StorePath toStorePath(
|
|||
return *paths.begin();
|
||||
}
|
||||
|
||||
StorePathSet toDerivations(
|
||||
StorePathSet Installable::toDerivations(
|
||||
ref<Store> store,
|
||||
const std::vector<std::shared_ptr<Installable>> & installables,
|
||||
bool useDeriver)
|
||||
{
|
||||
StorePathSet drvPaths;
|
||||
|
||||
for (auto & i : installables)
|
||||
for (auto & b : i->toDerivedPaths())
|
||||
for (const auto & i : installables)
|
||||
for (const auto & b : i->toDerivedPaths())
|
||||
std::visit(overloaded {
|
||||
[&](DerivedPath::Opaque bo) {
|
||||
[&](const DerivedPath::Opaque & bo) {
|
||||
if (!useDeriver)
|
||||
throw Error("argument '%s' did not evaluate to a derivation", i->what());
|
||||
auto derivers = store->queryValidDerivers(bo.path);
|
||||
if (derivers.empty())
|
||||
throw Error("'%s' does not have a known deriver", i->what());
|
||||
// FIXME: use all derivers?
|
||||
drvPaths.insert(*derivers.begin());
|
||||
drvPaths.insert(getDeriver(store, *i, bo.path));
|
||||
},
|
||||
[&](DerivedPath::Built bfd) {
|
||||
[&](const DerivedPath::Built & bfd) {
|
||||
drvPaths.insert(bfd.drvPath);
|
||||
},
|
||||
}, b.raw());
|
||||
|
|
|
@ -5,6 +5,7 @@
|
|||
#include "path-with-outputs.hh"
|
||||
#include "derived-path.hh"
|
||||
#include "eval.hh"
|
||||
#include "store-api.hh"
|
||||
#include "flake/flake.hh"
|
||||
|
||||
#include <optional>
|
||||
|
@ -29,14 +30,40 @@ struct UnresolvedApp
|
|||
App resolve(ref<Store> evalStore, ref<Store> store);
|
||||
};
|
||||
|
||||
enum class Realise {
|
||||
/* Build the derivation. Postcondition: the
|
||||
derivation outputs exist. */
|
||||
Outputs,
|
||||
/* Don't build the derivation. Postcondition: the store derivation
|
||||
exists. */
|
||||
Derivation,
|
||||
/* Evaluate in dry-run mode. Postcondition: nothing. */
|
||||
// FIXME: currently unused, but could be revived if we can
|
||||
// evaluate derivations in-memory.
|
||||
Nothing
|
||||
};
|
||||
|
||||
/* How to handle derivations in commands that operate on store paths. */
|
||||
enum class OperateOn {
|
||||
/* Operate on the output path. */
|
||||
Output,
|
||||
/* Operate on the .drv path. */
|
||||
Derivation
|
||||
};
|
||||
|
||||
struct Installable
|
||||
{
|
||||
virtual ~Installable() { }
|
||||
|
||||
virtual std::string what() = 0;
|
||||
virtual std::string what() const = 0;
|
||||
|
||||
virtual DerivedPaths toDerivedPaths() = 0;
|
||||
|
||||
virtual StorePathSet toDrvPaths(ref<Store> store)
|
||||
{
|
||||
throw Error("'%s' cannot be converted to a derivation path", what());
|
||||
}
|
||||
|
||||
DerivedPath toDerivedPath();
|
||||
|
||||
UnresolvedApp toApp(EvalState & state);
|
||||
|
@ -63,6 +90,39 @@ struct Installable
|
|||
{
|
||||
return FlakeRef::fromAttrs({{"type","indirect"}, {"id", "nixpkgs"}});
|
||||
}
|
||||
|
||||
static BuiltPaths build(
|
||||
ref<Store> evalStore,
|
||||
ref<Store> store,
|
||||
Realise mode,
|
||||
const std::vector<std::shared_ptr<Installable>> & installables,
|
||||
BuildMode bMode = bmNormal);
|
||||
|
||||
static std::set<StorePath> toStorePaths(
|
||||
ref<Store> evalStore,
|
||||
ref<Store> store,
|
||||
Realise mode,
|
||||
OperateOn operateOn,
|
||||
const std::vector<std::shared_ptr<Installable>> & installables);
|
||||
|
||||
static StorePath toStorePath(
|
||||
ref<Store> evalStore,
|
||||
ref<Store> store,
|
||||
Realise mode,
|
||||
OperateOn operateOn,
|
||||
std::shared_ptr<Installable> installable);
|
||||
|
||||
static std::set<StorePath> toDerivations(
|
||||
ref<Store> store,
|
||||
const std::vector<std::shared_ptr<Installable>> & installables,
|
||||
bool useDeriver = false);
|
||||
|
||||
static BuiltPaths toBuiltPaths(
|
||||
ref<Store> evalStore,
|
||||
ref<Store> store,
|
||||
Realise mode,
|
||||
OperateOn operateOn,
|
||||
const std::vector<std::shared_ptr<Installable>> & installables);
|
||||
};
|
||||
|
||||
struct InstallableValue : Installable
|
||||
|
@ -74,13 +134,14 @@ struct InstallableValue : Installable
|
|||
struct DerivationInfo
|
||||
{
|
||||
StorePath drvPath;
|
||||
std::optional<StorePath> outPath;
|
||||
std::string outputName;
|
||||
};
|
||||
|
||||
virtual std::vector<DerivationInfo> toDerivations() = 0;
|
||||
|
||||
DerivedPaths toDerivedPaths() override;
|
||||
|
||||
StorePathSet toDrvPaths(ref<Store> store) override;
|
||||
};
|
||||
|
||||
struct InstallableFlake : InstallableValue
|
||||
|
@ -95,11 +156,12 @@ struct InstallableFlake : InstallableValue
|
|||
SourceExprCommand * cmd,
|
||||
ref<EvalState> state,
|
||||
FlakeRef && flakeRef,
|
||||
Strings && attrPaths,
|
||||
Strings && prefixes,
|
||||
std::string_view fragment,
|
||||
Strings attrPaths,
|
||||
Strings prefixes,
|
||||
const flake::LockFlags & lockFlags);
|
||||
|
||||
std::string what() override { return flakeRef.to_string() + "#" + *attrPaths.begin(); }
|
||||
std::string what() const override { return flakeRef.to_string() + "#" + *attrPaths.begin(); }
|
||||
|
||||
std::vector<std::string> getActualAttrPaths();
|
||||
|
||||
|
@ -123,4 +185,9 @@ ref<eval_cache::EvalCache> openEvalCache(
|
|||
EvalState & state,
|
||||
std::shared_ptr<flake::LockedFlake> lockedFlake);
|
||||
|
||||
BuiltPaths getBuiltPaths(
|
||||
ref<Store> evalStore,
|
||||
ref<Store> store,
|
||||
const DerivedPaths & hopefullyBuiltPaths);
|
||||
|
||||
}
|
||||
|
|
|
@ -8,7 +8,7 @@ libcmd_SOURCES := $(wildcard $(d)/*.cc)
|
|||
|
||||
libcmd_CXXFLAGS += -I src/libutil -I src/libstore -I src/libexpr -I src/libmain -I src/libfetchers
|
||||
|
||||
libcmd_LDFLAGS += -llowdown -pthread
|
||||
libcmd_LDFLAGS += $(LOWDOWN_LIBS) -pthread
|
||||
|
||||
libcmd_LIBS = libstore libutil libexpr libmain libfetchers
|
||||
|
||||
|
|
|
@ -9,7 +9,7 @@ namespace nix {
|
|||
static Strings parseAttrPath(std::string_view s)
|
||||
{
|
||||
Strings res;
|
||||
string cur;
|
||||
std::string cur;
|
||||
auto i = s.begin();
|
||||
while (i != s.end()) {
|
||||
if (*i == '.') {
|
||||
|
@ -41,7 +41,7 @@ std::vector<Symbol> parseAttrPath(EvalState & state, std::string_view s)
|
|||
}
|
||||
|
||||
|
||||
std::pair<Value *, Pos> findAlongAttrPath(EvalState & state, const string & attrPath,
|
||||
std::pair<Value *, Pos> findAlongAttrPath(EvalState & state, const std::string & attrPath,
|
||||
Bindings & autoArgs, Value & vIn)
|
||||
{
|
||||
Strings tokens = parseAttrPath(attrPath);
|
||||
|
@ -58,7 +58,7 @@ std::pair<Value *, Pos> findAlongAttrPath(EvalState & state, const string & attr
|
|||
Value * vNew = state.allocValue();
|
||||
state.autoCallFunction(autoArgs, *v, *vNew);
|
||||
v = vNew;
|
||||
state.forceValue(*v);
|
||||
state.forceValue(*v, noPos);
|
||||
|
||||
/* It should evaluate to either a set or an expression,
|
||||
according to what is specified in the attrPath. */
|
||||
|
@ -74,8 +74,14 @@ std::pair<Value *, Pos> findAlongAttrPath(EvalState & state, const string & attr
|
|||
throw Error("empty attribute name in selection path '%1%'", attrPath);
|
||||
|
||||
Bindings::iterator a = v->attrs->find(state.symbols.create(attr));
|
||||
if (a == v->attrs->end())
|
||||
throw AttrPathNotFound("attribute '%1%' in selection path '%2%' not found", attr, attrPath);
|
||||
if (a == v->attrs->end()) {
|
||||
std::set<std::string> attrNames;
|
||||
for (auto & attr : *v->attrs)
|
||||
attrNames.insert(attr.name);
|
||||
|
||||
auto suggestions = Suggestions::bestMatches(attrNames, attr);
|
||||
throw AttrPathNotFound(suggestions, "attribute '%1%' in selection path '%2%' not found", attr, attrPath);
|
||||
}
|
||||
v = &*a->value;
|
||||
pos = *a->pos;
|
||||
}
|
||||
|
@ -121,7 +127,7 @@ Pos findPackageFilename(EvalState & state, Value & v, std::string what)
|
|||
std::string filename(pos, 0, colon);
|
||||
unsigned int lineno;
|
||||
try {
|
||||
lineno = std::stoi(std::string(pos, colon + 1));
|
||||
lineno = std::stoi(std::string(pos, colon + 1, std::string::npos));
|
||||
} catch (std::invalid_argument & e) {
|
||||
throw ParseError("cannot parse line number '%s'", pos);
|
||||
}
|
||||
|
|
|
@ -10,8 +10,11 @@ namespace nix {
|
|||
MakeError(AttrPathNotFound, Error);
|
||||
MakeError(NoPositionInfo, Error);
|
||||
|
||||
std::pair<Value *, Pos> findAlongAttrPath(EvalState & state, const string & attrPath,
|
||||
Bindings & autoArgs, Value & vIn);
|
||||
std::pair<Value *, Pos> findAlongAttrPath(
|
||||
EvalState & state,
|
||||
const std::string & attrPath,
|
||||
Bindings & autoArgs,
|
||||
Value & vIn);
|
||||
|
||||
/* Heuristic to find the filename and lineno or a nix value. */
|
||||
Pos findPackageFilename(EvalState & state, Value & v, std::string what);
|
||||
|
|
|
@ -7,26 +7,19 @@
|
|||
namespace nix {
|
||||
|
||||
|
||||
|
||||
/* Allocate a new array of attributes for an attribute set with a specific
|
||||
capacity. The space is implicitly reserved after the Bindings
|
||||
structure. */
|
||||
Bindings * EvalState::allocBindings(size_t capacity)
|
||||
{
|
||||
if (capacity == 0)
|
||||
return &emptyBindings;
|
||||
if (capacity > std::numeric_limits<Bindings::size_t>::max())
|
||||
throw Error("attribute set of size %d is too big", capacity);
|
||||
return new (allocBytes(sizeof(Bindings) + sizeof(Attr) * capacity)) Bindings((Bindings::size_t) capacity);
|
||||
}
|
||||
|
||||
|
||||
void EvalState::mkAttrs(Value & v, size_t capacity)
|
||||
{
|
||||
if (capacity == 0) {
|
||||
v = vEmptySet;
|
||||
return;
|
||||
}
|
||||
v.mkAttrs(allocBindings(capacity));
|
||||
nrAttrsets++;
|
||||
nrAttrsInAttrsets += capacity;
|
||||
return new (allocBytes(sizeof(Bindings) + sizeof(Attr) * capacity)) Bindings((Bindings::size_t) capacity);
|
||||
}
|
||||
|
||||
|
||||
|
@ -41,15 +34,36 @@ Value * EvalState::allocAttr(Value & vAttrs, const Symbol & name)
|
|||
}
|
||||
|
||||
|
||||
Value * EvalState::allocAttr(Value & vAttrs, const std::string & name)
|
||||
Value * EvalState::allocAttr(Value & vAttrs, std::string_view name)
|
||||
{
|
||||
return allocAttr(vAttrs, symbols.create(name));
|
||||
}
|
||||
|
||||
|
||||
Value & BindingsBuilder::alloc(const Symbol & name, ptr<Pos> pos)
|
||||
{
|
||||
auto value = state.allocValue();
|
||||
bindings->push_back(Attr(name, value, pos));
|
||||
return *value;
|
||||
}
|
||||
|
||||
|
||||
Value & BindingsBuilder::alloc(std::string_view name, ptr<Pos> pos)
|
||||
{
|
||||
return alloc(state.symbols.create(name), pos);
|
||||
}
|
||||
|
||||
|
||||
void Bindings::sort()
|
||||
{
|
||||
std::sort(begin(), end());
|
||||
if (size_) std::sort(begin(), end());
|
||||
}
|
||||
|
||||
|
||||
Value & Value::mkAttrs(BindingsBuilder & bindings)
|
||||
{
|
||||
mkAttrs(bindings.finish());
|
||||
return *this;
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -105,7 +105,7 @@ public:
|
|||
for (size_t n = 0; n < size_; n++)
|
||||
res.emplace_back(&attrs[n]);
|
||||
std::sort(res.begin(), res.end(), [](const Attr * a, const Attr * b) {
|
||||
return (const string &) a->name < (const string &) b->name;
|
||||
return (const std::string &) a->name < (const std::string &) b->name;
|
||||
});
|
||||
return res;
|
||||
}
|
||||
|
@ -113,5 +113,52 @@ public:
|
|||
friend class EvalState;
|
||||
};
|
||||
|
||||
/* A wrapper around Bindings that ensures that its always in sorted
|
||||
order at the end. The only way to consume a BindingsBuilder is to
|
||||
call finish(), which sorts the bindings. */
|
||||
class BindingsBuilder
|
||||
{
|
||||
Bindings * bindings;
|
||||
|
||||
public:
|
||||
// needed by std::back_inserter
|
||||
using value_type = Attr;
|
||||
|
||||
EvalState & state;
|
||||
|
||||
BindingsBuilder(EvalState & state, Bindings * bindings)
|
||||
: bindings(bindings), state(state)
|
||||
{ }
|
||||
|
||||
void insert(Symbol name, Value * value, ptr<Pos> pos = ptr(&noPos))
|
||||
{
|
||||
insert(Attr(name, value, pos));
|
||||
}
|
||||
|
||||
void insert(const Attr & attr)
|
||||
{
|
||||
push_back(attr);
|
||||
}
|
||||
|
||||
void push_back(const Attr & attr)
|
||||
{
|
||||
bindings->push_back(attr);
|
||||
}
|
||||
|
||||
Value & alloc(const Symbol & name, ptr<Pos> pos = ptr(&noPos));
|
||||
|
||||
Value & alloc(std::string_view name, ptr<Pos> pos = ptr(&noPos));
|
||||
|
||||
Bindings * finish()
|
||||
{
|
||||
bindings->sort();
|
||||
return bindings;
|
||||
}
|
||||
|
||||
Bindings * alreadySorted()
|
||||
{
|
||||
return bindings;
|
||||
}
|
||||
};
|
||||
|
||||
}
|
||||
|
|
|
@ -73,30 +73,29 @@ MixEvalArgs::MixEvalArgs()
|
|||
|
||||
Bindings * MixEvalArgs::getAutoArgs(EvalState & state)
|
||||
{
|
||||
Bindings * res = state.allocBindings(autoArgs.size());
|
||||
auto res = state.buildBindings(autoArgs.size());
|
||||
for (auto & i : autoArgs) {
|
||||
Value * v = state.allocValue();
|
||||
auto v = state.allocValue();
|
||||
if (i.second[0] == 'E')
|
||||
state.mkThunk_(*v, state.parseExprFromString(string(i.second, 1), absPath(".")));
|
||||
state.mkThunk_(*v, state.parseExprFromString(i.second.substr(1), absPath(".")));
|
||||
else
|
||||
mkString(*v, string(i.second, 1));
|
||||
res->push_back(Attr(state.symbols.create(i.first), v));
|
||||
v->mkString(((std::string_view) i.second).substr(1));
|
||||
res.insert(state.symbols.create(i.first), v);
|
||||
}
|
||||
res->sort();
|
||||
return res;
|
||||
return res.finish();
|
||||
}
|
||||
|
||||
Path lookupFileArg(EvalState & state, string s)
|
||||
Path lookupFileArg(EvalState & state, std::string_view s)
|
||||
{
|
||||
if (isUri(s)) {
|
||||
return state.store->toRealPath(
|
||||
fetchers::downloadTarball(
|
||||
state.store, resolveUri(s), "source", false).first.storePath);
|
||||
} else if (s.size() > 2 && s.at(0) == '<' && s.at(s.size() - 1) == '>') {
|
||||
Path p = s.substr(1, s.size() - 2);
|
||||
Path p(s.substr(1, s.size() - 2));
|
||||
return state.findFile(p);
|
||||
} else
|
||||
return absPath(s);
|
||||
return absPath(std::string(s));
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -22,6 +22,6 @@ private:
|
|||
std::map<std::string, std::string> autoArgs;
|
||||
};
|
||||
|
||||
Path lookupFileArg(EvalState & state, string s);
|
||||
Path lookupFileArg(EvalState & state, std::string_view s);
|
||||
|
||||
}
|
||||
|
|
|
@ -336,7 +336,7 @@ Value & AttrCursor::getValue()
|
|||
if (!_value) {
|
||||
if (parent) {
|
||||
auto & vParent = parent->first->getValue();
|
||||
root->state.forceAttrs(vParent);
|
||||
root->state.forceAttrs(vParent, noPos);
|
||||
auto attr = vParent.attrs->get(parent->second);
|
||||
if (!attr)
|
||||
throw Error("attribute '%s' is unexpectedly missing", getAttrPathStr());
|
||||
|
@ -381,7 +381,7 @@ Value & AttrCursor::forceValue()
|
|||
auto & v = getValue();
|
||||
|
||||
try {
|
||||
root->state.forceValue(v);
|
||||
root->state.forceValue(v, noPos);
|
||||
} catch (EvalError &) {
|
||||
debug("setting '%s' to failed", getAttrPathStr());
|
||||
if (root->db)
|
||||
|
@ -406,6 +406,16 @@ Value & AttrCursor::forceValue()
|
|||
return v;
|
||||
}
|
||||
|
||||
Suggestions AttrCursor::getSuggestionsForAttr(Symbol name)
|
||||
{
|
||||
auto attrNames = getAttrs();
|
||||
std::set<std::string> strAttrNames;
|
||||
for (auto & name : attrNames)
|
||||
strAttrNames.insert(std::string(name));
|
||||
|
||||
return Suggestions::bestMatches(strAttrNames, name);
|
||||
}
|
||||
|
||||
std::shared_ptr<AttrCursor> AttrCursor::maybeGetAttr(Symbol name, bool forceErrors)
|
||||
{
|
||||
if (root->db) {
|
||||
|
@ -446,6 +456,11 @@ std::shared_ptr<AttrCursor> AttrCursor::maybeGetAttr(Symbol name, bool forceErro
|
|||
return nullptr;
|
||||
//throw TypeError("'%s' is not an attribute set", getAttrPathStr());
|
||||
|
||||
for (auto & attr : *v.attrs) {
|
||||
if (root->db)
|
||||
root->db->setPlaceholder({cachedValue->first, attr.name});
|
||||
}
|
||||
|
||||
auto attr = v.attrs->get(name);
|
||||
|
||||
if (!attr) {
|
||||
|
@ -464,7 +479,7 @@ std::shared_ptr<AttrCursor> AttrCursor::maybeGetAttr(Symbol name, bool forceErro
|
|||
cachedValue2 = {root->db->setPlaceholder({cachedValue->first, name}), placeholder_t()};
|
||||
}
|
||||
|
||||
return std::make_shared<AttrCursor>(
|
||||
return make_ref<AttrCursor>(
|
||||
root, std::make_pair(shared_from_this(), name), attr->value, std::move(cachedValue2));
|
||||
}
|
||||
|
||||
|
@ -473,27 +488,31 @@ std::shared_ptr<AttrCursor> AttrCursor::maybeGetAttr(std::string_view name)
|
|||
return maybeGetAttr(root->state.symbols.create(name));
|
||||
}
|
||||
|
||||
std::shared_ptr<AttrCursor> AttrCursor::getAttr(Symbol name, bool forceErrors)
|
||||
ref<AttrCursor> AttrCursor::getAttr(Symbol name, bool forceErrors)
|
||||
{
|
||||
auto p = maybeGetAttr(name, forceErrors);
|
||||
if (!p)
|
||||
throw Error("attribute '%s' does not exist", getAttrPathStr(name));
|
||||
return p;
|
||||
return ref(p);
|
||||
}
|
||||
|
||||
std::shared_ptr<AttrCursor> AttrCursor::getAttr(std::string_view name)
|
||||
ref<AttrCursor> AttrCursor::getAttr(std::string_view name)
|
||||
{
|
||||
return getAttr(root->state.symbols.create(name));
|
||||
}
|
||||
|
||||
std::shared_ptr<AttrCursor> AttrCursor::findAlongAttrPath(const std::vector<Symbol> & attrPath, bool force)
|
||||
OrSuggestions<ref<AttrCursor>> AttrCursor::findAlongAttrPath(const std::vector<Symbol> & attrPath, bool force)
|
||||
{
|
||||
auto res = shared_from_this();
|
||||
for (auto & attr : attrPath) {
|
||||
res = res->maybeGetAttr(attr, force);
|
||||
if (!res) return {};
|
||||
auto child = res->maybeGetAttr(attr, force);
|
||||
if (!child) {
|
||||
auto suggestions = res->getSuggestionsForAttr(attr);
|
||||
return OrSuggestions<ref<AttrCursor>>::failed(suggestions);
|
||||
}
|
||||
res = child;
|
||||
}
|
||||
return res;
|
||||
return ref(res);
|
||||
}
|
||||
|
||||
std::string AttrCursor::getString()
|
||||
|
@ -596,7 +615,7 @@ std::vector<Symbol> AttrCursor::getAttrs()
|
|||
for (auto & attr : *getValue().attrs)
|
||||
attrs.push_back(attr.name);
|
||||
std::sort(attrs.begin(), attrs.end(), [](const Symbol & a, const Symbol & b) {
|
||||
return (const string &) a < (const string &) b;
|
||||
return (const std::string &) a < (const std::string &) b;
|
||||
});
|
||||
|
||||
if (root->db)
|
||||
|
|
|
@ -94,15 +94,17 @@ public:
|
|||
|
||||
std::string getAttrPathStr(Symbol name) const;
|
||||
|
||||
Suggestions getSuggestionsForAttr(Symbol name);
|
||||
|
||||
std::shared_ptr<AttrCursor> maybeGetAttr(Symbol name, bool forceErrors = false);
|
||||
|
||||
std::shared_ptr<AttrCursor> maybeGetAttr(std::string_view name);
|
||||
|
||||
std::shared_ptr<AttrCursor> getAttr(Symbol name, bool forceErrors = false);
|
||||
ref<AttrCursor> getAttr(Symbol name, bool forceErrors = false);
|
||||
|
||||
std::shared_ptr<AttrCursor> getAttr(std::string_view name);
|
||||
ref<AttrCursor> getAttr(std::string_view name);
|
||||
|
||||
std::shared_ptr<AttrCursor> findAlongAttrPath(const std::vector<Symbol> & attrPath, bool force = false);
|
||||
OrSuggestions<ref<AttrCursor>> findAlongAttrPath(const std::vector<Symbol> & attrPath, bool force = false);
|
||||
|
||||
std::string getString();
|
||||
|
||||
|
|
|
@ -15,12 +15,6 @@ LocalNoInlineNoReturn(void throwEvalError(const Pos & pos, const char * s))
|
|||
});
|
||||
}
|
||||
|
||||
LocalNoInlineNoReturn(void throwTypeError(const char * s, const Value & v))
|
||||
{
|
||||
throw TypeError(s, showType(v));
|
||||
}
|
||||
|
||||
|
||||
LocalNoInlineNoReturn(void throwTypeError(const Pos & pos, const char * s, const Value & v))
|
||||
{
|
||||
throw TypeError({
|
||||
|
@ -31,6 +25,13 @@ LocalNoInlineNoReturn(void throwTypeError(const Pos & pos, const char * s, const
|
|||
|
||||
|
||||
void EvalState::forceValue(Value & v, const Pos & pos)
|
||||
{
|
||||
forceValue(v, [&]() { return pos; });
|
||||
}
|
||||
|
||||
|
||||
template<typename Callable>
|
||||
void EvalState::forceValue(Value & v, Callable getPos)
|
||||
{
|
||||
if (v.isThunk()) {
|
||||
Env * env = v.thunk.env;
|
||||
|
@ -47,31 +48,22 @@ void EvalState::forceValue(Value & v, const Pos & pos)
|
|||
else if (v.isApp())
|
||||
callFunction(*v.app.left, *v.app.right, v, noPos);
|
||||
else if (v.isBlackhole())
|
||||
throwEvalError(pos, "infinite recursion encountered");
|
||||
}
|
||||
|
||||
|
||||
inline void EvalState::forceAttrs(Value & v)
|
||||
{
|
||||
forceValue(v);
|
||||
if (v.type() != nAttrs)
|
||||
throwTypeError("value is %1% while a set was expected", v);
|
||||
throwEvalError(getPos(), "infinite recursion encountered");
|
||||
}
|
||||
|
||||
|
||||
inline void EvalState::forceAttrs(Value & v, const Pos & pos)
|
||||
{
|
||||
forceValue(v, pos);
|
||||
if (v.type() != nAttrs)
|
||||
throwTypeError(pos, "value is %1% while a set was expected", v);
|
||||
forceAttrs(v, [&]() { return pos; });
|
||||
}
|
||||
|
||||
|
||||
inline void EvalState::forceList(Value & v)
|
||||
template <typename Callable>
|
||||
inline void EvalState::forceAttrs(Value & v, Callable getPos)
|
||||
{
|
||||
forceValue(v);
|
||||
if (!v.isList())
|
||||
throwTypeError("value is %1% while a list was expected", v);
|
||||
forceValue(v, getPos);
|
||||
if (v.type() != nAttrs)
|
||||
throwTypeError(getPos(), "value is %1% while a set was expected", v);
|
||||
}
|
||||
|
||||
|
||||
|
|
File diff suppressed because it is too large
Load diff
|
@ -1,10 +1,12 @@
|
|||
#pragma once
|
||||
|
||||
#include "attr-set.hh"
|
||||
#include "types.hh"
|
||||
#include "value.hh"
|
||||
#include "nixexpr.hh"
|
||||
#include "symbol-table.hh"
|
||||
#include "config.hh"
|
||||
#include "experimental-features.hh"
|
||||
|
||||
#include <map>
|
||||
#include <optional>
|
||||
|
@ -43,8 +45,6 @@ struct Env
|
|||
};
|
||||
|
||||
|
||||
Value & mkString(Value & v, std::string_view s, const PathSet & context = PathSet());
|
||||
|
||||
void copyContext(const Value & v, PathSet & context);
|
||||
|
||||
|
||||
|
@ -81,7 +81,8 @@ public:
|
|||
sContentAddressed,
|
||||
sOutputHash, sOutputHashAlgo, sOutputHashMode,
|
||||
sRecurseForDerivations,
|
||||
sDescription, sSelf, sEpsilon;
|
||||
sDescription, sSelf, sEpsilon, sStartSet, sOperator, sKey, sPath,
|
||||
sPrefix;
|
||||
Symbol sDerivationNix;
|
||||
|
||||
/* If set, force copying files to the Nix store even if they
|
||||
|
@ -92,7 +93,7 @@ public:
|
|||
mode. */
|
||||
std::optional<PathSet> allowedPaths;
|
||||
|
||||
Value vEmptySet;
|
||||
Bindings emptyBindings;
|
||||
|
||||
/* Store used to materialise .drv files. */
|
||||
const ref<Store> store;
|
||||
|
@ -132,6 +133,9 @@ private:
|
|||
/* Cache used by prim_match(). */
|
||||
std::shared_ptr<RegexCache> regexCache;
|
||||
|
||||
/* Allocation cache for GC'd Value objects. */
|
||||
std::shared_ptr<void *> valueAllocCache;
|
||||
|
||||
public:
|
||||
|
||||
EvalState(
|
||||
|
@ -141,15 +145,27 @@ public:
|
|||
~EvalState();
|
||||
|
||||
void requireExperimentalFeatureOnEvaluation(
|
||||
const std::string & feature,
|
||||
const ExperimentalFeature &,
|
||||
const std::string_view fName,
|
||||
const Pos & pos
|
||||
);
|
||||
|
||||
void addToSearchPath(const string & s);
|
||||
void addToSearchPath(const std::string & s);
|
||||
|
||||
SearchPath getSearchPath() { return searchPath; }
|
||||
|
||||
/* Allow access to a path. */
|
||||
void allowPath(const Path & path);
|
||||
|
||||
/* Allow access to a store path. Note that this gets remapped to
|
||||
the real store path if `store` is a chroot store. */
|
||||
void allowPath(const StorePath & storePath);
|
||||
|
||||
/* Allow access to a store path and return it as a string. */
|
||||
void allowAndSetStorePathString(const StorePath & storePath, Value & v);
|
||||
|
||||
/* Check whether access to a path is allowed and throw an error if
|
||||
not. Otherwise return the canonicalised path. */
|
||||
Path checkSourcePath(const Path & path);
|
||||
|
||||
void checkURI(const std::string & uri);
|
||||
|
@ -168,8 +184,8 @@ public:
|
|||
Expr * parseExprFromFile(const Path & path, StaticEnv & staticEnv);
|
||||
|
||||
/* Parse a Nix expression from the specified string. */
|
||||
Expr * parseExprFromString(std::string_view s, const Path & basePath, StaticEnv & staticEnv);
|
||||
Expr * parseExprFromString(std::string_view s, const Path & basePath);
|
||||
Expr * parseExprFromString(std::string s, const Path & basePath, StaticEnv & staticEnv);
|
||||
Expr * parseExprFromString(std::string s, const Path & basePath);
|
||||
|
||||
Expr * parseStdin();
|
||||
|
||||
|
@ -189,8 +205,8 @@ public:
|
|||
void resetFileCache();
|
||||
|
||||
/* Look up a file in the search path. */
|
||||
Path findFile(const string & path);
|
||||
Path findFile(SearchPath & searchPath, const string & path, const Pos & pos = noPos);
|
||||
Path findFile(const std::string_view path);
|
||||
Path findFile(SearchPath & searchPath, const std::string_view path, const Pos & pos = noPos);
|
||||
|
||||
/* If the specified search path element is a URI, download it. */
|
||||
std::pair<bool, std::string> resolveSearchPathElem(const SearchPathElem & elem);
|
||||
|
@ -209,7 +225,10 @@ public:
|
|||
of the evaluation of the thunk. If `v' is a delayed function
|
||||
application, call the function and overwrite `v' with the
|
||||
result. Otherwise, this is a no-op. */
|
||||
inline void forceValue(Value & v, const Pos & pos = noPos);
|
||||
inline void forceValue(Value & v, const Pos & pos);
|
||||
|
||||
template <typename Callable>
|
||||
inline void forceValue(Value & v, Callable getPos);
|
||||
|
||||
/* Force a value, then recursively force list elements and
|
||||
attributes. */
|
||||
|
@ -219,37 +238,43 @@ public:
|
|||
NixInt forceInt(Value & v, const Pos & pos);
|
||||
NixFloat forceFloat(Value & v, const Pos & pos);
|
||||
bool forceBool(Value & v, const Pos & pos);
|
||||
inline void forceAttrs(Value & v);
|
||||
inline void forceAttrs(Value & v, const Pos & pos);
|
||||
inline void forceList(Value & v);
|
||||
|
||||
void forceAttrs(Value & v, const Pos & pos);
|
||||
|
||||
template <typename Callable>
|
||||
inline void forceAttrs(Value & v, Callable getPos);
|
||||
|
||||
inline void forceList(Value & v, const Pos & pos);
|
||||
void forceFunction(Value & v, const Pos & pos); // either lambda or primop
|
||||
string forceString(Value & v, const Pos & pos = noPos);
|
||||
string forceString(Value & v, PathSet & context, const Pos & pos = noPos);
|
||||
string forceStringNoCtx(Value & v, const Pos & pos = noPos);
|
||||
std::string_view forceString(Value & v, const Pos & pos = noPos);
|
||||
std::string_view forceString(Value & v, PathSet & context, const Pos & pos = noPos);
|
||||
std::string_view forceStringNoCtx(Value & v, const Pos & pos = noPos);
|
||||
|
||||
/* Return true iff the value `v' denotes a derivation (i.e. a
|
||||
set with attribute `type = "derivation"'). */
|
||||
bool isDerivation(Value & v);
|
||||
|
||||
std::optional<string> tryAttrsToString(const Pos & pos, Value & v,
|
||||
std::optional<std::string> tryAttrsToString(const Pos & pos, Value & v,
|
||||
PathSet & context, bool coerceMore = false, bool copyToStore = true);
|
||||
|
||||
/* String coercion. Converts strings, paths and derivations to a
|
||||
string. If `coerceMore' is set, also converts nulls, integers,
|
||||
booleans and lists to a string. If `copyToStore' is set,
|
||||
referenced paths are copied to the Nix store as a side effect. */
|
||||
string coerceToString(const Pos & pos, Value & v, PathSet & context,
|
||||
BackedStringView coerceToString(const Pos & pos, Value & v, PathSet & context,
|
||||
bool coerceMore = false, bool copyToStore = true,
|
||||
bool canonicalizePath = true);
|
||||
|
||||
string copyPathToStore(PathSet & context, const Path & path);
|
||||
std::string copyPathToStore(PathSet & context, const Path & path);
|
||||
|
||||
/* Path coercion. Converts strings, paths and derivations to a
|
||||
path. The result is guaranteed to be a canonicalised, absolute
|
||||
path. Nothing is copied to the store. */
|
||||
Path coerceToPath(const Pos & pos, Value & v, PathSet & context);
|
||||
|
||||
/* Like coerceToPath, but the result must be a store path. */
|
||||
StorePath coerceToStorePath(const Pos & pos, Value & v, PathSet & context);
|
||||
|
||||
public:
|
||||
|
||||
/* The base environment, containing the builtin functions and
|
||||
|
@ -265,16 +290,18 @@ private:
|
|||
|
||||
void createBaseEnv();
|
||||
|
||||
Value * addConstant(const string & name, Value & v);
|
||||
Value * addConstant(const std::string & name, Value & v);
|
||||
|
||||
Value * addPrimOp(const string & name,
|
||||
void addConstant(const std::string & name, Value * v);
|
||||
|
||||
Value * addPrimOp(const std::string & name,
|
||||
size_t arity, PrimOpFun primOp);
|
||||
|
||||
Value * addPrimOp(PrimOp && primOp);
|
||||
|
||||
public:
|
||||
|
||||
Value & getBuiltin(const string & name);
|
||||
Value & getBuiltin(const std::string & name);
|
||||
|
||||
struct Doc
|
||||
{
|
||||
|
@ -295,8 +322,8 @@ private:
|
|||
friend struct ExprAttrs;
|
||||
friend struct ExprLet;
|
||||
|
||||
Expr * parse(const char * text, FileOrigin origin, const Path & path,
|
||||
const Path & basePath, StaticEnv & staticEnv);
|
||||
Expr * parse(char * text, size_t length, FileOrigin origin, const PathView path,
|
||||
const PathView basePath, StaticEnv & staticEnv);
|
||||
|
||||
public:
|
||||
|
||||
|
@ -306,8 +333,14 @@ public:
|
|||
|
||||
bool isFunctor(Value & fun);
|
||||
|
||||
void callFunction(Value & fun, Value & arg, Value & v, const Pos & pos);
|
||||
void callPrimOp(Value & fun, Value & arg, Value & v, const Pos & pos);
|
||||
// FIXME: use std::span
|
||||
void callFunction(Value & fun, size_t nrArgs, Value * * args, Value & vRes, const Pos & pos);
|
||||
|
||||
void callFunction(Value & fun, Value & arg, Value & vRes, const Pos & pos)
|
||||
{
|
||||
Value * args[] = {&arg};
|
||||
callFunction(fun, 1, args, vRes, pos);
|
||||
}
|
||||
|
||||
/* Automatically call a function for which each argument has a
|
||||
default value or has a binding in the `args' map. */
|
||||
|
@ -318,12 +351,16 @@ public:
|
|||
Env & allocEnv(size_t size);
|
||||
|
||||
Value * allocAttr(Value & vAttrs, const Symbol & name);
|
||||
Value * allocAttr(Value & vAttrs, const std::string & name);
|
||||
Value * allocAttr(Value & vAttrs, std::string_view name);
|
||||
|
||||
Bindings * allocBindings(size_t capacity);
|
||||
|
||||
BindingsBuilder buildBindings(size_t capacity)
{
    return BindingsBuilder(*this, allocBindings(capacity));
}
|
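buildBindings pairs the evaluator with a freshly allocated Bindings so attribute sets can be assembled incrementally and finalised once. The following is only a standalone sketch of that builder idea with assumed semantics (accumulate pairs, sort once in finish()), not the real BindingsBuilder:

#include <algorithm>
#include <cstddef>
#include <iostream>
#include <string>
#include <utility>
#include <vector>

using Bindings = std::vector<std::pair<std::string, int>>;

class BindingsBuilder {
    Bindings attrs;
public:
    explicit BindingsBuilder(std::size_t capacity) { attrs.reserve(capacity); }
    void insert(std::string name, int value) { attrs.emplace_back(std::move(name), value); }
    Bindings finish() {
        std::sort(attrs.begin(), attrs.end());   // sort once so later lookups stay cheap
        return std::move(attrs);
    }
};

int main() {
    BindingsBuilder b(2);
    b.insert("outPath", 1);
    b.insert("drvPath", 2);
    for (auto & [k, v] : b.finish()) std::cout << k << " = " << v << "\n";
}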
||||
|
||||
void mkList(Value & v, size_t length);
|
||||
void mkAttrs(Value & v, size_t capacity);
|
||||
void mkThunk_(Value & v, Expr * expr);
|
||||
void mkPos(Value & v, ptr<Pos> pos);
|
||||
|
||||
|
@ -332,7 +369,10 @@ public:
|
|||
/* Print statistics. */
|
||||
void printStats();
|
||||
|
||||
void realiseContext(const PathSet & context);
/* Realise the given context, and return a mapping from the placeholders
 * used to construct the associated value to their final store path
 */
[[nodiscard]] StringMap realiseContext(const PathSet & context);
|
||||
|
||||
private:
|
||||
|
||||
|
@ -373,16 +413,19 @@ private:
|
|||
friend struct ExprSelect;
|
||||
friend void prim_getAttr(EvalState & state, const Pos & pos, Value * * args, Value & v);
|
||||
friend void prim_match(EvalState & state, const Pos & pos, Value * * args, Value & v);
|
||||
friend void prim_split(EvalState & state, const Pos & pos, Value * * args, Value & v);
|
||||
|
||||
friend struct Value;
|
||||
};
|
||||
|
||||
|
||||
/* Return a string representing the type of the value `v'. */
string showType(ValueType type);
string showType(const Value & v);
std::string_view showType(ValueType type);
std::string showType(const Value & v);

/* Decode a context string ‘!<name>!<path>’ into a pair <path,
   name>. */
std::pair<string, string> decodeContext(std::string_view s);
std::pair<std::string, std::string> decodeContext(std::string_view s);

/* If `path' refers to a directory, then append "/default.nix". */
Path resolveExprPath(Path path);
|
||||
|
|
|
@ -1,4 +1,6 @@
|
|||
#include "flake.hh"
|
||||
#include "globals.hh"
|
||||
#include "fetch-settings.hh"
|
||||
|
||||
#include <nlohmann/json.hpp>
|
||||
|
||||
|
@@ -37,11 +39,11 @@ void ConfigFile::apply()

// FIXME: Move into libutil/config.cc.
std::string valueS;
if (auto s = std::get_if<std::string>(&value))
if (auto* s = std::get_if<std::string>(&value))
    valueS = *s;
else if (auto n = std::get_if<int64_t>(&value))
    valueS = fmt("%d", n);
else if (auto b = std::get_if<Explicit<bool>>(&value))
else if (auto* n = std::get_if<int64_t>(&value))
    valueS = fmt("%d", *n);
else if (auto* b = std::get_if<Explicit<bool>>(&value))
    valueS = b->t ? "true" : "false";
else if (auto ss = std::get_if<std::vector<std::string>>(&value))
    valueS = concatStringsSep(" ", *ss); // FIXME: evil
|
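The corrected hunk dereferences the pointers that std::get_if returns before formatting them. A self-contained sketch of the same dispatch, with a simplified, assumed set of setting types (the pointer returned by get_if is exactly what the original %d bug failed to dereference):

#include <cstdint>
#include <iostream>
#include <string>
#include <variant>
#include <vector>

using Setting = std::variant<std::string, int64_t, bool, std::vector<std::string>>;

std::string toString(const Setting & value)
{
    if (auto * s = std::get_if<std::string>(&value))
        return *s;
    if (auto * n = std::get_if<int64_t>(&value))
        return std::to_string(*n);                 // dereference the pointer
    if (auto * b = std::get_if<bool>(&value))
        return *b ? "true" : "false";
    std::string res;
    for (auto & s : std::get<std::vector<std::string>>(value)) {
        if (!res.empty()) res += ' ';
        res += s;
    }
    return res;
}

int main()
{
    std::cout << toString(Setting{int64_t{42}}) << "\n";                          // 42
    std::cout << toString(Setting{true}) << "\n";                                 // true
    std::cout << toString(Setting{std::vector<std::string>{"a", "b"}}) << "\n";   // a b
}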
||||
|
@ -52,21 +54,19 @@ void ConfigFile::apply()
|
|||
auto trustedList = readTrustedList();
|
||||
|
||||
bool trusted = false;
|
||||
|
||||
if (auto saved = get(get(trustedList, name).value_or(std::map<std::string, bool>()), valueS)) {
|
||||
if (nix::fetchSettings.acceptFlakeConfig){
|
||||
trusted = true;
|
||||
} else if (auto saved = get(get(trustedList, name).value_or(std::map<std::string, bool>()), valueS)) {
|
||||
trusted = *saved;
|
||||
warn("Using saved setting for '%s = %s' from ~/.local/share/nix/trusted-settings.json.", name,valueS);
|
||||
} else {
|
||||
// FIXME: filter ANSI escapes, newlines, \r, etc.
|
||||
if (std::tolower(logger->ask(fmt("do you want to allow configuration setting '%s' to be set to '" ANSI_RED "%s" ANSI_NORMAL "' (y/N)?", name, valueS)).value_or('n')) != 'y') {
|
||||
if (std::tolower(logger->ask("do you want to permanently mark this value as untrusted (y/N)?").value_or('n')) == 'y') {
|
||||
trustedList[name][valueS] = false;
|
||||
writeTrustedList(trustedList);
|
||||
}
|
||||
} else {
|
||||
if (std::tolower(logger->ask("do you want to permanently mark this value as trusted (y/N)?").value_or('n')) == 'y') {
|
||||
trustedList[name][valueS] = trusted = true;
|
||||
writeTrustedList(trustedList);
|
||||
}
|
||||
if (std::tolower(logger->ask(fmt("do you want to allow configuration setting '%s' to be set to '" ANSI_RED "%s" ANSI_NORMAL "' (y/N)?", name, valueS)).value_or('n')) == 'y') {
|
||||
trusted = true;
|
||||
}
|
||||
if (std::tolower(logger->ask(fmt("do you want to permanently mark this value as %s (y/N)?", trusted ? "trusted": "untrusted" )).value_or('n')) == 'y') {
|
||||
trustedList[name][valueS] = trusted;
|
||||
writeTrustedList(trustedList);
|
||||
}
|
||||
}
|
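The trust-checking logic above now honours the global accept-flake-config setting, falls back to the saved answer, and otherwise asks twice: once to allow the value and once to persist the decision. The following is only a rough standalone sketch of that flow, with assumed file and setting names, purely for illustration:

#include <iostream>
#include <map>
#include <string>

std::map<std::string, bool> trustedList;   // stand-in for trusted-settings.json
bool acceptFlakeConfig = false;            // stand-in for the global setting

bool ask(const std::string & prompt)
{
    std::cout << prompt << " (y/N)? ";
    std::string line;
    std::getline(std::cin, line);
    return !line.empty() && (line[0] == 'y' || line[0] == 'Y');
}

bool isTrusted(const std::string & name, const std::string & value)
{
    if (acceptFlakeConfig) return true;                      // globally accepted
    auto key = name + " = " + value;
    if (auto it = trustedList.find(key); it != trustedList.end())
        return it->second;                                   // reuse the saved answer
    bool trusted = ask("allow setting '" + key + "'");
    if (ask(std::string("permanently mark this value as ") + (trusted ? "trusted" : "untrusted")))
        trustedList[key] = trusted;                          // persist for next time
    return trusted;
}

int main() { std::cout << isTrusted("substituters", "https://example.invalid/cache") << "\n"; }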
||||
|
||||
|
|
|
@ -6,6 +6,7 @@
|
|||
#include "store-api.hh"
|
||||
#include "fetchers.hh"
|
||||
#include "finally.hh"
|
||||
#include "fetch-settings.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
|
@ -64,8 +65,7 @@ static std::tuple<fetchers::Tree, FlakeRef, FlakeRef> fetchOrSubstituteTree(
|
|||
debug("got tree '%s' from '%s'",
|
||||
state.store->printStorePath(tree.storePath), lockedRef);
|
||||
|
||||
if (state.allowedPaths)
|
||||
state.allowedPaths->insert(tree.actualPath);
|
||||
state.allowPath(tree.storePath);
|
||||
|
||||
assert(!originalRef.input.getNarHash() || tree.storePath == originalRef.input.computeStorePath(*state.store));
|
||||
|
||||
|
@ -90,11 +90,11 @@ static void expectType(EvalState & state, ValueType type,
|
|||
|
||||
static std::map<FlakeId, FlakeInput> parseFlakeInputs(
|
||||
EvalState & state, Value * value, const Pos & pos,
|
||||
const std::optional<Path> & baseDir);
|
||||
const std::optional<Path> & baseDir, InputPath lockRootPath);
|
||||
|
||||
static FlakeInput parseFlakeInput(EvalState & state,
|
||||
const std::string & inputName, Value * value, const Pos & pos,
|
||||
const std::optional<Path> & baseDir)
|
||||
const std::optional<Path> & baseDir, InputPath lockRootPath)
|
||||
{
|
||||
expectType(state, nAttrs, *value, pos);
|
||||
|
||||
|
@ -118,10 +118,12 @@ static FlakeInput parseFlakeInput(EvalState & state,
|
|||
expectType(state, nBool, *attr.value, *attr.pos);
|
||||
input.isFlake = attr.value->boolean;
|
||||
} else if (attr.name == sInputs) {
|
||||
input.overrides = parseFlakeInputs(state, attr.value, *attr.pos, baseDir);
|
||||
input.overrides = parseFlakeInputs(state, attr.value, *attr.pos, baseDir, lockRootPath);
|
||||
} else if (attr.name == sFollows) {
|
||||
expectType(state, nString, *attr.value, *attr.pos);
|
||||
input.follows = parseInputPath(attr.value->string.s);
|
||||
auto follows(parseInputPath(attr.value->string.s));
|
||||
follows.insert(follows.begin(), lockRootPath.begin(), lockRootPath.end());
|
||||
input.follows = follows;
|
||||
} else {
|
||||
switch (attr.value->type()) {
|
||||
case nString:
|
||||
|
@ -156,7 +158,7 @@ static FlakeInput parseFlakeInput(EvalState & state,
|
|||
if (!attrs.empty())
|
||||
throw Error("unexpected flake input attribute '%s', at %s", attrs.begin()->first, pos);
|
||||
if (url)
|
||||
input.ref = parseFlakeRef(*url, baseDir, true);
|
||||
input.ref = parseFlakeRef(*url, baseDir, true, input.isFlake);
|
||||
}
|
||||
|
||||
if (!input.follows && !input.ref)
|
||||
|
@ -167,7 +169,7 @@ static FlakeInput parseFlakeInput(EvalState & state,
|
|||
|
||||
static std::map<FlakeId, FlakeInput> parseFlakeInputs(
|
||||
EvalState & state, Value * value, const Pos & pos,
|
||||
const std::optional<Path> & baseDir)
|
||||
const std::optional<Path> & baseDir, InputPath lockRootPath)
|
||||
{
|
||||
std::map<FlakeId, FlakeInput> inputs;
|
||||
|
||||
|
@ -179,7 +181,8 @@ static std::map<FlakeId, FlakeInput> parseFlakeInputs(
|
|||
inputAttr.name,
|
||||
inputAttr.value,
|
||||
*inputAttr.pos,
|
||||
baseDir));
|
||||
baseDir,
|
||||
lockRootPath));
|
||||
}
|
||||
|
||||
return inputs;
|
||||
|
@ -189,14 +192,15 @@ static Flake getFlake(
|
|||
EvalState & state,
|
||||
const FlakeRef & originalRef,
|
||||
bool allowLookup,
|
||||
FlakeCache & flakeCache)
|
||||
FlakeCache & flakeCache,
|
||||
InputPath lockRootPath)
|
||||
{
|
||||
auto [sourceInfo, resolvedRef, lockedRef] = fetchOrSubstituteTree(
|
||||
state, originalRef, allowLookup, flakeCache);
|
||||
|
||||
// Guard against symlink attacks.
|
||||
auto flakeDir = canonPath(sourceInfo.actualPath + "/" + lockedRef.subdir);
|
||||
auto flakeFile = canonPath(flakeDir + "/flake.nix");
|
||||
auto flakeDir = canonPath(sourceInfo.actualPath + "/" + lockedRef.subdir, true);
|
||||
auto flakeFile = canonPath(flakeDir + "/flake.nix", true);
|
||||
if (!isInDir(flakeFile, sourceInfo.actualPath))
|
||||
throw Error("'flake.nix' file of flake '%s' escapes from '%s'",
|
||||
lockedRef, state.store->printStorePath(sourceInfo.storePath));
|
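The guard above canonicalises flake.nix with symlink resolution before checking that it still lies inside the fetched source tree; resolving symlinks first is what stops a flake.nix symlink from escaping the tree. A standalone sketch of that pattern using std::filesystem (helper names assumed; the real code uses Nix's canonPath and isInDir):

#include <filesystem>
#include <iostream>
#include <stdexcept>
#include <string>

namespace fs = std::filesystem;

// True iff 'path' is lexically inside directory 'dir'.
bool isInDir(const std::string & path, const std::string & dir)
{
    return path.size() > dir.size()
        && path.compare(0, dir.size(), dir) == 0
        && path[dir.size()] == '/';
}

fs::path checkedFlakeFile(const fs::path & sourceDir, const std::string & subdir)
{
    // weakly_canonical resolves symlinks, so a flake.nix symlink pointing
    // outside the tree shows up here with its real, external path.
    auto flakeFile = fs::weakly_canonical(sourceDir / subdir / "flake.nix");
    auto root = fs::weakly_canonical(sourceDir);
    if (!isInDir(flakeFile.string(), root.string()))
        throw std::runtime_error("flake.nix escapes from the source tree");
    return flakeFile;
}

int main()
{
    try {
        std::cout << checkedFlakeFile("/tmp/source", "subdir").string() << "\n";
    } catch (std::exception & e) {
        std::cout << e.what() << "\n";
    }
}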
||||
|
@ -224,14 +228,14 @@ static Flake getFlake(
|
|||
auto sInputs = state.symbols.create("inputs");
|
||||
|
||||
if (auto inputs = vInfo.attrs->get(sInputs))
|
||||
flake.inputs = parseFlakeInputs(state, inputs->value, *inputs->pos, flakeDir);
|
||||
flake.inputs = parseFlakeInputs(state, inputs->value, *inputs->pos, flakeDir, lockRootPath);
|
||||
|
||||
auto sOutputs = state.symbols.create("outputs");
|
||||
|
||||
if (auto outputs = vInfo.attrs->get(sOutputs)) {
|
||||
expectType(state, nFunction, *outputs->value, *outputs->pos);
|
||||
|
||||
if (outputs->value->isLambda() && outputs->value->lambda.fun->matchAttrs) {
|
||||
if (outputs->value->isLambda() && outputs->value->lambda.fun->hasFormals()) {
|
||||
for (auto & formal : outputs->value->lambda.fun->formals->formals) {
|
||||
if (formal.name != state.sSelf)
|
||||
flake.inputs.emplace(formal.name, FlakeInput {
|
||||
|
@ -251,19 +255,24 @@ static Flake getFlake(
|
|||
for (auto & setting : *nixConfig->value->attrs) {
|
||||
forceTrivialValue(state, *setting.value, *setting.pos);
|
||||
if (setting.value->type() == nString)
|
||||
flake.config.settings.insert({setting.name, state.forceStringNoCtx(*setting.value, *setting.pos)});
|
||||
flake.config.settings.insert({setting.name, std::string(state.forceStringNoCtx(*setting.value, *setting.pos))});
|
||||
else if (setting.value->type() == nPath) {
|
||||
PathSet emptyContext = {};
|
||||
flake.config.settings.emplace(
|
||||
setting.name,
|
||||
state.coerceToString(*setting.pos, *setting.value, emptyContext, false, true, true) .toOwned());
|
||||
}
|
||||
else if (setting.value->type() == nInt)
|
||||
flake.config.settings.insert({setting.name, state.forceInt(*setting.value, *setting.pos)});
|
||||
else if (setting.value->type() == nBool)
|
||||
flake.config.settings.insert({setting.name, state.forceBool(*setting.value, *setting.pos)});
|
||||
flake.config.settings.insert({setting.name, Explicit<bool> { state.forceBool(*setting.value, *setting.pos) }});
|
||||
else if (setting.value->type() == nList) {
|
||||
std::vector<std::string> ss;
|
||||
for (unsigned int n = 0; n < setting.value->listSize(); ++n) {
|
||||
auto elem = setting.value->listElems()[n];
|
||||
for (auto elem : setting.value->listItems()) {
|
||||
if (elem->type() != nString)
|
||||
throw TypeError("list element in flake configuration setting '%s' is %s while a string is expected",
|
||||
setting.name, showType(*setting.value));
|
||||
ss.push_back(state.forceStringNoCtx(*elem, *setting.pos));
|
||||
ss.emplace_back(state.forceStringNoCtx(*elem, *setting.pos));
|
||||
}
|
||||
flake.config.settings.insert({setting.name, ss});
|
||||
}
|
||||
|
@ -285,6 +294,11 @@ static Flake getFlake(
|
|||
return flake;
|
||||
}
|
||||
|
||||
Flake getFlake(EvalState & state, const FlakeRef & originalRef, bool allowLookup, FlakeCache & flakeCache)
|
||||
{
|
||||
return getFlake(state, originalRef, allowLookup, flakeCache, {});
|
||||
}
|
||||
|
||||
Flake getFlake(EvalState & state, const FlakeRef & originalRef, bool allowLookup)
|
||||
{
|
||||
FlakeCache flakeCache;
|
||||
|
@ -298,17 +312,17 @@ LockedFlake lockFlake(
|
|||
const FlakeRef & topRef,
|
||||
const LockFlags & lockFlags)
|
||||
{
|
||||
settings.requireExperimentalFeature("flakes");
|
||||
settings.requireExperimentalFeature(Xp::Flakes);
|
||||
|
||||
FlakeCache flakeCache;
|
||||
|
||||
auto useRegistries = lockFlags.useRegistries.value_or(settings.useRegistries);
|
||||
auto useRegistries = lockFlags.useRegistries.value_or(fetchSettings.useRegistries);
|
||||
|
||||
auto flake = getFlake(state, topRef, useRegistries, flakeCache);
|
||||
|
||||
if (lockFlags.applyNixConfig) {
|
||||
flake.config.apply();
|
||||
// FIXME: send new config to the daemon.
|
||||
state.store->setOptions();
|
||||
}
|
||||
|
||||
try {
|
||||
|
@ -330,23 +344,14 @@ LockedFlake lockFlake(
|
|||
|
||||
std::vector<FlakeRef> parents;
|
||||
|
||||
struct LockParent {
|
||||
/* The path to this parent. */
|
||||
InputPath path;
|
||||
|
||||
/* Whether we are currently inside a top-level lockfile
|
||||
(inputs absolute) or subordinate lockfile (inputs
|
||||
relative). */
|
||||
bool absolute;
|
||||
};
|
||||
|
||||
std::function<void(
|
||||
const FlakeInputs & flakeInputs,
|
||||
std::shared_ptr<Node> node,
|
||||
const InputPath & inputPathPrefix,
|
||||
std::shared_ptr<const Node> oldNode,
|
||||
const LockParent & parent,
|
||||
const Path & parentPath)>
|
||||
const InputPath & lockRootPath,
|
||||
const Path & parentPath,
|
||||
bool trustLock)>
|
||||
computeLocks;
|
||||
|
||||
computeLocks = [&](
|
||||
|
@ -354,8 +359,9 @@ LockedFlake lockFlake(
|
|||
std::shared_ptr<Node> node,
|
||||
const InputPath & inputPathPrefix,
|
||||
std::shared_ptr<const Node> oldNode,
|
||||
const LockParent & parent,
|
||||
const Path & parentPath)
|
||||
const InputPath & lockRootPath,
|
||||
const Path & parentPath,
|
||||
bool trustLock)
|
||||
{
|
||||
debug("computing lock file node '%s'", printInputPath(inputPathPrefix));
|
||||
|
||||
|
@ -398,17 +404,7 @@ LockedFlake lockFlake(
|
|||
if (input.follows) {
|
||||
InputPath target;
|
||||
|
||||
if (parent.absolute && !hasOverride) {
|
||||
target = *input.follows;
|
||||
} else {
|
||||
if (hasOverride) {
|
||||
target = inputPathPrefix;
|
||||
target.pop_back();
|
||||
} else
|
||||
target = parent.path;
|
||||
|
||||
for (auto & i : *input.follows) target.push_back(i);
|
||||
}
|
||||
target.insert(target.end(), input.follows->begin(), input.follows->end());
|
||||
|
||||
debug("input '%s' follows '%s'", inputPathS, printInputPath(target));
|
||||
node->inputs.insert_or_assign(id, target);
|
||||
|
@ -447,22 +443,18 @@ LockedFlake lockFlake(
|
|||
update it. */
|
||||
auto lb = lockFlags.inputUpdates.lower_bound(inputPath);
|
||||
|
||||
auto hasChildUpdate =
|
||||
auto mustRefetch =
|
||||
lb != lockFlags.inputUpdates.end()
|
||||
&& lb->size() > inputPath.size()
|
||||
&& std::equal(inputPath.begin(), inputPath.end(), lb->begin());
|
||||
|
||||
if (hasChildUpdate) {
|
||||
auto inputFlake = getFlake(
|
||||
state, oldLock->lockedRef, false, flakeCache);
|
||||
computeLocks(inputFlake.inputs, childNode, inputPath, oldLock, parent, parentPath);
|
||||
} else {
|
||||
FlakeInputs fakeInputs;
|
||||
|
||||
if (!mustRefetch) {
|
||||
/* No need to fetch this flake, we can be
|
||||
lazy. However there may be new overrides on the
|
||||
inputs of this flake, so we need to check
|
||||
those. */
|
||||
FlakeInputs fakeInputs;
|
||||
|
||||
for (auto & i : oldLock->inputs) {
|
||||
if (auto lockedNode = std::get_if<0>(&i.second)) {
|
||||
fakeInputs.emplace(i.first, FlakeInput {
|
||||
|
@ -470,21 +462,47 @@ LockedFlake lockFlake(
|
|||
.isFlake = (*lockedNode)->isFlake,
|
||||
});
|
||||
} else if (auto follows = std::get_if<1>(&i.second)) {
|
||||
if (! trustLock) {
|
||||
// It is possible that the flake has changed,
|
||||
// so we must confirm all the follows that are in the lockfile are also in the flake.
|
||||
auto overridePath(inputPath);
|
||||
overridePath.push_back(i.first);
|
||||
auto o = overrides.find(overridePath);
|
||||
// If the override disappeared, we have to refetch the flake,
|
||||
// since some of the inputs may not be present in the lockfile.
|
||||
if (o == overrides.end()) {
|
||||
mustRefetch = true;
|
||||
// There's no point populating the rest of the fake inputs,
|
||||
// since we'll refetch the flake anyways.
|
||||
break;
|
||||
}
|
||||
}
|
||||
auto absoluteFollows(lockRootPath);
|
||||
absoluteFollows.insert(absoluteFollows.end(), follows->begin(), follows->end());
|
||||
fakeInputs.emplace(i.first, FlakeInput {
|
||||
.follows = *follows,
|
||||
.follows = absoluteFollows,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
computeLocks(fakeInputs, childNode, inputPath, oldLock, parent, parentPath);
|
||||
}
|
||||
|
||||
auto localPath(parentPath);
|
||||
// If this input is a path, recurse it down.
|
||||
// This allows us to resolve path inputs relative to the current flake.
|
||||
if ((*input.ref).input.getType() == "path")
|
||||
localPath = absPath(*input.ref->input.getSourcePath(), parentPath);
|
||||
computeLocks(
|
||||
mustRefetch
|
||||
? getFlake(state, oldLock->lockedRef, false, flakeCache, inputPath).inputs
|
||||
: fakeInputs,
|
||||
childNode, inputPath, oldLock, lockRootPath, parentPath, !mustRefetch);
|
||||
|
||||
} else {
|
||||
/* We need to create a new lock file entry. So fetch
|
||||
this input. */
|
||||
debug("creating new input '%s'", inputPathS);
|
||||
|
||||
if (!lockFlags.allowMutable && !input.ref->input.isImmutable())
|
||||
if (!lockFlags.allowMutable && !input.ref->input.isLocked())
|
||||
throw Error("cannot update flake input '%s' in pure mode", inputPathS);
|
||||
|
||||
if (input.isFlake) {
|
||||
|
@ -496,7 +514,7 @@ LockedFlake lockFlake(
|
|||
if (localRef.input.getType() == "path")
|
||||
localPath = absPath(*input.ref->input.getSourcePath(), parentPath);
|
||||
|
||||
auto inputFlake = getFlake(state, localRef, useRegistries, flakeCache);
|
||||
auto inputFlake = getFlake(state, localRef, useRegistries, flakeCache, inputPath);
|
||||
|
||||
/* Note: in case of an --override-input, we use
|
||||
the *original* ref (input2.ref) for the
|
||||
|
@ -517,13 +535,6 @@ LockedFlake lockFlake(
|
|||
parents.push_back(*input.ref);
|
||||
Finally cleanup([&]() { parents.pop_back(); });
|
||||
|
||||
// Follows paths from existing inputs in the top-level lockfile are absolute,
|
||||
// whereas paths in subordinate lockfiles are relative to those lockfiles.
|
||||
LockParent newParent {
|
||||
.path = inputPath,
|
||||
.absolute = oldLock ? true : false
|
||||
};
|
||||
|
||||
/* Recursively process the inputs of this
|
||||
flake. Also, unless we already have this flake
|
||||
in the top-level lock file, use this flake's
|
||||
|
@ -534,7 +545,7 @@ LockedFlake lockFlake(
|
|||
? std::dynamic_pointer_cast<const Node>(oldLock)
|
||||
: LockFile::read(
|
||||
inputFlake.sourceInfo->actualPath + "/" + inputFlake.lockedRef.subdir + "/flake.lock").root,
|
||||
newParent, localPath);
|
||||
oldLock ? lockRootPath : inputPath, localPath, false);
|
||||
}
|
||||
|
||||
else {
|
||||
|
@ -552,17 +563,12 @@ LockedFlake lockFlake(
|
|||
}
|
||||
};
|
||||
|
||||
LockParent parent {
|
||||
.path = {},
|
||||
.absolute = true
|
||||
};
|
||||
|
||||
// Bring in the current ref for relative path resolution if we have it
|
||||
auto parentPath = canonPath(flake.sourceInfo->actualPath + "/" + flake.lockedRef.subdir);
|
||||
auto parentPath = canonPath(flake.sourceInfo->actualPath + "/" + flake.lockedRef.subdir, true);
|
||||
|
||||
computeLocks(
|
||||
flake.inputs, newLockFile.root, {},
|
||||
lockFlags.recreateLockFile ? nullptr : oldLockFile.root, parent, parentPath);
|
||||
lockFlags.recreateLockFile ? nullptr : oldLockFile.root, {}, parentPath, false);
|
||||
|
||||
for (auto & i : lockFlags.inputOverrides)
|
||||
if (!overridesUsed.count(i.first))
|
||||
|
@ -586,7 +592,7 @@ LockedFlake lockFlake(
|
|||
if (lockFlags.writeLockFile) {
|
||||
if (auto sourcePath = topRef.input.getSourcePath()) {
|
||||
if (!newLockFile.isImmutable()) {
|
||||
if (settings.warnDirty)
|
||||
if (fetchSettings.warnDirty)
|
||||
warn("will not write lock file of flake '%s' because it has a mutable input", topRef);
|
||||
} else {
|
||||
if (!lockFlags.updateLockFile)
|
||||
|
@ -609,12 +615,24 @@ LockedFlake lockFlake(
|
|||
|
||||
newLockFile.write(path);
|
||||
|
||||
std::optional<std::string> commitMessage = std::nullopt;
|
||||
if (lockFlags.commitLockFile) {
|
||||
std::string cm;
|
||||
|
||||
cm = fetchSettings.commitLockFileSummary.get();
|
||||
|
||||
if (cm == "") {
|
||||
cm = fmt("%s: %s", relPath, lockFileExists ? "Update" : "Add");
|
||||
}
|
||||
|
||||
cm += "\n\nFlake lock file updates:\n\n";
|
||||
cm += filterANSIEscapes(diff, true);
|
||||
commitMessage = cm;
|
||||
}
|
||||
|
||||
topRef.input.markChangedFile(
|
||||
(topRef.subdir == "" ? "" : topRef.subdir + "/") + "flake.lock",
|
||||
lockFlags.commitLockFile
|
||||
? std::optional<std::string>(fmt("%s: %s\n\nFlake lock file changes:\n\n%s",
|
||||
relPath, lockFileExists ? "Update" : "Add", filterANSIEscapes(diff, true)))
|
||||
: std::nullopt);
|
||||
commitMessage);
|
||||
|
||||
/* Rewriting the lockfile changed the top-level
|
||||
repo, so we should re-read it. FIXME: we could
|
||||
|
@ -633,7 +651,7 @@ LockedFlake lockFlake(
|
|||
now. Corner case: we could have reverted from a
|
||||
dirty to a clean tree! */
|
||||
if (flake.lockedRef.input == prevLockedRef.input
|
||||
&& !flake.lockedRef.input.isImmutable())
|
||||
&& !flake.lockedRef.input.isLocked())
|
||||
throw Error("'%s' did not change after I updated its 'flake.lock' file; is 'flake.lock' under version control?", flake.originalRef);
|
||||
}
|
||||
} else
|
||||
|
@ -662,7 +680,7 @@ void callFlake(EvalState & state,
|
|||
auto vTmp1 = state.allocValue();
|
||||
auto vTmp2 = state.allocValue();
|
||||
|
||||
mkString(*vLocks, lockedFlake.lockFile.to_string());
|
||||
vLocks->mkString(lockedFlake.lockFile.to_string());
|
||||
|
||||
emitTreeAttrs(
|
||||
state,
|
||||
|
@ -672,7 +690,7 @@ void callFlake(EvalState & state,
|
|||
false,
|
||||
lockedFlake.flake.forceDirty);
|
||||
|
||||
mkString(*vRootSubdir, lockedFlake.flake.lockedRef.subdir);
|
||||
vRootSubdir->mkString(lockedFlake.flake.lockedRef.subdir);
|
||||
|
||||
if (!state.vCallFlake) {
|
||||
state.vCallFlake = allocRootValue(state.allocValue());
|
||||
|
@ -688,18 +706,18 @@ void callFlake(EvalState & state,
|
|||
|
||||
static void prim_getFlake(EvalState & state, const Pos & pos, Value * * args, Value & v)
|
||||
{
|
||||
state.requireExperimentalFeatureOnEvaluation("flakes", "builtins.getFlake", pos);
|
||||
state.requireExperimentalFeatureOnEvaluation(Xp::Flakes, "builtins.getFlake", pos);
|
||||
|
||||
auto flakeRefS = state.forceStringNoCtx(*args[0], pos);
|
||||
std::string flakeRefS(state.forceStringNoCtx(*args[0], pos));
|
||||
auto flakeRef = parseFlakeRef(flakeRefS, {}, true);
|
||||
if (evalSettings.pureEval && !flakeRef.input.isImmutable())
|
||||
throw Error("cannot call 'getFlake' on mutable flake reference '%s', at %s (use --impure to override)", flakeRefS, pos);
|
||||
if (evalSettings.pureEval && !flakeRef.input.isLocked())
|
||||
throw Error("cannot call 'getFlake' on unlocked flake reference '%s', at %s (use --impure to override)", flakeRefS, pos);
|
||||
|
||||
callFlake(state,
|
||||
lockFlake(state, flakeRef,
|
||||
LockFlags {
|
||||
.updateLockFile = false,
|
||||
.useRegistries = !evalSettings.pureEval && settings.useRegistries,
|
||||
.useRegistries = !evalSettings.pureEval && fetchSettings.useRegistries,
|
||||
.allowMutable = !evalSettings.pureEval,
|
||||
}),
|
||||
v);
|
||||
|
|
|
@ -48,9 +48,12 @@ FlakeRef FlakeRef::resolve(ref<Store> store) const
|
|||
}
|
||||
|
||||
FlakeRef parseFlakeRef(
|
||||
const std::string & url, const std::optional<Path> & baseDir, bool allowMissing)
|
||||
const std::string & url,
|
||||
const std::optional<Path> & baseDir,
|
||||
bool allowMissing,
|
||||
bool isFlake)
|
||||
{
|
||||
auto [flakeRef, fragment] = parseFlakeRefWithFragment(url, baseDir, allowMissing);
|
||||
auto [flakeRef, fragment] = parseFlakeRefWithFragment(url, baseDir, allowMissing, isFlake);
|
||||
if (fragment != "")
|
||||
throw Error("unexpected fragment '%s' in flake reference '%s'", fragment, url);
|
||||
return flakeRef;
|
||||
|
@ -67,7 +70,10 @@ std::optional<FlakeRef> maybeParseFlakeRef(
|
|||
}
|
||||
|
||||
std::pair<FlakeRef, std::string> parseFlakeRefWithFragment(
|
||||
const std::string & url, const std::optional<Path> & baseDir, bool allowMissing)
|
||||
const std::string & url,
|
||||
const std::optional<Path> & baseDir,
|
||||
bool allowMissing,
|
||||
bool isFlake)
|
||||
{
|
||||
using namespace fetchers;
|
||||
|
||||
|
@ -92,7 +98,7 @@ std::pair<FlakeRef, std::string> parseFlakeRefWithFragment(
|
|||
if (std::regex_match(url, match, flakeRegex)) {
|
||||
auto parsedURL = ParsedURL{
|
||||
.url = url,
|
||||
.base = "flake:" + std::string(match[1]),
|
||||
.base = "flake:" + match.str(1),
|
||||
.scheme = "flake",
|
||||
.authority = "",
|
||||
.path = match[1],
|
||||
|
@ -100,58 +106,83 @@ std::pair<FlakeRef, std::string> parseFlakeRefWithFragment(
|
|||
|
||||
return std::make_pair(
|
||||
FlakeRef(Input::fromURL(parsedURL), ""),
|
||||
percentDecode(std::string(match[6])));
|
||||
percentDecode(match.str(6)));
|
||||
}
|
||||
|
||||
else if (std::regex_match(url, match, pathUrlRegex)) {
|
||||
std::string path = match[1];
|
||||
std::string fragment = percentDecode(std::string(match[3]));
|
||||
std::string fragment = percentDecode(match.str(3));
|
||||
|
||||
if (baseDir) {
|
||||
/* Check if 'url' is a path (either absolute or relative
|
||||
to 'baseDir'). If so, search upward to the root of the
|
||||
repo (i.e. the directory containing .git). */
|
||||
|
||||
path = absPath(path, baseDir, true);
|
||||
path = absPath(path, baseDir);
|
||||
|
||||
if (!S_ISDIR(lstat(path).st_mode))
|
||||
throw BadURL("path '%s' is not a flake (because it's not a directory)", path);
|
||||
if (isFlake) {
|
||||
|
||||
if (!allowMissing && !pathExists(path + "/flake.nix"))
|
||||
throw BadURL("path '%s' is not a flake (because it doesn't contain a 'flake.nix' file)", path);
|
||||
if (!allowMissing && !pathExists(path + "/flake.nix")){
|
||||
notice("path '%s' does not contain a 'flake.nix', searching up",path);
|
||||
|
||||
auto flakeRoot = path;
|
||||
std::string subdir;
|
||||
|
||||
while (flakeRoot != "/") {
|
||||
if (pathExists(flakeRoot + "/.git")) {
|
||||
auto base = std::string("git+file://") + flakeRoot;
|
||||
|
||||
auto parsedURL = ParsedURL{
|
||||
.url = base, // FIXME
|
||||
.base = base,
|
||||
.scheme = "git+file",
|
||||
.authority = "",
|
||||
.path = flakeRoot,
|
||||
.query = decodeQuery(match[2]),
|
||||
};
|
||||
|
||||
if (subdir != "") {
|
||||
if (parsedURL.query.count("dir"))
|
||||
throw Error("flake URL '%s' has an inconsistent 'dir' parameter", url);
|
||||
parsedURL.query.insert_or_assign("dir", subdir);
|
||||
// Save device to detect filesystem boundary
|
||||
dev_t device = lstat(path).st_dev;
|
||||
bool found = false;
|
||||
while (path != "/") {
|
||||
if (pathExists(path + "/flake.nix")) {
|
||||
found = true;
|
||||
break;
|
||||
} else if (pathExists(path + "/.git"))
|
||||
throw Error("path '%s' is not part of a flake (neither it nor its parent directories contain a 'flake.nix' file)", path);
|
||||
else {
|
||||
if (lstat(path).st_dev != device)
|
||||
throw Error("unable to find a flake before encountering filesystem boundary at '%s'", path);
|
||||
}
|
||||
path = dirOf(path);
|
||||
}
|
||||
|
||||
if (pathExists(flakeRoot + "/.git/shallow"))
|
||||
parsedURL.query.insert_or_assign("shallow", "1");
|
||||
|
||||
return std::make_pair(
|
||||
FlakeRef(Input::fromURL(parsedURL), get(parsedURL.query, "dir").value_or("")),
|
||||
fragment);
|
||||
if (!found)
|
||||
throw BadURL("could not find a flake.nix file");
|
||||
}
|
||||
|
||||
subdir = std::string(baseNameOf(flakeRoot)) + (subdir.empty() ? "" : "/" + subdir);
|
||||
flakeRoot = dirOf(flakeRoot);
|
||||
if (!S_ISDIR(lstat(path).st_mode))
|
||||
throw BadURL("path '%s' is not a flake (because it's not a directory)", path);
|
||||
|
||||
if (!allowMissing && !pathExists(path + "/flake.nix"))
|
||||
throw BadURL("path '%s' is not a flake (because it doesn't contain a 'flake.nix' file)", path);
|
||||
|
||||
auto flakeRoot = path;
|
||||
std::string subdir;
|
||||
|
||||
while (flakeRoot != "/") {
|
||||
if (pathExists(flakeRoot + "/.git")) {
|
||||
auto base = std::string("git+file://") + flakeRoot;
|
||||
|
||||
auto parsedURL = ParsedURL{
|
||||
.url = base, // FIXME
|
||||
.base = base,
|
||||
.scheme = "git+file",
|
||||
.authority = "",
|
||||
.path = flakeRoot,
|
||||
.query = decodeQuery(match[2]),
|
||||
};
|
||||
|
||||
if (subdir != "") {
|
||||
if (parsedURL.query.count("dir"))
|
||||
throw Error("flake URL '%s' has an inconsistent 'dir' parameter", url);
|
||||
parsedURL.query.insert_or_assign("dir", subdir);
|
||||
}
|
||||
|
||||
if (pathExists(flakeRoot + "/.git/shallow"))
|
||||
parsedURL.query.insert_or_assign("shallow", "1");
|
||||
|
||||
return std::make_pair(
|
||||
FlakeRef(Input::fromURL(parsedURL), get(parsedURL.query, "dir").value_or("")),
|
||||
fragment);
|
||||
}
|
||||
|
||||
subdir = std::string(baseNameOf(flakeRoot)) + (subdir.empty() ? "" : "/" + subdir);
|
||||
flakeRoot = dirOf(flakeRoot);
|
||||
}
|
||||
}
|
||||
|
||||
} else {
|
||||
|
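The new path handling above no longer fails immediately when flake.nix is missing: it walks up towards the root, refuses to pass a .git directory without finding one, and stops when it would cross onto a different filesystem (compared via st_dev). A standalone approximation of that search; the error messages and exact boundary behaviour are assumptions, not the Nix implementation:

#include <sys/types.h>
#include <sys/stat.h>
#include <filesystem>
#include <iostream>
#include <stdexcept>
#include <string>

namespace fs = std::filesystem;

fs::path findFlakeRoot(fs::path dir)
{
    struct stat st;
    if (lstat(dir.c_str(), &st) != 0)
        throw std::runtime_error("cannot stat '" + dir.string() + "'");
    dev_t device = st.st_dev;                         // remember the starting filesystem

    while (dir != "/") {
        if (fs::exists(dir / "flake.nix"))
            return dir;
        if (fs::exists(dir / ".git"))
            throw std::runtime_error("'" + dir.string() + "' is a repository root without a flake.nix");
        if (lstat(dir.c_str(), &st) == 0 && st.st_dev != device)
            throw std::runtime_error("hit a filesystem boundary at '" + dir.string() + "'");
        dir = dir.parent_path();
    }
    throw std::runtime_error("no flake.nix found");
}

int main()
{
    try { std::cout << findFlakeRoot(fs::current_path()).string() << "\n"; }
    catch (std::exception & e) { std::cout << e.what() << "\n"; }
}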
|
|
@ -62,13 +62,19 @@ struct FlakeRef
|
|||
std::ostream & operator << (std::ostream & str, const FlakeRef & flakeRef);
|
||||
|
||||
FlakeRef parseFlakeRef(
|
||||
const std::string & url, const std::optional<Path> & baseDir = {}, bool allowMissing = false);
|
||||
const std::string & url,
|
||||
const std::optional<Path> & baseDir = {},
|
||||
bool allowMissing = false,
|
||||
bool isFlake = true);
|
||||
|
||||
std::optional<FlakeRef> maybeParseFlake(
|
||||
const std::string & url, const std::optional<Path> & baseDir = {});
|
||||
|
||||
std::pair<FlakeRef, std::string> parseFlakeRefWithFragment(
|
||||
const std::string & url, const std::optional<Path> & baseDir = {}, bool allowMissing = false);
|
||||
const std::string & url,
|
||||
const std::optional<Path> & baseDir = {},
|
||||
bool allowMissing = false,
|
||||
bool isFlake = true);
|
||||
|
||||
std::optional<std::pair<FlakeRef, std::string>> maybeParseFlakeRefWithFragment(
|
||||
const std::string & url, const std::optional<Path> & baseDir = {});
|
||||
|
|
|
@ -35,7 +35,7 @@ LockedNode::LockedNode(const nlohmann::json & json)
|
|||
, originalRef(getFlakeRef(json, "original", nullptr))
|
||||
, isFlake(json.find("flake") != json.end() ? (bool) json["flake"] : true)
|
||||
{
|
||||
if (!lockedRef.input.isImmutable())
|
||||
if (!lockedRef.input.isLocked())
|
||||
throw Error("lockfile contains mutable lock '%s'",
|
||||
fetchers::attrsToJSON(lockedRef.input.toAttrs()));
|
||||
}
|
||||
|
@ -220,7 +220,7 @@ bool LockFile::isImmutable() const
|
|||
for (auto & i : nodes) {
|
||||
if (i == root) continue;
|
||||
auto lockedNode = std::dynamic_pointer_cast<const LockedNode>(i);
|
||||
if (lockedNode && !lockedNode->lockedRef.input.isImmutable()) return false;
|
||||
if (lockedNode && !lockedNode->lockedRef.input.isLocked()) return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
|
|
|
@ -11,8 +11,8 @@
|
|||
namespace nix {
|
||||
|
||||
|
||||
DrvInfo::DrvInfo(EvalState & state, const string & attrPath, Bindings * attrs)
|
||||
: state(&state), attrs(attrs), attrPath(attrPath)
|
||||
DrvInfo::DrvInfo(EvalState & state, std::string attrPath, Bindings * attrs)
|
||||
: state(&state), attrs(attrs), attrPath(std::move(attrPath))
|
||||
{
|
||||
}
|
||||
|
||||
|
@ -22,7 +22,7 @@ DrvInfo::DrvInfo(EvalState & state, ref<Store> store, const std::string & drvPat
|
|||
{
|
||||
auto [drvPath, selectedOutputs] = parsePathWithOutputs(*store, drvPathWithOutputs);
|
||||
|
||||
this->drvPath = store->printStorePath(drvPath);
|
||||
this->drvPath = drvPath;
|
||||
|
||||
auto drv = store->derivationFromPath(drvPath);
|
||||
|
||||
|
@ -41,13 +41,11 @@ DrvInfo::DrvInfo(EvalState & state, ref<Store> store, const std::string & drvPat
|
|||
throw Error("derivation '%s' does not have output '%s'", store->printStorePath(drvPath), outputName);
|
||||
auto & [outputName, output] = *i;
|
||||
|
||||
auto optStorePath = output.path(*store, drv.name, outputName);
|
||||
if (optStorePath)
|
||||
outPath = store->printStorePath(*optStorePath);
|
||||
outPath = {output.path(*store, drv.name, outputName)};
|
||||
}
|
||||
|
||||
|
||||
string DrvInfo::queryName() const
|
||||
std::string DrvInfo::queryName() const
|
||||
{
|
||||
if (name == "" && attrs) {
|
||||
auto i = attrs->find(state->sName);
|
||||
|
@ -58,7 +56,7 @@ string DrvInfo::queryName() const
|
|||
}
|
||||
|
||||
|
||||
string DrvInfo::querySystem() const
|
||||
std::string DrvInfo::querySystem() const
|
||||
{
|
||||
if (system == "" && attrs) {
|
||||
auto i = attrs->find(state->sSystem);
|
||||
|
@ -68,24 +66,35 @@ string DrvInfo::querySystem() const
|
|||
}
|
||||
|
||||
|
||||
string DrvInfo::queryDrvPath() const
std::optional<StorePath> DrvInfo::queryDrvPath() const
{
    if (drvPath == "" && attrs) {
    if (!drvPath && attrs) {
        Bindings::iterator i = attrs->find(state->sDrvPath);
        PathSet context;
        drvPath = i != attrs->end() ? state->coerceToPath(*i->pos, *i->value, context) : "";
        if (i == attrs->end())
            drvPath = {std::nullopt};
        else
            drvPath = {state->coerceToStorePath(*i->pos, *i->value, context)};
    }
    return drvPath;
    return drvPath.value_or(std::nullopt);
}
|
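queryDrvPath above now caches a std::optional<std::optional<StorePath>>: the outer optional records whether the lookup has happened at all, the inner one records whether the attribute was actually present. A tiny standalone sketch of that caching shape (names and the fake lookup are illustrative only):

#include <iostream>
#include <optional>
#include <string>

struct Info {
    mutable std::optional<std::optional<std::string>> drvPath;

    std::optional<std::string> queryDrvPath() const {
        if (!drvPath) {
            // Pretend we evaluated the attribute set here.
            bool hasAttr = false;
            drvPath = hasAttr ? std::optional<std::string>("example.drv")
                              : std::optional<std::string>(std::nullopt);
        }
        return *drvPath;          // cached from now on, even if the attribute was absent
    }
};

int main() {
    Info i;
    std::cout << (i.queryDrvPath() ? "present" : "absent") << "\n";
}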
||||
|
||||
|
||||
string DrvInfo::queryOutPath() const
|
||||
StorePath DrvInfo::requireDrvPath() const
|
||||
{
|
||||
if (auto drvPath = queryDrvPath())
|
||||
return *drvPath;
|
||||
throw Error("derivation does not contain a 'drvPath' attribute");
|
||||
}
|
||||
|
||||
|
||||
StorePath DrvInfo::queryOutPath() const
|
||||
{
|
||||
if (!outPath && attrs) {
|
||||
Bindings::iterator i = attrs->find(state->sOutPath);
|
||||
PathSet context;
|
||||
if (i != attrs->end())
|
||||
outPath = state->coerceToPath(*i->pos, *i->value, context);
|
||||
outPath = state->coerceToStorePath(*i->pos, *i->value, context);
|
||||
}
|
||||
if (!outPath)
|
||||
throw UnimplementedError("CA derivations are not yet supported");
|
||||
|
@ -102,21 +111,21 @@ DrvInfo::Outputs DrvInfo::queryOutputs(bool onlyOutputsToInstall)
|
|||
state->forceList(*i->value, *i->pos);
|
||||
|
||||
/* For each output... */
|
||||
for (unsigned int j = 0; j < i->value->listSize(); ++j) {
|
||||
for (auto elem : i->value->listItems()) {
|
||||
/* Evaluate the corresponding set. */
|
||||
string name = state->forceStringNoCtx(*i->value->listElems()[j], *i->pos);
|
||||
std::string name(state->forceStringNoCtx(*elem, *i->pos));
|
||||
Bindings::iterator out = attrs->find(state->symbols.create(name));
|
||||
if (out == attrs->end()) continue; // FIXME: throw error?
|
||||
state->forceAttrs(*out->value);
|
||||
state->forceAttrs(*out->value, *i->pos);
|
||||
|
||||
/* And evaluate its ‘outPath’ attribute. */
|
||||
Bindings::iterator outPath = out->value->attrs->find(state->sOutPath);
|
||||
if (outPath == out->value->attrs->end()) continue; // FIXME: throw error?
|
||||
PathSet context;
|
||||
outputs[name] = state->coerceToPath(*outPath->pos, *outPath->value, context);
|
||||
outputs.emplace(name, state->coerceToStorePath(*outPath->pos, *outPath->value, context));
|
||||
}
|
||||
} else
|
||||
outputs["out"] = queryOutPath();
|
||||
outputs.emplace("out", queryOutPath());
|
||||
}
|
||||
if (!onlyOutputsToInstall || !attrs)
|
||||
return outputs;
|
||||
|
@ -128,9 +137,9 @@ DrvInfo::Outputs DrvInfo::queryOutputs(bool onlyOutputsToInstall)
|
|||
/* ^ this shows during `nix-env -i` right under the bad derivation */
|
||||
if (!outTI->isList()) throw errMsg;
|
||||
Outputs result;
|
||||
for (auto i = outTI->listElems(); i != outTI->listElems() + outTI->listSize(); ++i) {
|
||||
if ((*i)->type() != nString) throw errMsg;
|
||||
auto out = outputs.find((*i)->string.s);
|
||||
for (auto elem : outTI->listItems()) {
|
||||
if (elem->type() != nString) throw errMsg;
|
||||
auto out = outputs.find(elem->string.s);
|
||||
if (out == outputs.end()) throw errMsg;
|
||||
result.insert(*out);
|
||||
}
|
||||
|
@ -138,7 +147,7 @@ DrvInfo::Outputs DrvInfo::queryOutputs(bool onlyOutputsToInstall)
|
|||
}
|
||||
|
||||
|
||||
string DrvInfo::queryOutputName() const
|
||||
std::string DrvInfo::queryOutputName() const
|
||||
{
|
||||
if (outputName == "" && attrs) {
|
||||
Bindings::iterator i = attrs->find(state->sOutputName);
|
||||
|
@ -172,10 +181,10 @@ StringSet DrvInfo::queryMetaNames()
|
|||
|
||||
bool DrvInfo::checkMeta(Value & v)
|
||||
{
|
||||
state->forceValue(v);
|
||||
state->forceValue(v, [&]() { return v.determinePos(noPos); });
|
||||
if (v.type() == nList) {
|
||||
for (unsigned int n = 0; n < v.listSize(); ++n)
|
||||
if (!checkMeta(*v.listElems()[n])) return false;
|
||||
for (auto elem : v.listItems())
|
||||
if (!checkMeta(*elem)) return false;
|
||||
return true;
|
||||
}
|
||||
else if (v.type() == nAttrs) {
|
||||
|
@ -190,7 +199,7 @@ bool DrvInfo::checkMeta(Value & v)
|
|||
}
|
||||
|
||||
|
||||
Value * DrvInfo::queryMeta(const string & name)
|
||||
Value * DrvInfo::queryMeta(const std::string & name)
|
||||
{
|
||||
if (!getMeta()) return 0;
|
||||
Bindings::iterator a = meta->find(state->symbols.create(name));
|
||||
|
@ -199,7 +208,7 @@ Value * DrvInfo::queryMeta(const string & name)
|
|||
}
|
||||
|
||||
|
||||
string DrvInfo::queryMetaString(const string & name)
|
||||
std::string DrvInfo::queryMetaString(const std::string & name)
|
||||
{
|
||||
Value * v = queryMeta(name);
|
||||
if (!v || v->type() != nString) return "";
|
||||
|
@ -207,7 +216,7 @@ string DrvInfo::queryMetaString(const string & name)
|
|||
}
|
||||
|
||||
|
||||
NixInt DrvInfo::queryMetaInt(const string & name, NixInt def)
|
||||
NixInt DrvInfo::queryMetaInt(const std::string & name, NixInt def)
|
||||
{
|
||||
Value * v = queryMeta(name);
|
||||
if (!v) return def;
|
||||
|
@ -221,7 +230,7 @@ NixInt DrvInfo::queryMetaInt(const string & name, NixInt def)
|
|||
return def;
|
||||
}
|
||||
|
||||
NixFloat DrvInfo::queryMetaFloat(const string & name, NixFloat def)
|
||||
NixFloat DrvInfo::queryMetaFloat(const std::string & name, NixFloat def)
|
||||
{
|
||||
Value * v = queryMeta(name);
|
||||
if (!v) return def;
|
||||
|
@ -236,7 +245,7 @@ NixFloat DrvInfo::queryMetaFloat(const string & name, NixFloat def)
|
|||
}
|
||||
|
||||
|
||||
bool DrvInfo::queryMetaBool(const string & name, bool def)
|
||||
bool DrvInfo::queryMetaBool(const std::string & name, bool def)
|
||||
{
|
||||
Value * v = queryMeta(name);
|
||||
if (!v) return def;
|
||||
|
@ -251,23 +260,22 @@ bool DrvInfo::queryMetaBool(const string & name, bool def)
|
|||
}
|
||||
|
||||
|
||||
void DrvInfo::setMeta(const string & name, Value * v)
|
||||
void DrvInfo::setMeta(const std::string & name, Value * v)
|
||||
{
|
||||
getMeta();
|
||||
Bindings * old = meta;
|
||||
meta = state->allocBindings(1 + (old ? old->size() : 0));
|
||||
auto attrs = state->buildBindings(1 + (meta ? meta->size() : 0));
|
||||
Symbol sym = state->symbols.create(name);
|
||||
if (old)
|
||||
for (auto i : *old)
|
||||
if (meta)
|
||||
for (auto i : *meta)
|
||||
if (i.name != sym)
|
||||
meta->push_back(i);
|
||||
if (v) meta->push_back(Attr(sym, v));
|
||||
meta->sort();
|
||||
attrs.insert(i);
|
||||
if (v) attrs.insert(sym, v);
|
||||
meta = attrs.finish();
|
||||
}
|
||||
|
||||
|
||||
/* Cache for already considered attrsets. */
|
||||
typedef set<Bindings *> Done;
|
||||
typedef std::set<Bindings *> Done;
|
||||
|
||||
|
||||
/* Evaluate value `v'. If it evaluates to a set of type `derivation',
|
||||
|
@ -275,11 +283,11 @@ typedef set<Bindings *> Done;
|
|||
The result boolean indicates whether it makes sense
|
||||
for the caller to recursively search for derivations in `v'. */
|
||||
static bool getDerivation(EvalState & state, Value & v,
|
||||
const string & attrPath, DrvInfos & drvs, Done & done,
|
||||
const std::string & attrPath, DrvInfos & drvs, Done & done,
|
||||
bool ignoreAssertionFailures)
|
||||
{
|
||||
try {
|
||||
state.forceValue(v);
|
||||
state.forceValue(v, [&]() { return v.determinePos(noPos); });
|
||||
if (!state.isDerivation(v)) return true;
|
||||
|
||||
/* Remove spurious duplicates (e.g., a set like `rec { x =
|
||||
|
@ -312,7 +320,7 @@ std::optional<DrvInfo> getDerivation(EvalState & state, Value & v,
|
|||
}
|
||||
|
||||
|
||||
static string addToPath(const string & s1, const string & s2)
|
||||
static std::string addToPath(const std::string & s1, const std::string & s2)
|
||||
{
|
||||
return s1.empty() ? s2 : s1 + "." + s2;
|
||||
}
|
||||
|
@ -322,7 +330,7 @@ static std::regex attrRegex("[A-Za-z_][A-Za-z0-9-_+]*");
|
|||
|
||||
|
||||
static void getDerivations(EvalState & state, Value & vIn,
|
||||
const string & pathPrefix, Bindings & autoArgs,
|
||||
const std::string & pathPrefix, Bindings & autoArgs,
|
||||
DrvInfos & drvs, Done & done,
|
||||
bool ignoreAssertionFailures)
|
||||
{
|
||||
|
@ -347,7 +355,7 @@ static void getDerivations(EvalState & state, Value & vIn,
|
|||
debug("evaluating attribute '%1%'", i->name);
|
||||
if (!std::regex_match(std::string(i->name), attrRegex))
|
||||
continue;
|
||||
string pathPrefix2 = addToPath(pathPrefix, i->name);
|
||||
std::string pathPrefix2 = addToPath(pathPrefix, i->name);
|
||||
if (combineChannels)
|
||||
getDerivations(state, *i->value, pathPrefix2, autoArgs, drvs, done, ignoreAssertionFailures);
|
||||
else if (getDerivation(state, *i->value, pathPrefix2, drvs, done, ignoreAssertionFailures)) {
|
||||
|
@ -364,10 +372,10 @@ static void getDerivations(EvalState & state, Value & vIn,
|
|||
}
|
||||
|
||||
else if (v.type() == nList) {
|
||||
for (unsigned int n = 0; n < v.listSize(); ++n) {
|
||||
string pathPrefix2 = addToPath(pathPrefix, (format("%1%") % n).str());
|
||||
if (getDerivation(state, *v.listElems()[n], pathPrefix2, drvs, done, ignoreAssertionFailures))
|
||||
getDerivations(state, *v.listElems()[n], pathPrefix2, autoArgs, drvs, done, ignoreAssertionFailures);
|
||||
for (auto [n, elem] : enumerate(v.listItems())) {
|
||||
std::string pathPrefix2 = addToPath(pathPrefix, fmt("%d", n));
|
||||
if (getDerivation(state, *elem, pathPrefix2, drvs, done, ignoreAssertionFailures))
|
||||
getDerivations(state, *elem, pathPrefix2, autoArgs, drvs, done, ignoreAssertionFailures);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -375,7 +383,7 @@ static void getDerivations(EvalState & state, Value & vIn,
|
|||
}
|
||||
|
||||
|
||||
void getDerivations(EvalState & state, Value & v, const string & pathPrefix,
|
||||
void getDerivations(EvalState & state, Value & v, const std::string & pathPrefix,
|
||||
Bindings & autoArgs, DrvInfos & drvs, bool ignoreAssertionFailures)
|
||||
{
|
||||
Done done;
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
#pragma once
|
||||
|
||||
#include "eval.hh"
|
||||
#include "path.hh"
|
||||
|
||||
#include <string>
|
||||
#include <map>
|
||||
|
@ -12,16 +13,16 @@ namespace nix {
|
|||
struct DrvInfo
|
||||
{
|
||||
public:
|
||||
typedef std::map<string, Path> Outputs;
|
||||
typedef std::map<std::string, StorePath> Outputs;
|
||||
|
||||
private:
|
||||
EvalState * state;
|
||||
|
||||
mutable string name;
|
||||
mutable string system;
|
||||
mutable string drvPath;
|
||||
mutable std::optional<string> outPath;
|
||||
mutable string outputName;
|
||||
mutable std::string name;
|
||||
mutable std::string system;
|
||||
mutable std::optional<std::optional<StorePath>> drvPath;
|
||||
mutable std::optional<StorePath> outPath;
|
||||
mutable std::string outputName;
|
||||
Outputs outputs;
|
||||
|
||||
bool failed = false; // set if we get an AssertionError
|
||||
|
@ -33,36 +34,37 @@ private:
|
|||
bool checkMeta(Value & v);
|
||||
|
||||
public:
|
||||
string attrPath; /* path towards the derivation */
|
||||
std::string attrPath; /* path towards the derivation */
|
||||
|
||||
DrvInfo(EvalState & state) : state(&state) { };
|
||||
DrvInfo(EvalState & state, const string & attrPath, Bindings * attrs);
|
||||
DrvInfo(EvalState & state, std::string attrPath, Bindings * attrs);
|
||||
DrvInfo(EvalState & state, ref<Store> store, const std::string & drvPathWithOutputs);
|
||||
|
||||
string queryName() const;
|
||||
string querySystem() const;
|
||||
string queryDrvPath() const;
|
||||
string queryOutPath() const;
|
||||
string queryOutputName() const;
|
||||
std::string queryName() const;
|
||||
std::string querySystem() const;
|
||||
std::optional<StorePath> queryDrvPath() const;
|
||||
StorePath requireDrvPath() const;
|
||||
StorePath queryOutPath() const;
|
||||
std::string queryOutputName() const;
|
||||
/** Return the list of outputs. The "outputs to install" are determined by `meta.outputsToInstall`. */
|
||||
Outputs queryOutputs(bool onlyOutputsToInstall = false);
|
||||
|
||||
StringSet queryMetaNames();
|
||||
Value * queryMeta(const string & name);
|
||||
string queryMetaString(const string & name);
|
||||
NixInt queryMetaInt(const string & name, NixInt def);
|
||||
NixFloat queryMetaFloat(const string & name, NixFloat def);
|
||||
bool queryMetaBool(const string & name, bool def);
|
||||
void setMeta(const string & name, Value * v);
|
||||
Value * queryMeta(const std::string & name);
|
||||
std::string queryMetaString(const std::string & name);
|
||||
NixInt queryMetaInt(const std::string & name, NixInt def);
|
||||
NixFloat queryMetaFloat(const std::string & name, NixFloat def);
|
||||
bool queryMetaBool(const std::string & name, bool def);
|
||||
void setMeta(const std::string & name, Value * v);
|
||||
|
||||
/*
|
||||
MetaInfo queryMetaInfo(EvalState & state) const;
|
||||
MetaValue queryMetaInfo(EvalState & state, const string & name) const;
|
||||
*/
|
||||
|
||||
void setName(const string & s) { name = s; }
|
||||
void setDrvPath(const string & s) { drvPath = s; }
|
||||
void setOutPath(const string & s) { outPath = s; }
|
||||
void setName(const std::string & s) { name = s; }
|
||||
void setDrvPath(StorePath path) { drvPath = {{std::move(path)}}; }
|
||||
void setOutPath(StorePath path) { outPath = {{std::move(path)}}; }
|
||||
|
||||
void setFailed() { failed = true; };
|
||||
bool hasFailed() { return failed; };
|
||||
|
@ -70,9 +72,9 @@ public:
|
|||
|
||||
|
||||
#if HAVE_BOEHMGC
|
||||
typedef list<DrvInfo, traceable_allocator<DrvInfo> > DrvInfos;
|
||||
typedef std::list<DrvInfo, traceable_allocator<DrvInfo> > DrvInfos;
|
||||
#else
|
||||
typedef list<DrvInfo> DrvInfos;
|
||||
typedef std::list<DrvInfo> DrvInfos;
|
||||
#endif
|
||||
|
||||
|
||||
|
@ -81,7 +83,7 @@ typedef list<DrvInfo> DrvInfos;
|
|||
std::optional<DrvInfo> getDerivation(EvalState & state,
|
||||
Value & v, bool ignoreAssertionFailures);
|
||||
|
||||
void getDerivations(EvalState & state, Value & v, const string & pathPrefix,
|
||||
void getDerivations(EvalState & state, Value & v, const std::string & pathPrefix,
|
||||
Bindings & autoArgs, DrvInfos & drvs,
|
||||
bool ignoreAssertionFailures);
|
||||
|
||||
|
|
|
@ -37,10 +37,10 @@ class JSONSax : nlohmann::json_sax<json> {
|
|||
ValueMap attrs;
|
||||
std::unique_ptr<JSONState> resolve(EvalState & state) override
|
||||
{
|
||||
Value & v = parent->value(state);
|
||||
state.mkAttrs(v, attrs.size());
|
||||
auto attrs2 = state.buildBindings(attrs.size());
|
||||
for (auto & i : attrs)
|
||||
v.attrs->push_back(Attr(i.first, i.second));
|
||||
attrs2.insert(i.first, i.second);
|
||||
parent->value(state).mkAttrs(attrs2.alreadySorted());
|
||||
return std::move(parent);
|
||||
}
|
||||
void add() override { v = nullptr; }
|
||||
|
@ -76,45 +76,51 @@ class JSONSax : nlohmann::json_sax<json> {
|
|||
EvalState & state;
|
||||
std::unique_ptr<JSONState> rs;
|
||||
|
||||
template<typename T, typename... Args> inline bool handle_value(T f, Args... args)
|
||||
{
|
||||
f(rs->value(state), args...);
|
||||
rs->add();
|
||||
return true;
|
||||
}
|
||||
|
||||
public:
|
||||
JSONSax(EvalState & state, Value & v) : state(state), rs(new JSONState(&v)) {};
|
||||
|
||||
bool null()
|
||||
{
|
||||
return handle_value(mkNull);
|
||||
rs->value(state).mkNull();
|
||||
rs->add();
|
||||
return true;
|
||||
}
|
||||
|
||||
bool boolean(bool val)
|
||||
{
|
||||
return handle_value(mkBool, val);
|
||||
rs->value(state).mkBool(val);
|
||||
rs->add();
|
||||
return true;
|
||||
}
|
||||
|
||||
bool number_integer(number_integer_t val)
|
||||
{
|
||||
return handle_value(mkInt, val);
|
||||
rs->value(state).mkInt(val);
|
||||
rs->add();
|
||||
return true;
|
||||
}
|
||||
|
||||
bool number_unsigned(number_unsigned_t val)
|
||||
{
|
||||
return handle_value(mkInt, val);
|
||||
rs->value(state).mkInt(val);
|
||||
rs->add();
|
||||
return true;
|
||||
}
|
||||
|
||||
bool number_float(number_float_t val, const string_t & s)
|
||||
{
|
||||
return handle_value(mkFloat, val);
|
||||
rs->value(state).mkFloat(val);
|
||||
rs->add();
|
||||
return true;
|
||||
}
|
||||
|
||||
bool string(string_t & val)
|
||||
{
|
||||
return handle_value<void(Value&, const char*)>(mkString, val.c_str());
|
||||
rs->value(state).mkString(val);
|
||||
rs->add();
|
||||
return true;
|
||||
}
|
||||
|
||||
#if NLOHMANN_JSON_VERSION_MAJOR >= 3 && NLOHMANN_JSON_VERSION_MINOR >= 8
|
||||
bool binary(binary_t&)
|
||||
{
|
||||
|
@ -157,7 +163,7 @@ public:
|
|||
}
|
||||
};
|
||||
|
||||
void parseJSON(EvalState & state, const string & s_, Value & v)
|
||||
void parseJSON(EvalState & state, const std::string_view & s_, Value & v)
|
||||
{
|
||||
JSONSax parser(state, v);
|
||||
bool res = json::sax_parse(s_, &parser);
|
||||
|
|
|
@ -8,6 +8,6 @@ namespace nix {
|
|||
|
||||
MakeError(JSONParseError, EvalError);
|
||||
|
||||
void parseJSON(EvalState & state, const string & s, Value & v);
|
||||
void parseJSON(EvalState & state, const std::string_view & s, Value & v);
|
||||
|
||||
}
|
||||
|
|
|
@ -64,28 +64,32 @@ static void adjustLoc(YYLTYPE * loc, const char * s, size_t len)
|
|||
}
|
||||
|
||||
|
||||
static Expr * unescapeStr(SymbolTable & symbols, const char * s, size_t length)
// we make use of the fact that the parser receives a private copy of the input
// string and can munge around in it.
static StringToken unescapeStr(SymbolTable & symbols, char * s, size_t length)
{
    string t;
    t.reserve(length);
    char * result = s;
    char * t = s;
    char c;
    // the input string is terminated with *two* NULs, so we can safely take
    // *one* character after the one being checked against.
    while ((c = *s++)) {
|
||||
if (c == '\\') {
|
||||
assert(*s);
|
||||
c = *s++;
|
||||
if (c == 'n') t += '\n';
|
||||
else if (c == 'r') t += '\r';
|
||||
else if (c == 't') t += '\t';
|
||||
else t += c;
|
||||
if (c == 'n') *t = '\n';
|
||||
else if (c == 'r') *t = '\r';
|
||||
else if (c == 't') *t = '\t';
|
||||
else *t = c;
|
||||
}
|
||||
else if (c == '\r') {
|
||||
/* Normalise CR and CR/LF into LF. */
|
||||
t += '\n';
|
||||
*t = '\n';
|
||||
if (*s == '\n') s++; /* cr/lf */
|
||||
}
|
||||
else t += c;
|
||||
else *t = c;
|
||||
t++;
|
||||
}
|
||||
return new ExprString(symbols.create(t));
|
||||
return {result, size_t(t - result)};
|
||||
}
|
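unescapeStr above now rewrites the lexer's private copy of the input in place: the write cursor t trails the read cursor s inside the same buffer, which works because the unescaped text can never be longer than the escaped text. A self-contained sketch of the same technique on an ordinary NUL-terminated char buffer (not the Nix lexer itself):

#include <cassert>
#include <cstddef>
#include <iostream>
#include <string_view>

std::string_view unescapeInPlace(char * s)
{
    char * result = s;
    char * t = s;                    // write cursor, never ahead of the read cursor
    char c;
    while ((c = *s++)) {
        if (c == '\\') {
            c = *s++;
            if (c == 'n') *t = '\n';
            else if (c == 'r') *t = '\r';
            else if (c == 't') *t = '\t';
            else *t = c;
        } else if (c == '\r') {      // normalise CR and CR/LF into LF
            *t = '\n';
            if (*s == '\n') s++;
        } else
            *t = c;
        t++;
    }
    return {result, std::size_t(t - result)};
}

int main()
{
    char buf[] = "a\\nb\r\nc";       // escapes as they appear in the raw source text
    auto v = unescapeInPlace(buf);
    assert(v == std::string_view("a\nb\nc"));
    std::cout << "ok\n";
}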
||||
|
||||
|
||||
|
@ -138,7 +142,7 @@ or { return OR_KW; }
|
|||
\/\/ { return UPDATE; }
|
||||
\+\+ { return CONCAT; }
|
||||
|
||||
{ID} { yylval->id = strdup(yytext); return ID; }
|
||||
{ID} { yylval->id = {yytext, (size_t) yyleng}; return ID; }
|
||||
{INT} { errno = 0;
|
||||
try {
|
||||
yylval->n = boost::lexical_cast<int64_t>(yytext);
|
||||
|
@ -172,7 +176,7 @@ or { return OR_KW; }
|
|||
/* It is impossible to match strings ending with '$' with one
|
||||
regex because trailing contexts are only valid at the end
|
||||
of a rule. (A sane but undocumented limitation.) */
|
||||
yylval->e = unescapeStr(data->symbols, yytext, yyleng);
|
||||
yylval->str = unescapeStr(data->symbols, yytext, yyleng);
|
||||
return STR;
|
||||
}
|
||||
<STRING>\$\{ { PUSH_STATE(DEFAULT); return DOLLAR_CURLY; }
|
||||
|
@ -187,26 +191,26 @@ or { return OR_KW; }
|
|||
|
||||
\'\'(\ *\n)? { PUSH_STATE(IND_STRING); return IND_STRING_OPEN; }
|
||||
<IND_STRING>([^\$\']|\$[^\{\']|\'[^\'\$])+ {
|
||||
yylval->e = new ExprIndStr(yytext);
|
||||
yylval->str = {yytext, (size_t) yyleng, true};
|
||||
return IND_STR;
|
||||
}
|
||||
<IND_STRING>\'\'\$ |
|
||||
<IND_STRING>\$ {
|
||||
yylval->e = new ExprIndStr("$");
|
||||
yylval->str = {"$", 1};
|
||||
return IND_STR;
|
||||
}
|
||||
<IND_STRING>\'\'\' {
|
||||
yylval->e = new ExprIndStr("''");
|
||||
yylval->str = {"''", 2};
|
||||
return IND_STR;
|
||||
}
|
||||
<IND_STRING>\'\'\\{ANY} {
|
||||
yylval->e = unescapeStr(data->symbols, yytext + 2, yyleng - 2);
|
||||
yylval->str = unescapeStr(data->symbols, yytext + 2, yyleng - 2);
|
||||
return IND_STR;
|
||||
}
|
||||
<IND_STRING>\$\{ { PUSH_STATE(DEFAULT); return DOLLAR_CURLY; }
|
||||
<IND_STRING>\'\' { POP_STATE(); return IND_STRING_CLOSE; }
|
||||
<IND_STRING>\' {
|
||||
yylval->e = new ExprIndStr("'");
|
||||
yylval->str = {"'", 1};
|
||||
return IND_STR;
|
||||
}
|
||||
|
||||
|
@ -220,14 +224,14 @@ or { return OR_KW; }
|
|||
<PATH_START>{PATH_SEG} {
|
||||
POP_STATE();
|
||||
PUSH_STATE(INPATH_SLASH);
|
||||
yylval->path = strdup(yytext);
|
||||
yylval->path = {yytext, (size_t) yyleng};
|
||||
return PATH;
|
||||
}
|
||||
|
||||
<PATH_START>{HPATH_START} {
|
||||
POP_STATE();
|
||||
PUSH_STATE(INPATH_SLASH);
|
||||
yylval->path = strdup(yytext);
|
||||
yylval->path = {yytext, (size_t) yyleng};
|
||||
return HPATH;
|
||||
}
|
||||
|
||||
|
@ -236,7 +240,7 @@ or { return OR_KW; }
|
|||
PUSH_STATE(INPATH_SLASH);
|
||||
else
|
||||
PUSH_STATE(INPATH);
|
||||
yylval->path = strdup(yytext);
|
||||
yylval->path = {yytext, (size_t) yyleng};
|
||||
return PATH;
|
||||
}
|
||||
{HPATH} {
|
||||
|
@ -244,7 +248,7 @@ or { return OR_KW; }
|
|||
PUSH_STATE(INPATH_SLASH);
|
||||
else
|
||||
PUSH_STATE(INPATH);
|
||||
yylval->path = strdup(yytext);
|
||||
yylval->path = {yytext, (size_t) yyleng};
|
||||
return HPATH;
|
||||
}
|
||||
|
||||
|
@ -260,7 +264,7 @@ or { return OR_KW; }
|
|||
PUSH_STATE(INPATH_SLASH);
|
||||
else
|
||||
PUSH_STATE(INPATH);
|
||||
yylval->e = new ExprString(data->symbols.create(string(yytext)));
|
||||
yylval->str = {yytext, (size_t) yyleng};
|
||||
return STR;
|
||||
}
|
||||
<INPATH>{ANY} |
|
||||
|
@ -279,8 +283,8 @@ or { return OR_KW; }
|
|||
throw ParseError("path has a trailing slash");
|
||||
}
|
||||
|
||||
{SPATH} { yylval->path = strdup(yytext); return SPATH; }
|
||||
{URI} { yylval->uri = strdup(yytext); return URI; }
|
||||
{SPATH} { yylval->path = {yytext, (size_t) yyleng}; return SPATH; }
|
||||
{URI} { yylval->uri = {yytext, (size_t) yyleng}; return URI; }
|
||||
|
||||
[ \t\r\n]+ /* eat up whitespace */
|
||||
\#[^\r\n]* /* single-line comments */
|
||||
|
|
|
@ -16,10 +16,10 @@ std::ostream & operator << (std::ostream & str, const Expr & e)
|
|||
return str;
|
||||
}
|
||||
|
||||
static void showString(std::ostream & str, const string & s)
|
||||
static void showString(std::ostream & str, std::string_view s)
|
||||
{
|
||||
str << '"';
|
||||
for (auto c : (string) s)
|
||||
for (auto c : s)
|
||||
if (c == '"' || c == '\\' || c == '$') str << "\\" << c;
|
||||
else if (c == '\n') str << "\\n";
|
||||
else if (c == '\r') str << "\\r";
|
||||
|
@ -28,7 +28,7 @@ static void showString(std::ostream & str, const string & s)
|
|||
str << '"';
|
||||
}
|
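showString above escapes backslash, double quote and '$' (so "${" cannot be re-read as interpolation) and rewrites newline and carriage return as escape sequences. A standalone sketch of that quoting, assuming for simplicity that these are the only characters needing treatment:

#include <iostream>
#include <string>
#include <string_view>

std::string quoteNixString(std::string_view s)
{
    std::string out = "\"";
    for (char c : s) {
        if (c == '"' || c == '\\' || c == '$') { out += '\\'; out += c; }
        else if (c == '\n') out += "\\n";
        else if (c == '\r') out += "\\r";
        else if (c == '\t') out += "\\t";
        else out += c;
    }
    out += '"';
    return out;
}

int main() { std::cout << quoteNixString("a\"b${x}\n") << "\n"; }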
||||
|
||||
static void showId(std::ostream & str, const string & s)
|
||||
static void showId(std::ostream & str, std::string_view s)
|
||||
{
|
||||
if (s.empty())
|
||||
str << "\"\"";
|
||||
|
@ -103,11 +103,18 @@ void ExprAttrs::show(std::ostream & str) const
|
|||
{
|
||||
if (recursive) str << "rec ";
|
||||
str << "{ ";
|
||||
for (auto & i : attrs)
|
||||
if (i.second.inherited)
|
||||
str << "inherit " << i.first << " " << "; ";
|
||||
typedef const decltype(attrs)::value_type * Attr;
|
||||
std::vector<Attr> sorted;
|
||||
for (auto & i : attrs) sorted.push_back(&i);
|
||||
std::sort(sorted.begin(), sorted.end(), [](Attr a, Attr b) {
|
||||
return (const std::string &) a->first < (const std::string &) b->first;
|
||||
});
|
||||
for (auto & i : sorted) {
|
||||
if (i->second.inherited)
|
||||
str << "inherit " << i->first << " " << "; ";
|
||||
else
|
||||
str << i.first << " = " << *i.second.e << "; ";
|
||||
str << i->first << " = " << *i->second.e << "; ";
|
||||
}
|
||||
for (auto & i : dynamicAttrs)
|
||||
str << "\"${" << *i.nameExpr << "}\" = " << *i.valueExpr << "; ";
|
||||
str << "}";
|
||||
|
@ -124,7 +131,7 @@ void ExprList::show(std::ostream & str) const
|
|||
void ExprLambda::show(std::ostream & str) const
|
||||
{
|
||||
str << "(";
|
||||
if (matchAttrs) {
|
||||
if (hasFormals()) {
|
||||
str << "{ ";
|
||||
bool first = true;
|
||||
for (auto & i : formals->formals) {
|
||||
|
@ -143,6 +150,16 @@ void ExprLambda::show(std::ostream & str) const
|
|||
str << ": " << *body << ")";
|
||||
}
|
||||
|
||||
void ExprCall::show(std::ostream & str) const
|
||||
{
|
||||
str << '(' << *fun;
|
||||
for (auto e : args) {
|
||||
str << ' ';
|
||||
str << *e;
|
||||
}
|
||||
str << ')';
|
||||
}
|
||||
|
||||
void ExprLet::show(std::ostream & str) const
|
||||
{
|
||||
str << "(let ";
|
||||
|
@ -181,7 +198,7 @@ void ExprConcatStrings::show(std::ostream & str) const
|
|||
str << "(";
|
||||
for (auto & i : *es) {
|
||||
if (first) first = false; else str << " + ";
|
||||
str << *i;
|
||||
str << *i.second;
|
||||
}
|
||||
str << ")";
|
||||
}
|
||||
|
@ -201,7 +218,7 @@ std::ostream & operator << (std::ostream & str, const Pos & pos)
|
|||
auto f = format(ANSI_BOLD "%1%" ANSI_NORMAL ":%2%:%3%");
|
||||
switch (pos.origin) {
|
||||
case foFile:
|
||||
f % (string) pos.file;
|
||||
f % (const std::string &) pos.file;
|
||||
break;
|
||||
case foStdin:
|
||||
case foString:
|
||||
|
@ -217,7 +234,7 @@ std::ostream & operator << (std::ostream & str, const Pos & pos)
|
|||
}
|
||||
|
||||
|
||||
string showAttrPath(const AttrPath & attrPath)
|
||||
std::string showAttrPath(const AttrPath & attrPath)
|
||||
{
|
||||
std::ostringstream out;
|
||||
bool first = true;
|
||||
|
@ -263,13 +280,13 @@ void ExprVar::bindVars(const StaticEnv & env)
|
|||
/* Check whether the variable appears in the environment. If so,
|
||||
set its level and displacement. */
|
||||
const StaticEnv * curEnv;
|
||||
unsigned int level;
|
||||
Level level;
|
||||
int withLevel = -1;
|
||||
for (curEnv = &env, level = 0; curEnv; curEnv = curEnv->up, level++) {
|
||||
if (curEnv->isWith) {
|
||||
if (withLevel == -1) withLevel = level;
|
||||
} else {
|
||||
StaticEnv::Vars::const_iterator i = curEnv->vars.find(name);
|
||||
auto i = curEnv->find(name);
|
||||
if (i != curEnv->vars.end()) {
|
||||
fromWith = false;
|
||||
this->level = level;
|
||||
|
@ -311,14 +328,16 @@ void ExprOpHasAttr::bindVars(const StaticEnv & env)
|
|||
void ExprAttrs::bindVars(const StaticEnv & env)
|
||||
{
|
||||
const StaticEnv * dynamicEnv = &env;
|
||||
StaticEnv newEnv(false, &env);
|
||||
StaticEnv newEnv(false, &env, recursive ? attrs.size() : 0);
|
||||
|
||||
if (recursive) {
|
||||
dynamicEnv = &newEnv;
|
||||
|
||||
unsigned int displ = 0;
|
||||
Displacement displ = 0;
|
||||
for (auto & i : attrs)
|
||||
newEnv.vars[i.first] = i.second.displ = displ++;
|
||||
newEnv.vars.emplace_back(i.first, i.second.displ = displ++);
|
||||
|
||||
// No need to sort newEnv since attrs is in sorted order.
|
||||
|
||||
for (auto & i : attrs)
|
||||
i.second.e->bindVars(i.second.inherited ? env : newEnv);
|
||||
|
@ -342,15 +361,20 @@ void ExprList::bindVars(const StaticEnv & env)
|
|||
|
||||
void ExprLambda::bindVars(const StaticEnv & env)
|
||||
{
|
||||
StaticEnv newEnv(false, &env);
|
||||
StaticEnv newEnv(
|
||||
false, &env,
|
||||
(hasFormals() ? formals->formals.size() : 0) +
|
||||
(arg.empty() ? 0 : 1));
|
||||
|
||||
unsigned int displ = 0;
|
||||
Displacement displ = 0;
|
||||
|
||||
if (!arg.empty()) newEnv.vars[arg] = displ++;
|
||||
if (!arg.empty()) newEnv.vars.emplace_back(arg, displ++);
|
||||
|
||||
if (matchAttrs) {
|
||||
if (hasFormals()) {
|
||||
for (auto & i : formals->formals)
|
||||
newEnv.vars[i.name] = displ++;
|
||||
newEnv.vars.emplace_back(i.name, displ++);
|
||||
|
||||
newEnv.sort();
|
||||
|
||||
for (auto & i : formals->formals)
|
||||
if (i.def) i.def->bindVars(newEnv);
|
||||
|
@ -359,13 +383,22 @@ void ExprLambda::bindVars(const StaticEnv & env)
|
|||
body->bindVars(newEnv);
|
||||
}
|
||||
|
||||
void ExprCall::bindVars(const StaticEnv & env)
|
||||
{
|
||||
fun->bindVars(env);
|
||||
for (auto e : args)
|
||||
e->bindVars(env);
|
||||
}
|
||||
|
||||
void ExprLet::bindVars(const StaticEnv & env)
|
||||
{
|
||||
StaticEnv newEnv(false, &env);
|
||||
StaticEnv newEnv(false, &env, attrs->attrs.size());
|
||||
|
||||
unsigned int displ = 0;
|
||||
Displacement displ = 0;
|
||||
for (auto & i : attrs->attrs)
|
||||
newEnv.vars[i.first] = i.second.displ = displ++;
|
||||
newEnv.vars.emplace_back(i.first, i.second.displ = displ++);
|
||||
|
||||
// No need to sort newEnv since attrs->attrs is in sorted order.
|
||||
|
||||
for (auto & i : attrs->attrs)
|
||||
i.second.e->bindVars(i.second.inherited ? env : newEnv);
|
||||
|
@ -379,7 +412,7 @@ void ExprWith::bindVars(const StaticEnv & env)
|
|||
level so that `lookupVar' can look up variables in the previous
|
||||
`with' if this one doesn't contain the desired attribute. */
|
||||
const StaticEnv * curEnv;
|
||||
unsigned int level;
|
||||
Level level;
|
||||
prevWith = 0;
|
||||
for (curEnv = &env, level = 1; curEnv; curEnv = curEnv->up, level++)
|
||||
if (curEnv->isWith) {
|
||||
|
@ -413,7 +446,7 @@ void ExprOpNot::bindVars(const StaticEnv & env)
|
|||
void ExprConcatStrings::bindVars(const StaticEnv & env)
|
||||
{
|
||||
for (auto & i : *es)
|
||||
i->bindVars(env);
|
||||
i.second->bindVars(env);
|
||||
}
|
||||
|
||||
void ExprPos::bindVars(const StaticEnv & env)
|
||||
|
@ -435,9 +468,9 @@ void ExprLambda::setName(Symbol & name)
|
|||
}
|
||||
|
||||
|
||||
string ExprLambda::showNamePos() const
|
||||
std::string ExprLambda::showNamePos() const
|
||||
{
|
||||
return (format("%1% at %2%") % (name.set() ? "'" + (string) name + "'" : "anonymous function") % pos).str();
|
||||
return fmt("%1% at %2%", name.set() ? "'" + (std::string) name + "'" : "anonymous function", pos);
|
||||
}
|
||||
|
||||
|
||||
|
@ -447,10 +480,9 @@ string ExprLambda::showNamePos() const
|
|||
size_t SymbolTable::totalSize() const
|
||||
{
|
||||
size_t n = 0;
|
||||
for (auto & i : symbols)
|
||||
for (auto & i : store)
|
||||
n += i.size();
|
||||
return n;
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
|
|
@ -4,8 +4,6 @@
|
|||
#include "symbol-table.hh"
|
||||
#include "error.hh"
|
||||
|
||||
#include <map>
|
||||
|
||||
|
||||
namespace nix {
|
||||
|
||||
|
@ -28,18 +26,21 @@ struct Pos
|
|||
FileOrigin origin;
|
||||
Symbol file;
|
||||
unsigned int line, column;
|
||||
Pos() : origin(foString), line(0), column(0) { };
|
||||
|
||||
Pos() : origin(foString), line(0), column(0) { }
|
||||
Pos(FileOrigin origin, const Symbol & file, unsigned int line, unsigned int column)
|
||||
: origin(origin), file(file), line(line), column(column) { };
|
||||
: origin(origin), file(file), line(line), column(column) { }
|
||||
|
||||
operator bool() const
|
||||
{
|
||||
return line != 0;
|
||||
}
|
||||
|
||||
bool operator < (const Pos & p2) const
|
||||
{
|
||||
if (!line) return p2.line;
|
||||
if (!p2.line) return false;
|
||||
int d = ((string) file).compare((string) p2.file);
|
||||
int d = ((const std::string &) file).compare((const std::string &) p2.file);
|
||||
if (d < 0) return true;
|
||||
if (d > 0) return false;
|
||||
if (line < p2.line) return true;
|
||||
|
@ -70,7 +71,7 @@ struct AttrName
|
|||
|
||||
typedef std::vector<AttrName> AttrPath;
|
||||
|
||||
string showAttrPath(const AttrPath & attrPath);
|
||||
std::string showAttrPath(const AttrPath & attrPath);
|
||||
|
||||
|
||||
/* Abstract syntax of Nix expressions. */
|
||||
|
@ -96,7 +97,7 @@ struct ExprInt : Expr
|
|||
{
|
||||
NixInt n;
|
||||
Value v;
|
||||
ExprInt(NixInt n) : n(n) { mkInt(v, n); };
|
||||
ExprInt(NixInt n) : n(n) { v.mkInt(n); };
|
||||
COMMON_METHODS
|
||||
Value * maybeThunk(EvalState & state, Env & env);
|
||||
};
|
||||
|
@ -105,36 +106,32 @@ struct ExprFloat : Expr
|
|||
{
|
||||
NixFloat nf;
|
||||
Value v;
|
||||
ExprFloat(NixFloat nf) : nf(nf) { mkFloat(v, nf); };
|
||||
ExprFloat(NixFloat nf) : nf(nf) { v.mkFloat(nf); };
|
||||
COMMON_METHODS
|
||||
Value * maybeThunk(EvalState & state, Env & env);
|
||||
};
|
||||
|
||||
struct ExprString : Expr
|
||||
{
|
||||
Symbol s;
|
||||
std::string s;
|
||||
Value v;
|
||||
ExprString(const Symbol & s) : s(s) { mkString(v, s); };
|
||||
ExprString(std::string s) : s(std::move(s)) { v.mkString(this->s.data()); };
|
||||
COMMON_METHODS
|
||||
Value * maybeThunk(EvalState & state, Env & env);
|
||||
};
|
||||
|
||||
/* Temporary class used during parsing of indented strings. */
|
||||
struct ExprIndStr : Expr
|
||||
{
|
||||
string s;
|
||||
ExprIndStr(const string & s) : s(s) { };
|
||||
};
|
||||
|
||||
struct ExprPath : Expr
|
||||
{
|
||||
string s;
|
||||
std::string s;
|
||||
Value v;
|
||||
ExprPath(const string & s) : s(s) { v.mkPath(this->s.c_str()); };
|
||||
ExprPath(std::string s) : s(std::move(s)) { v.mkPath(this->s.c_str()); };
|
||||
COMMON_METHODS
|
||||
Value * maybeThunk(EvalState & state, Env & env);
|
||||
};
|
||||
|
||||
typedef uint32_t Level;
|
||||
typedef uint32_t Displacement;
|
||||
|
||||
struct ExprVar : Expr
|
||||
{
|
||||
Pos pos;
|
||||
|
@ -150,8 +147,8 @@ struct ExprVar : Expr
|
|||
value is obtained by getting the attribute named `name' from
|
||||
the set stored in the environment that is `level' levels up
|
||||
from the current one.*/
|
||||
unsigned int level;
|
||||
unsigned int displ;
|
||||
Level level;
|
||||
Displacement displ;
|
||||
|
||||
ExprVar(const Symbol & name) : name(name) { };
|
||||
ExprVar(const Pos & pos, const Symbol & name) : pos(pos), name(name) { };
|
||||
|
@ -185,7 +182,7 @@ struct ExprAttrs : Expr
|
|||
bool inherited;
|
||||
Expr * e;
|
||||
Pos pos;
|
||||
unsigned int displ; // displacement
|
||||
Displacement displ; // displacement
|
||||
AttrDef(Expr * e, const Pos & pos, bool inherited=false)
|
||||
: inherited(inherited), e(e), pos(pos) { };
|
||||
AttrDef() { };
|
||||
|
@ -222,10 +219,25 @@ struct Formal
|
|||
|
||||
struct Formals
|
||||
{
|
||||
typedef std::list<Formal> Formals_;
|
||||
typedef std::vector<Formal> Formals_;
|
||||
Formals_ formals;
|
||||
std::set<Symbol> argNames; // used during parsing
|
||||
bool ellipsis;
|
||||
|
||||
bool has(Symbol arg) const {
|
||||
auto it = std::lower_bound(formals.begin(), formals.end(), arg,
|
||||
[] (const Formal & f, const Symbol & sym) { return f.name < sym; });
|
||||
return it != formals.end() && it->name == arg;
|
||||
}
|
||||
|
||||
std::vector<Formal> lexicographicOrder() const
|
||||
{
|
||||
std::vector<Formal> result(formals.begin(), formals.end());
|
||||
std::sort(result.begin(), result.end(),
|
||||
[] (const Formal & a, const Formal & b) {
|
||||
return std::string_view(a.name) < std::string_view(b.name);
|
||||
});
|
||||
return result;
|
||||
}
|
||||
};
|
||||
|
||||
struct ExprLambda : Expr
|
||||
|
@ -233,20 +245,26 @@ struct ExprLambda : Expr
|
|||
Pos pos;
|
||||
Symbol name;
|
||||
Symbol arg;
|
||||
bool matchAttrs;
|
||||
Formals * formals;
|
||||
Expr * body;
|
||||
ExprLambda(const Pos & pos, const Symbol & arg, bool matchAttrs, Formals * formals, Expr * body)
|
||||
: pos(pos), arg(arg), matchAttrs(matchAttrs), formals(formals), body(body)
|
||||
ExprLambda(const Pos & pos, const Symbol & arg, Formals * formals, Expr * body)
|
||||
: pos(pos), arg(arg), formals(formals), body(body)
|
||||
{
|
||||
if (!arg.empty() && formals && formals->argNames.find(arg) != formals->argNames.end())
|
||||
throw ParseError({
|
||||
.msg = hintfmt("duplicate formal function argument '%1%'", arg),
|
||||
.errPos = pos
|
||||
});
|
||||
};
|
||||
void setName(Symbol & name);
|
||||
string showNamePos() const;
|
||||
std::string showNamePos() const;
|
||||
inline bool hasFormals() const { return formals != nullptr; }
|
||||
COMMON_METHODS
|
||||
};
|
||||
|
||||
struct ExprCall : Expr
|
||||
{
|
||||
Expr * fun;
|
||||
std::vector<Expr *> args;
|
||||
Pos pos;
|
||||
ExprCall(const Pos & pos, Expr * fun, std::vector<Expr *> && args)
|
||||
: fun(fun), args(args), pos(pos)
|
||||
{ }
|
||||
COMMON_METHODS
|
||||
};
|
||||
|
||||
|
@ -308,7 +326,6 @@ struct ExprOpNot : Expr
|
|||
void eval(EvalState & state, Env & env, Value & v); \
|
||||
};
|
||||
|
||||
MakeBinOp(ExprApp, "")
|
||||
MakeBinOp(ExprOpEq, "==")
|
||||
MakeBinOp(ExprOpNEq, "!=")
|
||||
MakeBinOp(ExprOpAnd, "&&")
|
||||
|
@ -321,8 +338,8 @@ struct ExprConcatStrings : Expr
|
|||
{
|
||||
Pos pos;
|
||||
bool forceString;
|
||||
vector<Expr *> * es;
|
||||
ExprConcatStrings(const Pos & pos, bool forceString, vector<Expr *> * es)
|
||||
std::vector<std::pair<Pos, Expr *> > * es;
|
||||
ExprConcatStrings(const Pos & pos, bool forceString, std::vector<std::pair<Pos, Expr *> > * es)
|
||||
: pos(pos), forceString(forceString), es(es) { };
|
||||
COMMON_METHODS
|
||||
};
|
||||
|
@ -342,9 +359,39 @@ struct StaticEnv
{
bool isWith;
const StaticEnv * up;
typedef std::map<Symbol, unsigned int> Vars;

// Note: these must be in sorted order.
typedef std::vector<std::pair<Symbol, Displacement>> Vars;
Vars vars;
StaticEnv(bool isWith, const StaticEnv * up) : isWith(isWith), up(up) { };

StaticEnv(bool isWith, const StaticEnv * up, size_t expectedSize = 0) : isWith(isWith), up(up) {
vars.reserve(expectedSize);
};

void sort()
{
std::stable_sort(vars.begin(), vars.end(),
[](const Vars::value_type & a, const Vars::value_type & b) { return a.first < b.first; });
}

void deduplicate()
{
auto it = vars.begin(), jt = it, end = vars.end();
while (jt != end) {
*it = *jt++;
while (jt != end && it->first == jt->first) *it = *jt++;
it++;
}
vars.erase(it, end);
}

Vars::const_iterator find(const Symbol & name) const
{
Vars::value_type key(name, 0);
auto i = std::lower_bound(vars.begin(), vars.end(), key);
if (i != vars.end() && i->first == name) return i;
return vars.end();
}
};
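The hunk above replaces the `std::map`-based `StaticEnv::Vars` with a sorted `std::vector`, so variable lookup becomes a binary search and runs of repeated names are squeezed out after insertion. A minimal, self-contained sketch of the same pattern (illustrative only; `MiniEnv` and the plain `std::string` keys are not from the Nix sources):

```cpp
#include <algorithm>
#include <cstdint>
#include <iostream>
#include <string>
#include <utility>
#include <vector>

// Toy stand-in for StaticEnv: a sorted vector of (name, displacement) pairs.
struct MiniEnv {
    using Vars = std::vector<std::pair<std::string, uint32_t>>;
    Vars vars;

    // Sort by name; entries with equal names keep insertion order (stable sort).
    void sort() {
        std::stable_sort(vars.begin(), vars.end(),
            [](const Vars::value_type & a, const Vars::value_type & b) { return a.first < b.first; });
    }

    // Collapse runs of equal names, keeping the last entry in each run.
    void deduplicate() {
        auto it = vars.begin(), jt = it, end = vars.end();
        while (jt != end) {
            *it = *jt++;
            while (jt != end && it->first == jt->first) *it = *jt++;
            it++;
        }
        vars.erase(it, end);
    }

    // Binary search instead of std::map::find.
    Vars::const_iterator find(const std::string & name) const {
        Vars::value_type key(name, 0);
        auto i = std::lower_bound(vars.begin(), vars.end(), key);
        if (i != vars.end() && i->first == name) return i;
        return vars.end();
    }
};

int main() {
    MiniEnv env;
    env.vars = {{"x", 0}, {"y", 1}, {"x", 2}};
    env.sort();
    env.deduplicate();
    auto i = env.find("x");
    if (i != env.vars.end())
        std::cout << i->first << " -> " << i->second << "\n"; // prints "x -> 2"
}
```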
|
||||
|
||||
|
|
|
@ -16,6 +16,8 @@
|
|||
#ifndef BISON_HEADER
|
||||
#define BISON_HEADER
|
||||
|
||||
#include <variant>
|
||||
|
||||
#include "util.hh"
|
||||
|
||||
#include "nixexpr.hh"
|
||||
|
@ -33,16 +35,28 @@ namespace nix {
|
|||
Symbol file;
|
||||
FileOrigin origin;
|
||||
std::optional<ErrorInfo> error;
|
||||
Symbol sLetBody;
|
||||
ParseData(EvalState & state)
|
||||
: state(state)
|
||||
, symbols(state.symbols)
|
||||
, sLetBody(symbols.create("<let-body>"))
|
||||
{ };
|
||||
};
|
||||
|
||||
struct ParserFormals {
|
||||
std::vector<Formal> formals;
|
||||
bool ellipsis = false;
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
// using C a struct allows us to avoid having to define the special
|
||||
// members that using string_view here would implicitly delete.
|
||||
struct StringToken {
|
||||
const char * p;
|
||||
size_t l;
|
||||
bool hasIndentation;
|
||||
operator std::string_view() const { return {p, l}; }
|
||||
};
|
||||
|
||||
#define YY_DECL int yylex \
|
||||
(YYSTYPE * yylval_param, YYLTYPE * yylloc_param, yyscan_t yyscanner, nix::ParseData * data)
|
||||
|
||||
|
@ -126,14 +140,14 @@ static void addAttr(ExprAttrs * attrs, AttrPath & attrPath,
|
|||
auto j2 = jAttrs->attrs.find(ad.first);
|
||||
if (j2 != jAttrs->attrs.end()) // Attr already defined in iAttrs, error.
|
||||
dupAttr(ad.first, j2->second.pos, ad.second.pos);
|
||||
jAttrs->attrs[ad.first] = ad.second;
|
||||
jAttrs->attrs.emplace(ad.first, ad.second);
|
||||
}
|
||||
} else {
|
||||
dupAttr(attrPath, pos, j->second.pos);
|
||||
}
|
||||
} else {
|
||||
// This attr path is not defined. Let's create it.
|
||||
attrs->attrs[i->symbol] = ExprAttrs::AttrDef(e, pos);
|
||||
attrs->attrs.emplace(i->symbol, ExprAttrs::AttrDef(e, pos));
|
||||
e->setName(i->symbol);
|
||||
}
|
||||
} else {
|
||||
|
@ -142,21 +156,46 @@ static void addAttr(ExprAttrs * attrs, AttrPath & attrPath,
|
|||
}
|
||||
|
||||
|
||||
static void addFormal(const Pos & pos, Formals * formals, const Formal & formal)
static Formals * toFormals(ParseData & data, ParserFormals * formals,
Pos pos = noPos, Symbol arg = {})
{
if (!formals->argNames.insert(formal.name).second)
std::sort(formals->formals.begin(), formals->formals.end(),
[] (const auto & a, const auto & b) {
return std::tie(a.name, a.pos) < std::tie(b.name, b.pos);
});

std::optional<std::pair<Symbol, Pos>> duplicate;
for (size_t i = 0; i + 1 < formals->formals.size(); i++) {
if (formals->formals[i].name != formals->formals[i + 1].name)
continue;
std::pair thisDup{formals->formals[i].name, formals->formals[i + 1].pos};
duplicate = std::min(thisDup, duplicate.value_or(thisDup));
}
if (duplicate)
throw ParseError({
.msg = hintfmt("duplicate formal function argument '%1%'",
formal.name),
.msg = hintfmt("duplicate formal function argument '%1%'", duplicate->first),
.errPos = duplicate->second
});

Formals result;
result.ellipsis = formals->ellipsis;
result.formals = std::move(formals->formals);

if (arg.set() && result.has(arg))
throw ParseError({
.msg = hintfmt("duplicate formal function argument '%1%'", arg),
.errPos = pos
});
formals->formals.push_front(formal);

delete formals;
return new Formals(std::move(result));
}
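The new `toFormals` above detects duplicate formal arguments by sorting on (name, position) and then comparing neighbouring entries, reporting the duplicate with the smallest name and position. A compilable sketch of just that detection step (the `ToyFormal` struct and integer positions are illustrative, not the Nix types):

```cpp
#include <algorithm>
#include <iostream>
#include <optional>
#include <string>
#include <tuple>
#include <utility>
#include <vector>

struct ToyFormal {
    std::string name;
    int pos; // stand-in for a source position
};

// Returns the (name, position) of the first duplicate, if any.
std::optional<std::pair<std::string, int>> findDuplicate(std::vector<ToyFormal> formals) {
    std::sort(formals.begin(), formals.end(),
        [](const ToyFormal & a, const ToyFormal & b) {
            return std::tie(a.name, a.pos) < std::tie(b.name, b.pos);
        });

    std::optional<std::pair<std::string, int>> duplicate;
    for (size_t i = 0; i + 1 < formals.size(); i++) {
        if (formals[i].name != formals[i + 1].name) continue;
        std::pair thisDup{formals[i].name, formals[i + 1].pos};
        duplicate = std::min(thisDup, duplicate.value_or(thisDup));
    }
    return duplicate;
}

int main() {
    auto dup = findDuplicate({{"x", 3}, {"y", 7}, {"x", 12}});
    if (dup)
        std::cout << "duplicate formal '" << dup->first << "' at position " << dup->second << "\n";
}
```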
|
||||
|
||||
static Expr * stripIndentation(const Pos & pos, SymbolTable & symbols, vector<Expr *> & es)
|
||||
static Expr * stripIndentation(const Pos & pos, SymbolTable & symbols,
|
||||
std::vector<std::pair<Pos, std::variant<Expr *, StringToken> > > & es)
|
||||
{
|
||||
if (es.empty()) return new ExprString(symbols.create(""));
|
||||
if (es.empty()) return new ExprString("");
|
||||
|
||||
/* Figure out the minimum indentation. Note that by design
|
||||
whitespace-only final lines are not taken into account. (So
|
||||
|
@ -164,21 +203,21 @@ static Expr * stripIndentation(const Pos & pos, SymbolTable & symbols, vector<Ex
|
|||
bool atStartOfLine = true; /* = seen only whitespace in the current line */
|
||||
size_t minIndent = 1000000;
|
||||
size_t curIndent = 0;
|
||||
for (auto & i : es) {
|
||||
ExprIndStr * e = dynamic_cast<ExprIndStr *>(i);
|
||||
if (!e) {
|
||||
/* Anti-quotations end the current start-of-line whitespace. */
|
||||
for (auto & [i_pos, i] : es) {
|
||||
auto * str = std::get_if<StringToken>(&i);
|
||||
if (!str || !str->hasIndentation) {
|
||||
/* Anti-quotations and escaped characters end the current start-of-line whitespace. */
|
||||
if (atStartOfLine) {
|
||||
atStartOfLine = false;
|
||||
if (curIndent < minIndent) minIndent = curIndent;
|
||||
}
|
||||
continue;
|
||||
}
|
||||
for (size_t j = 0; j < e->s.size(); ++j) {
|
||||
for (size_t j = 0; j < str->l; ++j) {
|
||||
if (atStartOfLine) {
|
||||
if (e->s[j] == ' ')
|
||||
if (str->p[j] == ' ')
|
||||
curIndent++;
|
||||
else if (e->s[j] == '\n') {
|
||||
else if (str->p[j] == '\n') {
|
||||
/* Empty line, doesn't influence minimum
|
||||
indentation. */
|
||||
curIndent = 0;
|
||||
|
@ -186,7 +225,7 @@ static Expr * stripIndentation(const Pos & pos, SymbolTable & symbols, vector<Ex
|
|||
atStartOfLine = false;
|
||||
if (curIndent < minIndent) minIndent = curIndent;
|
||||
}
|
||||
} else if (e->s[j] == '\n') {
|
||||
} else if (str->p[j] == '\n') {
|
||||
atStartOfLine = true;
|
||||
curIndent = 0;
|
||||
}
|
||||
|
@ -194,53 +233,54 @@ static Expr * stripIndentation(const Pos & pos, SymbolTable & symbols, vector<Ex
|
|||
}
|
||||
|
||||
/* Strip spaces from each line. */
|
||||
vector<Expr *> * es2 = new vector<Expr *>;
|
||||
std::vector<std::pair<Pos, Expr *> > * es2 = new std::vector<std::pair<Pos, Expr *> >;
|
||||
atStartOfLine = true;
|
||||
size_t curDropped = 0;
|
||||
size_t n = es.size();
|
||||
for (vector<Expr *>::iterator i = es.begin(); i != es.end(); ++i, --n) {
|
||||
ExprIndStr * e = dynamic_cast<ExprIndStr *>(*i);
|
||||
if (!e) {
|
||||
atStartOfLine = false;
|
||||
curDropped = 0;
|
||||
es2->push_back(*i);
|
||||
continue;
|
||||
}
|
||||
|
||||
string s2;
|
||||
for (size_t j = 0; j < e->s.size(); ++j) {
|
||||
auto i = es.begin();
|
||||
const auto trimExpr = [&] (Expr * e) {
|
||||
atStartOfLine = false;
|
||||
curDropped = 0;
|
||||
es2->emplace_back(i->first, e);
|
||||
};
|
||||
const auto trimString = [&] (const StringToken & t) {
|
||||
std::string s2;
|
||||
for (size_t j = 0; j < t.l; ++j) {
|
||||
if (atStartOfLine) {
|
||||
if (e->s[j] == ' ') {
|
||||
if (t.p[j] == ' ') {
|
||||
if (curDropped++ >= minIndent)
|
||||
s2 += e->s[j];
|
||||
s2 += t.p[j];
|
||||
}
|
||||
else if (e->s[j] == '\n') {
|
||||
else if (t.p[j] == '\n') {
|
||||
curDropped = 0;
|
||||
s2 += e->s[j];
|
||||
s2 += t.p[j];
|
||||
} else {
|
||||
atStartOfLine = false;
|
||||
curDropped = 0;
|
||||
s2 += e->s[j];
|
||||
s2 += t.p[j];
|
||||
}
|
||||
} else {
|
||||
s2 += e->s[j];
|
||||
if (e->s[j] == '\n') atStartOfLine = true;
|
||||
s2 += t.p[j];
|
||||
if (t.p[j] == '\n') atStartOfLine = true;
|
||||
}
|
||||
}
|
||||
|
||||
/* Remove the last line if it is empty and consists only of
|
||||
spaces. */
|
||||
if (n == 1) {
|
||||
string::size_type p = s2.find_last_of('\n');
|
||||
if (p != string::npos && s2.find_first_not_of(' ', p + 1) == string::npos)
|
||||
s2 = string(s2, 0, p + 1);
|
||||
std::string::size_type p = s2.find_last_of('\n');
|
||||
if (p != std::string::npos && s2.find_first_not_of(' ', p + 1) == std::string::npos)
|
||||
s2 = std::string(s2, 0, p + 1);
|
||||
}
|
||||
|
||||
es2->push_back(new ExprString(symbols.create(s2)));
|
||||
es2->emplace_back(i->first, new ExprString(s2));
|
||||
};
|
||||
for (; i != es.end(); ++i, --n) {
|
||||
std::visit(overloaded { trimExpr, trimString }, i->second);
|
||||
}
|
||||
|
||||
/* If this is a single string, then don't do a concatenation. */
|
||||
return es2->size() == 1 && dynamic_cast<ExprString *>((*es2)[0]) ? (*es2)[0] : new ExprConcatStrings(pos, true, es2);
|
||||
return es2->size() == 1 && dynamic_cast<ExprString *>((*es2)[0].second) ? (*es2)[0].second : new ExprConcatStrings(pos, true, es2);
|
||||
}
|
||||
|
||||
|
||||
|
@ -271,29 +311,32 @@ void yyerror(YYLTYPE * loc, yyscan_t scanner, ParseData * data, const char * err
|
|||
nix::Expr * e;
|
||||
nix::ExprList * list;
|
||||
nix::ExprAttrs * attrs;
|
||||
nix::Formals * formals;
|
||||
nix::ParserFormals * formals;
|
||||
nix::Formal * formal;
|
||||
nix::NixInt n;
|
||||
nix::NixFloat nf;
|
||||
const char * id; // !!! -> Symbol
|
||||
char * path;
|
||||
char * uri;
|
||||
StringToken id; // !!! -> Symbol
|
||||
StringToken path;
|
||||
StringToken uri;
|
||||
StringToken str;
|
||||
std::vector<nix::AttrName> * attrNames;
|
||||
std::vector<nix::Expr *> * string_parts;
|
||||
std::vector<std::pair<nix::Pos, nix::Expr *> > * string_parts;
|
||||
std::vector<std::pair<nix::Pos, std::variant<nix::Expr *, StringToken> > > * ind_string_parts;
|
||||
}
|
||||
|
||||
%type <e> start expr expr_function expr_if expr_op
|
||||
%type <e> expr_app expr_select expr_simple
|
||||
%type <e> expr_select expr_simple expr_app
|
||||
%type <list> expr_list
|
||||
%type <attrs> binds
|
||||
%type <formals> formals
|
||||
%type <formal> formal
|
||||
%type <attrNames> attrs attrpath
|
||||
%type <string_parts> string_parts_interpolated ind_string_parts
|
||||
%type <string_parts> string_parts_interpolated
|
||||
%type <ind_string_parts> ind_string_parts
|
||||
%type <e> path_start string_parts string_attr
|
||||
%type <id> attr
|
||||
%token <id> ID ATTRPATH
|
||||
%token <e> STR IND_STR
|
||||
%token <str> STR IND_STR
|
||||
%token <n> INT
|
||||
%token <nf> FLOAT
|
||||
%token <path> PATH HPATH SPATH PATH_END
|
||||
|
@ -324,13 +367,19 @@ expr: expr_function;
|
|||
|
||||
expr_function
|
||||
: ID ':' expr_function
|
||||
{ $$ = new ExprLambda(CUR_POS, data->symbols.create($1), false, 0, $3); }
|
||||
{ $$ = new ExprLambda(CUR_POS, data->symbols.create($1), 0, $3); }
|
||||
| '{' formals '}' ':' expr_function
|
||||
{ $$ = new ExprLambda(CUR_POS, data->symbols.create(""), true, $2, $5); }
|
||||
{ $$ = new ExprLambda(CUR_POS, data->symbols.create(""), toFormals(*data, $2), $5); }
|
||||
| '{' formals '}' '@' ID ':' expr_function
|
||||
{ $$ = new ExprLambda(CUR_POS, data->symbols.create($5), true, $2, $7); }
|
||||
{
|
||||
Symbol arg = data->symbols.create($5);
|
||||
$$ = new ExprLambda(CUR_POS, arg, toFormals(*data, $2, CUR_POS, arg), $7);
|
||||
}
|
||||
| ID '@' '{' formals '}' ':' expr_function
|
||||
{ $$ = new ExprLambda(CUR_POS, data->symbols.create($1), true, $4, $7); }
|
||||
{
|
||||
Symbol arg = data->symbols.create($1);
|
||||
$$ = new ExprLambda(CUR_POS, arg, toFormals(*data, $4, CUR_POS, arg), $7);
|
||||
}
|
||||
| ASSERT expr ';' expr_function
|
||||
{ $$ = new ExprAssert(CUR_POS, $2, $4); }
|
||||
| WITH expr ';' expr_function
|
||||
|
@ -353,31 +402,36 @@ expr_if
|
|||
|
||||
expr_op
|
||||
: '!' expr_op %prec NOT { $$ = new ExprOpNot($2); }
|
||||
| '-' expr_op %prec NEGATE { $$ = new ExprApp(CUR_POS, new ExprApp(new ExprVar(data->symbols.create("__sub")), new ExprInt(0)), $2); }
|
||||
| '-' expr_op %prec NEGATE { $$ = new ExprCall(CUR_POS, new ExprVar(data->symbols.create("__sub")), {new ExprInt(0), $2}); }
|
||||
| expr_op EQ expr_op { $$ = new ExprOpEq($1, $3); }
|
||||
| expr_op NEQ expr_op { $$ = new ExprOpNEq($1, $3); }
|
||||
| expr_op '<' expr_op { $$ = new ExprApp(CUR_POS, new ExprApp(new ExprVar(data->symbols.create("__lessThan")), $1), $3); }
|
||||
| expr_op LEQ expr_op { $$ = new ExprOpNot(new ExprApp(CUR_POS, new ExprApp(new ExprVar(data->symbols.create("__lessThan")), $3), $1)); }
|
||||
| expr_op '>' expr_op { $$ = new ExprApp(CUR_POS, new ExprApp(new ExprVar(data->symbols.create("__lessThan")), $3), $1); }
|
||||
| expr_op GEQ expr_op { $$ = new ExprOpNot(new ExprApp(CUR_POS, new ExprApp(new ExprVar(data->symbols.create("__lessThan")), $1), $3)); }
|
||||
| expr_op '<' expr_op { $$ = new ExprCall(CUR_POS, new ExprVar(data->symbols.create("__lessThan")), {$1, $3}); }
|
||||
| expr_op LEQ expr_op { $$ = new ExprOpNot(new ExprCall(CUR_POS, new ExprVar(data->symbols.create("__lessThan")), {$3, $1})); }
|
||||
| expr_op '>' expr_op { $$ = new ExprCall(CUR_POS, new ExprVar(data->symbols.create("__lessThan")), {$3, $1}); }
|
||||
| expr_op GEQ expr_op { $$ = new ExprOpNot(new ExprCall(CUR_POS, new ExprVar(data->symbols.create("__lessThan")), {$1, $3})); }
|
||||
| expr_op AND expr_op { $$ = new ExprOpAnd(CUR_POS, $1, $3); }
|
||||
| expr_op OR expr_op { $$ = new ExprOpOr(CUR_POS, $1, $3); }
|
||||
| expr_op IMPL expr_op { $$ = new ExprOpImpl(CUR_POS, $1, $3); }
|
||||
| expr_op UPDATE expr_op { $$ = new ExprOpUpdate(CUR_POS, $1, $3); }
|
||||
| expr_op '?' attrpath { $$ = new ExprOpHasAttr($1, *$3); }
|
||||
| expr_op '+' expr_op
|
||||
{ $$ = new ExprConcatStrings(CUR_POS, false, new vector<Expr *>({$1, $3})); }
|
||||
| expr_op '-' expr_op { $$ = new ExprApp(CUR_POS, new ExprApp(new ExprVar(data->symbols.create("__sub")), $1), $3); }
|
||||
| expr_op '*' expr_op { $$ = new ExprApp(CUR_POS, new ExprApp(new ExprVar(data->symbols.create("__mul")), $1), $3); }
|
||||
| expr_op '/' expr_op { $$ = new ExprApp(CUR_POS, new ExprApp(new ExprVar(data->symbols.create("__div")), $1), $3); }
|
||||
{ $$ = new ExprConcatStrings(CUR_POS, false, new std::vector<std::pair<Pos, Expr *> >({{makeCurPos(@1, data), $1}, {makeCurPos(@3, data), $3}})); }
|
||||
| expr_op '-' expr_op { $$ = new ExprCall(CUR_POS, new ExprVar(data->symbols.create("__sub")), {$1, $3}); }
|
||||
| expr_op '*' expr_op { $$ = new ExprCall(CUR_POS, new ExprVar(data->symbols.create("__mul")), {$1, $3}); }
|
||||
| expr_op '/' expr_op { $$ = new ExprCall(CUR_POS, new ExprVar(data->symbols.create("__div")), {$1, $3}); }
|
||||
| expr_op CONCAT expr_op { $$ = new ExprOpConcatLists(CUR_POS, $1, $3); }
|
||||
| expr_app
|
||||
;
|
||||
|
||||
expr_app
: expr_app expr_select
{ $$ = new ExprApp(CUR_POS, $1, $2); }
| expr_select { $$ = $1; }
: expr_app expr_select {
if (auto e2 = dynamic_cast<ExprCall *>($1)) {
e2->args.push_back($2);
$$ = $1;
} else
$$ = new ExprCall(CUR_POS, $1, {$2});
}
| expr_select
;

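In the grammar change above, repeated function application no longer builds a chain of binary `ExprApp` nodes: the first application creates an `ExprCall`, and each further argument is pushed onto its argument vector, so `f a b c` ends up as a single call node with three arguments. A small standalone model of that flattening (the `Node`/`Var`/`Call` types here are illustrative, not the Nix AST):

```cpp
#include <iostream>
#include <string>
#include <vector>

// Minimal expression model: either a variable or a call with an argument list.
struct Node {
    virtual ~Node() = default;
};

struct Var : Node {
    std::string name;
    explicit Var(std::string n) : name(std::move(n)) {}
};

struct Call : Node {
    Node * fun;
    std::vector<Node *> args;
    Call(Node * fun, std::vector<Node *> args) : fun(fun), args(std::move(args)) {}
};

// Mirrors the parser action: extend an existing call, otherwise start a new one.
Node * apply(Node * fun, Node * arg) {
    if (auto call = dynamic_cast<Call *>(fun)) {
        call->args.push_back(arg);
        return call;
    }
    return new Call(fun, {arg});
}

int main() {
    Node * f = new Var("f");
    Node * e = apply(apply(apply(f, new Var("a")), new Var("b")), new Var("c"));
    auto call = dynamic_cast<Call *>(e);
    std::cout << "one call node with " << call->args.size() << " arguments\n"; // prints 3
    // (Leaks are ignored here; a real parser owns its AST nodes elsewhere.)
}
```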
expr_select
|
||||
|
@ -388,13 +442,14 @@ expr_select
|
|||
| /* Backwards compatibility: because Nixpkgs has a rarely used
|
||||
function named ‘or’, allow stuff like ‘map or [...]’. */
|
||||
expr_simple OR_KW
|
||||
{ $$ = new ExprApp(CUR_POS, $1, new ExprVar(CUR_POS, data->symbols.create("or"))); }
|
||||
{ $$ = new ExprCall(CUR_POS, $1, {new ExprVar(CUR_POS, data->symbols.create("or"))}); }
|
||||
| expr_simple { $$ = $1; }
|
||||
;
|
||||
|
||||
expr_simple
|
||||
: ID {
|
||||
if (strcmp($1, "__curPos") == 0)
|
||||
std::string_view s = "__curPos";
|
||||
if ($1.l == s.size() && strncmp($1.p, s.data(), s.size()) == 0)
|
||||
$$ = new ExprPos(CUR_POS);
|
||||
else
|
||||
$$ = new ExprVar(CUR_POS, data->symbols.create($1));
|
||||
|
@ -407,24 +462,24 @@ expr_simple
|
|||
}
|
||||
| path_start PATH_END { $$ = $1; }
|
||||
| path_start string_parts_interpolated PATH_END {
|
||||
$2->insert($2->begin(), $1);
|
||||
$2->insert($2->begin(), {makeCurPos(@1, data), $1});
|
||||
$$ = new ExprConcatStrings(CUR_POS, false, $2);
|
||||
}
|
||||
| SPATH {
|
||||
string path($1 + 1, strlen($1) - 2);
|
||||
$$ = new ExprApp(CUR_POS,
|
||||
new ExprApp(new ExprVar(data->symbols.create("__findFile")),
|
||||
new ExprVar(data->symbols.create("__nixPath"))),
|
||||
new ExprString(data->symbols.create(path)));
|
||||
std::string path($1.p + 1, $1.l - 2);
|
||||
$$ = new ExprCall(CUR_POS,
|
||||
new ExprVar(data->symbols.create("__findFile")),
|
||||
{new ExprVar(data->symbols.create("__nixPath")),
|
||||
new ExprString(path)});
|
||||
}
|
||||
| URI {
|
||||
static bool noURLLiterals = settings.isExperimentalFeatureEnabled("no-url-literals");
|
||||
static bool noURLLiterals = settings.isExperimentalFeatureEnabled(Xp::NoUrlLiterals);
|
||||
if (noURLLiterals)
|
||||
throw ParseError({
|
||||
.msg = hintfmt("URL literals are disabled"),
|
||||
.errPos = CUR_POS
|
||||
});
|
||||
$$ = new ExprString(data->symbols.create($1));
|
||||
$$ = new ExprString(std::string($1));
|
||||
}
|
||||
| '(' expr ')' { $$ = $2; }
|
||||
/* Let expressions `let {..., body = ...}' are just desugared
|
||||
|
@ -439,40 +494,41 @@ expr_simple
|
|||
;
|
||||
|
||||
string_parts
|
||||
: STR
|
||||
: STR { $$ = new ExprString(std::string($1)); }
|
||||
| string_parts_interpolated { $$ = new ExprConcatStrings(CUR_POS, true, $1); }
|
||||
| { $$ = new ExprString(data->symbols.create("")); }
|
||||
| { $$ = new ExprString(""); }
|
||||
;
|
||||
|
||||
string_parts_interpolated
|
||||
: string_parts_interpolated STR { $$ = $1; $1->push_back($2); }
|
||||
| string_parts_interpolated DOLLAR_CURLY expr '}' { $$ = $1; $1->push_back($3); }
|
||||
| DOLLAR_CURLY expr '}' { $$ = new vector<Expr *>; $$->push_back($2); }
|
||||
: string_parts_interpolated STR
|
||||
{ $$ = $1; $1->emplace_back(makeCurPos(@2, data), new ExprString(std::string($2))); }
|
||||
| string_parts_interpolated DOLLAR_CURLY expr '}' { $$ = $1; $1->emplace_back(makeCurPos(@2, data), $3); }
|
||||
| DOLLAR_CURLY expr '}' { $$ = new std::vector<std::pair<Pos, Expr *> >; $$->emplace_back(makeCurPos(@1, data), $2); }
|
||||
| STR DOLLAR_CURLY expr '}' {
|
||||
$$ = new vector<Expr *>;
|
||||
$$->push_back($1);
|
||||
$$->push_back($3);
|
||||
$$ = new std::vector<std::pair<Pos, Expr *> >;
|
||||
$$->emplace_back(makeCurPos(@1, data), new ExprString(std::string($1)));
|
||||
$$->emplace_back(makeCurPos(@2, data), $3);
|
||||
}
|
||||
;
|
||||
|
||||
path_start
|
||||
: PATH {
|
||||
Path path(absPath($1, data->basePath));
|
||||
Path path(absPath({$1.p, $1.l}, data->basePath));
|
||||
/* add back in the trailing '/' to the first segment */
|
||||
if ($1[strlen($1)-1] == '/' && strlen($1) > 1)
|
||||
if ($1.p[$1.l-1] == '/' && $1.l > 1)
|
||||
path += "/";
|
||||
$$ = new ExprPath(path);
|
||||
}
|
||||
| HPATH {
|
||||
Path path(getHome() + string($1 + 1));
|
||||
Path path(getHome() + std::string($1.p + 1, $1.l - 1));
|
||||
$$ = new ExprPath(path);
|
||||
}
|
||||
;
|
||||
|
||||
ind_string_parts
|
||||
: ind_string_parts IND_STR { $$ = $1; $1->push_back($2); }
|
||||
| ind_string_parts DOLLAR_CURLY expr '}' { $$ = $1; $1->push_back($3); }
|
||||
| { $$ = new vector<Expr *>; }
|
||||
: ind_string_parts IND_STR { $$ = $1; $1->emplace_back(makeCurPos(@2, data), $2); }
|
||||
| ind_string_parts DOLLAR_CURLY expr '}' { $$ = $1; $1->emplace_back(makeCurPos(@2, data), $3); }
|
||||
| { $$ = new std::vector<std::pair<Pos, std::variant<Expr *, StringToken> > >; }
|
||||
;
|
||||
|
||||
binds
|
||||
|
@ -483,7 +539,7 @@ binds
|
|||
if ($$->attrs.find(i.symbol) != $$->attrs.end())
|
||||
dupAttr(i.symbol, makeCurPos(@3, data), $$->attrs[i.symbol].pos);
|
||||
Pos pos = makeCurPos(@3, data);
|
||||
$$->attrs[i.symbol] = ExprAttrs::AttrDef(new ExprVar(CUR_POS, i.symbol), pos, true);
|
||||
$$->attrs.emplace(i.symbol, ExprAttrs::AttrDef(new ExprVar(CUR_POS, i.symbol), pos, true));
|
||||
}
|
||||
}
|
||||
| binds INHERIT '(' expr ')' attrs ';'
|
||||
|
@ -492,7 +548,7 @@ binds
|
|||
for (auto & i : *$6) {
|
||||
if ($$->attrs.find(i.symbol) != $$->attrs.end())
|
||||
dupAttr(i.symbol, makeCurPos(@6, data), $$->attrs[i.symbol].pos);
|
||||
$$->attrs[i.symbol] = ExprAttrs::AttrDef(new ExprSelect(CUR_POS, $4, i.symbol), makeCurPos(@6, data));
|
||||
$$->attrs.emplace(i.symbol, ExprAttrs::AttrDef(new ExprSelect(CUR_POS, $4, i.symbol), makeCurPos(@6, data)));
|
||||
}
|
||||
}
|
||||
| { $$ = new ExprAttrs(makeCurPos(@0, data)); }
|
||||
|
@ -504,7 +560,7 @@ attrs
|
|||
{ $$ = $1;
|
||||
ExprString * str = dynamic_cast<ExprString *>($2);
|
||||
if (str) {
|
||||
$$->push_back(AttrName(str->s));
|
||||
$$->push_back(AttrName(data->symbols.create(str->s)));
|
||||
delete str;
|
||||
} else
|
||||
throw ParseError({
|
||||
|
@ -521,17 +577,17 @@ attrpath
|
|||
{ $$ = $1;
|
||||
ExprString * str = dynamic_cast<ExprString *>($3);
|
||||
if (str) {
|
||||
$$->push_back(AttrName(str->s));
|
||||
$$->push_back(AttrName(data->symbols.create(str->s)));
|
||||
delete str;
|
||||
} else
|
||||
$$->push_back(AttrName($3));
|
||||
}
|
||||
| attr { $$ = new vector<AttrName>; $$->push_back(AttrName(data->symbols.create($1))); }
|
||||
| attr { $$ = new std::vector<AttrName>; $$->push_back(AttrName(data->symbols.create($1))); }
|
||||
| string_attr
|
||||
{ $$ = new vector<AttrName>;
|
||||
{ $$ = new std::vector<AttrName>;
|
||||
ExprString *str = dynamic_cast<ExprString *>($1);
|
||||
if (str) {
|
||||
$$->push_back(AttrName(str->s));
|
||||
$$->push_back(AttrName(data->symbols.create(str->s)));
|
||||
delete str;
|
||||
} else
|
||||
$$->push_back(AttrName($1));
|
||||
|
@ -540,7 +596,7 @@ attrpath
|
|||
|
||||
attr
|
||||
: ID { $$ = $1; }
|
||||
| OR_KW { $$ = "or"; }
|
||||
| OR_KW { $$ = {"or", 2}; }
|
||||
;
|
||||
|
||||
string_attr
|
||||
|
@ -555,13 +611,13 @@ expr_list
|
|||
|
||||
formals
|
||||
: formal ',' formals
|
||||
{ $$ = $3; addFormal(CUR_POS, $$, *$1); }
|
||||
{ $$ = $3; $$->formals.push_back(*$1); }
|
||||
| formal
|
||||
{ $$ = new Formals; addFormal(CUR_POS, $$, *$1); $$->ellipsis = false; }
|
||||
{ $$ = new ParserFormals; $$->formals.push_back(*$1); $$->ellipsis = false; }
|
||||
|
|
||||
{ $$ = new Formals; $$->ellipsis = false; }
|
||||
{ $$ = new ParserFormals; $$->ellipsis = false; }
|
||||
| ELLIPSIS
|
||||
{ $$ = new Formals; $$->ellipsis = true; }
|
||||
{ $$ = new ParserFormals; $$->ellipsis = true; }
|
||||
;
|
||||
|
||||
formal
|
||||
|
@ -586,8 +642,8 @@ formal
|
|||
namespace nix {
|
||||
|
||||
|
||||
Expr * EvalState::parse(const char * text, FileOrigin origin,
|
||||
const Path & path, const Path & basePath, StaticEnv & staticEnv)
|
||||
Expr * EvalState::parse(char * text, size_t length, FileOrigin origin,
|
||||
const PathView path, const PathView basePath, StaticEnv & staticEnv)
|
||||
{
|
||||
yyscan_t scanner;
|
||||
ParseData data(*this);
|
||||
|
@ -606,7 +662,7 @@ Expr * EvalState::parse(const char * text, FileOrigin origin,
|
|||
data.basePath = basePath;
|
||||
|
||||
yylex_init(&scanner);
|
||||
yy_scan_string(text, scanner);
|
||||
yy_scan_buffer(text, length, scanner);
|
||||
int res = yyparse(scanner, &data);
|
||||
yylex_destroy(scanner);
|
||||
|
||||
|
@ -652,63 +708,70 @@ Expr * EvalState::parseExprFromFile(const Path & path)
|
|||
|
||||
Expr * EvalState::parseExprFromFile(const Path & path, StaticEnv & staticEnv)
|
||||
{
|
||||
return parse(readFile(path).c_str(), foFile, path, dirOf(path), staticEnv);
|
||||
auto buffer = readFile(path);
|
||||
// readFile should have left some extra space for terminators
|
||||
buffer.append("\0\0", 2);
|
||||
return parse(buffer.data(), buffer.size(), foFile, path, dirOf(path), staticEnv);
|
||||
}
|
||||
|
||||
|
||||
Expr * EvalState::parseExprFromString(std::string_view s, const Path & basePath, StaticEnv & staticEnv)
|
||||
Expr * EvalState::parseExprFromString(std::string s, const Path & basePath, StaticEnv & staticEnv)
|
||||
{
|
||||
return parse(s.data(), foString, "", basePath, staticEnv);
|
||||
s.append("\0\0", 2);
|
||||
return parse(s.data(), s.size(), foString, "", basePath, staticEnv);
|
||||
}
|
||||
|
||||
|
||||
Expr * EvalState::parseExprFromString(std::string_view s, const Path & basePath)
|
||||
Expr * EvalState::parseExprFromString(std::string s, const Path & basePath)
|
||||
{
|
||||
return parseExprFromString(s, basePath, staticBaseEnv);
|
||||
return parseExprFromString(std::move(s), basePath, staticBaseEnv);
|
||||
}
|
||||
|
||||
|
||||
Expr * EvalState::parseStdin()
|
||||
{
|
||||
//Activity act(*logger, lvlTalkative, format("parsing standard input"));
|
||||
return parse(drainFD(0).data(), foStdin, "", absPath("."), staticBaseEnv);
|
||||
auto buffer = drainFD(0);
|
||||
// drainFD should have left some extra space for terminators
|
||||
buffer.append("\0\0", 2);
|
||||
return parse(buffer.data(), buffer.size(), foStdin, "", absPath("."), staticBaseEnv);
|
||||
}
|
||||
|
||||
|
||||
void EvalState::addToSearchPath(const string & s)
|
||||
void EvalState::addToSearchPath(const std::string & s)
|
||||
{
|
||||
size_t pos = s.find('=');
|
||||
string prefix;
|
||||
std::string prefix;
|
||||
Path path;
|
||||
if (pos == string::npos) {
|
||||
if (pos == std::string::npos) {
|
||||
path = s;
|
||||
} else {
|
||||
prefix = string(s, 0, pos);
|
||||
path = string(s, pos + 1);
|
||||
prefix = std::string(s, 0, pos);
|
||||
path = std::string(s, pos + 1);
|
||||
}
|
||||
|
||||
searchPath.emplace_back(prefix, path);
|
||||
}
|
||||
|
||||
|
||||
Path EvalState::findFile(const string & path)
|
||||
Path EvalState::findFile(const std::string_view path)
|
||||
{
|
||||
return findFile(searchPath, path);
|
||||
}
|
||||
|
||||
|
||||
Path EvalState::findFile(SearchPath & searchPath, const string & path, const Pos & pos)
|
||||
Path EvalState::findFile(SearchPath & searchPath, const std::string_view path, const Pos & pos)
|
||||
{
|
||||
for (auto & i : searchPath) {
|
||||
std::string suffix;
|
||||
if (i.first.empty())
|
||||
suffix = "/" + path;
|
||||
suffix = concatStrings("/", path);
|
||||
else {
|
||||
auto s = i.first.size();
|
||||
if (path.compare(0, s, i.first) != 0 ||
|
||||
(path.size() > s && path[s] != '/'))
|
||||
continue;
|
||||
suffix = path.size() == s ? "" : "/" + string(path, s);
|
||||
suffix = path.size() == s ? "" : concatStrings("/", path.substr(s));
|
||||
}
|
||||
auto r = resolveSearchPathElem(i);
|
||||
if (!r.first) continue;
|
||||
|
@ -717,7 +780,7 @@ Path EvalState::findFile(SearchPath & searchPath, const string & path, const Pos
|
|||
}
|
||||
|
||||
if (hasPrefix(path, "nix/"))
|
||||
return corepkgsPrefix + path.substr(4);
|
||||
return concatStrings(corepkgsPrefix, path.substr(4));
|
||||
|
||||
throw ThrownError({
|
||||
.msg = hintfmt(evalSettings.pureEval
|
||||
|
@ -752,7 +815,7 @@ std::pair<bool, std::string> EvalState::resolveSearchPathElem(const SearchPathEl
|
|||
res = { true, path };
|
||||
else {
|
||||
logWarning({
|
||||
.msg = hintfmt("warning: Nix search path entry '%1%' does not exist, ignoring", elem.second)
|
||||
.msg = hintfmt("Nix search path entry '%1%' does not exist, ignoring", elem.second)
|
||||
});
|
||||
res = { false, "" };
|
||||
}
|
||||
|
|
File diff suppressed because it is too large
|
@ -7,8 +7,8 @@ namespace nix {
|
|||
static void prim_unsafeDiscardStringContext(EvalState & state, const Pos & pos, Value * * args, Value & v)
|
||||
{
|
||||
PathSet context;
|
||||
string s = state.coerceToString(pos, *args[0], context);
|
||||
mkString(v, s, PathSet());
|
||||
auto s = state.coerceToString(pos, *args[0], context);
|
||||
v.mkString(*s);
|
||||
}
|
||||
|
||||
static RegisterPrimOp primop_unsafeDiscardStringContext("__unsafeDiscardStringContext", 1, prim_unsafeDiscardStringContext);
|
||||
|
@ -18,7 +18,7 @@ static void prim_hasContext(EvalState & state, const Pos & pos, Value * * args,
|
|||
{
|
||||
PathSet context;
|
||||
state.forceString(*args[0], context, pos);
|
||||
mkBool(v, !context.empty());
|
||||
v.mkBool(!context.empty());
|
||||
}
|
||||
|
||||
static RegisterPrimOp primop_hasContext("__hasContext", 1, prim_hasContext);
|
||||
|
@ -33,13 +33,13 @@ static RegisterPrimOp primop_hasContext("__hasContext", 1, prim_hasContext);
|
|||
static void prim_unsafeDiscardOutputDependency(EvalState & state, const Pos & pos, Value * * args, Value & v)
|
||||
{
|
||||
PathSet context;
|
||||
string s = state.coerceToString(pos, *args[0], context);
|
||||
auto s = state.coerceToString(pos, *args[0], context);
|
||||
|
||||
PathSet context2;
|
||||
for (auto & p : context)
|
||||
context2.insert(p.at(0) == '=' ? string(p, 1) : p);
|
||||
context2.insert(p.at(0) == '=' ? std::string(p, 1) : p);
|
||||
|
||||
mkString(v, s, context2);
|
||||
v.mkString(*s, context2);
|
||||
}
|
||||
|
||||
static RegisterPrimOp primop_unsafeDiscardOutputDependency("__unsafeDiscardOutputDependency", 1, prim_unsafeDiscardOutputDependency);
|
||||
|
@ -76,13 +76,13 @@ static void prim_getContext(EvalState & state, const Pos & pos, Value * * args,
|
|||
auto contextInfos = std::map<Path, ContextInfo>();
|
||||
for (const auto & p : context) {
|
||||
Path drv;
|
||||
string output;
|
||||
std::string output;
|
||||
const Path * path = &p;
|
||||
if (p.at(0) == '=') {
|
||||
drv = string(p, 1);
|
||||
drv = std::string(p, 1);
|
||||
path = &drv;
|
||||
} else if (p.at(0) == '!') {
|
||||
std::pair<string, string> ctx = decodeContext(p);
|
||||
std::pair<std::string, std::string> ctx = decodeContext(p);
|
||||
drv = ctx.first;
|
||||
output = ctx.second;
|
||||
path = &drv;
|
||||
|
@ -103,28 +103,26 @@ static void prim_getContext(EvalState & state, const Pos & pos, Value * * args,
|
|||
}
|
||||
}
|
||||
|
||||
state.mkAttrs(v, contextInfos.size());
|
||||
auto attrs = state.buildBindings(contextInfos.size());
|
||||
|
||||
auto sPath = state.symbols.create("path");
|
||||
auto sAllOutputs = state.symbols.create("allOutputs");
|
||||
for (const auto & info : contextInfos) {
|
||||
auto & infoVal = *state.allocAttr(v, state.symbols.create(info.first));
|
||||
state.mkAttrs(infoVal, 3);
|
||||
auto infoAttrs = state.buildBindings(3);
|
||||
if (info.second.path)
|
||||
mkBool(*state.allocAttr(infoVal, sPath), true);
|
||||
infoAttrs.alloc(sPath).mkBool(true);
|
||||
if (info.second.allOutputs)
|
||||
mkBool(*state.allocAttr(infoVal, sAllOutputs), true);
|
||||
infoAttrs.alloc(sAllOutputs).mkBool(true);
|
||||
if (!info.second.outputs.empty()) {
|
||||
auto & outputsVal = *state.allocAttr(infoVal, state.sOutputs);
|
||||
auto & outputsVal = infoAttrs.alloc(state.sOutputs);
|
||||
state.mkList(outputsVal, info.second.outputs.size());
|
||||
size_t i = 0;
|
||||
for (const auto & output : info.second.outputs) {
|
||||
mkString(*(outputsVal.listElems()[i++] = state.allocValue()), output);
|
||||
}
|
||||
for (const auto & [i, output] : enumerate(info.second.outputs))
|
||||
(outputsVal.listElems()[i] = state.allocValue())->mkString(output);
|
||||
}
|
||||
infoVal.attrs->sort();
|
||||
attrs.alloc(info.first).mkAttrs(infoAttrs);
|
||||
}
|
||||
v.attrs->sort();
|
||||
|
||||
v.mkAttrs(attrs);
|
||||
}
|
||||
|
||||
static RegisterPrimOp primop_getContext("__getContext", 1, prim_getContext);
|
||||
|
@ -168,7 +166,7 @@ static void prim_appendContext(EvalState & state, const Pos & pos, Value * * arg
|
|||
.errPos = *i.pos
|
||||
});
|
||||
}
|
||||
context.insert("=" + string(i.name));
|
||||
context.insert("=" + std::string(i.name));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -181,14 +179,14 @@ static void prim_appendContext(EvalState & state, const Pos & pos, Value * * arg
|
|||
.errPos = *i.pos
|
||||
});
|
||||
}
|
||||
for (unsigned int n = 0; n < iter->value->listSize(); ++n) {
|
||||
auto name = state.forceStringNoCtx(*iter->value->listElems()[n], *iter->pos);
|
||||
context.insert("!" + name + "!" + string(i.name));
|
||||
for (auto elem : iter->value->listItems()) {
|
||||
auto name = state.forceStringNoCtx(*elem, *iter->pos);
|
||||
context.insert(concatStrings("!", name, "!", i.name));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
mkString(v, orig, context);
|
||||
v.mkString(orig, context);
|
||||
}
|
||||
|
||||
static RegisterPrimOp primop_appendContext("__appendContext", 2, prim_appendContext);
|
||||
|
|
|
@ -12,24 +12,24 @@ static void prim_fetchMercurial(EvalState & state, const Pos & pos, Value * * ar
|
|||
std::string url;
|
||||
std::optional<Hash> rev;
|
||||
std::optional<std::string> ref;
|
||||
std::string name = "source";
|
||||
std::string_view name = "source";
|
||||
PathSet context;
|
||||
|
||||
state.forceValue(*args[0]);
|
||||
state.forceValue(*args[0], pos);
|
||||
|
||||
if (args[0]->type() == nAttrs) {
|
||||
|
||||
state.forceAttrs(*args[0], pos);
|
||||
|
||||
for (auto & attr : *args[0]->attrs) {
|
||||
string n(attr.name);
|
||||
std::string_view n(attr.name);
|
||||
if (n == "url")
|
||||
url = state.coerceToString(*attr.pos, *attr.value, context, false, false);
|
||||
url = state.coerceToString(*attr.pos, *attr.value, context, false, false).toOwned();
|
||||
else if (n == "rev") {
|
||||
// Ugly: unlike fetchGit, here the "rev" attribute can
|
||||
// be both a revision or a branch/tag name.
|
||||
auto value = state.forceStringNoCtx(*attr.value, *attr.pos);
|
||||
if (std::regex_match(value, revRegex))
|
||||
if (std::regex_match(value.begin(), value.end(), revRegex))
|
||||
rev = Hash::parseAny(value, htSHA1);
|
||||
else
|
||||
ref = value;
|
||||
|
@ -50,7 +50,7 @@ static void prim_fetchMercurial(EvalState & state, const Pos & pos, Value * * ar
|
|||
});
|
||||
|
||||
} else
|
||||
url = state.coerceToString(pos, *args[0], context, false, false);
|
||||
url = state.coerceToString(pos, *args[0], context, false, false).toOwned();
|
||||
|
||||
// FIXME: git externals probably can be used to bypass the URI
|
||||
// whitelist. Ah well.
|
||||
|
@ -62,7 +62,7 @@ static void prim_fetchMercurial(EvalState & state, const Pos & pos, Value * * ar
|
|||
fetchers::Attrs attrs;
|
||||
attrs.insert_or_assign("type", "hg");
|
||||
attrs.insert_or_assign("url", url.find("://") != std::string::npos ? url : "file://" + url);
|
||||
attrs.insert_or_assign("name", name);
|
||||
attrs.insert_or_assign("name", std::string(name));
|
||||
if (ref) attrs.insert_or_assign("ref", *ref);
|
||||
if (rev) attrs.insert_or_assign("rev", rev->gitRev());
|
||||
auto input = fetchers::Input::fromAttrs(std::move(attrs));
|
||||
|
@ -70,22 +70,21 @@ static void prim_fetchMercurial(EvalState & state, const Pos & pos, Value * * ar
|
|||
// FIXME: use name
|
||||
auto [tree, input2] = input.fetch(state.store);
|
||||
|
||||
state.mkAttrs(v, 8);
|
||||
auto attrs2 = state.buildBindings(8);
|
||||
auto storePath = state.store->printStorePath(tree.storePath);
|
||||
mkString(*state.allocAttr(v, state.sOutPath), storePath, PathSet({storePath}));
|
||||
attrs2.alloc(state.sOutPath).mkString(storePath, {storePath});
|
||||
if (input2.getRef())
|
||||
mkString(*state.allocAttr(v, state.symbols.create("branch")), *input2.getRef());
|
||||
attrs2.alloc("branch").mkString(*input2.getRef());
|
||||
// Backward compatibility: set 'rev' to
|
||||
// 0000000000000000000000000000000000000000 for a dirty tree.
|
||||
auto rev2 = input2.getRev().value_or(Hash(htSHA1));
|
||||
mkString(*state.allocAttr(v, state.symbols.create("rev")), rev2.gitRev());
|
||||
mkString(*state.allocAttr(v, state.symbols.create("shortRev")), std::string(rev2.gitRev(), 0, 12));
|
||||
attrs2.alloc("rev").mkString(rev2.gitRev());
|
||||
attrs2.alloc("shortRev").mkString(rev2.gitRev().substr(0, 12));
|
||||
if (auto revCount = input2.getRevCount())
|
||||
mkInt(*state.allocAttr(v, state.symbols.create("revCount")), *revCount);
|
||||
v.attrs->sort();
|
||||
attrs2.alloc("revCount").mkInt(*revCount);
|
||||
v.mkAttrs(attrs2);
|
||||
|
||||
if (state.allowedPaths)
|
||||
state.allowedPaths->insert(tree.actualPath);
|
||||
state.allowPath(tree.storePath);
|
||||
}
|
||||
|
||||
static RegisterPrimOp r_fetchMercurial("fetchMercurial", 1, prim_fetchMercurial);
|
||||
|
|
|
@ -19,54 +19,53 @@ void emitTreeAttrs(
|
|||
bool emptyRevFallback,
|
||||
bool forceDirty)
|
||||
{
|
||||
assert(input.isImmutable());
|
||||
assert(input.isLocked());
|
||||
|
||||
state.mkAttrs(v, 8);
|
||||
auto attrs = state.buildBindings(8);
|
||||
|
||||
auto storePath = state.store->printStorePath(tree.storePath);
|
||||
|
||||
mkString(*state.allocAttr(v, state.sOutPath), storePath, PathSet({storePath}));
|
||||
attrs.alloc(state.sOutPath).mkString(storePath, {storePath});
|
||||
|
||||
// FIXME: support arbitrary input attributes.
|
||||
|
||||
auto narHash = input.getNarHash();
|
||||
assert(narHash);
|
||||
mkString(*state.allocAttr(v, state.symbols.create("narHash")),
|
||||
narHash->to_string(SRI, true));
|
||||
attrs.alloc("narHash").mkString(narHash->to_string(SRI, true));
|
||||
|
||||
if (input.getType() == "git")
|
||||
mkBool(*state.allocAttr(v, state.symbols.create("submodules")),
|
||||
attrs.alloc("submodules").mkBool(
|
||||
fetchers::maybeGetBoolAttr(input.attrs, "submodules").value_or(false));
|
||||
|
||||
if (!forceDirty) {
|
||||
|
||||
if (auto rev = input.getRev()) {
|
||||
mkString(*state.allocAttr(v, state.symbols.create("rev")), rev->gitRev());
|
||||
mkString(*state.allocAttr(v, state.symbols.create("shortRev")), rev->gitShortRev());
|
||||
attrs.alloc("rev").mkString(rev->gitRev());
|
||||
attrs.alloc("shortRev").mkString(rev->gitShortRev());
|
||||
} else if (emptyRevFallback) {
|
||||
// Backwards compat for `builtins.fetchGit`: dirty repos return an empty sha1 as rev
|
||||
auto emptyHash = Hash(htSHA1);
|
||||
mkString(*state.allocAttr(v, state.symbols.create("rev")), emptyHash.gitRev());
|
||||
mkString(*state.allocAttr(v, state.symbols.create("shortRev")), emptyHash.gitShortRev());
|
||||
attrs.alloc("rev").mkString(emptyHash.gitRev());
|
||||
attrs.alloc("shortRev").mkString(emptyHash.gitShortRev());
|
||||
}
|
||||
|
||||
if (auto revCount = input.getRevCount())
|
||||
mkInt(*state.allocAttr(v, state.symbols.create("revCount")), *revCount);
|
||||
attrs.alloc("revCount").mkInt(*revCount);
|
||||
else if (emptyRevFallback)
|
||||
mkInt(*state.allocAttr(v, state.symbols.create("revCount")), 0);
|
||||
attrs.alloc("revCount").mkInt(0);
|
||||
|
||||
}
|
||||
|
||||
if (auto lastModified = input.getLastModified()) {
|
||||
mkInt(*state.allocAttr(v, state.symbols.create("lastModified")), *lastModified);
|
||||
mkString(*state.allocAttr(v, state.symbols.create("lastModifiedDate")),
|
||||
attrs.alloc("lastModified").mkInt(*lastModified);
|
||||
attrs.alloc("lastModifiedDate").mkString(
|
||||
fmt("%s", std::put_time(std::gmtime(&*lastModified), "%Y%m%d%H%M%S")));
|
||||
}
|
||||
|
||||
v.attrs->sort();
|
||||
v.mkAttrs(attrs);
|
||||
}
|
||||
|
||||
std::string fixURI(std::string uri, EvalState &state, const std::string & defaultScheme = "file")
|
||||
std::string fixURI(std::string uri, EvalState & state, const std::string & defaultScheme = "file")
|
||||
{
|
||||
state.checkURI(uri);
|
||||
return uri.find("://") != std::string::npos ? uri : defaultScheme + "://" + uri;
|
||||
|
@ -74,53 +73,66 @@ std::string fixURI(std::string uri, EvalState &state, const std::string & defaul
|
|||
|
||||
std::string fixURIForGit(std::string uri, EvalState & state)
{
static std::regex scp_uri("([^/].*)@(.*):(.*)");
/* Detects scp-style uris (e.g. git@github.com:NixOS/nix) and fixes
* them by removing the `:` and assuming a scheme of `ssh://`
* */
static std::regex scp_uri("([^/]*)@(.*):(.*)");
if (uri[0] != '/' && std::regex_match(uri, scp_uri))
return fixURI(std::regex_replace(uri, scp_uri, "$1@$2/$3"), state, "ssh");
else
return fixURI(uri, state);
}
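The comment added in `fixURIForGit` explains the rewrite: an scp-style URL such as `git@github.com:NixOS/nix` has its `:` turned into `/` and is given an `ssh://` scheme. A standalone demonstration of that regex transformation (only the pattern and the replacement string come from the diff; the surrounding program, including the direct `ssh://` prefix, is an illustrative simplification of what `fixURI` does):

```cpp
#include <iostream>
#include <regex>
#include <string>

int main() {
    // Same pattern and replacement as the new fixURIForGit.
    static const std::regex scp_uri("([^/]*)@(.*):(.*)");

    std::string uri = "git@github.com:NixOS/nix";
    if (uri[0] != '/' && std::regex_match(uri, scp_uri)) {
        // Turn "user@host:path" into "ssh://user@host/path".
        std::string fixed = "ssh://" + std::regex_replace(uri, scp_uri, "$1@$2/$3");
        std::cout << fixed << "\n"; // ssh://git@github.com/NixOS/nix
    } else {
        std::cout << uri << "\n"; // left for the ordinary URI handling
    }
}
```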
|
||||
void addURI(EvalState &state, fetchers::Attrs &attrs, Symbol name, std::string v)
|
||||
{
|
||||
string n(name);
|
||||
attrs.emplace(name, n == "url" ? fixURI(v, state) : v);
|
||||
}
|
||||
|
||||
struct FetchTreeParams {
|
||||
bool emptyRevFallback = false;
|
||||
bool allowNameArgument = false;
|
||||
};
|
||||
|
||||
static void fetchTree(
|
||||
EvalState &state,
|
||||
const Pos &pos,
|
||||
Value **args,
|
||||
Value &v,
|
||||
const std::optional<std::string> type,
|
||||
EvalState & state,
|
||||
const Pos & pos,
|
||||
Value * * args,
|
||||
Value & v,
|
||||
std::optional<std::string> type,
|
||||
const FetchTreeParams & params = FetchTreeParams{}
|
||||
) {
|
||||
fetchers::Input input;
|
||||
PathSet context;
|
||||
|
||||
state.forceValue(*args[0]);
|
||||
state.forceValue(*args[0], pos);
|
||||
|
||||
if (args[0]->type() == nAttrs) {
|
||||
state.forceAttrs(*args[0], pos);
|
||||
|
||||
fetchers::Attrs attrs;
|
||||
|
||||
if (auto aType = args[0]->attrs->get(state.sType)) {
|
||||
if (type)
|
||||
throw Error({
|
||||
.msg = hintfmt("unexpected attribute 'type'"),
|
||||
.errPos = pos
|
||||
});
|
||||
type = state.forceStringNoCtx(*aType->value, *aType->pos);
|
||||
} else if (!type)
|
||||
throw Error({
|
||||
.msg = hintfmt("attribute 'type' is missing in call to 'fetchTree'"),
|
||||
.errPos = pos
|
||||
});
|
||||
|
||||
attrs.emplace("type", type.value());
|
||||
|
||||
for (auto & attr : *args[0]->attrs) {
|
||||
state.forceValue(*attr.value);
|
||||
if (attr.value->type() == nPath || attr.value->type() == nString)
|
||||
addURI(
|
||||
state,
|
||||
attrs,
|
||||
attr.name,
|
||||
state.coerceToString(*attr.pos, *attr.value, context, false, false)
|
||||
);
|
||||
else if (attr.value->type() == nString)
|
||||
addURI(state, attrs, attr.name, attr.value->string.s);
|
||||
if (attr.name == state.sType) continue;
|
||||
state.forceValue(*attr.value, *attr.pos);
|
||||
if (attr.value->type() == nPath || attr.value->type() == nString) {
|
||||
auto s = state.coerceToString(*attr.pos, *attr.value, context, false, false).toOwned();
|
||||
attrs.emplace(attr.name,
|
||||
attr.name == "url"
|
||||
? type == "git"
|
||||
? fixURIForGit(s, state)
|
||||
: fixURI(s, state)
|
||||
: s);
|
||||
}
|
||||
else if (attr.value->type() == nBool)
|
||||
attrs.emplace(attr.name, Explicit<bool>{attr.value->boolean});
|
||||
else if (attr.value->type() == nInt)
|
||||
|
@ -130,15 +142,6 @@ static void fetchTree(
|
|||
attr.name, showType(*attr.value));
|
||||
}
|
||||
|
||||
if (type)
|
||||
attrs.emplace("type", type.value());
|
||||
|
||||
if (!attrs.count("type"))
|
||||
throw Error({
|
||||
.msg = hintfmt("attribute 'type' is missing in call to 'fetchTree'"),
|
||||
.errPos = pos
|
||||
});
|
||||
|
||||
if (!params.allowNameArgument)
|
||||
if (auto nameIter = attrs.find("name"); nameIter != attrs.end())
|
||||
throw Error({
|
||||
|
@ -146,10 +149,9 @@ static void fetchTree(
|
|||
.errPos = pos
|
||||
});
|
||||
|
||||
|
||||
input = fetchers::Input::fromAttrs(std::move(attrs));
|
||||
} else {
|
||||
auto url = state.coerceToString(pos, *args[0], context, false, false);
|
||||
auto url = state.coerceToString(pos, *args[0], context, false, false).toOwned();
|
||||
|
||||
if (type == "git") {
|
||||
fetchers::Attrs attrs;
|
||||
|
@ -164,20 +166,19 @@ static void fetchTree(
|
|||
if (!evalSettings.pureEval && !input.isDirect())
|
||||
input = lookupInRegistries(state.store, input).first;
|
||||
|
||||
if (evalSettings.pureEval && !input.isImmutable())
|
||||
throw Error("in pure evaluation mode, 'fetchTree' requires an immutable input, at %s", pos);
|
||||
if (evalSettings.pureEval && !input.isLocked())
|
||||
throw Error("in pure evaluation mode, 'fetchTree' requires a locked input, at %s", pos);
|
||||
|
||||
auto [tree, input2] = input.fetch(state.store);
|
||||
|
||||
if (state.allowedPaths)
|
||||
state.allowedPaths->insert(tree.actualPath);
|
||||
state.allowPath(tree.storePath);
|
||||
|
||||
emitTreeAttrs(state, tree, input2, v, params.emptyRevFallback, false);
|
||||
}
|
||||
|
||||
static void prim_fetchTree(EvalState & state, const Pos & pos, Value * * args, Value & v)
|
||||
{
|
||||
settings.requireExperimentalFeature("flakes");
|
||||
settings.requireExperimentalFeature(Xp::Flakes);
|
||||
fetchTree(state, pos, args, v, std::nullopt, FetchTreeParams { .allowNameArgument = false });
|
||||
}
|
||||
|
||||
|
@ -185,19 +186,19 @@ static void prim_fetchTree(EvalState & state, const Pos & pos, Value * * args, V
|
|||
static RegisterPrimOp primop_fetchTree("fetchTree", 1, prim_fetchTree);
|
||||
|
||||
static void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,
|
||||
const string & who, bool unpack, std::string name)
|
||||
const std::string & who, bool unpack, std::string name)
|
||||
{
|
||||
std::optional<std::string> url;
|
||||
std::optional<Hash> expectedHash;
|
||||
|
||||
state.forceValue(*args[0]);
|
||||
state.forceValue(*args[0], pos);
|
||||
|
||||
if (args[0]->type() == nAttrs) {
|
||||
|
||||
state.forceAttrs(*args[0], pos);
|
||||
|
||||
for (auto & attr : *args[0]->attrs) {
|
||||
string n(attr.name);
|
||||
std::string n(attr.name);
|
||||
if (n == "url")
|
||||
url = state.forceStringNoCtx(*attr.value, *attr.pos);
|
||||
else if (n == "sha256")
|
||||
|
@ -229,27 +230,36 @@ static void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,
|
|||
if (evalSettings.pureEval && !expectedHash)
|
||||
throw Error("in pure evaluation mode, '%s' requires a 'sha256' argument", who);
|
||||
|
||||
// early exit if pinned and already in the store
|
||||
if (expectedHash && expectedHash->type == htSHA256) {
|
||||
auto expectedPath =
|
||||
unpack
|
||||
? state.store->makeFixedOutputPath(FileIngestionMethod::Recursive, *expectedHash, name, {})
|
||||
: state.store->makeFixedOutputPath(FileIngestionMethod::Flat, *expectedHash, name, {});
|
||||
|
||||
if (state.store->isValidPath(expectedPath)) {
|
||||
state.allowAndSetStorePathString(expectedPath, v);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: fetching may fail, yet the path may be substitutable.
|
||||
// https://github.com/NixOS/nix/issues/4313
|
||||
auto storePath =
|
||||
unpack
|
||||
? fetchers::downloadTarball(state.store, *url, name, (bool) expectedHash).first.storePath
|
||||
: fetchers::downloadFile(state.store, *url, name, (bool) expectedHash).storePath;
|
||||
|
||||
auto realPath = state.store->toRealPath(storePath);
|
||||
|
||||
if (expectedHash) {
|
||||
auto hash = unpack
|
||||
? state.store->queryPathInfo(storePath)->narHash
|
||||
: hashFile(htSHA256, realPath);
|
||||
: hashFile(htSHA256, state.store->toRealPath(storePath));
|
||||
if (hash != *expectedHash)
|
||||
throw Error((unsigned int) 102, "hash mismatch in file downloaded from '%s':\n specified: %s\n got: %s",
|
||||
*url, expectedHash->to_string(Base32, true), hash.to_string(Base32, true));
|
||||
}
|
||||
|
||||
if (state.allowedPaths)
|
||||
state.allowedPaths->insert(realPath);
|
||||
|
||||
auto path = state.store->printStorePath(storePath);
|
||||
mkString(v, path, PathSet({path}));
|
||||
state.allowAndSetStorePathString(storePath, v);
|
||||
}
|
||||
|
||||
static void prim_fetchurl(EvalState & state, const Pos & pos, Value * * args, Value & v)
|
||||
|
@ -291,13 +301,13 @@ static RegisterPrimOp primop_fetchTarball({
|
|||
stdenv.mkDerivation { … }
|
||||
```
|
||||
|
||||
The fetched tarball is cached for a certain amount of time (1 hour
|
||||
by default) in `~/.cache/nix/tarballs/`. You can change the cache
|
||||
timeout either on the command line with `--option tarball-ttl number
|
||||
of seconds` or in the Nix configuration file with this option: `
|
||||
number of seconds to cache `.
|
||||
The fetched tarball is cached for a certain amount of time (1
|
||||
hour by default) in `~/.cache/nix/tarballs/`. You can change the
|
||||
cache timeout either on the command line with `--tarball-ttl`
|
||||
*number-of-seconds* or in the Nix configuration file by adding
|
||||
the line `tarball-ttl = ` *number-of-seconds*.
|
||||
|
||||
Note that when obtaining the hash with ` nix-prefetch-url ` the
|
||||
Note that when obtaining the hash with `nix-prefetch-url` the
|
||||
option `--unpack` is required.
|
||||
|
||||
This function can also verify the contents against a hash. In that
|
||||
|
@ -397,7 +407,7 @@ static RegisterPrimOp primop_fetchGit({
|
|||
```
|
||||
|
||||
> **Note**
|
||||
>
|
||||
>
|
||||
> It is nice to always specify the branch which a revision
|
||||
> belongs to. Without the branch being specified, the fetcher
|
||||
> might fail if the default branch changes. Additionally, it can
|
||||
|
@ -434,12 +444,12 @@ static RegisterPrimOp primop_fetchGit({
|
|||
```
|
||||
|
||||
> **Note**
|
||||
>
|
||||
>
|
||||
> Nix will refetch the branch in accordance with
|
||||
> the option `tarball-ttl`.
|
||||
|
||||
> **Note**
|
||||
>
|
||||
>
|
||||
> This behavior is disabled in *Pure evaluation mode*.
|
||||
)",
|
||||
.fun = prim_fetchGit,
|
||||
|
|
|
@ -1,86 +1,76 @@
|
|||
#include "primops.hh"
|
||||
#include "eval-inline.hh"
|
||||
|
||||
#include "../../cpptoml/cpptoml.h"
|
||||
#include "../../toml11/toml.hpp"
|
||||
|
||||
namespace nix {
|
||||
|
||||
static void prim_fromTOML(EvalState & state, const Pos & pos, Value * * args, Value & v)
|
||||
static void prim_fromTOML(EvalState & state, const Pos & pos, Value * * args, Value & val)
|
||||
{
|
||||
using namespace cpptoml;
|
||||
|
||||
auto toml = state.forceStringNoCtx(*args[0], pos);
|
||||
|
||||
std::istringstream tomlStream(toml);
|
||||
std::istringstream tomlStream(std::string{toml});
|
||||
|
||||
std::function<void(Value &, std::shared_ptr<base>)> visit;
|
||||
std::function<void(Value &, toml::value)> visit;
|
||||
|
||||
visit = [&](Value & v, std::shared_ptr<base> t) {
|
||||
visit = [&](Value & v, toml::value t) {
|
||||
|
||||
if (auto t2 = t->as_table()) {
|
||||
switch(t.type())
|
||||
{
|
||||
case toml::value_t::table:
|
||||
{
|
||||
auto table = toml::get<toml::table>(t);
|
||||
|
||||
size_t size = 0;
|
||||
for (auto & i : *t2) { (void) i; size++; }
|
||||
size_t size = 0;
|
||||
for (auto & i : table) { (void) i; size++; }
|
||||
|
||||
state.mkAttrs(v, size);
|
||||
auto attrs = state.buildBindings(size);
|
||||
|
||||
for (auto & i : *t2) {
|
||||
auto & v2 = *state.allocAttr(v, state.symbols.create(i.first));
|
||||
for(auto & elem : table)
|
||||
visit(attrs.alloc(elem.first), elem.second);
|
||||
|
||||
if (auto i2 = i.second->as_table_array()) {
|
||||
size_t size2 = i2->get().size();
|
||||
state.mkList(v2, size2);
|
||||
for (size_t j = 0; j < size2; ++j)
|
||||
visit(*(v2.listElems()[j] = state.allocValue()), i2->get()[j]);
|
||||
v.mkAttrs(attrs);
|
||||
}
|
||||
else
|
||||
visit(v2, i.second);
|
||||
}
|
||||
break;
|
||||
case toml::value_t::array:
|
||||
{
|
||||
auto array = toml::get<std::vector<toml::value>>(t);
|
||||
|
||||
size_t size = array.size();
|
||||
state.mkList(v, size);
|
||||
for (size_t i = 0; i < size; ++i)
|
||||
visit(*(v.listElems()[i] = state.allocValue()), array[i]);
|
||||
}
|
||||
break;
|
||||
case toml::value_t::boolean:
|
||||
v.mkBool(toml::get<bool>(t));
|
||||
break;
|
||||
case toml::value_t::integer:
|
||||
v.mkInt(toml::get<int64_t>(t));
|
||||
break;
|
||||
case toml::value_t::floating:
|
||||
v.mkFloat(toml::get<NixFloat>(t));
|
||||
break;
|
||||
case toml::value_t::string:
|
||||
v.mkString(toml::get<std::string>(t));
|
||||
break;
|
||||
case toml::value_t::local_datetime:
|
||||
case toml::value_t::offset_datetime:
|
||||
case toml::value_t::local_date:
|
||||
case toml::value_t::local_time:
|
||||
// We fail since Nix doesn't have date and time types
|
||||
throw std::runtime_error("Dates and times are not supported");
|
||||
break;
|
||||
case toml::value_t::empty:
|
||||
v.mkNull();
|
||||
break;
|
||||
|
||||
v.attrs->sort();
|
||||
}
|
||||
|
||||
else if (auto t2 = t->as_array()) {
|
||||
size_t size = t2->get().size();
|
||||
|
||||
state.mkList(v, size);
|
||||
|
||||
for (size_t i = 0; i < size; ++i)
|
||||
visit(*(v.listElems()[i] = state.allocValue()), t2->get()[i]);
|
||||
}
|
||||
|
||||
// Handle cases like 'a = [[{ a = true }]]', which IMHO should be
|
||||
// parsed as an array containing an array containing a table,
|
||||
// but instead are parsed as an array containing a table array
|
||||
// containing a table.
|
||||
else if (auto t2 = t->as_table_array()) {
|
||||
size_t size = t2->get().size();
|
||||
|
||||
state.mkList(v, size);
|
||||
|
||||
for (size_t j = 0; j < size; ++j)
|
||||
visit(*(v.listElems()[j] = state.allocValue()), t2->get()[j]);
|
||||
}
|
||||
|
||||
else if (t->is_value()) {
|
||||
if (auto val = t->as<int64_t>())
|
||||
mkInt(v, val->get());
|
||||
else if (auto val = t->as<NixFloat>())
|
||||
mkFloat(v, val->get());
|
||||
else if (auto val = t->as<bool>())
|
||||
mkBool(v, val->get());
|
||||
else if (auto val = t->as<std::string>())
|
||||
mkString(v, val->get());
|
||||
else
|
||||
throw EvalError("unsupported value type in TOML");
|
||||
}
|
||||
|
||||
else abort();
|
||||
};
|
||||
|
||||
try {
|
||||
visit(v, parser(tomlStream).parse());
|
||||
} catch (std::runtime_error & e) {
|
||||
visit(val, toml::parse(tomlStream, "fromTOML" /* the "filename" */));
|
||||
} catch (std::exception & e) { // TODO: toml::syntax_error
|
||||
throw EvalError({
|
||||
.msg = hintfmt("while parsing a TOML string: %s", e.what()),
|
||||
.errPos = pos
|
||||
|
|
|
@ -1,7 +1,8 @@
|
|||
#pragma once
|
||||
|
||||
#include <list>
|
||||
#include <map>
|
||||
#include <unordered_set>
|
||||
#include <unordered_map>
|
||||
|
||||
#include "types.hh"
|
||||
|
||||
|
@ -16,8 +17,8 @@ namespace nix {
|
|||
class Symbol
|
||||
{
|
||||
private:
|
||||
const string * s; // pointer into SymbolTable
|
||||
Symbol(const string * s) : s(s) { };
|
||||
const std::string * s; // pointer into SymbolTable
|
||||
Symbol(const std::string * s) : s(s) { };
|
||||
friend class SymbolTable;
|
||||
|
||||
public:
|
||||
|
@ -70,15 +71,21 @@ public:
|
|||
class SymbolTable
|
||||
{
|
||||
private:
|
||||
typedef std::unordered_set<string> Symbols;
|
||||
Symbols symbols;
|
||||
std::unordered_map<std::string_view, Symbol> symbols;
|
||||
std::list<std::string> store;
|
||||
|
||||
public:
|
||||
Symbol create(std::string_view s)
|
||||
{
|
||||
// FIXME: avoid allocation if 's' already exists in the symbol table.
|
||||
std::pair<Symbols::iterator, bool> res = symbols.emplace(std::string(s));
|
||||
return Symbol(&*res.first);
|
||||
// Most symbols are looked up more than once, so we trade off insertion performance
|
||||
// for lookup performance.
|
||||
// TODO: could probably be done more efficiently with transparent Hash and Equals
|
||||
// on the original implementation using unordered_set
|
||||
auto it = symbols.find(s);
|
||||
if (it != symbols.end()) return it->second;
|
||||
|
||||
auto & rawSym = store.emplace_back(s);
|
||||
return symbols.emplace(rawSym, Symbol(&rawSym)).first->second;
|
||||
}
|
||||
|
||||
size_t size() const
|
||||
|
@ -91,7 +98,7 @@ public:
|
|||
template<typename T>
|
||||
void dump(T callback)
|
||||
{
|
||||
for (auto & s : symbols)
|
||||
for (auto & s : store)
|
||||
callback(s);
|
||||
}
|
||||
};
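
The new SymbolTable above trades a little insertion work for cheap repeated lookups: the owning strings live in a `std::list` (stable addresses), indexed by a `string_view`-keyed map. A minimal standalone sketch of that pattern, with made-up names (`Interner`, `intern`) rather than the Nix classes:

```cpp
#include <cassert>
#include <list>
#include <string>
#include <string_view>
#include <unordered_map>

// Standalone sketch of the interning pattern above: the std::list owns the
// strings and never moves them, so the map's string_view keys and the
// returned pointers stay valid across further insertions.
class Interner
{
    std::unordered_map<std::string_view, const std::string *> symbols;
    std::list<std::string> store;

public:
    const std::string * intern(std::string_view s)
    {
        auto it = symbols.find(s);
        if (it != symbols.end()) return it->second;
        auto & owned = store.emplace_back(std::string(s));
        return symbols.emplace(owned, &owned).first->second;
    }
};

int main()
{
    Interner t;
    assert(t.intern("rev") == t.intern("rev")); // second call hits the map, no new allocation
}
```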
|
||||
|
|
|
@ -10,11 +10,11 @@
|
|||
namespace nix {
|
||||
|
||||
void printValueAsJSON(EvalState & state, bool strict,
|
||||
Value & v, JSONPlaceholder & out, PathSet & context)
|
||||
Value & v, const Pos & pos, JSONPlaceholder & out, PathSet & context)
|
||||
{
|
||||
checkInterrupt();
|
||||
|
||||
if (strict) state.forceValue(v);
|
||||
if (strict) state.forceValue(v, pos);
|
||||
|
||||
switch (v.type()) {
|
||||
|
||||
|
@ -40,7 +40,7 @@ void printValueAsJSON(EvalState & state, bool strict,
|
|||
break;
|
||||
|
||||
case nAttrs: {
|
||||
auto maybeString = state.tryAttrsToString(noPos, v, context, false, false);
|
||||
auto maybeString = state.tryAttrsToString(pos, v, context, false, false);
|
||||
if (maybeString) {
|
||||
out.write(*maybeString);
|
||||
break;
|
||||
|
@ -54,18 +54,18 @@ void printValueAsJSON(EvalState & state, bool strict,
|
|||
for (auto & j : names) {
|
||||
Attr & a(*v.attrs->find(state.symbols.create(j)));
|
||||
auto placeholder(obj.placeholder(j));
|
||||
printValueAsJSON(state, strict, *a.value, placeholder, context);
|
||||
printValueAsJSON(state, strict, *a.value, *a.pos, placeholder, context);
|
||||
}
|
||||
} else
|
||||
printValueAsJSON(state, strict, *i->value, out, context);
|
||||
printValueAsJSON(state, strict, *i->value, *i->pos, out, context);
|
||||
break;
|
||||
}
|
||||
|
||||
case nList: {
|
||||
auto list(out.list());
|
||||
for (unsigned int n = 0; n < v.listSize(); ++n) {
|
||||
for (auto elem : v.listItems()) {
|
||||
auto placeholder(list.placeholder());
|
||||
printValueAsJSON(state, strict, *v.listElems()[n], placeholder, context);
|
||||
printValueAsJSON(state, strict, *elem, pos, placeholder, context);
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
@ -79,18 +79,20 @@ void printValueAsJSON(EvalState & state, bool strict,
|
|||
break;
|
||||
|
||||
case nThunk:
|
||||
throw TypeError("cannot convert %1% to JSON", showType(v));
|
||||
|
||||
case nFunction:
|
||||
throw TypeError("cannot convert %1% to JSON", showType(v));
|
||||
auto e = TypeError({
|
||||
.msg = hintfmt("cannot convert %1% to JSON", showType(v)),
|
||||
.errPos = v.determinePos(pos)
|
||||
});
|
||||
throw e.addTrace(pos, hintfmt("message for the trace"));
|
||||
}
|
||||
}
|
||||
|
||||
void printValueAsJSON(EvalState & state, bool strict,
|
||||
Value & v, std::ostream & str, PathSet & context)
|
||||
Value & v, const Pos & pos, std::ostream & str, PathSet & context)
|
||||
{
|
||||
JSONPlaceholder out(str);
|
||||
printValueAsJSON(state, strict, v, out, context);
|
||||
printValueAsJSON(state, strict, v, pos, out, context);
|
||||
}
|
||||
|
||||
void ExternalValueBase::printValueAsJSON(EvalState & state, bool strict,
|
||||
|
|
|
@ -11,9 +11,9 @@ namespace nix {
|
|||
class JSONPlaceholder;
|
||||
|
||||
void printValueAsJSON(EvalState & state, bool strict,
|
||||
Value & v, JSONPlaceholder & out, PathSet & context);
|
||||
Value & v, const Pos & pos, JSONPlaceholder & out, PathSet & context);
|
||||
|
||||
void printValueAsJSON(EvalState & state, bool strict,
|
||||
Value & v, std::ostream & str, PathSet & context);
|
||||
Value & v, const Pos & pos, std::ostream & str, PathSet & context);
|
||||
|
||||
}
|
||||
|
|
|
@ -9,7 +9,7 @@
|
|||
namespace nix {
|
||||
|
||||
|
||||
static XMLAttrs singletonAttrs(const string & name, const string & value)
|
||||
static XMLAttrs singletonAttrs(const std::string & name, const std::string & value)
|
||||
{
|
||||
XMLAttrs attrs;
|
||||
attrs[name] = value;
|
||||
|
@ -18,7 +18,8 @@ static XMLAttrs singletonAttrs(const string & name, const string & value)
|
|||
|
||||
|
||||
static void printValueAsXML(EvalState & state, bool strict, bool location,
|
||||
Value & v, XMLWriter & doc, PathSet & context, PathSet & drvsSeen);
|
||||
Value & v, XMLWriter & doc, PathSet & context, PathSet & drvsSeen,
|
||||
const Pos & pos);
|
||||
|
||||
|
||||
static void posToXML(XMLAttrs & xmlAttrs, const Pos & pos)
|
||||
|
@ -46,17 +47,18 @@ static void showAttrs(EvalState & state, bool strict, bool location,
|
|||
|
||||
XMLOpenElement _(doc, "attr", xmlAttrs);
|
||||
printValueAsXML(state, strict, location,
|
||||
*a.value, doc, context, drvsSeen);
|
||||
*a.value, doc, context, drvsSeen, *a.pos);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
static void printValueAsXML(EvalState & state, bool strict, bool location,
|
||||
Value & v, XMLWriter & doc, PathSet & context, PathSet & drvsSeen)
|
||||
Value & v, XMLWriter & doc, PathSet & context, PathSet & drvsSeen,
|
||||
const Pos & pos)
|
||||
{
|
||||
checkInterrupt();
|
||||
|
||||
if (strict) state.forceValue(v);
|
||||
if (strict) state.forceValue(v, pos);
|
||||
|
||||
switch (v.type()) {
|
||||
|
||||
|
@ -91,14 +93,14 @@ static void printValueAsXML(EvalState & state, bool strict, bool location,
|
|||
Path drvPath;
|
||||
a = v.attrs->find(state.sDrvPath);
|
||||
if (a != v.attrs->end()) {
|
||||
if (strict) state.forceValue(*a->value);
|
||||
if (strict) state.forceValue(*a->value, *a->pos);
|
||||
if (a->value->type() == nString)
|
||||
xmlAttrs["drvPath"] = drvPath = a->value->string.s;
|
||||
}
|
||||
|
||||
a = v.attrs->find(state.sOutPath);
|
||||
if (a != v.attrs->end()) {
|
||||
if (strict) state.forceValue(*a->value);
|
||||
if (strict) state.forceValue(*a->value, *a->pos);
|
||||
if (a->value->type() == nString)
|
||||
xmlAttrs["outPath"] = a->value->string.s;
|
||||
}
|
||||
|
@ -120,8 +122,8 @@ static void printValueAsXML(EvalState & state, bool strict, bool location,
|
|||
|
||||
case nList: {
|
||||
XMLOpenElement _(doc, "list");
|
||||
for (unsigned int n = 0; n < v.listSize(); ++n)
|
||||
printValueAsXML(state, strict, location, *v.listElems()[n], doc, context, drvsSeen);
|
||||
for (auto v2 : v.listItems())
|
||||
printValueAsXML(state, strict, location, *v2, doc, context, drvsSeen, pos);
|
||||
break;
|
||||
}
|
||||
|
||||
|
@ -135,12 +137,12 @@ static void printValueAsXML(EvalState & state, bool strict, bool location,
|
|||
if (location) posToXML(xmlAttrs, v.lambda.fun->pos);
|
||||
XMLOpenElement _(doc, "function", xmlAttrs);
|
||||
|
||||
if (v.lambda.fun->matchAttrs) {
|
||||
if (v.lambda.fun->hasFormals()) {
|
||||
XMLAttrs attrs;
|
||||
if (!v.lambda.fun->arg.empty()) attrs["name"] = v.lambda.fun->arg;
|
||||
if (v.lambda.fun->formals->ellipsis) attrs["ellipsis"] = "1";
|
||||
XMLOpenElement _(doc, "attrspat", attrs);
|
||||
for (auto & i : v.lambda.fun->formals->formals)
|
||||
for (auto & i : v.lambda.fun->formals->lexicographicOrder())
|
||||
doc.writeEmptyElement("attr", singletonAttrs("name", i.name));
|
||||
} else
|
||||
doc.writeEmptyElement("varpat", singletonAttrs("name", v.lambda.fun->arg));
|
||||
|
@ -149,7 +151,7 @@ static void printValueAsXML(EvalState & state, bool strict, bool location,
|
|||
}
|
||||
|
||||
case nExternal:
|
||||
v.external->printValueAsXML(state, strict, location, doc, context, drvsSeen);
|
||||
v.external->printValueAsXML(state, strict, location, doc, context, drvsSeen, pos);
|
||||
break;
|
||||
|
||||
case nFloat:
|
||||
|
@ -163,19 +165,20 @@ static void printValueAsXML(EvalState & state, bool strict, bool location,
|
|||
|
||||
|
||||
void ExternalValueBase::printValueAsXML(EvalState & state, bool strict,
|
||||
bool location, XMLWriter & doc, PathSet & context, PathSet & drvsSeen) const
|
||||
bool location, XMLWriter & doc, PathSet & context, PathSet & drvsSeen,
|
||||
const Pos & pos) const
|
||||
{
|
||||
doc.writeEmptyElement("unevaluated");
|
||||
}
|
||||
|
||||
|
||||
void printValueAsXML(EvalState & state, bool strict, bool location,
|
||||
Value & v, std::ostream & out, PathSet & context)
|
||||
Value & v, std::ostream & out, PathSet & context, const Pos & pos)
|
||||
{
|
||||
XMLWriter doc(true, out);
|
||||
XMLOpenElement root(doc, "expr");
|
||||
PathSet drvsSeen;
|
||||
printValueAsXML(state, strict, location, v, doc, context, drvsSeen);
|
||||
printValueAsXML(state, strict, location, v, doc, context, drvsSeen, pos);
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -9,6 +9,6 @@
|
|||
namespace nix {
|
||||
|
||||
void printValueAsXML(EvalState & state, bool strict, bool location,
|
||||
Value & v, std::ostream & out, PathSet & context);
|
||||
|
||||
Value & v, std::ostream & out, PathSet & context, const Pos & pos);
|
||||
|
||||
}
|
||||
|
|
|
@ -1,5 +1,7 @@
|
|||
#pragma once
|
||||
|
||||
#include <cassert>
|
||||
|
||||
#include "symbol-table.hh"
|
||||
|
||||
#if HAVE_BOEHMGC
|
||||
|
@ -8,6 +10,8 @@
|
|||
|
||||
namespace nix {
|
||||
|
||||
class BindingsBuilder;
|
||||
|
||||
|
||||
typedef enum {
|
||||
tInt = 1,
|
||||
|
@ -73,20 +77,20 @@ class ExternalValueBase
|
|||
|
||||
public:
|
||||
/* Return a simple string describing the type */
|
||||
virtual string showType() const = 0;
|
||||
virtual std::string showType() const = 0;
|
||||
|
||||
/* Return a string to be used in builtins.typeOf */
|
||||
virtual string typeOf() const = 0;
|
||||
virtual std::string typeOf() const = 0;
|
||||
|
||||
/* Coerce the value to a string. Defaults to uncoercable, i.e. throws an
|
||||
* error
|
||||
* error.
|
||||
*/
|
||||
virtual string coerceToString(const Pos & pos, PathSet & context, bool copyMore, bool copyToStore) const;
|
||||
virtual std::string coerceToString(const Pos & pos, PathSet & context, bool copyMore, bool copyToStore) const;
|
||||
|
||||
/* Compare to another value of the same type. Defaults to uncomparable,
|
||||
* i.e. always false.
|
||||
*/
|
||||
virtual bool operator==(const ExternalValueBase & b) const;
|
||||
virtual bool operator ==(const ExternalValueBase & b) const;
|
||||
|
||||
/* Print the value as JSON. Defaults to unconvertable, i.e. throws an error */
|
||||
virtual void printValueAsJSON(EvalState & state, bool strict,
|
||||
|
@ -94,7 +98,8 @@ class ExternalValueBase
|
|||
|
||||
/* Print the value as XML. Defaults to unevaluated */
|
||||
virtual void printValueAsXML(EvalState & state, bool strict, bool location,
|
||||
XMLWriter & doc, PathSet & context, PathSet & drvsSeen) const;
|
||||
XMLWriter & doc, PathSet & context, PathSet & drvsSeen,
|
||||
const Pos & pos) const;
|
||||
|
||||
virtual ~ExternalValueBase()
|
||||
{
|
||||
|
@ -109,8 +114,8 @@ struct Value
|
|||
private:
|
||||
InternalType internalType;
|
||||
|
||||
friend std::string showType(const Value & v);
|
||||
friend void printValue(std::ostream & str, std::set<const Value *> & active, const Value & v);
|
||||
friend std::string showType(const Value & v);
|
||||
friend void printValue(std::ostream & str, std::set<const void *> & seen, const Value & v);
|
||||
|
||||
public:
|
||||
|
||||
|
@ -232,6 +237,17 @@ public:
|
|||
string.context = context;
|
||||
}
|
||||
|
||||
void mkString(std::string_view s);
|
||||
|
||||
void mkString(std::string_view s, const PathSet & context);
|
||||
|
||||
void mkStringMove(const char * s, const PathSet & context);
|
||||
|
||||
inline void mkString(const Symbol & s)
|
||||
{
|
||||
mkString(((const std::string &) s).c_str());
|
||||
}
|
||||
|
||||
inline void mkPath(const char * s)
|
||||
{
|
||||
clearValue();
|
||||
|
@ -239,6 +255,8 @@ public:
|
|||
path = s;
|
||||
}
|
||||
|
||||
void mkPath(std::string_view s);
|
||||
|
||||
inline void mkNull()
|
||||
{
|
||||
clearValue();
|
||||
|
@ -252,6 +270,8 @@ public:
|
|||
attrs = a;
|
||||
}
|
||||
|
||||
Value & mkAttrs(BindingsBuilder & bindings);
|
||||
|
||||
inline void mkList(size_t size)
|
||||
{
|
||||
clearValue();
|
||||
|
@ -341,7 +361,7 @@ public:
|
|||
return internalType == tList1 ? 1 : internalType == tList2 ? 2 : bigList.size;
|
||||
}
|
||||
|
||||
Pos determinePos(const Pos &pos) const;
|
||||
Pos determinePos(const Pos & pos) const;
|
||||
|
||||
/* Check whether forcing this value requires a trivial amount of
|
||||
computation. In particular, function applications are
|
||||
|
@ -349,54 +369,45 @@ public:
|
|||
bool isTrivial() const;
|
||||
|
||||
std::vector<std::pair<Path, std::string>> getContext();
|
||||
|
||||
auto listItems()
|
||||
{
|
||||
struct ListIterable
|
||||
{
|
||||
typedef Value * const * iterator;
|
||||
iterator _begin, _end;
|
||||
iterator begin() const { return _begin; }
|
||||
iterator end() const { return _end; }
|
||||
};
|
||||
assert(isList());
|
||||
auto begin = listElems();
|
||||
return ListIterable { begin, begin + listSize() };
|
||||
}
|
||||
|
||||
auto listItems() const
|
||||
{
|
||||
struct ConstListIterable
|
||||
{
|
||||
typedef const Value * const * iterator;
|
||||
iterator _begin, _end;
|
||||
iterator begin() const { return _begin; }
|
||||
iterator end() const { return _end; }
|
||||
};
|
||||
assert(isList());
|
||||
auto begin = listElems();
|
||||
return ConstListIterable { begin, begin + listSize() };
|
||||
}
|
||||
};
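
`listItems()` above just wraps the raw `listElems()` pointer range in a tiny struct with `begin()`/`end()`. A self-contained sketch of the same pattern, using an illustrative `IntRange` type rather than the Nix struct:

```cpp
#include <cassert>

// Exposing begin()/end() over a raw pointer range is all that
// range-based for needs.
struct IntRange
{
    typedef const int * iterator;
    iterator _begin, _end;
    iterator begin() const { return _begin; }
    iterator end() const { return _end; }
};

int main()
{
    int xs[] = {1, 2, 3};
    IntRange r { xs, xs + 3 };
    int sum = 0;
    for (auto x : r) sum += x; // analogous to `for (auto elem : v.listItems())` above
    assert(sum == 6);
}
```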
|
||||
|
||||
|
||||
|
||||
// TODO: Remove these static functions, replace call sites with v.mk* instead
|
||||
static inline void mkInt(Value & v, NixInt n)
|
||||
{
|
||||
v.mkInt(n);
|
||||
}
|
||||
|
||||
static inline void mkFloat(Value & v, NixFloat n)
|
||||
{
|
||||
v.mkFloat(n);
|
||||
}
|
||||
|
||||
static inline void mkBool(Value & v, bool b)
|
||||
{
|
||||
v.mkBool(b);
|
||||
}
|
||||
|
||||
static inline void mkNull(Value & v)
|
||||
{
|
||||
v.mkNull();
|
||||
}
|
||||
|
||||
static inline void mkApp(Value & v, Value & left, Value & right)
|
||||
{
|
||||
v.mkApp(&left, &right);
|
||||
}
|
||||
|
||||
static inline void mkString(Value & v, const Symbol & s)
|
||||
{
|
||||
v.mkString(((const string &) s).c_str());
|
||||
}
|
||||
|
||||
|
||||
void mkString(Value & v, const char * s);
|
||||
|
||||
|
||||
void mkPath(Value & v, const char * s);
|
||||
|
||||
|
||||
#if HAVE_BOEHMGC
|
||||
typedef std::vector<Value *, traceable_allocator<Value *> > ValueVector;
|
||||
typedef std::map<Symbol, Value *, std::less<Symbol>, traceable_allocator<std::pair<const Symbol, Value *> > > ValueMap;
|
||||
typedef std::map<Symbol, ValueVector, std::less<Symbol>, traceable_allocator<std::pair<const Symbol, ValueVector> > > ValueVectorMap;
|
||||
#else
|
||||
typedef std::vector<Value *> ValueVector;
|
||||
typedef std::map<Symbol, Value *> ValueMap;
|
||||
typedef std::map<Symbol, ValueVector> ValueVectorMap;
|
||||
#endif
|
||||
|
||||
|
||||
|
|
|
@ -52,13 +52,13 @@ struct CacheImpl : Cache
|
|||
const Attrs & inAttrs,
|
||||
const Attrs & infoAttrs,
|
||||
const StorePath & storePath,
|
||||
bool immutable) override
|
||||
bool locked) override
|
||||
{
|
||||
_state.lock()->add.use()
|
||||
(attrsToJSON(inAttrs).dump())
|
||||
(attrsToJSON(infoAttrs).dump())
|
||||
(store->printStorePath(storePath))
|
||||
(immutable)
|
||||
(locked)
|
||||
(time(0)).exec();
|
||||
}
|
||||
|
||||
|
@ -91,7 +91,7 @@ struct CacheImpl : Cache
|
|||
|
||||
auto infoJSON = stmt.getStr(0);
|
||||
auto storePath = store->parseStorePath(stmt.getStr(1));
|
||||
auto immutable = stmt.getInt(2) != 0;
|
||||
auto locked = stmt.getInt(2) != 0;
|
||||
auto timestamp = stmt.getInt(3);
|
||||
|
||||
store->addTempRoot(storePath);
|
||||
|
@ -105,7 +105,7 @@ struct CacheImpl : Cache
|
|||
inAttrsJSON, infoJSON, store->printStorePath(storePath));
|
||||
|
||||
return Result {
|
||||
.expired = !immutable && (settings.tarballTtl.get() == 0 || timestamp + settings.tarballTtl < time(0)),
|
||||
.expired = !locked && (settings.tarballTtl.get() == 0 || timestamp + settings.tarballTtl < time(0)),
|
||||
.infoAttrs = jsonToAttrs(nlohmann::json::parse(infoJSON)),
|
||||
.storePath = std::move(storePath)
|
||||
};
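
The `expired` computation above encodes one rule: locked cache entries never expire, unlocked ones expire once `tarball-ttl` is zero or exceeded. A hedged restatement as a standalone helper (the name `isExpired` is illustrative, not part of the codebase):

```cpp
#include <ctime>

// Locked entries never expire; unlocked ones expire when the TTL is zero
// or the entry is older than the TTL.
bool isExpired(bool locked, std::time_t timestamp, std::time_t ttlSeconds)
{
    if (locked) return false;
    return ttlSeconds == 0 || timestamp + ttlSeconds < std::time(nullptr);
}
```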
|
||||
|
|
|
@ -13,7 +13,7 @@ struct Cache
|
|||
const Attrs & inAttrs,
|
||||
const Attrs & infoAttrs,
|
||||
const StorePath & storePath,
|
||||
bool immutable) = 0;
|
||||
bool locked) = 0;
|
||||
|
||||
virtual std::optional<std::pair<Attrs, StorePath>> lookup(
|
||||
ref<Store> store,
|
||||
|
|
13
src/libfetchers/fetch-settings.cc
Normal file
13
src/libfetchers/fetch-settings.cc
Normal file
|
@ -0,0 +1,13 @@
|
|||
#include "fetch-settings.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
FetchSettings::FetchSettings()
|
||||
{
|
||||
}
|
||||
|
||||
FetchSettings fetchSettings;
|
||||
|
||||
static GlobalConfig::Register rFetchSettings(&fetchSettings);
|
||||
|
||||
}
|
93
src/libfetchers/fetch-settings.hh
Normal file
93
src/libfetchers/fetch-settings.hh
Normal file
|
@ -0,0 +1,93 @@
|
|||
#pragma once
|
||||
|
||||
#include "types.hh"
|
||||
#include "config.hh"
|
||||
#include "util.hh"
|
||||
|
||||
#include <map>
|
||||
#include <limits>
|
||||
|
||||
#include <sys/types.h>
|
||||
|
||||
namespace nix {
|
||||
|
||||
struct FetchSettings : public Config
|
||||
{
|
||||
FetchSettings();
|
||||
|
||||
Setting<StringMap> accessTokens{this, {}, "access-tokens",
|
||||
R"(
|
||||
Access tokens used to access protected GitHub, GitLab, or
|
||||
other locations requiring token-based authentication.
|
||||
|
||||
Access tokens are specified as a string made up of
|
||||
space-separated `host=token` values. The specific token
|
||||
used is selected by matching the `host` portion against the
|
||||
"host" specification of the input. The actual use of the
|
||||
`token` value is determined by the type of resource being
|
||||
accessed:
|
||||
|
||||
* Github: the token value is the OAUTH-TOKEN string obtained
|
||||
as the Personal Access Token from the Github server (see
|
||||
https://docs.github.com/en/developers/apps/building-oauth-apps/authorizing-oauth-apps).
|
||||
|
||||
* Gitlab: the token value is either the OAuth2 token or the
|
||||
Personal Access Token (these are different types of tokens
|
||||
for gitlab, see
|
||||
https://docs.gitlab.com/12.10/ee/api/README.html#authentication).
|
||||
The `token` value should be `type:tokenstring` where
|
||||
`type` is either `OAuth2` or `PAT` to indicate which type
|
||||
of token is being specified.
|
||||
|
||||
Example `~/.config/nix/nix.conf`:
|
||||
|
||||
```
|
||||
access-tokens = github.com=23ac...b289 gitlab.mycompany.com=PAT:A123Bp_Cd..EfG gitlab.com=OAuth2:1jklw3jk
|
||||
```
|
||||
|
||||
Example `~/code/flake.nix`:
|
||||
|
||||
```nix
|
||||
input.foo = {
|
||||
type = "gitlab";
|
||||
host = "gitlab.mycompany.com";
|
||||
owner = "mycompany";
|
||||
repo = "pro";
|
||||
};
|
||||
```
|
||||
|
||||
This example specifies three tokens, one each for accessing
|
||||
github.com, gitlab.mycompany.com, and gitlab.com.
|
||||
|
||||
The `input.foo` uses the "gitlab" fetcher, which might
|
||||
require specifying the token type along with the token
|
||||
value.
|
||||
)"};
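
As a rough illustration of the `host=token` matching described in this setting's documentation (not the actual `Setting<StringMap>` parser; the helper name and inputs are made up):

```cpp
#include <cassert>
#include <map>
#include <sstream>
#include <string>

// Split the setting on whitespace, split each entry at '=', then look the
// token up by host, as the documentation above describes.
static std::map<std::string, std::string> parseAccessTokens(const std::string & setting)
{
    std::map<std::string, std::string> tokens;
    std::istringstream is(setting);
    std::string entry;
    while (is >> entry) {
        auto eq = entry.find('=');
        if (eq != std::string::npos)
            tokens[entry.substr(0, eq)] = entry.substr(eq + 1);
    }
    return tokens;
}

int main()
{
    auto tokens = parseAccessTokens("github.com=23ac...b289 gitlab.com=OAuth2:1jklw3jk");
    assert(tokens.at("gitlab.com") == "OAuth2:1jklw3jk");
}
```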
|
||||
|
||||
Setting<bool> allowDirty{this, true, "allow-dirty",
|
||||
"Whether to allow dirty Git/Mercurial trees."};
|
||||
|
||||
Setting<bool> warnDirty{this, true, "warn-dirty",
|
||||
"Whether to warn about dirty Git/Mercurial trees."};
|
||||
|
||||
Setting<std::string> flakeRegistry{this, "https://github.com/NixOS/flake-registry/raw/master/flake-registry.json", "flake-registry",
|
||||
"Path or URI of the global flake registry."};
|
||||
|
||||
Setting<bool> useRegistries{this, true, "use-registries",
|
||||
"Whether to use flake registries to resolve flake references."};
|
||||
|
||||
Setting<bool> acceptFlakeConfig{this, false, "accept-flake-config",
|
||||
"Whether to accept nix configuration from a flake without prompting."};
|
||||
|
||||
Setting<std::string> commitLockFileSummary{
|
||||
this, "", "commit-lockfile-summary",
|
||||
R"(
|
||||
The commit summary to use when committing changed flake lock files. If
|
||||
empty, the summary is generated based on the action performed.
|
||||
)"};
|
||||
};
|
||||
|
||||
// FIXME: don't use a global variable.
|
||||
extern FetchSettings fetchSettings;
|
||||
|
||||
}
|
|
@ -24,11 +24,11 @@ static void fixupInput(Input & input)
|
|||
input.getType();
|
||||
input.getRef();
|
||||
if (input.getRev())
|
||||
input.immutable = true;
|
||||
input.locked = true;
|
||||
input.getRevCount();
|
||||
input.getLastModified();
|
||||
if (input.getNarHash())
|
||||
input.immutable = true;
|
||||
input.locked = true;
|
||||
}
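
`fixupInput()` above captures the renamed notion of a locked input: anything pinned by a `rev` or a `narHash` counts. A small sketch of that predicate on an illustrative struct (not the real `Input`):

```cpp
#include <optional>
#include <string>

// An input counts as locked once it pins a commit hash (rev) or a
// content hash (narHash).
struct InputSketch
{
    std::optional<std::string> rev;
    std::optional<std::string> narHash;
    bool locked() const { return rev.has_value() || narHash.has_value(); }
};
```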
|
||||
|
||||
Input Input::fromURL(const ParsedURL & url)
|
||||
|
@ -124,15 +124,13 @@ std::pair<Tree, Input> Input::fetch(ref<Store> store) const
|
|||
debug("using substituted/cached input '%s' in '%s'",
|
||||
to_string(), store->printStorePath(storePath));
|
||||
|
||||
auto actualPath = store->toRealPath(storePath);
|
||||
|
||||
return {fetchers::Tree(std::move(actualPath), std::move(storePath)), *this};
|
||||
return {Tree { .actualPath = store->toRealPath(storePath), .storePath = std::move(storePath) }, *this};
|
||||
} catch (Error & e) {
|
||||
debug("substitution of input '%s' failed: %s", to_string(), e.what());
|
||||
}
|
||||
}
|
||||
|
||||
auto [tree, input] = [&]() -> std::pair<Tree, Input> {
|
||||
auto [storePath, input] = [&]() -> std::pair<StorePath, Input> {
|
||||
try {
|
||||
return scheme->fetch(store, *this);
|
||||
} catch (Error & e) {
|
||||
|
@ -141,8 +139,10 @@ std::pair<Tree, Input> Input::fetch(ref<Store> store) const
|
|||
}
|
||||
}();
|
||||
|
||||
if (tree.actualPath == "")
|
||||
tree.actualPath = store->toRealPath(tree.storePath);
|
||||
Tree tree {
|
||||
.actualPath = store->toRealPath(storePath),
|
||||
.storePath = storePath,
|
||||
};
|
||||
|
||||
auto narHash = store->queryPathInfo(tree.storePath)->narHash;
|
||||
input.attrs.insert_or_assign("narHash", narHash.to_string(SRI, true));
|
||||
|
@ -165,7 +165,7 @@ std::pair<Tree, Input> Input::fetch(ref<Store> store) const
|
|||
input.to_string(), *prevRevCount);
|
||||
}
|
||||
|
||||
input.immutable = true;
|
||||
input.locked = true;
|
||||
|
||||
assert(input.hasAllInfo());
|
||||
|
||||
|
@ -209,7 +209,7 @@ StorePath Input::computeStorePath(Store & store) const
|
|||
{
|
||||
auto narHash = getNarHash();
|
||||
if (!narHash)
|
||||
throw Error("cannot compute store path for mutable input '%s'", to_string());
|
||||
throw Error("cannot compute store path for unlocked input '%s'", to_string());
|
||||
return store.makeFixedOutputPath(FileIngestionMethod::Recursive, *narHash, getName());
|
||||
}
|
||||
|
||||
|
|
|
@ -16,7 +16,6 @@ struct Tree
|
|||
{
|
||||
Path actualPath;
|
||||
StorePath storePath;
|
||||
Tree(Path && actualPath, StorePath && storePath) : actualPath(actualPath), storePath(std::move(storePath)) {}
|
||||
};
|
||||
|
||||
struct InputScheme;
|
||||
|
@ -35,7 +34,7 @@ struct Input
|
|||
|
||||
std::shared_ptr<InputScheme> scheme; // note: can be null
|
||||
Attrs attrs;
|
||||
bool immutable = false;
|
||||
bool locked = false;
|
||||
bool direct = true;
|
||||
|
||||
/* path of the parent of this input, used for relative path resolution */
|
||||
|
@ -60,9 +59,9 @@ public:
|
|||
one that goes through a registry. */
|
||||
bool isDirect() const { return direct; }
|
||||
|
||||
/* Check whether this is an "immutable" input, that is,
|
||||
/* Check whether this is a "locked" input, that is,
|
||||
one that contains a commit hash or content hash. */
|
||||
bool isImmutable() const { return immutable; }
|
||||
bool isLocked() const { return locked; }
|
||||
|
||||
bool hasAllInfo() const;
|
||||
|
||||
|
@ -70,6 +69,8 @@ public:
|
|||
|
||||
bool contains(const Input & other) const;
|
||||
|
||||
/* Fetch the input into the Nix store, returning the location in
|
||||
the Nix store and the locked input. */
|
||||
std::pair<Tree, Input> fetch(ref<Store> store) const;
|
||||
|
||||
Input applyOverrides(
|
||||
|
@ -131,7 +132,7 @@ struct InputScheme
|
|||
|
||||
virtual void markChangedFile(const Input & input, std::string_view file, std::optional<std::string> commitMsg);
|
||||
|
||||
virtual std::pair<Tree, Input> fetch(ref<Store> store, const Input & input) = 0;
|
||||
virtual std::pair<StorePath, Input> fetch(ref<Store> store, const Input & input) = 0;
|
||||
};
|
||||
|
||||
void registerInputScheme(std::shared_ptr<InputScheme> && fetcher);
|
||||
|
@ -147,14 +148,14 @@ DownloadFileResult downloadFile(
|
|||
ref<Store> store,
|
||||
const std::string & url,
|
||||
const std::string & name,
|
||||
bool immutable,
|
||||
bool locked,
|
||||
const Headers & headers = {});
|
||||
|
||||
std::pair<Tree, time_t> downloadTarball(
|
||||
ref<Store> store,
|
||||
const std::string & url,
|
||||
const std::string & name,
|
||||
bool immutable,
|
||||
bool locked,
|
||||
const Headers & headers = {});
|
||||
|
||||
}
|
||||
|
|
|
@ -6,6 +6,8 @@
|
|||
#include "url-parts.hh"
|
||||
#include "pathlocks.hh"
|
||||
|
||||
#include "fetch-settings.hh"
|
||||
|
||||
#include <sys/time.h>
|
||||
#include <sys/wait.h>
|
||||
|
||||
|
@ -51,7 +53,7 @@ struct GitInputScheme : InputScheme
|
|||
for (auto &[name, value] : url.query) {
|
||||
if (name == "rev" || name == "ref")
|
||||
attrs.emplace(name, value);
|
||||
else if (name == "shallow")
|
||||
else if (name == "shallow" || name == "submodules")
|
||||
attrs.emplace(name, Explicit<bool> { value == "1" });
|
||||
else
|
||||
url2.query.emplace(name, value);
|
||||
|
@ -172,7 +174,7 @@ struct GitInputScheme : InputScheme
|
|||
return {isLocal, isLocal ? url.path : url.base};
|
||||
}
|
||||
|
||||
std::pair<Tree, Input> fetch(ref<Store> store, const Input & _input) override
|
||||
std::pair<StorePath, Input> fetch(ref<Store> store, const Input & _input) override
|
||||
{
|
||||
Input input(_input);
|
||||
|
||||
|
@ -187,7 +189,7 @@ struct GitInputScheme : InputScheme
|
|||
if (submodules) cacheType += "-submodules";
|
||||
if (allRefs) cacheType += "-all-refs";
|
||||
|
||||
auto getImmutableAttrs = [&]()
|
||||
auto getLockedAttrs = [&]()
|
||||
{
|
||||
return Attrs({
|
||||
{"type", cacheType},
|
||||
|
@ -197,21 +199,18 @@ struct GitInputScheme : InputScheme
|
|||
};
|
||||
|
||||
auto makeResult = [&](const Attrs & infoAttrs, StorePath && storePath)
|
||||
-> std::pair<Tree, Input>
|
||||
-> std::pair<StorePath, Input>
|
||||
{
|
||||
assert(input.getRev());
|
||||
assert(!_input.getRev() || _input.getRev() == input.getRev());
|
||||
if (!shallow)
|
||||
input.attrs.insert_or_assign("revCount", getIntAttr(infoAttrs, "revCount"));
|
||||
input.attrs.insert_or_assign("lastModified", getIntAttr(infoAttrs, "lastModified"));
|
||||
return {
|
||||
Tree(store->toRealPath(storePath), std::move(storePath)),
|
||||
input
|
||||
};
|
||||
return {std::move(storePath), input};
|
||||
};
|
||||
|
||||
if (input.getRev()) {
|
||||
if (auto res = getCache()->lookup(store, getImmutableAttrs()))
|
||||
if (auto res = getCache()->lookup(store, getLockedAttrs()))
|
||||
return makeResult(res->first, std::move(res->second));
|
||||
}
|
||||
|
||||
|
@ -249,10 +248,10 @@ struct GitInputScheme : InputScheme
|
|||
|
||||
/* This is an unclean working tree. So copy all tracked files. */
|
||||
|
||||
if (!settings.allowDirty)
|
||||
if (!fetchSettings.allowDirty)
|
||||
throw Error("Git tree '%s' is dirty", actualUrl);
|
||||
|
||||
if (settings.warnDirty)
|
||||
if (fetchSettings.warnDirty)
|
||||
warn("Git tree '%s' is dirty", actualUrl);
|
||||
|
||||
auto gitOpts = Strings({ "-C", actualUrl, "ls-files", "-z" });
|
||||
|
@ -285,16 +284,13 @@ struct GitInputScheme : InputScheme
|
|||
"lastModified",
|
||||
haveCommits ? std::stoull(runProgram("git", true, { "-C", actualUrl, "log", "-1", "--format=%ct", "--no-show-signature", "HEAD" })) : 0);
|
||||
|
||||
return {
|
||||
Tree(store->toRealPath(storePath), std::move(storePath)),
|
||||
input
|
||||
};
|
||||
return {std::move(storePath), input};
|
||||
}
|
||||
}
|
||||
|
||||
if (!input.getRef()) input.attrs.insert_or_assign("ref", isLocal ? readHead(actualUrl) : "master");
|
||||
|
||||
Attrs mutableAttrs({
|
||||
Attrs unlockedAttrs({
|
||||
{"type", cacheType},
|
||||
{"name", name},
|
||||
{"url", actualUrl},
|
||||
|
@ -313,7 +309,7 @@ struct GitInputScheme : InputScheme
|
|||
|
||||
} else {
|
||||
|
||||
if (auto res = getCache()->lookup(store, mutableAttrs)) {
|
||||
if (auto res = getCache()->lookup(store, unlockedAttrs)) {
|
||||
auto rev2 = Hash::parseAny(getStrAttr(res->first, "rev"), htSHA1);
|
||||
if (!input.getRev() || input.getRev() == rev2) {
|
||||
input.attrs.insert_or_assign("rev", rev2.gitRev());
|
||||
|
@ -324,17 +320,13 @@ struct GitInputScheme : InputScheme
|
|||
Path cacheDir = getCacheDir() + "/nix/gitv3/" + hashString(htSHA256, actualUrl).to_string(Base32, false);
|
||||
repoDir = cacheDir;
|
||||
|
||||
Path cacheDirLock = cacheDir + ".lock";
|
||||
createDirs(dirOf(cacheDir));
|
||||
AutoCloseFD lock = openLockFile(cacheDirLock, true);
|
||||
lockFile(lock.get(), ltWrite, true);
|
||||
PathLocks cacheDirLock({cacheDir + ".lock"});
|
||||
|
||||
if (!pathExists(cacheDir)) {
|
||||
runProgram("git", true, { "-c", "init.defaultBranch=" + gitInitialBranch, "init", "--bare", repoDir });
|
||||
}
|
||||
|
||||
deleteLockFile(cacheDirLock, lock.get());
|
||||
|
||||
Path localRefFile =
|
||||
input.getRef()->compare(0, 5, "refs/") == 0
|
||||
? cacheDir + "/" + *input.getRef()
|
||||
|
@ -399,6 +391,8 @@ struct GitInputScheme : InputScheme
|
|||
|
||||
if (!input.getRev())
|
||||
input.attrs.insert_or_assign("rev", Hash::parseAny(chomp(readFile(localRefFile)), htSHA1).gitRev());
|
||||
|
||||
// cache dir lock is removed at scope end; we will only use read-only operations on specific revisions in the remainder
|
||||
}
|
||||
|
||||
bool isShallow = chomp(runProgram("git", true, { "-C", repoDir, "rev-parse", "--is-shallow-repository" })) == "true";
|
||||
|
@ -412,7 +406,7 @@ struct GitInputScheme : InputScheme
|
|||
|
||||
/* Now that we know the ref, check again whether we have it in
|
||||
the store. */
|
||||
if (auto res = getCache()->lookup(store, getImmutableAttrs()))
|
||||
if (auto res = getCache()->lookup(store, getLockedAttrs()))
|
||||
return makeResult(res->first, std::move(res->second));
|
||||
|
||||
Path tmpDir = createTempDir();
|
||||
|
@ -484,14 +478,14 @@ struct GitInputScheme : InputScheme
|
|||
if (!_input.getRev())
|
||||
getCache()->add(
|
||||
store,
|
||||
mutableAttrs,
|
||||
unlockedAttrs,
|
||||
infoAttrs,
|
||||
storePath,
|
||||
false);
|
||||
|
||||
getCache()->add(
|
||||
store,
|
||||
getImmutableAttrs(),
|
||||
getLockedAttrs(),
|
||||
infoAttrs,
|
||||
storePath,
|
||||
true);
|
||||
|
|
|
@ -1,13 +1,16 @@
|
|||
#include "filetransfer.hh"
|
||||
#include "cache.hh"
|
||||
#include "fetchers.hh"
|
||||
#include "globals.hh"
|
||||
#include "store-api.hh"
|
||||
#include "types.hh"
|
||||
#include "url-parts.hh"
|
||||
|
||||
#include "fetchers.hh"
|
||||
#include "fetch-settings.hh"
|
||||
|
||||
#include <optional>
|
||||
#include <nlohmann/json.hpp>
|
||||
#include <fstream>
|
||||
|
||||
namespace nix::fetchers {
|
||||
|
||||
|
@ -17,7 +20,7 @@ struct DownloadUrl
|
|||
Headers headers;
|
||||
};
|
||||
|
||||
// A github or gitlab host
|
||||
// A github, gitlab, or sourcehut host
|
||||
const static std::string hostRegexS = "[a-zA-Z0-9.]*"; // FIXME: check
|
||||
std::regex hostRegex(hostRegexS, std::regex::ECMAScript);
|
||||
|
||||
|
@ -156,7 +159,7 @@ struct GitArchiveInputScheme : InputScheme
|
|||
|
||||
std::optional<std::string> getAccessToken(const std::string & host) const
|
||||
{
|
||||
auto tokens = settings.accessTokens.get();
|
||||
auto tokens = fetchSettings.accessTokens.get();
|
||||
if (auto token = get(tokens, host))
|
||||
return *token;
|
||||
return {};
|
||||
|
@ -180,7 +183,7 @@ struct GitArchiveInputScheme : InputScheme
|
|||
|
||||
virtual DownloadUrl getDownloadUrl(const Input & input) const = 0;
|
||||
|
||||
std::pair<Tree, Input> fetch(ref<Store> store, const Input & _input) override
|
||||
std::pair<StorePath, Input> fetch(ref<Store> store, const Input & _input) override
|
||||
{
|
||||
Input input(_input);
|
||||
|
||||
|
@ -192,17 +195,14 @@ struct GitArchiveInputScheme : InputScheme
|
|||
input.attrs.erase("ref");
|
||||
input.attrs.insert_or_assign("rev", rev->gitRev());
|
||||
|
||||
Attrs immutableAttrs({
|
||||
Attrs lockedAttrs({
|
||||
{"type", "git-tarball"},
|
||||
{"rev", rev->gitRev()},
|
||||
});
|
||||
|
||||
if (auto res = getCache()->lookup(store, immutableAttrs)) {
|
||||
if (auto res = getCache()->lookup(store, lockedAttrs)) {
|
||||
input.attrs.insert_or_assign("lastModified", getIntAttr(res->first, "lastModified"));
|
||||
return {
|
||||
Tree(store->toRealPath(res->second), std::move(res->second)),
|
||||
input
|
||||
};
|
||||
return {std::move(res->second), input};
|
||||
}
|
||||
|
||||
auto url = getDownloadUrl(input);
|
||||
|
@ -213,7 +213,7 @@ struct GitArchiveInputScheme : InputScheme
|
|||
|
||||
getCache()->add(
|
||||
store,
|
||||
immutableAttrs,
|
||||
lockedAttrs,
|
||||
{
|
||||
{"rev", rev->gitRev()},
|
||||
{"lastModified", uint64_t(lastModified)}
|
||||
|
@ -221,7 +221,7 @@ struct GitArchiveInputScheme : InputScheme
|
|||
tree.storePath,
|
||||
true);
|
||||
|
||||
return {std::move(tree), input};
|
||||
return {std::move(tree.storePath), input};
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -300,7 +300,7 @@ struct GitLabInputScheme : GitArchiveInputScheme
|
|||
if ("PAT" == token.substr(0, fldsplit))
|
||||
return std::make_pair("Private-token", token.substr(fldsplit+1));
|
||||
warn("Unrecognized GitLab token type %s", token.substr(0, fldsplit));
|
||||
return std::nullopt;
|
||||
return std::make_pair(token.substr(0,fldsplit), token.substr(fldsplit+1));
|
||||
}
|
||||
|
||||
Hash getRevFromRef(nix::ref<Store> store, const Input & input) const override
|
||||
|
@ -348,7 +348,95 @@ struct GitLabInputScheme : GitArchiveInputScheme
|
|||
}
|
||||
};
|
||||
|
||||
struct SourceHutInputScheme : GitArchiveInputScheme
|
||||
{
|
||||
std::string type() override { return "sourcehut"; }
|
||||
|
||||
std::optional<std::pair<std::string, std::string>> accessHeaderFromToken(const std::string & token) const override
|
||||
{
|
||||
// SourceHut supports both PAT and OAuth2. See
|
||||
// https://man.sr.ht/meta.sr.ht/oauth.md
|
||||
return std::pair<std::string, std::string>("Authorization", fmt("Bearer %s", token));
|
||||
// Note: This currently serves no purpose, as this kind of authorization
|
||||
// does not allow for downloading tarballs on sourcehut private repos.
|
||||
// Once it is implemented, however, it should work as expected.
|
||||
}
|
||||
|
||||
Hash getRevFromRef(nix::ref<Store> store, const Input & input) const override
|
||||
{
|
||||
// TODO: In the future, when the sourcehut graphql API is implemented for mercurial
|
||||
// and with anonymous access, this method should use it instead.
|
||||
|
||||
auto ref = *input.getRef();
|
||||
|
||||
auto host = maybeGetStrAttr(input.attrs, "host").value_or("git.sr.ht");
|
||||
auto base_url = fmt("https://%s/%s/%s",
|
||||
host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"));
|
||||
|
||||
Headers headers = makeHeadersWithAuthTokens(host);
|
||||
|
||||
std::string ref_uri;
|
||||
if (ref == "HEAD") {
|
||||
auto file = store->toRealPath(
|
||||
downloadFile(store, fmt("%s/HEAD", base_url), "source", false, headers).storePath);
|
||||
std::ifstream is(file);
|
||||
std::string line;
|
||||
getline(is, line);
|
||||
|
||||
auto ref_index = line.find("ref: ");
|
||||
if (ref_index == std::string::npos) {
|
||||
throw BadURL("in '%s', couldn't resolve HEAD ref '%s'", input.to_string(), ref);
|
||||
}
|
||||
|
||||
ref_uri = line.substr(ref_index+5, line.length()-1);
|
||||
} else
|
||||
ref_uri = fmt("refs/heads/%s", ref);
|
||||
|
||||
auto file = store->toRealPath(
|
||||
downloadFile(store, fmt("%s/info/refs", base_url), "source", false, headers).storePath);
|
||||
std::ifstream is(file);
|
||||
|
||||
std::string line;
|
||||
std::string id;
|
||||
while(getline(is, line)) {
|
||||
auto index = line.find(ref_uri);
|
||||
if (index != std::string::npos) {
|
||||
id = line.substr(0, index-1);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if(id.empty())
|
||||
throw BadURL("in '%s', couldn't find ref '%s'", input.to_string(), ref);
|
||||
|
||||
auto rev = Hash::parseAny(id, htSHA1);
|
||||
debug("HEAD revision for '%s' is %s", fmt("%s/%s", base_url, ref), rev.gitRev());
|
||||
return rev;
|
||||
}
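
`getRevFromRef()` above resolves `HEAD` by reading git's `HEAD` file (`ref: refs/heads/...`) and then scanning `info/refs` for the matching line. A self-contained sketch of just that parsing step, with illustrative inputs and a made-up helper name:

```cpp
#include <cassert>
#include <sstream>
#include <string>

// HEAD contains "ref: refs/heads/<branch>"; info/refs lists "<sha>\t<ref>" lines.
static std::string resolveRef(const std::string & headFile, const std::string & infoRefs)
{
    auto refIndex = headFile.find("ref: ");
    std::string refUri = headFile.substr(refIndex + 5);
    if (!refUri.empty() && refUri.back() == '\n') refUri.pop_back();

    std::istringstream is(infoRefs);
    std::string line;
    while (std::getline(is, line)) {
        auto index = line.find(refUri);
        if (index != std::string::npos)
            return line.substr(0, index - 1); // drop the tab before the ref name
    }
    return "";
}

int main()
{
    std::string head = "ref: refs/heads/master\n";
    std::string refs = "3f786850e387550fdab836ed7e6dc881de23001b\trefs/heads/master\n";
    assert(resolveRef(head, refs) == "3f786850e387550fdab836ed7e6dc881de23001b");
}
```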
|
||||
|
||||
DownloadUrl getDownloadUrl(const Input & input) const override
|
||||
{
|
||||
auto host = maybeGetStrAttr(input.attrs, "host").value_or("git.sr.ht");
|
||||
auto url = fmt("https://%s/%s/%s/archive/%s.tar.gz",
|
||||
host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"),
|
||||
input.getRev()->to_string(Base16, false));
|
||||
|
||||
Headers headers = makeHeadersWithAuthTokens(host);
|
||||
return DownloadUrl { url, headers };
|
||||
}
|
||||
|
||||
void clone(const Input & input, const Path & destDir) override
|
||||
{
|
||||
auto host = maybeGetStrAttr(input.attrs, "host").value_or("git.sr.ht");
|
||||
Input::fromURL(fmt("git+https://%s/%s/%s",
|
||||
host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo")))
|
||||
.applyOverrides(input.getRef(), input.getRev())
|
||||
.clone(destDir);
|
||||
}
|
||||
};
|
||||
|
||||
static auto rGitHubInputScheme = OnStartup([] { registerInputScheme(std::make_unique<GitHubInputScheme>()); });
|
||||
static auto rGitLabInputScheme = OnStartup([] { registerInputScheme(std::make_unique<GitLabInputScheme>()); });
|
||||
static auto rSourceHutInputScheme = OnStartup([] { registerInputScheme(std::make_unique<SourceHutInputScheme>()); });
|
||||
|
||||
}
|
||||
|
|
|
@ -94,7 +94,7 @@ struct IndirectInputScheme : InputScheme
|
|||
return input;
|
||||
}
|
||||
|
||||
std::pair<Tree, Input> fetch(ref<Store> store, const Input & input) override
|
||||
std::pair<StorePath, Input> fetch(ref<Store> store, const Input & input) override
|
||||
{
|
||||
throw Error("indirect input '%s' cannot be fetched directly", input.to_string());
|
||||
}
|
||||
|
|
|
@ -5,6 +5,8 @@
|
|||
#include "store-api.hh"
|
||||
#include "url-parts.hh"
|
||||
|
||||
#include "fetch-settings.hh"
|
||||
|
||||
#include <sys/time.h>
|
||||
|
||||
using namespace std::string_literals;
|
||||
|
@ -26,7 +28,7 @@ static RunOptions hgOptions(const Strings & args)
|
|||
}
|
||||
|
||||
// runProgram wrapper that uses hgOptions instead of stock RunOptions.
|
||||
static string runHg(const Strings & args, const std::optional<std::string> & input = {})
|
||||
static std::string runHg(const Strings & args, const std::optional<std::string> & input = {})
|
||||
{
|
||||
RunOptions opts = hgOptions(args);
|
||||
opts.input = input;
|
||||
|
@ -143,7 +145,7 @@ struct MercurialInputScheme : InputScheme
|
|||
return {isLocal, isLocal ? url.path : url.base};
|
||||
}
|
||||
|
||||
std::pair<Tree, Input> fetch(ref<Store> store, const Input & _input) override
|
||||
std::pair<StorePath, Input> fetch(ref<Store> store, const Input & _input) override
|
||||
{
|
||||
Input input(_input);
|
||||
|
||||
|
@ -165,10 +167,10 @@ struct MercurialInputScheme : InputScheme
|
|||
/* This is an unclean working tree. So copy all tracked
|
||||
files. */
|
||||
|
||||
if (!settings.allowDirty)
|
||||
if (!fetchSettings.allowDirty)
|
||||
throw Error("Mercurial tree '%s' is unclean", actualUrl);
|
||||
|
||||
if (settings.warnDirty)
|
||||
if (fetchSettings.warnDirty)
|
||||
warn("Mercurial tree '%s' is unclean", actualUrl);
|
||||
|
||||
input.attrs.insert_or_assign("ref", chomp(runHg({ "branch", "-R", actualUrl })));
|
||||
|
@ -193,16 +195,13 @@ struct MercurialInputScheme : InputScheme
|
|||
|
||||
auto storePath = store->addToStore(input.getName(), actualUrl, FileIngestionMethod::Recursive, htSHA256, filter);
|
||||
|
||||
return {
|
||||
Tree(store->toRealPath(storePath), std::move(storePath)),
|
||||
input
|
||||
};
|
||||
return {std::move(storePath), input};
|
||||
}
|
||||
}
|
||||
|
||||
if (!input.getRef()) input.attrs.insert_or_assign("ref", "default");
|
||||
|
||||
auto getImmutableAttrs = [&]()
|
||||
auto getLockedAttrs = [&]()
|
||||
{
|
||||
return Attrs({
|
||||
{"type", "hg"},
|
||||
|
@ -212,32 +211,29 @@ struct MercurialInputScheme : InputScheme
|
|||
};
|
||||
|
||||
auto makeResult = [&](const Attrs & infoAttrs, StorePath && storePath)
|
||||
-> std::pair<Tree, Input>
|
||||
-> std::pair<StorePath, Input>
|
||||
{
|
||||
assert(input.getRev());
|
||||
assert(!_input.getRev() || _input.getRev() == input.getRev());
|
||||
input.attrs.insert_or_assign("revCount", getIntAttr(infoAttrs, "revCount"));
|
||||
return {
|
||||
Tree(store->toRealPath(storePath), std::move(storePath)),
|
||||
input
|
||||
};
|
||||
return {std::move(storePath), input};
|
||||
};
|
||||
|
||||
if (input.getRev()) {
|
||||
if (auto res = getCache()->lookup(store, getImmutableAttrs()))
|
||||
if (auto res = getCache()->lookup(store, getLockedAttrs()))
|
||||
return makeResult(res->first, std::move(res->second));
|
||||
}
|
||||
|
||||
auto revOrRef = input.getRev() ? input.getRev()->gitRev() : *input.getRef();
|
||||
|
||||
Attrs mutableAttrs({
|
||||
Attrs unlockedAttrs({
|
||||
{"type", "hg"},
|
||||
{"name", name},
|
||||
{"url", actualUrl},
|
||||
{"ref", *input.getRef()},
|
||||
});
|
||||
|
||||
if (auto res = getCache()->lookup(store, mutableAttrs)) {
|
||||
if (auto res = getCache()->lookup(store, unlockedAttrs)) {
|
||||
auto rev2 = Hash::parseAny(getStrAttr(res->first, "rev"), htSHA1);
|
||||
if (!input.getRev() || input.getRev() == rev2) {
|
||||
input.attrs.insert_or_assign("rev", rev2.gitRev());
|
||||
|
@ -260,7 +256,7 @@ struct MercurialInputScheme : InputScheme
|
|||
runHg({ "pull", "-R", cacheDir, "--", actualUrl });
|
||||
}
|
||||
catch (ExecError & e) {
|
||||
string transJournal = cacheDir + "/.hg/store/journal";
|
||||
auto transJournal = cacheDir + "/.hg/store/journal";
|
||||
/* hg throws "abandoned transaction" error only if this file exists */
|
||||
if (pathExists(transJournal)) {
|
||||
runHg({ "recover", "-R", cacheDir });
|
||||
|
@ -283,7 +279,7 @@ struct MercurialInputScheme : InputScheme
|
|||
auto revCount = std::stoull(tokens[1]);
|
||||
input.attrs.insert_or_assign("ref", tokens[2]);
|
||||
|
||||
if (auto res = getCache()->lookup(store, getImmutableAttrs()))
|
||||
if (auto res = getCache()->lookup(store, getLockedAttrs()))
|
||||
return makeResult(res->first, std::move(res->second));
|
||||
|
||||
Path tmpDir = createTempDir();
|
||||
|
@ -303,14 +299,14 @@ struct MercurialInputScheme : InputScheme
|
|||
if (!_input.getRev())
|
||||
getCache()->add(
|
||||
store,
|
||||
mutableAttrs,
|
||||
unlockedAttrs,
|
||||
infoAttrs,
|
||||
storePath,
|
||||
false);
|
||||
|
||||
getCache()->add(
|
||||
store,
|
||||
getImmutableAttrs(),
|
||||
getLockedAttrs(),
|
||||
infoAttrs,
|
||||
storePath,
|
||||
true);
|
||||
|
|
|
@ -80,7 +80,7 @@ struct PathInputScheme : InputScheme
|
|||
// nothing to do
|
||||
}
|
||||
|
||||
std::pair<Tree, Input> fetch(ref<Store> store, const Input & input) override
|
||||
std::pair<StorePath, Input> fetch(ref<Store> store, const Input & input) override
|
||||
{
|
||||
std::string absPath;
|
||||
auto path = getStrAttr(input.attrs, "path");
|
||||
|
@ -97,7 +97,7 @@ struct PathInputScheme : InputScheme
|
|||
// for security, ensure that if the parent is a store path, it's inside it
|
||||
if (store->isInStore(parent)) {
|
||||
auto storePath = store->printStorePath(store->toStorePath(parent).first);
|
||||
if (!isInDir(absPath, storePath))
|
||||
if (!isDirOrInDir(absPath, storePath))
|
||||
throw BadStorePath("relative path '%s' points outside of its parent's store path '%s'", path, storePath);
|
||||
}
|
||||
} else
|
||||
|
@ -115,10 +115,7 @@ struct PathInputScheme : InputScheme
|
|||
// FIXME: try to substitute storePath.
|
||||
storePath = store->addToStore("source", absPath);
|
||||
|
||||
return {
|
||||
Tree(store->toRealPath(*storePath), std::move(*storePath)),
|
||||
input
|
||||
};
|
||||
return {std::move(*storePath), input};
|
||||
}
|
||||
};
|
||||
|
||||
|
|
|
@ -5,6 +5,8 @@
|
|||
#include "store-api.hh"
|
||||
#include "local-fs-store.hh"
|
||||
|
||||
#include "fetch-settings.hh"
|
||||
|
||||
#include <nlohmann/json.hpp>
|
||||
|
||||
namespace nix::fetchers {
|
||||
|
@ -150,7 +152,7 @@ void overrideRegistry(
|
|||
static std::shared_ptr<Registry> getGlobalRegistry(ref<Store> store)
|
||||
{
|
||||
static auto reg = [&]() {
|
||||
auto path = settings.flakeRegistry.get();
|
||||
auto path = fetchSettings.flakeRegistry.get();
|
||||
|
||||
if (!hasPrefix(path, "/")) {
|
||||
auto storePath = downloadFile(store, path, "flake-registry.json", false).storePath;
|
||||
|
|
|
@ -13,7 +13,7 @@ DownloadFileResult downloadFile(
|
|||
ref<Store> store,
|
||||
const std::string & url,
|
||||
const std::string & name,
|
||||
bool immutable,
|
||||
bool locked,
|
||||
const Headers & headers)
|
||||
{
|
||||
// FIXME: check store
|
||||
|
@ -67,18 +67,18 @@ DownloadFileResult downloadFile(
|
|||
storePath = std::move(cached->storePath);
|
||||
} else {
|
||||
StringSink sink;
|
||||
dumpString(*res.data, sink);
|
||||
auto hash = hashString(htSHA256, *res.data);
|
||||
dumpString(res.data, sink);
|
||||
auto hash = hashString(htSHA256, res.data);
|
||||
ValidPathInfo info {
|
||||
store->makeFixedOutputPath(FileIngestionMethod::Flat, hash, name),
|
||||
hashString(htSHA256, *sink.s),
|
||||
hashString(htSHA256, sink.s),
|
||||
};
|
||||
info.narSize = sink.s->size();
|
||||
info.narSize = sink.s.size();
|
||||
info.ca = FixedOutputHash {
|
||||
.method = FileIngestionMethod::Flat,
|
||||
.hash = hash,
|
||||
};
|
||||
auto source = StringSource { *sink.s };
|
||||
auto source = StringSource(sink.s);
|
||||
store->addToStore(info, source, NoRepair, NoCheckSigs);
|
||||
storePath = std::move(info.path);
|
||||
}
|
||||
|
@ -88,7 +88,7 @@ DownloadFileResult downloadFile(
|
|||
inAttrs,
|
||||
infoAttrs,
|
||||
*storePath,
|
||||
immutable);
|
||||
locked);
|
||||
|
||||
if (url != res.effectiveUri)
|
||||
getCache()->add(
|
||||
|
@ -100,7 +100,7 @@ DownloadFileResult downloadFile(
|
|||
},
|
||||
infoAttrs,
|
||||
*storePath,
|
||||
immutable);
|
||||
locked);
|
||||
|
||||
return {
|
||||
.storePath = std::move(*storePath),
|
||||
|
@ -113,7 +113,7 @@ std::pair<Tree, time_t> downloadTarball(
|
|||
ref<Store> store,
|
||||
const std::string & url,
|
||||
const std::string & name,
|
||||
bool immutable,
|
||||
bool locked,
|
||||
const Headers & headers)
|
||||
{
|
||||
Attrs inAttrs({
|
||||
|
@ -126,11 +126,11 @@ std::pair<Tree, time_t> downloadTarball(
|
|||
|
||||
if (cached && !cached->expired)
|
||||
return {
|
||||
Tree(store->toRealPath(cached->storePath), std::move(cached->storePath)),
|
||||
Tree { .actualPath = store->toRealPath(cached->storePath), .storePath = std::move(cached->storePath) },
|
||||
getIntAttr(cached->infoAttrs, "lastModified")
|
||||
};
|
||||
|
||||
auto res = downloadFile(store, url, name, immutable, headers);
|
||||
auto res = downloadFile(store, url, name, locked, headers);
|
||||
|
||||
std::optional<StorePath> unpackedStorePath;
|
||||
time_t lastModified;
|
||||
|
@ -160,10 +160,10 @@ std::pair<Tree, time_t> downloadTarball(
|
|||
inAttrs,
|
||||
infoAttrs,
|
||||
*unpackedStorePath,
|
||||
immutable);
|
||||
locked);
|
||||
|
||||
return {
|
||||
Tree(store->toRealPath(*unpackedStorePath), std::move(*unpackedStorePath)),
|
||||
Tree { .actualPath = store->toRealPath(*unpackedStorePath), .storePath = std::move(*unpackedStorePath) },
|
||||
lastModified,
|
||||
};
|
||||
}
|
||||
|
@ -176,6 +176,7 @@ struct TarballInputScheme : InputScheme
|
|||
|
||||
if (!hasSuffix(url.path, ".zip")
|
||||
&& !hasSuffix(url.path, ".tar")
|
||||
&& !hasSuffix(url.path, ".tgz")
|
||||
&& !hasSuffix(url.path, ".tar.gz")
|
||||
&& !hasSuffix(url.path, ".tar.xz")
|
||||
&& !hasSuffix(url.path, ".tar.bz2")
|
||||
|
@ -201,7 +202,7 @@ struct TarballInputScheme : InputScheme
|
|||
|
||||
Input input;
|
||||
input.attrs = attrs;
|
||||
//input.immutable = (bool) maybeGetStrAttr(input.attrs, "hash");
|
||||
//input.locked = (bool) maybeGetStrAttr(input.attrs, "hash");
|
||||
return input;
|
||||
}
|
||||
|
||||
|
@@ -224,10 +225,10 @@ struct TarballInputScheme : InputScheme
         return true;
     }

-    std::pair<Tree, Input> fetch(ref<Store> store, const Input & input) override
+    std::pair<StorePath, Input> fetch(ref<Store> store, const Input & input) override
    {
        auto tree = downloadTarball(store, getStrAttr(input.attrs, "url"), input.getName(), false).first;
-        return {std::move(tree), input};
+        return {std::move(tree.storePath), input};
    }
 };
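These tarball hunks rename the `immutable` flag to `locked` without changing what gets downloaded. A small sketch of calling the two helpers under the new name, assuming the nix::fetchers declarations for downloadFile and downloadTarball are in scope; the URLs are placeholders:

// Assumes the libfetchers declarations are visible via the usual headers.
void fetchExamples(nix::ref<nix::Store> store)
{
    using namespace nix::fetchers;

    // Registry-style file fetch; locked = false, so the cached copy may expire and be refreshed.
    auto file = downloadFile(store,
        "https://example.org/flake-registry.json", "flake-registry.json", /* locked */ false);

    // Tarball fetch, as used by TarballInputScheme::fetch above; .first is the Tree.
    auto [tree, lastModified] = downloadTarball(store,
        "https://example.org/source.tar.gz", "source", /* locked */ false);
}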
@ -4,7 +4,7 @@
|
|||
|
||||
namespace nix {
|
||||
|
||||
MixCommonArgs::MixCommonArgs(const string & programName)
|
||||
MixCommonArgs::MixCommonArgs(const std::string & programName)
|
||||
: programName(programName)
|
||||
{
|
||||
addFlag({
|
||||
|
|
|
@ -11,8 +11,8 @@ class MixCommonArgs : public virtual Args
|
|||
{
|
||||
void initialFlagsProcessed() override;
|
||||
public:
|
||||
string programName;
|
||||
MixCommonArgs(const string & programName);
|
||||
std::string programName;
|
||||
MixCommonArgs(const std::string & programName);
|
||||
protected:
|
||||
virtual void pluginsInited() {}
|
||||
};
|
||||
|
|
|
@@ -11,7 +11,7 @@

 namespace nix {

-static std::string getS(const std::vector<Logger::Field> & fields, size_t n)
+static std::string_view getS(const std::vector<Logger::Field> & fields, size_t n)
 {
     assert(n < fields.size());
     assert(fields[n].type == Logger::Field::tString);

@@ -103,17 +103,19 @@ public:
     ~ProgressBar()
     {
         stop();
-        updateThread.join();
     }

     void stop() override
     {
-        auto state(state_.lock());
-        if (!state->active) return;
-        state->active = false;
-        writeToStderr("\r\e[K");
-        updateCV.notify_one();
-        quitCV.notify_one();
+        {
+            auto state(state_.lock());
+            if (!state->active) return;
+            state->active = false;
+            writeToStderr("\r\e[K");
+            updateCV.notify_one();
+            quitCV.notify_one();
+        }
+        updateThread.join();
     }

     bool isVerbose() override {
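The reordering above is the classic release-the-lock-before-join pattern: the destructor must not join updateThread while stop() still holds the state lock that thread may need. A self-contained sketch of the same pattern with illustrative names (not the actual ProgressBar members):

#include <condition_variable>
#include <mutex>
#include <thread>

struct Worker {
    std::mutex m;
    std::condition_variable quitCV;
    bool active = true;
    std::thread updateThread;

    Worker() : updateThread([this] {
        std::unique_lock<std::mutex> lock(m);
        quitCV.wait(lock, [this] { return !active; });  // background loop body elided
    }) {}

    void stop() {
        {   // keep the critical section tight...
            std::lock_guard<std::mutex> lock(m);
            if (!active) return;
            active = false;
            quitCV.notify_one();
        }   // ...and only join once the lock is released,
        updateThread.join();  // otherwise the woken thread could block re-acquiring it.
    }

    ~Worker() { stop(); }
};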
@ -1,6 +1,7 @@
|
|||
#include "globals.hh"
|
||||
#include "shared.hh"
|
||||
#include "store-api.hh"
|
||||
#include "gc-store.hh"
|
||||
#include "util.hh"
|
||||
#include "loggers.hh"
|
||||
|
||||
|
@ -15,9 +16,14 @@
|
|||
#include <sys/stat.h>
|
||||
#include <unistd.h>
|
||||
#include <signal.h>
|
||||
#include <sys/types.h>
|
||||
#include <sys/socket.h>
|
||||
#include <netdb.h>
|
||||
#ifdef __linux__
|
||||
#include <features.h>
|
||||
#endif
|
||||
#ifdef __GLIBC__
|
||||
#include <gnu/lib-names.h>
|
||||
#include <nss.h>
|
||||
#include <dlfcn.h>
|
||||
#endif
|
||||
|
||||
#include <openssl/crypto.h>
|
||||
|
||||
|
@ -89,7 +95,7 @@ void printMissing(ref<Store> store, const StorePathSet & willBuild,
|
|||
}
|
||||
|
||||
|
||||
string getArg(const string & opt,
|
||||
std::string getArg(const std::string & opt,
|
||||
Strings::iterator & i, const Strings::iterator & end)
|
||||
{
|
||||
++i;
|
||||
|
@ -121,21 +127,30 @@ static void preloadNSS() {
|
|||
been loaded in the parent. So we force a lookup of an invalid domain to force the NSS machinery to
|
||||
load its lookup libraries in the parent before any child gets a chance to. */
|
||||
std::call_once(dns_resolve_flag, []() {
|
||||
struct addrinfo *res = NULL;
|
||||
|
||||
/* nss will only force the "local" (not through nscd) dns resolution if its on the LOCALDOMAIN.
|
||||
We need the resolution to be done locally, as nscd socket will not be accessible in the
|
||||
sandbox. */
|
||||
char * previous_env = getenv("LOCALDOMAIN");
|
||||
setenv("LOCALDOMAIN", "invalid", 1);
|
||||
if (getaddrinfo("this.pre-initializes.the.dns.resolvers.invalid.", "http", NULL, &res) == 0) {
|
||||
if (res) freeaddrinfo(res);
|
||||
}
|
||||
if (previous_env) {
|
||||
setenv("LOCALDOMAIN", previous_env, 1);
|
||||
} else {
|
||||
unsetenv("LOCALDOMAIN");
|
||||
}
|
||||
#ifdef __GLIBC__
|
||||
/* On linux, glibc will run every lookup through the nss layer.
|
||||
* That means every lookup goes, by default, through nscd, which acts as a local
|
||||
* cache.
|
||||
* Because we run builds in a sandbox, we also remove access to nscd otherwise
|
||||
* lookups would leak into the sandbox.
|
||||
*
|
||||
* But now we have a new problem, we need to make sure the nss_dns backend that
|
||||
* does the dns lookups when nscd is not available is loaded or available.
|
||||
*
|
||||
* We can't make it available without leaking nix's environment, so instead we'll
|
||||
* load the backend, and configure nss so it does not try to run dns lookups
|
||||
* through nscd.
|
||||
*
|
||||
* This is technically only used for builtins:fetch* functions so we only care
|
||||
* about dns.
|
||||
*
|
||||
* All other platforms are unaffected.
|
||||
*/
|
||||
if (!dlopen(LIBNSS_DNS_SO, RTLD_NOW))
|
||||
warn("unable to load nss_dns backend");
|
||||
// FIXME: get hosts entry from nsswitch.conf.
|
||||
__nss_configure_lookup("hosts", "files dns");
|
||||
#endif
|
||||
});
|
||||
}
|
||||
|
||||
|
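A distilled, standalone version of the idea in the hunk above, assuming glibc: force one resolver lookup with LOCALDOMAIN overridden so the NSS plugins get mapped before any sandboxed child forks, then pin the 'hosts' lookup order past nscd. Error handling is simplified relative to the code above:

#include <sys/types.h>
#include <sys/socket.h>
#include <netdb.h>
#include <stdlib.h>
#include <mutex>
#ifdef __GLIBC__
#include <gnu/lib-names.h>
#include <nss.h>
#include <dlfcn.h>
#endif

static std::once_flag dnsPreloadFlag;

static void preloadResolverOnce()
{
    std::call_once(dnsPreloadFlag, [] {
        // Force a local (non-nscd) lookup so the NSS libraries are loaded
        // into this process before any sandboxed child is forked.
        char * prev = getenv("LOCALDOMAIN");
        setenv("LOCALDOMAIN", "invalid", 1);
        struct addrinfo * res = nullptr;
        if (getaddrinfo("pre-initializes.the.dns.resolvers.invalid.", "http", nullptr, &res) == 0)
            freeaddrinfo(res);
        if (prev) setenv("LOCALDOMAIN", prev, 1); else unsetenv("LOCALDOMAIN");

#ifdef __GLIBC__
        // Make sure the nss_dns backend is resident and skip nscd for 'hosts',
        // since the nscd socket is unreachable from inside the build sandbox.
        dlopen(LIBNSS_DNS_SO, RTLD_NOW);
        __nss_configure_lookup("hosts", "files dns");
#endif
    });
}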
@ -308,14 +323,14 @@ void parseCmdLine(int argc, char * * argv,
|
|||
}
|
||||
|
||||
|
||||
void parseCmdLine(const string & programName, const Strings & args,
|
||||
void parseCmdLine(const std::string & programName, const Strings & args,
|
||||
std::function<bool(Strings::iterator & arg, const Strings::iterator & end)> parseArg)
|
||||
{
|
||||
LegacyArgs(programName, parseArg).parseCmdline(args);
|
||||
}
|
||||
|
||||
|
||||
void printVersion(const string & programName)
|
||||
void printVersion(const std::string & programName)
|
||||
{
|
||||
std::cout << format("%1% (Nix) %2%") % programName % nixVersion << std::endl;
|
||||
if (verbosity > lvlInfo) {
|
||||
|
@ -338,7 +353,7 @@ void printVersion(const string & programName)
|
|||
}
|
||||
|
||||
|
||||
void showManPage(const string & name)
|
||||
void showManPage(const std::string & name)
|
||||
{
|
||||
restoreProcessContext();
|
||||
setenv("MANPATH", settings.nixManDir.c_str(), 1);
|
||||
|
@ -347,13 +362,13 @@ void showManPage(const string & name)
|
|||
}
|
||||
|
||||
|
||||
int handleExceptions(const string & programName, std::function<void()> fun)
|
||||
int handleExceptions(const std::string & programName, std::function<void()> fun)
|
||||
{
|
||||
ReceiveInterrupts receiveInterrupts; // FIXME: need better place for this
|
||||
|
||||
ErrorInfo::programName = baseNameOf(programName);
|
||||
|
||||
string error = ANSI_RED "error:" ANSI_NORMAL " ";
|
||||
std::string error = ANSI_RED "error:" ANSI_NORMAL " ";
|
||||
try {
|
||||
try {
|
||||
fun();
|
||||
|
@ -393,7 +408,7 @@ RunPager::RunPager()
|
|||
if (!isatty(STDOUT_FILENO)) return;
|
||||
char * pager = getenv("NIX_PAGER");
|
||||
if (!pager) pager = getenv("PAGER");
|
||||
if (pager && ((string) pager == "" || (string) pager == "cat")) return;
|
||||
if (pager && ((std::string) pager == "" || (std::string) pager == "cat")) return;
|
||||
|
||||
Pipe toPager;
|
||||
toPager.create();
|
||||
|
@ -413,7 +428,7 @@ RunPager::RunPager()
|
|||
});
|
||||
|
||||
pid.setKillSignal(SIGINT);
|
||||
|
||||
stdout = fcntl(STDOUT_FILENO, F_DUPFD_CLOEXEC, 0);
|
||||
if (dup2(toPager.writeSide.get(), STDOUT_FILENO) == -1)
|
||||
throw SysError("dupping stdout");
|
||||
}
|
||||
|
@ -424,7 +439,7 @@ RunPager::~RunPager()
|
|||
try {
|
||||
if (pid != -1) {
|
||||
std::cout.flush();
|
||||
close(STDOUT_FILENO);
|
||||
dup2(stdout, STDOUT_FILENO);
|
||||
pid.wait();
|
||||
}
|
||||
} catch (...) {
|
||||
|
|
|
@ -22,7 +22,7 @@ public:
|
|||
virtual ~Exit();
|
||||
};
|
||||
|
||||
int handleExceptions(const string & programName, std::function<void()> fun);
|
||||
int handleExceptions(const std::string & programName, std::function<void()> fun);
|
||||
|
||||
/* Don't forget to call initPlugins() after settings are initialized! */
|
||||
void initNix();
|
||||
|
@ -30,10 +30,10 @@ void initNix();
|
|||
void parseCmdLine(int argc, char * * argv,
|
||||
std::function<bool(Strings::iterator & arg, const Strings::iterator & end)> parseArg);
|
||||
|
||||
void parseCmdLine(const string & programName, const Strings & args,
|
||||
void parseCmdLine(const std::string & programName, const Strings & args,
|
||||
std::function<bool(Strings::iterator & arg, const Strings::iterator & end)> parseArg);
|
||||
|
||||
void printVersion(const string & programName);
|
||||
void printVersion(const std::string & programName);
|
||||
|
||||
/* Ugh. No better place to put this. */
|
||||
void printGCWarning();
|
||||
|
@ -50,10 +50,10 @@ void printMissing(ref<Store> store, const StorePathSet & willBuild,
|
|||
const StorePathSet & willSubstitute, const StorePathSet & unknown,
|
||||
uint64_t downloadSize, uint64_t narSize, Verbosity lvl = lvlInfo);
|
||||
|
||||
string getArg(const string & opt,
|
||||
std::string getArg(const std::string & opt,
|
||||
Strings::iterator & i, const Strings::iterator & end);
|
||||
|
||||
template<class N> N getIntArg(const string & opt,
|
||||
template<class N> N getIntArg(const std::string & opt,
|
||||
Strings::iterator & i, const Strings::iterator & end, bool allowUnit)
|
||||
{
|
||||
++i;
|
||||
|
@ -76,7 +76,7 @@ struct LegacyArgs : public MixCommonArgs
|
|||
|
||||
|
||||
/* Show the manual page for the specified program. */
|
||||
void showManPage(const string & name);
|
||||
void showManPage(const std::string & name);
|
||||
|
||||
/* The constructor of this class starts a pager if stdout is a
|
||||
terminal and $PAGER is set. Stdout is redirected to the pager. */
|
||||
|
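The comment above describes RunPager's RAII behaviour. A small usage sketch, assuming a command implementation where initNix() has already run; opQuery is an illustrative name:

#include "shared.hh"

#include <iostream>

using namespace nix;

static void opQuery()   // illustrative command body
{
    RunPager pager;     // if stdout is a terminal and $NIX_PAGER/$PAGER is set,
                        // stdout is redirected into the pager from here on
    std::cout << "... long listing ...\n";
}                       // ~RunPager flushes, restores stdout and waits for the pager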
@ -88,6 +88,7 @@ public:
|
|||
|
||||
private:
|
||||
Pid pid;
|
||||
int stdout;
|
||||
};
|
||||
|
||||
extern volatile ::sig_atomic_t blockInt;
|
||||
|
@ -95,7 +96,7 @@ extern volatile ::sig_atomic_t blockInt;
|
|||
|
||||
/* GC helpers. */
|
||||
|
||||
string showBytes(uint64_t bytes);
|
||||
std::string showBytes(uint64_t bytes);
|
||||
|
||||
struct GCResults;
|
||||
|
||||
|
|
|
@ -31,7 +31,7 @@ BinaryCacheStore::BinaryCacheStore(const Params & params)
|
|||
|
||||
StringSink sink;
|
||||
sink << narVersionMagic1;
|
||||
narMagic = *sink.s;
|
||||
narMagic = sink.s;
|
||||
}
|
||||
|
||||
void BinaryCacheStore::init()
|
||||
|
@@ -68,7 +68,7 @@ void BinaryCacheStore::upsertFile(const std::string & path,
 }

 void BinaryCacheStore::getFile(const std::string & path,
-    Callback<std::shared_ptr<std::string>> callback) noexcept
+    Callback<std::optional<std::string>> callback) noexcept
 {
     try {
         callback(getFile(path));

@@ -77,9 +77,9 @@ void BinaryCacheStore::getFile(const std::string & path,

 void BinaryCacheStore::getFile(const std::string & path, Sink & sink)
 {
-    std::promise<std::shared_ptr<std::string>> promise;
+    std::promise<std::optional<std::string>> promise;
     getFile(path,
-        {[&](std::future<std::shared_ptr<std::string>> result) {
+        {[&](std::future<std::optional<std::string>> result) {
             try {
                 promise.set_value(result.get());
             } catch (...) {

@@ -89,15 +89,15 @@ void BinaryCacheStore::getFile(const std::string & path, Sink & sink)
     sink(*promise.get_future().get());
 }

-std::shared_ptr<std::string> BinaryCacheStore::getFile(const std::string & path)
+std::optional<std::string> BinaryCacheStore::getFile(const std::string & path)
 {
     StringSink sink;
     try {
         getFile(path, sink);
     } catch (NoSuchBinaryCacheFile &) {
-        return nullptr;
+        return std::nullopt;
     }
-    return sink.s;
+    return std::move(sink.s);
 }

 std::string BinaryCacheStore::narInfoFileFor(const StorePath & storePath)
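With this change the synchronous getFile overload reports a missing file as std::nullopt instead of a null shared_ptr. A caller-side sketch, written as if inside a BinaryCacheStore member so getFile is in scope; parseCacheInfo is a placeholder for whatever the caller does with the data:

// Old style:
//   std::shared_ptr<std::string> data = getFile("nix-cache-info");
//   if (!data) ... ;   // null pointer meant "not found"

// New style:
if (auto data = getFile("nix-cache-info")) {
    // *data is the file contents, owned by the optional itself; no heap sharing.
    parseCacheInfo(*data);   // placeholder for the caller's real processing
} else {
    // std::nullopt: the file does not exist in the binary cache.
}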
@ -111,15 +111,15 @@ void BinaryCacheStore::writeNarInfo(ref<NarInfo> narInfo)
|
|||
|
||||
upsertFile(narInfoFile, narInfo->to_string(*this), "text/x-nix-narinfo");
|
||||
|
||||
std::string hashPart(narInfo->path.hashPart());
|
||||
|
||||
{
|
||||
auto state_(state.lock());
|
||||
state_->pathInfoCache.upsert(hashPart, PathInfoCacheValue { .value = std::shared_ptr<NarInfo>(narInfo) });
|
||||
state_->pathInfoCache.upsert(
|
||||
std::string(narInfo->path.to_string()),
|
||||
PathInfoCacheValue { .value = std::shared_ptr<NarInfo>(narInfo) });
|
||||
}
|
||||
|
||||
if (diskCache)
|
||||
diskCache->upsertNarInfo(getUri(), hashPart, std::shared_ptr<NarInfo>(narInfo));
|
||||
diskCache->upsertNarInfo(getUri(), std::string(narInfo->path.hashPart()), std::shared_ptr<NarInfo>(narInfo));
|
||||
}
|
||||
|
||||
AutoCloseFD openFile(const Path & path)
|
||||
|
@ -149,7 +149,7 @@ ref<const ValidPathInfo> BinaryCacheStore::addToStoreCommon(
|
|||
{
|
||||
FdSink fileSink(fdTemp.get());
|
||||
TeeSink teeSinkCompressed { fileSink, fileHashSink };
|
||||
auto compressionSink = makeCompressionSink(compression, teeSinkCompressed);
|
||||
auto compressionSink = makeCompressionSink(compression, teeSinkCompressed, parallelCompression, compressionLevel);
|
||||
TeeSink teeSinkUncompressed { *compressionSink, narHashSink };
|
||||
TeeSource teeSource { narSource, teeSinkUncompressed };
|
||||
narAccessor = makeNarAccessor(teeSource);
|
||||
|
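The call above now forwards the parallel-compression and compression-level settings into the compression sink. A sketch of that sink chain in isolation, using example values ('zstd', parallel, level 19) in place of the store settings; it assumes the libutil/libstore headers named in the includes:

#include "compression.hh"
#include "serialise.hh"
#include "hash.hh"
#include "util.hh"

using namespace nix;

void compressToFile(Source & narSource, AutoCloseFD & fdTemp)
{
    FdSink fileSink(fdTemp.get());
    HashSink fileHashSink(htSHA256), narHashSink(htSHA256);

    TeeSink teeSinkCompressed { fileSink, fileHashSink };
    // 'zstd', parallel = true and level = 19 stand in for the store settings
    // `compression`, `parallel-compression` and `compression-level`.
    auto compressionSink = makeCompressionSink("zstd", teeSinkCompressed, true, 19);
    TeeSink teeSinkUncompressed { *compressionSink, narHashSink };

    narSource.drainInto(teeSinkUncompressed);   // stream the NAR through both hashes and the compressor
    compressionSink->finish();
    fileSink.flush();

    auto narHash = narHashSink.finish().first;   // hash of the uncompressed NAR
    auto fileHash = fileHashSink.finish().first; // hash of the compressed file on disk
    (void) narHash; (void) fileHash;
}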
@ -307,17 +307,18 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource
|
|||
}});
|
||||
}
|
||||
|
||||
StorePath BinaryCacheStore::addToStoreFromDump(Source & dump, const string & name,
|
||||
FileIngestionMethod method, HashType hashAlgo, RepairFlag repair)
|
||||
StorePath BinaryCacheStore::addToStoreFromDump(Source & dump, std::string_view name,
|
||||
FileIngestionMethod method, HashType hashAlgo, RepairFlag repair, const StorePathSet & references)
|
||||
{
|
||||
if (method != FileIngestionMethod::Recursive || hashAlgo != htSHA256)
|
||||
unsupported("addToStoreFromDump");
|
||||
return addToStoreCommon(dump, repair, CheckSigs, [&](HashResult nar) {
|
||||
ValidPathInfo info {
|
||||
makeFixedOutputPath(method, nar.first, name),
|
||||
makeFixedOutputPath(method, nar.first, name, references),
|
||||
nar.first,
|
||||
};
|
||||
info.narSize = nar.second;
|
||||
info.references = references;
|
||||
return info;
|
||||
})->path;
|
||||
}
|
||||
|
@ -366,11 +367,11 @@ void BinaryCacheStore::queryPathInfoUncached(const StorePath & storePath,
|
|||
auto callbackPtr = std::make_shared<decltype(callback)>(std::move(callback));
|
||||
|
||||
getFile(narInfoFile,
|
||||
{[=](std::future<std::shared_ptr<std::string>> fut) {
|
||||
{[=](std::future<std::optional<std::string>> fut) {
|
||||
try {
|
||||
auto data = fut.get();
|
||||
|
||||
if (!data) return (*callbackPtr)(nullptr);
|
||||
if (!data) return (*callbackPtr)({});
|
||||
|
||||
stats.narInfoRead++;
|
||||
|
||||
|
@ -384,8 +385,14 @@ void BinaryCacheStore::queryPathInfoUncached(const StorePath & storePath,
|
|||
}});
|
||||
}
|
||||
|
||||
StorePath BinaryCacheStore::addToStore(const string & name, const Path & srcPath,
|
||||
FileIngestionMethod method, HashType hashAlgo, PathFilter & filter, RepairFlag repair)
|
||||
StorePath BinaryCacheStore::addToStore(
|
||||
std::string_view name,
|
||||
const Path & srcPath,
|
||||
FileIngestionMethod method,
|
||||
HashType hashAlgo,
|
||||
PathFilter & filter,
|
||||
RepairFlag repair,
|
||||
const StorePathSet & references)
|
||||
{
|
||||
/* FIXME: Make BinaryCacheStore::addToStoreCommon support
|
||||
non-recursive+sha256 so we can just use the default
|
||||
|
@ -404,10 +411,11 @@ StorePath BinaryCacheStore::addToStore(const string & name, const Path & srcPath
|
|||
});
|
||||
return addToStoreCommon(*source, repair, CheckSigs, [&](HashResult nar) {
|
||||
ValidPathInfo info {
|
||||
makeFixedOutputPath(method, h, name),
|
||||
makeFixedOutputPath(method, h, name, references),
|
||||
nar.first,
|
||||
};
|
||||
info.narSize = nar.second;
|
||||
info.references = references;
|
||||
info.ca = FixedOutputHash {
|
||||
.method = method,
|
||||
.hash = h,
|
||||
|
@ -416,8 +424,11 @@ StorePath BinaryCacheStore::addToStore(const string & name, const Path & srcPath
|
|||
})->path;
|
||||
}
|
||||
|
||||
StorePath BinaryCacheStore::addTextToStore(const string & name, const string & s,
|
||||
const StorePathSet & references, RepairFlag repair)
|
||||
StorePath BinaryCacheStore::addTextToStore(
|
||||
std::string_view name,
|
||||
std::string_view s,
|
||||
const StorePathSet & references,
|
||||
RepairFlag repair)
|
||||
{
|
||||
auto textHash = hashString(htSHA256, s);
|
||||
auto path = makeTextPath(name, textHash, references);
|
||||
|
@ -427,7 +438,7 @@ StorePath BinaryCacheStore::addTextToStore(const string & name, const string & s
|
|||
|
||||
StringSink sink;
|
||||
dumpString(s, sink);
|
||||
auto source = StringSource { *sink.s };
|
||||
StringSource source(sink.s);
|
||||
return addToStoreCommon(source, repair, CheckSigs, [&](HashResult nar) {
|
||||
ValidPathInfo info { path, nar.first };
|
||||
info.narSize = nar.second;
|
||||
|
@ -437,40 +448,29 @@ StorePath BinaryCacheStore::addTextToStore(const string & name, const string & s
|
|||
})->path;
|
||||
}
|
||||
|
||||
std::optional<const Realisation> BinaryCacheStore::queryRealisation(const DrvOutput & id)
|
||||
void BinaryCacheStore::queryRealisationUncached(const DrvOutput & id,
|
||||
Callback<std::shared_ptr<const Realisation>> callback) noexcept
|
||||
{
|
||||
if (diskCache) {
|
||||
auto [cacheOutcome, maybeCachedRealisation] =
|
||||
diskCache->lookupRealisation(getUri(), id);
|
||||
switch (cacheOutcome) {
|
||||
case NarInfoDiskCache::oValid:
|
||||
debug("Returning a cached realisation for %s", id.to_string());
|
||||
return *maybeCachedRealisation;
|
||||
case NarInfoDiskCache::oInvalid:
|
||||
debug("Returning a cached missing realisation for %s", id.to_string());
|
||||
return {};
|
||||
case NarInfoDiskCache::oUnknown:
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
auto outputInfoFilePath = realisationsPrefix + "/" + id.to_string() + ".doi";
|
||||
auto rawOutputInfo = getFile(outputInfoFilePath);
|
||||
|
||||
if (rawOutputInfo) {
|
||||
auto realisation = Realisation::fromJSON(
|
||||
nlohmann::json::parse(*rawOutputInfo), outputInfoFilePath);
|
||||
auto callbackPtr = std::make_shared<decltype(callback)>(std::move(callback));
|
||||
|
||||
if (diskCache)
|
||||
diskCache->upsertRealisation(
|
||||
getUri(), realisation);
|
||||
Callback<std::optional<std::string>> newCallback = {
|
||||
[=](std::future<std::optional<std::string>> fut) {
|
||||
try {
|
||||
auto data = fut.get();
|
||||
if (!data) return (*callbackPtr)({});
|
||||
|
||||
return {realisation};
|
||||
} else {
|
||||
if (diskCache)
|
||||
diskCache->upsertAbsentRealisation(getUri(), id);
|
||||
return std::nullopt;
|
||||
}
|
||||
auto realisation = Realisation::fromJSON(
|
||||
nlohmann::json::parse(*data), outputInfoFilePath);
|
||||
return (*callbackPtr)(std::make_shared<const Realisation>(realisation));
|
||||
} catch (...) {
|
||||
callbackPtr->rethrow();
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
getFile(outputInfoFilePath, std::move(newCallback));
|
||||
}
|
||||
|
||||
void BinaryCacheStore::registerDrvOutput(const Realisation& info) {
|
||||
|
@ -499,7 +499,7 @@ void BinaryCacheStore::addSignatures(const StorePath & storePath, const StringSe
|
|||
writeNarInfo(narInfo);
|
||||
}
|
||||
|
||||
std::shared_ptr<std::string> BinaryCacheStore::getBuildLog(const StorePath & path)
|
||||
std::optional<std::string> BinaryCacheStore::getBuildLog(const StorePath & path)
|
||||
{
|
||||
auto drvPath = path;
|
||||
|
||||
|
@ -507,10 +507,10 @@ std::shared_ptr<std::string> BinaryCacheStore::getBuildLog(const StorePath & pat
|
|||
try {
|
||||
auto info = queryPathInfo(path);
|
||||
// FIXME: add a "Log" field to .narinfo
|
||||
if (!info->deriver) return nullptr;
|
||||
if (!info->deriver) return std::nullopt;
|
||||
drvPath = *info->deriver;
|
||||
} catch (InvalidPath &) {
|
||||
return nullptr;
|
||||
return std::nullopt;
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -521,4 +521,14 @@ std::shared_ptr<std::string> BinaryCacheStore::getBuildLog(const StorePath & pat
     return getFile(logPath);
 }

+void BinaryCacheStore::addBuildLog(const StorePath & drvPath, std::string_view log)
+{
+    assert(drvPath.isDerivation());
+
+    upsertFile(
+        "log/" + std::string(drvPath.to_string()),
+        (std::string) log, // FIXME: don't copy
+        "text/plain; charset=utf-8");
+}
+
 }
|
@ -15,13 +15,17 @@ struct BinaryCacheStoreConfig : virtual StoreConfig
|
|||
{
|
||||
using StoreConfig::StoreConfig;
|
||||
|
||||
const Setting<std::string> compression{(StoreConfig*) this, "xz", "compression", "NAR compression method ('xz', 'bzip2', or 'none')"};
|
||||
const Setting<std::string> compression{(StoreConfig*) this, "xz", "compression", "NAR compression method ('xz', 'bzip2', 'gzip', 'zstd', or 'none')"};
|
||||
const Setting<bool> writeNARListing{(StoreConfig*) this, false, "write-nar-listing", "whether to write a JSON file listing the files in each NAR"};
|
||||
const Setting<bool> writeDebugInfo{(StoreConfig*) this, false, "index-debug-info", "whether to index DWARF debug info files by build ID"};
|
||||
const Setting<Path> secretKeyFile{(StoreConfig*) this, "", "secret-key", "path to secret key used to sign the binary cache"};
|
||||
const Setting<Path> localNarCache{(StoreConfig*) this, "", "local-nar-cache", "path to a local cache of NARs"};
|
||||
const Setting<bool> parallelCompression{(StoreConfig*) this, false, "parallel-compression",
|
||||
"enable multi-threading compression, available for xz only currently"};
|
||||
"enable multi-threading compression for NARs, available for xz and zstd only currently"};
|
||||
const Setting<int> compressionLevel{(StoreConfig*) this, -1, "compression-level",
|
||||
"specify 'preset level' of compression to be used with NARs: "
|
||||
"meaning and accepted range of values depends on compression method selected, "
|
||||
"other than -1 which we reserve to indicate Nix defaults should be used"};
|
||||
};
|
||||
|
||||
class BinaryCacheStore : public virtual BinaryCacheStoreConfig, public virtual Store
|
||||
|
@ -47,6 +51,7 @@ public:
|
|||
const std::string & mimeType) = 0;
|
||||
|
||||
void upsertFile(const std::string & path,
|
||||
// FIXME: use std::string_view
|
||||
std::string && data,
|
||||
const std::string & mimeType);
|
||||
|
||||
|
@ -58,10 +63,11 @@ public:
|
|||
|
||||
/* Fetch the specified file and call the specified callback with
|
||||
the result. A subclass may implement this asynchronously. */
|
||||
virtual void getFile(const std::string & path,
|
||||
Callback<std::shared_ptr<std::string>> callback) noexcept;
|
||||
virtual void getFile(
|
||||
const std::string & path,
|
||||
Callback<std::optional<std::string>> callback) noexcept;
|
||||
|
||||
std::shared_ptr<std::string> getFile(const std::string & path);
|
||||
std::optional<std::string> getFile(const std::string & path);
|
||||
|
||||
public:
|
||||
|
||||
|
@ -92,19 +98,28 @@ public:
|
|||
void addToStore(const ValidPathInfo & info, Source & narSource,
|
||||
RepairFlag repair, CheckSigsFlag checkSigs) override;
|
||||
|
||||
StorePath addToStoreFromDump(Source & dump, const string & name,
|
||||
FileIngestionMethod method, HashType hashAlgo, RepairFlag repair) override;
|
||||
StorePath addToStoreFromDump(Source & dump, std::string_view name,
|
||||
FileIngestionMethod method, HashType hashAlgo, RepairFlag repair, const StorePathSet & references) override;
|
||||
|
||||
StorePath addToStore(const string & name, const Path & srcPath,
|
||||
FileIngestionMethod method, HashType hashAlgo,
|
||||
PathFilter & filter, RepairFlag repair) override;
|
||||
StorePath addToStore(
|
||||
std::string_view name,
|
||||
const Path & srcPath,
|
||||
FileIngestionMethod method,
|
||||
HashType hashAlgo,
|
||||
PathFilter & filter,
|
||||
RepairFlag repair,
|
||||
const StorePathSet & references) override;
|
||||
|
||||
StorePath addTextToStore(const string & name, const string & s,
|
||||
const StorePathSet & references, RepairFlag repair) override;
|
||||
StorePath addTextToStore(
|
||||
std::string_view name,
|
||||
std::string_view s,
|
||||
const StorePathSet & references,
|
||||
RepairFlag repair) override;
|
||||
|
||||
void registerDrvOutput(const Realisation & info) override;
|
||||
|
||||
std::optional<const Realisation> queryRealisation(const DrvOutput &) override;
|
||||
void queryRealisationUncached(const DrvOutput &,
|
||||
Callback<std::shared_ptr<const Realisation>> callback) noexcept override;
|
||||
|
||||
void narFromPath(const StorePath & path, Sink & sink) override;
|
||||
|
||||
|
@ -112,7 +127,9 @@ public:
|
|||
|
||||
void addSignatures(const StorePath & storePath, const StringSet & sigs) override;
|
||||
|
||||
std::shared_ptr<std::string> getBuildLog(const StorePath & path) override;
|
||||
std::optional<std::string> getBuildLog(const StorePath & path) override;
|
||||
|
||||
void addBuildLog(const StorePath & drvPath, std::string_view log) override;
|
||||
|
||||
};
|
||||
|
||||
|
|
89  src/libstore/build-result.hh  Normal file

@@ -0,0 +1,89 @@
|
|||
#pragma once
|
||||
|
||||
#include "realisation.hh"
|
||||
|
||||
#include <string>
|
||||
#include <chrono>
|
||||
|
||||
|
||||
namespace nix {
|
||||
|
||||
struct BuildResult
|
||||
{
|
||||
/* Note: don't remove status codes, and only add new status codes
|
||||
at the end of the list, to prevent client/server
|
||||
incompatibilities in the nix-store --serve protocol. */
|
||||
enum Status {
|
||||
Built = 0,
|
||||
Substituted,
|
||||
AlreadyValid,
|
||||
PermanentFailure,
|
||||
InputRejected,
|
||||
OutputRejected,
|
||||
TransientFailure, // possibly transient
|
||||
CachedFailure, // no longer used
|
||||
TimedOut,
|
||||
MiscFailure,
|
||||
DependencyFailed,
|
||||
LogLimitExceeded,
|
||||
NotDeterministic,
|
||||
ResolvesToAlreadyValid,
|
||||
NoSubstituters,
|
||||
} status = MiscFailure;
|
||||
std::string errorMsg;
|
||||
|
||||
std::string toString() const {
|
||||
auto strStatus = [&]() {
|
||||
switch (status) {
|
||||
case Built: return "Built";
|
||||
case Substituted: return "Substituted";
|
||||
case AlreadyValid: return "AlreadyValid";
|
||||
case PermanentFailure: return "PermanentFailure";
|
||||
case InputRejected: return "InputRejected";
|
||||
case OutputRejected: return "OutputRejected";
|
||||
case TransientFailure: return "TransientFailure";
|
||||
case CachedFailure: return "CachedFailure";
|
||||
case TimedOut: return "TimedOut";
|
||||
case MiscFailure: return "MiscFailure";
|
||||
case DependencyFailed: return "DependencyFailed";
|
||||
case LogLimitExceeded: return "LogLimitExceeded";
|
||||
case NotDeterministic: return "NotDeterministic";
|
||||
case ResolvesToAlreadyValid: return "ResolvesToAlreadyValid";
|
||||
default: return "Unknown";
|
||||
};
|
||||
}();
|
||||
return strStatus + ((errorMsg == "") ? "" : " : " + errorMsg);
|
||||
}
|
||||
|
||||
/* How many times this build was performed. */
|
||||
unsigned int timesBuilt = 0;
|
||||
|
||||
/* If timesBuilt > 1, whether some builds did not produce the same
|
||||
result. (Note that 'isNonDeterministic = false' does not mean
|
||||
the build is deterministic, just that we don't have evidence of
|
||||
non-determinism.) */
|
||||
bool isNonDeterministic = false;
|
||||
|
||||
/* The derivation we built or the store path we substituted. */
|
||||
DerivedPath path;
|
||||
|
||||
/* For derivations, a mapping from the names of the wanted outputs
|
||||
to actual paths. */
|
||||
DrvOutputs builtOutputs;
|
||||
|
||||
/* The start/stop times of the build (or one of the rounds, if it
|
||||
was repeated). */
|
||||
time_t startTime = 0, stopTime = 0;
|
||||
|
||||
bool success()
|
||||
{
|
||||
return status == Built || status == Substituted || status == AlreadyValid || status == ResolvesToAlreadyValid;
|
||||
}
|
||||
|
||||
void rethrow()
|
||||
{
|
||||
throw Error("%s", errorMsg);
|
||||
}
|
||||
};
|
||||
|
||||
}
|
|
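build-result.hh above is a new header; here is a schematic consumer of a BuildResult, for example one returned by Store::buildDerivation. The report function and its logging choices are illustrative:

#include "build-result.hh"
#include "logging.hh"

using namespace nix;

// Schematic consumer: 'result' would come from e.g. Store::buildDerivation().
void report(BuildResult & result)
{
    if (result.success()) {
        for (auto & [id, realisation] : result.builtOutputs)
            printMsg(lvlInfo, "built %s -> %s",
                id.to_string(), std::string(realisation.outPath.to_string()));
    } else {
        printError("build failed: %s", result.toString());  // e.g. "TimedOut : ..."
        result.rethrow();   // re-raises errorMsg as an Error
    }
}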
@@ -17,6 +17,7 @@
 #include <regex>
 #include <queue>

+#include <fstream>
 #include <sys/types.h>
 #include <sys/socket.h>
 #include <sys/un.h>

@@ -65,7 +66,7 @@ namespace nix {

 DerivationGoal::DerivationGoal(const StorePath & drvPath,
     const StringSet & wantedOutputs, Worker & worker, BuildMode buildMode)
-    : Goal(worker)
+    : Goal(worker, DerivedPath::Built { .drvPath = drvPath, .outputs = wantedOutputs })
     , useDerivation(true)
     , drvPath(drvPath)
     , wantedOutputs(wantedOutputs)
@ -84,7 +85,7 @@ DerivationGoal::DerivationGoal(const StorePath & drvPath,
|
|||
|
||||
DerivationGoal::DerivationGoal(const StorePath & drvPath, const BasicDerivation & drv,
|
||||
const StringSet & wantedOutputs, Worker & worker, BuildMode buildMode)
|
||||
: Goal(worker)
|
||||
: Goal(worker, DerivedPath::Built { .drvPath = drvPath, .outputs = wantedOutputs })
|
||||
, useDerivation(false)
|
||||
, drvPath(drvPath)
|
||||
, wantedOutputs(wantedOutputs)
|
||||
|
@ -115,7 +116,7 @@ DerivationGoal::~DerivationGoal()
|
|||
}
|
||||
|
||||
|
||||
string DerivationGoal::key()
|
||||
std::string DerivationGoal::key()
|
||||
{
|
||||
/* Ensure that derivations get built in order of their name,
|
||||
i.e. a derivation named "aardvark" always comes before
|
||||
|
@ -134,7 +135,7 @@ void DerivationGoal::killChild()
|
|||
void DerivationGoal::timedOut(Error && ex)
|
||||
{
|
||||
killChild();
|
||||
done(BuildResult::TimedOut, ex);
|
||||
done(BuildResult::TimedOut, {}, ex);
|
||||
}
|
||||
|
||||
|
||||
|
@ -181,7 +182,7 @@ void DerivationGoal::loadDerivation()
|
|||
trace("loading derivation");
|
||||
|
||||
if (nrFailed != 0) {
|
||||
done(BuildResult::MiscFailure, Error("cannot build missing derivation '%s'", worker.store.printStorePath(drvPath)));
|
||||
done(BuildResult::MiscFailure, {}, Error("cannot build missing derivation '%s'", worker.store.printStorePath(drvPath)));
|
||||
return;
|
||||
}
|
||||
|
||||
|
@ -193,7 +194,7 @@ void DerivationGoal::loadDerivation()
|
|||
assert(worker.evalStore.isValidPath(drvPath));
|
||||
|
||||
/* Get the derivation. */
|
||||
drv = std::make_unique<Derivation>(worker.evalStore.derivationFromPath(drvPath));
|
||||
drv = std::make_unique<Derivation>(worker.evalStore.readDerivation(drvPath));
|
||||
|
||||
haveDerivation();
|
||||
}
|
||||
|
@ -204,7 +205,7 @@ void DerivationGoal::haveDerivation()
|
|||
trace("have derivation");
|
||||
|
||||
if (drv->type() == DerivationType::CAFloating)
|
||||
settings.requireExperimentalFeature("ca-derivations");
|
||||
settings.requireExperimentalFeature(Xp::CaDerivations);
|
||||
|
||||
retrySubstitution = false;
|
||||
|
||||
|
@ -214,28 +215,20 @@ void DerivationGoal::haveDerivation()
|
|||
|
||||
auto outputHashes = staticOutputHashes(worker.evalStore, *drv);
|
||||
for (auto & [outputName, outputHash] : outputHashes)
|
||||
initialOutputs.insert({
|
||||
initialOutputs.insert({
|
||||
outputName,
|
||||
InitialOutput{
|
||||
InitialOutput {
|
||||
.wanted = true, // Will be refined later
|
||||
.outputHash = outputHash
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
/* Check what outputs paths are not already valid. */
|
||||
checkPathValidity();
|
||||
bool allValid = true;
|
||||
for (auto & [_, status] : initialOutputs) {
|
||||
if (!status.wanted) continue;
|
||||
if (!status.known || !status.known->isValid()) {
|
||||
allValid = false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
auto [allValid, validOutputs] = checkPathValidity();
|
||||
|
||||
/* If they are all valid, then we're done. */
|
||||
if (allValid && buildMode == bmNormal) {
|
||||
done(BuildResult::AlreadyValid);
|
||||
done(BuildResult::AlreadyValid, std::move(validOutputs));
|
||||
return;
|
||||
}
|
||||
|
||||
|
@ -276,8 +269,8 @@ void DerivationGoal::outputsSubstitutionTried()
|
|||
trace("all outputs substituted (maybe)");
|
||||
|
||||
if (nrFailed > 0 && nrFailed > nrNoSubstituters + nrIncompleteClosure && !settings.tryFallback) {
|
||||
done(BuildResult::TransientFailure,
|
||||
fmt("some substitutes for the outputs of derivation '%s' failed (usually happens due to networking issues); try '--fallback' to build derivation from source ",
|
||||
done(BuildResult::TransientFailure, {},
|
||||
Error("some substitutes for the outputs of derivation '%s' failed (usually happens due to networking issues); try '--fallback' to build derivation from source ",
|
||||
worker.store.printStorePath(drvPath)));
|
||||
return;
|
||||
}
|
||||
|
@ -300,23 +293,17 @@ void DerivationGoal::outputsSubstitutionTried()
|
|||
return;
|
||||
}
|
||||
|
||||
checkPathValidity();
|
||||
size_t nrInvalid = 0;
|
||||
for (auto & [_, status] : initialOutputs) {
|
||||
if (!status.wanted) continue;
|
||||
if (!status.known || !status.known->isValid())
|
||||
nrInvalid++;
|
||||
}
|
||||
auto [allValid, validOutputs] = checkPathValidity();
|
||||
|
||||
if (buildMode == bmNormal && nrInvalid == 0) {
|
||||
done(BuildResult::Substituted);
|
||||
if (buildMode == bmNormal && allValid) {
|
||||
done(BuildResult::Substituted, std::move(validOutputs));
|
||||
return;
|
||||
}
|
||||
if (buildMode == bmRepair && nrInvalid == 0) {
|
||||
if (buildMode == bmRepair && allValid) {
|
||||
repairClosure();
|
||||
return;
|
||||
}
|
||||
if (buildMode == bmCheck && nrInvalid > 0)
|
||||
if (buildMode == bmCheck && !allValid)
|
||||
throw Error("some outputs of '%s' are not valid, so checking is not possible",
|
||||
worker.store.printStorePath(drvPath));
|
||||
|
||||
|
@ -408,7 +395,7 @@ void DerivationGoal::repairClosure()
|
|||
}
|
||||
|
||||
if (waitees.empty()) {
|
||||
done(BuildResult::AlreadyValid);
|
||||
done(BuildResult::AlreadyValid, assertPathValidity());
|
||||
return;
|
||||
}
|
||||
|
||||
|
@ -422,7 +409,7 @@ void DerivationGoal::closureRepaired()
|
|||
if (nrFailed > 0)
|
||||
throw Error("some paths in the output closure of derivation '%s' could not be repaired",
|
||||
worker.store.printStorePath(drvPath));
|
||||
done(BuildResult::AlreadyValid);
|
||||
done(BuildResult::AlreadyValid, assertPathValidity());
|
||||
}
|
||||
|
||||
|
||||
|
@ -433,7 +420,7 @@ void DerivationGoal::inputsRealised()
|
|||
if (nrFailed != 0) {
|
||||
if (!useDerivation)
|
||||
throw Error("some dependencies of '%s' are missing", worker.store.printStorePath(drvPath));
|
||||
done(BuildResult::DependencyFailed, Error(
|
||||
done(BuildResult::DependencyFailed, {}, Error(
|
||||
"%s dependencies of derivation '%s' failed to build",
|
||||
nrFailed, worker.store.printStorePath(drvPath)));
|
||||
return;
|
||||
|
@ -453,7 +440,7 @@ void DerivationGoal::inputsRealised()
|
|||
if (useDerivation) {
|
||||
auto & fullDrv = *dynamic_cast<Derivation *>(drv.get());
|
||||
|
||||
if (settings.isExperimentalFeatureEnabled("ca-derivations") &&
|
||||
if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations) &&
|
||||
((!fullDrv.inputDrvs.empty() && derivationIsCA(fullDrv.type()))
|
||||
|| fullDrv.type() == DerivationType::DeferredInputAddressed)) {
|
||||
/* We are able to resolve this derivation based on the
|
||||
|
@ -464,7 +451,6 @@ void DerivationGoal::inputsRealised()
|
|||
Derivation drvResolved { *std::move(attempt) };
|
||||
|
||||
auto pathResolved = writeDerivation(worker.store, drvResolved);
|
||||
resolvedDrv = drvResolved;
|
||||
|
||||
auto msg = fmt("Resolved derivation: '%s' -> '%s'",
|
||||
worker.store.printStorePath(drvPath),
|
||||
|
@ -475,9 +461,9 @@ void DerivationGoal::inputsRealised()
|
|||
worker.store.printStorePath(pathResolved),
|
||||
});
|
||||
|
||||
auto resolvedGoal = worker.makeDerivationGoal(
|
||||
resolvedDrvGoal = worker.makeDerivationGoal(
|
||||
pathResolved, wantedOutputs, buildMode);
|
||||
addWaitee(resolvedGoal);
|
||||
addWaitee(resolvedDrvGoal);
|
||||
|
||||
state = &DerivationGoal::resolvedFinished;
|
||||
return;
|
||||
|
@ -523,10 +509,11 @@ void DerivationGoal::inputsRealised()
|
|||
state = &DerivationGoal::tryToBuild;
|
||||
worker.wakeUp(shared_from_this());
|
||||
|
||||
result = BuildResult();
|
||||
buildResult = BuildResult { .path = buildResult.path };
|
||||
}
|
||||
|
||||
void DerivationGoal::started() {
|
||||
void DerivationGoal::started()
|
||||
{
|
||||
auto msg = fmt(
|
||||
buildMode == bmRepair ? "repairing outputs of '%s'" :
|
||||
buildMode == bmCheck ? "checking outputs of '%s'" :
|
||||
|
@ -588,19 +575,12 @@ void DerivationGoal::tryToBuild()
|
|||
omitted, but that would be less efficient.) Note that since we
|
||||
now hold the locks on the output paths, no other process can
|
||||
build this derivation, so no further checks are necessary. */
|
||||
checkPathValidity();
|
||||
bool allValid = true;
|
||||
for (auto & [_, status] : initialOutputs) {
|
||||
if (!status.wanted) continue;
|
||||
if (!status.known || !status.known->isValid()) {
|
||||
allValid = false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
auto [allValid, validOutputs] = checkPathValidity();
|
||||
|
||||
if (buildMode != bmCheck && allValid) {
|
||||
debug("skipping build of derivation '%s', someone beat us to it", worker.store.printStorePath(drvPath));
|
||||
outputLocks.setDeletion(true);
|
||||
done(BuildResult::AlreadyValid);
|
||||
done(BuildResult::AlreadyValid, std::move(validOutputs));
|
||||
return;
|
||||
}
|
||||
|
||||
|
@ -616,7 +596,9 @@ void DerivationGoal::tryToBuild()
|
|||
/* Don't do a remote build if the derivation has the attribute
|
||||
`preferLocalBuild' set. Also, check and repair modes are only
|
||||
supported for local builds. */
|
||||
bool buildLocally = buildMode != bmNormal || parsedDrv->willBuildLocally(worker.store);
|
||||
bool buildLocally =
|
||||
(buildMode != bmNormal || parsedDrv->willBuildLocally(worker.store))
|
||||
&& settings.maxBuildJobs.get() != 0;
|
||||
|
||||
if (!buildLocally) {
|
||||
switch (tryBuildHook()) {
|
||||
|
@ -624,7 +606,7 @@ void DerivationGoal::tryToBuild()
|
|||
/* Yes, it has started doing so. Wait until we get
|
||||
EOF from the hook. */
|
||||
actLock.reset();
|
||||
result.startTime = time(0); // inexact
|
||||
buildResult.startTime = time(0); // inexact
|
||||
state = &DerivationGoal::buildDone;
|
||||
started();
|
||||
return;
|
||||
|
@ -653,7 +635,7 @@ void DerivationGoal::tryLocalBuild() {
|
|||
throw Error(
|
||||
"unable to build with a primary store that isn't a local store; "
|
||||
"either pass a different '--store' or enable remote builds."
|
||||
"\nhttps://nixos.org/nix/manual/#chap-distributed-builds");
|
||||
"\nhttps://nixos.org/manual/nix/stable/advanced-topics/distributed-builds.html");
|
||||
}
|
||||
|
||||
|
||||
|
@ -828,8 +810,8 @@ void DerivationGoal::buildDone()
|
|||
|
||||
debug("builder process for '%s' finished", worker.store.printStorePath(drvPath));
|
||||
|
||||
result.timesBuilt++;
|
||||
result.stopTime = time(0);
|
||||
buildResult.timesBuilt++;
|
||||
buildResult.stopTime = time(0);
|
||||
|
||||
/* So the child is gone now. */
|
||||
worker.childTerminated(this);
|
||||
|
@ -874,11 +856,11 @@ void DerivationGoal::buildDone()
|
|||
|
||||
/* Compute the FS closure of the outputs and register them as
|
||||
being valid. */
|
||||
registerOutputs();
|
||||
auto builtOutputs = registerOutputs();
|
||||
|
||||
StorePathSet outputPaths;
|
||||
for (auto & [_, path] : finalOutputs)
|
||||
outputPaths.insert(path);
|
||||
for (auto & [_, output] : buildResult.builtOutputs)
|
||||
outputPaths.insert(output.outPath);
|
||||
runPostBuildHook(
|
||||
worker.store,
|
||||
*logger,
|
||||
|
@ -888,7 +870,7 @@ void DerivationGoal::buildDone()
|
|||
|
||||
if (buildMode == bmCheck) {
|
||||
cleanupPostOutputsRegisteredModeCheck();
|
||||
done(BuildResult::Built);
|
||||
done(BuildResult::Built, std::move(builtOutputs));
|
||||
return;
|
||||
}
|
||||
|
||||
|
@ -909,6 +891,8 @@ void DerivationGoal::buildDone()
|
|||
outputLocks.setDeletion(true);
|
||||
outputLocks.unlock();
|
||||
|
||||
done(BuildResult::Built, std::move(builtOutputs));
|
||||
|
||||
} catch (BuildError & e) {
|
||||
outputLocks.unlock();
|
||||
|
||||
|
@ -928,30 +912,32 @@ void DerivationGoal::buildDone()
|
|||
BuildResult::PermanentFailure;
|
||||
}
|
||||
|
||||
done(st, e);
|
||||
done(st, {}, e);
|
||||
return;
|
||||
}
|
||||
|
||||
done(BuildResult::Built);
|
||||
}
|
||||
|
||||
void DerivationGoal::resolvedFinished() {
|
||||
assert(resolvedDrv);
|
||||
void DerivationGoal::resolvedFinished()
|
||||
{
|
||||
assert(resolvedDrvGoal);
|
||||
auto resolvedDrv = *resolvedDrvGoal->drv;
|
||||
|
||||
auto resolvedHashes = staticOutputHashes(worker.store, *resolvedDrv);
|
||||
auto resolvedHashes = staticOutputHashes(worker.store, resolvedDrv);
|
||||
|
||||
StorePathSet outputPaths;
|
||||
|
||||
// `wantedOutputs` might be empty, which means “all the outputs”
|
||||
auto realWantedOutputs = wantedOutputs;
|
||||
if (realWantedOutputs.empty())
|
||||
realWantedOutputs = resolvedDrv->outputNames();
|
||||
realWantedOutputs = resolvedDrv.outputNames();
|
||||
|
||||
DrvOutputs builtOutputs;
|
||||
|
||||
for (auto & wantedOutput : realWantedOutputs) {
|
||||
assert(initialOutputs.count(wantedOutput) != 0);
|
||||
assert(resolvedHashes.count(wantedOutput) != 0);
|
||||
auto realisation = worker.store.queryRealisation(
|
||||
DrvOutput{resolvedHashes.at(wantedOutput), wantedOutput}
|
||||
DrvOutput{resolvedHashes.at(wantedOutput), wantedOutput}
|
||||
);
|
||||
// We've just built it, but maybe the build failed, in which case the
|
||||
// realisation won't be there
|
||||
|
@ -963,10 +949,11 @@ void DerivationGoal::resolvedFinished() {
|
|||
signRealisation(newRealisation);
|
||||
worker.store.registerDrvOutput(newRealisation);
|
||||
outputPaths.insert(realisation->outPath);
|
||||
builtOutputs.emplace(realisation->id, *realisation);
|
||||
} else {
|
||||
// If we don't have a realisation, then it must mean that something
|
||||
// failed when building the resolved drv
|
||||
assert(!result.success());
|
||||
assert(!buildResult.success());
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -977,9 +964,17 @@ void DerivationGoal::resolvedFinished() {
|
|||
outputPaths
|
||||
);
|
||||
|
||||
// This is potentially a bit fishy in terms of error reporting. Not sure
|
||||
// how to do it in a cleaner way
|
||||
amDone(nrFailed == 0 ? ecSuccess : ecFailed, ex);
|
||||
auto status = [&]() {
|
||||
auto & resolvedResult = resolvedDrvGoal->buildResult;
|
||||
switch (resolvedResult.status) {
|
||||
case BuildResult::AlreadyValid:
|
||||
return BuildResult::ResolvesToAlreadyValid;
|
||||
default:
|
||||
return resolvedResult.status;
|
||||
}
|
||||
}();
|
||||
|
||||
done(status, std::move(builtOutputs));
|
||||
}
|
||||
|
||||
HookReply DerivationGoal::tryBuildHook()
|
||||
|
@ -1002,7 +997,7 @@ HookReply DerivationGoal::tryBuildHook()
|
|||
|
||||
/* Read the first line of input, which should be a word indicating
|
||||
whether the hook wishes to perform the build. */
|
||||
string reply;
|
||||
std::string reply;
|
||||
while (true) {
|
||||
auto s = [&]() {
|
||||
try {
|
||||
|
@ -1014,8 +1009,8 @@ HookReply DerivationGoal::tryBuildHook()
|
|||
}();
|
||||
if (handleJSONLogMessage(s, worker.act, worker.hook->activities, true))
|
||||
;
|
||||
else if (string(s, 0, 2) == "# ") {
|
||||
reply = string(s, 2);
|
||||
else if (s.substr(0, 2) == "# ") {
|
||||
reply = s.substr(2);
|
||||
break;
|
||||
}
|
||||
else {
|
||||
|
@ -1080,7 +1075,7 @@ HookReply DerivationGoal::tryBuildHook()
|
|||
/* Create the log file and pipe. */
|
||||
Path logFile = openLogFile();
|
||||
|
||||
set<int> fds;
|
||||
std::set<int> fds;
|
||||
fds.insert(hook->fromHook.readSide.get());
|
||||
fds.insert(hook->builderOut.readSide.get());
|
||||
worker.childStarted(shared_from_this(), fds, false, false);
|
||||
|
@ -1089,7 +1084,7 @@ HookReply DerivationGoal::tryBuildHook()
|
|||
}
|
||||
|
||||
|
||||
void DerivationGoal::registerOutputs()
|
||||
DrvOutputs DerivationGoal::registerOutputs()
|
||||
{
|
||||
/* When using a build hook, the build hook can register the output
|
||||
as valid (by doing `nix-store --import'). If so we don't have
|
||||
|
@ -1098,21 +1093,7 @@ void DerivationGoal::registerOutputs()
|
|||
We can only early return when the outputs are known a priori. For
|
||||
floating content-addressed derivations this isn't the case.
|
||||
*/
|
||||
for (auto & [outputName, optOutputPath] : worker.store.queryPartialDerivationOutputMap(drvPath)) {
|
||||
if (!wantOutput(outputName, wantedOutputs))
|
||||
continue;
|
||||
if (!optOutputPath)
|
||||
throw BuildError(
|
||||
"output '%s' from derivation '%s' does not have a known output path",
|
||||
outputName, worker.store.printStorePath(drvPath));
|
||||
auto & outputPath = *optOutputPath;
|
||||
if (!worker.store.isValidPath(outputPath))
|
||||
throw BuildError(
|
||||
"output '%s' from derivation '%s' is supposed to be at '%s' but that path is not valid",
|
||||
outputName, worker.store.printStorePath(drvPath), worker.store.printStorePath(outputPath));
|
||||
|
||||
finalOutputs.insert_or_assign(outputName, outputPath);
|
||||
}
|
||||
return assertPathValidity();
|
||||
}
|
||||
|
||||
Path DerivationGoal::openLogFile()
|
||||
|
@ -1129,10 +1110,10 @@ Path DerivationGoal::openLogFile()
|
|||
logDir = localStore->logDir;
|
||||
else
|
||||
logDir = settings.nixLogDir;
|
||||
Path dir = fmt("%s/%s/%s/", logDir, LocalFSStore::drvsLogDir, string(baseName, 0, 2));
|
||||
Path dir = fmt("%s/%s/%s/", logDir, LocalFSStore::drvsLogDir, baseName.substr(0, 2));
|
||||
createDirs(dir);
|
||||
|
||||
Path logFileName = fmt("%s/%s%s", dir, string(baseName, 2),
|
||||
Path logFileName = fmt("%s/%s%s", dir, baseName.substr(2),
|
||||
settings.compressLog ? ".bz2" : "");
|
||||
|
||||
fdLogFile = open(logFileName.c_str(), O_CREAT | O_WRONLY | O_TRUNC | O_CLOEXEC, 0666);
|
||||
|
@ -1164,16 +1145,17 @@ bool DerivationGoal::isReadDesc(int fd)
|
|||
return fd == hook->builderOut.readSide.get();
|
||||
}
|
||||
|
||||
|
||||
void DerivationGoal::handleChildOutput(int fd, const string & data)
|
||||
void DerivationGoal::handleChildOutput(int fd, std::string_view data)
|
||||
{
|
||||
if (isReadDesc(fd))
|
||||
// local & `ssh://`-builds are dealt with here.
|
||||
auto isWrittenToLog = isReadDesc(fd);
|
||||
if (isWrittenToLog)
|
||||
{
|
||||
logSize += data.size();
|
||||
if (settings.maxLogSize && logSize > settings.maxLogSize) {
|
||||
killChild();
|
||||
done(
|
||||
BuildResult::LogLimitExceeded,
|
||||
BuildResult::LogLimitExceeded, {},
|
||||
Error("%s killed after writing more than %d bytes of log output",
|
||||
getName(), settings.maxLogSize));
|
||||
return;
|
||||
|
@ -1196,7 +1178,16 @@ void DerivationGoal::handleChildOutput(int fd, const string & data)
|
|||
if (hook && fd == hook->fromHook.readSide.get()) {
|
||||
for (auto c : data)
|
||||
if (c == '\n') {
|
||||
handleJSONLogMessage(currentHookLine, worker.act, hook->activities, true);
|
||||
auto json = parseJSONMessage(currentHookLine);
|
||||
if (json) {
|
||||
auto s = handleJSONLogMessage(*json, worker.act, hook->activities, true);
|
||||
// ensure that logs from a builder using `ssh-ng://` as protocol
|
||||
// are also available to `nix log`.
|
||||
if (s && !isWrittenToLog && logSink && (*json)["type"] == resBuildLogLine) {
|
||||
auto f = (*json)["fields"];
|
||||
(*logSink)((f.size() > 0 ? f.at(0).get<std::string>() : "") + "\n");
|
||||
}
|
||||
}
|
||||
currentHookLine.clear();
|
||||
} else
|
||||
currentHookLine += c;
|
||||
|
@ -1253,10 +1244,12 @@ OutputPathMap DerivationGoal::queryDerivationOutputMap()
|
|||
}
|
||||
|
||||
|
||||
void DerivationGoal::checkPathValidity()
|
||||
std::pair<bool, DrvOutputs> DerivationGoal::checkPathValidity()
|
||||
{
|
||||
bool checkHash = buildMode == bmRepair;
|
||||
auto wantedOutputsLeft = wantedOutputs;
|
||||
DrvOutputs validOutputs;
|
||||
|
||||
for (auto & i : queryPartialDerivationOutputMap()) {
|
||||
InitialOutput & info = initialOutputs.at(i.first);
|
||||
info.wanted = wantOutput(i.first, wantedOutputs);
|
||||
|
@ -1273,26 +1266,28 @@ void DerivationGoal::checkPathValidity()
|
|||
: PathStatus::Corrupt,
|
||||
};
|
||||
}
|
||||
if (settings.isExperimentalFeatureEnabled("ca-derivations")) {
|
||||
auto drvOutput = DrvOutput{initialOutputs.at(i.first).outputHash, i.first};
|
||||
auto drvOutput = DrvOutput{initialOutputs.at(i.first).outputHash, i.first};
|
||||
if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations)) {
|
||||
if (auto real = worker.store.queryRealisation(drvOutput)) {
|
||||
info.known = {
|
||||
.path = real->outPath,
|
||||
.status = PathStatus::Valid,
|
||||
};
|
||||
} else if (info.known && info.known->status == PathStatus::Valid) {
|
||||
// We know the output because it' a static output of the
|
||||
} else if (info.known && info.known->isValid()) {
|
||||
// We know the output because it's a static output of the
|
||||
// derivation, and the output path is valid, but we don't have
|
||||
// its realisation stored (probably because it has been built
|
||||
// without the `ca-derivations` experimental flag)
|
||||
// without the `ca-derivations` experimental flag).
|
||||
worker.store.registerDrvOutput(
|
||||
Realisation{
|
||||
Realisation {
|
||||
drvOutput,
|
||||
info.known->path,
|
||||
}
|
||||
);
|
||||
}
|
||||
}
|
||||
if (info.wanted && info.known && info.known->isValid())
|
||||
validOutputs.emplace(drvOutput, Realisation { drvOutput, info.known->path });
|
||||
}
|
||||
// If we requested all the outputs via the empty set, we are always fine.
|
||||
// If we requested specific elements, the loop above removes all the valid
|
||||
|
@ -1301,24 +1296,50 @@ void DerivationGoal::checkPathValidity()
|
|||
throw Error("derivation '%s' does not have wanted outputs %s",
|
||||
worker.store.printStorePath(drvPath),
|
||||
concatStringsSep(", ", quoteStrings(wantedOutputsLeft)));
|
||||
|
||||
bool allValid = true;
|
||||
for (auto & [_, status] : initialOutputs) {
|
||||
if (!status.wanted) continue;
|
||||
if (!status.known || !status.known->isValid()) {
|
||||
allValid = false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return { allValid, validOutputs };
|
||||
}
|
||||
|
||||
|
||||
void DerivationGoal::done(BuildResult::Status status, std::optional<Error> ex)
|
||||
DrvOutputs DerivationGoal::assertPathValidity()
|
||||
{
|
||||
result.status = status;
|
||||
auto [allValid, validOutputs] = checkPathValidity();
|
||||
if (!allValid)
|
||||
throw Error("some outputs are unexpectedly invalid");
|
||||
return validOutputs;
|
||||
}
|
||||
|
||||
|
||||
void DerivationGoal::done(
|
||||
BuildResult::Status status,
|
||||
DrvOutputs builtOutputs,
|
||||
std::optional<Error> ex)
|
||||
{
|
||||
buildResult.status = status;
|
||||
if (ex)
|
||||
result.errorMsg = ex->what();
|
||||
amDone(result.success() ? ecSuccess : ecFailed, ex);
|
||||
if (result.status == BuildResult::TimedOut)
|
||||
// FIXME: strip: "error: "
|
||||
buildResult.errorMsg = ex->what();
|
||||
amDone(buildResult.success() ? ecSuccess : ecFailed, ex);
|
||||
if (buildResult.status == BuildResult::TimedOut)
|
||||
worker.timedOut = true;
|
||||
if (result.status == BuildResult::PermanentFailure)
|
||||
if (buildResult.status == BuildResult::PermanentFailure)
|
||||
worker.permanentFailure = true;
|
||||
|
||||
mcExpectedBuilds.reset();
|
||||
mcRunningBuilds.reset();
|
||||
|
||||
if (result.success()) {
|
||||
if (buildResult.success()) {
|
||||
assert(!builtOutputs.empty());
|
||||
buildResult.builtOutputs = std::move(builtOutputs);
|
||||
if (status == BuildResult::Built)
|
||||
worker.doneBuilds++;
|
||||
} else {
|
||||
|
@ -1327,6 +1348,13 @@ void DerivationGoal::done(BuildResult::Status status, std::optional<Error> ex)
|
|||
}
|
||||
|
||||
worker.updateProgress();
|
||||
|
||||
auto traceBuiltOutputsFile = getEnv("_NIX_TRACE_BUILT_OUTPUTS").value_or("");
|
||||
if (traceBuiltOutputsFile != "") {
|
||||
std::fstream fs;
|
||||
fs.open(traceBuiltOutputsFile, std::fstream::out);
|
||||
fs << worker.store.printStorePath(drvPath) << "\t" << buildResult.toString() << std::endl;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
|
|
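done() above can also append one line per derivation to the file named by _NIX_TRACE_BUILT_OUTPUTS, formatted as the derivation store path, a tab, and BuildResult::toString(). A small standalone reader for that format; the default file name is illustrative:

#include <fstream>
#include <iostream>
#include <string>

// Reads the trace file written when _NIX_TRACE_BUILT_OUTPUTS is set (see done() above):
// one line per derivation, "<drv store path>\t<BuildResult::toString()>".
int main(int argc, char * * argv)
{
    std::ifstream in(argc > 1 ? argv[1] : "build-trace.txt");  // file name is illustrative
    std::string line;
    while (std::getline(in, line)) {
        auto tab = line.find('\t');
        if (tab == std::string::npos) continue;
        std::cout << "derivation: " << line.substr(0, tab)
                  << "  result: " << line.substr(tab + 1) << "\n";
    }
}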
@ -50,8 +50,8 @@ struct DerivationGoal : public Goal
|
|||
/* The path of the derivation. */
|
||||
StorePath drvPath;
|
||||
|
||||
/* The path of the corresponding resolved derivation */
|
||||
std::optional<BasicDerivation> resolvedDrv;
|
||||
/* The goal for the corresponding resolved derivation */
|
||||
std::shared_ptr<DerivationGoal> resolvedDrvGoal;
|
||||
|
||||
/* The specific outputs that we need to build. Empty means all of
|
||||
them. */
|
||||
|
@ -104,20 +104,8 @@ struct DerivationGoal : public Goal
|
|||
typedef void (DerivationGoal::*GoalState)();
|
||||
GoalState state;
|
||||
|
||||
/* The final output paths of the build.
|
||||
|
||||
- For input-addressed derivations, always the precomputed paths
|
||||
|
||||
- For content-addressed derivations, calcuated from whatever the hash
|
||||
ends up being. (Note that fixed outputs derivations that produce the
|
||||
"wrong" output still install that data under its true content-address.)
|
||||
*/
|
||||
OutputPathMap finalOutputs;
|
||||
|
||||
BuildMode buildMode;
|
||||
|
||||
BuildResult result;
|
||||
|
||||
/* The current round, if we're building multiple times. */
|
||||
size_t curRound = 1;
|
||||
|
||||
|
@ -145,15 +133,13 @@ struct DerivationGoal : public Goal
|
|||
|
||||
void timedOut(Error && ex) override;
|
||||
|
||||
string key() override;
|
||||
std::string key() override;
|
||||
|
||||
void work() override;
|
||||
|
||||
/* Add wanted outputs to an already existing derivation goal. */
|
||||
void addWantedOutputs(const StringSet & outputs);
|
||||
|
||||
BuildResult getResult() { return result; }
|
||||
|
||||
/* The states. */
|
||||
void getDerivation();
|
||||
void loadDerivation();
|
||||
|
@ -175,7 +161,7 @@ struct DerivationGoal : public Goal
|
|||
|
||||
/* Check that the derivation outputs all exist and register them
|
||||
as valid. */
|
||||
virtual void registerOutputs();
|
||||
virtual DrvOutputs registerOutputs();
|
||||
|
||||
/* Open a log file and a pipe to it. */
|
||||
Path openLogFile();
|
||||
|
@ -200,7 +186,7 @@ struct DerivationGoal : public Goal
|
|||
virtual bool isReadDesc(int fd);
|
||||
|
||||
/* Callback used by the worker to write to the log. */
|
||||
void handleChildOutput(int fd, const string & data) override;
|
||||
void handleChildOutput(int fd, std::string_view data) override;
|
||||
void handleEOF(int fd) override;
|
||||
void flushLine();
|
||||
|
||||
|
@@ -210,8 +196,17 @@ struct DerivationGoal : public Goal
    std::map<std::string, std::optional<StorePath>> queryPartialDerivationOutputMap();
    OutputPathMap queryDerivationOutputMap();

    /* Return the set of (in)valid paths. */
    void checkPathValidity();
    /* Update 'initialOutputs' to determine the current status of the
       outputs of the derivation. Also returns a Boolean denoting
       whether all outputs are valid and non-corrupt, and a
       'DrvOutputs' structure containing the valid and wanted
       outputs. */
    std::pair<bool, DrvOutputs> checkPathValidity();

    /* Aborts if any output is not valid or corrupt, and otherwise
       returns a 'DrvOutputs' structure containing the wanted
       outputs. */
    DrvOutputs assertPathValidity();

    /* Forcibly kill the child process, if any. */
    virtual void killChild();

@@ -222,6 +217,7 @@ struct DerivationGoal : public Goal

    void done(
        BuildResult::Status status,
        DrvOutputs builtOutputs = {},
        std::optional<Error> ex = {});

    StorePathSet exportReferences(const StorePathSet & storePaths);

|
|
|
@@ -1,11 +1,17 @@
#include "drv-output-substitution-goal.hh"
#include "finally.hh"
#include "worker.hh"
#include "substitution-goal.hh"
#include "callback.hh"

namespace nix {

DrvOutputSubstitutionGoal::DrvOutputSubstitutionGoal(const DrvOutput& id, Worker & worker, RepairFlag repair, std::optional<ContentAddress> ca)
    : Goal(worker)
DrvOutputSubstitutionGoal::DrvOutputSubstitutionGoal(
    const DrvOutput & id,
    Worker & worker,
    RepairFlag repair,
    std::optional<ContentAddress> ca)
    : Goal(worker, DerivedPath::Opaque { StorePath::dummy })
    , id(id)
{
    state = &DrvOutputSubstitutionGoal::init;

@ -30,7 +36,7 @@ void DrvOutputSubstitutionGoal::init()
|
|||
|
||||
void DrvOutputSubstitutionGoal::tryNext()
|
||||
{
|
||||
trace("Trying next substituter");
|
||||
trace("trying next substituter");
|
||||
|
||||
if (subs.size() == 0) {
|
||||
/* None left. Terminate this goal and let someone else deal
|
||||
|
@@ -50,14 +56,42 @@ void DrvOutputSubstitutionGoal::tryNext()
        return;
    }

    auto sub = subs.front();
    sub = subs.front();
    subs.pop_front();

    // FIXME: Make async
    outputInfo = sub->queryRealisation(id);
    // outputInfo = sub->queryRealisation(id);
    outPipe.create();
    promise = decltype(promise)();

    sub->queryRealisation(
        id, { [&](std::future<std::shared_ptr<const Realisation>> res) {
            try {
                Finally updateStats([this]() { outPipe.writeSide.close(); });
                promise.set_value(res.get());
            } catch (...) {
                promise.set_exception(std::current_exception());
            }
        } });

    worker.childStarted(shared_from_this(), {outPipe.readSide.get()}, true, false);

    state = &DrvOutputSubstitutionGoal::realisationFetched;
}

void DrvOutputSubstitutionGoal::realisationFetched()
{
    worker.childTerminated(this);

    try {
        outputInfo = promise.get_future().get();
    } catch (std::exception & e) {
        printError(e.what());
        substituterFailed = true;
    }

    if (!outputInfo) {
        tryNext();
        return;
        return tryNext();
    }

    for (const auto & [depId, depPath] : outputInfo->dependentRealisations) {
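A note on the pattern above: `queryRealisation` now hands its result to a callback, but the worker only knows how to wait on file descriptors, so the goal bridges the two with a pipe (for wake-up) and a `std::promise` (for the payload). A standalone, simplified sketch of that bridge — assumed names only, not Nix code:

    // Minimal sketch: bridge an async callback to an fd-based poll loop via a
    // pipe and a std::promise. The producer thread stands in for the callback.
    #include <future>
    #include <iostream>
    #include <string>
    #include <thread>
    #include <poll.h>
    #include <unistd.h>

    int main()
    {
        int fds[2];
        if (pipe(fds) != 0) return 1;

        std::promise<std::string> promise;

        // Stand-in for sub->queryRealisation(id, callback): runs elsewhere,
        // stores the result, then closes the write side (the Finally above).
        std::thread producer([&] {
            promise.set_value("realisation for output");
            close(fds[1]);
        });

        // Stand-in for the worker loop: wake up when the pipe reports EOF.
        pollfd pfd { fds[0], POLLIN, 0 };
        poll(&pfd, 1, -1);

        // Equivalent of realisationFetched(): collect the promised value.
        std::cout << promise.get_future().get() << std::endl;

        producer.join();
        close(fds[0]);
        return 0;
    }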
|
@ -89,7 +123,7 @@ void DrvOutputSubstitutionGoal::tryNext()
|
|||
void DrvOutputSubstitutionGoal::outPathValid()
|
||||
{
|
||||
assert(outputInfo);
|
||||
trace("Output path substituted");
|
||||
trace("output path substituted");
|
||||
|
||||
if (nrFailed > 0) {
|
||||
debug("The output path of the derivation output '%s' could not be substituted", id.to_string());
|
||||
|
@ -107,7 +141,7 @@ void DrvOutputSubstitutionGoal::finished()
|
|||
amDone(ecSuccess);
|
||||
}
|
||||
|
||||
string DrvOutputSubstitutionGoal::key()
|
||||
std::string DrvOutputSubstitutionGoal::key()
|
||||
{
|
||||
/* "a$" ensures substitution goals happen before derivation
|
||||
goals. */
|
||||
|
@ -119,4 +153,10 @@ void DrvOutputSubstitutionGoal::work()
|
|||
(this->*state)();
|
||||
}
|
||||
|
||||
void DrvOutputSubstitutionGoal::handleEOF(int fd)
|
||||
{
|
||||
if (fd == outPipe.readSide.get()) worker.wakeUp(shared_from_this());
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
|
|
@ -3,6 +3,8 @@
|
|||
#include "store-api.hh"
|
||||
#include "goal.hh"
|
||||
#include "realisation.hh"
|
||||
#include <thread>
|
||||
#include <future>
|
||||
|
||||
namespace nix {
|
||||
|
||||
|
@ -20,11 +22,18 @@ private:
|
|||
|
||||
// The realisation corresponding to the given output id.
|
||||
// Will be filled once we can get it.
|
||||
std::optional<Realisation> outputInfo;
|
||||
std::shared_ptr<const Realisation> outputInfo;
|
||||
|
||||
/* The remaining substituters. */
|
||||
std::list<ref<Store>> subs;
|
||||
|
||||
/* The current substituter. */
|
||||
std::shared_ptr<Store> sub;
|
||||
|
||||
Pipe outPipe;
|
||||
std::thread thr;
|
||||
std::promise<std::shared_ptr<const Realisation>> promise;
|
||||
|
||||
/* Whether a substituter failed. */
|
||||
bool substituterFailed = false;
|
||||
|
||||
|
@ -36,15 +45,16 @@ public:
|
|||
|
||||
void init();
|
||||
void tryNext();
|
||||
void realisationFetched();
|
||||
void outPathValid();
|
||||
void finished();
|
||||
|
||||
void timedOut(Error && ex) override { abort(); };
|
||||
|
||||
string key() override;
|
||||
std::string key() override;
|
||||
|
||||
void work() override;
|
||||
|
||||
void handleEOF(int fd) override;
|
||||
};
|
||||
|
||||
}
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
#include "machines.hh"
|
||||
#include "worker.hh"
|
||||
#include "substitution-goal.hh"
|
||||
#include "derivation-goal.hh"
|
||||
|
@ -11,12 +10,12 @@ void Store::buildPaths(const std::vector<DerivedPath> & reqs, BuildMode buildMod
|
|||
Worker worker(*this, evalStore ? *evalStore : *this);
|
||||
|
||||
Goals goals;
|
||||
for (auto & br : reqs) {
|
||||
for (const auto & br : reqs) {
|
||||
std::visit(overloaded {
|
||||
[&](DerivedPath::Built bfd) {
|
||||
[&](const DerivedPath::Built & bfd) {
|
||||
goals.insert(worker.makeDerivationGoal(bfd.drvPath, bfd.outputs, buildMode));
|
||||
},
|
||||
[&](DerivedPath::Opaque bo) {
|
||||
[&](const DerivedPath::Opaque & bo) {
|
||||
goals.insert(worker.makePathSubstitutionGoal(bo.path, buildMode == bmRepair ? Repair : NoRepair));
|
||||
},
|
||||
}, br.raw());
|
||||
|
@@ -48,43 +47,51 @@ void Store::buildPaths(const std::vector<DerivedPath> & reqs, BuildMode buildMod
    }
}

std::vector<BuildResult> Store::buildPathsWithResults(
    const std::vector<DerivedPath> & reqs,
    BuildMode buildMode,
    std::shared_ptr<Store> evalStore)
{
    Worker worker(*this, evalStore ? *evalStore : *this);

    Goals goals;
    for (const auto & br : reqs) {
        std::visit(overloaded {
            [&](const DerivedPath::Built & bfd) {
                goals.insert(worker.makeDerivationGoal(bfd.drvPath, bfd.outputs, buildMode));
            },
            [&](const DerivedPath::Opaque & bo) {
                goals.insert(worker.makePathSubstitutionGoal(bo.path, buildMode == bmRepair ? Repair : NoRepair));
            },
        }, br.raw());
    }

    worker.run(goals);

    std::vector<BuildResult> results;

    for (auto & i : goals)
        results.push_back(i->buildResult);

    return results;
}
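As a rough usage illustration of the new entry point — a hypothetical caller, not part of this change, relying only on members visible in this diff (`success()`, `errorMsg`, `path`, `builtOutputs`); exact helper signatures may differ:

    // Hypothetical sketch: build several derived paths and report per-path
    // outcomes instead of relying on a single thrown exception.
    void buildAndReport(Store & store, const std::vector<DerivedPath> & reqs)
    {
        for (auto & result : store.buildPathsWithResults(reqs, bmNormal)) {
            if (!result.success()) {
                printError("building '%s' failed: %s",
                    result.path.to_string(store), result.errorMsg);
                continue;
            }
            for (auto & [outputId, realisation] : result.builtOutputs)
                printInfo("built output %s at %s",
                    outputId.to_string(), store.printStorePath(realisation.outPath));
        }
    }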
||||
|
||||
BuildResult Store::buildDerivation(const StorePath & drvPath, const BasicDerivation & drv,
|
||||
BuildMode buildMode)
|
||||
{
|
||||
Worker worker(*this, *this);
|
||||
auto goal = worker.makeBasicDerivationGoal(drvPath, drv, {}, buildMode);
|
||||
|
||||
BuildResult result;
|
||||
|
||||
try {
|
||||
worker.run(Goals{goal});
|
||||
result = goal->getResult();
|
||||
return goal->buildResult;
|
||||
} catch (Error & e) {
|
||||
result.status = BuildResult::MiscFailure;
|
||||
result.errorMsg = e.msg();
|
||||
}
|
||||
// XXX: Should use `goal->queryPartialDerivationOutputMap()` once it's
|
||||
// extended to return the full realisation for each output
|
||||
auto staticDrvOutputs = drv.outputsAndOptPaths(*this);
|
||||
auto outputHashes = staticOutputHashes(*this, drv);
|
||||
for (auto & [outputName, staticOutput] : staticDrvOutputs) {
|
||||
auto outputId = DrvOutput{outputHashes.at(outputName), outputName};
|
||||
if (staticOutput.second)
|
||||
result.builtOutputs.insert_or_assign(
|
||||
outputId,
|
||||
Realisation{ outputId, *staticOutput.second}
|
||||
);
|
||||
if (settings.isExperimentalFeatureEnabled("ca-derivations") && !derivationHasKnownOutputPaths(drv.type())) {
|
||||
auto realisation = this->queryRealisation(outputId);
|
||||
if (realisation)
|
||||
result.builtOutputs.insert_or_assign(
|
||||
outputId,
|
||||
*realisation
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
return BuildResult {
|
||||
.status = BuildResult::MiscFailure,
|
||||
.errorMsg = e.msg(),
|
||||
.path = DerivedPath::Built { .drvPath = drvPath },
|
||||
};
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -5,8 +5,8 @@ namespace nix {
|
|||
|
||||
|
||||
bool CompareGoalPtrs::operator() (const GoalPtr & a, const GoalPtr & b) const {
|
||||
string s1 = a->key();
|
||||
string s2 = b->key();
|
||||
std::string s1 = a->key();
|
||||
std::string s2 = b->key();
|
||||
return s1 < s2;
|
||||
}
|
||||
|
||||
|
|
|
@ -2,6 +2,7 @@
|
|||
|
||||
#include "types.hh"
|
||||
#include "store-api.hh"
|
||||
#include "build-result.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
|
@ -18,8 +19,8 @@ struct CompareGoalPtrs {
|
|||
};
|
||||
|
||||
/* Set of goals. */
|
||||
typedef set<GoalPtr, CompareGoalPtrs> Goals;
|
||||
typedef set<WeakGoalPtr, std::owner_less<WeakGoalPtr>> WeakGoals;
|
||||
typedef std::set<GoalPtr, CompareGoalPtrs> Goals;
|
||||
typedef std::set<WeakGoalPtr, std::owner_less<WeakGoalPtr>> WeakGoals;
|
||||
|
||||
/* A map of paths to goals (and the other way around). */
|
||||
typedef std::map<StorePath, WeakGoalPtr> WeakGoalMap;
|
||||
|
@@ -50,15 +51,20 @@ struct Goal : public std::enable_shared_from_this<Goal>
    unsigned int nrIncompleteClosure;

    /* Name of this goal for debugging purposes. */
    string name;
    std::string name;

    /* Whether the goal is finished. */
    ExitCode exitCode;

    /* Build result. */
    BuildResult buildResult;

    /* Exception containing an error message, if any. */
    std::optional<Error> ex;

    Goal(Worker & worker) : worker(worker)
    Goal(Worker & worker, DerivedPath path)
        : worker(worker)
        , buildResult { .path = std::move(path) }
    {
        nrFailed = nrNoSubstituters = nrIncompleteClosure = 0;
        exitCode = ecBusy;
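Since every goal now seeds `buildResult.path` through this constructor, each concrete subclass forwards the path it is working on. A minimal hypothetical example (it mirrors what `PathSubstitutionGoal` does further down; not itself part of the change):

    // Illustrative only: a trivial goal over one opaque store path, showing
    // how the new Goal constructor pre-fills buildResult.path.
    struct ExamplePathGoal : Goal
    {
        StorePath storePath;

        ExamplePathGoal(const StorePath & storePath, Worker & worker)
            : Goal(worker, DerivedPath::Opaque { storePath })
            , storePath(storePath)
        { }

        void timedOut(Error && ex) override { abort(); }
        std::string key() override { return "a$" + std::string(storePath.name()); }
        void work() override { amDone(ecSuccess); }
    };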
|
@ -75,7 +81,7 @@ struct Goal : public std::enable_shared_from_this<Goal>
|
|||
|
||||
virtual void waiteeDone(GoalPtr waitee, ExitCode result);
|
||||
|
||||
virtual void handleChildOutput(int fd, const string & data)
|
||||
virtual void handleChildOutput(int fd, std::string_view data)
|
||||
{
|
||||
abort();
|
||||
}
|
||||
|
@ -87,7 +93,7 @@ struct Goal : public std::enable_shared_from_this<Goal>
|
|||
|
||||
void trace(const FormatOrString & fs);
|
||||
|
||||
string getName()
|
||||
std::string getName()
|
||||
{
|
||||
return name;
|
||||
}
|
||||
|
@ -97,7 +103,7 @@ struct Goal : public std::enable_shared_from_this<Goal>
|
|||
by the worker (important!), etc. */
|
||||
virtual void timedOut(Error && ex) = 0;
|
||||
|
||||
virtual string key() = 0;
|
||||
virtual std::string key() = 0;
|
||||
|
||||
void amDone(ExitCode result, std::optional<Error> ex = {});
|
||||
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
#include "local-derivation-goal.hh"
|
||||
#include "gc-store.hh"
|
||||
#include "hook-instance.hh"
|
||||
#include "worker.hh"
|
||||
#include "builtins.hh"
|
||||
|
@ -193,7 +194,7 @@ void LocalDerivationGoal::tryLocalBuild() {
|
|||
outputLocks.unlock();
|
||||
buildUser.reset();
|
||||
worker.permanentFailure = true;
|
||||
done(BuildResult::InputRejected, e);
|
||||
done(BuildResult::InputRejected, {}, e);
|
||||
return;
|
||||
}
|
||||
|
||||
|
@ -260,6 +261,7 @@ void LocalDerivationGoal::cleanupHookFinally()
|
|||
void LocalDerivationGoal::cleanupPreChildKill()
|
||||
{
|
||||
sandboxMountNamespace = -1;
|
||||
sandboxUserNamespace = -1;
|
||||
}
|
||||
|
||||
|
||||
|
@ -342,7 +344,7 @@ int childEntry(void * arg)
|
|||
return 1;
|
||||
}
|
||||
|
||||
|
||||
#if __linux__
|
||||
static void linkOrCopy(const Path & from, const Path & to)
|
||||
{
|
||||
if (link(from.c_str(), to.c_str()) == -1) {
|
||||
|
@ -358,6 +360,7 @@ static void linkOrCopy(const Path & from, const Path & to)
|
|||
copyPath(from, to);
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
|
||||
void LocalDerivationGoal::startBuilder()
|
||||
|
@ -479,12 +482,12 @@ void LocalDerivationGoal::startBuilder()
|
|||
temporary build directory. The text files have the format used
|
||||
by `nix-store --register-validity'. However, the deriver
|
||||
fields are left empty. */
|
||||
string s = get(drv->env, "exportReferencesGraph").value_or("");
|
||||
auto s = get(drv->env, "exportReferencesGraph").value_or("");
|
||||
Strings ss = tokenizeString<Strings>(s);
|
||||
if (ss.size() % 2 != 0)
|
||||
throw BuildError("odd number of tokens in 'exportReferencesGraph': '%1%'", s);
|
||||
for (Strings::iterator i = ss.begin(); i != ss.end(); ) {
|
||||
string fileName = *i++;
|
||||
auto fileName = *i++;
|
||||
static std::regex regex("[A-Za-z_][A-Za-z0-9_.-]*");
|
||||
if (!std::regex_match(fileName, regex))
|
||||
throw Error("invalid file name '%s' in 'exportReferencesGraph'", fileName);
|
||||
|
@ -515,10 +518,10 @@ void LocalDerivationGoal::startBuilder()
|
|||
i.pop_back();
|
||||
}
|
||||
size_t p = i.find('=');
|
||||
if (p == string::npos)
|
||||
if (p == std::string::npos)
|
||||
dirsInChroot[i] = {i, optional};
|
||||
else
|
||||
dirsInChroot[string(i, 0, p)] = {string(i, p + 1), optional};
|
||||
dirsInChroot[i.substr(0, p)] = {i.substr(p + 1), optional};
|
||||
}
|
||||
dirsInChroot[tmpDirInSandbox] = tmpDir;
|
||||
|
||||
|
@ -669,9 +672,10 @@ void LocalDerivationGoal::startBuilder()
|
|||
auto state = stBegin;
|
||||
auto lines = runProgram(settings.preBuildHook, false, args);
|
||||
auto lastPos = std::string::size_type{0};
|
||||
for (auto nlPos = lines.find('\n'); nlPos != string::npos;
|
||||
nlPos = lines.find('\n', lastPos)) {
|
||||
auto line = std::string{lines, lastPos, nlPos - lastPos};
|
||||
for (auto nlPos = lines.find('\n'); nlPos != std::string::npos;
|
||||
nlPos = lines.find('\n', lastPos))
|
||||
{
|
||||
auto line = lines.substr(lastPos, nlPos - lastPos);
|
||||
lastPos = nlPos + 1;
|
||||
if (state == stBegin) {
|
||||
if (line == "extra-sandbox-paths" || line == "extra-chroot-dirs") {
|
||||
|
@ -684,10 +688,10 @@ void LocalDerivationGoal::startBuilder()
|
|||
state = stBegin;
|
||||
} else {
|
||||
auto p = line.find('=');
|
||||
if (p == string::npos)
|
||||
if (p == std::string::npos)
|
||||
dirsInChroot[line] = line;
|
||||
else
|
||||
dirsInChroot[string(line, 0, p)] = string(line, p + 1);
|
||||
dirsInChroot[line.substr(0, p)] = line.substr(p + 1);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -711,6 +715,7 @@ void LocalDerivationGoal::startBuilder()
|
|||
if (!builderOut.readSide)
|
||||
throw SysError("opening pseudoterminal master");
|
||||
|
||||
// FIXME: not thread-safe, use ptsname_r
|
||||
std::string slaveName(ptsname(builderOut.readSide.get()));
|
||||
|
||||
if (buildUser) {
|
||||
|
@ -751,10 +756,9 @@ void LocalDerivationGoal::startBuilder()
|
|||
if (tcsetattr(builderOut.writeSide.get(), TCSANOW, &term))
|
||||
throw SysError("putting pseudoterminal into raw mode");
|
||||
|
||||
result.startTime = time(0);
|
||||
buildResult.startTime = time(0);
|
||||
|
||||
/* Fork a child to build the package. */
|
||||
ProcessOptions options;
|
||||
|
||||
#if __linux__
|
||||
if (useChroot) {
|
||||
|
@ -797,8 +801,6 @@ void LocalDerivationGoal::startBuilder()
|
|||
|
||||
userNamespaceSync.create();
|
||||
|
||||
options.allowVfork = false;
|
||||
|
||||
Path maxUserNamespaces = "/proc/sys/user/max_user_namespaces";
|
||||
static bool userNamespacesEnabled =
|
||||
pathExists(maxUserNamespaces)
|
||||
|
@ -856,7 +858,7 @@ void LocalDerivationGoal::startBuilder()
|
|||
writeFull(builderOut.writeSide.get(),
|
||||
fmt("%d %d\n", usingUserNamespace, child));
|
||||
_exit(0);
|
||||
}, options);
|
||||
});
|
||||
|
||||
int res = helper.wait();
|
||||
if (res != 0 && settings.sandboxFallback) {
|
||||
|
@ -907,23 +909,30 @@ void LocalDerivationGoal::startBuilder()
|
|||
"nobody:x:65534:65534:Nobody:/:/noshell\n",
|
||||
sandboxUid(), sandboxGid(), settings.sandboxBuildDir));
|
||||
|
||||
/* Save the mount namespace of the child. We have to do this
|
||||
/* Save the mount- and user namespace of the child. We have to do this
|
||||
*before* the child does a chroot. */
|
||||
sandboxMountNamespace = open(fmt("/proc/%d/ns/mnt", (pid_t) pid).c_str(), O_RDONLY);
|
||||
if (sandboxMountNamespace.get() == -1)
|
||||
throw SysError("getting sandbox mount namespace");
|
||||
|
||||
if (usingUserNamespace) {
|
||||
sandboxUserNamespace = open(fmt("/proc/%d/ns/user", (pid_t) pid).c_str(), O_RDONLY);
|
||||
if (sandboxUserNamespace.get() == -1)
|
||||
throw SysError("getting sandbox user namespace");
|
||||
}
|
||||
|
||||
/* Signal the builder that we've updated its user namespace. */
|
||||
writeFull(userNamespaceSync.writeSide.get(), "1");
|
||||
|
||||
} else
|
||||
#endif
|
||||
{
|
||||
#if __linux__
|
||||
fallback:
|
||||
options.allowVfork = !buildUser && !drv->isBuiltin();
|
||||
#endif
|
||||
pid = startProcess([&]() {
|
||||
runChild();
|
||||
}, options);
|
||||
});
|
||||
}
|
||||
|
||||
/* parent */
|
||||
|
@ -934,7 +943,7 @@ void LocalDerivationGoal::startBuilder()
|
|||
/* Check if setting up the build environment failed. */
|
||||
std::vector<std::string> msgs;
|
||||
while (true) {
|
||||
string msg = [&]() {
|
||||
std::string msg = [&]() {
|
||||
try {
|
||||
return readLine(builderOut.readSide.get());
|
||||
} catch (Error & e) {
|
||||
|
@ -946,12 +955,12 @@ void LocalDerivationGoal::startBuilder()
|
|||
throw;
|
||||
}
|
||||
}();
|
||||
if (string(msg, 0, 1) == "\2") break;
|
||||
if (string(msg, 0, 1) == "\1") {
|
||||
if (msg.substr(0, 1) == "\2") break;
|
||||
if (msg.substr(0, 1) == "\1") {
|
||||
FdSource source(builderOut.readSide.get());
|
||||
auto ex = readError(source);
|
||||
ex.addTrace({}, "while setting up the build environment");
|
||||
throw;
|
||||
throw ex;
|
||||
}
|
||||
debug("sandbox setup: " + msg);
|
||||
msgs.push_back(std::move(msg));
|
||||
|
@ -983,7 +992,7 @@ void LocalDerivationGoal::initTmpDir() {
|
|||
env[i.first] = i.second;
|
||||
} else {
|
||||
auto hash = hashString(htSHA256, i.first);
|
||||
string fn = ".attr-" + hash.to_string(Base32, false);
|
||||
std::string fn = ".attr-" + hash.to_string(Base32, false);
|
||||
Path p = tmpDir + "/" + fn;
|
||||
writeFile(p, rewriteStrings(i.second, inputRewrites));
|
||||
chownToBuilder(p);
|
||||
|
@ -1074,7 +1083,7 @@ void LocalDerivationGoal::writeStructuredAttrs()
|
|||
for (auto & [i, v] : json["outputs"].get<nlohmann::json::object_t>()) {
|
||||
/* The placeholder must have a rewrite, so we use it to cover both the
|
||||
cases where we know or don't know the output path ahead of time. */
|
||||
rewritten[i] = rewriteStrings(v, inputRewrites);
|
||||
rewritten[i] = rewriteStrings((std::string) v, inputRewrites);
|
||||
}
|
||||
|
||||
json["outputs"] = rewritten;
|
||||
|
@ -1094,10 +1103,10 @@ void LocalDerivationGoal::writeStructuredAttrs()
|
|||
static StorePath pathPartOfReq(const DerivedPath & req)
|
||||
{
|
||||
return std::visit(overloaded {
|
||||
[&](DerivedPath::Opaque bo) {
|
||||
[&](const DerivedPath::Opaque & bo) {
|
||||
return bo.path;
|
||||
},
|
||||
[&](DerivedPath::Built bfd) {
|
||||
[&](const DerivedPath::Built & bfd) {
|
||||
return bfd.drvPath;
|
||||
},
|
||||
}, req.raw());
|
||||
|
@ -1119,7 +1128,7 @@ struct RestrictedStoreConfig : virtual LocalFSStoreConfig
|
|||
/* A wrapper around LocalStore that only allows building/querying of
|
||||
paths that are in the input closures of the build or were added via
|
||||
recursive Nix calls. */
|
||||
struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual LocalFSStore
|
||||
struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual LocalFSStore, public virtual GcStore
|
||||
{
|
||||
ref<LocalStore> next;
|
||||
|
||||
|
@ -1180,9 +1189,14 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual Lo
|
|||
std::optional<StorePath> queryPathFromHashPart(const std::string & hashPart) override
|
||||
{ throw Error("queryPathFromHashPart"); }
|
||||
|
||||
StorePath addToStore(const string & name, const Path & srcPath,
|
||||
FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256,
|
||||
PathFilter & filter = defaultPathFilter, RepairFlag repair = NoRepair) override
|
||||
StorePath addToStore(
|
||||
std::string_view name,
|
||||
const Path & srcPath,
|
||||
FileIngestionMethod method,
|
||||
HashType hashAlgo,
|
||||
PathFilter & filter,
|
||||
RepairFlag repair,
|
||||
const StorePathSet & references) override
|
||||
{ throw Error("addToStore"); }
|
||||
|
||||
void addToStore(const ValidPathInfo & info, Source & narSource,
|
||||
|
@ -1192,18 +1206,26 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual Lo
|
|||
goal.addDependency(info.path);
|
||||
}
|
||||
|
||||
StorePath addTextToStore(const string & name, const string & s,
|
||||
const StorePathSet & references, RepairFlag repair = NoRepair) override
|
||||
StorePath addTextToStore(
|
||||
std::string_view name,
|
||||
std::string_view s,
|
||||
const StorePathSet & references,
|
||||
RepairFlag repair = NoRepair) override
|
||||
{
|
||||
auto path = next->addTextToStore(name, s, references, repair);
|
||||
goal.addDependency(path);
|
||||
return path;
|
||||
}
|
||||
|
||||
StorePath addToStoreFromDump(Source & dump, const string & name,
|
||||
FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair) override
|
||||
StorePath addToStoreFromDump(
|
||||
Source & dump,
|
||||
std::string_view name,
|
||||
FileIngestionMethod method,
|
||||
HashType hashAlgo,
|
||||
RepairFlag repair,
|
||||
const StorePathSet & references) override
|
||||
{
|
||||
auto path = next->addToStoreFromDump(dump, name, method, hashAlgo, repair);
|
||||
auto path = next->addToStoreFromDump(dump, name, method, hashAlgo, repair, references);
|
||||
goal.addDependency(path);
|
||||
return path;
|
||||
}
|
||||
|
@ -1227,16 +1249,27 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual Lo
|
|||
// corresponds to an allowed derivation
|
||||
{ throw Error("registerDrvOutput"); }
|
||||
|
||||
std::optional<const Realisation> queryRealisation(const DrvOutput & id) override
|
||||
void queryRealisationUncached(const DrvOutput & id,
|
||||
Callback<std::shared_ptr<const Realisation>> callback) noexcept override
|
||||
// XXX: This should probably be allowed if the realisation corresponds to
|
||||
// an allowed derivation
|
||||
{
|
||||
if (!goal.isAllowed(id))
|
||||
throw InvalidPath("cannot query an unknown output id '%s' in recursive Nix", id.to_string());
|
||||
return next->queryRealisation(id);
|
||||
callback(nullptr);
|
||||
next->queryRealisation(id, std::move(callback));
|
||||
}
|
||||
|
||||
void buildPaths(const std::vector<DerivedPath> & paths, BuildMode buildMode, std::shared_ptr<Store> evalStore) override
|
||||
{
|
||||
for (auto & result : buildPathsWithResults(paths, buildMode, evalStore))
|
||||
if (!result.success())
|
||||
result.rethrow();
|
||||
}
|
||||
|
||||
std::vector<BuildResult> buildPathsWithResults(
|
||||
const std::vector<DerivedPath> & paths,
|
||||
BuildMode buildMode = bmNormal,
|
||||
std::shared_ptr<Store> evalStore = nullptr) override
|
||||
{
|
||||
assert(!evalStore);
|
||||
|
||||
|
@ -1250,26 +1283,13 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual Lo
|
|||
throw InvalidPath("cannot build '%s' in recursive Nix because path is unknown", req.to_string(*next));
|
||||
}
|
||||
|
||||
next->buildPaths(paths, buildMode);
|
||||
auto results = next->buildPathsWithResults(paths, buildMode);
|
||||
|
||||
for (auto & path : paths) {
|
||||
auto p = std::get_if<DerivedPath::Built>(&path);
|
||||
if (!p) continue;
|
||||
auto & bfd = *p;
|
||||
auto drv = readDerivation(bfd.drvPath);
|
||||
auto drvHashes = staticOutputHashes(*this, drv);
|
||||
auto outputs = next->queryDerivationOutputMap(bfd.drvPath);
|
||||
for (auto & [outputName, outputPath] : outputs)
|
||||
if (wantOutput(outputName, bfd.outputs)) {
|
||||
newPaths.insert(outputPath);
|
||||
if (settings.isExperimentalFeatureEnabled("ca-derivations")) {
|
||||
auto thisRealisation = next->queryRealisation(
|
||||
DrvOutput{drvHashes.at(outputName), outputName}
|
||||
);
|
||||
assert(thisRealisation);
|
||||
newRealisations.insert(*thisRealisation);
|
||||
}
|
||||
}
|
||||
for (auto & result : results) {
|
||||
for (auto & [outputName, output] : result.builtOutputs) {
|
||||
newPaths.insert(output.outPath);
|
||||
newRealisations.insert(output);
|
||||
}
|
||||
}
|
||||
|
||||
StorePathSet closure;
|
||||
|
@ -1278,6 +1298,8 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual Lo
|
|||
goal.addDependency(path);
|
||||
for (auto & real : Realisation::closure(*next, newRealisations))
|
||||
goal.addedDrvOutputs.insert(real.id);
|
||||
|
||||
return results;
|
||||
}
|
||||
|
||||
BuildResult buildDerivation(const StorePath & drvPath, const BasicDerivation & drv,
|
||||
|
@ -1323,7 +1345,7 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual Lo
|
|||
|
||||
void LocalDerivationGoal::startDaemon()
|
||||
{
|
||||
settings.requireExperimentalFeature("recursive-nix");
|
||||
settings.requireExperimentalFeature(Xp::RecursiveNix);
|
||||
|
||||
Store::Params params;
|
||||
params["path-info-cache-size"] = "0";
|
||||
|
@ -1356,7 +1378,7 @@ void LocalDerivationGoal::startDaemon()
|
|||
AutoCloseFD remote = accept(daemonSocket.get(),
|
||||
(struct sockaddr *) &remoteAddr, &remoteAddrLen);
|
||||
if (!remote) {
|
||||
if (errno == EINTR) continue;
|
||||
if (errno == EINTR || errno == EAGAIN) continue;
|
||||
if (errno == EINVAL) break;
|
||||
throw SysError("accepting connection");
|
||||
}
|
||||
|
@ -1435,6 +1457,9 @@ void LocalDerivationGoal::addDependency(const StorePath & path)
|
|||
child process.*/
|
||||
Pid child(startProcess([&]() {
|
||||
|
||||
if (usingUserNamespace && (setns(sandboxUserNamespace.get(), 0) == -1))
|
||||
throw SysError("entering sandbox user namespace");
|
||||
|
||||
if (setns(sandboxMountNamespace.get(), 0) == -1)
|
||||
throw SysError("entering sandbox mount namespace");
|
||||
|
||||
|
@@ -1776,11 +1801,14 @@ void LocalDerivationGoal::runChild()
       i686-linux build on an x86_64-linux machine. */
    struct utsname utsbuf;
    uname(&utsbuf);
    if (drv->platform == "i686-linux" &&
        (settings.thisSystem == "x86_64-linux" ||
         (!strcmp(utsbuf.sysname, "Linux") && !strcmp(utsbuf.machine, "x86_64")))) {
    if ((drv->platform == "i686-linux"
         && (settings.thisSystem == "x86_64-linux"
             || (!strcmp(utsbuf.sysname, "Linux") && !strcmp(utsbuf.machine, "x86_64"))))
        || drv->platform == "armv7l-linux"
        || drv->platform == "armv6l-linux")
    {
        if (personality(PER_LINUX32) == -1)
            throw SysError("cannot set i686-linux personality");
            throw SysError("cannot set 32-bit personality");
    }
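For context, `PER_LINUX32` asks the kernel to report a 32-bit machine type to the process, which is what i686, armv6l and armv7l userlands expect on a 64-bit kernel. A standalone sketch of the syscall (illustrative only, not the Nix code above):

    // Standalone illustration of personality(2): after the call, uname()
    // inside this process typically reports a 32-bit machine string.
    #include <sys/personality.h>
    #include <sys/utsname.h>
    #include <cstdio>

    int main()
    {
        if (personality(PER_LINUX32) == -1) {
            perror("personality");
            return 1;
        }
        struct utsname u;
        uname(&u);
        std::printf("machine now reports: %s\n", u.machine);
        return 0;
    }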
|
||||
/* Impersonate a Linux 2.6 machine to get some determinism in
|
||||
|
@ -1905,7 +1933,7 @@ void LocalDerivationGoal::runChild()
|
|||
"can't map '%1%' to '%2%': mismatched impure paths not supported on Darwin",
|
||||
i.first, i.second.source);
|
||||
|
||||
string path = i.first;
|
||||
std::string path = i.first;
|
||||
struct stat st;
|
||||
if (lstat(path.c_str(), &st)) {
|
||||
if (i.second.optional && errno == ENOENT)
|
||||
|
@ -1957,7 +1985,7 @@ void LocalDerivationGoal::runChild()
|
|||
args.push_back("IMPORT_DIR=" + settings.nixDataDir + "/nix/sandbox/");
|
||||
if (allowLocalNetworking) {
|
||||
args.push_back("-D");
|
||||
args.push_back(string("_ALLOW_LOCAL_NETWORKING=1"));
|
||||
args.push_back(std::string("_ALLOW_LOCAL_NETWORKING=1"));
|
||||
}
|
||||
args.push_back(drv->builder);
|
||||
} else {
|
||||
|
@ -1976,7 +2004,7 @@ void LocalDerivationGoal::runChild()
|
|||
args.push_back(rewriteStrings(i, inputRewrites));
|
||||
|
||||
/* Indicate that we managed to set up the build environment. */
|
||||
writeFull(STDERR_FILENO, string("\2\n"));
|
||||
writeFull(STDERR_FILENO, std::string("\2\n"));
|
||||
|
||||
/* Execute the program. This should not return. */
|
||||
if (drv->isBuiltin()) {
|
||||
|
@ -1994,7 +2022,7 @@ void LocalDerivationGoal::runChild()
|
|||
else if (drv->builder == "builtin:unpack-channel")
|
||||
builtinUnpackChannel(drv2);
|
||||
else
|
||||
throw Error("unsupported builtin function '%1%'", string(drv->builder, 8));
|
||||
throw Error("unsupported builtin builder '%1%'", drv->builder.substr(8));
|
||||
_exit(0);
|
||||
} catch (std::exception & e) {
|
||||
writeFull(STDERR_FILENO, e.what() + std::string("\n"));
|
||||
|
@ -2040,7 +2068,7 @@ void LocalDerivationGoal::runChild()
|
|||
}
|
||||
|
||||
|
||||
void LocalDerivationGoal::registerOutputs()
|
||||
DrvOutputs LocalDerivationGoal::registerOutputs()
|
||||
{
|
||||
/* When using a build hook, the build hook can register the output
|
||||
as valid (by doing `nix-store --import'). If so we don't have
|
||||
|
@ -2049,10 +2077,8 @@ void LocalDerivationGoal::registerOutputs()
|
|||
We can only early return when the outputs are known a priori. For
|
||||
floating content-addressed derivations this isn't the case.
|
||||
*/
|
||||
if (hook) {
|
||||
DerivationGoal::registerOutputs();
|
||||
return;
|
||||
}
|
||||
if (hook)
|
||||
return DerivationGoal::registerOutputs();
|
||||
|
||||
std::map<std::string, ValidPathInfo> infos;
|
||||
|
||||
|
@ -2140,8 +2166,7 @@ void LocalDerivationGoal::registerOutputs()
|
|||
|
||||
/* Pass blank Sink as we are not ready to hash data at this stage. */
|
||||
NullSink blank;
|
||||
auto references = worker.store.parseStorePathSet(
|
||||
scanForReferences(blank, actualPath, worker.store.printStorePathSet(referenceablePaths)));
|
||||
auto references = scanForReferences(blank, actualPath, referenceablePaths);
|
||||
|
||||
outputReferencesIfUnregistered.insert_or_assign(
|
||||
outputName,
|
||||
|
@ -2155,8 +2180,8 @@ void LocalDerivationGoal::registerOutputs()
|
|||
/* Since we'll use the already installed versions of these, we
|
||||
can treat them as leaves and ignore any references they
|
||||
have. */
|
||||
[&](AlreadyRegistered _) { return StringSet {}; },
|
||||
[&](PerhapsNeedToRegister refs) {
|
||||
[&](const AlreadyRegistered &) { return StringSet {}; },
|
||||
[&](const PerhapsNeedToRegister & refs) {
|
||||
StringSet referencedOutputs;
|
||||
/* FIXME build inverted map up front so no quadratic waste here */
|
||||
for (auto & r : refs.refs)
|
||||
|
@ -2176,6 +2201,8 @@ void LocalDerivationGoal::registerOutputs()
|
|||
|
||||
std::reverse(sortedOutputNames.begin(), sortedOutputNames.end());
|
||||
|
||||
OutputPathMap finalOutputs;
|
||||
|
||||
for (auto & outputName : sortedOutputNames) {
|
||||
auto output = drv->outputs.at(outputName);
|
||||
auto & scratchPath = scratchOutputs.at(outputName);
|
||||
|
@ -2192,11 +2219,11 @@ void LocalDerivationGoal::registerOutputs()
|
|||
};
|
||||
|
||||
std::optional<StorePathSet> referencesOpt = std::visit(overloaded {
|
||||
[&](AlreadyRegistered skippedFinalPath) -> std::optional<StorePathSet> {
|
||||
[&](const AlreadyRegistered & skippedFinalPath) -> std::optional<StorePathSet> {
|
||||
finish(skippedFinalPath.path);
|
||||
return std::nullopt;
|
||||
},
|
||||
[&](PerhapsNeedToRegister r) -> std::optional<StorePathSet> {
|
||||
[&](const PerhapsNeedToRegister & r) -> std::optional<StorePathSet> {
|
||||
return r.refs;
|
||||
},
|
||||
}, outputReferencesIfUnregistered.at(outputName));
|
||||
|
@ -2208,14 +2235,14 @@ void LocalDerivationGoal::registerOutputs()
|
|||
auto rewriteOutput = [&]() {
|
||||
/* Apply hash rewriting if necessary. */
|
||||
if (!outputRewrites.empty()) {
|
||||
warn("rewriting hashes in '%1%'; cross fingers", actualPath);
|
||||
debug("rewriting hashes in '%1%'; cross fingers", actualPath);
|
||||
|
||||
/* FIXME: this is in-memory. */
|
||||
StringSink sink;
|
||||
dumpPath(actualPath, sink);
|
||||
deletePath(actualPath);
|
||||
sink.s = make_ref<std::string>(rewriteStrings(*sink.s, outputRewrites));
|
||||
StringSource source(*sink.s);
|
||||
sink.s = rewriteStrings(sink.s, outputRewrites);
|
||||
StringSource source(sink.s);
|
||||
restorePath(actualPath, source);
|
||||
}
|
||||
};
|
||||
|
@ -2283,7 +2310,7 @@ void LocalDerivationGoal::registerOutputs()
|
|||
StringSink sink;
|
||||
dumpPath(actualPath, sink);
|
||||
RewritingSink rsink2(oldHashPart, std::string(finalPath.hashPart()), nextSink);
|
||||
rsink2(*sink.s);
|
||||
rsink2(sink.s);
|
||||
rsink2.flush();
|
||||
});
|
||||
Path tmpPath = actualPath + ".tmp";
|
||||
|
@ -2312,7 +2339,8 @@ void LocalDerivationGoal::registerOutputs()
|
|||
};
|
||||
|
||||
ValidPathInfo newInfo = std::visit(overloaded {
|
||||
[&](DerivationOutputInputAddressed output) {
|
||||
|
||||
[&](const DerivationOutputInputAddressed & output) {
|
||||
/* input-addressed case */
|
||||
auto requiredFinalPath = output.path;
|
||||
/* Preemptively add rewrite rule for final hash, as that is
|
||||
|
@ -2331,14 +2359,15 @@ void LocalDerivationGoal::registerOutputs()
|
|||
newInfo0.references.insert(newInfo0.path);
|
||||
return newInfo0;
|
||||
},
|
||||
[&](DerivationOutputCAFixed dof) {
|
||||
|
||||
[&](const DerivationOutputCAFixed & dof) {
|
||||
auto newInfo0 = newInfoFromCA(DerivationOutputCAFloating {
|
||||
.method = dof.hash.method,
|
||||
.hashType = dof.hash.hash.type,
|
||||
});
|
||||
|
||||
/* Check wanted hash */
|
||||
Hash & wanted = dof.hash.hash;
|
||||
const Hash & wanted = dof.hash.hash;
|
||||
assert(newInfo0.ca);
|
||||
auto got = getContentAddressHash(*newInfo0.ca);
|
||||
if (wanted != got) {
|
||||
|
@ -2353,18 +2382,17 @@ void LocalDerivationGoal::registerOutputs()
|
|||
}
|
||||
return newInfo0;
|
||||
},
|
||||
[&](DerivationOutputCAFloating dof) {
|
||||
|
||||
[&](DerivationOutputCAFloating & dof) {
|
||||
return newInfoFromCA(dof);
|
||||
},
|
||||
[&](DerivationOutputDeferred) {
|
||||
|
||||
[&](DerivationOutputDeferred) -> ValidPathInfo {
|
||||
// No derivation should reach that point without having been
|
||||
// rewritten first
|
||||
assert(false);
|
||||
// Ugly, but the compiler insists on having this return a value
|
||||
// of type `ValidPathInfo` despite the `assert(false)`, so
|
||||
// let's provide it
|
||||
return *(ValidPathInfo*)0;
|
||||
},
|
||||
|
||||
}, output.output);
|
||||
|
||||
/* FIXME: set proper permissions in restorePath() so
|
||||
|
@ -2455,7 +2483,7 @@ void LocalDerivationGoal::registerOutputs()
|
|||
}
|
||||
|
||||
if (curRound == nrRounds) {
|
||||
localStore.optimisePath(actualPath); // FIXME: combine with scanForReferences()
|
||||
localStore.optimisePath(actualPath, NoRepair); // FIXME: combine with scanForReferences()
|
||||
worker.markContentsGood(newInfo.path);
|
||||
}
|
||||
|
||||
|
@ -2475,11 +2503,12 @@ void LocalDerivationGoal::registerOutputs()
|
|||
}
|
||||
|
||||
if (buildMode == bmCheck) {
|
||||
// In case of FOD mismatches on `--check` an error must be thrown as this is also
|
||||
// a source for non-determinism.
|
||||
/* In case of fixed-output derivations, if there are
|
||||
mismatches on `--check` an error must be thrown as this is
|
||||
also a source for non-determinism. */
|
||||
if (delayedException)
|
||||
std::rethrow_exception(delayedException);
|
||||
return;
|
||||
return assertPathValidity();
|
||||
}
|
||||
|
||||
/* Apply output checks. */
|
||||
|
@ -2491,7 +2520,7 @@ void LocalDerivationGoal::registerOutputs()
|
|||
assert(prevInfos.size() == infos.size());
|
||||
for (auto i = prevInfos.begin(), j = infos.begin(); i != prevInfos.end(); ++i, ++j)
|
||||
if (!(*i == *j)) {
|
||||
result.isNonDeterministic = true;
|
||||
buildResult.isNonDeterministic = true;
|
||||
Path prev = worker.store.printStorePath(i->second.path) + checkSuffix;
|
||||
bool prevExists = keepPreviousRound && pathExists(prev);
|
||||
hintformat hint = prevExists
|
||||
|
@ -2529,7 +2558,7 @@ void LocalDerivationGoal::registerOutputs()
|
|||
|
||||
if (curRound < nrRounds) {
|
||||
prevInfos = std::move(infos);
|
||||
return;
|
||||
return {};
|
||||
}
|
||||
|
||||
/* Remove the .check directories if we're done. FIXME: keep them
|
||||
|
@@ -2564,17 +2593,24 @@ void LocalDerivationGoal::registerOutputs()
       means it's safe to link the derivation to the output hash. We must do
       that for floating CA derivations, which otherwise couldn't be cached,
       but it's fine to do in all cases. */
    DrvOutputs builtOutputs;

    if (settings.isExperimentalFeatureEnabled("ca-derivations")) {
        for (auto& [outputName, newInfo] : infos) {
            auto thisRealisation = Realisation{
                .id = DrvOutput{initialOutputs.at(outputName).outputHash,
                                outputName},
                .outPath = newInfo.path};
    for (auto & [outputName, newInfo] : infos) {
        auto thisRealisation = Realisation {
            .id = DrvOutput {
                initialOutputs.at(outputName).outputHash,
                outputName
            },
            .outPath = newInfo.path
        };
        if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations)) {
            signRealisation(thisRealisation);
            worker.store.registerDrvOutput(thisRealisation);
        }
        builtOutputs.emplace(thisRealisation.id, thisRealisation);
    }

    return builtOutputs;
}
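The realisations registered here are what later let a store resolve a content-addressed output by its `DrvOutput` id rather than by a precomputed path. A rough, hypothetical lookup sketch using only calls that already appear in this change (`staticOutputHashes`, `queryRealisation`); treat the helper itself as an assumption:

    // Hypothetical helper, illustrative only: map a derivation output name
    // to its built store path via the registered realisation.
    std::optional<StorePath> lookupBuiltOutput(
        Store & store, const StorePath & drvPath, const std::string & outputName)
    {
        auto drv = store.readDerivation(drvPath);
        auto outputHashes = staticOutputHashes(store, drv);
        auto id = DrvOutput { outputHashes.at(outputName), outputName };
        if (auto realisation = store.queryRealisation(id))
            return realisation->outPath;
        return std::nullopt;
    }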
|
||||
void LocalDerivationGoal::signRealisation(Realisation & realisation)
|
||||
|
@ -2583,7 +2619,7 @@ void LocalDerivationGoal::signRealisation(Realisation & realisation)
|
|||
}
|
||||
|
||||
|
||||
void LocalDerivationGoal::checkOutputs(const std::map<Path, ValidPathInfo> & outputs)
|
||||
void LocalDerivationGoal::checkOutputs(const std::map<std::string, ValidPathInfo> & outputs)
|
||||
{
|
||||
std::map<Path, const ValidPathInfo &> outputsByPath;
|
||||
for (auto & output : outputs)
|
||||
|
@ -2655,8 +2691,8 @@ void LocalDerivationGoal::checkOutputs(const std::map<Path, ValidPathInfo> & out
|
|||
for (auto & i : *value) {
|
||||
if (worker.store.isStorePath(i))
|
||||
spec.insert(worker.store.parseStorePath(i));
|
||||
else if (finalOutputs.count(i))
|
||||
spec.insert(finalOutputs.at(i));
|
||||
else if (outputs.count(i))
|
||||
spec.insert(outputs.at(i).path);
|
||||
else throw BuildError("derivation contains an illegal reference specifier '%s'", i);
|
||||
}
|
||||
|
||||
|
@ -2679,7 +2715,7 @@ void LocalDerivationGoal::checkOutputs(const std::map<Path, ValidPathInfo> & out
|
|||
}
|
||||
|
||||
if (!badPaths.empty()) {
|
||||
string badPathsStr;
|
||||
std::string badPathsStr;
|
||||
for (auto & i : badPaths) {
|
||||
badPathsStr += "\n ";
|
||||
badPathsStr += worker.store.printStorePath(i);
|
||||
|
|
|
@ -27,9 +27,10 @@ struct LocalDerivationGoal : public DerivationGoal
|
|||
/* Pipe for synchronising updates to the builder namespaces. */
|
||||
Pipe userNamespaceSync;
|
||||
|
||||
/* The mount namespace of the builder, used to add additional
|
||||
/* The mount namespace and user namespace of the builder, used to add additional
|
||||
paths to the sandbox as a result of recursive Nix calls. */
|
||||
AutoCloseFD sandboxMountNamespace;
|
||||
AutoCloseFD sandboxUserNamespace;
|
||||
|
||||
/* On Linux, whether we're doing the build in its own user
|
||||
namespace. */
|
||||
|
@ -57,11 +58,11 @@ struct LocalDerivationGoal : public DerivationGoal
|
|||
typedef map<Path, ChrootPath> DirsInChroot; // maps target path to source path
|
||||
DirsInChroot dirsInChroot;
|
||||
|
||||
typedef map<string, string> Environment;
|
||||
typedef map<std::string, std::string> Environment;
|
||||
Environment env;
|
||||
|
||||
#if __APPLE__
|
||||
typedef string SandboxProfile;
|
||||
typedef std::string SandboxProfile;
|
||||
SandboxProfile additionalSandboxProfile;
|
||||
#endif
|
||||
|
||||
|
@ -168,7 +169,7 @@ struct LocalDerivationGoal : public DerivationGoal
|
|||
|
||||
/* Check that the derivation outputs all exist and register them
|
||||
as valid. */
|
||||
void registerOutputs() override;
|
||||
DrvOutputs registerOutputs() override;
|
||||
|
||||
void signRealisation(Realisation &) override;
|
||||
|
||||
|
|
|
@ -6,7 +6,7 @@
|
|||
namespace nix {
|
||||
|
||||
PathSubstitutionGoal::PathSubstitutionGoal(const StorePath & storePath, Worker & worker, RepairFlag repair, std::optional<ContentAddress> ca)
|
||||
: Goal(worker)
|
||||
: Goal(worker, DerivedPath::Opaque { storePath })
|
||||
, storePath(storePath)
|
||||
, repair(repair)
|
||||
, ca(ca)
|
||||
|
@ -24,6 +24,13 @@ PathSubstitutionGoal::~PathSubstitutionGoal()
|
|||
}
|
||||
|
||||
|
||||
void PathSubstitutionGoal::done(ExitCode result, BuildResult::Status status)
|
||||
{
|
||||
buildResult.status = status;
|
||||
amDone(result);
|
||||
}
|
||||
|
||||
|
||||
void PathSubstitutionGoal::work()
|
||||
{
|
||||
(this->*state)();
|
||||
|
@ -38,7 +45,7 @@ void PathSubstitutionGoal::init()
|
|||
|
||||
/* If the path already exists we're done. */
|
||||
if (!repair && worker.store.isValidPath(storePath)) {
|
||||
amDone(ecSuccess);
|
||||
done(ecSuccess, BuildResult::AlreadyValid);
|
||||
return;
|
||||
}
|
||||
|
||||
|
@ -65,7 +72,7 @@ void PathSubstitutionGoal::tryNext()
|
|||
/* Hack: don't indicate failure if there were no substituters.
|
||||
In that case the calling derivation should just do a
|
||||
build. */
|
||||
amDone(substituterFailed ? ecFailed : ecNoSubstituters);
|
||||
done(substituterFailed ? ecFailed : ecNoSubstituters, BuildResult::NoSubstituters);
|
||||
|
||||
if (substituterFailed) {
|
||||
worker.failedSubstitutions++;
|
||||
|
@ -138,8 +145,8 @@ void PathSubstitutionGoal::tryNext()
|
|||
only after we've downloaded the path. */
|
||||
if (!sub->isTrusted && worker.store.pathInfoIsUntrusted(*info))
|
||||
{
|
||||
warn("substituter '%s' does not have a valid signature for path '%s'",
|
||||
sub->getUri(), worker.store.printStorePath(storePath));
|
||||
warn("the substitute for '%s' from '%s' is not signed by any of the keys in 'trusted-public-keys'",
|
||||
worker.store.printStorePath(storePath), sub->getUri());
|
||||
tryNext();
|
||||
return;
|
||||
}
|
||||
|
@ -163,7 +170,9 @@ void PathSubstitutionGoal::referencesValid()
|
|||
|
||||
if (nrFailed > 0) {
|
||||
debug("some references of path '%s' could not be realised", worker.store.printStorePath(storePath));
|
||||
amDone(nrNoSubstituters > 0 || nrIncompleteClosure > 0 ? ecIncompleteClosure : ecFailed);
|
||||
done(
|
||||
nrNoSubstituters > 0 || nrIncompleteClosure > 0 ? ecIncompleteClosure : ecFailed,
|
||||
BuildResult::DependencyFailed);
|
||||
return;
|
||||
}
|
||||
|
||||
|
@ -268,11 +277,11 @@ void PathSubstitutionGoal::finished()
|
|||
|
||||
worker.updateProgress();
|
||||
|
||||
amDone(ecSuccess);
|
||||
done(ecSuccess, BuildResult::Substituted);
|
||||
}
|
||||
|
||||
|
||||
void PathSubstitutionGoal::handleChildOutput(int fd, const string & data)
|
||||
void PathSubstitutionGoal::handleChildOutput(int fd, std::string_view data)
|
||||
{
|
||||
}
|
||||
|
||||
|
|
|
@ -53,13 +53,15 @@ struct PathSubstitutionGoal : public Goal
|
|||
/* Content address for recomputing store path */
|
||||
std::optional<ContentAddress> ca;
|
||||
|
||||
void done(ExitCode result, BuildResult::Status status);
|
||||
|
||||
public:
|
||||
PathSubstitutionGoal(const StorePath & storePath, Worker & worker, RepairFlag repair = NoRepair, std::optional<ContentAddress> ca = std::nullopt);
|
||||
~PathSubstitutionGoal();
|
||||
|
||||
void timedOut(Error && ex) override { abort(); };
|
||||
|
||||
string key() override
|
||||
std::string key() override
|
||||
{
|
||||
/* "a$" ensures substitution goals happen before derivation
|
||||
goals. */
|
||||
|
@ -77,7 +79,7 @@ public:
|
|||
void finished();
|
||||
|
||||
/* Callback used by the worker to write to the log. */
|
||||
void handleChildOutput(int fd, const string & data) override;
|
||||
void handleChildOutput(int fd, std::string_view data) override;
|
||||
void handleEOF(int fd) override;
|
||||
|
||||
void cleanup() override;
|
||||
|
|
|
@ -161,7 +161,7 @@ unsigned Worker::getNrLocalBuilds()
|
|||
}
|
||||
|
||||
|
||||
void Worker::childStarted(GoalPtr goal, const set<int> & fds,
|
||||
void Worker::childStarted(GoalPtr goal, const std::set<int> & fds,
|
||||
bool inBuildSlot, bool respectTimeouts)
|
||||
{
|
||||
Child child;
|
||||
|
@ -239,7 +239,7 @@ void Worker::run(const Goals & _topGoals)
|
|||
}
|
||||
}
|
||||
|
||||
/* Call queryMissing() efficiently query substitutes. */
|
||||
/* Call queryMissing() to efficiently query substitutes. */
|
||||
StorePathSet willBuild, willSubstitute, unknown;
|
||||
uint64_t downloadSize, narSize;
|
||||
store.queryMissing(topPaths, willBuild, willSubstitute, unknown, downloadSize, narSize);
|
||||
|
@ -281,11 +281,11 @@ void Worker::run(const Goals & _topGoals)
|
|||
if (getMachines().empty())
|
||||
throw Error("unable to start any build; either increase '--max-jobs' "
|
||||
"or enable remote builds."
|
||||
"\nhttps://nixos.org/nix/manual/#chap-distributed-builds");
|
||||
"\nhttps://nixos.org/manual/nix/stable/advanced-topics/distributed-builds.html");
|
||||
else
|
||||
throw Error("unable to start any build; remote machines may not have "
|
||||
"all required system features."
|
||||
"\nhttps://nixos.org/nix/manual/#chap-distributed-builds");
|
||||
"\nhttps://nixos.org/manual/nix/stable/advanced-topics/distributed-builds.html");
|
||||
|
||||
}
|
||||
assert(!awake.empty());
|
||||
|
@ -377,7 +377,7 @@ void Worker::waitForInput()
|
|||
GoalPtr goal = j->goal.lock();
|
||||
assert(goal);
|
||||
|
||||
set<int> fds2(j->fds);
|
||||
std::set<int> fds2(j->fds);
|
||||
std::vector<unsigned char> buffer(4096);
|
||||
for (auto & k : fds2) {
|
||||
if (pollStatus.at(fdToPollStatus.at(k)).revents) {
|
||||
|
@ -394,7 +394,7 @@ void Worker::waitForInput()
|
|||
} else {
|
||||
printMsg(lvlVomit, "%1%: read %2% bytes",
|
||||
goal->getName(), rd);
|
||||
string data((char *) buffer.data(), rd);
|
||||
std::string data((char *) buffer.data(), rd);
|
||||
j->lastOutput = after;
|
||||
goal->handleChildOutput(k, data);
|
||||
}
|
||||
|
|
|
@ -38,7 +38,7 @@ struct Child
|
|||
{
|
||||
WeakGoalPtr goal;
|
||||
Goal * goal2; // ugly hackery
|
||||
set<int> fds;
|
||||
std::set<int> fds;
|
||||
bool respectTimeouts;
|
||||
bool inBuildSlot;
|
||||
steady_time_point lastOutput; /* time we last got output on stdout/stderr */
|
||||
|
@ -167,7 +167,7 @@ public:
|
|||
|
||||
/* Registers a running child process. `inBuildSlot' means that
|
||||
the process counts towards the jobs limit. */
|
||||
void childStarted(GoalPtr goal, const set<int> & fds,
|
||||
void childStarted(GoalPtr goal, const std::set<int> & fds,
|
||||
bool inBuildSlot, bool respectTimeouts);
|
||||
|
||||
/* Unregisters a running child process. `wakeSleepers' should be
|
||||
|
|
|
@ -123,7 +123,7 @@ void buildProfile(const Path & out, Packages && pkgs)
|
|||
createLinks(state, pkgDir, out, priority);
|
||||
|
||||
try {
|
||||
for (const auto & p : tokenizeString<std::vector<string>>(
|
||||
for (const auto & p : tokenizeString<std::vector<std::string>>(
|
||||
readFile(pkgDir + "/nix-support/propagated-user-env-packages"), " \n"))
|
||||
if (!done.count(p))
|
||||
postponed.insert(p);
|
||||
|
@ -161,7 +161,7 @@ void buildProfile(const Path & out, Packages && pkgs)
|
|||
|
||||
void builtinBuildenv(const BasicDerivation & drv)
|
||||
{
|
||||
auto getAttr = [&](const string & name) {
|
||||
auto getAttr = [&](const std::string & name) {
|
||||
auto i = drv.env.find(name);
|
||||
if (i == drv.env.end()) throw Error("attribute '%s' missing", name);
|
||||
return i->second;
|
||||
|
|
|
@ -16,7 +16,7 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData)
|
|||
writeFile(settings.netrcFile, netrcData, 0600);
|
||||
}
|
||||
|
||||
auto getAttr = [&](const string & name) {
|
||||
auto getAttr = [&](const std::string & name) {
|
||||
auto i = drv.env.find(name);
|
||||
if (i == drv.env.end()) throw Error("attribute '%s' missing", name);
|
||||
return i->second;
|
||||
|
|
|
@ -5,7 +5,7 @@ namespace nix {
|
|||
|
||||
void builtinUnpackChannel(const BasicDerivation & drv)
|
||||
{
|
||||
auto getAttr = [&](const string & name) {
|
||||
auto getAttr = [&](const std::string & name) {
|
||||
auto i = drv.env.find(name);
|
||||
if (i == drv.env.end()) throw Error("attribute '%s' missing", name);
|
||||
return i->second;
|
||||
|
|
|
@@ -19,3 +19,8 @@ create table if not exists RealisationsRefs (
    foreign key (referrer) references Realisations(id) on delete cascade,
    foreign key (realisationReference) references Realisations(id) on delete restrict
);

-- used by QueryRealisationReferences
create index if not exists IndexRealisationsRefs on RealisationsRefs(referrer);
-- used by cascade deletion when ValidPaths is deleted
create index if not exists IndexRealisationsRefsOnOutputPath on Realisations(outputPath);
|
|
|
@ -31,10 +31,10 @@ std::string makeFixedOutputCA(FileIngestionMethod method, const Hash & hash)
|
|||
std::string renderContentAddress(ContentAddress ca)
|
||||
{
|
||||
return std::visit(overloaded {
|
||||
[](TextHash th) {
|
||||
[](TextHash & th) {
|
||||
return "text:" + th.hash.to_string(Base32, true);
|
||||
},
|
||||
[](FixedOutputHash fsh) {
|
||||
[](FixedOutputHash & fsh) {
|
||||
return makeFixedOutputCA(fsh.method, fsh.hash);
|
||||
}
|
||||
}, ca);
|
||||
|
@ -43,10 +43,10 @@ std::string renderContentAddress(ContentAddress ca)
|
|||
std::string renderContentAddressMethod(ContentAddressMethod cam)
|
||||
{
|
||||
return std::visit(overloaded {
|
||||
[](TextHashMethod &th) {
|
||||
[](TextHashMethod & th) {
|
||||
return std::string{"text:"} + printHashType(htSHA256);
|
||||
},
|
||||
[](FixedOutputHashMethod &fshm) {
|
||||
[](FixedOutputHashMethod & fshm) {
|
||||
return "fixed:" + makeFileIngestionPrefix(fshm.fileIngestionMethod) + printHashType(fshm.hashType);
|
||||
}
|
||||
}, cam);
|
||||
|
@ -104,12 +104,12 @@ ContentAddress parseContentAddress(std::string_view rawCa) {
|
|||
|
||||
return std::visit(
|
||||
overloaded {
|
||||
[&](TextHashMethod thm) {
|
||||
[&](TextHashMethod & thm) {
|
||||
return ContentAddress(TextHash {
|
||||
.hash = Hash::parseNonSRIUnprefixed(rest, htSHA256)
|
||||
});
|
||||
},
|
||||
[&](FixedOutputHashMethod fohMethod) {
|
||||
[&](FixedOutputHashMethod & fohMethod) {
|
||||
return ContentAddress(FixedOutputHash {
|
||||
.method = fohMethod.fileIngestionMethod,
|
||||
.hash = Hash::parseNonSRIUnprefixed(rest, std::move(fohMethod.hashType)),
|
||||
|
@@ -120,8 +120,10 @@ ContentAddress parseContentAddress(std::string_view rawCa) {

ContentAddressMethod parseContentAddressMethod(std::string_view caMethod)
{
    std::string_view asPrefix {std::string{caMethod} + ":"};
    return parseContentAddressMethodPrefix(asPrefix);
    std::string asPrefix = std::string{caMethod} + ":";
    // parseContentAddressMethodPrefix takes its argument by reference
    std::string_view asPrefixView = asPrefix;
    return parseContentAddressMethodPrefix(asPrefixView);
}

std::optional<ContentAddress> parseContentAddressOpt(std::string_view rawCaOpt)
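The bug fixed above is the classic dangling-view mistake: the old code built a `std::string_view` straight from a temporary `std::string`, which is destroyed at the end of the declaration. A standalone illustration of the pitfall (not Nix code):

    // Illustrative only: why a view over a temporary string dangles, and the
    // safe alternative of keeping an owning string alive.
    #include <string>
    #include <string_view>

    std::string_view dangling(std::string_view s)
    {
        std::string_view v { std::string{s} + ":" };  // BAD: temporary dies here
        return v;                                      // returns a dangling view
    }

    std::string safe(std::string_view s)
    {
        std::string owned = std::string{s} + ":";      // GOOD: owner outlives use
        return owned;
    }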
||||
|
@ -137,10 +139,10 @@ std::string renderContentAddress(std::optional<ContentAddress> ca)
|
|||
Hash getContentAddressHash(const ContentAddress & ca)
|
||||
{
|
||||
return std::visit(overloaded {
|
||||
[](TextHash th) {
|
||||
[](const TextHash & th) {
|
||||
return th.hash;
|
||||
},
|
||||
[](FixedOutputHash fsh) {
|
||||
[](const FixedOutputHash & fsh) {
|
||||
return fsh.hash;
|
||||
}
|
||||
}, ca);
|
||||
|
|
|
@ -1,10 +1,11 @@
|
|||
#include "daemon.hh"
|
||||
#include "monitor-fd.hh"
|
||||
#include "worker-protocol.hh"
|
||||
#include "build-result.hh"
|
||||
#include "store-api.hh"
|
||||
#include "gc-store.hh"
|
||||
#include "path-with-outputs.hh"
|
||||
#include "finally.hh"
|
||||
#include "affinity.hh"
|
||||
#include "archive.hh"
|
||||
#include "derivations.hh"
|
||||
#include "args.hh"
|
||||
|
@ -70,7 +71,7 @@ struct TunnelLogger : public Logger
|
|||
|
||||
StringSink buf;
|
||||
buf << STDERR_NEXT << (fs.s + "\n");
|
||||
enqueueMsg(*buf.s);
|
||||
enqueueMsg(buf.s);
|
||||
}
|
||||
|
||||
void logEI(const ErrorInfo & ei) override
|
||||
|
@ -82,7 +83,7 @@ struct TunnelLogger : public Logger
|
|||
|
||||
StringSink buf;
|
||||
buf << STDERR_NEXT << oss.str();
|
||||
enqueueMsg(*buf.s);
|
||||
enqueueMsg(buf.s);
|
||||
}
|
||||
|
||||
/* startWork() means that we're starting an operation for which we
|
||||
|
@ -130,7 +131,7 @@ struct TunnelLogger : public Logger
|
|||
|
||||
StringSink buf;
|
||||
buf << STDERR_START_ACTIVITY << act << lvl << type << s << fields << parent;
|
||||
enqueueMsg(*buf.s);
|
||||
enqueueMsg(buf.s);
|
||||
}
|
||||
|
||||
void stopActivity(ActivityId act) override
|
||||
|
@ -138,7 +139,7 @@ struct TunnelLogger : public Logger
|
|||
if (GET_PROTOCOL_MINOR(clientVersion) < 20) return;
|
||||
StringSink buf;
|
||||
buf << STDERR_STOP_ACTIVITY << act;
|
||||
enqueueMsg(*buf.s);
|
||||
enqueueMsg(buf.s);
|
||||
}
|
||||
|
||||
void result(ActivityId act, ResultType type, const Fields & fields) override
|
||||
|
@ -146,7 +147,7 @@ struct TunnelLogger : public Logger
|
|||
if (GET_PROTOCOL_MINOR(clientVersion) < 20) return;
|
||||
StringSink buf;
|
||||
buf << STDERR_RESULT << act << type << fields;
|
||||
enqueueMsg(*buf.s);
|
||||
enqueueMsg(buf.s);
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -230,11 +231,12 @@ struct ClientSettings
|
|||
else if (name == settings.experimentalFeatures.name) {
|
||||
// We don’t want to forward the experimental features to
|
||||
// the daemon, as that could cause some pretty weird stuff
|
||||
if (tokenizeString<Strings>(value) != settings.experimentalFeatures.get())
|
||||
if (parseFeatures(tokenizeString<StringSet>(value)) != settings.experimentalFeatures.get())
|
||||
debug("Ignoring the client-specified experimental features");
|
||||
}
|
||||
else if (trusted
|
||||
|| name == settings.buildTimeout.name
|
||||
|| name == settings.buildRepeat.name
|
||||
|| name == "connect-timeout"
|
||||
|| (name == "builders" && value == ""))
|
||||
settings.set(name, value);
|
||||
|
@ -395,16 +397,14 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
|
|||
FramedSource source(from);
|
||||
// TODO this is essentially RemoteStore::addCAToStore. Move it up to Store.
|
||||
return std::visit(overloaded {
|
||||
[&](TextHashMethod &_) {
|
||||
[&](TextHashMethod &) {
|
||||
// We could stream this by changing Store
|
||||
std::string contents = source.drain();
|
||||
auto path = store->addTextToStore(name, contents, refs, repair);
|
||||
return store->queryPathInfo(path);
|
||||
},
|
||||
[&](FixedOutputHashMethod &fohm) {
|
||||
if (!refs.empty())
|
||||
throw UnimplementedError("cannot yet have refs with flat or nar-hashed data");
|
||||
auto path = store->addToStoreFromDump(source, name, fohm.fileIngestionMethod, fohm.hashType, repair);
|
||||
[&](FixedOutputHashMethod & fohm) {
|
||||
auto path = store->addToStoreFromDump(source, name, fohm.fileIngestionMethod, fohm.hashType, repair, refs);
|
||||
return store->queryPathInfo(path);
|
||||
},
|
||||
}, contentAddressMethod);
|
||||
|
@@ -432,25 +432,30 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
             hashAlgo = parseHashType(hashAlgoRaw);
         }

-        StringSink saved;
-        TeeSource savedNARSource(from, saved);
-        RetrieveRegularNARSink savedRegular { saved };
-
-        if (method == FileIngestionMethod::Recursive) {
-            /* Get the entire NAR dump from the client and save it to
-               a string so that we can pass it to
-               addToStoreFromDump(). */
-            ParseSink sink; /* null sink; just parse the NAR */
-            parseDump(sink, savedNARSource);
-        } else
-            parseDump(savedRegular, from);
-
+        auto dumpSource = sinkToSource([&](Sink & saved) {
+            if (method == FileIngestionMethod::Recursive) {
+                /* We parse the NAR dump through into `saved` unmodified,
+                   so why all this extra work? We still parse the NAR so
+                   that we aren't sending arbitrary data to `saved`
+                   unwittingly`, and we know when the NAR ends so we don't
+                   consume the rest of `from` and can't parse another
+                   command. (We don't trust `addToStoreFromDump` to not
+                   eagerly consume the entire stream it's given, past the
+                   length of the Nar. */
+                TeeSource savedNARSource(from, saved);
+                ParseSink sink; /* null sink; just parse the NAR */
+                parseDump(sink, savedNARSource);
+            } else {
+                /* Incrementally parse the NAR file, stripping the
+                   metadata, and streaming the sole file we expect into
+                   `saved`. */
+                RetrieveRegularNARSink savedRegular { saved };
+                parseDump(savedRegular, from);
+                if (!savedRegular.regular) throw Error("regular file expected");
+            }
+        });
         logger->startWork();
-        if (!savedRegular.regular) throw Error("regular file expected");
-
-        // FIXME: try to stream directly from `from`.
-        StringSource dumpSource { *saved.s };
-        auto path = store->addToStoreFromDump(dumpSource, baseName, method, hashAlgo);
+        auto path = store->addToStoreFromDump(*dumpSource, baseName, method, hashAlgo);
         logger->stopWork();

         to << store->printStorePath(path);
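The streaming rewrite above relies on wrapping the client connection in a TeeSource, so the NAR can be validated by the parser while the same bytes are forwarded to the consumer. A simplified sketch of that tee pattern, using stand-in Source/Sink types rather than the real classes from serialise.hh:

#include <algorithm>
#include <cstring>
#include <functional>
#include <string>
#include <string_view>

// Stand-in sink: any callable that accepts a chunk of bytes.
using Sink = std::function<void(std::string_view)>;

struct Source {
    virtual size_t read(char * data, size_t len) = 0;
    virtual ~Source() = default;
};

// Reads from an in-memory buffer; used only to make the sketch self-contained.
struct StringSource : Source {
    std::string_view s;
    size_t pos = 0;
    explicit StringSource(std::string_view s) : s(s) {}
    size_t read(char * data, size_t len) override {
        size_t n = std::min(len, s.size() - pos);
        std::memcpy(data, s.data() + pos, n);
        pos += n;
        return n;
    }
};

// Every byte pulled from the wrapped source is also fed to the sink.
struct TeeSource : Source {
    Source & inner;
    Sink & sink;
    TeeSource(Source & inner, Sink & sink) : inner(inner), sink(sink) {}
    size_t read(char * data, size_t len) override {
        size_t n = inner.read(data, len);
        sink({data, n});
        return n;
    }
};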
@@ -465,17 +470,19 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
             dontCheckSigs = false;

         logger->startWork();
-        FramedSource source(from);
-        store->addMultipleToStore(source,
-            RepairFlag{repair},
-            dontCheckSigs ? NoCheckSigs : CheckSigs);
+        {
+            FramedSource source(from);
+            store->addMultipleToStore(source,
+                RepairFlag{repair},
+                dontCheckSigs ? NoCheckSigs : CheckSigs);
+        }
         logger->stopWork();
         break;
     }

     case wopAddTextToStore: {
-        string suffix = readString(from);
-        string s = readString(from);
+        std::string suffix = readString(from);
+        std::string s = readString(from);
         auto refs = worker_proto::read(*store, from, Phantom<StorePathSet> {});
         logger->startWork();
         auto path = store->addTextToStore(suffix, s, refs, NoRepair);
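Scoping the FramedSource in its own block matters because its destructor must drain any remaining frames before logger->stopWork() writes the next protocol message. For reference, a hedged sketch of reading such a framed stream (length-prefixed chunks terminated by a zero-length frame; the real nix::FramedSource differs in wire-format details such as integer encoding):

#include <cstdint>
#include <istream>
#include <stdexcept>
#include <string>

// Read frames of the form <uint64 length><length bytes> until a zero length.
std::string drainFramed(std::istream & in)
{
    std::string out;
    while (true) {
        uint64_t len = 0;
        in.read(reinterpret_cast<char *>(&len), sizeof(len));
        if (!in) throw std::runtime_error("truncated frame header");
        if (len == 0) break;               // terminating frame
        std::string chunk(len, '\0');
        in.read(chunk.data(), len);
        if (!in) throw std::runtime_error("truncated frame payload");
        out += chunk;
    }
    return out;
}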
@@ -525,6 +532,25 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
         break;
     }

+    case wopBuildPathsWithResults: {
+        auto drvs = readDerivedPaths(*store, clientVersion, from);
+        BuildMode mode = bmNormal;
+        mode = (BuildMode) readInt(from);
+
+        /* Repairing is not atomic, so disallowed for "untrusted"
+           clients. */
+        if (mode == bmRepair && !trusted)
+            throw Error("repairing is not allowed because you are not in 'trusted-users'");
+
+        logger->startWork();
+        auto results = store->buildPathsWithResults(drvs, mode);
+        logger->stopWork();
+
+        worker_proto::write(*store, to, results);
+
+        break;
+    }
+
     case wopBuildDerivation: {
         auto drvPath = store->parseStorePath(readString(from));
         BasicDerivation drv;
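The new wopBuildPathsWithResults op exposes Store::buildPathsWithResults(), which returns one BuildResult per requested path instead of a single aggregate status. A hedged sketch of a caller; only the names shown in this diff are taken from it, the rest is assumed for illustration:

#include "store-api.hh"
#include "build-result.hh"

#include <vector>

void buildAndReport(nix::Store & store, const std::vector<nix::DerivedPath> & paths)
{
    auto results = store.buildPathsWithResults(paths, nix::bmNormal);
    for (auto & r : results) {
        if (r.success())
            continue;                       // this path built (or was substituted) fine
        nix::printError("build of '%s' failed: %s",
            r.path.to_string(store), r.errorMsg);
    }
}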
@@ -617,16 +643,19 @@ static void performOp(TunnelLogger * logger, ref<Store> store,

     case wopAddIndirectRoot: {
         Path path = absPath(readString(from));

         logger->startWork();
-        store->addIndirectRoot(path);
+        auto & gcStore = requireGcStore(*store);
+        gcStore.addIndirectRoot(path);
         logger->stopWork();

         to << 1;
         break;
     }

+    // Obsolete.
     case wopSyncWithGC: {
         logger->startWork();
         store->syncWithGC();
         logger->stopWork();
         to << 1;
         break;
@@ -634,7 +663,8 @@ static void performOp(TunnelLogger * logger, ref<Store> store,

     case wopFindRoots: {
         logger->startWork();
-        Roots roots = store->findRoots(!trusted);
+        auto & gcStore = requireGcStore(*store);
+        Roots roots = gcStore.findRoots(!trusted);
         logger->stopWork();

         size_t size = 0;

@@ -665,7 +695,8 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
         logger->startWork();
         if (options.ignoreLiveness)
             throw Error("you are not allowed to ignore liveness");
-        store->collectGarbage(options, results);
+        auto & gcStore = requireGcStore(*store);
+        gcStore.collectGarbage(options, results);
         logger->stopWork();

         to << results.paths << results.bytesFreed << 0 /* obsolete */;
@ -693,8 +724,8 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
|
|||
if (GET_PROTOCOL_MINOR(clientVersion) >= 12) {
|
||||
unsigned int n = readInt(from);
|
||||
for (unsigned int i = 0; i < n; i++) {
|
||||
string name = readString(from);
|
||||
string value = readString(from);
|
||||
auto name = readString(from);
|
||||
auto value = readString(from);
|
||||
clientSettings.overrides.emplace(name, value);
|
||||
}
|
||||
}
|
||||
|
@ -849,14 +880,14 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
|
|||
|
||||
else {
|
||||
std::unique_ptr<Source> source;
|
||||
StringSink saved;
|
||||
if (GET_PROTOCOL_MINOR(clientVersion) >= 21)
|
||||
source = std::make_unique<TunnelSource>(from, to);
|
||||
else {
|
||||
StringSink saved;
|
||||
TeeSource tee { from, saved };
|
||||
ParseSink ether;
|
||||
parseDump(ether, tee);
|
||||
source = std::make_unique<StringSource>(std::move(*saved.s));
|
||||
source = std::make_unique<StringSource>(saved.s);
|
||||
}
|
||||
|
||||
logger->startWork();
|
||||
|
@ -917,6 +948,22 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
|
|||
break;
|
||||
}
|
||||
|
||||
case wopAddBuildLog: {
|
||||
StorePath path{readString(from)};
|
||||
logger->startWork();
|
||||
if (!trusted)
|
||||
throw Error("you are not privileged to add logs");
|
||||
{
|
||||
FramedSource source(from);
|
||||
StringSink sink;
|
||||
source.drainInto(sink);
|
||||
store->addBuildLog(path, sink.s);
|
||||
}
|
||||
logger->stopWork();
|
||||
to << 1;
|
||||
break;
|
||||
}
|
||||
|
||||
default:
|
||||
throw Error("invalid operation %1%", op);
|
||||
}
|
||||
|
@ -952,15 +999,19 @@ void processConnection(
|
|||
|
||||
Finally finally([&]() {
|
||||
_isInterrupted = false;
|
||||
prevLogger->log(lvlDebug, fmt("%d operations", opCount));
|
||||
printMsgUsing(prevLogger, lvlDebug, "%d operations", opCount);
|
||||
});
|
||||
|
||||
if (GET_PROTOCOL_MINOR(clientVersion) >= 14 && readInt(from)) {
|
||||
auto affinity = readInt(from);
|
||||
setAffinityTo(affinity);
|
||||
// Obsolete CPU affinity.
|
||||
readInt(from);
|
||||
}
|
||||
|
||||
readInt(from); // obsolete reserveSpace
|
||||
if (GET_PROTOCOL_MINOR(clientVersion) >= 11)
|
||||
readInt(from); // obsolete reserveSpace
|
||||
|
||||
if (GET_PROTOCOL_MINOR(clientVersion) >= 33)
|
||||
to << nixVersion;
|
||||
|
||||
/* Send startup error messages to the client. */
|
||||
tunnelLogger->startWork();
|
||||
|
@ -985,6 +1036,8 @@ void processConnection(
|
|||
break;
|
||||
}
|
||||
|
||||
printMsgUsing(prevLogger, lvlDebug, "received daemon op %d", op);
|
||||
|
||||
opCount++;
|
||||
|
||||
try {
|
||||
|
|
|
@ -4,24 +4,25 @@
|
|||
#include "util.hh"
|
||||
#include "worker-protocol.hh"
|
||||
#include "fs-accessor.hh"
|
||||
#include <boost/container/small_vector.hpp>
|
||||
|
||||
namespace nix {
|
||||
|
||||
std::optional<StorePath> DerivationOutput::path(const Store & store, std::string_view drvName, std::string_view outputName) const
|
||||
{
|
||||
return std::visit(overloaded {
|
||||
[](DerivationOutputInputAddressed doi) -> std::optional<StorePath> {
|
||||
[](const DerivationOutputInputAddressed & doi) -> std::optional<StorePath> {
|
||||
return { doi.path };
|
||||
},
|
||||
[&](DerivationOutputCAFixed dof) -> std::optional<StorePath> {
|
||||
[&](const DerivationOutputCAFixed & dof) -> std::optional<StorePath> {
|
||||
return {
|
||||
dof.path(store, drvName, outputName)
|
||||
};
|
||||
},
|
||||
[](DerivationOutputCAFloating dof) -> std::optional<StorePath> {
|
||||
[](const DerivationOutputCAFloating & dof) -> std::optional<StorePath> {
|
||||
return std::nullopt;
|
||||
},
|
||||
[](DerivationOutputDeferred) -> std::optional<StorePath> {
|
||||
[](const DerivationOutputDeferred &) -> std::optional<StorePath> {
|
||||
return std::nullopt;
|
||||
},
|
||||
}, output);
|
||||
|
@ -81,7 +82,7 @@ bool derivationIsImpure(DerivationType dt) {
|
|||
|
||||
bool BasicDerivation::isBuiltin() const
|
||||
{
|
||||
return string(builder, 0, 8) == "builtin:";
|
||||
return builder.substr(0, 8) == "builtin:";
|
||||
}
|
||||
|
||||
|
||||
|
@ -103,19 +104,19 @@ StorePath writeDerivation(Store & store,
|
|||
|
||||
|
||||
/* Read string `s' from stream `str'. */
|
||||
static void expect(std::istream & str, const string & s)
|
||||
static void expect(std::istream & str, std::string_view s)
|
||||
{
|
||||
char s2[s.size()];
|
||||
str.read(s2, s.size());
|
||||
if (string(s2, s.size()) != s)
|
||||
if (std::string(s2, s.size()) != s)
|
||||
throw FormatError("expected string '%1%'", s);
|
||||
}
|
||||
|
||||
|
||||
/* Read a C-style string from stream `str'. */
|
||||
static string parseString(std::istream & str)
|
||||
static std::string parseString(std::istream & str)
|
||||
{
|
||||
string res;
|
||||
std::string res;
|
||||
expect(str, "\"");
|
||||
int c;
|
||||
while ((c = str.get()) != '"')
|
||||
|
@ -171,7 +172,7 @@ static DerivationOutput parseDerivationOutput(const Store & store,
|
|||
{
|
||||
if (hashAlgo != "") {
|
||||
auto method = FileIngestionMethod::Flat;
|
||||
if (string(hashAlgo, 0, 2) == "r:") {
|
||||
if (hashAlgo.substr(0, 2) == "r:") {
|
||||
method = FileIngestionMethod::Recursive;
|
||||
hashAlgo = hashAlgo.substr(2);
|
||||
}
|
||||
|
@ -187,7 +188,7 @@ static DerivationOutput parseDerivationOutput(const Store & store,
|
|||
},
|
||||
};
|
||||
} else {
|
||||
settings.requireExperimentalFeature("ca-derivations");
|
||||
settings.requireExperimentalFeature(Xp::CaDerivations);
|
||||
assert(pathS == "");
|
||||
return DerivationOutput {
|
||||
.output = DerivationOutputCAFloating {
|
||||
|
@ -259,8 +260,8 @@ Derivation parseDerivation(const Store & store, std::string && s, std::string_vi
|
|||
/* Parse the environment variables. */
|
||||
expect(str, ",[");
|
||||
while (!endOfList(str)) {
|
||||
expect(str, "("); string name = parseString(str);
|
||||
expect(str, ","); string value = parseString(str);
|
||||
expect(str, "("); auto name = parseString(str);
|
||||
expect(str, ","); auto value = parseString(str);
|
||||
expect(str, ")");
|
||||
drv.env[name] = value;
|
||||
}
|
||||
|
@ -270,9 +271,11 @@ Derivation parseDerivation(const Store & store, std::string && s, std::string_vi
|
|||
}
|
||||
|
||||
|
||||
static void printString(string & res, std::string_view s)
|
||||
static void printString(std::string & res, std::string_view s)
|
||||
{
|
||||
char buf[s.size() * 2 + 2];
|
||||
boost::container::small_vector<char, 64 * 1024> buffer;
|
||||
buffer.reserve(s.size() * 2 + 2);
|
||||
char * buf = buffer.data();
|
||||
char * p = buf;
|
||||
*p++ = '"';
|
||||
for (auto c : s)
|
||||
|
@ -286,7 +289,7 @@ static void printString(string & res, std::string_view s)
|
|||
}
|
||||
|
||||
|
||||
static void printUnquotedString(string & res, std::string_view s)
|
||||
static void printUnquotedString(std::string & res, std::string_view s)
|
||||
{
|
||||
res += '"';
|
||||
res.append(s);
|
||||
|
@ -295,7 +298,7 @@ static void printUnquotedString(string & res, std::string_view s)
|
|||
|
||||
|
||||
template<class ForwardIterator>
|
||||
static void printStrings(string & res, ForwardIterator i, ForwardIterator j)
|
||||
static void printStrings(std::string & res, ForwardIterator i, ForwardIterator j)
|
||||
{
|
||||
res += '[';
|
||||
bool first = true;
|
||||
|
@ -308,7 +311,7 @@ static void printStrings(string & res, ForwardIterator i, ForwardIterator j)
|
|||
|
||||
|
||||
template<class ForwardIterator>
|
||||
static void printUnquotedStrings(string & res, ForwardIterator i, ForwardIterator j)
|
||||
static void printUnquotedStrings(std::string & res, ForwardIterator i, ForwardIterator j)
|
||||
{
|
||||
res += '[';
|
||||
bool first = true;
|
||||
|
@ -320,10 +323,10 @@ static void printUnquotedStrings(string & res, ForwardIterator i, ForwardIterato
|
|||
}
|
||||
|
||||
|
||||
string Derivation::unparse(const Store & store, bool maskOutputs,
|
||||
std::string Derivation::unparse(const Store & store, bool maskOutputs,
|
||||
std::map<std::string, StringSet> * actualInputs) const
|
||||
{
|
||||
string s;
|
||||
std::string s;
|
||||
s.reserve(65536);
|
||||
s += "Derive([";
|
||||
|
||||
|
@ -332,22 +335,22 @@ string Derivation::unparse(const Store & store, bool maskOutputs,
|
|||
if (first) first = false; else s += ',';
|
||||
s += '('; printUnquotedString(s, i.first);
|
||||
std::visit(overloaded {
|
||||
[&](DerivationOutputInputAddressed doi) {
|
||||
[&](const DerivationOutputInputAddressed & doi) {
|
||||
s += ','; printUnquotedString(s, maskOutputs ? "" : store.printStorePath(doi.path));
|
||||
s += ','; printUnquotedString(s, "");
|
||||
s += ','; printUnquotedString(s, "");
|
||||
},
|
||||
[&](DerivationOutputCAFixed dof) {
|
||||
[&](const DerivationOutputCAFixed & dof) {
|
||||
s += ','; printUnquotedString(s, maskOutputs ? "" : store.printStorePath(dof.path(store, name, i.first)));
|
||||
s += ','; printUnquotedString(s, dof.hash.printMethodAlgo());
|
||||
s += ','; printUnquotedString(s, dof.hash.hash.to_string(Base16, false));
|
||||
},
|
||||
[&](DerivationOutputCAFloating dof) {
|
||||
[&](const DerivationOutputCAFloating & dof) {
|
||||
s += ','; printUnquotedString(s, "");
|
||||
s += ','; printUnquotedString(s, makeFileIngestionPrefix(dof.method) + printHashType(dof.hashType));
|
||||
s += ','; printUnquotedString(s, "");
|
||||
},
|
||||
[&](DerivationOutputDeferred) {
|
||||
[&](const DerivationOutputDeferred &) {
|
||||
s += ','; printUnquotedString(s, "");
|
||||
s += ','; printUnquotedString(s, "");
|
||||
s += ','; printUnquotedString(s, "");
|
||||
|
@ -398,7 +401,7 @@ string Derivation::unparse(const Store & store, bool maskOutputs,
|
|||
|
||||
|
||||
// FIXME: remove
|
||||
bool isDerivation(const string & fileName)
|
||||
bool isDerivation(const std::string & fileName)
|
||||
{
|
||||
return hasSuffix(fileName, drvExtension);
|
||||
}
|
||||
|
@ -420,13 +423,13 @@ DerivationType BasicDerivation::type() const
|
|||
std::optional<HashType> floatingHashType;
|
||||
for (auto & i : outputs) {
|
||||
std::visit(overloaded {
|
||||
[&](DerivationOutputInputAddressed _) {
|
||||
[&](const DerivationOutputInputAddressed &) {
|
||||
inputAddressedOutputs.insert(i.first);
|
||||
},
|
||||
[&](DerivationOutputCAFixed _) {
|
||||
[&](const DerivationOutputCAFixed &) {
|
||||
fixedCAOutputs.insert(i.first);
|
||||
},
|
||||
[&](DerivationOutputCAFloating dof) {
|
||||
[&](const DerivationOutputCAFloating & dof) {
|
||||
floatingCAOutputs.insert(i.first);
|
||||
if (!floatingHashType) {
|
||||
floatingHashType = dof.hashType;
|
||||
|
@ -435,7 +438,7 @@ DerivationType BasicDerivation::type() const
|
|||
throw Error("All floating outputs must use the same hash type");
|
||||
}
|
||||
},
|
||||
[&](DerivationOutputDeferred _) {
|
||||
[&](const DerivationOutputDeferred &) {
|
||||
deferredIAOutputs.insert(i.first);
|
||||
},
|
||||
}, i.second.output);
|
||||
|
@ -538,15 +541,15 @@ DrvHashModulo hashDerivationModulo(Store & store, const Derivation & drv, bool m
|
|||
const auto & res = pathDerivationModulo(store, i.first);
|
||||
std::visit(overloaded {
|
||||
// Regular non-CA derivation, replace derivation
|
||||
[&](Hash drvHash) {
|
||||
[&](const Hash & drvHash) {
|
||||
inputs2.insert_or_assign(drvHash.to_string(Base16, false), i.second);
|
||||
},
|
||||
[&](DeferredHash deferredHash) {
|
||||
[&](const DeferredHash & deferredHash) {
|
||||
isDeferred = true;
|
||||
inputs2.insert_or_assign(deferredHash.hash.to_string(Base16, false), i.second);
|
||||
},
|
||||
// CA derivation's output hashes
|
||||
[&](CaOutputHashes outputHashes) {
|
||||
[&](const CaOutputHashes & outputHashes) {
|
||||
std::set<std::string> justOut = { "out" };
|
||||
for (auto & output : i.second) {
|
||||
/* Put each one in with a single "out" output.. */
|
||||
|
@ -572,17 +575,17 @@ std::map<std::string, Hash> staticOutputHashes(Store & store, const Derivation &
|
|||
{
|
||||
std::map<std::string, Hash> res;
|
||||
std::visit(overloaded {
|
||||
[&](Hash drvHash) {
|
||||
[&](const Hash & drvHash) {
|
||||
for (auto & outputName : drv.outputNames()) {
|
||||
res.insert({outputName, drvHash});
|
||||
}
|
||||
},
|
||||
[&](DeferredHash deferredHash) {
|
||||
[&](const DeferredHash & deferredHash) {
|
||||
for (auto & outputName : drv.outputNames()) {
|
||||
res.insert({outputName, deferredHash.hash});
|
||||
}
|
||||
},
|
||||
[&](CaOutputHashes outputHashes) {
|
||||
[&](const CaOutputHashes & outputHashes) {
|
||||
res = outputHashes;
|
||||
},
|
||||
}, hashDerivationModulo(store, drv, true));
|
||||
|
@ -590,7 +593,7 @@ std::map<std::string, Hash> staticOutputHashes(Store & store, const Derivation &
|
|||
}
|
||||
|
||||
|
||||
bool wantOutput(const string & output, const std::set<string> & wanted)
|
||||
bool wantOutput(const std::string & output, const std::set<std::string> & wanted)
|
||||
{
|
||||
return wanted.empty() || wanted.find(output) != wanted.end();
|
||||
}
|
||||
|
@ -666,22 +669,22 @@ void writeDerivation(Sink & out, const Store & store, const BasicDerivation & dr
|
|||
for (auto & i : drv.outputs) {
|
||||
out << i.first;
|
||||
std::visit(overloaded {
|
||||
[&](DerivationOutputInputAddressed doi) {
|
||||
[&](const DerivationOutputInputAddressed & doi) {
|
||||
out << store.printStorePath(doi.path)
|
||||
<< ""
|
||||
<< "";
|
||||
},
|
||||
[&](DerivationOutputCAFixed dof) {
|
||||
[&](const DerivationOutputCAFixed & dof) {
|
||||
out << store.printStorePath(dof.path(store, drv.name, i.first))
|
||||
<< dof.hash.printMethodAlgo()
|
||||
<< dof.hash.hash.to_string(Base16, false);
|
||||
},
|
||||
[&](DerivationOutputCAFloating dof) {
|
||||
[&](const DerivationOutputCAFloating & dof) {
|
||||
out << ""
|
||||
<< (makeFileIngestionPrefix(dof.method) + printHashType(dof.hashType))
|
||||
<< "";
|
||||
},
|
||||
[&](DerivationOutputDeferred) {
|
||||
[&](const DerivationOutputDeferred &) {
|
||||
out << ""
|
||||
<< ""
|
||||
<< "";
|
||||
|
@ -696,10 +699,10 @@ void writeDerivation(Sink & out, const Store & store, const BasicDerivation & dr
|
|||
}
|
||||
|
||||
|
||||
std::string hashPlaceholder(const std::string & outputName)
|
||||
std::string hashPlaceholder(const std::string_view outputName)
|
||||
{
|
||||
// FIXME: memoize?
|
||||
return "/" + hashString(htSHA256, "nix-output:" + outputName).to_string(Base32, false);
|
||||
return "/" + hashString(htSHA256, concatStrings("nix-output:", outputName)).to_string(Base32, false);
|
||||
}
|
||||
|
||||
std::string downstreamPlaceholder(const Store & store, const StorePath & drvPath, std::string_view outputName)
|
||||
|
|
|
@ -59,21 +59,19 @@ struct DerivationOutput
|
|||
std::optional<StorePath> path(const Store & store, std::string_view drvName, std::string_view outputName) const;
|
||||
};
|
||||
|
||||
typedef std::map<string, DerivationOutput> DerivationOutputs;
|
||||
typedef std::map<std::string, DerivationOutput> DerivationOutputs;
|
||||
|
||||
/* These are analogues to the previous DerivationOutputs data type, but they
|
||||
also contains, for each output, the (optional) store path in which it would
|
||||
be written. To calculate values of these types, see the corresponding
|
||||
functions in BasicDerivation */
|
||||
typedef std::map<string, std::pair<DerivationOutput, std::optional<StorePath>>>
|
||||
typedef std::map<std::string, std::pair<DerivationOutput, std::optional<StorePath>>>
|
||||
DerivationOutputsAndOptPaths;
|
||||
|
||||
/* For inputs that are sub-derivations, we specify exactly which
|
||||
output IDs we are interested in. */
|
||||
typedef std::map<StorePath, StringSet> DerivationInputs;
|
||||
|
||||
typedef std::map<string, string> StringPairs;
|
||||
|
||||
enum struct DerivationType : uint8_t {
|
||||
InputAddressed,
|
||||
DeferredInputAddressed,
|
||||
|
@ -103,7 +101,7 @@ struct BasicDerivation
|
|||
{
|
||||
DerivationOutputs outputs; /* keyed on symbolic IDs */
|
||||
StorePathSet inputSrcs; /* inputs that are sources */
|
||||
string platform;
|
||||
std::string platform;
|
||||
Path builder;
|
||||
Strings args;
|
||||
StringPairs env;
|
||||
|
@ -164,7 +162,7 @@ StorePath writeDerivation(Store & store,
|
|||
Derivation parseDerivation(const Store & store, std::string && s, std::string_view name);
|
||||
|
||||
// FIXME: remove
|
||||
bool isDerivation(const string & fileName);
|
||||
bool isDerivation(const std::string & fileName);
|
||||
|
||||
/* Calculate the name that will be used for the store path for this
|
||||
output.
|
||||
|
@ -222,7 +220,7 @@ typedef std::map<StorePath, DrvHashModulo> DrvHashes;
|
|||
// FIXME: global, though at least thread-safe.
|
||||
extern Sync<DrvHashes> drvHashes;
|
||||
|
||||
bool wantOutput(const string & output, const std::set<string> & wanted);
|
||||
bool wantOutput(const std::string & output, const std::set<std::string> & wanted);
|
||||
|
||||
struct Source;
|
||||
struct Sink;
|
||||
|
@ -236,7 +234,7 @@ void writeDerivation(Sink & out, const Store & store, const BasicDerivation & dr
|
|||
It is used as a placeholder to allow derivations to refer to their
|
||||
own outputs without needing to use the hash of a derivation in
|
||||
itself, making the hash near-impossible to calculate. */
|
||||
std::string hashPlaceholder(const std::string & outputName);
|
||||
std::string hashPlaceholder(const std::string_view outputName);
|
||||
|
||||
/* This creates an opaque and almost certainly unique string
|
||||
deterministically from a derivation path and output name.
|
||||
|
|
|
@ -24,8 +24,8 @@ StorePathSet BuiltPath::outPaths() const
|
|||
{
|
||||
return std::visit(
|
||||
overloaded{
|
||||
[](BuiltPath::Opaque p) { return StorePathSet{p.path}; },
|
||||
[](BuiltPath::Built b) {
|
||||
[](const BuiltPath::Opaque & p) { return StorePathSet{p.path}; },
|
||||
[](const BuiltPath::Built & b) {
|
||||
StorePathSet res;
|
||||
for (auto & [_, path] : b.outputs)
|
||||
res.insert(path);
|
||||
|
@ -75,9 +75,9 @@ DerivedPath::Built DerivedPath::Built::parse(const Store & store, std::string_vi
|
|||
assert(n != s.npos);
|
||||
auto drvPath = store.parseStorePath(s.substr(0, n));
|
||||
auto outputsS = s.substr(n + 1);
|
||||
std::set<string> outputs;
|
||||
std::set<std::string> outputs;
|
||||
if (outputsS != "*")
|
||||
outputs = tokenizeString<std::set<string>>(outputsS, ",");
|
||||
outputs = tokenizeString<std::set<std::string>>(outputsS, ",");
|
||||
return {drvPath, outputs};
|
||||
}
|
||||
|
||||
|
@ -94,13 +94,13 @@ RealisedPath::Set BuiltPath::toRealisedPaths(Store & store) const
|
|||
RealisedPath::Set res;
|
||||
std::visit(
|
||||
overloaded{
|
||||
[&](BuiltPath::Opaque p) { res.insert(p.path); },
|
||||
[&](BuiltPath::Built p) {
|
||||
[&](const BuiltPath::Opaque & p) { res.insert(p.path); },
|
||||
[&](const BuiltPath::Built & p) {
|
||||
auto drvHashes =
|
||||
staticOutputHashes(store, store.readDerivation(p.drvPath));
|
||||
for (auto& [outputName, outputPath] : p.outputs) {
|
||||
if (settings.isExperimentalFeatureEnabled(
|
||||
"ca-derivations")) {
|
||||
Xp::CaDerivations)) {
|
||||
auto thisRealisation = store.queryRealisation(
|
||||
DrvOutput{drvHashes.at(outputName), outputName});
|
||||
assert(thisRealisation); // We’ve built it, so we must h
|
||||
|
|
|
@ -21,7 +21,7 @@ struct DummyStore : public virtual DummyStoreConfig, public virtual Store
|
|||
, Store(params)
|
||||
{ }
|
||||
|
||||
string getUri() override
|
||||
std::string getUri() override
|
||||
{
|
||||
return *uriSchemes().begin();
|
||||
}
|
||||
|
@ -43,15 +43,19 @@ struct DummyStore : public virtual DummyStoreConfig, public virtual Store
|
|||
RepairFlag repair, CheckSigsFlag checkSigs) override
|
||||
{ unsupported("addToStore"); }
|
||||
|
||||
StorePath addTextToStore(const string & name, const string & s,
|
||||
const StorePathSet & references, RepairFlag repair) override
|
||||
StorePath addTextToStore(
|
||||
std::string_view name,
|
||||
std::string_view s,
|
||||
const StorePathSet & references,
|
||||
RepairFlag repair) override
|
||||
{ unsupported("addTextToStore"); }
|
||||
|
||||
void narFromPath(const StorePath & path, Sink & sink) override
|
||||
{ unsupported("narFromPath"); }
|
||||
|
||||
std::optional<const Realisation> queryRealisation(const DrvOutput&) override
|
||||
{ unsupported("queryRealisation"); }
|
||||
void queryRealisationUncached(const DrvOutput &,
|
||||
Callback<std::shared_ptr<const Realisation>> callback) noexcept override
|
||||
{ callback(nullptr); }
|
||||
};
|
||||
|
||||
static RegisterStoreImplementation<DummyStore, DummyStoreConfig> regDummyStore;
|
||||
|
|
|
@ -75,20 +75,20 @@ StorePaths Store::importPaths(Source & source, CheckSigsFlag checkSigs)
|
|||
|
||||
auto references = worker_proto::read(*this, source, Phantom<StorePathSet> {});
|
||||
auto deriver = readString(source);
|
||||
auto narHash = hashString(htSHA256, *saved.s);
|
||||
auto narHash = hashString(htSHA256, saved.s);
|
||||
|
||||
ValidPathInfo info { path, narHash };
|
||||
if (deriver != "")
|
||||
info.deriver = parseStorePath(deriver);
|
||||
info.references = references;
|
||||
info.narSize = saved.s->size();
|
||||
info.narSize = saved.s.size();
|
||||
|
||||
// Ignore optional legacy signature.
|
||||
if (readInt(source) == 1)
|
||||
readString(source);
|
||||
|
||||
// Can't use underlying source, which would have been exhausted
|
||||
auto source = StringSource { *saved.s };
|
||||
auto source = StringSource(saved.s);
|
||||
addToStore(info, source, NoRepair, checkSigs);
|
||||
|
||||
res.push_back(info.path);
|
||||
|
|
|
@ -33,12 +33,12 @@ FileTransferSettings fileTransferSettings;
|
|||
|
||||
static GlobalConfig::Register rFileTransferSettings(&fileTransferSettings);
|
||||
|
||||
std::string resolveUri(const std::string & uri)
|
||||
std::string resolveUri(std::string_view uri)
|
||||
{
|
||||
if (uri.compare(0, 8, "channel:") == 0)
|
||||
return "https://nixos.org/channels/" + std::string(uri, 8) + "/nixexprs.tar.xz";
|
||||
return "https://nixos.org/channels/" + std::string(uri.substr(8)) + "/nixexprs.tar.xz";
|
||||
else
|
||||
return uri;
|
||||
return std::string(uri);
|
||||
}
|
||||
|
||||
struct curlFileTransfer : public FileTransfer
|
||||
|
@ -106,7 +106,7 @@ struct curlFileTransfer : public FileTransfer
|
|||
this->request.dataCallback(data);
|
||||
}
|
||||
} else
|
||||
this->result.data->append(data);
|
||||
this->result.data.append(data);
|
||||
})
|
||||
{
|
||||
if (!request.expectedETag.empty())
|
||||
|
@ -128,7 +128,7 @@ struct curlFileTransfer : public FileTransfer
|
|||
if (requestHeaders) curl_slist_free_all(requestHeaders);
|
||||
try {
|
||||
if (!done)
|
||||
fail(FileTransferError(Interrupted, nullptr, "download of '%s' was interrupted", request.uri));
|
||||
fail(FileTransferError(Interrupted, {}, "download of '%s' was interrupted", request.uri));
|
||||
} catch (...) {
|
||||
ignoreException();
|
||||
}
|
||||
|
@ -195,17 +195,17 @@ struct curlFileTransfer : public FileTransfer
|
|||
std::smatch match;
|
||||
if (std::regex_match(line, match, statusLine)) {
|
||||
result.etag = "";
|
||||
result.data = std::make_shared<std::string>();
|
||||
result.data.clear();
|
||||
result.bodySize = 0;
|
||||
statusMsg = trim(match[1]);
|
||||
statusMsg = trim(match.str(1));
|
||||
acceptRanges = false;
|
||||
encoding = "";
|
||||
} else {
|
||||
auto i = line.find(':');
|
||||
if (i != string::npos) {
|
||||
string name = toLower(trim(string(line, 0, i)));
|
||||
if (i != std::string::npos) {
|
||||
std::string name = toLower(trim(line.substr(0, i)));
|
||||
if (name == "etag") {
|
||||
result.etag = trim(string(line, i + 1));
|
||||
result.etag = trim(line.substr(i + 1));
|
||||
/* Hack to work around a GitHub bug: it sends
|
||||
ETags, but ignores If-None-Match. So if we get
|
||||
the expected ETag on a 200 response, then shut
|
||||
|
@ -218,8 +218,8 @@ struct curlFileTransfer : public FileTransfer
|
|||
return 0;
|
||||
}
|
||||
} else if (name == "content-encoding")
|
||||
encoding = trim(string(line, i + 1));
|
||||
else if (name == "accept-ranges" && toLower(trim(std::string(line, i + 1))) == "bytes")
|
||||
encoding = trim(line.substr(i + 1));
|
||||
else if (name == "accept-ranges" && toLower(trim(line.substr(i + 1))) == "bytes")
|
||||
acceptRanges = true;
|
||||
}
|
||||
}
|
||||
|
@ -340,7 +340,7 @@ struct curlFileTransfer : public FileTransfer
|
|||
if (writtenToSink)
|
||||
curl_easy_setopt(req, CURLOPT_RESUME_FROM_LARGE, writtenToSink);
|
||||
|
||||
result.data = std::make_shared<std::string>();
|
||||
result.data.clear();
|
||||
result.bodySize = 0;
|
||||
}
|
||||
|
||||
|
@ -434,21 +434,21 @@ struct curlFileTransfer : public FileTransfer
|
|||
|
||||
attempt++;
|
||||
|
||||
std::shared_ptr<std::string> response;
|
||||
std::optional<std::string> response;
|
||||
if (errorSink)
|
||||
response = errorSink->s;
|
||||
response = std::move(errorSink->s);
|
||||
auto exc =
|
||||
code == CURLE_ABORTED_BY_CALLBACK && _isInterrupted
|
||||
? FileTransferError(Interrupted, response, "%s of '%s' was interrupted", request.verb(), request.uri)
|
||||
? FileTransferError(Interrupted, std::move(response), "%s of '%s' was interrupted", request.verb(), request.uri)
|
||||
: httpStatus != 0
|
||||
? FileTransferError(err,
|
||||
response,
|
||||
std::move(response),
|
||||
fmt("unable to %s '%s': HTTP error %d ('%s')",
|
||||
request.verb(), request.uri, httpStatus, statusMsg)
|
||||
+ (code == CURLE_OK ? "" : fmt(" (curl error: %s)", curl_easy_strerror(code)))
|
||||
)
|
||||
: FileTransferError(err,
|
||||
response,
|
||||
std::move(response),
|
||||
fmt("unable to %s '%s': %s (%d)",
|
||||
request.verb(), request.uri, curl_easy_strerror(code), code));
|
||||
|
||||
|
@ -544,6 +544,8 @@ struct curlFileTransfer : public FileTransfer
|
|||
stopWorkerThread();
|
||||
});
|
||||
|
||||
unshareFilesystem();
|
||||
|
||||
std::map<CURL *, std::shared_ptr<TransferItem>> items;
|
||||
|
||||
bool quit = false;
|
||||
|
@ -702,8 +704,8 @@ struct curlFileTransfer : public FileTransfer
|
|||
auto s3Res = s3Helper.getObject(bucketName, key);
|
||||
FileTransferResult res;
|
||||
if (!s3Res.data)
|
||||
throw FileTransferError(NotFound, nullptr, "S3 object '%s' does not exist", request.uri);
|
||||
res.data = s3Res.data;
|
||||
throw FileTransferError(NotFound, {}, "S3 object '%s' does not exist", request.uri);
|
||||
res.data = std::move(*s3Res.data);
|
||||
callback(std::move(res));
|
||||
#else
|
||||
throw nix::Error("cannot download '%s' because Nix is not built with S3 support", request.uri);
|
||||
|
@ -716,15 +718,24 @@ struct curlFileTransfer : public FileTransfer
|
|||
}
|
||||
};
|
||||
|
||||
ref<curlFileTransfer> makeCurlFileTransfer()
|
||||
{
|
||||
return make_ref<curlFileTransfer>();
|
||||
}
|
||||
|
||||
ref<FileTransfer> getFileTransfer()
|
||||
{
|
||||
static ref<FileTransfer> fileTransfer = makeFileTransfer();
|
||||
static ref<curlFileTransfer> fileTransfer = makeCurlFileTransfer();
|
||||
|
||||
if (fileTransfer->state_.lock()->quit)
|
||||
fileTransfer = makeCurlFileTransfer();
|
||||
|
||||
return fileTransfer;
|
||||
}
|
||||
|
||||
ref<FileTransfer> makeFileTransfer()
|
||||
{
|
||||
return make_ref<curlFileTransfer>();
|
||||
return makeCurlFileTransfer();
|
||||
}
|
||||
|
||||
std::future<FileTransferResult> FileTransfer::enqueueFileTransfer(const FileTransferRequest & request)
|
||||
|
@ -848,25 +859,25 @@ void FileTransfer::download(FileTransferRequest && request, Sink & sink)
|
|||
}
|
||||
|
||||
template<typename... Args>
|
||||
FileTransferError::FileTransferError(FileTransfer::Error error, std::shared_ptr<string> response, const Args & ... args)
|
||||
FileTransferError::FileTransferError(FileTransfer::Error error, std::optional<std::string> response, const Args & ... args)
|
||||
: Error(args...), error(error), response(response)
|
||||
{
|
||||
const auto hf = hintfmt(args...);
|
||||
// FIXME: Due to https://github.com/NixOS/nix/issues/3841 we don't know how
|
||||
// to print different messages for different verbosity levels. For now
|
||||
// we add some heuristics for detecting when we want to show the response.
|
||||
if (response && (response->size() < 1024 || response->find("<html>") != string::npos))
|
||||
if (response && (response->size() < 1024 || response->find("<html>") != std::string::npos))
|
||||
err.msg = hintfmt("%1%\n\nresponse body:\n\n%2%", normaltxt(hf.str()), chomp(*response));
|
||||
else
|
||||
err.msg = hf;
|
||||
}
|
||||
|
||||
bool isUri(const string & s)
|
||||
bool isUri(std::string_view s)
|
||||
{
|
||||
if (s.compare(0, 8, "channel:") == 0) return true;
|
||||
size_t pos = s.find("://");
|
||||
if (pos == string::npos) return false;
|
||||
string scheme(s, 0, pos);
|
||||
if (pos == std::string::npos) return false;
|
||||
std::string scheme(s, 0, pos);
|
||||
return scheme == "http" || scheme == "https" || scheme == "file" || scheme == "channel" || scheme == "git" || scheme == "s3" || scheme == "ssh";
|
||||
}
|
||||
|
||||
|
|
|
@ -59,7 +59,7 @@ struct FileTransferRequest
|
|||
unsigned int baseRetryTimeMs = 250;
|
||||
ActivityId parentAct;
|
||||
bool decompress = true;
|
||||
std::shared_ptr<std::string> data;
|
||||
std::optional<std::string> data;
|
||||
std::string mimeType;
|
||||
std::function<void(std::string_view data)> dataCallback;
|
||||
|
||||
|
@ -77,7 +77,7 @@ struct FileTransferResult
|
|||
bool cached = false;
|
||||
std::string etag;
|
||||
std::string effectiveUri;
|
||||
std::shared_ptr<std::string> data;
|
||||
std::string data;
|
||||
uint64_t bodySize = 0;
|
||||
};
|
||||
|
||||
|
@ -119,17 +119,17 @@ class FileTransferError : public Error
|
|||
{
|
||||
public:
|
||||
FileTransfer::Error error;
|
||||
std::shared_ptr<string> response; // intentionally optional
|
||||
std::optional<std::string> response; // intentionally optional
|
||||
|
||||
template<typename... Args>
|
||||
FileTransferError(FileTransfer::Error error, std::shared_ptr<string> response, const Args & ... args);
|
||||
FileTransferError(FileTransfer::Error error, std::optional<std::string> response, const Args & ... args);
|
||||
|
||||
virtual const char* sname() const override { return "FileTransferError"; }
|
||||
};
|
||||
|
||||
bool isUri(const string & s);
|
||||
bool isUri(std::string_view s);
|
||||
|
||||
/* Resolve deprecated 'channel:<foo>' URLs. */
|
||||
std::string resolveUri(const std::string & uri);
|
||||
std::string resolveUri(std::string_view uri);
|
||||
|
||||
}
|
||||
|
|
src/libstore/gc-store.cc (new file, 13 lines)

@@ -0,0 +1,13 @@
+#include "gc-store.hh"
+
+namespace nix {
+
+GcStore & requireGcStore(Store & store)
+{
+    auto * gcStore = dynamic_cast<GcStore *>(&store);
+    if (!gcStore)
+        throw UsageError("Garbage collection not supported by this store");
+    return *gcStore;
+}
+
+}
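The new gc-store files split garbage collection out of Store into a GcStore interface; requireGcStore() is the entry point for callers that need GC on an arbitrary store and fails cleanly when the store cannot collect garbage. A small usage sketch (illustrative only, not part of the commit):

#include "gc-store.hh"

void collectDeadPaths(nix::Store & store)
{
    // Throws UsageError if this store type does not support GC.
    auto & gcStore = nix::requireGcStore(store);

    nix::GCOptions options;
    options.action = nix::GCOptions::gcDeleteDead;

    nix::GCResults results;
    gcStore.collectGarbage(options, results);
    // results.paths / results.bytesFreed now describe what was deleted.
}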
src/libstore/gc-store.hh (new file, 84 lines)
@ -0,0 +1,84 @@
|
|||
#pragma once
|
||||
|
||||
#include "store-api.hh"
|
||||
|
||||
|
||||
namespace nix {
|
||||
|
||||
|
||||
typedef std::unordered_map<StorePath, std::unordered_set<std::string>> Roots;
|
||||
|
||||
|
||||
struct GCOptions
|
||||
{
|
||||
/* Garbage collector operation:
|
||||
|
||||
- `gcReturnLive': return the set of paths reachable from
|
||||
(i.e. in the closure of) the roots.
|
||||
|
||||
- `gcReturnDead': return the set of paths not reachable from
|
||||
the roots.
|
||||
|
||||
- `gcDeleteDead': actually delete the latter set.
|
||||
|
||||
- `gcDeleteSpecific': delete the paths listed in
|
||||
`pathsToDelete', insofar as they are not reachable.
|
||||
*/
|
||||
typedef enum {
|
||||
gcReturnLive,
|
||||
gcReturnDead,
|
||||
gcDeleteDead,
|
||||
gcDeleteSpecific,
|
||||
} GCAction;
|
||||
|
||||
GCAction action{gcDeleteDead};
|
||||
|
||||
/* If `ignoreLiveness' is set, then reachability from the roots is
|
||||
ignored (dangerous!). However, the paths must still be
|
||||
unreferenced *within* the store (i.e., there can be no other
|
||||
store paths that depend on them). */
|
||||
bool ignoreLiveness{false};
|
||||
|
||||
/* For `gcDeleteSpecific', the paths to delete. */
|
||||
StorePathSet pathsToDelete;
|
||||
|
||||
/* Stop after at least `maxFreed' bytes have been freed. */
|
||||
uint64_t maxFreed{std::numeric_limits<uint64_t>::max()};
|
||||
};
|
||||
|
||||
|
||||
struct GCResults
|
||||
{
|
||||
/* Depending on the action, the GC roots, or the paths that would
|
||||
be or have been deleted. */
|
||||
PathSet paths;
|
||||
|
||||
/* For `gcReturnDead', `gcDeleteDead' and `gcDeleteSpecific', the
|
||||
number of bytes that would be or was freed. */
|
||||
uint64_t bytesFreed = 0;
|
||||
};
|
||||
|
||||
|
||||
struct GcStore : public virtual Store
|
||||
{
|
||||
/* Add an indirect root, which is merely a symlink to `path' from
|
||||
/nix/var/nix/gcroots/auto/<hash of `path'>. `path' is supposed
|
||||
to be a symlink to a store path. The garbage collector will
|
||||
automatically remove the indirect root when it finds that
|
||||
`path' has disappeared. */
|
||||
virtual void addIndirectRoot(const Path & path) = 0;
|
||||
|
||||
/* Find the roots of the garbage collector. Each root is a pair
|
||||
(link, storepath) where `link' is the path of the symlink
|
||||
outside of the Nix store that point to `storePath'. If
|
||||
'censor' is true, privacy-sensitive information about roots
|
||||
found in /proc is censored. */
|
||||
virtual Roots findRoots(bool censor) = 0;
|
||||
|
||||
/* Perform a garbage collection. */
|
||||
virtual void collectGarbage(const GCOptions & options, GCResults & results) = 0;
|
||||
};
|
||||
|
||||
GcStore & requireGcStore(Store & store);
|
||||
|
||||
}
|
File diff suppressed because it is too large
|
@@ -100,7 +100,7 @@ std::vector<Path> getUserConfigFiles()
     // Use the paths specified in NIX_USER_CONF_FILES if it has been defined
     auto nixConfFiles = getEnv("NIX_USER_CONF_FILES");
     if (nixConfFiles.has_value()) {
-        return tokenizeString<std::vector<string>>(nixConfFiles.value(), ":");
+        return tokenizeString<std::vector<std::string>>(nixConfFiles.value(), ":");
     }

     // Use the paths specified by the XDG spec
@ -122,7 +122,7 @@ StringSet Settings::getDefaultSystemFeatures()
|
|||
/* For backwards compatibility, accept some "features" that are
|
||||
used in Nixpkgs to route builds to certain machines but don't
|
||||
actually require anything special on the machines. */
|
||||
StringSet features{"nixos-test", "benchmark", "big-parallel", "recursive-nix"};
|
||||
StringSet features{"nixos-test", "benchmark", "big-parallel"};
|
||||
|
||||
#if __linux__
|
||||
if (access("/dev/kvm", R_OK | W_OK) == 0)
|
||||
|
@ -148,7 +148,8 @@ StringSet Settings::getDefaultExtraPlatforms()
|
|||
// machines. Note that we can’t force processes from executing
|
||||
// x86_64 in aarch64 environments or vice versa since they can
|
||||
// always exec with their own binary preferences.
|
||||
if (pathExists("/Library/Apple/System/Library/LaunchDaemons/com.apple.oahd.plist")) {
|
||||
if (pathExists("/Library/Apple/System/Library/LaunchDaemons/com.apple.oahd.plist") ||
|
||||
pathExists("/System/Library/LaunchDaemons/com.apple.oahd.plist")) {
|
||||
if (std::string{SYSTEM} == "x86_64-darwin")
|
||||
extraPlatforms.insert("aarch64-darwin");
|
||||
else if (std::string{SYSTEM} == "aarch64-darwin")
|
||||
|
@@ -159,21 +160,16 @@ StringSet Settings::getDefaultExtraPlatforms()
     return extraPlatforms;
 }

-bool Settings::isExperimentalFeatureEnabled(const std::string & name)
+bool Settings::isExperimentalFeatureEnabled(const ExperimentalFeature & feature)
 {
     auto & f = experimentalFeatures.get();
-    return std::find(f.begin(), f.end(), name) != f.end();
+    return std::find(f.begin(), f.end(), feature) != f.end();
 }

-MissingExperimentalFeature::MissingExperimentalFeature(std::string feature)
-    : Error("experimental Nix feature '%1%' is disabled; use '--extra-experimental-features %1%' to override", feature)
-    , missingFeature(feature)
-    {}
-
-void Settings::requireExperimentalFeature(const std::string & name)
+void Settings::requireExperimentalFeature(const ExperimentalFeature & feature)
 {
-    if (!isExperimentalFeatureEnabled(name))
-        throw MissingExperimentalFeature(name);
+    if (!isExperimentalFeatureEnabled(feature))
+        throw MissingExperimentalFeature(feature);
 }

 bool Settings::isWSL1()
@ -185,7 +181,7 @@ bool Settings::isWSL1()
|
|||
return hasSuffix(utsbuf.release, "-Microsoft");
|
||||
}
|
||||
|
||||
const string nixVersion = PACKAGE_VERSION;
|
||||
const std::string nixVersion = PACKAGE_VERSION;
|
||||
|
||||
NLOHMANN_JSON_SERIALIZE_ENUM(SandboxMode, {
|
||||
{SandboxMode::smEnabled, true},
|
||||
|
|
|
@ -3,6 +3,7 @@
|
|||
#include "types.hh"
|
||||
#include "config.hh"
|
||||
#include "util.hh"
|
||||
#include "experimental-features.hh"
|
||||
|
||||
#include <map>
|
||||
#include <limits>
|
||||
|
@ -20,7 +21,7 @@ struct MaxBuildJobsSetting : public BaseSetting<unsigned int>
|
|||
const std::string & name,
|
||||
const std::string & description,
|
||||
const std::set<std::string> & aliases = {})
|
||||
: BaseSetting<unsigned int>(def, name, description, aliases)
|
||||
: BaseSetting<unsigned int>(def, true, name, description, aliases)
|
||||
{
|
||||
options->addSetting(this);
|
||||
}
|
||||
|
@ -37,7 +38,7 @@ struct PluginFilesSetting : public BaseSetting<Paths>
|
|||
const std::string & name,
|
||||
const std::string & description,
|
||||
const std::set<std::string> & aliases = {})
|
||||
: BaseSetting<Paths>(def, name, description, aliases)
|
||||
: BaseSetting<Paths>(def, true, name, description, aliases)
|
||||
{
|
||||
options->addSetting(this);
|
||||
}
|
||||
|
@ -45,15 +46,6 @@ struct PluginFilesSetting : public BaseSetting<Paths>
|
|||
void set(const std::string & str, bool append = false) override;
|
||||
};
|
||||
|
||||
class MissingExperimentalFeature: public Error
|
||||
{
|
||||
public:
|
||||
std::string missingFeature;
|
||||
|
||||
MissingExperimentalFeature(std::string feature);
|
||||
virtual const char* sname() const override { return "MissingExperimentalFeature"; }
|
||||
};
|
||||
|
||||
class Settings : public Config {
|
||||
|
||||
unsigned int getDefaultCores();
|
||||
|
@ -121,7 +113,7 @@ public:
|
|||
bool verboseBuild = true;
|
||||
|
||||
Setting<size_t> logLines{this, 10, "log-lines",
|
||||
"If `verbose-build` is false, the number of lines of the tail of "
|
||||
"The number of lines of the tail of "
|
||||
"the log to show if a build fails."};
|
||||
|
||||
MaxBuildJobsSetting maxBuildJobs{
|
||||
|
@ -138,7 +130,9 @@ public:
|
|||
{"build-max-jobs"}};
|
||||
|
||||
Setting<unsigned int> buildCores{
|
||||
this, getDefaultCores(), "cores",
|
||||
this,
|
||||
getDefaultCores(),
|
||||
"cores",
|
||||
R"(
|
||||
Sets the value of the `NIX_BUILD_CORES` environment variable in the
|
||||
invocation of builders. Builders can use this variable at their
|
||||
|
@ -149,7 +143,7 @@ public:
|
|||
command line switch and defaults to `1`. The value `0` means that
|
||||
the builder should use all available CPU cores in the system.
|
||||
)",
|
||||
{"build-cores"}};
|
||||
{"build-cores"}, false};
|
||||
|
||||
/* Read-only mode. Don't copy stuff to the store, don't change
|
||||
the database. */
|
||||
|
@ -591,10 +585,11 @@ public:
|
|||
platform and generate incompatible code, so you may wish to
|
||||
cross-check the results of using this option against proper
|
||||
natively-built versions of your derivations.
|
||||
)"};
|
||||
)", {}, false};
|
||||
|
||||
Setting<StringSet> systemFeatures{
|
||||
this, getDefaultSystemFeatures(),
|
||||
this,
|
||||
getDefaultSystemFeatures(),
|
||||
"system-features",
|
||||
R"(
|
||||
A set of system “features” supported by this machine, e.g. `kvm`.
|
||||
|
@ -610,7 +605,7 @@ public:
|
|||
This setting by default includes `kvm` if `/dev/kvm` is accessible,
|
||||
and the pseudo-features `nixos-test`, `benchmark` and `big-parallel`
|
||||
that are used in Nixpkgs to route builds to specific machines.
|
||||
)"};
|
||||
)", {}, false};
|
||||
|
||||
Setting<Strings> substituters{
|
||||
this,
|
||||
|
@ -805,6 +800,15 @@ public:
|
|||
may be useful in certain scenarios (e.g. to spin up containers or
|
||||
set up userspace network interfaces in tests).
|
||||
)"};
|
||||
|
||||
Setting<StringSet> ignoredAcls{
|
||||
this, {"security.selinux", "system.nfs4_acl"}, "ignored-acls",
|
||||
R"(
|
||||
A list of ACLs that should be ignored, normally Nix attempts to
|
||||
remove all ACLs from files and directories in the Nix store, but
|
||||
some ACLs like `security.selinux` or `system.nfs4_acl` can't be
|
||||
removed even by root. Therefore it's best to just ignore them.
|
||||
)"};
|
||||
#endif
|
||||
|
||||
Setting<Strings> hashedMirrors{
|
||||
|
@ -876,74 +880,16 @@ public:
|
|||
are loaded as plugins (non-recursively).
|
||||
)"};
|
||||
|
||||
Setting<StringMap> accessTokens{this, {}, "access-tokens",
|
||||
R"(
|
||||
Access tokens used to access protected GitHub, GitLab, or
|
||||
other locations requiring token-based authentication.
|
||||
|
||||
Access tokens are specified as a string made up of
|
||||
space-separated `host=token` values. The specific token
|
||||
used is selected by matching the `host` portion against the
|
||||
"host" specification of the input. The actual use of the
|
||||
`token` value is determined by the type of resource being
|
||||
accessed:
|
||||
|
||||
* Github: the token value is the OAUTH-TOKEN string obtained
|
||||
as the Personal Access Token from the Github server (see
|
||||
https://docs.github.com/en/developers/apps/authorizing-oath-apps).
|
||||
|
||||
* Gitlab: the token value is either the OAuth2 token or the
|
||||
Personal Access Token (these are different types tokens
|
||||
for gitlab, see
|
||||
https://docs.gitlab.com/12.10/ee/api/README.html#authentication).
|
||||
The `token` value should be `type:tokenstring` where
|
||||
`type` is either `OAuth2` or `PAT` to indicate which type
|
||||
of token is being specified.
|
||||
|
||||
Example `~/.config/nix/nix.conf`:
|
||||
|
||||
```
|
||||
access-tokens = github.com=23ac...b289 gitlab.mycompany.com=PAT:A123Bp_Cd..EfG gitlab.com=OAuth2:1jklw3jk
|
||||
```
|
||||
|
||||
Example `~/code/flake.nix`:
|
||||
|
||||
```nix
|
||||
input.foo = {
|
||||
type = "gitlab";
|
||||
host = "gitlab.mycompany.com";
|
||||
owner = "mycompany";
|
||||
repo = "pro";
|
||||
};
|
||||
```
|
||||
|
||||
This example specifies three tokens, one each for accessing
|
||||
github.com, gitlab.mycompany.com, and sourceforge.net.
|
||||
|
||||
The `input.foo` uses the "gitlab" fetcher, which might
|
||||
requires specifying the token type along with the token
|
||||
value.
|
||||
)"};
|
||||
|
||||
Setting<Strings> experimentalFeatures{this, {}, "experimental-features",
|
||||
Setting<std::set<ExperimentalFeature>> experimentalFeatures{this, {}, "experimental-features",
|
||||
"Experimental Nix features to enable."};
|
||||
|
||||
bool isExperimentalFeatureEnabled(const std::string & name);
|
||||
bool isExperimentalFeatureEnabled(const ExperimentalFeature &);
|
||||
|
||||
void requireExperimentalFeature(const std::string & name);
|
||||
|
||||
Setting<bool> allowDirty{this, true, "allow-dirty",
|
||||
"Whether to allow dirty Git/Mercurial trees."};
|
||||
|
||||
Setting<bool> warnDirty{this, true, "warn-dirty",
|
||||
"Whether to warn about dirty Git/Mercurial trees."};
|
||||
void requireExperimentalFeature(const ExperimentalFeature &);
|
||||
|
||||
Setting<size_t> narBufferSize{this, 32 * 1024 * 1024, "nar-buffer-size",
|
||||
"Maximum size of NARs before spilling them to disk."};
|
||||
|
||||
Setting<std::string> flakeRegistry{this, "https://github.com/NixOS/flake-registry/raw/master/flake-registry.json", "flake-registry",
|
||||
"Path or URI of the global flake registry."};
|
||||
|
||||
Setting<bool> allowSymlinkedStore{
|
||||
this, false, "allow-symlinked-store",
|
||||
R"(
|
||||
|
@ -956,9 +902,6 @@ public:
|
|||
resolves to a different location from that of the build machine. You
|
||||
can enable this setting if you are sure you're not going to do that.
|
||||
)"};
|
||||
|
||||
Setting<bool> useRegistries{this, true, "use-registries",
|
||||
"Whether to use flake registries to resolve flake references."};
|
||||
};
|
||||
|
||||
|
||||
|
@ -974,6 +917,6 @@ void loadConfFile();
|
|||
// Used by the Settings constructor
|
||||
std::vector<Path> getUserConfigFiles();
|
||||
|
||||
extern const string nixVersion;
|
||||
extern const std::string nixVersion;
|
||||
|
||||
}
|
||||
|
|
|
@ -126,7 +126,7 @@ protected:
|
|||
const std::string & mimeType) override
|
||||
{
|
||||
auto req = makeRequest(path);
|
||||
req.data = std::make_shared<string>(StreamToSourceAdapter(istream).drain());
|
||||
req.data = StreamToSourceAdapter(istream).drain();
|
||||
req.mimeType = mimeType;
|
||||
try {
|
||||
getFileTransfer()->upload(req);
|
||||
|
@ -159,7 +159,7 @@ protected:
|
|||
}
|
||||
|
||||
void getFile(const std::string & path,
|
||||
Callback<std::shared_ptr<std::string>> callback) noexcept override
|
||||
Callback<std::optional<std::string>> callback) noexcept override
|
||||
{
|
||||
checkEnabled();
|
||||
|
||||
|
@ -170,10 +170,10 @@ protected:
|
|||
getFileTransfer()->enqueueFileTransfer(request,
|
||||
{[callbackPtr, this](std::future<FileTransferResult> result) {
|
||||
try {
|
||||
(*callbackPtr)(result.get().data);
|
||||
(*callbackPtr)(std::move(result.get().data));
|
||||
} catch (FileTransferError & e) {
|
||||
if (e.error == FileTransfer::NotFound || e.error == FileTransfer::Forbidden)
|
||||
return (*callbackPtr)(std::shared_ptr<std::string>());
|
||||
return (*callbackPtr)({});
|
||||
maybeDisable();
|
||||
callbackPtr->rethrow();
|
||||
} catch (...) {
|
||||
|
|
|
@ -2,6 +2,7 @@
|
|||
#include "pool.hh"
|
||||
#include "remote-store.hh"
|
||||
#include "serve-protocol.hh"
|
||||
#include "build-result.hh"
|
||||
#include "store-api.hh"
|
||||
#include "path-with-outputs.hh"
|
||||
#include "worker-protocol.hh"
|
||||
|
@ -48,7 +49,7 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor
|
|||
|
||||
static std::set<std::string> uriSchemes() { return {"ssh"}; }
|
||||
|
||||
LegacySSHStore(const string & scheme, const string & host, const Params & params)
|
||||
LegacySSHStore(const std::string & scheme, const std::string & host, const Params & params)
|
||||
: StoreConfig(params)
|
||||
, LegacySSHStoreConfig(params)
|
||||
, Store(params)
|
||||
|
@ -94,7 +95,7 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor
|
|||
conn->sshConn->in.close();
|
||||
auto msg = conn->from.drain();
|
||||
throw Error("'nix-store --serve' protocol mismatch from '%s', got '%s'",
|
||||
host, chomp(*saved.s + msg));
|
||||
host, chomp(saved.s + msg));
|
||||
}
|
||||
conn->remoteVersion = readInt(conn->from);
|
||||
if (GET_PROTOCOL_MAJOR(conn->remoteVersion) != 0x200)
|
||||
|
@ -107,7 +108,7 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor
|
|||
return conn;
|
||||
};
|
||||
|
||||
string getUri() override
|
||||
std::string getUri() override
|
||||
{
|
||||
return *uriSchemes().begin() + "://" + host;
|
||||
}
|
||||
|
@ -225,13 +226,21 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor
|
|||
std::optional<StorePath> queryPathFromHashPart(const std::string & hashPart) override
|
||||
{ unsupported("queryPathFromHashPart"); }
|
||||
|
||||
StorePath addToStore(const string & name, const Path & srcPath,
|
||||
FileIngestionMethod method, HashType hashAlgo,
|
||||
PathFilter & filter, RepairFlag repair) override
|
||||
StorePath addToStore(
|
||||
std::string_view name,
|
||||
const Path & srcPath,
|
||||
FileIngestionMethod method,
|
||||
HashType hashAlgo,
|
||||
PathFilter & filter,
|
||||
RepairFlag repair,
|
||||
const StorePathSet & references) override
|
||||
{ unsupported("addToStore"); }
|
||||
|
||||
StorePath addTextToStore(const string & name, const string & s,
|
||||
const StorePathSet & references, RepairFlag repair) override
|
||||
StorePath addTextToStore(
|
||||
std::string_view name,
|
||||
std::string_view s,
|
||||
const StorePathSet & references,
|
||||
RepairFlag repair) override
|
||||
{ unsupported("addTextToStore"); }
|
||||
|
||||
private:
|
||||
|
@ -248,6 +257,10 @@ private:
|
|||
conn.to
|
||||
<< settings.buildRepeat
|
||||
<< settings.enforceDeterminism;
|
||||
|
||||
if (GET_PROTOCOL_MINOR(conn.remoteVersion) >= 7) {
|
||||
conn.to << ((int) settings.keepFailed);
|
||||
}
|
||||
}
|
||||
|
||||
public:
|
||||
|
@ -266,7 +279,7 @@ public:
|
|||
|
||||
conn->to.flush();
|
||||
|
||||
BuildResult status;
|
||||
BuildResult status { .path = DerivedPath::Built { .drvPath = drvPath } };
|
||||
status.status = (BuildResult::Status) readInt(conn->from);
|
||||
conn->from >> status.errorMsg;
|
||||
|
||||
|
@ -290,10 +303,10 @@ public:
|
|||
for (auto & p : drvPaths) {
|
||||
auto sOrDrvPath = StorePathWithOutputs::tryFromDerivedPath(p);
|
||||
std::visit(overloaded {
|
||||
[&](StorePathWithOutputs s) {
|
||||
[&](const StorePathWithOutputs & s) {
|
||||
ss.push_back(s.to_string(*this));
|
||||
},
|
||||
[&](StorePath drvPath) {
|
||||
[&](const StorePath & drvPath) {
|
||||
throw Error("wanted to fetch '%s' but the legacy ssh protocol doesn't support merely substituting drv files via the build paths command. It would build them instead. Try using ssh-ng://", printStorePath(drvPath));
|
||||
},
|
||||
}, sOrDrvPath);
|
||||
|
@ -304,7 +317,7 @@ public:
|
|||
|
||||
conn->to.flush();
|
||||
|
||||
BuildResult result;
|
||||
BuildResult result { .path = DerivedPath::Opaque { StorePath::dummy } };
|
||||
result.status = (BuildResult::Status) readInt(conn->from);
|
||||
|
||||
if (!result.success()) {
|
||||
|
@ -363,7 +376,8 @@ public:
|
|||
return conn->remoteVersion;
|
||||
}
|
||||
|
||||
std::optional<const Realisation> queryRealisation(const DrvOutput&) override
|
||||
void queryRealisationUncached(const DrvOutput &,
|
||||
Callback<std::shared_ptr<const Realisation>> callback) noexcept override
|
||||
// TODO: Implement
|
||||
{ unsupported("queryRealisation"); }
|
||||
};
|
||||
|
|
|
@ -96,6 +96,7 @@ void LocalBinaryCacheStore::init()
|
|||
createDirs(binaryCacheDir + "/" + realisationsPrefix);
|
||||
if (writeDebugInfo)
|
||||
createDirs(binaryCacheDir + "/debuginfo");
|
||||
createDirs(binaryCacheDir + "/log");
|
||||
BinaryCacheStore::init();
|
||||
}
|
||||
|
||||
|
|
|
@ -85,36 +85,34 @@ void LocalFSStore::narFromPath(const StorePath & path, Sink & sink)
|
|||
dumpPath(getRealStoreDir() + std::string(printStorePath(path), storeDir.size()), sink);
|
||||
}
|
||||
|
||||
const string LocalFSStore::drvsLogDir = "drvs";
|
||||
const std::string LocalFSStore::drvsLogDir = "drvs";
|
||||
|
||||
|
||||
|
||||
std::shared_ptr<std::string> LocalFSStore::getBuildLog(const StorePath & path_)
|
||||
std::optional<std::string> LocalFSStore::getBuildLog(const StorePath & path_)
|
||||
{
|
||||
auto path = path_;
|
||||
|
||||
if (!path.isDerivation()) {
|
||||
try {
|
||||
auto info = queryPathInfo(path);
|
||||
if (!info->deriver) return nullptr;
|
||||
if (!info->deriver) return std::nullopt;
|
||||
path = *info->deriver;
|
||||
} catch (InvalidPath &) {
|
||||
return nullptr;
|
||||
return std::nullopt;
|
||||
}
|
||||
}
|
||||
|
||||
auto baseName = std::string(baseNameOf(printStorePath(path)));
|
||||
auto baseName = path.to_string();
|
||||
|
||||
for (int j = 0; j < 2; j++) {
|
||||
|
||||
Path logPath =
|
||||
j == 0
|
||||
? fmt("%s/%s/%s/%s", logDir, drvsLogDir, string(baseName, 0, 2), string(baseName, 2))
|
||||
? fmt("%s/%s/%s/%s", logDir, drvsLogDir, baseName.substr(0, 2), baseName.substr(2))
|
||||
: fmt("%s/%s/%s", logDir, drvsLogDir, baseName);
|
||||
Path logBz2Path = logPath + ".bz2";
|
||||
|
||||
if (pathExists(logPath))
|
||||
return std::make_shared<std::string>(readFile(logPath));
|
||||
return readFile(logPath);
|
||||
|
||||
else if (pathExists(logBz2Path)) {
|
||||
try {
|
||||
|
@ -124,7 +122,7 @@ std::shared_ptr<std::string> LocalFSStore::getBuildLog(const StorePath & path_)
|
|||
|
||||
}
|
||||
|
||||
return nullptr;
|
||||
return std::nullopt;
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
#pragma once
|
||||
|
||||
#include "store-api.hh"
|
||||
#include "gc-store.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
|
@ -23,11 +24,11 @@ struct LocalFSStoreConfig : virtual StoreConfig
|
|||
"physical path to the Nix store"};
|
||||
};
|
||||
|
||||
class LocalFSStore : public virtual LocalFSStoreConfig, public virtual Store
|
||||
class LocalFSStore : public virtual LocalFSStoreConfig, public virtual Store, virtual GcStore
|
||||
{
|
||||
public:
|
||||
|
||||
const static string drvsLogDir;
|
||||
const static std::string drvsLogDir;
|
||||
|
||||
LocalFSStore(const Params & params);
|
||||
|
||||
|
@ -45,7 +46,8 @@ public:
|
|||
return getRealStoreDir() + "/" + std::string(storePath, storeDir.size() + 1);
|
||||
}
|
||||
|
||||
std::shared_ptr<std::string> getBuildLog(const StorePath & path) override;
|
||||
std::optional<std::string> getBuildLog(const StorePath & path) override;
|
||||
|
||||
};
|
||||
|
||||
}
|
||||
|
|
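Several interfaces in this merge (LocalFSStore::getBuildLog, FileTransferError::response, FileTransferRequest::data) switch from std::shared_ptr<std::string> to std::optional<std::string> for possibly-absent values, so absence is a value rather than a null pointer. A tiny sketch of the resulting call pattern, with made-up helpers:

#include <cstdio>
#include <optional>
#include <string>

// Hypothetical lookup; stands in for something like getBuildLog().
std::optional<std::string> findLog(bool present)
{
    if (!present) return std::nullopt;
    return "log contents";
}

void show(bool present)
{
    if (auto log = findLog(present))
        std::printf("%s\n", log->c_str());
    else
        std::printf("no build log available\n");
}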
Some files were not shown because too many files have changed in this diff.