Mirror of https://github.com/NixOS/nix (synced 2025-07-07 10:11:47 +02:00)

Commit dc92b01885: Merge remote-tracking branch 'upstream/master' into auto-uid-allocation

417 changed files with 22620 additions and 35795 deletions
|
@@ -18,6 +18,7 @@
 #include "derivations.hh"
 #include "local-store.hh"
 #include "legacy.hh"
+#include "experimental-features.hh"
 
 using namespace nix;
 using std::cin;
@@ -31,7 +32,7 @@ std::string escapeUri(std::string uri)
     return uri;
 }
 
-static string currentLoad;
+static std::string currentLoad;
 
 static AutoCloseFD openSlotLock(const Machine & m, uint64_t slot)
 {
@@ -96,7 +97,7 @@ static int main_build_remote(int argc, char * * argv)
     }
 
     std::optional<StorePath> drvPath;
-    string storeUri;
+    std::string storeUri;
 
     while (true) {
 
@@ -130,11 +131,14 @@ static int main_build_remote(int argc, char * * argv)
         for (auto & m : machines) {
             debug("considering building on remote machine '%s'", m.storeUri);
 
-            if (m.enabled && std::find(m.systemTypes.begin(),
-                    m.systemTypes.end(),
-                    neededSystem) != m.systemTypes.end() &&
+            if (m.enabled
+                && (neededSystem == "builtin"
+                    || std::find(m.systemTypes.begin(),
+                        m.systemTypes.end(),
+                        neededSystem) != m.systemTypes.end()) &&
                 m.allSupported(requiredFeatures) &&
-                m.mandatoryMet(requiredFeatures)) {
+                m.mandatoryMet(requiredFeatures))
+            {
                 rightType = true;
                 AutoCloseFD free;
                 uint64_t load = 0;
@@ -179,7 +183,7 @@ static int main_build_remote(int argc, char * * argv)
            else
            {
                // build the hint template.
-               string errorText =
+               std::string errorText =
                    "Failed to find a machine for remote build!\n"
                    "derivation: %s\nrequired (system, features): (%s, %s)";
                errorText += "\n%s available machines:";
@@ -189,7 +193,7 @@ static int main_build_remote(int argc, char * * argv)
                    errorText += "\n(%s, %s, %s, %s)";
 
                // add the template values.
-               string drvstr;
+               std::string drvstr;
                if (drvPath.has_value())
                    drvstr = drvPath->to_string();
                else
@@ -204,7 +208,7 @@ static int main_build_remote(int argc, char * * argv)
 
                for (auto & m : machines)
                    error
-                       % concatStringsSep<vector<string>>(", ", m.systemTypes)
+                       % concatStringsSep<std::vector<std::string>>(", ", m.systemTypes)
                        % m.maxJobs
                        % concatStringsSep<StringSet>(", ", m.supportedFeatures)
                        % concatStringsSep<StringSet>(", ", m.mandatoryFeatures);
@@ -295,7 +299,7 @@ connected:
 
     std::set<Realisation> missingRealisations;
     StorePathSet missingPaths;
-    if (settings.isExperimentalFeatureEnabled("ca-derivations") && !derivationHasKnownOutputPaths(drv.type())) {
+    if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations) && !derivationHasKnownOutputPaths(drv.type())) {
        for (auto & outputName : wantedOutputs) {
            auto thisOutputHash = outputHashes.at(outputName);
            auto thisOutputId = DrvOutput{ thisOutputHash, outputName };
@@ -327,7 +331,7 @@ connected:
     for (auto & realisation : missingRealisations) {
         // Should hold, because if the feature isn't enabled the set
         // of missing realisations should be empty
-        settings.requireExperimentalFeature("ca-derivations");
+        settings.requireExperimentalFeature(Xp::CaDerivations);
         store->registerDrvOutput(realisation);
     }
 
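The two hunks above replace stringly-typed experimental-feature checks ("ca-derivations") with the typed Xp:: enum introduced by this merge, so a misspelled feature name becomes a compile error rather than a silently-false check. A minimal self-contained sketch of that pattern (the names here are illustrative stand-ins, not the actual Nix declarations):

    #include <set>
    #include <stdexcept>

    // Hypothetical stand-ins for nix's ExperimentalFeature / Xp machinery.
    enum class Xp { CaDerivations, Flakes, NixCommand };

    struct Settings {
        std::set<Xp> enabledFeatures;

        bool isExperimentalFeatureEnabled(Xp feature) const {
            return enabledFeatures.count(feature) != 0;
        }

        void requireExperimentalFeature(Xp feature) const {
            if (!isExperimentalFeatureEnabled(feature))
                throw std::runtime_error("experimental feature is disabled");
        }
    };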
@@ -1,18 +0,0 @@
-Copyright (c) 2014 Chase Geigle
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of
-this software and associated documentation files (the "Software"), to deal in
-the Software without restriction, including without limitation the rights to
-use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
-the Software, and to permit persons to whom the Software is furnished to do so,
-subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
-FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
-COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
-IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
-CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
File diff suppressed because it is too large.
|
@ -54,6 +54,36 @@ void StoreCommand::run()
|
|||
run(getStore());
|
||||
}
|
||||
|
||||
CopyCommand::CopyCommand()
|
||||
{
|
||||
addFlag({
|
||||
.longName = "from",
|
||||
.description = "URL of the source Nix store.",
|
||||
.labels = {"store-uri"},
|
||||
.handler = {&srcUri},
|
||||
});
|
||||
|
||||
addFlag({
|
||||
.longName = "to",
|
||||
.description = "URL of the destination Nix store.",
|
||||
.labels = {"store-uri"},
|
||||
.handler = {&dstUri},
|
||||
});
|
||||
}
|
||||
|
||||
ref<Store> CopyCommand::createStore()
|
||||
{
|
||||
return srcUri.empty() ? StoreCommand::createStore() : openStore(srcUri);
|
||||
}
|
||||
|
||||
ref<Store> CopyCommand::getDstStore()
|
||||
{
|
||||
if (srcUri.empty() && dstUri.empty())
|
||||
throw UsageError("you must pass '--from' and/or '--to'");
|
||||
|
||||
return dstUri.empty() ? openStore() : openStore(dstUri);
|
||||
}
|
||||
|
||||
EvalCommand::EvalCommand()
|
||||
{
|
||||
}
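For orientation, a hypothetical subclass sketch (not part of the diff) showing how the new CopyCommand base is meant to be consumed: createStore() yields the --from store and getDstStore() the --to store, while copyPaths is the existing libstore helper. Names beginning with CmdCopySketch are invented for illustration only.

    // Sketch only: a made-up command built on the new CopyCommand base class.
    struct CmdCopySketch : virtual CopyCommand, virtual StorePathsCommand
    {
        void run(ref<Store> srcStore, std::vector<StorePath> && storePaths) override
        {
            ref<Store> dstStore = getDstStore();      // resolves --to (or the default store)
            StorePathSet paths(storePaths.begin(), storePaths.end());
            copyPaths(srcStore, dstStore, paths);     // existing libstore helper
        }
    };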
|
||||
|
@ -74,7 +104,15 @@ ref<Store> EvalCommand::getEvalStore()
|
|||
ref<EvalState> EvalCommand::getEvalState()
|
||||
{
|
||||
if (!evalState)
|
||||
evalState = std::make_shared<EvalState>(searchPath, getEvalStore(), getStore());
|
||||
evalState =
|
||||
#if HAVE_BOEHMGC
|
||||
std::allocate_shared<EvalState>(traceable_allocator<EvalState>(),
|
||||
searchPath, getEvalStore(), getStore())
|
||||
#else
|
||||
std::make_shared<EvalState>(
|
||||
searchPath, getEvalStore(), getStore())
|
||||
#endif
|
||||
;
|
||||
return ref<EvalState>(evalState);
|
||||
}
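The #if HAVE_BOEHMGC branch above exists because EvalState holds pointers into GC-managed memory; allocating the shared object with traceable_allocator keeps those pointers visible to the Boehm collector. A generic, self-contained sketch of the idiom (assuming Boehm's gc_allocator header is available):

    #include <memory>
    #include <gc/gc_allocator.h>   // provides traceable_allocator<T>

    struct Holder { void * gcPointer = nullptr; };

    // The object (and the shared_ptr control block) live in memory that the
    // Boehm GC scans, so whatever 'gcPointer' points at stays alive.
    std::shared_ptr<Holder> makeHolder()
    {
        return std::allocate_shared<Holder>(traceable_allocator<Holder>());
    }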
|
||||
|
||||
|
@ -120,7 +158,7 @@ void BuiltPathsCommand::run(ref<Store> store)
|
|||
// XXX: This only computes the store path closure, ignoring
|
||||
// intermediate realisations
|
||||
StorePathSet pathsRoots, pathsClosure;
|
||||
for (auto & root: paths) {
|
||||
for (auto & root : paths) {
|
||||
auto rootFromThis = root.outPaths();
|
||||
pathsRoots.insert(rootFromThis.begin(), rootFromThis.end());
|
||||
}
|
||||
|
@ -138,17 +176,20 @@ StorePathsCommand::StorePathsCommand(bool recursive)
|
|||
{
|
||||
}
|
||||
|
||||
void StorePathsCommand::run(ref<Store> store, BuiltPaths paths)
|
||||
void StorePathsCommand::run(ref<Store> store, BuiltPaths && paths)
|
||||
{
|
||||
StorePaths storePaths;
|
||||
for (auto& builtPath : paths)
|
||||
for (auto& p : builtPath.outPaths())
|
||||
storePaths.push_back(p);
|
||||
StorePathSet storePaths;
|
||||
for (auto & builtPath : paths)
|
||||
for (auto & p : builtPath.outPaths())
|
||||
storePaths.insert(p);
|
||||
|
||||
run(store, std::move(storePaths));
|
||||
auto sorted = store->topoSortPaths(storePaths);
|
||||
std::reverse(sorted.begin(), sorted.end());
|
||||
|
||||
run(store, std::move(sorted));
|
||||
}
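The rewritten StorePathsCommand::run above deduplicates the output paths and hands subclasses a topologically sorted, then reversed, list instead of an arbitrary one. As a rough, self-contained illustration of such a dependency-respecting ordering (toy types only, not the libstore implementation):

    #include <algorithm>
    #include <functional>
    #include <map>
    #include <set>
    #include <string>
    #include <vector>

    // Toy model: refs[p] is the set of paths that p references.
    std::vector<std::string> topoSortToy(const std::map<std::string, std::set<std::string>> & refs)
    {
        std::vector<std::string> order;
        std::set<std::string> visited;
        std::function<void(const std::string &)> visit = [&](const std::string & p) {
            if (!visited.insert(p).second) return;
            if (auto i = refs.find(p); i != refs.end())
                for (auto & r : i->second) visit(r);
            order.push_back(p);                       // a path is appended after everything it references
        };
        for (auto & [p, _] : refs) visit(p);
        std::reverse(order.begin(), order.end());     // reversed: each path now precedes what it references
        return order;
    }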
|
||||
|
||||
void StorePathCommand::run(ref<Store> store, std::vector<StorePath> storePaths)
|
||||
void StorePathCommand::run(ref<Store> store, std::vector<StorePath> && storePaths)
|
||||
{
|
||||
if (storePaths.size() != 1)
|
||||
throw UsageError("this command requires exactly one store path");
|
||||
|
@ -200,10 +241,10 @@ void MixProfile::updateProfile(const BuiltPaths & buildables)
|
|||
|
||||
for (auto & buildable : buildables) {
|
||||
std::visit(overloaded {
|
||||
[&](BuiltPath::Opaque bo) {
|
||||
[&](const BuiltPath::Opaque & bo) {
|
||||
result.push_back(bo.path);
|
||||
},
|
||||
[&](BuiltPath::Built bfd) {
|
||||
[&](const BuiltPath::Built & bfd) {
|
||||
for (auto & output : bfd.outputs) {
|
||||
result.push_back(output.second);
|
||||
}
|
||||
|
|
|
@ -43,6 +43,19 @@ private:
|
|||
std::shared_ptr<Store> _store;
|
||||
};
|
||||
|
||||
/* A command that copies something between `--from` and `--to`
|
||||
stores. */
|
||||
struct CopyCommand : virtual StoreCommand
|
||||
{
|
||||
std::string srcUri, dstUri;
|
||||
|
||||
CopyCommand();
|
||||
|
||||
ref<Store> createStore() override;
|
||||
|
||||
ref<Store> getDstStore();
|
||||
};
|
||||
|
||||
struct EvalCommand : virtual StoreCommand, MixEvalArgs
|
||||
{
|
||||
EvalCommand();
|
||||
|
@ -169,7 +182,7 @@ public:
|
|||
|
||||
using StoreCommand::run;
|
||||
|
||||
virtual void run(ref<Store> store, BuiltPaths paths) = 0;
|
||||
virtual void run(ref<Store> store, BuiltPaths && paths) = 0;
|
||||
|
||||
void run(ref<Store> store) override;
|
||||
|
||||
|
@ -182,9 +195,9 @@ struct StorePathsCommand : public BuiltPathsCommand
|
|||
|
||||
using BuiltPathsCommand::run;
|
||||
|
||||
virtual void run(ref<Store> store, std::vector<StorePath> storePaths) = 0;
|
||||
virtual void run(ref<Store> store, std::vector<StorePath> && storePaths) = 0;
|
||||
|
||||
void run(ref<Store> store, BuiltPaths paths) override;
|
||||
void run(ref<Store> store, BuiltPaths && paths) override;
|
||||
};
|
||||
|
||||
/* A command that operates on exactly one store path. */
|
||||
|
@ -194,7 +207,7 @@ struct StorePathCommand : public StorePathsCommand
|
|||
|
||||
virtual void run(ref<Store> store, const StorePath & storePath) = 0;
|
||||
|
||||
void run(ref<Store> store, std::vector<StorePath> storePaths) override;
|
||||
void run(ref<Store> store, std::vector<StorePath> && storePaths) override;
|
||||
};
|
||||
|
||||
/* A helper class for registering commands globally. */
|
||||
|
|
|
@ -97,7 +97,7 @@ MixFlakeOptions::MixFlakeOptions()
|
|||
lockFlags.writeLockFile = false;
|
||||
lockFlags.inputOverrides.insert_or_assign(
|
||||
flake::parseInputPath(inputPath),
|
||||
parseFlakeRef(flakeRef, absPath(".")));
|
||||
parseFlakeRef(flakeRef, absPath("."), true));
|
||||
}}
|
||||
});
|
||||
|
||||
|
@ -158,7 +158,10 @@ SourceExprCommand::SourceExprCommand()
|
|||
|
||||
Strings SourceExprCommand::getDefaultFlakeAttrPaths()
|
||||
{
|
||||
return {"defaultPackage." + settings.thisSystem.get()};
|
||||
return {
|
||||
"packages." + settings.thisSystem.get() + ".default",
|
||||
"defaultPackage." + settings.thisSystem.get()
|
||||
};
|
||||
}
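Concretely, on an x86_64-linux machine the function above now returns two candidate attribute paths, newer schema first and legacy name as a fallback, and callers try them in order until one resolves:

    // Illustrative result of getDefaultFlakeAttrPaths() for thisSystem == "x86_64-linux":
    Strings defaults = {
        "packages.x86_64-linux.default",
        "defaultPackage.x86_64-linux",
    };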
|
||||
|
||||
Strings SourceExprCommand::getDefaultFlakeAttrPathPrefixes()
|
||||
|
@ -191,18 +194,21 @@ void SourceExprCommand::completeInstallable(std::string_view prefix)
|
|||
auto sep = prefix_.rfind('.');
|
||||
std::string searchWord;
|
||||
if (sep != std::string::npos) {
|
||||
searchWord = prefix_.substr(sep, std::string::npos);
|
||||
searchWord = prefix_.substr(sep + 1, std::string::npos);
|
||||
prefix_ = prefix_.substr(0, sep);
|
||||
} else {
|
||||
searchWord = prefix_;
|
||||
prefix_ = "";
|
||||
}
|
||||
|
||||
Value &v1(*findAlongAttrPath(*state, prefix_, *autoArgs, root).first);
|
||||
state->forceValue(v1);
|
||||
auto [v, pos] = findAlongAttrPath(*state, prefix_, *autoArgs, root);
|
||||
Value &v1(*v);
|
||||
state->forceValue(v1, pos);
|
||||
Value v2;
|
||||
state->autoCallFunction(*autoArgs, v1, v2);
|
||||
|
||||
completionType = ctAttrs;
|
||||
|
||||
if (v2.type() == nAttrs) {
|
||||
for (auto & i : *v2.attrs) {
|
||||
std::string name = i.name;
|
||||
|
@ -232,7 +238,9 @@ void completeFlakeRefWithFragment(
|
|||
prefix. */
|
||||
try {
|
||||
auto hash = prefix.find('#');
|
||||
if (hash != std::string::npos) {
|
||||
if (hash == std::string::npos) {
|
||||
completeFlakeRef(evalState->store, prefix);
|
||||
} else {
|
||||
auto fragment = prefix.substr(hash + 1);
|
||||
auto flakeRefS = std::string(prefix.substr(0, hash));
|
||||
// FIXME: do tilde expansion.
|
||||
|
@ -248,6 +256,8 @@ void completeFlakeRefWithFragment(
|
|||
flake. */
|
||||
attrPathPrefixes.push_back("");
|
||||
|
||||
completionType = ctAttrs;
|
||||
|
||||
for (auto & attrPathPrefixS : attrPathPrefixes) {
|
||||
auto attrPathPrefix = parseAttrPath(*evalState, attrPathPrefixS);
|
||||
auto attrPathS = attrPathPrefixS + std::string(fragment);
|
||||
|
@ -285,12 +295,13 @@ void completeFlakeRefWithFragment(
|
|||
} catch (Error & e) {
|
||||
warn(e.msg());
|
||||
}
|
||||
|
||||
completeFlakeRef(evalState->store, prefix);
|
||||
}
|
||||
|
||||
void completeFlakeRef(ref<Store> store, std::string_view prefix)
|
||||
{
|
||||
if (!settings.isExperimentalFeatureEnabled(Xp::Flakes))
|
||||
return;
|
||||
|
||||
if (prefix == "")
|
||||
completions->add(".");
|
||||
|
||||
|
@ -338,6 +349,18 @@ Installable::getCursor(EvalState & state)
|
|||
return cursors[0];
|
||||
}
|
||||
|
||||
static StorePath getDeriver(
|
||||
ref<Store> store,
|
||||
const Installable & i,
|
||||
const StorePath & drvPath)
|
||||
{
|
||||
auto derivers = store->queryValidDerivers(drvPath);
|
||||
if (derivers.empty())
|
||||
throw Error("'%s' does not have a known deriver", i.what());
|
||||
// FIXME: use all derivers?
|
||||
return *derivers.begin();
|
||||
}
|
||||
|
||||
struct InstallableStorePath : Installable
|
||||
{
|
||||
ref<Store> store;
|
||||
|
@ -346,7 +369,7 @@ struct InstallableStorePath : Installable
|
|||
InstallableStorePath(ref<Store> store, StorePath && storePath)
|
||||
: store(store), storePath(std::move(storePath)) { }
|
||||
|
||||
std::string what() override { return store->printStorePath(storePath); }
|
||||
std::string what() const override { return store->printStorePath(storePath); }
|
||||
|
||||
DerivedPaths toDerivedPaths() override
|
||||
{
|
||||
|
@ -367,6 +390,15 @@ struct InstallableStorePath : Installable
|
|||
}
|
||||
}
|
||||
|
||||
StorePathSet toDrvPaths(ref<Store> store) override
|
||||
{
|
||||
if (storePath.isDerivation()) {
|
||||
return {storePath};
|
||||
} else {
|
||||
return {getDeriver(store, *this, storePath)};
|
||||
}
|
||||
}
|
||||
|
||||
std::optional<StorePath> getStorePath() override
|
||||
{
|
||||
return storePath;
|
||||
|
@ -395,6 +427,14 @@ DerivedPaths InstallableValue::toDerivedPaths()
|
|||
return res;
|
||||
}
|
||||
|
||||
StorePathSet InstallableValue::toDrvPaths(ref<Store> store)
|
||||
{
|
||||
StorePathSet res;
|
||||
for (auto & drv : toDerivations())
|
||||
res.insert(drv.drvPath);
|
||||
return res;
|
||||
}
|
||||
|
||||
struct InstallableAttrPath : InstallableValue
|
||||
{
|
||||
SourceExprCommand & cmd;
|
||||
|
@ -405,12 +445,12 @@ struct InstallableAttrPath : InstallableValue
|
|||
: InstallableValue(state), cmd(cmd), v(allocRootValue(v)), attrPath(attrPath)
|
||||
{ }
|
||||
|
||||
std::string what() override { return attrPath; }
|
||||
std::string what() const override { return attrPath; }
|
||||
|
||||
std::pair<Value *, Pos> toValue(EvalState & state) override
|
||||
{
|
||||
auto [vRes, pos] = findAlongAttrPath(state, attrPath, *cmd.getAutoArgs(state), **v);
|
||||
state.forceValue(*vRes);
|
||||
state.forceValue(*vRes, pos);
|
||||
return {vRes, pos};
|
||||
}
|
||||
|
||||
|
@ -460,7 +500,7 @@ Value * InstallableFlake::getFlakeOutputs(EvalState & state, const flake::Locked
|
|||
auto aOutputs = vFlake->attrs->get(state.symbols.create("outputs"));
|
||||
assert(aOutputs);
|
||||
|
||||
state.forceValue(*aOutputs->value);
|
||||
state.forceValue(*aOutputs->value, [&]() { return aOutputs->value->determinePos(noPos); });
|
||||
|
||||
return aOutputs->value;
|
||||
}
|
||||
|
@ -485,7 +525,7 @@ ref<eval_cache::EvalCache> openEvalCache(
|
|||
auto vFlake = state.allocValue();
|
||||
flake::callFlake(state, *lockedFlake, *vFlake);
|
||||
|
||||
state.forceAttrs(*vFlake);
|
||||
state.forceAttrs(*vFlake, noPos);
|
||||
|
||||
auto aOutputs = vFlake->attrs->get(state.symbols.create("outputs"));
|
||||
assert(aOutputs);
|
||||
|
@ -508,13 +548,14 @@ InstallableFlake::InstallableFlake(
|
|||
SourceExprCommand * cmd,
|
||||
ref<EvalState> state,
|
||||
FlakeRef && flakeRef,
|
||||
Strings && attrPaths,
|
||||
Strings && prefixes,
|
||||
std::string_view fragment,
|
||||
Strings attrPaths,
|
||||
Strings prefixes,
|
||||
const flake::LockFlags & lockFlags)
|
||||
: InstallableValue(state),
|
||||
flakeRef(flakeRef),
|
||||
attrPaths(attrPaths),
|
||||
prefixes(prefixes),
|
||||
attrPaths(fragment == "" ? attrPaths : Strings{(std::string) fragment}),
|
||||
prefixes(fragment == "" ? Strings{} : prefixes),
|
||||
lockFlags(lockFlags)
|
||||
{
|
||||
if (cmd && cmd->getAutoArgs(*state)->size())
|
||||
|
@ -529,6 +570,8 @@ std::tuple<std::string, FlakeRef, InstallableValue::DerivationInfo> InstallableF
|
|||
auto root = cache->getRoot();
|
||||
|
||||
for (auto & attrPath : getActualAttrPaths()) {
|
||||
debug("trying flake output attribute '%s'", attrPath);
|
||||
|
||||
auto attr = root->findAlongAttrPath(
|
||||
parseAttrPath(*state, attrPath),
|
||||
true
|
||||
|
@ -572,7 +615,7 @@ std::pair<Value *, Pos> InstallableFlake::toValue(EvalState & state)
|
|||
for (auto & attrPath : getActualAttrPaths()) {
|
||||
try {
|
||||
auto [v, pos] = findAlongAttrPath(state, attrPath, *emptyArgs, *vOutputs);
|
||||
state.forceValue(*v);
|
||||
state.forceValue(*v, pos);
|
||||
return {v, pos};
|
||||
} catch (AttrPathNotFound & e) {
|
||||
}
|
||||
|
@ -671,7 +714,8 @@ std::vector<std::shared_ptr<Installable>> SourceExprCommand::parseInstallables(
|
|||
this,
|
||||
getEvalState(),
|
||||
std::move(flakeRef),
|
||||
fragment == "" ? getDefaultFlakeAttrPaths() : Strings{fragment},
|
||||
fragment,
|
||||
getDefaultFlakeAttrPaths(),
|
||||
getDefaultFlakeAttrPathPrefixes(),
|
||||
lockFlags));
|
||||
continue;
|
||||
|
@ -697,13 +741,13 @@ std::shared_ptr<Installable> SourceExprCommand::parseInstallable(
|
|||
BuiltPaths getBuiltPaths(ref<Store> evalStore, ref<Store> store, const DerivedPaths & hopefullyBuiltPaths)
|
||||
{
|
||||
BuiltPaths res;
|
||||
for (auto & b : hopefullyBuiltPaths)
|
||||
for (const auto & b : hopefullyBuiltPaths)
|
||||
std::visit(
|
||||
overloaded{
|
||||
[&](DerivedPath::Opaque bo) {
|
||||
[&](const DerivedPath::Opaque & bo) {
|
||||
res.push_back(BuiltPath::Opaque{bo.path});
|
||||
},
|
||||
[&](DerivedPath::Built bfd) {
|
||||
[&](const DerivedPath::Built & bfd) {
|
||||
OutputPathMap outputs;
|
||||
auto drv = evalStore->readDerivation(bfd.drvPath);
|
||||
auto outputHashes = staticOutputHashes(*evalStore, drv); // FIXME: expensive
|
||||
|
@ -714,7 +758,7 @@ BuiltPaths getBuiltPaths(ref<Store> evalStore, ref<Store> store, const DerivedPa
|
|||
"the derivation '%s' doesn't have an output named '%s'",
|
||||
store->printStorePath(bfd.drvPath), output);
|
||||
if (settings.isExperimentalFeatureEnabled(
|
||||
"ca-derivations")) {
|
||||
Xp::CaDerivations)) {
|
||||
auto outputId =
|
||||
DrvOutput{outputHashes.at(output), output};
|
||||
auto realisation =
|
||||
|
@ -823,19 +867,15 @@ StorePathSet toDerivations(
|
|||
{
|
||||
StorePathSet drvPaths;
|
||||
|
||||
for (auto & i : installables)
|
||||
for (auto & b : i->toDerivedPaths())
|
||||
for (const auto & i : installables)
|
||||
for (const auto & b : i->toDerivedPaths())
|
||||
std::visit(overloaded {
|
||||
[&](DerivedPath::Opaque bo) {
|
||||
[&](const DerivedPath::Opaque & bo) {
|
||||
if (!useDeriver)
|
||||
throw Error("argument '%s' did not evaluate to a derivation", i->what());
|
||||
auto derivers = store->queryValidDerivers(bo.path);
|
||||
if (derivers.empty())
|
||||
throw Error("'%s' does not have a known deriver", i->what());
|
||||
// FIXME: use all derivers?
|
||||
drvPaths.insert(*derivers.begin());
|
||||
drvPaths.insert(getDeriver(store, *i, bo.path));
|
||||
},
|
||||
[&](DerivedPath::Built bfd) {
|
||||
[&](const DerivedPath::Built & bfd) {
|
||||
drvPaths.insert(bfd.drvPath);
|
||||
},
|
||||
}, b.raw());
|
||||
|
|
|
@ -33,10 +33,15 @@ struct Installable
|
|||
{
|
||||
virtual ~Installable() { }
|
||||
|
||||
virtual std::string what() = 0;
|
||||
virtual std::string what() const = 0;
|
||||
|
||||
virtual DerivedPaths toDerivedPaths() = 0;
|
||||
|
||||
virtual StorePathSet toDrvPaths(ref<Store> store)
|
||||
{
|
||||
throw Error("'%s' cannot be converted to a derivation path", what());
|
||||
}
|
||||
|
||||
DerivedPath toDerivedPath();
|
||||
|
||||
UnresolvedApp toApp(EvalState & state);
|
||||
|
@ -81,6 +86,8 @@ struct InstallableValue : Installable
|
|||
virtual std::vector<DerivationInfo> toDerivations() = 0;
|
||||
|
||||
DerivedPaths toDerivedPaths() override;
|
||||
|
||||
StorePathSet toDrvPaths(ref<Store> store) override;
|
||||
};
|
||||
|
||||
struct InstallableFlake : InstallableValue
|
||||
|
@ -95,11 +102,12 @@ struct InstallableFlake : InstallableValue
|
|||
SourceExprCommand * cmd,
|
||||
ref<EvalState> state,
|
||||
FlakeRef && flakeRef,
|
||||
Strings && attrPaths,
|
||||
Strings && prefixes,
|
||||
std::string_view fragment,
|
||||
Strings attrPaths,
|
||||
Strings prefixes,
|
||||
const flake::LockFlags & lockFlags);
|
||||
|
||||
std::string what() override { return flakeRef.to_string() + "#" + *attrPaths.begin(); }
|
||||
std::string what() const override { return flakeRef.to_string() + "#" + *attrPaths.begin(); }
|
||||
|
||||
std::vector<std::string> getActualAttrPaths();
|
||||
|
||||
|
|
|
@@ -8,7 +8,7 @@ libcmd_SOURCES := $(wildcard $(d)/*.cc)
 
 libcmd_CXXFLAGS += -I src/libutil -I src/libstore -I src/libexpr -I src/libmain -I src/libfetchers
 
-libcmd_LDFLAGS += -llowdown -pthread
+libcmd_LDFLAGS += $(LOWDOWN_LIBS) -pthread
 
 libcmd_LIBS = libstore libutil libexpr libmain libfetchers
 
|
|
@@ -25,7 +25,7 @@ std::string renderMarkdownToTerminal(std::string_view markdown)
     Finally freeDoc([&]() { lowdown_doc_free(doc); });
 
     size_t maxn = 0;
-    auto node = lowdown_doc_parse(doc, &maxn, markdown.data(), markdown.size());
+    auto node = lowdown_doc_parse(doc, &maxn, markdown.data(), markdown.size(), nullptr);
     if (!node)
         throw Error("cannot parse Markdown document");
     Finally freeNode([&]() { lowdown_node_free(node); });
@@ -40,7 +40,7 @@ std::string renderMarkdownToTerminal(std::string_view markdown)
         throw Error("cannot allocate Markdown output buffer");
     Finally freeBuffer([&]() { lowdown_buf_free(buf); });
 
-    int rndr_res = lowdown_term_rndr(buf, nullptr, renderer, node);
+    int rndr_res = lowdown_term_rndr(buf, renderer, node);
     if (!rndr_res)
         throw Error("allocation error while rendering Markdown");
 
|
@@ -9,7 +9,7 @@ namespace nix {
 static Strings parseAttrPath(std::string_view s)
 {
     Strings res;
-    string cur;
+    std::string cur;
     auto i = s.begin();
     while (i != s.end()) {
         if (*i == '.') {
@@ -41,7 +41,7 @@ std::vector<Symbol> parseAttrPath(EvalState & state, std::string_view s)
 }
 
 
-std::pair<Value *, Pos> findAlongAttrPath(EvalState & state, const string & attrPath,
+std::pair<Value *, Pos> findAlongAttrPath(EvalState & state, const std::string & attrPath,
     Bindings & autoArgs, Value & vIn)
 {
     Strings tokens = parseAttrPath(attrPath);
@@ -58,7 +58,7 @@ std::pair<Value *, Pos> findAlongAttrPath(EvalState & state, const string & attr
             Value * vNew = state.allocValue();
             state.autoCallFunction(autoArgs, *v, *vNew);
             v = vNew;
-            state.forceValue(*v);
+            state.forceValue(*v, noPos);
 
             /* It should evaluate to either a set or an expression,
                according to what is specified in the attrPath. */
@@ -121,7 +121,7 @@ Pos findPackageFilename(EvalState & state, Value & v, std::string what)
     std::string filename(pos, 0, colon);
     unsigned int lineno;
     try {
-        lineno = std::stoi(std::string(pos, colon + 1));
+        lineno = std::stoi(std::string(pos, colon + 1, std::string::npos));
     } catch (std::invalid_argument & e) {
         throw ParseError("cannot parse line number '%s'", pos);
     }
@@ -10,8 +10,11 @@ namespace nix {
 MakeError(AttrPathNotFound, Error);
 MakeError(NoPositionInfo, Error);
 
-std::pair<Value *, Pos> findAlongAttrPath(EvalState & state, const string & attrPath,
-    Bindings & autoArgs, Value & vIn);
+std::pair<Value *, Pos> findAlongAttrPath(
+    EvalState & state,
+    const std::string & attrPath,
+    Bindings & autoArgs,
+    Value & vIn);
 
 /* Heuristic to find the filename and lineno or a nix value. */
 Pos findPackageFilename(EvalState & state, Value & v, std::string what);
@ -7,26 +7,19 @@
|
|||
namespace nix {
|
||||
|
||||
|
||||
|
||||
/* Allocate a new array of attributes for an attribute set with a specific
|
||||
capacity. The space is implicitly reserved after the Bindings
|
||||
structure. */
|
||||
Bindings * EvalState::allocBindings(size_t capacity)
|
||||
{
|
||||
if (capacity == 0)
|
||||
return &emptyBindings;
|
||||
if (capacity > std::numeric_limits<Bindings::size_t>::max())
|
||||
throw Error("attribute set of size %d is too big", capacity);
|
||||
return new (allocBytes(sizeof(Bindings) + sizeof(Attr) * capacity)) Bindings((Bindings::size_t) capacity);
|
||||
}
|
||||
|
||||
|
||||
void EvalState::mkAttrs(Value & v, size_t capacity)
|
||||
{
|
||||
if (capacity == 0) {
|
||||
v = vEmptySet;
|
||||
return;
|
||||
}
|
||||
v.mkAttrs(allocBindings(capacity));
|
||||
nrAttrsets++;
|
||||
nrAttrsInAttrsets += capacity;
|
||||
return new (allocBytes(sizeof(Bindings) + sizeof(Attr) * capacity)) Bindings((Bindings::size_t) capacity);
|
||||
}
|
||||
|
||||
|
||||
|
@ -41,15 +34,36 @@ Value * EvalState::allocAttr(Value & vAttrs, const Symbol & name)
|
|||
}
|
||||
|
||||
|
||||
Value * EvalState::allocAttr(Value & vAttrs, const std::string & name)
|
||||
Value * EvalState::allocAttr(Value & vAttrs, std::string_view name)
|
||||
{
|
||||
return allocAttr(vAttrs, symbols.create(name));
|
||||
}
|
||||
|
||||
|
||||
Value & BindingsBuilder::alloc(const Symbol & name, ptr<Pos> pos)
|
||||
{
|
||||
auto value = state.allocValue();
|
||||
bindings->push_back(Attr(name, value, pos));
|
||||
return *value;
|
||||
}
|
||||
|
||||
|
||||
Value & BindingsBuilder::alloc(std::string_view name, ptr<Pos> pos)
|
||||
{
|
||||
return alloc(state.symbols.create(name), pos);
|
||||
}
|
||||
|
||||
|
||||
void Bindings::sort()
|
||||
{
|
||||
std::sort(begin(), end());
|
||||
if (size_) std::sort(begin(), end());
|
||||
}
|
||||
|
||||
|
||||
Value & Value::mkAttrs(BindingsBuilder & bindings)
|
||||
{
|
||||
mkAttrs(bindings.finish());
|
||||
return *this;
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -105,7 +105,7 @@ public:
|
|||
for (size_t n = 0; n < size_; n++)
|
||||
res.emplace_back(&attrs[n]);
|
||||
std::sort(res.begin(), res.end(), [](const Attr * a, const Attr * b) {
|
||||
return (const string &) a->name < (const string &) b->name;
|
||||
return (const std::string &) a->name < (const std::string &) b->name;
|
||||
});
|
||||
return res;
|
||||
}
|
||||
|
@ -113,5 +113,52 @@ public:
|
|||
friend class EvalState;
|
||||
};
|
||||
|
||||
/* A wrapper around Bindings that ensures that its always in sorted
|
||||
order at the end. The only way to consume a BindingsBuilder is to
|
||||
call finish(), which sorts the bindings. */
|
||||
class BindingsBuilder
|
||||
{
|
||||
Bindings * bindings;
|
||||
|
||||
public:
|
||||
// needed by std::back_inserter
|
||||
using value_type = Attr;
|
||||
|
||||
EvalState & state;
|
||||
|
||||
BindingsBuilder(EvalState & state, Bindings * bindings)
|
||||
: bindings(bindings), state(state)
|
||||
{ }
|
||||
|
||||
void insert(Symbol name, Value * value, ptr<Pos> pos = ptr(&noPos))
|
||||
{
|
||||
insert(Attr(name, value, pos));
|
||||
}
|
||||
|
||||
void insert(const Attr & attr)
|
||||
{
|
||||
push_back(attr);
|
||||
}
|
||||
|
||||
void push_back(const Attr & attr)
|
||||
{
|
||||
bindings->push_back(attr);
|
||||
}
|
||||
|
||||
Value & alloc(const Symbol & name, ptr<Pos> pos = ptr(&noPos));
|
||||
|
||||
Value & alloc(std::string_view name, ptr<Pos> pos = ptr(&noPos));
|
||||
|
||||
Bindings * finish()
|
||||
{
|
||||
bindings->sort();
|
||||
return bindings;
|
||||
}
|
||||
|
||||
Bindings * alreadySorted()
|
||||
{
|
||||
return bindings;
|
||||
}
|
||||
};
|
||||
|
||||
}
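The new BindingsBuilder above replaces the old allocBindings + push_back + manual sort() sequence. A usage sketch, assuming an EvalState & state and the Value helpers touched elsewhere in this same merge (not compiled here, purely illustrative):

    // Build a two-attribute set; finish() (called via Value::mkAttrs) sorts the bindings.
    auto attrs = state.buildBindings(2);
    attrs.alloc("name").mkString("hello");
    attrs.alloc("meta").mkBool(false);

    Value vResult;
    vResult.mkAttrs(attrs);   // equivalent to vResult.mkAttrs(attrs.finish())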
|
||||
|
|
|
@ -73,30 +73,29 @@ MixEvalArgs::MixEvalArgs()
|
|||
|
||||
Bindings * MixEvalArgs::getAutoArgs(EvalState & state)
|
||||
{
|
||||
Bindings * res = state.allocBindings(autoArgs.size());
|
||||
auto res = state.buildBindings(autoArgs.size());
|
||||
for (auto & i : autoArgs) {
|
||||
Value * v = state.allocValue();
|
||||
auto v = state.allocValue();
|
||||
if (i.second[0] == 'E')
|
||||
state.mkThunk_(*v, state.parseExprFromString(string(i.second, 1), absPath(".")));
|
||||
state.mkThunk_(*v, state.parseExprFromString(i.second.substr(1), absPath(".")));
|
||||
else
|
||||
mkString(*v, string(i.second, 1));
|
||||
res->push_back(Attr(state.symbols.create(i.first), v));
|
||||
v->mkString(((std::string_view) i.second).substr(1));
|
||||
res.insert(state.symbols.create(i.first), v);
|
||||
}
|
||||
res->sort();
|
||||
return res;
|
||||
return res.finish();
|
||||
}
|
||||
|
||||
Path lookupFileArg(EvalState & state, string s)
|
||||
Path lookupFileArg(EvalState & state, std::string_view s)
|
||||
{
|
||||
if (isUri(s)) {
|
||||
return state.store->toRealPath(
|
||||
fetchers::downloadTarball(
|
||||
state.store, resolveUri(s), "source", false).first.storePath);
|
||||
} else if (s.size() > 2 && s.at(0) == '<' && s.at(s.size() - 1) == '>') {
|
||||
Path p = s.substr(1, s.size() - 2);
|
||||
Path p(s.substr(1, s.size() - 2));
|
||||
return state.findFile(p);
|
||||
} else
|
||||
return absPath(s);
|
||||
return absPath(std::string(s));
|
||||
}
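lookupFileArg above dispatches on the argument's shape; roughly, with illustrative argument values:

    // URI            -> fetched as a tarball and resolved to the unpacked store path
    lookupFileArg(state, "https://example.org/nixexprs.tar.xz");
    // <...>          -> resolved through the Nix search path (NIX_PATH / -I)
    lookupFileArg(state, "<nixpkgs>");
    // anything else  -> made absolute relative to the current directory
    lookupFileArg(state, "./release.nix");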
|
||||
|
||||
}
|
||||
|
|
|
@ -22,6 +22,6 @@ private:
|
|||
std::map<std::string, std::string> autoArgs;
|
||||
};
|
||||
|
||||
Path lookupFileArg(EvalState & state, string s);
|
||||
Path lookupFileArg(EvalState & state, std::string_view s);
|
||||
|
||||
}
|
||||
|
|
|
@ -336,7 +336,7 @@ Value & AttrCursor::getValue()
|
|||
if (!_value) {
|
||||
if (parent) {
|
||||
auto & vParent = parent->first->getValue();
|
||||
root->state.forceAttrs(vParent);
|
||||
root->state.forceAttrs(vParent, noPos);
|
||||
auto attr = vParent.attrs->get(parent->second);
|
||||
if (!attr)
|
||||
throw Error("attribute '%s' is unexpectedly missing", getAttrPathStr());
|
||||
|
@ -381,7 +381,7 @@ Value & AttrCursor::forceValue()
|
|||
auto & v = getValue();
|
||||
|
||||
try {
|
||||
root->state.forceValue(v);
|
||||
root->state.forceValue(v, noPos);
|
||||
} catch (EvalError &) {
|
||||
debug("setting '%s' to failed", getAttrPathStr());
|
||||
if (root->db)
|
||||
|
@ -596,7 +596,7 @@ std::vector<Symbol> AttrCursor::getAttrs()
|
|||
for (auto & attr : *getValue().attrs)
|
||||
attrs.push_back(attr.name);
|
||||
std::sort(attrs.begin(), attrs.end(), [](const Symbol & a, const Symbol & b) {
|
||||
return (const string &) a < (const string &) b;
|
||||
return (const std::string &) a < (const std::string &) b;
|
||||
});
|
||||
|
||||
if (root->db)
|
||||
|
|
|
@ -15,12 +15,6 @@ LocalNoInlineNoReturn(void throwEvalError(const Pos & pos, const char * s))
|
|||
});
|
||||
}
|
||||
|
||||
LocalNoInlineNoReturn(void throwTypeError(const char * s, const Value & v))
|
||||
{
|
||||
throw TypeError(s, showType(v));
|
||||
}
|
||||
|
||||
|
||||
LocalNoInlineNoReturn(void throwTypeError(const Pos & pos, const char * s, const Value & v))
|
||||
{
|
||||
throw TypeError({
|
||||
|
@ -31,6 +25,13 @@ LocalNoInlineNoReturn(void throwTypeError(const Pos & pos, const char * s, const
|
|||
|
||||
|
||||
void EvalState::forceValue(Value & v, const Pos & pos)
|
||||
{
|
||||
forceValue(v, [&]() { return pos; });
|
||||
}
|
||||
|
||||
|
||||
template<typename Callable>
|
||||
void EvalState::forceValue(Value & v, Callable getPos)
|
||||
{
|
||||
if (v.isThunk()) {
|
||||
Env * env = v.thunk.env;
|
||||
|
@ -47,31 +48,22 @@ void EvalState::forceValue(Value & v, const Pos & pos)
|
|||
else if (v.isApp())
|
||||
callFunction(*v.app.left, *v.app.right, v, noPos);
|
||||
else if (v.isBlackhole())
|
||||
throwEvalError(pos, "infinite recursion encountered");
|
||||
}
|
||||
|
||||
|
||||
inline void EvalState::forceAttrs(Value & v)
|
||||
{
|
||||
forceValue(v);
|
||||
if (v.type() != nAttrs)
|
||||
throwTypeError("value is %1% while a set was expected", v);
|
||||
throwEvalError(getPos(), "infinite recursion encountered");
|
||||
}
|
||||
|
||||
|
||||
inline void EvalState::forceAttrs(Value & v, const Pos & pos)
|
||||
{
|
||||
forceValue(v, pos);
|
||||
if (v.type() != nAttrs)
|
||||
throwTypeError(pos, "value is %1% while a set was expected", v);
|
||||
forceAttrs(v, [&]() { return pos; });
|
||||
}
|
||||
|
||||
|
||||
inline void EvalState::forceList(Value & v)
|
||||
template <typename Callable>
|
||||
inline void EvalState::forceAttrs(Value & v, Callable getPos)
|
||||
{
|
||||
forceValue(v);
|
||||
if (!v.isList())
|
||||
throwTypeError("value is %1% while a list was expected", v);
|
||||
forceValue(v, getPos);
|
||||
if (v.type() != nAttrs)
|
||||
throwTypeError(getPos(), "value is %1% while a set was expected", v);
|
||||
}
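The template overloads above take a Callable getPos instead of an already-computed Pos so that the potentially costly position lookup only happens on the error path. A self-contained sketch of the same idiom, with made-up names:

    #include <stdexcept>
    #include <string>

    // 'getPos' is only invoked if the check actually fails.
    template<typename Callable>
    void requirePositive(int v, Callable getPos)
    {
        if (v <= 0)
            throw std::runtime_error("value must be positive, at " + getPos());
    }

    void example()
    {
        requirePositive(42, [] { return std::string("line 7, column 3"); });  // getPos never called
    }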
|
||||
|
||||
|
||||
|
|
File diff suppressed because it is too large.
|
@ -1,10 +1,12 @@
|
|||
#pragma once
|
||||
|
||||
#include "attr-set.hh"
|
||||
#include "types.hh"
|
||||
#include "value.hh"
|
||||
#include "nixexpr.hh"
|
||||
#include "symbol-table.hh"
|
||||
#include "config.hh"
|
||||
#include "experimental-features.hh"
|
||||
|
||||
#include <map>
|
||||
#include <optional>
|
||||
|
@ -43,8 +45,6 @@ struct Env
|
|||
};
|
||||
|
||||
|
||||
Value & mkString(Value & v, std::string_view s, const PathSet & context = PathSet());
|
||||
|
||||
void copyContext(const Value & v, PathSet & context);
|
||||
|
||||
|
||||
|
@ -81,7 +81,8 @@ public:
|
|||
sContentAddressed,
|
||||
sOutputHash, sOutputHashAlgo, sOutputHashMode,
|
||||
sRecurseForDerivations,
|
||||
sDescription, sSelf, sEpsilon;
|
||||
sDescription, sSelf, sEpsilon, sStartSet, sOperator, sKey, sPath,
|
||||
sPrefix;
|
||||
Symbol sDerivationNix;
|
||||
|
||||
/* If set, force copying files to the Nix store even if they
|
||||
|
@ -92,7 +93,7 @@ public:
|
|||
mode. */
|
||||
std::optional<PathSet> allowedPaths;
|
||||
|
||||
Value vEmptySet;
|
||||
Bindings emptyBindings;
|
||||
|
||||
/* Store used to materialise .drv files. */
|
||||
const ref<Store> store;
|
||||
|
@ -132,6 +133,9 @@ private:
|
|||
/* Cache used by prim_match(). */
|
||||
std::shared_ptr<RegexCache> regexCache;
|
||||
|
||||
/* Allocation cache for GC'd Value objects. */
|
||||
std::shared_ptr<void *> valueAllocCache;
|
||||
|
||||
public:
|
||||
|
||||
EvalState(
|
||||
|
@ -140,10 +144,25 @@ public:
|
|||
std::shared_ptr<Store> buildStore = nullptr);
|
||||
~EvalState();
|
||||
|
||||
void addToSearchPath(const string & s);
|
||||
void requireExperimentalFeatureOnEvaluation(
|
||||
const ExperimentalFeature &,
|
||||
const std::string_view fName,
|
||||
const Pos & pos
|
||||
);
|
||||
|
||||
void addToSearchPath(const std::string & s);
|
||||
|
||||
SearchPath getSearchPath() { return searchPath; }
|
||||
|
||||
/* Allow access to a path. */
|
||||
void allowPath(const Path & path);
|
||||
|
||||
/* Allow access to a store path. Note that this gets remapped to
|
||||
the real store path if `store` is a chroot store. */
|
||||
void allowPath(const StorePath & storePath);
|
||||
|
||||
/* Check whether access to a path is allowed and throw an error if
|
||||
not. Otherwise return the canonicalised path. */
|
||||
Path checkSourcePath(const Path & path);
|
||||
|
||||
void checkURI(const std::string & uri);
|
||||
|
@ -162,8 +181,8 @@ public:
|
|||
Expr * parseExprFromFile(const Path & path, StaticEnv & staticEnv);
|
||||
|
||||
/* Parse a Nix expression from the specified string. */
|
||||
Expr * parseExprFromString(std::string_view s, const Path & basePath, StaticEnv & staticEnv);
|
||||
Expr * parseExprFromString(std::string_view s, const Path & basePath);
|
||||
Expr * parseExprFromString(std::string s, const Path & basePath, StaticEnv & staticEnv);
|
||||
Expr * parseExprFromString(std::string s, const Path & basePath);
|
||||
|
||||
Expr * parseStdin();
|
||||
|
||||
|
@ -183,8 +202,8 @@ public:
|
|||
void resetFileCache();
|
||||
|
||||
/* Look up a file in the search path. */
|
||||
Path findFile(const string & path);
|
||||
Path findFile(SearchPath & searchPath, const string & path, const Pos & pos = noPos);
|
||||
Path findFile(const std::string_view path);
|
||||
Path findFile(SearchPath & searchPath, const std::string_view path, const Pos & pos = noPos);
|
||||
|
||||
/* If the specified search path element is a URI, download it. */
|
||||
std::pair<bool, std::string> resolveSearchPathElem(const SearchPathElem & elem);
|
||||
|
@ -203,7 +222,10 @@ public:
|
|||
of the evaluation of the thunk. If `v' is a delayed function
|
||||
application, call the function and overwrite `v' with the
|
||||
result. Otherwise, this is a no-op. */
|
||||
inline void forceValue(Value & v, const Pos & pos = noPos);
|
||||
inline void forceValue(Value & v, const Pos & pos);
|
||||
|
||||
template <typename Callable>
|
||||
inline void forceValue(Value & v, Callable getPos);
|
||||
|
||||
/* Force a value, then recursively force list elements and
|
||||
attributes. */
|
||||
|
@ -213,31 +235,34 @@ public:
|
|||
NixInt forceInt(Value & v, const Pos & pos);
|
||||
NixFloat forceFloat(Value & v, const Pos & pos);
|
||||
bool forceBool(Value & v, const Pos & pos);
|
||||
inline void forceAttrs(Value & v);
|
||||
inline void forceAttrs(Value & v, const Pos & pos);
|
||||
inline void forceList(Value & v);
|
||||
|
||||
void forceAttrs(Value & v, const Pos & pos);
|
||||
|
||||
template <typename Callable>
|
||||
inline void forceAttrs(Value & v, Callable getPos);
|
||||
|
||||
inline void forceList(Value & v, const Pos & pos);
|
||||
void forceFunction(Value & v, const Pos & pos); // either lambda or primop
|
||||
string forceString(Value & v, const Pos & pos = noPos);
|
||||
string forceString(Value & v, PathSet & context, const Pos & pos = noPos);
|
||||
string forceStringNoCtx(Value & v, const Pos & pos = noPos);
|
||||
std::string_view forceString(Value & v, const Pos & pos = noPos);
|
||||
std::string_view forceString(Value & v, PathSet & context, const Pos & pos = noPos);
|
||||
std::string_view forceStringNoCtx(Value & v, const Pos & pos = noPos);
|
||||
|
||||
/* Return true iff the value `v' denotes a derivation (i.e. a
|
||||
set with attribute `type = "derivation"'). */
|
||||
bool isDerivation(Value & v);
|
||||
|
||||
std::optional<string> tryAttrsToString(const Pos & pos, Value & v,
|
||||
std::optional<std::string> tryAttrsToString(const Pos & pos, Value & v,
|
||||
PathSet & context, bool coerceMore = false, bool copyToStore = true);
|
||||
|
||||
/* String coercion. Converts strings, paths and derivations to a
|
||||
string. If `coerceMore' is set, also converts nulls, integers,
|
||||
booleans and lists to a string. If `copyToStore' is set,
|
||||
referenced paths are copied to the Nix store as a side effect. */
|
||||
string coerceToString(const Pos & pos, Value & v, PathSet & context,
|
||||
BackedStringView coerceToString(const Pos & pos, Value & v, PathSet & context,
|
||||
bool coerceMore = false, bool copyToStore = true,
|
||||
bool canonicalizePath = true);
|
||||
|
||||
string copyPathToStore(PathSet & context, const Path & path);
|
||||
std::string copyPathToStore(PathSet & context, const Path & path);
|
||||
|
||||
/* Path coercion. Converts strings, paths and derivations to a
|
||||
path. The result is guaranteed to be a canonicalised, absolute
|
||||
|
@ -259,16 +284,18 @@ private:
|
|||
|
||||
void createBaseEnv();
|
||||
|
||||
Value * addConstant(const string & name, Value & v);
|
||||
Value * addConstant(const std::string & name, Value & v);
|
||||
|
||||
Value * addPrimOp(const string & name,
|
||||
void addConstant(const std::string & name, Value * v);
|
||||
|
||||
Value * addPrimOp(const std::string & name,
|
||||
size_t arity, PrimOpFun primOp);
|
||||
|
||||
Value * addPrimOp(PrimOp && primOp);
|
||||
|
||||
public:
|
||||
|
||||
Value & getBuiltin(const string & name);
|
||||
Value & getBuiltin(const std::string & name);
|
||||
|
||||
struct Doc
|
||||
{
|
||||
|
@ -289,8 +316,8 @@ private:
|
|||
friend struct ExprAttrs;
|
||||
friend struct ExprLet;
|
||||
|
||||
Expr * parse(const char * text, FileOrigin origin, const Path & path,
|
||||
const Path & basePath, StaticEnv & staticEnv);
|
||||
Expr * parse(char * text, size_t length, FileOrigin origin, const PathView path,
|
||||
const PathView basePath, StaticEnv & staticEnv);
|
||||
|
||||
public:
|
||||
|
||||
|
@ -300,8 +327,14 @@ public:
|
|||
|
||||
bool isFunctor(Value & fun);
|
||||
|
||||
void callFunction(Value & fun, Value & arg, Value & v, const Pos & pos);
|
||||
void callPrimOp(Value & fun, Value & arg, Value & v, const Pos & pos);
|
||||
// FIXME: use std::span
|
||||
void callFunction(Value & fun, size_t nrArgs, Value * * args, Value & vRes, const Pos & pos);
|
||||
|
||||
void callFunction(Value & fun, Value & arg, Value & vRes, const Pos & pos)
|
||||
{
|
||||
Value * args[] = {&arg};
|
||||
callFunction(fun, 1, args, vRes, pos);
|
||||
}
|
||||
|
||||
/* Automatically call a function for which each argument has a
|
||||
default value or has a binding in the `args' map. */
|
||||
|
@ -312,12 +345,16 @@ public:
|
|||
Env & allocEnv(size_t size);
|
||||
|
||||
Value * allocAttr(Value & vAttrs, const Symbol & name);
|
||||
Value * allocAttr(Value & vAttrs, const std::string & name);
|
||||
Value * allocAttr(Value & vAttrs, std::string_view name);
|
||||
|
||||
Bindings * allocBindings(size_t capacity);
|
||||
|
||||
BindingsBuilder buildBindings(size_t capacity)
|
||||
{
|
||||
return BindingsBuilder(*this, allocBindings(capacity));
|
||||
}
|
||||
|
||||
void mkList(Value & v, size_t length);
|
||||
void mkAttrs(Value & v, size_t capacity);
|
||||
void mkThunk_(Value & v, Expr * expr);
|
||||
void mkPos(Value & v, ptr<Pos> pos);
|
||||
|
||||
|
@ -326,7 +363,10 @@ public:
|
|||
/* Print statistics. */
|
||||
void printStats();
|
||||
|
||||
void realiseContext(const PathSet & context);
|
||||
/* Realise the given context, and return a mapping from the placeholders
|
||||
* used to construct the associated value to their final store path
|
||||
*/
|
||||
[[nodiscard]] StringMap realiseContext(const PathSet & context);
|
||||
|
||||
private:
|
||||
|
||||
|
@ -367,16 +407,19 @@ private:
|
|||
friend struct ExprSelect;
|
||||
friend void prim_getAttr(EvalState & state, const Pos & pos, Value * * args, Value & v);
|
||||
friend void prim_match(EvalState & state, const Pos & pos, Value * * args, Value & v);
|
||||
friend void prim_split(EvalState & state, const Pos & pos, Value * * args, Value & v);
|
||||
|
||||
friend struct Value;
|
||||
};
|
||||
|
||||
|
||||
/* Return a string representing the type of the value `v'. */
|
||||
string showType(ValueType type);
|
||||
string showType(const Value & v);
|
||||
std::string_view showType(ValueType type);
|
||||
std::string showType(const Value & v);
|
||||
|
||||
/* Decode a context string ‘!<name>!<path>’ into a pair <path,
|
||||
name>. */
|
||||
std::pair<string, string> decodeContext(std::string_view s);
|
||||
std::pair<std::string, std::string> decodeContext(std::string_view s);
|
||||
|
||||
/* If `path' refers to a directory, then append "/default.nix". */
|
||||
Path resolveExprPath(Path path);
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
#include "flake.hh"
|
||||
#include "globals.hh"
|
||||
|
||||
#include <nlohmann/json.hpp>
|
||||
|
||||
|
@ -37,11 +38,11 @@ void ConfigFile::apply()
|
|||
|
||||
// FIXME: Move into libutil/config.cc.
|
||||
std::string valueS;
|
||||
if (auto s = std::get_if<std::string>(&value))
|
||||
if (auto* s = std::get_if<std::string>(&value))
|
||||
valueS = *s;
|
||||
else if (auto n = std::get_if<int64_t>(&value))
|
||||
valueS = fmt("%d", n);
|
||||
else if (auto b = std::get_if<Explicit<bool>>(&value))
|
||||
else if (auto* n = std::get_if<int64_t>(&value))
|
||||
valueS = fmt("%d", *n);
|
||||
else if (auto* b = std::get_if<Explicit<bool>>(&value))
|
||||
valueS = b->t ? "true" : "false";
|
||||
else if (auto ss = std::get_if<std::vector<std::string>>(&value))
|
||||
valueS = concatStringsSep(" ", *ss); // FIXME: evil
|
||||
|
@ -52,21 +53,19 @@ void ConfigFile::apply()
|
|||
auto trustedList = readTrustedList();
|
||||
|
||||
bool trusted = false;
|
||||
|
||||
if (auto saved = get(get(trustedList, name).value_or(std::map<std::string, bool>()), valueS)) {
|
||||
if (nix::settings.acceptFlakeConfig){
|
||||
trusted = true;
|
||||
} else if (auto saved = get(get(trustedList, name).value_or(std::map<std::string, bool>()), valueS)) {
|
||||
trusted = *saved;
|
||||
warn("Using saved setting for '%s = %s' from ~/.local/share/nix/trusted-settings.json.", name,valueS);
|
||||
} else {
|
||||
// FIXME: filter ANSI escapes, newlines, \r, etc.
|
||||
if (std::tolower(logger->ask(fmt("do you want to allow configuration setting '%s' to be set to '" ANSI_RED "%s" ANSI_NORMAL "' (y/N)?", name, valueS)).value_or('n')) != 'y') {
|
||||
if (std::tolower(logger->ask("do you want to permanently mark this value as untrusted (y/N)?").value_or('n')) == 'y') {
|
||||
trustedList[name][valueS] = false;
|
||||
writeTrustedList(trustedList);
|
||||
}
|
||||
} else {
|
||||
if (std::tolower(logger->ask("do you want to permanently mark this value as trusted (y/N)?").value_or('n')) == 'y') {
|
||||
trustedList[name][valueS] = trusted = true;
|
||||
writeTrustedList(trustedList);
|
||||
}
|
||||
if (std::tolower(logger->ask(fmt("do you want to allow configuration setting '%s' to be set to '" ANSI_RED "%s" ANSI_NORMAL "' (y/N)?", name, valueS)).value_or('n')) == 'y') {
|
||||
trusted = true;
|
||||
}
|
||||
if (std::tolower(logger->ask(fmt("do you want to permanently mark this value as %s (y/N)?", trusted ? "trusted": "untrusted" )).value_or('n')) == 'y') {
|
||||
trustedList[name][valueS] = trusted;
|
||||
writeTrustedList(trustedList);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -64,8 +64,7 @@ static std::tuple<fetchers::Tree, FlakeRef, FlakeRef> fetchOrSubstituteTree(
|
|||
debug("got tree '%s' from '%s'",
|
||||
state.store->printStorePath(tree.storePath), lockedRef);
|
||||
|
||||
if (state.allowedPaths)
|
||||
state.allowedPaths->insert(tree.actualPath);
|
||||
state.allowPath(tree.storePath);
|
||||
|
||||
assert(!originalRef.input.getNarHash() || tree.storePath == originalRef.input.computeStorePath(*state.store));
|
||||
|
||||
|
@ -89,10 +88,12 @@ static void expectType(EvalState & state, ValueType type,
|
|||
}
|
||||
|
||||
static std::map<FlakeId, FlakeInput> parseFlakeInputs(
|
||||
EvalState & state, Value * value, const Pos & pos);
|
||||
EvalState & state, Value * value, const Pos & pos,
|
||||
const std::optional<Path> & baseDir, InputPath lockRootPath);
|
||||
|
||||
static FlakeInput parseFlakeInput(EvalState & state,
|
||||
const std::string & inputName, Value * value, const Pos & pos)
|
||||
const std::string & inputName, Value * value, const Pos & pos,
|
||||
const std::optional<Path> & baseDir, InputPath lockRootPath)
|
||||
{
|
||||
expectType(state, nAttrs, *value, pos);
|
||||
|
||||
|
@ -116,10 +117,12 @@ static FlakeInput parseFlakeInput(EvalState & state,
|
|||
expectType(state, nBool, *attr.value, *attr.pos);
|
||||
input.isFlake = attr.value->boolean;
|
||||
} else if (attr.name == sInputs) {
|
||||
input.overrides = parseFlakeInputs(state, attr.value, *attr.pos);
|
||||
input.overrides = parseFlakeInputs(state, attr.value, *attr.pos, baseDir, lockRootPath);
|
||||
} else if (attr.name == sFollows) {
|
||||
expectType(state, nString, *attr.value, *attr.pos);
|
||||
input.follows = parseInputPath(attr.value->string.s);
|
||||
auto follows(parseInputPath(attr.value->string.s));
|
||||
follows.insert(follows.begin(), lockRootPath.begin(), lockRootPath.end());
|
||||
input.follows = follows;
|
||||
} else {
|
||||
switch (attr.value->type()) {
|
||||
case nString:
|
||||
|
@ -154,7 +157,7 @@ static FlakeInput parseFlakeInput(EvalState & state,
|
|||
if (!attrs.empty())
|
||||
throw Error("unexpected flake input attribute '%s', at %s", attrs.begin()->first, pos);
|
||||
if (url)
|
||||
input.ref = parseFlakeRef(*url, {}, true);
|
||||
input.ref = parseFlakeRef(*url, baseDir, true, input.isFlake);
|
||||
}
|
||||
|
||||
if (!input.follows && !input.ref)
|
||||
|
@ -164,7 +167,8 @@ static FlakeInput parseFlakeInput(EvalState & state,
|
|||
}
|
||||
|
||||
static std::map<FlakeId, FlakeInput> parseFlakeInputs(
|
||||
EvalState & state, Value * value, const Pos & pos)
|
||||
EvalState & state, Value * value, const Pos & pos,
|
||||
const std::optional<Path> & baseDir, InputPath lockRootPath)
|
||||
{
|
||||
std::map<FlakeId, FlakeInput> inputs;
|
||||
|
||||
|
@ -175,7 +179,9 @@ static std::map<FlakeId, FlakeInput> parseFlakeInputs(
|
|||
parseFlakeInput(state,
|
||||
inputAttr.name,
|
||||
inputAttr.value,
|
||||
*inputAttr.pos));
|
||||
*inputAttr.pos,
|
||||
baseDir,
|
||||
lockRootPath));
|
||||
}
|
||||
|
||||
return inputs;
|
||||
|
@ -185,13 +191,15 @@ static Flake getFlake(
|
|||
EvalState & state,
|
||||
const FlakeRef & originalRef,
|
||||
bool allowLookup,
|
||||
FlakeCache & flakeCache)
|
||||
FlakeCache & flakeCache,
|
||||
InputPath lockRootPath)
|
||||
{
|
||||
auto [sourceInfo, resolvedRef, lockedRef] = fetchOrSubstituteTree(
|
||||
state, originalRef, allowLookup, flakeCache);
|
||||
|
||||
// Guard against symlink attacks.
|
||||
auto flakeFile = canonPath(sourceInfo.actualPath + "/" + lockedRef.subdir + "/flake.nix");
|
||||
auto flakeDir = canonPath(sourceInfo.actualPath + "/" + lockedRef.subdir, true);
|
||||
auto flakeFile = canonPath(flakeDir + "/flake.nix", true);
|
||||
if (!isInDir(flakeFile, sourceInfo.actualPath))
|
||||
throw Error("'flake.nix' file of flake '%s' escapes from '%s'",
|
||||
lockedRef, state.store->printStorePath(sourceInfo.storePath));
|
||||
|
@ -219,14 +227,14 @@ static Flake getFlake(
|
|||
auto sInputs = state.symbols.create("inputs");
|
||||
|
||||
if (auto inputs = vInfo.attrs->get(sInputs))
|
||||
flake.inputs = parseFlakeInputs(state, inputs->value, *inputs->pos);
|
||||
flake.inputs = parseFlakeInputs(state, inputs->value, *inputs->pos, flakeDir, lockRootPath);
|
||||
|
||||
auto sOutputs = state.symbols.create("outputs");
|
||||
|
||||
if (auto outputs = vInfo.attrs->get(sOutputs)) {
|
||||
expectType(state, nFunction, *outputs->value, *outputs->pos);
|
||||
|
||||
if (outputs->value->isLambda() && outputs->value->lambda.fun->matchAttrs) {
|
||||
if (outputs->value->isLambda() && outputs->value->lambda.fun->hasFormals()) {
|
||||
for (auto & formal : outputs->value->lambda.fun->formals->formals) {
|
||||
if (formal.name != state.sSelf)
|
||||
flake.inputs.emplace(formal.name, FlakeInput {
|
||||
|
@ -246,19 +254,24 @@ static Flake getFlake(
|
|||
for (auto & setting : *nixConfig->value->attrs) {
|
||||
forceTrivialValue(state, *setting.value, *setting.pos);
|
||||
if (setting.value->type() == nString)
|
||||
flake.config.settings.insert({setting.name, state.forceStringNoCtx(*setting.value, *setting.pos)});
|
||||
flake.config.settings.insert({setting.name, std::string(state.forceStringNoCtx(*setting.value, *setting.pos))});
|
||||
else if (setting.value->type() == nPath) {
|
||||
PathSet emptyContext = {};
|
||||
flake.config.settings.emplace(
|
||||
setting.name,
|
||||
state.coerceToString(*setting.pos, *setting.value, emptyContext, false, true, true) .toOwned());
|
||||
}
|
||||
else if (setting.value->type() == nInt)
|
||||
flake.config.settings.insert({setting.name, state.forceInt(*setting.value, *setting.pos)});
|
||||
else if (setting.value->type() == nBool)
|
||||
flake.config.settings.insert({setting.name, state.forceBool(*setting.value, *setting.pos)});
|
||||
flake.config.settings.insert({setting.name, Explicit<bool> { state.forceBool(*setting.value, *setting.pos) }});
|
||||
else if (setting.value->type() == nList) {
|
||||
std::vector<std::string> ss;
|
||||
for (unsigned int n = 0; n < setting.value->listSize(); ++n) {
|
||||
auto elem = setting.value->listElems()[n];
|
||||
for (auto elem : setting.value->listItems()) {
|
||||
if (elem->type() != nString)
|
||||
throw TypeError("list element in flake configuration setting '%s' is %s while a string is expected",
|
||||
setting.name, showType(*setting.value));
|
||||
ss.push_back(state.forceStringNoCtx(*elem, *setting.pos));
|
||||
ss.emplace_back(state.forceStringNoCtx(*elem, *setting.pos));
|
||||
}
|
||||
flake.config.settings.insert({setting.name, ss});
|
||||
}
|
||||
|
@ -280,6 +293,11 @@ static Flake getFlake(
|
|||
return flake;
|
||||
}
|
||||
|
||||
Flake getFlake(EvalState & state, const FlakeRef & originalRef, bool allowLookup, FlakeCache & flakeCache)
|
||||
{
|
||||
return getFlake(state, originalRef, allowLookup, flakeCache, {});
|
||||
}
|
||||
|
||||
Flake getFlake(EvalState & state, const FlakeRef & originalRef, bool allowLookup)
|
||||
{
|
||||
FlakeCache flakeCache;
|
||||
|
@ -293,7 +311,7 @@ LockedFlake lockFlake(
|
|||
const FlakeRef & topRef,
|
||||
const LockFlags & lockFlags)
|
||||
{
|
||||
settings.requireExperimentalFeature("flakes");
|
||||
settings.requireExperimentalFeature(Xp::Flakes);
|
||||
|
||||
FlakeCache flakeCache;
|
||||
|
||||
|
@ -303,7 +321,7 @@ LockedFlake lockFlake(
|
|||
|
||||
if (lockFlags.applyNixConfig) {
|
||||
flake.config.apply();
|
||||
// FIXME: send new config to the daemon.
|
||||
state.store->setOptions();
|
||||
}
|
||||
|
||||
try {
|
||||
|
@ -325,23 +343,14 @@ LockedFlake lockFlake(
|
|||
|
||||
std::vector<FlakeRef> parents;
|
||||
|
||||
struct LockParent {
|
||||
/* The path to this parent. */
|
||||
InputPath path;
|
||||
|
||||
/* Whether we are currently inside a top-level lockfile
|
||||
(inputs absolute) or subordinate lockfile (inputs
|
||||
relative). */
|
||||
bool absolute;
|
||||
};
|
||||
|
||||
std::function<void(
|
||||
const FlakeInputs & flakeInputs,
|
||||
std::shared_ptr<Node> node,
|
||||
const InputPath & inputPathPrefix,
|
||||
std::shared_ptr<const Node> oldNode,
|
||||
const LockParent & parent,
|
||||
const Path & parentPath)>
|
||||
const InputPath & lockRootPath,
|
||||
const Path & parentPath,
|
||||
bool trustLock)>
|
||||
computeLocks;
|
||||
|
||||
computeLocks = [&](
|
||||
|
@ -349,8 +358,9 @@ LockedFlake lockFlake(
|
|||
std::shared_ptr<Node> node,
|
||||
const InputPath & inputPathPrefix,
|
||||
std::shared_ptr<const Node> oldNode,
|
||||
const LockParent & parent,
|
||||
const Path & parentPath)
|
||||
const InputPath & lockRootPath,
|
||||
const Path & parentPath,
|
||||
bool trustLock)
|
||||
{
|
||||
debug("computing lock file node '%s'", printInputPath(inputPathPrefix));
|
||||
|
||||
|
@ -393,17 +403,7 @@ LockedFlake lockFlake(
|
|||
if (input.follows) {
|
||||
InputPath target;
|
||||
|
||||
if (parent.absolute && !hasOverride) {
|
||||
target = *input.follows;
|
||||
} else {
|
||||
if (hasOverride) {
|
||||
target = inputPathPrefix;
|
||||
target.pop_back();
|
||||
} else
|
||||
target = parent.path;
|
||||
|
||||
for (auto & i : *input.follows) target.push_back(i);
|
||||
}
|
||||
target.insert(target.end(), input.follows->begin(), input.follows->end());
|
||||
|
||||
debug("input '%s' follows '%s'", inputPathS, printInputPath(target));
|
||||
node->inputs.insert_or_assign(id, target);
|
||||
|
@@ -442,22 +442,18 @@ LockedFlake lockFlake(
update it. */
auto lb = lockFlags.inputUpdates.lower_bound(inputPath);

auto hasChildUpdate =
auto mustRefetch =
lb != lockFlags.inputUpdates.end()
&& lb->size() > inputPath.size()
&& std::equal(inputPath.begin(), inputPath.end(), lb->begin());

if (hasChildUpdate) {
auto inputFlake = getFlake(
state, oldLock->lockedRef, false, flakeCache);
computeLocks(inputFlake.inputs, childNode, inputPath, oldLock, parent, parentPath);
} else {
FlakeInputs fakeInputs;

if (!mustRefetch) {
/* No need to fetch this flake, we can be
lazy. However there may be new overrides on the
inputs of this flake, so we need to check
those. */
FlakeInputs fakeInputs;

for (auto & i : oldLock->inputs) {
if (auto lockedNode = std::get_if<0>(&i.second)) {
fakeInputs.emplace(i.first, FlakeInput {

@@ -465,21 +461,47 @@ LockedFlake lockFlake(
.isFlake = (*lockedNode)->isFlake,
});
} else if (auto follows = std::get_if<1>(&i.second)) {
if (! trustLock) {
// It is possible that the flake has changed,
// so we must confirm all the follows that are in the lockfile are also in the flake.
auto overridePath(inputPath);
overridePath.push_back(i.first);
auto o = overrides.find(overridePath);
// If the override disappeared, we have to refetch the flake,
// since some of the inputs may not be present in the lockfile.
if (o == overrides.end()) {
mustRefetch = true;
// There's no point populating the rest of the fake inputs,
// since we'll refetch the flake anyways.
break;
}
}
auto absoluteFollows(lockRootPath);
absoluteFollows.insert(absoluteFollows.end(), follows->begin(), follows->end());
fakeInputs.emplace(i.first, FlakeInput {
.follows = *follows,
.follows = absoluteFollows,
});
}
}

computeLocks(fakeInputs, childNode, inputPath, oldLock, parent, parentPath);
}

auto localPath(parentPath);
|
||||
// If this input is a path, recurse it down.
|
||||
// This allows us to resolve path inputs relative to the current flake.
|
||||
if ((*input.ref).input.getType() == "path")
|
||||
localPath = absPath(*input.ref->input.getSourcePath(), parentPath);
|
||||
computeLocks(
|
||||
mustRefetch
|
||||
? getFlake(state, oldLock->lockedRef, false, flakeCache, inputPath).inputs
|
||||
: fakeInputs,
|
||||
childNode, inputPath, oldLock, lockRootPath, parentPath, !mustRefetch);
|
||||
|
||||
} else {
|
||||
/* We need to create a new lock file entry. So fetch
|
||||
this input. */
|
||||
debug("creating new input '%s'", inputPathS);
|
||||
|
||||
if (!lockFlags.allowMutable && !input.ref->input.isImmutable())
|
||||
if (!lockFlags.allowMutable && !input.ref->input.isLocked())
|
||||
throw Error("cannot update flake input '%s' in pure mode", inputPathS);
|
||||
|
||||
if (input.isFlake) {
|
||||
|
@ -488,12 +510,10 @@ LockedFlake lockFlake(
|
|||
|
||||
// If this input is a path, recurse it down.
|
||||
// This allows us to resolve path inputs relative to the current flake.
|
||||
if (localRef.input.getType() == "path") {
|
||||
localRef.input.parent = parentPath;
|
||||
localPath = canonPath(parentPath + "/" + *input.ref->input.getSourcePath());
|
||||
}
|
||||
if (localRef.input.getType() == "path")
|
||||
localPath = absPath(*input.ref->input.getSourcePath(), parentPath);
|
||||
|
||||
auto inputFlake = getFlake(state, localRef, useRegistries, flakeCache);
|
||||
auto inputFlake = getFlake(state, localRef, useRegistries, flakeCache, inputPath);
|
||||
|
||||
/* Note: in case of an --override-input, we use
|
||||
the *original* ref (input2.ref) for the
|
||||
|
@ -514,13 +534,6 @@ LockedFlake lockFlake(
|
|||
parents.push_back(*input.ref);
|
||||
Finally cleanup([&]() { parents.pop_back(); });
|
||||
|
||||
// Follows paths from existing inputs in the top-level lockfile are absolute,
|
||||
// whereas paths in subordinate lockfiles are relative to those lockfiles.
|
||||
LockParent newParent {
|
||||
.path = inputPath,
|
||||
.absolute = oldLock ? true : false
|
||||
};
|
||||
|
||||
/* Recursively process the inputs of this
|
||||
flake. Also, unless we already have this flake
|
||||
in the top-level lock file, use this flake's
|
||||
|
@ -531,7 +544,7 @@ LockedFlake lockFlake(
|
|||
? std::dynamic_pointer_cast<const Node>(oldLock)
|
||||
: LockFile::read(
|
||||
inputFlake.sourceInfo->actualPath + "/" + inputFlake.lockedRef.subdir + "/flake.lock").root,
|
||||
newParent, localPath);
|
||||
oldLock ? lockRootPath : inputPath, localPath, false);
|
||||
}
|
||||
|
||||
else {
|
||||
|
@ -549,17 +562,12 @@ LockedFlake lockFlake(
|
|||
}
|
||||
};
|
||||
|
||||
LockParent parent {
|
||||
.path = {},
|
||||
.absolute = true
|
||||
};
|
||||
|
||||
// Bring in the current ref for relative path resolution if we have it
|
||||
auto parentPath = canonPath(flake.sourceInfo->actualPath + "/" + flake.lockedRef.subdir);
|
||||
auto parentPath = canonPath(flake.sourceInfo->actualPath + "/" + flake.lockedRef.subdir, true);
|
||||
|
||||
computeLocks(
|
||||
flake.inputs, newLockFile.root, {},
|
||||
lockFlags.recreateLockFile ? nullptr : oldLockFile.root, parent, parentPath);
|
||||
lockFlags.recreateLockFile ? nullptr : oldLockFile.root, {}, parentPath, false);
|
||||
|
||||
for (auto & i : lockFlags.inputOverrides)
|
||||
if (!overridesUsed.count(i.first))
|
||||
|
@ -606,12 +614,24 @@ LockedFlake lockFlake(
|
|||
|
||||
newLockFile.write(path);
|
||||
|
||||
std::optional<std::string> commitMessage = std::nullopt;
|
||||
if (lockFlags.commitLockFile) {
|
||||
std::string cm;
|
||||
|
||||
cm = settings.commitLockFileSummary.get();
|
||||
|
||||
if (cm == "") {
|
||||
cm = fmt("%s: %s", relPath, lockFileExists ? "Update" : "Add");
|
||||
}
|
||||
|
||||
cm += "\n\nFlake lock file updates:\n\n";
|
||||
cm += filterANSIEscapes(diff, true);
|
||||
commitMessage = cm;
|
||||
}
|
||||
|
||||
topRef.input.markChangedFile(
|
||||
(topRef.subdir == "" ? "" : topRef.subdir + "/") + "flake.lock",
|
||||
lockFlags.commitLockFile
|
||||
? std::optional<std::string>(fmt("%s: %s\n\nFlake lock file changes:\n\n%s",
|
||||
relPath, lockFileExists ? "Update" : "Add", filterANSIEscapes(diff, true)))
|
||||
: std::nullopt);
|
||||
commitMessage);
|
||||
|
||||
/* Rewriting the lockfile changed the top-level
|
||||
repo, so we should re-read it. FIXME: we could
|
||||
|
@ -630,13 +650,15 @@ LockedFlake lockFlake(
|
|||
now. Corner case: we could have reverted from a
|
||||
dirty to a clean tree! */
|
||||
if (flake.lockedRef.input == prevLockedRef.input
|
||||
&& !flake.lockedRef.input.isImmutable())
|
||||
&& !flake.lockedRef.input.isLocked())
|
||||
throw Error("'%s' did not change after I updated its 'flake.lock' file; is 'flake.lock' under version control?", flake.originalRef);
|
||||
}
|
||||
} else
|
||||
throw Error("cannot write modified lock file of flake '%s' (use '--no-write-lock-file' to ignore)", topRef);
|
||||
} else
|
||||
} else {
|
||||
warn("not writing modified lock file of flake '%s':\n%s", topRef, chomp(diff));
|
||||
flake.forceDirty = true;
|
||||
}
|
||||
}
|
||||
|
||||
return LockedFlake { .flake = std::move(flake), .lockFile = std::move(newLockFile) };
|
||||
|
@ -657,11 +679,17 @@ void callFlake(EvalState & state,
|
|||
auto vTmp1 = state.allocValue();
|
||||
auto vTmp2 = state.allocValue();
|
||||
|
||||
mkString(*vLocks, lockedFlake.lockFile.to_string());
|
||||
vLocks->mkString(lockedFlake.lockFile.to_string());
|
||||
|
||||
emitTreeAttrs(state, *lockedFlake.flake.sourceInfo, lockedFlake.flake.lockedRef.input, *vRootSrc);
|
||||
emitTreeAttrs(
|
||||
state,
|
||||
*lockedFlake.flake.sourceInfo,
|
||||
lockedFlake.flake.lockedRef.input,
|
||||
*vRootSrc,
|
||||
false,
|
||||
lockedFlake.flake.forceDirty);
|
||||
|
||||
mkString(*vRootSubdir, lockedFlake.flake.lockedRef.subdir);
|
||||
vRootSubdir->mkString(lockedFlake.flake.lockedRef.subdir);
|
||||
|
||||
if (!state.vCallFlake) {
|
||||
state.vCallFlake = allocRootValue(state.allocValue());
|
||||
|
@ -677,10 +705,12 @@ void callFlake(EvalState & state,
|
|||
|
||||
static void prim_getFlake(EvalState & state, const Pos & pos, Value * * args, Value & v)
|
||||
{
|
||||
auto flakeRefS = state.forceStringNoCtx(*args[0], pos);
|
||||
state.requireExperimentalFeatureOnEvaluation(Xp::Flakes, "builtins.getFlake", pos);
|
||||
|
||||
std::string flakeRefS(state.forceStringNoCtx(*args[0], pos));
|
||||
auto flakeRef = parseFlakeRef(flakeRefS, {}, true);
|
||||
if (evalSettings.pureEval && !flakeRef.input.isImmutable())
|
||||
throw Error("cannot call 'getFlake' on mutable flake reference '%s', at %s (use --impure to override)", flakeRefS, pos);
|
||||
if (evalSettings.pureEval && !flakeRef.input.isLocked())
|
||||
throw Error("cannot call 'getFlake' on unlocked flake reference '%s', at %s (use --impure to override)", flakeRefS, pos);
|
||||
|
||||
callFlake(state,
|
||||
lockFlake(state, flakeRef,
|
||||
|
@ -692,7 +722,7 @@ static void prim_getFlake(EvalState & state, const Pos & pos, Value * * args, Va
|
|||
v);
|
||||
}
|
||||
|
||||
static RegisterPrimOp r2("__getFlake", 1, prim_getFlake, "flakes");
|
||||
static RegisterPrimOp r2("__getFlake", 1, prim_getFlake);
|
||||
|
||||
}
|
||||
|
||||
|
@ -702,8 +732,9 @@ Fingerprint LockedFlake::getFingerprint() const
|
|||
// and we haven't changed it, then it's sufficient to use
|
||||
// flake.sourceInfo.storePath for the fingerprint.
|
||||
return hashString(htSHA256,
|
||||
fmt("%s;%d;%d;%s",
|
||||
fmt("%s;%s;%d;%d;%s",
|
||||
flake.sourceInfo->storePath.to_string(),
|
||||
flake.lockedRef.subdir,
|
||||
flake.lockedRef.input.getRevCount().value_or(0),
|
||||
flake.lockedRef.input.getLastModified().value_or(0),
|
||||
lockFile));
|
||||
|
|
|
@ -58,9 +58,10 @@ struct ConfigFile
|
|||
/* The contents of a flake.nix file. */
|
||||
struct Flake
|
||||
{
|
||||
FlakeRef originalRef; // the original flake specification (by the user)
|
||||
FlakeRef resolvedRef; // registry references and caching resolved to the specific underlying flake
|
||||
FlakeRef lockedRef; // the specific local store result of invoking the fetcher
|
||||
FlakeRef originalRef; // the original flake specification (by the user)
|
||||
FlakeRef resolvedRef; // registry references and caching resolved to the specific underlying flake
|
||||
FlakeRef lockedRef; // the specific local store result of invoking the fetcher
|
||||
bool forceDirty = false; // pretend that 'lockedRef' is dirty
|
||||
std::optional<std::string> description;
|
||||
std::shared_ptr<const fetchers::Tree> sourceInfo;
|
||||
FlakeInputs inputs;
|
||||
|
@ -140,6 +141,8 @@ void emitTreeAttrs(
|
|||
EvalState & state,
|
||||
const fetchers::Tree & tree,
|
||||
const fetchers::Input & input,
|
||||
Value & v, bool emptyRevFallback = false);
|
||||
Value & v,
|
||||
bool emptyRevFallback = false,
|
||||
bool forceDirty = false);
|
||||
|
||||
}
|
||||
|
|
|
@ -48,9 +48,12 @@ FlakeRef FlakeRef::resolve(ref<Store> store) const
|
|||
}
|
||||
|
||||
FlakeRef parseFlakeRef(
|
||||
const std::string & url, const std::optional<Path> & baseDir, bool allowMissing)
|
||||
const std::string & url,
|
||||
const std::optional<Path> & baseDir,
|
||||
bool allowMissing,
|
||||
bool isFlake)
|
||||
{
|
||||
auto [flakeRef, fragment] = parseFlakeRefWithFragment(url, baseDir, allowMissing);
|
||||
auto [flakeRef, fragment] = parseFlakeRefWithFragment(url, baseDir, allowMissing, isFlake);
|
||||
if (fragment != "")
|
||||
throw Error("unexpected fragment '%s' in flake reference '%s'", fragment, url);
|
||||
return flakeRef;
|
||||
|
@ -67,7 +70,10 @@ std::optional<FlakeRef> maybeParseFlakeRef(
|
|||
}
|
||||
|
||||
std::pair<FlakeRef, std::string> parseFlakeRefWithFragment(
|
||||
const std::string & url, const std::optional<Path> & baseDir, bool allowMissing)
|
||||
const std::string & url,
|
||||
const std::optional<Path> & baseDir,
|
||||
bool allowMissing,
|
||||
bool isFlake)
|
||||
{
|
||||
using namespace fetchers;
|
||||
|
||||
|
@ -112,46 +118,71 @@ std::pair<FlakeRef, std::string> parseFlakeRefWithFragment(
|
|||
to 'baseDir'). If so, search upward to the root of the
|
||||
repo (i.e. the directory containing .git). */
|
||||
|
||||
path = absPath(path, baseDir, true);
|
||||
path = absPath(path, baseDir);
|
||||
|
||||
if (!S_ISDIR(lstat(path).st_mode))
|
||||
throw BadURL("path '%s' is not a flake (because it's not a directory)", path);
|
||||
if (isFlake) {
|
||||
|
||||
if (!allowMissing && !pathExists(path + "/flake.nix"))
|
||||
throw BadURL("path '%s' is not a flake (because it doesn't contain a 'flake.nix' file)", path);
|
||||
if (!allowMissing && !pathExists(path + "/flake.nix")){
|
||||
notice("path '%s' does not contain a 'flake.nix', searching up",path);
|
||||
|
||||
auto flakeRoot = path;
|
||||
std::string subdir;
|
||||
|
||||
while (flakeRoot != "/") {
|
||||
if (pathExists(flakeRoot + "/.git")) {
|
||||
auto base = std::string("git+file://") + flakeRoot;
|
||||
|
||||
auto parsedURL = ParsedURL{
|
||||
.url = base, // FIXME
|
||||
.base = base,
|
||||
.scheme = "git+file",
|
||||
.authority = "",
|
||||
.path = flakeRoot,
|
||||
.query = decodeQuery(match[2]),
|
||||
};
|
||||
|
||||
if (subdir != "") {
|
||||
if (parsedURL.query.count("dir"))
|
||||
throw Error("flake URL '%s' has an inconsistent 'dir' parameter", url);
|
||||
parsedURL.query.insert_or_assign("dir", subdir);
|
||||
// Save device to detect filesystem boundary
|
||||
dev_t device = lstat(path).st_dev;
|
||||
bool found = false;
|
||||
while (path != "/") {
|
||||
if (pathExists(path + "/flake.nix")) {
|
||||
found = true;
|
||||
break;
|
||||
} else if (pathExists(path + "/.git"))
|
||||
throw Error("path '%s' is not part of a flake (neither it nor its parent directories contain a 'flake.nix' file)", path);
|
||||
else {
|
||||
if (lstat(path).st_dev != device)
|
||||
throw Error("unable to find a flake before encountering filesystem boundary at '%s'", path);
|
||||
}
|
||||
path = dirOf(path);
|
||||
}
|
||||
|
||||
if (pathExists(flakeRoot + "/.git/shallow"))
|
||||
parsedURL.query.insert_or_assign("shallow", "1");
|
||||
|
||||
return std::make_pair(
|
||||
FlakeRef(Input::fromURL(parsedURL), get(parsedURL.query, "dir").value_or("")),
|
||||
fragment);
|
||||
if (!found)
|
||||
throw BadURL("could not find a flake.nix file");
|
||||
}
|
||||
|
||||
subdir = std::string(baseNameOf(flakeRoot)) + (subdir.empty() ? "" : "/" + subdir);
|
||||
flakeRoot = dirOf(flakeRoot);
|
||||
if (!S_ISDIR(lstat(path).st_mode))
|
||||
throw BadURL("path '%s' is not a flake (because it's not a directory)", path);
|
||||
|
||||
if (!allowMissing && !pathExists(path + "/flake.nix"))
|
||||
throw BadURL("path '%s' is not a flake (because it doesn't contain a 'flake.nix' file)", path);
|
||||
|
||||
auto flakeRoot = path;
|
||||
std::string subdir;
|
||||
|
||||
while (flakeRoot != "/") {
|
||||
if (pathExists(flakeRoot + "/.git")) {
|
||||
auto base = std::string("git+file://") + flakeRoot;
|
||||
|
||||
auto parsedURL = ParsedURL{
|
||||
.url = base, // FIXME
|
||||
.base = base,
|
||||
.scheme = "git+file",
|
||||
.authority = "",
|
||||
.path = flakeRoot,
|
||||
.query = decodeQuery(match[2]),
|
||||
};
|
||||
|
||||
if (subdir != "") {
|
||||
if (parsedURL.query.count("dir"))
|
||||
throw Error("flake URL '%s' has an inconsistent 'dir' parameter", url);
|
||||
parsedURL.query.insert_or_assign("dir", subdir);
|
||||
}
|
||||
|
||||
if (pathExists(flakeRoot + "/.git/shallow"))
|
||||
parsedURL.query.insert_or_assign("shallow", "1");
|
||||
|
||||
return std::make_pair(
|
||||
FlakeRef(Input::fromURL(parsedURL), get(parsedURL.query, "dir").value_or("")),
|
||||
fragment);
|
||||
}
|
||||
|
||||
subdir = std::string(baseNameOf(flakeRoot)) + (subdir.empty() ? "" : "/" + subdir);
|
||||
flakeRoot = dirOf(flakeRoot);
|
||||
}
|
||||
}
|
||||
|
||||
} else {
|
||||
|
@ -172,8 +203,12 @@ std::pair<FlakeRef, std::string> parseFlakeRefWithFragment(
|
|||
auto parsedURL = parseURL(url);
|
||||
std::string fragment;
|
||||
std::swap(fragment, parsedURL.fragment);
|
||||
|
||||
auto input = Input::fromURL(parsedURL);
|
||||
input.parent = baseDir;
|
||||
|
||||
return std::make_pair(
|
||||
FlakeRef(Input::fromURL(parsedURL), get(parsedURL.query, "dir").value_or("")),
|
||||
FlakeRef(std::move(input), get(parsedURL.query, "dir").value_or("")),
|
||||
fragment);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -62,13 +62,19 @@ struct FlakeRef
|
|||
std::ostream & operator << (std::ostream & str, const FlakeRef & flakeRef);
|
||||
|
||||
FlakeRef parseFlakeRef(
|
||||
const std::string & url, const std::optional<Path> & baseDir = {}, bool allowMissing = false);
|
||||
const std::string & url,
|
||||
const std::optional<Path> & baseDir = {},
|
||||
bool allowMissing = false,
|
||||
bool isFlake = true);
|
||||
|
||||
std::optional<FlakeRef> maybeParseFlake(
|
||||
const std::string & url, const std::optional<Path> & baseDir = {});
|
||||
|
||||
std::pair<FlakeRef, std::string> parseFlakeRefWithFragment(
|
||||
const std::string & url, const std::optional<Path> & baseDir = {}, bool allowMissing = false);
|
||||
const std::string & url,
|
||||
const std::optional<Path> & baseDir = {},
|
||||
bool allowMissing = false,
|
||||
bool isFlake = true);
|
||||
|
||||
std::optional<std::pair<FlakeRef, std::string>> maybeParseFlakeRefWithFragment(
|
||||
const std::string & url, const std::optional<Path> & baseDir = {});
|
||||
|
|
|
@ -35,7 +35,7 @@ LockedNode::LockedNode(const nlohmann::json & json)
|
|||
, originalRef(getFlakeRef(json, "original", nullptr))
|
||||
, isFlake(json.find("flake") != json.end() ? (bool) json["flake"] : true)
|
||||
{
|
||||
if (!lockedRef.input.isImmutable())
|
||||
if (!lockedRef.input.isLocked())
|
||||
throw Error("lockfile contains mutable lock '%s'",
|
||||
fetchers::attrsToJSON(lockedRef.input.toAttrs()));
|
||||
}
|
||||
|
@ -220,7 +220,7 @@ bool LockFile::isImmutable() const
|
|||
for (auto & i : nodes) {
|
||||
if (i == root) continue;
|
||||
auto lockedNode = std::dynamic_pointer_cast<const LockedNode>(i);
|
||||
if (lockedNode && !lockedNode->lockedRef.input.isImmutable()) return false;
|
||||
if (lockedNode && !lockedNode->lockedRef.input.isLocked()) return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
|
|
|
@ -11,8 +11,8 @@
|
|||
namespace nix {
|
||||
|
||||
|
||||
DrvInfo::DrvInfo(EvalState & state, const string & attrPath, Bindings * attrs)
|
||||
: state(&state), attrs(attrs), attrPath(attrPath)
|
||||
DrvInfo::DrvInfo(EvalState & state, std::string attrPath, Bindings * attrs)
|
||||
: state(&state), attrs(attrs), attrPath(std::move(attrPath))
|
||||
{
|
||||
}
|
||||
|
||||
|
@ -47,7 +47,7 @@ DrvInfo::DrvInfo(EvalState & state, ref<Store> store, const std::string & drvPat
|
|||
}
|
||||
|
||||
|
||||
string DrvInfo::queryName() const
|
||||
std::string DrvInfo::queryName() const
|
||||
{
|
||||
if (name == "" && attrs) {
|
||||
auto i = attrs->find(state->sName);
|
||||
|
@ -58,7 +58,7 @@ string DrvInfo::queryName() const
|
|||
}
|
||||
|
||||
|
||||
string DrvInfo::querySystem() const
|
||||
std::string DrvInfo::querySystem() const
|
||||
{
|
||||
if (system == "" && attrs) {
|
||||
auto i = attrs->find(state->sSystem);
|
||||
|
@ -68,7 +68,7 @@ string DrvInfo::querySystem() const
|
|||
}
|
||||
|
||||
|
||||
string DrvInfo::queryDrvPath() const
|
||||
std::string DrvInfo::queryDrvPath() const
|
||||
{
|
||||
if (drvPath == "" && attrs) {
|
||||
Bindings::iterator i = attrs->find(state->sDrvPath);
|
||||
|
@ -79,7 +79,7 @@ string DrvInfo::queryDrvPath() const
|
|||
}
|
||||
|
||||
|
||||
string DrvInfo::queryOutPath() const
|
||||
std::string DrvInfo::queryOutPath() const
|
||||
{
|
||||
if (!outPath && attrs) {
|
||||
Bindings::iterator i = attrs->find(state->sOutPath);
|
||||
|
@ -102,12 +102,12 @@ DrvInfo::Outputs DrvInfo::queryOutputs(bool onlyOutputsToInstall)
|
|||
state->forceList(*i->value, *i->pos);
|
||||
|
||||
/* For each output... */
|
||||
for (unsigned int j = 0; j < i->value->listSize(); ++j) {
|
||||
for (auto elem : i->value->listItems()) {
|
||||
/* Evaluate the corresponding set. */
|
||||
string name = state->forceStringNoCtx(*i->value->listElems()[j], *i->pos);
|
||||
std::string name(state->forceStringNoCtx(*elem, *i->pos));
|
||||
Bindings::iterator out = attrs->find(state->symbols.create(name));
|
||||
if (out == attrs->end()) continue; // FIXME: throw error?
|
||||
state->forceAttrs(*out->value);
|
||||
state->forceAttrs(*out->value, *i->pos);
|
||||
|
||||
/* And evaluate its ‘outPath’ attribute. */
|
||||
Bindings::iterator outPath = out->value->attrs->find(state->sOutPath);
|
||||
|
@ -128,9 +128,9 @@ DrvInfo::Outputs DrvInfo::queryOutputs(bool onlyOutputsToInstall)
|
|||
/* ^ this shows during `nix-env -i` right under the bad derivation */
|
||||
if (!outTI->isList()) throw errMsg;
|
||||
Outputs result;
|
||||
for (auto i = outTI->listElems(); i != outTI->listElems() + outTI->listSize(); ++i) {
|
||||
if ((*i)->type() != nString) throw errMsg;
|
||||
auto out = outputs.find((*i)->string.s);
|
||||
for (auto elem : outTI->listItems()) {
|
||||
if (elem->type() != nString) throw errMsg;
|
||||
auto out = outputs.find(elem->string.s);
|
||||
if (out == outputs.end()) throw errMsg;
|
||||
result.insert(*out);
|
||||
}
|
||||
|
@ -138,7 +138,7 @@ DrvInfo::Outputs DrvInfo::queryOutputs(bool onlyOutputsToInstall)
|
|||
}
|
||||
|
||||
|
||||
string DrvInfo::queryOutputName() const
|
||||
std::string DrvInfo::queryOutputName() const
|
||||
{
|
||||
if (outputName == "" && attrs) {
|
||||
Bindings::iterator i = attrs->find(state->sOutputName);
|
||||
|
@ -172,10 +172,10 @@ StringSet DrvInfo::queryMetaNames()
|
|||
|
||||
bool DrvInfo::checkMeta(Value & v)
|
||||
{
|
||||
state->forceValue(v);
|
||||
state->forceValue(v, [&]() { return v.determinePos(noPos); });
|
||||
if (v.type() == nList) {
|
||||
for (unsigned int n = 0; n < v.listSize(); ++n)
|
||||
if (!checkMeta(*v.listElems()[n])) return false;
|
||||
for (auto elem : v.listItems())
|
||||
if (!checkMeta(*elem)) return false;
|
||||
return true;
|
||||
}
|
||||
else if (v.type() == nAttrs) {
|
||||
|
@ -190,7 +190,7 @@ bool DrvInfo::checkMeta(Value & v)
|
|||
}
|
||||
|
||||
|
||||
Value * DrvInfo::queryMeta(const string & name)
|
||||
Value * DrvInfo::queryMeta(const std::string & name)
|
||||
{
|
||||
if (!getMeta()) return 0;
|
||||
Bindings::iterator a = meta->find(state->symbols.create(name));
|
||||
|
@ -199,7 +199,7 @@ Value * DrvInfo::queryMeta(const string & name)
|
|||
}
|
||||
|
||||
|
||||
string DrvInfo::queryMetaString(const string & name)
|
||||
std::string DrvInfo::queryMetaString(const std::string & name)
|
||||
{
|
||||
Value * v = queryMeta(name);
|
||||
if (!v || v->type() != nString) return "";
|
||||
|
@ -207,7 +207,7 @@ string DrvInfo::queryMetaString(const string & name)
|
|||
}
|
||||
|
||||
|
||||
NixInt DrvInfo::queryMetaInt(const string & name, NixInt def)
|
||||
NixInt DrvInfo::queryMetaInt(const std::string & name, NixInt def)
|
||||
{
|
||||
Value * v = queryMeta(name);
|
||||
if (!v) return def;
|
||||
|
@ -221,7 +221,7 @@ NixInt DrvInfo::queryMetaInt(const string & name, NixInt def)
|
|||
return def;
|
||||
}
|
||||
|
||||
NixFloat DrvInfo::queryMetaFloat(const string & name, NixFloat def)
|
||||
NixFloat DrvInfo::queryMetaFloat(const std::string & name, NixFloat def)
|
||||
{
|
||||
Value * v = queryMeta(name);
|
||||
if (!v) return def;
|
||||
|
@ -236,7 +236,7 @@ NixFloat DrvInfo::queryMetaFloat(const string & name, NixFloat def)
|
|||
}
|
||||
|
||||
|
||||
bool DrvInfo::queryMetaBool(const string & name, bool def)
|
||||
bool DrvInfo::queryMetaBool(const std::string & name, bool def)
|
||||
{
|
||||
Value * v = queryMeta(name);
|
||||
if (!v) return def;
|
||||
|
@ -251,23 +251,22 @@ bool DrvInfo::queryMetaBool(const string & name, bool def)
|
|||
}
|
||||
|
||||
|
||||
void DrvInfo::setMeta(const string & name, Value * v)
|
||||
void DrvInfo::setMeta(const std::string & name, Value * v)
|
||||
{
|
||||
getMeta();
|
||||
Bindings * old = meta;
|
||||
meta = state->allocBindings(1 + (old ? old->size() : 0));
|
||||
auto attrs = state->buildBindings(1 + (meta ? meta->size() : 0));
|
||||
Symbol sym = state->symbols.create(name);
|
||||
if (old)
|
||||
for (auto i : *old)
|
||||
if (meta)
|
||||
for (auto i : *meta)
|
||||
if (i.name != sym)
|
||||
meta->push_back(i);
|
||||
if (v) meta->push_back(Attr(sym, v));
|
||||
meta->sort();
|
||||
attrs.insert(i);
|
||||
if (v) attrs.insert(sym, v);
|
||||
meta = attrs.finish();
|
||||
}
|
||||
|
||||
|
||||
/* Cache for already considered attrsets. */
|
||||
typedef set<Bindings *> Done;
|
||||
typedef std::set<Bindings *> Done;
|
||||
|
||||
|
||||
/* Evaluate value `v'. If it evaluates to a set of type `derivation',
|
||||
|
@ -275,11 +274,11 @@ typedef set<Bindings *> Done;
|
|||
The result boolean indicates whether it makes sense
|
||||
for the caller to recursively search for derivations in `v'. */
|
||||
static bool getDerivation(EvalState & state, Value & v,
|
||||
const string & attrPath, DrvInfos & drvs, Done & done,
|
||||
const std::string & attrPath, DrvInfos & drvs, Done & done,
|
||||
bool ignoreAssertionFailures)
|
||||
{
|
||||
try {
|
||||
state.forceValue(v);
|
||||
state.forceValue(v, [&]() { return v.determinePos(noPos); });
|
||||
if (!state.isDerivation(v)) return true;
|
||||
|
||||
/* Remove spurious duplicates (e.g., a set like `rec { x =
|
||||
|
@ -312,7 +311,7 @@ std::optional<DrvInfo> getDerivation(EvalState & state, Value & v,
|
|||
}
|
||||
|
||||
|
||||
static string addToPath(const string & s1, const string & s2)
|
||||
static std::string addToPath(const std::string & s1, const std::string & s2)
|
||||
{
|
||||
return s1.empty() ? s2 : s1 + "." + s2;
|
||||
}
|
||||
|
@ -322,7 +321,7 @@ static std::regex attrRegex("[A-Za-z_][A-Za-z0-9-_+]*");
|
|||
|
||||
|
||||
static void getDerivations(EvalState & state, Value & vIn,
|
||||
const string & pathPrefix, Bindings & autoArgs,
|
||||
const std::string & pathPrefix, Bindings & autoArgs,
|
||||
DrvInfos & drvs, Done & done,
|
||||
bool ignoreAssertionFailures)
|
||||
{
|
||||
|
@ -347,7 +346,7 @@ static void getDerivations(EvalState & state, Value & vIn,
|
|||
debug("evaluating attribute '%1%'", i->name);
|
||||
if (!std::regex_match(std::string(i->name), attrRegex))
|
||||
continue;
|
||||
string pathPrefix2 = addToPath(pathPrefix, i->name);
|
||||
std::string pathPrefix2 = addToPath(pathPrefix, i->name);
|
||||
if (combineChannels)
|
||||
getDerivations(state, *i->value, pathPrefix2, autoArgs, drvs, done, ignoreAssertionFailures);
|
||||
else if (getDerivation(state, *i->value, pathPrefix2, drvs, done, ignoreAssertionFailures)) {
|
||||
|
@ -364,10 +363,10 @@ static void getDerivations(EvalState & state, Value & vIn,
|
|||
}
|
||||
|
||||
else if (v.type() == nList) {
|
||||
for (unsigned int n = 0; n < v.listSize(); ++n) {
|
||||
string pathPrefix2 = addToPath(pathPrefix, (format("%1%") % n).str());
|
||||
if (getDerivation(state, *v.listElems()[n], pathPrefix2, drvs, done, ignoreAssertionFailures))
|
||||
getDerivations(state, *v.listElems()[n], pathPrefix2, autoArgs, drvs, done, ignoreAssertionFailures);
|
||||
for (auto [n, elem] : enumerate(v.listItems())) {
|
||||
std::string pathPrefix2 = addToPath(pathPrefix, fmt("%d", n));
|
||||
if (getDerivation(state, *elem, pathPrefix2, drvs, done, ignoreAssertionFailures))
|
||||
getDerivations(state, *elem, pathPrefix2, autoArgs, drvs, done, ignoreAssertionFailures);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -375,7 +374,7 @@ static void getDerivations(EvalState & state, Value & vIn,
|
|||
}
|
||||
|
||||
|
||||
void getDerivations(EvalState & state, Value & v, const string & pathPrefix,
|
||||
void getDerivations(EvalState & state, Value & v, const std::string & pathPrefix,
|
||||
Bindings & autoArgs, DrvInfos & drvs, bool ignoreAssertionFailures)
|
||||
{
|
||||
Done done;
|
||||
|
|
|
@ -12,16 +12,16 @@ namespace nix {
|
|||
struct DrvInfo
|
||||
{
|
||||
public:
|
||||
typedef std::map<string, Path> Outputs;
|
||||
typedef std::map<std::string, Path> Outputs;
|
||||
|
||||
private:
|
||||
EvalState * state;
|
||||
|
||||
mutable string name;
|
||||
mutable string system;
|
||||
mutable string drvPath;
|
||||
mutable std::optional<string> outPath;
|
||||
mutable string outputName;
|
||||
mutable std::string name;
|
||||
mutable std::string system;
|
||||
mutable std::string drvPath;
|
||||
mutable std::optional<std::string> outPath;
|
||||
mutable std::string outputName;
|
||||
Outputs outputs;
|
||||
|
||||
bool failed = false; // set if we get an AssertionError
|
||||
|
@ -33,36 +33,36 @@ private:
|
|||
bool checkMeta(Value & v);
|
||||
|
||||
public:
|
||||
string attrPath; /* path towards the derivation */
|
||||
std::string attrPath; /* path towards the derivation */
|
||||
|
||||
DrvInfo(EvalState & state) : state(&state) { };
|
||||
DrvInfo(EvalState & state, const string & attrPath, Bindings * attrs);
|
||||
DrvInfo(EvalState & state, std::string attrPath, Bindings * attrs);
|
||||
DrvInfo(EvalState & state, ref<Store> store, const std::string & drvPathWithOutputs);
|
||||
|
||||
string queryName() const;
|
||||
string querySystem() const;
|
||||
string queryDrvPath() const;
|
||||
string queryOutPath() const;
|
||||
string queryOutputName() const;
|
||||
std::string queryName() const;
|
||||
std::string querySystem() const;
|
||||
std::string queryDrvPath() const;
|
||||
std::string queryOutPath() const;
|
||||
std::string queryOutputName() const;
|
||||
/** Return the list of outputs. The "outputs to install" are determined by `meta.outputsToInstall`. */
|
||||
Outputs queryOutputs(bool onlyOutputsToInstall = false);
|
||||
|
||||
StringSet queryMetaNames();
|
||||
Value * queryMeta(const string & name);
|
||||
string queryMetaString(const string & name);
|
||||
NixInt queryMetaInt(const string & name, NixInt def);
|
||||
NixFloat queryMetaFloat(const string & name, NixFloat def);
|
||||
bool queryMetaBool(const string & name, bool def);
|
||||
void setMeta(const string & name, Value * v);
|
||||
Value * queryMeta(const std::string & name);
|
||||
std::string queryMetaString(const std::string & name);
|
||||
NixInt queryMetaInt(const std::string & name, NixInt def);
|
||||
NixFloat queryMetaFloat(const std::string & name, NixFloat def);
|
||||
bool queryMetaBool(const std::string & name, bool def);
|
||||
void setMeta(const std::string & name, Value * v);
|
||||
|
||||
/*
|
||||
MetaInfo queryMetaInfo(EvalState & state) const;
|
||||
MetaValue queryMetaInfo(EvalState & state, const string & name) const;
|
||||
*/
|
||||
|
||||
void setName(const string & s) { name = s; }
|
||||
void setDrvPath(const string & s) { drvPath = s; }
|
||||
void setOutPath(const string & s) { outPath = s; }
|
||||
void setName(const std::string & s) { name = s; }
|
||||
void setDrvPath(const std::string & s) { drvPath = s; }
|
||||
void setOutPath(const std::string & s) { outPath = s; }
|
||||
|
||||
void setFailed() { failed = true; };
|
||||
bool hasFailed() { return failed; };
|
||||
|
@ -70,9 +70,9 @@ public:
|
|||
|
||||
|
||||
#if HAVE_BOEHMGC
|
||||
typedef list<DrvInfo, traceable_allocator<DrvInfo> > DrvInfos;
|
||||
typedef std::list<DrvInfo, traceable_allocator<DrvInfo> > DrvInfos;
|
||||
#else
|
||||
typedef list<DrvInfo> DrvInfos;
|
||||
typedef std::list<DrvInfo> DrvInfos;
|
||||
#endif
|
||||
|
||||
|
||||
|
@ -81,7 +81,7 @@ typedef list<DrvInfo> DrvInfos;
|
|||
std::optional<DrvInfo> getDerivation(EvalState & state,
|
||||
Value & v, bool ignoreAssertionFailures);
|
||||
|
||||
void getDerivations(EvalState & state, Value & v, const string & pathPrefix,
|
||||
void getDerivations(EvalState & state, Value & v, const std::string & pathPrefix,
|
||||
Bindings & autoArgs, DrvInfos & drvs,
|
||||
bool ignoreAssertionFailures);
|
||||
|
||||
|
|
|
@ -37,10 +37,10 @@ class JSONSax : nlohmann::json_sax<json> {
|
|||
ValueMap attrs;
|
||||
std::unique_ptr<JSONState> resolve(EvalState & state) override
|
||||
{
|
||||
Value & v = parent->value(state);
|
||||
state.mkAttrs(v, attrs.size());
|
||||
auto attrs2 = state.buildBindings(attrs.size());
|
||||
for (auto & i : attrs)
|
||||
v.attrs->push_back(Attr(i.first, i.second));
|
||||
attrs2.insert(i.first, i.second);
|
||||
parent->value(state).mkAttrs(attrs2.alreadySorted());
|
||||
return std::move(parent);
|
||||
}
|
||||
void add() override { v = nullptr; }
|
||||
|
@ -76,45 +76,51 @@ class JSONSax : nlohmann::json_sax<json> {
|
|||
EvalState & state;
|
||||
std::unique_ptr<JSONState> rs;
|
||||
|
||||
template<typename T, typename... Args> inline bool handle_value(T f, Args... args)
|
||||
{
|
||||
f(rs->value(state), args...);
|
||||
rs->add();
|
||||
return true;
|
||||
}
|
||||
|
||||
public:
|
||||
JSONSax(EvalState & state, Value & v) : state(state), rs(new JSONState(&v)) {};
|
||||
|
||||
bool null()
|
||||
{
|
||||
return handle_value(mkNull);
|
||||
rs->value(state).mkNull();
|
||||
rs->add();
|
||||
return true;
|
||||
}
|
||||
|
||||
bool boolean(bool val)
|
||||
{
|
||||
return handle_value(mkBool, val);
|
||||
rs->value(state).mkBool(val);
|
||||
rs->add();
|
||||
return true;
|
||||
}
|
||||
|
||||
bool number_integer(number_integer_t val)
|
||||
{
|
||||
return handle_value(mkInt, val);
|
||||
rs->value(state).mkInt(val);
|
||||
rs->add();
|
||||
return true;
|
||||
}
|
||||
|
||||
bool number_unsigned(number_unsigned_t val)
|
||||
{
|
||||
return handle_value(mkInt, val);
|
||||
rs->value(state).mkInt(val);
|
||||
rs->add();
|
||||
return true;
|
||||
}
|
||||
|
||||
bool number_float(number_float_t val, const string_t & s)
|
||||
{
|
||||
return handle_value(mkFloat, val);
|
||||
rs->value(state).mkFloat(val);
|
||||
rs->add();
|
||||
return true;
|
||||
}
|
||||
|
||||
bool string(string_t & val)
|
||||
{
|
||||
return handle_value<void(Value&, const char*)>(mkString, val.c_str());
|
||||
rs->value(state).mkString(val);
|
||||
rs->add();
|
||||
return true;
|
||||
}
|
||||
|
||||
#if NLOHMANN_JSON_VERSION_MAJOR >= 3 && NLOHMANN_JSON_VERSION_MINOR >= 8
|
||||
bool binary(binary_t&)
|
||||
{
|
||||
|
@ -157,7 +163,7 @@ public:
|
|||
}
|
||||
};
|
||||
|
||||
void parseJSON(EvalState & state, const string & s_, Value & v)
|
||||
void parseJSON(EvalState & state, const std::string_view & s_, Value & v)
|
||||
{
|
||||
JSONSax parser(state, v);
|
||||
bool res = json::sax_parse(s_, &parser);
|
||||
|
|
|
@ -8,6 +8,6 @@ namespace nix {
|
|||
|
||||
MakeError(JSONParseError, EvalError);
|
||||
|
||||
void parseJSON(EvalState & state, const string & s, Value & v);
|
||||
void parseJSON(EvalState & state, const std::string_view & s, Value & v);
|
||||
|
||||
}
|
||||
|
|
|
@ -28,6 +28,8 @@ using namespace nix;
|
|||
|
||||
namespace nix {
|
||||
|
||||
// backup to recover from yyless(0)
|
||||
YYLTYPE prev_yylloc;
|
||||
|
||||
static void initLoc(YYLTYPE * loc)
|
||||
{
|
||||
|
@ -38,6 +40,8 @@ static void initLoc(YYLTYPE * loc)
|
|||
|
||||
static void adjustLoc(YYLTYPE * loc, const char * s, size_t len)
|
||||
{
|
||||
prev_yylloc = *loc;
|
||||
|
||||
loc->first_line = loc->last_line;
|
||||
loc->first_column = loc->last_column;
|
||||
|
||||
|
@ -60,28 +64,32 @@ static void adjustLoc(YYLTYPE * loc, const char * s, size_t len)
|
|||
}
|
||||
|
||||
|
||||
static Expr * unescapeStr(SymbolTable & symbols, const char * s, size_t length)
|
||||
// we make use of the fact that the parser receives a private copy of the input
|
||||
// string and can munge around in it.
|
||||
static StringToken unescapeStr(SymbolTable & symbols, char * s, size_t length)
|
||||
{
|
||||
string t;
|
||||
t.reserve(length);
|
||||
char * result = s;
|
||||
char * t = s;
|
||||
char c;
|
||||
// the input string is terminated with *two* NULs, so we can safely take
|
||||
// *one* character after the one being checked against.
|
||||
while ((c = *s++)) {
|
||||
if (c == '\\') {
|
||||
assert(*s);
|
||||
c = *s++;
|
||||
if (c == 'n') t += '\n';
|
||||
else if (c == 'r') t += '\r';
|
||||
else if (c == 't') t += '\t';
|
||||
else t += c;
|
||||
if (c == 'n') *t = '\n';
|
||||
else if (c == 'r') *t = '\r';
|
||||
else if (c == 't') *t = '\t';
|
||||
else *t = c;
|
||||
}
|
||||
else if (c == '\r') {
|
||||
/* Normalise CR and CR/LF into LF. */
|
||||
t += '\n';
|
||||
*t = '\n';
|
||||
if (*s == '\n') s++; /* cr/lf */
|
||||
}
|
||||
else t += c;
|
||||
else *t = c;
|
||||
t++;
|
||||
}
|
||||
return new ExprString(symbols.create(t));
|
||||
return {result, size_t(t - result)};
|
||||
}
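
A minimal sketch (not from the commit itself) of why the in-place rewrite above is safe: every escape sequence decodes to no more characters than it occupies, so the write cursor 't' can never overtake the read cursor 's'. Assuming a SymbolTable named 'symbols' is in scope:

    char buf[] = "a\\nb";                                   // raw text: a \ n b
    StringToken tok = unescapeStr(symbols, buf, sizeof(buf) - 1);
    // tok now views "a\nb" (3 chars) at the start of buf; nothing is allocated
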
|
||||
|
||||
|
||||
|
@ -134,7 +142,7 @@ or { return OR_KW; }
|
|||
\/\/ { return UPDATE; }
|
||||
\+\+ { return CONCAT; }
|
||||
|
||||
{ID} { yylval->id = strdup(yytext); return ID; }
|
||||
{ID} { yylval->id = {yytext, (size_t) yyleng}; return ID; }
|
||||
{INT} { errno = 0;
|
||||
try {
|
||||
yylval->n = boost::lexical_cast<int64_t>(yytext);
|
||||
|
@ -168,7 +176,7 @@ or { return OR_KW; }
|
|||
/* It is impossible to match strings ending with '$' with one
|
||||
regex because trailing contexts are only valid at the end
|
||||
of a rule. (A sane but undocumented limitation.) */
|
||||
yylval->e = unescapeStr(data->symbols, yytext, yyleng);
|
||||
yylval->str = unescapeStr(data->symbols, yytext, yyleng);
|
||||
return STR;
|
||||
}
|
||||
<STRING>\$\{ { PUSH_STATE(DEFAULT); return DOLLAR_CURLY; }
|
||||
|
@ -183,26 +191,26 @@ or { return OR_KW; }
|
|||
|
||||
\'\'(\ *\n)? { PUSH_STATE(IND_STRING); return IND_STRING_OPEN; }
|
||||
<IND_STRING>([^\$\']|\$[^\{\']|\'[^\'\$])+ {
|
||||
yylval->e = new ExprIndStr(yytext);
|
||||
yylval->str = {yytext, (size_t) yyleng, true};
|
||||
return IND_STR;
|
||||
}
|
||||
<IND_STRING>\'\'\$ |
|
||||
<IND_STRING>\$ {
|
||||
yylval->e = new ExprIndStr("$");
|
||||
yylval->str = {"$", 1};
|
||||
return IND_STR;
|
||||
}
|
||||
<IND_STRING>\'\'\' {
|
||||
yylval->e = new ExprIndStr("''");
|
||||
yylval->str = {"''", 2};
|
||||
return IND_STR;
|
||||
}
|
||||
<IND_STRING>\'\'\\{ANY} {
|
||||
yylval->e = unescapeStr(data->symbols, yytext + 2, yyleng - 2);
|
||||
yylval->str = unescapeStr(data->symbols, yytext + 2, yyleng - 2);
|
||||
return IND_STR;
|
||||
}
|
||||
<IND_STRING>\$\{ { PUSH_STATE(DEFAULT); return DOLLAR_CURLY; }
|
||||
<IND_STRING>\'\' { POP_STATE(); return IND_STRING_CLOSE; }
|
||||
<IND_STRING>\' {
|
||||
yylval->e = new ExprIndStr("'");
|
||||
yylval->str = {"'", 1};
|
||||
return IND_STR;
|
||||
}
|
||||
|
||||
|
@ -210,19 +218,20 @@ or { return OR_KW; }
|
|||
{HPATH_START}\$\{ {
|
||||
PUSH_STATE(PATH_START);
|
||||
yyless(0);
|
||||
*yylloc = prev_yylloc;
|
||||
}
|
||||
|
||||
<PATH_START>{PATH_SEG} {
|
||||
POP_STATE();
|
||||
PUSH_STATE(INPATH_SLASH);
|
||||
yylval->path = strdup(yytext);
|
||||
yylval->path = {yytext, (size_t) yyleng};
|
||||
return PATH;
|
||||
}
|
||||
|
||||
<PATH_START>{HPATH_START} {
|
||||
POP_STATE();
|
||||
PUSH_STATE(INPATH_SLASH);
|
||||
yylval->path = strdup(yytext);
|
||||
yylval->path = {yytext, (size_t) yyleng};
|
||||
return HPATH;
|
||||
}
|
||||
|
||||
|
@ -231,7 +240,7 @@ or { return OR_KW; }
|
|||
PUSH_STATE(INPATH_SLASH);
|
||||
else
|
||||
PUSH_STATE(INPATH);
|
||||
yylval->path = strdup(yytext);
|
||||
yylval->path = {yytext, (size_t) yyleng};
|
||||
return PATH;
|
||||
}
|
||||
{HPATH} {
|
||||
|
@ -239,7 +248,7 @@ or { return OR_KW; }
|
|||
PUSH_STATE(INPATH_SLASH);
|
||||
else
|
||||
PUSH_STATE(INPATH);
|
||||
yylval->path = strdup(yytext);
|
||||
yylval->path = {yytext, (size_t) yyleng};
|
||||
return HPATH;
|
||||
}
|
||||
|
||||
|
@ -255,7 +264,7 @@ or { return OR_KW; }
|
|||
PUSH_STATE(INPATH_SLASH);
|
||||
else
|
||||
PUSH_STATE(INPATH);
|
||||
yylval->e = new ExprString(data->symbols.create(string(yytext)));
|
||||
yylval->str = {yytext, (size_t) yyleng};
|
||||
return STR;
|
||||
}
|
||||
<INPATH>{ANY} |
|
||||
|
@ -265,6 +274,7 @@ or { return OR_KW; }
|
|||
context (it may be ')', ';', or something of that sort) */
|
||||
POP_STATE();
|
||||
yyless(0);
|
||||
*yylloc = prev_yylloc;
|
||||
return PATH_END;
|
||||
}
|
||||
|
||||
|
@ -273,8 +283,8 @@ or { return OR_KW; }
|
|||
throw ParseError("path has a trailing slash");
|
||||
}
|
||||
|
||||
{SPATH} { yylval->path = strdup(yytext); return SPATH; }
|
||||
{URI} { yylval->uri = strdup(yytext); return URI; }
|
||||
{SPATH} { yylval->path = {yytext, (size_t) yyleng}; return SPATH; }
|
||||
{URI} { yylval->uri = {yytext, (size_t) yyleng}; return URI; }
|
||||
|
||||
[ \t\r\n]+ /* eat up whitespace */
|
||||
\#[^\r\n]* /* single-line comments */
|
||||
|
|
|
@ -16,10 +16,10 @@ std::ostream & operator << (std::ostream & str, const Expr & e)
|
|||
return str;
|
||||
}
|
||||
|
||||
static void showString(std::ostream & str, const string & s)
|
||||
static void showString(std::ostream & str, std::string_view s)
|
||||
{
|
||||
str << '"';
|
||||
for (auto c : (string) s)
|
||||
for (auto c : s)
|
||||
if (c == '"' || c == '\\' || c == '$') str << "\\" << c;
|
||||
else if (c == '\n') str << "\\n";
|
||||
else if (c == '\r') str << "\\r";
|
||||
|
@ -28,7 +28,7 @@ static void showString(std::ostream & str, const string & s)
|
|||
str << '"';
|
||||
}
|
||||
|
||||
static void showId(std::ostream & str, const string & s)
|
||||
static void showId(std::ostream & str, std::string_view s)
|
||||
{
|
||||
if (s.empty())
|
||||
str << "\"\"";
|
||||
|
@ -103,11 +103,18 @@ void ExprAttrs::show(std::ostream & str) const
|
|||
{
|
||||
if (recursive) str << "rec ";
|
||||
str << "{ ";
|
||||
for (auto & i : attrs)
|
||||
if (i.second.inherited)
|
||||
str << "inherit " << i.first << " " << "; ";
|
||||
typedef const decltype(attrs)::value_type * Attr;
|
||||
std::vector<Attr> sorted;
|
||||
for (auto & i : attrs) sorted.push_back(&i);
|
||||
std::sort(sorted.begin(), sorted.end(), [](Attr a, Attr b) {
|
||||
return (const std::string &) a->first < (const std::string &) b->first;
|
||||
});
|
||||
for (auto & i : sorted) {
|
||||
if (i->second.inherited)
|
||||
str << "inherit " << i->first << " " << "; ";
|
||||
else
|
||||
str << i.first << " = " << *i.second.e << "; ";
|
||||
str << i->first << " = " << *i->second.e << "; ";
|
||||
}
|
||||
for (auto & i : dynamicAttrs)
|
||||
str << "\"${" << *i.nameExpr << "}\" = " << *i.valueExpr << "; ";
|
||||
str << "}";
|
||||
|
@ -124,7 +131,7 @@ void ExprList::show(std::ostream & str) const
|
|||
void ExprLambda::show(std::ostream & str) const
|
||||
{
|
||||
str << "(";
|
||||
if (matchAttrs) {
|
||||
if (hasFormals()) {
|
||||
str << "{ ";
|
||||
bool first = true;
|
||||
for (auto & i : formals->formals) {
|
||||
|
@ -143,6 +150,16 @@ void ExprLambda::show(std::ostream & str) const
|
|||
str << ": " << *body << ")";
|
||||
}
|
||||
|
||||
void ExprCall::show(std::ostream & str) const
|
||||
{
|
||||
str << '(' << *fun;
|
||||
for (auto e : args) {
|
||||
str << ' ';
|
||||
str << *e;
|
||||
}
|
||||
str << ')';
|
||||
}
|
||||
|
||||
void ExprLet::show(std::ostream & str) const
|
||||
{
|
||||
str << "(let ";
|
||||
|
@ -181,7 +198,7 @@ void ExprConcatStrings::show(std::ostream & str) const
|
|||
str << "(";
|
||||
for (auto & i : *es) {
|
||||
if (first) first = false; else str << " + ";
|
||||
str << *i;
|
||||
str << *i.second;
|
||||
}
|
||||
str << ")";
|
||||
}
|
||||
|
@ -201,7 +218,7 @@ std::ostream & operator << (std::ostream & str, const Pos & pos)
|
|||
auto f = format(ANSI_BOLD "%1%" ANSI_NORMAL ":%2%:%3%");
|
||||
switch (pos.origin) {
|
||||
case foFile:
|
||||
f % (string) pos.file;
|
||||
f % (const std::string &) pos.file;
|
||||
break;
|
||||
case foStdin:
|
||||
case foString:
|
||||
|
@ -217,7 +234,7 @@ std::ostream & operator << (std::ostream & str, const Pos & pos)
|
|||
}
|
||||
|
||||
|
||||
string showAttrPath(const AttrPath & attrPath)
|
||||
std::string showAttrPath(const AttrPath & attrPath)
|
||||
{
|
||||
std::ostringstream out;
|
||||
bool first = true;
|
||||
|
@ -263,13 +280,13 @@ void ExprVar::bindVars(const StaticEnv & env)
|
|||
/* Check whether the variable appears in the environment. If so,
|
||||
set its level and displacement. */
|
||||
const StaticEnv * curEnv;
|
||||
unsigned int level;
|
||||
Level level;
|
||||
int withLevel = -1;
|
||||
for (curEnv = &env, level = 0; curEnv; curEnv = curEnv->up, level++) {
|
||||
if (curEnv->isWith) {
|
||||
if (withLevel == -1) withLevel = level;
|
||||
} else {
|
||||
StaticEnv::Vars::const_iterator i = curEnv->vars.find(name);
|
||||
auto i = curEnv->find(name);
|
||||
if (i != curEnv->vars.end()) {
|
||||
fromWith = false;
|
||||
this->level = level;
|
||||
|
@ -311,14 +328,16 @@ void ExprOpHasAttr::bindVars(const StaticEnv & env)
|
|||
void ExprAttrs::bindVars(const StaticEnv & env)
|
||||
{
|
||||
const StaticEnv * dynamicEnv = &env;
|
||||
StaticEnv newEnv(false, &env);
|
||||
StaticEnv newEnv(false, &env, recursive ? attrs.size() : 0);
|
||||
|
||||
if (recursive) {
|
||||
dynamicEnv = &newEnv;
|
||||
|
||||
unsigned int displ = 0;
|
||||
Displacement displ = 0;
|
||||
for (auto & i : attrs)
|
||||
newEnv.vars[i.first] = i.second.displ = displ++;
|
||||
newEnv.vars.emplace_back(i.first, i.second.displ = displ++);
|
||||
|
||||
// No need to sort newEnv since attrs is in sorted order.
|
||||
|
||||
for (auto & i : attrs)
|
||||
i.second.e->bindVars(i.second.inherited ? env : newEnv);
|
||||
|
@ -342,15 +361,20 @@ void ExprList::bindVars(const StaticEnv & env)
|
|||
|
||||
void ExprLambda::bindVars(const StaticEnv & env)
|
||||
{
|
||||
StaticEnv newEnv(false, &env);
|
||||
StaticEnv newEnv(
|
||||
false, &env,
|
||||
(hasFormals() ? formals->formals.size() : 0) +
|
||||
(arg.empty() ? 0 : 1));
|
||||
|
||||
unsigned int displ = 0;
|
||||
Displacement displ = 0;
|
||||
|
||||
if (!arg.empty()) newEnv.vars[arg] = displ++;
|
||||
if (!arg.empty()) newEnv.vars.emplace_back(arg, displ++);
|
||||
|
||||
if (matchAttrs) {
|
||||
if (hasFormals()) {
|
||||
for (auto & i : formals->formals)
|
||||
newEnv.vars[i.name] = displ++;
|
||||
newEnv.vars.emplace_back(i.name, displ++);
|
||||
|
||||
newEnv.sort();
|
||||
|
||||
for (auto & i : formals->formals)
|
||||
if (i.def) i.def->bindVars(newEnv);
|
||||
|
@ -359,13 +383,22 @@ void ExprLambda::bindVars(const StaticEnv & env)
|
|||
body->bindVars(newEnv);
|
||||
}
|
||||
|
||||
void ExprCall::bindVars(const StaticEnv & env)
|
||||
{
|
||||
fun->bindVars(env);
|
||||
for (auto e : args)
|
||||
e->bindVars(env);
|
||||
}
|
||||
|
||||
void ExprLet::bindVars(const StaticEnv & env)
|
||||
{
|
||||
StaticEnv newEnv(false, &env);
|
||||
StaticEnv newEnv(false, &env, attrs->attrs.size());
|
||||
|
||||
unsigned int displ = 0;
|
||||
Displacement displ = 0;
|
||||
for (auto & i : attrs->attrs)
|
||||
newEnv.vars[i.first] = i.second.displ = displ++;
|
||||
newEnv.vars.emplace_back(i.first, i.second.displ = displ++);
|
||||
|
||||
// No need to sort newEnv since attrs->attrs is in sorted order.
|
||||
|
||||
for (auto & i : attrs->attrs)
|
||||
i.second.e->bindVars(i.second.inherited ? env : newEnv);
|
||||
|
@ -379,7 +412,7 @@ void ExprWith::bindVars(const StaticEnv & env)
|
|||
level so that `lookupVar' can look up variables in the previous
|
||||
`with' if this one doesn't contain the desired attribute. */
|
||||
const StaticEnv * curEnv;
|
||||
unsigned int level;
|
||||
Level level;
|
||||
prevWith = 0;
|
||||
for (curEnv = &env, level = 1; curEnv; curEnv = curEnv->up, level++)
|
||||
if (curEnv->isWith) {
|
||||
|
@ -413,7 +446,7 @@ void ExprOpNot::bindVars(const StaticEnv & env)
|
|||
void ExprConcatStrings::bindVars(const StaticEnv & env)
|
||||
{
|
||||
for (auto & i : *es)
|
||||
i->bindVars(env);
|
||||
i.second->bindVars(env);
|
||||
}
|
||||
|
||||
void ExprPos::bindVars(const StaticEnv & env)
|
||||
|
@ -435,9 +468,9 @@ void ExprLambda::setName(Symbol & name)
|
|||
}
|
||||
|
||||
|
||||
string ExprLambda::showNamePos() const
|
||||
std::string ExprLambda::showNamePos() const
|
||||
{
|
||||
return (format("%1% at %2%") % (name.set() ? "'" + (string) name + "'" : "anonymous function") % pos).str();
|
||||
return fmt("%1% at %2%", name.set() ? "'" + (std::string) name + "'" : "anonymous function", pos);
|
||||
}
|
||||
|
||||
|
||||
|
@ -447,10 +480,9 @@ string ExprLambda::showNamePos() const
|
|||
size_t SymbolTable::totalSize() const
|
||||
{
|
||||
size_t n = 0;
|
||||
for (auto & i : symbols)
|
||||
for (auto & i : store)
|
||||
n += i.size();
|
||||
return n;
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
|
|
@ -4,8 +4,6 @@
|
|||
#include "symbol-table.hh"
|
||||
#include "error.hh"
|
||||
|
||||
#include <map>
|
||||
|
||||
|
||||
namespace nix {
|
||||
|
||||
|
@ -28,18 +26,21 @@ struct Pos
|
|||
FileOrigin origin;
|
||||
Symbol file;
|
||||
unsigned int line, column;
|
||||
Pos() : origin(foString), line(0), column(0) { };
|
||||
|
||||
Pos() : origin(foString), line(0), column(0) { }
|
||||
Pos(FileOrigin origin, const Symbol & file, unsigned int line, unsigned int column)
|
||||
: origin(origin), file(file), line(line), column(column) { };
|
||||
: origin(origin), file(file), line(line), column(column) { }
|
||||
|
||||
operator bool() const
|
||||
{
|
||||
return line != 0;
|
||||
}
|
||||
|
||||
bool operator < (const Pos & p2) const
|
||||
{
|
||||
if (!line) return p2.line;
|
||||
if (!p2.line) return false;
|
||||
int d = ((string) file).compare((string) p2.file);
|
||||
int d = ((const std::string &) file).compare((const std::string &) p2.file);
|
||||
if (d < 0) return true;
|
||||
if (d > 0) return false;
|
||||
if (line < p2.line) return true;
|
||||
|
@ -70,7 +71,7 @@ struct AttrName
|
|||
|
||||
typedef std::vector<AttrName> AttrPath;
|
||||
|
||||
string showAttrPath(const AttrPath & attrPath);
|
||||
std::string showAttrPath(const AttrPath & attrPath);
|
||||
|
||||
|
||||
/* Abstract syntax of Nix expressions. */
|
||||
|
@ -96,7 +97,7 @@ struct ExprInt : Expr
|
|||
{
|
||||
NixInt n;
|
||||
Value v;
|
||||
ExprInt(NixInt n) : n(n) { mkInt(v, n); };
|
||||
ExprInt(NixInt n) : n(n) { v.mkInt(n); };
|
||||
COMMON_METHODS
|
||||
Value * maybeThunk(EvalState & state, Env & env);
|
||||
};
|
||||
|
@ -105,36 +106,32 @@ struct ExprFloat : Expr
|
|||
{
|
||||
NixFloat nf;
|
||||
Value v;
|
||||
ExprFloat(NixFloat nf) : nf(nf) { mkFloat(v, nf); };
|
||||
ExprFloat(NixFloat nf) : nf(nf) { v.mkFloat(nf); };
|
||||
COMMON_METHODS
|
||||
Value * maybeThunk(EvalState & state, Env & env);
|
||||
};
|
||||
|
||||
struct ExprString : Expr
|
||||
{
|
||||
Symbol s;
|
||||
std::string s;
|
||||
Value v;
|
||||
ExprString(const Symbol & s) : s(s) { mkString(v, s); };
|
||||
ExprString(std::string s) : s(std::move(s)) { v.mkString(this->s.data()); };
|
||||
COMMON_METHODS
|
||||
Value * maybeThunk(EvalState & state, Env & env);
|
||||
};
|
||||
|
||||
/* Temporary class used during parsing of indented strings. */
|
||||
struct ExprIndStr : Expr
|
||||
{
|
||||
string s;
|
||||
ExprIndStr(const string & s) : s(s) { };
|
||||
};
|
||||
|
||||
struct ExprPath : Expr
|
||||
{
|
||||
string s;
|
||||
std::string s;
|
||||
Value v;
|
||||
ExprPath(const string & s) : s(s) { v.mkPath(this->s.c_str()); };
|
||||
ExprPath(std::string s) : s(std::move(s)) { v.mkPath(this->s.c_str()); };
|
||||
COMMON_METHODS
|
||||
Value * maybeThunk(EvalState & state, Env & env);
|
||||
};
|
||||
|
||||
typedef uint32_t Level;
|
||||
typedef uint32_t Displacement;
|
||||
|
||||
struct ExprVar : Expr
|
||||
{
|
||||
Pos pos;
|
||||
|
@ -150,8 +147,8 @@ struct ExprVar : Expr
|
|||
value is obtained by getting the attribute named `name' from
|
||||
the set stored in the environment that is `level' levels up
|
||||
from the current one.*/
|
||||
unsigned int level;
|
||||
unsigned int displ;
|
||||
Level level;
|
||||
Displacement displ;
|
||||
|
||||
ExprVar(const Symbol & name) : name(name) { };
|
||||
ExprVar(const Pos & pos, const Symbol & name) : pos(pos), name(name) { };
|
||||
|
@ -185,7 +182,7 @@ struct ExprAttrs : Expr
|
|||
bool inherited;
|
||||
Expr * e;
|
||||
Pos pos;
|
||||
unsigned int displ; // displacement
|
||||
Displacement displ; // displacement
|
||||
AttrDef(Expr * e, const Pos & pos, bool inherited=false)
|
||||
: inherited(inherited), e(e), pos(pos) { };
|
||||
AttrDef() { };
|
||||
|
@ -222,10 +219,25 @@ struct Formal
|
|||
|
||||
struct Formals
|
||||
{
|
||||
typedef std::list<Formal> Formals_;
|
||||
typedef std::vector<Formal> Formals_;
|
||||
Formals_ formals;
|
||||
std::set<Symbol> argNames; // used during parsing
|
||||
bool ellipsis;
|
||||
|
||||
bool has(Symbol arg) const {
|
||||
auto it = std::lower_bound(formals.begin(), formals.end(), arg,
|
||||
[] (const Formal & f, const Symbol & sym) { return f.name < sym; });
|
||||
return it != formals.end() && it->name == arg;
|
||||
}
|
||||
|
||||
std::vector<Formal> lexicographicOrder() const
|
||||
{
|
||||
std::vector<Formal> result(formals.begin(), formals.end());
|
||||
std::sort(result.begin(), result.end(),
|
||||
[] (const Formal & a, const Formal & b) {
|
||||
return std::string_view(a.name) < std::string_view(b.name);
|
||||
});
|
||||
return result;
|
||||
}
|
||||
};
|
||||
|
||||
struct ExprLambda : Expr
|
||||
|
@ -233,20 +245,26 @@ struct ExprLambda : Expr
|
|||
Pos pos;
|
||||
Symbol name;
|
||||
Symbol arg;
|
||||
bool matchAttrs;
|
||||
Formals * formals;
|
||||
Expr * body;
|
||||
ExprLambda(const Pos & pos, const Symbol & arg, bool matchAttrs, Formals * formals, Expr * body)
|
||||
: pos(pos), arg(arg), matchAttrs(matchAttrs), formals(formals), body(body)
|
||||
ExprLambda(const Pos & pos, const Symbol & arg, Formals * formals, Expr * body)
|
||||
: pos(pos), arg(arg), formals(formals), body(body)
|
||||
{
|
||||
if (!arg.empty() && formals && formals->argNames.find(arg) != formals->argNames.end())
|
||||
throw ParseError({
|
||||
.msg = hintfmt("duplicate formal function argument '%1%'", arg),
|
||||
.errPos = pos
|
||||
});
|
||||
};
|
||||
void setName(Symbol & name);
|
||||
string showNamePos() const;
|
||||
std::string showNamePos() const;
|
||||
inline bool hasFormals() const { return formals != nullptr; }
|
||||
COMMON_METHODS
|
||||
};
|
||||
|
||||
struct ExprCall : Expr
|
||||
{
|
||||
Expr * fun;
|
||||
std::vector<Expr *> args;
|
||||
Pos pos;
|
||||
ExprCall(const Pos & pos, Expr * fun, std::vector<Expr *> && args)
|
||||
: fun(fun), args(args), pos(pos)
|
||||
{ }
|
||||
COMMON_METHODS
|
||||
};
|
||||
|
||||
|
@ -308,7 +326,6 @@ struct ExprOpNot : Expr
|
|||
void eval(EvalState & state, Env & env, Value & v); \
|
||||
};
|
||||
|
||||
MakeBinOp(ExprApp, "")
|
||||
MakeBinOp(ExprOpEq, "==")
|
||||
MakeBinOp(ExprOpNEq, "!=")
|
||||
MakeBinOp(ExprOpAnd, "&&")
|
||||
|
@ -321,8 +338,8 @@ struct ExprConcatStrings : Expr
|
|||
{
|
||||
Pos pos;
|
||||
bool forceString;
|
||||
vector<Expr *> * es;
|
||||
ExprConcatStrings(const Pos & pos, bool forceString, vector<Expr *> * es)
|
||||
std::vector<std::pair<Pos, Expr *> > * es;
|
||||
ExprConcatStrings(const Pos & pos, bool forceString, std::vector<std::pair<Pos, Expr *> > * es)
|
||||
: pos(pos), forceString(forceString), es(es) { };
|
||||
COMMON_METHODS
|
||||
};
|
||||
|
@ -342,9 +359,39 @@ struct StaticEnv
|
|||
{
|
||||
bool isWith;
|
||||
const StaticEnv * up;
|
||||
typedef std::map<Symbol, unsigned int> Vars;
|
||||
|
||||
// Note: these must be in sorted order.
|
||||
typedef std::vector<std::pair<Symbol, Displacement>> Vars;
|
||||
Vars vars;
|
||||
StaticEnv(bool isWith, const StaticEnv * up) : isWith(isWith), up(up) { };
|
||||
|
||||
StaticEnv(bool isWith, const StaticEnv * up, size_t expectedSize = 0) : isWith(isWith), up(up) {
|
||||
vars.reserve(expectedSize);
|
||||
};
|
||||
|
||||
void sort()
|
||||
{
|
||||
std::stable_sort(vars.begin(), vars.end(),
|
||||
[](const Vars::value_type & a, const Vars::value_type & b) { return a.first < b.first; });
|
||||
}
|
||||
|
||||
void deduplicate()
|
||||
{
|
||||
auto it = vars.begin(), jt = it, end = vars.end();
|
||||
while (jt != end) {
|
||||
*it = *jt++;
|
||||
while (jt != end && it->first == jt->first) *it = *jt++;
|
||||
it++;
|
||||
}
|
||||
vars.erase(it, end);
|
||||
}
|
||||
|
||||
Vars::const_iterator find(const Symbol & name) const
|
||||
{
|
||||
Vars::value_type key(name, 0);
|
||||
auto i = std::lower_bound(vars.begin(), vars.end(), key);
|
||||
if (i != vars.end() && i->first == name) return i;
|
||||
return vars.end();
|
||||
}
|
||||
};
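
A minimal sketch (not from the commit itself) of how the sorted-vector StaticEnv is meant to be used: entries are appended during bindVars, sorted once, and then looked up with the binary-search find() above. Assuming a SymbolTable named 'symbols' is in scope:

    StaticEnv env(false, nullptr, 2);               // expectedSize only reserves the vector
    env.vars.emplace_back(symbols.create("x"), 0);
    env.vars.emplace_back(symbols.create("y"), 1);
    env.sort();                                     // find() requires vars to be sorted
    auto it = env.find(symbols.create("x"));        // std::lower_bound over the sorted vector
    // it != env.vars.end() and it->second == 0 at this point
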
|
||||
|
||||
|
||||
|
|
|
@ -16,6 +16,8 @@
|
|||
#ifndef BISON_HEADER
|
||||
#define BISON_HEADER
|
||||
|
||||
#include <variant>
|
||||
|
||||
#include "util.hh"
|
||||
|
||||
#include "nixexpr.hh"
|
||||
|
@ -33,16 +35,28 @@ namespace nix {
|
|||
Symbol file;
|
||||
FileOrigin origin;
|
||||
std::optional<ErrorInfo> error;
|
||||
Symbol sLetBody;
|
||||
ParseData(EvalState & state)
|
||||
: state(state)
|
||||
, symbols(state.symbols)
|
||||
, sLetBody(symbols.create("<let-body>"))
|
||||
{ };
|
||||
};
|
||||
|
||||
struct ParserFormals {
|
||||
std::vector<Formal> formals;
|
||||
bool ellipsis = false;
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
// using C a struct allows us to avoid having to define the special
|
||||
// members that using string_view here would implicitly delete.
|
||||
struct StringToken {
|
||||
const char * p;
|
||||
size_t l;
|
||||
bool hasIndentation;
|
||||
operator std::string_view() const { return {p, l}; }
|
||||
};
|
||||
|
||||
#define YY_DECL int yylex \
|
||||
(YYSTYPE * yylval_param, YYLTYPE * yylloc_param, yyscan_t yyscanner, nix::ParseData * data)
|
||||
|
||||
|
@ -126,14 +140,14 @@ static void addAttr(ExprAttrs * attrs, AttrPath & attrPath,
|
|||
auto j2 = jAttrs->attrs.find(ad.first);
|
||||
if (j2 != jAttrs->attrs.end()) // Attr already defined in iAttrs, error.
|
||||
dupAttr(ad.first, j2->second.pos, ad.second.pos);
|
||||
jAttrs->attrs[ad.first] = ad.second;
|
||||
jAttrs->attrs.emplace(ad.first, ad.second);
|
||||
}
|
||||
} else {
|
||||
dupAttr(attrPath, pos, j->second.pos);
|
||||
}
|
||||
} else {
|
||||
// This attr path is not defined. Let's create it.
|
||||
attrs->attrs[i->symbol] = ExprAttrs::AttrDef(e, pos);
|
||||
attrs->attrs.emplace(i->symbol, ExprAttrs::AttrDef(e, pos));
|
||||
e->setName(i->symbol);
|
||||
}
|
||||
} else {
|
||||
|
@ -142,21 +156,46 @@ static void addAttr(ExprAttrs * attrs, AttrPath & attrPath,
|
|||
}
|
||||
|
||||
|
||||
static void addFormal(const Pos & pos, Formals * formals, const Formal & formal)
|
||||
static Formals * toFormals(ParseData & data, ParserFormals * formals,
|
||||
Pos pos = noPos, Symbol arg = {})
|
||||
{
|
||||
if (!formals->argNames.insert(formal.name).second)
|
||||
std::sort(formals->formals.begin(), formals->formals.end(),
|
||||
[] (const auto & a, const auto & b) {
|
||||
return std::tie(a.name, a.pos) < std::tie(b.name, b.pos);
|
||||
});
|
||||
|
||||
std::optional<std::pair<Symbol, Pos>> duplicate;
|
||||
for (size_t i = 0; i + 1 < formals->formals.size(); i++) {
|
||||
if (formals->formals[i].name != formals->formals[i + 1].name)
|
||||
continue;
|
||||
std::pair thisDup{formals->formals[i].name, formals->formals[i + 1].pos};
|
||||
duplicate = std::min(thisDup, duplicate.value_or(thisDup));
|
||||
}
|
||||
if (duplicate)
|
||||
throw ParseError({
|
||||
.msg = hintfmt("duplicate formal function argument '%1%'",
|
||||
formal.name),
|
||||
.msg = hintfmt("duplicate formal function argument '%1%'", duplicate->first),
|
||||
.errPos = duplicate->second
|
||||
});
|
||||
|
||||
Formals result;
|
||||
result.ellipsis = formals->ellipsis;
|
||||
result.formals = std::move(formals->formals);
|
||||
|
||||
if (arg.set() && result.has(arg))
|
||||
throw ParseError({
|
||||
.msg = hintfmt("duplicate formal function argument '%1%'", arg),
|
||||
.errPos = pos
|
||||
});
|
||||
formals->formals.push_front(formal);
|
||||
|
||||
delete formals;
|
||||
return new Formals(std::move(result));
|
||||
}
|
||||
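toFormals above finds duplicate formal arguments by sorting the formals by (name, pos) and then comparing neighbouring entries, keeping the duplicate that sorts lowest so the reported position is deterministic. A standalone sketch of that adjacent-neighbour check, with simplified stand-in types:

```cpp
#include <algorithm>
#include <iostream>
#include <optional>
#include <string>
#include <tuple>
#include <vector>

struct Formal { std::string name; int pos; }; // pos: where the formal appeared

// Return the (name, pos) of the duplicate that sorts lowest,
// or std::nullopt if all names are distinct.
std::optional<std::pair<std::string, int>> findDuplicate(std::vector<Formal> formals)
{
    std::sort(formals.begin(), formals.end(),
        [](const Formal & a, const Formal & b) {
            return std::tie(a.name, a.pos) < std::tie(b.name, b.pos);
        });

    std::optional<std::pair<std::string, int>> duplicate;
    for (size_t i = 0; i + 1 < formals.size(); i++) {
        if (formals[i].name != formals[i + 1].name) continue;
        std::pair thisDup{formals[i].name, formals[i + 1].pos};
        duplicate = std::min(thisDup, duplicate.value_or(thisDup));
    }
    return duplicate;
}

int main()
{
    std::vector<Formal> fs{{"a", 1}, {"b", 2}, {"a", 3}, {"c", 4}};
    if (auto dup = findDuplicate(fs))
        std::cout << "duplicate formal '" << dup->first
                  << "' at position " << dup->second << "\n"; // 'a' at 3
}
```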
|
||||
|
||||
static Expr * stripIndentation(const Pos & pos, SymbolTable & symbols, vector<Expr *> & es)
|
||||
static Expr * stripIndentation(const Pos & pos, SymbolTable & symbols,
|
||||
std::vector<std::pair<Pos, std::variant<Expr *, StringToken> > > & es)
|
||||
{
|
||||
if (es.empty()) return new ExprString(symbols.create(""));
|
||||
if (es.empty()) return new ExprString("");
|
||||
|
||||
/* Figure out the minimum indentation. Note that by design
|
||||
whitespace-only final lines are not taken into account. (So
|
||||
|
@ -164,21 +203,21 @@ static Expr * stripIndentation(const Pos & pos, SymbolTable & symbols, vector<Ex
|
|||
bool atStartOfLine = true; /* = seen only whitespace in the current line */
|
||||
size_t minIndent = 1000000;
|
||||
size_t curIndent = 0;
|
||||
for (auto & i : es) {
|
||||
ExprIndStr * e = dynamic_cast<ExprIndStr *>(i);
|
||||
if (!e) {
|
||||
/* Anti-quotations end the current start-of-line whitespace. */
|
||||
for (auto & [i_pos, i] : es) {
|
||||
auto * str = std::get_if<StringToken>(&i);
|
||||
if (!str || !str->hasIndentation) {
|
||||
/* Anti-quotations and escaped characters end the current start-of-line whitespace. */
|
||||
if (atStartOfLine) {
|
||||
atStartOfLine = false;
|
||||
if (curIndent < minIndent) minIndent = curIndent;
|
||||
}
|
||||
continue;
|
||||
}
|
||||
for (size_t j = 0; j < e->s.size(); ++j) {
|
||||
for (size_t j = 0; j < str->l; ++j) {
|
||||
if (atStartOfLine) {
|
||||
if (e->s[j] == ' ')
|
||||
if (str->p[j] == ' ')
|
||||
curIndent++;
|
||||
else if (e->s[j] == '\n') {
|
||||
else if (str->p[j] == '\n') {
|
||||
/* Empty line, doesn't influence minimum
|
||||
indentation. */
|
||||
curIndent = 0;
|
||||
|
@ -186,7 +225,7 @@ static Expr * stripIndentation(const Pos & pos, SymbolTable & symbols, vector<Ex
|
|||
atStartOfLine = false;
|
||||
if (curIndent < minIndent) minIndent = curIndent;
|
||||
}
|
||||
} else if (e->s[j] == '\n') {
|
||||
} else if (str->p[j] == '\n') {
|
||||
atStartOfLine = true;
|
||||
curIndent = 0;
|
||||
}
|
||||
|
@ -194,53 +233,54 @@ static Expr * stripIndentation(const Pos & pos, SymbolTable & symbols, vector<Ex
|
|||
}
|
||||
|
||||
/* Strip spaces from each line. */
|
||||
vector<Expr *> * es2 = new vector<Expr *>;
|
||||
std::vector<std::pair<Pos, Expr *> > * es2 = new std::vector<std::pair<Pos, Expr *> >;
|
||||
atStartOfLine = true;
|
||||
size_t curDropped = 0;
|
||||
size_t n = es.size();
|
||||
for (vector<Expr *>::iterator i = es.begin(); i != es.end(); ++i, --n) {
|
||||
ExprIndStr * e = dynamic_cast<ExprIndStr *>(*i);
|
||||
if (!e) {
|
||||
atStartOfLine = false;
|
||||
curDropped = 0;
|
||||
es2->push_back(*i);
|
||||
continue;
|
||||
}
|
||||
|
||||
string s2;
|
||||
for (size_t j = 0; j < e->s.size(); ++j) {
|
||||
auto i = es.begin();
|
||||
const auto trimExpr = [&] (Expr * e) {
|
||||
atStartOfLine = false;
|
||||
curDropped = 0;
|
||||
es2->emplace_back(i->first, e);
|
||||
};
|
||||
const auto trimString = [&] (const StringToken & t) {
|
||||
std::string s2;
|
||||
for (size_t j = 0; j < t.l; ++j) {
|
||||
if (atStartOfLine) {
|
||||
if (e->s[j] == ' ') {
|
||||
if (t.p[j] == ' ') {
|
||||
if (curDropped++ >= minIndent)
|
||||
s2 += e->s[j];
|
||||
s2 += t.p[j];
|
||||
}
|
||||
else if (e->s[j] == '\n') {
|
||||
else if (t.p[j] == '\n') {
|
||||
curDropped = 0;
|
||||
s2 += e->s[j];
|
||||
s2 += t.p[j];
|
||||
} else {
|
||||
atStartOfLine = false;
|
||||
curDropped = 0;
|
||||
s2 += e->s[j];
|
||||
s2 += t.p[j];
|
||||
}
|
||||
} else {
|
||||
s2 += e->s[j];
|
||||
if (e->s[j] == '\n') atStartOfLine = true;
|
||||
s2 += t.p[j];
|
||||
if (t.p[j] == '\n') atStartOfLine = true;
|
||||
}
|
||||
}
|
||||
|
||||
/* Remove the last line if it is empty and consists only of
|
||||
spaces. */
|
||||
if (n == 1) {
|
||||
string::size_type p = s2.find_last_of('\n');
|
||||
if (p != string::npos && s2.find_first_not_of(' ', p + 1) == string::npos)
|
||||
s2 = string(s2, 0, p + 1);
|
||||
std::string::size_type p = s2.find_last_of('\n');
|
||||
if (p != std::string::npos && s2.find_first_not_of(' ', p + 1) == std::string::npos)
|
||||
s2 = std::string(s2, 0, p + 1);
|
||||
}
|
||||
|
||||
es2->push_back(new ExprString(symbols.create(s2)));
|
||||
es2->emplace_back(i->first, new ExprString(s2));
|
||||
};
|
||||
for (; i != es.end(); ++i, --n) {
|
||||
std::visit(overloaded { trimExpr, trimString }, i->second);
|
||||
}
|
||||
|
||||
/* If this is a single string, then don't do a concatenation. */
|
||||
return es2->size() == 1 && dynamic_cast<ExprString *>((*es2)[0]) ? (*es2)[0] : new ExprConcatStrings(pos, true, es2);
|
||||
return es2->size() == 1 && dynamic_cast<ExprString *>((*es2)[0].second) ? (*es2)[0].second : new ExprConcatStrings(pos, true, es2);
|
||||
}
|
||||
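stripIndentation above works in two passes over the pieces of an indented ''...'' string: the first pass computes the minimum indentation of the lines that start with literal text, the second strips that many leading spaces from every line. The sketch below applies the same two-pass idea to a plain string; it deliberately ignores the antiquotation and whitespace-only-final-line handling of the real code:

```cpp
#include <algorithm>
#include <iostream>
#include <limits>
#include <string>

// Strip the common leading indentation from every line of s,
// the way Nix's ''...'' strings are de-indented.
std::string stripIndentation(const std::string & s)
{
    // Pass 1: find the minimum indentation of non-empty lines.
    size_t minIndent = std::numeric_limits<size_t>::max();
    size_t curIndent = 0;
    bool atStartOfLine = true;
    for (char c : s) {
        if (atStartOfLine) {
            if (c == ' ') curIndent++;
            else if (c == '\n') curIndent = 0;   // empty line: ignore
            else { atStartOfLine = false; minIndent = std::min(minIndent, curIndent); }
        } else if (c == '\n') { atStartOfLine = true; curIndent = 0; }
    }
    if (minIndent == std::numeric_limits<size_t>::max()) minIndent = 0;

    // Pass 2: drop up to minIndent leading spaces from each line.
    std::string out;
    size_t dropped = 0;
    atStartOfLine = true;
    for (char c : s) {
        if (atStartOfLine && c == ' ' && dropped < minIndent) { dropped++; continue; }
        if (c == '\n') { atStartOfLine = true; dropped = 0; }
        else atStartOfLine = false;
        out += c;
    }
    return out;
}

int main()
{
    std::string s = "  foo\n    bar\n  baz\n";
    std::cout << stripIndentation(s);
    // foo
    //   bar
    // baz
}
```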
|
||||
|
||||
|
@ -271,29 +311,32 @@ void yyerror(YYLTYPE * loc, yyscan_t scanner, ParseData * data, const char * err
|
|||
nix::Expr * e;
|
||||
nix::ExprList * list;
|
||||
nix::ExprAttrs * attrs;
|
||||
nix::Formals * formals;
|
||||
nix::ParserFormals * formals;
|
||||
nix::Formal * formal;
|
||||
nix::NixInt n;
|
||||
nix::NixFloat nf;
|
||||
const char * id; // !!! -> Symbol
|
||||
char * path;
|
||||
char * uri;
|
||||
StringToken id; // !!! -> Symbol
|
||||
StringToken path;
|
||||
StringToken uri;
|
||||
StringToken str;
|
||||
std::vector<nix::AttrName> * attrNames;
|
||||
std::vector<nix::Expr *> * string_parts;
|
||||
std::vector<std::pair<nix::Pos, nix::Expr *> > * string_parts;
|
||||
std::vector<std::pair<nix::Pos, std::variant<nix::Expr *, StringToken> > > * ind_string_parts;
|
||||
}
|
||||
|
||||
%type <e> start expr expr_function expr_if expr_op
|
||||
%type <e> expr_app expr_select expr_simple
|
||||
%type <e> expr_select expr_simple expr_app
|
||||
%type <list> expr_list
|
||||
%type <attrs> binds
|
||||
%type <formals> formals
|
||||
%type <formal> formal
|
||||
%type <attrNames> attrs attrpath
|
||||
%type <string_parts> string_parts_interpolated ind_string_parts
|
||||
%type <string_parts> string_parts_interpolated
|
||||
%type <ind_string_parts> ind_string_parts
|
||||
%type <e> path_start string_parts string_attr
|
||||
%type <id> attr
|
||||
%token <id> ID ATTRPATH
|
||||
%token <e> STR IND_STR
|
||||
%token <str> STR IND_STR
|
||||
%token <n> INT
|
||||
%token <nf> FLOAT
|
||||
%token <path> PATH HPATH SPATH PATH_END
|
||||
|
@ -324,13 +367,19 @@ expr: expr_function;
|
|||
|
||||
expr_function
|
||||
: ID ':' expr_function
|
||||
{ $$ = new ExprLambda(CUR_POS, data->symbols.create($1), false, 0, $3); }
|
||||
{ $$ = new ExprLambda(CUR_POS, data->symbols.create($1), 0, $3); }
|
||||
| '{' formals '}' ':' expr_function
|
||||
{ $$ = new ExprLambda(CUR_POS, data->symbols.create(""), true, $2, $5); }
|
||||
{ $$ = new ExprLambda(CUR_POS, data->symbols.create(""), toFormals(*data, $2), $5); }
|
||||
| '{' formals '}' '@' ID ':' expr_function
|
||||
{ $$ = new ExprLambda(CUR_POS, data->symbols.create($5), true, $2, $7); }
|
||||
{
|
||||
Symbol arg = data->symbols.create($5);
|
||||
$$ = new ExprLambda(CUR_POS, arg, toFormals(*data, $2, CUR_POS, arg), $7);
|
||||
}
|
||||
| ID '@' '{' formals '}' ':' expr_function
|
||||
{ $$ = new ExprLambda(CUR_POS, data->symbols.create($1), true, $4, $7); }
|
||||
{
|
||||
Symbol arg = data->symbols.create($1);
|
||||
$$ = new ExprLambda(CUR_POS, arg, toFormals(*data, $4, CUR_POS, arg), $7);
|
||||
}
|
||||
| ASSERT expr ';' expr_function
|
||||
{ $$ = new ExprAssert(CUR_POS, $2, $4); }
|
||||
| WITH expr ';' expr_function
|
||||
|
@ -353,31 +402,36 @@ expr_if
|
|||
|
||||
expr_op
|
||||
: '!' expr_op %prec NOT { $$ = new ExprOpNot($2); }
|
||||
| '-' expr_op %prec NEGATE { $$ = new ExprApp(CUR_POS, new ExprApp(new ExprVar(data->symbols.create("__sub")), new ExprInt(0)), $2); }
|
||||
| '-' expr_op %prec NEGATE { $$ = new ExprCall(CUR_POS, new ExprVar(data->symbols.create("__sub")), {new ExprInt(0), $2}); }
|
||||
| expr_op EQ expr_op { $$ = new ExprOpEq($1, $3); }
|
||||
| expr_op NEQ expr_op { $$ = new ExprOpNEq($1, $3); }
|
||||
| expr_op '<' expr_op { $$ = new ExprApp(CUR_POS, new ExprApp(new ExprVar(data->symbols.create("__lessThan")), $1), $3); }
|
||||
| expr_op LEQ expr_op { $$ = new ExprOpNot(new ExprApp(CUR_POS, new ExprApp(new ExprVar(data->symbols.create("__lessThan")), $3), $1)); }
|
||||
| expr_op '>' expr_op { $$ = new ExprApp(CUR_POS, new ExprApp(new ExprVar(data->symbols.create("__lessThan")), $3), $1); }
|
||||
| expr_op GEQ expr_op { $$ = new ExprOpNot(new ExprApp(CUR_POS, new ExprApp(new ExprVar(data->symbols.create("__lessThan")), $1), $3)); }
|
||||
| expr_op '<' expr_op { $$ = new ExprCall(CUR_POS, new ExprVar(data->symbols.create("__lessThan")), {$1, $3}); }
|
||||
| expr_op LEQ expr_op { $$ = new ExprOpNot(new ExprCall(CUR_POS, new ExprVar(data->symbols.create("__lessThan")), {$3, $1})); }
|
||||
| expr_op '>' expr_op { $$ = new ExprCall(CUR_POS, new ExprVar(data->symbols.create("__lessThan")), {$3, $1}); }
|
||||
| expr_op GEQ expr_op { $$ = new ExprOpNot(new ExprCall(CUR_POS, new ExprVar(data->symbols.create("__lessThan")), {$1, $3})); }
|
||||
| expr_op AND expr_op { $$ = new ExprOpAnd(CUR_POS, $1, $3); }
|
||||
| expr_op OR expr_op { $$ = new ExprOpOr(CUR_POS, $1, $3); }
|
||||
| expr_op IMPL expr_op { $$ = new ExprOpImpl(CUR_POS, $1, $3); }
|
||||
| expr_op UPDATE expr_op { $$ = new ExprOpUpdate(CUR_POS, $1, $3); }
|
||||
| expr_op '?' attrpath { $$ = new ExprOpHasAttr($1, *$3); }
|
||||
| expr_op '+' expr_op
|
||||
{ $$ = new ExprConcatStrings(CUR_POS, false, new vector<Expr *>({$1, $3})); }
|
||||
| expr_op '-' expr_op { $$ = new ExprApp(CUR_POS, new ExprApp(new ExprVar(data->symbols.create("__sub")), $1), $3); }
|
||||
| expr_op '*' expr_op { $$ = new ExprApp(CUR_POS, new ExprApp(new ExprVar(data->symbols.create("__mul")), $1), $3); }
|
||||
| expr_op '/' expr_op { $$ = new ExprApp(CUR_POS, new ExprApp(new ExprVar(data->symbols.create("__div")), $1), $3); }
|
||||
{ $$ = new ExprConcatStrings(CUR_POS, false, new std::vector<std::pair<Pos, Expr *> >({{makeCurPos(@1, data), $1}, {makeCurPos(@3, data), $3}})); }
|
||||
| expr_op '-' expr_op { $$ = new ExprCall(CUR_POS, new ExprVar(data->symbols.create("__sub")), {$1, $3}); }
|
||||
| expr_op '*' expr_op { $$ = new ExprCall(CUR_POS, new ExprVar(data->symbols.create("__mul")), {$1, $3}); }
|
||||
| expr_op '/' expr_op { $$ = new ExprCall(CUR_POS, new ExprVar(data->symbols.create("__div")), {$1, $3}); }
|
||||
| expr_op CONCAT expr_op { $$ = new ExprOpConcatLists(CUR_POS, $1, $3); }
|
||||
| expr_app
|
||||
;
|
||||
|
||||
expr_app
|
||||
: expr_app expr_select
|
||||
{ $$ = new ExprApp(CUR_POS, $1, $2); }
|
||||
| expr_select { $$ = $1; }
|
||||
: expr_app expr_select {
|
||||
if (auto e2 = dynamic_cast<ExprCall *>($1)) {
|
||||
e2->args.push_back($2);
|
||||
$$ = $1;
|
||||
} else
|
||||
$$ = new ExprCall(CUR_POS, $1, {$2});
|
||||
}
|
||||
| expr_select
|
||||
;
|
||||
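The expr_app rule above no longer nests one binary application per argument: if the function part is already an ExprCall, the new argument is appended to its argument vector, so `f a b c` parses as a single call node with three arguments. A toy AST showing that left-fold flattening (the types here are invented for illustration, not the parser's own):

```cpp
#include <iostream>
#include <string>
#include <vector>

// Toy AST: either a variable or an n-ary call.
struct Expr { virtual ~Expr() = default; };
struct Var  : Expr { std::string name; explicit Var(std::string n) : name(std::move(n)) {} };
struct Call : Expr {
    Expr * fun;
    std::vector<Expr *> args;
    Call(Expr * fun, std::vector<Expr *> args) : fun(fun), args(std::move(args)) {}
};

// Apply 'arg' to 'fun': extend an existing call instead of nesting a new one.
Expr * apply(Expr * fun, Expr * arg)
{
    if (auto * call = dynamic_cast<Call *>(fun)) {
        call->args.push_back(arg);
        return call;
    }
    return new Call(fun, {arg});
}

int main()
{
    // f a b c  ==>  Call(f, [a, b, c])  rather than  Call(Call(Call(f,[a]),[b]),[c])
    Expr * e = new Var("f");
    for (auto name : {"a", "b", "c"})
        e = apply(e, new Var(name));
    auto * call = dynamic_cast<Call *>(e);
    std::cout << "arguments: " << call->args.size() << "\n"; // prints 3
    // (memory is intentionally leaked in this tiny sketch)
}
```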
|
||||
expr_select
|
||||
|
@ -388,13 +442,14 @@ expr_select
|
|||
| /* Backwards compatibility: because Nixpkgs has a rarely used
|
||||
function named ‘or’, allow stuff like ‘map or [...]’. */
|
||||
expr_simple OR_KW
|
||||
{ $$ = new ExprApp(CUR_POS, $1, new ExprVar(CUR_POS, data->symbols.create("or"))); }
|
||||
{ $$ = new ExprCall(CUR_POS, $1, {new ExprVar(CUR_POS, data->symbols.create("or"))}); }
|
||||
| expr_simple { $$ = $1; }
|
||||
;
|
||||
|
||||
expr_simple
|
||||
: ID {
|
||||
if (strcmp($1, "__curPos") == 0)
|
||||
std::string_view s = "__curPos";
|
||||
if ($1.l == s.size() && strncmp($1.p, s.data(), s.size()) == 0)
|
||||
$$ = new ExprPos(CUR_POS);
|
||||
else
|
||||
$$ = new ExprVar(CUR_POS, data->symbols.create($1));
|
||||
|
@ -407,24 +462,24 @@ expr_simple
|
|||
}
|
||||
| path_start PATH_END { $$ = $1; }
|
||||
| path_start string_parts_interpolated PATH_END {
|
||||
$2->insert($2->begin(), $1);
|
||||
$2->insert($2->begin(), {makeCurPos(@1, data), $1});
|
||||
$$ = new ExprConcatStrings(CUR_POS, false, $2);
|
||||
}
|
||||
| SPATH {
|
||||
string path($1 + 1, strlen($1) - 2);
|
||||
$$ = new ExprApp(CUR_POS,
|
||||
new ExprApp(new ExprVar(data->symbols.create("__findFile")),
|
||||
new ExprVar(data->symbols.create("__nixPath"))),
|
||||
new ExprString(data->symbols.create(path)));
|
||||
std::string path($1.p + 1, $1.l - 2);
|
||||
$$ = new ExprCall(CUR_POS,
|
||||
new ExprVar(data->symbols.create("__findFile")),
|
||||
{new ExprVar(data->symbols.create("__nixPath")),
|
||||
new ExprString(path)});
|
||||
}
|
||||
| URI {
|
||||
static bool noURLLiterals = settings.isExperimentalFeatureEnabled("no-url-literals");
|
||||
static bool noURLLiterals = settings.isExperimentalFeatureEnabled(Xp::NoUrlLiterals);
|
||||
if (noURLLiterals)
|
||||
throw ParseError({
|
||||
.msg = hintfmt("URL literals are disabled"),
|
||||
.errPos = CUR_POS
|
||||
});
|
||||
$$ = new ExprString(data->symbols.create($1));
|
||||
$$ = new ExprString(std::string($1));
|
||||
}
|
||||
| '(' expr ')' { $$ = $2; }
|
||||
/* Let expressions `let {..., body = ...}' are just desugared
|
||||
|
@ -439,40 +494,41 @@ expr_simple
|
|||
;
|
||||
|
||||
string_parts
|
||||
: STR
|
||||
: STR { $$ = new ExprString(std::string($1)); }
|
||||
| string_parts_interpolated { $$ = new ExprConcatStrings(CUR_POS, true, $1); }
|
||||
| { $$ = new ExprString(data->symbols.create("")); }
|
||||
| { $$ = new ExprString(""); }
|
||||
;
|
||||
|
||||
string_parts_interpolated
|
||||
: string_parts_interpolated STR { $$ = $1; $1->push_back($2); }
|
||||
| string_parts_interpolated DOLLAR_CURLY expr '}' { $$ = $1; $1->push_back($3); }
|
||||
| DOLLAR_CURLY expr '}' { $$ = new vector<Expr *>; $$->push_back($2); }
|
||||
: string_parts_interpolated STR
|
||||
{ $$ = $1; $1->emplace_back(makeCurPos(@2, data), new ExprString(std::string($2))); }
|
||||
| string_parts_interpolated DOLLAR_CURLY expr '}' { $$ = $1; $1->emplace_back(makeCurPos(@2, data), $3); }
|
||||
| DOLLAR_CURLY expr '}' { $$ = new std::vector<std::pair<Pos, Expr *> >; $$->emplace_back(makeCurPos(@1, data), $2); }
|
||||
| STR DOLLAR_CURLY expr '}' {
|
||||
$$ = new vector<Expr *>;
|
||||
$$->push_back($1);
|
||||
$$->push_back($3);
|
||||
$$ = new std::vector<std::pair<Pos, Expr *> >;
|
||||
$$->emplace_back(makeCurPos(@1, data), new ExprString(std::string($1)));
|
||||
$$->emplace_back(makeCurPos(@2, data), $3);
|
||||
}
|
||||
;
|
||||
|
||||
path_start
|
||||
: PATH {
|
||||
Path path(absPath($1, data->basePath));
|
||||
Path path(absPath({$1.p, $1.l}, data->basePath));
|
||||
/* add back in the trailing '/' to the first segment */
|
||||
if ($1[strlen($1)-1] == '/' && strlen($1) > 1)
|
||||
if ($1.p[$1.l-1] == '/' && $1.l > 1)
|
||||
path += "/";
|
||||
$$ = new ExprPath(path);
|
||||
}
|
||||
| HPATH {
|
||||
Path path(getHome() + string($1 + 1));
|
||||
Path path(getHome() + std::string($1.p + 1, $1.l - 1));
|
||||
$$ = new ExprPath(path);
|
||||
}
|
||||
;
|
||||
|
||||
ind_string_parts
|
||||
: ind_string_parts IND_STR { $$ = $1; $1->push_back($2); }
|
||||
| ind_string_parts DOLLAR_CURLY expr '}' { $$ = $1; $1->push_back($3); }
|
||||
| { $$ = new vector<Expr *>; }
|
||||
: ind_string_parts IND_STR { $$ = $1; $1->emplace_back(makeCurPos(@2, data), $2); }
|
||||
| ind_string_parts DOLLAR_CURLY expr '}' { $$ = $1; $1->emplace_back(makeCurPos(@2, data), $3); }
|
||||
| { $$ = new std::vector<std::pair<Pos, std::variant<Expr *, StringToken> > >; }
|
||||
;
|
||||
|
||||
binds
|
||||
|
@ -483,7 +539,7 @@ binds
|
|||
if ($$->attrs.find(i.symbol) != $$->attrs.end())
|
||||
dupAttr(i.symbol, makeCurPos(@3, data), $$->attrs[i.symbol].pos);
|
||||
Pos pos = makeCurPos(@3, data);
|
||||
$$->attrs[i.symbol] = ExprAttrs::AttrDef(new ExprVar(CUR_POS, i.symbol), pos, true);
|
||||
$$->attrs.emplace(i.symbol, ExprAttrs::AttrDef(new ExprVar(CUR_POS, i.symbol), pos, true));
|
||||
}
|
||||
}
|
||||
| binds INHERIT '(' expr ')' attrs ';'
|
||||
|
@ -492,7 +548,7 @@ binds
|
|||
for (auto & i : *$6) {
|
||||
if ($$->attrs.find(i.symbol) != $$->attrs.end())
|
||||
dupAttr(i.symbol, makeCurPos(@6, data), $$->attrs[i.symbol].pos);
|
||||
$$->attrs[i.symbol] = ExprAttrs::AttrDef(new ExprSelect(CUR_POS, $4, i.symbol), makeCurPos(@6, data));
|
||||
$$->attrs.emplace(i.symbol, ExprAttrs::AttrDef(new ExprSelect(CUR_POS, $4, i.symbol), makeCurPos(@6, data)));
|
||||
}
|
||||
}
|
||||
| { $$ = new ExprAttrs(makeCurPos(@0, data)); }
|
||||
|
@ -504,7 +560,7 @@ attrs
|
|||
{ $$ = $1;
|
||||
ExprString * str = dynamic_cast<ExprString *>($2);
|
||||
if (str) {
|
||||
$$->push_back(AttrName(str->s));
|
||||
$$->push_back(AttrName(data->symbols.create(str->s)));
|
||||
delete str;
|
||||
} else
|
||||
throw ParseError({
|
||||
|
@ -521,17 +577,17 @@ attrpath
|
|||
{ $$ = $1;
|
||||
ExprString * str = dynamic_cast<ExprString *>($3);
|
||||
if (str) {
|
||||
$$->push_back(AttrName(str->s));
|
||||
$$->push_back(AttrName(data->symbols.create(str->s)));
|
||||
delete str;
|
||||
} else
|
||||
$$->push_back(AttrName($3));
|
||||
}
|
||||
| attr { $$ = new vector<AttrName>; $$->push_back(AttrName(data->symbols.create($1))); }
|
||||
| attr { $$ = new std::vector<AttrName>; $$->push_back(AttrName(data->symbols.create($1))); }
|
||||
| string_attr
|
||||
{ $$ = new vector<AttrName>;
|
||||
{ $$ = new std::vector<AttrName>;
|
||||
ExprString *str = dynamic_cast<ExprString *>($1);
|
||||
if (str) {
|
||||
$$->push_back(AttrName(str->s));
|
||||
$$->push_back(AttrName(data->symbols.create(str->s)));
|
||||
delete str;
|
||||
} else
|
||||
$$->push_back(AttrName($1));
|
||||
|
@ -540,7 +596,7 @@ attrpath
|
|||
|
||||
attr
|
||||
: ID { $$ = $1; }
|
||||
| OR_KW { $$ = "or"; }
|
||||
| OR_KW { $$ = {"or", 2}; }
|
||||
;
|
||||
|
||||
string_attr
|
||||
|
@ -555,13 +611,13 @@ expr_list
|
|||
|
||||
formals
|
||||
: formal ',' formals
|
||||
{ $$ = $3; addFormal(CUR_POS, $$, *$1); }
|
||||
{ $$ = $3; $$->formals.push_back(*$1); }
|
||||
| formal
|
||||
{ $$ = new Formals; addFormal(CUR_POS, $$, *$1); $$->ellipsis = false; }
|
||||
{ $$ = new ParserFormals; $$->formals.push_back(*$1); $$->ellipsis = false; }
|
||||
|
|
||||
{ $$ = new Formals; $$->ellipsis = false; }
|
||||
{ $$ = new ParserFormals; $$->ellipsis = false; }
|
||||
| ELLIPSIS
|
||||
{ $$ = new Formals; $$->ellipsis = true; }
|
||||
{ $$ = new ParserFormals; $$->ellipsis = true; }
|
||||
;
|
||||
|
||||
formal
|
||||
|
@ -586,8 +642,8 @@ formal
|
|||
namespace nix {
|
||||
|
||||
|
||||
Expr * EvalState::parse(const char * text, FileOrigin origin,
|
||||
const Path & path, const Path & basePath, StaticEnv & staticEnv)
|
||||
Expr * EvalState::parse(char * text, size_t length, FileOrigin origin,
|
||||
const PathView path, const PathView basePath, StaticEnv & staticEnv)
|
||||
{
|
||||
yyscan_t scanner;
|
||||
ParseData data(*this);
|
||||
|
@ -606,7 +662,7 @@ Expr * EvalState::parse(const char * text, FileOrigin origin,
|
|||
data.basePath = basePath;
|
||||
|
||||
yylex_init(&scanner);
|
||||
yy_scan_string(text, scanner);
|
||||
yy_scan_buffer(text, length, scanner);
|
||||
int res = yyparse(scanner, &data);
|
||||
yylex_destroy(scanner);
|
||||
|
||||
|
@ -652,63 +708,70 @@ Expr * EvalState::parseExprFromFile(const Path & path)
|
|||
|
||||
Expr * EvalState::parseExprFromFile(const Path & path, StaticEnv & staticEnv)
|
||||
{
|
||||
return parse(readFile(path).c_str(), foFile, path, dirOf(path), staticEnv);
|
||||
auto buffer = readFile(path);
|
||||
// readFile should have left some extra space for terminators
|
||||
buffer.append("\0\0", 2);
|
||||
return parse(buffer.data(), buffer.size(), foFile, path, dirOf(path), staticEnv);
|
||||
}
|
||||
|
||||
|
||||
Expr * EvalState::parseExprFromString(std::string_view s, const Path & basePath, StaticEnv & staticEnv)
|
||||
Expr * EvalState::parseExprFromString(std::string s, const Path & basePath, StaticEnv & staticEnv)
|
||||
{
|
||||
return parse(s.data(), foString, "", basePath, staticEnv);
|
||||
s.append("\0\0", 2);
|
||||
return parse(s.data(), s.size(), foString, "", basePath, staticEnv);
|
||||
}
|
||||
|
||||
|
||||
Expr * EvalState::parseExprFromString(std::string_view s, const Path & basePath)
|
||||
Expr * EvalState::parseExprFromString(std::string s, const Path & basePath)
|
||||
{
|
||||
return parseExprFromString(s, basePath, staticBaseEnv);
|
||||
return parseExprFromString(std::move(s), basePath, staticBaseEnv);
|
||||
}
|
||||
|
||||
|
||||
Expr * EvalState::parseStdin()
|
||||
{
|
||||
//Activity act(*logger, lvlTalkative, format("parsing standard input"));
|
||||
return parse(drainFD(0).data(), foStdin, "", absPath("."), staticBaseEnv);
|
||||
auto buffer = drainFD(0);
|
||||
// drainFD should have left some extra space for terminators
|
||||
buffer.append("\0\0", 2);
|
||||
return parse(buffer.data(), buffer.size(), foStdin, "", absPath("."), staticBaseEnv);
|
||||
}
|
||||
|
||||
|
||||
void EvalState::addToSearchPath(const string & s)
|
||||
void EvalState::addToSearchPath(const std::string & s)
|
||||
{
|
||||
size_t pos = s.find('=');
|
||||
string prefix;
|
||||
std::string prefix;
|
||||
Path path;
|
||||
if (pos == string::npos) {
|
||||
if (pos == std::string::npos) {
|
||||
path = s;
|
||||
} else {
|
||||
prefix = string(s, 0, pos);
|
||||
path = string(s, pos + 1);
|
||||
prefix = std::string(s, 0, pos);
|
||||
path = std::string(s, pos + 1);
|
||||
}
|
||||
|
||||
searchPath.emplace_back(prefix, path);
|
||||
}
|
||||
|
||||
|
||||
Path EvalState::findFile(const string & path)
|
||||
Path EvalState::findFile(const std::string_view path)
|
||||
{
|
||||
return findFile(searchPath, path);
|
||||
}
|
||||
|
||||
|
||||
Path EvalState::findFile(SearchPath & searchPath, const string & path, const Pos & pos)
|
||||
Path EvalState::findFile(SearchPath & searchPath, const std::string_view path, const Pos & pos)
|
||||
{
|
||||
for (auto & i : searchPath) {
|
||||
std::string suffix;
|
||||
if (i.first.empty())
|
||||
suffix = "/" + path;
|
||||
suffix = concatStrings("/", path);
|
||||
else {
|
||||
auto s = i.first.size();
|
||||
if (path.compare(0, s, i.first) != 0 ||
|
||||
(path.size() > s && path[s] != '/'))
|
||||
continue;
|
||||
suffix = path.size() == s ? "" : "/" + string(path, s);
|
||||
suffix = path.size() == s ? "" : concatStrings("/", path.substr(s));
|
||||
}
|
||||
auto r = resolveSearchPathElem(i);
|
||||
if (!r.first) continue;
|
||||
|
@ -717,7 +780,7 @@ Path EvalState::findFile(SearchPath & searchPath, const string & path, const Pos
|
|||
}
|
||||
|
||||
if (hasPrefix(path, "nix/"))
|
||||
return corepkgsPrefix + path.substr(4);
|
||||
return concatStrings(corepkgsPrefix, path.substr(4));
|
||||
|
||||
throw ThrownError({
|
||||
.msg = hintfmt(evalSettings.pureEval
|
||||
|
@ -752,7 +815,7 @@ std::pair<bool, std::string> EvalState::resolveSearchPathElem(const SearchPathEl
|
|||
res = { true, path };
|
||||
else {
|
||||
logWarning({
|
||||
.msg = hintfmt("warning: Nix search path entry '%1%' does not exist, ignoring", elem.second)
|
||||
.msg = hintfmt("Nix search path entry '%1%' does not exist, ignoring", elem.second)
|
||||
});
|
||||
res = { false, "" };
|
||||
}
|
||||
|
|
File diff suppressed because it is too large

|
@ -15,7 +15,6 @@ struct RegisterPrimOp
|
|||
std::vector<std::string> args;
|
||||
size_t arity = 0;
|
||||
const char * doc;
|
||||
std::optional<std::string> requiredFeature;
|
||||
PrimOpFun fun;
|
||||
};
|
||||
|
||||
|
@ -28,8 +27,7 @@ struct RegisterPrimOp
|
|||
RegisterPrimOp(
|
||||
std::string name,
|
||||
size_t arity,
|
||||
PrimOpFun fun,
|
||||
std::optional<std::string> requiredFeature = {});
|
||||
PrimOpFun fun);
|
||||
|
||||
RegisterPrimOp(Info && info);
|
||||
};
|
||||
|
|
|
@ -7,8 +7,8 @@ namespace nix {
|
|||
static void prim_unsafeDiscardStringContext(EvalState & state, const Pos & pos, Value * * args, Value & v)
|
||||
{
|
||||
PathSet context;
|
||||
string s = state.coerceToString(pos, *args[0], context);
|
||||
mkString(v, s, PathSet());
|
||||
auto s = state.coerceToString(pos, *args[0], context);
|
||||
v.mkString(*s);
|
||||
}
|
||||
|
||||
static RegisterPrimOp primop_unsafeDiscardStringContext("__unsafeDiscardStringContext", 1, prim_unsafeDiscardStringContext);
|
||||
|
@ -18,7 +18,7 @@ static void prim_hasContext(EvalState & state, const Pos & pos, Value * * args,
|
|||
{
|
||||
PathSet context;
|
||||
state.forceString(*args[0], context, pos);
|
||||
mkBool(v, !context.empty());
|
||||
v.mkBool(!context.empty());
|
||||
}
|
||||
|
||||
static RegisterPrimOp primop_hasContext("__hasContext", 1, prim_hasContext);
|
||||
|
@ -33,13 +33,13 @@ static RegisterPrimOp primop_hasContext("__hasContext", 1, prim_hasContext);
|
|||
static void prim_unsafeDiscardOutputDependency(EvalState & state, const Pos & pos, Value * * args, Value & v)
|
||||
{
|
||||
PathSet context;
|
||||
string s = state.coerceToString(pos, *args[0], context);
|
||||
auto s = state.coerceToString(pos, *args[0], context);
|
||||
|
||||
PathSet context2;
|
||||
for (auto & p : context)
|
||||
context2.insert(p.at(0) == '=' ? string(p, 1) : p);
|
||||
context2.insert(p.at(0) == '=' ? std::string(p, 1) : p);
|
||||
|
||||
mkString(v, s, context2);
|
||||
v.mkString(*s, context2);
|
||||
}
|
||||
|
||||
static RegisterPrimOp primop_unsafeDiscardOutputDependency("__unsafeDiscardOutputDependency", 1, prim_unsafeDiscardOutputDependency);
|
||||
|
@ -76,13 +76,13 @@ static void prim_getContext(EvalState & state, const Pos & pos, Value * * args,
|
|||
auto contextInfos = std::map<Path, ContextInfo>();
|
||||
for (const auto & p : context) {
|
||||
Path drv;
|
||||
string output;
|
||||
std::string output;
|
||||
const Path * path = &p;
|
||||
if (p.at(0) == '=') {
|
||||
drv = string(p, 1);
|
||||
drv = std::string(p, 1);
|
||||
path = &drv;
|
||||
} else if (p.at(0) == '!') {
|
||||
std::pair<string, string> ctx = decodeContext(p);
|
||||
std::pair<std::string, std::string> ctx = decodeContext(p);
|
||||
drv = ctx.first;
|
||||
output = ctx.second;
|
||||
path = &drv;
|
||||
|
@ -103,28 +103,26 @@ static void prim_getContext(EvalState & state, const Pos & pos, Value * * args,
|
|||
}
|
||||
}
|
||||
|
||||
state.mkAttrs(v, contextInfos.size());
|
||||
auto attrs = state.buildBindings(contextInfos.size());
|
||||
|
||||
auto sPath = state.symbols.create("path");
|
||||
auto sAllOutputs = state.symbols.create("allOutputs");
|
||||
for (const auto & info : contextInfos) {
|
||||
auto & infoVal = *state.allocAttr(v, state.symbols.create(info.first));
|
||||
state.mkAttrs(infoVal, 3);
|
||||
auto infoAttrs = state.buildBindings(3);
|
||||
if (info.second.path)
|
||||
mkBool(*state.allocAttr(infoVal, sPath), true);
|
||||
infoAttrs.alloc(sPath).mkBool(true);
|
||||
if (info.second.allOutputs)
|
||||
mkBool(*state.allocAttr(infoVal, sAllOutputs), true);
|
||||
infoAttrs.alloc(sAllOutputs).mkBool(true);
|
||||
if (!info.second.outputs.empty()) {
|
||||
auto & outputsVal = *state.allocAttr(infoVal, state.sOutputs);
|
||||
auto & outputsVal = infoAttrs.alloc(state.sOutputs);
|
||||
state.mkList(outputsVal, info.second.outputs.size());
|
||||
size_t i = 0;
|
||||
for (const auto & output : info.second.outputs) {
|
||||
mkString(*(outputsVal.listElems()[i++] = state.allocValue()), output);
|
||||
}
|
||||
for (const auto & [i, output] : enumerate(info.second.outputs))
|
||||
(outputsVal.listElems()[i] = state.allocValue())->mkString(output);
|
||||
}
|
||||
infoVal.attrs->sort();
|
||||
attrs.alloc(info.first).mkAttrs(infoAttrs);
|
||||
}
|
||||
v.attrs->sort();
|
||||
|
||||
v.mkAttrs(attrs);
|
||||
}
|
||||
|
||||
static RegisterPrimOp primop_getContext("__getContext", 1, prim_getContext);
|
||||
|
@ -168,7 +166,7 @@ static void prim_appendContext(EvalState & state, const Pos & pos, Value * * arg
|
|||
.errPos = *i.pos
|
||||
});
|
||||
}
|
||||
context.insert("=" + string(i.name));
|
||||
context.insert("=" + std::string(i.name));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -181,14 +179,14 @@ static void prim_appendContext(EvalState & state, const Pos & pos, Value * * arg
|
|||
.errPos = *i.pos
|
||||
});
|
||||
}
|
||||
for (unsigned int n = 0; n < iter->value->listSize(); ++n) {
|
||||
auto name = state.forceStringNoCtx(*iter->value->listElems()[n], *iter->pos);
|
||||
context.insert("!" + name + "!" + string(i.name));
|
||||
for (auto elem : iter->value->listItems()) {
|
||||
auto name = state.forceStringNoCtx(*elem, *iter->pos);
|
||||
context.insert(concatStrings("!", name, "!", i.name));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
mkString(v, orig, context);
|
||||
v.mkString(orig, context);
|
||||
}
|
||||
|
||||
static RegisterPrimOp primop_appendContext("__appendContext", 2, prim_appendContext);
|
||||
|
|
|
@ -12,24 +12,24 @@ static void prim_fetchMercurial(EvalState & state, const Pos & pos, Value * * ar
|
|||
std::string url;
|
||||
std::optional<Hash> rev;
|
||||
std::optional<std::string> ref;
|
||||
std::string name = "source";
|
||||
std::string_view name = "source";
|
||||
PathSet context;
|
||||
|
||||
state.forceValue(*args[0]);
|
||||
state.forceValue(*args[0], pos);
|
||||
|
||||
if (args[0]->type() == nAttrs) {
|
||||
|
||||
state.forceAttrs(*args[0], pos);
|
||||
|
||||
for (auto & attr : *args[0]->attrs) {
|
||||
string n(attr.name);
|
||||
std::string_view n(attr.name);
|
||||
if (n == "url")
|
||||
url = state.coerceToString(*attr.pos, *attr.value, context, false, false);
|
||||
url = state.coerceToString(*attr.pos, *attr.value, context, false, false).toOwned();
|
||||
else if (n == "rev") {
|
||||
// Ugly: unlike fetchGit, here the "rev" attribute can
|
||||
// be both a revision or a branch/tag name.
|
||||
auto value = state.forceStringNoCtx(*attr.value, *attr.pos);
|
||||
if (std::regex_match(value, revRegex))
|
||||
if (std::regex_match(value.begin(), value.end(), revRegex))
|
||||
rev = Hash::parseAny(value, htSHA1);
|
||||
else
|
||||
ref = value;
|
||||
|
@ -50,7 +50,7 @@ static void prim_fetchMercurial(EvalState & state, const Pos & pos, Value * * ar
|
|||
});
|
||||
|
||||
} else
|
||||
url = state.coerceToString(pos, *args[0], context, false, false);
|
||||
url = state.coerceToString(pos, *args[0], context, false, false).toOwned();
|
||||
|
||||
// FIXME: git externals probably can be used to bypass the URI
|
||||
// whitelist. Ah well.
|
||||
|
@ -62,7 +62,7 @@ static void prim_fetchMercurial(EvalState & state, const Pos & pos, Value * * ar
|
|||
fetchers::Attrs attrs;
|
||||
attrs.insert_or_assign("type", "hg");
|
||||
attrs.insert_or_assign("url", url.find("://") != std::string::npos ? url : "file://" + url);
|
||||
attrs.insert_or_assign("name", name);
|
||||
attrs.insert_or_assign("name", std::string(name));
|
||||
if (ref) attrs.insert_or_assign("ref", *ref);
|
||||
if (rev) attrs.insert_or_assign("rev", rev->gitRev());
|
||||
auto input = fetchers::Input::fromAttrs(std::move(attrs));
|
||||
|
@ -70,22 +70,21 @@ static void prim_fetchMercurial(EvalState & state, const Pos & pos, Value * * ar
|
|||
// FIXME: use name
|
||||
auto [tree, input2] = input.fetch(state.store);
|
||||
|
||||
state.mkAttrs(v, 8);
|
||||
auto attrs2 = state.buildBindings(8);
|
||||
auto storePath = state.store->printStorePath(tree.storePath);
|
||||
mkString(*state.allocAttr(v, state.sOutPath), storePath, PathSet({storePath}));
|
||||
attrs2.alloc(state.sOutPath).mkString(storePath, {storePath});
|
||||
if (input2.getRef())
|
||||
mkString(*state.allocAttr(v, state.symbols.create("branch")), *input2.getRef());
|
||||
attrs2.alloc("branch").mkString(*input2.getRef());
|
||||
// Backward compatibility: set 'rev' to
|
||||
// 0000000000000000000000000000000000000000 for a dirty tree.
|
||||
auto rev2 = input2.getRev().value_or(Hash(htSHA1));
|
||||
mkString(*state.allocAttr(v, state.symbols.create("rev")), rev2.gitRev());
|
||||
mkString(*state.allocAttr(v, state.symbols.create("shortRev")), std::string(rev2.gitRev(), 0, 12));
|
||||
attrs2.alloc("rev").mkString(rev2.gitRev());
|
||||
attrs2.alloc("shortRev").mkString(rev2.gitRev().substr(0, 12));
|
||||
if (auto revCount = input2.getRevCount())
|
||||
mkInt(*state.allocAttr(v, state.symbols.create("revCount")), *revCount);
|
||||
v.attrs->sort();
|
||||
attrs2.alloc("revCount").mkInt(*revCount);
|
||||
v.mkAttrs(attrs2);
|
||||
|
||||
if (state.allowedPaths)
|
||||
state.allowedPaths->insert(tree.actualPath);
|
||||
state.allowPath(tree.storePath);
|
||||
}
|
||||
|
||||
static RegisterPrimOp r_fetchMercurial("fetchMercurial", 1, prim_fetchMercurial);
|
||||
|
|
|
@ -16,52 +16,56 @@ void emitTreeAttrs(
|
|||
const fetchers::Tree & tree,
|
||||
const fetchers::Input & input,
|
||||
Value & v,
|
||||
bool emptyRevFallback)
|
||||
bool emptyRevFallback,
|
||||
bool forceDirty)
|
||||
{
|
||||
assert(input.isImmutable());
|
||||
assert(input.isLocked());
|
||||
|
||||
state.mkAttrs(v, 8);
|
||||
auto attrs = state.buildBindings(8);
|
||||
|
||||
auto storePath = state.store->printStorePath(tree.storePath);
|
||||
|
||||
mkString(*state.allocAttr(v, state.sOutPath), storePath, PathSet({storePath}));
|
||||
attrs.alloc(state.sOutPath).mkString(storePath, {storePath});
|
||||
|
||||
// FIXME: support arbitrary input attributes.
|
||||
|
||||
auto narHash = input.getNarHash();
|
||||
assert(narHash);
|
||||
mkString(*state.allocAttr(v, state.symbols.create("narHash")),
|
||||
narHash->to_string(SRI, true));
|
||||
|
||||
if (auto rev = input.getRev()) {
|
||||
mkString(*state.allocAttr(v, state.symbols.create("rev")), rev->gitRev());
|
||||
mkString(*state.allocAttr(v, state.symbols.create("shortRev")), rev->gitShortRev());
|
||||
} else if (emptyRevFallback) {
|
||||
// Backwards compat for `builtins.fetchGit`: dirty repos return an empty sha1 as rev
|
||||
auto emptyHash = Hash(htSHA1);
|
||||
mkString(*state.allocAttr(v, state.symbols.create("rev")), emptyHash.gitRev());
|
||||
mkString(*state.allocAttr(v, state.symbols.create("shortRev")), emptyHash.gitShortRev());
|
||||
}
|
||||
attrs.alloc("narHash").mkString(narHash->to_string(SRI, true));
|
||||
|
||||
if (input.getType() == "git")
|
||||
mkBool(*state.allocAttr(v, state.symbols.create("submodules")),
|
||||
fetchers::maybeGetBoolAttr(input.attrs, "submodules").value_or(true));
|
||||
attrs.alloc("submodules").mkBool(
|
||||
fetchers::maybeGetBoolAttr(input.attrs, "submodules").value_or(false));
|
||||
|
||||
if (auto revCount = input.getRevCount())
|
||||
mkInt(*state.allocAttr(v, state.symbols.create("revCount")), *revCount);
|
||||
else if (emptyRevFallback)
|
||||
mkInt(*state.allocAttr(v, state.symbols.create("revCount")), 0);
|
||||
if (!forceDirty) {
|
||||
|
||||
if (auto rev = input.getRev()) {
|
||||
attrs.alloc("rev").mkString(rev->gitRev());
|
||||
attrs.alloc("shortRev").mkString(rev->gitShortRev());
|
||||
} else if (emptyRevFallback) {
|
||||
// Backwards compat for `builtins.fetchGit`: dirty repos return an empty sha1 as rev
|
||||
auto emptyHash = Hash(htSHA1);
|
||||
attrs.alloc("rev").mkString(emptyHash.gitRev());
|
||||
attrs.alloc("shortRev").mkString(emptyHash.gitShortRev());
|
||||
}
|
||||
|
||||
if (auto revCount = input.getRevCount())
|
||||
attrs.alloc("revCount").mkInt(*revCount);
|
||||
else if (emptyRevFallback)
|
||||
attrs.alloc("revCount").mkInt(0);
|
||||
|
||||
}
|
||||
|
||||
if (auto lastModified = input.getLastModified()) {
|
||||
mkInt(*state.allocAttr(v, state.symbols.create("lastModified")), *lastModified);
|
||||
mkString(*state.allocAttr(v, state.symbols.create("lastModifiedDate")),
|
||||
attrs.alloc("lastModified").mkInt(*lastModified);
|
||||
attrs.alloc("lastModifiedDate").mkString(
|
||||
fmt("%s", std::put_time(std::gmtime(&*lastModified), "%Y%m%d%H%M%S")));
|
||||
}
|
||||
|
||||
v.attrs->sort();
|
||||
v.mkAttrs(attrs);
|
||||
}
|
||||
|
||||
std::string fixURI(std::string uri, EvalState &state, const std::string & defaultScheme = "file")
|
||||
std::string fixURI(std::string uri, EvalState & state, const std::string & defaultScheme = "file")
|
||||
{
|
||||
state.checkURI(uri);
|
||||
return uri.find("://") != std::string::npos ? uri : defaultScheme + "://" + uri;
|
||||
|
@ -69,53 +73,66 @@ std::string fixURI(std::string uri, EvalState &state, const std::string & defaul
|
|||
|
||||
std::string fixURIForGit(std::string uri, EvalState & state)
|
||||
{
|
||||
static std::regex scp_uri("([^/].*)@(.*):(.*)");
|
||||
/* Detects scp-style uris (e.g. git@github.com:NixOS/nix) and fixes
|
||||
* them by removing the `:` and assuming a scheme of `ssh://`
|
||||
* */
|
||||
static std::regex scp_uri("([^/]*)@(.*):(.*)");
|
||||
if (uri[0] != '/' && std::regex_match(uri, scp_uri))
|
||||
return fixURI(std::regex_replace(uri, scp_uri, "$1@$2/$3"), state, "ssh");
|
||||
else
|
||||
return fixURI(uri, state);
|
||||
}
|
||||
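fixURIForGit above recognises scp-style Git URLs such as `git@github.com:NixOS/nix` and rewrites the `:` after the host into a `/` so the URL can be fetched over `ssh://`. A minimal sketch of that rewrite using the same regular expression; the checkURI call and the scheme detection done by the real fixURI are omitted here:

```cpp
#include <iostream>
#include <regex>
#include <string>

// Turn scp-style "user@host:path" into "ssh://user@host/path";
// leave everything else (absolute paths, real URLs) untouched.
std::string fixURIForGit(const std::string & uri)
{
    static const std::regex scpUri("([^/]*)@(.*):(.*)");
    if (!uri.empty() && uri[0] != '/' && std::regex_match(uri, scpUri))
        return "ssh://" + std::regex_replace(uri, scpUri, "$1@$2/$3");
    return uri;
}

int main()
{
    std::cout << fixURIForGit("git@github.com:NixOS/nix") << "\n";
    // ssh://git@github.com/NixOS/nix
    std::cout << fixURIForGit("https://github.com/NixOS/nix") << "\n";
    // unchanged
}
```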
|
||||
void addURI(EvalState &state, fetchers::Attrs &attrs, Symbol name, std::string v)
|
||||
{
|
||||
string n(name);
|
||||
attrs.emplace(name, n == "url" ? fixURI(v, state) : v);
|
||||
}
|
||||
|
||||
struct FetchTreeParams {
|
||||
bool emptyRevFallback = false;
|
||||
bool allowNameArgument = false;
|
||||
};
|
||||
|
||||
static void fetchTree(
|
||||
EvalState &state,
|
||||
const Pos &pos,
|
||||
Value **args,
|
||||
Value &v,
|
||||
const std::optional<std::string> type,
|
||||
EvalState & state,
|
||||
const Pos & pos,
|
||||
Value * * args,
|
||||
Value & v,
|
||||
std::optional<std::string> type,
|
||||
const FetchTreeParams & params = FetchTreeParams{}
|
||||
) {
|
||||
fetchers::Input input;
|
||||
PathSet context;
|
||||
|
||||
state.forceValue(*args[0]);
|
||||
state.forceValue(*args[0], pos);
|
||||
|
||||
if (args[0]->type() == nAttrs) {
|
||||
state.forceAttrs(*args[0], pos);
|
||||
|
||||
fetchers::Attrs attrs;
|
||||
|
||||
if (auto aType = args[0]->attrs->get(state.sType)) {
|
||||
if (type)
|
||||
throw Error({
|
||||
.msg = hintfmt("unexpected attribute 'type'"),
|
||||
.errPos = pos
|
||||
});
|
||||
type = state.forceStringNoCtx(*aType->value, *aType->pos);
|
||||
} else if (!type)
|
||||
throw Error({
|
||||
.msg = hintfmt("attribute 'type' is missing in call to 'fetchTree'"),
|
||||
.errPos = pos
|
||||
});
|
||||
|
||||
attrs.emplace("type", type.value());
|
||||
|
||||
for (auto & attr : *args[0]->attrs) {
|
||||
state.forceValue(*attr.value);
|
||||
if (attr.value->type() == nPath || attr.value->type() == nString)
|
||||
addURI(
|
||||
state,
|
||||
attrs,
|
||||
attr.name,
|
||||
state.coerceToString(*attr.pos, *attr.value, context, false, false)
|
||||
);
|
||||
else if (attr.value->type() == nString)
|
||||
addURI(state, attrs, attr.name, attr.value->string.s);
|
||||
if (attr.name == state.sType) continue;
|
||||
state.forceValue(*attr.value, *attr.pos);
|
||||
if (attr.value->type() == nPath || attr.value->type() == nString) {
|
||||
auto s = state.coerceToString(*attr.pos, *attr.value, context, false, false).toOwned();
|
||||
attrs.emplace(attr.name,
|
||||
attr.name == "url"
|
||||
? type == "git"
|
||||
? fixURIForGit(s, state)
|
||||
: fixURI(s, state)
|
||||
: s);
|
||||
}
|
||||
else if (attr.value->type() == nBool)
|
||||
attrs.emplace(attr.name, Explicit<bool>{attr.value->boolean});
|
||||
else if (attr.value->type() == nInt)
|
||||
|
@ -125,15 +142,6 @@ static void fetchTree(
|
|||
attr.name, showType(*attr.value));
|
||||
}
|
||||
|
||||
if (type)
|
||||
attrs.emplace("type", type.value());
|
||||
|
||||
if (!attrs.count("type"))
|
||||
throw Error({
|
||||
.msg = hintfmt("attribute 'type' is missing in call to 'fetchTree'"),
|
||||
.errPos = pos
|
||||
});
|
||||
|
||||
if (!params.allowNameArgument)
|
||||
if (auto nameIter = attrs.find("name"); nameIter != attrs.end())
|
||||
throw Error({
|
||||
|
@ -141,10 +149,9 @@ static void fetchTree(
|
|||
.errPos = pos
|
||||
});
|
||||
|
||||
|
||||
input = fetchers::Input::fromAttrs(std::move(attrs));
|
||||
} else {
|
||||
auto url = state.coerceToString(pos, *args[0], context, false, false);
|
||||
auto url = state.coerceToString(pos, *args[0], context, false, false).toOwned();
|
||||
|
||||
if (type == "git") {
|
||||
fetchers::Attrs attrs;
|
||||
|
@ -159,20 +166,19 @@ static void fetchTree(
|
|||
if (!evalSettings.pureEval && !input.isDirect())
|
||||
input = lookupInRegistries(state.store, input).first;
|
||||
|
||||
if (evalSettings.pureEval && !input.isImmutable())
|
||||
throw Error("in pure evaluation mode, 'fetchTree' requires an immutable input, at %s", pos);
|
||||
if (evalSettings.pureEval && !input.isLocked())
|
||||
throw Error("in pure evaluation mode, 'fetchTree' requires a locked input, at %s", pos);
|
||||
|
||||
auto [tree, input2] = input.fetch(state.store);
|
||||
|
||||
if (state.allowedPaths)
|
||||
state.allowedPaths->insert(tree.actualPath);
|
||||
state.allowPath(tree.storePath);
|
||||
|
||||
emitTreeAttrs(state, tree, input2, v, params.emptyRevFallback);
|
||||
emitTreeAttrs(state, tree, input2, v, params.emptyRevFallback, false);
|
||||
}
|
||||
|
||||
static void prim_fetchTree(EvalState & state, const Pos & pos, Value * * args, Value & v)
|
||||
{
|
||||
settings.requireExperimentalFeature("flakes");
|
||||
settings.requireExperimentalFeature(Xp::Flakes);
|
||||
fetchTree(state, pos, args, v, std::nullopt, FetchTreeParams { .allowNameArgument = false });
|
||||
}
|
||||
|
||||
|
@ -180,19 +186,19 @@ static void prim_fetchTree(EvalState & state, const Pos & pos, Value * * args, V
|
|||
static RegisterPrimOp primop_fetchTree("fetchTree", 1, prim_fetchTree);
|
||||
|
||||
static void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,
|
||||
const string & who, bool unpack, std::string name)
|
||||
const std::string & who, bool unpack, std::string name)
|
||||
{
|
||||
std::optional<std::string> url;
|
||||
std::optional<Hash> expectedHash;
|
||||
|
||||
state.forceValue(*args[0]);
|
||||
state.forceValue(*args[0], pos);
|
||||
|
||||
if (args[0]->type() == nAttrs) {
|
||||
|
||||
state.forceAttrs(*args[0], pos);
|
||||
|
||||
for (auto & attr : *args[0]->attrs) {
|
||||
string n(attr.name);
|
||||
std::string n(attr.name);
|
||||
if (n == "url")
|
||||
url = state.forceStringNoCtx(*attr.value, *attr.pos);
|
||||
else if (n == "sha256")
|
||||
|
@ -229,21 +235,19 @@ static void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,
|
|||
? fetchers::downloadTarball(state.store, *url, name, (bool) expectedHash).first.storePath
|
||||
: fetchers::downloadFile(state.store, *url, name, (bool) expectedHash).storePath;
|
||||
|
||||
auto path = state.store->toRealPath(storePath);
|
||||
|
||||
if (expectedHash) {
|
||||
auto hash = unpack
|
||||
? state.store->queryPathInfo(storePath)->narHash
|
||||
: hashFile(htSHA256, path);
|
||||
: hashFile(htSHA256, state.store->toRealPath(storePath));
|
||||
if (hash != *expectedHash)
|
||||
throw Error((unsigned int) 102, "hash mismatch in file downloaded from '%s':\n specified: %s\n got: %s",
|
||||
*url, expectedHash->to_string(Base32, true), hash.to_string(Base32, true));
|
||||
}
|
||||
|
||||
if (state.allowedPaths)
|
||||
state.allowedPaths->insert(path);
|
||||
state.allowPath(storePath);
|
||||
|
||||
mkString(v, path, PathSet({path}));
|
||||
auto path = state.store->printStorePath(storePath);
|
||||
v.mkString(path, PathSet({path}));
|
||||
}
|
||||
|
||||
static void prim_fetchurl(EvalState & state, const Pos & pos, Value * * args, Value & v)
|
||||
|
@ -285,13 +289,13 @@ static RegisterPrimOp primop_fetchTarball({
|
|||
stdenv.mkDerivation { … }
|
||||
```
|
||||
|
||||
The fetched tarball is cached for a certain amount of time (1 hour
|
||||
by default) in `~/.cache/nix/tarballs/`. You can change the cache
|
||||
timeout either on the command line with `--option tarball-ttl number
|
||||
of seconds` or in the Nix configuration file with this option: `
|
||||
number of seconds to cache `.
|
||||
The fetched tarball is cached for a certain amount of time (1
|
||||
hour by default) in `~/.cache/nix/tarballs/`. You can change the
|
||||
cache timeout either on the command line with `--tarball-ttl`
|
||||
*number-of-seconds* or in the Nix configuration file by adding
|
||||
the line `tarball-ttl = ` *number-of-seconds*.
|
||||
|
||||
Note that when obtaining the hash with ` nix-prefetch-url ` the
|
||||
Note that when obtaining the hash with `nix-prefetch-url` the
|
||||
option `--unpack` is required.
|
||||
|
||||
This function can also verify the contents against a hash. In that
|
||||
|
@ -391,7 +395,7 @@ static RegisterPrimOp primop_fetchGit({
|
|||
```
|
||||
|
||||
> **Note**
|
||||
>
|
||||
>
|
||||
> It is nice to always specify the branch which a revision
|
||||
> belongs to. Without the branch being specified, the fetcher
|
||||
> might fail if the default branch changes. Additionally, it can
|
||||
|
@ -428,12 +432,12 @@ static RegisterPrimOp primop_fetchGit({
|
|||
```
|
||||
|
||||
> **Note**
|
||||
>
|
||||
>
|
||||
> Nix will refetch the branch in accordance with
|
||||
> the option `tarball-ttl`.
|
||||
|
||||
> **Note**
|
||||
>
|
||||
>
|
||||
> This behavior is disabled in *Pure evaluation mode*.
|
||||
)",
|
||||
.fun = prim_fetchGit,
|
||||
|
|
|
@ -1,86 +1,76 @@
|
|||
#include "primops.hh"
|
||||
#include "eval-inline.hh"
|
||||
|
||||
#include "../../cpptoml/cpptoml.h"
|
||||
#include "../../toml11/toml.hpp"
|
||||
|
||||
namespace nix {
|
||||
|
||||
static void prim_fromTOML(EvalState & state, const Pos & pos, Value * * args, Value & v)
|
||||
static void prim_fromTOML(EvalState & state, const Pos & pos, Value * * args, Value & val)
|
||||
{
|
||||
using namespace cpptoml;
|
||||
|
||||
auto toml = state.forceStringNoCtx(*args[0], pos);
|
||||
|
||||
std::istringstream tomlStream(toml);
|
||||
std::istringstream tomlStream(std::string{toml});
|
||||
|
||||
std::function<void(Value &, std::shared_ptr<base>)> visit;
|
||||
std::function<void(Value &, toml::value)> visit;
|
||||
|
||||
visit = [&](Value & v, std::shared_ptr<base> t) {
|
||||
visit = [&](Value & v, toml::value t) {
|
||||
|
||||
if (auto t2 = t->as_table()) {
|
||||
switch(t.type())
|
||||
{
|
||||
case toml::value_t::table:
|
||||
{
|
||||
auto table = toml::get<toml::table>(t);
|
||||
|
||||
size_t size = 0;
|
||||
for (auto & i : *t2) { (void) i; size++; }
|
||||
size_t size = 0;
|
||||
for (auto & i : table) { (void) i; size++; }
|
||||
|
||||
state.mkAttrs(v, size);
|
||||
auto attrs = state.buildBindings(size);
|
||||
|
||||
for (auto & i : *t2) {
|
||||
auto & v2 = *state.allocAttr(v, state.symbols.create(i.first));
|
||||
for(auto & elem : table)
|
||||
visit(attrs.alloc(elem.first), elem.second);
|
||||
|
||||
if (auto i2 = i.second->as_table_array()) {
|
||||
size_t size2 = i2->get().size();
|
||||
state.mkList(v2, size2);
|
||||
for (size_t j = 0; j < size2; ++j)
|
||||
visit(*(v2.listElems()[j] = state.allocValue()), i2->get()[j]);
|
||||
v.mkAttrs(attrs);
|
||||
}
|
||||
else
|
||||
visit(v2, i.second);
|
||||
}
|
||||
break;;
|
||||
case toml::value_t::array:
|
||||
{
|
||||
auto array = toml::get<std::vector<toml::value>>(t);
|
||||
|
||||
size_t size = array.size();
|
||||
state.mkList(v, size);
|
||||
for (size_t i = 0; i < size; ++i)
|
||||
visit(*(v.listElems()[i] = state.allocValue()), array[i]);
|
||||
}
|
||||
break;;
|
||||
case toml::value_t::boolean:
|
||||
v.mkBool(toml::get<bool>(t));
|
||||
break;;
|
||||
case toml::value_t::integer:
|
||||
v.mkInt(toml::get<int64_t>(t));
|
||||
break;;
|
||||
case toml::value_t::floating:
|
||||
v.mkFloat(toml::get<NixFloat>(t));
|
||||
break;;
|
||||
case toml::value_t::string:
|
||||
v.mkString(toml::get<std::string>(t));
|
||||
break;;
|
||||
case toml::value_t::local_datetime:
|
||||
case toml::value_t::offset_datetime:
|
||||
case toml::value_t::local_date:
|
||||
case toml::value_t::local_time:
|
||||
// We fail since Nix doesn't have date and time types
|
||||
throw std::runtime_error("Dates and times are not supported");
|
||||
break;;
|
||||
case toml::value_t::empty:
|
||||
v.mkNull();
|
||||
break;;
|
||||
|
||||
v.attrs->sort();
|
||||
}
|
||||
|
||||
else if (auto t2 = t->as_array()) {
|
||||
size_t size = t2->get().size();
|
||||
|
||||
state.mkList(v, size);
|
||||
|
||||
for (size_t i = 0; i < size; ++i)
|
||||
visit(*(v.listElems()[i] = state.allocValue()), t2->get()[i]);
|
||||
}
|
||||
|
||||
// Handle cases like 'a = [[{ a = true }]]', which IMHO should be
|
||||
// parsed as a array containing an array containing a table,
|
||||
// but instead are parsed as an array containing a table array
|
||||
// containing a table.
|
||||
else if (auto t2 = t->as_table_array()) {
|
||||
size_t size = t2->get().size();
|
||||
|
||||
state.mkList(v, size);
|
||||
|
||||
for (size_t j = 0; j < size; ++j)
|
||||
visit(*(v.listElems()[j] = state.allocValue()), t2->get()[j]);
|
||||
}
|
||||
|
||||
else if (t->is_value()) {
|
||||
if (auto val = t->as<int64_t>())
|
||||
mkInt(v, val->get());
|
||||
else if (auto val = t->as<NixFloat>())
|
||||
mkFloat(v, val->get());
|
||||
else if (auto val = t->as<bool>())
|
||||
mkBool(v, val->get());
|
||||
else if (auto val = t->as<std::string>())
|
||||
mkString(v, val->get());
|
||||
else
|
||||
throw EvalError("unsupported value type in TOML");
|
||||
}
|
||||
|
||||
else abort();
|
||||
};
|
||||
|
||||
try {
|
||||
visit(v, parser(tomlStream).parse());
|
||||
} catch (std::runtime_error & e) {
|
||||
visit(val, toml::parse(tomlStream, "fromTOML" /* the "filename" */));
|
||||
} catch (std::exception & e) { // TODO: toml::syntax_error
|
||||
throw EvalError({
|
||||
.msg = hintfmt("while parsing a TOML string: %s", e.what()),
|
||||
.errPos = pos
|
||||
|
|
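The rewritten prim_fromTOML above walks a toml::value recursively, switching on toml::value_t and turning tables, arrays and scalars into Nix values. The same traversal can be exercised outside of Nix; the sketch below, assuming toml11 is on the include path, simply pretty-prints the structure instead of building Values (datetimes and empty values are lumped together here, where the real code throws or produces null):

```cpp
#include <cstdint>
#include <iostream>
#include <sstream>
#include <string>
#include <vector>
#include <toml.hpp> // toml11

// Recursively print a toml::value, switching on its runtime type,
// mirroring the shape of the visit() lambda in prim_fromTOML.
static void dump(const toml::value & t, int depth = 0)
{
    std::string indent(depth * 2, ' ');
    switch (t.type()) {
    case toml::value_t::table: {
        auto table = toml::get<toml::table>(t);
        for (const auto & elem : table) {
            std::cout << indent << elem.first << ":\n";
            dump(elem.second, depth + 1);
        }
        break;
    }
    case toml::value_t::array: {
        auto array = toml::get<std::vector<toml::value>>(t);
        for (const auto & elem : array)
            dump(elem, depth + 1);
        break;
    }
    case toml::value_t::boolean:
        std::cout << indent << (toml::get<bool>(t) ? "true" : "false") << "\n";
        break;
    case toml::value_t::integer:
        std::cout << indent << toml::get<std::int64_t>(t) << "\n";
        break;
    case toml::value_t::floating:
        std::cout << indent << toml::get<double>(t) << "\n";
        break;
    case toml::value_t::string:
        std::cout << indent << toml::get<std::string>(t) << "\n";
        break;
    default:
        std::cout << indent << "(unsupported)\n";
    }
}

int main()
{
    std::istringstream in("x = 1\n[server]\nhost = \"localhost\"\nports = [80, 443]\n");
    dump(toml::parse(in, "example.toml"));
}
```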
|
@ -1,7 +1,8 @@
|
|||
#pragma once
|
||||
|
||||
#include <list>
|
||||
#include <map>
|
||||
#include <unordered_set>
|
||||
#include <unordered_map>
|
||||
|
||||
#include "types.hh"
|
||||
|
||||
|
@ -16,8 +17,8 @@ namespace nix {
|
|||
class Symbol
|
||||
{
|
||||
private:
|
||||
const string * s; // pointer into SymbolTable
|
||||
Symbol(const string * s) : s(s) { };
|
||||
const std::string * s; // pointer into SymbolTable
|
||||
Symbol(const std::string * s) : s(s) { };
|
||||
friend class SymbolTable;
|
||||
|
||||
public:
|
||||
|
@ -70,15 +71,21 @@ public:
|
|||
class SymbolTable
|
||||
{
|
||||
private:
|
||||
typedef std::unordered_set<string> Symbols;
|
||||
Symbols symbols;
|
||||
std::unordered_map<std::string_view, Symbol> symbols;
|
||||
std::list<std::string> store;
|
||||
|
||||
public:
|
||||
Symbol create(std::string_view s)
|
||||
{
|
||||
// FIXME: avoid allocation if 's' already exists in the symbol table.
|
||||
std::pair<Symbols::iterator, bool> res = symbols.emplace(std::string(s));
|
||||
return Symbol(&*res.first);
|
||||
// Most symbols are looked up more than once, so we trade off insertion performance
|
||||
// for lookup performance.
|
||||
// TODO: could probably be done more efficiently with transparent Hash and Equals
|
||||
// on the original implementation using unordered_set
|
||||
auto it = symbols.find(s);
|
||||
if (it != symbols.end()) return it->second;
|
||||
|
||||
auto & rawSym = store.emplace_back(s);
|
||||
return symbols.emplace(rawSym, Symbol(&rawSym)).first->second;
|
||||
}
|
||||
|
||||
size_t size() const
|
||||
|
@ -91,7 +98,7 @@ public:
|
|||
template<typename T>
|
||||
void dump(T callback)
|
||||
{
|
||||
for (auto & s : symbols)
|
||||
for (auto & s : store)
|
||||
callback(s);
|
||||
}
|
||||
};
|
||||
|
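The new SymbolTable keeps the interned strings in a std::list<std::string>, whose elements never move, and an unordered_map from string_view into that list for fast repeated lookups. A self-contained sketch of the same interning pattern, with a plain const std::string * standing in for Nix's Symbol class:

```cpp
#include <iostream>
#include <list>
#include <string>
#include <string_view>
#include <unordered_map>

// String interner: each distinct string is stored exactly once, and equal
// strings intern to the same pointer, so equality becomes a pointer compare.
// std::list guarantees the stored strings never move, so both the returned
// pointers and the string_view keys stay valid as the table grows.
class Interner
{
    std::unordered_map<std::string_view, const std::string *> symbols;
    std::list<std::string> store;

public:
    const std::string * intern(std::string_view s)
    {
        auto it = symbols.find(s);
        if (it != symbols.end()) return it->second;

        auto & raw = store.emplace_back(s);
        // The key views the string owned by 'store', so it remains valid.
        return symbols.emplace(raw, &raw).first->second;
    }

    size_t size() const { return store.size(); }
};

int main()
{
    Interner t;
    auto * a = t.intern("hello");
    auto * b = t.intern("hello");
    auto * c = t.intern("world");
    std::cout << std::boolalpha
              << (a == b) << " "   // true: same interned pointer
              << (a == c) << " "   // false
              << t.size() << "\n"; // 2
}
```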
|
|
@ -10,11 +10,11 @@
|
|||
namespace nix {
|
||||
|
||||
void printValueAsJSON(EvalState & state, bool strict,
|
||||
Value & v, JSONPlaceholder & out, PathSet & context)
|
||||
Value & v, const Pos & pos, JSONPlaceholder & out, PathSet & context)
|
||||
{
|
||||
checkInterrupt();
|
||||
|
||||
if (strict) state.forceValue(v);
|
||||
if (strict) state.forceValue(v, pos);
|
||||
|
||||
switch (v.type()) {
|
||||
|
||||
|
@ -40,7 +40,7 @@ void printValueAsJSON(EvalState & state, bool strict,
|
|||
break;
|
||||
|
||||
case nAttrs: {
|
||||
auto maybeString = state.tryAttrsToString(noPos, v, context, false, false);
|
||||
auto maybeString = state.tryAttrsToString(pos, v, context, false, false);
|
||||
if (maybeString) {
|
||||
out.write(*maybeString);
|
||||
break;
|
||||
|
@ -54,18 +54,18 @@ void printValueAsJSON(EvalState & state, bool strict,
|
|||
for (auto & j : names) {
|
||||
Attr & a(*v.attrs->find(state.symbols.create(j)));
|
||||
auto placeholder(obj.placeholder(j));
|
||||
printValueAsJSON(state, strict, *a.value, placeholder, context);
|
||||
printValueAsJSON(state, strict, *a.value, *a.pos, placeholder, context);
|
||||
}
|
||||
} else
|
||||
printValueAsJSON(state, strict, *i->value, out, context);
|
||||
printValueAsJSON(state, strict, *i->value, *i->pos, out, context);
|
||||
break;
|
||||
}
|
||||
|
||||
case nList: {
|
||||
auto list(out.list());
|
||||
for (unsigned int n = 0; n < v.listSize(); ++n) {
|
||||
for (auto elem : v.listItems()) {
|
||||
auto placeholder(list.placeholder());
|
||||
printValueAsJSON(state, strict, *v.listElems()[n], placeholder, context);
|
||||
printValueAsJSON(state, strict, *elem, pos, placeholder, context);
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
@ -79,18 +79,20 @@ void printValueAsJSON(EvalState & state, bool strict,
|
|||
break;
|
||||
|
||||
case nThunk:
|
||||
throw TypeError("cannot convert %1% to JSON", showType(v));
|
||||
|
||||
case nFunction:
|
||||
throw TypeError("cannot convert %1% to JSON", showType(v));
|
||||
auto e = TypeError({
|
||||
.msg = hintfmt("cannot convert %1% to JSON", showType(v)),
|
||||
.errPos = v.determinePos(pos)
|
||||
});
|
||||
throw e.addTrace(pos, hintfmt("message for the trace"));
|
||||
}
|
||||
}
|
||||
|
||||
void printValueAsJSON(EvalState & state, bool strict,
|
||||
Value & v, std::ostream & str, PathSet & context)
|
||||
Value & v, const Pos & pos, std::ostream & str, PathSet & context)
|
||||
{
|
||||
JSONPlaceholder out(str);
|
||||
printValueAsJSON(state, strict, v, out, context);
|
||||
printValueAsJSON(state, strict, v, pos, out, context);
|
||||
}
|
||||
|
||||
void ExternalValueBase::printValueAsJSON(EvalState & state, bool strict,
|
||||
|
|
|
@ -11,9 +11,9 @@ namespace nix {
|
|||
class JSONPlaceholder;
|
||||
|
||||
void printValueAsJSON(EvalState & state, bool strict,
|
||||
Value & v, JSONPlaceholder & out, PathSet & context);
|
||||
Value & v, const Pos & pos, JSONPlaceholder & out, PathSet & context);
|
||||
|
||||
void printValueAsJSON(EvalState & state, bool strict,
|
||||
Value & v, std::ostream & str, PathSet & context);
|
||||
Value & v, const Pos & pos, std::ostream & str, PathSet & context);
|
||||
|
||||
}
|
||||
|
|
|
@ -9,7 +9,7 @@
|
|||
namespace nix {
|
||||
|
||||
|
||||
static XMLAttrs singletonAttrs(const string & name, const string & value)
|
||||
static XMLAttrs singletonAttrs(const std::string & name, const std::string & value)
|
||||
{
|
||||
XMLAttrs attrs;
|
||||
attrs[name] = value;
|
||||
|
@ -18,7 +18,8 @@ static XMLAttrs singletonAttrs(const string & name, const string & value)
|
|||
|
||||
|
||||
static void printValueAsXML(EvalState & state, bool strict, bool location,
|
||||
Value & v, XMLWriter & doc, PathSet & context, PathSet & drvsSeen);
|
||||
Value & v, XMLWriter & doc, PathSet & context, PathSet & drvsSeen,
|
||||
const Pos & pos);
|
||||
|
||||
|
||||
static void posToXML(XMLAttrs & xmlAttrs, const Pos & pos)
|
||||
|
@ -46,17 +47,18 @@ static void showAttrs(EvalState & state, bool strict, bool location,
|
|||
|
||||
XMLOpenElement _(doc, "attr", xmlAttrs);
|
||||
printValueAsXML(state, strict, location,
|
||||
*a.value, doc, context, drvsSeen);
|
||||
*a.value, doc, context, drvsSeen, *a.pos);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
static void printValueAsXML(EvalState & state, bool strict, bool location,
|
||||
Value & v, XMLWriter & doc, PathSet & context, PathSet & drvsSeen)
|
||||
Value & v, XMLWriter & doc, PathSet & context, PathSet & drvsSeen,
|
||||
const Pos & pos)
|
||||
{
|
||||
checkInterrupt();
|
||||
|
||||
if (strict) state.forceValue(v);
|
||||
if (strict) state.forceValue(v, pos);
|
||||
|
||||
switch (v.type()) {
|
||||
|
||||
|
@ -91,14 +93,14 @@ static void printValueAsXML(EvalState & state, bool strict, bool location,
|
|||
Path drvPath;
|
||||
a = v.attrs->find(state.sDrvPath);
|
||||
if (a != v.attrs->end()) {
|
||||
if (strict) state.forceValue(*a->value);
|
||||
if (strict) state.forceValue(*a->value, *a->pos);
|
||||
if (a->value->type() == nString)
|
||||
xmlAttrs["drvPath"] = drvPath = a->value->string.s;
|
||||
}
|
||||
|
||||
a = v.attrs->find(state.sOutPath);
|
||||
if (a != v.attrs->end()) {
|
||||
if (strict) state.forceValue(*a->value);
|
||||
if (strict) state.forceValue(*a->value, *a->pos);
|
||||
if (a->value->type() == nString)
|
||||
xmlAttrs["outPath"] = a->value->string.s;
|
||||
}
|
||||
|
@ -120,8 +122,8 @@ static void printValueAsXML(EvalState & state, bool strict, bool location,
|
|||
|
||||
case nList: {
|
||||
XMLOpenElement _(doc, "list");
|
||||
for (unsigned int n = 0; n < v.listSize(); ++n)
|
||||
printValueAsXML(state, strict, location, *v.listElems()[n], doc, context, drvsSeen);
|
||||
for (auto v2 : v.listItems())
|
||||
printValueAsXML(state, strict, location, *v2, doc, context, drvsSeen, pos);
|
||||
break;
|
||||
}
|
||||
|
||||
|
@ -135,12 +137,12 @@ static void printValueAsXML(EvalState & state, bool strict, bool location,
|
|||
if (location) posToXML(xmlAttrs, v.lambda.fun->pos);
|
||||
XMLOpenElement _(doc, "function", xmlAttrs);
|
||||
|
||||
if (v.lambda.fun->matchAttrs) {
|
||||
if (v.lambda.fun->hasFormals()) {
|
||||
XMLAttrs attrs;
|
||||
if (!v.lambda.fun->arg.empty()) attrs["name"] = v.lambda.fun->arg;
|
||||
if (v.lambda.fun->formals->ellipsis) attrs["ellipsis"] = "1";
|
||||
XMLOpenElement _(doc, "attrspat", attrs);
|
||||
for (auto & i : v.lambda.fun->formals->formals)
|
||||
for (auto & i : v.lambda.fun->formals->lexicographicOrder())
|
||||
doc.writeEmptyElement("attr", singletonAttrs("name", i.name));
|
||||
} else
|
||||
doc.writeEmptyElement("varpat", singletonAttrs("name", v.lambda.fun->arg));
|
||||
|
@ -149,7 +151,7 @@ static void printValueAsXML(EvalState & state, bool strict, bool location,
|
|||
}
|
||||
|
||||
case nExternal:
|
||||
v.external->printValueAsXML(state, strict, location, doc, context, drvsSeen);
|
||||
v.external->printValueAsXML(state, strict, location, doc, context, drvsSeen, pos);
|
||||
break;
|
||||
|
||||
case nFloat:
|
||||
|
@ -163,19 +165,20 @@ static void printValueAsXML(EvalState & state, bool strict, bool location,
|
|||
|
||||
|
||||
void ExternalValueBase::printValueAsXML(EvalState & state, bool strict,
|
||||
bool location, XMLWriter & doc, PathSet & context, PathSet & drvsSeen) const
|
||||
bool location, XMLWriter & doc, PathSet & context, PathSet & drvsSeen,
|
||||
const Pos & pos) const
|
||||
{
|
||||
doc.writeEmptyElement("unevaluated");
|
||||
}
|
||||
|
||||
|
||||
void printValueAsXML(EvalState & state, bool strict, bool location,
|
||||
Value & v, std::ostream & out, PathSet & context)
|
||||
Value & v, std::ostream & out, PathSet & context, const Pos & pos)
|
||||
{
|
||||
XMLWriter doc(true, out);
|
||||
XMLOpenElement root(doc, "expr");
|
||||
PathSet drvsSeen;
|
||||
printValueAsXML(state, strict, location, v, doc, context, drvsSeen);
|
||||
printValueAsXML(state, strict, location, v, doc, context, drvsSeen, pos);
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -9,6 +9,6 @@
|
|||
namespace nix {
|
||||
|
||||
void printValueAsXML(EvalState & state, bool strict, bool location,
|
||||
Value & v, std::ostream & out, PathSet & context);
|
||||
|
||||
Value & v, std::ostream & out, PathSet & context, const Pos & pos);
|
||||
|
||||
}
@ -1,5 +1,7 @@
|
|||
#pragma once
|
||||
|
||||
#include <cassert>
|
||||
|
||||
#include "symbol-table.hh"
|
||||
|
||||
#if HAVE_BOEHMGC
|
||||
|
@ -8,6 +10,8 @@
|
|||
|
||||
namespace nix {
|
||||
|
||||
class BindingsBuilder;
|
||||
|
||||
|
||||
typedef enum {
|
||||
tInt = 1,
|
||||
|
@ -73,20 +77,20 @@ class ExternalValueBase
|
|||
|
||||
public:
|
||||
/* Return a simple string describing the type */
|
||||
virtual string showType() const = 0;
|
||||
virtual std::string showType() const = 0;
|
||||
|
||||
/* Return a string to be used in builtins.typeOf */
|
||||
virtual string typeOf() const = 0;
|
||||
virtual std::string typeOf() const = 0;
|
||||
|
||||
/* Coerce the value to a string. Defaults to uncoercable, i.e. throws an
|
||||
* error
|
||||
* error.
|
||||
*/
|
||||
virtual string coerceToString(const Pos & pos, PathSet & context, bool copyMore, bool copyToStore) const;
|
||||
virtual std::string coerceToString(const Pos & pos, PathSet & context, bool copyMore, bool copyToStore) const;
|
||||
|
||||
/* Compare to another value of the same type. Defaults to uncomparable,
|
||||
* i.e. always false.
|
||||
*/
|
||||
virtual bool operator==(const ExternalValueBase & b) const;
|
||||
virtual bool operator ==(const ExternalValueBase & b) const;
|
||||
|
||||
/* Print the value as JSON. Defaults to unconvertable, i.e. throws an error */
|
||||
virtual void printValueAsJSON(EvalState & state, bool strict,
|
||||
|
@ -94,7 +98,8 @@ class ExternalValueBase
|
|||
|
||||
/* Print the value as XML. Defaults to unevaluated */
|
||||
virtual void printValueAsXML(EvalState & state, bool strict, bool location,
|
||||
XMLWriter & doc, PathSet & context, PathSet & drvsSeen) const;
|
||||
XMLWriter & doc, PathSet & context, PathSet & drvsSeen,
|
||||
const Pos & pos) const;
|
||||
|
||||
virtual ~ExternalValueBase()
|
||||
{
|
||||
|
@ -232,6 +237,17 @@ public:
|
|||
string.context = context;
|
||||
}
|
||||
|
||||
void mkString(std::string_view s);
|
||||
|
||||
void mkString(std::string_view s, const PathSet & context);
|
||||
|
||||
void mkStringMove(const char * s, const PathSet & context);
|
||||
|
||||
inline void mkString(const Symbol & s)
|
||||
{
|
||||
mkString(((const std::string &) s).c_str());
|
||||
}
|
||||
|
||||
inline void mkPath(const char * s)
|
||||
{
|
||||
clearValue();
|
||||
|
@ -239,6 +255,8 @@ public:
|
|||
path = s;
|
||||
}
|
||||
|
||||
void mkPath(std::string_view s);
|
||||
|
||||
inline void mkNull()
|
||||
{
|
||||
clearValue();
|
||||
|
@ -252,6 +270,8 @@ public:
|
|||
attrs = a;
|
||||
}
|
||||
|
||||
Value & mkAttrs(BindingsBuilder & bindings);
|
||||
|
||||
inline void mkList(size_t size)
|
||||
{
|
||||
clearValue();
|
||||
|
@ -341,7 +361,7 @@ public:
|
|||
return internalType == tList1 ? 1 : internalType == tList2 ? 2 : bigList.size;
|
||||
}
|
||||
|
||||
Pos determinePos(const Pos &pos) const;
|
||||
Pos determinePos(const Pos & pos) const;
|
||||
|
||||
/* Check whether forcing this value requires a trivial amount of
|
||||
computation. In particular, function applications are
|
||||
|
@ -349,54 +369,45 @@ public:
|
|||
bool isTrivial() const;
|
||||
|
||||
std::vector<std::pair<Path, std::string>> getContext();
|
||||
|
||||
auto listItems()
|
||||
{
|
||||
struct ListIterable
|
||||
{
|
||||
typedef Value * const * iterator;
|
||||
iterator _begin, _end;
|
||||
iterator begin() const { return _begin; }
|
||||
iterator end() const { return _end; }
|
||||
};
|
||||
assert(isList());
|
||||
auto begin = listElems();
|
||||
return ListIterable { begin, begin + listSize() };
|
||||
}
|
||||
|
||||
auto listItems() const
|
||||
{
|
||||
struct ConstListIterable
|
||||
{
|
||||
typedef const Value * const * iterator;
|
||||
iterator _begin, _end;
|
||||
iterator begin() const { return _begin; }
|
||||
iterator end() const { return _end; }
|
||||
};
|
||||
assert(isList());
|
||||
auto begin = listElems();
|
||||
return ConstListIterable { begin, begin + listSize() };
|
||||
}
|
||||
};
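
The listItems() helpers above wrap the raw element-pointer array in a tiny struct that exposes begin()/end(), so call sites can switch from index loops to range-based for. A minimal standalone sketch of the same pattern follows; the names (IntList, Items) are illustrative stand-ins, not the Nix types:

    #include <cassert>
    #include <cstddef>
    #include <iostream>

    struct IntList
    {
        int * elems = nullptr;
        size_t size = 0;

        // Range wrapper: exposes begin()/end() over the raw pointer array.
        struct Items
        {
            typedef int * iterator;
            iterator _begin, _end;
            iterator begin() const { return _begin; }
            iterator end() const { return _end; }
        };

        Items items() const
        {
            assert(elems || size == 0);
            return Items { elems, elems + size };
        }
    };

    int main()
    {
        int data[3] = {1, 2, 3};
        IntList l { data, 3 };
        for (auto n : l.items())   // instead of indexing l.elems[i]
            std::cout << n << "\n";
    }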
|
||||
|
||||
|
||||
|
||||
// TODO: Remove these static functions, replace call sites with v.mk* instead
|
||||
static inline void mkInt(Value & v, NixInt n)
|
||||
{
|
||||
v.mkInt(n);
|
||||
}
|
||||
|
||||
static inline void mkFloat(Value & v, NixFloat n)
|
||||
{
|
||||
v.mkFloat(n);
|
||||
}
|
||||
|
||||
static inline void mkBool(Value & v, bool b)
|
||||
{
|
||||
v.mkBool(b);
|
||||
}
|
||||
|
||||
static inline void mkNull(Value & v)
|
||||
{
|
||||
v.mkNull();
|
||||
}
|
||||
|
||||
static inline void mkApp(Value & v, Value & left, Value & right)
|
||||
{
|
||||
v.mkApp(&left, &right);
|
||||
}
|
||||
|
||||
static inline void mkString(Value & v, const Symbol & s)
|
||||
{
|
||||
v.mkString(((const string &) s).c_str());
|
||||
}
|
||||
|
||||
|
||||
void mkString(Value & v, const char * s);
|
||||
|
||||
|
||||
void mkPath(Value & v, const char * s);
|
||||
|
||||
|
||||
#if HAVE_BOEHMGC
|
||||
typedef std::vector<Value *, traceable_allocator<Value *> > ValueVector;
|
||||
typedef std::map<Symbol, Value *, std::less<Symbol>, traceable_allocator<std::pair<const Symbol, Value *> > > ValueMap;
|
||||
typedef std::map<Symbol, ValueVector, std::less<Symbol>, traceable_allocator<std::pair<const Symbol, ValueVector> > > ValueVectorMap;
|
||||
#else
|
||||
typedef std::vector<Value *> ValueVector;
|
||||
typedef std::map<Symbol, Value *> ValueMap;
|
||||
typedef std::map<Symbol, ValueVector> ValueVectorMap;
|
||||
#endif
|
||||
|
||||
|
||||
|
|
|
@ -52,13 +52,13 @@ struct CacheImpl : Cache
|
|||
const Attrs & inAttrs,
|
||||
const Attrs & infoAttrs,
|
||||
const StorePath & storePath,
|
||||
bool immutable) override
|
||||
bool locked) override
|
||||
{
|
||||
_state.lock()->add.use()
|
||||
(attrsToJSON(inAttrs).dump())
|
||||
(attrsToJSON(infoAttrs).dump())
|
||||
(store->printStorePath(storePath))
|
||||
(immutable)
|
||||
(locked)
|
||||
(time(0)).exec();
|
||||
}
|
||||
|
||||
|
@ -91,7 +91,7 @@ struct CacheImpl : Cache
|
|||
|
||||
auto infoJSON = stmt.getStr(0);
|
||||
auto storePath = store->parseStorePath(stmt.getStr(1));
|
||||
auto immutable = stmt.getInt(2) != 0;
|
||||
auto locked = stmt.getInt(2) != 0;
|
||||
auto timestamp = stmt.getInt(3);
|
||||
|
||||
store->addTempRoot(storePath);
|
||||
|
@ -105,7 +105,7 @@ struct CacheImpl : Cache
|
|||
inAttrsJSON, infoJSON, store->printStorePath(storePath));
|
||||
|
||||
return Result {
|
||||
.expired = !immutable && (settings.tarballTtl.get() == 0 || timestamp + settings.tarballTtl < time(0)),
|
||||
.expired = !locked && (settings.tarballTtl.get() == 0 || timestamp + settings.tarballTtl < time(0)),
|
||||
.infoAttrs = jsonToAttrs(nlohmann::json::parse(infoJSON)),
|
||||
.storePath = std::move(storePath)
|
||||
};
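
The `expired` flag above follows one rule: a locked entry never expires, while an unlocked one expires when the TTL is zero or the entry is older than the TTL. A small sketch of that rule, with `ttl` and `now` as illustrative stand-ins for `settings.tarballTtl` and `time(0)`:

    #include <ctime>

    // Illustrative: locked entries never expire; unlocked ones expire when the
    // TTL is 0 or the entry is older than ttl seconds.
    static bool isExpired(bool locked, std::time_t timestamp, std::time_t ttl, std::time_t now)
    {
        return !locked && (ttl == 0 || timestamp + ttl < now);
    }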
|
||||
|
|
|
@ -13,7 +13,7 @@ struct Cache
|
|||
const Attrs & inAttrs,
|
||||
const Attrs & infoAttrs,
|
||||
const StorePath & storePath,
|
||||
bool immutable) = 0;
|
||||
bool locked) = 0;
|
||||
|
||||
virtual std::optional<std::pair<Attrs, StorePath>> lookup(
|
||||
ref<Store> store,
@ -24,11 +24,11 @@ static void fixupInput(Input & input)
|
|||
input.getType();
|
||||
input.getRef();
|
||||
if (input.getRev())
|
||||
input.immutable = true;
|
||||
input.locked = true;
|
||||
input.getRevCount();
|
||||
input.getLastModified();
|
||||
if (input.getNarHash())
|
||||
input.immutable = true;
|
||||
input.locked = true;
|
||||
}
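
fixupInput() marks an input as locked as soon as it carries either a revision or a NAR hash. The same decision can be written as a single predicate over the two attributes; `rev` and `narHash` here are illustrative optionals, not the fetchers::Input accessors:

    #include <optional>
    #include <string>

    // Illustrative sketch: an input counts as "locked" once it pins either a
    // commit hash or a content (NAR) hash.
    static bool computeLocked(const std::optional<std::string> & rev,
        const std::optional<std::string> & narHash)
    {
        return rev.has_value() || narHash.has_value();
    }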
|
||||
|
||||
Input Input::fromURL(const ParsedURL & url)
|
||||
|
@ -124,15 +124,13 @@ std::pair<Tree, Input> Input::fetch(ref<Store> store) const
|
|||
debug("using substituted/cached input '%s' in '%s'",
|
||||
to_string(), store->printStorePath(storePath));
|
||||
|
||||
auto actualPath = store->toRealPath(storePath);
|
||||
|
||||
return {fetchers::Tree(std::move(actualPath), std::move(storePath)), *this};
|
||||
return {Tree { .actualPath = store->toRealPath(storePath), .storePath = std::move(storePath) }, *this};
|
||||
} catch (Error & e) {
|
||||
debug("substitution of input '%s' failed: %s", to_string(), e.what());
|
||||
}
|
||||
}
|
||||
|
||||
auto [tree, input] = [&]() -> std::pair<Tree, Input> {
|
||||
auto [storePath, input] = [&]() -> std::pair<StorePath, Input> {
|
||||
try {
|
||||
return scheme->fetch(store, *this);
|
||||
} catch (Error & e) {
|
||||
|
@ -141,8 +139,10 @@ std::pair<Tree, Input> Input::fetch(ref<Store> store) const
|
|||
}
|
||||
}();
|
||||
|
||||
if (tree.actualPath == "")
|
||||
tree.actualPath = store->toRealPath(tree.storePath);
|
||||
Tree tree {
|
||||
.actualPath = store->toRealPath(storePath),
|
||||
.storePath = storePath,
|
||||
};
|
||||
|
||||
auto narHash = store->queryPathInfo(tree.storePath)->narHash;
|
||||
input.attrs.insert_or_assign("narHash", narHash.to_string(SRI, true));
|
||||
|
@ -165,7 +165,7 @@ std::pair<Tree, Input> Input::fetch(ref<Store> store) const
|
|||
input.to_string(), *prevRevCount);
|
||||
}
|
||||
|
||||
input.immutable = true;
|
||||
input.locked = true;
|
||||
|
||||
assert(input.hasAllInfo());
|
||||
|
||||
|
@ -209,7 +209,7 @@ StorePath Input::computeStorePath(Store & store) const
|
|||
{
|
||||
auto narHash = getNarHash();
|
||||
if (!narHash)
|
||||
throw Error("cannot compute store path for mutable input '%s'", to_string());
|
||||
throw Error("cannot compute store path for unlocked input '%s'", to_string());
|
||||
return store.makeFixedOutputPath(FileIngestionMethod::Recursive, *narHash, getName());
|
||||
}
|
||||
|
||||
|
|
|
@ -16,7 +16,6 @@ struct Tree
|
|||
{
|
||||
Path actualPath;
|
||||
StorePath storePath;
|
||||
Tree(Path && actualPath, StorePath && storePath) : actualPath(actualPath), storePath(std::move(storePath)) {}
|
||||
};
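
With the user-declared constructor removed, Tree becomes a plain aggregate, which is why the call sites above move to designated initializers. A sketch of that idiom on a hypothetical aggregate (C++20 designated initializers; TreeLike is illustrative, not the real struct):

    #include <string>

    struct TreeLike     // illustrative stand-in for an aggregate like Tree
    {
        std::string actualPath;
        std::string storePath;
    };

    TreeLike makeTree()
    {
        // Aggregate initialization; no constructor needed, and each field is named.
        return TreeLike { .actualPath = "/nix/store/abc-src", .storePath = "abc-src" };
    }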
|
||||
|
||||
struct InputScheme;
|
||||
|
@ -35,7 +34,7 @@ struct Input
|
|||
|
||||
std::shared_ptr<InputScheme> scheme; // note: can be null
|
||||
Attrs attrs;
|
||||
bool immutable = false;
|
||||
bool locked = false;
|
||||
bool direct = true;
|
||||
|
||||
/* path of the parent of this input, used for relative path resolution */
|
||||
|
@ -60,9 +59,9 @@ public:
|
|||
one that goes through a registry. */
|
||||
bool isDirect() const { return direct; }
|
||||
|
||||
/* Check whether this is an "immutable" input, that is,
|
||||
/* Check whether this is a "locked" input, that is,
|
||||
one that contains a commit hash or content hash. */
|
||||
bool isImmutable() const { return immutable; }
|
||||
bool isLocked() const { return locked; }
|
||||
|
||||
bool hasAllInfo() const;
|
||||
|
||||
|
@ -70,6 +69,8 @@ public:
|
|||
|
||||
bool contains(const Input & other) const;
|
||||
|
||||
/* Fetch the input into the Nix store, returning the location in
|
||||
the Nix store and the locked input. */
|
||||
std::pair<Tree, Input> fetch(ref<Store> store) const;
|
||||
|
||||
Input applyOverrides(
|
||||
|
@ -131,7 +132,7 @@ struct InputScheme
|
|||
|
||||
virtual void markChangedFile(const Input & input, std::string_view file, std::optional<std::string> commitMsg);
|
||||
|
||||
virtual std::pair<Tree, Input> fetch(ref<Store> store, const Input & input) = 0;
|
||||
virtual std::pair<StorePath, Input> fetch(ref<Store> store, const Input & input) = 0;
|
||||
};
|
||||
|
||||
void registerInputScheme(std::shared_ptr<InputScheme> && fetcher);
|
||||
|
@ -147,14 +148,14 @@ DownloadFileResult downloadFile(
|
|||
ref<Store> store,
|
||||
const std::string & url,
|
||||
const std::string & name,
|
||||
bool immutable,
|
||||
bool locked,
|
||||
const Headers & headers = {});
|
||||
|
||||
std::pair<Tree, time_t> downloadTarball(
|
||||
ref<Store> store,
|
||||
const std::string & url,
|
||||
const std::string & name,
|
||||
bool immutable,
|
||||
bool locked,
|
||||
const Headers & headers = {});
|
||||
|
||||
}
|
||||
|
|
|
@ -13,6 +13,12 @@ using namespace std::string_literals;
|
|||
|
||||
namespace nix::fetchers {
|
||||
|
||||
// Explicit initial branch of our bare repo to suppress warnings from new version of git.
|
||||
// The value itself does not matter, since we always fetch a specific revision or branch.
|
||||
// It is set with `-c init.defaultBranch=` instead of `--initial-branch=` to stay compatible with
|
||||
// old versions of git, which ignore unrecognized `-c` options.
|
||||
const std::string gitInitialBranch = "__nix_dummy_branch";
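
As the comment explains, the dummy branch name is passed via `-c init.defaultBranch=...` so that older git versions simply ignore the unknown setting, whereas `--initial-branch=` would make them fail. A sketch of how such an argument vector can be assembled (the runProgram-style invocation itself is assumed, not shown):

    #include <string>
    #include <vector>

    // Illustrative: build the argv for "git ... init --bare <repoDir>" with an
    // explicit default branch, passed as a -c option for compatibility.
    static std::vector<std::string> gitInitArgs(const std::string & initialBranch,
        const std::string & repoDir)
    {
        return { "-c", "init.defaultBranch=" + initialBranch, "init", "--bare", repoDir };
    }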
|
||||
|
||||
static std::string readHead(const Path & path)
|
||||
{
|
||||
return chomp(runProgram("git", true, { "-C", path, "rev-parse", "--abbrev-ref", "HEAD" }));
|
||||
|
@ -45,7 +51,7 @@ struct GitInputScheme : InputScheme
|
|||
for (auto &[name, value] : url.query) {
|
||||
if (name == "rev" || name == "ref")
|
||||
attrs.emplace(name, value);
|
||||
else if (name == "shallow")
|
||||
else if (name == "shallow" || name == "submodules")
|
||||
attrs.emplace(name, Explicit<bool> { value == "1" });
|
||||
else
|
||||
url2.query.emplace(name, value);
|
||||
|
@ -166,14 +172,14 @@ struct GitInputScheme : InputScheme
|
|||
return {isLocal, isLocal ? url.path : url.base};
|
||||
}
|
||||
|
||||
std::pair<Tree, Input> fetch(ref<Store> store, const Input & _input) override
|
||||
std::pair<StorePath, Input> fetch(ref<Store> store, const Input & _input) override
|
||||
{
|
||||
Input input(_input);
|
||||
|
||||
std::string name = input.getName();
|
||||
|
||||
bool shallow = maybeGetBoolAttr(input.attrs, "shallow").value_or(false);
|
||||
bool submodules = maybeGetBoolAttr(input.attrs, "submodules").value_or(true);
|
||||
bool submodules = maybeGetBoolAttr(input.attrs, "submodules").value_or(false);
|
||||
bool allRefs = maybeGetBoolAttr(input.attrs, "allRefs").value_or(false);
|
||||
|
||||
std::string cacheType = "git";
|
||||
|
@ -181,7 +187,7 @@ struct GitInputScheme : InputScheme
|
|||
if (submodules) cacheType += "-submodules";
|
||||
if (allRefs) cacheType += "-all-refs";
|
||||
|
||||
auto getImmutableAttrs = [&]()
|
||||
auto getLockedAttrs = [&]()
|
||||
{
|
||||
return Attrs({
|
||||
{"type", cacheType},
|
||||
|
@ -191,21 +197,18 @@ struct GitInputScheme : InputScheme
|
|||
};
|
||||
|
||||
auto makeResult = [&](const Attrs & infoAttrs, StorePath && storePath)
|
||||
-> std::pair<Tree, Input>
|
||||
-> std::pair<StorePath, Input>
|
||||
{
|
||||
assert(input.getRev());
|
||||
assert(!_input.getRev() || _input.getRev() == input.getRev());
|
||||
if (!shallow)
|
||||
input.attrs.insert_or_assign("revCount", getIntAttr(infoAttrs, "revCount"));
|
||||
input.attrs.insert_or_assign("lastModified", getIntAttr(infoAttrs, "lastModified"));
|
||||
return {
|
||||
Tree(store->toRealPath(storePath), std::move(storePath)),
|
||||
input
|
||||
};
|
||||
return {std::move(storePath), input};
|
||||
};
|
||||
|
||||
if (input.getRev()) {
|
||||
if (auto res = getCache()->lookup(store, getImmutableAttrs()))
|
||||
if (auto res = getCache()->lookup(store, getLockedAttrs()))
|
||||
return makeResult(res->first, std::move(res->second));
|
||||
}
|
||||
|
||||
|
@ -279,16 +282,13 @@ struct GitInputScheme : InputScheme
|
|||
"lastModified",
|
||||
haveCommits ? std::stoull(runProgram("git", true, { "-C", actualUrl, "log", "-1", "--format=%ct", "--no-show-signature", "HEAD" })) : 0);
|
||||
|
||||
return {
|
||||
Tree(store->toRealPath(storePath), std::move(storePath)),
|
||||
input
|
||||
};
|
||||
return {std::move(storePath), input};
|
||||
}
|
||||
}
|
||||
|
||||
if (!input.getRef()) input.attrs.insert_or_assign("ref", isLocal ? readHead(actualUrl) : "master");
|
||||
|
||||
Attrs mutableAttrs({
|
||||
Attrs unlockedAttrs({
|
||||
{"type", cacheType},
|
||||
{"name", name},
|
||||
{"url", actualUrl},
|
||||
|
@ -307,7 +307,7 @@ struct GitInputScheme : InputScheme
|
|||
|
||||
} else {
|
||||
|
||||
if (auto res = getCache()->lookup(store, mutableAttrs)) {
|
||||
if (auto res = getCache()->lookup(store, unlockedAttrs)) {
|
||||
auto rev2 = Hash::parseAny(getStrAttr(res->first, "rev"), htSHA1);
|
||||
if (!input.getRev() || input.getRev() == rev2) {
|
||||
input.attrs.insert_or_assign("rev", rev2.gitRev());
|
||||
|
@ -318,17 +318,13 @@ struct GitInputScheme : InputScheme
|
|||
Path cacheDir = getCacheDir() + "/nix/gitv3/" + hashString(htSHA256, actualUrl).to_string(Base32, false);
|
||||
repoDir = cacheDir;
|
||||
|
||||
Path cacheDirLock = cacheDir + ".lock";
|
||||
createDirs(dirOf(cacheDir));
|
||||
AutoCloseFD lock = openLockFile(cacheDirLock, true);
|
||||
lockFile(lock.get(), ltWrite, true);
|
||||
PathLocks cacheDirLock({cacheDir + ".lock"});
|
||||
|
||||
if (!pathExists(cacheDir)) {
|
||||
runProgram("git", true, { "init", "--bare", repoDir });
|
||||
runProgram("git", true, { "-c", "init.defaultBranch=" + gitInitialBranch, "init", "--bare", repoDir });
|
||||
}
|
||||
|
||||
deleteLockFile(cacheDirLock, lock.get());
|
||||
|
||||
Path localRefFile =
|
||||
input.getRef()->compare(0, 5, "refs/") == 0
|
||||
? cacheDir + "/" + *input.getRef()
|
||||
|
@ -393,6 +389,8 @@ struct GitInputScheme : InputScheme
|
|||
|
||||
if (!input.getRev())
|
||||
input.attrs.insert_or_assign("rev", Hash::parseAny(chomp(readFile(localRefFile)), htSHA1).gitRev());
|
||||
|
||||
// cache dir lock is removed at scope end; we will only use read-only operations on specific revisions in the remainder
|
||||
}
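
Replacing the manually opened and deleted lock file with a `PathLocks` object ties the lock to a scope and releases it automatically, as the comment above notes. The general RAII shape, sketched with a plain mutex standing in for the on-disk lock:

    #include <mutex>

    std::mutex cacheDirMutex;   // illustrative stand-in for the cache-dir file lock

    void updateCache()
    {
        {
            std::lock_guard<std::mutex> lock(cacheDirMutex);
            // ... fetch refs and write files under the cache dir ...
        }   // lock released here, at scope end
        // read-only use of specific revisions continues without the lock
    }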
|
||||
|
||||
bool isShallow = chomp(runProgram("git", true, { "-C", repoDir, "rev-parse", "--is-shallow-repository" })) == "true";
|
||||
|
@ -406,7 +404,7 @@ struct GitInputScheme : InputScheme
|
|||
|
||||
/* Now that we know the ref, check again whether we have it in
|
||||
the store. */
|
||||
if (auto res = getCache()->lookup(store, getImmutableAttrs()))
|
||||
if (auto res = getCache()->lookup(store, getLockedAttrs()))
|
||||
return makeResult(res->first, std::move(res->second));
|
||||
|
||||
Path tmpDir = createTempDir();
|
||||
|
@ -436,7 +434,7 @@ struct GitInputScheme : InputScheme
|
|||
Path tmpGitDir = createTempDir();
|
||||
AutoDelete delTmpGitDir(tmpGitDir, true);
|
||||
|
||||
runProgram("git", true, { "init", tmpDir, "--separate-git-dir", tmpGitDir });
|
||||
runProgram("git", true, { "-c", "init.defaultBranch=" + gitInitialBranch, "init", tmpDir, "--separate-git-dir", tmpGitDir });
|
||||
// TODO: repoDir might lack the ref (it only checks if rev
|
||||
// exists, see FIXME above) so use a big hammer and fetch
|
||||
// everything to ensure we get the rev.
|
||||
|
@ -478,14 +476,14 @@ struct GitInputScheme : InputScheme
|
|||
if (!_input.getRev())
|
||||
getCache()->add(
|
||||
store,
|
||||
mutableAttrs,
|
||||
unlockedAttrs,
|
||||
infoAttrs,
|
||||
storePath,
|
||||
false);
|
||||
|
||||
getCache()->add(
|
||||
store,
|
||||
getImmutableAttrs(),
|
||||
getLockedAttrs(),
|
||||
infoAttrs,
|
||||
storePath,
|
||||
true);
|
||||
|
|
|
@ -8,6 +8,7 @@
|
|||
|
||||
#include <optional>
|
||||
#include <nlohmann/json.hpp>
|
||||
#include <fstream>
|
||||
|
||||
namespace nix::fetchers {
|
||||
|
||||
|
@ -17,7 +18,7 @@ struct DownloadUrl
|
|||
Headers headers;
|
||||
};
|
||||
|
||||
// A github or gitlab host
|
||||
// A github, gitlab, or sourcehut host
|
||||
const static std::string hostRegexS = "[a-zA-Z0-9.]*"; // FIXME: check
|
||||
std::regex hostRegex(hostRegexS, std::regex::ECMAScript);
|
||||
|
||||
|
@ -180,7 +181,7 @@ struct GitArchiveInputScheme : InputScheme
|
|||
|
||||
virtual DownloadUrl getDownloadUrl(const Input & input) const = 0;
|
||||
|
||||
std::pair<Tree, Input> fetch(ref<Store> store, const Input & _input) override
|
||||
std::pair<StorePath, Input> fetch(ref<Store> store, const Input & _input) override
|
||||
{
|
||||
Input input(_input);
|
||||
|
||||
|
@ -192,17 +193,14 @@ struct GitArchiveInputScheme : InputScheme
|
|||
input.attrs.erase("ref");
|
||||
input.attrs.insert_or_assign("rev", rev->gitRev());
|
||||
|
||||
Attrs immutableAttrs({
|
||||
Attrs lockedAttrs({
|
||||
{"type", "git-tarball"},
|
||||
{"rev", rev->gitRev()},
|
||||
});
|
||||
|
||||
if (auto res = getCache()->lookup(store, immutableAttrs)) {
|
||||
if (auto res = getCache()->lookup(store, lockedAttrs)) {
|
||||
input.attrs.insert_or_assign("lastModified", getIntAttr(res->first, "lastModified"));
|
||||
return {
|
||||
Tree(store->toRealPath(res->second), std::move(res->second)),
|
||||
input
|
||||
};
|
||||
return {std::move(res->second), input};
|
||||
}
|
||||
|
||||
auto url = getDownloadUrl(input);
|
||||
|
@ -213,7 +211,7 @@ struct GitArchiveInputScheme : InputScheme
|
|||
|
||||
getCache()->add(
|
||||
store,
|
||||
immutableAttrs,
|
||||
lockedAttrs,
|
||||
{
|
||||
{"rev", rev->gitRev()},
|
||||
{"lastModified", uint64_t(lastModified)}
|
||||
|
@ -221,7 +219,7 @@ struct GitArchiveInputScheme : InputScheme
|
|||
tree.storePath,
|
||||
true);
|
||||
|
||||
return {std::move(tree), input};
|
||||
return {std::move(tree.storePath), input};
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -300,7 +298,7 @@ struct GitLabInputScheme : GitArchiveInputScheme
|
|||
if ("PAT" == token.substr(0, fldsplit))
|
||||
return std::make_pair("Private-token", token.substr(fldsplit+1));
|
||||
warn("Unrecognized GitLab token type %s", token.substr(0, fldsplit));
|
||||
return std::nullopt;
|
||||
return std::make_pair(token.substr(0,fldsplit), token.substr(fldsplit+1));
|
||||
}
|
||||
|
||||
Hash getRevFromRef(nix::ref<Store> store, const Input & input) const override
|
||||
|
@ -348,7 +346,95 @@ struct GitLabInputScheme : GitArchiveInputScheme
|
|||
}
|
||||
};
|
||||
|
||||
struct SourceHutInputScheme : GitArchiveInputScheme
|
||||
{
|
||||
std::string type() override { return "sourcehut"; }
|
||||
|
||||
std::optional<std::pair<std::string, std::string>> accessHeaderFromToken(const std::string & token) const override
|
||||
{
|
||||
// SourceHut supports both PAT and OAuth2. See
|
||||
// https://man.sr.ht/meta.sr.ht/oauth.md
|
||||
return std::pair<std::string, std::string>("Authorization", fmt("Bearer %s", token));
|
||||
// Note: This currently serves no purpose, as this kind of authorization
|
||||
// does not allow for downloading tarballs on sourcehut private repos.
|
||||
// Once it is implemented, however, this should work as expected.
|
||||
}
|
||||
|
||||
Hash getRevFromRef(nix::ref<Store> store, const Input & input) const override
|
||||
{
|
||||
// TODO: In the future, when the sourcehut graphql API is implemented for mercurial
|
||||
// and with anonymous access, this method should use it instead.
|
||||
|
||||
auto ref = *input.getRef();
|
||||
|
||||
auto host = maybeGetStrAttr(input.attrs, "host").value_or("git.sr.ht");
|
||||
auto base_url = fmt("https://%s/%s/%s",
|
||||
host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"));
|
||||
|
||||
Headers headers = makeHeadersWithAuthTokens(host);
|
||||
|
||||
std::string ref_uri;
|
||||
if (ref == "HEAD") {
|
||||
auto file = store->toRealPath(
|
||||
downloadFile(store, fmt("%s/HEAD", base_url), "source", false, headers).storePath);
|
||||
std::ifstream is(file);
|
||||
std::string line;
|
||||
getline(is, line);
|
||||
|
||||
auto ref_index = line.find("ref: ");
|
||||
if (ref_index == std::string::npos) {
|
||||
throw BadURL("in '%d', couldn't resolve HEAD ref '%d'", input.to_string(), ref);
|
||||
}
|
||||
|
||||
ref_uri = line.substr(ref_index+5, line.length()-1);
|
||||
} else
|
||||
ref_uri = fmt("refs/heads/%s", ref);
|
||||
|
||||
auto file = store->toRealPath(
|
||||
downloadFile(store, fmt("%s/info/refs", base_url), "source", false, headers).storePath);
|
||||
std::ifstream is(file);
|
||||
|
||||
std::string line;
|
||||
std::string id;
|
||||
while(getline(is, line)) {
|
||||
auto index = line.find(ref_uri);
|
||||
if (index != std::string::npos) {
|
||||
id = line.substr(0, index-1);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if(id.empty())
|
||||
throw BadURL("in '%d', couldn't find ref '%d'", input.to_string(), ref);
|
||||
|
||||
auto rev = Hash::parseAny(id, htSHA1);
|
||||
debug("HEAD revision for '%s' is %s", fmt("%s/%s", base_url, ref), rev.gitRev());
|
||||
return rev;
|
||||
}
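
The SourceHut scheme above resolves a symbolic ref with two text passes: read `ref: refs/heads/...` from the downloaded HEAD file, then scan `info/refs` for a line containing that ref and take the leading object id. A compact sketch of those two parsing steps over plain strings (file download omitted; function names are illustrative):

    #include <optional>
    #include <sstream>
    #include <string>

    // Illustrative: extract "refs/heads/x" from a HEAD line like "ref: refs/heads/x".
    static std::optional<std::string> parseHeadRef(const std::string & line)
    {
        auto i = line.find("ref: ");
        if (i == std::string::npos) return std::nullopt;
        return line.substr(i + 5);
    }

    // Illustrative: find the object id preceding refUri in an info/refs listing,
    // where each line looks like "<sha1>\trefs/heads/x".
    static std::optional<std::string> findRefId(const std::string & infoRefs,
        const std::string & refUri)
    {
        std::istringstream is(infoRefs);
        std::string line;
        while (std::getline(is, line)) {
            auto i = line.find(refUri);
            if (i != std::string::npos && i > 0)
                return line.substr(0, i - 1);
        }
        return std::nullopt;
    }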
|
||||
|
||||
DownloadUrl getDownloadUrl(const Input & input) const override
|
||||
{
|
||||
auto host = maybeGetStrAttr(input.attrs, "host").value_or("git.sr.ht");
|
||||
auto url = fmt("https://%s/%s/%s/archive/%s.tar.gz",
|
||||
host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"),
|
||||
input.getRev()->to_string(Base16, false));
|
||||
|
||||
Headers headers = makeHeadersWithAuthTokens(host);
|
||||
return DownloadUrl { url, headers };
|
||||
}
|
||||
|
||||
void clone(const Input & input, const Path & destDir) override
|
||||
{
|
||||
auto host = maybeGetStrAttr(input.attrs, "host").value_or("git.sr.ht");
|
||||
Input::fromURL(fmt("git+https://%s/%s/%s",
|
||||
host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo")))
|
||||
.applyOverrides(input.getRef(), input.getRev())
|
||||
.clone(destDir);
|
||||
}
|
||||
};
|
||||
|
||||
static auto rGitHubInputScheme = OnStartup([] { registerInputScheme(std::make_unique<GitHubInputScheme>()); });
|
||||
static auto rGitLabInputScheme = OnStartup([] { registerInputScheme(std::make_unique<GitLabInputScheme>()); });
|
||||
static auto rSourceHutInputScheme = OnStartup([] { registerInputScheme(std::make_unique<SourceHutInputScheme>()); });
|
||||
|
||||
}
@ -94,7 +94,7 @@ struct IndirectInputScheme : InputScheme
|
|||
return input;
|
||||
}
|
||||
|
||||
std::pair<Tree, Input> fetch(ref<Store> store, const Input & input) override
|
||||
std::pair<StorePath, Input> fetch(ref<Store> store, const Input & input) override
|
||||
{
|
||||
throw Error("indirect input '%s' cannot be fetched directly", input.to_string());
|
||||
}
|
||||
|
|
|
@ -26,7 +26,7 @@ static RunOptions hgOptions(const Strings & args)
|
|||
}
|
||||
|
||||
// runProgram wrapper that uses hgOptions instead of stock RunOptions.
|
||||
static string runHg(const Strings & args, const std::optional<std::string> & input = {})
|
||||
static std::string runHg(const Strings & args, const std::optional<std::string> & input = {})
|
||||
{
|
||||
RunOptions opts = hgOptions(args);
|
||||
opts.input = input;
|
||||
|
@ -143,7 +143,7 @@ struct MercurialInputScheme : InputScheme
|
|||
return {isLocal, isLocal ? url.path : url.base};
|
||||
}
|
||||
|
||||
std::pair<Tree, Input> fetch(ref<Store> store, const Input & _input) override
|
||||
std::pair<StorePath, Input> fetch(ref<Store> store, const Input & _input) override
|
||||
{
|
||||
Input input(_input);
|
||||
|
||||
|
@ -193,16 +193,13 @@ struct MercurialInputScheme : InputScheme
|
|||
|
||||
auto storePath = store->addToStore(input.getName(), actualUrl, FileIngestionMethod::Recursive, htSHA256, filter);
|
||||
|
||||
return {
|
||||
Tree(store->toRealPath(storePath), std::move(storePath)),
|
||||
input
|
||||
};
|
||||
return {std::move(storePath), input};
|
||||
}
|
||||
}
|
||||
|
||||
if (!input.getRef()) input.attrs.insert_or_assign("ref", "default");
|
||||
|
||||
auto getImmutableAttrs = [&]()
|
||||
auto getLockedAttrs = [&]()
|
||||
{
|
||||
return Attrs({
|
||||
{"type", "hg"},
|
||||
|
@ -212,32 +209,29 @@ struct MercurialInputScheme : InputScheme
|
|||
};
|
||||
|
||||
auto makeResult = [&](const Attrs & infoAttrs, StorePath && storePath)
|
||||
-> std::pair<Tree, Input>
|
||||
-> std::pair<StorePath, Input>
|
||||
{
|
||||
assert(input.getRev());
|
||||
assert(!_input.getRev() || _input.getRev() == input.getRev());
|
||||
input.attrs.insert_or_assign("revCount", getIntAttr(infoAttrs, "revCount"));
|
||||
return {
|
||||
Tree(store->toRealPath(storePath), std::move(storePath)),
|
||||
input
|
||||
};
|
||||
return {std::move(storePath), input};
|
||||
};
|
||||
|
||||
if (input.getRev()) {
|
||||
if (auto res = getCache()->lookup(store, getImmutableAttrs()))
|
||||
if (auto res = getCache()->lookup(store, getLockedAttrs()))
|
||||
return makeResult(res->first, std::move(res->second));
|
||||
}
|
||||
|
||||
auto revOrRef = input.getRev() ? input.getRev()->gitRev() : *input.getRef();
|
||||
|
||||
Attrs mutableAttrs({
|
||||
Attrs unlockedAttrs({
|
||||
{"type", "hg"},
|
||||
{"name", name},
|
||||
{"url", actualUrl},
|
||||
{"ref", *input.getRef()},
|
||||
});
|
||||
|
||||
if (auto res = getCache()->lookup(store, mutableAttrs)) {
|
||||
if (auto res = getCache()->lookup(store, unlockedAttrs)) {
|
||||
auto rev2 = Hash::parseAny(getStrAttr(res->first, "rev"), htSHA1);
|
||||
if (!input.getRev() || input.getRev() == rev2) {
|
||||
input.attrs.insert_or_assign("rev", rev2.gitRev());
|
||||
|
@ -260,7 +254,7 @@ struct MercurialInputScheme : InputScheme
|
|||
runHg({ "pull", "-R", cacheDir, "--", actualUrl });
|
||||
}
|
||||
catch (ExecError & e) {
|
||||
string transJournal = cacheDir + "/.hg/store/journal";
|
||||
auto transJournal = cacheDir + "/.hg/store/journal";
|
||||
/* hg throws "abandoned transaction" error only if this file exists */
|
||||
if (pathExists(transJournal)) {
|
||||
runHg({ "recover", "-R", cacheDir });
|
||||
|
@ -283,7 +277,7 @@ struct MercurialInputScheme : InputScheme
|
|||
auto revCount = std::stoull(tokens[1]);
|
||||
input.attrs.insert_or_assign("ref", tokens[2]);
|
||||
|
||||
if (auto res = getCache()->lookup(store, getImmutableAttrs()))
|
||||
if (auto res = getCache()->lookup(store, getLockedAttrs()))
|
||||
return makeResult(res->first, std::move(res->second));
|
||||
|
||||
Path tmpDir = createTempDir();
|
||||
|
@ -303,14 +297,14 @@ struct MercurialInputScheme : InputScheme
|
|||
if (!_input.getRev())
|
||||
getCache()->add(
|
||||
store,
|
||||
mutableAttrs,
|
||||
unlockedAttrs,
|
||||
infoAttrs,
|
||||
storePath,
|
||||
false);
|
||||
|
||||
getCache()->add(
|
||||
store,
|
||||
getImmutableAttrs(),
|
||||
getLockedAttrs(),
|
||||
infoAttrs,
|
||||
storePath,
|
||||
true);
|
||||
|
|
|
@ -80,23 +80,31 @@ struct PathInputScheme : InputScheme
|
|||
// nothing to do
|
||||
}
|
||||
|
||||
std::pair<Tree, Input> fetch(ref<Store> store, const Input & input) override
|
||||
std::pair<StorePath, Input> fetch(ref<Store> store, const Input & input) override
|
||||
{
|
||||
std::string absPath;
|
||||
auto path = getStrAttr(input.attrs, "path");
|
||||
|
||||
if (path[0] != '/' && input.parent) {
|
||||
if (path[0] != '/') {
|
||||
if (!input.parent)
|
||||
throw Error("cannot fetch input '%s' because it uses a relative path", input.to_string());
|
||||
|
||||
auto parent = canonPath(*input.parent);
|
||||
|
||||
// the path isn't relative, prefix it
|
||||
absPath = canonPath(parent + "/" + path);
|
||||
absPath = nix::absPath(path, parent);
|
||||
|
||||
// for security, ensure that if the parent is a store path, it's inside it
|
||||
if (!parent.rfind(store->storeDir, 0) && absPath.rfind(store->storeDir, 0))
|
||||
throw BadStorePath("relative path '%s' points outside of its parent's store path %s, this is a security violation", path, parent);
|
||||
if (store->isInStore(parent)) {
|
||||
auto storePath = store->printStorePath(store->toStorePath(parent).first);
|
||||
if (!isDirOrInDir(absPath, storePath))
|
||||
throw BadStorePath("relative path '%s' points outside of its parent's store path '%s'", path, storePath);
|
||||
}
|
||||
} else
|
||||
absPath = path;
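
The security check above only accepts a relative path when the resolved absolute path stays inside the parent's store path. The containment test can be sketched as a prefix check on canonical paths; like isDirOrInDir in the real code, it also accepts the directory itself:

    #include <string>

    // Illustrative: true if `path` equals `dir` or lives underneath it.
    // Both are assumed to be canonical, '/'-separated absolute paths.
    static bool isDirOrInDirSketch(const std::string & path, const std::string & dir)
    {
        if (path == dir) return true;
        return path.size() > dir.size()
            && path.compare(0, dir.size(), dir) == 0
            && path[dir.size()] == '/';
    }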
|
||||
|
||||
Activity act(*logger, lvlTalkative, actUnknown, fmt("copying '%s'", absPath));
|
||||
|
||||
// FIXME: check whether access to 'path' is allowed.
|
||||
auto storePath = store->maybeParseStorePath(absPath);
|
||||
|
||||
|
@ -107,10 +115,7 @@ struct PathInputScheme : InputScheme
|
|||
// FIXME: try to substitute storePath.
|
||||
storePath = store->addToStore("source", absPath);
|
||||
|
||||
return {
|
||||
Tree(store->toRealPath(*storePath), std::move(*storePath)),
|
||||
input
|
||||
};
|
||||
return {std::move(*storePath), input};
|
||||
}
|
||||
};
|
||||
|
||||
|
|
|
@ -13,7 +13,7 @@ DownloadFileResult downloadFile(
|
|||
ref<Store> store,
|
||||
const std::string & url,
|
||||
const std::string & name,
|
||||
bool immutable,
|
||||
bool locked,
|
||||
const Headers & headers)
|
||||
{
|
||||
// FIXME: check store
|
||||
|
@ -67,18 +67,18 @@ DownloadFileResult downloadFile(
|
|||
storePath = std::move(cached->storePath);
|
||||
} else {
|
||||
StringSink sink;
|
||||
dumpString(*res.data, sink);
|
||||
auto hash = hashString(htSHA256, *res.data);
|
||||
dumpString(res.data, sink);
|
||||
auto hash = hashString(htSHA256, res.data);
|
||||
ValidPathInfo info {
|
||||
store->makeFixedOutputPath(FileIngestionMethod::Flat, hash, name),
|
||||
hashString(htSHA256, *sink.s),
|
||||
hashString(htSHA256, sink.s),
|
||||
};
|
||||
info.narSize = sink.s->size();
|
||||
info.narSize = sink.s.size();
|
||||
info.ca = FixedOutputHash {
|
||||
.method = FileIngestionMethod::Flat,
|
||||
.hash = hash,
|
||||
};
|
||||
auto source = StringSource { *sink.s };
|
||||
auto source = StringSource(sink.s);
|
||||
store->addToStore(info, source, NoRepair, NoCheckSigs);
|
||||
storePath = std::move(info.path);
|
||||
}
|
||||
|
@ -88,7 +88,7 @@ DownloadFileResult downloadFile(
|
|||
inAttrs,
|
||||
infoAttrs,
|
||||
*storePath,
|
||||
immutable);
|
||||
locked);
|
||||
|
||||
if (url != res.effectiveUri)
|
||||
getCache()->add(
|
||||
|
@ -100,7 +100,7 @@ DownloadFileResult downloadFile(
|
|||
},
|
||||
infoAttrs,
|
||||
*storePath,
|
||||
immutable);
|
||||
locked);
|
||||
|
||||
return {
|
||||
.storePath = std::move(*storePath),
|
||||
|
@ -113,7 +113,7 @@ std::pair<Tree, time_t> downloadTarball(
|
|||
ref<Store> store,
|
||||
const std::string & url,
|
||||
const std::string & name,
|
||||
bool immutable,
|
||||
bool locked,
|
||||
const Headers & headers)
|
||||
{
|
||||
Attrs inAttrs({
|
||||
|
@ -126,11 +126,11 @@ std::pair<Tree, time_t> downloadTarball(
|
|||
|
||||
if (cached && !cached->expired)
|
||||
return {
|
||||
Tree(store->toRealPath(cached->storePath), std::move(cached->storePath)),
|
||||
Tree { .actualPath = store->toRealPath(cached->storePath), .storePath = std::move(cached->storePath) },
|
||||
getIntAttr(cached->infoAttrs, "lastModified")
|
||||
};
|
||||
|
||||
auto res = downloadFile(store, url, name, immutable, headers);
|
||||
auto res = downloadFile(store, url, name, locked, headers);
|
||||
|
||||
std::optional<StorePath> unpackedStorePath;
|
||||
time_t lastModified;
|
||||
|
@ -160,10 +160,10 @@ std::pair<Tree, time_t> downloadTarball(
|
|||
inAttrs,
|
||||
infoAttrs,
|
||||
*unpackedStorePath,
|
||||
immutable);
|
||||
locked);
|
||||
|
||||
return {
|
||||
Tree(store->toRealPath(*unpackedStorePath), std::move(*unpackedStorePath)),
|
||||
Tree { .actualPath = store->toRealPath(*unpackedStorePath), .storePath = std::move(*unpackedStorePath) },
|
||||
lastModified,
|
||||
};
|
||||
}
|
||||
|
@ -176,6 +176,7 @@ struct TarballInputScheme : InputScheme
|
|||
|
||||
if (!hasSuffix(url.path, ".zip")
|
||||
&& !hasSuffix(url.path, ".tar")
|
||||
&& !hasSuffix(url.path, ".tgz")
|
||||
&& !hasSuffix(url.path, ".tar.gz")
|
||||
&& !hasSuffix(url.path, ".tar.xz")
|
||||
&& !hasSuffix(url.path, ".tar.bz2")
|
||||
|
@ -201,7 +202,7 @@ struct TarballInputScheme : InputScheme
|
|||
|
||||
Input input;
|
||||
input.attrs = attrs;
|
||||
//input.immutable = (bool) maybeGetStrAttr(input.attrs, "hash");
|
||||
//input.locked = (bool) maybeGetStrAttr(input.attrs, "hash");
|
||||
return input;
|
||||
}
|
||||
|
||||
|
@ -224,10 +225,10 @@ struct TarballInputScheme : InputScheme
|
|||
return true;
|
||||
}
|
||||
|
||||
std::pair<Tree, Input> fetch(ref<Store> store, const Input & input) override
|
||||
std::pair<StorePath, Input> fetch(ref<Store> store, const Input & input) override
|
||||
{
|
||||
auto tree = downloadTarball(store, getStrAttr(input.attrs, "url"), input.getName(), false).first;
|
||||
return {std::move(tree), input};
|
||||
return {std::move(tree.storePath), input};
|
||||
}
|
||||
};
|
||||
|
||||
|
|
|
@ -4,7 +4,7 @@
|
|||
|
||||
namespace nix {
|
||||
|
||||
MixCommonArgs::MixCommonArgs(const string & programName)
|
||||
MixCommonArgs::MixCommonArgs(const std::string & programName)
|
||||
: programName(programName)
|
||||
{
|
||||
addFlag({
|
||||
|
|
|
@ -11,8 +11,8 @@ class MixCommonArgs : public virtual Args
|
|||
{
|
||||
void initialFlagsProcessed() override;
|
||||
public:
|
||||
string programName;
|
||||
MixCommonArgs(const string & programName);
|
||||
std::string programName;
|
||||
MixCommonArgs(const std::string & programName);
|
||||
protected:
|
||||
virtual void pluginsInited() {}
|
||||
};
|
||||
|
|
|
@ -11,7 +11,7 @@
|
|||
|
||||
namespace nix {
|
||||
|
||||
static std::string getS(const std::vector<Logger::Field> & fields, size_t n)
|
||||
static std::string_view getS(const std::vector<Logger::Field> & fields, size_t n)
|
||||
{
|
||||
assert(n < fields.size());
|
||||
assert(fields[n].type == Logger::Field::tString);
|
||||
|
@ -103,17 +103,19 @@ public:
|
|||
~ProgressBar()
|
||||
{
|
||||
stop();
|
||||
updateThread.join();
|
||||
}
|
||||
|
||||
void stop() override
|
||||
{
|
||||
auto state(state_.lock());
|
||||
if (!state->active) return;
|
||||
state->active = false;
|
||||
writeToStderr("\r\e[K");
|
||||
updateCV.notify_one();
|
||||
quitCV.notify_one();
|
||||
{
|
||||
auto state(state_.lock());
|
||||
if (!state->active) return;
|
||||
state->active = false;
|
||||
writeToStderr("\r\e[K");
|
||||
updateCV.notify_one();
|
||||
quitCV.notify_one();
|
||||
}
|
||||
updateThread.join();
|
||||
}
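
The restructured stop() closes the inner scope, and with it the state lock, before joining the update thread, so the thread being joined can still take the lock and exit cleanly instead of deadlocking. The same shape with standard-library types only (Bar is an illustrative stand-in, not the ProgressBar class):

    #include <condition_variable>
    #include <mutex>
    #include <thread>

    struct Bar
    {
        std::mutex m;
        std::condition_variable quit;
        bool active = true;
        std::thread worker;

        void stop()
        {
            {
                std::lock_guard<std::mutex> lock(m);
                if (!active) return;
                active = false;
                quit.notify_one();
            }                       // lock released before the join
            if (worker.joinable()) worker.join();
        }
    };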
|
||||
|
||||
bool isVerbose() override {
|
||||
|
|
|
@ -15,9 +15,14 @@
|
|||
#include <sys/stat.h>
|
||||
#include <unistd.h>
|
||||
#include <signal.h>
|
||||
#include <sys/types.h>
|
||||
#include <sys/socket.h>
|
||||
#include <netdb.h>
|
||||
#ifdef __linux__
|
||||
#include <features.h>
|
||||
#endif
|
||||
#ifdef __GLIBC__
|
||||
#include <gnu/lib-names.h>
|
||||
#include <nss.h>
|
||||
#include <dlfcn.h>
|
||||
#endif
|
||||
|
||||
#include <openssl/crypto.h>
|
||||
|
||||
|
@ -89,7 +94,7 @@ void printMissing(ref<Store> store, const StorePathSet & willBuild,
|
|||
}
|
||||
|
||||
|
||||
string getArg(const string & opt,
|
||||
std::string getArg(const std::string & opt,
|
||||
Strings::iterator & i, const Strings::iterator & end)
|
||||
{
|
||||
++i;
|
||||
|
@ -121,21 +126,30 @@ static void preloadNSS() {
|
|||
been loaded in the parent. So we force a lookup of an invalid domain to force the NSS machinery to
|
||||
load its lookup libraries in the parent before any child gets a chance to. */
|
||||
std::call_once(dns_resolve_flag, []() {
|
||||
struct addrinfo *res = NULL;
|
||||
|
||||
/* nss will only force the "local" (not through nscd) dns resolution if its on the LOCALDOMAIN.
|
||||
We need the resolution to be done locally, as nscd socket will not be accessible in the
|
||||
sandbox. */
|
||||
char * previous_env = getenv("LOCALDOMAIN");
|
||||
setenv("LOCALDOMAIN", "invalid", 1);
|
||||
if (getaddrinfo("this.pre-initializes.the.dns.resolvers.invalid.", "http", NULL, &res) == 0) {
|
||||
if (res) freeaddrinfo(res);
|
||||
}
|
||||
if (previous_env) {
|
||||
setenv("LOCALDOMAIN", previous_env, 1);
|
||||
} else {
|
||||
unsetenv("LOCALDOMAIN");
|
||||
}
|
||||
#ifdef __GLIBC__
|
||||
/* On linux, glibc will run every lookup through the nss layer.
|
||||
* That means every lookup goes, by default, through nscd, which acts as a local
|
||||
* cache.
|
||||
* Because we run builds in a sandbox, we also remove access to nscd otherwise
|
||||
* lookups would leak into the sandbox.
|
||||
*
|
||||
* But now we have a new problem, we need to make sure the nss_dns backend that
|
||||
* does the dns lookups when nscd is not available is loaded or available.
|
||||
*
|
||||
* We can't make it available without leaking nix's environment, so instead we'll
|
||||
* load the backend, and configure nss so it does not try to run dns lookups
|
||||
* through nscd.
|
||||
*
|
||||
* This is technically only used for builtins:fetch* functions so we only care
|
||||
* about dns.
|
||||
*
|
||||
* All other platforms are unaffected.
|
||||
*/
|
||||
if (!dlopen(LIBNSS_DNS_SO, RTLD_NOW))
|
||||
warn("unable to load nss_dns backend");
|
||||
// FIXME: get hosts entry from nsswitch.conf.
|
||||
__nss_configure_lookup("hosts", "files dns");
|
||||
#endif
|
||||
});
|
||||
}
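
The LOCALDOMAIN handling above boils down to: remember the old value, override it with something invalid so the lookup is forced to happen locally, perform one getaddrinfo() to pull the resolver libraries into the process, then restore the variable. A trimmed sketch of that save/override/restore pattern using the same POSIX calls (getenv/setenv/unsetenv, getaddrinfo):

    #include <cstdlib>
    #include <netdb.h>
    #include <optional>
    #include <string>
    #include <sys/socket.h>
    #include <sys/types.h>

    static void warmUpResolver()
    {
        // Copy the old value before overriding it.
        const char * p = getenv("LOCALDOMAIN");
        std::optional<std::string> previous = p ? std::optional<std::string>(p) : std::nullopt;

        setenv("LOCALDOMAIN", "invalid", 1);    // force a local (non-nscd) lookup

        struct addrinfo * res = nullptr;
        if (getaddrinfo("this.pre-initializes.the.dns.resolvers.invalid.", "http", nullptr, &res) == 0)
            if (res) freeaddrinfo(res);

        if (previous)
            setenv("LOCALDOMAIN", previous->c_str(), 1);    // restore the caller's value
        else
            unsetenv("LOCALDOMAIN");
    }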
|
||||
|
||||
|
@ -213,6 +227,8 @@ LegacyArgs::LegacyArgs(const std::string & programName,
|
|||
std::function<bool(Strings::iterator & arg, const Strings::iterator & end)> parseArg)
|
||||
: MixCommonArgs(programName), parseArg(parseArg)
|
||||
{
|
||||
printError("FOO %s", programName);
|
||||
|
||||
addFlag({
|
||||
.longName = "no-build-output",
|
||||
.shortName = 'Q',
|
||||
|
@ -308,14 +324,14 @@ void parseCmdLine(int argc, char * * argv,
|
|||
}
|
||||
|
||||
|
||||
void parseCmdLine(const string & programName, const Strings & args,
|
||||
void parseCmdLine(const std::string & programName, const Strings & args,
|
||||
std::function<bool(Strings::iterator & arg, const Strings::iterator & end)> parseArg)
|
||||
{
|
||||
LegacyArgs(programName, parseArg).parseCmdline(args);
|
||||
}
|
||||
|
||||
|
||||
void printVersion(const string & programName)
|
||||
void printVersion(const std::string & programName)
|
||||
{
|
||||
std::cout << format("%1% (Nix) %2%") % programName % nixVersion << std::endl;
|
||||
if (verbosity > lvlInfo) {
|
||||
|
@ -338,7 +354,7 @@ void printVersion(const string & programName)
|
|||
}
|
||||
|
||||
|
||||
void showManPage(const string & name)
|
||||
void showManPage(const std::string & name)
|
||||
{
|
||||
restoreProcessContext();
|
||||
setenv("MANPATH", settings.nixManDir.c_str(), 1);
|
||||
|
@ -347,13 +363,13 @@ void showManPage(const string & name)
|
|||
}
|
||||
|
||||
|
||||
int handleExceptions(const string & programName, std::function<void()> fun)
|
||||
int handleExceptions(const std::string & programName, std::function<void()> fun)
|
||||
{
|
||||
ReceiveInterrupts receiveInterrupts; // FIXME: need better place for this
|
||||
|
||||
ErrorInfo::programName = baseNameOf(programName);
|
||||
|
||||
string error = ANSI_RED "error:" ANSI_NORMAL " ";
|
||||
std::string error = ANSI_RED "error:" ANSI_NORMAL " ";
|
||||
try {
|
||||
try {
|
||||
fun();
|
||||
|
@ -393,7 +409,7 @@ RunPager::RunPager()
|
|||
if (!isatty(STDOUT_FILENO)) return;
|
||||
char * pager = getenv("NIX_PAGER");
|
||||
if (!pager) pager = getenv("PAGER");
|
||||
if (pager && ((string) pager == "" || (string) pager == "cat")) return;
|
||||
if (pager && ((std::string) pager == "" || (std::string) pager == "cat")) return;
|
||||
|
||||
Pipe toPager;
|
||||
toPager.create();
|
||||
|
@ -413,7 +429,7 @@ RunPager::RunPager()
|
|||
});
|
||||
|
||||
pid.setKillSignal(SIGINT);
|
||||
|
||||
stdout = fcntl(STDOUT_FILENO, F_DUPFD_CLOEXEC, 0);
|
||||
if (dup2(toPager.writeSide.get(), STDOUT_FILENO) == -1)
|
||||
throw SysError("dupping stdout");
|
||||
}
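
RunPager keeps a duplicate of the original stdout descriptor so the destructor can dup2() it back once the pager exits. A minimal sketch of that save/redirect/restore sequence on raw file descriptors (error handling reduced to comments; function names are illustrative):

    #include <fcntl.h>
    #include <unistd.h>

    // Illustrative: redirect stdout to `targetFd`, returning a saved copy of the
    // original stdout that can later be handed to restoreStdout().
    static int redirectStdout(int targetFd)
    {
        int saved = fcntl(STDOUT_FILENO, F_DUPFD_CLOEXEC, 0);  // keep a copy of stdout
        dup2(targetFd, STDOUT_FILENO);                          // stdout now goes to targetFd
        return saved;                                           // (check for -1 in real code)
    }

    static void restoreStdout(int saved)
    {
        dup2(saved, STDOUT_FILENO);
        close(saved);
    }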
|
||||
|
@ -424,7 +440,7 @@ RunPager::~RunPager()
|
|||
try {
|
||||
if (pid != -1) {
|
||||
std::cout.flush();
|
||||
close(STDOUT_FILENO);
|
||||
dup2(stdout, STDOUT_FILENO);
|
||||
pid.wait();
|
||||
}
|
||||
} catch (...) {
|
||||
|
|
|
@ -22,7 +22,7 @@ public:
|
|||
virtual ~Exit();
|
||||
};
|
||||
|
||||
int handleExceptions(const string & programName, std::function<void()> fun);
|
||||
int handleExceptions(const std::string & programName, std::function<void()> fun);
|
||||
|
||||
/* Don't forget to call initPlugins() after settings are initialized! */
|
||||
void initNix();
|
||||
|
@ -30,10 +30,10 @@ void initNix();
|
|||
void parseCmdLine(int argc, char * * argv,
|
||||
std::function<bool(Strings::iterator & arg, const Strings::iterator & end)> parseArg);
|
||||
|
||||
void parseCmdLine(const string & programName, const Strings & args,
|
||||
void parseCmdLine(const std::string & programName, const Strings & args,
|
||||
std::function<bool(Strings::iterator & arg, const Strings::iterator & end)> parseArg);
|
||||
|
||||
void printVersion(const string & programName);
|
||||
void printVersion(const std::string & programName);
|
||||
|
||||
/* Ugh. No better place to put this. */
|
||||
void printGCWarning();
|
||||
|
@ -50,10 +50,10 @@ void printMissing(ref<Store> store, const StorePathSet & willBuild,
|
|||
const StorePathSet & willSubstitute, const StorePathSet & unknown,
|
||||
uint64_t downloadSize, uint64_t narSize, Verbosity lvl = lvlInfo);
|
||||
|
||||
string getArg(const string & opt,
|
||||
std::string getArg(const std::string & opt,
|
||||
Strings::iterator & i, const Strings::iterator & end);
|
||||
|
||||
template<class N> N getIntArg(const string & opt,
|
||||
template<class N> N getIntArg(const std::string & opt,
|
||||
Strings::iterator & i, const Strings::iterator & end, bool allowUnit)
|
||||
{
|
||||
++i;
|
||||
|
@ -76,7 +76,7 @@ struct LegacyArgs : public MixCommonArgs
|
|||
|
||||
|
||||
/* Show the manual page for the specified program. */
|
||||
void showManPage(const string & name);
|
||||
void showManPage(const std::string & name);
|
||||
|
||||
/* The constructor of this class starts a pager if stdout is a
|
||||
terminal and $PAGER is set. Stdout is redirected to the pager. */
|
||||
|
@ -88,6 +88,7 @@ public:
|
|||
|
||||
private:
|
||||
Pid pid;
|
||||
int stdout;
|
||||
};
|
||||
|
||||
extern volatile ::sig_atomic_t blockInt;
|
||||
|
@ -95,7 +96,7 @@ extern volatile ::sig_atomic_t blockInt;
|
|||
|
||||
/* GC helpers. */
|
||||
|
||||
string showBytes(uint64_t bytes);
|
||||
std::string showBytes(uint64_t bytes);
|
||||
|
||||
struct GCResults;
|
||||
|
||||
|
|
|
@ -31,7 +31,7 @@ BinaryCacheStore::BinaryCacheStore(const Params & params)
|
|||
|
||||
StringSink sink;
|
||||
sink << narVersionMagic1;
|
||||
narMagic = *sink.s;
|
||||
narMagic = sink.s;
|
||||
}
|
||||
|
||||
void BinaryCacheStore::init()
|
||||
|
@ -52,9 +52,9 @@ void BinaryCacheStore::init()
|
|||
throw Error("binary cache '%s' is for Nix stores with prefix '%s', not '%s'",
|
||||
getUri(), value, storeDir);
|
||||
} else if (name == "WantMassQuery") {
|
||||
wantMassQuery.setDefault(value == "1" ? "true" : "false");
|
||||
wantMassQuery.setDefault(value == "1");
|
||||
} else if (name == "Priority") {
|
||||
priority.setDefault(fmt("%d", std::stoi(value)));
|
||||
priority.setDefault(std::stoi(value));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -68,7 +68,7 @@ void BinaryCacheStore::upsertFile(const std::string & path,
|
|||
}
|
||||
|
||||
void BinaryCacheStore::getFile(const std::string & path,
|
||||
Callback<std::shared_ptr<std::string>> callback) noexcept
|
||||
Callback<std::optional<std::string>> callback) noexcept
|
||||
{
|
||||
try {
|
||||
callback(getFile(path));
|
||||
|
@ -77,9 +77,9 @@ void BinaryCacheStore::getFile(const std::string & path,
|
|||
|
||||
void BinaryCacheStore::getFile(const std::string & path, Sink & sink)
|
||||
{
|
||||
std::promise<std::shared_ptr<std::string>> promise;
|
||||
std::promise<std::optional<std::string>> promise;
|
||||
getFile(path,
|
||||
{[&](std::future<std::shared_ptr<std::string>> result) {
|
||||
{[&](std::future<std::optional<std::string>> result) {
|
||||
try {
|
||||
promise.set_value(result.get());
|
||||
} catch (...) {
|
||||
|
@ -89,15 +89,15 @@ void BinaryCacheStore::getFile(const std::string & path, Sink & sink)
|
|||
sink(*promise.get_future().get());
|
||||
}
|
||||
|
||||
std::shared_ptr<std::string> BinaryCacheStore::getFile(const std::string & path)
|
||||
std::optional<std::string> BinaryCacheStore::getFile(const std::string & path)
|
||||
{
|
||||
StringSink sink;
|
||||
try {
|
||||
getFile(path, sink);
|
||||
} catch (NoSuchBinaryCacheFile &) {
|
||||
return nullptr;
|
||||
return std::nullopt;
|
||||
}
|
||||
return sink.s;
|
||||
return std::move(sink.s);
|
||||
}
|
||||
|
||||
std::string BinaryCacheStore::narInfoFileFor(const StorePath & storePath)
|
||||
|
@ -111,15 +111,15 @@ void BinaryCacheStore::writeNarInfo(ref<NarInfo> narInfo)
|
|||
|
||||
upsertFile(narInfoFile, narInfo->to_string(*this), "text/x-nix-narinfo");
|
||||
|
||||
std::string hashPart(narInfo->path.hashPart());
|
||||
|
||||
{
|
||||
auto state_(state.lock());
|
||||
state_->pathInfoCache.upsert(hashPart, PathInfoCacheValue { .value = std::shared_ptr<NarInfo>(narInfo) });
|
||||
state_->pathInfoCache.upsert(
|
||||
std::string(narInfo->path.to_string()),
|
||||
PathInfoCacheValue { .value = std::shared_ptr<NarInfo>(narInfo) });
|
||||
}
|
||||
|
||||
if (diskCache)
|
||||
diskCache->upsertNarInfo(getUri(), hashPart, std::shared_ptr<NarInfo>(narInfo));
|
||||
diskCache->upsertNarInfo(getUri(), std::string(narInfo->path.hashPart()), std::shared_ptr<NarInfo>(narInfo));
|
||||
}
|
||||
|
||||
AutoCloseFD openFile(const Path & path)
|
||||
|
@ -149,7 +149,7 @@ ref<const ValidPathInfo> BinaryCacheStore::addToStoreCommon(
|
|||
{
|
||||
FdSink fileSink(fdTemp.get());
|
||||
TeeSink teeSinkCompressed { fileSink, fileHashSink };
|
||||
auto compressionSink = makeCompressionSink(compression, teeSinkCompressed);
|
||||
auto compressionSink = makeCompressionSink(compression, teeSinkCompressed, parallelCompression, compressionLevel);
|
||||
TeeSink teeSinkUncompressed { *compressionSink, narHashSink };
|
||||
TeeSource teeSource { narSource, teeSinkUncompressed };
|
||||
narAccessor = makeNarAccessor(teeSource);
|
||||
|
@ -307,17 +307,18 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource
|
|||
}});
|
||||
}
|
||||
|
||||
StorePath BinaryCacheStore::addToStoreFromDump(Source & dump, const string & name,
|
||||
FileIngestionMethod method, HashType hashAlgo, RepairFlag repair)
|
||||
StorePath BinaryCacheStore::addToStoreFromDump(Source & dump, std::string_view name,
|
||||
FileIngestionMethod method, HashType hashAlgo, RepairFlag repair, const StorePathSet & references)
|
||||
{
|
||||
if (method != FileIngestionMethod::Recursive || hashAlgo != htSHA256)
|
||||
unsupported("addToStoreFromDump");
|
||||
return addToStoreCommon(dump, repair, CheckSigs, [&](HashResult nar) {
|
||||
ValidPathInfo info {
|
||||
makeFixedOutputPath(method, nar.first, name),
|
||||
makeFixedOutputPath(method, nar.first, name, references),
|
||||
nar.first,
|
||||
};
|
||||
info.narSize = nar.second;
|
||||
info.references = references;
|
||||
return info;
|
||||
})->path;
|
||||
}
|
||||
|
@ -366,11 +367,11 @@ void BinaryCacheStore::queryPathInfoUncached(const StorePath & storePath,
|
|||
auto callbackPtr = std::make_shared<decltype(callback)>(std::move(callback));
|
||||
|
||||
getFile(narInfoFile,
|
||||
{[=](std::future<std::shared_ptr<std::string>> fut) {
|
||||
{[=](std::future<std::optional<std::string>> fut) {
|
||||
try {
|
||||
auto data = fut.get();
|
||||
|
||||
if (!data) return (*callbackPtr)(nullptr);
|
||||
if (!data) return (*callbackPtr)({});
|
||||
|
||||
stats.narInfoRead++;
|
||||
|
||||
|
@ -384,8 +385,14 @@ void BinaryCacheStore::queryPathInfoUncached(const StorePath & storePath,
|
|||
}});
|
||||
}
|
||||
|
||||
StorePath BinaryCacheStore::addToStore(const string & name, const Path & srcPath,
|
||||
FileIngestionMethod method, HashType hashAlgo, PathFilter & filter, RepairFlag repair)
|
||||
StorePath BinaryCacheStore::addToStore(
|
||||
std::string_view name,
|
||||
const Path & srcPath,
|
||||
FileIngestionMethod method,
|
||||
HashType hashAlgo,
|
||||
PathFilter & filter,
|
||||
RepairFlag repair,
|
||||
const StorePathSet & references)
|
||||
{
|
||||
/* FIXME: Make BinaryCacheStore::addToStoreCommon support
|
||||
non-recursive+sha256 so we can just use the default
|
||||
|
@ -404,10 +411,11 @@ StorePath BinaryCacheStore::addToStore(const string & name, const Path & srcPath
|
|||
});
|
||||
return addToStoreCommon(*source, repair, CheckSigs, [&](HashResult nar) {
|
||||
ValidPathInfo info {
|
||||
makeFixedOutputPath(method, h, name),
|
||||
makeFixedOutputPath(method, h, name, references),
|
||||
nar.first,
|
||||
};
|
||||
info.narSize = nar.second;
|
||||
info.references = references;
|
||||
info.ca = FixedOutputHash {
|
||||
.method = method,
|
||||
.hash = h,
|
||||
|
@ -416,8 +424,11 @@ StorePath BinaryCacheStore::addToStore(const string & name, const Path & srcPath
|
|||
})->path;
|
||||
}
|
||||
|
||||
StorePath BinaryCacheStore::addTextToStore(const string & name, const string & s,
|
||||
const StorePathSet & references, RepairFlag repair)
|
||||
StorePath BinaryCacheStore::addTextToStore(
|
||||
std::string_view name,
|
||||
std::string_view s,
|
||||
const StorePathSet & references,
|
||||
RepairFlag repair)
|
||||
{
|
||||
auto textHash = hashString(htSHA256, s);
|
||||
auto path = makeTextPath(name, textHash, references);
|
||||
|
@@ -427,7 +438,7 @@ StorePath BinaryCacheStore::addTextToStore(const string & name, const string & s

StringSink sink;
dumpString(s, sink);
auto source = StringSource { *sink.s };
StringSource source(sink.s);
return addToStoreCommon(source, repair, CheckSigs, [&](HashResult nar) {
ValidPathInfo info { path, nar.first };
info.narSize = nar.second;
@@ -437,40 +448,29 @@ StorePath BinaryCacheStore::addTextToStore(const string & name, const string & s
})->path;
}

std::optional<const Realisation> BinaryCacheStore::queryRealisation(const DrvOutput & id)
void BinaryCacheStore::queryRealisationUncached(const DrvOutput & id,
Callback<std::shared_ptr<const Realisation>> callback) noexcept
{
if (diskCache) {
auto [cacheOutcome, maybeCachedRealisation] =
diskCache->lookupRealisation(getUri(), id);
switch (cacheOutcome) {
case NarInfoDiskCache::oValid:
debug("Returning a cached realisation for %s", id.to_string());
return *maybeCachedRealisation;
case NarInfoDiskCache::oInvalid:
debug("Returning a cached missing realisation for %s", id.to_string());
return {};
case NarInfoDiskCache::oUnknown:
break;
}
}

auto outputInfoFilePath = realisationsPrefix + "/" + id.to_string() + ".doi";
auto rawOutputInfo = getFile(outputInfoFilePath);

if (rawOutputInfo) {
auto realisation = Realisation::fromJSON(
nlohmann::json::parse(*rawOutputInfo), outputInfoFilePath);
auto callbackPtr = std::make_shared<decltype(callback)>(std::move(callback));

if (diskCache)
diskCache->upsertRealisation(
getUri(), realisation);
Callback<std::optional<std::string>> newCallback = {
[=](std::future<std::optional<std::string>> fut) {
try {
auto data = fut.get();
if (!data) return (*callbackPtr)({});

return {realisation};
} else {
if (diskCache)
diskCache->upsertAbsentRealisation(getUri(), id);
return std::nullopt;
}
auto realisation = Realisation::fromJSON(
nlohmann::json::parse(*data), outputInfoFilePath);
return (*callbackPtr)(std::make_shared<const Realisation>(realisation));
} catch (...) {
callbackPtr->rethrow();
}
}
};

getFile(outputInfoFilePath, std::move(newCallback));
}

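Aside (not part of the diff): callers that need a blocking answer can bridge the new callback-based interface with a promise, the same pattern this commit uses elsewhere. Sketch only; 'store' and 'id' are assumptions:

    std::promise<std::shared_ptr<const Realisation>> promise;
    store.queryRealisation(id, {[&](std::future<std::shared_ptr<const Realisation>> res) {
        try { promise.set_value(res.get()); }
        catch (...) { promise.set_exception(std::current_exception()); }
    }});
    auto realisation = promise.get_future().get();   // null means "no realisation found"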
void BinaryCacheStore::registerDrvOutput(const Realisation& info) {
@@ -499,7 +499,7 @@ void BinaryCacheStore::addSignatures(const StorePath & storePath, const StringSe
writeNarInfo(narInfo);
}

std::shared_ptr<std::string> BinaryCacheStore::getBuildLog(const StorePath & path)
std::optional<std::string> BinaryCacheStore::getBuildLog(const StorePath & path)
{
auto drvPath = path;

@@ -507,10 +507,10 @@ std::shared_ptr<std::string> BinaryCacheStore::getBuildLog(const StorePath & pat
try {
auto info = queryPathInfo(path);
// FIXME: add a "Log" field to .narinfo
if (!info->deriver) return nullptr;
if (!info->deriver) return std::nullopt;
drvPath = *info->deriver;
} catch (InvalidPath &) {
return nullptr;
return std::nullopt;
}
}

@@ -521,4 +521,14 @@ std::shared_ptr<std::string> BinaryCacheStore::getBuildLog(const StorePath & pat
return getFile(logPath);
}

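Aside (not part of the diff): a call-site sketch for the new optional-returning interface; 'store' and 'drvPath' are assumptions:

    if (auto log = store.getBuildLog(drvPath))
        std::cout << *log;
    else
        printError("no build log available");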
void BinaryCacheStore::addBuildLog(const StorePath & drvPath, std::string_view log)
{
assert(drvPath.isDerivation());

upsertFile(
"log/" + std::string(drvPath.to_string()),
(std::string) log, // FIXME: don't copy
"text/plain; charset=utf-8");
}

}
@@ -15,13 +15,17 @@ struct BinaryCacheStoreConfig : virtual StoreConfig
{
using StoreConfig::StoreConfig;

const Setting<std::string> compression{(StoreConfig*) this, "xz", "compression", "NAR compression method ('xz', 'bzip2', or 'none')"};
const Setting<std::string> compression{(StoreConfig*) this, "xz", "compression", "NAR compression method ('xz', 'bzip2', 'gzip', 'zstd', or 'none')"};
const Setting<bool> writeNARListing{(StoreConfig*) this, false, "write-nar-listing", "whether to write a JSON file listing the files in each NAR"};
const Setting<bool> writeDebugInfo{(StoreConfig*) this, false, "index-debug-info", "whether to index DWARF debug info files by build ID"};
const Setting<Path> secretKeyFile{(StoreConfig*) this, "", "secret-key", "path to secret key used to sign the binary cache"};
const Setting<Path> localNarCache{(StoreConfig*) this, "", "local-nar-cache", "path to a local cache of NARs"};
const Setting<bool> parallelCompression{(StoreConfig*) this, false, "parallel-compression",
"enable multi-threading compression, available for xz only currently"};
"enable multi-threading compression for NARs, available for xz and zstd only currently"};
const Setting<int> compressionLevel{(StoreConfig*) this, -1, "compression-level",
"specify 'preset level' of compression to be used with NARs: "
"meaning and accepted range of values depends on compression method selected, "
"other than -1 which we reserve to indicate Nix defaults should be used"};
};

class BinaryCacheStore : public virtual BinaryCacheStoreConfig, public virtual Store
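Aside (not part of the diff): these settings are exposed as binary-cache store URI parameters, so a hypothetical cache using the new options could be addressed as

    file:///var/cache/binary-cache?compression=zstd&compression-level=19&parallel-compression=true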
@ -47,6 +51,7 @@ public:
|
|||
const std::string & mimeType) = 0;
|
||||
|
||||
void upsertFile(const std::string & path,
|
||||
// FIXME: use std::string_view
|
||||
std::string && data,
|
||||
const std::string & mimeType);
|
||||
|
||||
|
@ -58,10 +63,11 @@ public:
|
|||
|
||||
/* Fetch the specified file and call the specified callback with
|
||||
the result. A subclass may implement this asynchronously. */
|
||||
virtual void getFile(const std::string & path,
|
||||
Callback<std::shared_ptr<std::string>> callback) noexcept;
|
||||
virtual void getFile(
|
||||
const std::string & path,
|
||||
Callback<std::optional<std::string>> callback) noexcept;
|
||||
|
||||
std::shared_ptr<std::string> getFile(const std::string & path);
|
||||
std::optional<std::string> getFile(const std::string & path);
|
||||
|
||||
public:
|
||||
|
||||
|
@ -92,19 +98,28 @@ public:
|
|||
void addToStore(const ValidPathInfo & info, Source & narSource,
|
||||
RepairFlag repair, CheckSigsFlag checkSigs) override;
|
||||
|
||||
StorePath addToStoreFromDump(Source & dump, const string & name,
|
||||
FileIngestionMethod method, HashType hashAlgo, RepairFlag repair) override;
|
||||
StorePath addToStoreFromDump(Source & dump, std::string_view name,
|
||||
FileIngestionMethod method, HashType hashAlgo, RepairFlag repair, const StorePathSet & references) override;
|
||||
|
||||
StorePath addToStore(const string & name, const Path & srcPath,
|
||||
FileIngestionMethod method, HashType hashAlgo,
|
||||
PathFilter & filter, RepairFlag repair) override;
|
||||
StorePath addToStore(
|
||||
std::string_view name,
|
||||
const Path & srcPath,
|
||||
FileIngestionMethod method,
|
||||
HashType hashAlgo,
|
||||
PathFilter & filter,
|
||||
RepairFlag repair,
|
||||
const StorePathSet & references) override;
|
||||
|
||||
StorePath addTextToStore(const string & name, const string & s,
|
||||
const StorePathSet & references, RepairFlag repair) override;
|
||||
StorePath addTextToStore(
|
||||
std::string_view name,
|
||||
std::string_view s,
|
||||
const StorePathSet & references,
|
||||
RepairFlag repair) override;
|
||||
|
||||
void registerDrvOutput(const Realisation & info) override;
|
||||
|
||||
std::optional<const Realisation> queryRealisation(const DrvOutput &) override;
|
||||
void queryRealisationUncached(const DrvOutput &,
|
||||
Callback<std::shared_ptr<const Realisation>> callback) noexcept override;
|
||||
|
||||
void narFromPath(const StorePath & path, Sink & sink) override;
|
||||
|
||||
|
@ -112,7 +127,9 @@ public:
|
|||
|
||||
void addSignatures(const StorePath & storePath, const StringSet & sigs) override;
|
||||
|
||||
std::shared_ptr<std::string> getBuildLog(const StorePath & path) override;
|
||||
std::optional<std::string> getBuildLog(const StorePath & path) override;
|
||||
|
||||
void addBuildLog(const StorePath & drvPath, std::string_view log) override;
|
||||
|
||||
};
|
||||
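Aside (not part of the diff): with the new virtual, a subclass reports a missing file through std::nullopt instead of a null shared_ptr. A minimal sketch of an override, where MyCacheStore and its cacheDir field are assumptions (pathExists/readFile are the usual nix util helpers):

    void MyCacheStore::getFile(const std::string & path,
        Callback<std::optional<std::string>> callback) noexcept
    {
        try {
            std::optional<std::string> data;
            if (pathExists(cacheDir + "/" + path))
                data = readFile(cacheDir + "/" + path);
            callback(std::move(data));   // nullopt == not found
        } catch (...) {
            callback.rethrow();
        }
    }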
@ -17,6 +17,7 @@
|
|||
#include <regex>
|
||||
#include <queue>
|
||||
|
||||
#include <fstream>
|
||||
#include <sys/types.h>
|
||||
#include <sys/socket.h>
|
||||
#include <sys/un.h>
|
||||
|
@ -115,7 +116,7 @@ DerivationGoal::~DerivationGoal()
|
|||
}
|
||||
|
||||
|
||||
string DerivationGoal::key()
|
||||
std::string DerivationGoal::key()
|
||||
{
|
||||
/* Ensure that derivations get built in order of their name,
|
||||
i.e. a derivation named "aardvark" always comes before
|
||||
|
@ -193,7 +194,7 @@ void DerivationGoal::loadDerivation()
|
|||
assert(worker.evalStore.isValidPath(drvPath));
|
||||
|
||||
/* Get the derivation. */
|
||||
drv = std::make_unique<Derivation>(worker.evalStore.derivationFromPath(drvPath));
|
||||
drv = std::make_unique<Derivation>(worker.evalStore.readDerivation(drvPath));
|
||||
|
||||
haveDerivation();
|
||||
}
|
||||
|
@ -204,7 +205,7 @@ void DerivationGoal::haveDerivation()
|
|||
trace("have derivation");
|
||||
|
||||
if (drv->type() == DerivationType::CAFloating)
|
||||
settings.requireExperimentalFeature("ca-derivations");
|
||||
settings.requireExperimentalFeature(Xp::CaDerivations);
|
||||
|
||||
retrySubstitution = false;
|
||||
|
||||
|
@ -277,7 +278,7 @@ void DerivationGoal::outputsSubstitutionTried()
|
|||
|
||||
if (nrFailed > 0 && nrFailed > nrNoSubstituters + nrIncompleteClosure && !settings.tryFallback) {
|
||||
done(BuildResult::TransientFailure,
|
||||
fmt("some substitutes for the outputs of derivation '%s' failed (usually happens due to networking issues); try '--fallback' to build derivation from source ",
|
||||
Error("some substitutes for the outputs of derivation '%s' failed (usually happens due to networking issues); try '--fallback' to build derivation from source ",
|
||||
worker.store.printStorePath(drvPath)));
|
||||
return;
|
||||
}
|
||||
|
@ -453,7 +454,7 @@ void DerivationGoal::inputsRealised()
|
|||
if (useDerivation) {
|
||||
auto & fullDrv = *dynamic_cast<Derivation *>(drv.get());
|
||||
|
||||
if (settings.isExperimentalFeatureEnabled("ca-derivations") &&
|
||||
if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations) &&
|
||||
((!fullDrv.inputDrvs.empty() && derivationIsCA(fullDrv.type()))
|
||||
|| fullDrv.type() == DerivationType::DeferredInputAddressed)) {
|
||||
/* We are be able to resolve this derivation based on the
|
||||
|
@ -464,7 +465,6 @@ void DerivationGoal::inputsRealised()
|
|||
Derivation drvResolved { *std::move(attempt) };
|
||||
|
||||
auto pathResolved = writeDerivation(worker.store, drvResolved);
|
||||
resolvedDrv = drvResolved;
|
||||
|
||||
auto msg = fmt("Resolved derivation: '%s' -> '%s'",
|
||||
worker.store.printStorePath(drvPath),
|
||||
|
@ -475,9 +475,9 @@ void DerivationGoal::inputsRealised()
|
|||
worker.store.printStorePath(pathResolved),
|
||||
});
|
||||
|
||||
auto resolvedGoal = worker.makeDerivationGoal(
|
||||
resolvedDrvGoal = worker.makeDerivationGoal(
|
||||
pathResolved, wantedOutputs, buildMode);
|
||||
addWaitee(resolvedGoal);
|
||||
addWaitee(resolvedDrvGoal);
|
||||
|
||||
state = &DerivationGoal::resolvedFinished;
|
||||
return;
|
||||
|
@ -616,7 +616,9 @@ void DerivationGoal::tryToBuild()
|
|||
/* Don't do a remote build if the derivation has the attribute
|
||||
`preferLocalBuild' set. Also, check and repair modes are only
|
||||
supported for local builds. */
|
||||
bool buildLocally = buildMode != bmNormal || parsedDrv->willBuildLocally(worker.store);
|
||||
bool buildLocally =
|
||||
(buildMode != bmNormal || parsedDrv->willBuildLocally(worker.store))
|
||||
&& settings.maxBuildJobs.get() != 0;
|
||||
|
||||
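Aside (not part of the diff): the added maxBuildJobs check means that setting max-jobs to 0 now forces every derivation through the build hook, e.g. (hypothetical invocation)

    nix-build --max-jobs 0 default.nix   # fails unless a remote builder accepts the job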
if (!buildLocally) {
|
||||
switch (tryBuildHook()) {
|
||||
|
@ -653,7 +655,7 @@ void DerivationGoal::tryLocalBuild() {
|
|||
throw Error(
|
||||
"unable to build with a primary store that isn't a local store; "
|
||||
"either pass a different '--store' or enable remote builds."
|
||||
"\nhttps://nixos.org/nix/manual/#chap-distributed-builds");
|
||||
"\nhttps://nixos.org/manual/nix/stable/advanced-topics/distributed-builds.html");
|
||||
}
|
||||
|
||||
|
||||
|
@ -936,16 +938,17 @@ void DerivationGoal::buildDone()
|
|||
}
|
||||
|
||||
void DerivationGoal::resolvedFinished() {
|
||||
assert(resolvedDrv);
|
||||
assert(resolvedDrvGoal);
|
||||
auto resolvedDrv = *resolvedDrvGoal->drv;
|
||||
|
||||
auto resolvedHashes = staticOutputHashes(worker.store, *resolvedDrv);
|
||||
auto resolvedHashes = staticOutputHashes(worker.store, resolvedDrv);
|
||||
|
||||
StorePathSet outputPaths;
|
||||
|
||||
// `wantedOutputs` might be empty, which means “all the outputs”
|
||||
auto realWantedOutputs = wantedOutputs;
|
||||
if (realWantedOutputs.empty())
|
||||
realWantedOutputs = resolvedDrv->outputNames();
|
||||
realWantedOutputs = resolvedDrv.outputNames();
|
||||
|
||||
for (auto & wantedOutput : realWantedOutputs) {
|
||||
assert(initialOutputs.count(wantedOutput) != 0);
|
||||
|
@ -977,9 +980,17 @@ void DerivationGoal::resolvedFinished() {
|
|||
outputPaths
|
||||
);
|
||||
|
||||
// This is potentially a bit fishy in terms of error reporting. Not sure
|
||||
// how to do it in a cleaner way
|
||||
amDone(nrFailed == 0 ? ecSuccess : ecFailed, ex);
|
||||
auto status = [&]() {
|
||||
auto resolvedResult = resolvedDrvGoal->getResult();
|
||||
switch (resolvedResult.status) {
|
||||
case BuildResult::AlreadyValid:
|
||||
return BuildResult::ResolvesToAlreadyValid;
|
||||
default:
|
||||
return resolvedResult.status;
|
||||
}
|
||||
}();
|
||||
|
||||
done(status);
|
||||
}
|
||||
|
||||
HookReply DerivationGoal::tryBuildHook()
|
||||
|
@ -1002,20 +1013,20 @@ HookReply DerivationGoal::tryBuildHook()
|
|||
|
||||
/* Read the first line of input, which should be a word indicating
|
||||
whether the hook wishes to perform the build. */
|
||||
string reply;
|
||||
std::string reply;
|
||||
while (true) {
|
||||
auto s = [&]() {
|
||||
try {
|
||||
return readLine(worker.hook->fromHook.readSide.get());
|
||||
} catch (Error & e) {
|
||||
e.addTrace({}, "while reading the response from the build hook");
|
||||
throw e;
|
||||
throw;
|
||||
}
|
||||
}();
|
||||
if (handleJSONLogMessage(s, worker.act, worker.hook->activities, true))
|
||||
;
|
||||
else if (string(s, 0, 2) == "# ") {
|
||||
reply = string(s, 2);
|
||||
else if (s.substr(0, 2) == "# ") {
|
||||
reply = s.substr(2);
|
||||
break;
|
||||
}
|
||||
else {
|
||||
|
@ -1055,7 +1066,7 @@ HookReply DerivationGoal::tryBuildHook()
|
|||
machineName = readLine(hook->fromHook.readSide.get());
|
||||
} catch (Error & e) {
|
||||
e.addTrace({}, "while reading the machine name from the build hook");
|
||||
throw e;
|
||||
throw;
|
||||
}
|
||||
|
||||
/* Tell the hook all the inputs that have to be copied to the
|
||||
|
@ -1080,7 +1091,7 @@ HookReply DerivationGoal::tryBuildHook()
|
|||
/* Create the log file and pipe. */
|
||||
Path logFile = openLogFile();
|
||||
|
||||
set<int> fds;
|
||||
std::set<int> fds;
|
||||
fds.insert(hook->fromHook.readSide.get());
|
||||
fds.insert(hook->builderOut.readSide.get());
|
||||
worker.childStarted(shared_from_this(), fds, false, false);
|
||||
|
@ -1129,10 +1140,10 @@ Path DerivationGoal::openLogFile()
|
|||
logDir = localStore->logDir;
|
||||
else
|
||||
logDir = settings.nixLogDir;
|
||||
Path dir = fmt("%s/%s/%s/", logDir, LocalFSStore::drvsLogDir, string(baseName, 0, 2));
|
||||
Path dir = fmt("%s/%s/%s/", logDir, LocalFSStore::drvsLogDir, baseName.substr(0, 2));
|
||||
createDirs(dir);
|
||||
|
||||
Path logFileName = fmt("%s/%s%s", dir, string(baseName, 2),
|
||||
Path logFileName = fmt("%s/%s%s", dir, baseName.substr(2),
|
||||
settings.compressLog ? ".bz2" : "");
|
||||
|
||||
fdLogFile = open(logFileName.c_str(), O_CREAT | O_WRONLY | O_TRUNC | O_CLOEXEC, 0666);
|
||||
|
@ -1165,7 +1176,7 @@ bool DerivationGoal::isReadDesc(int fd)
|
|||
}
|
||||
|
||||
|
||||
void DerivationGoal::handleChildOutput(int fd, const string & data)
|
||||
void DerivationGoal::handleChildOutput(int fd, std::string_view data)
|
||||
{
|
||||
if (isReadDesc(fd))
|
||||
{
|
||||
|
@ -1273,7 +1284,7 @@ void DerivationGoal::checkPathValidity()
|
|||
: PathStatus::Corrupt,
|
||||
};
|
||||
}
|
||||
if (settings.isExperimentalFeatureEnabled("ca-derivations")) {
|
||||
if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations)) {
|
||||
auto drvOutput = DrvOutput{initialOutputs.at(i.first).outputHash, i.first};
|
||||
if (auto real = worker.store.queryRealisation(drvOutput)) {
|
||||
info.known = {
|
||||
|
@ -1327,6 +1338,13 @@ void DerivationGoal::done(BuildResult::Status status, std::optional<Error> ex)
|
|||
}
|
||||
|
||||
worker.updateProgress();
|
||||
|
||||
auto traceBuiltOutputsFile = getEnv("_NIX_TRACE_BUILT_OUTPUTS").value_or("");
|
||||
if (traceBuiltOutputsFile != "") {
|
||||
std::fstream fs;
|
||||
fs.open(traceBuiltOutputsFile, std::fstream::out);
|
||||
fs << worker.store.printStorePath(drvPath) << "\t" << result.toString() << std::endl;
|
||||
}
|
||||
}
|
|
@ -50,8 +50,8 @@ struct DerivationGoal : public Goal
|
|||
/* The path of the derivation. */
|
||||
StorePath drvPath;
|
||||
|
||||
/* The path of the corresponding resolved derivation */
|
||||
std::optional<BasicDerivation> resolvedDrv;
|
||||
/* The goal for the corresponding resolved derivation */
|
||||
std::shared_ptr<DerivationGoal> resolvedDrvGoal;
|
||||
|
||||
/* The specific outputs that we need to build. Empty means all of
|
||||
them. */
|
||||
|
@ -145,7 +145,7 @@ struct DerivationGoal : public Goal
|
|||
|
||||
void timedOut(Error && ex) override;
|
||||
|
||||
string key() override;
|
||||
std::string key() override;
|
||||
|
||||
void work() override;
|
||||
|
||||
|
@ -200,7 +200,7 @@ struct DerivationGoal : public Goal
|
|||
virtual bool isReadDesc(int fd);
|
||||
|
||||
/* Callback used by the worker to write to the log. */
|
||||
void handleChildOutput(int fd, const string & data) override;
|
||||
void handleChildOutput(int fd, std::string_view data) override;
|
||||
void handleEOF(int fd) override;
|
||||
void flushLine();
|
|
|||
#include "drv-output-substitution-goal.hh"
|
||||
#include "finally.hh"
|
||||
#include "worker.hh"
|
||||
#include "substitution-goal.hh"
|
||||
#include "callback.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
|
@ -50,14 +52,42 @@ void DrvOutputSubstitutionGoal::tryNext()
|
|||
return;
|
||||
}
|
||||
|
||||
auto sub = subs.front();
|
||||
sub = subs.front();
|
||||
subs.pop_front();
|
||||
|
||||
// FIXME: Make async
|
||||
outputInfo = sub->queryRealisation(id);
|
||||
// outputInfo = sub->queryRealisation(id);
|
||||
outPipe.create();
|
||||
promise = decltype(promise)();
|
||||
|
||||
sub->queryRealisation(
|
||||
id, { [&](std::future<std::shared_ptr<const Realisation>> res) {
|
||||
try {
|
||||
Finally updateStats([this]() { outPipe.writeSide.close(); });
|
||||
promise.set_value(res.get());
|
||||
} catch (...) {
|
||||
promise.set_exception(std::current_exception());
|
||||
}
|
||||
} });
|
||||
|
||||
worker.childStarted(shared_from_this(), {outPipe.readSide.get()}, true, false);
|
||||
|
||||
state = &DrvOutputSubstitutionGoal::realisationFetched;
|
||||
}
|
||||
|
||||
void DrvOutputSubstitutionGoal::realisationFetched()
|
||||
{
|
||||
worker.childTerminated(this);
|
||||
|
||||
try {
|
||||
outputInfo = promise.get_future().get();
|
||||
} catch (std::exception & e) {
|
||||
printError(e.what());
|
||||
substituterFailed = true;
|
||||
}
|
||||
|
||||
if (!outputInfo) {
|
||||
tryNext();
|
||||
return;
|
||||
return tryNext();
|
||||
}
|
||||
|
||||
for (const auto & [depId, depPath] : outputInfo->dependentRealisations) {
|
||||
|
@ -107,7 +137,7 @@ void DrvOutputSubstitutionGoal::finished()
|
|||
amDone(ecSuccess);
|
||||
}
|
||||
|
||||
string DrvOutputSubstitutionGoal::key()
|
||||
std::string DrvOutputSubstitutionGoal::key()
|
||||
{
|
||||
/* "a$" ensures substitution goals happen before derivation
|
||||
goals. */
|
||||
|
@ -119,4 +149,10 @@ void DrvOutputSubstitutionGoal::work()
|
|||
(this->*state)();
|
||||
}
|
||||
|
||||
void DrvOutputSubstitutionGoal::handleEOF(int fd)
|
||||
{
|
||||
if (fd == outPipe.readSide.get()) worker.wakeUp(shared_from_this());
|
||||
}
|
||||
|
||||
|
||||
}
@ -3,6 +3,8 @@
|
|||
#include "store-api.hh"
|
||||
#include "goal.hh"
|
||||
#include "realisation.hh"
|
||||
#include <thread>
|
||||
#include <future>
|
||||
|
||||
namespace nix {
|
||||
|
||||
|
@ -20,11 +22,18 @@ private:
|
|||
|
||||
// The realisation corresponding to the given output id.
|
||||
// Will be filled once we can get it.
|
||||
std::optional<Realisation> outputInfo;
|
||||
std::shared_ptr<const Realisation> outputInfo;
|
||||
|
||||
/* The remaining substituters. */
|
||||
std::list<ref<Store>> subs;
|
||||
|
||||
/* The current substituter. */
|
||||
std::shared_ptr<Store> sub;
|
||||
|
||||
Pipe outPipe;
|
||||
std::thread thr;
|
||||
std::promise<std::shared_ptr<const Realisation>> promise;
|
||||
|
||||
/* Whether a substituter failed. */
|
||||
bool substituterFailed = false;
|
||||
|
||||
|
@ -36,15 +45,16 @@ public:
|
|||
|
||||
void init();
|
||||
void tryNext();
|
||||
void realisationFetched();
|
||||
void outPathValid();
|
||||
void finished();
|
||||
|
||||
void timedOut(Error && ex) override { abort(); };
|
||||
|
||||
string key() override;
|
||||
std::string key() override;
|
||||
|
||||
void work() override;
|
||||
|
||||
void handleEOF(int fd) override;
|
||||
};
|
||||
|
||||
}
@ -1,4 +1,3 @@
|
|||
#include "machines.hh"
|
||||
#include "worker.hh"
|
||||
#include "substitution-goal.hh"
|
||||
#include "derivation-goal.hh"
|
||||
|
@@ -11,12 +10,12 @@ void Store::buildPaths(const std::vector<DerivedPath> & reqs, BuildMode buildMod
Worker worker(*this, evalStore ? *evalStore : *this);

Goals goals;
for (auto & br : reqs) {
for (const auto & br : reqs) {
std::visit(overloaded {
[&](DerivedPath::Built bfd) {
[&](const DerivedPath::Built & bfd) {
goals.insert(worker.makeDerivationGoal(bfd.drvPath, bfd.outputs, buildMode));
},
[&](DerivedPath::Opaque bo) {
[&](const DerivedPath::Opaque & bo) {
goals.insert(worker.makePathSubstitutionGoal(bo.path, buildMode == bmRepair ? Repair : NoRepair));
},
}, br.raw());
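Aside (not part of the diff): the overloaded visitor helper used above is the usual C++17 idiom for dispatching on a std::variant. A self-contained illustration with stand-in types:

    #include <variant>
    #include <string>
    #include <iostream>

    template<class... Ts> struct overloaded : Ts... { using Ts::operator()...; };
    template<class... Ts> overloaded(Ts...) -> overloaded<Ts...>;

    struct Built  { std::string drvPath; };
    struct Opaque { std::string path; };

    int main()
    {
        std::variant<Built, Opaque> req = Opaque{"/nix/store/example"};
        std::visit(overloaded {
            [](const Built & b)  { std::cout << "build " << b.drvPath << "\n"; },
            [](const Opaque & o) { std::cout << "substitute " << o.path << "\n"; },
        }, req);
    }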
@ -74,7 +73,7 @@ BuildResult Store::buildDerivation(const StorePath & drvPath, const BasicDerivat
|
|||
outputId,
|
||||
Realisation{ outputId, *staticOutput.second}
|
||||
);
|
||||
if (settings.isExperimentalFeatureEnabled("ca-derivations") && !derivationHasKnownOutputPaths(drv.type())) {
|
||||
if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations) && !derivationHasKnownOutputPaths(drv.type())) {
|
||||
auto realisation = this->queryRealisation(outputId);
|
||||
if (realisation)
|
||||
result.builtOutputs.insert_or_assign(
@ -5,8 +5,8 @@ namespace nix {
|
|||
|
||||
|
||||
bool CompareGoalPtrs::operator() (const GoalPtr & a, const GoalPtr & b) const {
|
||||
string s1 = a->key();
|
||||
string s2 = b->key();
|
||||
std::string s1 = a->key();
|
||||
std::string s2 = b->key();
|
||||
return s1 < s2;
|
||||
}
@ -18,8 +18,8 @@ struct CompareGoalPtrs {
|
|||
};
|
||||
|
||||
/* Set of goals. */
|
||||
typedef set<GoalPtr, CompareGoalPtrs> Goals;
|
||||
typedef set<WeakGoalPtr, std::owner_less<WeakGoalPtr>> WeakGoals;
|
||||
typedef std::set<GoalPtr, CompareGoalPtrs> Goals;
|
||||
typedef std::set<WeakGoalPtr, std::owner_less<WeakGoalPtr>> WeakGoals;
|
||||
|
||||
/* A map of paths to goals (and the other way around). */
|
||||
typedef std::map<StorePath, WeakGoalPtr> WeakGoalMap;
|
||||
|
@ -50,7 +50,7 @@ struct Goal : public std::enable_shared_from_this<Goal>
|
|||
unsigned int nrIncompleteClosure;
|
||||
|
||||
/* Name of this goal for debugging purposes. */
|
||||
string name;
|
||||
std::string name;
|
||||
|
||||
/* Whether the goal is finished. */
|
||||
ExitCode exitCode;
|
||||
|
@ -75,7 +75,7 @@ struct Goal : public std::enable_shared_from_this<Goal>
|
|||
|
||||
virtual void waiteeDone(GoalPtr waitee, ExitCode result);
|
||||
|
||||
virtual void handleChildOutput(int fd, const string & data)
|
||||
virtual void handleChildOutput(int fd, std::string_view data)
|
||||
{
|
||||
abort();
|
||||
}
|
||||
|
@ -87,7 +87,7 @@ struct Goal : public std::enable_shared_from_this<Goal>
|
|||
|
||||
void trace(const FormatOrString & fs);
|
||||
|
||||
string getName()
|
||||
std::string getName()
|
||||
{
|
||||
return name;
|
||||
}
|
||||
|
@ -97,7 +97,7 @@ struct Goal : public std::enable_shared_from_this<Goal>
|
|||
by the worker (important!), etc. */
|
||||
virtual void timedOut(Error && ex) = 0;
|
||||
|
||||
virtual string key() = 0;
|
||||
virtual std::string key() = 0;
|
||||
|
||||
void amDone(ExitCode result, std::optional<Error> ex = {});
@ -24,6 +24,7 @@
|
|||
#include <sys/mman.h>
|
||||
#include <sys/utsname.h>
|
||||
#include <sys/resource.h>
|
||||
#include <sys/socket.h>
|
||||
|
||||
#if HAVE_STATVFS
|
||||
#include <sys/statvfs.h>
|
||||
|
@ -252,6 +253,7 @@ void LocalDerivationGoal::cleanupHookFinally()
|
|||
void LocalDerivationGoal::cleanupPreChildKill()
|
||||
{
|
||||
sandboxMountNamespace = -1;
|
||||
sandboxUserNamespace = -1;
|
||||
}
|
||||
|
||||
|
||||
|
@ -334,7 +336,7 @@ int childEntry(void * arg)
|
|||
return 1;
|
||||
}
|
||||
|
||||
|
||||
#if __linux__
|
||||
static void linkOrCopy(const Path & from, const Path & to)
|
||||
{
|
||||
if (link(from.c_str(), to.c_str()) == -1) {
|
||||
|
@ -350,6 +352,7 @@ static void linkOrCopy(const Path & from, const Path & to)
|
|||
copyPath(from, to);
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
|
||||
void LocalDerivationGoal::startBuilder()
|
||||
|
@ -471,12 +474,12 @@ void LocalDerivationGoal::startBuilder()
|
|||
temporary build directory. The text files have the format used
|
||||
by `nix-store --register-validity'. However, the deriver
|
||||
fields are left empty. */
|
||||
string s = get(drv->env, "exportReferencesGraph").value_or("");
|
||||
auto s = get(drv->env, "exportReferencesGraph").value_or("");
|
||||
Strings ss = tokenizeString<Strings>(s);
|
||||
if (ss.size() % 2 != 0)
|
||||
throw BuildError("odd number of tokens in 'exportReferencesGraph': '%1%'", s);
|
||||
for (Strings::iterator i = ss.begin(); i != ss.end(); ) {
|
||||
string fileName = *i++;
|
||||
auto fileName = *i++;
|
||||
static std::regex regex("[A-Za-z_][A-Za-z0-9_.-]*");
|
||||
if (!std::regex_match(fileName, regex))
|
||||
throw Error("invalid file name '%s' in 'exportReferencesGraph'", fileName);
|
||||
|
@ -494,7 +497,7 @@ void LocalDerivationGoal::startBuilder()
|
|||
}
|
||||
|
||||
useUidRange = parsedDrv->getRequiredSystemFeatures().count("uid-range");
|
||||
useSystemdCgroup = parsedDrv->getRequiredSystemFeatures().count("systemd-cgroup");
|
||||
useSystemdCgroup = parsedDrv->getRequiredSystemFeatures().count("Systemd-cgroup");
|
||||
|
||||
if (useChroot) {
|
||||
|
||||
|
@ -510,10 +513,10 @@ void LocalDerivationGoal::startBuilder()
|
|||
i.pop_back();
|
||||
}
|
||||
size_t p = i.find('=');
|
||||
if (p == string::npos)
|
||||
if (p == std::string::npos)
|
||||
dirsInChroot[i] = {i, optional};
|
||||
else
|
||||
dirsInChroot[string(i, 0, p)] = {string(i, p + 1), optional};
|
||||
dirsInChroot[i.substr(0, p)] = {i.substr(p + 1), optional};
|
||||
}
|
||||
dirsInChroot[tmpDirInSandbox] = tmpDir;
|
||||
|
||||
|
@ -647,7 +650,7 @@ void LocalDerivationGoal::startBuilder()
|
|||
}
|
||||
|
||||
if (useSystemdCgroup) {
|
||||
settings.requireExperimentalFeature("systemd-cgroup");
|
||||
settings.requireExperimentalFeature(Xp::SystemdCgroup);
|
||||
std::optional<Path> cgroup;
|
||||
if (!buildUser || !(cgroup = buildUser->getCgroup()))
|
||||
throw Error("feature 'systemd-cgroup' requires 'auto-allocate-uids = true' in nix.conf");
|
||||
|
@ -689,9 +692,10 @@ void LocalDerivationGoal::startBuilder()
|
|||
auto state = stBegin;
|
||||
auto lines = runProgram(settings.preBuildHook, false, args);
|
||||
auto lastPos = std::string::size_type{0};
|
||||
for (auto nlPos = lines.find('\n'); nlPos != string::npos;
|
||||
nlPos = lines.find('\n', lastPos)) {
|
||||
auto line = std::string{lines, lastPos, nlPos - lastPos};
|
||||
for (auto nlPos = lines.find('\n'); nlPos != std::string::npos;
|
||||
nlPos = lines.find('\n', lastPos))
|
||||
{
|
||||
auto line = lines.substr(lastPos, nlPos - lastPos);
|
||||
lastPos = nlPos + 1;
|
||||
if (state == stBegin) {
|
||||
if (line == "extra-sandbox-paths" || line == "extra-chroot-dirs") {
|
||||
|
@ -704,10 +708,10 @@ void LocalDerivationGoal::startBuilder()
|
|||
state = stBegin;
|
||||
} else {
|
||||
auto p = line.find('=');
|
||||
if (p == string::npos)
|
||||
if (p == std::string::npos)
|
||||
dirsInChroot[line] = line;
|
||||
else
|
||||
dirsInChroot[string(line, 0, p)] = string(line, p + 1);
|
||||
dirsInChroot[line.substr(0, p)] = line.substr(p + 1);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -731,6 +735,7 @@ void LocalDerivationGoal::startBuilder()
|
|||
if (!builderOut.readSide)
|
||||
throw SysError("opening pseudoterminal master");
|
||||
|
||||
// FIXME: not thread-safe, use ptsname_r
|
||||
std::string slaveName(ptsname(builderOut.readSide.get()));
|
||||
|
||||
if (buildUser) {
|
||||
|
@ -774,7 +779,6 @@ void LocalDerivationGoal::startBuilder()
|
|||
result.startTime = time(0);
|
||||
|
||||
/* Fork a child to build the package. */
|
||||
ProcessOptions options;
|
||||
|
||||
#if __linux__
|
||||
if (useChroot) {
|
||||
|
@ -817,8 +821,6 @@ void LocalDerivationGoal::startBuilder()
|
|||
|
||||
userNamespaceSync.create();
|
||||
|
||||
options.allowVfork = false;
|
||||
|
||||
Path maxUserNamespaces = "/proc/sys/user/max_user_namespaces";
|
||||
static bool userNamespacesEnabled =
|
||||
pathExists(maxUserNamespaces)
|
||||
|
@ -876,7 +878,7 @@ void LocalDerivationGoal::startBuilder()
|
|||
writeFull(builderOut.writeSide.get(),
|
||||
fmt("%d %d\n", usingUserNamespace, child));
|
||||
_exit(0);
|
||||
}, options);
|
||||
});
|
||||
|
||||
int res = helper.wait();
|
||||
if (res != 0 && settings.sandboxFallback) {
|
||||
|
@ -929,12 +931,18 @@ void LocalDerivationGoal::startBuilder()
|
|||
"nobody:x:65534:65534:Nobody:/:/noshell\n",
|
||||
sandboxUid(), sandboxGid(), settings.sandboxBuildDir));
|
||||
|
||||
/* Save the mount namespace of the child. We have to do this
|
||||
/* Save the mount- and user namespace of the child. We have to do this
|
||||
*before* the child does a chroot. */
|
||||
sandboxMountNamespace = open(fmt("/proc/%d/ns/mnt", (pid_t) pid).c_str(), O_RDONLY);
|
||||
if (sandboxMountNamespace.get() == -1)
|
||||
throw SysError("getting sandbox mount namespace");
|
||||
|
||||
if (usingUserNamespace) {
|
||||
sandboxUserNamespace = open(fmt("/proc/%d/ns/user", (pid_t) pid).c_str(), O_RDONLY);
|
||||
if (sandboxUserNamespace.get() == -1)
|
||||
throw SysError("getting sandbox user namespace");
|
||||
}
|
||||
|
||||
/* Move the child into its own cgroup. */
|
||||
if (buildUser) {
|
||||
if (auto cgroup = buildUser->getCgroup())
|
||||
|
@ -947,11 +955,12 @@ void LocalDerivationGoal::startBuilder()
|
|||
} else
|
||||
#endif
|
||||
{
|
||||
#if __linux__
|
||||
fallback:
|
||||
options.allowVfork = !buildUser && !drv->isBuiltin();
|
||||
#endif
|
||||
pid = startProcess([&]() {
|
||||
runChild();
|
||||
}, options);
|
||||
});
|
||||
}
|
||||
|
||||
/* parent */
|
||||
|
@ -962,17 +971,20 @@ void LocalDerivationGoal::startBuilder()
|
|||
/* Check if setting up the build environment failed. */
|
||||
std::vector<std::string> msgs;
|
||||
while (true) {
|
||||
string msg = [&]() {
|
||||
std::string msg = [&]() {
|
||||
try {
|
||||
return readLine(builderOut.readSide.get());
|
||||
} catch (Error & e) {
|
||||
e.addTrace({}, "while waiting for the build environment to initialize (previous messages: %s)",
|
||||
auto status = pid.wait();
|
||||
e.addTrace({}, "while waiting for the build environment for '%s' to initialize (%s, previous messages: %s)",
|
||||
worker.store.printStorePath(drvPath),
|
||||
statusToString(status),
|
||||
concatStringsSep("|", msgs));
|
||||
throw e;
|
||||
throw;
|
||||
}
|
||||
}();
|
||||
if (string(msg, 0, 1) == "\2") break;
|
||||
if (string(msg, 0, 1) == "\1") {
|
||||
if (msg.substr(0, 1) == "\2") break;
|
||||
if (msg.substr(0, 1) == "\1") {
|
||||
FdSource source(builderOut.readSide.get());
|
||||
auto ex = readError(source);
|
||||
ex.addTrace({}, "while setting up the build environment");
|
||||
|
@ -1008,7 +1020,7 @@ void LocalDerivationGoal::initTmpDir() {
|
|||
env[i.first] = i.second;
|
||||
} else {
|
||||
auto hash = hashString(htSHA256, i.first);
|
||||
string fn = ".attr-" + hash.to_string(Base32, false);
|
||||
std::string fn = ".attr-" + hash.to_string(Base32, false);
|
||||
Path p = tmpDir + "/" + fn;
|
||||
writeFile(p, rewriteStrings(i.second, inputRewrites));
|
||||
chownToBuilder(p);
|
||||
|
@ -1099,7 +1111,7 @@ void LocalDerivationGoal::writeStructuredAttrs()
|
|||
for (auto & [i, v] : json["outputs"].get<nlohmann::json::object_t>()) {
|
||||
/* The placeholder must have a rewrite, so we use it to cover both the
|
||||
cases where we know or don't know the output path ahead of time. */
|
||||
rewritten[i] = rewriteStrings(v, inputRewrites);
|
||||
rewritten[i] = rewriteStrings((std::string) v, inputRewrites);
|
||||
}
|
||||
|
||||
json["outputs"] = rewritten;
|
||||
|
@ -1119,10 +1131,10 @@ void LocalDerivationGoal::writeStructuredAttrs()
|
|||
static StorePath pathPartOfReq(const DerivedPath & req)
|
||||
{
|
||||
return std::visit(overloaded {
|
||||
[&](DerivedPath::Opaque bo) {
|
||||
[&](const DerivedPath::Opaque & bo) {
|
||||
return bo.path;
|
||||
},
|
||||
[&](DerivedPath::Built bfd) {
|
||||
[&](const DerivedPath::Built & bfd) {
|
||||
return bfd.drvPath;
|
||||
},
|
||||
}, req.raw());
|
||||
|
@ -1205,9 +1217,14 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual Lo
|
|||
std::optional<StorePath> queryPathFromHashPart(const std::string & hashPart) override
|
||||
{ throw Error("queryPathFromHashPart"); }
|
||||
|
||||
StorePath addToStore(const string & name, const Path & srcPath,
|
||||
FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256,
|
||||
PathFilter & filter = defaultPathFilter, RepairFlag repair = NoRepair) override
|
||||
StorePath addToStore(
|
||||
std::string_view name,
|
||||
const Path & srcPath,
|
||||
FileIngestionMethod method,
|
||||
HashType hashAlgo,
|
||||
PathFilter & filter,
|
||||
RepairFlag repair,
|
||||
const StorePathSet & references) override
|
||||
{ throw Error("addToStore"); }
|
||||
|
||||
void addToStore(const ValidPathInfo & info, Source & narSource,
|
||||
|
@ -1217,18 +1234,26 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual Lo
|
|||
goal.addDependency(info.path);
|
||||
}
|
||||
|
||||
StorePath addTextToStore(const string & name, const string & s,
|
||||
const StorePathSet & references, RepairFlag repair = NoRepair) override
|
||||
StorePath addTextToStore(
|
||||
std::string_view name,
|
||||
std::string_view s,
|
||||
const StorePathSet & references,
|
||||
RepairFlag repair = NoRepair) override
|
||||
{
|
||||
auto path = next->addTextToStore(name, s, references, repair);
|
||||
goal.addDependency(path);
|
||||
return path;
|
||||
}
|
||||
|
||||
StorePath addToStoreFromDump(Source & dump, const string & name,
|
||||
FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair) override
|
||||
StorePath addToStoreFromDump(
|
||||
Source & dump,
|
||||
std::string_view name,
|
||||
FileIngestionMethod method,
|
||||
HashType hashAlgo,
|
||||
RepairFlag repair,
|
||||
const StorePathSet & references) override
|
||||
{
|
||||
auto path = next->addToStoreFromDump(dump, name, method, hashAlgo, repair);
|
||||
auto path = next->addToStoreFromDump(dump, name, method, hashAlgo, repair, references);
|
||||
goal.addDependency(path);
|
||||
return path;
|
||||
}
|
||||
|
@ -1252,13 +1277,14 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual Lo
|
|||
// corresponds to an allowed derivation
|
||||
{ throw Error("registerDrvOutput"); }
|
||||
|
||||
std::optional<const Realisation> queryRealisation(const DrvOutput & id) override
|
||||
void queryRealisationUncached(const DrvOutput & id,
|
||||
Callback<std::shared_ptr<const Realisation>> callback) noexcept override
|
||||
// XXX: This should probably be allowed if the realisation corresponds to
|
||||
// an allowed derivation
|
||||
{
|
||||
if (!goal.isAllowed(id))
|
||||
throw InvalidPath("cannot query an unknown output id '%s' in recursive Nix", id.to_string());
|
||||
return next->queryRealisation(id);
|
||||
callback(nullptr);
|
||||
next->queryRealisation(id, std::move(callback));
|
||||
}
|
||||
|
||||
void buildPaths(const std::vector<DerivedPath> & paths, BuildMode buildMode, std::shared_ptr<Store> evalStore) override
|
||||
|
@ -1287,7 +1313,7 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual Lo
|
|||
for (auto & [outputName, outputPath] : outputs)
|
||||
if (wantOutput(outputName, bfd.outputs)) {
|
||||
newPaths.insert(outputPath);
|
||||
if (settings.isExperimentalFeatureEnabled("ca-derivations")) {
|
||||
if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations)) {
|
||||
auto thisRealisation = next->queryRealisation(
|
||||
DrvOutput{drvHashes.at(outputName), outputName}
|
||||
);
|
||||
|
@ -1348,7 +1374,7 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual Lo
|
|||
|
||||
void LocalDerivationGoal::startDaemon()
|
||||
{
|
||||
settings.requireExperimentalFeature("recursive-nix");
|
||||
settings.requireExperimentalFeature(Xp::RecursiveNix);
|
||||
|
||||
Store::Params params;
|
||||
params["path-info-cache-size"] = "0";
|
||||
|
@ -1381,7 +1407,7 @@ void LocalDerivationGoal::startDaemon()
|
|||
AutoCloseFD remote = accept(daemonSocket.get(),
|
||||
(struct sockaddr *) &remoteAddr, &remoteAddrLen);
|
||||
if (!remote) {
|
||||
if (errno == EINTR) continue;
|
||||
if (errno == EINTR || errno == EAGAIN) continue;
|
||||
if (errno == EINVAL) break;
|
||||
throw SysError("accepting connection");
|
||||
}
|
||||
|
@ -1460,6 +1486,9 @@ void LocalDerivationGoal::addDependency(const StorePath & path)
|
|||
child process.*/
|
||||
Pid child(startProcess([&]() {
|
||||
|
||||
if (usingUserNamespace && (setns(sandboxUserNamespace.get(), 0) == -1))
|
||||
throw SysError("entering sandbox user namespace");
|
||||
|
||||
if (setns(sandboxMountNamespace.get(), 0) == -1)
|
||||
throw SysError("entering sandbox mount namespace");
|
||||
|
||||
|
@ -1814,11 +1843,14 @@ void LocalDerivationGoal::runChild()
|
|||
i686-linux build on an x86_64-linux machine. */
|
||||
struct utsname utsbuf;
|
||||
uname(&utsbuf);
|
||||
if (drv->platform == "i686-linux" &&
|
||||
(settings.thisSystem == "x86_64-linux" ||
|
||||
(!strcmp(utsbuf.sysname, "Linux") && !strcmp(utsbuf.machine, "x86_64")))) {
|
||||
if ((drv->platform == "i686-linux"
|
||||
&& (settings.thisSystem == "x86_64-linux"
|
||||
|| (!strcmp(utsbuf.sysname, "Linux") && !strcmp(utsbuf.machine, "x86_64"))))
|
||||
|| drv->platform == "armv7l-linux"
|
||||
|| drv->platform == "armv6l-linux")
|
||||
{
|
||||
if (personality(PER_LINUX32) == -1)
|
||||
throw SysError("cannot set i686-linux personality");
|
||||
throw SysError("cannot set 32-bit personality");
|
||||
}
|
||||
|
||||
/* Impersonate a Linux 2.6 machine to get some determinism in
|
||||
|
@ -1873,7 +1905,7 @@ void LocalDerivationGoal::runChild()
|
|||
/* Fill in the arguments. */
|
||||
Strings args;
|
||||
|
||||
const char *builder = "invalid";
|
||||
std::string builder = "invalid";
|
||||
|
||||
if (drv->isBuiltin()) {
|
||||
;
|
||||
|
@ -1943,7 +1975,7 @@ void LocalDerivationGoal::runChild()
|
|||
"can't map '%1%' to '%2%': mismatched impure paths not supported on Darwin",
|
||||
i.first, i.second.source);
|
||||
|
||||
string path = i.first;
|
||||
std::string path = i.first;
|
||||
struct stat st;
|
||||
if (lstat(path.c_str(), &st)) {
|
||||
if (i.second.optional && errno == ENOENT)
|
||||
|
@ -1995,17 +2027,17 @@ void LocalDerivationGoal::runChild()
|
|||
args.push_back("IMPORT_DIR=" + settings.nixDataDir + "/nix/sandbox/");
|
||||
if (allowLocalNetworking) {
|
||||
args.push_back("-D");
|
||||
args.push_back(string("_ALLOW_LOCAL_NETWORKING=1"));
|
||||
args.push_back(std::string("_ALLOW_LOCAL_NETWORKING=1"));
|
||||
}
|
||||
args.push_back(drv->builder);
|
||||
} else {
|
||||
builder = drv->builder.c_str();
|
||||
builder = drv->builder;
|
||||
args.push_back(std::string(baseNameOf(drv->builder)));
|
||||
}
|
||||
}
|
||||
#else
|
||||
else {
|
||||
builder = drv->builder.c_str();
|
||||
builder = drv->builder;
|
||||
args.push_back(std::string(baseNameOf(drv->builder)));
|
||||
}
|
||||
#endif
|
||||
|
@ -2014,7 +2046,7 @@ void LocalDerivationGoal::runChild()
|
|||
args.push_back(rewriteStrings(i, inputRewrites));
|
||||
|
||||
/* Indicate that we managed to set up the build environment. */
|
||||
writeFull(STDERR_FILENO, string("\2\n"));
|
||||
writeFull(STDERR_FILENO, std::string("\2\n"));
|
||||
|
||||
/* Execute the program. This should not return. */
|
||||
if (drv->isBuiltin()) {
|
||||
|
@ -2032,7 +2064,7 @@ void LocalDerivationGoal::runChild()
|
|||
else if (drv->builder == "builtin:unpack-channel")
|
||||
builtinUnpackChannel(drv2);
|
||||
else
|
||||
throw Error("unsupported builtin function '%1%'", string(drv->builder, 8));
|
||||
throw Error("unsupported builtin builder '%1%'", drv->builder.substr(8));
|
||||
_exit(0);
|
||||
} catch (std::exception & e) {
|
||||
writeFull(STDERR_FILENO, e.what() + std::string("\n"));
|
||||
|
@ -2061,9 +2093,9 @@ void LocalDerivationGoal::runChild()
|
|||
posix_spawnattr_setbinpref_np(&attrp, 1, &cpu, NULL);
|
||||
}
|
||||
|
||||
posix_spawn(NULL, builder, NULL, &attrp, stringsToCharPtrs(args).data(), stringsToCharPtrs(envStrs).data());
|
||||
posix_spawn(NULL, builder.c_str(), NULL, &attrp, stringsToCharPtrs(args).data(), stringsToCharPtrs(envStrs).data());
|
||||
#else
|
||||
execve(builder, stringsToCharPtrs(args).data(), stringsToCharPtrs(envStrs).data());
|
||||
execve(builder.c_str(), stringsToCharPtrs(args).data(), stringsToCharPtrs(envStrs).data());
|
||||
#endif
|
||||
|
||||
throw SysError("executing '%1%'", drv->builder);
|
||||
|
@ -2181,8 +2213,7 @@ void LocalDerivationGoal::registerOutputs()
|
|||
|
||||
/* Pass blank Sink as we are not ready to hash data at this stage. */
|
||||
NullSink blank;
|
||||
auto references = worker.store.parseStorePathSet(
|
||||
scanForReferences(blank, actualPath, worker.store.printStorePathSet(referenceablePaths)));
|
||||
auto references = scanForReferences(blank, actualPath, referenceablePaths);
|
||||
|
||||
outputReferencesIfUnregistered.insert_or_assign(
|
||||
outputName,
|
||||
|
@ -2196,8 +2227,8 @@ void LocalDerivationGoal::registerOutputs()
|
|||
/* Since we'll use the already installed versions of these, we
|
||||
can treat them as leaves and ignore any references they
|
||||
have. */
|
||||
[&](AlreadyRegistered _) { return StringSet {}; },
|
||||
[&](PerhapsNeedToRegister refs) {
|
||||
[&](const AlreadyRegistered &) { return StringSet {}; },
|
||||
[&](const PerhapsNeedToRegister & refs) {
|
||||
StringSet referencedOutputs;
|
||||
/* FIXME build inverted map up front so no quadratic waste here */
|
||||
for (auto & r : refs.refs)
|
||||
|
@ -2233,11 +2264,11 @@ void LocalDerivationGoal::registerOutputs()
|
|||
};
|
||||
|
||||
std::optional<StorePathSet> referencesOpt = std::visit(overloaded {
|
||||
[&](AlreadyRegistered skippedFinalPath) -> std::optional<StorePathSet> {
|
||||
[&](const AlreadyRegistered & skippedFinalPath) -> std::optional<StorePathSet> {
|
||||
finish(skippedFinalPath.path);
|
||||
return std::nullopt;
|
||||
},
|
||||
[&](PerhapsNeedToRegister r) -> std::optional<StorePathSet> {
|
||||
[&](const PerhapsNeedToRegister & r) -> std::optional<StorePathSet> {
|
||||
return r.refs;
|
||||
},
|
||||
}, outputReferencesIfUnregistered.at(outputName));
|
||||
|
@ -2249,14 +2280,14 @@ void LocalDerivationGoal::registerOutputs()
|
|||
auto rewriteOutput = [&]() {
|
||||
/* Apply hash rewriting if necessary. */
|
||||
if (!outputRewrites.empty()) {
|
||||
warn("rewriting hashes in '%1%'; cross fingers", actualPath);
|
||||
debug("rewriting hashes in '%1%'; cross fingers", actualPath);
|
||||
|
||||
/* FIXME: this is in-memory. */
|
||||
StringSink sink;
|
||||
dumpPath(actualPath, sink);
|
||||
deletePath(actualPath);
|
||||
sink.s = make_ref<std::string>(rewriteStrings(*sink.s, outputRewrites));
|
||||
StringSource source(*sink.s);
|
||||
sink.s = rewriteStrings(sink.s, outputRewrites);
|
||||
StringSource source(sink.s);
|
||||
restorePath(actualPath, source);
|
||||
|
||||
/* FIXME: set proper permissions in restorePath() so
|
||||
|
@ -2328,7 +2359,7 @@ void LocalDerivationGoal::registerOutputs()
|
|||
StringSink sink;
|
||||
dumpPath(actualPath, sink);
|
||||
RewritingSink rsink2(oldHashPart, std::string(finalPath.hashPart()), nextSink);
|
||||
rsink2(*sink.s);
|
||||
rsink2(sink.s);
|
||||
rsink2.flush();
|
||||
});
|
||||
Path tmpPath = actualPath + ".tmp";
|
||||
|
@ -2357,7 +2388,7 @@ void LocalDerivationGoal::registerOutputs()
|
|||
};
|
||||
|
||||
ValidPathInfo newInfo = std::visit(overloaded {
|
||||
[&](DerivationOutputInputAddressed output) {
|
||||
[&](const DerivationOutputInputAddressed & output) {
|
||||
/* input-addressed case */
|
||||
auto requiredFinalPath = output.path;
|
||||
/* Preemptively add rewrite rule for final hash, as that is
|
||||
|
@ -2376,14 +2407,14 @@ void LocalDerivationGoal::registerOutputs()
|
|||
newInfo0.references.insert(newInfo0.path);
|
||||
return newInfo0;
|
||||
},
|
||||
[&](DerivationOutputCAFixed dof) {
|
||||
[&](const DerivationOutputCAFixed & dof) {
|
||||
auto newInfo0 = newInfoFromCA(DerivationOutputCAFloating {
|
||||
.method = dof.hash.method,
|
||||
.hashType = dof.hash.hash.type,
|
||||
});
|
||||
|
||||
/* Check wanted hash */
|
||||
Hash & wanted = dof.hash.hash;
|
||||
const Hash & wanted = dof.hash.hash;
|
||||
assert(newInfo0.ca);
|
||||
auto got = getContentAddressHash(*newInfo0.ca);
|
||||
if (wanted != got) {
|
||||
|
@ -2401,14 +2432,10 @@ void LocalDerivationGoal::registerOutputs()
|
|||
[&](DerivationOutputCAFloating dof) {
|
||||
return newInfoFromCA(dof);
|
||||
},
|
||||
[&](DerivationOutputDeferred) {
|
||||
[&](DerivationOutputDeferred) -> ValidPathInfo {
|
||||
// No derivation should reach that point without having been
|
||||
// rewritten first
|
||||
assert(false);
|
||||
// Ugly, but the compiler insists on having this return a value
|
||||
// of type `ValidPathInfo` despite the `assert(false)`, so
|
||||
// let's provide it
|
||||
return *(ValidPathInfo*)0;
|
||||
},
|
||||
}, output.output);
|
||||
|
||||
|
@ -2500,7 +2527,7 @@ void LocalDerivationGoal::registerOutputs()
|
|||
}
|
||||
|
||||
if (curRound == nrRounds) {
|
||||
localStore.optimisePath(actualPath); // FIXME: combine with scanForReferences()
|
||||
localStore.optimisePath(actualPath, NoRepair); // FIXME: combine with scanForReferences()
|
||||
worker.markContentsGood(newInfo.path);
|
||||
}
|
||||
|
||||
|
@ -2519,7 +2546,13 @@ void LocalDerivationGoal::registerOutputs()
|
|||
infos.emplace(outputName, std::move(newInfo));
|
||||
}
|
||||
|
||||
if (buildMode == bmCheck) return;
|
||||
if (buildMode == bmCheck) {
|
||||
// In case of FOD mismatches on `--check` an error must be thrown as this is also
|
||||
// a source for non-determinism.
|
||||
if (delayedException)
|
||||
std::rethrow_exception(delayedException);
|
||||
return;
|
||||
}
|
||||
|
||||
/* Apply output checks. */
|
||||
checkOutputs(infos);
|
||||
|
@ -2604,7 +2637,7 @@ void LocalDerivationGoal::registerOutputs()
|
|||
that for floating CA derivations, which otherwise couldn't be cached,
|
||||
but it's fine to do in all cases. */
|
||||
|
||||
if (settings.isExperimentalFeatureEnabled("ca-derivations")) {
|
||||
if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations)) {
|
||||
for (auto& [outputName, newInfo] : infos) {
|
||||
auto thisRealisation = Realisation{
|
||||
.id = DrvOutput{initialOutputs.at(outputName).outputHash,
|
||||
|
@ -2718,7 +2751,7 @@ void LocalDerivationGoal::checkOutputs(const std::map<Path, ValidPathInfo> & out
|
|||
}
|
||||
|
||||
if (!badPaths.empty()) {
|
||||
string badPathsStr;
|
||||
std::string badPathsStr;
|
||||
for (auto & i : badPaths) {
|
||||
badPathsStr += "\n ";
|
||||
badPathsStr += worker.store.printStorePath(i);
|
@ -27,9 +27,10 @@ struct LocalDerivationGoal : public DerivationGoal
|
|||
/* Pipe for synchronising updates to the builder namespaces. */
|
||||
Pipe userNamespaceSync;
|
||||
|
||||
/* The mount namespace of the builder, used to add additional
|
||||
/* The mount namespace and user namespace of the builder, used to add additional
|
||||
paths to the sandbox as a result of recursive Nix calls. */
|
||||
AutoCloseFD sandboxMountNamespace;
|
||||
AutoCloseFD sandboxUserNamespace;
|
||||
|
||||
/* On Linux, whether we're doing the build in its own user
|
||||
namespace. */
|
||||
|
@ -64,11 +65,11 @@ struct LocalDerivationGoal : public DerivationGoal
|
|||
typedef map<Path, ChrootPath> DirsInChroot; // maps target path to source path
|
||||
DirsInChroot dirsInChroot;
|
||||
|
||||
typedef map<string, string> Environment;
|
||||
typedef map<std::string, std::string> Environment;
|
||||
Environment env;
|
||||
|
||||
#if __APPLE__
|
||||
typedef string SandboxProfile;
|
||||
typedef std::string SandboxProfile;
|
||||
SandboxProfile additionalSandboxProfile;
|
||||
#endif
@ -138,8 +138,8 @@ void PathSubstitutionGoal::tryNext()
|
|||
only after we've downloaded the path. */
|
||||
if (!sub->isTrusted && worker.store.pathInfoIsUntrusted(*info))
|
||||
{
|
||||
warn("substituter '%s' does not have a valid signature for path '%s'",
|
||||
sub->getUri(), worker.store.printStorePath(storePath));
|
||||
warn("the substitute for '%s' from '%s' is not signed by any of the keys in 'trusted-public-keys'",
|
||||
worker.store.printStorePath(storePath), sub->getUri());
|
||||
tryNext();
|
||||
return;
|
||||
}
|
||||
|
@ -272,7 +272,7 @@ void PathSubstitutionGoal::finished()
|
|||
}
|
||||
|
||||
|
||||
void PathSubstitutionGoal::handleChildOutput(int fd, const string & data)
|
||||
void PathSubstitutionGoal::handleChildOutput(int fd, std::string_view data)
|
||||
{
|
||||
}
@ -59,7 +59,7 @@ public:
|
|||
|
||||
void timedOut(Error && ex) override { abort(); };
|
||||
|
||||
string key() override
|
||||
std::string key() override
|
||||
{
|
||||
/* "a$" ensures substitution goals happen before derivation
|
||||
goals. */
|
||||
|
@ -77,7 +77,7 @@ public:
|
|||
void finished();
|
||||
|
||||
/* Callback used by the worker to write to the log. */
|
||||
void handleChildOutput(int fd, const string & data) override;
|
||||
void handleChildOutput(int fd, std::string_view data) override;
|
||||
void handleEOF(int fd) override;
|
||||
|
||||
void cleanup() override;
|
@ -161,7 +161,7 @@ unsigned Worker::getNrLocalBuilds()
|
|||
}
|
||||
|
||||
|
||||
void Worker::childStarted(GoalPtr goal, const set<int> & fds,
|
||||
void Worker::childStarted(GoalPtr goal, const std::set<int> & fds,
|
||||
bool inBuildSlot, bool respectTimeouts)
|
||||
{
|
||||
Child child;
|
||||
|
@ -239,7 +239,7 @@ void Worker::run(const Goals & _topGoals)
|
|||
}
|
||||
}
|
||||
|
||||
/* Call queryMissing() efficiently query substitutes. */
|
||||
/* Call queryMissing() to efficiently query substitutes. */
|
||||
StorePathSet willBuild, willSubstitute, unknown;
|
||||
uint64_t downloadSize, narSize;
|
||||
store.queryMissing(topPaths, willBuild, willSubstitute, unknown, downloadSize, narSize);
|
||||
|
@ -281,11 +281,11 @@ void Worker::run(const Goals & _topGoals)
|
|||
if (getMachines().empty())
|
||||
throw Error("unable to start any build; either increase '--max-jobs' "
|
||||
"or enable remote builds."
|
||||
"\nhttps://nixos.org/nix/manual/#chap-distributed-builds");
|
||||
"\nhttps://nixos.org/manual/nix/stable/advanced-topics/distributed-builds.html");
|
||||
else
|
||||
throw Error("unable to start any build; remote machines may not have "
|
||||
"all required system features."
|
||||
"\nhttps://nixos.org/nix/manual/#chap-distributed-builds");
|
||||
"\nhttps://nixos.org/manual/nix/stable/advanced-topics/distributed-builds.html");
|
||||
|
||||
}
|
||||
assert(!awake.empty());
|
||||
|
@ -377,7 +377,7 @@ void Worker::waitForInput()
|
|||
GoalPtr goal = j->goal.lock();
|
||||
assert(goal);
|
||||
|
||||
set<int> fds2(j->fds);
|
||||
std::set<int> fds2(j->fds);
|
||||
std::vector<unsigned char> buffer(4096);
|
||||
for (auto & k : fds2) {
|
||||
if (pollStatus.at(fdToPollStatus.at(k)).revents) {
|
||||
|
@ -394,7 +394,7 @@ void Worker::waitForInput()
|
|||
} else {
|
||||
printMsg(lvlVomit, "%1%: read %2% bytes",
|
||||
goal->getName(), rd);
|
||||
string data((char *) buffer.data(), rd);
|
||||
std::string data((char *) buffer.data(), rd);
|
||||
j->lastOutput = after;
|
||||
goal->handleChildOutput(k, data);
|
||||
}
|
@ -38,7 +38,7 @@ struct Child
|
|||
{
|
||||
WeakGoalPtr goal;
|
||||
Goal * goal2; // ugly hackery
|
||||
set<int> fds;
|
||||
std::set<int> fds;
|
||||
bool respectTimeouts;
|
||||
bool inBuildSlot;
|
||||
steady_time_point lastOutput; /* time we last got output on stdout/stderr */
|
||||
|
@ -167,7 +167,7 @@ public:
|
|||
|
||||
/* Registers a running child process. `inBuildSlot' means that
|
||||
the process counts towards the jobs limit. */
|
||||
void childStarted(GoalPtr goal, const set<int> & fds,
|
||||
void childStarted(GoalPtr goal, const std::set<int> & fds,
|
||||
bool inBuildSlot, bool respectTimeouts);
|
||||
|
||||
/* Unregisters a running child process. `wakeSleepers' should be
|
@ -123,7 +123,7 @@ void buildProfile(const Path & out, Packages && pkgs)
|
|||
createLinks(state, pkgDir, out, priority);
|
||||
|
||||
try {
|
||||
for (const auto & p : tokenizeString<std::vector<string>>(
|
||||
for (const auto & p : tokenizeString<std::vector<std::string>>(
|
||||
readFile(pkgDir + "/nix-support/propagated-user-env-packages"), " \n"))
|
||||
if (!done.count(p))
|
||||
postponed.insert(p);
|
||||
|
@ -161,7 +161,7 @@ void buildProfile(const Path & out, Packages && pkgs)
|
|||
|
||||
void builtinBuildenv(const BasicDerivation & drv)
|
||||
{
|
||||
auto getAttr = [&](const string & name) {
|
||||
auto getAttr = [&](const std::string & name) {
|
||||
auto i = drv.env.find(name);
|
||||
if (i == drv.env.end()) throw Error("attribute '%s' missing", name);
|
||||
return i->second;
|
@ -16,7 +16,7 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData)
|
|||
writeFile(settings.netrcFile, netrcData, 0600);
|
||||
}
|
||||
|
||||
auto getAttr = [&](const string & name) {
|
||||
auto getAttr = [&](const std::string & name) {
|
||||
auto i = drv.env.find(name);
|
||||
if (i == drv.env.end()) throw Error("attribute '%s' missing", name);
|
||||
return i->second;
|
@ -5,7 +5,7 @@ namespace nix {
|
|||
|
||||
void builtinUnpackChannel(const BasicDerivation & drv)
|
||||
{
|
||||
auto getAttr = [&](const string & name) {
|
||||
auto getAttr = [&](const std::string & name) {
|
||||
auto i = drv.env.find(name);
|
||||
if (i == drv.env.end()) throw Error("attribute '%s' missing", name);
|
||||
return i->second;
|
@@ -19,3 +19,8 @@ create table if not exists RealisationsRefs (
foreign key (referrer) references Realisations(id) on delete cascade,
foreign key (realisationReference) references Realisations(id) on delete restrict
);

-- used by QueryRealisationReferences
create index if not exists IndexRealisationsRefs on RealisationsRefs(referrer);
-- used by cascade deletion when ValidPaths is deleted
create index if not exists IndexRealisationsRefsOnOutputPath on Realisations(outputPath);
@ -31,10 +31,10 @@ std::string makeFixedOutputCA(FileIngestionMethod method, const Hash & hash)

std::string renderContentAddress(ContentAddress ca)
{
return std::visit(overloaded {
[](TextHash th) {
[](TextHash & th) {
return "text:" + th.hash.to_string(Base32, true);
},
[](FixedOutputHash fsh) {
[](FixedOutputHash & fsh) {
return makeFixedOutputCA(fsh.method, fsh.hash);
}
}, ca);

@ -43,10 +43,10 @@ std::string renderContentAddress(ContentAddress ca)

std::string renderContentAddressMethod(ContentAddressMethod cam)
{
return std::visit(overloaded {
[](TextHashMethod &th) {
[](TextHashMethod & th) {
return std::string{"text:"} + printHashType(htSHA256);
},
[](FixedOutputHashMethod &fshm) {
[](FixedOutputHashMethod & fshm) {
return "fixed:" + makeFileIngestionPrefix(fshm.fileIngestionMethod) + printHashType(fshm.hashType);
}
}, cam);
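A pattern that recurs throughout this commit: lambdas handed to std::visit via the overloaded helper now take their alternative by reference (usually const) instead of by value, so visiting no longer copies the variant's contents. A minimal standalone sketch of the idiom; TextLike and FixedLike are made-up stand-ins for the Nix types, not part of the codebase:

    #include <iostream>
    #include <string>
    #include <variant>

    // The usual 'overloaded' helper: inherit call operators from several lambdas.
    template<class... Ts> struct overloaded : Ts... { using Ts::operator()...; };
    template<class... Ts> overloaded(Ts...) -> overloaded<Ts...>;

    struct TextLike { std::string payload; };
    struct FixedLike { std::string prefix; std::string payload; };

    using Alt = std::variant<TextLike, FixedLike>;

    std::string render(const Alt & alt)
    {
        return std::visit(overloaded {
            // Taking 'const TextLike &' avoids copying the contained strings,
            // which a by-value parameter ('TextLike t') would do on every call.
            [](const TextLike & t) { return "text:" + t.payload; },
            [](const FixedLike & f) { return f.prefix + ":" + f.payload; },
        }, alt);
    }

    int main()
    {
        std::cout << render(TextLike{"abc"}) << "\n";            // text:abc
        std::cout << render(FixedLike{"fixed", "xyz"}) << "\n";  // fixed:xyz
    }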
@ -104,12 +104,12 @@ ContentAddress parseContentAddress(std::string_view rawCa) {

return std::visit(
overloaded {
[&](TextHashMethod thm) {
[&](TextHashMethod & thm) {
return ContentAddress(TextHash {
.hash = Hash::parseNonSRIUnprefixed(rest, htSHA256)
});
},
[&](FixedOutputHashMethod fohMethod) {
[&](FixedOutputHashMethod & fohMethod) {
return ContentAddress(FixedOutputHash {
.method = fohMethod.fileIngestionMethod,
.hash = Hash::parseNonSRIUnprefixed(rest, std::move(fohMethod.hashType)),

@ -120,8 +120,10 @@ ContentAddress parseContentAddress(std::string_view rawCa) {

ContentAddressMethod parseContentAddressMethod(std::string_view caMethod)
{
std::string_view asPrefix {std::string{caMethod} + ":"};
return parseContentAddressMethodPrefix(asPrefix);
std::string asPrefix = std::string{caMethod} + ":";
// parseContentAddressMethodPrefix takes its argument by reference
std::string_view asPrefixView = asPrefix;
return parseContentAddressMethodPrefix(asPrefixView);
}
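The rewritten parseContentAddressMethod avoids binding a std::string_view to a temporary std::string, which would leave the view dangling once the temporary is destroyed at the end of the full expression. A small self-contained sketch of the pitfall and the fix; firstComponent is a hypothetical helper used only for illustration:

    #include <iostream>
    #include <string>
    #include <string_view>

    // Hypothetical stand-in for parseContentAddressMethodPrefix(): it only
    // reads from the view, so the caller must keep the backing string alive.
    static std::string_view firstComponent(std::string_view prefixed)
    {
        return prefixed.substr(0, prefixed.find(':'));
    }

    int main()
    {
        std::string_view caMethod = "fixed";

        // Unsafe pattern (the old code): the temporary std::string backing
        // 'dangling' is destroyed as soon as this statement finishes.
        // std::string_view dangling {std::string{caMethod} + ":"};

        // Safe pattern (the new code): name the owning string first, then
        // take a view of it for the call.
        std::string asPrefix = std::string{caMethod} + ":";
        std::string_view asPrefixView = asPrefix;
        std::cout << firstComponent(asPrefixView) << "\n"; // prints "fixed"
    }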
std::optional<ContentAddress> parseContentAddressOpt(std::string_view rawCaOpt)
|
||||
|
@ -137,10 +139,10 @@ std::string renderContentAddress(std::optional<ContentAddress> ca)
|
|||
Hash getContentAddressHash(const ContentAddress & ca)
|
||||
{
|
||||
return std::visit(overloaded {
|
||||
[](TextHash th) {
|
||||
[](const TextHash & th) {
|
||||
return th.hash;
|
||||
},
|
||||
[](FixedOutputHash fsh) {
|
||||
[](const FixedOutputHash & fsh) {
|
||||
return fsh.hash;
|
||||
}
|
||||
}, ca);
|
||||
|
|
|
@ -4,7 +4,6 @@
|
|||
#include "store-api.hh"
|
||||
#include "path-with-outputs.hh"
|
||||
#include "finally.hh"
|
||||
#include "affinity.hh"
|
||||
#include "archive.hh"
|
||||
#include "derivations.hh"
|
||||
#include "args.hh"
|
||||
|
@ -70,7 +69,7 @@ struct TunnelLogger : public Logger
|
|||
|
||||
StringSink buf;
|
||||
buf << STDERR_NEXT << (fs.s + "\n");
|
||||
enqueueMsg(*buf.s);
|
||||
enqueueMsg(buf.s);
|
||||
}
|
||||
|
||||
void logEI(const ErrorInfo & ei) override
|
||||
|
@ -82,7 +81,7 @@ struct TunnelLogger : public Logger
|
|||
|
||||
StringSink buf;
|
||||
buf << STDERR_NEXT << oss.str();
|
||||
enqueueMsg(*buf.s);
|
||||
enqueueMsg(buf.s);
|
||||
}
|
||||
|
||||
/* startWork() means that we're starting an operation for which we
|
||||
|
@ -130,7 +129,7 @@ struct TunnelLogger : public Logger
|
|||
|
||||
StringSink buf;
|
||||
buf << STDERR_START_ACTIVITY << act << lvl << type << s << fields << parent;
|
||||
enqueueMsg(*buf.s);
|
||||
enqueueMsg(buf.s);
|
||||
}
|
||||
|
||||
void stopActivity(ActivityId act) override
|
||||
|
@ -138,7 +137,7 @@ struct TunnelLogger : public Logger
|
|||
if (GET_PROTOCOL_MINOR(clientVersion) < 20) return;
|
||||
StringSink buf;
|
||||
buf << STDERR_STOP_ACTIVITY << act;
|
||||
enqueueMsg(*buf.s);
|
||||
enqueueMsg(buf.s);
|
||||
}
|
||||
|
||||
void result(ActivityId act, ResultType type, const Fields & fields) override
|
||||
|
@ -146,7 +145,7 @@ struct TunnelLogger : public Logger
|
|||
if (GET_PROTOCOL_MINOR(clientVersion) < 20) return;
|
||||
StringSink buf;
|
||||
buf << STDERR_RESULT << act << type << fields;
|
||||
enqueueMsg(*buf.s);
|
||||
enqueueMsg(buf.s);
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -230,11 +229,12 @@ struct ClientSettings
else if (name == settings.experimentalFeatures.name) {
// We don’t want to forward the experimental features to
// the daemon, as that could cause some pretty weird stuff
if (tokenizeString<Strings>(value) != settings.experimentalFeatures.get())
if (parseFeatures(tokenizeString<StringSet>(value)) != settings.experimentalFeatures.get())
debug("Ignoring the client-specified experimental features");
}
else if (trusted
|| name == settings.buildTimeout.name
|| name == settings.buildRepeat.name
|| name == "connect-timeout"
|| (name == "builders" && value == ""))
settings.set(name, value);
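Comparing parseFeatures(tokenizeString<StringSet>(value)) against the daemon's own setting makes the check insensitive to the order (and duplicates) of the client-supplied names, unlike the old comparison of raw token lists. A sketch of that difference, with parseFeature/parseFeatures as hypothetical stand-ins for the real parser:

    #include <iostream>
    #include <optional>
    #include <set>
    #include <string>
    #include <vector>

    // Hypothetical stand-ins: unknown names are dropped, and the result is an
    // order-insensitive set of parsed feature values.
    enum class Feature { CaDerivations, Flakes };

    static std::optional<Feature> parseFeature(const std::string & name)
    {
        if (name == "ca-derivations") return Feature::CaDerivations;
        if (name == "flakes") return Feature::Flakes;
        return std::nullopt;
    }

    static std::set<Feature> parseFeatures(const std::vector<std::string> & names)
    {
        std::set<Feature> res;
        for (auto & n : names)
            if (auto f = parseFeature(n)) res.insert(*f);
        return res;
    }

    int main()
    {
        std::vector<std::string> client { "flakes", "ca-derivations" };
        std::vector<std::string> daemon { "ca-derivations", "flakes" };

        // Comparing raw token lists is order-sensitive: prints "differ".
        std::cout << (client == daemon ? "same" : "differ") << "\n";

        // Comparing parsed sets is not: prints "same".
        std::cout << (parseFeatures(client) == parseFeatures(daemon) ? "same" : "differ") << "\n";
    }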
@ -395,16 +395,14 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
|
|||
FramedSource source(from);
|
||||
// TODO this is essentially RemoteStore::addCAToStore. Move it up to Store.
|
||||
return std::visit(overloaded {
|
||||
[&](TextHashMethod &_) {
|
||||
[&](TextHashMethod &) {
|
||||
// We could stream this by changing Store
|
||||
std::string contents = source.drain();
|
||||
auto path = store->addTextToStore(name, contents, refs, repair);
|
||||
return store->queryPathInfo(path);
|
||||
},
|
||||
[&](FixedOutputHashMethod &fohm) {
|
||||
if (!refs.empty())
|
||||
throw UnimplementedError("cannot yet have refs with flat or nar-hashed data");
|
||||
auto path = store->addToStoreFromDump(source, name, fohm.fileIngestionMethod, fohm.hashType, repair);
|
||||
[&](FixedOutputHashMethod & fohm) {
|
||||
auto path = store->addToStoreFromDump(source, name, fohm.fileIngestionMethod, fohm.hashType, repair, refs);
|
||||
return store->queryPathInfo(path);
|
||||
},
|
||||
}, contentAddressMethod);
|
||||
|
@ -432,25 +430,30 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
hashAlgo = parseHashType(hashAlgoRaw);
}

StringSink saved;
TeeSource savedNARSource(from, saved);
RetrieveRegularNARSink savedRegular { saved };

if (method == FileIngestionMethod::Recursive) {
/* Get the entire NAR dump from the client and save it to
a string so that we can pass it to
addToStoreFromDump(). */
ParseSink sink; /* null sink; just parse the NAR */
parseDump(sink, savedNARSource);
} else
parseDump(savedRegular, from);

auto dumpSource = sinkToSource([&](Sink & saved) {
if (method == FileIngestionMethod::Recursive) {
/* We parse the NAR dump through into `saved` unmodified,
so why all this extra work? We still parse the NAR so
that we aren't sending arbitrary data to `saved`
unwittingly`, and we know when the NAR ends so we don't
consume the rest of `from` and can't parse another
command. (We don't trust `addToStoreFromDump` to not
eagerly consume the entire stream it's given, past the
length of the Nar. */
TeeSource savedNARSource(from, saved);
ParseSink sink; /* null sink; just parse the NAR */
parseDump(sink, savedNARSource);
} else {
/* Incrementally parse the NAR file, stripping the
metadata, and streaming the sole file we expect into
`saved`. */
RetrieveRegularNARSink savedRegular { saved };
parseDump(savedRegular, from);
if (!savedRegular.regular) throw Error("regular file expected");
}
});
logger->startWork();
if (!savedRegular.regular) throw Error("regular file expected");

// FIXME: try to stream directly from `from`.
StringSource dumpSource { *saved.s };
auto path = store->addToStoreFromDump(dumpSource, baseName, method, hashAlgo);
auto path = store->addToStoreFromDump(*dumpSource, baseName, method, hashAlgo);
logger->stopWork();

to << store->printStorePath(path);
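The dumpSource lambda above forwards the upload byte-for-byte while still parsing its framing, so the daemon knows exactly where the NAR ends and does not over-read the connection. A toy sketch of the same idea, using a made-up length-prefixed frame instead of the NAR format:

    #include <iostream>
    #include <sstream>
    #include <string>

    // Toy stand-in for the NAR case: a "<len>:<payload>" frame. We copy the
    // payload through to 'saved' unmodified, but because we parse the framing
    // ourselves we know exactly where it ends and leave the rest of the input
    // untouched for the next protocol command.
    static std::string readFramed(std::istream & from, std::string & saved)
    {
        size_t len;
        char colon;
        from >> len >> colon;            // parse the frame header
        std::string payload(len, '\0');
        from.read(payload.data(), len);  // consume exactly 'len' bytes
        saved += payload;                // tee the bytes we just validated
        return payload;
    }

    int main()
    {
        std::istringstream from("5:hello<next-command>");
        std::string saved;
        readFramed(from, saved);
        std::string rest;
        std::getline(from, rest);
        std::cout << saved << "\n"; // hello
        std::cout << rest << "\n";  // <next-command> is still available
    }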
@ -465,17 +468,19 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
|
|||
dontCheckSigs = false;
|
||||
|
||||
logger->startWork();
|
||||
FramedSource source(from);
|
||||
store->addMultipleToStore(source,
|
||||
RepairFlag{repair},
|
||||
dontCheckSigs ? NoCheckSigs : CheckSigs);
|
||||
{
|
||||
FramedSource source(from);
|
||||
store->addMultipleToStore(source,
|
||||
RepairFlag{repair},
|
||||
dontCheckSigs ? NoCheckSigs : CheckSigs);
|
||||
}
|
||||
logger->stopWork();
|
||||
break;
|
||||
}
|
||||
|
||||
case wopAddTextToStore: {
|
||||
string suffix = readString(from);
|
||||
string s = readString(from);
|
||||
std::string suffix = readString(from);
|
||||
std::string s = readString(from);
|
||||
auto refs = worker_proto::read(*store, from, Phantom<StorePathSet> {});
|
||||
logger->startWork();
|
||||
auto path = store->addTextToStore(suffix, s, refs, NoRepair);
|
||||
|
@ -624,9 +629,9 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
|
|||
break;
|
||||
}
|
||||
|
||||
// Obsolete.
|
||||
case wopSyncWithGC: {
|
||||
logger->startWork();
|
||||
store->syncWithGC();
|
||||
logger->stopWork();
|
||||
to << 1;
|
||||
break;
|
||||
|
@ -693,8 +698,8 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
|
|||
if (GET_PROTOCOL_MINOR(clientVersion) >= 12) {
|
||||
unsigned int n = readInt(from);
|
||||
for (unsigned int i = 0; i < n; i++) {
|
||||
string name = readString(from);
|
||||
string value = readString(from);
|
||||
auto name = readString(from);
|
||||
auto value = readString(from);
|
||||
clientSettings.overrides.emplace(name, value);
|
||||
}
|
||||
}
|
||||
|
@ -849,14 +854,14 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
|
|||
|
||||
else {
|
||||
std::unique_ptr<Source> source;
|
||||
StringSink saved;
|
||||
if (GET_PROTOCOL_MINOR(clientVersion) >= 21)
|
||||
source = std::make_unique<TunnelSource>(from, to);
|
||||
else {
|
||||
StringSink saved;
|
||||
TeeSource tee { from, saved };
|
||||
ParseSink ether;
|
||||
parseDump(ether, tee);
|
||||
source = std::make_unique<StringSource>(std::move(*saved.s));
|
||||
source = std::make_unique<StringSource>(saved.s);
|
||||
}
|
||||
|
||||
logger->startWork();
|
||||
|
@ -917,6 +922,22 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
|
|||
break;
|
||||
}
|
||||
|
||||
case wopAddBuildLog: {
|
||||
StorePath path{readString(from)};
|
||||
logger->startWork();
|
||||
if (!trusted)
|
||||
throw Error("you are not privileged to add logs");
|
||||
{
|
||||
FramedSource source(from);
|
||||
StringSink sink;
|
||||
source.drainInto(sink);
|
||||
store->addBuildLog(path, sink.s);
|
||||
}
|
||||
logger->stopWork();
|
||||
to << 1;
|
||||
break;
|
||||
}
|
||||
|
||||
default:
|
||||
throw Error("invalid operation %1%", op);
|
||||
}
|
||||
|
@ -952,15 +973,19 @@ void processConnection(
|
|||
|
||||
Finally finally([&]() {
|
||||
_isInterrupted = false;
|
||||
prevLogger->log(lvlDebug, fmt("%d operations", opCount));
|
||||
printMsgUsing(prevLogger, lvlDebug, "%d operations", opCount);
|
||||
});
|
||||
|
||||
if (GET_PROTOCOL_MINOR(clientVersion) >= 14 && readInt(from)) {
|
||||
auto affinity = readInt(from);
|
||||
setAffinityTo(affinity);
|
||||
// Obsolete CPU affinity.
|
||||
readInt(from);
|
||||
}
|
||||
|
||||
readInt(from); // obsolete reserveSpace
|
||||
if (GET_PROTOCOL_MINOR(clientVersion) >= 11)
|
||||
readInt(from); // obsolete reserveSpace
|
||||
|
||||
if (GET_PROTOCOL_MINOR(clientVersion) >= 33)
|
||||
to << nixVersion;
|
||||
|
||||
/* Send startup error messages to the client. */
|
||||
tunnelLogger->startWork();
|
||||
|
@ -985,6 +1010,8 @@ void processConnection(
|
|||
break;
|
||||
}
|
||||
|
||||
printMsgUsing(prevLogger, lvlDebug, "received daemon op %d", op);
|
||||
|
||||
opCount++;
|
||||
|
||||
try {
|
||||
|
|
|
@ -4,24 +4,25 @@
|
|||
#include "util.hh"
|
||||
#include "worker-protocol.hh"
|
||||
#include "fs-accessor.hh"
|
||||
#include <boost/container/small_vector.hpp>
|
||||
|
||||
namespace nix {
|
||||
|
||||
std::optional<StorePath> DerivationOutput::path(const Store & store, std::string_view drvName, std::string_view outputName) const
|
||||
{
|
||||
return std::visit(overloaded {
|
||||
[](DerivationOutputInputAddressed doi) -> std::optional<StorePath> {
|
||||
[](const DerivationOutputInputAddressed & doi) -> std::optional<StorePath> {
|
||||
return { doi.path };
|
||||
},
|
||||
[&](DerivationOutputCAFixed dof) -> std::optional<StorePath> {
|
||||
[&](const DerivationOutputCAFixed & dof) -> std::optional<StorePath> {
|
||||
return {
|
||||
dof.path(store, drvName, outputName)
|
||||
};
|
||||
},
|
||||
[](DerivationOutputCAFloating dof) -> std::optional<StorePath> {
|
||||
[](const DerivationOutputCAFloating & dof) -> std::optional<StorePath> {
|
||||
return std::nullopt;
|
||||
},
|
||||
[](DerivationOutputDeferred) -> std::optional<StorePath> {
|
||||
[](const DerivationOutputDeferred &) -> std::optional<StorePath> {
|
||||
return std::nullopt;
|
||||
},
|
||||
}, output);
|
||||
|
@ -81,7 +82,7 @@ bool derivationIsImpure(DerivationType dt) {
|
|||
|
||||
bool BasicDerivation::isBuiltin() const
|
||||
{
|
||||
return string(builder, 0, 8) == "builtin:";
|
||||
return builder.substr(0, 8) == "builtin:";
|
||||
}
|
||||
|
||||
|
||||
|
@ -103,19 +104,19 @@ StorePath writeDerivation(Store & store,


/* Read string `s' from stream `str'. */
static void expect(std::istream & str, const string & s)
static void expect(std::istream & str, std::string_view s)
{
char s2[s.size()];
str.read(s2, s.size());
if (string(s2, s.size()) != s)
if (std::string(s2, s.size()) != s)
throw FormatError("expected string '%1%'", s);
}


/* Read a C-style string from stream `str'. */
static string parseString(std::istream & str)
static std::string parseString(std::istream & str)
{
string res;
std::string res;
expect(str, "\"");
int c;
while ((c = str.get()) != '"')
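expect() and parseString() above form a tiny recursive-descent reader for the ATerm-style derivation syntax. A self-contained sketch of how they are used, with FormatError replaced by std::runtime_error, the variable-length array replaced by std::vector, and escape handling omitted:

    #include <iostream>
    #include <sstream>
    #include <stdexcept>
    #include <string>
    #include <string_view>
    #include <vector>

    static void expect(std::istream & str, std::string_view s)
    {
        std::vector<char> s2(s.size());
        str.read(s2.data(), s.size());
        if (std::string(s2.data(), s.size()) != s)
            throw std::runtime_error("expected string '" + std::string(s) + "'");
    }

    static std::string parseString(std::istream & str)
    {
        std::string res;
        expect(str, "\"");
        int c;
        while ((c = str.get()) != '"')
            res += (char) c;   // (backslash escapes omitted in this sketch)
        return res;
    }

    int main()
    {
        std::istringstream str("(\"name\",\"value\")");
        expect(str, "(");
        auto name = parseString(str);
        expect(str, ",");
        auto value = parseString(str);
        expect(str, ")");
        std::cout << name << " = " << value << "\n"; // name = value
    }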
@ -171,7 +172,7 @@ static DerivationOutput parseDerivationOutput(const Store & store,
|
|||
{
|
||||
if (hashAlgo != "") {
|
||||
auto method = FileIngestionMethod::Flat;
|
||||
if (string(hashAlgo, 0, 2) == "r:") {
|
||||
if (hashAlgo.substr(0, 2) == "r:") {
|
||||
method = FileIngestionMethod::Recursive;
|
||||
hashAlgo = hashAlgo.substr(2);
|
||||
}
|
||||
|
@ -187,7 +188,7 @@ static DerivationOutput parseDerivationOutput(const Store & store,
|
|||
},
|
||||
};
|
||||
} else {
|
||||
settings.requireExperimentalFeature("ca-derivations");
|
||||
settings.requireExperimentalFeature(Xp::CaDerivations);
|
||||
assert(pathS == "");
|
||||
return DerivationOutput {
|
||||
.output = DerivationOutputCAFloating {
|
||||
|
@ -259,8 +260,8 @@ Derivation parseDerivation(const Store & store, std::string && s, std::string_vi
|
|||
/* Parse the environment variables. */
|
||||
expect(str, ",[");
|
||||
while (!endOfList(str)) {
|
||||
expect(str, "("); string name = parseString(str);
|
||||
expect(str, ","); string value = parseString(str);
|
||||
expect(str, "("); auto name = parseString(str);
|
||||
expect(str, ","); auto value = parseString(str);
|
||||
expect(str, ")");
|
||||
drv.env[name] = value;
|
||||
}
|
||||
|
@ -270,9 +271,11 @@ Derivation parseDerivation(const Store & store, std::string && s, std::string_vi
}


static void printString(string & res, std::string_view s)
static void printString(std::string & res, std::string_view s)
{
char buf[s.size() * 2 + 2];
boost::container::small_vector<char, 64 * 1024> buffer;
buffer.reserve(s.size() * 2 + 2);
char * buf = buffer.data();
char * p = buf;
*p++ = '"';
for (auto c : s)
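The reserve of s.size() * 2 + 2 works because each input byte expands to at most two output bytes, plus the two surrounding quotes. A minimal standalone sketch of the quoting logic using a plain std::string (the real printString also maps newline, carriage return and tab to escape sequences):

    #include <iostream>
    #include <string>
    #include <string_view>

    static void printString(std::string & res, std::string_view s)
    {
        std::string buf;
        buf.reserve(s.size() * 2 + 2); // worst case: every byte escaped, plus quotes
        buf += '"';
        for (char c : s) {
            if (c == '"' || c == '\\') buf += '\\';
            buf += c;
        }
        buf += '"';
        res += buf;
    }

    int main()
    {
        std::string out;
        printString(out, "say \"hi\"");
        std::cout << out << "\n"; // "say \"hi\""
    }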
|
@ -286,7 +289,7 @@ static void printString(string & res, std::string_view s)
|
|||
}
|
||||
|
||||
|
||||
static void printUnquotedString(string & res, std::string_view s)
|
||||
static void printUnquotedString(std::string & res, std::string_view s)
|
||||
{
|
||||
res += '"';
|
||||
res.append(s);
|
||||
|
@ -295,7 +298,7 @@ static void printUnquotedString(string & res, std::string_view s)
|
|||
|
||||
|
||||
template<class ForwardIterator>
|
||||
static void printStrings(string & res, ForwardIterator i, ForwardIterator j)
|
||||
static void printStrings(std::string & res, ForwardIterator i, ForwardIterator j)
|
||||
{
|
||||
res += '[';
|
||||
bool first = true;
|
||||
|
@ -308,7 +311,7 @@ static void printStrings(string & res, ForwardIterator i, ForwardIterator j)
|
|||
|
||||
|
||||
template<class ForwardIterator>
|
||||
static void printUnquotedStrings(string & res, ForwardIterator i, ForwardIterator j)
|
||||
static void printUnquotedStrings(std::string & res, ForwardIterator i, ForwardIterator j)
|
||||
{
|
||||
res += '[';
|
||||
bool first = true;
|
||||
|
@ -320,10 +323,10 @@ static void printUnquotedStrings(string & res, ForwardIterator i, ForwardIterato
|
|||
}
|
||||
|
||||
|
||||
string Derivation::unparse(const Store & store, bool maskOutputs,
|
||||
std::string Derivation::unparse(const Store & store, bool maskOutputs,
|
||||
std::map<std::string, StringSet> * actualInputs) const
|
||||
{
|
||||
string s;
|
||||
std::string s;
|
||||
s.reserve(65536);
|
||||
s += "Derive([";
|
||||
|
||||
|
@ -332,22 +335,22 @@ string Derivation::unparse(const Store & store, bool maskOutputs,
|
|||
if (first) first = false; else s += ',';
|
||||
s += '('; printUnquotedString(s, i.first);
|
||||
std::visit(overloaded {
|
||||
[&](DerivationOutputInputAddressed doi) {
|
||||
[&](const DerivationOutputInputAddressed & doi) {
|
||||
s += ','; printUnquotedString(s, maskOutputs ? "" : store.printStorePath(doi.path));
|
||||
s += ','; printUnquotedString(s, "");
|
||||
s += ','; printUnquotedString(s, "");
|
||||
},
|
||||
[&](DerivationOutputCAFixed dof) {
|
||||
[&](const DerivationOutputCAFixed & dof) {
|
||||
s += ','; printUnquotedString(s, maskOutputs ? "" : store.printStorePath(dof.path(store, name, i.first)));
|
||||
s += ','; printUnquotedString(s, dof.hash.printMethodAlgo());
|
||||
s += ','; printUnquotedString(s, dof.hash.hash.to_string(Base16, false));
|
||||
},
|
||||
[&](DerivationOutputCAFloating dof) {
|
||||
[&](const DerivationOutputCAFloating & dof) {
|
||||
s += ','; printUnquotedString(s, "");
|
||||
s += ','; printUnquotedString(s, makeFileIngestionPrefix(dof.method) + printHashType(dof.hashType));
|
||||
s += ','; printUnquotedString(s, "");
|
||||
},
|
||||
[&](DerivationOutputDeferred) {
|
||||
[&](const DerivationOutputDeferred &) {
|
||||
s += ','; printUnquotedString(s, "");
|
||||
s += ','; printUnquotedString(s, "");
|
||||
s += ','; printUnquotedString(s, "");
|
||||
|
@ -398,7 +401,7 @@ string Derivation::unparse(const Store & store, bool maskOutputs,
|
|||
|
||||
|
||||
// FIXME: remove
|
||||
bool isDerivation(const string & fileName)
|
||||
bool isDerivation(const std::string & fileName)
|
||||
{
|
||||
return hasSuffix(fileName, drvExtension);
|
||||
}
|
||||
|
@ -420,13 +423,13 @@ DerivationType BasicDerivation::type() const
|
|||
std::optional<HashType> floatingHashType;
|
||||
for (auto & i : outputs) {
|
||||
std::visit(overloaded {
|
||||
[&](DerivationOutputInputAddressed _) {
|
||||
[&](const DerivationOutputInputAddressed &) {
|
||||
inputAddressedOutputs.insert(i.first);
|
||||
},
|
||||
[&](DerivationOutputCAFixed _) {
|
||||
[&](const DerivationOutputCAFixed &) {
|
||||
fixedCAOutputs.insert(i.first);
|
||||
},
|
||||
[&](DerivationOutputCAFloating dof) {
|
||||
[&](const DerivationOutputCAFloating & dof) {
|
||||
floatingCAOutputs.insert(i.first);
|
||||
if (!floatingHashType) {
|
||||
floatingHashType = dof.hashType;
|
||||
|
@ -435,7 +438,7 @@ DerivationType BasicDerivation::type() const
|
|||
throw Error("All floating outputs must use the same hash type");
|
||||
}
|
||||
},
|
||||
[&](DerivationOutputDeferred _) {
|
||||
[&](const DerivationOutputDeferred &) {
|
||||
deferredIAOutputs.insert(i.first);
|
||||
},
|
||||
}, i.second.output);
|
||||
|
@ -538,15 +541,15 @@ DrvHashModulo hashDerivationModulo(Store & store, const Derivation & drv, bool m
|
|||
const auto & res = pathDerivationModulo(store, i.first);
|
||||
std::visit(overloaded {
|
||||
// Regular non-CA derivation, replace derivation
|
||||
[&](Hash drvHash) {
|
||||
[&](const Hash & drvHash) {
|
||||
inputs2.insert_or_assign(drvHash.to_string(Base16, false), i.second);
|
||||
},
|
||||
[&](DeferredHash deferredHash) {
|
||||
[&](const DeferredHash & deferredHash) {
|
||||
isDeferred = true;
|
||||
inputs2.insert_or_assign(deferredHash.hash.to_string(Base16, false), i.second);
|
||||
},
|
||||
// CA derivation's output hashes
|
||||
[&](CaOutputHashes outputHashes) {
|
||||
[&](const CaOutputHashes & outputHashes) {
|
||||
std::set<std::string> justOut = { "out" };
|
||||
for (auto & output : i.second) {
|
||||
/* Put each one in with a single "out" output.. */
|
||||
|
@ -572,17 +575,17 @@ std::map<std::string, Hash> staticOutputHashes(Store & store, const Derivation &
|
|||
{
|
||||
std::map<std::string, Hash> res;
|
||||
std::visit(overloaded {
|
||||
[&](Hash drvHash) {
|
||||
[&](const Hash & drvHash) {
|
||||
for (auto & outputName : drv.outputNames()) {
|
||||
res.insert({outputName, drvHash});
|
||||
}
|
||||
},
|
||||
[&](DeferredHash deferredHash) {
|
||||
[&](const DeferredHash & deferredHash) {
|
||||
for (auto & outputName : drv.outputNames()) {
|
||||
res.insert({outputName, deferredHash.hash});
|
||||
}
|
||||
},
|
||||
[&](CaOutputHashes outputHashes) {
|
||||
[&](const CaOutputHashes & outputHashes) {
|
||||
res = outputHashes;
|
||||
},
|
||||
}, hashDerivationModulo(store, drv, true));
|
||||
|
@ -590,7 +593,7 @@ std::map<std::string, Hash> staticOutputHashes(Store & store, const Derivation &
|
|||
}
|
||||
|
||||
|
||||
bool wantOutput(const string & output, const std::set<string> & wanted)
|
||||
bool wantOutput(const std::string & output, const std::set<std::string> & wanted)
|
||||
{
|
||||
return wanted.empty() || wanted.find(output) != wanted.end();
|
||||
}
|
||||
|
@ -666,22 +669,22 @@ void writeDerivation(Sink & out, const Store & store, const BasicDerivation & dr
|
|||
for (auto & i : drv.outputs) {
|
||||
out << i.first;
|
||||
std::visit(overloaded {
|
||||
[&](DerivationOutputInputAddressed doi) {
|
||||
[&](const DerivationOutputInputAddressed & doi) {
|
||||
out << store.printStorePath(doi.path)
|
||||
<< ""
|
||||
<< "";
|
||||
},
|
||||
[&](DerivationOutputCAFixed dof) {
|
||||
[&](const DerivationOutputCAFixed & dof) {
|
||||
out << store.printStorePath(dof.path(store, drv.name, i.first))
|
||||
<< dof.hash.printMethodAlgo()
|
||||
<< dof.hash.hash.to_string(Base16, false);
|
||||
},
|
||||
[&](DerivationOutputCAFloating dof) {
|
||||
[&](const DerivationOutputCAFloating & dof) {
|
||||
out << ""
|
||||
<< (makeFileIngestionPrefix(dof.method) + printHashType(dof.hashType))
|
||||
<< "";
|
||||
},
|
||||
[&](DerivationOutputDeferred) {
|
||||
[&](const DerivationOutputDeferred &) {
|
||||
out << ""
|
||||
<< ""
|
||||
<< "";
|
||||
|
@ -696,10 +699,10 @@ void writeDerivation(Sink & out, const Store & store, const BasicDerivation & dr
|
|||
}
|
||||
|
||||
|
||||
std::string hashPlaceholder(const std::string & outputName)
|
||||
std::string hashPlaceholder(const std::string_view outputName)
|
||||
{
|
||||
// FIXME: memoize?
|
||||
return "/" + hashString(htSHA256, "nix-output:" + outputName).to_string(Base32, false);
|
||||
return "/" + hashString(htSHA256, concatStrings("nix-output:", outputName)).to_string(Base32, false);
|
||||
}
|
||||
|
||||
std::string downstreamPlaceholder(const Store & store, const StorePath & drvPath, std::string_view outputName)
|
||||
|
|
|
@ -59,21 +59,19 @@ struct DerivationOutput
|
|||
std::optional<StorePath> path(const Store & store, std::string_view drvName, std::string_view outputName) const;
|
||||
};
|
||||
|
||||
typedef std::map<string, DerivationOutput> DerivationOutputs;
|
||||
typedef std::map<std::string, DerivationOutput> DerivationOutputs;
|
||||
|
||||
/* These are analogues to the previous DerivationOutputs data type, but they
|
||||
also contains, for each output, the (optional) store path in which it would
|
||||
be written. To calculate values of these types, see the corresponding
|
||||
functions in BasicDerivation */
|
||||
typedef std::map<string, std::pair<DerivationOutput, std::optional<StorePath>>>
|
||||
typedef std::map<std::string, std::pair<DerivationOutput, std::optional<StorePath>>>
|
||||
DerivationOutputsAndOptPaths;
|
||||
|
||||
/* For inputs that are sub-derivations, we specify exactly which
|
||||
output IDs we are interested in. */
|
||||
typedef std::map<StorePath, StringSet> DerivationInputs;
|
||||
|
||||
typedef std::map<string, string> StringPairs;
|
||||
|
||||
enum struct DerivationType : uint8_t {
|
||||
InputAddressed,
|
||||
DeferredInputAddressed,
|
||||
|
@ -103,7 +101,7 @@ struct BasicDerivation
|
|||
{
|
||||
DerivationOutputs outputs; /* keyed on symbolic IDs */
|
||||
StorePathSet inputSrcs; /* inputs that are sources */
|
||||
string platform;
|
||||
std::string platform;
|
||||
Path builder;
|
||||
Strings args;
|
||||
StringPairs env;
|
||||
|
@ -164,7 +162,7 @@ StorePath writeDerivation(Store & store,
|
|||
Derivation parseDerivation(const Store & store, std::string && s, std::string_view name);
|
||||
|
||||
// FIXME: remove
|
||||
bool isDerivation(const string & fileName);
|
||||
bool isDerivation(const std::string & fileName);
|
||||
|
||||
/* Calculate the name that will be used for the store path for this
|
||||
output.
|
||||
|
@ -222,7 +220,7 @@ typedef std::map<StorePath, DrvHashModulo> DrvHashes;
|
|||
// FIXME: global, though at least thread-safe.
|
||||
extern Sync<DrvHashes> drvHashes;
|
||||
|
||||
bool wantOutput(const string & output, const std::set<string> & wanted);
|
||||
bool wantOutput(const std::string & output, const std::set<std::string> & wanted);
|
||||
|
||||
struct Source;
|
||||
struct Sink;
|
||||
|
@ -236,7 +234,7 @@ void writeDerivation(Sink & out, const Store & store, const BasicDerivation & dr
|
|||
It is used as a placeholder to allow derivations to refer to their
|
||||
own outputs without needing to use the hash of a derivation in
|
||||
itself, making the hash near-impossible to calculate. */
|
||||
std::string hashPlaceholder(const std::string & outputName);
|
||||
std::string hashPlaceholder(const std::string_view outputName);
|
||||
|
||||
/* This creates an opaque and almost certainly unique string
|
||||
deterministically from a derivation path and output name.
|
||||
|
|
|
@ -24,8 +24,8 @@ StorePathSet BuiltPath::outPaths() const
|
|||
{
|
||||
return std::visit(
|
||||
overloaded{
|
||||
[](BuiltPath::Opaque p) { return StorePathSet{p.path}; },
|
||||
[](BuiltPath::Built b) {
|
||||
[](const BuiltPath::Opaque & p) { return StorePathSet{p.path}; },
|
||||
[](const BuiltPath::Built & b) {
|
||||
StorePathSet res;
|
||||
for (auto & [_, path] : b.outputs)
|
||||
res.insert(path);
|
||||
|
@ -75,9 +75,9 @@ DerivedPath::Built DerivedPath::Built::parse(const Store & store, std::string_vi
|
|||
assert(n != s.npos);
|
||||
auto drvPath = store.parseStorePath(s.substr(0, n));
|
||||
auto outputsS = s.substr(n + 1);
|
||||
std::set<string> outputs;
|
||||
std::set<std::string> outputs;
|
||||
if (outputsS != "*")
|
||||
outputs = tokenizeString<std::set<string>>(outputsS, ",");
|
||||
outputs = tokenizeString<std::set<std::string>>(outputsS, ",");
|
||||
return {drvPath, outputs};
|
||||
}
|
||||
|
||||
|
@ -94,13 +94,13 @@ RealisedPath::Set BuiltPath::toRealisedPaths(Store & store) const
|
|||
RealisedPath::Set res;
|
||||
std::visit(
|
||||
overloaded{
|
||||
[&](BuiltPath::Opaque p) { res.insert(p.path); },
|
||||
[&](BuiltPath::Built p) {
|
||||
[&](const BuiltPath::Opaque & p) { res.insert(p.path); },
|
||||
[&](const BuiltPath::Built & p) {
|
||||
auto drvHashes =
|
||||
staticOutputHashes(store, store.readDerivation(p.drvPath));
|
||||
for (auto& [outputName, outputPath] : p.outputs) {
|
||||
if (settings.isExperimentalFeatureEnabled(
|
||||
"ca-derivations")) {
|
||||
Xp::CaDerivations)) {
|
||||
auto thisRealisation = store.queryRealisation(
|
||||
DrvOutput{drvHashes.at(outputName), outputName});
|
||||
assert(thisRealisation); // We’ve built it, so we must h
|
||||
|
|
|
@ -21,7 +21,7 @@ struct DummyStore : public virtual DummyStoreConfig, public virtual Store
|
|||
, Store(params)
|
||||
{ }
|
||||
|
||||
string getUri() override
|
||||
std::string getUri() override
|
||||
{
|
||||
return *uriSchemes().begin();
|
||||
}
|
||||
|
@ -43,15 +43,19 @@ struct DummyStore : public virtual DummyStoreConfig, public virtual Store
|
|||
RepairFlag repair, CheckSigsFlag checkSigs) override
|
||||
{ unsupported("addToStore"); }
|
||||
|
||||
StorePath addTextToStore(const string & name, const string & s,
|
||||
const StorePathSet & references, RepairFlag repair) override
|
||||
StorePath addTextToStore(
|
||||
std::string_view name,
|
||||
std::string_view s,
|
||||
const StorePathSet & references,
|
||||
RepairFlag repair) override
|
||||
{ unsupported("addTextToStore"); }
|
||||
|
||||
void narFromPath(const StorePath & path, Sink & sink) override
|
||||
{ unsupported("narFromPath"); }
|
||||
|
||||
std::optional<const Realisation> queryRealisation(const DrvOutput&) override
|
||||
{ unsupported("queryRealisation"); }
|
||||
void queryRealisationUncached(const DrvOutput &,
|
||||
Callback<std::shared_ptr<const Realisation>> callback) noexcept override
|
||||
{ callback(nullptr); }
|
||||
};
|
||||
|
||||
static RegisterStoreImplementation<DummyStore, DummyStoreConfig> regDummyStore;
|
||||
|
|
|
@ -75,20 +75,20 @@ StorePaths Store::importPaths(Source & source, CheckSigsFlag checkSigs)
|
|||
|
||||
auto references = worker_proto::read(*this, source, Phantom<StorePathSet> {});
|
||||
auto deriver = readString(source);
|
||||
auto narHash = hashString(htSHA256, *saved.s);
|
||||
auto narHash = hashString(htSHA256, saved.s);
|
||||
|
||||
ValidPathInfo info { path, narHash };
|
||||
if (deriver != "")
|
||||
info.deriver = parseStorePath(deriver);
|
||||
info.references = references;
|
||||
info.narSize = saved.s->size();
|
||||
info.narSize = saved.s.size();
|
||||
|
||||
// Ignore optional legacy signature.
|
||||
if (readInt(source) == 1)
|
||||
readString(source);
|
||||
|
||||
// Can't use underlying source, which would have been exhausted
|
||||
auto source = StringSource { *saved.s };
|
||||
auto source = StringSource(saved.s);
|
||||
addToStore(info, source, NoRepair, checkSigs);
|
||||
|
||||
res.push_back(info.path);
|
||||
|
|
|
@ -33,12 +33,12 @@ FileTransferSettings fileTransferSettings;

static GlobalConfig::Register rFileTransferSettings(&fileTransferSettings);

std::string resolveUri(const std::string & uri)
std::string resolveUri(std::string_view uri)
{
if (uri.compare(0, 8, "channel:") == 0)
return "https://nixos.org/channels/" + std::string(uri, 8) + "/nixexprs.tar.xz";
return "https://nixos.org/channels/" + std::string(uri.substr(8)) + "/nixexprs.tar.xz";
else
return uri;
return std::string(uri);
}

struct curlFileTransfer : public FileTransfer
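resolveUri() rewrites the deprecated channel: scheme to the corresponding nixos.org tarball URL and passes everything else through unchanged. The same logic reproduced standalone so the mapping can be tried directly (the channel name below is just an example):

    #include <iostream>
    #include <string>
    #include <string_view>

    static std::string resolveUri(std::string_view uri)
    {
        if (uri.compare(0, 8, "channel:") == 0)
            return "https://nixos.org/channels/" + std::string(uri.substr(8)) + "/nixexprs.tar.xz";
        else
            return std::string(uri);
    }

    int main()
    {
        std::cout << resolveUri("channel:nixos-21.05") << "\n";
        // https://nixos.org/channels/nixos-21.05/nixexprs.tar.xz
        std::cout << resolveUri("https://example.org/foo.tar.xz") << "\n";
        // passed through unchanged
    }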
|
@ -106,7 +106,7 @@ struct curlFileTransfer : public FileTransfer
|
|||
this->request.dataCallback(data);
|
||||
}
|
||||
} else
|
||||
this->result.data->append(data);
|
||||
this->result.data.append(data);
|
||||
})
|
||||
{
|
||||
if (!request.expectedETag.empty())
|
||||
|
@ -128,7 +128,7 @@ struct curlFileTransfer : public FileTransfer
|
|||
if (requestHeaders) curl_slist_free_all(requestHeaders);
|
||||
try {
|
||||
if (!done)
|
||||
fail(FileTransferError(Interrupted, nullptr, "download of '%s' was interrupted", request.uri));
|
||||
fail(FileTransferError(Interrupted, {}, "download of '%s' was interrupted", request.uri));
|
||||
} catch (...) {
|
||||
ignoreException();
|
||||
}
|
||||
|
@ -195,17 +195,17 @@ struct curlFileTransfer : public FileTransfer
|
|||
std::smatch match;
|
||||
if (std::regex_match(line, match, statusLine)) {
|
||||
result.etag = "";
|
||||
result.data = std::make_shared<std::string>();
|
||||
result.data.clear();
|
||||
result.bodySize = 0;
|
||||
statusMsg = trim(match[1]);
|
||||
statusMsg = trim((std::string &) match[1]);
|
||||
acceptRanges = false;
|
||||
encoding = "";
|
||||
} else {
|
||||
auto i = line.find(':');
|
||||
if (i != string::npos) {
|
||||
string name = toLower(trim(string(line, 0, i)));
|
||||
if (i != std::string::npos) {
|
||||
std::string name = toLower(trim(line.substr(0, i)));
|
||||
if (name == "etag") {
|
||||
result.etag = trim(string(line, i + 1));
|
||||
result.etag = trim(line.substr(i + 1));
|
||||
/* Hack to work around a GitHub bug: it sends
|
||||
ETags, but ignores If-None-Match. So if we get
|
||||
the expected ETag on a 200 response, then shut
|
||||
|
@ -218,8 +218,8 @@ struct curlFileTransfer : public FileTransfer
|
|||
return 0;
|
||||
}
|
||||
} else if (name == "content-encoding")
|
||||
encoding = trim(string(line, i + 1));
|
||||
else if (name == "accept-ranges" && toLower(trim(std::string(line, i + 1))) == "bytes")
|
||||
encoding = trim(line.substr(i + 1));
|
||||
else if (name == "accept-ranges" && toLower(trim(line.substr(i + 1))) == "bytes")
|
||||
acceptRanges = true;
|
||||
}
|
||||
}
|
||||
|
@ -340,7 +340,7 @@ struct curlFileTransfer : public FileTransfer
|
|||
if (writtenToSink)
|
||||
curl_easy_setopt(req, CURLOPT_RESUME_FROM_LARGE, writtenToSink);
|
||||
|
||||
result.data = std::make_shared<std::string>();
|
||||
result.data.clear();
|
||||
result.bodySize = 0;
|
||||
}
|
||||
|
||||
|
@ -434,21 +434,21 @@ struct curlFileTransfer : public FileTransfer
|
|||
|
||||
attempt++;
|
||||
|
||||
std::shared_ptr<std::string> response;
|
||||
std::optional<std::string> response;
|
||||
if (errorSink)
|
||||
response = errorSink->s;
|
||||
response = std::move(errorSink->s);
|
||||
auto exc =
|
||||
code == CURLE_ABORTED_BY_CALLBACK && _isInterrupted
|
||||
? FileTransferError(Interrupted, response, "%s of '%s' was interrupted", request.verb(), request.uri)
|
||||
? FileTransferError(Interrupted, std::move(response), "%s of '%s' was interrupted", request.verb(), request.uri)
|
||||
: httpStatus != 0
|
||||
? FileTransferError(err,
|
||||
response,
|
||||
std::move(response),
|
||||
fmt("unable to %s '%s': HTTP error %d ('%s')",
|
||||
request.verb(), request.uri, httpStatus, statusMsg)
|
||||
+ (code == CURLE_OK ? "" : fmt(" (curl error: %s)", curl_easy_strerror(code)))
|
||||
)
|
||||
: FileTransferError(err,
|
||||
response,
|
||||
std::move(response),
|
||||
fmt("unable to %s '%s': %s (%d)",
|
||||
request.verb(), request.uri, curl_easy_strerror(code), code));
|
||||
|
||||
|
@ -544,6 +544,8 @@ struct curlFileTransfer : public FileTransfer
|
|||
stopWorkerThread();
|
||||
});
|
||||
|
||||
unshareFilesystem();
|
||||
|
||||
std::map<CURL *, std::shared_ptr<TransferItem>> items;
|
||||
|
||||
bool quit = false;
|
||||
|
@ -702,8 +704,8 @@ struct curlFileTransfer : public FileTransfer
|
|||
auto s3Res = s3Helper.getObject(bucketName, key);
|
||||
FileTransferResult res;
|
||||
if (!s3Res.data)
|
||||
throw FileTransferError(NotFound, nullptr, "S3 object '%s' does not exist", request.uri);
|
||||
res.data = s3Res.data;
|
||||
throw FileTransferError(NotFound, {}, "S3 object '%s' does not exist", request.uri);
|
||||
res.data = std::move(*s3Res.data);
|
||||
callback(std::move(res));
|
||||
#else
|
||||
throw nix::Error("cannot download '%s' because Nix is not built with S3 support", request.uri);
|
||||
|
@ -716,15 +718,24 @@ struct curlFileTransfer : public FileTransfer
|
|||
}
|
||||
};
|
||||
|
||||
ref<curlFileTransfer> makeCurlFileTransfer()
|
||||
{
|
||||
return make_ref<curlFileTransfer>();
|
||||
}
|
||||
|
||||
ref<FileTransfer> getFileTransfer()
|
||||
{
|
||||
static ref<FileTransfer> fileTransfer = makeFileTransfer();
|
||||
static ref<curlFileTransfer> fileTransfer = makeCurlFileTransfer();
|
||||
|
||||
if (fileTransfer->state_.lock()->quit)
|
||||
fileTransfer = makeCurlFileTransfer();
|
||||
|
||||
return fileTransfer;
|
||||
}
|
||||
|
||||
ref<FileTransfer> makeFileTransfer()
|
||||
{
|
||||
return make_ref<curlFileTransfer>();
|
||||
return makeCurlFileTransfer();
|
||||
}
|
||||
|
||||
std::future<FileTransferResult> FileTransfer::enqueueFileTransfer(const FileTransferRequest & request)
|
||||
|
@ -848,25 +859,25 @@ void FileTransfer::download(FileTransferRequest && request, Sink & sink)
|
|||
}
|
||||
|
||||
template<typename... Args>
|
||||
FileTransferError::FileTransferError(FileTransfer::Error error, std::shared_ptr<string> response, const Args & ... args)
|
||||
FileTransferError::FileTransferError(FileTransfer::Error error, std::optional<std::string> response, const Args & ... args)
|
||||
: Error(args...), error(error), response(response)
|
||||
{
|
||||
const auto hf = hintfmt(args...);
|
||||
// FIXME: Due to https://github.com/NixOS/nix/issues/3841 we don't know how
|
||||
// to print different messages for different verbosity levels. For now
|
||||
// we add some heuristics for detecting when we want to show the response.
|
||||
if (response && (response->size() < 1024 || response->find("<html>") != string::npos))
|
||||
if (response && (response->size() < 1024 || response->find("<html>") != std::string::npos))
|
||||
err.msg = hintfmt("%1%\n\nresponse body:\n\n%2%", normaltxt(hf.str()), chomp(*response));
|
||||
else
|
||||
err.msg = hf;
|
||||
}
|
||||
|
||||
bool isUri(const string & s)
|
||||
bool isUri(std::string_view s)
|
||||
{
|
||||
if (s.compare(0, 8, "channel:") == 0) return true;
|
||||
size_t pos = s.find("://");
|
||||
if (pos == string::npos) return false;
|
||||
string scheme(s, 0, pos);
|
||||
if (pos == std::string::npos) return false;
|
||||
std::string scheme(s, 0, pos);
|
||||
return scheme == "http" || scheme == "https" || scheme == "file" || scheme == "channel" || scheme == "git" || scheme == "s3" || scheme == "ssh";
|
||||
}
|
||||
|
||||
|
|
|
@ -59,7 +59,7 @@ struct FileTransferRequest
|
|||
unsigned int baseRetryTimeMs = 250;
|
||||
ActivityId parentAct;
|
||||
bool decompress = true;
|
||||
std::shared_ptr<std::string> data;
|
||||
std::optional<std::string> data;
|
||||
std::string mimeType;
|
||||
std::function<void(std::string_view data)> dataCallback;
|
||||
|
||||
|
@ -77,7 +77,7 @@ struct FileTransferResult
|
|||
bool cached = false;
|
||||
std::string etag;
|
||||
std::string effectiveUri;
|
||||
std::shared_ptr<std::string> data;
|
||||
std::string data;
|
||||
uint64_t bodySize = 0;
|
||||
};
|
||||
|
||||
|
@ -119,17 +119,17 @@ class FileTransferError : public Error
|
|||
{
|
||||
public:
|
||||
FileTransfer::Error error;
|
||||
std::shared_ptr<string> response; // intentionally optional
|
||||
std::optional<std::string> response; // intentionally optional
|
||||
|
||||
template<typename... Args>
|
||||
FileTransferError(FileTransfer::Error error, std::shared_ptr<string> response, const Args & ... args);
|
||||
FileTransferError(FileTransfer::Error error, std::optional<std::string> response, const Args & ... args);
|
||||
|
||||
virtual const char* sname() const override { return "FileTransferError"; }
|
||||
};
|
||||
|
||||
bool isUri(const string & s);
|
||||
bool isUri(std::string_view s);
|
||||
|
||||
/* Resolve deprecated 'channel:<foo>' URLs. */
|
||||
std::string resolveUri(const std::string & uri);
|
||||
std::string resolveUri(std::string_view uri);
|
||||
|
||||
}
|
||||
|
|
File diff suppressed because it is too large
|
@ -100,7 +100,7 @@ std::vector<Path> getUserConfigFiles()
|
|||
// Use the paths specified in NIX_USER_CONF_FILES if it has been defined
|
||||
auto nixConfFiles = getEnv("NIX_USER_CONF_FILES");
|
||||
if (nixConfFiles.has_value()) {
|
||||
return tokenizeString<std::vector<string>>(nixConfFiles.value(), ":");
|
||||
return tokenizeString<std::vector<std::string>>(nixConfFiles.value(), ":");
|
||||
}
|
||||
|
||||
// Use the paths specified by the XDG spec
|
||||
|
@ -122,7 +122,7 @@ StringSet Settings::getDefaultSystemFeatures()
|
|||
/* For backwards compatibility, accept some "features" that are
|
||||
used in Nixpkgs to route builds to certain machines but don't
|
||||
actually require anything special on the machines. */
|
||||
StringSet features{"nixos-test", "benchmark", "big-parallel", "recursive-nix"};
|
||||
StringSet features{"nixos-test", "benchmark", "big-parallel"};
|
||||
|
||||
#if __linux__
|
||||
if (access("/dev/kvm", R_OK | W_OK) == 0)
|
||||
|
@ -148,7 +148,8 @@ StringSet Settings::getDefaultExtraPlatforms()
|
|||
// machines. Note that we can’t force processes from executing
|
||||
// x86_64 in aarch64 environments or vice versa since they can
|
||||
// always exec with their own binary preferences.
|
||||
if (pathExists("/Library/Apple/System/Library/LaunchDaemons/com.apple.oahd.plist")) {
|
||||
if (pathExists("/Library/Apple/System/Library/LaunchDaemons/com.apple.oahd.plist") ||
|
||||
pathExists("/System/Library/LaunchDaemons/com.apple.oahd.plist")) {
|
||||
if (std::string{SYSTEM} == "x86_64-darwin")
|
||||
extraPlatforms.insert("aarch64-darwin");
|
||||
else if (std::string{SYSTEM} == "aarch64-darwin")
|
||||
|
@ -159,21 +160,16 @@ StringSet Settings::getDefaultExtraPlatforms()
return extraPlatforms;
}

bool Settings::isExperimentalFeatureEnabled(const std::string & name)
bool Settings::isExperimentalFeatureEnabled(const ExperimentalFeature & feature)
{
auto & f = experimentalFeatures.get();
return std::find(f.begin(), f.end(), name) != f.end();
return std::find(f.begin(), f.end(), feature) != f.end();
}

MissingExperimentalFeature::MissingExperimentalFeature(std::string feature)
: Error("experimental Nix feature '%1%' is disabled; use '--extra-experimental-features %1%' to override", feature)
, missingFeature(feature)
{}

void Settings::requireExperimentalFeature(const std::string & name)
void Settings::requireExperimentalFeature(const ExperimentalFeature & feature)
{
if (!isExperimentalFeatureEnabled(name))
throw MissingExperimentalFeature(name);
if (!isExperimentalFeatureEnabled(feature))
throw MissingExperimentalFeature(feature);
}
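Experimental features are now identified by an enum (the Xp:: values) rather than free-form strings, so a misspelled feature name can no longer silently pass or fail a check. A miniature sketch of the new shape of the API; MiniSettings and its members are made-up stand-ins, not the real Settings class:

    #include <algorithm>
    #include <iostream>
    #include <set>
    #include <stdexcept>

    enum class ExperimentalFeature { CaDerivations, Flakes, NixCommand };

    struct MiniSettings
    {
        std::set<ExperimentalFeature> experimentalFeatures;

        bool isExperimentalFeatureEnabled(const ExperimentalFeature & feature) const
        {
            auto & f = experimentalFeatures;
            return std::find(f.begin(), f.end(), feature) != f.end();
        }

        void requireExperimentalFeature(const ExperimentalFeature & feature) const
        {
            if (!isExperimentalFeatureEnabled(feature))
                throw std::runtime_error("experimental feature is disabled");
        }
    };

    int main()
    {
        MiniSettings settings;
        settings.experimentalFeatures.insert(ExperimentalFeature::CaDerivations);
        std::cout << settings.isExperimentalFeatureEnabled(ExperimentalFeature::CaDerivations) << "\n"; // 1
        std::cout << settings.isExperimentalFeatureEnabled(ExperimentalFeature::Flakes) << "\n";        // 0
    }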
bool Settings::isWSL1()
|
||||
|
@ -185,7 +181,7 @@ bool Settings::isWSL1()
|
|||
return hasSuffix(utsbuf.release, "-Microsoft");
|
||||
}
|
||||
|
||||
const string nixVersion = PACKAGE_VERSION;
|
||||
const std::string nixVersion = PACKAGE_VERSION;
|
||||
|
||||
NLOHMANN_JSON_SERIALIZE_ENUM(SandboxMode, {
|
||||
{SandboxMode::smEnabled, true},
|
||||
|
|
|
@ -3,6 +3,7 @@
|
|||
#include "types.hh"
|
||||
#include "config.hh"
|
||||
#include "util.hh"
|
||||
#include "experimental-features.hh"
|
||||
|
||||
#include <map>
|
||||
#include <limits>
|
||||
|
@ -20,7 +21,7 @@ struct MaxBuildJobsSetting : public BaseSetting<unsigned int>
|
|||
const std::string & name,
|
||||
const std::string & description,
|
||||
const std::set<std::string> & aliases = {})
|
||||
: BaseSetting<unsigned int>(def, name, description, aliases)
|
||||
: BaseSetting<unsigned int>(def, true, name, description, aliases)
|
||||
{
|
||||
options->addSetting(this);
|
||||
}
|
||||
|
@ -37,7 +38,7 @@ struct PluginFilesSetting : public BaseSetting<Paths>
|
|||
const std::string & name,
|
||||
const std::string & description,
|
||||
const std::set<std::string> & aliases = {})
|
||||
: BaseSetting<Paths>(def, name, description, aliases)
|
||||
: BaseSetting<Paths>(def, true, name, description, aliases)
|
||||
{
|
||||
options->addSetting(this);
|
||||
}
|
||||
|
@ -45,15 +46,6 @@ struct PluginFilesSetting : public BaseSetting<Paths>
|
|||
void set(const std::string & str, bool append = false) override;
|
||||
};
|
||||
|
||||
class MissingExperimentalFeature: public Error
|
||||
{
|
||||
public:
|
||||
std::string missingFeature;
|
||||
|
||||
MissingExperimentalFeature(std::string feature);
|
||||
virtual const char* sname() const override { return "MissingExperimentalFeature"; }
|
||||
};
|
||||
|
||||
class Settings : public Config {
|
||||
|
||||
unsigned int getDefaultCores();
|
||||
|
@ -121,7 +113,7 @@ public:
|
|||
bool verboseBuild = true;
|
||||
|
||||
Setting<size_t> logLines{this, 10, "log-lines",
|
||||
"If `verbose-build` is false, the number of lines of the tail of "
|
||||
"The number of lines of the tail of "
|
||||
"the log to show if a build fails."};
|
||||
|
||||
MaxBuildJobsSetting maxBuildJobs{
|
||||
|
@ -138,7 +130,9 @@ public:
|
|||
{"build-max-jobs"}};
|
||||
|
||||
Setting<unsigned int> buildCores{
|
||||
this, getDefaultCores(), "cores",
|
||||
this,
|
||||
getDefaultCores(),
|
||||
"cores",
|
||||
R"(
|
||||
Sets the value of the `NIX_BUILD_CORES` environment variable in the
|
||||
invocation of builders. Builders can use this variable at their
|
||||
|
@ -149,7 +143,7 @@ public:
|
|||
command line switch and defaults to `1`. The value `0` means that
|
||||
the builder should use all available CPU cores in the system.
|
||||
)",
|
||||
{"build-cores"}};
|
||||
{"build-cores"}, false};
|
||||
|
||||
/* Read-only mode. Don't copy stuff to the store, don't change
|
||||
the database. */
|
||||
|
@ -604,10 +598,11 @@ public:
|
|||
platform and generate incompatible code, so you may wish to
|
||||
cross-check the results of using this option against proper
|
||||
natively-built versions of your derivations.
|
||||
)"};
|
||||
)", {}, false};
|
||||
|
||||
Setting<StringSet> systemFeatures{
|
||||
this, getDefaultSystemFeatures(),
|
||||
this,
|
||||
getDefaultSystemFeatures(),
|
||||
"system-features",
|
||||
R"(
|
||||
A set of system “features” supported by this machine, e.g. `kvm`.
|
||||
|
@ -623,7 +618,7 @@ public:
|
|||
This setting by default includes `kvm` if `/dev/kvm` is accessible,
|
||||
and the pseudo-features `nixos-test`, `benchmark` and `big-parallel`
|
||||
that are used in Nixpkgs to route builds to specific machines.
|
||||
)"};
|
||||
)", {}, false};
|
||||
|
||||
Setting<Strings> substituters{
|
||||
this,
|
||||
|
@ -818,6 +813,15 @@ public:
|
|||
may be useful in certain scenarios (e.g. to spin up containers or
|
||||
set up userspace network interfaces in tests).
|
||||
)"};
|
||||
|
||||
Setting<StringSet> ignoredAcls{
|
||||
this, {"security.selinux", "system.nfs4_acl"}, "ignored-acls",
|
||||
R"(
|
||||
A list of ACLs that should be ignored, normally Nix attempts to
|
||||
remove all ACLs from files and directories in the Nix store, but
|
||||
some ACLs like `security.selinux` or `system.nfs4_acl` can't be
|
||||
removed even by root. Therefore it's best to just ignore them.
|
||||
)"};
|
||||
#endif
|
||||
|
||||
Setting<Strings> hashedMirrors{
|
||||
|
@ -938,12 +942,12 @@ public:
|
|||
value.
|
||||
)"};
|
||||
|
||||
Setting<Strings> experimentalFeatures{this, {}, "experimental-features",
|
||||
Setting<std::set<ExperimentalFeature>> experimentalFeatures{this, {}, "experimental-features",
|
||||
"Experimental Nix features to enable."};
|
||||
|
||||
bool isExperimentalFeatureEnabled(const std::string & name);
|
||||
bool isExperimentalFeatureEnabled(const ExperimentalFeature &);
|
||||
|
||||
void requireExperimentalFeature(const std::string & name);
|
||||
void requireExperimentalFeature(const ExperimentalFeature &);
|
||||
|
||||
Setting<bool> allowDirty{this, true, "allow-dirty",
|
||||
"Whether to allow dirty Git/Mercurial trees."};
|
||||
|
@ -972,6 +976,16 @@ public:
|
|||
|
||||
Setting<bool> useRegistries{this, true, "use-registries",
|
||||
"Whether to use flake registries to resolve flake references."};
|
||||
|
||||
Setting<bool> acceptFlakeConfig{this, false, "accept-flake-config",
|
||||
"Whether to accept nix configuration from a flake without prompting."};
|
||||
|
||||
Setting<std::string> commitLockFileSummary{
|
||||
this, "", "commit-lockfile-summary",
|
||||
R"(
|
||||
The commit summary to use when committing changed flake lock files. If
|
||||
empty, the summary is generated based on the action performed.
|
||||
)"};
|
||||
};
|
||||
|
||||
|
||||
|
@ -987,6 +1001,6 @@ void loadConfFile();
|
|||
// Used by the Settings constructor
|
||||
std::vector<Path> getUserConfigFiles();
|
||||
|
||||
extern const string nixVersion;
|
||||
extern const std::string nixVersion;
|
||||
|
||||
}
|
||||
|
|
|
@ -57,8 +57,8 @@ public:
|
|||
{
|
||||
// FIXME: do this lazily?
|
||||
if (auto cacheInfo = diskCache->cacheExists(cacheUri)) {
|
||||
wantMassQuery.setDefault(cacheInfo->wantMassQuery ? "true" : "false");
|
||||
priority.setDefault(fmt("%d", cacheInfo->priority));
|
||||
wantMassQuery.setDefault(cacheInfo->wantMassQuery);
|
||||
priority.setDefault(cacheInfo->priority);
|
||||
} else {
|
||||
try {
|
||||
BinaryCacheStore::init();
|
||||
|
@ -126,7 +126,7 @@ protected:
|
|||
const std::string & mimeType) override
|
||||
{
|
||||
auto req = makeRequest(path);
|
||||
req.data = std::make_shared<string>(StreamToSourceAdapter(istream).drain());
|
||||
req.data = StreamToSourceAdapter(istream).drain();
|
||||
req.mimeType = mimeType;
|
||||
try {
|
||||
getFileTransfer()->upload(req);
|
||||
|
@ -159,7 +159,7 @@ protected:
|
|||
}
|
||||
|
||||
void getFile(const std::string & path,
|
||||
Callback<std::shared_ptr<std::string>> callback) noexcept override
|
||||
Callback<std::optional<std::string>> callback) noexcept override
|
||||
{
|
||||
checkEnabled();
|
||||
|
||||
|
@ -170,10 +170,10 @@ protected:
|
|||
getFileTransfer()->enqueueFileTransfer(request,
|
||||
{[callbackPtr, this](std::future<FileTransferResult> result) {
|
||||
try {
|
||||
(*callbackPtr)(result.get().data);
|
||||
(*callbackPtr)(std::move(result.get().data));
|
||||
} catch (FileTransferError & e) {
|
||||
if (e.error == FileTransfer::NotFound || e.error == FileTransfer::Forbidden)
|
||||
return (*callbackPtr)(std::shared_ptr<std::string>());
|
||||
return (*callbackPtr)({});
|
||||
maybeDisable();
|
||||
callbackPtr->rethrow();
|
||||
} catch (...) {
|
||||
|
|
|
@ -48,7 +48,7 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor
|
|||
|
||||
static std::set<std::string> uriSchemes() { return {"ssh"}; }
|
||||
|
||||
LegacySSHStore(const string & scheme, const string & host, const Params & params)
|
||||
LegacySSHStore(const std::string & scheme, const std::string & host, const Params & params)
|
||||
: StoreConfig(params)
|
||||
, LegacySSHStoreConfig(params)
|
||||
, Store(params)
|
||||
|
@ -82,9 +82,20 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor
|
|||
conn->to << SERVE_MAGIC_1 << SERVE_PROTOCOL_VERSION;
|
||||
conn->to.flush();
|
||||
|
||||
unsigned int magic = readInt(conn->from);
|
||||
if (magic != SERVE_MAGIC_2)
|
||||
throw Error("protocol mismatch with 'nix-store --serve' on '%s'", host);
|
||||
StringSink saved;
|
||||
try {
|
||||
TeeSource tee(conn->from, saved);
|
||||
unsigned int magic = readInt(tee);
|
||||
if (magic != SERVE_MAGIC_2)
|
||||
throw Error("'nix-store --serve' protocol mismatch from '%s'", host);
|
||||
} catch (SerialisationError & e) {
|
||||
/* In case the other side is waiting for our input,
|
||||
close it. */
|
||||
conn->sshConn->in.close();
|
||||
auto msg = conn->from.drain();
|
||||
throw Error("'nix-store --serve' protocol mismatch from '%s', got '%s'",
|
||||
host, chomp(saved.s + msg));
|
||||
}
|
||||
conn->remoteVersion = readInt(conn->from);
|
||||
if (GET_PROTOCOL_MAJOR(conn->remoteVersion) != 0x200)
|
||||
throw Error("unsupported 'nix-store --serve' protocol version on '%s'", host);
|
||||
|
@ -96,7 +107,7 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor
|
|||
return conn;
|
||||
};
|
||||
|
||||
string getUri() override
|
||||
std::string getUri() override
|
||||
{
|
||||
return *uriSchemes().begin() + "://" + host;
|
||||
}
|
||||
|
@ -214,13 +225,21 @@ struct LegacySSHStore : public virtual LegacySSHStoreConfig, public virtual Stor
|
|||
std::optional<StorePath> queryPathFromHashPart(const std::string & hashPart) override
|
||||
{ unsupported("queryPathFromHashPart"); }
|
||||
|
||||
StorePath addToStore(const string & name, const Path & srcPath,
|
||||
FileIngestionMethod method, HashType hashAlgo,
|
||||
PathFilter & filter, RepairFlag repair) override
|
||||
StorePath addToStore(
|
||||
std::string_view name,
|
||||
const Path & srcPath,
|
||||
FileIngestionMethod method,
|
||||
HashType hashAlgo,
|
||||
PathFilter & filter,
|
||||
RepairFlag repair,
|
||||
const StorePathSet & references) override
|
||||
{ unsupported("addToStore"); }
|
||||
|
||||
StorePath addTextToStore(const string & name, const string & s,
|
||||
const StorePathSet & references, RepairFlag repair) override
|
||||
StorePath addTextToStore(
|
||||
std::string_view name,
|
||||
std::string_view s,
|
||||
const StorePathSet & references,
|
||||
RepairFlag repair) override
|
||||
{ unsupported("addTextToStore"); }
|
||||
|
||||
private:
|
||||
|
@ -237,6 +256,10 @@ private:
|
|||
conn.to
|
||||
<< settings.buildRepeat
|
||||
<< settings.enforceDeterminism;
|
||||
|
||||
if (GET_PROTOCOL_MINOR(conn.remoteVersion) >= 7) {
|
||||
conn.to << ((int) settings.keepFailed);
|
||||
}
|
||||
}
|
||||
|
||||
public:
|
||||
|
@ -279,10 +302,10 @@ public:
|
|||
for (auto & p : drvPaths) {
|
||||
auto sOrDrvPath = StorePathWithOutputs::tryFromDerivedPath(p);
|
||||
std::visit(overloaded {
|
||||
[&](StorePathWithOutputs s) {
|
||||
[&](const StorePathWithOutputs & s) {
|
||||
ss.push_back(s.to_string(*this));
|
||||
},
|
||||
[&](StorePath drvPath) {
|
||||
[&](const StorePath & drvPath) {
|
||||
throw Error("wanted to fetch '%s' but the legacy ssh protocol doesn't support merely substituting drv files via the build paths command. It would build them instead. Try using ssh-ng://", printStorePath(drvPath));
|
||||
},
|
||||
}, sOrDrvPath);
|
||||
|
@ -352,7 +375,8 @@ public:
|
|||
return conn->remoteVersion;
|
||||
}
|
||||
|
||||
std::optional<const Realisation> queryRealisation(const DrvOutput&) override
|
||||
void queryRealisationUncached(const DrvOutput &,
|
||||
Callback<std::shared_ptr<const Realisation>> callback) noexcept override
|
||||
// TODO: Implement
|
||||
{ unsupported("queryRealisation"); }
|
||||
};
|
||||
|
|
|
@ -96,6 +96,7 @@ void LocalBinaryCacheStore::init()
|
|||
createDirs(binaryCacheDir + "/" + realisationsPrefix);
|
||||
if (writeDebugInfo)
|
||||
createDirs(binaryCacheDir + "/debuginfo");
|
||||
createDirs(binaryCacheDir + "/log");
|
||||
BinaryCacheStore::init();
|
||||
}
|
||||
|
||||
|
|
|
@ -85,36 +85,34 @@ void LocalFSStore::narFromPath(const StorePath & path, Sink & sink)
dumpPath(getRealStoreDir() + std::string(printStorePath(path), storeDir.size()), sink);
}

const string LocalFSStore::drvsLogDir = "drvs";
const std::string LocalFSStore::drvsLogDir = "drvs";


std::shared_ptr<std::string> LocalFSStore::getBuildLog(const StorePath & path_)
std::optional<std::string> LocalFSStore::getBuildLog(const StorePath & path_)
{
auto path = path_;

if (!path.isDerivation()) {
try {
auto info = queryPathInfo(path);
if (!info->deriver) return nullptr;
if (!info->deriver) return std::nullopt;
path = *info->deriver;
} catch (InvalidPath &) {
return nullptr;
return std::nullopt;
}
}

auto baseName = std::string(baseNameOf(printStorePath(path)));
auto baseName = path.to_string();

for (int j = 0; j < 2; j++) {

Path logPath =
j == 0
? fmt("%s/%s/%s/%s", logDir, drvsLogDir, string(baseName, 0, 2), string(baseName, 2))
? fmt("%s/%s/%s/%s", logDir, drvsLogDir, baseName.substr(0, 2), baseName.substr(2))
: fmt("%s/%s/%s", logDir, drvsLogDir, baseName);
Path logBz2Path = logPath + ".bz2";

if (pathExists(logPath))
return std::make_shared<std::string>(readFile(logPath));
return readFile(logPath);

else if (pathExists(logBz2Path)) {
try {

@ -124,7 +122,7 @@ std::shared_ptr<std::string> LocalFSStore::getBuildLog(const StorePath & path_)

}

return nullptr;
return std::nullopt;
}

}
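getBuildLog() now returns std::optional<std::string> instead of a nullable std::shared_ptr<std::string>, so "no log available" is expressed in the type and the value needs no heap indirection. A hypothetical caller-side sketch (getBuildLog here is a stub for illustration, not the Store API):

    #include <iostream>
    #include <optional>
    #include <string>

    // Stub standing in for LocalFSStore::getBuildLog(); the example log text
    // is made up.
    static std::optional<std::string> getBuildLog(bool haveLog)
    {
        if (!haveLog) return std::nullopt;
        return std::string("building example derivation...\n");
    }

    int main()
    {
        if (auto log = getBuildLog(true))
            std::cout << *log;        // prints the log
        else
            std::cout << "no build log\n";

        if (auto log = getBuildLog(false))
            std::cout << *log;
        else
            std::cout << "no build log\n"; // taken for the missing-log case
    }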
|
|
|
@@ -27,7 +27,7 @@ class LocalFSStore : public virtual LocalFSStoreConfig, public virtual Store
{
public:

    const static string drvsLogDir;
    const static std::string drvsLogDir;

    LocalFSStore(const Params & params);

@@ -45,7 +45,8 @@ public:
        return getRealStoreDir() + "/" + std::string(storePath, storeDir.size() + 1);
    }

    std::shared_ptr<std::string> getBuildLog(const StorePath & path) override;
    std::optional<std::string> getBuildLog(const StorePath & path) override;

};

}

@@ -8,6 +8,8 @@
#include "references.hh"
#include "callback.hh"
#include "topo-sort.hh"
#include "finally.hh"
#include "compression.hh"

#include <iostream>
#include <algorithm>

@@ -68,7 +70,7 @@ int getSchema(Path schemaPath)
{
    int curSchema = 0;
    if (pathExists(schemaPath)) {
        string s = readFile(schemaPath);
        auto s = readFile(schemaPath);
        auto n = string2Int<int>(s);
        if (!n)
            throw Error("'%1%' is corrupt", schemaPath);

@@ -79,7 +81,7 @@ int getSchema(Path schemaPath)

void migrateCASchema(SQLite& db, Path schemaPath, AutoCloseFD& lockFd)
{
    const int nixCASchemaVersion = 2;
    const int nixCASchemaVersion = 3;
    int curCASchema = getSchema(schemaPath);
    if (curCASchema != nixCASchemaVersion) {
        if (curCASchema > nixCASchemaVersion) {

@@ -130,6 +132,17 @@ void migrateCASchema(SQLite& db, Path schemaPath, AutoCloseFD& lockFd)
            txn.commit();
        }

        if (curCASchema < 3) {
            SQLiteTxn txn(db);
            // Apply new indices added in this schema update.
            db.exec(R"(
                -- used by QueryRealisationReferences
                create index if not exists IndexRealisationsRefs on RealisationsRefs(referrer);
                -- used by cascade deletion when ValidPaths is deleted
                create index if not exists IndexRealisationsRefsOnOutputPath on Realisations(outputPath);
            )");
            txn.commit();
        }
        writeFile(schemaPath, fmt("%d", nixCASchemaVersion));
        lockFile(lockFd.get(), ltRead, true);
    }

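Schema version 3 only adds the two indices above, so an upgraded database can be sanity-checked with the same SQLiteStmt wrapper this file already uses for the journal-mode pragma. A hedged sketch (the helper function and its query of sqlite_master are illustrative, not part of the patch):

// Illustrative: confirm the realisation indices exist after the migration.
static void checkRealisationIndices(SQLite & db)
{
    SQLiteStmt stmt;
    stmt.create(db,
        "select count(*) from sqlite_master where type = 'index' "
        "and name in ('IndexRealisationsRefs', 'IndexRealisationsRefsOnOutputPath');");
    if (sqlite3_step(stmt) != SQLITE_ROW)
        throwSQLiteError(db, "querying realisation indices");
    assert(sqlite3_column_int(stmt, 0) == 2);   // both indices should be present
}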
@@ -145,7 +158,6 @@ LocalStore::LocalStore(const Params & params)
    , linksDir(realStoreDir + "/.links")
    , reservedPath(dbDir + "/reserved")
    , schemaPath(dbDir + "/schema")
    , trashDir(realStoreDir + "/trash")
    , tempRootsDir(stateDir + "/temproots")
    , fnTempRoots(fmt("%s/%d", tempRootsDir, getpid()))
    , locksHeld(tokenizeString<PathSet>(getEnv("NIX_HELD_LOCKS").value_or("")))

@@ -227,7 +239,7 @@ LocalStore::LocalStore(const Params & params)
        res = posix_fallocate(fd.get(), 0, settings.reservedSize);
#endif
        if (res == -1) {
            writeFull(fd.get(), string(settings.reservedSize, 'X'));
            writeFull(fd.get(), std::string(settings.reservedSize, 'X'));
            [[gnu::unused]] auto res2 = ftruncate(fd.get(), settings.reservedSize);
        }
    }

@@ -309,7 +321,7 @@ LocalStore::LocalStore(const Params & params)

    else openDB(*state, false);

    if (settings.isExperimentalFeatureEnabled("ca-derivations")) {
    if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations)) {
        migrateCASchema(state->db, dbDir + "/ca-schema", globalLock);
    }

@@ -339,7 +351,7 @@ LocalStore::LocalStore(const Params & params)
        state->stmts->QueryPathFromHashPart.create(state->db,
            "select path from ValidPaths where path >= ? limit 1;");
        state->stmts->QueryValidPaths.create(state->db, "select path from ValidPaths");
        if (settings.isExperimentalFeatureEnabled("ca-derivations")) {
        if (settings.isExperimentalFeatureEnabled(Xp::CaDerivations)) {
            state->stmts->RegisterRealisedOutput.create(state->db,
                R"(
                    insert into Realisations (drvPath, outputName, outputPath, signatures)

@@ -386,6 +398,16 @@ LocalStore::LocalStore(const Params & params)
}


AutoCloseFD LocalStore::openGCLock()
{
    Path fnGCLock = stateDir + "/gc.lock";
    auto fdGCLock = open(fnGCLock.c_str(), O_RDWR | O_CREAT | O_CLOEXEC, 0600);
    if (!fdGCLock)
        throw SysError("opening global GC lock '%1%'", fnGCLock);
    return fdGCLock;
}


LocalStore::~LocalStore()
{
    std::shared_future<void> future;

@@ -428,7 +450,7 @@ void LocalStore::openDB(State & state, bool create)
        throw SysError("Nix database directory '%1%' is not writable", dbDir);

    /* Open the Nix database. */
    string dbPath = dbDir + "/db.sqlite";
    std::string dbPath = dbDir + "/db.sqlite";
    auto & db(state.db);
    state.db = SQLite(dbPath, create);

@@ -449,19 +471,19 @@ void LocalStore::openDB(State & state, bool create)
       should be safe enough. If the user asks for it, don't sync at
       all. This can cause database corruption if the system
       crashes. */
    string syncMode = settings.fsyncMetadata ? "normal" : "off";
    std::string syncMode = settings.fsyncMetadata ? "normal" : "off";
    db.exec("pragma synchronous = " + syncMode);

    /* Set the SQLite journal mode. WAL mode is fastest, so it's the
       default. */
    string mode = settings.useSQLiteWAL ? "wal" : "truncate";
    string prevMode;
    std::string mode = settings.useSQLiteWAL ? "wal" : "truncate";
    std::string prevMode;
    {
        SQLiteStmt stmt;
        stmt.create(db, "pragma main.journal_mode;");
        if (sqlite3_step(stmt) != SQLITE_ROW)
            throwSQLiteError(db, "querying journal mode");
        prevMode = string((const char *) sqlite3_column_text(stmt, 0));
        prevMode = std::string((const char *) sqlite3_column_text(stmt, 0));
    }
    if (prevMode != mode &&
        sqlite3_exec(db, ("pragma main.journal_mode = " + mode + ";").c_str(), 0, 0, 0) != SQLITE_OK)

@@ -495,9 +517,6 @@ void LocalStore::makeStoreWritable()
        throw SysError("getting info about the Nix store mount point");

    if (stat.f_flag & ST_RDONLY) {
        if (unshare(CLONE_NEWNS) == -1)
            throw SysError("setting up a private mount namespace");

        if (mount(0, realStoreDir.get().c_str(), "none", MS_REMOUNT | MS_BIND, 0) == -1)
            throw SysError("remounting %1% writable", realStoreDir);
    }

@@ -586,9 +605,7 @@ static void canonicalisePathMetaData_(
        throw SysError("querying extended attributes of '%s'", path);

    for (auto & eaName: tokenizeString<Strings>(std::string(eaBuf.data(), eaSize), std::string("\000", 1))) {
        /* Ignore SELinux security labels since these cannot be
           removed even by root. */
        if (eaName == "security.selinux") continue;
        if (settings.ignoredAcls.get().count(eaName)) continue;
        if (lremovexattr(path.c_str(), eaName.c_str()) == -1)
            throw SysError("removing extended attribute '%s' from '%s'", eaName, path);
    }

@@ -669,7 +686,7 @@ void LocalStore::checkDerivationOutputs(const StorePath & drvPath, const Derivat
{
    assert(drvPath.isDerivation());
    std::string drvName(drvPath.name());
    drvName = string(drvName, 0, drvName.size() - drvExtension.size());
    drvName = drvName.substr(0, drvName.size() - drvExtension.size());

    auto envHasRightPath = [&](const StorePath & actual, const std::string & varName)
    {

@@ -688,7 +705,7 @@ void LocalStore::checkDerivationOutputs(const StorePath & drvPath, const Derivat
    std::optional<Hash> h;
    for (auto & i : drv.outputs) {
        std::visit(overloaded {
            [&](DerivationOutputInputAddressed doia) {
            [&](const DerivationOutputInputAddressed & doia) {
                if (!h) {
                    // somewhat expensive so we do lazily
                    auto temp = hashDerivationModulo(*this, drv, true);

@@ -700,14 +717,14 @@ void LocalStore::checkDerivationOutputs(const StorePath & drvPath, const Derivat
                        printStorePath(drvPath), printStorePath(doia.path), printStorePath(recomputed));
                envHasRightPath(doia.path, i.first);
            },
            [&](DerivationOutputCAFixed dof) {
            [&](const DerivationOutputCAFixed & dof) {
                StorePath path = makeFixedOutputPath(dof.hash.method, dof.hash.hash, drvName);
                envHasRightPath(path, i.first);
            },
            [&](DerivationOutputCAFloating _) {
            [&](const DerivationOutputCAFloating &) {
                /* Nothing to check */
            },
            [&](DerivationOutputDeferred) {
            [&](const DerivationOutputDeferred &) {
            },
        }, i.second.output);
    }

@@ -715,7 +732,7 @@ void LocalStore::checkDerivationOutputs(const StorePath & drvPath, const Derivat

void LocalStore::registerDrvOutput(const Realisation & info, CheckSigsFlag checkSigs)
{
    settings.requireExperimentalFeature("ca-derivations");
    settings.requireExperimentalFeature(Xp::CaDerivations);
    if (checkSigs == NoCheckSigs || !realisationIsUntrusted(info))
        registerDrvOutput(info);
    else

@@ -724,7 +741,7 @@ void LocalStore::registerDrvOutput(const Realisation & info, CheckSigsFlag check

void LocalStore::registerDrvOutput(const Realisation & info)
{
    settings.requireExperimentalFeature("ca-derivations");
    settings.requireExperimentalFeature(Xp::CaDerivations);
    retrySQLite<void>([&]() {
        auto state(_state.lock());
        if (auto oldR = queryRealisation_(*state, info.id)) {

@@ -776,7 +793,11 @@ void LocalStore::registerDrvOutput(const Realisation & info)
    });
}

void LocalStore::cacheDrvOutputMapping(State & state, const uint64_t deriver, const string & outputName, const StorePath & output)
void LocalStore::cacheDrvOutputMapping(
    State & state,
    const uint64_t deriver,
    const std::string & outputName,
    const StorePath & output)
{
    retrySQLite<void>([&]() {
        state.stmts->AddDerivationOutput.use()

@@ -785,7 +806,6 @@ void LocalStore::cacheDrvOutputMapping(State & state, const uint64_t deriver, co
            (printStorePath(output))
            .exec();
    });

}

|
@ -832,7 +852,7 @@ uint64_t LocalStore::addValidPath(State & state,
|
|||
|
||||
{
|
||||
auto state_(Store::state.lock());
|
||||
state_->pathInfoCache.upsert(std::string(info.path.hashPart()),
|
||||
state_->pathInfoCache.upsert(std::string(info.path.to_string()),
|
||||
PathInfoCacheValue{ .value = std::make_shared<const ValidPathInfo>(info) });
|
||||
}
|
||||
|
||||
|
@ -1010,7 +1030,7 @@ LocalStore::queryPartialDerivationOutputMap(const StorePath & path_)
|
|||
return outputs;
|
||||
});
|
||||
|
||||
if (!settings.isExperimentalFeatureEnabled("ca-derivations"))
|
||||
if (!settings.isExperimentalFeatureEnabled(Xp::CaDerivations))
|
||||
return outputs;
|
||||
|
||||
auto drv = readInvalidDerivation(path);
|
||||
|
@@ -1205,7 +1225,7 @@ void LocalStore::invalidatePath(State & state, const StorePath & path)

    {
        auto state_(Store::state.lock());
        state_->pathInfoCache.erase(std::string(path.hashPart()));
        state_->pathInfoCache.erase(std::string(path.to_string()));
    }
}

@@ -1298,7 +1318,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,

        canonicalisePathMetaData(realPath, {});

        optimisePath(realPath); // FIXME: combine with hashPath()
        optimisePath(realPath, repair); // FIXME: combine with hashPath()

        registerValidPath(info);
    }

@@ -1308,8 +1328,8 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
}


StorePath LocalStore::addToStoreFromDump(Source & source0, const string & name,
    FileIngestionMethod method, HashType hashAlgo, RepairFlag repair)
StorePath LocalStore::addToStoreFromDump(Source & source0, std::string_view name,
    FileIngestionMethod method, HashType hashAlgo, RepairFlag repair, const StorePathSet & references)
{
    /* For computing the store path. */
    auto hashSink = std::make_unique<HashSink>(hashAlgo);

@@ -1334,13 +1354,15 @@ StorePath LocalStore::addToStoreFromDump(Source & source0, const string & name,
        auto want = std::min(chunkSize, settings.narBufferSize - oldSize);
        dump.resize(oldSize + want);
        auto got = 0;
        Finally cleanup([&]() {
            dump.resize(oldSize + got);
        });
        try {
            got = source.read(dump.data() + oldSize, want);
        } catch (EndOfFile &) {
            inMemory = true;
            break;
        }
        dump.resize(oldSize + got);
    }

    std::unique_ptr<AutoDelete> delTempDir;

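The Finally guard added above makes sure the buffer is shrunk back to the bytes actually received even if source.read throws something other than EndOfFile. A self-contained sketch of that scope-guard pattern (this Finally is a minimal stand-in, not the one from the Nix headers; the readChunk helper is purely illustrative):

#include <cstddef>
#include <functional>
#include <vector>

// Minimal scope guard: runs the stored function when the scope is left,
// whether normally or via an exception.
struct Finally
{
    std::function<void()> fun;
    explicit Finally(std::function<void()> fun) : fun(std::move(fun)) {}
    ~Finally() { fun(); }
};

template<typename Read>
size_t readChunk(std::vector<char> & buf, size_t want, Read && read)
{
    size_t oldSize = buf.size();
    size_t got = 0;
    buf.resize(oldSize + want);
    Finally cleanup([&]() { buf.resize(oldSize + got); }); // always trim to what arrived
    got = read(buf.data() + oldSize, want);                // may throw
    return got;
}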
@@ -1365,7 +1387,7 @@ StorePath LocalStore::addToStoreFromDump(Source & source0, const string & name,

    auto [hash, size] = hashSink->finish();

    auto dstPath = makeFixedOutputPath(method, hash, name);
    auto dstPath = makeFixedOutputPath(method, hash, name, references);

    addTempRoot(dstPath);

@@ -1408,10 +1430,11 @@ StorePath LocalStore::addToStoreFromDump(Source & source0, const string & name,

        canonicalisePathMetaData(realPath, {}); // FIXME: merge into restorePath

        optimisePath(realPath);
        optimisePath(realPath, repair);

        ValidPathInfo info { dstPath, narHash.first };
        info.narSize = narHash.second;
        info.references = references;
        info.ca = FixedOutputHash { .method = method, .hash = hash };
        registerValidPath(info);
    }

@@ -1423,7 +1446,9 @@ StorePath LocalStore::addToStoreFromDump(Source & source0, const string & name,
}


StorePath LocalStore::addTextToStore(const string & name, const string & s,
StorePath LocalStore::addTextToStore(
    std::string_view name,
    std::string_view s,
    const StorePathSet & references, RepairFlag repair)
{
    auto hash = hashString(htSHA256, s);

@@ -1449,12 +1474,12 @@ StorePath LocalStore::addTextToStore(const string & name, const string & s,

        StringSink sink;
        dumpString(s, sink);
        auto narHash = hashString(htSHA256, *sink.s);
        auto narHash = hashString(htSHA256, sink.s);

        optimisePath(realPath);
        optimisePath(realPath, repair);

        ValidPathInfo info { dstPath, narHash };
        info.narSize = sink.s->size();
        info.narSize = sink.s.size();
        info.references = references;
        info.ca = TextHash { .hash = hash };
        registerValidPath(info);

@@ -1512,7 +1537,8 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)

    /* Acquire the global GC lock to get a consistent snapshot of
       existing and valid paths. */
    AutoCloseFD fdGCLock = openGCLock(ltWrite);
    auto fdGCLock = openGCLock();
    FdLock gcLock(fdGCLock.get(), ltRead, true, "waiting for the big garbage collector lock...");

    StringSet store;
    for (auto & i : readDirectory(realStoreDir)) store.insert(i.name);

@@ -1523,8 +1549,6 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
    StorePathSet validPaths;
    PathSet done;

    fdGCLock = -1;

    for (auto & i : queryAllValidPaths())
        verifyPath(printStorePath(i), store, done, validPaths, repair, errors);

@@ -1536,7 +1560,7 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
        for (auto & link : readDirectory(linksDir)) {
            printMsg(lvlTalkative, "checking contents of '%s'", link.name);
            Path linkPath = linksDir + "/" + link.name;
            string hash = hashPath(htSHA256, linkPath).first.to_string(Base32, false);
            std::string hash = hashPath(htSHA256, linkPath).first.to_string(Base32, false);
            if (hash != link.name) {
                printError("link '%s' was modified! expected hash '%s', got '%s'",
                    linkPath, link.name, hash);

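verifyStore now takes the global GC lock through the openGCLock() helper plus an FdLock guard instead of juggling the descriptor by hand. A hedged sketch of such a guard built on the lockFile primitive that appears elsewhere in this diff (the constructor shape is inferred from the call above, and the ltNone release value is an assumption; treat the details as illustrative):

#include <iostream>
#include <string>

// Illustrative RAII guard: take the lock on construction, drop it on scope exit.
struct FdLockSketch
{
    int fd;
    FdLockSketch(int fd, LockType lockType, const std::string & waitMsg)
        : fd(fd)
    {
        if (!lockFile(fd, lockType, false)) {   // try without blocking first
            std::cerr << waitMsg << "\n";       // explain why we are stalled
            lockFile(fd, lockType, true);       // then block until the lock is free
        }
    }
    ~FdLockSketch() { lockFile(fd, ltNone, false); }  // release (assumed semantics)
};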
@@ -1837,13 +1861,24 @@ std::optional<const Realisation> LocalStore::queryRealisation_(
    return { res };
}

std::optional<const Realisation>
LocalStore::queryRealisation(const DrvOutput & id)
void LocalStore::queryRealisationUncached(const DrvOutput & id,
    Callback<std::shared_ptr<const Realisation>> callback) noexcept
{
    return retrySQLite<std::optional<const Realisation>>([&]() {
        auto state(_state.lock());
        return queryRealisation_(*state, id);
    });
    try {
        auto maybeRealisation
            = retrySQLite<std::optional<const Realisation>>([&]() {
                auto state(_state.lock());
                return queryRealisation_(*state, id);
            });
        if (maybeRealisation)
            callback(
                std::make_shared<const Realisation>(maybeRealisation.value()));
        else
            callback(nullptr);

    } catch (...) {
        callback.rethrow();
    }
}

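The realisation query is now callback-based; a caller that wants a blocking answer can bridge the callback to a future. A hedged, self-contained sketch of that promise/future bridge (the Callback machinery in Nix differs in detail; here the callback is modelled as a plain std::function and the Realisation type is a stand-in):

#include <functional>
#include <future>
#include <memory>

struct Realisation { /* stand-in for the type used above */ };

// The query hands its result to a callback, as queryRealisationUncached does above...
void queryRealisationUncachedLike(
    std::function<void(std::shared_ptr<const Realisation>)> callback)
{
    callback(std::make_shared<const Realisation>());   // or callback(nullptr) on a miss
}

// ...and a caller that wants to block can bridge the callback to a future.
std::shared_ptr<const Realisation> queryBlocking()
{
    std::promise<std::shared_ptr<const Realisation>> promise;
    queryRealisationUncachedLike(
        [&](std::shared_ptr<const Realisation> r) { promise.set_value(std::move(r)); });
    return promise.get_future().get();
}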
FixedOutputHash LocalStore::hashCAPath(
@@ -1876,4 +1911,30 @@ FixedOutputHash LocalStore::hashCAPath(
    };
}

void LocalStore::addBuildLog(const StorePath & drvPath, std::string_view log)
{
    assert(drvPath.isDerivation());

    auto baseName = drvPath.to_string();

    auto logPath = fmt("%s/%s/%s/%s.bz2", logDir, drvsLogDir, baseName.substr(0, 2), baseName.substr(2));

    if (pathExists(logPath)) return;

    createDirs(dirOf(logPath));

    auto tmpFile = fmt("%s.tmp.%d", logPath, getpid());

    writeFile(tmpFile, compress("bzip2", log));

    if (rename(tmpFile.c_str(), logPath.c_str()) != 0)
        throw SysError("renaming '%1%' to '%2%'", tmpFile, logPath);
}

std::optional<std::string> LocalStore::getVersion()
{
    return nixVersion;
}


} // namespace nix

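addBuildLog stores the compressed log under a two-level layout keyed by the derivation's base name: the first two characters select the subdirectory, and the rest plus ".bz2" is the file name. A small sketch of just that path computation (the directory values and the derivation name below are made-up examples):

#include <iostream>
#include <string>

int main()
{
    std::string logDir = "/nix/var/log/nix";   // typical default, shown for illustration
    std::string drvsLogDir = "drvs";
    std::string baseName = "00000000000000000000000000000000-hello-2.10.drv";  // hypothetical

    // Same split as addBuildLog above: <logDir>/drvs/<first 2 chars>/<rest>.bz2
    std::string logPath = logDir + "/" + drvsLogDir + "/"
        + baseName.substr(0, 2) + "/" + baseName.substr(2) + ".bz2";

    std::cout << logPath << "\n";
}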
@@ -58,9 +58,15 @@ private:
    struct Stmts;
    std::unique_ptr<Stmts> stmts;

    /* The global GC lock */
    AutoCloseFD fdGCLock;

    /* The file to which we write our temporary roots. */
    AutoCloseFD fdTempRoots;

    /* Connection to the garbage collector. */
    AutoCloseFD fdRootsSocket;

    /* The last time we checked whether to do an auto-GC, or an
       auto-GC finished. */
    std::chrono::time_point<std::chrono::steady_clock> lastGCCheck;

@@ -87,7 +93,6 @@ public:
    const Path linksDir;
    const Path reservedPath;
    const Path schemaPath;
    const Path trashDir;
    const Path tempRootsDir;
    const Path fnTempRoots;

@@ -139,24 +144,24 @@ public:
    void addToStore(const ValidPathInfo & info, Source & source,
        RepairFlag repair, CheckSigsFlag checkSigs) override;

    StorePath addToStoreFromDump(Source & dump, const string & name,
        FileIngestionMethod method, HashType hashAlgo, RepairFlag repair) override;
    StorePath addToStoreFromDump(Source & dump, std::string_view name,
        FileIngestionMethod method, HashType hashAlgo, RepairFlag repair, const StorePathSet & references) override;

    StorePath addTextToStore(const string & name, const string & s,
        const StorePathSet & references, RepairFlag repair) override;
    StorePath addTextToStore(
        std::string_view name,
        std::string_view s,
        const StorePathSet & references,
        RepairFlag repair) override;

    void addTempRoot(const StorePath & path) override;

    void addIndirectRoot(const Path & path) override;

    void syncWithGC() override;

private:

    typedef std::shared_ptr<AutoCloseFD> FDPtr;
    typedef list<FDPtr> FDs;
    void findTempRoots(Roots & roots, bool censor);

    void findTempRoots(FDs & fds, Roots & roots, bool censor);
    AutoCloseFD openGCLock();

public:

@@ -170,8 +175,9 @@ public:

    void optimiseStore() override;

    /* Optimise a single store path. */
    void optimisePath(const Path & path);
    /* Optimise a single store path. Optionally, test the encountered
       symlinks for corruption. */
    void optimisePath(const Path & path, RepairFlag repair);

    bool verifyStore(bool checkContents, RepairFlag repair) override;

@@ -201,11 +207,18 @@ public:
       derivation 'deriver'. */
    void registerDrvOutput(const Realisation & info) override;
    void registerDrvOutput(const Realisation & info, CheckSigsFlag checkSigs) override;
    void cacheDrvOutputMapping(State & state, const uint64_t deriver, const string & outputName, const StorePath & output);
    void cacheDrvOutputMapping(
        State & state,
        const uint64_t deriver,
        const std::string & outputName,
        const StorePath & output);

    std::optional<const Realisation> queryRealisation_(State & state, const DrvOutput & id);
    std::optional<std::pair<int64_t, Realisation>> queryRealisationCore_(State & state, const DrvOutput & id);
    std::optional<const Realisation> queryRealisation(const DrvOutput&) override;
    void queryRealisationUncached(const DrvOutput&,
        Callback<std::shared_ptr<const Realisation>> callback) noexcept override;

    std::optional<std::string> getVersion() override;

private:

@@ -236,29 +249,12 @@ private:
    PathSet queryValidPathsOld();
    ValidPathInfo queryPathInfoOld(const Path & path);

    struct GCState;

    void deleteGarbage(GCState & state, const Path & path);

    void tryToDelete(GCState & state, const Path & path);

    bool canReachRoot(GCState & state, StorePathSet & visited, const StorePath & path);

    void deletePathRecursive(GCState & state, const Path & path);

    bool isActiveTempFile(const GCState & state,
        const Path & path, const string & suffix);

    AutoCloseFD openGCLock(LockType lockType);

    void findRoots(const Path & path, unsigned char type, Roots & roots);

    void findRootsNoTemp(Roots & roots, bool censor);

    void findRuntimeRoots(Roots & roots, bool censor);

    void removeUnusedLinks(const GCState & state);

    Path createTempDirInStore();

    void checkDerivationOutputs(const StorePath & drvPath, const Derivation & drv);

@@ -267,7 +263,7 @@ private:

    InodeHash loadInodeHash();
    Strings readDirectoryIgnoringInodes(const Path & path, const InodeHash & inodeHash);
    void optimisePath_(Activity * act, OptimiseStats & stats, const Path & path, InodeHash & inodeHash);
    void optimisePath_(Activity * act, OptimiseStats & stats, const Path & path, InodeHash & inodeHash, RepairFlag repair);

    // Internal versions that are not wrapped in retry_sqlite.
    bool isValidPath_(State & state, const StorePath & path);

@@ -293,6 +289,8 @@ private:
        const std::string_view pathHash
    );

    void addBuildLog(const StorePath & drvPath, std::string_view log) override;

    friend struct LocalDerivationGoal;
    friend struct PathSubstitutionGoal;
    friend struct SubstitutionGoal;

@@ -301,7 +299,7 @@ private:


typedef std::pair<dev_t, ino_t> Inode;
typedef set<Inode> InodesSeen;
typedef std::set<Inode> InodesSeen;


/* "Fix", or canonicalise, the meta-data of the files in a store path

@@ -129,7 +129,7 @@ struct CgroupUserLock : UserLock

    static std::unique_ptr<UserLock> acquire()
    {
        settings.requireExperimentalFeature("auto-allocate-uids");
        settings.requireExperimentalFeature(Xp::AutoAllocateUids);
        assert(settings.startId > 0);
        assert(settings.startId % settings.idsPerBuild == 0);
        assert(settings.uidCount % settings.idsPerBuild == 0);

@@ -39,22 +39,25 @@ Machine::Machine(decltype(storeUri) storeUri,
    sshPublicHostKey(sshPublicHostKey)
{}

bool Machine::allSupported(const std::set<string> & features) const {
bool Machine::allSupported(const std::set<std::string> & features) const
{
    return std::all_of(features.begin(), features.end(),
        [&](const string & feature) {
        [&](const std::string & feature) {
            return supportedFeatures.count(feature) ||
                mandatoryFeatures.count(feature);
        });
}

bool Machine::mandatoryMet(const std::set<string> & features) const {
bool Machine::mandatoryMet(const std::set<std::string> & features) const
{
    return std::all_of(mandatoryFeatures.begin(), mandatoryFeatures.end(),
        [&](const string & feature) {
        [&](const std::string & feature) {
            return features.count(feature);
        });
}

ref<Store> Machine::openStore() const {
ref<Store> Machine::openStore() const
{
    Store::Params storeParams;
    if (hasPrefix(storeUri, "ssh://")) {
        storeParams["max-connections"] = "1";

@@ -83,53 +86,87 @@ ref<Store> Machine::openStore() const {
    return nix::openStore(storeUri, storeParams);
}

void parseMachines(const std::string & s, Machines & machines)
static std::vector<std::string> expandBuilderLines(const std::string & builders)
{
    for (auto line : tokenizeString<std::vector<string>>(s, "\n;")) {
    std::vector<std::string> result;
    for (auto line : tokenizeString<std::vector<std::string>>(builders, "\n;")) {
        trim(line);
        line.erase(std::find(line.begin(), line.end(), '#'), line.end());
        if (line.empty()) continue;

        if (line[0] == '@') {
            auto file = trim(std::string(line, 1));
            const std::string path = trim(std::string(line, 1));
            std::string text;
            try {
                parseMachines(readFile(file), machines);
                text = readFile(path);
            } catch (const SysError & e) {
                if (e.errNo != ENOENT)
                    throw;
                debug("cannot find machines file '%s'", file);
                debug("cannot find machines file '%s'", path);
            }

            const auto lines = expandBuilderLines(text);
            result.insert(end(result), begin(lines), end(lines));
            continue;
        }

        auto tokens = tokenizeString<std::vector<string>>(line);
        auto sz = tokens.size();
        if (sz < 1)
            throw FormatError("bad machine specification '%s'", line);

        auto isSet = [&](size_t n) {
            return tokens.size() > n && tokens[n] != "" && tokens[n] != "-";
        };

        machines.emplace_back(tokens[0],
            isSet(1) ? tokenizeString<std::vector<string>>(tokens[1], ",") : std::vector<string>{settings.thisSystem},
            isSet(2) ? tokens[2] : "",
            isSet(3) ? std::stoull(tokens[3]) : 1LL,
            isSet(4) ? std::stoull(tokens[4]) : 1LL,
            isSet(5) ? tokenizeString<std::set<string>>(tokens[5], ",") : std::set<string>{},
            isSet(6) ? tokenizeString<std::set<string>>(tokens[6], ",") : std::set<string>{},
            isSet(7) ? tokens[7] : "");
        result.emplace_back(line);
    }
    return result;
}

static Machine parseBuilderLine(const std::string & line)
{
    const auto tokens = tokenizeString<std::vector<std::string>>(line);

    auto isSet = [&](size_t fieldIndex) {
        return tokens.size() > fieldIndex && tokens[fieldIndex] != "" && tokens[fieldIndex] != "-";
    };

    auto parseUnsignedIntField = [&](size_t fieldIndex) {
        const auto result = string2Int<unsigned int>(tokens[fieldIndex]);
        if (!result) {
            throw FormatError("bad machine specification: failed to convert column #%lu in a row: '%s' to 'unsigned int'", fieldIndex, line);
        }
        return result.value();
    };

    auto ensureBase64 = [&](size_t fieldIndex) {
        const auto & str = tokens[fieldIndex];
        try {
            base64Decode(str);
        } catch (const Error & e) {
            throw FormatError("bad machine specification: a column #%lu in a row: '%s' is not valid base64 string: %s", fieldIndex, line, e.what());
        }
        return str;
    };

    if (!isSet(0))
        throw FormatError("bad machine specification: store URL was not found at the first column of a row: '%s'", line);

    return {
        tokens[0],
        isSet(1) ? tokenizeString<std::vector<std::string>>(tokens[1], ",") : std::vector<std::string>{settings.thisSystem},
        isSet(2) ? tokens[2] : "",
        isSet(3) ? parseUnsignedIntField(3) : 1U,
        isSet(4) ? parseUnsignedIntField(4) : 1U,
        isSet(5) ? tokenizeString<std::set<std::string>>(tokens[5], ",") : std::set<std::string>{},
        isSet(6) ? tokenizeString<std::set<std::string>>(tokens[6], ",") : std::set<std::string>{},
        isSet(7) ? ensureBase64(7) : ""
    };
}

static Machines parseBuilderLines(const std::vector<std::string> & builders)
{
    Machines result;
    std::transform(builders.begin(), builders.end(), std::back_inserter(result), parseBuilderLine);
    return result;
}

Machines getMachines()
{
    static auto machines = [&]() {
        Machines machines;
        parseMachines(settings.builders, machines);
        return machines;
    }();
    return machines;
    const auto builderLines = expandBuilderLines(settings.builders);
    return parseBuilderLines(builderLines);
}

}

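For reference, each non-comment builders line handed to parseBuilderLine is a whitespace-separated row of up to eight fields: store URI, comma-separated system types, SSH identity file, max jobs, speed factor, supported features, mandatory features, and a base64-encoded SSH host key; a field can be left at its default with "-". A hedged usage sketch (the machine line and the member names in the comments follow the builders format described in the Nix manual; treat them as illustrative):

// Illustrative: parse a single builders entry into a Machine.
auto machine = parseBuilderLine(
    "ssh://mac x86_64-darwin /home/alice/.ssh/id_ed25519 4 1 kvm,big-parallel benchmark");
// storeUri          == "ssh://mac"
// systemTypes       == {"x86_64-darwin"}
// SSH identity file == "/home/alice/.ssh/id_ed25519"
// maxJobs == 4, speed factor == 1
// supportedFeatures == {"kvm", "big-parallel"}
// mandatoryFeatures == {"benchmark"}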
Some files were not shown because too many files have changed in this diff.