mirror of https://github.com/NixOS/nix synced 2025-07-01 08:28:00 +02:00

Merge remote-tracking branch 'upstream/master' into auto-uid-allocation

This commit is contained in:
Matthew Kenigsberg 2021-09-15 11:51:52 -05:00
commit 3b82c1a5fe
No known key found for this signature in database
GPG key ID: 5DFF42F2615122A8
287 changed files with 32441 additions and 3143 deletions

View file

@ -270,14 +270,23 @@ connected:
{
Activity act(*logger, lvlTalkative, actUnknown, fmt("copying dependencies to '%s'", storeUri));
copyPaths(store, ref<Store>(sshStore), store->parseStorePathSet(inputs), NoRepair, NoCheckSigs, substitute);
copyPaths(*store, *sshStore, store->parseStorePathSet(inputs), NoRepair, NoCheckSigs, substitute);
}
uploadLock = -1;
auto drv = store->readDerivation(*drvPath);
auto outputHashes = staticOutputHashes(*store, drv);
drv.inputSrcs = store->parseStorePathSet(inputs);
// Hijack the input paths of the derivation to include all the paths
// that come from the `inputDrvs` set.
// We don't do that for derivations whose `inputDrvs` is empty,
// because:
// 1. It's not needed.
// 2. Changing the `inputSrcs` set changes the associated output ids,
// which breaks CA derivations.
if (!drv.inputDrvs.empty())
drv.inputSrcs = store->parseStorePathSet(inputs);
auto result = sshStore->buildDerivation(*drvPath, drv);
@ -312,7 +321,7 @@ connected:
if (auto localStore = store.dynamic_pointer_cast<LocalStore>())
for (auto & path : missingPaths)
localStore->locksHeld.insert(store->printStorePath(path)); /* FIXME: ugly */
copyPaths(ref<Store>(sshStore), store, missingPaths, NoRepair, NoCheckSigs, NoSubstitute);
copyPaths(*sshStore, *store, missingPaths, NoRepair, NoCheckSigs, NoSubstitute);
}
// XXX: Should be done as part of `copyPaths`
for (auto & realisation : missingRealisations) {

View file

@ -54,7 +54,31 @@ void StoreCommand::run()
run(getStore());
}
RealisedPathsCommand::RealisedPathsCommand(bool recursive)
EvalCommand::EvalCommand()
{
}
EvalCommand::~EvalCommand()
{
if (evalState)
evalState->printStats();
}
ref<Store> EvalCommand::getEvalStore()
{
if (!evalStore)
evalStore = evalStoreUrl ? openStore(*evalStoreUrl) : getStore();
return ref<Store>(evalStore);
}
ref<EvalState> EvalCommand::getEvalState()
{
if (!evalState)
evalState = std::make_shared<EvalState>(searchPath, getEvalStore(), getStore());
return ref<EvalState>(evalState);
}
BuiltPathsCommand::BuiltPathsCommand(bool recursive)
: recursive(recursive)
{
if (recursive)
@ -81,39 +105,45 @@ RealisedPathsCommand::RealisedPathsCommand(bool recursive)
});
}
void RealisedPathsCommand::run(ref<Store> store)
void BuiltPathsCommand::run(ref<Store> store)
{
std::vector<RealisedPath> paths;
BuiltPaths paths;
if (all) {
if (installables.size())
throw UsageError("'--all' does not expect arguments");
// XXX: Only uses opaque paths, ignores all the realisations
for (auto & p : store->queryAllValidPaths())
paths.push_back(p);
paths.push_back(BuiltPath::Opaque{p});
} else {
auto pathSet = toRealisedPaths(store, realiseMode, operateOn, installables);
paths = toBuiltPaths(getEvalStore(), store, realiseMode, operateOn, installables);
if (recursive) {
auto roots = std::move(pathSet);
pathSet = {};
RealisedPath::closure(*store, roots, pathSet);
// XXX: This only computes the store path closure, ignoring
// intermediate realisations
StorePathSet pathsRoots, pathsClosure;
for (auto & root: paths) {
auto rootFromThis = root.outPaths();
pathsRoots.insert(rootFromThis.begin(), rootFromThis.end());
}
store->computeFSClosure(pathsRoots, pathsClosure);
for (auto & path : pathsClosure)
paths.push_back(BuiltPath::Opaque{path});
}
for (auto & path : pathSet)
paths.push_back(path);
}
run(store, std::move(paths));
}
StorePathsCommand::StorePathsCommand(bool recursive)
: RealisedPathsCommand(recursive)
: BuiltPathsCommand(recursive)
{
}
void StorePathsCommand::run(ref<Store> store, std::vector<RealisedPath> paths)
void StorePathsCommand::run(ref<Store> store, BuiltPaths paths)
{
StorePaths storePaths;
for (auto & p : paths)
storePaths.push_back(p.path());
for (auto& builtPath : paths)
for (auto& p : builtPath.outPaths())
storePaths.push_back(p);
run(store, std::move(storePaths));
}
@ -162,7 +192,7 @@ void MixProfile::updateProfile(const StorePath & storePath)
profile2, storePath));
}
void MixProfile::updateProfile(const DerivedPathsWithHints & buildables)
void MixProfile::updateProfile(const BuiltPaths & buildables)
{
if (!profile) return;
@ -170,22 +200,19 @@ void MixProfile::updateProfile(const DerivedPathsWithHints & buildables)
for (auto & buildable : buildables) {
std::visit(overloaded {
[&](DerivedPathWithHints::Opaque bo) {
[&](BuiltPath::Opaque bo) {
result.push_back(bo.path);
},
[&](DerivedPathWithHints::Built bfd) {
[&](BuiltPath::Built bfd) {
for (auto & output : bfd.outputs) {
/* Output path should be known because we just tried to
build it. */
assert(output.second);
result.push_back(*output.second);
result.push_back(output.second);
}
},
}, buildable.raw());
}
if (result.size() != 1)
throw Error("'--profile' requires that the arguments produce a single store path, but there are %d", result.size());
throw UsageError("'--profile' requires that the arguments produce a single store path, but there are %d", result.size());
updateProfile(result[0]);
}
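
Editor's note: the hunks above lean heavily on the std::visit + `overloaded` idiom to dispatch on the two BuiltPath alternatives (Opaque vs Built). For readers unfamiliar with that idiom, here is a minimal, self-contained C++17 sketch; the types below are illustrative stand-ins, not the real Nix classes.

#include <iostream>
#include <string>
#include <variant>
#include <vector>

// Stand-ins for the two BuiltPath alternatives; illustrative only.
struct Opaque { std::string path; };
struct Built { std::string drvPath; std::vector<std::string> outputs; };
using ExamplePath = std::variant<Opaque, Built>;

// The `overloaded` helper seen throughout the diff: it merges several
// lambdas into one callable so std::visit can pick the matching overload.
template<class... Ts> struct overloaded : Ts... { using Ts::operator()...; };
template<class... Ts> overloaded(Ts...) -> overloaded<Ts...>;

int main()
{
    std::vector<ExamplePath> paths = {
        Opaque{"/nix/store/...-source"},
        Built{"/nix/store/...-hello.drv", {"out", "dev"}},
    };
    for (auto & p : paths)
        std::visit(overloaded {
            [](const Opaque & o) { std::cout << "opaque: " << o.path << "\n"; },
            [](const Built & b)  { std::cout << "built:  " << b.drvPath
                                             << " (" << b.outputs.size() << " outputs)\n"; },
        }, p);
}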

View file

@ -45,11 +45,18 @@ private:
struct EvalCommand : virtual StoreCommand, MixEvalArgs
{
ref<EvalState> getEvalState();
std::shared_ptr<EvalState> evalState;
EvalCommand();
~EvalCommand();
ref<Store> getEvalStore();
ref<EvalState> getEvalState();
private:
std::shared_ptr<Store> evalStore;
std::shared_ptr<EvalState> evalState;
};
struct MixFlakeOptions : virtual Args, EvalCommand
@ -101,6 +108,8 @@ enum class Realise {
exists. */
Derivation,
/* Evaluate in dry-run mode. Postcondition: nothing. */
// FIXME: currently unused, but could be revived if we can
// evaluate derivations in-memory.
Nothing
};
@ -143,7 +152,7 @@ private:
};
/* A command that operates on zero or more store paths. */
struct RealisedPathsCommand : public InstallablesCommand
struct BuiltPathsCommand : public InstallablesCommand
{
private:
@ -156,26 +165,26 @@ protected:
public:
RealisedPathsCommand(bool recursive = false);
BuiltPathsCommand(bool recursive = false);
using StoreCommand::run;
virtual void run(ref<Store> store, std::vector<RealisedPath> paths) = 0;
virtual void run(ref<Store> store, BuiltPaths paths) = 0;
void run(ref<Store> store) override;
bool useDefaultInstallables() override { return !all; }
};
struct StorePathsCommand : public RealisedPathsCommand
struct StorePathsCommand : public BuiltPathsCommand
{
StorePathsCommand(bool recursive = false);
using RealisedPathsCommand::run;
using BuiltPathsCommand::run;
virtual void run(ref<Store> store, std::vector<StorePath> storePaths) = 0;
void run(ref<Store> store, std::vector<RealisedPath> paths) override;
void run(ref<Store> store, BuiltPaths paths) override;
};
/* A command that operates on exactly one store path. */
@ -216,26 +225,37 @@ static RegisterCommand registerCommand2(std::vector<std::string> && name)
return RegisterCommand(std::move(name), [](){ return make_ref<T>(); });
}
DerivedPathsWithHints build(ref<Store> store, Realise mode,
std::vector<std::shared_ptr<Installable>> installables, BuildMode bMode = bmNormal);
BuiltPaths build(
ref<Store> evalStore,
ref<Store> store, Realise mode,
const std::vector<std::shared_ptr<Installable>> & installables,
BuildMode bMode = bmNormal);
std::set<StorePath> toStorePaths(ref<Store> store,
Realise mode, OperateOn operateOn,
std::vector<std::shared_ptr<Installable>> installables);
StorePath toStorePath(ref<Store> store,
Realise mode, OperateOn operateOn,
std::shared_ptr<Installable> installable);
std::set<StorePath> toDerivations(ref<Store> store,
std::vector<std::shared_ptr<Installable>> installables,
bool useDeriver = false);
std::set<RealisedPath> toRealisedPaths(
std::set<StorePath> toStorePaths(
ref<Store> evalStore,
ref<Store> store,
Realise mode,
OperateOn operateOn,
std::vector<std::shared_ptr<Installable>> installables);
const std::vector<std::shared_ptr<Installable>> & installables);
StorePath toStorePath(
ref<Store> evalStore,
ref<Store> store,
Realise mode,
OperateOn operateOn,
std::shared_ptr<Installable> installable);
std::set<StorePath> toDerivations(
ref<Store> store,
const std::vector<std::shared_ptr<Installable>> & installables,
bool useDeriver = false);
BuiltPaths toBuiltPaths(
ref<Store> evalStore,
ref<Store> store,
Realise mode,
OperateOn operateOn,
const std::vector<std::shared_ptr<Installable>> & installables);
/* Helper function to generate args that invoke $EDITOR on
filename:lineno. */
@ -252,7 +272,7 @@ struct MixProfile : virtual StoreCommand
/* If 'profile' is set, make it point at the store path produced
by 'buildables'. */
void updateProfile(const DerivedPathsWithHints & buildables);
void updateProfile(const BuiltPaths & buildables);
};
struct MixDefaultProfile : MixProfile

View file

@ -58,9 +58,13 @@ MixFlakeOptions::MixFlakeOptions()
addFlag({
.longName = "no-registries",
.description = "Don't allow lookups in the flake registries.",
.description =
"Don't allow lookups in the flake registries. This option is deprecated; use `--no-use-registries`.",
.category = category,
.handler = {&lockFlags.useRegistries, false}
.handler = {[&]() {
lockFlags.useRegistries = false;
warn("'--no-registries' is deprecated; use '--no-use-registries'");
}}
});
addFlag({
@ -171,14 +175,50 @@ Strings SourceExprCommand::getDefaultFlakeAttrPathPrefixes()
void SourceExprCommand::completeInstallable(std::string_view prefix)
{
if (file) return; // FIXME
if (file) {
evalSettings.pureEval = false;
auto state = getEvalState();
Expr *e = state->parseExprFromFile(
resolveExprPath(state->checkSourcePath(lookupFileArg(*state, *file)))
);
completeFlakeRefWithFragment(
getEvalState(),
lockFlags,
getDefaultFlakeAttrPathPrefixes(),
getDefaultFlakeAttrPaths(),
prefix);
Value root;
state->eval(e, root);
auto autoArgs = getAutoArgs(*state);
std::string prefix_ = std::string(prefix);
auto sep = prefix_.rfind('.');
std::string searchWord;
if (sep != std::string::npos) {
searchWord = prefix_.substr(sep, std::string::npos);
prefix_ = prefix_.substr(0, sep);
} else {
searchWord = prefix_;
prefix_ = "";
}
Value &v1(*findAlongAttrPath(*state, prefix_, *autoArgs, root).first);
state->forceValue(v1);
Value v2;
state->autoCallFunction(*autoArgs, v1, v2);
if (v2.type() == nAttrs) {
for (auto & i : *v2.attrs) {
std::string name = i.name;
if (name.find(searchWord) == 0) {
completions->add(i.name);
}
}
}
} else {
completeFlakeRefWithFragment(
getEvalState(),
lockFlags,
getDefaultFlakeAttrPathPrefixes(),
getDefaultFlakeAttrPaths(),
prefix);
}
}
void completeFlakeRefWithFragment(
@ -249,19 +289,6 @@ void completeFlakeRefWithFragment(
completeFlakeRef(evalState->store, prefix);
}
ref<EvalState> EvalCommand::getEvalState()
{
if (!evalState)
evalState = std::make_shared<EvalState>(searchPath, getStore());
return ref<EvalState>(evalState);
}
EvalCommand::~EvalCommand()
{
if (evalState)
evalState->printStats();
}
void completeFlakeRef(ref<Store> store, std::string_view prefix)
{
if (prefix == "")
@ -285,9 +312,9 @@ void completeFlakeRef(ref<Store> store, std::string_view prefix)
}
}
DerivedPathWithHints Installable::toDerivedPathWithHints()
DerivedPath Installable::toDerivedPath()
{
auto buildables = toDerivedPathsWithHints();
auto buildables = toDerivedPaths();
if (buildables.size() != 1)
throw Error("installable '%s' evaluates to %d derivations, where only one is expected", what(), buildables.size());
return std::move(buildables[0]);
@ -321,22 +348,19 @@ struct InstallableStorePath : Installable
std::string what() override { return store->printStorePath(storePath); }
DerivedPathsWithHints toDerivedPathsWithHints() override
DerivedPaths toDerivedPaths() override
{
if (storePath.isDerivation()) {
std::map<std::string, std::optional<StorePath>> outputs;
auto drv = store->readDerivation(storePath);
for (auto & [name, output] : drv.outputsAndOptPaths(*store))
outputs.emplace(name, output.second);
return {
DerivedPathWithHints::Built {
DerivedPath::Built {
.drvPath = storePath,
.outputs = std::move(outputs)
.outputs = drv.outputNames(),
}
};
} else {
return {
DerivedPathWithHints::Opaque {
DerivedPath::Opaque {
.path = storePath,
}
};
@ -349,22 +373,24 @@ struct InstallableStorePath : Installable
}
};
DerivedPathsWithHints InstallableValue::toDerivedPathsWithHints()
DerivedPaths InstallableValue::toDerivedPaths()
{
DerivedPathsWithHints res;
DerivedPaths res;
std::map<StorePath, std::map<std::string, std::optional<StorePath>>> drvsToOutputs;
std::map<StorePath, std::set<std::string>> drvsToOutputs;
RealisedPath::Set drvsToCopy;
// Group by derivation, helps with .all in particular
for (auto & drv : toDerivations()) {
auto outputName = drv.outputName;
if (outputName == "")
throw Error("derivation '%s' lacks an 'outputName' attribute", state->store->printStorePath(drv.drvPath));
drvsToOutputs[drv.drvPath].insert_or_assign(outputName, drv.outPath);
drvsToOutputs[drv.drvPath].insert(outputName);
drvsToCopy.insert(drv.drvPath);
}
for (auto & i : drvsToOutputs)
res.push_back(DerivedPathWithHints::Built { i.first, i.second });
res.push_back(DerivedPath::Built { i.first, i.second });
return res;
}
@ -503,7 +529,11 @@ std::tuple<std::string, FlakeRef, InstallableValue::DerivationInfo> InstallableF
auto root = cache->getRoot();
for (auto & attrPath : getActualAttrPaths()) {
auto attr = root->findAlongAttrPath(parseAttrPath(*state, attrPath));
auto attr = root->findAlongAttrPath(
parseAttrPath(*state, attrPath),
true
);
if (!attr) continue;
if (!attr->isDerivation())
@ -572,10 +602,10 @@ InstallableFlake::getCursors(EvalState & state)
std::shared_ptr<flake::LockedFlake> InstallableFlake::getLockedFlake() const
{
flake::LockFlags lockFlagsApplyConfig = lockFlags;
lockFlagsApplyConfig.applyNixConfig = true;
if (!_lockedFlake) {
_lockedFlake = std::make_shared<flake::LockedFlake>(lockFlake(*state, flakeRef, lockFlags));
_lockedFlake->flake.config.apply();
// FIXME: send new config to the daemon.
_lockedFlake = std::make_shared<flake::LockedFlake>(lockFlake(*state, flakeRef, lockFlagsApplyConfig));
}
return _lockedFlake;
}
@ -624,6 +654,17 @@ std::vector<std::shared_ptr<Installable>> SourceExprCommand::parseInstallables(
for (auto & s : ss) {
std::exception_ptr ex;
if (s.find('/') != std::string::npos) {
try {
result.push_back(std::make_shared<InstallableStorePath>(store, store->followLinksToStorePath(s)));
continue;
} catch (BadStorePath &) {
} catch (...) {
if (!ex)
ex = std::current_exception();
}
}
try {
auto [flakeRef, fragment] = parseFlakeRefWithFragment(s, absPath("."));
result.push_back(std::make_shared<InstallableFlake>(
@ -638,25 +679,7 @@ std::vector<std::shared_ptr<Installable>> SourceExprCommand::parseInstallables(
ex = std::current_exception();
}
if (s.find('/') != std::string::npos) {
try {
result.push_back(std::make_shared<InstallableStorePath>(store, store->followLinksToStorePath(s)));
continue;
} catch (BadStorePath &) {
} catch (...) {
if (!ex)
ex = std::current_exception();
}
}
std::rethrow_exception(ex);
/*
throw Error(
pathExists(s)
? "path '%s' is not a flake or a store path"
: "don't know how to handle argument '%s'", s);
*/
}
}
@ -671,109 +694,121 @@ std::shared_ptr<Installable> SourceExprCommand::parseInstallable(
return installables.front();
}
DerivedPathsWithHints build(ref<Store> store, Realise mode,
std::vector<std::shared_ptr<Installable>> installables, BuildMode bMode)
BuiltPaths getBuiltPaths(ref<Store> evalStore, ref<Store> store, const DerivedPaths & hopefullyBuiltPaths)
{
if (mode == Realise::Nothing)
settings.readOnlyMode = true;
DerivedPathsWithHints buildables;
std::vector<DerivedPath> pathsToBuild;
for (auto & i : installables) {
for (auto & b : i->toDerivedPathsWithHints()) {
std::visit(overloaded {
[&](DerivedPathWithHints::Opaque bo) {
pathsToBuild.push_back(bo);
BuiltPaths res;
for (auto & b : hopefullyBuiltPaths)
std::visit(
overloaded{
[&](DerivedPath::Opaque bo) {
res.push_back(BuiltPath::Opaque{bo.path});
},
[&](DerivedPathWithHints::Built bfd) {
StringSet outputNames;
for (auto & output : bfd.outputs)
outputNames.insert(output.first);
pathsToBuild.push_back(
DerivedPath::Built{bfd.drvPath, outputNames});
},
}, b.raw());
buildables.push_back(std::move(b));
}
}
if (mode == Realise::Nothing)
printMissing(store, pathsToBuild, lvlError);
else if (mode == Realise::Outputs)
store->buildPaths(pathsToBuild, bMode);
return buildables;
}
std::set<RealisedPath> toRealisedPaths(
ref<Store> store,
Realise mode,
OperateOn operateOn,
std::vector<std::shared_ptr<Installable>> installables)
{
std::set<RealisedPath> res;
if (operateOn == OperateOn::Output) {
for (auto & b : build(store, mode, installables))
std::visit(overloaded {
[&](DerivedPathWithHints::Opaque bo) {
res.insert(bo.path);
},
[&](DerivedPathWithHints::Built bfd) {
auto drv = store->readDerivation(bfd.drvPath);
auto outputHashes = staticOutputHashes(*store, drv);
[&](DerivedPath::Built bfd) {
OutputPathMap outputs;
auto drv = evalStore->readDerivation(bfd.drvPath);
auto outputHashes = staticOutputHashes(*evalStore, drv); // FIXME: expensive
auto drvOutputs = drv.outputsAndOptPaths(*store);
for (auto & output : bfd.outputs) {
if (settings.isExperimentalFeatureEnabled("ca-derivations")) {
if (!outputHashes.count(output.first))
throw Error(
"the derivation '%s' doesn't have an output named '%s'",
store->printStorePath(bfd.drvPath),
output.first);
auto outputId = DrvOutput{outputHashes.at(output.first), output.first};
auto realisation = store->queryRealisation(outputId);
if (!outputHashes.count(output))
throw Error(
"the derivation '%s' doesn't have an output named '%s'",
store->printStorePath(bfd.drvPath), output);
if (settings.isExperimentalFeatureEnabled(
"ca-derivations")) {
auto outputId =
DrvOutput{outputHashes.at(output), output};
auto realisation =
store->queryRealisation(outputId);
if (!realisation)
throw Error("cannot operate on an output of unbuilt content-addresed derivation '%s'", outputId.to_string());
res.insert(RealisedPath{*realisation});
}
else {
// If ca-derivations isn't enabled, behave as if
// all the paths are opaque to keep the default
// behavior
assert(output.second);
res.insert(*output.second);
throw Error(
"cannot operate on an output of unbuilt "
"content-addressed derivation '%s'",
outputId.to_string());
outputs.insert_or_assign(
output, realisation->outPath);
} else {
// If ca-derivations isn't enabled, assume that
// the output path is statically known.
assert(drvOutputs.count(output));
assert(drvOutputs.at(output).second);
outputs.insert_or_assign(
output, *drvOutputs.at(output).second);
}
}
res.push_back(BuiltPath::Built{bfd.drvPath, outputs});
},
}, b.raw());
} else {
if (mode == Realise::Nothing)
settings.readOnlyMode = true;
for (auto & i : installables)
for (auto & b : i->toDerivedPathsWithHints())
if (auto bfd = std::get_if<DerivedPathWithHints::Built>(&b))
res.insert(bfd->drvPath);
}
},
b.raw());
return res;
}
StorePathSet toStorePaths(ref<Store> store,
BuiltPaths build(
ref<Store> evalStore,
ref<Store> store,
Realise mode,
const std::vector<std::shared_ptr<Installable>> & installables,
BuildMode bMode)
{
if (mode == Realise::Nothing)
settings.readOnlyMode = true;
std::vector<DerivedPath> pathsToBuild;
for (auto & i : installables) {
auto b = i->toDerivedPaths();
pathsToBuild.insert(pathsToBuild.end(), b.begin(), b.end());
}
if (mode == Realise::Nothing || mode == Realise::Derivation)
printMissing(store, pathsToBuild, lvlError);
else if (mode == Realise::Outputs)
store->buildPaths(pathsToBuild, bMode, evalStore);
return getBuiltPaths(evalStore, store, pathsToBuild);
}
BuiltPaths toBuiltPaths(
ref<Store> evalStore,
ref<Store> store,
Realise mode,
OperateOn operateOn,
const std::vector<std::shared_ptr<Installable>> & installables)
{
if (operateOn == OperateOn::Output)
return build(evalStore, store, mode, installables);
else {
if (mode == Realise::Nothing)
settings.readOnlyMode = true;
BuiltPaths res;
for (auto & drvPath : toDerivations(store, installables, true))
res.push_back(BuiltPath::Opaque{drvPath});
return res;
}
}
StorePathSet toStorePaths(
ref<Store> evalStore,
ref<Store> store,
Realise mode, OperateOn operateOn,
std::vector<std::shared_ptr<Installable>> installables)
const std::vector<std::shared_ptr<Installable>> & installables)
{
StorePathSet outPaths;
for (auto & path : toRealisedPaths(store, mode, operateOn, installables))
outPaths.insert(path.path());
for (auto & path : toBuiltPaths(evalStore, store, mode, operateOn, installables)) {
auto thisOutPaths = path.outPaths();
outPaths.insert(thisOutPaths.begin(), thisOutPaths.end());
}
return outPaths;
}
StorePath toStorePath(ref<Store> store,
StorePath toStorePath(
ref<Store> evalStore,
ref<Store> store,
Realise mode, OperateOn operateOn,
std::shared_ptr<Installable> installable)
{
auto paths = toStorePaths(store, mode, operateOn, {installable});
auto paths = toStorePaths(evalStore, store, mode, operateOn, {installable});
if (paths.size() != 1)
throw Error("argument '%s' should evaluate to one store path", installable->what());
@ -781,15 +816,17 @@ StorePath toStorePath(ref<Store> store,
return *paths.begin();
}
StorePathSet toDerivations(ref<Store> store,
std::vector<std::shared_ptr<Installable>> installables, bool useDeriver)
StorePathSet toDerivations(
ref<Store> store,
const std::vector<std::shared_ptr<Installable>> & installables,
bool useDeriver)
{
StorePathSet drvPaths;
for (auto & i : installables)
for (auto & b : i->toDerivedPathsWithHints())
for (auto & b : i->toDerivedPaths())
std::visit(overloaded {
[&](DerivedPathWithHints::Opaque bo) {
[&](DerivedPath::Opaque bo) {
if (!useDeriver)
throw Error("argument '%s' did not evaluate to a derivation", i->what());
auto derivers = store->queryValidDerivers(bo.path);
@ -798,7 +835,7 @@ StorePathSet toDerivations(ref<Store> store,
// FIXME: use all derivers?
drvPaths.insert(*derivers.begin());
},
[&](DerivedPathWithHints::Built bfd) {
[&](DerivedPath::Built bfd) {
drvPaths.insert(bfd.drvPath);
},
}, b.raw());

View file

@ -23,17 +23,23 @@ struct App
// FIXME: add args, sandbox settings, metadata, ...
};
struct UnresolvedApp
{
App unresolved;
App resolve(ref<Store> evalStore, ref<Store> store);
};
struct Installable
{
virtual ~Installable() { }
virtual std::string what() = 0;
virtual DerivedPathsWithHints toDerivedPathsWithHints() = 0;
virtual DerivedPaths toDerivedPaths() = 0;
DerivedPathWithHints toDerivedPathWithHints();
DerivedPath toDerivedPath();
App toApp(EvalState & state);
UnresolvedApp toApp(EvalState & state);
virtual std::pair<Value *, Pos> toValue(EvalState & state)
{
@ -74,7 +80,7 @@ struct InstallableValue : Installable
virtual std::vector<DerivationInfo> toDerivations() = 0;
DerivedPathsWithHints toDerivedPathsWithHints() override;
DerivedPaths toDerivedPaths() override;
};
struct InstallableFlake : InstallableValue

View file

@ -8,8 +8,8 @@ libcmd_SOURCES := $(wildcard $(d)/*.cc)
libcmd_CXXFLAGS += -I src/libutil -I src/libstore -I src/libexpr -I src/libmain -I src/libfetchers
libcmd_LDFLAGS = -llowdown
libcmd_LDFLAGS += -llowdown -pthread
libcmd_LIBS = libstore libutil libexpr libmain libfetchers
$(eval $(call install-file-in, $(d)/nix-cmd.pc, $(prefix)/lib/pkgconfig, 0644))
$(eval $(call install-file-in, $(d)/nix-cmd.pc, $(libdir)/pkgconfig, 0644))

View file

@ -12,7 +12,7 @@ std::string renderMarkdownToTerminal(std::string_view markdown)
struct lowdown_opts opts {
.type = LOWDOWN_TERM,
.maxdepth = 20,
.cols = std::min(getWindowSize().second, (unsigned short) 80),
.cols = std::max(getWindowSize().second, (unsigned short) 80),
.hmargin = 0,
.vmargin = 0,
.feat = LOWDOWN_COMMONMARK | LOWDOWN_FENCED | LOWDOWN_DEFLIST | LOWDOWN_TABLES,
@ -44,7 +44,7 @@ std::string renderMarkdownToTerminal(std::string_view markdown)
if (!rndr_res)
throw Error("allocation error while rendering Markdown");
return std::string(buf->data, buf->size);
return filterANSIEscapes(std::string(buf->data, buf->size), !shouldANSI());
}
}

View file

@ -19,7 +19,7 @@ static Strings parseAttrPath(std::string_view s)
++i;
while (1) {
if (i == s.end())
throw Error("missing closing quote in selection path '%1%'", s);
throw ParseError("missing closing quote in selection path '%1%'", s);
if (*i == '"') break;
cur.push_back(*i++);
}
@ -100,7 +100,7 @@ std::pair<Value *, Pos> findAlongAttrPath(EvalState & state, const string & attr
}
Pos findDerivationFilename(EvalState & state, Value & v, std::string what)
Pos findPackageFilename(EvalState & state, Value & v, std::string what)
{
Value * v2;
try {
@ -116,14 +116,14 @@ Pos findDerivationFilename(EvalState & state, Value & v, std::string what)
auto colon = pos.rfind(':');
if (colon == std::string::npos)
throw Error("cannot parse meta.position attribute '%s'", pos);
throw ParseError("cannot parse meta.position attribute '%s'", pos);
std::string filename(pos, 0, colon);
unsigned int lineno;
try {
lineno = std::stoi(std::string(pos, colon + 1));
} catch (std::invalid_argument & e) {
throw Error("cannot parse line number '%s'", pos);
throw ParseError("cannot parse line number '%s'", pos);
}
Symbol file = state.symbols.create(filename);

View file

@ -14,7 +14,7 @@ std::pair<Value *, Pos> findAlongAttrPath(EvalState & state, const string & attr
Bindings & autoArgs, Value & vIn);
/* Heuristic to find the filename and lineno of a nix value. */
Pos findDerivationFilename(EvalState & state, Value & v, std::string what);
Pos findPackageFilename(EvalState & state, Value & v, std::string what);
std::vector<Symbol> parseAttrPath(EvalState & state, std::string_view s);

View file

@ -17,8 +17,8 @@ struct Attr
{
Symbol name;
Value * value;
Pos * pos;
Attr(Symbol name, Value * value, Pos * pos = &noPos)
ptr<Pos> pos;
Attr(Symbol name, Value * value, ptr<Pos> pos = ptr(&noPos))
: name(name), value(value), pos(pos) { };
Attr() : pos(&noPos) { };
bool operator < (const Attr & a) const
@ -35,12 +35,13 @@ class Bindings
{
public:
typedef uint32_t size_t;
ptr<Pos> pos;
private:
size_t size_, capacity_;
Attr attrs[0];
Bindings(size_t capacity) : size_(0), capacity_(capacity) { }
Bindings(size_t capacity) : pos(&noPos), size_(0), capacity_(capacity) { }
Bindings(const Bindings & bindings) = delete;
public:
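
Editor's note: as used in this hunk, `ptr<Pos>` behaves like a thin non-nullable pointer, so `Attr::pos` and the new `Bindings::pos` always default to `&noPos` instead of carrying a nullable `Pos *`. A rough illustrative sketch of such a wrapper (not the actual Nix `ptr` implementation):

#include <cassert>

// Illustrative only: a minimal non-nullable pointer wrapper in the spirit
// of the ptr<Pos> used above; the real one lives in Nix's util headers.
template<typename T>
struct ptr
{
    T * p;
    explicit ptr(T * p) : p(p) { assert(p); }   // reject null at construction
    T & operator *() const { return *p; }
    T * operator ->() const { return p; }
};

struct Pos { unsigned int line = 0, column = 0; };
static Pos noPos;

int main()
{
    ptr<Pos> pos(&noPos);   // always valid, like Attr::pos
    return pos->line;       // no null checks needed at use sites
}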

View file

@ -61,6 +61,14 @@ MixEvalArgs::MixEvalArgs()
fetchers::overrideRegistry(from.input, to.input, extraAttrs);
}}
});
addFlag({
.longName = "eval-store",
.description = "The Nix store to use for evaluations.",
.category = category,
.labels = {"store-url"},
.handler = {&evalStoreUrl},
});
}
Bindings * MixEvalArgs::getAutoArgs(EvalState & state)
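
Editor's note: the value of the new `--eval-store` flag lands in `evalStoreUrl`, which `EvalCommand::getEvalStore()` (libcmd hunk above) turns into a store and hands to the two-store `EvalState` constructor added further down in libexpr. A rough sketch of that wiring, assuming the headers in this tree; the helper name is made up for illustration:

#include <optional>
#include <string>

#include "eval.hh"       // EvalState, now accepting a separate build store
#include "store-api.hh"  // openStore()

using namespace nix;

// Hypothetical helper mirroring the --eval-store plumbing in this diff:
// .drv files are materialised in `evalStore`, builds go to `buildStore`.
static ref<EvalState> makeEvalState(
    const Strings & searchPath,
    const std::optional<std::string> & evalStoreUrl,
    ref<Store> buildStore)
{
    auto evalStore = evalStoreUrl ? openStore(*evalStoreUrl) : buildStore;
    return make_ref<EvalState>(searchPath, evalStore, buildStore);
}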

View file

@ -16,8 +16,9 @@ struct MixEvalArgs : virtual Args
Strings searchPath;
private:
std::optional<std::string> evalStoreUrl;
private:
std::map<std::string, std::string> autoArgs;
};

View file

@ -486,11 +486,11 @@ std::shared_ptr<AttrCursor> AttrCursor::getAttr(std::string_view name)
return getAttr(root->state.symbols.create(name));
}
std::shared_ptr<AttrCursor> AttrCursor::findAlongAttrPath(const std::vector<Symbol> & attrPath)
std::shared_ptr<AttrCursor> AttrCursor::findAlongAttrPath(const std::vector<Symbol> & attrPath, bool force)
{
auto res = shared_from_this();
for (auto & attr : attrPath) {
res = res->maybeGetAttr(attr);
res = res->maybeGetAttr(attr, force);
if (!res) return {};
}
return res;

View file

@ -102,7 +102,7 @@ public:
std::shared_ptr<AttrCursor> getAttr(std::string_view name);
std::shared_ptr<AttrCursor> findAlongAttrPath(const std::vector<Symbol> & attrPath);
std::shared_ptr<AttrCursor> findAlongAttrPath(const std::vector<Symbol> & attrPath, bool force = false);
std::string getString();

View file

@ -64,7 +64,11 @@ static char * dupStringWithLen(const char * s, size_t size)
RootValue allocRootValue(Value * v)
{
#if HAVE_BOEHMGC
return std::allocate_shared<Value *>(traceable_allocator<Value *>(), v);
#else
return std::make_shared<Value *>(v);
#endif
}
@ -201,6 +205,15 @@ string showType(const Value & v)
}
}
Pos Value::determinePos(const Pos &pos) const
{
switch (internalType) {
case tAttrs: return *attrs->pos;
case tLambda: return lambda.fun->pos;
case tApp: return app.left->determinePos(pos);
default: return pos;
}
}
bool Value::isTrivial() const
{
@ -224,22 +237,34 @@ static void * oomHandler(size_t requested)
}
class BoehmGCStackAllocator : public StackAllocator {
boost::coroutines2::protected_fixedsize_stack stack {
// We allocate 8 MB, the default max stack size on NixOS.
// A smaller stack might be quicker to allocate but reduces the stack
// depth available for source filter expressions etc.
std::max(boost::context::stack_traits::default_size(), static_cast<std::size_t>(8 * 1024 * 1024))
boost::coroutines2::protected_fixedsize_stack stack {
// We allocate 8 MB, the default max stack size on NixOS.
// A smaller stack might be quicker to allocate but reduces the stack
// depth available for source filter expressions etc.
std::max(boost::context::stack_traits::default_size(), static_cast<std::size_t>(8 * 1024 * 1024))
};
// This is specific to boost::coroutines2::protected_fixedsize_stack.
// The stack protection page is included in sctx.size, so we have to
// subtract one page size from the stack size.
std::size_t pfss_usable_stack_size(boost::context::stack_context &sctx) {
return sctx.size - boost::context::stack_traits::page_size();
}
public:
boost::context::stack_context allocate() override {
auto sctx = stack.allocate();
GC_add_roots(static_cast<char *>(sctx.sp) - sctx.size, sctx.sp);
// Stacks generally start at a high address and grow to lower addresses.
// Architectures that do the opposite are rare; in fact so rare that
// Boost.Coroutine2 does not implement it.
// So we subtract the stack size.
GC_add_roots(static_cast<char *>(sctx.sp) - pfss_usable_stack_size(sctx), sctx.sp);
return sctx;
}
void deallocate(boost::context::stack_context sctx) override {
GC_remove_roots(static_cast<char *>(sctx.sp) - sctx.size, sctx.sp);
GC_remove_roots(static_cast<char *>(sctx.sp) - pfss_usable_stack_size(sctx), sctx.sp);
stack.deallocate(sctx);
}
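
Editor's note: to make the adjusted GC root range concrete, here is a small standalone sketch of the same arithmetic with made-up 64-bit numbers (the real values come from boost::context::stack_traits at runtime):

#include <cassert>
#include <cstddef>
#include <cstdint>

int main()
{
    // Hypothetical figures: an 8 MiB allocation whose last page is the guard
    // page added by protected_fixedsize_stack.
    std::size_t size     = 8u * 1024 * 1024;  // sctx.size (includes the guard page)
    std::size_t pageSize = 4096;              // stack_traits::page_size()
    std::uintptr_t sp    = 0x7f1234800000;    // sctx.sp (high end; stacks grow down)

    // What pfss_usable_stack_size() computes: total size minus the guard page.
    std::size_t usable = size - pageSize;

    // The GC root range registered above is [sp - usable, sp): the memory the
    // coroutine can actually touch, excluding the inaccessible guard page.
    std::uintptr_t low = sp - usable;
    assert(low + usable == sp);
    return 0;
}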
@ -353,7 +378,10 @@ static Strings parseNixPath(const string & s)
}
EvalState::EvalState(const Strings & _searchPath, ref<Store> store)
EvalState::EvalState(
const Strings & _searchPath,
ref<Store> store,
std::shared_ptr<Store> buildStore)
: sWith(symbols.create("<with>"))
, sOutPath(symbols.create("outPath"))
, sDrvPath(symbols.create("drvPath"))
@ -386,6 +414,7 @@ EvalState::EvalState(const Strings & _searchPath, ref<Store> store)
, sEpsilon(symbols.create(""))
, repair(NoRepair)
, store(store)
, buildStore(buildStore ? buildStore : store)
, regexCache(makeRegexCache())
, baseEnv(allocEnv(128))
, staticBaseEnv(false, 0)
@ -741,7 +770,7 @@ inline Value * EvalState::lookupVar(Env * env, const ExprVar & var, bool noEval)
}
Bindings::iterator j = env->values[0]->attrs->find(var.name);
if (j != env->values[0]->attrs->end()) {
if (countCalls && j->pos) attrSelects[*j->pos]++;
if (countCalls) attrSelects[*j->pos]++;
return j->value;
}
if (!env->prevWith)
@ -751,18 +780,10 @@ inline Value * EvalState::lookupVar(Env * env, const ExprVar & var, bool noEval)
}
std::atomic<uint64_t> nrValuesFreed{0};
void finalizeValue(void * obj, void * data)
{
nrValuesFreed++;
}
Value * EvalState::allocValue()
{
nrValues++;
auto v = (Value *) allocBytes(sizeof(Value));
//GC_register_finalizer_no_order(v, finalizeValue, nullptr, nullptr, nullptr);
return v;
}
@ -804,9 +825,9 @@ void EvalState::mkThunk_(Value & v, Expr * expr)
}
void EvalState::mkPos(Value & v, Pos * pos)
void EvalState::mkPos(Value & v, ptr<Pos> pos)
{
if (pos && pos->file.set()) {
if (pos->file.set()) {
mkAttrs(v, 3);
mkString(*allocAttr(v, sFile), pos->file);
mkInt(*allocAttr(v, sLine), pos->line);
@ -829,39 +850,37 @@ Value * Expr::maybeThunk(EvalState & state, Env & env)
}
unsigned long nrAvoided = 0;
Value * ExprVar::maybeThunk(EvalState & state, Env & env)
{
Value * v = state.lookupVar(&env, *this, true);
/* The value might not be initialised in the environment yet.
In that case, ignore it. */
if (v) { nrAvoided++; return v; }
if (v) { state.nrAvoided++; return v; }
return Expr::maybeThunk(state, env);
}
Value * ExprString::maybeThunk(EvalState & state, Env & env)
{
nrAvoided++;
state.nrAvoided++;
return &v;
}
Value * ExprInt::maybeThunk(EvalState & state, Env & env)
{
nrAvoided++;
state.nrAvoided++;
return &v;
}
Value * ExprFloat::maybeThunk(EvalState & state, Env & env)
{
nrAvoided++;
state.nrAvoided++;
return &v;
}
Value * ExprPath::maybeThunk(EvalState & state, Env & env)
{
nrAvoided++;
state.nrAvoided++;
return &v;
}
@ -876,38 +895,23 @@ void EvalState::evalFile(const Path & path_, Value & v, bool mustBeTrivial)
return;
}
Path path2 = resolveExprPath(path);
if ((i = fileEvalCache.find(path2)) != fileEvalCache.end()) {
Path resolvedPath = resolveExprPath(path);
if ((i = fileEvalCache.find(resolvedPath)) != fileEvalCache.end()) {
v = i->second;
return;
}
printTalkative("evaluating file '%1%'", path2);
printTalkative("evaluating file '%1%'", resolvedPath);
Expr * e = nullptr;
auto j = fileParseCache.find(path2);
auto j = fileParseCache.find(resolvedPath);
if (j != fileParseCache.end())
e = j->second;
if (!e)
e = parseExprFromFile(checkSourcePath(path2));
e = parseExprFromFile(checkSourcePath(resolvedPath));
fileParseCache[path2] = e;
try {
// Enforce that 'flake.nix' is a direct attrset, not a
// computation.
if (mustBeTrivial &&
!(dynamic_cast<ExprAttrs *>(e)))
throw Error("file '%s' must be an attribute set", path);
eval(e, v);
} catch (Error & e) {
addErrorTrace(e, "while evaluating the file '%1%':", path2);
throw;
}
fileEvalCache[path2] = v;
if (path != path2) fileEvalCache[path] = v;
cacheFile(path, resolvedPath, e, v, mustBeTrivial);
}
@ -918,6 +922,32 @@ void EvalState::resetFileCache()
}
void EvalState::cacheFile(
const Path & path,
const Path & resolvedPath,
Expr * e,
Value & v,
bool mustBeTrivial)
{
fileParseCache[resolvedPath] = e;
try {
// Enforce that 'flake.nix' is a direct attrset, not a
// computation.
if (mustBeTrivial &&
!(dynamic_cast<ExprAttrs *>(e)))
throw EvalError("file '%s' must be an attribute set", path);
eval(e, v);
} catch (Error & e) {
addErrorTrace(e, "while evaluating the file '%1%':", resolvedPath);
throw;
}
fileEvalCache[resolvedPath] = v;
if (path != resolvedPath) fileEvalCache[path] = v;
}
void EvalState::eval(Expr * e, Value & v)
{
e->eval(*this, baseEnv, v);
@ -1008,7 +1038,7 @@ void ExprAttrs::eval(EvalState & state, Env & env, Value & v)
} else
vAttr = i.second.e->maybeThunk(state, i.second.inherited ? env : env2);
env2.values[displ++] = vAttr;
v.attrs->push_back(Attr(i.first, vAttr, &i.second.pos));
v.attrs->push_back(Attr(i.first, vAttr, ptr(&i.second.pos)));
}
/* If the rec contains an attribute called `__overrides', then
@ -1040,7 +1070,7 @@ void ExprAttrs::eval(EvalState & state, Env & env, Value & v)
else
for (auto & i : attrs)
v.attrs->push_back(Attr(i.first, i.second.e->maybeThunk(state, env), &i.second.pos));
v.attrs->push_back(Attr(i.first, i.second.e->maybeThunk(state, env), ptr(&i.second.pos)));
/* Dynamic attrs apply *after* rec and __overrides. */
for (auto & i : dynamicAttrs) {
@ -1057,9 +1087,11 @@ void ExprAttrs::eval(EvalState & state, Env & env, Value & v)
i.valueExpr->setName(nameSym);
/* Keep sorted order so find can catch duplicates */
v.attrs->push_back(Attr(nameSym, i.valueExpr->maybeThunk(state, *dynamicEnv), &i.pos));
v.attrs->push_back(Attr(nameSym, i.valueExpr->maybeThunk(state, *dynamicEnv), ptr(&i.pos)));
v.attrs->sort(); // FIXME: inefficient
}
v.attrs->pos = ptr(&pos);
}
@ -1114,12 +1146,10 @@ static string showAttrPath(EvalState & state, Env & env, const AttrPath & attrPa
}
unsigned long nrLookups = 0;
void ExprSelect::eval(EvalState & state, Env & env, Value & v)
{
Value vTmp;
Pos * pos2 = 0;
ptr<Pos> pos2(&noPos);
Value * vAttrs = &vTmp;
e->eval(state, env, vTmp);
@ -1127,7 +1157,7 @@ void ExprSelect::eval(EvalState & state, Env & env, Value & v)
try {
for (auto & i : attrPath) {
nrLookups++;
state.nrLookups++;
Bindings::iterator j;
Symbol name = getName(i, state, env);
if (def) {
@ -1145,13 +1175,13 @@ void ExprSelect::eval(EvalState & state, Env & env, Value & v)
}
vAttrs = j->value;
pos2 = j->pos;
if (state.countCalls && pos2) state.attrSelects[*pos2]++;
if (state.countCalls) state.attrSelects[*pos2]++;
}
state.forceValue(*vAttrs, ( pos2 != NULL ? *pos2 : this->pos ) );
state.forceValue(*vAttrs, (*pos2 != noPos ? *pos2 : this->pos ) );
} catch (Error & e) {
if (pos2 && pos2->file != state.sDerivationNix)
if (*pos2 != noPos && pos2->file != state.sDerivationNix)
addErrorTrace(e, *pos2, "while evaluating the attribute '%1%'",
showAttrPath(state, env, attrPath));
throw;
@ -1557,7 +1587,6 @@ void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v)
and none of the strings are allowed to have contexts. */
if (first) {
firstType = vTmp.type();
first = false;
}
if (firstType == nInt) {
@ -1578,7 +1607,12 @@ void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v)
} else
throwEvalError(pos, "cannot add %1% to a float", showType(vTmp));
} else
s << state.coerceToString(pos, vTmp, context, false, firstType == nString);
/* Skip canonicalisation of the first path, which would only be
non-canonical in the first place if it comes from a ./${foo}-style
path. */
s << state.coerceToString(pos, vTmp, context, false, firstType == nString, !first);
first = false;
}
if (firstType == nInt)
@ -1597,7 +1631,7 @@ void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v)
void ExprPos::eval(EvalState & state, Env & env, Value & v)
{
state.mkPos(v, &pos);
state.mkPos(v, ptr(&pos));
}
@ -1767,7 +1801,7 @@ std::optional<string> EvalState::tryAttrsToString(const Pos & pos, Value & v,
}
string EvalState::coerceToString(const Pos & pos, Value & v, PathSet & context,
bool coerceMore, bool copyToStore)
bool coerceMore, bool copyToStore, bool canonicalizePath)
{
forceValue(v, pos);
@ -1779,7 +1813,7 @@ string EvalState::coerceToString(const Pos & pos, Value & v, PathSet & context,
}
if (v.type() == nPath) {
Path path(canonPath(v.path));
Path path(canonicalizePath ? canonPath(v.path) : v.path);
return copyToStore ? copyPathToStore(context, path) : path;
}
@ -2091,9 +2125,12 @@ Strings EvalSettings::getDefaultNixPath()
}
};
add(getHome() + "/.nix-defexpr/channels");
add(settings.nixStateDir + "/profiles/per-user/root/channels/nixpkgs", "nixpkgs");
add(settings.nixStateDir + "/profiles/per-user/root/channels");
if (!evalSettings.restrictEval && !evalSettings.pureEval) {
add(getHome() + "/.nix-defexpr/channels");
add(settings.nixStateDir + "/profiles/per-user/root/channels/nixpkgs", "nixpkgs");
add(settings.nixStateDir + "/profiles/per-user/root/channels");
}
return res;
}

View file

@ -94,8 +94,14 @@ public:
Value vEmptySet;
/* Store used to materialise .drv files. */
const ref<Store> store;
/* Store used to build stuff. */
const ref<Store> buildStore;
RootValue vCallFlake = nullptr;
RootValue vImportedDrvToDerivation = nullptr;
private:
SrcToStore srcToStore;
@ -128,7 +134,10 @@ private:
public:
EvalState(const Strings & _searchPath, ref<Store> store);
EvalState(
const Strings & _searchPath,
ref<Store> store,
std::shared_ptr<Store> buildStore = nullptr);
~EvalState();
void addToSearchPath(const string & s);
@ -163,6 +172,14 @@ public:
trivial (i.e. doesn't require arbitrary computation). */
void evalFile(const Path & path, Value & v, bool mustBeTrivial = false);
/* Like `evalFile`, but with an already parsed expression. */
void cacheFile(
const Path & path,
const Path & resolvedPath,
Expr * e,
Value & v,
bool mustBeTrivial = false);
void resetFileCache();
/* Look up a file in the search path. */
@ -217,7 +234,8 @@ public:
booleans and lists to a string. If `copyToStore' is set,
referenced paths are copied to the Nix store as a side effect. */
string coerceToString(const Pos & pos, Value & v, PathSet & context,
bool coerceMore = false, bool copyToStore = true);
bool coerceMore = false, bool copyToStore = true,
bool canonicalizePath = true);
string copyPathToStore(PathSet & context, const Path & path);
@ -301,7 +319,7 @@ public:
void mkList(Value & v, size_t length);
void mkAttrs(Value & v, size_t capacity);
void mkThunk_(Value & v, Expr * expr);
void mkPos(Value & v, Pos * pos);
void mkPos(Value & v, ptr<Pos> pos);
void concatLists(Value & v, size_t nrLists, Value * * lists, const Pos & pos);
@ -316,8 +334,10 @@ private:
unsigned long nrValuesInEnvs = 0;
unsigned long nrValues = 0;
unsigned long nrListElems = 0;
unsigned long nrLookups = 0;
unsigned long nrAttrsets = 0;
unsigned long nrAttrsInAttrsets = 0;
unsigned long nrAvoided = 0;
unsigned long nrOpUpdates = 0;
unsigned long nrOpUpdateValuesCopied = 0;
unsigned long nrListConcats = 0;
@ -339,6 +359,11 @@ private:
friend struct ExprOpUpdate;
friend struct ExprOpConcatLists;
friend struct ExprVar;
friend struct ExprString;
friend struct ExprInt;
friend struct ExprFloat;
friend struct ExprPath;
friend struct ExprSelect;
friend void prim_getAttr(EvalState & state, const Pos & pos, Value * * args, Value & v);
friend void prim_match(EvalState & state, const Pos & pos, Value * * args, Value & v);

View file

@ -22,12 +22,14 @@ static TrustedList readTrustedList()
static void writeTrustedList(const TrustedList & trustedList)
{
writeFile(trustedListPath(), nlohmann::json(trustedList).dump());
auto path = trustedListPath();
createDirs(dirOf(path));
writeFile(path, nlohmann::json(trustedList).dump());
}
void ConfigFile::apply()
{
std::set<std::string> whitelist{"bash-prompt", "bash-prompt-suffix"};
std::set<std::string> whitelist{"bash-prompt", "bash-prompt-suffix", "flake-registry"};
for (auto & [name, value] : settings) {

View file

@ -1,4 +1,5 @@
#include "flake.hh"
#include "eval.hh"
#include "lockfile.hh"
#include "primops.hh"
#include "eval-inline.hh"
@ -296,7 +297,14 @@ LockedFlake lockFlake(
FlakeCache flakeCache;
auto flake = getFlake(state, topRef, lockFlags.useRegistries, flakeCache);
auto useRegistries = lockFlags.useRegistries.value_or(settings.useRegistries);
auto flake = getFlake(state, topRef, useRegistries, flakeCache);
if (lockFlags.applyNixConfig) {
flake.config.apply();
// FIXME: send new config to the daemon.
}
try {
@ -317,25 +325,38 @@ LockedFlake lockFlake(
std::vector<FlakeRef> parents;
struct LockParent {
/* The path to this parent. */
InputPath path;
/* Whether we are currently inside a top-level lockfile
(inputs absolute) or subordinate lockfile (inputs
relative). */
bool absolute;
};
std::function<void(
const FlakeInputs & flakeInputs,
std::shared_ptr<Node> node,
const InputPath & inputPathPrefix,
std::shared_ptr<const Node> oldNode)>
std::shared_ptr<const Node> oldNode,
const LockParent & parent,
const Path & parentPath)>
computeLocks;
computeLocks = [&](
const FlakeInputs & flakeInputs,
std::shared_ptr<Node> node,
const InputPath & inputPathPrefix,
std::shared_ptr<const Node> oldNode)
std::shared_ptr<const Node> oldNode,
const LockParent & parent,
const Path & parentPath)
{
debug("computing lock file node '%s'", printInputPath(inputPathPrefix));
/* Get the overrides (i.e. attributes of the form
'inputs.nixops.inputs.nixpkgs.url = ...'). */
// FIXME: check this
for (auto & [id, input] : flake.inputs) {
for (auto & [id, input] : flakeInputs) {
for (auto & [idOverride, inputOverride] : input.overrides) {
auto inputPath(inputPathPrefix);
inputPath.push_back(id);
@ -359,22 +380,31 @@ LockedFlake lockFlake(
ancestors? */
auto i = overrides.find(inputPath);
bool hasOverride = i != overrides.end();
if (hasOverride) overridesUsed.insert(inputPath);
if (hasOverride) {
overridesUsed.insert(inputPath);
// Respect the “flakeness” of the input even if we
// override it
i->second.isFlake = input2.isFlake;
}
auto & input = hasOverride ? i->second : input2;
/* Resolve 'follows' later (since it may refer to an input
path we haven't processed yet). */
if (input.follows) {
InputPath target;
if (hasOverride || input.absolute)
/* 'follows' from an override is relative to the
root of the graph. */
if (parent.absolute && !hasOverride) {
target = *input.follows;
else {
/* Otherwise, it's relative to the current flake. */
target = inputPathPrefix;
} else {
if (hasOverride) {
target = inputPathPrefix;
target.pop_back();
} else
target = parent.path;
for (auto & i : *input.follows) target.push_back(i);
}
debug("input '%s' follows '%s'", inputPathS, printInputPath(target));
node->inputs.insert_or_assign(id, target);
continue;
@ -420,7 +450,7 @@ LockedFlake lockFlake(
if (hasChildUpdate) {
auto inputFlake = getFlake(
state, oldLock->lockedRef, false, flakeCache);
computeLocks(inputFlake.inputs, childNode, inputPath, oldLock);
computeLocks(inputFlake.inputs, childNode, inputPath, oldLock, parent, parentPath);
} else {
/* No need to fetch this flake, we can be
lazy. However there may be new overrides on the
@ -437,12 +467,11 @@ LockedFlake lockFlake(
} else if (auto follows = std::get_if<1>(&i.second)) {
fakeInputs.emplace(i.first, FlakeInput {
.follows = *follows,
.absolute = true
});
}
}
computeLocks(fakeInputs, childNode, inputPath, oldLock);
computeLocks(fakeInputs, childNode, inputPath, oldLock, parent, parentPath);
}
} else {
@ -454,7 +483,17 @@ LockedFlake lockFlake(
throw Error("cannot update flake input '%s' in pure mode", inputPathS);
if (input.isFlake) {
auto inputFlake = getFlake(state, *input.ref, lockFlags.useRegistries, flakeCache);
Path localPath = parentPath;
FlakeRef localRef = *input.ref;
// If this input is a path, recurse into it.
// This allows us to resolve path inputs relative to the current flake.
if (localRef.input.getType() == "path") {
localRef.input.parent = parentPath;
localPath = canonPath(parentPath + "/" + *input.ref->input.getSourcePath());
}
auto inputFlake = getFlake(state, localRef, useRegistries, flakeCache);
/* Note: in case of an --override-input, we use
the *original* ref (input2.ref) for the
@ -475,6 +514,13 @@ LockedFlake lockFlake(
parents.push_back(*input.ref);
Finally cleanup([&]() { parents.pop_back(); });
// Follows paths from existing inputs in the top-level lockfile are absolute,
// whereas paths in subordinate lockfiles are relative to those lockfiles.
LockParent newParent {
.path = inputPath,
.absolute = oldLock ? true : false
};
/* Recursively process the inputs of this
flake. Also, unless we already have this flake
in the top-level lock file, use this flake's
@ -484,12 +530,13 @@ LockedFlake lockFlake(
oldLock
? std::dynamic_pointer_cast<const Node>(oldLock)
: LockFile::read(
inputFlake.sourceInfo->actualPath + "/" + inputFlake.lockedRef.subdir + "/flake.lock").root);
inputFlake.sourceInfo->actualPath + "/" + inputFlake.lockedRef.subdir + "/flake.lock").root,
newParent, localPath);
}
else {
auto [sourceInfo, resolvedRef, lockedRef] = fetchOrSubstituteTree(
state, *input.ref, lockFlags.useRegistries, flakeCache);
state, *input.ref, useRegistries, flakeCache);
node->inputs.insert_or_assign(id,
std::make_shared<LockedNode>(lockedRef, *input.ref, false));
}
@ -502,9 +549,17 @@ LockedFlake lockFlake(
}
};
LockParent parent {
.path = {},
.absolute = true
};
// Bring in the current ref for relative path resolution if we have it
auto parentPath = canonPath(flake.sourceInfo->actualPath + "/" + flake.lockedRef.subdir);
computeLocks(
flake.inputs, newLockFile.root, {},
lockFlags.recreateLockFile ? nullptr : oldLockFile.root);
lockFlags.recreateLockFile ? nullptr : oldLockFile.root, parent, parentPath);
for (auto & i : lockFlags.inputOverrides)
if (!overridesUsed.count(i.first))
@ -554,8 +609,8 @@ LockedFlake lockFlake(
topRef.input.markChangedFile(
(topRef.subdir == "" ? "" : topRef.subdir + "/") + "flake.lock",
lockFlags.commitLockFile
? std::optional<std::string>(fmt("%s: %s\n\nFlake input changes:\n\n%s",
relPath, lockFileExists ? "Update" : "Add", diff))
? std::optional<std::string>(fmt("%s: %s\n\nFlake lock file changes:\n\n%s",
relPath, lockFileExists ? "Update" : "Add", filterANSIEscapes(diff, true)))
: std::nullopt);
/* Rewriting the lockfile changed the top-level
@ -563,7 +618,7 @@ LockedFlake lockFlake(
also just clear the 'rev' field... */
auto prevLockedRef = flake.lockedRef;
FlakeCache dummyCache;
flake = getFlake(state, topRef, lockFlags.useRegistries, dummyCache);
flake = getFlake(state, topRef, useRegistries, dummyCache);
if (lockFlags.commitLockFile &&
flake.lockedRef.input.getRev() &&
@ -608,16 +663,14 @@ void callFlake(EvalState & state,
mkString(*vRootSubdir, lockedFlake.flake.lockedRef.subdir);
static RootValue vCallFlake = nullptr;
if (!vCallFlake) {
vCallFlake = allocRootValue(state.allocValue());
if (!state.vCallFlake) {
state.vCallFlake = allocRootValue(state.allocValue());
state.eval(state.parseExprFromString(
#include "call-flake.nix.gen.hh"
, "/"), **vCallFlake);
, "/"), **state.vCallFlake);
}
state.callFunction(**vCallFlake, *vLocks, *vTmp1, noPos);
state.callFunction(**state.vCallFlake, *vLocks, *vTmp1, noPos);
state.callFunction(*vTmp1, *vRootSrc, *vTmp2, noPos);
state.callFunction(*vTmp2, *vRootSubdir, vRes, noPos);
}
@ -633,7 +686,7 @@ static void prim_getFlake(EvalState & state, const Pos & pos, Value * * args, Va
lockFlake(state, flakeRef,
LockFlags {
.updateLockFile = false,
.useRegistries = !evalSettings.pureEval,
.useRegistries = !evalSettings.pureEval && settings.useRegistries,
.allowMutable = !evalSettings.pureEval,
}),
v);

View file

@ -43,7 +43,6 @@ struct FlakeInput
std::optional<FlakeRef> ref;
bool isFlake = true; // true = process flake to get outputs, false = (fetched) static source path
std::optional<InputPath> follows;
bool absolute = false; // whether 'follows' is relative to the flake root
FlakeInputs overrides;
};
@ -102,7 +101,11 @@ struct LockFlags
/* Whether to use the registries to lookup indirect flake
references like 'nixpkgs'. */
bool useRegistries = true;
std::optional<bool> useRegistries = std::nullopt;
/* Whether to apply the flake's nixConfig attribute to the configuration. */
bool applyNixConfig = false;
/* Whether mutable flake references (i.e. those without a Git
revision or similar) without a corresponding lock are

View file

@ -2,6 +2,8 @@
#include "store-api.hh"
#include "url-parts.hh"
#include <iomanip>
#include <nlohmann/json.hpp>
namespace nix::flake {
@ -268,10 +270,20 @@ std::map<InputPath, Node::Edge> LockFile::getAllInputs() const
return res;
}
static std::string describe(const FlakeRef & flakeRef)
{
auto s = fmt("'%s'", flakeRef.to_string());
if (auto lastModified = flakeRef.input.getLastModified())
s += fmt(" (%s)", std::put_time(std::gmtime(&*lastModified), "%Y-%m-%d"));
return s;
}
std::ostream & operator <<(std::ostream & stream, const Node::Edge & edge)
{
if (auto node = std::get_if<0>(&edge))
stream << "'" << (*node)->lockedRef << "'";
stream << describe((*node)->lockedRef);
else if (auto follows = std::get_if<1>(&edge))
stream << fmt("follows '%s'", printInputPath(*follows));
return stream;
@ -299,14 +311,15 @@ std::string LockFile::diff(const LockFile & oldLocks, const LockFile & newLocks)
while (i != oldFlat.end() || j != newFlat.end()) {
if (j != newFlat.end() && (i == oldFlat.end() || i->first > j->first)) {
res += fmt("* Added '%s': %s\n", printInputPath(j->first), j->second);
res += fmt("" ANSI_GREEN "Added input '%s':" ANSI_NORMAL "\n %s\n",
printInputPath(j->first), j->second);
++j;
} else if (i != oldFlat.end() && (j == newFlat.end() || i->first < j->first)) {
res += fmt("* Removed '%s'\n", printInputPath(i->first));
res += fmt("" ANSI_RED "Removed input '%s'" ANSI_NORMAL "\n", printInputPath(i->first));
++i;
} else {
if (!equals(i->second, j->second)) {
res += fmt("* Updated '%s': %s -> %s\n",
res += fmt("" ANSI_BOLD "Updated input '%s':" ANSI_NORMAL "\n %s\n %s\n",
printInputPath(i->first),
i->second,
j->second);

View file

@ -9,6 +9,9 @@
%s DEFAULT
%x STRING
%x IND_STRING
%x INPATH
%x INPATH_SLASH
%x PATH_START
%{
@ -38,11 +41,13 @@ static void adjustLoc(YYLTYPE * loc, const char * s, size_t len)
loc->first_line = loc->last_line;
loc->first_column = loc->last_column;
while (len--) {
for (size_t i = 0; i < len; i++) {
switch (*s++) {
case '\r':
if (*s == '\n') /* cr/lf */
if (*s == '\n') { /* cr/lf */
i++;
s++;
}
/* fall through */
case '\n':
++loc->last_line;
@ -95,9 +100,12 @@ ANY .|\n
ID [a-zA-Z\_][a-zA-Z0-9\_\'\-]*
INT [0-9]+
FLOAT (([1-9][0-9]*\.[0-9]*)|(0?\.[0-9]+))([Ee][+-]?[0-9]+)?
PATH [a-zA-Z0-9\.\_\-\+]*(\/[a-zA-Z0-9\.\_\-\+]+)+\/?
HPATH \~(\/[a-zA-Z0-9\.\_\-\+]+)+\/?
SPATH \<[a-zA-Z0-9\.\_\-\+]+(\/[a-zA-Z0-9\.\_\-\+]+)*\>
PATH_CHAR [a-zA-Z0-9\.\_\-\+]
PATH {PATH_CHAR}*(\/{PATH_CHAR}+)+\/?
PATH_SEG {PATH_CHAR}*\/
HPATH \~(\/{PATH_CHAR}+)+\/?
HPATH_START \~\/
SPATH \<{PATH_CHAR}+(\/{PATH_CHAR}+)*\>
URI [a-zA-Z][a-zA-Z0-9\+\-\.]*\:[a-zA-Z0-9\%\/\?\:\@\&\=\+\$\,\-\_\.\!\~\*\']+
@ -198,17 +206,73 @@ or { return OR_KW; }
return IND_STR;
}
{PATH_SEG}\$\{ |
{HPATH_START}\$\{ {
PUSH_STATE(PATH_START);
yyless(0);
}
<PATH_START>{PATH_SEG} {
POP_STATE();
PUSH_STATE(INPATH_SLASH);
yylval->path = strdup(yytext);
return PATH;
}
<PATH_START>{HPATH_START} {
POP_STATE();
PUSH_STATE(INPATH_SLASH);
yylval->path = strdup(yytext);
return HPATH;
}
{PATH} {
if (yytext[yyleng-1] == '/')
PUSH_STATE(INPATH_SLASH);
else
PUSH_STATE(INPATH);
yylval->path = strdup(yytext);
return PATH;
}
{HPATH} {
if (yytext[yyleng-1] == '/')
PUSH_STATE(INPATH_SLASH);
else
PUSH_STATE(INPATH);
yylval->path = strdup(yytext);
return HPATH;
}
<INPATH,INPATH_SLASH>\$\{ {
POP_STATE();
PUSH_STATE(INPATH);
PUSH_STATE(DEFAULT);
return DOLLAR_CURLY;
}
<INPATH,INPATH_SLASH>{PATH}|{PATH_SEG}|{PATH_CHAR}+ {
POP_STATE();
if (yytext[yyleng-1] == '/')
PUSH_STATE(INPATH_SLASH);
else
PUSH_STATE(INPATH);
yylval->e = new ExprString(data->symbols.create(string(yytext)));
return STR;
}
<INPATH>{ANY} |
<INPATH><<EOF>> {
/* if we encounter a non-path character we inform the parser that the path has
ended with a PATH_END token and re-parse this character in the default
context (it may be ')', ';', or something of that sort) */
POP_STATE();
yyless(0);
return PATH_END;
}
<INPATH_SLASH>{ANY} |
<INPATH_SLASH><<EOF>> {
throw ParseError("path has a trailing slash");
}
{PATH} { if (yytext[yyleng-1] == '/')
throw ParseError("path '%s' has a trailing slash", yytext);
yylval->path = strdup(yytext);
return PATH;
}
{HPATH} { if (yytext[yyleng-1] == '/')
throw ParseError("path '%s' has a trailing slash", yytext);
yylval->path = strdup(yytext);
return HPATH;
}
{SPATH} { yylval->path = strdup(yytext); return SPATH; }
{URI} { yylval->uri = strdup(yytext); return URI; }

View file

@ -15,8 +15,8 @@ libexpr_CXXFLAGS += -I src/libutil -I src/libstore -I src/libfetchers -I src/lib
libexpr_LIBS = libutil libstore libfetchers
libexpr_LDFLAGS = -lboost_context
ifneq ($(OS), FreeBSD)
libexpr_LDFLAGS += -lboost_context -pthread
ifdef HOST_LINUX
libexpr_LDFLAGS += -ldl
endif
@ -35,7 +35,7 @@ $(d)/lexer-tab.cc $(d)/lexer-tab.hh: $(d)/lexer.l
clean-files += $(d)/parser-tab.cc $(d)/parser-tab.hh $(d)/lexer-tab.cc $(d)/lexer-tab.hh
$(eval $(call install-file-in, $(d)/nix-expr.pc, $(prefix)/lib/pkgconfig, 0644))
$(eval $(call install-file-in, $(d)/nix-expr.pc, $(libdir)/pkgconfig, 0644))
$(foreach i, $(wildcard src/libexpr/flake/*.hh), \
$(eval $(call install-file-in, $(i), $(includedir)/nix/flake, 0644)))

View file

@ -180,6 +180,7 @@ struct ExprOpHasAttr : Expr
struct ExprAttrs : Expr
{
bool recursive;
Pos pos;
struct AttrDef {
bool inherited;
Expr * e;
@ -199,7 +200,8 @@ struct ExprAttrs : Expr
};
typedef std::vector<DynamicAttrDef> DynamicAttrDefs;
DynamicAttrDefs dynamicAttrs;
ExprAttrs() : recursive(false) { };
ExprAttrs(const Pos &pos) : recursive(false), pos(pos) { };
ExprAttrs() : recursive(false), pos(noPos) { };
COMMON_METHODS
};

View file

@ -290,13 +290,13 @@ void yyerror(YYLTYPE * loc, yyscan_t scanner, ParseData * data, const char * err
%type <formal> formal
%type <attrNames> attrs attrpath
%type <string_parts> string_parts_interpolated ind_string_parts
%type <e> string_parts string_attr
%type <e> path_start string_parts string_attr
%type <id> attr
%token <id> ID ATTRPATH
%token <e> STR IND_STR
%token <n> INT
%token <nf> FLOAT
%token <path> PATH HPATH SPATH
%token <path> PATH HPATH SPATH PATH_END
%token <uri> URI
%token IF THEN ELSE ASSERT WITH LET IN REC INHERIT EQ NEQ AND OR IMPL OR_KW
%token DOLLAR_CURLY /* == ${ */
@ -405,8 +405,11 @@ expr_simple
| IND_STRING_OPEN ind_string_parts IND_STRING_CLOSE {
$$ = stripIndentation(CUR_POS, data->symbols, *$2);
}
| PATH { $$ = new ExprPath(absPath($1, data->basePath)); }
| HPATH { $$ = new ExprPath(getHome() + string{$1 + 1}); }
| path_start PATH_END { $$ = $1; }
| path_start string_parts_interpolated PATH_END {
$2->insert($2->begin(), $1);
$$ = new ExprConcatStrings(CUR_POS, false, $2);
}
| SPATH {
string path($1 + 1, strlen($1) - 2);
$$ = new ExprApp(CUR_POS,
@ -452,6 +455,20 @@ string_parts_interpolated
}
;
path_start
: PATH {
Path path(absPath($1, data->basePath));
/* add back in the trailing '/' to the first segment */
if ($1[strlen($1)-1] == '/' && strlen($1) > 1)
path += "/";
$$ = new ExprPath(path);
}
| HPATH {
Path path(getHome() + string($1 + 1));
$$ = new ExprPath(path);
}
;
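For orientation, a small Nix-language sketch of the kind of expression the new path_start / string_parts_interpolated / PATH_END rules above are meant to parse; the names `dir`, `./src`, and `build.sh` are illustrative, not taken from the commit:
# A path literal containing an interpolated part (assuming the grammar
# change behaves as the rules above suggest):
let dir = "scripts";
in ./src/${dir}/build.sh
# parsed as a concatenation whose first element is the path ./src/,
# so the whole expression still evaluates to a path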
ind_string_parts
: ind_string_parts IND_STR { $$ = $1; $1->push_back($2); }
| ind_string_parts DOLLAR_CURLY expr '}' { $$ = $1; $1->push_back($3); }
@ -478,7 +495,7 @@ binds
$$->attrs[i.symbol] = ExprAttrs::AttrDef(new ExprSelect(CUR_POS, $4, i.symbol), makeCurPos(@6, data));
}
}
| { $$ = new ExprAttrs; }
| { $$ = new ExprAttrs(makeCurPos(@0, data)); }
;
attrs

View file

@ -21,6 +21,8 @@
#include <regex>
#include <dlfcn.h>
#include <cmath>
namespace nix {
@ -158,16 +160,15 @@ static void import(EvalState & state, const Pos & pos, Value & vPath, Value * vS
}
w.attrs->sort();
static RootValue fun;
if (!fun) {
fun = allocRootValue(state.allocValue());
if (!state.vImportedDrvToDerivation) {
state.vImportedDrvToDerivation = allocRootValue(state.allocValue());
state.eval(state.parseExprFromString(
#include "imported-drv-to-derivation.nix.gen.hh"
, "/"), **fun);
, "/"), **state.vImportedDrvToDerivation);
}
state.forceFunction(**fun, pos);
mkApp(v, **fun, w);
state.forceFunction(**state.vImportedDrvToDerivation, pos);
mkApp(v, **state.vImportedDrvToDerivation, w);
state.forceAttrs(v, pos);
}
@ -547,18 +548,56 @@ typedef list<Value *> ValueList;
#endif
static Bindings::iterator getAttr(
EvalState & state,
string funcName,
string attrName,
Bindings * attrSet,
const Pos & pos)
{
Bindings::iterator value = attrSet->find(state.symbols.create(attrName));
if (value == attrSet->end()) {
hintformat errorMsg = hintfmt(
"attribute '%s' missing for call to '%s'",
attrName,
funcName
);
Pos aPos = *attrSet->pos;
if (aPos == noPos) {
throw TypeError({
.msg = errorMsg,
.errPos = pos,
});
} else {
auto e = TypeError({
.msg = errorMsg,
.errPos = aPos,
});
// Adding another trace for the function name to make it clear
// which call received wrong arguments.
e.addTrace(pos, hintfmt("while invoking '%s'", funcName));
throw e;
}
}
return value;
}
static void prim_genericClosure(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
state.forceAttrs(*args[0], pos);
/* Get the start set. */
Bindings::iterator startSet =
args[0]->attrs->find(state.symbols.create("startSet"));
if (startSet == args[0]->attrs->end())
throw EvalError({
.msg = hintfmt("attribute 'startSet' required"),
.errPos = pos
});
Bindings::iterator startSet = getAttr(
state,
"genericClosure",
"startSet",
args[0]->attrs,
pos
);
state.forceList(*startSet->value, pos);
ValueList workSet;
@ -566,13 +605,14 @@ static void prim_genericClosure(EvalState & state, const Pos & pos, Value * * ar
workSet.push_back(startSet->value->listElems()[n]);
/* Get the operator. */
Bindings::iterator op =
args[0]->attrs->find(state.symbols.create("operator"));
if (op == args[0]->attrs->end())
throw EvalError({
.msg = hintfmt("attribute 'operator' required"),
.errPos = pos
});
Bindings::iterator op = getAttr(
state,
"genericClosure",
"operator",
args[0]->attrs,
pos
);
state.forceValue(*op->value, pos);
/* Construct the closure by applying the operator to element of
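As a reminder of the two attributes being looked up via the new getAttr helper, a minimal `builtins.genericClosure` call might look as follows (keys and operator are made up for illustration):
builtins.genericClosure {
  startSet = [ { key = 1; } ];
  operator = item:
    if item.key < 3
    then [ { key = item.key + 1; } ]
    else [ ];
}
# => [ { key = 1; } { key = 2; } { key = 3; } ]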
@ -675,6 +715,44 @@ static RegisterPrimOp primop_addErrorContext(RegisterPrimOp::Info {
.fun = prim_addErrorContext,
});
static void prim_ceil(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
auto value = state.forceFloat(*args[0], args[0]->determinePos(pos));
mkInt(v, ceil(value));
}
static RegisterPrimOp primop_ceil({
.name = "__ceil",
.args = {"double"},
.doc = R"(
Converts an IEEE-754 double-precision floating-point number (*double*) to
the next higher integer.
If the datatype is neither an integer nor a "float", an evaluation error will be
thrown.
)",
.fun = prim_ceil,
});
static void prim_floor(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
auto value = state.forceFloat(*args[0], args[0]->determinePos(pos));
mkInt(v, floor(value));
}
static RegisterPrimOp primop_floor({
.name = "__floor",
.args = {"double"},
.doc = R"(
Converts an IEEE-754 double-precision floating-point number (*double*) to
the next lower integer.
If the datatype is neither an integer nor a "float", an evaluation error will be
thrown.
)",
.fun = prim_floor,
});
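A quick sketch of what the two new primops evaluate to (input values chosen purely for illustration):
builtins.ceil 2.1    # => 3
builtins.floor 2.9   # => 2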
/* Try evaluating the argument. Success => {success=true; value=something;},
* else => {success=false; value=false;} */
static void prim_tryEval(EvalState & state, const Pos & pos, Value * * args, Value & v)
@ -816,12 +894,14 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
state.forceAttrs(*args[0], pos);
/* Figure out the name first (for stack backtraces). */
Bindings::iterator attr = args[0]->attrs->find(state.sName);
if (attr == args[0]->attrs->end())
throw EvalError({
.msg = hintfmt("required attribute 'name' missing"),
.errPos = pos
});
Bindings::iterator attr = getAttr(
state,
"derivationStrict",
state.sName,
args[0]->attrs,
pos
);
string drvName;
Pos & posDrvName(*attr->pos);
try {
@ -953,7 +1033,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
}
} else {
auto s = state.coerceToString(posDrvName, *i->value, context, true);
auto s = state.coerceToString(*i->pos, *i->value, context, true);
drv.env.emplace(key, s);
if (i->name == state.sBuilder) drv.builder = s;
else if (i->name == state.sSystem) drv.platform = s;
@ -1210,7 +1290,10 @@ static RegisterPrimOp primop_toPath({
static void prim_storePath(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
if (evalSettings.pureEval)
throw EvalError("builtins.storePath' is not allowed in pure evaluation mode");
throw EvalError({
.msg = hintfmt("'%s' is not allowed in pure evaluation mode", "builtins.storePath"),
.errPos = pos
});
PathSet context;
Path path = state.checkSourcePath(state.coerceToPath(pos, *args[0], context));
@ -1329,6 +1412,11 @@ static void prim_readFile(EvalState & state, const Pos & pos, Value * * args, Va
{
PathSet context;
Path path = state.coerceToPath(pos, *args[0], context);
if (baseNameOf(path) == "flake.lock")
throw Error({
.msg = hintfmt("cannot read '%s' because flake lock files can be out of sync", path),
.errPos = pos
});
try {
state.realiseContext(context);
} catch (InvalidPathError & e) {
@ -1369,12 +1457,13 @@ static void prim_findFile(EvalState & state, const Pos & pos, Value * * args, Va
if (i != v2.attrs->end())
prefix = state.forceStringNoCtx(*i->value, pos);
i = v2.attrs->find(state.symbols.create("path"));
if (i == v2.attrs->end())
throw EvalError({
.msg = hintfmt("attribute 'path' missing"),
.errPos = pos
});
i = getAttr(
state,
"findFile",
"path",
v2.attrs,
pos
);
PathSet context;
string path = state.coerceToString(pos, *i->value, context, false, false);
@ -1408,15 +1497,20 @@ static void prim_hashFile(EvalState & state, const Pos & pos, Value * * args, Va
string type = state.forceStringNoCtx(*args[0], pos);
std::optional<HashType> ht = parseHashType(type);
if (!ht)
throw Error({
.msg = hintfmt("unknown hash type '%1%'", type),
.errPos = pos
});
throw Error({
.msg = hintfmt("unknown hash type '%1%'", type),
.errPos = pos
});
PathSet context; // discarded
Path p = state.coerceToPath(pos, *args[1], context);
PathSet context;
Path path = state.coerceToPath(pos, *args[1], context);
try {
state.realiseContext(context);
} catch (InvalidPathError & e) {
throw EvalError("cannot read '%s' since path '%s' is not valid, at %s", path, e.path, pos);
}
mkString(v, hashFile(*ht, state.checkSourcePath(p)).to_string(Base16, false), context);
mkString(v, hashFile(*ht, state.checkSourcePath(state.toRealPath(path, context))).to_string(Base16, false));
}
static RegisterPrimOp primop_hashFile({
@ -1920,26 +2014,26 @@ static RegisterPrimOp primop_path({
An enrichment of the built-in path type, based on the attributes
present in *args*. All are optional except `path`:
- path
- path\
The underlying path.
- name
- name\
The name of the path when added to the store. This can be used to
reference paths that have nix-illegal characters in their names,
like `@`.
- filter
- filter\
A function of the type expected by `builtins.filterSource`,
with the same semantics.
- recursive
- recursive\
When `false`, `path` is added to the store with a
flat hash, rather than a hash of the NAR serialization of the
file. Thus, `path` must refer to a regular file, not a
directory. This allows similar behavior to `fetchurl`. Defaults
to `true`.
- sha256
- sha256\
When provided, this is the expected hash of the file at the
path. Evaluation will fail if the hash is incorrect, and
providing a hash allows `builtins.path` to be used even when the
@ -2016,14 +2110,15 @@ void prim_getAttr(EvalState & state, const Pos & pos, Value * * args, Value & v)
string attr = state.forceStringNoCtx(*args[0], pos);
state.forceAttrs(*args[1], pos);
// !!! Should we create a symbol here or just do a lookup?
Bindings::iterator i = args[1]->attrs->find(state.symbols.create(attr));
if (i == args[1]->attrs->end())
throw EvalError({
.msg = hintfmt("attribute '%1%' missing", attr),
.errPos = pos
});
Bindings::iterator i = getAttr(
state,
"getAttr",
attr,
args[1]->attrs,
pos
);
// !!! add to stack trace?
if (state.countCalls && i->pos) state.attrSelects[*i->pos]++;
if (state.countCalls && *i->pos != noPos) state.attrSelects[*i->pos]++;
state.forceValue(*i->value, pos);
v = *i->value;
}
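For reference, this is the ordinary attribute lookup primop; a missing attribute now goes through the shared getAttr helper shown earlier (attribute names here are illustrative):
builtins.getAttr "b" { a = 1; b = 2; }   # => 2
builtins.getAttr "c" { a = 1; b = 2; }   # fails with the "attribute 'c' missing for call to 'getAttr'" message built above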
@ -2148,22 +2243,25 @@ static void prim_listToAttrs(EvalState & state, const Pos & pos, Value * * args,
Value & v2(*args[0]->listElems()[i]);
state.forceAttrs(v2, pos);
Bindings::iterator j = v2.attrs->find(state.sName);
if (j == v2.attrs->end())
throw TypeError({
.msg = hintfmt("'name' attribute missing in a call to 'listToAttrs'"),
.errPos = pos
});
string name = state.forceStringNoCtx(*j->value, pos);
Bindings::iterator j = getAttr(
state,
"listToAttrs",
state.sName,
v2.attrs,
pos
);
string name = state.forceStringNoCtx(*j->value, *j->pos);
Symbol sym = state.symbols.create(name);
if (seen.insert(sym).second) {
Bindings::iterator j2 = v2.attrs->find(state.symbols.create(state.sValue));
if (j2 == v2.attrs->end())
throw TypeError({
.msg = hintfmt("'value' attribute missing in a call to 'listToAttrs'"),
.errPos = pos
});
Bindings::iterator j2 = getAttr(
state,
"listToAttrs",
state.sValue,
v2.attrs,
pos
);
v.attrs->push_back(Attr(sym, j2->value, j2->pos));
}
}
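The two getAttr calls above correspond to the `name` and `value` attributes that each list element must carry; a minimal sketch:
builtins.listToAttrs [
  { name = "foo"; value = 1; }
  { name = "bar"; value = 2; }
]
# => { bar = 2; foo = 1; }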
@ -2280,7 +2378,7 @@ static void prim_functionArgs(EvalState & state, const Pos & pos, Value * * args
for (auto & i : args[0]->lambda.fun->formals->formals) {
// !!! should optimise booleans (allocate only once)
Value * value = state.allocValue();
v.attrs->push_back(Attr(i.name, value, &i.pos));
v.attrs->push_back(Attr(i.name, value, ptr(&i.pos)));
mkBool(*value, i.def);
}
v.attrs->sort();
@ -2804,7 +2902,12 @@ static void prim_concatMap(EvalState & state, const Pos & pos, Value * * args, V
for (unsigned int n = 0; n < nrLists; ++n) {
Value * vElem = args[1]->listElems()[n];
state.callFunction(*args[0], *vElem, lists[n], pos);
state.forceList(lists[n], pos);
try {
state.forceList(lists[n], lists[n].determinePos(args[0]->determinePos(pos)));
} catch (TypeError &e) {
e.addTrace(pos, hintfmt("while invoking '%s'", "concatMap"));
throw e;
}
len += lists[n].listSize();
}
@ -3015,7 +3118,7 @@ static RegisterPrimOp primop_toString({
- A path (e.g., `toString /foo/bar` yields `"/foo/bar"`).
- A set containing `{ __toString = self: ...; }`.
- A set containing `{ __toString = self: ...; }` or `{ outPath = ...; }`.
- An integer.
@ -3100,7 +3203,7 @@ static void prim_hashString(EvalState & state, const Pos & pos, Value * * args,
PathSet context; // discarded
string s = state.forceString(*args[1], context, pos);
mkString(v, hashString(*ht, s).to_string(Base16, false), context);
mkString(v, hashString(*ht, s).to_string(Base16, false));
}
static RegisterPrimOp primop_hashString({
@ -3551,9 +3654,7 @@ void EvalState::createBaseEnv()
if (!evalSettings.pureEval) {
mkInt(v, time(0));
addConstant("__currentTime", v);
}
if (!evalSettings.pureEval) {
mkString(v, settings.thisSystem.get());
addConstant("__currentSystem", v);
}

View file

@ -62,6 +62,7 @@ static void prim_fetchMercurial(EvalState & state, const Pos & pos, Value * * ar
fetchers::Attrs attrs;
attrs.insert_or_assign("type", "hg");
attrs.insert_or_assign("url", url.find("://") != std::string::npos ? url : "file://" + url);
attrs.insert_or_assign("name", name);
if (ref) attrs.insert_or_assign("ref", *ref);
if (rev) attrs.insert_or_assign("rev", rev->gitRev());
auto input = fetchers::Input::fromAttrs(std::move(attrs));

View file

@ -7,6 +7,7 @@
#include <ctime>
#include <iomanip>
#include <regex>
namespace nix {
@ -44,7 +45,7 @@ void emitTreeAttrs(
if (input.getType() == "git")
mkBool(*state.allocAttr(v, state.symbols.create("submodules")),
fetchers::maybeGetBoolAttr(input.attrs, "submodules").value_or(false));
fetchers::maybeGetBoolAttr(input.attrs, "submodules").value_or(true));
if (auto revCount = input.getRevCount())
mkInt(*state.allocAttr(v, state.symbols.create("revCount")), *revCount);
@ -60,10 +61,19 @@ void emitTreeAttrs(
v.attrs->sort();
}
std::string fixURI(std::string uri, EvalState &state)
std::string fixURI(std::string uri, EvalState &state, const std::string & defaultScheme = "file")
{
state.checkURI(uri);
return uri.find("://") != std::string::npos ? uri : "file://" + uri;
return uri.find("://") != std::string::npos ? uri : defaultScheme + "://" + uri;
}
std::string fixURIForGit(std::string uri, EvalState & state)
{
static std::regex scp_uri("([^/].*)@(.*):(.*)");
if (uri[0] != '/' && std::regex_match(uri, scp_uri))
return fixURI(std::regex_replace(uri, scp_uri, "$1@$2/$3"), state, "ssh");
else
return fixURI(uri, state);
}
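A sketch of what fixURIForGit enables, based on the scp_uri regex above (the repository URL is illustrative):
builtins.fetchGit "git@github.com:NixOS/nix.git"
# is rewritten internally to roughly:
builtins.fetchGit "ssh://git@github.com/NixOS/nix.git"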
void addURI(EvalState &state, fetchers::Attrs &attrs, Symbol name, std::string v)
@ -72,13 +82,18 @@ void addURI(EvalState &state, fetchers::Attrs &attrs, Symbol name, std::string v
attrs.emplace(name, n == "url" ? fixURI(v, state) : v);
}
struct FetchTreeParams {
bool emptyRevFallback = false;
bool allowNameArgument = false;
};
static void fetchTree(
EvalState &state,
const Pos &pos,
Value **args,
Value &v,
const std::optional<std::string> type,
bool emptyRevFallback = false
const FetchTreeParams & params = FetchTreeParams{}
) {
fetchers::Input input;
PathSet context;
@ -119,17 +134,25 @@ static void fetchTree(
.errPos = pos
});
if (!params.allowNameArgument)
if (auto nameIter = attrs.find("name"); nameIter != attrs.end())
throw Error({
.msg = hintfmt("attribute 'name' isnt supported in call to 'fetchTree'"),
.errPos = pos
});
input = fetchers::Input::fromAttrs(std::move(attrs));
} else {
auto url = fixURI(state.coerceToString(pos, *args[0], context, false, false), state);
auto url = state.coerceToString(pos, *args[0], context, false, false);
if (type == "git") {
fetchers::Attrs attrs;
attrs.emplace("type", "git");
attrs.emplace("url", url);
attrs.emplace("url", fixURIForGit(url, state));
input = fetchers::Input::fromAttrs(std::move(attrs));
} else {
input = fetchers::Input::fromURL(url);
input = fetchers::Input::fromURL(fixURI(url, state));
}
}
@ -144,13 +167,13 @@ static void fetchTree(
if (state.allowedPaths)
state.allowedPaths->insert(tree.actualPath);
emitTreeAttrs(state, tree, input2, v, emptyRevFallback);
emitTreeAttrs(state, tree, input2, v, params.emptyRevFallback);
}
static void prim_fetchTree(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
settings.requireExperimentalFeature("flakes");
fetchTree(state, pos, args, v, std::nullopt);
fetchTree(state, pos, args, v, std::nullopt, FetchTreeParams { .allowNameArgument = false });
}
// FIXME: document
@ -292,7 +315,7 @@ static RegisterPrimOp primop_fetchTarball({
static void prim_fetchGit(EvalState &state, const Pos &pos, Value **args, Value &v)
{
fetchTree(state, pos, args, v, "git", true);
fetchTree(state, pos, args, v, "git", FetchTreeParams { .emptyRevFallback = true, .allowNameArgument = true });
}
static RegisterPrimOp primop_fetchGit({
@ -303,17 +326,17 @@ static RegisterPrimOp primop_fetchGit({
of the repo at that URL is fetched. Otherwise, it can be an
attribute with the following attributes (all except `url` optional):
- url
- url\
The URL of the repo.
- name
- name\
The name of the directory the repo should be exported to in the
store. Defaults to the basename of the URL.
- rev
- rev\
The git revision to fetch. Defaults to the tip of `ref`.
- ref
- ref\
The git ref to look for the requested revision under. This is
often a branch or tag name. Defaults to `HEAD`.
@ -321,11 +344,11 @@ static RegisterPrimOp primop_fetchGit({
of Nix 2.3.0 Nix will not prefix `refs/heads/` if `ref` starts
with `refs/`.
- submodules
- submodules\
A Boolean parameter that specifies whether submodules should be
checked out. Defaults to `false`.
- allRefs
- allRefs\
Whether to fetch all refs of the repository. With this argument being
true, it's possible to load a `rev` from *any* `ref` (by default only
`rev`s from the specified `ref` are supported).
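Putting the documented attributes together, a call in the attribute-set form might look like this (URL and ref are made up for illustration):
builtins.fetchGit {
  url = "ssh://git@example.org/repo.git";
  ref = "main";
  submodules = true;
}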

View file

@ -42,7 +42,7 @@ static void showAttrs(EvalState & state, bool strict, bool location,
XMLAttrs xmlAttrs;
xmlAttrs["name"] = i;
if (location && a.pos != &noPos) posToXML(xmlAttrs, *a.pos);
if (location && a.pos != ptr(&noPos)) posToXML(xmlAttrs, *a.pos);
XMLOpenElement _(doc, "attr", xmlAttrs);
printValueAsXML(state, strict, location,

View file

@ -341,6 +341,8 @@ public:
return internalType == tList1 ? 1 : internalType == tList2 ? 2 : bigList.size;
}
Pos determinePos(const Pos &pos) const;
/* Check whether forcing this value requires a trivial amount of
computation. In particular, function applications are
non-trivial. */

View file

@ -6,6 +6,8 @@
#include <nlohmann/json_fwd.hpp>
#include <optional>
namespace nix::fetchers {
typedef std::variant<std::string, uint64_t, Explicit<bool>> Attr;

View file

@ -200,12 +200,17 @@ void Input::markChangedFile(
return scheme->markChangedFile(*this, file, commitMsg);
}
std::string Input::getName() const
{
return maybeGetStrAttr(attrs, "name").value_or("source");
}
StorePath Input::computeStorePath(Store & store) const
{
auto narHash = getNarHash();
if (!narHash)
throw Error("cannot compute store path for mutable input '%s'", to_string());
return store.makeFixedOutputPath(FileIngestionMethod::Recursive, *narHash, "source");
return store.makeFixedOutputPath(FileIngestionMethod::Recursive, *narHash, getName());
}
std::string Input::getType() const

View file

@ -38,6 +38,9 @@ struct Input
bool immutable = false;
bool direct = true;
/* path of the parent of this input, used for relative path resolution */
std::optional<Path> parent;
public:
static Input fromURL(const std::string & url);
@ -81,6 +84,8 @@ public:
std::string_view file,
std::optional<std::string> commitMsg) const;
std::string getName() const;
StorePath computeStorePath(Store & store) const;
// Convenience functions for common attributes.
@ -145,13 +150,7 @@ DownloadFileResult downloadFile(
bool immutable,
const Headers & headers = {});
struct DownloadTarballMeta
{
time_t lastModified;
std::string effectiveUrl;
};
std::pair<Tree, DownloadTarballMeta> downloadTarball(
std::pair<Tree, time_t> downloadTarball(
ref<Store> store,
const std::string & url,
const std::string & name,

View file

@ -4,8 +4,10 @@
#include "tarfile.hh"
#include "store-api.hh"
#include "url-parts.hh"
#include "pathlocks.hh"
#include <sys/time.h>
#include <sys/wait.h>
using namespace std::string_literals;
@ -59,7 +61,7 @@ struct GitInputScheme : InputScheme
if (maybeGetStrAttr(attrs, "type") != "git") return {};
for (auto & [name, value] : attrs)
if (name != "type" && name != "url" && name != "ref" && name != "rev" && name != "shallow" && name != "submodules" && name != "lastModified" && name != "revCount" && name != "narHash" && name != "allRefs")
if (name != "type" && name != "url" && name != "ref" && name != "rev" && name != "shallow" && name != "submodules" && name != "lastModified" && name != "revCount" && name != "narHash" && name != "allRefs" && name != "name")
throw Error("unsupported Git input attribute '%s'", name);
parseURL(getStrAttr(attrs, "url"));
@ -166,12 +168,12 @@ struct GitInputScheme : InputScheme
std::pair<Tree, Input> fetch(ref<Store> store, const Input & _input) override
{
auto name = "source";
Input input(_input);
std::string name = input.getName();
bool shallow = maybeGetBoolAttr(input.attrs, "shallow").value_or(false);
bool submodules = maybeGetBoolAttr(input.attrs, "submodules").value_or(false);
bool submodules = maybeGetBoolAttr(input.attrs, "submodules").value_or(true);
bool allRefs = maybeGetBoolAttr(input.attrs, "allRefs").value_or(false);
std::string cacheType = "git";
@ -269,7 +271,7 @@ struct GitInputScheme : InputScheme
return files.count(file);
};
auto storePath = store->addToStore("source", actualUrl, FileIngestionMethod::Recursive, htSHA256, filter);
auto storePath = store->addToStore(input.getName(), actualUrl, FileIngestionMethod::Recursive, htSHA256, filter);
// FIXME: maybe we should use the timestamp of the last
// modified dirty file?
@ -316,11 +318,17 @@ struct GitInputScheme : InputScheme
Path cacheDir = getCacheDir() + "/nix/gitv3/" + hashString(htSHA256, actualUrl).to_string(Base32, false);
repoDir = cacheDir;
Path cacheDirLock = cacheDir + ".lock";
createDirs(dirOf(cacheDir));
AutoCloseFD lock = openLockFile(cacheDirLock, true);
lockFile(lock.get(), ltWrite, true);
if (!pathExists(cacheDir)) {
createDirs(dirOf(cacheDir));
runProgram("git", true, { "init", "--bare", repoDir });
}
deleteLockFile(cacheDirLock, lock.get());
Path localRefFile =
input.getRef()->compare(0, 5, "refs/") == 0
? cacheDir + "/" + *input.getRef()
@ -405,17 +413,14 @@ struct GitInputScheme : InputScheme
AutoDelete delTmpDir(tmpDir, true);
PathFilter filter = defaultPathFilter;
RunOptions checkCommitOpts(
"git",
{ "-C", repoDir, "cat-file", "commit", input.getRev()->gitRev() }
);
checkCommitOpts.searchPath = true;
checkCommitOpts.mergeStderrToStdout = true;
auto result = runProgram(checkCommitOpts);
auto result = runProgram(RunOptions {
.program = "git",
.args = { "-C", repoDir, "cat-file", "commit", input.getRev()->gitRev() },
.mergeStderrToStdout = true
});
if (WEXITSTATUS(result.first) == 128
&& result.second.find("bad file") != std::string::npos
) {
&& result.second.find("bad file") != std::string::npos)
{
throw Error(
"Cannot find Git revision '%s' in ref '%s' of repository '%s'! "
"Please make sure that the " ANSI_BOLD "rev" ANSI_NORMAL " exists on the "
@ -447,9 +452,11 @@ struct GitInputScheme : InputScheme
// FIXME: should pipe this, or find some better way to extract a
// revision.
auto source = sinkToSource([&](Sink & sink) {
RunOptions gitOptions("git", { "-C", repoDir, "archive", input.getRev()->gitRev() });
gitOptions.standardOut = &sink;
runProgram2(gitOptions);
runProgram2({
.program = "git",
.args = { "-C", repoDir, "archive", input.getRev()->gitRev() },
.standardOut = &sink
});
});
unpackTarfile(*source, tmpDir);

View file

@ -207,16 +207,16 @@ struct GitArchiveInputScheme : InputScheme
auto url = getDownloadUrl(input);
auto [tree, meta] = downloadTarball(store, url.url, "source", true, url.headers);
auto [tree, lastModified] = downloadTarball(store, url.url, input.getName(), true, url.headers);
input.attrs.insert_or_assign("lastModified", uint64_t(meta.lastModified));
input.attrs.insert_or_assign("lastModified", uint64_t(lastModified));
getCache()->add(
store,
immutableAttrs,
{
{"rev", rev->gitRev()},
{"lastModified", uint64_t(meta.lastModified)}
{"lastModified", uint64_t(lastModified)}
},
tree.storePath,
true);
@ -273,9 +273,9 @@ struct GitHubInputScheme : GitArchiveInputScheme
void clone(const Input & input, const Path & destDir) override
{
auto host = maybeGetStrAttr(input.attrs, "host").value_or("github.com");
Input::fromURL(fmt("git+ssh://git@%s/%s/%s.git",
Input::fromURL(fmt("git+https://%s/%s/%s.git",
host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo")))
.applyOverrides(input.getRef().value_or("HEAD"), input.getRev())
.applyOverrides(input.getRef(), input.getRev())
.clone(destDir);
}
};
@ -341,9 +341,9 @@ struct GitLabInputScheme : GitArchiveInputScheme
{
auto host = maybeGetStrAttr(input.attrs, "host").value_or("gitlab.com");
// FIXME: get username somewhere
Input::fromURL(fmt("git+ssh://git@%s/%s/%s.git",
Input::fromURL(fmt("git+https://%s/%s/%s.git",
host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo")))
.applyOverrides(input.getRef().value_or("HEAD"), input.getRev())
.applyOverrides(input.getRef(), input.getRev())
.clone(destDir);
}
};

View file

@ -8,4 +8,6 @@ libfetchers_SOURCES := $(wildcard $(d)/*.cc)
libfetchers_CXXFLAGS += -I src/libutil -I src/libstore
libfetchers_LDFLAGS += -pthread
libfetchers_LIBS = libutil libstore

View file

@ -11,34 +11,32 @@ using namespace std::string_literals;
namespace nix::fetchers {
namespace {
static RunOptions hgOptions(const Strings & args)
{
auto env = getEnv();
// Set HGPLAIN: this means we get consistent output from hg and avoids leakage from a user or system .hgrc.
env["HGPLAIN"] = "";
RunOptions hgOptions(const Strings & args) {
RunOptions opts("hg", args);
opts.searchPath = true;
auto env = getEnv();
// Set HGPLAIN: this means we get consistent output from hg and avoids leakage from a user or system .hgrc.
env["HGPLAIN"] = "";
opts.environment = env;
return opts;
return {
.program = "hg",
.searchPath = true,
.args = args,
.environment = env
};
}
// runProgram wrapper that uses hgOptions instead of stock RunOptions.
string runHg(const Strings & args, const std::optional<std::string> & input = {})
static string runHg(const Strings & args, const std::optional<std::string> & input = {})
{
RunOptions opts = hgOptions(args);
opts.input = input;
RunOptions opts = hgOptions(args);
opts.input = input;
auto res = runProgram(opts);
auto res = runProgram(std::move(opts));
if (!statusOk(res.first))
throw ExecError(res.first, fmt("hg %1%", statusToString(res.first)));
return res.second;
}
if (!statusOk(res.first))
throw ExecError(res.first, fmt("hg %1%", statusToString(res.first)));
return res.second;
}
struct MercurialInputScheme : InputScheme
@ -74,7 +72,7 @@ struct MercurialInputScheme : InputScheme
if (maybeGetStrAttr(attrs, "type") != "hg") return {};
for (auto & [name, value] : attrs)
if (name != "type" && name != "url" && name != "ref" && name != "rev" && name != "revCount" && name != "narHash")
if (name != "type" && name != "url" && name != "ref" && name != "rev" && name != "revCount" && name != "narHash" && name != "name")
throw Error("unsupported Mercurial input attribute '%s'", name);
parseURL(getStrAttr(attrs, "url"));
@ -147,10 +145,10 @@ struct MercurialInputScheme : InputScheme
std::pair<Tree, Input> fetch(ref<Store> store, const Input & _input) override
{
auto name = "source";
Input input(_input);
auto name = input.getName();
auto [isLocal, actualUrl_] = getActualUrl(input);
auto actualUrl = actualUrl_; // work around clang bug
@ -193,7 +191,7 @@ struct MercurialInputScheme : InputScheme
return files.count(file);
};
auto storePath = store->addToStore("source", actualUrl, FileIngestionMethod::Recursive, htSHA256, filter);
auto storePath = store->addToStore(input.getName(), actualUrl, FileIngestionMethod::Recursive, htSHA256, filter);
return {
Tree(store->toRealPath(storePath), std::move(storePath)),
@ -253,9 +251,7 @@ struct MercurialInputScheme : InputScheme
have to pull again. */
if (!(input.getRev()
&& pathExists(cacheDir)
&& runProgram(
hgOptions({ "log", "-R", cacheDir, "-r", input.getRev()->gitRev(), "--template", "1" })
.killStderr(true)).second == "1"))
&& runProgram(hgOptions({ "log", "-R", cacheDir, "-r", input.getRev()->gitRev(), "--template", "1" })).second == "1"))
{
Activity act(*logger, lvlTalkative, actUnknown, fmt("fetching Mercurial repository '%s'", actualUrl));

View file

@ -82,18 +82,30 @@ struct PathInputScheme : InputScheme
std::pair<Tree, Input> fetch(ref<Store> store, const Input & input) override
{
std::string absPath;
auto path = getStrAttr(input.attrs, "path");
// FIXME: check whether access to 'path' is allowed.
if (path[0] != '/' && input.parent) {
auto parent = canonPath(*input.parent);
auto storePath = store->maybeParseStorePath(path);
// the path isn't relative, prefix it
absPath = canonPath(parent + "/" + path);
// for security, ensure that if the parent is a store path, it's inside it
if (!parent.rfind(store->storeDir, 0) && absPath.rfind(store->storeDir, 0))
throw BadStorePath("relative path '%s' points outside of its parent's store path %s, this is a security violation", path, parent);
} else
absPath = path;
// FIXME: check whether access to 'path' is allowed.
auto storePath = store->maybeParseStorePath(absPath);
if (storePath)
store->addTempRoot(*storePath);
if (!storePath || storePath->name() != "source" || !store->isValidPath(*storePath))
// FIXME: try to substitute storePath.
storePath = store->addToStore("source", path);
storePath = store->addToStore("source", absPath);
return {
Tree(store->toRealPath(*storePath), std::move(*storePath)),

View file

@ -124,6 +124,13 @@ std::shared_ptr<Registry> getUserRegistry()
return userRegistry;
}
std::shared_ptr<Registry> getCustomRegistry(const Path & p)
{
static auto customRegistry =
Registry::read(p, Registry::Custom);
return customRegistry;
}
static std::shared_ptr<Registry> flagRegistry =
std::make_shared<Registry>(Registry::Flag);

View file

@ -14,6 +14,7 @@ struct Registry
User = 1,
System = 2,
Global = 3,
Custom = 4,
};
RegistryType type;
@ -48,6 +49,8 @@ typedef std::vector<std::shared_ptr<Registry>> Registries;
std::shared_ptr<Registry> getUserRegistry();
std::shared_ptr<Registry> getCustomRegistry(const Path & p);
Path getUserRegistryPath();
Registries getRegistries(ref<Store> store);

View file

@ -109,7 +109,7 @@ DownloadFileResult downloadFile(
};
}
std::pair<Tree, DownloadTarballMeta> downloadTarball(
std::pair<Tree, time_t> downloadTarball(
ref<Store> store,
const std::string & url,
const std::string & name,
@ -127,10 +127,7 @@ std::pair<Tree, DownloadTarballMeta> downloadTarball(
if (cached && !cached->expired)
return {
Tree(store->toRealPath(cached->storePath), std::move(cached->storePath)),
{
.lastModified = time_t(getIntAttr(cached->infoAttrs, "lastModified")),
.effectiveUrl = maybeGetStrAttr(cached->infoAttrs, "effectiveUrl").value_or(url),
},
getIntAttr(cached->infoAttrs, "lastModified")
};
auto res = downloadFile(store, url, name, immutable, headers);
@ -155,7 +152,6 @@ std::pair<Tree, DownloadTarballMeta> downloadTarball(
Attrs infoAttrs({
{"lastModified", uint64_t(lastModified)},
{"effectiveUrl", res.effectiveUrl},
{"etag", res.etag},
});
@ -168,10 +164,7 @@ std::pair<Tree, DownloadTarballMeta> downloadTarball(
return {
Tree(store->toRealPath(*unpackedStorePath), std::move(*unpackedStorePath)),
{
.lastModified = lastModified,
.effectiveUrl = res.effectiveUrl,
},
lastModified,
};
}
@ -185,7 +178,8 @@ struct TarballInputScheme : InputScheme
&& !hasSuffix(url.path, ".tar")
&& !hasSuffix(url.path, ".tar.gz")
&& !hasSuffix(url.path, ".tar.xz")
&& !hasSuffix(url.path, ".tar.bz2"))
&& !hasSuffix(url.path, ".tar.bz2")
&& !hasSuffix(url.path, ".tar.zst"))
return {};
Input input;
@ -202,7 +196,7 @@ struct TarballInputScheme : InputScheme
if (maybeGetStrAttr(attrs, "type") != "tarball") return {};
for (auto & [name, value] : attrs)
if (name != "type" && name != "url" && /* name != "hash" && */ name != "narHash")
if (name != "type" && name != "url" && /* name != "hash" && */ name != "narHash" && name != "name")
throw Error("unsupported tarball input attribute '%s'", name);
Input input;
@ -230,11 +224,9 @@ struct TarballInputScheme : InputScheme
return true;
}
std::pair<Tree, Input> fetch(ref<Store> store, const Input & _input) override
std::pair<Tree, Input> fetch(ref<Store> store, const Input & input) override
{
Input input(_input);
auto [tree, meta] = downloadTarball(store, getStrAttr(input.attrs, "url"), "source", false);
input.attrs.insert_or_assign("url", meta.effectiveUrl);
auto tree = downloadTarball(store, getStrAttr(input.attrs, "url"), input.getName(), false).first;
return {std::move(tree), input};
}
};

View file

@ -8,10 +8,10 @@ libmain_SOURCES := $(wildcard $(d)/*.cc)
libmain_CXXFLAGS += -I src/libutil -I src/libstore
libmain_LDFLAGS = $(OPENSSL_LIBS)
libmain_LDFLAGS += $(OPENSSL_LIBS)
libmain_LIBS = libstore libutil
libmain_ALLOW_UNDEFINED = 1
$(eval $(call install-file-in, $(d)/nix-main.pc, $(prefix)/lib/pkgconfig, 0644))
$(eval $(call install-file-in, $(d)/nix-main.pc, $(libdir)/pkgconfig, 0644))

View file

@ -484,7 +484,7 @@ Logger * makeProgressBar(bool printBuildLogs)
{
return new ProgressBar(
printBuildLogs,
isatty(STDERR_FILENO) && getEnv("TERM").value_or("dumb") != "dumb"
shouldANSI()
);
}

View file

@ -15,6 +15,9 @@
#include <sys/stat.h>
#include <unistd.h>
#include <signal.h>
#include <sys/types.h>
#include <sys/socket.h>
#include <netdb.h>
#include <openssl/crypto.h>
@ -110,6 +113,31 @@ static void opensslLockCallback(int mode, int type, const char * file, int line)
}
#endif
static std::once_flag dns_resolve_flag;
static void preloadNSS() {
/* builtin:fetchurl can trigger a DNS lookup, which with glibc can trigger a dynamic library load of
one of the glibc NSS libraries in a sandboxed child, which will fail unless the library's already
been loaded in the parent. So we force a lookup of an invalid domain to force the NSS machinery to
load its lookup libraries in the parent before any child gets a chance to. */
std::call_once(dns_resolve_flag, []() {
struct addrinfo *res = NULL;
/* nss will only force the "local" (not through nscd) dns resolution if it's on the LOCALDOMAIN.
We need the resolution to be done locally, as the nscd socket will not be accessible in the
sandbox. */
char * previous_env = getenv("LOCALDOMAIN");
setenv("LOCALDOMAIN", "invalid", 1);
if (getaddrinfo("this.pre-initializes.the.dns.resolvers.invalid.", "http", NULL, &res) == 0) {
if (res) freeaddrinfo(res);
}
if (previous_env) {
setenv("LOCALDOMAIN", previous_env, 1);
} else {
unsetenv("LOCALDOMAIN");
}
});
}
static void sigHandler(int signo) { }
@ -176,6 +204,8 @@ void initNix()
if (hasPrefix(getEnv("TMPDIR").value_or("/tmp"), "/var/folders/"))
unsetenv("TMPDIR");
#endif
preloadNSS();
}
@ -238,7 +268,7 @@ LegacyArgs::LegacyArgs(const std::string & programName,
addFlag({
.longName = "no-gc-warning",
.description = "Disable warnings about not using `--add-root`.",
.handler = {&gcWarning, true},
.handler = {&gcWarning, false},
});
addFlag({

View file

@ -130,17 +130,6 @@ AutoCloseFD openFile(const Path & path)
return fd;
}
struct FileSource : FdSource
{
AutoCloseFD fd2;
FileSource(const Path & path)
: fd2(openFile(path))
{
fd = fd2.get();
}
};
ref<const ValidPathInfo> BinaryCacheStore::addToStoreCommon(
Source & narSource, RepairFlag repair, CheckSigsFlag checkSigs,
std::function<ValidPathInfo(HashResult)> mkInfo)
@ -179,6 +168,9 @@ ref<const ValidPathInfo> BinaryCacheStore::addToStoreCommon(
narInfo->url = "nar/" + narInfo->fileHash->to_string(Base32, false) + ".nar"
+ (compression == "xz" ? ".xz" :
compression == "bzip2" ? ".bz2" :
compression == "zstd" ? ".zst" :
compression == "lzip" ? ".lzip" :
compression == "lz4" ? ".lz4" :
compression == "br" ? ".br" :
"");
@ -447,18 +439,43 @@ StorePath BinaryCacheStore::addTextToStore(const string & name, const string & s
std::optional<const Realisation> BinaryCacheStore::queryRealisation(const DrvOutput & id)
{
if (diskCache) {
auto [cacheOutcome, maybeCachedRealisation] =
diskCache->lookupRealisation(getUri(), id);
switch (cacheOutcome) {
case NarInfoDiskCache::oValid:
debug("Returning a cached realisation for %s", id.to_string());
return *maybeCachedRealisation;
case NarInfoDiskCache::oInvalid:
debug("Returning a cached missing realisation for %s", id.to_string());
return {};
case NarInfoDiskCache::oUnknown:
break;
}
}
auto outputInfoFilePath = realisationsPrefix + "/" + id.to_string() + ".doi";
auto rawOutputInfo = getFile(outputInfoFilePath);
if (rawOutputInfo) {
return {Realisation::fromJSON(
nlohmann::json::parse(*rawOutputInfo), outputInfoFilePath)};
auto realisation = Realisation::fromJSON(
nlohmann::json::parse(*rawOutputInfo), outputInfoFilePath);
if (diskCache)
diskCache->upsertRealisation(
getUri(), realisation);
return {realisation};
} else {
if (diskCache)
diskCache->upsertAbsentRealisation(getUri(), id);
return std::nullopt;
}
}
void BinaryCacheStore::registerDrvOutput(const Realisation& info) {
if (diskCache)
diskCache->upsertRealisation(getUri(), info);
auto filePath = realisationsPrefix + "/" + info.id.to_string() + ".doi";
upsertFile(filePath, info.toJSON().dump(), "application/json");
}

View file

@ -34,7 +34,7 @@ private:
protected:
// The prefix under which realisation infos will be stored
const std::string realisationsPrefix = "/realisations";
const std::string realisationsPrefix = "realisations";
BinaryCacheStore(const Params & params);

View file

@ -20,6 +20,7 @@
#include <sys/types.h>
#include <sys/socket.h>
#include <sys/un.h>
#include <sys/wait.h>
#include <netdb.h>
#include <fcntl.h>
#include <termios.h>
@ -142,7 +143,6 @@ void DerivationGoal::work()
(this->*state)();
}
void DerivationGoal::addWantedOutputs(const StringSet & outputs)
{
/* If we already want all outputs, there is nothing to do. */
@ -165,7 +165,7 @@ void DerivationGoal::getDerivation()
/* The first thing to do is to make sure that the derivation
exists. If it doesn't, it may be created through a
substitute. */
if (buildMode == bmNormal && worker.store.isValidPath(drvPath)) {
if (buildMode == bmNormal && worker.evalStore.isValidPath(drvPath)) {
loadDerivation();
return;
}
@ -188,12 +188,12 @@ void DerivationGoal::loadDerivation()
/* `drvPath' should already be a root, but let's be on the safe
side: if the user forgot to make it a root, we wouldn't want
things being garbage collected while we're busy. */
worker.store.addTempRoot(drvPath);
worker.evalStore.addTempRoot(drvPath);
assert(worker.store.isValidPath(drvPath));
assert(worker.evalStore.isValidPath(drvPath));
/* Get the derivation. */
drv = std::make_unique<Derivation>(worker.store.derivationFromPath(drvPath));
drv = std::make_unique<Derivation>(worker.evalStore.derivationFromPath(drvPath));
haveDerivation();
}
@ -212,8 +212,8 @@ void DerivationGoal::haveDerivation()
if (i.second.second)
worker.store.addTempRoot(*i.second.second);
auto outputHashes = staticOutputHashes(worker.store, *drv);
for (auto &[outputName, outputHash] : outputHashes)
auto outputHashes = staticOutputHashes(worker.evalStore, *drv);
for (auto & [outputName, outputHash] : outputHashes)
initialOutputs.insert({
outputName,
InitialOutput{
@ -337,6 +337,15 @@ void DerivationGoal::gaveUpOnSubstitution()
for (auto & i : dynamic_cast<Derivation *>(drv.get())->inputDrvs)
addWaitee(worker.makeDerivationGoal(i.first, i.second, buildMode == bmRepair ? bmRepair : bmNormal));
/* Copy the input sources from the eval store to the build
store. */
if (&worker.evalStore != &worker.store) {
RealisedPath::Set inputSrcs;
for (auto & i : drv->inputSrcs)
inputSrcs.insert(i);
copyClosure(worker.evalStore, worker.store, inputSrcs);
}
for (auto & i : drv->inputSrcs) {
if (worker.store.isValidPath(i)) continue;
if (!settings.useSubstitutes)
@ -478,8 +487,8 @@ void DerivationGoal::inputsRealised()
/* Add the relevant output closures of the input derivation
`i' as input paths. Only add the closures of output paths
that are specified as inputs. */
assert(worker.store.isValidPath(drvPath));
auto outputs = worker.store.queryPartialDerivationOutputMap(depDrvPath);
assert(worker.evalStore.isValidPath(drvPath));
auto outputs = worker.evalStore.queryPartialDerivationOutputMap(depDrvPath);
for (auto & j : wantedDepOutputs) {
if (outputs.count(j) > 0) {
auto optRealizedInput = outputs.at(j);
@ -544,7 +553,7 @@ void DerivationGoal::tryToBuild()
PathSet lockFiles;
/* FIXME: Should lock something like the drv itself so we don't build the same
CA drv concurrently */
if (dynamic_cast<LocalStore *>(&worker.store))
if (dynamic_cast<LocalStore *>(&worker.store)) {
/* If we aren't a local store, we might need to use the local store as
a build remote, but that would cause a deadlock. */
/* FIXME: Make it so we can use ourselves as a build remote even if we
@ -552,9 +561,15 @@ void DerivationGoal::tryToBuild()
/* FIXME: find some way to lock for scheduling for the other stores so
a forking daemon with --store still won't farm out redundant builds.
*/
for (auto & i : drv->outputsAndOptPaths(worker.store))
for (auto & i : drv->outputsAndOptPaths(worker.store)) {
if (i.second.second)
lockFiles.insert(worker.store.Store::toRealPath(*i.second.second));
else
lockFiles.insert(
worker.store.Store::toRealPath(drvPath) + "." + i.first
);
}
}
if (!outputLocks.lockPaths(lockFiles, "", false)) {
if (!actLock)
@ -738,6 +753,64 @@ void DerivationGoal::cleanupPostOutputsRegisteredModeNonCheck()
{
}
void runPostBuildHook(
Store & store,
Logger & logger,
const StorePath & drvPath,
StorePathSet outputPaths
)
{
auto hook = settings.postBuildHook;
if (hook == "")
return;
Activity act(logger, lvlInfo, actPostBuildHook,
fmt("running post-build-hook '%s'", settings.postBuildHook),
Logger::Fields{store.printStorePath(drvPath)});
PushActivity pact(act.id);
std::map<std::string, std::string> hookEnvironment = getEnv();
hookEnvironment.emplace("DRV_PATH", store.printStorePath(drvPath));
hookEnvironment.emplace("OUT_PATHS", chomp(concatStringsSep(" ", store.printStorePathSet(outputPaths))));
hookEnvironment.emplace("NIX_CONFIG", globalConfig.toKeyValue());
struct LogSink : Sink {
Activity & act;
std::string currentLine;
LogSink(Activity & act) : act(act) { }
void operator() (std::string_view data) override {
for (auto c : data) {
if (c == '\n') {
flushLine();
} else {
currentLine += c;
}
}
}
void flushLine() {
act.result(resPostBuildLogLine, currentLine);
currentLine.clear();
}
~LogSink() {
if (currentLine != "") {
currentLine += '\n';
flushLine();
}
}
};
LogSink sink(act);
runProgram2({
.program = settings.postBuildHook,
.environment = hookEnvironment,
.standardOut = &sink,
.mergeStderrToStdout = true,
});
}
void DerivationGoal::buildDone()
{
@ -803,57 +876,15 @@ void DerivationGoal::buildDone()
being valid. */
registerOutputs();
if (settings.postBuildHook != "") {
Activity act(*logger, lvlInfo, actPostBuildHook,
fmt("running post-build-hook '%s'", settings.postBuildHook),
Logger::Fields{worker.store.printStorePath(drvPath)});
PushActivity pact(act.id);
StorePathSet outputPaths;
for (auto i : drv->outputs) {
outputPaths.insert(finalOutputs.at(i.first));
}
std::map<std::string, std::string> hookEnvironment = getEnv();
hookEnvironment.emplace("DRV_PATH", worker.store.printStorePath(drvPath));
hookEnvironment.emplace("OUT_PATHS", chomp(concatStringsSep(" ", worker.store.printStorePathSet(outputPaths))));
RunOptions opts(settings.postBuildHook, {});
opts.environment = hookEnvironment;
struct LogSink : Sink {
Activity & act;
std::string currentLine;
LogSink(Activity & act) : act(act) { }
void operator() (std::string_view data) override {
for (auto c : data) {
if (c == '\n') {
flushLine();
} else {
currentLine += c;
}
}
}
void flushLine() {
act.result(resPostBuildLogLine, currentLine);
currentLine.clear();
}
~LogSink() {
if (currentLine != "") {
currentLine += '\n';
flushLine();
}
}
};
LogSink sink(act);
opts.standardOut = &sink;
opts.mergeStderrToStdout = true;
runProgram2(opts);
}
StorePathSet outputPaths;
for (auto & [_, path] : finalOutputs)
outputPaths.insert(path);
runPostBuildHook(
worker.store,
*logger,
drvPath,
outputPaths
);
if (buildMode == bmCheck) {
cleanupPostOutputsRegisteredModeCheck();
@ -909,6 +940,8 @@ void DerivationGoal::resolvedFinished() {
auto resolvedHashes = staticOutputHashes(worker.store, *resolvedDrv);
StorePathSet outputPaths;
// `wantedOutputs` might be empty, which means “all the outputs”
auto realWantedOutputs = wantedOutputs;
if (realWantedOutputs.empty())
@ -926,8 +959,10 @@ void DerivationGoal::resolvedFinished() {
auto newRealisation = *realisation;
newRealisation.id = DrvOutput{initialOutputs.at(wantedOutput).outputHash, wantedOutput};
newRealisation.signatures.clear();
newRealisation.dependentRealisations = drvOutputReferences(worker.store, *drv, realisation->outPath);
signRealisation(newRealisation);
worker.store.registerDrvOutput(newRealisation);
outputPaths.insert(realisation->outPath);
} else {
// If we don't have a realisation, then it must mean that something
// failed when building the resolved drv
@ -935,6 +970,13 @@ void DerivationGoal::resolvedFinished() {
}
}
runPostBuildHook(
worker.store,
*logger,
drvPath,
outputPaths
);
// This is potentially a bit fishy in terms of error reporting. Not sure
// how to do it in a cleaner way
amDone(nrFailed == 0 ? ecSuccess : ecFailed, ex);
@ -1047,42 +1089,6 @@ HookReply DerivationGoal::tryBuildHook()
}
StorePathSet DerivationGoal::exportReferences(const StorePathSet & storePaths)
{
StorePathSet paths;
for (auto & storePath : storePaths) {
if (!inputPaths.count(storePath))
throw BuildError("cannot export references of path '%s' because it is not in the input closure of the derivation", worker.store.printStorePath(storePath));
worker.store.computeFSClosure({storePath}, paths);
}
/* If there are derivations in the graph, then include their
outputs as well. This is useful if you want to do things
like passing all build-time dependencies of some path to a
derivation that builds a NixOS DVD image. */
auto paths2 = paths;
for (auto & j : paths2) {
if (j.isDerivation()) {
Derivation drv = worker.store.derivationFromPath(j);
for (auto & k : drv.outputsAndOptPaths(worker.store)) {
if (!k.second.second)
/* FIXME: I am confused why we are calling
`computeFSClosure` on the output path, rather than the
derivation itself. That doesn't seem right to me, so I
won't try to implement this for CA derivations. */
throw UnimplementedError("exportReferences on CA derivations is not yet implemented");
worker.store.computeFSClosure(*k.second.second, paths);
}
}
}
return paths;
}
void DerivationGoal::registerOutputs()
{
/* When using a build hook, the build hook can register the output
@ -1268,12 +1274,23 @@ void DerivationGoal::checkPathValidity()
};
}
if (settings.isExperimentalFeatureEnabled("ca-derivations")) {
if (auto real = worker.store.queryRealisation(
DrvOutput{initialOutputs.at(i.first).outputHash, i.first})) {
auto drvOutput = DrvOutput{initialOutputs.at(i.first).outputHash, i.first};
if (auto real = worker.store.queryRealisation(drvOutput)) {
info.known = {
.path = real->outPath,
.status = PathStatus::Valid,
};
} else if (info.known && info.known->status == PathStatus::Valid) {
// We know the output because it's a static output of the
// derivation, and the output path is valid, but we don't have
// its realisation stored (probably because it has been built
// without the `ca-derivations` experimental flag)
worker.store.registerDrvOutput(
Realisation{
drvOutput,
info.known->path,
}
);
}
}
}

View file

@ -17,6 +17,13 @@ DrvOutputSubstitutionGoal::DrvOutputSubstitutionGoal(const DrvOutput& id, Worker
void DrvOutputSubstitutionGoal::init()
{
trace("init");
/* If the derivation already exists, we're done */
if (worker.store.queryRealisation(id)) {
amDone(ecSuccess);
return;
}
subs = settings.useSubstitutes ? getDefaultSubstituters() : std::list<ref<Store>>();
tryNext();
}
@ -53,6 +60,26 @@ void DrvOutputSubstitutionGoal::tryNext()
return;
}
for (const auto & [depId, depPath] : outputInfo->dependentRealisations) {
if (depId != id) {
if (auto localOutputInfo = worker.store.queryRealisation(depId);
localOutputInfo && localOutputInfo->outPath != depPath) {
warn(
"substituter '%s' has an incompatible realisation for '%s', ignoring.\n"
"Local: %s\n"
"Remote: %s",
sub->getUri(),
depId.to_string(),
worker.store.printStorePath(localOutputInfo->outPath),
worker.store.printStorePath(depPath)
);
tryNext();
return;
}
addWaitee(worker.makeDrvOutputSubstitutionGoal(depId));
}
}
addWaitee(worker.makePathSubstitutionGoal(outputInfo->outPath));
if (waitees.empty()) outPathValid();

View file

@ -6,9 +6,9 @@
namespace nix {
void Store::buildPaths(const std::vector<DerivedPath> & reqs, BuildMode buildMode)
void Store::buildPaths(const std::vector<DerivedPath> & reqs, BuildMode buildMode, std::shared_ptr<Store> evalStore)
{
Worker worker(*this);
Worker worker(*this, evalStore ? *evalStore : *this);
Goals goals;
for (auto & br : reqs) {
@ -51,7 +51,7 @@ void Store::buildPaths(const std::vector<DerivedPath> & reqs, BuildMode buildMod
BuildResult Store::buildDerivation(const StorePath & drvPath, const BasicDerivation & drv,
BuildMode buildMode)
{
Worker worker(*this);
Worker worker(*this, *this);
auto goal = worker.makeBasicDerivationGoal(drvPath, drv, {}, buildMode);
BuildResult result;
@ -93,7 +93,7 @@ void Store::ensurePath(const StorePath & path)
/* If the path is already valid, we're done. */
if (isValidPath(path)) return;
Worker worker(*this);
Worker worker(*this, *this);
GoalPtr goal = worker.makePathSubstitutionGoal(path);
Goals goals = {goal};
@ -111,7 +111,7 @@ void Store::ensurePath(const StorePath & path)
void LocalStore::repairPath(const StorePath & path)
{
Worker worker(*this);
Worker worker(*this, *this);
GoalPtr goal = worker.makePathSubstitutionGoal(path, Repair);
Goals goals = {goal};

View file

@ -13,11 +13,9 @@ bool CompareGoalPtrs::operator() (const GoalPtr & a, const GoalPtr & b) const {
void addToWeakGoals(WeakGoals & goals, GoalPtr p)
{
// FIXME: necessary?
// FIXME: O(n)
for (auto & i : goals)
if (i.lock() == p) return;
goals.push_back(p);
if (goals.find(p) != goals.end())
return;
goals.insert(p);
}
@ -46,10 +44,7 @@ void Goal::waiteeDone(GoalPtr waitee, ExitCode result)
/* If we failed and keepGoing is not set, we remove all
remaining waitees. */
for (auto & goal : waitees) {
WeakGoals waiters2;
for (auto & j : goal->waiters)
if (j.lock() != shared_from_this()) waiters2.push_back(j);
goal->waiters = waiters2;
goal->waiters.extract(shared_from_this());
}
waitees.clear();

View file

@ -19,7 +19,7 @@ struct CompareGoalPtrs {
/* Set of goals. */
typedef set<GoalPtr, CompareGoalPtrs> Goals;
typedef list<WeakGoalPtr> WeakGoals;
typedef set<WeakGoalPtr, std::owner_less<WeakGoalPtr>> WeakGoals;
/* A map of paths to goals (and the other way around). */
typedef std::map<StorePath, WeakGoalPtr> WeakGoalMap;

View file

@ -17,10 +17,7 @@
#include <regex>
#include <queue>
#include <sys/types.h>
#include <sys/socket.h>
#include <sys/un.h>
#include <netdb.h>
#include <fcntl.h>
#include <termios.h>
#include <unistd.h>
@ -34,7 +31,6 @@
/* Includes required for chroot support. */
#if __linux__
#include <sys/socket.h>
#include <sys/ioctl.h>
#include <net/if.h>
#include <netinet/ip.h>
@ -70,12 +66,14 @@ void handleDiffHook(
auto diffHook = settings.diffHook;
if (diffHook != "" && settings.runDiffHook) {
try {
RunOptions diffHookOptions(diffHook,{tryA, tryB, drvPath, tmpDir});
diffHookOptions.searchPath = true;
diffHookOptions.uid = uid;
diffHookOptions.gid = gid;
diffHookOptions.chdir = "/";
auto diffRes = runProgram(diffHookOptions);
auto diffRes = runProgram(RunOptions {
.program = diffHook,
.searchPath = true,
.args = {tryA, tryB, drvPath, tmpDir},
.uid = uid,
.gid = gid,
.chdir = "/"
});
if (!statusOk(diffRes.first))
throw ExecError(diffRes.first,
"diff-hook program '%1%' %2%",
@ -153,6 +151,7 @@ void LocalDerivationGoal::killChild()
void LocalDerivationGoal::tryLocalBuild() {
unsigned int curBuilds = worker.getNrLocalBuilds();
if (curBuilds >= settings.maxBuildJobs) {
state = &DerivationGoal::tryToBuild;
worker.waitForBuildSlot(shared_from_this());
outputLocks.unlock();
return;
@ -284,7 +283,7 @@ bool LocalDerivationGoal::cleanupDecideWhetherDiskFull()
auto & localStore = getLocalStore();
uint64_t required = 8ULL * 1024 * 1024; // FIXME: make configurable
struct statvfs st;
if (statvfs(localStore.realStoreDir.c_str(), &st) == 0 &&
if (statvfs(localStore.realStoreDir.get().c_str(), &st) == 0 &&
(uint64_t) st.f_bavail * st.f_bsize < required)
diskFull = true;
if (statvfs(tmpDir.c_str(), &st) == 0 &&
@ -336,23 +335,6 @@ int childEntry(void * arg)
}
static std::once_flag dns_resolve_flag;
static void preloadNSS() {
/* builtin:fetchurl can trigger a DNS lookup, which with glibc can trigger a dynamic library load of
one of the glibc NSS libraries in a sandboxed child, which will fail unless the library's already
been loaded in the parent. So we force a lookup of an invalid domain to force the NSS machinery to
load its lookup libraries in the parent before any child gets a chance to. */
std::call_once(dns_resolve_flag, []() {
struct addrinfo *res = NULL;
if (getaddrinfo("this.pre-initializes.the.dns.resolvers.invalid.", "http", NULL, &res) != 0) {
if (res) freeaddrinfo(res);
}
});
}
static void linkOrCopy(const Path & from, const Path & to)
{
if (link(from.c_str(), to.c_str()) == -1) {
@ -381,9 +363,6 @@ void LocalDerivationGoal::startBuilder()
settings.thisSystem,
concatStringsSep<StringSet>(", ", worker.store.systemFeatures));
if (drv->isBuiltin())
preloadNSS();
#if __APPLE__
additionalSandboxProfile = parsedDrv->getStringAttr("__sandboxProfile").value_or("");
#endif
@ -409,7 +388,7 @@ void LocalDerivationGoal::startBuilder()
}
auto & localStore = getLocalStore();
if (localStore.storeDir != localStore.realStoreDir) {
if (localStore.storeDir != localStore.realStoreDir.get()) {
#if __linux__
useChroot = true;
#else
@ -510,7 +489,7 @@ void LocalDerivationGoal::startBuilder()
/* Write closure info to <fileName>. */
writeFile(tmpDir + "/" + fileName,
worker.store.makeValidityRegistration(
exportReferences({storePath}), false, false));
worker.store.exportReferences({storePath}, inputPaths), false, false));
}
}
@ -577,7 +556,9 @@ void LocalDerivationGoal::startBuilder()
throw Error("derivation '%s' requested impure path '%s', but it was not in allowed-impure-host-deps",
worker.store.printStorePath(drvPath), i);
dirsInChroot[i] = i;
/* Allow files in __impureHostDeps to be missing; e.g.
macOS 11+ has no /usr/lib/libSystem*.dylib */
dirsInChroot[i] = {i, true};
}
#if __linux__
@ -1110,113 +1091,28 @@ void LocalDerivationGoal::initEnv()
}
static std::regex shVarName("[A-Za-z_][A-Za-z0-9_]*");
void LocalDerivationGoal::writeStructuredAttrs()
{
auto structuredAttrs = parsedDrv->getStructuredAttrs();
if (!structuredAttrs) return;
if (auto structAttrsJson = parsedDrv->prepareStructuredAttrs(worker.store, inputPaths)) {
auto json = structAttrsJson.value();
nlohmann::json rewritten;
for (auto & [i, v] : json["outputs"].get<nlohmann::json::object_t>()) {
/* The placeholder must have a rewrite, so we use it to cover both the
cases where we know or don't know the output path ahead of time. */
rewritten[i] = rewriteStrings(v, inputRewrites);
}
auto json = *structuredAttrs;
json["outputs"] = rewritten;
/* Add an "outputs" object containing the output paths. */
nlohmann::json outputs;
for (auto & i : drv->outputs) {
/* The placeholder must have a rewrite, so we use it to cover both the
cases where we know or don't know the output path ahead of time. */
outputs[i.first] = rewriteStrings(hashPlaceholder(i.first), inputRewrites);
auto jsonSh = writeStructuredAttrsShell(json);
writeFile(tmpDir + "/.attrs.sh", rewriteStrings(jsonSh, inputRewrites));
chownToBuilder(tmpDir + "/.attrs.sh");
env["NIX_ATTRS_SH_FILE"] = tmpDir + "/.attrs.sh";
writeFile(tmpDir + "/.attrs.json", rewriteStrings(json.dump(), inputRewrites));
chownToBuilder(tmpDir + "/.attrs.json");
env["NIX_ATTRS_JSON_FILE"] = tmpDir + "/.attrs.json";
}
json["outputs"] = outputs;
/* Handle exportReferencesGraph. */
auto e = json.find("exportReferencesGraph");
if (e != json.end() && e->is_object()) {
for (auto i = e->begin(); i != e->end(); ++i) {
std::ostringstream str;
{
JSONPlaceholder jsonRoot(str, true);
StorePathSet storePaths;
for (auto & p : *i)
storePaths.insert(worker.store.parseStorePath(p.get<std::string>()));
worker.store.pathInfoToJSON(jsonRoot,
exportReferences(storePaths), false, true);
}
json[i.key()] = nlohmann::json::parse(str.str()); // urgh
}
}
writeFile(tmpDir + "/.attrs.json", rewriteStrings(json.dump(), inputRewrites));
chownToBuilder(tmpDir + "/.attrs.json");
/* As a convenience to bash scripts, write a shell file that
maps all attributes that are representable in bash -
namely, strings, integers, nulls, Booleans, and arrays and
objects consisting entirely of those values. (So nested
arrays or objects are not supported.) */
auto handleSimpleType = [](const nlohmann::json & value) -> std::optional<std::string> {
if (value.is_string())
return shellEscape(value);
if (value.is_number()) {
auto f = value.get<float>();
if (std::ceil(f) == f)
return std::to_string(value.get<int>());
}
if (value.is_null())
return std::string("''");
if (value.is_boolean())
return value.get<bool>() ? std::string("1") : std::string("");
return {};
};
std::string jsonSh;
for (auto i = json.begin(); i != json.end(); ++i) {
if (!std::regex_match(i.key(), shVarName)) continue;
auto & value = i.value();
auto s = handleSimpleType(value);
if (s)
jsonSh += fmt("declare %s=%s\n", i.key(), *s);
else if (value.is_array()) {
std::string s2;
bool good = true;
for (auto i = value.begin(); i != value.end(); ++i) {
auto s3 = handleSimpleType(i.value());
if (!s3) { good = false; break; }
s2 += *s3; s2 += ' ';
}
if (good)
jsonSh += fmt("declare -a %s=(%s)\n", i.key(), s2);
}
else if (value.is_object()) {
std::string s2;
bool good = true;
for (auto i = value.begin(); i != value.end(); ++i) {
auto s3 = handleSimpleType(i.value());
if (!s3) { good = false; break; }
s2 += fmt("[%s]=%s ", shellEscape(i.key()), *s3);
}
if (good)
jsonSh += fmt("declare -A %s=(%s)\n", i.key(), s2);
}
}
writeFile(tmpDir + "/.attrs.sh", rewriteStrings(jsonSh, inputRewrites));
chownToBuilder(tmpDir + "/.attrs.sh");
}
@ -1359,13 +1255,20 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual Lo
std::optional<const Realisation> queryRealisation(const DrvOutput & id) override
// XXX: This should probably be allowed if the realisation corresponds to
// an allowed derivation
{ throw Error("queryRealisation"); }
void buildPaths(const std::vector<DerivedPath> & paths, BuildMode buildMode) override
{
if (!goal.isAllowed(id))
throw InvalidPath("cannot query an unknown output id '%s' in recursive Nix", id.to_string());
return next->queryRealisation(id);
}
void buildPaths(const std::vector<DerivedPath> & paths, BuildMode buildMode, std::shared_ptr<Store> evalStore) override
{
assert(!evalStore);
if (buildMode != bmNormal) throw Error("unsupported build mode");
StorePathSet newPaths;
std::set<Realisation> newRealisations;
for (auto & req : paths) {
if (!goal.isAllowed(req))
@ -1378,16 +1281,28 @@ struct RestrictedStore : public virtual RestrictedStoreConfig, public virtual Lo
auto p = std::get_if<DerivedPath::Built>(&path);
if (!p) continue;
auto & bfd = *p;
auto drv = readDerivation(bfd.drvPath);
auto drvHashes = staticOutputHashes(*this, drv);
auto outputs = next->queryDerivationOutputMap(bfd.drvPath);
for (auto & [outputName, outputPath] : outputs)
if (wantOutput(outputName, bfd.outputs))
if (wantOutput(outputName, bfd.outputs)) {
newPaths.insert(outputPath);
if (settings.isExperimentalFeatureEnabled("ca-derivations")) {
auto thisRealisation = next->queryRealisation(
DrvOutput{drvHashes.at(outputName), outputName}
);
assert(thisRealisation);
newRealisations.insert(*thisRealisation);
}
}
}
StorePathSet closure;
next->computeFSClosure(newPaths, closure);
for (auto & path : closure)
goal.addDependency(path);
for (auto & real : Realisation::closure(*next, newRealisations))
goal.addedDrvOutputs.insert(real.id);
}
BuildResult buildDerivation(const StorePath & drvPath, const BasicDerivation & drv,
@ -1763,7 +1678,7 @@ void LocalDerivationGoal::runChild()
/* N.B. it is realistic that these paths might not exist. It
happens when testing Nix building fixed-output derivations
within a pure derivation. */
for (auto & path : { "/etc/resolv.conf", "/etc/services", "/etc/hosts", "/var/run/nscd/socket" })
for (auto & path : { "/etc/resolv.conf", "/etc/services", "/etc/hosts" })
if (pathExists(path))
ss.push_back(path);
}
@ -2400,32 +2315,19 @@ void LocalDerivationGoal::registerOutputs()
}
auto got = caSink.finish().first;
auto refs = rewriteRefs();
HashModuloSink narSink { htSHA256, oldHashPart };
dumpPath(actualPath, narSink);
auto narHashAndSize = narSink.finish();
ValidPathInfo newInfo0 {
worker.store.makeFixedOutputPath(
auto finalPath = worker.store.makeFixedOutputPath(
outputHash.method,
got,
outputPathName(drv->name, outputName),
refs.second,
refs.first),
narHashAndSize.first,
};
newInfo0.narSize = narHashAndSize.second;
newInfo0.ca = FixedOutputHash {
.method = outputHash.method,
.hash = got,
};
newInfo0.references = refs.second;
if (refs.first)
newInfo0.references.insert(newInfo0.path);
if (scratchPath != newInfo0.path) {
refs.first);
if (scratchPath != finalPath) {
// Also rewrite the output path
auto source = sinkToSource([&](Sink & nextSink) {
StringSink sink;
dumpPath(actualPath, sink);
RewritingSink rsink2(oldHashPart, std::string(newInfo0.path.hashPart()), nextSink);
RewritingSink rsink2(oldHashPart, std::string(finalPath.hashPart()), nextSink);
rsink2(*sink.s);
rsink2.flush();
});
@ -2435,6 +2337,21 @@ void LocalDerivationGoal::registerOutputs()
movePath(tmpPath, actualPath);
}
HashResult narHashAndSize = hashPath(htSHA256, actualPath);
ValidPathInfo newInfo0 {
finalPath,
narHashAndSize.first,
};
newInfo0.narSize = narHashAndSize.second;
newInfo0.ca = FixedOutputHash {
.method = outputHash.method,
.hash = got,
};
newInfo0.references = refs.second;
if (refs.first)
newInfo0.references.insert(newInfo0.path);
assert(newInfo0.ca);
return newInfo0;
};
@ -2495,6 +2412,10 @@ void LocalDerivationGoal::registerOutputs()
},
}, output.output);
/* FIXME: set proper permissions in restorePath() so
we don't have to do another traversal. */
canonicalisePathMetaData(actualPath, {}, inodesSeen);
/* Calculate where we'll move the output files. In the checking case we
will leave them where they are, for now, rather than move to
their usual "final destination" */
@ -2504,6 +2425,7 @@ void LocalDerivationGoal::registerOutputs()
floating CA derivations and hash-mismatching fixed-output
derivations. */
PathLocks dynamicOutputLock;
dynamicOutputLock.setDeletion(true);
auto optFixedPath = output.path(worker.store, drv->name, outputName);
if (!optFixedPath ||
worker.store.printStorePath(*optFixedPath) != finalDestPath)
@ -2527,6 +2449,7 @@ void LocalDerivationGoal::registerOutputs()
assert(newInfo.ca);
} else {
auto destPath = worker.store.toRealPath(finalDestPath);
deletePath(destPath);
movePath(actualPath, destPath);
actualPath = destPath;
}

View file

@ -115,6 +115,9 @@ struct LocalDerivationGoal : public DerivationGoal
/* Paths that were added via recursive Nix calls. */
StorePathSet addedPaths;
/* Realisations that were added via recursive Nix calls. */
std::set<DrvOutput> addedDrvOutputs;
/* Recursive Nix calls are only allowed to build or realize paths
in the original input closure or added via a recursive Nix call
(so e.g. you can't do 'nix-store -r /nix/store/<bla>' where
@ -123,6 +126,11 @@ struct LocalDerivationGoal : public DerivationGoal
{
return inputPaths.count(path) || addedPaths.count(path);
}
bool isAllowed(const DrvOutput & id)
{
return addedDrvOutputs.count(id);
}
bool isAllowed(const DerivedPath & req);
friend struct RestrictedStore;

View file

@ -204,7 +204,7 @@ void PathSubstitutionGoal::tryToRun()
Activity act(*logger, actSubstitute, Logger::Fields{worker.store.printStorePath(storePath), sub->getUri()});
PushActivity pact(act.id);
copyStorePath(ref<Store>(sub), ref<Store>(worker.store.shared_from_this()),
copyStorePath(*sub, worker.store,
subPath ? *subPath : storePath, repair, sub->isTrusted ? NoCheckSigs : CheckSigs);
promise.set_value();

View file

@ -9,11 +9,12 @@
namespace nix {
Worker::Worker(Store & store)
Worker::Worker(Store & store, Store & evalStore)
: act(*logger, actRealise)
, actDerivations(*logger, actBuilds)
, actSubstitutions(*logger, actCopyPaths)
, store(store)
, evalStore(evalStore)
{
/* Debugging: prevent recursive workers. */
nrLocalBuilds = 0;

View file

@ -110,6 +110,7 @@ public:
bool checkMismatch;
Store & store;
Store & evalStore;
std::unique_ptr<HookInstance> hook;
@ -131,7 +132,7 @@ public:
it answers with "decline-permanently", we don't try again. */
bool tryBuildHook = true;
Worker(Store & store);
Worker(Store & store, Store & evalStore);
~Worker();
/* Make a goal (with caching). */

View file

@ -3,10 +3,19 @@
-- is enabled
create table if not exists Realisations (
id integer primary key autoincrement not null,
drvPath text not null,
outputName text not null, -- symbolic output id, usually "out"
outputPath integer not null,
signatures text, -- space-separated list
primary key (drvPath, outputName),
foreign key (outputPath) references ValidPaths(id) on delete cascade
);
create index if not exists IndexRealisations on Realisations(drvPath, outputName);
create table if not exists RealisationsRefs (
referrer integer not null,
realisationReference integer,
foreign key (referrer) references Realisations(id) on delete cascade,
foreign key (realisationReference) references Realisations(id) on delete restrict
);

View file

@ -227,6 +227,12 @@ struct ClientSettings
try {
if (name == "ssh-auth-sock") // obsolete
;
else if (name == settings.experimentalFeatures.name) {
// We don't want to forward the experimental features to
// the daemon, as that could cause some pretty weird stuff
if (tokenizeString<Strings>(value) != settings.experimentalFeatures.get())
debug("Ignoring the client-specified experimental features");
}
else if (trusted
|| name == settings.buildTimeout.name
|| name == "connect-timeout"
@ -243,27 +249,10 @@ struct ClientSettings
}
};
static void writeValidPathInfo(
ref<Store> store,
unsigned int clientVersion,
Sink & to,
std::shared_ptr<const ValidPathInfo> info)
{
to << (info->deriver ? store->printStorePath(*info->deriver) : "")
<< info->narHash.to_string(Base16, false);
worker_proto::write(*store, to, info->references);
to << info->registrationTime << info->narSize;
if (GET_PROTOCOL_MINOR(clientVersion) >= 16) {
to << info->ultimate
<< info->sigs
<< renderContentAddress(info->ca);
}
}
static std::vector<DerivedPath> readDerivedPaths(Store & store, unsigned int clientVersion, Source & from)
{
std::vector<DerivedPath> reqs;
if (GET_PROTOCOL_MINOR(clientVersion) >= 29) {
if (GET_PROTOCOL_MINOR(clientVersion) >= 30) {
reqs = worker_proto::read(store, from, Phantom<std::vector<DerivedPath>> {});
} else {
for (auto & s : readStrings<Strings>(from))
@ -422,9 +411,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
}();
logger->stopWork();
to << store->printStorePath(pathInfo->path);
writeValidPathInfo(store, clientVersion, to, pathInfo);
pathInfo->write(to, *store, GET_PROTOCOL_MINOR(clientVersion));
} else {
HashType hashAlgo;
std::string baseName;
@ -471,6 +458,21 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
break;
}
case wopAddMultipleToStore: {
bool repair, dontCheckSigs;
from >> repair >> dontCheckSigs;
if (!trusted && dontCheckSigs)
dontCheckSigs = false;
logger->startWork();
FramedSource source(from);
store->addMultipleToStore(source,
RepairFlag{repair},
dontCheckSigs ? NoCheckSigs : CheckSigs);
logger->stopWork();
break;
}
case wopAddTextToStore: {
string suffix = readString(from);
string s = readString(from);
@ -770,7 +772,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
if (info) {
if (GET_PROTOCOL_MINOR(clientVersion) >= 17)
to << 1;
writeValidPathInfo(store, clientVersion, to, info);
info->write(to, *store, GET_PROTOCOL_MINOR(clientVersion), false);
} else {
assert(GET_PROTOCOL_MINOR(clientVersion) >= 17);
to << 0;
@ -885,11 +887,15 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
case wopRegisterDrvOutput: {
logger->startWork();
auto outputId = DrvOutput::parse(readString(from));
auto outputPath = StorePath(readString(from));
auto resolvedDrv = StorePath(readString(from));
store->registerDrvOutput(Realisation{
.id = outputId, .outPath = outputPath});
if (GET_PROTOCOL_MINOR(clientVersion) < 31) {
auto outputId = DrvOutput::parse(readString(from));
auto outputPath = StorePath(readString(from));
store->registerDrvOutput(Realisation{
.id = outputId, .outPath = outputPath});
} else {
auto realisation = worker_proto::read(*store, from, Phantom<Realisation>());
store->registerDrvOutput(realisation);
}
logger->stopWork();
break;
}
@ -899,9 +905,15 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
auto outputId = DrvOutput::parse(readString(from));
auto info = store->queryRealisation(outputId);
logger->stopWork();
std::set<StorePath> outPaths;
if (info) outPaths.insert(info->outPath);
worker_proto::write(*store, to, outPaths);
if (GET_PROTOCOL_MINOR(clientVersion) < 31) {
std::set<StorePath> outPaths;
if (info) outPaths.insert(info->outPath);
worker_proto::write(*store, to, outPaths);
} else {
std::set<Realisation> realisations;
if (info) realisations.insert(*info);
worker_proto::write(*store, to, realisations);
}
break;
}

View file

@ -568,7 +568,7 @@ DrvHashModulo hashDerivationModulo(Store & store, const Derivation & drv, bool m
}
std::map<std::string, Hash> staticOutputHashes(Store& store, const Derivation& drv)
std::map<std::string, Hash> staticOutputHashes(Store & store, const Derivation & drv)
{
std::map<std::string, Hash> res;
std::visit(overloaded {

View file

@ -138,8 +138,8 @@ struct Derivation : BasicDerivation
/* Return the underlying basic derivation but with these changes:
1. Input drvs are emptied, but the outputs of them that were used are
added directly to input sources.
1. Input drvs are emptied, but the outputs of them that were used are
added directly to input sources.
2. Input placeholders are replaced with realized input store paths. */
std::optional<BasicDerivation> tryResolve(Store & store);

View file

@ -11,18 +11,33 @@ nlohmann::json DerivedPath::Opaque::toJSON(ref<Store> store) const {
return res;
}
nlohmann::json DerivedPathWithHints::Built::toJSON(ref<Store> store) const {
nlohmann::json BuiltPath::Built::toJSON(ref<Store> store) const {
nlohmann::json res;
res["drvPath"] = store->printStorePath(drvPath);
for (const auto& [output, path] : outputs) {
res["outputs"][output] = path ? store->printStorePath(*path) : "";
res["outputs"][output] = store->printStorePath(path);
}
return res;
}
nlohmann::json derivedPathsWithHintsToJSON(const DerivedPathsWithHints & buildables, ref<Store> store) {
StorePathSet BuiltPath::outPaths() const
{
return std::visit(
overloaded{
[](BuiltPath::Opaque p) { return StorePathSet{p.path}; },
[](BuiltPath::Built b) {
StorePathSet res;
for (auto & [_, path] : b.outputs)
res.insert(path);
return res;
},
}, raw()
);
}
nlohmann::json derivedPathsWithHintsToJSON(const BuiltPaths & buildables, ref<Store> store) {
auto res = nlohmann::json::array();
for (const DerivedPathWithHints & buildable : buildables) {
for (const BuiltPath & buildable : buildables) {
std::visit([&res, store](const auto & buildable) {
res.push_back(buildable.toJSON(store));
}, buildable.raw());
@ -62,7 +77,7 @@ DerivedPath::Built DerivedPath::Built::parse(const Store & store, std::string_vi
auto outputsS = s.substr(n + 1);
std::set<string> outputs;
if (outputsS != "*")
outputs = tokenizeString<std::set<string>>(outputsS);
outputs = tokenizeString<std::set<string>>(outputsS, ",");
return {drvPath, outputs};
}
@ -74,4 +89,30 @@ DerivedPath DerivedPath::parse(const Store & store, std::string_view s)
: (DerivedPath) DerivedPath::Built::parse(store, s);
}
RealisedPath::Set BuiltPath::toRealisedPaths(Store & store) const
{
RealisedPath::Set res;
std::visit(
overloaded{
[&](BuiltPath::Opaque p) { res.insert(p.path); },
[&](BuiltPath::Built p) {
auto drvHashes =
staticOutputHashes(store, store.readDerivation(p.drvPath));
for (auto& [outputName, outputPath] : p.outputs) {
if (settings.isExperimentalFeatureEnabled(
"ca-derivations")) {
auto thisRealisation = store.queryRealisation(
DrvOutput{drvHashes.at(outputName), outputName});
assert(thisRealisation); // We've built it, so we must have the realisation
res.insert(*thisRealisation);
} else {
res.insert(outputPath);
}
}
},
},
raw());
return res;
}
}

View file

@ -2,6 +2,7 @@
#include "util.hh"
#include "path.hh"
#include "realisation.hh"
#include <optional>
@ -79,51 +80,44 @@ struct DerivedPath : _DerivedPathRaw {
/**
* A built derived path with hints in the form of optional concrete output paths.
*
* See 'DerivedPathWithHints' for an explanation.
* See 'BuiltPath' for an explanation.
*/
struct DerivedPathWithHintsBuilt {
struct BuiltPathBuilt {
StorePath drvPath;
std::map<std::string, std::optional<StorePath>> outputs;
std::map<std::string, StorePath> outputs;
nlohmann::json toJSON(ref<Store> store) const;
static DerivedPathWithHintsBuilt parse(const Store & store, std::string_view);
static BuiltPathBuilt parse(const Store & store, std::string_view);
};
using _DerivedPathWithHintsRaw = std::variant<
using _BuiltPathRaw = std::variant<
DerivedPath::Opaque,
DerivedPathWithHintsBuilt
BuiltPathBuilt
>;
/**
* A derived path with hints in the form of optional concrete output paths in the built case.
*
* This type is currently just used by the CLI. The paths are filled in
* during evaluation for derivations that know what paths they will
* produce in advanced, i.e. input-addressed or fixed-output content
* addressed derivations.
*
* That isn't very good, because it puts floating content-addressed
* derivations "at a disadvantage". It would be better to never rely on
* the output path of unbuilt derivations, and exclusively use the
* realizations types to work with built derivations' concrete output
* paths.
* A built path. Similar to a `DerivedPath`, but enriched with the corresponding
* output path(s).
*/
// FIXME Stop using and delete this, or if that is not possible move out of libstore to libcmd.
struct DerivedPathWithHints : _DerivedPathWithHintsRaw {
using Raw = _DerivedPathWithHintsRaw;
struct BuiltPath : _BuiltPathRaw {
using Raw = _BuiltPathRaw;
using Raw::Raw;
using Opaque = DerivedPathOpaque;
using Built = DerivedPathWithHintsBuilt;
using Built = BuiltPathBuilt;
inline const Raw & raw() const {
return static_cast<const Raw &>(*this);
}
StorePathSet outPaths() const;
RealisedPath::Set toRealisedPaths(Store & store) const;
};
typedef std::vector<DerivedPathWithHints> DerivedPathsWithHints;
typedef std::vector<DerivedPath> DerivedPaths;
typedef std::vector<BuiltPath> BuiltPaths;
nlohmann::json derivedPathsWithHintsToJSON(const DerivedPathsWithHints & buildables, ref<Store> store);
nlohmann::json derivedPathsWithHintsToJSON(const BuiltPaths & buildables, ref<Store> store);
}
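For orientation only (not part of this diff): a minimal sketch of how the new BuiltPath type can be consumed, assuming a valid Store reference and a BuiltPaths vector produced elsewhere by the build logic; the function name is made up.

#include <iostream>
#include "derived-path.hh"
#include "store-api.hh"

// Print every concrete output path contained in a set of BuiltPaths.
void printOutputs(nix::Store & store, const nix::BuiltPaths & builtPaths)
{
    for (auto & bp : builtPaths)
        for (auto & outPath : bp.outPaths())
            std::cout << store.printStorePath(outPath) << "\n";
}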

View file

@ -43,11 +43,6 @@ struct DummyStore : public virtual DummyStoreConfig, public virtual Store
RepairFlag repair, CheckSigsFlag checkSigs) override
{ unsupported("addToStore"); }
StorePath addToStore(const string & name, const Path & srcPath,
FileIngestionMethod method, HashType hashAlgo,
PathFilter & filter, RepairFlag repair) override
{ unsupported("addToStore"); }
StorePath addTextToStore(const string & name, const string & s,
const StorePathSet & references, RepairFlag repair) override
{ unsupported("addTextToStore"); }

View file

@ -7,7 +7,7 @@
#include "finally.hh"
#include "callback.hh"
#ifdef ENABLE_S3
#if ENABLE_S3
#include <aws/core/client/ClientConfiguration.h>
#endif
@ -148,7 +148,7 @@ struct curlFileTransfer : public FileTransfer
}
LambdaSink finalSink;
std::shared_ptr<CompressionSink> decompressionSink;
std::shared_ptr<FinishSink> decompressionSink;
std::optional<StringSink> errorSink;
std::exception_ptr writeException;
@ -665,7 +665,7 @@ struct curlFileTransfer : public FileTransfer
writeFull(wakeupPipe.writeSide.get(), " ");
}
#ifdef ENABLE_S3
#if ENABLE_S3
std::tuple<std::string, std::string, Store::Params> parseS3Uri(std::string uri)
{
auto [path, params] = splitUriAndParams(uri);
@ -688,7 +688,7 @@ struct curlFileTransfer : public FileTransfer
if (hasPrefix(request.uri, "s3://")) {
// FIXME: do this on a worker thread
try {
#ifdef ENABLE_S3
#if ENABLE_S3
auto [bucketName, key, params] = parseS3Uri(request.uri);
std::string profile = get(params, "profile").value_or("");

View file

@ -775,7 +775,7 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results)
try {
AutoCloseDir dir(opendir(realStoreDir.c_str()));
AutoCloseDir dir(opendir(realStoreDir.get().c_str()));
if (!dir) throw SysError("opening directory '%1%'", realStoreDir);
/* Read the store and immediately delete all paths that
@ -856,7 +856,7 @@ void LocalStore::autoGC(bool sync)
return std::stoll(readFile(*fakeFreeSpaceFile));
struct statvfs st;
if (statvfs(realStoreDir.c_str(), &st))
if (statvfs(realStoreDir.get().c_str(), &st))
throw SysError("getting filesystem info about '%s'", realStoreDir);
return (uint64_t) st.f_bavail * st.f_frsize;

View file

@ -166,7 +166,7 @@ bool Settings::isExperimentalFeatureEnabled(const std::string & name)
}
MissingExperimentalFeature::MissingExperimentalFeature(std::string feature)
: Error("experimental Nix feature '%1%' is disabled; use '--experimental-features %1%' to override", feature)
: Error("experimental Nix feature '%1%' is disabled; use '--extra-experimental-features %1%' to override", feature)
, missingFeature(feature)
{}

View file

@ -630,8 +630,10 @@ public:
Strings{"https://cache.nixos.org/"},
"substituters",
R"(
A list of URLs of substituters, separated by whitespace. The default
is `https://cache.nixos.org`.
A list of URLs of substituters, separated by whitespace. Substituters
are tried based on their Priority value, which each substituter can set
independently. Lower value means higher priority.
The default is `https://cache.nixos.org`, with a Priority of 40.
)",
{"binary-caches"}};
@ -714,7 +716,7 @@ public:
send a series of commands to modify various settings to stdout. The
currently recognized commands are:
- `extra-sandbox-paths`
- `extra-sandbox-paths`\
Pass a list of files and directories to be included in the
sandbox for this build. One entry per line, terminated by an
empty line. Entries have the same format as `sandbox-paths`.
@ -967,6 +969,9 @@ public:
resolves to a different location from that of the build machine. You
can enable this setting if you are sure you're not going to do that.
)"};
Setting<bool> useRegistries{this, true, "use-registries",
"Whether to use flake registries to resolve flake references."};
};

View file

@ -267,8 +267,11 @@ public:
return status;
}
void buildPaths(const std::vector<DerivedPath> & drvPaths, BuildMode buildMode) override
void buildPaths(const std::vector<DerivedPath> & drvPaths, BuildMode buildMode, std::shared_ptr<Store> evalStore) override
{
if (evalStore && evalStore.get() != this)
throw Error("building on an SSH store is incompatible with '--eval-store'");
auto conn(connections->get());
conn->to << cmdBuildPaths;

View file

@ -93,7 +93,7 @@ protected:
void LocalBinaryCacheStore::init()
{
createDirs(binaryCacheDir + "/nar");
createDirs(binaryCacheDir + realisationsPrefix);
createDirs(binaryCacheDir + "/" + realisationsPrefix);
if (writeDebugInfo)
createDirs(binaryCacheDir + "/debuginfo");
BinaryCacheStore::init();

View file

@ -18,6 +18,9 @@ struct LocalFSStoreConfig : virtual StoreConfig
const PathSetting logDir{(StoreConfig*) this, false,
rootDir != "" ? rootDir + "/nix/var/log/nix" : settings.nixLogDir,
"log", "directory where Nix will store state"};
const PathSetting realStoreDir{(StoreConfig*) this, false,
rootDir != "" ? rootDir + "/nix/store" : storeDir, "real",
"physical path to the Nix store"};
};
class LocalFSStore : public virtual LocalFSStoreConfig, public virtual Store
@ -34,7 +37,7 @@ public:
/* Register a permanent GC root. */
Path addPermRoot(const StorePath & storePath, const Path & gcRoot);
virtual Path getRealStoreDir() { return storeDir; }
virtual Path getRealStoreDir() { return realStoreDir; }
Path toRealPath(const Path & storePath) override
{

View file

@ -53,12 +53,15 @@ struct LocalStore::State::Stmts {
SQLiteStmt InvalidatePath;
SQLiteStmt AddDerivationOutput;
SQLiteStmt RegisterRealisedOutput;
SQLiteStmt UpdateRealisedOutput;
SQLiteStmt QueryValidDerivers;
SQLiteStmt QueryDerivationOutputs;
SQLiteStmt QueryRealisedOutput;
SQLiteStmt QueryAllRealisedOutputs;
SQLiteStmt QueryPathFromHashPart;
SQLiteStmt QueryValidPaths;
SQLiteStmt QueryRealisationReferences;
SQLiteStmt AddRealisationReference;
};
int getSchema(Path schemaPath)
@ -76,7 +79,7 @@ int getSchema(Path schemaPath)
void migrateCASchema(SQLite& db, Path schemaPath, AutoCloseFD& lockFd)
{
const int nixCASchemaVersion = 1;
const int nixCASchemaVersion = 2;
int curCASchema = getSchema(schemaPath);
if (curCASchema != nixCASchemaVersion) {
if (curCASchema > nixCASchemaVersion) {
@ -94,7 +97,39 @@ void migrateCASchema(SQLite& db, Path schemaPath, AutoCloseFD& lockFd)
#include "ca-specific-schema.sql.gen.hh"
;
db.exec(schema);
curCASchema = nixCASchemaVersion;
}
if (curCASchema < 2) {
SQLiteTxn txn(db);
// Ugly little sql dance to add a new `id` column and make it the primary key
db.exec(R"(
create table Realisations2 (
id integer primary key autoincrement not null,
drvPath text not null,
outputName text not null, -- symbolic output id, usually "out"
outputPath integer not null,
signatures text, -- space-separated list
foreign key (outputPath) references ValidPaths(id) on delete cascade
);
insert into Realisations2 (drvPath, outputName, outputPath, signatures)
select drvPath, outputName, outputPath, signatures from Realisations;
drop table Realisations;
alter table Realisations2 rename to Realisations;
)");
db.exec(R"(
create index if not exists IndexRealisations on Realisations(drvPath, outputName);
create table if not exists RealisationsRefs (
referrer integer not null,
realisationReference integer,
foreign key (referrer) references Realisations(id) on delete cascade,
foreign key (realisationReference) references Realisations(id) on delete restrict
);
)");
txn.commit();
}
writeFile(schemaPath, fmt("%d", nixCASchemaVersion));
lockFile(lockFd.get(), ltRead, true);
}
@ -106,9 +141,6 @@ LocalStore::LocalStore(const Params & params)
, LocalStoreConfig(params)
, Store(params)
, LocalFSStore(params)
, realStoreDir_{this, false, rootDir != "" ? rootDir + "/nix/store" : storeDir, "real",
"physical path to the Nix store"}
, realStoreDir(realStoreDir_)
, dbDir(stateDir + "/db")
, linksDir(realStoreDir + "/.links")
, reservedPath(dbDir + "/reserved")
@ -153,13 +185,13 @@ LocalStore::LocalStore(const Params & params)
printError("warning: the group '%1%' specified in 'build-users-group' does not exist", settings.buildUsersGroup);
else {
struct stat st;
if (stat(realStoreDir.c_str(), &st))
if (stat(realStoreDir.get().c_str(), &st))
throw SysError("getting attributes of path '%1%'", realStoreDir);
if (st.st_uid != 0 || st.st_gid != gr->gr_gid || (st.st_mode & ~S_IFMT) != perm) {
if (chown(realStoreDir.c_str(), 0, gr->gr_gid) == -1)
if (chown(realStoreDir.get().c_str(), 0, gr->gr_gid) == -1)
throw SysError("changing ownership of path '%1%'", realStoreDir);
if (chmod(realStoreDir.c_str(), perm) == -1)
if (chmod(realStoreDir.get().c_str(), perm) == -1)
throw SysError("changing permissions on path '%1%'", realStoreDir);
}
}
@ -310,13 +342,22 @@ LocalStore::LocalStore(const Params & params)
if (settings.isExperimentalFeatureEnabled("ca-derivations")) {
state->stmts->RegisterRealisedOutput.create(state->db,
R"(
insert or replace into Realisations (drvPath, outputName, outputPath, signatures)
insert into Realisations (drvPath, outputName, outputPath, signatures)
values (?, ?, (select id from ValidPaths where path = ?), ?)
;
)");
state->stmts->UpdateRealisedOutput.create(state->db,
R"(
update Realisations
set signatures = ?
where
drvPath = ? and
outputName = ?
;
)");
state->stmts->QueryRealisedOutput.create(state->db,
R"(
select Output.path, Realisations.signatures from Realisations
select Realisations.id, Output.path, Realisations.signatures from Realisations
inner join ValidPaths as Output on Output.id = Realisations.outputPath
where drvPath = ? and outputName = ?
;
@ -328,6 +369,19 @@ LocalStore::LocalStore(const Params & params)
where drvPath = ?
;
)");
state->stmts->QueryRealisationReferences.create(state->db,
R"(
select drvPath, outputName from Realisations
join RealisationsRefs on realisationReference = Realisations.id
where referrer = ?;
)");
state->stmts->AddRealisationReference.create(state->db,
R"(
insert or replace into RealisationsRefs (referrer, realisationReference)
values (
(select id from Realisations where drvPath = ? and outputName = ?),
(select id from Realisations where drvPath = ? and outputName = ?));
)");
}
}
@ -437,14 +491,14 @@ void LocalStore::makeStoreWritable()
if (getuid() != 0) return;
/* Check if /nix/store is on a read-only mount. */
struct statvfs stat;
if (statvfs(realStoreDir.c_str(), &stat) != 0)
if (statvfs(realStoreDir.get().c_str(), &stat) != 0)
throw SysError("getting info about the Nix store mount point");
if (stat.f_flag & ST_RDONLY) {
if (unshare(CLONE_NEWNS) == -1)
throw SysError("setting up a private mount namespace");
if (mount(0, realStoreDir.c_str(), "none", MS_REMOUNT | MS_BIND, 0) == -1)
if (mount(0, realStoreDir.get().c_str(), "none", MS_REMOUNT | MS_BIND, 0) == -1)
throw SysError("remounting %1% writable", realStoreDir);
}
#endif
@ -671,14 +725,54 @@ void LocalStore::registerDrvOutput(const Realisation & info, CheckSigsFlag check
void LocalStore::registerDrvOutput(const Realisation & info)
{
settings.requireExperimentalFeature("ca-derivations");
auto state(_state.lock());
retrySQLite<void>([&]() {
state->stmts->RegisterRealisedOutput.use()
(info.id.strHash())
(info.id.outputName)
(printStorePath(info.outPath))
(concatStringsSep(" ", info.signatures))
.exec();
auto state(_state.lock());
if (auto oldR = queryRealisation_(*state, info.id)) {
if (info.isCompatibleWith(*oldR)) {
auto combinedSignatures = oldR->signatures;
combinedSignatures.insert(info.signatures.begin(),
info.signatures.end());
state->stmts->UpdateRealisedOutput.use()
(concatStringsSep(" ", combinedSignatures))
(info.id.strHash())
(info.id.outputName)
.exec();
} else {
throw Error("Trying to register a realisation of '%s', but we already "
"have another one locally.\n"
"Local: %s\n"
"Remote: %s",
info.id.to_string(),
printStorePath(oldR->outPath),
printStorePath(info.outPath)
);
}
} else {
state->stmts->RegisterRealisedOutput.use()
(info.id.strHash())
(info.id.outputName)
(printStorePath(info.outPath))
(concatStringsSep(" ", info.signatures))
.exec();
}
for (auto & [outputId, depPath] : info.dependentRealisations) {
auto localRealisation = queryRealisationCore_(*state, outputId);
if (!localRealisation)
throw Error("unable to register the derivation '%s' as it "
"depends on the non existent '%s'",
info.id.to_string(), outputId.to_string());
if (localRealisation->second.outPath != depPath)
throw Error("unable to register the derivation '%s' as it "
"depends on a realisation of '%s' that doesnt"
"match what we have locally",
info.id.to_string(), outputId.to_string());
state->stmts->AddRealisationReference.use()
(info.id.strHash())
(info.id.outputName)
(outputId.strHash())
(outputId.outputName)
.exec();
}
});
}
@ -984,14 +1078,19 @@ StorePathSet LocalStore::querySubstitutablePaths(const StorePathSet & paths)
}
// FIXME: move this, it's not specific to LocalStore.
void LocalStore::querySubstitutablePathInfos(const StorePathCAMap & paths, SubstitutablePathInfos & infos)
{
if (!settings.useSubstitutes) return;
for (auto & sub : getDefaultSubstituters()) {
for (auto & path : paths) {
if (infos.count(path.first))
// Choose first succeeding substituter.
continue;
auto subPath(path.first);
// recompute store path so that we can use a different store root
// Recompute store path so that we can use a different store root.
if (path.second) {
subPath = makeFixedOutputPathFromCA(path.first.name(), *path.second);
if (sub->storeDir == storeDir)
@ -1152,24 +1251,15 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
deletePath(realPath);
// text hashing has long been allowed to have non-self-references because it is used for drv files.
bool refersToSelf = info.references.count(info.path) > 0;
if (info.ca.has_value() && !info.references.empty() && !(std::holds_alternative<TextHash>(*info.ca) && !refersToSelf))
settings.requireExperimentalFeature("ca-references");
/* While restoring the path from the NAR, compute the hash
of the NAR. */
std::unique_ptr<AbstractHashSink> hashSink;
if (!info.ca.has_value() || !info.references.count(info.path))
hashSink = std::make_unique<HashSink>(htSHA256);
else
hashSink = std::make_unique<HashModuloSink>(htSHA256, std::string(info.path.hashPart()));
HashSink hashSink(htSHA256);
TeeSource wrapperSource { source, *hashSink };
TeeSource wrapperSource { source, hashSink };
restorePath(realPath, wrapperSource);
auto hashResult = hashSink->finish();
auto hashResult = hashSink.finish();
if (hashResult.first != info.narHash)
throw Error("hash mismatch importing path '%s';\n specified: %s\n got: %s",
@ -1179,6 +1269,31 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
throw Error("size mismatch importing path '%s';\n specified: %s\n got: %s",
printStorePath(info.path), info.narSize, hashResult.second);
if (info.ca) {
if (auto foHash = std::get_if<FixedOutputHash>(&*info.ca)) {
auto actualFoHash = hashCAPath(
foHash->method,
foHash->hash.type,
info.path
);
if (foHash->hash != actualFoHash.hash) {
throw Error("ca hash mismatch importing path '%s';\n specified: %s\n got: %s",
printStorePath(info.path),
foHash->hash.to_string(Base32, true),
actualFoHash.hash.to_string(Base32, true));
}
}
if (auto textHash = std::get_if<TextHash>(&*info.ca)) {
auto actualTextHash = hashString(htSHA256, readFile(realPath));
if (textHash->hash != actualTextHash) {
throw Error("ca hash mismatch importing path '%s';\n specified: %s\n got: %s",
printStorePath(info.path),
textHash->hash.to_string(Base32, true),
actualTextHash.to_string(Base32, true));
}
}
}
autoGC();
canonicalisePathMetaData(realPath, {});
@ -1447,14 +1562,10 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
/* Check the content hash (optionally - slow). */
printMsg(lvlTalkative, "checking contents of '%s'", printStorePath(i));
std::unique_ptr<AbstractHashSink> hashSink;
if (!info->ca || !info->references.count(info->path))
hashSink = std::make_unique<HashSink>(info->narHash.type);
else
hashSink = std::make_unique<HashModuloSink>(info->narHash.type, std::string(info->path.hashPart()));
auto hashSink = HashSink(info->narHash.type);
dumpPath(Store::toRealPath(i), *hashSink);
auto current = hashSink->finish();
dumpPath(Store::toRealPath(i), hashSink);
auto current = hashSink.finish();
if (info->narHash != nullHash && info->narHash != current.first) {
printError("path '%s' was modified! expected hash '%s', got '%s'",
@ -1672,19 +1783,97 @@ void LocalStore::createUser(const std::string & userName, uid_t userId)
}
}
std::optional<const Realisation> LocalStore::queryRealisation(
const DrvOutput& id) {
typedef std::optional<const Realisation> Ret;
return retrySQLite<Ret>([&]() -> Ret {
std::optional<std::pair<int64_t, Realisation>> LocalStore::queryRealisationCore_(
LocalStore::State & state,
const DrvOutput & id)
{
auto useQueryRealisedOutput(
state.stmts->QueryRealisedOutput.use()
(id.strHash())
(id.outputName));
if (!useQueryRealisedOutput.next())
return std::nullopt;
auto realisationDbId = useQueryRealisedOutput.getInt(0);
auto outputPath = parseStorePath(useQueryRealisedOutput.getStr(1));
auto signatures =
tokenizeString<StringSet>(useQueryRealisedOutput.getStr(2));
return {{
realisationDbId,
Realisation{
.id = id,
.outPath = outputPath,
.signatures = signatures,
}
}};
}
std::optional<const Realisation> LocalStore::queryRealisation_(
LocalStore::State & state,
const DrvOutput & id)
{
auto maybeCore = queryRealisationCore_(state, id);
if (!maybeCore)
return std::nullopt;
auto [realisationDbId, res] = *maybeCore;
std::map<DrvOutput, StorePath> dependentRealisations;
auto useRealisationRefs(
state.stmts->QueryRealisationReferences.use()
(realisationDbId));
while (useRealisationRefs.next()) {
auto depId = DrvOutput {
Hash::parseAnyPrefixed(useRealisationRefs.getStr(0)),
useRealisationRefs.getStr(1),
};
auto dependentRealisation = queryRealisationCore_(state, depId);
assert(dependentRealisation); // Enforced by the db schema
auto outputPath = dependentRealisation->second.outPath;
dependentRealisations.insert({depId, outputPath});
}
res.dependentRealisations = dependentRealisations;
return { res };
}
std::optional<const Realisation>
LocalStore::queryRealisation(const DrvOutput & id)
{
return retrySQLite<std::optional<const Realisation>>([&]() {
auto state(_state.lock());
auto use(state->stmts->QueryRealisedOutput.use()(id.strHash())(
id.outputName));
if (!use.next())
return std::nullopt;
auto outputPath = parseStorePath(use.getStr(0));
auto signatures = tokenizeString<StringSet>(use.getStr(1));
return Ret{Realisation{
.id = id, .outPath = outputPath, .signatures = signatures}};
return queryRealisation_(*state, id);
});
}
FixedOutputHash LocalStore::hashCAPath(
const FileIngestionMethod & method, const HashType & hashType,
const StorePath & path)
{
return hashCAPath(method, hashType, Store::toRealPath(path), path.hashPart());
}
FixedOutputHash LocalStore::hashCAPath(
const FileIngestionMethod & method,
const HashType & hashType,
const Path & path,
const std::string_view pathHash
)
{
HashModuloSink caSink ( hashType, std::string(pathHash) );
switch (method) {
case FileIngestionMethod::Recursive:
dumpPath(path, caSink);
break;
case FileIngestionMethod::Flat:
readFile(path, caSink);
break;
}
auto hash = caSink.finish().first;
return FixedOutputHash{
.method = method,
.hash = hash,
};
}
} // namespace nix

View file

@ -83,9 +83,6 @@ private:
public:
PathSetting realStoreDir_;
const Path realStoreDir;
const Path dbDir;
const Path linksDir;
const Path reservedPath;
@ -206,6 +203,8 @@ public:
void registerDrvOutput(const Realisation & info, CheckSigsFlag checkSigs) override;
void cacheDrvOutputMapping(State & state, const uint64_t deriver, const string & outputName, const StorePath & output);
std::optional<const Realisation> queryRealisation_(State & state, const DrvOutput & id);
std::optional<std::pair<int64_t, Realisation>> queryRealisationCore_(State & state, const DrvOutput & id);
std::optional<const Realisation> queryRealisation(const DrvOutput&) override;
private:
@ -279,10 +278,21 @@ private:
void signPathInfo(ValidPathInfo & info);
void signRealisation(Realisation &);
Path getRealStoreDir() override { return realStoreDir; }
void createUser(const std::string & userName, uid_t userId) override;
// XXX: Make a generic `Store` method
FixedOutputHash hashCAPath(
const FileIngestionMethod & method,
const HashType & hashType,
const StorePath & path);
FixedOutputHash hashCAPath(
const FileIngestionMethod & method,
const HashType & hashType,
const Path & path,
const std::string_view pathHash
);
friend struct LocalDerivationGoal;
friend struct PathSubstitutionGoal;
friend struct SubstitutionGoal;

View file

@ -8,12 +8,12 @@ libstore_SOURCES := $(wildcard $(d)/*.cc $(d)/builtins/*.cc $(d)/build/*.cc)
libstore_LIBS = libutil
libstore_LDFLAGS = $(SQLITE3_LIBS) -lbz2 $(LIBCURL_LIBS) $(SODIUM_LIBS) -pthread
ifneq ($(OS), FreeBSD)
libstore_LDFLAGS += $(SQLITE3_LIBS) $(LIBCURL_LIBS) $(SODIUM_LIBS) -pthread
ifdef HOST_LINUX
libstore_LDFLAGS += -ldl
endif
ifeq ($(OS), Darwin)
ifdef HOST_DARWIN
libstore_FILES = sandbox-defaults.sb sandbox-minimal.sb sandbox-network.sb
endif
@ -23,7 +23,7 @@ ifeq ($(ENABLE_S3), 1)
libstore_LDFLAGS += -laws-cpp-sdk-transfer -laws-cpp-sdk-s3 -laws-cpp-sdk-core
endif
ifeq ($(OS), SunOS)
ifdef HOST_SOLARIS
libstore_LDFLAGS += -lsocket
endif
@ -60,7 +60,7 @@ $(d)/build.cc:
clean-files += $(d)/schema.sql.gen.hh $(d)/ca-specific-schema.sql.gen.hh
$(eval $(call install-file-in, $(d)/nix-store.pc, $(prefix)/lib/pkgconfig, 0644))
$(eval $(call install-file-in, $(d)/nix-store.pc, $(libdir)/pkgconfig, 0644))
$(foreach i, $(wildcard src/libstore/builtins/*.hh), \
$(eval $(call install-file-in, $(i), $(includedir)/nix/builtins, 0644)))

View file

@ -16,13 +16,18 @@ Machine::Machine(decltype(storeUri) storeUri,
decltype(mandatoryFeatures) mandatoryFeatures,
decltype(sshPublicHostKey) sshPublicHostKey) :
storeUri(
// Backwards compatibility: if the URI is a hostname,
// prepend ssh://.
// Backwards compatibility: if the URI is schemeless, is not a path,
// and is not one of the special store connection words, prepend
// ssh://.
storeUri.find("://") != std::string::npos
|| hasPrefix(storeUri, "local")
|| hasPrefix(storeUri, "remote")
|| hasPrefix(storeUri, "auto")
|| hasPrefix(storeUri, "/")
|| storeUri.find("/") != std::string::npos
|| storeUri == "auto"
|| storeUri == "daemon"
|| storeUri == "local"
|| hasPrefix(storeUri, "auto?")
|| hasPrefix(storeUri, "daemon?")
|| hasPrefix(storeUri, "local?")
|| hasPrefix(storeUri, "?")
? storeUri
: "ssh://" + storeUri),
systemTypes(systemTypes),

View file

@ -6,98 +6,73 @@
#include "thread-pool.hh"
#include "topo-sort.hh"
#include "callback.hh"
#include "closure.hh"
namespace nix {
void Store::computeFSClosure(const StorePathSet & startPaths,
StorePathSet & paths_, bool flipDirection, bool includeOutputs, bool includeDerivers)
{
struct State
{
size_t pending;
StorePathSet & paths;
std::exception_ptr exc;
};
std::function<std::set<StorePath>(const StorePath & path, std::future<ref<const ValidPathInfo>> &)> queryDeps;
if (flipDirection)
queryDeps = [&](const StorePath& path,
std::future<ref<const ValidPathInfo>> & fut) {
StorePathSet res;
StorePathSet referrers;
queryReferrers(path, referrers);
for (auto& ref : referrers)
if (ref != path)
res.insert(ref);
Sync<State> state_(State{0, paths_, 0});
if (includeOutputs)
for (auto& i : queryValidDerivers(path))
res.insert(i);
std::function<void(const StorePath &)> enqueue;
if (includeDerivers && path.isDerivation())
for (auto& [_, maybeOutPath] : queryPartialDerivationOutputMap(path))
if (maybeOutPath && isValidPath(*maybeOutPath))
res.insert(*maybeOutPath);
return res;
};
else
queryDeps = [&](const StorePath& path,
std::future<ref<const ValidPathInfo>> & fut) {
StorePathSet res;
auto info = fut.get();
for (auto& ref : info->references)
if (ref != path)
res.insert(ref);
std::condition_variable done;
if (includeOutputs && path.isDerivation())
for (auto& [_, maybeOutPath] : queryPartialDerivationOutputMap(path))
if (maybeOutPath && isValidPath(*maybeOutPath))
res.insert(*maybeOutPath);
enqueue = [&](const StorePath & path) -> void {
{
auto state(state_.lock());
if (state->exc) return;
if (!state->paths.insert(path).second) return;
state->pending++;
}
if (includeDerivers && info->deriver && isValidPath(*info->deriver))
res.insert(*info->deriver);
return res;
};
queryPathInfo(path, {[&](std::future<ref<const ValidPathInfo>> fut) {
// FIXME: calls to isValidPath() should be async
try {
auto info = fut.get();
if (flipDirection) {
StorePathSet referrers;
queryReferrers(path, referrers);
for (auto & ref : referrers)
if (ref != path)
enqueue(ref);
if (includeOutputs)
for (auto & i : queryValidDerivers(path))
enqueue(i);
if (includeDerivers && path.isDerivation())
for (auto & i : queryDerivationOutputs(path))
if (isValidPath(i) && queryPathInfo(i)->deriver == path)
enqueue(i);
} else {
for (auto & ref : info->references)
if (ref != path)
enqueue(ref);
if (includeOutputs && path.isDerivation())
for (auto & i : queryDerivationOutputs(path))
if (isValidPath(i)) enqueue(i);
if (includeDerivers && info->deriver && isValidPath(*info->deriver))
enqueue(*info->deriver);
}
{
auto state(state_.lock());
assert(state->pending);
if (!--state->pending) done.notify_one();
}
} catch (...) {
auto state(state_.lock());
if (!state->exc) state->exc = std::current_exception();
assert(state->pending);
if (!--state->pending) done.notify_one();
};
}});
};
for (auto & startPath : startPaths)
enqueue(startPath);
{
auto state(state_.lock());
while (state->pending) state.wait(done);
if (state->exc) std::rethrow_exception(state->exc);
}
computeClosure<StorePath>(
startPaths, paths_,
[&](const StorePath& path,
std::function<void(std::promise<std::set<StorePath>>&)>
processEdges) {
std::promise<std::set<StorePath>> promise;
std::function<void(std::future<ref<const ValidPathInfo>>)>
getDependencies =
[&](std::future<ref<const ValidPathInfo>> fut) {
try {
promise.set_value(queryDeps(path, fut));
} catch (...) {
promise.set_exception(std::current_exception());
}
};
queryPathInfo(path, getDependencies);
processEdges(promise);
});
}
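Usage sketch (not part of the patch): the refactored helper keeps the old calling convention, so computing and printing the runtime closure of a single store path still looks like this; the wrapper function is made up.

#include <iostream>
#include "store-api.hh"

void printClosure(nix::Store & store, const nix::StorePath & path)
{
    nix::StorePathSet closure;
    store.computeFSClosure(path, closure,
        false /* flipDirection */, false /* includeOutputs */, false /* includeDerivers */);
    for (auto & p : closure)
        std::cout << store.printStorePath(p) << "\n";
}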
void Store::computeFSClosure(const StorePath & startPath,
StorePathSet & paths_, bool flipDirection, bool includeOutputs, bool includeDerivers)
{
@ -279,5 +254,44 @@ StorePaths Store::topoSortPaths(const StorePathSet & paths)
}});
}
std::map<DrvOutput, StorePath> drvOutputReferences(
const std::set<Realisation> & inputRealisations,
const StorePathSet & pathReferences)
{
std::map<DrvOutput, StorePath> res;
for (const auto & input : inputRealisations) {
if (pathReferences.count(input.outPath)) {
res.insert({input.id, input.outPath});
}
}
return res;
}
std::map<DrvOutput, StorePath> drvOutputReferences(
Store & store,
const Derivation & drv,
const StorePath & outputPath)
{
std::set<Realisation> inputRealisations;
for (const auto& [inputDrv, outputNames] : drv.inputDrvs) {
auto outputHashes =
staticOutputHashes(store, store.readDerivation(inputDrv));
for (const auto& outputName : outputNames) {
auto thisRealisation = store.queryRealisation(
DrvOutput{outputHashes.at(outputName), outputName});
if (!thisRealisation)
throw Error(
"output '%s' of derivation '%s' isnt built", outputName,
store.printStorePath(inputDrv));
inputRealisations.insert(*thisRealisation);
}
}
auto info = store.queryPathInfo(outputPath);
return drvOutputReferences(Realisation::closure(store, inputRealisations), info->references);
}
}

View file

@ -4,6 +4,7 @@
#include "globals.hh"
#include <sqlite3.h>
#include <nlohmann/json.hpp>
namespace nix {
@ -38,6 +39,15 @@ create table if not exists NARs (
foreign key (cache) references BinaryCaches(id) on delete cascade
);
create table if not exists Realisations (
cache integer not null,
outputId text not null,
content blob, -- Json serialisation of the realisation, or null if the realisation is absent
timestamp integer not null,
primary key (cache, outputId),
foreign key (cache) references BinaryCaches(id) on delete cascade
);
create table if not exists LastPurge (
dummy text primary key,
value integer
@ -63,7 +73,9 @@ public:
struct State
{
SQLite db;
SQLiteStmt insertCache, queryCache, insertNAR, insertMissingNAR, queryNAR, purgeCache;
SQLiteStmt insertCache, queryCache, insertNAR, insertMissingNAR,
queryNAR, insertRealisation, insertMissingRealisation,
queryRealisation, purgeCache;
std::map<std::string, Cache> caches;
};
@ -98,6 +110,26 @@ public:
state->queryNAR.create(state->db,
"select present, namePart, url, compression, fileHash, fileSize, narHash, narSize, refs, deriver, sigs, ca from NARs where cache = ? and hashPart = ? and ((present = 0 and timestamp > ?) or (present = 1 and timestamp > ?))");
state->insertRealisation.create(state->db,
R"(
insert or replace into Realisations(cache, outputId, content, timestamp)
values (?, ?, ?, ?)
)");
state->insertMissingRealisation.create(state->db,
R"(
insert or replace into Realisations(cache, outputId, timestamp)
values (?, ?, ?)
)");
state->queryRealisation.create(state->db,
R"(
select content from Realisations
where cache = ? and outputId = ? and
((content is null and timestamp > ?) or
(content is not null and timestamp > ?))
)");
/* Periodically purge expired entries from the database. */
retrySQLite<void>([&]() {
auto now = time(0);
@ -212,6 +244,38 @@ public:
});
}
std::pair<Outcome, std::shared_ptr<Realisation>> lookupRealisation(
const std::string & uri, const DrvOutput & id) override
{
return retrySQLite<std::pair<Outcome, std::shared_ptr<Realisation>>>(
[&]() -> std::pair<Outcome, std::shared_ptr<Realisation>> {
auto state(_state.lock());
auto & cache(getCache(*state, uri));
auto now = time(0);
auto queryRealisation(state->queryRealisation.use()
(cache.id)
(id.to_string())
(now - settings.ttlNegativeNarInfoCache)
(now - settings.ttlPositiveNarInfoCache));
if (!queryRealisation.next())
return {oUnknown, 0};
if (queryRealisation.isNull(0))
return {oInvalid, 0};
auto realisation =
std::make_shared<Realisation>(Realisation::fromJSON(
nlohmann::json::parse(queryRealisation.getStr(0)),
"Local disk cache"));
return {oValid, realisation};
});
}
void upsertNarInfo(
const std::string & uri, const std::string & hashPart,
std::shared_ptr<const ValidPathInfo> info) override
@ -251,6 +315,39 @@ public:
}
});
}
void upsertRealisation(
const std::string & uri,
const Realisation & realisation) override
{
retrySQLite<void>([&]() {
auto state(_state.lock());
auto & cache(getCache(*state, uri));
state->insertRealisation.use()
(cache.id)
(realisation.id.to_string())
(realisation.toJSON().dump())
(time(0)).exec();
});
}
virtual void upsertAbsentRealisation(
const std::string & uri,
const DrvOutput & id) override
{
retrySQLite<void>([&]() {
auto state(_state.lock());
auto & cache(getCache(*state, uri));
state->insertMissingRealisation.use()
(cache.id)
(id.to_string())
(time(0)).exec();
});
}
};
ref<NarInfoDiskCache> getNarInfoDiskCache()

View file

@ -2,6 +2,7 @@
#include "ref.hh"
#include "nar-info.hh"
#include "realisation.hh"
namespace nix {
@ -29,6 +30,15 @@ public:
virtual void upsertNarInfo(
const std::string & uri, const std::string & hashPart,
std::shared_ptr<const ValidPathInfo> info) = 0;
virtual void upsertRealisation(
const std::string & uri,
const Realisation & realisation) = 0;
virtual void upsertAbsentRealisation(
const std::string & uri,
const DrvOutput & id) = 0;
virtual std::pair<Outcome, std::shared_ptr<Realisation>> lookupRealisation(
const std::string & uri, const DrvOutput & id) = 0;
};
/* Return a singleton cache object that can be used concurrently by
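Sketch only (not part of the patch): how a caller might consult the local disk cache for a realisation before falling back to the network, assuming the Outcome enum is exposed on NarInfoDiskCache the same way it is for narinfo lookups; the helper name is made up, uri and id come from the caller.

#include <memory>
#include <string>
#include "nar-info-disk-cache.hh"

std::shared_ptr<nix::Realisation> cachedLookup(const std::string & uri, const nix::DrvOutput & id)
{
    auto cache = nix::getNarInfoDiskCache();
    auto [outcome, realisation] = cache->lookupRealisation(uri, id);
    if (outcome == nix::NarInfoDiskCache::oValid)
        return realisation;
    return nullptr; // unknown or known-absent: fall back to the remote cache
}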

View file

@ -198,7 +198,7 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
/* Make the containing directory writable, but only if it's not
the store itself (we don't want or need to mess with its
permissions). */
bool mustToggle = dirOf(path) != realStoreDir;
bool mustToggle = dirOf(path) != realStoreDir.get();
if (mustToggle) makeWritable(dirOf(path));
/* When we're done, make the directory read-only again and reset

View file

@ -1,6 +1,8 @@
#include "parsed-derivations.hh"
#include <nlohmann/json.hpp>
#include <regex>
#include "json.hh"
namespace nix {
@ -91,6 +93,8 @@ StringSet ParsedDerivation::getRequiredSystemFeatures() const
StringSet res;
for (auto & i : getStringsAttr("requiredSystemFeatures").value_or(Strings()))
res.insert(i);
if (!derivationHasKnownOutputPaths(drv.type()))
res.insert("ca-derivations");
return res;
}
@ -121,4 +125,107 @@ bool ParsedDerivation::substitutesAllowed() const
return getBoolAttr("allowSubstitutes", true);
}
static std::regex shVarName("[A-Za-z_][A-Za-z0-9_]*");
std::optional<nlohmann::json> ParsedDerivation::prepareStructuredAttrs(Store & store, const StorePathSet & inputPaths)
{
auto structuredAttrs = getStructuredAttrs();
if (!structuredAttrs) return std::nullopt;
auto json = *structuredAttrs;
/* Add an "outputs" object containing the output paths. */
nlohmann::json outputs;
for (auto & i : drv.outputs)
outputs[i.first] = hashPlaceholder(i.first);
json["outputs"] = outputs;
/* Handle exportReferencesGraph. */
auto e = json.find("exportReferencesGraph");
if (e != json.end() && e->is_object()) {
for (auto i = e->begin(); i != e->end(); ++i) {
std::ostringstream str;
{
JSONPlaceholder jsonRoot(str, true);
StorePathSet storePaths;
for (auto & p : *i)
storePaths.insert(store.parseStorePath(p.get<std::string>()));
store.pathInfoToJSON(jsonRoot,
store.exportReferences(storePaths, inputPaths), false, true);
}
json[i.key()] = nlohmann::json::parse(str.str()); // urgh
}
}
return json;
}
/* As a convenience to bash scripts, write a shell file that
maps all attributes that are representable in bash -
namely, strings, integers, nulls, Booleans, and arrays and
objects consisting entirely of those values. (So nested
arrays or objects are not supported.) */
std::string writeStructuredAttrsShell(const nlohmann::json & json)
{
auto handleSimpleType = [](const nlohmann::json & value) -> std::optional<std::string> {
if (value.is_string())
return shellEscape(value);
if (value.is_number()) {
auto f = value.get<float>();
if (std::ceil(f) == f)
return std::to_string(value.get<int>());
}
if (value.is_null())
return std::string("''");
if (value.is_boolean())
return value.get<bool>() ? std::string("1") : std::string("");
return {};
};
std::string jsonSh;
for (auto & [key, value] : json.items()) {
if (!std::regex_match(key, shVarName)) continue;
auto s = handleSimpleType(value);
if (s)
jsonSh += fmt("declare %s=%s\n", key, *s);
else if (value.is_array()) {
std::string s2;
bool good = true;
for (auto & value2 : value) {
auto s3 = handleSimpleType(value2);
if (!s3) { good = false; break; }
s2 += *s3; s2 += ' ';
}
if (good)
jsonSh += fmt("declare -a %s=(%s)\n", key, s2);
}
else if (value.is_object()) {
std::string s2;
bool good = true;
for (auto & [key2, value2] : value.items()) {
auto s3 = handleSimpleType(value2);
if (!s3) { good = false; break; }
s2 += fmt("[%s]=%s ", shellEscape(key2), *s3);
}
if (good)
jsonSh += fmt("declare -A %s=(%s)\n", key, s2);
}
}
return jsonSh;
}
}
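Illustration only (not part of the patch): feeding a small, made-up attribute set through the new helper to see the kind of shell declarations it produces.

#include <iostream>
#include <nlohmann/json.hpp>
#include "parsed-derivations.hh"

int main()
{
    nlohmann::json attrs = {
        {"name", "example"},
        {"flags", {"-O2", "-g"}},
        {"meta", {{"priority", "5"}}},
    };
    // Prints bash declarations along the lines of:
    //   declare name='example'
    //   declare -a flags=('-O2' '-g' )
    //   declare -A meta=(['priority']='5' )
    std::cout << nix::writeStructuredAttrsShell(attrs);
}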

View file

@ -36,6 +36,10 @@ public:
bool willBuildLocally(Store & localStore) const;
bool substitutesAllowed() const;
std::optional<nlohmann::json> prepareStructuredAttrs(Store & store, const StorePathSet & inputPaths);
};
std::string writeStructuredAttrsShell(const nlohmann::json & json);
}

46
src/libstore/path-info.cc Normal file
View file

@ -0,0 +1,46 @@
#include "path-info.hh"
#include "worker-protocol.hh"
namespace nix {
ValidPathInfo ValidPathInfo::read(Source & source, const Store & store, unsigned int format)
{
return read(source, store, format, store.parseStorePath(readString(source)));
}
ValidPathInfo ValidPathInfo::read(Source & source, const Store & store, unsigned int format, StorePath && path)
{
auto deriver = readString(source);
auto narHash = Hash::parseAny(readString(source), htSHA256);
ValidPathInfo info(path, narHash);
if (deriver != "") info.deriver = store.parseStorePath(deriver);
info.references = worker_proto::read(store, source, Phantom<StorePathSet> {});
source >> info.registrationTime >> info.narSize;
if (format >= 16) {
source >> info.ultimate;
info.sigs = readStrings<StringSet>(source);
info.ca = parseContentAddressOpt(readString(source));
}
return info;
}
void ValidPathInfo::write(
Sink & sink,
const Store & store,
unsigned int format,
bool includePath) const
{
if (includePath)
sink << store.printStorePath(path);
sink << (deriver ? store.printStorePath(*deriver) : "")
<< narHash.to_string(Base16, false);
worker_proto::write(store, sink, references);
sink << registrationTime << narSize;
if (format >= 16) {
sink << ultimate
<< sigs
<< renderContentAddress(ca);
}
}
}

View file

@ -105,6 +105,11 @@ struct ValidPathInfo
ValidPathInfo(const StorePath & path, Hash narHash) : path(path), narHash(narHash) { };
virtual ~ValidPathInfo() { }
static ValidPathInfo read(Source & source, const Store & store, unsigned int format);
static ValidPathInfo read(Source & source, const Store & store, unsigned int format, StorePath && path);
void write(Sink & sink, const Store & store, unsigned int format, bool includePath = true) const;
};
typedef std::map<StorePath, ValidPathInfo> ValidPathInfos;

View file

@ -126,9 +126,9 @@ void deleteGeneration(const Path & profile, GenerationNumber gen)
static void deleteGeneration2(const Path & profile, GenerationNumber gen, bool dryRun)
{
if (dryRun)
printInfo(format("would remove generation %1%") % gen);
notice("would remove profile version %1%", gen);
else {
printInfo(format("removing generation %1%") % gen);
notice("removing profile version %1%", gen);
deleteGeneration(profile, gen);
}
}
@ -142,7 +142,7 @@ void deleteGenerations(const Path & profile, const std::set<GenerationNumber> &
auto [gens, curGen] = findGenerations(profile);
if (gensToDelete.count(*curGen))
throw Error("cannot delete current generation of profile %1%'", profile);
throw Error("cannot delete current version of profile %1%'", profile);
for (auto & i : gens) {
if (!gensToDelete.count(i.number)) continue;
@ -211,12 +211,15 @@ void deleteGenerationsOlderThan(const Path & profile, time_t t, bool dryRun)
void deleteGenerationsOlderThan(const Path & profile, const string & timeSpec, bool dryRun)
{
if (timeSpec.empty() || timeSpec[timeSpec.size() - 1] != 'd')
throw UsageError("invalid number of days specifier '%1%', expected something like '14d'", timeSpec);
time_t curTime = time(0);
string strDays = string(timeSpec, 0, timeSpec.size() - 1);
auto days = string2Int<int>(strDays);
if (!days || *days < 1)
throw Error("invalid number of days specifier '%1%'", timeSpec);
throw UsageError("invalid number of days specifier '%1%'", timeSpec);
time_t oldTime = curTime - *days * 24 * 3600;
@ -233,6 +236,37 @@ void switchLink(Path link, Path target)
}
void switchGeneration(
const Path & profile,
std::optional<GenerationNumber> dstGen,
bool dryRun)
{
PathLocks lock;
lockProfile(lock, profile);
auto [gens, curGen] = findGenerations(profile);
std::optional<Generation> dst;
for (auto & i : gens)
if ((!dstGen && i.number < curGen) ||
(dstGen && i.number == *dstGen))
dst = i;
if (!dst) {
if (dstGen)
throw Error("profile version %1% does not exist", *dstGen);
else
throw Error("no profile version older than the current (%1%) exists", curGen.value_or(0));
}
notice("switching profile from version %d to %d", curGen.value_or(0), dst->number);
if (dryRun) return;
switchLink(profile, dst->path);
}
void lockProfile(PathLocks & lock, const Path & profile)
{
lock.lockPaths({profile}, (format("waiting for lock on profile '%1%'") % profile).str());

View file

@ -11,7 +11,7 @@ namespace nix {
class StorePath;
typedef unsigned int GenerationNumber;
typedef uint64_t GenerationNumber;
struct Generation
{
@ -46,6 +46,13 @@ void deleteGenerationsOlderThan(const Path & profile, const string & timeSpec, b
void switchLink(Path link, Path target);
/* Roll back a profile to the specified generation, or to the most
recent one older than the current. */
void switchGeneration(
const Path & profile,
std::optional<GenerationNumber> dstGen,
bool dryRun);
/* Ensure exclusive access to a profile. Any command that modifies
the profile first acquires this lock. */
void lockProfile(PathLocks & lock, const Path & profile);
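A hypothetical caller sketch for the switchGeneration() declaration above (the wrapper name and the generation number 42 are illustrative; Path, GenerationNumber and the dry-run flag come from this header):
#include "profiles.hh"
#include <optional>
using namespace nix;
void rollbackExamples(const Path & profile)
{
    // Switch to the most recent profile version older than the current one;
    // presumably what rollback-style commands build on.
    switchGeneration(profile, std::nullopt, /* dryRun */ false);
    // Or switch to an explicit profile version; switchGeneration() throws
    // if that version does not exist.
    switchGeneration(profile, GenerationNumber{42}, /* dryRun */ false);
}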

View file

@ -1,5 +1,6 @@
#include "realisation.hh"
#include "store-api.hh"
#include "closure.hh"
#include <nlohmann/json.hpp>
namespace nix {
@ -21,11 +22,52 @@ std::string DrvOutput::to_string() const {
return strHash() + "!" + outputName;
}
std::set<Realisation> Realisation::closure(Store & store, const std::set<Realisation> & startOutputs)
{
std::set<Realisation> res;
Realisation::closure(store, startOutputs, res);
return res;
}
void Realisation::closure(Store & store, const std::set<Realisation> & startOutputs, std::set<Realisation> & res)
{
auto getDeps = [&](const Realisation& current) -> std::set<Realisation> {
std::set<Realisation> res;
for (auto& [currentDep, _] : current.dependentRealisations) {
if (auto currentRealisation = store.queryRealisation(currentDep))
res.insert(*currentRealisation);
else
throw Error(
"Unrealised derivation '%s'", currentDep.to_string());
}
return res;
};
computeClosure<Realisation>(
startOutputs, res,
[&](const Realisation& current,
std::function<void(std::promise<std::set<Realisation>>&)>
processEdges) {
std::promise<std::set<Realisation>> promise;
try {
auto res = getDeps(current);
promise.set_value(res);
} catch (...) {
promise.set_exception(std::current_exception());
}
return processEdges(promise);
});
}
nlohmann::json Realisation::toJSON() const {
auto jsonDependentRealisations = nlohmann::json::object();
for (auto & [depId, depOutPath] : dependentRealisations)
jsonDependentRealisations.emplace(depId.to_string(), depOutPath.to_string());
return nlohmann::json{
{"id", id.to_string()},
{"outPath", outPath.to_string()},
{"signatures", signatures},
{"dependentRealisations", jsonDependentRealisations},
};
}
@ -51,10 +93,16 @@ Realisation Realisation::fromJSON(
if (auto signaturesIterator = json.find("signatures"); signaturesIterator != json.end())
signatures.insert(signaturesIterator->begin(), signaturesIterator->end());
std::map <DrvOutput, StorePath> dependentRealisations;
if (auto jsonDependencies = json.find("dependentRealisations"); jsonDependencies != json.end())
for (auto & [jsonDepId, jsonDepOutPath] : jsonDependencies->get<std::map<std::string, std::string>>())
dependentRealisations.insert({DrvOutput::parse(jsonDepId), StorePath(jsonDepOutPath)});
return Realisation{
.id = DrvOutput::parse(getField("id")),
.outPath = StorePath(getField("outPath")),
.signatures = signatures,
.dependentRealisations = dependentRealisations,
};
}
@ -92,6 +140,24 @@ StorePath RealisedPath::path() const {
return std::visit([](auto && arg) { return arg.getPath(); }, raw);
}
bool Realisation::isCompatibleWith(const Realisation & other) const
{
assert (id == other.id);
if (outPath == other.outPath) {
if (dependentRealisations.empty() != other.dependentRealisations.empty()) {
warn(
"Encountered a realisation for '%s' with an empty set of "
"dependencies. This is likely an artifact from an older Nix. "
"Ill try to fix the realisation if I can",
id.to_string());
return true;
} else if (dependentRealisations == other.dependentRealisations) {
return true;
}
}
return false;
}
void RealisedPath::closure(
Store& store,
const RealisedPath::Set& startPaths,

View file

@ -28,6 +28,14 @@ struct Realisation {
StringSet signatures;
/**
* The realisations that are required for the current one to be valid.
*
* When importing this realisation, the store will first check that all its
* dependencies exist, and map to the correct output path
*/
std::map<DrvOutput, StorePath> dependentRealisations;
nlohmann::json toJSON() const;
static Realisation fromJSON(const nlohmann::json& json, const std::string& whence);
@ -36,6 +44,11 @@ struct Realisation {
bool checkSignature(const PublicKeys & publicKeys, const std::string & sig) const;
size_t checkSignatures(const PublicKeys & publicKeys) const;
static std::set<Realisation> closure(Store &, const std::set<Realisation> &);
static void closure(Store &, const std::set<Realisation> &, std::set<Realisation> & res);
bool isCompatibleWith(const Realisation & other) const;
StorePath getPath() const { return outPath; }
GENERATE_CMP(Realisation, me->id, me->outPath);
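For orientation, a hypothetical helper built on the new dependentRealisations field and closure() member documented above (the helper name is made up; everything else is from this header):
#include "realisation.hh"
#include "store-api.hh"
// Collect a realisation together with everything it transitively depends on
// via dependentRealisations, e.g. before shipping it to another store.
std::set<nix::Realisation> withDependencies(nix::Store & store, const nix::Realisation & r)
{
    return nix::Realisation::closure(store, {r});
}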

View file

@ -5,6 +5,7 @@
#include <map>
#include <cstdlib>
#include <mutex>
namespace nix {
@ -16,14 +17,13 @@ static unsigned int refLength = 32; /* characters */
static void search(const unsigned char * s, size_t len,
StringSet & hashes, StringSet & seen)
{
static bool initialised = false;
static std::once_flag initialised;
static bool isBase32[256];
if (!initialised) {
std::call_once(initialised, [](){
for (unsigned int i = 0; i < 256; ++i) isBase32[i] = false;
for (unsigned int i = 0; i < base32Chars.size(); ++i)
isBase32[(unsigned char) base32Chars[i]] = true;
initialised = true;
}
});
for (size_t i = 0; i + refLength <= len; ) {
int j;

View file

@ -222,6 +222,7 @@ void RemoteStore::setOptions(Connection & conn)
overrides.erase(settings.buildCores.name);
overrides.erase(settings.useSubstitutes.name);
overrides.erase(loggerSettings.showTrace.name);
overrides.erase(settings.experimentalFeatures.name);
conn.to << overrides.size();
for (auto & i : overrides)
conn.to << i.first << i.second.value;
@ -386,23 +387,6 @@ void RemoteStore::querySubstitutablePathInfos(const StorePathCAMap & pathsMap, S
}
ref<const ValidPathInfo> RemoteStore::readValidPathInfo(ConnectionHandle & conn, const StorePath & path)
{
auto deriver = readString(conn->from);
auto narHash = Hash::parseAny(readString(conn->from), htSHA256);
auto info = make_ref<ValidPathInfo>(path, narHash);
if (deriver != "") info->deriver = parseStorePath(deriver);
info->references = worker_proto::read(*this, conn->from, Phantom<StorePathSet> {});
conn->from >> info->registrationTime >> info->narSize;
if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 16) {
conn->from >> info->ultimate;
info->sigs = readStrings<StringSet>(conn->from);
info->ca = parseContentAddressOpt(readString(conn->from));
}
return info;
}
void RemoteStore::queryPathInfoUncached(const StorePath & path,
Callback<std::shared_ptr<const ValidPathInfo>> callback) noexcept
{
@ -423,7 +407,8 @@ void RemoteStore::queryPathInfoUncached(const StorePath & path,
bool valid; conn->from >> valid;
if (!valid) throw InvalidPath("path '%s' is not valid", printStorePath(path));
}
info = readValidPathInfo(conn, path);
info = std::make_shared<ValidPathInfo>(
ValidPathInfo::read(conn->from, *this, GET_PROTOCOL_MINOR(conn->daemonVersion), StorePath{path}));
}
callback(std::move(info));
} catch (...) { callback.rethrow(); }
@ -525,8 +510,8 @@ ref<const ValidPathInfo> RemoteStore::addCAToStore(
});
}
auto path = parseStorePath(readString(conn->from));
return readValidPathInfo(conn, path);
return make_ref<ValidPathInfo>(
ValidPathInfo::read(conn->from, *this, GET_PROTOCOL_MINOR(conn->daemonVersion)));
}
else {
if (repair) throw Error("repairing is not supported when building through the Nix daemon protocol < 1.25");
@ -642,6 +627,25 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source,
}
void RemoteStore::addMultipleToStore(
Source & source,
RepairFlag repair,
CheckSigsFlag checkSigs)
{
if (GET_PROTOCOL_MINOR(getConnection()->daemonVersion) >= 32) {
auto conn(getConnection());
conn->to
<< wopAddMultipleToStore
<< repair
<< !checkSigs;
conn.withFramedSink([&](Sink & sink) {
source.drainInto(sink);
});
} else
Store::addMultipleToStore(source, repair, checkSigs);
}
StorePath RemoteStore::addTextToStore(const string & name, const string & s,
const StorePathSet & references, RepairFlag repair)
{
@ -653,8 +657,12 @@ void RemoteStore::registerDrvOutput(const Realisation & info)
{
auto conn(getConnection());
conn->to << wopRegisterDrvOutput;
conn->to << info.id.to_string();
conn->to << std::string(info.outPath.to_string());
if (GET_PROTOCOL_MINOR(conn->daemonVersion) < 31) {
conn->to << info.id.to_string();
conn->to << std::string(info.outPath.to_string());
} else {
worker_proto::write(*this, conn->to, info);
}
conn.processStderr();
}
@ -664,15 +672,22 @@ std::optional<const Realisation> RemoteStore::queryRealisation(const DrvOutput &
conn->to << wopQueryRealisation;
conn->to << id.to_string();
conn.processStderr();
auto outPaths = worker_proto::read(*this, conn->from, Phantom<std::set<StorePath>>{});
if (outPaths.empty())
return std::nullopt;
return {Realisation{.id = id, .outPath = *outPaths.begin()}};
if (GET_PROTOCOL_MINOR(conn->daemonVersion) < 31) {
auto outPaths = worker_proto::read(*this, conn->from, Phantom<std::set<StorePath>>{});
if (outPaths.empty())
return std::nullopt;
return {Realisation{.id = id, .outPath = *outPaths.begin()}};
} else {
auto realisations = worker_proto::read(*this, conn->from, Phantom<std::set<Realisation>>{});
if (realisations.empty())
return std::nullopt;
return *realisations.begin();
}
}
static void writeDerivedPaths(RemoteStore & store, ConnectionHandle & conn, const std::vector<DerivedPath> & reqs)
{
if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 29) {
if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 30) {
worker_proto::write(store, conn->to, reqs);
} else {
Strings ss;
@ -694,8 +709,18 @@ static void writeDerivedPaths(RemoteStore & store, ConnectionHandle & conn, cons
}
}
void RemoteStore::buildPaths(const std::vector<DerivedPath> & drvPaths, BuildMode buildMode)
void RemoteStore::buildPaths(const std::vector<DerivedPath> & drvPaths, BuildMode buildMode, std::shared_ptr<Store> evalStore)
{
if (evalStore && evalStore.get() != this) {
/* The remote doesn't have a way to access evalStore, so copy
the .drvs. */
RealisedPath::Set drvPaths2;
for (auto & i : drvPaths)
if (auto p = std::get_if<DerivedPath::Built>(&i))
drvPaths2.insert(p->drvPath);
copyClosure(*evalStore, *this, drvPaths2);
}
auto conn(getConnection());
conn->to << wopBuildPaths;
assert(GET_PROTOCOL_MINOR(conn->daemonVersion) >= 13);
@ -990,14 +1015,14 @@ std::exception_ptr RemoteStore::Connection::processStderr(Sink * sink, Source *
return nullptr;
}
void ConnectionHandle::withFramedSink(std::function<void(Sink &sink)> fun)
void ConnectionHandle::withFramedSink(std::function<void(Sink & sink)> fun)
{
(*this)->to.flush();
std::exception_ptr ex;
/* Handle log messages / exceptions from the remote on a
separate thread. */
/* Handle log messages / exceptions from the remote on a separate
thread. */
std::thread stderrThread([&]()
{
try {
@ -1030,7 +1055,6 @@ void ConnectionHandle::withFramedSink(std::function<void(Sink &sink)> fun)
stderrThread.join();
if (ex)
std::rethrow_exception(ex);
}
}

View file

@ -78,6 +78,11 @@ public:
void addToStore(const ValidPathInfo & info, Source & nar,
RepairFlag repair, CheckSigsFlag checkSigs) override;
void addMultipleToStore(
Source & source,
RepairFlag repair,
CheckSigsFlag checkSigs) override;
StorePath addTextToStore(const string & name, const string & s,
const StorePathSet & references, RepairFlag repair) override;
@ -85,7 +90,7 @@ public:
std::optional<const Realisation> queryRealisation(const DrvOutput &) override;
void buildPaths(const std::vector<DerivedPath> & paths, BuildMode buildMode) override;
void buildPaths(const std::vector<DerivedPath> & paths, BuildMode buildMode, std::shared_ptr<Store> evalStore) override;
BuildResult buildDerivation(const StorePath & drvPath, const BasicDerivation & drv,
BuildMode buildMode) override;
@ -151,8 +156,6 @@ protected:
virtual void narFromPath(const StorePath & path, Sink & sink) override;
ref<const ValidPathInfo> readValidPathInfo(ConnectionHandle & conn, const StorePath & path);
private:
std::atomic_bool failed{false};

View file

@ -32,7 +32,9 @@
(literal "/tmp") (subpath TMPDIR))
; Some packages like to read the system version.
(allow file-read* (literal "/System/Library/CoreServices/SystemVersion.plist"))
(allow file-read*
(literal "/System/Library/CoreServices/SystemVersion.plist")
(literal "/System/Library/CoreServices/SystemVersionCompat.plist"))
; Without this line clang cannot write to /dev/null, breaking some configure tests.
(allow file-read-metadata (literal "/dev"))
@ -95,3 +97,7 @@
; This is used by /bin/sh on macOS 10.15 and later.
(allow file*
(literal "/private/var/select/sh"))
; Allow Rosetta 2 to run x86_64 binaries on aarch64-darwin.
(allow file-read*
(subpath "/Library/Apple/usr/libexec/oah"))

View file

@ -9,6 +9,7 @@
#include "url.hh"
#include "archive.hh"
#include "callback.hh"
#include "remote-store.hh"
#include <regex>
@ -249,6 +250,20 @@ StorePath Store::addToStore(const string & name, const Path & _srcPath,
}
void Store::addMultipleToStore(
Source & source,
RepairFlag repair,
CheckSigsFlag checkSigs)
{
auto expected = readNum<uint64_t>(source);
for (uint64_t i = 0; i < expected; ++i) {
auto info = ValidPathInfo::read(source, *this, 16);
info.ultimate = false;
addToStore(info, source, repair, checkSigs);
}
}
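A hypothetical sender matching the framing that the fallback Store::addMultipleToStore() above consumes: a path count, then for each path its ValidPathInfo in serialisation format 16 (including the path itself) immediately followed by the corresponding NAR. The function name is illustrative; the calls mirror what copyPaths() does further down in this commit.
#include "store-api.hh"
void streamPathsTo(nix::Store & srcStore, const nix::StorePathSet & paths, nix::Sink & sink)
{
    sink << (uint64_t) paths.size();
    for (auto & path : paths) {
        auto info = srcStore.queryPathInfo(path);
        info->write(sink, srcStore, 16);   // ValidPathInfo::write(..., includePath = true)
        srcStore.narFromPath(path, sink);  // raw NAR bytes follow the metadata
    }
}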
/*
The aim of this function is to compute in one pass the correct ValidPathInfo for
the files that we are trying to add to the store. To accomplish that in one
@ -337,6 +352,13 @@ ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath,
return info;
}
StringSet StoreConfig::getDefaultSystemFeatures()
{
auto res = settings.systemFeatures.get();
if (settings.isExperimentalFeatureEnabled("ca-derivations"))
res.insert("ca-derivations");
return res;
}
Store::Store(const Params & params)
: StoreConfig(params)
@ -627,6 +649,42 @@ string Store::makeValidityRegistration(const StorePathSet & paths,
}
StorePathSet Store::exportReferences(const StorePathSet & storePaths, const StorePathSet & inputPaths)
{
StorePathSet paths;
for (auto & storePath : storePaths) {
if (!inputPaths.count(storePath))
throw BuildError("cannot export references of path '%s' because it is not in the input closure of the derivation", printStorePath(storePath));
computeFSClosure({storePath}, paths);
}
/* If there are derivations in the graph, then include their
outputs as well. This is useful if you want to do things
like passing all build-time dependencies of some path to a
derivation that builds a NixOS DVD image. */
auto paths2 = paths;
for (auto & j : paths2) {
if (j.isDerivation()) {
Derivation drv = derivationFromPath(j);
for (auto & k : drv.outputsAndOptPaths(*this)) {
if (!k.second.second)
/* FIXME: I am confused why we are calling
`computeFSClosure` on the output path, rather than
derivation itself. That doesn't seem right to me, so I
won't try to implement this for CA derivations. */
throw UnimplementedError("exportReferences on CA derivations is not yet implemented");
computeFSClosure(*k.second.second, paths);
}
}
}
return paths;
}
void Store::pathInfoToJSON(JSONPlaceholder & jsonOut, const StorePathSet & storePaths,
bool includeImpureInfo, bool showClosureSize,
Base hashBase,
@ -727,30 +785,43 @@ const Store::Stats & Store::getStats()
}
void copyStorePath(ref<Store> srcStore, ref<Store> dstStore,
const StorePath & storePath, RepairFlag repair, CheckSigsFlag checkSigs)
static std::string makeCopyPathMessage(
std::string_view srcUri,
std::string_view dstUri,
std::string_view storePath)
{
auto srcUri = srcStore->getUri();
auto dstUri = dstStore->getUri();
return srcUri == "local" || srcUri == "daemon"
? fmt("copying path '%s' to '%s'", storePath, dstUri)
: dstUri == "local" || dstUri == "daemon"
? fmt("copying path '%s' from '%s'", storePath, srcUri)
: fmt("copying path '%s' from '%s' to '%s'", storePath, srcUri, dstUri);
}
void copyStorePath(
Store & srcStore,
Store & dstStore,
const StorePath & storePath,
RepairFlag repair,
CheckSigsFlag checkSigs)
{
auto srcUri = srcStore.getUri();
auto dstUri = dstStore.getUri();
auto storePathS = srcStore.printStorePath(storePath);
Activity act(*logger, lvlInfo, actCopyPath,
srcUri == "local" || srcUri == "daemon"
? fmt("copying path '%s' to '%s'", srcStore->printStorePath(storePath), dstUri)
: dstUri == "local" || dstUri == "daemon"
? fmt("copying path '%s' from '%s'", srcStore->printStorePath(storePath), srcUri)
: fmt("copying path '%s' from '%s' to '%s'", srcStore->printStorePath(storePath), srcUri, dstUri),
{srcStore->printStorePath(storePath), srcUri, dstUri});
makeCopyPathMessage(srcUri, dstUri, storePathS),
{storePathS, srcUri, dstUri});
PushActivity pact(act.id);
auto info = srcStore->queryPathInfo(storePath);
auto info = srcStore.queryPathInfo(storePath);
uint64_t total = 0;
// recompute store path on the chance dstStore does it differently
if (info->ca && info->references.empty()) {
auto info2 = make_ref<ValidPathInfo>(*info);
info2->path = dstStore->makeFixedOutputPathFromCA(info->path.name(), *info->ca);
if (dstStore->storeDir == srcStore->storeDir)
info2->path = dstStore.makeFixedOutputPathFromCA(info->path.name(), *info->ca);
if (dstStore.storeDir == srcStore.storeDir)
assert(info->path == info2->path);
info = info2;
}
@ -767,32 +838,56 @@ void copyStorePath(ref<Store> srcStore, ref<Store> dstStore,
act.progress(total, info->narSize);
});
TeeSink tee { sink, progressSink };
srcStore->narFromPath(storePath, tee);
srcStore.narFromPath(storePath, tee);
}, [&]() {
throw EndOfFile("NAR for '%s' fetched from '%s' is incomplete", srcStore->printStorePath(storePath), srcStore->getUri());
throw EndOfFile("NAR for '%s' fetched from '%s' is incomplete", srcStore.printStorePath(storePath), srcStore.getUri());
});
dstStore->addToStore(*info, *source, repair, checkSigs);
dstStore.addToStore(*info, *source, repair, checkSigs);
}
std::map<StorePath, StorePath> copyPaths(ref<Store> srcStore, ref<Store> dstStore, const RealisedPath::Set & paths,
RepairFlag repair, CheckSigsFlag checkSigs, SubstituteFlag substitute)
std::map<StorePath, StorePath> copyPaths(
Store & srcStore,
Store & dstStore,
const RealisedPath::Set & paths,
RepairFlag repair,
CheckSigsFlag checkSigs,
SubstituteFlag substitute)
{
StorePathSet storePaths;
std::set<Realisation> realisations;
std::set<Realisation> toplevelRealisations;
for (auto & path : paths) {
storePaths.insert(path.path());
if (auto realisation = std::get_if<Realisation>(&path.raw)) {
settings.requireExperimentalFeature("ca-derivations");
realisations.insert(*realisation);
toplevelRealisations.insert(*realisation);
}
}
auto pathsMap = copyPaths(srcStore, dstStore, storePaths, repair, checkSigs, substitute);
ThreadPool pool;
try {
for (auto & realisation : realisations) {
dstStore->registerDrvOutput(realisation, checkSigs);
}
// Copy the realisation closure
processGraph<Realisation>(
pool, Realisation::closure(srcStore, toplevelRealisations),
[&](const Realisation & current) -> std::set<Realisation> {
std::set<Realisation> children;
for (const auto & [drvOutput, _] : current.dependentRealisations) {
auto currentChild = srcStore.queryRealisation(drvOutput);
if (!currentChild)
throw Error(
"incomplete realisation closure: '%s' is a "
"dependency of '%s' but isn't registered",
drvOutput.to_string(), current.id.to_string());
children.insert(*currentChild);
}
return children;
},
[&](const Realisation& current) -> void {
dstStore.registerDrvOutput(current, checkSigs);
});
} catch (MissingExperimentalFeature & e) {
// Don't fail if the remote doesn't support CA derivations as it might
// not be within our control to change that, and we might still want
@ -806,10 +901,15 @@ std::map<StorePath, StorePath> copyPaths(ref<Store> srcStore, ref<Store> dstStor
return pathsMap;
}
std::map<StorePath, StorePath> copyPaths(ref<Store> srcStore, ref<Store> dstStore, const StorePathSet & storePaths,
RepairFlag repair, CheckSigsFlag checkSigs, SubstituteFlag substitute)
std::map<StorePath, StorePath> copyPaths(
Store & srcStore,
Store & dstStore,
const StorePathSet & storePaths,
RepairFlag repair,
CheckSigsFlag checkSigs,
SubstituteFlag substitute)
{
auto valid = dstStore->queryValidPaths(storePaths, substitute);
auto valid = dstStore.queryValidPaths(storePaths, substitute);
StorePathSet missing;
for (auto & path : storePaths)
@ -819,9 +919,31 @@ std::map<StorePath, StorePath> copyPaths(ref<Store> srcStore, ref<Store> dstStor
for (auto & path : storePaths)
pathsMap.insert_or_assign(path, path);
Activity act(*logger, lvlInfo, actCopyPaths, fmt("copying %d paths", missing.size()));
auto sorted = srcStore.topoSortPaths(missing);
std::reverse(sorted.begin(), sorted.end());
auto source = sinkToSource([&](Sink & sink) {
sink << sorted.size();
for (auto & storePath : sorted) {
auto srcUri = srcStore.getUri();
auto dstUri = dstStore.getUri();
auto storePathS = srcStore.printStorePath(storePath);
Activity act(*logger, lvlInfo, actCopyPath,
makeCopyPathMessage(srcUri, dstUri, storePathS),
{storePathS, srcUri, dstUri});
PushActivity pact(act.id);
auto info = srcStore.queryPathInfo(storePath);
info->write(sink, srcStore, 16);
srcStore.narFromPath(storePath, sink);
}
});
dstStore.addMultipleToStore(*source, repair, checkSigs);
#if 0
std::atomic<size_t> nrDone{0};
std::atomic<size_t> nrFailed{0};
std::atomic<uint64_t> bytesExpected{0};
@ -837,18 +959,21 @@ std::map<StorePath, StorePath> copyPaths(ref<Store> srcStore, ref<Store> dstStor
StorePathSet(missing.begin(), missing.end()),
[&](const StorePath & storePath) {
auto info = srcStore->queryPathInfo(storePath);
auto info = srcStore.queryPathInfo(storePath);
auto storePathForDst = storePath;
if (info->ca && info->references.empty()) {
storePathForDst = dstStore->makeFixedOutputPathFromCA(storePath.name(), *info->ca);
if (dstStore->storeDir == srcStore->storeDir)
storePathForDst = dstStore.makeFixedOutputPathFromCA(storePath.name(), *info->ca);
if (dstStore.storeDir == srcStore.storeDir)
assert(storePathForDst == storePath);
if (storePathForDst != storePath)
debug("replaced path '%s' to '%s' for substituter '%s'", srcStore->printStorePath(storePath), dstStore->printStorePath(storePathForDst), dstStore->getUri());
debug("replaced path '%s' to '%s' for substituter '%s'",
srcStore.printStorePath(storePath),
dstStore.printStorePath(storePathForDst),
dstStore.getUri());
}
pathsMap.insert_or_assign(storePath, storePathForDst);
if (dstStore->isValidPath(storePath)) {
if (dstStore.isValidPath(storePath)) {
nrDone++;
showProgress();
return StorePathSet();
@ -863,19 +988,22 @@ std::map<StorePath, StorePath> copyPaths(ref<Store> srcStore, ref<Store> dstStor
[&](const StorePath & storePath) {
checkInterrupt();
auto info = srcStore->queryPathInfo(storePath);
auto info = srcStore.queryPathInfo(storePath);
auto storePathForDst = storePath;
if (info->ca && info->references.empty()) {
storePathForDst = dstStore->makeFixedOutputPathFromCA(storePath.name(), *info->ca);
if (dstStore->storeDir == srcStore->storeDir)
storePathForDst = dstStore.makeFixedOutputPathFromCA(storePath.name(), *info->ca);
if (dstStore.storeDir == srcStore.storeDir)
assert(storePathForDst == storePath);
if (storePathForDst != storePath)
debug("replaced path '%s' to '%s' for substituter '%s'", srcStore->printStorePath(storePath), dstStore->printStorePath(storePathForDst), dstStore->getUri());
debug("replaced path '%s' to '%s' for substituter '%s'",
srcStore.printStorePath(storePath),
dstStore.printStorePath(storePathForDst),
dstStore.getUri());
}
pathsMap.insert_or_assign(storePath, storePathForDst);
if (!dstStore->isValidPath(storePathForDst)) {
if (!dstStore.isValidPath(storePathForDst)) {
MaintainCount<decltype(nrRunning)> mc(nrRunning);
showProgress();
try {
@ -884,7 +1012,7 @@ std::map<StorePath, StorePath> copyPaths(ref<Store> srcStore, ref<Store> dstStor
nrFailed++;
if (!settings.keepGoing)
throw e;
logger->log(lvlError, fmt("could not copy %s: %s", dstStore->printStorePath(storePath), e.what()));
logger->log(lvlError, fmt("could not copy %s: %s", dstStore.printStorePath(storePath), e.what()));
showProgress();
return;
}
@ -893,9 +1021,27 @@ std::map<StorePath, StorePath> copyPaths(ref<Store> srcStore, ref<Store> dstStor
nrDone++;
showProgress();
});
#endif
return pathsMap;
}
void copyClosure(
Store & srcStore,
Store & dstStore,
const RealisedPath::Set & paths,
RepairFlag repair,
CheckSigsFlag checkSigs,
SubstituteFlag substitute)
{
if (&srcStore == &dstStore) return;
RealisedPath::Set closure;
RealisedPath::closure(srcStore, paths, closure);
copyPaths(srcStore, dstStore, closure, repair, checkSigs, substitute);
}
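A hypothetical call site for the new copyClosure() overload above (the store and function names are illustrative; the flag enums are the ones used throughout the store API):
#include "store-api.hh"
void pushWithDependencies(nix::Store & localStore, nix::Store & cacheStore, const nix::StorePath & path)
{
    // Copies `path` plus everything it references, letting the destination
    // substitute paths where possible instead of streaming them from the source.
    nix::copyClosure(localStore, cacheStore, nix::RealisedPath::Set{path},
        nix::NoRepair, nix::CheckSigs, nix::Substitute);
}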
std::optional<ValidPathInfo> decodeValidPathInfo(const Store & store, std::istream & str, std::optional<HashResult> hashGiven)
{
std::string path;

View file

@ -180,6 +180,8 @@ struct StoreConfig : public Config
StoreConfig() = delete;
StringSet getDefaultSystemFeatures();
virtual ~StoreConfig() { }
virtual const std::string name() = 0;
@ -196,7 +198,7 @@ struct StoreConfig : public Config
Setting<bool> wantMassQuery{this, false, "want-mass-query", "whether this substituter can be queried efficiently for path validity"};
Setting<StringSet> systemFeatures{this, settings.systemFeatures,
Setting<StringSet> systemFeatures{this, getDefaultSystemFeatures(),
"system-features",
"Optional features that the system this store builds on implements (like \"kvm\")."};
@ -428,9 +430,10 @@ public:
virtual StorePathSet querySubstitutablePaths(const StorePathSet & paths) { return {}; };
/* Query substitute info (i.e. references, derivers and download
sizes) of a map of paths to their optional ca values. If a path
does not have substitute info, it's omitted from the resulting
infos map. */
sizes) of a map of paths to their optional ca values. The info
of the first succeeding substituter for each path will be
returned. If a path does not have substitute info, it's omitted
from the resulting infos map. */
virtual void querySubstitutablePathInfos(const StorePathCAMap & paths,
SubstitutablePathInfos & infos) { return; };
@ -438,6 +441,12 @@ public:
virtual void addToStore(const ValidPathInfo & info, Source & narSource,
RepairFlag repair = NoRepair, CheckSigsFlag checkSigs = CheckSigs) = 0;
/* Import multiple paths into the store. */
virtual void addMultipleToStore(
Source & source,
RepairFlag repair = NoRepair,
CheckSigsFlag checkSigs = CheckSigs);
/* Copy the contents of a path to the store and register the
validity the resulting path. The resulting path is returned.
The function object `filter' can be used to exclude files (see
@ -495,7 +504,8 @@ public:
not derivations, substitute them. */
virtual void buildPaths(
const std::vector<DerivedPath> & paths,
BuildMode buildMode = bmNormal);
BuildMode buildMode = bmNormal,
std::shared_ptr<Store> evalStore = nullptr);
/* Build a single non-materialized derivation (i.e. not from an
on-disk .drv file).
@ -541,7 +551,7 @@ public:
/* Add a store path as a temporary root of the garbage collector.
The root disappears as soon as we exit. */
virtual void addTempRoot(const StorePath & path)
{ warn("not creating temp root, store doesn't support GC"); }
{ debug("not creating temporary root, store doesn't support GC"); }
/* Add an indirect root, which is merely a symlink to `path' from
/nix/var/nix/gcroots/auto/<hash of `path'>. `path' is supposed
@ -695,6 +705,11 @@ public:
const Stats & getStats();
/* Computes the full closure of a set of store-paths for e.g.
derivations that need this information for `exportReferencesGraph`.
*/
StorePathSet exportReferences(const StorePathSet & storePaths, const StorePathSet & inputPaths);
/* Return the build log of the specified store path, if available,
or null otherwise. */
virtual std::shared_ptr<std::string> getBuildLog(const StorePath & path)
@ -744,8 +759,12 @@ protected:
/* Copy a path from one store to another. */
void copyStorePath(ref<Store> srcStore, ref<Store> dstStore,
const StorePath & storePath, RepairFlag repair = NoRepair, CheckSigsFlag checkSigs = CheckSigs);
void copyStorePath(
Store & srcStore,
Store & dstStore,
const StorePath & storePath,
RepairFlag repair = NoRepair,
CheckSigsFlag checkSigs = CheckSigs);
/* Copy store paths from one store to another. The paths may be copied
@ -754,17 +773,27 @@ void copyStorePath(ref<Store> srcStore, ref<Store> dstStore,
of store paths is not automatically closed; use copyClosure() for
that. Returns a map of what each path was copied to the dstStore
as. */
std::map<StorePath, StorePath> copyPaths(ref<Store> srcStore, ref<Store> dstStore,
std::map<StorePath, StorePath> copyPaths(
Store & srcStore, Store & dstStore,
const RealisedPath::Set &,
RepairFlag repair = NoRepair,
CheckSigsFlag checkSigs = CheckSigs,
SubstituteFlag substitute = NoSubstitute);
std::map<StorePath, StorePath> copyPaths(ref<Store> srcStore, ref<Store> dstStore,
const StorePathSet& paths,
std::map<StorePath, StorePath> copyPaths(
Store & srcStore, Store & dstStore,
const StorePathSet & paths,
RepairFlag repair = NoRepair,
CheckSigsFlag checkSigs = CheckSigs,
SubstituteFlag substitute = NoSubstitute);
/* Copy the closure of `paths` from `srcStore` to `dstStore`. */
void copyClosure(
Store & srcStore, Store & dstStore,
const RealisedPath::Set & paths,
RepairFlag repair = NoRepair,
CheckSigsFlag checkSigs = CheckSigs,
SubstituteFlag substitute = NoSubstitute);
/* Remove the temporary roots file for this process. Any temporary
root becomes garbage after this point unless it has been registered
@ -864,4 +893,9 @@ std::pair<std::string, Store::Params> splitUriAndParams(const std::string & uri)
std::optional<ContentAddress> getDerivationCA(const BasicDerivation & drv);
std::map<DrvOutput, StorePath> drvOutputReferences(
Store & store,
const Derivation & drv,
const StorePath & outputPath);
}

View file

@ -9,7 +9,7 @@ namespace nix {
#define WORKER_MAGIC_1 0x6e697863
#define WORKER_MAGIC_2 0x6478696f
#define PROTOCOL_VERSION (1 << 8 | 29)
#define PROTOCOL_VERSION (1 << 8 | 32)
#define GET_PROTOCOL_MAJOR(x) ((x) & 0xff00)
#define GET_PROTOCOL_MINOR(x) ((x) & 0x00ff)
@ -55,6 +55,7 @@ typedef enum {
wopQueryDerivationOutputMap = 41,
wopRegisterDrvOutput = 42,
wopQueryRealisation = 43,
wopAddMultipleToStore = 44,
} WorkerOp;

View file

@ -9,7 +9,7 @@ namespace nix {
#define ANSI_ITALIC "\e[3m"
#define ANSI_RED "\e[31;1m"
#define ANSI_GREEN "\e[32;1m"
#define ANSI_YELLOW "\e[33;1m"
#define ANSI_WARNING "\e[35;1m"
#define ANSI_BLUE "\e[34;1m"
#define ANSI_MAGENTA "\e[35;1m"
#define ANSI_CYAN "\e[36;1m"

View file

@ -331,6 +331,7 @@ MultiCommand::MultiCommand(const Commands & commands_)
if (i == commands.end())
throw UsageError("'%s' is not a recognised command", s);
command = {s, i->second()};
command->second->parent = this;
}}
});

View file

@ -12,6 +12,8 @@ namespace nix {
enum HashType : char;
class MultiCommand;
class Args
{
public:
@ -89,6 +91,14 @@ protected:
})
, arity(1)
{ }
template<class I>
Handler(std::optional<I> * dest)
: fun([=](std::vector<std::string> ss) {
*dest = string2IntWithUnitPrefix<I>(ss[0]);
})
, arity(1)
{ }
};
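The new Handler overload above lets a flag parse straight into a std::optional integer via string2IntWithUnitPrefix, so values such as "100K" or "2G" are accepted. A hypothetical flag definition using it, as it might appear inside an Args subclass (the flag name and description are made up for illustration):
    std::optional<uint64_t> sizeLimit;
    addFlag({
        .longName = "size-limit",   // illustrative flag name
        .description = "Maximum size; accepts unit suffixes such as 1K, 2M or 3G.",
        .labels = {"bytes"},
        .handler = {&sizeLimit},    // selects the std::optional<I> Handler above
    });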
/* Options. */
@ -169,11 +179,13 @@ public:
virtual nlohmann::json toJSON();
friend class MultiCommand;
MultiCommand * parent = nullptr;
};
/* A command is an argument parser that can be executed by calling its
run() method. */
struct Command : virtual Args
struct Command : virtual public Args
{
friend class MultiCommand;
@ -193,7 +205,7 @@ typedef std::map<std::string, std::function<ref<Command>()>> Commands;
/* An argument parser that supports multiple subcommands,
i.e. <command> <subcommand>. */
class MultiCommand : virtual Args
class MultiCommand : virtual public Args
{
public:
Commands commands;

src/libutil/closure.hh (new file, 69 lines)
View file

@ -0,0 +1,69 @@
#include <set>
#include <future>
#include "sync.hh"
using std::set;
namespace nix {
template<typename T>
using GetEdgesAsync = std::function<void(const T &, std::function<void(std::promise<set<T>> &)>)>;
template<typename T>
void computeClosure(
const set<T> startElts,
set<T> & res,
GetEdgesAsync<T> getEdgesAsync
)
{
struct State
{
size_t pending;
set<T> & res;
std::exception_ptr exc;
};
Sync<State> state_(State{0, res, 0});
std::function<void(const T &)> enqueue;
std::condition_variable done;
enqueue = [&](const T & current) -> void {
{
auto state(state_.lock());
if (state->exc) return;
if (!state->res.insert(current).second) return;
state->pending++;
}
getEdgesAsync(current, [&](std::promise<set<T>> & prom) {
try {
auto children = prom.get_future().get();
for (auto & child : children)
enqueue(child);
{
auto state(state_.lock());
assert(state->pending);
if (!--state->pending) done.notify_one();
}
} catch (...) {
auto state(state_.lock());
if (!state->exc) state->exc = std::current_exception();
assert(state->pending);
if (!--state->pending) done.notify_one();
};
});
};
for (auto & startElt : startElts)
enqueue(startElt);
{
auto state(state_.lock());
while (state->pending) state.wait(done);
if (state->exc) std::rethrow_exception(state->exc);
}
}
}
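A hypothetical usage sketch for computeClosure() above, computing the closure of {1} under the edge relation n -> n+1 for n < 5. The edge callback hands its result back through a std::promise, so asynchronous edge providers (such as remote store queries) can report exceptions as well as values; here it simply answers synchronously.
#include "closure.hh"
#include <cassert>
#include <functional>
void computeClosureExample()
{
    std::set<int> res;
    nix::computeClosure<int>(
        {1}, res,
        [](const int & n,
           std::function<void(std::promise<std::set<int>> &)> processEdges) {
            std::promise<std::set<int>> promise;
            if (n < 5)
                promise.set_value(std::set<int>{n + 1});
            else
                promise.set_value(std::set<int>{});
            processEdges(promise);
        });
    assert(res == (std::set<int>{1, 2, 3, 4, 5}));
}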

View file

@ -25,6 +25,8 @@
}
#define GENERATE_EQUAL(args...) GENERATE_ONE_CMP(==, args)
#define GENERATE_LEQ(args...) GENERATE_ONE_CMP(<, args)
#define GENERATE_NEQ(args...) GENERATE_ONE_CMP(!=, args)
#define GENERATE_CMP(args...) \
GENERATE_EQUAL(args) \
GENERATE_LEQ(args)
GENERATE_LEQ(args) \
GENERATE_NEQ(args)

View file

@ -1,18 +1,17 @@
#include "compression.hh"
#include "tarfile.hh"
#include "util.hh"
#include "finally.hh"
#include "logging.hh"
#include <lzma.h>
#include <bzlib.h>
#include <archive.h>
#include <archive_entry.h>
#include <cstdio>
#include <cstring>
#include <brotli/decode.h>
#include <brotli/encode.h>
#include <zlib.h>
#include <iostream>
namespace nix {
@ -27,7 +26,7 @@ struct ChunkedCompressionSink : CompressionSink
const size_t CHUNK_SIZE = sizeof(outbuf) << 2;
while (!data.empty()) {
size_t n = std::min(CHUNK_SIZE, data.size());
writeInternal(data);
writeInternal(data.substr(0, n));
data.remove_prefix(n);
}
}
@ -35,6 +34,95 @@ struct ChunkedCompressionSink : CompressionSink
virtual void writeInternal(std::string_view data) = 0;
};
struct ArchiveDecompressionSource : Source
{
std::unique_ptr<TarArchive> archive = 0;
Source & src;
ArchiveDecompressionSource(Source & src) : src(src) {}
~ArchiveDecompressionSource() override {}
size_t read(char * data, size_t len) override {
struct archive_entry * ae;
if (!archive) {
archive = std::make_unique<TarArchive>(src, true);
this->archive->check(archive_read_next_header(this->archive->archive, &ae),
"failed to read header (%s)");
if (archive_filter_count(this->archive->archive) < 2) {
throw CompressionError("input compression not recognized");
}
}
ssize_t result = archive_read_data(this->archive->archive, data, len);
if (result > 0) return result;
if (result == 0) {
throw EndOfFile("reached end of compressed file");
}
this->archive->check(result, "failed to read compressed data (%s)");
return result;
}
};
struct ArchiveCompressionSink : CompressionSink
{
Sink & nextSink;
struct archive * archive;
ArchiveCompressionSink(Sink & nextSink, std::string format, bool parallel) : nextSink(nextSink) {
archive = archive_write_new();
if (!archive) throw Error("failed to initialize libarchive");
check(archive_write_add_filter_by_name(archive, format.c_str()), "couldn't initialize compression (%s)");
check(archive_write_set_format_raw(archive));
if (format == "xz" && parallel) {
check(archive_write_set_filter_option(archive, format.c_str(), "threads", "0"));
}
// disable internal buffering
check(archive_write_set_bytes_per_block(archive, 0));
// disable output padding
check(archive_write_set_bytes_in_last_block(archive, 1));
open();
}
~ArchiveCompressionSink() override
{
if (archive) archive_write_free(archive);
}
void finish() override
{
flush();
check(archive_write_close(archive));
}
void check(int err, const std::string & reason = "failed to compress (%s)")
{
if (err == ARCHIVE_EOF)
throw EndOfFile("reached end of archive");
else if (err != ARCHIVE_OK)
throw Error(reason, archive_error_string(this->archive));
}
void write(std::string_view data) override
{
ssize_t result = archive_write_data(archive, data.data(), data.length());
if (result <= 0) check(result);
}
private:
void open()
{
check(archive_write_open(archive, this, nullptr, ArchiveCompressionSink::callback_write, nullptr));
auto ae = archive_entry_new();
archive_entry_set_filetype(ae, AE_IFREG);
check(archive_write_header(archive, ae));
archive_entry_free(ae);
}
static ssize_t callback_write(struct archive * archive, void * _self, const void * buffer, size_t length)
{
auto self = (ArchiveCompressionSink *) _self;
self->nextSink({(const char *) buffer, length});
return length;
}
};
struct NoneSink : CompressionSink
{
Sink & nextSink;
@ -43,171 +131,6 @@ struct NoneSink : CompressionSink
void write(std::string_view data) override { nextSink(data); }
};
struct GzipDecompressionSink : CompressionSink
{
Sink & nextSink;
z_stream strm;
bool finished = false;
uint8_t outbuf[BUFSIZ];
GzipDecompressionSink(Sink & nextSink) : nextSink(nextSink)
{
strm.zalloc = Z_NULL;
strm.zfree = Z_NULL;
strm.opaque = Z_NULL;
strm.avail_in = 0;
strm.next_in = Z_NULL;
strm.next_out = outbuf;
strm.avail_out = sizeof(outbuf);
// Enable gzip and zlib decoding (+32) with 15 windowBits
int ret = inflateInit2(&strm,15+32);
if (ret != Z_OK)
throw CompressionError("unable to initialise gzip encoder");
}
~GzipDecompressionSink()
{
inflateEnd(&strm);
}
void finish() override
{
CompressionSink::flush();
write({});
}
void write(std::string_view data) override
{
assert(data.size() <= std::numeric_limits<decltype(strm.avail_in)>::max());
strm.next_in = (Bytef *) data.data();
strm.avail_in = data.size();
while (!finished && (!data.data() || strm.avail_in)) {
checkInterrupt();
int ret = inflate(&strm,Z_SYNC_FLUSH);
if (ret != Z_OK && ret != Z_STREAM_END)
throw CompressionError("error while decompressing gzip file: %d (%d, %d)",
zError(ret), data.size(), strm.avail_in);
finished = ret == Z_STREAM_END;
if (strm.avail_out < sizeof(outbuf) || strm.avail_in == 0) {
nextSink({(char *) outbuf, sizeof(outbuf) - strm.avail_out});
strm.next_out = (Bytef *) outbuf;
strm.avail_out = sizeof(outbuf);
}
}
}
};
struct XzDecompressionSink : CompressionSink
{
Sink & nextSink;
uint8_t outbuf[BUFSIZ];
lzma_stream strm = LZMA_STREAM_INIT;
bool finished = false;
XzDecompressionSink(Sink & nextSink) : nextSink(nextSink)
{
lzma_ret ret = lzma_stream_decoder(
&strm, UINT64_MAX, LZMA_CONCATENATED);
if (ret != LZMA_OK)
throw CompressionError("unable to initialise lzma decoder");
strm.next_out = outbuf;
strm.avail_out = sizeof(outbuf);
}
~XzDecompressionSink()
{
lzma_end(&strm);
}
void finish() override
{
CompressionSink::flush();
write({});
}
void write(std::string_view data) override
{
strm.next_in = (const unsigned char *) data.data();
strm.avail_in = data.size();
while (!finished && (!data.data() || strm.avail_in)) {
checkInterrupt();
lzma_ret ret = lzma_code(&strm, data.data() ? LZMA_RUN : LZMA_FINISH);
if (ret != LZMA_OK && ret != LZMA_STREAM_END)
throw CompressionError("error %d while decompressing xz file", ret);
finished = ret == LZMA_STREAM_END;
if (strm.avail_out < sizeof(outbuf) || strm.avail_in == 0) {
nextSink({(char *) outbuf, sizeof(outbuf) - strm.avail_out});
strm.next_out = outbuf;
strm.avail_out = sizeof(outbuf);
}
}
}
};
struct BzipDecompressionSink : ChunkedCompressionSink
{
Sink & nextSink;
bz_stream strm;
bool finished = false;
BzipDecompressionSink(Sink & nextSink) : nextSink(nextSink)
{
memset(&strm, 0, sizeof(strm));
int ret = BZ2_bzDecompressInit(&strm, 0, 0);
if (ret != BZ_OK)
throw CompressionError("unable to initialise bzip2 decoder");
strm.next_out = (char *) outbuf;
strm.avail_out = sizeof(outbuf);
}
~BzipDecompressionSink()
{
BZ2_bzDecompressEnd(&strm);
}
void finish() override
{
flush();
write({});
}
void writeInternal(std::string_view data) override
{
assert(data.size() <= std::numeric_limits<decltype(strm.avail_in)>::max());
strm.next_in = (char *) data.data();
strm.avail_in = data.size();
while (strm.avail_in) {
checkInterrupt();
int ret = BZ2_bzDecompress(&strm);
if (ret != BZ_OK && ret != BZ_STREAM_END)
throw CompressionError("error while decompressing bzip2 file");
finished = ret == BZ_STREAM_END;
if (strm.avail_out < sizeof(outbuf) || strm.avail_in == 0) {
nextSink({(char *) outbuf, sizeof(outbuf) - strm.avail_out});
strm.next_out = (char *) outbuf;
strm.avail_out = sizeof(outbuf);
}
}
}
};
struct BrotliDecompressionSink : ChunkedCompressionSink
{
Sink & nextSink;
@ -268,159 +191,24 @@ ref<std::string> decompress(const std::string & method, const std::string & in)
return ssink.s;
}
ref<CompressionSink> makeDecompressionSink(const std::string & method, Sink & nextSink)
std::unique_ptr<FinishSink> makeDecompressionSink(const std::string & method, Sink & nextSink)
{
if (method == "none" || method == "")
return make_ref<NoneSink>(nextSink);
else if (method == "xz")
return make_ref<XzDecompressionSink>(nextSink);
else if (method == "bzip2")
return make_ref<BzipDecompressionSink>(nextSink);
else if (method == "gzip")
return make_ref<GzipDecompressionSink>(nextSink);
return std::make_unique<NoneSink>(nextSink);
else if (method == "br")
return make_ref<BrotliDecompressionSink>(nextSink);
return std::make_unique<BrotliDecompressionSink>(nextSink);
else
throw UnknownCompressionMethod("unknown compression method '%s'", method);
return sourceToSink([&](Source & source) {
auto decompressionSource = std::make_unique<ArchiveDecompressionSource>(source);
decompressionSource->drainInto(nextSink);
});
}
struct XzCompressionSink : CompressionSink
{
Sink & nextSink;
uint8_t outbuf[BUFSIZ];
lzma_stream strm = LZMA_STREAM_INIT;
bool finished = false;
XzCompressionSink(Sink & nextSink, bool parallel) : nextSink(nextSink)
{
lzma_ret ret;
bool done = false;
if (parallel) {
#ifdef HAVE_LZMA_MT
lzma_mt mt_options = {};
mt_options.flags = 0;
mt_options.timeout = 300; // Using the same setting as the xz cmd line
mt_options.preset = LZMA_PRESET_DEFAULT;
mt_options.filters = NULL;
mt_options.check = LZMA_CHECK_CRC64;
mt_options.threads = lzma_cputhreads();
mt_options.block_size = 0;
if (mt_options.threads == 0)
mt_options.threads = 1;
// FIXME: maybe use lzma_stream_encoder_mt_memusage() to control the
// number of threads.
ret = lzma_stream_encoder_mt(&strm, &mt_options);
done = true;
#else
printMsg(lvlError, "warning: parallel XZ compression requested but not supported, falling back to single-threaded compression");
#endif
}
if (!done)
ret = lzma_easy_encoder(&strm, 6, LZMA_CHECK_CRC64);
if (ret != LZMA_OK)
throw CompressionError("unable to initialise lzma encoder");
// FIXME: apply the x86 BCJ filter?
strm.next_out = outbuf;
strm.avail_out = sizeof(outbuf);
}
~XzCompressionSink()
{
lzma_end(&strm);
}
void finish() override
{
CompressionSink::flush();
write({});
}
void write(std::string_view data) override
{
strm.next_in = (const unsigned char *) data.data();
strm.avail_in = data.size();
while (!finished && (!data.data() || strm.avail_in)) {
checkInterrupt();
lzma_ret ret = lzma_code(&strm, data.data() ? LZMA_RUN : LZMA_FINISH);
if (ret != LZMA_OK && ret != LZMA_STREAM_END)
throw CompressionError("error %d while compressing xz file", ret);
finished = ret == LZMA_STREAM_END;
if (strm.avail_out < sizeof(outbuf) || strm.avail_in == 0) {
nextSink({(const char *) outbuf, sizeof(outbuf) - strm.avail_out});
strm.next_out = outbuf;
strm.avail_out = sizeof(outbuf);
}
}
}
};
struct BzipCompressionSink : ChunkedCompressionSink
{
Sink & nextSink;
bz_stream strm;
bool finished = false;
BzipCompressionSink(Sink & nextSink) : nextSink(nextSink)
{
memset(&strm, 0, sizeof(strm));
int ret = BZ2_bzCompressInit(&strm, 9, 0, 30);
if (ret != BZ_OK)
throw CompressionError("unable to initialise bzip2 encoder");
strm.next_out = (char *) outbuf;
strm.avail_out = sizeof(outbuf);
}
~BzipCompressionSink()
{
BZ2_bzCompressEnd(&strm);
}
void finish() override
{
flush();
writeInternal({});
}
void writeInternal(std::string_view data) override
{
assert(data.size() <= std::numeric_limits<decltype(strm.avail_in)>::max());
strm.next_in = (char *) data.data();
strm.avail_in = data.size();
while (!finished && (!data.data() || strm.avail_in)) {
checkInterrupt();
int ret = BZ2_bzCompress(&strm, data.data() ? BZ_RUN : BZ_FINISH);
if (ret != BZ_RUN_OK && ret != BZ_FINISH_OK && ret != BZ_STREAM_END)
throw CompressionError("error %d while compressing bzip2 file", ret);
finished = ret == BZ_STREAM_END;
if (strm.avail_out < sizeof(outbuf) || strm.avail_in == 0) {
nextSink({(const char *) outbuf, sizeof(outbuf) - strm.avail_out});
strm.next_out = (char *) outbuf;
strm.avail_out = sizeof(outbuf);
}
}
}
};
struct BrotliCompressionSink : ChunkedCompressionSink
{
Sink & nextSink;
uint8_t outbuf[BUFSIZ];
BrotliEncoderState *state;
BrotliEncoderState * state;
bool finished = false;
BrotliCompressionSink(Sink & nextSink) : nextSink(nextSink)
@ -471,12 +259,14 @@ struct BrotliCompressionSink : ChunkedCompressionSink
ref<CompressionSink> makeCompressionSink(const std::string & method, Sink & nextSink, const bool parallel)
{
std::vector<std::string> la_supports = {
"bzip2", "compress", "grzip", "gzip", "lrzip", "lz4", "lzip", "lzma", "lzop", "xz", "zstd"
};
if (std::find(la_supports.begin(), la_supports.end(), method) != la_supports.end()) {
return make_ref<ArchiveCompressionSink>(nextSink, method, parallel);
}
if (method == "none")
return make_ref<NoneSink>(nextSink);
else if (method == "xz")
return make_ref<XzCompressionSink>(nextSink, parallel);
else if (method == "bzip2")
return make_ref<BzipCompressionSink>(nextSink);
else if (method == "br")
return make_ref<BrotliCompressionSink>(nextSink);
else

View file

@ -8,14 +8,16 @@
namespace nix {
struct CompressionSink : BufferedSink
struct CompressionSink : BufferedSink, FinishSink
{
virtual void finish() = 0;
using BufferedSink::operator ();
using BufferedSink::write;
using FinishSink::finish;
};
ref<std::string> decompress(const std::string & method, const std::string & in);
ref<CompressionSink> makeDecompressionSink(const std::string & method, Sink & nextSink);
std::unique_ptr<FinishSink> makeDecompressionSink(const std::string & method, Sink & nextSink);
ref<std::string> compress(const std::string & method, const std::string & in, const bool parallel = false);
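A hypothetical round trip through the compression helpers declared above, now backed by libarchive for decompression. Any method understood by makeCompressionSink ("xz", "bzip2", "gzip", "br", ...) could stand in for "xz" here:
#include "compression.hh"
#include <cassert>
#include <string>
void compressionRoundTrip()
{
    std::string data = "hello world";
    auto compressed = nix::compress("xz", data);        // returns ref<std::string>
    auto restored = nix::decompress("xz", *compressed); // the libarchive path auto-detects the format
    assert(*restored == data);
}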

View file

@ -152,6 +152,16 @@ nlohmann::json Config::toJSON()
return res;
}
std::string Config::toKeyValue()
{
auto res = std::string();
for (auto & s : _settings)
if (!s.second.isAlias) {
res += fmt("%s = %s\n", s.first, s.second.setting->to_string());
}
return res;
}
void Config::convertToArgs(Args & args, const std::string & category)
{
for (auto & s : _settings)
@ -385,6 +395,16 @@ nlohmann::json GlobalConfig::toJSON()
return res;
}
std::string GlobalConfig::toKeyValue()
{
std::string res;
std::map<std::string, Config::SettingInfo> settings;
globalConfig.getSettings(settings);
for (auto & s : settings)
res += fmt("%s = %s\n", s.first, s.second.value);
return res;
}
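A hypothetical caller of the new toKeyValue() helpers above, dumping the effective settings in the same "key = value" shape that nix.conf uses:
#include "config.hh"
#include <iostream>
void printEffectiveConfig()
{
    // One line per setting, e.g. "keep-going = false".
    std::cout << nix::globalConfig.toKeyValue();
}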
void GlobalConfig::convertToArgs(Args & args, const std::string & category)
{
for (auto & config : *configRegistrations)

Some files were not shown because too many files have changed in this diff.