
Merge remote-tracking branch 'origin/master' into handle-missing-gc-socket

Eelco Dolstra 2024-01-12 12:26:25 +01:00
commit 5703c31325
1298 changed files with 33858 additions and 15133 deletions


@ -299,7 +299,7 @@ connected:
!trusted || *trusted;
});
// See the very large comment in `case wopBuildDerivation:` in
// See the very large comment in `case WorkerProto::Op::BuildDerivation:` in
// `src/libstore/daemon.cc` that explains the trust model here.
//
// This condition mirrors that: that code enforces the "rules" outlined there;
@ -314,7 +314,7 @@ connected:
//
// 2. Changing the `inputSrcs` set changes the associated
// output ids, which break CA derivations
if (!drv.inputDrvs.empty())
if (!drv.inputDrvs.map.empty())
drv.inputSrcs = store->parseStorePathSet(inputs);
optResult = sshStore->buildDerivation(*drvPath, (const BasicDerivation &) drv);
auto & result = *optResult;
@ -322,7 +322,12 @@ connected:
throw Error("build of '%s' on '%s' failed: %s", store->printStorePath(*drvPath), storeUri, result.errorMsg);
} else {
copyClosure(*store, *sshStore, StorePathSet {*drvPath}, NoRepair, NoCheckSigs, substitute);
auto res = sshStore->buildPathsWithResults({ DerivedPath::Built { *drvPath, OutputsSpec::All {} } });
auto res = sshStore->buildPathsWithResults({
DerivedPath::Built {
.drvPath = makeConstantStorePathRef(*drvPath),
.outputs = OutputsSpec::All {},
}
});
// One path to build should produce exactly one build result
assert(res.size() == 1);
optResult = std::move(res[0]);
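
The pattern in this hunk recurs throughout the commit: `DerivedPath::Built::drvPath` now holds a `ref<SingleDerivedPath>` instead of a bare `StorePath`, so a plain derivation path has to be wrapped explicitly. A minimal sketch of the new construction (not part of the diff; the helper name is invented, and it assumes nix's derived-path.hh, which declares the `makeConstantStorePathRef` used above):

    #include "derived-path.hh"

    using namespace nix;

    // Hypothetical helper: request every output of a derivation, wrapping the
    // .drv path the way the hunk above now does.
    DerivedPath wantAllOutputs(const StorePath & drvPath)
    {
        return DerivedPath::Built {
            .drvPath = makeConstantStorePathRef(drvPath),
            .outputs = OutputsSpec::All {},
        };
    }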

src/libcmd/built-path.cc Normal file

@ -0,0 +1,149 @@
#include "built-path.hh"
#include "derivations.hh"
#include "store-api.hh"
#include <nlohmann/json.hpp>
#include <optional>
namespace nix {
#define CMP_ONE(CHILD_TYPE, MY_TYPE, FIELD, COMPARATOR) \
bool MY_TYPE ::operator COMPARATOR (const MY_TYPE & other) const \
{ \
const MY_TYPE* me = this; \
auto fields1 = std::tie(*me->drvPath, me->FIELD); \
me = &other; \
auto fields2 = std::tie(*me->drvPath, me->FIELD); \
return fields1 COMPARATOR fields2; \
}
#define CMP(CHILD_TYPE, MY_TYPE, FIELD) \
CMP_ONE(CHILD_TYPE, MY_TYPE, FIELD, ==) \
CMP_ONE(CHILD_TYPE, MY_TYPE, FIELD, !=) \
CMP_ONE(CHILD_TYPE, MY_TYPE, FIELD, <)
#define FIELD_TYPE std::pair<std::string, StorePath>
CMP(SingleBuiltPath, SingleBuiltPathBuilt, output)
#undef FIELD_TYPE
#define FIELD_TYPE std::map<std::string, StorePath>
CMP(SingleBuiltPath, BuiltPathBuilt, outputs)
#undef FIELD_TYPE
#undef CMP
#undef CMP_ONE
StorePath SingleBuiltPath::outPath() const
{
return std::visit(
overloaded{
[](const SingleBuiltPath::Opaque & p) { return p.path; },
[](const SingleBuiltPath::Built & b) { return b.output.second; },
}, raw()
);
}
StorePathSet BuiltPath::outPaths() const
{
return std::visit(
overloaded{
[](const BuiltPath::Opaque & p) { return StorePathSet{p.path}; },
[](const BuiltPath::Built & b) {
StorePathSet res;
for (auto & [_, path] : b.outputs)
res.insert(path);
return res;
},
}, raw()
);
}
SingleDerivedPath::Built SingleBuiltPath::Built::discardOutputPath() const
{
return SingleDerivedPath::Built {
.drvPath = make_ref<SingleDerivedPath>(drvPath->discardOutputPath()),
.output = output.first,
};
}
SingleDerivedPath SingleBuiltPath::discardOutputPath() const
{
return std::visit(
overloaded{
[](const SingleBuiltPath::Opaque & p) -> SingleDerivedPath {
return p;
},
[](const SingleBuiltPath::Built & b) -> SingleDerivedPath {
return b.discardOutputPath();
},
}, raw()
);
}
nlohmann::json BuiltPath::Built::toJSON(const StoreDirConfig & store) const
{
nlohmann::json res;
res["drvPath"] = drvPath->toJSON(store);
for (const auto & [outputName, outputPath] : outputs) {
res["outputs"][outputName] = store.printStorePath(outputPath);
}
return res;
}
nlohmann::json SingleBuiltPath::Built::toJSON(const StoreDirConfig & store) const
{
nlohmann::json res;
res["drvPath"] = drvPath->toJSON(store);
auto & [outputName, outputPath] = output;
res["output"] = outputName;
res["outputPath"] = store.printStorePath(outputPath);
return res;
}
nlohmann::json SingleBuiltPath::toJSON(const StoreDirConfig & store) const
{
return std::visit([&](const auto & buildable) {
return buildable.toJSON(store);
}, raw());
}
nlohmann::json BuiltPath::toJSON(const StoreDirConfig & store) const
{
return std::visit([&](const auto & buildable) {
return buildable.toJSON(store);
}, raw());
}
RealisedPath::Set BuiltPath::toRealisedPaths(Store & store) const
{
RealisedPath::Set res;
std::visit(
overloaded{
[&](const BuiltPath::Opaque & p) { res.insert(p.path); },
[&](const BuiltPath::Built & p) {
auto drvHashes =
staticOutputHashes(store, store.readDerivation(p.drvPath->outPath()));
for (auto& [outputName, outputPath] : p.outputs) {
if (experimentalFeatureSettings.isEnabled(
Xp::CaDerivations)) {
auto drvOutput = get(drvHashes, outputName);
if (!drvOutput)
throw Error(
"the derivation '%s' has unrealised output '%s' (derived-path.cc/toRealisedPaths)",
store.printStorePath(p.drvPath->outPath()), outputName);
auto thisRealisation = store.queryRealisation(
DrvOutput{*drvOutput, outputName});
assert(thisRealisation); // We've built it, so we must
// have the realisation
res.insert(*thisRealisation);
} else {
res.insert(outputPath);
}
}
},
},
raw());
return res;
}
}

src/libcmd/built-path.hh Normal file

@ -0,0 +1,97 @@
#pragma once
///@file
#include "derived-path.hh"
#include "realisation.hh"
namespace nix {
struct SingleBuiltPath;
struct SingleBuiltPathBuilt {
ref<SingleBuiltPath> drvPath;
std::pair<std::string, StorePath> output;
SingleDerivedPathBuilt discardOutputPath() const;
std::string to_string(const StoreDirConfig & store) const;
static SingleBuiltPathBuilt parse(const StoreDirConfig & store, std::string_view, std::string_view);
nlohmann::json toJSON(const StoreDirConfig & store) const;
DECLARE_CMP(SingleBuiltPathBuilt);
};
using _SingleBuiltPathRaw = std::variant<
DerivedPathOpaque,
SingleBuiltPathBuilt
>;
struct SingleBuiltPath : _SingleBuiltPathRaw {
using Raw = _SingleBuiltPathRaw;
using Raw::Raw;
using Opaque = DerivedPathOpaque;
using Built = SingleBuiltPathBuilt;
inline const Raw & raw() const {
return static_cast<const Raw &>(*this);
}
StorePath outPath() const;
SingleDerivedPath discardOutputPath() const;
static SingleBuiltPath parse(const StoreDirConfig & store, std::string_view);
nlohmann::json toJSON(const StoreDirConfig & store) const;
};
static inline ref<SingleBuiltPath> staticDrv(StorePath drvPath)
{
return make_ref<SingleBuiltPath>(SingleBuiltPath::Opaque { drvPath });
}
/**
* A built derived path with hints in the form of optional concrete output paths.
*
 * See 'BuiltPath' for more explanation.
*/
struct BuiltPathBuilt {
ref<SingleBuiltPath> drvPath;
std::map<std::string, StorePath> outputs;
std::string to_string(const StoreDirConfig & store) const;
static BuiltPathBuilt parse(const StoreDirConfig & store, std::string_view, std::string_view);
nlohmann::json toJSON(const StoreDirConfig & store) const;
DECLARE_CMP(BuiltPathBuilt);
};
using _BuiltPathRaw = std::variant<
DerivedPath::Opaque,
BuiltPathBuilt
>;
/**
* A built path. Similar to a DerivedPath, but enriched with the corresponding
* output path(s).
*/
struct BuiltPath : _BuiltPathRaw {
using Raw = _BuiltPathRaw;
using Raw::Raw;
using Opaque = DerivedPathOpaque;
using Built = BuiltPathBuilt;
inline const Raw & raw() const {
return static_cast<const Raw &>(*this);
}
StorePathSet outPaths() const;
RealisedPath::Set toRealisedPaths(Store & store) const;
nlohmann::json toJSON(const StoreDirConfig & store) const;
};
typedef std::vector<BuiltPath> BuiltPaths;
}
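
As the new doc comments say, a `BuiltPath` is a `DerivedPath` enriched with the concrete output path(s). A short consumer sketch (not from the commit; the function and the way the store handle is obtained are hypothetical) using only the accessors declared above:

    #include "built-path.hh"
    #include "store-api.hh"
    #include <nlohmann/json.hpp>
    #include <iostream>

    // Assumes a nix::Store obtained elsewhere, e.g. via openStore().
    void printBuiltPath(nix::Store & store, const nix::BuiltPath & built)
    {
        // Works for both variants: Opaque yields its single path, Built yields
        // one path per realised output.
        for (auto & path : built.outPaths())
            std::cout << store.printStorePath(path) << "\n";

        // JSON form; for the Built case this looks roughly like
        // {"drvPath": ..., "outputs": {"out": "/nix/store/..."}}.
        std::cout << built.toJSON(store).dump(2) << std::endl;
    }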


@ -1,4 +1,5 @@
#include "command.hh"
#include "markdown.hh"
#include "store-api.hh"
#include "local-fs-store.hh"
#include "derivations.hh"
@ -34,6 +35,19 @@ nlohmann::json NixMultiCommand::toJSON()
return MultiCommand::toJSON();
}
void NixMultiCommand::run()
{
if (!command) {
std::set<std::string> subCommandTextLines;
for (auto & [name, _] : commands)
subCommandTextLines.insert(fmt("- `%s`", name));
std::string markdownError = fmt("`nix %s` requires a sub-command. Available sub-commands:\n\n%s\n",
commandName, concatStringsSep("\n", subCommandTextLines));
throw UsageError(renderMarkdownToTerminal(markdownError));
}
command->second->run();
}
StoreCommand::StoreCommand()
{
}
@ -98,7 +112,7 @@ EvalCommand::EvalCommand()
EvalCommand::~EvalCommand()
{
if (evalState)
evalState->printStats();
evalState->maybePrintStats();
}
ref<Store> EvalCommand::getEvalStore()
@ -175,7 +189,7 @@ void BuiltPathsCommand::run(ref<Store> store, Installables && installables)
throw UsageError("'--all' does not expect arguments");
// XXX: Only uses opaque paths, ignores all the realisations
for (auto & p : store->queryAllValidPaths())
paths.push_back(BuiltPath::Opaque{p});
paths.emplace_back(BuiltPath::Opaque{p});
} else {
paths = Installable::toBuiltPaths(getEvalStore(), store, realiseMode, operateOn, installables);
if (recursive) {
@ -188,7 +202,7 @@ void BuiltPathsCommand::run(ref<Store> store, Installables && installables)
}
store->computeFSClosure(pathsRoots, pathsClosure);
for (auto & path : pathsClosure)
paths.push_back(BuiltPath::Opaque{path});
paths.emplace_back(BuiltPath::Opaque{path});
}
}


@ -26,29 +26,40 @@ static constexpr Command::Category catNixInstallation = 102;
static constexpr auto installablesCategory = "Options that change the interpretation of [installables](@docroot@/command-ref/new-cli/nix.md#installables)";
struct NixMultiCommand : virtual MultiCommand, virtual Command
struct NixMultiCommand : MultiCommand, virtual Command
{
nlohmann::json toJSON() override;
using MultiCommand::MultiCommand;
virtual void run() override;
};
// For the overloaded run methods
#pragma GCC diagnostic ignored "-Woverloaded-virtual"
/* A command that requires a Nix store. */
/**
* A command that requires a \ref Store "Nix store".
*/
struct StoreCommand : virtual Command
{
StoreCommand();
void run() override;
ref<Store> getStore();
virtual ref<Store> createStore();
/**
* Main entry point, with a `Store` provided
*/
virtual void run(ref<Store>) = 0;
private:
std::shared_ptr<Store> _store;
};
/* A command that copies something between `--from` and `--to`
stores. */
/**
* A command that copies something between `--from` and `--to` \ref
* Store stores.
*/
struct CopyCommand : virtual StoreCommand
{
std::string srcUri, dstUri;
@ -60,6 +71,9 @@ struct CopyCommand : virtual StoreCommand
ref<Store> getDstStore();
};
/**
* A command that needs to evaluate Nix language expressions.
*/
struct EvalCommand : virtual StoreCommand, MixEvalArgs
{
bool startReplOnEvalErrors = false;
@ -79,20 +93,26 @@ private:
std::shared_ptr<EvalState> evalState;
};
/**
* A mixin class for commands that process flakes, adding a few standard
* flake-related options/flags.
*/
struct MixFlakeOptions : virtual Args, EvalCommand
{
flake::LockFlags lockFlags;
std::optional<std::string> needsFlakeInputCompletion = {};
MixFlakeOptions();
virtual std::vector<std::string> getFlakesForCompletion()
/**
* The completion for some of these flags depends on the flake(s) in
* question.
*
* This method should be implemented to gather all flakerefs the
* command is operating with (presumably specified via some other
* arguments) so that the completions for these flags can use them.
*/
virtual std::vector<FlakeRef> getFlakeRefsForCompletion()
{ return {}; }
void completeFlakeInput(std::string_view prefix);
void completionHook() override;
};
struct SourceExprCommand : virtual Args, MixFlakeOptions
@ -112,15 +132,35 @@ struct SourceExprCommand : virtual Args, MixFlakeOptions
virtual Strings getDefaultFlakeAttrPathPrefixes();
void completeInstallable(std::string_view prefix);
/**
* Complete an installable from the given prefix.
*/
void completeInstallable(AddCompletions & completions, std::string_view prefix);
/**
* Convenience wrapper around the underlying function to make setting the
* callback easier.
*/
CompleterClosure getCompleteInstallable();
};
/**
* A mixin class for commands that need a read-only flag.
*
* What exactly is "read-only" is unspecified, but it will usually be
* the \ref Store "Nix store".
*/
struct MixReadOnlyOption : virtual Args
{
MixReadOnlyOption();
};
/* Like InstallablesCommand but the installables are not loaded */
/**
* Like InstallablesCommand but the installables are not loaded.
*
* This is needed by `CmdRepl` which wants to load (and reload) the
* installables itself.
*/
struct RawInstallablesCommand : virtual Args, SourceExprCommand
{
RawInstallablesCommand();
@ -129,19 +169,22 @@ struct RawInstallablesCommand : virtual Args, SourceExprCommand
void run(ref<Store> store) override;
// FIXME make const after CmdRepl's override is fixed up
// FIXME make const after `CmdRepl`'s override is fixed up
virtual void applyDefaultInstallables(std::vector<std::string> & rawInstallables);
bool readFromStdIn = false;
std::vector<std::string> getFlakesForCompletion() override;
std::vector<FlakeRef> getFlakeRefsForCompletion() override;
private:
std::vector<std::string> rawInstallables;
};
/* A command that operates on a list of "installables", which can be
store paths, attribute paths, Nix expressions, etc. */
/**
* A command that operates on a list of "installables", which can be
* store paths, attribute paths, Nix expressions, etc.
*/
struct InstallablesCommand : RawInstallablesCommand
{
virtual void run(ref<Store> store, Installables && installables) = 0;
@ -149,7 +192,9 @@ struct InstallablesCommand : RawInstallablesCommand
void run(ref<Store> store, std::vector<std::string> && rawInstallables) override;
};
/* A command that operates on exactly one "installable" */
/**
* A command that operates on exactly one "installable".
*/
struct InstallableCommand : virtual Args, SourceExprCommand
{
InstallableCommand();
@ -158,10 +203,7 @@ struct InstallableCommand : virtual Args, SourceExprCommand
void run(ref<Store> store) override;
std::vector<std::string> getFlakesForCompletion() override
{
return {_installable};
}
std::vector<FlakeRef> getFlakeRefsForCompletion() override;
private:
@ -175,7 +217,12 @@ struct MixOperateOnOptions : virtual Args
MixOperateOnOptions();
};
/* A command that operates on zero or more store paths. */
/**
* A command that operates on zero or more extant store paths.
*
 * If the argument the user passes is some sort of recipe for a path
* not yet built, it must be built first.
*/
struct BuiltPathsCommand : InstallablesCommand, virtual MixOperateOnOptions
{
private:
@ -207,7 +254,9 @@ struct StorePathsCommand : public BuiltPathsCommand
void run(ref<Store> store, BuiltPaths && paths) override;
};
/* A command that operates on exactly one store path. */
/**
* A command that operates on exactly one store path.
*/
struct StorePathCommand : public StorePathsCommand
{
virtual void run(ref<Store> store, const StorePath & storePath) = 0;
@ -215,7 +264,9 @@ struct StorePathCommand : public StorePathsCommand
void run(ref<Store> store, StorePaths && storePaths) override;
};
/* A helper class for registering commands globally. */
/**
* A helper class for registering \ref Command commands globally.
*/
struct RegisterCommand
{
typedef std::map<std::vector<std::string>, std::function<ref<Command>()>> Commands;
@ -271,13 +322,24 @@ struct MixEnvironment : virtual Args {
MixEnvironment();
/* Modify global environ based on ignoreEnvironment, keep, and unset. It's expected that exec will be called before this class goes out of scope, otherwise environ will become invalid. */
/**
* Modify global environ based on `ignoreEnvironment`, `keep`, and
* `unset`. It's expected that exec will be called before this class
* goes out of scope, otherwise `environ` will become invalid.
*/
void setEnviron();
};
void completeFlakeRef(ref<Store> store, std::string_view prefix);
void completeFlakeInputPath(
AddCompletions & completions,
ref<EvalState> evalState,
const std::vector<FlakeRef> & flakeRefs,
std::string_view prefix);
void completeFlakeRef(AddCompletions & completions, ref<Store> store, std::string_view prefix);
void completeFlakeRefWithFragment(
AddCompletions & completions,
ref<EvalState> evalState,
flake::LockFlags lockFlags,
Strings attrPathPrefixes,
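
The doc comments added to this header describe the command hierarchy (`StoreCommand`, `EvalCommand`, the various `Mix*` option mixins) and the `RegisterCommand` helper for registering commands globally. A hedged sketch of how a new subcommand would typically plug into it (not part of the commit; the command, its name and its behaviour are invented):

    #include "command.hh"
    #include "store-api.hh"

    using namespace nix;

    struct CmdStoreUri : StoreCommand
    {
        std::string description() override
        {
            return "print the URI of the configured store";
        }

        // StoreCommand::run(ref<Store>) is the "main entry point, with a
        // `Store` provided" documented above.
        void run(ref<Store> store) override
        {
            logger->cout(store->getUri());
        }
    };

    // Registers the command so it shows up as `nix store-uri`.
    static auto rCmdStoreUri = registerCommand<CmdStoreUri>("store-uri");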


@ -1,13 +1,14 @@
#include "eval-settings.hh"
#include "common-eval-args.hh"
#include "shared.hh"
#include "filetransfer.hh"
#include "util.hh"
#include "eval.hh"
#include "fetchers.hh"
#include "registry.hh"
#include "flake/flakeref.hh"
#include "store-api.hh"
#include "command.hh"
#include "tarball.hh"
namespace nix {
@ -105,7 +106,9 @@ MixEvalArgs::MixEvalArgs()
)",
.category = category,
.labels = {"path"},
.handler = {[&](std::string s) { searchPath.push_back(s); }}
.handler = {[&](std::string s) {
searchPath.elements.emplace_back(SearchPath::Elem::parse(s));
}}
});
addFlag({
@ -129,8 +132,8 @@ MixEvalArgs::MixEvalArgs()
if (to.subdir != "") extraAttrs["dir"] = to.subdir;
fetchers::overrideRegistry(from.input, to.input, extraAttrs);
}},
.completer = {[&](size_t, std::string_view prefix) {
completeFlakeRef(openStore(), prefix);
.completer = {[&](AddCompletions & completions, size_t, std::string_view prefix) {
completeFlakeRef(completions, openStore(), prefix);
}}
});
@ -138,7 +141,7 @@ MixEvalArgs::MixEvalArgs()
.longName = "eval-store",
.description =
R"(
The [URL of the Nix store](@docroot@/command-ref/new-cli/nix3-help-stores.md#store-url-format)
The [URL of the Nix store](@docroot@/store/types/index.md#store-url-format)
to use for evaluation, i.e. to store derivations (`.drv` files) and inputs referenced by them.
)",
.category = category,
@ -161,18 +164,18 @@ Bindings * MixEvalArgs::getAutoArgs(EvalState & state)
return res.finish();
}
SourcePath lookupFileArg(EvalState & state, std::string_view s)
SourcePath lookupFileArg(EvalState & state, std::string_view s, CanonPath baseDir)
{
if (EvalSettings::isPseudoUrl(s)) {
auto storePath = fetchers::downloadTarball(
state.store, EvalSettings::resolvePseudoUrl(s), "source", false).tree.storePath;
state.store, EvalSettings::resolvePseudoUrl(s), "source", false).storePath;
return state.rootPath(CanonPath(state.store->toRealPath(storePath)));
}
else if (hasPrefix(s, "flake:")) {
experimentalFeatureSettings.require(Xp::Flakes);
auto flakeRef = parseFlakeRef(std::string(s.substr(6)), {}, true, false);
auto storePath = flakeRef.resolve(state.store).fetchTree(state.store).first.storePath;
auto storePath = flakeRef.resolve(state.store).fetchTree(state.store).first;
return state.rootPath(CanonPath(state.store->toRealPath(storePath)));
}
@ -182,7 +185,7 @@ SourcePath lookupFileArg(EvalState & state, std::string_view s)
}
else
return state.rootPath(CanonPath::fromCwd(s));
return state.rootPath(CanonPath(s, baseDir));
}
}
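
Condensed for reference, the three branches visible in `lookupFileArg` above classify an argument roughly as follows (illustration only, not code from the tree; `EvalSettings::isPseudoUrl` is the same predicate declared in eval-settings.hh):

    #include "eval-settings.hh"
    #include <string_view>

    using namespace nix;

    enum class FileArgKind { PseudoUrl, FlakeRef, PlainPath };

    static FileArgKind classifyFileArg(std::string_view s)
    {
        if (EvalSettings::isPseudoUrl(s))   // e.g. "channel:nixos-unstable" or "https://example.org/src.tar.gz"
            return FileArgKind::PseudoUrl;  // fetched as a tarball into the store
        if (s.starts_with("flake:"))        // e.g. "flake:nixpkgs" (needs the flakes feature)
            return FileArgKind::FlakeRef;   // resolved via the registry, then fetched
        return FileArgKind::PlainPath;      // resolved against baseDir (cwd by default)
    }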


@ -2,7 +2,9 @@
///@file
#include "args.hh"
#include "canon-path.hh"
#include "common-args.hh"
#include "search-path.hh"
namespace nix {
@ -19,7 +21,7 @@ struct MixEvalArgs : virtual Args, virtual MixRepair
Bindings * getAutoArgs(EvalState & state);
Strings searchPath;
SearchPath searchPath;
std::optional<std::string> evalStoreUrl;
@ -27,6 +29,6 @@ private:
std::map<std::string, std::string> autoArgs;
};
SourcePath lookupFileArg(EvalState & state, std::string_view s);
SourcePath lookupFileArg(EvalState & state, std::string_view s, CanonPath baseDir = CanonPath::fromCwd());
}


@ -1,5 +1,6 @@
#include "util.hh"
#include "editor-for.hh"
#include "environment-variables.hh"
#include "source-path.hh"
namespace nix {


@ -2,7 +2,7 @@
///@file
#include "types.hh"
#include "input-accessor.hh"
#include "source-path.hh"
namespace nix {


@ -80,7 +80,7 @@ DerivedPathsWithInfo InstallableAttrPath::toDerivedPaths()
[&](const ExtendedOutputsSpec::Explicit & e) -> OutputsSpec {
return e;
},
}, extendedOutputsSpec.raw());
}, extendedOutputsSpec.raw);
auto [iter, didInsert] = byDrvPath.emplace(*drvPath, newOutputs);
@ -92,10 +92,11 @@ DerivedPathsWithInfo InstallableAttrPath::toDerivedPaths()
for (auto & [drvPath, outputs] : byDrvPath)
res.push_back({
.path = DerivedPath::Built {
.drvPath = drvPath,
.drvPath = makeConstantStorePathRef(drvPath),
.outputs = outputs,
},
.info = make_ref<ExtraPathInfoValue>(ExtraPathInfoValue::Value {
.extendedOutputsSpec = outputs,
/* FIXME: reconsider backwards compatibility above
so we can fill in this info. */
}),
@ -114,7 +115,7 @@ InstallableAttrPath InstallableAttrPath::parse(
return {
state, cmd, v,
prefix == "." ? "" : std::string { prefix },
extendedOutputsSpec
std::move(extendedOutputsSpec),
};
}


@ -4,7 +4,6 @@
#include "globals.hh"
#include "installable-value.hh"
#include "outputs-spec.hh"
#include "util.hh"
#include "command.hh"
#include "attr-path.hh"
#include "common-eval-args.hh"


@ -18,14 +18,7 @@ DerivedPathsWithInfo InstallableDerivedPath::toDerivedPaths()
std::optional<StorePath> InstallableDerivedPath::getStorePath()
{
return std::visit(overloaded {
[&](const DerivedPath::Built & bfd) {
return bfd.drvPath;
},
[&](const DerivedPath::Opaque & bo) {
return bo.path;
},
}, derivedPath.raw());
return derivedPath.getBaseStorePath();
}
InstallableDerivedPath InstallableDerivedPath::parse(
@ -42,7 +35,7 @@ InstallableDerivedPath InstallableDerivedPath::parse(
// Remove this prior to stabilizing the new CLI.
if (storePath.isDerivation()) {
auto oldDerivedPath = DerivedPath::Built {
.drvPath = storePath,
.drvPath = makeConstantStorePathRef(storePath),
.outputs = OutputsSpec::All { },
};
warn(
@ -55,12 +48,14 @@ InstallableDerivedPath InstallableDerivedPath::parse(
},
// If the user did use ^, we just do exactly what is written.
[&](const ExtendedOutputsSpec::Explicit & outputSpec) -> DerivedPath {
auto drv = make_ref<SingleDerivedPath>(SingleDerivedPath::parse(*store, prefix));
drvRequireExperiment(*drv);
return DerivedPath::Built {
.drvPath = store->parseStorePath(prefix),
.drvPath = std::move(drv),
.outputs = outputSpec,
};
},
}, extendedOutputsSpec.raw());
}, extendedOutputsSpec.raw);
return InstallableDerivedPath {
store,
std::move(derivedPath),


@ -28,6 +28,11 @@ namespace nix {
std::vector<std::string> InstallableFlake::getActualAttrPaths()
{
std::vector<std::string> res;
if (attrPaths.size() == 1 && attrPaths.front().starts_with(".")){
attrPaths.front().erase(0,1);
res.push_back(attrPaths.front());
return res;
}
for (auto & prefix : prefixes)
res.push_back(prefix + *attrPaths.begin());
@ -47,7 +52,7 @@ Value * InstallableFlake::getFlakeOutputs(EvalState & state, const flake::Locked
auto aOutputs = vFlake->attrs->get(state.symbols.create("outputs"));
assert(aOutputs);
state.forceValue(*aOutputs->value, [&]() { return aOutputs->value->determinePos(noPos); });
state.forceValue(*aOutputs->value, aOutputs->value->determinePos(noPos));
return aOutputs->value;
}
@ -118,7 +123,7 @@ DerivedPathsWithInfo InstallableFlake::toDerivedPaths()
return {{
.path = DerivedPath::Built {
.drvPath = std::move(drvPath),
.drvPath = makeConstantStorePathRef(std::move(drvPath)),
.outputs = std::visit(overloaded {
[&](const ExtendedOutputsSpec::Default & d) -> OutputsSpec {
std::set<std::string> outputsToInstall;
@ -141,7 +146,7 @@ DerivedPathsWithInfo InstallableFlake::toDerivedPaths()
[&](const ExtendedOutputsSpec::Explicit & e) -> OutputsSpec {
return e;
},
}, extendedOutputsSpec.raw()),
}, extendedOutputsSpec.raw),
},
.info = make_ref<ExtraPathInfoFlake>(
ExtraPathInfoValue::Value {
@ -151,7 +156,7 @@ DerivedPathsWithInfo InstallableFlake::toDerivedPaths()
},
ExtraPathInfoFlake::Flake {
.originalRef = flakeRef,
.resolvedRef = getLockedFlake()->flake.lockedRef,
.lockedRef = getLockedFlake()->flake.lockedRef,
}),
}};
}


@ -19,7 +19,7 @@ struct ExtraPathInfoFlake : ExtraPathInfoValue
*/
struct Flake {
FlakeRef originalRef;
FlakeRef resolvedRef;
FlakeRef lockedRef;
};
Flake flake;


@ -1,5 +1,6 @@
#include "installable-value.hh"
#include "eval-cache.hh"
#include "fetch-to-store.hh"
namespace nix {
@ -44,7 +45,7 @@ ref<InstallableValue> InstallableValue::require(ref<Installable> installable)
std::optional<DerivedPathWithInfo> InstallableValue::trySinglePathToDerivedPaths(Value & v, const PosIdx pos, std::string_view errorCtx)
{
if (v.type() == nPath) {
auto storePath = v.path().fetchToStore(state->store);
auto storePath = fetchToStore(*state->store, v.path());
return {{
.path = DerivedPath::Opaque {
.path = std::move(storePath),
@ -55,7 +56,8 @@ std::optional<DerivedPathWithInfo> InstallableValue::trySinglePathToDerivedPaths
else if (v.type() == nString) {
return {{
.path = state->coerceToDerivedPath(pos, v, errorCtx),
.path = DerivedPath::fromSingle(
state->coerceToSingleDerivedPath(pos, v, errorCtx)),
.info = make_ref<ExtraPathInfo>(),
}};
}


@ -4,6 +4,7 @@
#include "installable-attr-path.hh"
#include "installable-flake.hh"
#include "outputs-spec.hh"
#include "users.hh"
#include "util.hh"
#include "command.hh"
#include "attr-path.hh"
@ -11,6 +12,7 @@
#include "derivations.hh"
#include "eval-inline.hh"
#include "eval.hh"
#include "eval-settings.hh"
#include "get-drvs.hh"
#include "store-api.hh"
#include "shared.hh"
@ -27,15 +29,38 @@
namespace nix {
void completeFlakeInputPath(
AddCompletions & completions,
ref<EvalState> evalState,
const std::vector<FlakeRef> & flakeRefs,
std::string_view prefix)
{
for (auto & flakeRef : flakeRefs) {
auto flake = flake::getFlake(*evalState, flakeRef, true);
for (auto & input : flake.inputs)
if (hasPrefix(input.first, prefix))
completions.add(input.first);
}
}
MixFlakeOptions::MixFlakeOptions()
{
auto category = "Common flake-related options";
addFlag({
.longName = "recreate-lock-file",
.description = "Recreate the flake's lock file from scratch.",
.description = R"(
Recreate the flake's lock file from scratch.
> **DEPRECATED**
>
> Use [`nix flake update`](@docroot@/command-ref/new-cli/nix3-flake-update.md) instead.
)",
.category = category,
.handler = {&lockFlags.recreateLockFile, true}
.handler = {[&]() {
lockFlags.recreateLockFile = true;
warn("'--recreate-lock-file' is deprecated and will be removed in a future version; use 'nix flake update' instead.");
}}
});
addFlag({
@ -54,8 +79,13 @@ MixFlakeOptions::MixFlakeOptions()
addFlag({
.longName = "no-registries",
.description =
"Don't allow lookups in the flake registries. This option is deprecated; use `--no-use-registries`.",
.description = R"(
Don't allow lookups in the flake registries.
> **DEPRECATED**
>
> Use [`--no-use-registries`](#opt-no-use-registries) instead.
)",
.category = category,
.handler = {[&]() {
lockFlags.useRegistries = false;
@ -72,14 +102,21 @@ MixFlakeOptions::MixFlakeOptions()
addFlag({
.longName = "update-input",
.description = "Update a specific flake input (ignoring its previous entry in the lock file).",
.description = R"(
Update a specific flake input (ignoring its previous entry in the lock file).
> **DEPRECATED**
>
> Use [`nix flake update`](@docroot@/command-ref/new-cli/nix3-flake-update.md) instead.
)",
.category = category,
.labels = {"input-path"},
.handler = {[&](std::string s) {
warn("'--update-input' is a deprecated alias for 'flake update' and will be removed in a future version.");
lockFlags.inputUpdates.insert(flake::parseInputPath(s));
}},
.completer = {[&](size_t, std::string_view prefix) {
needsFlakeInputCompletion = {std::string(prefix)};
.completer = {[&](AddCompletions & completions, size_t, std::string_view prefix) {
completeFlakeInputPath(completions, getEvalState(), getFlakeRefsForCompletion(), prefix);
}}
});
@ -92,13 +129,14 @@ MixFlakeOptions::MixFlakeOptions()
lockFlags.writeLockFile = false;
lockFlags.inputOverrides.insert_or_assign(
flake::parseInputPath(inputPath),
parseFlakeRef(flakeRef, absPath("."), true));
parseFlakeRef(flakeRef, absPath(getCommandBaseDir()), true));
}},
.completer = {[&](size_t n, std::string_view prefix) {
if (n == 0)
needsFlakeInputCompletion = {std::string(prefix)};
else if (n == 1)
completeFlakeRef(getEvalState()->store, prefix);
.completer = {[&](AddCompletions & completions, size_t n, std::string_view prefix) {
if (n == 0) {
completeFlakeInputPath(completions, getEvalState(), getFlakeRefsForCompletion(), prefix);
} else if (n == 1) {
completeFlakeRef(completions, getEvalState()->store, prefix);
}
}}
});
@ -133,7 +171,7 @@ MixFlakeOptions::MixFlakeOptions()
auto evalState = getEvalState();
auto flake = flake::lockFlake(
*evalState,
parseFlakeRef(flakeRef, absPath(".")),
parseFlakeRef(flakeRef, absPath(getCommandBaseDir())),
{ .writeLockFile = false });
for (auto & [inputName, input] : flake.lockFile.root->inputs) {
auto input2 = flake.lockFile.findInput({inputName}); // resolve 'follows' nodes
@ -145,30 +183,12 @@ MixFlakeOptions::MixFlakeOptions()
}
}
}},
.completer = {[&](size_t, std::string_view prefix) {
completeFlakeRef(getEvalState()->store, prefix);
.completer = {[&](AddCompletions & completions, size_t, std::string_view prefix) {
completeFlakeRef(completions, getEvalState()->store, prefix);
}}
});
}
void MixFlakeOptions::completeFlakeInput(std::string_view prefix)
{
auto evalState = getEvalState();
for (auto & flakeRefS : getFlakesForCompletion()) {
auto flakeRef = parseFlakeRefWithFragment(expandTilde(flakeRefS), absPath(".")).first;
auto flake = flake::getFlake(*evalState, flakeRef, true);
for (auto & input : flake.inputs)
if (hasPrefix(input.first, prefix))
completions->add(input.first);
}
}
void MixFlakeOptions::completionHook()
{
if (auto & prefix = needsFlakeInputCompletion)
completeFlakeInput(*prefix);
}
SourceExprCommand::SourceExprCommand()
{
addFlag({
@ -225,17 +245,25 @@ Strings SourceExprCommand::getDefaultFlakeAttrPathPrefixes()
};
}
void SourceExprCommand::completeInstallable(std::string_view prefix)
Args::CompleterClosure SourceExprCommand::getCompleteInstallable()
{
return [this](AddCompletions & completions, size_t, std::string_view prefix) {
completeInstallable(completions, prefix);
};
}
void SourceExprCommand::completeInstallable(AddCompletions & completions, std::string_view prefix)
{
try {
if (file) {
completionType = ctAttrs;
completions.setType(AddCompletions::Type::Attrs);
evalSettings.pureEval = false;
auto state = getEvalState();
Expr *e = state->parseExprFromFile(
resolveExprPath(state->checkSourcePath(lookupFileArg(*state, *file)))
);
auto e =
state->parseExprFromFile(
resolveExprPath(
lookupFileArg(*state, *file)));
Value root;
state->eval(e, root);
@ -264,14 +292,15 @@ void SourceExprCommand::completeInstallable(std::string_view prefix)
std::string name = state->symbols[i.name];
if (name.find(searchWord) == 0) {
if (prefix_ == "")
completions->add(name);
completions.add(name);
else
completions->add(prefix_ + "." + name);
completions.add(prefix_ + "." + name);
}
}
}
} else {
completeFlakeRefWithFragment(
completions,
getEvalState(),
lockFlags,
getDefaultFlakeAttrPathPrefixes(),
@ -284,6 +313,7 @@ void SourceExprCommand::completeInstallable(std::string_view prefix)
}
void completeFlakeRefWithFragment(
AddCompletions & completions,
ref<EvalState> evalState,
flake::LockFlags lockFlags,
Strings attrPathPrefixes,
@ -295,12 +325,19 @@ void completeFlakeRefWithFragment(
try {
auto hash = prefix.find('#');
if (hash == std::string::npos) {
completeFlakeRef(evalState->store, prefix);
completeFlakeRef(completions, evalState->store, prefix);
} else {
completionType = ctAttrs;
completions.setType(AddCompletions::Type::Attrs);
auto fragment = prefix.substr(hash + 1);
std::string prefixRoot = "";
if (fragment.starts_with(".")){
fragment = fragment.substr(1);
prefixRoot = ".";
}
auto flakeRefS = std::string(prefix.substr(0, hash));
// TODO: ideally this would use the command base directory instead of assuming ".".
auto flakeRef = parseFlakeRef(expandTilde(flakeRefS), absPath("."));
auto evalCache = openEvalCache(*evalState,
@ -308,6 +345,9 @@ void completeFlakeRefWithFragment(
auto root = evalCache->getRoot();
if (prefixRoot == "."){
attrPathPrefixes.clear();
}
/* Complete 'fragment' relative to all the
attrpath prefixes as well as the root of the
flake. */
@ -332,7 +372,7 @@ void completeFlakeRefWithFragment(
auto attrPath2 = (*attr)->getAttrPath(attr2);
/* Strip the attrpath prefix. */
attrPath2.erase(attrPath2.begin(), attrPath2.begin() + attrPathPrefix.size());
completions->add(flakeRefS + "#" + concatStringsSep(".", evalState->symbols.resolve(attrPath2)));
completions.add(flakeRefS + "#" + prefixRoot + concatStringsSep(".", evalState->symbols.resolve(attrPath2)));
}
}
}
@ -343,7 +383,7 @@ void completeFlakeRefWithFragment(
for (auto & attrPath : defaultFlakeAttrPaths) {
auto attr = root->findAlongAttrPath(parseAttrPath(*evalState, attrPath));
if (!attr) continue;
completions->add(flakeRefS + "#");
completions.add(flakeRefS + "#" + prefixRoot);
}
}
}
@ -352,15 +392,15 @@ void completeFlakeRefWithFragment(
}
}
void completeFlakeRef(ref<Store> store, std::string_view prefix)
void completeFlakeRef(AddCompletions & completions, ref<Store> store, std::string_view prefix)
{
if (!experimentalFeatureSettings.isEnabled(Xp::Flakes))
return;
if (prefix == "")
completions->add(".");
completions.add(".");
completeDir(0, prefix);
Args::completeDir(completions, 0, prefix);
/* Look for registry entries that match the prefix. */
for (auto & registry : fetchers::getRegistries(store)) {
@ -369,10 +409,10 @@ void completeFlakeRef(ref<Store> store, std::string_view prefix)
if (!hasPrefix(prefix, "flake:") && hasPrefix(from, "flake:")) {
std::string from2(from, 6);
if (hasPrefix(from2, prefix))
completions->add(from2);
completions.add(from2);
} else {
if (hasPrefix(from, prefix))
completions->add(from);
completions.add(from);
}
}
}
@ -446,10 +486,12 @@ Installables SourceExprCommand::parseInstallables(
auto e = state->parseStdin();
state->eval(e, *vFile);
}
else if (file)
state->evalFile(lookupFileArg(*state, *file), *vFile);
else if (file) {
state->evalFile(lookupFileArg(*state, *file, CanonPath::fromCwd(getCommandBaseDir())), *vFile);
}
else {
auto e = state->parseExprFromString(*expr, state->rootPath(CanonPath::fromCwd()));
CanonPath dir(CanonPath::fromCwd(getCommandBaseDir()));
auto e = state->parseExprFromString(*expr, state->rootPath(dir));
state->eval(e, *vFile);
}
@ -458,7 +500,7 @@ Installables SourceExprCommand::parseInstallables(
result.push_back(
make_ref<InstallableAttrPath>(
InstallableAttrPath::parse(
state, *this, vFile, prefix, extendedOutputsSpec)));
state, *this, vFile, std::move(prefix), std::move(extendedOutputsSpec))));
}
} else {
@ -474,7 +516,7 @@ Installables SourceExprCommand::parseInstallables(
if (prefix.find('/') != std::string::npos) {
try {
result.push_back(make_ref<InstallableDerivedPath>(
InstallableDerivedPath::parse(store, prefix, extendedOutputsSpec)));
InstallableDerivedPath::parse(store, prefix, extendedOutputsSpec.raw)));
continue;
} catch (BadStorePath &) {
} catch (...) {
@ -484,13 +526,13 @@ Installables SourceExprCommand::parseInstallables(
}
try {
auto [flakeRef, fragment] = parseFlakeRefWithFragment(std::string { prefix }, absPath("."));
auto [flakeRef, fragment] = parseFlakeRefWithFragment(std::string { prefix }, absPath(getCommandBaseDir()));
result.push_back(make_ref<InstallableFlake>(
this,
getEvalState(),
std::move(flakeRef),
fragment,
extendedOutputsSpec,
std::move(extendedOutputsSpec),
getDefaultFlakeAttrPaths(),
getDefaultFlakeAttrPathPrefixes(),
lockFlags));
@ -514,6 +556,30 @@ ref<Installable> SourceExprCommand::parseInstallable(
return installables.front();
}
static SingleBuiltPath getBuiltPath(ref<Store> evalStore, ref<Store> store, const SingleDerivedPath & b)
{
return std::visit(
overloaded{
[&](const SingleDerivedPath::Opaque & bo) -> SingleBuiltPath {
return SingleBuiltPath::Opaque { bo.path };
},
[&](const SingleDerivedPath::Built & bfd) -> SingleBuiltPath {
auto drvPath = getBuiltPath(evalStore, store, *bfd.drvPath);
// Resolving this instead of `bfd` will yield the same result, but avoid duplicative work.
SingleDerivedPath::Built truncatedBfd {
.drvPath = makeConstantStorePathRef(drvPath.outPath()),
.output = bfd.output,
};
auto outputPath = resolveDerivedPath(*store, truncatedBfd, &*evalStore);
return SingleBuiltPath::Built {
.drvPath = make_ref<SingleBuiltPath>(std::move(drvPath)),
.output = { bfd.output, outputPath },
};
},
},
b.raw());
}
std::vector<BuiltPathWithResult> Installable::build(
ref<Store> evalStore,
ref<Store> store,
@ -567,7 +633,10 @@ std::vector<std::pair<ref<Installable>, BuiltPathWithResult>> Installable::build
[&](const DerivedPath::Built & bfd) {
auto outputs = resolveDerivedPath(*store, bfd, &*evalStore);
res.push_back({aux.installable, {
.path = BuiltPath::Built { bfd.drvPath, outputs },
.path = BuiltPath::Built {
.drvPath = make_ref<SingleBuiltPath>(getBuiltPath(evalStore, store, *bfd.drvPath)),
.outputs = outputs,
},
.info = aux.info}});
},
[&](const DerivedPath::Opaque & bo) {
@ -596,7 +665,10 @@ std::vector<std::pair<ref<Installable>, BuiltPathWithResult>> Installable::build
for (auto & [outputName, realisation] : buildResult.builtOutputs)
outputs.emplace(outputName, realisation.outPath);
res.push_back({aux.installable, {
.path = BuiltPath::Built { bfd.drvPath, outputs },
.path = BuiltPath::Built {
.drvPath = make_ref<SingleBuiltPath>(getBuiltPath(evalStore, store, *bfd.drvPath)),
.outputs = outputs,
},
.info = aux.info,
.result = buildResult}});
},
@ -638,12 +710,12 @@ BuiltPaths Installable::toBuiltPaths(
BuiltPaths res;
for (auto & drvPath : Installable::toDerivations(store, installables, true))
res.push_back(BuiltPath::Opaque{drvPath});
res.emplace_back(BuiltPath::Opaque{drvPath});
return res;
}
}
StorePathSet Installable::toStorePaths(
StorePathSet Installable::toStorePathSet(
ref<Store> evalStore,
ref<Store> store,
Realise mode, OperateOn operateOn,
@ -657,13 +729,27 @@ StorePathSet Installable::toStorePaths(
return outPaths;
}
StorePaths Installable::toStorePaths(
ref<Store> evalStore,
ref<Store> store,
Realise mode, OperateOn operateOn,
const Installables & installables)
{
StorePaths outPaths;
for (auto & path : toBuiltPaths(evalStore, store, mode, operateOn, installables)) {
auto thisOutPaths = path.outPaths();
outPaths.insert(outPaths.end(), thisOutPaths.begin(), thisOutPaths.end());
}
return outPaths;
}
StorePath Installable::toStorePath(
ref<Store> evalStore,
ref<Store> store,
Realise mode, OperateOn operateOn,
ref<Installable> installable)
{
auto paths = toStorePaths(evalStore, store, mode, operateOn, {installable});
auto paths = toStorePathSet(evalStore, store, mode, operateOn, {installable});
if (paths.size() != 1)
throw Error("argument '%s' should evaluate to one store path", installable->what());
@ -690,7 +776,7 @@ StorePathSet Installable::toDerivations(
: throw Error("argument '%s' did not evaluate to a derivation", i->what()));
},
[&](const DerivedPath::Built & bfd) {
drvPaths.insert(bfd.drvPath);
drvPaths.insert(resolveDerivedPath(*store, *bfd.drvPath));
},
}, b.path.raw());
@ -708,9 +794,7 @@ RawInstallablesCommand::RawInstallablesCommand()
expectArgs({
.label = "installables",
.handler = {&rawInstallables},
.completer = {[&](size_t, std::string_view prefix) {
completeInstallable(prefix);
}}
.completer = getCompleteInstallable(),
});
}
@ -723,6 +807,17 @@ void RawInstallablesCommand::applyDefaultInstallables(std::vector<std::string> &
}
}
std::vector<FlakeRef> RawInstallablesCommand::getFlakeRefsForCompletion()
{
applyDefaultInstallables(rawInstallables);
std::vector<FlakeRef> res;
for (auto i : rawInstallables)
res.push_back(parseFlakeRefWithFragment(
expandTilde(i),
absPath(getCommandBaseDir())).first);
return res;
}
void RawInstallablesCommand::run(ref<Store> store)
{
if (readFromStdIn && !isatty(STDIN_FILENO)) {
@ -736,10 +831,13 @@ void RawInstallablesCommand::run(ref<Store> store)
run(store, std::move(rawInstallables));
}
std::vector<std::string> RawInstallablesCommand::getFlakesForCompletion()
std::vector<FlakeRef> InstallableCommand::getFlakeRefsForCompletion()
{
applyDefaultInstallables(rawInstallables);
return rawInstallables;
return {
parseFlakeRefWithFragment(
expandTilde(_installable),
absPath(getCommandBaseDir())).first
};
}
void InstallablesCommand::run(ref<Store> store, std::vector<std::string> && rawInstallables)
@ -755,9 +853,7 @@ InstallableCommand::InstallableCommand()
.label = "installable",
.optional = true,
.handler = {&_installable},
.completer = {[&](size_t, std::string_view prefix) {
completeInstallable(prefix);
}}
.completer = getCompleteInstallable(),
});
}
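
The rename above splits the old accessor in two: `toStorePathSet` keeps the deduplicating `StorePathSet` behaviour, while the new `toStorePaths` returns an ordered `StorePaths` vector. A usage sketch (hypothetical caller; the `Realise`/`OperateOn` arguments are just one plausible choice):

    #include "installables.hh"
    #include "store-api.hh"
    #include <iostream>

    using namespace nix;

    void printOutPathsInOrder(
        ref<Store> evalStore, ref<Store> store, const Installables & installables)
    {
        // Keeps the order (and duplicates) in which the installables produced
        // their outputs; use toStorePathSet() when a set is wanted instead.
        auto paths = Installable::toStorePaths(
            evalStore, store, Realise::Outputs, OperateOn::Output, installables);
        for (auto & p : paths)
            std::cout << store->printStorePath(p) << "\n";
    }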


@ -1,10 +1,10 @@
#pragma once
///@file
#include "util.hh"
#include "path.hh"
#include "outputs-spec.hh"
#include "derived-path.hh"
#include "built-path.hh"
#include "store-api.hh"
#include "build-result.hh"
@ -165,7 +165,14 @@ struct Installable
const Installables & installables,
BuildMode bMode = bmNormal);
static std::set<StorePath> toStorePaths(
static std::set<StorePath> toStorePathSet(
ref<Store> evalStore,
ref<Store> store,
Realise mode,
OperateOn operateOn,
const Installables & installables);
static std::vector<StorePath> toStorePaths(
ref<Store> evalStore,
ref<Store> store,
Realise mode,


@ -8,8 +8,8 @@ libcmd_SOURCES := $(wildcard $(d)/*.cc)
libcmd_CXXFLAGS += -I src/libutil -I src/libstore -I src/libexpr -I src/libmain -I src/libfetchers
libcmd_LDFLAGS = $(EDITLINE_LIBS) $(LOWDOWN_LIBS) -pthread
libcmd_LDFLAGS = $(EDITLINE_LIBS) $(LOWDOWN_LIBS) $(THREAD_LDFLAGS)
libcmd_LIBS = libstore libutil libexpr libmain libfetchers
$(eval $(call install-file-in, $(d)/nix-cmd.pc, $(libdir)/pkgconfig, 0644))
$(eval $(call install-file-in, $(buildprefix)$(d)/nix-cmd.pc, $(libdir)/pkgconfig, 0644))


@ -1,14 +1,18 @@
#include "markdown.hh"
#include "util.hh"
#include "finally.hh"
#include "terminal.hh"
#include <sys/queue.h>
#if HAVE_LOWDOWN
#include <lowdown.h>
#endif
namespace nix {
std::string renderMarkdownToTerminal(std::string_view markdown)
{
#if HAVE_LOWDOWN
int windowWidth = getWindowSize().second;
struct lowdown_opts opts {
@ -47,6 +51,9 @@ std::string renderMarkdownToTerminal(std::string_view markdown)
throw Error("allocation error while rendering Markdown");
return filterANSIEscapes(std::string(buf->data, buf->size), !shouldANSI());
#else
return std::string(markdown);
#endif
}
}
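
With the `HAVE_LOWDOWN` guard added above, `renderMarkdownToTerminal` degrades gracefully: when Nix is built without lowdown it simply returns the markdown unrendered, so callers need no fallback of their own. A trivial caller sketch (hypothetical):

    #include "markdown.hh"
    #include <iostream>
    #include <string_view>

    void printDoc(std::string_view markdown)
    {
        // ANSI-styled output when lowdown is available, raw markdown otherwise.
        std::cout << nix::renderMarkdownToTerminal(markdown) << std::endl;
    }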


@ -5,7 +5,7 @@
#include <setjmp.h>
#ifdef READLINE
#ifdef USE_READLINE
#include <readline/history.h>
#include <readline/readline.h>
#else
@ -22,10 +22,12 @@ extern "C" {
#include "repl.hh"
#include "ansicolor.hh"
#include "signals.hh"
#include "shared.hh"
#include "eval.hh"
#include "eval-cache.hh"
#include "eval-inline.hh"
#include "eval-settings.hh"
#include "attr-path.hh"
#include "store-api.hh"
#include "log-store.hh"
@ -35,11 +37,12 @@ extern "C" {
#include "globals.hh"
#include "flake/flake.hh"
#include "flake/lockfile.hh"
#include "users.hh"
#include "terminal.hh"
#include "editor-for.hh"
#include "finally.hh"
#include "markdown.hh"
#include "local-fs-store.hh"
#include "progress-bar.hh"
#include "print.hh"
#if HAVE_BOEHMGC
@ -68,7 +71,7 @@ struct NixRepl
const Path historyFile;
NixRepl(const Strings & searchPath, nix::ref<Store> store,ref<EvalState> state,
NixRepl(const SearchPath & searchPath, nix::ref<Store> store,ref<EvalState> state,
std::function<AnnotatedValues()> getValues);
virtual ~NixRepl();
@ -104,12 +107,12 @@ std::string removeWhitespace(std::string s)
}
NixRepl::NixRepl(const Strings & searchPath, nix::ref<Store> store, ref<EvalState> state,
NixRepl::NixRepl(const SearchPath & searchPath, nix::ref<Store> store, ref<EvalState> state,
std::function<NixRepl::AnnotatedValues()> getValues)
: AbstractNixRepl(state)
, debugTraceIndex(0)
, getValues(getValues)
, staticEnv(new StaticEnv(false, state->staticBaseEnv.get()))
, staticEnv(new StaticEnv(nullptr, state->staticBaseEnv.get()))
, historyFile(getDataDir() + "/nix/repl-history")
{
}
@ -218,7 +221,7 @@ static std::ostream & showDebugTrace(std::ostream & out, const PosTable & positi
// prefer direct pos, but if noPos then try the expr.
auto pos = dt.pos
? dt.pos
: static_cast<std::shared_ptr<AbstractPos>>(positions[dt.expr.getPos() ? dt.expr.getPos() : noPos]);
: positions[dt.expr.getPos() ? dt.expr.getPos() : noPos];
if (pos) {
out << pos;
@ -246,25 +249,23 @@ void NixRepl::mainLoop()
} catch (SysError & e) {
logWarning(e.info());
}
#ifndef READLINE
#ifndef USE_READLINE
el_hist_size = 1000;
#endif
read_history(historyFile.c_str());
auto oldRepl = curRepl;
curRepl = this;
Finally restoreRepl([&] { curRepl = oldRepl; });
#ifndef READLINE
#ifndef USE_READLINE
rl_set_complete_func(completionCallback);
rl_set_list_possib_func(listPossibleCallback);
#endif
/* Stop the progress bar because it interferes with the display of
the repl. */
stopProgressBar();
std::string input;
while (true) {
// Hide the progress bar while waiting for user input, so that it won't interfere.
logger->pause();
// When continuing input from previous lines, don't print a prompt, just align to the same
// number of chars as the prompt.
if (!getLine(input, input.empty() ? "nix-repl> " : " ")) {
@ -274,6 +275,7 @@ void NixRepl::mainLoop()
logger->cout("");
break;
}
logger->resume();
try {
if (!removeWhitespace(input).empty() && !processLine(input)) return;
} catch (ParseError & e) {
@ -487,35 +489,40 @@ bool NixRepl::processLine(std::string line)
std::cout
<< "The following commands are available:\n"
<< "\n"
<< " <expr> Evaluate and print expression\n"
<< " <x> = <expr> Bind expression to variable\n"
<< " :a <expr> Add attributes from resulting set to scope\n"
<< " :b <expr> Build a derivation\n"
<< " :bl <expr> Build a derivation, creating GC roots in the working directory\n"
<< " :e <expr> Open package or function in $EDITOR\n"
<< " :i <expr> Build derivation, then install result into current profile\n"
<< " :l <path> Load Nix expression and add it to scope\n"
<< " :lf <ref> Load Nix flake and add it to scope\n"
<< " :p <expr> Evaluate and print expression recursively\n"
<< " :q Exit nix-repl\n"
<< " :r Reload all files\n"
<< " :sh <expr> Build dependencies of derivation, then start nix-shell\n"
<< " :t <expr> Describe result of evaluation\n"
<< " :u <expr> Build derivation, then start nix-shell\n"
<< " :doc <expr> Show documentation of a builtin function\n"
<< " :log <expr> Show logs for a derivation\n"
<< " :te [bool] Enable, disable or toggle showing traces for errors\n"
<< " <expr> Evaluate and print expression\n"
<< " <x> = <expr> Bind expression to variable\n"
<< " :a, :add <expr> Add attributes from resulting set to scope\n"
<< " :b <expr> Build a derivation\n"
<< " :bl <expr> Build a derivation, creating GC roots in the\n"
<< " working directory\n"
<< " :e, :edit <expr> Open package or function in $EDITOR\n"
<< " :i <expr> Build derivation, then install result into\n"
<< " current profile\n"
<< " :l, :load <path> Load Nix expression and add it to scope\n"
<< " :lf, :load-flake <ref> Load Nix flake and add it to scope\n"
<< " :p, :print <expr> Evaluate and print expression recursively\n"
<< " :q, :quit Exit nix-repl\n"
<< " :r, :reload Reload all files\n"
<< " :sh <expr> Build dependencies of derivation, then start\n"
<< " nix-shell\n"
<< " :t <expr> Describe result of evaluation\n"
<< " :u <expr> Build derivation, then start nix-shell\n"
<< " :doc <expr> Show documentation of a builtin function\n"
<< " :log <expr> Show logs for a derivation\n"
<< " :te, :trace-enable [bool] Enable, disable or toggle showing traces for\n"
<< " errors\n"
<< " :?, :help Brings up this help menu\n"
;
if (state->debugRepl) {
std::cout
<< "\n"
<< " Debug mode commands\n"
<< " :env Show env stack\n"
<< " :bt Show trace stack\n"
<< " :st Show current trace\n"
<< " :st <idx> Change to another trace in the stack\n"
<< " :c Go until end of program, exception, or builtins.break\n"
<< " :s Go one step\n"
<< " :env Show env stack\n"
<< " :bt, :backtrace Show trace stack\n"
<< " :st Show current trace\n"
<< " :st <idx> Change to another trace in the stack\n"
<< " :c, :continue Go until end of program, exception, or builtins.break\n"
<< " :s, :step Go one step\n"
;
}
@ -647,7 +654,7 @@ bool NixRepl::processLine(std::string line)
if (command == ":b" || command == ":bl") {
state->store->buildPaths({
DerivedPath::Built {
.drvPath = drvPath,
.drvPath = makeConstantStorePathRef(drvPath),
.outputs = OutputsSpec::All { },
},
});
@ -881,7 +888,7 @@ void NixRepl::evalString(std::string s, Value & v)
{
Expr * e = parseString(s);
e->eval(*state, *env, v);
state->forceValue(v, [&]() { return v.determinePos(noPos); });
state->forceValue(v, v.determinePos(noPos));
}
@ -900,7 +907,7 @@ std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int m
str.flush();
checkInterrupt();
state->forceValue(v, [&]() { return v.determinePos(noPos); });
state->forceValue(v, v.determinePos(noPos));
switch (v.type()) {
@ -916,7 +923,7 @@ std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int m
case nString:
str << ANSI_WARNING;
printLiteralString(str, v.string.s);
printLiteralString(str, v.string_view());
str << ANSI_NORMAL;
break;
@ -1024,7 +1031,7 @@ std::ostream & NixRepl::printValue(std::ostream & str, Value & v, unsigned int m
std::unique_ptr<AbstractNixRepl> AbstractNixRepl::create(
const Strings & searchPath, nix::ref<Store> store, ref<EvalState> state,
const SearchPath & searchPath, nix::ref<Store> store, ref<EvalState> state,
std::function<AnnotatedValues()> getValues)
{
return std::make_unique<NixRepl>(
@ -1044,7 +1051,7 @@ void AbstractNixRepl::runSimple(
NixRepl::AnnotatedValues values;
return values;
};
const Strings & searchPath = {};
SearchPath searchPath = {};
auto repl = std::make_unique<NixRepl>(
searchPath,
openStore(),


@ -25,7 +25,7 @@ struct AbstractNixRepl
typedef std::vector<std::pair<Value*,std::string>> AnnotatedValues;
static std::unique_ptr<AbstractNixRepl> create(
const Strings & searchPath, nix::ref<Store> store, ref<EvalState> state,
const SearchPath & searchPath, nix::ref<Store> store, ref<EvalState> state,
std::function<AnnotatedValues()> getValues);
static void runSimple(


@ -1,6 +1,5 @@
#include "attr-path.hh"
#include "eval-inline.hh"
#include "util.hh"
namespace nix {
@ -132,7 +131,7 @@ std::pair<SourcePath, uint32_t> findPackageFilename(EvalState & state, Value & v
if (colon == std::string::npos) fail();
std::string filename(fn, 0, colon);
auto lineno = std::stoi(std::string(fn, colon + 1, std::string::npos));
return {CanonPath(fn.substr(0, colon)), lineno};
return {SourcePath{path.accessor, CanonPath(fn.substr(0, colon))}, lineno};
} catch (std::invalid_argument & e) {
fail();
abort();


@ -1,3 +1,4 @@
#include "users.hh"
#include "eval-cache.hh"
#include "sqlite.hh"
#include "eval.hh"
@ -21,7 +22,7 @@ struct AttrDb
{
std::atomic_bool failed{false};
const Store & cfg;
const StoreDirConfig & cfg;
struct State
{
@ -38,7 +39,7 @@ struct AttrDb
SymbolTable & symbols;
AttrDb(
const Store & cfg,
const StoreDirConfig & cfg,
const Hash & fingerprint,
SymbolTable & symbols)
: cfg(cfg)
@ -50,7 +51,7 @@ struct AttrDb
Path cacheDir = getCacheDir() + "/nix/eval-cache-v5";
createDirs(cacheDir);
Path dbPath = cacheDir + "/" + fingerprint.to_string(Base16, false) + ".sqlite";
Path dbPath = cacheDir + "/" + fingerprint.to_string(HashFormat::Base16, false) + ".sqlite";
state->db = SQLite(dbPath);
state->db.isCache();
@ -322,7 +323,7 @@ struct AttrDb
};
static std::shared_ptr<AttrDb> makeAttrDb(
const Store & cfg,
const StoreDirConfig & cfg,
const Hash & fingerprint,
SymbolTable & symbols)
{
@ -440,8 +441,8 @@ Value & AttrCursor::forceValue()
if (root->db && (!cachedValue || std::get_if<placeholder_t>(&cachedValue->second))) {
if (v.type() == nString)
cachedValue = {root->db->setString(getKey(), v.string.s, v.string.context),
string_t{v.string.s, {}}};
cachedValue = {root->db->setString(getKey(), v.c_str(), v.context()),
string_t{v.c_str(), {}}};
else if (v.type() == nPath) {
auto path = v.path().path;
cachedValue = {root->db->setString(getKey(), path.abs()), string_t{path.abs(), {}}};
@ -582,7 +583,7 @@ std::string AttrCursor::getString()
if (v.type() != nString && v.type() != nPath)
root->state.error("'%s' is not a string but %s", getAttrPathStr()).debugThrow<TypeError>();
return v.type() == nString ? v.string.s : v.path().to_string();
return v.type() == nString ? v.c_str() : v.path().to_string();
}
string_t AttrCursor::getStringWithContext()
@ -599,12 +600,12 @@ string_t AttrCursor::getStringWithContext()
return d.drvPath;
},
[&](const NixStringContextElem::Built & b) -> const StorePath & {
return b.drvPath;
return b.drvPath->getBaseStorePath();
},
[&](const NixStringContextElem::Opaque & o) -> const StorePath & {
return o.path;
},
}, c.raw());
}, c.raw);
if (!root->state.store->isValidPath(path)) {
valid = false;
break;
@ -624,7 +625,7 @@ string_t AttrCursor::getStringWithContext()
if (v.type() == nString) {
NixStringContext context;
copyContext(v, context);
return {v.string.s, std::move(context)};
return {v.c_str(), std::move(context)};
}
else if (v.type() == nPath)
return {v.path().to_string(), {}};


@ -73,8 +73,6 @@ Env & EvalState::allocEnv(size_t size)
#endif
env = (Env *) allocBytes(sizeof(Env) + size * sizeof(Value *));
env->type = Env::Plain;
/* We assume that env->values has been cleared by the allocator; maybeThunk() and lookupVar fromWith expect this. */
return *env;
@ -83,13 +81,6 @@ Env & EvalState::allocEnv(size_t size)
[[gnu::always_inline]]
void EvalState::forceValue(Value & v, const PosIdx pos)
{
forceValue(v, [&]() { return pos; });
}
template<typename Callable>
void EvalState::forceValue(Value & v, Callable getPos)
{
if (v.isThunk()) {
Env * env = v.thunk.env;
@ -100,13 +91,12 @@ void EvalState::forceValue(Value & v, Callable getPos)
expr->eval(*this, *env, v);
} catch (...) {
v.mkThunk(env, expr);
tryFixupBlackHolePos(v, pos);
throw;
}
}
else if (v.isApp())
callFunction(*v.app.left, *v.app.right, v, noPos);
else if (v.isBlackhole())
error("infinite recursion encountered").atPos(getPos()).template debugThrow<EvalError>();
callFunction(*v.app.left, *v.app.right, v, pos);
}
@ -121,9 +111,9 @@ template <typename Callable>
[[gnu::always_inline]]
inline void EvalState::forceAttrs(Value & v, Callable getPos, std::string_view errorCtx)
{
forceValue(v, noPos);
PosIdx pos = getPos();
forceValue(v, pos);
if (v.type() != nAttrs) {
PosIdx pos = getPos();
error("value is %1% while a set was expected", showType(v)).withTrace(pos, errorCtx).debugThrow<TypeError>();
}
}
@ -132,7 +122,7 @@ inline void EvalState::forceAttrs(Value & v, Callable getPos, std::string_view e
[[gnu::always_inline]]
inline void EvalState::forceList(Value & v, const PosIdx pos, std::string_view errorCtx)
{
forceValue(v, noPos);
forceValue(v, pos);
if (!v.isList()) {
error("value is %1% while a list was expected", showType(v)).withTrace(pos, errorCtx).debugThrow<TypeError>();
}


@ -0,0 +1,109 @@
#include "users.hh"
#include "globals.hh"
#include "profiles.hh"
#include "eval.hh"
#include "eval-settings.hh"
namespace nix {
/* Very hacky way to parse $NIX_PATH, which is colon-separated, but
can contain URLs (e.g. "nixpkgs=https://bla...:foo=https://"). */
static Strings parseNixPath(const std::string & s)
{
Strings res;
auto p = s.begin();
while (p != s.end()) {
auto start = p;
auto start2 = p;
while (p != s.end() && *p != ':') {
if (*p == '=') start2 = p + 1;
++p;
}
if (p == s.end()) {
if (p != start) res.push_back(std::string(start, p));
break;
}
if (*p == ':') {
auto prefix = std::string(start2, s.end());
if (EvalSettings::isPseudoUrl(prefix) || hasPrefix(prefix, "flake:")) {
++p;
while (p != s.end() && *p != ':') ++p;
}
res.push_back(std::string(start, p));
if (p == s.end()) break;
}
++p;
}
return res;
}
EvalSettings::EvalSettings()
{
auto var = getEnv("NIX_PATH");
if (var) nixPath = parseNixPath(*var);
}
Strings EvalSettings::getDefaultNixPath()
{
Strings res;
auto add = [&](const Path & p, const std::string & s = std::string()) {
if (pathAccessible(p)) {
if (s.empty()) {
res.push_back(p);
} else {
res.push_back(s + "=" + p);
}
}
};
if (!evalSettings.restrictEval && !evalSettings.pureEval) {
add(getNixDefExpr() + "/channels");
add(rootChannelsDir() + "/nixpkgs", "nixpkgs");
add(rootChannelsDir());
}
return res;
}
bool EvalSettings::isPseudoUrl(std::string_view s)
{
if (s.compare(0, 8, "channel:") == 0) return true;
size_t pos = s.find("://");
if (pos == std::string::npos) return false;
std::string scheme(s, 0, pos);
return scheme == "http" || scheme == "https" || scheme == "file" || scheme == "channel" || scheme == "git" || scheme == "s3" || scheme == "ssh";
}
std::string EvalSettings::resolvePseudoUrl(std::string_view url)
{
if (hasPrefix(url, "channel:"))
return "https://nixos.org/channels/" + std::string(url.substr(8)) + "/nixexprs.tar.xz";
else
return std::string(url);
}
const std::string & EvalSettings::getCurrentSystem()
{
const auto & evalSystem = currentSystem.get();
return evalSystem != "" ? evalSystem : settings.thisSystem.get();
}
EvalSettings evalSettings;
static GlobalConfig::Register rEvalSettings(&evalSettings);
Path getNixDefExpr()
{
return settings.useXDGBaseDirectories
? getStateDir() + "/nix/defexpr"
: getHome() + "/.nix-defexpr";
}
}
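
A quick, hypothetical usage sketch (not part of this diff) of the two static helpers defined above; it assumes the `eval-settings.hh` header introduced below is available and that the program links against libexpr.

```cpp
#include "eval-settings.hh"
#include <cassert>

int main()
{
    using nix::EvalSettings;

    // "channel:" references and URL-like entries count as pseudo-URLs...
    assert(EvalSettings::isPseudoUrl("channel:nixos-23.05"));
    assert(EvalSettings::isPseudoUrl("https://example.org/nixexprs.tar.xz"));
    // ...while plain names and paths do not.
    assert(!EvalSettings::isPseudoUrl("nixpkgs"));

    // "channel:" references resolve to the corresponding channel tarball URL.
    assert(EvalSettings::resolvePseudoUrl("channel:nixos-23.05")
        == "https://nixos.org/channels/nixos-23.05/nixexprs.tar.xz");
    return 0;
}
```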

View file

@ -0,0 +1,139 @@
#pragma once
///@file
#include "config.hh"
namespace nix {
struct EvalSettings : Config
{
EvalSettings();
static Strings getDefaultNixPath();
static bool isPseudoUrl(std::string_view s);
static std::string resolvePseudoUrl(std::string_view url);
Setting<bool> enableNativeCode{this, false, "allow-unsafe-native-code-during-evaluation",
"Whether builtin functions that allow executing native code should be enabled."};
Setting<Strings> nixPath{
this, getDefaultNixPath(), "nix-path",
R"(
List of directories to be searched for `<...>` file references
In particular, outside of [pure evaluation mode](#conf-pure-eval), this determines the value of
[`builtins.nixPath`](@docroot@/language/builtin-constants.md#builtins-nixPath).
)"};
Setting<std::string> currentSystem{
this, "", "eval-system",
R"(
This option defines
[`builtins.currentSystem`](@docroot@/language/builtin-constants.md#builtins-currentSystem)
in the Nix language if it is set as a non-empty string.
Otherwise, if it is defined as the empty string (the default), the value of the
[`system` ](#conf-system)
configuration setting is used instead.
Unlike `system`, this setting does not change what kind of derivations can be built locally.
This is useful for evaluating Nix code on one system to produce derivations to be built on another type of system.
)"};
/**
* Implements the `eval-system` vs `system` defaulting logic
* described for `eval-system`.
*/
const std::string & getCurrentSystem();
Setting<bool> restrictEval{
this, false, "restrict-eval",
R"(
If set to `true`, the Nix evaluator will not allow access to any
files outside of
[`builtins.nixPath`](@docroot@/language/builtin-constants.md#builtins-nixPath),
or to URIs outside of
[`allowed-uris`](@docroot@/command-ref/conf-file.md#conf-allowed-uris).
Also the default value for [`nix-path`](#conf-nix-path) is ignored, such that only explicitly set search path entries are taken into account.
)"};
Setting<bool> pureEval{this, false, "pure-eval",
R"(
Pure evaluation mode ensures that the result of Nix expressions is fully determined by explicitly declared inputs, and not influenced by external state:
- Restrict file system and network access to files specified by cryptographic hash
- Disable impure constants:
- [`builtins.currentSystem`](@docroot@/language/builtin-constants.md#builtins-currentSystem)
- [`builtins.currentTime`](@docroot@/language/builtin-constants.md#builtins-currentTime)
- [`builtins.nixPath`](@docroot@/language/builtin-constants.md#builtins-nixPath)
)"
};
Setting<bool> enableImportFromDerivation{
this, true, "allow-import-from-derivation",
R"(
By default, Nix allows [Import from Derivation](@docroot@/language/import-from-derivation.md).
With this option set to `false`, Nix will throw an error when evaluating an expression that uses this feature,
even when the required store object is readily available.
This ensures that evaluation will not require any builds to take place,
regardless of the state of the store.
)"};
Setting<Strings> allowedUris{this, {}, "allowed-uris",
R"(
A list of URI prefixes to which access is allowed in restricted
evaluation mode. For example, when set to
`https://github.com/NixOS`, builtin functions such as `fetchGit` are
allowed to access `https://github.com/NixOS/patchelf.git`.
Access is granted when
- the URI is equal to the prefix,
- or the URI is a subpath of the prefix,
- or the prefix is a URI scheme followed by a colon `:` and the URI has the same scheme.
)"};
Setting<bool> traceFunctionCalls{this, false, "trace-function-calls",
R"(
If set to `true`, the Nix evaluator will trace every function call.
Nix will print a log message at the "vomit" level for every function
entrance and function exit.
function-trace entered undefined position at 1565795816999559622
function-trace exited undefined position at 1565795816999581277
function-trace entered /nix/store/.../example.nix:226:41 at 1565795253249935150
function-trace exited /nix/store/.../example.nix:226:41 at 1565795253249941684
The `undefined position` means the function call is a builtin.
Use the `contrib/stack-collapse.py` script distributed with the Nix
source code to convert the trace logs into a format suitable for
`flamegraph.pl`.
)"};
Setting<bool> useEvalCache{this, true, "eval-cache",
"Whether to use the flake evaluation cache."};
Setting<bool> ignoreExceptionsDuringTry{this, false, "ignore-try",
R"(
If set to true, ignore exceptions inside 'tryEval' calls when evaluating nix expressions in
debug mode (using the --debugger flag). By default the debugger will pause on all exceptions.
)"};
Setting<bool> traceVerbose{this, false, "trace-verbose",
"Whether `builtins.traceVerbose` should trace its first argument when evaluated."};
Setting<unsigned int> maxCallDepth{this, 10000, "max-call-depth",
"The maximum function call depth to allow before erroring."};
};
extern EvalSettings evalSettings;
/**
* Conventionally part of the default nix path in impure mode.
*/
Path getNixDefExpr();
}

File diff suppressed because it is too large

View file

@ -9,6 +9,7 @@
#include "config.hh"
#include "experimental-features.hh"
#include "input-accessor.hh"
#include "search-path.hh"
#include <map>
#include <optional>
@ -17,23 +18,93 @@
namespace nix {
/**
* We put a limit on primop arity because it lets us use a fixed size array on
* the stack. 8 is already an impractical number of arguments. Use an attrset
* argument for such overly complicated functions.
*/
constexpr size_t maxPrimOpArity = 8;
class Store;
class EvalState;
class StorePath;
struct DerivedPath;
struct SingleDerivedPath;
enum RepairFlag : bool;
struct MemoryInputAccessor;
/**
* Function that implements a primop.
*/
typedef void (* PrimOpFun) (EvalState & state, const PosIdx pos, Value * * args, Value & v);
/**
* Info about a primitive operation, and its implementation
*/
struct PrimOp
{
PrimOpFun fun;
size_t arity;
/**
* Name of the primop. `__` prefix is treated specially.
*/
std::string name;
/**
* Names of the parameters of a primop, for primops that take a
* fixed number of arguments to be substituted for these parameters.
*/
std::vector<std::string> args;
/**
* Arity of the primop.
*
* If `args` is not empty, this field will be computed from that
* field instead, so it doesn't need to be manually set.
*/
size_t arity = 0;
/**
* Optional free-form documentation about the primop.
*/
const char * doc = nullptr;
/**
* Implementation of the primop.
*/
PrimOpFun fun;
/**
* Optional experimental feature that this primop is gated on.
*/
std::optional<ExperimentalFeature> experimentalFeature;
/**
* Validity check to be performed by functions that introduce primops,
* such as RegisterPrimOp() and Value::mkPrimOp().
*/
void check();
};
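
To make the role of these fields concrete, here is a hypothetical sketch (not part of this diff) of how a primop would be declared with the new designated-initializer style; it mirrors the `RegisterPrimOp` calls for `__parseFlakeRef` and `__flakeRefToString` further down in this commit. The `__greet` primop itself is invented for illustration.

```cpp
static void prim_greet(EvalState & state, const PosIdx pos, Value * * args, Value & v)
{
    // Force the single argument to a context-free string, as other primops do.
    auto name = state.forceStringNoCtx(*args[0], pos,
        "while evaluating the argument passed to builtins.greet");
    v.mkString("hello " + std::string(name));
}

static RegisterPrimOp primop_greet({
    .name = "__greet",
    .args = {"name"},   // arity is derived from `args`, so it is not set here
    .doc = R"(
      Return the string `"hello <name>"`. (Hypothetical example.)
    )",
    .fun = prim_greet,
});
```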
/**
* Info about a constant
*/
struct Constant
{
/**
* Optional type of the constant (known since it is a fixed value).
*
* @todo we should use an enum for this.
*/
ValueType type = nThunk;
/**
* Optional free-form documentation about the constant.
*/
const char * doc = nullptr;
/**
* Whether the constant is impure, and not available in pure mode.
*/
bool impureOnly = false;
};
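
For comparison, a hypothetical sketch (not part of this diff) of how this metadata is meant to be used when a constant is registered via the `addConstant` overloads declared later in this header; the constant name and value are invented, and the fragment would live inside `EvalState::createBaseEnv`.

```cpp
// Sketch of a registration inside EvalState::createBaseEnv() (hypothetical):
{
    Value v;
    v.mkString("example");
    addConstant("__exampleConstant", v, {
        .type = nString,
        .doc = R"(
          A hypothetical constant, shown only to illustrate the fields above.
        )",
        .impureOnly = true,   // hidden when pure-eval is enabled
    });
}
```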
#if HAVE_BOEHMGC
@ -45,11 +116,6 @@ struct PrimOp
struct Env
{
Env * up;
/**
* Number of levels up to the next `with` environment
*/
unsigned short prevWith:14;
enum { Plain = 0, HasWithExpr, HasWithAttrs } type:2;
Value * values[0];
};
@ -65,11 +131,6 @@ std::string printValue(const EvalState & state, const Value & v);
std::ostream & operator << (std::ostream & os, const ValueType t);
// FIXME: maybe change this to an std::variant<SourcePath, URL>.
typedef std::pair<std::string, std::string> SearchPathElem;
typedef std::list<SearchPathElem> SearchPath;
/**
* Initialise the Boehm GC, if applicable.
*/
@ -81,7 +142,7 @@ struct RegexCache;
std::shared_ptr<RegexCache> makeRegexCache();
struct DebugTrace {
std::shared_ptr<AbstractPos> pos;
std::shared_ptr<Pos> pos;
const Expr & expr;
const Env & env;
hintformat hint;
@ -150,16 +211,33 @@ public:
*/
RepairFlag repair;
/**
* The allowed filesystem paths in restricted or pure evaluation
* mode.
*/
std::optional<PathSet> allowedPaths;
Bindings emptyBindings;
/**
* Empty list constant.
*/
Value vEmptyList;
/**
* The accessor for the root filesystem.
*/
const ref<InputAccessor> rootFS;
/**
* The in-memory filesystem for <nix/...> paths.
*/
const ref<MemoryInputAccessor> corepkgsFS;
/**
* In-memory filesystem for internal, non-user-callable Nix
* expressions like call-flake.nix.
*/
const ref<MemoryInputAccessor> internalFS;
const SourcePath derivationInternal;
const SourcePath callFlakeInternal;
/**
* Store used to materialise .drv files.
*/
@ -170,7 +248,6 @@ public:
*/
const ref<Store> buildStore;
RootValue vCallFlake = nullptr;
RootValue vImportedDrvToDerivation = nullptr;
/**
@ -256,12 +333,7 @@ private:
SearchPath searchPath;
std::map<std::string, std::pair<bool, std::string>> searchPathResolved;
/**
* Cache used by checkSourcePath().
*/
std::unordered_map<Path, SourcePath> resolvedPaths;
std::map<std::string, std::optional<std::string>> searchPathResolved;
/**
* Cache used by prim_match().
@ -283,13 +355,11 @@ private:
public:
EvalState(
const Strings & _searchPath,
const SearchPath & _searchPath,
ref<Store> store,
std::shared_ptr<Store> buildStore = nullptr);
~EvalState();
void addToSearchPath(const std::string & s);
SearchPath getSearchPath() { return searchPath; }
/**
@ -314,12 +384,6 @@ public:
*/
void allowAndSetStorePathString(const StorePath & storePath, Value & v);
/**
* Check whether access to a path is allowed and throw an error if
* not. Otherwise return the canonicalised path.
*/
SourcePath checkSourcePath(const SourcePath & path);
void checkURI(const std::string & uri);
/**
@ -354,28 +418,24 @@ public:
*/
void evalFile(const SourcePath & path, Value & v, bool mustBeTrivial = false);
/**
* Like `evalFile`, but with an already parsed expression.
*/
void cacheFile(
const SourcePath & path,
const SourcePath & resolvedPath,
Expr * e,
Value & v,
bool mustBeTrivial = false);
void resetFileCache();
/**
* Look up a file in the search path.
*/
SourcePath findFile(const std::string_view path);
SourcePath findFile(SearchPath & searchPath, const std::string_view path, const PosIdx pos = noPos);
SourcePath findFile(const SearchPath & searchPath, const std::string_view path, const PosIdx pos = noPos);
/**
* Try to resolve a search path value (not the optional key part).
*
* If the specified search path element is a URI, download it.
*
* If it is not found, return `std::nullopt`
*/
std::pair<bool, std::string> resolveSearchPathElem(const SearchPathElem & elem);
std::optional<std::string> resolveSearchPathPath(
const SearchPath::Path & elem,
bool initAccessControl = false);
/**
* Evaluate an expression to normal form
@ -400,8 +460,7 @@ public:
*/
inline void forceValue(Value & v, const PosIdx pos);
template <typename Callable>
inline void forceValue(Value & v, Callable getPos);
void tryFixupBlackHolePos(Value & v, PosIdx pos);
/**
* Force a value, then recursively force list elements and
@ -475,12 +534,12 @@ public:
StorePath coerceToStorePath(const PosIdx pos, Value & v, NixStringContext & context, std::string_view errorCtx);
/**
* Part of `coerceToDerivedPath()` without any store IO which is exposed for unit testing only.
* Part of `coerceToSingleDerivedPath()` without any store IO which is exposed for unit testing only.
*/
std::pair<DerivedPath, std::string_view> coerceToDerivedPathUnchecked(const PosIdx pos, Value & v, std::string_view errorCtx);
std::pair<SingleDerivedPath, std::string_view> coerceToSingleDerivedPathUnchecked(const PosIdx pos, Value & v, std::string_view errorCtx);
/**
* Coerce to `DerivedPath`.
* Coerce to `SingleDerivedPath`.
*
* Must be a string which is either a literal store path or a
* "placeholder (see `DownstreamPlaceholder`).
@ -494,7 +553,7 @@ public:
* source of truth, and ultimately tells us what we want, and then
* we ensure the string corresponds to it.
*/
DerivedPath coerceToDerivedPath(const PosIdx pos, Value & v, std::string_view errorCtx);
SingleDerivedPath coerceToSingleDerivedPath(const PosIdx pos, Value & v, std::string_view errorCtx);
public:
@ -509,18 +568,23 @@ public:
*/
std::shared_ptr<StaticEnv> staticBaseEnv; // !!! should be private
/**
* Name and documentation about every constant.
*
* Constants from primops are hard to crawl, and their docs will go
* here too.
*/
std::vector<std::pair<std::string, Constant>> constantInfos;
private:
unsigned int baseEnvDispl = 0;
void createBaseEnv();
Value * addConstant(const std::string & name, Value & v);
Value * addConstant(const std::string & name, Value & v, Constant info);
void addConstant(const std::string & name, Value * v);
Value * addPrimOp(const std::string & name,
size_t arity, PrimOpFun primOp);
void addConstant(const std::string & name, Value * v, Constant info);
Value * addPrimOp(PrimOp && primOp);
@ -534,6 +598,10 @@ public:
std::optional<std::string> name;
size_t arity;
std::vector<std::string> args;
/**
* Unlike the other `doc` fields in this file, this one should never be
* `null`.
*/
const char * doc;
};
@ -554,6 +622,11 @@ private:
const SourcePath & basePath,
std::shared_ptr<StaticEnv> & staticEnv);
/**
* Current Nix call stack depth, used with the `max-call-depth` setting to throw a stack-overflow error before we actually run out of system stack.
*/
size_t callDepth = 0;
public:
/**
@ -602,40 +675,68 @@ public:
/**
* Create a string representing a store path.
*
* The string is the printed store path with a context containing a single
* `NixStringContextElem::Opaque` element of that store path.
* The string is the printed store path with a context containing a
* single `NixStringContextElem::Opaque` element of that store path.
*/
void mkStorePathString(const StorePath & storePath, Value & v);
/**
* Create a string representing a `DerivedPath::Built`.
* Create a string representing a `SingleDerivedPath::Built`.
*
* The string is the printed store path with a context containing a single
* `NixStringContextElem::Built` element of the drv path and output name.
* The string is the printed store path with a context containing a
* single `NixStringContextElem::Built` element of the drv path and
* output name.
*
* @param value Value we are setting
*
* @param drvPath Path the drv whose output we are making a string for
* @param b the drv whose output we are making a string for, and the
* output
*
* @param outputName Name of the output
* @param optStaticOutputPath Optional output path for that string.
* Must be passed if and only if output store object is
* input-addressed or fixed output. Will be printed to form string
* if passed, otherwise a placeholder will be used (see
* `DownstreamPlaceholder`).
*
* @param optOutputPath Optional output path for that string. Must
* be passed if and only if output store object is input-addressed.
* Will be printed to form string if passed, otherwise a placeholder
* will be used (see `DownstreamPlaceholder`).
* @param xpSettings Stop-gap to avoid globals during unit tests.
*/
void mkOutputString(
Value & value,
const StorePath & drvPath,
const std::string outputName,
std::optional<StorePath> optOutputPath);
const SingleDerivedPath::Built & b,
std::optional<StorePath> optStaticOutputPath,
const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
/**
* Create a string representing a `SingleDerivedPath`.
*
* A combination of `mkStorePathString` and `mkOutputString`.
*/
void mkSingleDerivedPathString(
const SingleDerivedPath & p,
Value & v);
void concatLists(Value & v, size_t nrLists, Value * * lists, const PosIdx pos, std::string_view errorCtx);
/**
* Print statistics.
* Print statistics, if enabled.
*
* Performs a full memory GC before printing the statistics, so that the
* GC statistics are more accurate.
*/
void printStats();
void maybePrintStats();
/**
* Print statistics, unconditionally, cheaply, without performing a GC first.
*/
void printStatistics();
/**
* Perform a full memory garbage collection - not incremental.
*
* @return true if Nix was built with GC and a GC was performed, false if not.
* Only the return value is currently not thread-safe.
*/
bool fullGC();
/**
* Realise the given context, and return a mapping from the placeholders
@ -643,8 +744,31 @@ public:
*/
[[nodiscard]] StringMap realiseContext(const NixStringContext & context);
/* Call the binary path filter predicate used by builtins.path etc. */
bool callPathFilter(
Value * filterFun,
const SourcePath & path,
std::string_view pathArg,
PosIdx pos);
private:
/**
* Like `mkOutputString` but just creates a raw string, not a
* string Value, which would also have a string context.
*/
std::string mkOutputStringRaw(
const SingleDerivedPath::Built & b,
std::optional<StorePath> optStaticOutputPath,
const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
/**
* Like `mkSingleDerivedPathString` but just creates a raw string,
* not a string Value, which would also have a string context.
*/
std::string mkSingleDerivedPathStringRaw(
const SingleDerivedPath & p);
unsigned long nrEnvs = 0;
unsigned long nrValuesInEnvs = 0;
unsigned long nrValues = 0;
@ -700,14 +824,22 @@ struct DebugTraceStacker {
/**
* @return A string representing the type of the value `v`.
*
* @param withArticle Whether to begin with an English article, e.g. "an
* integer" vs "integer".
*/
std::string_view showType(ValueType type);
std::string_view showType(ValueType type, bool withArticle = true);
std::string showType(const Value & v);
/**
* If `path` refers to a directory, then append "/default.nix".
*/
SourcePath resolveExprPath(const SourcePath & path);
SourcePath resolveExprPath(SourcePath path);
/**
* Whether a URI is allowed, assuming restrictEval is enabled
*/
bool isAllowedURI(std::string_view uri, const Strings & allowedPaths);
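
A hypothetical illustration (not part of this diff) of the matching rules documented for `allowed-uris` earlier in this commit (exact prefix, subpath of a prefix, or a prefix that is a scheme ending in `:`), exercised through the `isAllowedURI` helper declared just above; the URIs are made up.

```cpp
#include "eval.hh"
#include <cassert>

void allowedUriExamples()
{
    nix::Strings allowed = { "https://github.com/NixOS", "ssh:" };

    // Equal to a prefix.
    assert(nix::isAllowedURI("https://github.com/NixOS", allowed));
    // A subpath of a prefix.
    assert(nix::isAllowedURI("https://github.com/NixOS/patchelf.git", allowed));
    // Same scheme as a "scheme:" prefix.
    assert(nix::isAllowedURI("ssh://git@example.org/repo.git", allowed));
    // Merely sharing a string prefix is not enough.
    assert(!nix::isAllowedURI("https://github.com/NixOS-unrelated", allowed));
}
```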
struct InvalidPathError : EvalError
{
@ -718,95 +850,6 @@ struct InvalidPathError : EvalError
#endif
};
struct EvalSettings : Config
{
EvalSettings();
static Strings getDefaultNixPath();
static bool isPseudoUrl(std::string_view s);
static std::string resolvePseudoUrl(std::string_view url);
Setting<bool> enableNativeCode{this, false, "allow-unsafe-native-code-during-evaluation",
"Whether builtin functions that allow executing native code should be enabled."};
Setting<Strings> nixPath{
this, getDefaultNixPath(), "nix-path",
"List of directories to be searched for `<...>` file references."};
Setting<bool> restrictEval{
this, false, "restrict-eval",
R"(
If set to `true`, the Nix evaluator will not allow access to any
files outside of the Nix search path (as set via the `NIX_PATH`
environment variable or the `-I` option), or to URIs outside of
[`allowed-uris`](../command-ref/conf-file.md#conf-allowed-uris).
The default is `false`.
)"};
Setting<bool> pureEval{this, false, "pure-eval",
R"(
Pure evaluation mode ensures that the result of Nix expressions is fully determined by explicitly declared inputs, and not influenced by external state:
- Restrict file system and network access to files specified by cryptographic hash
- Disable [`builtins.currentSystem`](@docroot@/language/builtin-constants.md#builtins-currentSystem) and [`builtins.currentTime`](@docroot@/language/builtin-constants.md#builtins-currentTime)
)"
};
Setting<bool> enableImportFromDerivation{
this, true, "allow-import-from-derivation",
R"(
By default, Nix allows you to `import` from a derivation, allowing
building at evaluation time. With this option set to false, Nix will
throw an error when evaluating an expression that uses this feature,
allowing users to ensure their evaluation will not require any
builds to take place.
)"};
Setting<Strings> allowedUris{this, {}, "allowed-uris",
R"(
A list of URI prefixes to which access is allowed in restricted
evaluation mode. For example, when set to
`https://github.com/NixOS`, builtin functions such as `fetchGit` are
allowed to access `https://github.com/NixOS/patchelf.git`.
)"};
Setting<bool> traceFunctionCalls{this, false, "trace-function-calls",
R"(
If set to `true`, the Nix evaluator will trace every function call.
Nix will print a log message at the "vomit" level for every function
entrance and function exit.
function-trace entered undefined position at 1565795816999559622
function-trace exited undefined position at 1565795816999581277
function-trace entered /nix/store/.../example.nix:226:41 at 1565795253249935150
function-trace exited /nix/store/.../example.nix:226:41 at 1565795253249941684
The `undefined position` means the function call is a builtin.
Use the `contrib/stack-collapse.py` script distributed with the Nix
source code to convert the trace logs into a format suitable for
`flamegraph.pl`.
)"};
Setting<bool> useEvalCache{this, true, "eval-cache",
"Whether to use the flake evaluation cache."};
Setting<bool> ignoreExceptionsDuringTry{this, false, "ignore-try",
R"(
If set to true, ignore exceptions inside 'tryEval' calls when evaluating nix expressions in
debug mode (using the --debugger flag). By default the debugger will pause on all exceptions.
)"};
Setting<bool> traceVerbose{this, false, "trace-verbose",
"Whether `builtins.traceVerbose` should trace its first argument when evaluated."};
};
extern EvalSettings evalSettings;
static const std::string corepkgsPrefix{"/__corepkgs__/"};
template<class ErrorType>
void ErrorBuilder::debugThrow()
{

View file

@ -1,6 +1,7 @@
#include "flake.hh"
#include "users.hh"
#include "globals.hh"
#include "fetch-settings.hh"
#include "flake.hh"
#include <nlohmann/json.hpp>

View file

@ -1,5 +1,7 @@
#include "terminal.hh"
#include "flake.hh"
#include "eval.hh"
#include "eval-settings.hh"
#include "lockfile.hh"
#include "primops.hh"
#include "eval-inline.hh"
@ -7,6 +9,7 @@
#include "fetchers.hh"
#include "finally.hh"
#include "fetch-settings.hh"
#include "value-to-json.hh"
namespace nix {
@ -14,7 +17,7 @@ using namespace flake;
namespace flake {
typedef std::pair<fetchers::Tree, FlakeRef> FetchedFlake;
typedef std::pair<StorePath, FlakeRef> FetchedFlake;
typedef std::vector<std::pair<FlakeRef, FetchedFlake>> FlakeCache;
static std::optional<FetchedFlake> lookupInFlakeCache(
@ -33,7 +36,7 @@ static std::optional<FetchedFlake> lookupInFlakeCache(
return std::nullopt;
}
static std::tuple<fetchers::Tree, FlakeRef, FlakeRef> fetchOrSubstituteTree(
static std::tuple<StorePath, FlakeRef, FlakeRef> fetchOrSubstituteTree(
EvalState & state,
const FlakeRef & originalRef,
bool allowLookup,
@ -60,16 +63,16 @@ static std::tuple<fetchers::Tree, FlakeRef, FlakeRef> fetchOrSubstituteTree(
flakeCache.push_back({originalRef, *fetched});
}
auto [tree, lockedRef] = *fetched;
auto [storePath, lockedRef] = *fetched;
debug("got tree '%s' from '%s'",
state.store->printStorePath(tree.storePath), lockedRef);
state.store->printStorePath(storePath), lockedRef);
state.allowPath(tree.storePath);
state.allowPath(storePath);
assert(!originalRef.input.getNarHash() || tree.storePath == originalRef.input.computeStorePath(*state.store));
assert(!originalRef.input.getNarHash() || storePath == originalRef.input.computeStorePath(*state.store));
return {std::move(tree), resolvedRef, lockedRef};
return {std::move(storePath), resolvedRef, lockedRef};
}
static void forceTrivialValue(EvalState & state, Value & value, const PosIdx pos)
@ -112,7 +115,7 @@ static FlakeInput parseFlakeInput(EvalState & state,
try {
if (attr.name == sUrl) {
expectType(state, nString, *attr.value, attr.pos);
url = attr.value->string.s;
url = attr.value->string_view();
attrs.emplace("url", *url);
} else if (attr.name == sFlake) {
expectType(state, nBool, *attr.value, attr.pos);
@ -121,7 +124,7 @@ static FlakeInput parseFlakeInput(EvalState & state,
input.overrides = parseFlakeInputs(state, attr.value, attr.pos, baseDir, lockRootPath);
} else if (attr.name == sFollows) {
expectType(state, nString, *attr.value, attr.pos);
auto follows(parseInputPath(attr.value->string.s));
auto follows(parseInputPath(attr.value->c_str()));
follows.insert(follows.begin(), lockRootPath.begin(), lockRootPath.end());
input.follows = follows;
} else {
@ -130,7 +133,7 @@ static FlakeInput parseFlakeInput(EvalState & state,
#pragma GCC diagnostic ignored "-Wswitch-enum"
switch (attr.value->type()) {
case nString:
attrs.emplace(state.symbols[attr.name], attr.value->string.s);
attrs.emplace(state.symbols[attr.name], attr.value->c_str());
break;
case nBool:
attrs.emplace(state.symbols[attr.name], Explicit<bool> { attr.value->boolean });
@ -139,8 +142,13 @@ static FlakeInput parseFlakeInput(EvalState & state,
attrs.emplace(state.symbols[attr.name], (long unsigned int)attr.value->integer);
break;
default:
throw TypeError("flake input attribute '%s' is %s while a string, Boolean, or integer is expected",
state.symbols[attr.name], showType(*attr.value));
if (attr.name == state.symbols.create("publicKeys")) {
experimentalFeatureSettings.require(Xp::VerifiedFetches);
NixStringContext emptyContext = {};
attrs.emplace(state.symbols[attr.name], printValueAsJSON(state, true, *attr.value, pos, emptyContext).dump());
} else
throw TypeError("flake input attribute '%s' is %s while a string, Boolean, or integer is expected",
state.symbols[attr.name], showType(*attr.value));
}
#pragma GCC diagnostic pop
}
@ -201,34 +209,39 @@ static Flake getFlake(
FlakeCache & flakeCache,
InputPath lockRootPath)
{
auto [sourceInfo, resolvedRef, lockedRef] = fetchOrSubstituteTree(
auto [storePath, resolvedRef, lockedRef] = fetchOrSubstituteTree(
state, originalRef, allowLookup, flakeCache);
// We need to guard against symlink attacks, but before we start doing
// filesystem operations we should make sure there's a flake.nix in the
// first place.
auto unsafeFlakeDir = state.store->toRealPath(storePath) + "/" + lockedRef.subdir;
auto unsafeFlakeFile = unsafeFlakeDir + "/flake.nix";
if (!pathExists(unsafeFlakeFile))
throw Error("source tree referenced by '%s' does not contain a '%s/flake.nix' file", lockedRef, lockedRef.subdir);
// Guard against symlink attacks.
auto flakeDir = canonPath(sourceInfo.actualPath + "/" + lockedRef.subdir, true);
auto flakeDir = canonPath(unsafeFlakeDir, true);
auto flakeFile = canonPath(flakeDir + "/flake.nix", true);
if (!isInDir(flakeFile, sourceInfo.actualPath))
if (!isInDir(flakeFile, state.store->toRealPath(storePath)))
throw Error("'flake.nix' file of flake '%s' escapes from '%s'",
lockedRef, state.store->printStorePath(sourceInfo.storePath));
lockedRef, state.store->printStorePath(storePath));
Flake flake {
.originalRef = originalRef,
.resolvedRef = resolvedRef,
.lockedRef = lockedRef,
.sourceInfo = std::make_shared<fetchers::Tree>(std::move(sourceInfo))
.storePath = storePath,
};
if (!pathExists(flakeFile))
throw Error("source tree referenced by '%s' does not contain a '%s/flake.nix' file", lockedRef, lockedRef.subdir);
Value vInfo;
state.evalFile(CanonPath(flakeFile), vInfo, true); // FIXME: symlink attack
state.evalFile(state.rootPath(CanonPath(flakeFile)), vInfo, true); // FIXME: symlink attack
expectType(state, nAttrs, vInfo, state.positions.add({CanonPath(flakeFile)}, 1, 1));
expectType(state, nAttrs, vInfo, state.positions.add({state.rootPath(CanonPath(flakeFile))}, 1, 1));
if (auto description = vInfo.attrs->get(state.sDescription)) {
expectType(state, nString, *description->value, description->pos);
flake.description = description->value->string.s;
flake.description = description->value->c_str();
}
auto sInputs = state.symbols.create("inputs");
@ -345,15 +358,18 @@ LockedFlake lockFlake(
// FIXME: symlink attack
auto oldLockFile = LockFile::read(
lockFlags.referenceLockFilePath.value_or(
flake.sourceInfo->actualPath + "/" + flake.lockedRef.subdir + "/flake.lock"));
state.store->toRealPath(flake.storePath) + "/" + flake.lockedRef.subdir + "/flake.lock"));
debug("old lock file: %s", oldLockFile);
std::map<InputPath, FlakeInput> overrides;
std::set<InputPath> explicitCliOverrides;
std::set<InputPath> overridesUsed, updatesUsed;
for (auto & i : lockFlags.inputOverrides)
for (auto & i : lockFlags.inputOverrides) {
overrides.insert_or_assign(i.first, FlakeInput { .ref = i.second });
explicitCliOverrides.insert(i.first);
}
LockFile newLockFile;
@ -424,6 +440,7 @@ LockedFlake lockFlake(
ancestors? */
auto i = overrides.find(inputPath);
bool hasOverride = i != overrides.end();
bool hasCliOverride = explicitCliOverrides.contains(inputPath);
if (hasOverride) {
overridesUsed.insert(inputPath);
// Respect the “flakeness” of the input even if we
@ -446,8 +463,8 @@ LockedFlake lockFlake(
assert(input.ref);
/* Do we have an entry in the existing lock file? And we
don't have a --update-input flag for this input? */
/* Do we have an entry in the existing lock file?
And the input is not in updateInputs? */
std::shared_ptr<LockedNode> oldLock;
updatesUsed.insert(inputPath);
@ -459,7 +476,7 @@ LockedFlake lockFlake(
if (oldLock
&& oldLock->originalRef == *input.ref
&& !hasOverride)
&& !hasCliOverride)
{
debug("keeping existing input '%s'", inputPathS);
@ -471,9 +488,8 @@ LockedFlake lockFlake(
node->inputs.insert_or_assign(id, childNode);
/* If we have an --update-input flag for an input
of this input, then we must fetch the flake to
update it. */
/* If we have this input in updateInputs, then we
must fetch the flake to update it. */
auto lb = lockFlags.inputUpdates.lower_bound(inputPath);
auto mustRefetch =
@ -519,11 +535,6 @@ LockedFlake lockFlake(
}
}
auto localPath(parentPath);
// If this input is a path, recurse it down.
// This allows us to resolve path inputs relative to the current flake.
if ((*input.ref).input.getType() == "path")
localPath = absPath(*input.ref->input.getSourcePath(), parentPath);
computeLocks(
mustRefetch
? getFlake(state, oldLock->lockedRef, false, flakeCache, inputPath).inputs
@ -545,7 +556,7 @@ LockedFlake lockFlake(
nuked the next time we update the lock
file. That is, overrides are sticky unless you
use --no-write-lock-file. */
auto ref = input2.ref ? *input2.ref : *input.ref;
auto ref = (input2.ref && explicitCliOverrides.contains(inputPath)) ? *input2.ref : *input.ref;
if (input.isFlake) {
Path localPath = parentPath;
@ -578,7 +589,7 @@ LockedFlake lockFlake(
oldLock
? std::dynamic_pointer_cast<const Node>(oldLock)
: LockFile::read(
inputFlake.sourceInfo->actualPath + "/" + inputFlake.lockedRef.subdir + "/flake.lock").root.get_ptr(),
state.store->toRealPath(inputFlake.storePath) + "/" + inputFlake.lockedRef.subdir + "/flake.lock").root.get_ptr(),
oldLock ? lockRootPath : inputPath,
localPath,
false);
@ -602,7 +613,7 @@ LockedFlake lockFlake(
};
// Bring in the current ref for relative path resolution if we have it
auto parentPath = canonPath(flake.sourceInfo->actualPath + "/" + flake.lockedRef.subdir, true);
auto parentPath = canonPath(state.store->toRealPath(flake.storePath) + "/" + flake.lockedRef.subdir, true);
computeLocks(
flake.inputs,
@ -620,19 +631,14 @@ LockedFlake lockFlake(
for (auto & i : lockFlags.inputUpdates)
if (!updatesUsed.count(i))
warn("the flag '--update-input %s' does not match any input", printInputPath(i));
warn("'%s' does not match any input of this flake", printInputPath(i));
/* Check 'follows' inputs. */
newLockFile.check();
debug("new lock file: %s", newLockFile);
auto relPath = (topRef.subdir == "" ? "" : topRef.subdir + "/") + "flake.lock";
auto sourcePath = topRef.input.getSourcePath();
auto outputLockFilePath = sourcePath ? std::optional{*sourcePath + "/" + relPath} : std::nullopt;
if (lockFlags.outputLockFilePath) {
outputLockFilePath = lockFlags.outputLockFilePath;
}
/* Check whether we need to / can write the new lock file. */
if (newLockFile != oldLockFile || lockFlags.outputLockFilePath) {
@ -640,7 +646,7 @@ LockedFlake lockFlake(
auto diff = LockFile::diff(oldLockFile, newLockFile);
if (lockFlags.writeLockFile) {
if (outputLockFilePath) {
if (sourcePath || lockFlags.outputLockFilePath) {
if (auto unlockedInput = newLockFile.isUnlocked()) {
if (fetchSettings.warnDirty)
warn("will not write lock file of flake '%s' because it has an unlocked input ('%s')", topRef, *unlockedInput);
@ -648,41 +654,48 @@ LockedFlake lockFlake(
if (!lockFlags.updateLockFile)
throw Error("flake '%s' requires lock file changes but they're not allowed due to '--no-update-lock-file'", topRef);
bool lockFileExists = pathExists(*outputLockFilePath);
auto newLockFileS = fmt("%s\n", newLockFile);
if (lockFlags.outputLockFilePath) {
if (lockFlags.commitLockFile)
throw Error("'--commit-lock-file' and '--output-lock-file' are incompatible");
writeFile(*lockFlags.outputLockFilePath, newLockFileS);
} else {
auto relPath = (topRef.subdir == "" ? "" : topRef.subdir + "/") + "flake.lock";
auto outputLockFilePath = *sourcePath + "/" + relPath;
bool lockFileExists = pathExists(outputLockFilePath);
if (lockFileExists) {
auto s = chomp(diff);
if (s.empty())
warn("updating lock file '%s'", *outputLockFilePath);
else
warn("updating lock file '%s':\n%s", *outputLockFilePath, s);
} else
warn("creating lock file '%s'", *outputLockFilePath);
if (lockFileExists) {
if (s.empty())
warn("updating lock file '%s'", outputLockFilePath);
else
warn("updating lock file '%s':\n%s", outputLockFilePath, s);
} else
warn("creating lock file '%s': \n%s", outputLockFilePath, s);
newLockFile.write(*outputLockFilePath);
std::optional<std::string> commitMessage = std::nullopt;
std::optional<std::string> commitMessage = std::nullopt;
if (lockFlags.commitLockFile) {
if (lockFlags.outputLockFilePath) {
throw Error("--commit-lock-file and --output-lock-file are currently incompatible");
}
std::string cm;
if (lockFlags.commitLockFile) {
std::string cm;
cm = fetchSettings.commitLockFileSummary.get();
cm = fetchSettings.commitLockFileSummary.get();
if (cm == "") {
cm = fmt("%s: %s", relPath, lockFileExists ? "Update" : "Add");
if (cm == "") {
cm = fmt("%s: %s", relPath, lockFileExists ? "Update" : "Add");
}
cm += "\n\nFlake lock file updates:\n\n";
cm += filterANSIEscapes(diff, true);
commitMessage = cm;
}
cm += "\n\nFlake lock file updates:\n\n";
cm += filterANSIEscapes(diff, true);
commitMessage = cm;
topRef.input.putFile(
CanonPath((topRef.subdir == "" ? "" : topRef.subdir + "/") + "flake.lock"),
newLockFileS, commitMessage);
}
topRef.input.markChangedFile(
(topRef.subdir == "" ? "" : topRef.subdir + "/") + "flake.lock",
commitMessage);
/* Rewriting the lockfile changed the top-level
repo, so we should re-read it. FIXME: we could
also just clear the 'rev' field... */
@ -733,7 +746,7 @@ void callFlake(EvalState & state,
emitTreeAttrs(
state,
*lockedFlake.flake.sourceInfo,
lockedFlake.flake.storePath,
lockedFlake.flake.lockedRef.input,
*vRootSrc,
false,
@ -741,14 +754,10 @@ void callFlake(EvalState & state,
vRootSubdir->mkString(lockedFlake.flake.lockedRef.subdir);
if (!state.vCallFlake) {
state.vCallFlake = allocRootValue(state.allocValue());
state.eval(state.parseExprFromString(
#include "call-flake.nix.gen.hh"
, CanonPath::root), **state.vCallFlake);
}
auto vCallFlake = state.allocValue();
state.evalFile(state.callFlakeInternal, *vCallFlake);
state.callFunction(**state.vCallFlake, *vLocks, *vTmp1, noPos);
state.callFunction(*vCallFlake, *vLocks, *vTmp1, noPos);
state.callFunction(*vTmp1, *vRootSrc, *vTmp2, noPos);
state.callFunction(*vTmp2, *vRootSubdir, vRes, noPos);
}
@ -788,14 +797,106 @@ static RegisterPrimOp r2({
```nix
(builtins.getFlake "github:edolstra/dwarffs").rev
```
This function is only available if you enable the experimental feature
`flakes`.
)",
.fun = prim_getFlake,
.experimentalFeature = Xp::Flakes,
});
static void prim_parseFlakeRef(
EvalState & state,
const PosIdx pos,
Value * * args,
Value & v)
{
std::string flakeRefS(state.forceStringNoCtx(*args[0], pos,
"while evaluating the argument passed to builtins.parseFlakeRef"));
auto attrs = parseFlakeRef(flakeRefS, {}, true).toAttrs();
auto binds = state.buildBindings(attrs.size());
for (const auto & [key, value] : attrs) {
auto s = state.symbols.create(key);
auto & vv = binds.alloc(s);
std::visit(overloaded {
[&vv](const std::string & value) { vv.mkString(value); },
[&vv](const uint64_t & value) { vv.mkInt(value); },
[&vv](const Explicit<bool> & value) { vv.mkBool(value.t); }
}, value);
}
v.mkAttrs(binds);
}
static RegisterPrimOp r3({
.name = "__parseFlakeRef",
.args = {"flake-ref"},
.doc = R"(
Parse a flake reference, and return its exploded form.
For example:
```nix
builtins.parseFlakeRef "github:NixOS/nixpkgs/23.05?dir=lib"
```
evaluates to:
```nix
{ dir = "lib"; owner = "NixOS"; ref = "23.05"; repo = "nixpkgs"; type = "github"; }
```
)",
.fun = prim_parseFlakeRef,
.experimentalFeature = Xp::Flakes,
});
static void prim_flakeRefToString(
EvalState & state,
const PosIdx pos,
Value * * args,
Value & v)
{
state.forceAttrs(*args[0], noPos,
"while evaluating the argument passed to builtins.flakeRefToString");
fetchers::Attrs attrs;
for (const auto & attr : *args[0]->attrs) {
auto t = attr.value->type();
if (t == nInt) {
attrs.emplace(state.symbols[attr.name],
(uint64_t) attr.value->integer);
} else if (t == nBool) {
attrs.emplace(state.symbols[attr.name],
Explicit<bool> { attr.value->boolean });
} else if (t == nString) {
attrs.emplace(state.symbols[attr.name],
std::string(attr.value->string_view()));
} else {
state.error(
"flake reference attribute sets may only contain integers, Booleans, "
"and strings, but attribute '%s' is %s",
state.symbols[attr.name],
showType(*attr.value)).debugThrow<EvalError>();
}
}
auto flakeRef = FlakeRef::fromAttrs(attrs);
v.mkString(flakeRef.to_string());
}
static RegisterPrimOp r4({
.name = "__flakeRefToString",
.args = {"attrs"},
.doc = R"(
Convert a flake reference from attribute set format to URL format.
For example:
```nix
builtins.flakeRefToString {
dir = "lib"; owner = "NixOS"; ref = "23.05"; repo = "nixpkgs"; type = "github";
}
```
evaluates to
```nix
"github:NixOS/nixpkgs/23.05?dir=lib"
```
)",
.fun = prim_flakeRefToString,
.experimentalFeature = Xp::Flakes,
});
}
Fingerprint LockedFlake::getFingerprint() const
@ -803,9 +904,9 @@ Fingerprint LockedFlake::getFingerprint() const
// FIXME: as an optimization, if the flake contains a lock file
// and we haven't changed it, then it's sufficient to use
// flake.sourceInfo.storePath for the fingerprint.
return hashString(htSHA256,
return hashString(HashAlgorithm::SHA256,
fmt("%s;%s;%d;%d;%s",
flake.sourceInfo->storePath.to_string(),
flake.storePath.to_string(),
flake.lockedRef.subdir,
flake.lockedRef.input.getRevCount().value_or(0),
flake.lockedRef.input.getLastModified().value_or(0),

View file

@ -10,8 +10,6 @@ namespace nix {
class EvalState;
namespace fetchers { struct Tree; }
namespace flake {
struct FlakeInput;
@ -84,7 +82,7 @@ struct Flake
*/
bool forceDirty = false;
std::optional<std::string> description;
std::shared_ptr<const fetchers::Tree> sourceInfo;
StorePath storePath;
FlakeInputs inputs;
/**
* 'nixConfig' attribute
@ -193,7 +191,7 @@ void callFlake(
void emitTreeAttrs(
EvalState & state,
const fetchers::Tree & tree,
const StorePath & storePath,
const fetchers::Input & input,
Value & v,
bool emptyRevFallback = false,

View file

@ -69,32 +69,130 @@ std::optional<FlakeRef> maybeParseFlakeRef(
}
}
std::pair<FlakeRef, std::string> parseFlakeRefWithFragment(
std::pair<FlakeRef, std::string> parsePathFlakeRefWithFragment(
const std::string & url,
const std::optional<Path> & baseDir,
bool allowMissing,
bool isFlake)
{
using namespace fetchers;
std::string path = url;
std::string fragment = "";
std::map<std::string, std::string> query;
auto pathEnd = url.find_first_of("#?");
auto fragmentStart = pathEnd;
if (pathEnd != std::string::npos && url[pathEnd] == '?') {
fragmentStart = url.find("#");
}
if (pathEnd != std::string::npos) {
path = url.substr(0, pathEnd);
}
if (fragmentStart != std::string::npos) {
fragment = percentDecode(url.substr(fragmentStart+1));
}
if (pathEnd != std::string::npos && fragmentStart != std::string::npos) {
query = decodeQuery(url.substr(pathEnd+1, fragmentStart-pathEnd-1));
}
static std::string fnRegex = "[0-9a-zA-Z-._~!$&'\"()*+,;=]+";
if (baseDir) {
/* Check if 'url' is a path (either absolute or relative
to 'baseDir'). If so, search upward to the root of the
repo (i.e. the directory containing .git). */
static std::regex pathUrlRegex(
"(/?" + fnRegex + "(?:/" + fnRegex + ")*/?)"
+ "(?:\\?(" + queryRegex + "))?"
+ "(?:#(" + queryRegex + "))?",
std::regex::ECMAScript);
path = absPath(path, baseDir);
if (isFlake) {
if (!allowMissing && !pathExists(path + "/flake.nix")){
notice("path '%s' does not contain a 'flake.nix', searching up",path);
// Save device to detect filesystem boundary
dev_t device = lstat(path).st_dev;
bool found = false;
while (path != "/") {
if (pathExists(path + "/flake.nix")) {
found = true;
break;
} else if (pathExists(path + "/.git"))
throw Error("path '%s' is not part of a flake (neither it nor its parent directories contain a 'flake.nix' file)", path);
else {
if (lstat(path).st_dev != device)
throw Error("unable to find a flake before encountering filesystem boundary at '%s'", path);
}
path = dirOf(path);
}
if (!found)
throw BadURL("could not find a flake.nix file");
}
if (!S_ISDIR(lstat(path).st_mode))
throw BadURL("path '%s' is not a flake (because it's not a directory)", path);
if (!allowMissing && !pathExists(path + "/flake.nix"))
throw BadURL("path '%s' is not a flake (because it doesn't contain a 'flake.nix' file)", path);
auto flakeRoot = path;
std::string subdir;
while (flakeRoot != "/") {
if (pathExists(flakeRoot + "/.git")) {
auto base = std::string("git+file://") + flakeRoot;
auto parsedURL = ParsedURL{
.url = base, // FIXME
.base = base,
.scheme = "git+file",
.authority = "",
.path = flakeRoot,
.query = query,
};
if (subdir != "") {
if (parsedURL.query.count("dir"))
throw Error("flake URL '%s' has an inconsistent 'dir' parameter", url);
parsedURL.query.insert_or_assign("dir", subdir);
}
if (pathExists(flakeRoot + "/.git/shallow"))
parsedURL.query.insert_or_assign("shallow", "1");
return std::make_pair(
FlakeRef(fetchers::Input::fromURL(parsedURL), getOr(parsedURL.query, "dir", "")),
fragment);
}
subdir = std::string(baseNameOf(flakeRoot)) + (subdir.empty() ? "" : "/" + subdir);
flakeRoot = dirOf(flakeRoot);
}
}
} else {
if (!hasPrefix(path, "/"))
throw BadURL("flake reference '%s' is not an absolute path", url);
path = canonPath(path + "/" + getOr(query, "dir", ""));
}
fetchers::Attrs attrs;
attrs.insert_or_assign("type", "path");
attrs.insert_or_assign("path", path);
return std::make_pair(FlakeRef(fetchers::Input::fromAttrs(std::move(attrs)), ""), fragment);
};
/* Check if 'url' is a flake ID. This is an abbreviated syntax for
'flake:<flake-id>?ref=<ref>&rev=<rev>'. */
std::optional<std::pair<FlakeRef, std::string>> parseFlakeIdRef(
const std::string & url,
bool isFlake
)
{
std::smatch match;
static std::regex flakeRegex(
"((" + flakeIdRegexS + ")(?:/(?:" + refAndOrRevRegex + "))?)"
+ "(?:#(" + queryRegex + "))?",
+ "(?:#(" + fragmentRegex + "))?",
std::regex::ECMAScript);
std::smatch match;
/* Check if 'url' is a flake ID. This is an abbreviated syntax for
'flake:<flake-id>?ref=<ref>&rev=<rev>'. */
if (std::regex_match(url, match, flakeRegex)) {
auto parsedURL = ParsedURL{
.url = url,
@ -105,111 +203,53 @@ std::pair<FlakeRef, std::string> parseFlakeRefWithFragment(
};
return std::make_pair(
FlakeRef(Input::fromURL(parsedURL), ""),
FlakeRef(fetchers::Input::fromURL(parsedURL, isFlake), ""),
percentDecode(match.str(6)));
}
else if (std::regex_match(url, match, pathUrlRegex)) {
std::string path = match[1];
std::string fragment = percentDecode(match.str(3));
return {};
}
if (baseDir) {
/* Check if 'url' is a path (either absolute or relative
to 'baseDir'). If so, search upward to the root of the
repo (i.e. the directory containing .git). */
path = absPath(path, baseDir);
if (isFlake) {
if (!allowMissing && !pathExists(path + "/flake.nix")){
notice("path '%s' does not contain a 'flake.nix', searching up",path);
// Save device to detect filesystem boundary
dev_t device = lstat(path).st_dev;
bool found = false;
while (path != "/") {
if (pathExists(path + "/flake.nix")) {
found = true;
break;
} else if (pathExists(path + "/.git"))
throw Error("path '%s' is not part of a flake (neither it nor its parent directories contain a 'flake.nix' file)", path);
else {
if (lstat(path).st_dev != device)
throw Error("unable to find a flake before encountering filesystem boundary at '%s'", path);
}
path = dirOf(path);
}
if (!found)
throw BadURL("could not find a flake.nix file");
}
if (!S_ISDIR(lstat(path).st_mode))
throw BadURL("path '%s' is not a flake (because it's not a directory)", path);
if (!allowMissing && !pathExists(path + "/flake.nix"))
throw BadURL("path '%s' is not a flake (because it doesn't contain a 'flake.nix' file)", path);
auto flakeRoot = path;
std::string subdir;
while (flakeRoot != "/") {
if (pathExists(flakeRoot + "/.git")) {
auto base = std::string("git+file://") + flakeRoot;
auto parsedURL = ParsedURL{
.url = base, // FIXME
.base = base,
.scheme = "git+file",
.authority = "",
.path = flakeRoot,
.query = decodeQuery(match[2]),
};
if (subdir != "") {
if (parsedURL.query.count("dir"))
throw Error("flake URL '%s' has an inconsistent 'dir' parameter", url);
parsedURL.query.insert_or_assign("dir", subdir);
}
if (pathExists(flakeRoot + "/.git/shallow"))
parsedURL.query.insert_or_assign("shallow", "1");
return std::make_pair(
FlakeRef(Input::fromURL(parsedURL), getOr(parsedURL.query, "dir", "")),
fragment);
}
subdir = std::string(baseNameOf(flakeRoot)) + (subdir.empty() ? "" : "/" + subdir);
flakeRoot = dirOf(flakeRoot);
}
}
} else {
if (!hasPrefix(path, "/"))
throw BadURL("flake reference '%s' is not an absolute path", url);
auto query = decodeQuery(match[2]);
path = canonPath(path + "/" + getOr(query, "dir", ""));
}
fetchers::Attrs attrs;
attrs.insert_or_assign("type", "path");
attrs.insert_or_assign("path", path);
return std::make_pair(FlakeRef(Input::fromAttrs(std::move(attrs)), ""), fragment);
std::optional<std::pair<FlakeRef, std::string>> parseURLFlakeRef(
const std::string & url,
const std::optional<Path> & baseDir,
bool isFlake
)
{
ParsedURL parsedURL;
try {
parsedURL = parseURL(url);
} catch (BadURL &) {
return std::nullopt;
}
else {
auto parsedURL = parseURL(url);
std::string fragment;
std::swap(fragment, parsedURL.fragment);
std::string fragment;
std::swap(fragment, parsedURL.fragment);
auto input = Input::fromURL(parsedURL);
input.parent = baseDir;
auto input = fetchers::Input::fromURL(parsedURL, isFlake);
input.parent = baseDir;
return std::make_pair(
FlakeRef(std::move(input), getOr(parsedURL.query, "dir", "")),
fragment);
return std::make_pair(
FlakeRef(std::move(input), getOr(parsedURL.query, "dir", "")),
fragment);
}
std::pair<FlakeRef, std::string> parseFlakeRefWithFragment(
const std::string & url,
const std::optional<Path> & baseDir,
bool allowMissing,
bool isFlake)
{
using namespace fetchers;
std::smatch match;
if (auto res = parseFlakeIdRef(url, isFlake)) {
return *res;
} else if (auto res = parseURLFlakeRef(url, baseDir, isFlake)) {
return *res;
} else {
return parsePathFlakeRefWithFragment(url, baseDir, allowMissing, isFlake);
}
}
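
A hypothetical sketch (not part of this diff) of the dispatch order implemented above: flake-id references first, then URL references, then filesystem paths. It assumes the default arguments declared in `flakeref.hh`, and that `/home/alice/proj` is a git checkout whose `sub` directory contains a `flake.nix`; all names and paths are invented.

```cpp
#include "flake/flakeref.hh"

void parseExamples()
{
    using namespace nix;

    // Handled by parseFlakeIdRef: an indirect flake-id reference.
    auto [idRef, idFrag] = parseFlakeRefWithFragment("nixpkgs/nixos-23.05#hello");

    // Handled by parseURLFlakeRef: a proper URL.
    auto [urlRef, urlFrag] = parseFlakeRefWithFragment("github:NixOS/nixpkgs");

    // Falls through to parsePathFlakeRefWithFragment: a relative path plus a
    // base directory. Under the layout assumed above, the upward .git search
    // yields roughly git+file:///home/alice/proj?dir=sub with fragment "pkg".
    auto [pathRef, pathFrag] = parseFlakeRefWithFragment(
        "./sub#pkg", std::string("/home/alice/proj"));
}
```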
@ -232,10 +272,10 @@ FlakeRef FlakeRef::fromAttrs(const fetchers::Attrs & attrs)
fetchers::maybeGetStrAttr(attrs, "dir").value_or(""));
}
std::pair<fetchers::Tree, FlakeRef> FlakeRef::fetchTree(ref<Store> store) const
std::pair<StorePath, FlakeRef> FlakeRef::fetchTree(ref<Store> store) const
{
auto [tree, lockedInput] = input.fetch(store);
return {std::move(tree), FlakeRef(std::move(lockedInput), subdir)};
auto [storePath, lockedInput] = input.fetch(store);
return {std::move(storePath), FlakeRef(std::move(lockedInput), subdir)};
}
std::tuple<FlakeRef, std::string, ExtendedOutputsSpec> parseFlakeRefWithFragmentAndExtendedOutputsSpec(
@ -246,7 +286,9 @@ std::tuple<FlakeRef, std::string, ExtendedOutputsSpec> parseFlakeRefWithFragment
{
auto [prefix, extendedOutputsSpec] = ExtendedOutputsSpec::parse(url);
auto [flakeRef, fragment] = parseFlakeRefWithFragment(std::string { prefix }, baseDir, allowMissing, isFlake);
return {std::move(flakeRef), fragment, extendedOutputsSpec};
return {std::move(flakeRef), fragment, std::move(extendedOutputsSpec)};
}
std::regex flakeIdRegex(flakeIdRegexS, std::regex::ECMAScript);
}

View file

@ -6,6 +6,7 @@
#include "fetchers.hh"
#include "outputs-spec.hh"
#include <regex>
#include <variant>
namespace nix {
@ -62,7 +63,7 @@ struct FlakeRef
static FlakeRef fromAttrs(const fetchers::Attrs & attrs);
std::pair<fetchers::Tree, FlakeRef> fetchTree(ref<Store> store) const;
std::pair<StorePath, FlakeRef> fetchTree(ref<Store> store) const;
};
std::ostream & operator << (std::ostream & str, const FlakeRef & flakeRef);
@ -91,5 +92,7 @@ std::tuple<FlakeRef, std::string, ExtendedOutputsSpec> parseFlakeRefWithFragment
bool allowMissing = false,
bool isFlake = true);
const static std::string flakeIdRegexS = "[a-zA-Z][a-zA-Z0-9_-]*";
extern std::regex flakeIdRegex;
}

View file

@ -2,8 +2,10 @@
#include "store-api.hh"
#include "url-parts.hh"
#include <algorithm>
#include <iomanip>
#include <iterator>
#include <nlohmann/json.hpp>
namespace nix::flake {
@ -45,16 +47,26 @@ StorePath LockedNode::computeStorePath(Store & store) const
return lockedRef.input.computeStorePath(store);
}
std::shared_ptr<Node> LockFile::findInput(const InputPath & path)
{
static std::shared_ptr<Node> doFind(const ref<Node>& root, const InputPath & path, std::vector<InputPath>& visited) {
auto pos = root;
auto found = std::find(visited.cbegin(), visited.cend(), path);
if(found != visited.end()) {
std::vector<std::string> cycle;
std::transform(found, visited.cend(), std::back_inserter(cycle), printInputPath);
cycle.push_back(printInputPath(path));
throw Error("follow cycle detected: [%s]", concatStringsSep(" -> ", cycle));
}
visited.push_back(path);
for (auto & elem : path) {
if (auto i = get(pos->inputs, elem)) {
if (auto node = std::get_if<0>(&*i))
pos = *node;
else if (auto follows = std::get_if<1>(&*i)) {
if (auto p = findInput(*follows))
if (auto p = doFind(root, *follows, visited))
pos = ref(p);
else
return {};
@ -66,6 +78,12 @@ std::shared_ptr<Node> LockFile::findInput(const InputPath & path)
return pos;
}
std::shared_ptr<Node> LockFile::findInput(const InputPath & path)
{
std::vector<InputPath> visited;
return doFind(root, path, visited);
}
LockFile::LockFile(const nlohmann::json & json, const Path & path)
{
auto version = json.value("version", 0);
@ -196,12 +214,6 @@ std::ostream & operator <<(std::ostream & stream, const LockFile & lockFile)
return stream;
}
void LockFile::write(const Path & path) const
{
createDirs(dirOf(path));
writeFile(path, fmt("%s\n", *this));
}
std::optional<FlakeRef> LockFile::isUnlocked() const
{
std::set<ref<const Node>> nodes;
@ -345,7 +357,7 @@ void LockFile::check()
for (auto & [inputPath, input] : inputs) {
if (auto follows = std::get_if<1>(&input)) {
if (!follows->empty() && !get(inputs, *follows))
if (!follows->empty() && !findInput(*follows))
throw Error("input '%s' follows a non-existent input '%s'",
printInputPath(inputPath),
printInputPath(*follows));

View file

@ -65,8 +65,6 @@ struct LockFile
static LockFile read(const Path & path);
void write(const Path & path) const;
/**
* Check whether this lock file has any unlocked inputs.
*/

View file

@ -0,0 +1,48 @@
#include "url-name.hh"
#include <regex>
#include <iostream>
namespace nix {
static const std::string attributeNamePattern("[a-zA-Z0-9_-]+");
static const std::regex lastAttributeRegex("(?:" + attributeNamePattern + "\\.)*(?!default)(" + attributeNamePattern +")(\\^.*)?");
static const std::string pathSegmentPattern("[a-zA-Z0-9_-]+");
static const std::regex lastPathSegmentRegex(".*/(" + pathSegmentPattern +")");
static const std::regex secondPathSegmentRegex("(?:" + pathSegmentPattern + ")/(" + pathSegmentPattern +")(?:/.*)?");
static const std::regex gitProviderRegex("github|gitlab|sourcehut");
static const std::regex gitSchemeRegex("git($|\\+.*)");
static const std::regex defaultOutputRegex(".*\\.default($|\\^.*)");
std::optional<std::string> getNameFromURL(const ParsedURL & url)
{
std::smatch match;
/* If there is a dir= argument, use its value */
if (url.query.count("dir") > 0)
return url.query.at("dir");
/* If the fragment isn't a "default" and contains two attribute elements, use the last one */
if (std::regex_match(url.fragment, match, lastAttributeRegex))
return match.str(1);
/* If this is a github/gitlab/sourcehut flake, use the repo name */
if (std::regex_match(url.scheme, gitProviderRegex) && std::regex_match(url.path, match, secondPathSegmentRegex))
return match.str(1);
/* If it is a regular git flake, use the directory name */
if (std::regex_match(url.scheme, gitSchemeRegex) && std::regex_match(url.path, match, lastPathSegmentRegex))
return match.str(1);
/* If everything failed but there is a non-default fragment, use it in full */
if (!url.fragment.empty() && !std::regex_match(url.fragment, defaultOutputRegex))
return url.fragment;
/* If there is no fragment, take the last element of the path */
if (std::regex_match(url.path, match, lastPathSegmentRegex))
return match.str(1);
/* If even that didn't work, the URL does not contain enough info to determine a useful name */
return {};
}
}
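
A hypothetical usage sketch (not part of this diff) illustrating the heuristics above; it assumes `parseURL` from `url.hh` and linking against the library.

```cpp
#include "url-name.hh"
#include "url.hh"
#include <cassert>

void nameExamples()
{
    using namespace nix;

    // A github flake: the repository name (second path segment) is used.
    assert(getNameFromURL(parseURL("github:NixOS/nixpkgs")) == "nixpkgs");

    // An explicit, non-"default" fragment wins over the path.
    assert(getNameFromURL(parseURL("github:NixOS/nixpkgs#hello")) == "hello");

    // A "#default" fragment is skipped, so the repository name is used again.
    assert(getNameFromURL(parseURL("github:NixOS/nixpkgs#default")) == "nixpkgs");
}
```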

View file

@ -0,0 +1,20 @@
#include "url.hh"
#include "url-parts.hh"
#include "util.hh"
#include "split.hh"
namespace nix {
/**
* Try to extract a reasonably unique and meaningful, human-readable
* name of a flake output from a parsed URL.
* When nullopt is returned, the callsite should use information available
* to it outside of the URL to determine a useful name.
* This is a heuristic approach intended for user interfaces.
* @return nullopt if the extracted name is not useful to identify a
* flake output, for example because it is empty or "default".
* Otherwise returns the extracted name.
*/
std::optional<std::string> getNameFromURL(const ParsedURL & url);
}

View file

@ -0,0 +1,42 @@
#pragma once
#include <boost/container/small_vector.hpp>
#if HAVE_BOEHMGC
#include <gc/gc.h>
#include <gc/gc_cpp.h>
#include <gc/gc_allocator.h>
#endif
namespace nix {
struct Value;
/**
* A GC-compatible vector that may use a reserved capacity of `nItems` elements on the stack instead of allocating on the heap.
*/
#if HAVE_BOEHMGC
template <typename T, size_t nItems>
using SmallVector = boost::container::small_vector<T, nItems, traceable_allocator<T>>;
#else
template <typename T, size_t nItems>
using SmallVector = boost::container::small_vector<T, nItems>;
#endif
/**
* A vector of value pointers. See `SmallVector`.
*/
template <size_t nItems>
using SmallValueVector = SmallVector<Value *, nItems>;
/**
* A vector of values that must not be referenced after the vector is destroyed.
*
* See also `SmallValueVector`.
*/
template <size_t nItems>
using SmallTemporaryValueVector = SmallVector<Value, nItems>;
}
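
A minimal, hypothetical usage sketch (not part of this diff), assuming this header and `value.hh` are included; it shows the intended pattern of keeping a short scratch list of `Value *` on the stack.

```cpp
#include "value.hh"
// plus the header above that defines SmallValueVector

void collectTwo(nix::Value * a, nix::Value * b)
{
    // Up to 8 pointers live on the stack; anything larger falls back to a
    // heap allocation that is GC-traceable when Nix is built with Boehm GC.
    nix::SmallValueVector<8> args;
    args.push_back(a);
    args.push_back(b);

    // args.data() / args.size() can then be handed to a primop-style callee.
}
```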

View file

@ -1,5 +1,4 @@
#include "get-drvs.hh"
#include "util.hh"
#include "eval-inline.hh"
#include "derivations.hh"
#include "store-api.hh"
@ -156,7 +155,7 @@ DrvInfo::Outputs DrvInfo::queryOutputs(bool withPaths, bool onlyOutputsToInstall
Outputs result;
for (auto elem : outTI->listItems()) {
if (elem->type() != nString) throw errMsg;
auto out = outputs.find(elem->string.s);
auto out = outputs.find(elem->c_str());
if (out == outputs.end()) throw errMsg;
result.insert(*out);
}
@ -199,7 +198,7 @@ StringSet DrvInfo::queryMetaNames()
bool DrvInfo::checkMeta(Value & v)
{
state->forceValue(v, [&]() { return v.determinePos(noPos); });
state->forceValue(v, v.determinePos(noPos));
if (v.type() == nList) {
for (auto elem : v.listItems())
if (!checkMeta(*elem)) return false;
@ -230,7 +229,7 @@ std::string DrvInfo::queryMetaString(const std::string & name)
{
Value * v = queryMeta(name);
if (!v || v->type() != nString) return "";
return v->string.s;
return v->c_str();
}
@ -242,7 +241,7 @@ NixInt DrvInfo::queryMetaInt(const std::string & name, NixInt def)
if (v->type() == nString) {
/* Backwards compatibility with before we had support for
integer meta fields. */
if (auto n = string2Int<NixInt>(v->string.s))
if (auto n = string2Int<NixInt>(v->c_str()))
return *n;
}
return def;
@ -256,7 +255,7 @@ NixFloat DrvInfo::queryMetaFloat(const std::string & name, NixFloat def)
if (v->type() == nString) {
/* Backwards compatibility with before we had support for
float meta fields. */
if (auto n = string2Float<NixFloat>(v->string.s))
if (auto n = string2Float<NixFloat>(v->c_str()))
return *n;
}
return def;
@ -271,8 +270,8 @@ bool DrvInfo::queryMetaBool(const std::string & name, bool def)
if (v->type() == nString) {
/* Backwards compatibility with before we had support for
Boolean meta fields. */
if (strcmp(v->string.s, "true") == 0) return true;
if (strcmp(v->string.s, "false") == 0) return false;
if (v->string_view() == "true") return true;
if (v->string_view() == "false") return false;
}
return def;
}
@ -305,7 +304,7 @@ static bool getDerivation(EvalState & state, Value & v,
bool ignoreAssertionFailures)
{
try {
state.forceValue(v, [&]() { return v.determinePos(noPos); });
state.forceValue(v, v.determinePos(noPos));
if (!state.isDerivation(v)) return true;
/* Remove spurious duplicates (e.g., a set like `rec { x =

View file

@ -1,4 +1,5 @@
%option reentrant bison-bridge bison-locations
%option align
%option noyywrap
%option never-interactive
%option stack
@ -35,9 +36,6 @@ static inline PosIdx makeCurPos(const YYLTYPE & loc, ParseData * data)
#define CUR_POS makeCurPos(*yylloc, data)
// backup to recover from yyless(0)
YYLTYPE prev_yylloc;
static void initLoc(YYLTYPE * loc)
{
loc->first_line = loc->last_line = 1;
@ -46,7 +44,7 @@ static void initLoc(YYLTYPE * loc)
static void adjustLoc(YYLTYPE * loc, const char * s, size_t len)
{
prev_yylloc = *loc;
loc->stash();
loc->first_line = loc->last_line;
loc->first_column = loc->last_column;
@ -230,7 +228,7 @@ or { return OR_KW; }
{HPATH_START}\$\{ {
PUSH_STATE(PATH_START);
yyless(0);
*yylloc = prev_yylloc;
yylloc->unstash();
}
<PATH_START>{PATH_SEG} {
@ -286,7 +284,7 @@ or { return OR_KW; }
context (it may be ')', ';', or something of that sort) */
POP_STATE();
yyless(0);
*yylloc = prev_yylloc;
yylloc->unstash();
return PATH_END;
}

View file

@ -16,9 +16,9 @@ libexpr_CXXFLAGS += -I src/libutil -I src/libstore -I src/libfetchers -I src/lib
libexpr_LIBS = libutil libstore libfetchers
libexpr_LDFLAGS += -lboost_context -pthread
libexpr_LDFLAGS += -lboost_context $(THREAD_LDFLAGS)
ifdef HOST_LINUX
libexpr_LDFLAGS += -ldl
libexpr_LDFLAGS += -ldl
endif
# The dependency on libgc must be propagated (i.e. meaning that
@ -36,15 +36,15 @@ $(d)/lexer-tab.cc $(d)/lexer-tab.hh: $(d)/lexer.l
clean-files += $(d)/parser-tab.cc $(d)/parser-tab.hh $(d)/lexer-tab.cc $(d)/lexer-tab.hh
$(eval $(call install-file-in, $(d)/nix-expr.pc, $(libdir)/pkgconfig, 0644))
$(eval $(call install-file-in, $(buildprefix)$(d)/nix-expr.pc, $(libdir)/pkgconfig, 0644))
$(foreach i, $(wildcard src/libexpr/value/*.hh), \
$(eval $(call install-file-in, $(i), $(includedir)/nix/value, 0644)))
$(foreach i, $(wildcard src/libexpr/flake/*.hh), \
$(eval $(call install-file-in, $(i), $(includedir)/nix/flake, 0644)))
$(d)/primops.cc: $(d)/imported-drv-to-derivation.nix.gen.hh $(d)/primops/derivation.nix.gen.hh $(d)/fetchurl.nix.gen.hh
$(d)/primops.cc: $(d)/imported-drv-to-derivation.nix.gen.hh
$(d)/flake/flake.cc: $(d)/flake/call-flake.nix.gen.hh
$(d)/eval.cc: $(d)/primops/derivation.nix.gen.hh $(d)/fetchurl.nix.gen.hh $(d)/flake/call-flake.nix.gen.hh
src/libexpr/primops/fromTOML.o: ERROR_SWITCH_ENUM =
$(buildprefix)src/libexpr/primops/fromTOML.o: ERROR_SWITCH_ENUM =

View file

@ -9,57 +9,7 @@
namespace nix {
struct PosAdapter : AbstractPos
{
Pos::Origin origin;
PosAdapter(Pos::Origin origin)
: origin(std::move(origin))
{
}
std::optional<std::string> getSource() const override
{
return std::visit(overloaded {
[](const Pos::none_tag &) -> std::optional<std::string> {
return std::nullopt;
},
[](const Pos::Stdin & s) -> std::optional<std::string> {
// Get rid of the null terminators added by the parser.
return std::string(s.source->c_str());
},
[](const Pos::String & s) -> std::optional<std::string> {
// Get rid of the null terminators added by the parser.
return std::string(s.source->c_str());
},
[](const SourcePath & path) -> std::optional<std::string> {
try {
return path.readFile();
} catch (Error &) {
return std::nullopt;
}
}
}, origin);
}
void print(std::ostream & out) const override
{
std::visit(overloaded {
[&](const Pos::none_tag &) { out << "«none»"; },
[&](const Pos::Stdin &) { out << "«stdin»"; },
[&](const Pos::String & s) { out << "«string»"; },
[&](const SourcePath & path) { out << path; }
}, origin);
}
};
Pos::operator std::shared_ptr<AbstractPos>() const
{
auto pos = std::make_shared<PosAdapter>(origin);
pos->line = line;
pos->column = column;
return pos;
}
ExprBlackHole eBlackHole;
// FIXME: remove, because *symbols* are abstract and do not have a single
// textual representation; see printIdentifier()
@ -76,12 +26,12 @@ void Expr::show(const SymbolTable & symbols, std::ostream & str) const
void ExprInt::show(const SymbolTable & symbols, std::ostream & str) const
{
str << n;
str << v.integer;
}
void ExprFloat::show(const SymbolTable & symbols, std::ostream & str) const
{
str << nf;
str << v.fpoint;
}
void ExprString::show(const SymbolTable & symbols, std::ostream & str) const
@ -266,17 +216,6 @@ void ExprPos::show(const SymbolTable & symbols, std::ostream & str) const
}
std::ostream & operator << (std::ostream & str, const Pos & pos)
{
if (auto pos2 = (std::shared_ptr<AbstractPos>) pos) {
str << *pos2;
} else
str << "undefined position";
return str;
}
std::string showAttrPath(const SymbolTable & symbols, const AttrPath & attrPath)
{
std::ostringstream out;
@ -331,6 +270,8 @@ void ExprVar::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> &
if (es.debugRepl)
es.exprEnvs.insert(std::make_pair(this, env));
fromWith = nullptr;
/* Check whether the variable appears in the environment. If so,
set its level and displacement. */
const StaticEnv * curEnv;
@ -342,7 +283,6 @@ void ExprVar::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> &
} else {
auto i = curEnv->find(name);
if (i != curEnv->vars.end()) {
fromWith = false;
this->level = level;
displ = i->second;
return;
@ -358,7 +298,8 @@ void ExprVar::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> &
.msg = hintfmt("undefined variable '%1%'", es.symbols[name]),
.errPos = es.positions[pos]
});
fromWith = true;
for (auto * e = env.get(); e && !fromWith; e = e->up)
fromWith = e->isWith;
this->level = withLevel;
}
@ -391,7 +332,7 @@ void ExprAttrs::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv>
es.exprEnvs.insert(std::make_pair(this, env));
if (recursive) {
auto newEnv = std::make_shared<StaticEnv>(false, env.get(), recursive ? attrs.size() : 0);
auto newEnv = std::make_shared<StaticEnv>(nullptr, env.get(), recursive ? attrs.size() : 0);
Displacement displ = 0;
for (auto & i : attrs)
@ -433,7 +374,7 @@ void ExprLambda::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv>
es.exprEnvs.insert(std::make_pair(this, env));
auto newEnv = std::make_shared<StaticEnv>(
false, env.get(),
nullptr, env.get(),
(hasFormals() ? formals->formals.size() : 0) +
(!arg ? 0 : 1));
@ -469,7 +410,7 @@ void ExprLet::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> &
if (es.debugRepl)
es.exprEnvs.insert(std::make_pair(this, env));
auto newEnv = std::make_shared<StaticEnv>(false, env.get(), attrs->attrs.size());
auto newEnv = std::make_shared<StaticEnv>(nullptr, env.get(), attrs->attrs.size());
Displacement displ = 0;
for (auto & i : attrs->attrs)
@ -488,6 +429,10 @@ void ExprWith::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> &
if (es.debugRepl)
es.exprEnvs.insert(std::make_pair(this, env));
parentWith = nullptr;
for (auto * e = env.get(); e && !parentWith; e = e->up)
parentWith = e->isWith;
/* Does this `with' have an enclosing `with'? If so, record its
level so that `lookupVar' can look up variables in the previous
`with' if this one doesn't contain the desired attribute. */
@ -504,7 +449,7 @@ void ExprWith::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> &
es.exprEnvs.insert(std::make_pair(this, env));
attrs->bindVars(es, env);
auto newEnv = std::make_shared<StaticEnv>(true, env.get());
auto newEnv = std::make_shared<StaticEnv>(this, env.get());
body->bindVars(es, newEnv);
}

View file

@ -8,6 +8,7 @@
#include "symbol-table.hh"
#include "error.hh"
#include "chunked-vector.hh"
#include "position.hh"
namespace nix {
@ -20,27 +21,12 @@ MakeError(Abort, EvalError);
MakeError(TypeError, EvalError);
MakeError(UndefinedVarError, Error);
MakeError(MissingArgumentError, EvalError);
MakeError(RestrictedPathError, Error);
/**
* Position objects.
*/
struct Pos
class InfiniteRecursionError : public EvalError
{
uint32_t line;
uint32_t column;
struct none_tag { };
struct Stdin { ref<std::string> source; };
struct String { ref<std::string> source; };
typedef std::variant<none_tag, Stdin, String, SourcePath> Origin;
Origin origin;
explicit operator bool() const { return line > 0; }
operator std::shared_ptr<AbstractPos>() const;
friend class EvalState;
public:
using EvalError::EvalError;
};
class PosIdx {
@ -75,7 +61,7 @@ public:
mutable uint32_t idx = std::numeric_limits<uint32_t>::max();
// Used for searching in PosTable::[].
explicit Origin(uint32_t idx): idx(idx), origin{Pos::none_tag()} {}
explicit Origin(uint32_t idx): idx(idx), origin{std::monostate()} {}
public:
const Pos::Origin origin;
@ -126,12 +112,11 @@ public:
inline PosIdx noPos = {};
std::ostream & operator << (std::ostream & str, const Pos & pos);
struct Env;
struct Value;
class EvalState;
struct ExprWith;
struct StaticEnv;
@ -155,6 +140,10 @@ std::string showAttrPath(const SymbolTable & symbols, const AttrPath & attrPath)
struct Expr
{
static unsigned long nrExprs;
Expr() {
nrExprs++;
}
virtual ~Expr() { };
virtual void show(const SymbolTable & symbols, std::ostream & str) const;
virtual void bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env);
@ -171,18 +160,16 @@ struct Expr
struct ExprInt : Expr
{
NixInt n;
Value v;
ExprInt(NixInt n) : n(n) { v.mkInt(n); };
ExprInt(NixInt n) { v.mkInt(n); };
Value * maybeThunk(EvalState & state, Env & env) override;
COMMON_METHODS
};
struct ExprFloat : Expr
{
NixFloat nf;
Value v;
ExprFloat(NixFloat nf) : nf(nf) { v.mkFloat(nf); };
ExprFloat(NixFloat nf) { v.mkFloat(nf); };
Value * maybeThunk(EvalState & state, Env & env) override;
COMMON_METHODS
};
@ -198,9 +185,13 @@ struct ExprString : Expr
struct ExprPath : Expr
{
ref<InputAccessor> accessor;
std::string s;
Value v;
ExprPath(std::string s) : s(std::move(s)) { v.mkPath(this->s.c_str()); };
ExprPath(ref<InputAccessor> accessor, std::string s) : accessor(accessor), s(std::move(s))
{
v.mkPath(&*accessor, this->s.c_str());
}
Value * maybeThunk(EvalState & state, Env & env) override;
COMMON_METHODS
};
@ -214,8 +205,11 @@ struct ExprVar : Expr
Symbol name;
/* Whether the variable comes from an environment (e.g. a rec, let
or function argument) or from a "with". */
bool fromWith;
or function argument) or from a "with".
`nullptr`: Not from a `with`.
Valid pointer: the nearest, innermost `with` expression to query first. */
ExprWith * fromWith;
/* In the former case, the value is obtained by going `level`
levels up from the current environment and getting the
@ -238,7 +232,7 @@ struct ExprSelect : Expr
PosIdx pos;
Expr * e, * def;
AttrPath attrPath;
ExprSelect(const PosIdx & pos, Expr * e, const AttrPath && attrPath, Expr * def) : pos(pos), e(e), def(def), attrPath(std::move(attrPath)) { };
ExprSelect(const PosIdx & pos, Expr * e, AttrPath attrPath, Expr * def) : pos(pos), e(e), def(def), attrPath(std::move(attrPath)) { };
ExprSelect(const PosIdx & pos, Expr * e, Symbol name) : pos(pos), e(e), def(0) { attrPath.push_back(AttrName(name)); };
PosIdx getPos() const override { return pos; }
COMMON_METHODS
@ -248,7 +242,7 @@ struct ExprOpHasAttr : Expr
{
Expr * e;
AttrPath attrPath;
ExprOpHasAttr(Expr * e, const AttrPath && attrPath) : e(e), attrPath(std::move(attrPath)) { };
ExprOpHasAttr(Expr * e, AttrPath attrPath) : e(e), attrPath(std::move(attrPath)) { };
PosIdx getPos() const override { return e->getPos(); }
COMMON_METHODS
};
@ -287,6 +281,7 @@ struct ExprList : Expr
std::vector<Expr *> elems;
ExprList() { };
COMMON_METHODS
Value * maybeThunk(EvalState & state, Env & env) override;
PosIdx getPos() const override
{
@ -373,6 +368,7 @@ struct ExprWith : Expr
PosIdx pos;
Expr * attrs, * body;
size_t prevWith;
ExprWith * parentWith;
ExprWith(const PosIdx & pos, Expr * attrs, Expr * body) : pos(pos), attrs(attrs), body(body) { };
PosIdx getPos() const override { return pos; }
COMMON_METHODS
@ -400,6 +396,7 @@ struct ExprOpNot : Expr
{
Expr * e;
ExprOpNot(Expr * e) : e(e) { };
PosIdx getPos() const override { return e->getPos(); }
COMMON_METHODS
};
@ -449,20 +446,30 @@ struct ExprPos : Expr
COMMON_METHODS
};
/* only used to mark thunks as black holes. */
struct ExprBlackHole : Expr
{
void show(const SymbolTable & symbols, std::ostream & str) const override {}
void eval(EvalState & state, Env & env, Value & v) override;
void bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env) override {}
};
extern ExprBlackHole eBlackHole;
/* Static environments are used to map variable names onto (level,
displacement) pairs used to obtain the value of the variable at
runtime. */
struct StaticEnv
{
bool isWith;
ExprWith * isWith;
const StaticEnv * up;
// Note: these must be in sorted order.
typedef std::vector<std::pair<Symbol, Displacement>> Vars;
Vars vars;
StaticEnv(bool isWith, const StaticEnv * up, size_t expectedSize = 0) : isWith(isWith), up(up) {
StaticEnv(ExprWith * isWith, const StaticEnv * up, size_t expectedSize = 0) : isWith(isWith), up(up) {
vars.reserve(expectedSize);
};
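To make the (level, displacement) mapping concrete, here is a simplified sketch of the lookup it enables; the real logic lives in `ExprVar::bindVars` above and in the evaluator, and `findVar` here is made up:

```cpp
#include <cstddef>
#include <optional>
#include <utility>

// Simplified sketch: walk up the chain of static environments, returning how
// many levels up the variable was found and its displacement in that frame.
static std::optional<std::pair<std::size_t, Displacement>>
findVar(const StaticEnv * env, Symbol name)
{
    for (std::size_t level = 0; env; env = env->up, ++level) {
        auto i = env->find(name);     // vars are kept sorted, so this can bisect
        if (i != env->vars.end())
            return std::pair{level, i->second};
    }
    return std::nullopt;   // not statically bound; may still resolve via `with`
}
```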

View file

@ -19,13 +19,40 @@
#include <variant>
#include "util.hh"
#include "users.hh"
#include "nixexpr.hh"
#include "eval.hh"
#include "eval-settings.hh"
#include "globals.hh"
namespace nix {
#define YYLTYPE ::nix::ParserLocation
struct ParserLocation
{
int first_line, first_column;
int last_line, last_column;
// backup to recover from yyless(0)
int stashed_first_line, stashed_first_column;
int stashed_last_line, stashed_last_column;
void stash() {
stashed_first_line = first_line;
stashed_first_column = first_column;
stashed_last_line = last_line;
stashed_last_column = last_column;
}
void unstash() {
first_line = stashed_first_line;
first_column = stashed_first_column;
last_line = stashed_last_line;
last_column = stashed_last_column;
}
};
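A tiny standalone sketch of the stash/unstash contract relied on by the lexer rules shown earlier (the scenario is made up; it only exercises the struct defined above):

```cpp
#include <cassert>

int main()
{
    ParserLocation loc{};
    loc.first_line = loc.last_line = 3;
    loc.first_column = 5;
    loc.last_column = 9;

    loc.stash();              // adjustLoc() saves the location before scanning on
    loc.first_column = 10;    // the lexer advances past some text...
    loc.last_column = 14;
    loc.unstash();            // ...then yyless(0) pushes it back, so roll back too

    assert(loc.first_column == 5 && loc.last_column == 9);
    return 0;
}
```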
struct ParseData
{
EvalState & state;
@ -137,6 +164,7 @@ static void addAttr(ExprAttrs * attrs, AttrPath && attrPath,
dupAttr(state, ad.first, j2->second.pos, ad.second.pos);
jAttrs->attrs.emplace(ad.first, ad.second);
}
jAttrs->dynamicAttrs.insert(jAttrs->dynamicAttrs.end(), ae->dynamicAttrs.begin(), ae->dynamicAttrs.end());
} else {
dupAttr(state, attrPath, pos, j->second.pos);
}
@ -275,7 +303,12 @@ static Expr * stripIndentation(const PosIdx pos, SymbolTable & symbols,
}
/* If this is a single string, then don't do a concatenation. */
return es2->size() == 1 && dynamic_cast<ExprString *>((*es2)[0].second) ? (*es2)[0].second : new ExprConcatStrings(pos, true, es2);
if (es2->size() == 1 && dynamic_cast<ExprString *>((*es2)[0].second)) {
auto *const result = (*es2)[0].second;
delete es2;
return result;
}
return new ExprConcatStrings(pos, true, es2);
}
@ -330,7 +363,7 @@ void yyerror(YYLTYPE * loc, yyscan_t scanner, ParseData * data, const char * err
%type <ind_string_parts> ind_string_parts
%type <e> path_start string_parts string_attr
%type <id> attr
%token <id> ID ATTRPATH
%token <id> ID
%token <str> STR IND_STR
%token <n> INT
%token <nf> FLOAT
@ -513,7 +546,7 @@ path_start
/* add back in the trailing '/' to the first segment */
if ($1.p[$1.l-1] == '/' && $1.l > 1)
path += "/";
$$ = new ExprPath(path);
$$ = new ExprPath(ref<InputAccessor>(data->state.rootFS), std::move(path));
}
| HPATH {
if (evalSettings.pureEval) {
@ -523,7 +556,7 @@ path_start
);
}
Path path(getHome() + std::string($1.p + 1, $1.l - 1));
$$ = new ExprPath(path);
$$ = new ExprPath(ref<InputAccessor>(data->state.rootFS), std::move(path));
}
;
@ -639,13 +672,16 @@ formal
#include "eval.hh"
#include "filetransfer.hh"
#include "fetchers.hh"
#include "tarball.hh"
#include "store-api.hh"
#include "flake/flake.hh"
#include "fs-input-accessor.hh"
#include "memory-input-accessor.hh"
namespace nix {
unsigned long Expr::nrExprs = 0;
Expr * EvalState::parse(
char * text,
@ -658,7 +694,7 @@ Expr * EvalState::parse(
ParseData data {
.state = *this,
.symbols = symbols,
.basePath = std::move(basePath),
.basePath = basePath,
.origin = {origin},
};
@ -675,17 +711,26 @@ Expr * EvalState::parse(
}
SourcePath resolveExprPath(const SourcePath & path)
SourcePath resolveExprPath(SourcePath path)
{
unsigned int followCount = 0, maxFollow = 1024;
/* If `path' is a symlink, follow it. This is so that relative
path references work. */
auto path2 = path.resolveSymlinks();
while (!path.path.isRoot()) {
// Basic cycle/depth limit to avoid infinite loops.
if (++followCount >= maxFollow)
throw Error("too many symbolic links encountered while traversing the path '%s'", path);
auto p = path.parent().resolveSymlinks() + path.baseName();
if (p.lstat().type != InputAccessor::tSymlink) break;
path = {path.accessor, CanonPath(p.readLink(), path.path.parent().value_or(CanonPath::root))};
}
/* If `path' refers to a directory, append `/default.nix'. */
if (path2.lstat().type == InputAccessor::tDirectory)
return path2 + "default.nix";
if (path.resolveSymlinks().lstat().type == InputAccessor::tDirectory)
return path + "default.nix";
return path2;
return path;
}
@ -697,7 +742,7 @@ Expr * EvalState::parseExprFromFile(const SourcePath & path)
Expr * EvalState::parseExprFromFile(const SourcePath & path, std::shared_ptr<StaticEnv> & staticEnv)
{
auto buffer = path.readFile();
auto buffer = path.resolveSymlinks().readFile();
// readFile hopefully has left some extra space for terminators
buffer.append("\0\0", 2);
return parse(buffer.data(), buffer.size(), Pos::Origin(path), path.parent(), staticEnv);
@ -729,49 +774,30 @@ Expr * EvalState::parseStdin()
}
void EvalState::addToSearchPath(const std::string & s)
{
size_t pos = s.find('=');
std::string prefix;
Path path;
if (pos == std::string::npos) {
path = s;
} else {
prefix = std::string(s, 0, pos);
path = std::string(s, pos + 1);
}
searchPath.emplace_back(prefix, path);
}
SourcePath EvalState::findFile(const std::string_view path)
{
return findFile(searchPath, path);
}
SourcePath EvalState::findFile(SearchPath & searchPath, const std::string_view path, const PosIdx pos)
SourcePath EvalState::findFile(const SearchPath & searchPath, const std::string_view path, const PosIdx pos)
{
for (auto & i : searchPath) {
std::string suffix;
if (i.first.empty())
suffix = concatStrings("/", path);
else {
auto s = i.first.size();
if (path.compare(0, s, i.first) != 0 ||
(path.size() > s && path[s] != '/'))
continue;
suffix = path.size() == s ? "" : concatStrings("/", path.substr(s));
}
auto r = resolveSearchPathElem(i);
if (!r.first) continue;
Path res = r.second + suffix;
if (pathExists(res)) return CanonPath(canonPath(res));
for (auto & i : searchPath.elements) {
auto suffixOpt = i.prefix.suffixIfPotentialMatch(path);
if (!suffixOpt) continue;
auto suffix = *suffixOpt;
auto rOpt = resolveSearchPathPath(i.path);
if (!rOpt) continue;
auto r = *rOpt;
Path res = suffix == "" ? r : concatStrings(r, "/", suffix);
if (pathExists(res)) return rootPath(CanonPath(canonPath(res)));
}
if (hasPrefix(path, "nix/"))
return CanonPath(concatStrings(corepkgsPrefix, path.substr(4)));
return {corepkgsFS, CanonPath(path.substr(3))};
debugThrow(ThrownError({
.msg = hintfmt(evalSettings.pureEval
@ -783,49 +809,66 @@ SourcePath EvalState::findFile(SearchPath & searchPath, const std::string_view p
}
std::pair<bool, std::string> EvalState::resolveSearchPathElem(const SearchPathElem & elem)
std::optional<std::string> EvalState::resolveSearchPathPath(const SearchPath::Path & value0, bool initAccessControl)
{
auto i = searchPathResolved.find(elem.second);
auto & value = value0.s;
auto i = searchPathResolved.find(value);
if (i != searchPathResolved.end()) return i->second;
std::pair<bool, std::string> res;
std::optional<std::string> res;
if (EvalSettings::isPseudoUrl(elem.second)) {
if (EvalSettings::isPseudoUrl(value)) {
try {
auto storePath = fetchers::downloadTarball(
store, EvalSettings::resolvePseudoUrl(elem.second), "source", false).tree.storePath;
res = { true, store->toRealPath(storePath) };
store, EvalSettings::resolvePseudoUrl(value), "source", false).storePath;
res = { store->toRealPath(storePath) };
} catch (FileTransferError & e) {
logWarning({
.msg = hintfmt("Nix search path entry '%1%' cannot be downloaded, ignoring", elem.second)
.msg = hintfmt("Nix search path entry '%1%' cannot be downloaded, ignoring", value)
});
res = { false, "" };
}
}
else if (hasPrefix(elem.second, "flake:")) {
else if (hasPrefix(value, "flake:")) {
experimentalFeatureSettings.require(Xp::Flakes);
auto flakeRef = parseFlakeRef(elem.second.substr(6), {}, true, false);
debug("fetching flake search path element '%s''", elem.second);
auto storePath = flakeRef.resolve(store).fetchTree(store).first.storePath;
res = { true, store->toRealPath(storePath) };
auto flakeRef = parseFlakeRef(value.substr(6), {}, true, false);
debug("fetching flake search path element '%s''", value);
auto storePath = flakeRef.resolve(store).fetchTree(store).first;
res = { store->toRealPath(storePath) };
}
else {
auto path = absPath(elem.second);
auto path = absPath(value);
/* Allow access to paths in the search path. */
if (initAccessControl) {
allowPath(path);
if (store->isInStore(path)) {
try {
StorePathSet closure;
store->computeFSClosure(store->toStorePath(path).first, closure);
for (auto & p : closure)
allowPath(p);
} catch (InvalidPath &) { }
}
}
if (pathExists(path))
res = { true, path };
res = { path };
else {
logWarning({
.msg = hintfmt("Nix search path entry '%1%' does not exist, ignoring", elem.second)
.msg = hintfmt("Nix search path entry '%1%' does not exist, ignoring", value)
});
res = { false, "" };
res = std::nullopt;
}
}
debug("resolved search path element '%s' to '%s'", elem.second, res.second);
if (res)
debug("resolved search path element '%s' to '%s'", value, *res);
else
debug("failed to resolve search path element '%s'", value);
searchPathResolved[elem.second] = res;
searchPathResolved.emplace(value, res);
return res;
}

View file

@ -1,10 +1,11 @@
#include "eval.hh"
#include "fs-input-accessor.hh"
namespace nix {
SourcePath EvalState::rootPath(CanonPath path)
{
return std::move(path);
return {rootFS, std::move(path)};
}
}

File diff suppressed because it is too large

View file

@ -8,19 +8,25 @@
namespace nix {
/**
* For functions where we do not expect deep recursion, we can use a sizable
* part of the stack as free allocation space.
*
* Note: this is expected to be multiplied by sizeof(Value), or about 24 bytes.
*/
constexpr size_t nonRecursiveStackReservation = 128;
/**
* Functions that may be applied to self-similar inputs, such as concatMap on a
* tree, should reserve a smaller part of the stack for allocation.
*
* Note: this is expected to be multiplied by sizeof(Value), or about 24 bytes.
*/
constexpr size_t conservativeStackReservation = 16;
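For illustration only, a hypothetical primop sketch showing how these constants are intended to size the `SmallTemporaryValueVector`/`SmallValueVector` aliases introduced earlier; the primop itself and its error-context string are made up:

```cpp
// Roughly nonRecursiveStackReservation * sizeof(Value) bytes of stack are
// reserved for scratch values; only unusually long lists hit the heap.
static void prim_example(EvalState & state, const PosIdx pos, Value ** args, Value & v)
{
    state.forceList(*args[0], pos, "while evaluating the argument of an example primop");
    SmallTemporaryValueVector<nonRecursiveStackReservation> scratch(args[0]->listSize());
    // ... fill and use `scratch`; it must not escape this stack frame ...
    v.mkInt(scratch.size());
}
```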
struct RegisterPrimOp
{
struct Info
{
std::string name;
std::vector<std::string> args;
size_t arity = 0;
const char * doc;
PrimOpFun fun;
std::optional<ExperimentalFeature> experimentalFeature;
};
typedef std::vector<Info> PrimOps;
typedef std::vector<PrimOp> PrimOps;
static PrimOps * primOps;
/**
@ -28,7 +34,7 @@ struct RegisterPrimOp
* will get called during EvalState initialization, so there
* may be primops not yet added and builtins is not yet sorted.
*/
RegisterPrimOp(Info && info);
RegisterPrimOp(PrimOp && primOp);
};
/* These primops are disabled without enableNativeCode, but plugins

View file

@ -30,20 +30,27 @@ static RegisterPrimOp primop_hasContext({
.name = "__hasContext",
.args = {"s"},
.doc = R"(
Return `true` if string *s* has a non-empty context. The
context can be obtained with
Return `true` if string *s* has a non-empty context.
The context can be obtained with
[`getContext`](#builtins-getContext).
> **Example**
>
> Many operations require a string context to be empty because they are intended only to work with "regular" strings, and also to help users avoid unintentionally losing track of string context elements.
> `builtins.hasContext` can help create better domain-specific errors in those cases.
>
> ```nix
> name: meta:
>
> if builtins.hasContext name
> then throw "package name cannot contain string context"
> else { ${name} = meta; }
> ```
)",
.fun = prim_hasContext
});
/* Sometimes we want to pass a derivation path (i.e. pkg.drvPath) to a
builder without causing the derivation to be built (for instance,
in the derivation that builds NARs in nix-push, when doing
source-only deployment). This primop marks the string context so
that builtins.derivation adds the path to drv.inputSrcs rather than
drv.inputDrvs. */
static void prim_unsafeDiscardOutputDependency(EvalState & state, const PosIdx pos, Value * * args, Value & v)
{
NixStringContext context;
@ -51,13 +58,13 @@ static void prim_unsafeDiscardOutputDependency(EvalState & state, const PosIdx p
NixStringContext context2;
for (auto && c : context) {
if (auto * ptr = std::get_if<NixStringContextElem::DrvDeep>(&c)) {
if (auto * ptr = std::get_if<NixStringContextElem::DrvDeep>(&c.raw)) {
context2.emplace(NixStringContextElem::Opaque {
.path = ptr->drvPath
});
} else {
/* Can reuse original item */
context2.emplace(std::move(c));
context2.emplace(std::move(c).raw);
}
}
@ -66,11 +73,83 @@ static void prim_unsafeDiscardOutputDependency(EvalState & state, const PosIdx p
static RegisterPrimOp primop_unsafeDiscardOutputDependency({
.name = "__unsafeDiscardOutputDependency",
.arity = 1,
.args = {"s"},
.doc = R"(
Create a copy of the given string where every "derivation deep" string context element is turned into a constant string context element.
This is the opposite of [`builtins.addDrvOutputDependencies`](#builtins-addDrvOutputDependencies).
This is unsafe because it allows us to "forget" store objects we would have otherwise referred to with the string context,
whereas Nix normally tracks all dependencies consistently.
Safe operations "grow" but never "shrink" string contexts.
[`builtins.addDrvOutputDependencies`], in contrast, is safe because a "derivation deep" string context element always refers to the underlying derivation (among other things).
Replacing a constant string context element with a "derivation deep" element is a safe operation that just enlarges the string context without forgetting anything.
[`builtins.addDrvOutputDependencies`]: #builtins-addDrvOutputDependencies
)",
.fun = prim_unsafeDiscardOutputDependency
});
static void prim_addDrvOutputDependencies(EvalState & state, const PosIdx pos, Value * * args, Value & v)
{
NixStringContext context;
auto s = state.coerceToString(pos, *args[0], context, "while evaluating the argument passed to builtins.addDrvOutputDependencies");
auto contextSize = context.size();
if (contextSize != 1) {
throw EvalError({
.msg = hintfmt("context of string '%s' must have exactly one element, but has %d", *s, contextSize),
.errPos = state.positions[pos]
});
}
NixStringContext context2 {
(NixStringContextElem { std::visit(overloaded {
[&](const NixStringContextElem::Opaque & c) -> NixStringContextElem::DrvDeep {
if (!c.path.isDerivation()) {
throw EvalError({
.msg = hintfmt("path '%s' is not a derivation",
state.store->printStorePath(c.path)),
.errPos = state.positions[pos],
});
}
return NixStringContextElem::DrvDeep {
.drvPath = c.path,
};
},
[&](const NixStringContextElem::Built & c) -> NixStringContextElem::DrvDeep {
throw EvalError({
.msg = hintfmt("`addDrvOutputDependencies` can only act on derivations, not on a derivation output such as '%1%'", c.output),
.errPos = state.positions[pos],
});
},
[&](const NixStringContextElem::DrvDeep & c) -> NixStringContextElem::DrvDeep {
/* Reuse original item because we want this to be idempotent. */
return std::move(c);
},
}, context.begin()->raw) }),
};
v.mkString(*s, context2);
}
static RegisterPrimOp primop_addDrvOutputDependencies({
.name = "__addDrvOutputDependencies",
.args = {"s"},
.doc = R"(
Create a copy of the given string where a single constant string context element is turned into a "derivation deep" string context element.
The store path that is the constant string context element should point to a valid derivation, and end in `.drv`.
The original string context must not be empty or have multiple elements, and its single element must be either a constant or a "derivation deep" element.
The latter is supported so this function is idempotent.
This is the opposite of [`builtins.unsafeDiscardOutputDependency`](#builtins-unsafeDiscardOutputDependency).
)",
.fun = prim_addDrvOutputDependencies
});
/* Extract the context of a string as a structured Nix value.
The context is represented as an attribute set whose keys are the
@ -106,12 +185,15 @@ static void prim_getContext(EvalState & state, const PosIdx pos, Value * * args,
contextInfos[std::move(d.drvPath)].allOutputs = true;
},
[&](NixStringContextElem::Built && b) {
contextInfos[std::move(b.drvPath)].outputs.emplace_back(std::move(b.output));
// FIXME should eventually show string context as is, no
// resolving here.
auto drvPath = resolveDerivedPath(*state.store, *b.drvPath);
contextInfos[std::move(drvPath)].outputs.emplace_back(std::move(b.output));
},
[&](NixStringContextElem::Opaque && o) {
contextInfos[std::move(o.path)].path = true;
},
}, ((NixStringContextElem &&) i).raw());
}, ((NixStringContextElem &&) i).raw);
}
auto attrs = state.buildBindings(contextInfos.size());
@ -222,7 +304,7 @@ static void prim_appendContext(EvalState & state, const PosIdx pos, Value * * ar
for (auto elem : iter->value->listItems()) {
auto outputName = state.forceStringNoCtx(*elem, iter->pos, "while evaluating an output name within a string context");
context.emplace(NixStringContextElem::Built {
.drvPath = namePath,
.drvPath = makeConstantStorePathRef(namePath),
.output = std::string { outputName },
});
}

View file

@ -1,41 +1,155 @@
#include "primops.hh"
#include "store-api.hh"
#include "realisation.hh"
#include "make-content-addressed.hh"
#include "url.hh"
namespace nix {
/**
* Handler for the content addressed case.
*
* @param state Evaluator state and store to write to.
* @param fromStore Store containing the path to rewrite.
* @param fromPath Source path to be rewritten.
* @param toPathMaybe Path to write the rewritten path to. If empty, the error shows the actual path.
* @param v Return `Value`
*/
static void runFetchClosureWithRewrite(EvalState & state, const PosIdx pos, Store & fromStore, const StorePath & fromPath, const std::optional<StorePath> & toPathMaybe, Value &v) {
// establish toPath or throw
if (!toPathMaybe || !state.store->isValidPath(*toPathMaybe)) {
auto rewrittenPath = makeContentAddressed(fromStore, *state.store, fromPath);
if (toPathMaybe && *toPathMaybe != rewrittenPath)
throw Error({
.msg = hintfmt("rewriting '%s' to content-addressed form yielded '%s', while '%s' was expected",
state.store->printStorePath(fromPath),
state.store->printStorePath(rewrittenPath),
state.store->printStorePath(*toPathMaybe)),
.errPos = state.positions[pos]
});
if (!toPathMaybe)
throw Error({
.msg = hintfmt(
"rewriting '%s' to content-addressed form yielded '%s'\n"
"Use this value for the 'toPath' attribute passed to 'fetchClosure'",
state.store->printStorePath(fromPath),
state.store->printStorePath(rewrittenPath)),
.errPos = state.positions[pos]
});
}
auto toPath = *toPathMaybe;
// check and return
auto resultInfo = state.store->queryPathInfo(toPath);
if (!resultInfo->isContentAddressed(*state.store)) {
// We don't perform the rewriting when outPath already exists, as an optimisation.
// However, we can quickly detect a mistake if the toPath is input addressed.
throw Error({
.msg = hintfmt(
"The 'toPath' value '%s' is input-addressed, so it can't possibly be the result of rewriting to a content-addressed path.\n\n"
"Set 'toPath' to an empty string to make Nix report the correct content-addressed path.",
state.store->printStorePath(toPath)),
.errPos = state.positions[pos]
});
}
state.mkStorePathString(toPath, v);
}
/**
* Fetch the closure and make sure it's content addressed.
*/
static void runFetchClosureWithContentAddressedPath(EvalState & state, const PosIdx pos, Store & fromStore, const StorePath & fromPath, Value & v) {
if (!state.store->isValidPath(fromPath))
copyClosure(fromStore, *state.store, RealisedPath::Set { fromPath });
auto info = state.store->queryPathInfo(fromPath);
if (!info->isContentAddressed(*state.store)) {
throw Error({
.msg = hintfmt(
"The 'fromPath' value '%s' is input-addressed, but 'inputAddressed' is set to 'false' (default).\n\n"
"If you do intend to fetch an input-addressed store path, add\n\n"
" inputAddressed = true;\n\n"
"to the 'fetchClosure' arguments.\n\n"
"Note that to ensure authenticity input-addressed store paths, users must configure a trusted binary cache public key on their systems. This is not needed for content-addressed paths.",
state.store->printStorePath(fromPath)),
.errPos = state.positions[pos]
});
}
state.mkStorePathString(fromPath, v);
}
/**
* Fetch the closure and make sure it's input addressed.
*/
static void runFetchClosureWithInputAddressedPath(EvalState & state, const PosIdx pos, Store & fromStore, const StorePath & fromPath, Value & v) {
if (!state.store->isValidPath(fromPath))
copyClosure(fromStore, *state.store, RealisedPath::Set { fromPath });
auto info = state.store->queryPathInfo(fromPath);
if (info->isContentAddressed(*state.store)) {
throw Error({
.msg = hintfmt(
"The store object referred to by 'fromPath' at '%s' is not input-addressed, but 'inputAddressed' is set to 'true'.\n\n"
"Remove the 'inputAddressed' attribute (it defaults to 'false') to expect 'fromPath' to be content-addressed",
state.store->printStorePath(fromPath)),
.errPos = state.positions[pos]
});
}
state.mkStorePathString(fromPath, v);
}
typedef std::optional<StorePath> StorePathOrGap;
static void prim_fetchClosure(EvalState & state, const PosIdx pos, Value * * args, Value & v)
{
state.forceAttrs(*args[0], pos, "while evaluating the argument passed to builtins.fetchClosure");
std::optional<std::string> fromStoreUrl;
std::optional<StorePath> fromPath;
bool toCA = false;
std::optional<StorePath> toPath;
std::optional<StorePathOrGap> toPath;
std::optional<bool> inputAddressedMaybe;
for (auto & attr : *args[0]->attrs) {
const auto & attrName = state.symbols[attr.name];
auto attrHint = [&]() -> std::string {
return "while evaluating the '" + attrName + "' attribute passed to builtins.fetchClosure";
};
if (attrName == "fromPath") {
NixStringContext context;
fromPath = state.coerceToStorePath(attr.pos, *attr.value, context,
"while evaluating the 'fromPath' attribute passed to builtins.fetchClosure");
fromPath = state.coerceToStorePath(attr.pos, *attr.value, context, attrHint());
}
else if (attrName == "toPath") {
state.forceValue(*attr.value, attr.pos);
toCA = true;
if (attr.value->type() != nString || attr.value->string.s != std::string("")) {
bool isEmptyString = attr.value->type() == nString && attr.value->string_view() == "";
if (isEmptyString) {
toPath = StorePathOrGap {};
}
else {
NixStringContext context;
toPath = state.coerceToStorePath(attr.pos, *attr.value, context,
"while evaluating the 'toPath' attribute passed to builtins.fetchClosure");
toPath = state.coerceToStorePath(attr.pos, *attr.value, context, attrHint());
}
}
else if (attrName == "fromStore")
fromStoreUrl = state.forceStringNoCtx(*attr.value, attr.pos,
"while evaluating the 'fromStore' attribute passed to builtins.fetchClosure");
attrHint());
else if (attrName == "inputAddressed")
inputAddressedMaybe = state.forceBool(*attr.value, attr.pos, attrHint());
else
throw Error({
@ -50,6 +164,18 @@ static void prim_fetchClosure(EvalState & state, const PosIdx pos, Value * * arg
.errPos = state.positions[pos]
});
bool inputAddressed = inputAddressedMaybe.value_or(false);
if (inputAddressed) {
if (toPath)
throw Error({
.msg = hintfmt("attribute '%s' is set to true, but '%s' is also set. Please remove one of them",
"inputAddressed",
"toPath"),
.errPos = state.positions[pos]
});
}
if (!fromStoreUrl)
throw Error({
.msg = hintfmt("attribute '%s' is missing in call to 'fetchClosure'", "fromStore"),
@ -74,55 +200,40 @@ static void prim_fetchClosure(EvalState & state, const PosIdx pos, Value * * arg
auto fromStore = openStore(parsedURL.to_string());
if (toCA) {
if (!toPath || !state.store->isValidPath(*toPath)) {
auto remappings = makeContentAddressed(*fromStore, *state.store, { *fromPath });
auto i = remappings.find(*fromPath);
assert(i != remappings.end());
if (toPath && *toPath != i->second)
throw Error({
.msg = hintfmt("rewriting '%s' to content-addressed form yielded '%s', while '%s' was expected",
state.store->printStorePath(*fromPath),
state.store->printStorePath(i->second),
state.store->printStorePath(*toPath)),
.errPos = state.positions[pos]
});
if (!toPath)
throw Error({
.msg = hintfmt(
"rewriting '%s' to content-addressed form yielded '%s'; "
"please set this in the 'toPath' attribute passed to 'fetchClosure'",
state.store->printStorePath(*fromPath),
state.store->printStorePath(i->second)),
.errPos = state.positions[pos]
});
}
} else {
if (!state.store->isValidPath(*fromPath))
copyClosure(*fromStore, *state.store, RealisedPath::Set { *fromPath });
toPath = fromPath;
}
/* In pure mode, require a CA path. */
if (evalSettings.pureEval) {
auto info = state.store->queryPathInfo(*toPath);
if (!info->isContentAddressed(*state.store))
throw Error({
.msg = hintfmt("in pure mode, 'fetchClosure' requires a content-addressed path, which '%s' isn't",
state.store->printStorePath(*toPath)),
.errPos = state.positions[pos]
});
}
state.mkStorePathString(*toPath, v);
if (toPath)
runFetchClosureWithRewrite(state, pos, *fromStore, *fromPath, *toPath, v);
else if (inputAddressed)
runFetchClosureWithInputAddressedPath(state, pos, *fromStore, *fromPath, v);
else
runFetchClosureWithContentAddressedPath(state, pos, *fromStore, *fromPath, v);
}
static RegisterPrimOp primop_fetchClosure({
.name = "__fetchClosure",
.args = {"args"},
.doc = R"(
Fetch a Nix store closure from a binary cache, rewriting it into
content-addressed form. For example,
Fetch a store path [closure](@docroot@/glossary.md#gloss-closure) from a binary cache, and return the store path as a string with context.
This function can be invoked in three ways, which we will discuss in order of preference.
**Fetch a content-addressed store path**
Example:
```nix
builtins.fetchClosure {
fromStore = "https://cache.nixos.org";
fromPath = /nix/store/ldbhlwhh39wha58rm61bkiiwm6j7211j-git-2.33.1;
}
```
This is the simplest invocation, and it does not require the user of the expression to configure [`trusted-public-keys`](@docroot@/command-ref/conf-file.md#conf-trusted-public-keys) to ensure their authenticity.
If your store path is [input addressed](@docroot@/glossary.md#gloss-input-addressed-store-object) instead of content addressed, consider the other two invocations.
**Fetch any store path and rewrite it to a fully content-addressed store path**
Example:
```nix
builtins.fetchClosure {
@ -132,31 +243,42 @@ static RegisterPrimOp primop_fetchClosure({
}
```
fetches `/nix/store/r2jd...` from the specified binary cache,
This example fetches `/nix/store/r2jd...` from the specified binary cache,
and rewrites it into the content-addressed store path
`/nix/store/ldbh...`.
If `fromPath` is already content-addressed, or if you are
allowing impure evaluation (`--impure`), then `toPath` may be
omitted.
As in the previous example, no extra configuration or privileges are required.
To find out the correct value for `toPath` given a `fromPath`,
you can use `nix store make-content-addressed`:
use [`nix store make-content-addressed`](@docroot@/command-ref/new-cli/nix3-store-make-content-addressed.md):
```console
# nix store make-content-addressed --from https://cache.nixos.org /nix/store/r2jd6ygnmirm2g803mksqqjm4y39yi6i-git-2.33.1
rewrote '/nix/store/r2jd6ygnmirm2g803mksqqjm4y39yi6i-git-2.33.1' to '/nix/store/ldbhlwhh39wha58rm61bkiiwm6j7211j-git-2.33.1'
```
This function is similar to `builtins.storePath` in that it
allows you to use a previously built store path in a Nix
expression. However, it is more reproducible because it requires
specifying a binary cache from which the path can be fetched.
Also, requiring a content-addressed final store path avoids the
need for users to configure binary cache public keys.
Alternatively, set `toPath = ""` and find the correct `toPath` in the error message.
This function is only available if you enable the experimental
feature `fetch-closure`.
**Fetch an input-addressed store path as is**
Example:
```nix
builtins.fetchClosure {
fromStore = "https://cache.nixos.org";
fromPath = /nix/store/r2jd6ygnmirm2g803mksqqjm4y39yi6i-git-2.33.1;
inputAddressed = true;
}
```
It is possible to fetch an [input-addressed store path](@docroot@/glossary.md#gloss-input-addressed-store-object) and return it as is.
However, this is the least preferred way of invoking `fetchClosure`, because it requires that the input-addressed paths are trusted by the Nix configuration.
**`builtins.storePath`**
`fetchClosure` is similar to [`builtins.storePath`](#builtins-storePath) in that it allows you to use a previously built store path in a Nix expression.
However, `fetchClosure` is more reproducible because it specifies a binary cache from which the path can be fetched.
Also, using content-addressed store paths does not require users to configure [`trusted-public-keys`](@docroot@/command-ref/conf-file.md#conf-trusted-public-keys) to ensure their authenticity.
)",
.fun = prim_fetchClosure,
.experimentalFeature = Xp::FetchClosure,

View file

@ -1,5 +1,6 @@
#include "primops.hh"
#include "eval-inline.hh"
#include "eval-settings.hh"
#include "store-api.hh"
#include "fetchers.hh"
#include "url.hh"
@ -30,7 +31,7 @@ static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value * * a
// be both a revision or a branch/tag name.
auto value = state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the `rev` attribute passed to builtins.fetchMercurial");
if (std::regex_match(value.begin(), value.end(), revRegex))
rev = Hash::parseAny(value, htSHA1);
rev = Hash::parseAny(value, HashAlgorithm::SHA1);
else
ref = value;
}
@ -70,22 +71,22 @@ static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value * * a
auto input = fetchers::Input::fromAttrs(std::move(attrs));
// FIXME: use name
auto [tree, input2] = input.fetch(state.store);
auto [storePath, input2] = input.fetch(state.store);
auto attrs2 = state.buildBindings(8);
state.mkStorePathString(tree.storePath, attrs2.alloc(state.sOutPath));
state.mkStorePathString(storePath, attrs2.alloc(state.sOutPath));
if (input2.getRef())
attrs2.alloc("branch").mkString(*input2.getRef());
// Backward compatibility: set 'rev' to
// 0000000000000000000000000000000000000000 for a dirty tree.
auto rev2 = input2.getRev().value_or(Hash(htSHA1));
auto rev2 = input2.getRev().value_or(Hash(HashAlgorithm::SHA1));
attrs2.alloc("rev").mkString(rev2.gitRev());
attrs2.alloc("shortRev").mkString(rev2.gitRev().substr(0, 12));
if (auto revCount = input2.getRevCount())
attrs2.alloc("revCount").mkInt(*revCount);
v.mkAttrs(attrs2);
state.allowPath(tree.storePath);
state.allowPath(storePath);
}
static RegisterPrimOp r_fetchMercurial({

View file

@ -1,10 +1,13 @@
#include "primops.hh"
#include "eval-inline.hh"
#include "eval-settings.hh"
#include "store-api.hh"
#include "fetchers.hh"
#include "filetransfer.hh"
#include "registry.hh"
#include "tarball.hh"
#include "url.hh"
#include "value-to-json.hh"
#include <ctime>
#include <iomanip>
@ -14,7 +17,7 @@ namespace nix {
void emitTreeAttrs(
EvalState & state,
const fetchers::Tree & tree,
const StorePath & storePath,
const fetchers::Input & input,
Value & v,
bool emptyRevFallback,
@ -22,16 +25,15 @@ void emitTreeAttrs(
{
assert(input.isLocked());
auto attrs = state.buildBindings(8);
auto attrs = state.buildBindings(10);
state.mkStorePathString(tree.storePath, attrs.alloc(state.sOutPath));
state.mkStorePathString(storePath, attrs.alloc(state.sOutPath));
// FIXME: support arbitrary input attributes.
auto narHash = input.getNarHash();
assert(narHash);
attrs.alloc("narHash").mkString(narHash->to_string(SRI, true));
attrs.alloc("narHash").mkString(narHash->to_string(HashFormat::SRI, true));
if (input.getType() == "git")
attrs.alloc("submodules").mkBool(
@ -44,7 +46,7 @@ void emitTreeAttrs(
attrs.alloc("shortRev").mkString(rev->gitShortRev());
} else if (emptyRevFallback) {
// Backwards compat for `builtins.fetchGit`: dirty repos return an empty sha1 as rev
auto emptyHash = Hash(htSHA1);
auto emptyHash = Hash(HashAlgorithm::SHA1);
attrs.alloc("rev").mkString(emptyHash.gitRev());
attrs.alloc("shortRev").mkString(emptyHash.gitShortRev());
}
@ -56,6 +58,11 @@ void emitTreeAttrs(
}
if (auto dirtyRev = fetchers::maybeGetStrAttr(input.attrs, "dirtyRev")) {
attrs.alloc("dirtyRev").mkString(*dirtyRev);
attrs.alloc("dirtyShortRev").mkString(*fetchers::maybeGetStrAttr(input.attrs, "dirtyShortRev"));
}
if (auto lastModified = input.getLastModified()) {
attrs.alloc("lastModified").mkInt(*lastModified);
attrs.alloc("lastModifiedDate").mkString(
@ -65,36 +72,10 @@ void emitTreeAttrs(
v.mkAttrs(attrs);
}
std::string fixURI(std::string uri, EvalState & state, const std::string & defaultScheme = "file")
{
state.checkURI(uri);
if (uri.find("://") == std::string::npos) {
const auto p = ParsedURL {
.scheme = defaultScheme,
.authority = "",
.path = uri
};
return p.to_string();
} else {
return uri;
}
}
std::string fixURIForGit(std::string uri, EvalState & state)
{
/* Detects scp-style uris (e.g. git@github.com:NixOS/nix) and fixes
* them by removing the `:` and assuming a scheme of `ssh://`
* */
static std::regex scp_uri("([^/]*)@(.*):(.*)");
if (uri[0] != '/' && std::regex_match(uri, scp_uri))
return fixURI(std::regex_replace(uri, scp_uri, "$1@$2/$3"), state, "ssh");
else
return fixURI(uri, state);
}
struct FetchTreeParams {
bool emptyRevFallback = false;
bool allowNameArgument = false;
bool isFetchGit = false;
};
static void fetchTree(
@ -102,11 +83,12 @@ static void fetchTree(
const PosIdx pos,
Value * * args,
Value & v,
std::optional<std::string> type,
const FetchTreeParams & params = FetchTreeParams{}
) {
fetchers::Input input;
NixStringContext context;
std::optional<std::string> type;
if (params.isFetchGit) type = "git";
state.forceValue(*args[0], pos);
@ -136,16 +118,18 @@ static void fetchTree(
if (attr.value->type() == nPath || attr.value->type() == nString) {
auto s = state.coerceToString(attr.pos, *attr.value, context, "", false, false).toOwned();
attrs.emplace(state.symbols[attr.name],
state.symbols[attr.name] == "url"
? type == "git"
? fixURIForGit(s, state)
: fixURI(s, state)
params.isFetchGit && state.symbols[attr.name] == "url"
? fixGitURL(s)
: s);
}
else if (attr.value->type() == nBool)
attrs.emplace(state.symbols[attr.name], Explicit<bool>{attr.value->boolean});
else if (attr.value->type() == nInt)
attrs.emplace(state.symbols[attr.name], uint64_t(attr.value->integer));
else if (state.symbols[attr.name] == "publicKeys") {
experimentalFeatureSettings.require(Xp::VerifiedFetches);
attrs.emplace(state.symbols[attr.name], printValueAsJSON(state, true, *attr.value, pos, context).dump());
}
else
state.debugThrowLastTrace(TypeError("fetchTree argument '%s' is %s while a string, Boolean or integer is expected",
state.symbols[attr.name], showType(*attr.value)));
@ -164,40 +148,257 @@ static void fetchTree(
"while evaluating the first argument passed to the fetcher",
false, false).toOwned();
if (type == "git") {
if (params.isFetchGit) {
fetchers::Attrs attrs;
attrs.emplace("type", "git");
attrs.emplace("url", fixURIForGit(url, state));
attrs.emplace("url", fixGitURL(url));
input = fetchers::Input::fromAttrs(std::move(attrs));
} else {
input = fetchers::Input::fromURL(fixURI(url, state));
if (!experimentalFeatureSettings.isEnabled(Xp::Flakes))
state.debugThrowLastTrace(EvalError({
.msg = hintfmt("passing a string argument to 'fetchTree' requires the 'flakes' experimental feature"),
.errPos = state.positions[pos]
}));
input = fetchers::Input::fromURL(url);
}
}
if (!evalSettings.pureEval && !input.isDirect())
if (!evalSettings.pureEval && !input.isDirect() && experimentalFeatureSettings.isEnabled(Xp::Flakes))
input = lookupInRegistries(state.store, input).first;
if (evalSettings.pureEval && !input.isLocked())
state.debugThrowLastTrace(EvalError("in pure evaluation mode, 'fetchTree' requires a locked input, at %s", state.positions[pos]));
auto [tree, input2] = input.fetch(state.store);
state.checkURI(input.toURLString());
state.allowPath(tree.storePath);
auto [storePath, input2] = input.fetch(state.store);
emitTreeAttrs(state, tree, input2, v, params.emptyRevFallback, false);
state.allowPath(storePath);
emitTreeAttrs(state, storePath, input2, v, params.emptyRevFallback, false);
}
static void prim_fetchTree(EvalState & state, const PosIdx pos, Value * * args, Value & v)
{
experimentalFeatureSettings.require(Xp::Flakes);
fetchTree(state, pos, args, v, std::nullopt, FetchTreeParams { .allowNameArgument = false });
fetchTree(state, pos, args, v, { });
}
// FIXME: document
static RegisterPrimOp primop_fetchTree({
.name = "fetchTree",
.arity = 1,
.fun = prim_fetchTree
.args = {"input"},
.doc = R"(
Fetch a file system tree or a plain file using one of the supported backends and return an attribute set with:
- the resulting fixed-output [store path](@docroot@/glossary.md#gloss-store-path)
- the corresponding [NAR](@docroot@/glossary.md#gloss-nar) hash
- backend-specific metadata (currently not documented). <!-- TODO: document output attributes -->
*input* must be an attribute set with the following attributes:
- `type` (String, required)
One of the [supported source types](#source-types).
This determines other required and allowed input attributes.
- `narHash` (String, optional)
The `narHash` parameter can be used to substitute the source of the tree.
It also allows for verification of tree contents that may not be provided by the underlying transfer mechanism.
If `narHash` is set, the source is first looked up in the Nix store and [substituters](@docroot@/command-ref/conf-file.md#conf-substituters), and only fetched if not available.
A subset of the output attributes of `fetchTree` can be re-used for subsequent calls to `fetchTree` to produce the same result again.
That is, `fetchTree` is idempotent.
Downloads are cached in `$XDG_CACHE_HOME/nix`.
The remote source will be fetched from the network if both are true:
- A NAR hash is supplied and the corresponding store path is not [valid](@docroot@/glossary.md#gloss-validity), that is, not available in the store
> **Note**
>
> [Substituters](@docroot@/command-ref/conf-file.md#conf-substituters) are not used in fetching.
- There is no cache entry or the cache entry is older than [`tarball-ttl`](@docroot@/command-ref/conf-file.md#conf-tarball-ttl)
## Source types
The following source types and associated input attributes are supported.
<!-- TODO: It would be soooo much more predictable to work with (and
document) if `fetchTree` were a curried call with the first parameter for
`type` or an attribute like `builtins.fetchTree.git`! -->
- `"file"`
Place a plain file into the Nix store.
This is similar to [`builtins.fetchurl`](@docroot@/language/builtins.md#builtins-fetchurl).
- `url` (String, required)
Supported protocols:
- `https`
> **Example**
>
> ```nix
> fetchTree {
> type = "file";
> url = "https://example.com/index.html";
> }
> ```
- `http`
Insecure HTTP transfer for legacy sources.
> **Warning**
>
> HTTP performs no encryption or authentication.
> Use a `narHash` known in advance to ensure the output has expected contents.
- `file`
A file on the local file system.
> **Example**
>
> ```nix
> fetchTree {
> type = "file";
> url = "file:///home/eelco/nix/README.md";
> }
> ```
- `"tarball"`
Download a tar archive and extract it into the Nix store.
This has the same underlying implementation as [`builtins.fetchTarball`](@docroot@/language/builtins.md#builtins-fetchTarball).
- `url` (String, required)
> **Example**
>
> ```nix
> fetchTree {
> type = "tarball";
> url = "https://github.com/NixOS/nixpkgs/tarball/nixpkgs-23.11";
> }
> ```
- `"git"`
Fetch a Git tree and copy it to the Nix store.
This is similar to [`builtins.fetchGit`](@docroot@/language/builtins.md#builtins-fetchGit).
- `url` (String, required)
The URL formats supported are the same as for Git itself.
> **Example**
>
> ```nix
> fetchTree {
> type = "git";
> url = "git@github.com:NixOS/nixpkgs.git";
> }
> ```
> **Note**
>
> If the URL points to a local directory, and no `ref` or `rev` is given, Nix will only consider files added to the Git index, as listed by `git ls-files`, but will use the *current file contents* of the Git working directory.
- `ref` (String, optional)
A [Git reference](https://git-scm.com/book/en/v2/Git-Internals-Git-References), such as a branch or tag name.
Default: `"HEAD"`
- `rev` (String, optional)
A Git revision; a commit hash.
Default: the tip of `ref`
- `shallow` (Bool, optional)
Make a shallow clone when fetching the Git tree.
Default: `false`
- `submodules` (Bool, optional)
Also fetch submodules if available.
Default: `false`
- `allRefs` (Bool, optional)
If set to `true`, always fetch the entire repository, even if the latest commit is still in the cache.
Otherwise, only the latest commit is fetched if it is not already cached.
Default: `false`
- `lastModified` (Integer, optional)
Unix timestamp of the fetched commit.
If set, pass through the value to the output attribute set.
Otherwise, generated from the fetched Git tree.
- `revCount` (Integer, optional)
Number of revisions in the history of the Git repository before the fetched commit.
If set, pass through the value to the output attribute set.
Otherwise, generated from the fetched Git tree.
The following input types are still subject to change:
- `"path"`
- `"github"`
- `"gitlab"`
- `"sourcehut"`
- `"mercurial"`
*input* can also be a [URL-like reference](@docroot@/command-ref/new-cli/nix3-flake.md#flake-references).
The additional input types and the URL-like syntax require the [`flakes` experimental feature](@docroot@/contributing/experimental-features.md#xp-feature-flakes) to be enabled.
> **Example**
>
> Fetch a GitHub repository using the attribute set representation:
>
> ```nix
> builtins.fetchTree {
> type = "github";
> owner = "NixOS";
> repo = "nixpkgs";
> rev = "ae2e6b3958682513d28f7d633734571fb18285dd";
> }
> ```
>
> This evaluates to the following attribute set:
>
> ```nix
> {
> lastModified = 1686503798;
> lastModifiedDate = "20230611171638";
> narHash = "sha256-rA9RqKP9OlBrgGCPvfd5HVAXDOy8k2SmPtB/ijShNXc=";
> outPath = "/nix/store/l5m6qlvfs9sdw14ja3qbzpglcjlb6j1x-source";
> rev = "ae2e6b3958682513d28f7d633734571fb18285dd";
> shortRev = "ae2e6b3";
> }
> ```
> **Example**
>
> Fetch the same GitHub repository using the URL-like syntax:
>
> ```nix
> builtins.fetchTree "github:NixOS/nixpkgs/ae2e6b3958682513d28f7d633734571fb18285dd"
> ```
)",
.fun = prim_fetchTree,
.experimentalFeature = Xp::FetchTree,
});
static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v,
@ -215,7 +416,7 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v
if (n == "url")
url = state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the url we should fetch");
else if (n == "sha256")
expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the sha256 of the content we should fetch"), htSHA256);
expectedHash = newHashAllowEmpty(state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the sha256 of the content we should fetch"), HashAlgorithm::SHA256);
else if (n == "name")
name = state.forceStringNoCtx(*attr.value, attr.pos, "while evaluating the name of the content we should fetch");
else
@ -245,14 +446,12 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v
state.debugThrowLastTrace(EvalError("in pure evaluation mode, '%s' requires a 'sha256' argument", who));
// early exit if pinned and already in the store
if (expectedHash && expectedHash->type == htSHA256) {
if (expectedHash && expectedHash->algo == HashAlgorithm::SHA256) {
auto expectedPath = state.store->makeFixedOutputPath(
name,
FixedOutputInfo {
.hash = {
.method = unpack ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat,
.hash = *expectedHash,
},
.method = unpack ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat,
.hash = *expectedHash,
.references = {}
});
@ -266,16 +465,16 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v
// https://github.com/NixOS/nix/issues/4313
auto storePath =
unpack
? fetchers::downloadTarball(state.store, *url, name, (bool) expectedHash).tree.storePath
? fetchers::downloadTarball(state.store, *url, name, (bool) expectedHash).storePath
: fetchers::downloadFile(state.store, *url, name, (bool) expectedHash).storePath;
if (expectedHash) {
auto hash = unpack
? state.store->queryPathInfo(storePath)->narHash
: hashFile(htSHA256, state.store->toRealPath(storePath));
: hashFile(HashAlgorithm::SHA256, state.store->toRealPath(storePath));
if (hash != *expectedHash)
state.debugThrowLastTrace(EvalError((unsigned int) 102, "hash mismatch in file downloaded from '%s':\n specified: %s\n got: %s",
*url, expectedHash->to_string(Base32, true), hash.to_string(Base32, true)));
*url, expectedHash->to_string(HashFormat::Nix32, true), hash.to_string(HashFormat::Nix32, true)));
}
state.allowAndSetStorePathString(storePath, v);
@ -349,7 +548,12 @@ static RegisterPrimOp primop_fetchTarball({
static void prim_fetchGit(EvalState & state, const PosIdx pos, Value * * args, Value & v)
{
fetchTree(state, pos, args, v, "git", FetchTreeParams { .emptyRevFallback = true, .allowNameArgument = true });
fetchTree(state, pos, args, v,
FetchTreeParams {
.emptyRevFallback = true,
.allowNameArgument = true,
.isFetchGit = true
});
}
static RegisterPrimOp primop_fetchGit({
@ -364,7 +568,7 @@ static RegisterPrimOp primop_fetchGit({
The URL of the repo.
- `name` (default: *basename of the URL*)
- `name` (default: `source`)
The name of the directory the repo should be exported to in the store.
@ -391,7 +595,8 @@ static RegisterPrimOp primop_fetchGit({
- `shallow` (default: `false`)
A Boolean parameter that specifies whether fetching a shallow clone is allowed.
A Boolean parameter that specifies whether fetching from a shallow remote repository is allowed.
This still performs a full clone of what is available on the remote.
- `allRefs`
@ -399,6 +604,42 @@ static RegisterPrimOp primop_fetchGit({
When set to `true`, it's possible to load a `rev` from *any* `ref`
(by default only `rev`s from the specified `ref` are supported).
- `verifyCommit` (default: `true` if `publicKey` or `publicKeys` are provided, otherwise `false`)
Whether to check `rev` for a signature matching `publicKey` or `publicKeys`.
If `verifyCommit` is enabled, then `fetchGit` cannot use a local repository with uncommitted changes.
Requires the [`verified-fetches` experimental feature](@docroot@/contributing/experimental-features.md#xp-feature-verified-fetches).
- `publicKey`
The public key against which `rev` is verified if `verifyCommit` is enabled.
Requires the [`verified-fetches` experimental feature](@docroot@/contributing/experimental-features.md#xp-feature-verified-fetches).
- `keytype` (default: `"ssh-ed25519"`)
The key type of `publicKey`.
Possible values:
- `"ssh-dsa"`
- `"ssh-ecdsa"`
- `"ssh-ecdsa-sk"`
- `"ssh-ed25519"`
- `"ssh-ed25519-sk"`
- `"ssh-rsa"`
Requires the [`verified-fetches` experimental feature](@docroot@/contributing/experimental-features.md#xp-feature-verified-fetches).
- `publicKeys`
The public keys against which `rev` is verified if `verifyCommit` is enabled.
Must be given as a list of attribute sets with the following form:
```nix
{
key = "<public key>";
type = "<key type>"; # optional, default: "ssh-ed25519"
}
```
Requires the [`verified-fetches` experimental feature](@docroot@/contributing/experimental-features.md#xp-feature-verified-fetches).
Here are some examples of how to use `fetchGit`.
- To fetch a private repository over SSH:
@ -473,6 +714,21 @@ static RegisterPrimOp primop_fetchGit({
}
```
- To verify the commit signature:
```nix
builtins.fetchGit {
url = "ssh://git@github.com/nixos/nix.git";
verifyCommit = true;
publicKeys = [
{
type = "ssh-ed25519";
key = "AAAAC3NzaC1lZDI1NTE5AAAAIArPKULJOid8eS6XETwUjO48/HKBWl7FTCK0Z//fplDi";
}
];
}
```
Nix will refetch the branch according to the [`tarball-ttl`](@docroot@/command-ref/conf-file.md#conf-tarball-ttl) setting.
This behavior is disabled in [pure evaluation mode](@docroot@/command-ref/conf-file.md#conf-pure-eval).

View file

@ -0,0 +1,55 @@
#include "search-path.hh"
namespace nix {
std::optional<std::string_view> SearchPath::Prefix::suffixIfPotentialMatch(
std::string_view path) const
{
auto n = s.size();
/* Non-empty prefix and suffix must be separated by a /, or the
prefix is not a valid path prefix. */
bool needSeparator = n > 0 && n < path.size();
if (needSeparator && path[n] != '/') {
return std::nullopt;
}
/* Prefix must be prefix of this path. */
if (path.compare(0, n, s) != 0) {
return std::nullopt;
}
/* Skip next path separator. */
return {
path.substr(needSeparator ? n + 1 : n)
};
}
SearchPath::Elem SearchPath::Elem::parse(std::string_view rawElem)
{
size_t pos = rawElem.find('=');
return SearchPath::Elem {
.prefix = Prefix {
.s = pos == std::string::npos
? std::string { "" }
: std::string { rawElem.substr(0, pos) },
},
.path = Path {
.s = std::string { rawElem.substr(pos + 1) },
},
};
}
SearchPath parseSearchPath(const Strings & rawElems)
{
SearchPath res;
for (auto & rawElem : rawElems)
res.elements.emplace_back(SearchPath::Elem::parse(rawElem));
return res;
}
}
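A hypothetical usage sketch for the parser above (not part of the change set); it assumes compilation inside the Nix source tree so that `search-path.hh` and its dependencies resolve:
```cpp
// Sketch only: exercises SearchPath::Elem::parse as defined above.
#include <cassert>

#include "search-path.hh"

int main()
{
    using nix::SearchPath;

    // "prefix=path" splits at the first '='.
    auto pinned = SearchPath::Elem::parse("nixpkgs=/path/to/nixpkgs");
    assert(pinned.prefix.s == "nixpkgs");
    assert(pinned.path.s == "/path/to/nixpkgs");

    // An entry without '=' gets an empty prefix: find() returns npos,
    // npos + 1 wraps to 0, so substr(pos + 1) yields the whole string.
    auto bare = SearchPath::Elem::parse("/fallback/dir");
    assert(bare.prefix.s.empty());
    assert(bare.path.s == "/fallback/dir");

    return 0;
}
```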

108
src/libexpr/search-path.hh Normal file
View file

@ -0,0 +1,108 @@
#pragma once
///@file
#include <optional>
#include "types.hh"
#include "comparator.hh"
namespace nix {
/**
* A "search path" is a list of ways look for something, used with
* `builtins.findFile` and `< >` lookup expressions.
*/
struct SearchPath
{
/**
* A single element of a `SearchPath`.
*
* Each element is tried in succession when looking up a path. The first
* element to completely match wins.
*/
struct Elem;
/**
* The first part of a `SearchPath::Elem` pair.
*
* Called a "prefix" because it takes the form of a prefix of a file
* path (first `n` path components). When looking up a path, to use
* a `SearchPath::Elem`, its `Prefix` must match the path.
*/
struct Prefix;
/**
* The second part of a `SearchPath::Elem` pair.
*
* It is either a path or a URL (with certain restrictions / extra
* structure).
*
* If the prefix of the path we are looking up matches, we then
* check if the rest of the path points to something that exists
* within the directory denoted by this. If so, the
* `SearchPath::Elem` as a whole matches, and that *something* being
* pointed to by the rest of the path we are looking up is the
* result.
*/
struct Path;
/**
* The list of search path elements. Each one is checked for a path
* when looking up. (The actual lookup entry point is in `EvalState`
* not in this class.)
*/
std::list<SearchPath::Elem> elements;
/**
* Parse a string into a `SearchPath`
*/
static SearchPath parse(const Strings & rawElems);
};
struct SearchPath::Prefix
{
/**
* Underlying string
*
* @todo Should we normalize this when constructing a `SearchPath::Prefix`?
*/
std::string s;
GENERATE_CMP(SearchPath::Prefix, me->s);
/**
* If the path possibly matches this search path element, return the
* suffix that we should look for inside the resolved value of the
* element.
* Note the double optionality in the name. While we might have a matching prefix, the suffix may not exist.
*/
std::optional<std::string_view> suffixIfPotentialMatch(std::string_view path) const;
};
struct SearchPath::Path
{
/**
* The location of a search path item, as a path or URL.
*
* @todo Maybe change this to `std::variant<SourcePath, URL>`.
*/
std::string s;
GENERATE_CMP(SearchPath::Path, me->s);
};
struct SearchPath::Elem
{
Prefix prefix;
Path path;
GENERATE_CMP(SearchPath::Elem, me->prefix, me->path);
/**
* Parse a string into a `SearchPath::Elem`
*/
static SearchPath::Elem parse(std::string_view rawElem);
};
}
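The `Prefix` and `Path` comments above describe a two-step match: the prefix must match the front of the requested path, and the remaining suffix must then exist under the element's path. Below is a minimal standalone sketch of that loop; it is an illustration only (the real entry point lives in `EvalState`, as noted on `elements`), the helper name `lookupInSearchPath` is invented here, and each element's path is assumed to be a plain local directory rather than a URL:
```cpp
#include <filesystem>
#include <iostream>
#include <optional>
#include <string>
#include <string_view>

#include "search-path.hh"

// Sketch of the lookup described above: prefix match first, then an
// existence check for the remaining suffix under the element's path.
std::optional<std::string> lookupInSearchPath(
    const nix::SearchPath & searchPath, std::string_view request)
{
    for (auto & elem : searchPath.elements) {
        // Step 1: does the element's prefix match, e.g. "nixpkgs" for a
        // lookup of "nixpkgs/lib"?
        auto suffix = elem.prefix.suffixIfPotentialMatch(request);
        if (!suffix) continue;
        // Step 2: does the remainder exist under the element's path?
        auto candidate = std::filesystem::path(elem.path.s) / std::string(*suffix);
        if (std::filesystem::exists(candidate))
            return candidate.string();
    }
    return std::nullopt; // no element matched
}

int main()
{
    nix::SearchPath sp;
    sp.elements.push_back(nix::SearchPath::Elem::parse("nixpkgs=/path/to/nixpkgs"));
    sp.elements.push_back(nix::SearchPath::Elem::parse("/fallback/dir"));

    if (auto res = lookupInSearchPath(sp, "nixpkgs/lib"))
        std::cout << *res << "\n";
    else
        std::cout << "not found\n";
}
```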

View file

@ -1,65 +0,0 @@
#include <nlohmann/json.hpp>
#include <gtest/gtest.h>
#include <rapidcheck/gtest.h>
#include "tests/derived-path.hh"
#include "tests/libexpr.hh"
namespace nix {
// Testing of trivial expressions
class DerivedPathExpressionTest : public LibExprTest {};
// FIXME: `RC_GTEST_FIXTURE_PROP` isn't calling `SetUpTestSuite` because it is
// no a real fixture.
//
// See https://github.com/emil-e/rapidcheck/blob/master/doc/gtest.md#rc_gtest_fixture_propfixture-name-args
TEST_F(DerivedPathExpressionTest, force_init)
{
}
RC_GTEST_FIXTURE_PROP(
DerivedPathExpressionTest,
prop_opaque_path_round_trip,
(const DerivedPath::Opaque & o))
{
auto * v = state.allocValue();
state.mkStorePathString(o.path, *v);
auto d = state.coerceToDerivedPath(noPos, *v, "");
RC_ASSERT(DerivedPath { o } == d);
}
// TODO use DerivedPath::Built for parameter once it supports a single output
// path only.
RC_GTEST_FIXTURE_PROP(
DerivedPathExpressionTest,
prop_built_path_placeholder_round_trip,
(const StorePath & drvPath, const StorePathName & outputName))
{
auto * v = state.allocValue();
state.mkOutputString(*v, drvPath, outputName.name, std::nullopt);
auto [d, _] = state.coerceToDerivedPathUnchecked(noPos, *v, "");
DerivedPath::Built b {
.drvPath = drvPath,
.outputs = OutputsSpec::Names { outputName.name },
};
RC_ASSERT(DerivedPath { b } == d);
}
RC_GTEST_FIXTURE_PROP(
DerivedPathExpressionTest,
prop_built_path_out_path_round_trip,
(const StorePath & drvPath, const StorePathName & outputName, const StorePath & outPath))
{
auto * v = state.allocValue();
state.mkOutputString(*v, drvPath, outputName.name, outPath);
auto [d, _] = state.coerceToDerivedPathUnchecked(noPos, *v, "");
DerivedPath::Built b {
.drvPath = drvPath,
.outputs = OutputsSpec::Names { outputName.name },
};
RC_ASSERT(DerivedPath { b } == d);
}
} /* namespace nix */

File diff suppressed because it is too large

View file

@ -1,68 +0,0 @@
#include "tests/libexpr.hh"
#include "value-to-json.hh"
namespace nix {
// Testing the conversion to JSON
class JSONValueTest : public LibExprTest {
protected:
std::string getJSONValue(Value& value) {
std::stringstream ss;
NixStringContext ps;
printValueAsJSON(state, true, value, noPos, ss, ps);
return ss.str();
}
};
TEST_F(JSONValueTest, null) {
Value v;
v.mkNull();
ASSERT_EQ(getJSONValue(v), "null");
}
TEST_F(JSONValueTest, BoolFalse) {
Value v;
v.mkBool(false);
ASSERT_EQ(getJSONValue(v),"false");
}
TEST_F(JSONValueTest, BoolTrue) {
Value v;
v.mkBool(true);
ASSERT_EQ(getJSONValue(v), "true");
}
TEST_F(JSONValueTest, IntPositive) {
Value v;
v.mkInt(100);
ASSERT_EQ(getJSONValue(v), "100");
}
TEST_F(JSONValueTest, IntNegative) {
Value v;
v.mkInt(-100);
ASSERT_EQ(getJSONValue(v), "-100");
}
TEST_F(JSONValueTest, String) {
Value v;
v.mkString("test");
ASSERT_EQ(getJSONValue(v), "\"test\"");
}
TEST_F(JSONValueTest, StringQuotes) {
Value v;
v.mkString("test\"");
ASSERT_EQ(getJSONValue(v), "\"test\\\"\"");
}
// The dummy store doesn't support writing files. Fails with this exception message:
// C++ exception with description "error: operation 'addToStoreFromDump' is
// not supported by store 'dummy'" thrown in the test body.
TEST_F(JSONValueTest, DISABLED_Path) {
Value v;
v.mkPath("test");
ASSERT_EQ(getJSONValue(v), "\"/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x\"");
}
} /* namespace nix */

View file

@ -1,140 +0,0 @@
#pragma once
///@file
#include <gtest/gtest.h>
#include <gmock/gmock.h>
#include "value.hh"
#include "nixexpr.hh"
#include "eval.hh"
#include "eval-inline.hh"
#include "store-api.hh"
#include "tests/libstore.hh"
namespace nix {
class LibExprTest : public LibStoreTest {
public:
static void SetUpTestSuite() {
LibStoreTest::SetUpTestSuite();
initGC();
}
protected:
LibExprTest()
: LibStoreTest()
, state({}, store)
{
}
Value eval(std::string input, bool forceValue = true) {
Value v;
Expr * e = state.parseExprFromString(input, state.rootPath(CanonPath::root));
assert(e);
state.eval(e, v);
if (forceValue)
state.forceValue(v, noPos);
return v;
}
Symbol createSymbol(const char * value) {
return state.symbols.create(value);
}
EvalState state;
};
MATCHER(IsListType, "") {
return arg != nList;
}
MATCHER(IsList, "") {
return arg.type() == nList;
}
MATCHER(IsString, "") {
return arg.type() == nString;
}
MATCHER(IsNull, "") {
return arg.type() == nNull;
}
MATCHER(IsThunk, "") {
return arg.type() == nThunk;
}
MATCHER(IsAttrs, "") {
return arg.type() == nAttrs;
}
MATCHER_P(IsStringEq, s, fmt("The string is equal to \"%1%\"", s)) {
if (arg.type() != nString) {
return false;
}
return std::string_view(arg.string.s) == s;
}
MATCHER_P(IsIntEq, v, fmt("The string is equal to \"%1%\"", v)) {
if (arg.type() != nInt) {
return false;
}
return arg.integer == v;
}
MATCHER_P(IsFloatEq, v, fmt("The float is equal to \"%1%\"", v)) {
if (arg.type() != nFloat) {
return false;
}
return arg.fpoint == v;
}
MATCHER(IsTrue, "") {
if (arg.type() != nBool) {
return false;
}
return arg.boolean == true;
}
MATCHER(IsFalse, "") {
if (arg.type() != nBool) {
return false;
}
return arg.boolean == false;
}
MATCHER_P(IsPathEq, p, fmt("Is a path equal to \"%1%\"", p)) {
if (arg.type() != nPath) {
*result_listener << "Expected a path got " << arg.type();
return false;
} else if (std::string_view(arg.string.s) != p) {
*result_listener << "Expected a path that equals \"" << p << "\" but got: " << arg.string.s;
return false;
}
return true;
}
MATCHER_P(IsListOfSize, n, fmt("Is a list of size [%1%]", n)) {
if (arg.type() != nList) {
*result_listener << "Expected list got " << arg.type();
return false;
} else if (arg.listSize() != (size_t)n) {
*result_listener << "Expected as list of size " << n << " got " << arg.listSize();
return false;
}
return true;
}
MATCHER_P(IsAttrsOfSize, n, fmt("Is a set of size [%1%]", n)) {
if (arg.type() != nAttrs) {
*result_listener << "Expected set got " << arg.type();
return false;
} else if (arg.attrs->size() != (size_t)n) {
*result_listener << "Expected a set with " << n << " attributes but got " << arg.attrs->size();
return false;
}
return true;
}
} /* namespace nix */

View file

@ -1,19 +0,0 @@
check: libexpr-tests_RUN
programs += libexpr-tests
libexpr-tests_NAME := libnixexpr-tests
libexpr-tests_DIR := $(d)
libexpr-tests_INSTALL_DIR :=
libexpr-tests_SOURCES := \
$(wildcard $(d)/*.cc) \
$(wildcard $(d)/value/*.cc)
libexpr-tests_CXXFLAGS += -I src/libexpr -I src/libutil -I src/libstore -I src/libexpr/tests -I src/libfetchers
libexpr-tests_LIBS = libstore-tests libutils-tests libexpr libutil libstore libfetchers
libexpr-tests_LDFLAGS := $(GTEST_LIBS) -lgmock

View file

@ -1,832 +0,0 @@
#include <gmock/gmock.h>
#include <gtest/gtest.h>
#include "tests/libexpr.hh"
namespace nix {
class CaptureLogger : public Logger
{
std::ostringstream oss;
public:
CaptureLogger() {}
std::string get() const {
return oss.str();
}
void log(Verbosity lvl, std::string_view s) override {
oss << s << std::endl;
}
void logEI(const ErrorInfo & ei) override {
showErrorInfo(oss, ei, loggerSettings.showTrace.get());
}
};
class CaptureLogging {
Logger * oldLogger;
std::unique_ptr<CaptureLogger> tempLogger;
public:
CaptureLogging() : tempLogger(std::make_unique<CaptureLogger>()) {
oldLogger = logger;
logger = tempLogger.get();
}
~CaptureLogging() {
logger = oldLogger;
}
std::string get() const {
return tempLogger->get();
}
};
// Testing eval of PrimOp's
class PrimOpTest : public LibExprTest {};
TEST_F(PrimOpTest, throw) {
ASSERT_THROW(eval("throw \"foo\""), ThrownError);
}
TEST_F(PrimOpTest, abort) {
ASSERT_THROW(eval("abort \"abort\""), Abort);
}
TEST_F(PrimOpTest, ceil) {
auto v = eval("builtins.ceil 1.9");
ASSERT_THAT(v, IsIntEq(2));
}
TEST_F(PrimOpTest, floor) {
auto v = eval("builtins.floor 1.9");
ASSERT_THAT(v, IsIntEq(1));
}
TEST_F(PrimOpTest, tryEvalFailure) {
auto v = eval("builtins.tryEval (throw \"\")");
ASSERT_THAT(v, IsAttrsOfSize(2));
auto s = createSymbol("success");
auto p = v.attrs->get(s);
ASSERT_NE(p, nullptr);
ASSERT_THAT(*p->value, IsFalse());
}
TEST_F(PrimOpTest, tryEvalSuccess) {
auto v = eval("builtins.tryEval 123");
ASSERT_THAT(v, IsAttrs());
auto s = createSymbol("success");
auto p = v.attrs->get(s);
ASSERT_NE(p, nullptr);
ASSERT_THAT(*p->value, IsTrue());
s = createSymbol("value");
p = v.attrs->get(s);
ASSERT_NE(p, nullptr);
ASSERT_THAT(*p->value, IsIntEq(123));
}
TEST_F(PrimOpTest, getEnv) {
setenv("_NIX_UNIT_TEST_ENV_VALUE", "test value", 1);
auto v = eval("builtins.getEnv \"_NIX_UNIT_TEST_ENV_VALUE\"");
ASSERT_THAT(v, IsStringEq("test value"));
}
TEST_F(PrimOpTest, seq) {
ASSERT_THROW(eval("let x = throw \"test\"; in builtins.seq x { }"), ThrownError);
}
TEST_F(PrimOpTest, seqNotDeep) {
auto v = eval("let x = { z = throw \"test\"; }; in builtins.seq x { }");
ASSERT_THAT(v, IsAttrs());
}
TEST_F(PrimOpTest, deepSeq) {
ASSERT_THROW(eval("let x = { z = throw \"test\"; }; in builtins.deepSeq x { }"), ThrownError);
}
TEST_F(PrimOpTest, trace) {
CaptureLogging l;
auto v = eval("builtins.trace \"test string 123\" 123");
ASSERT_THAT(v, IsIntEq(123));
auto text = l.get();
ASSERT_NE(text.find("test string 123"), std::string::npos);
}
TEST_F(PrimOpTest, placeholder) {
auto v = eval("builtins.placeholder \"out\"");
ASSERT_THAT(v, IsStringEq("/1rz4g4znpzjwh1xymhjpm42vipw92pr73vdgl6xs1hycac8kf2n9"));
}
TEST_F(PrimOpTest, baseNameOf) {
auto v = eval("builtins.baseNameOf /some/path");
ASSERT_THAT(v, IsStringEq("path"));
}
TEST_F(PrimOpTest, dirOf) {
auto v = eval("builtins.dirOf /some/path");
ASSERT_THAT(v, IsPathEq("/some"));
}
TEST_F(PrimOpTest, attrValues) {
auto v = eval("builtins.attrValues { x = \"foo\"; a = 1; }");
ASSERT_THAT(v, IsListOfSize(2));
ASSERT_THAT(*v.listElems()[0], IsIntEq(1));
ASSERT_THAT(*v.listElems()[1], IsStringEq("foo"));
}
TEST_F(PrimOpTest, getAttr) {
auto v = eval("builtins.getAttr \"x\" { x = \"foo\"; }");
ASSERT_THAT(v, IsStringEq("foo"));
}
TEST_F(PrimOpTest, getAttrNotFound) {
// FIXME: TypeError is really bad here, also the error wording is worse
// than on Nix <=2.3
ASSERT_THROW(eval("builtins.getAttr \"y\" { }"), TypeError);
}
TEST_F(PrimOpTest, unsafeGetAttrPos) {
// The `y` attribute is at position
const char* expr = "builtins.unsafeGetAttrPos \"y\" { y = \"x\"; }";
auto v = eval(expr);
ASSERT_THAT(v, IsNull());
}
TEST_F(PrimOpTest, hasAttr) {
auto v = eval("builtins.hasAttr \"x\" { x = 1; }");
ASSERT_THAT(v, IsTrue());
}
TEST_F(PrimOpTest, hasAttrNotFound) {
auto v = eval("builtins.hasAttr \"x\" { }");
ASSERT_THAT(v, IsFalse());
}
TEST_F(PrimOpTest, isAttrs) {
auto v = eval("builtins.isAttrs {}");
ASSERT_THAT(v, IsTrue());
}
TEST_F(PrimOpTest, isAttrsFalse) {
auto v = eval("builtins.isAttrs null");
ASSERT_THAT(v, IsFalse());
}
TEST_F(PrimOpTest, removeAttrs) {
auto v = eval("builtins.removeAttrs { x = 1; } [\"x\"]");
ASSERT_THAT(v, IsAttrsOfSize(0));
}
TEST_F(PrimOpTest, removeAttrsRetains) {
auto v = eval("builtins.removeAttrs { x = 1; y = 2; } [\"x\"]");
ASSERT_THAT(v, IsAttrsOfSize(1));
ASSERT_NE(v.attrs->find(createSymbol("y")), nullptr);
}
TEST_F(PrimOpTest, listToAttrsEmptyList) {
auto v = eval("builtins.listToAttrs []");
ASSERT_THAT(v, IsAttrsOfSize(0));
ASSERT_EQ(v.type(), nAttrs);
ASSERT_EQ(v.attrs->size(), 0);
}
TEST_F(PrimOpTest, listToAttrsNotFieldName) {
ASSERT_THROW(eval("builtins.listToAttrs [{}]"), Error);
}
TEST_F(PrimOpTest, listToAttrs) {
auto v = eval("builtins.listToAttrs [ { name = \"key\"; value = 123; } ]");
ASSERT_THAT(v, IsAttrsOfSize(1));
auto key = v.attrs->find(createSymbol("key"));
ASSERT_NE(key, nullptr);
ASSERT_THAT(*key->value, IsIntEq(123));
}
TEST_F(PrimOpTest, intersectAttrs) {
auto v = eval("builtins.intersectAttrs { a = 1; b = 2; } { b = 3; c = 4; }");
ASSERT_THAT(v, IsAttrsOfSize(1));
auto b = v.attrs->find(createSymbol("b"));
ASSERT_NE(b, nullptr);
ASSERT_THAT(*b->value, IsIntEq(3));
}
TEST_F(PrimOpTest, catAttrs) {
auto v = eval("builtins.catAttrs \"a\" [{a = 1;} {b = 0;} {a = 2;}]");
ASSERT_THAT(v, IsListOfSize(2));
ASSERT_THAT(*v.listElems()[0], IsIntEq(1));
ASSERT_THAT(*v.listElems()[1], IsIntEq(2));
}
TEST_F(PrimOpTest, functionArgs) {
auto v = eval("builtins.functionArgs ({ x, y ? 123}: 1)");
ASSERT_THAT(v, IsAttrsOfSize(2));
auto x = v.attrs->find(createSymbol("x"));
ASSERT_NE(x, nullptr);
ASSERT_THAT(*x->value, IsFalse());
auto y = v.attrs->find(createSymbol("y"));
ASSERT_NE(y, nullptr);
ASSERT_THAT(*y->value, IsTrue());
}
TEST_F(PrimOpTest, mapAttrs) {
auto v = eval("builtins.mapAttrs (name: value: value * 10) { a = 1; b = 2; }");
ASSERT_THAT(v, IsAttrsOfSize(2));
auto a = v.attrs->find(createSymbol("a"));
ASSERT_NE(a, nullptr);
ASSERT_THAT(*a->value, IsThunk());
state.forceValue(*a->value, noPos);
ASSERT_THAT(*a->value, IsIntEq(10));
auto b = v.attrs->find(createSymbol("b"));
ASSERT_NE(b, nullptr);
ASSERT_THAT(*b->value, IsThunk());
state.forceValue(*b->value, noPos);
ASSERT_THAT(*b->value, IsIntEq(20));
}
TEST_F(PrimOpTest, isList) {
auto v = eval("builtins.isList []");
ASSERT_THAT(v, IsTrue());
}
TEST_F(PrimOpTest, isListFalse) {
auto v = eval("builtins.isList null");
ASSERT_THAT(v, IsFalse());
}
TEST_F(PrimOpTest, elemtAt) {
auto v = eval("builtins.elemAt [0 1 2 3] 3");
ASSERT_THAT(v, IsIntEq(3));
}
TEST_F(PrimOpTest, elemtAtOutOfBounds) {
ASSERT_THROW(eval("builtins.elemAt [0 1 2 3] 5"), Error);
}
TEST_F(PrimOpTest, head) {
auto v = eval("builtins.head [ 3 2 1 0 ]");
ASSERT_THAT(v, IsIntEq(3));
}
TEST_F(PrimOpTest, headEmpty) {
ASSERT_THROW(eval("builtins.head [ ]"), Error);
}
TEST_F(PrimOpTest, headWrongType) {
ASSERT_THROW(eval("builtins.head { }"), Error);
}
TEST_F(PrimOpTest, tail) {
auto v = eval("builtins.tail [ 3 2 1 0 ]");
ASSERT_THAT(v, IsListOfSize(3));
for (const auto [n, elem] : enumerate(v.listItems()))
ASSERT_THAT(*elem, IsIntEq(2 - static_cast<int>(n)));
}
TEST_F(PrimOpTest, tailEmpty) {
ASSERT_THROW(eval("builtins.tail []"), Error);
}
TEST_F(PrimOpTest, map) {
auto v = eval("map (x: \"foo\" + x) [ \"bar\" \"bla\" \"abc\" ]");
ASSERT_THAT(v, IsListOfSize(3));
auto elem = v.listElems()[0];
ASSERT_THAT(*elem, IsThunk());
state.forceValue(*elem, noPos);
ASSERT_THAT(*elem, IsStringEq("foobar"));
elem = v.listElems()[1];
ASSERT_THAT(*elem, IsThunk());
state.forceValue(*elem, noPos);
ASSERT_THAT(*elem, IsStringEq("foobla"));
elem = v.listElems()[2];
ASSERT_THAT(*elem, IsThunk());
state.forceValue(*elem, noPos);
ASSERT_THAT(*elem, IsStringEq("fooabc"));
}
TEST_F(PrimOpTest, filter) {
auto v = eval("builtins.filter (x: x == 2) [ 3 2 3 2 3 2 ]");
ASSERT_THAT(v, IsListOfSize(3));
for (const auto elem : v.listItems())
ASSERT_THAT(*elem, IsIntEq(2));
}
TEST_F(PrimOpTest, elemTrue) {
auto v = eval("builtins.elem 3 [ 1 2 3 4 5 ]");
ASSERT_THAT(v, IsTrue());
}
TEST_F(PrimOpTest, elemFalse) {
auto v = eval("builtins.elem 6 [ 1 2 3 4 5 ]");
ASSERT_THAT(v, IsFalse());
}
TEST_F(PrimOpTest, concatLists) {
auto v = eval("builtins.concatLists [[1 2] [3 4]]");
ASSERT_THAT(v, IsListOfSize(4));
for (const auto [i, elem] : enumerate(v.listItems()))
ASSERT_THAT(*elem, IsIntEq(static_cast<int>(i)+1));
}
TEST_F(PrimOpTest, length) {
auto v = eval("builtins.length [ 1 2 3 ]");
ASSERT_THAT(v, IsIntEq(3));
}
TEST_F(PrimOpTest, foldStrict) {
auto v = eval("builtins.foldl' (a: b: a + b) 0 [1 2 3]");
ASSERT_THAT(v, IsIntEq(6));
}
TEST_F(PrimOpTest, anyTrue) {
auto v = eval("builtins.any (x: x == 2) [ 1 2 3 ]");
ASSERT_THAT(v, IsTrue());
}
TEST_F(PrimOpTest, anyFalse) {
auto v = eval("builtins.any (x: x == 5) [ 1 2 3 ]");
ASSERT_THAT(v, IsFalse());
}
TEST_F(PrimOpTest, allTrue) {
auto v = eval("builtins.all (x: x > 0) [ 1 2 3 ]");
ASSERT_THAT(v, IsTrue());
}
TEST_F(PrimOpTest, allFalse) {
auto v = eval("builtins.all (x: x <= 0) [ 1 2 3 ]");
ASSERT_THAT(v, IsFalse());
}
TEST_F(PrimOpTest, genList) {
auto v = eval("builtins.genList (x: x + 1) 3");
ASSERT_EQ(v.type(), nList);
ASSERT_EQ(v.listSize(), 3);
for (const auto [i, elem] : enumerate(v.listItems())) {
ASSERT_THAT(*elem, IsThunk());
state.forceValue(*elem, noPos);
ASSERT_THAT(*elem, IsIntEq(static_cast<int>(i)+1));
}
}
TEST_F(PrimOpTest, sortLessThan) {
auto v = eval("builtins.sort builtins.lessThan [ 483 249 526 147 42 77 ]");
ASSERT_EQ(v.type(), nList);
ASSERT_EQ(v.listSize(), 6);
const std::vector<int> numbers = { 42, 77, 147, 249, 483, 526 };
for (const auto [n, elem] : enumerate(v.listItems()))
ASSERT_THAT(*elem, IsIntEq(numbers[n]));
}
TEST_F(PrimOpTest, partition) {
auto v = eval("builtins.partition (x: x > 10) [1 23 9 3 42]");
ASSERT_THAT(v, IsAttrsOfSize(2));
auto right = v.attrs->get(createSymbol("right"));
ASSERT_NE(right, nullptr);
ASSERT_THAT(*right->value, IsListOfSize(2));
ASSERT_THAT(*right->value->listElems()[0], IsIntEq(23));
ASSERT_THAT(*right->value->listElems()[1], IsIntEq(42));
auto wrong = v.attrs->get(createSymbol("wrong"));
ASSERT_NE(wrong, nullptr);
ASSERT_EQ(wrong->value->type(), nList);
ASSERT_EQ(wrong->value->listSize(), 3);
ASSERT_THAT(*wrong->value, IsListOfSize(3));
ASSERT_THAT(*wrong->value->listElems()[0], IsIntEq(1));
ASSERT_THAT(*wrong->value->listElems()[1], IsIntEq(9));
ASSERT_THAT(*wrong->value->listElems()[2], IsIntEq(3));
}
TEST_F(PrimOpTest, concatMap) {
auto v = eval("builtins.concatMap (x: x ++ [0]) [ [1 2] [3 4] ]");
ASSERT_EQ(v.type(), nList);
ASSERT_EQ(v.listSize(), 6);
const std::vector<int> numbers = { 1, 2, 0, 3, 4, 0 };
for (const auto [n, elem] : enumerate(v.listItems()))
ASSERT_THAT(*elem, IsIntEq(numbers[n]));
}
TEST_F(PrimOpTest, addInt) {
auto v = eval("builtins.add 3 5");
ASSERT_THAT(v, IsIntEq(8));
}
TEST_F(PrimOpTest, addFloat) {
auto v = eval("builtins.add 3.0 5.0");
ASSERT_THAT(v, IsFloatEq(8.0));
}
TEST_F(PrimOpTest, addFloatToInt) {
auto v = eval("builtins.add 3.0 5");
ASSERT_THAT(v, IsFloatEq(8.0));
v = eval("builtins.add 3 5.0");
ASSERT_THAT(v, IsFloatEq(8.0));
}
TEST_F(PrimOpTest, subInt) {
auto v = eval("builtins.sub 5 2");
ASSERT_THAT(v, IsIntEq(3));
}
TEST_F(PrimOpTest, subFloat) {
auto v = eval("builtins.sub 5.0 2.0");
ASSERT_THAT(v, IsFloatEq(3.0));
}
TEST_F(PrimOpTest, subFloatFromInt) {
auto v = eval("builtins.sub 5.0 2");
ASSERT_THAT(v, IsFloatEq(3.0));
v = eval("builtins.sub 4 2.0");
ASSERT_THAT(v, IsFloatEq(2.0));
}
TEST_F(PrimOpTest, mulInt) {
auto v = eval("builtins.mul 3 5");
ASSERT_THAT(v, IsIntEq(15));
}
TEST_F(PrimOpTest, mulFloat) {
auto v = eval("builtins.mul 3.0 5.0");
ASSERT_THAT(v, IsFloatEq(15.0));
}
TEST_F(PrimOpTest, mulFloatMixed) {
auto v = eval("builtins.mul 3 5.0");
ASSERT_THAT(v, IsFloatEq(15.0));
v = eval("builtins.mul 2.0 5");
ASSERT_THAT(v, IsFloatEq(10.0));
}
TEST_F(PrimOpTest, divInt) {
auto v = eval("builtins.div 5 (-1)");
ASSERT_THAT(v, IsIntEq(-5));
}
TEST_F(PrimOpTest, divIntZero) {
ASSERT_THROW(eval("builtins.div 5 0"), EvalError);
}
TEST_F(PrimOpTest, divFloat) {
auto v = eval("builtins.div 5.0 (-1)");
ASSERT_THAT(v, IsFloatEq(-5.0));
}
TEST_F(PrimOpTest, divFloatZero) {
ASSERT_THROW(eval("builtins.div 5.0 0.0"), EvalError);
}
TEST_F(PrimOpTest, bitOr) {
auto v = eval("builtins.bitOr 1 2");
ASSERT_THAT(v, IsIntEq(3));
}
TEST_F(PrimOpTest, bitXor) {
auto v = eval("builtins.bitXor 3 2");
ASSERT_THAT(v, IsIntEq(1));
}
TEST_F(PrimOpTest, lessThanFalse) {
auto v = eval("builtins.lessThan 3 1");
ASSERT_THAT(v, IsFalse());
}
TEST_F(PrimOpTest, lessThanTrue) {
auto v = eval("builtins.lessThan 1 3");
ASSERT_THAT(v, IsTrue());
}
TEST_F(PrimOpTest, toStringAttrsThrows) {
ASSERT_THROW(eval("builtins.toString {}"), EvalError);
}
TEST_F(PrimOpTest, toStringLambdaThrows) {
ASSERT_THROW(eval("builtins.toString (x: x)"), EvalError);
}
class ToStringPrimOpTest :
public PrimOpTest,
public testing::WithParamInterface<std::tuple<std::string, std::string_view>>
{};
TEST_P(ToStringPrimOpTest, toString) {
const auto [input, output] = GetParam();
auto v = eval(input);
ASSERT_THAT(v, IsStringEq(output));
}
#define CASE(input, output) (std::make_tuple(std::string_view("builtins.toString " input), std::string_view(output)))
INSTANTIATE_TEST_SUITE_P(
toString,
ToStringPrimOpTest,
testing::Values(
CASE(R"("foo")", "foo"),
CASE(R"(1)", "1"),
CASE(R"([1 2 3])", "1 2 3"),
CASE(R"(.123)", "0.123000"),
CASE(R"(true)", "1"),
CASE(R"(false)", ""),
CASE(R"(null)", ""),
CASE(R"({ v = "bar"; __toString = self: self.v; })", "bar"),
CASE(R"({ v = "bar"; __toString = self: self.v; outPath = "foo"; })", "bar"),
CASE(R"({ outPath = "foo"; })", "foo"),
CASE(R"(./test)", "/test")
)
);
#undef CASE
TEST_F(PrimOpTest, substring){
auto v = eval("builtins.substring 0 3 \"nixos\"");
ASSERT_THAT(v, IsStringEq("nix"));
}
TEST_F(PrimOpTest, substringSmallerString){
auto v = eval("builtins.substring 0 3 \"n\"");
ASSERT_THAT(v, IsStringEq("n"));
}
TEST_F(PrimOpTest, substringEmptyString){
auto v = eval("builtins.substring 1 3 \"\"");
ASSERT_THAT(v, IsStringEq(""));
}
TEST_F(PrimOpTest, stringLength) {
auto v = eval("builtins.stringLength \"123\"");
ASSERT_THAT(v, IsIntEq(3));
}
TEST_F(PrimOpTest, hashStringMd5) {
auto v = eval("builtins.hashString \"md5\" \"asdf\"");
ASSERT_THAT(v, IsStringEq("912ec803b2ce49e4a541068d495ab570"));
}
TEST_F(PrimOpTest, hashStringSha1) {
auto v = eval("builtins.hashString \"sha1\" \"asdf\"");
ASSERT_THAT(v, IsStringEq("3da541559918a808c2402bba5012f6c60b27661c"));
}
TEST_F(PrimOpTest, hashStringSha256) {
auto v = eval("builtins.hashString \"sha256\" \"asdf\"");
ASSERT_THAT(v, IsStringEq("f0e4c2f76c58916ec258f246851bea091d14d4247a2fc3e18694461b1816e13b"));
}
TEST_F(PrimOpTest, hashStringSha512) {
auto v = eval("builtins.hashString \"sha512\" \"asdf\"");
ASSERT_THAT(v, IsStringEq("401b09eab3c013d4ca54922bb802bec8fd5318192b0a75f201d8b3727429080fb337591abd3e44453b954555b7a0812e1081c39b740293f765eae731f5a65ed1"));
}
TEST_F(PrimOpTest, hashStringInvalidHashType) {
ASSERT_THROW(eval("builtins.hashString \"foobar\" \"asdf\""), Error);
}
TEST_F(PrimOpTest, nixPath) {
auto v = eval("builtins.nixPath");
ASSERT_EQ(v.type(), nList);
// We can't test much more as currently the EvalSettings are a global
// that we can't easily swap / replace
}
TEST_F(PrimOpTest, langVersion) {
auto v = eval("builtins.langVersion");
ASSERT_EQ(v.type(), nInt);
}
TEST_F(PrimOpTest, storeDir) {
auto v = eval("builtins.storeDir");
ASSERT_THAT(v, IsStringEq(settings.nixStore));
}
TEST_F(PrimOpTest, nixVersion) {
auto v = eval("builtins.nixVersion");
ASSERT_THAT(v, IsStringEq(nixVersion));
}
TEST_F(PrimOpTest, currentSystem) {
auto v = eval("builtins.currentSystem");
ASSERT_THAT(v, IsStringEq(settings.thisSystem.get()));
}
TEST_F(PrimOpTest, derivation) {
auto v = eval("derivation");
ASSERT_EQ(v.type(), nFunction);
ASSERT_TRUE(v.isLambda());
ASSERT_NE(v.lambda.fun, nullptr);
ASSERT_TRUE(v.lambda.fun->hasFormals());
}
TEST_F(PrimOpTest, currentTime) {
auto v = eval("builtins.currentTime");
ASSERT_EQ(v.type(), nInt);
ASSERT_TRUE(v.integer > 0);
}
TEST_F(PrimOpTest, splitVersion) {
auto v = eval("builtins.splitVersion \"1.2.3git\"");
ASSERT_THAT(v, IsListOfSize(4));
const std::vector<std::string_view> strings = { "1", "2", "3", "git" };
for (const auto [n, p] : enumerate(v.listItems()))
ASSERT_THAT(*p, IsStringEq(strings[n]));
}
class CompareVersionsPrimOpTest :
public PrimOpTest,
public testing::WithParamInterface<std::tuple<std::string, const int>>
{};
TEST_P(CompareVersionsPrimOpTest, compareVersions) {
auto [expression, expectation] = GetParam();
auto v = eval(expression);
ASSERT_THAT(v, IsIntEq(expectation));
}
#define CASE(a, b, expected) (std::make_tuple("builtins.compareVersions \"" #a "\" \"" #b "\"", expected))
INSTANTIATE_TEST_SUITE_P(
compareVersions,
CompareVersionsPrimOpTest,
testing::Values(
// The first two are weird cases. Intuition tells they should
// be the same but they aren't.
CASE(1.0, 1.0.0, -1),
CASE(1.0.0, 1.0, 1),
// the following are from the nix-env manual:
CASE(1.0, 2.3, -1),
CASE(2.1, 2.3, -1),
CASE(2.3, 2.3, 0),
CASE(2.5, 2.3, 1),
CASE(3.1, 2.3, 1),
CASE(2.3.1, 2.3, 1),
CASE(2.3.1, 2.3a, 1),
CASE(2.3pre1, 2.3, -1),
CASE(2.3pre3, 2.3pre12, -1),
CASE(2.3a, 2.3c, -1),
CASE(2.3pre1, 2.3c, -1),
CASE(2.3pre1, 2.3q, -1)
)
);
#undef CASE
class ParseDrvNamePrimOpTest :
public PrimOpTest,
public testing::WithParamInterface<std::tuple<std::string, std::string_view, std::string_view>>
{};
TEST_P(ParseDrvNamePrimOpTest, parseDrvName) {
auto [input, expectedName, expectedVersion] = GetParam();
const auto expr = fmt("builtins.parseDrvName \"%1%\"", input);
auto v = eval(expr);
ASSERT_THAT(v, IsAttrsOfSize(2));
auto name = v.attrs->find(createSymbol("name"));
ASSERT_TRUE(name);
ASSERT_THAT(*name->value, IsStringEq(expectedName));
auto version = v.attrs->find(createSymbol("version"));
ASSERT_TRUE(version);
ASSERT_THAT(*version->value, IsStringEq(expectedVersion));
}
INSTANTIATE_TEST_SUITE_P(
parseDrvName,
ParseDrvNamePrimOpTest,
testing::Values(
std::make_tuple("nix-0.12pre12876", "nix", "0.12pre12876"),
std::make_tuple("a-b-c-1234pre5+git", "a-b-c", "1234pre5+git")
)
);
TEST_F(PrimOpTest, replaceStrings) {
// FIXME: add a test that verifies the string context is as expected
auto v = eval("builtins.replaceStrings [\"oo\" \"a\"] [\"a\" \"i\"] \"foobar\"");
ASSERT_EQ(v.type(), nString);
ASSERT_EQ(v.string.s, std::string_view("fabir"));
}
TEST_F(PrimOpTest, concatStringsSep) {
// FIXME: add a test that verifies the string context is as expected
auto v = eval("builtins.concatStringsSep \"%\" [\"foo\" \"bar\" \"baz\"]");
ASSERT_EQ(v.type(), nString);
ASSERT_EQ(std::string_view(v.string.s), "foo%bar%baz");
}
TEST_F(PrimOpTest, split1) {
// v = [ "" [ "a" ] "c" ]
auto v = eval("builtins.split \"(a)b\" \"abc\"");
ASSERT_THAT(v, IsListOfSize(3));
ASSERT_THAT(*v.listElems()[0], IsStringEq(""));
ASSERT_THAT(*v.listElems()[1], IsListOfSize(1));
ASSERT_THAT(*v.listElems()[1]->listElems()[0], IsStringEq("a"));
ASSERT_THAT(*v.listElems()[2], IsStringEq("c"));
}
TEST_F(PrimOpTest, split2) {
// v is expected to be a list [ "" [ "a" ] "b" [ "c"] "" ]
auto v = eval("builtins.split \"([ac])\" \"abc\"");
ASSERT_THAT(v, IsListOfSize(5));
ASSERT_THAT(*v.listElems()[0], IsStringEq(""));
ASSERT_THAT(*v.listElems()[1], IsListOfSize(1));
ASSERT_THAT(*v.listElems()[1]->listElems()[0], IsStringEq("a"));
ASSERT_THAT(*v.listElems()[2], IsStringEq("b"));
ASSERT_THAT(*v.listElems()[3], IsListOfSize(1));
ASSERT_THAT(*v.listElems()[3]->listElems()[0], IsStringEq("c"));
ASSERT_THAT(*v.listElems()[4], IsStringEq(""));
}
TEST_F(PrimOpTest, split3) {
auto v = eval("builtins.split \"(a)|(c)\" \"abc\"");
ASSERT_THAT(v, IsListOfSize(5));
// First list element
ASSERT_THAT(*v.listElems()[0], IsStringEq(""));
// 2nd list element is a list [ "" null ]
ASSERT_THAT(*v.listElems()[1], IsListOfSize(2));
ASSERT_THAT(*v.listElems()[1]->listElems()[0], IsStringEq("a"));
ASSERT_THAT(*v.listElems()[1]->listElems()[1], IsNull());
// 3rd element
ASSERT_THAT(*v.listElems()[2], IsStringEq("b"));
// 4th element is a list: [ null "c" ]
ASSERT_THAT(*v.listElems()[3], IsListOfSize(2));
ASSERT_THAT(*v.listElems()[3]->listElems()[0], IsNull());
ASSERT_THAT(*v.listElems()[3]->listElems()[1], IsStringEq("c"));
// 5th element is the empty string
ASSERT_THAT(*v.listElems()[4], IsStringEq(""));
}
TEST_F(PrimOpTest, split4) {
auto v = eval("builtins.split \"([[:upper:]]+)\" \" FOO \"");
ASSERT_THAT(v, IsListOfSize(3));
auto first = v.listElems()[0];
auto second = v.listElems()[1];
auto third = v.listElems()[2];
ASSERT_THAT(*first, IsStringEq(" "));
ASSERT_THAT(*second, IsListOfSize(1));
ASSERT_THAT(*second->listElems()[0], IsStringEq("FOO"));
ASSERT_THAT(*third, IsStringEq(" "));
}
TEST_F(PrimOpTest, match1) {
auto v = eval("builtins.match \"ab\" \"abc\"");
ASSERT_THAT(v, IsNull());
}
TEST_F(PrimOpTest, match2) {
auto v = eval("builtins.match \"abc\" \"abc\"");
ASSERT_THAT(v, IsListOfSize(0));
}
TEST_F(PrimOpTest, match3) {
auto v = eval("builtins.match \"a(b)(c)\" \"abc\"");
ASSERT_THAT(v, IsListOfSize(2));
ASSERT_THAT(*v.listElems()[0], IsStringEq("b"));
ASSERT_THAT(*v.listElems()[1], IsStringEq("c"));
}
TEST_F(PrimOpTest, match4) {
auto v = eval("builtins.match \"[[:space:]]+([[:upper:]]+)[[:space:]]+\" \" FOO \"");
ASSERT_THAT(v, IsListOfSize(1));
ASSERT_THAT(*v.listElems()[0], IsStringEq("FOO"));
}
TEST_F(PrimOpTest, attrNames) {
auto v = eval("builtins.attrNames { x = 1; y = 2; z = 3; a = 2; }");
ASSERT_THAT(v, IsListOfSize(4));
// ensure that the list is sorted
const std::vector<std::string_view> expected { "a", "x", "y", "z" };
for (const auto [n, elem] : enumerate(v.listItems()))
ASSERT_THAT(*elem, IsStringEq(expected[n]));
}
TEST_F(PrimOpTest, genericClosure_not_strict) {
// Operator should not be used when startSet is empty
auto v = eval("builtins.genericClosure { startSet = []; }");
ASSERT_THAT(v, IsListOfSize(0));
}
} /* namespace nix */

View file

@ -1,196 +0,0 @@
#include "tests/libexpr.hh"
namespace nix {
// Testing of trivial expressions
class TrivialExpressionTest : public LibExprTest {};
TEST_F(TrivialExpressionTest, true) {
auto v = eval("true");
ASSERT_THAT(v, IsTrue());
}
TEST_F(TrivialExpressionTest, false) {
auto v = eval("false");
ASSERT_THAT(v, IsFalse());
}
TEST_F(TrivialExpressionTest, null) {
auto v = eval("null");
ASSERT_THAT(v, IsNull());
}
TEST_F(TrivialExpressionTest, 1) {
auto v = eval("1");
ASSERT_THAT(v, IsIntEq(1));
}
TEST_F(TrivialExpressionTest, 1plus1) {
auto v = eval("1+1");
ASSERT_THAT(v, IsIntEq(2));
}
TEST_F(TrivialExpressionTest, minus1) {
auto v = eval("-1");
ASSERT_THAT(v, IsIntEq(-1));
}
TEST_F(TrivialExpressionTest, 1minus1) {
auto v = eval("1-1");
ASSERT_THAT(v, IsIntEq(0));
}
TEST_F(TrivialExpressionTest, lambdaAdd) {
auto v = eval("let add = a: b: a + b; in add 1 2");
ASSERT_THAT(v, IsIntEq(3));
}
TEST_F(TrivialExpressionTest, list) {
auto v = eval("[]");
ASSERT_THAT(v, IsListOfSize(0));
}
TEST_F(TrivialExpressionTest, attrs) {
auto v = eval("{}");
ASSERT_THAT(v, IsAttrsOfSize(0));
}
TEST_F(TrivialExpressionTest, float) {
auto v = eval("1.234");
ASSERT_THAT(v, IsFloatEq(1.234));
}
TEST_F(TrivialExpressionTest, updateAttrs) {
auto v = eval("{ a = 1; } // { b = 2; a = 3; }");
ASSERT_THAT(v, IsAttrsOfSize(2));
auto a = v.attrs->find(createSymbol("a"));
ASSERT_NE(a, nullptr);
ASSERT_THAT(*a->value, IsIntEq(3));
auto b = v.attrs->find(createSymbol("b"));
ASSERT_NE(b, nullptr);
ASSERT_THAT(*b->value, IsIntEq(2));
}
TEST_F(TrivialExpressionTest, hasAttrOpFalse) {
auto v = eval("{} ? a");
ASSERT_THAT(v, IsFalse());
}
TEST_F(TrivialExpressionTest, hasAttrOpTrue) {
auto v = eval("{ a = 123; } ? a");
ASSERT_THAT(v, IsTrue());
}
TEST_F(TrivialExpressionTest, withFound) {
auto v = eval("with { a = 23; }; a");
ASSERT_THAT(v, IsIntEq(23));
}
TEST_F(TrivialExpressionTest, withNotFound) {
ASSERT_THROW(eval("with {}; a"), Error);
}
TEST_F(TrivialExpressionTest, withOverride) {
auto v = eval("with { a = 23; }; with { a = 42; }; a");
ASSERT_THAT(v, IsIntEq(42));
}
TEST_F(TrivialExpressionTest, letOverWith) {
auto v = eval("let a = 23; in with { a = 1; }; a");
ASSERT_THAT(v, IsIntEq(23));
}
TEST_F(TrivialExpressionTest, multipleLet) {
auto v = eval("let a = 23; in let a = 42; in a");
ASSERT_THAT(v, IsIntEq(42));
}
TEST_F(TrivialExpressionTest, defaultFunctionArgs) {
auto v = eval("({ a ? 123 }: a) {}");
ASSERT_THAT(v, IsIntEq(123));
}
TEST_F(TrivialExpressionTest, defaultFunctionArgsOverride) {
auto v = eval("({ a ? 123 }: a) { a = 5; }");
ASSERT_THAT(v, IsIntEq(5));
}
TEST_F(TrivialExpressionTest, defaultFunctionArgsCaptureBack) {
auto v = eval("({ a ? 123 }@args: args) {}");
ASSERT_THAT(v, IsAttrsOfSize(0));
}
TEST_F(TrivialExpressionTest, defaultFunctionArgsCaptureFront) {
auto v = eval("(args@{ a ? 123 }: args) {}");
ASSERT_THAT(v, IsAttrsOfSize(0));
}
TEST_F(TrivialExpressionTest, assertThrows) {
ASSERT_THROW(eval("let x = arg: assert arg == 1; 123; in x 2"), Error);
}
TEST_F(TrivialExpressionTest, assertPassed) {
auto v = eval("let x = arg: assert arg == 1; 123; in x 1");
ASSERT_THAT(v, IsIntEq(123));
}
class AttrSetMergeTrvialExpressionTest :
public TrivialExpressionTest,
public testing::WithParamInterface<const char*>
{};
TEST_P(AttrSetMergeTrvialExpressionTest, attrsetMergeLazy) {
// Usually Nix rejects duplicate keys in an attrset but it does allow
// so if it is an attribute set that contains disjoint sets of keys.
// The below is equivalent to `{a.b = 1; a.c = 2; }`.
// The attribute set `a` will be a Thunk at first as the attribuets
// have to be merged (or otherwise computed) and that is done in a lazy
// manner.
auto expr = GetParam();
auto v = eval(expr);
ASSERT_THAT(v, IsAttrsOfSize(1));
auto a = v.attrs->find(createSymbol("a"));
ASSERT_NE(a, nullptr);
ASSERT_THAT(*a->value, IsThunk());
state.forceValue(*a->value, noPos);
ASSERT_THAT(*a->value, IsAttrsOfSize(2));
auto b = a->value->attrs->find(createSymbol("b"));
ASSERT_NE(b, nullptr);
ASSERT_THAT(*b->value, IsIntEq(1));
auto c = a->value->attrs->find(createSymbol("c"));
ASSERT_NE(c, nullptr);
ASSERT_THAT(*c->value, IsIntEq(2));
}
INSTANTIATE_TEST_SUITE_P(
attrsetMergeLazy,
AttrSetMergeTrvialExpressionTest,
testing::Values(
"{ a.b = 1; a.c = 2; }",
"{ a = { b = 1; }; a = { c = 2; }; }"
)
);
TEST_F(TrivialExpressionTest, functor) {
auto v = eval("{ __functor = self: arg: self.v + arg; v = 10; } 5");
ASSERT_THAT(v, IsIntEq(15));
}
TEST_F(TrivialExpressionTest, bindOr) {
auto v = eval("{ or = 1; }");
ASSERT_THAT(v, IsAttrsOfSize(1));
auto b = v.attrs->find(createSymbol("or"));
ASSERT_NE(b, nullptr);
ASSERT_THAT(*b->value, IsIntEq(1));
}
TEST_F(TrivialExpressionTest, orCantBeUsed) {
ASSERT_THROW(eval("let or = 1; in or"), Error);
}
} /* namespace nix */

View file

@ -1,122 +0,0 @@
#include <nlohmann/json.hpp>
#include <gtest/gtest.h>
#include <rapidcheck/gtest.h>
#include "tests/path.hh"
#include "tests/libexpr.hh"
#include "tests/value/context.hh"
namespace nix {
TEST(NixStringContextElemTest, empty_invalid) {
EXPECT_THROW(
NixStringContextElem::parse(""),
BadNixStringContextElem);
}
TEST(NixStringContextElemTest, single_bang_invalid) {
EXPECT_THROW(
NixStringContextElem::parse("!"),
BadNixStringContextElem);
}
TEST(NixStringContextElemTest, double_bang_invalid) {
EXPECT_THROW(
NixStringContextElem::parse("!!/"),
BadStorePath);
}
TEST(NixStringContextElemTest, eq_slash_invalid) {
EXPECT_THROW(
NixStringContextElem::parse("=/"),
BadStorePath);
}
TEST(NixStringContextElemTest, slash_invalid) {
EXPECT_THROW(
NixStringContextElem::parse("/"),
BadStorePath);
}
TEST(NixStringContextElemTest, opaque) {
std::string_view opaque = "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x";
auto elem = NixStringContextElem::parse(opaque);
auto * p = std::get_if<NixStringContextElem::Opaque>(&elem);
ASSERT_TRUE(p);
ASSERT_EQ(p->path, StorePath { opaque });
ASSERT_EQ(elem.to_string(), opaque);
}
TEST(NixStringContextElemTest, drvDeep) {
std::string_view drvDeep = "=g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x.drv";
auto elem = NixStringContextElem::parse(drvDeep);
auto * p = std::get_if<NixStringContextElem::DrvDeep>(&elem);
ASSERT_TRUE(p);
ASSERT_EQ(p->drvPath, StorePath { drvDeep.substr(1) });
ASSERT_EQ(elem.to_string(), drvDeep);
}
TEST(NixStringContextElemTest, built) {
std::string_view built = "!foo!g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x.drv";
auto elem = NixStringContextElem::parse(built);
auto * p = std::get_if<NixStringContextElem::Built>(&elem);
ASSERT_TRUE(p);
ASSERT_EQ(p->output, "foo");
ASSERT_EQ(p->drvPath, StorePath { built.substr(5) });
ASSERT_EQ(elem.to_string(), built);
}
}
namespace rc {
using namespace nix;
Gen<NixStringContextElem::Opaque> Arbitrary<NixStringContextElem::Opaque>::arbitrary()
{
return gen::just(NixStringContextElem::Opaque {
.path = *gen::arbitrary<StorePath>(),
});
}
Gen<NixStringContextElem::DrvDeep> Arbitrary<NixStringContextElem::DrvDeep>::arbitrary()
{
return gen::just(NixStringContextElem::DrvDeep {
.drvPath = *gen::arbitrary<StorePath>(),
});
}
Gen<NixStringContextElem::Built> Arbitrary<NixStringContextElem::Built>::arbitrary()
{
return gen::just(NixStringContextElem::Built {
.drvPath = *gen::arbitrary<StorePath>(),
.output = (*gen::arbitrary<StorePathName>()).name,
});
}
Gen<NixStringContextElem> Arbitrary<NixStringContextElem>::arbitrary()
{
switch (*gen::inRange<uint8_t>(0, std::variant_size_v<NixStringContextElem::Raw>)) {
case 0:
return gen::just<NixStringContextElem>(*gen::arbitrary<NixStringContextElem::Opaque>());
case 1:
return gen::just<NixStringContextElem>(*gen::arbitrary<NixStringContextElem::DrvDeep>());
case 2:
return gen::just<NixStringContextElem>(*gen::arbitrary<NixStringContextElem::Built>());
default:
assert(false);
}
}
}
namespace nix {
RC_GTEST_PROP(
NixStringContextElemTest,
prop_round_rip,
(const NixStringContextElem & o))
{
RC_ASSERT(o == NixStringContextElem::parse(o.to_string()));
}
}

View file

@ -1,31 +0,0 @@
#pragma once
///@file
#include <rapidcheck/gen/Arbitrary.h>
#include <value/context.hh>
namespace rc {
using namespace nix;
template<>
struct Arbitrary<NixStringContextElem::Opaque> {
static Gen<NixStringContextElem::Opaque> arbitrary();
};
template<>
struct Arbitrary<NixStringContextElem::Built> {
static Gen<NixStringContextElem::Built> arbitrary();
};
template<>
struct Arbitrary<NixStringContextElem::DrvDeep> {
static Gen<NixStringContextElem::DrvDeep> arbitrary();
};
template<>
struct Arbitrary<NixStringContextElem> {
static Gen<NixStringContextElem> arbitrary();
};
}

View file

@ -1,7 +1,7 @@
#include "value-to-json.hh"
#include "eval-inline.hh"
#include "util.hh"
#include "store-api.hh"
#include "signals.hh"
#include <cstdlib>
#include <iomanip>
@ -31,7 +31,7 @@ json printValueAsJSON(EvalState & state, bool strict,
case nString:
copyContext(v, context);
out = v.string.s;
out = v.c_str();
break;
case nPath:
@ -43,6 +43,7 @@ json printValueAsJSON(EvalState & state, bool strict,
break;
case nNull:
// already initialized as null
break;
case nAttrs: {
@ -59,7 +60,13 @@ json printValueAsJSON(EvalState & state, bool strict,
names.emplace(state.symbols[j.name]);
for (auto & j : names) {
Attr & a(*v.attrs->find(state.symbols.create(j)));
out[j] = printValueAsJSON(state, strict, *a.value, a.pos, context, copyToStore);
try {
out[j] = printValueAsJSON(state, strict, *a.value, a.pos, context, copyToStore);
} catch (Error & e) {
e.addTrace(state.positions[a.pos],
hintfmt("while evaluating attribute '%1%'", j));
throw;
}
}
} else
return printValueAsJSON(state, strict, *i->value, i->pos, context, copyToStore);
@ -68,8 +75,17 @@ json printValueAsJSON(EvalState & state, bool strict,
case nList: {
out = json::array();
for (auto elem : v.listItems())
out.push_back(printValueAsJSON(state, strict, *elem, pos, context, copyToStore));
int i = 0;
for (auto elem : v.listItems()) {
try {
out.push_back(printValueAsJSON(state, strict, *elem, pos, context, copyToStore));
} catch (Error & e) {
e.addTrace({},
hintfmt("while evaluating list element at index %1%", i));
throw;
}
i++;
}
break;
}

View file

@ -1,7 +1,7 @@
#include "value-to-xml.hh"
#include "xml-writer.hh"
#include "eval-inline.hh"
#include "util.hh"
#include "signals.hh"
#include <cstdlib>
@ -74,7 +74,7 @@ static void printValueAsXML(EvalState & state, bool strict, bool location,
case nString:
/* !!! show the context? */
copyContext(v, context);
doc.writeEmptyElement("string", singletonAttrs("value", v.string.s));
doc.writeEmptyElement("string", singletonAttrs("value", v.c_str()));
break;
case nPath:
@ -96,14 +96,14 @@ static void printValueAsXML(EvalState & state, bool strict, bool location,
if (a != v.attrs->end()) {
if (strict) state.forceValue(*a->value, a->pos);
if (a->value->type() == nString)
xmlAttrs["drvPath"] = drvPath = a->value->string.s;
xmlAttrs["drvPath"] = drvPath = a->value->c_str();
}
a = v.attrs->find(state.sOutPath);
if (a != v.attrs->end()) {
if (strict) state.forceValue(*a->value, a->pos);
if (a->value->type() == nString)
xmlAttrs["outPath"] = a->value->string.s;
xmlAttrs["outPath"] = a->value->c_str();
}
XMLOpenElement _(doc, "derivation", xmlAttrs);

View file

@ -2,10 +2,13 @@
///@file
#include <cassert>
#include <climits>
#include <span>
#include "symbol-table.hh"
#include "value/context.hh"
#include "input-accessor.hh"
#include "source-path.hh"
#if HAVE_BOEHMGC
#include <gc/gc_allocator.h>
@ -30,7 +33,6 @@ typedef enum {
tThunk,
tApp,
tLambda,
tBlackhole,
tPrimOp,
tPrimOpApp,
tExternal,
@ -60,12 +62,12 @@ class Bindings;
struct Env;
struct Expr;
struct ExprLambda;
struct ExprBlackHole;
struct PrimOp;
class Symbol;
class PosIdx;
struct Pos;
class StorePath;
class Store;
class EvalState;
class XMLWriter;
@ -137,11 +139,11 @@ private:
friend std::string showType(const Value & v);
void print(const SymbolTable & symbols, std::ostream & str, std::set<const void *> * seen) const;
void print(const SymbolTable &symbols, std::ostream &str, std::set<const void *> *seen, int depth) const;
public:
void print(const SymbolTable & symbols, std::ostream & str, bool showRepeated = false) const;
void print(const SymbolTable &symbols, std::ostream &str, bool showRepeated = false, int depth = INT_MAX) const;
// Functions needed to distinguish the type
// These should be removed eventually, by putting the functionality that's
@ -150,67 +152,79 @@ public:
// type() == nThunk
inline bool isThunk() const { return internalType == tThunk; };
inline bool isApp() const { return internalType == tApp; };
inline bool isBlackhole() const { return internalType == tBlackhole; };
inline bool isBlackhole() const;
// type() == nFunction
inline bool isLambda() const { return internalType == tLambda; };
inline bool isPrimOp() const { return internalType == tPrimOp; };
inline bool isPrimOpApp() const { return internalType == tPrimOpApp; };
/**
* Strings in the evaluator carry a so-called `context` which
* is a list of strings representing store paths. This is to
* allow users to write things like
*
* "--with-freetype2-library=" + freetype + "/lib"
*
* where `freetype` is a derivation (or a source to be copied
* to the store). If we just concatenated the strings without
* keeping track of the referenced store paths, then if the
* string is used as a derivation attribute, the derivation
* will not have the correct dependencies in its inputDrvs and
* inputSrcs.
* The semantics of the context is as follows: when a string
* with context C is used as a derivation attribute, then the
* derivations in C will be added to the inputDrvs of the
* derivation, and the other store paths in C will be added to
* the inputSrcs of the derivations.
* For canonicity, the store paths should be in sorted order.
*/
struct StringWithContext {
const char * c_str;
const char * * context; // must be in sorted order
};
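As an aside, a small hypothetical sketch of the layout described in the comment above (not part of the diff): it stores a string together with one encoded context element, assumes the translation unit is built inside the Nix tree so `value.hh` resolves, and assumes the sorted context array is `nullptr`-terminated, as context-copying helpers elsewhere in the evaluator expect:
```cpp
#include <cassert>

#include "value.hh"

int main()
{
    using namespace nix;

    // One encoded context element (a single derivation output, see
    // value/context.hh); the array is kept sorted and nullptr-terminated.
    static const char * ctx[] = {
        "!out!g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x.drv",
        nullptr
    };

    Value v;
    v.mkString("--with-freetype2-library=/nix/store/example/lib", ctx);

    assert(v.type() == nString);
    assert(v.string_view() == "--with-freetype2-library=/nix/store/example/lib");
    assert(v.context() != nullptr && v.context()[0] != nullptr);
    return 0;
}
```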
struct Path {
InputAccessor * accessor;
const char * path;
};
struct ClosureThunk {
Env * env;
Expr * expr;
};
struct FunctionApplicationThunk {
Value * left, * right;
};
struct Lambda {
Env * env;
ExprLambda * fun;
};
union
{
NixInt integer;
bool boolean;
/**
* Strings in the evaluator carry a so-called `context` which
* is a list of strings representing store paths. This is to
* allow users to write things like
StringWithContext string;
* "--with-freetype2-library=" + freetype + "/lib"
Path _path;
* where `freetype` is a derivation (or a source to be copied
* to the store). If we just concatenated the strings without
* keeping track of the referenced store paths, then if the
* string is used as a derivation attribute, the derivation
* will not have the correct dependencies in its inputDrvs and
* inputSrcs.
* The semantics of the context is as follows: when a string
* with context C is used as a derivation attribute, then the
* derivations in C will be added to the inputDrvs of the
* derivation, and the other store paths in C will be added to
* the inputSrcs of the derivations.
* For canonicity, the store paths should be in sorted order.
*/
struct {
const char * s;
const char * * context; // must be in sorted order
} string;
const char * _path;
Bindings * attrs;
struct {
size_t size;
Value * * elems;
} bigList;
Value * smallList[2];
struct {
Env * env;
Expr * expr;
} thunk;
struct {
Value * left, * right;
} app;
struct {
Env * env;
ExprLambda * fun;
} lambda;
ClosureThunk thunk;
FunctionApplicationThunk app;
Lambda lambda;
PrimOp * primOp;
struct {
Value * left, * right;
} primOpApp;
FunctionApplicationThunk primOpApp;
ExternalValueBase * external;
NixFloat fpoint;
};
@ -218,8 +232,11 @@ public:
/**
* Returns the normal type of a Value. This only returns nThunk if
* the Value hasn't been forceValue'd
*
* @param invalidIsThunk Instead of aborting on an invalid (probably
* 0, so uninitialized) internal type, return `nThunk`.
*/
inline ValueType type() const
inline ValueType type(bool invalidIsThunk = false) const
{
switch (internalType) {
case tInt: return nInt;
@ -232,9 +249,12 @@ public:
case tLambda: case tPrimOp: case tPrimOpApp: return nFunction;
case tExternal: return nExternal;
case tFloat: return nFloat;
case tThunk: case tApp: case tBlackhole: return nThunk;
case tThunk: case tApp: return nThunk;
}
abort();
if (invalidIsThunk)
return nThunk;
else
abort();
}
/**
@ -263,7 +283,7 @@ public:
inline void mkString(const char * s, const char * * context = 0)
{
internalType = tString;
string.s = s;
string.c_str = s;
string.context = context;
}
@ -280,11 +300,12 @@ public:
void mkPath(const SourcePath & path);
inline void mkPath(const char * path)
inline void mkPath(InputAccessor * accessor, const char * path)
{
clearValue();
internalType = tPath;
_path = path;
_path.accessor = accessor;
_path.path = path;
}
inline void mkNull()
@ -336,19 +357,9 @@ public:
lambda.fun = f;
}
inline void mkBlackhole()
{
internalType = tBlackhole;
// Value will be overridden anyways
}
inline void mkPrimOp(PrimOp * p)
{
clearValue();
internalType = tPrimOp;
primOp = p;
}
inline void mkBlackhole();
void mkPrimOp(PrimOp * p);
inline void mkPrimOpApp(Value * l, Value * r)
{
@ -381,7 +392,13 @@ public:
return internalType == tList1 || internalType == tList2 ? smallList : bigList.elems;
}
const Value * const * listElems() const
std::span<Value * const> listItems() const
{
assert(isList());
return std::span<Value * const>(listElems(), listSize());
}
Value * const * listElems() const
{
return internalType == tList1 || internalType == tList2 ? smallList : bigList.elems;
}
@ -400,48 +417,47 @@ public:
*/
bool isTrivial() const;
auto listItems()
{
struct ListIterable
{
typedef Value * const * iterator;
iterator _begin, _end;
iterator begin() const { return _begin; }
iterator end() const { return _end; }
};
assert(isList());
auto begin = listElems();
return ListIterable { begin, begin + listSize() };
}
auto listItems() const
{
struct ConstListIterable
{
typedef const Value * const * iterator;
iterator _begin, _end;
iterator begin() const { return _begin; }
iterator end() const { return _end; }
};
assert(isList());
auto begin = listElems();
return ConstListIterable { begin, begin + listSize() };
}
SourcePath path() const
{
assert(internalType == tPath);
return SourcePath{CanonPath(_path)};
return SourcePath(
ref(_path.accessor->shared_from_this()),
CanonPath(CanonPath::unchecked_t(), _path.path));
}
std::string_view str() const
std::string_view string_view() const
{
assert(internalType == tString);
return std::string_view(string.s);
return std::string_view(string.c_str);
}
const char * const c_str() const
{
assert(internalType == tString);
return string.c_str;
}
const char * * context() const
{
return string.context;
}
};
extern ExprBlackHole eBlackHole;
bool Value::isBlackhole() const
{
return internalType == tThunk && thunk.expr == (Expr*) &eBlackHole;
}
void Value::mkBlackhole()
{
internalType = tThunk;
thunk.expr = (Expr*) &eBlackHole;
}
#if HAVE_BOEHMGC
typedef std::vector<Value *, traceable_allocator<Value *>> ValueVector;
typedef std::map<Symbol, Value *, std::less<Symbol>, traceable_allocator<std::pair<const Symbol, Value *>>> ValueMap;

View file

@ -1,32 +1,56 @@
#include "util.hh"
#include "value/context.hh"
#include <optional>
namespace nix {
NixStringContextElem NixStringContextElem::parse(std::string_view s0)
NixStringContextElem NixStringContextElem::parse(
std::string_view s0,
const ExperimentalFeatureSettings & xpSettings)
{
std::string_view s = s0;
std::function<SingleDerivedPath()> parseRest;
parseRest = [&]() -> SingleDerivedPath {
// Case on whether there is a '!'
size_t index = s.find("!");
if (index == std::string_view::npos) {
return SingleDerivedPath::Opaque {
.path = StorePath { s },
};
} else {
std::string output { s.substr(0, index) };
// Advance string to parse after the '!'
s = s.substr(index + 1);
auto drv = make_ref<SingleDerivedPath>(parseRest());
drvRequireExperiment(*drv, xpSettings);
return SingleDerivedPath::Built {
.drvPath = std::move(drv),
.output = std::move(output),
};
}
};
if (s.size() == 0) {
throw BadNixStringContextElem(s0,
"String context element should never be an empty string");
}
switch (s.at(0)) {
case '!': {
s = s.substr(1); // advance string to parse after first !
size_t index = s.find("!");
// This makes index + 1 safe. Index can be the length (one after index
// of last character), so given any valid character index --- a
// successful find --- we can add one.
if (index == std::string_view::npos) {
// Advance string to parse after the '!'
s = s.substr(1);
// Find *second* '!'
if (s.find("!") == std::string_view::npos) {
throw BadNixStringContextElem(s0,
"String content element beginning with '!' should have a second '!'");
}
return NixStringContextElem::Built {
.drvPath = StorePath { s.substr(index + 1) },
.output = std::string(s.substr(0, index)),
};
return std::visit(
[&](auto x) -> NixStringContextElem { return std::move(x); },
parseRest());
}
case '=': {
return NixStringContextElem::DrvDeep {
@ -34,33 +58,51 @@ NixStringContextElem NixStringContextElem::parse(std::string_view s0)
};
}
default: {
return NixStringContextElem::Opaque {
.path = StorePath { s },
};
// Ensure no '!'
if (s.find("!") != std::string_view::npos) {
throw BadNixStringContextElem(s0,
"String content element not beginning with '!' should not have a second '!'");
}
return std::visit(
[&](auto x) -> NixStringContextElem { return std::move(x); },
parseRest());
}
}
}
std::string NixStringContextElem::to_string() const {
return std::visit(overloaded {
std::string NixStringContextElem::to_string() const
{
std::string res;
std::function<void(const SingleDerivedPath &)> toStringRest;
toStringRest = [&](auto & p) {
std::visit(overloaded {
[&](const SingleDerivedPath::Opaque & o) {
res += o.path.to_string();
},
[&](const SingleDerivedPath::Built & o) {
res += o.output;
res += '!';
toStringRest(*o.drvPath);
},
}, p.raw());
};
std::visit(overloaded {
[&](const NixStringContextElem::Built & b) {
std::string res;
res += '!';
res += b.output;
res += '!';
res += b.drvPath.to_string();
return res;
},
[&](const NixStringContextElem::DrvDeep & d) {
std::string res;
res += '=';
res += d.drvPath.to_string();
return res;
toStringRest(b);
},
[&](const NixStringContextElem::Opaque & o) {
return std::string { o.path.to_string() };
toStringRest(o);
},
}, raw());
[&](const NixStringContextElem::DrvDeep & d) {
res += '=';
res += d.drvPath.to_string();
},
}, raw);
return res;
}
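/* Illustrative sketch (not part of this change), showing how a single-level
   `Built` element round-trips through parse() and to_string(); the store path
   base name below is hypothetical:

       auto elem = NixStringContextElem::parse(
           "!out!g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv");
       // elem is a NixStringContextElem::Built with output "out"
       assert(elem.to_string()
           == "!out!g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv");
*/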
}

View file

@ -1,11 +1,9 @@
#pragma once
///@file
#include "util.hh"
#include "comparator.hh"
#include "path.hh"
#include <variant>
#include "derived-path.hh"
#include "variant-wrapper.hh"
#include <nlohmann/json_fwd.hpp>
@ -26,75 +24,59 @@ public:
}
};
/**
* Plain opaque path to some store object.
*
* Encoded as just the path: <path>.
*/
struct NixStringContextElem_Opaque {
StorePath path;
struct NixStringContextElem {
/**
* Plain opaque path to some store object.
*
* Encoded as just the path: <path>.
*/
using Opaque = SingleDerivedPath::Opaque;
GENERATE_CMP(NixStringContextElem_Opaque, me->path);
};
/**
* Path to a derivation and its entire build closure.
*
* The path doesn't just refer to derivation itself and its closure, but
* also all outputs of all derivations in that closure (including the
* root derivation).
*
* Encoded in the form =<drvPath>.
*/
struct DrvDeep {
StorePath drvPath;
/**
* Path to a derivation and its entire build closure.
*
* The path doesn't just refer to derivation itself and its closure, but
* also all outputs of all derivations in that closure (including the
* root derivation).
*
* Encoded in the form =<drvPath>.
*/
struct NixStringContextElem_DrvDeep {
StorePath drvPath;
GENERATE_CMP(DrvDeep, me->drvPath);
};
GENERATE_CMP(NixStringContextElem_DrvDeep, me->drvPath);
};
/**
* Derivation output.
*
* Encoded in the form !<output>!<drvPath>.
*/
using Built = SingleDerivedPath::Built;
/**
* Derivation output.
*
* Encoded in the form !<output>!<drvPath>.
*/
struct NixStringContextElem_Built {
StorePath drvPath;
std::string output;
using Raw = std::variant<
Opaque,
DrvDeep,
Built
>;
GENERATE_CMP(NixStringContextElem_Built, me->drvPath, me->output);
};
Raw raw;
using _NixStringContextElem_Raw = std::variant<
NixStringContextElem_Opaque,
NixStringContextElem_DrvDeep,
NixStringContextElem_Built
>;
GENERATE_CMP(NixStringContextElem, me->raw);
struct NixStringContextElem : _NixStringContextElem_Raw {
using Raw = _NixStringContextElem_Raw;
using Raw::Raw;
using Opaque = NixStringContextElem_Opaque;
using DrvDeep = NixStringContextElem_DrvDeep;
using Built = NixStringContextElem_Built;
inline const Raw & raw() const & {
return static_cast<const Raw &>(*this);
}
inline Raw & raw() & {
return static_cast<Raw &>(*this);
}
inline Raw && raw() && {
return static_cast<Raw &&>(*this);
}
MAKE_WRAPPER_CONSTRUCTOR(NixStringContextElem);
/**
* Decode a context string, one of:
* - <path>
* - =<path>
* - !<name>!<path>
*
* @param xpSettings Stop-gap to avoid globals during unit tests.
*/
static NixStringContextElem parse(std::string_view s);
static NixStringContextElem parse(
std::string_view s,
const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
std::string to_string() const;
};

View file

@ -13,6 +13,12 @@
namespace nix::fetchers {
typedef std::variant<std::string, uint64_t, Explicit<bool>> Attr;
/**
* An `Attrs` can be thought of a JSON object restricted or simplified
* to be "flat", not containing any subcontainers (arrays or objects)
* and also not containing any `null`s.
*/
typedef std::map<std::string, Attr> Attrs;
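/* Illustrative sketch (not part of this change): a flat `Attrs` value mixing
   the three supported attribute types (keys chosen for illustration only):

       Attrs attrs{
           {"type", "git"},                    // std::string
           {"revCount", uint64_t(42)},         // uint64_t
           {"shallow", Explicit<bool>{true}},  // Explicit<bool>
       };
*/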
Attrs jsonToAttrs(const nlohmann::json & json);

View file

@ -1,4 +1,5 @@
#include "cache.hh"
#include "users.hh"
#include "sqlite.hh"
#include "sync.hh"
#include "store-api.hh"
@ -19,6 +20,9 @@ create table if not exists Cache (
);
)sql";
// FIXME: we should periodically purge/nuke this cache to prevent it
// from growing too big.
struct CacheImpl : Cache
{
struct State
@ -47,8 +51,62 @@ struct CacheImpl : Cache
"select info, path, immutable, timestamp from Cache where input = ?");
}
void upsert(
const Attrs & inAttrs,
const Attrs & infoAttrs) override
{
_state.lock()->add.use()
(attrsToJSON(inAttrs).dump())
(attrsToJSON(infoAttrs).dump())
("") // no path
(false)
(time(0)).exec();
}
std::optional<Attrs> lookup(const Attrs & inAttrs) override
{
if (auto res = lookupExpired(inAttrs))
return std::move(res->infoAttrs);
return {};
}
std::optional<Attrs> lookupWithTTL(const Attrs & inAttrs) override
{
if (auto res = lookupExpired(inAttrs)) {
if (!res->expired)
return std::move(res->infoAttrs);
debug("ignoring expired cache entry '%s'",
attrsToJSON(inAttrs).dump());
}
return {};
}
std::optional<Result2> lookupExpired(const Attrs & inAttrs) override
{
auto state(_state.lock());
auto inAttrsJSON = attrsToJSON(inAttrs).dump();
auto stmt(state->lookup.use()(inAttrsJSON));
if (!stmt.next()) {
debug("did not find cache entry for '%s'", inAttrsJSON);
return {};
}
auto infoJSON = stmt.getStr(0);
auto locked = stmt.getInt(2) != 0;
auto timestamp = stmt.getInt(3);
debug("using cache entry '%s' -> '%s'", inAttrsJSON, infoJSON);
return Result2 {
.expired = !locked && (settings.tarballTtl.get() == 0 || timestamp + settings.tarballTtl < time(0)),
.infoAttrs = jsonToAttrs(nlohmann::json::parse(infoJSON)),
};
}
void add(
ref<Store> store,
Store & store,
const Attrs & inAttrs,
const Attrs & infoAttrs,
const StorePath & storePath,
@ -57,13 +115,13 @@ struct CacheImpl : Cache
_state.lock()->add.use()
(attrsToJSON(inAttrs).dump())
(attrsToJSON(infoAttrs).dump())
(store->printStorePath(storePath))
(store.printStorePath(storePath))
(locked)
(time(0)).exec();
}
std::optional<std::pair<Attrs, StorePath>> lookup(
ref<Store> store,
Store & store,
const Attrs & inAttrs) override
{
if (auto res = lookupExpired(store, inAttrs)) {
@ -76,7 +134,7 @@ struct CacheImpl : Cache
}
std::optional<Result> lookupExpired(
ref<Store> store,
Store & store,
const Attrs & inAttrs) override
{
auto state(_state.lock());
@ -90,19 +148,19 @@ struct CacheImpl : Cache
}
auto infoJSON = stmt.getStr(0);
auto storePath = store->parseStorePath(stmt.getStr(1));
auto storePath = store.parseStorePath(stmt.getStr(1));
auto locked = stmt.getInt(2) != 0;
auto timestamp = stmt.getInt(3);
store->addTempRoot(storePath);
if (!store->isValidPath(storePath)) {
store.addTempRoot(storePath);
if (!store.isValidPath(storePath)) {
// FIXME: we could try to substitute 'storePath'.
debug("ignoring disappeared cache entry '%s'", inAttrsJSON);
return {};
}
debug("using cache entry '%s' -> '%s', '%s'",
inAttrsJSON, infoJSON, store->printStorePath(storePath));
inAttrsJSON, infoJSON, store.printStorePath(storePath));
return Result {
.expired = !locked && (settings.tarballTtl.get() == 0 || timestamp + settings.tarballTtl < time(0)),

View file

@ -2,22 +2,62 @@
///@file
#include "fetchers.hh"
#include "path.hh"
namespace nix::fetchers {
/**
* A cache for arbitrary `Attrs` -> `Attrs` mappings with a timestamp
* for expiration.
*/
struct Cache
{
virtual ~Cache() { }
/**
* Add a value to the cache. The cache is an arbitrary mapping of
* Attrs to Attrs.
*/
virtual void upsert(
const Attrs & inAttrs,
const Attrs & infoAttrs) = 0;
/**
* Look up a key with infinite TTL.
*/
virtual std::optional<Attrs> lookup(
const Attrs & inAttrs) = 0;
/**
* Look up a key. Return nothing if its TTL has exceeded
* `settings.tarballTtl`.
*/
virtual std::optional<Attrs> lookupWithTTL(
const Attrs & inAttrs) = 0;
struct Result2
{
bool expired = false;
Attrs infoAttrs;
};
/**
* Look up a key. Return a bool denoting whether its TTL has
* exceeded `settings.tarballTtl`.
*/
virtual std::optional<Result2> lookupExpired(
const Attrs & inAttrs) = 0;
/* Old cache for things that have a store path. */
virtual void add(
ref<Store> store,
Store & store,
const Attrs & inAttrs,
const Attrs & infoAttrs,
const StorePath & storePath,
bool locked) = 0;
virtual std::optional<std::pair<Attrs, StorePath>> lookup(
ref<Store> store,
Store & store,
const Attrs & inAttrs) = 0;
struct Result
@ -28,7 +68,7 @@ struct Cache
};
virtual std::optional<Result> lookupExpired(
ref<Store> store,
Store & store,
const Attrs & inAttrs) = 0;
};
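/* Illustrative sketch (not part of this change), assuming the `getCache()`
   accessor used elsewhere in libfetchers:

       auto cache = getCache();
       cache->upsert(inAttrs, infoAttrs);
       if (auto infoAttrs = cache->lookupWithTTL(inAttrs)) {
           // use the cached attrs, known to be within the tarball TTL
       }
*/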

View file

@ -3,7 +3,6 @@
#include "types.hh"
#include "config.hh"
#include "util.hh"
#include <map>
#include <limits>

View file

@ -0,0 +1,68 @@
#include "fetch-to-store.hh"
#include "fetchers.hh"
#include "cache.hh"
namespace nix {
StorePath fetchToStore(
Store & store,
const SourcePath & path,
std::string_view name,
ContentAddressMethod method,
PathFilter * filter,
RepairFlag repair)
{
// FIXME: add an optimisation for the case where the accessor is
// an FSInputAccessor pointing to a store path.
std::optional<fetchers::Attrs> cacheKey;
if (!filter && path.accessor->fingerprint) {
cacheKey = fetchers::Attrs{
{"_what", "fetchToStore"},
{"store", store.storeDir},
{"name", std::string(name)},
{"fingerprint", *path.accessor->fingerprint},
{
"method",
std::visit(overloaded {
[](const TextIngestionMethod &) {
return "text";
},
[](const FileIngestionMethod & fim) {
switch (fim) {
case FileIngestionMethod::Flat: return "flat";
case FileIngestionMethod::Recursive: return "nar";
default: assert(false);
}
},
}, method.raw),
},
{"path", path.path.abs()}
};
if (auto res = fetchers::getCache()->lookup(store, *cacheKey)) {
debug("store path cache hit for '%s'", path);
return res->second;
}
} else
debug("source path '%s' is uncacheable", path);
Activity act(*logger, lvlChatty, actUnknown, fmt("copying '%s' to the store", path));
auto filter2 = filter ? *filter : defaultPathFilter;
auto storePath =
settings.readOnlyMode
? store.computeStorePath(
name, *path.accessor, path.path, method, HashAlgorithm::SHA256, {}, filter2).first
: store.addToStore(
name, *path.accessor, path.path, method, HashAlgorithm::SHA256, {}, filter2, repair);
if (cacheKey)
fetchers::getCache()->add(store, *cacheKey, {}, storePath, true);
return storePath;
}
}

View file

@ -0,0 +1,22 @@
#pragma once
#include "source-path.hh"
#include "store-api.hh"
#include "file-system.hh"
#include "repair-flag.hh"
#include "file-content-address.hh"
namespace nix {
/**
* Copy the `path` to the Nix store.
*/
StorePath fetchToStore(
Store & store,
const SourcePath & path,
std::string_view name = "source",
ContentAddressMethod method = FileIngestionMethod::Recursive,
PathFilter * filter = nullptr,
RepairFlag repair = NoRepair);
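/* Illustrative sketch (not part of this change): copying an accessor-backed
   source path into the store using the defaults declared above:

       StorePath storePath = fetchToStore(*store, SourcePath(accessor), "source");
*/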
}

View file

@ -1,21 +1,43 @@
#include "fetchers.hh"
#include "store-api.hh"
#include "input-accessor.hh"
#include "source-path.hh"
#include "fetch-to-store.hh"
#include <nlohmann/json.hpp>
namespace nix::fetchers {
std::unique_ptr<std::vector<std::shared_ptr<InputScheme>>> inputSchemes = nullptr;
using InputSchemeMap = std::map<std::string_view, std::shared_ptr<InputScheme>>;
std::unique_ptr<InputSchemeMap> inputSchemes = nullptr;
void registerInputScheme(std::shared_ptr<InputScheme> && inputScheme)
{
if (!inputSchemes) inputSchemes = std::make_unique<std::vector<std::shared_ptr<InputScheme>>>();
inputSchemes->push_back(std::move(inputScheme));
if (!inputSchemes)
inputSchemes = std::make_unique<InputSchemeMap>();
auto schemeName = inputScheme->schemeName();
if (inputSchemes->count(schemeName) > 0)
throw Error("Input scheme with name %s already registered", schemeName);
inputSchemes->insert_or_assign(schemeName, std::move(inputScheme));
}
Input Input::fromURL(const std::string & url)
nlohmann::json dumpRegisterInputSchemeInfo() {
using nlohmann::json;
auto res = json::object();
for (auto & [name, scheme] : *inputSchemes) {
auto & r = res[name] = json::object();
r["allowedAttrs"] = scheme->allowedAttrs();
}
return res;
}
Input Input::fromURL(const std::string & url, bool requireTree)
{
return fromURL(parseURL(url));
return fromURL(parseURL(url), requireTree);
}
static void fixupInput(Input & input)
@ -31,11 +53,12 @@ static void fixupInput(Input & input)
input.locked = true;
}
Input Input::fromURL(const ParsedURL & url)
Input Input::fromURL(const ParsedURL & url, bool requireTree)
{
for (auto & inputScheme : *inputSchemes) {
auto res = inputScheme->inputFromURL(url);
for (auto & [_, inputScheme] : *inputSchemes) {
auto res = inputScheme->inputFromURL(url, requireTree);
if (res) {
experimentalFeatureSettings.require(inputScheme->experimentalFeature());
res->scheme = inputScheme;
fixupInput(*res);
return std::move(*res);
@ -47,19 +70,49 @@ Input Input::fromURL(const ParsedURL & url)
Input Input::fromAttrs(Attrs && attrs)
{
for (auto & inputScheme : *inputSchemes) {
auto res = inputScheme->inputFromAttrs(attrs);
if (res) {
res->scheme = inputScheme;
fixupInput(*res);
return std::move(*res);
}
}
auto schemeName = ({
auto schemeNameOpt = maybeGetStrAttr(attrs, "type");
if (!schemeNameOpt)
throw Error("'type' attribute to specify input scheme is required but not provided");
*std::move(schemeNameOpt);
});
Input input;
input.attrs = attrs;
fixupInput(input);
return input;
auto raw = [&]() {
// Return an input without a scheme; most operations will fail,
// but not all of them. We do this to support the other
// operations that are supposed to be robust on
// unknown/uninterpretable inputs.
Input input;
input.attrs = attrs;
fixupInput(input);
return input;
};
std::shared_ptr<InputScheme> inputScheme = ({
auto i = inputSchemes->find(schemeName);
i == inputSchemes->end() ? nullptr : i->second;
});
if (!inputScheme) return raw();
experimentalFeatureSettings.require(inputScheme->experimentalFeature());
auto allowedAttrs = inputScheme->allowedAttrs();
for (auto & [name, _] : attrs)
if (name != "type" && allowedAttrs.count(name) == 0)
throw Error("input attribute '%s' not supported by scheme '%s'", name, schemeName);
auto res = inputScheme->inputFromAttrs(attrs);
if (!res) return raw();
res->scheme = inputScheme;
fixupInput(*res);
return std::move(*res);
}
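/* Illustrative sketch (not part of this change): constructing an input from
   attributes. Attributes other than `type` must now appear in the scheme's
   allowedAttrs(); the URL below is hypothetical.

       auto input = Input::fromAttrs(Attrs{
           {"type", "git"},
           {"url", "https://example.org/repo.git"},
       });
*/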
std::optional<std::string> Input::getFingerprint(ref<Store> store) const
{
return scheme ? scheme->getFingerprint(store, *this) : std::nullopt;
}
ParsedURL Input::toURL() const
@ -82,16 +135,16 @@ std::string Input::to_string() const
return toURL().to_string();
}
bool Input::isDirect() const
{
return !scheme || scheme->isDirect(*this);
}
Attrs Input::toAttrs() const
{
return attrs;
}
bool Input::hasAllInfo() const
{
return getNarHash() && scheme && scheme->hasAllInfo(*this);
}
bool Input::operator ==(const Input & other) const
{
return attrs == other.attrs;
@ -107,7 +160,7 @@ bool Input::contains(const Input & other) const
return false;
}
std::pair<Tree, Input> Input::fetch(ref<Store> store) const
std::pair<StorePath, Input> Input::fetch(ref<Store> store) const
{
if (!scheme)
throw Error("cannot fetch unsupported input '%s'", attrsToJSON(toAttrs()));
@ -115,7 +168,7 @@ std::pair<Tree, Input> Input::fetch(ref<Store> store) const
/* The tree may already be in the Nix store, or it could be
substituted (which is often faster than fetching from the
original source). So check that. */
if (hasAllInfo()) {
if (getNarHash()) {
try {
auto storePath = computeStorePath(*store);
@ -124,7 +177,7 @@ std::pair<Tree, Input> Input::fetch(ref<Store> store) const
debug("using substituted/cached input '%s' in '%s'",
to_string(), store->printStorePath(storePath));
return {Tree { .actualPath = store->toRealPath(storePath), .storePath = std::move(storePath) }, *this};
return {std::move(storePath), *this};
} catch (Error & e) {
debug("substitution of input '%s' failed: %s", to_string(), e.what());
}
@ -139,18 +192,16 @@ std::pair<Tree, Input> Input::fetch(ref<Store> store) const
}
}();
Tree tree {
.actualPath = store->toRealPath(storePath),
.storePath = storePath,
};
auto narHash = store->queryPathInfo(tree.storePath)->narHash;
input.attrs.insert_or_assign("narHash", narHash.to_string(SRI, true));
auto narHash = store->queryPathInfo(storePath)->narHash;
input.attrs.insert_or_assign("narHash", narHash.to_string(HashFormat::SRI, true));
if (auto prevNarHash = getNarHash()) {
if (narHash != *prevNarHash)
throw Error((unsigned int) 102, "NAR hash mismatch in input '%s' (%s), expected '%s', got '%s'",
to_string(), tree.actualPath, prevNarHash->to_string(SRI, true), narHash.to_string(SRI, true));
to_string(),
store->printStorePath(storePath),
prevNarHash->to_string(HashFormat::SRI, true),
narHash.to_string(HashFormat::SRI, true));
}
if (auto prevLastModified = getLastModified()) {
@ -173,9 +224,17 @@ std::pair<Tree, Input> Input::fetch(ref<Store> store) const
input.locked = true;
assert(input.hasAllInfo());
return {std::move(storePath), input};
}
return {std::move(tree), input};
std::pair<ref<InputAccessor>, Input> Input::getAccessor(ref<Store> store) const
{
try {
return scheme->getAccessor(store, *this);
} catch (Error & e) {
e.addTrace({}, "while fetching the input '%s'", to_string());
throw;
}
}
Input Input::applyOverrides(
@ -198,12 +257,13 @@ std::optional<Path> Input::getSourcePath() const
return scheme->getSourcePath(*this);
}
void Input::markChangedFile(
std::string_view file,
void Input::putFile(
const CanonPath & path,
std::string_view contents,
std::optional<std::string> commitMsg) const
{
assert(scheme);
return scheme->markChangedFile(*this, file, commitMsg);
return scheme->putFile(*this, path, contents, commitMsg);
}
std::string Input::getName() const
@ -217,10 +277,8 @@ StorePath Input::computeStorePath(Store & store) const
if (!narHash)
throw Error("cannot compute store path for unlocked input '%s'", to_string());
return store.makeFixedOutputPath(getName(), FixedOutputInfo {
.hash = {
.method = FileIngestionMethod::Recursive,
.hash = *narHash,
},
.method = FileIngestionMethod::Recursive,
.hash = *narHash,
.references = {},
});
}
@ -233,8 +291,8 @@ std::string Input::getType() const
std::optional<Hash> Input::getNarHash() const
{
if (auto s = maybeGetStrAttr(attrs, "narHash")) {
auto hash = s->empty() ? Hash(htSHA256) : Hash::parseSRI(*s);
if (hash.type != htSHA256)
auto hash = s->empty() ? Hash(HashAlgorithm::SHA256) : Hash::parseSRI(*s);
if (hash.algo != HashAlgorithm::SHA256)
throw UsageError("narHash must use SHA-256");
return hash;
}
@ -256,8 +314,9 @@ std::optional<Hash> Input::getRev() const
try {
hash = Hash::parseAnyPrefixed(*s);
} catch (BadHash &e) {
// Default to sha1 for backwards compatibility with existing flakes
hash = Hash::parseAny(*s, htSHA1);
// Default to sha1 for backwards compatibility with existing
// usages (e.g. `builtins.fetchTree` calls or flake inputs).
hash = Hash::parseAny(*s, HashAlgorithm::SHA1);
}
}
@ -295,14 +354,18 @@ Input InputScheme::applyOverrides(
return input;
}
std::optional<Path> InputScheme::getSourcePath(const Input & input)
std::optional<Path> InputScheme::getSourcePath(const Input & input) const
{
return {};
}
void InputScheme::markChangedFile(const Input & input, std::string_view file, std::optional<std::string> commitMsg)
void InputScheme::putFile(
const Input & input,
const CanonPath & path,
std::string_view contents,
std::optional<std::string> commitMsg) const
{
assert(false);
throw Error("input '%s' does not support modifying file '%s'", input.to_string(), path);
}
void InputScheme::clone(const Input & input, const Path & destDir) const
@ -310,4 +373,26 @@ void InputScheme::clone(const Input & input, const Path & destDir) const
throw Error("do not know how to clone input '%s'", input.to_string());
}
std::pair<StorePath, Input> InputScheme::fetch(ref<Store> store, const Input & input)
{
auto [accessor, input2] = getAccessor(store, input);
auto storePath = fetchToStore(*store, SourcePath(accessor), input2.getName());
return {storePath, input2};
}
std::pair<ref<InputAccessor>, Input> InputScheme::getAccessor(ref<Store> store, const Input & input) const
{
throw UnimplementedError("InputScheme must implement fetch() or getAccessor()");
}
std::optional<ExperimentalFeature> InputScheme::experimentalFeature() const
{
return {};
}
std::string publicKeys_to_string(const std::vector<PublicKey>& publicKeys)
{
return ((nlohmann::json) publicKeys).dump();
}
}

View file

@ -3,31 +3,25 @@
#include "types.hh"
#include "hash.hh"
#include "path.hh"
#include "canon-path.hh"
#include "attrs.hh"
#include "url.hh"
#include <memory>
#include <nlohmann/json_fwd.hpp>
namespace nix { class Store; }
namespace nix { class Store; class StorePath; struct InputAccessor; }
namespace nix::fetchers {
struct Tree
{
Path actualPath;
StorePath storePath;
};
struct InputScheme;
/**
* The Input object is generated by a specific fetcher, based on the
* user-supplied input attribute in the flake.nix file, and contains
* The `Input` object is generated by a specific fetcher, based on
* user-supplied information, and contains
* the information that the specific fetcher needs to perform the
* actual fetch. The Input object is most commonly created via the
* "fromURL()" or "fromAttrs()" static functions which are provided
* the url or attrset specified in the flake file.
* `fromURL()` or `fromAttrs()` static functions.
*/
struct Input
{
@ -36,7 +30,6 @@ struct Input
std::shared_ptr<InputScheme> scheme; // note: can be null
Attrs attrs;
bool locked = false;
bool direct = true;
/**
* path of the parent of this input, used for relative path resolution
@ -44,10 +37,20 @@ struct Input
std::optional<Path> parent;
public:
static Input fromURL(const std::string & url);
/**
* Create an `Input` from a URL.
*
* The URL indicates which fetcher to use, and provides information to that fetcher.
*/
static Input fromURL(const std::string & url, bool requireTree = true);
static Input fromURL(const ParsedURL & url);
static Input fromURL(const ParsedURL & url, bool requireTree = true);
/**
* Create an `Input` from an `Attrs`.
*
* The attributes indicate which fetcher to use, and provide information to that fetcher.
*/
static Input fromAttrs(Attrs && attrs);
ParsedURL toURL() const;
@ -62,7 +65,7 @@ public:
* Check whether this is a "direct" input, that is, not
* one that goes through a registry.
*/
bool isDirect() const { return direct; }
bool isDirect() const;
/**
* Check whether this is a "locked" input, that is,
@ -70,24 +73,17 @@ public:
*/
bool isLocked() const { return locked; }
/**
* Check whether the input carries all necessary info required
* for cache insertion and substitution.
* These fields are used to uniquely identify cached trees
* within the "tarball TTL" window without necessarily
* indicating that the input's origin is unchanged.
*/
bool hasAllInfo() const;
bool operator ==(const Input & other) const;
bool contains(const Input & other) const;
/**
* Fetch the input into the Nix store, returning the location in
* the Nix store and the locked input.
* Fetch the entire input into the Nix store, returning the
* location in the Nix store and the locked input.
*/
std::pair<Tree, Input> fetch(ref<Store> store) const;
std::pair<StorePath, Input> fetch(ref<Store> store) const;
std::pair<ref<InputAccessor>, Input> getAccessor(ref<Store> store) const;
Input applyOverrides(
std::optional<std::string> ref,
@ -97,8 +93,13 @@ public:
std::optional<Path> getSourcePath() const;
void markChangedFile(
std::string_view file,
/**
* Write a file to this input, for input types that support
* writing. Optionally commit the change (for e.g. Git inputs).
*/
void putFile(
const CanonPath & path,
std::string_view contents,
std::optional<std::string> commitMsg) const;
std::string getName() const;
@ -112,30 +113,52 @@ public:
std::optional<Hash> getRev() const;
std::optional<uint64_t> getRevCount() const;
std::optional<time_t> getLastModified() const;
/**
* For locked inputs, return a string that uniquely specifies the
* content of the input (typically a commit hash or content hash).
*/
std::optional<std::string> getFingerprint(ref<Store> store) const;
};
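/* Illustrative sketch (not part of this change): the two ways to materialise
   an input declared above.

       auto [storePath, lockedInput] = input.fetch(store);        // copy into the store
       auto [accessor, lockedInput2] = input.getAccessor(store);  // lazy accessor
*/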
/**
* The InputScheme represents a type of fetcher. Each fetcher
* registers with nix at startup time. When processing an input for a
* flake, each scheme is given an opportunity to "recognize" that
* input from the url or attributes in the flake file's specification
* and return an Input object to represent the input if it is
* recognized. The Input object contains the information the fetcher
* needs to actually perform the "fetch()" when called.
* The `InputScheme` represents a type of fetcher. Each fetcher
* registers with nix at startup time. When processing an `Input`,
* each scheme is given an opportunity to "recognize" that
* input from the user-provided url or attributes
* and return an `Input` object to represent the input if it is
* recognized. The `Input` object contains the information the fetcher
* needs to actually perform the `fetch()` when called.
*/
struct InputScheme
{
virtual ~InputScheme()
{ }
virtual std::optional<Input> inputFromURL(const ParsedURL & url) const = 0;
virtual std::optional<Input> inputFromURL(const ParsedURL & url, bool requireTree) const = 0;
virtual std::optional<Input> inputFromAttrs(const Attrs & attrs) const = 0;
virtual ParsedURL toURL(const Input & input) const;
/**
* What is the name of the scheme?
*
* The `type` attribute is used to select which input scheme is
* used, and then the other fields are forwarded to that input
* scheme.
*/
virtual std::string_view schemeName() const = 0;
virtual bool hasAllInfo(const Input & input) const = 0;
/**
* Allowed attributes in an attribute set that is converted to an
* input.
*
* `type` is not included in this set, because the `type` field is
* parsed first to choose which scheme; `type` is always required.
*/
virtual StringSet allowedAttrs() const = 0;
virtual ParsedURL toURL(const Input & input) const;
virtual Input applyOverrides(
const Input & input,
@ -144,42 +167,41 @@ struct InputScheme
virtual void clone(const Input & input, const Path & destDir) const;
virtual std::optional<Path> getSourcePath(const Input & input);
virtual std::optional<Path> getSourcePath(const Input & input) const;
virtual void markChangedFile(const Input & input, std::string_view file, std::optional<std::string> commitMsg);
virtual void putFile(
const Input & input,
const CanonPath & path,
std::string_view contents,
std::optional<std::string> commitMsg) const;
virtual std::pair<StorePath, Input> fetch(ref<Store> store, const Input & input) = 0;
virtual std::pair<StorePath, Input> fetch(ref<Store> store, const Input & input);
virtual std::pair<ref<InputAccessor>, Input> getAccessor(ref<Store> store, const Input & input) const;
/**
* Is this `InputScheme` part of an experimental feature?
*/
virtual std::optional<ExperimentalFeature> experimentalFeature() const;
virtual bool isDirect(const Input & input) const
{ return true; }
virtual std::optional<std::string> getFingerprint(ref<Store> store, const Input & input) const
{ return std::nullopt; }
};
void registerInputScheme(std::shared_ptr<InputScheme> && fetcher);
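/* Illustrative sketch (not part of this change), assuming the `OnStartup`
   helper from libutil; a hypothetical scheme would typically register itself
   like this:

       static auto rMyInputScheme = OnStartup([] {
           registerInputScheme(std::make_unique<MyInputScheme>());
       });
*/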
struct DownloadFileResult
nlohmann::json dumpRegisterInputSchemeInfo();
struct PublicKey
{
StorePath storePath;
std::string etag;
std::string effectiveUrl;
std::optional<std::string> immutableUrl;
std::string type = "ssh-ed25519";
std::string key;
};
NLOHMANN_DEFINE_TYPE_NON_INTRUSIVE_WITH_DEFAULT(PublicKey, type, key)
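/* Illustrative sketch (not part of this change): `PublicKey` round-trips
   through JSON via the macro above; the key material is hypothetical.

       nlohmann::json j = PublicKey{ .key = "AAAAC3Nza..." };  // `type` defaults to "ssh-ed25519"
       auto k = j.get<PublicKey>();
*/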
DownloadFileResult downloadFile(
ref<Store> store,
const std::string & url,
const std::string & name,
bool locked,
const Headers & headers = {});
struct DownloadTarballResult
{
Tree tree;
time_t lastModified;
std::optional<std::string> immutableUrl;
};
DownloadTarballResult downloadTarball(
ref<Store> store,
const std::string & url,
const std::string & name,
bool locked,
const Headers & headers = {});
std::string publicKeys_to_string(const std::vector<PublicKey>&);
}

View file

@ -0,0 +1,83 @@
#include "filtering-input-accessor.hh"
namespace nix {
std::string FilteringInputAccessor::readFile(const CanonPath & path)
{
checkAccess(path);
return next->readFile(prefix + path);
}
bool FilteringInputAccessor::pathExists(const CanonPath & path)
{
return isAllowed(path) && next->pathExists(prefix + path);
}
std::optional<InputAccessor::Stat> FilteringInputAccessor::maybeLstat(const CanonPath & path)
{
checkAccess(path);
return next->maybeLstat(prefix + path);
}
InputAccessor::DirEntries FilteringInputAccessor::readDirectory(const CanonPath & path)
{
checkAccess(path);
DirEntries entries;
for (auto & entry : next->readDirectory(prefix + path)) {
if (isAllowed(path + entry.first))
entries.insert(std::move(entry));
}
return entries;
}
std::string FilteringInputAccessor::readLink(const CanonPath & path)
{
checkAccess(path);
return next->readLink(prefix + path);
}
std::string FilteringInputAccessor::showPath(const CanonPath & path)
{
return next->showPath(prefix + path);
}
void FilteringInputAccessor::checkAccess(const CanonPath & path)
{
if (!isAllowed(path))
throw makeNotAllowedError
? makeNotAllowedError(path)
: RestrictedPathError("access to path '%s' is forbidden", showPath(path));
}
struct AllowListInputAccessorImpl : AllowListInputAccessor
{
std::set<CanonPath> allowedPaths;
AllowListInputAccessorImpl(
ref<InputAccessor> next,
std::set<CanonPath> && allowedPaths,
MakeNotAllowedError && makeNotAllowedError)
: AllowListInputAccessor(SourcePath(next), std::move(makeNotAllowedError))
, allowedPaths(std::move(allowedPaths))
{ }
bool isAllowed(const CanonPath & path) override
{
return path.isAllowed(allowedPaths);
}
void allowPath(CanonPath path) override
{
allowedPaths.insert(std::move(path));
}
};
ref<AllowListInputAccessor> AllowListInputAccessor::create(
ref<InputAccessor> next,
std::set<CanonPath> && allowedPaths,
MakeNotAllowedError && makeNotAllowedError)
{
return make_ref<AllowListInputAccessorImpl>(next, std::move(allowedPaths), std::move(makeNotAllowedError));
}
}

View file

@ -0,0 +1,74 @@
#pragma once
#include "input-accessor.hh"
#include "source-path.hh"
namespace nix {
/**
* A function that should throw an exception of type
* `RestrictedPathError` explaining that access to `path` is
* forbidden.
*/
typedef std::function<RestrictedPathError(const CanonPath & path)> MakeNotAllowedError;
/**
* An abstract wrapping `InputAccessor` that performs access
* control. Subclasses should override `isAllowed()` to implement an
* access control policy. The error message is customized at construction.
*/
struct FilteringInputAccessor : InputAccessor
{
ref<InputAccessor> next;
CanonPath prefix;
MakeNotAllowedError makeNotAllowedError;
FilteringInputAccessor(const SourcePath & src, MakeNotAllowedError && makeNotAllowedError)
: next(src.accessor)
, prefix(src.path)
, makeNotAllowedError(std::move(makeNotAllowedError))
{ }
std::string readFile(const CanonPath & path) override;
bool pathExists(const CanonPath & path) override;
std::optional<Stat> maybeLstat(const CanonPath & path) override;
DirEntries readDirectory(const CanonPath & path) override;
std::string readLink(const CanonPath & path) override;
std::string showPath(const CanonPath & path) override;
/**
* Call `makeNotAllowedError` to throw a `RestrictedPathError`
* exception if `isAllowed()` returns `false` for `path`.
*/
void checkAccess(const CanonPath & path);
/**
* Return `true` iff access to path is allowed.
*/
virtual bool isAllowed(const CanonPath & path) = 0;
};
/**
* A wrapping `InputAccessor` that checks paths against an allow-list.
*/
struct AllowListInputAccessor : public FilteringInputAccessor
{
/**
* Grant access to the specified path.
*/
virtual void allowPath(CanonPath path) = 0;
static ref<AllowListInputAccessor> create(
ref<InputAccessor> next,
std::set<CanonPath> && allowedPaths,
MakeNotAllowedError && makeNotAllowedError);
using FilteringInputAccessor::FilteringInputAccessor;
};
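/* Illustrative sketch (not part of this change): restricting an accessor to an
   allow-list with a custom error message; `next` and the allowed path are
   hypothetical.

       auto accessor = AllowListInputAccessor::create(
           next,
           {CanonPath("/flake.nix")},
           [](const CanonPath & path) {
               return RestrictedPathError("access to '%s' is not allowed", path);
           });
*/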
}

View file

@ -0,0 +1,78 @@
#include "fs-input-accessor.hh"
#include "posix-source-accessor.hh"
#include "store-api.hh"
namespace nix {
struct FSInputAccessor : InputAccessor, PosixSourceAccessor
{
CanonPath root;
FSInputAccessor(const CanonPath & root)
: root(root)
{
displayPrefix = root.isRoot() ? "" : root.abs();
}
void readFile(
const CanonPath & path,
Sink & sink,
std::function<void(uint64_t)> sizeCallback) override
{
auto absPath = makeAbsPath(path);
PosixSourceAccessor::readFile(absPath, sink, sizeCallback);
}
bool pathExists(const CanonPath & path) override
{
return PosixSourceAccessor::pathExists(makeAbsPath(path));
}
std::optional<Stat> maybeLstat(const CanonPath & path) override
{
return PosixSourceAccessor::maybeLstat(makeAbsPath(path));
}
DirEntries readDirectory(const CanonPath & path) override
{
DirEntries res;
for (auto & entry : PosixSourceAccessor::readDirectory(makeAbsPath(path)))
res.emplace(entry);
return res;
}
std::string readLink(const CanonPath & path) override
{
return PosixSourceAccessor::readLink(makeAbsPath(path));
}
CanonPath makeAbsPath(const CanonPath & path)
{
return root + path;
}
std::optional<CanonPath> getPhysicalPath(const CanonPath & path) override
{
return makeAbsPath(path);
}
};
ref<InputAccessor> makeFSInputAccessor(const CanonPath & root)
{
return make_ref<FSInputAccessor>(root);
}
ref<InputAccessor> makeStorePathAccessor(
ref<Store> store,
const StorePath & storePath)
{
return makeFSInputAccessor(CanonPath(store->toRealPath(storePath)));
}
SourcePath getUnfilteredRootPath(CanonPath path)
{
static auto rootFS = makeFSInputAccessor(CanonPath::root);
return {rootFS, path};
}
}

View file

@ -0,0 +1,20 @@
#pragma once
#include "input-accessor.hh"
#include "source-path.hh"
namespace nix {
class StorePath;
class Store;
ref<InputAccessor> makeFSInputAccessor(
const CanonPath & root);
ref<InputAccessor> makeStorePathAccessor(
ref<Store> store,
const StorePath & storePath);
SourcePath getUnfilteredRootPath(CanonPath path);
}

View file

@ -0,0 +1,677 @@
#include "git-utils.hh"
#include "input-accessor.hh"
#include "cache.hh"
#include "finally.hh"
#include "processes.hh"
#include "signals.hh"
#include <boost/core/span.hpp>
#include <git2/blob.h>
#include <git2/commit.h>
#include <git2/config.h>
#include <git2/describe.h>
#include <git2/errors.h>
#include <git2/global.h>
#include <git2/object.h>
#include <git2/refs.h>
#include <git2/remote.h>
#include <git2/repository.h>
#include <git2/status.h>
#include <git2/submodule.h>
#include <git2/tree.h>
#include <unordered_set>
#include <queue>
#include <regex>
namespace std {
template<> struct hash<git_oid>
{
size_t operator()(const git_oid & oid) const
{
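// Reinterpret the leading bytes of the (already well-distributed) OID as the hash value.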
return * (size_t *) oid.id;
}
};
}
std::ostream & operator << (std::ostream & str, const git_oid & oid)
{
str << git_oid_tostr_s(&oid);
return str;
}
bool operator == (const git_oid & oid1, const git_oid & oid2)
{
return git_oid_equal(&oid1, &oid2);
}
namespace nix {
// Some wrapper types that ensure that the git_*_free functions get called.
template<auto del>
struct Deleter
{
template <typename T>
void operator()(T * p) const { del(p); };
};
typedef std::unique_ptr<git_repository, Deleter<git_repository_free>> Repository;
typedef std::unique_ptr<git_tree_entry, Deleter<git_tree_entry_free>> TreeEntry;
typedef std::unique_ptr<git_tree, Deleter<git_tree_free>> Tree;
typedef std::unique_ptr<git_treebuilder, Deleter<git_treebuilder_free>> TreeBuilder;
typedef std::unique_ptr<git_blob, Deleter<git_blob_free>> Blob;
typedef std::unique_ptr<git_object, Deleter<git_object_free>> Object;
typedef std::unique_ptr<git_commit, Deleter<git_commit_free>> Commit;
typedef std::unique_ptr<git_reference, Deleter<git_reference_free>> Reference;
typedef std::unique_ptr<git_describe_result, Deleter<git_describe_result_free>> DescribeResult;
typedef std::unique_ptr<git_status_list, Deleter<git_status_list_free>> StatusList;
typedef std::unique_ptr<git_remote, Deleter<git_remote_free>> Remote;
typedef std::unique_ptr<git_config, Deleter<git_config_free>> GitConfig;
typedef std::unique_ptr<git_config_iterator, Deleter<git_config_iterator_free>> ConfigIterator;
// A helper to ensure that we don't leak objects returned by libgit2.
template<typename T>
struct Setter
{
T & t;
typename T::pointer p = nullptr;
Setter(T & t) : t(t) { }
~Setter() { if (p) t = T(p); }
operator typename T::pointer * () { return &p; }
};
Hash toHash(const git_oid & oid)
{
#ifdef GIT_EXPERIMENTAL_SHA256
assert(oid.type == GIT_OID_SHA1);
#endif
Hash hash(HashAlgorithm::SHA1);
memcpy(hash.hash, oid.id, hash.hashSize);
return hash;
}
static void initLibGit2()
{
if (git_libgit2_init() < 0)
throw Error("initialising libgit2: %s", git_error_last()->message);
}
git_oid hashToOID(const Hash & hash)
{
git_oid oid;
if (git_oid_fromstr(&oid, hash.gitRev().c_str()))
throw Error("cannot convert '%s' to a Git OID", hash.gitRev());
return oid;
}
Object lookupObject(git_repository * repo, const git_oid & oid)
{
Object obj;
if (git_object_lookup(Setter(obj), repo, &oid, GIT_OBJECT_ANY)) {
auto err = git_error_last();
throw Error("getting Git object '%s': %s", oid, err->message);
}
return obj;
}
template<typename T>
T peelObject(git_repository * repo, git_object * obj, git_object_t type)
{
T obj2;
if (git_object_peel((git_object * *) (typename T::pointer *) Setter(obj2), obj, type)) {
auto err = git_error_last();
throw Error("peeling Git object '%s': %s", git_object_id(obj), err->message);
}
return obj2;
}
struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
{
CanonPath path;
Repository repo;
GitRepoImpl(CanonPath _path, bool create, bool bare)
: path(std::move(_path))
{
initLibGit2();
if (pathExists(path.abs())) {
if (git_repository_open(Setter(repo), path.c_str()))
throw Error("opening Git repository '%s': %s", path, git_error_last()->message);
} else {
if (git_repository_init(Setter(repo), path.c_str(), bare))
throw Error("creating Git repository '%s': %s", path, git_error_last()->message);
}
}
operator git_repository * ()
{
return repo.get();
}
uint64_t getRevCount(const Hash & rev) override
{
std::unordered_set<git_oid> done;
std::queue<Commit> todo;
todo.push(peelObject<Commit>(*this, lookupObject(*this, hashToOID(rev)).get(), GIT_OBJECT_COMMIT));
while (auto commit = pop(todo)) {
if (!done.insert(*git_commit_id(commit->get())).second) continue;
for (size_t n = 0; n < git_commit_parentcount(commit->get()); ++n) {
git_commit * parent;
if (git_commit_parent(&parent, commit->get(), n))
throw Error("getting parent of Git commit '%s': %s", *git_commit_id(commit->get()), git_error_last()->message);
todo.push(Commit(parent));
}
}
return done.size();
}
uint64_t getLastModified(const Hash & rev) override
{
auto commit = peelObject<Commit>(*this, lookupObject(*this, hashToOID(rev)).get(), GIT_OBJECT_COMMIT);
return git_commit_time(commit.get());
}
bool isShallow() override
{
return git_repository_is_shallow(*this);
}
Hash resolveRef(std::string ref) override
{
// Handle revisions used as refs.
{
git_oid oid;
if (git_oid_fromstr(&oid, ref.c_str()) == 0)
return toHash(oid);
}
// Resolve short names like 'master'.
Reference ref2;
if (!git_reference_dwim(Setter(ref2), *this, ref.c_str()))
ref = git_reference_name(ref2.get());
// Resolve full references like 'refs/heads/master'.
Reference ref3;
if (git_reference_lookup(Setter(ref3), *this, ref.c_str()))
throw Error("resolving Git reference '%s': %s", ref, git_error_last()->message);
auto oid = git_reference_target(ref3.get());
if (!oid)
throw Error("cannot get OID for Git reference '%s'", git_reference_name(ref3.get()));
return toHash(*oid);
}
std::vector<Submodule> parseSubmodules(const CanonPath & configFile)
{
GitConfig config;
if (git_config_open_ondisk(Setter(config), configFile.abs().c_str()))
throw Error("parsing .gitmodules file: %s", git_error_last()->message);
ConfigIterator it;
if (git_config_iterator_glob_new(Setter(it), config.get(), "^submodule\\..*\\.(path|url|branch)$"))
throw Error("iterating over .gitmodules: %s", git_error_last()->message);
std::map<std::string, std::string> entries;
while (true) {
git_config_entry * entry = nullptr;
if (auto err = git_config_next(&entry, it.get())) {
if (err == GIT_ITEROVER) break;
throw Error("iterating over .gitmodules: %s", git_error_last()->message);
}
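// Strip the "submodule." prefix (10 characters) from the config key.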
entries.emplace(entry->name + 10, entry->value);
}
std::vector<Submodule> result;
for (auto & [key, value] : entries) {
if (!hasSuffix(key, ".path")) continue;
std::string key2(key, 0, key.size() - 5);
auto path = CanonPath(value);
result.push_back(Submodule {
.path = path,
.url = entries[key2 + ".url"],
.branch = entries[key2 + ".branch"],
});
}
return result;
}
// Helper for statusCallback below.
static int statusCallbackTrampoline(const char * path, unsigned int statusFlags, void * payload)
{
return (*((std::function<int(const char * path, unsigned int statusFlags)> *) payload))(path, statusFlags);
}
WorkdirInfo getWorkdirInfo() override
{
WorkdirInfo info;
/* Get the head revision, if any. */
git_oid headRev;
if (auto err = git_reference_name_to_id(&headRev, *this, "HEAD")) {
if (err != GIT_ENOTFOUND)
throw Error("resolving HEAD: %s", git_error_last()->message);
} else
info.headRev = toHash(headRev);
/* Get all tracked files and determine whether the working
directory is dirty. */
std::function<int(const char * path, unsigned int statusFlags)> statusCallback = [&](const char * path, unsigned int statusFlags)
{
if (!(statusFlags & GIT_STATUS_INDEX_DELETED) &&
!(statusFlags & GIT_STATUS_WT_DELETED))
info.files.insert(CanonPath(path));
if (statusFlags != GIT_STATUS_CURRENT)
info.isDirty = true;
return 0;
};
git_status_options options = GIT_STATUS_OPTIONS_INIT;
options.flags |= GIT_STATUS_OPT_INCLUDE_UNMODIFIED;
options.flags |= GIT_STATUS_OPT_EXCLUDE_SUBMODULES;
if (git_status_foreach_ext(*this, &options, &statusCallbackTrampoline, &statusCallback))
throw Error("getting working directory status: %s", git_error_last()->message);
/* Get submodule info. */
auto modulesFile = path + ".gitmodules";
if (pathExists(modulesFile.abs()))
info.submodules = parseSubmodules(modulesFile);
return info;
}
std::optional<std::string> getWorkdirRef() override
{
Reference ref;
if (git_reference_lookup(Setter(ref), *this, "HEAD"))
throw Error("looking up HEAD: %s", git_error_last()->message);
if (auto target = git_reference_symbolic_target(ref.get()))
return target;
return std::nullopt;
}
std::vector<std::tuple<Submodule, Hash>> getSubmodules(const Hash & rev) override;
std::string resolveSubmoduleUrl(
const std::string & url,
const std::string & base) override
{
git_buf buf = GIT_BUF_INIT;
if (git_submodule_resolve_url(&buf, *this, url.c_str()))
throw Error("resolving Git submodule URL '%s'", url);
Finally cleanup = [&]() { git_buf_dispose(&buf); };
std::string res(buf.ptr);
if (!hasPrefix(res, "/") && res.find("://") == res.npos)
res = parseURL(base + "/" + res).canonicalise().to_string();
return res;
}
bool hasObject(const Hash & oid_) override
{
auto oid = hashToOID(oid_);
Object obj;
if (auto errCode = git_object_lookup(Setter(obj), *this, &oid, GIT_OBJECT_ANY)) {
if (errCode == GIT_ENOTFOUND) return false;
auto err = git_error_last();
throw Error("getting Git object '%s': %s", oid, err->message);
}
return true;
}
ref<InputAccessor> getAccessor(const Hash & rev) override;
static int sidebandProgressCallback(const char * str, int len, void * payload)
{
auto act = (Activity *) payload;
act->result(resFetchStatus, trim(std::string_view(str, len)));
return _isInterrupted ? -1 : 0;
}
static int transferProgressCallback(const git_indexer_progress * stats, void * payload)
{
auto act = (Activity *) payload;
act->result(resFetchStatus,
fmt("%d/%d objects received, %d/%d deltas indexed, %.1f MiB",
stats->received_objects,
stats->total_objects,
stats->indexed_deltas,
stats->total_deltas,
stats->received_bytes / (1024.0 * 1024.0)));
return _isInterrupted ? -1 : 0;
}
void fetch(
const std::string & url,
const std::string & refspec,
bool shallow) override
{
Activity act(*logger, lvlTalkative, actFetchTree, fmt("fetching Git repository '%s'", url));
Remote remote;
if (git_remote_create_anonymous(Setter(remote), *this, url.c_str()))
throw Error("cannot create Git remote '%s': %s", url, git_error_last()->message);
char * refspecs[] = {(char *) refspec.c_str()};
git_strarray refspecs2 {
.strings = refspecs,
.count = 1
};
git_fetch_options opts = GIT_FETCH_OPTIONS_INIT;
// FIXME: for some reason, shallow fetching over ssh barfs
// with "could not read from remote repository".
opts.depth = shallow && parseURL(url).scheme != "ssh" ? 1 : GIT_FETCH_DEPTH_FULL;
opts.callbacks.payload = &act;
opts.callbacks.sideband_progress = sidebandProgressCallback;
opts.callbacks.transfer_progress = transferProgressCallback;
if (git_remote_fetch(remote.get(), &refspecs2, &opts, nullptr))
throw Error("fetching '%s' from '%s': %s", refspec, url, git_error_last()->message);
}
void verifyCommit(
const Hash & rev,
const std::vector<fetchers::PublicKey> & publicKeys) override
{
// Create ad-hoc allowedSignersFile and populate it with publicKeys
auto allowedSignersFile = createTempFile().second;
std::string allowedSigners;
for (const fetchers::PublicKey & k : publicKeys) {
if (k.type != "ssh-dsa"
&& k.type != "ssh-ecdsa"
&& k.type != "ssh-ecdsa-sk"
&& k.type != "ssh-ed25519"
&& k.type != "ssh-ed25519-sk"
&& k.type != "ssh-rsa")
throw Error("Unknown key type '%s'.\n"
"Please use one of\n"
"- ssh-dsa\n"
" ssh-ecdsa\n"
" ssh-ecdsa-sk\n"
" ssh-ed25519\n"
" ssh-ed25519-sk\n"
" ssh-rsa", k.type);
allowedSigners += "* " + k.type + " " + k.key + "\n";
}
writeFile(allowedSignersFile, allowedSigners);
// Run verification command
auto [status, output] = runProgram(RunOptions {
.program = "git",
.args = {
"-c",
"gpg.ssh.allowedSignersFile=" + allowedSignersFile,
"-C", path.abs(),
"verify-commit",
rev.gitRev()
},
.mergeStderrToStdout = true,
});
/* Evaluate the result via the status code and by checking whether
   the public key fingerprints appear on stderr. This is necessary
   because the git command might also succeed due to the commit
   being signed by gpg keys that are present in the user's key
   agent. */
std::string re = R"(Good "git" signature for \* with .* key SHA256:[)";
for (const fetchers::PublicKey & k : publicKeys){
// Calculate sha256 fingerprint from public key and escape the regex symbol '+' to match the key literally
auto fingerprint = trim(hashString(HashAlgorithm::SHA256, base64Decode(k.key)).to_string(nix::HashFormat::Base64, false), "=");
auto escaped_fingerprint = std::regex_replace(fingerprint, std::regex("\\+"), "\\+" );
re += "(" + escaped_fingerprint + ")";
}
re += "]";
if (status == 0 && std::regex_search(output, std::regex(re)))
printTalkative("Signature verification on commit %s succeeded.", rev.gitRev());
else
throw Error("Commit signature verification on commit %s failed: %s", rev.gitRev(), output);
}
};
ref<GitRepo> GitRepo::openRepo(const CanonPath & path, bool create, bool bare)
{
return make_ref<GitRepoImpl>(path, create, bare);
}
struct GitInputAccessor : InputAccessor
{
ref<GitRepoImpl> repo;
Tree root;
GitInputAccessor(ref<GitRepoImpl> repo_, const Hash & rev)
: repo(repo_)
, root(peelObject<Tree>(*repo, lookupObject(*repo, hashToOID(rev)).get(), GIT_OBJECT_TREE))
{
}
std::string readBlob(const CanonPath & path, bool symlink)
{
auto blob = getBlob(path, symlink);
auto data = std::string_view((const char *) git_blob_rawcontent(blob.get()), git_blob_rawsize(blob.get()));
return std::string(data);
}
std::string readFile(const CanonPath & path) override
{
return readBlob(path, false);
}
bool pathExists(const CanonPath & path) override
{
return path.isRoot() ? true : (bool) lookup(path);
}
std::optional<Stat> maybeLstat(const CanonPath & path) override
{
if (path.isRoot())
return Stat { .type = tDirectory };
auto entry = lookup(path);
if (!entry)
return std::nullopt;
auto mode = git_tree_entry_filemode(entry);
if (mode == GIT_FILEMODE_TREE)
return Stat { .type = tDirectory };
else if (mode == GIT_FILEMODE_BLOB)
return Stat { .type = tRegular };
else if (mode == GIT_FILEMODE_BLOB_EXECUTABLE)
return Stat { .type = tRegular, .isExecutable = true };
else if (mode == GIT_FILEMODE_LINK)
return Stat { .type = tSymlink };
else if (mode == GIT_FILEMODE_COMMIT)
// Treat submodules as empty directories.
return Stat { .type = tDirectory };
else
throw Error("file '%s' has an unsupported Git file type");
}
DirEntries readDirectory(const CanonPath & path) override
{
return std::visit(overloaded {
[&](Tree tree) {
DirEntries res;
auto count = git_tree_entrycount(tree.get());
for (size_t n = 0; n < count; ++n) {
auto entry = git_tree_entry_byindex(tree.get(), n);
// FIXME: add to cache
res.emplace(std::string(git_tree_entry_name(entry)), DirEntry{});
}
return res;
},
[&](Submodule) {
return DirEntries();
}
}, getTree(path));
}
std::string readLink(const CanonPath & path) override
{
return readBlob(path, true);
}
Hash getSubmoduleRev(const CanonPath & path)
{
auto entry = need(path);
if (git_tree_entry_type(entry) != GIT_OBJECT_COMMIT)
throw Error("'%s' is not a submodule", showPath(path));
return toHash(*git_tree_entry_id(entry));
}
std::unordered_map<CanonPath, TreeEntry> lookupCache;
/* Recursively look up 'path' relative to the root. */
git_tree_entry * lookup(const CanonPath & path)
{
if (path.isRoot()) return nullptr;
auto i = lookupCache.find(path);
if (i == lookupCache.end()) {
TreeEntry entry;
if (auto err = git_tree_entry_bypath(Setter(entry), root.get(), std::string(path.rel()).c_str())) {
if (err != GIT_ENOTFOUND)
throw Error("looking up '%s': %s", showPath(path), git_error_last()->message);
}
i = lookupCache.emplace(path, std::move(entry)).first;
}
return &*i->second;
}
git_tree_entry * need(const CanonPath & path)
{
auto entry = lookup(path);
if (!entry)
throw Error("'%s' does not exist", showPath(path));
return entry;
}
struct Submodule { };
std::variant<Tree, Submodule> getTree(const CanonPath & path)
{
if (path.isRoot()) {
Tree tree;
if (git_tree_dup(Setter(tree), root.get()))
throw Error("duplicating directory '%s': %s", showPath(path), git_error_last()->message);
return tree;
}
auto entry = need(path);
if (git_tree_entry_type(entry) == GIT_OBJECT_COMMIT)
return Submodule();
if (git_tree_entry_type(entry) != GIT_OBJECT_TREE)
throw Error("'%s' is not a directory", showPath(path));
Tree tree;
if (git_tree_entry_to_object((git_object * *) (git_tree * *) Setter(tree), *repo, entry))
throw Error("looking up directory '%s': %s", showPath(path), git_error_last()->message);
return tree;
}
Blob getBlob(const CanonPath & path, bool expectSymlink)
{
auto notExpected = [&]()
{
throw Error(
expectSymlink
? "'%s' is not a symlink"
: "'%s' is not a regular file",
showPath(path));
};
if (path.isRoot()) notExpected();
auto entry = need(path);
if (git_tree_entry_type(entry) != GIT_OBJECT_BLOB)
notExpected();
auto mode = git_tree_entry_filemode(entry);
if (expectSymlink) {
if (mode != GIT_FILEMODE_LINK)
notExpected();
} else {
if (mode != GIT_FILEMODE_BLOB && mode != GIT_FILEMODE_BLOB_EXECUTABLE)
notExpected();
}
Blob blob;
if (git_tree_entry_to_object((git_object * *) (git_blob * *) Setter(blob), *repo, entry))
throw Error("looking up file '%s': %s", showPath(path), git_error_last()->message);
return blob;
}
};
ref<InputAccessor> GitRepoImpl::getAccessor(const Hash & rev)
{
return make_ref<GitInputAccessor>(ref<GitRepoImpl>(shared_from_this()), rev);
}
std::vector<std::tuple<GitRepoImpl::Submodule, Hash>> GitRepoImpl::getSubmodules(const Hash & rev)
{
/* Read the .gitmodules file from this revision. */
CanonPath modulesFile(".gitmodules");
auto accessor = getAccessor(rev);
if (!accessor->pathExists(modulesFile)) return {};
/* Parse it and get the revision of each submodule. */
auto configS = accessor->readFile(modulesFile);
auto [fdTemp, pathTemp] = createTempFile("nix-git-submodules");
writeFull(fdTemp.get(), configS);
std::vector<std::tuple<Submodule, Hash>> result;
for (auto & submodule : parseSubmodules(CanonPath(pathTemp))) {
auto rev = accessor.dynamic_pointer_cast<GitInputAccessor>()->getSubmoduleRev(submodule.path);
result.push_back({std::move(submodule), rev});
}
return result;
}
}

View file

@ -0,0 +1,90 @@
#pragma once
#include "input-accessor.hh"
namespace nix {
namespace fetchers { struct PublicKey; }
struct GitRepo
{
virtual ~GitRepo()
{ }
static ref<GitRepo> openRepo(const CanonPath & path, bool create = false, bool bare = false);
virtual uint64_t getRevCount(const Hash & rev) = 0;
virtual uint64_t getLastModified(const Hash & rev) = 0;
virtual bool isShallow() = 0;
/* Return the commit hash to which a ref points. */
virtual Hash resolveRef(std::string ref) = 0;
/**
* Info about a submodule.
*/
struct Submodule
{
CanonPath path;
std::string url;
std::string branch;
};
struct WorkdirInfo
{
bool isDirty = false;
/* The checked out commit, or nullopt if there are no commits
in the repo yet. */
std::optional<Hash> headRev;
/* All files in the working directory that are unchanged,
modified or added, but excluding deleted files. */
std::set<CanonPath> files;
/* The submodules listed in .gitmodules of this workdir. */
std::vector<Submodule> submodules;
};
virtual WorkdirInfo getWorkdirInfo() = 0;
/* Get the ref that HEAD points to. */
virtual std::optional<std::string> getWorkdirRef() = 0;
/**
* Return the submodules of this repo at the indicated revision,
* along with the revision of each submodule.
*/
virtual std::vector<std::tuple<Submodule, Hash>> getSubmodules(const Hash & rev) = 0;
virtual std::string resolveSubmoduleUrl(
const std::string & url,
const std::string & base) = 0;
struct TarballInfo
{
Hash treeHash;
time_t lastModified;
};
virtual bool hasObject(const Hash & oid) = 0;
virtual ref<InputAccessor> getAccessor(const Hash & rev) = 0;
virtual void fetch(
const std::string & url,
const std::string & refspec,
bool shallow) = 0;
/**
* Verify that commit `rev` is signed by one of the keys in
* `publicKeys`. Throw an error if it isn't.
*/
virtual void verifyCommit(
const Hash & rev,
const std::vector<fetchers::PublicKey> & publicKeys) = 0;
};
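/* Illustrative sketch (not part of this change): opening a checkout, resolving
   a ref and reading a file at that revision; the paths are hypothetical.

       auto repo = GitRepo::openRepo(CanonPath("/path/to/repo"));
       auto rev = repo->resolveRef("HEAD");
       auto accessor = repo->getAccessor(rev);
       auto readme = accessor->readFile(CanonPath("README.md"));
*/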
}

View file

@ -1,12 +1,19 @@
#include "fetchers.hh"
#include "users.hh"
#include "cache.hh"
#include "globals.hh"
#include "tarfile.hh"
#include "store-api.hh"
#include "url-parts.hh"
#include "pathlocks.hh"
#include "util.hh"
#include "processes.hh"
#include "git.hh"
#include "fs-input-accessor.hh"
#include "filtering-input-accessor.hh"
#include "mounted-input-accessor.hh"
#include "git-utils.hh"
#include "logging.hh"
#include "finally.hh"
#include "fetch-settings.hh"
@ -46,7 +53,7 @@ bool touchCacheFile(const Path & path, time_t touch_time)
Path getCachePath(std::string_view key)
{
return getCacheDir() + "/nix/gitv3/" +
hashString(htSHA256, key).to_string(Base32, false);
hashString(HashAlgorithm::SHA256, key).to_string(HashFormat::Nix32, false);
}
// Returns the name of the HEAD branch.
@ -132,124 +139,24 @@ std::optional<std::string> readHeadCached(const std::string & actualUrl)
return std::nullopt;
}
bool isNotDotGitDirectory(const Path & path)
std::vector<PublicKey> getPublicKeys(const Attrs & attrs)
{
return baseNameOf(path) != ".git";
}
struct WorkdirInfo
{
bool clean = false;
bool hasHead = false;
};
// Returns whether a git workdir is clean and has commits.
WorkdirInfo getWorkdirInfo(const Input & input, const Path & workdir)
{
const bool submodules = maybeGetBoolAttr(input.attrs, "submodules").value_or(false);
std::string gitDir(".git");
auto env = getEnv();
// Set LC_ALL to C: because we rely on the error messages from git rev-parse to determine what went wrong
// that way unknown errors can lead to a failure instead of continuing through the wrong code path
env["LC_ALL"] = "C";
/* Check whether HEAD points to something that looks like a commit,
since that is the reference we want to use later on. */
auto result = runProgram(RunOptions {
.program = "git",
.args = { "-C", workdir, "--git-dir", gitDir, "rev-parse", "--verify", "--no-revs", "HEAD^{commit}" },
.environment = env,
.mergeStderrToStdout = true
});
auto exitCode = WEXITSTATUS(result.first);
auto errorMessage = result.second;
if (errorMessage.find("fatal: not a git repository") != std::string::npos) {
throw Error("'%s' is not a Git repository", workdir);
} else if (errorMessage.find("fatal: Needed a single revision") != std::string::npos) {
// indicates that the repo does not have any commits
// we want to proceed and will consider it dirty later
} else if (exitCode != 0) {
// any other errors should lead to a failure
throw Error("getting the HEAD of the Git tree '%s' failed with exit code %d:\n%s", workdir, exitCode, errorMessage);
std::vector<PublicKey> publicKeys;
if (attrs.contains("publicKeys")) {
nlohmann::json publicKeysJson = nlohmann::json::parse(getStrAttr(attrs, "publicKeys"));
ensureType(publicKeysJson, nlohmann::json::value_t::array);
publicKeys = publicKeysJson.get<std::vector<PublicKey>>();
}
bool clean = false;
bool hasHead = exitCode == 0;
try {
if (hasHead) {
// Using git diff is preferable to lower-level operations here,
// because it's conceptually simpler and we only need the exit code anyway.
auto gitDiffOpts = Strings({ "-C", workdir, "--git-dir", gitDir, "diff", "HEAD", "--quiet"});
if (!submodules) {
// Changes in submodules should only make the tree dirty
// when those submodules will be copied as well.
gitDiffOpts.emplace_back("--ignore-submodules");
}
gitDiffOpts.emplace_back("--");
runProgram("git", true, gitDiffOpts);
clean = true;
}
} catch (ExecError & e) {
if (!WIFEXITED(e.status) || WEXITSTATUS(e.status) != 1) throw;
}
return WorkdirInfo { .clean = clean, .hasHead = hasHead };
if (attrs.contains("publicKey"))
publicKeys.push_back(PublicKey{maybeGetStrAttr(attrs, "keytype").value_or("ssh-ed25519"),getStrAttr(attrs, "publicKey")});
return publicKeys;
}
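The new getPublicKeys helper above accepts either a JSON-encoded `publicKeys` attribute or a single `publicKey`/`keytype` pair. Below is a minimal standalone sketch of that parsing, not part of this commit, using a simplified PublicKey struct and a plain string map in place of fetchers::Attrs; the JSON field names in from_json are assumptions for illustration.
#include <map>
#include <string>
#include <vector>
#include <nlohmann/json.hpp>
struct PublicKey { std::string type, key; };
// Needed so nlohmann::json can deserialize into std::vector<PublicKey>;
// the field names here are assumed for this sketch.
void from_json(const nlohmann::json & j, PublicKey & k)
{
    k.type = j.value("type", "ssh-ed25519");
    j.at("key").get_to(k.key);
}
std::vector<PublicKey> parsePublicKeys(const std::map<std::string, std::string> & attrs)
{
    std::vector<PublicKey> publicKeys;
    // `publicKeys` carries a JSON array of {type, key} objects.
    if (auto it = attrs.find("publicKeys"); it != attrs.end())
        publicKeys = nlohmann::json::parse(it->second).get<std::vector<PublicKey>>();
    // `publicKey` (plus optional `keytype`) adds one more key, defaulting to ssh-ed25519.
    if (auto it = attrs.find("publicKey"); it != attrs.end())
        publicKeys.push_back(PublicKey{
            attrs.count("keytype") ? attrs.at("keytype") : "ssh-ed25519",
            it->second});
    return publicKeys;
}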
std::pair<StorePath, Input> fetchFromWorkdir(ref<Store> store, Input & input, const Path & workdir, const WorkdirInfo & workdirInfo)
{
const bool submodules = maybeGetBoolAttr(input.attrs, "submodules").value_or(false);
auto gitDir = ".git";
if (!fetchSettings.allowDirty)
throw Error("Git tree '%s' is dirty", workdir);
if (fetchSettings.warnDirty)
warn("Git tree '%s' is dirty", workdir);
auto gitOpts = Strings({ "-C", workdir, "--git-dir", gitDir, "ls-files", "-z" });
if (submodules)
gitOpts.emplace_back("--recurse-submodules");
auto files = tokenizeString<std::set<std::string>>(
runProgram("git", true, gitOpts), "\0"s);
Path actualPath(absPath(workdir));
PathFilter filter = [&](const Path & p) -> bool {
assert(hasPrefix(p, actualPath));
std::string file(p, actualPath.size() + 1);
auto st = lstat(p);
if (S_ISDIR(st.st_mode)) {
auto prefix = file + "/";
auto i = files.lower_bound(prefix);
return i != files.end() && hasPrefix(*i, prefix);
}
return files.count(file);
};
auto storePath = store->addToStore(input.getName(), actualPath, FileIngestionMethod::Recursive, htSHA256, filter);
// FIXME: maybe we should use the timestamp of the last
// modified dirty file?
input.attrs.insert_or_assign(
"lastModified",
workdirInfo.hasHead ? std::stoull(runProgram("git", true, { "-C", actualPath, "--git-dir", gitDir, "log", "-1", "--format=%ct", "--no-show-signature", "HEAD" })) : 0);
return {std::move(storePath), input};
}
} // end namespace
struct GitInputScheme : InputScheme
{
std::optional<Input> inputFromURL(const ParsedURL & url) const override
std::optional<Input> inputFromURL(const ParsedURL & url, bool requireTree) const override
{
if (url.scheme != "git" &&
url.scheme != "git+http" &&
@ -265,9 +172,9 @@ struct GitInputScheme : InputScheme
attrs.emplace("type", "git");
for (auto & [name, value] : url.query) {
if (name == "rev" || name == "ref")
if (name == "rev" || name == "ref" || name == "keytype" || name == "publicKey" || name == "publicKeys")
attrs.emplace(name, value);
else if (name == "shallow" || name == "submodules" || name == "allRefs")
else if (name == "shallow" || name == "submodules" || name == "allRefs" || name == "verifyCommit")
attrs.emplace(name, Explicit<bool> { value == "1" });
else
url2.query.emplace(name, value);
@ -278,18 +185,44 @@ struct GitInputScheme : InputScheme
return inputFromAttrs(attrs);
}
std::string_view schemeName() const override
{
return "git";
}
StringSet allowedAttrs() const override
{
return {
"url",
"ref",
"rev",
"shallow",
"submodules",
"lastModified",
"revCount",
"narHash",
"allRefs",
"name",
"dirtyRev",
"dirtyShortRev",
"verifyCommit",
"keytype",
"publicKey",
"publicKeys",
};
}
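Each scheme now advertises its allowed attribute names instead of open-coding the check in inputFromAttrs (compare the old loop a few lines below). The following is a sketch of the kind of centralized validation this enables; the real check lives in Input::fromAttrs, which is not shown in this diff, so the special treatment of `type` here is an assumption.
#include <map>
#include <set>
#include <stdexcept>
#include <string>
void checkAllowedAttrs(
    const std::set<std::string> & allowed,
    const std::map<std::string, std::string> & attrs)
{
    for (auto & [name, value] : attrs)
        // `type` selects the scheme itself, so it is assumed to be implicitly allowed.
        if (name != "type" && !allowed.count(name))
            throw std::runtime_error("unsupported input attribute '" + name + "'");
}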
std::optional<Input> inputFromAttrs(const Attrs & attrs) const override
{
if (maybeGetStrAttr(attrs, "type") != "git") return {};
for (auto & [name, _] : attrs)
if (name == "verifyCommit"
|| name == "keytype"
|| name == "publicKey"
|| name == "publicKeys")
experimentalFeatureSettings.require(Xp::VerifiedFetches);
for (auto & [name, value] : attrs)
if (name != "type" && name != "url" && name != "ref" && name != "rev" && name != "shallow" && name != "submodules" && name != "lastModified" && name != "revCount" && name != "narHash" && name != "allRefs" && name != "name")
throw Error("unsupported Git input attribute '%s'", name);
parseURL(getStrAttr(attrs, "url"));
maybeGetBoolAttr(attrs, "shallow");
maybeGetBoolAttr(attrs, "submodules");
maybeGetBoolAttr(attrs, "allRefs");
maybeGetBoolAttr(attrs, "verifyCommit");
if (auto ref = maybeGetStrAttr(attrs, "ref")) {
if (std::regex_search(*ref, badGitRefRegex))
@ -298,6 +231,12 @@ struct GitInputScheme : InputScheme
Input input;
input.attrs = attrs;
auto url = fixGitURL(getStrAttr(attrs, "url"));
parseURL(url);
input.attrs["url"] = url;
getShallowAttr(input);
getSubmodulesAttr(input);
getAllRefsAttr(input);
return input;
}
@ -307,20 +246,22 @@ struct GitInputScheme : InputScheme
if (url.scheme != "git") url.scheme = "git+" + url.scheme;
if (auto rev = input.getRev()) url.query.insert_or_assign("rev", rev->gitRev());
if (auto ref = input.getRef()) url.query.insert_or_assign("ref", *ref);
if (maybeGetBoolAttr(input.attrs, "shallow").value_or(false))
if (getShallowAttr(input))
url.query.insert_or_assign("shallow", "1");
if (getSubmodulesAttr(input))
url.query.insert_or_assign("submodules", "1");
if (maybeGetBoolAttr(input.attrs, "verifyCommit").value_or(false))
url.query.insert_or_assign("verifyCommit", "1");
auto publicKeys = getPublicKeys(input.attrs);
if (publicKeys.size() == 1) {
url.query.insert_or_assign("keytype", publicKeys.at(0).type);
url.query.insert_or_assign("publicKey", publicKeys.at(0).key);
}
else if (publicKeys.size() > 1)
url.query.insert_or_assign("publicKeys", publicKeys_to_string(publicKeys));
return url;
}
bool hasAllInfo(const Input & input) const override
{
bool maybeDirty = !input.getRef();
bool shallow = maybeGetBoolAttr(input.attrs, "shallow").value_or(false);
return
maybeGetIntAttr(input.attrs, "lastModified")
&& (shallow || maybeDirty || maybeGetIntAttr(input.attrs, "revCount"));
}
Input applyOverrides(
const Input & input,
std::optional<std::string> ref,
@ -336,11 +277,11 @@ struct GitInputScheme : InputScheme
void clone(const Input & input, const Path & destDir) const override
{
auto [isLocal, actualUrl] = getActualUrl(input);
auto repoInfo = getRepoInfo(input);
Strings args = {"clone"};
args.push_back(actualUrl);
args.push_back(repoInfo.url);
if (auto ref = input.getRef()) {
args.push_back("--branch");
@ -354,30 +295,101 @@ struct GitInputScheme : InputScheme
runProgram("git", true, args, {}, true);
}
std::optional<Path> getSourcePath(const Input & input) override
std::optional<Path> getSourcePath(const Input & input) const override
{
auto url = parseURL(getStrAttr(input.attrs, "url"));
if (url.scheme == "file" && !input.getRef() && !input.getRev())
return url.path;
return {};
auto repoInfo = getRepoInfo(input);
if (repoInfo.isLocal) return repoInfo.url;
return std::nullopt;
}
void markChangedFile(const Input & input, std::string_view file, std::optional<std::string> commitMsg) override
void putFile(
const Input & input,
const CanonPath & path,
std::string_view contents,
std::optional<std::string> commitMsg) const override
{
auto sourcePath = getSourcePath(input);
assert(sourcePath);
auto gitDir = ".git";
auto repoInfo = getRepoInfo(input);
if (!repoInfo.isLocal)
throw Error("cannot commit '%s' to Git repository '%s' because it's not a working tree", path, input.to_string());
runProgram("git", true,
{ "-C", *sourcePath, "--git-dir", gitDir, "add", "--intent-to-add", "--", std::string(file) });
writeFile((CanonPath(repoInfo.url) + path).abs(), contents);
if (commitMsg)
auto result = runProgram(RunOptions {
.program = "git",
.args = {"-C", repoInfo.url, "--git-dir", repoInfo.gitDir, "check-ignore", "--quiet", std::string(path.rel())},
});
auto exitCode = WEXITSTATUS(result.first);
if (exitCode != 0) {
// The path is not `.gitignore`d, we can add the file.
runProgram("git", true,
{ "-C", *sourcePath, "--git-dir", gitDir, "commit", std::string(file), "-m", *commitMsg });
{ "-C", repoInfo.url, "--git-dir", repoInfo.gitDir, "add", "--intent-to-add", "--", std::string(path.rel()) });
if (commitMsg) {
// Pause the logger to allow for user input (such as a gpg passphrase) in `git commit`
logger->pause();
Finally restoreLogger([]() { logger->resume(); });
runProgram("git", true,
{ "-C", repoInfo.url, "--git-dir", repoInfo.gitDir, "commit", std::string(path.rel()), "-m", *commitMsg });
}
}
}
std::pair<bool, std::string> getActualUrl(const Input & input) const
struct RepoInfo
{
/* Whether this is a local, non-bare repository. */
bool isLocal = false;
/* Working directory info: the complete list of files, and
whether the working directory is dirty compared to HEAD. */
GitRepo::WorkdirInfo workdirInfo;
/* URL of the repo, or its path if isLocal. Never a `file` URL. */
std::string url;
void warnDirty() const
{
if (workdirInfo.isDirty) {
if (!fetchSettings.allowDirty)
throw Error("Git tree '%s' is dirty", url);
if (fetchSettings.warnDirty)
warn("Git tree '%s' is dirty", url);
}
}
std::string gitDir = ".git";
};
bool getShallowAttr(const Input & input) const
{
return maybeGetBoolAttr(input.attrs, "shallow").value_or(false);
}
bool getSubmodulesAttr(const Input & input) const
{
return maybeGetBoolAttr(input.attrs, "submodules").value_or(false);
}
bool getAllRefsAttr(const Input & input) const
{
return maybeGetBoolAttr(input.attrs, "allRefs").value_or(false);
}
RepoInfo getRepoInfo(const Input & input) const
{
auto checkHashAlgorithm = [&](const std::optional<Hash> & hash)
{
if (hash.has_value() && !(hash->algo == HashAlgorithm::SHA1 || hash->algo == HashAlgorithm::SHA256))
throw Error("Hash '%s' is not supported by Git. Supported types are sha1 and sha256.", hash->to_string(HashFormat::Base16, true));
};
if (auto rev = input.getRev())
checkHashAlgorithm(rev);
RepoInfo repoInfo;
// file:// URIs are normally not cloned (but otherwise treated the
// same as remote URIs, i.e. we don't use the working tree or
// HEAD). Exception: If _NIX_FORCE_HTTP is set, or the repo is a bare git
@ -385,153 +397,132 @@ struct GitInputScheme : InputScheme
static bool forceHttp = getEnv("_NIX_FORCE_HTTP") == "1"; // for testing
auto url = parseURL(getStrAttr(input.attrs, "url"));
bool isBareRepository = url.scheme == "file" && !pathExists(url.path + "/.git");
bool isLocal = url.scheme == "file" && !forceHttp && !isBareRepository;
return {isLocal, isLocal ? url.path : url.base};
repoInfo.isLocal = url.scheme == "file" && !forceHttp && !isBareRepository;
repoInfo.url = repoInfo.isLocal ? url.path : url.base;
// If this is a local directory and no ref or revision is
// given, then allow the use of an unclean working tree.
if (!input.getRef() && !input.getRev() && repoInfo.isLocal)
repoInfo.workdirInfo = GitRepo::openRepo(CanonPath(repoInfo.url))->getWorkdirInfo();
return repoInfo;
}
std::pair<StorePath, Input> fetch(ref<Store> store, const Input & _input) override
uint64_t getLastModified(const RepoInfo & repoInfo, const std::string & repoDir, const Hash & rev) const
{
Input input(_input);
auto gitDir = ".git";
Attrs key{{"_what", "gitLastModified"}, {"rev", rev.gitRev()}};
auto cache = getCache();
if (auto res = cache->lookup(key))
return getIntAttr(*res, "lastModified");
auto lastModified = GitRepo::openRepo(CanonPath(repoDir))->getLastModified(rev);
cache->upsert(key, Attrs{{"lastModified", lastModified}});
return lastModified;
}
uint64_t getRevCount(const RepoInfo & repoInfo, const std::string & repoDir, const Hash & rev) const
{
Attrs key{{"_what", "gitRevCount"}, {"rev", rev.gitRev()}};
auto cache = getCache();
if (auto revCountAttrs = cache->lookup(key))
return getIntAttr(*revCountAttrs, "revCount");
Activity act(*logger, lvlChatty, actUnknown, fmt("getting Git revision count of '%s'", repoInfo.url));
auto revCount = GitRepo::openRepo(CanonPath(repoDir))->getRevCount(rev);
cache->upsert(key, Attrs{{"revCount", revCount}});
return revCount;
}
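Both getLastModified and getRevCount follow the same memoization pattern: build an attribute set as the cache key, return the cached value if present, otherwise compute and upsert. Here is an illustrative standalone sketch with a plain in-memory map standing in for the on-disk fetcher cache; `countRevisions` is a hypothetical stand-in for the underlying Git walk.
#include <cstdint>
#include <functional>
#include <map>
#include <string>
using CacheKey = std::map<std::string, std::string>;
uint64_t cachedCompute(
    std::map<CacheKey, uint64_t> & cache,
    CacheKey key,
    const std::function<uint64_t()> & compute)
{
    if (auto it = cache.find(key); it != cache.end())
        return it->second;                 // cache hit: skip the expensive Git operation
    auto value = compute();
    cache.emplace(std::move(key), value);
    return value;
}
// Usage (with a hypothetical countRevisions helper):
//   auto revCount = cachedCompute(cache,
//       {{"_what", "gitRevCount"}, {"rev", rev}},
//       [&] { return countRevisions(rev); });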
std::string getDefaultRef(const RepoInfo & repoInfo) const
{
auto head = repoInfo.isLocal
? GitRepo::openRepo(CanonPath(repoInfo.url))->getWorkdirRef()
: readHeadCached(repoInfo.url);
if (!head) {
warn("could not read HEAD ref from repo at '%s', using 'master'", repoInfo.url);
return "master";
}
return *head;
}
static MakeNotAllowedError makeNotAllowedError(std::string url)
{
return [url{std::move(url)}](const CanonPath & path) -> RestrictedPathError
{
if (nix::pathExists(path.abs()))
return RestrictedPathError("access to path '%s' is forbidden because it is not under Git control; maybe you should 'git add' it to the repository '%s'?", path, url);
else
return RestrictedPathError("path '%s' does not exist in Git repository '%s'", path, url);
};
}
void verifyCommit(const Input & input, std::shared_ptr<GitRepo> repo) const
{
auto publicKeys = getPublicKeys(input.attrs);
auto verifyCommit = maybeGetBoolAttr(input.attrs, "verifyCommit").value_or(!publicKeys.empty());
if (verifyCommit) {
if (input.getRev() && repo)
repo->verifyCommit(*input.getRev(), publicKeys);
else
throw Error("commit verification is required for Git repository '%s', but it's dirty", input.to_string());
}
}
std::pair<ref<InputAccessor>, Input> getAccessorFromCommit(
ref<Store> store,
RepoInfo & repoInfo,
Input && input) const
{
assert(!repoInfo.workdirInfo.isDirty);
auto origRev = input.getRev();
std::string name = input.getName();
bool shallow = maybeGetBoolAttr(input.attrs, "shallow").value_or(false);
bool submodules = maybeGetBoolAttr(input.attrs, "submodules").value_or(false);
bool allRefs = maybeGetBoolAttr(input.attrs, "allRefs").value_or(false);
std::string cacheType = "git";
if (shallow) cacheType += "-shallow";
if (submodules) cacheType += "-submodules";
if (allRefs) cacheType += "-all-refs";
auto checkHashType = [&](const std::optional<Hash> & hash)
{
if (hash.has_value() && !(hash->type == htSHA1 || hash->type == htSHA256))
throw Error("Hash '%s' is not supported by Git. Supported types are sha1 and sha256.", hash->to_string(Base16, true));
};
auto getLockedAttrs = [&]()
{
checkHashType(input.getRev());
return Attrs({
{"type", cacheType},
{"name", name},
{"rev", input.getRev()->gitRev()},
});
};
auto makeResult = [&](const Attrs & infoAttrs, StorePath && storePath)
-> std::pair<StorePath, Input>
{
assert(input.getRev());
assert(!_input.getRev() || _input.getRev() == input.getRev());
if (!shallow)
input.attrs.insert_or_assign("revCount", getIntAttr(infoAttrs, "revCount"));
input.attrs.insert_or_assign("lastModified", getIntAttr(infoAttrs, "lastModified"));
return {std::move(storePath), input};
};
if (input.getRev()) {
if (auto res = getCache()->lookup(store, getLockedAttrs()))
return makeResult(res->first, std::move(res->second));
}
auto [isLocal, actualUrl_] = getActualUrl(input);
auto actualUrl = actualUrl_; // work around clang bug
/* If this is a local directory and no ref or revision is given,
allow fetching directly from a dirty workdir. */
if (!input.getRef() && !input.getRev() && isLocal) {
auto workdirInfo = getWorkdirInfo(input, actualUrl);
if (!workdirInfo.clean) {
return fetchFromWorkdir(store, input, actualUrl, workdirInfo);
}
}
Attrs unlockedAttrs({
{"type", cacheType},
{"name", name},
{"url", actualUrl},
});
auto originalRef = input.getRef();
auto ref = originalRef ? *originalRef : getDefaultRef(repoInfo);
input.attrs.insert_or_assign("ref", ref);
Path repoDir;
if (isLocal) {
if (!input.getRef()) {
auto head = readHead(actualUrl);
if (!head) {
warn("could not read HEAD ref from repo at '%s', using 'master'", actualUrl);
head = "master";
}
input.attrs.insert_or_assign("ref", *head);
unlockedAttrs.insert_or_assign("ref", *head);
}
if (repoInfo.isLocal) {
repoDir = repoInfo.url;
if (!input.getRev())
input.attrs.insert_or_assign("rev",
Hash::parseAny(chomp(runProgram("git", true, { "-C", actualUrl, "--git-dir", gitDir, "rev-parse", *input.getRef() })), htSHA1).gitRev());
repoDir = actualUrl;
input.attrs.insert_or_assign("rev", GitRepo::openRepo(CanonPath(repoDir))->resolveRef(ref).gitRev());
} else {
const bool useHeadRef = !input.getRef();
if (useHeadRef) {
auto head = readHeadCached(actualUrl);
if (!head) {
warn("could not read HEAD ref from repo at '%s', using 'master'", actualUrl);
head = "master";
}
input.attrs.insert_or_assign("ref", *head);
unlockedAttrs.insert_or_assign("ref", *head);
} else {
if (!input.getRev()) {
unlockedAttrs.insert_or_assign("ref", input.getRef().value());
}
}
if (auto res = getCache()->lookup(store, unlockedAttrs)) {
auto rev2 = Hash::parseAny(getStrAttr(res->first, "rev"), htSHA1);
if (!input.getRev() || input.getRev() == rev2) {
input.attrs.insert_or_assign("rev", rev2.gitRev());
return makeResult(res->first, std::move(res->second));
}
}
Path cacheDir = getCachePath(actualUrl);
Path cacheDir = getCachePath(repoInfo.url);
repoDir = cacheDir;
gitDir = ".";
repoInfo.gitDir = ".";
createDirs(dirOf(cacheDir));
PathLocks cacheDirLock({cacheDir + ".lock"});
PathLocks cacheDirLock({cacheDir});
if (!pathExists(cacheDir)) {
runProgram("git", true, { "-c", "init.defaultBranch=" + gitInitialBranch, "init", "--bare", repoDir });
}
auto repo = GitRepo::openRepo(CanonPath(cacheDir), true, true);
Path localRefFile =
input.getRef()->compare(0, 5, "refs/") == 0
? cacheDir + "/" + *input.getRef()
: cacheDir + "/refs/heads/" + *input.getRef();
ref.compare(0, 5, "refs/") == 0
? cacheDir + "/" + ref
: cacheDir + "/refs/heads/" + ref;
bool doFetch;
time_t now = time(0);
/* If a rev was specified, we need to fetch if it's not in the
repo. */
if (input.getRev()) {
try {
runProgram("git", true, { "-C", repoDir, "--git-dir", gitDir, "cat-file", "-e", input.getRev()->gitRev() });
doFetch = false;
} catch (ExecError & e) {
if (WIFEXITED(e.status)) {
doFetch = true;
} else {
throw;
}
}
if (auto rev = input.getRev()) {
doFetch = !repo->hasObject(*rev);
} else {
if (allRefs) {
if (getAllRefsAttr(input)) {
doFetch = true;
} else {
/* If the local ref is older than tarball-ttl seconds, do a
@ -543,160 +534,213 @@ struct GitInputScheme : InputScheme
}
if (doFetch) {
Activity act(*logger, lvlTalkative, actUnknown, fmt("fetching Git repository '%s'", actualUrl));
// FIXME: git stderr messes up our progress indicator, so
// we're using --quiet for now. Should process its stderr.
try {
auto ref = input.getRef();
auto fetchRef = allRefs
auto fetchRef =
getAllRefsAttr(input)
? "refs/*"
: ref->compare(0, 5, "refs/") == 0
? *ref
: ref == "HEAD"
? *ref
: "refs/heads/" + *ref;
runProgram("git", true, { "-C", repoDir, "--git-dir", gitDir, "fetch", "--quiet", "--force", "--", actualUrl, fmt("%s:%s", fetchRef, fetchRef) }, {}, true);
: input.getRev()
? input.getRev()->gitRev()
: ref.compare(0, 5, "refs/") == 0
? ref
: ref == "HEAD"
? ref
: "refs/heads/" + ref;
repo->fetch(repoInfo.url, fmt("%s:%s", fetchRef, fetchRef), getShallowAttr(input));
} catch (Error & e) {
if (!pathExists(localRefFile)) throw;
warn("could not update local clone of Git repository '%s'; continuing with the most recent version", actualUrl);
logError(e.info());
warn("could not update local clone of Git repository '%s'; continuing with the most recent version", repoInfo.url);
}
if (!touchCacheFile(localRefFile, now))
warn("could not update mtime for file '%s': %s", localRefFile, strerror(errno));
if (useHeadRef && !storeCachedHead(actualUrl, *input.getRef()))
warn("could not update cached head '%s' for '%s'", *input.getRef(), actualUrl);
if (!originalRef && !storeCachedHead(repoInfo.url, ref))
warn("could not update cached head '%s' for '%s'", ref, repoInfo.url);
}
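The new conditional chain above picks which refspec to fetch. Restated as a small standalone helper for clarity (not part of this commit):
#include <optional>
#include <string>
std::string selectFetchRef(
    bool allRefs,
    const std::optional<std::string> & rev,   // 40-character commit hash, if pinned
    const std::string & ref)                  // branch or tag name otherwise
{
    if (allRefs) return "refs/*";                     // mirror every ref
    if (rev) return *rev;                             // fetch the pinned commit directly
    if (ref.compare(0, 5, "refs/") == 0) return ref;  // already a fully qualified ref
    if (ref == "HEAD") return ref;                    // the remote's default branch
    return "refs/heads/" + ref;                       // shorthand branch name
}
// The caller fetches "<fetchRef>:<fetchRef>", so the ref ends up under the
// same name inside the local cache repository.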
if (!input.getRev())
input.attrs.insert_or_assign("rev", Hash::parseAny(chomp(readFile(localRefFile)), htSHA1).gitRev());
if (auto rev = input.getRev()) {
if (!repo->hasObject(*rev))
throw Error(
"Cannot find Git revision '%s' in ref '%s' of repository '%s'! "
"Please make sure that the " ANSI_BOLD "rev" ANSI_NORMAL " exists on the "
ANSI_BOLD "ref" ANSI_NORMAL " you've specified or add " ANSI_BOLD
"allRefs = true;" ANSI_NORMAL " to " ANSI_BOLD "fetchGit" ANSI_NORMAL ".",
rev->gitRev(),
ref,
repoInfo.url
);
} else
input.attrs.insert_or_assign("rev", Hash::parseAny(chomp(readFile(localRefFile)), HashAlgorithm::SHA1).gitRev());
// cache dir lock is removed at scope end; we will only use read-only operations on specific revisions in the remainder
}
bool isShallow = chomp(runProgram("git", true, { "-C", repoDir, "--git-dir", gitDir, "rev-parse", "--is-shallow-repository" })) == "true";
auto repo = GitRepo::openRepo(CanonPath(repoDir));
if (isShallow && !shallow)
throw Error("'%s' is a shallow Git repository, but shallow repositories are only allowed when `shallow = true;` is specified.", actualUrl);
auto isShallow = repo->isShallow();
// FIXME: check whether rev is an ancestor of ref.
if (isShallow && !getShallowAttr(input))
throw Error("'%s' is a shallow Git repository, but shallow repositories are only allowed when `shallow = true;` is specified", repoInfo.url);
printTalkative("using revision %s of repo '%s'", input.getRev()->gitRev(), actualUrl);
// FIXME: check whether rev is an ancestor of ref?
/* Now that we know the ref, check again whether we have it in
the store. */
if (auto res = getCache()->lookup(store, getLockedAttrs()))
return makeResult(res->first, std::move(res->second));
Path tmpDir = createTempDir();
AutoDelete delTmpDir(tmpDir, true);
PathFilter filter = defaultPathFilter;
auto result = runProgram(RunOptions {
.program = "git",
.args = { "-C", repoDir, "--git-dir", gitDir, "cat-file", "commit", input.getRev()->gitRev() },
.mergeStderrToStdout = true
});
if (WEXITSTATUS(result.first) == 128
&& result.second.find("bad file") != std::string::npos)
{
throw Error(
"Cannot find Git revision '%s' in ref '%s' of repository '%s'! "
"Please make sure that the " ANSI_BOLD "rev" ANSI_NORMAL " exists on the "
ANSI_BOLD "ref" ANSI_NORMAL " you've specified or add " ANSI_BOLD
"allRefs = true;" ANSI_NORMAL " to " ANSI_BOLD "fetchGit" ANSI_NORMAL ".",
input.getRev()->gitRev(),
*input.getRef(),
actualUrl
);
}
if (submodules) {
Path tmpGitDir = createTempDir();
AutoDelete delTmpGitDir(tmpGitDir, true);
runProgram("git", true, { "-c", "init.defaultBranch=" + gitInitialBranch, "init", tmpDir, "--separate-git-dir", tmpGitDir });
{
// TODO: repoDir might lack the ref (it only checks if rev
// exists, see FIXME above) so use a big hammer and fetch
// everything to ensure we get the rev.
Activity act(*logger, lvlTalkative, actUnknown, fmt("making temporary clone of '%s'", repoDir));
runProgram("git", true, { "-C", tmpDir, "fetch", "--quiet", "--force",
"--update-head-ok", "--", repoDir, "refs/*:refs/*" }, {}, true);
}
runProgram("git", true, { "-C", tmpDir, "checkout", "--quiet", input.getRev()->gitRev() });
/* Ensure that we use the correct origin for fetching
submodules. This matters for submodules with relative
URLs. */
if (isLocal) {
writeFile(tmpGitDir + "/config", readFile(repoDir + "/" + gitDir + "/config"));
/* Restore the config.bare setting we may have just
copied erroneously from the user's repo. */
runProgram("git", true, { "-C", tmpDir, "config", "core.bare", "false" });
} else
runProgram("git", true, { "-C", tmpDir, "config", "remote.origin.url", actualUrl });
/* As an optimisation, copy the modules directory of the
source repo if it exists. */
auto modulesPath = repoDir + "/" + gitDir + "/modules";
if (pathExists(modulesPath)) {
Activity act(*logger, lvlTalkative, actUnknown, fmt("copying submodules of '%s'", actualUrl));
runProgram("cp", true, { "-R", "--", modulesPath, tmpGitDir + "/modules" });
}
{
Activity act(*logger, lvlTalkative, actUnknown, fmt("fetching submodules of '%s'", actualUrl));
runProgram("git", true, { "-C", tmpDir, "submodule", "--quiet", "update", "--init", "--recursive" }, {}, true);
}
filter = isNotDotGitDirectory;
} else {
// FIXME: should pipe this, or find some better way to extract a
// revision.
auto source = sinkToSource([&](Sink & sink) {
runProgram2({
.program = "git",
.args = { "-C", repoDir, "--git-dir", gitDir, "archive", input.getRev()->gitRev() },
.standardOut = &sink
});
});
unpackTarfile(*source, tmpDir);
}
auto storePath = store->addToStore(name, tmpDir, FileIngestionMethod::Recursive, htSHA256, filter);
auto lastModified = std::stoull(runProgram("git", true, { "-C", repoDir, "--git-dir", gitDir, "log", "-1", "--format=%ct", "--no-show-signature", input.getRev()->gitRev() }));
auto rev = *input.getRev();
Attrs infoAttrs({
{"rev", input.getRev()->gitRev()},
{"lastModified", lastModified},
{"rev", rev.gitRev()},
{"lastModified", getLastModified(repoInfo, repoDir, rev)},
});
if (!shallow)
if (!getShallowAttr(input))
infoAttrs.insert_or_assign("revCount",
std::stoull(runProgram("git", true, { "-C", repoDir, "--git-dir", gitDir, "rev-list", "--count", input.getRev()->gitRev() })));
getRevCount(repoInfo, repoDir, rev));
if (!_input.getRev())
getCache()->add(
store,
unlockedAttrs,
infoAttrs,
storePath,
false);
printTalkative("using revision %s of repo '%s'", rev.gitRev(), repoInfo.url);
getCache()->add(
store,
getLockedAttrs(),
infoAttrs,
storePath,
true);
verifyCommit(input, repo);
return makeResult(infoAttrs, std::move(storePath));
auto accessor = repo->getAccessor(rev);
accessor->setPathDisplay("«" + input.to_string() + "»");
/* If the repo has submodules, fetch them and return a mounted
input accessor consisting of the accessor for the top-level
repo and the accessors for the submodules. */
if (getSubmodulesAttr(input)) {
std::map<CanonPath, nix::ref<InputAccessor>> mounts;
for (auto & [submodule, submoduleRev] : repo->getSubmodules(rev)) {
auto resolved = repo->resolveSubmoduleUrl(submodule.url, repoInfo.url);
debug("Git submodule %s: %s %s %s -> %s",
submodule.path, submodule.url, submodule.branch, submoduleRev.gitRev(), resolved);
fetchers::Attrs attrs;
attrs.insert_or_assign("type", "git");
attrs.insert_or_assign("url", resolved);
if (submodule.branch != "")
attrs.insert_or_assign("ref", submodule.branch);
attrs.insert_or_assign("rev", submoduleRev.gitRev());
auto submoduleInput = fetchers::Input::fromAttrs(std::move(attrs));
auto [submoduleAccessor, submoduleInput2] =
submoduleInput.getAccessor(store);
mounts.insert_or_assign(submodule.path, submoduleAccessor);
}
if (!mounts.empty()) {
mounts.insert_or_assign(CanonPath::root, accessor);
accessor = makeMountedInputAccessor(std::move(mounts));
}
}
assert(!origRev || origRev == rev);
if (!getShallowAttr(input))
input.attrs.insert_or_assign("revCount", getIntAttr(infoAttrs, "revCount"));
input.attrs.insert_or_assign("lastModified", getIntAttr(infoAttrs, "lastModified"));
return {accessor, std::move(input)};
}
std::pair<ref<InputAccessor>, Input> getAccessorFromWorkdir(
ref<Store> store,
RepoInfo & repoInfo,
Input && input) const
{
if (getSubmodulesAttr(input))
/* Create mountpoints for the submodules. */
for (auto & submodule : repoInfo.workdirInfo.submodules)
repoInfo.workdirInfo.files.insert(submodule.path);
ref<InputAccessor> accessor =
AllowListInputAccessor::create(
makeFSInputAccessor(CanonPath(repoInfo.url)),
std::move(repoInfo.workdirInfo.files),
makeNotAllowedError(repoInfo.url));
/* If the repo has submodules, return a mounted input accessor
consisting of the accessor for the top-level repo and the
accessors for the submodule workdirs. */
if (getSubmodulesAttr(input) && !repoInfo.workdirInfo.submodules.empty()) {
std::map<CanonPath, nix::ref<InputAccessor>> mounts;
for (auto & submodule : repoInfo.workdirInfo.submodules) {
auto submodulePath = CanonPath(repoInfo.url) + submodule.path;
fetchers::Attrs attrs;
attrs.insert_or_assign("type", "git");
attrs.insert_or_assign("url", submodulePath.abs());
auto submoduleInput = fetchers::Input::fromAttrs(std::move(attrs));
auto [submoduleAccessor, submoduleInput2] =
submoduleInput.getAccessor(store);
/* If the submodule is dirty, mark this repo dirty as
well. */
if (!submoduleInput2.getRev())
repoInfo.workdirInfo.isDirty = true;
mounts.insert_or_assign(submodule.path, submoduleAccessor);
}
mounts.insert_or_assign(CanonPath::root, accessor);
accessor = makeMountedInputAccessor(std::move(mounts));
}
if (!repoInfo.workdirInfo.isDirty) {
auto repo = GitRepo::openRepo(CanonPath(repoInfo.url));
if (auto ref = repo->getWorkdirRef())
input.attrs.insert_or_assign("ref", *ref);
auto rev = repoInfo.workdirInfo.headRev.value();
input.attrs.insert_or_assign("rev", rev.gitRev());
input.attrs.insert_or_assign("revCount", getRevCount(repoInfo, repoInfo.url, rev));
verifyCommit(input, repo);
} else {
repoInfo.warnDirty();
if (repoInfo.workdirInfo.headRev) {
input.attrs.insert_or_assign("dirtyRev",
repoInfo.workdirInfo.headRev->gitRev() + "-dirty");
input.attrs.insert_or_assign("dirtyShortRev",
repoInfo.workdirInfo.headRev->gitShortRev() + "-dirty");
}
verifyCommit(input, nullptr);
}
input.attrs.insert_or_assign(
"lastModified",
repoInfo.workdirInfo.headRev
? getLastModified(repoInfo, repoInfo.url, *repoInfo.workdirInfo.headRev)
: 0);
input.locked = true; // FIXME
return {accessor, std::move(input)};
}
std::pair<ref<InputAccessor>, Input> getAccessor(ref<Store> store, const Input & _input) const override
{
Input input(_input);
auto repoInfo = getRepoInfo(input);
auto [accessor, final] =
input.getRef() || input.getRev() || !repoInfo.isLocal
? getAccessorFromCommit(store, repoInfo, std::move(input))
: getAccessorFromWorkdir(store, repoInfo, std::move(input));
accessor->fingerprint = final.getFingerprint(store);
return {accessor, std::move(final)};
}
std::optional<std::string> getFingerprint(ref<Store> store, const Input & input) const override
{
if (auto rev = input.getRev())
return rev->gitRev() + (getSubmodulesAttr(input) ? ";s" : "");
else
return std::nullopt;
}
};
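toURL() earlier in this scheme emits `keytype`/`publicKey` query parameters for a single key and a combined `publicKeys` parameter (via publicKeys_to_string) for several. That serializer is not part of this hunk; the following is a plausible sketch, assuming the JSON array format that getPublicKeys parses back out — the real publicKeys_to_string() may differ.
#include <string>
#include <vector>
#include <nlohmann/json.hpp>
struct PublicKey { std::string type, key; };
// Assumed inverse of the `publicKeys` parsing in getPublicKeys().
std::string publicKeysToString(const std::vector<PublicKey> & keys)
{
    auto j = nlohmann::json::array();
    for (auto & k : keys) {
        nlohmann::json e;
        e["type"] = k.type;
        e["key"] = k.key;
        j.push_back(e);
    }
    return j.dump();
}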

View file

@ -7,6 +7,7 @@
#include "git.hh"
#include "fetchers.hh"
#include "fetch-settings.hh"
#include "tarball.hh"
#include <optional>
#include <nlohmann/json.hpp>
@ -26,13 +27,11 @@ std::regex hostRegex(hostRegexS, std::regex::ECMAScript);
struct GitArchiveInputScheme : InputScheme
{
virtual std::string type() const = 0;
virtual std::optional<std::pair<std::string, std::string>> accessHeaderFromToken(const std::string & token) const = 0;
std::optional<Input> inputFromURL(const ParsedURL & url) const override
std::optional<Input> inputFromURL(const ParsedURL & url, bool requireTree) const override
{
if (url.scheme != type()) return {};
if (url.scheme != schemeName()) return {};
auto path = tokenizeString<std::vector<std::string>>(url.path, "/");
@ -43,7 +42,7 @@ struct GitArchiveInputScheme : InputScheme
auto size = path.size();
if (size == 3) {
if (std::regex_match(path[2], revRegex))
rev = Hash::parseAny(path[2], htSHA1);
rev = Hash::parseAny(path[2], HashAlgorithm::SHA1);
else if (std::regex_match(path[2], refRegex))
ref = path[2];
else
@ -69,7 +68,7 @@ struct GitArchiveInputScheme : InputScheme
if (name == "rev") {
if (rev)
throw BadURL("URL '%s' contains multiple commit hashes", url.url);
rev = Hash::parseAny(value, htSHA1);
rev = Hash::parseAny(value, HashAlgorithm::SHA1);
}
else if (name == "ref") {
if (!std::regex_match(value, refRegex))
@ -90,7 +89,7 @@ struct GitArchiveInputScheme : InputScheme
throw BadURL("URL '%s' contains both a commit hash and a branch/tag name %s %s", url.url, *ref, rev->gitRev());
Input input;
input.attrs.insert_or_assign("type", type());
input.attrs.insert_or_assign("type", std::string { schemeName() });
input.attrs.insert_or_assign("owner", path[0]);
input.attrs.insert_or_assign("repo", path[1]);
if (rev) input.attrs.insert_or_assign("rev", rev->gitRev());
@ -100,14 +99,21 @@ struct GitArchiveInputScheme : InputScheme
return input;
}
StringSet allowedAttrs() const override
{
return {
"owner",
"repo",
"ref",
"rev",
"narHash",
"lastModified",
"host",
};
}
std::optional<Input> inputFromAttrs(const Attrs & attrs) const override
{
if (maybeGetStrAttr(attrs, "type") != type()) return {};
for (auto & [name, value] : attrs)
if (name != "type" && name != "owner" && name != "repo" && name != "ref" && name != "rev" && name != "narHash" && name != "lastModified" && name != "host")
throw Error("unsupported input attribute '%s'", name);
getStrAttr(attrs, "owner");
getStrAttr(attrs, "repo");
@ -125,18 +131,13 @@ struct GitArchiveInputScheme : InputScheme
auto path = owner + "/" + repo;
assert(!(ref && rev));
if (ref) path += "/" + *ref;
if (rev) path += "/" + rev->to_string(Base16, false);
if (rev) path += "/" + rev->to_string(HashFormat::Base16, false);
return ParsedURL {
.scheme = type(),
.scheme = std::string { schemeName() },
.path = path,
};
}
bool hasAllInfo(const Input & input) const override
{
return input.getRev() && maybeGetIntAttr(input.attrs, "lastModified");
}
Input applyOverrides(
const Input & _input,
std::optional<std::string> ref,
@ -200,7 +201,7 @@ struct GitArchiveInputScheme : InputScheme
{"rev", rev->gitRev()},
});
if (auto res = getCache()->lookup(store, lockedAttrs)) {
if (auto res = getCache()->lookup(*store, lockedAttrs)) {
input.attrs.insert_or_assign("lastModified", getIntAttr(res->first, "lastModified"));
return {std::move(res->second), input};
}
@ -212,22 +213,35 @@ struct GitArchiveInputScheme : InputScheme
input.attrs.insert_or_assign("lastModified", uint64_t(result.lastModified));
getCache()->add(
store,
*store,
lockedAttrs,
{
{"rev", rev->gitRev()},
{"lastModified", uint64_t(result.lastModified)}
},
result.tree.storePath,
result.storePath,
true);
return {result.tree.storePath, input};
return {result.storePath, input};
}
std::optional<ExperimentalFeature> experimentalFeature() const override
{
return Xp::Flakes;
}
std::optional<std::string> getFingerprint(ref<Store> store, const Input & input) const override
{
if (auto rev = input.getRev())
return rev->gitRev();
else
return std::nullopt;
}
};
struct GitHubInputScheme : GitArchiveInputScheme
{
std::string type() const override { return "github"; }
std::string_view schemeName() const override { return "github"; }
std::optional<std::pair<std::string, std::string>> accessHeaderFromToken(const std::string & token) const override
{
@ -270,7 +284,7 @@ struct GitHubInputScheme : GitArchiveInputScheme
readFile(
store->toRealPath(
downloadFile(store, url, "source", false, headers).storePath)));
auto rev = Hash::parseAny(std::string { json["sha"] }, htSHA1);
auto rev = Hash::parseAny(std::string { json["sha"] }, HashAlgorithm::SHA1);
debug("HEAD revision for '%s' is %s", url, rev.gitRev());
return rev;
}
@ -291,7 +305,7 @@ struct GitHubInputScheme : GitArchiveInputScheme
: "https://api.%s/repos/%s/%s/tarball/%s";
const auto url = fmt(urlFmt, host, getOwner(input), getRepo(input),
input.getRev()->to_string(Base16, false));
input.getRev()->to_string(HashFormat::Base16, false));
return DownloadUrl { url, headers };
}
@ -308,7 +322,7 @@ struct GitHubInputScheme : GitArchiveInputScheme
struct GitLabInputScheme : GitArchiveInputScheme
{
std::string type() const override { return "gitlab"; }
std::string_view schemeName() const override { return "gitlab"; }
std::optional<std::pair<std::string, std::string>> accessHeaderFromToken(const std::string & token) const override
{
@ -342,7 +356,7 @@ struct GitLabInputScheme : GitArchiveInputScheme
readFile(
store->toRealPath(
downloadFile(store, url, "source", false, headers).storePath)));
auto rev = Hash::parseAny(std::string(json[0]["id"]), htSHA1);
auto rev = Hash::parseAny(std::string(json[0]["id"]), HashAlgorithm::SHA1);
debug("HEAD revision for '%s' is %s", url, rev.gitRev());
return rev;
}
@ -357,7 +371,7 @@ struct GitLabInputScheme : GitArchiveInputScheme
auto host = maybeGetStrAttr(input.attrs, "host").value_or("gitlab.com");
auto url = fmt("https://%s/api/v4/projects/%s%%2F%s/repository/archive.tar.gz?sha=%s",
host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"),
input.getRev()->to_string(Base16, false));
input.getRev()->to_string(HashFormat::Base16, false));
Headers headers = makeHeadersWithAuthTokens(host);
return DownloadUrl { url, headers };
@ -376,7 +390,7 @@ struct GitLabInputScheme : GitArchiveInputScheme
struct SourceHutInputScheme : GitArchiveInputScheme
{
std::string type() const override { return "sourcehut"; }
std::string_view schemeName() const override { return "sourcehut"; }
std::optional<std::pair<std::string, std::string>> accessHeaderFromToken(const std::string & token) const override
{
@ -434,7 +448,7 @@ struct SourceHutInputScheme : GitArchiveInputScheme
if(!id)
throw BadURL("in '%d', couldn't find ref '%d'", input.to_string(), ref);
auto rev = Hash::parseAny(*id, htSHA1);
auto rev = Hash::parseAny(*id, HashAlgorithm::SHA1);
debug("HEAD revision for '%s' is %s", fmt("%s/%s", base_url, ref), rev.gitRev());
return rev;
}
@ -444,7 +458,7 @@ struct SourceHutInputScheme : GitArchiveInputScheme
auto host = maybeGetStrAttr(input.attrs, "host").value_or("git.sr.ht");
auto url = fmt("https://%s/%s/%s/archive/%s.tar.gz",
host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"),
input.getRev()->to_string(Base16, false));
input.getRev()->to_string(HashFormat::Base16, false));
Headers headers = makeHeadersWithAuthTokens(host);
return DownloadUrl { url, headers };

View file

@ -1,5 +1,6 @@
#include "fetchers.hh"
#include "url-parts.hh"
#include "path.hh"
namespace nix::fetchers {
@ -7,7 +8,7 @@ std::regex flakeRegex("[a-zA-Z][a-zA-Z0-9_-]*", std::regex::ECMAScript);
struct IndirectInputScheme : InputScheme
{
std::optional<Input> inputFromURL(const ParsedURL & url) const override
std::optional<Input> inputFromURL(const ParsedURL & url, bool requireTree) const override
{
if (url.scheme != "flake") return {};
@ -19,7 +20,7 @@ struct IndirectInputScheme : InputScheme
if (path.size() == 1) {
} else if (path.size() == 2) {
if (std::regex_match(path[1], revRegex))
rev = Hash::parseAny(path[1], htSHA1);
rev = Hash::parseAny(path[1], HashAlgorithm::SHA1);
else if (std::regex_match(path[1], refRegex))
ref = path[1];
else
@ -30,7 +31,7 @@ struct IndirectInputScheme : InputScheme
ref = path[1];
if (!std::regex_match(path[2], revRegex))
throw BadURL("in flake URL '%s', '%s' is not a commit hash", url.url, path[2]);
rev = Hash::parseAny(path[2], htSHA1);
rev = Hash::parseAny(path[2], HashAlgorithm::SHA1);
} else
throw BadURL("GitHub URL '%s' is invalid", url.url);
@ -41,7 +42,6 @@ struct IndirectInputScheme : InputScheme
// FIXME: forbid query params?
Input input;
input.direct = false;
input.attrs.insert_or_assign("type", "indirect");
input.attrs.insert_or_assign("id", id);
if (rev) input.attrs.insert_or_assign("rev", rev->gitRev());
@ -50,20 +50,28 @@ struct IndirectInputScheme : InputScheme
return input;
}
std::string_view schemeName() const override
{
return "indirect";
}
StringSet allowedAttrs() const override
{
return {
"id",
"ref",
"rev",
"narHash",
};
}
std::optional<Input> inputFromAttrs(const Attrs & attrs) const override
{
if (maybeGetStrAttr(attrs, "type") != "indirect") return {};
for (auto & [name, value] : attrs)
if (name != "type" && name != "id" && name != "ref" && name != "rev" && name != "narHash")
throw Error("unsupported indirect input attribute '%s'", name);
auto id = getStrAttr(attrs, "id");
if (!std::regex_match(id, flakeRegex))
throw BadURL("'%s' is not a valid flake ID", id);
Input input;
input.direct = false;
input.attrs = attrs;
return input;
}
@ -78,11 +86,6 @@ struct IndirectInputScheme : InputScheme
return url;
}
bool hasAllInfo(const Input & input) const override
{
return false;
}
Input applyOverrides(
const Input & _input,
std::optional<std::string> ref,
@ -98,6 +101,14 @@ struct IndirectInputScheme : InputScheme
{
throw Error("indirect input '%s' cannot be fetched directly", input.to_string());
}
std::optional<ExperimentalFeature> experimentalFeature() const override
{
return Xp::Flakes;
}
bool isDirect(const Input & input) const override
{ return false; }
};
static auto rIndirectInputScheme = OnStartup([] { registerInputScheme(std::make_unique<IndirectInputScheme>()); });

View file

@ -1,106 +0,0 @@
#include "input-accessor.hh"
#include "store-api.hh"
namespace nix {
std::ostream & operator << (std::ostream & str, const SourcePath & path)
{
str << path.to_string();
return str;
}
std::string_view SourcePath::baseName() const
{
return path.baseName().value_or("source");
}
SourcePath SourcePath::parent() const
{
auto p = path.parent();
assert(p);
return std::move(*p);
}
InputAccessor::Stat SourcePath::lstat() const
{
auto st = nix::lstat(path.abs());
return InputAccessor::Stat {
.type =
S_ISREG(st.st_mode) ? InputAccessor::tRegular :
S_ISDIR(st.st_mode) ? InputAccessor::tDirectory :
S_ISLNK(st.st_mode) ? InputAccessor::tSymlink :
InputAccessor::tMisc,
.isExecutable = S_ISREG(st.st_mode) && st.st_mode & S_IXUSR
};
}
std::optional<InputAccessor::Stat> SourcePath::maybeLstat() const
{
// FIXME: merge these into one operation.
if (!pathExists())
return {};
return lstat();
}
InputAccessor::DirEntries SourcePath::readDirectory() const
{
InputAccessor::DirEntries res;
for (auto & entry : nix::readDirectory(path.abs())) {
std::optional<InputAccessor::Type> type;
switch (entry.type) {
case DT_REG: type = InputAccessor::Type::tRegular; break;
case DT_LNK: type = InputAccessor::Type::tSymlink; break;
case DT_DIR: type = InputAccessor::Type::tDirectory; break;
}
res.emplace(entry.name, type);
}
return res;
}
StorePath SourcePath::fetchToStore(
ref<Store> store,
std::string_view name,
PathFilter * filter,
RepairFlag repair) const
{
return
settings.readOnlyMode
? store->computeStorePathForPath(name, path.abs(), FileIngestionMethod::Recursive, htSHA256, filter ? *filter : defaultPathFilter).first
: store->addToStore(name, path.abs(), FileIngestionMethod::Recursive, htSHA256, filter ? *filter : defaultPathFilter, repair);
}
SourcePath SourcePath::resolveSymlinks() const
{
SourcePath res(CanonPath::root);
int linksAllowed = 1024;
std::list<std::string> todo;
for (auto & c : path)
todo.push_back(std::string(c));
while (!todo.empty()) {
auto c = *todo.begin();
todo.pop_front();
if (c == "" || c == ".")
;
else if (c == "..")
res.path.pop();
else {
res.path.push(c);
if (auto st = res.maybeLstat(); st && st->type == InputAccessor::tSymlink) {
if (!linksAllowed--)
throw Error("infinite symlink recursion in path '%s'", path);
auto target = res.readLink();
res.path.pop();
if (hasPrefix(target, "/"))
res.path = CanonPath::root;
todo.splice(todo.begin(), tokenizeString<std::list<std::string>>(target, "/"));
}
}
}
return res;
}
}
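The SourcePath helpers deleted here include an iterative, component-by-component symlink resolver with a fixed link budget. Below is a self-contained sketch of the same algorithm over the real filesystem, using std::filesystem instead of CanonPath/InputAccessor; it is an illustration only, the original logic moves elsewhere in the tree.
#include <filesystem>
#include <list>
#include <stdexcept>
#include <string>
namespace fs = std::filesystem;
fs::path resolveSymlinks(const fs::path & path)
{
    fs::path res = "/";
    int linksAllowed = 1024;                       // guard against symlink loops
    std::list<std::string> todo;
    for (const auto & c : path.relative_path())
        todo.push_back(c.string());
    while (!todo.empty()) {
        auto c = todo.front();
        todo.pop_front();
        if (c == "" || c == ".")
            ;                                      // skip empty and "." components
        else if (c == "..")
            res = res.parent_path();
        else {
            res /= c;
            if (fs::is_symlink(fs::symlink_status(res))) {
                if (!linksAllowed--)
                    throw std::runtime_error("infinite symlink recursion in path '" + path.string() + "'");
                auto target = fs::read_symlink(res);
                res = res.parent_path();           // the link itself is replaced by its target
                if (target.is_absolute())
                    res = "/";
                std::list<std::string> parts;
                for (const auto & p : target.relative_path())
                    parts.push_back(p.string());
                todo.splice(todo.begin(), parts);  // resolve the target's components next
            }
        }
    }
    return res;
}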

View file

@ -8,6 +8,6 @@ libfetchers_SOURCES := $(wildcard $(d)/*.cc)
libfetchers_CXXFLAGS += -I src/libutil -I src/libstore
libfetchers_LDFLAGS += -pthread
libfetchers_LDFLAGS += $(THREAD_LDFLAGS) $(LIBGIT2_LIBS) -larchive
libfetchers_LIBS = libutil libstore

View file

@ -0,0 +1,23 @@
#include "memory-input-accessor.hh"
#include "memory-source-accessor.hh"
#include "source-path.hh"
namespace nix {
struct MemoryInputAccessorImpl : MemoryInputAccessor, MemorySourceAccessor
{
SourcePath addFile(CanonPath path, std::string && contents) override
{
return {
ref(shared_from_this()),
MemorySourceAccessor::addFile(path, std::move(contents))
};
}
};
ref<MemoryInputAccessor> makeMemoryInputAccessor()
{
return make_ref<MemoryInputAccessorImpl>();
}
}

View file

@ -0,0 +1,16 @@
#include "input-accessor.hh"
#include "source-path.hh"
namespace nix {
/**
* An input accessor for an in-memory file system.
*/
struct MemoryInputAccessor : InputAccessor
{
virtual SourcePath addFile(CanonPath path, std::string && contents) = 0;
};
ref<MemoryInputAccessor> makeMemoryInputAccessor();
}

View file

@ -1,9 +1,12 @@
#include "fetchers.hh"
#include "processes.hh"
#include "users.hh"
#include "cache.hh"
#include "globals.hh"
#include "tarfile.hh"
#include "store-api.hh"
#include "url-parts.hh"
#include "posix-source-accessor.hh"
#include "fetch-settings.hh"
@ -43,7 +46,7 @@ static std::string runHg(const Strings & args, const std::optional<std::string>
struct MercurialInputScheme : InputScheme
{
std::optional<Input> inputFromURL(const ParsedURL & url) const override
std::optional<Input> inputFromURL(const ParsedURL & url, bool requireTree) const override
{
if (url.scheme != "hg+http" &&
url.scheme != "hg+https" &&
@ -69,14 +72,25 @@ struct MercurialInputScheme : InputScheme
return inputFromAttrs(attrs);
}
std::string_view schemeName() const override
{
return "hg";
}
StringSet allowedAttrs() const override
{
return {
"url",
"ref",
"rev",
"revCount",
"narHash",
"name",
};
}
std::optional<Input> inputFromAttrs(const Attrs & attrs) const override
{
if (maybeGetStrAttr(attrs, "type") != "hg") return {};
for (auto & [name, value] : attrs)
if (name != "type" && name != "url" && name != "ref" && name != "rev" && name != "revCount" && name != "narHash" && name != "name")
throw Error("unsupported Mercurial input attribute '%s'", name);
parseURL(getStrAttr(attrs, "url"));
if (auto ref = maybeGetStrAttr(attrs, "ref")) {
@ -98,13 +112,6 @@ struct MercurialInputScheme : InputScheme
return url;
}
bool hasAllInfo(const Input & input) const override
{
// FIXME: ugly, need to distinguish between dirty and clean
// default trees.
return input.getRef() == "default" || maybeGetIntAttr(input.attrs, "revCount");
}
Input applyOverrides(
const Input & input,
std::optional<std::string> ref,
@ -116,7 +123,7 @@ struct MercurialInputScheme : InputScheme
return res;
}
std::optional<Path> getSourcePath(const Input & input) override
std::optional<Path> getSourcePath(const Input & input) const override
{
auto url = parseURL(getStrAttr(input.attrs, "url"));
if (url.scheme == "file" && !input.getRef() && !input.getRev())
@ -124,18 +131,27 @@ struct MercurialInputScheme : InputScheme
return {};
}
void markChangedFile(const Input & input, std::string_view file, std::optional<std::string> commitMsg) override
void putFile(
const Input & input,
const CanonPath & path,
std::string_view contents,
std::optional<std::string> commitMsg) const override
{
auto sourcePath = getSourcePath(input);
assert(sourcePath);
auto [isLocal, repoPath] = getActualUrl(input);
if (!isLocal)
throw Error("cannot commit '%s' to Mercurial repository '%s' because it's not a working tree", path, input.to_string());
auto absPath = CanonPath(repoPath) + path;
writeFile(absPath.abs(), contents);
// FIXME: shut up if file is already tracked.
runHg(
{ "add", *sourcePath + "/" + std::string(file) });
{ "add", absPath.abs() });
if (commitMsg)
runHg(
{ "commit", *sourcePath + "/" + std::string(file), "-m", *commitMsg });
{ "commit", absPath.abs(), "-m", *commitMsg });
}
std::pair<bool, std::string> getActualUrl(const Input & input) const
@ -195,7 +211,12 @@ struct MercurialInputScheme : InputScheme
return files.count(file);
};
auto storePath = store->addToStore(input.getName(), actualPath, FileIngestionMethod::Recursive, htSHA256, filter);
PosixSourceAccessor accessor;
auto storePath = store->addToStore(
input.getName(),
accessor, CanonPath { actualPath },
FileIngestionMethod::Recursive, HashAlgorithm::SHA256, {},
filter);
return {std::move(storePath), input};
}
@ -203,16 +224,16 @@ struct MercurialInputScheme : InputScheme
if (!input.getRef()) input.attrs.insert_or_assign("ref", "default");
auto checkHashType = [&](const std::optional<Hash> & hash)
auto checkHashAlgorithm = [&](const std::optional<Hash> & hash)
{
if (hash.has_value() && hash->type != htSHA1)
throw Error("Hash '%s' is not supported by Mercurial. Only sha1 is supported.", hash->to_string(Base16, true));
if (hash.has_value() && hash->algo != HashAlgorithm::SHA1)
throw Error("Hash '%s' is not supported by Mercurial. Only sha1 is supported.", hash->to_string(HashFormat::Base16, true));
};
auto getLockedAttrs = [&]()
{
checkHashType(input.getRev());
checkHashAlgorithm(input.getRev());
return Attrs({
{"type", "hg"},
@ -231,7 +252,7 @@ struct MercurialInputScheme : InputScheme
};
if (input.getRev()) {
if (auto res = getCache()->lookup(store, getLockedAttrs()))
if (auto res = getCache()->lookup(*store, getLockedAttrs()))
return makeResult(res->first, std::move(res->second));
}
@ -244,15 +265,15 @@ struct MercurialInputScheme : InputScheme
{"ref", *input.getRef()},
});
if (auto res = getCache()->lookup(store, unlockedAttrs)) {
auto rev2 = Hash::parseAny(getStrAttr(res->first, "rev"), htSHA1);
if (auto res = getCache()->lookup(*store, unlockedAttrs)) {
auto rev2 = Hash::parseAny(getStrAttr(res->first, "rev"), HashAlgorithm::SHA1);
if (!input.getRev() || input.getRev() == rev2) {
input.attrs.insert_or_assign("rev", rev2.gitRev());
return makeResult(res->first, std::move(res->second));
}
}
Path cacheDir = fmt("%s/nix/hg/%s", getCacheDir(), hashString(htSHA256, actualUrl).to_string(Base32, false));
Path cacheDir = fmt("%s/nix/hg/%s", getCacheDir(), hashString(HashAlgorithm::SHA256, actualUrl).to_string(HashFormat::Nix32, false));
/* If this is a commit hash that we already have, we don't
have to pull again. */
@ -286,11 +307,11 @@ struct MercurialInputScheme : InputScheme
runHg({ "log", "-R", cacheDir, "-r", revOrRef, "--template", "{node} {rev} {branch}" }));
assert(tokens.size() == 3);
input.attrs.insert_or_assign("rev", Hash::parseAny(tokens[0], htSHA1).gitRev());
input.attrs.insert_or_assign("rev", Hash::parseAny(tokens[0], HashAlgorithm::SHA1).gitRev());
auto revCount = std::stoull(tokens[1]);
input.attrs.insert_or_assign("ref", tokens[2]);
if (auto res = getCache()->lookup(store, getLockedAttrs()))
if (auto res = getCache()->lookup(*store, getLockedAttrs()))
return makeResult(res->first, std::move(res->second));
Path tmpDir = createTempDir();
@ -300,7 +321,8 @@ struct MercurialInputScheme : InputScheme
deletePath(tmpDir + "/.hg_archival.txt");
auto storePath = store->addToStore(name, tmpDir);
PosixSourceAccessor accessor;
auto storePath = store->addToStore(name, accessor, CanonPath { tmpDir });
Attrs infoAttrs({
{"rev", input.getRev()->gitRev()},
@ -309,14 +331,14 @@ struct MercurialInputScheme : InputScheme
if (!_input.getRev())
getCache()->add(
store,
*store,
unlockedAttrs,
infoAttrs,
storePath,
false);
getCache()->add(
store,
*store,
getLockedAttrs(),
infoAttrs,
storePath,
@ -324,6 +346,14 @@ struct MercurialInputScheme : InputScheme
return makeResult(infoAttrs, std::move(storePath));
}
std::optional<std::string> getFingerprint(ref<Store> store, const Input & input) const override
{
if (auto rev = input.getRev())
return rev->gitRev();
else
return std::nullopt;
}
};
static auto rMercurialInputScheme = OnStartup([] { registerInputScheme(std::make_unique<MercurialInputScheme>()); });
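To pin a Mercurial input, fetch() above parses one line of `hg log -r <rev-or-ref> --template "{node} {rev} {branch}"` into the revision hash, the revision count, and the branch. A standalone sketch of that parsing with plain standard-library facilities; like the original, it assumes branch names contain no whitespace.
#include <cassert>
#include <cstdint>
#include <sstream>
#include <string>
#include <tuple>
#include <vector>
// Returns {node, revCount, branch} from a "{node} {rev} {branch}" line.
std::tuple<std::string, uint64_t, std::string> parseHgLogLine(const std::string & line)
{
    std::istringstream in(line);
    std::vector<std::string> tokens;
    for (std::string tok; in >> tok; )
        tokens.push_back(tok);
    assert(tokens.size() == 3);
    return {tokens[0],              // {node}: the 40-character changeset hash, stored as `rev`
            std::stoull(tokens[1]), // {rev}: the local revision number, stored as `revCount`
            tokens[2]};             // {branch}: stored as the `ref` attribute
}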

View file

@ -0,0 +1,77 @@
#include "mounted-input-accessor.hh"
namespace nix {
struct MountedInputAccessor : InputAccessor
{
std::map<CanonPath, ref<InputAccessor>> mounts;
MountedInputAccessor(std::map<CanonPath, ref<InputAccessor>> _mounts)
: mounts(std::move(_mounts))
{
// Currently we require a root filesystem. This could be relaxed.
assert(mounts.contains(CanonPath::root));
// FIXME: return dummy parent directories automatically?
}
std::string readFile(const CanonPath & path) override
{
auto [accessor, subpath] = resolve(path);
return accessor->readFile(subpath);
}
bool pathExists(const CanonPath & path) override
{
auto [accessor, subpath] = resolve(path);
return accessor->pathExists(subpath);
}
std::optional<Stat> maybeLstat(const CanonPath & path) override
{
auto [accessor, subpath] = resolve(path);
return accessor->maybeLstat(subpath);
}
DirEntries readDirectory(const CanonPath & path) override
{
auto [accessor, subpath] = resolve(path);
return accessor->readDirectory(subpath);
}
std::string readLink(const CanonPath & path) override
{
auto [accessor, subpath] = resolve(path);
return accessor->readLink(subpath);
}
std::string showPath(const CanonPath & path) override
{
auto [accessor, subpath] = resolve(path);
return accessor->showPath(subpath);
}
std::pair<ref<InputAccessor>, CanonPath> resolve(CanonPath path)
{
// Find the nearest parent of `path` that is a mount point.
std::vector<std::string> subpath;
while (true) {
auto i = mounts.find(path);
if (i != mounts.end()) {
std::reverse(subpath.begin(), subpath.end());
return {i->second, CanonPath(subpath)};
}
assert(!path.isRoot());
subpath.push_back(std::string(*path.baseName()));
path.pop();
}
}
};
ref<InputAccessor> makeMountedInputAccessor(std::map<CanonPath, ref<InputAccessor>> mounts)
{
return make_ref<MountedInputAccessor>(std::move(mounts));
}
}
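resolve() above finds the nearest ancestor of a path that is a mount point by repeatedly popping the last component. Here is a standalone sketch of the same lookup with plain strings and a std::map; paths are assumed canonical and absolute, as CanonPath guarantees in the real code.
#include <map>
#include <stdexcept>
#include <string>
#include <utility>
// Returns the mount whose key is the longest prefix of `path`, plus the
// remainder of `path` relative to that mount point.
std::pair<std::string, std::string> resolveMount(
    const std::map<std::string, std::string> & mounts, std::string path)
{
    std::string subpath;
    while (true) {
        if (auto i = mounts.find(path); i != mounts.end())
            return {i->second, subpath};
        if (path == "/")
            throw std::runtime_error("no root mount point"); // the real code asserts that "/" is mounted
        auto slash = path.find_last_of('/');
        subpath = subpath.empty()
            ? path.substr(slash + 1)
            : path.substr(slash + 1) + "/" + subpath;
        path = slash == 0 ? "/" : path.substr(0, slash);
    }
}
// Example: with mounts {"/": "top-level repo", "/sub": "submodule"},
// resolveMount(mounts, "/sub/src/main.c") yields {"submodule", "src/main.c"}.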

View file

@ -0,0 +1,9 @@
#pragma once
#include "input-accessor.hh"
namespace nix {
ref<InputAccessor> makeMountedInputAccessor(std::map<CanonPath, ref<InputAccessor>> mounts);
}

View file

@ -6,7 +6,7 @@ namespace nix::fetchers {
struct PathInputScheme : InputScheme
{
std::optional<Input> inputFromURL(const ParsedURL & url) const override
std::optional<Input> inputFromURL(const ParsedURL & url, bool requireTree) const override
{
if (url.scheme != "path") return {};
@ -32,23 +32,30 @@ struct PathInputScheme : InputScheme
return input;
}
std::string_view schemeName() const override
{
return "path";
}
StringSet allowedAttrs() const override
{
return {
"path",
/* Allow the user to pass in "fake" tree info
attributes. This is useful for making a pinned tree work
the same as the repository from which it is exported (e.g.
path:/nix/store/...-source?lastModified=1585388205&rev=b0c285...).
*/
"rev",
"revCount",
"lastModified",
"narHash",
};
}
std::optional<Input> inputFromAttrs(const Attrs & attrs) const override
{
if (maybeGetStrAttr(attrs, "type") != "path") return {};
getStrAttr(attrs, "path");
for (auto & [name, value] : attrs)
/* Allow the user to pass in "fake" tree info
attributes. This is useful for making a pinned tree
work the same as the repository from which it is exported
(e.g. path:/nix/store/...-source?lastModified=1585388205&rev=b0c285...). */
if (name == "type" || name == "rev" || name == "revCount" || name == "lastModified" || name == "narHash" || name == "path")
// checked in Input::fromAttrs
;
else
throw Error("unsupported path input attribute '%s'", name);
Input input;
input.attrs = attrs;
return input;
@ -66,19 +73,28 @@ struct PathInputScheme : InputScheme
};
}
bool hasAllInfo(const Input & input) const override
{
return true;
}
std::optional<Path> getSourcePath(const Input & input) override
std::optional<Path> getSourcePath(const Input & input) const override
{
return getStrAttr(input.attrs, "path");
}
void markChangedFile(const Input & input, std::string_view file, std::optional<std::string> commitMsg) override
void putFile(
const Input & input,
const CanonPath & path,
std::string_view contents,
std::optional<std::string> commitMsg) const override
{
// nothing to do
writeFile((CanonPath(getAbsPath(input)) + path).abs(), contents);
}
CanonPath getAbsPath(const Input & input) const
{
auto path = getStrAttr(input.attrs, "path");
if (path[0] == '/')
return CanonPath(path);
throw Error("cannot fetch input '%s' because it uses a relative path", input.to_string());
}
std::pair<StorePath, Input> fetch(ref<Store> store, const Input & _input) override
@ -125,6 +141,11 @@ struct PathInputScheme : InputScheme
return {std::move(*storePath), input};
}
std::optional<ExperimentalFeature> experimentalFeature() const override
{
return Xp::Flakes;
}
};
static auto rPathInputScheme = OnStartup([] { registerInputScheme(std::make_unique<PathInputScheme>()); });

View file

@ -1,6 +1,6 @@
#include "registry.hh"
#include "fetchers.hh"
#include "util.hh"
#include "tarball.hh"
#include "users.hh"
#include "globals.hh"
#include "store-api.hh"
#include "local-fs-store.hh"

View file

@ -1,3 +1,4 @@
#include "tarball.hh"
#include "fetchers.hh"
#include "cache.hh"
#include "filetransfer.hh"
@ -7,6 +8,7 @@
#include "tarfile.hh"
#include "types.hh"
#include "split.hh"
#include "posix-source-accessor.hh"
namespace nix::fetchers {
@ -25,7 +27,7 @@ DownloadFileResult downloadFile(
{"name", name},
});
auto cached = getCache()->lookupExpired(store, inAttrs);
auto cached = getCache()->lookupExpired(*store, inAttrs);
auto useCached = [&]() -> DownloadFileResult
{
@ -72,18 +74,16 @@ DownloadFileResult downloadFile(
} else {
StringSink sink;
dumpString(res.data, sink);
auto hash = hashString(htSHA256, res.data);
auto hash = hashString(HashAlgorithm::SHA256, res.data);
ValidPathInfo info {
*store,
name,
FixedOutputInfo {
.hash = {
.method = FileIngestionMethod::Flat,
.hash = hash,
},
.method = FileIngestionMethod::Flat,
.hash = hash,
.references = {},
},
hashString(htSHA256, sink.s),
hashString(HashAlgorithm::SHA256, sink.s),
};
info.narSize = sink.s.size();
auto source = StringSource { sink.s };
@ -92,7 +92,7 @@ DownloadFileResult downloadFile(
}
getCache()->add(
store,
*store,
inAttrs,
infoAttrs,
*storePath,
@ -100,7 +100,7 @@ DownloadFileResult downloadFile(
if (url != res.effectiveUri)
getCache()->add(
store,
*store,
{
{"type", "file"},
{"url", res.effectiveUri},
@ -131,11 +131,11 @@ DownloadTarballResult downloadTarball(
{"name", name},
});
auto cached = getCache()->lookupExpired(store, inAttrs);
auto cached = getCache()->lookupExpired(*store, inAttrs);
if (cached && !cached->expired)
return {
.tree = Tree { .actualPath = store->toRealPath(cached->storePath), .storePath = std::move(cached->storePath) },
.storePath = std::move(cached->storePath),
.lastModified = (time_t) getIntAttr(cached->infoAttrs, "lastModified"),
.immutableUrl = maybeGetStrAttr(cached->infoAttrs, "immutableUrl"),
};
@ -157,7 +157,8 @@ DownloadTarballResult downloadTarball(
throw nix::Error("tarball '%s' contains an unexpected number of top-level files", url);
auto topDir = tmpDir + "/" + members.begin()->name;
lastModified = lstat(topDir).st_mtime;
unpackedStorePath = store->addToStore(name, topDir, FileIngestionMethod::Recursive, htSHA256, defaultPathFilter, NoRepair);
PosixSourceAccessor accessor;
unpackedStorePath = store->addToStore(name, accessor, CanonPath { topDir }, FileIngestionMethod::Recursive, HashAlgorithm::SHA256, {}, defaultPathFilter, NoRepair);
}
Attrs infoAttrs({
@ -169,14 +170,14 @@ DownloadTarballResult downloadTarball(
infoAttrs.emplace("immutableUrl", *res.immutableUrl);
getCache()->add(
store,
*store,
inAttrs,
infoAttrs,
*unpackedStorePath,
locked);
return {
.tree = Tree { .actualPath = store->toRealPath(*unpackedStorePath), .storePath = std::move(*unpackedStorePath) },
.storePath = std::move(*unpackedStorePath),
.lastModified = lastModified,
.immutableUrl = res.immutableUrl,
};
@ -185,7 +186,6 @@ DownloadTarballResult downloadTarball(
// An input scheme corresponding to a curl-downloadable resource.
struct CurlInputScheme : InputScheme
{
virtual const std::string inputType() const = 0;
const std::set<std::string> transportUrlSchemes = {"file", "http", "https"};
const bool hasTarballExtension(std::string_view path) const
@ -196,11 +196,11 @@ struct CurlInputScheme : InputScheme
|| hasSuffix(path, ".tar.zst");
}
virtual bool isValidURL(const ParsedURL & url) const = 0;
virtual bool isValidURL(const ParsedURL & url, bool requireTree) const = 0;
std::optional<Input> inputFromURL(const ParsedURL & _url) const override
std::optional<Input> inputFromURL(const ParsedURL & _url, bool requireTree) const override
{
if (!isValidURL(_url))
if (!isValidURL(_url, requireTree))
return std::nullopt;
Input input;
@ -223,22 +223,27 @@ struct CurlInputScheme : InputScheme
url.query.erase("rev");
url.query.erase("revCount");
input.attrs.insert_or_assign("type", inputType());
input.attrs.insert_or_assign("type", std::string { schemeName() });
input.attrs.insert_or_assign("url", url.to_string());
return input;
}
StringSet allowedAttrs() const override
{
return {
"type",
"url",
"narHash",
"name",
"unpack",
"rev",
"revCount",
"lastModified",
};
}
std::optional<Input> inputFromAttrs(const Attrs & attrs) const override
{
auto type = maybeGetStrAttr(attrs, "type");
if (type != inputType()) return {};
// FIXME: some of these only apply to TarballInputScheme.
std::set<std::string> allowedNames = {"type", "url", "narHash", "name", "unpack", "rev", "revCount"};
for (auto & [name, value] : attrs)
if (!allowedNames.count(name))
throw Error("unsupported %s input attribute '%s'", *type, name);
Input input;
input.attrs = attrs;
@ -252,28 +257,22 @@ struct CurlInputScheme : InputScheme
// NAR hashes are preferred over file hashes since tar/zip
// files don't have a canonical representation.
if (auto narHash = input.getNarHash())
url.query.insert_or_assign("narHash", narHash->to_string(SRI, true));
url.query.insert_or_assign("narHash", narHash->to_string(HashFormat::SRI, true));
return url;
}
bool hasAllInfo(const Input & input) const override
{
return true;
}
};
struct FileInputScheme : CurlInputScheme
{
const std::string inputType() const override { return "file"; }
std::string_view schemeName() const override { return "file"; }
bool isValidURL(const ParsedURL & url) const override
bool isValidURL(const ParsedURL & url, bool requireTree) const override
{
auto parsedUrlScheme = parseUrlScheme(url.scheme);
return transportUrlSchemes.count(std::string(parsedUrlScheme.transport))
&& (parsedUrlScheme.application
? parsedUrlScheme.application.value() == inputType()
: !hasTarballExtension(url.path));
? parsedUrlScheme.application.value() == schemeName()
: (!requireTree && !hasTarballExtension(url.path)));
}
std::pair<StorePath, Input> fetch(ref<Store> store, const Input & input) override
@ -285,16 +284,16 @@ struct FileInputScheme : CurlInputScheme
struct TarballInputScheme : CurlInputScheme
{
const std::string inputType() const override { return "tarball"; }
std::string_view schemeName() const override { return "tarball"; }
bool isValidURL(const ParsedURL & url) const override
bool isValidURL(const ParsedURL & url, bool requireTree) const override
{
auto parsedUrlScheme = parseUrlScheme(url.scheme);
return transportUrlSchemes.count(std::string(parsedUrlScheme.transport))
&& (parsedUrlScheme.application
? parsedUrlScheme.application.value() == inputType()
: hasTarballExtension(url.path));
? parsedUrlScheme.application.value() == schemeName()
: (requireTree || hasTarballExtension(url.path)));
}
std::pair<StorePath, Input> fetch(ref<Store> store, const Input & _input) override
@ -312,7 +311,10 @@ struct TarballInputScheme : CurlInputScheme
input = immutableInput;
}
return {result.tree.storePath, std::move(input)};
if (result.lastModified && !input.attrs.contains("lastModified"))
input.attrs.insert_or_assign("lastModified", uint64_t(result.lastModified));
return {result.storePath, std::move(input)};
}
};
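
Editor's note: a rough illustration of the new `allowedAttrs()` mechanism, under which attribute validation happens generically rather than in the per-scheme loop removed above. The URL and values are placeholders, and the long-standing `Input::fromAttrs` entry point is assumed.

```cpp
#include "fetchers.hh"
#include <cstdint>

using namespace nix::fetchers;

// Sketch: construct a tarball input from attributes. Anything outside the set
// returned by allowedAttrs() above ("type", "url", "narHash", "name",
// "unpack", "rev", "revCount", "lastModified") is now rejected generically.
Input exampleTarballInput()
{
    Attrs attrs;
    attrs.emplace("type", "tarball");
    attrs.emplace("url", "https://example.org/src.tar.gz"); // placeholder
    attrs.emplace("lastModified", uint64_t(1700000000));    // newly allowed attr
    return Input::fromAttrs(std::move(attrs));
}
```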

View file

@ -0,0 +1,43 @@
#pragma once
#include "types.hh"
#include "path.hh"
#include <optional>
namespace nix {
class Store;
}
namespace nix::fetchers {
struct DownloadFileResult
{
StorePath storePath;
std::string etag;
std::string effectiveUrl;
std::optional<std::string> immutableUrl;
};
DownloadFileResult downloadFile(
ref<Store> store,
const std::string & url,
const std::string & name,
bool locked,
const Headers & headers = {});
struct DownloadTarballResult
{
StorePath storePath;
time_t lastModified;
std::optional<std::string> immutableUrl;
};
DownloadTarballResult downloadTarball(
ref<Store> store,
const std::string & url,
const std::string & name,
bool locked,
const Headers & headers = {});
}
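
Editor's note: since the result struct no longer carries a `Tree`, callers resolve the on-disk location themselves. A minimal usage sketch against the interface declared above, assuming an already-opened store; the URL and name are placeholders.

```cpp
#include "tarball.hh"
#include "store-api.hh"
#include "logging.hh"

using namespace nix;
using namespace nix::fetchers;

// Sketch: fetch and unpack a tarball, then report where it landed.
void fetchExample(ref<Store> store)
{
    auto res = downloadTarball(
        store,
        "https://example.org/src.tar.gz", // placeholder URL
        "source",                         // store path name
        /* locked */ false);

    // The caller derives the real path from the store path directly.
    printInfo("unpacked to %s (lastModified %d)",
        store->toRealPath(res.storePath), res.lastModified);

    if (res.immutableUrl)
        printInfo("locked URL: %s", *res.immutableUrl);
}
```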

View file

@ -1,6 +1,9 @@
#include "common-args.hh"
#include "args/root.hh"
#include "globals.hh"
#include "logging.hh"
#include "loggers.hh"
#include "util.hh"
namespace nix {
@ -34,21 +37,21 @@ MixCommonArgs::MixCommonArgs(const std::string & programName)
.description = "Set the Nix configuration setting *name* to *value* (overriding `nix.conf`).",
.category = miscCategory,
.labels = {"name", "value"},
.handler = {[](std::string name, std::string value) {
.handler = {[this](std::string name, std::string value) {
try {
globalConfig.set(name, value);
} catch (UsageError & e) {
if (!completions)
if (!getRoot().completions)
warn(e.what());
}
}},
.completer = [](size_t index, std::string_view prefix) {
.completer = [](AddCompletions & completions, size_t index, std::string_view prefix) {
if (index == 0) {
std::map<std::string, Config::SettingInfo> settings;
globalConfig.getSettings(settings);
for (auto & s : settings)
if (hasPrefix(s.first, prefix))
completions->add(s.first, fmt("Set the `%s` setting.", s.first));
completions.add(s.first, fmt("Set the `%s` setting.", s.first));
}
}
});
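
Editor's note: completers no longer reach for a global `completions` pointer; they receive an `AddCompletions &` sink. A sketch of a flag written against the new callback shape — the mixin name, flag and suggestion set are made up for illustration.

```cpp
#include "args.hh"
#include "util.hh"

using namespace nix;

// Hypothetical mixin showing the new completer signature.
struct MixLogFormatExample : virtual Args
{
    std::string logFormatStr = "bar";

    MixLogFormatExample()
    {
        addFlag({
            .longName = "log-format",
            .description = "Set the log output format.",
            .labels = {"format"},
            .handler = {&logFormatStr},
            .completer = [](AddCompletions & completions, size_t index, std::string_view prefix) {
                // Offer the fixed set of formats matching what was typed so far.
                for (auto & f : {"raw", "internal-json", "bar", "bar-with-logs"})
                    if (hasPrefix(f, prefix))
                        completions.add(f, fmt("Use the `%s` log format.", f));
            },
        });
    }
};
```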

View file

@ -14,4 +14,4 @@ libmain_LIBS = libstore libutil
libmain_ALLOW_UNDEFINED = 1
$(eval $(call install-file-in, $(d)/nix-main.pc, $(libdir)/pkgconfig, 0644))
$(eval $(call install-file-in, $(buildprefix)$(d)/nix-main.pc, $(libdir)/pkgconfig, 0644))

View file

@ -1,6 +1,6 @@
#include "loggers.hh"
#include "environment-variables.hh"
#include "progress-bar.hh"
#include "util.hh"
namespace nix {

View file

@ -1,5 +1,5 @@
#include "progress-bar.hh"
#include "util.hh"
#include "terminal.hh"
#include "sync.hh"
#include "store-api.hh"
#include "names.hh"
@ -108,7 +108,8 @@ public:
stop();
}
void stop() override
/* Called by destructor, can't be overridden */
void stop() override final
{
{
auto state(state_.lock());
@ -339,6 +340,14 @@ public:
state->activitiesByType[type].expected += j;
update(*state);
}
else if (type == resFetchStatus) {
auto i = state->its.find(act);
assert(i != state->its.end());
ActInfo & actInfo = *i->second;
actInfo.lastLine = getS(fields, 0);
update(*state);
}
}
void update(State & state)
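
Editor's note: for context, a sketch of the producing side — an activity publishing transient status text via `resFetchStatus`, which the handler added above now renders as the activity's last line. The activity type, level and messages are placeholders.

```cpp
#include "logging.hh"

using namespace nix;

// Sketch: emit fetch-status updates from some long-running fetch. With the
// handler added above, the latest string replaces the activity's last line
// in the progress bar instead of being dropped.
void reportFetchStatus()
{
    Activity act(*logger, lvlInfo, actUnknown, "fetching example input");
    act.result(resFetchStatus, std::string("resolving example.org ..."));
    // ... later, as the transfer proceeds ...
    act.result(resFetchStatus, std::string("downloading (42%)"));
}
```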

View file

@ -1,10 +1,11 @@
#include "globals.hh"
#include "current-process.hh"
#include "shared.hh"
#include "store-api.hh"
#include "gc-store.hh"
#include "util.hh"
#include "loggers.hh"
#include "progress-bar.hh"
#include "signals.hh"
#include <algorithm>
#include <cctype>
@ -379,9 +380,9 @@ RunPager::RunPager()
});
pid.setKillSignal(SIGINT);
stdout = fcntl(STDOUT_FILENO, F_DUPFD_CLOEXEC, 0);
std_out = fcntl(STDOUT_FILENO, F_DUPFD_CLOEXEC, 0);
if (dup2(toPager.writeSide.get(), STDOUT_FILENO) == -1)
throw SysError("dupping stdout");
throw SysError("dupping standard output");
}
@ -390,7 +391,7 @@ RunPager::~RunPager()
try {
if (pid != -1) {
std::cout.flush();
dup2(stdout, STDOUT_FILENO);
dup2(std_out, STDOUT_FILENO);
pid.wait();
}
} catch (...) {
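
Editor's note: the saved descriptor is now kept in `std_out` to avoid colliding with the `stdout` macro from the C library; usage is unchanged. A minimal sketch of how `RunPager` is typically used:

```cpp
#include "shared.hh"
#include <iostream>

using namespace nix;

// Sketch: while the RunPager instance is alive, standard output is redirected
// to the configured pager (NIX_PAGER / PAGER, when set); the destructor
// restores the descriptor saved in std_out and waits for the pager to exit.
void showLongListing()
{
    RunPager pager;
    for (int i = 0; i < 100; ++i)
        std::cout << "line " << i << "\n";
}   // pager destructor runs here: output restored, pager reaped
```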

Some files were not shown because too many files have changed in this diff