mirror of https://github.com/NixOS/nix

Merge remote-tracking branch 'upstream/master' into fix-and-ci-static-builds

John Ericson 2020-10-09 18:26:47 +00:00
commit 39de73550d
193 changed files with 3689 additions and 2107 deletions


@ -44,7 +44,7 @@ static bool allSupportedLocally(Store & store, const std::set<std::string>& requ
return true;
}
static int _main(int argc, char * * argv)
static int main_build_remote(int argc, char * * argv)
{
{
logger = makeJSONLogger(*logger);
@ -297,4 +297,4 @@ connected:
}
}
static RegisterLegacyCommand s1("build-remote", _main);
static RegisterLegacyCommand r_build_remote("build-remote", main_build_remote);


@ -11,7 +11,7 @@ namespace nix::eval_cache {
MakeError(CachedEvalError, EvalError);
class AttrDb;
struct AttrDb;
class AttrCursor;
class EvalCache : public std::enable_shared_from_this<EvalCache>


@ -87,6 +87,7 @@ static void printValue(std::ostream & str, std::set<const Value *> & active, con
else if (*i == '\n') str << "\\n";
else if (*i == '\r') str << "\\r";
else if (*i == '\t') str << "\\t";
else if (*i == '$' && *(i+1) == '{') str << "\\" << *i;
else str << *i;
str << "\"";
break;
@ -355,6 +356,7 @@ EvalState::EvalState(const Strings & _searchPath, ref<Store> store)
, sEpsilon(symbols.create(""))
, repair(NoRepair)
, store(store)
, regexCache(makeRegexCache())
, baseEnv(allocEnv(128))
, staticBaseEnv(false, 0)
{
@ -369,7 +371,11 @@ EvalState::EvalState(const Strings & _searchPath, ref<Store> store)
for (auto & i : _searchPath) addToSearchPath(i);
for (auto & i : evalSettings.nixPath.get()) addToSearchPath(i);
}
addToSearchPath("nix=" + canonPath(settings.nixDataDir + "/nix/corepkgs", true));
try {
addToSearchPath("nix=" + canonPath(settings.nixDataDir + "/nix/corepkgs", true));
} catch (Error &) {
}
if (evalSettings.restrictEval || evalSettings.pureEval) {
allowedPaths = PathSet();
@ -1348,7 +1354,7 @@ void EvalState::autoCallFunction(Bindings & args, Value & fun, Value & res)
}
Value * actualArgs = allocValue();
mkAttrs(*actualArgs, fun.lambda.fun->formals->formals.size());
mkAttrs(*actualArgs, std::max(static_cast<uint32_t>(fun.lambda.fun->formals->formals.size()), args.size()));
if (fun.lambda.fun->formals->ellipsis) {
// If the formals have an ellipsis (eg the function accepts extra args) pass
@ -2075,7 +2081,7 @@ Strings EvalSettings::getDefaultNixPath()
EvalSettings evalSettings;
static GlobalConfig::Register r1(&evalSettings);
static GlobalConfig::Register rEvalSettings(&evalSettings);
}


@ -6,7 +6,6 @@
#include "symbol-table.hh"
#include "config.hh"
#include <regex>
#include <map>
#include <optional>
#include <unordered_map>
@ -65,6 +64,11 @@ typedef std::list<SearchPathElem> SearchPath;
void initGC();
struct RegexCache;
std::shared_ptr<RegexCache> makeRegexCache();
class EvalState
{
public:
@ -120,7 +124,7 @@ private:
std::unordered_map<Path, Path> resolvedPaths;
/* Cache used by prim_match(). */
std::unordered_map<std::string, std::regex> regexCache;
std::shared_ptr<RegexCache> regexCache;
public:

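The change above keeps <regex> out of eval.hh by forward-declaring the cache and exposing only a factory that returns a std::shared_ptr<RegexCache>; the definition and its only user (prim_match) live in primops.cc later in this diff. A minimal stand-alone sketch of that pattern, collapsed into a single file for brevity (the getCached helper is hypothetical, not part of the actual change):

#include <iostream>
#include <memory>
#include <regex>
#include <string>
#include <unordered_map>

// --- what the header exposes: no <regex> needed there ---
struct RegexCache;
std::shared_ptr<RegexCache> makeRegexCache();

// --- what the implementation file defines ---
struct RegexCache
{
    // compiled regexes, keyed by their source string
    std::unordered_map<std::string, std::regex> cache;
};

std::shared_ptr<RegexCache> makeRegexCache()
{
    return std::make_shared<RegexCache>();
}

// Look up (or compile and memoise) a regex, roughly what prim_match does.
static const std::regex & getCached(RegexCache & c, const std::string & re)
{
    auto it = c.cache.find(re);
    if (it == c.cache.end())
        it = c.cache.emplace(re, std::regex(re, std::regex::extended)).first;
    return it->second;
}

int main()
{
    auto cache = makeRegexCache();
    std::cout << std::regex_match("abc123", getCached(*cache, "[a-z]+[0-9]+")) << "\n"; // prints 1
}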

@ -12,7 +12,7 @@ using namespace flake;
namespace flake {
typedef std::pair<Tree, FlakeRef> FetchedFlake;
typedef std::pair<fetchers::Tree, FlakeRef> FetchedFlake;
typedef std::vector<std::pair<FlakeRef, FetchedFlake>> FlakeCache;
static std::optional<FetchedFlake> lookupInFlakeCache(
@ -48,17 +48,17 @@ static std::tuple<fetchers::Tree, FlakeRef, FlakeRef> fetchOrSubstituteTree(
resolvedRef = originalRef.resolve(state.store);
auto fetchedResolved = lookupInFlakeCache(flakeCache, originalRef);
if (!fetchedResolved) fetchedResolved.emplace(resolvedRef.fetchTree(state.store));
flakeCache.push_back({resolvedRef, fetchedResolved.value()});
fetched.emplace(fetchedResolved.value());
flakeCache.push_back({resolvedRef, *fetchedResolved});
fetched.emplace(*fetchedResolved);
}
else {
throw Error("'%s' is an indirect flake reference, but registry lookups are not allowed", originalRef);
}
}
flakeCache.push_back({originalRef, fetched.value()});
flakeCache.push_back({originalRef, *fetched});
}
auto [tree, lockedRef] = fetched.value();
auto [tree, lockedRef] = *fetched;
debug("got tree '%s' from '%s'",
state.store->printStorePath(tree.storePath), lockedRef);
@ -215,10 +215,9 @@ static Flake getFlake(
if (auto outputs = vInfo.attrs->get(sOutputs)) {
expectType(state, tLambda, *outputs->value, *outputs->pos);
flake.vOutputs = allocRootValue(outputs->value);
if ((*flake.vOutputs)->lambda.fun->matchAttrs) {
for (auto & formal : (*flake.vOutputs)->lambda.fun->formals->formals) {
if (outputs->value->lambda.fun->matchAttrs) {
for (auto & formal : outputs->value->lambda.fun->formals->formals) {
if (formal.name != state.sSelf)
flake.inputs.emplace(formal.name, FlakeInput {
.ref = parseFlakeRef(formal.name)
@ -248,7 +247,7 @@ Flake getFlake(EvalState & state, const FlakeRef & originalRef, bool allowLookup
}
/* Compute an in-memory lock file for the specified top-level flake,
and optionally write it to file, it the flake is writable. */
and optionally write it to file, if the flake is writable. */
LockedFlake lockFlake(
EvalState & state,
const FlakeRef & topRef,
@ -367,7 +366,7 @@ LockedFlake lockFlake(
/* If we have an --update-input flag for an input
of this input, then we must fetch the flake to
to update it. */
update it. */
auto lb = lockFlags.inputUpdates.lower_bound(inputPath);
auto hasChildUpdate =


@ -34,7 +34,6 @@ struct Flake
std::optional<std::string> description;
std::shared_ptr<const fetchers::Tree> sourceInfo;
FlakeInputs inputs;
RootValue vOutputs;
~Flake();
};


@ -1,6 +1,7 @@
#include "flakeref.hh"
#include "store-api.hh"
#include "url.hh"
#include "url-parts.hh"
#include "fetchers.hh"
#include "registry.hh"
@ -15,10 +16,10 @@ const static std::string subDirRegex = subDirElemRegex + "(?:/" + subDirElemRege
std::string FlakeRef::to_string() const
{
auto url = input.toURL();
std::map<std::string, std::string> extraQuery;
if (subdir != "")
url.query.insert_or_assign("dir", subdir);
return url.to_string();
extraQuery.insert_or_assign("dir", subdir);
return input.toURLString(extraQuery);
}
fetchers::Attrs FlakeRef::toAttrs() const
@ -156,7 +157,8 @@ std::pair<FlakeRef, std::string> parseFlakeRefWithFragment(
} else {
if (!hasPrefix(path, "/"))
throw BadURL("flake reference '%s' is not an absolute path", url);
path = canonPath(path);
auto query = decodeQuery(match[2]);
path = canonPath(path + "/" + get(query, "dir").value_or(""));
}
fetchers::Attrs attrs;


@ -1,5 +1,6 @@
#include "lockfile.hh"
#include "store-api.hh"
#include "url-parts.hh"
#include <nlohmann/json.hpp>
@ -12,12 +13,12 @@ FlakeRef getFlakeRef(
{
auto i = json.find(attr);
if (i != json.end()) {
auto attrs = jsonToAttrs(*i);
auto attrs = fetchers::jsonToAttrs(*i);
// FIXME: remove when we drop support for version 5.
if (info) {
auto j = json.find(info);
if (j != json.end()) {
for (auto k : jsonToAttrs(*j))
for (auto k : fetchers::jsonToAttrs(*j))
attrs.insert_or_assign(k.first, k.second);
}
}


@ -6,13 +6,11 @@
namespace nix {
class Store;
struct StorePath;
class StorePath;
}
namespace nix::flake {
using namespace fetchers;
typedef std::vector<FlakeId> InputPath;
struct LockedNode;


@ -38,8 +38,11 @@ DrvInfo::DrvInfo(EvalState & state, ref<Store> store, const std::string & drvPat
auto i = drv.outputs.find(outputName);
if (i == drv.outputs.end())
throw Error("derivation '%s' does not have output '%s'", store->printStorePath(drvPath), outputName);
auto & [outputName, output] = *i;
outPath = store->printStorePath(i->second.path(*store, drv.name));
auto optStorePath = output.path(*store, drv.name, outputName);
if (optStorePath)
outPath = store->printStorePath(*optStorePath);
}
@ -77,12 +80,15 @@ string DrvInfo::queryDrvPath() const
string DrvInfo::queryOutPath() const
{
if (outPath == "" && attrs) {
if (!outPath && attrs) {
Bindings::iterator i = attrs->find(state->sOutPath);
PathSet context;
outPath = i != attrs->end() ? state->coerceToPath(*i->pos, *i->value, context) : "";
if (i != attrs->end())
outPath = state->coerceToPath(*i->pos, *i->value, context);
}
return outPath;
if (!outPath)
throw UnimplementedError("CA derivations are not yet supported");
return *outPath;
}


@ -20,7 +20,7 @@ private:
mutable string name;
mutable string system;
mutable string drvPath;
mutable string outPath;
mutable std::optional<string> outPath;
mutable string outputName;
Outputs outputs;


@ -115,6 +115,14 @@ public:
{
return handle_value<void(Value&, const char*)>(mkString, val.c_str());
}
#if NLOHMANN_JSON_VERSION_MAJOR >= 3 && NLOHMANN_JSON_VERSION_MINOR >= 8
bool binary(binary_t&)
{
// This function ought to be unreachable
assert(false);
return true;
}
#endif
bool start_object(std::size_t len)
{


@ -42,6 +42,6 @@ $(eval $(call install-file-in, $(d)/nix-expr.pc, $(prefix)/lib/pkgconfig, 0644))
$(foreach i, $(wildcard src/libexpr/flake/*.hh), \
$(eval $(call install-file-in, $(i), $(includedir)/nix/flake, 0644)))
$(d)/primops.cc: $(d)/imported-drv-to-derivation.nix.gen.hh
$(d)/primops.cc: $(d)/imported-drv-to-derivation.nix.gen.hh $(d)/primops/derivation.nix.gen.hh
$(d)/flake/flake.cc: $(d)/flake/call-flake.nix.gen.hh


@ -614,8 +614,7 @@ Path resolveExprPath(Path path)
// Basic cycle/depth limit to avoid infinite loops.
if (++followCount >= maxFollow)
throw Error("too many symbolic links encountered while traversing the path '%s'", path);
if (lstat(path.c_str(), &st))
throw SysError("getting status of '%s'", path);
st = lstat(path);
if (!S_ISLNK(st.st_mode)) break;
path = absPath(readLink(path), dirOf(path));
}


@ -44,16 +44,6 @@ void EvalState::realiseContext(const PathSet & context)
throw InvalidPathError(store->printStorePath(ctx));
if (!outputName.empty() && ctx.isDerivation()) {
drvs.push_back(StorePathWithOutputs{ctx, {outputName}});
/* Add the output of this derivation to the allowed
paths. */
if (allowedPaths) {
auto drv = store->derivationFromPath(ctx);
DerivationOutputs::iterator i = drv.outputs.find(outputName);
if (i == drv.outputs.end())
throw Error("derivation '%s' does not have an output named '%s'", ctxS, outputName);
allowedPaths->insert(store->printStorePath(i->second.path(*store, drv.name)));
}
}
}
@ -69,8 +59,50 @@ void EvalState::realiseContext(const PathSet & context)
store->queryMissing(drvs, willBuild, willSubstitute, unknown, downloadSize, narSize);
store->buildPaths(drvs);
/* Add the outputs of these derivations to the allowed
paths. */
if (allowedPaths) {
for (auto & [drvPath, outputs] : drvs) {
auto outputPaths = store->queryDerivationOutputMap(drvPath);
for (auto & outputName : outputs) {
if (outputPaths.count(outputName) == 0)
throw Error("derivation '%s' does not have an output named '%s'",
store->printStorePath(drvPath), outputName);
allowedPaths->insert(store->printStorePath(outputPaths.at(outputName)));
}
}
}
}
/* Add an attribute to the given attribute map from the output name to
the output path, or a placeholder.
Where possible the path is used, but for floating CA derivations we
may not know it. For sake of determinism we always assume we don't
and instead put in a placeholder. In either case, however, the
string context will contain the drv path and output name, so
downstream derivations will have the proper dependency, and in
addition, before building, the placeholder will be rewritten to be
the actual path.
The 'drv' and 'drvPath' outputs must correspond. */
static void mkOutputString(EvalState & state, Value & v,
const StorePath & drvPath, const BasicDerivation & drv,
std::pair<string, DerivationOutput> o)
{
auto optOutputPath = o.second.path(*state.store, drv.name, o.first);
mkString(
*state.allocAttr(v, state.symbols.create(o.first)),
optOutputPath
? state.store->printStorePath(*optOutputPath)
/* Downstream we would substitute this for an actual path once
we build the floating CA derivation */
/* FIXME: we need to depend on the basic derivation, not
derivation */
: downstreamPlaceholder(*state.store, drvPath, o.first),
{"!" + o.first + "!" + state.store->printStorePath(drvPath)});
}
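A hypothetical illustration of the rule the comment above describes: emit the real store path when the output path is statically known, otherwise a deterministic placeholder derived from the drv path and output name (the placeholder format below is made up for the sketch; the real one comes from downstreamPlaceholder and is rewritten to the actual path before building):

#include <functional>
#include <iostream>
#include <optional>
#include <string>

std::string outputString(const std::string & drvPath,
    const std::string & outputName,
    const std::optional<std::string> & knownPath)
{
    if (knownPath)
        return *knownPath;                 // statically known (input-addressed) output
    // floating CA output: a stable stand-in, replaced by the real path later
    return "/placeholder-" + std::to_string(std::hash<std::string>{}(drvPath + "!" + outputName));
}

int main()
{
    std::cout << outputString("/nix/store/aaa-foo.drv", "out", std::string("/nix/store/bbb-foo")) << "\n";
    std::cout << outputString("/nix/store/aaa-foo.drv", "out", std::nullopt) << "\n";
}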
/* Load and evaluate an expression from path specified by the
argument. */
@ -114,9 +146,8 @@ static void import(EvalState & state, const Pos & pos, Value & vPath, Value * vS
state.mkList(*outputsVal, drv.outputs.size());
unsigned int outputs_index = 0;
for (const auto & o : drv.outputsAndPaths(*state.store)) {
v2 = state.allocAttr(w, state.symbols.create(o.first));
mkString(*v2, state.store->printStorePath(o.second.second), {"!" + o.first + "!" + path});
for (const auto & o : drv.outputs) {
mkOutputString(state, w, storePath, drv, o);
outputsVal->listElems()[outputs_index] = state.allocValue();
mkString(*(outputsVal->listElems()[outputs_index++]), o.first);
}
@ -1080,16 +1111,18 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
/* Optimisation, but required in read-only mode! because in that
case we don't actually write store derivations, so we can't
read them later. */
drvHashes.insert_or_assign(drvPath,
hashDerivationModulo(*state.store, Derivation(drv), false));
read them later.
However, we don't bother doing this for floating CA derivations because
their "hash modulo" is indeterminate until built. */
if (drv.type() != DerivationType::CAFloating)
drvHashes.insert_or_assign(drvPath,
hashDerivationModulo(*state.store, Derivation(drv), false));
state.mkAttrs(v, 1 + drv.outputs.size());
mkString(*state.allocAttr(v, state.sDrvPath), drvPathS, {"=" + drvPathS});
for (auto & i : drv.outputsAndPaths(*state.store)) {
mkString(*state.allocAttr(v, state.symbols.create(i.first)),
state.store->printStorePath(i.second.second), {"!" + i.first + "!" + drvPathS});
}
for (auto & i : drv.outputs)
mkOutputString(state, v, drvPath, drv, i);
v.attrs->sort();
}
@ -1671,7 +1704,7 @@ static RegisterPrimOp primop_toFile({
...
cp ${configFile} $out/etc/foo.conf
";
```
```
Note that `${configFile}` is an
[antiquotation](language-values.md), so the result of the
@ -2203,6 +2236,10 @@ static RegisterPrimOp primop_catAttrs({
static void prim_functionArgs(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
state.forceValue(*args[0], pos);
if (args[0]->type == tPrimOpApp || args[0]->type == tPrimOp) {
state.mkAttrs(v, 0);
return;
}
if (args[0]->type != tLambda)
throw TypeError({
.hint = hintfmt("'functionArgs' requires a function"),
@ -3052,17 +3089,25 @@ static RegisterPrimOp primop_hashString({
.fun = prim_hashString,
});
/* Match a regular expression against a string and return either
null or a list containing substring matches. */
struct RegexCache
{
std::unordered_map<std::string, std::regex> cache;
};
std::shared_ptr<RegexCache> makeRegexCache()
{
return std::make_shared<RegexCache>();
}
void prim_match(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
auto re = state.forceStringNoCtx(*args[0], pos);
try {
auto regex = state.regexCache.find(re);
if (regex == state.regexCache.end())
regex = state.regexCache.emplace(re, std::regex(re, std::regex::extended)).first;
auto regex = state.regexCache->cache.find(re);
if (regex == state.regexCache->cache.end())
regex = state.regexCache->cache.emplace(re, std::regex(re, std::regex::extended)).first;
PathSet context;
const std::string str = state.forceString(*args[1], context, pos);
@ -3532,9 +3577,10 @@ void EvalState::createBaseEnv()
/* Add a wrapper around the derivation primop that computes the
`drvPath' and `outPath' attributes lazily. */
string path = canonPath(settings.nixDataDir + "/nix/corepkgs/derivation.nix", true);
sDerivationNix = symbols.create(path);
evalFile(path, v);
sDerivationNix = symbols.create("//builtin/derivation.nix");
eval(parse(
#include "primops/derivation.nix.gen.hh"
, foFile, sDerivationNix, "/", staticBaseEnv), v);
addConstant("derivation", v);
/* Now that we've added all primops, sort the `builtins' set,

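For context on the new `#include "primops/derivation.nix.gen.hh"` above: the build turns the Nix source file into a generated header containing the file as a string literal, so the `derivation` wrapper is compiled into the binary and parsed with the synthetic origin "//builtin/derivation.nix" instead of being read from nixDataDir at run time. A minimal stand-alone sketch of the idea (file and variable names are hypothetical, and the Nix body is trimmed; this is not the real generator output):

#include <iostream>

// Conceptually, derivation.nix.gen.hh is just the .nix file wrapped in a
// raw string literal by a build rule; embedding it amounts to:
static const char * derivationNixSrc = R"nix(
drvAttrs @ { outputs ? [ "out" ], ... }:
let strict = derivationStrict drvAttrs; in (builtins.head [ strict ])
)nix";

int main()
{
    // The evaluator would parse this string rather than resolve a path on disk.
    std::cout << derivationNixSrc;
}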

@ -1,3 +1,5 @@
#pragma once
#include "eval.hh"
#include <tuple>


@ -11,7 +11,7 @@ static void prim_unsafeDiscardStringContext(EvalState & state, const Pos & pos,
mkString(v, s, PathSet());
}
static RegisterPrimOp r1("__unsafeDiscardStringContext", 1, prim_unsafeDiscardStringContext);
static RegisterPrimOp primop_unsafeDiscardStringContext("__unsafeDiscardStringContext", 1, prim_unsafeDiscardStringContext);
static void prim_hasContext(EvalState & state, const Pos & pos, Value * * args, Value & v)
@ -21,7 +21,7 @@ static void prim_hasContext(EvalState & state, const Pos & pos, Value * * args,
mkBool(v, !context.empty());
}
static RegisterPrimOp r2("__hasContext", 1, prim_hasContext);
static RegisterPrimOp primop_hasContext("__hasContext", 1, prim_hasContext);
/* Sometimes we want to pass a derivation path (i.e. pkg.drvPath) to a
@ -42,7 +42,7 @@ static void prim_unsafeDiscardOutputDependency(EvalState & state, const Pos & po
mkString(v, s, context2);
}
static RegisterPrimOp r3("__unsafeDiscardOutputDependency", 1, prim_unsafeDiscardOutputDependency);
static RegisterPrimOp primop_unsafeDiscardOutputDependency("__unsafeDiscardOutputDependency", 1, prim_unsafeDiscardOutputDependency);
/* Extract the context of a string as a structured Nix value.
@ -127,7 +127,7 @@ static void prim_getContext(EvalState & state, const Pos & pos, Value * * args,
v.attrs->sort();
}
static RegisterPrimOp r4("__getContext", 1, prim_getContext);
static RegisterPrimOp primop_getContext("__getContext", 1, prim_getContext);
/* Append the given context to a given string.
@ -191,6 +191,6 @@ static void prim_appendContext(EvalState & state, const Pos & pos, Value * * arg
mkString(v, orig, context);
}
static RegisterPrimOp r5("__appendContext", 2, prim_appendContext);
static RegisterPrimOp primop_appendContext("__appendContext", 2, prim_appendContext);
}


@ -0,0 +1,27 @@
/* This is the implementation of the derivation builtin function.
It's actually a wrapper around the derivationStrict primop. */
drvAttrs @ { outputs ? [ "out" ], ... }:
let
strict = derivationStrict drvAttrs;
commonAttrs = drvAttrs // (builtins.listToAttrs outputsList) //
{ all = map (x: x.value) outputsList;
inherit drvAttrs;
};
outputToAttrListElement = outputName:
{ name = outputName;
value = commonAttrs // {
outPath = builtins.getAttr outputName strict;
drvPath = strict.drvPath;
type = "derivation";
inherit outputName;
};
};
outputsList = map outputToAttrListElement outputs;
in (builtins.head outputsList).value


@ -3,8 +3,7 @@
#include "store-api.hh"
#include "fetchers.hh"
#include "url.hh"
#include <regex>
#include "url-parts.hh"
namespace nix {
@ -88,6 +87,6 @@ static void prim_fetchMercurial(EvalState & state, const Pos & pos, Value * * ar
state.allowedPaths->insert(tree.actualPath);
}
static RegisterPrimOp r("fetchMercurial", 1, prim_fetchMercurial);
static RegisterPrimOp r_fetchMercurial("fetchMercurial", 1, prim_fetchMercurial);
}


@ -152,7 +152,7 @@ static void prim_fetchTree(EvalState & state, const Pos & pos, Value * * args, V
fetchTree(state, pos, args, v, std::nullopt);
}
static RegisterPrimOp r("fetchTree", 1, prim_fetchTree);
static RegisterPrimOp primop_fetchTree("fetchTree", 1, prim_fetchTree);
static void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,
const string & who, bool unpack, std::string name)


@ -88,6 +88,6 @@ static void prim_fromTOML(EvalState & state, const Pos & pos, Value * * args, Va
}
}
static RegisterPrimOp r("fromTOML", 1, prim_fromTOML);
static RegisterPrimOp primop_fromTOML("fromTOML", 1, prim_fromTOML);
}


@ -69,6 +69,14 @@ ParsedURL Input::toURL() const
return scheme->toURL(*this);
}
std::string Input::toURLString(const std::map<std::string, std::string> & extraQuery) const
{
auto url = toURL();
for (auto & attr : extraQuery)
url.query.insert(attr);
return url.to_string();
}
std::string Input::to_string() const
{
return toURL().to_string();


@ -23,7 +23,7 @@ struct InputScheme;
struct Input
{
friend class InputScheme;
friend struct InputScheme;
std::shared_ptr<InputScheme> scheme; // note: can be null
Attrs attrs;
@ -39,6 +39,8 @@ public:
ParsedURL toURL() const;
std::string toURLString(const std::map<std::string, std::string> & extraQuery = {}) const;
std::string to_string() const;
Attrs toAttrs() const;
@ -73,7 +75,7 @@ public:
StorePath computeStorePath(Store & store) const;
// Convience functions for common attributes.
// Convenience functions for common attributes.
std::string getType() const;
std::optional<Hash> getNarHash() const;
std::optional<std::string> getRef() const;
@ -84,6 +86,9 @@ public:
struct InputScheme
{
virtual ~InputScheme()
{ }
virtual std::optional<Input> inputFromURL(const ParsedURL & url) = 0;
virtual std::optional<Input> inputFromAttrs(const Attrs & attrs) = 0;
@ -119,12 +124,14 @@ DownloadFileResult downloadFile(
ref<Store> store,
const std::string & url,
const std::string & name,
bool immutable);
bool immutable,
const Headers & headers = {});
std::pair<Tree, time_t> downloadTarball(
ref<Store> store,
const std::string & url,
const std::string & name,
bool immutable);
bool immutable,
const Headers & headers = {});
}


@ -3,6 +3,7 @@
#include "globals.hh"
#include "tarfile.hh"
#include "store-api.hh"
#include "url-parts.hh"
#include <sys/time.h>
@ -451,6 +452,6 @@ struct GitInputScheme : InputScheme
}
};
static auto r1 = OnStartup([] { registerInputScheme(std::make_unique<GitInputScheme>()); });
static auto rGitInputScheme = OnStartup([] { registerInputScheme(std::make_unique<GitInputScheme>()); });
}


@ -3,19 +3,30 @@
#include "fetchers.hh"
#include "globals.hh"
#include "store-api.hh"
#include "types.hh"
#include "url-parts.hh"
#include <optional>
#include <nlohmann/json.hpp>
namespace nix::fetchers {
// A github or gitlab url
const static std::string urlRegexS = "[a-zA-Z0-9.]*"; // FIXME: check
std::regex urlRegex(urlRegexS, std::regex::ECMAScript);
struct DownloadUrl
{
std::string url;
Headers headers;
};
// A github or gitlab host
const static std::string hostRegexS = "[a-zA-Z0-9.]*"; // FIXME: check
std::regex hostRegex(hostRegexS, std::regex::ECMAScript);
struct GitArchiveInputScheme : InputScheme
{
virtual std::string type() = 0;
virtual std::optional<std::pair<std::string, std::string>> accessHeaderFromToken(const std::string & token) const = 0;
std::optional<Input> inputFromURL(const ParsedURL & url) override
{
if (url.scheme != type()) return {};
@ -50,9 +61,9 @@ struct GitArchiveInputScheme : InputScheme
throw BadURL("URL '%s' contains multiple branch/tag names", url.url);
ref = value;
}
else if (name == "url") {
if (!std::regex_match(value, urlRegex))
throw BadURL("URL '%s' contains an invalid instance url", url.url);
else if (name == "host") {
if (!std::regex_match(value, hostRegex))
throw BadURL("URL '%s' contains an invalid instance host", url.url);
host_url = value;
}
// FIXME: barf on unsupported attributes
@ -67,7 +78,7 @@ struct GitArchiveInputScheme : InputScheme
input.attrs.insert_or_assign("repo", path[1]);
if (rev) input.attrs.insert_or_assign("rev", rev->gitRev());
if (ref) input.attrs.insert_or_assign("ref", *ref);
if (host_url) input.attrs.insert_or_assign("url", *host_url);
if (host_url) input.attrs.insert_or_assign("host", *host_url);
return input;
}
@ -77,7 +88,7 @@ struct GitArchiveInputScheme : InputScheme
if (maybeGetStrAttr(attrs, "type") != type()) return {};
for (auto & [name, value] : attrs)
if (name != "type" && name != "owner" && name != "repo" && name != "ref" && name != "rev" && name != "narHash" && name != "lastModified")
if (name != "type" && name != "owner" && name != "repo" && name != "ref" && name != "rev" && name != "narHash" && name != "lastModified" && name != "host")
throw Error("unsupported input attribute '%s'", name);
getStrAttr(attrs, "owner");
@ -129,9 +140,31 @@ struct GitArchiveInputScheme : InputScheme
return input;
}
std::optional<std::string> getAccessToken(const std::string & host) const
{
auto tokens = settings.accessTokens.get();
if (auto token = get(tokens, host))
return *token;
return {};
}
Headers makeHeadersWithAuthTokens(const std::string & host) const
{
Headers headers;
auto accessToken = getAccessToken(host);
if (accessToken) {
auto hdr = accessHeaderFromToken(*accessToken);
if (hdr)
headers.push_back(*hdr);
else
warn("Unrecognized access token for host '%s'", host);
}
return headers;
}
virtual Hash getRevFromRef(nix::ref<Store> store, const Input & input) const = 0;
virtual std::string getDownloadUrl(const Input & input) const = 0;
virtual DownloadUrl getDownloadUrl(const Input & input) const = 0;
std::pair<Tree, Input> fetch(ref<Store> store, const Input & _input) override
{
@ -160,7 +193,7 @@ struct GitArchiveInputScheme : InputScheme
auto url = getDownloadUrl(input);
auto [tree, lastModified] = downloadTarball(store, url, "source", true);
auto [tree, lastModified] = downloadTarball(store, url.url, "source", true, url.headers);
input.attrs.insert_or_assign("lastModified", lastModified);
@ -182,49 +215,52 @@ struct GitHubInputScheme : GitArchiveInputScheme
{
std::string type() override { return "github"; }
void addAccessToken(std::string & url) const
std::optional<std::pair<std::string, std::string>> accessHeaderFromToken(const std::string & token) const override
{
std::string accessToken = settings.githubAccessToken.get();
if (accessToken != "")
url += "?access_token=" + accessToken;
// Github supports PAT/OAuth2 tokens and HTTP Basic
// Authentication. The former simply specifies the token, the
// latter can use the token as the password. Only the first
// is used here. See
// https://developer.github.com/v3/#authentication and
// https://docs.github.com/en/developers/apps/authorizing-oath-apps
return std::pair<std::string, std::string>("Authorization", fmt("token %s", token));
}
Hash getRevFromRef(nix::ref<Store> store, const Input & input) const override
{
auto host_url = maybeGetStrAttr(input.attrs, "url").value_or("github.com");
auto host = maybeGetStrAttr(input.attrs, "host").value_or("github.com");
auto url = fmt("https://api.%s/repos/%s/%s/commits/%s", // FIXME: check
host_url, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"), *input.getRef());
host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"), *input.getRef());
addAccessToken(url);
Headers headers = makeHeadersWithAuthTokens(host);
auto json = nlohmann::json::parse(
readFile(
store->toRealPath(
downloadFile(store, url, "source", false).storePath)));
downloadFile(store, url, "source", false, headers).storePath)));
auto rev = Hash::parseAny(std::string { json["sha"] }, htSHA1);
debug("HEAD revision for '%s' is %s", url, rev.gitRev());
return rev;
}
std::string getDownloadUrl(const Input & input) const override
DownloadUrl getDownloadUrl(const Input & input) const override
{
// FIXME: use regular /archive URLs instead? api.github.com
// might have stricter rate limits.
auto host_url = maybeGetStrAttr(input.attrs, "url").value_or("github.com");
auto host = maybeGetStrAttr(input.attrs, "host").value_or("github.com");
auto url = fmt("https://api.%s/repos/%s/%s/tarball/%s", // FIXME: check if this is correct for self hosted instances
host_url, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"),
host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"),
input.getRev()->to_string(Base16, false));
addAccessToken(url);
return url;
Headers headers = makeHeadersWithAuthTokens(host);
return DownloadUrl { url, headers };
}
void clone(const Input & input, const Path & destDir) override
{
auto host_url = maybeGetStrAttr(input.attrs, "url").value_or("github.com");
auto host = maybeGetStrAttr(input.attrs, "host").value_or("github.com");
Input::fromURL(fmt("git+ssh://git@%s/%s/%s.git",
host_url, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo")))
host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo")))
.applyOverrides(input.getRef().value_or("HEAD"), input.getRev())
.clone(destDir);
}
@ -234,48 +270,71 @@ struct GitLabInputScheme : GitArchiveInputScheme
{
std::string type() override { return "gitlab"; }
std::optional<std::pair<std::string, std::string>> accessHeaderFromToken(const std::string & token) const override
{
// Gitlab supports 4 kinds of authorization, two of which are
// relevant here: OAuth2 and PAT (Private Access Token). The
// user can indicate which token is used by specifying the
// token as <TYPE>:<VALUE>, where type is "OAuth2" or "PAT".
// If the <TYPE> is unrecognized, this will fall back to
// treating this simply as <HDRNAME>:<HDRVAL>. See
// https://docs.gitlab.com/12.10/ee/api/README.html#authentication
auto fldsplit = token.find_first_of(':');
// n.b. C++20 would allow: if (token.starts_with("OAuth2:")) ...
if ("OAuth2" == token.substr(0, fldsplit))
return std::make_pair("Authorization", fmt("Bearer %s", token.substr(fldsplit+1)));
if ("PAT" == token.substr(0, fldsplit))
return std::make_pair("Private-token", token.substr(fldsplit+1));
warn("Unrecognized GitLab token type %s", token.substr(0, fldsplit));
return std::nullopt;
}
Hash getRevFromRef(nix::ref<Store> store, const Input & input) const override
{
auto host_url = maybeGetStrAttr(input.attrs, "url").value_or("gitlab.com");
auto host = maybeGetStrAttr(input.attrs, "host").value_or("gitlab.com");
// See rate limiting note below
auto url = fmt("https://%s/api/v4/projects/%s%%2F%s/repository/commits?ref_name=%s",
host_url, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"), *input.getRef());
host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"), *input.getRef());
Headers headers = makeHeadersWithAuthTokens(host);
auto json = nlohmann::json::parse(
readFile(
store->toRealPath(
downloadFile(store, url, "source", false).storePath)));
downloadFile(store, url, "source", false, headers).storePath)));
auto rev = Hash::parseAny(std::string(json[0]["id"]), htSHA1);
debug("HEAD revision for '%s' is %s", url, rev.gitRev());
return rev;
}
std::string getDownloadUrl(const Input & input) const override
DownloadUrl getDownloadUrl(const Input & input) const override
{
// FIXME: This endpoint has a rate limit threshold of 5 requests per minute
auto host_url = maybeGetStrAttr(input.attrs, "url").value_or("gitlab.com");
// This endpoint has a rate limit threshold that may be
// server-specific and vary based on whether the user is
// authenticated via an accessToken or not, but the usual rate
// is 10 reqs/sec/ip-addr. See
// https://docs.gitlab.com/ee/user/gitlab_com/index.html#gitlabcom-specific-rate-limits
auto host = maybeGetStrAttr(input.attrs, "host").value_or("gitlab.com");
auto url = fmt("https://%s/api/v4/projects/%s%%2F%s/repository/archive.tar.gz?sha=%s",
host_url, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"),
host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"),
input.getRev()->to_string(Base16, false));
/* # FIXME: add privat token auth (`curl --header "PRIVATE-TOKEN: <your_access_token>"`)
std::string accessToken = settings.githubAccessToken.get();
if (accessToken != "")
url += "?access_token=" + accessToken;*/
return url;
Headers headers = makeHeadersWithAuthTokens(host);
return DownloadUrl { url, headers };
}
void clone(const Input & input, const Path & destDir) override
{
auto host_url = maybeGetStrAttr(input.attrs, "url").value_or("gitlab.com");
auto host = maybeGetStrAttr(input.attrs, "host").value_or("gitlab.com");
// FIXME: get username somewhere
Input::fromURL(fmt("git+ssh://git@%s/%s/%s.git",
host_url, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo")))
host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo")))
.applyOverrides(input.getRef().value_or("HEAD"), input.getRev())
.clone(destDir);
}
};
static auto r1 = OnStartup([] { registerInputScheme(std::make_unique<GitHubInputScheme>()); });
static auto r2 = OnStartup([] { registerInputScheme(std::make_unique<GitLabInputScheme>()); });
static auto rGitHubInputScheme = OnStartup([] { registerInputScheme(std::make_unique<GitHubInputScheme>()); });
static auto rGitLabInputScheme = OnStartup([] { registerInputScheme(std::make_unique<GitLabInputScheme>()); });
}
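To make the host-keyed token handling above concrete, here is a small stand-alone sketch of how a per-host token is turned into the HTTP header each scheme expects: GitHub sends the token verbatim as "Authorization: token <value>", while GitLab expects an "OAuth2:" or "PAT:" prefix selecting between "Authorization: Bearer" and "Private-token". The Headers alias, the accessTokens map, and the token values are simplified stand-ins for the real Nix types and settings:

#include <iostream>
#include <map>
#include <optional>
#include <string>
#include <utility>
#include <vector>

using Header  = std::pair<std::string, std::string>;
using Headers = std::vector<Header>;

// GitHub: the token is sent verbatim as "Authorization: token <value>".
std::optional<Header> githubHeader(const std::string & token)
{
    return Header{"Authorization", "token " + token};
}

// GitLab: the user prefixes the token with its kind, "OAuth2:" or "PAT:".
std::optional<Header> gitlabHeader(const std::string & token)
{
    auto sep = token.find(':');
    auto kind = token.substr(0, sep);
    auto value = sep == std::string::npos ? std::string{} : token.substr(sep + 1);
    if (kind == "OAuth2") return Header{"Authorization", "Bearer " + value};
    if (kind == "PAT")    return Header{"Private-token", value};
    return std::nullopt;   // unrecognized kind: warn and send no header
}

int main()
{
    // roughly the shape of the host-keyed access-token settings
    std::map<std::string, std::string> accessTokens{
        {"github.com", "ghp_example"},
        {"gitlab.com", "PAT:glpat_example"},
    };

    for (auto & [host, token] : accessTokens) {
        auto hdr = host == "github.com" ? githubHeader(token) : gitlabHeader(token);
        if (hdr) std::cout << host << ": " << hdr->first << ": " << hdr->second << "\n";
    }
}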


@ -1,4 +1,5 @@
#include "fetchers.hh"
#include "url-parts.hh"
namespace nix::fetchers {
@ -99,6 +100,6 @@ struct IndirectInputScheme : InputScheme
}
};
static auto r1 = OnStartup([] { registerInputScheme(std::make_unique<IndirectInputScheme>()); });
static auto rIndirectInputScheme = OnStartup([] { registerInputScheme(std::make_unique<IndirectInputScheme>()); });
}


@ -3,6 +3,7 @@
#include "globals.hh"
#include "tarfile.hh"
#include "store-api.hh"
#include "url-parts.hh"
#include <sys/time.h>
@ -292,6 +293,6 @@ struct MercurialInputScheme : InputScheme
}
};
static auto r1 = OnStartup([] { registerInputScheme(std::make_unique<MercurialInputScheme>()); });
static auto rMercurialInputScheme = OnStartup([] { registerInputScheme(std::make_unique<MercurialInputScheme>()); });
}


@ -102,6 +102,6 @@ struct PathInputScheme : InputScheme
}
};
static auto r1 = OnStartup([] { registerInputScheme(std::make_unique<PathInputScheme>()); });
static auto rPathInputScheme = OnStartup([] { registerInputScheme(std::make_unique<PathInputScheme>()); });
}


@ -5,6 +5,7 @@
#include "store-api.hh"
#include "archive.hh"
#include "tarfile.hh"
#include "types.hh"
namespace nix::fetchers {
@ -12,7 +13,8 @@ DownloadFileResult downloadFile(
ref<Store> store,
const std::string & url,
const std::string & name,
bool immutable)
bool immutable,
const Headers & headers)
{
// FIXME: check store
@ -37,6 +39,7 @@ DownloadFileResult downloadFile(
return useCached();
FileTransferRequest request(url);
request.headers = headers;
if (cached)
request.expectedETag = getStrAttr(cached->infoAttrs, "etag");
FileTransferResult res;
@ -111,7 +114,8 @@ std::pair<Tree, time_t> downloadTarball(
ref<Store> store,
const std::string & url,
const std::string & name,
bool immutable)
bool immutable,
const Headers & headers)
{
Attrs inAttrs({
{"type", "tarball"},
@ -127,7 +131,7 @@ std::pair<Tree, time_t> downloadTarball(
getIntAttr(cached->infoAttrs, "lastModified")
};
auto res = downloadFile(store, url, name, immutable);
auto res = downloadFile(store, url, name, immutable, headers);
std::optional<StorePath> unpackedStorePath;
time_t lastModified;
@ -227,6 +231,6 @@ struct TarballInputScheme : InputScheme
}
};
static auto r1 = OnStartup([] { registerInputScheme(std::make_unique<TarballInputScheme>()); });
static auto rTarballInputScheme = OnStartup([] { registerInputScheme(std::make_unique<TarballInputScheme>()); });
}


@ -256,7 +256,7 @@ public:
}
else if (type == resBuildLogLine || type == resPostBuildLogLine) {
auto lastLine = trim(getS(fields, 0));
auto lastLine = chomp(getS(fields, 0));
if (!lastLine.empty()) {
auto i = state->its.find(act);
assert(i != state->its.end());


@ -386,18 +386,12 @@ RunPager::~RunPager()
}
string showBytes(uint64_t bytes)
{
return (format("%.2f MiB") % (bytes / (1024.0 * 1024.0))).str();
}
PrintFreed::~PrintFreed()
{
if (show)
std::cout << format("%1% store paths deleted, %2% freed\n")
% results.paths.size()
% showBytes(results.bytesFreed);
std::cout << fmt("%d store paths deleted, %s freed\n",
results.paths.size(),
showBytes(results.bytesFreed));
}
Exit::~Exit() { }


@ -11,6 +11,7 @@
#include "nar-accessor.hh"
#include "json.hh"
#include "thread-pool.hh"
#include "callback.hh"
#include <chrono>
#include <future>
@ -22,7 +23,8 @@
namespace nix {
BinaryCacheStore::BinaryCacheStore(const Params & params)
: Store(params)
: BinaryCacheStoreConfig(params)
, Store(params)
{
if (secretKeyFile != "")
secretKey = std::unique_ptr<SecretKey>(new SecretKey(readFile(secretKeyFile)));
@ -140,17 +142,10 @@ struct FileSource : FdSource
}
};
void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource,
RepairFlag repair, CheckSigsFlag checkSigs)
ref<const ValidPathInfo> BinaryCacheStore::addToStoreCommon(
Source & narSource, RepairFlag repair, CheckSigsFlag checkSigs,
std::function<ValidPathInfo(HashResult)> mkInfo)
{
assert(info.narSize);
if (!repair && isValidPath(info.path)) {
// FIXME: copyNAR -> null sink
narSource.drain();
return;
}
auto [fdTemp, fnTemp] = createTempFile();
AutoDelete autoDelete(fnTemp);
@ -160,13 +155,15 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource
/* Read the NAR simultaneously into a CompressionSink+FileSink (to
write the compressed NAR to disk), into a HashSink (to get the
NAR hash), and into a NarAccessor (to get the NAR listing). */
HashSink fileHashSink(htSHA256);
HashSink fileHashSink { htSHA256 };
std::shared_ptr<FSAccessor> narAccessor;
HashSink narHashSink { htSHA256 };
{
FdSink fileSink(fdTemp.get());
TeeSink teeSink(fileSink, fileHashSink);
auto compressionSink = makeCompressionSink(compression, teeSink);
TeeSource teeSource(narSource, *compressionSink);
TeeSink teeSinkCompressed { fileSink, fileHashSink };
auto compressionSink = makeCompressionSink(compression, teeSinkCompressed);
TeeSink teeSinkUncompressed { *compressionSink, narHashSink };
TeeSource teeSource { narSource, teeSinkUncompressed };
narAccessor = makeNarAccessor(teeSource);
compressionSink->finish();
fileSink.flush();
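The block above streams the NAR once while feeding several consumers simultaneously (compression plus file hash, uncompressed NAR hash, and the NAR accessor) via Tee sinks and sources. A toy stand-alone sketch of the tee idea, with simplified stand-ins for Nix's Sink and HashSink types:

#include <cstddef>
#include <iostream>
#include <string_view>

struct Sink
{
    virtual void operator()(std::string_view data) = 0;
    virtual ~Sink() = default;
};

// Forward every chunk to two downstream sinks, so the data is only read once.
struct TeeSink : Sink
{
    Sink & a, & b;
    TeeSink(Sink & a, Sink & b) : a(a), b(b) { }
    void operator()(std::string_view data) override { a(data); b(data); }
};

struct CountingSink : Sink   // stand-in for a HashSink: just counts bytes
{
    std::size_t n = 0;
    void operator()(std::string_view data) override { n += data.size(); }
};

struct PrintSink : Sink      // stand-in for the compressed FileSink
{
    void operator()(std::string_view data) override { std::cout << data; }
};

int main()
{
    CountingSink narHashSink;
    PrintSink fileSink;
    TeeSink tee { fileSink, narHashSink };
    for (std::string_view chunk : {"nar ", "chunk ", "data\n"})   // pretend NAR source
        tee(chunk);
    std::cout << "bytes seen by hash sink: " << narHashSink.n << "\n";
}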
@ -174,9 +171,8 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource
auto now2 = std::chrono::steady_clock::now();
auto info = mkInfo(narHashSink.finish());
auto narInfo = make_ref<NarInfo>(info);
narInfo->narSize = info.narSize;
narInfo->narHash = info.narHash;
narInfo->compression = compression;
auto [fileHash, fileSize] = fileHashSink.finish();
narInfo->fileHash = fileHash;
@ -298,6 +294,41 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource
writeNarInfo(narInfo);
stats.narInfoWrite++;
return narInfo;
}
void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource,
RepairFlag repair, CheckSigsFlag checkSigs)
{
if (!repair && isValidPath(info.path)) {
// FIXME: copyNAR -> null sink
narSource.drain();
return;
}
addToStoreCommon(narSource, repair, checkSigs, {[&](HashResult nar) {
/* FIXME reinstate these, once we can correctly do hash modulo sink as
needed. We need to throw here in case we uploaded a corrupted store path. */
// assert(info.narHash == nar.first);
// assert(info.narSize == nar.second);
return info;
}});
}
StorePath BinaryCacheStore::addToStoreFromDump(Source & dump, const string & name,
FileIngestionMethod method, HashType hashAlgo, RepairFlag repair)
{
if (method != FileIngestionMethod::Recursive || hashAlgo != htSHA256)
unsupported("addToStoreFromDump");
return addToStoreCommon(dump, repair, CheckSigs, [&](HashResult nar) {
ValidPathInfo info {
makeFixedOutputPath(method, nar.first, name),
nar.first,
};
info.narSize = nar.second;
return info;
})->path;
}
bool BinaryCacheStore::isValidPathUncached(const StorePath & storePath)
@ -365,50 +396,52 @@ void BinaryCacheStore::queryPathInfoUncached(const StorePath & storePath,
StorePath BinaryCacheStore::addToStore(const string & name, const Path & srcPath,
FileIngestionMethod method, HashType hashAlgo, PathFilter & filter, RepairFlag repair)
{
// FIXME: some cut&paste from LocalStore::addToStore().
/* FIXME: Make BinaryCacheStore::addToStoreCommon support
non-recursive+sha256 so we can just use the default
implementation of this method in terms of addToStoreFromDump. */
/* Read the whole path into memory. This is not a very scalable
method for very large paths, but `copyPath' is mainly used for
small files. */
StringSink sink;
std::optional<Hash> h;
HashSink sink { hashAlgo };
if (method == FileIngestionMethod::Recursive) {
dumpPath(srcPath, sink, filter);
h = hashString(hashAlgo, *sink.s);
} else {
auto s = readFile(srcPath);
dumpString(s, sink);
h = hashString(hashAlgo, s);
readFile(srcPath, sink);
}
auto h = sink.finish().first;
ValidPathInfo info {
makeFixedOutputPath(method, *h, name),
Hash::dummy, // Will be fixed in addToStore, which recomputes nar hash
};
auto source = StringSource { *sink.s };
addToStore(info, source, repair, CheckSigs);
return std::move(info.path);
auto source = sinkToSource([&](Sink & sink) {
dumpPath(srcPath, sink, filter);
});
return addToStoreCommon(*source, repair, CheckSigs, [&](HashResult nar) {
ValidPathInfo info {
makeFixedOutputPath(method, h, name),
nar.first,
};
info.narSize = nar.second;
info.ca = FixedOutputHash {
.method = method,
.hash = h,
};
return info;
})->path;
}
StorePath BinaryCacheStore::addTextToStore(const string & name, const string & s,
const StorePathSet & references, RepairFlag repair)
{
ValidPathInfo info {
computeStorePathForText(name, s, references),
Hash::dummy, // Will be fixed in addToStore, which recomputes nar hash
};
info.references = references;
auto textHash = hashString(htSHA256, s);
auto path = makeTextPath(name, textHash, references);
if (repair || !isValidPath(info.path)) {
StringSink sink;
dumpString(s, sink);
auto source = StringSource { *sink.s };
addToStore(info, source, repair, CheckSigs);
}
if (!repair && isValidPath(path))
return path;
return std::move(info.path);
auto source = StringSource { s };
return addToStoreCommon(source, repair, CheckSigs, [&](HashResult nar) {
ValidPathInfo info { path, nar.first };
info.narSize = nar.second;
info.ca = TextHash { textHash };
info.references = references;
return info;
})->path;
}
ref<FSAccessor> BinaryCacheStore::getFSAccessor()


@ -11,17 +11,21 @@ namespace nix {
struct NarInfo;
class BinaryCacheStore : public Store
struct BinaryCacheStoreConfig : virtual StoreConfig
{
public:
using StoreConfig::StoreConfig;
const Setting<std::string> compression{this, "xz", "compression", "NAR compression method ('xz', 'bzip2', or 'none')"};
const Setting<bool> writeNARListing{this, false, "write-nar-listing", "whether to write a JSON file listing the files in each NAR"};
const Setting<bool> writeDebugInfo{this, false, "index-debug-info", "whether to index DWARF debug info files by build ID"};
const Setting<Path> secretKeyFile{this, "", "secret-key", "path to secret key used to sign the binary cache"};
const Setting<Path> localNarCache{this, "", "local-nar-cache", "path to a local cache of NARs"};
const Setting<bool> parallelCompression{this, false, "parallel-compression",
const Setting<std::string> compression{(StoreConfig*) this, "xz", "compression", "NAR compression method ('xz', 'bzip2', or 'none')"};
const Setting<bool> writeNARListing{(StoreConfig*) this, false, "write-nar-listing", "whether to write a JSON file listing the files in each NAR"};
const Setting<bool> writeDebugInfo{(StoreConfig*) this, false, "index-debug-info", "whether to index DWARF debug info files by build ID"};
const Setting<Path> secretKeyFile{(StoreConfig*) this, "", "secret-key", "path to secret key used to sign the binary cache"};
const Setting<Path> localNarCache{(StoreConfig*) this, "", "local-nar-cache", "path to a local cache of NARs"};
const Setting<bool> parallelCompression{(StoreConfig*) this, false, "parallel-compression",
"enable multi-threading compression, available for xz only currently"};
};
class BinaryCacheStore : public Store, public virtual BinaryCacheStoreConfig
{
private:
@ -58,7 +62,7 @@ public:
public:
virtual void init();
virtual void init() override;
private:
@ -68,6 +72,10 @@ private:
void writeNarInfo(ref<NarInfo> narInfo);
ref<const ValidPathInfo> addToStoreCommon(
Source & narSource, RepairFlag repair, CheckSigsFlag checkSigs,
std::function<ValidPathInfo(HashResult)> mkInfo);
public:
bool isValidPathUncached(const StorePath & path) override;
@ -81,6 +89,9 @@ public:
void addToStore(const ValidPathInfo & info, Source & narSource,
RepairFlag repair, CheckSigsFlag checkSigs) override;
StorePath addToStoreFromDump(Source & dump, const string & name,
FileIngestionMethod method, HashType hashAlgo, RepairFlag repair) override;
StorePath addToStore(const string & name, const Path & srcPath,
FileIngestionMethod method, HashType hashAlgo,
PathFilter & filter, RepairFlag repair) override;

File diff suppressed because it is too large


@ -4,11 +4,13 @@
namespace nix {
std::string FixedOutputHash::printMethodAlgo() const {
std::string FixedOutputHash::printMethodAlgo() const
{
return makeFileIngestionPrefix(method) + printHashType(hash.type);
}
std::string makeFileIngestionPrefix(const FileIngestionMethod m) {
std::string makeFileIngestionPrefix(const FileIngestionMethod m)
{
switch (m) {
case FileIngestionMethod::Flat:
return "";
@ -26,7 +28,8 @@ std::string makeFixedOutputCA(FileIngestionMethod method, const Hash & hash)
+ hash.to_string(Base32, true);
}
std::string renderContentAddress(ContentAddress ca) {
std::string renderContentAddress(ContentAddress ca)
{
return std::visit(overloaded {
[](TextHash th) {
return "text:" + th.hash.to_string(Base32, true);
@ -37,54 +40,97 @@ std::string renderContentAddress(ContentAddress ca) {
}, ca);
}
ContentAddress parseContentAddress(std::string_view rawCa) {
auto rest = rawCa;
std::string renderContentAddressMethod(ContentAddressMethod cam)
{
return std::visit(overloaded {
[](TextHashMethod &th) {
return std::string{"text:"} + printHashType(htSHA256);
},
[](FixedOutputHashMethod &fshm) {
return "fixed:" + makeFileIngestionPrefix(fshm.fileIngestionMethod) + printHashType(fshm.hashType);
}
}, cam);
}
/*
Parses content address strings up to the hash.
*/
static ContentAddressMethod parseContentAddressMethodPrefix(std::string_view & rest)
{
std::string_view wholeInput { rest };
std::string_view prefix;
{
auto optPrefix = splitPrefixTo(rest, ':');
if (!optPrefix)
throw UsageError("not a content address because it is not in the form '<prefix>:<rest>': %s", rawCa);
throw UsageError("not a content address because it is not in the form '<prefix>:<rest>': %s", wholeInput);
prefix = *optPrefix;
}
auto parseHashType_ = [&](){
auto hashTypeRaw = splitPrefixTo(rest, ':');
if (!hashTypeRaw)
throw UsageError("content address hash must be in form '<algo>:<hash>', but found: %s", rawCa);
throw UsageError("content address hash must be in form '<algo>:<hash>', but found: %s", wholeInput);
HashType hashType = parseHashType(*hashTypeRaw);
return std::move(hashType);
};
// Switch on prefix
if (prefix == "text") {
// No parsing of the method, "text" only support flat.
// No parsing of the ingestion method, "text" only support flat.
HashType hashType = parseHashType_();
if (hashType != htSHA256)
throw Error("text content address hash should use %s, but instead uses %s",
printHashType(htSHA256), printHashType(hashType));
return TextHash {
.hash = Hash::parseNonSRIUnprefixed(rest, std::move(hashType)),
};
return TextHashMethod {};
} else if (prefix == "fixed") {
// Parse method
auto method = FileIngestionMethod::Flat;
if (splitPrefix(rest, "r:"))
method = FileIngestionMethod::Recursive;
HashType hashType = parseHashType_();
return FixedOutputHash {
.method = method,
.hash = Hash::parseNonSRIUnprefixed(rest, std::move(hashType)),
return FixedOutputHashMethod {
.fileIngestionMethod = method,
.hashType = std::move(hashType),
};
} else
throw UsageError("content address prefix '%s' is unrecognized. Recogonized prefixes are 'text' or 'fixed'", prefix);
}
ContentAddress parseContentAddress(std::string_view rawCa) {
auto rest = rawCa;
ContentAddressMethod caMethod = parseContentAddressMethodPrefix(rest);
return std::visit(
overloaded {
[&](TextHashMethod thm) {
return ContentAddress(TextHash {
.hash = Hash::parseNonSRIUnprefixed(rest, htSHA256)
});
},
[&](FixedOutputHashMethod fohMethod) {
return ContentAddress(FixedOutputHash {
.method = fohMethod.fileIngestionMethod,
.hash = Hash::parseNonSRIUnprefixed(rest, std::move(fohMethod.hashType)),
});
},
}, caMethod);
}
ContentAddressMethod parseContentAddressMethod(std::string_view caMethod)
{
std::string_view asPrefix {std::string{caMethod} + ":"};
return parseContentAddressMethodPrefix(asPrefix);
}
std::optional<ContentAddress> parseContentAddressOpt(std::string_view rawCaOpt)
{
return rawCaOpt == "" ? std::optional<ContentAddress>() : parseContentAddress(rawCaOpt);
};
std::optional<ContentAddress> parseContentAddressOpt(std::string_view rawCaOpt) {
return rawCaOpt == "" ? std::optional<ContentAddress> {} : parseContentAddress(rawCaOpt);
};
std::string renderContentAddress(std::optional<ContentAddress> ca) {
std::string renderContentAddress(std::optional<ContentAddress> ca)
{
return ca ? renderContentAddress(*ca) : "";
}

View file

@ -55,4 +55,23 @@ std::optional<ContentAddress> parseContentAddressOpt(std::string_view rawCaOpt);
Hash getContentAddressHash(const ContentAddress & ca);
/*
We only have one way to hash text with references, so this single-value
type is only useful in std::variant.
*/
struct TextHashMethod { };
struct FixedOutputHashMethod {
FileIngestionMethod fileIngestionMethod;
HashType hashType;
};
typedef std::variant<
TextHashMethod,
FixedOutputHashMethod
> ContentAddressMethod;
ContentAddressMethod parseContentAddressMethod(std::string_view rawCaMethod);
std::string renderContentAddressMethod(ContentAddressMethod caMethod);
}
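The ContentAddressMethod variant above models the textual prefixes used elsewhere in this commit: "text:sha256" for text hashing, and "fixed:sha256" or "fixed:r:sha256" for flat versus recursive (NAR) fixed-output hashing, as produced by renderContentAddressMethod. A rough stand-alone sketch of parsing such a prefix into a variant, with simplified names that are not the real Nix types:

#include <iostream>
#include <stdexcept>
#include <string>
#include <variant>

struct TextMethod { };
struct FixedMethod { bool recursive; std::string hashType; };
using Method = std::variant<TextMethod, FixedMethod>;

Method parseMethod(std::string s)
{
    // pop the next ':'-separated token off the front of s
    auto next = [&]() {
        auto i = s.find(':');
        auto tok = s.substr(0, i);
        s = i == std::string::npos ? std::string{} : s.substr(i + 1);
        return tok;
    };
    auto prefix = next();
    if (prefix == "text") {
        if (next() != "sha256") throw std::runtime_error("text must use sha256");
        return TextMethod{};
    }
    if (prefix == "fixed") {
        bool recursive = false;
        auto tok = next();
        if (tok == "r") { recursive = true; tok = next(); }
        return FixedMethod{recursive, tok};
    }
    throw std::runtime_error("unrecognized prefix '" + prefix + "'");
}

int main()
{
    auto m = parseMethod("fixed:r:sha256");
    if (auto * f = std::get_if<FixedMethod>(&m))
        std::cout << (f->recursive ? "recursive " : "flat ") << f->hashType << "\n";
}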


@ -2,7 +2,6 @@
#include "monitor-fd.hh"
#include "worker-protocol.hh"
#include "store-api.hh"
#include "local-store.hh"
#include "finally.hh"
#include "affinity.hh"
#include "archive.hh"
@ -102,17 +101,20 @@ struct TunnelLogger : public Logger
/* stopWork() means that we're done; stop sending stderr to the
client. */
void stopWork(bool success = true, const string & msg = "", unsigned int status = 0)
void stopWork(const Error * ex = nullptr)
{
auto state(state_.lock());
state->canSendStderr = false;
if (success)
if (!ex)
to << STDERR_LAST;
else {
to << STDERR_ERROR << msg;
if (status != 0) to << status;
if (GET_PROTOCOL_MINOR(clientVersion) >= 26) {
to << STDERR_ERROR << *ex;
} else {
to << STDERR_ERROR << ex->what() << ex->status;
}
}
}
@ -232,7 +234,7 @@ struct ClientSettings
else if (setSubstituters(settings.extraSubstituters))
;
else
warn("ignoring the user-specified setting '%s', because it is a restricted setting and you are not a trusted user", name);
debug("ignoring the client-specified setting '%s', because it is a restricted setting and you are not a trusted user", name);
} catch (UsageError & e) {
warn(e.what());
}
@ -240,6 +242,23 @@ struct ClientSettings
}
};
static void writeValidPathInfo(
ref<Store> store,
unsigned int clientVersion,
Sink & to,
std::shared_ptr<const ValidPathInfo> info)
{
to << (info->deriver ? store->printStorePath(*info->deriver) : "")
<< info->narHash.to_string(Base16, false);
worker_proto::write(*store, to, info->references);
to << info->registrationTime << info->narSize;
if (GET_PROTOCOL_MINOR(clientVersion) >= 16) {
to << info->ultimate
<< info->sigs
<< renderContentAddress(info->ca);
}
}
static void performOp(TunnelLogger * logger, ref<Store> store,
TrustedFlag trusted, RecursiveFlag recursive, unsigned int clientVersion,
Source & from, BufferedSink & to, unsigned int op)
@ -256,11 +275,11 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
}
case wopQueryValidPaths: {
auto paths = readStorePaths<StorePathSet>(*store, from);
auto paths = worker_proto::read(*store, from, Phantom<StorePathSet> {});
logger->startWork();
auto res = store->queryValidPaths(paths);
logger->stopWork();
writeStorePaths(*store, to, res);
worker_proto::write(*store, to, res);
break;
}
@ -276,11 +295,11 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
}
case wopQuerySubstitutablePaths: {
auto paths = readStorePaths<StorePathSet>(*store, from);
auto paths = worker_proto::read(*store, from, Phantom<StorePathSet> {});
logger->startWork();
auto res = store->querySubstitutablePaths(paths);
logger->stopWork();
writeStorePaths(*store, to, res);
worker_proto::write(*store, to, res);
break;
}
@ -309,7 +328,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
paths = store->queryValidDerivers(path);
else paths = store->queryDerivationOutputs(path);
logger->stopWork();
writeStorePaths(*store, to, paths);
worker_proto::write(*store, to, paths);
break;
}
@ -350,54 +369,90 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
}
case wopAddToStore: {
HashType hashAlgo;
std::string baseName;
FileIngestionMethod method;
{
bool fixed;
uint8_t recursive;
std::string hashAlgoRaw;
from >> baseName >> fixed /* obsolete */ >> recursive >> hashAlgoRaw;
if (recursive > (uint8_t) FileIngestionMethod::Recursive)
throw Error("unsupported FileIngestionMethod with value of %i; you may need to upgrade nix-daemon", recursive);
method = FileIngestionMethod { recursive };
/* Compatibility hack. */
if (!fixed) {
hashAlgoRaw = "sha256";
method = FileIngestionMethod::Recursive;
if (GET_PROTOCOL_MINOR(clientVersion) >= 25) {
auto name = readString(from);
auto camStr = readString(from);
auto refs = worker_proto::read(*store, from, Phantom<StorePathSet> {});
bool repairBool;
from >> repairBool;
auto repair = RepairFlag{repairBool};
logger->startWork();
auto pathInfo = [&]() {
// NB: FramedSource must be out of scope before logger->stopWork();
ContentAddressMethod contentAddressMethod = parseContentAddressMethod(camStr);
FramedSource source(from);
// TODO this is essentially RemoteStore::addCAToStore. Move it up to Store.
return std::visit(overloaded {
[&](TextHashMethod &_) {
// We could stream this by changing Store
std::string contents = source.drain();
auto path = store->addTextToStore(name, contents, refs, repair);
return store->queryPathInfo(path);
},
[&](FixedOutputHashMethod &fohm) {
if (!refs.empty())
throw UnimplementedError("cannot yet have refs with flat or nar-hashed data");
auto path = store->addToStoreFromDump(source, name, fohm.fileIngestionMethod, fohm.hashType, repair);
return store->queryPathInfo(path);
},
}, contentAddressMethod);
}();
logger->stopWork();
to << store->printStorePath(pathInfo->path);
writeValidPathInfo(store, clientVersion, to, pathInfo);
} else {
HashType hashAlgo;
std::string baseName;
FileIngestionMethod method;
{
bool fixed;
uint8_t recursive;
std::string hashAlgoRaw;
from >> baseName >> fixed /* obsolete */ >> recursive >> hashAlgoRaw;
if (recursive > (uint8_t) FileIngestionMethod::Recursive)
throw Error("unsupported FileIngestionMethod with value of %i; you may need to upgrade nix-daemon", recursive);
method = FileIngestionMethod { recursive };
/* Compatibility hack. */
if (!fixed) {
hashAlgoRaw = "sha256";
method = FileIngestionMethod::Recursive;
}
hashAlgo = parseHashType(hashAlgoRaw);
}
hashAlgo = parseHashType(hashAlgoRaw);
StringSink saved;
TeeSource savedNARSource(from, saved);
RetrieveRegularNARSink savedRegular { saved };
if (method == FileIngestionMethod::Recursive) {
/* Get the entire NAR dump from the client and save it to
a string so that we can pass it to
addToStoreFromDump(). */
ParseSink sink; /* null sink; just parse the NAR */
parseDump(sink, savedNARSource);
} else
parseDump(savedRegular, from);
logger->startWork();
if (!savedRegular.regular) throw Error("regular file expected");
// FIXME: try to stream directly from `from`.
StringSource dumpSource { *saved.s };
auto path = store->addToStoreFromDump(dumpSource, baseName, method, hashAlgo);
logger->stopWork();
to << store->printStorePath(path);
}
StringSink saved;
TeeSource savedNARSource(from, saved);
RetrieveRegularNARSink savedRegular { saved };
if (method == FileIngestionMethod::Recursive) {
/* Get the entire NAR dump from the client and save it to
a string so that we can pass it to
addToStoreFromDump(). */
ParseSink sink; /* null sink; just parse the NAR */
parseDump(sink, savedNARSource);
} else
parseDump(savedRegular, from);
logger->startWork();
if (!savedRegular.regular) throw Error("regular file expected");
// FIXME: try to stream directly from `from`.
StringSource dumpSource { *saved.s };
auto path = store->addToStoreFromDump(dumpSource, baseName, method, hashAlgo);
logger->stopWork();
to << store->printStorePath(path);
break;
}
case wopAddTextToStore: {
string suffix = readString(from);
string s = readString(from);
auto refs = readStorePaths<StorePathSet>(*store, from);
auto refs = worker_proto::read(*store, from, Phantom<StorePathSet> {});
logger->startWork();
auto path = store->addTextToStore(suffix, s, refs, NoRepair);
logger->stopWork();
@ -494,6 +549,20 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
are in fact content-addressed if we don't trust them. */
assert(derivationIsCA(drv.type()) || trusted);
/* Recompute the derivation path when we cannot trust the original. */
if (!trusted) {
/* Recomputing the derivation path for input-addressed derivations
makes it harder to audit them after the fact, since we need the
original not-necessarily-resolved derivation to verify the drv
derivation as an adequate claim to the input-addressed output
paths. */
assert(derivationIsCA(drv.type()));
Derivation drv2;
static_cast<BasicDerivation &>(drv2) = drv;
drvPath = writeDerivation(*store, Derivation { drv2 });
}
auto res = store->buildDerivation(drvPath, drv, buildMode);
logger->stopWork();
to << res.status << res.errorMsg;
@ -556,7 +625,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
case wopCollectGarbage: {
GCOptions options;
options.action = (GCOptions::GCAction) readInt(from);
options.pathsToDelete = readStorePaths<StorePathSet>(*store, from);
options.pathsToDelete = worker_proto::read(*store, from, Phantom<StorePathSet> {});
from >> options.ignoreLiveness >> options.maxFreed;
// obsolete fields
readInt(from);
@ -625,7 +694,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
else {
to << 1
<< (i->second.deriver ? store->printStorePath(*i->second.deriver) : "");
writeStorePaths(*store, to, i->second.references);
worker_proto::write(*store, to, i->second.references);
to << i->second.downloadSize
<< i->second.narSize;
}
@ -636,11 +705,11 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
SubstitutablePathInfos infos;
StorePathCAMap pathsMap = {};
if (GET_PROTOCOL_MINOR(clientVersion) < 22) {
auto paths = readStorePaths<StorePathSet>(*store, from);
auto paths = worker_proto::read(*store, from, Phantom<StorePathSet> {});
for (auto & path : paths)
pathsMap.emplace(path, std::nullopt);
} else
pathsMap = readStorePathCAMap(*store, from);
pathsMap = worker_proto::read(*store, from, Phantom<StorePathCAMap> {});
logger->startWork();
store->querySubstitutablePathInfos(pathsMap, infos);
logger->stopWork();
@ -648,7 +717,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
for (auto & i : infos) {
to << store->printStorePath(i.first)
<< (i.second.deriver ? store->printStorePath(*i.second.deriver) : "");
writeStorePaths(*store, to, i.second.references);
worker_proto::write(*store, to, i.second.references);
to << i.second.downloadSize << i.second.narSize;
}
break;
@ -658,7 +727,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
logger->startWork();
auto paths = store->queryAllValidPaths();
logger->stopWork();
writeStorePaths(*store, to, paths);
worker_proto::write(*store, to, paths);
break;
}
@ -675,15 +744,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
if (info) {
if (GET_PROTOCOL_MINOR(clientVersion) >= 17)
to << 1;
to << (info->deriver ? store->printStorePath(*info->deriver) : "")
<< info->narHash.to_string(Base16, false);
writeStorePaths(*store, to, info->references);
to << info->registrationTime << info->narSize;
if (GET_PROTOCOL_MINOR(clientVersion) >= 16) {
to << info->ultimate
<< info->sigs
<< renderContentAddress(info->ca);
}
writeValidPathInfo(store, clientVersion, to, info);
} else {
assert(GET_PROTOCOL_MINOR(clientVersion) >= 17);
to << 0;
@ -738,7 +799,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
ValidPathInfo info { path, narHash };
if (deriver != "")
info.deriver = store->parseStorePath(deriver);
info.references = readStorePaths<StorePathSet>(*store, from);
info.references = worker_proto::read(*store, from, Phantom<StorePathSet> {});
from >> info.registrationTime >> info.narSize >> info.ultimate;
info.sigs = readStrings<StringSet>(from);
info.ca = parseContentAddressOpt(readString(from));
@ -749,59 +810,12 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
info.ultimate = false;
if (GET_PROTOCOL_MINOR(clientVersion) >= 23) {
struct FramedSource : Source
{
Source & from;
bool eof = false;
std::vector<unsigned char> pending;
size_t pos = 0;
FramedSource(Source & from) : from(from)
{ }
~FramedSource()
{
if (!eof) {
while (true) {
auto n = readInt(from);
if (!n) break;
std::vector<unsigned char> data(n);
from(data.data(), n);
}
}
}
size_t read(unsigned char * data, size_t len) override
{
if (eof) throw EndOfFile("reached end of FramedSource");
if (pos >= pending.size()) {
size_t len = readInt(from);
if (!len) {
eof = true;
return 0;
}
pending = std::vector<unsigned char>(len);
pos = 0;
from(pending.data(), len);
}
auto n = std::min(len, pending.size() - pos);
memcpy(data, pending.data() + pos, n);
pos += n;
return n;
}
};
logger->startWork();
{
FramedSource source(from);
store->addToStore(info, source, (RepairFlag) repair,
dontCheckSigs ? NoCheckSigs : CheckSigs);
}
logger->stopWork();
}
@ -838,9 +852,9 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
uint64_t downloadSize, narSize;
store->queryMissing(targets, willBuild, willSubstitute, unknown, downloadSize, narSize);
logger->stopWork();
writeStorePaths(*store, to, willBuild);
writeStorePaths(*store, to, willSubstitute);
writeStorePaths(*store, to, unknown);
worker_proto::write(*store, to, willBuild);
worker_proto::write(*store, to, willSubstitute);
worker_proto::write(*store, to, unknown);
to << downloadSize << narSize;
break;
}
@ -924,10 +938,11 @@ void processConnection(
during addTextToStore() / importPath(). If that
happens, just send the error message and exit. */
bool errorAllowed = tunnelLogger->state_.lock()->canSendStderr;
tunnelLogger->stopWork(false, e.msg(), e.status);
tunnelLogger->stopWork(&e);
if (!errorAllowed) throw;
} catch (std::bad_alloc & e) {
tunnelLogger->stopWork(false, "Nix daemon out of memory", 1);
auto ex = Error("Nix daemon out of memory");
tunnelLogger->stopWork(&ex);
throw;
}
@ -936,8 +951,13 @@ void processConnection(
assert(!tunnelLogger->state_.lock()->canSendStderr);
};
} catch (Error & e) {
tunnelLogger->stopWork(&e);
to.flush();
return;
} catch (std::exception & e) {
tunnelLogger->stopWork(false, e.what(), 1);
auto ex = Error(e.what());
tunnelLogger->stopWork(&ex);
to.flush();
return;
}

View file

@ -1,3 +1,5 @@
#pragma once
#include "serialise.hh"
#include "store-api.hh"

View file

@ -7,7 +7,7 @@
namespace nix {
std::optional<StorePath> DerivationOutput::pathOpt(const Store & store, std::string_view drvName) const
std::optional<StorePath> DerivationOutput::path(const Store & store, std::string_view drvName, std::string_view outputName) const
{
return std::visit(overloaded {
[](DerivationOutputInputAddressed doi) -> std::optional<StorePath> {
@ -15,7 +15,7 @@ std::optional<StorePath> DerivationOutput::pathOpt(const Store & store, std::str
},
[&](DerivationOutputCAFixed dof) -> std::optional<StorePath> {
return {
store.makeFixedOutputPath(dof.hash.method, dof.hash.hash, drvName)
dof.path(store, drvName, outputName)
};
},
[](DerivationOutputCAFloating dof) -> std::optional<StorePath> {
@ -25,6 +25,13 @@ std::optional<StorePath> DerivationOutput::pathOpt(const Store & store, std::str
}
StorePath DerivationOutputCAFixed::path(const Store & store, std::string_view drvName, std::string_view outputName) const {
return store.makeFixedOutputPath(
hash.method, hash.hash,
outputPathName(drvName, outputName));
}
bool derivationIsCA(DerivationType dt) {
switch (dt) {
case DerivationType::InputAddressed: return false;
@ -62,7 +69,7 @@ bool BasicDerivation::isBuiltin() const
StorePath writeDerivation(Store & store,
const Derivation & drv, RepairFlag repair)
const Derivation & drv, RepairFlag repair, bool readOnly)
{
auto references = drv.inputSrcs;
for (auto & i : drv.inputDrvs)
@ -72,7 +79,7 @@ StorePath writeDerivation(Store & store,
held during a garbage collection). */
auto suffix = std::string(drv.name) + drvExtension;
auto contents = drv.unparse(store, false);
return settings.readOnlyMode
return readOnly || settings.readOnlyMode
? store.computeStorePathForText(suffix, contents, references)
: store.addTextToStore(suffix, contents, references, repair);
}
@ -106,12 +113,15 @@ static string parseString(std::istream & str)
return res;
}
static void validatePath(std::string_view s) {
if (s.size() == 0 || s[0] != '/')
throw FormatError("bad path '%1%' in derivation", s);
}
static Path parsePath(std::istream & str)
{
string s = parseString(str);
if (s.size() == 0 || s[0] != '/')
throw FormatError("bad path '%1%' in derivation", s);
auto s = parseString(str);
validatePath(s);
return s;
}
@ -140,7 +150,7 @@ static StringSet parseStrings(std::istream & str, bool arePaths)
static DerivationOutput parseDerivationOutput(const Store & store,
StorePath path, std::string_view hashAlgo, std::string_view hash)
std::string_view pathS, std::string_view hashAlgo, std::string_view hash)
{
if (hashAlgo != "") {
auto method = FileIngestionMethod::Flat;
@ -148,40 +158,45 @@ static DerivationOutput parseDerivationOutput(const Store & store,
method = FileIngestionMethod::Recursive;
hashAlgo = hashAlgo.substr(2);
}
const HashType hashType = parseHashType(hashAlgo);
return hash != ""
? DerivationOutput {
.output = DerivationOutputCAFixed {
.hash = FixedOutputHash {
.method = std::move(method),
.hash = Hash::parseNonSRIUnprefixed(hash, hashType),
},
}
}
: (settings.requireExperimentalFeature("ca-derivations"),
DerivationOutput {
.output = DerivationOutputCAFloating {
.method = std::move(method),
.hashType = std::move(hashType),
},
});
} else
const auto hashType = parseHashType(hashAlgo);
if (hash != "") {
validatePath(pathS);
return DerivationOutput {
.output = DerivationOutputCAFixed {
.hash = FixedOutputHash {
.method = std::move(method),
.hash = Hash::parseNonSRIUnprefixed(hash, hashType),
},
},
};
} else {
settings.requireExperimentalFeature("ca-derivations");
assert(pathS == "");
return DerivationOutput {
.output = DerivationOutputCAFloating {
.method = std::move(method),
.hashType = std::move(hashType),
},
};
}
} else {
validatePath(pathS);
return DerivationOutput {
.output = DerivationOutputInputAddressed {
.path = std::move(path),
.path = store.parseStorePath(pathS),
}
};
}
}
static DerivationOutput parseDerivationOutput(const Store & store, std::istringstream & str)
{
expect(str, ","); auto path = store.parseStorePath(parsePath(str));
expect(str, ","); const auto pathS = parseString(str);
expect(str, ","); const auto hashAlgo = parseString(str);
expect(str, ","); const auto hash = parseString(str);
expect(str, ")");
return parseDerivationOutput(store, std::move(path), hashAlgo, hash);
return parseDerivationOutput(store, pathS, hashAlgo, hash);
}
@ -294,17 +309,19 @@ string Derivation::unparse(const Store & store, bool maskOutputs,
for (auto & i : outputs) {
if (first) first = false; else s += ',';
s += '('; printUnquotedString(s, i.first);
s += ','; printUnquotedString(s, maskOutputs ? "" : store.printStorePath(i.second.path(store, name)));
std::visit(overloaded {
[&](DerivationOutputInputAddressed doi) {
s += ','; printUnquotedString(s, maskOutputs ? "" : store.printStorePath(doi.path));
s += ','; printUnquotedString(s, "");
s += ','; printUnquotedString(s, "");
},
[&](DerivationOutputCAFixed dof) {
s += ','; printUnquotedString(s, maskOutputs ? "" : store.printStorePath(dof.path(store, name, i.first)));
s += ','; printUnquotedString(s, dof.hash.printMethodAlgo());
s += ','; printUnquotedString(s, dof.hash.hash.to_string(Base16, false));
},
[&](DerivationOutputCAFloating dof) {
s += ','; printUnquotedString(s, "");
s += ','; printUnquotedString(s, makeFileIngestionPrefix(dof.method) + printHashType(dof.hashType));
s += ','; printUnquotedString(s, "");
},
@ -360,6 +377,16 @@ bool isDerivation(const string & fileName)
}
std::string outputPathName(std::string_view drvName, std::string_view outputName) {
std::string res { drvName };
if (outputName != "out") {
res += "-";
res += outputName;
}
return res;
}
DerivationType BasicDerivation::type() const
{
std::set<std::string_view> inputAddressedOutputs, fixedCAOutputs, floatingCAOutputs;
@ -452,12 +479,12 @@ DrvHashModulo hashDerivationModulo(Store & store, const Derivation & drv, bool m
throw Error("Regular input-addressed derivations are not yet allowed to depend on CA derivations");
case DerivationType::CAFixed: {
std::map<std::string, Hash> outputHashes;
for (const auto & i : drv.outputsAndPaths(store)) {
auto & dof = std::get<DerivationOutputCAFixed>(i.second.first.output);
for (const auto & i : drv.outputs) {
auto & dof = std::get<DerivationOutputCAFixed>(i.second.output);
auto hash = hashString(htSHA256, "fixed:out:"
+ dof.hash.printMethodAlgo() + ":"
+ dof.hash.hash.to_string(Base16, false) + ":"
+ store.printStorePath(i.second.second));
+ store.printStorePath(dof.path(store, drv.name, i.first)));
outputHashes.insert_or_assign(i.first, std::move(hash));
}
return outputHashes;
@ -508,21 +535,13 @@ bool wantOutput(const string & output, const std::set<string> & wanted)
}
StorePathSet BasicDerivation::outputPaths(const Store & store) const
{
StorePathSet paths;
for (auto & i : outputsAndPaths(store))
paths.insert(i.second.second);
return paths;
}
static DerivationOutput readDerivationOutput(Source & in, const Store & store)
{
auto path = store.parseStorePath(readString(in));
const auto pathS = readString(in);
const auto hashAlgo = readString(in);
const auto hash = readString(in);
return parseDerivationOutput(store, std::move(path), hashAlgo, hash);
return parseDerivationOutput(store, pathS, hashAlgo, hash);
}
StringSet BasicDerivation::outputNames() const
@ -533,23 +552,12 @@ StringSet BasicDerivation::outputNames() const
return names;
}
DerivationOutputsAndPaths BasicDerivation::outputsAndPaths(const Store & store) const {
DerivationOutputsAndPaths outsAndPaths;
for (auto output : outputs)
outsAndPaths.insert(std::make_pair(
output.first,
std::make_pair(output.second, output.second.path(store, name))
)
);
return outsAndPaths;
}
DerivationOutputsAndOptPaths BasicDerivation::outputsAndOptPaths(const Store & store) const {
DerivationOutputsAndOptPaths outsAndOptPaths;
for (auto output : outputs)
outsAndOptPaths.insert(std::make_pair(
output.first,
std::make_pair(output.second, output.second.pathOpt(store, output.first))
std::make_pair(output.second, output.second.path(store, name, output.first))
)
);
return outsAndOptPaths;
@ -576,7 +584,7 @@ Source & readDerivation(Source & in, const Store & store, BasicDerivation & drv,
drv.outputs.emplace(std::move(name), std::move(output));
}
drv.inputSrcs = readStorePaths<StorePathSet>(store, in);
drv.inputSrcs = worker_proto::read(store, in, Phantom<StorePathSet> {});
in >> drv.platform >> drv.builder;
drv.args = readStrings<Strings>(in);
@ -594,24 +602,27 @@ Source & readDerivation(Source & in, const Store & store, BasicDerivation & drv,
void writeDerivation(Sink & out, const Store & store, const BasicDerivation & drv)
{
out << drv.outputs.size();
for (auto & i : drv.outputsAndPaths(store)) {
out << i.first
<< store.printStorePath(i.second.second);
for (auto & i : drv.outputs) {
out << i.first;
std::visit(overloaded {
[&](DerivationOutputInputAddressed doi) {
out << "" << "";
out << store.printStorePath(doi.path)
<< ""
<< "";
},
[&](DerivationOutputCAFixed dof) {
out << dof.hash.printMethodAlgo()
out << store.printStorePath(dof.path(store, drv.name, i.first))
<< dof.hash.printMethodAlgo()
<< dof.hash.hash.to_string(Base16, false);
},
[&](DerivationOutputCAFloating dof) {
out << (makeFileIngestionPrefix(dof.method) + printHashType(dof.hashType))
out << ""
<< (makeFileIngestionPrefix(dof.method) + printHashType(dof.hashType))
<< "";
},
}, i.second.first.output);
}, i.second.output);
}
writeStorePaths(store, out, drv.inputSrcs);
worker_proto::write(store, out, drv.inputSrcs);
out << drv.platform << drv.builder << drv.args;
out << drv.env.size();
for (auto & i : drv.env)
@ -625,5 +636,65 @@ std::string hashPlaceholder(const std::string & outputName)
return "/" + hashString(htSHA256, "nix-output:" + outputName).to_string(Base32, false);
}
std::string downstreamPlaceholder(const Store & store, const StorePath & drvPath, std::string_view outputName)
{
auto drvNameWithExtension = drvPath.name();
auto drvName = drvNameWithExtension.substr(0, drvNameWithExtension.size() - 4);
auto clearText = "nix-upstream-output:" + std::string { drvPath.hashPart() } + ":" + outputPathName(drvName, outputName);
return "/" + hashString(htSHA256, clearText).to_string(Base32, false);
}
// N.B. Outputs are left unchanged
static void rewriteDerivation(Store & store, BasicDerivation & drv, const StringMap & rewrites) {
debug("Rewriting the derivation");
for (auto &rewrite: rewrites) {
debug("rewriting %s as %s", rewrite.first, rewrite.second);
}
drv.builder = rewriteStrings(drv.builder, rewrites);
for (auto & arg: drv.args) {
arg = rewriteStrings(arg, rewrites);
}
StringPairs newEnv;
for (auto & envVar: drv.env) {
auto envName = rewriteStrings(envVar.first, rewrites);
auto envValue = rewriteStrings(envVar.second, rewrites);
newEnv.emplace(envName, envValue);
}
drv.env = newEnv;
}
Sync<DrvPathResolutions> drvPathResolutions;
std::optional<BasicDerivation> Derivation::tryResolve(Store & store) {
BasicDerivation resolved { *this };
// Input paths that we'll want to rewrite in the derivation
StringMap inputRewrites;
for (auto & input : inputDrvs) {
auto inputDrvOutputs = store.queryPartialDerivationOutputMap(input.first);
StringSet newOutputNames;
for (auto & outputName : input.second) {
auto actualPathOpt = inputDrvOutputs.at(outputName);
if (!actualPathOpt)
return std::nullopt;
auto actualPath = *actualPathOpt;
inputRewrites.emplace(
downstreamPlaceholder(store, input.first, outputName),
store.printStorePath(actualPath));
resolved.inputSrcs.insert(std::move(actualPath));
}
}
rewriteDerivation(store, resolved, inputRewrites);
return resolved;
}
}

View file

@ -4,6 +4,7 @@
#include "types.hh"
#include "hash.hh"
#include "content-address.hh"
#include "sync.hh"
#include <map>
#include <variant>
@ -27,6 +28,7 @@ struct DerivationOutputInputAddressed
struct DerivationOutputCAFixed
{
FixedOutputHash hash; /* hash used for expected hash computation */
StorePath path(const Store & store, std::string_view drvName, std::string_view outputName) const;
};
/* Floating-output derivations, whose output paths are content addressed, but
@ -49,14 +51,8 @@ struct DerivationOutput
std::optional<HashType> hashAlgoOpt(const Store & store) const;
/* Note, when you use this function you should make sure that you're passing
the right derivation name. When in doubt, you should use the safer
interface provided by BasicDerivation::outputsAndPaths */
std::optional<StorePath> pathOpt(const Store & store, std::string_view drvName) const;
/* DEPRECATED: Remove after CA drvs are fully implemented */
StorePath path(const Store & store, std::string_view drvName) const {
auto p = pathOpt(store, drvName);
if (!p) throw UnimplementedError("floating content-addressed derivations are not yet implemented");
return *p;
}
interface provided by BasicDerivation::outputsAndOptPaths */
std::optional<StorePath> path(const Store & store, std::string_view drvName, std::string_view outputName) const;
};
typedef std::map<string, DerivationOutput> DerivationOutputs;
@ -65,8 +61,6 @@ typedef std::map<string, DerivationOutput> DerivationOutputs;
also contains, for each output, the (optional) store path in which it would
be written. To calculate values of these types, see the corresponding
functions in BasicDerivation */
typedef std::map<string, std::pair<DerivationOutput, StorePath>>
DerivationOutputsAndPaths;
typedef std::map<string, std::pair<DerivationOutput, std::optional<StorePath>>>
DerivationOutputsAndOptPaths;
@ -105,7 +99,7 @@ struct BasicDerivation
StringPairs env;
std::string name;
BasicDerivation() { }
BasicDerivation() = default;
virtual ~BasicDerivation() { };
bool isBuiltin() const;
@ -113,17 +107,12 @@ struct BasicDerivation
/* Return true iff this is a fixed-output derivation. */
DerivationType type() const;
/* Return the output paths of a derivation. */
StorePathSet outputPaths(const Store & store) const;
/* Return the output names of a derivation. */
StringSet outputNames() const;
/* Calculates the maps that contains all the DerivationOutputs, but
augmented with knowledge of the Store paths they would be written into.
The first one of these functions will be removed when the CA work is
completed */
DerivationOutputsAndPaths outputsAndPaths(const Store & store) const;
augmented with knowledge of the Store paths they would be written
into. */
DerivationOutputsAndOptPaths outputsAndOptPaths(const Store & store) const;
static std::string_view nameFromPath(const StorePath & storePath);
@ -137,7 +126,17 @@ struct Derivation : BasicDerivation
std::string unparse(const Store & store, bool maskOutputs,
std::map<std::string, StringSet> * actualInputs = nullptr) const;
Derivation() { }
/* Return the underlying basic derivation but with these changes:
1. Input drvs are emptied, but the outputs of them that were used are
added directly to input sources.
2. Input placeholders are replaced with realized input store paths. */
std::optional<BasicDerivation> tryResolve(Store & store);
Derivation() = default;
Derivation(const BasicDerivation & bd) : BasicDerivation(bd) { }
Derivation(BasicDerivation && bd) : BasicDerivation(std::move(bd)) { }
};
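As an editorial aside, here is a minimal sketch (not part of this change) of how these pieces fit together, mirroring the logic added to `LocalStore::queryPartialDerivationOutputMap` further down in this diff; the helper name `resolveDrvPath` is hypothetical:

```cpp
// Sketch only: turn a floating-CA derivation into a resolved derivation path,
// using tryResolve(), the read-only writeDerivation() overload and the
// drvPathResolutions memo table introduced in this change.
#include "derivations.hh"
#include "store-api.hh"

#include <optional>

std::optional<StorePath> resolveDrvPath(Store & store, const StorePath & drvPath)
{
    Derivation drv = store.readDerivation(drvPath);
    if (drv.type() != DerivationType::CAFloating)
        return drvPath; // nothing to resolve

    // Replace input placeholders with the realized output paths of the inputs.
    auto resolved = drv.tryResolve(store);
    if (!resolved)
        return std::nullopt; // some input output is not known yet

    // Compute the resolved derivation's store path without writing it.
    auto resolvedPath = writeDerivation(store, Derivation { std::move(*resolved) }, NoRepair, true);

    // Memoize the mapping, as LocalStore::queryPartialDerivationOutputMap does.
    drvPathResolutions.lock()->insert_or_assign(drvPath, resolvedPath);
    return resolvedPath;
}
```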
@ -147,7 +146,9 @@ enum RepairFlag : bool { NoRepair = false, Repair = true };
/* Write a derivation to the Nix store, and return its path. */
StorePath writeDerivation(Store & store,
const Derivation & drv, RepairFlag repair = NoRepair);
const Derivation & drv,
RepairFlag repair = NoRepair,
bool readOnly = false);
/* Read a derivation from a file. */
Derivation parseDerivation(const Store & store, std::string && s, std::string_view name);
@ -155,6 +156,13 @@ Derivation parseDerivation(const Store & store, std::string && s, std::string_vi
// FIXME: remove
bool isDerivation(const string & fileName);
/* Calculate the name that will be used for the store path for this
output.
This is usually <drv-name>-<output-name>, but is just <drv-name> when
the output name is "out". */
std::string outputPathName(std::string_view drvName, std::string_view outputName);
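For illustration only (not part of this change), the naming rule above in a tiny self-contained sketch; the derivation and output names, and the `outputPathNameSketch` helper, are hypothetical:

```cpp
// Illustration only: re-implements the rule stated above so it can be
// checked in isolation; the real outputPathName lives in derivations.cc.
#include <cassert>
#include <string>
#include <string_view>

static std::string outputPathNameSketch(std::string_view drvName, std::string_view outputName)
{
    std::string res { drvName };
    if (outputName != "out") {
        res += "-";
        res += outputName;
    }
    return res;
}

int main()
{
    // Hypothetical derivation/output names, chosen only to show the rule.
    assert(outputPathNameSketch("hello-2.10", "out") == "hello-2.10");
    assert(outputPathNameSketch("hello-2.10", "dev") == "hello-2.10-dev");
}
```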
// known CA drv's output hashes, currently just for fixed-output derivations
// whose output hashes are always known since they are fixed up-front.
typedef std::map<std::string, Hash> CaOutputHashes;
@ -194,6 +202,16 @@ typedef std::map<StorePath, DrvHashModulo> DrvHashes;
extern DrvHashes drvHashes; // FIXME: global, not thread-safe
/* Memoisation of `readDerivation(..).tryResolve()`. */
typedef std::map<
StorePath,
std::optional<StorePath>
> DrvPathResolutions;
// FIXME: global, though at least thread-safe.
// FIXME: arguably overlaps with hashDerivationModulo memo table.
extern Sync<DrvPathResolutions> drvPathResolutions;
bool wantOutput(const string & output, const std::set<string> & wanted);
struct Source;
@ -202,6 +220,21 @@ struct Sink;
Source & readDerivation(Source & in, const Store & store, BasicDerivation & drv, std::string_view name);
void writeDerivation(Sink & out, const Store & store, const BasicDerivation & drv);
/* This creates an opaque and almost certainly unique string
deterministically from the output name.
It is used as a placeholder to allow derivations to refer to their
own outputs without needing to use the hash of a derivation in
itself, making the hash near-impossible to calculate. */
std::string hashPlaceholder(const std::string & outputName);
/* This creates an opaque and almost certainly unique string
deterministically from a derivation path and output name.
It is used as a placeholder to allow derivations to refer to
content-addressed paths whose content, and thus the paths
themselves, is not yet known. This occurs when a derivation has a
dependency which is itself a CA derivation.
std::string downstreamPlaceholder(const Store & store, const StorePath & drvPath, std::string_view outputName);
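To make the two placeholder mechanisms above concrete, a small sketch (not part of the diff) of how a dependent derivation would embed such a placeholder; the env key "someVar" and the helper name are made up for illustration:

```cpp
// Sketch only: refer to the "out" output of a CA dependency before its
// store path is known, using the declarations above.
#include "derivations.hh"
#include "store-api.hh"

void referToDependencyOutput(const Store & store, Derivation & drv, const StorePath & depDrvPath)
{
    // Embed an opaque placeholder instead of a concrete store path.
    drv.env["someVar"] = downstreamPlaceholder(store, depDrvPath, "out");
    drv.inputDrvs[depDrvPath].insert("out");

    // A derivation refers to one of its *own* outputs the same way, but via
    // hashPlaceholder(outputName), which depends only on the output name.
    // Derivation::tryResolve later rewrites the downstream placeholder in
    // builder, args and env to the realized output path and moves the
    // dependency from inputDrvs into inputSrcs.
}
```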
}

View file

@ -1,18 +1,28 @@
#include "store-api.hh"
#include "callback.hh"
namespace nix {
static std::string uriScheme = "dummy://";
struct DummyStoreConfig : virtual StoreConfig {
using StoreConfig::StoreConfig;
struct DummyStore : public Store
const std::string name() override { return "Dummy Store"; }
};
struct DummyStore : public Store, public virtual DummyStoreConfig
{
DummyStore(const std::string scheme, const std::string uri, const Params & params)
: DummyStore(params)
{ }
DummyStore(const Params & params)
: Store(params)
: StoreConfig(params)
, Store(params)
{ }
string getUri() override
{
return uriScheme;
return *uriSchemes().begin();
}
void queryPathInfoUncached(const StorePath & path,
@ -21,6 +31,10 @@ struct DummyStore : public Store
callback(nullptr);
}
static std::set<std::string> uriSchemes() {
return {"dummy"};
}
std::optional<StorePath> queryPathFromHashPart(const std::string & hashPart) override
{ unsupported("queryPathFromHashPart"); }
@ -48,12 +62,6 @@ struct DummyStore : public Store
{ unsupported("buildDerivation"); }
};
static RegisterStoreImplementation regStore([](
const std::string & uri, const Store::Params & params)
-> std::shared_ptr<Store>
{
if (uri != uriScheme) return nullptr;
return std::make_shared<DummyStore>(params);
});
static RegisterStoreImplementation<DummyStore, DummyStoreConfig> regDummyStore;
}

View file

@ -45,7 +45,7 @@ void Store::exportPath(const StorePath & path, Sink & sink)
teeSink
<< exportMagic
<< printStorePath(path);
writeStorePaths(*this, teeSink, info->references);
worker_proto::write(*this, teeSink, info->references);
teeSink
<< (info->deriver ? printStorePath(*info->deriver) : "")
<< 0;
@ -73,7 +73,7 @@ StorePaths Store::importPaths(Source & source, CheckSigsFlag checkSigs)
//Activity act(*logger, lvlInfo, format("importing path '%s'") % info.path);
auto references = readStorePaths<StorePathSet>(*this, source);
auto references = worker_proto::read(*this, source, Phantom<StorePathSet> {});
auto deriver = readString(source);
auto narHash = hashString(htSHA256, *saved.s);

View file

@ -5,6 +5,7 @@
#include "s3.hh"
#include "compression.hh"
#include "finally.hh"
#include "callback.hh"
#ifdef ENABLE_S3
#include <aws/core/client/ClientConfiguration.h>
@ -30,7 +31,7 @@ namespace nix {
FileTransferSettings fileTransferSettings;
static GlobalConfig::Register r1(&fileTransferSettings);
static GlobalConfig::Register rFileTransferSettings(&fileTransferSettings);
std::string resolveUri(const std::string & uri)
{
@ -112,6 +113,9 @@ struct curlFileTransfer : public FileTransfer
requestHeaders = curl_slist_append(requestHeaders, ("If-None-Match: " + request.expectedETag).c_str());
if (!request.mimeType.empty())
requestHeaders = curl_slist_append(requestHeaders, ("Content-Type: " + request.mimeType).c_str());
for (auto it = request.headers.begin(); it != request.headers.end(); ++it){
requestHeaders = curl_slist_append(requestHeaders, fmt("%s: %s", it->first, it->second).c_str());
}
}
~TransferItem()

View file

@ -51,6 +51,7 @@ extern FileTransferSettings fileTransferSettings;
struct FileTransferRequest
{
std::string uri;
Headers headers;
std::string expectedETag;
bool verifyTLS = true;
bool head = false;

View file

@ -574,9 +574,12 @@ bool LocalStore::canReachRoot(GCState & state, StorePathSet & visited, const Sto
/* If keep-derivations is set and this is a derivation, then
don't delete the derivation if any of the outputs are alive. */
if (state.gcKeepDerivations && path.isDerivation()) {
for (auto & i : queryDerivationOutputs(path))
if (isValidPath(i) && queryPathInfo(i)->deriver == path)
incoming.insert(i);
for (auto & [name, maybeOutPath] : queryPartialDerivationOutputMap(path))
if (maybeOutPath &&
isValidPath(*maybeOutPath) &&
queryPathInfo(*maybeOutPath)->deriver == path
)
incoming.insert(*maybeOutPath);
}
/* If keep-outputs is set, then don't delete this path if there
@ -660,9 +663,7 @@ void LocalStore::removeUnusedLinks(const GCState & state)
if (name == "." || name == "..") continue;
Path path = linksDir + "/" + name;
struct stat st;
if (lstat(path.c_str(), &st) == -1)
throw SysError("statting '%1%'", path);
auto st = lstat(path);
if (st.st_nlink != 1) {
actualSize += st.st_size;

View file

@ -2,6 +2,7 @@
#include "util.hh"
#include "archive.hh"
#include "args.hh"
#include "abstract-setting-to-json.hh"
#include <algorithm>
#include <map>
@ -25,7 +26,7 @@ namespace nix {
Settings settings;
static GlobalConfig::Register r1(&settings);
static GlobalConfig::Register rSettings(&settings);
Settings::Settings()
: nixPrefix(NIX_PREFIX)
@ -42,6 +43,7 @@ Settings::Settings()
{
buildUsersGroup = getuid() == 0 ? "nixbld" : "";
lockCPU = getEnv("NIX_AFFINITY_HACK") == "1";
allowSymlinkedStore = getEnv("NIX_IGNORE_SYMLINK_STORE") == "1";
caFile = getEnv("NIX_SSL_CERT_FILE").value_or(getEnv("SSL_CERT_FILE").value_or(""));
if (caFile == "") {
@ -147,6 +149,12 @@ bool Settings::isWSL1()
const string nixVersion = PACKAGE_VERSION;
NLOHMANN_JSON_SERIALIZE_ENUM(SandboxMode, {
{SandboxMode::smEnabled, true},
{SandboxMode::smRelaxed, "relaxed"},
{SandboxMode::smDisabled, false},
});
template<> void BaseSetting<SandboxMode>::set(const std::string & str)
{
if (str == "true") value = smEnabled;
@ -163,11 +171,6 @@ template<> std::string BaseSetting<SandboxMode>::to_string() const
else abort();
}
template<> nlohmann::json BaseSetting<SandboxMode>::toJSON()
{
return AbstractSetting::toJSON();
}
template<> void BaseSetting<SandboxMode>::convertToArg(Args & args, const std::string & category)
{
args.addFlag({

View file

@ -859,8 +859,54 @@ public:
are loaded as plugins (non-recursively).
)"};
Setting<std::string> githubAccessToken{this, "", "github-access-token",
"GitHub access token to get access to GitHub data through the GitHub API for `github:<..>` flakes."};
Setting<StringMap> accessTokens{this, {}, "access-tokens",
R"(
Access tokens used to access protected GitHub, GitLab, or
other locations requiring token-based authentication.
Access tokens are specified as a string made up of
space-separated `host=token` values. The specific token
used is selected by matching the `host` portion against the
"host" specification of the input. The actual use of the
`token` value is determined by the type of resource being
accessed:
* GitHub: the token value is the OAUTH-TOKEN string obtained
as the Personal Access Token from the GitHub server (see
https://docs.github.com/en/developers/apps/authorizing-oath-apps).
* GitLab: the token value is either the OAuth2 token or the
Personal Access Token (these are two different types of tokens
for GitLab, see
https://docs.gitlab.com/12.10/ee/api/README.html#authentication).
The `token` value should be `type:tokenstring` where
`type` is either `OAuth2` or `PAT` to indicate which type
of token is being specified.
Example `~/.config/nix/nix.conf`:
```
access-tokens = "github.com=23ac...b289 gitlab.mycompany.com=PAT:A123Bp_Cd..EfG gitlab.com=OAuth2:1jklw3jk"
```
Example `~/code/flake.nix`:
```nix
input.foo = {
type = "gitlab";
host = "gitlab.mycompany.com";
owner = "mycompany";
repo = "pro";
};
```
This example specifies three tokens, one each for accessing
github.com, gitlab.mycompany.com, and gitlab.com.
The `input.foo` uses the "gitlab" fetcher, which might
require specifying the token type along with the token
value.
)"};
Setting<Strings> experimentalFeatures{this, {}, "experimental-features",
"Experimental Nix features to enable."};
@ -880,6 +926,19 @@ public:
Setting<std::string> flakeRegistry{this, "https://github.com/NixOS/flake-registry/raw/master/flake-registry.json", "flake-registry",
"Path or URI of the global flake registry."};
Setting<bool> allowSymlinkedStore{
this, false, "allow-symlinked-store",
R"(
If set to `true`, Nix will stop complaining if the store directory
(typically /nix/store) contains symlink components.
This risks making some builds "impure" because builders sometimes
"canonicalise" paths by resolving all symlink components. Problems
occur if those builds are then deployed to machines where /nix/store
resolves to a different location from that of the build machine. You
can enable this setting if you are sure you're not going to do that.
)"};
};

View file

@ -2,12 +2,20 @@
#include "filetransfer.hh"
#include "globals.hh"
#include "nar-info-disk-cache.hh"
#include "callback.hh"
namespace nix {
MakeError(UploadToHTTP, Error);
class HttpBinaryCacheStore : public BinaryCacheStore
struct HttpBinaryCacheStoreConfig : virtual BinaryCacheStoreConfig
{
using BinaryCacheStoreConfig::BinaryCacheStoreConfig;
const std::string name() override { return "Http Binary Cache Store"; }
};
class HttpBinaryCacheStore : public BinaryCacheStore, public HttpBinaryCacheStoreConfig
{
private:
@ -24,9 +32,12 @@ private:
public:
HttpBinaryCacheStore(
const Params & params, const Path & _cacheUri)
: BinaryCacheStore(params)
, cacheUri(_cacheUri)
const std::string & scheme,
const Path & _cacheUri,
const Params & params)
: StoreConfig(params)
, BinaryCacheStore(params)
, cacheUri(scheme + "://" + _cacheUri)
{
if (cacheUri.back() == '/')
cacheUri.pop_back();
@ -55,6 +66,14 @@ public:
}
}
static std::set<std::string> uriSchemes()
{
static bool forceHttp = getEnv("_NIX_FORCE_HTTP") == "1";
auto ret = std::set<std::string>({"http", "https"});
if (forceHttp) ret.insert("file");
return ret;
}
protected:
void maybeDisable()
@ -162,18 +181,6 @@ protected:
};
static RegisterStoreImplementation regStore([](
const std::string & uri, const Store::Params & params)
-> std::shared_ptr<Store>
{
static bool forceHttp = getEnv("_NIX_FORCE_HTTP") == "1";
if (std::string(uri, 0, 7) != "http://" &&
std::string(uri, 0, 8) != "https://" &&
(!forceHttp || std::string(uri, 0, 7) != "file://"))
return 0;
auto store = std::make_shared<HttpBinaryCacheStore>(params, uri);
store->init();
return store;
});
static RegisterStoreImplementation<HttpBinaryCacheStore, HttpBinaryCacheStoreConfig> regHttpBinaryCacheStore;
}

View file

@ -6,21 +6,28 @@
#include "worker-protocol.hh"
#include "ssh.hh"
#include "derivations.hh"
#include "callback.hh"
namespace nix {
static std::string uriScheme = "ssh://";
struct LegacySSHStore : public Store
struct LegacySSHStoreConfig : virtual StoreConfig
{
const Setting<int> maxConnections{this, 1, "max-connections", "maximum number of concurrent SSH connections"};
const Setting<Path> sshKey{this, "", "ssh-key", "path to an SSH private key"};
const Setting<bool> compress{this, false, "compress", "whether to compress the connection"};
const Setting<Path> remoteProgram{this, "nix-store", "remote-program", "path to the nix-store executable on the remote system"};
const Setting<std::string> remoteStore{this, "", "remote-store", "URI of the store on the remote system"};
using StoreConfig::StoreConfig;
const Setting<int> maxConnections{(StoreConfig*) this, 1, "max-connections", "maximum number of concurrent SSH connections"};
const Setting<Path> sshKey{(StoreConfig*) this, "", "ssh-key", "path to an SSH private key"};
const Setting<bool> compress{(StoreConfig*) this, false, "compress", "whether to compress the connection"};
const Setting<Path> remoteProgram{(StoreConfig*) this, "nix-store", "remote-program", "path to the nix-store executable on the remote system"};
const Setting<std::string> remoteStore{(StoreConfig*) this, "", "remote-store", "URI of the store on the remote system"};
const std::string name() override { return "Legacy SSH Store"; }
};
struct LegacySSHStore : public Store, public virtual LegacySSHStoreConfig
{
// Hack for getting remote build log output.
const Setting<int> logFD{this, -1, "log-fd", "file descriptor to which SSH's stderr is connected"};
// Intentionally not in `LegacySSHStoreConfig` so that it doesn't appear in
// the documentation
const Setting<int> logFD{(StoreConfig*) this, -1, "log-fd", "file descriptor to which SSH's stderr is connected"};
struct Connection
{
@ -37,8 +44,11 @@ struct LegacySSHStore : public Store
SSHMaster master;
LegacySSHStore(const string & host, const Params & params)
: Store(params)
static std::set<std::string> uriSchemes() { return {"ssh"}; }
LegacySSHStore(const string & scheme, const string & host, const Params & params)
: StoreConfig(params)
, Store(params)
, host(host)
, connections(make_ref<Pool<Connection>>(
std::max(1, (int) maxConnections),
@ -84,7 +94,7 @@ struct LegacySSHStore : public Store
string getUri() override
{
return uriScheme + host;
return *uriSchemes().begin() + "://" + host;
}
void queryPathInfoUncached(const StorePath & path,
@ -112,7 +122,7 @@ struct LegacySSHStore : public Store
auto deriver = readString(conn->from);
if (deriver != "")
info->deriver = parseStorePath(deriver);
info->references = readStorePaths<StorePathSet>(*this, conn->from);
info->references = worker_proto::read(*this, conn->from, Phantom<StorePathSet> {});
readLongLong(conn->from); // download size
info->narSize = readLongLong(conn->from);
@ -146,7 +156,7 @@ struct LegacySSHStore : public Store
<< printStorePath(info.path)
<< (info.deriver ? printStorePath(*info.deriver) : "")
<< info.narHash.to_string(Base16, false);
writeStorePaths(*this, conn->to, info.references);
worker_proto::write(*this, conn->to, info.references);
conn->to
<< info.registrationTime
<< info.narSize
@ -175,7 +185,7 @@ struct LegacySSHStore : public Store
conn->to
<< exportMagic
<< printStorePath(info.path);
writeStorePaths(*this, conn->to, info.references);
worker_proto::write(*this, conn->to, info.references);
conn->to
<< (info.deriver ? printStorePath(*info.deriver) : "")
<< 0
@ -291,10 +301,10 @@ public:
conn->to
<< cmdQueryClosure
<< includeOutputs;
writeStorePaths(*this, conn->to, paths);
worker_proto::write(*this, conn->to, paths);
conn->to.flush();
for (auto & i : readStorePaths<StorePathSet>(*this, conn->from))
for (auto & i : worker_proto::read(*this, conn->from, Phantom<StorePathSet> {}))
out.insert(i);
}
@ -307,10 +317,10 @@ public:
<< cmdQueryValidPaths
<< false // lock
<< maybeSubstitute;
writeStorePaths(*this, conn->to, paths);
worker_proto::write(*this, conn->to, paths);
conn->to.flush();
return readStorePaths<StorePathSet>(*this, conn->from);
return worker_proto::read(*this, conn->from, Phantom<StorePathSet> {});
}
void connect() override
@ -325,12 +335,6 @@ public:
}
};
static RegisterStoreImplementation regStore([](
const std::string & uri, const Store::Params & params)
-> std::shared_ptr<Store>
{
if (std::string(uri, 0, uriScheme.size()) != uriScheme) return 0;
return std::make_shared<LegacySSHStore>(std::string(uri, uriScheme.size()), params);
});
static RegisterStoreImplementation<LegacySSHStore, LegacySSHStoreConfig> regLegacySSHStore;
}

View file

@ -4,7 +4,14 @@
namespace nix {
class LocalBinaryCacheStore : public BinaryCacheStore
struct LocalBinaryCacheStoreConfig : virtual BinaryCacheStoreConfig
{
using BinaryCacheStoreConfig::BinaryCacheStoreConfig;
const std::string name() override { return "Local Binary Cache Store"; }
};
class LocalBinaryCacheStore : public BinaryCacheStore, public virtual LocalBinaryCacheStoreConfig
{
private:
@ -13,8 +20,11 @@ private:
public:
LocalBinaryCacheStore(
const Params & params, const Path & binaryCacheDir)
: BinaryCacheStore(params)
const std::string scheme,
const Path & binaryCacheDir,
const Params & params)
: StoreConfig(params)
, BinaryCacheStore(params)
, binaryCacheDir(binaryCacheDir)
{
}
@ -26,6 +36,8 @@ public:
return "file://" + binaryCacheDir;
}
static std::set<std::string> uriSchemes();
protected:
bool fileExists(const std::string & path) override;
@ -85,16 +97,14 @@ bool LocalBinaryCacheStore::fileExists(const std::string & path)
return pathExists(binaryCacheDir + "/" + path);
}
static RegisterStoreImplementation regStore([](
const std::string & uri, const Store::Params & params)
-> std::shared_ptr<Store>
std::set<std::string> LocalBinaryCacheStore::uriSchemes()
{
if (getEnv("_NIX_FORCE_HTTP_BINARY_CACHE_STORE") == "1" ||
std::string(uri, 0, 7) != "file://")
return 0;
auto store = std::make_shared<LocalBinaryCacheStore>(params, std::string(uri, 7));
store->init();
return store;
});
if (getEnv("_NIX_FORCE_HTTP_BINARY_CACHE_STORE") == "1")
return {};
else
return {"file"};
}
static RegisterStoreImplementation<LocalBinaryCacheStore, LocalBinaryCacheStoreConfig> regLocalBinaryCacheStore;
}

View file

@ -6,6 +6,7 @@
#include "derivations.hh"
#include "nar-info.hh"
#include "references.hh"
#include "callback.hh"
#include <iostream>
#include <algorithm>
@ -42,7 +43,8 @@ namespace nix {
LocalStore::LocalStore(const Params & params)
: Store(params)
: StoreConfig(params)
, Store(params)
, LocalFSStore(params)
, realStoreDir_{this, false, rootDir != "" ? rootDir + "/nix/store" : storeDir, "real",
"physical path to the Nix store"}
@ -108,12 +110,11 @@ LocalStore::LocalStore(const Params & params)
}
/* Ensure that the store and its parents are not symlinks. */
if (getEnv("NIX_IGNORE_SYMLINK_STORE") != "1") {
if (!settings.allowSymlinkedStore) {
Path path = realStoreDir;
struct stat st;
while (path != "/") {
if (lstat(path.c_str(), &st))
throw SysError("getting status of '%1%'", path);
st = lstat(path);
if (S_ISLNK(st.st_mode))
throw Error(
"the path '%1%' is a symlink; "
@ -417,10 +418,7 @@ static void canonicaliseTimestampAndPermissions(const Path & path, const struct
void canonicaliseTimestampAndPermissions(const Path & path)
{
struct stat st;
if (lstat(path.c_str(), &st))
throw SysError("getting attributes of path '%1%'", path);
canonicaliseTimestampAndPermissions(path, st);
canonicaliseTimestampAndPermissions(path, lstat(path));
}
@ -438,9 +436,7 @@ static void canonicalisePathMetaData_(const Path & path, uid_t fromUid, InodesSe
}
#endif
struct stat st;
if (lstat(path.c_str(), &st))
throw SysError("getting attributes of path '%1%'", path);
auto st = lstat(path);
/* Really make sure that the path is of a supported type. */
if (!(S_ISREG(st.st_mode) || S_ISDIR(st.st_mode) || S_ISLNK(st.st_mode)))
@ -476,8 +472,7 @@ static void canonicalisePathMetaData_(const Path & path, uid_t fromUid, InodesSe
ensure that we don't fail on hard links within the same build
(i.e. "touch $out/foo; ln $out/foo $out/bar"). */
if (fromUid != (uid_t) -1 && st.st_uid != fromUid) {
assert(!S_ISDIR(st.st_mode));
if (inodesSeen.find(Inode(st.st_dev, st.st_ino)) == inodesSeen.end())
if (S_ISDIR(st.st_mode) || !inodesSeen.count(Inode(st.st_dev, st.st_ino)))
throw BuildError("invalid ownership on file '%1%'", path);
mode_t mode = st.st_mode & ~S_IFMT;
assert(S_ISLNK(st.st_mode) || (st.st_uid == geteuid() && (mode == 0444 || mode == 0555) && st.st_mtime == mtimeStore));
@ -520,9 +515,7 @@ void canonicalisePathMetaData(const Path & path, uid_t fromUid, InodesSeen & ino
/* On platforms that don't have lchown(), the top-level path can't
be a symlink, since we can't change its ownership. */
struct stat st;
if (lstat(path.c_str(), &st))
throw SysError("getting attributes of path '%1%'", path);
auto st = lstat(path);
if (st.st_uid != geteuid()) {
assert(S_ISLNK(st.st_mode));
@ -578,13 +571,32 @@ void LocalStore::checkDerivationOutputs(const StorePath & drvPath, const Derivat
envHasRightPath(path, i.first);
},
[&](DerivationOutputCAFloating _) {
throw UnimplementedError("floating CA output derivations are not yet implemented");
/* Nothing to check */
},
}, i.second.output);
}
}
void LocalStore::linkDeriverToPath(const StorePath & deriver, const string & outputName, const StorePath & output)
{
auto state(_state.lock());
return linkDeriverToPath(*state, queryValidPathId(*state, deriver), outputName, output);
}
void LocalStore::linkDeriverToPath(State & state, uint64_t deriver, const string & outputName, const StorePath & output)
{
retrySQLite<void>([&]() {
state.stmtAddDerivationOutput.use()
(deriver)
(outputName)
(printStorePath(output))
.exec();
});
}
uint64_t LocalStore::addValidPath(State & state,
const ValidPathInfo & info, bool checkOutputs)
{
@ -618,12 +630,11 @@ uint64_t LocalStore::addValidPath(State & state,
registration above is undone. */
if (checkOutputs) checkDerivationOutputs(info.path, drv);
for (auto & i : drv.outputsAndPaths(*this)) {
state.stmtAddDerivationOutput.use()
(id)
(i.first)
(printStorePath(i.second.second))
.exec();
for (auto & i : drv.outputsAndOptPaths(*this)) {
/* Floating CA derivations have indeterminate output paths until
they are built, so don't register anything in that case */
if (i.second.second)
linkDeriverToPath(state, id, i.first, *i.second.second);
}
}
@ -710,7 +721,7 @@ uint64_t LocalStore::queryValidPathId(State & state, const StorePath & path)
{
auto use(state.stmtQueryPathInfo.use()(printStorePath(path)));
if (!use.next())
throw Error("path '%s' is not valid", printStorePath(path));
throw InvalidPath("path '%s' is not valid", printStorePath(path));
return use.getInt(0);
}
@ -785,18 +796,58 @@ StorePathSet LocalStore::queryValidDerivers(const StorePath & path)
}
std::map<std::string, std::optional<StorePath>> LocalStore::queryPartialDerivationOutputMap(const StorePath & path)
std::map<std::string, std::optional<StorePath>> LocalStore::queryPartialDerivationOutputMap(const StorePath & path_)
{
auto path = path_;
std::map<std::string, std::optional<StorePath>> outputs;
BasicDerivation drv = readDerivation(path);
Derivation drv = readDerivation(path);
for (auto & [outName, _] : drv.outputs) {
outputs.insert_or_assign(outName, std::nullopt);
}
bool haveCached = false;
{
auto resolutions = drvPathResolutions.lock();
auto resolvedPathOptIter = resolutions->find(path);
if (resolvedPathOptIter != resolutions->end()) {
auto & [_, resolvedPathOpt] = *resolvedPathOptIter;
if (resolvedPathOpt)
path = *resolvedPathOpt;
haveCached = true;
}
}
/* We can't just use else-if instead of `!haveCached` because we need to unlock
`drvPathResolutions` before it is locked again when the resolution is memoized below. */
if (!haveCached && drv.type() == DerivationType::CAFloating) {
/* Try resolve drv and use that path instead. */
auto attempt = drv.tryResolve(*this);
if (!attempt)
/* If we cannot resolve the derivation, we cannot have any path
assigned so we return the map of all std::nullopts. */
return outputs;
/* Just compute store path */
auto pathResolved = writeDerivation(*this, *std::move(attempt), NoRepair, true);
/* Store in memo table. */
/* FIXME: memo logic should not be local-store specific, should have
wrapper-method instead. */
drvPathResolutions.lock()->insert_or_assign(path, pathResolved);
path = std::move(pathResolved);
}
return retrySQLite<std::map<std::string, std::optional<StorePath>>>([&]() {
auto state(_state.lock());
auto useQueryDerivationOutputs(state->stmtQueryDerivationOutputs.use()
(queryValidPathId(*state, path)));
uint64_t drvId;
try {
drvId = queryValidPathId(*state, path);
} catch (InvalidPath &) {
/* FIXME? if the derivation doesn't exist, we cannot have a mapping
for it. */
return outputs;
}
auto useQueryDerivationOutputs {
state->stmtQueryDerivationOutputs.use()
(drvId)
};
while (useQueryDerivationOutputs.next())
outputs.insert_or_assign(
@ -1435,7 +1486,7 @@ static void makeMutable(const Path & path)
{
checkInterrupt();
struct stat st = lstat(path);
auto st = lstat(path);
if (!S_ISDIR(st.st_mode) && !S_ISREG(st.st_mode)) return;

View file

@ -30,8 +30,19 @@ struct OptimiseStats
uint64_t blocksFreed = 0;
};
struct LocalStoreConfig : virtual LocalFSStoreConfig
{
using LocalFSStoreConfig::LocalFSStoreConfig;
class LocalStore : public LocalFSStore
Setting<bool> requireSigs{(StoreConfig*) this,
settings.requireSigs,
"require-sigs", "whether store paths should have a trusted signature on import"};
const std::string name() override { return "Local Store"; }
};
class LocalStore : public LocalFSStore, public virtual LocalStoreConfig
{
private:
@ -95,10 +106,6 @@ public:
private:
Setting<bool> requireSigs{(Store*) this,
settings.requireSigs,
"require-sigs", "whether store paths should have a trusted signature on import"};
const PublicKeys & getPublicKeys();
public:
@ -279,6 +286,11 @@ private:
specified by the secret-key-files option. */
void signPathInfo(ValidPathInfo & info);
/* Register the store path 'output' as the output named 'outputName' of
derivation 'deriver'. */
void linkDeriverToPath(const StorePath & deriver, const string & outputName, const StorePath & output);
void linkDeriverToPath(State & state, uint64_t deriver, const string & outputName, const StorePath & output);
Path getRealStoreDir() override { return realStoreDir; }
void createUser(const std::string & userName, uid_t userId) override;

View file

@ -5,7 +5,7 @@
#include "store-api.hh"
#include "thread-pool.hh"
#include "topo-sort.hh"
#include "callback.hh"
namespace nix {
@ -203,17 +203,24 @@ void Store::queryMissing(const std::vector<StorePathWithOutputs> & targets,
return;
}
PathSet invalid;
/* true for regular derivations, and CA derivations for which we
have a trust mapping for all wanted outputs. */
auto knownOutputPaths = true;
for (auto & [outputName, pathOpt] : queryPartialDerivationOutputMap(path.path)) {
if (!pathOpt) {
knownOutputPaths = false;
break;
}
if (wantOutput(outputName, path.outputs) && !isValidPath(*pathOpt))
invalid.insert(printStorePath(*pathOpt));
}
if (knownOutputPaths && invalid.empty()) return;
auto drv = make_ref<Derivation>(derivationFromPath(path.path));
ParsedDerivation parsedDrv(StorePath(path.path), *drv);
PathSet invalid;
for (auto & j : drv->outputsAndPaths(*this))
if (wantOutput(j.first, path.outputs)
&& !isValidPath(j.second.second))
invalid.insert(printStorePath(j.second.second));
if (invalid.empty()) return;
if (settings.useSubstitutes && parsedDrv.substitutesAllowed()) {
if (knownOutputPaths && settings.useSubstitutes && parsedDrv.substitutesAllowed()) {
auto drvState = make_ref<Sync<DrvState>>(DrvState(invalid.size()));
for (auto & output : invalid)
pool.enqueue(std::bind(checkOutput, printStorePath(path.path), drv, output, drvState));

View file

@ -1,10 +1,18 @@
#include "names.hh"
#include "util.hh"
#include <regex>
namespace nix {
struct Regex
{
std::regex regex;
};
DrvName::DrvName()
{
name = "";
@ -30,11 +38,18 @@ DrvName::DrvName(std::string_view s) : hits(0)
}
DrvName::~DrvName()
{ }
bool DrvName::matches(DrvName & n)
{
if (name != "*") {
if (!regex) regex = std::unique_ptr<std::regex>(new std::regex(name, std::regex::extended));
if (!std::regex_match(n.name, *regex)) return false;
if (!regex) {
regex = std::make_unique<Regex>();
regex->regex = std::regex(name, std::regex::extended);
}
if (!std::regex_match(n.name, regex->regex)) return false;
}
if (version != "" && version != n.version) return false;
return true;
@ -99,7 +114,7 @@ DrvNames drvNamesFromArgs(const Strings & opArgs)
{
DrvNames result;
for (auto & i : opArgs)
result.push_back(DrvName(i));
result.emplace_back(i);
return result;
}

View file

@ -3,10 +3,11 @@
#include <memory>
#include "types.hh"
#include <regex>
namespace nix {
struct Regex;
struct DrvName
{
string fullName;
@ -16,10 +17,12 @@ struct DrvName
DrvName();
DrvName(std::string_view s);
~DrvName();
bool matches(DrvName & n);
private:
std::unique_ptr<std::regex> regex;
std::unique_ptr<Regex> regex;
};
typedef list<DrvName> DrvNames;

View file

@ -17,9 +17,7 @@ namespace nix {
static void makeWritable(const Path & path)
{
struct stat st;
if (lstat(path.c_str(), &st))
throw SysError("getting attributes of path '%1%'", path);
auto st = lstat(path);
if (chmod(path.c_str(), st.st_mode | S_IWUSR) == -1)
throw SysError("changing writability of '%1%'", path);
}
@ -94,9 +92,7 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
{
checkInterrupt();
struct stat st;
if (lstat(path.c_str(), &st))
throw SysError("getting attributes of path '%1%'", path);
auto st = lstat(path);
#if __APPLE__
/* HFS/macOS has some undocumented security feature disabling hardlinking for
@ -187,9 +183,7 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
/* Yes! We've seen a file with the same contents. Replace the
current file with a hard link to that file. */
struct stat stLink;
if (lstat(linkPath.c_str(), &stLink))
throw SysError("getting attributes of path '%1%'", linkPath);
auto stLink = lstat(linkPath);
if (st.st_ino == stLink.st_ino) {
debug(format("'%1%' is already linked to '%2%'") % path % linkPath);
@ -282,21 +276,15 @@ void LocalStore::optimiseStore(OptimiseStats & stats)
}
}
static string showBytes(uint64_t bytes)
{
return (format("%.2f MiB") % (bytes / (1024.0 * 1024.0))).str();
}
void LocalStore::optimiseStore()
{
OptimiseStats stats;
optimiseStore(stats);
printInfo(
format("%1% freed by hard-linking %2% files")
% showBytes(stats.bytesFreed)
% stats.filesLinked);
printInfo("%s freed by hard-linking %d files",
showBytes(stats.bytesFreed),
stats.filesLinked);
}
void LocalStore::optimisePath(const Path & path)

View file

@ -1,3 +1,5 @@
#pragma once
#include "store-api.hh"
#include <nlohmann/json_fwd.hpp>

View file

@ -39,13 +39,10 @@ std::pair<Generations, std::optional<GenerationNumber>> findGenerations(Path pro
for (auto & i : readDirectory(profileDir)) {
if (auto n = parseName(profileName, i.name)) {
auto path = profileDir + "/" + i.name;
struct stat st;
if (lstat(path.c_str(), &st) != 0)
throw SysError("statting '%1%'", path);
gens.push_back({
.number = *n,
.path = path,
.creationTime = st.st_mtime
.creationTime = lstat(path).st_mtime
});
}
}

View file

@ -79,9 +79,17 @@ void RefScanSink::operator () (const unsigned char * data, size_t len)
std::pair<PathSet, HashResult> scanForReferences(const string & path,
const PathSet & refs)
{
RefScanSink refsSink;
HashSink hashSink { htSHA256 };
TeeSink sink { refsSink, hashSink };
auto found = scanForReferences(hashSink, path, refs);
auto hash = hashSink.finish();
return std::pair<PathSet, HashResult>(found, hash);
}
PathSet scanForReferences(Sink & toTee,
const string & path, const PathSet & refs)
{
RefScanSink refsSink;
TeeSink sink { refsSink, toTee };
std::map<string, Path> backMap;
/* For efficiency (and a higher hit rate), just search for the
@ -111,9 +119,7 @@ std::pair<PathSet, HashResult> scanForReferences(const string & path,
found.insert(j->second);
}
auto hash = hashSink.finish();
return std::pair<PathSet, HashResult>(found, hash);
return found;
}

View file

@ -7,6 +7,8 @@ namespace nix {
std::pair<PathSet, HashResult> scanForReferences(const Path & path, const PathSet & refs);
PathSet scanForReferences(Sink & toTee, const Path & path, const PathSet & refs);
struct RewritingSink : Sink
{
std::string from, to, prev;

View file

@ -1,5 +1,6 @@
#include "serialise.hh"
#include "util.hh"
#include "remote-fs-accessor.hh"
#include "remote-store.hh"
#include "worker-protocol.hh"
#include "archive.hh"
@ -9,6 +10,7 @@
#include "pool.hh"
#include "finally.hh"
#include "logging.hh"
#include "callback.hh"
#include <sys/types.h>
#include <sys/stat.h>
@ -22,44 +24,19 @@
namespace nix {
template<> StorePathSet readStorePaths(const Store & store, Source & from)
{
StorePathSet paths;
for (auto & i : readStrings<Strings>(from))
paths.insert(store.parseStorePath(i));
return paths;
}
void writeStorePaths(const Store & store, Sink & out, const StorePathSet & paths)
{
out << paths.size();
for (auto & i : paths)
out << store.printStorePath(i);
}
StorePathCAMap readStorePathCAMap(const Store & store, Source & from)
{
StorePathCAMap paths;
auto count = readNum<size_t>(from);
while (count--)
paths.insert_or_assign(store.parseStorePath(readString(from)), parseContentAddressOpt(readString(from)));
return paths;
}
void writeStorePathCAMap(const Store & store, Sink & out, const StorePathCAMap & paths)
{
out << paths.size();
for (auto & i : paths) {
out << store.printStorePath(i.first);
out << renderContentAddress(i.second);
}
}
namespace worker_proto {
std::string read(const Store & store, Source & from, Phantom<std::string> _)
{
return readString(from);
}
void write(const Store & store, Sink & out, const std::string & str)
{
out << str;
}
StorePath read(const Store & store, Source & from, Phantom<StorePath> _)
{
return store.parseStorePath(readString(from));
@ -71,28 +48,58 @@ void write(const Store & store, Sink & out, const StorePath & storePath)
}
template<>
ContentAddress read(const Store & store, Source & from, Phantom<ContentAddress> _)
{
return parseContentAddress(readString(from));
}
void write(const Store & store, Sink & out, const ContentAddress & ca)
{
out << renderContentAddress(ca);
}
std::optional<StorePath> read(const Store & store, Source & from, Phantom<std::optional<StorePath>> _)
{
auto s = readString(from);
return s == "" ? std::optional<StorePath> {} : store.parseStorePath(s);
}
template<>
void write(const Store & store, Sink & out, const std::optional<StorePath> & storePathOpt)
{
out << (storePathOpt ? store.printStorePath(*storePathOpt) : "");
}
std::optional<ContentAddress> read(const Store & store, Source & from, Phantom<std::optional<ContentAddress>> _)
{
return parseContentAddressOpt(readString(from));
}
void write(const Store & store, Sink & out, const std::optional<ContentAddress> & caOpt)
{
out << (caOpt ? renderContentAddress(*caOpt) : "");
}
}
/* TODO: Separate these store impls into different files, give them better names */
RemoteStore::RemoteStore(const Params & params)
: Store(params)
, RemoteStoreConfig(params)
, connections(make_ref<Pool<Connection>>(
std::max(1, (int) maxConnections),
[this]() { return openConnectionWrapper(); },
[this]() {
auto conn = openConnectionWrapper();
try {
initConnection(*conn);
} catch (...) {
failed = true;
throw;
}
return conn;
},
[this](const ref<Connection> & r) {
return
r->to.good()
@ -119,19 +126,21 @@ ref<RemoteStore::Connection> RemoteStore::openConnectionWrapper()
UDSRemoteStore::UDSRemoteStore(const Params & params)
: Store(params)
: StoreConfig(params)
, Store(params)
, LocalFSStore(params)
, RemoteStore(params)
{
}
UDSRemoteStore::UDSRemoteStore(std::string socket_path, const Params & params)
: Store(params)
, LocalFSStore(params)
, RemoteStore(params)
, path(socket_path)
UDSRemoteStore::UDSRemoteStore(
const std::string scheme,
std::string socket_path,
const Params & params)
: UDSRemoteStore(params)
{
path.emplace(socket_path);
}
@ -175,8 +184,6 @@ ref<RemoteStore::Connection> UDSRemoteStore::openConnection()
conn->startTime = std::chrono::steady_clock::now();
initConnection(*conn);
return conn;
}
@ -292,6 +299,8 @@ struct ConnectionHandle
std::rethrow_exception(ex);
}
}
void withFramedSink(std::function<void(Sink & sink)> fun);
};
@ -320,9 +329,9 @@ StorePathSet RemoteStore::queryValidPaths(const StorePathSet & paths, Substitute
return res;
} else {
conn->to << wopQueryValidPaths;
writeStorePaths(*this, conn->to, paths);
worker_proto::write(*this, conn->to, paths);
conn.processStderr();
return readStorePaths<StorePathSet>(*this, conn->from);
return worker_proto::read(*this, conn->from, Phantom<StorePathSet> {});
}
}
@ -332,7 +341,7 @@ StorePathSet RemoteStore::queryAllValidPaths()
auto conn(getConnection());
conn->to << wopQueryAllValidPaths;
conn.processStderr();
return readStorePaths<StorePathSet>(*this, conn->from);
return worker_proto::read(*this, conn->from, Phantom<StorePathSet> {});
}
@ -349,9 +358,9 @@ StorePathSet RemoteStore::querySubstitutablePaths(const StorePathSet & paths)
return res;
} else {
conn->to << wopQuerySubstitutablePaths;
writeStorePaths(*this, conn->to, paths);
worker_proto::write(*this, conn->to, paths);
conn.processStderr();
return readStorePaths<StorePathSet>(*this, conn->from);
return worker_proto::read(*this, conn->from, Phantom<StorePathSet> {});
}
}
@ -373,7 +382,7 @@ void RemoteStore::querySubstitutablePathInfos(const StorePathCAMap & pathsMap, S
auto deriver = readString(conn->from);
if (deriver != "")
info.deriver = parseStorePath(deriver);
info.references = readStorePaths<StorePathSet>(*this, conn->from);
info.references = worker_proto::read(*this, conn->from, Phantom<StorePathSet> {});
info.downloadSize = readLongLong(conn->from);
info.narSize = readLongLong(conn->from);
infos.insert_or_assign(i.first, std::move(info));
@ -386,9 +395,9 @@ void RemoteStore::querySubstitutablePathInfos(const StorePathCAMap & pathsMap, S
StorePathSet paths;
for (auto & path : pathsMap)
paths.insert(path.first);
writeStorePaths(*this, conn->to, paths);
worker_proto::write(*this, conn->to, paths);
} else
writeStorePathCAMap(*this, conn->to, pathsMap);
worker_proto::write(*this, conn->to, pathsMap);
conn.processStderr();
size_t count = readNum<size_t>(conn->from);
for (size_t n = 0; n < count; n++) {
@ -396,7 +405,7 @@ void RemoteStore::querySubstitutablePathInfos(const StorePathCAMap & pathsMap, S
auto deriver = readString(conn->from);
if (deriver != "")
info.deriver = parseStorePath(deriver);
info.references = readStorePaths<StorePathSet>(*this, conn->from);
info.references = worker_proto::read(*this, conn->from, Phantom<StorePathSet> {});
info.downloadSize = readLongLong(conn->from);
info.narSize = readLongLong(conn->from);
}
@ -405,11 +414,28 @@ void RemoteStore::querySubstitutablePathInfos(const StorePathCAMap & pathsMap, S
}
ref<const ValidPathInfo> RemoteStore::readValidPathInfo(ConnectionHandle & conn, const StorePath & path)
{
auto deriver = readString(conn->from);
auto narHash = Hash::parseAny(readString(conn->from), htSHA256);
auto info = make_ref<ValidPathInfo>(path, narHash);
if (deriver != "") info->deriver = parseStorePath(deriver);
info->references = worker_proto::read(*this, conn->from, Phantom<StorePathSet> {});
conn->from >> info->registrationTime >> info->narSize;
if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 16) {
conn->from >> info->ultimate;
info->sigs = readStrings<StringSet>(conn->from);
info->ca = parseContentAddressOpt(readString(conn->from));
}
return info;
}
void RemoteStore::queryPathInfoUncached(const StorePath & path,
Callback<std::shared_ptr<const ValidPathInfo>> callback) noexcept
{
try {
std::shared_ptr<ValidPathInfo> info;
std::shared_ptr<const ValidPathInfo> info;
{
auto conn(getConnection());
conn->to << wopQueryPathInfo << printStorePath(path);
@ -425,17 +451,7 @@ void RemoteStore::queryPathInfoUncached(const StorePath & path,
bool valid; conn->from >> valid;
if (!valid) throw InvalidPath("path '%s' is not valid", printStorePath(path));
}
auto deriver = readString(conn->from);
auto narHash = Hash::parseAny(readString(conn->from), htSHA256);
info = std::make_shared<ValidPathInfo>(path, narHash);
if (deriver != "") info->deriver = parseStorePath(deriver);
info->references = readStorePaths<StorePathSet>(*this, conn->from);
conn->from >> info->registrationTime >> info->narSize;
if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 16) {
conn->from >> info->ultimate;
info->sigs = readStrings<StringSet>(conn->from);
info->ca = parseContentAddressOpt(readString(conn->from));
}
info = readValidPathInfo(conn, path);
}
callback(std::move(info));
} catch (...) { callback.rethrow(); }
@ -448,7 +464,7 @@ void RemoteStore::queryReferrers(const StorePath & path,
auto conn(getConnection());
conn->to << wopQueryReferrers << printStorePath(path);
conn.processStderr();
for (auto & i : readStorePaths<StorePathSet>(*this, conn->from))
for (auto & i : worker_proto::read(*this, conn->from, Phantom<StorePathSet> {}))
referrers.insert(i);
}
@ -458,7 +474,7 @@ StorePathSet RemoteStore::queryValidDerivers(const StorePath & path)
auto conn(getConnection());
conn->to << wopQueryValidDerivers << printStorePath(path);
conn.processStderr();
return readStorePaths<StorePathSet>(*this, conn->from);
return worker_proto::read(*this, conn->from, Phantom<StorePathSet> {});
}
@ -470,17 +486,32 @@ StorePathSet RemoteStore::queryDerivationOutputs(const StorePath & path)
}
conn->to << wopQueryDerivationOutputs << printStorePath(path);
conn.processStderr();
return readStorePaths<StorePathSet>(*this, conn->from);
return worker_proto::read(*this, conn->from, Phantom<StorePathSet> {});
}
std::map<std::string, std::optional<StorePath>> RemoteStore::queryPartialDerivationOutputMap(const StorePath & path)
{
auto conn(getConnection());
conn->to << wopQueryDerivationOutputMap << printStorePath(path);
conn.processStderr();
return worker_proto::read(*this, conn->from, Phantom<std::map<std::string, std::optional<StorePath>>> {});
if (GET_PROTOCOL_MINOR(getProtocol()) >= 0x16) {
auto conn(getConnection());
conn->to << wopQueryDerivationOutputMap << printStorePath(path);
conn.processStderr();
return worker_proto::read(*this, conn->from, Phantom<std::map<std::string, std::optional<StorePath>>> {});
} else {
// Fallback for old daemon versions.
// For floating-CA derivations (and their co-dependencies) this is an
// under-approximation as it only returns the paths that can be inferred
// from the derivation itself (and not the ones that are known because
// they have been built), but as old stores don't handle floating-CA
// derivations this shouldn't matter.
auto derivation = readDerivation(path);
auto outputsWithOptPaths = derivation.outputsAndOptPaths(*this);
std::map<std::string, std::optional<StorePath>> ret;
for (auto & [outputName, outputAndPath] : outputsWithOptPaths) {
ret.emplace(outputName, outputAndPath.second);
}
return ret;
}
}
std::optional<StorePath> RemoteStore::queryPathFromHashPart(const std::string & hashPart)
@ -494,6 +525,93 @@ std::optional<StorePath> RemoteStore::queryPathFromHashPart(const std::string &
}
ref<const ValidPathInfo> RemoteStore::addCAToStore(
Source & dump,
const string & name,
ContentAddressMethod caMethod,
const StorePathSet & references,
RepairFlag repair)
{
std::optional<ConnectionHandle> conn_(getConnection());
auto & conn = *conn_;
if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 25) {
conn->to
<< wopAddToStore
<< name
<< renderContentAddressMethod(caMethod);
worker_proto::write(*this, conn->to, references);
conn->to << repair;
conn.withFramedSink([&](Sink & sink) {
dump.drainInto(sink);
});
auto path = parseStorePath(readString(conn->from));
return readValidPathInfo(conn, path);
}
else {
if (repair) throw Error("repairing is not supported when building through the Nix daemon protocol < 1.25");
std::visit(overloaded {
[&](TextHashMethod thm) -> void {
std::string s = dump.drain();
conn->to << wopAddTextToStore << name << s;
worker_proto::write(*this, conn->to, references);
conn.processStderr();
},
[&](FixedOutputHashMethod fohm) -> void {
conn->to
<< wopAddToStore
<< name
<< ((fohm.hashType == htSHA256 && fohm.fileIngestionMethod == FileIngestionMethod::Recursive) ? 0 : 1) /* backwards compatibility hack */
<< (fohm.fileIngestionMethod == FileIngestionMethod::Recursive ? 1 : 0)
<< printHashType(fohm.hashType);
try {
conn->to.written = 0;
conn->to.warn = true;
connections->incCapacity();
{
Finally cleanup([&]() { connections->decCapacity(); });
if (fohm.fileIngestionMethod == FileIngestionMethod::Recursive) {
dump.drainInto(conn->to);
} else {
std::string contents = dump.drain();
dumpString(contents, conn->to);
}
}
conn->to.warn = false;
conn.processStderr();
} catch (SysError & e) {
/* Daemon closed while we were sending the path. Probably OOM
or I/O error. */
if (e.errNo == EPIPE)
try {
conn.processStderr();
} catch (EndOfFile & e) { }
throw;
}
}
}, caMethod);
auto path = parseStorePath(readString(conn->from));
// Release our connection to prevent a deadlock in queryPathInfo().
conn_.reset();
return queryPathInfo(path);
}
}
StorePath RemoteStore::addToStoreFromDump(Source & dump, const string & name,
FileIngestionMethod method, HashType hashType, RepairFlag repair)
{
StorePathSet references;
return addCAToStore(dump, name, FixedOutputHashMethod{ .fileIngestionMethod = method, .hashType = hashType }, references, repair)->path;
}
void RemoteStore::addToStore(const ValidPathInfo & info, Source & source,
RepairFlag repair, CheckSigsFlag checkSigs)
{
@ -509,7 +627,7 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source,
sink
<< exportMagic
<< printStorePath(info.path);
writeStorePaths(*this, sink, info.references);
worker_proto::write(*this, sink, info.references);
sink
<< (info.deriver ? printStorePath(*info.deriver) : "")
<< 0 // == no legacy signature
@ -519,7 +637,7 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source,
conn.processStderr(0, source2.get());
auto importedPaths = readStorePaths<StorePathSet>(*this, conn->from);
auto importedPaths = worker_proto::read(*this, conn->from, Phantom<StorePathSet> {});
assert(importedPaths.size() <= 1);
}
@ -528,84 +646,15 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source,
<< printStorePath(info.path)
<< (info.deriver ? printStorePath(*info.deriver) : "")
<< info.narHash.to_string(Base16, false);
writeStorePaths(*this, conn->to, info.references);
worker_proto::write(*this, conn->to, info.references);
conn->to << info.registrationTime << info.narSize
<< info.ultimate << info.sigs << renderContentAddress(info.ca)
<< repair << !checkSigs;
if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 23) {
conn->to.flush();
std::exception_ptr ex;
struct FramedSink : BufferedSink
{
ConnectionHandle & conn;
std::exception_ptr & ex;
FramedSink(ConnectionHandle & conn, std::exception_ptr & ex) : conn(conn), ex(ex)
{ }
~FramedSink()
{
try {
conn->to << 0;
conn->to.flush();
} catch (...) {
ignoreException();
}
}
void write(const unsigned char * data, size_t len) override
{
/* Don't send more data if the remote has
encountered an error. */
if (ex) {
auto ex2 = ex;
ex = nullptr;
std::rethrow_exception(ex2);
}
conn->to << len;
conn->to(data, len);
};
};
/* Handle log messages / exceptions from the remote on a
separate thread. */
std::thread stderrThread([&]()
{
try {
conn.processStderr(nullptr, nullptr, false);
} catch (...) {
ex = std::current_exception();
}
});
Finally joinStderrThread([&]()
{
if (stderrThread.joinable()) {
stderrThread.join();
if (ex) {
try {
std::rethrow_exception(ex);
} catch (...) {
ignoreException();
}
}
}
});
{
FramedSink sink(conn, ex);
conn.withFramedSink([&](Sink & sink) {
copyNAR(source, sink);
sink.flush();
}
stderrThread.join();
if (ex)
std::rethrow_exception(ex);
});
} else if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 21) {
conn.processStderr(0, &source);
} else {
@ -616,57 +665,11 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source,
}
StorePath RemoteStore::addToStore(const string & name, const Path & _srcPath,
FileIngestionMethod method, HashType hashAlgo, PathFilter & filter, RepairFlag repair)
{
if (repair) throw Error("repairing is not supported when building through the Nix daemon");
auto conn(getConnection());
Path srcPath(absPath(_srcPath));
conn->to
<< wopAddToStore
<< name
<< ((hashAlgo == htSHA256 && method == FileIngestionMethod::Recursive) ? 0 : 1) /* backwards compatibility hack */
<< (method == FileIngestionMethod::Recursive ? 1 : 0)
<< printHashType(hashAlgo);
try {
conn->to.written = 0;
conn->to.warn = true;
connections->incCapacity();
{
Finally cleanup([&]() { connections->decCapacity(); });
dumpPath(srcPath, conn->to, filter);
}
conn->to.warn = false;
conn.processStderr();
} catch (SysError & e) {
/* Daemon closed while we were sending the path. Probably OOM
or I/O error. */
if (e.errNo == EPIPE)
try {
conn.processStderr();
} catch (EndOfFile & e) { }
throw;
}
return parseStorePath(readString(conn->from));
}
StorePath RemoteStore::addTextToStore(const string & name, const string & s,
const StorePathSet & references, RepairFlag repair)
{
if (repair) throw Error("repairing is not supported when building through the Nix daemon");
auto conn(getConnection());
conn->to << wopAddTextToStore << name << s;
writeStorePaths(*this, conn->to, references);
conn.processStderr();
return parseStorePath(readString(conn->from));
StringSource source(s);
return addCAToStore(source, name, TextHashMethod{}, references, repair)->path;
}
@ -765,7 +768,7 @@ void RemoteStore::collectGarbage(const GCOptions & options, GCResults & results)
conn->to
<< wopCollectGarbage << options.action;
writeStorePaths(*this, conn->to, options.pathsToDelete);
worker_proto::write(*this, conn->to, options.pathsToDelete);
conn->to << options.ignoreLiveness
<< options.maxFreed
/* removed options */
@ -827,9 +830,9 @@ void RemoteStore::queryMissing(const std::vector<StorePathWithOutputs> & targets
ss.push_back(p.to_string(*this));
conn->to << ss;
conn.processStderr();
willBuild = readStorePaths<StorePathSet>(*this, conn->from);
willSubstitute = readStorePaths<StorePathSet>(*this, conn->from);
unknown = readStorePaths<StorePathSet>(*this, conn->from);
willBuild = worker_proto::read(*this, conn->from, Phantom<StorePathSet> {});
willSubstitute = worker_proto::read(*this, conn->from, Phantom<StorePathSet> {});
unknown = worker_proto::read(*this, conn->from, Phantom<StorePathSet> {});
conn->from >> downloadSize >> narSize;
return;
}
@ -868,6 +871,18 @@ RemoteStore::Connection::~Connection()
}
}
void RemoteStore::narFromPath(const StorePath & path, Sink & sink)
{
auto conn(connections->get());
conn->to << wopNarFromPath << printStorePath(path);
conn->processStderr();
copyNAR(conn->from, sink);
}
ref<FSAccessor> RemoteStore::getFSAccessor()
{
return make_ref<RemoteFSAccessor>(ref<Store>(shared_from_this()));
}
static Logger::Fields readFields(Source & from)
{
@ -910,9 +925,13 @@ std::exception_ptr RemoteStore::Connection::processStderr(Sink * sink, Source *
}
else if (msg == STDERR_ERROR) {
string error = readString(from);
unsigned int status = readInt(from);
return std::make_exception_ptr(Error(status, error));
if (GET_PROTOCOL_MINOR(daemonVersion) >= 26) {
return std::make_exception_ptr(readError(from));
} else {
string error = readString(from);
unsigned int status = readInt(from);
return std::make_exception_ptr(Error(status, error));
}
}
else if (msg == STDERR_NEXT)
@ -950,14 +969,49 @@ std::exception_ptr RemoteStore::Connection::processStderr(Sink * sink, Source *
return nullptr;
}
static std::string uriScheme = "unix://";
static RegisterStoreImplementation regStore([](
const std::string & uri, const Store::Params & params)
-> std::shared_ptr<Store>
void ConnectionHandle::withFramedSink(std::function<void(Sink &sink)> fun)
{
if (std::string(uri, 0, uriScheme.size()) != uriScheme) return 0;
return std::make_shared<UDSRemoteStore>(std::string(uri, uriScheme.size()), params);
});
(*this)->to.flush();
std::exception_ptr ex;
/* Handle log messages / exceptions from the remote on a
separate thread. */
std::thread stderrThread([&]()
{
try {
processStderr(nullptr, nullptr, false);
} catch (...) {
ex = std::current_exception();
}
});
Finally joinStderrThread([&]()
{
if (stderrThread.joinable()) {
stderrThread.join();
if (ex) {
try {
std::rethrow_exception(ex);
} catch (...) {
ignoreException();
}
}
}
});
{
FramedSink sink((*this)->to, ex);
fun(sink);
sink.flush();
}
stderrThread.join();
if (ex)
std::rethrow_exception(ex);
}
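As a side note (not part of the diff): `FramedSink` above writes each chunk as a length followed by the raw bytes, and its destructor writes a zero length to terminate the stream. The reader below is only an illustration of that wire format, assuming nix's usual `Source`/`readNum` primitives; the daemon side uses its own implementation.

```cpp
// Illustrative reader for the FramedSink framing: (len, bytes)* then len == 0.
// Sketch only; assumes "serialise.hh" is available.
#include <string>
#include <vector>
#include "serialise.hh"

static std::string readFramedPayload(nix::Source & from)
{
    std::string result;
    while (true) {
        auto len = nix::readNum<size_t>(from);       // chunk length written by FramedSink
        if (len == 0) break;                         // zero-length chunk ends the stream
        std::vector<unsigned char> buf(len);
        from(buf.data(), len);                       // read exactly `len` bytes
        result.append((const char *) buf.data(), len);
    }
    return result;
}
```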
static RegisterStoreImplementation<UDSRemoteStore, UDSRemoteStoreConfig> regUDSRemoteStore;
}

View file

@ -16,19 +16,23 @@ struct FdSource;
template<typename T> class Pool;
struct ConnectionHandle;
struct RemoteStoreConfig : virtual StoreConfig
{
using StoreConfig::StoreConfig;
const Setting<int> maxConnections{(StoreConfig*) this, 1,
"max-connections", "maximum number of concurrent connections to the Nix daemon"};
const Setting<unsigned int> maxConnectionAge{(StoreConfig*) this, std::numeric_limits<unsigned int>::max(),
"max-connection-age", "number of seconds to reuse a connection"};
};
/* FIXME: RemoteStore is a misnomer - should be something like
DaemonStore. */
class RemoteStore : public virtual Store
class RemoteStore : public virtual Store, public virtual RemoteStoreConfig
{
public:
const Setting<int> maxConnections{(Store*) this, 1,
"max-connections", "maximum number of concurrent connections to the Nix daemon"};
const Setting<unsigned int> maxConnectionAge{(Store*) this, std::numeric_limits<unsigned int>::max(),
"max-connection-age", "number of seconds to reuse a connection"};
virtual bool sameMachine() = 0;
RemoteStore(const Params & params);
@ -59,13 +63,21 @@ public:
void querySubstitutablePathInfos(const StorePathCAMap & paths,
SubstitutablePathInfos & infos) override;
/* Add a content-addressable store path. `dump` will be drained. */
ref<const ValidPathInfo> addCAToStore(
Source & dump,
const string & name,
ContentAddressMethod caMethod,
const StorePathSet & references,
RepairFlag repair);
/* Add a content-addressable store path. Does not support references. `dump` will be drained. */
StorePath addToStoreFromDump(Source & dump, const string & name,
FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair) override;
void addToStore(const ValidPathInfo & info, Source & nar,
RepairFlag repair, CheckSigsFlag checkSigs) override;
StorePath addToStore(const string & name, const Path & srcPath,
FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256,
PathFilter & filter = defaultPathFilter, RepairFlag repair = NoRepair) override;
StorePath addTextToStore(const string & name, const string & s,
const StorePathSet & references, RepairFlag repair) override;
@ -102,8 +114,6 @@ public:
void flushBadConnections();
protected:
struct Connection
{
AutoCloseFD fd;
@ -119,6 +129,8 @@ protected:
ref<Connection> openConnectionWrapper();
protected:
virtual ref<Connection> openConnection() = 0;
void initConnection(Connection & conn);
@ -131,24 +143,56 @@ protected:
friend struct ConnectionHandle;
virtual ref<FSAccessor> getFSAccessor() override;
virtual void narFromPath(const StorePath & path, Sink & sink) override;
ref<const ValidPathInfo> readValidPathInfo(ConnectionHandle & conn, const StorePath & path);
private:
std::atomic_bool failed{false};
};
class UDSRemoteStore : public LocalFSStore, public RemoteStore
struct UDSRemoteStoreConfig : virtual LocalFSStoreConfig, virtual RemoteStoreConfig
{
UDSRemoteStoreConfig(const Store::Params & params)
: StoreConfig(params)
, LocalFSStoreConfig(params)
, RemoteStoreConfig(params)
{
}
UDSRemoteStoreConfig()
: UDSRemoteStoreConfig(Store::Params({}))
{
}
const std::string name() override { return "Local Daemon Store"; }
};
class UDSRemoteStore : public LocalFSStore, public RemoteStore, public virtual UDSRemoteStoreConfig
{
public:
UDSRemoteStore(const Params & params);
UDSRemoteStore(std::string path, const Params & params);
UDSRemoteStore(const std::string scheme, std::string path, const Params & params);
std::string getUri() override;
static std::set<std::string> uriSchemes()
{ return {"unix"}; }
bool sameMachine() override
{ return true; }
ref<FSAccessor> getFSAccessor() override
{ return LocalFSStore::getFSAccessor(); }
void narFromPath(const StorePath & path, Sink & sink) override
{ LocalFSStore::narFromPath(path, sink); }
private:
ref<RemoteStore::Connection> openConnection() override;

View file

@ -172,20 +172,26 @@ S3Helper::FileTransferResult S3Helper::getObject(
return res;
}
struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore
struct S3BinaryCacheStoreConfig : virtual BinaryCacheStoreConfig
{
const Setting<std::string> profile{this, "", "profile", "The name of the AWS configuration profile to use."};
const Setting<std::string> region{this, Aws::Region::US_EAST_1, "region", {"aws-region"}};
const Setting<std::string> scheme{this, "", "scheme", "The scheme to use for S3 requests, https by default."};
const Setting<std::string> endpoint{this, "", "endpoint", "An optional override of the endpoint to use when talking to S3."};
const Setting<std::string> narinfoCompression{this, "", "narinfo-compression", "compression method for .narinfo files"};
const Setting<std::string> lsCompression{this, "", "ls-compression", "compression method for .ls files"};
const Setting<std::string> logCompression{this, "", "log-compression", "compression method for log/* files"};
using BinaryCacheStoreConfig::BinaryCacheStoreConfig;
const Setting<std::string> profile{(StoreConfig*) this, "", "profile", "The name of the AWS configuration profile to use."};
const Setting<std::string> region{(StoreConfig*) this, Aws::Region::US_EAST_1, "region", {"aws-region"}};
const Setting<std::string> scheme{(StoreConfig*) this, "", "scheme", "The scheme to use for S3 requests, https by default."};
const Setting<std::string> endpoint{(StoreConfig*) this, "", "endpoint", "An optional override of the endpoint to use when talking to S3."};
const Setting<std::string> narinfoCompression{(StoreConfig*) this, "", "narinfo-compression", "compression method for .narinfo files"};
const Setting<std::string> lsCompression{(StoreConfig*) this, "", "ls-compression", "compression method for .ls files"};
const Setting<std::string> logCompression{(StoreConfig*) this, "", "log-compression", "compression method for log/* files"};
const Setting<bool> multipartUpload{
this, false, "multipart-upload", "whether to use multi-part uploads"};
(StoreConfig*) this, false, "multipart-upload", "whether to use multi-part uploads"};
const Setting<uint64_t> bufferSize{
this, 5 * 1024 * 1024, "buffer-size", "size (in bytes) of each part in multi-part uploads"};
(StoreConfig*) this, 5 * 1024 * 1024, "buffer-size", "size (in bytes) of each part in multi-part uploads"};
const std::string name() override { return "S3 Binary Cache Store"; }
};
struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore, virtual S3BinaryCacheStoreConfig
{
std::string bucketName;
Stats stats;
@ -193,8 +199,11 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore
S3Helper s3Helper;
S3BinaryCacheStoreImpl(
const Params & params, const std::string & bucketName)
: S3BinaryCacheStore(params)
const std::string & scheme,
const std::string & bucketName,
const Params & params)
: StoreConfig(params)
, S3BinaryCacheStore(params)
, bucketName(bucketName)
, s3Helper(profile, region, scheme, endpoint)
{
@ -426,17 +435,11 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore
return paths;
}
static std::set<std::string> uriSchemes() { return {"s3"}; }
};
static RegisterStoreImplementation regStore([](
const std::string & uri, const Store::Params & params)
-> std::shared_ptr<Store>
{
if (std::string(uri, 0, 5) != "s3://") return 0;
auto store = std::make_shared<S3BinaryCacheStoreImpl>(params, std::string(uri, 5));
store->init();
return store;
});
static RegisterStoreImplementation<S3BinaryCacheStoreImpl, S3BinaryCacheStoreConfig> regS3BinaryCacheStore;
}

View file

@ -8,19 +8,25 @@
namespace nix {
static std::string uriScheme = "ssh-ng://";
struct SSHStoreConfig : virtual RemoteStoreConfig
{
using RemoteStoreConfig::RemoteStoreConfig;
class SSHStore : public RemoteStore
const Setting<Path> sshKey{(StoreConfig*) this, "", "ssh-key", "path to an SSH private key"};
const Setting<bool> compress{(StoreConfig*) this, false, "compress", "whether to compress the connection"};
const Setting<Path> remoteProgram{(StoreConfig*) this, "nix-daemon", "remote-program", "path to the nix-daemon executable on the remote system"};
const Setting<std::string> remoteStore{(StoreConfig*) this, "", "remote-store", "URI of the store on the remote system"};
const std::string name() override { return "SSH Store"; }
};
class SSHStore : public virtual RemoteStore, public virtual SSHStoreConfig
{
public:
const Setting<Path> sshKey{(Store*) this, "", "ssh-key", "path to an SSH private key"};
const Setting<bool> compress{(Store*) this, false, "compress", "whether to compress the connection"};
const Setting<Path> remoteProgram{(Store*) this, "nix-daemon", "remote-program", "path to the nix-daemon executable on the remote system"};
const Setting<std::string> remoteStore{(Store*) this, "", "remote-store", "URI of the store on the remote system"};
SSHStore(const std::string & host, const Params & params)
: Store(params)
SSHStore(const std::string & scheme, const std::string & host, const Params & params)
: StoreConfig(params)
, Store(params)
, RemoteStore(params)
, host(host)
, master(
@ -32,18 +38,16 @@ public:
{
}
static std::set<std::string> uriSchemes() { return {"ssh-ng"}; }
std::string getUri() override
{
return uriScheme + host;
return *uriSchemes().begin() + "://" + host;
}
bool sameMachine() override
{ return false; }
void narFromPath(const StorePath & path, Sink & sink) override;
ref<FSAccessor> getFSAccessor() override;
private:
struct Connection : RemoteStore::Connection
@ -68,19 +72,6 @@ private:
};
};
void SSHStore::narFromPath(const StorePath & path, Sink & sink)
{
auto conn(connections->get());
conn->to << wopNarFromPath << printStorePath(path);
conn->processStderr();
copyNAR(conn->from, sink);
}
ref<FSAccessor> SSHStore::getFSAccessor()
{
return make_ref<RemoteFSAccessor>(ref<Store>(shared_from_this()));
}
ref<RemoteStore::Connection> SSHStore::openConnection()
{
auto conn = make_ref<Connection>();
@ -89,16 +80,9 @@ ref<RemoteStore::Connection> SSHStore::openConnection()
+ (remoteStore.get() == "" ? "" : " --store " + shellEscape(remoteStore.get())));
conn->to = FdSink(conn->sshConn->in.get());
conn->from = FdSource(conn->sshConn->out.get());
initConnection(*conn);
return conn;
}
static RegisterStoreImplementation regStore([](
const std::string & uri, const Store::Params & params)
-> std::shared_ptr<Store>
{
if (std::string(uri, 0, uriScheme.size()) != uriScheme) return 0;
return std::make_shared<SSHStore>(std::string(uri, uriScheme.size()), params);
});
static RegisterStoreImplementation<SSHStore, SSHStoreConfig> regSSHStore;
}

View file

@ -8,9 +8,7 @@
#include "json.hh"
#include "url.hh"
#include "archive.hh"
#include <future>
#include "callback.hh"
namespace nix {
@ -140,21 +138,28 @@ StorePathWithOutputs Store::followLinksToStorePathWithOutputs(std::string_view p
*/
StorePath Store::makeStorePath(const string & type,
const Hash & hash, std::string_view name) const
StorePath Store::makeStorePath(std::string_view type,
std::string_view hash, std::string_view name) const
{
/* e.g., "source:sha256:1abc...:/nix/store:foo.tar.gz" */
string s = type + ":" + hash.to_string(Base16, true) + ":" + storeDir + ":" + std::string(name);
string s = std::string { type } + ":" + std::string { hash }
+ ":" + storeDir + ":" + std::string { name };
auto h = compressHash(hashString(htSHA256, s), 20);
return StorePath(h, name);
}
StorePath Store::makeOutputPath(const string & id,
StorePath Store::makeStorePath(std::string_view type,
const Hash & hash, std::string_view name) const
{
return makeStorePath("output:" + id, hash,
std::string(name) + (id == "out" ? "" : "-" + id));
return makeStorePath(type, hash.to_string(Base16, true), name);
}
StorePath Store::makeOutputPath(std::string_view id,
const Hash & hash, std::string_view name) const
{
return makeStorePath("output:" + std::string { id }, hash, outputPathName(name, id));
}
@ -339,7 +344,7 @@ ValidPathInfo Store::addToStoreSlow(std::string_view name, const Path & srcPath,
Store::Store(const Params & params)
: Config(params)
: StoreConfig(params)
, state({(size_t) pathInfoCacheSize})
{
}
@ -1002,7 +1007,6 @@ Derivation Store::readDerivation(const StorePath & drvPath)
}
}
}
@ -1012,9 +1016,6 @@ Derivation Store::readDerivation(const StorePath & drvPath)
namespace nix {
RegisterStoreImplementation::Implementations * RegisterStoreImplementation::implementations = 0;
/* Split URI into protocol+hierarchy part and its parameter set. */
std::pair<std::string, Store::Params> splitUriAndParams(const std::string & uri_)
{
@ -1028,24 +1029,6 @@ std::pair<std::string, Store::Params> splitUriAndParams(const std::string & uri_
return {uri, params};
}
ref<Store> openStore(const std::string & uri_,
const Store::Params & extraParams)
{
auto [uri, uriParams] = splitUriAndParams(uri_);
auto params = extraParams;
params.insert(uriParams.begin(), uriParams.end());
for (auto fun : *RegisterStoreImplementation::implementations) {
auto store = fun(uri, params);
if (store) {
store->warnUnknownSettings();
return ref<Store>(store);
}
}
throw Error("don't know how to open Nix store '%s'", uri);
}
static bool isNonUriPath(const std::string & spec) {
return
// is not a URL
@ -1055,44 +1038,62 @@ static bool isNonUriPath(const std::string & spec) {
&& spec.find("/") != std::string::npos;
}
StoreType getStoreType(const std::string & uri, const std::string & stateDir)
std::shared_ptr<Store> openFromNonUri(const std::string & uri, const Store::Params & params)
{
if (uri == "daemon") {
return tDaemon;
} else if (uri == "local" || isNonUriPath(uri)) {
return tLocal;
} else if (uri == "" || uri == "auto") {
if (uri == "" || uri == "auto") {
auto stateDir = get(params, "state").value_or(settings.nixStateDir);
if (access(stateDir.c_str(), R_OK | W_OK) == 0)
return tLocal;
return std::make_shared<LocalStore>(params);
else if (pathExists(settings.nixDaemonSocketFile))
return tDaemon;
return std::make_shared<UDSRemoteStore>(params);
else
return tLocal;
return std::make_shared<LocalStore>(params);
} else if (uri == "daemon") {
return std::make_shared<UDSRemoteStore>(params);
} else if (uri == "local") {
return std::make_shared<LocalStore>(params);
} else if (isNonUriPath(uri)) {
Store::Params params2 = params;
params2["root"] = absPath(uri);
return std::make_shared<LocalStore>(params2);
} else {
return tOther;
return nullptr;
}
}
static RegisterStoreImplementation regStore([](
const std::string & uri, const Store::Params & params)
-> std::shared_ptr<Store>
ref<Store> openStore(const std::string & uri_,
const Store::Params & extraParams)
{
switch (getStoreType(uri, get(params, "state").value_or(settings.nixStateDir))) {
case tDaemon:
return std::shared_ptr<Store>(std::make_shared<UDSRemoteStore>(params));
case tLocal: {
Store::Params params2 = params;
if (isNonUriPath(uri)) {
params2["root"] = absPath(uri);
}
return std::shared_ptr<Store>(std::make_shared<LocalStore>(params2));
}
default:
return nullptr;
}
});
auto params = extraParams;
try {
auto parsedUri = parseURL(uri_);
params.insert(parsedUri.query.begin(), parsedUri.query.end());
auto baseURI = parsedUri.authority.value_or("") + parsedUri.path;
for (auto implem : *Implementations::registered) {
if (implem.uriSchemes.count(parsedUri.scheme)) {
auto store = implem.create(parsedUri.scheme, baseURI, params);
if (store) {
store->init();
store->warnUnknownSettings();
return ref<Store>(store);
}
}
}
}
catch (BadURL &) {
auto [uri, uriParams] = splitUriAndParams(uri_);
params.insert(uriParams.begin(), uriParams.end());
if (auto store = openFromNonUri(uri, params)) {
store->warnUnknownSettings();
return ref<Store>(store);
}
}
throw Error("don't know how to open Nix store '%s'", uri_);
}
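For orientation, a hedged sketch of how this dispatch plays out; the paths and host names below are purely illustrative and not taken from the diff.

```cpp
// Sketch of openStore() dispatch; URIs/paths here are made up for illustration.
#include "store-api.hh"

using namespace nix;

static void demoOpenStore()
{
    auto daemon = openStore("daemon");             // non-URI keyword -> UDSRemoteStore
    auto chroot = openStore("/tmp/nix-test");      // plain path -> LocalStore with root=/tmp/nix-test
    auto remote = openStore("ssh-ng://build01");   // scheme matched against the store registry -> SSHStore
}
```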
std::list<ref<Store>> getDefaultSubstituters()
{
@ -1126,5 +1127,6 @@ std::list<ref<Store>> getDefaultSubstituters()
return stores;
}
std::vector<StoreFactory> * Implementations::registered = 0;
}

View file

@ -24,6 +24,31 @@
namespace nix {
/**
* About the class hierarchy of the store implementations:
*
* Each store type `Foo` consists of two classes:
*
* 1. A class `FooConfig : virtual StoreConfig` that contains the configuration
* for the store
*
* It should only contain members of type `const Setting<T>` (or subclasses
* of it) and inherit the constructors of `StoreConfig`
* (`using StoreConfig::StoreConfig`).
*
* 2. A class `Foo : virtual Store, virtual FooConfig` that contains the
* implementation of the store.
*
* This class is expected to have a constructor `Foo(const Params & params)`
* that calls `StoreConfig(params)` (otherwise you will hit an assertion
* failure when trying to instantiate it).
*
* You can then register the new store using:
*
* ```cpp
* static RegisterStoreImplementation<Foo, FooConfig> regStore;
* ```
*/
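To make the pattern concrete, here is a minimal sketch following the convention described above; `DemoStoreConfig`/`DemoStore` are hypothetical names, the setting is illustrative, and the remaining pure-virtual `Store` members are elided.

```cpp
// Hypothetical example of the FooConfig / Foo pattern (assumes store-api.hh context).
struct DemoStoreConfig : virtual StoreConfig
{
    using StoreConfig::StoreConfig;   // inherit the StoreConfig(params) constructor

    const Setting<std::string> exampleOption{(StoreConfig*) this, "", "example-option",
        "a purely illustrative setting"};

    const std::string name() override { return "Demo Store"; }
};

class DemoStore : public virtual Store, public virtual DemoStoreConfig
{
public:
    DemoStore(const std::string & scheme, const std::string & uri, const Params & params)
        : StoreConfig(params)   // must be called explicitly, see StoreConfig below
        , Store(params)
    { }

    static std::set<std::string> uriSchemes() { return {"demo"}; }

    std::string getUri() override { return "demo://"; }

    // ... remaining pure-virtual Store methods omitted in this sketch ...
};

static RegisterStoreImplementation<DemoStore, DemoStoreConfig> regDemoStore;
```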
MakeError(SubstError, Error);
MakeError(BuildError, Error); // denotes a permanent build failure
@ -33,6 +58,7 @@ MakeError(SubstituteGone, Error);
MakeError(SubstituterDisabled, Error);
MakeError(BadStorePath, Error);
MakeError(InvalidStoreURI, Error);
class FSAccessor;
class NarInfoDiskCache;
@ -144,12 +170,33 @@ struct BuildResult
}
};
class Store : public std::enable_shared_from_this<Store>, public Config
struct StoreConfig : public Config
{
public:
using Config::Config;
typedef std::map<std::string, std::string> Params;
/**
* When constructing a store implementation, we pass in a map `params` of
* parameters that's supposed to initialize the associated config.
* To do that, we must use the `StoreConfig(StringMap & params)`
* constructor, so we'd like to `delete` its default constructor to enforce
* it.
*
* However, actually deleting it means that all the subclasses of
* `StoreConfig` will have their default constructor deleted (because it's
* supposed to call the deleted default constructor of `StoreConfig`). But
* because we're always using virtual inheritance, the constructors of
* child classes will never implicitly call this one, so deleting it will
* be more painful than anything else.
*
* So we `assert(false)` here to ensure at runtime that the right
* constructor is always called without having to redefine a custom
* constructor for each `*Config` class.
*/
StoreConfig() { assert(false); }
virtual ~StoreConfig() { }
virtual const std::string name() = 0;
const PathSetting storeDir_{this, false, settings.nixStore,
"store", "path to the Nix store"};
@ -167,6 +214,14 @@ public:
"system-features",
"Optional features that the system this store builds on implements (like \"kvm\")."};
};
class Store : public std::enable_shared_from_this<Store>, public virtual StoreConfig
{
public:
typedef std::map<std::string, std::string> Params;
protected:
struct PathInfoCacheValue {
@ -200,6 +255,11 @@ protected:
Store(const Params & params);
public:
/**
* Perform any necessary effectful operation to make the store up and
* running
*/
virtual void init() {};
virtual ~Store() { }
@ -247,10 +307,12 @@ public:
StorePathWithOutputs followLinksToStorePathWithOutputs(std::string_view path) const;
/* Constructs a unique store path name. */
StorePath makeStorePath(const string & type,
StorePath makeStorePath(std::string_view type,
std::string_view hash, std::string_view name) const;
StorePath makeStorePath(std::string_view type,
const Hash & hash, std::string_view name) const;
StorePath makeOutputPath(const string & id,
StorePath makeOutputPath(std::string_view id,
const Hash & hash, std::string_view name) const;
StorePath makeFixedOutputPath(FileIngestionMethod method,
@ -389,13 +451,12 @@ public:
/* Like addToStore(), but the contents of the path are contained
in `dump', which is either a NAR serialisation (if recursive ==
true) or simply the contents of a regular file (if recursive ==
false). */
false).
`dump` may be drained */
// FIXME: remove?
virtual StorePath addToStoreFromDump(Source & dump, const string & name,
FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair)
{
throw Error("addToStoreFromDump() is not supported by this store");
}
{ unsupported("addToStoreFromDump"); }
/* Like addToStore, but the contents written to the output path is
a regular file containing the given string. */
@ -418,8 +479,38 @@ public:
BuildMode buildMode = bmNormal);
/* Build a single non-materialized derivation (i.e. not from an
on-disk .drv file). Note that drvPath is only used for
informational purposes. */
on-disk .drv file).
drvPath is used to deduplicate worker goals, so it is imperative that
it is correct. That said, it doesn't literally need to be the store path
that would be calculated from writing this derivation to the store: it
is OK if it instead is that of a Derivation which would resolve to this
one (by taking the outputs of its input derivations and adding them as
input sources) such that the build-time referenceable paths are the same.
In the input-addressed case, we usually *do* use an "original"
unresolved derivation's path, as that is what will be used in the
`buildPaths` case. Also, the input-addressed output paths are verified
only by the contents of that specific unresolved derivation, so it is
nice to keep that information around so that if the original derivation
is ever obtained later, it can be verified whether the trusted user in
fact used the proper output path.
In the content-addressed case, we want to always use the
resolved drv path calculated from the provided derivation. This serves
two purposes:
- It keeps the operation trustless, by ruling out a maliciously
invalid drv path corresponding to a non-resolution-equivalent
derivation.
- For the floating case in particular, it ensures that the derivation
to output mapping respects the resolution equivalence relation, so
one cannot choose different resolution-equivalent derivations to
subvert dependency coherence (i.e. the property that one doesn't end
up with multiple different versions of dependencies without
explicitly choosing to allow it).
*/
virtual BuildResult buildDerivation(const StorePath & drvPath, const BasicDerivation & drv,
BuildMode buildMode = bmNormal) = 0;
@ -456,7 +547,7 @@ public:
- The collector isn't running, or it's just started but hasn't
acquired the GC lock yet. In that case we get and release
the lock right away, then exit. The collector scans the
permanent root and sees our's.
permanent root and sees ours.
In either case the permanent root is seen by the collector. */
virtual void syncWithGC() { };
@ -624,22 +715,25 @@ protected:
};
class LocalFSStore : public virtual Store
struct LocalFSStoreConfig : virtual StoreConfig
{
public:
// FIXME: the (Store*) cast works around a bug in gcc that causes
using StoreConfig::StoreConfig;
// FIXME: the (StoreConfig*) cast works around a bug in gcc that causes
// it to omit the call to the Setting constructor. Clang works fine
// either way.
const PathSetting rootDir{(Store*) this, true, "",
const PathSetting rootDir{(StoreConfig*) this, true, "",
"root", "directory prefixed to all other paths"};
const PathSetting stateDir{(Store*) this, false,
const PathSetting stateDir{(StoreConfig*) this, false,
rootDir != "" ? rootDir + "/nix/var/nix" : settings.nixStateDir,
"state", "directory where Nix will store state"};
const PathSetting logDir{(Store*) this, false,
const PathSetting logDir{(StoreConfig*) this, false,
rootDir != "" ? rootDir + "/nix/var/log/nix" : settings.nixLogDir,
"log", "directory where Nix will store state"};
};
class LocalFSStore : public virtual Store, public virtual LocalFSStoreConfig
{
public:
const static string drvsLogDir;
@ -728,37 +822,48 @@ ref<Store> openStore(const std::string & uri = settings.storeUri.get(),
const Store::Params & extraParams = Store::Params());
enum StoreType {
tDaemon,
tLocal,
tOther
};
StoreType getStoreType(const std::string & uri = settings.storeUri.get(),
const std::string & stateDir = settings.nixStateDir);
/* Return the default substituter stores, defined by the
substituters option and various legacy options. */
std::list<ref<Store>> getDefaultSubstituters();
/* Store implementation registration. */
typedef std::function<std::shared_ptr<Store>(
const std::string & uri, const Store::Params & params)> OpenStore;
struct RegisterStoreImplementation
struct StoreFactory
{
typedef std::vector<OpenStore> Implementations;
static Implementations * implementations;
std::set<std::string> uriSchemes;
std::function<std::shared_ptr<Store> (const std::string & scheme, const std::string & uri, const Store::Params & params)> create;
std::function<std::shared_ptr<StoreConfig> ()> getConfig;
};
RegisterStoreImplementation(OpenStore fun)
struct Implementations
{
static std::vector<StoreFactory> * registered;
template<typename T, typename TConfig>
static void add()
{
if (!implementations) implementations = new Implementations;
implementations->push_back(fun);
if (!registered) registered = new std::vector<StoreFactory>();
StoreFactory factory{
.uriSchemes = T::uriSchemes(),
.create =
([](const std::string & scheme, const std::string & uri, const Store::Params & params)
-> std::shared_ptr<Store>
{ return std::make_shared<T>(scheme, uri, params); }),
.getConfig =
([]()
-> std::shared_ptr<StoreConfig>
{ return std::make_shared<TConfig>(StringMap({})); })
};
registered->push_back(factory);
}
};
template<typename T, typename TConfig>
struct RegisterStoreImplementation
{
RegisterStoreImplementation()
{
Implementations::add<T, TConfig>();
}
};
/* Display a set of paths in human-readable form (i.e., between quotes

View file

@ -6,7 +6,7 @@ namespace nix {
#define WORKER_MAGIC_1 0x6e697863
#define WORKER_MAGIC_2 0x6478696f
#define PROTOCOL_VERSION 0x118
#define PROTOCOL_VERSION 0x11a
#define GET_PROTOCOL_MAJOR(x) ((x) & 0xff00)
#define GET_PROTOCOL_MINOR(x) ((x) & 0x00ff)
@ -18,7 +18,7 @@ typedef enum {
wopQueryReferences = 5, // obsolete
wopQueryReferrers = 6,
wopAddToStore = 7,
wopAddTextToStore = 8,
wopAddTextToStore = 8, // obsolete since 1.25, Nix 3.0. Use wopAddToStore
wopBuildPaths = 9,
wopEnsurePath = 10,
wopAddTempRoot = 11,
@ -66,10 +66,6 @@ typedef enum {
class Store;
struct Source;
template<class T> T readStorePaths(const Store & store, Source & from);
void writeStorePaths(const Store & store, Sink & out, const StorePathSet & paths);
/* To guide overloading */
template<typename T>
struct Phantom {};
@ -78,76 +74,81 @@ struct Phantom {};
namespace worker_proto {
/* FIXME maybe move more stuff inside here */
StorePath read(const Store & store, Source & from, Phantom<StorePath> _);
void write(const Store & store, Sink & out, const StorePath & storePath);
#define MAKE_WORKER_PROTO(TEMPLATE, T) \
TEMPLATE T read(const Store & store, Source & from, Phantom< T > _); \
TEMPLATE void write(const Store & store, Sink & out, const T & str)
template<typename T>
std::map<std::string, T> read(const Store & store, Source & from, Phantom<std::map<std::string, T>> _);
template<typename T>
void write(const Store & store, Sink & out, const std::map<string, T> & resMap);
template<typename T>
std::optional<T> read(const Store & store, Source & from, Phantom<std::optional<T>> _);
template<typename T>
void write(const Store & store, Sink & out, const std::optional<T> & optVal);
MAKE_WORKER_PROTO(, std::string);
MAKE_WORKER_PROTO(, StorePath);
MAKE_WORKER_PROTO(, ContentAddress);
/* Specialization which uses and empty string for the empty case, taking
advantage of the fact StorePaths always serialize to a non-empty string.
This is done primarily for backwards compatability, so that StorePath <=
std::optional<StorePath>, where <= is the compatability partial order.
MAKE_WORKER_PROTO(template<typename T>, std::set<T>);
#define X_ template<typename K, typename V>
#define Y_ std::map<K, V>
MAKE_WORKER_PROTO(X_, Y_);
#undef X_
#undef Y_
/* These use the empty string for the null case, relying on the fact
that the underlying types never serialize to the empty string.
We do this instead of a generic std::optional<T> instance because
ordinal tags (0 or 1, here) are a bit of a compatibility hazard. For
the same reason, we don't have std::variant<T...> instances (ordinal
tags 0...n).
We could have the generic instances and then these as specializations for
compatibility, but that's proven a bit finicky, and also makes the
worker protocol harder to implement in other languages where such
specializations may not be allowed.
*/
template<>
void write(const Store & store, Sink & out, const std::optional<StorePath> & optVal);
MAKE_WORKER_PROTO(, std::optional<StorePath>);
MAKE_WORKER_PROTO(, std::optional<ContentAddress>);
template<typename T>
std::map<std::string, T> read(const Store & store, Source & from, Phantom<std::map<std::string, T>> _)
std::set<T> read(const Store & store, Source & from, Phantom<std::set<T>> _)
{
std::map<string, T> resMap;
auto size = (size_t)readInt(from);
std::set<T> resSet;
auto size = readNum<size_t>(from);
while (size--) {
auto thisKey = readString(from);
resMap.insert_or_assign(std::move(thisKey), nix::worker_proto::read(store, from, Phantom<T> {}));
resSet.insert(read(store, from, Phantom<T> {}));
}
return resSet;
}
template<typename T>
void write(const Store & store, Sink & out, const std::set<T> & resSet)
{
out << resSet.size();
for (auto & key : resSet) {
write(store, out, key);
}
}
template<typename K, typename V>
std::map<K, V> read(const Store & store, Source & from, Phantom<std::map<K, V>> _)
{
std::map<K, V> resMap;
auto size = readNum<size_t>(from);
while (size--) {
auto k = read(store, from, Phantom<K> {});
auto v = read(store, from, Phantom<V> {});
resMap.insert_or_assign(std::move(k), std::move(v));
}
return resMap;
}
template<typename T>
void write(const Store & store, Sink & out, const std::map<string, T> & resMap)
template<typename K, typename V>
void write(const Store & store, Sink & out, const std::map<K, V> & resMap)
{
out << resMap.size();
for (auto & i : resMap) {
out << i.first;
nix::worker_proto::write(store, out, i.second);
write(store, out, i.first);
write(store, out, i.second);
}
}
template<typename T>
std::optional<T> read(const Store & store, Source & from, Phantom<std::optional<T>> _)
{
auto tag = readNum<uint8_t>(from);
switch (tag) {
case 0:
return std::nullopt;
case 1:
return nix::worker_proto::read(store, from, Phantom<T> {});
default:
throw Error("got an invalid tag bit for std::optional: %#04x", tag);
}
}
template<typename T>
void write(const Store & store, Sink & out, const std::optional<T> & optVal)
{
out << (optVal ? 1 : 0);
if (optVal)
nix::worker_proto::write(store, out, *optVal);
}
}
StorePathCAMap readStorePathCAMap(const Store & store, Source & from);
void writeStorePathCAMap(const Store & store, Sink & out, const StorePathCAMap & paths);
}
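As a hedged illustration of the generic helpers declared above, the old `readStorePaths`/`writeStorePaths` call sites now take the shape below; `to` and `from` stand for an open daemon connection's streams and are assumptions of this sketch.

```cpp
// Sketch of the Phantom-based worker_proto API; not part of this commit.
#include "worker-protocol.hh"

static void sendPaths(const nix::Store & store, nix::Sink & to, const nix::StorePathSet & paths)
{
    nix::worker_proto::write(store, to, paths);    // replaces writeStorePaths()
}

static nix::StorePathSet receivePaths(const nix::Store & store, nix::Source & from)
{
    // Phantom<T> carries no data; it only selects the right read() overload.
    return nix::worker_proto::read(store, from, nix::Phantom<nix::StorePathSet> {});
}
```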

View file

@ -0,0 +1,15 @@
#pragma once
#include <nlohmann/json.hpp>
#include "config.hh"
namespace nix {
template<typename T>
std::map<std::string, nlohmann::json> BaseSetting<T>::toJSONObject()
{
auto obj = AbstractSetting::toJSONObject();
obj.emplace("value", value);
obj.emplace("defaultValue", defaultValue);
return obj;
}
}

View file

@ -27,11 +27,13 @@ struct ArchiveSettings : Config
#endif
"use-case-hack",
"Whether to enable a Darwin-specific hack for dealing with file name collisions."};
Setting<bool> preallocateContents{this, true, "preallocate-contents",
"Whether to preallocate files when writing objects with known size."};
};
static ArchiveSettings archiveSettings;
static GlobalConfig::Register r1(&archiveSettings);
static GlobalConfig::Register rArchiveSettings(&archiveSettings);
const std::string narVersionMagic1 = "nix-archive-1";
@ -66,9 +68,7 @@ static void dump(const Path & path, Sink & sink, PathFilter & filter)
{
checkInterrupt();
struct stat st;
if (lstat(path.c_str(), &st))
throw SysError("getting attributes of path '%1%'", path);
auto st = lstat(path);
sink << "(";
@ -325,6 +325,9 @@ struct RestoreSink : ParseSink
void preallocateContents(uint64_t len)
{
if (!archiveSettings.preallocateContents)
return;
#if HAVE_POSIX_FALLOCATE
if (len) {
errno = posix_fallocate(fd.get(), 0, len);

View file

@ -277,7 +277,7 @@ Args::Flag Args::Flag::mkHashTypeOptFlag(std::string && longName, std::optional<
};
}
static void completePath(std::string_view prefix, bool onlyDirs)
static void _completePath(std::string_view prefix, bool onlyDirs)
{
pathCompletions = true;
glob_t globbuf;
@ -300,12 +300,12 @@ static void completePath(std::string_view prefix, bool onlyDirs)
void completePath(size_t, std::string_view prefix)
{
completePath(prefix, false);
_completePath(prefix, false);
}
void completeDir(size_t, std::string_view prefix)
{
completePath(prefix, true);
_completePath(prefix, true);
}
Strings argvToStrings(int argc, char * * argv)

View file

@ -192,7 +192,7 @@ public:
{
expectArgs({
.label = label,
.optional = true,
.optional = optional,
.handler = {dest}
});
}

46
src/libutil/callback.hh Normal file
View file

@ -0,0 +1,46 @@
#pragma once
#include <future>
#include <functional>
namespace nix {
/* A callback is a wrapper around a lambda that accepts a value of
type T or an exception. (We abuse std::future<T> to pass the value or
exception.) */
template<typename T>
class Callback
{
std::function<void(std::future<T>)> fun;
std::atomic_flag done = ATOMIC_FLAG_INIT;
public:
Callback(std::function<void(std::future<T>)> fun) : fun(fun) { }
Callback(Callback && callback) : fun(std::move(callback.fun))
{
auto prev = callback.done.test_and_set();
if (prev) done.test_and_set();
}
void operator()(T && t) noexcept
{
auto prev = done.test_and_set();
assert(!prev);
std::promise<T> promise;
promise.set_value(std::move(t));
fun(promise.get_future());
}
void rethrow(const std::exception_ptr & exc = std::current_exception()) noexcept
{
auto prev = done.test_and_set();
assert(!prev);
std::promise<T> promise;
promise.set_exception(exc);
fun(promise.get_future());
}
};
}
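A small usage sketch of the class above (illustrative only):

```cpp
// Illustrative use of Callback<T>: the wrapped lambda receives a std::future
// that either holds a value or rethrows the delivered exception on .get().
#include <iostream>
#include "callback.hh"

static void demoCallback()
{
    nix::Callback<int> cb{[](std::future<int> result) {
        try {
            std::cout << "got " << result.get() << "\n";
        } catch (std::exception & e) {
            std::cout << "failed: " << e.what() << "\n";
        }
    }};

    cb(42);           // delivers a value; the lambda runs with a ready future
    // cb.rethrow();  // alternatively, deliver an exception (call at most once)
}
```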

View file

@ -1,5 +1,6 @@
#include "config.hh"
#include "args.hh"
#include "abstract-setting-to-json.hh"
#include <nlohmann/json.hpp>
@ -137,11 +138,7 @@ nlohmann::json Config::toJSON()
auto res = nlohmann::json::object();
for (auto & s : _settings)
if (!s.second.isAlias) {
auto obj = nlohmann::json::object();
obj.emplace("description", s.second.setting->description);
obj.emplace("aliases", s.second.setting->aliases);
obj.emplace("value", s.second.setting->toJSON());
res.emplace(s.first, obj);
res.emplace(s.first, s.second.setting->toJSON());
}
return res;
}
@ -168,19 +165,21 @@ void AbstractSetting::setDefault(const std::string & str)
nlohmann::json AbstractSetting::toJSON()
{
return to_string();
return nlohmann::json(toJSONObject());
}
std::map<std::string, nlohmann::json> AbstractSetting::toJSONObject()
{
std::map<std::string, nlohmann::json> obj;
obj.emplace("description", description);
obj.emplace("aliases", aliases);
return obj;
}
void AbstractSetting::convertToArg(Args & args, const std::string & category)
{
}
template<typename T>
nlohmann::json BaseSetting<T>::toJSON()
{
return value;
}
template<typename T>
void BaseSetting<T>::convertToArg(Args & args, const std::string & category)
{
@ -259,11 +258,6 @@ template<> std::string BaseSetting<Strings>::to_string() const
return concatStringsSep(" ", value);
}
template<> nlohmann::json BaseSetting<Strings>::toJSON()
{
return value;
}
template<> void BaseSetting<StringSet>::set(const std::string & str)
{
value = tokenizeString<StringSet>(str);
@ -274,9 +268,24 @@ template<> std::string BaseSetting<StringSet>::to_string() const
return concatStringsSep(" ", value);
}
template<> nlohmann::json BaseSetting<StringSet>::toJSON()
template<> void BaseSetting<StringMap>::set(const std::string & str)
{
return value;
auto kvpairs = tokenizeString<Strings>(str);
for (auto & s : kvpairs)
{
auto eq = s.find_first_of('=');
if (std::string::npos != eq)
value.emplace(std::string(s, 0, eq), std::string(s, eq + 1));
// else ignored
}
}
template<> std::string BaseSetting<StringMap>::to_string() const
{
Strings kvstrs;
std::transform(value.begin(), value.end(), back_inserter(kvstrs),
[&](auto kvpair){ return kvpair.first + "=" + kvpair.second; });
return concatStringsSep(" ", kvstrs);
}
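For reference, a hedged sketch of what the new `StringMap` specialization accepts; the settings struct and option name are hypothetical.

```cpp
// Hypothetical map-valued setting: set() parses whitespace-separated key=value
// pairs, e.g. "env=prod region=eu" becomes {{"env","prod"},{"region","eu"}};
// entries without an '=' are silently ignored, and to_string() renders the map
// back as "key=value" pairs joined by spaces.
#include "config.hh"

struct DemoSettings : nix::Config
{
    nix::Setting<nix::StringMap> labels{this, {}, "labels",
        "illustrative map-valued option"};
};
```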
template class BaseSetting<int>;
@ -289,6 +298,7 @@ template class BaseSetting<bool>;
template class BaseSetting<std::string>;
template class BaseSetting<Strings>;
template class BaseSetting<StringSet>;
template class BaseSetting<StringMap>;
void PathSetting::set(const std::string & str)
{

View file

@ -206,7 +206,9 @@ protected:
virtual std::string to_string() const = 0;
virtual nlohmann::json toJSON();
nlohmann::json toJSON();
virtual std::map<std::string, nlohmann::json> toJSONObject();
virtual void convertToArg(Args & args, const std::string & category);
@ -220,6 +222,7 @@ class BaseSetting : public AbstractSetting
protected:
T value;
const T defaultValue;
public:
@ -229,6 +232,7 @@ public:
const std::set<std::string> & aliases = {})
: AbstractSetting(name, description, aliases)
, value(def)
, defaultValue(def)
{ }
operator const T &() const { return value; }
@ -251,7 +255,7 @@ public:
void convertToArg(Args & args, const std::string & category) override;
nlohmann::json toJSON() override;
std::map<std::string, nlohmann::json> toJSONObject() override;
};
template<typename T>

View file

@ -11,13 +11,13 @@ const std::string nativeSystem = SYSTEM;
BaseError & BaseError::addTrace(std::optional<ErrPos> e, hintformat hint)
{
err.traces.push_front(Trace { .pos = e, .hint = hint});
err.traces.push_front(Trace { .pos = e, .hint = hint });
return *this;
}
// c++ std::exception descendants must have a 'const char* what()' function.
// This stringifies the error and caches it for use by what(), or similarly by msg().
const string& BaseError::calcWhat() const
const string & BaseError::calcWhat() const
{
if (what_.has_value())
return *what_;
@ -34,12 +34,12 @@ const string& BaseError::calcWhat() const
std::optional<string> ErrorInfo::programName = std::nullopt;
std::ostream& operator<<(std::ostream &os, const hintformat &hf)
std::ostream & operator<<(std::ostream & os, const hintformat & hf)
{
return os << hf.str();
}
string showErrPos(const ErrPos &errPos)
string showErrPos(const ErrPos & errPos)
{
if (errPos.line > 0) {
if (errPos.column > 0) {
@ -53,7 +53,7 @@ string showErrPos(const ErrPos &errPos)
}
}
std::optional<LinesOfCode> getCodeLines(const ErrPos &errPos)
std::optional<LinesOfCode> getCodeLines(const ErrPos & errPos)
{
if (errPos.line <= 0)
return std::nullopt;
@ -92,13 +92,13 @@ std::optional<LinesOfCode> getCodeLines(const ErrPos &errPos)
return loc;
}
}
catch (EndOfFile &eof) {
catch (EndOfFile & eof) {
if (loc.errLineOfCode.has_value())
return loc;
else
return std::nullopt;
}
catch (std::exception &e) {
catch (std::exception & e) {
printError("error reading nix file: %s\n%s", errPos.file, e.what());
return std::nullopt;
}
@ -137,10 +137,10 @@ std::optional<LinesOfCode> getCodeLines(const ErrPos &errPos)
}
// print lines of code to the ostream, indicating the error column.
void printCodeLines(std::ostream &out,
const string &prefix,
const ErrPos &errPos,
const LinesOfCode &loc)
void printCodeLines(std::ostream & out,
const string & prefix,
const ErrPos & errPos,
const LinesOfCode & loc)
{
// previous line of code.
if (loc.prevLineOfCode.has_value()) {
@ -186,7 +186,7 @@ void printCodeLines(std::ostream &out,
}
}
void printAtPos(const string &prefix, const ErrPos &pos, std::ostream &out)
void printAtPos(const string & prefix, const ErrPos & pos, std::ostream & out)
{
if (pos)
{
@ -212,7 +212,7 @@ void printAtPos(const string &prefix, const ErrPos &pos, std::ostream &out)
}
}
std::ostream& showErrorInfo(std::ostream &out, const ErrorInfo &einfo, bool showTrace)
std::ostream & showErrorInfo(std::ostream & out, const ErrorInfo & einfo, bool showTrace)
{
auto errwidth = std::max<size_t>(getWindowSize().second, 20);
string prefix = "";

View file

@ -107,7 +107,7 @@ struct Trace {
struct ErrorInfo {
Verbosity level;
string name;
string description;
string description; // FIXME: remove? it seems to be barely used
std::optional<hintformat> hint;
std::optional<ErrPos> errPos;
std::list<Trace> traces;
@ -169,7 +169,7 @@ public:
#endif
const string & msg() const { return calcWhat(); }
const ErrorInfo & info() { calcWhat(); return err; }
const ErrorInfo & info() const { calcWhat(); return err; }
template<typename... Args>
BaseError & addTrace(std::optional<ErrPos> e, const string &fs, const Args & ... args)

View file

@ -1,7 +1,6 @@
#pragma once
#include <boost/format.hpp>
#include <boost/algorithm/string/replace.hpp>
#include <string>
#include "ansicolor.hh"
@ -77,11 +76,11 @@ template <class T>
struct yellowtxt
{
yellowtxt(const T &s) : value(s) {}
const T &value;
const T & value;
};
template <class T>
std::ostream& operator<<(std::ostream &out, const yellowtxt<T> &y)
std::ostream & operator<<(std::ostream & out, const yellowtxt<T> & y)
{
return out << ANSI_YELLOW << y.value << ANSI_NORMAL;
}
@ -89,12 +88,12 @@ std::ostream& operator<<(std::ostream &out, const yellowtxt<T> &y)
template <class T>
struct normaltxt
{
normaltxt(const T &s) : value(s) {}
const T &value;
normaltxt(const T & s) : value(s) {}
const T & value;
};
template <class T>
std::ostream& operator<<(std::ostream &out, const normaltxt<T> &y)
std::ostream & operator<<(std::ostream & out, const normaltxt<T> & y)
{
return out << ANSI_NORMAL << y.value;
}
@ -102,26 +101,30 @@ std::ostream& operator<<(std::ostream &out, const normaltxt<T> &y)
class hintformat
{
public:
hintformat(const string &format) :fmt(format)
hintformat(const string & format) : fmt(format)
{
fmt.exceptions(boost::io::all_error_bits ^
fmt.exceptions(boost::io::all_error_bits ^
boost::io::too_many_args_bit ^
boost::io::too_few_args_bit);
}
hintformat(const hintformat &hf)
: fmt(hf.fmt)
{}
hintformat(const hintformat & hf)
: fmt(hf.fmt)
{ }
hintformat(format && fmt)
: fmt(std::move(fmt))
{ }
template<class T>
hintformat& operator%(const T &value)
hintformat & operator%(const T & value)
{
fmt % yellowtxt(value);
return *this;
}
template<class T>
hintformat& operator%(const normaltxt<T> &value)
hintformat & operator%(const normaltxt<T> & value)
{
fmt % value.value;
return *this;
@ -136,7 +139,7 @@ private:
format fmt;
};
std::ostream& operator<<(std::ostream &os, const hintformat &hf);
std::ostream & operator<<(std::ostream & os, const hintformat & hf);
template<typename... Args>
inline hintformat hintfmt(const std::string & fs, const Args & ... args)
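The hintformat/yellowtxt cleanups above are easier to follow with a standalone sketch of the underlying idea: a tiny wrapper type whose stream operator brackets an interpolated value in ANSI colour codes. This is an illustration only, not the nix classes; the type names and escape codes below are assumptions.

```cpp
#include <iostream>
#include <string>

// Minimal stand-in for yellowtxt: printing the wrapper emits "bold yellow"
// before the value and a reset afterwards.
template<class T>
struct Yellow { const T & value; };

template<class T>
Yellow<T> yellowtxt(const T & v) { return {v}; }

template<class T>
std::ostream & operator<<(std::ostream & out, const Yellow<T> & y)
{
    return out << "\x1b[33;1m" << y.value << "\x1b[0m";
}

int main()
{
    std::string name = "cores";
    // Roughly what a hint built with hintfmt("unknown setting '%s'", name)
    // ends up printing, with the argument highlighted.
    std::cout << "unknown setting '" << yellowtxt(name) << "'\n";
}
```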

View file

@ -10,7 +10,7 @@ namespace nix {
LoggerSettings loggerSettings;
static GlobalConfig::Register r1(&loggerSettings);
static GlobalConfig::Register rLoggerSettings(&loggerSettings);
static thread_local ActivityId curActivity = 0;

View file

@ -93,7 +93,7 @@ void Source::operator () (unsigned char * data, size_t len)
}
std::string Source::drain()
void Source::drainInto(Sink & sink)
{
std::string s;
std::vector<unsigned char> buf(8192);
@ -101,12 +101,19 @@ std::string Source::drain()
size_t n;
try {
n = read(buf.data(), buf.size());
s.append((char *) buf.data(), n);
sink(buf.data(), n);
} catch (EndOfFile &) {
break;
}
}
return s;
}
std::string Source::drain()
{
StringSink s;
drainInto(s);
return *s.s;
}
@ -259,6 +266,24 @@ Sink & operator << (Sink & sink, const StringSet & s)
return sink;
}
Sink & operator << (Sink & sink, const Error & ex)
{
auto info = ex.info();
sink
<< "Error"
<< info.level
<< info.name
<< info.description
<< (info.hint ? info.hint->str() : "")
<< 0 // FIXME: info.errPos
<< info.traces.size();
for (auto & trace : info.traces) {
sink << 0; // FIXME: trace.pos
sink << trace.hint.str();
}
return sink;
}
void readPadding(size_t len, Source & source)
{
@ -312,6 +337,30 @@ template Paths readStrings(Source & source);
template PathSet readStrings(Source & source);
Error readError(Source & source)
{
auto type = readString(source);
assert(type == "Error");
ErrorInfo info;
info.level = (Verbosity) readInt(source);
info.name = readString(source);
info.description = readString(source);
auto hint = readString(source);
if (hint != "") info.hint = hintformat(std::move(format("%s") % hint));
auto havePos = readNum<size_t>(source);
assert(havePos == 0);
auto nrTraces = readNum<size_t>(source);
for (size_t i = 0; i < nrTraces; ++i) {
havePos = readNum<size_t>(source);
assert(havePos == 0);
info.traces.push_back(Trace {
.hint = hintformat(std::move(format("%s") % readString(source)))
});
}
return Error(std::move(info));
}
void StringSink::operator () (const unsigned char * data, size_t len)
{
static bool warned = false;

View file

@ -22,6 +22,12 @@ struct Sink
}
};
/* Just throws away data. */
struct NullSink : Sink
{
void operator () (const unsigned char * data, size_t len) override
{ }
};
/* A buffered abstract sink. Warning: a BufferedSink should not be
used from multiple threads concurrently. */
@ -63,6 +69,8 @@ struct Source
virtual bool good() { return true; }
void drainInto(Sink & sink);
std::string drain();
};
@ -313,6 +321,7 @@ inline Sink & operator << (Sink & sink, uint64_t n)
Sink & operator << (Sink & sink, const string & s);
Sink & operator << (Sink & sink, const Strings & s);
Sink & operator << (Sink & sink, const StringSet & s);
Sink & operator << (Sink & in, const Error & ex);
MakeError(SerialisationError, Error);
@ -334,7 +343,7 @@ T readNum(Source & source)
((uint64_t) buf[6] << 48) |
((uint64_t) buf[7] << 56);
if (n > std::numeric_limits<T>::max())
if (n > (uint64_t)std::numeric_limits<T>::max())
throw SerialisationError("serialised integer %d is too large for type '%s'", n, typeid(T).name());
return (T) n;
@ -374,6 +383,8 @@ Source & operator >> (Source & in, bool & b)
return in;
}
Error readError(Source & source);
/* An adapter that converts a std::basic_istream into a source. */
struct StreamToSourceAdapter : Source
@ -398,4 +409,93 @@ struct StreamToSourceAdapter : Source
};
/* A source that reads a distinct format of concatenated chunks back into its
logical form, in order to guarantee a known state to the original stream,
even in the event of errors.
Use with FramedSink, which also allows the logical stream to be terminated
in the event of an exception.
*/
struct FramedSource : Source
{
Source & from;
bool eof = false;
std::vector<unsigned char> pending;
size_t pos = 0;
FramedSource(Source & from) : from(from)
{ }
~FramedSource()
{
if (!eof) {
while (true) {
auto n = readInt(from);
if (!n) break;
std::vector<unsigned char> data(n);
from(data.data(), n);
}
}
}
size_t read(unsigned char * data, size_t len) override
{
if (eof) throw EndOfFile("reached end of FramedSource");
if (pos >= pending.size()) {
size_t len = readInt(from);
if (!len) {
eof = true;
return 0;
}
pending = std::vector<unsigned char>(len);
pos = 0;
from(pending.data(), len);
}
auto n = std::min(len, pending.size() - pos);
memcpy(data, pending.data() + pos, n);
pos += n;
return n;
}
};
/* Write as chunks in the format expected by FramedSource.
The exception_ptr reference can be used to terminate the stream when you
detect that an error has occurred on the remote end.
*/
struct FramedSink : nix::BufferedSink
{
BufferedSink & to;
std::exception_ptr & ex;
FramedSink(BufferedSink & to, std::exception_ptr & ex) : to(to), ex(ex)
{ }
~FramedSink()
{
try {
to << 0;
to.flush();
} catch (...) {
ignoreException();
}
}
void write(const unsigned char * data, size_t len) override
{
/* Don't send more data if the remote has
encountered an error. */
if (ex) {
auto ex2 = ex;
ex = nullptr;
std::rethrow_exception(ex2);
}
to << len;
to(data, len);
};
};
}
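The comments above describe the framing protocol: each chunk is written as a length followed by that many bytes, and a zero-length chunk terminates the logical stream. Below is a self-contained sketch of that layout using plain buffers; it is illustrative only — the real FramedSink/FramedSource write lengths through nix's integer serialisation rather than raw host-endian uint64_t values.

```cpp
#include <cstdint>
#include <cstring>
#include <iostream>
#include <string>
#include <vector>

// Writer side: emit "length, bytes" pairs; an empty chunk marks the end.
static void writeFrame(std::vector<unsigned char> & wire, const std::string & chunk)
{
    uint64_t len = chunk.size();
    wire.insert(wire.end(), (unsigned char *) &len, (unsigned char *) &len + sizeof len);
    wire.insert(wire.end(), chunk.begin(), chunk.end());
}

int main()
{
    std::vector<unsigned char> wire;
    writeFrame(wire, "hello ");
    writeFrame(wire, "world");
    writeFrame(wire, "");              // zero-length frame terminates the stream

    // Reader side: consume frames until the zero-length terminator, as
    // FramedSource does.
    size_t pos = 0;
    std::string out;
    while (true) {
        uint64_t len;
        std::memcpy(&len, wire.data() + pos, sizeof len);
        pos += sizeof len;
        if (len == 0) break;
        out.append((const char *) wire.data() + pos, len);
        pos += len;
    }
    std::cout << out << "\n";          // prints "hello world"
}
```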

View file

@ -9,7 +9,7 @@ namespace nix {
// If `separator` is found, we return the portion of the string before the
// separator, and modify the string argument to contain only the part after the
// separator. Otherwise, wer return `std::nullopt`, and we leave the argument
// separator. Otherwise, we return `std::nullopt`, and we leave the argument
// string alone.
static inline std::optional<std::string_view> splitPrefixTo(std::string_view & string, char separator) {
auto sepInstance = string.find(separator);
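For reference, here is a minimal standalone restatement of the contract described in the comment above (the real libutil implementation may differ in detail): return the text before the first separator and shrink the argument to the text after it, or return std::nullopt and leave the argument untouched.

```cpp
#include <iostream>
#include <optional>
#include <string_view>

// Illustrative reimplementation of the splitPrefixTo behaviour described above.
static std::optional<std::string_view> splitPrefixTo(std::string_view & s, char sep)
{
    auto pos = s.find(sep);
    if (pos == std::string_view::npos) return std::nullopt;
    auto prefix = s.substr(0, pos);
    s.remove_prefix(pos + 1);          // keep only the part after the separator
    return prefix;
}

int main()
{
    std::string_view rest = "github:NixOS/nixpkgs";
    if (auto scheme = splitPrefixTo(rest, ':'))
        std::cout << *scheme << " | " << rest << "\n";   // "github | NixOS/nixpkgs"
}
```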

View file

@ -161,7 +161,7 @@ namespace nix {
Setting<std::string> setting{&config, "", "name-of-the-setting", "description"};
setting.assign("value");
ASSERT_EQ(config.toJSON().dump(), R"#({"name-of-the-setting":{"aliases":[],"description":"description\n","value":"value"}})#");
ASSERT_EQ(config.toJSON().dump(), R"#({"name-of-the-setting":{"aliases":[],"defaultValue":"","description":"description\n","value":"value"}})#");
}
TEST(Config, setSettingAlias) {

View file

@ -1,3 +1,5 @@
#pragma once
#include "error.hh"
namespace nix {

View file

@ -25,6 +25,8 @@ typedef string Path;
typedef list<Path> Paths;
typedef set<Path> PathSet;
typedef vector<std::pair<string, string>> Headers;
/* Helper class to run code at startup. */
template<typename T>
struct OnStartup

44
src/libutil/url-parts.hh Normal file
View file

@ -0,0 +1,44 @@
#pragma once
#include <string>
#include <regex>
namespace nix {
// URI stuff.
const static std::string pctEncoded = "(?:%[0-9a-fA-F][0-9a-fA-F])";
const static std::string schemeRegex = "(?:[a-z][a-z0-9+.-]*)";
const static std::string ipv6AddressRegex = "(?:\\[[0-9a-fA-F:]+\\])";
const static std::string unreservedRegex = "(?:[a-zA-Z0-9-._~])";
const static std::string subdelimsRegex = "(?:[!$&'\"()*+,;=])";
const static std::string hostnameRegex = "(?:(?:" + unreservedRegex + "|" + pctEncoded + "|" + subdelimsRegex + ")*)";
const static std::string hostRegex = "(?:" + ipv6AddressRegex + "|" + hostnameRegex + ")";
const static std::string userRegex = "(?:(?:" + unreservedRegex + "|" + pctEncoded + "|" + subdelimsRegex + "|:)*)";
const static std::string authorityRegex = "(?:" + userRegex + "@)?" + hostRegex + "(?::[0-9]+)?";
const static std::string pcharRegex = "(?:" + unreservedRegex + "|" + pctEncoded + "|" + subdelimsRegex + "|[:@])";
const static std::string queryRegex = "(?:" + pcharRegex + "|[/? \"])*";
const static std::string segmentRegex = "(?:" + pcharRegex + "+)";
const static std::string absPathRegex = "(?:(?:/" + segmentRegex + ")*/?)";
const static std::string pathRegex = "(?:" + segmentRegex + "(?:/" + segmentRegex + ")*/?)";
// A Git ref (i.e. branch or tag name).
const static std::string refRegexS = "[a-zA-Z0-9][a-zA-Z0-9_.-]*"; // FIXME: check
extern std::regex refRegex;
// Instead of defining what a good Git Ref is, we define what a bad Git Ref is
// This is because of the definition of a ref in refs.c in https://github.com/git/git
// See tests/fetchGitRefs.sh for the full definition
const static std::string badGitRefRegexS = "//|^[./]|/\\.|\\.\\.|[[:cntrl:][:space:]:?^~\[]|\\\\|\\*|\\.lock$|\\.lock/|@\\{|[/.]$|^@$|^$";
extern std::regex badGitRefRegex;
// A Git revision (a SHA-1 commit hash).
const static std::string revRegexS = "[0-9a-fA-F]{40}";
extern std::regex revRegex;
// A ref or revision, or a ref followed by a revision.
const static std::string refAndOrRevRegex = "(?:(" + revRegexS + ")|(?:(" + refRegexS + ")(?:/(" + revRegexS + "))?))";
const static std::string flakeIdRegexS = "[a-zA-Z][a-zA-Z0-9_-]*";
extern std::regex flakeIdRegex;
}
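To see how these building blocks combine, here is a small usage sketch of the refAndOrRevRegex pattern assembled above; the input string is made up for illustration.

```cpp
#include <iostream>
#include <regex>
#include <string>

int main()
{
    // Same building blocks as in url-parts.hh, copied here so the example is
    // self-contained.
    const std::string refRegexS = "[a-zA-Z0-9][a-zA-Z0-9_.-]*";
    const std::string revRegexS = "[0-9a-fA-F]{40}";
    std::regex refAndOrRev(
        "(?:(" + revRegexS + ")|(?:(" + refRegexS + ")(?:/(" + revRegexS + "))?))");

    std::string s = "release-20.09/0123456789abcdef0123456789abcdef01234567";
    std::smatch m;
    if (std::regex_match(s, m, refAndOrRev))
        // Group 1 is a bare revision, group 2 a ref, group 3 a revision after a ref.
        std::cout << "ref=" << m[2] << " rev=" << m[3] << "\n";
}
```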

View file

@ -1,4 +1,5 @@
#include "url.hh"
#include "url-parts.hh"
#include "util.hh"
namespace nix {

View file

@ -2,8 +2,6 @@
#include "error.hh"
#include <regex>
namespace nix {
struct ParsedURL
@ -29,40 +27,4 @@ std::map<std::string, std::string> decodeQuery(const std::string & query);
ParsedURL parseURL(const std::string & url);
// URI stuff.
const static std::string pctEncoded = "(?:%[0-9a-fA-F][0-9a-fA-F])";
const static std::string schemeRegex = "(?:[a-z+]+)";
const static std::string ipv6AddressRegex = "(?:\\[[0-9a-fA-F:]+\\])";
const static std::string unreservedRegex = "(?:[a-zA-Z0-9-._~])";
const static std::string subdelimsRegex = "(?:[!$&'\"()*+,;=])";
const static std::string hostnameRegex = "(?:(?:" + unreservedRegex + "|" + pctEncoded + "|" + subdelimsRegex + ")*)";
const static std::string hostRegex = "(?:" + ipv6AddressRegex + "|" + hostnameRegex + ")";
const static std::string userRegex = "(?:(?:" + unreservedRegex + "|" + pctEncoded + "|" + subdelimsRegex + "|:)*)";
const static std::string authorityRegex = "(?:" + userRegex + "@)?" + hostRegex + "(?::[0-9]+)?";
const static std::string pcharRegex = "(?:" + unreservedRegex + "|" + pctEncoded + "|" + subdelimsRegex + "|[:@])";
const static std::string queryRegex = "(?:" + pcharRegex + "|[/? \"])*";
const static std::string segmentRegex = "(?:" + pcharRegex + "+)";
const static std::string absPathRegex = "(?:(?:/" + segmentRegex + ")*/?)";
const static std::string pathRegex = "(?:" + segmentRegex + "(?:/" + segmentRegex + ")*/?)";
// A Git ref (i.e. branch or tag name).
const static std::string refRegexS = "[a-zA-Z0-9][a-zA-Z0-9_.-]*"; // FIXME: check
extern std::regex refRegex;
// Instead of defining what a good Git Ref is, we define what a bad Git Ref is
// This is because of the definition of a ref in refs.c in https://github.com/git/git
// See tests/fetchGitRefs.sh for the full definition
const static std::string badGitRefRegexS = "//|^[./]|/\\.|\\.\\.|[[:cntrl:][:space:]:?^~\[]|\\\\|\\*|\\.lock$|\\.lock/|@\\{|[/.]$|^@$|^$";
extern std::regex badGitRefRegex;
// A Git revision (a SHA-1 commit hash).
const static std::string revRegexS = "[0-9a-fA-F]{40}";
extern std::regex revRegex;
// A ref or revision, or a ref followed by a revision.
const static std::string refAndOrRevRegex = "(?:(" + revRegexS + ")|(?:(" + refRegexS + ")(?:/(" + revRegexS + "))?))";
const static std::string flakeIdRegexS = "[a-zA-Z][a-zA-Z0-9_-]*";
extern std::regex flakeIdRegex;
}

View file

@ -325,7 +325,12 @@ void writeFile(const Path & path, const string & s, mode_t mode)
AutoCloseFD fd = open(path.c_str(), O_WRONLY | O_TRUNC | O_CREAT | O_CLOEXEC, mode);
if (!fd)
throw SysError("opening file '%1%'", path);
writeFull(fd.get(), s);
try {
writeFull(fd.get(), s);
} catch (Error & e) {
e.addTrace({}, "writing file '%1%'", path);
throw;
}
}
@ -337,11 +342,16 @@ void writeFile(const Path & path, Source & source, mode_t mode)
std::vector<unsigned char> buf(64 * 1024);
while (true) {
try {
auto n = source.read(buf.data(), buf.size());
writeFull(fd.get(), (unsigned char *) buf.data(), n);
} catch (EndOfFile &) { break; }
try {
while (true) {
try {
auto n = source.read(buf.data(), buf.size());
writeFull(fd.get(), (unsigned char *) buf.data(), n);
} catch (EndOfFile &) { break; }
}
} catch (Error & e) {
e.addTrace({}, "writing file '%1%'", path);
throw;
}
}
@ -511,22 +521,24 @@ std::string getUserName()
}
static Path getHome2()
Path getHome()
{
auto homeDir = getEnv("HOME");
if (!homeDir) {
std::vector<char> buf(16384);
struct passwd pwbuf;
struct passwd * pw;
if (getpwuid_r(geteuid(), &pwbuf, buf.data(), buf.size(), &pw) != 0
|| !pw || !pw->pw_dir || !pw->pw_dir[0])
throw Error("cannot determine user's home directory");
homeDir = pw->pw_dir;
}
return *homeDir;
};
Path getHome() { return getHome2(); }
static Path homeDir = []()
{
auto homeDir = getEnv("HOME");
if (!homeDir) {
std::vector<char> buf(16384);
struct passwd pwbuf;
struct passwd * pw;
if (getpwuid_r(geteuid(), &pwbuf, buf.data(), buf.size(), &pw) != 0
|| !pw || !pw->pw_dir || !pw->pw_dir[0])
throw Error("cannot determine user's home directory");
homeDir = pw->pw_dir;
}
return *homeDir;
}();
return homeDir;
}
Path getCacheDir()
@ -1641,4 +1653,11 @@ AutoCloseFD createUnixDomainSocket(const Path & path, mode_t mode)
return fdSocket;
}
string showBytes(uint64_t bytes)
{
return fmt("%.2f MiB", bytes / (1024.0 * 1024.0));
}
}
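The getHome() rewrite above relies on a common C++ idiom: a function-local static initialised by an immediately invoked lambda, so the lookup runs exactly once (thread-safely since C++11) and later calls return the cached value. A simplified sketch, with the fallback value chosen only for illustration:

```cpp
#include <cstdlib>
#include <iostream>
#include <string>

// Sketch of the memoisation idiom used by getHome(): the static local is
// initialised once by the immediately invoked lambda; subsequent calls reuse it.
static const std::string & cachedHome()
{
    static std::string home = []() {
        const char * h = std::getenv("HOME");
        return std::string(h ? h : "/");   // fallback is illustrative only
    }();
    return home;
}

int main()
{
    std::cout << cachedHome() << "\n";
    std::cout << cachedHome() << "\n";     // second call returns the cached value
}
```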

View file

@ -12,13 +12,9 @@
#include <signal.h>
#include <functional>
#include <limits>
#include <cstdio>
#include <map>
#include <sstream>
#include <optional>
#include <future>
#include <iterator>
#ifndef HAVE_STRUCT_DIRENT_D_TYPE
#define DT_UNKNOWN 0
@ -480,43 +476,8 @@ std::optional<typename T::mapped_type> get(const T & map, const typename T::key_
}
/* A callback is a wrapper around a lambda that accepts a valid of
type T or an exception. (We abuse std::future<T> to pass the value or
exception.) */
template<typename T>
class Callback
{
std::function<void(std::future<T>)> fun;
std::atomic_flag done = ATOMIC_FLAG_INIT;
public:
Callback(std::function<void(std::future<T>)> fun) : fun(fun) { }
Callback(Callback && callback) : fun(std::move(callback.fun))
{
auto prev = callback.done.test_and_set();
if (prev) done.test_and_set();
}
void operator()(T && t) noexcept
{
auto prev = done.test_and_set();
assert(!prev);
std::promise<T> promise;
promise.set_value(std::move(t));
fun(promise.get_future());
}
void rethrow(const std::exception_ptr & exc = std::current_exception()) noexcept
{
auto prev = done.test_and_set();
assert(!prev);
std::promise<T> promise;
promise.set_exception(exc);
fun(promise.get_future());
}
};
class Callback;
/* Start a thread that handles various signals. Also block those signals
@ -612,4 +573,7 @@ template<class... Ts> struct overloaded : Ts... { using Ts::operator()...; };
template<class... Ts> overloaded(Ts...) -> overloaded<Ts...>;
std::string showBytes(uint64_t bytes);
}
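The Callback<T> class that this hunk moves out of util.hh wraps a lambda taking a std::future<T>, so a single callback can deliver either a value or an exception. A simplified sketch of that pattern (omitting the real class's once-only atomic guard), with names invented for illustration:

```cpp
#include <functional>
#include <future>
#include <iostream>
#include <stdexcept>

// Simplified version of the callback-via-future idea described above.
template<typename T>
struct SimpleCallback
{
    std::function<void(std::future<T>)> fun;

    void success(T value) {
        std::promise<T> p;
        p.set_value(std::move(value));
        fun(p.get_future());            // deliver a value
    }

    void failure(std::exception_ptr exc) {
        std::promise<T> p;
        p.set_exception(exc);
        fun(p.get_future());            // deliver an exception
    }
};

int main()
{
    SimpleCallback<int> cb{[](std::future<int> f) {
        try { std::cout << "got " << f.get() << "\n"; }
        catch (std::exception & e) { std::cout << "error: " << e.what() << "\n"; }
    }};
    cb.success(42);
    cb.failure(std::make_exception_ptr(std::runtime_error("boom")));
}
```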

View file

@ -67,7 +67,7 @@ std::vector<string> shellwords(const string & s)
return res;
}
static void _main(int argc, char * * argv)
static void main_nix_build(int argc, char * * argv)
{
auto dryRun = false;
auto runEnv = std::regex_search(argv[0], std::regex("nix-shell$"));
@ -98,8 +98,8 @@ static void _main(int argc, char * * argv)
// List of environment variables kept for --pure
std::set<string> keepVars{
"HOME", "USER", "LOGNAME", "DISPLAY", "PATH", "TERM",
"IN_NIX_SHELL", "TZ", "PAGER", "NIX_BUILD_SHELL", "SHLVL",
"HOME", "USER", "LOGNAME", "DISPLAY", "PATH", "TERM", "IN_NIX_SHELL",
"NIX_SHELL_PRESERVE_PROMPT", "TZ", "PAGER", "NIX_BUILD_SHELL", "SHLVL",
"http_proxy", "https_proxy", "ftp_proxy", "all_proxy", "no_proxy"
};
@ -446,7 +446,7 @@ static void _main(int argc, char * * argv)
"PATH=%4%:\"$PATH\"; "
"SHELL=%5%; "
"set +e; "
R"s([ -n "$PS1" ] && PS1='\n\[\033[1;32m\][nix-shell:\w]\$\[\033[0m\] '; )s"
R"s([ -n "$PS1" -a -z "$NIX_SHELL_PRESERVE_PROMPT" ] && PS1='\n\[\033[1;32m\][nix-shell:\w]\$\[\033[0m\] '; )s"
"if [ \"$(type -t runHook)\" = function ]; then runHook shellHook; fi; "
"unset NIX_ENFORCE_PURITY; "
"shopt -u nullglob; "
@ -487,52 +487,58 @@ static void _main(int argc, char * * argv)
std::vector<StorePathWithOutputs> pathsToBuild;
std::map<Path, Path> drvPrefixes;
std::map<Path, Path> resultSymlinks;
std::vector<Path> outPaths;
std::map<StorePath, std::pair<size_t, StringSet>> drvMap;
for (auto & drvInfo : drvs) {
auto drvPath = drvInfo.queryDrvPath();
auto outPath = drvInfo.queryOutPath();
auto drvPath = store->parseStorePath(drvInfo.queryDrvPath());
auto outputName = drvInfo.queryOutputName();
if (outputName == "")
throw Error("derivation '%s' lacks an 'outputName' attribute", drvPath);
throw Error("derivation '%s' lacks an 'outputName' attribute", store->printStorePath(drvPath));
pathsToBuild.push_back({store->parseStorePath(drvPath), {outputName}});
pathsToBuild.push_back({drvPath, {outputName}});
std::string drvPrefix;
auto i = drvPrefixes.find(drvPath);
if (i != drvPrefixes.end())
drvPrefix = i->second;
auto i = drvMap.find(drvPath);
if (i != drvMap.end())
i->second.second.insert(outputName);
else {
drvPrefix = outLink;
if (drvPrefixes.size())
drvPrefix += fmt("-%d", drvPrefixes.size() + 1);
drvPrefixes[drvPath] = drvPrefix;
drvMap[drvPath] = {drvMap.size(), {outputName}};
}
std::string symlink = drvPrefix;
if (outputName != "out") symlink += "-" + outputName;
resultSymlinks[symlink] = outPath;
outPaths.push_back(outPath);
}
buildPaths(pathsToBuild);
if (dryRun) return;
for (auto & symlink : resultSymlinks)
if (auto store2 = store.dynamic_pointer_cast<LocalFSStore>())
store2->addPermRoot(store->parseStorePath(symlink.second), absPath(symlink.first));
std::vector<StorePath> outPaths;
for (auto & [drvPath, info] : drvMap) {
auto & [counter, wantedOutputs] = info;
std::string drvPrefix = outLink;
if (counter)
drvPrefix += fmt("-%d", counter + 1);
auto builtOutputs = store->queryDerivationOutputMap(drvPath);
for (auto & outputName : wantedOutputs) {
auto outputPath = builtOutputs.at(outputName);
if (auto store2 = store.dynamic_pointer_cast<LocalFSStore>()) {
std::string symlink = drvPrefix;
if (outputName != "out") symlink += "-" + outputName;
store2->addPermRoot(outputPath, absPath(symlink));
}
outPaths.push_back(outputPath);
}
}
logger->stop();
for (auto & path : outPaths)
std::cout << path << '\n';
std::cout << store->printStorePath(path) << '\n';
}
}
static RegisterLegacyCommand s1("nix-build", _main);
static RegisterLegacyCommand s2("nix-shell", _main);
static RegisterLegacyCommand r_nix_build("nix-build", main_nix_build);
static RegisterLegacyCommand r_nix_shell("nix-shell", main_nix_build);

View file

@ -76,6 +76,13 @@ static void update(const StringSet & channelNames)
auto store = openStore();
auto [fd, unpackChannelPath] = createTempFile();
writeFull(fd.get(),
#include "unpack-channel.nix.gen.hh"
);
fd = -1;
AutoDelete del(unpackChannelPath, false);
// Download each channel.
Strings exprs;
for (const auto & channel : channels) {
@ -104,7 +111,7 @@ static void update(const StringSet & channelNames)
bool unpacked = false;
if (std::regex_search(filename, std::regex("\\.tar\\.(gz|bz2|xz)$"))) {
runProgram(settings.nixBinDir + "/nix-build", false, { "--no-out-link", "--expr", "import <nix/unpack-channel.nix> "
runProgram(settings.nixBinDir + "/nix-build", false, { "--no-out-link", "--expr", "import " + unpackChannelPath +
"{ name = \"" + cname + "\"; channelName = \"" + name + "\"; src = builtins.storePath \"" + filename + "\"; }" });
unpacked = true;
}
@ -125,7 +132,7 @@ static void update(const StringSet & channelNames)
// Unpack the channel tarballs into the Nix store and install them
// into the channels profile.
std::cerr << "unpacking channels...\n";
Strings envArgs{ "--profile", profile, "--file", "<nix/unpack-channel.nix>", "--install", "--from-expression" };
Strings envArgs{ "--profile", profile, "--file", unpackChannelPath, "--install", "--from-expression" };
for (auto & expr : exprs)
envArgs.push_back(std::move(expr));
envArgs.push_back("--quiet");
@ -146,7 +153,7 @@ static void update(const StringSet & channelNames)
replaceSymlink(profile, channelLink);
}
static int _main(int argc, char ** argv)
static int main_nix_channel(int argc, char ** argv)
{
{
// Figure out the name of the `.nix-channels' file to use
@ -243,4 +250,4 @@ static int _main(int argc, char ** argv)
}
}
static RegisterLegacyCommand s1("nix-channel", _main);
static RegisterLegacyCommand r_nix_channel("nix-channel", main_nix_channel);

View file

@ -0,0 +1,12 @@
{ name, channelName, src }:
derivation {
builder = "builtin:unpack-channel";
system = "builtin";
inherit name channelName src;
# No point in doing this remotely.
preferLocalBuild = true;
}

View file

@ -49,7 +49,7 @@ void removeOldGenerations(std::string dir)
}
}
static int _main(int argc, char * * argv)
static int main_nix_collect_garbage(int argc, char * * argv)
{
{
bool removeOld = false;
@ -92,4 +92,4 @@ static int _main(int argc, char * * argv)
}
}
static RegisterLegacyCommand s1("nix-collect-garbage", _main);
static RegisterLegacyCommand r_nix_collect_garbage("nix-collect-garbage", main_nix_collect_garbage);

View file

@ -4,7 +4,7 @@
using namespace nix;
static int _main(int argc, char ** argv)
static int main_nix_copy_closure(int argc, char ** argv)
{
{
auto gzip = false;
@ -65,4 +65,4 @@ static int _main(int argc, char ** argv)
}
}
static RegisterLegacyCommand s1("nix-copy-closure", _main);
static RegisterLegacyCommand r_nix_copy_closure("nix-copy-closure", main_nix_copy_closure);

View file

@ -1,5 +1,6 @@
#include "shared.hh"
#include "local-store.hh"
#include "remote-store.hh"
#include "util.hh"
#include "serialise.hh"
#include "archive.hh"
@ -264,7 +265,7 @@ static void daemonLoop(char * * argv)
}
static int _main(int argc, char * * argv)
static int main_nix_daemon(int argc, char * * argv)
{
{
auto stdio = false;
@ -285,44 +286,28 @@ static int _main(int argc, char * * argv)
initPlugins();
if (stdio) {
if (getStoreType() == tDaemon) {
// Forward on this connection to the real daemon
auto socketPath = settings.nixDaemonSocketFile;
auto s = socket(PF_UNIX, SOCK_STREAM, 0);
if (s == -1)
throw SysError("creating Unix domain socket");
if (auto store = openUncachedStore().dynamic_pointer_cast<RemoteStore>()) {
auto conn = store->openConnectionWrapper();
int from = conn->from.fd;
int to = conn->to.fd;
auto socketDir = dirOf(socketPath);
if (chdir(socketDir.c_str()) == -1)
throw SysError("changing to socket directory '%1%'", socketDir);
auto socketName = std::string(baseNameOf(socketPath));
auto addr = sockaddr_un{};
addr.sun_family = AF_UNIX;
if (socketName.size() + 1 >= sizeof(addr.sun_path))
throw Error("socket name %1% is too long", socketName);
strcpy(addr.sun_path, socketName.c_str());
if (connect(s, (struct sockaddr *) &addr, sizeof(addr)) == -1)
throw SysError("cannot connect to daemon at %1%", socketPath);
auto nfds = (s > STDIN_FILENO ? s : STDIN_FILENO) + 1;
auto nfds = std::max(from, STDIN_FILENO) + 1;
while (true) {
fd_set fds;
FD_ZERO(&fds);
FD_SET(s, &fds);
FD_SET(from, &fds);
FD_SET(STDIN_FILENO, &fds);
if (select(nfds, &fds, nullptr, nullptr, nullptr) == -1)
throw SysError("waiting for data from client or server");
if (FD_ISSET(s, &fds)) {
auto res = splice(s, nullptr, STDOUT_FILENO, nullptr, SSIZE_MAX, SPLICE_F_MOVE);
if (FD_ISSET(from, &fds)) {
auto res = splice(from, nullptr, STDOUT_FILENO, nullptr, SSIZE_MAX, SPLICE_F_MOVE);
if (res == -1)
throw SysError("splicing data from daemon socket to stdout");
else if (res == 0)
throw EndOfFile("unexpected EOF from daemon socket");
}
if (FD_ISSET(STDIN_FILENO, &fds)) {
auto res = splice(STDIN_FILENO, nullptr, s, nullptr, SSIZE_MAX, SPLICE_F_MOVE);
auto res = splice(STDIN_FILENO, nullptr, to, nullptr, SSIZE_MAX, SPLICE_F_MOVE);
if (res == -1)
throw SysError("splicing data from stdin to daemon socket");
else if (res == 0)
@ -345,4 +330,4 @@ static int _main(int argc, char * * argv)
}
}
static RegisterLegacyCommand s1("nix-daemon", _main);
static RegisterLegacyCommand r_nix_daemon("nix-daemon", main_nix_daemon);

View file

@ -230,7 +230,7 @@ static DrvInfos filterBySelector(EvalState & state, const DrvInfos & allElems,
{
DrvNames selectors = drvNamesFromArgs(args);
if (selectors.empty())
selectors.push_back(DrvName("*"));
selectors.emplace_back("*");
DrvInfos elems;
set<unsigned int> done;
@ -1330,7 +1330,7 @@ static void opVersion(Globals & globals, Strings opFlags, Strings opArgs)
}
static int _main(int argc, char * * argv)
static int main_nix_env(int argc, char * * argv)
{
{
Strings opFlags, opArgs;
@ -1460,4 +1460,4 @@ static int _main(int argc, char * * argv)
}
}
static RegisterLegacyCommand s1("nix-env", _main);
static RegisterLegacyCommand r_nix_env("nix-env", main_nix_env);

View file

@ -83,7 +83,7 @@ void processExpr(EvalState & state, const Strings & attrPaths,
}
static int _main(int argc, char * * argv)
static int main_nix_instantiate(int argc, char * * argv)
{
{
Strings files;
@ -192,4 +192,4 @@ static int _main(int argc, char * * argv)
}
}
static RegisterLegacyCommand s1("nix-instantiate", _main);
static RegisterLegacyCommand r_nix_instantiate("nix-instantiate", main_nix_instantiate);

View file

@ -48,7 +48,7 @@ string resolveMirrorUri(EvalState & state, string uri)
}
static int _main(int argc, char * * argv)
static int main_nix_prefetch_url(int argc, char * * argv)
{
{
HashType ht = htSHA256;
@ -57,6 +57,7 @@ static int _main(int argc, char * * argv)
bool fromExpr = false;
string attrPath;
bool unpack = false;
bool executable = false;
string name;
struct MyArgs : LegacyArgs, MixEvalArgs
@ -81,6 +82,8 @@ static int _main(int argc, char * * argv)
}
else if (*arg == "--unpack")
unpack = true;
else if (*arg == "--executable")
executable = true;
else if (*arg == "--name")
name = getArg(*arg, arg, end);
else if (*arg != "" && arg->at(0) == '-')
@ -175,7 +178,11 @@ static int _main(int argc, char * * argv)
/* Download the file. */
{
AutoCloseFD fd = open(tmpFile.c_str(), O_WRONLY | O_CREAT | O_EXCL, 0600);
auto mode = 0600;
if (executable)
mode = 0700;
AutoCloseFD fd = open(tmpFile.c_str(), O_WRONLY | O_CREAT | O_EXCL, mode);
if (!fd) throw SysError("creating temporary file '%s'", tmpFile);
FdSink sink(fd.get());
@ -201,7 +208,7 @@ static int _main(int argc, char * * argv)
tmpFile = unpacked;
}
const auto method = unpack ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat;
const auto method = unpack || executable ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat;
auto info = store->addToStoreSlow(name, tmpFile, method, ht, expectedHash);
storePath = info.path;
@ -222,4 +229,4 @@ static int _main(int argc, char * * argv)
}
}
static RegisterLegacyCommand s1("nix-prefetch-url", _main);
static RegisterLegacyCommand r_nix_prefetch_url("nix-prefetch-url", main_nix_prefetch_url);

View file

@ -24,6 +24,9 @@
#endif
namespace nix_store {
using namespace nix;
using std::cin;
using std::cout;
@ -64,6 +67,7 @@ static PathSet realisePath(StorePathWithOutputs path, bool build = true)
if (path.path.isDerivation()) {
if (build) store->buildPaths({path});
auto outputPaths = store->queryDerivationOutputMap(path.path);
Derivation drv = store->derivationFromPath(path.path);
rootNr++;
@ -76,7 +80,8 @@ static PathSet realisePath(StorePathWithOutputs path, bool build = true)
if (i == drv.outputs.end())
throw Error("derivation '%s' does not have an output named '%s'",
store2->printStorePath(path.path), j);
auto outPath = store2->printStorePath(i->second.path(*store, drv.name));
auto outPath = outputPaths.at(i->first);
auto retPath = store->printStorePath(outPath);
if (store2) {
if (gcRoot == "")
printGCWarning();
@ -84,10 +89,10 @@ static PathSet realisePath(StorePathWithOutputs path, bool build = true)
Path rootName = gcRoot;
if (rootNr > 1) rootName += "-" + std::to_string(rootNr);
if (i->first != "out") rootName += "-" + i->first;
outPath = store2->addPermRoot(store->parseStorePath(outPath), rootName);
retPath = store2->addPermRoot(outPath, rootName);
}
}
outputs.insert(outPath);
outputs.insert(retPath);
}
return outputs;
}
@ -217,8 +222,13 @@ static StorePathSet maybeUseOutputs(const StorePath & storePath, bool useOutput,
if (useOutput && storePath.isDerivation()) {
auto drv = store->derivationFromPath(storePath);
StorePathSet outputs;
for (auto & i : drv.outputsAndPaths(*store))
outputs.insert(i.second.second);
if (forceRealise)
return store->queryDerivationOutputs(storePath);
for (auto & i : drv.outputsAndOptPaths(*store)) {
if (!i.second.second)
throw UsageError("Cannot use output path of floating content-addressed derivation until we know what it is (e.g. by building it)");
outputs.insert(*i.second.second);
}
return outputs;
}
else return {storePath};
@ -308,11 +318,9 @@ static void opQuery(Strings opFlags, Strings opArgs)
case qOutputs: {
for (auto & i : opArgs) {
auto i2 = store->followLinksToStorePath(i);
if (forceRealise) realisePath({i2});
Derivation drv = store->derivationFromPath(i2);
for (auto & j : drv.outputsAndPaths(*store))
cout << fmt("%1%\n", store->printStorePath(j.second.second));
auto outputs = maybeUseOutputs(store->followLinksToStorePath(i), true, forceRealise);
for (auto & outputPath : outputs)
cout << fmt("%1%\n", store->printStorePath(outputPath));
}
break;
}
@ -817,7 +825,7 @@ static void opServe(Strings opFlags, Strings opArgs)
case cmdQueryValidPaths: {
bool lock = readInt(in);
bool substitute = readInt(in);
auto paths = readStorePaths<StorePathSet>(*store, in);
auto paths = worker_proto::read(*store, in, Phantom<StorePathSet> {});
if (lock && writeAllowed)
for (auto & path : paths)
store->addTempRoot(path);
@ -847,19 +855,19 @@ static void opServe(Strings opFlags, Strings opArgs)
}
}
writeStorePaths(*store, out, store->queryValidPaths(paths));
worker_proto::write(*store, out, store->queryValidPaths(paths));
break;
}
case cmdQueryPathInfos: {
auto paths = readStorePaths<StorePathSet>(*store, in);
auto paths = worker_proto::read(*store, in, Phantom<StorePathSet> {});
// !!! Maybe we want a queryPathInfos?
for (auto & i : paths) {
try {
auto info = store->queryPathInfo(i);
out << store->printStorePath(info->path)
<< (info->deriver ? store->printStorePath(*info->deriver) : "");
writeStorePaths(*store, out, info->references);
worker_proto::write(*store, out, info->references);
// !!! Maybe we want compression?
out << info->narSize // downloadSize
<< info->narSize;
@ -887,7 +895,7 @@ static void opServe(Strings opFlags, Strings opArgs)
case cmdExportPaths: {
readInt(in); // obsolete
store->exportPaths(readStorePaths<StorePathSet>(*store, in), out);
store->exportPaths(worker_proto::read(*store, in, Phantom<StorePathSet> {}), out);
break;
}
@ -936,9 +944,9 @@ static void opServe(Strings opFlags, Strings opArgs)
case cmdQueryClosure: {
bool includeOutputs = readInt(in);
StorePathSet closure;
store->computeFSClosure(readStorePaths<StorePathSet>(*store, in),
store->computeFSClosure(worker_proto::read(*store, in, Phantom<StorePathSet> {}),
closure, false, includeOutputs);
writeStorePaths(*store, out, closure);
worker_proto::write(*store, out, closure);
break;
}
@ -953,7 +961,7 @@ static void opServe(Strings opFlags, Strings opArgs)
};
if (deriver != "")
info.deriver = store->parseStorePath(deriver);
info.references = readStorePaths<StorePathSet>(*store, in);
info.references = worker_proto::read(*store, in, Phantom<StorePathSet> {});
in >> info.registrationTime >> info.narSize >> info.ultimate;
info.sigs = readStrings<StringSet>(in);
info.ca = parseContentAddressOpt(readString(in));
@ -1020,7 +1028,7 @@ static void opVersion(Strings opFlags, Strings opArgs)
/* Scan the arguments; find the operation, set global flags, put all
other flags in a list, and put all other arguments in another
list. */
static int _main(int argc, char * * argv)
static int main_nix_store(int argc, char * * argv)
{
{
Strings opFlags, opArgs;
@ -1116,4 +1124,6 @@ static int _main(int argc, char * * argv)
}
}
static RegisterLegacyCommand s1("nix-store", _main);
static RegisterLegacyCommand r_nix_store("nix-store", main_nix_store);
}
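The worker_proto::read(*store, in, Phantom<StorePathSet> {}) calls above use an empty "phantom" tag type to select which decoder runs for the requested result type. Below is a rough standalone sketch of that dispatch pattern, using a text istream instead of nix's Source; the names are illustrative, not the actual worker-protocol API.

```cpp
#include <iostream>
#include <set>
#include <sstream>
#include <string>

// The empty tag type carries only the requested result type; overload
// resolution on it picks the matching decoder.
template<typename T> struct Phantom {};

static std::string read(std::istream & in, Phantom<std::string>)
{
    std::string s; in >> s; return s;
}

static std::set<std::string> read(std::istream & in, Phantom<std::set<std::string>>)
{
    size_t n; in >> n;                          // count, then that many elements
    std::set<std::string> res;
    while (n--) res.insert(read(in, Phantom<std::string>{}));
    return res;
}

int main()
{
    std::istringstream in("2 foo bar");
    auto paths = read(in, Phantom<std::set<std::string>>{});
    for (auto & p : paths) std::cout << p << "\n";
}
```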

View file

@ -87,4 +87,4 @@ struct CmdAddToStore : MixDryRun, StoreCommand
}
};
static auto r1 = registerCommand<CmdAddToStore>("add-to-store");
static auto rCmdAddToStore = registerCommand<CmdAddToStore>("add-to-store");

Some files were not shown because too many files have changed in this diff.