
Merge remote-tracking branch 'origin/master' into libgit2

Eelco Dolstra 2023-11-09 16:48:41 +01:00
commit 98a120b8b8
197 changed files with 5187 additions and 3152 deletions

View file

@ -1,3 +1,6 @@
#pragma once
///@file
#include "derived-path.hh"
#include "realisation.hh"

View file

@ -175,7 +175,7 @@ void BuiltPathsCommand::run(ref<Store> store, Installables && installables)
throw UsageError("'--all' does not expect arguments");
// XXX: Only uses opaque paths, ignores all the realisations
for (auto & p : store->queryAllValidPaths())
paths.push_back(BuiltPath::Opaque{p});
paths.emplace_back(BuiltPath::Opaque{p});
} else {
paths = Installable::toBuiltPaths(getEvalStore(), store, realiseMode, operateOn, installables);
if (recursive) {
@ -188,7 +188,7 @@ void BuiltPathsCommand::run(ref<Store> store, Installables && installables)
}
store->computeFSClosure(pathsRoots, pathsClosure);
for (auto & path : pathsClosure)
paths.push_back(BuiltPath::Opaque{path});
paths.emplace_back(BuiltPath::Opaque{path});
}
}
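The `push_back` to `emplace_back` changes in this hunk avoid constructing an extra `BuiltPath` temporary. A minimal standalone sketch of the difference, using stand-in types rather than Nix's real `BuiltPath`:

```cpp
#include <string>
#include <variant>
#include <vector>

// Stand-ins for Nix's BuiltPath / BuiltPath::Opaque (illustration only).
struct Opaque { std::string path; };
using BuiltPathLike = std::variant<Opaque>;

int main()
{
    std::vector<BuiltPathLike> paths;
    Opaque p{"/nix/store/...-foo"};

    // push_back: a BuiltPathLike temporary is created, then moved in.
    paths.push_back(BuiltPathLike{Opaque{p}});

    // emplace_back: the element is constructed in place from the argument.
    paths.emplace_back(Opaque{p});
}
```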

View file

@ -2,7 +2,6 @@
#include "common-eval-args.hh"
#include "shared.hh"
#include "filetransfer.hh"
#include "util.hh"
#include "eval.hh"
#include "fetchers.hh"
#include "registry.hh"
@ -165,7 +164,7 @@ Bindings * MixEvalArgs::getAutoArgs(EvalState & state)
return res.finish();
}
SourcePath lookupFileArg(EvalState & state, std::string_view s)
SourcePath lookupFileArg(EvalState & state, std::string_view s, CanonPath baseDir)
{
if (EvalSettings::isPseudoUrl(s)) {
auto storePath = fetchers::downloadTarball(
@ -186,7 +185,7 @@ SourcePath lookupFileArg(EvalState & state, std::string_view s)
}
else
return state.rootPath(CanonPath::fromCwd(s));
return state.rootPath(CanonPath(s, baseDir));
}
}
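This hunk threads an explicit base directory through `lookupFileArg` instead of always resolving relative paths against the current working directory. A standalone analogue of the change, with hypothetical names and `std::filesystem` standing in for Nix's `CanonPath`:

```cpp
#include <filesystem>
#include <iostream>
#include <string_view>

namespace fs = std::filesystem;

// Before: paths were implicitly CWD-relative. After: the caller may
// supply the base directory, with the CWD kept as the default.
fs::path lookupFileArgLike(std::string_view s, const fs::path & baseDir = fs::current_path())
{
    return (baseDir / s).lexically_normal();
}

int main()
{
    std::cout << lookupFileArgLike("foo.nix") << "\n";                 // CWD-relative
    std::cout << lookupFileArgLike("foo.nix", "/src/project") << "\n"; // base-relative
}
```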

View file

@ -2,6 +2,7 @@
///@file
#include "args.hh"
#include "canon-path.hh"
#include "common-args.hh"
#include "search-path.hh"
@ -28,6 +29,6 @@ private:
std::map<std::string, std::string> autoArgs;
};
SourcePath lookupFileArg(EvalState & state, std::string_view s);
SourcePath lookupFileArg(EvalState & state, std::string_view s, CanonPath baseDir = CanonPath::fromCwd());
}

View file

@ -1,5 +1,5 @@
#include "util.hh"
#include "editor-for.hh"
#include "environment-variables.hh"
namespace nix {

View file

@ -4,7 +4,6 @@
#include "globals.hh"
#include "installable-value.hh"
#include "outputs-spec.hh"
#include "util.hh"
#include "command.hh"
#include "attr-path.hh"
#include "common-eval-args.hh"

View file

@ -4,6 +4,7 @@
#include "installable-attr-path.hh"
#include "installable-flake.hh"
#include "outputs-spec.hh"
#include "users.hh"
#include "util.hh"
#include "command.hh"
#include "attr-path.hh"
@ -87,7 +88,7 @@ MixFlakeOptions::MixFlakeOptions()
lockFlags.writeLockFile = false;
lockFlags.inputOverrides.insert_or_assign(
flake::parseInputPath(inputPath),
parseFlakeRef(flakeRef, absPath("."), true));
parseFlakeRef(flakeRef, absPath(getCommandBaseDir()), true));
}},
.completer = {[&](AddCompletions & completions, size_t n, std::string_view prefix) {
if (n == 0) {
@ -129,7 +130,7 @@ MixFlakeOptions::MixFlakeOptions()
auto evalState = getEvalState();
auto flake = flake::lockFlake(
*evalState,
parseFlakeRef(flakeRef, absPath(".")),
parseFlakeRef(flakeRef, absPath(getCommandBaseDir())),
{ .writeLockFile = false });
for (auto & [inputName, input] : flake.lockFile.root->inputs) {
auto input2 = flake.lockFile.findInput({inputName}); // resolve 'follows' nodes
@ -293,6 +294,8 @@ void completeFlakeRefWithFragment(
prefixRoot = ".";
}
auto flakeRefS = std::string(prefix.substr(0, hash));
// TODO: ideally this would use the command base directory instead of assuming ".".
auto flakeRef = parseFlakeRef(expandTilde(flakeRefS), absPath("."));
auto evalCache = openEvalCache(*evalState,
@ -441,10 +444,12 @@ Installables SourceExprCommand::parseInstallables(
auto e = state->parseStdin();
state->eval(e, *vFile);
}
else if (file)
state->evalFile(lookupFileArg(*state, *file), *vFile);
else if (file) {
state->evalFile(lookupFileArg(*state, *file, CanonPath::fromCwd(getCommandBaseDir())), *vFile);
}
else {
auto e = state->parseExprFromString(*expr, state->rootPath(CanonPath::fromCwd()));
CanonPath dir(CanonPath::fromCwd(getCommandBaseDir()));
auto e = state->parseExprFromString(*expr, state->rootPath(dir));
state->eval(e, *vFile);
}
@ -479,7 +484,7 @@ Installables SourceExprCommand::parseInstallables(
}
try {
auto [flakeRef, fragment] = parseFlakeRefWithFragment(std::string { prefix }, absPath("."));
auto [flakeRef, fragment] = parseFlakeRefWithFragment(std::string { prefix }, absPath(getCommandBaseDir()));
result.push_back(make_ref<InstallableFlake>(
this,
getEvalState(),
@ -663,7 +668,7 @@ BuiltPaths Installable::toBuiltPaths(
BuiltPaths res;
for (auto & drvPath : Installable::toDerivations(store, installables, true))
res.push_back(BuiltPath::Opaque{drvPath});
res.emplace_back(BuiltPath::Opaque{drvPath});
return res;
}
}
@ -753,7 +758,7 @@ std::vector<FlakeRef> RawInstallablesCommand::getFlakeRefsForCompletion()
for (auto i : rawInstallables)
res.push_back(parseFlakeRefWithFragment(
expandTilde(i),
absPath(".")).first);
absPath(getCommandBaseDir())).first);
return res;
}
@ -775,7 +780,7 @@ std::vector<FlakeRef> InstallableCommand::getFlakeRefsForCompletion()
return {
parseFlakeRefWithFragment(
expandTilde(_installable),
absPath(".")).first
absPath(getCommandBaseDir())).first
};
}

View file

@ -1,7 +1,6 @@
#pragma once
///@file
#include "util.hh"
#include "path.hh"
#include "outputs-spec.hh"
#include "derived-path.hh"

View file

@ -1,6 +1,7 @@
#include "markdown.hh"
#include "util.hh"
#include "finally.hh"
#include "terminal.hh"
#include <sys/queue.h>
#include <lowdown.h>

View file

@ -22,6 +22,7 @@ extern "C" {
#include "repl.hh"
#include "ansicolor.hh"
#include "signals.hh"
#include "shared.hh"
#include "eval.hh"
#include "eval-cache.hh"
@ -36,6 +37,8 @@ extern "C" {
#include "globals.hh"
#include "flake/flake.hh"
#include "flake/lockfile.hh"
#include "users.hh"
#include "terminal.hh"
#include "editor-for.hh"
#include "finally.hh"
#include "markdown.hh"

View file

@ -1,6 +1,5 @@
#include "attr-path.hh"
#include "eval-inline.hh"
#include "util.hh"
namespace nix {

View file

@ -1,3 +1,4 @@
#include "users.hh"
#include "eval-cache.hh"
#include "sqlite.hh"
#include "eval.hh"

View file

@ -1,3 +1,4 @@
#include "users.hh"
#include "globals.hh"
#include "profiles.hh"
#include "eval.hh"

View file

@ -1,4 +1,6 @@
#pragma once
///@file
#include "config.hh"
namespace nix {

View file

@ -14,6 +14,7 @@
#include "print.hh"
#include "fs-input-accessor.hh"
#include "memory-input-accessor.hh"
#include "signals.hh"
#include <algorithm>
#include <chrono>

View file

@ -1,6 +1,7 @@
#include "flake.hh"
#include "users.hh"
#include "globals.hh"
#include "fetch-settings.hh"
#include "flake.hh"
#include <nlohmann/json.hpp>

View file

@ -1,3 +1,4 @@
#include "terminal.hh"
#include "flake.hh"
#include "eval.hh"
#include "eval-settings.hh"
@ -8,6 +9,7 @@
#include "fetchers.hh"
#include "finally.hh"
#include "fetch-settings.hh"
#include "value-to-json.hh"
namespace nix {
@ -140,8 +142,13 @@ static FlakeInput parseFlakeInput(EvalState & state,
attrs.emplace(state.symbols[attr.name], (long unsigned int)attr.value->integer);
break;
default:
throw TypeError("flake input attribute '%s' is %s while a string, Boolean, or integer is expected",
state.symbols[attr.name], showType(*attr.value));
if (attr.name == state.symbols.create("publicKeys")) {
experimentalFeatureSettings.require(Xp::VerifiedFetches);
NixStringContext emptyContext = {};
attrs.emplace(state.symbols[attr.name], printValueAsJSON(state, true, *attr.value, pos, emptyContext).dump());
} else
throw TypeError("flake input attribute '%s' is %s while a string, Boolean, or integer is expected",
state.symbols[attr.name], showType(*attr.value));
}
#pragma GCC diagnostic pop
}
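The new `publicKeys` branch above serialises a structured Nix value into a JSON string, since fetcher attributes only carry strings, integers, and Booleans. A hedged sketch of that encoding step using `nlohmann::json`, with the attribute map shape simplified:

```cpp
#include <cstdint>
#include <iostream>
#include <map>
#include <string>
#include <variant>
#include <nlohmann/json.hpp>

using Attr = std::variant<std::string, int64_t, bool>;

int main()
{
    // The structured value from the flake input...
    nlohmann::json publicKeys = nlohmann::json::array({
        {{"type", "ssh-ed25519"}, {"key", "AAAA..."}},
    });

    // ...is flattened into a JSON string attribute, as `.dump()` does above.
    std::map<std::string, Attr> attrs;
    attrs.emplace("publicKeys", publicKeys.dump());

    std::cout << std::get<std::string>(attrs.at("publicKeys")) << "\n";
    // [{"key":"AAAA...","type":"ssh-ed25519"}]
}
```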

View file

@ -1,5 +1,4 @@
#include "get-drvs.hh"
#include "util.hh"
#include "eval-inline.hh"
#include "derivations.hh"
#include "store-api.hh"

View file

@ -19,6 +19,7 @@
#include <variant>
#include "util.hh"
#include "users.hh"
#include "nixexpr.hh"
#include "eval.hh"

View file

@ -10,6 +10,7 @@
#include "path-references.hh"
#include "store-api.hh"
#include "util.hh"
#include "processes.hh"
#include "value-to-json.hh"
#include "value-to-xml.hh"
#include "primops.hh"
@ -824,7 +825,7 @@ static void prim_addErrorContext(EvalState & state, const PosIdx pos, Value * *
auto message = state.coerceToString(pos, *args[0], context,
"while evaluating the error message passed to builtins.addErrorContext",
false, false).toOwned();
e.addTrace(nullptr, message, true);
e.addTrace(nullptr, hintfmt(message), true);
throw;
}
}

View file

@ -7,6 +7,7 @@
#include "registry.hh"
#include "tarball.hh"
#include "url.hh"
#include "value-to-json.hh"
#include <ctime>
#include <iomanip>
@ -125,6 +126,10 @@ static void fetchTree(
attrs.emplace(state.symbols[attr.name], Explicit<bool>{attr.value->boolean});
else if (attr.value->type() == nInt)
attrs.emplace(state.symbols[attr.name], uint64_t(attr.value->integer));
else if (state.symbols[attr.name] == "publicKeys") {
experimentalFeatureSettings.require(Xp::VerifiedFetches);
attrs.emplace(state.symbols[attr.name], printValueAsJSON(state, true, *attr.value, pos, context).dump());
}
else
state.debugThrowLastTrace(TypeError("fetchTree argument '%s' is %s while a string, Boolean or integer is expected",
state.symbols[attr.name], showType(*attr.value)));
@ -223,6 +228,7 @@ static RegisterPrimOp primop_fetchTree({
```
)",
.fun = prim_fetchTree,
.experimentalFeature = Xp::FetchTree,
});
static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v,
@ -427,6 +433,42 @@ static RegisterPrimOp primop_fetchGit({
With this argument being true, it's possible to load a `rev` from *any* `ref`
(by default only `rev`s from the specified `ref` are supported).
- `verifyCommit` (default: `true` if `publicKey` or `publicKeys` are provided, otherwise `false`)
Whether to check `rev` for a signature matching `publicKey` or `publicKeys`.
If `verifyCommit` is enabled, then `fetchGit` cannot use a local repository with uncommitted changes.
Requires the [`verified-fetches` experimental feature](@docroot@/contributing/experimental-features.md#xp-feature-verified-fetches).
- `publicKey`
The public key against which `rev` is verified if `verifyCommit` is enabled.
Requires the [`verified-fetches` experimental feature](@docroot@/contributing/experimental-features.md#xp-feature-verified-fetches).
- `keytype` (default: `"ssh-ed25519"`)
The key type of `publicKey`.
Possible values:
- `"ssh-dsa"`
- `"ssh-ecdsa"`
- `"ssh-ecdsa-sk"`
- `"ssh-ed25519"`
- `"ssh-ed25519-sk"`
- `"ssh-rsa"`
Requires the [`verified-fetches` experimental feature](@docroot@/contributing/experimental-features.md#xp-feature-verified-fetches).
- `publicKeys`
The public keys against which `rev` is verified if `verifyCommit` is enabled.
Must be given as a list of attribute sets with the following form:
```nix
{
key = "<public key>";
type = "<key type>"; # optional, default: "ssh-ed25519"
}
```
Requires the [`verified-fetches` experimental feature](@docroot@/contributing/experimental-features.md#xp-feature-verified-fetches).
Here are some examples of how to use `fetchGit`.
- To fetch a private repository over SSH:
@ -501,6 +543,21 @@ static RegisterPrimOp primop_fetchGit({
}
```
- To verify the commit signature:
```nix
builtins.fetchGit {
url = "ssh://git@github.com/nixos/nix.git";
verifyCommit = true;
publicKeys = [
{
type = "ssh-ed25519";
key = "AAAAC3NzaC1lZDI1NTE5AAAAIArPKULJOid8eS6XETwUjO48/HKBWl7FTCK0Z//fplDi";
}
];
}
```
Nix will refetch the branch according to the [`tarball-ttl`](@docroot@/command-ref/conf-file.md#conf-tarball-ttl) setting.
This behavior is disabled in [pure evaluation mode](@docroot@/command-ref/conf-file.md#conf-pure-eval).

View file

@ -1,5 +1,4 @@
#include "search-path.hh"
#include "util.hh"
namespace nix {

View file

@ -1,7 +1,7 @@
#include "value-to-json.hh"
#include "eval-inline.hh"
#include "util.hh"
#include "store-api.hh"
#include "signals.hh"
#include <cstdlib>
#include <iomanip>

View file

@ -1,7 +1,7 @@
#include "value-to-xml.hh"
#include "xml-writer.hh"
#include "eval-inline.hh"
#include "util.hh"
#include "signals.hh"
#include <cstdlib>

View file

@ -1,3 +1,4 @@
#include "util.hh"
#include "value/context.hh"
#include <optional>

View file

@ -1,7 +1,6 @@
#pragma once
///@file
#include "util.hh"
#include "comparator.hh"
#include "derived-path.hh"
#include "variant-wrapper.hh"

View file

@ -1,4 +1,5 @@
#include "cache.hh"
#include "users.hh"
#include "sqlite.hh"
#include "sync.hh"
#include "store-api.hh"

View file

@ -3,7 +3,6 @@
#include "types.hh"
#include "config.hh"
#include "util.hh"
#include <map>
#include <limits>

View file

@ -373,4 +373,9 @@ std::optional<ExperimentalFeature> InputScheme::experimentalFeature() const
return {};
}
std::string publicKeys_to_string(const std::vector<PublicKey>& publicKeys)
{
return ((nlohmann::json) publicKeys).dump();
}
}

View file

@ -184,4 +184,13 @@ void registerInputScheme(std::shared_ptr<InputScheme> && fetcher);
nlohmann::json dumpRegisterInputSchemeInfo();
struct PublicKey
{
std::string type = "ssh-ed25519";
std::string key;
};
NLOHMANN_DEFINE_TYPE_NON_INTRUSIVE_WITH_DEFAULT(PublicKey, type, key)
std::string publicKeys_to_string(const std::vector<PublicKey>&);
}
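For reference, a small round-trip sketch of the `NLOHMANN_DEFINE_TYPE_NON_INTRUSIVE_WITH_DEFAULT` macro used above (available since nlohmann/json 3.11): fields missing from the JSON fall back to the member defaults, which is what lets `type` default to `ssh-ed25519`.

```cpp
#include <iostream>
#include <string>
#include <nlohmann/json.hpp>

struct PublicKey
{
    std::string type = "ssh-ed25519";
    std::string key;
};
NLOHMANN_DEFINE_TYPE_NON_INTRUSIVE_WITH_DEFAULT(PublicKey, type, key)

int main()
{
    // "type" is absent, so the member default is kept.
    auto k = nlohmann::json::parse(R"({"key": "AAAA..."})").get<PublicKey>();
    std::cout << k.type << "\n";                   // ssh-ed25519
    std::cout << nlohmann::json(k).dump() << "\n"; // {"key":"AAAA...","type":"ssh-ed25519"}
}
```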

View file

@ -2,6 +2,7 @@
#include "input-accessor.hh"
#include "cache.hh"
#include "finally.hh"
#include "processes.hh"
#include <boost/core/span.hpp>
@ -21,6 +22,7 @@
#include <unordered_set>
#include <queue>
#include <regex>
namespace std {
@ -365,6 +367,64 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
refspec
}, {}, true);
}
void verifyCommit(
const Hash & rev,
const std::vector<fetchers::PublicKey> & publicKeys) override
{
// Create ad-hoc allowedSignersFile and populate it with publicKeys
auto allowedSignersFile = createTempFile().second;
std::string allowedSigners;
for (const fetchers::PublicKey & k : publicKeys) {
if (k.type != "ssh-dsa"
&& k.type != "ssh-ecdsa"
&& k.type != "ssh-ecdsa-sk"
&& k.type != "ssh-ed25519"
&& k.type != "ssh-ed25519-sk"
&& k.type != "ssh-rsa")
throw Error("Unknown key type '%s'.\n"
"Please use one of\n"
"- ssh-dsa\n"
" ssh-ecdsa\n"
" ssh-ecdsa-sk\n"
" ssh-ed25519\n"
" ssh-ed25519-sk\n"
" ssh-rsa", k.type);
allowedSigners += "* " + k.type + " " + k.key + "\n";
}
writeFile(allowedSignersFile, allowedSigners);
// Run verification command
auto [status, output] = runProgram(RunOptions {
.program = "git",
.args = {
"-c",
"gpg.ssh.allowedSignersFile=" + allowedSignersFile,
"-C", path.abs(),
"verify-commit",
rev.gitRev()
},
.mergeStderrToStdout = true,
});
/* Evaluate the result via the status code and by checking whether
public key fingerprints appear on stderr. This is necessary
because the git command might also succeed due to the
commit being signed by gpg keys that are present in the
user's key agent. */
std::string re = R"(Good "git" signature for \* with .* key SHA256:[)";
for (const fetchers::PublicKey & k : publicKeys){
// Calculate sha256 fingerprint from public key and escape the regex symbol '+' to match the key literally
auto fingerprint = trim(hashString(htSHA256, base64Decode(k.key)).to_string(nix::HashFormat::Base64, false), "=");
auto escaped_fingerprint = std::regex_replace(fingerprint, std::regex("\\+"), "\\+" );
re += "(" + escaped_fingerprint + ")";
}
re += "]";
if (status == 0 && std::regex_search(output, std::regex(re)))
printTalkative("Signature verification on commit %s succeeded.", rev.gitRev());
else
throw Error("Commit signature verification on commit %s failed: %s", rev.gitRev(), output);
}
};
ref<GitRepo> GitRepo::openRepo(const CanonPath & path, bool create, bool bare)

View file

@ -4,6 +4,8 @@
namespace nix {
namespace fetchers { struct PublicKey; }
struct GitRepo
{
virtual ~GitRepo()
@ -72,6 +74,14 @@ struct GitRepo
virtual void fetch(
const std::string & url,
const std::string & refspec) = 0;
/**
* Verify that commit `rev` is signed by one of the keys in
* `publicKeys`. Throw an error if it isn't.
*/
virtual void verifyCommit(
const Hash & rev,
const std::vector<fetchers::PublicKey> & publicKeys) = 0;
};
}

View file

@ -1,11 +1,12 @@
#include "fetchers.hh"
#include "users.hh"
#include "cache.hh"
#include "globals.hh"
#include "tarfile.hh"
#include "store-api.hh"
#include "url-parts.hh"
#include "pathlocks.hh"
#include "util.hh"
#include "processes.hh"
#include "git.hh"
#include "fs-input-accessor.hh"
#include "union-input-accessor.hh"
@ -135,6 +136,19 @@ std::optional<std::string> readHeadCached(const std::string & actualUrl)
return std::nullopt;
}
std::vector<PublicKey> getPublicKeys(const Attrs & attrs)
{
std::vector<PublicKey> publicKeys;
if (attrs.contains("publicKeys")) {
nlohmann::json publicKeysJson = nlohmann::json::parse(getStrAttr(attrs, "publicKeys"));
ensureType(publicKeysJson, nlohmann::json::value_t::array);
publicKeys = publicKeysJson.get<std::vector<PublicKey>>();
}
if (attrs.contains("publicKey"))
publicKeys.push_back(PublicKey{maybeGetStrAttr(attrs, "keytype").value_or("ssh-ed25519"),getStrAttr(attrs, "publicKey")});
return publicKeys;
}
} // end namespace
struct GitInputScheme : InputScheme
@ -155,9 +169,9 @@ struct GitInputScheme : InputScheme
attrs.emplace("type", "git");
for (auto & [name, value] : url.query) {
if (name == "rev" || name == "ref")
if (name == "rev" || name == "ref" || name == "keytype" || name == "publicKey" || name == "publicKeys")
attrs.emplace(name, value);
else if (name == "shallow" || name == "submodules" || name == "allRefs")
else if (name == "shallow" || name == "submodules" || name == "allRefs" || name == "verifyCommit")
attrs.emplace(name, Explicit<bool> { value == "1" });
else
url2.query.emplace(name, value);
@ -189,14 +203,26 @@ struct GitInputScheme : InputScheme
"name",
"dirtyRev",
"dirtyShortRev",
"verifyCommit",
"keytype",
"publicKey",
"publicKeys",
};
}
std::optional<Input> inputFromAttrs(const Attrs & attrs) const override
{
for (auto & [name, _] : attrs)
if (name == "verifyCommit"
|| name == "keytype"
|| name == "publicKey"
|| name == "publicKeys")
experimentalFeatureSettings.require(Xp::VerifiedFetches);
maybeGetBoolAttr(attrs, "shallow");
maybeGetBoolAttr(attrs, "submodules");
maybeGetBoolAttr(attrs, "allRefs");
maybeGetBoolAttr(attrs, "verifyCommit");
if (auto ref = maybeGetStrAttr(attrs, "ref")) {
if (std::regex_search(*ref, badGitRefRegex))
@ -219,6 +245,15 @@ struct GitInputScheme : InputScheme
if (auto ref = input.getRef()) url.query.insert_or_assign("ref", *ref);
if (maybeGetBoolAttr(input.attrs, "shallow").value_or(false))
url.query.insert_or_assign("shallow", "1");
if (maybeGetBoolAttr(input.attrs, "verifyCommit").value_or(false))
url.query.insert_or_assign("verifyCommit", "1");
auto publicKeys = getPublicKeys(input.attrs);
if (publicKeys.size() == 1) {
url.query.insert_or_assign("keytype", publicKeys.at(0).type);
url.query.insert_or_assign("publicKey", publicKeys.at(0).key);
}
else if (publicKeys.size() > 1)
url.query.insert_or_assign("publicKeys", publicKeys_to_string(publicKeys));
return url;
}
@ -416,6 +451,19 @@ struct GitInputScheme : InputScheme
};
}
void verifyCommit(const Input & input, std::shared_ptr<GitRepo> repo) const
{
auto publicKeys = getPublicKeys(input.attrs);
auto verifyCommit = maybeGetBoolAttr(input.attrs, "verifyCommit").value_or(!publicKeys.empty());
if (verifyCommit) {
if (input.getRev() && repo)
repo->verifyCommit(*input.getRev(), publicKeys);
else
throw Error("commit verification is required for Git repository '%s', but it's dirty", input.to_string());
}
}
std::pair<ref<InputAccessor>, Input> getAccessorFromCommit(
ref<Store> store,
RepoInfo & repoInfo,
@ -513,7 +561,9 @@ struct GitInputScheme : InputScheme
// cache dir lock is removed at scope end; we will only use read-only operations on specific revisions in the remainder
}
auto isShallow = GitRepo::openRepo(CanonPath(repoDir))->isShallow();
auto repo = GitRepo::openRepo(CanonPath(repoDir));
auto isShallow = repo->isShallow();
if (isShallow && !repoInfo.shallow)
throw Error("'%s' is a shallow Git repository, but shallow repositories are only allowed when `shallow = true;` is specified", repoInfo.url);
@ -533,7 +583,7 @@ struct GitInputScheme : InputScheme
printTalkative("using revision %s of repo '%s'", rev.gitRev(), repoInfo.url);
auto repo = GitRepo::openRepo(CanonPath(repoDir));
verifyCommit(input, repo);
auto accessor = repo->getAccessor(rev);
@ -565,8 +615,7 @@ struct GitInputScheme : InputScheme
}
}
assert(input.getRev());
assert(!origRev || origRev == input.getRev());
assert(!origRev || origRev == rev);
if (!repoInfo.shallow)
input.attrs.insert_or_assign("revCount", getIntAttr(infoAttrs, "revCount"));
input.attrs.insert_or_assign("lastModified", getIntAttr(infoAttrs, "lastModified"));
@ -615,14 +664,17 @@ struct GitInputScheme : InputScheme
}
if (!repoInfo.workdirInfo.isDirty) {
if (auto ref = GitRepo::openRepo(CanonPath(repoInfo.url))->getWorkdirRef())
auto repo = GitRepo::openRepo(CanonPath(repoInfo.url));
if (auto ref = repo->getWorkdirRef())
input.attrs.insert_or_assign("ref", *ref);
auto rev = repoInfo.workdirInfo.headRev.value();
input.attrs.insert_or_assign("rev", rev.gitRev());
input.attrs.insert_or_assign("revCount", getRevCount(repoInfo, repoInfo.url, rev));
verifyCommit(input, repo);
} else {
repoInfo.warnDirty();
@ -632,6 +684,8 @@ struct GitInputScheme : InputScheme
input.attrs.insert_or_assign("dirtyShortRev",
repoInfo.workdirInfo.headRev->gitShortRev() + "-dirty");
}
verifyCommit(input, nullptr);
}
input.attrs.insert_or_assign(
@ -651,10 +705,10 @@ struct GitInputScheme : InputScheme
auto repoInfo = getRepoInfo(input);
if (input.getRef() || input.getRev() || !repoInfo.isLocal)
return getAccessorFromCommit(store, repoInfo, std::move(input));
else
return getAccessorFromWorkdir(store, repoInfo, std::move(input));
return
input.getRef() || input.getRev() || !repoInfo.isLocal
? getAccessorFromCommit(store, repoInfo, std::move(input))
: getAccessorFromWorkdir(store, repoInfo, std::move(input));
}
};

View file

@ -1,8 +1,10 @@
#pragma once
///@file
#include "source-accessor.hh"
#include "ref.hh"
#include "types.hh"
#include "file-system.hh"
#include "repair-flag.hh"
#include "content-address.hh"
@ -14,7 +16,7 @@ struct SourcePath;
class StorePath;
class Store;
struct InputAccessor : SourceAccessor, std::enable_shared_from_this<InputAccessor>
struct InputAccessor : virtual SourceAccessor, std::enable_shared_from_this<InputAccessor>
{
/**
* Return the maximum last-modified time of the files in this

View file

@ -1,48 +1,16 @@
#include "memory-input-accessor.hh"
#include "memory-source-accessor.hh"
namespace nix {
struct MemoryInputAccessorImpl : MemoryInputAccessor
struct MemoryInputAccessorImpl : MemoryInputAccessor, MemorySourceAccessor
{
std::map<CanonPath, std::string> files;
std::string readFile(const CanonPath & path) override
{
auto i = files.find(path);
if (i == files.end())
throw Error("file '%s' does not exist", path);
return i->second;
}
bool pathExists(const CanonPath & path) override
{
auto i = files.find(path);
return i != files.end();
}
std::optional<Stat> maybeLstat(const CanonPath & path) override
{
auto i = files.find(path);
if (i != files.end())
return Stat { .type = tRegular, .isExecutable = false };
return std::nullopt;
}
DirEntries readDirectory(const CanonPath & path) override
{
return {};
}
std::string readLink(const CanonPath & path) override
{
throw UnimplementedError("MemoryInputAccessor::readLink");
}
SourcePath addFile(CanonPath path, std::string && contents) override
{
files.emplace(path, std::move(contents));
return {ref(shared_from_this()), std::move(path)};
return {
ref(shared_from_this()),
MemorySourceAccessor::addFile(path, std::move(contents))
};
}
};
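This rewrite works because `InputAccessor` now derives from `SourceAccessor` *virtually* (see the `input-accessor.hh` hunk above), so `MemoryInputAccessorImpl` can mix in `MemorySourceAccessor` without duplicating the base. A minimal diamond sketch, assuming `MemorySourceAccessor` also inherits virtually:

```cpp
struct SourceAccessor { virtual ~SourceAccessor() = default; };
struct InputAccessor : virtual SourceAccessor { };
struct MemorySourceAccessor : virtual SourceAccessor { }; // assumed virtual here
struct MemoryInputAccessor : InputAccessor { };
struct MemoryInputAccessorImpl : MemoryInputAccessor, MemorySourceAccessor { };

int main()
{
    MemoryInputAccessorImpl impl;
    // With the virtual base there is exactly one SourceAccessor
    // subobject, so this upcast compiles and is unambiguous.
    SourceAccessor & base = impl;
    (void) base;
}
```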

View file

@ -1,4 +1,6 @@
#include "fetchers.hh"
#include "processes.hh"
#include "users.hh"
#include "cache.hh"
#include "globals.hh"
#include "tarfile.hh"

View file

@ -1,6 +1,6 @@
#include "registry.hh"
#include "tarball.hh"
#include "util.hh"
#include "users.hh"
#include "globals.hh"
#include "store-api.hh"
#include "local-fs-store.hh"

View file

@ -1,7 +1,9 @@
#include "common-args.hh"
#include "args/root.hh"
#include "globals.hh"
#include "logging.hh"
#include "loggers.hh"
#include "util.hh"
namespace nix {

View file

@ -1,6 +1,6 @@
#include "loggers.hh"
#include "environment-variables.hh"
#include "progress-bar.hh"
#include "util.hh"
namespace nix {

View file

@ -1,5 +1,5 @@
#include "progress-bar.hh"
#include "util.hh"
#include "terminal.hh"
#include "sync.hh"
#include "store-api.hh"
#include "names.hh"

View file

@ -1,10 +1,11 @@
#include "globals.hh"
#include "current-process.hh"
#include "shared.hh"
#include "store-api.hh"
#include "gc-store.hh"
#include "util.hh"
#include "loggers.hh"
#include "progress-bar.hh"
#include "signals.hh"
#include <algorithm>
#include <cctype>

View file

@ -1,7 +1,7 @@
#pragma once
///@file
#include "util.hh"
#include "processes.hh"
#include "args.hh"
#include "args/root.hh"
#include "common-args.hh"

View file

@ -11,6 +11,7 @@
#include "nar-accessor.hh"
#include "thread-pool.hh"
#include "callback.hh"
#include "signals.hh"
#include <chrono>
#include <future>

View file

@ -0,0 +1,37 @@
#include "child.hh"
#include "current-process.hh"
#include "logging.hh"
#include <fcntl.h>
#include <unistd.h>
namespace nix {
void commonChildInit()
{
logger = makeSimpleLogger();
const static std::string pathNullDevice = "/dev/null";
restoreProcessContext(false);
/* Put the child in a separate session (and thus a separate
process group) so that it has no controlling terminal (meaning
that e.g. ssh cannot open /dev/tty) and it doesn't receive
terminal signals. */
if (setsid() == -1)
throw SysError("creating a new session");
/* Dup stderr to stdout. */
if (dup2(STDERR_FILENO, STDOUT_FILENO) == -1)
throw SysError("cannot dup stderr into stdout");
/* Reroute stdin to /dev/null. */
int fdDevNull = open(pathNullDevice.c_str(), O_RDWR);
if (fdDevNull == -1)
throw SysError("cannot open '%1%'", pathNullDevice);
if (dup2(fdDevNull, STDIN_FILENO) == -1)
throw SysError("cannot dup null device into stdin");
close(fdDevNull);
}
}
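A hedged sketch of how such a helper is typically used from a fork-based caller; Nix's real call sites go through its process-spawning helpers, so the `fork` here and the simplified `commonChildInitLike` are illustrative only:

```cpp
#include <fcntl.h>
#include <sys/types.h>
#include <sys/wait.h>
#include <unistd.h>

// Simplified stand-in for the commonChildInit above: new session,
// stderr duplicated onto stdout, stdin rerouted to /dev/null.
static void commonChildInitLike()
{
    setsid();
    dup2(STDERR_FILENO, STDOUT_FILENO);
    int fd = open("/dev/null", O_RDWR);
    if (fd != -1) { dup2(fd, STDIN_FILENO); close(fd); }
}

int main()
{
    pid_t pid = fork();
    if (pid == 0) {
        commonChildInitLike();                    // run first in the child...
        execlp("true", "true", (char *) nullptr); // ...then exec the real work
        _exit(1);
    }
    int status;
    if (pid > 0) waitpid(pid, &status, 0);
}
```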

View file

@ -0,0 +1,11 @@
#pragma once
///@file
namespace nix {
/**
* Common initialisation performed in child processes.
*/
void commonChildInit();
}

View file

@ -1,5 +1,7 @@
#include "globals.hh"
#include "hook-instance.hh"
#include "file-system.hh"
#include "child.hh"
namespace nix {

View file

@ -3,6 +3,7 @@
#include "logging.hh"
#include "serialise.hh"
#include "processes.hh"
namespace nix {

View file

@ -15,7 +15,10 @@
#include "json-utils.hh"
#include "cgroup.hh"
#include "personality.hh"
#include "current-process.hh"
#include "namespaces.hh"
#include "child.hh"
#include "unix-domain-socket.hh"
#include <regex>
#include <queue>
@ -1620,6 +1623,8 @@ void setupSeccomp()
seccomp_release(ctx);
});
constexpr std::string_view nativeSystem = SYSTEM;
if (nativeSystem == "x86_64-linux" &&
seccomp_arch_add(ctx, SCMP_ARCH_X86) != 0)
throw SysError("unable to add 32-bit seccomp architecture");

View file

@ -3,6 +3,7 @@
#include "derivation-goal.hh"
#include "local-store.hh"
#include "processes.hh"
namespace nix {

View file

@ -4,6 +4,7 @@
#include "drv-output-substitution-goal.hh"
#include "local-derivation-goal.hh"
#include "hook-instance.hh"
#include "signals.hh"
#include <poll.h>

View file

@ -1,5 +1,4 @@
#include "serialise.hh"
#include "util.hh"
#include "path-with-outputs.hh"
#include "store-api.hh"
#include "build-result.hh"

View file

@ -1,4 +1,5 @@
#include "crypto.hh"
#include "file-system.hh"
#include "util.hh"
#include "globals.hh"

View file

@ -352,7 +352,7 @@ Derivation parseDerivation(
expect(str, "erive(");
version = DerivationATermVersion::Traditional;
break;
case 'r':
case 'r': {
expect(str, "rvWithVersion(");
auto versionS = parseString(str);
if (versionS == "xp-dyn-drv") {
@ -365,6 +365,9 @@ Derivation parseDerivation(
expect(str, ",");
break;
}
default:
throw Error("derivation does not start with 'Derive' or 'DrvWithVersion'");
}
/* Parse the list of outputs. */
expect(str, "[");

View file

@ -1,4 +1,5 @@
#include "derived-path-map.hh"
#include "util.hh"
namespace nix {

View file

@ -1,4 +1,5 @@
#pragma once
///@file
#include "types.hh"
#include "derived-path.hh"

View file

@ -1,10 +1,10 @@
#pragma once
///@file
#include "util.hh"
#include "path.hh"
#include "outputs-spec.hh"
#include "comparator.hh"
#include "config.hh"
#include <variant>

View file

@ -1,11 +1,12 @@
#include "filetransfer.hh"
#include "util.hh"
#include "namespaces.hh"
#include "globals.hh"
#include "store-api.hh"
#include "s3.hh"
#include "compression.hh"
#include "finally.hh"
#include "callback.hh"
#include "signals.hh"
#if ENABLE_S3
#include <aws/core/client/ClientConfiguration.h>

View file

@ -2,6 +2,13 @@
#include "globals.hh"
#include "local-store.hh"
#include "finally.hh"
#include "unix-domain-socket.hh"
#include "signals.hh"
#if !defined(__linux__)
// For shelling out to lsof
# include "processes.hh"
#endif
#include <functional>
#include <queue>

View file

@ -1,7 +1,8 @@
#include "globals.hh"
#include "util.hh"
#include "current-process.hh"
#include "archive.hh"
#include "args.hh"
#include "users.hh"
#include "abstract-setting-to-json.hh"
#include "compute-levels.hh"
@ -17,9 +18,13 @@
#include <sodium/core.h>
#ifdef __GLIBC__
#include <gnu/lib-names.h>
#include <nss.h>
#include <dlfcn.h>
# include <gnu/lib-names.h>
# include <nss.h>
# include <dlfcn.h>
#endif
#if __APPLE__
# include "processes.hh"
#endif
#include "config-impl.hh"

View file

@ -3,7 +3,7 @@
#include "types.hh"
#include "config.hh"
#include "util.hh"
#include "environment-variables.hh"
#include "experimental-features.hh"
#include <map>

View file

@ -10,6 +10,7 @@
#include "topo-sort.hh"
#include "finally.hh"
#include "compression.hh"
#include "signals.hh"
#include <iostream>
#include <algorithm>

View file

@ -7,7 +7,6 @@
#include "store-api.hh"
#include "indirect-root-store.hh"
#include "sync.hh"
#include "util.hh"
#include <chrono>
#include <future>

View file

@ -1,4 +1,5 @@
#include "lock.hh"
#include "file-system.hh"
#include "globals.hh"
#include "pathlocks.hh"

View file

@ -1,5 +1,4 @@
#include "machines.hh"
#include "util.hh"
#include "globals.hh"
#include "store-api.hh"

View file

@ -60,12 +60,22 @@ struct NarAccessor : public SourceAccessor
void createDirectory(const Path & path) override
{
createMember(path, {Type::tDirectory, false, 0, 0});
createMember(path, NarMember{ .stat = {
.type = Type::tDirectory,
.fileSize = 0,
.isExecutable = false,
.narOffset = 0
} });
}
void createRegularFile(const Path & path) override
{
createMember(path, {Type::tRegular, false, 0, 0});
createMember(path, NarMember{ .stat = {
.type = Type::tRegular,
.fileSize = 0,
.isExecutable = false,
.narOffset = 0
} });
}
void closeRegularFile() override
@ -78,9 +88,8 @@ struct NarAccessor : public SourceAccessor
void preallocateContents(uint64_t size) override
{
assert(size <= std::numeric_limits<uint64_t>::max());
auto & st = parents.top()->stat;
st.fileSize = (uint64_t) size;
st.fileSize = size;
st.narOffset = pos;
}
@ -128,9 +137,8 @@ struct NarAccessor : public SourceAccessor
if (type == "directory") {
member.stat = {.type = Type::tDirectory};
for (auto i = v["entries"].begin(); i != v["entries"].end(); ++i) {
std::string name = i.key();
recurse(member.children[name], i.value());
for (const auto &[name, function] : v["entries"].items()) {
recurse(member.children[name], function);
}
} else if (type == "regular") {
member.stat = {
@ -153,7 +161,7 @@ struct NarAccessor : public SourceAccessor
{
NarMember * current = &root;
for (auto & i : path) {
for (const auto & i : path) {
if (current->stat.type != Type::tDirectory) return nullptr;
auto child = current->children.find(std::string(i));
if (child == current->children.end()) return nullptr;
@ -186,7 +194,7 @@ struct NarAccessor : public SourceAccessor
throw Error("path '%1%' inside NAR file is not a directory", path);
DirEntries res;
for (auto & child : i.children)
for (const auto & child : i.children)
res.insert_or_assign(child.first, std::nullopt);
return res;
@ -251,7 +259,7 @@ json listNar(ref<SourceAccessor> accessor, const CanonPath & path, bool recurse)
{
obj["entries"] = json::object();
json &res2 = obj["entries"];
for (auto & [name, type] : accessor->readDirectory(path)) {
for (const auto & [name, type] : accessor->readDirectory(path)) {
if (recurse) {
res2[name] = listNar(accessor, path + name, true);
} else

View file

@ -25,7 +25,7 @@ ref<SourceAccessor> makeNarAccessor(Source & source);
* readFile() method of the accessor to get the contents of files
* inside the NAR.
*/
typedef std::function<std::string(uint64_t, uint64_t)> GetNarBytes;
using GetNarBytes = std::function<std::string(uint64_t, uint64_t)>;
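The `typedef` becomes a `using` alias, which reads left-to-right. A small sketch of the callback's contract as documented here, returning `length` bytes of the NAR starting at `offset`; an in-memory string stands in for a real file or HTTP range request:

```cpp
#include <cstdint>
#include <functional>
#include <iostream>
#include <string>

using GetNarBytes = std::function<std::string(uint64_t, uint64_t)>;

int main()
{
    std::string nar = "...raw NAR bytes...";
    GetNarBytes get = [nar](uint64_t offset, uint64_t length) {
        return nar.substr(offset, length);
    };
    std::cout << get(3, 3) << "\n"; // "raw"
}
```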
ref<SourceAccessor> makeLazyNarAccessor(
const std::string & listing,

View file

@ -1,4 +1,5 @@
#include "nar-info-disk-cache.hh"
#include "users.hh"
#include "sync.hh"
#include "sqlite.hh"
#include "globals.hh"

View file

@ -4,6 +4,15 @@
namespace nix {
GENERATE_CMP_EXT(
,
NarInfo,
me->url,
me->compression,
me->fileHash,
me->fileSize,
static_cast<const ValidPathInfo &>(*me));
NarInfo::NarInfo(const Store & store, const std::string & s, const std::string & whence)
: ValidPathInfo(StorePath(StorePath::dummy), Hash(Hash::dummy)) // FIXME: hack
{
@ -29,12 +38,12 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string &
while (pos < s.size()) {
size_t colon = s.find(':', pos);
if (colon == std::string::npos) throw corrupt("expecting ':'");
if (colon == s.npos) throw corrupt("expecting ':'");
std::string name(s, pos, colon - pos);
size_t eol = s.find('\n', colon + 2);
if (eol == std::string::npos) throw corrupt("expecting '\\n'");
if (eol == s.npos) throw corrupt("expecting '\\n'");
std::string value(s, colon + 2, eol - colon - 2);
@ -125,4 +134,59 @@ std::string NarInfo::to_string(const Store & store) const
return res;
}
nlohmann::json NarInfo::toJSON(
const Store & store,
bool includeImpureInfo,
HashFormat hashFormat) const
{
using nlohmann::json;
auto jsonObject = ValidPathInfo::toJSON(store, includeImpureInfo, hashFormat);
if (includeImpureInfo) {
if (!url.empty())
jsonObject["url"] = url;
if (!compression.empty())
jsonObject["compression"] = compression;
if (fileHash)
jsonObject["downloadHash"] = fileHash->to_string(hashFormat, true);
if (fileSize)
jsonObject["downloadSize"] = fileSize;
}
return jsonObject;
}
NarInfo NarInfo::fromJSON(
const Store & store,
const StorePath & path,
const nlohmann::json & json)
{
using nlohmann::detail::value_t;
NarInfo res {
ValidPathInfo {
path,
UnkeyedValidPathInfo::fromJSON(store, json),
}
};
if (json.contains("url"))
res.url = ensureType(valueAt(json, "url"), value_t::string);
if (json.contains("compression"))
res.compression = ensureType(valueAt(json, "compression"), value_t::string);
if (json.contains("downloadHash"))
res.fileHash = Hash::parseAny(
static_cast<const std::string &>(
ensureType(valueAt(json, "downloadHash"), value_t::string)),
std::nullopt);
if (json.contains("downloadSize"))
res.fileSize = ensureType(valueAt(json, "downloadSize"), value_t::number_integer);
return res;
}
}

View file

@ -17,14 +17,25 @@ struct NarInfo : ValidPathInfo
uint64_t fileSize = 0;
NarInfo() = delete;
NarInfo(const Store & store, std::string && name, ContentAddressWithReferences && ca, Hash narHash)
NarInfo(const Store & store, std::string name, ContentAddressWithReferences ca, Hash narHash)
: ValidPathInfo(store, std::move(name), std::move(ca), narHash)
{ }
NarInfo(StorePath && path, Hash narHash) : ValidPathInfo(std::move(path), narHash) { }
NarInfo(StorePath path, Hash narHash) : ValidPathInfo(std::move(path), narHash) { }
NarInfo(const ValidPathInfo & info) : ValidPathInfo(info) { }
NarInfo(const Store & store, const std::string & s, const std::string & whence);
DECLARE_CMP(NarInfo);
std::string to_string(const Store & store) const;
nlohmann::json toJSON(
const Store & store,
bool includeImpureInfo,
HashFormat hashFormat) const override;
static NarInfo fromJSON(
const Store & store,
const StorePath & path,
const nlohmann::json & json);
};
}

View file

@ -1,6 +1,6 @@
#include "util.hh"
#include "local-store.hh"
#include "globals.hh"
#include "signals.hh"
#include <cstdlib>
#include <cstring>

View file

@ -132,6 +132,41 @@ bool ParsedDerivation::useUidRange() const
static std::regex shVarName("[A-Za-z_][A-Za-z0-9_]*");
/**
* Write a JSON representation of store object metadata, such as the
* hash and the references.
*/
static nlohmann::json pathInfoToJSON(
Store & store,
const StorePathSet & storePaths)
{
nlohmann::json::array_t jsonList = nlohmann::json::array();
for (auto & storePath : storePaths) {
auto info = store.queryPathInfo(storePath);
auto & jsonPath = jsonList.emplace_back(
info->toJSON(store, false, HashFormat::Base32));
// Add the path to the object whose metadata we are including.
jsonPath["path"] = store.printStorePath(storePath);
jsonPath["valid"] = true;
jsonPath["closureSize"] = ({
uint64_t totalNarSize = 0;
StorePathSet closure;
store.computeFSClosure(info->path, closure, false, false);
for (auto & p : closure) {
auto info = store.queryPathInfo(p);
totalNarSize += info->narSize;
}
totalNarSize;
});
}
return jsonList;
}
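The `closureSize` computation above uses a GNU statement expression, `({ ...; value; })`: a braced block usable as an expression whose value is its last statement (a GCC/Clang extension, not standard C++). A minimal illustration:

```cpp
#include <cstdint>
#include <iostream>

int main()
{
    uint64_t total = ({
        uint64_t sum = 0;
        for (uint64_t i = 1; i <= 4; ++i)
            sum += i;
        sum; // the value of the whole ({ ... }) expression
    });
    std::cout << total << "\n"; // 10
}
```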
std::optional<nlohmann::json> ParsedDerivation::prepareStructuredAttrs(Store & store, const StorePathSet & inputPaths)
{
auto structuredAttrs = getStructuredAttrs();
@ -152,8 +187,8 @@ std::optional<nlohmann::json> ParsedDerivation::prepareStructuredAttrs(Store & s
StorePathSet storePaths;
for (auto & p : *i)
storePaths.insert(store.parseStorePath(p.get<std::string>()));
json[i.key()] = store.pathInfoToJSON(
store.exportReferences(storePaths, inputPaths), false, true);
json[i.key()] = pathInfoToJSON(store,
store.exportReferences(storePaths, inputPaths));
}
}

View file

@ -1,5 +1,8 @@
#include <nlohmann/json.hpp>
#include "path-info.hh"
#include "store-api.hh"
#include "json-utils.hh"
namespace nix {
@ -144,4 +147,94 @@ ValidPathInfo::ValidPathInfo(
}, std::move(ca).raw);
}
nlohmann::json UnkeyedValidPathInfo::toJSON(
const Store & store,
bool includeImpureInfo,
HashFormat hashFormat) const
{
using nlohmann::json;
auto jsonObject = json::object();
jsonObject["narHash"] = narHash.to_string(hashFormat, true);
jsonObject["narSize"] = narSize;
{
auto& jsonRefs = (jsonObject["references"] = json::array());
for (auto & ref : references)
jsonRefs.emplace_back(store.printStorePath(ref));
}
if (ca)
jsonObject["ca"] = renderContentAddress(ca);
if (includeImpureInfo) {
if (deriver)
jsonObject["deriver"] = store.printStorePath(*deriver);
if (registrationTime)
jsonObject["registrationTime"] = registrationTime;
if (ultimate)
jsonObject["ultimate"] = ultimate;
if (!sigs.empty()) {
for (auto & sig : sigs)
jsonObject["signatures"].push_back(sig);
}
}
return jsonObject;
}
UnkeyedValidPathInfo UnkeyedValidPathInfo::fromJSON(
const Store & store,
const nlohmann::json & json)
{
using nlohmann::detail::value_t;
UnkeyedValidPathInfo res {
Hash(Hash::dummy),
};
ensureType(json, value_t::object);
res.narHash = Hash::parseAny(
static_cast<const std::string &>(
ensureType(valueAt(json, "narHash"), value_t::string)),
std::nullopt);
res.narSize = ensureType(valueAt(json, "narSize"), value_t::number_integer);
try {
auto & references = ensureType(valueAt(json, "references"), value_t::array);
for (auto & input : references)
res.references.insert(store.parseStorePath(static_cast<const std::string &>
(input)));
} catch (Error & e) {
e.addTrace({}, "while reading key 'references'");
throw;
}
if (json.contains("ca"))
res.ca = ContentAddress::parse(
static_cast<const std::string &>(
ensureType(valueAt(json, "ca"), value_t::string)));
if (json.contains("deriver"))
res.deriver = store.parseStorePath(
static_cast<const std::string &>(
ensureType(valueAt(json, "deriver"), value_t::string)));
if (json.contains("registrationTime"))
res.registrationTime = ensureType(valueAt(json, "registrationTime"), value_t::number_integer);
if (json.contains("ultimate"))
res.ultimate = ensureType(valueAt(json, "ultimate"), value_t::boolean);
if (json.contains("signatures"))
res.sigs = valueAt(json, "signatures");
return res;
}
}

View file

@ -78,6 +78,18 @@ struct UnkeyedValidPathInfo
DECLARE_CMP(UnkeyedValidPathInfo);
virtual ~UnkeyedValidPathInfo() { }
/**
* @param includeImpureInfo If true, variable elements such as the
* registration time are included.
*/
virtual nlohmann::json toJSON(
const Store & store,
bool includeImpureInfo,
HashFormat hashFormat) const;
static UnkeyedValidPathInfo fromJSON(
const Store & store,
const nlohmann::json & json);
};
struct ValidPathInfo : UnkeyedValidPathInfo {

View file

@ -1,6 +1,5 @@
#include "path-references.hh"
#include "hash.hh"
#include "util.hh"
#include "archive.hh"
#include <map>

View file

@ -1,4 +1,5 @@
#pragma once
///@file
#include "references.hh"
#include "path.hh"

View file

@ -1,6 +1,7 @@
#include "pathlocks.hh"
#include "util.hh"
#include "sync.hh"
#include "signals.hh"
#include <cerrno>
#include <cstdlib>

View file

@ -1,7 +1,7 @@
#pragma once
///@file
#include "util.hh"
#include "file-descriptor.hh"
namespace nix {

View file

@ -1,7 +1,7 @@
#include "profiles.hh"
#include "store-api.hh"
#include "local-fs-store.hh"
#include "util.hh"
#include "users.hh"
#include <sys/types.h>
#include <sys/stat.h>

View file

@ -1,3 +1,6 @@
#pragma once
///@file
#include "remote-store.hh"
#include "worker-protocol.hh"
#include "pool.hh"

View file

@ -1,5 +1,4 @@
#include "serialise.hh"
#include "util.hh"
#include "path-with-outputs.hh"
#include "store-api.hh"
#include "build-result.hh"

View file

@ -2,6 +2,7 @@
#include "globals.hh"
#include "util.hh"
#include "url.hh"
#include "signals.hh"
#include <sqlite3.h>

View file

@ -1,5 +1,8 @@
#include "ssh.hh"
#include "finally.hh"
#include "current-process.hh"
#include "environment-variables.hh"
#include "util.hh"
namespace nix {
@ -111,8 +114,10 @@ std::unique_ptr<SSHMaster::Connection> SSHMaster::startCommand(const std::string
reply = readLine(out.readSide.get());
} catch (EndOfFile & e) { }
if (reply != "started")
if (reply != "started") {
printTalkative("SSH stdout first line: %s", reply);
throw Error("failed to start SSH connection to '%s'", host);
}
}
conn->out = std::move(out.readSide);
@ -129,7 +134,6 @@ Path SSHMaster::startMaster()
if (state->sshMaster != -1) return state->socketPath;
state->socketPath = (Path) *state->tmpDir + "/ssh.sock";
Pipe out;
@ -141,7 +145,8 @@ Path SSHMaster::startMaster()
logger->pause();
Finally cleanup = [&]() { logger->resume(); };
bool wasMasterRunning = isMasterRunning();
if (isMasterRunning())
return state->socketPath;
state->sshMaster = startProcess([&]() {
restoreProcessContext();
@ -162,14 +167,14 @@ Path SSHMaster::startMaster()
out.writeSide = -1;
if (!wasMasterRunning) {
std::string reply;
try {
reply = readLine(out.readSide.get());
} catch (EndOfFile & e) { }
std::string reply;
try {
reply = readLine(out.readSide.get());
} catch (EndOfFile & e) { }
if (reply != "started")
throw Error("failed to start SSH master connection to '%s'", host);
if (reply != "started") {
printTalkative("SSH master stdout first line: %s", reply);
throw Error("failed to start SSH master connection to '%s'", host);
}
return state->socketPath;

View file

@ -1,8 +1,9 @@
#pragma once
///@file
#include "util.hh"
#include "sync.hh"
#include "processes.hh"
#include "file-system.hh"
namespace nix {

View file

@ -14,6 +14,8 @@
// FIXME this should not be here, see TODO below on
// `addMultipleToStore`.
#include "worker-protocol.hh"
#include "signals.hh"
#include "users.hh"
#include <nlohmann/json.hpp>
#include <regex>
@ -819,7 +821,7 @@ void Store::substitutePaths(const StorePathSet & paths)
std::vector<DerivedPath> paths2;
for (auto & path : paths)
if (!path.isDerivation())
paths2.push_back(DerivedPath::Opaque{path});
paths2.emplace_back(DerivedPath::Opaque{path});
uint64_t downloadSize, narSize;
StorePathSet willBuild, willSubstitute, unknown;
queryMissing(paths2,
@ -949,96 +951,6 @@ StorePathSet Store::exportReferences(const StorePathSet & storePaths, const Stor
return paths;
}
json Store::pathInfoToJSON(const StorePathSet & storePaths,
bool includeImpureInfo, bool showClosureSize,
HashFormat hashFormat,
AllowInvalidFlag allowInvalid)
{
json::array_t jsonList = json::array();
for (auto & storePath : storePaths) {
auto& jsonPath = jsonList.emplace_back(json::object());
try {
auto info = queryPathInfo(storePath);
jsonPath["path"] = printStorePath(info->path);
jsonPath["valid"] = true;
jsonPath["narHash"] = info->narHash.to_string(hashFormat, true);
jsonPath["narSize"] = info->narSize;
{
auto& jsonRefs = (jsonPath["references"] = json::array());
for (auto & ref : info->references)
jsonRefs.emplace_back(printStorePath(ref));
}
if (info->ca)
jsonPath["ca"] = renderContentAddress(info->ca);
std::pair<uint64_t, uint64_t> closureSizes;
if (showClosureSize) {
closureSizes = getClosureSize(info->path);
jsonPath["closureSize"] = closureSizes.first;
}
if (includeImpureInfo) {
if (info->deriver)
jsonPath["deriver"] = printStorePath(*info->deriver);
if (info->registrationTime)
jsonPath["registrationTime"] = info->registrationTime;
if (info->ultimate)
jsonPath["ultimate"] = info->ultimate;
if (!info->sigs.empty()) {
for (auto & sig : info->sigs)
jsonPath["signatures"].push_back(sig);
}
auto narInfo = std::dynamic_pointer_cast<const NarInfo>(
std::shared_ptr<const ValidPathInfo>(info));
if (narInfo) {
if (!narInfo->url.empty())
jsonPath["url"] = narInfo->url;
if (narInfo->fileHash)
jsonPath["downloadHash"] = narInfo->fileHash->to_string(hashFormat, true);
if (narInfo->fileSize)
jsonPath["downloadSize"] = narInfo->fileSize;
if (showClosureSize)
jsonPath["closureDownloadSize"] = closureSizes.second;
}
}
} catch (InvalidPath &) {
jsonPath["path"] = printStorePath(storePath);
jsonPath["valid"] = false;
}
}
return jsonList;
}
std::pair<uint64_t, uint64_t> Store::getClosureSize(const StorePath & storePath)
{
uint64_t totalNarSize = 0, totalDownloadSize = 0;
StorePathSet closure;
computeFSClosure(storePath, closure, false, false);
for (auto & p : closure) {
auto info = queryPathInfo(p);
totalNarSize += info->narSize;
auto narInfo = std::dynamic_pointer_cast<const NarInfo>(
std::shared_ptr<const ValidPathInfo>(info));
if (narInfo)
totalDownloadSize += narInfo->fileSize;
}
return {totalNarSize, totalDownloadSize};
}
const Store::Stats & Store::getStats()
{

View file

@ -80,7 +80,6 @@ typedef std::map<std::string, StorePath> OutputPathMap;
enum CheckSigsFlag : bool { NoCheckSigs = false, CheckSigs = true };
enum SubstituteFlag : bool { NoSubstitute = false, Substitute = true };
enum AllowInvalidFlag : bool { DisallowInvalid = false, AllowInvalid = true };
/**
* Magic header of exportPath() output (obsolete).
@ -665,28 +664,6 @@ public:
std::string makeValidityRegistration(const StorePathSet & paths,
bool showDerivers, bool showHash);
/**
* Write a JSON representation of store path metadata, such as the
* hash and the references.
*
* @param includeImpureInfo If true, variable elements such as the
* registration time are included.
*
* @param showClosureSize If true, the closure size of each path is
* included.
*/
nlohmann::json pathInfoToJSON(const StorePathSet & storePaths,
bool includeImpureInfo, bool showClosureSize,
HashFormat hashFormat = HashFormat::Base32,
AllowInvalidFlag allowInvalid = DisallowInvalid);
/**
* @return the size of the closure of the specified path, that is,
* the sum of the size of the NAR serialisation of each path in the
* closure.
*/
std::pair<uint64_t, uint64_t> getClosureSize(const StorePath & storePath);
/**
* Optimise the disk space usage of the Nix store by hard-linking files
* with the same contents.

View file

@ -1,28 +0,0 @@
#pragma once
///@file
namespace nix {
/**
* The path to the `unit-test-data` directory. See the contributing
* guide in the manual for further details.
*/
static Path getUnitTestData() {
return getEnv("_NIX_TEST_UNIT_DATA").value();
}
/**
* Whether we should update "golden masters" instead of running tests
* against them. See the contributing guide in the manual for further
* details.
*/
static bool testAccept() {
return getEnv("_NIX_TEST_ACCEPT") == "1";
}
constexpr std::string_view cannotReadGoldenMaster =
"Cannot read golden master because another test is also updating it";
constexpr std::string_view updatingGoldenMaster =
"Updating golden master";
}

View file

@ -20,16 +20,9 @@ public:
* Golden test for `T` reading
*/
template<typename T>
void readTest(PathView testStem, T value)
void readProtoTest(PathView testStem, const T & expected)
{
if (testAccept())
{
GTEST_SKIP() << cannotReadGoldenMaster;
}
else
{
auto encoded = readFile(goldenMaster(testStem));
CharacterizationTest::readTest(testStem, [&](const auto & encoded) {
T got = ({
StringSource from { encoded };
CommonProto::Serialise<T>::read(
@ -37,44 +30,33 @@ public:
CommonProto::ReadConn { .from = from });
});
ASSERT_EQ(got, value);
}
ASSERT_EQ(got, expected);
});
}
/**
* Golden test for `T` write
*/
template<typename T>
void writeTest(PathView testStem, const T & value)
void writeProtoTest(PathView testStem, const T & decoded)
{
auto file = goldenMaster(testStem);
StringSink to;
CommonProto::write(
*store,
CommonProto::WriteConn { .to = to },
value);
if (testAccept())
{
createDirs(dirOf(file));
writeFile(file, to.s);
GTEST_SKIP() << updatingGoldenMaster;
}
else
{
auto expected = readFile(file);
ASSERT_EQ(to.s, expected);
}
CharacterizationTest::writeTest(testStem, [&]() -> std::string {
StringSink to;
CommonProto::Serialise<T>::write(
*store,
CommonProto::WriteConn { .to = to },
decoded);
return to.s;
});
}
};
#define CHARACTERIZATION_TEST(NAME, STEM, VALUE) \
TEST_F(CommonProtoTest, NAME ## _read) { \
readTest(STEM, VALUE); \
readProtoTest(STEM, VALUE); \
} \
TEST_F(CommonProtoTest, NAME ## _write) { \
writeTest(STEM, VALUE); \
writeProtoTest(STEM, VALUE); \
}
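These macros now delegate to `CharacterizationTest`, whose exact interface is not shown in this diff. A hedged sketch of the shape the call sites assume: callback-based golden tests, with `_NIX_TEST_ACCEPT=1` switching `writeTest` into update mode.

```cpp
#include <cassert>
#include <cstdlib>
#include <fstream>
#include <functional>
#include <sstream>
#include <string>
#include <string_view>

// Assumed shape, reconstructed from the call sites in this diff.
struct CharacterizationTestLike
{
    virtual ~CharacterizationTestLike() = default;
    virtual std::string goldenMaster(std::string_view testStem) const = 0;

    static bool testAccept() { return std::getenv("_NIX_TEST_ACCEPT"); }

    static std::string slurp(const std::string & file)
    {
        std::ifstream in(file);
        std::ostringstream ss;
        ss << in.rdbuf();
        return ss.str();
    }

    // Hand the golden master's contents to the test body.
    void readTest(std::string_view stem, std::function<void(std::string)> test) const
    {
        if (testAccept()) return; // skipped: another test is updating the master
        test(slurp(goldenMaster(stem)));
    }

    // Compare against the golden master, or regenerate it in accept mode.
    void writeTest(std::string_view stem, std::function<std::string()> make) const
    {
        auto file = goldenMaster(stem);
        if (testAccept())
            std::ofstream(file) << make();
        else
            assert(make() == slurp(file));
    }
};
```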
CHARACTERIZATION_TEST(

View file

@ -11,20 +11,20 @@ namespace nix {
using nlohmann::json;
class DerivationTest : public LibStoreTest
class DerivationTest : public CharacterizationTest, public LibStoreTest
{
Path unitTestData = getUnitTestData() + "/libstore/derivation";
public:
Path goldenMaster(std::string_view testStem) const override {
return unitTestData + "/" + testStem;
}
/**
* We set these in tests rather than the regular globals so we don't have
* to worry about race conditions if the tests run concurrently.
*/
ExperimentalFeatureSettings mockXpSettings;
Path unitTestData = getUnitTestData() + "/libstore/derivation";
Path goldenMaster(std::string_view testStem) {
return unitTestData + "/" + testStem;
}
};
class CaDerivationTest : public DerivationTest
@ -73,14 +73,8 @@ TEST_F(DynDerivationTest, BadATerm_oldVersionDynDeps) {
#define TEST_JSON(FIXTURE, NAME, VAL, DRV_NAME, OUTPUT_NAME) \
TEST_F(FIXTURE, DerivationOutput_ ## NAME ## _from_json) { \
if (testAccept()) \
{ \
GTEST_SKIP() << cannotReadGoldenMaster; \
} \
else \
{ \
auto encoded = json::parse( \
readFile(goldenMaster("output-" #NAME ".json"))); \
readTest("output-" #NAME ".json", [&](const auto & encoded_) { \
auto encoded = json::parse(encoded_); \
DerivationOutput got = DerivationOutput::fromJSON( \
*store, \
DRV_NAME, \
@ -89,28 +83,20 @@ TEST_F(DynDerivationTest, BadATerm_oldVersionDynDeps) {
mockXpSettings); \
DerivationOutput expected { VAL }; \
ASSERT_EQ(got, expected); \
} \
}); \
} \
\
TEST_F(FIXTURE, DerivationOutput_ ## NAME ## _to_json) { \
auto file = goldenMaster("output-" #NAME ".json"); \
\
json got = DerivationOutput { VAL }.toJSON( \
*store, \
DRV_NAME, \
OUTPUT_NAME); \
\
if (testAccept()) \
{ \
createDirs(dirOf(file)); \
writeFile(file, got.dump(2) + "\n"); \
GTEST_SKIP() << updatingGoldenMaster; \
} \
else \
{ \
auto expected = json::parse(readFile(file)); \
ASSERT_EQ(got, expected); \
} \
writeTest("output-" #NAME ".json", [&]() -> json { \
return DerivationOutput { (VAL) }.toJSON( \
*store, \
(DRV_NAME), \
(OUTPUT_NAME)); \
}, [](const auto & file) { \
return json::parse(readFile(file)); \
}, [](const auto & file, const auto & got) { \
return writeFile(file, got.dump(2) + "\n"); \
}); \
}
TEST_JSON(DerivationTest, inputAddressed,
@ -167,50 +153,30 @@ TEST_JSON(ImpureDerivationTest, impure,
#define TEST_JSON(FIXTURE, NAME, VAL) \
TEST_F(FIXTURE, Derivation_ ## NAME ## _from_json) { \
if (testAccept()) \
{ \
GTEST_SKIP() << cannotReadGoldenMaster; \
} \
else \
{ \
auto encoded = json::parse( \
readFile(goldenMaster( #NAME ".json"))); \
readTest(#NAME ".json", [&](const auto & encoded_) { \
auto encoded = json::parse(encoded_); \
Derivation expected { VAL }; \
Derivation got = Derivation::fromJSON( \
*store, \
encoded, \
mockXpSettings); \
ASSERT_EQ(got, expected); \
} \
}); \
} \
\
TEST_F(FIXTURE, Derivation_ ## NAME ## _to_json) { \
auto file = goldenMaster( #NAME ".json"); \
\
json got = Derivation { VAL }.toJSON(*store); \
\
if (testAccept()) \
{ \
createDirs(dirOf(file)); \
writeFile(file, got.dump(2) + "\n"); \
GTEST_SKIP() << updatingGoldenMaster; \
} \
else \
{ \
auto expected = json::parse(readFile(file)); \
ASSERT_EQ(got, expected); \
} \
writeTest(#NAME ".json", [&]() -> json { \
return Derivation { VAL }.toJSON(*store); \
}, [](const auto & file) { \
return json::parse(readFile(file)); \
}, [](const auto & file, const auto & got) { \
return writeFile(file, got.dump(2) + "\n"); \
}); \
}
#define TEST_ATERM(FIXTURE, NAME, VAL, DRV_NAME) \
TEST_F(FIXTURE, Derivation_ ## NAME ## _from_aterm) { \
if (testAccept()) \
{ \
GTEST_SKIP() << cannotReadGoldenMaster; \
} \
else \
{ \
auto encoded = readFile(goldenMaster( #NAME ".drv")); \
readTest(#NAME ".drv", [&](auto encoded) { \
Derivation expected { VAL }; \
auto got = parseDerivation( \
*store, \
@ -219,25 +185,13 @@ TEST_JSON(ImpureDerivationTest, impure,
mockXpSettings); \
ASSERT_EQ(got.toJSON(*store), expected.toJSON(*store)) ; \
ASSERT_EQ(got, expected); \
} \
}); \
} \
\
TEST_F(FIXTURE, Derivation_ ## NAME ## _to_aterm) { \
auto file = goldenMaster( #NAME ".drv"); \
\
auto got = (VAL).unparse(*store, false); \
\
if (testAccept()) \
{ \
createDirs(dirOf(file)); \
writeFile(file, got); \
GTEST_SKIP() << updatingGoldenMaster; \
} \
else \
{ \
auto expected = readFile(file); \
ASSERT_EQ(got, expected); \
} \
writeTest(#NAME ".drv", [&]() -> std::string { \
return (VAL).unparse(*store, false); \
}); \
}
Derivation makeSimpleDrv(const Store & store) {

View file

@ -8,7 +8,7 @@
namespace nix {
class LibStoreTest : public ::testing::Test {
class LibStoreTest : public virtual ::testing::Test {
public:
static void SetUpTestSuite() {
initLibStore();

View file

@ -1,5 +1,7 @@
#include "machines.hh"
#include "globals.hh"
#include "file-system.hh"
#include "util.hh"
#include <gmock/gmock-matchers.h>

View file

@ -0,0 +1,85 @@
#include <nlohmann/json.hpp>
#include <gtest/gtest.h>
#include "path-info.hh"
#include "tests/characterization.hh"
#include "tests/libstore.hh"
namespace nix {
using nlohmann::json;
class NarInfoTest : public CharacterizationTest, public LibStoreTest
{
Path unitTestData = getUnitTestData() + "/libstore/nar-info";
Path goldenMaster(PathView testStem) const override {
return unitTestData + "/" + testStem + ".json";
}
};
static NarInfo makeNarInfo(const Store & store, bool includeImpureInfo) {
NarInfo info = ValidPathInfo {
store,
"foo",
FixedOutputInfo {
.method = FileIngestionMethod::Recursive,
.hash = hashString(HashType::htSHA256, "(...)"),
.references = {
.others = {
StorePath {
"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar",
},
},
.self = true,
},
},
Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="),
};
info.narSize = 34878;
if (includeImpureInfo) {
info.deriver = StorePath {
"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv",
};
info.registrationTime = 23423;
info.ultimate = true;
info.sigs = { "asdf", "qwer" };
info.url = "nar/1w1fff338fvdw53sqgamddn1b2xgds473pv6y13gizdbqjv4i5p3.nar.xz";
info.compression = "xz";
info.fileHash = Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=");
info.fileSize = 4029176;
}
return info;
}
#define JSON_TEST(STEM, PURE) \
TEST_F(NarInfoTest, NarInfo_ ## STEM ## _from_json) { \
readTest(#STEM, [&](const auto & encoded_) { \
auto encoded = json::parse(encoded_); \
auto expected = makeNarInfo(*store, PURE); \
NarInfo got = NarInfo::fromJSON( \
*store, \
expected.path, \
encoded); \
ASSERT_EQ(got, expected); \
}); \
} \
\
TEST_F(NarInfoTest, NarInfo_ ## STEM ## _to_json) { \
writeTest(#STEM, [&]() -> json { \
return makeNarInfo(*store, PURE) \
.toJSON(*store, PURE, HashFormat::SRI); \
}, [](const auto & file) { \
return json::parse(readFile(file)); \
}, [](const auto & file, const auto & got) { \
return writeFile(file, got.dump(2) + "\n"); \
}); \
}
JSON_TEST(pure, false)
JSON_TEST(impure, true)
}

View file

@ -0,0 +1,79 @@
#include <nlohmann/json.hpp>
#include <gtest/gtest.h>
#include "path-info.hh"
#include "tests/characterization.hh"
#include "tests/libstore.hh"
namespace nix {
using nlohmann::json;
class PathInfoTest : public CharacterizationTest, public LibStoreTest
{
Path unitTestData = getUnitTestData() + "/libstore/path-info";
Path goldenMaster(PathView testStem) const override {
return unitTestData + "/" + testStem + ".json";
}
};
static UnkeyedValidPathInfo makePathInfo(const Store & store, bool includeImpureInfo) {
UnkeyedValidPathInfo info = ValidPathInfo {
store,
"foo",
FixedOutputInfo {
.method = FileIngestionMethod::Recursive,
.hash = hashString(HashType::htSHA256, "(...)"),
.references = {
.others = {
StorePath {
"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar",
},
},
.self = true,
},
},
Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="),
};
info.narSize = 34878;
if (includeImpureInfo) {
info.deriver = StorePath {
"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv",
};
info.registrationTime = 23423;
info.ultimate = true;
info.sigs = { "asdf", "qwer" };
}
return info;
}
#define JSON_TEST(STEM, PURE) \
TEST_F(PathInfoTest, PathInfo_ ## STEM ## _from_json) { \
readTest(#STEM, [&](const auto & encoded_) { \
auto encoded = json::parse(encoded_); \
UnkeyedValidPathInfo got = UnkeyedValidPathInfo::fromJSON( \
*store, \
encoded); \
auto expected = makePathInfo(*store, PURE); \
ASSERT_EQ(got, expected); \
}); \
} \
\
TEST_F(PathInfoTest, PathInfo_ ## STEM ## _to_json) { \
writeTest(#STEM, [&]() -> json { \
return makePathInfo(*store, PURE) \
.toJSON(*store, PURE, HashFormat::SRI); \
}, [](const auto & file) { \
return json::parse(readFile(file)); \
}, [](const auto & file, const auto & got) { \
return writeFile(file, got.dump(2) + "\n"); \
}); \
}
JSON_TEST(pure, false)
JSON_TEST(impure, true)
}

View file

@ -1,3 +1,6 @@
#pragma once
///@file
#include <nlohmann/json.hpp>
#include <gtest/gtest.h>
@ -7,12 +10,11 @@
namespace nix {
template<class Proto, const char * protocolDir>
class ProtoTest : public LibStoreTest
class ProtoTest : public CharacterizationTest, public LibStoreTest
{
protected:
Path unitTestData = getUnitTestData() + "/libstore/" + protocolDir;
Path goldenMaster(std::string_view testStem) {
Path goldenMaster(std::string_view testStem) const override {
return unitTestData + "/" + testStem + ".bin";
}
};
@ -25,18 +27,11 @@ public:
* Golden test for `T` reading
*/
template<typename T>
void readTest(PathView testStem, typename Proto::Version version, T value)
void readProtoTest(PathView testStem, typename Proto::Version version, T expected)
{
if (testAccept())
{
GTEST_SKIP() << cannotReadGoldenMaster;
}
else
{
auto expected = readFile(ProtoTest<Proto, protocolDir>::goldenMaster(testStem));
CharacterizationTest::readTest(testStem, [&](const auto & encoded) {
T got = ({
StringSource from { expected };
StringSource from { encoded };
Proto::template Serialise<T>::read(
*LibStoreTest::store,
typename Proto::ReadConn {
@ -45,47 +40,36 @@ public:
});
});
ASSERT_EQ(got, value);
}
ASSERT_EQ(got, expected);
});
}
/**
* Golden test for `T` write
*/
template<typename T>
void writeTest(PathView testStem, typename Proto::Version version, const T & value)
void writeProtoTest(PathView testStem, typename Proto::Version version, const T & decoded)
{
auto file = ProtoTest<Proto, protocolDir>::goldenMaster(testStem);
StringSink to;
Proto::write(
*LibStoreTest::store,
typename Proto::WriteConn {
.to = to,
.version = version,
},
value);
if (testAccept())
{
createDirs(dirOf(file));
writeFile(file, to.s);
GTEST_SKIP() << updatingGoldenMaster;
}
else
{
auto expected = readFile(file);
ASSERT_EQ(to.s, expected);
}
CharacterizationTest::writeTest(testStem, [&]() {
StringSink to;
Proto::template Serialise<T>::write(
*LibStoreTest::store,
typename Proto::WriteConn {
.to = to,
.version = version,
},
decoded);
return std::move(to.s);
});
}
};
#define VERSIONED_CHARACTERIZATION_TEST(FIXTURE, NAME, STEM, VERSION, VALUE) \
TEST_F(FIXTURE, NAME ## _read) { \
readTest(STEM, VERSION, VALUE); \
readProtoTest(STEM, VERSION, VALUE); \
} \
TEST_F(FIXTURE, NAME ## _write) { \
writeTest(STEM, VERSION, VALUE); \
writeProtoTest(STEM, VERSION, VALUE); \
}
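// A call site might look like this (hypothetical; worker protocol versions
// are encoded as major << 8 | minor, so 1.23 is 1 << 8 | 23):
//
//   VERSIONED_CHARACTERIZATION_TEST(
//       WorkerProtoTest, string, "string", 1 << 8 | 23, std::string{"hi"})
//
// pinning the wire encoding of a std::string against the golden master
// string.bin.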
}

View file

@ -1,4 +1,5 @@
#include "uds-remote-store.hh"
#include "unix-domain-socket.hh"
#include "worker-protocol.hh"
#include <sys/types.h>

View file

@ -1,5 +1,4 @@
#include "serialise.hh"
#include "util.hh"
#include "path-with-outputs.hh"
#include "store-api.hh"
#include "build-result.hh"
@ -32,14 +31,14 @@ std::optional<TrustedFlag> WorkerProto::Serialise<std::optional<TrustedFlag>>::r
void WorkerProto::Serialise<std::optional<TrustedFlag>>::write(const Store & store, WorkerProto::WriteConn conn, const std::optional<TrustedFlag> & optTrusted)
{
if (!optTrusted)
conn.to << (uint8_t)0;
conn.to << uint8_t{0};
else {
switch (*optTrusted) {
case Trusted:
conn.to << (uint8_t)1;
conn.to << uint8_t{1};
break;
case NotTrusted:
conn.to << (uint8_t)2;
conn.to << uint8_t{2};
break;
default:
assert(false);
@ -102,7 +101,7 @@ void WorkerProto::Serialise<KeyedBuildResult>::write(const Store & store, Worker
BuildResult WorkerProto::Serialise<BuildResult>::read(const Store & store, WorkerProto::ReadConn conn)
{
BuildResult res;
res.status = (BuildResult::Status) readInt(conn.from);
res.status = static_cast<BuildResult::Status>(readInt(conn.from));
conn.from >> res.errorMsg;
if (GET_PROTOCOL_MINOR(conn.version) >= 29) {
conn.from

View file

@ -171,7 +171,7 @@ enum struct WorkerProto::Op : uint64_t
*/
inline Sink & operator << (Sink & sink, WorkerProto::Op op)
{
return sink << (uint64_t) op;
return sink << static_cast<uint64_t>(op);
}
/**
@ -181,7 +181,7 @@ inline Sink & operator << (Sink & sink, WorkerProto::Op op)
*/
inline std::ostream & operator << (std::ostream & s, WorkerProto::Op op)
{
return s << (uint64_t) op;
return s << static_cast<uint64_t>(op);
}
/**

View file

@ -7,7 +7,7 @@
namespace nix {
template<typename T>
std::map<std::string, nlohmann::json> BaseSetting<T>::toJSONObject()
std::map<std::string, nlohmann::json> BaseSetting<T>::toJSONObject() const
{
auto obj = AbstractSetting::toJSONObject();
obj.emplace("value", value);

View file

@ -6,9 +6,10 @@
#include <strings.h> // for strcasecmp
#include "archive.hh"
#include "util.hh"
#include "config.hh"
#include "posix-source-accessor.hh"
#include "file-system.hh"
#include "signals.hh"
namespace nix {

View file

@ -1,8 +1,14 @@
#include "args.hh"
#include "args/root.hh"
#include "hash.hh"
#include "environment-variables.hh"
#include "signals.hh"
#include "users.hh"
#include "json-utils.hh"
#include <fstream>
#include <string>
#include <regex>
#include <glob.h>
namespace nix {
@ -74,7 +80,176 @@ std::optional<std::string> RootArgs::needsCompletion(std::string_view s)
return {};
}
void RootArgs::parseCmdline(const Strings & _cmdline)
/**
* Basically this is `typedef std::optional<Parser> Parser(std::string_view s, Strings & r);`
*
* Except we can't recursively reference the Parser typedef, so we have to write a class.
*/
struct Parser {
std::string_view remaining;
/**
* @brief Parse the next character(s)
*
     * @param state Replaced with the parser for the next state, or set to
     *              nullptr once parsing is complete
     * @param r The list of fully parsed arguments accumulated so far
*/
virtual void operator()(std::shared_ptr<Parser> & state, Strings & r) = 0;
Parser(std::string_view s) : remaining(s) {};
};
struct ParseQuoted : public Parser {
/**
* @brief Accumulated string
*
* Parsed argument up to this point.
*/
std::string acc;
ParseQuoted(std::string_view s) : Parser(s) {};
virtual void operator()(std::shared_ptr<Parser> & state, Strings & r) override;
};
struct ParseUnquoted : public Parser {
/**
* @brief Accumulated string
*
* Parsed argument up to this point. Empty string is not representable in
* unquoted syntax, so we use it for the initial state.
*/
std::string acc;
ParseUnquoted(std::string_view s) : Parser(s) {};
virtual void operator()(std::shared_ptr<Parser> & state, Strings & r) override {
if (remaining.empty()) {
if (!acc.empty())
r.push_back(acc);
state = nullptr; // done
return;
}
switch (remaining[0]) {
case ' ': case '\t': case '\n': case '\r':
if (!acc.empty())
r.push_back(acc);
state = std::make_shared<ParseUnquoted>(ParseUnquoted(remaining.substr(1)));
return;
case '`':
if (remaining.size() > 1 && remaining[1] == '`') {
state = std::make_shared<ParseQuoted>(ParseQuoted(remaining.substr(2)));
return;
}
else
throw Error("single backtick is not a supported syntax in the nix shebang.");
// reserved characters
// meaning to be determined, or may be reserved indefinitely so that
// #!nix syntax looks unambiguous
case '$':
case '*':
case '~':
case '<':
case '>':
case '|':
case ';':
case '(':
case ')':
case '[':
case ']':
case '{':
case '}':
case '\'':
case '"':
case '\\':
throw Error("unsupported unquoted character in nix shebang: " + std::string(1, remaining[0]) + ". Use double backticks to escape?");
case '#':
if (acc.empty()) {
throw Error ("unquoted nix shebang argument cannot start with #. Use double backticks to escape?");
} else {
acc += remaining[0];
remaining = remaining.substr(1);
return;
}
default:
acc += remaining[0];
remaining = remaining.substr(1);
return;
}
assert(false);
}
};
void ParseQuoted::operator()(std::shared_ptr<Parser> &state, Strings & r) {
if (remaining.empty()) {
throw Error("unterminated quoted string in nix shebang");
}
switch (remaining[0]) {
case ' ':
if ((remaining.size() == 3 && remaining[1] == '`' && remaining[2] == '`')
|| (remaining.size() > 3 && remaining[1] == '`' && remaining[2] == '`' && remaining[3] != '`')) {
// exactly two backticks mark the end of a quoted string, but a preceding space is ignored if present.
state = std::make_shared<ParseUnquoted>(ParseUnquoted(remaining.substr(3)));
r.push_back(acc);
return;
}
else {
// just a normal space
acc += remaining[0];
remaining = remaining.substr(1);
return;
}
case '`':
// exactly two backticks mark the end of a quoted string
if ((remaining.size() == 2 && remaining[1] == '`')
|| (remaining.size() > 2 && remaining[1] == '`' && remaining[2] != '`')) {
state = std::make_shared<ParseUnquoted>(ParseUnquoted(remaining.substr(2)));
r.push_back(acc);
return;
}
// a sequence of at least 3 backticks is one escape-backtick which is ignored, followed by any number of backticks, which are verbatim
else if (remaining.size() >= 3 && remaining[1] == '`' && remaining[2] == '`') {
// ignore "escape" backtick
remaining = remaining.substr(1);
// add the rest
while (remaining.size() > 0 && remaining[0] == '`') {
acc += '`';
remaining = remaining.substr(1);
}
return;
}
else {
acc += remaining[0];
remaining = remaining.substr(1);
return;
}
default:
acc += remaining[0];
remaining = remaining.substr(1);
return;
}
assert(false);
}
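// Net effect of the quoting rules above (a sketch): inside ``...``, a run
// of n >= 3 backticks yields n - 1 literal backticks, so the input
// ``a```b`` parses to the single word a``b.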
Strings parseShebangContent(std::string_view s) {
Strings result;
std::shared_ptr<Parser> parserState(std::make_shared<ParseUnquoted>(ParseUnquoted(s)));
// trampoline == iterated strategy pattern
while (parserState) {
auto currentState = parserState;
(*currentState)(parserState, result);
}
return result;
}
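// Illustration (not part of this change): unquoted words split on
// whitespace and double backticks delimit one quoted word, so
//
//   parseShebangContent("foo ``bar baz`` qux")
//
// yields the three words foo, "bar baz", and qux.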
void RootArgs::parseCmdline(const Strings & _cmdline, bool allowShebang)
{
Strings pendingArgs;
bool dashDash = false;
@ -90,6 +265,45 @@ void RootArgs::parseCmdline(const Strings & _cmdline)
}
bool argsSeen = false;
    // Heuristic to see if we're invoked as a shebang script, namely:
    // we have at least one argument, it names a readable file, and
    // that file starts with "#!".
Strings savedArgs;
if (allowShebang){
auto script = *cmdline.begin();
try {
std::ifstream stream(script);
char shebang[3]={0,0,0};
stream.get(shebang,3);
if (strncmp(shebang,"#!",2) == 0){
for (auto pos = std::next(cmdline.begin()); pos != cmdline.end();pos++)
savedArgs.push_back(*pos);
cmdline.clear();
std::string line;
std::getline(stream,line);
static const std::string commentChars("#/\\%@*-");
std::string shebangContent;
while (std::getline(stream,line) && !line.empty() && commentChars.find(line[0]) != std::string::npos){
line = chomp(line);
std::smatch match;
// We match one space after `nix` so that we preserve indentation.
// No space is necessary for an empty line. An empty line has basically no effect.
if (std::regex_match(line, match, std::regex("^#!\\s*nix(:? |$)(.*)$")))
shebangContent += match[2].str() + "\n";
}
for (const auto & word : parseShebangContent(shebangContent)) {
cmdline.push_back(word);
}
cmdline.push_back(script);
commandBaseDir = dirOf(script);
for (auto pos = savedArgs.begin(); pos != savedArgs.end();pos++)
cmdline.push_back(*pos);
}
} catch (SysError &) { }
}
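    // Illustrative script (an assumption, not taken from this commit):
    //
    //   #!/usr/bin/env nix
    //   #!nix shell --impure
    //   #!nix nixpkgs#bash --command bash
    //
    // The "#!nix" lines are concatenated, parsed by parseShebangContent,
    // and spliced into cmdline ahead of the script path and its original
    // arguments, while commandBaseDir is set to the script's directory.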
for (auto pos = cmdline.begin(); pos != cmdline.end(); ) {
auto arg = *pos;
@ -145,6 +359,17 @@ void RootArgs::parseCmdline(const Strings & _cmdline)
d.completer(*completions, d.n, d.prefix);
}
Path Args::getCommandBaseDir() const
{
assert(parent);
return parent->getCommandBaseDir();
}
Path RootArgs::getCommandBaseDir() const
{
return commandBaseDir;
}
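// Sketch of intended use (an assumption, not from this diff): commands
// can resolve user-supplied relative paths against this base so that a
// shebang script behaves the same from any working directory, e.g.
//
//   auto path = absPath(arg, getCommandBaseDir());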
bool Args::processFlag(Strings::iterator & pos, Strings::iterator end)
{
assert(pos != end);

View file

@ -2,12 +2,15 @@
///@file
#include <iostream>
#include <functional>
#include <map>
#include <memory>
#include <optional>
#include <nlohmann/json_fwd.hpp>
#include "util.hh"
#include "types.hh"
#include "experimental-features.hh"
namespace nix {
@ -21,11 +24,12 @@ class AddCompletions;
class Args
{
public:
/**
* Return a short one-line description of the command.
*/
virtual std::string description() { return ""; }
virtual bool forceImpureByDefault() { return false; }
@ -35,6 +39,16 @@ public:
*/
virtual std::string doc() { return ""; }
/**
* @brief Get the base directory for the command.
*
* @return Generally the working directory, but in case of a shebang
* interpreter, returns the directory of the script.
*
* This only returns the correct value after parseCmdline() has run.
*/
virtual Path getCommandBaseDir() const;
protected:
/**
@ -395,4 +409,6 @@ public:
virtual void add(std::string completion, std::string description = "") = 0;
};
Strings parseShebangContent(std::string_view s);
}

Some files were not shown because too many files have changed in this diff.