Mirror of https://github.com/NixOS/nix, synced 2025-07-06 21:41:48 +02:00

Commit 5c1cb0b696: Merge remote-tracking branch 'upstream/master' into overlayfs-store

941 changed files with 10981 additions and 4439 deletions
|
@ -14,6 +14,7 @@
|
|||
#include "archive.hh"
|
||||
#include "util.hh"
|
||||
#include "config.hh"
|
||||
#include "posix-source-accessor.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
|
@ -27,8 +28,6 @@ struct ArchiveSettings : Config
|
|||
#endif
|
||||
"use-case-hack",
|
||||
"Whether to enable a Darwin-specific hack for dealing with file name collisions."};
|
||||
Setting<bool> preallocateContents{this, false, "preallocate-contents",
|
||||
"Whether to preallocate files when writing objects with known size."};
|
||||
};
|
||||
|
||||
static ArchiveSettings archiveSettings;
|
||||
|
@ -38,91 +37,87 @@ static GlobalConfig::Register rArchiveSettings(&archiveSettings);
|
|||
PathFilter defaultPathFilter = [](const Path &) { return true; };
|
||||
|
||||
|
||||
static void dumpContents(const Path & path, off_t size,
|
||||
Sink & sink)
|
||||
void SourceAccessor::dumpPath(
|
||||
const CanonPath & path,
|
||||
Sink & sink,
|
||||
PathFilter & filter)
|
||||
{
|
||||
sink << "contents" << size;
|
||||
auto dumpContents = [&](const CanonPath & path)
|
||||
{
|
||||
sink << "contents";
|
||||
std::optional<uint64_t> size;
|
||||
readFile(path, sink, [&](uint64_t _size)
|
||||
{
|
||||
size = _size;
|
||||
sink << _size;
|
||||
});
|
||||
assert(size);
|
||||
writePadding(*size, sink);
|
||||
};
|
||||
|
||||
AutoCloseFD fd = open(path.c_str(), O_RDONLY | O_CLOEXEC);
|
||||
if (!fd) throw SysError("opening file '%1%'", path);
|
||||
std::function<void(const CanonPath & path)> dump;
|
||||
|
||||
std::vector<char> buf(65536);
|
||||
size_t left = size;
|
||||
dump = [&](const CanonPath & path) {
|
||||
checkInterrupt();
|
||||
|
||||
while (left > 0) {
|
||||
auto n = std::min(left, buf.size());
|
||||
readFull(fd.get(), buf.data(), n);
|
||||
left -= n;
|
||||
sink({buf.data(), n});
|
||||
}
|
||||
auto st = lstat(path);
|
||||
|
||||
writePadding(size, sink);
|
||||
}
|
||||
sink << "(";
|
||||
|
||||
if (st.type == tRegular) {
|
||||
sink << "type" << "regular";
|
||||
if (st.isExecutable)
|
||||
sink << "executable" << "";
|
||||
dumpContents(path);
|
||||
}
|
||||
|
||||
static time_t dump(const Path & path, Sink & sink, PathFilter & filter)
|
||||
{
|
||||
checkInterrupt();
|
||||
else if (st.type == tDirectory) {
|
||||
sink << "type" << "directory";
|
||||
|
||||
auto st = lstat(path);
|
||||
time_t result = st.st_mtime;
|
||||
/* If we're on a case-insensitive system like macOS, undo
|
||||
the case hack applied by restorePath(). */
|
||||
std::map<std::string, std::string> unhacked;
|
||||
for (auto & i : readDirectory(path))
|
||||
if (archiveSettings.useCaseHack) {
|
||||
std::string name(i.first);
|
||||
size_t pos = i.first.find(caseHackSuffix);
|
||||
if (pos != std::string::npos) {
|
||||
debug("removing case hack suffix from '%s'", path + i.first);
|
||||
name.erase(pos);
|
||||
}
|
||||
if (!unhacked.emplace(name, i.first).second)
|
||||
throw Error("file name collision in between '%s' and '%s'",
|
||||
(path + unhacked[name]),
|
||||
(path + i.first));
|
||||
} else
|
||||
unhacked.emplace(i.first, i.first);
|
||||
|
||||
sink << "(";
|
||||
|
||||
if (S_ISREG(st.st_mode)) {
|
||||
sink << "type" << "regular";
|
||||
if (st.st_mode & S_IXUSR)
|
||||
sink << "executable" << "";
|
||||
dumpContents(path, st.st_size, sink);
|
||||
}
|
||||
|
||||
else if (S_ISDIR(st.st_mode)) {
|
||||
sink << "type" << "directory";
|
||||
|
||||
/* If we're on a case-insensitive system like macOS, undo
|
||||
the case hack applied by restorePath(). */
|
||||
std::map<std::string, std::string> unhacked;
|
||||
for (auto & i : readDirectory(path))
|
||||
if (archiveSettings.useCaseHack) {
|
||||
std::string name(i.name);
|
||||
size_t pos = i.name.find(caseHackSuffix);
|
||||
if (pos != std::string::npos) {
|
||||
debug("removing case hack suffix from '%1%'", path + "/" + i.name);
|
||||
name.erase(pos);
|
||||
for (auto & i : unhacked)
|
||||
if (filter((path + i.first).abs())) {
|
||||
sink << "entry" << "(" << "name" << i.first << "node";
|
||||
dump(path + i.second);
|
||||
sink << ")";
|
||||
}
|
||||
if (!unhacked.emplace(name, i.name).second)
|
||||
throw Error("file name collision in between '%1%' and '%2%'",
|
||||
(path + "/" + unhacked[name]),
|
||||
(path + "/" + i.name));
|
||||
} else
|
||||
unhacked.emplace(i.name, i.name);
|
||||
}
|
||||
|
||||
for (auto & i : unhacked)
|
||||
if (filter(path + "/" + i.first)) {
|
||||
sink << "entry" << "(" << "name" << i.first << "node";
|
||||
auto tmp_mtime = dump(path + "/" + i.second, sink, filter);
|
||||
if (tmp_mtime > result) {
|
||||
result = tmp_mtime;
|
||||
}
|
||||
sink << ")";
|
||||
}
|
||||
}
|
||||
else if (st.type == tSymlink)
|
||||
sink << "type" << "symlink" << "target" << readLink(path);
|
||||
|
||||
else if (S_ISLNK(st.st_mode))
|
||||
sink << "type" << "symlink" << "target" << readLink(path);
|
||||
else throw Error("file '%s' has an unsupported type", path);
|
||||
|
||||
else throw Error("file '%1%' has an unsupported type", path);
|
||||
sink << ")";
|
||||
};
|
||||
|
||||
sink << ")";
|
||||
|
||||
return result;
|
||||
sink << narVersionMagic1;
|
||||
dump(path);
|
||||
}
|
||||
|
||||
|
||||
time_t dumpPathAndGetMtime(const Path & path, Sink & sink, PathFilter & filter)
|
||||
{
|
||||
sink << narVersionMagic1;
|
||||
return dump(path, sink, filter);
|
||||
PosixSourceAccessor accessor;
|
||||
accessor.dumpPath(CanonPath::fromCwd(path), sink, filter);
|
||||
return accessor.mtime;
|
||||
}
|
||||
|
||||
void dumpPath(const Path & path, Sink & sink, PathFilter & filter)
|
||||
|
@ -143,17 +138,6 @@ static SerialisationError badArchive(const std::string & s)
|
|||
}
|
||||
|
||||
|
||||
#if 0
|
||||
static void skipGeneric(Source & source)
|
||||
{
|
||||
if (readString(source) == "(") {
|
||||
while (readString(source) != ")")
|
||||
skipGeneric(source);
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
|
||||
static void parseContents(ParseSink & sink, Source & source, const Path & path)
|
||||
{
|
||||
uint64_t size = readLongLong(source);
|
||||
|
@ -302,71 +286,6 @@ void parseDump(ParseSink & sink, Source & source)
|
|||
}
|
||||
|
||||
|
||||
struct RestoreSink : ParseSink
|
||||
{
|
||||
Path dstPath;
|
||||
AutoCloseFD fd;
|
||||
|
||||
void createDirectory(const Path & path) override
|
||||
{
|
||||
Path p = dstPath + path;
|
||||
if (mkdir(p.c_str(), 0777) == -1)
|
||||
throw SysError("creating directory '%1%'", p);
|
||||
};
|
||||
|
||||
void createRegularFile(const Path & path) override
|
||||
{
|
||||
Path p = dstPath + path;
|
||||
fd = open(p.c_str(), O_CREAT | O_EXCL | O_WRONLY | O_CLOEXEC, 0666);
|
||||
if (!fd) throw SysError("creating file '%1%'", p);
|
||||
}
|
||||
|
||||
void closeRegularFile() override
|
||||
{
|
||||
/* Call close explicitly to make sure the error is checked */
|
||||
fd.close();
|
||||
}
|
||||
|
||||
void isExecutable() override
|
||||
{
|
||||
struct stat st;
|
||||
if (fstat(fd.get(), &st) == -1)
|
||||
throw SysError("fstat");
|
||||
if (fchmod(fd.get(), st.st_mode | (S_IXUSR | S_IXGRP | S_IXOTH)) == -1)
|
||||
throw SysError("fchmod");
|
||||
}
|
||||
|
||||
void preallocateContents(uint64_t len) override
|
||||
{
|
||||
if (!archiveSettings.preallocateContents)
|
||||
return;
|
||||
|
||||
#if HAVE_POSIX_FALLOCATE
|
||||
if (len) {
|
||||
errno = posix_fallocate(fd.get(), 0, len);
|
||||
/* Note that EINVAL may indicate that the underlying
|
||||
filesystem doesn't support preallocation (e.g. on
|
||||
OpenSolaris). Since preallocation is just an
|
||||
optimisation, ignore it. */
|
||||
if (errno && errno != EINVAL && errno != EOPNOTSUPP && errno != ENOSYS)
|
||||
throw SysError("preallocating file of %1% bytes", len);
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
void receiveContents(std::string_view data) override
|
||||
{
|
||||
writeFull(fd.get(), data);
|
||||
}
|
||||
|
||||
void createSymlink(const Path & path, const std::string & target) override
|
||||
{
|
||||
Path p = dstPath + path;
|
||||
nix::createSymlink(target, p);
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
void restorePath(const Path & path, Source & source)
|
||||
{
|
||||
RestoreSink sink;
|
||||
|
|
|
@@ -3,6 +3,7 @@

#include "types.hh"
#include "serialise.hh"
#include "fs-sink.hh"


namespace nix {

@@ -72,22 +73,6 @@ time_t dumpPathAndGetMtime(const Path & path, Sink & sink,
 */
void dumpString(std::string_view s, Sink & sink);

/**
 * \todo Fix this API, it sucks.
 */
struct ParseSink
{
    virtual void createDirectory(const Path & path) { };

    virtual void createRegularFile(const Path & path) { };
    virtual void closeRegularFile() { };
    virtual void isExecutable() { };
    virtual void preallocateContents(uint64_t size) { };
    virtual void receiveContents(std::string_view data) { };

    virtual void createSymlink(const Path & path, const std::string & target) { };
};

/**
 * If the NAR archive contains a single file at top-level, then save
 * the contents of the file to `s`. Otherwise barf.
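The sketch below is an editorial illustration, not part of the commit: it shows how the NAR dump entry points declared here are typically driven. Only `dumpPath`, `defaultPathFilter` and the `Sink::operator()(std::string_view)` byte-stream interface are taken from the code shown; `ByteCountSink` and `narSizeOf` are hypothetical names.

#include "archive.hh"

using namespace nix;

// Hypothetical sink that merely counts how many bytes the NAR serialisation
// of a path occupies, relying only on the Sink byte-stream interface.
struct ByteCountSink : Sink
{
    uint64_t size = 0;
    void operator () (std::string_view data) override { size += data.size(); }
};

// Assumed usage: serialise `path` as a NAR into the counting sink.
uint64_t narSizeOf(const Path & path)
{
    ByteCountSink sink;
    dumpPath(path, sink, defaultPathFilter);
    return sink.size;
}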
@ -1,4 +1,5 @@
|
|||
#include "args.hh"
|
||||
#include "args/root.hh"
|
||||
#include "hash.hh"
|
||||
#include "json-utils.hh"
|
||||
|
||||
|
@ -26,6 +27,11 @@ void Args::removeFlag(const std::string & longName)
|
|||
longFlags.erase(flag);
|
||||
}
|
||||
|
||||
void Completions::setType(AddCompletions::Type t)
|
||||
{
|
||||
type = t;
|
||||
}
|
||||
|
||||
void Completions::add(std::string completion, std::string description)
|
||||
{
|
||||
description = trim(description);
|
||||
|
@ -37,7 +43,7 @@ void Completions::add(std::string completion, std::string description)
|
|||
if (needs_ellipsis)
|
||||
description.append(" [...]");
|
||||
}
|
||||
insert(Completion {
|
||||
completions.insert(Completion {
|
||||
.completion = completion,
|
||||
.description = description
|
||||
});
|
||||
|
@ -46,12 +52,20 @@ void Completions::add(std::string completion, std::string description)
|
|||
bool Completion::operator<(const Completion & other) const
|
||||
{ return completion < other.completion || (completion == other.completion && description < other.description); }
|
||||
|
||||
CompletionType completionType = ctNormal;
|
||||
std::shared_ptr<Completions> completions;
|
||||
|
||||
std::string completionMarker = "___COMPLETE___";
|
||||
|
||||
static std::optional<std::string> needsCompletion(std::string_view s)
|
||||
RootArgs & Args::getRoot()
|
||||
{
|
||||
Args * p = this;
|
||||
while (p->parent)
|
||||
p = p->parent;
|
||||
|
||||
auto * res = dynamic_cast<RootArgs *>(p);
|
||||
assert(res);
|
||||
return *res;
|
||||
}
|
||||
|
||||
std::optional<std::string> RootArgs::needsCompletion(std::string_view s)
|
||||
{
|
||||
if (!completions) return {};
|
||||
auto i = s.find(completionMarker);
|
||||
|
@ -60,7 +74,7 @@ static std::optional<std::string> needsCompletion(std::string_view s)
|
|||
return {};
|
||||
}
|
||||
|
||||
void Args::parseCmdline(const Strings & _cmdline)
|
||||
void RootArgs::parseCmdline(const Strings & _cmdline)
|
||||
{
|
||||
Strings pendingArgs;
|
||||
bool dashDash = false;
|
||||
|
@ -71,7 +85,7 @@ void Args::parseCmdline(const Strings & _cmdline)
|
|||
size_t n = std::stoi(*s);
|
||||
assert(n > 0 && n <= cmdline.size());
|
||||
*std::next(cmdline.begin(), n - 1) += completionMarker;
|
||||
completions = std::make_shared<decltype(completions)::element_type>();
|
||||
completions = std::make_shared<Completions>();
|
||||
verbosity = lvlError;
|
||||
}
|
||||
|
||||
|
@ -125,17 +139,23 @@ void Args::parseCmdline(const Strings & _cmdline)
|
|||
for (auto & f : flagExperimentalFeatures)
|
||||
experimentalFeatureSettings.require(f);
|
||||
|
||||
/* Now that all the other args are processed, run the deferred completions.
|
||||
*/
|
||||
for (auto d : deferredCompletions)
|
||||
d.completer(*completions, d.n, d.prefix);
|
||||
}
|
||||
|
||||
bool Args::processFlag(Strings::iterator & pos, Strings::iterator end)
|
||||
{
|
||||
assert(pos != end);
|
||||
|
||||
auto & rootArgs = getRoot();
|
||||
|
||||
auto process = [&](const std::string & name, const Flag & flag) -> bool {
|
||||
++pos;
|
||||
|
||||
if (auto & f = flag.experimentalFeature)
|
||||
flagExperimentalFeatures.insert(*f);
|
||||
rootArgs.flagExperimentalFeatures.insert(*f);
|
||||
|
||||
std::vector<std::string> args;
|
||||
bool anyCompleted = false;
|
||||
|
@ -146,10 +166,15 @@ bool Args::processFlag(Strings::iterator & pos, Strings::iterator end)
|
|||
"flag '%s' requires %d argument(s), but only %d were given",
|
||||
name, flag.handler.arity, n);
|
||||
}
|
||||
if (auto prefix = needsCompletion(*pos)) {
|
||||
if (auto prefix = rootArgs.needsCompletion(*pos)) {
|
||||
anyCompleted = true;
|
||||
if (flag.completer)
|
||||
flag.completer(n, *prefix);
|
||||
if (flag.completer) {
|
||||
rootArgs.deferredCompletions.push_back({
|
||||
.completer = flag.completer,
|
||||
.n = n,
|
||||
.prefix = *prefix,
|
||||
});
|
||||
}
|
||||
}
|
||||
args.push_back(*pos++);
|
||||
}
|
||||
|
@ -159,14 +184,14 @@ bool Args::processFlag(Strings::iterator & pos, Strings::iterator end)
|
|||
};
|
||||
|
||||
if (std::string(*pos, 0, 2) == "--") {
|
||||
if (auto prefix = needsCompletion(*pos)) {
|
||||
if (auto prefix = rootArgs.needsCompletion(*pos)) {
|
||||
for (auto & [name, flag] : longFlags) {
|
||||
if (!hiddenCategories.count(flag->category)
|
||||
&& hasPrefix(name, std::string(*prefix, 2)))
|
||||
{
|
||||
if (auto & f = flag->experimentalFeature)
|
||||
flagExperimentalFeatures.insert(*f);
|
||||
completions->add("--" + name, flag->description);
|
||||
rootArgs.flagExperimentalFeatures.insert(*f);
|
||||
rootArgs.completions->add("--" + name, flag->description);
|
||||
}
|
||||
}
|
||||
return false;
|
||||
|
@ -183,12 +208,12 @@ bool Args::processFlag(Strings::iterator & pos, Strings::iterator end)
|
|||
return process(std::string("-") + c, *i->second);
|
||||
}
|
||||
|
||||
if (auto prefix = needsCompletion(*pos)) {
|
||||
if (auto prefix = rootArgs.needsCompletion(*pos)) {
|
||||
if (prefix == "-") {
|
||||
completions->add("--");
|
||||
rootArgs.completions->add("--");
|
||||
for (auto & [flagName, flag] : shortFlags)
|
||||
if (experimentalFeatureSettings.isEnabled(flag->experimentalFeature))
|
||||
completions->add(std::string("-") + flagName, flag->description);
|
||||
rootArgs.completions->add(std::string("-") + flagName, flag->description);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -203,6 +228,8 @@ bool Args::processArgs(const Strings & args, bool finish)
|
|||
return true;
|
||||
}
|
||||
|
||||
auto & rootArgs = getRoot();
|
||||
|
||||
auto & exp = expectedArgs.front();
|
||||
|
||||
bool res = false;
|
||||
|
@ -211,15 +238,23 @@ bool Args::processArgs(const Strings & args, bool finish)
|
|||
(exp.handler.arity != ArityAny && args.size() == exp.handler.arity))
|
||||
{
|
||||
std::vector<std::string> ss;
|
||||
bool anyCompleted = false;
|
||||
for (const auto &[n, s] : enumerate(args)) {
|
||||
if (auto prefix = needsCompletion(s)) {
|
||||
if (auto prefix = rootArgs.needsCompletion(s)) {
|
||||
anyCompleted = true;
|
||||
ss.push_back(*prefix);
|
||||
if (exp.completer)
|
||||
exp.completer(n, *prefix);
|
||||
if (exp.completer) {
|
||||
rootArgs.deferredCompletions.push_back({
|
||||
.completer = exp.completer,
|
||||
.n = n,
|
||||
.prefix = *prefix,
|
||||
});
|
||||
}
|
||||
} else
|
||||
ss.push_back(s);
|
||||
}
|
||||
exp.handler.fun(ss);
|
||||
if (!anyCompleted)
|
||||
exp.handler.fun(ss);
|
||||
expectedArgs.pop_front();
|
||||
res = true;
|
||||
}
|
||||
|
@ -236,6 +271,7 @@ nlohmann::json Args::toJSON()
|
|||
|
||||
for (auto & [name, flag] : longFlags) {
|
||||
auto j = nlohmann::json::object();
|
||||
j["hiddenCategory"] = hiddenCategories.count(flag->category) > 0;
|
||||
if (flag->aliases.count(name)) continue;
|
||||
if (flag->shortName)
|
||||
j["shortName"] = std::string(1, flag->shortName);
|
||||
|
@ -270,11 +306,11 @@ nlohmann::json Args::toJSON()
|
|||
return res;
|
||||
}
|
||||
|
||||
static void hashTypeCompleter(size_t index, std::string_view prefix)
|
||||
static void hashTypeCompleter(AddCompletions & completions, size_t index, std::string_view prefix)
|
||||
{
|
||||
for (auto & type : hashTypes)
|
||||
if (hasPrefix(type, prefix))
|
||||
completions->add(type);
|
||||
completions.add(type);
|
||||
}
|
||||
|
||||
Args::Flag Args::Flag::mkHashTypeFlag(std::string && longName, HashType * ht)
|
||||
|
@ -286,7 +322,7 @@ Args::Flag Args::Flag::mkHashTypeFlag(std::string && longName, HashType * ht)
|
|||
.handler = {[ht](std::string s) {
|
||||
*ht = parseHashType(s);
|
||||
}},
|
||||
.completer = hashTypeCompleter
|
||||
.completer = hashTypeCompleter,
|
||||
};
|
||||
}
|
||||
|
||||
|
@ -299,13 +335,13 @@ Args::Flag Args::Flag::mkHashTypeOptFlag(std::string && longName, std::optional<
|
|||
.handler = {[oht](std::string s) {
|
||||
*oht = std::optional<HashType> { parseHashType(s) };
|
||||
}},
|
||||
.completer = hashTypeCompleter
|
||||
.completer = hashTypeCompleter,
|
||||
};
|
||||
}
|
||||
|
||||
static void _completePath(std::string_view prefix, bool onlyDirs)
|
||||
static void _completePath(AddCompletions & completions, std::string_view prefix, bool onlyDirs)
|
||||
{
|
||||
completionType = ctFilenames;
|
||||
completions.setType(Completions::Type::Filenames);
|
||||
glob_t globbuf;
|
||||
int flags = GLOB_NOESCAPE;
|
||||
#ifdef GLOB_ONLYDIR
|
||||
|
@ -319,20 +355,20 @@ static void _completePath(std::string_view prefix, bool onlyDirs)
|
|||
auto st = stat(globbuf.gl_pathv[i]);
|
||||
if (!S_ISDIR(st.st_mode)) continue;
|
||||
}
|
||||
completions->add(globbuf.gl_pathv[i]);
|
||||
completions.add(globbuf.gl_pathv[i]);
|
||||
}
|
||||
}
|
||||
globfree(&globbuf);
|
||||
}
|
||||
|
||||
void completePath(size_t, std::string_view prefix)
|
||||
void Args::completePath(AddCompletions & completions, size_t, std::string_view prefix)
|
||||
{
|
||||
_completePath(prefix, false);
|
||||
_completePath(completions, prefix, false);
|
||||
}
|
||||
|
||||
void completeDir(size_t, std::string_view prefix)
|
||||
void Args::completeDir(AddCompletions & completions, size_t, std::string_view prefix)
|
||||
{
|
||||
_completePath(prefix, true);
|
||||
_completePath(completions, prefix, true);
|
||||
}
|
||||
|
||||
Strings argvToStrings(int argc, char * * argv)
|
||||
|
@ -367,10 +403,10 @@ MultiCommand::MultiCommand(const Commands & commands_)
|
|||
command = {s, i->second()};
|
||||
command->second->parent = this;
|
||||
}},
|
||||
.completer = {[&](size_t, std::string_view prefix) {
|
||||
.completer = {[&](AddCompletions & completions, size_t, std::string_view prefix) {
|
||||
for (auto & [name, command] : commands)
|
||||
if (hasPrefix(name, prefix))
|
||||
completions->add(name);
|
||||
completions.add(name);
|
||||
}}
|
||||
});
|
||||
|
||||
|
@ -392,14 +428,6 @@ bool MultiCommand::processArgs(const Strings & args, bool finish)
|
|||
return Args::processArgs(args, finish);
|
||||
}
|
||||
|
||||
void MultiCommand::completionHook()
|
||||
{
|
||||
if (command)
|
||||
return command->second->completionHook();
|
||||
else
|
||||
return Args::completionHook();
|
||||
}
|
||||
|
||||
nlohmann::json MultiCommand::toJSON()
|
||||
{
|
||||
auto cmds = nlohmann::json::object();
|
||||
|
@ -410,8 +438,8 @@ nlohmann::json MultiCommand::toJSON()
|
|||
auto cat = nlohmann::json::object();
|
||||
cat["id"] = command->category();
|
||||
cat["description"] = trim(categories[command->category()]);
|
||||
j["category"] = std::move(cat);
|
||||
cat["experimental-feature"] = command->experimentalFeature();
|
||||
j["category"] = std::move(cat);
|
||||
cmds[name] = std::move(j);
|
||||
}
|
||||
|
||||
|
|
|
@ -15,16 +15,14 @@ enum HashType : char;
|
|||
|
||||
class MultiCommand;
|
||||
|
||||
class RootArgs;
|
||||
|
||||
class AddCompletions;
|
||||
|
||||
class Args
|
||||
{
|
||||
public:
|
||||
|
||||
/**
|
||||
* Parse the command line, throwing a UsageError if something goes
|
||||
* wrong.
|
||||
*/
|
||||
void parseCmdline(const Strings & cmdline);
|
||||
|
||||
/**
|
||||
* Return a short one-line description of the command.
|
||||
*/
|
||||
|
@ -39,8 +37,21 @@ public:
|
|||
|
||||
protected:
|
||||
|
||||
/**
|
||||
* The largest `size_t` is used to indicate the "any" arity, for
|
||||
* handlers/flags/arguments that accept an arbitrary number of
|
||||
* arguments.
|
||||
*/
|
||||
static const size_t ArityAny = std::numeric_limits<size_t>::max();
|
||||
|
||||
/**
|
||||
* Arguments (flags/options and positional) have a "handler" which is
|
||||
* caused when the argument is parsed. The handler has an arbitrary side
|
||||
* effect, including possible affect further command-line parsing.
|
||||
*
|
||||
* There are many constructors in order to support many shorthand
|
||||
* initializations, and this is used a lot.
|
||||
*/
|
||||
struct Handler
|
||||
{
|
||||
std::function<void(std::vector<std::string>)> fun;
|
||||
|
@ -110,7 +121,31 @@ protected:
|
|||
{ }
|
||||
};
|
||||
|
||||
/* Options. */
|
||||
/**
|
||||
* The basic function type of the completion callback.
|
||||
*
|
||||
* Used to define `CompleterClosure` and some common case completers
|
||||
* that individual flags/arguments can use.
|
||||
*
|
||||
* The `AddCompletions` that is passed is an interface to the state
|
||||
* stored as part of the root command
|
||||
*/
|
||||
typedef void CompleterFun(AddCompletions &, size_t, std::string_view);
|
||||
|
||||
/**
|
||||
* The closure type of the completion callback.
|
||||
*
|
||||
* This is what is actually stored as part of each Flag / Expected
|
||||
* Arg.
|
||||
*/
|
||||
typedef std::function<CompleterFun> CompleterClosure;
|
||||
|
||||
/**
|
||||
* Description of flags / options
|
||||
*
|
||||
* These are arguments like `-s` or `--long` that can (mostly)
|
||||
* appear in any order.
|
||||
*/
|
||||
struct Flag
|
||||
{
|
||||
typedef std::shared_ptr<Flag> ptr;
|
||||
|
@ -122,7 +157,7 @@ protected:
|
|||
std::string category;
|
||||
Strings labels;
|
||||
Handler handler;
|
||||
std::function<void(size_t, std::string_view)> completer;
|
||||
CompleterClosure completer;
|
||||
|
||||
std::optional<ExperimentalFeature> experimentalFeature;
|
||||
|
||||
|
@ -130,22 +165,56 @@ protected:
|
|||
static Flag mkHashTypeOptFlag(std::string && longName, std::optional<HashType> * oht);
|
||||
};
|
||||
|
||||
/**
|
||||
* Index of all registered "long" flag descriptions (flags like
|
||||
* `--long`).
|
||||
*/
|
||||
std::map<std::string, Flag::ptr> longFlags;
|
||||
|
||||
/**
|
||||
* Index of all registered "short" flag descriptions (flags like
|
||||
* `-s`).
|
||||
*/
|
||||
std::map<char, Flag::ptr> shortFlags;
|
||||
|
||||
/**
|
||||
* Process a single flag and its arguments, pulling from an iterator
|
||||
* of raw CLI args as needed.
|
||||
*/
|
||||
virtual bool processFlag(Strings::iterator & pos, Strings::iterator end);
|
||||
|
||||
/* Positional arguments. */
|
||||
/**
|
||||
* Description of positional arguments
|
||||
*
|
||||
* These are arguments that do not start with a `-`, and for which
|
||||
* the order does matter.
|
||||
*/
|
||||
struct ExpectedArg
|
||||
{
|
||||
std::string label;
|
||||
bool optional = false;
|
||||
Handler handler;
|
||||
std::function<void(size_t, std::string_view)> completer;
|
||||
CompleterClosure completer;
|
||||
};
|
||||
|
||||
/**
|
||||
* Queue of expected positional argument forms.
|
||||
*
|
||||
* Positional arugment descriptions are inserted on the back.
|
||||
*
|
||||
* As positional arguments are passed, these are popped from the
|
||||
* front, until there are hopefully none left as all args that were
|
||||
* expected in fact were passed.
|
||||
*/
|
||||
std::list<ExpectedArg> expectedArgs;
|
||||
|
||||
/**
|
||||
* Process some positional arugments
|
||||
*
|
||||
* @param finish: We have parsed everything else, and these are the only
|
||||
* arguments left. Used because we accumulate some "pending args" we might
|
||||
* have left over.
|
||||
*/
|
||||
virtual bool processArgs(const Strings & args, bool finish);
|
||||
|
||||
virtual Strings::iterator rewriteArgs(Strings & args, Strings::iterator pos)
|
||||
|
@ -159,13 +228,6 @@ protected:
|
|||
*/
|
||||
virtual void initialFlagsProcessed() {}
|
||||
|
||||
/**
|
||||
* Called after the command line has been processed if we need to generate
|
||||
* completions. Useful for commands that need to know the whole command line
|
||||
* in order to know what completions to generate.
|
||||
*/
|
||||
virtual void completionHook() { }
|
||||
|
||||
public:
|
||||
|
||||
void addFlag(Flag && flag);
|
||||
|
@ -200,21 +262,30 @@ public:
|
|||
});
|
||||
}
|
||||
|
||||
static CompleterFun completePath;
|
||||
|
||||
static CompleterFun completeDir;
|
||||
|
||||
virtual nlohmann::json toJSON();
|
||||
|
||||
friend class MultiCommand;
|
||||
|
||||
/**
|
||||
* The parent command, used if this is a subcommand.
|
||||
*
|
||||
* Invariant: An Args with a null parent must also be a RootArgs
|
||||
*
|
||||
* \todo this would probably be better in the CommandClass.
|
||||
* getRoot() could be an abstract method that peels off at most one
|
||||
* layer before recuring.
|
||||
*/
|
||||
MultiCommand * parent = nullptr;
|
||||
|
||||
private:
|
||||
|
||||
/**
|
||||
* Experimental features needed when parsing args. These are checked
|
||||
* after flag parsing is completed in order to support enabling
|
||||
* experimental features coming after the flag that needs the
|
||||
* experimental feature.
|
||||
* Traverse parent pointers until we find the \ref RootArgs "root
|
||||
* arguments" object.
|
||||
*/
|
||||
std::set<ExperimentalFeature> flagExperimentalFeatures;
|
||||
RootArgs & getRoot();
|
||||
};
|
||||
|
||||
/**
|
||||
|
@ -236,7 +307,7 @@ struct Command : virtual public Args
|
|||
|
||||
static constexpr Category catDefault = 0;
|
||||
|
||||
virtual std::optional<ExperimentalFeature> experimentalFeature ();
|
||||
virtual std::optional<ExperimentalFeature> experimentalFeature();
|
||||
|
||||
virtual Category category() { return catDefault; }
|
||||
};
|
||||
|
@ -265,8 +336,6 @@ public:
|
|||
|
||||
bool processArgs(const Strings & args, bool finish) override;
|
||||
|
||||
void completionHook() override;
|
||||
|
||||
nlohmann::json toJSON() override;
|
||||
};
|
||||
|
||||
|
@ -278,21 +347,40 @@ struct Completion {
|
|||
|
||||
bool operator<(const Completion & other) const;
|
||||
};
|
||||
class Completions : public std::set<Completion> {
|
||||
|
||||
/**
|
||||
* The abstract interface for completions callbacks
|
||||
*
|
||||
* The idea is to restrict the callback so it can only add additional
|
||||
* completions to the collection, or set the completion type. By making
|
||||
* it go through this interface, the callback cannot make any other
|
||||
* changes, or even view the completions / completion type that have
|
||||
* been set so far.
|
||||
*/
|
||||
class AddCompletions
|
||||
{
|
||||
public:
|
||||
void add(std::string completion, std::string description = "");
|
||||
|
||||
/**
|
||||
* The type of completion we are collecting.
|
||||
*/
|
||||
enum class Type {
|
||||
Normal,
|
||||
Filenames,
|
||||
Attrs,
|
||||
};
|
||||
|
||||
/**
|
||||
* Set the type of the completions being collected
|
||||
*
|
||||
* \todo it should not be possible to change the type after it has been set.
|
||||
*/
|
||||
virtual void setType(Type type) = 0;
|
||||
|
||||
/**
|
||||
* Add a single completion to the collection
|
||||
*/
|
||||
virtual void add(std::string completion, std::string description = "") = 0;
|
||||
};
|
||||
extern std::shared_ptr<Completions> completions;
|
||||
|
||||
enum CompletionType {
|
||||
ctNormal,
|
||||
ctFilenames,
|
||||
ctAttrs
|
||||
};
|
||||
extern CompletionType completionType;
|
||||
|
||||
void completePath(size_t, std::string_view prefix);
|
||||
|
||||
void completeDir(size_t, std::string_view prefix);
|
||||
|
||||
}
|
||||
|
|
src/libutil/args/root.hh (new file, 72 lines)

@@ -0,0 +1,72 @@
#pragma once

#include "args.hh"

namespace nix {

/**
 * The concrete implementation of a collection of completions.
 *
 * This is exposed so that the main entry point can print out the
 * collected completions.
 */
struct Completions final : AddCompletions
{
    std::set<Completion> completions;
    Type type = Type::Normal;

    void setType(Type type) override;
    void add(std::string completion, std::string description = "") override;
};

/**
 * The outermost Args object. This is the one we will actually parse a command
 * line with, whereas the inner ones (if they exists) are subcommands (and this
 * is also a MultiCommand or something like it).
 *
 * This Args contains completions state shared between it and all of its
 * descendent Args.
 */
class RootArgs : virtual public Args
{
public:
    /** Parse the command line, throwing a UsageError if something goes
     * wrong.
     */
    void parseCmdline(const Strings & cmdline);

    std::shared_ptr<Completions> completions;

protected:

    friend class Args;

    /**
     * A pointer to the completion and its two arguments; a thunk;
     */
    struct DeferredCompletion {
        const CompleterClosure & completer;
        size_t n;
        std::string prefix;
    };

    /**
     * Completions are run after all args and flags are parsed, so completions
     * of earlier arguments can benefit from later arguments.
     */
    std::vector<DeferredCompletion> deferredCompletions;

    /**
     * Experimental features needed when parsing args. These are checked
     * after flag parsing is completed in order to support enabling
     * experimental features coming after the flag that needs the
     * experimental feature.
     */
    std::set<ExperimentalFeature> flagExperimentalFeatures;

private:

    std::optional<std::string> needsCompletion(std::string_view s);
};

}
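As an editorial illustration of the deferred-completion flow (not part of the diff): a completer now receives the AddCompletions interface explicitly instead of mutating a global `completions` object, and RootArgs runs it only after the whole command line has been parsed. The flag below is a hypothetical sketch; only Args::addFlag, Args::Flag, AddCompletions::add and hasPrefix are taken from the code shown in this commit.

// Hypothetical helper registering a --format flag whose argument completes
// to one of a fixed set of names, using the new CompleterFun signature:
//     void (AddCompletions &, size_t, std::string_view)
static void addFormatFlag(Args & args, std::string & format)
{
    args.addFlag({
        .longName = "format",
        .description = "Output format.",
        .labels = {"format"},
        .handler = {&format},   // assumed Handler(std::string *) shorthand
        .completer = [](AddCompletions & completions, size_t, std::string_view prefix) {
            for (std::string_view name : {"json", "plain", "xml"})
                if (hasPrefix(name, prefix))
                    completions.add(std::string(name));
        },
    });
}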
@@ -1,6 +1,49 @@
#pragma once
///@file

#define DECLARE_ONE_CMP(PRE, QUAL, COMPARATOR, MY_TYPE) \
    PRE bool QUAL operator COMPARATOR(const MY_TYPE & other) const;
#define DECLARE_EQUAL(prefix, qualification, my_type) \
    DECLARE_ONE_CMP(prefix, qualification, ==, my_type)
#define DECLARE_LEQ(prefix, qualification, my_type) \
    DECLARE_ONE_CMP(prefix, qualification, <, my_type)
#define DECLARE_NEQ(prefix, qualification, my_type) \
    DECLARE_ONE_CMP(prefix, qualification, !=, my_type)

#define GENERATE_ONE_CMP(PRE, QUAL, COMPARATOR, MY_TYPE, ...) \
    PRE bool QUAL operator COMPARATOR(const MY_TYPE & other) const { \
        __VA_OPT__(const MY_TYPE * me = this;) \
        auto fields1 = std::make_tuple( __VA_ARGS__ ); \
        __VA_OPT__(me = &other;) \
        auto fields2 = std::make_tuple( __VA_ARGS__ ); \
        return fields1 COMPARATOR fields2; \
    }
#define GENERATE_EQUAL(prefix, qualification, my_type, args...) \
    GENERATE_ONE_CMP(prefix, qualification, ==, my_type, args)
#define GENERATE_LEQ(prefix, qualification, my_type, args...) \
    GENERATE_ONE_CMP(prefix, qualification, <, my_type, args)
#define GENERATE_NEQ(prefix, qualification, my_type, args...) \
    GENERATE_ONE_CMP(prefix, qualification, !=, my_type, args)

/**
 * Declare comparison methods without defining them.
 */
#define DECLARE_CMP(my_type) \
    DECLARE_EQUAL(,,my_type) \
    DECLARE_LEQ(,,my_type) \
    DECLARE_NEQ(,,my_type)

/**
 * @param prefix This is for something before each declaration like
 * `template<classname Foo>`.
 *
 * @param my_type the type are defining operators for.
 */
#define DECLARE_CMP_EXT(prefix, qualification, my_type) \
    DECLARE_EQUAL(prefix, qualification, my_type) \
    DECLARE_LEQ(prefix, qualification, my_type) \
    DECLARE_NEQ(prefix, qualification, my_type)

/**
 * Awful hacky generation of the comparison operators by doing a lexicographic
 * comparison between the choosen fields.

@@ -17,18 +60,18 @@
 * }
 * ```
 */
#define GENERATE_ONE_CMP(COMPARATOR, MY_TYPE, ...) \
    bool operator COMPARATOR(const MY_TYPE& other) const { \
        __VA_OPT__(const MY_TYPE* me = this;) \
        auto fields1 = std::make_tuple( __VA_ARGS__ ); \
        __VA_OPT__(me = &other;) \
        auto fields2 = std::make_tuple( __VA_ARGS__ ); \
        return fields1 COMPARATOR fields2; \
    }
#define GENERATE_EQUAL(args...) GENERATE_ONE_CMP(==, args)
#define GENERATE_LEQ(args...) GENERATE_ONE_CMP(<, args)
#define GENERATE_NEQ(args...) GENERATE_ONE_CMP(!=, args)
#define GENERATE_CMP(args...) \
    GENERATE_EQUAL(args) \
    GENERATE_LEQ(args) \
    GENERATE_NEQ(args)
    GENERATE_EQUAL(,,args) \
    GENERATE_LEQ(,,args) \
    GENERATE_NEQ(,,args)

/**
 * @param prefix This is for something before each declaration like
 * `template<classname Foo>`.
 *
 * @param my_type the type are defining operators for.
 */
#define GENERATE_CMP_EXT(prefix, my_type, args...) \
    GENERATE_EQUAL(prefix, my_type ::, my_type, args) \
    GENERATE_LEQ(prefix, my_type ::, my_type, args) \
    GENERATE_NEQ(prefix, my_type ::, my_type, args)
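For orientation, an editorial example (not from the diff) of how these macros are consumed after the change: GENERATE_CMP lists the fields through the `me` pointer and now forwards to the prefix/qualification-aware GENERATE_ONE_CMP, producing ==, < and != that compare the fields lexicographically via std::make_tuple. The Point type is hypothetical.

#include <string>
#include <tuple>
// plus the comparison-macro header shown above (its file name is not part of this excerpt)

struct Point
{
    int x;
    std::string y;

    // Expands to operator==, operator< and operator!= comparing
    // std::make_tuple(me->x, me->y) for *this and for `other`.
    GENERATE_CMP(Point, me->x, me->y);
};

bool example()
{
    Point a{1, "apple"}, b{1, "banana"};
    return a < b && a != b;   // true: x ties, y decides lexicographically
}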
@ -9,6 +9,10 @@
|
|||
|
||||
namespace nix {
|
||||
|
||||
Config::Config(StringMap initials)
|
||||
: AbstractConfig(std::move(initials))
|
||||
{ }
|
||||
|
||||
bool Config::set(const std::string & name, const std::string & value)
|
||||
{
|
||||
bool append = false;
|
||||
|
@ -29,9 +33,9 @@ bool Config::set(const std::string & name, const std::string & value)
|
|||
|
||||
void Config::addSetting(AbstractSetting * setting)
|
||||
{
|
||||
_settings.emplace(setting->name, Config::SettingData(false, setting));
|
||||
_settings.emplace(setting->name, Config::SettingData{false, setting});
|
||||
for (auto & alias : setting->aliases)
|
||||
_settings.emplace(alias, Config::SettingData(true, setting));
|
||||
_settings.emplace(alias, Config::SettingData{true, setting});
|
||||
|
||||
bool set = false;
|
||||
|
||||
|
@ -59,6 +63,10 @@ void Config::addSetting(AbstractSetting * setting)
|
|||
}
|
||||
}
|
||||
|
||||
AbstractConfig::AbstractConfig(StringMap initials)
|
||||
: unknownSettings(std::move(initials))
|
||||
{ }
|
||||
|
||||
void AbstractConfig::warnUnknownSettings()
|
||||
{
|
||||
for (auto & s : unknownSettings)
|
||||
|
@ -68,6 +76,7 @@ void AbstractConfig::warnUnknownSettings()
|
|||
void AbstractConfig::reapplyUnknownSettings()
|
||||
{
|
||||
auto unknownSettings2 = std::move(unknownSettings);
|
||||
unknownSettings = {};
|
||||
for (auto & s : unknownSettings2)
|
||||
set(s.first, s.second);
|
||||
}
|
||||
|
@ -198,6 +207,13 @@ AbstractSetting::AbstractSetting(
|
|||
{
|
||||
}
|
||||
|
||||
AbstractSetting::~AbstractSetting()
|
||||
{
|
||||
// Check against a gcc miscompilation causing our constructor
|
||||
// not to run (https://gcc.gnu.org/bugzilla/show_bug.cgi?id=80431).
|
||||
assert(created == 123);
|
||||
}
|
||||
|
||||
nlohmann::json AbstractSetting::toJSON()
|
||||
{
|
||||
return nlohmann::json(toJSONObject());
|
||||
|
@ -219,6 +235,9 @@ void AbstractSetting::convertToArg(Args & args, const std::string & category)
|
|||
{
|
||||
}
|
||||
|
||||
|
||||
bool AbstractSetting::isOverridden() const { return overridden; }
|
||||
|
||||
template<> std::string BaseSetting<std::string>::parse(const std::string & str) const
|
||||
{
|
||||
return str;
|
||||
|
@ -384,11 +403,33 @@ static Path parsePath(const AbstractSetting & s, const std::string & str)
|
|||
return canonPath(str);
|
||||
}
|
||||
|
||||
PathSetting::PathSetting(Config * options,
|
||||
const Path & def,
|
||||
const std::string & name,
|
||||
const std::string & description,
|
||||
const std::set<std::string> & aliases)
|
||||
: BaseSetting<Path>(def, true, name, description, aliases)
|
||||
{
|
||||
options->addSetting(this);
|
||||
}
|
||||
|
||||
Path PathSetting::parse(const std::string & str) const
|
||||
{
|
||||
return parsePath(*this, str);
|
||||
}
|
||||
|
||||
|
||||
OptionalPathSetting::OptionalPathSetting(Config * options,
|
||||
const std::optional<Path> & def,
|
||||
const std::string & name,
|
||||
const std::string & description,
|
||||
const std::set<std::string> & aliases)
|
||||
: BaseSetting<std::optional<Path>>(def, true, name, description, aliases)
|
||||
{
|
||||
options->addSetting(this);
|
||||
}
|
||||
|
||||
|
||||
std::optional<Path> OptionalPathSetting::parse(const std::string & str) const
|
||||
{
|
||||
if (str == "")
|
||||
|
@ -397,6 +438,11 @@ std::optional<Path> OptionalPathSetting::parse(const std::string & str) const
|
|||
return parsePath(*this, str);
|
||||
}
|
||||
|
||||
void OptionalPathSetting::operator =(const std::optional<Path> & v)
|
||||
{
|
||||
this->assign(v);
|
||||
}
|
||||
|
||||
bool GlobalConfig::set(const std::string & name, const std::string & value)
|
||||
{
|
||||
for (auto & config : *configRegistrations)
|
||||
|
|
|
@ -36,8 +36,8 @@ namespace nix {
|
|||
*
|
||||
* std::map<std::string, Config::SettingInfo> settings;
|
||||
* config.getSettings(settings);
|
||||
* config["system"].description == "the current system"
|
||||
* config["system"].value == "x86_64-linux"
|
||||
* settings["system"].description == "the current system"
|
||||
* settings["system"].value == "x86_64-linux"
|
||||
*
|
||||
*
|
||||
* The above retrieves all currently known settings from the `Config` object
|
||||
|
@ -52,9 +52,7 @@ class AbstractConfig
|
|||
protected:
|
||||
StringMap unknownSettings;
|
||||
|
||||
AbstractConfig(const StringMap & initials = {})
|
||||
: unknownSettings(initials)
|
||||
{ }
|
||||
AbstractConfig(StringMap initials = {});
|
||||
|
||||
public:
|
||||
|
||||
|
@ -150,9 +148,6 @@ public:
|
|||
{
|
||||
bool isAlias;
|
||||
AbstractSetting * setting;
|
||||
SettingData(bool isAlias, AbstractSetting * setting)
|
||||
: isAlias(isAlias), setting(setting)
|
||||
{ }
|
||||
};
|
||||
|
||||
typedef std::map<std::string, SettingData> Settings;
|
||||
|
@ -163,9 +158,7 @@ private:
|
|||
|
||||
public:
|
||||
|
||||
Config(const StringMap & initials = {})
|
||||
: AbstractConfig(initials)
|
||||
{ }
|
||||
Config(StringMap initials = {});
|
||||
|
||||
bool set(const std::string & name, const std::string & value) override;
|
||||
|
||||
|
@ -206,12 +199,7 @@ protected:
|
|||
const std::set<std::string> & aliases,
|
||||
std::optional<ExperimentalFeature> experimentalFeature = std::nullopt);
|
||||
|
||||
virtual ~AbstractSetting()
|
||||
{
|
||||
// Check against a gcc miscompilation causing our constructor
|
||||
// not to run (https://gcc.gnu.org/bugzilla/show_bug.cgi?id=80431).
|
||||
assert(created == 123);
|
||||
}
|
||||
virtual ~AbstractSetting();
|
||||
|
||||
virtual void set(const std::string & value, bool append = false) = 0;
|
||||
|
||||
|
@ -229,7 +217,7 @@ protected:
|
|||
|
||||
virtual void convertToArg(Args & args, const std::string & category);
|
||||
|
||||
bool isOverridden() const { return overridden; }
|
||||
bool isOverridden() const;
|
||||
};
|
||||
|
||||
/**
|
||||
|
@ -324,8 +312,7 @@ public:
|
|||
template<typename T>
|
||||
std::ostream & operator <<(std::ostream & str, const BaseSetting<T> & opt)
|
||||
{
|
||||
str << (const T &) opt;
|
||||
return str;
|
||||
return str << static_cast<const T &>(opt);
|
||||
}
|
||||
|
||||
template<typename T>
|
||||
|
@ -365,11 +352,7 @@ public:
|
|||
const Path & def,
|
||||
const std::string & name,
|
||||
const std::string & description,
|
||||
const std::set<std::string> & aliases = {})
|
||||
: BaseSetting<Path>(def, true, name, description, aliases)
|
||||
{
|
||||
options->addSetting(this);
|
||||
}
|
||||
const std::set<std::string> & aliases = {});
|
||||
|
||||
Path parse(const std::string & str) const override;
|
||||
|
||||
|
@ -391,15 +374,11 @@ public:
|
|||
const std::optional<Path> & def,
|
||||
const std::string & name,
|
||||
const std::string & description,
|
||||
const std::set<std::string> & aliases = {})
|
||||
: BaseSetting<std::optional<Path>>(def, true, name, description, aliases)
|
||||
{
|
||||
options->addSetting(this);
|
||||
}
|
||||
const std::set<std::string> & aliases = {});
|
||||
|
||||
std::optional<Path> parse(const std::string & str) const override;
|
||||
|
||||
void operator =(const std::optional<Path> & v) { this->assign(v); }
|
||||
void operator =(const std::optional<Path> & v);
|
||||
};
|
||||
|
||||
struct GlobalConfig : public AbstractConfig
|
||||
|
|
|
@ -14,6 +14,11 @@ void BaseError::addTrace(std::shared_ptr<AbstractPos> && e, hintformat hint, boo
|
|||
err.traces.push_front(Trace { .pos = std::move(e), .hint = hint, .frame = frame });
|
||||
}
|
||||
|
||||
void throwExceptionSelfCheck(){
|
||||
// This is meant to be caught in initLibUtil()
|
||||
throw SysError("C++ exception handling is broken. This would appear to be a problem with the way Nix was compiled and/or linked and/or loaded.");
|
||||
}
|
||||
|
||||
// c++ std::exception descendants must have a 'const char* what()' function.
|
||||
// This stringifies the error and caches it for use by what(), or similarly by msg().
|
||||
const std::string & BaseError::calcWhat() const
|
||||
|
@ -150,6 +155,36 @@ static std::string indent(std::string_view indentFirst, std::string_view indentR
|
|||
return res;
|
||||
}
|
||||
|
||||
/**
|
||||
* A development aid for finding missing positions, to improve error messages. Example use:
|
||||
*
|
||||
* NIX_DEVELOPER_SHOW_UNKNOWN_LOCATIONS=1 _NIX_TEST_ACCEPT=1 make tests/lang.sh.test
|
||||
* git diff -U20 tests
|
||||
*
|
||||
*/
|
||||
static bool printUnknownLocations = getEnv("_NIX_DEVELOPER_SHOW_UNKNOWN_LOCATIONS").has_value();
|
||||
|
||||
/**
|
||||
* Print a position, if it is known.
|
||||
*
|
||||
* @return true if a position was printed.
|
||||
*/
|
||||
static bool printPosMaybe(std::ostream & oss, std::string_view indent, const std::shared_ptr<AbstractPos> & pos) {
|
||||
bool hasPos = pos && *pos;
|
||||
if (hasPos) {
|
||||
oss << "\n" << indent << ANSI_BLUE << "at " ANSI_WARNING << *pos << ANSI_NORMAL << ":";
|
||||
|
||||
if (auto loc = pos->getCodeLines()) {
|
||||
oss << "\n";
|
||||
printCodeLines(oss, "", *pos, *loc);
|
||||
oss << "\n";
|
||||
}
|
||||
} else if (printUnknownLocations) {
|
||||
oss << "\n" << indent << ANSI_BLUE << "at " ANSI_RED << "UNKNOWN LOCATION" << ANSI_NORMAL << "\n";
|
||||
}
|
||||
return hasPos;
|
||||
}
|
||||
|
||||
std::ostream & showErrorInfo(std::ostream & out, const ErrorInfo & einfo, bool showTrace)
|
||||
{
|
||||
std::string prefix;
|
||||
|
@ -198,8 +233,6 @@ std::ostream & showErrorInfo(std::ostream & out, const ErrorInfo & einfo, bool s
|
|||
|
||||
std::ostringstream oss;
|
||||
|
||||
auto noSource = ANSI_ITALIC " (source not available)" ANSI_NORMAL "\n";
|
||||
|
||||
/*
|
||||
* Traces
|
||||
* ------
|
||||
|
@ -315,34 +348,15 @@ std::ostream & showErrorInfo(std::ostream & out, const ErrorInfo & einfo, bool s
|
|||
|
||||
oss << "\n" << "… " << trace.hint.str() << "\n";
|
||||
|
||||
if (trace.pos) {
|
||||
if (printPosMaybe(oss, ellipsisIndent, trace.pos))
|
||||
count++;
|
||||
|
||||
oss << "\n" << ellipsisIndent << ANSI_BLUE << "at " ANSI_WARNING << *trace.pos << ANSI_NORMAL << ":";
|
||||
|
||||
if (auto loc = trace.pos->getCodeLines()) {
|
||||
oss << "\n";
|
||||
printCodeLines(oss, "", *trace.pos, *loc);
|
||||
oss << "\n";
|
||||
} else
|
||||
oss << noSource;
|
||||
}
|
||||
}
|
||||
oss << "\n" << prefix;
|
||||
}
|
||||
|
||||
oss << einfo.msg << "\n";
|
||||
|
||||
if (einfo.errPos) {
|
||||
oss << "\n" << ANSI_BLUE << "at " ANSI_WARNING << *einfo.errPos << ANSI_NORMAL << ":";
|
||||
|
||||
if (auto loc = einfo.errPos->getCodeLines()) {
|
||||
oss << "\n";
|
||||
printCodeLines(oss, "", *einfo.errPos, *loc);
|
||||
oss << "\n";
|
||||
} else
|
||||
oss << noSource;
|
||||
}
|
||||
printPosMaybe(oss, "", einfo.errPos);
|
||||
|
||||
auto suggestions = einfo.suggestions.trim();
|
||||
if (!suggestions.suggestions.empty()) {
|
||||
|
|
|
@@ -70,6 +70,13 @@ struct AbstractPos
    uint32_t line = 0;
    uint32_t column = 0;

    /**
     * An AbstractPos may be a "null object", representing an unknown position.
     *
     * Return true if this position is known.
     */
    inline operator bool() const { return line != 0; };

    /**
     * Return the contents of the source file.
     */

@@ -214,4 +221,8 @@
    }
};

/** Throw an exception for the purpose of checking that exception handling works; see 'initLibUtil()'.
 */
void throwExceptionSelfCheck();

}
@ -12,7 +12,7 @@ struct ExperimentalFeatureDetails
|
|||
std::string_view description;
|
||||
};
|
||||
|
||||
constexpr std::array<ExperimentalFeatureDetails, 15> xpFeatureDetails = {{
|
||||
constexpr std::array<ExperimentalFeatureDetails, 16> xpFeatureDetails = {{
|
||||
{
|
||||
.tag = Xp::CaDerivations,
|
||||
.name = "ca-derivations",
|
||||
|
@ -163,6 +163,8 @@ constexpr std::array<ExperimentalFeatureDetails, 15> xpFeatureDetails = {{
|
|||
.tag = Xp::ReplFlake,
|
||||
.name = "repl-flake",
|
||||
.description = R"(
|
||||
*Enabled with [`flakes`](#xp-feature-flakes) since 2.19*
|
||||
|
||||
Allow passing [installables](@docroot@/command-ref/new-cli/nix.md#installables) to `nix repl`, making its interface consistent with the other experimental commands.
|
||||
)",
|
||||
},
|
||||
|
@ -171,7 +173,7 @@ constexpr std::array<ExperimentalFeatureDetails, 15> xpFeatureDetails = {{
|
|||
.name = "auto-allocate-uids",
|
||||
.description = R"(
|
||||
Allows Nix to automatically pick UIDs for builds, rather than creating
|
||||
`nixbld*` user accounts. See the [`auto-allocate-uids`](#conf-auto-allocate-uids) setting for details.
|
||||
`nixbld*` user accounts. See the [`auto-allocate-uids`](@docroot@/command-ref/conf-file.md#conf-auto-allocate-uids) setting for details.
|
||||
)",
|
||||
},
|
||||
{
|
||||
|
@ -179,7 +181,7 @@ constexpr std::array<ExperimentalFeatureDetails, 15> xpFeatureDetails = {{
|
|||
.name = "cgroups",
|
||||
.description = R"(
|
||||
Allows Nix to execute builds inside cgroups. See
|
||||
the [`use-cgroups`](#conf-use-cgroups) setting for details.
|
||||
the [`use-cgroups`](@docroot@/command-ref/conf-file.md#conf-use-cgroups) setting for details.
|
||||
)",
|
||||
},
|
||||
{
|
||||
|
@ -226,6 +228,13 @@ constexpr std::array<ExperimentalFeatureDetails, 15> xpFeatureDetails = {{
|
|||
Allow the use of [local overlay store](@docroot@/command-ref/new-cli/nix3-help-stores.md#local-overlay-store).
|
||||
)",
|
||||
},
|
||||
{
|
||||
.tag = Xp::ConfigurableImpureEnv,
|
||||
.name = "configurable-impure-env",
|
||||
.description = R"(
|
||||
Allow the use of the [impure-env](@docroot@/command-ref/conf-file.md#conf-impure-env) setting.
|
||||
)",
|
||||
}
|
||||
}};
|
||||
|
||||
static_assert(
|
||||
|
@ -279,7 +288,7 @@ std::set<ExperimentalFeature> parseFeatures(const std::set<std::string> & rawFea
|
|||
}
|
||||
|
||||
MissingExperimentalFeature::MissingExperimentalFeature(ExperimentalFeature feature)
|
||||
: Error("experimental Nix feature '%1%' is disabled; use '--extra-experimental-features %1%' to override", showExperimentalFeature(feature))
|
||||
: Error("experimental Nix feature '%1%' is disabled; add '--extra-experimental-features %1%' to enable it", showExperimentalFeature(feature))
|
||||
, missingFeature(feature)
|
||||
{}
|
||||
|
||||
|
|
|
@@ -32,6 +32,7 @@ enum struct ExperimentalFeature
    ParseTomlTimestamps,
    ReadOnlyLocalStore,
    LocalOverlayStore,
    ConfigurableImpureEnv,
};

/**
src/libutil/fs-sink.cc (new file, 77 lines)

@@ -0,0 +1,77 @@
#include <fcntl.h>

#include "config.hh"
#include "fs-sink.hh"

namespace nix {

struct RestoreSinkSettings : Config
{
    Setting<bool> preallocateContents{this, false, "preallocate-contents",
        "Whether to preallocate files when writing objects with known size."};
};

static RestoreSinkSettings restoreSinkSettings;

static GlobalConfig::Register r1(&restoreSinkSettings);


void RestoreSink::createDirectory(const Path & path)
{
    Path p = dstPath + path;
    if (mkdir(p.c_str(), 0777) == -1)
        throw SysError("creating directory '%1%'", p);
};

void RestoreSink::createRegularFile(const Path & path)
{
    Path p = dstPath + path;
    fd = open(p.c_str(), O_CREAT | O_EXCL | O_WRONLY | O_CLOEXEC, 0666);
    if (!fd) throw SysError("creating file '%1%'", p);
}

void RestoreSink::closeRegularFile()
{
    /* Call close explicitly to make sure the error is checked */
    fd.close();
}

void RestoreSink::isExecutable()
{
    struct stat st;
    if (fstat(fd.get(), &st) == -1)
        throw SysError("fstat");
    if (fchmod(fd.get(), st.st_mode | (S_IXUSR | S_IXGRP | S_IXOTH)) == -1)
        throw SysError("fchmod");
}

void RestoreSink::preallocateContents(uint64_t len)
{
    if (!restoreSinkSettings.preallocateContents)
        return;

#if HAVE_POSIX_FALLOCATE
    if (len) {
        errno = posix_fallocate(fd.get(), 0, len);
        /* Note that EINVAL may indicate that the underlying
           filesystem doesn't support preallocation (e.g. on
           OpenSolaris). Since preallocation is just an
           optimisation, ignore it. */
        if (errno && errno != EINVAL && errno != EOPNOTSUPP && errno != ENOSYS)
            throw SysError("preallocating file of %1% bytes", len);
    }
#endif
}

void RestoreSink::receiveContents(std::string_view data)
{
    writeFull(fd.get(), data);
}

void RestoreSink::createSymlink(const Path & path, const std::string & target)
{
    Path p = dstPath + path;
    nix::createSymlink(target, p);
}

}
src/libutil/fs-sink.hh (new file, 42 lines)

@@ -0,0 +1,42 @@
#pragma once
///@file

#include "types.hh"
#include "serialise.hh"

namespace nix {

/**
 * \todo Fix this API, it sucks.
 */
struct ParseSink
{
    virtual void createDirectory(const Path & path) { };

    virtual void createRegularFile(const Path & path) { };
    virtual void closeRegularFile() { };
    virtual void isExecutable() { };
    virtual void preallocateContents(uint64_t size) { };
    virtual void receiveContents(std::string_view data) { };

    virtual void createSymlink(const Path & path, const std::string & target) { };
};

struct RestoreSink : ParseSink
{
    Path dstPath;
    AutoCloseFD fd;


    void createDirectory(const Path & path) override;

    void createRegularFile(const Path & path) override;
    void closeRegularFile() override;
    void isExecutable() override;
    void preallocateContents(uint64_t size) override;
    void receiveContents(std::string_view data) override;

    void createSymlink(const Path & path, const std::string & target) override;
};

}
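An editorial sketch (not part of the commit) of how the relocated ParseSink interface is consumed: every method has a do-nothing default, so a sink can override only the events it cares about. StatsSink is a hypothetical name; parseDump(ParseSink &, Source &) is the NAR parser entry point visible in the archive.cc hunk above.

#include "archive.hh"
#include "fs-sink.hh"

using namespace nix;

// Hypothetical sink: instead of materialising files on disk like RestoreSink,
// it only tallies what the NAR stream contains.
struct StatsSink : ParseSink
{
    size_t regularFiles = 0, symlinks = 0;
    uint64_t bytes = 0;

    void createRegularFile(const Path &) override { ++regularFiles; }
    void receiveContents(std::string_view data) override { bytes += data.size(); }
    void createSymlink(const Path &, const std::string &) override { ++symlinks; }
};

// Assumed usage, given a Source positioned at the start of a NAR:
//     StatsSink sink;
//     parseDump(sink, source);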
|
|
@ -111,26 +111,26 @@ static std::string printHash32(const Hash & hash)
|
|||
std::string printHash16or32(const Hash & hash)
|
||||
{
|
||||
assert(hash.type);
|
||||
return hash.to_string(hash.type == htMD5 ? Base16 : Base32, false);
|
||||
return hash.to_string(hash.type == htMD5 ? HashFormat::Base16 : HashFormat::Base32, false);
|
||||
}
|
||||
|
||||
|
||||
std::string Hash::to_string(Base base, bool includeType) const
|
||||
std::string Hash::to_string(HashFormat hashFormat, bool includeType) const
|
||||
{
|
||||
std::string s;
|
||||
if (base == SRI || includeType) {
|
||||
if (hashFormat == HashFormat::SRI || includeType) {
|
||||
s += printHashType(type);
|
||||
s += base == SRI ? '-' : ':';
|
||||
s += hashFormat == HashFormat::SRI ? '-' : ':';
|
||||
}
|
||||
switch (base) {
|
||||
case Base16:
|
||||
switch (hashFormat) {
|
||||
case HashFormat::Base16:
|
||||
s += printHash16(*this);
|
||||
break;
|
||||
case Base32:
|
||||
case HashFormat::Base32:
|
||||
s += printHash32(*this);
|
||||
break;
|
||||
case Base64:
|
||||
case SRI:
|
||||
case HashFormat::Base64:
|
||||
case HashFormat::SRI:
|
||||
s += base64Encode(std::string_view((const char *) hash, hashSize));
|
||||
break;
|
||||
}
|
||||
|
@ -267,7 +267,7 @@ Hash newHashAllowEmpty(std::string_view hashStr, std::optional<HashType> ht)
|
|||
if (!ht)
|
||||
throw BadHash("empty hash requires explicit hash type");
|
||||
Hash h(*ht);
|
||||
warn("found empty hash, assuming '%s'", h.to_string(SRI, true));
|
||||
warn("found empty hash, assuming '%s'", h.to_string(HashFormat::SRI, true));
|
||||
return h;
|
||||
} else
|
||||
return Hash::parseAny(hashStr, ht);
|
||||
|
@ -386,13 +386,48 @@ Hash compressHash(const Hash & hash, unsigned int newSize)
|
|||
}
|
||||
|
||||
|
||||
std::optional<HashFormat> parseHashFormatOpt(std::string_view hashFormatName)
|
||||
{
|
||||
if (hashFormatName == "base16") return HashFormat::Base16;
|
||||
if (hashFormatName == "base32") return HashFormat::Base32;
|
||||
if (hashFormatName == "base64") return HashFormat::Base64;
|
||||
if (hashFormatName == "sri") return HashFormat::SRI;
|
||||
return std::nullopt;
|
||||
}
|
||||
|
||||
HashFormat parseHashFormat(std::string_view hashFormatName)
|
||||
{
|
||||
auto opt_f = parseHashFormatOpt(hashFormatName);
|
||||
if (opt_f)
|
||||
return *opt_f;
|
||||
throw UsageError("unknown hash format '%1%', expect 'base16', 'base32', 'base64', or 'sri'", hashFormatName);
|
||||
}
|
||||
|
||||
std::string_view printHashFormat(HashFormat HashFormat)
|
||||
{
|
||||
switch (HashFormat) {
|
||||
case HashFormat::Base64:
|
||||
return "base64";
|
||||
case HashFormat::Base32:
|
||||
return "base32";
|
||||
case HashFormat::Base16:
|
||||
return "base16";
|
||||
case HashFormat::SRI:
|
||||
return "sri";
|
||||
default:
|
||||
// illegal hash base enum value internally, as opposed to external input
|
||||
// which should be validated with nice error message.
|
||||
assert(false);
|
||||
}
|
||||
}
|
||||
|
||||
std::optional<HashType> parseHashTypeOpt(std::string_view s)
|
||||
{
|
||||
if (s == "md5") return htMD5;
|
||||
else if (s == "sha1") return htSHA1;
|
||||
else if (s == "sha256") return htSHA256;
|
||||
else if (s == "sha512") return htSHA512;
|
||||
else return std::optional<HashType> {};
|
||||
if (s == "sha1") return htSHA1;
|
||||
if (s == "sha256") return htSHA256;
|
||||
if (s == "sha512") return htSHA512;
|
||||
return std::nullopt;
|
||||
}
|
||||
|
||||
HashType parseHashType(std::string_view s)
|
||||
|
@ -401,7 +436,7 @@ HashType parseHashType(std::string_view s)
|
|||
if (opt_h)
|
||||
return *opt_h;
|
||||
else
|
||||
throw UsageError("unknown hash algorithm '%1%'", s);
|
||||
throw UsageError("unknown hash algorithm '%1%', expect 'md5', 'sha1', 'sha256', or 'sha512'", s);
|
||||
}
|
||||
|
||||
std::string_view printHashType(HashType ht)
|
||||
|
|
|
@@ -23,7 +23,21 @@ extern std::set<std::string> hashTypes;

extern const std::string base32Chars;

enum Base : int { Base64, Base32, Base16, SRI };
/**
 * @brief Enumeration representing the hash formats.
 */
enum struct HashFormat : int {
    /// @brief Base 64 encoding.
    /// @see [IETF RFC 4648, section 4](https://datatracker.ietf.org/doc/html/rfc4648#section-4).
    Base64,
    /// @brief Nix-specific base-32 encoding. @see base32Chars
    Base32,
    /// @brief Lowercase hexadecimal encoding. @see base16Chars
    Base16,
    /// @brief "<hash algo>:<Base 64 hash>", format of the SRI integrity attribute.
    /// @see W3C recommendation [Subresource Integrity](https://www.w3.org/TR/SRI/).
    SRI
};


struct Hash
@@ -114,16 +128,16 @@ public:
     * or base-64. By default, this is prefixed by the hash type
     * (e.g. "sha256:").
     */
    std::string to_string(Base base, bool includeType) const;
    std::string to_string(HashFormat hashFormat, bool includeType) const;

    std::string gitRev() const
    {
        return to_string(Base16, false);
        return to_string(HashFormat::Base16, false);
    }

    std::string gitShortRev() const
    {
        return std::string(to_string(Base16, false), 0, 7);
        return std::string(to_string(HashFormat::Base16, false), 0, 7);
    }

    static Hash dummy;
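To illustrate the renamed API, here is a small sketch (not from the diff) that renders one hash in each supported format; it assumes hashString() from the same header:

    #include <iostream>
    #include "hash.hh"

    using namespace nix;

    void printAllFormats()
    {
        auto h = hashString(htSHA256, "abc");
        std::cout << h.to_string(HashFormat::Base16, true) << "\n";  // "sha256:ba7816bf..."
        std::cout << h.to_string(HashFormat::Base32, false) << "\n"; // Nix base-32 digest, no type prefix
        std::cout << h.to_string(HashFormat::Base64, true) << "\n";
        std::cout << h.to_string(HashFormat::SRI, true) << "\n";     // "sha256-...", SRI always carries the algorithm
    }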
@@ -145,13 +159,17 @@ std::string printHash16or32(const Hash & hash);
Hash hashString(HashType ht, std::string_view s);

/**
 * Compute the hash of the given file.
 * Compute the hash of the given file, hashing its contents directly.
 *
 * (Metadata, such as the executable permission bit, is ignored.)
 */
Hash hashFile(HashType ht, const Path & path);

/**
 * Compute the hash of the given path. The hash is defined as
 * (essentially) hashString(ht, dumpPath(path)).
 * Compute the hash of the given path, serializing as a Nix Archive and
 * then hashing that.
 *
 * The hash is defined as (essentially) hashString(ht, dumpPath(path)).
 */
typedef std::pair<Hash, uint64_t> HashResult;
HashResult hashPath(HashType ht, const Path & path,
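The distinction drawn in the updated comments can be summarized with a short sketch (a hypothetical helper; it assumes the trailing PathFilter parameter of hashPath() is defaulted, as the truncated declaration suggests):

    #include "hash.hh"

    using namespace nix;

    void hashDemo(const Path & p)
    {
        // Flat hash: only the file's bytes are hashed; metadata is ignored.
        Hash flat = hashFile(htSHA256, p);

        // NAR hash: the path is serialized as a Nix Archive (file type,
        // executable bit, directory structure) and that serialization is
        // hashed; the second member is the size of the NAR.
        auto [narHash, narSize] = hashPath(htSHA256, p);

        (void) flat; (void) narHash; (void) narSize;
    }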
@@ -163,6 +181,21 @@ HashResult hashPath(HashType ht, const Path & path,
 */
Hash compressHash(const Hash & hash, unsigned int newSize);

/**
 * Parse a string representing a hash format.
 */
HashFormat parseHashFormat(std::string_view hashFormatName);

/**
 * std::optional version of parseHashFormat that doesn't throw an error.
 */
std::optional<HashFormat> parseHashFormatOpt(std::string_view hashFormatName);

/**
 * The reverse of parseHashFormat.
 */
std::string_view printHashFormat(HashFormat hashFormat);

/**
 * Parse a string representing a hash type.
 */
@@ -6,8 +6,13 @@ libutil_DIR := $(d)

libutil_SOURCES := $(wildcard $(d)/*.cc)

libutil_CXXFLAGS += -I src/libutil

libutil_LDFLAGS += -pthread $(OPENSSL_LIBS) $(LIBBROTLI_LIBS) $(LIBARCHIVE_LIBS) $(BOOST_LDFLAGS) -lboost_context

$(foreach i, $(wildcard $(d)/args/*.hh), \
  $(eval $(call install-file-in, $(i), $(includedir)/nix/args, 0644)))

ifeq ($(HAVE_LIBCPUID), 1)
  libutil_LDFLAGS += -lcpuid
endif
@@ -67,7 +67,7 @@ public:
        case lvlWarn: c = '4'; break;
        case lvlNotice: case lvlInfo: c = '5'; break;
        case lvlTalkative: case lvlChatty: c = '6'; break;
        case lvlDebug: case lvlVomit: c = '7';
        case lvlDebug: case lvlVomit: c = '7'; break;
        default: c = '7'; break; // should not happen, and missing enum case is reported by -Werror=switch-enum
        }
        prefix = std::string("<") + c + ">";
@@ -220,8 +220,8 @@ struct JSONLogger : Logger {
        json["level"] = lvl;
        json["type"] = type;
        json["text"] = s;
        json["parent"] = parent;
        addFields(json, fields);
        // FIXME: handle parent
        write(json);
    }
src/libutil/posix-source-accessor.cc (new file, 86 lines)
@@ -0,0 +1,86 @@
#include "posix-source-accessor.hh"

namespace nix {

void PosixSourceAccessor::readFile(
    const CanonPath & path,
    Sink & sink,
    std::function<void(uint64_t)> sizeCallback)
{
    // FIXME: add O_NOFOLLOW since symlinks should be resolved by the
    // caller?
    AutoCloseFD fd = open(path.c_str(), O_RDONLY | O_CLOEXEC);
    if (!fd)
        throw SysError("opening file '%1%'", path);

    struct stat st;
    if (fstat(fd.get(), &st) == -1)
        throw SysError("statting file");

    sizeCallback(st.st_size);

    off_t left = st.st_size;

    std::vector<unsigned char> buf(64 * 1024);
    while (left) {
        checkInterrupt();
        ssize_t rd = read(fd.get(), buf.data(), (size_t) std::min(left, (off_t) buf.size()));
        if (rd == -1) {
            if (errno != EINTR)
                throw SysError("reading from file '%s'", showPath(path));
        }
        else if (rd == 0)
            throw SysError("unexpected end-of-file reading '%s'", showPath(path));
        else {
            assert(rd <= left);
            sink({(char *) buf.data(), (size_t) rd});
            left -= rd;
        }
    }
}

bool PosixSourceAccessor::pathExists(const CanonPath & path)
{
    return nix::pathExists(path.abs());
}

SourceAccessor::Stat PosixSourceAccessor::lstat(const CanonPath & path)
{
    auto st = nix::lstat(path.abs());
    mtime = std::max(mtime, st.st_mtime);
    return Stat {
        .type =
            S_ISREG(st.st_mode) ? tRegular :
            S_ISDIR(st.st_mode) ? tDirectory :
            S_ISLNK(st.st_mode) ? tSymlink :
            tMisc,
        .isExecutable = S_ISREG(st.st_mode) && st.st_mode & S_IXUSR
    };
}

SourceAccessor::DirEntries PosixSourceAccessor::readDirectory(const CanonPath & path)
{
    DirEntries res;
    for (auto & entry : nix::readDirectory(path.abs())) {
        std::optional<Type> type;
        switch (entry.type) {
        case DT_REG: type = Type::tRegular; break;
        case DT_LNK: type = Type::tSymlink; break;
        case DT_DIR: type = Type::tDirectory; break;
        }
        res.emplace(entry.name, type);
    }
    return res;
}

std::string PosixSourceAccessor::readLink(const CanonPath & path)
{
    return nix::readLink(path.abs());
}

std::optional<CanonPath> PosixSourceAccessor::getPhysicalPath(const CanonPath & path)
{
    return path;
}

}
34
src/libutil/posix-source-accessor.hh
Normal file
34
src/libutil/posix-source-accessor.hh
Normal file
|
@@ -0,0 +1,34 @@
#pragma once

#include "source-accessor.hh"

namespace nix {

/**
 * A source accessor that uses the Unix filesystem.
 */
struct PosixSourceAccessor : SourceAccessor
{
    /**
     * The most recent mtime seen by lstat(). This is a hack to
     * support dumpPathAndGetMtime(). Should remove this eventually.
     */
    time_t mtime = 0;

    void readFile(
        const CanonPath & path,
        Sink & sink,
        std::function<void(uint64_t)> sizeCallback) override;

    bool pathExists(const CanonPath & path) override;

    Stat lstat(const CanonPath & path) override;

    DirEntries readDirectory(const CanonPath & path) override;

    std::string readLink(const CanonPath & path) override;

    std::optional<CanonPath> getPhysicalPath(const CanonPath & path) override;
};

}
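A brief sketch of how this accessor might be used (illustrative; the path and variable names are made up):

    #include <iostream>
    #include "posix-source-accessor.hh"

    using namespace nix;

    void accessorDemo()
    {
        PosixSourceAccessor accessor;
        CanonPath p("/etc/hosts");

        // maybeLstat() avoids throwing when the path is missing.
        if (auto st = accessor.maybeLstat(p)) {
            // Read the whole file into a string.
            std::string contents = accessor.readFile(p);

            // Hash the NAR serialization of the path (SHA-256 by default).
            Hash narHash = accessor.hashPath(p);

            std::cout << contents.size() << " bytes, NAR hash "
                      << narHash.to_string(HashFormat::SRI, true) << "\n";
        }
    }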
src/libutil/source-accessor.cc (new file, 58 lines)
@@ -0,0 +1,58 @@
#include "source-accessor.hh"
#include "archive.hh"

namespace nix {

static std::atomic<size_t> nextNumber{0};

SourceAccessor::SourceAccessor()
    : number(++nextNumber)
{
}

std::string SourceAccessor::readFile(const CanonPath & path)
{
    StringSink sink;
    std::optional<uint64_t> size;
    readFile(path, sink, [&](uint64_t _size)
    {
        size = _size;
    });
    assert(size && *size == sink.s.size());
    return std::move(sink.s);
}

void SourceAccessor::readFile(
    const CanonPath & path,
    Sink & sink,
    std::function<void(uint64_t)> sizeCallback)
{
    auto s = readFile(path);
    sizeCallback(s.size());
    sink(s);
}

Hash SourceAccessor::hashPath(
    const CanonPath & path,
    PathFilter & filter,
    HashType ht)
{
    HashSink sink(ht);
    dumpPath(path, sink, filter);
    return sink.finish().first;
}

std::optional<SourceAccessor::Stat> SourceAccessor::maybeLstat(const CanonPath & path)
{
    // FIXME: merge these into one operation.
    if (!pathExists(path))
        return {};
    return lstat(path);
}

std::string SourceAccessor::showPath(const CanonPath & path)
{
    return path.abs();
}

}
src/libutil/source-accessor.hh (new file, 107 lines)
@@ -0,0 +1,107 @@
#pragma once

#include "canon-path.hh"
#include "hash.hh"

namespace nix {

struct Sink;

/**
 * A read-only filesystem abstraction. This is used by the Nix
 * evaluator and elsewhere for accessing sources in various
 * filesystem-like entities (such as the real filesystem, tarballs or
 * Git repositories).
 */
struct SourceAccessor
{
    const size_t number;

    SourceAccessor();

    virtual ~SourceAccessor()
    { }

    /**
     * Return the contents of a file as a string.
     */
    virtual std::string readFile(const CanonPath & path);

    /**
     * Write the contents of a file as a sink. `sizeCallback` must be
     * called with the size of the file before any data is written to
     * the sink.
     *
     * Note: subclasses of `SourceAccessor` need to implement at least
     * one of the `readFile()` variants.
     */
    virtual void readFile(
        const CanonPath & path,
        Sink & sink,
        std::function<void(uint64_t)> sizeCallback = [](uint64_t size){});

    virtual bool pathExists(const CanonPath & path) = 0;

    enum Type {
        tRegular, tSymlink, tDirectory,
        /**
            Any other node types that may be encountered on the file system, such as device nodes, sockets, named pipes, and possibly even more exotic things.

            Responsible for `"unknown"` from `builtins.readFileType "/dev/null"`.

            Unlike `DT_UNKNOWN`, this must not be used for deferring the lookup of types.
        */
        tMisc
    };

    struct Stat
    {
        Type type = tMisc;
        //uint64_t fileSize = 0; // regular files only
        bool isExecutable = false; // regular files only
    };

    virtual Stat lstat(const CanonPath & path) = 0;

    std::optional<Stat> maybeLstat(const CanonPath & path);

    typedef std::optional<Type> DirEntry;

    typedef std::map<std::string, DirEntry> DirEntries;

    virtual DirEntries readDirectory(const CanonPath & path) = 0;

    virtual std::string readLink(const CanonPath & path) = 0;

    virtual void dumpPath(
        const CanonPath & path,
        Sink & sink,
        PathFilter & filter = defaultPathFilter);

    Hash hashPath(
        const CanonPath & path,
        PathFilter & filter = defaultPathFilter,
        HashType ht = htSHA256);

    /**
     * Return a corresponding path in the root filesystem, if
     * possible. This is only possible for filesystems that are
     * materialized in the root filesystem.
     */
    virtual std::optional<CanonPath> getPhysicalPath(const CanonPath & path)
    { return std::nullopt; }

    bool operator == (const SourceAccessor & x) const
    {
        return number == x.number;
    }

    bool operator < (const SourceAccessor & x) const
    {
        return number < x.number;
    }

    virtual std::string showPath(const CanonPath & path);
};

}
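To make the pure-virtual surface concrete, here is a sketch of a custom accessor that serves a single in-memory file at /data (hypothetical, not part of the change; it assumes nix::Error and CanonPath::isRoot() from the existing utility headers). Per the comment on readFile(), a subclass only has to supply one of the two readFile() overloads:

    #include "source-accessor.hh"

    namespace nix {

    struct SingleFileAccessor : SourceAccessor
    {
        std::string contents;

        SingleFileAccessor(std::string s) : contents(std::move(s)) { }

        // Providing the string-returning overload is enough; the sink-based
        // overload falls back to it.
        std::string readFile(const CanonPath & path) override
        {
            if (path.abs() != "/data")
                throw Error("path '%s' does not exist", showPath(path));
            return contents;
        }

        bool pathExists(const CanonPath & path) override
        {
            return path.isRoot() || path.abs() == "/data";
        }

        Stat lstat(const CanonPath & path) override
        {
            if (path.isRoot()) return Stat { .type = tDirectory };
            if (path.abs() == "/data") return Stat { .type = tRegular };
            throw Error("path '%s' does not exist", showPath(path));
        }

        DirEntries readDirectory(const CanonPath & path) override
        {
            if (!path.isRoot()) return {};
            return {{"data", Type::tRegular}};
        }

        std::string readLink(const CanonPath & path) override
        {
            throw Error("path '%s' is not a symlink", showPath(path));
        }
    };

    }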
@@ -18,28 +18,28 @@ namespace nix {
        // values taken from: https://tools.ietf.org/html/rfc1321
        auto s1 = "";
        auto hash = hashString(HashType::htMD5, s1);
        ASSERT_EQ(hash.to_string(Base::Base16, true), "md5:d41d8cd98f00b204e9800998ecf8427e");
        ASSERT_EQ(hash.to_string(HashFormat::Base16, true), "md5:d41d8cd98f00b204e9800998ecf8427e");
    }

    TEST(hashString, testKnownMD5Hashes2) {
        // values taken from: https://tools.ietf.org/html/rfc1321
        auto s2 = "abc";
        auto hash = hashString(HashType::htMD5, s2);
        ASSERT_EQ(hash.to_string(Base::Base16, true), "md5:900150983cd24fb0d6963f7d28e17f72");
        ASSERT_EQ(hash.to_string(HashFormat::Base16, true), "md5:900150983cd24fb0d6963f7d28e17f72");
    }

    TEST(hashString, testKnownSHA1Hashes1) {
        // values taken from: https://tools.ietf.org/html/rfc3174
        auto s = "abc";
        auto hash = hashString(HashType::htSHA1, s);
        ASSERT_EQ(hash.to_string(Base::Base16, true),"sha1:a9993e364706816aba3e25717850c26c9cd0d89d");
        ASSERT_EQ(hash.to_string(HashFormat::Base16, true),"sha1:a9993e364706816aba3e25717850c26c9cd0d89d");
    }

    TEST(hashString, testKnownSHA1Hashes2) {
        // values taken from: https://tools.ietf.org/html/rfc3174
        auto s = "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq";
        auto hash = hashString(HashType::htSHA1, s);
        ASSERT_EQ(hash.to_string(Base::Base16, true),"sha1:84983e441c3bd26ebaae4aa1f95129e5e54670f1");
        ASSERT_EQ(hash.to_string(HashFormat::Base16, true),"sha1:84983e441c3bd26ebaae4aa1f95129e5e54670f1");
    }

    TEST(hashString, testKnownSHA256Hashes1) {
@@ -47,7 +47,7 @@ namespace nix {
        auto s = "abc";

        auto hash = hashString(HashType::htSHA256, s);
        ASSERT_EQ(hash.to_string(Base::Base16, true),
        ASSERT_EQ(hash.to_string(HashFormat::Base16, true),
            "sha256:ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad");
    }
@@ -55,7 +55,7 @@ namespace nix {
        // values taken from: https://tools.ietf.org/html/rfc4634
        auto s = "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq";
        auto hash = hashString(HashType::htSHA256, s);
        ASSERT_EQ(hash.to_string(Base::Base16, true),
        ASSERT_EQ(hash.to_string(HashFormat::Base16, true),
            "sha256:248d6a61d20638b8e5c026930c3e6039a33ce45964ff2167f6ecedd419db06c1");
    }
@@ -63,7 +63,7 @@ namespace nix {
        // values taken from: https://tools.ietf.org/html/rfc4634
        auto s = "abc";
        auto hash = hashString(HashType::htSHA512, s);
        ASSERT_EQ(hash.to_string(Base::Base16, true),
        ASSERT_EQ(hash.to_string(HashFormat::Base16, true),
            "sha512:ddaf35a193617abacc417349ae20413112e6fa4e89a9"
            "7ea20a9eeee64b55d39a2192992a274fc1a836ba3c23a3feebbd"
            "454d4423643ce80e2a9ac94fa54ca49f");
@@ -74,11 +74,26 @@ namespace nix {
        auto s = "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu";

        auto hash = hashString(HashType::htSHA512, s);
        ASSERT_EQ(hash.to_string(Base::Base16, true),
        ASSERT_EQ(hash.to_string(HashFormat::Base16, true),
            "sha512:8e959b75dae313da8cf4f72814fc143f8f7779c6eb9f7fa1"
            "7299aeadb6889018501d289e4900f7e4331b99dec4b5433a"
            "c7d329eeb6dd26545e96e55b874be909");
    }

    /* ----------------------------------------------------------------------------
     * parseHashFormat, parseHashFormatOpt, printHashFormat
     * --------------------------------------------------------------------------*/

    TEST(hashFormat, testRoundTripPrintParse) {
        for (const HashFormat hashFormat: { HashFormat::Base64, HashFormat::Base32, HashFormat::Base16, HashFormat::SRI}) {
            ASSERT_EQ(parseHashFormat(printHashFormat(hashFormat)), hashFormat);
            ASSERT_EQ(*parseHashFormatOpt(printHashFormat(hashFormat)), hashFormat);
        }
    }

    TEST(hashFormat, testParseHashFormatOptException) {
        ASSERT_EQ(parseHashFormatOpt("sha0042"), std::nullopt);
    }
}

namespace rc {
@@ -335,4 +335,13 @@ namespace nix {
        ASSERT_EQ(d, s);
    }

    TEST(percentEncode, yen) {
        // https://en.wikipedia.org/wiki/Percent-encoding#Character_data
        std::string s = reinterpret_cast<const char*>(u8"円");
        std::string e = "%E5%86%86";

        ASSERT_EQ(percentEncode(s), e);
        ASSERT_EQ(percentDecode(e), s);
    }

}
@@ -30,7 +30,7 @@ extern std::regex refRegex;

/// Instead of defining what a good Git Ref is, we define what a bad Git Ref is
/// This is because of the definition of a ref in refs.c in https://github.com/git/git
/// See tests/fetchGitRefs.sh for the full definition
/// See tests/functional/fetchGitRefs.sh for the full definition
const static std::string badGitRefRegexS = "//|^[./]|/\\.|\\.\\.|[[:cntrl:][:space:]:?^~\[]|\\\\|\\*|\\.lock$|\\.lock/|@\\{|[/.]$|^@$|^$";
extern std::regex badGitRefRegex;
@@ -41,7 +41,4 @@ extern std::regex revRegex;
/// A ref or revision, or a ref followed by a revision.
const static std::string refAndOrRevRegex = "(?:(" + revRegexS + ")|(?:(" + refRegexS + ")(?:/(" + revRegexS + "))?))";

const static std::string flakeIdRegexS = "[a-zA-Z][a-zA-Z0-9_-]*";
extern std::regex flakeIdRegex;

}
@@ -8,7 +8,6 @@ namespace nix {
std::regex refRegex(refRegexS, std::regex::ECMAScript);
std::regex badGitRefRegex(badGitRefRegexS, std::regex::ECMAScript);
std::regex revRegex(revRegexS, std::regex::ECMAScript);
std::regex flakeIdRegex(flakeIdRegexS, std::regex::ECMAScript);

ParsedURL parseURL(const std::string & url)
{
@@ -44,7 +43,7 @@ ParsedURL parseURL(const std::string & url)
            .base = base,
            .scheme = scheme,
            .authority = authority,
            .path = path,
            .path = percentDecode(path),
            .query = decodeQuery(query),
            .fragment = percentDecode(std::string(fragment))
        };
@@ -103,7 +102,7 @@ std::string percentEncode(std::string_view s, std::string_view keep)
            || keep.find(c) != std::string::npos)
            res += c;
        else
            res += fmt("%%%02X", (unsigned int) c);
            res += fmt("%%%02X", c & 0xFF);
    return res;
}
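The masking in the new line matters for bytes outside the ASCII range; a small illustration (assumed behavior on platforms where char is signed, mirroring the fmt() call above):

    #include <cstdio>

    void maskDemo()
    {
        char c = (char) 0xE5;                      // first byte of the UTF-8 encoding of "円"
        std::printf("%%%02X\n", (unsigned int) c); // may print "%FFFFFFE5": the negative char sign-extends
        std::printf("%%%02X\n", c & 0xFF);         // prints "%E5" regardless of char's signedness
    }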
@@ -159,4 +158,21 @@ ParsedUrlScheme parseUrlScheme(std::string_view scheme)
    };
}

std::string fixGitURL(const std::string & url)
{
    std::regex scpRegex("([^/]*)@(.*):(.*)");
    if (!hasPrefix(url, "/") && std::regex_match(url, scpRegex))
        return std::regex_replace(url, scpRegex, "ssh://$1@$2/$3");
    else {
        if (url.find("://") == std::string::npos) {
            return (ParsedURL {
                .scheme = "file",
                .authority = "",
                .path = url
            }).to_string();
        } else
            return url;
    }
}

}
@@ -45,4 +45,9 @@ struct ParsedUrlScheme {

ParsedUrlScheme parseUrlScheme(std::string_view scheme);

/* Detects scp-style uris (e.g. git@github.com:NixOS/nix) and fixes
   them by removing the `:` and assuming a scheme of `ssh://`. Also
   changes absolute paths into file:// URLs. */
std::string fixGitURL(const std::string & url);

}
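A few illustrative inputs and the rewrites one would expect from fixGitURL(), based on the implementation shown earlier (a sketch, not an exhaustive specification):

    #include <cassert>
    #include "url.hh"

    using namespace nix;

    void fixGitURLDemo()
    {
        // scp-style remote: the ':' becomes a path separator under ssh://.
        assert(fixGitURL("git@github.com:NixOS/nix") == "ssh://git@github.com/NixOS/nix");

        // A plain filesystem path (no "://") is turned into a file:// URL.
        assert(fixGitURL("/home/alice/repo") == "file:///home/alice/repo");

        // Anything that already has a scheme is left untouched.
        assert(fixGitURL("https://github.com/NixOS/nix.git") == "https://github.com/NixOS/nix.git");
    }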
@@ -48,6 +48,23 @@ extern char * * environ __attribute__((weak));
namespace nix {

void initLibUtil() {
    // Check that exception handling works. Exception handling has been observed
    // not to work on darwin when the linker flags aren't quite right.
    // In this case we don't want to expose the user to some unrelated uncaught
    // exception, but rather tell them exactly that exception handling is
    // broken.
    // When exception handling fails, the message tends to be printed by the
    // C++ runtime, followed by an abort.
    // For example on macOS we might see an error such as
    // libc++abi: terminating with uncaught exception of type nix::SysError: error: C++ exception handling is broken. This would appear to be a problem with the way Nix was compiled and/or linked and/or loaded.
    bool caught = false;
    try {
        throwExceptionSelfCheck();
    } catch (const nix::Error & _e) {
        caught = true;
    }
    // This is not actually the main point of this check, but let's make sure anyway:
    assert(caught);
}

std::optional<std::string> getEnv(const std::string & key)
@@ -1498,7 +1515,7 @@ bool shouldANSI()
{
    return isatty(STDERR_FILENO)
        && getEnv("TERM").value_or("dumb") != "dumb"
        && !getEnv("NO_COLOR").has_value();
        && !(getEnv("NO_COLOR").has_value() || getEnv("NOCOLOR").has_value());
}

std::string filterANSIEscapes(std::string_view s, bool filterAll, unsigned int width)
src/libutil/variant-wrapper.hh (new file, 30 lines)
@@ -0,0 +1,30 @@
#pragma once
///@file

// not used, but will be used by callers
#include <variant>

/**
 * Force the default versions of all constructors (copy, move, copy
 * assignment).
 */
#define FORCE_DEFAULT_CONSTRUCTORS(CLASS_NAME) \
    CLASS_NAME(const CLASS_NAME &) = default; \
    CLASS_NAME(CLASS_NAME &) = default; \
    CLASS_NAME(CLASS_NAME &&) = default; \
    \
    CLASS_NAME & operator =(const CLASS_NAME &) = default; \
    CLASS_NAME & operator =(CLASS_NAME &) = default;

/**
 * Make a wrapper constructor. All args are forwarded to the
 * construction of the "raw" field. (Which we assume is the only one.)
 *
 * The moral equivalent of `using Raw::Raw;`
 */
#define MAKE_WRAPPER_CONSTRUCTOR(CLASS_NAME) \
    FORCE_DEFAULT_CONSTRUCTORS(CLASS_NAME) \
    \
    CLASS_NAME(auto &&... arg) \
        : raw(std::forward<decltype(arg)>(arg)...) \
    { }
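A sketch of the intended use (the wrapper type here is made up): the macro assumes the class has a single field named raw and forwards every constructor argument to it.

    #include <string>
    #include <variant>
    #include "variant-wrapper.hh"

    struct IntOrString
    {
        std::variant<int, std::string> raw;

        MAKE_WRAPPER_CONSTRUCTOR(IntOrString);
    };

    void wrapperDemo()
    {
        IntOrString a{42};                 // constructs the int alternative of `raw`
        IntOrString b{std::string("abc")}; // constructs the string alternative
        IntOrString c = a;                 // uses the defaulted copy constructor
        (void) b; (void) c;
    }

The explicitly defaulted CLASS_NAME(CLASS_NAME &) overload keeps copies from non-const lvalues from being claimed by the greedy forwarding constructor.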