mirror of https://github.com/NixOS/nix synced 2025-07-07 14:21:48 +02:00

Merge branch 'errors-phase-2' into caveman-LOCs

Ben Burdette 2020-06-08 11:10:13 -06:00
commit b1c53b034c
68 changed files with 2601 additions and 552 deletions


@ -200,12 +200,11 @@ static int _main(int argc, char * * argv)
} catch (std::exception & e) {
auto msg = chomp(drainFD(5, false));
logError(
ErrorInfo {
.name = "Remote build",
.hint = hintfmt("cannot build on '%s': %s%s",
bestMachine->storeUri, e.what(),
(msg.empty() ? "" : ": " + msg))
logError({
.name = "Remote build",
.hint = hintfmt("cannot build on '%s': %s%s",
bestMachine->storeUri, e.what(),
(msg.empty() ? "" : ": " + msg))
});
bestMachine->enabled = false;
continue;


@ -1,12 +0,0 @@
programs += error-demo
error-demo_DIR := $(d)
error-demo_SOURCES := \
$(wildcard $(d)/*.cc) \
error-demo_CXXFLAGS += -I src/libutil -I src/libexpr
error-demo_LIBS = libutil libexpr
error-demo_LDFLAGS = -pthread $(SODIUM_LIBS) $(EDITLINE_LIBS) $(BOOST_LDFLAGS) -lboost_context -lboost_thread -lboost_system


@ -1698,7 +1698,7 @@ string EvalState::copyPathToStore(PathSet & context, const Path & path)
else {
auto p = settings.readOnlyMode
? store->computeStorePathForPath(std::string(baseNameOf(path)), checkSourcePath(path)).first
: store->addToStore(std::string(baseNameOf(path)), checkSourcePath(path), true, htSHA256, defaultPathFilter, repair);
: store->addToStore(std::string(baseNameOf(path)), checkSourcePath(path), FileIngestionMethod::Recursive, htSHA256, defaultPathFilter, repair);
dstPath = store->printStorePath(p);
srcToStore.insert_or_assign(path, std::move(p));
printMsg(lvlChatty, "copied source '%1%' -> '%2%'", path, dstPath);


@ -594,7 +594,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
std::optional<std::string> outputHash;
std::string outputHashAlgo;
bool outputHashRecursive = false;
auto ingestionMethod = FileIngestionMethod::Flat;
StringSet outputs;
outputs.insert("out");
@ -605,8 +605,8 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
vomit("processing attribute '%1%'", key);
auto handleHashMode = [&](const std::string & s) {
if (s == "recursive") outputHashRecursive = true;
else if (s == "flat") outputHashRecursive = false;
if (s == "recursive") ingestionMethod = FileIngestionMethod::Recursive;
else if (s == "flat") ingestionMethod = FileIngestionMethod::Flat;
else
throw EvalError(
ErrorInfo {
@ -787,11 +787,14 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
HashType ht = outputHashAlgo.empty() ? htUnknown : parseHashType(outputHashAlgo);
Hash h(*outputHash, ht);
auto outPath = state.store->makeFixedOutputPath(outputHashRecursive, h, drvName);
auto outPath = state.store->makeFixedOutputPath(ingestionMethod, h, drvName);
if (!jsonObject) drv.env["out"] = state.store->printStorePath(outPath);
drv.outputs.insert_or_assign("out", DerivationOutput(std::move(outPath),
(outputHashRecursive ? "r:" : "") + printHashType(h.type),
h.to_string(Base16, false)));
drv.outputs.insert_or_assign("out", DerivationOutput {
std::move(outPath),
(ingestionMethod == FileIngestionMethod::Recursive ? "r:" : "")
+ printHashType(h.type),
h.to_string(Base16, false),
});
}
else {
@ -1139,7 +1142,7 @@ static void prim_toFile(EvalState & state, const Pos & pos, Value * * args, Valu
static void addPath(EvalState & state, const Pos & pos, const string & name, const Path & path_,
Value * filterFun, bool recursive, const Hash & expectedHash, Value & v)
Value * filterFun, FileIngestionMethod method, const Hash & expectedHash, Value & v)
{
const auto path = evalSettings.pureEval && expectedHash ?
path_ :
@ -1170,12 +1173,12 @@ static void addPath(EvalState & state, const Pos & pos, const string & name, con
std::optional<StorePath> expectedStorePath;
if (expectedHash)
expectedStorePath = state.store->makeFixedOutputPath(recursive, expectedHash, name);
expectedStorePath = state.store->makeFixedOutputPath(method, expectedHash, name);
Path dstPath;
if (!expectedHash || !state.store->isValidPath(*expectedStorePath)) {
dstPath = state.store->printStorePath(settings.readOnlyMode
? state.store->computeStorePathForPath(name, path, recursive, htSHA256, filter).first
: state.store->addToStore(name, path, recursive, htSHA256, filter, state.repair));
? state.store->computeStorePathForPath(name, path, method, htSHA256, filter).first
: state.store->addToStore(name, path, method, htSHA256, filter, state.repair));
if (expectedHash && expectedStorePath != state.store->parseStorePath(dstPath))
throw Error("store path mismatch in (possibly filtered) path added from '%s'", path);
} else
@ -1206,7 +1209,7 @@ static void prim_filterSource(EvalState & state, const Pos & pos, Value * * args
.nixCode = NixCode { .errPos = pos }
});
addPath(state, pos, std::string(baseNameOf(path)), path, args[0], true, Hash(), v);
addPath(state, pos, std::string(baseNameOf(path)), path, args[0], FileIngestionMethod::Recursive, Hash(), v);
}
static void prim_path(EvalState & state, const Pos & pos, Value * * args, Value & v)
@ -1215,7 +1218,7 @@ static void prim_path(EvalState & state, const Pos & pos, Value * * args, Value
Path path;
string name;
Value * filterFun = nullptr;
auto recursive = true;
auto method = FileIngestionMethod::Recursive;
Hash expectedHash;
for (auto & attr : *args[0]->attrs) {
@ -1236,7 +1239,7 @@ static void prim_path(EvalState & state, const Pos & pos, Value * * args, Value
state.forceValue(*attr.value, pos);
filterFun = attr.value;
} else if (n == "recursive")
recursive = state.forceBool(*attr.value, *attr.pos);
method = FileIngestionMethod { state.forceBool(*attr.value, *attr.pos) };
else if (n == "sha256")
expectedHash = Hash(state.forceStringNoCtx(*attr.value, *attr.pos), htSHA256);
else
@ -1256,7 +1259,7 @@ static void prim_path(EvalState & state, const Pos & pos, Value * * args, Value
if (name.empty())
name = baseNameOf(path);
addPath(state, pos, name, path, filterFun, recursive, expectedHash, v);
addPath(state, pos, name, path, filterFun, method, expectedHash, v);
}


@ -195,7 +195,7 @@ struct GitInput : Input
return files.count(file);
};
auto storePath = store->addToStore("source", actualUrl, true, htSHA256, filter);
auto storePath = store->addToStore("source", actualUrl, FileIngestionMethod::Recursive, htSHA256, filter);
auto tree = Tree {
.actualPath = store->printStorePath(storePath),
@ -282,7 +282,10 @@ struct GitInput : Input
// FIXME: git stderr messes up our progress indicator, so
// we're using --quiet for now. Should process its stderr.
try {
runProgram("git", true, { "-C", repoDir, "fetch", "--quiet", "--force", "--", actualUrl, fmt("%s:%s", *input->ref, *input->ref) });
auto fetchRef = input->ref->compare(0, 5, "refs/") == 0
? *input->ref
: "refs/heads/" + *input->ref;
runProgram("git", true, { "-C", repoDir, "fetch", "--quiet", "--force", "--", actualUrl, fmt("%s:%s", fetchRef, fetchRef) });
} catch (Error & e) {
if (!pathExists(localRefFile)) throw;
warn("could not update local clone of Git repository '%s'; continuing with the most recent version", actualUrl);
@ -347,7 +350,7 @@ struct GitInput : Input
unpackTarfile(*source, tmpDir);
}
auto storePath = store->addToStore(name, tmpDir, true, htSHA256, filter);
auto storePath = store->addToStore(name, tmpDir, FileIngestionMethod::Recursive, htSHA256, filter);
auto lastModified = std::stoull(runProgram("git", true, { "-C", repoDir, "log", "-1", "--format=%ct", input->rev->gitRev() }));
@ -418,7 +421,7 @@ struct GitInputScheme : InputScheme
auto input = std::make_unique<GitInput>(parseURL(getStrAttr(attrs, "url")));
if (auto ref = maybeGetStrAttr(attrs, "ref")) {
if (!std::regex_match(*ref, refRegex))
if (std::regex_search(*ref, badGitRefRegex))
throw BadURL("invalid Git branch/tag name '%s'", *ref);
input->ref = *ref;
}


@ -114,7 +114,7 @@ struct MercurialInput : Input
return files.count(file);
};
auto storePath = store->addToStore("source", actualUrl, true, htSHA256, filter);
auto storePath = store->addToStore("source", actualUrl, FileIngestionMethod::Recursive, htSHA256, filter);
return {Tree {
.actualPath = store->printStorePath(storePath),


@ -67,11 +67,12 @@ DownloadFileResult downloadFile(
StringSink sink;
dumpString(*res.data, sink);
auto hash = hashString(htSHA256, *res.data);
ValidPathInfo info(store->makeFixedOutputPath(false, hash, name));
ValidPathInfo info(store->makeFixedOutputPath(FileIngestionMethod::Flat, hash, name));
info.narHash = hashString(htSHA256, *sink.s);
info.narSize = sink.s->size();
info.ca = makeFixedOutputCA(false, hash);
store->addToStore(info, sink.s, NoRepair, NoCheckSigs);
info.ca = makeFixedOutputCA(FileIngestionMethod::Flat, hash);
auto source = StringSource { *sink.s };
store->addToStore(info, source, NoRepair, NoCheckSigs);
storePath = std::move(info.path);
}
@ -141,7 +142,7 @@ Tree downloadTarball(
throw nix::Error("tarball '%s' contains an unexpected number of top-level files", url);
auto topDir = tmpDir + "/" + members.begin()->name;
lastModified = lstat(topDir).st_mtime;
unpackedStorePath = store->addToStore(name, topDir, true, htSHA256, defaultPathFilter, NoRepair);
unpackedStorePath = store->addToStore(name, topDir, FileIngestionMethod::Recursive, htSHA256, defaultPathFilter, NoRepair);
}
Attrs infoAttrs({


@ -8,7 +8,7 @@ namespace nix::fetchers {
StorePath TreeInfo::computeStorePath(Store & store) const
{
assert(narHash);
return store.makeFixedOutputPath(true, narHash, "source");
return store.makeFixedOutputPath(FileIngestionMethod::Recursive, narHash, "source");
}
}


@ -113,9 +113,12 @@ void BinaryCacheStore::writeNarInfo(ref<NarInfo> narInfo)
diskCache->upsertNarInfo(getUri(), hashPart, std::shared_ptr<NarInfo>(narInfo));
}
void BinaryCacheStore::addToStore(const ValidPathInfo & info, const ref<std::string> & nar,
void BinaryCacheStore::addToStore(const ValidPathInfo & info, Source & narSource,
RepairFlag repair, CheckSigsFlag checkSigs, std::shared_ptr<FSAccessor> accessor)
{
// FIXME: See if we can use the original source to reduce memory usage.
auto nar = make_ref<std::string>(narSource.drain());
if (!repair && isValidPath(info.path)) return;
/* Verify that all references are valid. This may do some .narinfo
@ -327,7 +330,7 @@ void BinaryCacheStore::queryPathInfoUncached(const StorePath & storePath,
}
StorePath BinaryCacheStore::addToStore(const string & name, const Path & srcPath,
bool recursive, HashType hashAlgo, PathFilter & filter, RepairFlag repair)
FileIngestionMethod method, HashType hashAlgo, PathFilter & filter, RepairFlag repair)
{
// FIXME: some cut&paste from LocalStore::addToStore().
@ -336,7 +339,7 @@ StorePath BinaryCacheStore::addToStore(const string & name, const Path & srcPath
small files. */
StringSink sink;
Hash h;
if (recursive) {
if (method == FileIngestionMethod::Recursive) {
dumpPath(srcPath, sink, filter);
h = hashString(hashAlgo, *sink.s);
} else {
@ -345,9 +348,10 @@ StorePath BinaryCacheStore::addToStore(const string & name, const Path & srcPath
h = hashString(hashAlgo, s);
}
ValidPathInfo info(makeFixedOutputPath(recursive, h, name));
ValidPathInfo info(makeFixedOutputPath(method, h, name));
addToStore(info, sink.s, repair, CheckSigs, nullptr);
auto source = StringSource { *sink.s };
addToStore(info, source, repair, CheckSigs, nullptr);
return std::move(info.path);
}
@ -361,7 +365,8 @@ StorePath BinaryCacheStore::addTextToStore(const string & name, const string & s
if (repair || !isValidPath(info.path)) {
StringSink sink;
dumpString(s, sink);
addToStore(info, sink.s, repair, CheckSigs, nullptr);
auto source = StringSource { *sink.s };
addToStore(info, source, repair, CheckSigs, nullptr);
}
return std::move(info.path);


@ -74,12 +74,12 @@ public:
std::optional<StorePath> queryPathFromHashPart(const std::string & hashPart) override
{ unsupported("queryPathFromHashPart"); }
void addToStore(const ValidPathInfo & info, const ref<std::string> & nar,
void addToStore(const ValidPathInfo & info, Source & narSource,
RepairFlag repair, CheckSigsFlag checkSigs,
std::shared_ptr<FSAccessor> accessor) override;
StorePath addToStore(const string & name, const Path & srcPath,
bool recursive, HashType hashAlgo,
FileIngestionMethod method, HashType hashAlgo,
PathFilter & filter, RepairFlag repair) override;
StorePath addTextToStore(const string & name, const string & s,


@ -513,9 +513,10 @@ private:
Path fnUserLock;
AutoCloseFD fdUserLock;
bool isEnabled = false;
string user;
uid_t uid;
gid_t gid;
uid_t uid = 0;
gid_t gid = 0;
std::vector<gid_t> supplementaryGIDs;
public:
@ -528,7 +529,9 @@ public:
uid_t getGID() { assert(gid); return gid; }
std::vector<gid_t> getSupplementaryGIDs() { return supplementaryGIDs; }
bool enabled() { return uid != 0; }
bool findFreeUser();
bool enabled() { return isEnabled; }
};
@ -536,6 +539,11 @@ public:
UserLock::UserLock()
{
assert(settings.buildUsersGroup != "");
createDirs(settings.nixStateDir + "/userpool");
}
bool UserLock::findFreeUser() {
if (enabled()) return true;
/* Get the members of the build-users-group. */
struct group * gr = getgrnam(settings.buildUsersGroup.get().c_str());
@ -565,7 +573,6 @@ UserLock::UserLock()
throw Error("the user '%1%' in the group '%2%' does not exist",
i, settings.buildUsersGroup);
createDirs(settings.nixStateDir + "/userpool");
fnUserLock = (format("%1%/userpool/%2%") % settings.nixStateDir % pw->pw_uid).str();
@ -596,16 +603,14 @@ UserLock::UserLock()
supplementaryGIDs.resize(ngroups);
#endif
return;
isEnabled = true;
return true;
}
}
throw Error("all build users are currently in use; "
"consider creating additional users and adding them to the '%1%' group",
settings.buildUsersGroup);
return false;
}
void UserLock::kill()
{
killUser(uid);
@ -934,6 +939,7 @@ private:
void closureRepaired();
void inputsRealised();
void tryToBuild();
void tryLocalBuild();
void buildDone();
/* Is the build hook willing to perform the build? */
@ -1005,6 +1011,8 @@ private:
Goal::amDone(result);
}
void started();
void done(BuildResult::Status status, const string & msg = "");
StorePathSet exportReferences(const StorePathSet & storePaths);
@ -1150,10 +1158,9 @@ void DerivationGoal::loadDerivation()
trace("loading derivation");
if (nrFailed != 0) {
logError(
ErrorInfo {
.name = "missing derivation during build",
.hint = hintfmt("cannot build missing derivation '%s'", worker.store.printStorePath(drvPath))
logError({
.name = "missing derivation during build",
.hint = hintfmt("cannot build missing derivation '%s'", worker.store.printStorePath(drvPath))
});
done(BuildResult::MiscFailure);
return;
@ -1305,12 +1312,11 @@ void DerivationGoal::repairClosure()
/* Check each path (slow!). */
for (auto & i : outputClosure) {
if (worker.pathContentsGood(i)) continue;
logError(
ErrorInfo {
.name = "Corrupt path in closure",
.hint = hintfmt(
"found corrupted or missing path '%s' in the output closure of '%s'",
worker.store.printStorePath(i), worker.store.printStorePath(drvPath))
logError({
.name = "Corrupt path in closure",
.hint = hintfmt(
"found corrupted or missing path '%s' in the output closure of '%s'",
worker.store.printStorePath(i), worker.store.printStorePath(drvPath))
});
auto drvPath2 = outputsToDrv.find(i);
if (drvPath2 == outputsToDrv.end())
@ -1345,12 +1351,11 @@ void DerivationGoal::inputsRealised()
if (nrFailed != 0) {
if (!useDerivation)
throw Error("some dependencies of '%s' are missing", worker.store.printStorePath(drvPath));
logError(
ErrorInfo {
.name = "Dependencies could not be built",
.hint = hintfmt(
"cannot build derivation '%s': %s dependencies couldn't be built",
worker.store.printStorePath(drvPath), nrFailed)
logError({
.name = "Dependencies could not be built",
.hint = hintfmt(
"cannot build derivation '%s': %s dependencies couldn't be built",
worker.store.printStorePath(drvPath), nrFailed)
});
done(BuildResult::DependencyFailed);
return;
@ -1406,6 +1411,19 @@ void DerivationGoal::inputsRealised()
result = BuildResult();
}
void DerivationGoal::started() {
auto msg = fmt(
buildMode == bmRepair ? "repairing outputs of '%s'" :
buildMode == bmCheck ? "checking outputs of '%s'" :
nrRounds > 1 ? "building '%s' (round %d/%d)" :
"building '%s'", worker.store.printStorePath(drvPath), curRound, nrRounds);
fmt("building '%s'", worker.store.printStorePath(drvPath));
if (hook) msg += fmt(" on '%s'", machineName);
act = std::make_unique<Activity>(*logger, lvlInfo, actBuild, msg,
Logger::Fields{worker.store.printStorePath(drvPath), hook ? machineName : "", curRound, nrRounds});
mcRunningBuilds = std::make_unique<MaintainCount<uint64_t>>(worker.runningBuilds);
worker.updateProgress();
}
void DerivationGoal::tryToBuild()
{
@ -1457,20 +1475,6 @@ void DerivationGoal::tryToBuild()
supported for local builds. */
bool buildLocally = buildMode != bmNormal || parsedDrv->willBuildLocally();
auto started = [&]() {
auto msg = fmt(
buildMode == bmRepair ? "repairing outputs of '%s'" :
buildMode == bmCheck ? "checking outputs of '%s'" :
nrRounds > 1 ? "building '%s' (round %d/%d)" :
"building '%s'", worker.store.printStorePath(drvPath), curRound, nrRounds);
fmt("building '%s'", worker.store.printStorePath(drvPath));
if (hook) msg += fmt(" on '%s'", machineName);
act = std::make_unique<Activity>(*logger, lvlInfo, actBuild, msg,
Logger::Fields{worker.store.printStorePath(drvPath), hook ? machineName : "", curRound, nrRounds});
mcRunningBuilds = std::make_unique<MaintainCount<uint64_t>>(worker.runningBuilds);
worker.updateProgress();
};
/* Is the build hook willing to accept this job? */
if (!buildLocally) {
switch (tryBuildHook()) {
@ -1503,6 +1507,34 @@ void DerivationGoal::tryToBuild()
return;
}
state = &DerivationGoal::tryLocalBuild;
worker.wakeUp(shared_from_this());
}
void DerivationGoal::tryLocalBuild() {
/* If `build-users-group' is not empty, then we have to build as
one of the members of that group. */
if (settings.buildUsersGroup != "" && getuid() == 0) {
#if defined(__linux__) || defined(__APPLE__)
if (!buildUser) buildUser = std::make_unique<UserLock>();
if (buildUser->findFreeUser()) {
/* Make sure that no other processes are executing under this
uid. */
buildUser->kill();
} else {
debug("waiting for build users");
worker.waitForAWhile(shared_from_this());
return;
}
#else
/* Don't know how to block the creation of setuid/setgid
binaries on this platform. */
throw Error("build users are not supported on this platform for security reasons");
#endif
}
try {
/* Okay, we have to build. */
@ -1809,12 +1841,11 @@ HookReply DerivationGoal::tryBuildHook()
} catch (SysError & e) {
if (e.errNo == EPIPE) {
logError(
ErrorInfo {
.name = "Build hook died",
.hint = hintfmt(
"build hook died unexpectedly: %s",
chomp(drainFD(worker.hook->fromHook.readSide.get())))
logError({
.name = "Build hook died",
.hint = hintfmt(
"build hook died unexpectedly: %s",
chomp(drainFD(worker.hook->fromHook.readSide.get())))
});
worker.hook = 0;
return rpDecline;
@ -1968,22 +1999,6 @@ void DerivationGoal::startBuilder()
#endif
}
/* If `build-users-group' is not empty, then we have to build as
one of the members of that group. */
if (settings.buildUsersGroup != "" && getuid() == 0) {
#if defined(__linux__) || defined(__APPLE__)
buildUser = std::make_unique<UserLock>();
/* Make sure that no other processes are executing under this
uid. */
buildUser->kill();
#else
/* Don't know how to block the creation of setuid/setgid
binaries on this platform. */
throw Error("build users are not supported on this platform for security reasons");
#endif
}
/* Create a temporary directory where the build will take
place. */
tmpDir = createTempDir("", "nix-build-" + std::string(drvPath.name()), false, false, 0700);
@ -2740,7 +2755,7 @@ struct RestrictedStore : public LocalFSStore
{ throw Error("queryPathFromHashPart"); }
StorePath addToStore(const string & name, const Path & srcPath,
bool recursive = true, HashType hashAlgo = htSHA256,
FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256,
PathFilter & filter = defaultPathFilter, RepairFlag repair = NoRepair) override
{ throw Error("addToStore"); }
@ -2753,9 +2768,9 @@ struct RestrictedStore : public LocalFSStore
}
StorePath addToStoreFromDump(const string & dump, const string & name,
bool recursive = true, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair) override
FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair) override
{
auto path = next->addToStoreFromDump(dump, name, recursive, hashAlgo, repair);
auto path = next->addToStoreFromDump(dump, name, method, hashAlgo, repair);
goal.addDependency(path);
return path;
}
@ -3673,11 +3688,10 @@ void DerivationGoal::registerOutputs()
/* Apply hash rewriting if necessary. */
bool rewritten = false;
if (!outputRewrites.empty()) {
logWarning(
ErrorInfo {
.name = "Rewriting hashes",
.hint = hintfmt("rewriting hashes in '%1%'; cross fingers", path)
});
logWarning({
.name = "Rewriting hashes",
.hint = hintfmt("rewriting hashes in '%1%'; cross fingers", path)
});
/* Canonicalise first. This ensures that the path we're
rewriting doesn't contain a hard link to /etc/shadow or
@ -3702,10 +3716,10 @@ void DerivationGoal::registerOutputs()
if (fixedOutput) {
bool recursive; Hash h;
i.second.parseHashInfo(recursive, h);
FileIngestionMethod outputHashMode; Hash h;
i.second.parseHashInfo(outputHashMode, h);
if (!recursive) {
if (outputHashMode == FileIngestionMethod::Flat) {
/* The output path should be a regular file without execute permission. */
if (!S_ISREG(st.st_mode) || (st.st_mode & S_IXUSR) != 0)
throw BuildError(
@ -3716,9 +3730,11 @@ void DerivationGoal::registerOutputs()
/* Check the hash. In hash mode, move the path produced by
the derivation to its content-addressed location. */
Hash h2 = recursive ? hashPath(h.type, actualPath).first : hashFile(h.type, actualPath);
Hash h2 = outputHashMode == FileIngestionMethod::Recursive
? hashPath(h.type, actualPath).first
: hashFile(h.type, actualPath);
auto dest = worker.store.makeFixedOutputPath(recursive, h2, i.second.path.name());
auto dest = worker.store.makeFixedOutputPath(outputHashMode, h2, i.second.path.name());
if (h != h2) {
@ -3747,7 +3763,7 @@ void DerivationGoal::registerOutputs()
else
assert(worker.store.parseStorePath(path) == dest);
ca = makeFixedOutputCA(recursive, h2);
ca = makeFixedOutputCA(outputHashMode, h2);
}
/* Get rid of all weird permissions. This also checks that
@ -3854,10 +3870,9 @@ void DerivationGoal::registerOutputs()
if (settings.enforceDeterminism)
throw NotDeterministic(hint);
logError(
ErrorInfo {
.name = "Output determinism error",
.hint = hint
logError({
.name = "Output determinism error",
.hint = hint
});
@ -3974,7 +3989,9 @@ void DerivationGoal::checkOutputs(const std::map<Path, ValidPathInfo> & outputs)
auto spec = parseReferenceSpecifiers(worker.store, *drv, *value);
auto used = recursive ? cloneStorePathSet(getClosure(info.path).first) : cloneStorePathSet(info.references);
auto used = recursive
? cloneStorePathSet(getClosure(info.path).first)
: cloneStorePathSet(info.references);
if (recursive && checks.ignoreSelfRefs)
used.erase(info.path);
@ -4122,12 +4139,11 @@ void DerivationGoal::handleChildOutput(int fd, const string & data)
{
logSize += data.size();
if (settings.maxLogSize && logSize > settings.maxLogSize) {
logError(
ErrorInfo {
.name = "Max log size exceeded",
.hint = hintfmt(
"%1% killed after writing more than %2% bytes of log output",
getName(), settings.maxLogSize)
logError({
.name = "Max log size exceeded",
.hint = hintfmt(
"%1% killed after writing more than %2% bytes of log output",
getName(), settings.maxLogSize)
});
killChild();
done(BuildResult::LogLimitExceeded);
@ -4444,12 +4460,11 @@ void SubstitutionGoal::tryNext()
&& !sub->isTrusted
&& !info->checkSignatures(worker.store, worker.store.getPublicKeys()))
{
logWarning(
ErrorInfo {
.name = "Invalid path signature",
.hint = hintfmt("substituter '%s' does not have a valid signature for path '%s'",
sub->getUri(), worker.store.printStorePath(storePath))
});
logWarning({
.name = "Invalid path signature",
.hint = hintfmt("substituter '%s' does not have a valid signature for path '%s'",
sub->getUri(), worker.store.printStorePath(storePath))
});
tryNext();
return;
}
@ -4861,7 +4876,7 @@ void Worker::waitForInput()
if (!waitingForAWhile.empty()) {
useTimeout = true;
if (lastWokenUp == steady_time_point::min())
printInfo("waiting for locks or build slots...");
printInfo("waiting for locks, build slots or build users...");
if (lastWokenUp == steady_time_point::min() || lastWokenUp > before) lastWokenUp = before;
timeout = std::max(1L,
(long) std::chrono::duration_cast<std::chrono::seconds>(
@ -4931,12 +4946,11 @@ void Worker::waitForInput()
j->respectTimeouts &&
after - j->lastOutput >= std::chrono::seconds(settings.maxSilentTime))
{
logError(
ErrorInfo {
.name = "Silent build timeout",
.hint = hintfmt(
"%1% timed out after %2% seconds of silence",
goal->getName(), settings.maxSilentTime)
logError({
.name = "Silent build timeout",
.hint = hintfmt(
"%1% timed out after %2% seconds of silence",
goal->getName(), settings.maxSilentTime)
});
goal->timedOut();
}
@ -4946,12 +4960,11 @@ void Worker::waitForInput()
j->respectTimeouts &&
after - j->timeStarted >= std::chrono::seconds(settings.buildTimeout))
{
logError(
ErrorInfo {
.name = "Build timeout",
.hint = hintfmt(
"%1% timed out after %2% seconds",
goal->getName(), settings.buildTimeout)
logError({
.name = "Build timeout",
.hint = hintfmt(
"%1% timed out after %2% seconds",
goal->getName(), settings.buildTimeout)
});
goal->timedOut();
}
@ -5012,10 +5025,9 @@ bool Worker::pathContentsGood(const StorePath & path)
}
pathContentsGoodCache.insert_or_assign(path.clone(), res);
if (!res)
logError(
ErrorInfo {
.name = "Corrupted path",
.hint = hintfmt("path '%s' is corrupted or missing!", store.printStorePath(path))
logError({
.name = "Corrupted path",
.hint = hintfmt("path '%s' is corrupted or missing!", store.printStorePath(path))
});
return res;
}


@ -367,20 +367,24 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
}
case wopAddToStore: {
bool fixed, recursive;
std::string s, baseName;
from >> baseName >> fixed /* obsolete */ >> recursive >> s;
/* Compatibility hack. */
if (!fixed) {
s = "sha256";
recursive = true;
FileIngestionMethod method;
{
bool fixed, recursive;
from >> baseName >> fixed /* obsolete */ >> recursive >> s;
method = FileIngestionMethod { recursive };
/* Compatibility hack. */
if (!fixed) {
s = "sha256";
method = FileIngestionMethod::Recursive;
}
}
HashType hashAlgo = parseHashType(s);
TeeSource savedNAR(from);
RetrieveRegularNARSink savedRegular;
if (recursive) {
if (method == FileIngestionMethod::Recursive) {
/* Get the entire NAR dump from the client and save it to
a string so that we can pass it to
addToStoreFromDump(). */
@ -392,7 +396,11 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
logger->startWork();
if (!savedRegular.regular) throw Error("regular file expected");
auto path = store->addToStoreFromDump(recursive ? *savedNAR.data : savedRegular.s, baseName, recursive, hashAlgo);
auto path = store->addToStoreFromDump(
method == FileIngestionMethod::Recursive ? *savedNAR.data : savedRegular.s,
baseName,
method,
hashAlgo);
logger->stopWork();
to << store->printStorePath(path);


@ -9,13 +9,13 @@
namespace nix {
void DerivationOutput::parseHashInfo(bool & recursive, Hash & hash) const
void DerivationOutput::parseHashInfo(FileIngestionMethod & recursive, Hash & hash) const
{
recursive = false;
recursive = FileIngestionMethod::Flat;
string algo = hashAlgo;
if (string(algo, 0, 2) == "r:") {
recursive = true;
recursive = FileIngestionMethod::Recursive;
algo = string(algo, 2);
}


@ -22,7 +22,7 @@ struct DerivationOutput
, hashAlgo(std::move(hashAlgo))
, hash(std::move(hash))
{ }
void parseHashInfo(bool & recursive, Hash & hash) const;
void parseHashInfo(FileIngestionMethod & recursive, Hash & hash) const;
};
typedef std::map<string, DerivationOutput> DerivationOutputs;


@ -1,3 +1,4 @@
#include "serialise.hh"
#include "store-api.hh"
#include "archive.hh"
#include "worker-protocol.hh"
@ -100,7 +101,9 @@ StorePaths Store::importPaths(Source & source, std::shared_ptr<FSAccessor> acces
if (readInt(source) == 1)
readString(source);
addToStore(info, tee.source.data, NoRepair, checkSigs, accessor);
// Can't use underlying source, which would have been exhausted
auto source = StringSource { *tee.source.data };
addToStore(info, source, NoRepair, checkSigs, accessor);
res.push_back(info.path.clone());
}


@ -599,12 +599,11 @@ struct curlFileTransfer : public FileTransfer
workerThreadMain();
} catch (nix::Interrupted & e) {
} catch (std::exception & e) {
logError(
ErrorInfo {
.name = "File transfer",
.hint = hintfmt("unexpected error in download thread: %s",
e.what())
});
logError({
.name = "File transfer",
.hint = hintfmt("unexpected error in download thread: %s",
e.what())
});
}
{


@ -195,7 +195,7 @@ struct LegacySSHStore : public Store
{ unsupported("queryPathFromHashPart"); }
StorePath addToStore(const string & name, const Path & srcPath,
bool recursive, HashType hashAlgo,
FileIngestionMethod method, HashType hashAlgo,
PathFilter & filter, RepairFlag repair) override
{ unsupported("addToStore"); }


@ -87,12 +87,11 @@ LocalStore::LocalStore(const Params & params)
struct group * gr = getgrnam(settings.buildUsersGroup.get().c_str());
if (!gr)
logError(
ErrorInfo {
.name = "'build-users-group' not found",
.hint = hintfmt(
"warning: the group '%1%' specified in 'build-users-group' does not exist",
settings.buildUsersGroup)
logError({
.name = "'build-users-group' not found",
.hint = hintfmt(
"warning: the group '%1%' specified in 'build-users-group' does not exist",
settings.buildUsersGroup)
});
else {
struct stat st;
@ -562,10 +561,10 @@ void LocalStore::checkDerivationOutputs(const StorePath & drvPath, const Derivat
if (out == drv.outputs.end())
throw Error("derivation '%s' does not have an output named 'out'", printStorePath(drvPath));
bool recursive; Hash h;
out->second.parseHashInfo(recursive, h);
FileIngestionMethod method; Hash h;
out->second.parseHashInfo(method, h);
check(makeFixedOutputPath(recursive, h, drvName), out->second.path, "out");
check(makeFixedOutputPath(method, h, drvName), out->second.path, "out");
}
else {
@ -1048,11 +1047,11 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source,
StorePath LocalStore::addToStoreFromDump(const string & dump, const string & name,
bool recursive, HashType hashAlgo, RepairFlag repair)
FileIngestionMethod method, HashType hashAlgo, RepairFlag repair)
{
Hash h = hashString(hashAlgo, dump);
auto dstPath = makeFixedOutputPath(recursive, h, name);
auto dstPath = makeFixedOutputPath(method, h, name);
addTempRoot(dstPath);
@ -1072,7 +1071,7 @@ StorePath LocalStore::addToStoreFromDump(const string & dump, const string & nam
autoGC();
if (recursive) {
if (method == FileIngestionMethod::Recursive) {
StringSource source(dump);
restorePath(realPath, source);
} else
@ -1085,7 +1084,7 @@ StorePath LocalStore::addToStoreFromDump(const string & dump, const string & nam
above (if called with recursive == true and hashAlgo ==
sha256); otherwise, compute it here. */
HashResult hash;
if (recursive) {
if (method == FileIngestionMethod::Recursive) {
hash.first = hashAlgo == htSHA256 ? h : hashString(htSHA256, dump);
hash.second = dump.size();
} else
@ -1096,7 +1095,7 @@ StorePath LocalStore::addToStoreFromDump(const string & dump, const string & nam
ValidPathInfo info(dstPath.clone());
info.narHash = hash.first;
info.narSize = hash.second;
info.ca = makeFixedOutputCA(recursive, h);
info.ca = makeFixedOutputCA(method, h);
registerValidPath(info);
}
@ -1108,7 +1107,7 @@ StorePath LocalStore::addToStoreFromDump(const string & dump, const string & nam
StorePath LocalStore::addToStore(const string & name, const Path & _srcPath,
bool recursive, HashType hashAlgo, PathFilter & filter, RepairFlag repair)
FileIngestionMethod method, HashType hashAlgo, PathFilter & filter, RepairFlag repair)
{
Path srcPath(absPath(_srcPath));
@ -1116,12 +1115,12 @@ StorePath LocalStore::addToStore(const string & name, const Path & _srcPath,
method for very large paths, but `copyPath' is mainly used for
small files. */
StringSink sink;
if (recursive)
if (method == FileIngestionMethod::Recursive)
dumpPath(srcPath, sink, filter);
else
sink.s = make_ref<std::string>(readFile(srcPath));
return addToStoreFromDump(*sink.s, name, recursive, hashAlgo, repair);
return addToStoreFromDump(*sink.s, name, method, hashAlgo, repair);
}
@ -1242,12 +1241,11 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
Path linkPath = linksDir + "/" + link.name;
string hash = hashPath(htSHA256, linkPath).first.to_string(Base32, false);
if (hash != link.name) {
logError(
ErrorInfo {
.name = "Invalid hash",
.hint = hintfmt(
"link '%s' was modified! expected hash '%s', got '%s'",
linkPath, link.name, hash)
logError({
.name = "Invalid hash",
.hint = hintfmt(
"link '%s' was modified! expected hash '%s', got '%s'",
linkPath, link.name, hash)
});
if (repair) {
if (unlink(linkPath.c_str()) == 0)
@ -1281,11 +1279,10 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair)
auto current = hashSink->finish();
if (info->narHash != nullHash && info->narHash != current.first) {
logError(
ErrorInfo {
.name = "Invalid hash - path modified",
.hint = hintfmt("path '%s' was modified! expected hash '%s', got '%s'",
printStorePath(i), info->narHash.to_string(), current.first.to_string())
logError({
.name = "Invalid hash - path modified",
.hint = hintfmt("path '%s' was modified! expected hash '%s', got '%s'",
printStorePath(i), info->narHash.to_string(), current.first.to_string())
});
if (repair) repairPath(i); else errors = true;
} else {
@ -1337,10 +1334,9 @@ void LocalStore::verifyPath(const Path & pathS, const StringSet & store,
if (!done.insert(pathS).second) return;
if (!isStorePath(pathS)) {
logError(
ErrorInfo {
.name = "Nix path not found",
.hint = hintfmt("path '%s' is not in the Nix store", pathS)
logError({
.name = "Nix path not found",
.hint = hintfmt("path '%s' is not in the Nix store", pathS)
});
return;
}
@ -1364,10 +1360,9 @@ void LocalStore::verifyPath(const Path & pathS, const StringSet & store,
auto state(_state.lock());
invalidatePath(*state, path);
} else {
logError(
ErrorInfo {
.name = "Missing path with referrers",
.hint = hintfmt("path '%s' disappeared, but it still has valid referrers!", pathS)
logError({
.name = "Missing path with referrers",
.hint = hintfmt("path '%s' disappeared, but it still has valid referrers!", pathS)
});
if (repair)
try {


@ -149,7 +149,7 @@ public:
std::shared_ptr<FSAccessor> accessor) override;
StorePath addToStore(const string & name, const Path & srcPath,
bool recursive, HashType hashAlgo,
FileIngestionMethod method, HashType hashAlgo,
PathFilter & filter, RepairFlag repair) override;
/* Like addToStore(), but the contents of the path are contained
@ -157,7 +157,7 @@ public:
true) or simply the contents of a regular file (if recursive ==
false). */
StorePath addToStoreFromDump(const string & dump, const string & name,
bool recursive = true, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair) override;
FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair) override;
StorePath addTextToStore(const string & name, const string & s,
const StorePathSet & references, RepairFlag repair) override;


@ -130,10 +130,9 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
NixOS (example: $fontconfig/var/cache being modified). Skip
those files. FIXME: check the modification time. */
if (S_ISREG(st.st_mode) && (st.st_mode & S_IWUSR)) {
logWarning(
ErrorInfo {
.name = "Suspicious file",
.hint = hintfmt("skipping suspicious writable file '%1%'", path)
logWarning({
.name = "Suspicious file",
.hint = hintfmt("skipping suspicious writable file '%1%'", path)
});
return;
}
@ -198,10 +197,9 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
}
if (st.st_size != stLink.st_size) {
logWarning(
ErrorInfo {
.name = "Corrupted link",
.hint = hintfmt("removing corrupted link '%1%'", linkPath)
logWarning({
.name = "Corrupted link",
.hint = hintfmt("removing corrupted link '%1%'", linkPath)
});
unlink(linkPath.c_str());
goto retry;
@ -237,10 +235,9 @@ void LocalStore::optimisePath_(Activity * act, OptimiseStats & stats,
/* Atomically replace the old file with the new hard link. */
if (rename(tempLink.c_str(), path.c_str()) == -1) {
if (unlink(tempLink.c_str()) == -1)
logError(
ErrorInfo {
.name = "Unlink error",
.hint = hintfmt("unable to unlink '%1%'", tempLink)
logError({
.name = "Unlink error",
.hint = hintfmt("unable to unlink '%1%'", tempLink)
});
if (errno == EMLINK) {
/* Some filesystems generate too many links on the rename,


@ -73,6 +73,11 @@ const size_t storePathHashLen = 32; // i.e. 160 bits
/* Extension of derivations in the Nix store. */
const std::string drvExtension = ".drv";
enum struct FileIngestionMethod : uint8_t {
Flat = false,
Recursive = true
};
struct StorePathWithOutputs
{
StorePath path;
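
The FileIngestionMethod enum introduced above is the pivot of this commit: the `bool recursive` parameters in the store API are replaced by it. A minimal standalone sketch of the call-site migration; the `ingest` function is a hypothetical stand-in, not Nix code:

    #include <cstdint>
    #include <iostream>

    enum struct FileIngestionMethod : uint8_t {
        Flat = false,      // ingest the file contents directly
        Recursive = true   // ingest a NAR dump of the whole path
    };

    // Hypothetical stand-in for parameters like those of addToStore()/makeFixedOutputPath().
    static void ingest(FileIngestionMethod method)
    {
        if (method == FileIngestionMethod::Recursive)
            std::cout << "dumping path recursively (NAR)\n";
        else
            std::cout << "reading flat file contents\n";
    }

    int main()
    {
        ingest(FileIngestionMethod::Recursive);               // was: ingest(true)
        ingest(FileIngestionMethod::Flat);                    // was: ingest(false)
        bool legacyRecursiveFlag = true;                      // e.g. an old protocol field
        ingest(FileIngestionMethod { legacyRecursiveFlag });  // brace-init conversion used throughout this commit
    }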


@ -484,7 +484,7 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source,
StorePath RemoteStore::addToStore(const string & name, const Path & _srcPath,
bool recursive, HashType hashAlgo, PathFilter & filter, RepairFlag repair)
FileIngestionMethod method, HashType hashAlgo, PathFilter & filter, RepairFlag repair)
{
if (repair) throw Error("repairing is not supported when building through the Nix daemon");
@ -492,10 +492,12 @@ StorePath RemoteStore::addToStore(const string & name, const Path & _srcPath,
Path srcPath(absPath(_srcPath));
conn->to << wopAddToStore << name
<< ((hashAlgo == htSHA256 && recursive) ? 0 : 1) /* backwards compatibility hack */
<< (recursive ? 1 : 0)
<< printHashType(hashAlgo);
conn->to
<< wopAddToStore
<< name
<< ((hashAlgo == htSHA256 && method == FileIngestionMethod::Recursive) ? 0 : 1) /* backwards compatibility hack */
<< (method == FileIngestionMethod::Recursive ? 1 : 0)
<< printHashType(hashAlgo);
try {
conn->to.written = 0;


@ -65,7 +65,7 @@ public:
std::shared_ptr<FSAccessor> accessor) override;
StorePath addToStore(const string & name, const Path & srcPath,
bool recursive = true, HashType hashAlgo = htSHA256,
FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256,
PathFilter & filter = defaultPathFilter, RepairFlag repair = NoRepair) override;
StorePath addTextToStore(const string & name, const string & s,


@ -172,19 +172,22 @@ static std::string makeType(
StorePath Store::makeFixedOutputPath(
bool recursive,
FileIngestionMethod recursive,
const Hash & hash,
std::string_view name,
const StorePathSet & references,
bool hasSelfReference) const
{
if (hash.type == htSHA256 && recursive) {
if (hash.type == htSHA256 && recursive == FileIngestionMethod::Recursive) {
return makeStorePath(makeType(*this, "source", references, hasSelfReference), hash, name);
} else {
assert(references.empty());
return makeStorePath("output:out", hashString(htSHA256,
"fixed:out:" + (recursive ? (string) "r:" : "") +
hash.to_string(Base16) + ":"), name);
return makeStorePath("output:out",
hashString(htSHA256,
"fixed:out:"
+ (recursive == FileIngestionMethod::Recursive ? (string) "r:" : "")
+ hash.to_string(Base16) + ":"),
name);
}
}
@ -201,10 +204,12 @@ StorePath Store::makeTextPath(std::string_view name, const Hash & hash,
std::pair<StorePath, Hash> Store::computeStorePathForPath(std::string_view name,
const Path & srcPath, bool recursive, HashType hashAlgo, PathFilter & filter) const
const Path & srcPath, FileIngestionMethod method, HashType hashAlgo, PathFilter & filter) const
{
Hash h = recursive ? hashPath(hashAlgo, srcPath, filter).first : hashFile(hashAlgo, srcPath);
return std::make_pair(makeFixedOutputPath(recursive, h, name), h);
Hash h = method == FileIngestionMethod::Recursive
? hashPath(hashAlgo, srcPath, filter).first
: hashFile(hashAlgo, srcPath);
return std::make_pair(makeFixedOutputPath(method, h, name), h);
}
@ -786,8 +791,8 @@ bool ValidPathInfo::isContentAddressed(const Store & store) const
}
else if (hasPrefix(ca, "fixed:")) {
bool recursive = ca.compare(6, 2, "r:") == 0;
Hash hash(std::string(ca, recursive ? 8 : 6));
FileIngestionMethod recursive { ca.compare(6, 2, "r:") == 0 };
Hash hash(std::string(ca, recursive == FileIngestionMethod::Recursive ? 8 : 6));
auto refs = cloneStorePathSet(references);
bool hasSelfReference = false;
if (refs.count(path)) {
@ -831,27 +836,14 @@ Strings ValidPathInfo::shortRefs() const
}
std::string makeFixedOutputCA(bool recursive, const Hash & hash)
std::string makeFixedOutputCA(FileIngestionMethod recursive, const Hash & hash)
{
return "fixed:" + (recursive ? (std::string) "r:" : "") + hash.to_string();
return "fixed:"
+ (recursive == FileIngestionMethod::Recursive ? (std::string) "r:" : "")
+ hash.to_string();
}
void Store::addToStore(const ValidPathInfo & info, Source & narSource,
RepairFlag repair, CheckSigsFlag checkSigs,
std::shared_ptr<FSAccessor> accessor)
{
addToStore(info, make_ref<std::string>(narSource.drain()), repair, checkSigs, accessor);
}
void Store::addToStore(const ValidPathInfo & info, const ref<std::string> & nar,
RepairFlag repair, CheckSigsFlag checkSigs,
std::shared_ptr<FSAccessor> accessor)
{
StringSource source(*nar);
addToStore(info, source, repair, checkSigs, accessor);
}
}
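
For orientation, the content-address string built by makeFixedOutputCA above has the shape `fixed:[r:]<hash>`, with `r:` present only for recursive ingestion. A standalone sketch of just that formatting; a plain digest string stands in for the real Hash type:

    #include <cstdint>
    #include <iostream>
    #include <string>

    enum struct FileIngestionMethod : uint8_t { Flat = false, Recursive = true };

    // Same formatting as makeFixedOutputCA above, minus the Hash class.
    static std::string makeFixedOutputCA(FileIngestionMethod method, const std::string & hashStr)
    {
        return "fixed:"
            + (method == FileIngestionMethod::Recursive ? std::string("r:") : std::string())
            + hashStr;
    }

    int main()
    {
        std::cout << makeFixedOutputCA(FileIngestionMethod::Recursive, "sha256:<digest>") << "\n";  // fixed:r:sha256:<digest>
        std::cout << makeFixedOutputCA(FileIngestionMethod::Flat, "sha256:<digest>") << "\n";       // fixed:sha256:<digest>
    }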


@ -44,7 +44,6 @@ enum CheckSigsFlag : bool { NoCheckSigs = false, CheckSigs = true };
enum SubstituteFlag : bool { NoSubstitute = false, Substitute = true };
enum AllowInvalidFlag : bool { DisallowInvalid = false, AllowInvalid = true };
/* Magic header of exportPath() output (obsolete). */
const uint32_t exportMagic = 0x4558494e;
@ -347,7 +346,7 @@ public:
StorePath makeOutputPath(const string & id,
const Hash & hash, std::string_view name) const;
StorePath makeFixedOutputPath(bool recursive,
StorePath makeFixedOutputPath(FileIngestionMethod method,
const Hash & hash, std::string_view name,
const StorePathSet & references = {},
bool hasSelfReference = false) const;
@ -359,7 +358,7 @@ public:
store path to which srcPath is to be copied. Returns the store
path and the cryptographic hash of the contents of srcPath. */
std::pair<StorePath, Hash> computeStorePathForPath(std::string_view name,
const Path & srcPath, bool recursive = true,
const Path & srcPath, FileIngestionMethod method = FileIngestionMethod::Recursive,
HashType hashAlgo = htSHA256, PathFilter & filter = defaultPathFilter) const;
/* Preparatory part of addTextToStore().
@ -451,24 +450,19 @@ public:
/* Import a path into the store. */
virtual void addToStore(const ValidPathInfo & info, Source & narSource,
RepairFlag repair = NoRepair, CheckSigsFlag checkSigs = CheckSigs,
std::shared_ptr<FSAccessor> accessor = 0);
// FIXME: remove
virtual void addToStore(const ValidPathInfo & info, const ref<std::string> & nar,
RepairFlag repair = NoRepair, CheckSigsFlag checkSigs = CheckSigs,
std::shared_ptr<FSAccessor> accessor = 0);
std::shared_ptr<FSAccessor> accessor = 0) = 0;
/* Copy the contents of a path to the store and register the
validity the resulting path. The resulting path is returned.
The function object `filter' can be used to exclude files (see
libutil/archive.hh). */
virtual StorePath addToStore(const string & name, const Path & srcPath,
bool recursive = true, HashType hashAlgo = htSHA256,
FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256,
PathFilter & filter = defaultPathFilter, RepairFlag repair = NoRepair) = 0;
// FIXME: remove?
virtual StorePath addToStoreFromDump(const string & dump, const string & name,
bool recursive = true, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair)
FileIngestionMethod method = FileIngestionMethod::Recursive, HashType hashAlgo = htSHA256, RepairFlag repair = NoRepair)
{
throw Error("addToStoreFromDump() is not supported by this store");
}
@ -851,7 +845,7 @@ std::optional<ValidPathInfo> decodeValidPathInfo(
/* Compute the content-addressability assertion (ValidPathInfo::ca)
for paths created by makeFixedOutputPath() / addToStore(). */
std::string makeFixedOutputCA(bool recursive, const Hash & hash);
std::string makeFixedOutputCA(FileIngestionMethod method, const Hash & hash);
/* Split URI into protocol+hierarchy part and its parameter set. */


@ -65,60 +65,63 @@ void Config::getSettings(std::map<std::string, SettingInfo> & res, bool override
res.emplace(opt.first, SettingInfo{opt.second.setting->to_string(), opt.second.setting->description});
}
void AbstractConfig::applyConfig(const std::string & contents, const std::string & path) {
unsigned int pos = 0;
while (pos < contents.size()) {
string line;
while (pos < contents.size() && contents[pos] != '\n')
line += contents[pos++];
pos++;
string::size_type hash = line.find('#');
if (hash != string::npos)
line = string(line, 0, hash);
vector<string> tokens = tokenizeString<vector<string> >(line);
if (tokens.empty()) continue;
if (tokens.size() < 2)
throw UsageError("illegal configuration line '%1%' in '%2%'", line, path);
auto include = false;
auto ignoreMissing = false;
if (tokens[0] == "include")
include = true;
else if (tokens[0] == "!include") {
include = true;
ignoreMissing = true;
}
if (include) {
if (tokens.size() != 2)
throw UsageError("illegal configuration line '%1%' in '%2%'", line, path);
auto p = absPath(tokens[1], dirOf(path));
if (pathExists(p)) {
applyConfigFile(p);
} else if (!ignoreMissing) {
throw Error("file '%1%' included from '%2%' not found", p, path);
}
continue;
}
if (tokens[1] != "=")
throw UsageError("illegal configuration line '%1%' in '%2%'", line, path);
string name = tokens[0];
vector<string>::iterator i = tokens.begin();
advance(i, 2);
set(name, concatStringsSep(" ", Strings(i, tokens.end()))); // FIXME: slow
};
}
void AbstractConfig::applyConfigFile(const Path & path)
{
try {
string contents = readFile(path);
unsigned int pos = 0;
while (pos < contents.size()) {
string line;
while (pos < contents.size() && contents[pos] != '\n')
line += contents[pos++];
pos++;
string::size_type hash = line.find('#');
if (hash != string::npos)
line = string(line, 0, hash);
vector<string> tokens = tokenizeString<vector<string> >(line);
if (tokens.empty()) continue;
if (tokens.size() < 2)
throw UsageError("illegal configuration line '%1%' in '%2%'", line, path);
auto include = false;
auto ignoreMissing = false;
if (tokens[0] == "include")
include = true;
else if (tokens[0] == "!include") {
include = true;
ignoreMissing = true;
}
if (include) {
if (tokens.size() != 2)
throw UsageError("illegal configuration line '%1%' in '%2%'", line, path);
auto p = absPath(tokens[1], dirOf(path));
if (pathExists(p)) {
applyConfigFile(p);
} else if (!ignoreMissing) {
throw Error("file '%1%' included from '%2%' not found", p, path);
}
continue;
}
if (tokens[1] != "=")
throw UsageError("illegal configuration line '%1%' in '%2%'", line, path);
string name = tokens[0];
vector<string>::iterator i = tokens.begin();
advance(i, 2);
set(name, concatStringsSep(" ", Strings(i, tokens.end()))); // FIXME: slow
};
applyConfig(contents, path);
} catch (SysError &) { }
}
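
With the parsing loop extracted into applyConfig, configuration text no longer has to come from a file. A small usage sketch, assuming this tree's config.hh; the setting name and values are illustrative:

    #include "config.hh"
    #include <cassert>
    #include <string>

    using namespace nix;

    int main()
    {
        Config config;
        Setting<std::string> greeting{&config, "hello", "greeting", "an example setting"};

        // applyConfig() parses the same "name = value" lines (comments included)
        // that applyConfigFile() previously parsed inline.
        config.applyConfig("greeting = bonjour\n# a comment\n");
        assert(greeting.get() == "bonjour");
    }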


@ -7,6 +7,38 @@
namespace nix {
/**
* The Config class provides Nix runtime configurations.
*
* What is a Configuration?
* A collection of uniquely named Settings.
*
* What is a Setting?
* Each property that you can set in a configuration corresponds to a
* `Setting`. A setting records value and description of a property
* with a default and optional aliases.
*
* A valid configuration consists of settings that are registered to a
* `Config` object instance:
*
* Config config;
* Setting<std::string> systemSetting{&config, "x86_64-linux", "system", "the current system"};
*
* The above creates a `Config` object and registers a setting called "system"
* via the variable `systemSetting` with it. The setting defaults to the string
* "x86_64-linux", it's description is "the current system". All of the
* registered settings can then be accessed as shown below:
*
* std::map<std::string, Config::SettingInfo> settings;
* config.getSettings(settings);
* config["system"].description == "the current system"
* config["system"].value == "x86_64-linux"
*
*
* The above retrieves all currently known settings from the `Config` object
* and adds them to the `settings` map.
*/
class Args;
class AbstractSetting;
class JSONPlaceholder;
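
The class comment above sketches registration and lookup informally; here is a compilable version of the same flow, assuming the declarations in this header (getSettings replaces the pseudo `config["system"]` indexing used in the comment):

    #include "config.hh"
    #include <iostream>
    #include <map>
    #include <string>

    using namespace nix;

    int main()
    {
        Config config;
        // Register the "system" setting exactly as in the class comment.
        Setting<std::string> systemSetting{&config, "x86_64-linux", "system", "the current system"};

        // Retrieve all known settings and inspect the one just registered.
        std::map<std::string, Config::SettingInfo> settings;
        config.getSettings(settings);
        std::cout << settings.at("system").value << " - " << settings.at("system").description << "\n";
    }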
@ -23,6 +55,10 @@ protected:
public:
/**
* Sets the value referenced by `name` to `value`. Returns true if the
* setting is known, false otherwise.
*/
virtual bool set(const std::string & name, const std::string & value) = 0;
struct SettingInfo
@ -31,18 +67,52 @@ public:
std::string description;
};
/**
* Adds the currently known settings to the given result map `res`.
* - res: map to store settings in
* - overridenOnly: when set to true only overridden settings will be added to `res`
*/
virtual void getSettings(std::map<std::string, SettingInfo> & res, bool overridenOnly = false) = 0;
/**
* Parses the configuration in `contents` and applies it
* - contents: configuration contents to be parsed and applied
* - path: location of the configuration file
*/
void applyConfig(const std::string & contents, const std::string & path = "<unknown>");
/**
* Applies a nix configuration file
* - path: the location of the config file to apply
*/
void applyConfigFile(const Path & path);
/**
* Resets the `overridden` flag of all Settings
*/
virtual void resetOverriden() = 0;
/**
* Outputs all settings to JSON
* - out: JSONObject to write the configuration to
*/
virtual void toJSON(JSONObject & out) = 0;
/**
* Converts settings to `Args` to be used on the command line interface
* - args: args to write to
* - category: category of the settings
*/
virtual void convertToArgs(Args & args, const std::string & category) = 0;
/**
* Logs a warning for each unregistered setting
*/
void warnUnknownSettings();
/**
* Re-applies all previously attempted changes to unknown settings
*/
void reapplyUnknownSettings();
};


@ -10,13 +10,16 @@ namespace nix {
const std::string nativeSystem = SYSTEM;
// addPrefix is used for show-trace. Strings added with addPrefix
// will print ahead of the error itself.
BaseError & BaseError::addPrefix(const FormatOrString & fs)
{
prefix_ = fs.s + prefix_;
return *this;
}
// c++ std::exception descendants must have a 'const char* what()' function.
// This stringifies the error and caches it for use by what(), or similarly by msg().
const string& BaseError::calcWhat() const
{
if (what_.has_value())
@ -124,25 +127,25 @@ void getCodeLines(NixCode &nixCode)
}
}
// if nixCode contains lines of code, print them to the ostream, indicating the error column.
void printCodeLines(std::ostream &out, const string &prefix, const NixCode &nixCode)
{
// previous line of code.
if (nixCode.prevLineOfCode.has_value()) {
out << fmt("%1% %|2$5d|| %3%",
prefix,
(nixCode.errPos.line - 1),
*nixCode.prevLineOfCode)
<< std::endl;
out << std::endl
<< fmt("%1% %|2$5d|| %3%",
prefix,
(nixCode.errPos.line - 1),
*nixCode.prevLineOfCode);
}
if (nixCode.errLineOfCode.has_value()) {
// line of code containing the error.
out << fmt("%1% %|2$5d|| %3%",
prefix,
(nixCode.errPos.line),
*nixCode.errLineOfCode)
<< std::endl;
out << std::endl
<< fmt("%1% %|2$5d|| %3%",
prefix,
(nixCode.errPos.line),
*nixCode.errLineOfCode);
// error arrows for the column range.
if (nixCode.errPos.column > 0) {
int start = nixCode.errPos.column;
@ -153,20 +156,21 @@ void printCodeLines(std::ostream &out, const string &prefix, const NixCode &nixC
std::string arrows("^");
out << fmt("%1% |%2%" ANSI_RED "%3%" ANSI_NORMAL,
prefix,
spaces,
arrows) << std::endl;
out << std::endl
<< fmt("%1% |%2%" ANSI_RED "%3%" ANSI_NORMAL,
prefix,
spaces,
arrows);
}
}
// next line of code.
if (nixCode.nextLineOfCode.has_value()) {
out << fmt("%1% %|2$5d|| %3%",
prefix,
(nixCode.errPos.line + 1),
*nixCode.nextLineOfCode)
<< std::endl;
out << std::endl
<< fmt("%1% %|2$5d|| %3%",
prefix,
(nixCode.errPos.line + 1),
*nixCode.nextLineOfCode);
}
}
@ -239,17 +243,15 @@ std::ostream& operator<<(std::ostream &out, const ErrorInfo &einfo)
levelString,
einfo.name,
dashes,
einfo.programName.value_or(""))
<< std::endl;
einfo.programName.value_or(""));
else
out << fmt("%1%%2%" ANSI_BLUE " -----%3% %4%" ANSI_NORMAL,
prefix,
levelString,
dashes,
einfo.programName.value_or(""))
<< std::endl;
einfo.programName.value_or(""));
// filename, line, column.
bool nl = false; // intersperse newline between sections.
if (einfo.nixCode.has_value()) {
switch (einfo.nixCode->errPos.origin) {
case foFile: {
@ -273,30 +275,36 @@ std::ostream& operator<<(std::ostream &out, const ErrorInfo &einfo)
default:
throw Error("invalid FileOrigin in errPos");
}
nl = true;
}
// description
if (einfo.description != "") {
out << prefix << einfo.description << std::endl;
out << prefix << std::endl;
if (nl)
out << std::endl << prefix;
out << std::endl << prefix << einfo.description;
nl = true;
}
if (einfo.nixCode.has_value()) {
NixCode nixcode = *einfo.nixCode;
getCodeLines(nixcode);
// lines of code.
if (nixcode.errLineOfCode.has_value()) {
if (nl)
out << std::endl << prefix;
printCodeLines(out, prefix, nixcode);
out << prefix << std::endl;
nl = true;
}
}
// hint
if (einfo.hint.has_value()) {
out << prefix << *einfo.hint << std::endl;
out << prefix << std::endl;
if (nl)
out << std::endl << prefix;
out << std::endl << prefix << *einfo.hint;
nl = true;
}
return out;


@ -22,6 +22,23 @@
namespace nix {
/*
This file defines two main structs/classes used in nix error handling.
ErrorInfo provides a standard payload of error information, with conversion to string
happening in the logger rather than at the call site.
BaseError is the ancestor of nix specific exceptions (and Interrupted), and contains
an ErrorInfo.
ErrorInfo structs are sent to the logger as part of an exception, or directly with the
logError or logWarning macros.
See the error-demo.cc program for usage examples.
*/
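
As a small sketch of the direct path described above (complementing error-demo.cc), an ErrorInfo aggregate is handed straight to the logError macro; the header names and the message are assumptions, since in-tree callers get these declarations through their usual includes:

    #include "logging.hh"   // logError/logWarning macros (assumed include)
    #include "error.hh"     // ErrorInfo and hintfmt (assumed include)
    #include <string>

    using namespace nix;

    // Hypothetical helper mirroring the logError({ ... }) call sites in this commit;
    // ErrorInfo's remaining fields keep their defaults.
    void reportMissingPath(const std::string & path)
    {
        logError({
            .name = "Example error",
            .hint = hintfmt("path '%s' could not be found", path)
        });
    }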
typedef enum {
lvlError = 0,
lvlWarn,
@ -38,7 +55,7 @@ typedef enum {
foString
} FileOrigin;
// ErrPos indicates the location of an error in a nix file.
struct ErrPos {
int line = 0;
int column = 0;
@ -50,6 +67,7 @@ struct ErrPos {
return line != 0;
}
// convert from the Pos struct, found in libexpr.
template <class P>
ErrPos& operator=(const P &pos)
{
@ -74,8 +92,6 @@ struct NixCode {
std::optional<string> nextLineOfCode;
};
// -------------------------------------------------
// ErrorInfo.
struct ErrorInfo {
Verbosity level;
string name;
@ -164,7 +180,7 @@ public:
{
errNo = errno;
auto hf = hintfmt(args...);
err.hint = hintfmt("%1% : %2%", normaltxt(hf.str()), strerror(errNo));
err.hint = hintfmt("%1%: %2%", normaltxt(hf.str()), strerror(errNo));
}
virtual const char* sname() const override { return "SysError"; }


@ -251,10 +251,9 @@ bool handleJSONLogMessage(const std::string & msg,
}
} catch (std::exception & e) {
logError(
ErrorInfo {
.name = "Json log message",
.hint = hintfmt("bad log message from builder: %s", e.what())
logError({
.name = "Json log message",
.hint = hintfmt("bad log message from builder: %s", e.what())
});
}


@ -150,9 +150,23 @@ bool handleJSONLogMessage(const std::string & msg,
extern Verbosity verbosity; /* suppress msgs > this */
/* Print a message if the current log level is at least the specified
level. Note that this has to be implemented as a macro to ensure
that the arguments are evaluated lazily. */
/* Print a message with the standard ErrorInfo format.
In general, use these 'log' macros for reporting problems that may require user
intervention or that need more explanation. Use the 'print' macros for more
lightweight status messages. */
#define logErrorInfo(level, errorInfo...) \
do { \
if (level <= nix::verbosity) { \
logger->logEI(level, errorInfo); \
} \
} while (0)
#define logError(errorInfo...) logErrorInfo(lvlError, errorInfo)
#define logWarning(errorInfo...) logErrorInfo(lvlWarn, errorInfo)
/* Print a string message if the current log level is at least the specified
level. Note that this has to be implemented as a macro to ensure that the
arguments are evaluated lazily. */
#define printMsg(level, args...) \
do { \
if (level <= nix::verbosity) { \
@ -166,18 +180,7 @@ extern Verbosity verbosity; /* suppress msgs > this */
#define debug(args...) printMsg(lvlDebug, args)
#define vomit(args...) printMsg(lvlVomit, args)
#define logErrorInfo(level, errorInfo...) \
do { \
if (level <= nix::verbosity) { \
logger->logEI(level, errorInfo); \
} \
} while (0)
#define logError(errorInfo...) logErrorInfo(lvlError, errorInfo)
#define logWarning(errorInfo...) logErrorInfo(lvlWarn, errorInfo)
/* if verbosity >= lvlWarn, print a message with a yellow 'warning:' prefix. */
template<typename... Args>
inline void warn(const std::string & fs, const Args & ... args)
{

View file

@ -1,5 +1,6 @@
#pragma once
#include <cassert>
#include <map>
#include <list>
#include <optional>

264
src/libutil/tests/config.cc Normal file
View file

@ -0,0 +1,264 @@
#include "json.hh"
#include "config.hh"
#include "args.hh"
#include <sstream>
#include <gtest/gtest.h>
namespace nix {
/* ----------------------------------------------------------------------------
* Config
* --------------------------------------------------------------------------*/
TEST(Config, setUndefinedSetting) {
Config config;
ASSERT_EQ(config.set("undefined-key", "value"), false);
}
TEST(Config, setDefinedSetting) {
Config config;
std::string value;
Setting<std::string> foo{&config, value, "name-of-the-setting", "description"};
ASSERT_EQ(config.set("name-of-the-setting", "value"), true);
}
TEST(Config, getDefinedSetting) {
Config config;
std::string value;
std::map<std::string, Config::SettingInfo> settings;
Setting<std::string> foo{&config, value, "name-of-the-setting", "description"};
config.getSettings(settings, /* overridenOnly = */ false);
const auto iter = settings.find("name-of-the-setting");
ASSERT_NE(iter, settings.end());
ASSERT_EQ(iter->second.value, "");
ASSERT_EQ(iter->second.description, "description");
}
TEST(Config, getDefinedOverridenSettingNotSet) {
Config config;
std::string value;
std::map<std::string, Config::SettingInfo> settings;
Setting<std::string> foo{&config, value, "name-of-the-setting", "description"};
config.getSettings(settings, /* overridenOnly = */ true);
const auto e = settings.find("name-of-the-setting");
ASSERT_EQ(e, settings.end());
}
TEST(Config, getDefinedSettingSet1) {
Config config;
std::string value;
std::map<std::string, Config::SettingInfo> settings;
Setting<std::string> setting{&config, value, "name-of-the-setting", "description"};
setting.assign("value");
config.getSettings(settings, /* overridenOnly = */ false);
const auto iter = settings.find("name-of-the-setting");
ASSERT_NE(iter, settings.end());
ASSERT_EQ(iter->second.value, "value");
ASSERT_EQ(iter->second.description, "description");
}
TEST(Config, getDefinedSettingSet2) {
Config config;
std::map<std::string, Config::SettingInfo> settings;
Setting<std::string> setting{&config, "", "name-of-the-setting", "description"};
ASSERT_TRUE(config.set("name-of-the-setting", "value"));
config.getSettings(settings, /* overridenOnly = */ false);
const auto e = settings.find("name-of-the-setting");
ASSERT_NE(e, settings.end());
ASSERT_EQ(e->second.value, "value");
ASSERT_EQ(e->second.description, "description");
}
TEST(Config, addSetting) {
class TestSetting : public AbstractSetting {
public:
TestSetting() : AbstractSetting("test", "test", {}) {}
void set(const std::string & value) {}
std::string to_string() const { return {}; }
};
Config config;
TestSetting setting;
ASSERT_FALSE(config.set("test", "value"));
config.addSetting(&setting);
ASSERT_TRUE(config.set("test", "value"));
}
TEST(Config, withInitialValue) {
const StringMap initials = {
{ "key", "value" },
};
Config config(initials);
{
std::map<std::string, Config::SettingInfo> settings;
config.getSettings(settings, /* overridenOnly = */ false);
ASSERT_EQ(settings.find("key"), settings.end());
}
Setting<std::string> setting{&config, "default-value", "key", "description"};
{
std::map<std::string, Config::SettingInfo> settings;
config.getSettings(settings, /* overridenOnly = */ false);
ASSERT_EQ(settings["key"].value, "value");
}
}
TEST(Config, resetOverriden) {
Config config;
config.resetOverriden();
}
TEST(Config, resetOverridenWithSetting) {
Config config;
Setting<std::string> setting{&config, "", "name-of-the-setting", "description"};
{
std::map<std::string, Config::SettingInfo> settings;
setting.set("foo");
ASSERT_EQ(setting.get(), "foo");
config.getSettings(settings, /* overridenOnly = */ true);
ASSERT_TRUE(settings.empty());
}
{
std::map<std::string, Config::SettingInfo> settings;
setting.override("bar");
ASSERT_TRUE(setting.overriden);
ASSERT_EQ(setting.get(), "bar");
config.getSettings(settings, /* overridenOnly = */ true);
ASSERT_FALSE(settings.empty());
}
{
std::map<std::string, Config::SettingInfo> settings;
config.resetOverriden();
ASSERT_FALSE(setting.overriden);
config.getSettings(settings, /* overridenOnly = */ true);
ASSERT_TRUE(settings.empty());
}
}
TEST(Config, toJSONOnEmptyConfig) {
std::stringstream out;
{ // Scoped to force the destructor of JSONObject to write the final `}`
JSONObject obj(out);
Config config;
config.toJSON(obj);
}
ASSERT_EQ(out.str(), "{}");
}
TEST(Config, toJSONOnNonEmptyConfig) {
std::stringstream out;
{ // Scoped to force the destructor of JSONObject to write the final `}`
JSONObject obj(out);
Config config;
std::map<std::string, Config::SettingInfo> settings;
Setting<std::string> setting{&config, "", "name-of-the-setting", "description"};
setting.assign("value");
config.toJSON(obj);
}
ASSERT_EQ(out.str(), R"#({"name-of-the-setting":{"description":"description","value":"value"}})#");
}
TEST(Config, setSettingAlias) {
Config config;
Setting<std::string> setting{&config, "", "some-int", "best number", { "another-int" }};
ASSERT_TRUE(config.set("some-int", "1"));
ASSERT_EQ(setting.get(), "1");
ASSERT_TRUE(config.set("another-int", "2"));
ASSERT_EQ(setting.get(), "2");
ASSERT_TRUE(config.set("some-int", "3"));
ASSERT_EQ(setting.get(), "3");
}
/* FIXME: The reapplyUnknownSettings method doesn't seem to do anything
* useful (these days). Whenever we add a new setting to Config the
* unknown settings are always considered. In which case is this function
* actually useful? Is there some way to register a Setting without calling
* addSetting? */
TEST(Config, DISABLED_reapplyUnknownSettings) {
Config config;
ASSERT_FALSE(config.set("name-of-the-setting", "unknownvalue"));
Setting<std::string> setting{&config, "default", "name-of-the-setting", "description"};
ASSERT_EQ(setting.get(), "default");
config.reapplyUnknownSettings();
ASSERT_EQ(setting.get(), "unknownvalue");
}
TEST(Config, applyConfigEmpty) {
Config config;
std::map<std::string, Config::SettingInfo> settings;
config.applyConfig("");
config.getSettings(settings);
ASSERT_TRUE(settings.empty());
}
TEST(Config, applyConfigEmptyWithComment) {
Config config;
std::map<std::string, Config::SettingInfo> settings;
config.applyConfig("# just a comment");
config.getSettings(settings);
ASSERT_TRUE(settings.empty());
}
TEST(Config, applyConfigAssignment) {
Config config;
std::map<std::string, Config::SettingInfo> settings;
Setting<std::string> setting{&config, "", "name-of-the-setting", "description"};
config.applyConfig(
"name-of-the-setting = value-from-file #useful comment\n"
"# name-of-the-setting = foo\n"
);
config.getSettings(settings);
ASSERT_FALSE(settings.empty());
ASSERT_EQ(settings["name-of-the-setting"].value, "value-from-file");
}
TEST(Config, applyConfigWithReassignedSetting) {
Config config;
std::map<std::string, Config::SettingInfo> settings;
Setting<std::string> setting{&config, "", "name-of-the-setting", "description"};
config.applyConfig(
"name-of-the-setting = first-value\n"
"name-of-the-setting = second-value\n"
);
config.getSettings(settings);
ASSERT_FALSE(settings.empty());
ASSERT_EQ(settings["name-of-the-setting"].value, "second-value");
}
TEST(Config, applyConfigFailsOnMissingIncludes) {
Config config;
std::map<std::string, Config::SettingInfo> settings;
Setting<std::string> setting{&config, "", "name-of-the-setting", "description"};
ASSERT_THROW(config.applyConfig(
"name-of-the-setting = value-from-file\n"
"# name-of-the-setting = foo\n"
"include /nix/store/does/not/exist.nix"
), Error);
}
TEST(Config, applyConfigInvalidThrows) {
Config config;
ASSERT_THROW(config.applyConfig("value == key"), UsageError);
ASSERT_THROW(config.applyConfig("value "), UsageError);
}
}

80
src/libutil/tests/hash.cc Normal file
View file

@ -0,0 +1,80 @@
#include "hash.hh"
#include <gtest/gtest.h>
namespace nix {
/* ----------------------------------------------------------------------------
* hashString
* --------------------------------------------------------------------------*/
TEST(hashString, testKnownMD5Hashes1) {
// values taken from: https://tools.ietf.org/html/rfc1321
auto s1 = "";
auto hash = hashString(HashType::htMD5, s1);
ASSERT_EQ(hash.to_string(Base::Base16), "md5:d41d8cd98f00b204e9800998ecf8427e");
}
TEST(hashString, testKnownMD5Hashes2) {
// values taken from: https://tools.ietf.org/html/rfc1321
auto s2 = "abc";
auto hash = hashString(HashType::htMD5, s2);
ASSERT_EQ(hash.to_string(Base::Base16), "md5:900150983cd24fb0d6963f7d28e17f72");
}
TEST(hashString, testKnownSHA1Hashes1) {
// values taken from: https://tools.ietf.org/html/rfc3174
auto s = "abc";
auto hash = hashString(HashType::htSHA1, s);
ASSERT_EQ(hash.to_string(Base::Base16),"sha1:a9993e364706816aba3e25717850c26c9cd0d89d");
}
TEST(hashString, testKnownSHA1Hashes2) {
// values taken from: https://tools.ietf.org/html/rfc3174
auto s = "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq";
auto hash = hashString(HashType::htSHA1, s);
ASSERT_EQ(hash.to_string(Base::Base16),"sha1:84983e441c3bd26ebaae4aa1f95129e5e54670f1");
}
TEST(hashString, testKnownSHA256Hashes1) {
// values taken from: https://tools.ietf.org/html/rfc4634
auto s = "abc";
auto hash = hashString(HashType::htSHA256, s);
ASSERT_EQ(hash.to_string(Base::Base16),
"sha256:ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad");
}
TEST(hashString, testKnownSHA256Hashes2) {
// values taken from: https://tools.ietf.org/html/rfc4634
auto s = "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq";
auto hash = hashString(HashType::htSHA256, s);
ASSERT_EQ(hash.to_string(Base::Base16),
"sha256:248d6a61d20638b8e5c026930c3e6039a33ce45964ff2167f6ecedd419db06c1");
}
TEST(hashString, testKnownSHA512Hashes1) {
// values taken from: https://tools.ietf.org/html/rfc4634
auto s = "abc";
auto hash = hashString(HashType::htSHA512, s);
ASSERT_EQ(hash.to_string(Base::Base16),
"sha512:ddaf35a193617abacc417349ae20413112e6fa4e89a9"
"7ea20a9eeee64b55d39a2192992a274fc1a836ba3c23a3feebbd"
"454d4423643ce80e2a9ac94fa54ca49f");
}
TEST(hashString, testKnownSHA512Hashes2) {
// values taken from: https://tools.ietf.org/html/rfc4634
auto s = "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu";
auto hash = hashString(HashType::htSHA512, s);
ASSERT_EQ(hash.to_string(Base::Base16),
"sha512:8e959b75dae313da8cf4f72814fc143f8f7779c6eb9f7fa1"
"7299aeadb6889018501d289e4900f7e4331b99dec4b5433a"
"c7d329eeb6dd26545e96e55b874be909");
}
TEST(hashString, hashingWithUnknownAlgoExits) {
auto s = "unknown";
ASSERT_DEATH(hashString(HashType::htUnknown, s), "");
}
}

193
src/libutil/tests/json.cc Normal file
View file

@ -0,0 +1,193 @@
#include "json.hh"
#include <gtest/gtest.h>
#include <sstream>
namespace nix {
/* ----------------------------------------------------------------------------
* toJSON
* --------------------------------------------------------------------------*/
TEST(toJSON, quotesCharPtr) {
const char* input = "test";
std::stringstream out;
toJSON(out, input);
ASSERT_EQ(out.str(), "\"test\"");
}
TEST(toJSON, quotesStdString) {
std::string input = "test";
std::stringstream out;
toJSON(out, input);
ASSERT_EQ(out.str(), "\"test\"");
}
TEST(toJSON, convertsNullptrtoNull) {
auto input = nullptr;
std::stringstream out;
toJSON(out, input);
ASSERT_EQ(out.str(), "null");
}
TEST(toJSON, convertsNullToNull) {
const char* input = 0;
std::stringstream out;
toJSON(out, input);
ASSERT_EQ(out.str(), "null");
}
TEST(toJSON, convertsFloat) {
auto input = 1.024f;
std::stringstream out;
toJSON(out, input);
ASSERT_EQ(out.str(), "1.024");
}
TEST(toJSON, convertsDouble) {
const double input = 1.024;
std::stringstream out;
toJSON(out, input);
ASSERT_EQ(out.str(), "1.024");
}
TEST(toJSON, convertsBool) {
auto input = false;
std::stringstream out;
toJSON(out, input);
ASSERT_EQ(out.str(), "false");
}
TEST(toJSON, quotesTab) {
std::stringstream out;
toJSON(out, "\t");
ASSERT_EQ(out.str(), "\"\\t\"");
}
TEST(toJSON, quotesNewline) {
std::stringstream out;
toJSON(out, "\n");
ASSERT_EQ(out.str(), "\"\\n\"");
}
TEST(toJSON, quotesCreturn) {
std::stringstream out;
toJSON(out, "\r");
ASSERT_EQ(out.str(), "\"\\r\"");
}
TEST(toJSON, quotesCreturnNewLine) {
std::stringstream out;
toJSON(out, "\r\n");
ASSERT_EQ(out.str(), "\"\\r\\n\"");
}
TEST(toJSON, quotesDoublequotes) {
std::stringstream out;
toJSON(out, "\"");
ASSERT_EQ(out.str(), "\"\\\"\"");
}
TEST(toJSON, substringEscape) {
std::stringstream out;
const char *s = "foo\t";
toJSON(out, s+3, s + strlen(s));
ASSERT_EQ(out.str(), "\"\\t\"");
}
/* ----------------------------------------------------------------------------
* JSONObject
* --------------------------------------------------------------------------*/
TEST(JSONObject, emptyObject) {
std::stringstream out;
{
JSONObject t(out);
}
ASSERT_EQ(out.str(), "{}");
}
TEST(JSONObject, objectWithList) {
std::stringstream out;
{
JSONObject t(out);
auto l = t.list("list");
l.elem("element");
}
ASSERT_EQ(out.str(), R"#({"list":["element"]})#");
}
TEST(JSONObject, objectWithListIndent) {
std::stringstream out;
{
JSONObject t(out, true);
auto l = t.list("list");
l.elem("element");
}
ASSERT_EQ(out.str(),
R"#({
"list": [
"element"
]
})#");
}
TEST(JSONObject, objectWithPlaceholderAndList) {
std::stringstream out;
{
JSONObject t(out);
auto l = t.placeholder("list");
l.list().elem("element");
}
ASSERT_EQ(out.str(), R"#({"list":["element"]})#");
}
TEST(JSONObject, objectWithPlaceholderAndObject) {
std::stringstream out;
{
JSONObject t(out);
auto l = t.placeholder("object");
l.object().attr("key", "value");
}
ASSERT_EQ(out.str(), R"#({"object":{"key":"value"}})#");
}
/* ----------------------------------------------------------------------------
* JSONList
* --------------------------------------------------------------------------*/
TEST(JSONList, empty) {
std::stringstream out;
{
JSONList l(out);
}
ASSERT_EQ(out.str(), R"#([])#");
}
TEST(JSONList, withElements) {
std::stringstream out;
{
JSONList l(out);
l.elem("one");
l.object();
l.placeholder().write("three");
}
ASSERT_EQ(out.str(), R"#(["one",{},"three"])#");
}
}

View file

@ -8,7 +8,7 @@ libutil-tests_INSTALL_DIR :=
libutil-tests_SOURCES := $(wildcard $(d)/*.cc)
libutil-tests_CXXFLAGS += -I src/libutil
libutil-tests_CXXFLAGS += -I src/libutil -I src/libexpr
libutil-tests_LIBS = libutil

View file

@ -0,0 +1,255 @@
#include "logging.hh"
#include "nixexpr.hh"
#include "util.hh"
#include <gtest/gtest.h>
namespace nix {
/* ----------------------------------------------------------------------------
* logEI
* --------------------------------------------------------------------------*/
TEST(logEI, capturesBasicProperties) {
MakeError(TestError, Error);
ErrorInfo::programName = std::optional("error-unit-test");
try {
throw TestError("an error for testing purposes");
} catch (Error &e) {
testing::internal::CaptureStderr();
logger->logEI(e.info());
auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(),"\x1B[31;1merror:\x1B[0m\x1B[34;1m --- TestError ------------------------------------ error-unit-test\x1B[0m\nan error for testing purposes\n");
}
}
TEST(logEI, appendingHintsToPreviousError) {
MakeError(TestError, Error);
ErrorInfo::programName = std::optional("error-unit-test");
try {
auto e = Error("initial error");
throw TestError(e.info());
} catch (Error &e) {
ErrorInfo ei = e.info();
ei.hint = hintfmt("%s; subsequent error message.", normaltxt(e.info().hint ? e.info().hint->str() : ""));
testing::internal::CaptureStderr();
logger->logEI(ei);
auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- TestError ------------------------------------ error-unit-test\x1B[0m\n\x1B[33;1m\x1B[0minitial error\x1B[0m; subsequent error message.\n");
}
}
TEST(logEI, picksUpSysErrorExitCode) {
MakeError(TestError, Error);
ErrorInfo::programName = std::optional("error-unit-test");
try {
auto x = readFile(-1);
}
catch (SysError &e) {
testing::internal::CaptureStderr();
logError(e.info());
auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- SysError ------------------------------------- error-unit-test\x1B[0m\n\x1B[33;1m\x1B[0mstatting file\x1B[0m: \x1B[33;1mBad file descriptor\x1B[0m\n");
}
}
TEST(logEI, loggingErrorOnInfoLevel) {
testing::internal::CaptureStderr();
logger->logEI({ .level = lvlInfo,
.name = "Info name",
.description = "Info description",
});
auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[32;1minfo:\x1B[0m\x1B[34;1m --- Info name ------------------------------------- error-unit-test\x1B[0m\nInfo description\n");
}
TEST(logEI, loggingErrorOnTalkativeLevel) {
verbosity = lvlTalkative;
testing::internal::CaptureStderr();
logger->logEI({ .level = lvlTalkative,
.name = "Talkative name",
.description = "Talkative description",
});
auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[32;1mtalk:\x1B[0m\x1B[34;1m --- Talkative name -------------------------------- error-unit-test\x1B[0m\nTalkative description\n");
}
TEST(logEI, loggingErrorOnChattyLevel) {
verbosity = lvlChatty;
testing::internal::CaptureStderr();
logger->logEI({ .level = lvlChatty,
.name = "Chatty name",
.description = "Talkative description",
});
auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[32;1mchat:\x1B[0m\x1B[34;1m --- Chatty name ----------------------------------- error-unit-test\x1B[0m\nTalkative description\n");
}
TEST(logEI, loggingErrorOnDebugLevel) {
verbosity = lvlDebug;
testing::internal::CaptureStderr();
logger->logEI({ .level = lvlDebug,
.name = "Debug name",
.description = "Debug description",
});
auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[33;1mdebug:\x1B[0m\x1B[34;1m --- Debug name ----------------------------------- error-unit-test\x1B[0m\nDebug description\n");
}
TEST(logEI, loggingErrorOnVomitLevel) {
verbosity = lvlVomit;
testing::internal::CaptureStderr();
logger->logEI({ .level = lvlVomit,
.name = "Vomit name",
.description = "Vomit description",
});
auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[32;1mvomit:\x1B[0m\x1B[34;1m --- Vomit name ----------------------------------- error-unit-test\x1B[0m\nVomit description\n");
}
/* ----------------------------------------------------------------------------
* logError
* --------------------------------------------------------------------------*/
TEST(logError, logErrorWithoutHintOrCode) {
testing::internal::CaptureStderr();
logError({
.name = "name",
.description = "error description",
});
auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- name ----------------------------------------- error-unit-test\x1B[0m\nerror description\n");
}
TEST(logError, logErrorWithPreviousAndNextLinesOfCode) {
SymbolTable testTable;
auto problem_file = testTable.create("myfile.nix");
testing::internal::CaptureStderr();
logError({
.name = "error name",
.description = "error with code lines",
.hint = hintfmt("this hint has %1% templated %2%!!",
"yellow",
"values"),
.nixCode = NixCode {
.errPos = Pos(problem_file, 40, 13),
.prevLineOfCode = "previous line of code",
.errLineOfCode = "this is the problem line of code",
.nextLineOfCode = "next line of code",
}});
auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- error name ----------------------------------- error-unit-test\x1B[0m\nin file: \x1B[34;1mmyfile.nix (40:13)\x1B[0m\n\nerror with code lines\n\n 39| previous line of code\n 40| this is the problem line of code\n | \x1B[31;1m^\x1B[0m\n 41| next line of code\n\nthis hint has \x1B[33;1myellow\x1B[0m templated \x1B[33;1mvalues\x1B[0m!!\n");
}
TEST(logError, logErrorWithoutLinesOfCode) {
SymbolTable testTable;
auto problem_file = testTable.create("myfile.nix");
testing::internal::CaptureStderr();
logError({
.name = "error name",
.description = "error without any code lines.",
.hint = hintfmt("this hint has %1% templated %2%!!",
"yellow",
"values"),
.nixCode = NixCode {
.errPos = Pos(problem_file, 40, 13)
}});
auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- error name ----------------------------------- error-unit-test\x1B[0m\nin file: \x1B[34;1mmyfile.nix (40:13)\x1B[0m\n\nerror without any code lines.\n\nthis hint has \x1B[33;1myellow\x1B[0m templated \x1B[33;1mvalues\x1B[0m!!\n");
}
TEST(logError, logErrorWithOnlyHintAndName) {
SymbolTable testTable;
auto problem_file = testTable.create("myfile.nix");
testing::internal::CaptureStderr();
logError({
.name = "error name",
.hint = hintfmt("hint %1%", "only"),
.nixCode = NixCode {
.errPos = Pos(problem_file, 40, 13)
}});
auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- error name ----------------------------------- error-unit-test\x1B[0m\nin file: \x1B[34;1mmyfile.nix (40:13)\x1B[0m\n\nhint \x1B[33;1monly\x1B[0m\n");
}
/* ----------------------------------------------------------------------------
* logWarning
* --------------------------------------------------------------------------*/
TEST(logWarning, logWarningWithNameDescriptionAndHint) {
testing::internal::CaptureStderr();
logWarning({
.name = "name",
.description = "error description",
.hint = hintfmt("there was a %1%", "warning"),
});
auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[33;1mwarning:\x1B[0m\x1B[34;1m --- name --------------------------------------- error-unit-test\x1B[0m\nerror description\n\nthere was a \x1B[33;1mwarning\x1B[0m\n");
}
TEST(logWarning, logWarningWithFileLineNumAndCode) {
SymbolTable testTable;
auto problem_file = testTable.create("myfile.nix");
testing::internal::CaptureStderr();
logWarning({
.name = "warning name",
.description = "warning description",
.hint = hintfmt("this hint has %1% templated %2%!!",
"yellow",
"values"),
.nixCode = NixCode {
.errPos = Pos(problem_file, 40, 13),
.prevLineOfCode = std::nullopt,
.errLineOfCode = "this is the problem line of code",
.nextLineOfCode = std::nullopt
}});
auto str = testing::internal::GetCapturedStderr();
ASSERT_STREQ(str.c_str(), "\x1B[33;1mwarning:\x1B[0m\x1B[34;1m --- warning name ------------------------------- error-unit-test\x1B[0m\nin file: \x1B[34;1mmyfile.nix (40:13)\x1B[0m\n\nwarning description\n\n 40| this is the problem line of code\n | \x1B[31;1m^\x1B[0m\n\nthis hint has \x1B[33;1myellow\x1B[0m templated \x1B[33;1mvalues\x1B[0m!!\n");
}
}

View file

@ -0,0 +1,130 @@
#include "lru-cache.hh"
#include <gtest/gtest.h>
namespace nix {
/* ----------------------------------------------------------------------------
* size
* --------------------------------------------------------------------------*/
TEST(LRUCache, sizeOfEmptyCacheIsZero) {
LRUCache<std::string, std::string> c(10);
ASSERT_EQ(c.size(), 0);
}
TEST(LRUCache, sizeOfSingleElementCacheIsOne) {
LRUCache<std::string, std::string> c(10);
c.upsert("foo", "bar");
ASSERT_EQ(c.size(), 1);
}
/* ----------------------------------------------------------------------------
* upsert / get
* --------------------------------------------------------------------------*/
TEST(LRUCache, getFromEmptyCache) {
LRUCache<std::string, std::string> c(10);
auto val = c.get("x");
ASSERT_EQ(val.has_value(), false);
}
TEST(LRUCache, getExistingValue) {
LRUCache<std::string, std::string> c(10);
c.upsert("foo", "bar");
auto val = c.get("foo");
ASSERT_EQ(val, "bar");
}
TEST(LRUCache, getNonExistingValueFromNonEmptyCache) {
LRUCache<std::string, std::string> c(10);
c.upsert("foo", "bar");
auto val = c.get("another");
ASSERT_EQ(val.has_value(), false);
}
TEST(LRUCache, upsertOnZeroCapacityCache) {
LRUCache<std::string, std::string> c(0);
c.upsert("foo", "bar");
auto val = c.get("foo");
ASSERT_EQ(val.has_value(), false);
}
TEST(LRUCache, updateExistingValue) {
LRUCache<std::string, std::string> c(1);
c.upsert("foo", "bar");
auto val = c.get("foo");
ASSERT_EQ(val.value_or("error"), "bar");
ASSERT_EQ(c.size(), 1);
c.upsert("foo", "changed");
val = c.get("foo");
ASSERT_EQ(val.value_or("error"), "changed");
ASSERT_EQ(c.size(), 1);
}
TEST(LRUCache, overwriteOldestWhenCapacityIsReached) {
LRUCache<std::string, std::string> c(3);
c.upsert("one", "eins");
c.upsert("two", "zwei");
c.upsert("three", "drei");
ASSERT_EQ(c.size(), 3);
ASSERT_EQ(c.get("one").value_or("error"), "eins");
// exceed capacity
c.upsert("another", "whatever");
ASSERT_EQ(c.size(), 3);
// Retrieving "one" above made it the most recently used element, so
// "two" was the oldest entry and has been evicted.
ASSERT_EQ(c.get("two").has_value(), false);
ASSERT_EQ(c.get("another").value(), "whatever");
}
/* ----------------------------------------------------------------------------
* clear
* --------------------------------------------------------------------------*/
TEST(LRUCache, clearEmptyCache) {
LRUCache<std::string, std::string> c(10);
c.clear();
ASSERT_EQ(c.size(), 0);
}
TEST(LRUCache, clearNonEmptyCache) {
LRUCache<std::string, std::string> c(10);
c.upsert("one", "eins");
c.upsert("two", "zwei");
c.upsert("three", "drei");
ASSERT_EQ(c.size(), 3);
c.clear();
ASSERT_EQ(c.size(), 0);
}
/* ----------------------------------------------------------------------------
* erase
* --------------------------------------------------------------------------*/
TEST(LRUCache, eraseFromEmptyCache) {
LRUCache<std::string, std::string> c(10);
ASSERT_EQ(c.erase("foo"), false);
ASSERT_EQ(c.size(), 0);
}
TEST(LRUCache, eraseMissingFromNonEmptyCache) {
LRUCache<std::string, std::string> c(10);
c.upsert("one", "eins");
ASSERT_EQ(c.erase("foo"), false);
ASSERT_EQ(c.size(), 1);
ASSERT_EQ(c.get("one").value_or("error"), "eins");
}
TEST(LRUCache, eraseFromNonEmptyCache) {
LRUCache<std::string, std::string> c(10);
c.upsert("one", "eins");
ASSERT_EQ(c.erase("one"), true);
ASSERT_EQ(c.size(), 0);
ASSERT_EQ(c.get("one").value_or("empty"), "empty");
}
}

View file

@ -5,6 +5,8 @@
namespace nix {
/* ----------- tests for util.hh ------------------------------------------------*/
/* ----------------------------------------------------------------------------
* absPath
* --------------------------------------------------------------------------*/
@ -15,6 +17,9 @@ namespace nix {
ASSERT_EQ(p, "/");
}
TEST(absPath, turnsEmptyPathIntoCWD) {
char cwd[PATH_MAX+1];
auto p = absPath("");
@ -581,5 +586,4 @@ namespace nix {
ASSERT_EQ(filterANSIEscapes(s, true), "foo bar baz" );
}
}

266
src/libutil/tests/url.cc Normal file
View file

@ -0,0 +1,266 @@
#include "url.hh"
#include <gtest/gtest.h>
namespace nix {
/* ----------- tests for url.hh --------------------------------------------------*/
string print_map(std::map<string, string> m) {
std::map<string, string>::iterator it;
string s = "{ ";
for (it = m.begin(); it != m.end(); ++it) {
s += "{ ";
s += it->first;
s += " = ";
s += it->second;
s += " } ";
}
s += "}";
return s;
}
std::ostream& operator<<(std::ostream& os, const ParsedURL& p) {
return os << "\n"
<< "url: " << p.url << "\n"
<< "base: " << p.base << "\n"
<< "scheme: " << p.scheme << "\n"
<< "authority: " << p.authority.value() << "\n"
<< "path: " << p.path << "\n"
<< "query: " << print_map(p.query) << "\n"
<< "fragment: " << p.fragment << "\n";
}
TEST(parseURL, parsesSimpleHttpUrl) {
auto s = "http://www.example.org/file.tar.gz";
auto parsed = parseURL(s);
ParsedURL expected {
.url = "http://www.example.org/file.tar.gz",
.base = "http://www.example.org/file.tar.gz",
.scheme = "http",
.authority = "www.example.org",
.path = "/file.tar.gz",
.query = (StringMap) { },
.fragment = "",
};
ASSERT_EQ(parsed, expected);
}
TEST(parseURL, parsesSimpleHttpsUrl) {
auto s = "https://www.example.org/file.tar.gz";
auto parsed = parseURL(s);
ParsedURL expected {
.url = "https://www.example.org/file.tar.gz",
.base = "https://www.example.org/file.tar.gz",
.scheme = "https",
.authority = "www.example.org",
.path = "/file.tar.gz",
.query = (StringMap) { },
.fragment = "",
};
ASSERT_EQ(parsed, expected);
}
TEST(parseURL, parsesSimpleHttpUrlWithQueryAndFragment) {
auto s = "https://www.example.org/file.tar.gz?download=fast&when=now#hello";
auto parsed = parseURL(s);
ParsedURL expected {
.url = "https://www.example.org/file.tar.gz",
.base = "https://www.example.org/file.tar.gz",
.scheme = "https",
.authority = "www.example.org",
.path = "/file.tar.gz",
.query = (StringMap) { { "download", "fast" }, { "when", "now" } },
.fragment = "hello",
};
ASSERT_EQ(parsed, expected);
}
TEST(parseURL, parsesSimpleHttpUrlWithComplexFragment) {
auto s = "http://www.example.org/file.tar.gz?field=value#?foo=bar%23";
auto parsed = parseURL(s);
ParsedURL expected {
.url = "http://www.example.org/file.tar.gz",
.base = "http://www.example.org/file.tar.gz",
.scheme = "http",
.authority = "www.example.org",
.path = "/file.tar.gz",
.query = (StringMap) { { "field", "value" } },
.fragment = "?foo=bar#",
};
ASSERT_EQ(parsed, expected);
}
TEST(parseURL, parseIPv4Address) {
auto s = "http://127.0.0.1:8080/file.tar.gz?download=fast&when=now#hello";
auto parsed = parseURL(s);
ParsedURL expected {
.url = "http://127.0.0.1:8080/file.tar.gz",
.base = "https://127.0.0.1:8080/file.tar.gz",
.scheme = "http",
.authority = "127.0.0.1:8080",
.path = "/file.tar.gz",
.query = (StringMap) { { "download", "fast" }, { "when", "now" } },
.fragment = "hello",
};
ASSERT_EQ(parsed, expected);
}
TEST(parseURL, parseIPv6Address) {
auto s = "http://[2a02:8071:8192:c100:311d:192d:81ac:11ea]:8080";
auto parsed = parseURL(s);
ParsedURL expected {
.url = "http://[2a02:8071:8192:c100:311d:192d:81ac:11ea]:8080",
.base = "http://[2a02:8071:8192:c100:311d:192d:81ac:11ea]:8080",
.scheme = "http",
.authority = "[2a02:8071:8192:c100:311d:192d:81ac:11ea]:8080",
.path = "",
.query = (StringMap) { },
.fragment = "",
};
ASSERT_EQ(parsed, expected);
}
TEST(parseURL, parseEmptyQueryParams) {
auto s = "http://127.0.0.1:8080/file.tar.gz?&&&&&";
auto parsed = parseURL(s);
ASSERT_EQ(parsed.query, (StringMap) { });
}
TEST(parseURL, parseUserPassword) {
auto s = "http://user:pass@www.example.org:8080/file.tar.gz";
auto parsed = parseURL(s);
ParsedURL expected {
.url = "http://user:pass@www.example.org/file.tar.gz",
.base = "http://user:pass@www.example.org/file.tar.gz",
.scheme = "http",
.authority = "user:pass@www.example.org:8080",
.path = "/file.tar.gz",
.query = (StringMap) { },
.fragment = "",
};
ASSERT_EQ(parsed, expected);
}
TEST(parseURL, parseFileURLWithQueryAndFragment) {
auto s = "file:///none/of/your/business";
auto parsed = parseURL(s);
ParsedURL expected {
.url = "",
.base = "",
.scheme = "file",
.authority = "",
.path = "/none/of/your/business",
.query = (StringMap) { },
.fragment = "",
};
ASSERT_EQ(parsed, expected);
}
TEST(parseURL, parsedUrlsIsEqualToItself) {
auto s = "http://www.example.org/file.tar.gz";
auto url = parseURL(s);
ASSERT_TRUE(url == url);
}
TEST(parseURL, parseFTPUrl) {
auto s = "ftp://ftp.nixos.org/downloads/nixos.iso";
auto parsed = parseURL(s);
ParsedURL expected {
.url = "ftp://ftp.nixos.org/downloads/nixos.iso",
.base = "ftp://ftp.nixos.org/downloads/nixos.iso",
.scheme = "ftp",
.authority = "ftp.nixos.org",
.path = "/downloads/nixos.iso",
.query = (StringMap) { },
.fragment = "",
};
ASSERT_EQ(parsed, expected);
}
TEST(parseURL, parsesAnythingInUriFormat) {
auto s = "whatever://github.com/NixOS/nixpkgs.git";
auto parsed = parseURL(s);
}
TEST(parseURL, parsesAnythingInUriFormatWithoutDoubleSlash) {
auto s = "whatever:github.com/NixOS/nixpkgs.git";
auto parsed = parseURL(s);
}
TEST(parseURL, emptyStringIsInvalidURL) {
ASSERT_THROW(parseURL(""), Error);
}
/* ----------------------------------------------------------------------------
* decodeQuery
* --------------------------------------------------------------------------*/
TEST(decodeQuery, emptyStringYieldsEmptyMap) {
auto d = decodeQuery("");
ASSERT_EQ(d, (StringMap) { });
}
TEST(decodeQuery, simpleDecode) {
auto d = decodeQuery("yi=one&er=two");
ASSERT_EQ(d, ((StringMap) { { "yi", "one" }, { "er", "two" } }));
}
TEST(decodeQuery, decodeUrlEncodedArgs) {
auto d = decodeQuery("arg=%3D%3D%40%3D%3D");
ASSERT_EQ(d, ((StringMap) { { "arg", "==@==" } }));
}
TEST(decodeQuery, decodeArgWithEmptyValue) {
auto d = decodeQuery("arg=");
ASSERT_EQ(d, ((StringMap) { { "arg", ""} }));
}
/* ----------------------------------------------------------------------------
* percentDecode
* --------------------------------------------------------------------------*/
TEST(percentDecode, decodesUrlEncodedString) {
string s = "==@==";
string d = percentDecode("%3D%3D%40%3D%3D");
ASSERT_EQ(d, s);
}
TEST(percentDecode, multipleDecodesAreIdempotent) {
string once = percentDecode("%3D%3D%40%3D%3D");
string twice = percentDecode(once);
ASSERT_EQ(once, twice);
}
TEST(percentDecode, trailingPercent) {
string s = "==@==%";
string d = percentDecode("%3D%3D%40%3D%3D%25");
ASSERT_EQ(d, s);
}
}

View file

@ -0,0 +1,105 @@
#include "xml-writer.hh"
#include <gtest/gtest.h>
#include <sstream>
namespace nix {
/* ----------------------------------------------------------------------------
* XMLWriter
* --------------------------------------------------------------------------*/
TEST(XMLWriter, emptyObject) {
std::stringstream out;
{
XMLWriter t(false, out);
}
ASSERT_EQ(out.str(), "<?xml version='1.0' encoding='utf-8'?>\n");
}
TEST(XMLWriter, objectWithEmptyElement) {
std::stringstream out;
{
XMLWriter t(false, out);
t.openElement("foobar");
}
ASSERT_EQ(out.str(), "<?xml version='1.0' encoding='utf-8'?>\n<foobar></foobar>");
}
TEST(XMLWriter, objectWithElementWithAttrs) {
std::stringstream out;
{
XMLWriter t(false, out);
XMLAttrs attrs = {
{ "foo", "bar" }
};
t.openElement("foobar", attrs);
}
ASSERT_EQ(out.str(), "<?xml version='1.0' encoding='utf-8'?>\n<foobar foo=\"bar\"></foobar>");
}
TEST(XMLWriter, objectWithElementWithEmptyAttrs) {
std::stringstream out;
{
XMLWriter t(false, out);
XMLAttrs attrs = {};
t.openElement("foobar", attrs);
}
ASSERT_EQ(out.str(), "<?xml version='1.0' encoding='utf-8'?>\n<foobar></foobar>");
}
TEST(XMLWriter, objectWithElementWithAttrsEscaping) {
std::stringstream out;
{
XMLWriter t(false, out);
XMLAttrs attrs = {
{ "<key>", "<value>" }
};
t.openElement("foobar", attrs);
}
// XXX: While "<value>" is escaped, "<key>" isn't, which I think is a bug.
ASSERT_EQ(out.str(), "<?xml version='1.0' encoding='utf-8'?>\n<foobar <key>=\"&lt;value&gt;\"></foobar>");
}
TEST(XMLWriter, objectWithElementWithAttrsIndented) {
std::stringstream out;
{
XMLWriter t(true, out);
XMLAttrs attrs = {
{ "foo", "bar" }
};
t.openElement("foobar", attrs);
}
ASSERT_EQ(out.str(), "<?xml version='1.0' encoding='utf-8'?>\n<foobar foo=\"bar\">\n</foobar>\n");
}
TEST(XMLWriter, writeEmptyElement) {
std::stringstream out;
{
XMLWriter t(false, out);
t.writeEmptyElement("foobar");
}
ASSERT_EQ(out.str(), "<?xml version='1.0' encoding='utf-8'?>\n<foobar />");
}
TEST(XMLWriter, writeEmptyElementWithAttributes) {
std::stringstream out;
{
XMLWriter t(false, out);
XMLAttrs attrs = {
{ "foo", "bar" }
};
t.writeEmptyElement("foobar", attrs);
}
ASSERT_EQ(out.str(), "<?xml version='1.0' encoding='utf-8'?>\n<foobar foo=\"bar\" />");
}
}

View file

@ -4,6 +4,7 @@
namespace nix {
std::regex refRegex(refRegexS, std::regex::ECMAScript);
std::regex badGitRefRegex(badGitRefRegexS, std::regex::ECMAScript);
std::regex revRegex(revRegexS, std::regex::ECMAScript);
std::regex flakeIdRegex(flakeIdRegexS, std::regex::ECMAScript);

View file

@ -49,6 +49,12 @@ const static std::string pathRegex = "(?:" + segmentRegex + "(?:/" + segmentRege
const static std::string refRegexS = "[a-zA-Z0-9][a-zA-Z0-9_.-]*"; // FIXME: check
extern std::regex refRegex;
// Instead of defining what a good Git Ref is, we define what a bad Git Ref is
// This is because of the definition of a ref in refs.c in https://github.com/git/git
// See tests/fetchGitRefs.sh for the full definition
const static std::string badGitRefRegexS = "//|^[./]|/\\.|\\.\\.|[[:cntrl:][:space:]:?^~\[]|\\\\|\\*|\\.lock$|\\.lock/|@\\{|[/.]$|^@$|^$";
extern std::regex badGitRefRegex;
// A Git revision (a SHA-1 commit hash).
const static std::string revRegexS = "[0-9a-fA-F]{40}";
extern std::regex revRegex;
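A sketch of how these declarations might be used together when validating user-supplied values (the helper names are illustrative assumptions; the authoritative ref rules are exercised by tests/fetchGitRefs.sh):

#include <regex>
#include <string>

// assumes refRegex, badGitRefRegex and revRegex from above are in scope
bool looksLikeValidGitRef(const std::string & ref)
{
    // has the general shape of a ref name...
    return std::regex_match(ref, refRegex)
        // ...and contains none of the patterns Git itself rejects
        && !std::regex_search(ref, badGitRefRegex);
}

bool looksLikeGitRevision(const std::string & rev)
{
    // a full SHA-1 commit hash: exactly 40 hex characters
    return std::regex_match(rev, revRegex);
}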

View file

@ -123,10 +123,9 @@ static void getAllExprs(EvalState & state,
if (hasSuffix(attrName, ".nix"))
attrName = string(attrName, 0, attrName.size() - 4);
if (!attrs.insert(attrName).second) {
logError(
ErrorInfo {
.name = "Name collision",
.hint = hintfmt("warning: name collision in input Nix expressions, skipping '%1%'", path2)
logError({
.name = "Name collision",
.hint = hintfmt("warning: name collision in input Nix expressions, skipping '%1%'", path2)
});
continue;
}
@ -875,11 +874,10 @@ static void queryJSON(Globals & globals, vector<DrvInfo> & elems)
auto placeholder = metaObj.placeholder(j);
Value * v = i.queryMeta(j);
if (!v) {
logError(
ErrorInfo {
.name = "Invalid meta attribute",
.hint = hintfmt("derivation '%s' has invalid meta attribute '%s'",
i.queryName(), j)
logError({
.name = "Invalid meta attribute",
.hint = hintfmt("derivation '%s' has invalid meta attribute '%s'",
i.queryName(), j)
});
placeholder.write(nullptr);
} else {
@ -1131,12 +1129,11 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs)
attrs2["name"] = j;
Value * v = i.queryMeta(j);
if (!v)
logError(
ErrorInfo {
.name = "Invalid meta attribute",
.hint = hintfmt(
"derivation '%s' has invalid meta attribute '%s'",
i.queryName(), j)
logError({
.name = "Invalid meta attribute",
.hint = hintfmt(
"derivation '%s' has invalid meta attribute '%s'",
i.queryName(), j)
});
else {
if (v->type == tString) {

View file

@ -159,7 +159,8 @@ static int _main(int argc, char * * argv)
std::optional<StorePath> storePath;
if (args.size() == 2) {
expectedHash = Hash(args[1], ht);
storePath = store->makeFixedOutputPath(unpack, expectedHash, name);
const auto recursive = unpack ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat;
storePath = store->makeFixedOutputPath(recursive, expectedHash, name);
if (store->isValidPath(*storePath))
hash = expectedHash;
else
@ -208,13 +209,15 @@ static int _main(int argc, char * * argv)
if (expectedHash != Hash(ht) && expectedHash != hash)
throw Error("hash mismatch for '%1%'", uri);
const auto recursive = unpack ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat;
/* Copy the file to the Nix store. FIXME: if RemoteStore
implemented addToStoreFromDump() and downloadFile()
supported a sink, we could stream the download directly
into the Nix store. */
storePath = store->addToStore(name, tmpFile, unpack, ht);
storePath = store->addToStore(name, tmpFile, recursive, ht);
assert(*storePath == store->makeFixedOutputPath(unpack, hash, name));
assert(*storePath == store->makeFixedOutputPath(recursive, hash, name));
}
stopProgressBar();

View file

@ -174,10 +174,10 @@ static void opAdd(Strings opFlags, Strings opArgs)
store. */
static void opAddFixed(Strings opFlags, Strings opArgs)
{
bool recursive = false;
auto recursive = FileIngestionMethod::Flat;
for (auto & i : opFlags)
if (i == "--recursive") recursive = true;
if (i == "--recursive") recursive = FileIngestionMethod::Recursive;
else throw UsageError("unknown flag '%1%'", i);
if (opArgs.empty())
@ -194,10 +194,10 @@ static void opAddFixed(Strings opFlags, Strings opArgs)
/* Hack to support caching in `nix-prefetch-url'. */
static void opPrintFixedPath(Strings opFlags, Strings opArgs)
{
bool recursive = false;
auto recursive = FileIngestionMethod::Flat;
for (auto i : opFlags)
if (i == "--recursive") recursive = true;
if (i == "--recursive") recursive = FileIngestionMethod::Recursive;
else throw UsageError("unknown flag '%1%'", i);
if (opArgs.size() != 3)
@ -704,7 +704,7 @@ static void opVerify(Strings opFlags, Strings opArgs)
else throw UsageError("unknown flag '%1%'", i);
if (store->verifyStore(checkContents, repair)) {
logWarning(ErrorInfo {
logWarning({
.name = "Store consistency",
.description = "not all errors were fixed"
});
@ -729,14 +729,13 @@ static void opVerifyPath(Strings opFlags, Strings opArgs)
store->narFromPath(path, sink);
auto current = sink.finish();
if (current.first != info->narHash) {
logError(
ErrorInfo {
.name = "Hash mismatch",
.hint = hintfmt(
"path '%s' was modified! expected hash '%s', got '%s'",
store->printStorePath(path),
info->narHash.to_string(),
current.first.to_string())
logError({
.name = "Hash mismatch",
.hint = hintfmt(
"path '%s' was modified! expected hash '%s', got '%s'",
store->printStorePath(path),
info->narHash.to_string(),
current.first.to_string())
});
status = 1;
}

View file

@ -45,13 +45,15 @@ struct CmdAddToStore : MixDryRun, StoreCommand
auto narHash = hashString(htSHA256, *sink.s);
ValidPathInfo info(store->makeFixedOutputPath(true, narHash, *namePart));
ValidPathInfo info(store->makeFixedOutputPath(FileIngestionMethod::Recursive, narHash, *namePart));
info.narHash = narHash;
info.narSize = sink.s->size();
info.ca = makeFixedOutputCA(true, info.narHash);
info.ca = makeFixedOutputCA(FileIngestionMethod::Recursive, info.narHash);
if (!dryRun)
store->addToStore(info, sink.s);
if (!dryRun) {
auto source = StringSource { *sink.s };
store->addToStore(info, source);
}
logger->stdout("%s", store->printStorePath(info.path));
}

View file

@ -9,15 +9,14 @@ using namespace nix;
struct CmdHash : Command
{
enum Mode { mFile, mPath };
Mode mode;
FileIngestionMethod mode;
Base base = SRI;
bool truncate = false;
HashType ht = htSHA256;
std::vector<std::string> paths;
std::optional<std::string> modulus;
CmdHash(Mode mode) : mode(mode)
CmdHash(FileIngestionMethod mode) : mode(mode)
{
mkFlag(0, "sri", "print hash in SRI format", &base, SRI);
mkFlag(0, "base64", "print hash in base-64", &base, Base64);
@ -36,9 +35,14 @@ struct CmdHash : Command
std::string description() override
{
return mode == mFile
? "print cryptographic hash of a regular file"
: "print cryptographic hash of the NAR serialisation of a path";
const char* d;
switch (mode) {
case FileIngestionMethod::Flat:
d = "print cryptographic hash of a regular file";
case FileIngestionMethod::Recursive:
d = "print cryptographic hash of the NAR serialisation of a path";
};
return d;
}
Category category() override { return catUtility; }
@ -53,10 +57,14 @@ struct CmdHash : Command
else
hashSink = std::make_unique<HashSink>(ht);
if (mode == mFile)
switch (mode) {
case FileIngestionMethod::Flat:
readFile(path, *hashSink);
else
break;
case FileIngestionMethod::Recursive:
dumpPath(path, *hashSink);
break;
}
Hash h = hashSink->finish().first;
if (truncate && h.hashSize > 20) h = compressHash(h, 20);
@ -65,8 +73,8 @@ struct CmdHash : Command
}
};
static RegisterCommand r1("hash-file", [](){ return make_ref<CmdHash>(CmdHash::mFile); });
static RegisterCommand r2("hash-path", [](){ return make_ref<CmdHash>(CmdHash::mPath); });
static RegisterCommand r1("hash-file", [](){ return make_ref<CmdHash>(FileIngestionMethod::Flat); });
static RegisterCommand r2("hash-path", [](){ return make_ref<CmdHash>(FileIngestionMethod::Recursive); });
struct CmdToBase : Command
{
@ -137,7 +145,7 @@ static int compatNixHash(int argc, char * * argv)
});
if (op == opHash) {
CmdHash cmd(flat ? CmdHash::mFile : CmdHash::mPath);
CmdHash cmd(flat ? FileIngestionMethod::Flat : FileIngestionMethod::Recursive);
cmd.ht = ht;
cmd.base = base32 ? Base32 : Base16;
cmd.truncate = truncate;

View file

@ -77,12 +77,12 @@ struct CmdMakeContentAddressable : StorePathsCommand, MixJSON
auto narHash = hashModuloSink.finish().first;
ValidPathInfo info(store->makeFixedOutputPath(true, narHash, path.name(), references, hasSelfReference));
ValidPathInfo info(store->makeFixedOutputPath(FileIngestionMethod::Recursive, narHash, path.name(), references, hasSelfReference));
info.references = std::move(references);
if (hasSelfReference) info.references.insert(info.path.clone());
info.narHash = narHash;
info.narSize = sink.s->size();
info.ca = makeFixedOutputCA(true, info.narHash);
info.ca = makeFixedOutputCA(FileIngestionMethod::Recursive, info.narHash);
if (!json)
printInfo("rewrote '%s' to '%s'", pathS, store->printStorePath(info.path));

View file

@ -99,15 +99,14 @@ struct CmdVerify : StorePathsCommand
if (hash.first != info->narHash) {
corrupted++;
act2.result(resCorruptedPath, store->printStorePath(info->path));
logError(
ErrorInfo {
.name = "Hash error - path modified",
.hint = hintfmt(
"path '%s' was modified! expected hash '%s', got '%s'",
store->printStorePath(info->path),
info->narHash.to_string(),
hash.first.to_string())
});
logError({
.name = "Hash error - path modified",
.hint = hintfmt(
"path '%s' was modified! expected hash '%s', got '%s'",
store->printStorePath(info->path),
info->narHash.to_string(),
hash.first.to_string())
});
}
}
@ -156,11 +155,10 @@ struct CmdVerify : StorePathsCommand
if (!good) {
untrusted++;
act2.result(resUntrustedPath, store->printStorePath(info->path));
logError(
ErrorInfo {
.name = "Untrusted path",
.hint = hintfmt("path '%s' is untrusted",
store->printStorePath(info->path))
logError({
.name = "Untrusted path",
.hint = hintfmt("path '%s' is untrusted",
store->printStorePath(info->path))
});
}

View file

@ -39,19 +39,17 @@ std::set<std::string> runResolver(const Path & filename)
throw SysError("statting '%s'", filename);
if (!S_ISREG(st.st_mode)) {
logError(
ErrorInfo {
.name = "Regular MACH file",
.hint = hintfmt("file '%s' is not a regular file", filename)
logError({
.name = "Regular MACH file",
.hint = hintfmt("file '%s' is not a regular file", filename)
});
return {};
}
if (st.st_size < sizeof(mach_header_64)) {
logError(
ErrorInfo {
.name = "File too short",
.hint = hintfmt("file '%s' is too short for a MACH binary", filename)
logError({
.name = "File too short",
.hint = hintfmt("file '%s' is too short for a MACH binary", filename)
});
return {};
}
@ -74,20 +72,18 @@ std::set<std::string> runResolver(const Path & filename)
}
}
if (mach64_offset == 0) {
logError(
ErrorInfo {
.name = "No mach64 blobs",
.hint = hintfmt("Could not find any mach64 blobs in file '%1%', continuing...", filename)
logError({
.name = "No mach64 blobs",
.hint = hintfmt("Could not find any mach64 blobs in file '%1%', continuing...", filename)
});
return {};
}
} else if (magic == MH_MAGIC_64 || magic == MH_CIGAM_64) {
mach64_offset = 0;
} else {
logError(
ErrorInfo {
.name = "Magic number",
.hint = hintfmt("Object file has unknown magic number '%1%', skipping it...", magic)
logError({
.name = "Magic number",
.hint = hintfmt("Object file has unknown magic number '%1%', skipping it...", magic)
});
return {};
}