Move tests to separate directories, and document
Today, with the tests inside a `tests` directory intermingled with the
corresponding library's source code, we have a few problems:

- We have to be careful that wildcards don't end up building the tests
  as part of Nix proper, or installing test headers as part of Nix
  proper.

- Putting tests in libraries but not executables is not right:

  - It means each executable runs the libraries' unit tests again,
    because it links against those libraries.

  - It doesn't work right on Windows, which doesn't want you to load a
    DLL just for the side effect of its global variables. It could be
    made to work with the dlopen equivalent, but that's gross!

This reorg solves these problems.

There is a remaining problem: sibling headers (like `hash.hh` the test
header vs `hash.hh` the main `libnixutil` header) end up shadowing each
other. This PR doesn't solve that; it is left for a future PR.
Co-authored-by: Valentin Gagarin <valentin.gagarin@tweag.io>
(cherry picked from commit 91b6833686)
parent 5e265bc140
commit a61e42adb5
133 changed files with 464 additions and 352 deletions
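
For context on the wildcard and Windows problems described above: GoogleTest's TEST/TEST_F macros register each test case through the constructor of a file-level static object, so the tests travel with whatever object files get linked into a binary, and on Windows a DLL's static initializers only run if the DLL is actually loaded. The following is a minimal, hypothetical sketch of that mechanism (it is not part of this commit, and the HashTest name is made up):

#include <gtest/gtest.h>

// TEST expands to a test class plus a static registrar object; the registrar's
// constructor runs during static initialization and adds the test to
// GoogleTest's global registry.
TEST(HashTest, trivial)
{
    ASSERT_EQ(1 + 1, 2);
}

// Any executable that links object files containing such registrars ends up
// running those tests as well, which is why moving them out of the libraries
// keeps every Nix binary from re-running the unit tests.
int main(int argc, char ** argv)
{
    ::testing::InitGoogleTest(&argc, argv);
    return RUN_ALL_TESTS();
}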
@@ -1,68 +0,0 @@
#include <nlohmann/json.hpp>
#include <gtest/gtest.h>
#include <rapidcheck/gtest.h>

#include "tests/derived-path.hh"
#include "tests/libexpr.hh"

namespace nix {

// Testing of derived path expressions
class DerivedPathExpressionTest : public LibExprTest {};

// FIXME: `RC_GTEST_FIXTURE_PROP` isn't calling `SetUpTestSuite` because it is
// not a real fixture.
//
// See https://github.com/emil-e/rapidcheck/blob/master/doc/gtest.md#rc_gtest_fixture_propfixture-name-args
TEST_F(DerivedPathExpressionTest, force_init)
{
}

#ifndef COVERAGE

RC_GTEST_FIXTURE_PROP(
    DerivedPathExpressionTest,
    prop_opaque_path_round_trip,
    (const SingleDerivedPath::Opaque & o))
{
    auto * v = state.allocValue();
    state.mkStorePathString(o.path, *v);
    auto d = state.coerceToSingleDerivedPath(noPos, *v, "");
    RC_ASSERT(SingleDerivedPath { o } == d);
}

// TODO use DerivedPath::Built for parameter once it supports a single output
// path only.

RC_GTEST_FIXTURE_PROP(
    DerivedPathExpressionTest,
    prop_derived_path_built_placeholder_round_trip,
    (const SingleDerivedPath::Built & b))
{
    /**
     * We set these in tests rather than the regular globals so we don't have
     * to worry about race conditions if the tests run concurrently.
     */
    ExperimentalFeatureSettings mockXpSettings;
    mockXpSettings.set("experimental-features", "ca-derivations");

    auto * v = state.allocValue();
    state.mkOutputString(*v, b, std::nullopt, mockXpSettings);
    auto [d, _] = state.coerceToSingleDerivedPathUnchecked(noPos, *v, "");
    RC_ASSERT(SingleDerivedPath { b } == d);
}

RC_GTEST_FIXTURE_PROP(
    DerivedPathExpressionTest,
    prop_derived_path_built_out_path_round_trip,
    (const SingleDerivedPath::Built & b, const StorePath & outPath))
{
    auto * v = state.allocValue();
    state.mkOutputString(*v, b, outPath);
    auto [d, _] = state.coerceToSingleDerivedPathUnchecked(noPos, *v, "");
    RC_ASSERT(SingleDerivedPath { b } == d);
}

#endif

} /* namespace nix */
File diff suppressed because it is too large
@@ -1,22 +0,0 @@
#include <gtest/gtest.h>

#include "flake/flakeref.hh"

namespace nix {

/* ----------- tests for flake/flakeref.hh --------------------------------------------------*/

/* ----------------------------------------------------------------------------
 * to_string
 * --------------------------------------------------------------------------*/

TEST(to_string, doesntReencodeUrl) {
    auto s = "http://localhost:8181/test/+3d.tar.gz";
    auto flakeref = parseFlakeRef(s);
    auto parsed = flakeref.to_string();
    auto expected = "http://localhost:8181/test/%2B3d.tar.gz";

    ASSERT_EQ(parsed, expected);
}

}
@@ -1,68 +0,0 @@
#include "tests/libexpr.hh"
#include "value-to-json.hh"

namespace nix {
// Testing the conversion to JSON

class JSONValueTest : public LibExprTest {
protected:
    std::string getJSONValue(Value & value) {
        std::stringstream ss;
        NixStringContext ps;
        printValueAsJSON(state, true, value, noPos, ss, ps);
        return ss.str();
    }
};

TEST_F(JSONValueTest, null) {
    Value v;
    v.mkNull();
    ASSERT_EQ(getJSONValue(v), "null");
}

TEST_F(JSONValueTest, BoolFalse) {
    Value v;
    v.mkBool(false);
    ASSERT_EQ(getJSONValue(v), "false");
}

TEST_F(JSONValueTest, BoolTrue) {
    Value v;
    v.mkBool(true);
    ASSERT_EQ(getJSONValue(v), "true");
}

TEST_F(JSONValueTest, IntPositive) {
    Value v;
    v.mkInt(100);
    ASSERT_EQ(getJSONValue(v), "100");
}

TEST_F(JSONValueTest, IntNegative) {
    Value v;
    v.mkInt(-100);
    ASSERT_EQ(getJSONValue(v), "-100");
}

TEST_F(JSONValueTest, String) {
    Value v;
    v.mkString("test");
    ASSERT_EQ(getJSONValue(v), "\"test\"");
}

TEST_F(JSONValueTest, StringQuotes) {
    Value v;

    v.mkString("test\"");
    ASSERT_EQ(getJSONValue(v), "\"test\\\"\"");
}

// The dummy store doesn't support writing files. Fails with this exception message:
// C++ exception with description "error: operation 'addToStoreFromDump' is
// not supported by store 'dummy'" thrown in the test body.
TEST_F(JSONValueTest, DISABLED_Path) {
    Value v;
    v.mkPath(state.rootPath(CanonPath("/test")));
    ASSERT_EQ(getJSONValue(v), "\"/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x\"");
}
} /* namespace nix */
@@ -1,143 +0,0 @@
#pragma once
///@file

#include <gtest/gtest.h>
#include <gmock/gmock.h>

#include "value.hh"
#include "nixexpr.hh"
#include "eval.hh"
#include "eval-inline.hh"
#include "store-api.hh"

#include "tests/libstore.hh"

namespace nix {
    class LibExprTest : public LibStoreTest {
        public:
            static void SetUpTestSuite() {
                LibStoreTest::SetUpTestSuite();
                initGC();
            }

        protected:
            LibExprTest()
                : LibStoreTest()
                , state({}, store)
            {
            }
            Value eval(std::string input, bool forceValue = true) {
                Value v;
                Expr * e = state.parseExprFromString(input, state.rootPath(CanonPath::root));
                assert(e);
                state.eval(e, v);
                if (forceValue)
                    state.forceValue(v, noPos);
                return v;
            }

            Symbol createSymbol(const char * value) {
                return state.symbols.create(value);
            }

            EvalState state;
    };

    MATCHER(IsListType, "") {
        return arg != nList;
    }

    MATCHER(IsList, "") {
        return arg.type() == nList;
    }

    MATCHER(IsString, "") {
        return arg.type() == nString;
    }

    MATCHER(IsNull, "") {
        return arg.type() == nNull;
    }

    MATCHER(IsThunk, "") {
        return arg.type() == nThunk;
    }

    MATCHER(IsAttrs, "") {
        return arg.type() == nAttrs;
    }

    MATCHER_P(IsStringEq, s, fmt("The string is equal to \"%1%\"", s)) {
        if (arg.type() != nString) {
            return false;
        }
        return std::string_view(arg.c_str()) == s;
    }

    MATCHER_P(IsIntEq, v, fmt("The integer is equal to \"%1%\"", v)) {
        if (arg.type() != nInt) {
            return false;
        }
        return arg.integer == v;
    }

    MATCHER_P(IsFloatEq, v, fmt("The float is equal to \"%1%\"", v)) {
        if (arg.type() != nFloat) {
            return false;
        }
        return arg.fpoint == v;
    }

    MATCHER(IsTrue, "") {
        if (arg.type() != nBool) {
            return false;
        }
        return arg.boolean == true;
    }

    MATCHER(IsFalse, "") {
        if (arg.type() != nBool) {
            return false;
        }
        return arg.boolean == false;
    }

    MATCHER_P(IsPathEq, p, fmt("Is a path equal to \"%1%\"", p)) {
        if (arg.type() != nPath) {
            *result_listener << "Expected a path got " << arg.type();
            return false;
        } else {
            auto path = arg.path();
            if (path.path != CanonPath(p)) {
                *result_listener << "Expected a path that equals \"" << p << "\" but got: " << path.path;
                return false;
            }
        }
        return true;
    }


    MATCHER_P(IsListOfSize, n, fmt("Is a list of size [%1%]", n)) {
        if (arg.type() != nList) {
            *result_listener << "Expected list got " << arg.type();
            return false;
        } else if (arg.listSize() != (size_t)n) {
            *result_listener << "Expected a list of size " << n << " got " << arg.listSize();
            return false;
        }
        return true;
    }

    MATCHER_P(IsAttrsOfSize, n, fmt("Is a set of size [%1%]", n)) {
        if (arg.type() != nAttrs) {
            *result_listener << "Expected set got " << arg.type();
            return false;
        } else if (arg.attrs->size() != (size_t)n) {
            *result_listener << "Expected a set with " << n << " attributes but got " << arg.attrs->size();
            return false;
        }
        return true;
    }


} /* namespace nix */
@@ -1,23 +0,0 @@
check: libexpr-tests_RUN

programs += libexpr-tests

libexpr-tests_NAME := libnixexpr-tests

libexpr-tests_DIR := $(d)

ifeq ($(INSTALL_UNIT_TESTS), yes)
  libexpr-tests_INSTALL_DIR := $(checkbindir)
else
  libexpr-tests_INSTALL_DIR :=
endif

libexpr-tests_SOURCES := \
    $(wildcard $(d)/*.cc) \
    $(wildcard $(d)/value/*.cc)

libexpr-tests_CXXFLAGS += -I src/libexpr -I src/libutil -I src/libstore -I src/libexpr/tests -I src/libfetchers

libexpr-tests_LIBS = libstore-tests libutils-tests libexpr libutil libstore libfetchers

libexpr-tests_LDFLAGS := -lrapidcheck $(GTEST_LIBS) -lgmock
@ -1,832 +0,0 @@
|
|||
#include <gmock/gmock.h>
|
||||
#include <gtest/gtest.h>
|
||||
|
||||
#include "tests/libexpr.hh"
|
||||
|
||||
namespace nix {
|
||||
class CaptureLogger : public Logger
|
||||
{
|
||||
std::ostringstream oss;
|
||||
|
||||
public:
|
||||
CaptureLogger() {}
|
||||
|
||||
std::string get() const {
|
||||
return oss.str();
|
||||
}
|
||||
|
||||
void log(Verbosity lvl, std::string_view s) override {
|
||||
oss << s << std::endl;
|
||||
}
|
||||
|
||||
void logEI(const ErrorInfo & ei) override {
|
||||
showErrorInfo(oss, ei, loggerSettings.showTrace.get());
|
||||
}
|
||||
};
|
||||
|
||||
class CaptureLogging {
|
||||
Logger * oldLogger;
|
||||
std::unique_ptr<CaptureLogger> tempLogger;
|
||||
public:
|
||||
CaptureLogging() : tempLogger(std::make_unique<CaptureLogger>()) {
|
||||
oldLogger = logger;
|
||||
logger = tempLogger.get();
|
||||
}
|
||||
|
||||
~CaptureLogging() {
|
||||
logger = oldLogger;
|
||||
}
|
||||
|
||||
std::string get() const {
|
||||
return tempLogger->get();
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
// Testing eval of PrimOp's
|
||||
class PrimOpTest : public LibExprTest {};
|
||||
|
||||
|
||||
TEST_F(PrimOpTest, throw) {
|
||||
ASSERT_THROW(eval("throw \"foo\""), ThrownError);
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, abort) {
|
||||
ASSERT_THROW(eval("abort \"abort\""), Abort);
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, ceil) {
|
||||
auto v = eval("builtins.ceil 1.9");
|
||||
ASSERT_THAT(v, IsIntEq(2));
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, floor) {
|
||||
auto v = eval("builtins.floor 1.9");
|
||||
ASSERT_THAT(v, IsIntEq(1));
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, tryEvalFailure) {
|
||||
auto v = eval("builtins.tryEval (throw \"\")");
|
||||
ASSERT_THAT(v, IsAttrsOfSize(2));
|
||||
auto s = createSymbol("success");
|
||||
auto p = v.attrs->get(s);
|
||||
ASSERT_NE(p, nullptr);
|
||||
ASSERT_THAT(*p->value, IsFalse());
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, tryEvalSuccess) {
|
||||
auto v = eval("builtins.tryEval 123");
|
||||
ASSERT_THAT(v, IsAttrs());
|
||||
auto s = createSymbol("success");
|
||||
auto p = v.attrs->get(s);
|
||||
ASSERT_NE(p, nullptr);
|
||||
ASSERT_THAT(*p->value, IsTrue());
|
||||
s = createSymbol("value");
|
||||
p = v.attrs->get(s);
|
||||
ASSERT_NE(p, nullptr);
|
||||
ASSERT_THAT(*p->value, IsIntEq(123));
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, getEnv) {
|
||||
setenv("_NIX_UNIT_TEST_ENV_VALUE", "test value", 1);
|
||||
auto v = eval("builtins.getEnv \"_NIX_UNIT_TEST_ENV_VALUE\"");
|
||||
ASSERT_THAT(v, IsStringEq("test value"));
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, seq) {
|
||||
ASSERT_THROW(eval("let x = throw \"test\"; in builtins.seq x { }"), ThrownError);
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, seqNotDeep) {
|
||||
auto v = eval("let x = { z = throw \"test\"; }; in builtins.seq x { }");
|
||||
ASSERT_THAT(v, IsAttrs());
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, deepSeq) {
|
||||
ASSERT_THROW(eval("let x = { z = throw \"test\"; }; in builtins.deepSeq x { }"), ThrownError);
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, trace) {
|
||||
CaptureLogging l;
|
||||
auto v = eval("builtins.trace \"test string 123\" 123");
|
||||
ASSERT_THAT(v, IsIntEq(123));
|
||||
auto text = l.get();
|
||||
ASSERT_NE(text.find("test string 123"), std::string::npos);
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, placeholder) {
|
||||
auto v = eval("builtins.placeholder \"out\"");
|
||||
ASSERT_THAT(v, IsStringEq("/1rz4g4znpzjwh1xymhjpm42vipw92pr73vdgl6xs1hycac8kf2n9"));
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, baseNameOf) {
|
||||
auto v = eval("builtins.baseNameOf /some/path");
|
||||
ASSERT_THAT(v, IsStringEq("path"));
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, dirOf) {
|
||||
auto v = eval("builtins.dirOf /some/path");
|
||||
ASSERT_THAT(v, IsPathEq("/some"));
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, attrValues) {
|
||||
auto v = eval("builtins.attrValues { x = \"foo\"; a = 1; }");
|
||||
ASSERT_THAT(v, IsListOfSize(2));
|
||||
ASSERT_THAT(*v.listElems()[0], IsIntEq(1));
|
||||
ASSERT_THAT(*v.listElems()[1], IsStringEq("foo"));
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, getAttr) {
|
||||
auto v = eval("builtins.getAttr \"x\" { x = \"foo\"; }");
|
||||
ASSERT_THAT(v, IsStringEq("foo"));
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, getAttrNotFound) {
|
||||
// FIXME: TypeError is really bad here, also the error wording is worse
|
||||
// than on Nix <=2.3
|
||||
ASSERT_THROW(eval("builtins.getAttr \"y\" { }"), TypeError);
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, unsafeGetAttrPos) {
|
||||
// The `y` attribute is at position
|
||||
const char* expr = "builtins.unsafeGetAttrPos \"y\" { y = \"x\"; }";
|
||||
auto v = eval(expr);
|
||||
ASSERT_THAT(v, IsNull());
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, hasAttr) {
|
||||
auto v = eval("builtins.hasAttr \"x\" { x = 1; }");
|
||||
ASSERT_THAT(v, IsTrue());
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, hasAttrNotFound) {
|
||||
auto v = eval("builtins.hasAttr \"x\" { }");
|
||||
ASSERT_THAT(v, IsFalse());
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, isAttrs) {
|
||||
auto v = eval("builtins.isAttrs {}");
|
||||
ASSERT_THAT(v, IsTrue());
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, isAttrsFalse) {
|
||||
auto v = eval("builtins.isAttrs null");
|
||||
ASSERT_THAT(v, IsFalse());
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, removeAttrs) {
|
||||
auto v = eval("builtins.removeAttrs { x = 1; } [\"x\"]");
|
||||
ASSERT_THAT(v, IsAttrsOfSize(0));
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, removeAttrsRetains) {
|
||||
auto v = eval("builtins.removeAttrs { x = 1; y = 2; } [\"x\"]");
|
||||
ASSERT_THAT(v, IsAttrsOfSize(1));
|
||||
ASSERT_NE(v.attrs->find(createSymbol("y")), nullptr);
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, listToAttrsEmptyList) {
|
||||
auto v = eval("builtins.listToAttrs []");
|
||||
ASSERT_THAT(v, IsAttrsOfSize(0));
|
||||
ASSERT_EQ(v.type(), nAttrs);
|
||||
ASSERT_EQ(v.attrs->size(), 0);
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, listToAttrsNotFieldName) {
|
||||
ASSERT_THROW(eval("builtins.listToAttrs [{}]"), Error);
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, listToAttrs) {
|
||||
auto v = eval("builtins.listToAttrs [ { name = \"key\"; value = 123; } ]");
|
||||
ASSERT_THAT(v, IsAttrsOfSize(1));
|
||||
auto key = v.attrs->find(createSymbol("key"));
|
||||
ASSERT_NE(key, nullptr);
|
||||
ASSERT_THAT(*key->value, IsIntEq(123));
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, intersectAttrs) {
|
||||
auto v = eval("builtins.intersectAttrs { a = 1; b = 2; } { b = 3; c = 4; }");
|
||||
ASSERT_THAT(v, IsAttrsOfSize(1));
|
||||
auto b = v.attrs->find(createSymbol("b"));
|
||||
ASSERT_NE(b, nullptr);
|
||||
ASSERT_THAT(*b->value, IsIntEq(3));
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, catAttrs) {
|
||||
auto v = eval("builtins.catAttrs \"a\" [{a = 1;} {b = 0;} {a = 2;}]");
|
||||
ASSERT_THAT(v, IsListOfSize(2));
|
||||
ASSERT_THAT(*v.listElems()[0], IsIntEq(1));
|
||||
ASSERT_THAT(*v.listElems()[1], IsIntEq(2));
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, functionArgs) {
|
||||
auto v = eval("builtins.functionArgs ({ x, y ? 123}: 1)");
|
||||
ASSERT_THAT(v, IsAttrsOfSize(2));
|
||||
|
||||
auto x = v.attrs->find(createSymbol("x"));
|
||||
ASSERT_NE(x, nullptr);
|
||||
ASSERT_THAT(*x->value, IsFalse());
|
||||
|
||||
auto y = v.attrs->find(createSymbol("y"));
|
||||
ASSERT_NE(y, nullptr);
|
||||
ASSERT_THAT(*y->value, IsTrue());
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, mapAttrs) {
|
||||
auto v = eval("builtins.mapAttrs (name: value: value * 10) { a = 1; b = 2; }");
|
||||
ASSERT_THAT(v, IsAttrsOfSize(2));
|
||||
|
||||
auto a = v.attrs->find(createSymbol("a"));
|
||||
ASSERT_NE(a, nullptr);
|
||||
ASSERT_THAT(*a->value, IsThunk());
|
||||
state.forceValue(*a->value, noPos);
|
||||
ASSERT_THAT(*a->value, IsIntEq(10));
|
||||
|
||||
auto b = v.attrs->find(createSymbol("b"));
|
||||
ASSERT_NE(b, nullptr);
|
||||
ASSERT_THAT(*b->value, IsThunk());
|
||||
state.forceValue(*b->value, noPos);
|
||||
ASSERT_THAT(*b->value, IsIntEq(20));
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, isList) {
|
||||
auto v = eval("builtins.isList []");
|
||||
ASSERT_THAT(v, IsTrue());
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, isListFalse) {
|
||||
auto v = eval("builtins.isList null");
|
||||
ASSERT_THAT(v, IsFalse());
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, elemtAt) {
|
||||
auto v = eval("builtins.elemAt [0 1 2 3] 3");
|
||||
ASSERT_THAT(v, IsIntEq(3));
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, elemtAtOutOfBounds) {
|
||||
ASSERT_THROW(eval("builtins.elemAt [0 1 2 3] 5"), Error);
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, head) {
|
||||
auto v = eval("builtins.head [ 3 2 1 0 ]");
|
||||
ASSERT_THAT(v, IsIntEq(3));
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, headEmpty) {
|
||||
ASSERT_THROW(eval("builtins.head [ ]"), Error);
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, headWrongType) {
|
||||
ASSERT_THROW(eval("builtins.head { }"), Error);
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, tail) {
|
||||
auto v = eval("builtins.tail [ 3 2 1 0 ]");
|
||||
ASSERT_THAT(v, IsListOfSize(3));
|
||||
for (const auto [n, elem] : enumerate(v.listItems()))
|
||||
ASSERT_THAT(*elem, IsIntEq(2 - static_cast<int>(n)));
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, tailEmpty) {
|
||||
ASSERT_THROW(eval("builtins.tail []"), Error);
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, map) {
|
||||
auto v = eval("map (x: \"foo\" + x) [ \"bar\" \"bla\" \"abc\" ]");
|
||||
ASSERT_THAT(v, IsListOfSize(3));
|
||||
auto elem = v.listElems()[0];
|
||||
ASSERT_THAT(*elem, IsThunk());
|
||||
state.forceValue(*elem, noPos);
|
||||
ASSERT_THAT(*elem, IsStringEq("foobar"));
|
||||
|
||||
elem = v.listElems()[1];
|
||||
ASSERT_THAT(*elem, IsThunk());
|
||||
state.forceValue(*elem, noPos);
|
||||
ASSERT_THAT(*elem, IsStringEq("foobla"));
|
||||
|
||||
elem = v.listElems()[2];
|
||||
ASSERT_THAT(*elem, IsThunk());
|
||||
state.forceValue(*elem, noPos);
|
||||
ASSERT_THAT(*elem, IsStringEq("fooabc"));
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, filter) {
|
||||
auto v = eval("builtins.filter (x: x == 2) [ 3 2 3 2 3 2 ]");
|
||||
ASSERT_THAT(v, IsListOfSize(3));
|
||||
for (const auto elem : v.listItems())
|
||||
ASSERT_THAT(*elem, IsIntEq(2));
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, elemTrue) {
|
||||
auto v = eval("builtins.elem 3 [ 1 2 3 4 5 ]");
|
||||
ASSERT_THAT(v, IsTrue());
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, elemFalse) {
|
||||
auto v = eval("builtins.elem 6 [ 1 2 3 4 5 ]");
|
||||
ASSERT_THAT(v, IsFalse());
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, concatLists) {
|
||||
auto v = eval("builtins.concatLists [[1 2] [3 4]]");
|
||||
ASSERT_THAT(v, IsListOfSize(4));
|
||||
for (const auto [i, elem] : enumerate(v.listItems()))
|
||||
ASSERT_THAT(*elem, IsIntEq(static_cast<int>(i)+1));
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, length) {
|
||||
auto v = eval("builtins.length [ 1 2 3 ]");
|
||||
ASSERT_THAT(v, IsIntEq(3));
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, foldStrict) {
|
||||
auto v = eval("builtins.foldl' (a: b: a + b) 0 [1 2 3]");
|
||||
ASSERT_THAT(v, IsIntEq(6));
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, anyTrue) {
|
||||
auto v = eval("builtins.any (x: x == 2) [ 1 2 3 ]");
|
||||
ASSERT_THAT(v, IsTrue());
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, anyFalse) {
|
||||
auto v = eval("builtins.any (x: x == 5) [ 1 2 3 ]");
|
||||
ASSERT_THAT(v, IsFalse());
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, allTrue) {
|
||||
auto v = eval("builtins.all (x: x > 0) [ 1 2 3 ]");
|
||||
ASSERT_THAT(v, IsTrue());
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, allFalse) {
|
||||
auto v = eval("builtins.all (x: x <= 0) [ 1 2 3 ]");
|
||||
ASSERT_THAT(v, IsFalse());
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, genList) {
|
||||
auto v = eval("builtins.genList (x: x + 1) 3");
|
||||
ASSERT_EQ(v.type(), nList);
|
||||
ASSERT_EQ(v.listSize(), 3);
|
||||
for (const auto [i, elem] : enumerate(v.listItems())) {
|
||||
ASSERT_THAT(*elem, IsThunk());
|
||||
state.forceValue(*elem, noPos);
|
||||
ASSERT_THAT(*elem, IsIntEq(static_cast<int>(i)+1));
|
||||
}
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, sortLessThan) {
|
||||
auto v = eval("builtins.sort builtins.lessThan [ 483 249 526 147 42 77 ]");
|
||||
ASSERT_EQ(v.type(), nList);
|
||||
ASSERT_EQ(v.listSize(), 6);
|
||||
|
||||
const std::vector<int> numbers = { 42, 77, 147, 249, 483, 526 };
|
||||
for (const auto [n, elem] : enumerate(v.listItems()))
|
||||
ASSERT_THAT(*elem, IsIntEq(numbers[n]));
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, partition) {
|
||||
auto v = eval("builtins.partition (x: x > 10) [1 23 9 3 42]");
|
||||
ASSERT_THAT(v, IsAttrsOfSize(2));
|
||||
|
||||
auto right = v.attrs->get(createSymbol("right"));
|
||||
ASSERT_NE(right, nullptr);
|
||||
ASSERT_THAT(*right->value, IsListOfSize(2));
|
||||
ASSERT_THAT(*right->value->listElems()[0], IsIntEq(23));
|
||||
ASSERT_THAT(*right->value->listElems()[1], IsIntEq(42));
|
||||
|
||||
auto wrong = v.attrs->get(createSymbol("wrong"));
|
||||
ASSERT_NE(wrong, nullptr);
|
||||
ASSERT_EQ(wrong->value->type(), nList);
|
||||
ASSERT_EQ(wrong->value->listSize(), 3);
|
||||
ASSERT_THAT(*wrong->value, IsListOfSize(3));
|
||||
ASSERT_THAT(*wrong->value->listElems()[0], IsIntEq(1));
|
||||
ASSERT_THAT(*wrong->value->listElems()[1], IsIntEq(9));
|
||||
ASSERT_THAT(*wrong->value->listElems()[2], IsIntEq(3));
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, concatMap) {
|
||||
auto v = eval("builtins.concatMap (x: x ++ [0]) [ [1 2] [3 4] ]");
|
||||
ASSERT_EQ(v.type(), nList);
|
||||
ASSERT_EQ(v.listSize(), 6);
|
||||
|
||||
const std::vector<int> numbers = { 1, 2, 0, 3, 4, 0 };
|
||||
for (const auto [n, elem] : enumerate(v.listItems()))
|
||||
ASSERT_THAT(*elem, IsIntEq(numbers[n]));
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, addInt) {
|
||||
auto v = eval("builtins.add 3 5");
|
||||
ASSERT_THAT(v, IsIntEq(8));
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, addFloat) {
|
||||
auto v = eval("builtins.add 3.0 5.0");
|
||||
ASSERT_THAT(v, IsFloatEq(8.0));
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, addFloatToInt) {
|
||||
auto v = eval("builtins.add 3.0 5");
|
||||
ASSERT_THAT(v, IsFloatEq(8.0));
|
||||
|
||||
v = eval("builtins.add 3 5.0");
|
||||
ASSERT_THAT(v, IsFloatEq(8.0));
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, subInt) {
|
||||
auto v = eval("builtins.sub 5 2");
|
||||
ASSERT_THAT(v, IsIntEq(3));
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, subFloat) {
|
||||
auto v = eval("builtins.sub 5.0 2.0");
|
||||
ASSERT_THAT(v, IsFloatEq(3.0));
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, subFloatFromInt) {
|
||||
auto v = eval("builtins.sub 5.0 2");
|
||||
ASSERT_THAT(v, IsFloatEq(3.0));
|
||||
|
||||
v = eval("builtins.sub 4 2.0");
|
||||
ASSERT_THAT(v, IsFloatEq(2.0));
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, mulInt) {
|
||||
auto v = eval("builtins.mul 3 5");
|
||||
ASSERT_THAT(v, IsIntEq(15));
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, mulFloat) {
|
||||
auto v = eval("builtins.mul 3.0 5.0");
|
||||
ASSERT_THAT(v, IsFloatEq(15.0));
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, mulFloatMixed) {
|
||||
auto v = eval("builtins.mul 3 5.0");
|
||||
ASSERT_THAT(v, IsFloatEq(15.0));
|
||||
|
||||
v = eval("builtins.mul 2.0 5");
|
||||
ASSERT_THAT(v, IsFloatEq(10.0));
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, divInt) {
|
||||
auto v = eval("builtins.div 5 (-1)");
|
||||
ASSERT_THAT(v, IsIntEq(-5));
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, divIntZero) {
|
||||
ASSERT_THROW(eval("builtins.div 5 0"), EvalError);
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, divFloat) {
|
||||
auto v = eval("builtins.div 5.0 (-1)");
|
||||
ASSERT_THAT(v, IsFloatEq(-5.0));
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, divFloatZero) {
|
||||
ASSERT_THROW(eval("builtins.div 5.0 0.0"), EvalError);
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, bitOr) {
|
||||
auto v = eval("builtins.bitOr 1 2");
|
||||
ASSERT_THAT(v, IsIntEq(3));
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, bitXor) {
|
||||
auto v = eval("builtins.bitXor 3 2");
|
||||
ASSERT_THAT(v, IsIntEq(1));
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, lessThanFalse) {
|
||||
auto v = eval("builtins.lessThan 3 1");
|
||||
ASSERT_THAT(v, IsFalse());
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, lessThanTrue) {
|
||||
auto v = eval("builtins.lessThan 1 3");
|
||||
ASSERT_THAT(v, IsTrue());
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, toStringAttrsThrows) {
|
||||
ASSERT_THROW(eval("builtins.toString {}"), EvalError);
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, toStringLambdaThrows) {
|
||||
ASSERT_THROW(eval("builtins.toString (x: x)"), EvalError);
|
||||
}
|
||||
|
||||
class ToStringPrimOpTest :
|
||||
public PrimOpTest,
|
||||
public testing::WithParamInterface<std::tuple<std::string, std::string_view>>
|
||||
{};
|
||||
|
||||
TEST_P(ToStringPrimOpTest, toString) {
|
||||
const auto [input, output] = GetParam();
|
||||
auto v = eval(input);
|
||||
ASSERT_THAT(v, IsStringEq(output));
|
||||
}
|
||||
|
||||
#define CASE(input, output) (std::make_tuple(std::string_view("builtins.toString " input), std::string_view(output)))
|
||||
INSTANTIATE_TEST_SUITE_P(
|
||||
toString,
|
||||
ToStringPrimOpTest,
|
||||
testing::Values(
|
||||
CASE(R"("foo")", "foo"),
|
||||
CASE(R"(1)", "1"),
|
||||
CASE(R"([1 2 3])", "1 2 3"),
|
||||
CASE(R"(.123)", "0.123000"),
|
||||
CASE(R"(true)", "1"),
|
||||
CASE(R"(false)", ""),
|
||||
CASE(R"(null)", ""),
|
||||
CASE(R"({ v = "bar"; __toString = self: self.v; })", "bar"),
|
||||
CASE(R"({ v = "bar"; __toString = self: self.v; outPath = "foo"; })", "bar"),
|
||||
CASE(R"({ outPath = "foo"; })", "foo"),
|
||||
CASE(R"(./test)", "/test")
|
||||
)
|
||||
);
|
||||
#undef CASE
|
||||
|
||||
TEST_F(PrimOpTest, substring){
|
||||
auto v = eval("builtins.substring 0 3 \"nixos\"");
|
||||
ASSERT_THAT(v, IsStringEq("nix"));
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, substringSmallerString){
|
||||
auto v = eval("builtins.substring 0 3 \"n\"");
|
||||
ASSERT_THAT(v, IsStringEq("n"));
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, substringEmptyString){
|
||||
auto v = eval("builtins.substring 1 3 \"\"");
|
||||
ASSERT_THAT(v, IsStringEq(""));
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, stringLength) {
|
||||
auto v = eval("builtins.stringLength \"123\"");
|
||||
ASSERT_THAT(v, IsIntEq(3));
|
||||
}
|
||||
TEST_F(PrimOpTest, hashStringMd5) {
|
||||
auto v = eval("builtins.hashString \"md5\" \"asdf\"");
|
||||
ASSERT_THAT(v, IsStringEq("912ec803b2ce49e4a541068d495ab570"));
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, hashStringSha1) {
|
||||
auto v = eval("builtins.hashString \"sha1\" \"asdf\"");
|
||||
ASSERT_THAT(v, IsStringEq("3da541559918a808c2402bba5012f6c60b27661c"));
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, hashStringSha256) {
|
||||
auto v = eval("builtins.hashString \"sha256\" \"asdf\"");
|
||||
ASSERT_THAT(v, IsStringEq("f0e4c2f76c58916ec258f246851bea091d14d4247a2fc3e18694461b1816e13b"));
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, hashStringSha512) {
|
||||
auto v = eval("builtins.hashString \"sha512\" \"asdf\"");
|
||||
ASSERT_THAT(v, IsStringEq("401b09eab3c013d4ca54922bb802bec8fd5318192b0a75f201d8b3727429080fb337591abd3e44453b954555b7a0812e1081c39b740293f765eae731f5a65ed1"));
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, hashStringInvalidHashType) {
|
||||
ASSERT_THROW(eval("builtins.hashString \"foobar\" \"asdf\""), Error);
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, nixPath) {
|
||||
auto v = eval("builtins.nixPath");
|
||||
ASSERT_EQ(v.type(), nList);
|
||||
// We can't test much more as currently the EvalSettings are a global
|
||||
// that we can't easily swap / replace
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, langVersion) {
|
||||
auto v = eval("builtins.langVersion");
|
||||
ASSERT_EQ(v.type(), nInt);
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, storeDir) {
|
||||
auto v = eval("builtins.storeDir");
|
||||
ASSERT_THAT(v, IsStringEq(settings.nixStore));
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, nixVersion) {
|
||||
auto v = eval("builtins.nixVersion");
|
||||
ASSERT_THAT(v, IsStringEq(nixVersion));
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, currentSystem) {
|
||||
auto v = eval("builtins.currentSystem");
|
||||
ASSERT_THAT(v, IsStringEq(settings.thisSystem.get()));
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, derivation) {
|
||||
auto v = eval("derivation");
|
||||
ASSERT_EQ(v.type(), nFunction);
|
||||
ASSERT_TRUE(v.isLambda());
|
||||
ASSERT_NE(v.lambda.fun, nullptr);
|
||||
ASSERT_TRUE(v.lambda.fun->hasFormals());
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, currentTime) {
|
||||
auto v = eval("builtins.currentTime");
|
||||
ASSERT_EQ(v.type(), nInt);
|
||||
ASSERT_TRUE(v.integer > 0);
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, splitVersion) {
|
||||
auto v = eval("builtins.splitVersion \"1.2.3git\"");
|
||||
ASSERT_THAT(v, IsListOfSize(4));
|
||||
|
||||
const std::vector<std::string_view> strings = { "1", "2", "3", "git" };
|
||||
for (const auto [n, p] : enumerate(v.listItems()))
|
||||
ASSERT_THAT(*p, IsStringEq(strings[n]));
|
||||
}
|
||||
|
||||
class CompareVersionsPrimOpTest :
|
||||
public PrimOpTest,
|
||||
public testing::WithParamInterface<std::tuple<std::string, const int>>
|
||||
{};
|
||||
|
||||
TEST_P(CompareVersionsPrimOpTest, compareVersions) {
|
||||
auto [expression, expectation] = GetParam();
|
||||
auto v = eval(expression);
|
||||
ASSERT_THAT(v, IsIntEq(expectation));
|
||||
}
|
||||
|
||||
#define CASE(a, b, expected) (std::make_tuple("builtins.compareVersions \"" #a "\" \"" #b "\"", expected))
|
||||
INSTANTIATE_TEST_SUITE_P(
|
||||
compareVersions,
|
||||
CompareVersionsPrimOpTest,
|
||||
testing::Values(
|
||||
// The first two are weird cases. Intuition tells they should
|
||||
// be the same but they aren't.
|
||||
CASE(1.0, 1.0.0, -1),
|
||||
CASE(1.0.0, 1.0, 1),
|
||||
// the following are from the nix-env manual:
|
||||
CASE(1.0, 2.3, -1),
|
||||
CASE(2.1, 2.3, -1),
|
||||
CASE(2.3, 2.3, 0),
|
||||
CASE(2.5, 2.3, 1),
|
||||
CASE(3.1, 2.3, 1),
|
||||
CASE(2.3.1, 2.3, 1),
|
||||
CASE(2.3.1, 2.3a, 1),
|
||||
CASE(2.3pre1, 2.3, -1),
|
||||
CASE(2.3pre3, 2.3pre12, -1),
|
||||
CASE(2.3a, 2.3c, -1),
|
||||
CASE(2.3pre1, 2.3c, -1),
|
||||
CASE(2.3pre1, 2.3q, -1)
|
||||
)
|
||||
);
|
||||
#undef CASE
|
||||
|
||||
|
||||
class ParseDrvNamePrimOpTest :
|
||||
public PrimOpTest,
|
||||
public testing::WithParamInterface<std::tuple<std::string, std::string_view, std::string_view>>
|
||||
{};
|
||||
|
||||
TEST_P(ParseDrvNamePrimOpTest, parseDrvName) {
|
||||
auto [input, expectedName, expectedVersion] = GetParam();
|
||||
const auto expr = fmt("builtins.parseDrvName \"%1%\"", input);
|
||||
auto v = eval(expr);
|
||||
ASSERT_THAT(v, IsAttrsOfSize(2));
|
||||
|
||||
auto name = v.attrs->find(createSymbol("name"));
|
||||
ASSERT_TRUE(name);
|
||||
ASSERT_THAT(*name->value, IsStringEq(expectedName));
|
||||
|
||||
auto version = v.attrs->find(createSymbol("version"));
|
||||
ASSERT_TRUE(version);
|
||||
ASSERT_THAT(*version->value, IsStringEq(expectedVersion));
|
||||
}
|
||||
|
||||
INSTANTIATE_TEST_SUITE_P(
|
||||
parseDrvName,
|
||||
ParseDrvNamePrimOpTest,
|
||||
testing::Values(
|
||||
std::make_tuple("nix-0.12pre12876", "nix", "0.12pre12876"),
|
||||
std::make_tuple("a-b-c-1234pre5+git", "a-b-c", "1234pre5+git")
|
||||
)
|
||||
);
|
||||
|
||||
TEST_F(PrimOpTest, replaceStrings) {
|
||||
// FIXME: add a test that verifies the string context is as expected
|
||||
auto v = eval("builtins.replaceStrings [\"oo\" \"a\"] [\"a\" \"i\"] \"foobar\"");
|
||||
ASSERT_EQ(v.type(), nString);
|
||||
ASSERT_EQ(v.string_view(), "fabir");
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, concatStringsSep) {
|
||||
// FIXME: add a test that verifies the string context is as expected
|
||||
auto v = eval("builtins.concatStringsSep \"%\" [\"foo\" \"bar\" \"baz\"]");
|
||||
ASSERT_EQ(v.type(), nString);
|
||||
ASSERT_EQ(v.string_view(), "foo%bar%baz");
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, split1) {
|
||||
// v = [ "" [ "a" ] "c" ]
|
||||
auto v = eval("builtins.split \"(a)b\" \"abc\"");
|
||||
ASSERT_THAT(v, IsListOfSize(3));
|
||||
|
||||
ASSERT_THAT(*v.listElems()[0], IsStringEq(""));
|
||||
|
||||
ASSERT_THAT(*v.listElems()[1], IsListOfSize(1));
|
||||
ASSERT_THAT(*v.listElems()[1]->listElems()[0], IsStringEq("a"));
|
||||
|
||||
ASSERT_THAT(*v.listElems()[2], IsStringEq("c"));
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, split2) {
|
||||
// v is expected to be a list [ "" [ "a" ] "b" [ "c" ] "" ]
|
||||
auto v = eval("builtins.split \"([ac])\" \"abc\"");
|
||||
ASSERT_THAT(v, IsListOfSize(5));
|
||||
|
||||
ASSERT_THAT(*v.listElems()[0], IsStringEq(""));
|
||||
|
||||
ASSERT_THAT(*v.listElems()[1], IsListOfSize(1));
|
||||
ASSERT_THAT(*v.listElems()[1]->listElems()[0], IsStringEq("a"));
|
||||
|
||||
ASSERT_THAT(*v.listElems()[2], IsStringEq("b"));
|
||||
|
||||
ASSERT_THAT(*v.listElems()[3], IsListOfSize(1));
|
||||
ASSERT_THAT(*v.listElems()[3]->listElems()[0], IsStringEq("c"));
|
||||
|
||||
ASSERT_THAT(*v.listElems()[4], IsStringEq(""));
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, split3) {
|
||||
auto v = eval("builtins.split \"(a)|(c)\" \"abc\"");
|
||||
ASSERT_THAT(v, IsListOfSize(5));
|
||||
|
||||
// First list element
|
||||
ASSERT_THAT(*v.listElems()[0], IsStringEq(""));
|
||||
|
||||
// 2nd list element is a list [ "a" null ]
|
||||
ASSERT_THAT(*v.listElems()[1], IsListOfSize(2));
|
||||
ASSERT_THAT(*v.listElems()[1]->listElems()[0], IsStringEq("a"));
|
||||
ASSERT_THAT(*v.listElems()[1]->listElems()[1], IsNull());
|
||||
|
||||
// 3rd element
|
||||
ASSERT_THAT(*v.listElems()[2], IsStringEq("b"));
|
||||
|
||||
// 4th element is a list: [ null "c" ]
|
||||
ASSERT_THAT(*v.listElems()[3], IsListOfSize(2));
|
||||
ASSERT_THAT(*v.listElems()[3]->listElems()[0], IsNull());
|
||||
ASSERT_THAT(*v.listElems()[3]->listElems()[1], IsStringEq("c"));
|
||||
|
||||
// 5th element is the empty string
|
||||
ASSERT_THAT(*v.listElems()[4], IsStringEq(""));
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, split4) {
|
||||
auto v = eval("builtins.split \"([[:upper:]]+)\" \" FOO \"");
|
||||
ASSERT_THAT(v, IsListOfSize(3));
|
||||
auto first = v.listElems()[0];
|
||||
auto second = v.listElems()[1];
|
||||
auto third = v.listElems()[2];
|
||||
|
||||
ASSERT_THAT(*first, IsStringEq(" "));
|
||||
|
||||
ASSERT_THAT(*second, IsListOfSize(1));
|
||||
ASSERT_THAT(*second->listElems()[0], IsStringEq("FOO"));
|
||||
|
||||
ASSERT_THAT(*third, IsStringEq(" "));
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, match1) {
|
||||
auto v = eval("builtins.match \"ab\" \"abc\"");
|
||||
ASSERT_THAT(v, IsNull());
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, match2) {
|
||||
auto v = eval("builtins.match \"abc\" \"abc\"");
|
||||
ASSERT_THAT(v, IsListOfSize(0));
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, match3) {
|
||||
auto v = eval("builtins.match \"a(b)(c)\" \"abc\"");
|
||||
ASSERT_THAT(v, IsListOfSize(2));
|
||||
ASSERT_THAT(*v.listElems()[0], IsStringEq("b"));
|
||||
ASSERT_THAT(*v.listElems()[1], IsStringEq("c"));
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, match4) {
|
||||
auto v = eval("builtins.match \"[[:space:]]+([[:upper:]]+)[[:space:]]+\" \" FOO \"");
|
||||
ASSERT_THAT(v, IsListOfSize(1));
|
||||
ASSERT_THAT(*v.listElems()[0], IsStringEq("FOO"));
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, attrNames) {
|
||||
auto v = eval("builtins.attrNames { x = 1; y = 2; z = 3; a = 2; }");
|
||||
ASSERT_THAT(v, IsListOfSize(4));
|
||||
|
||||
// ensure that the list is sorted
|
||||
const std::vector<std::string_view> expected { "a", "x", "y", "z" };
|
||||
for (const auto [n, elem] : enumerate(v.listItems()))
|
||||
ASSERT_THAT(*elem, IsStringEq(expected[n]));
|
||||
}
|
||||
|
||||
TEST_F(PrimOpTest, genericClosure_not_strict) {
|
||||
// Operator should not be used when startSet is empty
|
||||
auto v = eval("builtins.genericClosure { startSet = []; }");
|
||||
ASSERT_THAT(v, IsListOfSize(0));
|
||||
}
|
||||
} /* namespace nix */
|
|
@@ -1,90 +0,0 @@
#include <gtest/gtest.h>
#include <gmock/gmock.h>

#include "search-path.hh"

namespace nix {

TEST(SearchPathElem, parse_justPath) {
    ASSERT_EQ(
        SearchPath::Elem::parse("foo"),
        (SearchPath::Elem {
            .prefix = SearchPath::Prefix { .s = "" },
            .path = SearchPath::Path { .s = "foo" },
        }));
}

TEST(SearchPathElem, parse_emptyPrefix) {
    ASSERT_EQ(
        SearchPath::Elem::parse("=foo"),
        (SearchPath::Elem {
            .prefix = SearchPath::Prefix { .s = "" },
            .path = SearchPath::Path { .s = "foo" },
        }));
}

TEST(SearchPathElem, parse_oneEq) {
    ASSERT_EQ(
        SearchPath::Elem::parse("foo=bar"),
        (SearchPath::Elem {
            .prefix = SearchPath::Prefix { .s = "foo" },
            .path = SearchPath::Path { .s = "bar" },
        }));
}

TEST(SearchPathElem, parse_twoEqs) {
    ASSERT_EQ(
        SearchPath::Elem::parse("foo=bar=baz"),
        (SearchPath::Elem {
            .prefix = SearchPath::Prefix { .s = "foo" },
            .path = SearchPath::Path { .s = "bar=baz" },
        }));
}


TEST(SearchPathElem, suffixIfPotentialMatch_justPath) {
    SearchPath::Prefix prefix { .s = "" };
    ASSERT_EQ(prefix.suffixIfPotentialMatch("any/thing"), std::optional { "any/thing" });
}

TEST(SearchPathElem, suffixIfPotentialMatch_misleadingPrefix1) {
    SearchPath::Prefix prefix { .s = "foo" };
    ASSERT_EQ(prefix.suffixIfPotentialMatch("fooX"), std::nullopt);
}

TEST(SearchPathElem, suffixIfPotentialMatch_misleadingPrefix2) {
    SearchPath::Prefix prefix { .s = "foo" };
    ASSERT_EQ(prefix.suffixIfPotentialMatch("fooX/bar"), std::nullopt);
}

TEST(SearchPathElem, suffixIfPotentialMatch_partialPrefix) {
    SearchPath::Prefix prefix { .s = "fooX" };
    ASSERT_EQ(prefix.suffixIfPotentialMatch("foo"), std::nullopt);
}

TEST(SearchPathElem, suffixIfPotentialMatch_exactPrefix) {
    SearchPath::Prefix prefix { .s = "foo" };
    ASSERT_EQ(prefix.suffixIfPotentialMatch("foo"), std::optional { "" });
}

TEST(SearchPathElem, suffixIfPotentialMatch_multiKey) {
    SearchPath::Prefix prefix { .s = "foo/bar" };
    ASSERT_EQ(prefix.suffixIfPotentialMatch("foo/bar/baz"), std::optional { "baz" });
}

TEST(SearchPathElem, suffixIfPotentialMatch_trailingSlash) {
    SearchPath::Prefix prefix { .s = "foo" };
    ASSERT_EQ(prefix.suffixIfPotentialMatch("foo/"), std::optional { "" });
}

TEST(SearchPathElem, suffixIfPotentialMatch_trailingDoubleSlash) {
    SearchPath::Prefix prefix { .s = "foo" };
    ASSERT_EQ(prefix.suffixIfPotentialMatch("foo//"), std::optional { "/" });
}

TEST(SearchPathElem, suffixIfPotentialMatch_trailingPath) {
    SearchPath::Prefix prefix { .s = "foo" };
    ASSERT_EQ(prefix.suffixIfPotentialMatch("foo/bar/baz"), std::optional { "bar/baz" });
}

}
@ -1,196 +0,0 @@
|
|||
#include "tests/libexpr.hh"
|
||||
|
||||
namespace nix {
|
||||
// Testing of trivial expressions
|
||||
class TrivialExpressionTest : public LibExprTest {};
|
||||
|
||||
TEST_F(TrivialExpressionTest, true) {
|
||||
auto v = eval("true");
|
||||
ASSERT_THAT(v, IsTrue());
|
||||
}
|
||||
|
||||
TEST_F(TrivialExpressionTest, false) {
|
||||
auto v = eval("false");
|
||||
ASSERT_THAT(v, IsFalse());
|
||||
}
|
||||
|
||||
TEST_F(TrivialExpressionTest, null) {
|
||||
auto v = eval("null");
|
||||
ASSERT_THAT(v, IsNull());
|
||||
}
|
||||
|
||||
TEST_F(TrivialExpressionTest, 1) {
|
||||
auto v = eval("1");
|
||||
ASSERT_THAT(v, IsIntEq(1));
|
||||
}
|
||||
|
||||
TEST_F(TrivialExpressionTest, 1plus1) {
|
||||
auto v = eval("1+1");
|
||||
ASSERT_THAT(v, IsIntEq(2));
|
||||
}
|
||||
|
||||
TEST_F(TrivialExpressionTest, minus1) {
|
||||
auto v = eval("-1");
|
||||
ASSERT_THAT(v, IsIntEq(-1));
|
||||
}
|
||||
|
||||
TEST_F(TrivialExpressionTest, 1minus1) {
|
||||
auto v = eval("1-1");
|
||||
ASSERT_THAT(v, IsIntEq(0));
|
||||
}
|
||||
|
||||
TEST_F(TrivialExpressionTest, lambdaAdd) {
|
||||
auto v = eval("let add = a: b: a + b; in add 1 2");
|
||||
ASSERT_THAT(v, IsIntEq(3));
|
||||
}
|
||||
|
||||
TEST_F(TrivialExpressionTest, list) {
|
||||
auto v = eval("[]");
|
||||
ASSERT_THAT(v, IsListOfSize(0));
|
||||
}
|
||||
|
||||
TEST_F(TrivialExpressionTest, attrs) {
|
||||
auto v = eval("{}");
|
||||
ASSERT_THAT(v, IsAttrsOfSize(0));
|
||||
}
|
||||
|
||||
TEST_F(TrivialExpressionTest, float) {
|
||||
auto v = eval("1.234");
|
||||
ASSERT_THAT(v, IsFloatEq(1.234));
|
||||
}
|
||||
|
||||
TEST_F(TrivialExpressionTest, updateAttrs) {
|
||||
auto v = eval("{ a = 1; } // { b = 2; a = 3; }");
|
||||
ASSERT_THAT(v, IsAttrsOfSize(2));
|
||||
auto a = v.attrs->find(createSymbol("a"));
|
||||
ASSERT_NE(a, nullptr);
|
||||
ASSERT_THAT(*a->value, IsIntEq(3));
|
||||
|
||||
auto b = v.attrs->find(createSymbol("b"));
|
||||
ASSERT_NE(b, nullptr);
|
||||
ASSERT_THAT(*b->value, IsIntEq(2));
|
||||
}
|
||||
|
||||
TEST_F(TrivialExpressionTest, hasAttrOpFalse) {
|
||||
auto v = eval("{} ? a");
|
||||
ASSERT_THAT(v, IsFalse());
|
||||
}
|
||||
|
||||
TEST_F(TrivialExpressionTest, hasAttrOpTrue) {
|
||||
auto v = eval("{ a = 123; } ? a");
|
||||
ASSERT_THAT(v, IsTrue());
|
||||
}
|
||||
|
||||
TEST_F(TrivialExpressionTest, withFound) {
|
||||
auto v = eval("with { a = 23; }; a");
|
||||
ASSERT_THAT(v, IsIntEq(23));
|
||||
}
|
||||
|
||||
TEST_F(TrivialExpressionTest, withNotFound) {
|
||||
ASSERT_THROW(eval("with {}; a"), Error);
|
||||
}
|
||||
|
||||
TEST_F(TrivialExpressionTest, withOverride) {
|
||||
auto v = eval("with { a = 23; }; with { a = 42; }; a");
|
||||
ASSERT_THAT(v, IsIntEq(42));
|
||||
}
|
||||
|
||||
TEST_F(TrivialExpressionTest, letOverWith) {
|
||||
auto v = eval("let a = 23; in with { a = 1; }; a");
|
||||
ASSERT_THAT(v, IsIntEq(23));
|
||||
}
|
||||
|
||||
TEST_F(TrivialExpressionTest, multipleLet) {
|
||||
auto v = eval("let a = 23; in let a = 42; in a");
|
||||
ASSERT_THAT(v, IsIntEq(42));
|
||||
}
|
||||
|
||||
TEST_F(TrivialExpressionTest, defaultFunctionArgs) {
|
||||
auto v = eval("({ a ? 123 }: a) {}");
|
||||
ASSERT_THAT(v, IsIntEq(123));
|
||||
}
|
||||
|
||||
TEST_F(TrivialExpressionTest, defaultFunctionArgsOverride) {
|
||||
auto v = eval("({ a ? 123 }: a) { a = 5; }");
|
||||
ASSERT_THAT(v, IsIntEq(5));
|
||||
}
|
||||
|
||||
TEST_F(TrivialExpressionTest, defaultFunctionArgsCaptureBack) {
|
||||
auto v = eval("({ a ? 123 }@args: args) {}");
|
||||
ASSERT_THAT(v, IsAttrsOfSize(0));
|
||||
}
|
||||
|
||||
TEST_F(TrivialExpressionTest, defaultFunctionArgsCaptureFront) {
|
||||
auto v = eval("(args@{ a ? 123 }: args) {}");
|
||||
ASSERT_THAT(v, IsAttrsOfSize(0));
|
||||
}
|
||||
|
||||
TEST_F(TrivialExpressionTest, assertThrows) {
|
||||
ASSERT_THROW(eval("let x = arg: assert arg == 1; 123; in x 2"), Error);
|
||||
}
|
||||
|
||||
TEST_F(TrivialExpressionTest, assertPassed) {
|
||||
auto v = eval("let x = arg: assert arg == 1; 123; in x 1");
|
||||
ASSERT_THAT(v, IsIntEq(123));
|
||||
}
|
||||
|
||||
class AttrSetMergeTrvialExpressionTest :
|
||||
public TrivialExpressionTest,
|
||||
public testing::WithParamInterface<const char*>
|
||||
{};
|
||||
|
||||
TEST_P(AttrSetMergeTrvialExpressionTest, attrsetMergeLazy) {
|
||||
// Usually Nix rejects duplicate keys in an attrset but it does allow
|
||||
// them if the duplicates are attribute sets with disjoint sets of keys.
|
||||
// The below is equivalent to `{a.b = 1; a.c = 2; }`.
|
||||
// The attribute set `a` will be a Thunk at first as the attributes
|
||||
// have to be merged (or otherwise computed) and that is done in a lazy
|
||||
// manner.
|
||||
|
||||
auto expr = GetParam();
|
||||
auto v = eval(expr);
|
||||
ASSERT_THAT(v, IsAttrsOfSize(1));
|
||||
|
||||
auto a = v.attrs->find(createSymbol("a"));
|
||||
ASSERT_NE(a, nullptr);
|
||||
|
||||
ASSERT_THAT(*a->value, IsThunk());
|
||||
state.forceValue(*a->value, noPos);
|
||||
|
||||
ASSERT_THAT(*a->value, IsAttrsOfSize(2));
|
||||
|
||||
auto b = a->value->attrs->find(createSymbol("b"));
|
||||
ASSERT_NE(b, nullptr);
|
||||
ASSERT_THAT(*b->value, IsIntEq(1));
|
||||
|
||||
auto c = a->value->attrs->find(createSymbol("c"));
|
||||
ASSERT_NE(c, nullptr);
|
||||
ASSERT_THAT(*c->value, IsIntEq(2));
|
||||
}
|
||||
|
||||
INSTANTIATE_TEST_SUITE_P(
|
||||
attrsetMergeLazy,
|
||||
AttrSetMergeTrvialExpressionTest,
|
||||
testing::Values(
|
||||
"{ a.b = 1; a.c = 2; }",
|
||||
"{ a = { b = 1; }; a = { c = 2; }; }"
|
||||
)
|
||||
);
|
||||
|
||||
TEST_F(TrivialExpressionTest, functor) {
|
||||
auto v = eval("{ __functor = self: arg: self.v + arg; v = 10; } 5");
|
||||
ASSERT_THAT(v, IsIntEq(15));
|
||||
}
|
||||
|
||||
TEST_F(TrivialExpressionTest, bindOr) {
|
||||
auto v = eval("{ or = 1; }");
|
||||
ASSERT_THAT(v, IsAttrsOfSize(1));
|
||||
auto b = v.attrs->find(createSymbol("or"));
|
||||
ASSERT_NE(b, nullptr);
|
||||
ASSERT_THAT(*b->value, IsIntEq(1));
|
||||
}
|
||||
|
||||
TEST_F(TrivialExpressionTest, orCantBeUsed) {
|
||||
ASSERT_THROW(eval("let or = 1; in or"), Error);
|
||||
}
|
||||
} /* namespace nix */
|
|
@ -1,162 +0,0 @@
|
|||
#include <nlohmann/json.hpp>
|
||||
#include <gtest/gtest.h>
|
||||
#include <rapidcheck/gtest.h>
|
||||
|
||||
#include "tests/path.hh"
|
||||
#include "tests/libexpr.hh"
|
||||
#include "tests/value/context.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
// Test a few cases of invalid string context elements.
|
||||
|
||||
TEST(NixStringContextElemTest, empty_invalid) {
|
||||
EXPECT_THROW(
|
||||
NixStringContextElem::parse(""),
|
||||
BadNixStringContextElem);
|
||||
}
|
||||
|
||||
TEST(NixStringContextElemTest, single_bang_invalid) {
|
||||
EXPECT_THROW(
|
||||
NixStringContextElem::parse("!"),
|
||||
BadNixStringContextElem);
|
||||
}
|
||||
|
||||
TEST(NixStringContextElemTest, double_bang_invalid) {
|
||||
EXPECT_THROW(
|
||||
NixStringContextElem::parse("!!/"),
|
||||
BadStorePath);
|
||||
}
|
||||
|
||||
TEST(NixStringContextElemTest, eq_slash_invalid) {
|
||||
EXPECT_THROW(
|
||||
NixStringContextElem::parse("=/"),
|
||||
BadStorePath);
|
||||
}
|
||||
|
||||
TEST(NixStringContextElemTest, slash_invalid) {
|
||||
EXPECT_THROW(
|
||||
NixStringContextElem::parse("/"),
|
||||
BadStorePath);
|
||||
}
|
||||
|
||||
/**
|
||||
* Round trip (string <-> data structure) test for
|
||||
* `NixStringContextElem::Opaque`.
|
||||
*/
|
||||
TEST(NixStringContextElemTest, opaque) {
|
||||
std::string_view opaque = "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x";
|
||||
auto elem = NixStringContextElem::parse(opaque);
|
||||
auto * p = std::get_if<NixStringContextElem::Opaque>(&elem.raw);
|
||||
ASSERT_TRUE(p);
|
||||
ASSERT_EQ(p->path, StorePath { opaque });
|
||||
ASSERT_EQ(elem.to_string(), opaque);
|
||||
}
|
||||
|
||||
/**
|
||||
* Round trip (string <-> data structure) test for
|
||||
* `NixStringContextElem::DrvDeep`.
|
||||
*/
|
||||
TEST(NixStringContextElemTest, drvDeep) {
|
||||
std::string_view drvDeep = "=g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x.drv";
|
||||
auto elem = NixStringContextElem::parse(drvDeep);
|
||||
auto * p = std::get_if<NixStringContextElem::DrvDeep>(&elem.raw);
|
||||
ASSERT_TRUE(p);
|
||||
ASSERT_EQ(p->drvPath, StorePath { drvDeep.substr(1) });
|
||||
ASSERT_EQ(elem.to_string(), drvDeep);
|
||||
}
|
||||
|
||||
/**
|
||||
* Round trip (string <-> data structure) test for a simpler
|
||||
* `NixStringContextElem::Built`.
|
||||
*/
|
||||
TEST(NixStringContextElemTest, built_opaque) {
|
||||
std::string_view built = "!foo!g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x.drv";
|
||||
auto elem = NixStringContextElem::parse(built);
|
||||
auto * p = std::get_if<NixStringContextElem::Built>(&elem.raw);
|
||||
ASSERT_TRUE(p);
|
||||
ASSERT_EQ(p->output, "foo");
|
||||
ASSERT_EQ(*p->drvPath, ((SingleDerivedPath) SingleDerivedPath::Opaque {
|
||||
.path = StorePath { built.substr(5) },
|
||||
}));
|
||||
ASSERT_EQ(elem.to_string(), built);
|
||||
}
|
||||
|
||||
/**
|
||||
* Round trip (string <-> data structure) test for a more complex,
|
||||
* inductive `NixStringContextElem::Built`.
|
||||
*/
|
||||
TEST(NixStringContextElemTest, built_built) {
|
||||
/**
|
||||
* We set these in tests rather than the regular globals so we don't have
|
||||
* to worry about race conditions if the tests run concurrently.
|
||||
*/
|
||||
ExperimentalFeatureSettings mockXpSettings;
|
||||
mockXpSettings.set("experimental-features", "dynamic-derivations ca-derivations");
|
||||
|
||||
std::string_view built = "!foo!bar!g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x.drv";
|
||||
auto elem = NixStringContextElem::parse(built, mockXpSettings);
|
||||
auto * p = std::get_if<NixStringContextElem::Built>(&elem.raw);
|
||||
ASSERT_TRUE(p);
|
||||
ASSERT_EQ(p->output, "foo");
|
||||
auto * drvPath = std::get_if<SingleDerivedPath::Built>(&*p->drvPath);
|
||||
ASSERT_TRUE(drvPath);
|
||||
ASSERT_EQ(drvPath->output, "bar");
|
||||
ASSERT_EQ(*drvPath->drvPath, ((SingleDerivedPath) SingleDerivedPath::Opaque {
|
||||
.path = StorePath { built.substr(9) },
|
||||
}));
|
||||
ASSERT_EQ(elem.to_string(), built);
|
||||
}
|
||||
|
||||
/**
|
||||
* Without the right experimental features enabled, we cannot parse a
|
||||
* complex inductive string context element.
|
||||
*/
|
||||
TEST(NixStringContextElemTest, built_built_xp) {
|
||||
ASSERT_THROW(
|
||||
NixStringContextElem::parse("!foo!bar!g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x.drv"), MissingExperimentalFeature);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
namespace rc {
|
||||
using namespace nix;
|
||||
|
||||
Gen<NixStringContextElem::DrvDeep> Arbitrary<NixStringContextElem::DrvDeep>::arbitrary()
|
||||
{
|
||||
return gen::just(NixStringContextElem::DrvDeep {
|
||||
.drvPath = *gen::arbitrary<StorePath>(),
|
||||
});
|
||||
}
|
||||
|
||||
Gen<NixStringContextElem> Arbitrary<NixStringContextElem>::arbitrary()
|
||||
{
|
||||
switch (*gen::inRange<uint8_t>(0, std::variant_size_v<NixStringContextElem::Raw>)) {
|
||||
case 0:
|
||||
return gen::just<NixStringContextElem>(*gen::arbitrary<NixStringContextElem::Opaque>());
|
||||
case 1:
|
||||
return gen::just<NixStringContextElem>(*gen::arbitrary<NixStringContextElem::DrvDeep>());
|
||||
case 2:
|
||||
return gen::just<NixStringContextElem>(*gen::arbitrary<NixStringContextElem::Built>());
|
||||
default:
|
||||
assert(false);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
namespace nix {
|
||||
|
||||
#ifndef COVERAGE
|
||||
|
||||
RC_GTEST_PROP(
|
||||
NixStringContextElemTest,
|
||||
prop_round_rip,
|
||||
(const NixStringContextElem & o))
|
||||
{
|
||||
RC_ASSERT(o == NixStringContextElem::parse(o.to_string()));
|
||||
}
|
||||
|
||||
#endif
|
||||
|
||||
}
|
|
@@ -1,31 +0,0 @@
#pragma once
///@file

#include <rapidcheck/gen/Arbitrary.h>

#include <value/context.hh>

namespace rc {
using namespace nix;

template<>
struct Arbitrary<NixStringContextElem::Opaque> {
    static Gen<NixStringContextElem::Opaque> arbitrary();
};

template<>
struct Arbitrary<NixStringContextElem::Built> {
    static Gen<NixStringContextElem::Built> arbitrary();
};

template<>
struct Arbitrary<NixStringContextElem::DrvDeep> {
    static Gen<NixStringContextElem::DrvDeep> arbitrary();
};

template<>
struct Arbitrary<NixStringContextElem> {
    static Gen<NixStringContextElem> arbitrary();
};

}
@ -1,237 +0,0 @@
|
|||
#include "tests/libexpr.hh"
|
||||
|
||||
#include "value.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
using namespace testing;
|
||||
|
||||
struct ValuePrintingTests : LibExprTest
|
||||
{
|
||||
template<class... A>
|
||||
void test(Value v, std::string_view expected, A... args)
|
||||
{
|
||||
std::stringstream out;
|
||||
v.print(state.symbols, out, args...);
|
||||
ASSERT_EQ(out.str(), expected);
|
||||
}
|
||||
};
|
||||
|
||||
TEST_F(ValuePrintingTests, tInt)
|
||||
{
|
||||
Value vInt;
|
||||
vInt.mkInt(10);
|
||||
test(vInt, "10");
|
||||
}
|
||||
|
||||
TEST_F(ValuePrintingTests, tBool)
|
||||
{
|
||||
Value vBool;
|
||||
vBool.mkBool(true);
|
||||
test(vBool, "true");
|
||||
}
|
||||
|
||||
TEST_F(ValuePrintingTests, tString)
|
||||
{
|
||||
Value vString;
|
||||
vString.mkString("some-string");
|
||||
test(vString, "\"some-string\"");
|
||||
}
|
||||
|
||||
TEST_F(ValuePrintingTests, tPath)
|
||||
{
|
||||
Value vPath;
|
||||
vPath.mkString("/foo");
|
||||
test(vPath, "\"/foo\"");
|
||||
}
|
||||
|
||||
TEST_F(ValuePrintingTests, tNull)
|
||||
{
|
||||
Value vNull;
|
||||
vNull.mkNull();
|
||||
test(vNull, "null");
|
||||
}
|
||||
|
||||
TEST_F(ValuePrintingTests, tAttrs)
|
||||
{
|
||||
Value vOne;
|
||||
vOne.mkInt(1);
|
||||
|
||||
Value vTwo;
|
||||
vTwo.mkInt(2);
|
||||
|
||||
BindingsBuilder builder(state, state.allocBindings(10));
|
||||
builder.insert(state.symbols.create("one"), &vOne);
|
||||
builder.insert(state.symbols.create("two"), &vTwo);
|
||||
|
||||
Value vAttrs;
|
||||
vAttrs.mkAttrs(builder.finish());
|
||||
|
||||
test(vAttrs, "{ one = 1; two = 2; }");
|
||||
}
|
||||
|
||||
TEST_F(ValuePrintingTests, tList)
|
||||
{
|
||||
Value vOne;
|
||||
vOne.mkInt(1);
|
||||
|
||||
Value vTwo;
|
||||
vTwo.mkInt(2);
|
||||
|
||||
Value vList;
|
||||
state.mkList(vList, 5);
|
||||
vList.bigList.elems[0] = &vOne;
|
||||
vList.bigList.elems[1] = &vTwo;
|
||||
vList.bigList.size = 3;
|
||||
|
||||
test(vList, "[ 1 2 (nullptr) ]");
|
||||
}
|
||||
|
||||
TEST_F(ValuePrintingTests, vThunk)
|
||||
{
|
||||
Value vThunk;
|
||||
vThunk.mkThunk(nullptr, nullptr);
|
||||
|
||||
test(vThunk, "<CODE>");
|
||||
}
|
||||
|
||||
TEST_F(ValuePrintingTests, vApp)
|
||||
{
|
||||
Value vApp;
|
||||
vApp.mkApp(nullptr, nullptr);
|
||||
|
||||
test(vApp, "<CODE>");
|
||||
}
|
||||
|
||||
TEST_F(ValuePrintingTests, vLambda)
|
||||
{
|
||||
Value vLambda;
|
||||
vLambda.mkLambda(nullptr, nullptr);
|
||||
|
||||
test(vLambda, "<LAMBDA>");
|
||||
}
|
||||
|
||||
TEST_F(ValuePrintingTests, vPrimOp)
|
||||
{
|
||||
Value vPrimOp;
|
||||
PrimOp primOp{};
|
||||
vPrimOp.mkPrimOp(&primOp);
|
||||
|
||||
test(vPrimOp, "<PRIMOP>");
|
||||
}
|
||||
|
||||
TEST_F(ValuePrintingTests, vPrimOpApp)
|
||||
{
|
||||
Value vPrimOpApp;
|
||||
vPrimOpApp.mkPrimOpApp(nullptr, nullptr);
|
||||
|
||||
test(vPrimOpApp, "<PRIMOP-APP>");
|
||||
}
|
||||
|
||||
TEST_F(ValuePrintingTests, vExternal)
|
||||
{
|
||||
struct MyExternal : ExternalValueBase
|
||||
{
|
||||
public:
|
||||
std::string showType() const override
|
||||
{
|
||||
return "";
|
||||
}
|
||||
std::string typeOf() const override
|
||||
{
|
||||
return "";
|
||||
}
|
||||
virtual std::ostream & print(std::ostream & str) const override
|
||||
{
|
||||
str << "testing-external!";
|
||||
return str;
|
||||
}
|
||||
} myExternal;
|
||||
Value vExternal;
|
||||
vExternal.mkExternal(&myExternal);
|
||||
|
||||
test(vExternal, "testing-external!");
|
||||
}
|
||||
|
||||
TEST_F(ValuePrintingTests, vFloat)
|
||||
{
|
||||
Value vFloat;
|
||||
vFloat.mkFloat(2.0);
|
||||
|
||||
test(vFloat, "2");
|
||||
}
|
||||
|
||||
TEST_F(ValuePrintingTests, vBlackhole)
|
||||
{
|
||||
Value vBlackhole;
|
||||
vBlackhole.mkBlackhole();
|
||||
test(vBlackhole, "«potential infinite recursion»");
|
||||
}
|
||||
|
||||
TEST_F(ValuePrintingTests, depthAttrs)
|
||||
{
|
||||
Value vOne;
|
||||
vOne.mkInt(1);
|
||||
|
||||
Value vTwo;
|
||||
vTwo.mkInt(2);
|
||||
|
||||
BindingsBuilder builder(state, state.allocBindings(10));
|
||||
builder.insert(state.symbols.create("one"), &vOne);
|
||||
builder.insert(state.symbols.create("two"), &vTwo);
|
||||
|
||||
Value vAttrs;
|
||||
vAttrs.mkAttrs(builder.finish());
|
||||
|
||||
BindingsBuilder builder2(state, state.allocBindings(10));
|
||||
builder2.insert(state.symbols.create("one"), &vOne);
|
||||
builder2.insert(state.symbols.create("two"), &vTwo);
|
||||
builder2.insert(state.symbols.create("nested"), &vAttrs);
|
||||
|
||||
Value vNested;
|
||||
vNested.mkAttrs(builder2.finish());
|
||||
|
||||
test(vNested, "{ nested = «too deep»; one = «too deep»; two = «too deep»; }", false, 1);
|
||||
test(vNested, "{ nested = { one = «too deep»; two = «too deep»; }; one = 1; two = 2; }", false, 2);
|
||||
test(vNested, "{ nested = { one = 1; two = 2; }; one = 1; two = 2; }", false, 3);
|
||||
test(vNested, "{ nested = { one = 1; two = 2; }; one = 1; two = 2; }", false, 4);
|
||||
}
|
||||
|
||||
TEST_F(ValuePrintingTests, depthList)
|
||||
{
|
||||
Value vOne;
|
||||
vOne.mkInt(1);
|
||||
|
||||
Value vTwo;
|
||||
vTwo.mkInt(2);
|
||||
|
||||
BindingsBuilder builder(state, state.allocBindings(10));
|
||||
builder.insert(state.symbols.create("one"), &vOne);
|
||||
builder.insert(state.symbols.create("two"), &vTwo);
|
||||
|
||||
Value vAttrs;
|
||||
vAttrs.mkAttrs(builder.finish());
|
||||
|
||||
BindingsBuilder builder2(state, state.allocBindings(10));
|
||||
builder2.insert(state.symbols.create("one"), &vOne);
|
||||
builder2.insert(state.symbols.create("two"), &vTwo);
|
||||
builder2.insert(state.symbols.create("nested"), &vAttrs);
|
||||
|
||||
Value vNested;
|
||||
vNested.mkAttrs(builder2.finish());
|
||||
|
||||
Value vList;
|
||||
state.mkList(vList, 5);
|
||||
vList.bigList.elems[0] = &vOne;
|
||||
vList.bigList.elems[1] = &vTwo;
|
||||
vList.bigList.elems[2] = &vNested;
|
||||
vList.bigList.size = 3;
|
||||
|
||||
test(vList, "[ «too deep» «too deep» «too deep» ]", false, 1);
|
||||
test(vList, "[ 1 2 { nested = «too deep»; one = «too deep»; two = «too deep»; } ]", false, 2);
|
||||
test(vList, "[ 1 2 { nested = { one = «too deep»; two = «too deep»; }; one = 1; two = 2; } ]", false, 3);
|
||||
test(vList, "[ 1 2 { nested = { one = 1; two = 2; }; one = 1; two = 2; } ]", false, 4);
|
||||
test(vList, "[ 1 2 { nested = { one = 1; two = 2; }; one = 1; two = 2; } ]", false, 5);
|
||||
}
|
||||
|
||||
} // namespace nix
|
|
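The trailing arguments in the depth tests above are forwarded verbatim to Value::print by the test helper; the final integer is evidently a nesting-depth cap (values below it print as «too deep»), and the boolean before it is passed through to Value::print unchanged. A small additional sketch in the same style, assuming scalars are unaffected by the cap:

    TEST_F(ValuePrintingTests, depthInt)
    {
        Value vInt;
        vInt.mkInt(10);

        // A top-level scalar has no children to elide, so the depth cap changes nothing here.
        test(vInt, "10", false, 1);
    }
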
@ -1,187 +0,0 @@
|
|||
#include <regex>
|
||||
|
||||
#include <nlohmann/json.hpp>
|
||||
#include <gtest/gtest.h>
|
||||
|
||||
#include "common-protocol.hh"
|
||||
#include "common-protocol-impl.hh"
|
||||
#include "build-result.hh"
|
||||
#include "tests/protocol.hh"
|
||||
#include "tests/characterization.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
const char commonProtoDir[] = "common-protocol";
|
||||
|
||||
class CommonProtoTest : public ProtoTest<CommonProto, commonProtoDir>
|
||||
{
|
||||
public:
|
||||
/**
|
||||
* Golden test for `T` reading
|
||||
*/
|
||||
template<typename T>
|
||||
void readProtoTest(PathView testStem, const T & expected)
|
||||
{
|
||||
CharacterizationTest::readTest(testStem, [&](const auto & encoded) {
|
||||
T got = ({
|
||||
StringSource from { encoded };
|
||||
CommonProto::Serialise<T>::read(
|
||||
*store,
|
||||
CommonProto::ReadConn { .from = from });
|
||||
});
|
||||
|
||||
ASSERT_EQ(got, expected);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Golden test for `T` write
|
||||
*/
|
||||
template<typename T>
|
||||
void writeProtoTest(PathView testStem, const T & decoded)
|
||||
{
|
||||
CharacterizationTest::writeTest(testStem, [&]() -> std::string {
|
||||
StringSink to;
|
||||
CommonProto::Serialise<T>::write(
|
||||
*store,
|
||||
CommonProto::WriteConn { .to = to },
|
||||
decoded);
|
||||
return to.s;
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
#define CHARACTERIZATION_TEST(NAME, STEM, VALUE) \
|
||||
TEST_F(CommonProtoTest, NAME ## _read) { \
|
||||
readProtoTest(STEM, VALUE); \
|
||||
} \
|
||||
TEST_F(CommonProtoTest, NAME ## _write) { \
|
||||
writeProtoTest(STEM, VALUE); \
|
||||
}
|
||||
|
||||
CHARACTERIZATION_TEST(
|
||||
string,
|
||||
"string",
|
||||
(std::tuple<std::string, std::string, std::string, std::string, std::string> {
|
||||
"",
|
||||
"hi",
|
||||
"white rabbit",
|
||||
"大白兔",
|
||||
"oh no \0\0\0 what was that!",
|
||||
}))
|
||||
|
||||
CHARACTERIZATION_TEST(
|
||||
storePath,
|
||||
"store-path",
|
||||
(std::tuple<StorePath, StorePath> {
|
||||
StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo" },
|
||||
StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo-bar" },
|
||||
}))
|
||||
|
||||
CHARACTERIZATION_TEST(
|
||||
contentAddress,
|
||||
"content-address",
|
||||
(std::tuple<ContentAddress, ContentAddress, ContentAddress> {
|
||||
ContentAddress {
|
||||
.method = TextIngestionMethod {},
|
||||
.hash = hashString(HashType::htSHA256, "Derive(...)"),
|
||||
},
|
||||
ContentAddress {
|
||||
.method = FileIngestionMethod::Flat,
|
||||
.hash = hashString(HashType::htSHA1, "blob blob..."),
|
||||
},
|
||||
ContentAddress {
|
||||
.method = FileIngestionMethod::Recursive,
|
||||
.hash = hashString(HashType::htSHA256, "(...)"),
|
||||
},
|
||||
}))
|
||||
|
||||
CHARACTERIZATION_TEST(
|
||||
drvOutput,
|
||||
"drv-output",
|
||||
(std::tuple<DrvOutput, DrvOutput> {
|
||||
{
|
||||
.drvHash = Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="),
|
||||
.outputName = "baz",
|
||||
},
|
||||
DrvOutput {
|
||||
.drvHash = Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="),
|
||||
.outputName = "quux",
|
||||
},
|
||||
}))
|
||||
|
||||
CHARACTERIZATION_TEST(
|
||||
realisation,
|
||||
"realisation",
|
||||
(std::tuple<Realisation, Realisation> {
|
||||
Realisation {
|
||||
.id = DrvOutput {
|
||||
.drvHash = Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="),
|
||||
.outputName = "baz",
|
||||
},
|
||||
.outPath = StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo" },
|
||||
.signatures = { "asdf", "qwer" },
|
||||
},
|
||||
Realisation {
|
||||
.id = {
|
||||
.drvHash = Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="),
|
||||
.outputName = "baz",
|
||||
},
|
||||
.outPath = StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo" },
|
||||
.signatures = { "asdf", "qwer" },
|
||||
.dependentRealisations = {
|
||||
{
|
||||
DrvOutput {
|
||||
.drvHash = Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="),
|
||||
.outputName = "quux",
|
||||
},
|
||||
StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo" },
|
||||
},
|
||||
},
|
||||
},
|
||||
}))
|
||||
|
||||
CHARACTERIZATION_TEST(
|
||||
vector,
|
||||
"vector",
|
||||
(std::tuple<std::vector<std::string>, std::vector<std::string>, std::vector<std::string>, std::vector<std::vector<std::string>>> {
|
||||
{ },
|
||||
{ "" },
|
||||
{ "", "foo", "bar" },
|
||||
{ {}, { "" }, { "", "1", "2" } },
|
||||
}))
|
||||
|
||||
CHARACTERIZATION_TEST(
|
||||
set,
|
||||
"set",
|
||||
(std::tuple<std::set<std::string>, std::set<std::string>, std::set<std::string>, std::set<std::set<std::string>>> {
|
||||
{ },
|
||||
{ "" },
|
||||
{ "", "foo", "bar" },
|
||||
{ {}, { "" }, { "", "1", "2" } },
|
||||
}))
|
||||
|
||||
CHARACTERIZATION_TEST(
|
||||
optionalStorePath,
|
||||
"optional-store-path",
|
||||
(std::tuple<std::optional<StorePath>, std::optional<StorePath>> {
|
||||
std::nullopt,
|
||||
std::optional {
|
||||
StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo-bar" },
|
||||
},
|
||||
}))
|
||||
|
||||
CHARACTERIZATION_TEST(
|
||||
optionalContentAddress,
|
||||
"optional-content-address",
|
||||
(std::tuple<std::optional<ContentAddress>, std::optional<ContentAddress>> {
|
||||
std::nullopt,
|
||||
std::optional {
|
||||
ContentAddress {
|
||||
.method = FileIngestionMethod::Flat,
|
||||
.hash = hashString(HashType::htSHA1, "blob blob..."),
|
||||
},
|
||||
},
|
||||
}))
|
||||
|
||||
}
|
|
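For readers skimming the macro above: CHARACTERIZATION_TEST(NAME, STEM, VALUE) only stamps out a matched pair of golden tests against the same file. The store-path case, for instance, expands to roughly:

    TEST_F(CommonProtoTest, storePath_read) {
        readProtoTest("store-path",
            (std::tuple<StorePath, StorePath> {
                StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo" },
                StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo-bar" },
            }));
    }
    TEST_F(CommonProtoTest, storePath_write) {
        writeProtoTest("store-path",
            (std::tuple<StorePath, StorePath> {
                StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo" },
                StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo-bar" },
            }));
    }
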
@ -1,298 +0,0 @@
|
|||
#include <nlohmann/json.hpp>
|
||||
#include <gtest/gtest.h>
|
||||
|
||||
#include "experimental-features.hh"
|
||||
#include "derivations.hh"
|
||||
|
||||
#include "tests/libstore.hh"
|
||||
#include "tests/characterization.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
using nlohmann::json;
|
||||
|
||||
class DerivationTest : public CharacterizationTest, public LibStoreTest
|
||||
{
|
||||
Path unitTestData = getUnitTestData() + "/libstore/derivation";
|
||||
|
||||
public:
|
||||
Path goldenMaster(std::string_view testStem) const override {
|
||||
return unitTestData + "/" + testStem;
|
||||
}
|
||||
|
||||
/**
|
||||
* We set these in tests rather than the regular globals so we don't have
|
||||
* to worry about race conditions if the tests run concurrently.
|
||||
*/
|
||||
ExperimentalFeatureSettings mockXpSettings;
|
||||
};
|
||||
|
||||
class CaDerivationTest : public DerivationTest
|
||||
{
|
||||
void SetUp() override
|
||||
{
|
||||
mockXpSettings.set("experimental-features", "ca-derivations");
|
||||
}
|
||||
};
|
||||
|
||||
class DynDerivationTest : public DerivationTest
|
||||
{
|
||||
void SetUp() override
|
||||
{
|
||||
mockXpSettings.set("experimental-features", "dynamic-derivations ca-derivations");
|
||||
}
|
||||
};
|
||||
|
||||
class ImpureDerivationTest : public DerivationTest
|
||||
{
|
||||
void SetUp() override
|
||||
{
|
||||
mockXpSettings.set("experimental-features", "impure-derivations");
|
||||
}
|
||||
};
|
||||
|
||||
TEST_F(DerivationTest, BadATerm_version) {
|
||||
ASSERT_THROW(
|
||||
parseDerivation(
|
||||
*store,
|
||||
readFile(goldenMaster("bad-version.drv")),
|
||||
"whatever",
|
||||
mockXpSettings),
|
||||
FormatError);
|
||||
}
|
||||
|
||||
TEST_F(DynDerivationTest, BadATerm_oldVersionDynDeps) {
|
||||
ASSERT_THROW(
|
||||
parseDerivation(
|
||||
*store,
|
||||
readFile(goldenMaster("bad-old-version-dyn-deps.drv")),
|
||||
"dyn-dep-derivation",
|
||||
mockXpSettings),
|
||||
FormatError);
|
||||
}
|
||||
|
||||
#define TEST_JSON(FIXTURE, NAME, VAL, DRV_NAME, OUTPUT_NAME) \
|
||||
TEST_F(FIXTURE, DerivationOutput_ ## NAME ## _from_json) { \
|
||||
readTest("output-" #NAME ".json", [&](const auto & encoded_) { \
|
||||
auto encoded = json::parse(encoded_); \
|
||||
DerivationOutput got = DerivationOutput::fromJSON( \
|
||||
*store, \
|
||||
DRV_NAME, \
|
||||
OUTPUT_NAME, \
|
||||
encoded, \
|
||||
mockXpSettings); \
|
||||
DerivationOutput expected { VAL }; \
|
||||
ASSERT_EQ(got, expected); \
|
||||
}); \
|
||||
} \
|
||||
\
|
||||
TEST_F(FIXTURE, DerivationOutput_ ## NAME ## _to_json) { \
|
||||
writeTest("output-" #NAME ".json", [&]() -> json { \
|
||||
return DerivationOutput { (VAL) }.toJSON( \
|
||||
*store, \
|
||||
(DRV_NAME), \
|
||||
(OUTPUT_NAME)); \
|
||||
}, [](const auto & file) { \
|
||||
return json::parse(readFile(file)); \
|
||||
}, [](const auto & file, const auto & got) { \
|
||||
return writeFile(file, got.dump(2) + "\n"); \
|
||||
}); \
|
||||
}
|
||||
|
||||
TEST_JSON(DerivationTest, inputAddressed,
|
||||
(DerivationOutput::InputAddressed {
|
||||
.path = store->parseStorePath("/nix/store/c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-drv-name-output-name"),
|
||||
}),
|
||||
"drv-name", "output-name")
|
||||
|
||||
TEST_JSON(DerivationTest, caFixedFlat,
|
||||
(DerivationOutput::CAFixed {
|
||||
.ca = {
|
||||
.method = FileIngestionMethod::Flat,
|
||||
.hash = Hash::parseAnyPrefixed("sha256-iUUXyRY8iW7DGirb0zwGgf1fRbLA7wimTJKgP7l/OQ8="),
|
||||
},
|
||||
}),
|
||||
"drv-name", "output-name")
|
||||
|
||||
TEST_JSON(DerivationTest, caFixedNAR,
|
||||
(DerivationOutput::CAFixed {
|
||||
.ca = {
|
||||
.method = FileIngestionMethod::Recursive,
|
||||
.hash = Hash::parseAnyPrefixed("sha256-iUUXyRY8iW7DGirb0zwGgf1fRbLA7wimTJKgP7l/OQ8="),
|
||||
},
|
||||
}),
|
||||
"drv-name", "output-name")
|
||||
|
||||
TEST_JSON(DynDerivationTest, caFixedText,
|
||||
(DerivationOutput::CAFixed {
|
||||
.ca = {
|
||||
.hash = Hash::parseAnyPrefixed("sha256-iUUXyRY8iW7DGirb0zwGgf1fRbLA7wimTJKgP7l/OQ8="),
|
||||
},
|
||||
}),
|
||||
"drv-name", "output-name")
|
||||
|
||||
TEST_JSON(CaDerivationTest, caFloating,
|
||||
(DerivationOutput::CAFloating {
|
||||
.method = FileIngestionMethod::Recursive,
|
||||
.hashType = htSHA256,
|
||||
}),
|
||||
"drv-name", "output-name")
|
||||
|
||||
TEST_JSON(DerivationTest, deferred,
|
||||
DerivationOutput::Deferred { },
|
||||
"drv-name", "output-name")
|
||||
|
||||
TEST_JSON(ImpureDerivationTest, impure,
|
||||
(DerivationOutput::Impure {
|
||||
.method = FileIngestionMethod::Recursive,
|
||||
.hashType = htSHA256,
|
||||
}),
|
||||
"drv-name", "output-name")
|
||||
|
||||
#undef TEST_JSON
|
||||
|
||||
#define TEST_JSON(FIXTURE, NAME, VAL) \
|
||||
TEST_F(FIXTURE, Derivation_ ## NAME ## _from_json) { \
|
||||
readTest(#NAME ".json", [&](const auto & encoded_) { \
|
||||
auto encoded = json::parse(encoded_); \
|
||||
Derivation expected { VAL }; \
|
||||
Derivation got = Derivation::fromJSON( \
|
||||
*store, \
|
||||
encoded, \
|
||||
mockXpSettings); \
|
||||
ASSERT_EQ(got, expected); \
|
||||
}); \
|
||||
} \
|
||||
\
|
||||
TEST_F(FIXTURE, Derivation_ ## NAME ## _to_json) { \
|
||||
writeTest(#NAME ".json", [&]() -> json { \
|
||||
return Derivation { VAL }.toJSON(*store); \
|
||||
}, [](const auto & file) { \
|
||||
return json::parse(readFile(file)); \
|
||||
}, [](const auto & file, const auto & got) { \
|
||||
return writeFile(file, got.dump(2) + "\n"); \
|
||||
}); \
|
||||
}
|
||||
|
||||
#define TEST_ATERM(FIXTURE, NAME, VAL, DRV_NAME) \
|
||||
TEST_F(FIXTURE, Derivation_ ## NAME ## _from_aterm) { \
|
||||
readTest(#NAME ".drv", [&](auto encoded) { \
|
||||
Derivation expected { VAL }; \
|
||||
auto got = parseDerivation( \
|
||||
*store, \
|
||||
std::move(encoded), \
|
||||
DRV_NAME, \
|
||||
mockXpSettings); \
|
||||
ASSERT_EQ(got.toJSON(*store), expected.toJSON(*store)) ; \
|
||||
ASSERT_EQ(got, expected); \
|
||||
}); \
|
||||
} \
|
||||
\
|
||||
TEST_F(FIXTURE, Derivation_ ## NAME ## _to_aterm) { \
|
||||
writeTest(#NAME ".drv", [&]() -> std::string { \
|
||||
return (VAL).unparse(*store, false); \
|
||||
}); \
|
||||
}
|
||||
|
||||
Derivation makeSimpleDrv(const Store & store) {
|
||||
Derivation drv;
|
||||
drv.name = "simple-derivation";
|
||||
drv.inputSrcs = {
|
||||
store.parseStorePath("/nix/store/c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-dep1"),
|
||||
};
|
||||
drv.inputDrvs = {
|
||||
.map = {
|
||||
{
|
||||
store.parseStorePath("/nix/store/c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-dep2.drv"),
|
||||
{
|
||||
.value = {
|
||||
"cat",
|
||||
"dog",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
drv.platform = "wasm-sel4";
|
||||
drv.builder = "foo";
|
||||
drv.args = {
|
||||
"bar",
|
||||
"baz",
|
||||
};
|
||||
drv.env = {
|
||||
{
|
||||
"BIG_BAD",
|
||||
"WOLF",
|
||||
},
|
||||
};
|
||||
return drv;
|
||||
}
|
||||
|
||||
TEST_JSON(DerivationTest, simple, makeSimpleDrv(*store))
|
||||
|
||||
TEST_ATERM(DerivationTest, simple,
|
||||
makeSimpleDrv(*store),
|
||||
"simple-derivation")
|
||||
|
||||
Derivation makeDynDepDerivation(const Store & store) {
|
||||
Derivation drv;
|
||||
drv.name = "dyn-dep-derivation";
|
||||
drv.inputSrcs = {
|
||||
store.parseStorePath("/nix/store/c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-dep1"),
|
||||
};
|
||||
drv.inputDrvs = {
|
||||
.map = {
|
||||
{
|
||||
store.parseStorePath("/nix/store/c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-dep2.drv"),
|
||||
DerivedPathMap<StringSet>::ChildNode {
|
||||
.value = {
|
||||
"cat",
|
||||
"dog",
|
||||
},
|
||||
.childMap = {
|
||||
{
|
||||
"cat",
|
||||
DerivedPathMap<StringSet>::ChildNode {
|
||||
.value = {
|
||||
"kitten",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
"goose",
|
||||
DerivedPathMap<StringSet>::ChildNode {
|
||||
.value = {
|
||||
"gosling",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
drv.platform = "wasm-sel4";
|
||||
drv.builder = "foo";
|
||||
drv.args = {
|
||||
"bar",
|
||||
"baz",
|
||||
};
|
||||
drv.env = {
|
||||
{
|
||||
"BIG_BAD",
|
||||
"WOLF",
|
||||
},
|
||||
};
|
||||
return drv;
|
||||
}
|
||||
|
||||
TEST_JSON(DynDerivationTest, dynDerivationDeps, makeDynDepDerivation(*store))
|
||||
|
||||
TEST_ATERM(DynDerivationTest, dynDerivationDeps,
|
||||
makeDynDepDerivation(*store),
|
||||
"dyn-dep-derivation")
|
||||
|
||||
#undef TEST_JSON
|
||||
#undef TEST_ATERM
|
||||
|
||||
}
|
|
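As with the JSON macro, TEST_ATERM simply pairs a parse test against a golden .drv file with an unparse test that regenerates it. The simple case above expands to roughly:

    TEST_F(DerivationTest, Derivation_simple_from_aterm) {
        readTest("simple.drv", [&](auto encoded) {
            Derivation expected { makeSimpleDrv(*store) };
            auto got = parseDerivation(*store, std::move(encoded), "simple-derivation", mockXpSettings);
            // Comparing the JSON projections first gives a readable diff on failure.
            ASSERT_EQ(got.toJSON(*store), expected.toJSON(*store));
            ASSERT_EQ(got, expected);
        });
    }
    TEST_F(DerivationTest, Derivation_simple_to_aterm) {
        writeTest("simple.drv", [&]() -> std::string {
            return makeSimpleDrv(*store).unparse(*store, false);
        });
    }
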
@ -1,153 +0,0 @@
|
|||
#include <regex>
|
||||
|
||||
#include <nlohmann/json.hpp>
|
||||
#include <gtest/gtest.h>
|
||||
#include <rapidcheck/gtest.h>
|
||||
|
||||
#include "tests/derived-path.hh"
|
||||
#include "tests/libstore.hh"
|
||||
|
||||
namespace rc {
|
||||
using namespace nix;
|
||||
|
||||
Gen<DerivedPath::Opaque> Arbitrary<DerivedPath::Opaque>::arbitrary()
|
||||
{
|
||||
return gen::just(DerivedPath::Opaque {
|
||||
.path = *gen::arbitrary<StorePath>(),
|
||||
});
|
||||
}
|
||||
|
||||
Gen<SingleDerivedPath::Built> Arbitrary<SingleDerivedPath::Built>::arbitrary()
|
||||
{
|
||||
return gen::just(SingleDerivedPath::Built {
|
||||
.drvPath = make_ref<SingleDerivedPath>(*gen::arbitrary<SingleDerivedPath>()),
|
||||
.output = (*gen::arbitrary<StorePathName>()).name,
|
||||
});
|
||||
}
|
||||
|
||||
Gen<DerivedPath::Built> Arbitrary<DerivedPath::Built>::arbitrary()
|
||||
{
|
||||
return gen::just(DerivedPath::Built {
|
||||
.drvPath = make_ref<SingleDerivedPath>(*gen::arbitrary<SingleDerivedPath>()),
|
||||
.outputs = *gen::arbitrary<OutputsSpec>(),
|
||||
});
|
||||
}
|
||||
|
||||
Gen<SingleDerivedPath> Arbitrary<SingleDerivedPath>::arbitrary()
|
||||
{
|
||||
switch (*gen::inRange<uint8_t>(0, std::variant_size_v<SingleDerivedPath::Raw>)) {
|
||||
case 0:
|
||||
return gen::just<SingleDerivedPath>(*gen::arbitrary<SingleDerivedPath::Opaque>());
|
||||
case 1:
|
||||
return gen::just<SingleDerivedPath>(*gen::arbitrary<SingleDerivedPath::Built>());
|
||||
default:
|
||||
assert(false);
|
||||
}
|
||||
}
|
||||
|
||||
Gen<DerivedPath> Arbitrary<DerivedPath>::arbitrary()
|
||||
{
|
||||
switch (*gen::inRange<uint8_t>(0, std::variant_size_v<DerivedPath::Raw>)) {
|
||||
case 0:
|
||||
return gen::just<DerivedPath>(*gen::arbitrary<DerivedPath::Opaque>());
|
||||
case 1:
|
||||
return gen::just<DerivedPath>(*gen::arbitrary<DerivedPath::Built>());
|
||||
default:
|
||||
assert(false);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
namespace nix {
|
||||
|
||||
class DerivedPathTest : public LibStoreTest
|
||||
{
|
||||
};
|
||||
|
||||
/**
|
||||
* Round trip (string <-> data structure) test for
|
||||
* `DerivedPath::Opaque`.
|
||||
*/
|
||||
TEST_F(DerivedPathTest, opaque) {
|
||||
std::string_view opaque = "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x";
|
||||
auto elem = DerivedPath::parse(*store, opaque);
|
||||
auto * p = std::get_if<DerivedPath::Opaque>(&elem);
|
||||
ASSERT_TRUE(p);
|
||||
ASSERT_EQ(p->path, store->parseStorePath(opaque));
|
||||
ASSERT_EQ(elem.to_string(*store), opaque);
|
||||
}
|
||||
|
||||
/**
|
||||
* Round trip (string <-> data structure) test for a simpler
|
||||
* `DerivedPath::Built`.
|
||||
*/
|
||||
TEST_F(DerivedPathTest, built_opaque) {
|
||||
std::string_view built = "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x.drv^bar,foo";
|
||||
auto elem = DerivedPath::parse(*store, built);
|
||||
auto * p = std::get_if<DerivedPath::Built>(&elem);
|
||||
ASSERT_TRUE(p);
|
||||
ASSERT_EQ(p->outputs, ((OutputsSpec) OutputsSpec::Names { "foo", "bar" }));
|
||||
ASSERT_EQ(*p->drvPath, ((SingleDerivedPath) SingleDerivedPath::Opaque {
|
||||
.path = store->parseStorePath(built.substr(0, 49)),
|
||||
}));
|
||||
ASSERT_EQ(elem.to_string(*store), built);
|
||||
}
|
||||
|
||||
/**
|
||||
* Round trip (string <-> data structure) test for a more complex,
|
||||
* inductive `DerivedPath::Built`.
|
||||
*/
|
||||
TEST_F(DerivedPathTest, built_built) {
|
||||
/**
|
||||
* We set these in tests rather than the regular globals so we don't have
|
||||
* to worry about race conditions if the tests run concurrently.
|
||||
*/
|
||||
ExperimentalFeatureSettings mockXpSettings;
|
||||
mockXpSettings.set("experimental-features", "dynamic-derivations ca-derivations");
|
||||
|
||||
std::string_view built = "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x.drv^foo^bar,baz";
|
||||
auto elem = DerivedPath::parse(*store, built, mockXpSettings);
|
||||
auto * p = std::get_if<DerivedPath::Built>(&elem);
|
||||
ASSERT_TRUE(p);
|
||||
ASSERT_EQ(p->outputs, ((OutputsSpec) OutputsSpec::Names { "bar", "baz" }));
|
||||
auto * drvPath = std::get_if<SingleDerivedPath::Built>(&*p->drvPath);
|
||||
ASSERT_TRUE(drvPath);
|
||||
ASSERT_EQ(drvPath->output, "foo");
|
||||
ASSERT_EQ(*drvPath->drvPath, ((SingleDerivedPath) SingleDerivedPath::Opaque {
|
||||
.path = store->parseStorePath(built.substr(0, 49)),
|
||||
}));
|
||||
ASSERT_EQ(elem.to_string(*store), built);
|
||||
}
|
||||
|
||||
/**
|
||||
* Without the right experimental features enabled, we cannot parse a
|
||||
* complex inductive derived path.
|
||||
*/
|
||||
TEST_F(DerivedPathTest, built_built_xp) {
|
||||
ASSERT_THROW(
|
||||
DerivedPath::parse(*store, "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x.drv^foo^bar,baz"),
|
||||
MissingExperimentalFeature);
|
||||
}
|
||||
|
||||
#ifndef COVERAGE
|
||||
|
||||
RC_GTEST_FIXTURE_PROP(
|
||||
DerivedPathTest,
|
||||
prop_legacy_round_rip,
|
||||
(const DerivedPath & o))
|
||||
{
|
||||
RC_ASSERT(o == DerivedPath::parseLegacy(*store, o.to_string_legacy(*store)));
|
||||
}
|
||||
|
||||
RC_GTEST_FIXTURE_PROP(
|
||||
DerivedPathTest,
|
||||
prop_round_rip,
|
||||
(const DerivedPath & o))
|
||||
{
|
||||
RC_ASSERT(o == DerivedPath::parse(*store, o.to_string(*store)));
|
||||
}
|
||||
|
||||
#endif
|
||||
|
||||
}
|
|
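To summarise the string syntax exercised above: a plain store path parses to DerivedPath::Opaque, path^name,... selects named outputs of a derivation, path^* selects all of them, and a second ^ (dynamic derivations) requires the experimental features enabled as in built_built. A sketch reusing the dummy store paths from these tests:

    TEST_F(DerivedPathTest, parse_forms_sketch) {
        auto opaque = DerivedPath::parse(*store,
            "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x");
        ASSERT_TRUE(std::get_if<DerivedPath::Opaque>(&opaque));

        auto all = DerivedPath::parse(*store,
            "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-x.drv^*");
        auto * b = std::get_if<DerivedPath::Built>(&all);
        ASSERT_TRUE(b);
        ASSERT_EQ(b->outputs, ((OutputsSpec) OutputsSpec::All { }));
    }
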
@@ -1,39 +0,0 @@
#pragma once
///@file

#include <rapidcheck/gen/Arbitrary.h>

#include <derived-path.hh>

#include "tests/path.hh"
#include "tests/outputs-spec.hh"

namespace rc {
using namespace nix;

template<>
struct Arbitrary<SingleDerivedPath::Opaque> {
    static Gen<SingleDerivedPath::Opaque> arbitrary();
};

template<>
struct Arbitrary<SingleDerivedPath::Built> {
    static Gen<SingleDerivedPath::Built> arbitrary();
};

template<>
struct Arbitrary<SingleDerivedPath> {
    static Gen<SingleDerivedPath> arbitrary();
};

template<>
struct Arbitrary<DerivedPath::Built> {
    static Gen<DerivedPath::Built> arbitrary();
};

template<>
struct Arbitrary<DerivedPath> {
    static Gen<DerivedPath> arbitrary();
};

}

@@ -1,41 +0,0 @@
#include <gtest/gtest.h>

#include "downstream-placeholder.hh"

namespace nix {

TEST(DownstreamPlaceholder, unknownCaOutput) {
    /**
     * We set these in tests rather than the regular globals so we don't have
     * to worry about race conditions if the tests run concurrently.
     */
    ExperimentalFeatureSettings mockXpSettings;
    mockXpSettings.set("experimental-features", "ca-derivations");

    ASSERT_EQ(
        DownstreamPlaceholder::unknownCaOutput(
            StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv" },
            "out",
            mockXpSettings).render(),
        "/0c6rn30q4frawknapgwq386zq358m8r6msvywcvc89n6m5p2dgbz");
}

TEST(DownstreamPlaceholder, unknownDerivation) {
    /**
     * Same reason as above
     */
    ExperimentalFeatureSettings mockXpSettings;
    mockXpSettings.set("experimental-features", "dynamic-derivations ca-derivations");

    ASSERT_EQ(
        DownstreamPlaceholder::unknownDerivation(
            DownstreamPlaceholder::unknownCaOutput(
                StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv.drv" },
                "out",
                mockXpSettings),
            "out",
            mockXpSettings).render(),
        "/0gn6agqxjyyalf0dpihgyf49xq5hqxgw100f0wydnj6yqrhqsb3w");
}

}

@@ -1,26 +0,0 @@
#pragma once
///@file

#include <gtest/gtest.h>
#include <gmock/gmock.h>

#include "store-api.hh"

namespace nix {

class LibStoreTest : public virtual ::testing::Test {
public:
    static void SetUpTestSuite() {
        initLibStore();
    }

protected:
    LibStoreTest()
        : store(openStore("dummy://"))
    { }

    ref<Store> store;
};


} /* namespace nix */

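Every libstore suite below builds on this fixture: the store member is a dummy:// store, which handles store-path parsing and similar metadata operations without needing a writable real store. A minimal sketch of a derived suite:

    class MyStoreTest : public LibStoreTest { };

    TEST_F(MyStoreTest, printParseRoundTrip)
    {
        auto p = store->parseStorePath(
            "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo");
        // The dummy store still knows its store directory, so this round-trips.
        ASSERT_EQ(store->printStorePath(p),
            "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo");
    }
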
@@ -1,37 +0,0 @@
check: libstore-tests-exe_RUN

programs += libstore-tests-exe

libstore-tests-exe_NAME = libnixstore-tests

libstore-tests-exe_DIR := $(d)

ifeq ($(INSTALL_UNIT_TESTS), yes)
  libstore-tests-exe_INSTALL_DIR := $(checkbindir)
else
  libstore-tests-exe_INSTALL_DIR :=
endif

libstore-tests-exe_LIBS = libstore-tests

libstore-tests-exe_LDFLAGS := $(GTEST_LIBS)

libraries += libstore-tests

libstore-tests_NAME = libnixstore-tests

libstore-tests_DIR := $(d)

ifeq ($(INSTALL_UNIT_TESTS), yes)
  libstore-tests_INSTALL_DIR := $(checklibdir)
else
  libstore-tests_INSTALL_DIR :=
endif

libstore-tests_SOURCES := $(wildcard $(d)/*.cc)

libstore-tests_CXXFLAGS += -I src/libstore -I src/libutil

libstore-tests_LIBS = libutil-tests libstore libutil

libstore-tests_LDFLAGS := -lrapidcheck $(GTEST_LIBS)

@ -1,171 +0,0 @@
|
|||
#include "machines.hh"
|
||||
#include "globals.hh"
|
||||
#include "file-system.hh"
|
||||
#include "util.hh"
|
||||
|
||||
#include <gmock/gmock-matchers.h>
|
||||
|
||||
using testing::Contains;
|
||||
using testing::ElementsAre;
|
||||
using testing::EndsWith;
|
||||
using testing::Eq;
|
||||
using testing::Field;
|
||||
using testing::SizeIs;
|
||||
|
||||
using nix::absPath;
|
||||
using nix::FormatError;
|
||||
using nix::getMachines;
|
||||
using nix::Machine;
|
||||
using nix::Machines;
|
||||
using nix::pathExists;
|
||||
using nix::Settings;
|
||||
using nix::settings;
|
||||
|
||||
class Environment : public ::testing::Environment {
|
||||
public:
|
||||
void SetUp() override { settings.thisSystem = "TEST_ARCH-TEST_OS"; }
|
||||
};
|
||||
|
||||
testing::Environment* const foo_env =
|
||||
testing::AddGlobalTestEnvironment(new Environment);
|
||||
|
||||
TEST(machines, getMachinesWithEmptyBuilders) {
|
||||
settings.builders = "";
|
||||
Machines actual = getMachines();
|
||||
ASSERT_THAT(actual, SizeIs(0));
|
||||
}
|
||||
|
||||
TEST(machines, getMachinesUriOnly) {
|
||||
settings.builders = "nix@scratchy.labs.cs.uu.nl";
|
||||
Machines actual = getMachines();
|
||||
ASSERT_THAT(actual, SizeIs(1));
|
||||
EXPECT_THAT(actual[0], Field(&Machine::storeUri, Eq("ssh://nix@scratchy.labs.cs.uu.nl")));
|
||||
EXPECT_THAT(actual[0], Field(&Machine::systemTypes, ElementsAre("TEST_ARCH-TEST_OS")));
|
||||
EXPECT_THAT(actual[0], Field(&Machine::sshKey, SizeIs(0)));
|
||||
EXPECT_THAT(actual[0], Field(&Machine::maxJobs, Eq(1)));
|
||||
EXPECT_THAT(actual[0], Field(&Machine::speedFactor, Eq(1)));
|
||||
EXPECT_THAT(actual[0], Field(&Machine::supportedFeatures, SizeIs(0)));
|
||||
EXPECT_THAT(actual[0], Field(&Machine::mandatoryFeatures, SizeIs(0)));
|
||||
EXPECT_THAT(actual[0], Field(&Machine::sshPublicHostKey, SizeIs(0)));
|
||||
}
|
||||
|
||||
TEST(machines, getMachinesDefaults) {
|
||||
settings.builders = "nix@scratchy.labs.cs.uu.nl - - - - - - -";
|
||||
Machines actual = getMachines();
|
||||
ASSERT_THAT(actual, SizeIs(1));
|
||||
EXPECT_THAT(actual[0], Field(&Machine::storeUri, Eq("ssh://nix@scratchy.labs.cs.uu.nl")));
|
||||
EXPECT_THAT(actual[0], Field(&Machine::systemTypes, ElementsAre("TEST_ARCH-TEST_OS")));
|
||||
EXPECT_THAT(actual[0], Field(&Machine::sshKey, SizeIs(0)));
|
||||
EXPECT_THAT(actual[0], Field(&Machine::maxJobs, Eq(1)));
|
||||
EXPECT_THAT(actual[0], Field(&Machine::speedFactor, Eq(1)));
|
||||
EXPECT_THAT(actual[0], Field(&Machine::supportedFeatures, SizeIs(0)));
|
||||
EXPECT_THAT(actual[0], Field(&Machine::mandatoryFeatures, SizeIs(0)));
|
||||
EXPECT_THAT(actual[0], Field(&Machine::sshPublicHostKey, SizeIs(0)));
|
||||
}
|
||||
|
||||
TEST(machines, getMachinesWithNewLineSeparator) {
|
||||
settings.builders = "nix@scratchy.labs.cs.uu.nl\nnix@itchy.labs.cs.uu.nl";
|
||||
Machines actual = getMachines();
|
||||
ASSERT_THAT(actual, SizeIs(2));
|
||||
EXPECT_THAT(actual, Contains(Field(&Machine::storeUri, EndsWith("nix@scratchy.labs.cs.uu.nl"))));
|
||||
EXPECT_THAT(actual, Contains(Field(&Machine::storeUri, EndsWith("nix@itchy.labs.cs.uu.nl"))));
|
||||
}
|
||||
|
||||
TEST(machines, getMachinesWithSemicolonSeparator) {
|
||||
settings.builders = "nix@scratchy.labs.cs.uu.nl ; nix@itchy.labs.cs.uu.nl";
|
||||
Machines actual = getMachines();
|
||||
EXPECT_THAT(actual, SizeIs(2));
|
||||
EXPECT_THAT(actual, Contains(Field(&Machine::storeUri, EndsWith("nix@scratchy.labs.cs.uu.nl"))));
|
||||
EXPECT_THAT(actual, Contains(Field(&Machine::storeUri, EndsWith("nix@itchy.labs.cs.uu.nl"))));
|
||||
}
|
||||
|
||||
TEST(machines, getMachinesWithCorrectCompleteSingleBuilder) {
|
||||
settings.builders = "nix@scratchy.labs.cs.uu.nl i686-linux "
|
||||
"/home/nix/.ssh/id_scratchy_auto 8 3 kvm "
|
||||
"benchmark SSH+HOST+PUBLIC+KEY+BASE64+ENCODED==";
|
||||
Machines actual = getMachines();
|
||||
ASSERT_THAT(actual, SizeIs(1));
|
||||
EXPECT_THAT(actual[0], Field(&Machine::storeUri, EndsWith("nix@scratchy.labs.cs.uu.nl")));
|
||||
EXPECT_THAT(actual[0], Field(&Machine::systemTypes, ElementsAre("i686-linux")));
|
||||
EXPECT_THAT(actual[0], Field(&Machine::sshKey, Eq("/home/nix/.ssh/id_scratchy_auto")));
|
||||
EXPECT_THAT(actual[0], Field(&Machine::maxJobs, Eq(8)));
|
||||
EXPECT_THAT(actual[0], Field(&Machine::speedFactor, Eq(3)));
|
||||
EXPECT_THAT(actual[0], Field(&Machine::supportedFeatures, ElementsAre("kvm")));
|
||||
EXPECT_THAT(actual[0], Field(&Machine::mandatoryFeatures, ElementsAre("benchmark")));
|
||||
EXPECT_THAT(actual[0], Field(&Machine::sshPublicHostKey, Eq("SSH+HOST+PUBLIC+KEY+BASE64+ENCODED==")));
|
||||
}
|
||||
|
||||
TEST(machines,
|
||||
getMachinesWithCorrectCompleteSingleBuilderWithTabColumnDelimiter) {
|
||||
settings.builders =
|
||||
"nix@scratchy.labs.cs.uu.nl\ti686-linux\t/home/nix/.ssh/"
|
||||
"id_scratchy_auto\t8\t3\tkvm\tbenchmark\tSSH+HOST+PUBLIC+"
|
||||
"KEY+BASE64+ENCODED==";
|
||||
Machines actual = getMachines();
|
||||
ASSERT_THAT(actual, SizeIs(1));
|
||||
EXPECT_THAT(actual[0], Field(&Machine::storeUri, EndsWith("nix@scratchy.labs.cs.uu.nl")));
|
||||
EXPECT_THAT(actual[0], Field(&Machine::systemTypes, ElementsAre("i686-linux")));
|
||||
EXPECT_THAT(actual[0], Field(&Machine::sshKey, Eq("/home/nix/.ssh/id_scratchy_auto")));
|
||||
EXPECT_THAT(actual[0], Field(&Machine::maxJobs, Eq(8)));
|
||||
EXPECT_THAT(actual[0], Field(&Machine::speedFactor, Eq(3)));
|
||||
EXPECT_THAT(actual[0], Field(&Machine::supportedFeatures, ElementsAre("kvm")));
|
||||
EXPECT_THAT(actual[0], Field(&Machine::mandatoryFeatures, ElementsAre("benchmark")));
|
||||
EXPECT_THAT(actual[0], Field(&Machine::sshPublicHostKey, Eq("SSH+HOST+PUBLIC+KEY+BASE64+ENCODED==")));
|
||||
}
|
||||
|
||||
TEST(machines, getMachinesWithMultiOptions) {
|
||||
settings.builders = "nix@scratchy.labs.cs.uu.nl Arch1,Arch2 - - - "
|
||||
"SupportedFeature1,SupportedFeature2 "
|
||||
"MandatoryFeature1,MandatoryFeature2";
|
||||
Machines actual = getMachines();
|
||||
ASSERT_THAT(actual, SizeIs(1));
|
||||
EXPECT_THAT(actual[0], Field(&Machine::storeUri, EndsWith("nix@scratchy.labs.cs.uu.nl")));
|
||||
EXPECT_THAT(actual[0], Field(&Machine::systemTypes, ElementsAre("Arch1", "Arch2")));
|
||||
EXPECT_THAT(actual[0], Field(&Machine::supportedFeatures, ElementsAre("SupportedFeature1", "SupportedFeature2")));
|
||||
EXPECT_THAT(actual[0], Field(&Machine::mandatoryFeatures, ElementsAre("MandatoryFeature1", "MandatoryFeature2")));
|
||||
}
|
||||
|
||||
TEST(machines, getMachinesWithIncorrectFormat) {
|
||||
settings.builders = "nix@scratchy.labs.cs.uu.nl - - eight";
|
||||
EXPECT_THROW(getMachines(), FormatError);
|
||||
settings.builders = "nix@scratchy.labs.cs.uu.nl - - -1";
|
||||
EXPECT_THROW(getMachines(), FormatError);
|
||||
settings.builders = "nix@scratchy.labs.cs.uu.nl - - 8 three";
|
||||
EXPECT_THROW(getMachines(), FormatError);
|
||||
settings.builders = "nix@scratchy.labs.cs.uu.nl - - 8 -3";
|
||||
EXPECT_THROW(getMachines(), FormatError);
|
||||
settings.builders = "nix@scratchy.labs.cs.uu.nl - - 8 3 - - BAD_BASE64";
|
||||
EXPECT_THROW(getMachines(), FormatError);
|
||||
}
|
||||
|
||||
TEST(machines, getMachinesWithCorrectFileReference) {
|
||||
auto path = absPath("src/libstore/tests/test-data/machines.valid");
|
||||
ASSERT_TRUE(pathExists(path));
|
||||
|
||||
settings.builders = std::string("@") + path;
|
||||
Machines actual = getMachines();
|
||||
ASSERT_THAT(actual, SizeIs(3));
|
||||
EXPECT_THAT(actual, Contains(Field(&Machine::storeUri, EndsWith("nix@scratchy.labs.cs.uu.nl"))));
|
||||
EXPECT_THAT(actual, Contains(Field(&Machine::storeUri, EndsWith("nix@itchy.labs.cs.uu.nl"))));
|
||||
EXPECT_THAT(actual, Contains(Field(&Machine::storeUri, EndsWith("nix@poochie.labs.cs.uu.nl"))));
|
||||
}
|
||||
|
||||
TEST(machines, getMachinesWithCorrectFileReferenceToEmptyFile) {
|
||||
auto path = "/dev/null";
|
||||
ASSERT_TRUE(pathExists(path));
|
||||
|
||||
settings.builders = std::string("@") + path;
|
||||
Machines actual = getMachines();
|
||||
ASSERT_THAT(actual, SizeIs(0));
|
||||
}
|
||||
|
||||
TEST(machines, getMachinesWithIncorrectFileReference) {
|
||||
settings.builders = std::string("@") + absPath("/not/a/file");
|
||||
Machines actual = getMachines();
|
||||
ASSERT_THAT(actual, SizeIs(0));
|
||||
}
|
||||
|
||||
TEST(machines, getMachinesWithCorrectFileReferenceToIncorrectFile) {
|
||||
settings.builders = std::string("@") + absPath("src/libstore/tests/test-data/machines.bad_format");
|
||||
EXPECT_THROW(getMachines(), FormatError);
|
||||
}
|
|
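Collecting what the cases above exercise: builder entries are separated by ';' or newlines, and each entry is up to eight whitespace-separated columns (URI, system types, SSH identity file, max jobs, speed factor, supported features, mandatory features, base64 SSH host key), with '-' leaving a column at its default. A sketch of a fully specified entry; the host, key path and host key are made up:

    TEST(machines, getMachinesFullySpecifiedSketch) {
        settings.builders =
            "nix@builder.example.org x86_64-linux,aarch64-linux "
            "/home/nix/.ssh/id_builder 8 2 kvm benchmark "
            "SSH+HOST+PUBLIC+KEY+BASE64+ENCODED==";
        Machines actual = getMachines();
        ASSERT_THAT(actual, SizeIs(1));
        EXPECT_THAT(actual[0], Field(&Machine::systemTypes,
            ElementsAre("x86_64-linux", "aarch64-linux")));
        EXPECT_THAT(actual[0], Field(&Machine::maxJobs, Eq(8)));
    }
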
@ -1,123 +0,0 @@
|
|||
#include "nar-info-disk-cache.hh"
|
||||
|
||||
#include <gtest/gtest.h>
|
||||
#include <rapidcheck/gtest.h>
|
||||
#include "sqlite.hh"
|
||||
#include <sqlite3.h>
|
||||
|
||||
|
||||
namespace nix {
|
||||
|
||||
TEST(NarInfoDiskCacheImpl, create_and_read) {
|
||||
// This is a large single test to avoid some setup overhead.
|
||||
|
||||
int prio = 12345;
|
||||
bool wantMassQuery = true;
|
||||
|
||||
Path tmpDir = createTempDir();
|
||||
AutoDelete delTmpDir(tmpDir);
|
||||
Path dbPath(tmpDir + "/test-narinfo-disk-cache.sqlite");
|
||||
|
||||
int savedId;
|
||||
int barId;
|
||||
SQLite db;
|
||||
SQLiteStmt getIds;
|
||||
|
||||
{
|
||||
auto cache = getTestNarInfoDiskCache(dbPath);
|
||||
|
||||
// Set up "background noise" and check that different caches receive different ids
|
||||
{
|
||||
auto bc1 = cache->createCache("https://bar", "/nix/storedir", wantMassQuery, prio);
|
||||
auto bc2 = cache->createCache("https://xyz", "/nix/storedir", false, 12);
|
||||
ASSERT_NE(bc1, bc2);
|
||||
barId = bc1;
|
||||
}
|
||||
|
||||
// Check that the fields are saved and returned correctly. This does not test
|
||||
// the select statement yet, because of in-memory caching.
|
||||
savedId = cache->createCache("http://foo", "/nix/storedir", wantMassQuery, prio);
|
||||
{
|
||||
auto r = cache->upToDateCacheExists("http://foo");
|
||||
ASSERT_TRUE(r);
|
||||
ASSERT_EQ(r->priority, prio);
|
||||
ASSERT_EQ(r->wantMassQuery, wantMassQuery);
|
||||
ASSERT_EQ(savedId, r->id);
|
||||
}
|
||||
|
||||
// We're going to pay special attention to the id field because we had a bug
|
||||
// that changed it.
|
||||
db = SQLite(dbPath);
|
||||
getIds.create(db, "select id from BinaryCaches where url = 'http://foo'");
|
||||
|
||||
{
|
||||
auto q(getIds.use());
|
||||
ASSERT_TRUE(q.next());
|
||||
ASSERT_EQ(savedId, q.getInt(0));
|
||||
ASSERT_FALSE(q.next());
|
||||
}
|
||||
|
||||
// Pretend that the caches are older, but keep one up to date, as "background noise"
|
||||
db.exec("update BinaryCaches set timestamp = timestamp - 1 - 7 * 24 * 3600 where url <> 'https://xyz';");
|
||||
|
||||
// This shows that the in-memory cache works
|
||||
{
|
||||
auto r = cache->upToDateCacheExists("http://foo");
|
||||
ASSERT_TRUE(r);
|
||||
ASSERT_EQ(r->priority, prio);
|
||||
ASSERT_EQ(r->wantMassQuery, wantMassQuery);
|
||||
}
|
||||
}
|
||||
|
||||
{
|
||||
// We can't clear the in-memory cache, so we use a new cache object. This is
|
||||
// more realistic anyway.
|
||||
auto cache2 = getTestNarInfoDiskCache(dbPath);
|
||||
|
||||
{
|
||||
auto r = cache2->upToDateCacheExists("http://foo");
|
||||
ASSERT_FALSE(r);
|
||||
}
|
||||
|
||||
// "Update", same data, check that the id number is reused
|
||||
cache2->createCache("http://foo", "/nix/storedir", wantMassQuery, prio);
|
||||
|
||||
{
|
||||
auto r = cache2->upToDateCacheExists("http://foo");
|
||||
ASSERT_TRUE(r);
|
||||
ASSERT_EQ(r->priority, prio);
|
||||
ASSERT_EQ(r->wantMassQuery, wantMassQuery);
|
||||
ASSERT_EQ(r->id, savedId);
|
||||
}
|
||||
|
||||
{
|
||||
auto q(getIds.use());
|
||||
ASSERT_TRUE(q.next());
|
||||
auto currentId = q.getInt(0);
|
||||
ASSERT_FALSE(q.next());
|
||||
ASSERT_EQ(currentId, savedId);
|
||||
}
|
||||
|
||||
// Check that the fields can be modified, and the id remains the same
|
||||
{
|
||||
auto r0 = cache2->upToDateCacheExists("https://bar");
|
||||
ASSERT_FALSE(r0);
|
||||
|
||||
cache2->createCache("https://bar", "/nix/storedir", !wantMassQuery, prio + 10);
|
||||
auto r = cache2->upToDateCacheExists("https://bar");
|
||||
ASSERT_EQ(r->wantMassQuery, !wantMassQuery);
|
||||
ASSERT_EQ(r->priority, prio + 10);
|
||||
ASSERT_EQ(r->id, barId);
|
||||
}
|
||||
|
||||
// // Force update (no use case yet; we only retrieve cache metadata when stale based on timestamp)
|
||||
// {
|
||||
// cache2->createCache("https://bar", "/nix/storedir", wantMassQuery, prio + 20);
|
||||
// auto r = cache2->upToDateCacheExists("https://bar");
|
||||
// ASSERT_EQ(r->wantMassQuery, wantMassQuery);
|
||||
// ASSERT_EQ(r->priority, prio + 20);
|
||||
// }
|
||||
}
|
||||
}
|
||||
|
||||
}
|
|
@ -1,85 +0,0 @@
|
|||
#include <nlohmann/json.hpp>
|
||||
#include <gtest/gtest.h>
|
||||
|
||||
#include "path-info.hh"
|
||||
|
||||
#include "tests/characterization.hh"
|
||||
#include "tests/libstore.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
using nlohmann::json;
|
||||
|
||||
class NarInfoTest : public CharacterizationTest, public LibStoreTest
|
||||
{
|
||||
Path unitTestData = getUnitTestData() + "/libstore/nar-info";
|
||||
|
||||
Path goldenMaster(PathView testStem) const override {
|
||||
return unitTestData + "/" + testStem + ".json";
|
||||
}
|
||||
};
|
||||
|
||||
static NarInfo makeNarInfo(const Store & store, bool includeImpureInfo) {
|
||||
NarInfo info = ValidPathInfo {
|
||||
store,
|
||||
"foo",
|
||||
FixedOutputInfo {
|
||||
.method = FileIngestionMethod::Recursive,
|
||||
.hash = hashString(HashType::htSHA256, "(...)"),
|
||||
|
||||
.references = {
|
||||
.others = {
|
||||
StorePath {
|
||||
"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar",
|
||||
},
|
||||
},
|
||||
.self = true,
|
||||
},
|
||||
},
|
||||
Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="),
|
||||
};
|
||||
info.narSize = 34878;
|
||||
if (includeImpureInfo) {
|
||||
info.deriver = StorePath {
|
||||
"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv",
|
||||
};
|
||||
info.registrationTime = 23423;
|
||||
info.ultimate = true;
|
||||
info.sigs = { "asdf", "qwer" };
|
||||
|
||||
info.url = "nar/1w1fff338fvdw53sqgamddn1b2xgds473pv6y13gizdbqjv4i5p3.nar.xz";
|
||||
info.compression = "xz";
|
||||
info.fileHash = Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=");
|
||||
info.fileSize = 4029176;
|
||||
}
|
||||
return info;
|
||||
}
|
||||
|
||||
#define JSON_TEST(STEM, PURE) \
|
||||
TEST_F(NarInfoTest, NarInfo_ ## STEM ## _from_json) { \
|
||||
readTest(#STEM, [&](const auto & encoded_) { \
|
||||
auto encoded = json::parse(encoded_); \
|
||||
auto expected = makeNarInfo(*store, PURE); \
|
||||
NarInfo got = NarInfo::fromJSON( \
|
||||
*store, \
|
||||
expected.path, \
|
||||
encoded); \
|
||||
ASSERT_EQ(got, expected); \
|
||||
}); \
|
||||
} \
|
||||
\
|
||||
TEST_F(NarInfoTest, NarInfo_ ## STEM ## _to_json) { \
|
||||
writeTest(#STEM, [&]() -> json { \
|
||||
return makeNarInfo(*store, PURE) \
|
||||
.toJSON(*store, PURE, HashFormat::SRI); \
|
||||
}, [](const auto & file) { \
|
||||
return json::parse(readFile(file)); \
|
||||
}, [](const auto & file, const auto & got) { \
|
||||
return writeFile(file, got.dump(2) + "\n"); \
|
||||
}); \
|
||||
}
|
||||
|
||||
JSON_TEST(pure, false)
|
||||
JSON_TEST(impure, true)
|
||||
|
||||
}
|
|
@ -1,239 +0,0 @@
|
|||
#include "outputs-spec.hh"
|
||||
|
||||
#include <nlohmann/json.hpp>
|
||||
#include <gtest/gtest.h>
|
||||
#include <rapidcheck/gtest.h>
|
||||
|
||||
namespace nix {
|
||||
|
||||
#ifndef NDEBUG
|
||||
TEST(OutputsSpec, no_empty_names) {
|
||||
ASSERT_DEATH(OutputsSpec::Names { std::set<std::string> { } }, "");
|
||||
}
|
||||
#endif
|
||||
|
||||
#define TEST_DONT_PARSE(NAME, STR) \
|
||||
TEST(OutputsSpec, bad_ ## NAME) { \
|
||||
std::optional OutputsSpecOpt = \
|
||||
OutputsSpec::parseOpt(STR); \
|
||||
ASSERT_FALSE(OutputsSpecOpt); \
|
||||
}
|
||||
|
||||
TEST_DONT_PARSE(empty, "")
|
||||
TEST_DONT_PARSE(garbage, "&*()")
|
||||
TEST_DONT_PARSE(double_star, "**")
|
||||
TEST_DONT_PARSE(star_first, "*,foo")
|
||||
TEST_DONT_PARSE(star_second, "foo,*")
|
||||
|
||||
#undef TEST_DONT_PARSE
|
||||
|
||||
TEST(OutputsSpec, all) {
|
||||
std::string_view str = "*";
|
||||
OutputsSpec expected = OutputsSpec::All { };
|
||||
ASSERT_EQ(OutputsSpec::parse(str), expected);
|
||||
ASSERT_EQ(expected.to_string(), str);
|
||||
}
|
||||
|
||||
TEST(OutputsSpec, names_out) {
|
||||
std::string_view str = "out";
|
||||
OutputsSpec expected = OutputsSpec::Names { "out" };
|
||||
ASSERT_EQ(OutputsSpec::parse(str), expected);
|
||||
ASSERT_EQ(expected.to_string(), str);
|
||||
}
|
||||
|
||||
TEST(OutputsSpec, names_underscore) {
|
||||
std::string_view str = "a_b";
|
||||
OutputsSpec expected = OutputsSpec::Names { "a_b" };
|
||||
ASSERT_EQ(OutputsSpec::parse(str), expected);
|
||||
ASSERT_EQ(expected.to_string(), str);
|
||||
}
|
||||
|
||||
TEST(OutputsSpec, names_numberic) {
|
||||
std::string_view str = "01";
|
||||
OutputsSpec expected = OutputsSpec::Names { "01" };
|
||||
ASSERT_EQ(OutputsSpec::parse(str), expected);
|
||||
ASSERT_EQ(expected.to_string(), str);
|
||||
}
|
||||
|
||||
TEST(OutputsSpec, names_out_bin) {
|
||||
OutputsSpec expected = OutputsSpec::Names { "out", "bin" };
|
||||
ASSERT_EQ(OutputsSpec::parse("out,bin"), expected);
|
||||
// N.B. This normalization is OK.
|
||||
ASSERT_EQ(expected.to_string(), "bin,out");
|
||||
}
|
||||
|
||||
#define TEST_SUBSET(X, THIS, THAT) \
|
||||
X((OutputsSpec { THIS }).isSubsetOf(THAT));
|
||||
|
||||
TEST(OutputsSpec, subsets_all_all) {
|
||||
TEST_SUBSET(ASSERT_TRUE, OutputsSpec::All { }, OutputsSpec::All { });
|
||||
}
|
||||
|
||||
TEST(OutputsSpec, subsets_names_all) {
|
||||
TEST_SUBSET(ASSERT_TRUE, OutputsSpec::Names { "a" }, OutputsSpec::All { });
|
||||
}
|
||||
|
||||
TEST(OutputsSpec, subsets_names_names_eq) {
|
||||
TEST_SUBSET(ASSERT_TRUE, OutputsSpec::Names { "a" }, OutputsSpec::Names { "a" });
|
||||
}
|
||||
|
||||
TEST(OutputsSpec, subsets_names_names_noneq) {
|
||||
TEST_SUBSET(ASSERT_TRUE, OutputsSpec::Names { "a" }, (OutputsSpec::Names { "a", "b" }));
|
||||
}
|
||||
|
||||
TEST(OutputsSpec, not_subsets_all_names) {
|
||||
TEST_SUBSET(ASSERT_FALSE, OutputsSpec::All { }, OutputsSpec::Names { "a" });
|
||||
}
|
||||
|
||||
TEST(OutputsSpec, not_subsets_names_names) {
|
||||
TEST_SUBSET(ASSERT_FALSE, (OutputsSpec::Names { "a", "b" }), (OutputsSpec::Names { "a" }));
|
||||
}
|
||||
|
||||
#undef TEST_SUBSET
|
||||
|
||||
#define TEST_UNION(RES, THIS, THAT) \
|
||||
ASSERT_EQ(OutputsSpec { RES }, (OutputsSpec { THIS }).union_(THAT));
|
||||
|
||||
TEST(OutputsSpec, union_all_all) {
|
||||
TEST_UNION(OutputsSpec::All { }, OutputsSpec::All { }, OutputsSpec::All { });
|
||||
}
|
||||
|
||||
TEST(OutputsSpec, union_all_names) {
|
||||
TEST_UNION(OutputsSpec::All { }, OutputsSpec::All { }, OutputsSpec::Names { "a" });
|
||||
}
|
||||
|
||||
TEST(OutputsSpec, union_names_all) {
|
||||
TEST_UNION(OutputsSpec::All { }, OutputsSpec::Names { "a" }, OutputsSpec::All { });
|
||||
}
|
||||
|
||||
TEST(OutputsSpec, union_names_names) {
|
||||
TEST_UNION((OutputsSpec::Names { "a", "b" }), OutputsSpec::Names { "a" }, OutputsSpec::Names { "b" });
|
||||
}
|
||||
|
||||
#undef TEST_UNION
|
||||
|
||||
#define TEST_DONT_PARSE(NAME, STR) \
|
||||
TEST(ExtendedOutputsSpec, bad_ ## NAME) { \
|
||||
std::optional extendedOutputsSpecOpt = \
|
||||
ExtendedOutputsSpec::parseOpt(STR); \
|
||||
ASSERT_FALSE(extendedOutputsSpecOpt); \
|
||||
}
|
||||
|
||||
TEST_DONT_PARSE(carot_empty, "^")
|
||||
TEST_DONT_PARSE(prefix_carot_empty, "foo^")
|
||||
TEST_DONT_PARSE(garbage, "^&*()")
|
||||
TEST_DONT_PARSE(double_star, "^**")
|
||||
TEST_DONT_PARSE(star_first, "^*,foo")
|
||||
TEST_DONT_PARSE(star_second, "^foo,*")
|
||||
|
||||
#undef TEST_DONT_PARSE
|
||||
|
||||
TEST(ExtendedOutputsSpec, defeault) {
|
||||
std::string_view str = "foo";
|
||||
auto [prefix, extendedOutputsSpec] = ExtendedOutputsSpec::parse(str);
|
||||
ASSERT_EQ(prefix, "foo");
|
||||
ExtendedOutputsSpec expected = ExtendedOutputsSpec::Default { };
|
||||
ASSERT_EQ(extendedOutputsSpec, expected);
|
||||
ASSERT_EQ(std::string { prefix } + expected.to_string(), str);
|
||||
}
|
||||
|
||||
TEST(ExtendedOutputsSpec, all) {
|
||||
std::string_view str = "foo^*";
|
||||
auto [prefix, extendedOutputsSpec] = ExtendedOutputsSpec::parse(str);
|
||||
ASSERT_EQ(prefix, "foo");
|
||||
ExtendedOutputsSpec expected = OutputsSpec::All { };
|
||||
ASSERT_EQ(extendedOutputsSpec, expected);
|
||||
ASSERT_EQ(std::string { prefix } + expected.to_string(), str);
|
||||
}
|
||||
|
||||
TEST(ExtendedOutputsSpec, out) {
|
||||
std::string_view str = "foo^out";
|
||||
auto [prefix, extendedOutputsSpec] = ExtendedOutputsSpec::parse(str);
|
||||
ASSERT_EQ(prefix, "foo");
|
||||
ExtendedOutputsSpec expected = OutputsSpec::Names { "out" };
|
||||
ASSERT_EQ(extendedOutputsSpec, expected);
|
||||
ASSERT_EQ(std::string { prefix } + expected.to_string(), str);
|
||||
}
|
||||
|
||||
TEST(ExtendedOutputsSpec, out_bin) {
|
||||
auto [prefix, extendedOutputsSpec] = ExtendedOutputsSpec::parse("foo^out,bin");
|
||||
ASSERT_EQ(prefix, "foo");
|
||||
ExtendedOutputsSpec expected = OutputsSpec::Names { "out", "bin" };
|
||||
ASSERT_EQ(extendedOutputsSpec, expected);
|
||||
ASSERT_EQ(std::string { prefix } + expected.to_string(), "foo^bin,out");
|
||||
}
|
||||
|
||||
TEST(ExtendedOutputsSpec, many_carrot) {
|
||||
auto [prefix, extendedOutputsSpec] = ExtendedOutputsSpec::parse("foo^bar^out,bin");
|
||||
ASSERT_EQ(prefix, "foo^bar");
|
||||
ExtendedOutputsSpec expected = OutputsSpec::Names { "out", "bin" };
|
||||
ASSERT_EQ(extendedOutputsSpec, expected);
|
||||
ASSERT_EQ(std::string { prefix } + expected.to_string(), "foo^bar^bin,out");
|
||||
}
|
||||
|
||||
|
||||
#define TEST_JSON(TYPE, NAME, STR, VAL) \
|
||||
\
|
||||
TEST(TYPE, NAME ## _to_json) { \
|
||||
using nlohmann::literals::operator "" _json; \
|
||||
ASSERT_EQ( \
|
||||
STR ## _json, \
|
||||
((nlohmann::json) TYPE { VAL })); \
|
||||
} \
|
||||
\
|
||||
TEST(TYPE, NAME ## _from_json) { \
|
||||
using nlohmann::literals::operator "" _json; \
|
||||
ASSERT_EQ( \
|
||||
TYPE { VAL }, \
|
||||
(STR ## _json).get<TYPE>()); \
|
||||
}
|
||||
|
||||
TEST_JSON(OutputsSpec, all, R"(["*"])", OutputsSpec::All { })
|
||||
TEST_JSON(OutputsSpec, name, R"(["a"])", OutputsSpec::Names { "a" })
|
||||
TEST_JSON(OutputsSpec, names, R"(["a","b"])", (OutputsSpec::Names { "a", "b" }))
|
||||
|
||||
TEST_JSON(ExtendedOutputsSpec, def, R"(null)", ExtendedOutputsSpec::Default { })
|
||||
TEST_JSON(ExtendedOutputsSpec, all, R"(["*"])", ExtendedOutputsSpec::Explicit { OutputsSpec::All { } })
|
||||
TEST_JSON(ExtendedOutputsSpec, name, R"(["a"])", ExtendedOutputsSpec::Explicit { OutputsSpec::Names { "a" } })
|
||||
TEST_JSON(ExtendedOutputsSpec, names, R"(["a","b"])", (ExtendedOutputsSpec::Explicit { OutputsSpec::Names { "a", "b" } }))
|
||||
|
||||
#undef TEST_JSON
|
||||
|
||||
}
|
||||
|
||||
namespace rc {
|
||||
using namespace nix;
|
||||
|
||||
Gen<OutputsSpec> Arbitrary<OutputsSpec>::arbitrary()
|
||||
{
|
||||
switch (*gen::inRange<uint8_t>(0, std::variant_size_v<OutputsSpec::Raw>)) {
|
||||
case 0:
|
||||
return gen::just((OutputsSpec) OutputsSpec::All { });
|
||||
case 1:
|
||||
return gen::just((OutputsSpec) OutputsSpec::Names {
|
||||
*gen::nonEmpty(gen::container<StringSet>(gen::map(
|
||||
gen::arbitrary<StorePathName>(),
|
||||
[](StorePathName n) { return n.name; }))),
|
||||
});
|
||||
default:
|
||||
assert(false);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
namespace nix {
|
||||
|
||||
#ifndef COVERAGE
|
||||
|
||||
RC_GTEST_PROP(
|
||||
OutputsSpec,
|
||||
prop_round_rip,
|
||||
(const OutputsSpec & o))
|
||||
{
|
||||
RC_ASSERT(o == OutputsSpec::parse(o.to_string()));
|
||||
}
|
||||
|
||||
#endif
|
||||
|
||||
}
|
|
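Summing up the behaviour pinned down above: parsing accepts output names in any order, printing normalises them to a sorted comma-separated list, and '*' subsumes any explicit set. A compact sketch in the same style:

    TEST(OutputsSpec, normalization_sketch) {
        auto spec = OutputsSpec::parse("out,bin");
        ASSERT_EQ(spec.to_string(), "bin,out");
        ASSERT_TRUE(spec.isSubsetOf(OutputsSpec::All { }));
        ASSERT_EQ(
            (OutputsSpec { OutputsSpec::Names { "bin", "out" } }),
            spec.union_(OutputsSpec::Names { "bin" }));
    }
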
@@ -1,18 +0,0 @@
#pragma once
///@file

#include <rapidcheck/gen/Arbitrary.h>

#include <outputs-spec.hh>

#include <tests/path.hh>

namespace rc {
using namespace nix;

template<>
struct Arbitrary<OutputsSpec> {
    static Gen<OutputsSpec> arbitrary();
};

}

@ -1,79 +0,0 @@
|
|||
#include <nlohmann/json.hpp>
|
||||
#include <gtest/gtest.h>
|
||||
|
||||
#include "path-info.hh"
|
||||
|
||||
#include "tests/characterization.hh"
|
||||
#include "tests/libstore.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
using nlohmann::json;
|
||||
|
||||
class PathInfoTest : public CharacterizationTest, public LibStoreTest
|
||||
{
|
||||
Path unitTestData = getUnitTestData() + "/libstore/path-info";
|
||||
|
||||
Path goldenMaster(PathView testStem) const override {
|
||||
return unitTestData + "/" + testStem + ".json";
|
||||
}
|
||||
};
|
||||
|
||||
static UnkeyedValidPathInfo makePathInfo(const Store & store, bool includeImpureInfo) {
|
||||
UnkeyedValidPathInfo info = ValidPathInfo {
|
||||
store,
|
||||
"foo",
|
||||
FixedOutputInfo {
|
||||
.method = FileIngestionMethod::Recursive,
|
||||
.hash = hashString(HashType::htSHA256, "(...)"),
|
||||
|
||||
.references = {
|
||||
.others = {
|
||||
StorePath {
|
||||
"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar",
|
||||
},
|
||||
},
|
||||
.self = true,
|
||||
},
|
||||
},
|
||||
Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="),
|
||||
};
|
||||
info.narSize = 34878;
|
||||
if (includeImpureInfo) {
|
||||
info.deriver = StorePath {
|
||||
"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv",
|
||||
};
|
||||
info.registrationTime = 23423;
|
||||
info.ultimate = true;
|
||||
info.sigs = { "asdf", "qwer" };
|
||||
}
|
||||
return info;
|
||||
}
|
||||
|
||||
#define JSON_TEST(STEM, PURE) \
|
||||
TEST_F(PathInfoTest, PathInfo_ ## STEM ## _from_json) { \
|
||||
readTest(#STEM, [&](const auto & encoded_) { \
|
||||
auto encoded = json::parse(encoded_); \
|
||||
UnkeyedValidPathInfo got = UnkeyedValidPathInfo::fromJSON( \
|
||||
*store, \
|
||||
encoded); \
|
||||
auto expected = makePathInfo(*store, PURE); \
|
||||
ASSERT_EQ(got, expected); \
|
||||
}); \
|
||||
} \
|
||||
\
|
||||
TEST_F(PathInfoTest, PathInfo_ ## STEM ## _to_json) { \
|
||||
writeTest(#STEM, [&]() -> json { \
|
||||
return makePathInfo(*store, PURE) \
|
||||
.toJSON(*store, PURE, HashFormat::SRI); \
|
||||
}, [](const auto & file) { \
|
||||
return json::parse(readFile(file)); \
|
||||
}, [](const auto & file, const auto & got) { \
|
||||
return writeFile(file, got.dump(2) + "\n"); \
|
||||
}); \
|
||||
}
|
||||
|
||||
JSON_TEST(pure, false)
|
||||
JSON_TEST(impure, true)
|
||||
|
||||
}
|
|
@ -1,162 +0,0 @@
|
|||
#include <regex>
|
||||
|
||||
#include <nlohmann/json.hpp>
|
||||
#include <gtest/gtest.h>
|
||||
#include <rapidcheck/gtest.h>
|
||||
|
||||
#include "path-regex.hh"
|
||||
#include "store-api.hh"
|
||||
|
||||
#include "tests/hash.hh"
|
||||
#include "tests/libstore.hh"
|
||||
#include "tests/path.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
#define STORE_DIR "/nix/store/"
|
||||
#define HASH_PART "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q"
|
||||
|
||||
class StorePathTest : public LibStoreTest
|
||||
{
|
||||
};
|
||||
|
||||
static std::regex nameRegex { std::string { nameRegexStr } };
|
||||
|
||||
#define TEST_DONT_PARSE(NAME, STR) \
|
||||
TEST_F(StorePathTest, bad_ ## NAME) { \
|
||||
std::string_view str = \
|
||||
STORE_DIR HASH_PART "-" STR; \
|
||||
ASSERT_THROW( \
|
||||
store->parseStorePath(str), \
|
||||
BadStorePath); \
|
||||
std::string name { STR }; \
|
||||
EXPECT_FALSE(std::regex_match(name, nameRegex)); \
|
||||
}
|
||||
|
||||
TEST_DONT_PARSE(empty, "")
|
||||
TEST_DONT_PARSE(garbage, "&*()")
|
||||
TEST_DONT_PARSE(double_star, "**")
|
||||
TEST_DONT_PARSE(star_first, "*,foo")
|
||||
TEST_DONT_PARSE(star_second, "foo,*")
|
||||
TEST_DONT_PARSE(bang, "foo!o")
|
||||
TEST_DONT_PARSE(dotfile, ".gitignore")
|
||||
|
||||
#undef TEST_DONT_PARSE
|
||||
|
||||
#define TEST_DO_PARSE(NAME, STR) \
|
||||
TEST_F(StorePathTest, good_ ## NAME) { \
|
||||
std::string_view str = \
|
||||
STORE_DIR HASH_PART "-" STR; \
|
||||
auto p = store->parseStorePath(str); \
|
||||
std::string name { p.name() }; \
|
||||
EXPECT_TRUE(std::regex_match(name, nameRegex)); \
|
||||
}
|
||||
|
||||
// 0-9 a-z A-Z + - . _ ? =
|
||||
|
||||
TEST_DO_PARSE(numbers, "02345")
|
||||
TEST_DO_PARSE(lower_case, "foo")
|
||||
TEST_DO_PARSE(upper_case, "FOO")
|
||||
TEST_DO_PARSE(plus, "foo+bar")
|
||||
TEST_DO_PARSE(dash, "foo-dev")
|
||||
TEST_DO_PARSE(underscore, "foo_bar")
|
||||
TEST_DO_PARSE(period, "foo.txt")
|
||||
TEST_DO_PARSE(question_mark, "foo?why")
|
||||
TEST_DO_PARSE(equals_sign, "foo=foo")
|
||||
|
||||
#undef TEST_DO_PARSE
|
||||
|
||||
// For rapidcheck
|
||||
void showValue(const StorePath & p, std::ostream & os) {
|
||||
os << p.to_string();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
namespace rc {
|
||||
using namespace nix;
|
||||
|
||||
Gen<StorePathName> Arbitrary<StorePathName>::arbitrary()
|
||||
{
|
||||
auto len = *gen::inRange<size_t>(
|
||||
1,
|
||||
StorePath::MaxPathLen - std::string_view { HASH_PART }.size());
|
||||
|
||||
std::string pre;
|
||||
pre.reserve(len);
|
||||
|
||||
for (size_t c = 0; c < len; ++c) {
|
||||
switch (auto i = *gen::inRange<uint8_t>(0, 10 + 2 * 26 + 6)) {
|
||||
case 0 ... 9:
    pre += '0' + i;
    break;
        case 10 ... 35:
            pre += 'A' + (i - 10);
            break;
        case 36 ... 61:
            pre += 'a' + (i - 36);
            break;
        case 62:
            pre += '+';
            break;
        case 63:
            pre += '-';
            break;
        case 64:
            // names aren't permitted to start with a period,
            // so just fall through to the next case here
            if (c != 0) {
                pre += '.';
                break;
            }
        case 65:
            pre += '_';
            break;
        case 66:
            pre += '?';
            break;
        case 67:
            pre += '=';
            break;
        default:
            assert(false);
        }
    }

    return gen::just(StorePathName {
        .name = std::move(pre),
    });
}

Gen<StorePath> Arbitrary<StorePath>::arbitrary()
{
    return gen::just(StorePath {
        *gen::arbitrary<Hash>(),
        (*gen::arbitrary<StorePathName>()).name,
    });
}

} // namespace rc

namespace nix {

#ifndef COVERAGE

RC_GTEST_FIXTURE_PROP(
    StorePathTest,
    prop_regex_accept,
    (const StorePath & p))
{
    RC_ASSERT(std::regex_match(std::string { p.name() }, nameRegex));
}

RC_GTEST_FIXTURE_PROP(
    StorePathTest,
    prop_round_trip,
    (const StorePath & p))
{
    RC_ASSERT(p == store->parseStorePath(store->printStorePath(p)));
}

#endif

}
@ -1,29 +0,0 @@
#pragma once
///@file

#include <rapidcheck/gen/Arbitrary.h>

#include <path.hh>

namespace nix {

struct StorePathName {
    std::string name;
};

}

namespace rc {
using namespace nix;

template<>
struct Arbitrary<StorePathName> {
    static Gen<StorePathName> arbitrary();
};

template<>
struct Arbitrary<StorePath> {
    static Gen<StorePath> arbitrary();
};

}
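As a side note, a hedged sketch of how such an `Arbitrary` specialization gets consumed: once declared, rapidcheck can generate `StorePath` values directly as property arguments, exactly as the fixture properties earlier in this change do. The suite and property names below are hypothetical, not part of the change.

#ifndef COVERAGE

RC_GTEST_PROP(
    StorePathExample,      // hypothetical suite name
    prop_name_not_empty,   // hypothetical property name
    (const StorePath & p))
{
    // Every generated store path should carry a non-empty name part.
    RC_ASSERT(!p.name().empty());
}

#endif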
@ -1,75 +0,0 @@
#pragma once
///@file

#include <nlohmann/json.hpp>
#include <gtest/gtest.h>

#include "tests/libstore.hh"
#include "tests/characterization.hh"

namespace nix {

template<class Proto, const char * protocolDir>
class ProtoTest : public CharacterizationTest, public LibStoreTest
{
    Path unitTestData = getUnitTestData() + "/libstore/" + protocolDir;

    Path goldenMaster(std::string_view testStem) const override {
        return unitTestData + "/" + testStem + ".bin";
    }
};

template<class Proto, const char * protocolDir>
class VersionedProtoTest : public ProtoTest<Proto, protocolDir>
{
public:
    /**
     * Golden test for `T` reading
     */
    template<typename T>
    void readProtoTest(PathView testStem, typename Proto::Version version, T expected)
    {
        CharacterizationTest::readTest(testStem, [&](const auto & encoded) {
            T got = ({
                StringSource from { encoded };
                Proto::template Serialise<T>::read(
                    *LibStoreTest::store,
                    typename Proto::ReadConn {
                        .from = from,
                        .version = version,
                    });
            });

            ASSERT_EQ(got, expected);
        });
    }

    /**
     * Golden test for `T` write
     */
    template<typename T>
    void writeProtoTest(PathView testStem, typename Proto::Version version, const T & decoded)
    {
        CharacterizationTest::writeTest(testStem, [&]() {
            StringSink to;
            Proto::template Serialise<T>::write(
                *LibStoreTest::store,
                typename Proto::WriteConn {
                    .to = to,
                    .version = version,
                },
                decoded);
            return std::move(to.s);
        });
    }
};

#define VERSIONED_CHARACTERIZATION_TEST(FIXTURE, NAME, STEM, VERSION, VALUE) \
    TEST_F(FIXTURE, NAME ## _read) { \
        readProtoTest(STEM, VERSION, VALUE); \
    } \
    TEST_F(FIXTURE, NAME ## _write) { \
        writeProtoTest(STEM, VERSION, VALUE); \
    }

}
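For orientation, roughly what one invocation of `VERSIONED_CHARACTERIZATION_TEST` expands to; the fixture name, stem, and value below are placeholders, not part of this change.

// VERSIONED_CHARACTERIZATION_TEST(MyProtoTest, string, "string", defaultVersion, someValue)
// expands to one read-direction and one write-direction test case:
//
//     TEST_F(MyProtoTest, string_read) {
//         readProtoTest("string", defaultVersion, someValue);
//     }
//     TEST_F(MyProtoTest, string_write) {
//         writeProtoTest("string", defaultVersion, someValue);
//     }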
@ -1,45 +0,0 @@
#include "references.hh"

#include <gtest/gtest.h>

namespace nix {

TEST(references, scan)
{
    std::string hash1 = "dc04vv14dak1c1r48qa0m23vr9jy8sm0";
    std::string hash2 = "zc842j0rz61mjsp3h3wp5ly71ak6qgdn";

    {
        RefScanSink scanner(StringSet{hash1});
        auto s = "foobar";
        scanner(s);
        ASSERT_EQ(scanner.getResult(), StringSet{});
    }

    {
        RefScanSink scanner(StringSet{hash1});
        auto s = "foobar" + hash1 + "xyzzy";
        scanner(s);
        ASSERT_EQ(scanner.getResult(), StringSet{hash1});
    }

    {
        RefScanSink scanner(StringSet{hash1, hash2});
        auto s = "foobar" + hash1 + "xyzzy" + hash2;
        scanner(((std::string_view) s).substr(0, 10));
        scanner(((std::string_view) s).substr(10, 5));
        scanner(((std::string_view) s).substr(15, 5));
        scanner(((std::string_view) s).substr(20));
        ASSERT_EQ(scanner.getResult(), StringSet({hash1, hash2}));
    }

    {
        RefScanSink scanner(StringSet{hash1, hash2});
        auto s = "foobar" + hash1 + "xyzzy" + hash2;
        for (auto & i : s)
            scanner(std::string(1, i));
        ASSERT_EQ(scanner.getResult(), StringSet({hash1, hash2}));
    }
}

}
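To summarize what the third and fourth blocks above exercise: RefScanSink keeps enough state to recognize a hash even when it is split across separate writes. A condensed sketch of that pattern, reusing hash1 from the test:

// RefScanSink scanner(StringSet{"dc04vv14dak1c1r48qa0m23vr9jy8sm0"});
// scanner("foobar-dc04vv14dak1c");        // first half of the hash
// scanner("1r48qa0m23vr9jy8sm0-xyzzy");   // second half, in a later chunk
// scanner.getResult() now contains the full hash:
//   StringSet{"dc04vv14dak1c1r48qa0m23vr9jy8sm0"}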
@ -1,279 +0,0 @@
|
|||
#include <regex>
|
||||
|
||||
#include <nlohmann/json.hpp>
|
||||
#include <gtest/gtest.h>
|
||||
|
||||
#include "serve-protocol.hh"
|
||||
#include "serve-protocol-impl.hh"
|
||||
#include "build-result.hh"
|
||||
#include "tests/protocol.hh"
|
||||
#include "tests/characterization.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
const char serveProtoDir[] = "serve-protocol";
|
||||
|
||||
struct ServeProtoTest : VersionedProtoTest<ServeProto, serveProtoDir>
|
||||
{
|
||||
    /**
     * For serializers that don't care about the minimum version, we
     * use the oldest one: 2.0.
     */
    ServeProto::Version defaultVersion = 2 << 8 | 0;
|
||||
};
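A note on the version literals used throughout these protocol tests, readable off the stem/literal pairs such as "build-result-2.2" with 2 << 8 | 2: the high byte encodes the major protocol version and the low byte the minor.

// 2 << 8 | 0   -> serve protocol 2.0 (the defaultVersion above)
// 2 << 8 | 6   -> serve protocol 2.6
// 1 << 8 | 29  -> worker protocol 1.29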
|
||||
|
||||
VERSIONED_CHARACTERIZATION_TEST(
|
||||
ServeProtoTest,
|
||||
string,
|
||||
"string",
|
||||
defaultVersion,
|
||||
(std::tuple<std::string, std::string, std::string, std::string, std::string> {
|
||||
"",
|
||||
"hi",
|
||||
"white rabbit",
|
||||
"大白兔",
|
||||
"oh no \0\0\0 what was that!",
|
||||
}))
|
||||
|
||||
VERSIONED_CHARACTERIZATION_TEST(
|
||||
ServeProtoTest,
|
||||
storePath,
|
||||
"store-path",
|
||||
defaultVersion,
|
||||
(std::tuple<StorePath, StorePath> {
|
||||
StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo" },
|
||||
StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo-bar" },
|
||||
}))
|
||||
|
||||
VERSIONED_CHARACTERIZATION_TEST(
|
||||
ServeProtoTest,
|
||||
contentAddress,
|
||||
"content-address",
|
||||
defaultVersion,
|
||||
(std::tuple<ContentAddress, ContentAddress, ContentAddress> {
|
||||
ContentAddress {
|
||||
.method = TextIngestionMethod {},
|
||||
.hash = hashString(HashType::htSHA256, "Derive(...)"),
|
||||
},
|
||||
ContentAddress {
|
||||
.method = FileIngestionMethod::Flat,
|
||||
.hash = hashString(HashType::htSHA1, "blob blob..."),
|
||||
},
|
||||
ContentAddress {
|
||||
.method = FileIngestionMethod::Recursive,
|
||||
.hash = hashString(HashType::htSHA256, "(...)"),
|
||||
},
|
||||
}))
|
||||
|
||||
VERSIONED_CHARACTERIZATION_TEST(
|
||||
ServeProtoTest,
|
||||
drvOutput,
|
||||
"drv-output",
|
||||
defaultVersion,
|
||||
(std::tuple<DrvOutput, DrvOutput> {
|
||||
{
|
||||
.drvHash = Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="),
|
||||
.outputName = "baz",
|
||||
},
|
||||
DrvOutput {
|
||||
.drvHash = Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="),
|
||||
.outputName = "quux",
|
||||
},
|
||||
}))
|
||||
|
||||
VERSIONED_CHARACTERIZATION_TEST(
|
||||
ServeProtoTest,
|
||||
realisation,
|
||||
"realisation",
|
||||
defaultVersion,
|
||||
(std::tuple<Realisation, Realisation> {
|
||||
Realisation {
|
||||
.id = DrvOutput {
|
||||
.drvHash = Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="),
|
||||
.outputName = "baz",
|
||||
},
|
||||
.outPath = StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo" },
|
||||
.signatures = { "asdf", "qwer" },
|
||||
},
|
||||
Realisation {
|
||||
.id = {
|
||||
.drvHash = Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="),
|
||||
.outputName = "baz",
|
||||
},
|
||||
.outPath = StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo" },
|
||||
.signatures = { "asdf", "qwer" },
|
||||
.dependentRealisations = {
|
||||
{
|
||||
DrvOutput {
|
||||
.drvHash = Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="),
|
||||
.outputName = "quux",
|
||||
},
|
||||
StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo" },
|
||||
},
|
||||
},
|
||||
},
|
||||
}))
|
||||
|
||||
VERSIONED_CHARACTERIZATION_TEST(
|
||||
ServeProtoTest,
|
||||
buildResult_2_2,
|
||||
"build-result-2.2",
|
||||
2 << 8 | 2,
|
||||
({
|
||||
using namespace std::literals::chrono_literals;
|
||||
std::tuple<BuildResult, BuildResult, BuildResult> t {
|
||||
BuildResult {
|
||||
.status = BuildResult::OutputRejected,
|
||||
.errorMsg = "no idea why",
|
||||
},
|
||||
BuildResult {
|
||||
.status = BuildResult::NotDeterministic,
|
||||
.errorMsg = "no idea why",
|
||||
},
|
||||
BuildResult {
|
||||
.status = BuildResult::Built,
|
||||
},
|
||||
};
|
||||
t;
|
||||
}))
|
||||
|
||||
VERSIONED_CHARACTERIZATION_TEST(
|
||||
ServeProtoTest,
|
||||
buildResult_2_3,
|
||||
"build-result-2.3",
|
||||
2 << 8 | 3,
|
||||
({
|
||||
using namespace std::literals::chrono_literals;
|
||||
std::tuple<BuildResult, BuildResult, BuildResult> t {
|
||||
BuildResult {
|
||||
.status = BuildResult::OutputRejected,
|
||||
.errorMsg = "no idea why",
|
||||
},
|
||||
BuildResult {
|
||||
.status = BuildResult::NotDeterministic,
|
||||
.errorMsg = "no idea why",
|
||||
.timesBuilt = 3,
|
||||
.isNonDeterministic = true,
|
||||
.startTime = 30,
|
||||
.stopTime = 50,
|
||||
},
|
||||
BuildResult {
|
||||
.status = BuildResult::Built,
|
||||
.startTime = 30,
|
||||
.stopTime = 50,
|
||||
},
|
||||
};
|
||||
t;
|
||||
}))
|
||||
|
||||
VERSIONED_CHARACTERIZATION_TEST(
|
||||
ServeProtoTest,
|
||||
buildResult_2_6,
|
||||
"build-result-2.6",
|
||||
2 << 8 | 6,
|
||||
({
|
||||
using namespace std::literals::chrono_literals;
|
||||
std::tuple<BuildResult, BuildResult, BuildResult> t {
|
||||
BuildResult {
|
||||
.status = BuildResult::OutputRejected,
|
||||
.errorMsg = "no idea why",
|
||||
},
|
||||
BuildResult {
|
||||
.status = BuildResult::NotDeterministic,
|
||||
.errorMsg = "no idea why",
|
||||
.timesBuilt = 3,
|
||||
.isNonDeterministic = true,
|
||||
.startTime = 30,
|
||||
.stopTime = 50,
|
||||
},
|
||||
BuildResult {
|
||||
.status = BuildResult::Built,
|
||||
.timesBuilt = 1,
|
||||
.builtOutputs = {
|
||||
{
|
||||
"foo",
|
||||
{
|
||||
.id = DrvOutput {
|
||||
.drvHash = Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="),
|
||||
.outputName = "foo",
|
||||
},
|
||||
.outPath = StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo" },
|
||||
},
|
||||
},
|
||||
{
|
||||
"bar",
|
||||
{
|
||||
.id = DrvOutput {
|
||||
.drvHash = Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="),
|
||||
.outputName = "bar",
|
||||
},
|
||||
.outPath = StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar" },
|
||||
},
|
||||
},
|
||||
},
|
||||
.startTime = 30,
|
||||
.stopTime = 50,
|
||||
#if 0
|
||||
// These fields are not yet serialized.
|
||||
// FIXME Include in next version of protocol or document
|
||||
// why they are skipped.
|
||||
.cpuUser = std::chrono::milliseconds(500s),
|
||||
.cpuSystem = std::chrono::milliseconds(604s),
|
||||
#endif
|
||||
},
|
||||
};
|
||||
t;
|
||||
}))
|
||||
|
||||
VERSIONED_CHARACTERIZATION_TEST(
|
||||
ServeProtoTest,
|
||||
vector,
|
||||
"vector",
|
||||
defaultVersion,
|
||||
(std::tuple<std::vector<std::string>, std::vector<std::string>, std::vector<std::string>, std::vector<std::vector<std::string>>> {
|
||||
{ },
|
||||
{ "" },
|
||||
{ "", "foo", "bar" },
|
||||
{ {}, { "" }, { "", "1", "2" } },
|
||||
}))
|
||||
|
||||
VERSIONED_CHARACTERIZATION_TEST(
|
||||
ServeProtoTest,
|
||||
set,
|
||||
"set",
|
||||
defaultVersion,
|
||||
(std::tuple<std::set<std::string>, std::set<std::string>, std::set<std::string>, std::set<std::set<std::string>>> {
|
||||
{ },
|
||||
{ "" },
|
||||
{ "", "foo", "bar" },
|
||||
{ {}, { "" }, { "", "1", "2" } },
|
||||
}))
|
||||
|
||||
VERSIONED_CHARACTERIZATION_TEST(
|
||||
ServeProtoTest,
|
||||
optionalStorePath,
|
||||
"optional-store-path",
|
||||
defaultVersion,
|
||||
(std::tuple<std::optional<StorePath>, std::optional<StorePath>> {
|
||||
std::nullopt,
|
||||
std::optional {
|
||||
StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo-bar" },
|
||||
},
|
||||
}))
|
||||
|
||||
VERSIONED_CHARACTERIZATION_TEST(
|
||||
ServeProtoTest,
|
||||
optionalContentAddress,
|
||||
"optional-content-address",
|
||||
defaultVersion,
|
||||
(std::tuple<std::optional<ContentAddress>, std::optional<ContentAddress>> {
|
||||
std::nullopt,
|
||||
std::optional {
|
||||
ContentAddress {
|
||||
.method = FileIngestionMethod::Flat,
|
||||
.hash = hashString(HashType::htSHA1, "blob blob..."),
|
||||
},
|
||||
},
|
||||
}))
|
||||
|
||||
}
|
|
@ -1 +0,0 @@
nix@scratchy.labs.cs.uu.nl - - eight
@ -1,3 +0,0 @@
nix@scratchy.labs.cs.uu.nl i686-linux /home/nix/.ssh/id_scratchy_auto 8 1 kvm
nix@itchy.labs.cs.uu.nl i686-linux /home/nix/.ssh/id_scratchy_auto 8 2
nix@poochie.labs.cs.uu.nl i686-linux /home/nix/.ssh/id_scratchy_auto 1 2 kvm benchmark c3NoLXJzYSBBQUFBQjNOemFDMXljMkVBQUFBREFRQUJBQUFDQVFDWWV5R1laNTNzd1VjMUZNSHBWL1BCcXlKaFR5S1JoRkpWWVRpRHlQN2h5c1JGa0w4VDlLOGdhL2Y2L3c3QjN2SjNHSFRIUFkybENiUEdZbGNLd2h6M2ZRbFNNOEViNi95b3ZLajdvM1FsMEx5Y0dzdGJvRmcwWkZKNldncUxsR0ltS0NobUlxOGZ3TW5ZTWUxbnRQeTBUZFZjSU1tOTV3YzF3SjBMd2c3cEVMRmtHazdkeTVvYnM4a3lGZ0pORDVRSmFwQWJjeWp4Z1QzdzdMcktNZ2xzeWhhd01JNVpkMGZsQTVudW5OZ3pid3plYVhLaUsyTW0vdGJXYTU1YTd4QmNYdHpIZGlPSWdSajJlRWxaMGh5bk10YjBmcklsdmxIcEtLaVFaZ3pQdCtIVXQ2bXpRMkRVME52MGYyYnNSU0krOGpJU2pQcmdlcVVHRldMUzVIUTg2N2xSMlpiaWtyclhZNTdqbVFEZk5DRHY1VFBHZU9UekFEd2pjMDc2aFZ3VFJCd3VTZFhtaWNxTS95b3lrWitkV1dnZ25MenE5QU1tdlNZcDhmZkZDcS9CSDBZNUFXWTFHay9vS3hMVTNaOWt3ZDd2UWNFQWFCQ2dxdnVZRGdTaHE1RlhndDM3OVZESWtEL05ZSTg2QXVvajVDRmVNTzlRM2pJSlRadlh6c1VldjVoSnA2djcxSVh5ODVtbTY5R20zcXdicVE1SjVQZDU1Um56SitpaW5BNjZxTEFSc0Y4amNsSnd5ekFXclBoYU9DRVY2bjVMeVhVazhzMW9EVVR4V1pWN25rVkFTbHJ0MllGcjN5dzdjRTRXQVhsemhHcDhocmdLMVVkMUlyeDVnZWRaSnBWcy9uNWVybmJFMUxmb2x5UHUvRUFIWlh6VGd4dHVDUFNobXc9PQo=
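For context (an explanatory aside, stated here as an assumption about the remote-builder machines-file format rather than something introduced by this change): each line lists, in order, the builder URI, the platform type(s), the SSH identity file, the maximum number of parallel builds, the speed factor, the supported features, the mandatory features, and a base64-encoded SSH host key; a `-` leaves a column at its default. A hypothetical minimal entry:

nix@example.org x86_64-linux /home/nix/.ssh/id_builder 4 1 kvm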
@ -1,547 +0,0 @@
|
|||
#include <regex>
|
||||
|
||||
#include <nlohmann/json.hpp>
|
||||
#include <gtest/gtest.h>
|
||||
|
||||
#include "worker-protocol.hh"
|
||||
#include "worker-protocol-impl.hh"
|
||||
#include "derived-path.hh"
|
||||
#include "build-result.hh"
|
||||
#include "tests/protocol.hh"
|
||||
#include "tests/characterization.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
const char workerProtoDir[] = "worker-protocol";
|
||||
|
||||
struct WorkerProtoTest : VersionedProtoTest<WorkerProto, workerProtoDir>
|
||||
{
|
||||
    /**
     * For serializers that don't care about the minimum version, we
     * use the oldest one: 1.0.
     */
    WorkerProto::Version defaultVersion = 1 << 8 | 0;
|
||||
};
|
||||
|
||||
|
||||
VERSIONED_CHARACTERIZATION_TEST(
|
||||
WorkerProtoTest,
|
||||
string,
|
||||
"string",
|
||||
defaultVersion,
|
||||
(std::tuple<std::string, std::string, std::string, std::string, std::string> {
|
||||
"",
|
||||
"hi",
|
||||
"white rabbit",
|
||||
"大白兔",
|
||||
"oh no \0\0\0 what was that!",
|
||||
}))
|
||||
|
||||
VERSIONED_CHARACTERIZATION_TEST(
|
||||
WorkerProtoTest,
|
||||
storePath,
|
||||
"store-path",
|
||||
defaultVersion,
|
||||
(std::tuple<StorePath, StorePath> {
|
||||
StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo" },
|
||||
StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo-bar" },
|
||||
}))
|
||||
|
||||
VERSIONED_CHARACTERIZATION_TEST(
|
||||
WorkerProtoTest,
|
||||
contentAddress,
|
||||
"content-address",
|
||||
defaultVersion,
|
||||
(std::tuple<ContentAddress, ContentAddress, ContentAddress> {
|
||||
ContentAddress {
|
||||
.method = TextIngestionMethod {},
|
||||
.hash = hashString(HashType::htSHA256, "Derive(...)"),
|
||||
},
|
||||
ContentAddress {
|
||||
.method = FileIngestionMethod::Flat,
|
||||
.hash = hashString(HashType::htSHA1, "blob blob..."),
|
||||
},
|
||||
ContentAddress {
|
||||
.method = FileIngestionMethod::Recursive,
|
||||
.hash = hashString(HashType::htSHA256, "(...)"),
|
||||
},
|
||||
}))
|
||||
|
||||
VERSIONED_CHARACTERIZATION_TEST(
|
||||
WorkerProtoTest,
|
||||
derivedPath_1_29,
|
||||
"derived-path-1.29",
|
||||
1 << 8 | 29,
|
||||
(std::tuple<DerivedPath, DerivedPath, DerivedPath> {
|
||||
DerivedPath::Opaque {
|
||||
.path = StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo" },
|
||||
},
|
||||
DerivedPath::Built {
|
||||
.drvPath = makeConstantStorePathRef(StorePath {
|
||||
"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv",
|
||||
}),
|
||||
.outputs = OutputsSpec::All { },
|
||||
},
|
||||
DerivedPath::Built {
|
||||
.drvPath = makeConstantStorePathRef(StorePath {
|
||||
"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv",
|
||||
}),
|
||||
.outputs = OutputsSpec::Names { "x", "y" },
|
||||
},
|
||||
}))
|
||||
|
||||
VERSIONED_CHARACTERIZATION_TEST(
|
||||
WorkerProtoTest,
|
||||
derivedPath_1_30,
|
||||
"derived-path-1.30",
|
||||
1 << 8 | 30,
|
||||
(std::tuple<DerivedPath, DerivedPath, DerivedPath, DerivedPath> {
|
||||
DerivedPath::Opaque {
|
||||
.path = StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo" },
|
||||
},
|
||||
DerivedPath::Opaque {
|
||||
.path = StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv" },
|
||||
},
|
||||
DerivedPath::Built {
|
||||
.drvPath = makeConstantStorePathRef(StorePath {
|
||||
"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv",
|
||||
}),
|
||||
.outputs = OutputsSpec::All { },
|
||||
},
|
||||
DerivedPath::Built {
|
||||
.drvPath = makeConstantStorePathRef(StorePath {
|
||||
"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv",
|
||||
}),
|
||||
.outputs = OutputsSpec::Names { "x", "y" },
|
||||
},
|
||||
}))
|
||||
|
||||
VERSIONED_CHARACTERIZATION_TEST(
|
||||
WorkerProtoTest,
|
||||
drvOutput,
|
||||
"drv-output",
|
||||
defaultVersion,
|
||||
(std::tuple<DrvOutput, DrvOutput> {
|
||||
{
|
||||
.drvHash = Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="),
|
||||
.outputName = "baz",
|
||||
},
|
||||
DrvOutput {
|
||||
.drvHash = Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="),
|
||||
.outputName = "quux",
|
||||
},
|
||||
}))
|
||||
|
||||
VERSIONED_CHARACTERIZATION_TEST(
|
||||
WorkerProtoTest,
|
||||
realisation,
|
||||
"realisation",
|
||||
defaultVersion,
|
||||
(std::tuple<Realisation, Realisation> {
|
||||
Realisation {
|
||||
.id = DrvOutput {
|
||||
.drvHash = Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="),
|
||||
.outputName = "baz",
|
||||
},
|
||||
.outPath = StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo" },
|
||||
.signatures = { "asdf", "qwer" },
|
||||
},
|
||||
Realisation {
|
||||
.id = {
|
||||
.drvHash = Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="),
|
||||
.outputName = "baz",
|
||||
},
|
||||
.outPath = StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo" },
|
||||
.signatures = { "asdf", "qwer" },
|
||||
.dependentRealisations = {
|
||||
{
|
||||
DrvOutput {
|
||||
.drvHash = Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="),
|
||||
.outputName = "quux",
|
||||
},
|
||||
StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo" },
|
||||
},
|
||||
},
|
||||
},
|
||||
}))
|
||||
|
||||
VERSIONED_CHARACTERIZATION_TEST(
|
||||
WorkerProtoTest,
|
||||
buildResult_1_27,
|
||||
"build-result-1.27",
|
||||
1 << 8 | 27,
|
||||
({
|
||||
using namespace std::literals::chrono_literals;
|
||||
std::tuple<BuildResult, BuildResult, BuildResult> t {
|
||||
BuildResult {
|
||||
.status = BuildResult::OutputRejected,
|
||||
.errorMsg = "no idea why",
|
||||
},
|
||||
BuildResult {
|
||||
.status = BuildResult::NotDeterministic,
|
||||
.errorMsg = "no idea why",
|
||||
},
|
||||
BuildResult {
|
||||
.status = BuildResult::Built,
|
||||
},
|
||||
};
|
||||
t;
|
||||
}))
|
||||
|
||||
VERSIONED_CHARACTERIZATION_TEST(
|
||||
WorkerProtoTest,
|
||||
buildResult_1_28,
|
||||
"build-result-1.28",
|
||||
1 << 8 | 28,
|
||||
({
|
||||
using namespace std::literals::chrono_literals;
|
||||
std::tuple<BuildResult, BuildResult, BuildResult> t {
|
||||
BuildResult {
|
||||
.status = BuildResult::OutputRejected,
|
||||
.errorMsg = "no idea why",
|
||||
},
|
||||
BuildResult {
|
||||
.status = BuildResult::NotDeterministic,
|
||||
.errorMsg = "no idea why",
|
||||
},
|
||||
BuildResult {
|
||||
.status = BuildResult::Built,
|
||||
.builtOutputs = {
|
||||
{
|
||||
"foo",
|
||||
{
|
||||
.id = DrvOutput {
|
||||
.drvHash = Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="),
|
||||
.outputName = "foo",
|
||||
},
|
||||
.outPath = StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo" },
|
||||
},
|
||||
},
|
||||
{
|
||||
"bar",
|
||||
{
|
||||
.id = DrvOutput {
|
||||
.drvHash = Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="),
|
||||
.outputName = "bar",
|
||||
},
|
||||
.outPath = StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar" },
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
t;
|
||||
}))
|
||||
|
||||
VERSIONED_CHARACTERIZATION_TEST(
|
||||
WorkerProtoTest,
|
||||
buildResult_1_29,
|
||||
"build-result-1.29",
|
||||
1 << 8 | 29,
|
||||
({
|
||||
using namespace std::literals::chrono_literals;
|
||||
std::tuple<BuildResult, BuildResult, BuildResult> t {
|
||||
BuildResult {
|
||||
.status = BuildResult::OutputRejected,
|
||||
.errorMsg = "no idea why",
|
||||
},
|
||||
BuildResult {
|
||||
.status = BuildResult::NotDeterministic,
|
||||
.errorMsg = "no idea why",
|
||||
.timesBuilt = 3,
|
||||
.isNonDeterministic = true,
|
||||
.startTime = 30,
|
||||
.stopTime = 50,
|
||||
},
|
||||
BuildResult {
|
||||
.status = BuildResult::Built,
|
||||
.timesBuilt = 1,
|
||||
.builtOutputs = {
|
||||
{
|
||||
"foo",
|
||||
{
|
||||
.id = DrvOutput {
|
||||
.drvHash = Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="),
|
||||
.outputName = "foo",
|
||||
},
|
||||
.outPath = StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo" },
|
||||
},
|
||||
},
|
||||
{
|
||||
"bar",
|
||||
{
|
||||
.id = DrvOutput {
|
||||
.drvHash = Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="),
|
||||
.outputName = "bar",
|
||||
},
|
||||
.outPath = StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar" },
|
||||
},
|
||||
},
|
||||
},
|
||||
.startTime = 30,
|
||||
.stopTime = 50,
|
||||
#if 0
|
||||
// These fields are not yet serialized.
|
||||
// FIXME Include in next version of protocol or document
|
||||
// why they are skipped.
|
||||
.cpuUser = std::chrono::milliseconds(500s),
|
||||
.cpuSystem = std::chrono::milliseconds(604s),
|
||||
#endif
|
||||
},
|
||||
};
|
||||
t;
|
||||
}))
|
||||
|
||||
VERSIONED_CHARACTERIZATION_TEST(
|
||||
WorkerProtoTest,
|
||||
keyedBuildResult_1_29,
|
||||
"keyed-build-result-1.29",
|
||||
1 << 8 | 29,
|
||||
({
|
||||
using namespace std::literals::chrono_literals;
|
||||
std::tuple<KeyedBuildResult, KeyedBuildResult/*, KeyedBuildResult*/> t {
|
||||
KeyedBuildResult {
|
||||
{
|
||||
.status = KeyedBuildResult::OutputRejected,
|
||||
.errorMsg = "no idea why",
|
||||
},
|
||||
/* .path = */ DerivedPath::Opaque {
|
||||
StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-xxx" },
|
||||
},
|
||||
},
|
||||
KeyedBuildResult {
|
||||
{
|
||||
.status = KeyedBuildResult::NotDeterministic,
|
||||
.errorMsg = "no idea why",
|
||||
.timesBuilt = 3,
|
||||
.isNonDeterministic = true,
|
||||
.startTime = 30,
|
||||
.stopTime = 50,
|
||||
},
|
||||
/* .path = */ DerivedPath::Built {
|
||||
.drvPath = makeConstantStorePathRef(StorePath {
|
||||
"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv",
|
||||
}),
|
||||
.outputs = OutputsSpec::Names { "out" },
|
||||
},
|
||||
},
|
||||
};
|
||||
t;
|
||||
}))
|
||||
|
||||
VERSIONED_CHARACTERIZATION_TEST(
|
||||
WorkerProtoTest,
|
||||
unkeyedValidPathInfo_1_15,
|
||||
"unkeyed-valid-path-info-1.15",
|
||||
1 << 8 | 15,
|
||||
(std::tuple<UnkeyedValidPathInfo, UnkeyedValidPathInfo> {
|
||||
({
|
||||
UnkeyedValidPathInfo info {
|
||||
Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="),
|
||||
};
|
||||
info.registrationTime = 23423;
|
||||
info.narSize = 34878;
|
||||
info;
|
||||
}),
|
||||
({
|
||||
UnkeyedValidPathInfo info {
|
||||
Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="),
|
||||
};
|
||||
info.deriver = StorePath {
|
||||
"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv",
|
||||
};
|
||||
info.references = {
|
||||
StorePath {
|
||||
"g1w7hyyyy1w7hy3qg1w7hy3qgqqqqy3q-foo.drv",
|
||||
},
|
||||
};
|
||||
info.registrationTime = 23423;
|
||||
info.narSize = 34878;
|
||||
info;
|
||||
}),
|
||||
}))
|
||||
|
||||
VERSIONED_CHARACTERIZATION_TEST(
|
||||
WorkerProtoTest,
|
||||
validPathInfo_1_15,
|
||||
"valid-path-info-1.15",
|
||||
1 << 8 | 15,
|
||||
(std::tuple<ValidPathInfo, ValidPathInfo> {
|
||||
({
|
||||
ValidPathInfo info {
|
||||
StorePath {
|
||||
"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar",
|
||||
},
|
||||
UnkeyedValidPathInfo {
|
||||
Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="),
|
||||
},
|
||||
};
|
||||
info.registrationTime = 23423;
|
||||
info.narSize = 34878;
|
||||
info;
|
||||
}),
|
||||
({
|
||||
ValidPathInfo info {
|
||||
StorePath {
|
||||
"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar",
|
||||
},
|
||||
UnkeyedValidPathInfo {
|
||||
Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="),
|
||||
},
|
||||
};
|
||||
info.deriver = StorePath {
|
||||
"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv",
|
||||
};
|
||||
info.references = {
|
||||
// other reference
|
||||
StorePath {
|
||||
"g1w7hyyyy1w7hy3qg1w7hy3qgqqqqy3q-foo",
|
||||
},
|
||||
// self reference
|
||||
StorePath {
|
||||
"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar",
|
||||
},
|
||||
};
|
||||
info.registrationTime = 23423;
|
||||
info.narSize = 34878;
|
||||
info;
|
||||
}),
|
||||
}))
|
||||
|
||||
VERSIONED_CHARACTERIZATION_TEST(
|
||||
WorkerProtoTest,
|
||||
validPathInfo_1_16,
|
||||
"valid-path-info-1.16",
|
||||
1 << 8 | 16,
|
||||
(std::tuple<ValidPathInfo, ValidPathInfo, ValidPathInfo> {
|
||||
({
|
||||
ValidPathInfo info {
|
||||
StorePath {
|
||||
"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar",
|
||||
},
|
||||
UnkeyedValidPathInfo {
|
||||
Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="),
|
||||
},
|
||||
};
|
||||
info.registrationTime = 23423;
|
||||
info.narSize = 34878;
|
||||
info.ultimate = true;
|
||||
info;
|
||||
}),
|
||||
({
|
||||
ValidPathInfo info {
|
||||
StorePath {
|
||||
"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar",
|
||||
},
|
||||
UnkeyedValidPathInfo {
|
||||
Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="),
|
||||
},
|
||||
};
|
||||
info.deriver = StorePath {
|
||||
"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv",
|
||||
};
|
||||
info.references = {
|
||||
// other reference
|
||||
StorePath {
|
||||
"g1w7hyyyy1w7hy3qg1w7hy3qgqqqqy3q-foo",
|
||||
},
|
||||
// self reference
|
||||
StorePath {
|
||||
"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar",
|
||||
},
|
||||
};
|
||||
info.registrationTime = 23423;
|
||||
info.narSize = 34878;
|
||||
info.sigs = {
|
||||
"fake-sig-1",
|
||||
"fake-sig-2",
|
||||
},
|
||||
info;
|
||||
}),
|
||||
({
|
||||
ValidPathInfo info {
|
||||
*LibStoreTest::store,
|
||||
"foo",
|
||||
FixedOutputInfo {
|
||||
.method = FileIngestionMethod::Recursive,
|
||||
.hash = hashString(HashType::htSHA256, "(...)"),
|
||||
.references = {
|
||||
.others = {
|
||||
StorePath {
|
||||
"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar",
|
||||
},
|
||||
},
|
||||
.self = true,
|
||||
},
|
||||
},
|
||||
Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="),
|
||||
};
|
||||
info.registrationTime = 23423;
|
||||
info.narSize = 34878;
|
||||
info;
|
||||
}),
|
||||
}))
|
||||
|
||||
VERSIONED_CHARACTERIZATION_TEST(
|
||||
WorkerProtoTest,
|
||||
optionalTrustedFlag,
|
||||
"optional-trusted-flag",
|
||||
defaultVersion,
|
||||
(std::tuple<std::optional<TrustedFlag>, std::optional<TrustedFlag>, std::optional<TrustedFlag>> {
|
||||
std::nullopt,
|
||||
std::optional { Trusted },
|
||||
std::optional { NotTrusted },
|
||||
}))
|
||||
|
||||
VERSIONED_CHARACTERIZATION_TEST(
|
||||
WorkerProtoTest,
|
||||
vector,
|
||||
"vector",
|
||||
defaultVersion,
|
||||
(std::tuple<std::vector<std::string>, std::vector<std::string>, std::vector<std::string>, std::vector<std::vector<std::string>>> {
|
||||
{ },
|
||||
{ "" },
|
||||
{ "", "foo", "bar" },
|
||||
{ {}, { "" }, { "", "1", "2" } },
|
||||
}))
|
||||
|
||||
VERSIONED_CHARACTERIZATION_TEST(
|
||||
WorkerProtoTest,
|
||||
set,
|
||||
"set",
|
||||
defaultVersion,
|
||||
(std::tuple<std::set<std::string>, std::set<std::string>, std::set<std::string>, std::set<std::set<std::string>>> {
|
||||
{ },
|
||||
{ "" },
|
||||
{ "", "foo", "bar" },
|
||||
{ {}, { "" }, { "", "1", "2" } },
|
||||
}))
|
||||
|
||||
VERSIONED_CHARACTERIZATION_TEST(
|
||||
WorkerProtoTest,
|
||||
optionalStorePath,
|
||||
"optional-store-path",
|
||||
defaultVersion,
|
||||
(std::tuple<std::optional<StorePath>, std::optional<StorePath>> {
|
||||
std::nullopt,
|
||||
std::optional {
|
||||
StorePath { "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo-bar" },
|
||||
},
|
||||
}))
|
||||
|
||||
VERSIONED_CHARACTERIZATION_TEST(
|
||||
WorkerProtoTest,
|
||||
optionalContentAddress,
|
||||
"optional-content-address",
|
||||
defaultVersion,
|
||||
(std::tuple<std::optional<ContentAddress>, std::optional<ContentAddress>> {
|
||||
std::nullopt,
|
||||
std::optional {
|
||||
ContentAddress {
|
||||
.method = FileIngestionMethod::Flat,
|
||||
.hash = hashString(HashType::htSHA1, "blob blob..."),
|
||||
},
|
||||
},
|
||||
}))
|
||||
|
||||
}
|
|
@ -1,168 +0,0 @@
|
|||
#include "../args.hh"
|
||||
#include "libutil/fs-sink.hh"
|
||||
#include <list>
|
||||
|
||||
#include <gtest/gtest.h>
|
||||
#include <rapidcheck/gtest.h>
|
||||
|
||||
namespace nix {
|
||||
|
||||
TEST(parseShebangContent, basic) {
|
||||
std::list<std::string> r = parseShebangContent("hi there");
|
||||
ASSERT_EQ(r.size(), 2);
|
||||
auto i = r.begin();
|
||||
ASSERT_EQ(*i++, "hi");
|
||||
ASSERT_EQ(*i++, "there");
|
||||
}
|
||||
|
||||
TEST(parseShebangContent, empty) {
|
||||
std::list<std::string> r = parseShebangContent("");
|
||||
ASSERT_EQ(r.size(), 0);
|
||||
}
|
||||
|
||||
TEST(parseShebangContent, doubleBacktick) {
|
||||
std::list<std::string> r = parseShebangContent("``\"ain't that nice\"``");
|
||||
ASSERT_EQ(r.size(), 1);
|
||||
auto i = r.begin();
|
||||
ASSERT_EQ(*i++, "\"ain't that nice\"");
|
||||
}
|
||||
|
||||
TEST(parseShebangContent, doubleBacktickEmpty) {
|
||||
std::list<std::string> r = parseShebangContent("````");
|
||||
ASSERT_EQ(r.size(), 1);
|
||||
auto i = r.begin();
|
||||
ASSERT_EQ(*i++, "");
|
||||
}
|
||||
|
||||
TEST(parseShebangContent, doubleBacktickMarkdownInlineCode) {
|
||||
std::list<std::string> r = parseShebangContent("``# I'm markdown section about `coolFunction` ``");
|
||||
ASSERT_EQ(r.size(), 1);
|
||||
auto i = r.begin();
|
||||
ASSERT_EQ(*i++, "# I'm markdown section about `coolFunction`");
|
||||
}
|
||||
|
||||
TEST(parseShebangContent, doubleBacktickMarkdownCodeBlockNaive) {
|
||||
std::list<std::string> r = parseShebangContent("``Example 1\n```nix\na: a\n``` ``");
|
||||
auto i = r.begin();
|
||||
ASSERT_EQ(r.size(), 1);
|
||||
ASSERT_EQ(*i++, "Example 1\n``nix\na: a\n``");
|
||||
}
|
||||
|
||||
TEST(parseShebangContent, doubleBacktickMarkdownCodeBlockCorrect) {
|
||||
std::list<std::string> r = parseShebangContent("``Example 1\n````nix\na: a\n```` ``");
|
||||
auto i = r.begin();
|
||||
ASSERT_EQ(r.size(), 1);
|
||||
ASSERT_EQ(*i++, "Example 1\n```nix\na: a\n```");
|
||||
}
|
||||
|
||||
TEST(parseShebangContent, doubleBacktickMarkdownCodeBlock2) {
|
||||
std::list<std::string> r = parseShebangContent("``Example 1\n````nix\na: a\n````\nExample 2\n````nix\na: a\n```` ``");
|
||||
auto i = r.begin();
|
||||
ASSERT_EQ(r.size(), 1);
|
||||
ASSERT_EQ(*i++, "Example 1\n```nix\na: a\n```\nExample 2\n```nix\na: a\n```");
|
||||
}
|
||||
|
||||
TEST(parseShebangContent, singleBacktickInDoubleBacktickQuotes) {
|
||||
std::list<std::string> r = parseShebangContent("``` ``");
|
||||
auto i = r.begin();
|
||||
ASSERT_EQ(r.size(), 1);
|
||||
ASSERT_EQ(*i++, "`");
|
||||
}
|
||||
|
||||
TEST(parseShebangContent, singleBacktickAndSpaceInDoubleBacktickQuotes) {
|
||||
std::list<std::string> r = parseShebangContent("``` ``");
|
||||
auto i = r.begin();
|
||||
ASSERT_EQ(r.size(), 1);
|
||||
ASSERT_EQ(*i++, "` ");
|
||||
}
|
||||
|
||||
TEST(parseShebangContent, doubleBacktickInDoubleBacktickQuotes) {
|
||||
std::list<std::string> r = parseShebangContent("````` ``");
|
||||
auto i = r.begin();
|
||||
ASSERT_EQ(r.size(), 1);
|
||||
ASSERT_EQ(*i++, "``");
|
||||
}
|
||||
|
||||
TEST(parseShebangContent, increasingQuotes) {
|
||||
std::list<std::string> r = parseShebangContent("```` ``` `` ````` `` `````` ``");
|
||||
auto i = r.begin();
|
||||
ASSERT_EQ(r.size(), 4);
|
||||
ASSERT_EQ(*i++, "");
|
||||
ASSERT_EQ(*i++, "`");
|
||||
ASSERT_EQ(*i++, "``");
|
||||
ASSERT_EQ(*i++, "```");
|
||||
}
|
||||
|
||||
|
||||
#ifndef COVERAGE
|
||||
|
||||
// quick and dirty
|
||||
static inline std::string escape(std::string_view s_) {
|
||||
|
||||
std::string_view s = s_;
|
||||
std::string r = "``";
|
||||
|
||||
// make a guess to allocate ahead of time
|
||||
r.reserve(
|
||||
// plain chars
|
||||
s.size()
|
||||
// quotes
|
||||
+ 5
|
||||
// some "escape" backticks
|
||||
+ s.size() / 8);
|
||||
|
||||
while (!s.empty()) {
|
||||
if (s[0] == '`' && s.size() >= 2 && s[1] == '`') {
|
||||
// escape it
|
||||
r += "`";
|
||||
while (!s.empty() && s[0] == '`') {
|
||||
r += "`";
|
||||
s = s.substr(1);
|
||||
}
|
||||
} else {
|
||||
r += s[0];
|
||||
s = s.substr(1);
|
||||
}
|
||||
}
|
||||
|
||||
if (!r.empty()
|
||||
&& (
|
||||
r[r.size() - 1] == '`'
|
||||
|| r[r.size() - 1] == ' '
|
||||
)) {
|
||||
r += " ";
|
||||
}
|
||||
|
||||
r += "``";
|
||||
|
||||
return r;
|
||||
};
|
||||
|
||||
RC_GTEST_PROP(
|
||||
parseShebangContent,
|
||||
prop_round_trip_single,
|
||||
(const std::string & orig))
|
||||
{
|
||||
auto escaped = escape(orig);
|
||||
// RC_LOG() << "escaped: <[[" << escaped << "]]>" << std::endl;
|
||||
auto ss = parseShebangContent(escaped);
|
||||
RC_ASSERT(ss.size() == 1);
|
||||
RC_ASSERT(*ss.begin() == orig);
|
||||
}
|
||||
|
||||
RC_GTEST_PROP(
|
||||
parseShebangContent,
|
||||
prop_round_trip_two,
|
||||
(const std::string & one, const std::string & two))
|
||||
{
|
||||
auto ss = parseShebangContent(escape(one) + " " + escape(two));
|
||||
RC_ASSERT(ss.size() == 2);
|
||||
auto i = ss.begin();
|
||||
RC_ASSERT(*i++ == one);
|
||||
RC_ASSERT(*i++ == two);
|
||||
}
|
||||
|
||||
|
||||
#endif
|
||||
|
||||
}
|
|
@ -1,162 +0,0 @@
|
|||
#include "canon-path.hh"
|
||||
|
||||
#include <gtest/gtest.h>
|
||||
|
||||
namespace nix {
|
||||
|
||||
TEST(CanonPath, basic) {
|
||||
{
|
||||
CanonPath p("/");
|
||||
ASSERT_EQ(p.abs(), "/");
|
||||
ASSERT_EQ(p.rel(), "");
|
||||
ASSERT_EQ(p.baseName(), std::nullopt);
|
||||
ASSERT_EQ(p.dirOf(), std::nullopt);
|
||||
ASSERT_FALSE(p.parent());
|
||||
}
|
||||
|
||||
{
|
||||
CanonPath p("/foo//");
|
||||
ASSERT_EQ(p.abs(), "/foo");
|
||||
ASSERT_EQ(p.rel(), "foo");
|
||||
ASSERT_EQ(*p.baseName(), "foo");
|
||||
ASSERT_EQ(*p.dirOf(), ""); // FIXME: do we want this?
|
||||
ASSERT_EQ(p.parent()->abs(), "/");
|
||||
}
|
||||
|
||||
{
|
||||
CanonPath p("foo/bar");
|
||||
ASSERT_EQ(p.abs(), "/foo/bar");
|
||||
ASSERT_EQ(p.rel(), "foo/bar");
|
||||
ASSERT_EQ(*p.baseName(), "bar");
|
||||
ASSERT_EQ(*p.dirOf(), "/foo");
|
||||
ASSERT_EQ(p.parent()->abs(), "/foo");
|
||||
}
|
||||
|
||||
{
|
||||
CanonPath p("foo//bar/");
|
||||
ASSERT_EQ(p.abs(), "/foo/bar");
|
||||
ASSERT_EQ(p.rel(), "foo/bar");
|
||||
ASSERT_EQ(*p.baseName(), "bar");
|
||||
ASSERT_EQ(*p.dirOf(), "/foo");
|
||||
}
|
||||
}
|
||||
|
||||
TEST(CanonPath, pop) {
|
||||
CanonPath p("foo/bar/x");
|
||||
ASSERT_EQ(p.abs(), "/foo/bar/x");
|
||||
p.pop();
|
||||
ASSERT_EQ(p.abs(), "/foo/bar");
|
||||
p.pop();
|
||||
ASSERT_EQ(p.abs(), "/foo");
|
||||
p.pop();
|
||||
ASSERT_EQ(p.abs(), "/");
|
||||
}
|
||||
|
||||
TEST(CanonPath, removePrefix) {
|
||||
CanonPath p1("foo/bar");
|
||||
CanonPath p2("foo/bar/a/b/c");
|
||||
ASSERT_EQ(p2.removePrefix(p1).abs(), "/a/b/c");
|
||||
ASSERT_EQ(p1.removePrefix(p1).abs(), "/");
|
||||
ASSERT_EQ(p1.removePrefix(CanonPath("/")).abs(), "/foo/bar");
|
||||
}
|
||||
|
||||
TEST(CanonPath, iter) {
|
||||
{
|
||||
CanonPath p("a//foo/bar//");
|
||||
std::vector<std::string_view> ss;
|
||||
for (auto & c : p) ss.push_back(c);
|
||||
ASSERT_EQ(ss, std::vector<std::string_view>({"a", "foo", "bar"}));
|
||||
}
|
||||
|
||||
{
|
||||
CanonPath p("/");
|
||||
std::vector<std::string_view> ss;
|
||||
for (auto & c : p) ss.push_back(c);
|
||||
ASSERT_EQ(ss, std::vector<std::string_view>());
|
||||
}
|
||||
}
|
||||
|
||||
TEST(CanonPath, concat) {
|
||||
{
|
||||
CanonPath p1("a//foo/bar//");
|
||||
CanonPath p2("xyzzy/bla");
|
||||
ASSERT_EQ((p1 + p2).abs(), "/a/foo/bar/xyzzy/bla");
|
||||
}
|
||||
|
||||
{
|
||||
CanonPath p1("/");
|
||||
CanonPath p2("/a/b");
|
||||
ASSERT_EQ((p1 + p2).abs(), "/a/b");
|
||||
}
|
||||
|
||||
{
|
||||
CanonPath p1("/a/b");
|
||||
CanonPath p2("/");
|
||||
ASSERT_EQ((p1 + p2).abs(), "/a/b");
|
||||
}
|
||||
|
||||
{
|
||||
CanonPath p("/foo/bar");
|
||||
ASSERT_EQ((p + "x").abs(), "/foo/bar/x");
|
||||
}
|
||||
|
||||
{
|
||||
CanonPath p("/");
|
||||
ASSERT_EQ((p + "foo" + "bar").abs(), "/foo/bar");
|
||||
}
|
||||
}
|
||||
|
||||
TEST(CanonPath, within) {
|
||||
ASSERT_TRUE(CanonPath("foo").isWithin(CanonPath("foo")));
|
||||
ASSERT_FALSE(CanonPath("foo").isWithin(CanonPath("bar")));
|
||||
ASSERT_FALSE(CanonPath("foo").isWithin(CanonPath("fo")));
|
||||
ASSERT_TRUE(CanonPath("foo/bar").isWithin(CanonPath("foo")));
|
||||
ASSERT_FALSE(CanonPath("foo").isWithin(CanonPath("foo/bar")));
|
||||
ASSERT_TRUE(CanonPath("/foo/bar/default.nix").isWithin(CanonPath("/")));
|
||||
ASSERT_TRUE(CanonPath("/").isWithin(CanonPath("/")));
|
||||
}
|
||||
|
||||
TEST(CanonPath, sort) {
|
||||
ASSERT_FALSE(CanonPath("foo") < CanonPath("foo"));
|
||||
ASSERT_TRUE (CanonPath("foo") < CanonPath("foo/bar"));
|
||||
ASSERT_TRUE (CanonPath("foo/bar") < CanonPath("foo!"));
|
||||
ASSERT_FALSE(CanonPath("foo!") < CanonPath("foo"));
|
||||
ASSERT_TRUE (CanonPath("foo") < CanonPath("foo!"));
|
||||
}
|
||||
|
||||
TEST(CanonPath, allowed) {
|
||||
std::set<CanonPath> allowed {
|
||||
CanonPath("foo/bar"),
|
||||
CanonPath("foo!"),
|
||||
CanonPath("xyzzy"),
|
||||
CanonPath("a/b/c"),
|
||||
};
|
||||
|
||||
ASSERT_TRUE (CanonPath("foo/bar").isAllowed(allowed));
|
||||
ASSERT_TRUE (CanonPath("foo/bar/bla").isAllowed(allowed));
|
||||
ASSERT_TRUE (CanonPath("foo").isAllowed(allowed));
|
||||
ASSERT_FALSE(CanonPath("bar").isAllowed(allowed));
|
||||
ASSERT_FALSE(CanonPath("bar/a").isAllowed(allowed));
|
||||
ASSERT_TRUE (CanonPath("a").isAllowed(allowed));
|
||||
ASSERT_TRUE (CanonPath("a/b").isAllowed(allowed));
|
||||
ASSERT_TRUE (CanonPath("a/b/c").isAllowed(allowed));
|
||||
ASSERT_TRUE (CanonPath("a/b/c/d").isAllowed(allowed));
|
||||
ASSERT_TRUE (CanonPath("a/b/c/d/e").isAllowed(allowed));
|
||||
ASSERT_FALSE(CanonPath("a/b/a").isAllowed(allowed));
|
||||
ASSERT_FALSE(CanonPath("a/b/d").isAllowed(allowed));
|
||||
ASSERT_FALSE(CanonPath("aaa").isAllowed(allowed));
|
||||
ASSERT_FALSE(CanonPath("zzz").isAllowed(allowed));
|
||||
ASSERT_TRUE (CanonPath("/").isAllowed(allowed));
|
||||
}
|
||||
|
||||
TEST(CanonPath, makeRelative) {
|
||||
CanonPath d("/foo/bar");
|
||||
ASSERT_EQ(d.makeRelative(CanonPath("/foo/bar")), ".");
|
||||
ASSERT_EQ(d.makeRelative(CanonPath("/foo")), "..");
|
||||
ASSERT_EQ(d.makeRelative(CanonPath("/")), "../..");
|
||||
ASSERT_EQ(d.makeRelative(CanonPath("/foo/bar/xyzzy")), "xyzzy");
|
||||
ASSERT_EQ(d.makeRelative(CanonPath("/foo/bar/xyzzy/bla")), "xyzzy/bla");
|
||||
ASSERT_EQ(d.makeRelative(CanonPath("/foo/xyzzy/bla")), "../xyzzy/bla");
|
||||
ASSERT_EQ(d.makeRelative(CanonPath("/xyzzy/bla")), "../../xyzzy/bla");
|
||||
}
|
||||
}
|
|
@ -1,108 +0,0 @@
#pragma once
///@file

#include <gtest/gtest.h>

#include "types.hh"
#include "environment-variables.hh"

namespace nix {

/**
 * The path to the `unit-test-data` directory. See the contributing
 * guide in the manual for further details.
 */
static Path getUnitTestData() {
    return getEnv("_NIX_TEST_UNIT_DATA").value();
}

/**
 * Whether we should update "golden masters" instead of running tests
 * against them. See the contributing guide in the manual for further
 * details.
 */
static bool testAccept() {
    return getEnv("_NIX_TEST_ACCEPT") == "1";
}

/**
 * Mixin class for writing characterization tests
 */
class CharacterizationTest : public virtual ::testing::Test
{
protected:
    /**
     * Where the "golden master" for this characterization test is
     * located. It should not be shared with any other test.
     */
    virtual Path goldenMaster(PathView testStem) const = 0;

public:
    /**
     * Golden test for reading
     *
     * @param test hook that takes the contents of the file and does the
     * actual work
     */
    void readTest(PathView testStem, auto && test)
    {
        auto file = goldenMaster(testStem);

        if (testAccept())
        {
            GTEST_SKIP()
                << "Cannot read golden master "
                << file
                << " because another test is also updating it";
        }
        else
        {
            test(readFile(file));
        }
    }

    /**
     * Golden test for writing
     *
     * @param test hook that produces contents of the file and does the
     * actual work
     */
    void writeTest(
        PathView testStem, auto && test, auto && readFile2, auto && writeFile2)
    {
        auto file = goldenMaster(testStem);

        auto got = test();

        if (testAccept())
        {
            createDirs(dirOf(file));
            writeFile2(file, got);
            GTEST_SKIP()
                << "Updating golden master "
                << file;
        }
        else
        {
            decltype(got) expected = readFile2(file);
            ASSERT_EQ(got, expected);
        }
    }

    /**
     * Specialize to `std::string`
     */
    void writeTest(PathView testStem, auto && test)
    {
        writeTest(
            testStem, test,
            [](const Path & f) -> std::string {
                return readFile(f);
            },
            [](const Path & f, const std::string & c) {
                return writeFile(f, c);
            });
    }
};

}
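As a quick illustration of the mixin above, a minimal fixture might look as follows; the class name, data directory, and test stem here are hypothetical.

class ExampleCharacterizationTest : public CharacterizationTest
{
    Path unitTestData = getUnitTestData() + "/libutil/example";

    Path goldenMaster(PathView testStem) const override {
        return unitTestData + "/" + testStem + ".txt";
    }
};

TEST_F(ExampleCharacterizationTest, simple_write)
{
    // Compares against the golden file "simple_write.txt", or regenerates
    // it when _NIX_TEST_ACCEPT=1 is set in the environment.
    writeTest("simple_write", []() -> std::string {
        return "hello world\n";
    });
}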
@ -1,54 +0,0 @@
#include "chunked-vector.hh"

#include <gtest/gtest.h>

namespace nix {
    TEST(ChunkedVector, InitEmpty) {
        auto v = ChunkedVector<int, 2>(100);
        ASSERT_EQ(v.size(), 0);
    }

    TEST(ChunkedVector, GrowsCorrectly) {
        auto v = ChunkedVector<int, 2>(100);
        for (auto i = 1; i < 20; i++) {
            v.add(i);
            ASSERT_EQ(v.size(), i);
        }
    }

    TEST(ChunkedVector, AddAndGet) {
        auto v = ChunkedVector<int, 2>(100);
        for (auto i = 1; i < 20; i++) {
            auto [i2, idx] = v.add(i);
            auto & i3 = v[idx];
            ASSERT_EQ(i, i2);
            ASSERT_EQ(&i2, &i3);
        }
    }

    TEST(ChunkedVector, ForEach) {
        auto v = ChunkedVector<int, 2>(100);
        for (auto i = 1; i < 20; i++) {
            v.add(i);
        }
        int count = 0;
        v.forEach([&count](int elt) {
            count++;
        });
        ASSERT_EQ(count, v.size());
    }

    TEST(ChunkedVector, OverflowOK) {
        // Similar to the AddAndGet, but intentionally use a small
        // initial ChunkedVector to force it to overflow
        auto v = ChunkedVector<int, 2>(2);
        for (auto i = 1; i < 20; i++) {
            auto [i2, idx] = v.add(i);
            auto & i3 = v[idx];
            ASSERT_EQ(i, i2);
            ASSERT_EQ(&i2, &i3);
        }
    }

}
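For readers of the tests above, the assumed shape of the container under test, paraphrased from how the tests use it (this is a reading of the interface, not a copy of the real header):

// template<typename T, size_t ChunkSize>
// class ChunkedVector {
//     ChunkedVector(size_t reserve);        // capacity hint only
//     std::pair<T &, size_t> add(T value);  // reference to the stored element plus its index
//     T & operator[](size_t idx);
//     void forEach(auto callback);
//     size_t size() const;
// };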
@ -1,70 +0,0 @@
#include "closure.hh"
#include <gtest/gtest.h>

namespace nix {

using namespace std;

map<string, set<string>> testGraph = {
    { "A", { "B", "C", "G" } },
    { "B", { "A" } }, // Loops back to A
    { "C", { "F" } }, // Indirect reference
    { "D", { "A" } }, // Not reachable, but has backreferences
    { "E", {} }, // Just not reachable
    { "F", {} },
    { "G", { "G" } }, // Self reference
};

TEST(closure, correctClosure) {
    set<string> aClosure;
    set<string> expectedClosure = {"A", "B", "C", "F", "G"};
    computeClosure<string>(
        {"A"},
        aClosure,
        [&](const string currentNode, function<void(promise<set<string>> &)> processEdges) {
            promise<set<string>> promisedNodes;
            promisedNodes.set_value(testGraph[currentNode]);
            processEdges(promisedNodes);
        }
    );

    ASSERT_EQ(aClosure, expectedClosure);
}

TEST(closure, properlyHandlesDirectExceptions) {
    struct TestExn {};
    set<string> aClosure;
    EXPECT_THROW(
        computeClosure<string>(
            {"A"},
            aClosure,
            [&](const string currentNode, function<void(promise<set<string>> &)> processEdges) {
                throw TestExn();
            }
        ),
        TestExn
    );
}

TEST(closure, properlyHandlesExceptionsInPromise) {
    struct TestExn {};
    set<string> aClosure;
    EXPECT_THROW(
        computeClosure<string>(
            {"A"},
            aClosure,
            [&](const string currentNode, function<void(promise<set<string>> &)> processEdges) {
                promise<set<string>> promise;
                try {
                    throw TestExn();
                } catch (...) {
                    promise.set_exception(std::current_exception());
                }
                processEdges(promise);
            }
        ),
        TestExn
    );
}

}
@ -1,96 +0,0 @@
#include "compression.hh"
#include <gtest/gtest.h>

namespace nix {

/* ----------------------------------------------------------------------------
 * compress / decompress
 * --------------------------------------------------------------------------*/

TEST(compress, compressWithUnknownMethod) {
    ASSERT_THROW(compress("invalid-method", "something-to-compress"), UnknownCompressionMethod);
}

TEST(compress, noneMethodDoesNothingToTheInput) {
    auto o = compress("none", "this-is-a-test");

    ASSERT_EQ(o, "this-is-a-test");
}

TEST(decompress, decompressNoneCompressed) {
    auto method = "none";
    auto str = "slfja;sljfklsa;jfklsjfkl;sdjfkl;sadjfkl;sdjf;lsdfjsadlf";
    auto o = decompress(method, str);

    ASSERT_EQ(o, str);
}

TEST(decompress, decompressEmptyCompressed) {
    // Empty-method decompression used e.g. by S3 store
    // (Content-Encoding == "").
    auto method = "";
    auto str = "slfja;sljfklsa;jfklsjfkl;sdjfkl;sadjfkl;sdjf;lsdfjsadlf";
    auto o = decompress(method, str);

    ASSERT_EQ(o, str);
}

TEST(decompress, decompressXzCompressed) {
    auto method = "xz";
    auto str = "slfja;sljfklsa;jfklsjfkl;sdjfkl;sadjfkl;sdjf;lsdfjsadlf";
    auto o = decompress(method, compress(method, str));

    ASSERT_EQ(o, str);
}

TEST(decompress, decompressBzip2Compressed) {
    auto method = "bzip2";
    auto str = "slfja;sljfklsa;jfklsjfkl;sdjfkl;sadjfkl;sdjf;lsdfjsadlf";
    auto o = decompress(method, compress(method, str));

    ASSERT_EQ(o, str);
}

TEST(decompress, decompressBrCompressed) {
    auto method = "br";
    auto str = "slfja;sljfklsa;jfklsjfkl;sdjfkl;sadjfkl;sdjf;lsdfjsadlf";
    auto o = decompress(method, compress(method, str));

    ASSERT_EQ(o, str);
}

TEST(decompress, decompressInvalidInputThrowsCompressionError) {
    auto method = "bzip2";
    auto str = "this is a string that does not qualify as valid bzip2 data";

    ASSERT_THROW(decompress(method, str), CompressionError);
}

/* ----------------------------------------------------------------------------
 * compression sinks
 * --------------------------------------------------------------------------*/

TEST(makeCompressionSink, noneSinkDoesNothingToInput) {
    StringSink strSink;
    auto inputString = "slfja;sljfklsa;jfklsjfkl;sdjfkl;sadjfkl;sdjf;lsdfjsadlf";
    auto sink = makeCompressionSink("none", strSink);
    (*sink)(inputString);
    sink->finish();

    ASSERT_STREQ(strSink.s.c_str(), inputString);
}

TEST(makeCompressionSink, compressAndDecompress) {
    StringSink strSink;
    auto inputString = "slfja;sljfklsa;jfklsjfkl;sdjfkl;sadjfkl;sdjf;lsdfjsadlf";
    auto decompressionSink = makeDecompressionSink("bzip2", strSink);
    auto sink = makeCompressionSink("bzip2", *decompressionSink);

    (*sink)(inputString);
    sink->finish();
    decompressionSink->finish();

    ASSERT_STREQ(strSink.s.c_str(), inputString);
}

}
@ -1,295 +0,0 @@
|
|||
#include "config.hh"
|
||||
#include "args.hh"
|
||||
|
||||
#include <sstream>
|
||||
#include <gtest/gtest.h>
|
||||
#include <nlohmann/json.hpp>
|
||||
|
||||
namespace nix {
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* Config
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
TEST(Config, setUndefinedSetting) {
|
||||
Config config;
|
||||
ASSERT_EQ(config.set("undefined-key", "value"), false);
|
||||
}
|
||||
|
||||
TEST(Config, setDefinedSetting) {
|
||||
Config config;
|
||||
std::string value;
|
||||
Setting<std::string> foo{&config, value, "name-of-the-setting", "description"};
|
||||
ASSERT_EQ(config.set("name-of-the-setting", "value"), true);
|
||||
}
|
||||
|
||||
TEST(Config, getDefinedSetting) {
|
||||
Config config;
|
||||
std::string value;
|
||||
std::map<std::string, Config::SettingInfo> settings;
|
||||
Setting<std::string> foo{&config, value, "name-of-the-setting", "description"};
|
||||
|
||||
config.getSettings(settings, /* overriddenOnly = */ false);
|
||||
const auto iter = settings.find("name-of-the-setting");
|
||||
ASSERT_NE(iter, settings.end());
|
||||
ASSERT_EQ(iter->second.value, "");
|
||||
ASSERT_EQ(iter->second.description, "description\n");
|
||||
}
|
||||
|
||||
TEST(Config, getDefinedOverriddenSettingNotSet) {
|
||||
Config config;
|
||||
std::string value;
|
||||
std::map<std::string, Config::SettingInfo> settings;
|
||||
Setting<std::string> foo{&config, value, "name-of-the-setting", "description"};
|
||||
|
||||
config.getSettings(settings, /* overriddenOnly = */ true);
|
||||
const auto e = settings.find("name-of-the-setting");
|
||||
ASSERT_EQ(e, settings.end());
|
||||
}
|
||||
|
||||
TEST(Config, getDefinedSettingSet1) {
|
||||
Config config;
|
||||
std::string value;
|
||||
std::map<std::string, Config::SettingInfo> settings;
|
||||
Setting<std::string> setting{&config, value, "name-of-the-setting", "description"};
|
||||
|
||||
setting.assign("value");
|
||||
|
||||
config.getSettings(settings, /* overriddenOnly = */ false);
|
||||
const auto iter = settings.find("name-of-the-setting");
|
||||
ASSERT_NE(iter, settings.end());
|
||||
ASSERT_EQ(iter->second.value, "value");
|
||||
ASSERT_EQ(iter->second.description, "description\n");
|
||||
}
|
||||
|
||||
TEST(Config, getDefinedSettingSet2) {
|
||||
Config config;
|
||||
std::map<std::string, Config::SettingInfo> settings;
|
||||
Setting<std::string> setting{&config, "", "name-of-the-setting", "description"};
|
||||
|
||||
ASSERT_TRUE(config.set("name-of-the-setting", "value"));
|
||||
|
||||
config.getSettings(settings, /* overriddenOnly = */ false);
|
||||
const auto e = settings.find("name-of-the-setting");
|
||||
ASSERT_NE(e, settings.end());
|
||||
ASSERT_EQ(e->second.value, "value");
|
||||
ASSERT_EQ(e->second.description, "description\n");
|
||||
}
|
||||
|
||||
TEST(Config, addSetting) {
|
||||
class TestSetting : public AbstractSetting {
|
||||
public:
|
||||
TestSetting() : AbstractSetting("test", "test", {}) {}
|
||||
void set(const std::string & value, bool append) override {}
|
||||
std::string to_string() const override { return {}; }
|
||||
bool isAppendable() override { return false; }
|
||||
};
|
||||
|
||||
Config config;
|
||||
TestSetting setting;
|
||||
|
||||
ASSERT_FALSE(config.set("test", "value"));
|
||||
config.addSetting(&setting);
|
||||
ASSERT_TRUE(config.set("test", "value"));
|
||||
ASSERT_FALSE(config.set("extra-test", "value"));
|
||||
}
|
||||
|
||||
TEST(Config, withInitialValue) {
|
||||
const StringMap initials = {
|
||||
{ "key", "value" },
|
||||
};
|
||||
Config config(initials);
|
||||
|
||||
{
|
||||
std::map<std::string, Config::SettingInfo> settings;
|
||||
config.getSettings(settings, /* overriddenOnly = */ false);
|
||||
ASSERT_EQ(settings.find("key"), settings.end());
|
||||
}
|
||||
|
||||
Setting<std::string> setting{&config, "default-value", "key", "description"};
|
||||
|
||||
{
|
||||
std::map<std::string, Config::SettingInfo> settings;
|
||||
config.getSettings(settings, /* overriddenOnly = */ false);
|
||||
ASSERT_EQ(settings["key"].value, "value");
|
||||
}
|
||||
}
|
||||
|
||||
TEST(Config, resetOverridden) {
|
||||
Config config;
|
||||
config.resetOverridden();
|
||||
}
|
||||
|
||||
TEST(Config, resetOverriddenWithSetting) {
|
||||
Config config;
|
||||
Setting<std::string> setting{&config, "", "name-of-the-setting", "description"};
|
||||
|
||||
{
|
||||
std::map<std::string, Config::SettingInfo> settings;
|
||||
|
||||
setting.set("foo");
|
||||
ASSERT_EQ(setting.get(), "foo");
|
||||
config.getSettings(settings, /* overriddenOnly = */ true);
|
||||
ASSERT_TRUE(settings.empty());
|
||||
}
|
||||
|
||||
{
|
||||
std::map<std::string, Config::SettingInfo> settings;
|
||||
|
||||
setting.override("bar");
|
||||
ASSERT_TRUE(setting.overridden);
|
||||
ASSERT_EQ(setting.get(), "bar");
|
||||
config.getSettings(settings, /* overriddenOnly = */ true);
|
||||
ASSERT_FALSE(settings.empty());
|
||||
}
|
||||
|
||||
{
|
||||
std::map<std::string, Config::SettingInfo> settings;
|
||||
|
||||
config.resetOverridden();
|
||||
ASSERT_FALSE(setting.overridden);
|
||||
config.getSettings(settings, /* overriddenOnly = */ true);
|
||||
ASSERT_TRUE(settings.empty());
|
||||
}
|
||||
}
|
||||
|
||||
TEST(Config, toJSONOnEmptyConfig) {
|
||||
ASSERT_EQ(Config().toJSON().dump(), "{}");
|
||||
}
|
||||
|
||||
TEST(Config, toJSONOnNonEmptyConfig) {
|
||||
using nlohmann::literals::operator "" _json;
|
||||
Config config;
|
||||
Setting<std::string> setting{
|
||||
&config,
|
||||
"",
|
||||
"name-of-the-setting",
|
||||
"description",
|
||||
};
|
||||
setting.assign("value");
|
||||
|
||||
ASSERT_EQ(config.toJSON(),
|
||||
R"#({
|
||||
"name-of-the-setting": {
|
||||
"aliases": [],
|
||||
"defaultValue": "",
|
||||
"description": "description\n",
|
||||
"documentDefault": true,
|
||||
"value": "value",
|
||||
"experimentalFeature": null
|
||||
}
|
||||
})#"_json);
|
||||
}
|
||||
|
||||
TEST(Config, toJSONOnNonEmptyConfigWithExperimentalSetting) {
|
||||
using nlohmann::literals::operator "" _json;
|
||||
Config config;
|
||||
Setting<std::string> setting{
|
||||
&config,
|
||||
"",
|
||||
"name-of-the-setting",
|
||||
"description",
|
||||
{},
|
||||
true,
|
||||
Xp::Flakes,
|
||||
};
|
||||
setting.assign("value");
|
||||
|
||||
ASSERT_EQ(config.toJSON(),
|
||||
R"#({
|
||||
"name-of-the-setting": {
|
||||
"aliases": [],
|
||||
"defaultValue": "",
|
||||
"description": "description\n",
|
||||
"documentDefault": true,
|
||||
"value": "value",
|
||||
"experimentalFeature": "flakes"
|
||||
}
|
||||
})#"_json);
|
||||
}
|
||||
|
||||
TEST(Config, setSettingAlias) {
|
||||
Config config;
|
||||
Setting<std::string> setting{&config, "", "some-int", "best number", { "another-int" }};
|
||||
ASSERT_TRUE(config.set("some-int", "1"));
|
||||
ASSERT_EQ(setting.get(), "1");
|
||||
ASSERT_TRUE(config.set("another-int", "2"));
|
||||
ASSERT_EQ(setting.get(), "2");
|
||||
ASSERT_TRUE(config.set("some-int", "3"));
|
||||
ASSERT_EQ(setting.get(), "3");
|
||||
}
|
||||
|
||||
/* FIXME: The reapplyUnknownSettings method doesn't seem to do anything
|
||||
* useful (these days). Whenever we add a new setting to Config the
|
||||
* unknown settings are always considered. In which case is this function
|
||||
* actually useful? Is there some way to register a Setting without calling
|
||||
* addSetting? */
|
||||
TEST(Config, DISABLED_reapplyUnknownSettings) {
|
||||
Config config;
|
||||
ASSERT_FALSE(config.set("name-of-the-setting", "unknownvalue"));
|
||||
Setting<std::string> setting{&config, "default", "name-of-the-setting", "description"};
|
||||
ASSERT_EQ(setting.get(), "default");
|
||||
config.reapplyUnknownSettings();
|
||||
ASSERT_EQ(setting.get(), "unknownvalue");
|
||||
}
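
// A minimal, hypothetical sketch (added commentary, not part of the original
// test file) of the behaviour the FIXME above alludes to: if registering a
// Setting already applies a previously recorded unknown value, an explicit
// reapplyUnknownSettings() call is redundant. The test name and the expected
// value are illustrative assumptions, not verified behaviour.
TEST(Config, DISABLED_unknownSettingAppliedOnRegistration) {
    Config config;
    // applyConfig() records "name-of-the-setting" as an unknown setting.
    config.applyConfig("name-of-the-setting = late-value");
    // Registering the setting afterwards is assumed to pick up the recorded
    // value without a reapplyUnknownSettings() call.
    Setting<std::string> setting{&config, "default", "name-of-the-setting", "description"};
    ASSERT_EQ(setting.get(), "late-value");
}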
|
||||
|
||||
TEST(Config, applyConfigEmpty) {
|
||||
Config config;
|
||||
std::map<std::string, Config::SettingInfo> settings;
|
||||
config.applyConfig("");
|
||||
config.getSettings(settings);
|
||||
ASSERT_TRUE(settings.empty());
|
||||
}
|
||||
|
||||
TEST(Config, applyConfigEmptyWithComment) {
|
||||
Config config;
|
||||
std::map<std::string, Config::SettingInfo> settings;
|
||||
config.applyConfig("# just a comment");
|
||||
config.getSettings(settings);
|
||||
ASSERT_TRUE(settings.empty());
|
||||
}
|
||||
|
||||
TEST(Config, applyConfigAssignment) {
|
||||
Config config;
|
||||
std::map<std::string, Config::SettingInfo> settings;
|
||||
Setting<std::string> setting{&config, "", "name-of-the-setting", "description"};
|
||||
config.applyConfig(
|
||||
"name-of-the-setting = value-from-file #useful comment\n"
|
||||
"# name-of-the-setting = foo\n"
|
||||
);
|
||||
config.getSettings(settings);
|
||||
ASSERT_FALSE(settings.empty());
|
||||
ASSERT_EQ(settings["name-of-the-setting"].value, "value-from-file");
|
||||
}
|
||||
|
||||
TEST(Config, applyConfigWithReassignedSetting) {
|
||||
Config config;
|
||||
std::map<std::string, Config::SettingInfo> settings;
|
||||
Setting<std::string> setting{&config, "", "name-of-the-setting", "description"};
|
||||
config.applyConfig(
|
||||
"name-of-the-setting = first-value\n"
|
||||
"name-of-the-setting = second-value\n"
|
||||
);
|
||||
config.getSettings(settings);
|
||||
ASSERT_FALSE(settings.empty());
|
||||
ASSERT_EQ(settings["name-of-the-setting"].value, "second-value");
|
||||
}
|
||||
|
||||
TEST(Config, applyConfigFailsOnMissingIncludes) {
|
||||
Config config;
|
||||
std::map<std::string, Config::SettingInfo> settings;
|
||||
Setting<std::string> setting{&config, "", "name-of-the-setting", "description"};
|
||||
|
||||
ASSERT_THROW(config.applyConfig(
|
||||
"name-of-the-setting = value-from-file\n"
|
||||
"# name-of-the-setting = foo\n"
|
||||
"include /nix/store/does/not/exist.nix"
|
||||
), Error);
|
||||
}
|
||||
|
||||
TEST(Config, applyConfigInvalidThrows) {
|
||||
Config config;
|
||||
ASSERT_THROW(config.applyConfig("value == key"), UsageError);
|
||||
ASSERT_THROW(config.applyConfig("value "), UsageError);
|
||||
}
|
||||
}
@ -1,236 +0,0 @@
#include <gtest/gtest.h>
|
||||
|
||||
#include "git.hh"
|
||||
#include "memory-source-accessor.hh"
|
||||
|
||||
#include "tests/characterization.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
using namespace git;
|
||||
|
||||
class GitTest : public CharacterizationTest
|
||||
{
|
||||
Path unitTestData = getUnitTestData() + "/libutil/git";
|
||||
|
||||
public:
|
||||
|
||||
Path goldenMaster(std::string_view testStem) const override {
|
||||
return unitTestData + "/" + testStem;
|
||||
}
|
||||
|
||||
/**
|
||||
* We set these in tests rather than the regular globals so we don't have
|
||||
* to worry about race conditions if the tests run concurrently.
|
||||
*/
|
||||
ExperimentalFeatureSettings mockXpSettings;
|
||||
|
||||
private:
|
||||
|
||||
void SetUp() override
|
||||
{
|
||||
mockXpSettings.set("experimental-features", "git-hashing");
|
||||
}
|
||||
};
|
||||
|
||||
TEST(GitMode, gitMode_directory) {
|
||||
Mode m = Mode::Directory;
|
||||
RawMode r = 0040000;
|
||||
ASSERT_EQ(static_cast<RawMode>(m), r);
|
||||
ASSERT_EQ(decodeMode(r), std::optional { m });
|
||||
};
|
||||
|
||||
TEST(GitMode, gitMode_executable) {
|
||||
Mode m = Mode::Executable;
|
||||
RawMode r = 0100755;
|
||||
ASSERT_EQ(static_cast<RawMode>(m), r);
|
||||
ASSERT_EQ(decodeMode(r), std::optional { m });
|
||||
};
|
||||
|
||||
TEST(GitMode, gitMode_regular) {
|
||||
Mode m = Mode::Regular;
|
||||
RawMode r = 0100644;
|
||||
ASSERT_EQ(static_cast<RawMode>(m), r);
|
||||
ASSERT_EQ(decodeMode(r), std::optional { m });
|
||||
};
|
||||
|
||||
TEST(GitMode, gitMode_symlink) {
|
||||
Mode m = Mode::Symlink;
|
||||
RawMode r = 0120000;
|
||||
ASSERT_EQ(static_cast<RawMode>(m), r);
|
||||
ASSERT_EQ(decodeMode(r), std::optional { m });
|
||||
};
|
||||
|
||||
TEST_F(GitTest, blob_read) {
|
||||
readTest("hello-world-blob.bin", [&](const auto & encoded) {
|
||||
StringSource in { encoded };
|
||||
StringSink out;
|
||||
RegularFileSink out2 { out };
|
||||
parse(out2, "", in, [](auto &, auto) {}, mockXpSettings);
|
||||
|
||||
auto expected = readFile(goldenMaster("hello-world.bin"));
|
||||
|
||||
ASSERT_EQ(out.s, expected);
|
||||
});
|
||||
}
|
||||
|
||||
TEST_F(GitTest, blob_write) {
|
||||
writeTest("hello-world-blob.bin", [&]() {
|
||||
auto decoded = readFile(goldenMaster("hello-world.bin"));
|
||||
StringSink s;
|
||||
dumpBlobPrefix(decoded.size(), s, mockXpSettings);
|
||||
s(decoded);
|
||||
return s.s;
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* This data is for "shallow" tree tests. However, we use "real" hashes
|
||||
* so that we can check our test data in the corresponding functional
|
||||
* test (`git-hashing/unit-test-data`).
|
||||
*/
|
||||
const static Tree tree = {
|
||||
{
|
||||
"Foo",
|
||||
{
|
||||
.mode = Mode::Regular,
|
||||
// hello world with special chars from above
|
||||
.hash = Hash::parseAny("63ddb340119baf8492d2da53af47e8c7cfcd5eb2", htSHA1),
|
||||
},
|
||||
},
|
||||
{
|
||||
"bAr",
|
||||
{
|
||||
.mode = Mode::Executable,
|
||||
// ditto
|
||||
.hash = Hash::parseAny("63ddb340119baf8492d2da53af47e8c7cfcd5eb2", htSHA1),
|
||||
},
|
||||
},
|
||||
{
|
||||
"baZ/",
|
||||
{
|
||||
.mode = Mode::Directory,
|
||||
// Empty directory hash
|
||||
.hash = Hash::parseAny("4b825dc642cb6eb9a060e54bf8d69288fbee4904", htSHA1),
|
||||
},
|
||||
},
|
||||
};
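
// Note (added commentary, not from the original file): the "tree.bin" golden
// master used below is assumed to be in the ordinary git tree encoding, i.e.
// for each entry an octal mode and name ("100644 Foo\0", "100755 bAr\0",
// "40000 baZ\0") followed by the entry's 20 raw SHA-1 hash bytes. This is an
// informal reminder of the format, not a specification.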
|
||||
|
||||
TEST_F(GitTest, tree_read) {
|
||||
readTest("tree.bin", [&](const auto & encoded) {
|
||||
StringSource in { encoded };
|
||||
NullParseSink out;
|
||||
Tree got;
|
||||
parse(out, "", in, [&](auto & name, auto entry) {
|
||||
auto name2 = name;
|
||||
if (entry.mode == Mode::Directory)
|
||||
name2 += '/';
|
||||
got.insert_or_assign(name2, std::move(entry));
|
||||
}, mockXpSettings);
|
||||
|
||||
ASSERT_EQ(got, tree);
|
||||
});
|
||||
}
|
||||
|
||||
TEST_F(GitTest, tree_write) {
|
||||
writeTest("tree.bin", [&]() {
|
||||
StringSink s;
|
||||
dumpTree(tree, s, mockXpSettings);
|
||||
return s.s;
|
||||
});
|
||||
}
|
||||
|
||||
TEST_F(GitTest, both_roundtrip) {
|
||||
using File = MemorySourceAccessor::File;
|
||||
|
||||
MemorySourceAccessor files;
|
||||
files.root = File::Directory {
|
||||
.contents {
|
||||
{
|
||||
"foo",
|
||||
File::Regular {
|
||||
.contents = "hello\n\0\n\tworld!",
|
||||
},
|
||||
},
|
||||
{
|
||||
"bar",
|
||||
File::Directory {
|
||||
.contents = {
|
||||
{
|
||||
"baz",
|
||||
File::Regular {
|
||||
.executable = true,
|
||||
.contents = "good day,\n\0\n\tworld!",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
};
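
// `cas` acts as a tiny in-memory object store, mapping each git object hash
// to its serialized bytes, so that the dump below can be parsed back out of
// it further down.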
|
||||
|
||||
std::map<Hash, std::string> cas;
|
||||
|
||||
std::function<DumpHook> dumpHook;
|
||||
dumpHook = [&](const CanonPath & path) {
|
||||
StringSink s;
|
||||
HashSink hashSink { htSHA1 };
|
||||
TeeSink s2 { s, hashSink };
|
||||
auto mode = dump(
|
||||
files, path, s2, dumpHook,
|
||||
defaultPathFilter, mockXpSettings);
|
||||
auto hash = hashSink.finish().first;
|
||||
cas.insert_or_assign(hash, std::move(s.s));
|
||||
return TreeEntry {
|
||||
.mode = mode,
|
||||
.hash = hash,
|
||||
};
|
||||
};
|
||||
|
||||
auto root = dumpHook(CanonPath::root);
|
||||
|
||||
MemorySourceAccessor files2;
|
||||
|
||||
MemorySink sinkFiles2 { files2 };
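
// Walk the objects recorded in `cas` back into `files2`, starting from the
// root tree hash, to check that dump and parse round-trip.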
|
||||
|
||||
std::function<void(const Path, const Hash &)> mkSinkHook;
|
||||
mkSinkHook = [&](const Path prefix, const Hash & hash) {
|
||||
StringSource in { cas[hash] };
|
||||
parse(sinkFiles2, prefix, in, [&](const Path & name, const auto & entry) {
|
||||
mkSinkHook(prefix + "/" + name, entry.hash);
|
||||
}, mockXpSettings);
|
||||
};
|
||||
|
||||
mkSinkHook("", root.hash);
|
||||
|
||||
ASSERT_EQ(files, files2);
|
||||
}
|
||||
|
||||
TEST(GitLsRemote, parseSymrefLineWithReference) {
|
||||
auto line = "ref: refs/head/main HEAD";
|
||||
auto res = parseLsRemoteLine(line);
|
||||
ASSERT_TRUE(res.has_value());
|
||||
ASSERT_EQ(res->kind, LsRemoteRefLine::Kind::Symbolic);
|
||||
ASSERT_EQ(res->target, "refs/head/main");
|
||||
ASSERT_EQ(res->reference, "HEAD");
|
||||
}
|
||||
|
||||
TEST(GitLsRemote, parseSymrefLineWithNoReference) {
|
||||
auto line = "ref: refs/head/main";
|
||||
auto res = parseLsRemoteLine(line);
|
||||
ASSERT_TRUE(res.has_value());
|
||||
ASSERT_EQ(res->kind, LsRemoteRefLine::Kind::Symbolic);
|
||||
ASSERT_EQ(res->target, "refs/head/main");
|
||||
ASSERT_EQ(res->reference, std::nullopt);
|
||||
}
|
||||
|
||||
TEST(GitLsRemote, parseObjectRefLine) {
|
||||
auto line = "abc123 refs/head/main";
|
||||
auto res = parseLsRemoteLine(line);
|
||||
ASSERT_TRUE(res.has_value());
|
||||
ASSERT_EQ(res->kind, LsRemoteRefLine::Kind::Object);
|
||||
ASSERT_EQ(res->target, "abc123");
|
||||
ASSERT_EQ(res->reference, "refs/head/main");
|
||||
}
|
||||
|
||||
}
@ -1,110 +0,0 @@
#include <regex>
|
||||
|
||||
#include <nlohmann/json.hpp>
|
||||
#include <gtest/gtest.h>
|
||||
#include <rapidcheck/gtest.h>
|
||||
|
||||
#include <hash.hh>
|
||||
|
||||
#include "tests/hash.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* hashString
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
TEST(hashString, testKnownMD5Hashes1) {
|
||||
// values taken from: https://tools.ietf.org/html/rfc1321
|
||||
auto s1 = "";
|
||||
auto hash = hashString(HashType::htMD5, s1);
|
||||
ASSERT_EQ(hash.to_string(HashFormat::Base16, true), "md5:d41d8cd98f00b204e9800998ecf8427e");
|
||||
}
|
||||
|
||||
TEST(hashString, testKnownMD5Hashes2) {
|
||||
// values taken from: https://tools.ietf.org/html/rfc1321
|
||||
auto s2 = "abc";
|
||||
auto hash = hashString(HashType::htMD5, s2);
|
||||
ASSERT_EQ(hash.to_string(HashFormat::Base16, true), "md5:900150983cd24fb0d6963f7d28e17f72");
|
||||
}
|
||||
|
||||
TEST(hashString, testKnownSHA1Hashes1) {
|
||||
// values taken from: https://tools.ietf.org/html/rfc3174
|
||||
auto s = "abc";
|
||||
auto hash = hashString(HashType::htSHA1, s);
|
||||
ASSERT_EQ(hash.to_string(HashFormat::Base16, true),"sha1:a9993e364706816aba3e25717850c26c9cd0d89d");
|
||||
}
|
||||
|
||||
TEST(hashString, testKnownSHA1Hashes2) {
|
||||
// values taken from: https://tools.ietf.org/html/rfc3174
|
||||
auto s = "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq";
|
||||
auto hash = hashString(HashType::htSHA1, s);
|
||||
ASSERT_EQ(hash.to_string(HashFormat::Base16, true),"sha1:84983e441c3bd26ebaae4aa1f95129e5e54670f1");
|
||||
}
|
||||
|
||||
TEST(hashString, testKnownSHA256Hashes1) {
|
||||
// values taken from: https://tools.ietf.org/html/rfc4634
|
||||
auto s = "abc";
|
||||
|
||||
auto hash = hashString(HashType::htSHA256, s);
|
||||
ASSERT_EQ(hash.to_string(HashFormat::Base16, true),
|
||||
"sha256:ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad");
|
||||
}
|
||||
|
||||
TEST(hashString, testKnownSHA256Hashes2) {
|
||||
// values taken from: https://tools.ietf.org/html/rfc4634
|
||||
auto s = "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq";
|
||||
auto hash = hashString(HashType::htSHA256, s);
|
||||
ASSERT_EQ(hash.to_string(HashFormat::Base16, true),
|
||||
"sha256:248d6a61d20638b8e5c026930c3e6039a33ce45964ff2167f6ecedd419db06c1");
|
||||
}
|
||||
|
||||
TEST(hashString, testKnownSHA512Hashes1) {
|
||||
// values taken from: https://tools.ietf.org/html/rfc4634
|
||||
auto s = "abc";
|
||||
auto hash = hashString(HashType::htSHA512, s);
|
||||
ASSERT_EQ(hash.to_string(HashFormat::Base16, true),
|
||||
"sha512:ddaf35a193617abacc417349ae20413112e6fa4e89a9"
|
||||
"7ea20a9eeee64b55d39a2192992a274fc1a836ba3c23a3feebbd"
|
||||
"454d4423643ce80e2a9ac94fa54ca49f");
|
||||
}
|
||||
|
||||
TEST(hashString, testKnownSHA512Hashes2) {
|
||||
// values taken from: https://tools.ietf.org/html/rfc4634
|
||||
auto s = "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu";
|
||||
|
||||
auto hash = hashString(HashType::htSHA512, s);
|
||||
ASSERT_EQ(hash.to_string(HashFormat::Base16, true),
|
||||
"sha512:8e959b75dae313da8cf4f72814fc143f8f7779c6eb9f7fa1"
|
||||
"7299aeadb6889018501d289e4900f7e4331b99dec4b5433a"
|
||||
"c7d329eeb6dd26545e96e55b874be909");
|
||||
}
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* parseHashFormat, parseHashFormatOpt, printHashFormat
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
TEST(hashFormat, testRoundTripPrintParse) {
|
||||
for (const HashFormat hashFormat: { HashFormat::Base64, HashFormat::Base32, HashFormat::Base16, HashFormat::SRI}) {
|
||||
ASSERT_EQ(parseHashFormat(printHashFormat(hashFormat)), hashFormat);
|
||||
ASSERT_EQ(*parseHashFormatOpt(printHashFormat(hashFormat)), hashFormat);
|
||||
}
|
||||
}
|
||||
|
||||
TEST(hashFormat, testParseHashFormatOptException) {
|
||||
ASSERT_EQ(parseHashFormatOpt("sha0042"), std::nullopt);
|
||||
}
|
||||
}
|
||||
|
||||
namespace rc {
|
||||
using namespace nix;
|
||||
|
||||
Gen<Hash> Arbitrary<Hash>::arbitrary()
|
||||
{
|
||||
Hash hash(htSHA1);
|
||||
for (size_t i = 0; i < hash.hashSize; ++i)
|
||||
hash.hash[i] = *gen::arbitrary<uint8_t>();
|
||||
return gen::just(hash);
|
||||
}
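
// Added note (not part of the original file): this Arbitrary<Hash> instance
// lets rapidcheck generate random SHA-1 hashes for property tests. A
// hypothetical property using it might look like this (the names here are
// illustrative assumptions, not existing tests):
//
//     RC_GTEST_PROP(Hash, prop_base16_round_trip, (const Hash & h))
//     {
//         RC_ASSERT(Hash::parseAny(h.to_string(HashFormat::Base16, true), h.type) == h);
//     }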
|
||||
|
||||
}
@ -1,16 +0,0 @@
#pragma once
|
||||
///@file
|
||||
|
||||
#include <rapidcheck/gen/Arbitrary.h>
|
||||
|
||||
#include <hash.hh>
|
||||
|
||||
namespace rc {
|
||||
using namespace nix;
|
||||
|
||||
template<>
|
||||
struct Arbitrary<Hash> {
|
||||
static Gen<Hash> arbitrary();
|
||||
};
|
||||
|
||||
}
@ -1,66 +0,0 @@
#include "hilite.hh"
|
||||
|
||||
#include <gtest/gtest.h>
|
||||
|
||||
namespace nix {
|
||||
/* ----------- tests for fmt.hh -------------------------------------------------*/
|
||||
|
||||
TEST(hiliteMatches, noHighlight) {
|
||||
ASSERT_STREQ(hiliteMatches("Hello, world!", std::vector<std::smatch>(), "(", ")").c_str(), "Hello, world!");
|
||||
}
|
||||
|
||||
TEST(hiliteMatches, simpleHighlight) {
|
||||
std::string str = "Hello, world!";
|
||||
std::regex re = std::regex("world");
|
||||
auto matches = std::vector(std::sregex_iterator(str.begin(), str.end(), re), std::sregex_iterator());
|
||||
ASSERT_STREQ(
|
||||
hiliteMatches(str, matches, "(", ")").c_str(),
|
||||
"Hello, (world)!"
|
||||
);
|
||||
}
|
||||
|
||||
TEST(hiliteMatches, multipleMatches) {
|
||||
std::string str = "Hello, world, world, world, world, world, world, Hello!";
|
||||
std::regex re = std::regex("world");
|
||||
auto matches = std::vector(std::sregex_iterator(str.begin(), str.end(), re), std::sregex_iterator());
|
||||
ASSERT_STREQ(
|
||||
hiliteMatches(str, matches, "(", ")").c_str(),
|
||||
"Hello, (world), (world), (world), (world), (world), (world), Hello!"
|
||||
);
|
||||
}
|
||||
|
||||
TEST(hiliteMatches, overlappingMatches) {
|
||||
std::string str = "world, Hello, world, Hello, world, Hello, world, Hello, world!";
|
||||
std::regex re = std::regex("Hello, world");
|
||||
std::regex re2 = std::regex("world, Hello");
|
||||
auto v = std::vector(std::sregex_iterator(str.begin(), str.end(), re), std::sregex_iterator());
|
||||
for(auto it = std::sregex_iterator(str.begin(), str.end(), re2); it != std::sregex_iterator(); ++it) {
|
||||
v.push_back(*it);
|
||||
}
|
||||
ASSERT_STREQ(
|
||||
hiliteMatches(str, v, "(", ")").c_str(),
|
||||
"(world, Hello, world, Hello, world, Hello, world, Hello, world)!"
|
||||
);
|
||||
}
|
||||
|
||||
TEST(hiliteMatches, complexOverlappingMatches) {
|
||||
std::string str = "legacyPackages.x86_64-linux.git-crypt";
|
||||
std::vector regexes = {
|
||||
std::regex("t-cry"),
|
||||
std::regex("ux\\.git-cry"),
|
||||
std::regex("git-c"),
|
||||
std::regex("pt"),
|
||||
};
|
||||
std::vector<std::smatch> matches;
|
||||
for(auto regex : regexes)
|
||||
{
|
||||
for(auto it = std::sregex_iterator(str.begin(), str.end(), regex); it != std::sregex_iterator(); ++it) {
|
||||
matches.push_back(*it);
|
||||
}
|
||||
}
|
||||
ASSERT_STREQ(
|
||||
hiliteMatches(str, matches, "(", ")").c_str(),
|
||||
"legacyPackages.x86_64-lin(ux.git-crypt)"
|
||||
);
|
||||
}
|
||||
}
@ -1,41 +0,0 @@
check: libutil-tests-exe_RUN
|
||||
|
||||
programs += libutil-tests-exe
|
||||
|
||||
libutil-tests-exe_NAME = libnixutil-tests
|
||||
|
||||
libutil-tests-exe_DIR := $(d)
|
||||
|
||||
ifeq ($(INSTALL_UNIT_TESTS), yes)
|
||||
libutil-tests-exe_INSTALL_DIR := $(checkbindir)
|
||||
else
|
||||
libutil-tests-exe_INSTALL_DIR :=
|
||||
endif
|
||||
|
||||
libutil-tests-exe_LIBS = libutil-tests
|
||||
|
||||
libutil-tests-exe_LDFLAGS := $(GTEST_LIBS)
|
||||
|
||||
libraries += libutil-tests
|
||||
|
||||
libutil-tests_NAME = libnixutil-tests
|
||||
|
||||
libutil-tests_DIR := $(d)
|
||||
|
||||
ifeq ($(INSTALL_UNIT_TESTS), yes)
|
||||
libutil-tests_INSTALL_DIR := $(checklibdir)
|
||||
else
|
||||
libutil-tests_INSTALL_DIR :=
|
||||
endif
|
||||
|
||||
libutil-tests_SOURCES := $(wildcard $(d)/*.cc)
|
||||
|
||||
libutil-tests_CXXFLAGS += -I src/libutil
|
||||
|
||||
libutil-tests_LIBS = libutil
|
||||
|
||||
libutil-tests_LDFLAGS := -lrapidcheck $(GTEST_LIBS)
|
||||
|
||||
check: unit-test-data/libutil/git/check-data.sh.test
|
||||
|
||||
$(eval $(call run-test,unit-test-data/libutil/git/check-data.sh))
@ -1,369 +0,0 @@
#if 0
|
||||
|
||||
#include "logging.hh"
|
||||
#include "nixexpr.hh"
|
||||
#include <fstream>
|
||||
|
||||
#include <gtest/gtest.h>
|
||||
|
||||
namespace nix {
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* logEI
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
const char *test_file =
|
||||
"previous line of code\n"
|
||||
"this is the problem line of code\n"
|
||||
"next line of code\n";
|
||||
const char *one_liner =
|
||||
"this is the other problem line of code";
|
||||
|
||||
TEST(logEI, capturesBasicProperties) {
|
||||
|
||||
MakeError(TestError, Error);
|
||||
ErrorInfo::programName = std::optional("error-unit-test");
|
||||
|
||||
try {
|
||||
throw TestError("an error for testing purposes");
|
||||
} catch (Error &e) {
|
||||
testing::internal::CaptureStderr();
|
||||
logger->logEI(e.info());
|
||||
auto str = testing::internal::GetCapturedStderr();
|
||||
|
||||
ASSERT_STREQ(str.c_str(),"\x1B[31;1merror:\x1B[0m\x1B[34;1m --- TestError --- error-unit-test\x1B[0m\nan error for testing purposes\n");
|
||||
}
|
||||
}
|
||||
|
||||
TEST(logEI, jsonOutput) {
|
||||
SymbolTable testTable;
|
||||
auto problem_file = testTable.create("random.nix");
|
||||
testing::internal::CaptureStderr();
|
||||
|
||||
makeJSONLogger(*logger)->logEI({
|
||||
.name = "error name",
|
||||
.msg = hintfmt("this hint has %1% templated %2%!!",
|
||||
"yellow",
|
||||
"values"),
|
||||
.errPos = Pos(foFile, problem_file, 02, 13)
|
||||
});
|
||||
|
||||
auto str = testing::internal::GetCapturedStderr();
|
||||
ASSERT_STREQ(str.c_str(), "@nix {\"action\":\"msg\",\"column\":13,\"file\":\"random.nix\",\"level\":0,\"line\":2,\"msg\":\"\\u001b[31;1merror:\\u001b[0m\\u001b[34;1m --- error name --- error-unit-test\\u001b[0m\\n\\u001b[34;1mat: \\u001b[33;1m(2:13)\\u001b[34;1m in file: \\u001b[0mrandom.nix\\n\\nerror without any code lines.\\n\\nthis hint has \\u001b[33;1myellow\\u001b[0m templated \\u001b[33;1mvalues\\u001b[0m!!\",\"raw_msg\":\"this hint has \\u001b[33;1myellow\\u001b[0m templated \\u001b[33;1mvalues\\u001b[0m!!\"}\n");
|
||||
}
|
||||
|
||||
TEST(logEI, appendingHintsToPreviousError) {
|
||||
|
||||
MakeError(TestError, Error);
|
||||
ErrorInfo::programName = std::optional("error-unit-test");
|
||||
|
||||
try {
|
||||
auto e = Error("initial error");
|
||||
throw TestError(e.info());
|
||||
} catch (Error &e) {
|
||||
ErrorInfo ei = e.info();
|
||||
ei.msg = hintfmt("%s; subsequent error message.", normaltxt(e.info().msg.str()));
|
||||
|
||||
testing::internal::CaptureStderr();
|
||||
logger->logEI(ei);
|
||||
auto str = testing::internal::GetCapturedStderr();
|
||||
|
||||
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- TestError --- error-unit-test\x1B[0m\ninitial error; subsequent error message.\n");
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
TEST(logEI, picksUpSysErrorExitCode) {
|
||||
|
||||
MakeError(TestError, Error);
|
||||
ErrorInfo::programName = std::optional("error-unit-test");
|
||||
|
||||
try {
|
||||
auto x = readFile(-1);
|
||||
}
|
||||
catch (SysError &e) {
|
||||
testing::internal::CaptureStderr();
|
||||
logError(e.info());
|
||||
auto str = testing::internal::GetCapturedStderr();
|
||||
|
||||
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- SysError --- error-unit-test\x1B[0m\nstatting file: \x1B[33;1mBad file descriptor\x1B[0m\n");
|
||||
}
|
||||
}
|
||||
|
||||
TEST(logEI, loggingErrorOnInfoLevel) {
|
||||
testing::internal::CaptureStderr();
|
||||
|
||||
logger->logEI({ .level = lvlInfo,
|
||||
.name = "Info name",
|
||||
});
|
||||
|
||||
auto str = testing::internal::GetCapturedStderr();
|
||||
ASSERT_STREQ(str.c_str(), "\x1B[32;1minfo:\x1B[0m\x1B[34;1m --- Info name --- error-unit-test\x1B[0m\nInfo description\n");
|
||||
}
|
||||
|
||||
TEST(logEI, loggingErrorOnTalkativeLevel) {
|
||||
verbosity = lvlTalkative;
|
||||
|
||||
testing::internal::CaptureStderr();
|
||||
|
||||
logger->logEI({ .level = lvlTalkative,
|
||||
.name = "Talkative name",
|
||||
});
|
||||
|
||||
auto str = testing::internal::GetCapturedStderr();
|
||||
ASSERT_STREQ(str.c_str(), "\x1B[32;1mtalk:\x1B[0m\x1B[34;1m --- Talkative name --- error-unit-test\x1B[0m\nTalkative description\n");
|
||||
}
|
||||
|
||||
TEST(logEI, loggingErrorOnChattyLevel) {
|
||||
verbosity = lvlChatty;
|
||||
|
||||
testing::internal::CaptureStderr();
|
||||
|
||||
logger->logEI({ .level = lvlChatty,
|
||||
.name = "Chatty name",
|
||||
});
|
||||
|
||||
auto str = testing::internal::GetCapturedStderr();
|
||||
ASSERT_STREQ(str.c_str(), "\x1B[32;1mchat:\x1B[0m\x1B[34;1m --- Chatty name --- error-unit-test\x1B[0m\nTalkative description\n");
|
||||
}
|
||||
|
||||
TEST(logEI, loggingErrorOnDebugLevel) {
|
||||
verbosity = lvlDebug;
|
||||
|
||||
testing::internal::CaptureStderr();
|
||||
|
||||
logger->logEI({ .level = lvlDebug,
|
||||
.name = "Debug name",
|
||||
});
|
||||
|
||||
auto str = testing::internal::GetCapturedStderr();
|
||||
ASSERT_STREQ(str.c_str(), "\x1B[33;1mdebug:\x1B[0m\x1B[34;1m --- Debug name --- error-unit-test\x1B[0m\nDebug description\n");
|
||||
}
|
||||
|
||||
TEST(logEI, loggingErrorOnVomitLevel) {
|
||||
verbosity = lvlVomit;
|
||||
|
||||
testing::internal::CaptureStderr();
|
||||
|
||||
logger->logEI({ .level = lvlVomit,
|
||||
.name = "Vomit name",
|
||||
});
|
||||
|
||||
auto str = testing::internal::GetCapturedStderr();
|
||||
ASSERT_STREQ(str.c_str(), "\x1B[32;1mvomit:\x1B[0m\x1B[34;1m --- Vomit name --- error-unit-test\x1B[0m\nVomit description\n");
|
||||
}
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* logError
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
TEST(logError, logErrorWithoutHintOrCode) {
|
||||
testing::internal::CaptureStderr();
|
||||
|
||||
logError({
|
||||
.name = "name",
|
||||
});
|
||||
|
||||
auto str = testing::internal::GetCapturedStderr();
|
||||
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- name --- error-unit-test\x1B[0m\nerror description\n");
|
||||
}
|
||||
|
||||
TEST(logError, logErrorWithPreviousAndNextLinesOfCode) {
|
||||
SymbolTable testTable;
|
||||
auto problem_file = testTable.create(test_file);
|
||||
|
||||
testing::internal::CaptureStderr();
|
||||
|
||||
logError({
|
||||
.name = "error name",
|
||||
.msg = hintfmt("this hint has %1% templated %2%!!",
|
||||
"yellow",
|
||||
"values"),
|
||||
.errPos = Pos(foString, problem_file, 02, 13),
|
||||
});
|
||||
|
||||
auto str = testing::internal::GetCapturedStderr();
|
||||
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- error name --- error-unit-test\x1B[0m\n\x1B[34;1mat: \x1B[33;1m(2:13)\x1B[34;1m from string\x1B[0m\n\nerror with code lines\n\n 1| previous line of code\n 2| this is the problem line of code\n | \x1B[31;1m^\x1B[0m\n 3| next line of code\n\nthis hint has \x1B[33;1myellow\x1B[0m templated \x1B[33;1mvalues\x1B[0m!!\n");
|
||||
}
|
||||
|
||||
TEST(logError, logErrorWithInvalidFile) {
|
||||
SymbolTable testTable;
|
||||
auto problem_file = testTable.create("invalid filename");
|
||||
testing::internal::CaptureStderr();
|
||||
|
||||
logError({
|
||||
.name = "error name",
|
||||
.msg = hintfmt("this hint has %1% templated %2%!!",
|
||||
"yellow",
|
||||
"values"),
|
||||
.errPos = Pos(foFile, problem_file, 02, 13)
|
||||
});
|
||||
|
||||
auto str = testing::internal::GetCapturedStderr();
|
||||
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- error name --- error-unit-test\x1B[0m\n\x1B[34;1mat: \x1B[33;1m(2:13)\x1B[34;1m in file: \x1B[0minvalid filename\n\nerror without any code lines.\n\nthis hint has \x1B[33;1myellow\x1B[0m templated \x1B[33;1mvalues\x1B[0m!!\n");
|
||||
}
|
||||
|
||||
TEST(logError, logErrorWithOnlyHintAndName) {
|
||||
testing::internal::CaptureStderr();
|
||||
|
||||
logError({
|
||||
.name = "error name",
|
||||
.msg = hintfmt("hint %1%", "only"),
|
||||
});
|
||||
|
||||
auto str = testing::internal::GetCapturedStderr();
|
||||
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- error name --- error-unit-test\x1B[0m\nhint \x1B[33;1monly\x1B[0m\n");
|
||||
|
||||
}
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* logWarning
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
TEST(logWarning, logWarningWithNameDescriptionAndHint) {
|
||||
testing::internal::CaptureStderr();
|
||||
|
||||
logWarning({
|
||||
.name = "name",
|
||||
.msg = hintfmt("there was a %1%", "warning"),
|
||||
});
|
||||
|
||||
auto str = testing::internal::GetCapturedStderr();
|
||||
ASSERT_STREQ(str.c_str(), "\x1B[33;1mwarning:\x1B[0m\x1B[34;1m --- name --- error-unit-test\x1B[0m\nwarning description\n\nthere was a \x1B[33;1mwarning\x1B[0m\n");
|
||||
}
|
||||
|
||||
TEST(logWarning, logWarningWithFileLineNumAndCode) {
|
||||
|
||||
SymbolTable testTable;
|
||||
auto problem_file = testTable.create(test_file);
|
||||
|
||||
testing::internal::CaptureStderr();
|
||||
|
||||
logWarning({
|
||||
.name = "warning name",
|
||||
.msg = hintfmt("this hint has %1% templated %2%!!",
|
||||
"yellow",
|
||||
"values"),
|
||||
.errPos = Pos(foStdin, problem_file, 2, 13),
|
||||
});
|
||||
|
||||
|
||||
auto str = testing::internal::GetCapturedStderr();
|
||||
ASSERT_STREQ(str.c_str(), "\x1B[33;1mwarning:\x1B[0m\x1B[34;1m --- warning name --- error-unit-test\x1B[0m\n\x1B[34;1mat: \x1B[33;1m(2:13)\x1B[34;1m from stdin\x1B[0m\n\nwarning description\n\n 1| previous line of code\n 2| this is the problem line of code\n | \x1B[31;1m^\x1B[0m\n 3| next line of code\n\nthis hint has \x1B[33;1myellow\x1B[0m templated \x1B[33;1mvalues\x1B[0m!!\n");
|
||||
}
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* traces
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
TEST(addTrace, showTracesWithShowTrace) {
|
||||
SymbolTable testTable;
|
||||
auto problem_file = testTable.create(test_file);
|
||||
auto oneliner_file = testTable.create(one_liner);
|
||||
auto invalidfilename = testTable.create("invalid filename");
|
||||
|
||||
auto e = AssertionError(ErrorInfo {
|
||||
.name = "wat",
|
||||
.msg = hintfmt("it has been %1% days since our last error", "zero"),
|
||||
.errPos = Pos(foString, problem_file, 2, 13),
|
||||
});
|
||||
|
||||
e.addTrace(Pos(foStdin, oneliner_file, 1, 19), "while trying to compute %1%", 42);
|
||||
e.addTrace(std::nullopt, "while doing something without a %1%", "pos");
|
||||
e.addTrace(Pos(foFile, invalidfilename, 100, 1), "missing %s", "nix file");
|
||||
|
||||
testing::internal::CaptureStderr();
|
||||
|
||||
loggerSettings.showTrace.assign(true);
|
||||
|
||||
logError(e.info());
|
||||
|
||||
auto str = testing::internal::GetCapturedStderr();
|
||||
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- AssertionError --- error-unit-test\x1B[0m\n\x1B[34;1mat: \x1B[33;1m(2:13)\x1B[34;1m from string\x1B[0m\n\nshow-traces\n\n 1| previous line of code\n 2| this is the problem line of code\n | \x1B[31;1m^\x1B[0m\n 3| next line of code\n\nit has been \x1B[33;1mzero\x1B[0m days since our last error\n\x1B[34;1m---- show-trace ----\x1B[0m\n\x1B[34;1mtrace: \x1B[0mwhile trying to compute \x1B[33;1m42\x1B[0m\n\x1B[34;1mat: \x1B[33;1m(1:19)\x1B[34;1m from stdin\x1B[0m\n\n 1| this is the other problem line of code\n | \x1B[31;1m^\x1B[0m\n\n\x1B[34;1mtrace: \x1B[0mwhile doing something without a \x1B[33;1mpos\x1B[0m\n\x1B[34;1mtrace: \x1B[0mmissing \x1B[33;1mnix file\x1B[0m\n\x1B[34;1mat: \x1B[33;1m(100:1)\x1B[34;1m in file: \x1B[0minvalid filename\n");
|
||||
}
|
||||
|
||||
TEST(addTrace, hideTracesWithoutShowTrace) {
|
||||
SymbolTable testTable;
|
||||
auto problem_file = testTable.create(test_file);
|
||||
auto oneliner_file = testTable.create(one_liner);
|
||||
auto invalidfilename = testTable.create("invalid filename");
|
||||
|
||||
auto e = AssertionError(ErrorInfo {
|
||||
.name = "wat",
|
||||
.msg = hintfmt("it has been %1% days since our last error", "zero"),
|
||||
.errPos = Pos(foString, problem_file, 2, 13),
|
||||
});
|
||||
|
||||
e.addTrace(Pos(foStdin, oneliner_file, 1, 19), "while trying to compute %1%", 42);
|
||||
e.addTrace(std::nullopt, "while doing something without a %1%", "pos");
|
||||
e.addTrace(Pos(foFile, invalidfilename, 100, 1), "missing %s", "nix file");
|
||||
|
||||
testing::internal::CaptureStderr();
|
||||
|
||||
loggerSettings.showTrace.assign(false);
|
||||
|
||||
logError(e.info());
|
||||
|
||||
auto str = testing::internal::GetCapturedStderr();
|
||||
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- AssertionError --- error-unit-test\x1B[0m\n\x1B[34;1mat: \x1B[33;1m(2:13)\x1B[34;1m from string\x1B[0m\n\nhide traces\n\n 1| previous line of code\n 2| this is the problem line of code\n | \x1B[31;1m^\x1B[0m\n 3| next line of code\n\nit has been \x1B[33;1mzero\x1B[0m days since our last error\n");
|
||||
}
|
||||
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* hintfmt
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
TEST(hintfmt, percentStringWithoutArgs) {
|
||||
|
||||
const char *teststr = "this is 100%s correct!";
|
||||
|
||||
ASSERT_STREQ(
|
||||
hintfmt(teststr).str().c_str(),
|
||||
teststr);
|
||||
|
||||
}
|
||||
|
||||
TEST(hintfmt, fmtToHintfmt) {
|
||||
|
||||
ASSERT_STREQ(
|
||||
hintfmt(fmt("the color of this text is %1%", "not yellow")).str().c_str(),
"the color of this text is not yellow");
|
||||
|
||||
}
|
||||
|
||||
TEST(hintfmt, tooFewArguments) {
|
||||
|
||||
ASSERT_STREQ(
|
||||
hintfmt("only one arg %1% %2%", "fulfilled").str().c_str(),
|
||||
"only one arg " ANSI_WARNING "fulfilled" ANSI_NORMAL " ");
|
||||
|
||||
}
|
||||
|
||||
TEST(hintfmt, tooManyArguments) {
|
||||
|
||||
ASSERT_STREQ(
|
||||
hintfmt("what about this %1% %2%", "%3%", "one", "two").str().c_str(),
|
||||
"what about this " ANSI_WARNING "%3%" ANSI_NORMAL " " ANSI_YELLOW "one" ANSI_NORMAL);
|
||||
|
||||
}
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* ErrPos
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
TEST(errpos, invalidPos) {
|
||||
|
||||
// contains an invalid symbol, which we should not dereference!
|
||||
Pos invalid;
|
||||
|
||||
// constructing without access violation.
|
||||
ErrPos ep(invalid);
|
||||
|
||||
// assignment without access violation.
|
||||
ep = invalid;
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
#endif
@ -1,130 +0,0 @@
#include "lru-cache.hh"
|
||||
#include <gtest/gtest.h>
|
||||
|
||||
namespace nix {
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* size
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
TEST(LRUCache, sizeOfEmptyCacheIsZero) {
|
||||
LRUCache<std::string, std::string> c(10);
|
||||
ASSERT_EQ(c.size(), 0);
|
||||
}
|
||||
|
||||
TEST(LRUCache, sizeOfSingleElementCacheIsOne) {
|
||||
LRUCache<std::string, std::string> c(10);
|
||||
c.upsert("foo", "bar");
|
||||
ASSERT_EQ(c.size(), 1);
|
||||
}
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* upsert / get
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
TEST(LRUCache, getFromEmptyCache) {
|
||||
LRUCache<std::string, std::string> c(10);
|
||||
auto val = c.get("x");
|
||||
ASSERT_EQ(val.has_value(), false);
|
||||
}
|
||||
|
||||
TEST(LRUCache, getExistingValue) {
|
||||
LRUCache<std::string, std::string> c(10);
|
||||
c.upsert("foo", "bar");
|
||||
auto val = c.get("foo");
|
||||
ASSERT_EQ(val, "bar");
|
||||
}
|
||||
|
||||
TEST(LRUCache, getNonExistingValueFromNonEmptyCache) {
|
||||
LRUCache<std::string, std::string> c(10);
|
||||
c.upsert("foo", "bar");
|
||||
auto val = c.get("another");
|
||||
ASSERT_EQ(val.has_value(), false);
|
||||
}
|
||||
|
||||
TEST(LRUCache, upsertOnZeroCapacityCache) {
|
||||
LRUCache<std::string, std::string> c(0);
|
||||
c.upsert("foo", "bar");
|
||||
auto val = c.get("foo");
|
||||
ASSERT_EQ(val.has_value(), false);
|
||||
}
|
||||
|
||||
TEST(LRUCache, updateExistingValue) {
|
||||
LRUCache<std::string, std::string> c(1);
|
||||
c.upsert("foo", "bar");
|
||||
|
||||
auto val = c.get("foo");
|
||||
ASSERT_EQ(val.value_or("error"), "bar");
|
||||
ASSERT_EQ(c.size(), 1);
|
||||
|
||||
c.upsert("foo", "changed");
|
||||
val = c.get("foo");
|
||||
ASSERT_EQ(val.value_or("error"), "changed");
|
||||
ASSERT_EQ(c.size(), 1);
|
||||
}
|
||||
|
||||
TEST(LRUCache, overwriteOldestWhenCapacityIsReached) {
|
||||
LRUCache<std::string, std::string> c(3);
|
||||
c.upsert("one", "eins");
|
||||
c.upsert("two", "zwei");
|
||||
c.upsert("three", "drei");
|
||||
|
||||
ASSERT_EQ(c.size(), 3);
|
||||
ASSERT_EQ(c.get("one").value_or("error"), "eins");
|
||||
|
||||
// exceed capacity
|
||||
c.upsert("another", "whatever");
|
||||
|
||||
ASSERT_EQ(c.size(), 3);
|
||||
// Retrieving "one" above made it the most recently used entry, so "two" is
// now the oldest one and is the entry that gets evicted.
|
||||
ASSERT_EQ(c.get("two").has_value(), false);
|
||||
ASSERT_EQ(c.get("another").value(), "whatever");
|
||||
}
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* clear
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
TEST(LRUCache, clearEmptyCache) {
|
||||
LRUCache<std::string, std::string> c(10);
|
||||
c.clear();
|
||||
ASSERT_EQ(c.size(), 0);
|
||||
}
|
||||
|
||||
TEST(LRUCache, clearNonEmptyCache) {
|
||||
LRUCache<std::string, std::string> c(10);
|
||||
c.upsert("one", "eins");
|
||||
c.upsert("two", "zwei");
|
||||
c.upsert("three", "drei");
|
||||
ASSERT_EQ(c.size(), 3);
|
||||
c.clear();
|
||||
ASSERT_EQ(c.size(), 0);
|
||||
}
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* erase
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
TEST(LRUCache, eraseFromEmptyCache) {
|
||||
LRUCache<std::string, std::string> c(10);
|
||||
ASSERT_EQ(c.erase("foo"), false);
|
||||
ASSERT_EQ(c.size(), 0);
|
||||
}
|
||||
|
||||
TEST(LRUCache, eraseMissingFromNonEmptyCache) {
|
||||
LRUCache<std::string, std::string> c(10);
|
||||
c.upsert("one", "eins");
|
||||
ASSERT_EQ(c.erase("foo"), false);
|
||||
ASSERT_EQ(c.size(), 1);
|
||||
ASSERT_EQ(c.get("one").value_or("error"), "eins");
|
||||
}
|
||||
|
||||
TEST(LRUCache, eraseFromNonEmptyCache) {
|
||||
LRUCache<std::string, std::string> c(10);
|
||||
c.upsert("one", "eins");
|
||||
ASSERT_EQ(c.erase("one"), true);
|
||||
ASSERT_EQ(c.size(), 0);
|
||||
ASSERT_EQ(c.get("one").value_or("empty"), "empty");
|
||||
}
|
||||
}
@ -1,127 +0,0 @@
#include "pool.hh"
|
||||
#include <gtest/gtest.h>
|
||||
|
||||
namespace nix {
|
||||
|
||||
struct TestResource
|
||||
{
|
||||
|
||||
TestResource() {
|
||||
static int counter = 0;
|
||||
num = counter++;
|
||||
}
|
||||
|
||||
int dummyValue = 1;
|
||||
bool good = true;
|
||||
int num;
|
||||
};
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* Pool
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
TEST(Pool, freshPoolHasZeroCountAndSpecifiedCapacity) {
|
||||
auto isGood = [](const ref<TestResource> & r) { return r->good; };
|
||||
auto createResource = []() { return make_ref<TestResource>(); };
|
||||
|
||||
Pool<TestResource> pool = Pool<TestResource>((size_t)1, createResource, isGood);
|
||||
|
||||
ASSERT_EQ(pool.count(), 0);
|
||||
ASSERT_EQ(pool.capacity(), 1);
|
||||
}
|
||||
|
||||
TEST(Pool, freshPoolCanGetAResource) {
|
||||
auto isGood = [](const ref<TestResource> & r) { return r->good; };
|
||||
auto createResource = []() { return make_ref<TestResource>(); };
|
||||
|
||||
Pool<TestResource> pool = Pool<TestResource>((size_t)1, createResource, isGood);
|
||||
ASSERT_EQ(pool.count(), 0);
|
||||
|
||||
TestResource r = *(pool.get());
|
||||
|
||||
ASSERT_EQ(pool.count(), 1);
|
||||
ASSERT_EQ(pool.capacity(), 1);
|
||||
ASSERT_EQ(r.dummyValue, 1);
|
||||
ASSERT_EQ(r.good, true);
|
||||
}
|
||||
|
||||
TEST(Pool, capacityCanBeIncremented) {
|
||||
auto isGood = [](const ref<TestResource> & r) { return r->good; };
|
||||
auto createResource = []() { return make_ref<TestResource>(); };
|
||||
|
||||
Pool<TestResource> pool = Pool<TestResource>((size_t)1, createResource, isGood);
|
||||
ASSERT_EQ(pool.capacity(), 1);
|
||||
pool.incCapacity();
|
||||
ASSERT_EQ(pool.capacity(), 2);
|
||||
}
|
||||
|
||||
TEST(Pool, capacityCanBeDecremented) {
|
||||
auto isGood = [](const ref<TestResource> & r) { return r->good; };
|
||||
auto createResource = []() { return make_ref<TestResource>(); };
|
||||
|
||||
Pool<TestResource> pool = Pool<TestResource>((size_t)1, createResource, isGood);
|
||||
ASSERT_EQ(pool.capacity(), 1);
|
||||
pool.decCapacity();
|
||||
ASSERT_EQ(pool.capacity(), 0);
|
||||
}
|
||||
|
||||
TEST(Pool, flushBadDropsOutOfScopeResources) {
|
||||
auto isGood = [](const ref<TestResource> & r) { return false; };
|
||||
auto createResource = []() { return make_ref<TestResource>(); };
|
||||
|
||||
Pool<TestResource> pool = Pool<TestResource>((size_t)1, createResource, isGood);
|
||||
|
||||
{
|
||||
auto _r = pool.get();
|
||||
ASSERT_EQ(pool.count(), 1);
|
||||
}
|
||||
|
||||
pool.flushBad();
|
||||
ASSERT_EQ(pool.count(), 0);
|
||||
}
|
||||
|
||||
// Test that the resources we allocate are being reused when they are still good.
|
||||
TEST(Pool, reuseResource) {
|
||||
auto isGood = [](const ref<TestResource> & r) { return true; };
|
||||
auto createResource = []() { return make_ref<TestResource>(); };
|
||||
|
||||
Pool<TestResource> pool = Pool<TestResource>((size_t)1, createResource, isGood);
|
||||
|
||||
// Compare the instance counter between the two handles. We expect them to be equal
|
||||
// as the pool should hand out the same (still) good one again.
|
||||
int counter = -1;
|
||||
{
|
||||
Pool<TestResource>::Handle h = pool.get();
|
||||
counter = h->num;
|
||||
} // the first handle goes out of scope
|
||||
|
||||
{ // the second handle should contain the same resource (with the same counter value)
|
||||
Pool<TestResource>::Handle h = pool.get();
|
||||
ASSERT_EQ(h->num, counter);
|
||||
}
|
||||
}
|
||||
|
||||
// Test that the resources we allocate are being thrown away when they are no longer good.
|
||||
TEST(Pool, badResourceIsNotReused) {
|
||||
auto isGood = [](const ref<TestResource> & r) { return false; };
|
||||
auto createResource = []() { return make_ref<TestResource>(); };
|
||||
|
||||
Pool<TestResource> pool = Pool<TestResource>((size_t)1, createResource, isGood);
|
||||
|
||||
// Compare the instance counter between the two handles. We expect them
|
||||
// to *not* be equal as the pool should hand out a new instance after
|
||||
// the first one was returned.
|
||||
int counter = -1;
|
||||
{
|
||||
Pool<TestResource>::Handle h = pool.get();
|
||||
counter = h->num;
|
||||
} // the first handle goes out of scope
|
||||
|
||||
{
|
||||
// the second handle should contain a different resource (with a
// different counter value)
|
||||
Pool<TestResource>::Handle h = pool.get();
|
||||
ASSERT_NE(h->num, counter);
|
||||
}
|
||||
}
|
||||
}
@ -1,46 +0,0 @@
#include "references.hh"
|
||||
#include <gtest/gtest.h>
|
||||
|
||||
namespace nix {
|
||||
|
||||
using std::string;
|
||||
|
||||
struct RewriteParams {
|
||||
string originalString, finalString;
|
||||
StringMap rewrites;
|
||||
|
||||
friend std::ostream& operator<<(std::ostream& os, const RewriteParams& bar) {
|
||||
StringSet strRewrites;
|
||||
for (auto & [from, to] : bar.rewrites)
|
||||
strRewrites.insert(from + "->" + to);
|
||||
return os <<
|
||||
"OriginalString: " << bar.originalString << std::endl <<
|
||||
"Rewrites: " << concatStringsSep(",", strRewrites) << std::endl <<
|
||||
"Expected result: " << bar.finalString;
|
||||
}
|
||||
};
|
||||
|
||||
class RewriteTest : public ::testing::TestWithParam<RewriteParams> {
|
||||
};
|
||||
|
||||
TEST_P(RewriteTest, IdentityRewriteIsIdentity) {
|
||||
RewriteParams param = GetParam();
|
||||
StringSink rewritten;
|
||||
auto rewriter = RewritingSink(param.rewrites, rewritten);
|
||||
rewriter(param.originalString);
|
||||
rewriter.flush();
|
||||
ASSERT_EQ(rewritten.s, param.finalString);
|
||||
}
|
||||
|
||||
INSTANTIATE_TEST_CASE_P(
|
||||
references,
|
||||
RewriteTest,
|
||||
::testing::Values(
|
||||
RewriteParams{ "foooo", "baroo", {{"foo", "bar"}, {"bar", "baz"}}},
|
||||
RewriteParams{ "foooo", "bazoo", {{"fou", "bar"}, {"foo", "baz"}}},
|
||||
RewriteParams{ "foooo", "foooo", {}}
|
||||
)
|
||||
);
|
||||
|
||||
}
@ -1,43 +0,0 @@
#include "suggestions.hh"
|
||||
#include <gtest/gtest.h>
|
||||
|
||||
namespace nix {
|
||||
|
||||
struct LevenshteinDistanceParam {
|
||||
std::string s1, s2;
|
||||
int distance;
|
||||
};
|
||||
|
||||
class LevenshteinDistanceTest :
|
||||
public testing::TestWithParam<LevenshteinDistanceParam> {
|
||||
};
|
||||
|
||||
TEST_P(LevenshteinDistanceTest, CorrectlyComputed) {
|
||||
auto params = GetParam();
|
||||
|
||||
ASSERT_EQ(levenshteinDistance(params.s1, params.s2), params.distance);
|
||||
ASSERT_EQ(levenshteinDistance(params.s2, params.s1), params.distance);
|
||||
}
|
||||
|
||||
INSTANTIATE_TEST_SUITE_P(LevenshteinDistance, LevenshteinDistanceTest,
|
||||
testing::Values(
|
||||
LevenshteinDistanceParam{"foo", "foo", 0},
|
||||
LevenshteinDistanceParam{"foo", "", 3},
|
||||
LevenshteinDistanceParam{"", "", 0},
|
||||
LevenshteinDistanceParam{"foo", "fo", 1},
|
||||
LevenshteinDistanceParam{"foo", "oo", 1},
|
||||
LevenshteinDistanceParam{"foo", "fao", 1},
|
||||
LevenshteinDistanceParam{"foo", "abc", 3}
|
||||
)
|
||||
);
|
||||
|
||||
TEST(Suggestions, Trim) {
|
||||
auto suggestions = Suggestions::bestMatches({"foooo", "bar", "fo", "gao"}, "foo");
|
||||
auto onlyOne = suggestions.trim(1);
|
||||
ASSERT_EQ(onlyOne.suggestions.size(), 1);
|
||||
ASSERT_TRUE(onlyOne.suggestions.begin()->suggestion == "fo");
|
||||
|
||||
auto closest = suggestions.trim(999, 2);
|
||||
ASSERT_EQ(closest.suggestions.size(), 3);
|
||||
}
|
||||
}
@ -1,662 +0,0 @@
#include "util.hh"
|
||||
#include "types.hh"
|
||||
#include "file-system.hh"
|
||||
#include "processes.hh"
|
||||
#include "terminal.hh"
|
||||
|
||||
#include <limits.h>
|
||||
#include <gtest/gtest.h>
|
||||
|
||||
#include <numeric>
|
||||
|
||||
namespace nix {
|
||||
|
||||
/* ----------- tests for util.hh ------------------------------------------------*/
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* absPath
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
TEST(absPath, doesntChangeRoot) {
|
||||
auto p = absPath("/");
|
||||
|
||||
ASSERT_EQ(p, "/");
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
TEST(absPath, turnsEmptyPathIntoCWD) {
|
||||
char cwd[PATH_MAX+1];
|
||||
auto p = absPath("");
|
||||
|
||||
ASSERT_EQ(p, getcwd((char*)&cwd, PATH_MAX));
|
||||
}
|
||||
|
||||
TEST(absPath, usesOptionalBasePathWhenGiven) {
|
||||
char _cwd[PATH_MAX+1];
|
||||
char* cwd = getcwd((char*)&_cwd, PATH_MAX);
|
||||
|
||||
auto p = absPath("", cwd);
|
||||
|
||||
ASSERT_EQ(p, cwd);
|
||||
}
|
||||
|
||||
TEST(absPath, isIdempotent) {
|
||||
char _cwd[PATH_MAX+1];
|
||||
char* cwd = getcwd((char*)&_cwd, PATH_MAX);
|
||||
auto p1 = absPath(cwd);
|
||||
auto p2 = absPath(p1);
|
||||
|
||||
ASSERT_EQ(p1, p2);
|
||||
}
|
||||
|
||||
|
||||
TEST(absPath, pathIsCanonicalised) {
|
||||
auto path = "/some/path/with/trailing/dot/.";
|
||||
auto p1 = absPath(path);
|
||||
auto p2 = absPath(p1);
|
||||
|
||||
ASSERT_EQ(p1, "/some/path/with/trailing/dot");
|
||||
ASSERT_EQ(p1, p2);
|
||||
}
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* canonPath
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
TEST(canonPath, removesTrailingSlashes) {
|
||||
auto path = "/this/is/a/path//";
|
||||
auto p = canonPath(path);
|
||||
|
||||
ASSERT_EQ(p, "/this/is/a/path");
|
||||
}
|
||||
|
||||
TEST(canonPath, removesDots) {
|
||||
auto path = "/this/./is/a/path/./";
|
||||
auto p = canonPath(path);
|
||||
|
||||
ASSERT_EQ(p, "/this/is/a/path");
|
||||
}
|
||||
|
||||
TEST(canonPath, removesDots2) {
|
||||
auto path = "/this/a/../is/a////path/foo/..";
|
||||
auto p = canonPath(path);
|
||||
|
||||
ASSERT_EQ(p, "/this/is/a/path");
|
||||
}
|
||||
|
||||
TEST(canonPath, requiresAbsolutePath) {
|
||||
ASSERT_ANY_THROW(canonPath("."));
|
||||
ASSERT_ANY_THROW(canonPath(".."));
|
||||
ASSERT_ANY_THROW(canonPath("../"));
|
||||
ASSERT_DEATH({ canonPath(""); }, "path != \"\"");
|
||||
}
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* dirOf
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
TEST(dirOf, returnsEmptyStringForRoot) {
|
||||
auto p = dirOf("/");
|
||||
|
||||
ASSERT_EQ(p, "/");
|
||||
}
|
||||
|
||||
TEST(dirOf, returnsFirstPathComponent) {
|
||||
auto p1 = dirOf("/dir/");
|
||||
ASSERT_EQ(p1, "/dir");
|
||||
auto p2 = dirOf("/dir");
|
||||
ASSERT_EQ(p2, "/");
|
||||
auto p3 = dirOf("/dir/..");
|
||||
ASSERT_EQ(p3, "/dir");
|
||||
auto p4 = dirOf("/dir/../");
|
||||
ASSERT_EQ(p4, "/dir/..");
|
||||
}
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* baseNameOf
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
TEST(baseNameOf, emptyPath) {
|
||||
auto p1 = baseNameOf("");
|
||||
ASSERT_EQ(p1, "");
|
||||
}
|
||||
|
||||
TEST(baseNameOf, pathOnRoot) {
|
||||
auto p1 = baseNameOf("/dir");
|
||||
ASSERT_EQ(p1, "dir");
|
||||
}
|
||||
|
||||
TEST(baseNameOf, relativePath) {
|
||||
auto p1 = baseNameOf("dir/foo");
|
||||
ASSERT_EQ(p1, "foo");
|
||||
}
|
||||
|
||||
TEST(baseNameOf, pathWithTrailingSlashRoot) {
|
||||
auto p1 = baseNameOf("/");
|
||||
ASSERT_EQ(p1, "");
|
||||
}
|
||||
|
||||
TEST(baseNameOf, trailingSlash) {
|
||||
auto p1 = baseNameOf("/dir/");
|
||||
ASSERT_EQ(p1, "dir");
|
||||
}
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* isInDir
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
TEST(isInDir, trivialCase) {
|
||||
auto p1 = isInDir("/foo/bar", "/foo");
|
||||
ASSERT_EQ(p1, true);
|
||||
}
|
||||
|
||||
TEST(isInDir, notInDir) {
|
||||
auto p1 = isInDir("/zes/foo/bar", "/foo");
|
||||
ASSERT_EQ(p1, false);
|
||||
}
|
||||
|
||||
// XXX: hm, bug or feature? :) Looking at the implementation,
// this might be problematic.
|
||||
TEST(isInDir, emptyDir) {
|
||||
auto p1 = isInDir("/zes/foo/bar", "");
|
||||
ASSERT_EQ(p1, true);
|
||||
}
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* isDirOrInDir
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
TEST(isDirOrInDir, trueForSameDirectory) {
|
||||
ASSERT_EQ(isDirOrInDir("/nix", "/nix"), true);
|
||||
ASSERT_EQ(isDirOrInDir("/", "/"), true);
|
||||
}
|
||||
|
||||
TEST(isDirOrInDir, trueForEmptyPaths) {
|
||||
ASSERT_EQ(isDirOrInDir("", ""), true);
|
||||
}
|
||||
|
||||
TEST(isDirOrInDir, falseForDisjunctPaths) {
|
||||
ASSERT_EQ(isDirOrInDir("/foo", "/bar"), false);
|
||||
}
|
||||
|
||||
TEST(isDirOrInDir, relativePaths) {
|
||||
ASSERT_EQ(isDirOrInDir("/foo/..", "/foo"), true);
|
||||
}
|
||||
|
||||
// XXX: while it is possible to use "." or ".." in the
// first argument, this doesn't seem to work in the second.
|
||||
TEST(isDirOrInDir, DISABLED_shouldWork) {
|
||||
ASSERT_EQ(isDirOrInDir("/foo/..", "/foo/."), true);
|
||||
|
||||
}
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* pathExists
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
TEST(pathExists, rootExists) {
|
||||
ASSERT_TRUE(pathExists("/"));
|
||||
}
|
||||
|
||||
TEST(pathExists, cwdExists) {
|
||||
ASSERT_TRUE(pathExists("."));
|
||||
}
|
||||
|
||||
TEST(pathExists, bogusPathDoesNotExist) {
|
||||
ASSERT_FALSE(pathExists("/schnitzel/darmstadt/pommes"));
|
||||
}
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* concatStringsSep
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
TEST(concatStringsSep, buildCommaSeparatedString) {
|
||||
Strings strings;
|
||||
strings.push_back("this");
|
||||
strings.push_back("is");
|
||||
strings.push_back("great");
|
||||
|
||||
ASSERT_EQ(concatStringsSep(",", strings), "this,is,great");
|
||||
}
|
||||
|
||||
TEST(concatStringsSep, buildStringWithEmptySeparator) {
|
||||
Strings strings;
|
||||
strings.push_back("this");
|
||||
strings.push_back("is");
|
||||
strings.push_back("great");
|
||||
|
||||
ASSERT_EQ(concatStringsSep("", strings), "thisisgreat");
|
||||
}
|
||||
|
||||
TEST(concatStringsSep, buildSingleString) {
|
||||
Strings strings;
|
||||
strings.push_back("this");
|
||||
|
||||
ASSERT_EQ(concatStringsSep(",", strings), "this");
|
||||
}
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* hasPrefix
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
TEST(hasPrefix, emptyStringHasNoPrefix) {
|
||||
ASSERT_FALSE(hasPrefix("", "foo"));
|
||||
}
|
||||
|
||||
TEST(hasPrefix, emptyStringIsAlwaysPrefix) {
|
||||
ASSERT_TRUE(hasPrefix("foo", ""));
|
||||
ASSERT_TRUE(hasPrefix("jshjkfhsadf", ""));
|
||||
}
|
||||
|
||||
TEST(hasPrefix, trivialCase) {
|
||||
ASSERT_TRUE(hasPrefix("foobar", "foo"));
|
||||
}
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* hasSuffix
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
TEST(hasSuffix, emptyStringHasNoSuffix) {
|
||||
ASSERT_FALSE(hasSuffix("", "foo"));
|
||||
}
|
||||
|
||||
TEST(hasSuffix, trivialCase) {
|
||||
ASSERT_TRUE(hasSuffix("foo", "foo"));
|
||||
ASSERT_TRUE(hasSuffix("foobar", "bar"));
|
||||
}
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* base64Encode
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
TEST(base64Encode, emptyString) {
|
||||
ASSERT_EQ(base64Encode(""), "");
|
||||
}
|
||||
|
||||
TEST(base64Encode, encodesAString) {
|
||||
ASSERT_EQ(base64Encode("quod erat demonstrandum"), "cXVvZCBlcmF0IGRlbW9uc3RyYW5kdW0=");
|
||||
}
|
||||
|
||||
TEST(base64Encode, encodeAndDecode) {
|
||||
auto s = "quod erat demonstrandum";
|
||||
auto encoded = base64Encode(s);
|
||||
auto decoded = base64Decode(encoded);
|
||||
|
||||
ASSERT_EQ(decoded, s);
|
||||
}
|
||||
|
||||
TEST(base64Encode, encodeAndDecodeNonPrintable) {
|
||||
char s[256];
|
||||
std::iota(std::rbegin(s), std::rend(s), 0);
|
||||
|
||||
auto encoded = base64Encode(s);
|
||||
auto decoded = base64Decode(encoded);
|
||||
|
||||
EXPECT_EQ(decoded.length(), 255);
|
||||
ASSERT_EQ(decoded, s);
|
||||
}
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* base64Decode
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
TEST(base64Decode, emptyString) {
|
||||
ASSERT_EQ(base64Decode(""), "");
|
||||
}
|
||||
|
||||
TEST(base64Decode, decodeAString) {
|
||||
ASSERT_EQ(base64Decode("cXVvZCBlcmF0IGRlbW9uc3RyYW5kdW0="), "quod erat demonstrandum");
|
||||
}
|
||||
|
||||
TEST(base64Decode, decodeThrowsOnInvalidChar) {
|
||||
ASSERT_THROW(base64Decode("cXVvZCBlcm_0IGRlbW9uc3RyYW5kdW0="), Error);
|
||||
}
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* getLine
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
TEST(getLine, all) {
|
||||
{
|
||||
auto [line, rest] = getLine("foo\nbar\nxyzzy");
|
||||
ASSERT_EQ(line, "foo");
|
||||
ASSERT_EQ(rest, "bar\nxyzzy");
|
||||
}
|
||||
|
||||
{
|
||||
auto [line, rest] = getLine("foo\r\nbar\r\nxyzzy");
|
||||
ASSERT_EQ(line, "foo");
|
||||
ASSERT_EQ(rest, "bar\r\nxyzzy");
|
||||
}
|
||||
|
||||
{
|
||||
auto [line, rest] = getLine("foo\n");
|
||||
ASSERT_EQ(line, "foo");
|
||||
ASSERT_EQ(rest, "");
|
||||
}
|
||||
|
||||
{
|
||||
auto [line, rest] = getLine("foo");
|
||||
ASSERT_EQ(line, "foo");
|
||||
ASSERT_EQ(rest, "");
|
||||
}
|
||||
|
||||
{
|
||||
auto [line, rest] = getLine("");
|
||||
ASSERT_EQ(line, "");
|
||||
ASSERT_EQ(rest, "");
|
||||
}
|
||||
}
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* toLower
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
TEST(toLower, emptyString) {
|
||||
ASSERT_EQ(toLower(""), "");
|
||||
}
|
||||
|
||||
TEST(toLower, nonLetters) {
|
||||
auto s = "!@(*$#)(@#=\\234_";
|
||||
ASSERT_EQ(toLower(s), s);
|
||||
}
|
||||
|
||||
// std::tolower() doesn't handle Unicode characters. In the context of
// store paths this isn't relevant, but it doesn't hurt to record this
// behavior here.
|
||||
TEST(toLower, umlauts) {
|
||||
auto s = "ÄÖÜ";
|
||||
ASSERT_EQ(toLower(s), "ÄÖÜ");
|
||||
}
|
||||
|
||||
/* ----------------------------------------------------------------------------
 * string2Float
 * --------------------------------------------------------------------------*/

TEST(string2Float, emptyString) {
    ASSERT_EQ(string2Float<double>(""), std::nullopt);
}

TEST(string2Float, trivialConversions) {
    ASSERT_EQ(string2Float<double>("1.0"), 1.0);

    ASSERT_EQ(string2Float<double>("0.0"), 0.0);

    ASSERT_EQ(string2Float<double>("-100.25"), -100.25);
}

/* ----------------------------------------------------------------------------
 * string2Int
 * --------------------------------------------------------------------------*/

TEST(string2Int, emptyString) {
    ASSERT_EQ(string2Int<int>(""), std::nullopt);
}

TEST(string2Int, trivialConversions) {
    ASSERT_EQ(string2Int<int>("1"), 1);

    ASSERT_EQ(string2Int<int>("0"), 0);

    ASSERT_EQ(string2Int<int>("-100"), -100);
}

/* ----------------------------------------------------------------------------
 * statusOk
 * --------------------------------------------------------------------------*/

TEST(statusOk, zeroIsOk) {
    ASSERT_EQ(statusOk(0), true);
    ASSERT_EQ(statusOk(1), false);
}

/* ----------------------------------------------------------------------------
 * rewriteStrings
 * --------------------------------------------------------------------------*/

TEST(rewriteStrings, emptyString) {
    StringMap rewrites;
    rewrites["this"] = "that";

    ASSERT_EQ(rewriteStrings("", rewrites), "");
}

TEST(rewriteStrings, emptyRewrites) {
    StringMap rewrites;

    ASSERT_EQ(rewriteStrings("this and that", rewrites), "this and that");
}

TEST(rewriteStrings, successfulRewrite) {
    StringMap rewrites;
    rewrites["this"] = "that";

    ASSERT_EQ(rewriteStrings("this and that", rewrites), "that and that");
}

TEST(rewriteStrings, doesntOccur) {
    StringMap rewrites;
    rewrites["foo"] = "bar";

    ASSERT_EQ(rewriteStrings("this and that", rewrites), "this and that");
}

/* ----------------------------------------------------------------------------
 * replaceStrings
 * --------------------------------------------------------------------------*/

TEST(replaceStrings, emptyString) {
    ASSERT_EQ(replaceStrings("", "this", "that"), "");
    ASSERT_EQ(replaceStrings("this and that", "", ""), "this and that");
}

TEST(replaceStrings, successfulReplace) {
    ASSERT_EQ(replaceStrings("this and that", "this", "that"), "that and that");
}

TEST(replaceStrings, doesntOccur) {
    ASSERT_EQ(replaceStrings("this and that", "foo", "bar"), "this and that");
}

/* ----------------------------------------------------------------------------
 * trim
 * --------------------------------------------------------------------------*/

TEST(trim, emptyString) {
    ASSERT_EQ(trim(""), "");
}

TEST(trim, removesWhitespace) {
    ASSERT_EQ(trim("foo"), "foo");
    ASSERT_EQ(trim(" foo "), "foo");
    ASSERT_EQ(trim(" foo bar baz"), "foo bar baz");
    ASSERT_EQ(trim(" \t foo bar baz\n"), "foo bar baz");
}

/* ----------------------------------------------------------------------------
 * chomp
 * --------------------------------------------------------------------------*/

TEST(chomp, emptyString) {
    ASSERT_EQ(chomp(""), "");
}

TEST(chomp, removesWhitespace) {
    ASSERT_EQ(chomp("foo"), "foo");
    ASSERT_EQ(chomp("foo "), "foo");
    ASSERT_EQ(chomp(" foo "), " foo");
    ASSERT_EQ(chomp(" foo bar baz "), " foo bar baz");
    ASSERT_EQ(chomp("\t foo bar baz\n"), "\t foo bar baz");
}

/* ----------------------------------------------------------------------------
 * quoteStrings
 * --------------------------------------------------------------------------*/

TEST(quoteStrings, empty) {
    Strings s = { };
    Strings expected = { };

    ASSERT_EQ(quoteStrings(s), expected);
}

TEST(quoteStrings, emptyStrings) {
    Strings s = { "", "", "" };
    Strings expected = { "''", "''", "''" };
    ASSERT_EQ(quoteStrings(s), expected);
}

TEST(quoteStrings, trivialQuote) {
    Strings s = { "foo", "bar", "baz" };
    Strings expected = { "'foo'", "'bar'", "'baz'" };

    ASSERT_EQ(quoteStrings(s), expected);
}

TEST(quoteStrings, quotedStrings) {
    Strings s = { "'foo'", "'bar'", "'baz'" };
    Strings expected = { "''foo''", "''bar''", "''baz''" };

    ASSERT_EQ(quoteStrings(s), expected);
}

/* ----------------------------------------------------------------------------
 * tokenizeString
 * --------------------------------------------------------------------------*/

TEST(tokenizeString, empty) {
    Strings expected = { };

    ASSERT_EQ(tokenizeString<Strings>(""), expected);
}

TEST(tokenizeString, tokenizeSpacesWithDefaults) {
    auto s = "foo bar baz";
    Strings expected = { "foo", "bar", "baz" };

    ASSERT_EQ(tokenizeString<Strings>(s), expected);
}

TEST(tokenizeString, tokenizeTabsWithDefaults) {
    auto s = "foo\tbar\tbaz";
    Strings expected = { "foo", "bar", "baz" };

    ASSERT_EQ(tokenizeString<Strings>(s), expected);
}

TEST(tokenizeString, tokenizeTabsSpacesWithDefaults) {
    auto s = "foo\t bar\t baz";
    Strings expected = { "foo", "bar", "baz" };

    ASSERT_EQ(tokenizeString<Strings>(s), expected);
}

TEST(tokenizeString, tokenizeTabsSpacesNewlineWithDefaults) {
    auto s = "foo\t\n bar\t\n baz";
    Strings expected = { "foo", "bar", "baz" };

    ASSERT_EQ(tokenizeString<Strings>(s), expected);
}

TEST(tokenizeString, tokenizeTabsSpacesNewlineRetWithDefaults) {
    auto s = "foo\t\n\r bar\t\n\r baz";
    Strings expected = { "foo", "bar", "baz" };

    ASSERT_EQ(tokenizeString<Strings>(s), expected);

    auto s2 = "foo \t\n\r bar \t\n\r baz";
    Strings expected2 = { "foo", "bar", "baz" };

    ASSERT_EQ(tokenizeString<Strings>(s2), expected2);
}

TEST(tokenizeString, tokenizeWithCustomSep) {
    auto s = "foo\n,bar\n,baz\n";
    Strings expected = { "foo\n", "bar\n", "baz\n" };

    ASSERT_EQ(tokenizeString<Strings>(s, ","), expected);
}

/* ----------------------------------------------------------------------------
 * get
 * --------------------------------------------------------------------------*/

TEST(get, emptyContainer) {
    StringMap s = { };
    auto expected = nullptr;

    ASSERT_EQ(get(s, "one"), expected);
}

TEST(get, getFromContainer) {
    StringMap s;
    s["one"] = "yi";
    s["two"] = "er";
    auto expected = "yi";

    ASSERT_EQ(*get(s, "one"), expected);
}

TEST(getOr, emptyContainer) {
    StringMap s = { };
    auto expected = "yi";

    ASSERT_EQ(getOr(s, "one", "yi"), expected);
}

TEST(getOr, getFromContainer) {
    StringMap s;
    s["one"] = "yi";
    s["two"] = "er";
    auto expected = "yi";

    ASSERT_EQ(getOr(s, "one", "nope"), expected);
}

/* ----------------------------------------------------------------------------
 * filterANSIEscapes
 * --------------------------------------------------------------------------*/

TEST(filterANSIEscapes, emptyString) {
    auto s = "";
    auto expected = "";

    ASSERT_EQ(filterANSIEscapes(s), expected);
}

TEST(filterANSIEscapes, doesntChangePrintableChars) {
    auto s = "09 2q304ruyhr slk2-19024 kjsadh sar f";

    ASSERT_EQ(filterANSIEscapes(s), s);
}

TEST(filterANSIEscapes, filtersColorCodes) {
    auto s = "\u001b[30m A \u001b[31m B \u001b[32m C \u001b[33m D \u001b[0m";

    ASSERT_EQ(filterANSIEscapes(s, true, 2), " A" );
    ASSERT_EQ(filterANSIEscapes(s, true, 3), " A " );
    ASSERT_EQ(filterANSIEscapes(s, true, 4), " A " );
    ASSERT_EQ(filterANSIEscapes(s, true, 5), " A B" );
    ASSERT_EQ(filterANSIEscapes(s, true, 8), " A B C" );
}

TEST(filterANSIEscapes, expandsTabs) {
    auto s = "foo\tbar\tbaz";

    ASSERT_EQ(filterANSIEscapes(s, true), "foo bar baz" );
}

TEST(filterANSIEscapes, utf8) {
    ASSERT_EQ(filterANSIEscapes("foobar", true, 5), "fooba");
    ASSERT_EQ(filterANSIEscapes("fóóbär", true, 6), "fóóbär");
    ASSERT_EQ(filterANSIEscapes("fóóbär", true, 5), "fóóbä");
    ASSERT_EQ(filterANSIEscapes("fóóbär", true, 3), "fóó");
    ASSERT_EQ(filterANSIEscapes("f€€bär", true, 4), "f€€b");
    ASSERT_EQ(filterANSIEscapes("f𐍈𐍈bär", true, 4), "f𐍈𐍈b");
}

}
|
|
@ -1,347 +0,0 @@
|
|||
#include "url.hh"
|
||||
#include <gtest/gtest.h>
|
||||
|
||||
namespace nix {
|
||||
|
||||
/* ----------- tests for url.hh --------------------------------------------------*/
|
||||
|
||||
std::string print_map(std::map<std::string, std::string> m) {
|
||||
std::map<std::string, std::string>::iterator it;
|
||||
std::string s = "{ ";
|
||||
for (it = m.begin(); it != m.end(); ++it) {
|
||||
s += "{ ";
|
||||
s += it->first;
|
||||
s += " = ";
|
||||
s += it->second;
|
||||
s += " } ";
|
||||
}
|
||||
s += "}";
|
||||
return s;
|
||||
}
|
||||
|
||||
|
||||
std::ostream& operator<<(std::ostream& os, const ParsedURL& p) {
|
||||
return os << "\n"
|
||||
<< "url: " << p.url << "\n"
|
||||
<< "base: " << p.base << "\n"
|
||||
<< "scheme: " << p.scheme << "\n"
|
||||
<< "authority: " << p.authority.value() << "\n"
|
||||
<< "path: " << p.path << "\n"
|
||||
<< "query: " << print_map(p.query) << "\n"
|
||||
<< "fragment: " << p.fragment << "\n";
|
||||
}
|
||||
|
||||
TEST(parseURL, parsesSimpleHttpUrl) {
    auto s = "http://www.example.org/file.tar.gz";
    auto parsed = parseURL(s);

    ParsedURL expected {
        .url = "http://www.example.org/file.tar.gz",
        .base = "http://www.example.org/file.tar.gz",
        .scheme = "http",
        .authority = "www.example.org",
        .path = "/file.tar.gz",
        .query = (StringMap) { },
        .fragment = "",
    };

    ASSERT_EQ(parsed, expected);
}

TEST(parseURL, parsesSimpleHttpsUrl) {
    auto s = "https://www.example.org/file.tar.gz";
    auto parsed = parseURL(s);

    ParsedURL expected {
        .url = "https://www.example.org/file.tar.gz",
        .base = "https://www.example.org/file.tar.gz",
        .scheme = "https",
        .authority = "www.example.org",
        .path = "/file.tar.gz",
        .query = (StringMap) { },
        .fragment = "",
    };

    ASSERT_EQ(parsed, expected);
}

TEST(parseURL, parsesSimpleHttpUrlWithQueryAndFragment) {
    auto s = "https://www.example.org/file.tar.gz?download=fast&when=now#hello";
    auto parsed = parseURL(s);

    ParsedURL expected {
        .url = "https://www.example.org/file.tar.gz",
        .base = "https://www.example.org/file.tar.gz",
        .scheme = "https",
        .authority = "www.example.org",
        .path = "/file.tar.gz",
        .query = (StringMap) { { "download", "fast" }, { "when", "now" } },
        .fragment = "hello",
    };

    ASSERT_EQ(parsed, expected);
}

TEST(parseURL, parsesSimpleHttpUrlWithComplexFragment) {
    auto s = "http://www.example.org/file.tar.gz?field=value#?foo=bar%23";
    auto parsed = parseURL(s);

    ParsedURL expected {
        .url = "http://www.example.org/file.tar.gz",
        .base = "http://www.example.org/file.tar.gz",
        .scheme = "http",
        .authority = "www.example.org",
        .path = "/file.tar.gz",
        .query = (StringMap) { { "field", "value" } },
        .fragment = "?foo=bar#",
    };

    ASSERT_EQ(parsed, expected);
}

TEST(parseURL, parsesFilePlusHttpsUrl) {
    auto s = "file+https://www.example.org/video.mp4";
    auto parsed = parseURL(s);

    ParsedURL expected {
        .url = "file+https://www.example.org/video.mp4",
        .base = "https://www.example.org/video.mp4",
        .scheme = "file+https",
        .authority = "www.example.org",
        .path = "/video.mp4",
        .query = (StringMap) { },
        .fragment = "",
    };

    ASSERT_EQ(parsed, expected);
}

TEST(parseURL, rejectsAuthorityInUrlsWithFileTransportation) {
    auto s = "file://www.example.org/video.mp4";
    ASSERT_THROW(parseURL(s), Error);
}

TEST(parseURL, parseIPv4Address) {
    auto s = "http://127.0.0.1:8080/file.tar.gz?download=fast&when=now#hello";
    auto parsed = parseURL(s);

    ParsedURL expected {
        .url = "http://127.0.0.1:8080/file.tar.gz",
        .base = "https://127.0.0.1:8080/file.tar.gz",
        .scheme = "http",
        .authority = "127.0.0.1:8080",
        .path = "/file.tar.gz",
        .query = (StringMap) { { "download", "fast" }, { "when", "now" } },
        .fragment = "hello",
    };

    ASSERT_EQ(parsed, expected);
}

TEST(parseURL, parseScopedRFC4007IPv6Address) {
    auto s = "http://[fe80::818c:da4d:8975:415c\%enp0s25]:8080";
    auto parsed = parseURL(s);

    ParsedURL expected {
        .url = "http://[fe80::818c:da4d:8975:415c\%enp0s25]:8080",
        .base = "http://[fe80::818c:da4d:8975:415c\%enp0s25]:8080",
        .scheme = "http",
        .authority = "[fe80::818c:da4d:8975:415c\%enp0s25]:8080",
        .path = "",
        .query = (StringMap) { },
        .fragment = "",
    };

    ASSERT_EQ(parsed, expected);
}

TEST(parseURL, parseIPv6Address) {
    auto s = "http://[2a02:8071:8192:c100:311d:192d:81ac:11ea]:8080";
    auto parsed = parseURL(s);

    ParsedURL expected {
        .url = "http://[2a02:8071:8192:c100:311d:192d:81ac:11ea]:8080",
        .base = "http://[2a02:8071:8192:c100:311d:192d:81ac:11ea]:8080",
        .scheme = "http",
        .authority = "[2a02:8071:8192:c100:311d:192d:81ac:11ea]:8080",
        .path = "",
        .query = (StringMap) { },
        .fragment = "",
    };

    ASSERT_EQ(parsed, expected);
}

TEST(parseURL, parseEmptyQueryParams) {
    auto s = "http://127.0.0.1:8080/file.tar.gz?&&&&&";
    auto parsed = parseURL(s);
    ASSERT_EQ(parsed.query, (StringMap) { });
}

TEST(parseURL, parseUserPassword) {
    auto s = "http://user:pass@www.example.org:8080/file.tar.gz";
    auto parsed = parseURL(s);

    ParsedURL expected {
        .url = "http://user:pass@www.example.org/file.tar.gz",
        .base = "http://user:pass@www.example.org/file.tar.gz",
        .scheme = "http",
        .authority = "user:pass@www.example.org:8080",
        .path = "/file.tar.gz",
        .query = (StringMap) { },
        .fragment = "",
    };

    ASSERT_EQ(parsed, expected);
}

TEST(parseURL, parseFileURLWithQueryAndFragment) {
    auto s = "file:///none/of//your/business";
    auto parsed = parseURL(s);

    ParsedURL expected {
        .url = "",
        .base = "",
        .scheme = "file",
        .authority = "",
        .path = "/none/of//your/business",
        .query = (StringMap) { },
        .fragment = "",
    };

    ASSERT_EQ(parsed, expected);
}

TEST(parseURL, parsedUrlsIsEqualToItself) {
    auto s = "http://www.example.org/file.tar.gz";
    auto url = parseURL(s);

    ASSERT_TRUE(url == url);
}

TEST(parseURL, parseFTPUrl) {
    auto s = "ftp://ftp.nixos.org/downloads/nixos.iso";
    auto parsed = parseURL(s);

    ParsedURL expected {
        .url = "ftp://ftp.nixos.org/downloads/nixos.iso",
        .base = "ftp://ftp.nixos.org/downloads/nixos.iso",
        .scheme = "ftp",
        .authority = "ftp.nixos.org",
        .path = "/downloads/nixos.iso",
        .query = (StringMap) { },
        .fragment = "",
    };

    ASSERT_EQ(parsed, expected);
}

TEST(parseURL, parsesAnythingInUriFormat) {
    auto s = "whatever://github.com/NixOS/nixpkgs.git";
    auto parsed = parseURL(s);
}

TEST(parseURL, parsesAnythingInUriFormatWithoutDoubleSlash) {
    auto s = "whatever:github.com/NixOS/nixpkgs.git";
    auto parsed = parseURL(s);
}

TEST(parseURL, emptyStringIsInvalidURL) {
    ASSERT_THROW(parseURL(""), Error);
}

/* ----------------------------------------------------------------------------
 * decodeQuery
 * --------------------------------------------------------------------------*/

TEST(decodeQuery, emptyStringYieldsEmptyMap) {
    auto d = decodeQuery("");
    ASSERT_EQ(d, (StringMap) { });
}

TEST(decodeQuery, simpleDecode) {
    auto d = decodeQuery("yi=one&er=two");
    ASSERT_EQ(d, ((StringMap) { { "yi", "one" }, { "er", "two" } }));
}

TEST(decodeQuery, decodeUrlEncodedArgs) {
    auto d = decodeQuery("arg=%3D%3D%40%3D%3D");
    ASSERT_EQ(d, ((StringMap) { { "arg", "==@==" } }));
}

TEST(decodeQuery, decodeArgWithEmptyValue) {
    auto d = decodeQuery("arg=");
    ASSERT_EQ(d, ((StringMap) { { "arg", ""} }));
}

/* ----------------------------------------------------------------------------
 * percentDecode
 * --------------------------------------------------------------------------*/

TEST(percentDecode, decodesUrlEncodedString) {
    std::string s = "==@==";
    std::string d = percentDecode("%3D%3D%40%3D%3D");
    ASSERT_EQ(d, s);
}

TEST(percentDecode, multipleDecodesAreIdempotent) {
    std::string once = percentDecode("%3D%3D%40%3D%3D");
    std::string twice = percentDecode(once);

    ASSERT_EQ(once, twice);
}

TEST(percentDecode, trailingPercent) {
    std::string s = "==@==%";
    std::string d = percentDecode("%3D%3D%40%3D%3D%25");

    ASSERT_EQ(d, s);
}

/* ----------------------------------------------------------------------------
 * percentEncode
 * --------------------------------------------------------------------------*/

TEST(percentEncode, encodesUrlEncodedString) {
    std::string s = percentEncode("==@==");
    std::string d = "%3D%3D%40%3D%3D";
    ASSERT_EQ(d, s);
}

TEST(percentEncode, keepArgument) {
    std::string a = percentEncode("abd / def");
    std::string b = percentEncode("abd / def", "/");
    ASSERT_EQ(a, "abd%20%2F%20def");
    ASSERT_EQ(b, "abd%20/%20def");
}

TEST(percentEncode, inverseOfDecode) {
    std::string original = "%3D%3D%40%3D%3D";
    std::string once = percentEncode(original);
    std::string back = percentDecode(once);

    ASSERT_EQ(back, original);
}

TEST(percentEncode, trailingPercent) {
    std::string s = percentEncode("==@==%");
    std::string d = "%3D%3D%40%3D%3D%25";

    ASSERT_EQ(d, s);
}

TEST(percentEncode, yen) {
    // https://en.wikipedia.org/wiki/Percent-encoding#Character_data
    std::string s = reinterpret_cast<const char*>(u8"円");
    std::string e = "%E5%86%86";

    ASSERT_EQ(percentEncode(s), e);
    ASSERT_EQ(percentDecode(e), s);
}

}
|
|
@ -1,105 +0,0 @@
|
|||
#include "xml-writer.hh"
|
||||
#include <gtest/gtest.h>
|
||||
#include <sstream>
|
||||
|
||||
namespace nix {
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* XMLWriter
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
TEST(XMLWriter, emptyObject) {
|
||||
std::stringstream out;
|
||||
{
|
||||
XMLWriter t(false, out);
|
||||
}
|
||||
|
||||
ASSERT_EQ(out.str(), "<?xml version='1.0' encoding='utf-8'?>\n");
|
||||
}
|
||||
|
||||
TEST(XMLWriter, objectWithEmptyElement) {
|
||||
std::stringstream out;
|
||||
{
|
||||
XMLWriter t(false, out);
|
||||
t.openElement("foobar");
|
||||
}
|
||||
|
||||
ASSERT_EQ(out.str(), "<?xml version='1.0' encoding='utf-8'?>\n<foobar></foobar>");
|
||||
}
|
||||
|
||||
TEST(XMLWriter, objectWithElementWithAttrs) {
|
||||
std::stringstream out;
|
||||
{
|
||||
XMLWriter t(false, out);
|
||||
XMLAttrs attrs = {
|
||||
{ "foo", "bar" }
|
||||
};
|
||||
t.openElement("foobar", attrs);
|
||||
}
|
||||
|
||||
ASSERT_EQ(out.str(), "<?xml version='1.0' encoding='utf-8'?>\n<foobar foo=\"bar\"></foobar>");
|
||||
}
|
||||
|
||||
TEST(XMLWriter, objectWithElementWithEmptyAttrs) {
|
||||
std::stringstream out;
|
||||
{
|
||||
XMLWriter t(false, out);
|
||||
XMLAttrs attrs = {};
|
||||
t.openElement("foobar", attrs);
|
||||
}
|
||||
|
||||
ASSERT_EQ(out.str(), "<?xml version='1.0' encoding='utf-8'?>\n<foobar></foobar>");
|
||||
}
|
||||
|
||||
TEST(XMLWriter, objectWithElementWithAttrsEscaping) {
    std::stringstream out;
    {
        XMLWriter t(false, out);
        XMLAttrs attrs = {
            { "<key>", "<value>" }
        };
        t.openElement("foobar", attrs);
    }

    // XXX: While "<value>" is escaped, "<key>" isn't which I think is a bug.
    ASSERT_EQ(out.str(), "<?xml version='1.0' encoding='utf-8'?>\n<foobar <key>=\"&lt;value&gt;\"></foobar>");
}

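// Illustrative sketch, not part of the original suite: the escaping that the
// XXX comment above suggests should also be applied to attribute *names*.
// This helper is hypothetical and independent of XMLWriter's actual
// implementation; it merely shows the substitution that would turn "<key>"
// into "&lt;key&gt;" in the expected output.
static std::string xmlEscapeSketch(const std::string & s)
{
    std::string out;
    for (char c : s) {
        if (c == '<') out += "&lt;";
        else if (c == '>') out += "&gt;";
        else if (c == '&') out += "&amp;";
        else if (c == '"') out += "&quot;";
        else out += c;
    }
    return out;
}
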
TEST(XMLWriter, objectWithElementWithAttrsIndented) {
    std::stringstream out;
    {
        XMLWriter t(true, out);
        XMLAttrs attrs = {
            { "foo", "bar" }
        };
        t.openElement("foobar", attrs);
    }

    ASSERT_EQ(out.str(), "<?xml version='1.0' encoding='utf-8'?>\n<foobar foo=\"bar\">\n</foobar>\n");
}

TEST(XMLWriter, writeEmptyElement) {
    std::stringstream out;
    {
        XMLWriter t(false, out);
        t.writeEmptyElement("foobar");
    }

    ASSERT_EQ(out.str(), "<?xml version='1.0' encoding='utf-8'?>\n<foobar />");
}

TEST(XMLWriter, writeEmptyElementWithAttributes) {
    std::stringstream out;
    {
        XMLWriter t(false, out);
        XMLAttrs attrs = {
            { "foo", "bar" }
        };
        t.writeEmptyElement("foobar", attrs);
    }

    ASSERT_EQ(out.str(), "<?xml version='1.0' encoding='utf-8'?>\n<foobar foo=\"bar\" />");
}

}