Move unit tests to the location Meson expects them to be

Everything that is a separate subproject should live in the subprojects
directory.

Progress on #2503

This reverts commit 451f8a8c19.
parent 1cd48008f0
commit e65510da56

270 changed files with 158 additions and 168 deletions
src/libutil-tests/.version (new symbolic link)
@@ -0,0 +1 @@
../../.version
src/libutil-tests/args.cc (new file, 168 lines)
@@ -0,0 +1,168 @@
#include "args.hh"
#include "fs-sink.hh"
#include <list>

#include <gtest/gtest.h>
#include <rapidcheck/gtest.h>

namespace nix {

TEST(parseShebangContent, basic) {
    std::list<std::string> r = parseShebangContent("hi there");
    ASSERT_EQ(r.size(), 2);
    auto i = r.begin();
    ASSERT_EQ(*i++, "hi");
    ASSERT_EQ(*i++, "there");
}

TEST(parseShebangContent, empty) {
    std::list<std::string> r = parseShebangContent("");
    ASSERT_EQ(r.size(), 0);
}

TEST(parseShebangContent, doubleBacktick) {
    std::list<std::string> r = parseShebangContent("``\"ain't that nice\"``");
    ASSERT_EQ(r.size(), 1);
    auto i = r.begin();
    ASSERT_EQ(*i++, "\"ain't that nice\"");
}

TEST(parseShebangContent, doubleBacktickEmpty) {
    std::list<std::string> r = parseShebangContent("````");
    ASSERT_EQ(r.size(), 1);
    auto i = r.begin();
    ASSERT_EQ(*i++, "");
}

TEST(parseShebangContent, doubleBacktickMarkdownInlineCode) {
    std::list<std::string> r = parseShebangContent("``# I'm markdown section about `coolFunction` ``");
    ASSERT_EQ(r.size(), 1);
    auto i = r.begin();
    ASSERT_EQ(*i++, "# I'm markdown section about `coolFunction`");
}

TEST(parseShebangContent, doubleBacktickMarkdownCodeBlockNaive) {
    std::list<std::string> r = parseShebangContent("``Example 1\n```nix\na: a\n``` ``");
    auto i = r.begin();
    ASSERT_EQ(r.size(), 1);
    ASSERT_EQ(*i++, "Example 1\n``nix\na: a\n``");
}

TEST(parseShebangContent, doubleBacktickMarkdownCodeBlockCorrect) {
    std::list<std::string> r = parseShebangContent("``Example 1\n````nix\na: a\n```` ``");
    auto i = r.begin();
    ASSERT_EQ(r.size(), 1);
    ASSERT_EQ(*i++, "Example 1\n```nix\na: a\n```");
}

TEST(parseShebangContent, doubleBacktickMarkdownCodeBlock2) {
    std::list<std::string> r = parseShebangContent("``Example 1\n````nix\na: a\n````\nExample 2\n````nix\na: a\n```` ``");
    auto i = r.begin();
    ASSERT_EQ(r.size(), 1);
    ASSERT_EQ(*i++, "Example 1\n```nix\na: a\n```\nExample 2\n```nix\na: a\n```");
}

TEST(parseShebangContent, singleBacktickInDoubleBacktickQuotes) {
    std::list<std::string> r = parseShebangContent("``` ``");
    auto i = r.begin();
    ASSERT_EQ(r.size(), 1);
    ASSERT_EQ(*i++, "`");
}

TEST(parseShebangContent, singleBacktickAndSpaceInDoubleBacktickQuotes) {
    std::list<std::string> r = parseShebangContent("``` ``");
    auto i = r.begin();
    ASSERT_EQ(r.size(), 1);
    ASSERT_EQ(*i++, "` ");
}

TEST(parseShebangContent, doubleBacktickInDoubleBacktickQuotes) {
    std::list<std::string> r = parseShebangContent("````` ``");
    auto i = r.begin();
    ASSERT_EQ(r.size(), 1);
    ASSERT_EQ(*i++, "``");
}

TEST(parseShebangContent, increasingQuotes) {
    std::list<std::string> r = parseShebangContent("```` ``` `` ````` `` `````` ``");
    auto i = r.begin();
    ASSERT_EQ(r.size(), 4);
    ASSERT_EQ(*i++, "");
    ASSERT_EQ(*i++, "`");
    ASSERT_EQ(*i++, "``");
    ASSERT_EQ(*i++, "```");
}


#ifndef COVERAGE

// quick and dirty
static inline std::string escape(std::string_view s_) {

    std::string_view s = s_;
    std::string r = "``";

    // make a guess to allocate ahead of time
    r.reserve(
        // plain chars
        s.size()
        // quotes
        + 5
        // some "escape" backticks
        + s.size() / 8);

    while (!s.empty()) {
        if (s[0] == '`' && s.size() >= 2 && s[1] == '`') {
            // escape it
            r += "`";
            while (!s.empty() && s[0] == '`') {
                r += "`";
                s = s.substr(1);
            }
        } else {
            r += s[0];
            s = s.substr(1);
        }
    }

    if (!r.empty()
        && (
            r[r.size() - 1] == '`'
            || r[r.size() - 1] == ' '
        )) {
        r += " ";
    }

    r += "``";

    return r;
};

RC_GTEST_PROP(
    parseShebangContent,
    prop_round_trip_single,
    (const std::string & orig))
{
    auto escaped = escape(orig);
    // RC_LOG() << "escaped: <[[" << escaped << "]]>" << std::endl;
    auto ss = parseShebangContent(escaped);
    RC_ASSERT(ss.size() == 1);
    RC_ASSERT(*ss.begin() == orig);
}

RC_GTEST_PROP(
    parseShebangContent,
    prop_round_trip_two,
    (const std::string & one, const std::string & two))
{
    auto ss = parseShebangContent(escape(one) + " " + escape(two));
    RC_ASSERT(ss.size() == 2);
    auto i = ss.begin();
    RC_ASSERT(*i++ == one);
    RC_ASSERT(*i++ == two);
}


#endif

}
src/libutil-tests/build-utils-meson (new symbolic link)
@@ -0,0 +1 @@
../../build-utils-meson
src/libutil-tests/canon-path.cc (new file, 180 lines)
@@ -0,0 +1,180 @@
#include "canon-path.hh"

#include <gtest/gtest.h>

namespace nix {

TEST(CanonPath, basic) {
    {
        CanonPath p("/");
        ASSERT_EQ(p.abs(), "/");
        ASSERT_EQ(p.rel(), "");
        ASSERT_EQ(p.baseName(), std::nullopt);
        ASSERT_EQ(p.dirOf(), std::nullopt);
        ASSERT_FALSE(p.parent());
    }

    {
        CanonPath p("/foo//");
        ASSERT_EQ(p.abs(), "/foo");
        ASSERT_EQ(p.rel(), "foo");
        ASSERT_EQ(*p.baseName(), "foo");
        ASSERT_EQ(*p.dirOf(), ""); // FIXME: do we want this?
        ASSERT_EQ(p.parent()->abs(), "/");
    }

    {
        CanonPath p("foo/bar");
        ASSERT_EQ(p.abs(), "/foo/bar");
        ASSERT_EQ(p.rel(), "foo/bar");
        ASSERT_EQ(*p.baseName(), "bar");
        ASSERT_EQ(*p.dirOf(), "/foo");
        ASSERT_EQ(p.parent()->abs(), "/foo");
    }

    {
        CanonPath p("foo//bar/");
        ASSERT_EQ(p.abs(), "/foo/bar");
        ASSERT_EQ(p.rel(), "foo/bar");
        ASSERT_EQ(*p.baseName(), "bar");
        ASSERT_EQ(*p.dirOf(), "/foo");
    }
}

TEST(CanonPath, from_existing) {
    CanonPath p0("foo//bar/");
    {
        CanonPath p("/baz//quux/", p0);
        ASSERT_EQ(p.abs(), "/baz/quux");
        ASSERT_EQ(p.rel(), "baz/quux");
        ASSERT_EQ(*p.baseName(), "quux");
        ASSERT_EQ(*p.dirOf(), "/baz");
    }
    {
        CanonPath p("baz//quux/", p0);
        ASSERT_EQ(p.abs(), "/foo/bar/baz/quux");
        ASSERT_EQ(p.rel(), "foo/bar/baz/quux");
        ASSERT_EQ(*p.baseName(), "quux");
        ASSERT_EQ(*p.dirOf(), "/foo/bar/baz");
    }
}

TEST(CanonPath, pop) {
    CanonPath p("foo/bar/x");
    ASSERT_EQ(p.abs(), "/foo/bar/x");
    p.pop();
    ASSERT_EQ(p.abs(), "/foo/bar");
    p.pop();
    ASSERT_EQ(p.abs(), "/foo");
    p.pop();
    ASSERT_EQ(p.abs(), "/");
}

TEST(CanonPath, removePrefix) {
    CanonPath p1("foo/bar");
    CanonPath p2("foo/bar/a/b/c");
    ASSERT_EQ(p2.removePrefix(p1).abs(), "/a/b/c");
    ASSERT_EQ(p1.removePrefix(p1).abs(), "/");
    ASSERT_EQ(p1.removePrefix(CanonPath("/")).abs(), "/foo/bar");
}

TEST(CanonPath, iter) {
    {
        CanonPath p("a//foo/bar//");
        std::vector<std::string_view> ss;
        for (auto & c : p) ss.push_back(c);
        ASSERT_EQ(ss, std::vector<std::string_view>({"a", "foo", "bar"}));
    }

    {
        CanonPath p("/");
        std::vector<std::string_view> ss;
        for (auto & c : p) ss.push_back(c);
        ASSERT_EQ(ss, std::vector<std::string_view>());
    }
}

TEST(CanonPath, concat) {
    {
        CanonPath p1("a//foo/bar//");
        CanonPath p2("xyzzy/bla");
        ASSERT_EQ((p1 / p2).abs(), "/a/foo/bar/xyzzy/bla");
    }

    {
        CanonPath p1("/");
        CanonPath p2("/a/b");
        ASSERT_EQ((p1 / p2).abs(), "/a/b");
    }

    {
        CanonPath p1("/a/b");
        CanonPath p2("/");
        ASSERT_EQ((p1 / p2).abs(), "/a/b");
    }

    {
        CanonPath p("/foo/bar");
        ASSERT_EQ((p / "x").abs(), "/foo/bar/x");
    }

    {
        CanonPath p("/");
        ASSERT_EQ((p / "foo" / "bar").abs(), "/foo/bar");
    }
}

TEST(CanonPath, within) {
    ASSERT_TRUE(CanonPath("foo").isWithin(CanonPath("foo")));
    ASSERT_FALSE(CanonPath("foo").isWithin(CanonPath("bar")));
    ASSERT_FALSE(CanonPath("foo").isWithin(CanonPath("fo")));
    ASSERT_TRUE(CanonPath("foo/bar").isWithin(CanonPath("foo")));
    ASSERT_FALSE(CanonPath("foo").isWithin(CanonPath("foo/bar")));
    ASSERT_TRUE(CanonPath("/foo/bar/default.nix").isWithin(CanonPath("/")));
    ASSERT_TRUE(CanonPath("/").isWithin(CanonPath("/")));
}

TEST(CanonPath, sort) {
    ASSERT_FALSE(CanonPath("foo") < CanonPath("foo"));
    ASSERT_TRUE (CanonPath("foo") < CanonPath("foo/bar"));
    ASSERT_TRUE (CanonPath("foo/bar") < CanonPath("foo!"));
    ASSERT_FALSE(CanonPath("foo!") < CanonPath("foo"));
    ASSERT_TRUE (CanonPath("foo") < CanonPath("foo!"));
}

TEST(CanonPath, allowed) {
    std::set<CanonPath> allowed {
        CanonPath("foo/bar"),
        CanonPath("foo!"),
        CanonPath("xyzzy"),
        CanonPath("a/b/c"),
    };

    ASSERT_TRUE (CanonPath("foo/bar").isAllowed(allowed));
    ASSERT_TRUE (CanonPath("foo/bar/bla").isAllowed(allowed));
    ASSERT_TRUE (CanonPath("foo").isAllowed(allowed));
    ASSERT_FALSE(CanonPath("bar").isAllowed(allowed));
    ASSERT_FALSE(CanonPath("bar/a").isAllowed(allowed));
    ASSERT_TRUE (CanonPath("a").isAllowed(allowed));
    ASSERT_TRUE (CanonPath("a/b").isAllowed(allowed));
    ASSERT_TRUE (CanonPath("a/b/c").isAllowed(allowed));
    ASSERT_TRUE (CanonPath("a/b/c/d").isAllowed(allowed));
    ASSERT_TRUE (CanonPath("a/b/c/d/e").isAllowed(allowed));
    ASSERT_FALSE(CanonPath("a/b/a").isAllowed(allowed));
    ASSERT_FALSE(CanonPath("a/b/d").isAllowed(allowed));
    ASSERT_FALSE(CanonPath("aaa").isAllowed(allowed));
    ASSERT_FALSE(CanonPath("zzz").isAllowed(allowed));
    ASSERT_TRUE (CanonPath("/").isAllowed(allowed));
}

TEST(CanonPath, makeRelative) {
    CanonPath d("/foo/bar");
    ASSERT_EQ(d.makeRelative(CanonPath("/foo/bar")), ".");
    ASSERT_EQ(d.makeRelative(CanonPath("/foo")), "..");
    ASSERT_EQ(d.makeRelative(CanonPath("/")), "../..");
    ASSERT_EQ(d.makeRelative(CanonPath("/foo/bar/xyzzy")), "xyzzy");
    ASSERT_EQ(d.makeRelative(CanonPath("/foo/bar/xyzzy/bla")), "xyzzy/bla");
    ASSERT_EQ(d.makeRelative(CanonPath("/foo/xyzzy/bla")), "../xyzzy/bla");
    ASSERT_EQ(d.makeRelative(CanonPath("/xyzzy/bla")), "../../xyzzy/bla");
}
}
src/libutil-tests/checked-arithmetic.cc (new file, 158 lines)
@@ -0,0 +1,158 @@
#include <cstdint>
#include <gtest/gtest.h>
#include <limits>
#include <rapidcheck/Assertions.h>
#include <rapidcheck/gtest.h>
#include <rapidcheck/gen/Arbitrary.hpp>

#include <checked-arithmetic.hh>

#include "tests/gtest-with-params.hh"

namespace rc {
using namespace nix;

template<std::integral T>
struct Arbitrary<nix::checked::Checked<T>>
{
    static Gen<nix::checked::Checked<T>> arbitrary()
    {
        return gen::arbitrary<T>();
    }
};

}

namespace nix::checked {

// Pointer to member function! Mildly gross.
template<std::integral T>
using Oper = Checked<T>::Result (Checked<T>::*)(T const other) const;

template<std::integral T>
using ReferenceOper = T (*)(T a, T b);

/**
 * Checks that performing an operation that overflows into an inaccurate result
 * has the desired behaviour.
 *
 * TBig is a type large enough to represent all results of TSmall operations.
 */
template<std::integral TSmall, std::integral TBig>
void checkType(TSmall a_, TSmall b, Oper<TSmall> oper, ReferenceOper<TBig> reference)
{
    // Sufficient to fit all values
    TBig referenceResult = reference(a_, b);
    constexpr const TSmall minV = std::numeric_limits<TSmall>::min();
    constexpr const TSmall maxV = std::numeric_limits<TSmall>::max();

    Checked<TSmall> a{a_};
    auto result = (a.*(oper))(b);

    // Just truncate it to get the in-range result
    RC_ASSERT(result.valueWrapping() == static_cast<TSmall>(referenceResult));

    if (referenceResult > maxV || referenceResult < minV) {
        RC_ASSERT(result.overflowed());
        RC_ASSERT(!result.valueChecked().has_value());
    } else {
        RC_ASSERT(!result.overflowed());
        RC_ASSERT(result.valueChecked().has_value());
        RC_ASSERT(*result.valueChecked() == referenceResult);
    }
}

/**
 * Checks that performing an operation that overflows into an inaccurate result
 * has the desired behaviour.
 *
 * TBig is a type large enough to represent all results of TSmall operations.
 */
template<std::integral TSmall, std::integral TBig>
void checkDivision(TSmall a_, TSmall b)
{
    // Sufficient to fit all values
    constexpr const TSmall minV = std::numeric_limits<TSmall>::min();

    Checked<TSmall> a{a_};
    auto result = a / b;

    if (std::is_signed<TSmall>() && a_ == minV && b == -1) {
        // This is the only possible overflow condition
        RC_ASSERT(result.valueWrapping() == minV);
        RC_ASSERT(result.overflowed());
    } else if (b == 0) {
        RC_ASSERT(result.divideByZero());
        RC_ASSERT_THROWS_AS(result.valueWrapping(), nix::checked::DivideByZero);
        RC_ASSERT(result.valueChecked() == std::nullopt);
    } else {
        TBig referenceResult = a_ / b;
        auto result_ = result.valueChecked();
        RC_ASSERT(result_.has_value());
        RC_ASSERT(*result_ == referenceResult);
        RC_ASSERT(result.valueWrapping() == referenceResult);
    }
}

/** Creates parameters that perform a more adequate number of checks to validate
 * extremely cheap tests such as arithmetic tests */
static rc::detail::TestParams makeParams()
{
    auto const & conf = rc::detail::configuration();
    auto newParams = conf.testParams;
    newParams.maxSuccess = 10000;
    return newParams;
}

RC_GTEST_PROP_WITH_PARAMS(Checked, add_unsigned, makeParams, (uint16_t a, uint16_t b))
{
    checkType<uint16_t, int32_t>(a, b, &Checked<uint16_t>::operator+, [](int32_t a, int32_t b) { return a + b; });
}

RC_GTEST_PROP_WITH_PARAMS(Checked, add_signed, makeParams, (int16_t a, int16_t b))
{
    checkType<int16_t, int32_t>(a, b, &Checked<int16_t>::operator+, [](int32_t a, int32_t b) { return a + b; });
}

RC_GTEST_PROP_WITH_PARAMS(Checked, sub_unsigned, makeParams, (uint16_t a, uint16_t b))
{
    checkType<uint16_t, int32_t>(a, b, &Checked<uint16_t>::operator-, [](int32_t a, int32_t b) { return a - b; });
}

RC_GTEST_PROP_WITH_PARAMS(Checked, sub_signed, makeParams, (int16_t a, int16_t b))
{
    checkType<int16_t, int32_t>(a, b, &Checked<int16_t>::operator-, [](int32_t a, int32_t b) { return a - b; });
}

RC_GTEST_PROP_WITH_PARAMS(Checked, mul_unsigned, makeParams, (uint16_t a, uint16_t b))
{
    checkType<uint16_t, int64_t>(a, b, &Checked<uint16_t>::operator*, [](int64_t a, int64_t b) { return a * b; });
}

RC_GTEST_PROP_WITH_PARAMS(Checked, mul_signed, makeParams, (int16_t a, int16_t b))
{
    checkType<int16_t, int64_t>(a, b, &Checked<int16_t>::operator*, [](int64_t a, int64_t b) { return a * b; });
}

RC_GTEST_PROP_WITH_PARAMS(Checked, div_unsigned, makeParams, (uint16_t a, uint16_t b))
{
    checkDivision<uint16_t, int64_t>(a, b);
}

RC_GTEST_PROP_WITH_PARAMS(Checked, div_signed, makeParams, (int16_t a, int16_t b))
{
    checkDivision<int16_t, int64_t>(a, b);
}

// Make absolutely sure that we check the special cases if the proptest
// generator does not come up with them. This one is especially important
// because it has very specific pairs required for the edge cases unlike the
// others.
TEST(Checked, div_signed_special_cases)
{
    checkDivision<int16_t, int64_t>(std::numeric_limits<int16_t>::min(), -1);
    checkDivision<int16_t, int64_t>(std::numeric_limits<int16_t>::min(), 0);
    checkDivision<int16_t, int64_t>(0, 0);
}

}
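For readers unfamiliar with the library under test, the following is a minimal usage sketch (not part of this commit), built only from the calls exercised above: constructing a Checked<T> from a raw value, an overflow-reporting arithmetic operator, and the valueWrapping()/valueChecked() accessors. The exact header path and namespace are assumed to be the ones used by the test file.

#include <cstdint>
#include <iostream>

#include <checked-arithmetic.hh>

int main()
{
    nix::checked::Checked<uint16_t> a{65535};
    auto r = a + uint16_t{1}; // the true sum 65536 does not fit in uint16_t

    std::cout << r.overflowed() << "\n";                // 1: overflow was detected
    std::cout << r.valueWrapping() << "\n";             // 0: the result wrapped modulo 2^16
    std::cout << r.valueChecked().has_value() << "\n";  // 0: no exact value is available
    return 0;
}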
src/libutil-tests/chunked-vector.cc (new file, 54 lines)
@@ -0,0 +1,54 @@
#include "chunked-vector.hh"

#include <gtest/gtest.h>

namespace nix {
TEST(ChunkedVector, InitEmpty) {
    auto v = ChunkedVector<int, 2>(100);
    ASSERT_EQ(v.size(), 0);
}

TEST(ChunkedVector, GrowsCorrectly) {
    auto v = ChunkedVector<int, 2>(100);
    for (auto i = 1; i < 20; i++) {
        v.add(i);
        ASSERT_EQ(v.size(), i);
    }
}

TEST(ChunkedVector, AddAndGet) {
    auto v = ChunkedVector<int, 2>(100);
    for (auto i = 1; i < 20; i++) {
        auto [i2, idx] = v.add(i);
        auto & i3 = v[idx];
        ASSERT_EQ(i, i2);
        ASSERT_EQ(&i2, &i3);
    }
}

TEST(ChunkedVector, ForEach) {
    auto v = ChunkedVector<int, 2>(100);
    for (auto i = 1; i < 20; i++) {
        v.add(i);
    }
    int count = 0;
    v.forEach([&count](int elt) {
        count++;
    });
    ASSERT_EQ(count, v.size());
}

TEST(ChunkedVector, OverflowOK) {
    // Similar to the AddAndGet, but intentionnally use a small
    // initial ChunkedVector to force it to overflow
    auto v = ChunkedVector<int, 2>(2);
    for (auto i = 1; i < 20; i++) {
        auto [i2, idx] = v.add(i);
        auto & i3 = v[idx];
        ASSERT_EQ(i, i2);
        ASSERT_EQ(&i2, &i3);
    }
}

}
src/libutil-tests/closure.cc (new file, 70 lines)
@@ -0,0 +1,70 @@
#include "closure.hh"
#include <gtest/gtest.h>

namespace nix {

using namespace std;

map<string, set<string>> testGraph = {
    { "A", { "B", "C", "G" } },
    { "B", { "A" } }, // Loops back to A
    { "C", { "F" } }, // Indirect reference
    { "D", { "A" } }, // Not reachable, but has backreferences
    { "E", {} }, // Just not reachable
    { "F", {} },
    { "G", { "G" } }, // Self reference
};

TEST(closure, correctClosure) {
    set<string> aClosure;
    set<string> expectedClosure = {"A", "B", "C", "F", "G"};
    computeClosure<string>(
        {"A"},
        aClosure,
        [&](const string currentNode, function<void(promise<set<string>> &)> processEdges) {
            promise<set<string>> promisedNodes;
            promisedNodes.set_value(testGraph[currentNode]);
            processEdges(promisedNodes);
        }
    );

    ASSERT_EQ(aClosure, expectedClosure);
}

TEST(closure, properlyHandlesDirectExceptions) {
    struct TestExn {};
    set<string> aClosure;
    EXPECT_THROW(
        computeClosure<string>(
            {"A"},
            aClosure,
            [&](const string currentNode, function<void(promise<set<string>> &)> processEdges) {
                throw TestExn();
            }
        ),
        TestExn
    );
}

TEST(closure, properlyHandlesExceptionsInPromise) {
    struct TestExn {};
    set<string> aClosure;
    EXPECT_THROW(
        computeClosure<string>(
            {"A"},
            aClosure,
            [&](const string currentNode, function<void(promise<set<string>> &)> processEdges) {
                promise<set<string>> promise;
                try {
                    throw TestExn();
                } catch (...) {
                    promise.set_exception(std::current_exception());
                }
                processEdges(promise);
            }
        ),
        TestExn
    );
}

}
src/libutil-tests/compression.cc (new file, 96 lines)
@@ -0,0 +1,96 @@
#include "compression.hh"
#include <gtest/gtest.h>

namespace nix {

/* ----------------------------------------------------------------------------
 * compress / decompress
 * --------------------------------------------------------------------------*/

TEST(compress, compressWithUnknownMethod) {
    ASSERT_THROW(compress("invalid-method", "something-to-compress"), UnknownCompressionMethod);
}

TEST(compress, noneMethodDoesNothingToTheInput) {
    auto o = compress("none", "this-is-a-test");

    ASSERT_EQ(o, "this-is-a-test");
}

TEST(decompress, decompressNoneCompressed) {
    auto method = "none";
    auto str = "slfja;sljfklsa;jfklsjfkl;sdjfkl;sadjfkl;sdjf;lsdfjsadlf";
    auto o = decompress(method, str);

    ASSERT_EQ(o, str);
}

TEST(decompress, decompressEmptyCompressed) {
    // Empty-method decompression used e.g. by S3 store
    // (Content-Encoding == "").
    auto method = "";
    auto str = "slfja;sljfklsa;jfklsjfkl;sdjfkl;sadjfkl;sdjf;lsdfjsadlf";
    auto o = decompress(method, str);

    ASSERT_EQ(o, str);
}

TEST(decompress, decompressXzCompressed) {
    auto method = "xz";
    auto str = "slfja;sljfklsa;jfklsjfkl;sdjfkl;sadjfkl;sdjf;lsdfjsadlf";
    auto o = decompress(method, compress(method, str));

    ASSERT_EQ(o, str);
}

TEST(decompress, decompressBzip2Compressed) {
    auto method = "bzip2";
    auto str = "slfja;sljfklsa;jfklsjfkl;sdjfkl;sadjfkl;sdjf;lsdfjsadlf";
    auto o = decompress(method, compress(method, str));

    ASSERT_EQ(o, str);
}

TEST(decompress, decompressBrCompressed) {
    auto method = "br";
    auto str = "slfja;sljfklsa;jfklsjfkl;sdjfkl;sadjfkl;sdjf;lsdfjsadlf";
    auto o = decompress(method, compress(method, str));

    ASSERT_EQ(o, str);
}

TEST(decompress, decompressInvalidInputThrowsCompressionError) {
    auto method = "bzip2";
    auto str = "this is a string that does not qualify as valid bzip2 data";

    ASSERT_THROW(decompress(method, str), CompressionError);
}

/* ----------------------------------------------------------------------------
 * compression sinks
 * --------------------------------------------------------------------------*/

TEST(makeCompressionSink, noneSinkDoesNothingToInput) {
    StringSink strSink;
    auto inputString = "slfja;sljfklsa;jfklsjfkl;sdjfkl;sadjfkl;sdjf;lsdfjsadlf";
    auto sink = makeCompressionSink("none", strSink);
    (*sink)(inputString);
    sink->finish();

    ASSERT_STREQ(strSink.s.c_str(), inputString);
}

TEST(makeCompressionSink, compressAndDecompress) {
    StringSink strSink;
    auto inputString = "slfja;sljfklsa;jfklsjfkl;sdjfkl;sadjfkl;sdjf;lsdfjsadlf";
    auto decompressionSink = makeDecompressionSink("bzip2", strSink);
    auto sink = makeCompressionSink("bzip2", *decompressionSink);

    (*sink)(inputString);
    sink->finish();
    decompressionSink->finish();

    ASSERT_STREQ(strSink.s.c_str(), inputString);
}

}
src/libutil-tests/config.cc (new file, 295 lines)
@@ -0,0 +1,295 @@
#include "config.hh"
#include "args.hh"

#include <sstream>
#include <gtest/gtest.h>
#include <nlohmann/json.hpp>

namespace nix {

/* ----------------------------------------------------------------------------
 * Config
 * --------------------------------------------------------------------------*/

TEST(Config, setUndefinedSetting) {
    Config config;
    ASSERT_EQ(config.set("undefined-key", "value"), false);
}

TEST(Config, setDefinedSetting) {
    Config config;
    std::string value;
    Setting<std::string> foo{&config, value, "name-of-the-setting", "description"};
    ASSERT_EQ(config.set("name-of-the-setting", "value"), true);
}

TEST(Config, getDefinedSetting) {
    Config config;
    std::string value;
    std::map<std::string, Config::SettingInfo> settings;
    Setting<std::string> foo{&config, value, "name-of-the-setting", "description"};

    config.getSettings(settings, /* overriddenOnly = */ false);
    const auto iter = settings.find("name-of-the-setting");
    ASSERT_NE(iter, settings.end());
    ASSERT_EQ(iter->second.value, "");
    ASSERT_EQ(iter->second.description, "description\n");
}

TEST(Config, getDefinedOverriddenSettingNotSet) {
    Config config;
    std::string value;
    std::map<std::string, Config::SettingInfo> settings;
    Setting<std::string> foo{&config, value, "name-of-the-setting", "description"};

    config.getSettings(settings, /* overriddenOnly = */ true);
    const auto e = settings.find("name-of-the-setting");
    ASSERT_EQ(e, settings.end());
}

TEST(Config, getDefinedSettingSet1) {
    Config config;
    std::string value;
    std::map<std::string, Config::SettingInfo> settings;
    Setting<std::string> setting{&config, value, "name-of-the-setting", "description"};

    setting.assign("value");

    config.getSettings(settings, /* overriddenOnly = */ false);
    const auto iter = settings.find("name-of-the-setting");
    ASSERT_NE(iter, settings.end());
    ASSERT_EQ(iter->second.value, "value");
    ASSERT_EQ(iter->second.description, "description\n");
}

TEST(Config, getDefinedSettingSet2) {
    Config config;
    std::map<std::string, Config::SettingInfo> settings;
    Setting<std::string> setting{&config, "", "name-of-the-setting", "description"};

    ASSERT_TRUE(config.set("name-of-the-setting", "value"));

    config.getSettings(settings, /* overriddenOnly = */ false);
    const auto e = settings.find("name-of-the-setting");
    ASSERT_NE(e, settings.end());
    ASSERT_EQ(e->second.value, "value");
    ASSERT_EQ(e->second.description, "description\n");
}

TEST(Config, addSetting) {
    class TestSetting : public AbstractSetting {
    public:
        TestSetting() : AbstractSetting("test", "test", {}) {}
        void set(const std::string & value, bool append) override {}
        std::string to_string() const override { return {}; }
        bool isAppendable() override { return false; }
    };

    Config config;
    TestSetting setting;

    ASSERT_FALSE(config.set("test", "value"));
    config.addSetting(&setting);
    ASSERT_TRUE(config.set("test", "value"));
    ASSERT_FALSE(config.set("extra-test", "value"));
}

TEST(Config, withInitialValue) {
    const StringMap initials = {
        { "key", "value" },
    };
    Config config(initials);

    {
        std::map<std::string, Config::SettingInfo> settings;
        config.getSettings(settings, /* overriddenOnly = */ false);
        ASSERT_EQ(settings.find("key"), settings.end());
    }

    Setting<std::string> setting{&config, "default-value", "key", "description"};

    {
        std::map<std::string, Config::SettingInfo> settings;
        config.getSettings(settings, /* overriddenOnly = */ false);
        ASSERT_EQ(settings["key"].value, "value");
    }
}

TEST(Config, resetOverridden) {
    Config config;
    config.resetOverridden();
}

TEST(Config, resetOverriddenWithSetting) {
    Config config;
    Setting<std::string> setting{&config, "", "name-of-the-setting", "description"};

    {
        std::map<std::string, Config::SettingInfo> settings;

        setting.set("foo");
        ASSERT_EQ(setting.get(), "foo");
        config.getSettings(settings, /* overriddenOnly = */ true);
        ASSERT_TRUE(settings.empty());
    }

    {
        std::map<std::string, Config::SettingInfo> settings;

        setting.override("bar");
        ASSERT_TRUE(setting.overridden);
        ASSERT_EQ(setting.get(), "bar");
        config.getSettings(settings, /* overriddenOnly = */ true);
        ASSERT_FALSE(settings.empty());
    }

    {
        std::map<std::string, Config::SettingInfo> settings;

        config.resetOverridden();
        ASSERT_FALSE(setting.overridden);
        config.getSettings(settings, /* overriddenOnly = */ true);
        ASSERT_TRUE(settings.empty());
    }
}

TEST(Config, toJSONOnEmptyConfig) {
    ASSERT_EQ(Config().toJSON().dump(), "{}");
}

TEST(Config, toJSONOnNonEmptyConfig) {
    using nlohmann::literals::operator "" _json;
    Config config;
    Setting<std::string> setting{
        &config,
        "",
        "name-of-the-setting",
        "description",
    };
    setting.assign("value");

    ASSERT_EQ(config.toJSON(),
        R"#({
            "name-of-the-setting": {
                "aliases": [],
                "defaultValue": "",
                "description": "description\n",
                "documentDefault": true,
                "value": "value",
                "experimentalFeature": null
            }
        })#"_json);
}

TEST(Config, toJSONOnNonEmptyConfigWithExperimentalSetting) {
    using nlohmann::literals::operator "" _json;
    Config config;
    Setting<std::string> setting{
        &config,
        "",
        "name-of-the-setting",
        "description",
        {},
        true,
        Xp::Flakes,
    };
    setting.assign("value");

    ASSERT_EQ(config.toJSON(),
        R"#({
            "name-of-the-setting": {
                "aliases": [],
                "defaultValue": "",
                "description": "description\n",
                "documentDefault": true,
                "value": "value",
                "experimentalFeature": "flakes"
            }
        })#"_json);
}

TEST(Config, setSettingAlias) {
    Config config;
    Setting<std::string> setting{&config, "", "some-int", "best number", { "another-int" }};
    ASSERT_TRUE(config.set("some-int", "1"));
    ASSERT_EQ(setting.get(), "1");
    ASSERT_TRUE(config.set("another-int", "2"));
    ASSERT_EQ(setting.get(), "2");
    ASSERT_TRUE(config.set("some-int", "3"));
    ASSERT_EQ(setting.get(), "3");
}

/* FIXME: The reapplyUnknownSettings method doesn't seem to do anything
 * useful (these days). Whenever we add a new setting to Config the
 * unknown settings are always considered. In which case is this function
 * actually useful? Is there some way to register a Setting without calling
 * addSetting? */
TEST(Config, DISABLED_reapplyUnknownSettings) {
    Config config;
    ASSERT_FALSE(config.set("name-of-the-setting", "unknownvalue"));
    Setting<std::string> setting{&config, "default", "name-of-the-setting", "description"};
    ASSERT_EQ(setting.get(), "default");
    config.reapplyUnknownSettings();
    ASSERT_EQ(setting.get(), "unknownvalue");
}

TEST(Config, applyConfigEmpty) {
    Config config;
    std::map<std::string, Config::SettingInfo> settings;
    config.applyConfig("");
    config.getSettings(settings);
    ASSERT_TRUE(settings.empty());
}

TEST(Config, applyConfigEmptyWithComment) {
    Config config;
    std::map<std::string, Config::SettingInfo> settings;
    config.applyConfig("# just a comment");
    config.getSettings(settings);
    ASSERT_TRUE(settings.empty());
}

TEST(Config, applyConfigAssignment) {
    Config config;
    std::map<std::string, Config::SettingInfo> settings;
    Setting<std::string> setting{&config, "", "name-of-the-setting", "description"};
    config.applyConfig(
        "name-of-the-setting = value-from-file #useful comment\n"
        "# name-of-the-setting = foo\n"
    );
    config.getSettings(settings);
    ASSERT_FALSE(settings.empty());
    ASSERT_EQ(settings["name-of-the-setting"].value, "value-from-file");
}

TEST(Config, applyConfigWithReassignedSetting) {
    Config config;
    std::map<std::string, Config::SettingInfo> settings;
    Setting<std::string> setting{&config, "", "name-of-the-setting", "description"};
    config.applyConfig(
        "name-of-the-setting = first-value\n"
        "name-of-the-setting = second-value\n"
    );
    config.getSettings(settings);
    ASSERT_FALSE(settings.empty());
    ASSERT_EQ(settings["name-of-the-setting"].value, "second-value");
}

TEST(Config, applyConfigFailsOnMissingIncludes) {
    Config config;
    std::map<std::string, Config::SettingInfo> settings;
    Setting<std::string> setting{&config, "", "name-of-the-setting", "description"};

    ASSERT_THROW(config.applyConfig(
        "name-of-the-setting = value-from-file\n"
        "# name-of-the-setting = foo\n"
        "include /nix/store/does/not/exist.nix"
    ), Error);
}

TEST(Config, applyConfigInvalidThrows) {
    Config config;
    ASSERT_THROW(config.applyConfig("value == key"), UsageError);
    ASSERT_THROW(config.applyConfig("value "), UsageError);
}
}
src/libutil-tests/data/git/check-data.sh (new file, 31 lines)
@@ -0,0 +1,31 @@
#!/usr/bin/env bash

set -eu -o pipefail

export TEST_ROOT=$(realpath ${TMPDIR:-/tmp}/nix-test)/git-hashing/check-data
mkdir -p $TEST_ROOT

repo="$TEST_ROOT/scratch"
git init "$repo"

git -C "$repo" config user.email "you@example.com"
git -C "$repo" config user.name "Your Name"

# `-w` to write for tree test
freshlyAddedHash=$(git -C "$repo" hash-object -w -t blob --stdin < "./hello-world.bin")
encodingHash=$(sha1sum -b < "./hello-world-blob.bin" | head -c 40)

# If the hashes match, then `hello-world-blob.bin` must be the encoding
# of `hello-world.bin`.
[[ "$encodingHash" == "$freshlyAddedHash" ]]

# Create empty directory object for tree test
echo -n | git -C "$repo" hash-object -w -t tree --stdin

# Relies on both child hashes already existing in the git store
freshlyAddedHash=$(git -C "$repo" mktree < "./tree.txt")
encodingHash=$(sha1sum -b < "./tree.bin" | head -c 40)

# If the hashes match, then `tree.bin` must be the encoding of the
# directory denoted by `tree.txt` interpreted as git directory listing.
[[ "$encodingHash" == "$freshlyAddedHash" ]]
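As background for what the script verifies: a git blob object is the header "blob <size>" plus a NUL byte, followed by the file's raw bytes, and its object id is the SHA-1 of that encoding. The following is a small illustrative sketch (not part of this commit) of the same property in C++, reusing the hashString helper from libutil's hash.hh as exercised in hash.cc below; treat the exact signature as an assumption.

#include <string>

#include "hash.hh"

// Computes the git object id of a blob, i.e. what `git hash-object -t blob`
// prints for the same content. For data/git/hello-world.bin this should also
// equal the SHA-1 of data/git/hello-world-blob.bin, since that file already
// contains the "blob <size>\0" header followed by the content.
nix::Hash gitBlobId(const std::string & content)
{
    std::string object = "blob " + std::to_string(content.size());
    object.push_back('\0');
    object += content;
    return nix::hashString(nix::HashAlgorithm::SHA1, object);
}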
src/libutil-tests/data/git/hello-world-blob.bin (new binary file; contents not shown)
src/libutil-tests/data/git/hello-world.bin (new binary file; contents not shown)
src/libutil-tests/data/git/tree.bin (new binary file; contents not shown)
src/libutil-tests/data/git/tree.txt (new file, 4 lines)
@@ -0,0 +1,4 @@
100644 blob 63ddb340119baf8492d2da53af47e8c7cfcd5eb2 Foo
100755 blob 63ddb340119baf8492d2da53af47e8c7cfcd5eb2 bAr
040000 tree 4b825dc642cb6eb9a060e54bf8d69288fbee4904 baZ
120000 blob 63ddb340119baf8492d2da53af47e8c7cfcd5eb2 quuX
src/libutil-tests/executable-path.cc (new file, 64 lines)
@@ -0,0 +1,64 @@
#include <gtest/gtest.h>

#include "executable-path.hh"

namespace nix {

#ifdef WIN32
# define PATH_VAR_SEP L";"
#else
# define PATH_VAR_SEP ":"
#endif

#define PATH_ENV_ROUND_TRIP(NAME, STRING_LIT, CXX_LIT) \
    TEST(ExecutablePath, NAME) \
    { \
        OsString s = STRING_LIT; \
        auto v = ExecutablePath::parse(s); \
        EXPECT_EQ(v, (ExecutablePath CXX_LIT)); \
        auto s2 = v.render(); \
        EXPECT_EQ(s2, s); \
    }

PATH_ENV_ROUND_TRIP(emptyRoundTrip, OS_STR(""), ({}))

PATH_ENV_ROUND_TRIP(
    oneElemRoundTrip,
    OS_STR("/foo"),
    ({
        OS_STR("/foo"),
    }))

PATH_ENV_ROUND_TRIP(
    twoElemsRoundTrip,
    OS_STR("/foo" PATH_VAR_SEP "/bar"),
    ({
        OS_STR("/foo"),
        OS_STR("/bar"),
    }))

PATH_ENV_ROUND_TRIP(
    threeElemsRoundTrip,
    OS_STR("/foo" PATH_VAR_SEP "." PATH_VAR_SEP "/bar"),
    ({
        OS_STR("/foo"),
        OS_STR("."),
        OS_STR("/bar"),
    }))

TEST(ExecutablePath, elementyElemNormalize)
{
    auto v = ExecutablePath::parse(PATH_VAR_SEP PATH_VAR_SEP PATH_VAR_SEP);
    EXPECT_EQ(
        v,
        (ExecutablePath{{
            OS_STR("."),
            OS_STR("."),
            OS_STR("."),
            OS_STR("."),
        }}));
    auto s2 = v.render();
    EXPECT_EQ(s2, OS_STR("." PATH_VAR_SEP "." PATH_VAR_SEP "." PATH_VAR_SEP "."));
}

}
src/libutil-tests/file-content-address.cc (new file, 61 lines)
@@ -0,0 +1,61 @@
#include <gtest/gtest.h>

#include "file-content-address.hh"

namespace nix {

/* ----------------------------------------------------------------------------
 * parseFileSerialisationMethod, renderFileSerialisationMethod
 * --------------------------------------------------------------------------*/

TEST(FileSerialisationMethod, testRoundTripPrintParse_1) {
    for (const FileSerialisationMethod fim : {
        FileSerialisationMethod::Flat,
        FileSerialisationMethod::NixArchive,
    }) {
        EXPECT_EQ(parseFileSerialisationMethod(renderFileSerialisationMethod(fim)), fim);
    }
}

TEST(FileSerialisationMethod, testRoundTripPrintParse_2) {
    for (const std::string_view fimS : {
        "flat",
        "nar",
    }) {
        EXPECT_EQ(renderFileSerialisationMethod(parseFileSerialisationMethod(fimS)), fimS);
    }
}

TEST(FileSerialisationMethod, testParseFileSerialisationMethodOptException) {
    EXPECT_THROW(parseFileSerialisationMethod("narwhal"), UsageError);
}

/* ----------------------------------------------------------------------------
 * parseFileIngestionMethod, renderFileIngestionMethod
 * --------------------------------------------------------------------------*/

TEST(FileIngestionMethod, testRoundTripPrintParse_1) {
    for (const FileIngestionMethod fim : {
        FileIngestionMethod::Flat,
        FileIngestionMethod::NixArchive,
        FileIngestionMethod::Git,
    }) {
        EXPECT_EQ(parseFileIngestionMethod(renderFileIngestionMethod(fim)), fim);
    }
}

TEST(FileIngestionMethod, testRoundTripPrintParse_2) {
    for (const std::string_view fimS : {
        "flat",
        "nar",
        "git",
    }) {
        EXPECT_EQ(renderFileIngestionMethod(parseFileIngestionMethod(fimS)), fimS);
    }
}

TEST(FileIngestionMethod, testParseFileIngestionMethodOptException) {
    EXPECT_THROW(parseFileIngestionMethod("narwhal"), UsageError);
}

}
src/libutil-tests/file-system.cc (new file, 264 lines)
@@ -0,0 +1,264 @@
#include "util.hh"
#include "types.hh"
#include "file-system.hh"
#include "processes.hh"
#include "terminal.hh"
#include "strings.hh"

#include <limits.h>
#include <gtest/gtest.h>
#include <rapidcheck/gtest.h>

#include <numeric>

#ifdef _WIN32
# define FS_SEP L"\\"
# define FS_ROOT L"C:" FS_SEP // Need a mounted one, C drive is likely
#else
# define FS_SEP "/"
# define FS_ROOT FS_SEP
#endif

#ifndef PATH_MAX
# define PATH_MAX 4096
#endif

#ifdef _WIN32
# define GET_CWD _wgetcwd
#else
# define GET_CWD getcwd
#endif

namespace nix {

/* ----------- tests for file-system.hh -------------------------------------*/

/* ----------------------------------------------------------------------------
 * absPath
 * --------------------------------------------------------------------------*/

TEST(absPath, doesntChangeRoot)
{
    auto p = absPath(std::filesystem::path{FS_ROOT});

    ASSERT_EQ(p, FS_ROOT);
}

TEST(absPath, turnsEmptyPathIntoCWD)
{
    OsChar cwd[PATH_MAX + 1];
    auto p = absPath(std::filesystem::path{""});

    ASSERT_EQ(p, GET_CWD((OsChar *) &cwd, PATH_MAX));
}

TEST(absPath, usesOptionalBasePathWhenGiven)
{
    OsChar _cwd[PATH_MAX + 1];
    OsChar * cwd = GET_CWD((OsChar *) &_cwd, PATH_MAX);

    auto p = absPath(std::filesystem::path{""}.string(), std::filesystem::path{cwd}.string());

    ASSERT_EQ(p, std::filesystem::path{cwd}.string());
}

TEST(absPath, isIdempotent)
{
    OsChar _cwd[PATH_MAX + 1];
    OsChar * cwd = GET_CWD((OsChar *) &_cwd, PATH_MAX);
    auto p1 = absPath(std::filesystem::path{cwd});
    auto p2 = absPath(p1);

    ASSERT_EQ(p1, p2);
}

TEST(absPath, pathIsCanonicalised)
{
    auto path = FS_ROOT OS_STR("some/path/with/trailing/dot/.");
    auto p1 = absPath(std::filesystem::path{path});
    auto p2 = absPath(p1);

    ASSERT_EQ(p1, FS_ROOT "some" FS_SEP "path" FS_SEP "with" FS_SEP "trailing" FS_SEP "dot");
    ASSERT_EQ(p1, p2);
}

/* ----------------------------------------------------------------------------
 * canonPath
 * --------------------------------------------------------------------------*/

TEST(canonPath, removesTrailingSlashes)
{
    std::filesystem::path path = FS_ROOT "this/is/a/path//";
    auto p = canonPath(path.string());

    ASSERT_EQ(p, std::filesystem::path{FS_ROOT "this" FS_SEP "is" FS_SEP "a" FS_SEP "path"}.string());
}

TEST(canonPath, removesDots)
{
    std::filesystem::path path = FS_ROOT "this/./is/a/path/./";
    auto p = canonPath(path.string());

    ASSERT_EQ(p, std::filesystem::path{FS_ROOT "this" FS_SEP "is" FS_SEP "a" FS_SEP "path"}.string());
}

TEST(canonPath, removesDots2)
{
    std::filesystem::path path = FS_ROOT "this/a/../is/a////path/foo/..";
    auto p = canonPath(path.string());

    ASSERT_EQ(p, std::filesystem::path{FS_ROOT "this" FS_SEP "is" FS_SEP "a" FS_SEP "path"}.string());
}

TEST(canonPath, requiresAbsolutePath)
{
    ASSERT_ANY_THROW(canonPath("."));
    ASSERT_ANY_THROW(canonPath(".."));
    ASSERT_ANY_THROW(canonPath("../"));
    ASSERT_DEATH({ canonPath(""); }, "path != \"\"");
}

/* ----------------------------------------------------------------------------
 * dirOf
 * --------------------------------------------------------------------------*/

TEST(dirOf, returnsEmptyStringForRoot)
{
    auto p = dirOf("/");

    ASSERT_EQ(p, "/");
}

TEST(dirOf, returnsFirstPathComponent)
{
    auto p1 = dirOf("/dir/");
    ASSERT_EQ(p1, "/dir");
    auto p2 = dirOf("/dir");
    ASSERT_EQ(p2, "/");
    auto p3 = dirOf("/dir/..");
    ASSERT_EQ(p3, "/dir");
    auto p4 = dirOf("/dir/../");
    ASSERT_EQ(p4, "/dir/..");
}

/* ----------------------------------------------------------------------------
 * baseNameOf
 * --------------------------------------------------------------------------*/

TEST(baseNameOf, emptyPath)
{
    auto p1 = baseNameOf("");
    ASSERT_EQ(p1, "");
}

TEST(baseNameOf, pathOnRoot)
{
    auto p1 = baseNameOf("/dir");
    ASSERT_EQ(p1, "dir");
}

TEST(baseNameOf, relativePath)
{
    auto p1 = baseNameOf("dir/foo");
    ASSERT_EQ(p1, "foo");
}

TEST(baseNameOf, pathWithTrailingSlashRoot)
{
    auto p1 = baseNameOf("/");
    ASSERT_EQ(p1, "");
}

TEST(baseNameOf, trailingSlash)
{
    auto p1 = baseNameOf("/dir/");
    ASSERT_EQ(p1, "dir");
}

TEST(baseNameOf, trailingSlashes)
{
    auto p1 = baseNameOf("/dir//");
    ASSERT_EQ(p1, "dir");
}

TEST(baseNameOf, absoluteNothingSlashNothing)
{
    auto p1 = baseNameOf("//");
    ASSERT_EQ(p1, "");
}

/* ----------------------------------------------------------------------------
 * isInDir
 * --------------------------------------------------------------------------*/

TEST(isInDir, trivialCase)
{
    auto p1 = isInDir("/foo/bar", "/foo");
    ASSERT_EQ(p1, true);
}

TEST(isInDir, notInDir)
{
    auto p1 = isInDir("/zes/foo/bar", "/foo");
    ASSERT_EQ(p1, false);
}

// XXX: hm, bug or feature? :) Looking at the implementation
// this might be problematic.
TEST(isInDir, emptyDir)
{
    auto p1 = isInDir("/zes/foo/bar", "");
    ASSERT_EQ(p1, true);
}

/* ----------------------------------------------------------------------------
 * isDirOrInDir
 * --------------------------------------------------------------------------*/

TEST(isDirOrInDir, trueForSameDirectory)
{
    ASSERT_EQ(isDirOrInDir("/nix", "/nix"), true);
    ASSERT_EQ(isDirOrInDir("/", "/"), true);
}

TEST(isDirOrInDir, trueForEmptyPaths)
{
    ASSERT_EQ(isDirOrInDir("", ""), true);
}

TEST(isDirOrInDir, falseForDisjunctPaths)
{
    ASSERT_EQ(isDirOrInDir("/foo", "/bar"), false);
}

TEST(isDirOrInDir, relativePaths)
{
    ASSERT_EQ(isDirOrInDir("/foo/..", "/foo"), true);
}

// XXX: while it is possible to use "." or ".." in the
// first argument this doesn't seem to work in the second.
TEST(isDirOrInDir, DISABLED_shouldWork)
{
    ASSERT_EQ(isDirOrInDir("/foo/..", "/foo/."), true);
}

/* ----------------------------------------------------------------------------
 * pathExists
 * --------------------------------------------------------------------------*/

TEST(pathExists, rootExists)
{
    ASSERT_TRUE(pathExists(std::filesystem::path{FS_ROOT}.string()));
}

TEST(pathExists, cwdExists)
{
    ASSERT_TRUE(pathExists("."));
}

TEST(pathExists, bogusPathDoesNotExist)
{
    ASSERT_FALSE(pathExists("/schnitzel/darmstadt/pommes"));
}
}
src/libutil-tests/git.cc (new file, 262 lines)
@@ -0,0 +1,262 @@
#include <gtest/gtest.h>

#include "git.hh"
#include "memory-source-accessor.hh"

#include "tests/characterization.hh"

namespace nix {

using namespace git;

class GitTest : public CharacterizationTest
{
    std::filesystem::path unitTestData = getUnitTestData() / "git";

public:

    std::filesystem::path goldenMaster(std::string_view testStem) const override {
        return unitTestData / std::string(testStem);
    }

    /**
     * We set these in tests rather than the regular globals so we don't have
     * to worry about race conditions if the tests run concurrently.
     */
    ExperimentalFeatureSettings mockXpSettings;

private:

    void SetUp() override
    {
        mockXpSettings.set("experimental-features", "git-hashing");
    }
};

TEST(GitMode, gitMode_directory) {
    Mode m = Mode::Directory;
    RawMode r = 0040000;
    ASSERT_EQ(static_cast<RawMode>(m), r);
    ASSERT_EQ(decodeMode(r), std::optional { m });
};

TEST(GitMode, gitMode_executable) {
    Mode m = Mode::Executable;
    RawMode r = 0100755;
    ASSERT_EQ(static_cast<RawMode>(m), r);
    ASSERT_EQ(decodeMode(r), std::optional { m });
};

TEST(GitMode, gitMode_regular) {
    Mode m = Mode::Regular;
    RawMode r = 0100644;
    ASSERT_EQ(static_cast<RawMode>(m), r);
    ASSERT_EQ(decodeMode(r), std::optional { m });
};

TEST(GitMode, gitMode_symlink) {
    Mode m = Mode::Symlink;
    RawMode r = 0120000;
    ASSERT_EQ(static_cast<RawMode>(m), r);
    ASSERT_EQ(decodeMode(r), std::optional { m });
};

TEST_F(GitTest, blob_read) {
    readTest("hello-world-blob.bin", [&](const auto & encoded) {
        StringSource in { encoded };
        StringSink out;
        RegularFileSink out2 { out };
        ASSERT_EQ(parseObjectType(in, mockXpSettings), ObjectType::Blob);
        parseBlob(out2, CanonPath::root, in, BlobMode::Regular, mockXpSettings);

        auto expected = readFile(goldenMaster("hello-world.bin"));

        ASSERT_EQ(out.s, expected);
    });
}

TEST_F(GitTest, blob_write) {
    writeTest("hello-world-blob.bin", [&]() {
        auto decoded = readFile(goldenMaster("hello-world.bin"));
        StringSink s;
        dumpBlobPrefix(decoded.size(), s, mockXpSettings);
        s(decoded);
        return s.s;
    });
}

/**
 * This data is for "shallow" tree tests. However, we use "real" hashes
 * so that we can check our test data in a small shell script test test
 * (`src/libutil-tests/data/git/check-data.sh`).
 */
const static Tree tree = {
    {
        "Foo",
        {
            .mode = Mode::Regular,
            // hello world with special chars from above
            .hash = Hash::parseAny("63ddb340119baf8492d2da53af47e8c7cfcd5eb2", HashAlgorithm::SHA1),
        },
    },
    {
        "bAr",
        {
            .mode = Mode::Executable,
            // ditto
            .hash = Hash::parseAny("63ddb340119baf8492d2da53af47e8c7cfcd5eb2", HashAlgorithm::SHA1),
        },
    },
    {
        "baZ/",
        {
            .mode = Mode::Directory,
            // Empty directory hash
            .hash = Hash::parseAny("4b825dc642cb6eb9a060e54bf8d69288fbee4904", HashAlgorithm::SHA1),
        },
    },
    {
        "quuX",
        {
            .mode = Mode::Symlink,
            // hello world with special chars from above (symlink target
            // can be anything)
            .hash = Hash::parseAny("63ddb340119baf8492d2da53af47e8c7cfcd5eb2", HashAlgorithm::SHA1),
        },
    },
};

TEST_F(GitTest, tree_read) {
    readTest("tree.bin", [&](const auto & encoded) {
        StringSource in { encoded };
        NullFileSystemObjectSink out;
        Tree got;
        ASSERT_EQ(parseObjectType(in, mockXpSettings), ObjectType::Tree);
        parseTree(out, CanonPath::root, in, [&](auto & name, auto entry) {
            auto name2 = std::string{name.rel()};
            if (entry.mode == Mode::Directory)
                name2 += '/';
            got.insert_or_assign(name2, std::move(entry));
        }, mockXpSettings);

        ASSERT_EQ(got, tree);
    });
}

TEST_F(GitTest, tree_write) {
    writeTest("tree.bin", [&]() {
        StringSink s;
        dumpTree(tree, s, mockXpSettings);
        return s.s;
    });
}

TEST_F(GitTest, both_roundrip) {
    using File = MemorySourceAccessor::File;

    auto files = make_ref<MemorySourceAccessor>();
    files->root = File::Directory {
        .contents {
            {
                "foo",
                File::Regular {
                    .contents = "hello\n\0\n\tworld!",
                },
            },
            {
                "bar",
                File::Directory {
                    .contents = {
                        {
                            "baz",
                            File::Regular {
                                .executable = true,
                                .contents = "good day,\n\0\n\tworld!",
                            },
                        },
                        {
                            "quux",
                            File::Symlink {
                                .target = "/over/there",
                            },
                        },
                    },
                },
            },
        },
    };

    std::map<Hash, std::string> cas;

    std::function<DumpHook> dumpHook;
    dumpHook = [&](const SourcePath & path) {
        StringSink s;
        HashSink hashSink { HashAlgorithm::SHA1 };
        TeeSink s2 { s, hashSink };
        auto mode = dump(
            path, s2, dumpHook,
            defaultPathFilter, mockXpSettings);
        auto hash = hashSink.finish().first;
        cas.insert_or_assign(hash, std::move(s.s));
        return TreeEntry {
            .mode = mode,
            .hash = hash,
        };
    };

    auto root = dumpHook({files});

    auto files2 = make_ref<MemorySourceAccessor>();

    MemorySink sinkFiles2 { *files2 };

    std::function<void(const CanonPath, const Hash &, BlobMode)> mkSinkHook;
    mkSinkHook = [&](auto prefix, auto & hash, auto blobMode) {
        StringSource in { cas[hash] };
        parse(
            sinkFiles2, prefix, in, blobMode,
            [&](const CanonPath & name, const auto & entry) {
                mkSinkHook(
                    prefix / name,
                    entry.hash,
                    // N.B. this cast would not be acceptable in real
                    // code, because it would make an assert reachable,
                    // but it should harmless in this test.
                    static_cast<BlobMode>(entry.mode));
            },
            mockXpSettings);
    };

    mkSinkHook(CanonPath::root, root.hash, BlobMode::Regular);

    ASSERT_EQ(files->root, files2->root);
}

TEST(GitLsRemote, parseSymrefLineWithReference) {
    auto line = "ref: refs/head/main HEAD";
    auto res = parseLsRemoteLine(line);
    ASSERT_TRUE(res.has_value());
    ASSERT_EQ(res->kind, LsRemoteRefLine::Kind::Symbolic);
    ASSERT_EQ(res->target, "refs/head/main");
    ASSERT_EQ(res->reference, "HEAD");
}

TEST(GitLsRemote, parseSymrefLineWithNoReference) {
    auto line = "ref: refs/head/main";
    auto res = parseLsRemoteLine(line);
    ASSERT_TRUE(res.has_value());
    ASSERT_EQ(res->kind, LsRemoteRefLine::Kind::Symbolic);
    ASSERT_EQ(res->target, "refs/head/main");
    ASSERT_EQ(res->reference, std::nullopt);
}

TEST(GitLsRemote, parseObjectRefLine) {
    auto line = "abc123 refs/head/main";
    auto res = parseLsRemoteLine(line);
    ASSERT_TRUE(res.has_value());
    ASSERT_EQ(res->kind, LsRemoteRefLine::Kind::Object);
    ASSERT_EQ(res->target, "abc123");
    ASSERT_EQ(res->reference, "refs/head/main");
}

}
92
src/libutil-tests/hash.cc
Normal file
|
@ -0,0 +1,92 @@
|
|||
#include <regex>
|
||||
|
||||
#include <gtest/gtest.h>
|
||||
|
||||
#include "hash.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* hashString
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
TEST(hashString, testKnownMD5Hashes1) {
|
||||
// values taken from: https://tools.ietf.org/html/rfc1321
|
||||
auto s1 = "";
|
||||
auto hash = hashString(HashAlgorithm::MD5, s1);
|
||||
ASSERT_EQ(hash.to_string(HashFormat::Base16, true), "md5:d41d8cd98f00b204e9800998ecf8427e");
|
||||
}
|
||||
|
||||
TEST(hashString, testKnownMD5Hashes2) {
|
||||
// values taken from: https://tools.ietf.org/html/rfc1321
|
||||
auto s2 = "abc";
|
||||
auto hash = hashString(HashAlgorithm::MD5, s2);
|
||||
ASSERT_EQ(hash.to_string(HashFormat::Base16, true), "md5:900150983cd24fb0d6963f7d28e17f72");
|
||||
}
|
||||
|
||||
TEST(hashString, testKnownSHA1Hashes1) {
|
||||
// values taken from: https://tools.ietf.org/html/rfc3174
|
||||
auto s = "abc";
|
||||
auto hash = hashString(HashAlgorithm::SHA1, s);
|
||||
ASSERT_EQ(hash.to_string(HashFormat::Base16, true),"sha1:a9993e364706816aba3e25717850c26c9cd0d89d");
|
||||
}
|
||||
|
||||
TEST(hashString, testKnownSHA1Hashes2) {
|
||||
// values taken from: https://tools.ietf.org/html/rfc3174
|
||||
auto s = "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq";
|
||||
auto hash = hashString(HashAlgorithm::SHA1, s);
|
||||
ASSERT_EQ(hash.to_string(HashFormat::Base16, true),"sha1:84983e441c3bd26ebaae4aa1f95129e5e54670f1");
|
||||
}
|
||||
|
||||
TEST(hashString, testKnownSHA256Hashes1) {
|
||||
// values taken from: https://tools.ietf.org/html/rfc4634
|
||||
auto s = "abc";
|
||||
|
||||
auto hash = hashString(HashAlgorithm::SHA256, s);
|
||||
ASSERT_EQ(hash.to_string(HashFormat::Base16, true),
|
||||
"sha256:ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad");
|
||||
}
|
||||
|
||||
TEST(hashString, testKnownSHA256Hashes2) {
|
||||
// values taken from: https://tools.ietf.org/html/rfc4634
|
||||
auto s = "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq";
|
||||
auto hash = hashString(HashAlgorithm::SHA256, s);
|
||||
ASSERT_EQ(hash.to_string(HashFormat::Base16, true),
|
||||
"sha256:248d6a61d20638b8e5c026930c3e6039a33ce45964ff2167f6ecedd419db06c1");
|
||||
}
|
||||
|
||||
TEST(hashString, testKnownSHA512Hashes1) {
|
||||
// values taken from: https://tools.ietf.org/html/rfc4634
|
||||
auto s = "abc";
|
||||
auto hash = hashString(HashAlgorithm::SHA512, s);
|
||||
ASSERT_EQ(hash.to_string(HashFormat::Base16, true),
|
||||
"sha512:ddaf35a193617abacc417349ae20413112e6fa4e89a9"
|
||||
"7ea20a9eeee64b55d39a2192992a274fc1a836ba3c23a3feebbd"
|
||||
"454d4423643ce80e2a9ac94fa54ca49f");
|
||||
}
|
||||
TEST(hashString, testKnownSHA512Hashes2) {
|
||||
// values taken from: https://tools.ietf.org/html/rfc4634
|
||||
auto s = "abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu";
|
||||
|
||||
auto hash = hashString(HashAlgorithm::SHA512, s);
|
||||
ASSERT_EQ(hash.to_string(HashFormat::Base16, true),
|
||||
"sha512:8e959b75dae313da8cf4f72814fc143f8f7779c6eb9f7fa1"
|
||||
"7299aeadb6889018501d289e4900f7e4331b99dec4b5433a"
|
||||
"c7d329eeb6dd26545e96e55b874be909");
|
||||
}
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* parseHashFormat, parseHashFormatOpt, printHashFormat
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
TEST(hashFormat, testRoundTripPrintParse) {
|
||||
for (const HashFormat hashFormat: { HashFormat::Base64, HashFormat::Nix32, HashFormat::Base16, HashFormat::SRI}) {
|
||||
ASSERT_EQ(parseHashFormat(printHashFormat(hashFormat)), hashFormat);
|
||||
ASSERT_EQ(*parseHashFormatOpt(printHashFormat(hashFormat)), hashFormat);
|
||||
}
|
||||
}
|
||||
|
||||
TEST(hashFormat, testParseHashFormatOptException) {
|
||||
ASSERT_EQ(parseHashFormatOpt("sha0042"), std::nullopt);
|
||||
}
|
||||
}
|
66
src/libutil-tests/hilite.cc
Normal file
|
@ -0,0 +1,66 @@
|
|||
#include "hilite.hh"
|
||||
|
||||
#include <gtest/gtest.h>
|
||||
|
||||
namespace nix {
|
||||
/* ----------- tests for hilite.hh ----------------------------------------------*/
|
||||
|
||||
TEST(hiliteMatches, noHighlight) {
|
||||
ASSERT_STREQ(hiliteMatches("Hello, world!", std::vector<std::smatch>(), "(", ")").c_str(), "Hello, world!");
|
||||
}
|
||||
|
||||
TEST(hiliteMatches, simpleHighlight) {
|
||||
std::string str = "Hello, world!";
|
||||
std::regex re = std::regex("world");
|
||||
auto matches = std::vector(std::sregex_iterator(str.begin(), str.end(), re), std::sregex_iterator());
|
||||
ASSERT_STREQ(
|
||||
hiliteMatches(str, matches, "(", ")").c_str(),
|
||||
"Hello, (world)!"
|
||||
);
|
||||
}
|
||||
|
||||
TEST(hiliteMatches, multipleMatches) {
|
||||
std::string str = "Hello, world, world, world, world, world, world, Hello!";
|
||||
std::regex re = std::regex("world");
|
||||
auto matches = std::vector(std::sregex_iterator(str.begin(), str.end(), re), std::sregex_iterator());
|
||||
ASSERT_STREQ(
|
||||
hiliteMatches(str, matches, "(", ")").c_str(),
|
||||
"Hello, (world), (world), (world), (world), (world), (world), Hello!"
|
||||
);
|
||||
}
|
||||
|
||||
TEST(hiliteMatches, overlappingMatches) {
|
||||
std::string str = "world, Hello, world, Hello, world, Hello, world, Hello, world!";
|
||||
std::regex re = std::regex("Hello, world");
|
||||
std::regex re2 = std::regex("world, Hello");
|
||||
auto v = std::vector(std::sregex_iterator(str.begin(), str.end(), re), std::sregex_iterator());
|
||||
for(auto it = std::sregex_iterator(str.begin(), str.end(), re2); it != std::sregex_iterator(); ++it) {
|
||||
v.push_back(*it);
|
||||
}
|
||||
ASSERT_STREQ(
|
||||
hiliteMatches(str, v, "(", ")").c_str(),
|
||||
"(world, Hello, world, Hello, world, Hello, world, Hello, world)!"
|
||||
);
|
||||
}
|
||||
|
||||
TEST(hiliteMatches, complexOverlappingMatches) {
|
||||
std::string str = "legacyPackages.x86_64-linux.git-crypt";
|
||||
std::vector regexes = {
|
||||
std::regex("t-cry"),
|
||||
std::regex("ux\\.git-cry"),
|
||||
std::regex("git-c"),
|
||||
std::regex("pt"),
|
||||
};
|
||||
std::vector<std::smatch> matches;
|
||||
for(auto regex : regexes)
|
||||
{
|
||||
for(auto it = std::sregex_iterator(str.begin(), str.end(), regex); it != std::sregex_iterator(); ++it) {
|
||||
matches.push_back(*it);
|
||||
}
|
||||
}
|
||||
ASSERT_STREQ(
|
||||
hiliteMatches(str, matches, "(", ")").c_str(),
|
||||
"legacyPackages.x86_64-lin(ux.git-crypt)"
|
||||
);
|
||||
}
|
||||
}
|
190
src/libutil-tests/json-utils.cc
Normal file
|
@ -0,0 +1,190 @@
|
|||
#include <vector>
|
||||
#include <optional>
|
||||
|
||||
#include <gtest/gtest.h>
|
||||
|
||||
#include "error.hh"
|
||||
#include "json-utils.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
/* Test `to_json` and `from_json` with `std::optional` types.
|
||||
* We are specifically interested in whether we can _nest_ optionals in STL
|
||||
 * containers so that we can leverage existing adl_serializer templates. */
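/* A minimal sketch (an assumption, not necessarily the exact code in
 * json-utils.hh) of the kind of adl_serializer specialization these tests
 * rely on. Such a specialization lives at namespace nlohmann scope, i.e.
 * outside namespace nix:
 *
 *   namespace nlohmann {
 *       template<typename T>
 *       struct adl_serializer<std::optional<T>> {
 *           static void to_json(json & j, const std::optional<T> & opt) {
 *               if (opt) j = *opt; else j = nullptr;   // nullopt maps to JSON null
 *           }
 *           static void from_json(const json & j, std::optional<T> & opt) {
 *               if (j.is_null()) opt = std::nullopt;
 *               else opt = j.template get<T>();
 *           }
 *       };
 *   }
 */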
|
||||
|
||||
TEST(to_json, optionalInt) {
|
||||
std::optional<int> val = std::make_optional(420);
|
||||
ASSERT_EQ(nlohmann::json(val), nlohmann::json(420));
|
||||
val = std::nullopt;
|
||||
ASSERT_EQ(nlohmann::json(val), nlohmann::json(nullptr));
|
||||
}
|
||||
|
||||
TEST(to_json, vectorOfOptionalInts) {
|
||||
std::vector<std::optional<int>> vals = {
|
||||
std::make_optional(420),
|
||||
std::nullopt,
|
||||
};
|
||||
ASSERT_EQ(nlohmann::json(vals), nlohmann::json::parse("[420,null]"));
|
||||
}
|
||||
|
||||
TEST(to_json, optionalVectorOfInts) {
|
||||
std::optional<std::vector<int>> val = std::make_optional(std::vector<int> {
|
||||
-420,
|
||||
420,
|
||||
});
|
||||
ASSERT_EQ(nlohmann::json(val), nlohmann::json::parse("[-420,420]"));
|
||||
val = std::nullopt;
|
||||
ASSERT_EQ(nlohmann::json(val), nlohmann::json(nullptr));
|
||||
}
|
||||
|
||||
TEST(from_json, optionalInt) {
|
||||
nlohmann::json json = 420;
|
||||
std::optional<int> val = json;
|
||||
ASSERT_TRUE(val.has_value());
|
||||
ASSERT_EQ(*val, 420);
|
||||
json = nullptr;
|
||||
json.get_to(val);
|
||||
ASSERT_FALSE(val.has_value());
|
||||
}
|
||||
|
||||
TEST(from_json, vectorOfOptionalInts) {
|
||||
nlohmann::json json = { 420, nullptr };
|
||||
std::vector<std::optional<int>> vals = json;
|
||||
ASSERT_EQ(vals.size(), 2);
|
||||
ASSERT_TRUE(vals.at(0).has_value());
|
||||
ASSERT_EQ(*vals.at(0), 420);
|
||||
ASSERT_FALSE(vals.at(1).has_value());
|
||||
}
|
||||
|
||||
TEST(valueAt, simpleObject) {
|
||||
auto simple = R"({ "hello": "world" })"_json;
|
||||
|
||||
ASSERT_EQ(valueAt(getObject(simple), "hello"), "world");
|
||||
|
||||
auto nested = R"({ "hello": { "world": "" } })"_json;
|
||||
|
||||
auto & nestedObject = valueAt(getObject(nested), "hello");
|
||||
|
||||
ASSERT_EQ(valueAt(nestedObject, "world"), "");
|
||||
}
|
||||
|
||||
TEST(valueAt, missingKey) {
|
||||
auto json = R"({ "hello": { "nested": "world" } })"_json;
|
||||
|
||||
auto & obj = getObject(json);
|
||||
|
||||
ASSERT_THROW(valueAt(obj, "foo"), Error);
|
||||
}
|
||||
|
||||
TEST(getObject, rightAssertions) {
|
||||
auto simple = R"({ "object": {} })"_json;
|
||||
|
||||
ASSERT_EQ(getObject(valueAt(getObject(simple), "object")), (nlohmann::json::object_t {}));
|
||||
|
||||
auto nested = R"({ "object": { "object": {} } })"_json;
|
||||
|
||||
auto & nestedObject = getObject(valueAt(getObject(nested), "object"));
|
||||
|
||||
ASSERT_EQ(nestedObject, getObject(nlohmann::json::parse(R"({ "object": {} })")));
|
||||
ASSERT_EQ(getObject(valueAt(getObject(nestedObject), "object")), (nlohmann::json::object_t {}));
|
||||
}
|
||||
|
||||
TEST(getObject, wrongAssertions) {
|
||||
auto json = R"({ "object": {}, "array": [], "string": "", "int": 0, "boolean": false })"_json;
|
||||
|
||||
auto & obj = getObject(json);
|
||||
|
||||
ASSERT_THROW(getObject(valueAt(obj, "array")), Error);
|
||||
ASSERT_THROW(getObject(valueAt(obj, "string")), Error);
|
||||
ASSERT_THROW(getObject(valueAt(obj, "int")), Error);
|
||||
ASSERT_THROW(getObject(valueAt(obj, "boolean")), Error);
|
||||
}
|
||||
|
||||
TEST(getArray, rightAssertions) {
|
||||
auto simple = R"({ "array": [] })"_json;
|
||||
|
||||
ASSERT_EQ(getArray(valueAt(getObject(simple), "array")), (nlohmann::json::array_t {}));
|
||||
}
|
||||
|
||||
TEST(getArray, wrongAssertions) {
|
||||
auto json = R"({ "object": {}, "array": [], "string": "", "int": 0, "boolean": false })"_json;
|
||||
|
||||
ASSERT_THROW(getArray(valueAt(json, "object")), Error);
|
||||
ASSERT_THROW(getArray(valueAt(json, "string")), Error);
|
||||
ASSERT_THROW(getArray(valueAt(json, "int")), Error);
|
||||
ASSERT_THROW(getArray(valueAt(json, "boolean")), Error);
|
||||
}
|
||||
|
||||
TEST(getString, rightAssertions) {
|
||||
auto simple = R"({ "string": "" })"_json;
|
||||
|
||||
ASSERT_EQ(getString(valueAt(getObject(simple), "string")), "");
|
||||
}
|
||||
|
||||
TEST(getString, wrongAssertions) {
|
||||
auto json = R"({ "object": {}, "array": [], "string": "", "int": 0, "boolean": false })"_json;
|
||||
|
||||
ASSERT_THROW(getString(valueAt(json, "object")), Error);
|
||||
ASSERT_THROW(getString(valueAt(json, "array")), Error);
|
||||
ASSERT_THROW(getString(valueAt(json, "int")), Error);
|
||||
ASSERT_THROW(getString(valueAt(json, "boolean")), Error);
|
||||
}
|
||||
|
||||
TEST(getInteger, rightAssertions) {
|
||||
auto simple = R"({ "int": 0 })"_json;
|
||||
|
||||
ASSERT_EQ(getInteger(valueAt(getObject(simple), "int")), 0);
|
||||
}
|
||||
|
||||
TEST(getInteger, wrongAssertions) {
|
||||
auto json = R"({ "object": {}, "array": [], "string": "", "int": 0, "boolean": false })"_json;
|
||||
|
||||
ASSERT_THROW(getInteger(valueAt(json, "object")), Error);
|
||||
ASSERT_THROW(getInteger(valueAt(json, "array")), Error);
|
||||
ASSERT_THROW(getInteger(valueAt(json, "string")), Error);
|
||||
ASSERT_THROW(getInteger(valueAt(json, "boolean")), Error);
|
||||
}
|
||||
|
||||
TEST(getBoolean, rightAssertions) {
|
||||
auto simple = R"({ "boolean": false })"_json;
|
||||
|
||||
ASSERT_EQ(getBoolean(valueAt(getObject(simple), "boolean")), false);
|
||||
}
|
||||
|
||||
TEST(getBoolean, wrongAssertions) {
|
||||
auto json = R"({ "object": {}, "array": [], "string": "", "int": 0, "boolean": false })"_json;
|
||||
|
||||
ASSERT_THROW(getBoolean(valueAt(json, "object")), Error);
|
||||
ASSERT_THROW(getBoolean(valueAt(json, "array")), Error);
|
||||
ASSERT_THROW(getBoolean(valueAt(json, "string")), Error);
|
||||
ASSERT_THROW(getBoolean(valueAt(json, "int")), Error);
|
||||
}
|
||||
|
||||
TEST(optionalValueAt, existing) {
|
||||
auto json = R"({ "string": "ssh-rsa" })"_json;
|
||||
|
||||
ASSERT_EQ(optionalValueAt(json, "string"), std::optional { "ssh-rsa" });
|
||||
}
|
||||
|
||||
TEST(optionalValueAt, empty) {
|
||||
auto json = R"({})"_json;
|
||||
|
||||
ASSERT_EQ(optionalValueAt(json, "string"), std::nullopt);
|
||||
}
|
||||
|
||||
TEST(getNullable, null) {
|
||||
auto json = R"(null)"_json;
|
||||
|
||||
ASSERT_EQ(getNullable(json), nullptr);
|
||||
}
|
||||
|
||||
TEST(getNullable, empty) {
|
||||
auto json = R"({})"_json;
|
||||
|
||||
auto * p = getNullable(json);
|
||||
|
||||
ASSERT_NE(p, nullptr);
|
||||
ASSERT_EQ(*p, R"({})"_json);
|
||||
}
|
||||
|
||||
} /* namespace nix */
|
37
src/libutil-tests/local.mk
Normal file
|
@ -0,0 +1,37 @@
|
|||
check: libutil-tests_RUN
|
||||
|
||||
programs += libutil-tests
|
||||
|
||||
libutil-tests_NAME = libnixutil-tests
|
||||
|
||||
libutil-tests_ENV := _NIX_TEST_UNIT_DATA=$(d)/data GTEST_OUTPUT=xml:$$testresults/libutil-tests.xml
|
||||
|
||||
libutil-tests_DIR := $(d)
|
||||
|
||||
ifeq ($(INSTALL_UNIT_TESTS), yes)
|
||||
libutil-tests_INSTALL_DIR := $(checkbindir)
|
||||
else
|
||||
libutil-tests_INSTALL_DIR :=
|
||||
endif
|
||||
|
||||
libutil-tests_SOURCES := $(wildcard $(d)/*.cc)
|
||||
|
||||
libutil-tests_EXTRA_INCLUDES = \
|
||||
-I src/libutil-test-support \
|
||||
$(INCLUDE_libutil) \
|
||||
$(INCLUDE_libutilc)
|
||||
|
||||
libutil-tests_CXXFLAGS += $(libutil-tests_EXTRA_INCLUDES)
|
||||
|
||||
libutil-tests_LIBS = libutil-test-support libutil libutilc
|
||||
|
||||
libutil-tests_LDFLAGS := -lrapidcheck $(GTEST_LIBS)
|
||||
|
||||
ifdef HOST_WINDOWS
|
||||
# Increase the default reserved stack size to 65 MB so Nix doesn't run out of space
|
||||
libutil-tests_LDFLAGS += -Wl,--stack,$(shell echo $$((65 * 1024 * 1024)))
|
||||
endif
|
||||
|
||||
check: $(d)/data/git/check-data.sh.test
|
||||
|
||||
$(eval $(call run-test,$(d)/data/git/check-data.sh))
|
369
src/libutil-tests/logging.cc
Normal file
|
@ -0,0 +1,369 @@
|
|||
#if 0
|
||||
|
||||
#include "logging.hh"
|
||||
#include "nixexpr.hh"
|
||||
#include <fstream>
|
||||
|
||||
#include <gtest/gtest.h>
|
||||
|
||||
namespace nix {
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* logEI
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
const char *test_file =
|
||||
"previous line of code\n"
|
||||
"this is the problem line of code\n"
|
||||
"next line of code\n";
|
||||
const char *one_liner =
|
||||
"this is the other problem line of code";
|
||||
|
||||
    TEST(logEI, capturesBasicProperties) {
|
||||
|
||||
MakeError(TestError, Error);
|
||||
ErrorInfo::programName = std::optional("error-unit-test");
|
||||
|
||||
try {
|
||||
throw TestError("an error for testing purposes");
|
||||
} catch (Error &e) {
|
||||
testing::internal::CaptureStderr();
|
||||
logger->logEI(e.info());
|
||||
auto str = testing::internal::GetCapturedStderr();
|
||||
|
||||
ASSERT_STREQ(str.c_str(),"\x1B[31;1merror:\x1B[0m\x1B[34;1m --- TestError --- error-unit-test\x1B[0m\nan error for testing purposes\n");
|
||||
}
|
||||
}
|
||||
|
||||
TEST(logEI, jsonOutput) {
|
||||
SymbolTable testTable;
|
||||
auto problem_file = testTable.create("random.nix");
|
||||
testing::internal::CaptureStderr();
|
||||
|
||||
makeJSONLogger(*logger)->logEI({
|
||||
.name = "error name",
|
||||
.msg = HintFmt("this hint has %1% templated %2%!!",
|
||||
"yellow",
|
||||
"values"),
|
||||
.errPos = Pos(foFile, problem_file, 02, 13)
|
||||
});
|
||||
|
||||
auto str = testing::internal::GetCapturedStderr();
|
||||
ASSERT_STREQ(str.c_str(), "@nix {\"action\":\"msg\",\"column\":13,\"file\":\"random.nix\",\"level\":0,\"line\":2,\"msg\":\"\\u001b[31;1merror:\\u001b[0m\\u001b[34;1m --- error name --- error-unit-test\\u001b[0m\\n\\u001b[34;1mat: \\u001b[33;1m(2:13)\\u001b[34;1m in file: \\u001b[0mrandom.nix\\n\\nerror without any code lines.\\n\\nthis hint has \\u001b[33;1myellow\\u001b[0m templated \\u001b[33;1mvalues\\u001b[0m!!\",\"raw_msg\":\"this hint has \\u001b[33;1myellow\\u001b[0m templated \\u001b[33;1mvalues\\u001b[0m!!\"}\n");
|
||||
}
|
||||
|
||||
TEST(logEI, appendingHintsToPreviousError) {
|
||||
|
||||
MakeError(TestError, Error);
|
||||
ErrorInfo::programName = std::optional("error-unit-test");
|
||||
|
||||
try {
|
||||
auto e = Error("initial error");
|
||||
throw TestError(e.info());
|
||||
} catch (Error &e) {
|
||||
ErrorInfo ei = e.info();
|
||||
ei.msg = HintFmt("%s; subsequent error message.", Uncolored(e.info().msg.str()));
|
||||
|
||||
testing::internal::CaptureStderr();
|
||||
logger->logEI(ei);
|
||||
auto str = testing::internal::GetCapturedStderr();
|
||||
|
||||
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- TestError --- error-unit-test\x1B[0m\ninitial error; subsequent error message.\n");
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
TEST(logEI, picksUpSystemErrorExitCode) {
|
||||
|
||||
MakeError(TestError, Error);
|
||||
ErrorInfo::programName = std::optional("error-unit-test");
|
||||
|
||||
try {
|
||||
auto x = readFile(-1);
|
||||
}
|
||||
catch (SystemError &e) {
|
||||
testing::internal::CaptureStderr();
|
||||
logError(e.info());
|
||||
auto str = testing::internal::GetCapturedStderr();
|
||||
|
||||
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- SystemError --- error-unit-test\x1B[0m\nstatting file: \x1B[33;1mBad file descriptor\x1B[0m\n");
|
||||
}
|
||||
}
|
||||
|
||||
TEST(logEI, loggingErrorOnInfoLevel) {
|
||||
testing::internal::CaptureStderr();
|
||||
|
||||
logger->logEI({ .level = lvlInfo,
|
||||
.name = "Info name",
|
||||
});
|
||||
|
||||
auto str = testing::internal::GetCapturedStderr();
|
||||
ASSERT_STREQ(str.c_str(), "\x1B[32;1minfo:\x1B[0m\x1B[34;1m --- Info name --- error-unit-test\x1B[0m\nInfo description\n");
|
||||
}
|
||||
|
||||
TEST(logEI, loggingErrorOnTalkativeLevel) {
|
||||
verbosity = lvlTalkative;
|
||||
|
||||
testing::internal::CaptureStderr();
|
||||
|
||||
logger->logEI({ .level = lvlTalkative,
|
||||
.name = "Talkative name",
|
||||
});
|
||||
|
||||
auto str = testing::internal::GetCapturedStderr();
|
||||
ASSERT_STREQ(str.c_str(), "\x1B[32;1mtalk:\x1B[0m\x1B[34;1m --- Talkative name --- error-unit-test\x1B[0m\nTalkative description\n");
|
||||
}
|
||||
|
||||
TEST(logEI, loggingErrorOnChattyLevel) {
|
||||
verbosity = lvlChatty;
|
||||
|
||||
testing::internal::CaptureStderr();
|
||||
|
||||
logger->logEI({ .level = lvlChatty,
|
||||
.name = "Chatty name",
|
||||
});
|
||||
|
||||
auto str = testing::internal::GetCapturedStderr();
|
||||
ASSERT_STREQ(str.c_str(), "\x1B[32;1mchat:\x1B[0m\x1B[34;1m --- Chatty name --- error-unit-test\x1B[0m\nTalkative description\n");
|
||||
}
|
||||
|
||||
TEST(logEI, loggingErrorOnDebugLevel) {
|
||||
verbosity = lvlDebug;
|
||||
|
||||
testing::internal::CaptureStderr();
|
||||
|
||||
logger->logEI({ .level = lvlDebug,
|
||||
.name = "Debug name",
|
||||
});
|
||||
|
||||
auto str = testing::internal::GetCapturedStderr();
|
||||
ASSERT_STREQ(str.c_str(), "\x1B[33;1mdebug:\x1B[0m\x1B[34;1m --- Debug name --- error-unit-test\x1B[0m\nDebug description\n");
|
||||
}
|
||||
|
||||
TEST(logEI, loggingErrorOnVomitLevel) {
|
||||
verbosity = lvlVomit;
|
||||
|
||||
testing::internal::CaptureStderr();
|
||||
|
||||
logger->logEI({ .level = lvlVomit,
|
||||
.name = "Vomit name",
|
||||
});
|
||||
|
||||
auto str = testing::internal::GetCapturedStderr();
|
||||
ASSERT_STREQ(str.c_str(), "\x1B[32;1mvomit:\x1B[0m\x1B[34;1m --- Vomit name --- error-unit-test\x1B[0m\nVomit description\n");
|
||||
}
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* logError
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
TEST(logError, logErrorWithoutHintOrCode) {
|
||||
testing::internal::CaptureStderr();
|
||||
|
||||
logError({
|
||||
.name = "name",
|
||||
});
|
||||
|
||||
auto str = testing::internal::GetCapturedStderr();
|
||||
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- name --- error-unit-test\x1B[0m\nerror description\n");
|
||||
}
|
||||
|
||||
TEST(logError, logErrorWithPreviousAndNextLinesOfCode) {
|
||||
SymbolTable testTable;
|
||||
auto problem_file = testTable.create(test_file);
|
||||
|
||||
testing::internal::CaptureStderr();
|
||||
|
||||
logError({
|
||||
.name = "error name",
|
||||
.msg = HintFmt("this hint has %1% templated %2%!!",
|
||||
"yellow",
|
||||
"values"),
|
||||
.errPos = Pos(foString, problem_file, 02, 13),
|
||||
});
|
||||
|
||||
auto str = testing::internal::GetCapturedStderr();
|
||||
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- error name --- error-unit-test\x1B[0m\n\x1B[34;1mat: \x1B[33;1m(2:13)\x1B[34;1m from string\x1B[0m\n\nerror with code lines\n\n 1| previous line of code\n 2| this is the problem line of code\n | \x1B[31;1m^\x1B[0m\n 3| next line of code\n\nthis hint has \x1B[33;1myellow\x1B[0m templated \x1B[33;1mvalues\x1B[0m!!\n");
|
||||
}
|
||||
|
||||
TEST(logError, logErrorWithInvalidFile) {
|
||||
SymbolTable testTable;
|
||||
auto problem_file = testTable.create("invalid filename");
|
||||
testing::internal::CaptureStderr();
|
||||
|
||||
logError({
|
||||
.name = "error name",
|
||||
.msg = HintFmt("this hint has %1% templated %2%!!",
|
||||
"yellow",
|
||||
"values"),
|
||||
.errPos = Pos(foFile, problem_file, 02, 13)
|
||||
});
|
||||
|
||||
auto str = testing::internal::GetCapturedStderr();
|
||||
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- error name --- error-unit-test\x1B[0m\n\x1B[34;1mat: \x1B[33;1m(2:13)\x1B[34;1m in file: \x1B[0minvalid filename\n\nerror without any code lines.\n\nthis hint has \x1B[33;1myellow\x1B[0m templated \x1B[33;1mvalues\x1B[0m!!\n");
|
||||
}
|
||||
|
||||
TEST(logError, logErrorWithOnlyHintAndName) {
|
||||
testing::internal::CaptureStderr();
|
||||
|
||||
logError({
|
||||
.name = "error name",
|
||||
.msg = HintFmt("hint %1%", "only"),
|
||||
});
|
||||
|
||||
auto str = testing::internal::GetCapturedStderr();
|
||||
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- error name --- error-unit-test\x1B[0m\nhint \x1B[33;1monly\x1B[0m\n");
|
||||
|
||||
}
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* logWarning
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
TEST(logWarning, logWarningWithNameDescriptionAndHint) {
|
||||
testing::internal::CaptureStderr();
|
||||
|
||||
logWarning({
|
||||
.name = "name",
|
||||
.msg = HintFmt("there was a %1%", "warning"),
|
||||
});
|
||||
|
||||
auto str = testing::internal::GetCapturedStderr();
|
||||
ASSERT_STREQ(str.c_str(), "\x1B[33;1mwarning:\x1B[0m\x1B[34;1m --- name --- error-unit-test\x1B[0m\nwarning description\n\nthere was a \x1B[33;1mwarning\x1B[0m\n");
|
||||
}
|
||||
|
||||
TEST(logWarning, logWarningWithFileLineNumAndCode) {
|
||||
|
||||
SymbolTable testTable;
|
||||
auto problem_file = testTable.create(test_file);
|
||||
|
||||
testing::internal::CaptureStderr();
|
||||
|
||||
logWarning({
|
||||
.name = "warning name",
|
||||
.msg = HintFmt("this hint has %1% templated %2%!!",
|
||||
"yellow",
|
||||
"values"),
|
||||
.errPos = Pos(foStdin, problem_file, 2, 13),
|
||||
});
|
||||
|
||||
|
||||
auto str = testing::internal::GetCapturedStderr();
|
||||
ASSERT_STREQ(str.c_str(), "\x1B[33;1mwarning:\x1B[0m\x1B[34;1m --- warning name --- error-unit-test\x1B[0m\n\x1B[34;1mat: \x1B[33;1m(2:13)\x1B[34;1m from stdin\x1B[0m\n\nwarning description\n\n 1| previous line of code\n 2| this is the problem line of code\n | \x1B[31;1m^\x1B[0m\n 3| next line of code\n\nthis hint has \x1B[33;1myellow\x1B[0m templated \x1B[33;1mvalues\x1B[0m!!\n");
|
||||
}
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* traces
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
TEST(addTrace, showTracesWithShowTrace) {
|
||||
SymbolTable testTable;
|
||||
auto problem_file = testTable.create(test_file);
|
||||
auto oneliner_file = testTable.create(one_liner);
|
||||
auto invalidfilename = testTable.create("invalid filename");
|
||||
|
||||
auto e = AssertionError(ErrorInfo {
|
||||
.name = "wat",
|
||||
.msg = HintFmt("it has been %1% days since our last error", "zero"),
|
||||
.errPos = Pos(foString, problem_file, 2, 13),
|
||||
});
|
||||
|
||||
e.addTrace(Pos(foStdin, oneliner_file, 1, 19), "while trying to compute %1%", 42);
|
||||
e.addTrace(std::nullopt, "while doing something without a %1%", "pos");
|
||||
e.addTrace(Pos(foFile, invalidfilename, 100, 1), "missing %s", "nix file");
|
||||
|
||||
testing::internal::CaptureStderr();
|
||||
|
||||
loggerSettings.showTrace.assign(true);
|
||||
|
||||
logError(e.info());
|
||||
|
||||
auto str = testing::internal::GetCapturedStderr();
|
||||
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- AssertionError --- error-unit-test\x1B[0m\n\x1B[34;1mat: \x1B[33;1m(2:13)\x1B[34;1m from string\x1B[0m\n\nshow-traces\n\n 1| previous line of code\n 2| this is the problem line of code\n | \x1B[31;1m^\x1B[0m\n 3| next line of code\n\nit has been \x1B[33;1mzero\x1B[0m days since our last error\n\x1B[34;1m---- show-trace ----\x1B[0m\n\x1B[34;1mtrace: \x1B[0mwhile trying to compute \x1B[33;1m42\x1B[0m\n\x1B[34;1mat: \x1B[33;1m(1:19)\x1B[34;1m from stdin\x1B[0m\n\n 1| this is the other problem line of code\n | \x1B[31;1m^\x1B[0m\n\n\x1B[34;1mtrace: \x1B[0mwhile doing something without a \x1B[33;1mpos\x1B[0m\n\x1B[34;1mtrace: \x1B[0mmissing \x1B[33;1mnix file\x1B[0m\n\x1B[34;1mat: \x1B[33;1m(100:1)\x1B[34;1m in file: \x1B[0minvalid filename\n");
|
||||
}
|
||||
|
||||
TEST(addTrace, hideTracesWithoutShowTrace) {
|
||||
SymbolTable testTable;
|
||||
auto problem_file = testTable.create(test_file);
|
||||
auto oneliner_file = testTable.create(one_liner);
|
||||
auto invalidfilename = testTable.create("invalid filename");
|
||||
|
||||
auto e = AssertionError(ErrorInfo {
|
||||
.name = "wat",
|
||||
.msg = HintFmt("it has been %1% days since our last error", "zero"),
|
||||
.errPos = Pos(foString, problem_file, 2, 13),
|
||||
});
|
||||
|
||||
e.addTrace(Pos(foStdin, oneliner_file, 1, 19), "while trying to compute %1%", 42);
|
||||
e.addTrace(std::nullopt, "while doing something without a %1%", "pos");
|
||||
e.addTrace(Pos(foFile, invalidfilename, 100, 1), "missing %s", "nix file");
|
||||
|
||||
testing::internal::CaptureStderr();
|
||||
|
||||
loggerSettings.showTrace.assign(false);
|
||||
|
||||
logError(e.info());
|
||||
|
||||
auto str = testing::internal::GetCapturedStderr();
|
||||
ASSERT_STREQ(str.c_str(), "\x1B[31;1merror:\x1B[0m\x1B[34;1m --- AssertionError --- error-unit-test\x1B[0m\n\x1B[34;1mat: \x1B[33;1m(2:13)\x1B[34;1m from string\x1B[0m\n\nhide traces\n\n 1| previous line of code\n 2| this is the problem line of code\n | \x1B[31;1m^\x1B[0m\n 3| next line of code\n\nit has been \x1B[33;1mzero\x1B[0m days since our last error\n");
|
||||
}
|
||||
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* HintFmt
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
TEST(HintFmt, percentStringWithoutArgs) {
|
||||
|
||||
const char *teststr = "this is 100%s correct!";
|
||||
|
||||
ASSERT_STREQ(
|
||||
HintFmt(teststr).str().c_str(),
|
||||
teststr);
|
||||
|
||||
}
|
||||
|
||||
TEST(HintFmt, fmtToHintfmt) {
|
||||
|
||||
ASSERT_STREQ(
|
||||
HintFmt(fmt("the color of this this text is %1%", "not yellow")).str().c_str(),
|
||||
"the color of this this text is not yellow");
|
||||
|
||||
}
|
||||
|
||||
TEST(HintFmt, tooFewArguments) {
|
||||
|
||||
ASSERT_STREQ(
|
||||
HintFmt("only one arg %1% %2%", "fulfilled").str().c_str(),
|
||||
"only one arg " ANSI_WARNING "fulfilled" ANSI_NORMAL " ");
|
||||
|
||||
}
|
||||
|
||||
TEST(HintFmt, tooManyArguments) {
|
||||
|
||||
ASSERT_STREQ(
|
||||
HintFmt("what about this %1% %2%", "%3%", "one", "two").str().c_str(),
|
||||
"what about this " ANSI_WARNING "%3%" ANSI_NORMAL " " ANSI_YELLOW "one" ANSI_NORMAL);
|
||||
|
||||
}
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* ErrPos
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
TEST(errpos, invalidPos) {
|
||||
|
||||
// contains an invalid symbol, which we should not dereference!
|
||||
Pos invalid;
|
||||
|
||||
// constructing without access violation.
|
||||
ErrPos ep(invalid);
|
||||
|
||||
// assignment without access violation.
|
||||
ep = invalid;
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
#endif
|
130
src/libutil-tests/lru-cache.cc
Normal file
|
@ -0,0 +1,130 @@
|
|||
#include "lru-cache.hh"
|
||||
#include <gtest/gtest.h>
|
||||
|
||||
namespace nix {
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* size
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
TEST(LRUCache, sizeOfEmptyCacheIsZero) {
|
||||
LRUCache<std::string, std::string> c(10);
|
||||
ASSERT_EQ(c.size(), 0);
|
||||
}
|
||||
|
||||
TEST(LRUCache, sizeOfSingleElementCacheIsOne) {
|
||||
LRUCache<std::string, std::string> c(10);
|
||||
c.upsert("foo", "bar");
|
||||
ASSERT_EQ(c.size(), 1);
|
||||
}
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* upsert / get
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
TEST(LRUCache, getFromEmptyCache) {
|
||||
LRUCache<std::string, std::string> c(10);
|
||||
auto val = c.get("x");
|
||||
ASSERT_EQ(val.has_value(), false);
|
||||
}
|
||||
|
||||
TEST(LRUCache, getExistingValue) {
|
||||
LRUCache<std::string, std::string> c(10);
|
||||
c.upsert("foo", "bar");
|
||||
auto val = c.get("foo");
|
||||
ASSERT_EQ(val, "bar");
|
||||
}
|
||||
|
||||
TEST(LRUCache, getNonExistingValueFromNonEmptyCache) {
|
||||
LRUCache<std::string, std::string> c(10);
|
||||
c.upsert("foo", "bar");
|
||||
auto val = c.get("another");
|
||||
ASSERT_EQ(val.has_value(), false);
|
||||
}
|
||||
|
||||
TEST(LRUCache, upsertOnZeroCapacityCache) {
|
||||
LRUCache<std::string, std::string> c(0);
|
||||
c.upsert("foo", "bar");
|
||||
auto val = c.get("foo");
|
||||
ASSERT_EQ(val.has_value(), false);
|
||||
}
|
||||
|
||||
TEST(LRUCache, updateExistingValue) {
|
||||
LRUCache<std::string, std::string> c(1);
|
||||
c.upsert("foo", "bar");
|
||||
|
||||
auto val = c.get("foo");
|
||||
ASSERT_EQ(val.value_or("error"), "bar");
|
||||
ASSERT_EQ(c.size(), 1);
|
||||
|
||||
c.upsert("foo", "changed");
|
||||
val = c.get("foo");
|
||||
ASSERT_EQ(val.value_or("error"), "changed");
|
||||
ASSERT_EQ(c.size(), 1);
|
||||
}
|
||||
|
||||
TEST(LRUCache, overwriteOldestWhenCapacityIsReached) {
|
||||
LRUCache<std::string, std::string> c(3);
|
||||
c.upsert("one", "eins");
|
||||
c.upsert("two", "zwei");
|
||||
c.upsert("three", "drei");
|
||||
|
||||
ASSERT_EQ(c.size(), 3);
|
||||
ASSERT_EQ(c.get("one").value_or("error"), "eins");
|
||||
|
||||
// exceed capacity
|
||||
c.upsert("another", "whatever");
|
||||
|
||||
ASSERT_EQ(c.size(), 3);
|
||||
// Retrieving "one" makes it the most recent element thus
|
||||
    // "two" is now the oldest entry and is the one that gets evicted.
|
||||
ASSERT_EQ(c.get("two").has_value(), false);
|
||||
ASSERT_EQ(c.get("another").value(), "whatever");
|
||||
}
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* clear
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
TEST(LRUCache, clearEmptyCache) {
|
||||
LRUCache<std::string, std::string> c(10);
|
||||
c.clear();
|
||||
ASSERT_EQ(c.size(), 0);
|
||||
}
|
||||
|
||||
TEST(LRUCache, clearNonEmptyCache) {
|
||||
LRUCache<std::string, std::string> c(10);
|
||||
c.upsert("one", "eins");
|
||||
c.upsert("two", "zwei");
|
||||
c.upsert("three", "drei");
|
||||
ASSERT_EQ(c.size(), 3);
|
||||
c.clear();
|
||||
ASSERT_EQ(c.size(), 0);
|
||||
}
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* erase
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
TEST(LRUCache, eraseFromEmptyCache) {
|
||||
LRUCache<std::string, std::string> c(10);
|
||||
ASSERT_EQ(c.erase("foo"), false);
|
||||
ASSERT_EQ(c.size(), 0);
|
||||
}
|
||||
|
||||
TEST(LRUCache, eraseMissingFromNonEmptyCache) {
|
||||
LRUCache<std::string, std::string> c(10);
|
||||
c.upsert("one", "eins");
|
||||
ASSERT_EQ(c.erase("foo"), false);
|
||||
ASSERT_EQ(c.size(), 1);
|
||||
ASSERT_EQ(c.get("one").value_or("error"), "eins");
|
||||
}
|
||||
|
||||
TEST(LRUCache, eraseFromNonEmptyCache) {
|
||||
LRUCache<std::string, std::string> c(10);
|
||||
c.upsert("one", "eins");
|
||||
ASSERT_EQ(c.erase("one"), true);
|
||||
ASSERT_EQ(c.size(), 0);
|
||||
ASSERT_EQ(c.get("one").value_or("empty"), "empty");
|
||||
}
|
||||
}
|
100
src/libutil-tests/meson.build
Normal file
|
@ -0,0 +1,100 @@
|
|||
project('nix-util-tests', 'cpp',
|
||||
version : files('.version'),
|
||||
default_options : [
|
||||
'cpp_std=c++2a',
|
||||
# TODO(Qyriad): increase the warning level
|
||||
'warning_level=1',
|
||||
'debug=true',
|
||||
'optimization=2',
|
||||
'errorlogs=true', # Please print logs for tests that fail
|
||||
],
|
||||
meson_version : '>= 1.1',
|
||||
license : 'LGPL-2.1-or-later',
|
||||
)
|
||||
|
||||
cxx = meson.get_compiler('cpp')
|
||||
|
||||
subdir('build-utils-meson/deps-lists')
|
||||
|
||||
deps_private_maybe_subproject = [
|
||||
dependency('nix-util'),
|
||||
dependency('nix-util-c'),
|
||||
dependency('nix-util-test-support'),
|
||||
]
|
||||
deps_public_maybe_subproject = [
|
||||
]
|
||||
subdir('build-utils-meson/subprojects')
|
||||
|
||||
subdir('build-utils-meson/threads')
|
||||
|
||||
subdir('build-utils-meson/export-all-symbols')
|
||||
|
||||
rapidcheck = dependency('rapidcheck')
|
||||
deps_private += rapidcheck
|
||||
|
||||
gtest = dependency('gtest', main : true)
|
||||
deps_private += gtest
|
||||
|
||||
add_project_arguments(
|
||||
# TODO(Qyriad): Yes this is how the autoconf+Make system did it.
|
||||
# It would be nice for our headers to be idempotent instead.
|
||||
'-include', 'config-util.hh',
|
||||
'-include', 'config-util.h',
|
||||
language : 'cpp',
|
||||
)
|
||||
|
||||
subdir('build-utils-meson/diagnostics')
|
||||
|
||||
sources = files(
|
||||
'args.cc',
|
||||
'canon-path.cc',
|
||||
'checked-arithmetic.cc',
|
||||
'chunked-vector.cc',
|
||||
'closure.cc',
|
||||
'compression.cc',
|
||||
'config.cc',
|
||||
'executable-path.cc',
|
||||
'file-content-address.cc',
|
||||
'file-system.cc',
|
||||
'git.cc',
|
||||
'hash.cc',
|
||||
'hilite.cc',
|
||||
'json-utils.cc',
|
||||
'logging.cc',
|
||||
'lru-cache.cc',
|
||||
'nix_api_util.cc',
|
||||
'pool.cc',
|
||||
'position.cc',
|
||||
'processes.cc',
|
||||
'references.cc',
|
||||
'spawn.cc',
|
||||
'strings.cc',
|
||||
'suggestions.cc',
|
||||
'terminal.cc',
|
||||
'url.cc',
|
||||
'util.cc',
|
||||
'xml-writer.cc',
|
||||
)
|
||||
|
||||
include_dirs = [include_directories('.')]
|
||||
|
||||
|
||||
this_exe = executable(
|
||||
meson.project_name(),
|
||||
sources,
|
||||
dependencies : deps_private_subproject + deps_private + deps_other,
|
||||
include_directories : include_dirs,
|
||||
# TODO: -lrapidcheck, see ../libutil-support/build.meson
|
||||
link_args: linker_export_flags + ['-lrapidcheck'],
|
||||
# get main from gtest
|
||||
install : true,
|
||||
)
|
||||
|
||||
test(
|
||||
meson.project_name(),
|
||||
this_exe,
|
||||
env : {
|
||||
'_NIX_TEST_UNIT_DATA': meson.current_source_dir() / 'data',
|
||||
},
|
||||
protocol : 'gtest',
|
||||
)
|
144
src/libutil-tests/nix_api_util.cc
Normal file
|
@ -0,0 +1,144 @@
|
|||
#include "config-global.hh"
|
||||
#include "args.hh"
|
||||
#include "nix_api_util.h"
|
||||
#include "nix_api_util_internal.h"
|
||||
#include "tests/nix_api_util.hh"
|
||||
#include "tests/string_callback.hh"
|
||||
|
||||
#include <gtest/gtest.h>
|
||||
|
||||
namespace nixC {
|
||||
|
||||
TEST_F(nix_api_util_context, nix_context_error)
|
||||
{
|
||||
std::string err_msg_ref;
|
||||
try {
|
||||
throw nix::Error("testing error");
|
||||
} catch (nix::Error & e) {
|
||||
err_msg_ref = e.what();
|
||||
nix_context_error(ctx);
|
||||
}
|
||||
ASSERT_EQ(ctx->last_err_code, NIX_ERR_NIX_ERROR);
|
||||
ASSERT_EQ(ctx->name, "nix::Error");
|
||||
ASSERT_EQ(*ctx->last_err, err_msg_ref);
|
||||
ASSERT_EQ(ctx->info->msg.str(), "testing error");
|
||||
|
||||
try {
|
||||
throw std::runtime_error("testing exception");
|
||||
} catch (std::exception & e) {
|
||||
err_msg_ref = e.what();
|
||||
nix_context_error(ctx);
|
||||
}
|
||||
ASSERT_EQ(ctx->last_err_code, NIX_ERR_UNKNOWN);
|
||||
ASSERT_EQ(*ctx->last_err, err_msg_ref);
|
||||
|
||||
nix_clear_err(ctx);
|
||||
ASSERT_EQ(ctx->last_err_code, NIX_OK);
|
||||
}
|
||||
|
||||
TEST_F(nix_api_util_context, nix_set_err_msg)
|
||||
{
|
||||
ASSERT_EQ(ctx->last_err_code, NIX_OK);
|
||||
nix_set_err_msg(ctx, NIX_ERR_UNKNOWN, "unknown test error");
|
||||
ASSERT_EQ(ctx->last_err_code, NIX_ERR_UNKNOWN);
|
||||
ASSERT_EQ(*ctx->last_err, "unknown test error");
|
||||
}
|
||||
|
||||
TEST(nix_api_util, nix_version_get)
|
||||
{
|
||||
ASSERT_EQ(std::string(nix_version_get()), PACKAGE_VERSION);
|
||||
}
|
||||
|
||||
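// Registering MySettings with GlobalConfig makes its "setting-name" setting
// visible to the C API, which is what the nix_setting_get / nix_setting_set
// tests below exercise.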
struct MySettings : nix::Config
|
||||
{
|
||||
nix::Setting<std::string> settingSet{this, "empty", "setting-name", "Description"};
|
||||
};
|
||||
|
||||
MySettings mySettings;
|
||||
static nix::GlobalConfig::Register rs(&mySettings);
|
||||
|
||||
TEST_F(nix_api_util_context, nix_setting_get)
|
||||
{
|
||||
ASSERT_EQ(ctx->last_err_code, NIX_OK);
|
||||
std::string setting_value;
|
||||
nix_err result = nix_setting_get(ctx, "invalid-key", OBSERVE_STRING(setting_value));
|
||||
ASSERT_EQ(result, NIX_ERR_KEY);
|
||||
|
||||
result = nix_setting_get(ctx, "setting-name", OBSERVE_STRING(setting_value));
|
||||
ASSERT_EQ(result, NIX_OK);
|
||||
ASSERT_STREQ("empty", setting_value.c_str());
|
||||
}
|
||||
|
||||
TEST_F(nix_api_util_context, nix_setting_set)
|
||||
{
|
||||
nix_err result = nix_setting_set(ctx, "invalid-key", "new-value");
|
||||
ASSERT_EQ(result, NIX_ERR_KEY);
|
||||
|
||||
result = nix_setting_set(ctx, "setting-name", "new-value");
|
||||
ASSERT_EQ(result, NIX_OK);
|
||||
|
||||
std::string setting_value;
|
||||
result = nix_setting_get(ctx, "setting-name", OBSERVE_STRING(setting_value));
|
||||
ASSERT_EQ(result, NIX_OK);
|
||||
ASSERT_STREQ("new-value", setting_value.c_str());
|
||||
}
|
||||
|
||||
TEST_F(nix_api_util_context, nix_err_msg)
|
||||
{
|
||||
// no error
|
||||
EXPECT_THROW(nix_err_msg(nullptr, ctx, NULL), nix::Error);
|
||||
|
||||
// set error
|
||||
nix_set_err_msg(ctx, NIX_ERR_UNKNOWN, "unknown test error");
|
||||
|
||||
// basic usage
|
||||
std::string err_msg = nix_err_msg(NULL, ctx, NULL);
|
||||
ASSERT_EQ(err_msg, "unknown test error");
|
||||
|
||||
// advanced usage
|
||||
unsigned int sz;
|
||||
err_msg = nix_err_msg(nix_c_context_create(), ctx, &sz);
|
||||
ASSERT_EQ(sz, err_msg.size());
|
||||
}
|
||||
|
||||
TEST_F(nix_api_util_context, nix_err_info_msg)
|
||||
{
|
||||
std::string err_info;
|
||||
|
||||
// no error
|
||||
EXPECT_THROW(nix_err_info_msg(NULL, ctx, OBSERVE_STRING(err_info)), nix::Error);
|
||||
|
||||
try {
|
||||
throw nix::Error("testing error");
|
||||
} catch (...) {
|
||||
nix_context_error(ctx);
|
||||
}
|
||||
nix_err_info_msg(nix_c_context_create(), ctx, OBSERVE_STRING(err_info));
|
||||
ASSERT_STREQ("testing error", err_info.c_str());
|
||||
}
|
||||
|
||||
TEST_F(nix_api_util_context, nix_err_name)
|
||||
{
|
||||
std::string err_name;
|
||||
|
||||
// no error
|
||||
EXPECT_THROW(nix_err_name(NULL, ctx, OBSERVE_STRING(err_name)), nix::Error);
|
||||
|
||||
std::string err_msg_ref;
|
||||
try {
|
||||
throw nix::Error("testing error");
|
||||
} catch (...) {
|
||||
nix_context_error(ctx);
|
||||
}
|
||||
nix_err_name(nix_c_context_create(), ctx, OBSERVE_STRING(err_name));
|
||||
ASSERT_EQ(std::string(err_name), "nix::Error");
|
||||
}
|
||||
|
||||
TEST_F(nix_api_util_context, nix_err_code)
|
||||
{
|
||||
ASSERT_EQ(nix_err_code(ctx), NIX_OK);
|
||||
nix_set_err_msg(ctx, NIX_ERR_UNKNOWN, "unknown test error");
|
||||
ASSERT_EQ(nix_err_code(ctx), NIX_ERR_UNKNOWN);
|
||||
}
|
||||
|
||||
}
|
82
src/libutil-tests/package.nix
Normal file
|
@ -0,0 +1,82 @@
|
|||
{ lib
|
||||
, buildPackages
|
||||
, stdenv
|
||||
, mkMesonExecutable
|
||||
|
||||
, nix-util
|
||||
, nix-util-c
|
||||
, nix-util-test-support
|
||||
|
||||
, rapidcheck
|
||||
, gtest
|
||||
, runCommand
|
||||
|
||||
# Configuration Options
|
||||
|
||||
, version
|
||||
}:
|
||||
|
||||
let
|
||||
inherit (lib) fileset;
|
||||
in
|
||||
|
||||
mkMesonExecutable (finalAttrs: {
|
||||
pname = "nix-util-tests";
|
||||
inherit version;
|
||||
|
||||
workDir = ./.;
|
||||
fileset = fileset.unions [
|
||||
../../build-utils-meson
|
||||
./build-utils-meson
|
||||
../../.version
|
||||
./.version
|
||||
./meson.build
|
||||
# ./meson.options
|
||||
(fileset.fileFilter (file: file.hasExt "cc") ./.)
|
||||
(fileset.fileFilter (file: file.hasExt "hh") ./.)
|
||||
];
|
||||
|
||||
buildInputs = [
|
||||
nix-util
|
||||
nix-util-c
|
||||
nix-util-test-support
|
||||
rapidcheck
|
||||
gtest
|
||||
];
|
||||
|
||||
preConfigure =
|
||||
# "Inline" .version so it's not a symlink, and includes the suffix.
|
||||
# Do the meson utils, without modification.
|
||||
''
|
||||
chmod u+w ./.version
|
||||
echo ${version} > ../../.version
|
||||
'';
|
||||
|
||||
mesonFlags = [
|
||||
];
|
||||
|
||||
env = lib.optionalAttrs (stdenv.isLinux && !(stdenv.hostPlatform.isStatic && stdenv.system == "aarch64-linux")) {
|
||||
LDFLAGS = "-fuse-ld=gold";
|
||||
};
|
||||
|
||||
passthru = {
|
||||
tests = {
|
||||
run = runCommand "${finalAttrs.pname}-run" {
|
||||
meta.broken = !stdenv.hostPlatform.emulatorAvailable buildPackages;
|
||||
} (lib.optionalString stdenv.hostPlatform.isWindows ''
|
||||
export HOME="$PWD/home-dir"
|
||||
mkdir -p "$HOME"
|
||||
'' + ''
|
||||
export _NIX_TEST_UNIT_DATA=${./data}
|
||||
${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage}
|
||||
touch $out
|
||||
'');
|
||||
};
|
||||
};
|
||||
|
||||
meta = {
|
||||
platforms = lib.platforms.unix ++ lib.platforms.windows;
|
||||
mainProgram = finalAttrs.pname + stdenv.hostPlatform.extensions.executable;
|
||||
};
|
||||
|
||||
})
|
127
src/libutil-tests/pool.cc
Normal file
|
@ -0,0 +1,127 @@
|
|||
#include "pool.hh"
|
||||
#include <gtest/gtest.h>
|
||||
|
||||
namespace nix {
|
||||
|
||||
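// Each TestResource receives a unique, increasing `num` from a static
// counter, so the tests below can tell whether the pool handed back the same
// instance or created a fresh one.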
struct TestResource
|
||||
{
|
||||
|
||||
TestResource() {
|
||||
static int counter = 0;
|
||||
num = counter++;
|
||||
}
|
||||
|
||||
int dummyValue = 1;
|
||||
bool good = true;
|
||||
int num;
|
||||
};
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* Pool
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
TEST(Pool, freshPoolHasZeroCountAndSpecifiedCapacity) {
|
||||
auto isGood = [](const ref<TestResource> & r) { return r->good; };
|
||||
auto createResource = []() { return make_ref<TestResource>(); };
|
||||
|
||||
Pool<TestResource> pool = Pool<TestResource>((size_t)1, createResource, isGood);
|
||||
|
||||
ASSERT_EQ(pool.count(), 0);
|
||||
ASSERT_EQ(pool.capacity(), 1);
|
||||
}
|
||||
|
||||
TEST(Pool, freshPoolCanGetAResource) {
|
||||
auto isGood = [](const ref<TestResource> & r) { return r->good; };
|
||||
auto createResource = []() { return make_ref<TestResource>(); };
|
||||
|
||||
Pool<TestResource> pool = Pool<TestResource>((size_t)1, createResource, isGood);
|
||||
ASSERT_EQ(pool.count(), 0);
|
||||
|
||||
TestResource r = *(pool.get());
|
||||
|
||||
ASSERT_EQ(pool.count(), 1);
|
||||
ASSERT_EQ(pool.capacity(), 1);
|
||||
ASSERT_EQ(r.dummyValue, 1);
|
||||
ASSERT_EQ(r.good, true);
|
||||
}
|
||||
|
||||
TEST(Pool, capacityCanBeIncremented) {
|
||||
auto isGood = [](const ref<TestResource> & r) { return r->good; };
|
||||
auto createResource = []() { return make_ref<TestResource>(); };
|
||||
|
||||
Pool<TestResource> pool = Pool<TestResource>((size_t)1, createResource, isGood);
|
||||
ASSERT_EQ(pool.capacity(), 1);
|
||||
pool.incCapacity();
|
||||
ASSERT_EQ(pool.capacity(), 2);
|
||||
}
|
||||
|
||||
TEST(Pool, capacityCanBeDecremented) {
|
||||
auto isGood = [](const ref<TestResource> & r) { return r->good; };
|
||||
auto createResource = []() { return make_ref<TestResource>(); };
|
||||
|
||||
Pool<TestResource> pool = Pool<TestResource>((size_t)1, createResource, isGood);
|
||||
ASSERT_EQ(pool.capacity(), 1);
|
||||
pool.decCapacity();
|
||||
ASSERT_EQ(pool.capacity(), 0);
|
||||
}
|
||||
|
||||
TEST(Pool, flushBadDropsOutOfScopeResources) {
|
||||
auto isGood = [](const ref<TestResource> & r) { return false; };
|
||||
auto createResource = []() { return make_ref<TestResource>(); };
|
||||
|
||||
Pool<TestResource> pool = Pool<TestResource>((size_t)1, createResource, isGood);
|
||||
|
||||
{
|
||||
auto _r = pool.get();
|
||||
ASSERT_EQ(pool.count(), 1);
|
||||
}
|
||||
|
||||
pool.flushBad();
|
||||
ASSERT_EQ(pool.count(), 0);
|
||||
}
|
||||
|
||||
// Test that the resources we allocate are being reused when they are still good.
|
||||
TEST(Pool, reuseResource) {
|
||||
auto isGood = [](const ref<TestResource> & r) { return true; };
|
||||
auto createResource = []() { return make_ref<TestResource>(); };
|
||||
|
||||
Pool<TestResource> pool = Pool<TestResource>((size_t)1, createResource, isGood);
|
||||
|
||||
// Compare the instance counter between the two handles. We expect them to be equal
|
||||
// as the pool should hand out the same (still) good one again.
|
||||
int counter = -1;
|
||||
{
|
||||
Pool<TestResource>::Handle h = pool.get();
|
||||
counter = h->num;
|
||||
} // the first handle goes out of scope
|
||||
|
||||
{ // the second handle should contain the same resource (with the same counter value)
|
||||
Pool<TestResource>::Handle h = pool.get();
|
||||
ASSERT_EQ(h->num, counter);
|
||||
}
|
||||
}
|
||||
|
||||
// Test that the resources we allocate are being thrown away when they are no longer good.
|
||||
TEST(Pool, badResourceIsNotReused) {
|
||||
auto isGood = [](const ref<TestResource> & r) { return false; };
|
||||
auto createResource = []() { return make_ref<TestResource>(); };
|
||||
|
||||
Pool<TestResource> pool = Pool<TestResource>((size_t)1, createResource, isGood);
|
||||
|
||||
// Compare the instance counter between the two handles. We expect them
|
||||
// to *not* be equal as the pool should hand out a new instance after
|
||||
// the first one was returned.
|
||||
int counter = -1;
|
||||
{
|
||||
Pool<TestResource>::Handle h = pool.get();
|
||||
counter = h->num;
|
||||
} // the first handle goes out of scope
|
||||
|
||||
{
|
||||
// the second handle should contain a different resource (with a
|
||||
        // different counter value)
|
||||
Pool<TestResource>::Handle h = pool.get();
|
||||
ASSERT_NE(h->num, counter);
|
||||
}
|
||||
}
|
||||
}
|
122
src/libutil-tests/position.cc
Normal file
|
@ -0,0 +1,122 @@
|
|||
#include <gtest/gtest.h>
|
||||
|
||||
#include "position.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
inline Pos::Origin makeStdin(std::string s)
|
||||
{
|
||||
return Pos::Stdin{make_ref<std::string>(s)};
|
||||
}
|
||||
|
||||
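// getSnippetUpTo(end) returns the source text between two positions in the
// same origin; the cases below also check that swapped or out-of-range
// positions yield "" or std::nullopt rather than crashing.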
TEST(Position, getSnippetUpTo_0)
|
||||
{
|
||||
Pos::Origin o = makeStdin("");
|
||||
Pos p(1, 1, o);
|
||||
ASSERT_EQ(p.getSnippetUpTo(p), "");
|
||||
}
|
||||
TEST(Position, getSnippetUpTo_1)
|
||||
{
|
||||
Pos::Origin o = makeStdin("x");
|
||||
{
|
||||
// NOTE: line and column are actually 1-based indexes
|
||||
Pos start(0, 0, o);
|
||||
Pos end(99, 99, o);
|
||||
ASSERT_EQ(start.getSnippetUpTo(start), "");
|
||||
ASSERT_EQ(start.getSnippetUpTo(end), "x");
|
||||
ASSERT_EQ(end.getSnippetUpTo(end), "");
|
||||
ASSERT_EQ(end.getSnippetUpTo(start), std::nullopt);
|
||||
}
|
||||
{
|
||||
// NOTE: line and column are actually 1-based indexes
|
||||
Pos start(0, 99, o);
|
||||
Pos end(99, 0, o);
|
||||
ASSERT_EQ(start.getSnippetUpTo(start), "");
|
||||
|
||||
// "x" might be preferable, but we only care about not crashing for invalid inputs
|
||||
ASSERT_EQ(start.getSnippetUpTo(end), "");
|
||||
|
||||
ASSERT_EQ(end.getSnippetUpTo(end), "");
|
||||
ASSERT_EQ(end.getSnippetUpTo(start), std::nullopt);
|
||||
}
|
||||
{
|
||||
Pos start(1, 1, o);
|
||||
Pos end(1, 99, o);
|
||||
ASSERT_EQ(start.getSnippetUpTo(start), "");
|
||||
ASSERT_EQ(start.getSnippetUpTo(end), "x");
|
||||
ASSERT_EQ(end.getSnippetUpTo(end), "");
|
||||
ASSERT_EQ(end.getSnippetUpTo(start), "");
|
||||
}
|
||||
{
|
||||
Pos start(1, 1, o);
|
||||
Pos end(99, 99, o);
|
||||
ASSERT_EQ(start.getSnippetUpTo(start), "");
|
||||
ASSERT_EQ(start.getSnippetUpTo(end), "x");
|
||||
ASSERT_EQ(end.getSnippetUpTo(end), "");
|
||||
ASSERT_EQ(end.getSnippetUpTo(start), std::nullopt);
|
||||
}
|
||||
}
|
||||
TEST(Position, getSnippetUpTo_2)
|
||||
{
|
||||
Pos::Origin o = makeStdin("asdf\njkl\nqwer");
|
||||
{
|
||||
Pos start(1, 1, o);
|
||||
Pos end(1, 2, o);
|
||||
ASSERT_EQ(start.getSnippetUpTo(start), "");
|
||||
ASSERT_EQ(start.getSnippetUpTo(end), "a");
|
||||
ASSERT_EQ(end.getSnippetUpTo(end), "");
|
||||
|
||||
// nullopt? I feel like changing the column handling would just make it more fragile
|
||||
ASSERT_EQ(end.getSnippetUpTo(start), "");
|
||||
}
|
||||
{
|
||||
Pos start(1, 2, o);
|
||||
Pos end(1, 3, o);
|
||||
ASSERT_EQ(start.getSnippetUpTo(end), "s");
|
||||
}
|
||||
{
|
||||
Pos start(1, 2, o);
|
||||
Pos end(2, 2, o);
|
||||
ASSERT_EQ(start.getSnippetUpTo(end), "sdf\nj");
|
||||
}
|
||||
{
|
||||
Pos start(1, 2, o);
|
||||
Pos end(3, 2, o);
|
||||
ASSERT_EQ(start.getSnippetUpTo(end), "sdf\njkl\nq");
|
||||
}
|
||||
{
|
||||
Pos start(1, 2, o);
|
||||
Pos end(2, 99, o);
|
||||
ASSERT_EQ(start.getSnippetUpTo(end), "sdf\njkl");
|
||||
}
|
||||
{
|
||||
Pos start(1, 4, o);
|
||||
Pos end(2, 99, o);
|
||||
ASSERT_EQ(start.getSnippetUpTo(end), "f\njkl");
|
||||
}
|
||||
{
|
||||
Pos start(1, 5, o);
|
||||
Pos end(2, 99, o);
|
||||
ASSERT_EQ(start.getSnippetUpTo(end), "\njkl");
|
||||
}
|
||||
{
|
||||
        Pos start(1, 6, o); // invalid: starting column past the last "line character", i.e. at the newline
|
||||
Pos end(2, 99, o);
|
||||
ASSERT_EQ(start.getSnippetUpTo(end), "\njkl"); // jkl might be acceptable for this invalid start position
|
||||
}
|
||||
{
|
||||
Pos start(1, 1, o);
|
||||
Pos end(2, 0, o); // invalid
|
||||
ASSERT_EQ(start.getSnippetUpTo(end), "asdf\n");
|
||||
}
|
||||
}
|
||||
|
||||
TEST(Position, example_1)
|
||||
{
|
||||
Pos::Origin o = makeStdin(" unambiguous = \n /** Very close */\n x: x;\n# ok\n");
|
||||
Pos start(2, 5, o);
|
||||
Pos end(2, 22, o);
|
||||
ASSERT_EQ(start.getSnippetUpTo(end), "/** Very close */");
|
||||
}
|
||||
|
||||
} // namespace nix
|
17
src/libutil-tests/processes.cc
Normal file
|
@ -0,0 +1,17 @@
|
|||
#include "processes.hh"
|
||||
|
||||
#include <gtest/gtest.h>
|
||||
|
||||
namespace nix {
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* statusOk
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
TEST(statusOk, zeroIsOk)
|
||||
{
|
||||
ASSERT_EQ(statusOk(0), true);
|
||||
ASSERT_EQ(statusOk(1), false);
|
||||
}
|
||||
|
||||
} // namespace nix
|
46
src/libutil-tests/references.cc
Normal file
|
@ -0,0 +1,46 @@
|
|||
#include "references.hh"
|
||||
#include <gtest/gtest.h>
|
||||
|
||||
namespace nix {
|
||||
|
||||
using std::string;
|
||||
|
||||
struct RewriteParams {
|
||||
string originalString, finalString;
|
||||
StringMap rewrites;
|
||||
|
||||
friend std::ostream& operator<<(std::ostream& os, const RewriteParams& bar) {
|
||||
StringSet strRewrites;
|
||||
for (auto & [from, to] : bar.rewrites)
|
||||
strRewrites.insert(from + "->" + to);
|
||||
return os <<
|
||||
"OriginalString: " << bar.originalString << std::endl <<
|
||||
"Rewrites: " << dropEmptyInitThenConcatStringsSep(",", strRewrites) << std::endl <<
|
||||
"Expected result: " << bar.finalString;
|
||||
}
|
||||
};
|
||||
|
||||
class RewriteTest : public ::testing::TestWithParam<RewriteParams> {
|
||||
};
|
||||
|
||||
TEST_P(RewriteTest, IdentityRewriteIsIdentity) {
|
||||
RewriteParams param = GetParam();
|
||||
StringSink rewritten;
|
||||
auto rewriter = RewritingSink(param.rewrites, rewritten);
|
||||
rewriter(param.originalString);
|
||||
rewriter.flush();
|
||||
ASSERT_EQ(rewritten.s, param.finalString);
|
||||
}
|
||||
|
||||
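// Each case checks that rewrites are applied to the original input only:
// in the first case "foo" becomes "bar", and the freshly produced "bar" is
// not rewritten again to "baz".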
INSTANTIATE_TEST_CASE_P(
|
||||
references,
|
||||
RewriteTest,
|
||||
::testing::Values(
|
||||
RewriteParams{ "foooo", "baroo", {{"foo", "bar"}, {"bar", "baz"}}},
|
||||
RewriteParams{ "foooo", "bazoo", {{"fou", "bar"}, {"foo", "baz"}}},
|
||||
RewriteParams{ "foooo", "foooo", {}}
|
||||
)
|
||||
);
|
||||
|
||||
}
|
||||
|
36
src/libutil-tests/spawn.cc
Normal file
|
@ -0,0 +1,36 @@
|
|||
#include <gtest/gtest.h>
|
||||
|
||||
#include "processes.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
#ifdef _WIN32
|
||||
TEST(SpawnTest, spawnEcho)
|
||||
{
|
||||
auto output = runProgram(RunOptions{.program = "cmd.exe", .args = {"/C", "echo", "hello world"}});
|
||||
ASSERT_EQ(output.first, 0);
|
||||
ASSERT_EQ(output.second, "\"hello world\"\r\n");
|
||||
}
|
||||
|
||||
std::string windowsEscape(const std::string & str, bool cmd);
|
||||
|
||||
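// The cases below cover the argument-quoting rules exercised by
// windowsEscape: empty arguments get quoted, bare backslashes pass through
// unchanged, embedded quotes are backslash-escaped, and arguments containing
// spaces are wrapped in quotes.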
TEST(SpawnTest, windowsEscape)
|
||||
{
|
||||
auto empty = windowsEscape("", false);
|
||||
ASSERT_EQ(empty, R"("")");
|
||||
    // There are no quotes in this argument, so the input should equal the output
|
||||
auto backslashStr = R"(\\\\)";
|
||||
auto backslashes = windowsEscape(backslashStr, false);
|
||||
ASSERT_EQ(backslashes, backslashStr);
|
||||
|
||||
auto nestedQuotes = windowsEscape(R"(he said: "hello there")", false);
|
||||
ASSERT_EQ(nestedQuotes, R"("he said: \"hello there\"")");
|
||||
|
||||
auto middleQuote = windowsEscape(R"( \\\" )", false);
|
||||
ASSERT_EQ(middleQuote, R"(" \\\\\\\" ")");
|
||||
|
||||
auto space = windowsEscape("hello world", false);
|
||||
ASSERT_EQ(space, R"("hello world")");
|
||||
}
|
||||
#endif
|
||||
}
|
348
src/libutil-tests/strings.cc
Normal file
|
@ -0,0 +1,348 @@
|
|||
#include <gtest/gtest.h>
#include <rapidcheck/gtest.h>

#include "strings.hh"

namespace nix {

using Strings = std::vector<std::string>;

/* ----------------------------------------------------------------------------
 * concatStringsSep
 * --------------------------------------------------------------------------*/

TEST(concatStringsSep, empty)
{
    Strings strings;

    ASSERT_EQ(concatStringsSep(",", strings), "");
}

TEST(concatStringsSep, justOne)
{
    Strings strings;
    strings.push_back("this");

    ASSERT_EQ(concatStringsSep(",", strings), "this");
}

TEST(concatStringsSep, emptyString)
{
    Strings strings;
    strings.push_back("");

    ASSERT_EQ(concatStringsSep(",", strings), "");
}

TEST(concatStringsSep, emptyStrings)
{
    Strings strings;
    strings.push_back("");
    strings.push_back("");

    ASSERT_EQ(concatStringsSep(",", strings), ",");
}

TEST(concatStringsSep, threeEmptyStrings)
{
    Strings strings;
    strings.push_back("");
    strings.push_back("");
    strings.push_back("");

    ASSERT_EQ(concatStringsSep(",", strings), ",,");
}

TEST(concatStringsSep, buildCommaSeparatedString)
{
    Strings strings;
    strings.push_back("this");
    strings.push_back("is");
    strings.push_back("great");

    ASSERT_EQ(concatStringsSep(",", strings), "this,is,great");
}

TEST(concatStringsSep, buildStringWithEmptySeparator)
{
    Strings strings;
    strings.push_back("this");
    strings.push_back("is");
    strings.push_back("great");

    ASSERT_EQ(concatStringsSep("", strings), "thisisgreat");
}

TEST(concatStringsSep, buildSingleString)
{
    Strings strings;
    strings.push_back("this");

    ASSERT_EQ(concatStringsSep(",", strings), "this");
}

/* ----------------------------------------------------------------------------
 * dropEmptyInitThenConcatStringsSep
 * --------------------------------------------------------------------------*/

TEST(dropEmptyInitThenConcatStringsSep, empty)
{
    Strings strings;

    ASSERT_EQ(dropEmptyInitThenConcatStringsSep(",", strings), "");
}

TEST(dropEmptyInitThenConcatStringsSep, buildCommaSeparatedString)
{
    Strings strings;
    strings.push_back("this");
    strings.push_back("is");
    strings.push_back("great");

    ASSERT_EQ(dropEmptyInitThenConcatStringsSep(",", strings), "this,is,great");
}

TEST(dropEmptyInitThenConcatStringsSep, buildStringWithEmptySeparator)
{
    Strings strings;
    strings.push_back("this");
    strings.push_back("is");
    strings.push_back("great");

    ASSERT_EQ(dropEmptyInitThenConcatStringsSep("", strings), "thisisgreat");
}

TEST(dropEmptyInitThenConcatStringsSep, buildSingleString)
{
    Strings strings;
    strings.push_back("this");
    strings.push_back("");

    ASSERT_EQ(dropEmptyInitThenConcatStringsSep(",", strings), "this,");
}

TEST(dropEmptyInitThenConcatStringsSep, emptyStrings)
{
    Strings strings;
    strings.push_back("");
    strings.push_back("");

    ASSERT_EQ(dropEmptyInitThenConcatStringsSep(",", strings), "");
}

/* ----------------------------------------------------------------------------
 * tokenizeString
 * --------------------------------------------------------------------------*/

TEST(tokenizeString, empty)
{
    Strings expected = {};

    ASSERT_EQ(tokenizeString<Strings>(""), expected);
}

TEST(tokenizeString, oneSep)
{
    Strings expected = {};

    ASSERT_EQ(tokenizeString<Strings>(" "), expected);
}

TEST(tokenizeString, twoSep)
{
    Strings expected = {};

    ASSERT_EQ(tokenizeString<Strings>(" \n"), expected);
}

TEST(tokenizeString, tokenizeSpacesWithDefaults)
{
    auto s = "foo bar baz";
    Strings expected = {"foo", "bar", "baz"};

    ASSERT_EQ(tokenizeString<Strings>(s), expected);
}

TEST(tokenizeString, tokenizeTabsWithDefaults)
{
    auto s = "foo\tbar\tbaz";
    Strings expected = {"foo", "bar", "baz"};

    ASSERT_EQ(tokenizeString<Strings>(s), expected);
}

TEST(tokenizeString, tokenizeTabsSpacesWithDefaults)
{
    auto s = "foo\t bar\t baz";
    Strings expected = {"foo", "bar", "baz"};

    ASSERT_EQ(tokenizeString<Strings>(s), expected);
}

TEST(tokenizeString, tokenizeTabsSpacesNewlineWithDefaults)
{
    auto s = "foo\t\n bar\t\n baz";
    Strings expected = {"foo", "bar", "baz"};

    ASSERT_EQ(tokenizeString<Strings>(s), expected);
}

TEST(tokenizeString, tokenizeTabsSpacesNewlineRetWithDefaults)
{
    auto s = "foo\t\n\r bar\t\n\r baz";
    Strings expected = {"foo", "bar", "baz"};

    ASSERT_EQ(tokenizeString<Strings>(s), expected);

    auto s2 = "foo \t\n\r bar \t\n\r baz";
    Strings expected2 = {"foo", "bar", "baz"};

    ASSERT_EQ(tokenizeString<Strings>(s2), expected2);
}

TEST(tokenizeString, tokenizeWithCustomSep)
{
    auto s = "foo\n,bar\n,baz\n";
    Strings expected = {"foo\n", "bar\n", "baz\n"};

    ASSERT_EQ(tokenizeString<Strings>(s, ","), expected);
}

TEST(tokenizeString, tokenizeSepAtStart)
{
    auto s = ",foo,bar,baz";
    Strings expected = {"foo", "bar", "baz"};

    ASSERT_EQ(tokenizeString<Strings>(s, ","), expected);
}

TEST(tokenizeString, tokenizeSepAtEnd)
{
    auto s = "foo,bar,baz,";
    Strings expected = {"foo", "bar", "baz"};

    ASSERT_EQ(tokenizeString<Strings>(s, ","), expected);
}

TEST(tokenizeString, tokenizeSepEmpty)
{
    auto s = "foo,,baz";
    Strings expected = {"foo", "baz"};

    ASSERT_EQ(tokenizeString<Strings>(s, ","), expected);
}

/* ----------------------------------------------------------------------------
 * splitString
 * --------------------------------------------------------------------------*/

TEST(splitString, empty)
{
    Strings expected = {""};

    ASSERT_EQ(splitString<Strings>("", " \t\n\r"), expected);
}

TEST(splitString, oneSep)
{
    Strings expected = {"", ""};

    ASSERT_EQ(splitString<Strings>(" ", " \t\n\r"), expected);
}

TEST(splitString, twoSep)
{
    Strings expected = {"", "", ""};

    ASSERT_EQ(splitString<Strings>(" \n", " \t\n\r"), expected);
}

TEST(splitString, tokenizeSpacesWithSpaces)
{
    auto s = "foo bar baz";
    Strings expected = {"foo", "bar", "baz"};

    ASSERT_EQ(splitString<Strings>(s, " \t\n\r"), expected);
}

TEST(splitString, tokenizeTabsWithDefaults)
{
    auto s = "foo\tbar\tbaz";
    // Using it like this is weird, but shows the difference with tokenizeString, which also has this test
    Strings expected = {"foo", "bar", "baz"};

    ASSERT_EQ(splitString<Strings>(s, " \t\n\r"), expected);
}

TEST(splitString, tokenizeTabsSpacesWithDefaults)
{
    auto s = "foo\t bar\t baz";
    // Using it like this is weird, but shows the difference with tokenizeString, which also has this test
    Strings expected = {"foo", "", "bar", "", "baz"};

    ASSERT_EQ(splitString<Strings>(s, " \t\n\r"), expected);
}

TEST(splitString, tokenizeTabsSpacesNewlineWithDefaults)
{
    auto s = "foo\t\n bar\t\n baz";
    // Using it like this is weird, but shows the difference with tokenizeString, which also has this test
    Strings expected = {"foo", "", "", "bar", "", "", "baz"};

    ASSERT_EQ(splitString<Strings>(s, " \t\n\r"), expected);
}

TEST(splitString, tokenizeTabsSpacesNewlineRetWithDefaults)
{
    auto s = "foo\t\n\r bar\t\n\r baz";
    // Using it like this is weird, but shows the difference with tokenizeString, which also has this test
    Strings expected = {"foo", "", "", "", "bar", "", "", "", "baz"};

    ASSERT_EQ(splitString<Strings>(s, " \t\n\r"), expected);

    auto s2 = "foo \t\n\r bar \t\n\r baz";
    Strings expected2 = {"foo", "", "", "", "", "bar", "", "", "", "", "baz"};

    ASSERT_EQ(splitString<Strings>(s2, " \t\n\r"), expected2);
}

TEST(splitString, tokenizeWithCustomSep)
{
    auto s = "foo\n,bar\n,baz\n";
    Strings expected = {"foo\n", "bar\n", "baz\n"};

    ASSERT_EQ(splitString<Strings>(s, ","), expected);
}

TEST(splitString, tokenizeSepAtStart)
{
    auto s = ",foo,bar,baz";
    Strings expected = {"", "foo", "bar", "baz"};

    ASSERT_EQ(splitString<Strings>(s, ","), expected);
}

TEST(splitString, tokenizeSepAtEnd)
{
    auto s = "foo,bar,baz,";
    Strings expected = {"foo", "bar", "baz", ""};

    ASSERT_EQ(splitString<Strings>(s, ","), expected);
}

TEST(splitString, tokenizeSepEmpty)
{
    auto s = "foo,,baz";
    Strings expected = {"foo", "", "baz"};

    ASSERT_EQ(splitString<Strings>(s, ","), expected);
}

// concatStringsSep sep . splitString sep = id if sep is 1 char
RC_GTEST_PROP(splitString, recoveredByConcatStringsSep, (const std::string & s))
{
    RC_ASSERT(concatStringsSep("/", splitString<Strings>(s, "/")) == s);
    RC_ASSERT(concatStringsSep("a", splitString<Strings>(s, "a")) == s);
}

} // namespace nix
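The property test above states the round trip in one direction only: splitting and then re-joining with the same single-character separator reproduces the input. The reverse composition does not hold once an element itself contains the separator, which a small, purely illustrative test (same helpers and Strings alias as above) makes concrete:

TEST(splitString, roundTripIsOneDirectional)
{
    // Splitting first and re-joining restores the original string, even with
    // empty fields around repeated separators.
    ASSERT_EQ(concatStringsSep("/", splitString<Strings>("a//b", "/")), "a//b");

    // Joining first and splitting afterwards does not restore the original
    // list when an element contains the separator.
    Strings withSep = {"a/b", "c"};
    ASSERT_EQ(splitString<Strings>(concatStringsSep("/", withSep), "/"),
              (Strings{"a", "b", "c"}));
}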
43
src/libutil-tests/suggestions.cc
Normal file
43
src/libutil-tests/suggestions.cc
Normal file
|
@ -0,0 +1,43 @@
|
|||
#include "suggestions.hh"
|
||||
#include <gtest/gtest.h>
|
||||
|
||||
namespace nix {
|
||||
|
||||
struct LevenshteinDistanceParam {
|
||||
std::string s1, s2;
|
||||
int distance;
|
||||
};
|
||||
|
||||
class LevenshteinDistanceTest :
|
||||
public testing::TestWithParam<LevenshteinDistanceParam> {
|
||||
};
|
||||
|
||||
TEST_P(LevenshteinDistanceTest, CorrectlyComputed) {
|
||||
auto params = GetParam();
|
||||
|
||||
ASSERT_EQ(levenshteinDistance(params.s1, params.s2), params.distance);
|
||||
ASSERT_EQ(levenshteinDistance(params.s2, params.s1), params.distance);
|
||||
}
|
||||
|
||||
INSTANTIATE_TEST_SUITE_P(LevenshteinDistance, LevenshteinDistanceTest,
|
||||
testing::Values(
|
||||
LevenshteinDistanceParam{"foo", "foo", 0},
|
||||
LevenshteinDistanceParam{"foo", "", 3},
|
||||
LevenshteinDistanceParam{"", "", 0},
|
||||
LevenshteinDistanceParam{"foo", "fo", 1},
|
||||
LevenshteinDistanceParam{"foo", "oo", 1},
|
||||
LevenshteinDistanceParam{"foo", "fao", 1},
|
||||
LevenshteinDistanceParam{"foo", "abc", 3}
|
||||
)
|
||||
);
|
||||
|
||||
TEST(Suggestions, Trim) {
|
||||
auto suggestions = Suggestions::bestMatches({"foooo", "bar", "fo", "gao"}, "foo");
|
||||
auto onlyOne = suggestions.trim(1);
|
||||
ASSERT_EQ(onlyOne.suggestions.size(), 1);
|
||||
ASSERT_TRUE(onlyOne.suggestions.begin()->suggestion == "fo");
|
||||
|
||||
auto closest = suggestions.trim(999, 2);
|
||||
ASSERT_EQ(closest.suggestions.size(), 3);
|
||||
}
|
||||
}
|
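For reference, the counts asserted in the Trim test follow directly from the edit distances to "foo" among the candidates: "fo" is at distance 1, "foooo" and "gao" at distance 2, and "bar" at distance 3, so trimming to a maximum distance of 2 keeps three suggestions. An illustrative check of those distances, using the same function as above:

TEST(Suggestions, trimDistances) {
    // Illustration only: the distances that the Trim expectations rely on.
    ASSERT_EQ(levenshteinDistance("foo", "fo"), 1);
    ASSERT_EQ(levenshteinDistance("foo", "foooo"), 2);
    ASSERT_EQ(levenshteinDistance("foo", "gao"), 2);
    ASSERT_EQ(levenshteinDistance("foo", "bar"), 3);
}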
60
src/libutil-tests/terminal.cc
Normal file
60
src/libutil-tests/terminal.cc
Normal file
|
@ -0,0 +1,60 @@
|
|||
#include "util.hh"
|
||||
#include "types.hh"
|
||||
#include "terminal.hh"
|
||||
#include "strings.hh"
|
||||
|
||||
#include <limits.h>
|
||||
#include <gtest/gtest.h>
|
||||
|
||||
#include <numeric>
|
||||
|
||||
namespace nix {
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* filterANSIEscapes
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
TEST(filterANSIEscapes, emptyString)
|
||||
{
|
||||
auto s = "";
|
||||
auto expected = "";
|
||||
|
||||
ASSERT_EQ(filterANSIEscapes(s), expected);
|
||||
}
|
||||
|
||||
TEST(filterANSIEscapes, doesntChangePrintableChars)
|
||||
{
|
||||
auto s = "09 2q304ruyhr slk2-19024 kjsadh sar f";
|
||||
|
||||
ASSERT_EQ(filterANSIEscapes(s), s);
|
||||
}
|
||||
|
||||
TEST(filterANSIEscapes, filtersColorCodes)
|
||||
{
|
||||
auto s = "\u001b[30m A \u001b[31m B \u001b[32m C \u001b[33m D \u001b[0m";
|
||||
|
||||
ASSERT_EQ(filterANSIEscapes(s, true, 2), " A");
|
||||
ASSERT_EQ(filterANSIEscapes(s, true, 3), " A ");
|
||||
ASSERT_EQ(filterANSIEscapes(s, true, 4), " A ");
|
||||
ASSERT_EQ(filterANSIEscapes(s, true, 5), " A B");
|
||||
ASSERT_EQ(filterANSIEscapes(s, true, 8), " A B C");
|
||||
}
|
||||
|
||||
TEST(filterANSIEscapes, expandsTabs)
|
||||
{
|
||||
auto s = "foo\tbar\tbaz";
|
||||
|
||||
ASSERT_EQ(filterANSIEscapes(s, true), "foo bar baz");
|
||||
}
|
||||
|
||||
TEST(filterANSIEscapes, utf8)
|
||||
{
|
||||
ASSERT_EQ(filterANSIEscapes("foobar", true, 5), "fooba");
|
||||
ASSERT_EQ(filterANSIEscapes("fóóbär", true, 6), "fóóbär");
|
||||
ASSERT_EQ(filterANSIEscapes("fóóbär", true, 5), "fóóbä");
|
||||
ASSERT_EQ(filterANSIEscapes("fóóbär", true, 3), "fóó");
|
||||
ASSERT_EQ(filterANSIEscapes("f€€bär", true, 4), "f€€b");
|
||||
ASSERT_EQ(filterANSIEscapes("f𐍈𐍈bär", true, 4), "f𐍈𐍈b");
|
||||
}
|
||||
|
||||
} // namespace nix
|
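A further illustration of how the width argument interacts with filtering, under the same assumptions as the tests above: escape sequences are stripped first, and the limit then counts the remaining printable characters only.

TEST(filterANSIEscapes, widthCountsPrintableCharsOnly)
{
    // Sketch: "warning:" is 8 printable characters; the colour codes around it
    // do not count towards the width limit.
    auto s = "\u001b[31mwarning:\u001b[0m disk full";
    ASSERT_EQ(filterANSIEscapes(s, true, 8), "warning:");
}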
370
src/libutil-tests/url.cc
Normal file
370
src/libutil-tests/url.cc
Normal file
|
@ -0,0 +1,370 @@
|
|||
#include "url.hh"
|
||||
#include <gtest/gtest.h>
|
||||
|
||||
namespace nix {
|
||||
|
||||
/* ----------- tests for url.hh --------------------------------------------------*/
|
||||
|
||||
std::string print_map(std::map<std::string, std::string> m) {
|
||||
std::map<std::string, std::string>::iterator it;
|
||||
std::string s = "{ ";
|
||||
for (it = m.begin(); it != m.end(); ++it) {
|
||||
s += "{ ";
|
||||
s += it->first;
|
||||
s += " = ";
|
||||
s += it->second;
|
||||
s += " } ";
|
||||
}
|
||||
s += "}";
|
||||
return s;
|
||||
}
|
||||
|
||||
|
||||
std::ostream& operator<<(std::ostream& os, const ParsedURL& p) {
|
||||
return os << "\n"
|
||||
<< "url: " << p.url << "\n"
|
||||
<< "base: " << p.base << "\n"
|
||||
<< "scheme: " << p.scheme << "\n"
|
||||
<< "authority: " << p.authority.value() << "\n"
|
||||
<< "path: " << p.path << "\n"
|
||||
<< "query: " << print_map(p.query) << "\n"
|
||||
<< "fragment: " << p.fragment << "\n";
|
||||
}
|
||||
|
||||
TEST(parseURL, parsesSimpleHttpUrl) {
|
||||
auto s = "http://www.example.org/file.tar.gz";
|
||||
auto parsed = parseURL(s);
|
||||
|
||||
ParsedURL expected {
|
||||
.url = "http://www.example.org/file.tar.gz",
|
||||
.base = "http://www.example.org/file.tar.gz",
|
||||
.scheme = "http",
|
||||
.authority = "www.example.org",
|
||||
.path = "/file.tar.gz",
|
||||
.query = (StringMap) { },
|
||||
.fragment = "",
|
||||
};
|
||||
|
||||
ASSERT_EQ(parsed, expected);
|
||||
}
|
||||
|
||||
TEST(parseURL, parsesSimpleHttpsUrl) {
|
||||
auto s = "https://www.example.org/file.tar.gz";
|
||||
auto parsed = parseURL(s);
|
||||
|
||||
ParsedURL expected {
|
||||
.url = "https://www.example.org/file.tar.gz",
|
||||
.base = "https://www.example.org/file.tar.gz",
|
||||
.scheme = "https",
|
||||
.authority = "www.example.org",
|
||||
.path = "/file.tar.gz",
|
||||
.query = (StringMap) { },
|
||||
.fragment = "",
|
||||
};
|
||||
|
||||
ASSERT_EQ(parsed, expected);
|
||||
}
|
||||
|
||||
TEST(parseURL, parsesSimpleHttpUrlWithQueryAndFragment) {
|
||||
auto s = "https://www.example.org/file.tar.gz?download=fast&when=now#hello";
|
||||
auto parsed = parseURL(s);
|
||||
|
||||
ParsedURL expected {
|
||||
.url = "https://www.example.org/file.tar.gz",
|
||||
.base = "https://www.example.org/file.tar.gz",
|
||||
.scheme = "https",
|
||||
.authority = "www.example.org",
|
||||
.path = "/file.tar.gz",
|
||||
.query = (StringMap) { { "download", "fast" }, { "when", "now" } },
|
||||
.fragment = "hello",
|
||||
};
|
||||
|
||||
ASSERT_EQ(parsed, expected);
|
||||
}
|
||||
|
||||
TEST(parseURL, parsesSimpleHttpUrlWithComplexFragment) {
|
||||
auto s = "http://www.example.org/file.tar.gz?field=value#?foo=bar%23";
|
||||
auto parsed = parseURL(s);
|
||||
|
||||
ParsedURL expected {
|
||||
.url = "http://www.example.org/file.tar.gz",
|
||||
.base = "http://www.example.org/file.tar.gz",
|
||||
.scheme = "http",
|
||||
.authority = "www.example.org",
|
||||
.path = "/file.tar.gz",
|
||||
.query = (StringMap) { { "field", "value" } },
|
||||
.fragment = "?foo=bar#",
|
||||
};
|
||||
|
||||
ASSERT_EQ(parsed, expected);
|
||||
}
|
||||
|
||||
TEST(parseURL, parsesFilePlusHttpsUrl) {
|
||||
auto s = "file+https://www.example.org/video.mp4";
|
||||
auto parsed = parseURL(s);
|
||||
|
||||
ParsedURL expected {
|
||||
.url = "file+https://www.example.org/video.mp4",
|
||||
.base = "https://www.example.org/video.mp4",
|
||||
.scheme = "file+https",
|
||||
.authority = "www.example.org",
|
||||
.path = "/video.mp4",
|
||||
.query = (StringMap) { },
|
||||
.fragment = "",
|
||||
};
|
||||
|
||||
ASSERT_EQ(parsed, expected);
|
||||
}
|
||||
|
||||
TEST(parseURL, rejectsAuthorityInUrlsWithFileTransportation) {
|
||||
auto s = "file://www.example.org/video.mp4";
|
||||
ASSERT_THROW(parseURL(s), Error);
|
||||
}
|
||||
|
||||
TEST(parseURL, parseIPv4Address) {
|
||||
auto s = "http://127.0.0.1:8080/file.tar.gz?download=fast&when=now#hello";
|
||||
auto parsed = parseURL(s);
|
||||
|
||||
ParsedURL expected {
|
||||
.url = "http://127.0.0.1:8080/file.tar.gz",
|
||||
.base = "https://127.0.0.1:8080/file.tar.gz",
|
||||
.scheme = "http",
|
||||
.authority = "127.0.0.1:8080",
|
||||
.path = "/file.tar.gz",
|
||||
.query = (StringMap) { { "download", "fast" }, { "when", "now" } },
|
||||
.fragment = "hello",
|
||||
};
|
||||
|
||||
ASSERT_EQ(parsed, expected);
|
||||
}
|
||||
|
||||
TEST(parseURL, parseScopedRFC4007IPv6Address) {
|
||||
auto s = "http://[fe80::818c:da4d:8975:415c\%enp0s25]:8080";
|
||||
auto parsed = parseURL(s);
|
||||
|
||||
ParsedURL expected {
|
||||
.url = "http://[fe80::818c:da4d:8975:415c\%enp0s25]:8080",
|
||||
.base = "http://[fe80::818c:da4d:8975:415c\%enp0s25]:8080",
|
||||
.scheme = "http",
|
||||
.authority = "[fe80::818c:da4d:8975:415c\%enp0s25]:8080",
|
||||
.path = "",
|
||||
.query = (StringMap) { },
|
||||
.fragment = "",
|
||||
};
|
||||
|
||||
ASSERT_EQ(parsed, expected);
|
||||
|
||||
}
|
||||
|
||||
TEST(parseURL, parseIPv6Address) {
|
||||
auto s = "http://[2a02:8071:8192:c100:311d:192d:81ac:11ea]:8080";
|
||||
auto parsed = parseURL(s);
|
||||
|
||||
ParsedURL expected {
|
||||
.url = "http://[2a02:8071:8192:c100:311d:192d:81ac:11ea]:8080",
|
||||
.base = "http://[2a02:8071:8192:c100:311d:192d:81ac:11ea]:8080",
|
||||
.scheme = "http",
|
||||
.authority = "[2a02:8071:8192:c100:311d:192d:81ac:11ea]:8080",
|
||||
.path = "",
|
||||
.query = (StringMap) { },
|
||||
.fragment = "",
|
||||
};
|
||||
|
||||
ASSERT_EQ(parsed, expected);
|
||||
|
||||
}
|
||||
|
||||
TEST(parseURL, parseEmptyQueryParams) {
|
||||
auto s = "http://127.0.0.1:8080/file.tar.gz?&&&&&";
|
||||
auto parsed = parseURL(s);
|
||||
ASSERT_EQ(parsed.query, (StringMap) { });
|
||||
}
|
||||
|
||||
TEST(parseURL, parseUserPassword) {
|
||||
auto s = "http://user:pass@www.example.org:8080/file.tar.gz";
|
||||
auto parsed = parseURL(s);
|
||||
|
||||
ParsedURL expected {
|
||||
.url = "http://user:pass@www.example.org/file.tar.gz",
|
||||
.base = "http://user:pass@www.example.org/file.tar.gz",
|
||||
.scheme = "http",
|
||||
.authority = "user:pass@www.example.org:8080",
|
||||
.path = "/file.tar.gz",
|
||||
.query = (StringMap) { },
|
||||
.fragment = "",
|
||||
};
|
||||
|
||||
|
||||
ASSERT_EQ(parsed, expected);
|
||||
}
|
||||
|
||||
TEST(parseURL, parseFileURLWithQueryAndFragment) {
|
||||
auto s = "file:///none/of//your/business";
|
||||
auto parsed = parseURL(s);
|
||||
|
||||
ParsedURL expected {
|
||||
.url = "",
|
||||
.base = "",
|
||||
.scheme = "file",
|
||||
.authority = "",
|
||||
.path = "/none/of//your/business",
|
||||
.query = (StringMap) { },
|
||||
.fragment = "",
|
||||
};
|
||||
|
||||
ASSERT_EQ(parsed, expected);
|
||||
|
||||
}
|
||||
|
||||
TEST(parseURL, parsedUrlsIsEqualToItself) {
|
||||
auto s = "http://www.example.org/file.tar.gz";
|
||||
auto url = parseURL(s);
|
||||
|
||||
ASSERT_TRUE(url == url);
|
||||
}
|
||||
|
||||
TEST(parseURL, parseFTPUrl) {
|
||||
auto s = "ftp://ftp.nixos.org/downloads/nixos.iso";
|
||||
auto parsed = parseURL(s);
|
||||
|
||||
ParsedURL expected {
|
||||
.url = "ftp://ftp.nixos.org/downloads/nixos.iso",
|
||||
.base = "ftp://ftp.nixos.org/downloads/nixos.iso",
|
||||
.scheme = "ftp",
|
||||
.authority = "ftp.nixos.org",
|
||||
.path = "/downloads/nixos.iso",
|
||||
.query = (StringMap) { },
|
||||
.fragment = "",
|
||||
};
|
||||
|
||||
ASSERT_EQ(parsed, expected);
|
||||
}
|
||||
|
||||
TEST(parseURL, parsesAnythingInUriFormat) {
|
||||
auto s = "whatever://github.com/NixOS/nixpkgs.git";
|
||||
auto parsed = parseURL(s);
|
||||
}
|
||||
|
||||
TEST(parseURL, parsesAnythingInUriFormatWithoutDoubleSlash) {
|
||||
auto s = "whatever:github.com/NixOS/nixpkgs.git";
|
||||
auto parsed = parseURL(s);
|
||||
}
|
||||
|
||||
TEST(parseURL, emptyStringIsInvalidURL) {
|
||||
ASSERT_THROW(parseURL(""), Error);
|
||||
}
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* decodeQuery
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
TEST(decodeQuery, emptyStringYieldsEmptyMap) {
|
||||
auto d = decodeQuery("");
|
||||
ASSERT_EQ(d, (StringMap) { });
|
||||
}
|
||||
|
||||
TEST(decodeQuery, simpleDecode) {
|
||||
auto d = decodeQuery("yi=one&er=two");
|
||||
ASSERT_EQ(d, ((StringMap) { { "yi", "one" }, { "er", "two" } }));
|
||||
}
|
||||
|
||||
TEST(decodeQuery, decodeUrlEncodedArgs) {
|
||||
auto d = decodeQuery("arg=%3D%3D%40%3D%3D");
|
||||
ASSERT_EQ(d, ((StringMap) { { "arg", "==@==" } }));
|
||||
}
|
||||
|
||||
TEST(decodeQuery, decodeArgWithEmptyValue) {
|
||||
auto d = decodeQuery("arg=");
|
||||
ASSERT_EQ(d, ((StringMap) { { "arg", ""} }));
|
||||
}
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* percentDecode
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
TEST(percentDecode, decodesUrlEncodedString) {
|
||||
std::string s = "==@==";
|
||||
std::string d = percentDecode("%3D%3D%40%3D%3D");
|
||||
ASSERT_EQ(d, s);
|
||||
}
|
||||
|
||||
TEST(percentDecode, multipleDecodesAreIdempotent) {
|
||||
std::string once = percentDecode("%3D%3D%40%3D%3D");
|
||||
std::string twice = percentDecode(once);
|
||||
|
||||
ASSERT_EQ(once, twice);
|
||||
}
|
||||
|
||||
TEST(percentDecode, trailingPercent) {
|
||||
std::string s = "==@==%";
|
||||
std::string d = percentDecode("%3D%3D%40%3D%3D%25");
|
||||
|
||||
ASSERT_EQ(d, s);
|
||||
}
|
||||
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* percentEncode
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
TEST(percentEncode, encodesUrlEncodedString) {
|
||||
std::string s = percentEncode("==@==");
|
||||
std::string d = "%3D%3D%40%3D%3D";
|
||||
ASSERT_EQ(d, s);
|
||||
}
|
||||
|
||||
TEST(percentEncode, keepArgument) {
|
||||
std::string a = percentEncode("abd / def");
|
||||
std::string b = percentEncode("abd / def", "/");
|
||||
ASSERT_EQ(a, "abd%20%2F%20def");
|
||||
ASSERT_EQ(b, "abd%20/%20def");
|
||||
}
|
||||
|
||||
TEST(percentEncode, inverseOfDecode) {
|
||||
std::string original = "%3D%3D%40%3D%3D";
|
||||
std::string once = percentEncode(original);
|
||||
std::string back = percentDecode(once);
|
||||
|
||||
ASSERT_EQ(back, original);
|
||||
}
|
||||
|
||||
TEST(percentEncode, trailingPercent) {
|
||||
std::string s = percentEncode("==@==%");
|
||||
std::string d = "%3D%3D%40%3D%3D%25";
|
||||
|
||||
ASSERT_EQ(d, s);
|
||||
}
|
||||
|
||||
TEST(percentEncode, yen) {
|
||||
// https://en.wikipedia.org/wiki/Percent-encoding#Character_data
|
||||
std::string s = reinterpret_cast<const char*>(u8"円");
|
||||
std::string e = "%E5%86%86";
|
||||
|
||||
ASSERT_EQ(percentEncode(s), e);
|
||||
ASSERT_EQ(percentDecode(e), s);
|
||||
}
|
||||
|
||||
TEST(nix, isValidSchemeName) {
|
||||
ASSERT_TRUE(isValidSchemeName("http"));
|
||||
ASSERT_TRUE(isValidSchemeName("https"));
|
||||
ASSERT_TRUE(isValidSchemeName("file"));
|
||||
ASSERT_TRUE(isValidSchemeName("file+https"));
|
||||
ASSERT_TRUE(isValidSchemeName("fi.le"));
|
||||
ASSERT_TRUE(isValidSchemeName("file-ssh"));
|
||||
ASSERT_TRUE(isValidSchemeName("file+"));
|
||||
ASSERT_TRUE(isValidSchemeName("file."));
|
||||
ASSERT_TRUE(isValidSchemeName("file1"));
|
||||
ASSERT_FALSE(isValidSchemeName("file:"));
|
||||
ASSERT_FALSE(isValidSchemeName("file/"));
|
||||
ASSERT_FALSE(isValidSchemeName("+file"));
|
||||
ASSERT_FALSE(isValidSchemeName(".file"));
|
||||
ASSERT_FALSE(isValidSchemeName("-file"));
|
||||
ASSERT_FALSE(isValidSchemeName("1file"));
|
||||
// regex ok?
|
||||
ASSERT_FALSE(isValidSchemeName("\nhttp"));
|
||||
ASSERT_FALSE(isValidSchemeName("\nhttp\n"));
|
||||
ASSERT_FALSE(isValidSchemeName("http\n"));
|
||||
ASSERT_FALSE(isValidSchemeName("http "));
|
||||
}
|
||||
|
||||
}
|
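The inverseOfDecode test above starts from an already percent-encoded string. The same round trip also holds in the other direction for arbitrary input, since percentEncode escapes every reserved character and percentDecode maps each %XX sequence back. A small illustrative test, using only the functions exercised above:

TEST(percentEncode, decodeUndoesEncode) {
    // Illustration: encoding and then decoding restores the original string,
    // including characters that percentEncode must escape.
    std::string s = "a b/c?d&e=f%g";
    ASSERT_EQ(percentDecode(percentEncode(s)), s);
}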
385
src/libutil-tests/util.cc
Normal file
385
src/libutil-tests/util.cc
Normal file
|
@ -0,0 +1,385 @@
|
|||
#include "util.hh"
|
||||
#include "types.hh"
|
||||
#include "file-system.hh"
|
||||
#include "terminal.hh"
|
||||
#include "strings.hh"
|
||||
|
||||
#include <limits.h>
|
||||
#include <gtest/gtest.h>
|
||||
|
||||
#include <numeric>
|
||||
|
||||
namespace nix {
|
||||
|
||||
/* ----------- tests for util.hh --------------------------------------------*/
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* hasPrefix
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
TEST(hasPrefix, emptyStringHasNoPrefix)
|
||||
{
|
||||
ASSERT_FALSE(hasPrefix("", "foo"));
|
||||
}
|
||||
|
||||
TEST(hasPrefix, emptyStringIsAlwaysPrefix)
|
||||
{
|
||||
ASSERT_TRUE(hasPrefix("foo", ""));
|
||||
ASSERT_TRUE(hasPrefix("jshjkfhsadf", ""));
|
||||
}
|
||||
|
||||
TEST(hasPrefix, trivialCase)
|
||||
{
|
||||
ASSERT_TRUE(hasPrefix("foobar", "foo"));
|
||||
}
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* hasSuffix
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
TEST(hasSuffix, emptyStringHasNoSuffix)
|
||||
{
|
||||
ASSERT_FALSE(hasSuffix("", "foo"));
|
||||
}
|
||||
|
||||
TEST(hasSuffix, trivialCase)
|
||||
{
|
||||
ASSERT_TRUE(hasSuffix("foo", "foo"));
|
||||
ASSERT_TRUE(hasSuffix("foobar", "bar"));
|
||||
}
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* base64Encode
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
TEST(base64Encode, emptyString)
|
||||
{
|
||||
ASSERT_EQ(base64Encode(""), "");
|
||||
}
|
||||
|
||||
TEST(base64Encode, encodesAString)
|
||||
{
|
||||
ASSERT_EQ(base64Encode("quod erat demonstrandum"), "cXVvZCBlcmF0IGRlbW9uc3RyYW5kdW0=");
|
||||
}
|
||||
|
||||
TEST(base64Encode, encodeAndDecode)
|
||||
{
|
||||
auto s = "quod erat demonstrandum";
|
||||
auto encoded = base64Encode(s);
|
||||
auto decoded = base64Decode(encoded);
|
||||
|
||||
ASSERT_EQ(decoded, s);
|
||||
}
|
||||
|
||||
TEST(base64Encode, encodeAndDecodeNonPrintable)
|
||||
{
|
||||
char s[256];
|
||||
std::iota(std::rbegin(s), std::rend(s), 0);
|
||||
|
||||
auto encoded = base64Encode(s);
|
||||
auto decoded = base64Decode(encoded);
|
||||
|
||||
EXPECT_EQ(decoded.length(), 255);
|
||||
ASSERT_EQ(decoded, s);
|
||||
}
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* base64Decode
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
TEST(base64Decode, emptyString)
|
||||
{
|
||||
ASSERT_EQ(base64Decode(""), "");
|
||||
}
|
||||
|
||||
TEST(base64Decode, decodeAString)
|
||||
{
|
||||
ASSERT_EQ(base64Decode("cXVvZCBlcmF0IGRlbW9uc3RyYW5kdW0="), "quod erat demonstrandum");
|
||||
}
|
||||
|
||||
TEST(base64Decode, decodeThrowsOnInvalidChar)
|
||||
{
|
||||
ASSERT_THROW(base64Decode("cXVvZCBlcm_0IGRlbW9uc3RyYW5kdW0="), Error);
|
||||
}
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* getLine
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
TEST(getLine, all)
|
||||
{
|
||||
{
|
||||
auto [line, rest] = getLine("foo\nbar\nxyzzy");
|
||||
ASSERT_EQ(line, "foo");
|
||||
ASSERT_EQ(rest, "bar\nxyzzy");
|
||||
}
|
||||
|
||||
{
|
||||
auto [line, rest] = getLine("foo\r\nbar\r\nxyzzy");
|
||||
ASSERT_EQ(line, "foo");
|
||||
ASSERT_EQ(rest, "bar\r\nxyzzy");
|
||||
}
|
||||
|
||||
{
|
||||
auto [line, rest] = getLine("foo\n");
|
||||
ASSERT_EQ(line, "foo");
|
||||
ASSERT_EQ(rest, "");
|
||||
}
|
||||
|
||||
{
|
||||
auto [line, rest] = getLine("foo");
|
||||
ASSERT_EQ(line, "foo");
|
||||
ASSERT_EQ(rest, "");
|
||||
}
|
||||
|
||||
{
|
||||
auto [line, rest] = getLine("");
|
||||
ASSERT_EQ(line, "");
|
||||
ASSERT_EQ(rest, "");
|
||||
}
|
||||
}
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* toLower
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
TEST(toLower, emptyString)
|
||||
{
|
||||
ASSERT_EQ(toLower(""), "");
|
||||
}
|
||||
|
||||
TEST(toLower, nonLetters)
|
||||
{
|
||||
auto s = "!@(*$#)(@#=\\234_";
|
||||
ASSERT_EQ(toLower(s), s);
|
||||
}
|
||||
|
||||
// std::tolower() doesn't handle unicode characters. In the context of
|
||||
// store paths this isn't relevant but doesn't hurt to record this behavior
|
||||
// here.
|
||||
TEST(toLower, umlauts)
|
||||
{
|
||||
auto s = "ÄÖÜ";
|
||||
ASSERT_EQ(toLower(s), "ÄÖÜ");
|
||||
}
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* string2Float
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
TEST(string2Float, emptyString)
|
||||
{
|
||||
ASSERT_EQ(string2Float<double>(""), std::nullopt);
|
||||
}
|
||||
|
||||
TEST(string2Float, trivialConversions)
|
||||
{
|
||||
ASSERT_EQ(string2Float<double>("1.0"), 1.0);
|
||||
|
||||
ASSERT_EQ(string2Float<double>("0.0"), 0.0);
|
||||
|
||||
ASSERT_EQ(string2Float<double>("-100.25"), -100.25);
|
||||
}
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* string2Int
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
TEST(string2Int, emptyString)
|
||||
{
|
||||
ASSERT_EQ(string2Int<int>(""), std::nullopt);
|
||||
}
|
||||
|
||||
TEST(string2Int, trivialConversions)
|
||||
{
|
||||
ASSERT_EQ(string2Int<int>("1"), 1);
|
||||
|
||||
ASSERT_EQ(string2Int<int>("0"), 0);
|
||||
|
||||
ASSERT_EQ(string2Int<int>("-100"), -100);
|
||||
}
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* renderSize
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
TEST(renderSize, misc)
|
||||
{
|
||||
ASSERT_EQ(renderSize(0, true), " 0.0 KiB");
|
||||
ASSERT_EQ(renderSize(100, true), " 0.1 KiB");
|
||||
ASSERT_EQ(renderSize(100), "0.1 KiB");
|
||||
ASSERT_EQ(renderSize(972, true), " 0.9 KiB");
|
||||
ASSERT_EQ(renderSize(973, true), " 1.0 KiB"); // FIXME: should round down
|
||||
ASSERT_EQ(renderSize(1024, true), " 1.0 KiB");
|
||||
ASSERT_EQ(renderSize(1024 * 1024, true), "1024.0 KiB");
|
||||
ASSERT_EQ(renderSize(1100 * 1024, true), " 1.1 MiB");
|
||||
ASSERT_EQ(renderSize(2ULL * 1024 * 1024 * 1024, true), " 2.0 GiB");
|
||||
ASSERT_EQ(renderSize(2100ULL * 1024 * 1024 * 1024, true), " 2.1 TiB");
|
||||
}
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* rewriteStrings
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
TEST(rewriteStrings, emptyString)
|
||||
{
|
||||
StringMap rewrites;
|
||||
rewrites["this"] = "that";
|
||||
|
||||
ASSERT_EQ(rewriteStrings("", rewrites), "");
|
||||
}
|
||||
|
||||
TEST(rewriteStrings, emptyRewrites)
|
||||
{
|
||||
StringMap rewrites;
|
||||
|
||||
ASSERT_EQ(rewriteStrings("this and that", rewrites), "this and that");
|
||||
}
|
||||
|
||||
TEST(rewriteStrings, successfulRewrite)
|
||||
{
|
||||
StringMap rewrites;
|
||||
rewrites["this"] = "that";
|
||||
|
||||
ASSERT_EQ(rewriteStrings("this and that", rewrites), "that and that");
|
||||
}
|
||||
|
||||
TEST(rewriteStrings, doesntOccur)
|
||||
{
|
||||
StringMap rewrites;
|
||||
rewrites["foo"] = "bar";
|
||||
|
||||
ASSERT_EQ(rewriteStrings("this and that", rewrites), "this and that");
|
||||
}
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* replaceStrings
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
TEST(replaceStrings, emptyString)
|
||||
{
|
||||
ASSERT_EQ(replaceStrings("", "this", "that"), "");
|
||||
ASSERT_EQ(replaceStrings("this and that", "", ""), "this and that");
|
||||
}
|
||||
|
||||
TEST(replaceStrings, successfulReplace)
|
||||
{
|
||||
ASSERT_EQ(replaceStrings("this and that", "this", "that"), "that and that");
|
||||
}
|
||||
|
||||
TEST(replaceStrings, doesntOccur)
|
||||
{
|
||||
ASSERT_EQ(replaceStrings("this and that", "foo", "bar"), "this and that");
|
||||
}
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* trim
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
TEST(trim, emptyString)
|
||||
{
|
||||
ASSERT_EQ(trim(""), "");
|
||||
}
|
||||
|
||||
TEST(trim, removesWhitespace)
|
||||
{
|
||||
ASSERT_EQ(trim("foo"), "foo");
|
||||
ASSERT_EQ(trim(" foo "), "foo");
|
||||
ASSERT_EQ(trim(" foo bar baz"), "foo bar baz");
|
||||
ASSERT_EQ(trim(" \t foo bar baz\n"), "foo bar baz");
|
||||
}
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* chomp
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
TEST(chomp, emptyString)
|
||||
{
|
||||
ASSERT_EQ(chomp(""), "");
|
||||
}
|
||||
|
||||
TEST(chomp, removesWhitespace)
|
||||
{
|
||||
ASSERT_EQ(chomp("foo"), "foo");
|
||||
ASSERT_EQ(chomp("foo "), "foo");
|
||||
ASSERT_EQ(chomp(" foo "), " foo");
|
||||
ASSERT_EQ(chomp(" foo bar baz "), " foo bar baz");
|
||||
ASSERT_EQ(chomp("\t foo bar baz\n"), "\t foo bar baz");
|
||||
}
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* quoteStrings
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
TEST(quoteStrings, empty)
|
||||
{
|
||||
Strings s = {};
|
||||
Strings expected = {};
|
||||
|
||||
ASSERT_EQ(quoteStrings(s), expected);
|
||||
}
|
||||
|
||||
TEST(quoteStrings, emptyStrings)
|
||||
{
|
||||
Strings s = {"", "", ""};
|
||||
Strings expected = {"''", "''", "''"};
|
||||
ASSERT_EQ(quoteStrings(s), expected);
|
||||
}
|
||||
|
||||
TEST(quoteStrings, trivialQuote)
|
||||
{
|
||||
Strings s = {"foo", "bar", "baz"};
|
||||
Strings expected = {"'foo'", "'bar'", "'baz'"};
|
||||
|
||||
ASSERT_EQ(quoteStrings(s), expected);
|
||||
}
|
||||
|
||||
TEST(quoteStrings, quotedStrings)
|
||||
{
|
||||
Strings s = {"'foo'", "'bar'", "'baz'"};
|
||||
Strings expected = {"''foo''", "''bar''", "''baz''"};
|
||||
|
||||
ASSERT_EQ(quoteStrings(s), expected);
|
||||
}
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* get
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
TEST(get, emptyContainer)
|
||||
{
|
||||
StringMap s = {};
|
||||
auto expected = nullptr;
|
||||
|
||||
ASSERT_EQ(get(s, "one"), expected);
|
||||
}
|
||||
|
||||
TEST(get, getFromContainer)
|
||||
{
|
||||
StringMap s;
|
||||
s["one"] = "yi";
|
||||
s["two"] = "er";
|
||||
auto expected = "yi";
|
||||
|
||||
ASSERT_EQ(*get(s, "one"), expected);
|
||||
}
|
||||
|
||||
TEST(getOr, emptyContainer)
|
||||
{
|
||||
StringMap s = {};
|
||||
auto expected = "yi";
|
||||
|
||||
ASSERT_EQ(getOr(s, "one", "yi"), expected);
|
||||
}
|
||||
|
||||
TEST(getOr, getFromContainer)
|
||||
{
|
||||
StringMap s;
|
||||
s["one"] = "yi";
|
||||
s["two"] = "er";
|
||||
auto expected = "yi";
|
||||
|
||||
ASSERT_EQ(getOr(s, "one", "nope"), expected);
|
||||
}
|
||||
|
||||
} // namespace nix
|
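The trim and chomp tests above are easiest to compare side by side: trim strips whitespace from both ends, while chomp only strips the tail. An illustrative test making that contrast explicit on a single input:

TEST(trim, differsFromChomp)
{
    auto s = " \tfoo ";
    ASSERT_EQ(trim(s), "foo");     // both ends stripped
    ASSERT_EQ(chomp(s), " \tfoo"); // only trailing whitespace stripped
}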
105
src/libutil-tests/xml-writer.cc
Normal file
105
src/libutil-tests/xml-writer.cc
Normal file
|
@ -0,0 +1,105 @@
|
|||
#include "xml-writer.hh"
|
||||
#include <gtest/gtest.h>
|
||||
#include <sstream>
|
||||
|
||||
namespace nix {
|
||||
|
||||
/* ----------------------------------------------------------------------------
|
||||
* XMLWriter
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
TEST(XMLWriter, emptyObject) {
|
||||
std::stringstream out;
|
||||
{
|
||||
XMLWriter t(false, out);
|
||||
}
|
||||
|
||||
ASSERT_EQ(out.str(), "<?xml version='1.0' encoding='utf-8'?>\n");
|
||||
}
|
||||
|
||||
TEST(XMLWriter, objectWithEmptyElement) {
|
||||
std::stringstream out;
|
||||
{
|
||||
XMLWriter t(false, out);
|
||||
t.openElement("foobar");
|
||||
}
|
||||
|
||||
ASSERT_EQ(out.str(), "<?xml version='1.0' encoding='utf-8'?>\n<foobar></foobar>");
|
||||
}
|
||||
|
||||
TEST(XMLWriter, objectWithElementWithAttrs) {
|
||||
std::stringstream out;
|
||||
{
|
||||
XMLWriter t(false, out);
|
||||
XMLAttrs attrs = {
|
||||
{ "foo", "bar" }
|
||||
};
|
||||
t.openElement("foobar", attrs);
|
||||
}
|
||||
|
||||
ASSERT_EQ(out.str(), "<?xml version='1.0' encoding='utf-8'?>\n<foobar foo=\"bar\"></foobar>");
|
||||
}
|
||||
|
||||
TEST(XMLWriter, objectWithElementWithEmptyAttrs) {
|
||||
std::stringstream out;
|
||||
{
|
||||
XMLWriter t(false, out);
|
||||
XMLAttrs attrs = {};
|
||||
t.openElement("foobar", attrs);
|
||||
}
|
||||
|
||||
ASSERT_EQ(out.str(), "<?xml version='1.0' encoding='utf-8'?>\n<foobar></foobar>");
|
||||
}
|
||||
|
||||
TEST(XMLWriter, objectWithElementWithAttrsEscaping) {
|
||||
std::stringstream out;
|
||||
{
|
||||
XMLWriter t(false, out);
|
||||
XMLAttrs attrs = {
|
||||
{ "<key>", "<value>" }
|
||||
};
|
||||
t.openElement("foobar", attrs);
|
||||
}
|
||||
|
||||
// XXX: While "<value>" is escaped, "<key>" isn't which I think is a bug.
|
||||
ASSERT_EQ(out.str(), "<?xml version='1.0' encoding='utf-8'?>\n<foobar <key>=\"<value>\"></foobar>");
|
||||
}
|
||||
|
||||
TEST(XMLWriter, objectWithElementWithAttrsIndented) {
|
||||
std::stringstream out;
|
||||
{
|
||||
XMLWriter t(true, out);
|
||||
XMLAttrs attrs = {
|
||||
{ "foo", "bar" }
|
||||
};
|
||||
t.openElement("foobar", attrs);
|
||||
}
|
||||
|
||||
ASSERT_EQ(out.str(), "<?xml version='1.0' encoding='utf-8'?>\n<foobar foo=\"bar\">\n</foobar>\n");
|
||||
}
|
||||
|
||||
TEST(XMLWriter, writeEmptyElement) {
|
||||
std::stringstream out;
|
||||
{
|
||||
XMLWriter t(false, out);
|
||||
t.writeEmptyElement("foobar");
|
||||
}
|
||||
|
||||
ASSERT_EQ(out.str(), "<?xml version='1.0' encoding='utf-8'?>\n<foobar />");
|
||||
}
|
||||
|
||||
TEST(XMLWriter, writeEmptyElementWithAttributes) {
|
||||
std::stringstream out;
|
||||
{
|
||||
XMLWriter t(false, out);
|
||||
XMLAttrs attrs = {
|
||||
{ "foo", "bar" }
|
||||
};
|
||||
t.writeEmptyElement("foobar", attrs);
|
||||
|
||||
}
|
||||
|
||||
ASSERT_EQ(out.str(), "<?xml version='1.0' encoding='utf-8'?>\n<foobar foo=\"bar\" />");
|
||||
}
|
||||
|
||||
}
|
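All of the tests above open a single element and rely on the writer's destructor to close it. Assuming pending elements are closed in reverse order when the writer goes out of scope (which the single-element tests suggest but do not show), nesting would look roughly like this; the expected string is an assumption, not something the change set verifies:

TEST(XMLWriter, nestedElementsSketch) {
    std::stringstream out;
    {
        XMLWriter t(false, out);
        t.openElement("outer");
        t.openElement("inner");
        // both elements are presumably closed, innermost first, when t is destroyed
    }

    ASSERT_EQ(out.str(), "<?xml version='1.0' encoding='utf-8'?>\n<outer><inner></inner></outer>");
}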