Mirror of https://github.com/NixOS/nix (synced 2025-07-09 12:03:55 +02:00)

commit 042c2ae3ac
Merge remote-tracking branch 'origin/master' into detsys-main

222 changed files with 3295 additions and 1254 deletions
@@ -4,12 +4,14 @@
 #include <gtest/gtest.h>
 #include <gmock/gmock.h>

+#include "fetch-settings.hh"
 #include "value.hh"
 #include "nixexpr.hh"
 #include "nixexpr.hh"
 #include "eval.hh"
+#include "eval-gc.hh"
 #include "eval-inline.hh"
 #include "eval-settings.hh"
 #include "store-api.hh"

 #include "tests/libstore.hh"

@@ -24,7 +26,7 @@ namespace nix {
 protected:
     LibExprTest()
         : LibStoreTest()
-        , state({}, store, evalSettings, nullptr)
+        , state({}, store, fetchSettings, evalSettings, nullptr)
     {
         evalSettings.nixPath = {};
     }

@@ -43,6 +45,7 @@ namespace nix {
     }

     bool readOnlyMode = true;
+    fetchers::Settings fetchSettings{};
     EvalSettings evalSettings{readOnlyMode};
     EvalState state;
 };
tests/unit/libfetchers/git-utils.cc (new file, 112 lines)

@@ -0,0 +1,112 @@
#include "git-utils.hh"
#include "file-system.hh"
#include "gmock/gmock.h"
#include <git2/global.h>
#include <git2/repository.h>
#include <git2/types.h>
#include <gtest/gtest.h>
#include "fs-sink.hh"
#include "serialise.hh"

namespace nix {

class GitUtilsTest : public ::testing::Test
{
    // We use a single repository for all tests.
    Path tmpDir;
    std::unique_ptr<AutoDelete> delTmpDir;

public:
    void SetUp() override
    {
        tmpDir = createTempDir();
        delTmpDir = std::make_unique<AutoDelete>(tmpDir, true);

        // Create the repo with libgit2
        git_libgit2_init();
        git_repository * repo = nullptr;
        auto r = git_repository_init(&repo, tmpDir.c_str(), 0);
        ASSERT_EQ(r, 0);
        git_repository_free(repo);
    }

    void TearDown() override
    {
        // Destroy the AutoDelete, triggering removal
        // not AutoDelete::reset(), which would cancel the deletion.
        delTmpDir.reset();
    }

    ref<GitRepo> openRepo()
    {
        return GitRepo::openRepo(tmpDir, true, false);
    }
};

void writeString(CreateRegularFileSink & fileSink, std::string contents, bool executable)
{
    if (executable)
        fileSink.isExecutable();
    fileSink.preallocateContents(contents.size());
    fileSink(contents);
}

TEST_F(GitUtilsTest, sink_basic)
{
    auto repo = openRepo();
    auto sink = repo->getFileSystemObjectSink();

    // TODO/Question: It seems a little odd that we use the tarball-like convention of requiring a top-level directory
    // here
    // The sync method does not document this behavior, should probably renamed because it's not very
    // general, and I can't imagine that "non-conventional" archives or any other source to be handled by
    // this sink.

    sink->createDirectory(CanonPath("foo-1.1"));

    sink->createRegularFile(CanonPath("foo-1.1/hello"), [](CreateRegularFileSink & fileSink) {
        writeString(fileSink, "hello world", false);
    });
    sink->createRegularFile(CanonPath("foo-1.1/bye"), [](CreateRegularFileSink & fileSink) {
        writeString(fileSink, "thanks for all the fish", false);
    });
    sink->createSymlink(CanonPath("foo-1.1/bye-link"), "bye");
    sink->createDirectory(CanonPath("foo-1.1/empty"));
    sink->createDirectory(CanonPath("foo-1.1/links"));
    sink->createHardlink(CanonPath("foo-1.1/links/foo"), CanonPath("foo-1.1/hello"));

    // sink->createHardlink("foo-1.1/links/foo-2", CanonPath("foo-1.1/hello"));

    auto result = sink->sync();
    auto accessor = repo->getAccessor(result, false);
    auto entries = accessor->readDirectory(CanonPath::root);
    ASSERT_EQ(entries.size(), 5);
    ASSERT_EQ(accessor->readFile(CanonPath("hello")), "hello world");
    ASSERT_EQ(accessor->readFile(CanonPath("bye")), "thanks for all the fish");
    ASSERT_EQ(accessor->readLink(CanonPath("bye-link")), "bye");
    ASSERT_EQ(accessor->readDirectory(CanonPath("empty")).size(), 0);
    ASSERT_EQ(accessor->readFile(CanonPath("links/foo")), "hello world");
};

TEST_F(GitUtilsTest, sink_hardlink)
{
    auto repo = openRepo();
    auto sink = repo->getFileSystemObjectSink();

    sink->createDirectory(CanonPath("foo-1.1"));

    sink->createRegularFile(CanonPath("foo-1.1/hello"), [](CreateRegularFileSink & fileSink) {
        writeString(fileSink, "hello world", false);
    });

    try {
        sink->createHardlink(CanonPath("foo-1.1/link"), CanonPath("hello"));
        FAIL() << "Expected an exception";
    } catch (const nix::Error & e) {
        ASSERT_THAT(e.msg(), testing::HasSubstr("invalid hard link target"));
        ASSERT_THAT(e.msg(), testing::HasSubstr("/hello"));
        ASSERT_THAT(e.msg(), testing::HasSubstr("foo-1.1/link"));
    }
};

} // namespace nix
@@ -29,7 +29,7 @@ libfetchers-tests_LIBS = \
   libstore-test-support libutil-test-support \
   libfetchers libstore libutil

-libfetchers-tests_LDFLAGS := -lrapidcheck $(GTEST_LIBS)
+libfetchers-tests_LDFLAGS := -lrapidcheck $(GTEST_LIBS) $(LIBGIT2_LIBS)

 ifdef HOST_WINDOWS
 # Increase the default reserved stack size to 65 MB so Nix doesn't run out of space
@@ -1,5 +1,6 @@
 #include <gtest/gtest.h>

+#include "fetch-settings.hh"
 #include "flake/flakeref.hh"

 namespace nix {

@@ -11,8 +12,9 @@ namespace nix {
  * --------------------------------------------------------------------------*/

 TEST(to_string, doesntReencodeUrl) {
+    fetchers::Settings fetchSettings;
     auto s = "http://localhost:8181/test/+3d.tar.gz";
-    auto flakeref = parseFlakeRef(s);
+    auto flakeref = parseFlakeRef(fetchSettings, s);
     auto parsed = flakeref.to_string();
     auto expected = "http://localhost:8181/test/%2B3d.tar.gz";

tests/unit/libstore/legacy-ssh-store.cc (new file, 26 lines)

@@ -0,0 +1,26 @@
#include <gtest/gtest.h>

#include "legacy-ssh-store.hh"

namespace nix {

TEST(LegacySSHStore, constructConfig)
{
    LegacySSHStoreConfig config{
        "ssh",
        "localhost",
        StoreConfig::Params{
            {
                "remote-program",
                // TODO #11106, no more split on space
                "foo bar",
            },
        }};
    EXPECT_EQ(
        config.remoteProgram.get(),
        (Strings{
            "foo",
            "bar",
        }));
}
}
@@ -58,6 +58,7 @@ sources = files(
   'derivation.cc',
   'derived-path.cc',
   'downstream-placeholder.cc',
+  'legacy-ssh-store.cc',
   'machines.cc',
   'nar-info-disk-cache.cc',
   'nar-info.cc',

@@ -67,6 +68,7 @@ sources = files(
   'path.cc',
   'references.cc',
   'serve-protocol.cc',
+  'ssh-store.cc',
   'store-reference.cc',
   'worker-protocol.cc',
 )
@@ -26,9 +26,9 @@ static UnkeyedValidPathInfo makeEmpty()
     };
 }

-static UnkeyedValidPathInfo makeFull(const Store & store, bool includeImpureInfo)
+static ValidPathInfo makeFullKeyed(const Store & store, bool includeImpureInfo)
 {
-    UnkeyedValidPathInfo info = ValidPathInfo {
+    ValidPathInfo info = ValidPathInfo {
         store,
         "foo",
         FixedOutputInfo {

@@ -57,6 +57,9 @@ static UnkeyedValidPathInfo makeFull(const Store & store, bool includeImpureInfo
     }
     return info;
 }
+static UnkeyedValidPathInfo makeFull(const Store & store, bool includeImpureInfo) {
+    return makeFullKeyed(store, includeImpureInfo);
+}

 #define JSON_TEST(STEM, OBJ, PURE) \
     TEST_F(PathInfoTest, PathInfo_ ## STEM ## _from_json) { \

@@ -86,4 +89,13 @@ JSON_TEST(empty_impure, makeEmpty(), true)
 JSON_TEST(pure, makeFull(*store, false), false)
 JSON_TEST(impure, makeFull(*store, true), true)

+TEST_F(PathInfoTest, PathInfo_full_shortRefs) {
+    ValidPathInfo it = makeFullKeyed(*store, true);
+    // it.references = unkeyed.references;
+    auto refs = it.shortRefs();
+    ASSERT_EQ(refs.size(), 2);
+    ASSERT_EQ(*refs.begin(), "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar");
+    ASSERT_EQ(*++refs.begin(), "n5wkd9frr45pa74if5gpz9j7mifg27fh-foo");
+}
+
 } // namespace nix
tests/unit/libstore/ssh-store.cc (new file, 26 lines)

@@ -0,0 +1,26 @@
#include <gtest/gtest.h>

#include "ssh-store.hh"

namespace nix {

TEST(SSHStore, constructConfig)
{
    SSHStoreConfig config{
        "ssh",
        "localhost",
        StoreConfig::Params{
            {
                "remote-program",
                // TODO #11106, no more split on space
                "foo bar",
            },
        }};
    EXPECT_EQ(
        config.remoteProgram.get(),
        (Strings{
            "foo",
            "bar",
        }));
}
}
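Both constructConfig tests above pin down the behavior flagged by the "TODO #11106" comments: the remote-program value is currently treated as a whitespace-separated list, so "foo bar" is expected to parse into the two elements "foo" and "bar". The stand-in below mirrors only that tokenization for illustration; it is not the setting parser in libstore, and the helper name is made up here.

// Illustrative stand-in for the whitespace splitting the two constructConfig
// tests assert for "remote-program"; not the actual libstore parser.
#include <cassert>
#include <sstream>
#include <string>
#include <vector>

static std::vector<std::string> splitOnWhitespace(const std::string & value)
{
    std::vector<std::string> words;
    std::istringstream in(value);
    std::string word;
    while (in >> word)
        words.push_back(word); // every run of whitespace acts as a separator
    return words;
}

int main()
{
    // Mirrors EXPECT_EQ(config.remoteProgram.get(), (Strings{"foo", "bar"})) above.
    assert(splitOnWhitespace("foo bar") == (std::vector<std::string>{"foo", "bar"}));
    return 0;
}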
@@ -0,0 +1,34 @@
#include <iostream>
#include "tracing-file-system-object-sink.hh"

namespace nix::test {

void TracingFileSystemObjectSink::createDirectory(const CanonPath & path)
{
    std::cerr << "createDirectory(" << path << ")\n";
    sink.createDirectory(path);
}

void TracingFileSystemObjectSink::createRegularFile(
    const CanonPath & path, std::function<void(CreateRegularFileSink &)> fn)
{
    std::cerr << "createRegularFile(" << path << ")\n";
    sink.createRegularFile(path, [&](CreateRegularFileSink & crf) {
        // We could wrap this and trace about the chunks of data and such
        fn(crf);
    });
}

void TracingFileSystemObjectSink::createSymlink(const CanonPath & path, const std::string & target)
{
    std::cerr << "createSymlink(" << path << ", target: " << target << ")\n";
    sink.createSymlink(path, target);
}

void TracingExtendedFileSystemObjectSink::createHardlink(const CanonPath & path, const CanonPath & target)
{
    std::cerr << "createHardlink(" << path << ", target: " << target << ")\n";
    sink.createHardlink(path, target);
}

} // namespace nix::test
@@ -0,0 +1,41 @@
#pragma once
#include "fs-sink.hh"

namespace nix::test {

/**
 * A `FileSystemObjectSink` that traces calls, writing to stderr.
 */
class TracingFileSystemObjectSink : public virtual FileSystemObjectSink
{
    FileSystemObjectSink & sink;
public:
    TracingFileSystemObjectSink(FileSystemObjectSink & sink)
        : sink(sink)
    {
    }

    void createDirectory(const CanonPath & path) override;

    void createRegularFile(const CanonPath & path, std::function<void(CreateRegularFileSink &)> fn) override;

    void createSymlink(const CanonPath & path, const std::string & target) override;
};

/**
 * A `ExtendedFileSystemObjectSink` that traces calls, writing to stderr.
 */
class TracingExtendedFileSystemObjectSink : public TracingFileSystemObjectSink, public ExtendedFileSystemObjectSink
{
    ExtendedFileSystemObjectSink & sink;
public:
    TracingExtendedFileSystemObjectSink(ExtendedFileSystemObjectSink & sink)
        : TracingFileSystemObjectSink(sink)
        , sink(sink)
    {
    }

    void createHardlink(const CanonPath & path, const CanonPath & target) override;
};

}
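For orientation, here is a minimal sketch of how this tracing decorator could wrap the git sink exercised in tests/unit/libfetchers/git-utils.cc earlier in this diff. It is illustrative only and not part of this commit; the include path for the tracing header and the assumption that GitRepo::getFileSystemObjectSink() yields an ExtendedFileSystemObjectSink (suggested by the createHardlink call in the test above) are not stated explicitly in the diff.

// Illustrative sketch, not part of this commit.
#include "git-utils.hh"
#include "fs-sink.hh"
#include "tracing-file-system-object-sink.hh" // assumed include path for the header shown above

using namespace nix;

// Wrap the git sink in the tracing decorator so every create* call is logged
// to stderr before being forwarded to the real sink.
static void buildTracedTree(ref<GitRepo> repo)
{
    auto inner = repo->getFileSystemObjectSink(); // assumed to be an ExtendedFileSystemObjectSink
    test::TracingExtendedFileSystemObjectSink sink(*inner);

    sink.createDirectory(CanonPath("foo-1.1"));
    sink.createRegularFile(CanonPath("foo-1.1/hello"), [](CreateRegularFileSink & f) {
        f("hello world");
    });
    sink.createSymlink(CanonPath("foo-1.1/bye-link"), "bye");

    // sync() is not part of the FileSystemObjectSink interface, so it is still
    // called on the wrapped sink, as in the tests above.
    auto tree = inner->sync();
    (void) tree;
}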
@@ -229,7 +229,7 @@ TEST_F(GitTest, both_roundrip) {

     mkSinkHook(CanonPath::root, root.hash, BlobMode::Regular);

-    ASSERT_EQ(*files, *files2);
+    ASSERT_EQ(files->root, files2->root);
 }

 TEST(GitLsRemote, parseSymrefLineWithReference) {
tests/unit/libutil/position.cc (new file, 122 lines)

@@ -0,0 +1,122 @@
#include <gtest/gtest.h>

#include "position.hh"

namespace nix {

inline Pos::Origin makeStdin(std::string s)
{
    return Pos::Stdin{make_ref<std::string>(s)};
}

TEST(Position, getSnippetUpTo_0)
{
    Pos::Origin o = makeStdin("");
    Pos p(1, 1, o);
    ASSERT_EQ(p.getSnippetUpTo(p), "");
}
TEST(Position, getSnippetUpTo_1)
{
    Pos::Origin o = makeStdin("x");
    {
        // NOTE: line and column are actually 1-based indexes
        Pos start(0, 0, o);
        Pos end(99, 99, o);
        ASSERT_EQ(start.getSnippetUpTo(start), "");
        ASSERT_EQ(start.getSnippetUpTo(end), "x");
        ASSERT_EQ(end.getSnippetUpTo(end), "");
        ASSERT_EQ(end.getSnippetUpTo(start), std::nullopt);
    }
    {
        // NOTE: line and column are actually 1-based indexes
        Pos start(0, 99, o);
        Pos end(99, 0, o);
        ASSERT_EQ(start.getSnippetUpTo(start), "");

        // "x" might be preferable, but we only care about not crashing for invalid inputs
        ASSERT_EQ(start.getSnippetUpTo(end), "");

        ASSERT_EQ(end.getSnippetUpTo(end), "");
        ASSERT_EQ(end.getSnippetUpTo(start), std::nullopt);
    }
    {
        Pos start(1, 1, o);
        Pos end(1, 99, o);
        ASSERT_EQ(start.getSnippetUpTo(start), "");
        ASSERT_EQ(start.getSnippetUpTo(end), "x");
        ASSERT_EQ(end.getSnippetUpTo(end), "");
        ASSERT_EQ(end.getSnippetUpTo(start), "");
    }
    {
        Pos start(1, 1, o);
        Pos end(99, 99, o);
        ASSERT_EQ(start.getSnippetUpTo(start), "");
        ASSERT_EQ(start.getSnippetUpTo(end), "x");
        ASSERT_EQ(end.getSnippetUpTo(end), "");
        ASSERT_EQ(end.getSnippetUpTo(start), std::nullopt);
    }
}
TEST(Position, getSnippetUpTo_2)
{
    Pos::Origin o = makeStdin("asdf\njkl\nqwer");
    {
        Pos start(1, 1, o);
        Pos end(1, 2, o);
        ASSERT_EQ(start.getSnippetUpTo(start), "");
        ASSERT_EQ(start.getSnippetUpTo(end), "a");
        ASSERT_EQ(end.getSnippetUpTo(end), "");

        // nullopt? I feel like changing the column handling would just make it more fragile
        ASSERT_EQ(end.getSnippetUpTo(start), "");
    }
    {
        Pos start(1, 2, o);
        Pos end(1, 3, o);
        ASSERT_EQ(start.getSnippetUpTo(end), "s");
    }
    {
        Pos start(1, 2, o);
        Pos end(2, 2, o);
        ASSERT_EQ(start.getSnippetUpTo(end), "sdf\nj");
    }
    {
        Pos start(1, 2, o);
        Pos end(3, 2, o);
        ASSERT_EQ(start.getSnippetUpTo(end), "sdf\njkl\nq");
    }
    {
        Pos start(1, 2, o);
        Pos end(2, 99, o);
        ASSERT_EQ(start.getSnippetUpTo(end), "sdf\njkl");
    }
    {
        Pos start(1, 4, o);
        Pos end(2, 99, o);
        ASSERT_EQ(start.getSnippetUpTo(end), "f\njkl");
    }
    {
        Pos start(1, 5, o);
        Pos end(2, 99, o);
        ASSERT_EQ(start.getSnippetUpTo(end), "\njkl");
    }
    {
        Pos start(1, 6, o); // invalid: starting column past last "line character", ie at the newline
        Pos end(2, 99, o);
        ASSERT_EQ(start.getSnippetUpTo(end), "\njkl"); // jkl might be acceptable for this invalid start position
    }
    {
        Pos start(1, 1, o);
        Pos end(2, 0, o); // invalid
        ASSERT_EQ(start.getSnippetUpTo(end), "asdf\n");
    }
}

TEST(Position, example_1)
{
    Pos::Origin o = makeStdin(" unambiguous = \n /** Very close */\n x: x;\n# ok\n");
    Pos start(2, 5, o);
    Pos end(2, 22, o);
    ASSERT_EQ(start.getSnippetUpTo(end), "/** Very close */");
}

} // namespace nix
@@ -15,7 +15,7 @@ struct RewriteParams {
             strRewrites.insert(from + "->" + to);
         return os <<
             "OriginalString: " << bar.originalString << std::endl <<
-            "Rewrites: " << concatStringsSep(",", strRewrites) << std::endl <<
+            "Rewrites: " << dropEmptyInitThenConcatStringsSep(",", strRewrites) << std::endl <<
             "Expected result: " << bar.finalString;
     }
 };
tests/unit/libutil/strings.cc (new file, 83 lines)

@@ -0,0 +1,83 @@
#include <gtest/gtest.h>

#include "strings.hh"

namespace nix {

using Strings = std::vector<std::string>;

/* ----------------------------------------------------------------------------
 * concatStringsSep
 * --------------------------------------------------------------------------*/

TEST(concatStringsSep, empty)
{
    Strings strings;

    ASSERT_EQ(concatStringsSep(",", strings), "");
}

TEST(concatStringsSep, justOne)
{
    Strings strings;
    strings.push_back("this");

    ASSERT_EQ(concatStringsSep(",", strings), "this");
}

TEST(concatStringsSep, emptyString)
{
    Strings strings;
    strings.push_back("");

    ASSERT_EQ(concatStringsSep(",", strings), "");
}

TEST(concatStringsSep, emptyStrings)
{
    Strings strings;
    strings.push_back("");
    strings.push_back("");

    ASSERT_EQ(concatStringsSep(",", strings), ",");
}

TEST(concatStringsSep, threeEmptyStrings)
{
    Strings strings;
    strings.push_back("");
    strings.push_back("");
    strings.push_back("");

    ASSERT_EQ(concatStringsSep(",", strings), ",,");
}

TEST(concatStringsSep, buildCommaSeparatedString)
{
    Strings strings;
    strings.push_back("this");
    strings.push_back("is");
    strings.push_back("great");

    ASSERT_EQ(concatStringsSep(",", strings), "this,is,great");
}

TEST(concatStringsSep, buildStringWithEmptySeparator)
{
    Strings strings;
    strings.push_back("this");
    strings.push_back("is");
    strings.push_back("great");

    ASSERT_EQ(concatStringsSep("", strings), "thisisgreat");
}

TEST(concatStringsSep, buildSingleString)
{
    Strings strings;
    strings.push_back("this");

    ASSERT_EQ(concatStringsSep(",", strings), "this");
}

} // namespace nix
@@ -227,32 +227,32 @@ namespace nix {
     }

     /* ----------------------------------------------------------------------------
-     * concatStringsSep
+     * dropEmptyInitThenConcatStringsSep
      * --------------------------------------------------------------------------*/

-    TEST(concatStringsSep, buildCommaSeparatedString) {
+    TEST(dropEmptyInitThenConcatStringsSep, buildCommaSeparatedString) {
         Strings strings;
         strings.push_back("this");
         strings.push_back("is");
         strings.push_back("great");

-        ASSERT_EQ(concatStringsSep(",", strings), "this,is,great");
+        ASSERT_EQ(dropEmptyInitThenConcatStringsSep(",", strings), "this,is,great");
     }

-    TEST(concatStringsSep, buildStringWithEmptySeparator) {
+    TEST(dropEmptyInitThenConcatStringsSep, buildStringWithEmptySeparator) {
         Strings strings;
         strings.push_back("this");
         strings.push_back("is");
         strings.push_back("great");

-        ASSERT_EQ(concatStringsSep("", strings), "thisisgreat");
+        ASSERT_EQ(dropEmptyInitThenConcatStringsSep("", strings), "thisisgreat");
     }

-    TEST(concatStringsSep, buildSingleString) {
+    TEST(dropEmptyInitThenConcatStringsSep, buildSingleString) {
         Strings strings;
         strings.push_back("this");

-        ASSERT_EQ(concatStringsSep(",", strings), "this");
+        ASSERT_EQ(dropEmptyInitThenConcatStringsSep(",", strings), "this");
     }

     /* ----------------------------------------------------------------------------
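Read together with the new tests/unit/libutil/strings.cc above, this hunk documents a split: the existing tests keep their old expectations under the new name dropEmptyInitThenConcatStringsSep (the name suggests it drops leading empty elements before joining), while concatStringsSep now plainly joins every element, including empty ones. The stand-in below mirrors only the join semantics asserted by the new concatStringsSep tests; it is a sketch for illustration, not the implementation in strings.hh.

// Stand-in for the join semantics pinned down by the new concatStringsSep
// tests above; illustrative only.
#include <cassert>
#include <string>
#include <vector>

static std::string joinAll(const std::string & sep, const std::vector<std::string> & elems)
{
    std::string result;
    bool first = true;
    for (auto & s : elems) {
        if (!first) result += sep; // separator appears even between empty elements
        first = false;
        result += s;
    }
    return result;
}

int main()
{
    assert(joinAll(",", {}) == "");                                   // TEST(concatStringsSep, empty)
    assert(joinAll(",", {"", ""}) == ",");                            // TEST(concatStringsSep, emptyStrings)
    assert(joinAll(",", {"this", "is", "great"}) == "this,is,great"); // buildCommaSeparatedString
    return 0;
}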