Merge pull request #2748 from edolstra/rust

Make nix/unpack-channel.nix a builtin builder

Commit f102d793f1
30 changed files with 514 additions and 86 deletions
@@ -4,6 +4,7 @@
 #include "store-api.hh"
 #include "pathlocks.hh"
 #include "hash.hh"
+#include "tarfile.hh"

 #include <sys/time.h>
@@ -164,14 +165,16 @@ GitInfo exportGit(ref<Store> store, const std::string & uri,
         if (e.errNo != ENOENT) throw;
     }

-    // FIXME: should pipe this, or find some better way to extract a
-    // revision.
-    auto tar = runProgram("git", true, { "-C", cacheDir, "archive", gitInfo.rev });
+    auto source = sinkToSource([&](Sink & sink) {
+        RunOptions gitOptions("git", { "-C", cacheDir, "archive", gitInfo.rev });
+        gitOptions.standardOut = &sink;
+        runProgram2(gitOptions);
+    });

     Path tmpDir = createTempDir();
     AutoDelete delTmpDir(tmpDir, true);

-    runProgram("tar", true, { "x", "-C", tmpDir }, tar);
+    unpackTarfile(*source, tmpDir);

     gitInfo.storePath = store->addToStore(name, tmpDir);
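The rewritten hunk streams the output of git archive instead of buffering the whole tarball in a string. Below is a minimal sketch, not part of the diff, of the sinkToSource pattern the new code relies on, assuming the serialise.hh interfaces used elsewhere in this change (Sink::operator(), Source::read):

#include "serialise.hh" // nix::Sink, nix::Source, nix::sinkToSource

#include <string>

using namespace nix;

// Sketch only: the lambda pushes bytes into a Sink; sinkToSource
// exposes them to the caller as a pull-style Source.
static void sinkToSourceDemo()
{
    auto source = sinkToSource([](Sink & sink) {
        std::string payload = "data produced on demand";
        sink((const unsigned char *) payload.data(), payload.size());
    });

    unsigned char buf[8];
    size_t n = source->read(buf, sizeof(buf)); // pulls at most 8 bytes
    (void) n;
}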
@@ -3128,6 +3128,8 @@ void DerivationGoal::runChild()
                     builtinFetchurl(drv2, netrcData);
                 else if (drv->builder == "builtin:buildenv")
                     builtinBuildenv(drv2);
+                else if (drv->builder == "builtin:unpack-channel")
+                    builtinUnpackChannel(drv2);
                 else
                     throw Error(format("unsupported builtin function '%1%'") % string(drv->builder, 8));
                 _exit(0);
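For context (this helper is hypothetical, not in the diff): runChild() treats any builder whose name starts with the 8-character prefix "builtin:" as an in-process builtin, which is why the error message above uses string(drv->builder, 8) to strip that prefix.

#include <stdexcept>
#include <string>

// Hypothetical illustration of the naming convention dispatched on above.
static std::string builtinName(const std::string & builder)
{
    const std::string prefix = "builtin:";
    if (builder.compare(0, prefix.size(), prefix) != 0)
        throw std::runtime_error("not a builtin builder: " + builder);
    return builder.substr(prefix.size()); // e.g. "unpack-channel"
}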
@@ -7,5 +7,6 @@ namespace nix {
 // TODO: make pluggable.
 void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData);
 void builtinBuildenv(const BasicDerivation & drv);
+void builtinUnpackChannel(const BasicDerivation & drv);

 }
src/libstore/builtins/unpack-channel.cc (new file, 29 lines)
@@ -0,0 +1,29 @@
#include "builtins.hh"
#include "tarfile.hh"

namespace nix {

void builtinUnpackChannel(const BasicDerivation & drv)
{
    auto getAttr = [&](const string & name) {
        auto i = drv.env.find(name);
        if (i == drv.env.end()) throw Error("attribute '%s' missing", name);
        return i->second;
    };

    Path out = getAttr("out");
    auto channelName = getAttr("channelName");
    auto src = getAttr("src");

    createDirs(out);

    unpackTarfile(src, out);

    auto entries = readDirectory(out);
    if (entries.size() != 1)
        throw Error("channel tarball '%s' contains more than one file", src);
    if (rename((out + "/" + entries[0].name).c_str(), (out + "/" + channelName).c_str()) == -1)
        throw SysError("renaming channel directory");
}

}
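The builtin takes all of its inputs from the derivation's environment attributes (out, channelName, src). A hedged sketch of driving it directly, with made-up paths and assuming BasicDerivation::env is the string map used by getAttr above:

#include "builtins.hh"

using namespace nix;

// Hypothetical driver, not in the diff; the paths and channel name are
// illustrative only.
static void unpackChannelDemo()
{
    BasicDerivation drv;
    drv.builder = "builtin:unpack-channel";
    drv.env["out"] = "/tmp/unpack-channel-demo"; // output directory
    drv.env["channelName"] = "nixpkgs";          // final directory name inside out
    drv.env["src"] = "/tmp/nixexprs.tar.xz";     // channel tarball to unpack
    builtinUnpackChannel(drv);                   // throws Error/SysError on failure
}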
@@ -8,6 +8,7 @@
 #include "compression.hh"
 #include "pathlocks.hh"
 #include "finally.hh"
+#include "tarfile.hh"

 #ifdef ENABLE_S3
 #include <aws/core/client/ClientConfiguration.h>
@@ -903,12 +904,15 @@ CachedDownloadResult Downloader::downloadCached(
                     unpackedStorePath = "";
             }
             if (unpackedStorePath.empty()) {
-                printInfo(format("unpacking '%1%'...") % url);
+                printInfo("unpacking '%s'...", url);
                 Path tmpDir = createTempDir();
                 AutoDelete autoDelete(tmpDir, true);
-                // FIXME: this requires GNU tar for decompression.
-                runProgram("tar", true, {"xf", store->toRealPath(storePath), "-C", tmpDir, "--strip-components", "1"});
-                unpackedStorePath = store->addToStore(name, tmpDir, true, htSHA256, defaultPathFilter, NoRepair);
+                unpackTarfile(store->toRealPath(storePath), tmpDir, baseNameOf(url));
+                auto members = readDirectory(tmpDir);
+                if (members.size() != 1)
+                    throw nix::Error("tarball '%s' contains an unexpected number of top-level files", url);
+                auto topDir = tmpDir + "/" + members.begin()->name;
+                unpackedStorePath = store->addToStore(name, topDir, true, htSHA256, defaultPathFilter, NoRepair);
             }
             replaceSymlink(unpackedStorePath, unpackedLink);
             storePath = unpackedStorePath;
@@ -7,3 +7,5 @@ libutil_DIR := $(d)
 libutil_SOURCES := $(wildcard $(d)/*.cc)

 libutil_LDFLAGS = $(LIBLZMA_LIBS) -lbz2 -pthread $(OPENSSL_LIBS) $(LIBBROTLI_LIBS) $(BOOST_LDFLAGS) -lboost_context
+
+libutil_LIBS = libnixrust
src/libutil/rust-ffi.cc (new file, 12 lines)
@@ -0,0 +1,12 @@
#include "logging.hh"
#include "rust-ffi.hh"

namespace nix {

extern "C" std::exception_ptr * make_error(rust::StringSlice s)
{
    // FIXME: leak
    return new std::exception_ptr(std::make_exception_ptr(Error(std::string(s.ptr, s.size))));
}

}
src/libutil/rust-ffi.hh (new file, 84 lines)
@@ -0,0 +1,84 @@
#include "serialise.hh"

namespace rust {

// Depending on the internal representation of Rust slices is slightly
// evil...
template<typename T>
struct Slice
{
    T * ptr;
    size_t size;

    Slice(T * ptr, size_t size) : ptr(ptr), size(size)
    {
        assert(ptr);
    }
};

struct StringSlice : Slice<char>
{
    StringSlice(const std::string & s): Slice((char *) s.data(), s.size()) {}
};

struct Source
{
    size_t (*fun)(void * source_this, rust::Slice<uint8_t> data);
    nix::Source * _this;

    Source(nix::Source & _this)
        : fun(sourceWrapper), _this(&_this)
    {}

    // FIXME: how to propagate exceptions?
    static size_t sourceWrapper(void * _this, rust::Slice<uint8_t> data)
    {
        auto n = ((nix::Source *) _this)->read(data.ptr, data.size);
        return n;
    }
};

/* C++ representation of Rust's Result<T, CppException>. */
template<typename T>
struct Result
{
    unsigned int tag;

    union {
        T data;
        std::exception_ptr * exc;
    };

    /* Rethrow the wrapped exception or return the wrapped value. */
    T unwrap()
    {
        if (tag == 0)
            return data;
        else if (tag == 1)
            std::rethrow_exception(*exc);
        else
            abort();
    }
};

template<typename T>
struct CBox
{
    T * ptr;

    T * operator ->()
    {
        return ptr;
    }

    CBox(T * ptr) : ptr(ptr) { }
    CBox(const CBox &) = delete;
    CBox(CBox &&) = delete;

    ~CBox()
    {
        free(ptr);
    }
};

}
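These types let Rust functions be declared in C++ as plain extern "C" prototypes: the Rust side returns a heap-allocated Result<T>, the caller wraps it in CBox so it gets freed, and unwrap() either yields the value or rethrows the std::exception_ptr created via make_error. A sketch of that calling convention with a hypothetical FFI function do_thing (the real one in this change is unpack_tarfile in tarfile.cc below):

#include "rust-ffi.hh"

// Hypothetical Rust export; only the calling convention is illustrated here.
extern "C" rust::Result<size_t> * do_thing(rust::StringSlice input);

static size_t callDoThing(const std::string & s)
{
    // CBox frees the heap-allocated Result; unwrap() rethrows any
    // exception created on the Rust side via make_error.
    return rust::CBox(do_thing(rust::StringSlice(s)))->unwrap();
}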
@@ -77,7 +77,6 @@ struct BufferedSource : Source

     size_t read(unsigned char * data, size_t len) override;

-
     bool hasData();

 protected:
src/libutil/tarfile.cc (new file, 36 lines)
@@ -0,0 +1,36 @@
#include "rust-ffi.hh"
#include "compression.hh"

extern "C" {
    rust::Result<std::tuple<>> *
    unpack_tarfile(rust::Source source, rust::StringSlice dest_dir);
}

namespace nix {

void unpackTarfile(Source & source, const Path & destDir)
{
    rust::Source source2(source);
    rust::CBox(unpack_tarfile(source2, destDir))->unwrap();
}

void unpackTarfile(const Path & tarFile, const Path & destDir,
    std::optional<std::string> baseName)
{
    if (!baseName) baseName = baseNameOf(tarFile);

    auto source = sinkToSource([&](Sink & sink) {
        // FIXME: look at first few bytes to determine compression type.
        auto decompressor =
            // FIXME: add .gz support
            hasSuffix(*baseName, ".bz2") ? makeDecompressionSink("bzip2", sink) :
            hasSuffix(*baseName, ".xz") ? makeDecompressionSink("xz", sink) :
            makeDecompressionSink("none", sink);
        readFile(tarFile, *decompressor);
        decompressor->finish();
    });

    unpackTarfile(*source, destDir);
}

}
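A hedged usage sketch of the new path-based overload (the paths are made up): the decompressor is chosen from the base name (.xz, .bz2, anything else is treated as uncompressed), and an explicit baseName is only needed when the path itself has no useful extension, as in downloadCached() above.

#include "tarfile.hh"

using namespace nix;

// Hypothetical usage, not in the diff.
static void unpackDemo()
{
    // Decompressor picked from the base name: .xz -> xz, .bz2 -> bzip2,
    // anything else is treated as an uncompressed tarball.
    unpackTarfile("/tmp/nixexprs.tar.xz", "/tmp/unpacked");

    // When the path has no useful extension (e.g. a store path), pass an
    // explicit baseName instead.
    unpackTarfile("/some/store/path-without-extension", "/tmp/unpacked2",
        "nixexprs.tar.bz2");
}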
src/libutil/tarfile.hh (new file, 10 lines)
@@ -0,0 +1,10 @@
#include "serialise.hh"

namespace nix {

void unpackTarfile(Source & source, const Path & destDir);

void unpackTarfile(const Path & tarFile, const Path & destDir,
    std::optional<std::string> baseName = {});

}
@@ -9,6 +9,7 @@
 #include "legacy.hh"
 #include "finally.hh"
 #include "progress-bar.hh"
+#include "tarfile.hh"

 #include <iostream>
@@ -192,8 +193,7 @@ static int _main(int argc, char * * argv)
             if (hasSuffix(baseNameOf(uri), ".zip"))
                 runProgram("unzip", true, {"-qq", tmpFile, "-d", unpacked});
             else
-                // FIXME: this requires GNU tar for decompression.
-                runProgram("tar", true, {"xf", tmpFile, "-C", unpacked});
+                unpackTarfile(tmpFile, unpacked, baseNameOf(uri));

             /* If the archive unpacks to a single file/directory, then use
                that as the top-level. */