Mirror of https://github.com/NixOS/nix (synced 2025-06-30 15:48:00 +02:00)
Merge remote-tracking branch 'origin/master' into flakes
commit c7866733d7
18 changed files with 132 additions and 189 deletions
src/libstore/download.cc
@@ -910,12 +910,7 @@ CachedDownloadResult Downloader::downloadCached(
         printInfo("unpacking '%s'...", url);
         Path tmpDir = createTempDir();
         AutoDelete autoDelete(tmpDir, true);
-#if 0
-        unpackTarfile(store->toRealPath(store->printStorePath(*storePath)), tmpDir, std::string(baseNameOf(url)));
-#else
-        // FIXME: this requires GNU tar for decompression.
-        runProgram("tar", true, {"xf", store->toRealPath(store->printStorePath(*storePath)), "-C", tmpDir});
-#endif
+        unpackTarfile(store->toRealPath(store->printStorePath(*storePath)), tmpDir);
         auto members = readDirectory(tmpDir);
         if (members.size() != 1)
             throw nix::Error("tarball '%s' contains an unexpected number of top-level files", url);
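Note: the second argument dropped from unpackTarfile() was only a file-name hint for choosing a decompressor. libarchive instead sniffs the compression from the stream's magic bytes, which is exactly the FIXME the old tar.cc carried ("look at first few bytes to determine compression type"). A minimal sketch of the idea, with a hypothetical helper name, not code from this commit:

    #include <string_view>

    // Map the leading bytes of a file to a compression method, so no
    // file-name hint is needed. Purely illustrative.
    std::string_view sniffCompression(std::string_view header)
    {
        using namespace std::literals;
        if (header.substr(0, 2) == "\x1F\x8B"sv) return "gzip";
        if (header.substr(0, 3) == "BZh"sv) return "bzip2";
        if (header.substr(0, 6) == "\xFD" "7zXZ\0"sv) return "xz";
        return "none";
    }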
src/libutil/local.mk
@@ -6,6 +6,6 @@ libutil_DIR := $(d)
 
 libutil_SOURCES := $(wildcard $(d)/*.cc)
 
-libutil_LDFLAGS = $(LIBLZMA_LIBS) -lbz2 -pthread $(OPENSSL_LIBS) $(LIBBROTLI_LIBS) $(BOOST_LDFLAGS) -lboost_context
+libutil_LDFLAGS = $(LIBLZMA_LIBS) -lbz2 -pthread $(OPENSSL_LIBS) $(LIBBROTLI_LIBS) $(LIBARCHIVE_LIBS) $(BOOST_LDFLAGS) -lboost_context
 
 libutil_LIBS = libnixrust
src/libutil/rust-ffi.cc
@@ -19,15 +19,4 @@ std::ostream & operator << (std::ostream & str, const String & s)
     return str;
 }
 
-size_t Source::sourceWrapper(void * _this, rust::Slice<uint8_t> data)
-{
-    try {
-        // FIXME: how to propagate exceptions?
-        auto n = ((nix::Source *) _this)->read((unsigned char *) data.ptr, data.size);
-        return n;
-    } catch (...) {
-        abort();
-    }
-}
-
 }
src/libutil/rust-ffi.hh
@@ -131,18 +131,6 @@ struct String : Vec<char, ffi_String_drop>
 
 std::ostream & operator << (std::ostream & str, const String & s);
 
-struct Source
-{
-    size_t (*fun)(void * source_this, rust::Slice<uint8_t> data);
-    nix::Source * _this;
-
-    Source(nix::Source & _this)
-        : fun(sourceWrapper), _this(&_this)
-    {}
-
-    static size_t sourceWrapper(void * _this, rust::Slice<uint8_t> data);
-};
-
 /* C++ representation of Rust's Result<T, CppException>. */
 template<typename T>
 struct Result
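Note: the deleted rust::Source was the C++ half of an FFI trampoline: Rust called fun with an opaque _this pointer, and the static wrapper cast it back to the real nix::Source. The abort() in the catch-all (see the rust-ffi.cc hunk above) is there because letting a C++ exception unwind through foreign stack frames is undefined behavior. A self-contained sketch of the pattern against a stand-in consumer, hypothetical names throughout:

    #include <cstddef>
    #include <cstdio>
    #include <cstdlib>

    // C-style callback type: opaque context plus a buffer to fill.
    using read_fn = size_t (*)(void * ctx, unsigned char * buf, size_t len);

    // Stand-in for the foreign consumer (in the real code, Rust).
    void consume_stream(read_fn fn, void * ctx)
    {
        unsigned char buf[64];
        while (size_t n = fn(ctx, buf, sizeof buf))
            fwrite(buf, 1, n, stdout);
    }

    struct MySource
    {
        const char * data = "hello across the boundary\n";
        size_t pos = 0;

        size_t read(unsigned char * buf, size_t len)   // may throw in real code
        {
            size_t n = 0;
            while (n < len && data[pos]) buf[n++] = data[pos++];
            return n;
        }

        // The trampoline: recover the object from the opaque pointer and
        // make sure no exception escapes into foreign frames.
        static size_t trampoline(void * ctx, unsigned char * buf, size_t len)
        {
            try {
                return static_cast<MySource *>(ctx)->read(buf, len);
            } catch (...) {
                abort();
            }
        }
    };

    int main()
    {
        MySource src;
        consume_stream(MySource::trampoline, &src);
    }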
src/libutil/tar.cc
@@ -1,38 +1,125 @@
-#include "rust-ffi.hh"
-#include "compression.hh"
+#include <archive.h>
+#include <archive_entry.h>
 
-extern "C" {
-    rust::Result<std::tuple<>> *
-        unpack_tarfile(rust::Source source, rust::StringSlice dest_dir, rust::Result<std::tuple<>> & out);
-}
+#include "serialise.hh"
 
 namespace nix {
 
+struct TarArchive {
+    struct archive * archive;
+    Source * source;
+    std::vector<unsigned char> buffer;
+
+    void check(int err, const char * reason = "failed to extract archive: %s")
+    {
+        if (err == ARCHIVE_EOF)
+            throw EndOfFile("reached end of archive");
+        else if (err != ARCHIVE_OK)
+            throw Error(reason, archive_error_string(this->archive));
+    }
+
+    TarArchive(Source & source) : buffer(4096)
+    {
+        this->archive = archive_read_new();
+        this->source = &source;
+
+        archive_read_support_filter_all(archive);
+        archive_read_support_format_all(archive);
+        check(archive_read_open(archive,
+                (void *)this,
+                TarArchive::callback_open,
+                TarArchive::callback_read,
+                TarArchive::callback_close),
+            "failed to open archive: %s");
+    }
+
+    TarArchive(const Path & path)
+    {
+        this->archive = archive_read_new();
+
+        archive_read_support_filter_all(archive);
+        archive_read_support_format_all(archive);
+        check(archive_read_open_filename(archive, path.c_str(), 16384), "failed to open archive: %s");
+    }
+
+    TarArchive(const TarArchive &) = delete;
+
+    void close()
+    {
+        check(archive_read_close(archive), "failed to close archive: %s");
+    }
+
+    ~TarArchive()
+    {
+        if (this->archive) archive_read_free(this->archive);
+    }
+
+private:
+
+    static int callback_open(struct archive *, void * self) {
+        return ARCHIVE_OK;
+    }
+
+    static ssize_t callback_read(struct archive * archive, void * _self, const void * * buffer)
+    {
+        auto self = (TarArchive *)_self;
+        *buffer = self->buffer.data();
+
+        try {
+            return self->source->read(self->buffer.data(), 4096);
+        } catch (EndOfFile &) {
+            return 0;
+        } catch (std::exception & err) {
+            archive_set_error(archive, EIO, "source threw exception: %s", err.what());
+            return -1;
+        }
+    }
+
+    static int callback_close(struct archive *, void * self) {
+        return ARCHIVE_OK;
+    }
+};
+
+static void extract_archive(TarArchive & archive, const Path & destDir)
+{
+    int flags = ARCHIVE_EXTRACT_FFLAGS
+        | ARCHIVE_EXTRACT_PERM
+        | ARCHIVE_EXTRACT_TIME
+        | ARCHIVE_EXTRACT_SECURE_SYMLINKS
+        | ARCHIVE_EXTRACT_SECURE_NODOTDOT;
+
+    for (;;) {
+        struct archive_entry * entry;
+        int r = archive_read_next_header(archive.archive, &entry);
+        if (r == ARCHIVE_EOF) break;
+        else if (r == ARCHIVE_WARN)
+            warn(archive_error_string(archive.archive));
+        else
+            archive.check(r);
+
+        archive_entry_set_pathname(entry,
+            (destDir + "/" + archive_entry_pathname(entry)).c_str());
+
+        archive.check(archive_read_extract(archive.archive, entry, flags));
+    }
+
+    archive.close();
+}
+
 void unpackTarfile(Source & source, const Path & destDir)
 {
-    rust::Source source2(source);
-    rust::Result<std::tuple<>> res;
-    unpack_tarfile(source2, destDir, res);
-    res.unwrap();
+    auto archive = TarArchive(source);
+
+    createDirs(destDir);
+    extract_archive(archive, destDir);
 }
 
-void unpackTarfile(const Path & tarFile, const Path & destDir,
-    std::optional<std::string> baseName)
+void unpackTarfile(const Path & tarFile, const Path & destDir)
 {
-    if (!baseName) baseName = std::string(baseNameOf(tarFile));
+    auto archive = TarArchive(tarFile);
 
-    auto source = sinkToSource([&](Sink & sink) {
-        // FIXME: look at first few bytes to determine compression type.
-        auto decompressor =
-            hasSuffix(*baseName, ".bz2") ? makeDecompressionSink("bzip2", sink) :
-            hasSuffix(*baseName, ".gz") ? makeDecompressionSink("gzip", sink) :
-            hasSuffix(*baseName, ".xz") ? makeDecompressionSink("xz", sink) :
-            makeDecompressionSink("none", sink);
-        readFile(tarFile, *decompressor);
-        decompressor->finish();
-    });
-
-    unpackTarfile(*source, destDir);
+    createDirs(destDir);
+    extract_archive(archive, destDir);
 }
 
 }
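Note on the new code: callback_read follows libarchive's read-callback contract: point *buffer at the data, return the number of bytes produced, 0 for end of stream, or -1 after reporting the error with archive_set_error(). The support_filter_all/support_format_all calls make both compression and archive format autodetected, and ARCHIVE_EXTRACT_SECURE_SYMLINKS plus ARCHIVE_EXTRACT_SECURE_NODOTDOT reject entries that would escape destDir through a symlink or a ".." component. The same calls, boiled down to a standalone program that only lists entries (hypothetical example, not part of the commit):

    // list-archive.cc, build with: g++ list-archive.cc -larchive
    #include <archive.h>
    #include <archive_entry.h>
    #include <cstdio>

    int main(int argc, char * * argv)
    {
        if (argc != 2) { fprintf(stderr, "usage: %s FILE\n", argv[0]); return 1; }

        struct archive * a = archive_read_new();
        archive_read_support_filter_all(a);   // gzip/bzip2/xz/... from magic bytes
        archive_read_support_format_all(a);   // tar, zip, cpio, ...

        if (archive_read_open_filename(a, argv[1], 16384) != ARCHIVE_OK) {
            fprintf(stderr, "%s\n", archive_error_string(a));
            return 1;
        }

        struct archive_entry * entry;
        while (archive_read_next_header(a, &entry) == ARCHIVE_OK)
            printf("%s\n", archive_entry_pathname(entry));

        archive_read_free(a);
        return 0;
    }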
src/libutil/tar.hh
@@ -4,7 +4,6 @@ namespace nix {
 
 void unpackTarfile(Source & source, const Path & destDir);
 
-void unpackTarfile(const Path & tarFile, const Path & destDir,
-    std::optional<std::string> baseName = {});
+void unpackTarfile(const Path & tarFile, const Path & destDir);
 
 }
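Note: both overloads now take just a destination. Assuming nix's usual Source implementations from serialise.hh, callers look something like this (illustrative fragment):

    FdSource source(0);                              // stream an archive from stdin
    unpackTarfile(source, "/tmp/out1");              // format and compression sniffed

    unpackTarfile("/tmp/src.tar.xz", "/tmp/out2");   // or straight from a file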
src/nix-prefetch-url/nix-prefetch-url.cc
@@ -190,10 +190,7 @@ static int _main(int argc, char * * argv)
         printInfo("unpacking...");
         Path unpacked = (Path) tmpDir + "/unpacked";
         createDirs(unpacked);
-        if (hasSuffix(baseNameOf(uri), ".zip"))
-            runProgram("unzip", true, {"-qq", tmpFile, "-d", unpacked});
-        else
-            unpackTarfile(tmpFile, unpacked, std::string(baseNameOf(uri)));
+        unpackTarfile(tmpFile, unpacked);
 
         /* If the archive unpacks to a single file/directory, then use
            that as the top-level. */
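Note: the external unzip special case disappears because archive_read_support_format_all() registers libarchive's zip reader alongside tar, so the single unpackTarfile() call now covers both. If you wanted to see what was detected, libarchive can report it once the first header has been read (illustrative fragment):

    struct archive_entry * entry;
    if (archive_read_next_header(a, &entry) == ARCHIVE_OK)
        printf("format: %s, filter: %s\n",
            archive_format_name(a),       // textual name of the detected format
            archive_filter_name(a, 0));   // e.g. "gzip", or "none" if uncompressed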
src/nix/make-content-addressable.cc
@@ -1,10 +1,12 @@
 #include "command.hh"
 #include "store-api.hh"
 #include "references.hh"
+#include "common-args.hh"
+#include "json.hh"
 
 using namespace nix;
 
-struct CmdMakeContentAddressable : StorePathsCommand
+struct CmdMakeContentAddressable : StorePathsCommand, MixJSON
 {
     CmdMakeContentAddressable()
     {
@@ -37,6 +39,9 @@ struct CmdMakeContentAddressable : StorePathsCommand
 
         std::map<StorePath, StorePath> remappings;
 
+        auto jsonRoot = json ? std::make_unique<JSONObject>(std::cout) : nullptr;
+        auto jsonRewrites = json ? std::make_unique<JSONObject>(jsonRoot->object("rewrites")) : nullptr;
+
         for (auto & path : paths) {
             auto pathS = store->printStorePath(path);
             auto oldInfo = store->queryPathInfo(path);
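Note: JSONObject here is a streaming writer: it emits to the underlying stream as attributes are added and writes its closing brace in the destructor, which is why both handles are built up-front as unique_ptrs (jsonRewrites is declared after jsonRoot, so it is destroyed first and the braces close in the right order). A minimal sketch of that RAII pattern, hypothetical type, not nix's actual json.hh:

    #include <iostream>
    #include <string>

    // Opens '{' on construction, streams attrs, closes '}' on destruction.
    struct StreamingJsonObject
    {
        std::ostream & out;
        bool first = true;

        StreamingJsonObject(std::ostream & out) : out(out) { out << '{'; }

        void attr(const std::string & name, const std::string & value)
        {
            if (!first) out << ',';
            first = false;
            out << '"' << name << "\":\"" << value << '"';   // no escaping: sketch only
        }

        ~StreamingJsonObject() { out << '}'; }
    };

    int main()
    {
        StreamingJsonObject obj(std::cout);
        obj.attr("old-path", "new-path");   // printed immediately, nothing buffered
    }   // closing '}' written here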
@@ -76,7 +81,8 @@ struct CmdMakeContentAddressable : StorePathsCommand
             info.narSize = sink.s->size();
             info.ca = makeFixedOutputCA(true, info.narHash);
 
-            printError("rewrote '%s' to '%s'", pathS, store->printStorePath(info.path));
+            if (!json)
+                printError("rewrote '%s' to '%s'", pathS, store->printStorePath(info.path));
 
             auto source = sinkToSource([&](Sink & nextSink) {
                 RewritingSink rsink2(oldHashPart, storePathToHash(store->printStorePath(info.path)), nextSink);
@@ -86,6 +92,9 @@ struct CmdMakeContentAddressable : StorePathsCommand
 
             store->addToStore(info, *source);
 
+            if (json)
+                jsonRewrites->attr(store->printStorePath(path), store->printStorePath(info.path));
+
             remappings.insert_or_assign(std::move(path), std::move(info.path));
         }
     }
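Note: put together, the JSON mode presumably prints a single object mapping each input store path to its content-addressed rewrite, with a shape along these lines (inferred from the jsonRoot/jsonRewrites calls above; store paths abbreviated):

    {
      "rewrites": {
        "/nix/store/<old>-example": "/nix/store/<new>-example"
      }
    }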