Mirror of https://github.com/NixOS/nix
Merge pull request #10038 from edolstra/tarball-git-cache
Use the Git cache for tarball flakes
Commit 2080d89b87 · 13 changed files with 208 additions and 104 deletions
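The change reworks the tarball fetcher: instead of unpacking a downloaded tarball into a temporary directory and copying it into the Nix store, `downloadTarball` now streams the archive straight into the fetchers' Git object cache and addresses the result by its Git tree hash, handing callers an accessor into that cache rather than a store path. The hunks shown here are from the tarball fetcher (`src/libfetchers/tarball.cc` in this tree). Reading them, the result type appears to look roughly like this (a sketch reconstructed from the diff, not the verbatim header):

    // Sketch of DownloadTarballResult as implied by the hunks below; the
    // verbatim declaration lives in the fetcher's header, not in this diff.
    struct DownloadTarballResult
    {
        Hash treeHash;                           // Git tree hash of the unpacked tree
        time_t lastModified;                     // mtime recovered while unpacking
        std::optional<std::string> immutableUrl; // e.g. a rev-pinned GitHub URL
        ref<InputAccessor> accessor;             // read-only view into the Git cache
    };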
@@ -9,6 +9,9 @@
 #include "types.hh"
 #include "split.hh"
 #include "posix-source-accessor.hh"
+#include "fs-input-accessor.hh"
+#include "store-api.hh"
+#include "git-utils.hh"
 
 namespace nix::fetchers {
 
@@ -57,10 +60,8 @@ DownloadFileResult downloadFile(
             throw;
     }
 
-    // FIXME: write to temporary file.
     Attrs infoAttrs({
         {"etag", res.etag},
-        {"url", res.effectiveUri},
     });
 
     if (res.immutableUrl)
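`res.effectiveUri` disappears here because, in this branch, a file transfer evidently records the whole redirect chain in `res.urls` (final URL last, hence `*res.urls.rbegin()` below), and the next hunk writes one cache entry per URL in that chain. A minimal standalone sketch of the idea, with hypothetical URLs:

    #include <iostream>
    #include <map>
    #include <string>
    #include <vector>

    int main()
    {
        // Redirect chain as requested -> ... -> final (hypothetical URLs).
        std::vector<std::string> urls = {
            "https://example.org/latest.tar.gz",
            "https://cdn.example.org/v1.2.tar.gz",
        };

        // One cache row per alias, each recording the final URL, so a later
        // fetch of any URL in the chain is a cache hit.
        std::map<std::string, std::string> cache;
        for (auto & url : urls)
            cache[url] = urls.back();

        for (auto & [key, finalUrl] : cache)
            std::cout << key << " -> " << finalUrl << "\n";
    }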
@@ -91,96 +92,102 @@ DownloadFileResult downloadFile(
         storePath = std::move(info.path);
     }
 
-    getCache()->add(
-        *store,
-        inAttrs,
-        infoAttrs,
-        *storePath,
-        locked);
-
-    if (url != res.effectiveUri)
+    /* Cache metadata for all URLs in the redirect chain. */
+    for (auto & url : res.urls) {
+        inAttrs.insert_or_assign("url", url);
+        infoAttrs.insert_or_assign("url", *res.urls.rbegin());
         getCache()->add(
             *store,
-            {
-                {"type", "file"},
-                {"url", res.effectiveUri},
-                {"name", name},
-            },
+            inAttrs,
             infoAttrs,
             *storePath,
             locked);
+    }
 
     return {
         .storePath = std::move(*storePath),
         .etag = res.etag,
-        .effectiveUrl = res.effectiveUri,
+        .effectiveUrl = *res.urls.rbegin(),
         .immutableUrl = res.immutableUrl,
     };
 }
 
 DownloadTarballResult downloadTarball(
-    ref<Store> store,
     const std::string & url,
-    const std::string & name,
-    bool locked,
     const Headers & headers)
 {
     Attrs inAttrs({
-        {"type", "tarball"},
+        {"_what", "tarballCache"},
         {"url", url},
-        {"name", name},
     });
 
-    auto cached = getCache()->lookupExpired(*store, inAttrs);
+    auto cached = getCache()->lookupExpired(inAttrs);
+
+    auto attrsToResult = [&](const Attrs & infoAttrs)
+    {
+        auto treeHash = getRevAttr(infoAttrs, "treeHash");
+        return DownloadTarballResult {
+            .treeHash = treeHash,
+            .lastModified = (time_t) getIntAttr(infoAttrs, "lastModified"),
+            .immutableUrl = maybeGetStrAttr(infoAttrs, "immutableUrl"),
+            .accessor = getTarballCache()->getAccessor(treeHash, false),
+        };
+    };
+
+    if (cached && !getTarballCache()->hasObject(getRevAttr(cached->infoAttrs, "treeHash")))
+        cached.reset();
 
     if (cached && !cached->expired)
-        return {
-            .storePath = std::move(cached->storePath),
-            .lastModified = (time_t) getIntAttr(cached->infoAttrs, "lastModified"),
-            .immutableUrl = maybeGetStrAttr(cached->infoAttrs, "immutableUrl"),
-        };
-
-    auto res = downloadFile(store, url, name, locked, headers);
-
-    std::optional<StorePath> unpackedStorePath;
-    time_t lastModified;
-
-    if (cached && res.etag != "" && getStrAttr(cached->infoAttrs, "etag") == res.etag) {
-        unpackedStorePath = std::move(cached->storePath);
-        lastModified = getIntAttr(cached->infoAttrs, "lastModified");
-    } else {
-        Path tmpDir = createTempDir();
-        AutoDelete autoDelete(tmpDir, true);
-        unpackTarfile(store->toRealPath(res.storePath), tmpDir);
-        auto members = readDirectory(tmpDir);
-        if (members.size() != 1)
-            throw nix::Error("tarball '%s' contains an unexpected number of top-level files", url);
-        auto topDir = tmpDir + "/" + members.begin()->name;
-        lastModified = lstat(topDir).st_mtime;
-        PosixSourceAccessor accessor;
-        unpackedStorePath = store->addToStore(name, accessor, CanonPath { topDir }, FileIngestionMethod::Recursive, HashAlgorithm::SHA256, {}, defaultPathFilter, NoRepair);
-    }
-
-    Attrs infoAttrs({
-        {"lastModified", uint64_t(lastModified)},
-        {"etag", res.etag},
+    auto _res = std::make_shared<Sync<FileTransferResult>>();
+
+    auto source = sinkToSource([&](Sink & sink) {
+        FileTransferRequest req(url);
+        req.expectedETag = cached ? getStrAttr(cached->infoAttrs, "etag") : "";
+        getFileTransfer()->download(std::move(req), sink,
+            [_res](FileTransferResult r)
+            {
+                *_res->lock() = r;
+            });
     });
 
-    if (res.immutableUrl)
-        infoAttrs.emplace("immutableUrl", *res.immutableUrl);
-
-    getCache()->add(
-        *store,
-        inAttrs,
-        infoAttrs,
-        *unpackedStorePath,
-        locked);
-
-    return {
-        .storePath = std::move(*unpackedStorePath),
-        .lastModified = lastModified,
-        .immutableUrl = res.immutableUrl,
-    };
+    // TODO: fall back to cached value if download fails.
+
+    /* Note: if the download is cached, `importTarball()` will receive
+       no data, which causes it to import an empty tarball. */
+    TarArchive archive { *source };
+    auto parseSink = getTarballCache()->getFileSystemObjectSink();
+    auto lastModified = unpackTarfileToSink(archive, *parseSink);
+
+    auto res(_res->lock());
+
+    Attrs infoAttrs;
+
+    if (res->cached) {
+        /* The server says that the previously downloaded version is
+           still current. */
+        infoAttrs = cached->infoAttrs;
+    } else {
+        infoAttrs.insert_or_assign("etag", res->etag);
+        infoAttrs.insert_or_assign("treeHash", parseSink->sync().gitRev());
+        infoAttrs.insert_or_assign("lastModified", uint64_t(lastModified));
+        if (res->immutableUrl)
+            infoAttrs.insert_or_assign("immutableUrl", *res->immutableUrl);
+    }
+
+    /* Insert a cache entry for every URL in the redirect chain. */
+    for (auto & url : res->urls) {
+        inAttrs.insert_or_assign("url", url);
+        getCache()->upsert(inAttrs, infoAttrs);
+    }
+
+    // FIXME: add a cache entry for immutableUrl? That could allow
+    // cache poisoning.
+
+    return attrsToResult(infoAttrs);
 }
 
 // An input scheme corresponding to a curl-downloadable resource.
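Two things are worth spelling out in the rewritten `downloadTarball`. First, the tarball no longer lands in the Nix store at all: `unpackTarfileToSink` feeds the archive into a `FileSystemObjectSink` obtained from the tarball cache, and `parseSink->sync().gitRev()` yields the tree hash under which the content is now addressed. Second, the download is fully streaming: `sinkToSource` turns the push-style transfer into a pull-style `Source`, and the `FileTransferResult` metadata is published from the transfer callback through a `Sync`-guarded slot (`_res`), so it may only be read once the source has been drained. A plain-C++ sketch of that hand-off pattern (nix's `Sync<T>` is assumed to be a mutex-guarded value; everything here is a stand-in):

    #include <functional>
    #include <iostream>
    #include <memory>
    #include <mutex>
    #include <string>

    struct Result { std::string etag; bool cached = false; };

    // Stand-in for Sync<T>: a value only reachable under its mutex.
    template<typename T>
    struct Guarded
    {
        std::mutex m;
        T value;
        void set(T v) { std::lock_guard<std::mutex> lock(m); value = std::move(v); }
        T get() { std::lock_guard<std::mutex> lock(m); return value; }
    };

    int main()
    {
        // Shared slot: the completion callback may outlive this frame's
        // references, hence the shared_ptr (mirrors `_res` above).
        auto res = std::make_shared<Guarded<Result>>();

        auto download = [res](const std::function<void(const char *, size_t)> & sink) {
            sink("tar bytes", 9);            // stream the body into the sink...
            res->set(Result{"\"abc123\""});  // ...then publish the metadata
        };

        download([](const char *, size_t) { /* unpack into the Git cache */ });

        // Safe only after the stream has been fully consumed.
        std::cout << "etag: " << res->get().etag << "\n";
    }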
@@ -198,6 +205,8 @@ struct CurlInputScheme : InputScheme
 
     virtual bool isValidURL(const ParsedURL & url, bool requireTree) const = 0;
 
+    static const std::set<std::string> specialParams;
+
     std::optional<Input> inputFromURL(const ParsedURL & _url, bool requireTree) const override
     {
         if (!isValidURL(_url, requireTree))
@@ -220,8 +229,17 @@ struct CurlInputScheme : InputScheme
             if (auto n = string2Int<uint64_t>(*i))
                 input.attrs.insert_or_assign("revCount", *n);
 
-        url.query.erase("rev");
-        url.query.erase("revCount");
+        if (auto i = get(url.query, "lastModified"))
+            if (auto n = string2Int<uint64_t>(*i))
+                input.attrs.insert_or_assign("lastModified", *n);
+
+        /* The URL query parameters serve two roles: specifying fetch
+           settings for Nix itself, and arbitrary data as part of the
+           HTTP request. Now that we've processed the Nix-specific
+           attributes above, remove them so we don't also send them as
+           part of the HTTP request. */
+        for (auto & param : allowedAttrs())
+            url.query.erase(param);
 
         input.attrs.insert_or_assign("type", std::string { schemeName() });
         input.attrs.insert_or_assign("url", url.to_string());
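The effect of the new stripping loop: any query parameter that names a Nix input attribute (per `allowedAttrs()`) is absorbed into `input.attrs` and dropped from the URL, while everything else stays in the query string and is sent to the server. (The `specialParams` declaration added in the @@ -198,6 hunk above presumably belongs to the same bookkeeping.) A standalone illustration, with the parameter set assumed for the example:

    #include <iostream>
    #include <map>
    #include <set>
    #include <string>

    int main()
    {
        // Query of e.g. https://example.org/x.tar.gz?rev=...&revCount=5&dl=true
        std::map<std::string, std::string> query = {
            {"rev", "f00dfeedf00dfeedf00dfeedf00dfeedf00dfeed"},
            {"revCount", "5"},
            {"dl", "true"},  // arbitrary server-side parameter
        };

        // Stand-in for allowedAttrs(): the Nix-specific parameter names.
        std::set<std::string> nixParams = {"rev", "revCount", "lastModified"};

        for (auto & param : nixParams)
            query.erase(param);

        for (auto & [k, v] : query)
            std::cout << k << "=" << v << "\n";  // prints only: dl=true
    }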
@@ -280,10 +298,24 @@ struct FileInputScheme : CurlInputScheme
             : (!requireTree && !hasTarballExtension(url.path)));
     }
 
-    std::pair<StorePath, Input> fetch(ref<Store> store, const Input & input) override
+    std::pair<ref<InputAccessor>, Input> getAccessor(ref<Store> store, const Input & _input) const override
     {
+        auto input(_input);
+
+        /* Unlike TarballInputScheme, this stores downloaded files in
+           the Nix store directly, since there is little deduplication
+           benefit in using the Git cache for single big files like
+           tarballs. */
         auto file = downloadFile(store, getStrAttr(input.attrs, "url"), input.getName(), false);
-        return {std::move(file.storePath), input};
+
+        auto narHash = store->queryPathInfo(file.storePath)->narHash;
+        input.attrs.insert_or_assign("narHash", narHash.to_string(HashFormat::SRI, true));
+
+        auto accessor = makeStorePathAccessor(store, file.storePath);
+
+        accessor->setPathDisplay("«" + input.to_string() + "»");
+
+        return {accessor, input};
     }
 };
 
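For plain files the trade-off goes the other way, as the new comment says: a single large file gains little from Git-cache deduplication, so it still goes to the Nix store. What changes is the interface: `fetch` becomes `getAccessor`, the store path is wrapped in an accessor, and the NAR hash of the result is pinned into `input.attrs` in SRI form. A tiny sketch of what that SRI rendering looks like (digest value hypothetical):

    #include <iostream>
    #include <string>

    int main()
    {
        // to_string(HashFormat::SRI, true) renders "<algo>-<base64 digest>".
        std::string algo = "sha256";
        std::string digestBase64 = "47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU=";  // hypothetical
        std::cout << "narHash = " << algo << "-" << digestBase64 << "\n";
    }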
@@ -301,11 +333,13 @@ struct TarballInputScheme : CurlInputScheme
             : (requireTree || hasTarballExtension(url.path)));
     }
 
-    std::pair<StorePath, Input> fetch(ref<Store> store, const Input & _input) override
+    std::pair<ref<InputAccessor>, Input> getAccessor(ref<Store> store, const Input & _input) const override
     {
-        Input input(_input);
-        auto url = getStrAttr(input.attrs, "url");
-        auto result = downloadTarball(store, url, input.getName(), false);
+        auto input(_input);
+
+        auto result = downloadTarball(getStrAttr(input.attrs, "url"), {});
+
+        result.accessor->setPathDisplay("«" + input.to_string() + "»");
 
         if (result.immutableUrl) {
             auto immutableInput = Input::fromURL(*result.immutableUrl);
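Note how little `downloadTarball` needs now: just the URL and optional headers (`{}` here); the store, name, and locked flag are gone. The `immutableUrl` branch keeps its old job: if the server advertised a permanent URL for the content it served, the lock should point there rather than at the mutable URL the user wrote. A standalone sketch of that preference, with hypothetical URLs:

    #include <iostream>
    #include <optional>
    #include <string>

    int main()
    {
        std::string requested = "https://github.com/foo/bar/archive/main.tar.gz";

        // E.g. taken from a `Link: <...>; rel="immutable"` response header.
        std::optional<std::string> immutable =
            "https://github.com/foo/bar/archive/f00df00d.tar.gz";  // rev-pinned

        // Prefer the immutable URL when locking, so the lock survives
        // `main` moving to a new revision.
        std::cout << (immutable ? *immutable : requested) << "\n";
    }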
@@ -319,7 +353,10 @@ struct TarballInputScheme : CurlInputScheme
         if (result.lastModified && !input.attrs.contains("lastModified"))
             input.attrs.insert_or_assign("lastModified", uint64_t(result.lastModified));
 
-        return {result.storePath, std::move(input)};
+        input.attrs.insert_or_assign("narHash",
+            getTarballCache()->treeHashToNarHash(result.treeHash).to_string(HashFormat::SRI, true));
+
+        return {result.accessor, input};
     }
 };
 
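Lock files still pin inputs by `narHash`, but the tarball cache addresses content by Git tree hash, so `treeHashToNarHash` has to bridge the two, presumably by serialising the cached tree as a NAR, hashing that, and memoising the answer. A compilable sketch of that assumed shape (the real serialisation is faked here):

    #include <iostream>
    #include <map>
    #include <string>

    using Hash = std::string;  // stand-in for nix's Hash type

    // Placeholder: the real code would render the Git tree as a NAR and
    // hash it; we fabricate a value so the sketch compiles.
    static Hash narHashOfTree(const Hash & treeHash)
    {
        return "sha256-<nar-of-" + treeHash + ">";
    }

    static std::map<Hash, Hash> narHashCache;

    Hash treeHashToNarHash(const Hash & treeHash)
    {
        auto it = narHashCache.find(treeHash);
        if (it != narHashCache.end())
            return it->second;  // memoised
        return narHashCache[treeHash] = narHashOfTree(treeHash);
    }

    int main()
    {
        std::cout << treeHashToNarHash("4b825dc642cb6eb9a060e54bf8d69288fbee4904") << "\n";
    }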