Merge remote-tracking branch 'origin/master' into lfs

Commit 1cb9a354fb, 61 changed files with 921 additions and 478 deletions
@@ -50,7 +50,7 @@ Args::Flag hashAlgo(std::string && longName, HashAlgorithm * ha)
 {
     return Args::Flag {
         .longName = std::move(longName),
-        .description = "Hash algorithm (`md5`, `sha1`, `sha256`, or `sha512`).",
+        .description = "Hash algorithm (`blake3`, `md5`, `sha1`, `sha256`, or `sha512`).",
         .labels = {"hash-algo"},
         .handler = {[ha](std::string s) {
             *ha = parseHashAlgo(s);

@@ -63,7 +63,7 @@ Args::Flag hashAlgoOpt(std::string && longName, std::optional<HashAlgorithm> * oha)
 {
     return Args::Flag {
         .longName = std::move(longName),
-        .description = "Hash algorithm (`md5`, `sha1`, `sha256`, or `sha512`). Can be omitted for SRI hashes.",
+        .description = "Hash algorithm (`blake3`, `md5`, `sha1`, `sha256`, or `sha512`). Can be omitted for SRI hashes.",
        .labels = {"hash-algo"},
        .handler = {[oha](std::string s) {
            *oha = std::optional<HashAlgorithm>{parseHashAlgo(s)};

@@ -120,7 +120,7 @@ Args::Flag contentAddressMethod(ContentAddressMethod * method)

       - [`text`](@docroot@/store/store-object/content-address.md#method-text):
         Like `flat`, but used for
-        [derivations](@docroot@/glossary.md#store-derivation) serialized in store object and
+        [derivations](@docroot@/glossary.md#gloss-store-derivation) serialized in store object and
         [`builtins.toFile`](@docroot@/language/builtins.html#builtins-toFile).
         For advanced use-cases only;
         for regular usage prefer `nar` and `flat`.
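
A note on how these flags get used: both handlers pass the string to `parseHashAlgo`, which later in this diff learns to accept `blake3`. A minimal sketch of exercising the extended parser (assuming only the `hash.hh` declarations visible in this diff):

```cpp
#include <cassert>
#include <string_view>

#include "hash.hh"  // declares HashAlgorithm, parseHashAlgo, printHashAlgo

using namespace nix;

int main()
{
    // Round-trip the newly accepted algorithm name through parse/print.
    HashAlgorithm algo = parseHashAlgo("blake3");
    assert(printHashAlgo(algo) == "blake3");

    // Unknown names still throw a UsageError listing the accepted values:
    // parseHashAlgo("foo");  // would throw
    return 0;
}
```
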

@@ -1152,7 +1152,7 @@ namespace nix {

     ASSERT_TRACE1("hashString \"foo\" \"content\"",
                   UsageError,
-                  HintFmt("unknown hash algorithm '%s', expect 'md5', 'sha1', 'sha256', or 'sha512'", "foo"));
+                  HintFmt("unknown hash algorithm '%s', expect 'blake3', 'md5', 'sha1', 'sha256', or 'sha512'", "foo"));

     ASSERT_TRACE2("hashString \"sha256\" {}",
                   TypeError,

@@ -172,7 +172,7 @@ TEST_F(nix_api_expr_test, nix_expr_realise_context_bad_build)

 TEST_F(nix_api_expr_test, nix_expr_realise_context)
 {
-    // TODO (ca-derivations): add a content-addressed derivation output, which produces a placeholder
+    // TODO (ca-derivations): add a content-addressing derivation output, which produces a placeholder
     auto expr = R"(
         ''
         a derivation output: ${

@@ -1595,9 +1595,13 @@ static RegisterPrimOp primop_placeholder({
     .name = "placeholder",
     .args = {"output"},
     .doc = R"(
-      Return a placeholder string for the specified *output* that will be
-      substituted by the corresponding output path at build time. Typical
-      outputs would be `"out"`, `"bin"` or `"dev"`.
+      Return an
+      [output placeholder string](@docroot@/store/drv.md#output-placeholder)
+      for the specified *output* that will be substituted by the corresponding
+      [output path](@docroot@/glossary.md#gloss-output-path)
+      at build time.
+
+      Typical outputs would be `"out"`, `"bin"` or `"dev"`.
     )",
     .fun = prim_placeholder,
 });

@@ -2135,12 +2139,15 @@ static RegisterPrimOp primop_outputOf({
     .name = "__outputOf",
     .args = {"derivation-reference", "output-name"},
     .doc = R"(
-      Return the output path of a derivation, literally or using a placeholder if needed.
+      Return the output path of a derivation, literally or using an
+      [input placeholder string](@docroot@/store/drv.md#input-placeholder)
+      if needed.

       If the derivation has a statically-known output path (i.e. the derivation output is input-addressed, or fixed content-addresed), the output path will just be returned.
-      But if the derivation is content-addressed or if the derivation is itself not-statically produced (i.e. is the output of another derivation), a placeholder will be returned instead.
+      But if the derivation is content-addressed or if the derivation is itself not-statically produced (i.e. is the output of another derivation), an input placeholder will be returned instead.

-      *`derivation reference`* must be a string that may contain a regular store path to a derivation, or may be a placeholder reference. If the derivation is produced by a derivation, you must explicitly select `drv.outPath`.
+      *`derivation reference`* must be a string that may contain a regular store path to a derivation, or may be an input placeholder reference.
+      If the derivation is produced by a derivation, you must explicitly select `drv.outPath`.
       This primop can be chained arbitrarily deeply.
       For instance,

@@ -2150,9 +2157,9 @@ static RegisterPrimOp primop_outputOf({
         "out"
      ```

-      will return a placeholder for the output of the output of `myDrv`.
+      will return an input placeholder for the output of the output of `myDrv`.

-      This primop corresponds to the `^` sigil for derivable paths, e.g. as part of installable syntax on the command line.
+      This primop corresponds to the `^` sigil for [deriving paths](@docroot@/glossary.md#gloss-deriving-paths), e.g. as part of installable syntax on the command line.
     )",
     .fun = prim_outputOf,
     .experimentalFeature = Xp::DynamicDerivations,
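
The output placeholder documented above is just a hash-derived string. As a rough sketch of the idea (assuming the `nix-output:<name>` SHA-256 scheme Nix uses for output placeholders, which is not shown in this hunk), it could be computed with the hashing API that this change extends:

```cpp
#include <iostream>
#include <string>

#include "hash.hh"  // hashString, HashAlgorithm, HashFormat

using namespace nix;

// Sketch: an output placeholder is "/" followed by the nix32 rendering of
// sha256("nix-output:<output name>"). Assumed scheme, for illustration only.
static std::string outputPlaceholderSketch(const std::string & outputName)
{
    auto h = hashString(HashAlgorithm::SHA256, "nix-output:" + outputName);
    return "/" + h.to_string(HashFormat::Nix32, false);
}

int main()
{
    // e.g. roughly what `builtins.placeholder "out"` expands to
    std::cout << outputPlaceholderSketch("out") << "\n";
}
```
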

@@ -69,7 +69,7 @@ std::optional<std::string> readHead(const Path & path)

     std::string_view line = output;
     line = line.substr(0, line.find("\n"));
-    if (const auto parseResult = git::parseLsRemoteLine(line)) {
+    if (const auto parseResult = git::parseLsRemoteLine(line); parseResult && parseResult->reference == "HEAD") {
         switch (parseResult->kind) {
             case git::LsRemoteRefLine::Kind::Symbolic:
                 debug("resolved HEAD ref '%s' for repo '%s'", parseResult->target, path);

@@ -467,8 +467,14 @@ struct GitInputScheme : InputScheme
                 url);
         }
         repoInfo.location = std::filesystem::absolute(url.path);
-    } else
+    } else {
+        if (url.scheme == "file")
+            /* Query parameters are meaningless for file://, but
+               Git interprets them as part of the file name. So get
+               rid of them. */
+            url.query.clear();
         repoInfo.location = url;
+    }

     // If this is a local directory and no ref or revision is
     // given, then allow the use of an unclean working tree.

@@ -613,16 +619,16 @@ struct GitInputScheme : InputScheme
             try {
                 auto fetchRef =
                     getAllRefsAttr(input)
-                    ? "refs/*"
+                    ? "refs/*:refs/*"
                     : input.getRev()
                     ? input.getRev()->gitRev()
                     : ref.compare(0, 5, "refs/") == 0
-                    ? ref
+                    ? fmt("%1%:%1%", ref)
                     : ref == "HEAD"
                     ? ref
-                    : "refs/heads/" + ref;
+                    : fmt("%1%:%1%", "refs/heads/" + ref);

-                repo->fetch(repoUrl.to_string(), fmt("%s:%s", fetchRef, fetchRef), getShallowAttr(input));
+                repo->fetch(repoUrl.to_string(), fetchRef, getShallowAttr(input));
             } catch (Error & e) {
                 if (!std::filesystem::exists(localRefFile)) throw;
                 logError(e.info());
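
The refspec selection above can be hard to read as a ternary chain. Here is a simplified, standalone restatement of the new behaviour (it omits the explicit-revision case and the `fmt` helper; illustration only):

```cpp
#include <cassert>
#include <string>

// Sketch: the new code fetches "src:dst" refspecs so that the remote ref is
// also created locally under the same name, instead of only fetching FETCH_HEAD.
static std::string fetchRefFor(bool allRefs, const std::string & ref)
{
    auto mirror = [](const std::string & r) { return r + ":" + r; };
    if (allRefs) return "refs/*:refs/*";
    if (ref.compare(0, 5, "refs/") == 0) return mirror(ref);
    if (ref == "HEAD") return ref;
    return mirror("refs/heads/" + ref);
}

int main()
{
    assert(fetchRefFor(false, "master") == "refs/heads/master:refs/heads/master");
    assert(fetchRefFor(false, "refs/tags/v1.0") == "refs/tags/v1.0:refs/tags/v1.0");
    assert(fetchRefFor(true, "master") == "refs/*:refs/*");
    return 0;
}
```
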

@@ -36,14 +36,6 @@

 namespace nix {

-Goal::Co DerivationGoal::init() {
-    if (useDerivation) {
-        co_return getDerivation();
-    } else {
-        co_return haveDerivation();
-    }
-}
-
 DerivationGoal::DerivationGoal(const StorePath & drvPath,
     const OutputsSpec & wantedOutputs, Worker & worker, BuildMode buildMode)
     : Goal(worker, DerivedPath::Built { .drvPath = makeConstantStorePathRef(drvPath), .outputs = wantedOutputs })

@@ -141,50 +133,44 @@ void DerivationGoal::addWantedOutputs(const OutputsSpec & outputs)
 }


-Goal::Co DerivationGoal::getDerivation()
-{
+Goal::Co DerivationGoal::init() {
     trace("init");

-    /* The first thing to do is to make sure that the derivation
-       exists. If it doesn't, it may be created through a
-       substitute. */
-    if (buildMode == bmNormal && worker.evalStore.isValidPath(drvPath)) {
-        co_return loadDerivation();
-    }
-
-    addWaitee(upcast_goal(worker.makePathSubstitutionGoal(drvPath)));
-
-    co_await Suspend{};
-    co_return loadDerivation();
-}
-
-
-Goal::Co DerivationGoal::loadDerivation()
-{
-    trace("loading derivation");
-
-    if (nrFailed != 0) {
-        co_return done(BuildResult::MiscFailure, {}, Error("cannot build missing derivation '%s'", worker.store.printStorePath(drvPath)));
-    }
-
-    /* `drvPath' should already be a root, but let's be on the safe
-       side: if the user forgot to make it a root, we wouldn't want
-       things being garbage collected while we're busy. */
-    worker.evalStore.addTempRoot(drvPath);
-
-    /* Get the derivation. It is probably in the eval store, but it might be inthe main store:
-
-         - Resolved derivation are resolved against main store realisations, and so must be stored there.
-
-         - Dynamic derivations are built, and so are found in the main store.
-     */
-    for (auto * drvStore : { &worker.evalStore, &worker.store }) {
-        if (drvStore->isValidPath(drvPath)) {
-            drv = std::make_unique<Derivation>(drvStore->readDerivation(drvPath));
-            break;
-        }
-    }
-    assert(drv);
+    if (useDerivation) {
+        /* The first thing to do is to make sure that the derivation
+           exists. If it doesn't, it may be created through a
+           substitute. */
+
+        if (buildMode != bmNormal || !worker.evalStore.isValidPath(drvPath)) {
+            addWaitee(upcast_goal(worker.makePathSubstitutionGoal(drvPath)));
+            co_await Suspend{};
+        }
+
+        trace("loading derivation");
+
+        if (nrFailed != 0) {
+            co_return done(BuildResult::MiscFailure, {}, Error("cannot build missing derivation '%s'", worker.store.printStorePath(drvPath)));
+        }
+
+        /* `drvPath' should already be a root, but let's be on the safe
+           side: if the user forgot to make it a root, we wouldn't want
+           things being garbage collected while we're busy. */
+        worker.evalStore.addTempRoot(drvPath);
+
+        /* Get the derivation. It is probably in the eval store, but it might be inthe main store:
+
+             - Resolved derivation are resolved against main store realisations, and so must be stored there.
+
+             - Dynamic derivations are built, and so are found in the main store.
+         */
+        for (auto * drvStore : { &worker.evalStore, &worker.store }) {
+            if (drvStore->isValidPath(drvPath)) {
+                drv = std::make_unique<Derivation>(drvStore->readDerivation(drvPath));
+                break;
+            }
+        }
+        assert(drv);
+    }

     co_return haveDerivation();
 }

@@ -235,12 +221,14 @@ Goal::Co DerivationGoal::haveDerivation()
         }
     });

-    /* Check what outputs paths are not already valid. */
-    auto [allValid, validOutputs] = checkPathValidity();
+    {
+        /* Check what outputs paths are not already valid. */
+        auto [allValid, validOutputs] = checkPathValidity();

-    /* If they are all valid, then we're done. */
-    if (allValid && buildMode == bmNormal) {
-        co_return done(BuildResult::AlreadyValid, std::move(validOutputs));
+        /* If they are all valid, then we're done. */
+        if (allValid && buildMode == bmNormal) {
+            co_return done(BuildResult::AlreadyValid, std::move(validOutputs));
+        }
     }

     /* We are first going to try to create the invalid output paths

@@ -268,12 +256,7 @@ Goal::Co DerivationGoal::haveDerivation()
     }

     if (!waitees.empty()) co_await Suspend{}; /* to prevent hang (no wake-up event) */
-    co_return outputsSubstitutionTried();
-}
-
-
-Goal::Co DerivationGoal::outputsSubstitutionTried()
-{
+
     trace("all outputs substituted (maybe)");

     assert(!drv->type().isImpure());
|
|||
}
|
||||
|
||||
if (!waitees.empty()) co_await Suspend{}; /* to prevent hang (no wake-up event) */
|
||||
co_return inputsRealised();
|
||||
}
|
||||
|
||||
|
||||
Goal::Co DerivationGoal::repairClosure()
|
||||
{
|
||||
assert(!drv->type().isImpure());
|
||||
|
||||
/* If we're repairing, we now know that our own outputs are valid.
|
||||
Now check whether the other paths in the outputs closure are
|
||||
good. If not, then start derivation goals for the derivations
|
||||
that produced those outputs. */
|
||||
|
||||
/* Get the output closure. */
|
||||
auto outputs = queryDerivationOutputMap();
|
||||
StorePathSet outputClosure;
|
||||
for (auto & i : outputs) {
|
||||
if (!wantedOutputs.contains(i.first)) continue;
|
||||
worker.store.computeFSClosure(i.second, outputClosure);
|
||||
}
|
||||
|
||||
/* Filter out our own outputs (which we have already checked). */
|
||||
for (auto & i : outputs)
|
||||
outputClosure.erase(i.second);
|
||||
|
||||
/* Get all dependencies of this derivation so that we know which
|
||||
derivation is responsible for which path in the output
|
||||
closure. */
|
||||
StorePathSet inputClosure;
|
||||
if (useDerivation) worker.store.computeFSClosure(drvPath, inputClosure);
|
||||
std::map<StorePath, StorePath> outputsToDrv;
|
||||
for (auto & i : inputClosure)
|
||||
if (i.isDerivation()) {
|
||||
auto depOutputs = worker.store.queryPartialDerivationOutputMap(i, &worker.evalStore);
|
||||
for (auto & j : depOutputs)
|
||||
if (j.second)
|
||||
outputsToDrv.insert_or_assign(*j.second, i);
|
||||
}
|
||||
|
||||
/* Check each path (slow!). */
|
||||
for (auto & i : outputClosure) {
|
||||
if (worker.pathContentsGood(i)) continue;
|
||||
printError(
|
||||
"found corrupted or missing path '%s' in the output closure of '%s'",
|
||||
worker.store.printStorePath(i), worker.store.printStorePath(drvPath));
|
||||
auto drvPath2 = outputsToDrv.find(i);
|
||||
if (drvPath2 == outputsToDrv.end())
|
||||
addWaitee(upcast_goal(worker.makePathSubstitutionGoal(i, Repair)));
|
||||
else
|
||||
addWaitee(worker.makeGoal(
|
||||
DerivedPath::Built {
|
||||
.drvPath = makeConstantStorePathRef(drvPath2->second),
|
||||
.outputs = OutputsSpec::All { },
|
||||
},
|
||||
bmRepair));
|
||||
}
|
||||
|
||||
if (waitees.empty()) {
|
||||
co_return done(BuildResult::AlreadyValid, assertPathValidity());
|
||||
} else {
|
||||
co_await Suspend{};
|
||||
co_return closureRepaired();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
Goal::Co DerivationGoal::closureRepaired()
|
||||
{
|
||||
trace("closure repaired");
|
||||
if (nrFailed > 0)
|
||||
throw Error("some paths in the output closure of derivation '%s' could not be repaired",
|
||||
worker.store.printStorePath(drvPath));
|
||||
co_return done(BuildResult::AlreadyValid, assertPathValidity());
|
||||
}
|
||||
|
||||
|
||||
Goal::Co DerivationGoal::inputsRealised()
|
||||
{
|
||||
trace("all inputs realised");
|
||||
|
||||
if (nrFailed != 0) {
|
||||
|

@@ -766,6 +672,73 @@ Goal::Co DerivationGoal::tryLocalBuild() {
 }


+Goal::Co DerivationGoal::repairClosure()
+{
+    assert(!drv->type().isImpure());
+
+    /* If we're repairing, we now know that our own outputs are valid.
+       Now check whether the other paths in the outputs closure are
+       good. If not, then start derivation goals for the derivations
+       that produced those outputs. */
+
+    /* Get the output closure. */
+    auto outputs = queryDerivationOutputMap();
+    StorePathSet outputClosure;
+    for (auto & i : outputs) {
+        if (!wantedOutputs.contains(i.first)) continue;
+        worker.store.computeFSClosure(i.second, outputClosure);
+    }
+
+    /* Filter out our own outputs (which we have already checked). */
+    for (auto & i : outputs)
+        outputClosure.erase(i.second);
+
+    /* Get all dependencies of this derivation so that we know which
+       derivation is responsible for which path in the output
+       closure. */
+    StorePathSet inputClosure;
+    if (useDerivation) worker.store.computeFSClosure(drvPath, inputClosure);
+    std::map<StorePath, StorePath> outputsToDrv;
+    for (auto & i : inputClosure)
+        if (i.isDerivation()) {
+            auto depOutputs = worker.store.queryPartialDerivationOutputMap(i, &worker.evalStore);
+            for (auto & j : depOutputs)
+                if (j.second)
+                    outputsToDrv.insert_or_assign(*j.second, i);
+        }
+
+    /* Check each path (slow!). */
+    for (auto & i : outputClosure) {
+        if (worker.pathContentsGood(i)) continue;
+        printError(
+            "found corrupted or missing path '%s' in the output closure of '%s'",
+            worker.store.printStorePath(i), worker.store.printStorePath(drvPath));
+        auto drvPath2 = outputsToDrv.find(i);
+        if (drvPath2 == outputsToDrv.end())
+            addWaitee(upcast_goal(worker.makePathSubstitutionGoal(i, Repair)));
+        else
+            addWaitee(worker.makeGoal(
+                DerivedPath::Built {
+                    .drvPath = makeConstantStorePathRef(drvPath2->second),
+                    .outputs = OutputsSpec::All { },
+                },
+                bmRepair));
+    }
+
+    if (waitees.empty()) {
+        co_return done(BuildResult::AlreadyValid, assertPathValidity());
+    } else {
+        co_await Suspend{};
+
+        trace("closure repaired");
+        if (nrFailed > 0)
+            throw Error("some paths in the output closure of derivation '%s' could not be repaired",
+                worker.store.printStorePath(drvPath));
+        co_return done(BuildResult::AlreadyValid, assertPathValidity());
+    }
+}
+
+
 static void chmod_(const Path & path, mode_t mode)
 {
     if (chmod(path.c_str(), mode) == -1)

@@ -1249,7 +1222,7 @@ SingleDrvOutputs DerivationGoal::registerOutputs()
        to do anything here.

        We can only early return when the outputs are known a priori. For
-       floating content-addressed derivations this isn't the case.
+       floating content-addressing derivations this isn't the case.
     */
     return assertPathValidity();
 }

@@ -80,7 +80,7 @@ struct DerivationGoal : public Goal
    /**
     * Mapping from input derivations + output names to actual store
     * paths. This is filled in by waiteeDone() as each dependency
-    * finishes, before inputsRealised() is reached.
+    * finishes, before `trace("all inputs realised")` is reached.
     */
    std::map<std::pair<StorePath, std::string>, StorePath> inputDrvOutputs;


@@ -233,13 +233,8 @@ struct DerivationGoal : public Goal
     * The states.
     */
    Co init() override;
-    Co getDerivation();
-    Co loadDerivation();
    Co haveDerivation();
-    Co outputsSubstitutionTried();
    Co gaveUpOnSubstitution();
-    Co closureRepaired();
-    Co inputsRealised();
    Co tryToBuild();
    virtual Co tryLocalBuild();
    Co buildDone();

@@ -1,4 +1,4 @@
--- Extension of the sql schema for content-addressed derivations.
+-- Extension of the sql schema for content-addressing derivations.
 -- Won't be loaded unless the experimental feature `ca-derivations`
 -- is enabled


@@ -593,7 +593,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,

     auto drvType = drv.type();

-    /* Content-addressed derivations are trustless because their output paths
+    /* Content-addressing derivations are trustless because their output paths
        are verified by their content alone, so any derivation is free to
        try to produce such a path.

@@ -300,7 +300,7 @@ static DerivationOutput parseDerivationOutput(
         } else {
             xpSettings.require(Xp::CaDerivations);
             if (pathS != "")
-                throw FormatError("content-addressed derivation output should not specify output path");
+                throw FormatError("content-addressing derivation output should not specify output path");
             return DerivationOutput::CAFloating {
                 .method = std::move(method),
                 .hashAlgo = std::move(hashAlgo),

@@ -187,7 +187,7 @@ struct DerivationType {
     };

     /**
-     * Content-addressed derivation types
+     * Content-addressing derivation types
      */
     struct ContentAddressed {
         /**

@@ -189,7 +189,7 @@ public:
         this, SYSTEM, "system",
         R"(
           The system type of the current Nix installation.
-          Nix will only build a given [derivation](@docroot@/language/derivations.md) locally when its `system` attribute equals any of the values specified here or in [`extra-platforms`](#conf-extra-platforms).
+          Nix will only build a given [store derivation](@docroot@/glossary.md#gloss-store-derivation) locally when its `system` attribute equals any of the values specified here or in [`extra-platforms`](#conf-extra-platforms).

           The default value is set when Nix itself is compiled for the system it will run on.
           The following system types are widely used, as Nix is actively supported on these platforms:

@@ -825,7 +825,7 @@ public:
         R"(
           System types of executables that can be run on this machine.

-          Nix will only build a given [derivation](@docroot@/language/derivations.md) locally when its `system` attribute equals any of the values specified here or in the [`system` option](#conf-system).
+          Nix will only build a given [store derivation](@docroot@/glossary.md#gloss-store-derivation) locally when its `system` attribute equals any of the values specified here or in the [`system` option](#conf-system).

           Setting this can be useful to build derivations locally on compatible machines:
           - `i686-linux` executables can be run on `x86_64-linux` machines (set by default)

@@ -608,7 +608,7 @@ void RemoteStore::queryRealisationUncached(const DrvOutput & id,
     auto conn(getConnection());

     if (GET_PROTOCOL_MINOR(conn->protoVersion) < 27) {
-        warn("the daemon is too old to support content-addressed derivations, please upgrade it to 2.4");
+        warn("the daemon is too old to support content-addressing derivations, please upgrade it to 2.4");
         return callback(nullptr);
     }

@@ -715,7 +715,7 @@ public:

    /**
     * Given a store path, return the realisation actually used in the realisation of this path:
-    * - If the path is a content-addressed derivation, try to resolve it
+    * - If the path is a content-addressing derivation, try to resolve it
     * - Otherwise, find one of its derivers
     */
    std::optional<StorePath> getBuildDerivationPath(const StorePath &);

@@ -800,7 +800,7 @@ void LocalDerivationGoal::startBuilder()
        out. */
     for (auto & i : drv->outputsAndOptPaths(worker.store)) {
         /* If the name isn't known a priori (i.e. floating
-           content-addressed derivation), the temporary location we use
+           content-addressing derivation), the temporary location we use
            should be fresh. Freshness means it is impossible that the path
            is already in the sandbox, so we don't need to worry about
            removing it. */

@@ -2291,7 +2291,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
        to do anything here.

        We can only early return when the outputs are known a priori. For
-       floating content-addressed derivations this isn't the case.
+       floating content-addressing derivations this isn't the case.
     */
    if (hook)
        return DerivationGoal::registerOutputs();

@@ -130,7 +130,7 @@ struct LocalDerivationGoal : public DerivationGoal
     * rewrite after the build. Otherwise the regular predetermined paths are
     * put here.
     *
-    * - Floating content-addressed derivations do not know their final build
+    * - Floating content-addressing derivations do not know their final build
     *   output paths until the outputs are hashed, so random locations are
     *   used, and then renamed. The randomness helps guard against hidden
     *   self-references.

@@ -6,10 +6,52 @@

 namespace nix {

+class BLAKE3HashTest : public virtual ::testing::Test
+{
+    public:
+
+    /**
+     * We set these in tests rather than the regular globals so we don't have
+     * to worry about race conditions if the tests run concurrently.
+     */
+    ExperimentalFeatureSettings mockXpSettings;
+
+    private:
+
+    void SetUp() override
+    {
+        mockXpSettings.set("experimental-features", "blake3-hashes");
+    }
+};
+
 /* ----------------------------------------------------------------------------
  * hashString
  * --------------------------------------------------------------------------*/

+TEST_F(BLAKE3HashTest, testKnownBLAKE3Hashes1) {
+    // values taken from: https://tools.ietf.org/html/rfc4634
+    auto s = "abc";
+    auto hash = hashString(HashAlgorithm::BLAKE3, s, mockXpSettings);
+    ASSERT_EQ(hash.to_string(HashFormat::Base16, true),
+              "blake3:6437b3ac38465133ffb63b75273a8db548c558465d79db03fd359c6cd5bd9d85");
+}
+
+TEST_F(BLAKE3HashTest, testKnownBLAKE3Hashes2) {
+    // values taken from: https://tools.ietf.org/html/rfc4634
+    auto s = "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq";
+    auto hash = hashString(HashAlgorithm::BLAKE3, s, mockXpSettings);
+    ASSERT_EQ(hash.to_string(HashFormat::Base16, true),
+              "blake3:c19012cc2aaf0dc3d8e5c45a1b79114d2df42abb2a410bf54be09e891af06ff8");
+}
+
+TEST_F(BLAKE3HashTest, testKnownBLAKE3Hashes3) {
+    // values taken from: https://www.ietf.org/archive/id/draft-aumasson-blake3-00.txt
+    auto s = "IETF";
+    auto hash = hashString(HashAlgorithm::BLAKE3, s, mockXpSettings);
+    ASSERT_EQ(hash.to_string(HashFormat::Base16, true),
+              "blake3:83a2de1ee6f4e6ab686889248f4ec0cf4cc5709446a682ffd1cbb4d6165181e2");
+}
+
 TEST(hashString, testKnownMD5Hashes1) {
     // values taken from: https://tools.ietf.org/html/rfc1321
     auto s1 = "";

@@ -24,7 +24,7 @@ struct ExperimentalFeatureDetails
 * feature, we either have no issue at all if few features are not added
 * at the end of the list, or a proper merge conflict if they are.
 */
-constexpr size_t numXpFeatures = 1 + static_cast<size_t>(Xp::PipeOperators);
+constexpr size_t numXpFeatures = 1 + static_cast<size_t>(Xp::BLAKE3Hashes);

 constexpr std::array<ExperimentalFeatureDetails, numXpFeatures> xpFeatureDetails = {{
     {

@@ -302,6 +302,14 @@ constexpr std::array<ExperimentalFeatureDetails, numXpFeatures> xpFeatureDetails
         )",
         .trackingUrl = "https://github.com/NixOS/nix/milestone/55",
     },
+    {
+        .tag = Xp::BLAKE3Hashes,
+        .name = "blake3-hashes",
+        .description = R"(
+          Enables support for BLAKE3 hashes.
+        )",
+        .trackingUrl = "",
+    },
 }};

 static_assert(

@@ -37,6 +37,7 @@ enum struct ExperimentalFeature
     MountedSSHStore,
     VerifiedFetches,
     PipeOperators,
+    BLAKE3Hashes,
 };

 /**
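
BLAKE3 support is gated behind this new experimental feature. A minimal sketch of opting in from C++ (assuming only the `ExperimentalFeatureSettings` and `hashString` APIs that appear in this diff):

```cpp
#include <iostream>

#include "config.hh"  // ExperimentalFeatureSettings
#include "hash.hh"    // hashString, HashAlgorithm, HashFormat

using namespace nix;

int main()
{
    // Use a local settings object, as the unit tests above do, so the global
    // configuration is left untouched.
    ExperimentalFeatureSettings xp;
    xp.set("experimental-features", "blake3-hashes");

    // Without the feature enabled, constructing a BLAKE3 hash would throw,
    // since Hash's constructor calls xpSettings.require(Xp::BLAKE3Hashes).
    auto h = hashString(HashAlgorithm::BLAKE3, "abc", xp);
    std::cout << h.to_string(HashFormat::Base16, true) << "\n";
    // expected (per the test vector in this change):
    // blake3:6437b3ac38465133ffb63b75273a8db548c558465d79db03fd359c6cd5bd9d85
}
```
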

@@ -1,6 +1,7 @@
 #include <iostream>
 #include <cstring>

+#include <blake3.h>
 #include <openssl/crypto.h>
 #include <openssl/md5.h>
 #include <openssl/sha.h>

@@ -8,6 +9,7 @@
 #include "args.hh"
 #include "hash.hh"
 #include "archive.hh"
+#include "config.hh"
 #include "split.hh"

 #include <sys/types.h>

@@ -20,6 +22,7 @@ namespace nix {

 static size_t regularHashSize(HashAlgorithm type) {
     switch (type) {
+    case HashAlgorithm::BLAKE3: return blake3HashSize;
     case HashAlgorithm::MD5: return md5HashSize;
     case HashAlgorithm::SHA1: return sha1HashSize;
     case HashAlgorithm::SHA256: return sha256HashSize;

@@ -29,12 +32,15 @@ static size_t regularHashSize(HashAlgorithm type) {
 }


-const std::set<std::string> hashAlgorithms = {"md5", "sha1", "sha256", "sha512" };
+const std::set<std::string> hashAlgorithms = {"blake3", "md5", "sha1", "sha256", "sha512" };

 const std::set<std::string> hashFormats = {"base64", "nix32", "base16", "sri" };

-Hash::Hash(HashAlgorithm algo) : algo(algo)
+Hash::Hash(HashAlgorithm algo, const ExperimentalFeatureSettings & xpSettings) : algo(algo)
 {
+    if (algo == HashAlgorithm::BLAKE3) {
+        xpSettings.require(Xp::BLAKE3Hashes);
+    }
     hashSize = regularHashSize(algo);
     assert(hashSize <= maxHashSize);
     memset(hash, 0, maxHashSize);

@@ -284,6 +290,7 @@ Hash newHashAllowEmpty(std::string_view hashStr, std::optional<HashAlgorithm> ha

 union Ctx
 {
+    blake3_hasher blake3;
     MD5_CTX md5;
     SHA_CTX sha1;
     SHA256_CTX sha256;

@@ -293,7 +300,8 @@ union Ctx

 static void start(HashAlgorithm ha, Ctx & ctx)
 {
-    if (ha == HashAlgorithm::MD5) MD5_Init(&ctx.md5);
+    if (ha == HashAlgorithm::BLAKE3) blake3_hasher_init(&ctx.blake3);
+    else if (ha == HashAlgorithm::MD5) MD5_Init(&ctx.md5);
     else if (ha == HashAlgorithm::SHA1) SHA1_Init(&ctx.sha1);
     else if (ha == HashAlgorithm::SHA256) SHA256_Init(&ctx.sha256);
     else if (ha == HashAlgorithm::SHA512) SHA512_Init(&ctx.sha512);

@@ -303,7 +311,8 @@ static void start(HashAlgorithm ha, Ctx & ctx)
 static void update(HashAlgorithm ha, Ctx & ctx,
                    std::string_view data)
 {
-    if (ha == HashAlgorithm::MD5) MD5_Update(&ctx.md5, data.data(), data.size());
+    if (ha == HashAlgorithm::BLAKE3) blake3_hasher_update(&ctx.blake3, data.data(), data.size());
+    else if (ha == HashAlgorithm::MD5) MD5_Update(&ctx.md5, data.data(), data.size());
     else if (ha == HashAlgorithm::SHA1) SHA1_Update(&ctx.sha1, data.data(), data.size());
     else if (ha == HashAlgorithm::SHA256) SHA256_Update(&ctx.sha256, data.data(), data.size());
     else if (ha == HashAlgorithm::SHA512) SHA512_Update(&ctx.sha512, data.data(), data.size());

@@ -312,24 +321,24 @@ static void update(HashAlgorithm ha, Ctx & ctx,

 static void finish(HashAlgorithm ha, Ctx & ctx, unsigned char * hash)
 {
-    if (ha == HashAlgorithm::MD5) MD5_Final(hash, &ctx.md5);
+    if (ha == HashAlgorithm::BLAKE3) blake3_hasher_finalize(&ctx.blake3, hash, BLAKE3_OUT_LEN);
+    else if (ha == HashAlgorithm::MD5) MD5_Final(hash, &ctx.md5);
     else if (ha == HashAlgorithm::SHA1) SHA1_Final(hash, &ctx.sha1);
     else if (ha == HashAlgorithm::SHA256) SHA256_Final(hash, &ctx.sha256);
     else if (ha == HashAlgorithm::SHA512) SHA512_Final(hash, &ctx.sha512);
 }


-Hash hashString(HashAlgorithm ha, std::string_view s)
+Hash hashString(
+    HashAlgorithm ha, std::string_view s, const ExperimentalFeatureSettings & xpSettings)
 {
     Ctx ctx;
-    Hash hash(ha);
+    Hash hash(ha, xpSettings);
     start(ha, ctx);
     update(ha, ctx, s);
     finish(ha, ctx, hash.hash);
     return hash;
 }


 Hash hashFile(HashAlgorithm ha, const Path & path)
 {
     HashSink sink(ha);

@@ -426,6 +435,7 @@ std::string_view printHashFormat(HashFormat HashFormat)

 std::optional<HashAlgorithm> parseHashAlgoOpt(std::string_view s)
 {
+    if (s == "blake3") return HashAlgorithm::BLAKE3;
     if (s == "md5") return HashAlgorithm::MD5;
     if (s == "sha1") return HashAlgorithm::SHA1;
     if (s == "sha256") return HashAlgorithm::SHA256;

@@ -439,12 +449,13 @@ HashAlgorithm parseHashAlgo(std::string_view s)
     if (opt_h)
         return *opt_h;
     else
-        throw UsageError("unknown hash algorithm '%1%', expect 'md5', 'sha1', 'sha256', or 'sha512'", s);
+        throw UsageError("unknown hash algorithm '%1%', expect 'blake3', 'md5', 'sha1', 'sha256', or 'sha512'", s);
 }

 std::string_view printHashAlgo(HashAlgorithm ha)
 {
     switch (ha) {
+    case HashAlgorithm::BLAKE3: return "blake3";
     case HashAlgorithm::MD5: return "md5";
     case HashAlgorithm::SHA1: return "sha1";
     case HashAlgorithm::SHA256: return "sha256";
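
For completeness, a small usage sketch of the public entry points touched in this file, `hashString` and `hashFile` (the file path below is just an example):

```cpp
#include <iostream>

#include "hash.hh"  // hashString, hashFile, HashAlgorithm, HashFormat

using namespace nix;

int main()
{
    // One-shot hashing of an in-memory string...
    auto h1 = hashString(HashAlgorithm::SHA256, "hello");

    // ...and of a file on disk (path chosen only for illustration).
    auto h2 = hashFile(HashAlgorithm::SHA256, "/etc/hostname");

    std::cout << h1.to_string(HashFormat::Nix32, false) << "\n"
              << h2.to_string(HashFormat::Base16, true) << "\n";
}
```
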

@@ -1,6 +1,7 @@
 #pragma once
 ///@file

+#include "config.hh"
 #include "types.hh"
 #include "serialise.hh"
 #include "file-system.hh"

@@ -11,9 +12,9 @@ namespace nix {
 MakeError(BadHash, Error);


-enum struct HashAlgorithm : char { MD5 = 42, SHA1, SHA256, SHA512 };
+enum struct HashAlgorithm : char { MD5 = 42, SHA1, SHA256, SHA512, BLAKE3 };

+const int blake3HashSize = 32;
 const int md5HashSize = 16;
 const int sha1HashSize = 20;
 const int sha256HashSize = 32;

@@ -52,7 +53,7 @@ struct Hash
     /**
      * Create a zero-filled hash object.
      */
-    explicit Hash(HashAlgorithm algo);
+    explicit Hash(HashAlgorithm algo, const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);

     /**
      * Parse the hash from a string representation in the format

@@ -157,7 +158,7 @@ std::string printHash16or32(const Hash & hash);
 /**
  * Compute the hash of the given string.
  */
-Hash hashString(HashAlgorithm ha, std::string_view s);
+Hash hashString(HashAlgorithm ha, std::string_view s, const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);

 /**
  * Compute the hash of the given file, hashing its contents directly.

@@ -62,6 +62,12 @@ elif host_machine.system() == 'sunos'
   deps_other += [socket, network_service_library]
 endif

+blake3 = dependency(
+  'libblake3',
+  version: '>= 1.5.5',
+)
+deps_private += blake3
+
 boost = dependency(
   'boost',
   modules : ['context', 'coroutine'],

@@ -6,6 +6,7 @@
   boost,
   brotli,
   libarchive,
+  libblake3,
   libcpuid,
   libsodium,
   nlohmann_json,

@@ -42,6 +43,7 @@ mkMesonLibrary (finalAttrs: {

   buildInputs = [
     brotli
+    libblake3
     libsodium
     openssl
   ] ++ lib.optional stdenv.hostPlatform.isx86_64 libcpuid;

@@ -252,7 +252,7 @@ static StorePathSet maybeUseOutputs(const StorePath & storePath, bool useOutput,
             return store->queryDerivationOutputs(storePath);
         for (auto & i : drv.outputsAndOptPaths(*store)) {
             if (!i.second.second)
-                throw UsageError("Cannot use output path of floating content-addressed derivation until we know what it is (e.g. by building it)");
+                throw UsageError("Cannot use output path of floating content-addressing derivation until we know what it is (e.g. by building it)");
             outputs.insert(*i.second.second);
         }
         return outputs;

@@ -1,5 +1,5 @@
-// FIXME: integrate this with nix path-info?
-// FIXME: rename to 'nix store derivation show' or 'nix debug derivation show'?
+// FIXME: integrate this with `nix path-info`?
+// FIXME: rename to 'nix store derivation show'?

 #include "command.hh"
 #include "common-args.hh"

@@ -62,8 +62,8 @@ R""(

 # Description

-`nix search` searches [*installable*](./nix.md#installables) (which can be evaluated, that is, a
-flake or Nix expression, but not a store path or store derivation path) for packages whose name or description matches all of the
+`nix search` searches [*installable*](./nix.md#installables) that can be evaluated, that is, a
+flake or Nix expression, but not a [store path] or [deriving path]) for packages whose name or description matches all of the
 regular expressions *regex*. For each matching package, It prints the
 full attribute name (from the root of the [installable](./nix.md#installables)), the version
 and the `meta.description` field, highlighting the substrings that

@@ -75,6 +75,9 @@ it avoids highlighting the entire name and description of every package.
 > Note that in this context, `^` is the regex character to match the beginning of a string, *not* the delimiter for
 > [selecting a derivation output](@docroot@/command-ref/new-cli/nix.md#derivation-output-selection).

+[store path]: @docroot@/glossary.md#gloss-store-path
+[deriving path]: @docroot@/glossary.md#gloss-deriving-path
+
 # Flake output attributes

 If no flake output attribute is given, `nix search` searches for