Mirror of https://github.com/NixOS/nix (synced 2025-07-07 06:01:48 +02:00)

Commit 257470b58d: Merge remote-tracking branch 'origin/master' into flip-coroutines

342 changed files with 6839 additions and 2721 deletions

@@ -101,7 +101,7 @@ static int main_build_remote(int argc, char * * argv)
}

std::optional<StorePath> drvPath;
StoreReference storeUri;
std::string storeUri;

while (true) {

@@ -234,17 +234,16 @@ static int main_build_remote(int argc, char * * argv)
lock = -1;

try {
storeUri = bestMachine->storeUri.render();

Activity act(*logger, lvlTalkative, actUnknown, fmt("connecting to '%s'", bestMachine->storeUri.render()));
Activity act(*logger, lvlTalkative, actUnknown, fmt("connecting to '%s'", storeUri));

sshStore = bestMachine->openStore();
sshStore->connect();
storeUri = bestMachine->storeUri;

} catch (std::exception & e) {
auto msg = chomp(drainFD(5, false));
printError("cannot build on '%s': %s%s",
bestMachine->storeUri.render(), e.what(),
storeUri, e.what(),
msg.empty() ? "" : ": " + msg);
bestMachine->enabled = false;
continue;

@@ -259,15 +258,28 @@ connected:
assert(sshStore);

std::cerr << "# accept\n" << storeUri.render() << "\n";
std::cerr << "# accept\n" << storeUri << "\n";

auto inputs = readStrings<PathSet>(source);
auto wantedOutputs = readStrings<StringSet>(source);

AutoCloseFD uploadLock = openLockFile(currentLoad + "/" + escapeUri(storeUri.render()) + ".upload-lock", true);
AutoCloseFD uploadLock;
{
auto setUpdateLock = [&](auto && fileName){
uploadLock = openLockFile(currentLoad + "/" + escapeUri(fileName) + ".upload-lock", true);
};
try {
setUpdateLock(storeUri);
} catch (SysError & e) {
if (e.errNo != ENAMETOOLONG) throw;
// Try again hashing the store URL so we have a shorter path
auto h = hashString(HashAlgorithm::MD5, storeUri);
setUpdateLock(h.to_string(HashFormat::Base64, false));
}
}

{
Activity act(*logger, lvlTalkative, actUnknown, fmt("waiting for the upload lock to '%s'", storeUri.render()));
Activity act(*logger, lvlTalkative, actUnknown, fmt("waiting for the upload lock to '%s'", storeUri));

auto old = signal(SIGALRM, handleAlarm);
alarm(15 * 60);
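
The upload-lock hunk above replaces the single `openLockFile` call with a retry: when the lock-file name derived from the store URI is too long for the filesystem (`ENAMETOOLONG`), the name is rebuilt from a hash of the URI. A standalone sketch of that retry pattern, with plain POSIX `open` and `std::hash` standing in for Nix's `openLockFile`/`hashString` helpers (only the helper names above come from the diff; everything else here is illustrative):

```cpp
// Illustration only: retry lock-file creation with a short, hashed name when
// the URI-derived name exceeds the filesystem's limit. The real code also
// escapes the URI and encodes an MD5 digest as Base64.
#include <fcntl.h>
#include <cerrno>
#include <functional>
#include <string>
#include <system_error>

int openUploadLock(const std::string & stateDir, const std::string & storeUri)
{
    auto tryOpen = [&](const std::string & fileName) {
        int fd = open((stateDir + "/" + fileName + ".upload-lock").c_str(),
                      O_RDWR | O_CREAT | O_CLOEXEC, 0600);
        if (fd < 0)
            throw std::system_error(errno, std::generic_category());
        return fd;
    };
    try {
        return tryOpen(storeUri);
    } catch (std::system_error & e) {
        if (e.code().value() != ENAMETOOLONG) throw;
        // Fall back to a fixed-length name derived from the URI.
        return tryOpen(std::to_string(std::hash<std::string>{}(storeUri)));
    }
}
```
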
@@ -280,7 +292,7 @@ connected:
auto substitute = settings.buildersUseSubstitutes ? Substitute : NoSubstitute;

{
Activity act(*logger, lvlTalkative, actUnknown, fmt("copying dependencies to '%s'", storeUri.render()));
Activity act(*logger, lvlTalkative, actUnknown, fmt("copying dependencies to '%s'", storeUri));
copyPaths(*store, *sshStore, store->parseStorePathSet(inputs), NoRepair, NoCheckSigs, substitute);
}

@@ -318,7 +330,7 @@ connected:
optResult = sshStore->buildDerivation(*drvPath, (const BasicDerivation &) drv);
auto & result = *optResult;
if (!result.success())
throw Error("build of '%s' on '%s' failed: %s", store->printStorePath(*drvPath), storeUri.render(), result.errorMsg);
throw Error("build of '%s' on '%s' failed: %s", store->printStorePath(*drvPath), storeUri, result.errorMsg);
} else {
copyClosure(*store, *sshStore, StorePathSet {*drvPath}, NoRepair, NoCheckSigs, substitute);
auto res = sshStore->buildPathsWithResults({

@@ -361,7 +373,7 @@ connected:
}

if (!missingPaths.empty()) {
Activity act(*logger, lvlTalkative, actUnknown, fmt("copying outputs from '%s'", storeUri.render()));
Activity act(*logger, lvlTalkative, actUnknown, fmt("copying outputs from '%s'", storeUri));
if (auto localStore = store.dynamic_pointer_cast<LocalStore>())
for (auto & path : missingPaths)
localStore->locksHeld.insert(store->printStorePath(path)); /* FIXME: ugly */

@ -53,10 +53,6 @@ mkMesonDerivation (finalAttrs: {
|
|||
echo "doc external-api-docs $out/share/doc/nix/external-api/html" >> ''${!outputDoc}/nix-support/hydra-build-products
|
||||
'';
|
||||
|
||||
enableParallelBuilding = true;
|
||||
|
||||
strictDeps = true;
|
||||
|
||||
meta = {
|
||||
platforms = lib.platforms.all;
|
||||
};
|
||||
|
|
|
@ -48,10 +48,6 @@ mkMesonDerivation (finalAttrs: {
|
|||
echo "doc internal-api-docs $out/share/doc/nix/internal-api/html" >> ''${!outputDoc}/nix-support/hydra-build-products
|
||||
'';
|
||||
|
||||
enableParallelBuilding = true;
|
||||
|
||||
strictDeps = true;
|
||||
|
||||
meta = {
|
||||
platforms = lib.platforms.all;
|
||||
};
|
||||
|
|
|
@ -91,75 +91,11 @@ MixEvalArgs::MixEvalArgs()
|
|||
.longName = "include",
|
||||
.shortName = 'I',
|
||||
.description = R"(
|
||||
Add *path* to the Nix search path. The Nix search path is
|
||||
initialized from the colon-separated [`NIX_PATH`](@docroot@/command-ref/env-common.md#env-NIX_PATH) environment
|
||||
variable, and is used to look up the location of Nix expressions using [paths](@docroot@/language/types.md#type-path) enclosed in angle
|
||||
brackets (i.e., `<nixpkgs>`).
|
||||
Add *path* to search path entries used to resolve [lookup paths](@docroot@/language/constructs/lookup-path.md)
|
||||
|
||||
For instance, passing
|
||||
This option may be given multiple times.
|
||||
|
||||
```
|
||||
-I /home/eelco/Dev
|
||||
-I /etc/nixos
|
||||
```
|
||||
|
||||
will cause Nix to look for paths relative to `/home/eelco/Dev` and
|
||||
`/etc/nixos`, in that order. This is equivalent to setting the
|
||||
`NIX_PATH` environment variable to
|
||||
|
||||
```
|
||||
/home/eelco/Dev:/etc/nixos
|
||||
```
|
||||
|
||||
It is also possible to match paths against a prefix. For example,
|
||||
passing
|
||||
|
||||
```
|
||||
-I nixpkgs=/home/eelco/Dev/nixpkgs-branch
|
||||
-I /etc/nixos
|
||||
```
|
||||
|
||||
will cause Nix to search for `<nixpkgs/path>` in
|
||||
`/home/eelco/Dev/nixpkgs-branch/path` and `/etc/nixos/nixpkgs/path`.
|
||||
|
||||
If a path in the Nix search path starts with `http://` or `https://`,
|
||||
it is interpreted as the URL of a tarball that will be downloaded and
|
||||
unpacked to a temporary location. The tarball must consist of a single
|
||||
top-level directory. For example, passing
|
||||
|
||||
```
|
||||
-I nixpkgs=https://github.com/NixOS/nixpkgs/archive/master.tar.gz
|
||||
```
|
||||
|
||||
tells Nix to download and use the current contents of the `master`
|
||||
branch in the `nixpkgs` repository.
|
||||
|
||||
The URLs of the tarballs from the official `nixos.org` channels
|
||||
(see [the manual page for `nix-channel`](../nix-channel.md)) can be
|
||||
abbreviated as `channel:<channel-name>`. For instance, the
|
||||
following two flags are equivalent:
|
||||
|
||||
```
|
||||
-I nixpkgs=channel:nixos-21.05
|
||||
-I nixpkgs=https://nixos.org/channels/nixos-21.05/nixexprs.tar.xz
|
||||
```
|
||||
|
||||
You can also fetch source trees using [flake URLs](./nix3-flake.md#url-like-syntax) and add them to the
|
||||
search path. For instance,
|
||||
|
||||
```
|
||||
-I nixpkgs=flake:nixpkgs
|
||||
```
|
||||
|
||||
specifies that the prefix `nixpkgs` shall refer to the source tree
|
||||
downloaded from the `nixpkgs` entry in the flake registry. Similarly,
|
||||
|
||||
```
|
||||
-I nixpkgs=flake:github:NixOS/nixpkgs/nixos-22.05
|
||||
```
|
||||
|
||||
makes `<nixpkgs>` refer to a particular branch of the
|
||||
`NixOS/nixpkgs` repository on GitHub.
|
||||
Paths added through `-I` take precedence over the [`nix-path` configuration setting](@docroot@/command-ref/conf-file.md#conf-nix-path) and the [`NIX_PATH` environment variable](@docroot@/command-ref/env-common.md#env-NIX_PATH).
|
||||
)",
|
||||
.category = category,
|
||||
.labels = {"path"},
|
||||
|
@ -235,7 +171,9 @@ SourcePath lookupFileArg(EvalState & state, std::string_view s, const Path * bas
|
|||
{
|
||||
if (EvalSettings::isPseudoUrl(s)) {
|
||||
auto accessor = fetchers::downloadTarball(
|
||||
EvalSettings::resolvePseudoUrl(s)).accessor;
|
||||
state.store,
|
||||
state.fetchSettings,
|
||||
EvalSettings::resolvePseudoUrl(s));
|
||||
auto storePath = fetchToStore(*state.store, SourcePath(accessor), FetchMode::Copy);
|
||||
return state.rootPath(CanonPath(state.store->toRealPath(storePath)));
|
||||
}
|
||||
|
|
|
@ -104,12 +104,12 @@ DerivedPathsWithInfo InstallableFlake::toDerivedPaths()
|
|||
|
||||
auto drvPath = attr->forceDerivation();
|
||||
|
||||
std::optional<NixInt> priority;
|
||||
std::optional<NixInt::Inner> priority;
|
||||
|
||||
if (attr->maybeGetAttr(state->sOutputSpecified)) {
|
||||
} else if (auto aMeta = attr->maybeGetAttr(state->sMeta)) {
|
||||
if (auto aPriority = aMeta->maybeGetAttr("priority"))
|
||||
priority = aPriority->getInt();
|
||||
priority = aPriority->getInt().value;
|
||||
}
|
||||
|
||||
return {{
|
||||
|
|
|
@ -40,7 +40,7 @@ struct ExtraPathInfoValue : ExtraPathInfo
|
|||
/**
|
||||
* An optional priority for use with "build envs". See Package
|
||||
*/
|
||||
std::optional<NixInt> priority;
|
||||
std::optional<NixInt::Inner> priority;
|
||||
|
||||
/**
|
||||
* The attribute path associated with this value. The idea is
|
||||
|
|
|
@ -1,21 +1,23 @@
|
|||
#include "markdown.hh"
|
||||
#include "util.hh"
|
||||
#include "environment-variables.hh"
|
||||
#include "error.hh"
|
||||
#include "finally.hh"
|
||||
#include "terminal.hh"
|
||||
|
||||
#if HAVE_LOWDOWN
|
||||
# include <sys/queue.h>
|
||||
# include <lowdown.h>
|
||||
# include <sys/queue.h>
|
||||
# include <lowdown.h>
|
||||
#endif
|
||||
|
||||
namespace nix {
|
||||
|
||||
std::string renderMarkdownToTerminal(std::string_view markdown)
|
||||
{
|
||||
#if HAVE_LOWDOWN
|
||||
static std::string doRenderMarkdownToTerminal(std::string_view markdown)
|
||||
{
|
||||
int windowWidth = getWindowSize().second;
|
||||
|
||||
struct lowdown_opts opts {
|
||||
struct lowdown_opts opts
|
||||
{
|
||||
.type = LOWDOWN_TERM,
|
||||
.maxdepth = 20,
|
||||
.cols = (size_t) std::max(windowWidth - 5, 60),
|
||||
|
@ -51,9 +53,21 @@ std::string renderMarkdownToTerminal(std::string_view markdown)
|
|||
throw Error("allocation error while rendering Markdown");
|
||||
|
||||
return filterANSIEscapes(std::string(buf->data, buf->size), !isTTY());
|
||||
#else
|
||||
return std::string(markdown);
|
||||
#endif
|
||||
}
|
||||
|
||||
std::string renderMarkdownToTerminal(std::string_view markdown)
|
||||
{
|
||||
if (auto e = getEnv("_NIX_TEST_RAW_MARKDOWN"); e && *e == "1")
|
||||
return std::string(markdown);
|
||||
else
|
||||
return doRenderMarkdownToTerminal(markdown);
|
||||
}
|
||||
|
||||
#else
|
||||
std::string renderMarkdownToTerminal(std::string_view markdown)
|
||||
{
|
||||
return std::string(markdown);
|
||||
}
|
||||
#endif
|
||||
|
||||
} // namespace nix
|
||||
|
|
|
@@ -1,10 +1,17 @@
#pragma once
///@file

#include "types.hh"
#include <string_view>

namespace nix {

/**
* Render the given Markdown text to the terminal.
*
* If Nix is compiled without Markdown support, this function will return the input text as-is.
*
* The renderer takes into account the terminal width, and wraps text accordingly.
*/
std::string renderMarkdownToTerminal(std::string_view markdown);

}
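
A minimal usage sketch for the documented function, assuming the translation unit is built inside the Nix source tree and linked against the library that provides `renderMarkdownToTerminal`:

```cpp
#include "markdown.hh"
#include <iostream>

int main()
{
    // With lowdown available this prints ANSI-formatted output wrapped to the
    // terminal width; without Markdown support the input comes back verbatim,
    // exactly as the header comment above documents.
    std::cout << nix::renderMarkdownToTerminal("# Heading\n\nSome *emphasised* text.\n") << "\n";
}
```
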
|
||||
|
|
|
@ -93,14 +93,8 @@ mkMesonDerivation (finalAttrs: {
|
|||
LDFLAGS = "-fuse-ld=gold";
|
||||
};
|
||||
|
||||
enableParallelBuilding = true;
|
||||
|
||||
separateDebugInfo = !stdenv.hostPlatform.isStatic;
|
||||
|
||||
# TODO `releaseTools.coverageAnalysis` in Nixpkgs needs to be updated
|
||||
# to work with `strictDeps`.
|
||||
strictDeps = true;
|
||||
|
||||
hardeningDisable = lib.optional stdenv.hostPlatform.isStatic "pie";
|
||||
|
||||
meta = {
|
||||
|
|
|
@ -19,6 +19,7 @@ extern "C" {
|
|||
#include "repl-interacter.hh"
|
||||
#include "file-system.hh"
|
||||
#include "repl.hh"
|
||||
#include "environment-variables.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
|
@ -34,6 +35,7 @@ void sigintHandler(int signo)
|
|||
|
||||
static detail::ReplCompleterMixin * curRepl; // ugly
|
||||
|
||||
#ifndef USE_READLINE
|
||||
static char * completionCallback(char * s, int * match)
|
||||
{
|
||||
auto possible = curRepl->completePrefix(s);
|
||||
|
@ -100,6 +102,7 @@ static int listPossibleCallback(char * s, char *** avp)
|
|||
|
||||
return ac;
|
||||
}
|
||||
#endif
|
||||
|
||||
ReadlineLikeInteracter::Guard ReadlineLikeInteracter::init(detail::ReplCompleterMixin * repl)
|
||||
{
|
||||
|
@ -175,10 +178,23 @@ bool ReadlineLikeInteracter::getLine(std::string & input, ReplPromptType promptT
|
|||
return true;
|
||||
}
|
||||
|
||||
// editline doesn't echo the input to the output when non-interactive, unlike readline
|
||||
// this results in a different behavior when running tests. The echoing is
|
||||
// quite useful for reading the test output, so we add it here.
|
||||
if (auto e = getEnv("_NIX_TEST_REPL_ECHO"); s && e && *e == "1")
|
||||
{
|
||||
#ifndef USE_READLINE
|
||||
// This is probably not right for multi-line input, but we don't use that
|
||||
// in the characterisation tests, so it's fine.
|
||||
std::cout << promptForType(promptType) << s << std::endl;
|
||||
#endif
|
||||
}
|
||||
|
||||
if (!s)
|
||||
return false;
|
||||
input += s;
|
||||
input += '\n';
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
|
|
|
@ -7,7 +7,6 @@
|
|||
|
||||
#include "ansicolor.hh"
|
||||
#include "shared.hh"
|
||||
#include "config-global.hh"
|
||||
#include "eval.hh"
|
||||
#include "eval-settings.hh"
|
||||
#include "attr-path.hh"
|
||||
|
@ -77,10 +76,14 @@ struct NixRepl
|
|||
int displ;
|
||||
StringSet varNames;
|
||||
|
||||
RunNix * runNixPtr;
|
||||
|
||||
void runNix(Path program, const Strings & args, const std::optional<std::string> & input = {});
|
||||
|
||||
std::unique_ptr<ReplInteracter> interacter;
|
||||
|
||||
NixRepl(const LookupPath & lookupPath, nix::ref<Store> store,ref<EvalState> state,
|
||||
std::function<AnnotatedValues()> getValues);
|
||||
std::function<AnnotatedValues()> getValues, RunNix * runNix);
|
||||
virtual ~NixRepl() = default;
|
||||
|
||||
ReplExitStatus mainLoop() override;
|
||||
|
@ -125,32 +128,16 @@ std::string removeWhitespace(std::string s)
|
|||
|
||||
|
||||
NixRepl::NixRepl(const LookupPath & lookupPath, nix::ref<Store> store, ref<EvalState> state,
|
||||
std::function<NixRepl::AnnotatedValues()> getValues)
|
||||
std::function<NixRepl::AnnotatedValues()> getValues, RunNix * runNix = nullptr)
|
||||
: AbstractNixRepl(state)
|
||||
, debugTraceIndex(0)
|
||||
, getValues(getValues)
|
||||
, staticEnv(new StaticEnv(nullptr, state->staticBaseEnv.get()))
|
||||
, runNixPtr{runNix}
|
||||
, interacter(make_unique<ReadlineLikeInteracter>(getDataDir() + "/nix/repl-history"))
|
||||
{
|
||||
}
|
||||
|
||||
void runNix(Path program, const Strings & args,
|
||||
const std::optional<std::string> & input = {})
|
||||
{
|
||||
auto subprocessEnv = getEnv();
|
||||
subprocessEnv["NIX_CONFIG"] = globalConfig.toKeyValue();
|
||||
//isInteractive avoid grabling interactive commands
|
||||
runProgram2(RunOptions {
|
||||
.program = settings.nixBinDir+ "/" + program,
|
||||
.args = args,
|
||||
.environment = subprocessEnv,
|
||||
.input = input,
|
||||
.isInteractive = true,
|
||||
});
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
static std::ostream & showDebugTrace(std::ostream & out, const PosTable & positions, const DebugTrace & dt)
|
||||
{
|
||||
if (dt.isError)
|
||||
|
@ -217,7 +204,7 @@ ReplExitStatus NixRepl::mainLoop()
|
|||
case ProcessLineResult::PromptAgain:
|
||||
break;
|
||||
default:
|
||||
abort();
|
||||
unreachable();
|
||||
}
|
||||
} catch (ParseError & e) {
|
||||
if (e.msg().find("unexpected end of file") != std::string::npos) {
|
||||
|
@ -644,9 +631,6 @@ ProcessLineResult NixRepl::processLine(std::string line)
|
|||
fallbackPos = attr->pos;
|
||||
fallbackDoc = state->getDocCommentForPos(fallbackPos);
|
||||
}
|
||||
|
||||
} else {
|
||||
evalString(arg, v);
|
||||
}
|
||||
|
||||
evalString(arg, v);
|
||||
|
@ -836,9 +820,18 @@ void NixRepl::evalString(std::string s, Value & v)
|
|||
}
|
||||
|
||||
|
||||
void NixRepl::runNix(Path program, const Strings & args, const std::optional<std::string> & input)
|
||||
{
|
||||
if (runNixPtr)
|
||||
(*runNixPtr)(program, args, input);
|
||||
else
|
||||
throw Error("Cannot run '%s', no method of calling the Nix CLI provided", program);
|
||||
}
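
The repl no longer shells out to the `nix` CLI itself; it stores an optional `RunNix *` callback (`runNixPtr`) and reports the error above when none was supplied. A hedged sketch of how a frontend might inject that callback through `AbstractNixRepl::create` (the call site and wrapper below are assumed for illustration, not part of this diff):

```cpp
#include "repl.hh"

using namespace nix;

// Matches the AbstractNixRepl::RunNix signature. Library consumers that cannot
// invoke the nix CLI pass nullptr instead and lose only the ':' commands that need it.
static void runNixViaCli(Path program, const Strings & args,
    const std::optional<std::string> & input)
{
    // e.g. wrap runProgram2(...) here, as the removed in-library helper did
    (void) program; (void) args; (void) input;
}

std::unique_ptr<AbstractNixRepl> makeRepl(
    const LookupPath & lookupPath, ref<Store> store, ref<EvalState> state,
    std::function<AbstractNixRepl::AnnotatedValues()> getValues)
{
    return AbstractNixRepl::create(lookupPath, store, state, getValues, &runNixViaCli);
}
```
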
|
||||
|
||||
|
||||
std::unique_ptr<AbstractNixRepl> AbstractNixRepl::create(
|
||||
const LookupPath & lookupPath, nix::ref<Store> store, ref<EvalState> state,
|
||||
std::function<AnnotatedValues()> getValues)
|
||||
std::function<AnnotatedValues()> getValues, RunNix * runNix)
|
||||
{
|
||||
return std::make_unique<NixRepl>(
|
||||
lookupPath,
|
||||
|
|
|
@ -19,9 +19,19 @@ struct AbstractNixRepl
|
|||
|
||||
typedef std::vector<std::pair<Value*,std::string>> AnnotatedValues;
|
||||
|
||||
using RunNix = void(Path program, const Strings & args, const std::optional<std::string> & input);
|
||||
|
||||
/**
|
||||
* @param runNix Function to run the nix CLI to support various
|
||||
* `:<something>` commands. Optional; if not provided,
|
||||
* everything else will still work fine, but those commands won't.
|
||||
*/
|
||||
static std::unique_ptr<AbstractNixRepl> create(
|
||||
const LookupPath & lookupPath, nix::ref<Store> store, ref<EvalState> state,
|
||||
std::function<AnnotatedValues()> getValues);
|
||||
const LookupPath & lookupPath,
|
||||
nix::ref<Store> store,
|
||||
ref<EvalState> state,
|
||||
std::function<AnnotatedValues()> getValues,
|
||||
RunNix * runNix = nullptr);
|
||||
|
||||
static ReplExitStatus runSimple(
|
||||
ref<EvalState> evalState,
|
||||
|
|
|
@ -14,6 +14,16 @@
|
|||
#include "nix_api_util.h"
|
||||
#include <stddef.h>
|
||||
|
||||
#ifndef __has_c_attribute
|
||||
# define __has_c_attribute(x) 0
|
||||
#endif
|
||||
|
||||
#if __has_c_attribute(deprecated)
|
||||
# define NIX_DEPRECATED(msg) [[deprecated(msg)]]
|
||||
#else
|
||||
# define NIX_DEPRECATED(msg)
|
||||
#endif
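
With the feature test above, the previously hard-coded `[[deprecated(...)]]` attribute degrades to a no-op on compilers that lack the C23 attribute syntax, instead of failing to compile. A consumer-side sketch (the header name and function are assumptions for illustration; only `nix_value`, `Value` and `nix_c_context` come from the C API shown here):

```cpp
// Hypothetical C-API consumer: the old `Value` alias still compiles everywhere,
// and emits "use nix_value instead" where the deprecation attribute is supported.
#include "nix_api_value.h"

static void inspect(nix_c_context * ctx, nix_value * v)
{
    Value * legacy = v;   // deprecated alias kept for source compatibility
    (void) legacy;
    (void) ctx;
}
```
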
|
||||
|
||||
#ifdef __cplusplus
|
||||
extern "C" {
|
||||
#endif
|
||||
|
@ -45,7 +55,7 @@ typedef struct EvalState EvalState; // nix::EvalState
|
|||
* @see nix_value_incref, nix_value_decref
|
||||
*/
|
||||
typedef struct nix_value nix_value;
|
||||
[[deprecated("use nix_value instead")]] typedef nix_value Value;
|
||||
NIX_DEPRECATED("use nix_value instead") typedef nix_value Value;
|
||||
|
||||
// Function prototypes
|
||||
/**
|
||||
|
|
|
@ -306,7 +306,7 @@ int64_t nix_get_int(nix_c_context * context, const nix_value * value)
|
|||
try {
|
||||
auto & v = check_value_in(value);
|
||||
assert(v.type() == nix::nInt);
|
||||
return v.integer();
|
||||
return v.integer().value;
|
||||
}
|
||||
NIXC_CATCH_ERRS_RES(0);
|
||||
}
|
||||
|
|
|
@ -63,12 +63,8 @@ mkMesonDerivation (finalAttrs: {
|
|||
LDFLAGS = "-fuse-ld=gold";
|
||||
};
|
||||
|
||||
enableParallelBuilding = true;
|
||||
|
||||
separateDebugInfo = !stdenv.hostPlatform.isStatic;
|
||||
|
||||
strictDeps = true;
|
||||
|
||||
hardeningDisable = lib.optional stdenv.hostPlatform.isStatic "pie";
|
||||
|
||||
meta = {
|
||||
|
|
|
@ -134,7 +134,7 @@ std::pair<SourcePath, uint32_t> findPackageFilename(EvalState & state, Value & v
|
|||
return {SourcePath{path.accessor, CanonPath(fn.substr(0, colon))}, lineno};
|
||||
} catch (std::invalid_argument & e) {
|
||||
fail();
|
||||
abort();
|
||||
unreachable();
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -4,6 +4,8 @@
|
|||
#include "eval.hh"
|
||||
#include "eval-inline.hh"
|
||||
#include "store-api.hh"
|
||||
// Need specialization involving `SymbolStr` just in this one module.
|
||||
#include "strings-inline.hh"
|
||||
|
||||
namespace nix::eval_cache {
|
||||
|
||||
|
@ -326,7 +328,7 @@ struct AttrDb
|
|||
case AttrType::Bool:
|
||||
return {{rowId, queryAttribute.getInt(2) != 0}};
|
||||
case AttrType::Int:
|
||||
return {{rowId, int_t{queryAttribute.getInt(2)}}};
|
||||
return {{rowId, int_t{NixInt{queryAttribute.getInt(2)}}}};
|
||||
case AttrType::ListOfStrings:
|
||||
return {{rowId, tokenizeString<std::vector<std::string>>(queryAttribute.getStr(2), "\t")}};
|
||||
case AttrType::Missing:
|
||||
|
@ -469,7 +471,7 @@ Value & AttrCursor::forceValue()
|
|||
else if (v.type() == nBool)
|
||||
cachedValue = {root->db->setBool(getKey(), v.boolean()), v.boolean()};
|
||||
else if (v.type() == nInt)
|
||||
cachedValue = {root->db->setInt(getKey(), v.integer()), int_t{v.integer()}};
|
||||
cachedValue = {root->db->setInt(getKey(), v.integer().value), int_t{v.integer()}};
|
||||
else if (v.type() == nAttrs)
|
||||
; // FIXME: do something?
|
||||
else
|
||||
|
|
|
@ -1,5 +1,7 @@
|
|||
#include "error.hh"
|
||||
#include "environment-variables.hh"
|
||||
#include "eval-settings.hh"
|
||||
#include "config-global.hh"
|
||||
#include "serialise.hh"
|
||||
#include "eval-gc.hh"
|
||||
|
||||
|
@ -98,6 +100,12 @@ void initGC()
|
|||
gcCyclesAfterInit = GC_get_gc_no();
|
||||
#endif
|
||||
|
||||
// NIX_PATH must override the regular setting
|
||||
// See the comment in applyConfig
|
||||
if (auto nixPathEnv = getEnv("NIX_PATH")) {
|
||||
globalConfig.set("nix-path", concatStringsSep(" ", EvalSettings::parseNixPath(nixPathEnv.value())));
|
||||
}
|
||||
|
||||
gcInitialised = true;
|
||||
}
|
||||
|
||||
|
|
|
@ -8,7 +8,7 @@ namespace nix {
|
|||
|
||||
/* Very hacky way to parse $NIX_PATH, which is colon-separated, but
|
||||
can contain URLs (e.g. "nixpkgs=https://bla...:foo=https://"). */
|
||||
static Strings parseNixPath(const std::string & s)
|
||||
Strings EvalSettings::parseNixPath(const std::string & s)
|
||||
{
|
||||
Strings res;
|
||||
|
||||
|
@ -48,10 +48,7 @@ EvalSettings::EvalSettings(bool & readOnlyMode, EvalSettings::LookupPathHooks lo
|
|||
: readOnlyMode{readOnlyMode}
|
||||
, lookupPathHooks{lookupPathHooks}
|
||||
{
|
||||
auto var = getEnv("NIX_PATH");
|
||||
if (var) nixPath = parseNixPath(*var);
|
||||
|
||||
var = getEnv("NIX_ABORT_ON_WARN");
|
||||
auto var = getEnv("NIX_ABORT_ON_WARN");
|
||||
if (var && (var == "1" || var == "yes" || var == "true"))
|
||||
builtinsAbortOnWarn = true;
|
||||
}
|
||||
|
|
|
@ -47,6 +47,8 @@ struct EvalSettings : Config
|
|||
|
||||
static bool isPseudoUrl(std::string_view s);
|
||||
|
||||
static Strings parseNixPath(const std::string & s);
|
||||
|
||||
static std::string resolvePseudoUrl(std::string_view url);
|
||||
|
||||
LookupPathHooks lookupPathHooks;
|
||||
|
@ -63,7 +65,7 @@ struct EvalSettings : Config
|
|||
extern "C" typedef void (*ValueInitialiser) (EvalState & state, Value & v);
|
||||
```
|
||||
|
||||
The [Nix C++ API documentation](@docroot@/contributing/documentation.md#api-documentation) has more details on evaluator internals.
|
||||
The [Nix C++ API documentation](@docroot@/development/documentation.md#api-documentation) has more details on evaluator internals.
|
||||
|
||||
- `builtins.exec` *arguments*
|
||||
|
||||
|
@ -71,25 +73,30 @@ struct EvalSettings : Config
|
|||
)"};
|
||||
|
||||
Setting<Strings> nixPath{
|
||||
this, getDefaultNixPath(), "nix-path",
|
||||
this, {}, "nix-path",
|
||||
R"(
|
||||
List of search paths to use for [lookup path](@docroot@/language/constructs/lookup-path.md) resolution.
|
||||
This setting determines the value of
|
||||
[`builtins.nixPath`](@docroot@/language/builtins.md#builtins-nixPath) and can be used with [`builtins.findFile`](@docroot@/language/builtins.md#builtins-findFile).
|
||||
|
||||
The default value is
|
||||
- The configuration setting is overridden by the [`NIX_PATH`](@docroot@/command-ref/env-common.md#env-NIX_PATH)
|
||||
environment variable.
|
||||
- `NIX_PATH` is overridden by [specifying the setting as the command line flag](@docroot@/command-ref/conf-file.md#command-line-flags) `--nix-path`.
|
||||
- Any current value is extended by the [`-I` option](@docroot@/command-ref/opt-common.md#opt-I) or `--extra-nix-path`.
|
||||
|
||||
```
|
||||
$HOME/.nix-defexpr/channels
|
||||
nixpkgs=$NIX_STATE_DIR/profiles/per-user/root/channels/nixpkgs
|
||||
$NIX_STATE_DIR/profiles/per-user/root/channels
|
||||
```
|
||||
If the respective paths are accessible, the default values are:
|
||||
|
||||
It can be overridden with the [`NIX_PATH` environment variable](@docroot@/command-ref/env-common.md#env-NIX_PATH) or the [`-I` command line option](@docroot@/command-ref/opt-common.md#opt-I).
|
||||
- `$HOME/.nix-defexpr/channels`
|
||||
- `nixpkgs=$NIX_STATE_DIR/profiles/per-user/root/channels/nixpkgs`
|
||||
- `$NIX_STATE_DIR/profiles/per-user/root/channels`
|
||||
|
||||
See [`NIX_STATE_DIR`](@docroot@/command-ref/env-common.md#env-NIX_STATE_DIR) for details.
|
||||
|
||||
> **Note**
|
||||
>
|
||||
> If [pure evaluation](#conf-pure-eval) is enabled, `nixPath` evaluates to the empty list `[ ]`.
|
||||
> If [restricted evaluation](@docroot@/command-ref/conf-file.md#conf-restrict-eval) is enabled, the default value is empty.
|
||||
>
|
||||
> If [pure evaluation](#conf-pure-eval) is enabled, `builtins.nixPath` *always* evaluates to the empty list `[ ]`.
|
||||
)", {}, false};
|
||||
|
||||
Setting<std::string> currentSystem{
|
||||
|
@ -178,7 +185,11 @@ struct EvalSettings : Config
|
|||
)"};
|
||||
|
||||
Setting<bool> useEvalCache{this, true, "eval-cache",
|
||||
"Whether to use the flake evaluation cache."};
|
||||
R"(
|
||||
Whether to use the flake evaluation cache.
|
||||
Certain commands won't have to evaluate when invoked for the second time with a particular version of a flake.
|
||||
Intermediate results are not cached.
|
||||
)"};
|
||||
|
||||
Setting<bool> ignoreExceptionsDuringTry{this, false, "ignore-try",
|
||||
R"(
|
||||
|
|
|
@ -149,7 +149,7 @@ std::string_view showType(ValueType type, bool withArticle)
|
|||
case nFloat: return WA("a", "float");
|
||||
case nThunk: return WA("a", "thunk");
|
||||
}
|
||||
abort();
|
||||
unreachable();
|
||||
}
|
||||
|
||||
|
||||
|
@ -215,7 +215,7 @@ static Symbol getName(const AttrName & name, EvalState & state, Env & env)
|
|||
static constexpr size_t BASE_ENV_SIZE = 128;
|
||||
|
||||
EvalState::EvalState(
|
||||
const LookupPath & _lookupPath,
|
||||
const LookupPath & lookupPathFromArguments,
|
||||
ref<Store> store,
|
||||
const fetchers::Settings & fetchSettings,
|
||||
const EvalSettings & settings,
|
||||
|
@ -331,12 +331,21 @@ EvalState::EvalState(
|
|||
vStringSymlink.mkString("symlink");
|
||||
vStringUnknown.mkString("unknown");
|
||||
|
||||
/* Initialise the Nix expression search path. */
|
||||
/* Construct the Nix expression search path. */
|
||||
assert(lookupPath.elements.empty());
|
||||
if (!settings.pureEval) {
|
||||
for (auto & i : _lookupPath.elements)
|
||||
for (auto & i : lookupPathFromArguments.elements) {
|
||||
lookupPath.elements.emplace_back(LookupPath::Elem {i});
|
||||
for (auto & i : settings.nixPath.get())
|
||||
}
|
||||
/* $NIX_PATH overriding regular settings is implemented as a hack in `initGC()` */
|
||||
for (auto & i : settings.nixPath.get()) {
|
||||
lookupPath.elements.emplace_back(LookupPath::Elem::parse(i));
|
||||
}
|
||||
if (!settings.restrictEval) {
|
||||
for (auto & i : EvalSettings::getDefaultNixPath()) {
|
||||
lookupPath.elements.emplace_back(LookupPath::Elem::parse(i));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/* Allow access to all paths in the search path. */
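
The rewritten initialisation above establishes a clear precedence for lookup-path entries: command-line entries (`-I`, `--extra-nix-path`) first, then the `nix-path` setting (which `initGC()` may already have overwritten from `$NIX_PATH`, see that hunk earlier in this diff), then the built-in defaults unless restricted evaluation is enabled, and nothing at all under pure evaluation. A standalone sketch of just that ordering logic (simplified types, not the real `LookupPath::Elem`):

```cpp
#include <string>
#include <vector>

std::vector<std::string> buildLookupPath(
    const std::vector<std::string> & fromArguments,   // -I / --extra-nix-path
    const std::vector<std::string> & nixPathSetting,  // `nix-path`, possibly overridden by $NIX_PATH
    const std::vector<std::string> & builtinDefaults, // getDefaultNixPath()
    bool pureEval, bool restrictEval)
{
    std::vector<std::string> result;
    if (pureEval)
        return result;                                 // empty search path
    result.insert(result.end(), fromArguments.begin(), fromArguments.end());
    result.insert(result.end(), nixPathSetting.begin(), nixPathSetting.end());
    if (!restrictEval)
        result.insert(result.end(), builtinDefaults.begin(), builtinDefaults.end());
    return result;
}
```
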
|
||||
|
@ -771,7 +780,7 @@ void EvalState::runDebugRepl(const Error * error, const Env & env, const Expr &
|
|||
case ReplExitStatus::Continue:
|
||||
break;
|
||||
default:
|
||||
abort();
|
||||
unreachable();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1140,7 +1149,7 @@ inline void EvalState::evalAttrs(Env & env, Expr * e, Value & v, const PosIdx po
|
|||
|
||||
void Expr::eval(EvalState & state, Env & env, Value & v)
|
||||
{
|
||||
abort();
|
||||
unreachable();
|
||||
}
|
||||
|
||||
|
||||
|
@ -1573,7 +1582,7 @@ void EvalState::callFunction(Value & fun, size_t nrArgs, Value * * args, Value &
|
|||
.withFrame(*fun.payload.lambda.env, lambda)
|
||||
.debugThrow();
|
||||
}
|
||||
abort(); // can't happen
|
||||
unreachable();
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1970,7 +1979,7 @@ void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v)
|
|||
NixStringContext context;
|
||||
std::vector<BackedStringView> s;
|
||||
size_t sSize = 0;
|
||||
NixInt n = 0;
|
||||
NixInt n{0};
|
||||
NixFloat nf = 0;
|
||||
|
||||
bool first = !forceString;
|
||||
|
@ -2014,17 +2023,22 @@ void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v)
|
|||
|
||||
if (firstType == nInt) {
|
||||
if (vTmp.type() == nInt) {
|
||||
n += vTmp.integer();
|
||||
auto newN = n + vTmp.integer();
|
||||
if (auto checked = newN.valueChecked(); checked.has_value()) {
|
||||
n = NixInt(*checked);
|
||||
} else {
|
||||
state.error<EvalError>("integer overflow in adding %1% + %2%", n, vTmp.integer()).atPos(i_pos).debugThrow();
|
||||
}
|
||||
} else if (vTmp.type() == nFloat) {
|
||||
// Upgrade the type from int to float;
|
||||
firstType = nFloat;
|
||||
nf = n;
|
||||
nf = n.value;
|
||||
nf += vTmp.fpoint();
|
||||
} else
|
||||
state.error<EvalError>("cannot add %1% to an integer", showType(vTmp)).atPos(i_pos).withFrame(env, *this).debugThrow();
|
||||
} else if (firstType == nFloat) {
|
||||
if (vTmp.type() == nInt) {
|
||||
nf += vTmp.integer();
|
||||
nf += vTmp.integer().value;
|
||||
} else if (vTmp.type() == nFloat) {
|
||||
nf += vTmp.fpoint();
|
||||
} else
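
The integer branch earlier in this hunk no longer uses a raw `+=`: the sum is computed as a checked value, `valueChecked()` comes back empty on signed overflow, and that case is turned into an `EvalError` instead of undefined behaviour. A standalone sketch of the checked-addition idea (the `CheckedInt` type below is an illustration, not Nix's actual `NixInt` wrapper; `__builtin_add_overflow` is a GCC/Clang builtin):

```cpp
#include <cstdint>
#include <iostream>
#include <optional>

struct CheckedInt
{
    using Inner = int64_t;
    Inner value;

    // Mirrors `(a + b).valueChecked()`: empty on signed overflow.
    std::optional<Inner> addChecked(CheckedInt other) const
    {
        Inner result;
        if (__builtin_add_overflow(value, other.value, &result))
            return std::nullopt;
        return result;
    }
};

int main()
{
    CheckedInt n{INT64_MAX}, m{1};
    if (auto sum = n.addChecked(m))
        std::cout << *sum << "\n";
    else
        std::cout << "integer overflow in adding " << n.value << " + " << m.value << "\n";
}
```
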
|
||||
|
@ -2149,7 +2163,7 @@ NixFloat EvalState::forceFloat(Value & v, const PosIdx pos, std::string_view err
|
|||
try {
|
||||
forceValue(v, pos);
|
||||
if (v.type() == nInt)
|
||||
return v.integer();
|
||||
return v.integer().value;
|
||||
else if (v.type() != nFloat)
|
||||
error<TypeError>(
|
||||
"expected a float but found %1%: %2%",
|
||||
|
@ -2336,7 +2350,7 @@ BackedStringView EvalState::coerceToString(
|
|||
shell scripting convenience, just like `null'. */
|
||||
if (v.type() == nBool && v.boolean()) return "1";
|
||||
if (v.type() == nBool && !v.boolean()) return "";
|
||||
if (v.type() == nInt) return std::to_string(v.integer());
|
||||
if (v.type() == nInt) return std::to_string(v.integer().value);
|
||||
if (v.type() == nFloat) return std::to_string(v.fpoint());
|
||||
if (v.type() == nNull) return "";
|
||||
|
||||
|
@ -2719,9 +2733,9 @@ bool EvalState::eqValues(Value & v1, Value & v2, const PosIdx pos, std::string_v
|
|||
|
||||
// Special case type-compatibility between float and int
|
||||
if (v1.type() == nInt && v2.type() == nFloat)
|
||||
return v1.integer() == v2.fpoint();
|
||||
return v1.integer().value == v2.fpoint();
|
||||
if (v1.type() == nFloat && v2.type() == nInt)
|
||||
return v1.fpoint() == v2.integer();
|
||||
return v1.fpoint() == v2.integer().value;
|
||||
|
||||
// All other types are not compatible with each other.
|
||||
if (v1.type() != v2.type()) return false;
|
||||
|
@ -2851,8 +2865,10 @@ void EvalState::printStatistics()
|
|||
topObj["cpuTime"] = cpuTime;
|
||||
#endif
|
||||
topObj["time"] = {
|
||||
#ifndef _WIN32 // TODO implement
|
||||
{"cpu", cpuTime},
|
||||
#ifdef HAVE_BOEHMGC
|
||||
#endif
|
||||
#if HAVE_BOEHMGC
|
||||
{GC_is_incremental_mode() ? "gcNonIncremental" : "gc", gcFullOnlyTime},
|
||||
{GC_is_incremental_mode() ? "gcNonIncrementalFraction" : "gcFraction", gcFullOnlyTime / cpuTime},
|
||||
#endif
|
||||
|
@ -3072,7 +3088,9 @@ std::optional<std::string> EvalState::resolveLookupPathPath(const LookupPath::Pa
|
|||
if (EvalSettings::isPseudoUrl(value)) {
|
||||
try {
|
||||
auto accessor = fetchers::downloadTarball(
|
||||
EvalSettings::resolvePseudoUrl(value)).accessor;
|
||||
store,
|
||||
fetchSettings,
|
||||
EvalSettings::resolvePseudoUrl(value));
|
||||
auto storePath = fetchToStore(*store, SourcePath(accessor), FetchMode::Copy);
|
||||
return finish(store->toRealPath(storePath));
|
||||
} catch (Error & e) {
|
||||
|
|
|
@ -246,8 +246,8 @@ NixInt PackageInfo::queryMetaInt(const std::string & name, NixInt def)
|
|||
if (v->type() == nString) {
|
||||
/* Backwards compatibility with before we had support for
|
||||
integer meta fields. */
|
||||
if (auto n = string2Int<NixInt>(v->c_str()))
|
||||
return *n;
|
||||
if (auto n = string2Int<NixInt::Inner>(v->c_str()))
|
||||
return NixInt{*n};
|
||||
}
|
||||
return def;
|
||||
}
|
||||
|
|
|
@ -2,6 +2,7 @@
|
|||
#include "value.hh"
|
||||
#include "eval.hh"
|
||||
|
||||
#include <limits>
|
||||
#include <variant>
|
||||
#include <nlohmann/json.hpp>
|
||||
|
||||
|
@ -42,7 +43,7 @@ class JSONSax : nlohmann::json_sax<json> {
|
|||
auto attrs2 = state.buildBindings(attrs.size());
|
||||
for (auto & i : attrs)
|
||||
attrs2.insert(i.first, i.second);
|
||||
parent->value(state).mkAttrs(attrs2.alreadySorted());
|
||||
parent->value(state).mkAttrs(attrs2);
|
||||
return std::move(parent);
|
||||
}
|
||||
void add() override { v = nullptr; }
|
||||
|
@ -80,42 +81,46 @@ class JSONSax : nlohmann::json_sax<json> {
|
|||
public:
|
||||
JSONSax(EvalState & state, Value & v) : state(state), rs(new JSONState(&v)) {};
|
||||
|
||||
bool null()
|
||||
bool null() override
|
||||
{
|
||||
rs->value(state).mkNull();
|
||||
rs->add();
|
||||
return true;
|
||||
}
|
||||
|
||||
bool boolean(bool val)
|
||||
bool boolean(bool val) override
|
||||
{
|
||||
rs->value(state).mkBool(val);
|
||||
rs->add();
|
||||
return true;
|
||||
}
|
||||
|
||||
bool number_integer(number_integer_t val)
|
||||
bool number_integer(number_integer_t val) override
|
||||
{
|
||||
rs->value(state).mkInt(val);
|
||||
rs->add();
|
||||
return true;
|
||||
}
|
||||
|
||||
bool number_unsigned(number_unsigned_t val)
|
||||
bool number_unsigned(number_unsigned_t val_) override
|
||||
{
|
||||
if (val_ > std::numeric_limits<NixInt::Inner>::max()) {
|
||||
throw Error("unsigned json number %1% outside of Nix integer range", val_);
|
||||
}
|
||||
NixInt::Inner val = val_;
|
||||
rs->value(state).mkInt(val);
|
||||
rs->add();
|
||||
return true;
|
||||
}
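
`number_unsigned` now rejects values that cannot be represented as Nix's signed 64-bit integer before narrowing, rather than silently wrapping. The essence of that guard as a standalone function (illustration only; the real code throws `nix::Error` and stores the result with `Value::mkInt`):

```cpp
#include <cstdint>
#include <limits>
#include <stdexcept>

int64_t toNixInt(uint64_t v)
{
    if (v > static_cast<uint64_t>(std::numeric_limits<int64_t>::max()))
        throw std::runtime_error("unsigned json number outside of Nix integer range");
    return static_cast<int64_t>(v);
}
```
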
|
||||
|
||||
bool number_float(number_float_t val, const string_t & s)
|
||||
bool number_float(number_float_t val, const string_t & s) override
|
||||
{
|
||||
rs->value(state).mkFloat(val);
|
||||
rs->add();
|
||||
return true;
|
||||
}
|
||||
|
||||
bool string(string_t & val)
|
||||
bool string(string_t & val) override
|
||||
{
|
||||
rs->value(state).mkString(val);
|
||||
rs->add();
|
||||
|
@ -123,7 +128,7 @@ public:
|
|||
}
|
||||
|
||||
#if NLOHMANN_JSON_VERSION_MAJOR >= 3 && NLOHMANN_JSON_VERSION_MINOR >= 8
|
||||
bool binary(binary_t&)
|
||||
bool binary(binary_t&) override
|
||||
{
|
||||
// This function ought to be unreachable
|
||||
assert(false);
|
||||
|
@ -131,35 +136,35 @@ public:
|
|||
}
|
||||
#endif
|
||||
|
||||
bool start_object(std::size_t len)
|
||||
bool start_object(std::size_t len) override
|
||||
{
|
||||
rs = std::make_unique<JSONObjectState>(std::move(rs));
|
||||
return true;
|
||||
}
|
||||
|
||||
bool key(string_t & name)
|
||||
bool key(string_t & name) override
|
||||
{
|
||||
dynamic_cast<JSONObjectState*>(rs.get())->key(name, state);
|
||||
return true;
|
||||
}
|
||||
|
||||
bool end_object() {
|
||||
bool end_object() override {
|
||||
rs = rs->resolve(state);
|
||||
rs->add();
|
||||
return true;
|
||||
}
|
||||
|
||||
bool end_array() {
|
||||
bool end_array() override {
|
||||
return end_object();
|
||||
}
|
||||
|
||||
bool start_array(size_t len) {
|
||||
bool start_array(size_t len) override {
|
||||
rs = std::make_unique<JSONListState>(std::move(rs),
|
||||
len != std::numeric_limits<size_t>::max() ? len : 128);
|
||||
return true;
|
||||
}
|
||||
|
||||
bool parse_error(std::size_t, const std::string&, const nlohmann::detail::exception& ex) {
|
||||
bool parse_error(std::size_t, const std::string&, const nlohmann::detail::exception& ex) override {
|
||||
throw JSONParseError("%s", ex.what());
|
||||
}
|
||||
};
|
||||
|
|
|
@ -67,6 +67,14 @@ static StringToken unescapeStr(SymbolTable & symbols, char * s, size_t length)
|
|||
return {result, size_t(t - result)};
|
||||
}
|
||||
|
||||
static void requireExperimentalFeature(const ExperimentalFeature & feature, const Pos & pos)
|
||||
{
|
||||
if (!experimentalFeatureSettings.isEnabled(feature))
|
||||
throw ParseError(ErrorInfo{
|
||||
.msg = HintFmt("experimental Nix feature '%1%' is disabled; add '--extra-experimental-features %1%' to enable it", showExperimentalFeature(feature)),
|
||||
.pos = pos,
|
||||
});
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
@ -119,12 +127,18 @@ or { return OR_KW; }
|
|||
\-\> { return IMPL; }
|
||||
\/\/ { return UPDATE; }
|
||||
\+\+ { return CONCAT; }
|
||||
\<\| { requireExperimentalFeature(Xp::PipeOperators, state->positions[CUR_POS]);
|
||||
return PIPE_FROM;
|
||||
}
|
||||
\|\> { requireExperimentalFeature(Xp::PipeOperators, state->positions[CUR_POS]);
|
||||
return PIPE_INTO;
|
||||
}
|
||||
|
||||
{ID} { yylval->id = {yytext, (size_t) yyleng}; return ID; }
|
||||
{INT} { errno = 0;
|
||||
std::optional<int64_t> numMay = string2Int<int64_t>(yytext);
|
||||
if (numMay.has_value()) {
|
||||
yylval->n = *numMay;
|
||||
yylval->n = NixInt{*numMay};
|
||||
} else {
|
||||
throw ParseError(ErrorInfo{
|
||||
.msg = HintFmt("invalid integer '%1%'", yytext),
|
||||
|
|
|
@ -32,6 +32,7 @@ subdir('build-utils-meson/threads')
|
|||
boost = dependency(
|
||||
'boost',
|
||||
modules : ['container', 'context'],
|
||||
include_type: 'system',
|
||||
)
|
||||
# boost is a public dependency, but not a pkg-config dependency unfortunately, so we
|
||||
# put in `deps_other`.
|
||||
|
@ -55,7 +56,12 @@ if bdw_gc.found()
|
|||
endif
|
||||
configdata.set('HAVE_BOEHMGC', bdw_gc.found().to_int())
|
||||
|
||||
toml11 = dependency('toml11', version : '>=3.7.0', method : 'cmake')
|
||||
toml11 = dependency(
|
||||
'toml11',
|
||||
version : '>=3.7.0',
|
||||
method : 'cmake',
|
||||
include_type: 'system',
|
||||
)
|
||||
deps_other += toml11
|
||||
|
||||
config_h = configure_file(
|
||||
|
|
|
@ -25,7 +25,7 @@ std::ostream & operator <<(std::ostream & str, const SymbolStr & symbol)
|
|||
|
||||
void Expr::show(const SymbolTable & symbols, std::ostream & str) const
|
||||
{
|
||||
abort();
|
||||
unreachable();
|
||||
}
|
||||
|
||||
void ExprInt::show(const SymbolTable & symbols, std::ostream & str) const
|
||||
|
@ -271,7 +271,7 @@ std::string showAttrPath(const SymbolTable & symbols, const AttrPath & attrPath)
|
|||
|
||||
void Expr::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env)
|
||||
{
|
||||
abort();
|
||||
unreachable();
|
||||
}
|
||||
|
||||
void ExprInt::bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env)
|
||||
|
|
|
@ -107,6 +107,7 @@ struct ExprInt : Expr
|
|||
{
|
||||
Value v;
|
||||
ExprInt(NixInt n) { v.mkInt(n); };
|
||||
ExprInt(NixInt::Inner n) { v.mkInt(n); };
|
||||
Value * maybeThunk(EvalState & state, Env & env) override;
|
||||
COMMON_METHODS
|
||||
};
|
||||
|
@ -186,7 +187,7 @@ struct ExprInheritFrom : ExprVar
|
|||
this->fromWith = nullptr;
|
||||
}
|
||||
|
||||
void bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env);
|
||||
void bindVars(EvalState & es, const std::shared_ptr<const StaticEnv> & env) override;
|
||||
};
|
||||
|
||||
struct ExprSelect : Expr
|
||||
|
@ -203,7 +204,7 @@ struct ExprSelect : Expr
|
|||
*
|
||||
* @param[out] v The attribute set that should contain the last attribute name (if it exists).
|
||||
* @return The last attribute name in `attrPath`
|
||||
*
|
||||
*
|
||||
* @note This does *not* evaluate the final attribute, and does not fail if that's the only attribute that does not exist.
|
||||
*/
|
||||
Symbol evalExceptFinalSelect(EvalState & state, Env & env, Value & attrs);
|
||||
|
|
|
@ -102,12 +102,8 @@ mkMesonDerivation (finalAttrs: {
|
|||
LDFLAGS = "-fuse-ld=gold";
|
||||
};
|
||||
|
||||
enableParallelBuilding = true;
|
||||
|
||||
separateDebugInfo = !stdenv.hostPlatform.isStatic;
|
||||
|
||||
strictDeps = true;
|
||||
|
||||
hardeningDisable = lib.optional stdenv.hostPlatform.isStatic "pie";
|
||||
|
||||
meta = {
|
||||
|
|
|
@ -20,6 +20,7 @@ struct StringToken
|
|||
operator std::string_view() const { return {p, l}; }
|
||||
};
|
||||
|
||||
// This type must be trivially copyable; see YYLTYPE_IS_TRIVIAL in parser.y.
|
||||
struct ParserLocation
|
||||
{
|
||||
int beginOffset;
|
||||
|
@ -86,7 +87,7 @@ struct ParserState
|
|||
|
||||
void dupAttr(const AttrPath & attrPath, const PosIdx pos, const PosIdx prevPos);
|
||||
void dupAttr(Symbol attr, const PosIdx pos, const PosIdx prevPos);
|
||||
void addAttr(ExprAttrs * attrs, AttrPath && attrPath, Expr * e, const PosIdx pos);
|
||||
void addAttr(ExprAttrs * attrs, AttrPath && attrPath, const ParserLocation & loc, Expr * e, const ParserLocation & exprLoc);
|
||||
Formals * validateFormals(Formals * formals, PosIdx pos = noPos, Symbol arg = {});
|
||||
Expr * stripIndentation(const PosIdx pos,
|
||||
std::vector<std::pair<PosIdx, std::variant<Expr *, StringToken>>> && es);
|
||||
|
@ -110,11 +111,12 @@ inline void ParserState::dupAttr(Symbol attr, const PosIdx pos, const PosIdx pre
|
|||
});
|
||||
}
|
||||
|
||||
inline void ParserState::addAttr(ExprAttrs * attrs, AttrPath && attrPath, Expr * e, const PosIdx pos)
|
||||
inline void ParserState::addAttr(ExprAttrs * attrs, AttrPath && attrPath, const ParserLocation & loc, Expr * e, const ParserLocation & exprLoc)
|
||||
{
|
||||
AttrPath::iterator i;
|
||||
// All attrpaths have at least one attr
|
||||
assert(!attrPath.empty());
|
||||
auto pos = at(loc);
|
||||
// Checking attrPath validity.
|
||||
// ===========================
|
||||
for (i = attrPath.begin(); i + 1 < attrPath.end(); i++) {
|
||||
|
@ -179,6 +181,12 @@ inline void ParserState::addAttr(ExprAttrs * attrs, AttrPath && attrPath, Expr *
|
|||
} else {
|
||||
attrs->dynamicAttrs.push_back(ExprAttrs::DynamicAttrDef(i->expr, e, pos));
|
||||
}
|
||||
|
||||
auto it = lexerState.positionToDocComment.find(pos);
|
||||
if (it != lexerState.positionToDocComment.end()) {
|
||||
e->setDocComment(it->second);
|
||||
lexerState.positionToDocComment.emplace(at(exprLoc), it->second);
|
||||
}
|
||||
}
|
||||
|
||||
inline Formals * ParserState::validateFormals(Formals * formals, PosIdx pos, Symbol arg)
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
%glr-parser
|
||||
%define api.location.type { ::nix::ParserLocation }
|
||||
%define api.pure
|
||||
%locations
|
||||
%define parse.error verbose
|
||||
|
@ -8,8 +8,7 @@
|
|||
%parse-param { nix::ParserState * state }
|
||||
%lex-param { void * scanner }
|
||||
%lex-param { nix::ParserState * state }
|
||||
%expect 1
|
||||
%expect-rr 1
|
||||
%expect 0
|
||||
|
||||
%code requires {
|
||||
|
||||
|
@ -27,7 +26,17 @@
|
|||
#include "eval-settings.hh"
|
||||
#include "parser-state.hh"
|
||||
|
||||
#define YYLTYPE ::nix::ParserLocation
|
||||
// Bison seems to have difficulty growing the parser stack when using C++ with
|
||||
// a custom location type. This undocumented macro tells Bison that our
|
||||
// location type is "trivially copyable" in C++-ese, so it is safe to use the
|
||||
// same memcpy macro it uses to grow the stack that it uses with its own
|
||||
// default location type. Without this, we get "error: memory exhausted" when
|
||||
// parsing some large Nix files. Our other options are to increase the initial
|
||||
// stack size (200 by default) to be as large as we ever want to support (so
|
||||
// that growing the stack is unnecessary), or redefine the stack-relocation
|
||||
// macro ourselves (which is also undocumented).
|
||||
#define YYLTYPE_IS_TRIVIAL 1
|
||||
|
||||
#define YY_DECL int yylex \
|
||||
(YYSTYPE * yylval_param, YYLTYPE * yylloc_param, yyscan_t yyscanner, nix::ParserState * state)
|
||||
|
||||
|
@ -77,7 +86,7 @@ YY_DECL;
|
|||
|
||||
using namespace nix;
|
||||
|
||||
#define CUR_POS state->at(*yylocp)
|
||||
#define CUR_POS state->at(yyloc)
|
||||
|
||||
|
||||
void yyerror(YYLTYPE * loc, yyscan_t scanner, ParserState * state, const char * error)
|
||||
|
@ -99,6 +108,14 @@ static void setDocPosition(const LexerState & lexerState, ExprLambda * lambda, P
|
|||
}
|
||||
}
|
||||
|
||||
static Expr * makeCall(PosIdx pos, Expr * fn, Expr * arg) {
|
||||
if (auto e2 = dynamic_cast<ExprCall *>(fn)) {
|
||||
e2->args.push_back(arg);
|
||||
return fn;
|
||||
}
|
||||
return new ExprCall(pos, fn, {arg});
|
||||
}
|
||||
|
||||
|
||||
%}
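
`makeCall` factors out the call-flattening that previously lived inline in the `expr_app` rule, so the new `<|` and `|>` pipe rules can reuse it. A standalone model of what the flattening buys (simplified classes; the real code operates on `nix::Expr`/`nix::ExprCall` and leaks nothing, unlike this sketch):

```cpp
#include <iostream>
#include <string>
#include <vector>

struct Expr { virtual ~Expr() = default; };
struct Var  : Expr { std::string name; explicit Var(std::string n) : name(std::move(n)) {} };
struct Call : Expr { Expr * fn = nullptr; std::vector<Expr *> args; };

// Same shape as makeCall() above: extend an existing call instead of nesting.
Expr * makeCall(Expr * fn, Expr * arg)
{
    if (auto existing = dynamic_cast<Call *>(fn)) {
        existing->args.push_back(arg);
        return fn;
    }
    auto call = new Call;
    call->fn = fn;
    call->args = {arg};
    return call;
}

int main()
{
    // `f a b`: the second application extends the first call node, giving one
    // Call with two arguments rather than Call(Call(f, a), b).
    Expr * e = makeCall(new Var("f"), new Var("a"));
    e = makeCall(e, new Var("b"));
    std::cout << dynamic_cast<Call *>(e)->args.size() << "\n"; // prints 2
}
```

The pipe rules further down build on the same helper: `f <| x` and `x |> f` both become ordinary application `f x`, and chains associate so that `x |> f |> g` parses as `g (f x)`.
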
|
||||
|
||||
|
@ -123,9 +140,10 @@ static void setDocPosition(const LexerState & lexerState, ExprLambda * lambda, P
|
|||
|
||||
%type <e> start expr expr_function expr_if expr_op
|
||||
%type <e> expr_select expr_simple expr_app
|
||||
%type <e> expr_pipe_from expr_pipe_into
|
||||
%type <list> expr_list
|
||||
%type <attrs> binds
|
||||
%type <formals> formals
|
||||
%type <attrs> binds binds1
|
||||
%type <formals> formals formal_set
|
||||
%type <formal> formal
|
||||
%type <attrNames> attrpath
|
||||
%type <inheritAttrs> attrs
|
||||
|
@ -140,6 +158,7 @@ static void setDocPosition(const LexerState & lexerState, ExprLambda * lambda, P
|
|||
%token <path> PATH HPATH SPATH PATH_END
|
||||
%token <uri> URI
|
||||
%token IF THEN ELSE ASSERT WITH LET IN_KW REC INHERIT EQ NEQ AND OR IMPL OR_KW
|
||||
%token PIPE_FROM PIPE_INTO /* <| and |> */
|
||||
%token DOLLAR_CURLY /* == ${ */
|
||||
%token IND_STRING_OPEN IND_STRING_CLOSE
|
||||
%token ELLIPSIS
|
||||
|
@ -170,22 +189,22 @@ expr_function
|
|||
$$ = me;
|
||||
SET_DOC_POS(me, @1);
|
||||
}
|
||||
| '{' formals '}' ':' expr_function
|
||||
{ auto me = new ExprLambda(CUR_POS, state->validateFormals($2), $5);
|
||||
| formal_set ':' expr_function[body]
|
||||
{ auto me = new ExprLambda(CUR_POS, state->validateFormals($formal_set), $body);
|
||||
$$ = me;
|
||||
SET_DOC_POS(me, @1);
|
||||
}
|
||||
| '{' formals '}' '@' ID ':' expr_function
|
||||
| formal_set '@' ID ':' expr_function[body]
|
||||
{
|
||||
auto arg = state->symbols.create($5);
|
||||
auto me = new ExprLambda(CUR_POS, arg, state->validateFormals($2, CUR_POS, arg), $7);
|
||||
auto arg = state->symbols.create($ID);
|
||||
auto me = new ExprLambda(CUR_POS, arg, state->validateFormals($formal_set, CUR_POS, arg), $body);
|
||||
$$ = me;
|
||||
SET_DOC_POS(me, @1);
|
||||
}
|
||||
| ID '@' '{' formals '}' ':' expr_function
|
||||
| ID '@' formal_set ':' expr_function[body]
|
||||
{
|
||||
auto arg = state->symbols.create($1);
|
||||
auto me = new ExprLambda(CUR_POS, arg, state->validateFormals($4, CUR_POS, arg), $7);
|
||||
auto arg = state->symbols.create($ID);
|
||||
auto me = new ExprLambda(CUR_POS, arg, state->validateFormals($formal_set, CUR_POS, arg), $body);
|
||||
$$ = me;
|
||||
SET_DOC_POS(me, @1);
|
||||
}
|
||||
|
@ -206,9 +225,21 @@ expr_function
|
|||
|
||||
expr_if
|
||||
: IF expr THEN expr ELSE expr { $$ = new ExprIf(CUR_POS, $2, $4, $6); }
|
||||
| expr_pipe_from
|
||||
| expr_pipe_into
|
||||
| expr_op
|
||||
;
|
||||
|
||||
expr_pipe_from
|
||||
: expr_op PIPE_FROM expr_pipe_from { $$ = makeCall(state->at(@2), $1, $3); }
|
||||
| expr_op PIPE_FROM expr_op { $$ = makeCall(state->at(@2), $1, $3); }
|
||||
;
|
||||
|
||||
expr_pipe_into
|
||||
: expr_pipe_into PIPE_INTO expr_op { $$ = makeCall(state->at(@2), $3, $1); }
|
||||
| expr_op PIPE_INTO expr_op { $$ = makeCall(state->at(@2), $3, $1); }
|
||||
;
|
||||
|
||||
expr_op
|
||||
: '!' expr_op %prec NOT { $$ = new ExprOpNot($2); }
|
||||
| '-' expr_op %prec NEGATE { $$ = new ExprCall(CUR_POS, new ExprVar(state->s.sub), {new ExprInt(0), $2}); }
|
||||
|
@ -233,13 +264,7 @@ expr_op
|
|||
;
|
||||
|
||||
expr_app
|
||||
: expr_app expr_select {
|
||||
if (auto e2 = dynamic_cast<ExprCall *>($1)) {
|
||||
e2->args.push_back($2);
|
||||
$$ = $1;
|
||||
} else
|
||||
$$ = new ExprCall(CUR_POS, $1, {$2});
|
||||
}
|
||||
: expr_app expr_select { $$ = makeCall(CUR_POS, $1, $2); }
|
||||
| expr_select
|
||||
;
|
||||
|
||||
|
@ -295,11 +320,13 @@ expr_simple
|
|||
/* Let expressions `let {..., body = ...}' are just desugared
|
||||
into `(rec {..., body = ...}).body'. */
|
||||
| LET '{' binds '}'
|
||||
{ $3->recursive = true; $$ = new ExprSelect(noPos, $3, state->s.body); }
|
||||
{ $3->recursive = true; $3->pos = CUR_POS; $$ = new ExprSelect(noPos, $3, state->s.body); }
|
||||
| REC '{' binds '}'
|
||||
{ $3->recursive = true; $$ = $3; }
|
||||
| '{' binds '}'
|
||||
{ $$ = $2; }
|
||||
{ $3->recursive = true; $3->pos = CUR_POS; $$ = $3; }
|
||||
| '{' binds1 '}'
|
||||
{ $2->pos = CUR_POS; $$ = $2; }
|
||||
| '{' '}'
|
||||
{ $$ = new ExprAttrs(CUR_POS); }
|
||||
| '[' expr_list ']' { $$ = $2; }
|
||||
;
|
||||
|
||||
|
@ -348,52 +375,50 @@ ind_string_parts
|
|||
;
|
||||
|
||||
binds
|
||||
: binds attrpath '=' expr ';' {
|
||||
$$ = $1;
|
||||
: binds1
|
||||
| { $$ = new ExprAttrs; }
|
||||
;
|
||||
|
||||
auto pos = state->at(@2);
|
||||
auto exprPos = state->at(@4);
|
||||
{
|
||||
auto it = state->lexerState.positionToDocComment.find(pos);
|
||||
if (it != state->lexerState.positionToDocComment.end()) {
|
||||
$4->setDocComment(it->second);
|
||||
state->lexerState.positionToDocComment.emplace(exprPos, it->second);
|
||||
}
|
||||
}
|
||||
|
||||
state->addAttr($$, std::move(*$2), $4, pos);
|
||||
delete $2;
|
||||
binds1
|
||||
: binds1[accum] attrpath '=' expr ';'
|
||||
{ $$ = $accum;
|
||||
state->addAttr($$, std::move(*$attrpath), @attrpath, $expr, @expr);
|
||||
delete $attrpath;
|
||||
}
|
||||
| binds INHERIT attrs ';'
|
||||
{ $$ = $1;
|
||||
for (auto & [i, iPos] : *$3) {
|
||||
if ($$->attrs.find(i.symbol) != $$->attrs.end())
|
||||
state->dupAttr(i.symbol, iPos, $$->attrs[i.symbol].pos);
|
||||
$$->attrs.emplace(
|
||||
| binds[accum] INHERIT attrs ';'
|
||||
{ $$ = $accum;
|
||||
for (auto & [i, iPos] : *$attrs) {
|
||||
if ($accum->attrs.find(i.symbol) != $accum->attrs.end())
|
||||
state->dupAttr(i.symbol, iPos, $accum->attrs[i.symbol].pos);
|
||||
$accum->attrs.emplace(
|
||||
i.symbol,
|
||||
ExprAttrs::AttrDef(new ExprVar(iPos, i.symbol), iPos, ExprAttrs::AttrDef::Kind::Inherited));
|
||||
}
|
||||
delete $3;
|
||||
delete $attrs;
|
||||
}
|
||||
| binds INHERIT '(' expr ')' attrs ';'
|
||||
{ $$ = $1;
|
||||
if (!$$->inheritFromExprs)
|
||||
$$->inheritFromExprs = std::make_unique<std::vector<Expr *>>();
|
||||
$$->inheritFromExprs->push_back($4);
|
||||
auto from = new nix::ExprInheritFrom(state->at(@4), $$->inheritFromExprs->size() - 1);
|
||||
for (auto & [i, iPos] : *$6) {
|
||||
if ($$->attrs.find(i.symbol) != $$->attrs.end())
|
||||
state->dupAttr(i.symbol, iPos, $$->attrs[i.symbol].pos);
|
||||
$$->attrs.emplace(
|
||||
| binds[accum] INHERIT '(' expr ')' attrs ';'
|
||||
{ $$ = $accum;
|
||||
if (!$accum->inheritFromExprs)
|
||||
$accum->inheritFromExprs = std::make_unique<std::vector<Expr *>>();
|
||||
$accum->inheritFromExprs->push_back($expr);
|
||||
auto from = new nix::ExprInheritFrom(state->at(@expr), $accum->inheritFromExprs->size() - 1);
|
||||
for (auto & [i, iPos] : *$attrs) {
|
||||
if ($accum->attrs.find(i.symbol) != $accum->attrs.end())
|
||||
state->dupAttr(i.symbol, iPos, $accum->attrs[i.symbol].pos);
|
||||
$accum->attrs.emplace(
|
||||
i.symbol,
|
||||
ExprAttrs::AttrDef(
|
||||
new ExprSelect(iPos, from, i.symbol),
|
||||
iPos,
|
||||
ExprAttrs::AttrDef::Kind::InheritedFrom));
|
||||
}
|
||||
delete $6;
|
||||
delete $attrs;
|
||||
}
|
||||
| attrpath '=' expr ';'
|
||||
{ $$ = new ExprAttrs;
|
||||
state->addAttr($$, std::move(*$attrpath), @attrpath, $expr, @expr);
|
||||
delete $attrpath;
|
||||
}
|
||||
| { $$ = new ExprAttrs(state->at(@0)); }
|
||||
;
|
||||
|
||||
attrs
|
||||
|
@ -451,15 +476,19 @@ expr_list
|
|||
| { $$ = new ExprList; }
|
||||
;
|
||||
|
||||
formal_set
|
||||
: '{' formals ',' ELLIPSIS '}' { $$ = $formals; $$->ellipsis = true; }
|
||||
| '{' ELLIPSIS '}' { $$ = new Formals; $$->ellipsis = true; }
|
||||
| '{' formals ',' '}' { $$ = $formals; $$->ellipsis = false; }
|
||||
| '{' formals '}' { $$ = $formals; $$->ellipsis = false; }
|
||||
| '{' '}' { $$ = new Formals; $$->ellipsis = false; }
|
||||
;
|
||||
|
||||
formals
|
||||
: formal ',' formals
|
||||
{ $$ = $3; $$->formals.emplace_back(*$1); delete $1; }
|
||||
: formals[accum] ',' formal
|
||||
{ $$ = $accum; $$->formals.emplace_back(*$formal); delete $formal; }
|
||||
| formal
|
||||
{ $$ = new Formals; $$->formals.emplace_back(*$1); $$->ellipsis = false; delete $1; }
|
||||
|
|
||||
{ $$ = new Formals; $$->ellipsis = false; }
|
||||
| ELLIPSIS
|
||||
{ $$ = new Formals; $$->ellipsis = true; }
|
||||
{ $$ = new Formals; $$->formals.emplace_back(*$formal); delete $formal; }
|
||||
;
|
||||
|
||||
formal
|
||||
|
|
|
@ -426,7 +426,7 @@ static void prim_typeOf(EvalState & state, const PosIdx pos, Value * * args, Val
|
|||
t = args[0]->external()->typeOf();
|
||||
break;
|
||||
case nFloat: t = "float"; break;
|
||||
case nThunk: abort();
|
||||
case nThunk: unreachable();
|
||||
}
|
||||
v.mkString(t);
|
||||
}
|
||||
|
@ -587,9 +587,9 @@ struct CompareValues
|
|||
{
|
||||
try {
|
||||
if (v1->type() == nFloat && v2->type() == nInt)
|
||||
return v1->fpoint() < v2->integer();
|
||||
return v1->fpoint() < v2->integer().value;
|
||||
if (v1->type() == nInt && v2->type() == nFloat)
|
||||
return v1->integer() < v2->fpoint();
|
||||
return v1->integer().value < v2->fpoint();
|
||||
if (v1->type() != v2->type())
|
||||
state.error<EvalError>("cannot compare %s with %s", showType(*v1), showType(*v2)).debugThrow();
|
||||
// Allow selecting a subset of enum values
|
||||
|
@ -1843,34 +1843,6 @@ static RegisterPrimOp primop_findFile(PrimOp {
|
|||
.doc = R"(
|
||||
Find *lookup-path* in *search-path*.
|
||||
|
||||
A search path is represented list of [attribute sets](./types.md#attribute-set) with two attributes:
|
||||
- `prefix` is a relative path.
|
||||
- `path` denotes a file system location
|
||||
The exact syntax depends on the command line interface.
|
||||
|
||||
Examples of search path attribute sets:
|
||||
|
||||
- ```
|
||||
{
|
||||
prefix = "nixos-config";
|
||||
path = "/etc/nixos/configuration.nix";
|
||||
}
|
||||
```
|
||||
|
||||
- ```
|
||||
{
|
||||
prefix = "";
|
||||
path = "/nix/var/nix/profiles/per-user/root/channels";
|
||||
}
|
||||
```
|
||||
|
||||
The lookup algorithm checks each entry until a match is found, returning a [path value](@docroot@/language/types.md#type-path) of the match:
|
||||
|
||||
- If *lookup-path* matches `prefix`, then the remainder of *lookup-path* (the "suffix") is searched for within the directory denoted by `path`.
|
||||
Note that the `path` may need to be downloaded at this point to look inside.
|
||||
- If the suffix is found inside that directory, then the entry is a match.
|
||||
The combined absolute path of the directory (now downloaded if need be) and the suffix is returned.
|
||||
|
||||
[Lookup path](@docroot@/language/constructs/lookup-path.md) expressions are [desugared](https://en.wikipedia.org/wiki/Syntactic_sugar) using this and [`builtins.nixPath`](#builtins-nixPath):
|
||||
|
||||
```nix
|
||||
|
@ -1882,6 +1854,119 @@ static RegisterPrimOp primop_findFile(PrimOp {
|
|||
```nix
|
||||
builtins.findFile builtins.nixPath "nixpkgs"
|
||||
```
|
||||
|
||||
A search path is represented as a list of [attribute sets](./types.md#attribute-set) with two attributes:
|
||||
- `prefix` is a relative path.
|
||||
- `path` denotes a file system location
|
||||
|
||||
Examples of search path attribute sets:
|
||||
|
||||
- ```
|
||||
{
|
||||
prefix = "";
|
||||
path = "/nix/var/nix/profiles/per-user/root/channels";
|
||||
}
|
||||
```
|
||||
- ```
|
||||
{
|
||||
prefix = "nixos-config";
|
||||
path = "/etc/nixos/configuration.nix";
|
||||
}
|
||||
```
|
||||
- ```
|
||||
{
|
||||
prefix = "nixpkgs";
|
||||
path = "https://github.com/NixOS/nixpkgs/tarballs/master";
|
||||
}
|
||||
```
|
||||
- ```
|
||||
{
|
||||
prefix = "nixpkgs";
|
||||
path = "channel:nixpkgs-unstable";
|
||||
}
|
||||
```
|
||||
- ```
|
||||
{
|
||||
prefix = "flake-compat";
|
||||
path = "flake:github:edolstra/flake-compat";
|
||||
}
|
||||
```
|
||||
|
||||
The lookup algorithm checks each entry until a match is found, returning a [path value](@docroot@/language/types.md#type-path) of the match:
|
||||
|
||||
- If a prefix of `lookup-path` matches `prefix`, then the remainder of *lookup-path* (the "suffix") is searched for within the directory denoted by `path`.
|
||||
The contents of `path` may need to be downloaded at this point to look inside.
|
||||
|
||||
- If the suffix is found inside that directory, then the entry is a match.
|
||||
The combined absolute path of the directory (now downloaded if need be) and the suffix is returned.
|
||||
|
||||
> **Example**
|
||||
>
|
||||
> A *search-path* value
|
||||
>
|
||||
> ```
|
||||
> [
|
||||
> {
|
||||
> prefix = "";
|
||||
> path = "/home/eelco/Dev";
|
||||
> }
|
||||
> {
|
||||
> prefix = "nixos-config";
|
||||
> path = "/etc/nixos";
|
||||
> }
|
||||
> ]
|
||||
> ```
|
||||
>
|
||||
> and a *lookup-path* value `"nixos-config"` will cause Nix to try `/home/eelco/Dev/nixos-config` and `/etc/nixos` in that order and return the first path that exists.
|
||||
|
||||
If `path` starts with `http://` or `https://`, it is interpreted as the URL of a tarball that will be downloaded and unpacked to a temporary location.
|
||||
The tarball must consist of a single top-level directory.
|
||||
|
||||
The URLs of the tarballs from the official `nixos.org` channels can be abbreviated as `channel:<channel-name>`.
|
||||
See [documentation on `nix-channel`](@docroot@/command-ref/nix-channel.md) for details about channels.
|
||||
|
||||
> **Example**
|
||||
>
|
||||
> These two search path entries are equivalent:
|
||||
>
|
||||
> - ```
|
||||
> {
|
||||
> prefix = "nixpkgs";
|
||||
> path = "channel:nixpkgs-unstable";
|
||||
> }
|
||||
> ```
|
||||
> - ```
|
||||
> {
|
||||
> prefix = "nixpkgs";
|
||||
> path = "https://nixos.org/channels/nixos-unstable/nixexprs.tar.xz";
|
||||
> }
|
||||
> ```
|
||||
|
||||
Search paths can also point to source trees using [flake URLs](@docroot@/command-ref/new-cli/nix3-flake.md#url-like-syntax).
|
||||
|
||||
|
||||
> **Example**
|
||||
>
|
||||
> The search path entry
|
||||
>
|
||||
> ```
|
||||
> {
|
||||
> prefix = "nixpkgs";
|
||||
> path = "flake:nixpkgs";
|
||||
> }
|
||||
> ```
|
||||
> specifies that the prefix `nixpkgs` shall refer to the source tree downloaded from the `nixpkgs` entry in the flake registry.
|
||||
>
|
||||
> Similarly
|
||||
>
|
||||
> ```
|
||||
> {
|
||||
> prefix = "nixpkgs";
|
||||
> path = "flake:github:nixos/nixpkgs/nixos-22.05";
|
||||
> }
|
||||
> ```
|
||||
>
|
||||
> makes `<nixpkgs>` refer to a particular branch of the `NixOS/nixpkgs` repository on GitHub.
|
||||
)",
|
||||
.fun = prim_findFile,
|
||||
});
|
||||
|
@ -2677,13 +2762,13 @@ static struct LazyPosAcessors {
|
|||
PrimOp primop_lineOfPos{
|
||||
.arity = 1,
|
||||
.fun = [] (EvalState & state, PosIdx pos, Value * * args, Value & v) {
|
||||
v.mkInt(state.positions[PosIdx(args[0]->integer())].line);
|
||||
v.mkInt(state.positions[PosIdx(args[0]->integer().value)].line);
|
||||
}
|
||||
};
|
||||
PrimOp primop_columnOfPos{
|
||||
.arity = 1,
|
||||
.fun = [] (EvalState & state, PosIdx pos, Value * * args, Value & v) {
|
||||
v.mkInt(state.positions[PosIdx(args[0]->integer())].column);
|
||||
v.mkInt(state.positions[PosIdx(args[0]->integer().value)].column);
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -3159,7 +3244,8 @@ static void elemAt(EvalState & state, const PosIdx pos, Value & list, int n, Val
|
|||
/* Return the n-1'th element of a list. */
|
||||
static void prim_elemAt(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
{
|
||||
elemAt(state, pos, *args[0], state.forceInt(*args[1], pos, "while evaluating the second argument passed to builtins.elemAt"), v);
|
||||
NixInt::Inner elem = state.forceInt(*args[1], pos, "while evaluating the second argument passed to builtins.elemAt").value;
|
||||
elemAt(state, pos, *args[0], elem, v);
|
||||
}
|
||||
|
||||
static RegisterPrimOp primop_elemAt({
|
||||
|
@ -3453,10 +3539,12 @@ static RegisterPrimOp primop_all({
|
|||
|
||||
static void prim_genList(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
{
|
||||
auto len = state.forceInt(*args[1], pos, "while evaluating the second argument passed to builtins.genList");
|
||||
auto len_ = state.forceInt(*args[1], pos, "while evaluating the second argument passed to builtins.genList").value;
|
||||
|
||||
if (len < 0)
|
||||
state.error<EvalError>("cannot create list of size %1%", len).atPos(pos).debugThrow();
|
||||
if (len_ < 0)
|
||||
state.error<EvalError>("cannot create list of size %1%", len_).atPos(pos).debugThrow();
|
||||
|
||||
size_t len = size_t(len_);
|
||||
|
||||
// More strict than strictly (!) necessary, but acceptable
|
||||
// as evaluating map without accessing any values makes little sense.
|
||||
|
@ -3713,9 +3801,17 @@ static void prim_add(EvalState & state, const PosIdx pos, Value * * args, Value
|
|||
if (args[0]->type() == nFloat || args[1]->type() == nFloat)
|
||||
v.mkFloat(state.forceFloat(*args[0], pos, "while evaluating the first argument of the addition")
|
||||
+ state.forceFloat(*args[1], pos, "while evaluating the second argument of the addition"));
|
||||
else
|
||||
v.mkInt( state.forceInt(*args[0], pos, "while evaluating the first argument of the addition")
|
||||
+ state.forceInt(*args[1], pos, "while evaluating the second argument of the addition"));
|
||||
else {
|
||||
auto i1 = state.forceInt(*args[0], pos, "while evaluating the first argument of the addition");
|
||||
auto i2 = state.forceInt(*args[1], pos, "while evaluating the second argument of the addition");
|
||||
|
||||
auto result_ = i1 + i2;
|
||||
if (auto result = result_.valueChecked(); result.has_value()) {
|
||||
v.mkInt(*result);
|
||||
} else {
|
||||
state.error<EvalError>("integer overflow in adding %1% + %2%", i1, i2).atPos(pos).debugThrow();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
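The rewritten `prim_add` no longer builds the sum directly; it routes the two forced integers through `NixInt`'s overloaded `+` and only commits the result when `valueChecked()` reports no overflow. As a rough illustration (not the actual `checked::Checked` implementation, whose internals are not shown in this diff), such a wrapper could be built on the compiler's overflow intrinsics:

```c++
#include <cstdint>
#include <optional>

// Hypothetical sketch of a checked 64-bit integer; the member names mirror
// the diff (Inner, value, valueChecked), but the body is an assumption,
// not the real nix::checked::Checked<int64_t>.
struct CheckedInt
{
    using Inner = int64_t;
    Inner value;

    struct Result
    {
        Inner value;
        bool overflowed;
        // Only yields the value when the operation did not overflow.
        std::optional<Inner> valueChecked() const
        {
            if (overflowed) return std::nullopt;
            return value;
        }
    };

    Result operator+(CheckedInt other) const
    {
        Result r{0, false};
        // GCC/Clang intrinsic: returns true when the result wrapped around.
        r.overflowed = __builtin_add_overflow(value, other.value, &r.value);
        return r;
    }
};
```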
static RegisterPrimOp primop_add({
|
||||
|
@ -3734,9 +3830,18 @@ static void prim_sub(EvalState & state, const PosIdx pos, Value * * args, Value
|
|||
if (args[0]->type() == nFloat || args[1]->type() == nFloat)
|
||||
v.mkFloat(state.forceFloat(*args[0], pos, "while evaluating the first argument of the subtraction")
|
||||
- state.forceFloat(*args[1], pos, "while evaluating the second argument of the subtraction"));
|
||||
else
|
||||
v.mkInt( state.forceInt(*args[0], pos, "while evaluating the first argument of the subtraction")
|
||||
- state.forceInt(*args[1], pos, "while evaluating the second argument of the subtraction"));
|
||||
else {
|
||||
auto i1 = state.forceInt(*args[0], pos, "while evaluating the first argument of the subtraction");
|
||||
auto i2 = state.forceInt(*args[1], pos, "while evaluating the second argument of the subtraction");
|
||||
|
||||
auto result_ = i1 - i2;
|
||||
|
||||
if (auto result = result_.valueChecked(); result.has_value()) {
|
||||
v.mkInt(*result);
|
||||
} else {
|
||||
state.error<EvalError>("integer overflow in subtracting %1% - %2%", i1, i2).atPos(pos).debugThrow();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
static RegisterPrimOp primop_sub({
|
||||
|
@ -3755,9 +3860,18 @@ static void prim_mul(EvalState & state, const PosIdx pos, Value * * args, Value
|
|||
if (args[0]->type() == nFloat || args[1]->type() == nFloat)
|
||||
v.mkFloat(state.forceFloat(*args[0], pos, "while evaluating the first argument of the multiplication")
|
||||
* state.forceFloat(*args[1], pos, "while evaluating the second argument of the multiplication"));
|
||||
else
|
||||
v.mkInt( state.forceInt(*args[0], pos, "while evaluating the first argument of the multiplication")
|
||||
* state.forceInt(*args[1], pos, "while evaluating the second argument of the multiplication"));
|
||||
else {
|
||||
auto i1 = state.forceInt(*args[0], pos, "while evaluating the first argument of the multiplication");
|
||||
auto i2 = state.forceInt(*args[1], pos, "while evaluating the second argument of the multiplication");
|
||||
|
||||
auto result_ = i1 * i2;
|
||||
|
||||
if (auto result = result_.valueChecked(); result.has_value()) {
|
||||
v.mkInt(*result);
|
||||
} else {
|
||||
state.error<EvalError>("integer overflow in multiplying %1% * %2%", i1, i2).atPos(pos).debugThrow();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
static RegisterPrimOp primop_mul({
|
||||
|
@ -3784,10 +3898,12 @@ static void prim_div(EvalState & state, const PosIdx pos, Value * * args, Value
|
|||
NixInt i1 = state.forceInt(*args[0], pos, "while evaluating the first operand of the division");
|
||||
NixInt i2 = state.forceInt(*args[1], pos, "while evaluating the second operand of the division");
|
||||
/* Avoid division overflow as it might raise SIGFPE. */
|
||||
if (i1 == std::numeric_limits<NixInt>::min() && i2 == -1)
|
||||
state.error<EvalError>("overflow in integer division").atPos(pos).debugThrow();
|
||||
|
||||
v.mkInt(i1 / i2);
|
||||
auto result_ = i1 / i2;
|
||||
if (auto result = result_.valueChecked(); result.has_value()) {
|
||||
v.mkInt(*result);
|
||||
} else {
|
||||
state.error<EvalError>("integer overflow in dividing %1% / %2%", i1, i2).atPos(pos).debugThrow();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
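Signed division has exactly one overflowing case, `INT64_MIN / -1`, which is also the case that can raise `SIGFPE`; the checked `operator/` used above presumably rejects it just as the old explicit guard did. A small sketch of that check (hypothetical helper, not code from this diff):

```c++
#include <cstdint>
#include <limits>
#include <optional>

// Hypothetical checked signed division: rejects division by zero and the
// single overflowing case INT64_MIN / -1 (which also traps on most CPUs).
std::optional<int64_t> checkedDiv(int64_t a, int64_t b)
{
    if (b == 0)
        return std::nullopt;
    if (a == std::numeric_limits<int64_t>::min() && b == -1)
        return std::nullopt;
    return a / b;
}
```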
|
@ -3802,8 +3918,9 @@ static RegisterPrimOp primop_div({
|
|||
|
||||
static void prim_bitAnd(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
{
|
||||
v.mkInt(state.forceInt(*args[0], pos, "while evaluating the first argument passed to builtins.bitAnd")
|
||||
& state.forceInt(*args[1], pos, "while evaluating the second argument passed to builtins.bitAnd"));
|
||||
auto i1 = state.forceInt(*args[0], pos, "while evaluating the first argument passed to builtins.bitAnd");
|
||||
auto i2 = state.forceInt(*args[1], pos, "while evaluating the second argument passed to builtins.bitAnd");
|
||||
v.mkInt(i1.value & i2.value);
|
||||
}
|
||||
|
||||
static RegisterPrimOp primop_bitAnd({
|
||||
|
@ -3817,8 +3934,10 @@ static RegisterPrimOp primop_bitAnd({
|
|||
|
||||
static void prim_bitOr(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
{
|
||||
v.mkInt(state.forceInt(*args[0], pos, "while evaluating the first argument passed to builtins.bitOr")
|
||||
| state.forceInt(*args[1], pos, "while evaluating the second argument passed to builtins.bitOr"));
|
||||
auto i1 = state.forceInt(*args[0], pos, "while evaluating the first argument passed to builtins.bitOr");
|
||||
auto i2 = state.forceInt(*args[1], pos, "while evaluating the second argument passed to builtins.bitOr");
|
||||
|
||||
v.mkInt(i1.value | i2.value);
|
||||
}
|
||||
|
||||
static RegisterPrimOp primop_bitOr({
|
||||
|
@ -3832,8 +3951,10 @@ static RegisterPrimOp primop_bitOr({
|
|||
|
||||
static void prim_bitXor(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
{
|
||||
v.mkInt(state.forceInt(*args[0], pos, "while evaluating the first argument passed to builtins.bitXor")
|
||||
^ state.forceInt(*args[1], pos, "while evaluating the second argument passed to builtins.bitXor"));
|
||||
auto i1 = state.forceInt(*args[0], pos, "while evaluating the first argument passed to builtins.bitXor");
|
||||
auto i2 = state.forceInt(*args[1], pos, "while evaluating the second argument passed to builtins.bitXor");
|
||||
|
||||
v.mkInt(i1.value ^ i2.value);
|
||||
}
|
||||
|
||||
static RegisterPrimOp primop_bitXor({
|
||||
|
@ -3913,13 +4034,19 @@ static RegisterPrimOp primop_toString({
|
|||
non-negative. */
|
||||
static void prim_substring(EvalState & state, const PosIdx pos, Value * * args, Value & v)
|
||||
{
|
||||
int start = state.forceInt(*args[0], pos, "while evaluating the first argument (the start offset) passed to builtins.substring");
|
||||
NixInt::Inner start = state.forceInt(*args[0], pos, "while evaluating the first argument (the start offset) passed to builtins.substring").value;
|
||||
|
||||
if (start < 0)
|
||||
state.error<EvalError>("negative start position in 'substring'").atPos(pos).debugThrow();
|
||||
|
||||
|
||||
int len = state.forceInt(*args[1], pos, "while evaluating the second argument (the substring length) passed to builtins.substring");
|
||||
NixInt::Inner len = state.forceInt(*args[1], pos, "while evaluating the second argument (the substring length) passed to builtins.substring").value;
|
||||
|
||||
// Negative length may be idiomatically passed to builtins.substring to get
|
||||
// the tail of the string.
|
||||
if (len < 0) {
|
||||
len = std::numeric_limits<NixInt::Inner>::max();
|
||||
}
|
||||
|
||||
// Special-case on empty substring to avoid O(n) strlen
|
||||
// This allows for the use of empty substrings to efficiently capture string context
|
||||
|
@ -3962,7 +4089,7 @@ static void prim_stringLength(EvalState & state, const PosIdx pos, Value * * arg
|
|||
{
|
||||
NixStringContext context;
|
||||
auto s = state.coerceToString(pos, *args[0], context, "while evaluating the argument passed to builtins.stringLength");
|
||||
v.mkInt(s->size());
|
||||
v.mkInt(NixInt::Inner(s->size()));
|
||||
}
|
||||
|
||||
static RegisterPrimOp primop_stringLength({
|
||||
|
@ -4446,7 +4573,8 @@ static void prim_compareVersions(EvalState & state, const PosIdx pos, Value * *
|
|||
{
|
||||
auto version1 = state.forceStringNoCtx(*args[0], pos, "while evaluating the first argument passed to builtins.compareVersions");
|
||||
auto version2 = state.forceStringNoCtx(*args[1], pos, "while evaluating the second argument passed to builtins.compareVersions");
|
||||
v.mkInt(compareVersions(version1, version2));
|
||||
auto result = compareVersions(version1, version2);
|
||||
v.mkInt(result < 0 ? -1 : result > 0 ? 1 : 0);
|
||||
}
|
||||
|
||||
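Since `compareVersions` now returns a `std::strong_ordering` rather than an `int`, the primop maps it back to the `-1`/`0`/`1` integers the Nix language exposes; a `strong_ordering` can only be compared against the literal `0`, which is what the ternary above relies on. A minimal standalone illustration:

```c++
#include <compare>
#include <cstdint>

// Map a std::strong_ordering back to the -1/0/1 convention.
// strong_ordering supports comparison only against the literal 0.
int64_t orderingToInt(std::strong_ordering o)
{
    if (o < 0) return -1;
    if (o > 0) return 1;
    return 0;
}
```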
static RegisterPrimOp primop_compareVersions({
|
||||
|
@ -4729,7 +4857,17 @@ void EvalState::createBaseEnv()
|
|||
addConstant("__nixPath", v, {
|
||||
.type = nList,
|
||||
.doc = R"(
|
||||
The value of the [`nix-path` configuration setting](@docroot@/command-ref/conf-file.md#conf-nix-path): a list of search path entries used to resolve [lookup paths](@docroot@/language/constructs/lookup-path.md).
|
||||
A list of search path entries used to resolve [lookup paths](@docroot@/language/constructs/lookup-path.md).
|
||||
Its value is primarily determined by the [`nix-path` configuration setting](@docroot@/command-ref/conf-file.md#conf-nix-path), which can be
|
||||
- Overridden by the [`NIX_PATH`](@docroot@/command-ref/env-common.md#env-NIX_PATH) environment variable or the `--nix-path` option
|
||||
- Extended by the [`-I` option](@docroot@/command-ref/opt-common.md#opt-I) or `--extra-nix-path`
|
||||
|
||||
> **Example**
|
||||
>
|
||||
> ```bash
|
||||
> $ NIX_PATH= nix-instantiate --eval --expr "builtins.nixPath" -I foo=bar --no-pure-eval
|
||||
> [ { path = "bar"; prefix = "foo"; } ]
|
||||
> ```
|
||||
|
||||
Lookup path expressions are [desugared](https://en.wikipedia.org/wiki/Syntactic_sugar) using this and
|
||||
[`builtins.findFile`](./builtins.html#builtins-findFile):
|
||||
|
|
|
@ -1,6 +1,31 @@
|
|||
/* This is the implementation of the ‘derivation’ builtin function.
|
||||
It's actually a wrapper around the ‘derivationStrict’ primop. */
|
||||
# This is the implementation of the ‘derivation’ builtin function.
|
||||
# It's actually a wrapper around the ‘derivationStrict’ primop.
|
||||
# Note that the following comment will be shown in :doc in the repl, but not in the manual.
|
||||
|
||||
/**
|
||||
Create a derivation.
|
||||
|
||||
# Inputs
|
||||
|
||||
The single argument is an attribute set that describes what to build and how to build it.
|
||||
See https://nix.dev/manual/nix/2.23/language/derivations
|
||||
|
||||
# Output
|
||||
|
||||
The result is an attribute set that describes the derivation.
|
||||
Notably it contains the outputs, which in the context of the Nix language are special strings that refer to the output paths, which may not yet exist.
|
||||
The realisation of these outputs only occurs when needed; for example
|
||||
|
||||
* When `nix-build` or a similar command is run, it realises the outputs that were requested on its command line.
|
||||
See https://nix.dev/manual/nix/2.23/command-ref/nix-build
|
||||
|
||||
* When `import`, `readFile`, `readDir` or some other functions are called, they have to realise the outputs they depend on.
|
||||
This is referred to as "import from derivation".
|
||||
See https://nix.dev/manual/nix/2.23/language/import-from-derivation
|
||||
|
||||
Note that `derivation` is very bare-bones, and provides almost no commands during the build.
|
||||
Most likely, you'll want to use functions like `stdenv.mkDerivation` in Nixpkgs to set up a basic environment.
|
||||
*/
|
||||
drvAttrs @ { outputs ? [ "out" ], ... }:
|
||||
|
||||
let
|
||||
|
|
|
@ -122,9 +122,15 @@ static void fetchTree(
|
|||
}
|
||||
else if (attr.value->type() == nBool)
|
||||
attrs.emplace(state.symbols[attr.name], Explicit<bool>{attr.value->boolean()});
|
||||
else if (attr.value->type() == nInt)
|
||||
attrs.emplace(state.symbols[attr.name], uint64_t(attr.value->integer()));
|
||||
else if (state.symbols[attr.name] == "publicKeys") {
|
||||
else if (attr.value->type() == nInt) {
|
||||
auto intValue = attr.value->integer().value;
|
||||
|
||||
if (intValue < 0) {
|
||||
state.error<EvalError>("negative value given for fetchTree attr %1%: %2%", state.symbols[attr.name], intValue).atPos(pos).debugThrow();
|
||||
}
|
||||
|
||||
attrs.emplace(state.symbols[attr.name], uint64_t(intValue));
|
||||
} else if (state.symbols[attr.name] == "publicKeys") {
|
||||
experimentalFeatureSettings.require(Xp::VerifiedFetches);
|
||||
attrs.emplace(state.symbols[attr.name], printValueAsJSON(state, true, *attr.value, pos, context).dump());
|
||||
}
|
||||
|
@ -383,7 +389,7 @@ static RegisterPrimOp primop_fetchTree({
|
|||
- `"mercurial"`
|
||||
|
||||
*input* can also be a [URL-like reference](@docroot@/command-ref/new-cli/nix3-flake.md#flake-references).
|
||||
The additional input types and the URL-like syntax requires the [`flakes` experimental feature](@docroot@/contributing/experimental-features.md#xp-feature-flakes) to be enabled.
|
||||
The additional input types and the URL-like syntax require the [`flakes` experimental feature](@docroot@/development/experimental-features.md#xp-feature-flakes) to be enabled.
|
||||
|
||||
> **Example**
|
||||
>
|
||||
|
@ -501,7 +507,11 @@ static void fetch(EvalState & state, const PosIdx pos, Value * * args, Value & v
|
|||
// https://github.com/NixOS/nix/issues/4313
|
||||
auto storePath =
|
||||
unpack
|
||||
? fetchToStore(*state.store, fetchers::downloadTarball(*url).accessor, FetchMode::Copy, name)
|
||||
? fetchToStore(
|
||||
*state.store,
|
||||
fetchers::downloadTarball(state.store, state.fetchSettings, *url),
|
||||
FetchMode::Copy,
|
||||
name)
|
||||
: fetchers::downloadFile(state.store, *url, name).storePath;
|
||||
|
||||
if (expectedHash) {
|
||||
|
@ -559,11 +569,11 @@ static RegisterPrimOp primop_fetchTarball({
|
|||
.doc = R"(
|
||||
Download the specified URL, unpack it and return the path of the
|
||||
unpacked tree. The file must be a tape archive (`.tar`) compressed
|
||||
with `gzip`, `bzip2` or `xz`. The top-level path component of the
|
||||
files in the tarball is removed, so it is best if the tarball
|
||||
contains a single directory at top level. The typical use of the
|
||||
function is to obtain external Nix expression dependencies, such as
|
||||
a particular version of Nixpkgs, e.g.
|
||||
with `gzip`, `bzip2` or `xz`. If the tarball consists of a
|
||||
single directory, then the top-level path component of the files
|
||||
in the tarball is removed. The typical use of the function is to
|
||||
obtain external Nix expression dependencies, such as a
|
||||
particular version of Nixpkgs, e.g.
|
||||
|
||||
```nix
|
||||
with import (fetchTarball https://github.com/NixOS/nixpkgs/archive/nixos-14.12.tar.gz) {};
|
||||
|
@ -670,12 +680,12 @@ static RegisterPrimOp primop_fetchGit({
|
|||
|
||||
Whether to check `rev` for a signature matching `publicKey` or `publicKeys`.
|
||||
If `verifyCommit` is enabled, then `fetchGit` cannot use a local repository with uncommitted changes.
|
||||
Requires the [`verified-fetches` experimental feature](@docroot@/contributing/experimental-features.md#xp-feature-verified-fetches).
|
||||
Requires the [`verified-fetches` experimental feature](@docroot@/development/experimental-features.md#xp-feature-verified-fetches).
|
||||
|
||||
- `publicKey`
|
||||
|
||||
The public key against which `rev` is verified if `verifyCommit` is enabled.
|
||||
Requires the [`verified-fetches` experimental feature](@docroot@/contributing/experimental-features.md#xp-feature-verified-fetches).
|
||||
Requires the [`verified-fetches` experimental feature](@docroot@/development/experimental-features.md#xp-feature-verified-fetches).
|
||||
|
||||
- `keytype` (default: `"ssh-ed25519"`)
|
||||
|
||||
|
@ -687,7 +697,7 @@ static RegisterPrimOp primop_fetchGit({
|
|||
- `"ssh-ed25519"`
|
||||
- `"ssh-ed25519-sk"`
|
||||
- `"ssh-rsa"`
|
||||
Requires the [`verified-fetches` experimental feature](@docroot@/contributing/experimental-features.md#xp-feature-verified-fetches).
|
||||
Requires the [`verified-fetches` experimental feature](@docroot@/development/experimental-features.md#xp-feature-verified-fetches).
|
||||
|
||||
- `publicKeys`
|
||||
|
||||
|
@ -701,7 +711,7 @@ static RegisterPrimOp primop_fetchGit({
|
|||
}
|
||||
```
|
||||
|
||||
Requires the [`verified-fetches` experimental feature](@docroot@/contributing/experimental-features.md#xp-feature-verified-fetches).
|
||||
Requires the [`verified-fetches` experimental feature](@docroot@/development/experimental-features.md#xp-feature-verified-fetches).
|
||||
|
||||
|
||||
Here are some examples of how to use `fetchGit`.
|
||||
|
|
|
@ -2,6 +2,7 @@
|
|||
#include "eval-inline.hh"
|
||||
|
||||
#include <sstream>
|
||||
|
||||
#include <toml.hpp>
|
||||
|
||||
namespace nix {
|
||||
|
|
|
@ -94,7 +94,7 @@ void printAmbiguous(
|
|||
break;
|
||||
default:
|
||||
printError("Nix evaluator internal error: printAmbiguous: invalid value type");
|
||||
abort();
|
||||
unreachable();
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -475,7 +475,7 @@ private:
|
|||
else
|
||||
output << "primop";
|
||||
} else {
|
||||
abort();
|
||||
unreachable();
|
||||
}
|
||||
|
||||
output << "»";
|
||||
|
@ -504,7 +504,7 @@ private:
|
|||
if (options.ansiColors)
|
||||
output << ANSI_NORMAL;
|
||||
} else {
|
||||
abort();
|
||||
unreachable();
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -7,6 +7,7 @@
|
|||
|
||||
#include "types.hh"
|
||||
#include "chunked-vector.hh"
|
||||
#include "error.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
|
@ -69,6 +70,8 @@ public:
|
|||
|
||||
auto operator<=>(const Symbol other) const { return id <=> other.id; }
|
||||
bool operator==(const Symbol other) const { return id == other.id; }
|
||||
|
||||
friend class std::hash<Symbol>;
|
||||
};
|
||||
|
||||
/**
|
||||
|
@ -113,7 +116,7 @@ public:
|
|||
SymbolStr operator[](Symbol s) const
|
||||
{
|
||||
if (s.id == 0 || s.id > store.size())
|
||||
abort();
|
||||
unreachable();
|
||||
return SymbolStr(store[s.id - 1]);
|
||||
}
|
||||
|
||||
|
@ -132,3 +135,12 @@ public:
|
|||
};
|
||||
|
||||
}
|
||||
|
||||
template<>
|
||||
struct std::hash<nix::Symbol>
|
||||
{
|
||||
std::size_t operator()(const nix::Symbol & s) const noexcept
|
||||
{
|
||||
return std::hash<decltype(s.id)>{}(s.id);
|
||||
}
|
||||
};
|
||||
|
|
|
@ -22,7 +22,7 @@ json printValueAsJSON(EvalState & state, bool strict,
|
|||
switch (v.type()) {
|
||||
|
||||
case nInt:
|
||||
out = v.integer();
|
||||
out = v.integer().value;
|
||||
break;
|
||||
|
||||
case nBool:
|
||||
|
|
|
@ -8,6 +8,7 @@
|
|||
#include "value/context.hh"
|
||||
#include "source-path.hh"
|
||||
#include "print-options.hh"
|
||||
#include "checked-arithmetic.hh"
|
||||
|
||||
#if HAVE_BOEHMGC
|
||||
#include <gc/gc_allocator.h>
|
||||
|
@ -73,8 +74,8 @@ class EvalState;
|
|||
class XMLWriter;
|
||||
class Printer;
|
||||
|
||||
typedef int64_t NixInt;
|
||||
typedef double NixFloat;
|
||||
using NixInt = checked::Checked<int64_t>;
|
||||
using NixFloat = double;
|
||||
|
||||
/**
|
||||
* External values must descend from ExternalValueBase, so that
|
||||
|
@ -285,7 +286,7 @@ public:
|
|||
if (invalidIsThunk)
|
||||
return nThunk;
|
||||
else
|
||||
abort();
|
||||
unreachable();
|
||||
}
|
||||
|
||||
inline void finishValue(InternalType newType, Payload newPayload)
|
||||
|
@ -304,6 +305,11 @@ public:
|
|||
return internalType != tUninitialized;
|
||||
}
|
||||
|
||||
inline void mkInt(NixInt::Inner n)
|
||||
{
|
||||
mkInt(NixInt{n});
|
||||
}
|
||||
|
||||
inline void mkInt(NixInt n)
|
||||
{
|
||||
finishValue(tInt, { .integer = n });
|
||||
|
@ -325,9 +331,9 @@ public:
|
|||
|
||||
void mkStringMove(const char * s, const NixStringContext & context);
|
||||
|
||||
inline void mkString(const Symbol & s)
|
||||
inline void mkString(const SymbolStr & s)
|
||||
{
|
||||
mkString(((const std::string &) s).c_str());
|
||||
mkString(s.c_str());
|
||||
}
|
||||
|
||||
void mkPath(const SourcePath & path);
|
||||
|
@ -494,11 +500,11 @@ void Value::mkBlackhole()
|
|||
|
||||
#if HAVE_BOEHMGC
|
||||
typedef std::vector<Value *, traceable_allocator<Value *>> ValueVector;
|
||||
typedef std::map<Symbol, Value *, std::less<Symbol>, traceable_allocator<std::pair<const Symbol, Value *>>> ValueMap;
|
||||
typedef std::unordered_map<Symbol, Value *, std::hash<Symbol>, std::equal_to<Symbol>, traceable_allocator<std::pair<const Symbol, Value *>>> ValueMap;
|
||||
typedef std::map<Symbol, ValueVector, std::less<Symbol>, traceable_allocator<std::pair<const Symbol, ValueVector>>> ValueVectorMap;
|
||||
#else
|
||||
typedef std::vector<Value *> ValueVector;
|
||||
typedef std::map<Symbol, Value *> ValueMap;
|
||||
typedef std::unordered_map<Symbol, Value *> ValueMap;
|
||||
typedef std::map<Symbol, ValueVector> ValueVectorMap;
|
||||
#endif
|
||||
|
||||
|
|
|
@ -33,7 +33,7 @@ nlohmann::json attrsToJSON(const Attrs & attrs)
|
|||
json[attr.first] = *v;
|
||||
} else if (auto v = std::get_if<Explicit<bool>>(&attr.second)) {
|
||||
json[attr.first] = v->t;
|
||||
} else abort();
|
||||
} else unreachable();
|
||||
}
|
||||
return json;
|
||||
}
|
||||
|
@ -99,7 +99,7 @@ std::map<std::string, std::string> attrsToQuery(const Attrs & attrs)
|
|||
query.insert_or_assign(attr.first, *v);
|
||||
} else if (auto v = std::get_if<Explicit<bool>>(&attr.second)) {
|
||||
query.insert_or_assign(attr.first, v->t ? "1" : "0");
|
||||
} else abort();
|
||||
} else unreachable();
|
||||
}
|
||||
return query;
|
||||
}
|
||||
|
|
|
@ -126,16 +126,39 @@ Object lookupObject(git_repository * repo, const git_oid & oid, git_object_t typ
|
|||
}
|
||||
|
||||
template<typename T>
|
||||
T peelObject(git_repository * repo, git_object * obj, git_object_t type)
|
||||
T peelObject(git_object * obj, git_object_t type)
|
||||
{
|
||||
T obj2;
|
||||
if (git_object_peel((git_object * *) (typename T::pointer *) Setter(obj2), obj, type)) {
|
||||
auto err = git_error_last();
|
||||
throw Error("peeling Git object '%s': %s", git_object_id(obj), err->message);
|
||||
throw Error("peeling Git object '%s': %s", *git_object_id(obj), err->message);
|
||||
}
|
||||
return obj2;
|
||||
}
|
||||
|
||||
template<typename T>
|
||||
T dupObject(typename T::pointer obj)
|
||||
{
|
||||
T obj2;
|
||||
if (git_object_dup((git_object * *) (typename T::pointer *) Setter(obj2), (git_object *) obj))
|
||||
throw Error("duplicating object '%s': %s", *git_object_id((git_object *) obj), git_error_last()->message);
|
||||
return obj2;
|
||||
}
|
||||
|
||||
/**
|
||||
* Peel the specified object (i.e. follow tag and commit objects) to
|
||||
* either a blob or a tree.
|
||||
*/
|
||||
static Object peelToTreeOrBlob(git_object * obj)
|
||||
{
|
||||
/* git_object_peel() doesn't handle blob objects, so handle those
|
||||
specially. */
|
||||
if (git_object_type(obj) == GIT_OBJECT_BLOB)
|
||||
return dupObject<Object>(obj);
|
||||
else
|
||||
return peelObject<Object>(obj, GIT_OBJECT_TREE);
|
||||
}
|
||||
|
||||
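`peelObject` and `dupObject` wrap the raw libgit2 entry points `git_object_peel` and `git_object_dup` behind the `Setter`-based smart pointers used in this file. Stripped of those wrappers, the peeling step looks roughly like this (illustrative only; error handling and ownership are simplified):

```c++
#include <git2.h>
#include <stdexcept>

// Follow tag/commit objects until a tree is reached, as git_object_peel does.
// The caller owns the returned tree and must free it with git_tree_free().
git_tree * peelToTree(git_object * obj)
{
    git_object * peeled = nullptr;
    if (git_object_peel(&peeled, obj, GIT_OBJECT_TREE))
        throw std::runtime_error(git_error_last()->message);
    return reinterpret_cast<git_tree *>(peeled);
}
```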
struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
|
||||
{
|
||||
/** Location of the repository on disk. */
|
||||
|
@ -166,7 +189,7 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
|
|||
std::unordered_set<git_oid> done;
|
||||
std::queue<Commit> todo;
|
||||
|
||||
todo.push(peelObject<Commit>(*this, lookupObject(*this, hashToOID(rev)).get(), GIT_OBJECT_COMMIT));
|
||||
todo.push(peelObject<Commit>(lookupObject(*this, hashToOID(rev)).get(), GIT_OBJECT_COMMIT));
|
||||
|
||||
while (auto commit = pop(todo)) {
|
||||
if (!done.insert(*git_commit_id(commit->get())).second) continue;
|
||||
|
@ -184,7 +207,7 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
|
|||
|
||||
uint64_t getLastModified(const Hash & rev) override
|
||||
{
|
||||
auto commit = peelObject<Commit>(*this, lookupObject(*this, hashToOID(rev)).get(), GIT_OBJECT_COMMIT);
|
||||
auto commit = peelObject<Commit>(lookupObject(*this, hashToOID(rev)).get(), GIT_OBJECT_COMMIT);
|
||||
|
||||
return git_commit_time(commit.get());
|
||||
}
|
||||
|
@ -463,6 +486,23 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
|
|||
|
||||
return narHash;
|
||||
}
|
||||
|
||||
Hash dereferenceSingletonDirectory(const Hash & oid_) override
|
||||
{
|
||||
auto oid = hashToOID(oid_);
|
||||
|
||||
auto _tree = lookupObject(*this, oid, GIT_OBJECT_TREE);
|
||||
auto tree = (const git_tree *) &*_tree;
|
||||
|
||||
if (git_tree_entrycount(tree) == 1) {
|
||||
auto entry = git_tree_entry_byindex(tree, 0);
|
||||
auto mode = git_tree_entry_filemode(entry);
|
||||
if (mode == GIT_FILEMODE_TREE)
|
||||
oid = *git_tree_entry_id(entry);
|
||||
}
|
||||
|
||||
return toHash(oid);
|
||||
}
|
||||
};
|
||||
|
||||
ref<GitRepo> GitRepo::openRepo(const std::filesystem::path & path, bool create, bool bare)
|
||||
|
@ -476,11 +516,11 @@ ref<GitRepo> GitRepo::openRepo(const std::filesystem::path & path, bool create,
|
|||
struct GitSourceAccessor : SourceAccessor
|
||||
{
|
||||
ref<GitRepoImpl> repo;
|
||||
Tree root;
|
||||
Object root;
|
||||
|
||||
GitSourceAccessor(ref<GitRepoImpl> repo_, const Hash & rev)
|
||||
: repo(repo_)
|
||||
, root(peelObject<Tree>(*repo, lookupObject(*repo, hashToOID(rev)).get(), GIT_OBJECT_TREE))
|
||||
, root(peelToTreeOrBlob(lookupObject(*repo, hashToOID(rev)).get()))
|
||||
{
|
||||
}
|
||||
|
||||
|
@ -506,7 +546,7 @@ struct GitSourceAccessor : SourceAccessor
|
|||
std::optional<Stat> maybeLstat(const CanonPath & path) override
|
||||
{
|
||||
if (path.isRoot())
|
||||
return Stat { .type = tDirectory };
|
||||
return Stat { .type = git_object_type(root.get()) == GIT_OBJECT_TREE ? tDirectory : tRegular };
|
||||
|
||||
auto entry = lookup(path);
|
||||
if (!entry)
|
||||
|
@ -616,10 +656,10 @@ struct GitSourceAccessor : SourceAccessor
|
|||
std::optional<Tree> lookupTree(const CanonPath & path)
|
||||
{
|
||||
if (path.isRoot()) {
|
||||
Tree tree;
|
||||
if (git_tree_dup(Setter(tree), root.get()))
|
||||
throw Error("duplicating directory '%s': %s", showPath(path), git_error_last()->message);
|
||||
return tree;
|
||||
if (git_object_type(root.get()) == GIT_OBJECT_TREE)
|
||||
return dupObject<Tree>((git_tree *) &*root);
|
||||
else
|
||||
return std::nullopt;
|
||||
}
|
||||
|
||||
auto entry = lookup(path);
|
||||
|
@ -646,10 +686,10 @@ struct GitSourceAccessor : SourceAccessor
|
|||
std::variant<Tree, Submodule> getTree(const CanonPath & path)
|
||||
{
|
||||
if (path.isRoot()) {
|
||||
Tree tree;
|
||||
if (git_tree_dup(Setter(tree), root.get()))
|
||||
throw Error("duplicating directory '%s': %s", showPath(path), git_error_last()->message);
|
||||
return tree;
|
||||
if (git_object_type(root.get()) == GIT_OBJECT_TREE)
|
||||
return dupObject<Tree>((git_tree *) &*root);
|
||||
else
|
||||
throw Error("Git root object '%s' is not a directory", *git_object_id(root.get()));
|
||||
}
|
||||
|
||||
auto entry = need(path);
|
||||
|
@ -669,6 +709,9 @@ struct GitSourceAccessor : SourceAccessor
|
|||
|
||||
Blob getBlob(const CanonPath & path, bool expectSymlink)
|
||||
{
|
||||
if (!expectSymlink && git_object_type(root.get()) == GIT_OBJECT_BLOB)
|
||||
return dupObject<Blob>((git_blob *) &*root);
|
||||
|
||||
auto notExpected = [&]()
|
||||
{
|
||||
throw Error(
|
||||
|
@ -782,8 +825,6 @@ struct GitFileSystemObjectSinkImpl : GitFileSystemObjectSink
|
|||
|
||||
std::vector<PendingDir> pendingDirs;
|
||||
|
||||
size_t componentsToStrip = 1;
|
||||
|
||||
void pushBuilder(std::string name)
|
||||
{
|
||||
git_treebuilder * b;
|
||||
|
@ -839,9 +880,6 @@ struct GitFileSystemObjectSinkImpl : GitFileSystemObjectSink
|
|||
{
|
||||
std::span<const std::string> pathComponents2{pathComponents};
|
||||
|
||||
if (pathComponents2.size() <= componentsToStrip) return false;
|
||||
pathComponents2 = pathComponents2.subspan(componentsToStrip);
|
||||
|
||||
updateBuilders(
|
||||
isDir
|
||||
? pathComponents2
|
||||
|
@ -964,7 +1002,8 @@ struct GitFileSystemObjectSinkImpl : GitFileSystemObjectSink
|
|||
git_tree_entry_filemode(entry));
|
||||
}
|
||||
|
||||
Hash sync() override {
|
||||
Hash sync() override
|
||||
{
|
||||
updateBuilders({});
|
||||
|
||||
auto [oid, _name] = popBuilder();
|
||||
|
|
|
@ -98,6 +98,13 @@ struct GitRepo
|
|||
* serialisation. This is memoised on-disk.
|
||||
*/
|
||||
virtual Hash treeHashToNarHash(const Hash & treeHash) = 0;
|
||||
|
||||
/**
|
||||
* If the specified Git object is a directory with a single entry
|
||||
* that is a directory, return the ID of that object.
|
||||
* Otherwise, return the passed ID unchanged.
|
||||
*/
|
||||
virtual Hash dereferenceSingletonDirectory(const Hash & oid) = 0;
|
||||
};
|
||||
|
||||
ref<GitRepo> getTarballCache();
|
||||
|
|
|
@ -254,12 +254,18 @@ struct GitArchiveInputScheme : InputScheme
|
|||
getFileTransfer()->download(std::move(req), sink);
|
||||
});
|
||||
|
||||
auto act = std::make_unique<Activity>(*logger, lvlInfo, actUnknown,
|
||||
fmt("unpacking '%s' into the Git cache", input.to_string()));
|
||||
|
||||
TarArchive archive { *source };
|
||||
auto parseSink = getTarballCache()->getFileSystemObjectSink();
|
||||
auto tarballCache = getTarballCache();
|
||||
auto parseSink = tarballCache->getFileSystemObjectSink();
|
||||
auto lastModified = unpackTarfileToSink(archive, *parseSink);
|
||||
|
||||
act.reset();
|
||||
|
||||
TarballInfo tarballInfo {
|
||||
.treeHash = parseSink->sync(),
|
||||
.treeHash = tarballCache->dereferenceSingletonDirectory(parseSink->sync()),
|
||||
.lastModified = lastModified
|
||||
};
|
||||
|
||||
|
|
|
@ -67,14 +67,8 @@ mkMesonDerivation (finalAttrs: {
|
|||
LDFLAGS = "-fuse-ld=gold";
|
||||
};
|
||||
|
||||
enableParallelBuilding = true;
|
||||
|
||||
separateDebugInfo = !stdenv.hostPlatform.isStatic;
|
||||
|
||||
# TODO `releaseTools.coverageAnalysis` in Nixpkgs needs to be updated
|
||||
# to work with `strictDeps`.
|
||||
strictDeps = true;
|
||||
|
||||
hardeningDisable = lib.optional stdenv.hostPlatform.isStatic "pie";
|
||||
|
||||
meta = {
|
||||
|
|
|
@ -102,7 +102,7 @@ DownloadFileResult downloadFile(
|
|||
};
|
||||
}
|
||||
|
||||
DownloadTarballResult downloadTarball(
|
||||
static DownloadTarballResult downloadTarball_(
|
||||
const std::string & url,
|
||||
const Headers & headers)
|
||||
{
|
||||
|
@ -143,6 +143,9 @@ DownloadTarballResult downloadTarball(
|
|||
|
||||
// TODO: fall back to cached value if download fails.
|
||||
|
||||
auto act = std::make_unique<Activity>(*logger, lvlInfo, actUnknown,
|
||||
fmt("unpacking '%s' into the Git cache", url));
|
||||
|
||||
AutoDelete cleanupTemp;
|
||||
|
||||
/* Note: if the download is cached, `importTarball()` will receive
|
||||
|
@ -164,9 +167,12 @@ DownloadTarballResult downloadTarball(
|
|||
TarArchive{path};
|
||||
})
|
||||
: TarArchive{*source};
|
||||
auto parseSink = getTarballCache()->getFileSystemObjectSink();
|
||||
auto tarballCache = getTarballCache();
|
||||
auto parseSink = tarballCache->getFileSystemObjectSink();
|
||||
auto lastModified = unpackTarfileToSink(archive, *parseSink);
|
||||
|
||||
act.reset();
|
||||
|
||||
auto res(_res->lock());
|
||||
|
||||
Attrs infoAttrs;
|
||||
|
@ -177,7 +183,8 @@ DownloadTarballResult downloadTarball(
|
|||
infoAttrs = cached->value;
|
||||
} else {
|
||||
infoAttrs.insert_or_assign("etag", res->etag);
|
||||
infoAttrs.insert_or_assign("treeHash", parseSink->sync().gitRev());
|
||||
infoAttrs.insert_or_assign("treeHash",
|
||||
tarballCache->dereferenceSingletonDirectory(parseSink->sync()).gitRev());
|
||||
infoAttrs.insert_or_assign("lastModified", uint64_t(lastModified));
|
||||
if (res->immutableUrl)
|
||||
infoAttrs.insert_or_assign("immutableUrl", *res->immutableUrl);
|
||||
|
@ -195,6 +202,22 @@ DownloadTarballResult downloadTarball(
|
|||
return attrsToResult(infoAttrs);
|
||||
}
|
||||
|
||||
ref<SourceAccessor> downloadTarball(
|
||||
ref<Store> store,
|
||||
const Settings & settings,
|
||||
const std::string & url)
|
||||
{
|
||||
/* Go through Input::getAccessor() to ensure that the resulting
|
||||
accessor has a fingerprint. */
|
||||
fetchers::Attrs attrs;
|
||||
attrs.insert_or_assign("type", "tarball");
|
||||
attrs.insert_or_assign("url", url);
|
||||
|
||||
auto input = Input::fromAttrs(settings, std::move(attrs));
|
||||
|
||||
return input.getAccessor(store).first;
|
||||
}
|
||||
|
||||
// An input scheme corresponding to a curl-downloadable resource.
|
||||
struct CurlInputScheme : InputScheme
|
||||
{
|
||||
|
@ -346,7 +369,7 @@ struct TarballInputScheme : CurlInputScheme
|
|||
{
|
||||
auto input(_input);
|
||||
|
||||
auto result = downloadTarball(getStrAttr(input.attrs, "url"), {});
|
||||
auto result = downloadTarball_(getStrAttr(input.attrs, "url"), {});
|
||||
|
||||
result.accessor->setPathDisplay("«" + input.to_string() + "»");
|
||||
|
||||
|
|
|
@ -14,6 +14,8 @@ struct SourceAccessor;
|
|||
|
||||
namespace nix::fetchers {
|
||||
|
||||
struct Settings;
|
||||
|
||||
struct DownloadFileResult
|
||||
{
|
||||
StorePath storePath;
|
||||
|
@ -40,8 +42,9 @@ struct DownloadTarballResult
|
|||
* Download and import a tarball into the Git cache. The result is the
|
||||
* Git tree hash of the root directory.
|
||||
*/
|
||||
DownloadTarballResult downloadTarball(
|
||||
const std::string & url,
|
||||
const Headers & headers = {});
|
||||
ref<SourceAccessor> downloadTarball(
|
||||
ref<Store> store,
|
||||
const Settings & settings,
|
||||
const std::string & url);
|
||||
|
||||
}
|
||||
|
|
|
@ -140,9 +140,16 @@ static FlakeInput parseFlakeInput(EvalState & state,
|
|||
case nBool:
|
||||
attrs.emplace(state.symbols[attr.name], Explicit<bool> { attr.value->boolean() });
|
||||
break;
|
||||
case nInt:
|
||||
attrs.emplace(state.symbols[attr.name], (long unsigned int) attr.value->integer());
|
||||
case nInt: {
|
||||
auto intValue = attr.value->integer().value;
|
||||
|
||||
if (intValue < 0) {
|
||||
state.error<EvalError>("negative value given for flake input attribute %1%: %2%", state.symbols[attr.name], intValue).debugThrow();
|
||||
}
|
||||
|
||||
attrs.emplace(state.symbols[attr.name], uint64_t(intValue));
|
||||
break;
|
||||
}
|
||||
default:
|
||||
if (attr.name == state.symbols.create("publicKeys")) {
|
||||
experimentalFeatureSettings.require(Xp::VerifiedFetches);
|
||||
|
@ -272,7 +279,7 @@ static Flake readFlake(
|
|||
else if (setting.value->type() == nInt)
|
||||
flake.config.settings.emplace(
|
||||
state.symbols[setting.name],
|
||||
state.forceInt(*setting.value, setting.pos, ""));
|
||||
state.forceInt(*setting.value, setting.pos, "").value);
|
||||
else if (setting.value->type() == nBool)
|
||||
flake.config.settings.emplace(
|
||||
state.symbols[setting.name],
|
||||
|
@ -904,8 +911,13 @@ static void prim_flakeRefToString(
|
|||
for (const auto & attr : *args[0]->attrs()) {
|
||||
auto t = attr.value->type();
|
||||
if (t == nInt) {
|
||||
attrs.emplace(state.symbols[attr.name],
|
||||
(uint64_t) attr.value->integer());
|
||||
auto intValue = attr.value->integer().value;
|
||||
|
||||
if (intValue < 0) {
|
||||
state.error<EvalError>("negative value given for flake ref attr %1%: %2%", state.symbols[attr.name], intValue).atPos(pos).debugThrow();
|
||||
}
|
||||
|
||||
attrs.emplace(state.symbols[attr.name], uint64_t(intValue));
|
||||
} else if (t == nBool) {
|
||||
attrs.emplace(state.symbols[attr.name],
|
||||
Explicit<bool> { attr.value->boolean() });
|
||||
|
|
10
src/libflake/flake/nix-flake.pc.in
Normal file
|
@ -0,0 +1,10 @@
|
|||
prefix=@prefix@
|
||||
libdir=@libdir@
|
||||
includedir=@includedir@
|
||||
|
||||
Name: Nix
|
||||
Description: Nix Package Manager
|
||||
Version: @PACKAGE_VERSION@
|
||||
Requires: nix-util nix-store nix-expr
|
||||
Libs: -L${libdir} -lnixflake
|
||||
Cflags: -I${includedir}/nix -std=c++2a
|
|
@ -15,3 +15,8 @@ libflake_CXXFLAGS += $(INCLUDE_libutil) $(INCLUDE_libstore) $(INCLUDE_libfetcher
|
|||
libflake_LDFLAGS += $(THREAD_LDFLAGS)
|
||||
|
||||
libflake_LIBS = libutil libstore libfetchers libexpr
|
||||
|
||||
$(eval $(call install-file-in, $(buildprefix)$(d)/flake/nix-flake.pc, $(libdir)/pkgconfig, 0644))
|
||||
|
||||
$(foreach i, $(wildcard src/libflake/flake/*.hh), \
|
||||
$(eval $(call install-file-in, $(i), $(includedir)/nix/flake, 0644)))
|
||||
|
|
|
@ -67,14 +67,8 @@ mkMesonDerivation (finalAttrs: {
|
|||
LDFLAGS = "-fuse-ld=gold";
|
||||
};
|
||||
|
||||
enableParallelBuilding = true;
|
||||
|
||||
separateDebugInfo = !stdenv.hostPlatform.isStatic;
|
||||
|
||||
# TODO `releaseTools.coverageAnalysis` in Nixpkgs needs to be updated
|
||||
# to work with `strictDeps`.
|
||||
strictDeps = true;
|
||||
|
||||
hardeningDisable = lib.optional stdenv.hostPlatform.isStatic "pie";
|
||||
|
||||
meta = {
|
||||
|
|
|
@ -68,12 +68,8 @@ mkMesonDerivation (finalAttrs: {
|
|||
LDFLAGS = "-fuse-ld=gold";
|
||||
};
|
||||
|
||||
enableParallelBuilding = true;
|
||||
|
||||
separateDebugInfo = !stdenv.hostPlatform.isStatic;
|
||||
|
||||
strictDeps = true;
|
||||
|
||||
hardeningDisable = lib.optional stdenv.hostPlatform.isStatic "pie";
|
||||
|
||||
meta = {
|
||||
|
|
|
@ -36,7 +36,7 @@ Logger * makeDefaultLogger() {
|
|||
return logger;
|
||||
}
|
||||
default:
|
||||
abort();
|
||||
unreachable();
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -62,14 +62,8 @@ mkMesonDerivation (finalAttrs: {
|
|||
LDFLAGS = "-fuse-ld=gold";
|
||||
};
|
||||
|
||||
enableParallelBuilding = true;
|
||||
|
||||
separateDebugInfo = !stdenv.hostPlatform.isStatic;
|
||||
|
||||
# TODO `releaseTools.coverageAnalysis` in Nixpkgs needs to be updated
|
||||
# to work with `strictDeps`.
|
||||
strictDeps = true;
|
||||
|
||||
hardeningDisable = lib.optional stdenv.hostPlatform.isStatic "pie";
|
||||
|
||||
meta = {
|
||||
|
|
|
@ -64,12 +64,8 @@ mkMesonDerivation (finalAttrs: {
|
|||
LDFLAGS = "-fuse-ld=gold";
|
||||
};
|
||||
|
||||
enableParallelBuilding = true;
|
||||
|
||||
separateDebugInfo = !stdenv.hostPlatform.isStatic;
|
||||
|
||||
strictDeps = true;
|
||||
|
||||
hardeningDisable = lib.optional stdenv.hostPlatform.isStatic "pie";
|
||||
|
||||
meta = {
|
||||
|
|
|
@ -36,7 +36,7 @@ public:
|
|||
Co init() override;
|
||||
Co realisationFetched(std::shared_ptr<const Realisation> outputInfo, nix::ref<nix::Store> sub);
|
||||
|
||||
void timedOut(Error && ex) override { abort(); };
|
||||
void timedOut(Error && ex) override { unreachable(); };
|
||||
|
||||
std::string key() override;
|
||||
|
||||
|
|
|
@ -400,12 +400,12 @@ public:
|
|||
|
||||
virtual void handleChildOutput(Descriptor fd, std::string_view data)
|
||||
{
|
||||
abort();
|
||||
unreachable();
|
||||
}
|
||||
|
||||
virtual void handleEOF(Descriptor fd)
|
||||
{
|
||||
abort();
|
||||
unreachable();
|
||||
}
|
||||
|
||||
void trace(std::string_view s);
|
||||
|
|
|
@ -145,8 +145,10 @@ Goal::Co PathSubstitutionGoal::init()
|
|||
/* None left. Terminate this goal and let someone else deal
|
||||
with it. */
|
||||
|
||||
worker.failedSubstitutions++;
|
||||
worker.updateProgress();
|
||||
if (substituterFailed) {
|
||||
worker.failedSubstitutions++;
|
||||
worker.updateProgress();
|
||||
}
|
||||
|
||||
/* Hack: don't indicate failure if there were no substituters.
|
||||
In that case the calling derivation should just do a
|
||||
|
@ -158,7 +160,7 @@ Goal::Co PathSubstitutionGoal::init()
|
|||
}
|
||||
|
||||
|
||||
Goal::Co PathSubstitutionGoal::tryToRun(StorePath subPath, nix::ref<Store> sub, std::shared_ptr<const ValidPathInfo> info, bool& substituterFailed)
|
||||
Goal::Co PathSubstitutionGoal::tryToRun(StorePath subPath, nix::ref<Store> sub, std::shared_ptr<const ValidPathInfo> info, bool & substituterFailed)
|
||||
{
|
||||
trace("all references realised");
|
||||
|
||||
|
|
|
@ -50,7 +50,7 @@ public:
|
|||
PathSubstitutionGoal(const StorePath & storePath, Worker & worker, RepairFlag repair = NoRepair, std::optional<ContentAddress> ca = std::nullopt);
|
||||
~PathSubstitutionGoal();
|
||||
|
||||
void timedOut(Error && ex) override { abort(); };
|
||||
void timedOut(Error && ex) override { unreachable(); };
|
||||
|
||||
/**
|
||||
* We prepend "a$" to the key name to ensure substitution goals
|
||||
|
@ -66,7 +66,7 @@ public:
|
|||
*/
|
||||
Co init() override;
|
||||
Co gotInfo();
|
||||
Co tryToRun(StorePath subPath, nix::ref<Store> sub, std::shared_ptr<const ValidPathInfo> info, bool& substituterFailed);
|
||||
Co tryToRun(StorePath subPath, nix::ref<Store> sub, std::shared_ptr<const ValidPathInfo> info, bool & substituterFailed);
|
||||
Co finished();
|
||||
|
||||
/**
|
||||
|
|
|
@ -216,7 +216,7 @@ void Worker::childStarted(GoalPtr goal, const std::set<MuxablePipePollState::Com
|
|||
nrLocalBuilds++;
|
||||
break;
|
||||
default:
|
||||
abort();
|
||||
unreachable();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -239,7 +239,7 @@ void Worker::childTerminated(Goal * goal, bool wakeSleepers)
|
|||
nrLocalBuilds--;
|
||||
break;
|
||||
default:
|
||||
abort();
|
||||
unreachable();
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -33,7 +33,7 @@ Sink & operator << (Sink & sink, const Logger::Fields & fields)
|
|||
sink << f.i;
|
||||
else if (f.type == Logger::Field::tString)
|
||||
sink << f.s;
|
||||
else abort();
|
||||
else unreachable();
|
||||
}
|
||||
return sink;
|
||||
}
|
||||
|
@ -167,7 +167,7 @@ struct TunnelSink : Sink
|
|||
{
|
||||
Sink & to;
|
||||
TunnelSink(Sink & to) : to(to) { }
|
||||
void operator () (std::string_view data)
|
||||
void operator () (std::string_view data) override
|
||||
{
|
||||
to << STDERR_WRITE;
|
||||
writeString(data, to);
|
||||
|
@ -1025,19 +1025,20 @@ void processConnection(
|
|||
#endif
|
||||
|
||||
/* Exchange the greeting. */
|
||||
WorkerProto::Version clientVersion =
|
||||
auto [protoVersion, features] =
|
||||
WorkerProto::BasicServerConnection::handshake(
|
||||
to, from, PROTOCOL_VERSION);
|
||||
to, from, PROTOCOL_VERSION, WorkerProto::allFeatures);
|
||||
|
||||
if (clientVersion < 0x10a)
|
||||
if (protoVersion < 0x10a)
|
||||
throw Error("the Nix client version is too old");
|
||||
|
||||
WorkerProto::BasicServerConnection conn;
|
||||
conn.to = std::move(to);
|
||||
conn.from = std::move(from);
|
||||
conn.protoVersion = clientVersion;
|
||||
conn.protoVersion = protoVersion;
|
||||
conn.features = features;
|
||||
|
||||
auto tunnelLogger = new TunnelLogger(conn.to, clientVersion);
|
||||
auto tunnelLogger = new TunnelLogger(conn.to, protoVersion);
|
||||
auto prevLogger = nix::logger;
|
||||
// FIXME
|
||||
if (!recursive)
|
||||
|
|
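The daemon-side handshake now returns both the negotiated protocol version and the set of agreed-upon features, and the connection records both. The precise wire format is not shown in this hunk; conceptually the negotiation is a minimum over versions and an intersection over feature sets, along these lines (hypothetical sketch, not the actual `WorkerProto` framing):

```c++
#include <algorithm>
#include <cstdint>
#include <iterator>
#include <set>
#include <string>
#include <utility>

// Hypothetical negotiation step: the effective protocol version is the
// smaller of the two sides' versions, and the effective feature set is
// the intersection of what both sides advertise.
std::pair<uint64_t, std::set<std::string>> negotiate(
    uint64_t ourVersion, const std::set<std::string> & ourFeatures,
    uint64_t theirVersion, const std::set<std::string> & theirFeatures)
{
    std::set<std::string> common;
    std::set_intersection(
        ourFeatures.begin(), ourFeatures.end(),
        theirFeatures.begin(), theirFeatures.end(),
        std::inserter(common, common.end()));
    return {std::min(ourVersion, theirVersion), std::move(common)};
}
```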
|
@ -54,6 +54,8 @@ struct curlFileTransfer : public FileTransfer
|
|||
bool done = false; // whether either the success or failure function has been called
|
||||
Callback<FileTransferResult> callback;
|
||||
CURL * req = 0;
|
||||
// buffer to accompany the `req` above
|
||||
char errbuf[CURL_ERROR_SIZE];
|
||||
bool active = false; // whether the handle has been added to the multi object
|
||||
std::string statusMsg;
|
||||
|
||||
|
@ -71,7 +73,10 @@ struct curlFileTransfer : public FileTransfer
|
|||
|
||||
curl_off_t writtenToSink = 0;
|
||||
|
||||
std::chrono::steady_clock::time_point startTime = std::chrono::steady_clock::now();
|
||||
|
||||
inline static const std::set<long> successfulStatuses {200, 201, 204, 206, 304, 0 /* other protocol */};
|
||||
|
||||
/* Get the HTTP status code, or 0 for other protocols. */
|
||||
long getHTTPStatus()
|
||||
{
|
||||
|
@ -367,16 +372,23 @@ struct curlFileTransfer : public FileTransfer
|
|||
if (writtenToSink)
|
||||
curl_easy_setopt(req, CURLOPT_RESUME_FROM_LARGE, writtenToSink);
|
||||
|
||||
curl_easy_setopt(req, CURLOPT_ERRORBUFFER, errbuf);
|
||||
errbuf[0] = 0;
|
||||
|
||||
result.data.clear();
|
||||
result.bodySize = 0;
|
||||
}
|
||||
|
||||
void finish(CURLcode code)
|
||||
{
|
||||
auto finishTime = std::chrono::steady_clock::now();
|
||||
|
||||
auto httpStatus = getHTTPStatus();
|
||||
|
||||
debug("finished %s of '%s'; curl status = %d, HTTP status = %d, body = %d bytes",
|
||||
request.verb(), request.uri, code, httpStatus, result.bodySize);
|
||||
debug("finished %s of '%s'; curl status = %d, HTTP status = %d, body = %d bytes, duration = %.2f s",
|
||||
request.verb(), request.uri, code, httpStatus, result.bodySize,
|
||||
std::chrono::duration_cast<std::chrono::milliseconds>(finishTime - startTime).count() / 1000.0f
|
||||
);
|
||||
|
||||
appendCurrentUrl();
|
||||
|
||||
|
@ -477,8 +489,8 @@ struct curlFileTransfer : public FileTransfer
|
|||
code == CURLE_OK ? "" : fmt(" (curl error: %s)", curl_easy_strerror(code)))
|
||||
: FileTransferError(err,
|
||||
std::move(response),
|
||||
"unable to %s '%s': %s (%d)",
|
||||
request.verb(), request.uri, curl_easy_strerror(code), code);
|
||||
"unable to %s '%s': %s (%d) %s",
|
||||
request.verb(), request.uri, curl_easy_strerror(code), code, errbuf);
|
||||
|
||||
/* If this is a transient error, then maybe retry the
|
||||
download after a while. If we're writing to a
|
||||
|
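The new `errbuf` member is registered with `CURLOPT_ERRORBUFFER` and cleared before each attempt, so that when a transfer fails, the human-readable detail curl leaves there can be appended to the `FileTransferError` message. In isolation, the curl side of that pattern looks like this (standalone sketch, not the transfer-item code itself; the URL is a placeholder):

```c++
#include <curl/curl.h>
#include <cstdio>

int main()
{
    CURL * req = curl_easy_init();
    if (!req) return 1;

    // CURLOPT_ERRORBUFFER must point at a buffer of at least CURL_ERROR_SIZE
    // bytes; clear it before the transfer so stale text is never reported.
    char errbuf[CURL_ERROR_SIZE];
    errbuf[0] = '\0';
    curl_easy_setopt(req, CURLOPT_ERRORBUFFER, errbuf);
    curl_easy_setopt(req, CURLOPT_URL, "https://example.invalid/");

    CURLcode code = curl_easy_perform(req);
    if (code != CURLE_OK)
        // Fall back to the generic message if curl left the buffer empty.
        std::fprintf(stderr, "transfer failed: %s (%s)\n",
            errbuf[0] ? errbuf : curl_easy_strerror(code),
            curl_easy_strerror(code));

    curl_easy_cleanup(req);
    return code == CURLE_OK ? 0 : 1;
}
```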
@ -851,8 +863,10 @@ void FileTransfer::download(
|
|||
buffer). We don't wait forever to prevent stalling the
|
||||
download thread. (Hopefully sleeping will throttle the
|
||||
sender.) */
|
||||
if (state->data.size() > 1024 * 1024) {
|
||||
if (state->data.size() > fileTransferSettings.downloadBufferSize) {
|
||||
debug("download buffer is full; going to sleep");
|
||||
static bool haveWarned = false;
|
||||
warnOnce(haveWarned, "download buffer is full; consider increasing the 'download-buffer-size' setting");
|
||||
state.wait_for(state->request, std::chrono::seconds(10));
|
||||
}
|
||||
|
||||
|
|
|
@ -47,6 +47,12 @@ struct FileTransferSettings : Config
|
|||
|
||||
Setting<unsigned int> tries{this, 5, "download-attempts",
|
||||
"How often Nix will attempt to download a file before giving up."};
|
||||
|
||||
Setting<size_t> downloadBufferSize{this, 64 * 1024 * 1024, "download-buffer-size",
|
||||
R"(
|
||||
The size of Nix's internal download buffer in bytes during `curl` transfers. If the data is
|
||||
not processed quickly enough and the buffer fills up, the download may stall.
|
||||
)"};
|
||||
};
|
||||
|
||||
extern FileTransferSettings fileTransferSettings;
|
||||
|
|
|
@ -559,7 +559,7 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results)
|
|||
non-blocking flag from the server socket, so
|
||||
explicitly make it blocking. */
|
||||
if (fcntl(fdClient.get(), F_SETFL, fcntl(fdClient.get(), F_GETFL) & ~O_NONBLOCK) == -1)
|
||||
abort();
|
||||
panic("Could not set non-blocking flag on client socket");
|
||||
|
||||
while (true) {
|
||||
try {
|
||||
|
@ -891,7 +891,7 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results)
|
|||
|
||||
void LocalStore::autoGC(bool sync)
|
||||
{
|
||||
#ifdef HAVE_STATVFS
|
||||
#if HAVE_STATVFS
|
||||
static auto fakeFreeSpaceFile = getEnv("_NIX_TEST_FREE_SPACE_FILE");
|
||||
|
||||
auto getAvail = [this]() -> uint64_t {
|
||||
|
|
|
@ -64,7 +64,6 @@ Settings::Settings()
|
|||
, nixStateDir(canonPath(getEnvNonEmpty("NIX_STATE_DIR").value_or(NIX_STATE_DIR)))
|
||||
, nixConfDir(canonPath(getEnvNonEmpty("NIX_CONF_DIR").value_or(NIX_CONF_DIR)))
|
||||
, nixUserConfFiles(getUserConfigFiles())
|
||||
, nixBinDir(canonPath(getEnvNonEmpty("NIX_BIN_DIR").value_or(NIX_BIN_DIR)))
|
||||
, nixManDir(canonPath(NIX_MAN_DIR))
|
||||
, nixDaemonSocketFile(canonPath(getEnvNonEmpty("NIX_DAEMON_SOCKET_PATH").value_or(nixStateDir + DEFAULT_SOCKET_PATH)))
|
||||
{
|
||||
|
@ -95,34 +94,6 @@ Settings::Settings()
|
|||
sandboxPaths = tokenizeString<StringSet>("/System/Library/Frameworks /System/Library/PrivateFrameworks /bin/sh /bin/bash /private/tmp /private/var/tmp /usr/lib");
|
||||
allowedImpureHostPrefixes = tokenizeString<StringSet>("/System/Library /usr/lib /dev /bin/sh");
|
||||
#endif
|
||||
|
||||
/* Set the build hook location
|
||||
|
||||
For builds we perform a self-invocation, so Nix has to be self-aware.
|
||||
That is, it has to know where it is installed. We don't think it's sentient.
|
||||
|
||||
Normally, nix is installed according to `nixBinDir`, which is set at compile time,
|
||||
but can be overridden. This makes for a great default that works even if this
|
||||
code is linked as a library into some other program whose main is not aware
|
||||
that it might need to be a build remote hook.
|
||||
|
||||
However, it may not have been installed at all. For example, if it's a static build,
|
||||
there's a good chance that it has been moved out of its installation directory.
|
||||
That makes `nixBinDir` useless. Instead, we'll query the OS for the path to the
|
||||
current executable, using `getSelfExe()`.
|
||||
|
||||
As a last resort, we resort to `PATH`. Hopefully we find a `nix` there that's compatible.
|
||||
If you're porting Nix to a new platform, that might be good enough for a while, but
|
||||
you'll want to improve `getSelfExe()` to work on your platform.
|
||||
*/
|
||||
std::string nixExePath = nixBinDir + "/nix";
|
||||
if (!pathExists(nixExePath)) {
|
||||
nixExePath = getSelfExe().value_or("nix");
|
||||
}
|
||||
buildHook = {
|
||||
nixExePath,
|
||||
"__build-remote",
|
||||
};
|
||||
}
|
||||
|
||||
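The removed block documented how the default `build-hook` used to be computed: locate the running `nix` binary via `getSelfExe()` (falling back to `PATH`) so that self-invocation works even when Nix is not installed at its configured `nixBinDir`. On Linux, self-location of that kind typically reads the `/proc/self/exe` symlink; a hypothetical sketch (the real `getSelfExe()` is platform-specific and not shown in this diff):

```c++
#include <limits.h>
#include <unistd.h>
#include <optional>
#include <string>

// Hypothetical Linux-only self-location: resolve the /proc/self/exe symlink.
// Other platforms need different mechanisms (e.g. _NSGetExecutablePath on macOS).
std::optional<std::string> getSelfExeLinux()
{
    char buf[PATH_MAX];
    ssize_t n = readlink("/proc/self/exe", buf, sizeof(buf) - 1);
    if (n < 0) return std::nullopt;
    return std::string(buf, static_cast<size_t>(n));
}
```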
void loadConfFile(AbstractConfig & config)
|
||||
|
@ -297,7 +268,7 @@ template<> std::string BaseSetting<SandboxMode>::to_string() const
|
|||
if (value == smEnabled) return "true";
|
||||
else if (value == smRelaxed) return "relaxed";
|
||||
else if (value == smDisabled) return "false";
|
||||
else abort();
|
||||
else unreachable();
|
||||
}
|
||||
|
||||
template<> void BaseSetting<SandboxMode>::convertToArg(Args & args, const std::string & category)
|
||||
|
|
|
@ -84,11 +84,6 @@ public:
|
|||
*/
|
||||
std::vector<Path> nixUserConfFiles;
|
||||
|
||||
/**
|
||||
* The directory where the main programs are stored.
|
||||
*/
|
||||
Path nixBinDir;
|
||||
|
||||
/**
|
||||
* The directory where the man pages are stored.
|
||||
*/
|
||||
|
@ -246,7 +241,7 @@ public:
|
|||
)",
|
||||
{"build-timeout"}};
|
||||
|
||||
Setting<Strings> buildHook{this, {}, "build-hook",
|
||||
Setting<Strings> buildHook{this, {"nix", "__build-remote"}, "build-hook",
|
||||
R"(
|
||||
The path to the helper program that executes remote builds.
|
||||
|
||||
|
@ -286,7 +281,7 @@ public:
|
|||
For backward compatibility, `ssh://` may be omitted.
|
||||
The hostname may be an alias defined in `~/.ssh/config`.
|
||||
|
||||
2. A comma-separated list of [Nix system types](@docroot@/contributing/hacking.md#system-type).
|
||||
2. A comma-separated list of [Nix system types](@docroot@/development/building.md#system-type).
|
||||
If omitted, this defaults to the local platform type.
|
||||
|
||||
> **Example**
|
||||
|
@ -866,13 +861,13 @@ public:
|
|||
|
||||
- `ca-derivations`
|
||||
|
||||
Included by default if the [`ca-derivations` experimental feature](@docroot@/contributing/experimental-features.md#xp-feature-ca-derivations) is enabled.
|
||||
Included by default if the [`ca-derivations` experimental feature](@docroot@/development/experimental-features.md#xp-feature-ca-derivations) is enabled.
|
||||
|
||||
This system feature is implicitly required by derivations with the [`__contentAddressed` attribute](@docroot@/language/advanced-attributes.md#adv-attr-__contentAddressed).
|
||||
|
||||
- `recursive-nix`
|
||||
|
||||
Included by default if the [`recursive-nix` experimental feature](@docroot@/contributing/experimental-features.md#xp-feature-recursive-nix) is enabled.
|
||||
Included by default if the [`recursive-nix` experimental feature](@docroot@/development/experimental-features.md#xp-feature-recursive-nix) is enabled.
|
||||
|
||||
- `uid-range`
|
||||
|
||||
|
@ -1131,7 +1126,10 @@ public:
|
|||
)"};
|
||||
|
||||
Setting<uint64_t> maxFree{
|
||||
this, std::numeric_limits<uint64_t>::max(), "max-free",
|
||||
// n.b. this is deliberately int64 max rather than uint64 max because
|
||||
// this goes through the Nix language JSON parser and thus needs to be
|
||||
// representable in Nix language integers.
|
||||
this, std::numeric_limits<int64_t>::max(), "max-free",
|
||||
R"(
|
||||
When a garbage collection is triggered by the `min-free` option, it
|
||||
stops as soon as `max-free` bytes are available. The default is
|
||||
|
@ -1221,7 +1219,10 @@ public:
|
|||
|
||||
Setting<uint64_t> warnLargePathThreshold{
|
||||
this,
|
||||
std::numeric_limits<uint64_t>::max(),
|
||||
// n.b. this is deliberately int64 max rather than uint64 max because
|
||||
// this goes through the Nix language JSON parser and thus needs to be
|
||||
// representable in Nix language integers.
|
||||
std::numeric_limits<int64_t>::max(),
|
||||
"warn-large-path-threshold",
|
||||
R"(
|
||||
Warn when copying a path larger than this number of bytes to the Nix store
|
||||
|
|
|
@ -71,7 +71,6 @@ libstore_CXXFLAGS += \
|
|||
-DNIX_STATE_DIR=\"$(NIX_ROOT)$(localstatedir)/nix\" \
|
||||
-DNIX_LOG_DIR=\"$(NIX_ROOT)$(localstatedir)/log/nix\" \
|
||||
-DNIX_CONF_DIR=\"$(NIX_ROOT)$(sysconfdir)/nix\" \
|
||||
-DNIX_BIN_DIR=\"$(NIX_ROOT)$(bindir)\" \
|
||||
-DNIX_MAN_DIR=\"$(NIX_ROOT)$(mandir)\" \
|
||||
-DLSOF=\"$(NIX_ROOT)$(lsof)\"
|
||||
|
||||
|
|
|
@ -21,7 +21,7 @@ configdata = configuration_data()
|
|||
# TODO rename, because it will conflict with downstream projects
|
||||
configdata.set_quoted('PACKAGE_VERSION', meson.project_version())
|
||||
|
||||
configdata.set_quoted('SYSTEM', host_machine.system())
|
||||
configdata.set_quoted('SYSTEM', host_machine.cpu_family() + '-' + host_machine.system())
|
||||
|
||||
deps_private_maybe_subproject = [
|
||||
]
|
||||
|
@ -73,6 +73,7 @@ subdir('build-utils-meson/threads')
|
|||
boost = dependency(
|
||||
'boost',
|
||||
modules : ['container'],
|
||||
include_type: 'system',
|
||||
)
|
||||
# boost is a public dependency, but not a pkg-config dependency unfortunately, so we
|
||||
# put in `deps_other`.
|
||||
|
@ -113,7 +114,7 @@ if aws_s3.found()
|
|||
'-laws-cpp-sdk-core',
|
||||
'-laws-crt-cpp',
|
||||
],
|
||||
)
|
||||
).as_system('system')
|
||||
endif
|
||||
deps_other += aws_s3
|
||||
|
||||
|
@ -327,7 +328,6 @@ prefix = get_option('prefix')
|
|||
path_opts = [
|
||||
# Meson built-ins.
|
||||
'datadir',
|
||||
'bindir',
|
||||
'mandir',
|
||||
'libdir',
|
||||
'includedir',
|
||||
|
@ -372,7 +372,6 @@ cpp_str_defines = {
|
|||
'NIX_STATE_DIR': state_dir / 'nix',
|
||||
'NIX_LOG_DIR': log_dir,
|
||||
'NIX_CONF_DIR': sysconfdir / 'nix',
|
||||
'NIX_BIN_DIR': bindir,
|
||||
'NIX_MAN_DIR': mandir,
|
||||
}
|
||||
|
||||
|
|
|
@ -94,7 +94,7 @@ static bool componentsLT(const std::string_view c1, const std::string_view c2)
|
|||
}
|
||||
|
||||
|
||||
int compareVersions(const std::string_view v1, const std::string_view v2)
|
||||
std::strong_ordering compareVersions(const std::string_view v1, const std::string_view v2)
|
||||
{
|
||||
auto p1 = v1.begin();
|
||||
auto p2 = v2.begin();
|
||||
|
@ -102,11 +102,11 @@ int compareVersions(const std::string_view v1, const std::string_view v2)
|
|||
while (p1 != v1.end() || p2 != v2.end()) {
|
||||
auto c1 = nextComponent(p1, v1.end());
|
||||
auto c2 = nextComponent(p2, v2.end());
|
||||
if (componentsLT(c1, c2)) return -1;
|
||||
else if (componentsLT(c2, c1)) return 1;
|
||||
if (componentsLT(c1, c2)) return std::strong_ordering::less;
|
||||
else if (componentsLT(c2, c1)) return std::strong_ordering::greater;
|
||||
}
|
||||
|
||||
return 0;
|
||||
return std::strong_ordering::equal;
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -30,7 +30,7 @@ typedef std::list<DrvName> DrvNames;
|
|||
|
||||
std::string_view nextComponent(std::string_view::const_iterator & p,
|
||||
const std::string_view::const_iterator end);
|
||||
int compareVersions(const std::string_view v1, const std::string_view v2);
|
||||
std::strong_ordering compareVersions(const std::string_view v1, const std::string_view v2);
|
||||
DrvNames drvNamesFromArgs(const Strings & opArgs);
|
||||
|
||||
}
|
||||
|
|
|
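Since `compareVersions` now returns a `std::strong_ordering` instead of an `int`, callers compare against the ordering constants rather than against `-1`/`0`/`1`. A minimal usage sketch built on the `names.hh` declaration shown above; the surrounding `main` is illustrative only:

    #include <compare>
    #include <iostream>
    #include "names.hh"

    int main()
    {
        auto ord = nix::compareVersions("2.24.1", "2.3");
        if (ord == std::strong_ordering::less)
            std::cout << "older\n";
        else if (ord == std::strong_ordering::greater)
            std::cout << "newer\n";
        else
            std::cout << "same\n";
    }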
@@ -164,7 +164,7 @@ public:
    Cache & getCache(State & state, const std::string & uri)
    {
        auto i = state.caches.find(uri);
        if (i == state.caches.end()) abort();
        if (i == state.caches.end()) unreachable();
        return i->second;
    }

@@ -211,7 +211,7 @@ public:

        {
            auto r(state->insertCache.use()(uri)(time(0))(storeDir)(wantMassQuery)(priority));
            if (!r.next()) { abort(); }
            if (!r.next()) { unreachable(); }
            ret.id = (int) r.getInt(0);
        }

@@ -66,10 +66,7 @@ mkMesonDerivation (finalAttrs: {
  ] ++ lib.optional stdenv.hostPlatform.isLinux libseccomp
    # There have been issues building these dependencies
    ++ lib.optional (stdenv.hostPlatform == stdenv.buildPlatform && (stdenv.isLinux || stdenv.isDarwin))
      (aws-sdk-cpp.override {
        apis = ["s3" "transfer"];
        customMemoryManagement = false;
      })
      aws-sdk-cpp
  ;

  propagatedBuildInputs = [

@@ -101,12 +98,8 @@ mkMesonDerivation (finalAttrs: {
    LDFLAGS = "-fuse-ld=gold";
  };

  enableParallelBuilding = true;

  separateDebugInfo = !stdenv.hostPlatform.isStatic;

  strictDeps = true;

  hardeningDisable = lib.optional stdenv.hostPlatform.isStatic "pie";

  meta = {

@@ -73,8 +73,11 @@ void RemoteStore::initConnection(Connection & conn)
        StringSink saved;
        TeeSource tee(conn.from, saved);
        try {
            conn.protoVersion = WorkerProto::BasicClientConnection::handshake(
                conn.to, tee, PROTOCOL_VERSION);
            auto [protoVersion, features] = WorkerProto::BasicClientConnection::handshake(
                conn.to, tee, PROTOCOL_VERSION,
                WorkerProto::allFeatures);
            conn.protoVersion = protoVersion;
            conn.features = features;
        } catch (SerialisationError & e) {
            /* In case the other side is waiting for our input, close
               it. */

@@ -88,6 +91,9 @@ void RemoteStore::initConnection(Connection & conn)

        static_cast<WorkerProto::ClientHandshakeInfo &>(conn) = conn.postHandshake(*this);

        for (auto & feature : conn.features)
            debug("negotiated feature '%s'", feature);

        auto ex = conn.processStderrReturn();
        if (ex) std::rethrow_exception(ex);
    }

@@ -220,8 +220,6 @@ std::string S3BinaryCacheStoreConfig::doc()

struct S3BinaryCacheStoreImpl : virtual S3BinaryCacheStoreConfig, public virtual S3BinaryCacheStore
{
    std::string bucketName;

    Stats stats;

    S3Helper s3Helper;

@@ -8,7 +8,7 @@
#include <optional>
#include <string>

namespace Aws { namespace Client { class ClientConfiguration; } }
namespace Aws { namespace Client { struct ClientConfiguration; } }
namespace Aws { namespace S3 { class S3Client; } }

namespace nix {

@@ -922,7 +922,7 @@ StorePathSet Store::exportReferences(const StorePathSet & storePaths, const Stor
const Store::Stats & Store::getStats()
{
    {
        auto state_(state.lock());
        auto state_(state.readLock());
        stats.pathInfoCacheSize = state_->pathInfoCache.size();
    }
    return stats;

@@ -201,7 +201,7 @@ protected:
        LRUCache<std::string, PathInfoCacheValue> pathInfoCache;
    };

    Sync<State> state;
    SharedSync<State> state;

    std::shared_ptr<NarInfoDiskCache> diskCache;
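The switch from `Sync<State>` to `SharedSync<State>` lets read-only paths such as `getStats()` take a shared lock via `readLock()` instead of an exclusive one. A minimal sketch of the pattern, assuming `SharedSync` from `sync.hh` also exposes the exclusive `lock()` used elsewhere in the store code; the `Counters` struct is illustrative only:

    #include <cstddef>
    #include "sync.hh"

    struct Counters { size_t hits = 0; };

    nix::SharedSync<Counters> counters;

    size_t readHits()
    {
        // Shared (read) lock: multiple readers may hold it at once.
        auto c(counters.readLock());
        return c->hits;
    }

    void recordHit()
    {
        // Exclusive (write) lock for mutation.
        auto c(counters.lock());
        c->hits++;
    }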
@@ -4,6 +4,7 @@
#include "file-system.hh"
#include "child.hh"
#include "strings.hh"
#include "executable-path.hh"

namespace nix {

@@ -16,11 +17,18 @@ HookInstance::HookInstance()
    if (buildHookArgs.empty())
        throw Error("'build-hook' setting is empty");

    auto buildHook = canonPath(buildHookArgs.front());
    std::filesystem::path buildHook = buildHookArgs.front();
    buildHookArgs.pop_front();

    try {
        buildHook = ExecutablePath::load().findPath(buildHook);
    } catch (ExecutableLookupError & e) {
        e.addTrace(nullptr, "while resolving the 'build-hook' setting'");
        throw;
    }

    Strings args;
    args.push_back(std::string(baseNameOf(buildHook)));
    args.push_back(buildHook.filename().string());

    for (auto & arg : buildHookArgs)
        args.push_back(arg);

@@ -59,7 +67,7 @@ HookInstance::HookInstance()
        if (dup2(builderOut.readSide.get(), 5) == -1)
            throw SysError("dupping builder's stdout/stderr");

        execv(buildHook.c_str(), stringsToCharPtrs(args).data());
        execv(buildHook.native().c_str(), stringsToCharPtrs(args).data());

        throw SysError("executing '%s'", buildHook);
    });

@@ -102,7 +102,14 @@ void handleDiffHook(
    }
}

// We want $HOME to be un-creatable in the sandbox. On Linux,
// you can't create anything inside /proc since it's a virtual filesystem.
// On Darwin it seems that `/homeless-shelter` is good enough.
#if __linux__
const Path LocalDerivationGoal::homeDir = "/proc/homeless-shelter";
#else
const Path LocalDerivationGoal::homeDir = "/homeless-shelter";
#endif


LocalDerivationGoal::~LocalDerivationGoal()

@@ -165,7 +172,7 @@ void LocalDerivationGoal::killSandbox(bool getStats)
        buildResult.cpuSystem = stats.cpuSystem;
    }
#else
    abort();
    unreachable();
#endif
}

@@ -1258,7 +1265,7 @@ bool LocalDerivationGoal::isAllowed(const DerivedPath & req)
struct RestrictedStoreConfig : virtual LocalFSStoreConfig
{
    using LocalFSStoreConfig::LocalFSStoreConfig;
    const std::string name() { return "Restricted Store"; }
    const std::string name() override { return "Restricted Store"; }
};

/* A wrapper around LocalStore that only allows building/querying of

@@ -1702,10 +1709,13 @@ void setupSeccomp()
            throw SysError("unable to add seccomp rule");
    }

    /* Prevent builders from creating EAs or ACLs. Not all filesystems
    /* Prevent builders from using EAs or ACLs. Not all filesystems
       support these, and they're not allowed in the Nix store because
       they're not representable in the NAR serialisation. */
    if (seccomp_rule_add(ctx, SCMP_ACT_ERRNO(ENOTSUP), SCMP_SYS(setxattr), 0) != 0 ||
    if (seccomp_rule_add(ctx, SCMP_ACT_ERRNO(ENOTSUP), SCMP_SYS(getxattr), 0) != 0 ||
        seccomp_rule_add(ctx, SCMP_ACT_ERRNO(ENOTSUP), SCMP_SYS(lgetxattr), 0) != 0 ||
        seccomp_rule_add(ctx, SCMP_ACT_ERRNO(ENOTSUP), SCMP_SYS(fgetxattr), 0) != 0 ||
        seccomp_rule_add(ctx, SCMP_ACT_ERRNO(ENOTSUP), SCMP_SYS(setxattr), 0) != 0 ||
        seccomp_rule_add(ctx, SCMP_ACT_ERRNO(ENOTSUP), SCMP_SYS(lsetxattr), 0) != 0 ||
        seccomp_rule_add(ctx, SCMP_ACT_ERRNO(ENOTSUP), SCMP_SYS(fsetxattr), 0) != 0)
        throw SysError("unable to add seccomp rule");

@@ -49,6 +49,7 @@ R""(
  (if (param "_ALLOW_LOCAL_NETWORKING")
      (begin
        (allow network* (remote ip "localhost:*"))
        (allow network-inbound (local ip "*:*")) ; required to bind and listen

        ; Allow access to /etc/resolv.conf (which is a symlink to
        ; /private/var/run/resolv.conf).

@@ -45,7 +45,7 @@ bool lockFile(Descriptor desc, LockType lockType, bool wait)
    if (lockType == ltRead) type = LOCK_SH;
    else if (lockType == ltWrite) type = LOCK_EX;
    else if (lockType == ltNone) type = LOCK_UN;
    else abort();
    else unreachable();

    if (wait) {
        while (flock(desc, type) != 0) {
@@ -5,6 +5,8 @@

namespace nix {

const std::set<WorkerProto::Feature> WorkerProto::allFeatures{};

WorkerProto::BasicClientConnection::~BasicClientConnection()
{
    try {

@@ -137,8 +139,21 @@ void WorkerProto::BasicClientConnection::processStderr(bool * daemonException, S
    }
}

WorkerProto::Version
WorkerProto::BasicClientConnection::handshake(BufferedSink & to, Source & from, WorkerProto::Version localVersion)
static std::set<WorkerProto::Feature>
intersectFeatures(const std::set<WorkerProto::Feature> & a, const std::set<WorkerProto::Feature> & b)
{
    std::set<WorkerProto::Feature> res;
    for (auto & x : a)
        if (b.contains(x))
            res.insert(x);
    return res;
}

std::tuple<WorkerProto::Version, std::set<WorkerProto::Feature>> WorkerProto::BasicClientConnection::handshake(
    BufferedSink & to,
    Source & from,
    WorkerProto::Version localVersion,
    const std::set<WorkerProto::Feature> & supportedFeatures)
{
    to << WORKER_MAGIC_1 << localVersion;
    to.flush();

@@ -153,11 +168,24 @@ WorkerProto::BasicClientConnection::handshake(BufferedSink & to, Source & from,
    if (GET_PROTOCOL_MINOR(daemonVersion) < 10)
        throw Error("the Nix daemon version is too old");

    return std::min(daemonVersion, localVersion);
    auto protoVersion = std::min(daemonVersion, localVersion);

    /* Exchange features. */
    std::set<WorkerProto::Feature> daemonFeatures;
    if (GET_PROTOCOL_MINOR(protoVersion) >= 38) {
        to << supportedFeatures;
        to.flush();
        daemonFeatures = readStrings<std::set<WorkerProto::Feature>>(from);
    }

    return {protoVersion, intersectFeatures(daemonFeatures, supportedFeatures)};
}

WorkerProto::Version
WorkerProto::BasicServerConnection::handshake(BufferedSink & to, Source & from, WorkerProto::Version localVersion)
std::tuple<WorkerProto::Version, std::set<WorkerProto::Feature>> WorkerProto::BasicServerConnection::handshake(
    BufferedSink & to,
    Source & from,
    WorkerProto::Version localVersion,
    const std::set<WorkerProto::Feature> & supportedFeatures)
{
    unsigned int magic = readInt(from);
    if (magic != WORKER_MAGIC_1)

@@ -165,7 +193,18 @@ WorkerProto::BasicServerConnection::handshake(BufferedSink & to, Source & from,
    to << WORKER_MAGIC_2 << localVersion;
    to.flush();
    auto clientVersion = readInt(from);
    return std::min(clientVersion, localVersion);

    auto protoVersion = std::min(clientVersion, localVersion);

    /* Exchange features. */
    std::set<WorkerProto::Feature> clientFeatures;
    if (GET_PROTOCOL_MINOR(protoVersion) >= 38) {
        clientFeatures = readStrings<std::set<WorkerProto::Feature>>(from);
        to << supportedFeatures;
        to.flush();
    }

    return {protoVersion, intersectFeatures(clientFeatures, supportedFeatures)};
}

WorkerProto::ClientHandshakeInfo WorkerProto::BasicClientConnection::postHandshake(const StoreDirConfig & store)

@@ -23,6 +23,11 @@ struct WorkerProto::BasicConnection
     */
    WorkerProto::Version protoVersion;

    /**
     * The set of features that both sides support.
     */
    std::set<Feature> features;

    /**
     * Coercion to `WorkerProto::ReadConn`. This makes it easy to use the
     * factored out serve protocol serializers with a

@@ -72,8 +77,8 @@ struct WorkerProto::BasicClientConnection : WorkerProto::BasicConnection
    /**
     * Establishes connection, negotiating version.
     *
     * @return the version provided by the other side of the
     * connection.
     * @return the minimum version supported by both sides and the set
     * of protocol features supported by both sides.
     *
     * @param to Taken by reference to allow for various error handling
     * mechanisms.

@@ -82,8 +87,15 @@ struct WorkerProto::BasicClientConnection : WorkerProto::BasicConnection
     * handling mechanisms.
     *
     * @param localVersion Our version which is sent over
     *
     * @param features The protocol features that we support
     */
    static Version handshake(BufferedSink & to, Source & from, WorkerProto::Version localVersion);
    // FIXME: this should probably be a constructor.
    static std::tuple<Version, std::set<Feature>> handshake(
        BufferedSink & to,
        Source & from,
        WorkerProto::Version localVersion,
        const std::set<Feature> & supportedFeatures);

    /**
     * After calling handshake, must call this to exchange some basic

@@ -138,8 +150,15 @@ struct WorkerProto::BasicServerConnection : WorkerProto::BasicConnection
     * handling mechanisms.
     *
     * @param localVersion Our version which is sent over
     *
     * @param features The protocol features that we support
     */
    static WorkerProto::Version handshake(BufferedSink & to, Source & from, WorkerProto::Version localVersion);
    // FIXME: this should probably be a constructor.
    static std::tuple<Version, std::set<Feature>> handshake(
        BufferedSink & to,
        Source & from,
        WorkerProto::Version localVersion,
        const std::set<Feature> & supportedFeatures);

    /**
     * After calling handshake, must call this to exchange some basic

@@ -11,7 +11,9 @@ namespace nix {
#define WORKER_MAGIC_1 0x6e697863
#define WORKER_MAGIC_2 0x6478696f

#define PROTOCOL_VERSION (1 << 8 | 37)
/* Note: you generally shouldn't change the protocol version. Define a
   new `WorkerProto::Feature` instead. */
#define PROTOCOL_VERSION (1 << 8 | 38)
#define GET_PROTOCOL_MAJOR(x) ((x) & 0xff00)
#define GET_PROTOCOL_MINOR(x) ((x) & 0x00ff)

@@ -131,6 +133,10 @@ struct WorkerProto
    {
        WorkerProto::Serialise<T>::write(store, conn, t);
    }

    using Feature = std::string;

    static const std::set<Feature> allFeatures;
};

enum struct WorkerProto::Op : uint64_t
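To see how the feature exchange composes, here is a minimal client-side sketch built only from the declarations above (`handshake`, `WorkerProto::allFeatures`, and `Feature` being a plain string); the feature name "example-feature" is purely hypothetical:

    // Client side: negotiate version and features in one call, then branch
    // on whether a given feature was accepted by both sides.
    auto [protoVersion, features] = WorkerProto::BasicClientConnection::handshake(
        conn.to, conn.from, PROTOCOL_VERSION, WorkerProto::allFeatures);

    conn.protoVersion = protoVersion;
    conn.features = features;

    // "example-feature" is a hypothetical WorkerProto::Feature (a string).
    if (conn.features.contains("example-feature")) {
        // Use the extended behaviour guarded by the feature.
    } else {
        // Fall back to the pre-feature behaviour.
    }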
@@ -56,47 +56,51 @@ extern "C" {
 * - NIX_ERR_KEY: A key error occurred (-3)
 * - NIX_ERR_NIX_ERROR: A generic Nix error occurred (-4)
 */
typedef int nix_err;
enum nix_err {

    /**
     * @brief No error occurred.
     *
     * This error code is returned when no error has occurred during the function
     * execution.
     */
#define NIX_OK 0
    /**
     * @brief No error occurred.
     *
     * This error code is returned when no error has occurred during the function
     * execution.
     */
    NIX_OK = 0,

    /**
     * @brief An unknown error occurred.
     *
     * This error code is returned when an unknown error occurred during the
     * function execution.
     */
#define NIX_ERR_UNKNOWN -1
    /**
     * @brief An unknown error occurred.
     *
     * This error code is returned when an unknown error occurred during the
     * function execution.
     */
    NIX_ERR_UNKNOWN = -1,

    /**
     * @brief An overflow error occurred.
     *
     * This error code is returned when an overflow error occurred during the
     * function execution.
     */
#define NIX_ERR_OVERFLOW -2
    /**
     * @brief An overflow error occurred.
     *
     * This error code is returned when an overflow error occurred during the
     * function execution.
     */
    NIX_ERR_OVERFLOW = -2,

    /**
     * @brief A key error occurred.
     *
     * This error code is returned when a key error occurred during the function
     * execution.
     */
#define NIX_ERR_KEY -3
    /**
     * @brief A key error occurred.
     *
     * This error code is returned when a key error occurred during the function
     * execution.
     */
    NIX_ERR_KEY = -3,

    /**
     * @brief A generic Nix error occurred.
     *
     * This error code is returned when a generic Nix error occurred during the
     * function execution.
     */
#define NIX_ERR_NIX_ERROR -4
    /**
     * @brief A generic Nix error occurred.
     *
     * This error code is returned when a generic Nix error occurred during the
     * function execution.
     */
    NIX_ERR_NIX_ERROR = -4,

};

typedef enum nix_err nix_err;

/**
 * @brief This object stores error state.
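The change from `#define` constants plus `typedef int nix_err` to a real `enum nix_err` keeps the same numeric values, so callers that compare against `NIX_OK` and friends keep compiling. A minimal caller sketch; `some_nix_api_call` is a hypothetical stand-in for any C API function that returns `nix_err`, not a real function:

    #include "nix_api_util.h"

    extern "C" nix_err some_nix_api_call(void);  // hypothetical stand-in

    void check()
    {
        nix_err err = some_nix_api_call();
        switch (err) {
        case NIX_OK:
            break;  // success
        case NIX_ERR_OVERFLOW:
            // handle arithmetic overflow reported by the C API
            break;
        default:
            // NIX_ERR_UNKNOWN, NIX_ERR_KEY, NIX_ERR_NIX_ERROR, ...
            break;
        }
    }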
@@ -62,12 +62,8 @@ mkMesonDerivation (finalAttrs: {
    LDFLAGS = "-fuse-ld=gold";
  };

  enableParallelBuilding = true;

  separateDebugInfo = !stdenv.hostPlatform.isStatic;

  strictDeps = true;

  hardeningDisable = lib.optional stdenv.hostPlatform.isStatic "pie";

  meta = {
182 src/libutil/checked-arithmetic.hh Normal file

@@ -0,0 +1,182 @@
#pragma once
/**
 * @file Checked arithmetic with classes that make it hard to accidentally make something an unchecked operation.
 */

#include <compare>
#include <concepts> // IWYU pragma: keep
#include <exception>
#include <ostream>
#include <limits>
#include <optional>
#include <type_traits>

namespace nix::checked {

class DivideByZero : std::exception
{};

/**
 * Numeric value enforcing checked arithmetic. Performing mathematical operations on such values will return a Result
 * type which needs to be checked.
 */
template<std::integral T>
struct Checked
{
    using Inner = T;

    // TODO: this must be a "trivial default constructor", which means it
    // cannot set the value to NOT DO UB on uninit.
    T value;

    Checked() = default;
    explicit Checked(T const value)
        : value{value}
    {
    }
    Checked(Checked<T> const & other) = default;
    Checked(Checked<T> && other) = default;
    Checked<T> & operator=(Checked<T> const & other) = default;

    std::strong_ordering operator<=>(Checked<T> const & other) const = default;
    std::strong_ordering operator<=>(T const & other) const
    {
        return value <=> other;
    }

    explicit operator T() const
    {
        return value;
    }

    enum class OverflowKind {
        NoOverflow,
        Overflow,
        DivByZero,
    };

    class Result
    {
        T value;
        OverflowKind overflowed_;

    public:
        Result(T value, bool overflowed)
            : value{value}
            , overflowed_{overflowed ? OverflowKind::Overflow : OverflowKind::NoOverflow}
        {
        }
        Result(T value, OverflowKind overflowed)
            : value{value}
            , overflowed_{overflowed}
        {
        }

        bool operator==(Result other) const
        {
            return value == other.value && overflowed_ == other.overflowed_;
        }

        std::optional<T> valueChecked() const
        {
            if (overflowed_ != OverflowKind::NoOverflow) {
                return std::nullopt;
            } else {
                return value;
            }
        }

        /**
         * Returns the result as if the arithmetic were performed as wrapping arithmetic.
         *
         * \throws DivideByZero if the operation was a divide by zero.
         */
        T valueWrapping() const
        {
            if (overflowed_ == OverflowKind::DivByZero) {
                throw DivideByZero{};
            }
            return value;
        }

        bool overflowed() const
        {
            return overflowed_ == OverflowKind::Overflow;
        }

        bool divideByZero() const
        {
            return overflowed_ == OverflowKind::DivByZero;
        }
    };

    Result operator+(Checked<T> const other) const
    {
        return (*this) + other.value;
    }
    Result operator+(T const other) const
    {
        T result;
        bool overflowed = __builtin_add_overflow(value, other, &result);
        return Result{result, overflowed};
    }

    Result operator-(Checked<T> const other) const
    {
        return (*this) - other.value;
    }
    Result operator-(T const other) const
    {
        T result;
        bool overflowed = __builtin_sub_overflow(value, other, &result);
        return Result{result, overflowed};
    }

    Result operator*(Checked<T> const other) const
    {
        return (*this) * other.value;
    }
    Result operator*(T const other) const
    {
        T result;
        bool overflowed = __builtin_mul_overflow(value, other, &result);
        return Result{result, overflowed};
    }

    Result operator/(Checked<T> const other) const
    {
        return (*this) / other.value;
    }
    /**
     * Performs a checked division.
     *
     * If the right hand side is zero, the result is marked as a DivByZero and
     * valueWrapping will throw.
     */
    Result operator/(T const other) const
    {
        constexpr T const minV = std::numeric_limits<T>::min();

        // It's only possible to overflow with signed division since doing so
        // requires crossing the two's complement limits by MIN / -1 (since
        // two's complement has one more in range in the negative direction
        // than in the positive one).
        if (std::is_signed<T>() && (value == minV && other == -1)) {
            return Result{minV, true};
        } else if (other == 0) {
            return Result{0, OverflowKind::DivByZero};
        } else {
            T result = value / other;
            return Result{result, false};
        }
    }
};

template<std::integral T>
std::ostream & operator<<(std::ostream & ios, Checked<T> v)
{
    ios << v.value;
    return ios;
}

}
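A minimal usage sketch for the new `Checked<T>` wrapper, using only the members defined above (`operator+`, `Result::valueChecked`); the surrounding function is illustrative only:

    #include <cstdint>
    #include <optional>
    #include "checked-arithmetic.hh"

    // Add two sizes, refusing to proceed on overflow instead of wrapping silently.
    std::optional<uint64_t> addSizes(uint64_t a, uint64_t b)
    {
        nix::checked::Checked<uint64_t> ca{a};
        auto result = ca + b;          // a Checked<uint64_t>::Result
        return result.valueChecked();  // nullopt if the addition overflowed
    }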
@@ -6,6 +6,8 @@
#include <vector>
#include <limits>

#include "error.hh"

namespace nix {

/**

@@ -30,7 +32,7 @@ private:
    auto & addChunk()
    {
        if (size_ >= std::numeric_limits<uint32_t>::max() - ChunkSize)
            abort();
            unreachable();
        chunks.emplace_back();
        chunks.back().reserve(ChunkSize);
        return chunks.back();

@@ -1,6 +1,7 @@
#include "config.hh"
#include "args.hh"
#include "abstract-setting-to-json.hh"
#include "environment-variables.hh"
#include "experimental-features.hh"
#include "util.hh"
#include "file-system.hh"

@@ -170,9 +171,18 @@ void AbstractConfig::applyConfig(const std::string & contents, const std::string
            set(name, value);

    // Then apply other settings
    for (const auto & [name, value] : parsedContents)
        if (name != "experimental-features" && name != "extra-experimental-features")
    // XXX: NIX_PATH must override the regular setting! This is done in `initGC()`
    // Environment variables overriding settings should probably be part of the Config mechanism,
    // but at the time of writing it's not worth building that for just one thing
    for (const auto & [name, value] : parsedContents) {
        if (name != "experimental-features" && name != "extra-experimental-features") {
            if ((name == "nix-path" || name == "extra-nix-path")
                && getEnv("NIX_PATH").has_value()) {
                continue;
            }
            set(name, value);
        }
    }
}

void Config::resetOverridden()
@@ -393,7 +393,7 @@ struct ExperimentalFeatureSettings : Config {

          {{#include experimental-features-shortlist.md}}

          Experimental features are [further documented in the manual](@docroot@/contributing/experimental-features.md).
          Experimental features are [further documented in the manual](@docroot@/development/experimental-features.md).
        )"};

    /**

@@ -15,13 +15,12 @@

#if __linux__
# include <mutex>
# include <sys/resource.h>
# include "cgroup.hh"
# include "namespaces.hh"
#endif

#ifndef _WIN32
# include <sys/mount.h>
# include <sys/resource.h>
#endif

namespace nix {

@@ -138,7 +137,7 @@ std::optional<Path> getSelfExe()
{
    static auto cached = []() -> std::optional<Path>
    {
#if __linux__
#if __linux__ || __GNU__
        return readLink("/proc/self/exe");
#elif __APPLE__
        char buf[1024];

@@ -1,20 +1,23 @@
#include "util.hh"
#include "environment-variables.hh"

extern char * * environ __attribute__((weak));
extern char ** environ __attribute__((weak));

namespace nix {

std::optional<std::string> getEnv(const std::string & key)
{
    char * value = getenv(key.c_str());
    if (!value) return {};
    if (!value)
        return {};
    return std::string(value);
}

std::optional<std::string> getEnvNonEmpty(const std::string & key) {
std::optional<std::string> getEnvNonEmpty(const std::string & key)
{
    auto value = getEnv(key);
    if (value == "") return {};
    if (value == "")
        return {};
    return value;
}

@@ -9,6 +9,7 @@
#include <optional>

#include "types.hh"
#include "file-path.hh"

namespace nix {

@@ -17,6 +18,11 @@ namespace nix {
 */
std::optional<std::string> getEnv(const std::string & key);

/**
 * Like `getEnv`, but using `OsString` to avoid coercions.
 */
std::optional<OsString> getEnvOs(const OsString & key);

/**
 * @return a non empty environment variable. Returns nullopt if the env
 * variable is set to ""

@@ -43,6 +49,11 @@ int unsetenv(const char * name);
 */
int setEnv(const char * name, const char * value);

/**
 * Like `setEnv`, but using `OsString` to avoid coercions.
 */
int setEnvOs(const OsString & name, const OsString & value);

/**
 * Clear the environment.
 */
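A minimal sketch of how these helpers are meant to be used, based only on the declarations above; `OS_STR` is the OS-string literal macro used elsewhere in this diff, and the variable names are illustrative:

    #include "environment-variables.hh"

    void example()
    {
        // std::string variant: distinguishes "set to empty" from "unset".
        if (auto value = nix::getEnv("NIX_PATH")) {
            // *value may be "", but the variable exists.
        }

        // Treats "" the same as unset.
        if (auto value = nix::getEnvNonEmpty("NIX_PATH")) {
            // *value is guaranteed non-empty here.
        }

        // OsString variant avoids encoding coercions (wide strings on Windows).
        auto path = nix::getEnvOs(OS_STR("PATH"));
        (void) path;
    }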
@@ -1,3 +1,5 @@
#include <algorithm>

#include "error.hh"
#include "environment-variables.hh"
#include "signals.hh"

@@ -430,4 +432,36 @@ std::ostream & showErrorInfo(std::ostream & out, const ErrorInfo & einfo, bool s
    return out;
}

/** Write to stderr in a robust and minimal way, considering that the process
 * may be in a bad state.
 */
static void writeErr(std::string_view buf)
{
    while (!buf.empty()) {
        auto n = write(STDERR_FILENO, buf.data(), buf.size());
        if (n < 0) {
            if (errno == EINTR) continue;
            abort();
        }
        buf = buf.substr(n);
    }
}

void panic(std::string_view msg)
{
    writeErr("\n\n" ANSI_RED "terminating due to unexpected unrecoverable internal error: " ANSI_NORMAL );
    writeErr(msg);
    writeErr("\n");
    abort();
}

void panic(const char * file, int line, const char * func)
{
    char buf[512];
    int n = snprintf(buf, sizeof(buf), "Unexpected condition in %s at %s:%d", func, file, line);
    if (n < 0)
        panic("Unexpected condition and could not format error message");
    panic(std::string_view(buf, std::min(static_cast<int>(sizeof(buf)), n)));
}

}

@@ -273,4 +273,24 @@ using NativeSysError =
 */
void throwExceptionSelfCheck();

/**
 * Print a message and abort().
 */
[[noreturn]]
void panic(std::string_view msg);

/**
 * Print a basic error message with source position and abort().
 * Use the unreachable() macro to call this.
 */
[[noreturn]]
void panic(const char * file, int line, const char * func);

/**
 * Print a basic error message with source position and abort().
 *
 * @note: This assumes that the logger is operational
 */
#define unreachable() (::nix::panic(__FILE__, __LINE__, __func__))

}
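The pattern throughout this commit is to replace bare `abort()` calls with `unreachable()`, so an impossible branch reports its source location before aborting. A minimal sketch using only the macro defined above; the enum is illustrative:

    #include "error.hh"

    enum class Color { Red, Green };  // illustrative type

    const char * describe(Color c)
    {
        switch (c) {
        case Color::Red: return "red";
        case Color::Green: return "green";
        }
        // Prints "Unexpected condition in describe at <file>:<line>" and aborts.
        unreachable();
    }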
95 src/libutil/executable-path.cc Normal file

@@ -0,0 +1,95 @@
#include "environment-variables.hh"
|
||||
#include "executable-path.hh"
|
||||
#include "strings-inline.hh"
|
||||
#include "util.hh"
|
||||
#include "file-path-impl.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
namespace fs = std::filesystem;
|
||||
|
||||
constexpr static const OsStringView path_var_separator{
|
||||
&ExecutablePath::separator,
|
||||
1,
|
||||
};
|
||||
|
||||
ExecutablePath ExecutablePath::load()
|
||||
{
|
||||
// "If PATH is unset or is set to null, the path search is
|
||||
// implementation-defined."
|
||||
// https://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap08.html#tag_08_03
|
||||
return ExecutablePath::parse(getEnvOs(OS_STR("PATH")).value_or(OS_STR("")));
|
||||
}
|
||||
|
||||
ExecutablePath ExecutablePath::parse(const OsString & path)
|
||||
{
|
||||
auto strings = path.empty() ? (std::list<OsString>{})
|
||||
: basicSplitString<std::list<OsString>, OsString::value_type>(path, path_var_separator);
|
||||
|
||||
std::vector<fs::path> ret;
|
||||
ret.reserve(strings.size());
|
||||
|
||||
std::transform(
|
||||
std::make_move_iterator(strings.begin()),
|
||||
std::make_move_iterator(strings.end()),
|
||||
std::back_inserter(ret),
|
||||
[](auto && str) {
|
||||
return fs::path{
|
||||
str.empty()
|
||||
// "A zero-length prefix is a legacy feature that
|
||||
// indicates the current working directory. It
|
||||
// appears as two adjacent <colon> characters
|
||||
// ("::"), as an initial <colon> preceding the rest
|
||||
// of the list, or as a trailing <colon> following
|
||||
// the rest of the list."
|
||||
// https://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap08.html#tag_08_03
|
||||
? OS_STR(".")
|
||||
: std::move(str),
|
||||
};
|
||||
});
|
||||
|
||||
return {ret};
|
||||
}
|
||||
|
||||
OsString ExecutablePath::render() const
|
||||
{
|
||||
std::vector<PathViewNG> path2;
|
||||
for (auto & p : directories)
|
||||
path2.push_back(p.native());
|
||||
return basicConcatStringsSep(path_var_separator, path2);
|
||||
}
|
||||
|
||||
std::optional<fs::path>
|
||||
ExecutablePath::findName(const OsString & exe, std::function<bool(const fs::path &)> isExecutable) const
|
||||
{
|
||||
// "If the pathname being sought contains a <slash>, the search
|
||||
// through the path prefixes shall not be performed."
|
||||
// https://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap08.html#tag_08_03
|
||||
assert(OsPathTrait<fs::path::value_type>::rfindPathSep(exe) == exe.npos);
|
||||
|
||||
for (auto & dir : directories) {
|
||||
auto candidate = dir / exe;
|
||||
if (isExecutable(candidate))
|
||||
return std::filesystem::canonical(candidate);
|
||||
}
|
||||
|
||||
return std::nullopt;
|
||||
}
|
||||
|
||||
fs::path ExecutablePath::findPath(const fs::path & exe, std::function<bool(const fs::path &)> isExecutable) const
|
||||
{
|
||||
// "If the pathname being sought contains a <slash>, the search
|
||||
// through the path prefixes shall not be performed."
|
||||
// https://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap08.html#tag_08_03
|
||||
if (exe.filename() == exe) {
|
||||
auto resOpt = findName(exe, isExecutable);
|
||||
if (resOpt)
|
||||
return *resOpt;
|
||||
else
|
||||
throw ExecutableLookupError("Could not find executable '%s'", exe.native());
|
||||
} else {
|
||||
return exe;
|
||||
}
|
||||
}
|
||||
|
||||
} // namespace nix
|
77
src/libutil/executable-path.hh
Normal file
77
src/libutil/executable-path.hh
Normal file
|
@ -0,0 +1,77 @@
|
|||
#pragma once
///@file

#include "file-system.hh"

namespace nix {

MakeError(ExecutableLookupError, Error);

struct ExecutablePath
{
    std::vector<std::filesystem::path> directories;

    constexpr static const OsString::value_type separator =
#ifdef WIN32
        L';'
#else
        ':'
#endif
        ;

    /**
     * Parse `path` into a list of paths.
     *
     * On Unix we split on `:`, on Windows we split on `;`.
     *
     * For Unix, this is according to the POSIX spec for `PATH`.
     * https://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap08.html#tag_08_03
     */
    static ExecutablePath parse(const OsString & path);

    /**
     * Load the `PATH` environment variable and `parse` it.
     */
    static ExecutablePath load();

    /**
     * Opposite of `parse`
     */
    OsString render() const;

    /**
     * Search for an executable.
     *
     * For Unix, this is according to the POSIX spec for `PATH`.
     * https://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap08.html#tag_08_03
     *
     * @param exe This must just be a name, and not contain any `/` (or
     * `\` on Windows). In case it does, per the spec no lookup should
     * be performed, and the path (it is not just a file name) is used
     * as is. This is the caller's responsibility.
     *
     * This is a pure function, except for the default `isExecutable`
     * argument, which uses the ambient file system to check if a file is
     * executable (and exists).
     *
     * @return path to a resolved executable
     */
    std::optional<std::filesystem::path> findName(
        const OsString & exe,
        std::function<bool(const std::filesystem::path &)> isExecutableFile = isExecutableFileAmbient) const;

    /**
     * Like `findName` but also allows a file path as input.
     *
     * This implements the full POSIX spec: if the path is just a name,
     * it searches like the above. Otherwise, it returns the path as is.
     * If (in the name case) the search fails, an exception is thrown.
     */
    std::filesystem::path findPath(
        const std::filesystem::path & exe,
        std::function<bool(const std::filesystem::path &)> isExecutable = isExecutableFileAmbient) const;

    bool operator==(const ExecutablePath &) const = default;
};

} // namespace nix
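A minimal usage sketch for the new `ExecutablePath` helper, mirroring how `HookInstance` resolves the build hook earlier in this diff; the program name "git" is just an example:

    #include <iostream>
    #include "executable-path.hh"

    int main()
    {
        using namespace nix;

        // Parse $PATH (":"-separated on Unix, ";"-separated on Windows).
        auto path = ExecutablePath::load();

        try {
            // A bare name is searched in each PATH directory; a path with a
            // directory component is returned unchanged.
            auto git = path.findPath("git");
            std::cout << git.string() << "\n";
        } catch (ExecutableLookupError &) {
            std::cerr << "not found on PATH\n";
        }
    }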
@@ -24,7 +24,7 @@ struct ExperimentalFeatureDetails
 * feature, we either have no issue at all if few features are not added
 * at the end of the list, or a proper merge conflict if they are.
 */
constexpr size_t numXpFeatures = 1 + static_cast<size_t>(Xp::VerifiedFetches);
constexpr size_t numXpFeatures = 1 + static_cast<size_t>(Xp::PipeOperators);

constexpr std::array<ExperimentalFeatureDetails, numXpFeatures> xpFeatureDetails = {{
    {

@@ -294,6 +294,14 @@ constexpr std::array<ExperimentalFeatureDetails, numXpFeatures> xpFeatureDetails
        )",
        .trackingUrl = "https://github.com/NixOS/nix/milestone/48",
    },
    {
        .tag = Xp::PipeOperators,
        .name = "pipe-operators",
        .description = R"(
          Add `|>` and `<|` operators to the Nix language.
        )",
        .trackingUrl = "https://github.com/NixOS/nix/milestone/55",
    },
}};

static_assert(

@@ -35,6 +35,7 @@ enum struct ExperimentalFeature
    ConfigurableImpureEnv,
    MountedSSHStore,
    VerifiedFetches,
    PipeOperators,
};

/**

@@ -63,7 +63,7 @@ std::string_view renderFileIngestionMethod(FileIngestionMethod method)
    case FileIngestionMethod::Git:
        return "git";
    default:
        abort();
        unreachable();
    }
}

@@ -91,13 +91,10 @@ struct WindowsPathTrait
};


/**
 * @todo Revisit choice of `char` or `wchar_t` for `WindowsPathTrait`
 * argument.
 */
using NativePathTrait =
template<typename CharT>
using OsPathTrait =
#ifdef _WIN32
    WindowsPathTrait<char>
    WindowsPathTrait<CharT>
#else
    UnixPathTrait
#endif
Some files were not shown because too many files have changed in this diff.