Mirror of https://github.com/NixOS/nix (synced 2025-07-06 21:41:48 +02:00)

Merge remote-tracking branch 'upstream/master' into lfs

Commit 6a3b4afc0a: 347 changed files with 8407 additions and 5795 deletions
@@ -1,11 +1,12 @@
-{ lib
-, mkMesonDerivation
+{
+  lib,
+  mkMesonDerivation,

-, doxygen
+  doxygen,

-# Configuration Options
+  # Configuration Options

-, version
+  version,
 }:

 let
@@ -39,11 +40,10 @@ mkMesonDerivation (finalAttrs: {
     doxygen
   ];

-  preConfigure =
-    ''
-      chmod u+w ./.version
-      echo ${finalAttrs.version} > ./.version
-    '';
+  preConfigure = ''
+    chmod u+w ./.version
+    echo ${finalAttrs.version} > ./.version
+  '';

   postInstall = ''
     mkdir -p ''${!outputDoc}/nix-support
@@ -1,11 +1,12 @@
-{ lib
-, mkMesonDerivation
+{
+  lib,
+  mkMesonDerivation,

-, doxygen
+  doxygen,

-# Configuration Options
+  # Configuration Options

-, version
+  version,
 }:

 let
@@ -17,27 +18,28 @@ mkMesonDerivation (finalAttrs: {
   inherit version;

   workDir = ./.;
-  fileset = let
-    cpp = fileset.fileFilter (file: file.hasExt "cc" || file.hasExt "hh");
-  in fileset.unions [
-    ./.version
-    ../../.version
-    ./meson.build
-    ./doxygen.cfg.in
-    # Source is not compiled, but still must be available for Doxygen
-    # to gather comments.
-    (cpp ../.)
-  ];
+  fileset =
+    let
+      cpp = fileset.fileFilter (file: file.hasExt "cc" || file.hasExt "hh");
+    in
+    fileset.unions [
+      ./.version
+      ../../.version
+      ./meson.build
+      ./doxygen.cfg.in
+      # Source is not compiled, but still must be available for Doxygen
+      # to gather comments.
+      (cpp ../.)
+    ];

   nativeBuildInputs = [
     doxygen
   ];

-  preConfigure =
-    ''
-      chmod u+w ./.version
-      echo ${finalAttrs.version} > ./.version
-    '';
+  preConfigure = ''
+    chmod u+w ./.version
+    echo ${finalAttrs.version} > ./.version
+  '';

   postInstall = ''
     mkdir -p ''${!outputDoc}/nix-support
@@ -347,7 +347,7 @@ struct MixEnvironment : virtual Args
     void setEnviron();
 };

-void completeFlakeInputPath(
+void completeFlakeInputAttrPath(
     AddCompletions & completions,
     ref<EvalState> evalState,
     const std::vector<FlakeRef> & flakeRefs,
@@ -33,7 +33,7 @@ namespace nix {

 namespace fs { using namespace std::filesystem; }

-void completeFlakeInputPath(
+void completeFlakeInputAttrPath(
     AddCompletions & completions,
     ref<EvalState> evalState,
     const std::vector<FlakeRef> & flakeRefs,
@@ -117,10 +117,10 @@ MixFlakeOptions::MixFlakeOptions()
         .labels = {"input-path"},
         .handler = {[&](std::string s) {
             warn("'--update-input' is a deprecated alias for 'flake update' and will be removed in a future version.");
-            lockFlags.inputUpdates.insert(flake::parseInputPath(s));
+            lockFlags.inputUpdates.insert(flake::parseInputAttrPath(s));
         }},
         .completer = {[&](AddCompletions & completions, size_t, std::string_view prefix) {
-            completeFlakeInputPath(completions, getEvalState(), getFlakeRefsForCompletion(), prefix);
+            completeFlakeInputAttrPath(completions, getEvalState(), getFlakeRefsForCompletion(), prefix);
         }}
     });

@@ -129,15 +129,15 @@ MixFlakeOptions::MixFlakeOptions()
         .description = "Override a specific flake input (e.g. `dwarffs/nixpkgs`). This implies `--no-write-lock-file`.",
         .category = category,
         .labels = {"input-path", "flake-url"},
-        .handler = {[&](std::string inputPath, std::string flakeRef) {
+        .handler = {[&](std::string inputAttrPath, std::string flakeRef) {
             lockFlags.writeLockFile = false;
             lockFlags.inputOverrides.insert_or_assign(
-                flake::parseInputPath(inputPath),
+                flake::parseInputAttrPath(inputAttrPath),
                 parseFlakeRef(fetchSettings, flakeRef, absPath(getCommandBaseDir()), true));
         }},
         .completer = {[&](AddCompletions & completions, size_t n, std::string_view prefix) {
             if (n == 0) {
-                completeFlakeInputPath(completions, getEvalState(), getFlakeRefsForCompletion(), prefix);
+                completeFlakeInputAttrPath(completions, getEvalState(), getFlakeRefsForCompletion(), prefix);
             } else if (n == 1) {
                 completeFlakeRef(completions, getEvalState()->store, prefix);
             }
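The "input attribute path" in these handlers is the chain of input names from the root flake down to a nested input, written with slashes on the command line (the `dwarffs/nixpkgs` example comes from the `--override-input` description above). A minimal sketch of what that path refers to, using hypothetical flake inputs:

# Hypothetical flake: "dwarffs/nixpkgs" names the nixpkgs input *of* the dwarffs input.
{
  inputs = {
    nixpkgs.url = "github:NixOS/nixpkgs/nixos-24.11";
    dwarffs.url = "github:edolstra/dwarffs";
    # Declaring the same override in the flake itself, along that attribute path:
    dwarffs.inputs.nixpkgs.follows = "nixpkgs";
  };

  outputs = { self, nixpkgs, dwarffs }: { };
}

On the command line the equivalent would be something like `--override-input dwarffs/nixpkgs <flake-url>`, which is exactly what `parseInputAttrPath` splits on `/`.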
@ -1,32 +1,33 @@
|
|||
{ lib
|
||||
, stdenv
|
||||
, mkMesonLibrary
|
||||
{
|
||||
lib,
|
||||
stdenv,
|
||||
mkMesonLibrary,
|
||||
|
||||
, nix-util
|
||||
, nix-store
|
||||
, nix-fetchers
|
||||
, nix-expr
|
||||
, nix-flake
|
||||
, nix-main
|
||||
, editline
|
||||
, readline
|
||||
, lowdown
|
||||
, nlohmann_json
|
||||
nix-util,
|
||||
nix-store,
|
||||
nix-fetchers,
|
||||
nix-expr,
|
||||
nix-flake,
|
||||
nix-main,
|
||||
editline,
|
||||
readline,
|
||||
lowdown,
|
||||
nlohmann_json,
|
||||
|
||||
# Configuration Options
|
||||
# Configuration Options
|
||||
|
||||
, version
|
||||
version,
|
||||
|
||||
# Whether to enable Markdown rendering in the Nix binary.
|
||||
, enableMarkdown ? !stdenv.hostPlatform.isWindows
|
||||
# Whether to enable Markdown rendering in the Nix binary.
|
||||
enableMarkdown ? !stdenv.hostPlatform.isWindows,
|
||||
|
||||
# Which interactive line editor library to use for Nix's repl.
|
||||
#
|
||||
# Currently supported choices are:
|
||||
#
|
||||
# - editline (default)
|
||||
# - readline
|
||||
, readlineFlavor ? if stdenv.hostPlatform.isWindows then "readline" else "editline"
|
||||
# Which interactive line editor library to use for Nix's repl.
|
||||
#
|
||||
# Currently supported choices are:
|
||||
#
|
||||
# - editline (default)
|
||||
# - readline
|
||||
readlineFlavor ? if stdenv.hostPlatform.isWindows then "readline" else "editline",
|
||||
}:
|
||||
|
||||
let
|
||||
|
|
|
@@ -1,12 +1,13 @@
-{ lib
-, mkMesonLibrary
+{
+  lib,
+  mkMesonLibrary,

-, nix-store-c
-, nix-expr
+  nix-store-c,
+  nix-expr,

-# Configuration Options
+  # Configuration Options

-, version
+  version,
 }:

 let
@@ -1,15 +1,16 @@
-{ lib
-, mkMesonLibrary
+{
+  lib,
+  mkMesonLibrary,

-, nix-store-test-support
-, nix-expr
-, nix-expr-c
+  nix-store-test-support,
+  nix-expr,
+  nix-expr-c,

-, rapidcheck
+  rapidcheck,

-# Configuration Options
+  # Configuration Options

-, version
+  version,
 }:

 let
@ -1,20 +1,21 @@
|
|||
{ lib
|
||||
, buildPackages
|
||||
, stdenv
|
||||
, mkMesonExecutable
|
||||
{
|
||||
lib,
|
||||
buildPackages,
|
||||
stdenv,
|
||||
mkMesonExecutable,
|
||||
|
||||
, nix-expr
|
||||
, nix-expr-c
|
||||
, nix-expr-test-support
|
||||
nix-expr,
|
||||
nix-expr-c,
|
||||
nix-expr-test-support,
|
||||
|
||||
, rapidcheck
|
||||
, gtest
|
||||
, runCommand
|
||||
rapidcheck,
|
||||
gtest,
|
||||
runCommand,
|
||||
|
||||
# Configuration Options
|
||||
# Configuration Options
|
||||
|
||||
, version
|
||||
, resolvePath
|
||||
version,
|
||||
resolvePath,
|
||||
}:
|
||||
|
||||
let
|
||||
|
@ -58,16 +59,22 @@ mkMesonExecutable (finalAttrs: {
|
|||
|
||||
passthru = {
|
||||
tests = {
|
||||
run = runCommand "${finalAttrs.pname}-run" {
|
||||
meta.broken = !stdenv.hostPlatform.emulatorAvailable buildPackages;
|
||||
} (lib.optionalString stdenv.hostPlatform.isWindows ''
|
||||
export HOME="$PWD/home-dir"
|
||||
mkdir -p "$HOME"
|
||||
'' + ''
|
||||
export _NIX_TEST_UNIT_DATA=${resolvePath ./data}
|
||||
${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage}
|
||||
touch $out
|
||||
'');
|
||||
run =
|
||||
runCommand "${finalAttrs.pname}-run"
|
||||
{
|
||||
meta.broken = !stdenv.hostPlatform.emulatorAvailable buildPackages;
|
||||
}
|
||||
(
|
||||
lib.optionalString stdenv.hostPlatform.isWindows ''
|
||||
export HOME="$PWD/home-dir"
|
||||
mkdir -p "$HOME"
|
||||
''
|
||||
+ ''
|
||||
export _NIX_TEST_UNIT_DATA=${resolvePath ./data}
|
||||
${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage}
|
||||
touch $out
|
||||
''
|
||||
);
|
||||
};
|
||||
};
|
||||
|
||||
|
|
|
@@ -20,77 +20,77 @@ let
   # Resolve a input spec into a node name. An input spec is
   # either a node name, or a 'follows' path from the root
   # node.
-  resolveInput = inputSpec:
-    if builtins.isList inputSpec
-    then getInputByPath lockFile.root inputSpec
-    else inputSpec;
+  resolveInput =
+    inputSpec: if builtins.isList inputSpec then getInputByPath lockFile.root inputSpec else inputSpec;

-  # Follow an input path (e.g. ["dwarffs" "nixpkgs"]) from the
+  # Follow an input attrpath (e.g. ["dwarffs" "nixpkgs"]) from the
   # root node, returning the final node.
-  getInputByPath = nodeName: path:
-    if path == []
-    then nodeName
+  getInputByPath =
+    nodeName: path:
+    if path == [ ] then
+      nodeName
     else
       getInputByPath
         # Since this could be a 'follows' input, call resolveInput.
         (resolveInput lockFile.nodes.${nodeName}.inputs.${builtins.head path})
         (builtins.tail path);

-  allNodes =
-    builtins.mapAttrs
-      (key: node:
-        let
+  allNodes = builtins.mapAttrs (
+    key: node:
+    let

-          parentNode = allNodes.${getInputByPath lockFile.root node.parent};
+      parentNode = allNodes.${getInputByPath lockFile.root node.parent};

-          sourceInfo =
-            if overrides ? ${key}
-            then
-              overrides.${key}.sourceInfo
-            else if node.locked.type == "path" && builtins.substring 0 1 node.locked.path != "/"
-            then
-              parentNode.sourceInfo // {
-                outPath = parentNode.outPath + ("/" + node.locked.path);
-              }
-            else
-              # FIXME: remove obsolete node.info.
-              # Note: lock file entries are always final.
-              fetchTreeFinal (node.info or {} // removeAttrs node.locked ["dir"]);
+      sourceInfo =
+        if overrides ? ${key} then
+          overrides.${key}.sourceInfo
+        else if node.locked.type == "path" && builtins.substring 0 1 node.locked.path != "/" then
+          parentNode.sourceInfo
+          // {
+            outPath = parentNode.outPath + ("/" + node.locked.path);
+          }
+        else
+          # FIXME: remove obsolete node.info.
+          # Note: lock file entries are always final.
+          fetchTreeFinal (node.info or { } // removeAttrs node.locked [ "dir" ]);

-          subdir = overrides.${key}.dir or node.locked.dir or "";
+      subdir = overrides.${key}.dir or node.locked.dir or "";

-          outPath = sourceInfo + ((if subdir == "" then "" else "/") + subdir);
+      outPath = sourceInfo + ((if subdir == "" then "" else "/") + subdir);

-          flake = import (outPath + "/flake.nix");
+      flake = import (outPath + "/flake.nix");

-          inputs = builtins.mapAttrs
-            (inputName: inputSpec: allNodes.${resolveInput inputSpec})
-            (node.inputs or {});
+      inputs = builtins.mapAttrs (inputName: inputSpec: allNodes.${resolveInput inputSpec}) (
+        node.inputs or { }
+      );

-          outputs = flake.outputs (inputs // { self = result; });
+      outputs = flake.outputs (inputs // { self = result; });

-          result =
-            outputs
-            # We add the sourceInfo attribute for its metadata, as they are
-            # relevant metadata for the flake. However, the outPath of the
-            # sourceInfo does not necessarily match the outPath of the flake,
-            # as the flake may be in a subdirectory of a source.
-            # This is shadowed in the next //
-            // sourceInfo
-            // {
-              # This shadows the sourceInfo.outPath
-              inherit outPath;
+      result =
+        outputs
+        # We add the sourceInfo attribute for its metadata, as they are
+        # relevant metadata for the flake. However, the outPath of the
+        # sourceInfo does not necessarily match the outPath of the flake,
+        # as the flake may be in a subdirectory of a source.
+        # This is shadowed in the next //
+        // sourceInfo
+        // {
+          # This shadows the sourceInfo.outPath
+          inherit outPath;

-              inherit inputs; inherit outputs; inherit sourceInfo; _type = "flake";
-            };
+          inherit inputs;
+          inherit outputs;
+          inherit sourceInfo;
+          _type = "flake";
+        };

-        in
-          if node.flake or true then
-            assert builtins.isFunction flake.outputs;
-            result
-          else
-            sourceInfo
-      )
-      lockFile.nodes;
+    in
+    if node.flake or true then
+      assert builtins.isFunction flake.outputs;
+      result
+    else
+      sourceInfo
+  ) lockFile.nodes;

-in allNodes.${lockFile.root}
+in
+allNodes.${lockFile.root}
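The two helpers above are the heart of lock-file resolution: an input spec is either a node name (a string) or a "follows" attribute path (a list) that is walked from the root node. A self-contained sketch of that logic over a hypothetical lock graph (node names invented for illustration):

# Toy reimplementation of resolveInput/getInputByPath over made-up data.
let
  nodes = {
    root = { inputs = { dwarffs = "dwarffs"; nixpkgs = "nixpkgs"; }; };
    dwarffs = { inputs = { nixpkgs = [ "nixpkgs" ]; }; };  # a 'follows' edge
    nixpkgs = { inputs = { }; };
  };

  resolveInput =
    inputSpec: if builtins.isList inputSpec then getInputByPath "root" inputSpec else inputSpec;

  getInputByPath =
    nodeName: path:
    if path == [ ] then
      nodeName
    else
      getInputByPath (resolveInput nodes.${nodeName}.inputs.${builtins.head path}) (builtins.tail path);
in
resolveInput nodes.dwarffs.inputs.nixpkgs  # evaluates to "nixpkgs"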
@@ -3114,7 +3114,7 @@ std::optional<SourcePath> EvalState::resolveLookupPathPath(const LookupPath::Pat
         }
     }

-    if (path.pathExists())
+    if (path.resolveSymlinks().pathExists())
        return finish(std::move(path));
    else {
        logWarning({
@@ -1,40 +1,72 @@
-{ system ? "" # obsolete
-, url
-, hash ? "" # an SRI hash
+{
+  system ? "", # obsolete
+  url,
+  hash ? "", # an SRI hash

-# Legacy hash specification
-, md5 ? "", sha1 ? "", sha256 ? "", sha512 ? ""
-, outputHash ?
-    if hash != "" then hash else if sha512 != "" then sha512 else if sha1 != "" then sha1 else if md5 != "" then md5 else sha256
-, outputHashAlgo ?
-    if hash != "" then "" else if sha512 != "" then "sha512" else if sha1 != "" then "sha1" else if md5 != "" then "md5" else "sha256"
+  # Legacy hash specification
+  md5 ? "",
+  sha1 ? "",
+  sha256 ? "",
+  sha512 ? "",
+  outputHash ?
+    if hash != "" then
+      hash
+    else if sha512 != "" then
+      sha512
+    else if sha1 != "" then
+      sha1
+    else if md5 != "" then
+      md5
+    else
+      sha256,
+  outputHashAlgo ?
+    if hash != "" then
+      ""
+    else if sha512 != "" then
+      "sha512"
+    else if sha1 != "" then
+      "sha1"
+    else if md5 != "" then
+      "md5"
+    else
+      "sha256",

-, executable ? false
-, unpack ? false
-, name ? baseNameOf (toString url)
-, impure ? false
+  executable ? false,
+  unpack ? false,
+  name ? baseNameOf (toString url),
+  impure ? false,
 }:

-derivation ({
-  builder = "builtin:fetchurl";
+derivation (
+  {
+    builder = "builtin:fetchurl";

-  # New-style output content requirements.
-  outputHashMode = if unpack || executable then "recursive" else "flat";
+    # New-style output content requirements.
+    outputHashMode = if unpack || executable then "recursive" else "flat";

-  inherit name url executable unpack;
+    inherit
+      name
+      url
+      executable
+      unpack
+      ;

-  system = "builtin";
+    system = "builtin";

-  # No need to double the amount of network traffic
-  preferLocalBuild = true;
+    # No need to double the amount of network traffic
+    preferLocalBuild = true;

-  # This attribute does nothing; it's here to avoid changing evaluation results.
-  impureEnvVars = [
-    "http_proxy" "https_proxy" "ftp_proxy" "all_proxy" "no_proxy"
-  ];
+    # This attribute does nothing; it's here to avoid changing evaluation results.
+    impureEnvVars = [
+      "http_proxy"
+      "https_proxy"
+      "ftp_proxy"
+      "all_proxy"
+      "no_proxy"
+    ];

-  # To make "nix-prefetch-url" work.
-  urls = [ url ];
-} // (if impure
-  then { __impure = true; }
-  else { inherit outputHashAlgo outputHash; }))
+    # To make "nix-prefetch-url" work.
+    urls = [ url ];
+  }
+  // (if impure then { __impure = true; } else { inherit outputHashAlgo outputHash; })
+)
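The argument defaults above encode a precedence order: an SRI `hash` wins over every legacy hash, otherwise the first non-empty of sha512, sha1, md5, sha256 is used, and `outputHashAlgo` is derived accordingly. A usage sketch of this corepkgs file (the URL and hash are placeholders, not real values):

# With an SRI `hash`, outputHashAlgo stays "" because the algorithm is encoded
# in the hash itself; `unpack = true` switches outputHashMode to "recursive".
import <nix/fetchurl.nix> {
  url = "https://example.org/source.tar.gz";
  hash = "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=";
  unpack = true;
}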
@@ -1,21 +1,27 @@
-attrs @ { drvPath, outputs, name, ... }:
+attrs@{
+  drvPath,
+  outputs,
+  name,
+  ...
+}:

 let

-  commonAttrs = (builtins.listToAttrs outputsList) //
-    { all = map (x: x.value) outputsList;
-      inherit drvPath name;
-      type = "derivation";
-    };
+  commonAttrs = (builtins.listToAttrs outputsList) // {
+    all = map (x: x.value) outputsList;
+    inherit drvPath name;
+    type = "derivation";
+  };

-  outputToAttrListElement = outputName:
-    { name = outputName;
-      value = commonAttrs // {
-        outPath = builtins.getAttr outputName attrs;
-        inherit outputName;
-      };
+  outputToAttrListElement = outputName: {
+    name = outputName;
+    value = commonAttrs // {
+      outPath = builtins.getAttr outputName attrs;
+      inherit outputName;
+    };

   };

   outputsList = map outputToAttrListElement outputs;

-in (builtins.head outputsList).value
+in
+(builtins.head outputsList).value
@ -1,33 +1,34 @@
|
|||
{ lib
|
||||
, stdenv
|
||||
, mkMesonLibrary
|
||||
{
|
||||
lib,
|
||||
stdenv,
|
||||
mkMesonLibrary,
|
||||
|
||||
, bison
|
||||
, flex
|
||||
, cmake # for resolving toml11 dep
|
||||
bison,
|
||||
flex,
|
||||
cmake, # for resolving toml11 dep
|
||||
|
||||
, nix-util
|
||||
, nix-store
|
||||
, nix-fetchers
|
||||
, boost
|
||||
, boehmgc
|
||||
, nlohmann_json
|
||||
, toml11
|
||||
nix-util,
|
||||
nix-store,
|
||||
nix-fetchers,
|
||||
boost,
|
||||
boehmgc,
|
||||
nlohmann_json,
|
||||
toml11,
|
||||
|
||||
# Configuration Options
|
||||
# Configuration Options
|
||||
|
||||
, version
|
||||
version,
|
||||
|
||||
# Whether to use garbage collection for the Nix language evaluator.
|
||||
#
|
||||
# If it is disabled, we just leak memory, but this is not as bad as it
|
||||
# sounds so long as evaluation just takes places within short-lived
|
||||
# processes. (When the process exits, the memory is reclaimed; it is
|
||||
# only leaked *within* the process.)
|
||||
#
|
||||
# Temporarily disabled on Windows because the `GC_throw_bad_alloc`
|
||||
# symbol is missing during linking.
|
||||
, enableGC ? !stdenv.hostPlatform.isWindows
|
||||
# Whether to use garbage collection for the Nix language evaluator.
|
||||
#
|
||||
# If it is disabled, we just leak memory, but this is not as bad as it
|
||||
# sounds so long as evaluation just takes places within short-lived
|
||||
# processes. (When the process exits, the memory is reclaimed; it is
|
||||
# only leaked *within* the process.)
|
||||
#
|
||||
# Temporarily disabled on Windows because the `GC_throw_bad_alloc`
|
||||
# symbol is missing during linking.
|
||||
enableGC ? !stdenv.hostPlatform.isWindows,
|
||||
}:
|
||||
|
||||
let
|
||||
|
@ -51,10 +52,7 @@ mkMesonLibrary (finalAttrs: {
|
|||
(fileset.fileFilter (file: file.hasExt "hh") ./.)
|
||||
./lexer.l
|
||||
./parser.y
|
||||
(fileset.difference
|
||||
(fileset.fileFilter (file: file.hasExt "nix") ./.)
|
||||
./package.nix
|
||||
)
|
||||
(fileset.difference (fileset.fileFilter (file: file.hasExt "nix") ./.) ./package.nix)
|
||||
];
|
||||
|
||||
nativeBuildInputs = [
|
||||
|
|
|
@@ -26,27 +26,34 @@
   Note that `derivation` is very bare-bones, and provides almost no commands during the build.
   Most likely, you'll want to use functions like `stdenv.mkDerivation` in Nixpkgs to set up a basic environment.
 */
-drvAttrs @ { outputs ? [ "out" ], ... }:
+drvAttrs@{
+  outputs ? [ "out" ],
+  ...
+}:

 let

   strict = derivationStrict drvAttrs;

-  commonAttrs = drvAttrs // (builtins.listToAttrs outputsList) //
-    { all = map (x: x.value) outputsList;
+  commonAttrs =
+    drvAttrs
+    // (builtins.listToAttrs outputsList)
+    // {
+      all = map (x: x.value) outputsList;
       inherit drvAttrs;
     };

-  outputToAttrListElement = outputName:
-    { name = outputName;
-      value = commonAttrs // {
-        outPath = builtins.getAttr outputName strict;
-        drvPath = strict.drvPath;
-        type = "derivation";
-        inherit outputName;
-      };
+  outputToAttrListElement = outputName: {
+    name = outputName;
+    value = commonAttrs // {
+      outPath = builtins.getAttr outputName strict;
+      drvPath = strict.drvPath;
+      type = "derivation";
+      inherit outputName;
+    };
   };

   outputsList = map outputToAttrListElement outputs;

-in (builtins.head outputsList).value
+in
+(builtins.head outputsList).value
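This wrapper turns the raw `derivationStrict` result into one attrset per output, each carrying `outPath`, `drvPath`, `type` and `outputName`, plus an `all` list, and returns the first output's value. A sketch of what a caller sees (builder, system and args are placeholders chosen only for illustration):

let
  drv = derivation {
    name = "demo";
    system = builtins.currentSystem;
    builder = "/bin/sh";
    args = [ "-c" "echo hi > $out; echo hi > $doc" ];
    outputs = [ "out" "doc" ];
  };
in
{
  firstOutput = drv.outputName;                  # "out", the head of outputsList
  docPath = drv.doc.outPath;                     # the "doc" output's store path
  everyOutput = map (o: o.outputName) drv.all;   # [ "out" "doc" ]
}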
@ -1,19 +1,20 @@
|
|||
{ lib
|
||||
, buildPackages
|
||||
, stdenv
|
||||
, mkMesonExecutable
|
||||
{
|
||||
lib,
|
||||
buildPackages,
|
||||
stdenv,
|
||||
mkMesonExecutable,
|
||||
|
||||
, nix-fetchers
|
||||
, nix-store-test-support
|
||||
nix-fetchers,
|
||||
nix-store-test-support,
|
||||
|
||||
, rapidcheck
|
||||
, gtest
|
||||
, runCommand
|
||||
rapidcheck,
|
||||
gtest,
|
||||
runCommand,
|
||||
|
||||
# Configuration Options
|
||||
# Configuration Options
|
||||
|
||||
, version
|
||||
, resolvePath
|
||||
version,
|
||||
resolvePath,
|
||||
}:
|
||||
|
||||
let
|
||||
|
@ -56,16 +57,22 @@ mkMesonExecutable (finalAttrs: {
|
|||
|
||||
passthru = {
|
||||
tests = {
|
||||
run = runCommand "${finalAttrs.pname}-run" {
|
||||
meta.broken = !stdenv.hostPlatform.emulatorAvailable buildPackages;
|
||||
} (lib.optionalString stdenv.hostPlatform.isWindows ''
|
||||
export HOME="$PWD/home-dir"
|
||||
mkdir -p "$HOME"
|
||||
'' + ''
|
||||
export _NIX_TEST_UNIT_DATA=${resolvePath ./data}
|
||||
${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage}
|
||||
touch $out
|
||||
'');
|
||||
run =
|
||||
runCommand "${finalAttrs.pname}-run"
|
||||
{
|
||||
meta.broken = !stdenv.hostPlatform.emulatorAvailable buildPackages;
|
||||
}
|
||||
(
|
||||
lib.optionalString stdenv.hostPlatform.isWindows ''
|
||||
export HOME="$PWD/home-dir"
|
||||
mkdir -p "$HOME"
|
||||
''
|
||||
+ ''
|
||||
export _NIX_TEST_UNIT_DATA=${resolvePath ./data}
|
||||
${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage}
|
||||
touch $out
|
||||
''
|
||||
);
|
||||
};
|
||||
};
|
||||
|
||||
|
|
|
@@ -207,7 +207,8 @@ static git_packbuilder_progress PACKBUILDER_PROGRESS_CHECK_INTERRUPT = &packBuil

 } // extern "C"

-static void initRepoAtomically(std::filesystem::path &path, bool bare) {
+static void initRepoAtomically(std::filesystem::path &path, bool bare)
+{
     if (pathExists(path.string())) return;

     Path tmpDir = createTempDir(os_string_to_string(PathViewNG { std::filesystem::path(path).parent_path() }));
@@ -545,13 +546,10 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
        // then use code that was removed in this commit (see blame)

        auto dir = this->path;
-       Strings gitArgs;
-       if (shallow) {
-           gitArgs = { "-C", dir.string(), "fetch", "--quiet", "--force", "--depth", "1", "--", url, refspec };
-       }
-       else {
-           gitArgs = { "-C", dir.string(), "fetch", "--quiet", "--force", "--", url, refspec };
-       }
+       Strings gitArgs{"-C", dir.string(), "--git-dir", ".", "fetch", "--quiet", "--force"};
+       if (shallow)
+           append(gitArgs, {"--depth", "1"});
+       append(gitArgs, {std::string("--"), url, refspec});

        runProgram(RunOptions {
            .program = "git",
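The refactor builds one base `git fetch` argument list and only appends `--depth 1` when a shallow fetch is requested. For illustration only, a shallow `builtins.fetchGit` call is the kind of fetch that would exercise this path; the URL is a placeholder and the exact code path taken depends on fetcher configuration:

builtins.fetchGit {
  url = "https://github.com/NixOS/nix.git";
  ref = "master";
  shallow = true;  # corresponds to passing "--depth 1" in the arguments assembled above
}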
@@ -1,15 +1,16 @@
-{ lib
-, mkMesonLibrary
+{
+  lib,
+  mkMesonLibrary,

-, nix-util
-, nix-store
-, nlohmann_json
-, libgit2
-, curl
+  nix-util,
+  nix-store,
+  nlohmann_json,
+  libgit2,
+  curl,

-# Configuration Options
+  # Configuration Options

-, version
+  version,
 }:

 let
@@ -1,13 +1,14 @@
-{ lib
-, mkMesonLibrary
+{
+  lib,
+  mkMesonLibrary,

-, nix-store-c
-, nix-expr-c
-, nix-flake
+  nix-store-c,
+  nix-expr-c,
+  nix-flake,

-# Configuration Options
+  # Configuration Options

-, version
+  version,
 }:

 let
@ -1,20 +1,21 @@
|
|||
{ lib
|
||||
, buildPackages
|
||||
, stdenv
|
||||
, mkMesonExecutable
|
||||
{
|
||||
lib,
|
||||
buildPackages,
|
||||
stdenv,
|
||||
mkMesonExecutable,
|
||||
|
||||
, nix-flake
|
||||
, nix-flake-c
|
||||
, nix-expr-test-support
|
||||
nix-flake,
|
||||
nix-flake-c,
|
||||
nix-expr-test-support,
|
||||
|
||||
, rapidcheck
|
||||
, gtest
|
||||
, runCommand
|
||||
rapidcheck,
|
||||
gtest,
|
||||
runCommand,
|
||||
|
||||
# Configuration Options
|
||||
# Configuration Options
|
||||
|
||||
, version
|
||||
, resolvePath
|
||||
version,
|
||||
resolvePath,
|
||||
}:
|
||||
|
||||
let
|
||||
|
@ -58,17 +59,23 @@ mkMesonExecutable (finalAttrs: {
|
|||
|
||||
passthru = {
|
||||
tests = {
|
||||
run = runCommand "${finalAttrs.pname}-run" {
|
||||
meta.broken = !stdenv.hostPlatform.emulatorAvailable buildPackages;
|
||||
} (lib.optionalString stdenv.hostPlatform.isWindows ''
|
||||
export HOME="$PWD/home-dir"
|
||||
mkdir -p "$HOME"
|
||||
'' + ''
|
||||
export _NIX_TEST_UNIT_DATA=${resolvePath ./data}
|
||||
export NIX_CONFIG="extra-experimental-features = flakes"
|
||||
${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage}
|
||||
touch $out
|
||||
'');
|
||||
run =
|
||||
runCommand "${finalAttrs.pname}-run"
|
||||
{
|
||||
meta.broken = !stdenv.hostPlatform.emulatorAvailable buildPackages;
|
||||
}
|
||||
(
|
||||
lib.optionalString stdenv.hostPlatform.isWindows ''
|
||||
export HOME="$PWD/home-dir"
|
||||
mkdir -p "$HOME"
|
||||
''
|
||||
+ ''
|
||||
export _NIX_TEST_UNIT_DATA=${resolvePath ./data}
|
||||
export NIX_CONFIG="extra-experimental-features = flakes"
|
||||
${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage}
|
||||
touch $out
|
||||
''
|
||||
);
|
||||
};
|
||||
};
|
||||
|
||||
|
|
|
@ -105,7 +105,7 @@ static std::map<FlakeId, FlakeInput> parseFlakeInputs(
|
|||
EvalState & state,
|
||||
Value * value,
|
||||
const PosIdx pos,
|
||||
const InputPath & lockRootPath,
|
||||
const InputAttrPath & lockRootAttrPath,
|
||||
const SourcePath & flakeDir);
|
||||
|
||||
static FlakeInput parseFlakeInput(
|
||||
|
@ -113,7 +113,7 @@ static FlakeInput parseFlakeInput(
|
|||
std::string_view inputName,
|
||||
Value * value,
|
||||
const PosIdx pos,
|
||||
const InputPath & lockRootPath,
|
||||
const InputAttrPath & lockRootAttrPath,
|
||||
const SourcePath & flakeDir)
|
||||
{
|
||||
expectType(state, nAttrs, *value, pos);
|
||||
|
@ -137,7 +137,7 @@ static FlakeInput parseFlakeInput(
|
|||
else if (attr.value->type() == nPath) {
|
||||
auto path = attr.value->path();
|
||||
if (path.accessor != flakeDir.accessor)
|
||||
throw Error("input path '%s' at %s must be in the same source tree as %s",
|
||||
throw Error("input attribute path '%s' at %s must be in the same source tree as %s",
|
||||
path, state.positions[attr.pos], flakeDir);
|
||||
url = "path:" + flakeDir.path.makeRelative(path.path);
|
||||
}
|
||||
|
@ -149,11 +149,11 @@ static FlakeInput parseFlakeInput(
|
|||
expectType(state, nBool, *attr.value, attr.pos);
|
||||
input.isFlake = attr.value->boolean();
|
||||
} else if (attr.name == sInputs) {
|
||||
input.overrides = parseFlakeInputs(state, attr.value, attr.pos, lockRootPath, flakeDir);
|
||||
input.overrides = parseFlakeInputs(state, attr.value, attr.pos, lockRootAttrPath, flakeDir);
|
||||
} else if (attr.name == sFollows) {
|
||||
expectType(state, nString, *attr.value, attr.pos);
|
||||
auto follows(parseInputPath(attr.value->c_str()));
|
||||
follows.insert(follows.begin(), lockRootPath.begin(), lockRootPath.end());
|
||||
auto follows(parseInputAttrPath(attr.value->c_str()));
|
||||
follows.insert(follows.begin(), lockRootAttrPath.begin(), lockRootAttrPath.end());
|
||||
input.follows = follows;
|
||||
} else {
|
||||
// Allow selecting a subset of enum values
|
||||
|
@ -220,7 +220,7 @@ static std::map<FlakeId, FlakeInput> parseFlakeInputs(
|
|||
EvalState & state,
|
||||
Value * value,
|
||||
const PosIdx pos,
|
||||
const InputPath & lockRootPath,
|
||||
const InputAttrPath & lockRootAttrPath,
|
||||
const SourcePath & flakeDir)
|
||||
{
|
||||
std::map<FlakeId, FlakeInput> inputs;
|
||||
|
@ -233,7 +233,7 @@ static std::map<FlakeId, FlakeInput> parseFlakeInputs(
|
|||
state.symbols[inputAttr.name],
|
||||
inputAttr.value,
|
||||
inputAttr.pos,
|
||||
lockRootPath,
|
||||
lockRootAttrPath,
|
||||
flakeDir));
|
||||
}
|
||||
|
||||
|
@ -246,7 +246,7 @@ static Flake readFlake(
|
|||
const FlakeRef & resolvedRef,
|
||||
const FlakeRef & lockedRef,
|
||||
const SourcePath & rootDir,
|
||||
const InputPath & lockRootPath)
|
||||
const InputAttrPath & lockRootAttrPath)
|
||||
{
|
||||
auto flakeDir = rootDir / CanonPath(resolvedRef.subdir);
|
||||
auto flakePath = flakeDir / "flake.nix";
|
||||
|
@ -270,7 +270,7 @@ static Flake readFlake(
|
|||
auto sInputs = state.symbols.create("inputs");
|
||||
|
||||
if (auto inputs = vInfo.attrs()->get(sInputs))
|
||||
flake.inputs = parseFlakeInputs(state, inputs->value, inputs->pos, lockRootPath, flakeDir);
|
||||
flake.inputs = parseFlakeInputs(state, inputs->value, inputs->pos, lockRootAttrPath, flakeDir);
|
||||
|
||||
auto sOutputs = state.symbols.create("outputs");
|
||||
|
||||
|
@ -347,12 +347,12 @@ static Flake getFlake(
|
|||
const FlakeRef & originalRef,
|
||||
bool useRegistries,
|
||||
FlakeCache & flakeCache,
|
||||
const InputPath & lockRootPath)
|
||||
const InputAttrPath & lockRootAttrPath)
|
||||
{
|
||||
auto [storePath, resolvedRef, lockedRef] = fetchOrSubstituteTree(
|
||||
state, originalRef, useRegistries, flakeCache);
|
||||
|
||||
return readFlake(state, originalRef, resolvedRef, lockedRef, state.rootPath(state.store->toRealPath(storePath)), lockRootPath);
|
||||
return readFlake(state, originalRef, resolvedRef, lockedRef, state.rootPath(state.store->toRealPath(storePath)), lockRootAttrPath);
|
||||
}
|
||||
|
||||
Flake getFlake(EvalState & state, const FlakeRef & originalRef, bool useRegistries)
|
||||
|
@ -407,12 +407,12 @@ LockedFlake lockFlake(
|
|||
{
|
||||
FlakeInput input;
|
||||
SourcePath sourcePath;
|
||||
std::optional<InputPath> parentInputPath; // FIXME: rename to inputPathPrefix?
|
||||
std::optional<InputAttrPath> parentInputAttrPath; // FIXME: rename to inputAttrPathPrefix?
|
||||
};
|
||||
|
||||
std::map<InputPath, OverrideTarget> overrides;
|
||||
std::set<InputPath> explicitCliOverrides;
|
||||
std::set<InputPath> overridesUsed, updatesUsed;
|
||||
std::map<InputAttrPath, OverrideTarget> overrides;
|
||||
std::set<InputAttrPath> explicitCliOverrides;
|
||||
std::set<InputAttrPath> overridesUsed, updatesUsed;
|
||||
std::map<ref<Node>, SourcePath> nodePaths;
|
||||
|
||||
for (auto & i : lockFlags.inputOverrides) {
|
||||
|
@ -436,9 +436,9 @@ LockedFlake lockFlake(
|
|||
std::function<void(
|
||||
const FlakeInputs & flakeInputs,
|
||||
ref<Node> node,
|
||||
const InputPath & inputPathPrefix,
|
||||
const InputAttrPath & inputAttrPathPrefix,
|
||||
std::shared_ptr<const Node> oldNode,
|
||||
const InputPath & followsPrefix,
|
||||
const InputAttrPath & followsPrefix,
|
||||
const SourcePath & sourcePath,
|
||||
bool trustLock)>
|
||||
computeLocks;
|
||||
|
@ -450,7 +450,7 @@ LockedFlake lockFlake(
|
|||
/* The node whose locks are to be updated.*/
|
||||
ref<Node> node,
|
||||
/* The path to this node in the lock file graph. */
|
||||
const InputPath & inputPathPrefix,
|
||||
const InputAttrPath & inputAttrPathPrefix,
|
||||
/* The old node, if any, from which locks can be
|
||||
copied. */
|
||||
std::shared_ptr<const Node> oldNode,
|
||||
|
@ -458,59 +458,59 @@ LockedFlake lockFlake(
|
|||
interpreted. When a node is initially locked, it's
|
||||
relative to the node's flake; when it's already locked,
|
||||
it's relative to the root of the lock file. */
|
||||
const InputPath & followsPrefix,
|
||||
const InputAttrPath & followsPrefix,
|
||||
/* The source path of this node's flake. */
|
||||
const SourcePath & sourcePath,
|
||||
bool trustLock)
|
||||
{
|
||||
debug("computing lock file node '%s'", printInputPath(inputPathPrefix));
|
||||
debug("computing lock file node '%s'", printInputAttrPath(inputAttrPathPrefix));
|
||||
|
||||
/* Get the overrides (i.e. attributes of the form
|
||||
'inputs.nixops.inputs.nixpkgs.url = ...'). */
|
||||
for (auto & [id, input] : flakeInputs) {
|
||||
for (auto & [idOverride, inputOverride] : input.overrides) {
|
||||
auto inputPath(inputPathPrefix);
|
||||
inputPath.push_back(id);
|
||||
inputPath.push_back(idOverride);
|
||||
overrides.emplace(inputPath,
|
||||
auto inputAttrPath(inputAttrPathPrefix);
|
||||
inputAttrPath.push_back(id);
|
||||
inputAttrPath.push_back(idOverride);
|
||||
overrides.emplace(inputAttrPath,
|
||||
OverrideTarget {
|
||||
.input = inputOverride,
|
||||
.sourcePath = sourcePath,
|
||||
.parentInputPath = inputPathPrefix
|
||||
.parentInputAttrPath = inputAttrPathPrefix
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/* Check whether this input has overrides for a
|
||||
non-existent input. */
|
||||
for (auto [inputPath, inputOverride] : overrides) {
|
||||
auto inputPath2(inputPath);
|
||||
auto follow = inputPath2.back();
|
||||
inputPath2.pop_back();
|
||||
if (inputPath2 == inputPathPrefix && !flakeInputs.count(follow))
|
||||
for (auto [inputAttrPath, inputOverride] : overrides) {
|
||||
auto inputAttrPath2(inputAttrPath);
|
||||
auto follow = inputAttrPath2.back();
|
||||
inputAttrPath2.pop_back();
|
||||
if (inputAttrPath2 == inputAttrPathPrefix && !flakeInputs.count(follow))
|
||||
warn(
|
||||
"input '%s' has an override for a non-existent input '%s'",
|
||||
printInputPath(inputPathPrefix), follow);
|
||||
printInputAttrPath(inputAttrPathPrefix), follow);
|
||||
}
|
||||
|
||||
/* Go over the flake inputs, resolve/fetch them if
|
||||
necessary (i.e. if they're new or the flakeref changed
|
||||
from what's in the lock file). */
|
||||
for (auto & [id, input2] : flakeInputs) {
|
||||
auto inputPath(inputPathPrefix);
|
||||
inputPath.push_back(id);
|
||||
auto inputPathS = printInputPath(inputPath);
|
||||
debug("computing input '%s'", inputPathS);
|
||||
auto inputAttrPath(inputAttrPathPrefix);
|
||||
inputAttrPath.push_back(id);
|
||||
auto inputAttrPathS = printInputAttrPath(inputAttrPath);
|
||||
debug("computing input '%s'", inputAttrPathS);
|
||||
|
||||
try {
|
||||
|
||||
/* Do we have an override for this input from one of the
|
||||
ancestors? */
|
||||
auto i = overrides.find(inputPath);
|
||||
auto i = overrides.find(inputAttrPath);
|
||||
bool hasOverride = i != overrides.end();
|
||||
bool hasCliOverride = explicitCliOverrides.contains(inputPath);
|
||||
bool hasCliOverride = explicitCliOverrides.contains(inputAttrPath);
|
||||
if (hasOverride)
|
||||
overridesUsed.insert(inputPath);
|
||||
overridesUsed.insert(inputAttrPath);
|
||||
auto input = hasOverride ? i->second.input : input2;
|
||||
|
||||
/* Resolve relative 'path:' inputs relative to
|
||||
|
@ -525,11 +525,11 @@ LockedFlake lockFlake(
|
|||
/* Resolve 'follows' later (since it may refer to an input
|
||||
path we haven't processed yet. */
|
||||
if (input.follows) {
|
||||
InputPath target;
|
||||
InputAttrPath target;
|
||||
|
||||
target.insert(target.end(), input.follows->begin(), input.follows->end());
|
||||
|
||||
debug("input '%s' follows '%s'", inputPathS, printInputPath(target));
|
||||
debug("input '%s' follows '%s'", inputAttrPathS, printInputAttrPath(target));
|
||||
node->inputs.insert_or_assign(id, target);
|
||||
continue;
|
||||
}
|
||||
|
@ -538,7 +538,7 @@ LockedFlake lockFlake(
|
|||
|
||||
auto overridenParentPath =
|
||||
input.ref->input.isRelative()
|
||||
? std::optional<InputPath>(hasOverride ? i->second.parentInputPath : inputPathPrefix)
|
||||
? std::optional<InputAttrPath>(hasOverride ? i->second.parentInputAttrPath : inputAttrPathPrefix)
|
||||
: std::nullopt;
|
||||
|
||||
auto resolveRelativePath = [&]() -> std::optional<SourcePath>
|
||||
|
@ -557,9 +557,9 @@ LockedFlake lockFlake(
|
|||
auto getInputFlake = [&]()
|
||||
{
|
||||
if (auto resolvedPath = resolveRelativePath()) {
|
||||
return readFlake(state, *input.ref, *input.ref, *input.ref, *resolvedPath, inputPath);
|
||||
return readFlake(state, *input.ref, *input.ref, *input.ref, *resolvedPath, inputAttrPath);
|
||||
} else {
|
||||
return getFlake(state, *input.ref, useRegistries, flakeCache, inputPath);
|
||||
return getFlake(state, *input.ref, useRegistries, flakeCache, inputAttrPath);
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -567,19 +567,19 @@ LockedFlake lockFlake(
|
|||
And the input is not in updateInputs? */
|
||||
std::shared_ptr<LockedNode> oldLock;
|
||||
|
||||
updatesUsed.insert(inputPath);
|
||||
updatesUsed.insert(inputAttrPath);
|
||||
|
||||
if (oldNode && !lockFlags.inputUpdates.count(inputPath))
|
||||
if (oldNode && !lockFlags.inputUpdates.count(inputAttrPath))
|
||||
if (auto oldLock2 = get(oldNode->inputs, id))
|
||||
if (auto oldLock3 = std::get_if<0>(&*oldLock2))
|
||||
oldLock = *oldLock3;
|
||||
|
||||
if (oldLock
|
||||
&& oldLock->originalRef == *input.ref
|
||||
&& oldLock->parentPath == overridenParentPath
|
||||
&& oldLock->parentInputAttrPath == overridenParentPath
|
||||
&& !hasCliOverride)
|
||||
{
|
||||
debug("keeping existing input '%s'", inputPathS);
|
||||
debug("keeping existing input '%s'", inputAttrPathS);
|
||||
|
||||
/* Copy the input from the old lock since its flakeref
|
||||
didn't change and there is no override from a
|
||||
|
@ -588,18 +588,18 @@ LockedFlake lockFlake(
|
|||
oldLock->lockedRef,
|
||||
oldLock->originalRef,
|
||||
oldLock->isFlake,
|
||||
oldLock->parentPath);
|
||||
oldLock->parentInputAttrPath);
|
||||
|
||||
node->inputs.insert_or_assign(id, childNode);
|
||||
|
||||
/* If we have this input in updateInputs, then we
|
||||
must fetch the flake to update it. */
|
||||
auto lb = lockFlags.inputUpdates.lower_bound(inputPath);
|
||||
auto lb = lockFlags.inputUpdates.lower_bound(inputAttrPath);
|
||||
|
||||
auto mustRefetch =
|
||||
lb != lockFlags.inputUpdates.end()
|
||||
&& lb->size() > inputPath.size()
|
||||
&& std::equal(inputPath.begin(), inputPath.end(), lb->begin());
|
||||
&& lb->size() > inputAttrPath.size()
|
||||
&& std::equal(inputAttrPath.begin(), inputAttrPath.end(), lb->begin());
|
||||
|
||||
FlakeInputs fakeInputs;
|
||||
|
||||
|
@ -618,7 +618,7 @@ LockedFlake lockFlake(
|
|||
if (!trustLock) {
|
||||
// It is possible that the flake has changed,
|
||||
// so we must confirm all the follows that are in the lock file are also in the flake.
|
||||
auto overridePath(inputPath);
|
||||
auto overridePath(inputAttrPath);
|
||||
overridePath.push_back(i.first);
|
||||
auto o = overrides.find(overridePath);
|
||||
// If the override disappeared, we have to refetch the flake,
|
||||
|
@ -642,21 +642,21 @@ LockedFlake lockFlake(
|
|||
if (mustRefetch) {
|
||||
auto inputFlake = getInputFlake();
|
||||
nodePaths.emplace(childNode, inputFlake.path.parent());
|
||||
computeLocks(inputFlake.inputs, childNode, inputPath, oldLock, followsPrefix,
|
||||
computeLocks(inputFlake.inputs, childNode, inputAttrPath, oldLock, followsPrefix,
|
||||
inputFlake.path, false);
|
||||
} else {
|
||||
computeLocks(fakeInputs, childNode, inputPath, oldLock, followsPrefix, sourcePath, true);
|
||||
computeLocks(fakeInputs, childNode, inputAttrPath, oldLock, followsPrefix, sourcePath, true);
|
||||
}
|
||||
|
||||
} else {
|
||||
/* We need to create a new lock file entry. So fetch
|
||||
this input. */
|
||||
debug("creating new input '%s'", inputPathS);
|
||||
debug("creating new input '%s'", inputAttrPathS);
|
||||
|
||||
if (!lockFlags.allowUnlocked
|
||||
&& !input.ref->input.isLocked()
|
||||
&& !input.ref->input.isRelative())
|
||||
throw Error("cannot update unlocked flake input '%s' in pure mode", inputPathS);
|
||||
throw Error("cannot update unlocked flake input '%s' in pure mode", inputAttrPathS);
|
||||
|
||||
/* Note: in case of an --override-input, we use
|
||||
the *original* ref (input2.ref) for the
|
||||
|
@ -665,7 +665,7 @@ LockedFlake lockFlake(
|
|||
nuked the next time we update the lock
|
||||
file. That is, overrides are sticky unless you
|
||||
use --no-write-lock-file. */
|
||||
auto ref = (input2.ref && explicitCliOverrides.contains(inputPath)) ? *input2.ref : *input.ref;
|
||||
auto ref = (input2.ref && explicitCliOverrides.contains(inputAttrPath)) ? *input2.ref : *input.ref;
|
||||
|
||||
if (input.isFlake) {
|
||||
auto inputFlake = getInputFlake();
|
||||
|
@ -691,11 +691,11 @@ LockedFlake lockFlake(
|
|||
own lock file. */
|
||||
nodePaths.emplace(childNode, inputFlake.path.parent());
|
||||
computeLocks(
|
||||
inputFlake.inputs, childNode, inputPath,
|
||||
inputFlake.inputs, childNode, inputAttrPath,
|
||||
oldLock
|
||||
? std::dynamic_pointer_cast<const Node>(oldLock)
|
||||
: readLockFile(state.fetchSettings, inputFlake.lockFilePath()).root.get_ptr(),
|
||||
oldLock ? followsPrefix : inputPath,
|
||||
oldLock ? followsPrefix : inputAttrPath,
|
||||
inputFlake.path,
|
||||
false);
|
||||
}
|
||||
|
@ -722,7 +722,7 @@ LockedFlake lockFlake(
|
|||
}
|
||||
|
||||
} catch (Error & e) {
|
||||
e.addTrace({}, "while updating the flake input '%s'", inputPathS);
|
||||
e.addTrace({}, "while updating the flake input '%s'", inputAttrPathS);
|
||||
throw;
|
||||
}
|
||||
}
|
||||
|
@ -742,11 +742,11 @@ LockedFlake lockFlake(
|
|||
for (auto & i : lockFlags.inputOverrides)
|
||||
if (!overridesUsed.count(i.first))
|
||||
warn("the flag '--override-input %s %s' does not match any input",
|
||||
printInputPath(i.first), i.second);
|
||||
printInputAttrPath(i.first), i.second);
|
||||
|
||||
for (auto & i : lockFlags.inputUpdates)
|
||||
if (!updatesUsed.count(i))
|
||||
warn("'%s' does not match any input of this flake", printInputPath(i));
|
||||
warn("'%s' does not match any input of this flake", printInputAttrPath(i));
|
||||
|
||||
/* Check 'follows' inputs. */
|
||||
newLockFile.check();
|
||||
|
|
|
@@ -57,7 +57,7 @@ struct FlakeInput
      *  false = (fetched) static source path
      */
     bool isFlake = true;
-    std::optional<InputPath> follows;
+    std::optional<InputAttrPath> follows;
     FlakeInputs overrides;
 };

@@ -201,13 +201,13 @@ struct LockFlags
    /**
     * Flake inputs to be overridden.
     */
-   std::map<InputPath, FlakeRef> inputOverrides;
+   std::map<InputAttrPath, FlakeRef> inputOverrides;

    /**
     * Flake inputs to be updated. This means that any existing lock
     * for those inputs will be ignored.
     */
-   std::set<InputPath> inputUpdates;
+   std::set<InputAttrPath> inputUpdates;
 };

 LockedFlake lockFlake(
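An `InputAttrPath` is just the list of input names leading to a (possibly nested) input, and `FlakeInput::follows` stores such a path. A sketch of the data these fields model, written as hypothetical flake inputs:

{
  inputs = {
    dwarffs.url = "github:edolstra/dwarffs";
    # This input's `follows` corresponds to a two-element InputAttrPath
    # ([ "dwarffs" "nixpkgs" ]) resolved from the root flake:
    nixpkgs-for-tools.follows = "dwarffs/nixpkgs";
  };

  outputs = inputs: { };
}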
@ -43,7 +43,7 @@ LockedNode::LockedNode(
|
|||
: lockedRef(getFlakeRef(fetchSettings, json, "locked", "info")) // FIXME: remove "info"
|
||||
, originalRef(getFlakeRef(fetchSettings, json, "original", nullptr))
|
||||
, isFlake(json.find("flake") != json.end() ? (bool) json["flake"] : true)
|
||||
, parentPath(json.find("parent") != json.end() ? (std::optional<InputPath>) json["parent"] : std::nullopt)
|
||||
, parentInputAttrPath(json.find("parent") != json.end() ? (std::optional<InputAttrPath>) json["parent"] : std::nullopt)
|
||||
{
|
||||
if (!lockedRef.input.isConsideredLocked(fetchSettings) && !lockedRef.input.isRelative())
|
||||
throw Error("Lock file contains unlocked input '%s'. Use '--allow-dirty-locks' to accept this lock file.",
|
||||
|
@ -59,7 +59,7 @@ StorePath LockedNode::computeStorePath(Store & store) const
|
|||
return lockedRef.input.computeStorePath(store);
|
||||
}
|
||||
|
||||
static std::shared_ptr<Node> doFind(const ref<Node> & root, const InputPath & path, std::vector<InputPath> & visited)
|
||||
static std::shared_ptr<Node> doFind(const ref<Node> & root, const InputAttrPath & path, std::vector<InputAttrPath> & visited)
|
||||
{
|
||||
auto pos = root;
|
||||
|
||||
|
@ -67,8 +67,8 @@ static std::shared_ptr<Node> doFind(const ref<Node> & root, const InputPath & pa
|
|||
|
||||
if (found != visited.end()) {
|
||||
std::vector<std::string> cycle;
|
||||
std::transform(found, visited.cend(), std::back_inserter(cycle), printInputPath);
|
||||
cycle.push_back(printInputPath(path));
|
||||
std::transform(found, visited.cend(), std::back_inserter(cycle), printInputAttrPath);
|
||||
cycle.push_back(printInputAttrPath(path));
|
||||
throw Error("follow cycle detected: [%s]", concatStringsSep(" -> ", cycle));
|
||||
}
|
||||
visited.push_back(path);
|
||||
|
@ -90,9 +90,9 @@ static std::shared_ptr<Node> doFind(const ref<Node> & root, const InputPath & pa
|
|||
return pos;
|
||||
}
|
||||
|
||||
std::shared_ptr<Node> LockFile::findInput(const InputPath & path)
|
||||
std::shared_ptr<Node> LockFile::findInput(const InputAttrPath & path)
|
||||
{
|
||||
std::vector<InputPath> visited;
|
||||
std::vector<InputAttrPath> visited;
|
||||
return doFind(root, path, visited);
|
||||
}
|
||||
|
||||
|
@ -115,7 +115,7 @@ LockFile::LockFile(
|
|||
if (jsonNode.find("inputs") == jsonNode.end()) return;
|
||||
for (auto & i : jsonNode["inputs"].items()) {
|
||||
if (i.value().is_array()) { // FIXME: remove, obsolete
|
||||
InputPath path;
|
||||
InputAttrPath path;
|
||||
for (auto & j : i.value())
|
||||
path.push_back(j);
|
||||
node.inputs.insert_or_assign(i.key(), path);
|
||||
|
@ -203,8 +203,8 @@ std::pair<nlohmann::json, LockFile::KeyMap> LockFile::toJSON() const
|
|||
n["locked"].erase("__final");
|
||||
if (!lockedNode->isFlake)
|
||||
n["flake"] = false;
|
||||
if (lockedNode->parentPath)
|
||||
n["parent"] = *lockedNode->parentPath;
|
||||
if (lockedNode->parentInputAttrPath)
|
||||
n["parent"] = *lockedNode->parentInputAttrPath;
|
||||
}
|
||||
|
||||
nodes[key] = std::move(n);
|
||||
|
@ -267,36 +267,36 @@ bool LockFile::operator ==(const LockFile & other) const
|
|||
return toJSON().first == other.toJSON().first;
|
||||
}
|
||||
|
||||
InputPath parseInputPath(std::string_view s)
|
||||
InputAttrPath parseInputAttrPath(std::string_view s)
|
||||
{
|
||||
InputPath path;
|
||||
InputAttrPath path;
|
||||
|
||||
for (auto & elem : tokenizeString<std::vector<std::string>>(s, "/")) {
|
||||
if (!std::regex_match(elem, flakeIdRegex))
|
||||
throw UsageError("invalid flake input path element '%s'", elem);
|
||||
throw UsageError("invalid flake input attribute path element '%s'", elem);
|
||||
path.push_back(elem);
|
||||
}
|
||||
|
||||
return path;
|
||||
}
|
||||
|
||||
std::map<InputPath, Node::Edge> LockFile::getAllInputs() const
|
||||
std::map<InputAttrPath, Node::Edge> LockFile::getAllInputs() const
|
||||
{
|
||||
std::set<ref<Node>> done;
|
||||
std::map<InputPath, Node::Edge> res;
|
||||
std::map<InputAttrPath, Node::Edge> res;
|
||||
|
||||
std::function<void(const InputPath & prefix, ref<Node> node)> recurse;
|
||||
std::function<void(const InputAttrPath & prefix, ref<Node> node)> recurse;
|
||||
|
||||
recurse = [&](const InputPath & prefix, ref<Node> node)
|
||||
recurse = [&](const InputAttrPath & prefix, ref<Node> node)
|
||||
{
|
||||
if (!done.insert(node).second) return;
|
||||
|
||||
for (auto &[id, input] : node->inputs) {
|
||||
auto inputPath(prefix);
|
||||
inputPath.push_back(id);
|
||||
res.emplace(inputPath, input);
|
||||
auto inputAttrPath(prefix);
|
||||
inputAttrPath.push_back(id);
|
||||
res.emplace(inputAttrPath, input);
|
||||
if (auto child = std::get_if<0>(&input))
|
||||
recurse(inputPath, *child);
|
||||
recurse(inputAttrPath, *child);
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -320,7 +320,7 @@ std::ostream & operator <<(std::ostream & stream, const Node::Edge & edge)
|
|||
if (auto node = std::get_if<0>(&edge))
|
||||
stream << describe((*node)->lockedRef);
|
||||
else if (auto follows = std::get_if<1>(&edge))
|
||||
stream << fmt("follows '%s'", printInputPath(*follows));
|
||||
stream << fmt("follows '%s'", printInputAttrPath(*follows));
|
||||
return stream;
|
||||
}
|
||||
|
||||
|
@ -347,15 +347,15 @@ std::string LockFile::diff(const LockFile & oldLocks, const LockFile & newLocks)
|
|||
while (i != oldFlat.end() || j != newFlat.end()) {
|
||||
if (j != newFlat.end() && (i == oldFlat.end() || i->first > j->first)) {
|
||||
res += fmt("• " ANSI_GREEN "Added input '%s':" ANSI_NORMAL "\n %s\n",
|
||||
printInputPath(j->first), j->second);
|
||||
printInputAttrPath(j->first), j->second);
|
||||
++j;
|
||||
} else if (i != oldFlat.end() && (j == newFlat.end() || i->first < j->first)) {
|
||||
res += fmt("• " ANSI_RED "Removed input '%s'" ANSI_NORMAL "\n", printInputPath(i->first));
|
||||
res += fmt("• " ANSI_RED "Removed input '%s'" ANSI_NORMAL "\n", printInputAttrPath(i->first));
|
||||
++i;
|
||||
} else {
|
||||
if (!equals(i->second, j->second)) {
|
||||
res += fmt("• " ANSI_BOLD "Updated input '%s':" ANSI_NORMAL "\n %s\n → %s\n",
|
||||
printInputPath(i->first),
|
||||
printInputAttrPath(i->first),
|
||||
i->second,
|
||||
j->second);
|
||||
}
|
||||
|
@ -371,19 +371,19 @@ void LockFile::check()
|
|||
{
|
||||
auto inputs = getAllInputs();
|
||||
|
||||
for (auto & [inputPath, input] : inputs) {
|
||||
for (auto & [inputAttrPath, input] : inputs) {
|
||||
if (auto follows = std::get_if<1>(&input)) {
|
||||
if (!follows->empty() && !findInput(*follows))
|
||||
throw Error("input '%s' follows a non-existent input '%s'",
|
||||
printInputPath(inputPath),
|
||||
printInputPath(*follows));
|
||||
printInputAttrPath(inputAttrPath),
|
||||
printInputAttrPath(*follows));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void check();
|
||||
|
||||
std::string printInputPath(const InputPath & path)
|
||||
std::string printInputAttrPath(const InputAttrPath & path)
|
||||
{
|
||||
return concatStringsSep("/", path);
|
||||
}
|
||||
|
|
|
@@ -12,7 +12,7 @@ class StorePath;

 namespace nix::flake {

-typedef std::vector<FlakeId> InputPath;
+typedef std::vector<FlakeId> InputAttrPath;

 struct LockedNode;

@@ -23,7 +23,7 @@ struct LockedNode;
  */
 struct Node : std::enable_shared_from_this<Node>
 {
-    typedef std::variant<ref<LockedNode>, InputPath> Edge;
+    typedef std::variant<ref<LockedNode>, InputAttrPath> Edge;

     std::map<FlakeId, Edge> inputs;

@@ -40,17 +40,17 @@ struct LockedNode : Node

     /* The node relative to which relative source paths
        (e.g. 'path:../foo') are interpreted. */
-    std::optional<InputPath> parentPath;
+    std::optional<InputAttrPath> parentInputAttrPath;

     LockedNode(
         const FlakeRef & lockedRef,
         const FlakeRef & originalRef,
         bool isFlake = true,
-        std::optional<InputPath> parentPath = {})
-        : lockedRef(lockedRef)
-        , originalRef(originalRef)
+        std::optional<InputAttrPath> parentInputAttrPath = {})
+        : lockedRef(std::move(lockedRef))
+        , originalRef(std::move(originalRef))
         , isFlake(isFlake)
-        , parentPath(parentPath)
+        , parentInputAttrPath(std::move(parentInputAttrPath))
     { }

     LockedNode(
@@ -83,9 +83,9 @@ struct LockFile

     bool operator ==(const LockFile & other) const;

-    std::shared_ptr<Node> findInput(const InputPath & path);
+    std::shared_ptr<Node> findInput(const InputAttrPath & path);

-    std::map<InputPath, Node::Edge> getAllInputs() const;
+    std::map<InputAttrPath, Node::Edge> getAllInputs() const;

     static std::string diff(const LockFile & oldLocks, const LockFile & newLocks);

@@ -97,8 +97,8 @@ struct LockFile

 std::ostream & operator <<(std::ostream & stream, const LockFile & lockFile);

-InputPath parseInputPath(std::string_view s);
+InputAttrPath parseInputAttrPath(std::string_view s);

-std::string printInputPath(const InputPath & path);
+std::string printInputAttrPath(const InputAttrPath & path);

 }
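These types describe the lock-file graph: each `Node::Edge` is either a locked node or a "follows" attribute path. A sketch of the corresponding on-disk structure, shown as a Nix attrset (the real flake.lock is JSON; the node names and revs below are placeholders):

{
  root = "root";
  nodes = {
    root.inputs = {
      nixpkgs = "nixpkgs";       # edge to a locked node
      dwarffs = "dwarffs";
    };
    dwarffs.inputs = {
      nixpkgs = [ "nixpkgs" ];   # a "follows" edge, i.e. an InputAttrPath
    };
    dwarffs.locked = {
      type = "github";
      owner = "edolstra";
      repo = "dwarffs";
      rev = "0000000000000000000000000000000000000000";
    };
    nixpkgs.locked = {
      type = "github";
      owner = "NixOS";
      repo = "nixpkgs";
      rev = "0000000000000000000000000000000000000000";
    };
  };
  version = 7;
}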
@@ -1,15 +1,16 @@
-{ lib
-, mkMesonLibrary
+{
+  lib,
+  mkMesonLibrary,

-, nix-util
-, nix-store
-, nix-fetchers
-, nix-expr
-, nlohmann_json
+  nix-util,
+  nix-store,
+  nix-fetchers,
+  nix-expr,
+  nlohmann_json,

-# Configuration Options
+  # Configuration Options

-, version
+  version,
 }:

 let
@@ -1,14 +1,15 @@
-{ lib
-, mkMesonLibrary
+{
+  lib,
+  mkMesonLibrary,

-, nix-util-c
-, nix-store
-, nix-store-c
-, nix-main
+  nix-util-c,
+  nix-store,
+  nix-store-c,
+  nix-main,

-# Configuration Options
+  # Configuration Options

-, version
+  version,
 }:

 let
@@ -1,14 +1,15 @@
-{ lib
-, mkMesonLibrary
+{
+  lib,
+  mkMesonLibrary,

-, openssl
+  openssl,

-, nix-util
-, nix-store
+  nix-util,
+  nix-store,

-# Configuration Options
+  # Configuration Options

-, version
+  version,
 }:

 let
@@ -1,12 +1,13 @@
-{ lib
-, mkMesonLibrary
+{
+  lib,
+  mkMesonLibrary,

-, nix-util-c
-, nix-store
+  nix-util-c,
+  nix-store,

-# Configuration Options
+  # Configuration Options

-, version
+  version,
 }:

 let
@@ -1,15 +1,16 @@
-{ lib
-, mkMesonLibrary
+{
+  lib,
+  mkMesonLibrary,

-, nix-util-test-support
-, nix-store
-, nix-store-c
+  nix-util-test-support,
+  nix-store,
+  nix-store-c,

-, rapidcheck
+  rapidcheck,

-# Configuration Options
+  # Configuration Options

-, version
+  version,
 }:

 let
@ -1,21 +1,22 @@
|
|||
{ lib
|
||||
, buildPackages
|
||||
, stdenv
|
||||
, mkMesonExecutable
|
||||
{
|
||||
lib,
|
||||
buildPackages,
|
||||
stdenv,
|
||||
mkMesonExecutable,
|
||||
|
||||
, nix-store
|
||||
, nix-store-c
|
||||
, nix-store-test-support
|
||||
, sqlite
|
||||
nix-store,
|
||||
nix-store-c,
|
||||
nix-store-test-support,
|
||||
sqlite,
|
||||
|
||||
, rapidcheck
|
||||
, gtest
|
||||
, runCommand
|
||||
rapidcheck,
|
||||
gtest,
|
||||
runCommand,
|
||||
|
||||
# Configuration Options
|
||||
# Configuration Options
|
||||
|
||||
, version
|
||||
, filesetToSource
|
||||
version,
|
||||
filesetToSource,
|
||||
}:
|
||||
|
||||
let
|
||||
|
@@ -64,26 +65,33 @@ mkMesonExecutable (finalAttrs: {
 
   passthru = {
     tests = {
-      run = let
-        # Some data is shared with the functional tests: they create it,
-        # we consume it.
-        data = filesetToSource {
-          root = ../..;
-          fileset = lib.fileset.unions [
-            ./data
-            ../../tests/functional/derivation
-          ];
-        };
-      in runCommand "${finalAttrs.pname}-run" {
-        meta.broken = !stdenv.hostPlatform.emulatorAvailable buildPackages;
-      } (lib.optionalString stdenv.hostPlatform.isWindows ''
-        export HOME="$PWD/home-dir"
-        mkdir -p "$HOME"
-      '' + ''
-        export _NIX_TEST_UNIT_DATA=${data + "/src/libstore-tests/data"}
-        ${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage}
-        touch $out
-      '');
+      run =
+        let
+          # Some data is shared with the functional tests: they create it,
+          # we consume it.
+          data = filesetToSource {
+            root = ../..;
+            fileset = lib.fileset.unions [
+              ./data
+              ../../tests/functional/derivation
+            ];
+          };
+        in
+        runCommand "${finalAttrs.pname}-run"
+          {
+            meta.broken = !stdenv.hostPlatform.emulatorAvailable buildPackages;
+          }
+          (
+            lib.optionalString stdenv.hostPlatform.isWindows ''
+              export HOME="$PWD/home-dir"
+              mkdir -p "$HOME"
+            ''
+            + ''
+              export _NIX_TEST_UNIT_DATA=${data + "/src/libstore-tests/data"}
+              ${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage}
+              touch $out
+            ''
+          );
     };
   };
 

@@ -1,25 +1,26 @@
-{ lib
-, stdenv
-, mkMesonLibrary
+{
+  lib,
+  stdenv,
+  mkMesonLibrary,
 
-, unixtools
-, darwin
+  unixtools,
+  darwin,
 
-, nix-util
-, boost
-, curl
-, aws-sdk-cpp
-, libseccomp
-, nlohmann_json
-, sqlite
+  nix-util,
+  boost,
+  curl,
+  aws-sdk-cpp,
+  libseccomp,
+  nlohmann_json,
+  sqlite,
 
-, busybox-sandbox-shell ? null
+  busybox-sandbox-shell ? null,
 
-# Configuration Options
+  # Configuration Options
 
-, version
+  version,
 
-, embeddedSandboxShell ? stdenv.hostPlatform.isStatic
+  embeddedSandboxShell ? stdenv.hostPlatform.isStatic,
 }:
 
 let

@@ -48,19 +49,20 @@ mkMesonLibrary (finalAttrs: {
     (fileset.fileFilter (file: file.hasExt "sql") ./.)
   ];
 
-  nativeBuildInputs =
-    lib.optional embeddedSandboxShell unixtools.hexdump;
+  nativeBuildInputs = lib.optional embeddedSandboxShell unixtools.hexdump;
 
-  buildInputs = [
-    boost
-    curl
-    sqlite
-  ] ++ lib.optional stdenv.hostPlatform.isLinux libseccomp
+  buildInputs =
+    [
+      boost
+      curl
+      sqlite
+    ]
+    ++ lib.optional stdenv.hostPlatform.isLinux libseccomp
     # There have been issues building these dependencies
     ++ lib.optional stdenv.hostPlatform.isDarwin darwin.apple_sdk.libs.sandbox
-    ++ lib.optional (stdenv.hostPlatform == stdenv.buildPlatform && (stdenv.isLinux || stdenv.isDarwin))
-      aws-sdk-cpp
-  ;
+    ++ lib.optional (
+      stdenv.hostPlatform == stdenv.buildPlatform && (stdenv.isLinux || stdenv.isDarwin)
+    ) aws-sdk-cpp;
 
   propagatedBuildInputs = [
     nix-util

@@ -75,12 +77,14 @@ mkMesonLibrary (finalAttrs: {
     echo ${version} > ../../.version
   '';
 
-  mesonFlags = [
-    (lib.mesonEnable "seccomp-sandboxing" stdenv.hostPlatform.isLinux)
-    (lib.mesonBool "embedded-sandbox-shell" embeddedSandboxShell)
-  ] ++ lib.optionals stdenv.hostPlatform.isLinux [
-    (lib.mesonOption "sandbox-shell" "${busybox-sandbox-shell}/bin/busybox")
-  ];
+  mesonFlags =
+    [
+      (lib.mesonEnable "seccomp-sandboxing" stdenv.hostPlatform.isLinux)
+      (lib.mesonBool "embedded-sandbox-shell" embeddedSandboxShell)
+    ]
+    ++ lib.optionals stdenv.hostPlatform.isLinux [
+      (lib.mesonOption "sandbox-shell" "${busybox-sandbox-shell}/bin/busybox")
+    ];
 
   env = {
     # Needed for Meson to find Boost.

@@ -539,11 +539,21 @@ void RemoteStore::addMultipleToStore(
     RepairFlag repair,
     CheckSigsFlag checkSigs)
 {
+    // `addMultipleToStore` is single threaded
+    size_t bytesExpected = 0;
+    for (auto & [pathInfo, _] : pathsToCopy) {
+        bytesExpected += pathInfo.narSize;
+    }
+    act.setExpected(actCopyPath, bytesExpected);
+
     auto source = sinkToSource([&](Sink & sink) {
-        sink << pathsToCopy.size();
+        size_t nrTotal = pathsToCopy.size();
+        sink << nrTotal;
         // Reverse, so we can release memory at the original start
         std::reverse(pathsToCopy.begin(), pathsToCopy.end());
         while (!pathsToCopy.empty()) {
+            act.progress(nrTotal - pathsToCopy.size(), nrTotal, size_t(1), size_t(0));
+
             auto & [pathInfo, pathSource] = pathsToCopy.back();
             WorkerProto::Serialise<ValidPathInfo>::write(*this,
                 WorkerProto::WriteConn {

@@ -242,8 +242,8 @@ void Store::addMultipleToStore(
         storePathsToAdd.insert(thingToAdd.first.path);
     }
 
-    auto showProgress = [&]() {
-        act.progress(nrDone, pathsToCopy.size(), nrRunning, nrFailed);
+    auto showProgress = [&, nrTotal = pathsToCopy.size()]() {
+        act.progress(nrDone, nrTotal, nrRunning, nrFailed);
     };
 
     processGraph<StorePath>(

@@ -1104,9 +1104,6 @@ std::map<StorePath, StorePath> copyPaths(
         return storePathForDst;
     };
 
-    // total is accessed by each copy, which are each handled in separate threads
-    std::atomic<uint64_t> total = 0;
-
     for (auto & missingPath : sortedMissing) {
         auto info = srcStore.queryPathInfo(missingPath);
 

@@ -1116,9 +1113,10 @@ std::map<StorePath, StorePath> copyPaths(
         ValidPathInfo infoForDst = *info;
         infoForDst.path = storePathForDst;
 
-        auto source = sinkToSource([&](Sink & sink) {
+        auto source = sinkToSource([&, narSize = info->narSize](Sink & sink) {
             // We can reasonably assume that the copy will happen whenever we
             // read the path, so log something about that at that point
+            uint64_t total = 0;
             auto srcUri = srcStore.getUri();
             auto dstUri = dstStore.getUri();
             auto storePathS = srcStore.printStorePath(missingPath);

@@ -1129,13 +1127,13 @@ std::map<StorePath, StorePath> copyPaths(
 
             LambdaSink progressSink([&](std::string_view data) {
                 total += data.size();
-                act.progress(total, info->narSize);
+                act.progress(total, narSize);
             });
             TeeSink tee { sink, progressSink };
 
             srcStore.narFromPath(missingPath, tee);
         });
-        pathsToCopy.push_back(std::pair{infoForDst, std::move(source)});
+        pathsToCopy.emplace_back(std::move(infoForDst), std::move(source));
     }
 
     dstStore.addMultipleToStore(std::move(pathsToCopy), act, repair, checkSigs);

@@ -2565,7 +2565,7 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
             case FileIngestionMethod::Git: {
                 return git::dumpHash(
                     outputHash.hashAlgo,
-                    {getFSSourceAccessor(), CanonPath(tmpDir + "/tmp")}).hash;
+                    {getFSSourceAccessor(), CanonPath(actualPath)}).hash;
             }
             }
             assert(false);

@@ -2657,10 +2657,14 @@ SingleDrvOutputs LocalDerivationGoal::registerOutputs()
                         wanted.to_string(HashFormat::SRI, true),
                         got.to_string(HashFormat::SRI, true)));
                 }
-                if (!newInfo0.references.empty())
+                if (!newInfo0.references.empty()) {
+                    auto numViolations = newInfo.references.size();
                     delayedException = std::make_exception_ptr(
-                        BuildError("illegal path references in fixed-output derivation '%s'",
-                            worker.store.printStorePath(drvPath)));
+                        BuildError("fixed-output derivations must not reference store paths: '%s' references %d distinct paths, e.g. '%s'",
+                            worker.store.printStorePath(drvPath),
+                            numViolations,
+                            worker.store.printStorePath(*newInfo.references.begin())));
+                }
 
                 return newInfo0;
             },

@@ -1,11 +1,12 @@
-{ lib
-, mkMesonLibrary
+{
+  lib,
+  mkMesonLibrary,
 
-, nix-util
+  nix-util,
 
-# Configuration Options
+  # Configuration Options
 
-, version
+  version,
 }:
 
 let

@@ -1,14 +1,15 @@
-{ lib
-, mkMesonLibrary
+{
+  lib,
+  mkMesonLibrary,
 
-, nix-util
-, nix-util-c
+  nix-util,
+  nix-util-c,
 
-, rapidcheck
+  rapidcheck,
 
-# Configuration Options
+  # Configuration Options
 
-, version
+  version,
 }:
 
 let

@@ -1,19 +1,20 @@
-{ lib
-, buildPackages
-, stdenv
-, mkMesonExecutable
+{
+  lib,
+  buildPackages,
+  stdenv,
+  mkMesonExecutable,
 
-, nix-util
-, nix-util-c
-, nix-util-test-support
+  nix-util,
+  nix-util-c,
+  nix-util-test-support,
 
-, rapidcheck
-, gtest
-, runCommand
+  rapidcheck,
+  gtest,
+  runCommand,
 
-# Configuration Options
+  # Configuration Options
 
-, version
+  version,
 }:
 
 let

@@ -57,16 +58,22 @@ mkMesonExecutable (finalAttrs: {
 
   passthru = {
     tests = {
-      run = runCommand "${finalAttrs.pname}-run" {
-        meta.broken = !stdenv.hostPlatform.emulatorAvailable buildPackages;
-      } (lib.optionalString stdenv.hostPlatform.isWindows ''
-        export HOME="$PWD/home-dir"
-        mkdir -p "$HOME"
-      '' + ''
-        export _NIX_TEST_UNIT_DATA=${./data}
-        ${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage}
-        touch $out
-      '');
+      run =
+        runCommand "${finalAttrs.pname}-run"
+          {
+            meta.broken = !stdenv.hostPlatform.emulatorAvailable buildPackages;
+          }
+          (
+            lib.optionalString stdenv.hostPlatform.isWindows ''
+              export HOME="$PWD/home-dir"
+              mkdir -p "$HOME"
+            ''
+            + ''
+              export _NIX_TEST_UNIT_DATA=${./data}
+              ${stdenv.hostPlatform.emulator buildPackages} ${lib.getExe finalAttrs.finalPackage}
+              touch $out
+            ''
+          );
     };
   };
 

@@ -1,18 +1,19 @@
-{ lib
-, stdenv
-, mkMesonLibrary
+{
+  lib,
+  stdenv,
+  mkMesonLibrary,
 
-, boost
-, brotli
-, libarchive
-, libcpuid
-, libsodium
-, nlohmann_json
-, openssl
+  boost,
+  brotli,
+  libarchive,
+  libcpuid,
+  libsodium,
+  nlohmann_json,
+  openssl,
 
-# Configuration Options
+  # Configuration Options
 
-, version
+  version,
 }:
 
 let

@@ -43,8 +44,7 @@ mkMesonLibrary (finalAttrs: {
     brotli
     libsodium
     openssl
-  ] ++ lib.optional stdenv.hostPlatform.isx86_64 libcpuid
-  ;
+  ] ++ lib.optional stdenv.hostPlatform.isx86_64 libcpuid;
 
   propagatedBuildInputs = [
     boost

@@ -274,6 +274,17 @@ std::optional<typename T::value_type> pop(T & c)
 }
 
 
+/**
+ * Append items to a container. TODO: remove this once we can use
+ * C++23's `append_range()`.
+ */
+template<class C, typename T>
+void append(C & c, std::initializer_list<T> l)
+{
+    c.insert(c.end(), l.begin(), l.end());
+}
+
+
 template<typename T>
 class Callback;
 

@@ -1,4 +1,8 @@
-{ name, channelName, src }:
+{
+  name,
+  channelName,
+  src,
+}:
 
 derivation {
   builder = "builtin:unpack-channel";

@@ -8,13 +8,15 @@ derivation {
   inherit manifest;
 
   # !!! grmbl, need structured data for passing this in a clean way.
-  derivations =
-    map (d:
-      [ (d.meta.active or "true")
-        (d.meta.priority or 5)
-        (builtins.length d.outputs)
-      ] ++ map (output: builtins.getAttr output d) d.outputs)
-      derivations;
+  derivations = map (
+    d:
+    [
+      (d.meta.active or "true")
+      (d.meta.priority or 5)
+      (builtins.length d.outputs)
+    ]
+    ++ map (output: builtins.getAttr output d) d.outputs
+  ) derivations;
 
   # Building user environments remotely just causes huge amounts of
   # network traffic, so don't do that.

@@ -95,20 +95,20 @@ public:
             .optional=true,
             .handler={[&](std::vector<std::string> inputsToUpdate){
                 for (const auto & inputToUpdate : inputsToUpdate) {
-                    InputPath inputPath;
+                    InputAttrPath inputAttrPath;
                     try {
-                        inputPath = flake::parseInputPath(inputToUpdate);
+                        inputAttrPath = flake::parseInputAttrPath(inputToUpdate);
                     } catch (Error & e) {
                         warn("Invalid flake input '%s'. To update a specific flake, use 'nix flake update --flake %s' instead.", inputToUpdate, inputToUpdate);
                         throw e;
                     }
-                    if (lockFlags.inputUpdates.contains(inputPath))
-                        warn("Input '%s' was specified multiple times. You may have done this by accident.");
-                    lockFlags.inputUpdates.insert(inputPath);
+                    if (lockFlags.inputUpdates.contains(inputAttrPath))
+                        warn("Input '%s' was specified multiple times. You may have done this by accident.", printInputAttrPath(inputAttrPath));
+                    lockFlags.inputUpdates.insert(inputAttrPath);
                 }
             }},
             .completer = {[&](AddCompletions & completions, size_t, std::string_view prefix) {
-                completeFlakeInputPath(completions, getEvalState(), getFlakeRefsForCompletion(), prefix);
+                completeFlakeInputAttrPath(completions, getEvalState(), getFlakeRefsForCompletion(), prefix);
             }}
         });
 

@@ -304,7 +304,7 @@ struct CmdFlakeMetadata : FlakeCommand, MixJSON
             } else if (auto follows = std::get_if<1>(&input.second)) {
                 logger->cout("%s" ANSI_BOLD "%s" ANSI_NORMAL " follows input '%s'",
                     prefix + (last ? treeLast : treeConn), input.first,
-                    printInputPath(*follows));
+                    printInputAttrPath(*follows));
             }
         }
     };

@@ -1,14 +1,15 @@
-{ lib
-, mkMesonExecutable
+{
+  lib,
+  mkMesonExecutable,
 
-, nix-store
-, nix-expr
-, nix-main
-, nix-cmd
+  nix-store,
+  nix-expr,
+  nix-main,
+  nix-cmd,
 
-# Configuration Options
+  # Configuration Options
 
-, version
+  version,
 }:
 
 let

@@ -20,64 +21,67 @@ mkMesonExecutable (finalAttrs: {
   inherit version;
 
   workDir = ./.;
-  fileset = fileset.unions ([
-    ../../nix-meson-build-support
-    ./nix-meson-build-support
-    ../../.version
-    ./.version
-    ./meson.build
-    ./meson.options
-
-    # Symbolic links to other dirs
-    ## exes
-    ./build-remote
-    ./doc
-    ./nix-build
-    ./nix-channel
-    ./nix-collect-garbage
-    ./nix-copy-closure
-    ./nix-env
-    ./nix-instantiate
-    ./nix-store
-    ## dirs
-    ./scripts
-    ../../scripts
-    ./misc
-    ../../misc
-
-    # Doc nix files for --help
-    ../../doc/manual/generate-manpage.nix
-    ../../doc/manual/utils.nix
-    ../../doc/manual/generate-settings.nix
-    ../../doc/manual/generate-store-info.nix
-
-    # Other files to be included as string literals
-    ../nix-channel/unpack-channel.nix
-    ../nix-env/buildenv.nix
-    ./get-env.sh
-    ./help-stores.md
-    ../../doc/manual/source/store/types/index.md.in
-    ./profiles.md
-    ../../doc/manual/source/command-ref/files/profiles.md
-
-    # Files
-  ] ++ lib.concatMap
-    (dir: [
-      (fileset.fileFilter (file: file.hasExt "cc") dir)
-      (fileset.fileFilter (file: file.hasExt "hh") dir)
-      (fileset.fileFilter (file: file.hasExt "md") dir)
-    ])
+  fileset = fileset.unions (
+    [
+      ./.
+      ../build-remote
+      ../nix-build
+      ../nix-channel
+      ../nix-collect-garbage
+      ../nix-copy-closure
+      ../nix-env
+      ../nix-instantiate
+      ../nix-store
+      ../../nix-meson-build-support
+      ./nix-meson-build-support
+      ../../.version
+      ./.version
+      ./meson.build
+      ./meson.options
+
+      # Symbolic links to other dirs
+      ## exes
+      ./build-remote
+      ./doc
+      ./nix-build
+      ./nix-channel
+      ./nix-collect-garbage
+      ./nix-copy-closure
+      ./nix-env
+      ./nix-instantiate
+      ./nix-store
+      ## dirs
+      ./scripts
+      ../../scripts
+      ./misc
+      ../../misc
+
+      # Doc nix files for --help
+      ../../doc/manual/generate-manpage.nix
+      ../../doc/manual/utils.nix
+      ../../doc/manual/generate-settings.nix
+      ../../doc/manual/generate-store-info.nix
+
+      # Other files to be included as string literals
+      ../nix-channel/unpack-channel.nix
+      ../nix-env/buildenv.nix
+      ./get-env.sh
+      ./help-stores.md
+      ../../doc/manual/source/store/types/index.md.in
+      ./profiles.md
+      ../../doc/manual/source/command-ref/files/profiles.md
+
+      # Files
+    ]
+    ++
+      lib.concatMap
+        (dir: [
+          (fileset.fileFilter (file: file.hasExt "cc") dir)
+          (fileset.fileFilter (file: file.hasExt "hh") dir)
+          (fileset.fileFilter (file: file.hasExt "md") dir)
+        ])
+        [
+          ./.
+          ../build-remote
+          ../nix-build
+          ../nix-channel
+          ../nix-collect-garbage
+          ../nix-copy-closure
+          ../nix-env
+          ../nix-instantiate
+          ../nix-store
+        ]
+  );
 
   buildInputs = [

@@ -1,76 +1,82 @@
-{ lib
-, stdenv
-, mkMesonDerivation
-, pkg-config
-, perl
-, perlPackages
-, nix-store
-, version
-, curl
-, bzip2
-, libsodium
+{
+  lib,
+  stdenv,
+  mkMesonDerivation,
+  pkg-config,
+  perl,
+  perlPackages,
+  nix-store,
+  version,
+  curl,
+  bzip2,
+  libsodium,
 }:
 
 let
   inherit (lib) fileset;
 in
 
-perl.pkgs.toPerlModule (mkMesonDerivation (finalAttrs: {
-  pname = "nix-perl";
-  inherit version;
+perl.pkgs.toPerlModule (
+  mkMesonDerivation (finalAttrs: {
+    pname = "nix-perl";
+    inherit version;
 
-  workDir = ./.;
-  fileset = fileset.unions ([
-    ./.version
-    ../../.version
-    ./MANIFEST
-    ./lib
-    ./meson.build
-    ./meson.options
-  ] ++ lib.optionals finalAttrs.doCheck [
-    ./.yath.rc.in
-    ./t
-  ]);
+    workDir = ./.;
+    fileset = fileset.unions (
+      [
+        ./.version
+        ../../.version
+        ./MANIFEST
+        ./lib
+        ./meson.build
+        ./meson.options
+      ]
+      ++ lib.optionals finalAttrs.doCheck [
+        ./.yath.rc.in
+        ./t
+      ]
+    );
 
-  nativeBuildInputs = [
-    pkg-config
-    perl
-    curl
-  ];
+    nativeBuildInputs = [
+      pkg-config
+      perl
+      curl
+    ];
 
-  buildInputs = [
-    nix-store
-  ] ++ finalAttrs.passthru.externalBuildInputs;
+    buildInputs = [
+      nix-store
+    ] ++ finalAttrs.passthru.externalBuildInputs;
 
-  # Hack for sake of the dev shell
-  passthru.externalBuildInputs = [
-    bzip2
-    libsodium
-  ];
+    # Hack for sake of the dev shell
+    passthru.externalBuildInputs = [
+      bzip2
+      libsodium
+    ];
 
-  # `perlPackages.Test2Harness` is marked broken for Darwin
-  doCheck = !stdenv.isDarwin;
+    # `perlPackages.Test2Harness` is marked broken for Darwin
+    doCheck = !stdenv.isDarwin;
 
-  nativeCheckInputs = [
-    perlPackages.Test2Harness
-  ];
+    nativeCheckInputs = [
+      perlPackages.Test2Harness
+    ];
 
-  preConfigure =
-    # "Inline" .version so its not a symlink, and includes the suffix
-    ''
-      chmod u+w .version
-      echo ${finalAttrs.version} > .version
-    '';
+    preConfigure =
+      # "Inline" .version so its not a symlink, and includes the suffix
+      ''
+        chmod u+w .version
+        echo ${finalAttrs.version} > .version
+      '';
 
-  mesonFlags = [
-    (lib.mesonOption "dbi_path" "${perlPackages.DBI}/${perl.libPrefix}")
-    (lib.mesonOption "dbd_sqlite_path" "${perlPackages.DBDSQLite}/${perl.libPrefix}")
-    (lib.mesonEnable "tests" finalAttrs.doCheck)
-  ];
+    mesonFlags = [
+      (lib.mesonOption "dbi_path" "${perlPackages.DBI}/${perl.libPrefix}")
+      (lib.mesonOption "dbd_sqlite_path" "${perlPackages.DBDSQLite}/${perl.libPrefix}")
+      (lib.mesonEnable "tests" finalAttrs.doCheck)
+    ];
 
-  mesonCheckFlags = [
-    "--print-errorlogs"
-  ];
+    mesonCheckFlags = [
+      "--print-errorlogs"
+    ];
 
-  strictDeps = false;
-}))
+    strictDeps = false;
+  })
+)