Mirror of https://github.com/NixOS/nix, synced 2025-07-06 21:41:48 +02:00
OCD: foreach -> C++11 ranged for

parent 1511aa9f48
commit 6bd2c7bb38
30 changed files with 849 additions and 874 deletions
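The pattern applied throughout this commit is the same everywhere: Nix's old foreach helper macro, which expands to an explicit iterator loop, is replaced by a C++11 range-based for. The standalone sketch below illustrates the two styles side by side; the macro shown is only an approximation of the one from Nix's libutil headers, not a verbatim copy, and the sample paths are invented.

#include <iostream>
#include <set>
#include <string>

// Approximation of the old helper macro: it expands to an explicit
// iterator loop over [begin, end), so loop bodies must dereference
// the iterator (*i).
#define foreach(it_type, it, collection) \
    for (it_type it = (collection).begin(); it != (collection).end(); ++it)

int main()
{
    std::set<std::string> paths = {"/nix/store/aaa-foo", "/nix/store/bbb-bar"};

    // Old style: the macro hides an iterator behind i.
    foreach (std::set<std::string>::const_iterator, i, paths)
        std::cout << *i << std::endl;

    // New style: the range-based for binds i to each element directly.
    for (auto & i : paths)
        std::cout << i << std::endl;

    return 0;
}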
@@ -82,14 +82,14 @@ void checkStoreName(const string & name)
        reasons (e.g., "." and ".."). */
     if (string(name, 0, 1) == ".")
         throw Error(format("illegal name: ‘%1%’") % name);
-    foreach (string::const_iterator, i, name)
-        if (!((*i >= 'A' && *i <= 'Z') ||
-              (*i >= 'a' && *i <= 'z') ||
-              (*i >= '0' && *i <= '9') ||
-              validChars.find(*i) != string::npos))
+    for (auto & i : name)
+        if (!((i >= 'A' && i <= 'Z') ||
+              (i >= 'a' && i <= 'z') ||
+              (i >= '0' && i <= '9') ||
+              validChars.find(i) != string::npos))
         {
             throw Error(format("invalid character ‘%1%’ in name ‘%2%’")
-                % *i % name);
+                % i % name);
         }
 }

@@ -101,22 +101,22 @@ void checkStoreName(const string & name)
    where

    <store> = the location of the Nix store, usually /nix/store
-
+
    <name> = a human readable name for the path, typically obtained
      from the name attribute of the derivation, or the name of the
      source file from which the store path is created.  For derivation
      outputs other than the default "out" output, the string "-<id>"
      is suffixed to <name>.
-
+
    <h> = base-32 representation of the first 160 bits of a SHA-256
      hash of <s>; the hash part of the store name
-
+
    <s> = the string "<type>:sha256:<h2>:<store>:<name>";
      note that it includes the location of the store as well as the
      name to make sure that changes to either of those are reflected
      in the hash (e.g. you won't get /nix/store/<h>-name1 and
      /nix/store/<h>-name2 with equal hash parts).
-
+
    <type> = one of:
      "text:<r1>:<r2>:...<rN>"
        for plain text files written to the store using
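To make the layout described in the comment above concrete, here is a small self-contained sketch that assembles the fingerprint string <s> and the final path exactly as spelled out there. The hash step (SHA-256 of <s>, folded down to 160 bits, printed in Nix's base-32 alphabet) is deliberately stubbed out with a fixed placeholder so the sketch stays short; hashPartOf, the file name, and the hash values are invented for illustration only.

#include <iostream>
#include <string>

// Placeholder for the real pipeline: SHA-256 of s, compressed to 160
// bits, rendered as 32 base-32 characters.  A fixed dummy value is
// returned so this sketch compiles and runs on its own.
static std::string hashPartOf(const std::string & s)
{
    (void) s;
    return "ld1fk735mlzz5rxy8n1nzfvcm9y9iiwg";
}

int main()
{
    std::string type  = "text";               // <type>
    std::string h2    = std::string(64, '0'); // <h2>: hex SHA-256 of the contents (dummy)
    std::string store = "/nix/store";         // <store>
    std::string name  = "hello.txt";          // <name> (invented)

    // <s> = "<type>:sha256:<h2>:<store>:<name>"
    std::string s = type + ":sha256:" + h2 + ":" + store + ":" + name;

    // Final store path: <store>/<h>-<name>, with <h> derived from <s>.
    std::string path = store + "/" + hashPartOf(s) + "-" + name;

    std::cout << s << std::endl;
    std::cout << path << std::endl;
    return 0;
}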
@@ -219,9 +219,9 @@ Path computeStorePathForText(const string & name, const string & s,
        hacky, but we can't put them in `s' since that would be
        ambiguous. */
     string type = "text";
-    foreach (PathSet::const_iterator, i, references) {
+    for (auto & i : references) {
         type += ":";
-        type += *i;
+        type += i;
     }
     return makeStorePath(type, hash, name);
 }

@@ -234,11 +234,11 @@ string StoreAPI::makeValidityRegistration(const PathSet & paths,
     bool showDerivers, bool showHash)
 {
     string s = "";

-    foreach (PathSet::iterator, i, paths) {
-        s += *i + "\n";
-
-        ValidPathInfo info = queryPathInfo(*i);
+    for (auto & i : paths) {
+        s += i + "\n";
+
+        ValidPathInfo info = queryPathInfo(i);

         if (showHash) {
             s += printHash(info.hash) + "\n";
@@ -250,8 +250,8 @@ string StoreAPI::makeValidityRegistration(const PathSet & paths,

         s += (format("%1%\n") % info.references.size()).str();

-        foreach (PathSet::iterator, j, info.references)
-            s += *j + "\n";
+        for (auto & j : info.references)
+            s += j + "\n";
     }

     return s;
@@ -286,9 +286,9 @@ ValidPathInfo decodeValidPathInfo(std::istream & str, bool hashGiven)
 string showPaths(const PathSet & paths)
 {
     string s;
-    foreach (PathSet::const_iterator, i, paths) {
+    for (auto & i : paths) {
         if (s.size() != 0) s += ", ";
-        s += "‘" + *i + "’";
+        s += "‘" + i + "’";
     }
     return s;
 }

@@ -297,9 +297,9 @@ string showPaths(const PathSet & paths)
 void exportPaths(StoreAPI & store, const Paths & paths,
     bool sign, Sink & sink)
 {
-    foreach (Paths::const_iterator, i, paths) {
+    for (auto & i : paths) {
         writeInt(1, sink);
-        store.exportPath(*i, sign, sink);
+        store.exportPath(i, sign, sink);
     }
     writeInt(0, sink);
 }
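The framing written by exportPaths is visible in the hunk above: a 1 marker precedes every serialised path and a single trailing 0 terminates the stream, so a consumer reads markers until it hits the 0. The toy below models that framing with an in-memory queue instead of Nix's real Sink/Source machinery; the store paths and the "serialised" payload are stand-ins, not real NAR dumps.

#include <deque>
#include <iostream>
#include <string>
#include <vector>

// Toy model of the export stream: 1 <path> 1 <path> ... 0.
// Real Nix writes the markers with writeInt() into a Sink; here a
// deque of strings stands in for the byte stream.
int main()
{
    std::vector<std::string> paths = {"/nix/store/aaa-foo", "/nix/store/bbb-bar"};

    std::deque<std::string> stream;

    // Producer side, mirroring exportPaths.
    for (auto & p : paths) {
        stream.push_back("1");
        stream.push_back(p);   // stands in for the serialised path dump
    }
    stream.push_back("0");

    // Consumer side: read markers until the terminating 0.
    while (true) {
        std::string marker = stream.front(); stream.pop_front();
        if (marker == "0") break;
        std::string p = stream.front(); stream.pop_front();
        std::cout << "imported " << p << std::endl;
    }

    return 0;
}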