NixOS/nix (mirror of https://github.com/NixOS/nix)

* Maintain integrity of the substitute and successor mappings when
  deleting a path in the store.
* Allow absolute paths in Nix expressions.
* Get nix-prefetch-url to work again.
* Various other fixes.

commit ab0bc4999a (parent 40d9eb14df)
15 changed files with 152 additions and 199 deletions
@@ -104,6 +104,7 @@ exports
     "\"" ~[\n\"]* "\"" -> Str

     PathComp ("/" PathComp)+ -> Path
+    ("/" PathComp)+ -> Path
     [a-zA-Z0-9\.\_\-\+]+ -> PathComp

     "true" -> Bool

@@ -184,7 +185,7 @@ exports
     [0-9] -> Udigit

   lexical restrictions
-    Uri -/- [a-zA-Z0-9\-\_\.\!\~\*\'\(\)]
+    Uri -/- [a-zA-Z0-9\-\_\.\!\~\*\'\(\)\/]


  %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
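For illustration (the concrete path and URL are invented examples, not from the commit): the new production lets a Nix expression contain an absolute path literal such as /etc/profile in addition to relative ones like ./default.nix, and adding \/ to the follow restriction makes a URI such as http://example.org/dist/foo-1.0.tar.gz scan as a single Uri token instead of stopping at the first slash, which is the behaviour nix-prefetch-url needs.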
@@ -66,23 +66,9 @@ struct Cleanup : TermFun
 };


-Expr parseExprFromFile(Path path)
+static Expr parse(const char * text, const string & location,
+    const Path & basePath)
 {
-    assert(path[0] == '/');
-
-#if 0
-    /* Perhaps this is already an imploded parse tree? */
-    Expr e = ATreadFromNamedFile(path.c_str());
-    if (e) return e;
-#endif
-
-    /* If `path' refers to a directory, append `/default.nix'. */
-    struct stat st;
-    if (stat(path.c_str(), &st))
-        throw SysError(format("getting status of `%1%'") % path);
-    if (S_ISDIR(st.st_mode))
-        path = canonPath(path + "/default.nix");
-
     /* Initialise the SDF libraries. */
     static bool initialised = false;
     static ATerm parseTable = 0;
@@ -113,26 +99,13 @@ Expr parseExprFromFile(Path path)
         initialised = true;
     }

-    /* Read the input file.  We can't use SGparseFile() because it's
-       broken, so we read the input ourselves and call
-       SGparseString(). */
-    AutoCloseFD fd = open(path.c_str(), O_RDONLY);
-    if (fd == -1) throw SysError(format("opening `%1%'") % path);
-
-    if (fstat(fd, &st) == -1)
-        throw SysError(format("statting `%1%'") % path);
-
-    char text[st.st_size + 1];
-    readFull(fd, (unsigned char *) text, st.st_size);
-    text[st.st_size] = 0;
-
     /* Parse it. */
-    ATerm result = SGparseString(lang, "Expr", text);
+    ATerm result = SGparseString(lang, "Expr", (char *) text);
     if (!result)
-        throw SysError(format("parse failed in `%1%'") % path);
+        throw SysError(format("parse failed in `%1%'") % location);
     if (SGisParseError(result))
         throw Error(format("parse error in `%1%': %2%")
-            % path % result);
+            % location % result);

     /* Implode it. */
     PT_ParseTree tree = PT_makeParseTreeFromTerm(result);
@@ -155,10 +128,50 @@ Expr parseExprFromFile(Path path)
         throw Error(format("cannot implode parse tree"));

     printMsg(lvlVomit, format("imploded parse tree of `%1%': %2%")
-        % path % imploded);
+        % location % imploded);

     /* Finally, clean it up. */
     Cleanup cleanup;
-    cleanup.basePath = dirOf(path);
+    cleanup.basePath = basePath;
     return bottomupRewrite(cleanup, imploded);
 }
+
+
+Expr parseExprFromFile(Path path)
+{
+    assert(path[0] == '/');
+
+#if 0
+    /* Perhaps this is already an imploded parse tree? */
+    Expr e = ATreadFromNamedFile(path.c_str());
+    if (e) return e;
+#endif
+
+    /* If `path' refers to a directory, append `/default.nix'. */
+    struct stat st;
+    if (stat(path.c_str(), &st))
+        throw SysError(format("getting status of `%1%'") % path);
+    if (S_ISDIR(st.st_mode))
+        path = canonPath(path + "/default.nix");
+
+    /* Read the input file.  We can't use SGparseFile() because it's
+       broken, so we read the input ourselves and call
+       SGparseString(). */
+    AutoCloseFD fd = open(path.c_str(), O_RDONLY);
+    if (fd == -1) throw SysError(format("opening `%1%'") % path);
+
+    if (fstat(fd, &st) == -1)
+        throw SysError(format("statting `%1%'") % path);
+
+    char text[st.st_size + 1];
+    readFull(fd, (unsigned char *) text, st.st_size);
+    text[st.st_size] = 0;
+
+    return parse(text, path, dirOf(path));
+}
+
+
+Expr parseExprFromString(const string & s, const Path & basePath)
+{
+    return parse(s.c_str(), "(string)", basePath);
+}
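As a usage sketch (not part of the commit; the file name, base path, and expression string are invented), the two entry points that stay public after this refactoring can be called like this:

    #include "parser.hh"

    /* Parse an expression from a file.  If the path is a directory,
       <dir>/default.nix is parsed instead, and relative paths inside
       the expression are resolved against the file's directory. */
    Expr exampleFromFile()
    {
        return parseExprFromFile("/nix/example/default.nix");
    }

    /* Parse an expression from a string.  Relative paths are resolved
       against the given base path, and errors are reported against the
       pseudo-location "(string)". */
    Expr exampleFromString()
    {
        return parseExprFromString("./builder.sh", "/nix/example");
    }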
@@ -4,7 +4,13 @@
 #include "nixexpr.hh"


+/* Parse a Nix expression from the specified file.  If `path' refers
+   to a directory, the "/default.nix" is appended. */
 Expr parseExprFromFile(Path path);

+/* Parse a Nix expression from the specified string. */
+Expr parseExprFromString(const string & s,
+    const Path & basePath);
+

 #endif /* !__PARSER_H */
@@ -82,6 +82,7 @@ Path normaliseStoreExpr(const Path & _nePath, PathSet pending)
         debug(format("skipping build of expression `%1%', someone beat us to it")
             % (string) nePath);
+        if (ne.type != StoreExpr::neClosure) abort();
         outputLocks.setDeletion(true);
         return nePath2;
     }
 }
@@ -243,16 +243,33 @@ bool isValidPath(const Path & path)
 }


-void unregisterValidPath(const Path & _path)
+static void invalidatePath(const Path & path, Transaction & txn)
 {
-    Path path(canonPath(_path));
-    Transaction txn(nixDB);
-
     debug(format("unregistering path `%1%'") % path);

     nixDB.delPair(txn, dbValidPaths, path);

-    txn.commit();
+    /* Remove any successor mappings to this path (but not *from*
+       it). */
+    Paths revs;
+    nixDB.queryStrings(txn, dbSuccessorsRev, path, revs);
+    for (Paths::iterator i = revs.begin(); i != revs.end(); ++i)
+        nixDB.delPair(txn, dbSuccessors, *i);
+    nixDB.delPair(txn, dbSuccessorsRev, path);
+
+    /* Remove any substitute mappings to this path. */
+    revs.clear();
+    nixDB.queryStrings(txn, dbSubstitutesRev, path, revs);
+    for (Paths::iterator i = revs.begin(); i != revs.end(); ++i) {
+        Paths subs;
+        nixDB.queryStrings(txn, dbSubstitutes, *i, subs);
+        remove(subs.begin(), subs.end(), path);
+        if (subs.size() > 0)
+            nixDB.setStrings(txn, dbSubstitutes, *i, subs);
+        else
+            nixDB.delPair(txn, dbSubstitutes, *i);
+    }
+    nixDB.delPair(txn, dbSubstitutesRev, path);
 }

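The point of the new function is that the forward and reverse tables must stay consistent when a path disappears. A toy model of that invariant, with standard containers standing in for the Berkeley DB tables (the table names follow the diff; the function below is illustrative, not code from the commit):

    #include <map>
    #include <list>
    #include <string>

    typedef std::string Path;
    typedef std::list<Path> Paths;

    std::map<Path, Path>  dbSuccessors;      /* store expr -> its successor */
    std::map<Path, Paths> dbSuccessorsRev;   /* successor -> store exprs mapping to it */
    std::map<Path, Paths> dbSubstitutes;     /* path -> its substitutes */
    std::map<Path, Paths> dbSubstitutesRev;  /* substitute -> paths it is a substitute for */

    /* What has to happen when `path' is removed from the store. */
    void invalidatePathModel(const Path & path)
    {
        /* Drop successor mappings *to* path (but not *from* it). */
        Paths & revs = dbSuccessorsRev[path];
        for (Paths::iterator i = revs.begin(); i != revs.end(); ++i)
            dbSuccessors.erase(*i);
        dbSuccessorsRev.erase(path);

        /* Drop path from every substitute list that mentions it. */
        Paths & subs = dbSubstitutesRev[path];
        for (Paths::iterator i = subs.begin(); i != subs.end(); ++i) {
            dbSubstitutes[*i].remove(path);   /* std::list::remove erases */
            if (dbSubstitutes[*i].empty()) dbSubstitutes.erase(*i);
        }
        dbSubstitutesRev.erase(path);
    }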
@@ -289,6 +306,8 @@ Path addToStore(const Path & _srcPath)
             registerValidPath(txn, dstPath);
             txn.commit();
         }
+
+        outputLock.setDeletion(true);
     }

     return dstPath;
@@ -310,6 +329,8 @@ void addTextToStore(const Path & dstPath, const string & s)
             registerValidPath(txn, dstPath);
             txn.commit();
         }
+
+        outputLock.setDeletion(true);
     }
 }

@@ -321,7 +342,9 @@ void deleteFromStore(const Path & _path)
     if (!isInPrefix(path, nixStore))
         throw Error(format("path `%1%' is not in the store") % path);

-    unregisterValidPath(path);
+    Transaction txn(nixDB);
+    invalidatePath(path, txn);
+    txn.commit();

     deletePath(path);
 }
@@ -332,50 +355,43 @@ void verifyStore()
     Transaction txn(nixDB);

     Paths paths;
+    PathSet validPaths;
     nixDB.enumTable(txn, dbValidPaths, paths);

-    for (Paths::iterator i = paths.begin();
-         i != paths.end(); i++)
+    for (Paths::iterator i = paths.begin(); i != paths.end(); ++i)
     {
         Path path = *i;
         if (!pathExists(path)) {
             debug(format("path `%1%' disappeared") % path);
-            nixDB.delPair(txn, dbValidPaths, path);
-            nixDB.delPair(txn, dbSuccessorsRev, path);
-            nixDB.delPair(txn, dbSubstitutesRev, path);
-        }
+            invalidatePath(path, txn);
+        } else
+            validPaths.insert(path);
     }

-#if 0
-    Strings subs;
-    nixDB.enumTable(txn, dbSubstitutes, subs);
-
-    for (Strings::iterator i = subs.begin();
-         i != subs.end(); i++)
-    {
-        FSId srcId = parseHash(*i);
-
-        Strings subIds;
-        nixDB.queryStrings(txn, dbSubstitutes, srcId, subIds);
-
-        for (Strings::iterator j = subIds.begin();
-             j != subIds.end(); )
-        {
-            FSId subId = parseHash(*j);
-
-            Strings subPaths;
-            nixDB.queryStrings(txn, dbId2Paths, subId, subPaths);
-            if (subPaths.size() == 0) {
-                debug(format("erasing substitute %1% for %2%")
-                    % (string) subId % (string) srcId);
-                j = subIds.erase(j);
-            } else j++;
-        }
-
-        nixDB.setStrings(txn, dbSubstitutes, srcId, subIds);
-    }
-#endif
+    Paths sucs;
+    nixDB.enumTable(txn, dbSuccessors, sucs);
+    for (Paths::iterator i = sucs.begin(); i != sucs.end(); ++i) {
+        /* Note that *i itself does not have to be valid, just its
+           successor. */
+        Path sucPath;
+        if (nixDB.queryString(txn, dbSuccessors, *i, sucPath) &&
+            validPaths.find(sucPath) == validPaths.end())
+        {
+            debug(format("found successor mapping to non-existent path `%1%'") % sucPath);
+            nixDB.delPair(txn, dbSuccessors, *i);
+        }
+    }
+
+    Paths rsucs;
+    nixDB.enumTable(txn, dbSuccessorsRev, rsucs);
+    for (Paths::iterator i = rsucs.begin(); i != rsucs.end(); ++i) {
+        if (validPaths.find(*i) == validPaths.end()) {
+            debug(format("found reverse successor mapping for non-existent path `%1%'") % *i);
+            nixDB.delPair(txn, dbSuccessorsRev, *i);
+        }
+    }

+#if 0
     Paths sucs;
     nixDB.enumTable(txn, dbSuccessors, sucs);
@@ -395,6 +411,7 @@ void verifyStore()
             nixDB.setStrings(txn, dbSuccessorsRev, sucPath, revs);
         }
     }
+#endif

     txn.commit();
 }
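verifyStore now uses the set of valid paths to cross-check the mapping tables. A self-contained sketch of the rule applied to the successor table (again a toy stand-in for the database, not the commit's code):

    #include <map>
    #include <set>
    #include <string>

    typedef std::string Path;

    /* Drop successor mappings whose target is not a valid path.  The
       source of a mapping need not itself be valid (it may never have
       been built locally), but its successor must exist. */
    void checkSuccessors(std::map<Path, Path> & dbSuccessors,
        const std::set<Path> & validPaths)
    {
        std::map<Path, Path>::iterator i = dbSuccessors.begin();
        while (i != dbSuccessors.end()) {
            if (validPaths.find(i->second) == validPaths.end())
                dbSuccessors.erase(i++);    /* dangling mapping */
            else
                ++i;
        }
    }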
@@ -48,9 +48,6 @@ Paths querySubstitutes(const Path & srcPath);
 /* Register the validity of a path. */
 void registerValidPath(const Transaction & txn, const Path & path);

-/* Unregister the validity of a path. */
-void unregisterValidPath(const Path & path);
-
 /* Checks whether a path is valid. */
 bool isValidPath(const Path & path);

@@ -5,6 +5,7 @@
 #include "normalise.hh"
 #include "shared.hh"
 #include "eval.hh"
+#include "parser.hh"


 #if 0
@@ -29,9 +30,9 @@ static Path searchPath(const Paths & searchDirs, const Path & relPath)
 static Expr evalStdin(EvalState & state)
 {
     startNest(nest, lvlTalkative, format("evaluating standard input"));
-    Expr e = ATreadFromFile(stdin);
-    if (!e)
-        throw Error(format("unable to read a term from stdin"));
+    string s, s2;
+    while (getline(cin, s2)) s += s2 + "\n";
+    Expr e = parseExprFromString(s, absPath("."));
     return evalExpr(state, e);
 }

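With this change the stdin code path of the evaluator expects concrete Nix syntax rather than a pre-parsed ATerm: the input is read line by line, parsed with parseExprFromString, and relative paths in it are resolved against the current directory via absPath("."). Piping in a relative path expression such as ./pkgs/default.nix (an invented example) would therefore evaluate to the corresponding absolute path, resolved against the working directory of the tool.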