mirror of
https://github.com/NixOS/nix
synced 2025-07-05 12:21:48 +02:00
* Started implementing the new evaluation model.
* Lots of refactorings. * Unit tests.
This commit is contained in:
parent
b9f09b3268
commit
822794001c
15 changed files with 742 additions and 202 deletions
156
src/nix.cc
156
src/nix.cc
|
@ -11,155 +11,15 @@
|
|||
#include <sys/types.h>
|
||||
#include <sys/wait.h>
|
||||
|
||||
extern "C" {
|
||||
#include <aterm1.h>
|
||||
}
|
||||
|
||||
#include "util.hh"
|
||||
#include "hash.hh"
|
||||
#include "db.hh"
|
||||
#include "nix.hh"
|
||||
#include "eval.hh"
|
||||
|
||||
using namespace std;
|
||||
|
||||
|
||||
/* Database names. */

/* dbRefs :: Hash -> FileName

   Maps a hash to the corresponding file name inside the NixValues
   directory.  The mapping exists purely for performance: it can be
   rebuilt unambiguously from the nixValues directory itself.  The
   reason a lookup table is needed at all is that entries in that
   directory are not bare printed hashes — they may carry a
   descriptive prefix (e.g., "aterm-2.0-ae749a...") — so without this
   table, finding a value would mean scanning the whole directory in
   O(n) time. */
static std::string dbRefs = "refs";

/* dbNFs :: Hash -> Hash

   A pair (h1, h2) records that h2 is a normal form obtained by
   evaluating the value h1.

   Ideally h2 would be the hash of the object it refers to, but that
   creates a cycle: computing the object's hash (and hence its file
   name) requires the object, while producing the object requires its
   file name.

   So for now the invariant

       hashFile(dbRefs[h]) == h

   is abandoned for computed normal forms; instead h2 = hash(h1),
   which keeps dbNFs reconstructible.  Using a pseudo-random number
   might be preferable to make the scheme harder to subvert. */
static std::string dbNFs = "nfs";

/* dbNetSources :: Hash -> URL

   A pair (hash, url) states that the object identified by hash can
   be obtained by fetching what url points at.

   TODO: this should be Hash -> [URL]

   TODO: factor this out into a separate tool? */
static std::string dbNetSources = "netsources";


/* Path names. */

/* Directory holding all Nix values — files and directories, in both
   normal and non-normal form. */
static std::string nixValues;

/* Directory where evaluations are logged. */
static std::string nixLogDir;

/* File name of the Berkeley DB database holding the dbXXX
   mappings. */
static std::string nixDB;


/* Abstract syntax of Nix values:

   e := Hash(h)            -- external reference
      | Str(s)             -- string constant
      | Bool(b)            -- boolean constant
      | Name(e)            -- "&" operator; pointer (file name) formation
      | App(e, e)          -- application
      | Lam(x, e)          -- lambda abstraction
      | Exec(platform, e, e*)
                           -- primitive; execute e with args e* on platform
      ;
*/
|
||||
|
||||
|
||||
/* Download object referenced by the given URL into the sources
|
||||
directory. Return the file name it was downloaded to. */
|
||||
string fetchURL(string url)
|
||||
{
|
||||
string filename = baseNameOf(url);
|
||||
string fullname = nixSourcesDir + "/" + filename;
|
||||
struct stat st;
|
||||
if (stat(fullname.c_str(), &st)) {
|
||||
cerr << "fetching " << url << endl;
|
||||
/* !!! quoting */
|
||||
string shellCmd =
|
||||
"cd " + nixSourcesDir + " && wget --quiet -N \"" + url + "\"";
|
||||
int res = system(shellCmd.c_str());
|
||||
if (WEXITSTATUS(res) != 0)
|
||||
throw Error("cannot fetch " + url);
|
||||
}
|
||||
return fullname;
|
||||
}
|
||||
|
||||
|
||||
/* Obtain an object with the given hash. If a file with that hash is
|
||||
known to exist in the local file system (as indicated by the dbRefs
|
||||
database), we use that. Otherwise, we attempt to fetch it from the
|
||||
network (using dbNetSources). We verify that the file has the
|
||||
right hash. */
|
||||
string getFile(Hash hash)
|
||||
{
|
||||
bool checkedNet = false;
|
||||
|
||||
while (1) {
|
||||
|
||||
string fn, url;
|
||||
|
||||
if (queryDB(nixDB, dbRefs, hash, fn)) {
|
||||
|
||||
/* Verify that the file hasn't changed. !!! race */
|
||||
if (hashFile(fn) != hash)
|
||||
throw Error("file " + fn + " is stale");
|
||||
|
||||
return fn;
|
||||
}
|
||||
|
||||
if (checkedNet)
|
||||
throw Error("consistency problem: file fetched from " + url +
|
||||
" should have hash " + (string) hash + ", but it doesn't");
|
||||
|
||||
if (!queryDB(nixDB, dbNetSources, hash, url))
|
||||
throw Error("a file with hash " + (string) hash + " is requested, "
|
||||
"but it is not known to exist locally or on the network");
|
||||
|
||||
checkedNet = true;
|
||||
|
||||
fn = fetchURL(url);
|
||||
|
||||
setDB(nixDB, dbRefs, hash, fn);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
typedef map<string, string> Params;
|
||||
|
||||
|
||||
void readPkgDescr(Hash hash,
|
||||
Params & pkgImports, Params & fileImports, Params & arguments)
|
||||
{
|
||||
|
@ -204,9 +64,6 @@ void readPkgDescr(Hash hash,
|
|||
string getPkg(Hash hash);
|
||||
|
||||
|
||||
typedef map<string, string> Environment;
|
||||
|
||||
|
||||
void fetchDeps(Hash hash, Environment & env)
|
||||
{
|
||||
/* Read the package description file. */
|
||||
|
@ -538,15 +395,6 @@ void registerInstalledPkg(Hash hash, string path)
|
|||
}
|
||||
|
||||
|
||||
void initDB()
|
||||
{
|
||||
createDB(nixDB, dbRefs);
|
||||
createDB(nixDB, dbInstPkgs);
|
||||
createDB(nixDB, dbPrebuilts);
|
||||
createDB(nixDB, dbNetSources);
|
||||
}
|
||||
|
||||
|
||||
void verifyDB()
|
||||
{
|
||||
/* Check that all file references are still valid. */
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue