* Started removing closure store expressions, i.e., the explicit
  representation of closures as ATerms in the Nix store.  Instead, the
  file system pointer graph is now stored in the Nix database.  This has
  many advantages:

  - It greatly simplifies the implementation (we can drop the notion of
    `successors', and so on).

  - It makes registering roots for the garbage collector much easier.
    Instead of specifying the closure expression as a root, you can
    simply specify the store path that must be retained as a root.  This
    could not be done previously, since there was no way to find the
    closure store expression containing a given store path.

  - Better traceability: it is now possible to query what paths are
    referenced by a path, and what paths refer to a path.
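To illustrate the last point, here is a minimal sketch (my own, not part of the commit) of how the closure of a store path can now be obtained through the database, using the computeFSClosure() function introduced in the diff below; the includes and the printing helper are assumptions:

#include <iostream>

#include "normalise.hh"   /* declares computeFSClosure(); see the diff below */

/* Hedged sketch: enumerate the file system closure of a store path
   via the database-backed reference graph. */
static void printClosure(const Path & storePath)
{
    PathSet closure;
    computeFSClosure(storePath, closure);   /* follows dbReferences transitively */
    for (PathSet::iterator i = closure.begin(); i != closure.end(); ++i)
        std::cout << *i << std::endl;
}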
commit 863dcff6c5
parent e9762e2d10

15 changed files with 407 additions and 890 deletions
@@ -7,6 +7,7 @@
 #include <unistd.h>


+#if 0
 void followLivePaths(Path nePath, PathSet & live)
 {
     /* Just to be sure, canonicalise the path. It is important to do
@@ -96,3 +97,4 @@ PathSet findDeadPaths(const PathSet & live, time_t minAge)

     return dead;
 }
+#endif

@@ -1,16 +1,32 @@
 #include "normalise.hh"


-StoreExpr storeExprFromPath(const Path & path)
+Derivation derivationFromPath(const Path & drvPath)
 {
-    assertStorePath(path);
-    ensurePath(path);
-    ATerm t = ATreadFromNamedFile(path.c_str());
-    if (!t) throw Error(format("cannot read aterm from `%1%'") % path);
-    return parseStoreExpr(t);
+    assertStorePath(drvPath);
+    ensurePath(drvPath);
+    ATerm t = ATreadFromNamedFile(drvPath.c_str());
+    if (!t) throw Error(format("cannot read aterm from `%1%'") % drvPath);
+    return parseDerivation(t);
 }


+void computeFSClosure(const Path & storePath,
+    PathSet & paths)
+{
+    if (paths.find(storePath) != paths.end()) return;
+    paths.insert(storePath);
+
+    PathSet references;
+    queryReferences(storePath, references);
+
+    for (PathSet::iterator i = references.begin();
+         i != references.end(); ++i)
+        computeFSClosure(*i, paths);
+}
+
+
+#if 0
 PathSet storeExprRoots(const Path & nePath)
 {
     PathSet paths;
@@ -71,3 +87,4 @@ PathSet storeExprRequisites(const Path & nePath,
         paths, doneSet);
     return paths;
 }
+#endif

File diff suppressed because it is too large.

@@ -4,28 +4,31 @@
 #include "storeexpr.hh"


-/* Normalise a store expression. That is, if the expression is a
-   derivation, a path containing an equivalent closure expression is
-   returned. This requires that the derivation is performed, unless a
-   successor is known. */
-Path normaliseStoreExpr(const Path & nePath);
-
-/* Realise a store expression. If the expression is a derivation, it
-   is first normalised into a closure. The closure is then realised
-   in the file system (i.e., it is ensured that each path in the
-   closure exists in the file system, if necessary by using the
-   substitute mechanism). Returns the normal form of the expression
-   (i.e., its closure expression). */
-Path realiseStoreExpr(const Path & nePath);
+/* Perform the specified derivation, if necessary. That is, do
+   whatever is necessary to create the output paths of the
+   derivation. If the output paths already exist, we're done. If
+   they have substitutes, we can use those instead. Otherwise, the
+   build action described by the derivation is performed, after
+   recursively building any sub-derivations. */
+void buildDerivation(const Path & drvPath);

 /* Ensure that a path exists, possibly by instantiating it by
    realising a substitute. */
-void ensurePath(const Path & path);
+void ensurePath(const Path & storePath);

-/* Read a store expression, after ensuring its existence through
-   ensurePath(). */
-StoreExpr storeExprFromPath(const Path & path);
+/* Read a derivation store expression, after ensuring its existence
+   through ensurePath(). */
+Derivation derivationFromPath(const Path & drvPath);
+
+
+/* Places in `paths' the set of all store paths in the file system
+   closure of `storePath'; that is, all paths that can be directly or
+   indirectly reached from it. `paths' is not cleared. */
+void computeFSClosure(const Path & storePath,
+    PathSet & paths);
+
+
+#if 0
 /* Get the list of root (output) paths of the given store
    expression. */
 PathSet storeExprRoots(const Path & nePath);
@@ -39,6 +42,7 @@ PathSet storeExprRoots(const Path & nePath);
    successors. */
 PathSet storeExprRequisites(const Path & nePath,
     bool includeExprs, bool includeSuccessors);
+#endif


 #endif /* !__NORMALISE_H */

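As a usage illustration of the interface above, a hedged sketch (my own, not code from this commit; the .drv path is a placeholder and error handling is omitted):

/* Hedged sketch: build a derivation, then collect the closure of its
   outputs through the database-backed reference graph. */
Path drvPath = "/nix/store/...-hello.drv";      /* placeholder */
Derivation drv = derivationFromPath(drvPath);   /* parse the derivation ATerm */
buildDerivation(drvPath);                       /* performs the build if necessary */

PathSet closure;
for (DerivationOutputs::iterator i = drv.outputs.begin();
     i != drv.outputs.end(); ++i)
    computeFSClosure(i->second.path, closure);  /* follow dbReferences transitively */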
@@ -23,23 +23,18 @@ static Database nixDB
    is, produced by a successful build). */
 static TableId dbValidPaths = 0;

-/* dbSuccessors :: Path -> Path
+/* dbReferences :: Path -> [Path]

-   Each pair $(p_1, p_2)$ in this mapping records the fact that the
-   Nix expression stored at path $p_1$ has a successor expression
-   stored at path $p_2$.
+   This table lists the outgoing file system references for each
+   output path that has been built by a Nix derivation. These are
+   found by scanning the path for the hash components of input
+   paths. */
+static TableId dbReferences = 0;

-   Note that a term $y$ is a successor of $x$ iff there exists a
-   sequence of rewrite steps that rewrites $x$ into $y$.
-*/
-static TableId dbSuccessors = 0;
+/* dbReferers :: Path -> [Path]

-/* dbSuccessorsRev :: Path -> [Path]
-
-   The reverse mapping of dbSuccessors (i.e., it stores the
-   predecessors of a Nix expression).
-*/
-static TableId dbSuccessorsRev = 0;
+   This table is just the reverse mapping of dbReferences. */
+static TableId dbReferers = 0;

 /* dbSubstitutes :: Path -> [[Path]]

@@ -76,8 +71,8 @@ void openDB()
         return;
     }
     dbValidPaths = nixDB.openTable("validpaths");
-    dbSuccessors = nixDB.openTable("successors");
-    dbSuccessorsRev = nixDB.openTable("successors-rev");
+    dbReferences = nixDB.openTable("references");
+    dbReferers = nixDB.openTable("referers");
     dbSubstitutes = nixDB.openTable("substitutes");
 }

@@ -199,81 +194,31 @@ bool isValidPath(const Path & path)
 }


-static bool isUsablePathTxn(const Path & path, const Transaction & txn)
+void setReferences(const Transaction & txn, const Path & storePath,
+    const PathSet & references)
 {
-    if (isValidPathTxn(path, txn)) return true;
-    Paths subs;
-    nixDB.queryStrings(txn, dbSubstitutes, path, subs);
-    return subs.size() > 0;
-}
-
-
-void registerSuccessor(const Transaction & txn,
-    const Path & srcPath, const Path & sucPath)
-{
-    assertStorePath(srcPath);
-    assertStorePath(sucPath);
-
-    if (!isUsablePathTxn(sucPath, txn)) throw Error(
-        format("path `%1%' cannot be a successor, since it is not usable")
-        % sucPath);
-
-    Path known;
-    if (nixDB.queryString(txn, dbSuccessors, srcPath, known) &&
-        known != sucPath)
+    nixDB.setStrings(txn, dbReferences, storePath,
+        Paths(references.begin(), references.end()));
+
+    /* Update the referers mappings of all referenced paths. */
+    for (PathSet::const_iterator i = references.begin();
+         i != references.end(); ++i)
     {
-        throw Error(format(
-            "the `impossible' happened: expression in path "
-            "`%1%' appears to have multiple successors "
-            "(known `%2%', new `%3%'")
-            % srcPath % known % sucPath);
+        Paths referers;
+        nixDB.queryStrings(txn, dbReferers, *i, referers);
+        PathSet referers2(referers.begin(), referers.end());
+        referers2.insert(storePath);
+        nixDB.setStrings(txn, dbReferers, *i,
+            Paths(referers2.begin(), referers2.end()));
     }
-
-    Paths revs;
-    nixDB.queryStrings(txn, dbSuccessorsRev, sucPath, revs);
-    if (find(revs.begin(), revs.end(), srcPath) == revs.end())
-        revs.push_back(srcPath);
-
-    nixDB.setString(txn, dbSuccessors, srcPath, sucPath);
-    nixDB.setStrings(txn, dbSuccessorsRev, sucPath, revs);
 }


-void unregisterSuccessor(const Path & srcPath)
+void queryReferences(const Path & storePath, PathSet & references)
 {
-    assertStorePath(srcPath);
-
-    Transaction txn(nixDB);
-
-    Path sucPath;
-    if (!nixDB.queryString(txn, dbSuccessors, srcPath, sucPath)) {
-        txn.abort();
-        return;
-    }
-    nixDB.delPair(txn, dbSuccessors, srcPath);
-
-    Paths revs;
-    nixDB.queryStrings(txn, dbSuccessorsRev, sucPath, revs);
-    Paths::iterator i = find(revs.begin(), revs.end(), srcPath);
-    assert(i != revs.end());
-    revs.erase(i);
-    nixDB.setStrings(txn, dbSuccessorsRev, sucPath, revs);
-
-    txn.commit();
-}
-
-
-bool querySuccessor(const Path & srcPath, Path & sucPath)
-{
-    return nixDB.queryString(noTxn, dbSuccessors, srcPath, sucPath);
-}
-
-
-Paths queryPredecessors(const Path & sucPath)
-{
-    Paths revs;
-    nixDB.queryStrings(noTxn, dbSuccessorsRev, sucPath, revs);
-    return revs;
+    Paths references2;
+    nixDB.queryStrings(noTxn, dbReferences, storePath, references2);
+    references.insert(references2.begin(), references2.end());
 }

@@ -355,18 +300,6 @@ Substitutes querySubstitutes(const Path & srcPath)
 }


-static void unregisterPredecessors(const Path & path, Transaction & txn)
-{
-    /* Remove any successor mappings to this path (but not *from*
-       it). */
-    Paths revs;
-    nixDB.queryStrings(txn, dbSuccessorsRev, path, revs);
-    for (Paths::iterator i = revs.begin(); i != revs.end(); ++i)
-        nixDB.delPair(txn, dbSuccessors, *i);
-    nixDB.delPair(txn, dbSuccessorsRev, path);
-}
-
-
 void clearSubstitutes()
 {
     Transaction txn(nixDB);
@@ -375,16 +308,6 @@ void clearSubstitutes()
     Paths subKeys;
     nixDB.enumTable(txn, dbSubstitutes, subKeys);
     for (Paths::iterator i = subKeys.begin(); i != subKeys.end(); ++i) {
-
-        /* If this path has not become valid in the mean-while, delete
-           any successor mappings *to* it. This is to preserve the
-           invariant the all successors are `usable' as opposed to
-           `valid' (i.e., the successor must be valid *or* have at
-           least one substitute). */
-        if (!isValidPath(*i)) {
-            unregisterPredecessors(*i, txn);
-        }
-
         /* Delete all substitutes for path *i. */
         nixDB.delPair(txn, dbSubstitutes, *i);
     }
@@ -407,7 +330,6 @@ static void invalidatePath(const Path & path, Transaction & txn)
     debug(format("unregistering path `%1%'") % path);

     nixDB.delPair(txn, dbValidPaths, path);
-    unregisterPredecessors(path, txn);
 }
@@ -562,34 +484,5 @@ void verifyStore()
         nixDB.delPair(txn, dbSubstitutes, *i);
     }

-    /* Check that the values of the successor mappings are usable
-       paths. */
-    Paths sucKeys;
-    nixDB.enumTable(txn, dbSuccessors, sucKeys);
-    for (Paths::iterator i = sucKeys.begin(); i != sucKeys.end(); ++i) {
-        /* Note that *i itself does not have to be valid, just its
-           successor. */
-        Path sucPath;
-        if (nixDB.queryString(txn, dbSuccessors, *i, sucPath) &&
-            usablePaths.find(sucPath) == usablePaths.end())
-        {
-            printMsg(lvlError,
-                format("found successor mapping to non-existent path `%1%'") % sucPath);
-            nixDB.delPair(txn, dbSuccessors, *i);
-        }
-    }
-
-    /* Check that the keys of the reverse successor mappings are valid
-       paths. */
-    Paths rsucKeys;
-    nixDB.enumTable(txn, dbSuccessorsRev, rsucKeys);
-    for (Paths::iterator i = rsucKeys.begin(); i != rsucKeys.end(); ++i) {
-        if (usablePaths.find(*i) == usablePaths.end()) {
-            printMsg(lvlError,
-                format("found reverse successor mapping for non-existent path `%1%'") % *i);
-            nixDB.delPair(txn, dbSuccessorsRev, *i);
-        }
-    }
-
     txn.commit();
 }

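The hunks shown here add the dbReferers index but no query function for it. A hypothetical helper (my own, not present in this diff) mirroring queryReferences and using the same nixDB primitives could live in the same file:

/* Hypothetical sketch, not part of this commit: query the incoming
   references (referers) of a store path via the dbReferers table. */
void queryReferers(const Path & storePath, PathSet & referers)
{
    Paths referers2;
    nixDB.queryStrings(noTxn, dbReferers, storePath, referers2);
    referers.insert(referers2.begin(), referers2.end());
}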
@@ -40,27 +40,6 @@ void createStoreTransaction(Transaction & txn);
 /* Copy a path recursively. */
 void copyPath(const Path & src, const Path & dst);

-/* Register a successor. This function accepts a transaction handle
-   so that it can be enclosed in an atomic operation with calls to
-   registerValidPath(). This must be atomic, since if we register a
-   successor for a derivation without registering the paths built in
-   the derivation, we have a successor with dangling pointers, and if
-   we do it in reverse order, we can get an obstructed build (since to
-   rebuild the successor, the outputs paths must not exist). */
-void registerSuccessor(const Transaction & txn,
-    const Path & srcPath, const Path & sucPath);
-
-/* Remove a successor mapping. */
-void unregisterSuccessor(const Path & srcPath);
-
-/* Return the predecessors of the Nix expression stored at the given
-   path. */
-bool querySuccessor(const Path & srcPath, Path & sucPath);
-
-/* Return the predecessors of the Nix expression stored at the given
-   path. */
-Paths queryPredecessors(const Path & sucPath);
-
 /* Register a substitute. */
 typedef list<pair<Path, Substitute> > SubstitutePairs;
 void registerSubstitutes(const Transaction & txn,
@@ -81,6 +60,14 @@ void assertStorePath(const Path & path);
 /* Checks whether a path is valid. */
 bool isValidPath(const Path & path);

+/* Sets the set of outgoing FS references for a store path. */
+void setReferences(const Transaction & txn, const Path & storePath,
+    const PathSet & references);
+
+/* Queries the set of outgoing FS references for a store path. The
+   result is not cleared. */
+void queryReferences(const Path & storePath, PathSet & references);
+
 /* Constructs a unique store path name. */
 Path makeStorePath(const string & type,
     const Hash & hash, const string & suffix);

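For orientation, a hedged sketch (my own, not part of the commit) of how the two new declarations are meant to be used together, following the transaction pattern visible in the database changes above; the store paths are placeholders and the Transaction usage is an assumption based on createStoreTransaction():

/* Hedged sketch: record the references of a freshly built output path,
   then read them back. */
Path outputPath = "/nix/store/...-hello";     /* placeholder */

Transaction txn;
createStoreTransaction(txn);                  /* declared earlier in this header */

PathSet refs;
refs.insert("/nix/store/...-glibc");          /* placeholder referenced path */
setReferences(txn, outputPath, refs);         /* fills dbReferences and dbReferers */
txn.commit();

PathSet refs2;
queryReferences(outputPath, refs2);           /* read back, outside the transaction */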
@@ -1,8 +1,6 @@
 init initStoreExprHelpers

-Closure | ATermList ATermList | ATerm |
-Derive | ATermList ATermList string string ATermList ATermList | ATerm |
+Derive | ATermList ATermList ATermList string string ATermList ATermList | ATerm |

 | string string | ATerm | EnvBinding |
 | string ATermList | ATerm | ClosureElem |
 | string string string string | ATerm | DerivationOutput |

@@ -20,7 +20,7 @@ Path writeTerm(ATerm t, const string & suffix)
 }


-void checkPath(const string & s)
+static void checkPath(const string & s)
 {
     if (s.size() == 0 || s[0] != '/')
         throw Error(format("bad path `%1%' in store expression") % s);
@@ -39,108 +39,53 @@ static void parsePaths(ATermList paths, PathSet & out)
 }


-static void checkClosure(const Closure & closure)
+void throwBadDrv(ATerm t)
 {
-    if (closure.elems.size() == 0)
-        throw Error("empty closure");
-
-    PathSet decl;
-    for (ClosureElems::const_iterator i = closure.elems.begin();
-         i != closure.elems.end(); i++)
-        decl.insert(i->first);
-
-    for (PathSet::const_iterator i = closure.roots.begin();
-         i != closure.roots.end(); i++)
-        if (decl.find(*i) == decl.end())
-            throw Error(format("undefined root path `%1%'") % *i);
-
-    for (ClosureElems::const_iterator i = closure.elems.begin();
-         i != closure.elems.end(); i++)
-        for (PathSet::const_iterator j = i->second.refs.begin();
-             j != i->second.refs.end(); j++)
-            if (decl.find(*j) == decl.end())
-                throw Error(
-                    format("undefined path `%1%' referenced by `%2%'")
-                    % *j % i->first);
+    throw badTerm("not a valid derivation", t);
 }


-/* Parse a closure. */
-static bool parseClosure(ATerm t, Closure & closure)
+Derivation parseDerivation(ATerm t)
 {
-    ATermList roots, elems;
-
-    if (!matchClosure(t, roots, elems))
-        return false;
-
-    parsePaths(roots, closure.roots);
-
-    for (ATermIterator i(elems); i; ++i) {
-        ATerm path;
-        ATermList refs;
-        if (!matchClosureElem(*i, path, refs))
-            throw badTerm("not a closure element", *i);
-        ClosureElem elem;
-        parsePaths(refs, elem.refs);
-        closure.elems[aterm2String(path)] = elem;
-    }
-
-    checkClosure(closure);
-    return true;
-}
-
-
-static bool parseDerivation(ATerm t, Derivation & derivation)
-{
-    ATermList outs, ins, args, bnds;
+    Derivation drv;
+    ATermList outs, inDrvs, inSrcs, args, bnds;
     ATerm builder, platform;

-    if (!matchDerive(t, outs, ins, platform, builder, args, bnds))
-        return false;
+    if (!matchDerive(t, outs, inDrvs, inSrcs, platform, builder, args, bnds))
+        throwBadDrv(t);

     for (ATermIterator i(outs); i; ++i) {
         ATerm id, path, hashAlgo, hash;
         if (!matchDerivationOutput(*i, id, path, hashAlgo, hash))
-            return false;
+            throwBadDrv(t);
         DerivationOutput out;
         out.path = aterm2String(path);
         checkPath(out.path);
         out.hashAlgo = aterm2String(hashAlgo);
         out.hash = aterm2String(hash);
-        derivation.outputs[aterm2String(id)] = out;
+        drv.outputs[aterm2String(id)] = out;
     }

-    parsePaths(ins, derivation.inputs);
+    parsePaths(inDrvs, drv.inputDrvs);
+    parsePaths(inSrcs, drv.inputSrcs);

-    derivation.builder = aterm2String(builder);
-    derivation.platform = aterm2String(platform);
+    drv.builder = aterm2String(builder);
+    drv.platform = aterm2String(platform);

     for (ATermIterator i(args); i; ++i) {
         if (ATgetType(*i) != AT_APPL)
             throw badTerm("string expected", *i);
-        derivation.args.push_back(aterm2String(*i));
+        drv.args.push_back(aterm2String(*i));
     }

     for (ATermIterator i(bnds); i; ++i) {
         ATerm s1, s2;
         if (!matchEnvBinding(*i, s1, s2))
             throw badTerm("tuple of strings expected", *i);
-        derivation.env[aterm2String(s1)] = aterm2String(s2);
+        drv.env[aterm2String(s1)] = aterm2String(s2);
     }

-    return true;
-}
-
-
-StoreExpr parseStoreExpr(ATerm t)
-{
-    StoreExpr ne;
-    if (parseClosure(t, ne.closure))
-        ne.type = StoreExpr::neClosure;
-    else if (parseDerivation(t, ne.derivation))
-        ne.type = StoreExpr::neDerivation;
-    else throw badTerm("not a store expression", t);
-    return ne;
+    return drv;
 }
@@ -154,27 +99,11 @@ static ATermList unparsePaths(const PathSet & paths)
 }


-static ATerm unparseClosure(const Closure & closure)
-{
-    ATermList roots = unparsePaths(closure.roots);
-
-    ATermList elems = ATempty;
-    for (ClosureElems::const_iterator i = closure.elems.begin();
-         i != closure.elems.end(); i++)
-        elems = ATinsert(elems,
-            makeClosureElem(
-                toATerm(i->first),
-                unparsePaths(i->second.refs)));
-
-    return makeClosure(roots, elems);
-}
-
-
-static ATerm unparseDerivation(const Derivation & derivation)
+ATerm unparseDerivation(const Derivation & drv)
 {
     ATermList outputs = ATempty;
-    for (DerivationOutputs::const_iterator i = derivation.outputs.begin();
-         i != derivation.outputs.end(); i++)
+    for (DerivationOutputs::const_iterator i = drv.outputs.begin();
+         i != drv.outputs.end(); i++)
         outputs = ATinsert(outputs,
             makeDerivationOutput(
                 toATerm(i->first),
@@ -183,13 +112,13 @@ static ATerm unparseDerivation(const Derivation & derivation)
                 toATerm(i->second.hash)));

     ATermList args = ATempty;
-    for (Strings::const_iterator i = derivation.args.begin();
-         i != derivation.args.end(); i++)
+    for (Strings::const_iterator i = drv.args.begin();
+         i != drv.args.end(); i++)
         args = ATinsert(args, toATerm(*i));

     ATermList env = ATempty;
-    for (StringPairs::const_iterator i = derivation.env.begin();
-         i != derivation.env.end(); i++)
+    for (StringPairs::const_iterator i = drv.env.begin();
+         i != drv.env.end(); i++)
         env = ATinsert(env,
             makeEnvBinding(
                 toATerm(i->first),
@@ -197,19 +126,10 @@ static ATerm unparseDerivation(const Derivation & derivation)

     return makeDerive(
         ATreverse(outputs),
-        unparsePaths(derivation.inputs),
-        toATerm(derivation.platform),
-        toATerm(derivation.builder),
+        unparsePaths(drv.inputDrvs),
+        unparsePaths(drv.inputSrcs),
+        toATerm(drv.platform),
+        toATerm(drv.builder),
         ATreverse(args),
         ATreverse(env));
 }
-
-
-ATerm unparseStoreExpr(const StoreExpr & ne)
-{
-    if (ne.type == StoreExpr::neClosure)
-        return unparseClosure(ne.closure);
-    else if (ne.type == StoreExpr::neDerivation)
-        return unparseDerivation(ne.derivation);
-    else abort();
-}

@@ -14,11 +14,13 @@ struct ClosureElem

 typedef map<Path, ClosureElem> ClosureElems;

+/*
 struct Closure
 {
     PathSet roots;
     ClosureElems elems;
 };
+*/


 struct DerivationOutput
@@ -43,20 +45,14 @@ typedef map<string, string> StringPairs;
 struct Derivation
 {
     DerivationOutputs outputs; /* keyed on symbolic IDs */
-    PathSet inputs; /* store expressions, not actual inputs */
+    PathSet inputDrvs; /* inputs that are sub-derivations */
+    PathSet inputSrcs; /* inputs that are sources */
     string platform;
     Path builder;
     Strings args;
     StringPairs env;
 };

-struct StoreExpr
-{
-    enum { neClosure, neDerivation } type;
-    Closure closure;
-    Derivation derivation;
-};
-

 /* Hash an aterm. */
 Hash hashTerm(ATerm t);
@@ -65,10 +61,10 @@ Hash hashTerm(ATerm t);
 Path writeTerm(ATerm t, const string & suffix);

 /* Parse a store expression. */
-StoreExpr parseStoreExpr(ATerm t);
+Derivation parseDerivation(ATerm t);

 /* Parse a store expression. */
-ATerm unparseStoreExpr(const StoreExpr & ne);
+ATerm unparseDerivation(const Derivation & drv);


 #endif /* !__STOREEXPR_H */

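To make the new Derivation layout concrete, a hedged sketch (my own, not from the commit) that fills in the struct above and serialises it with unparseDerivation() and writeTerm(); all store paths are placeholders:

/* Hedged sketch: build a Derivation value with the inputDrvs/inputSrcs
   split and write it to the store as an ATerm. */
Derivation drv;
drv.platform = "i686-linux";
drv.builder = "/nix/store/...-bash/bin/sh";         /* placeholder */
drv.inputSrcs.insert("/nix/store/...-builder.sh");  /* a source input */
drv.inputDrvs.insert("/nix/store/...-glibc.drv");   /* a sub-derivation */

DerivationOutput out;
out.path = "/nix/store/...-hello";                  /* placeholder output path */
drv.outputs["out"] = out;

Path drvPath = writeTerm(unparseDerivation(drv), "-hello.drv");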