#include <iostream>
#include <algorithm>

#include <sys/wait.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <unistd.h>
#include <utime.h>

#include "store.hh"
#include "globals.hh"
#include "db.hh"
#include "archive.hh"
#include "pathlocks.hh"
#include "gc.hh"


/* Nix database. */
static Database nixDB;


/* Database tables. */

/* dbValidPaths :: Path -> ()

   The existence of a key $p$ indicates that path $p$ is valid (that
   is, produced by a successful build). */
static TableId dbValidPaths = 0;

/* dbReferences :: Path -> [Path]

   This table lists the outgoing file system references for each
   output path that has been built by a Nix derivation.  These are
   found by scanning the path for the hash components of input
   paths. */
static TableId dbReferences = 0;

/* dbReferers :: Path -> [Path]

   This table is just the reverse mapping of dbReferences. */
static TableId dbReferers = 0;

/* dbSubstitutes :: Path -> [[Path]]

   Each pair $(p, subs)$ tells Nix that it can use any of the
   substitutes in $subs$ to build path $p$.  Each substitute defines a
   command-line invocation of a program (i.e., the first list element
   is the full path to the program, the remaining elements are
   arguments).

   The main purpose of this is for distributed caching of derivates.
   One system can compute a derivate and put it on a website (as a Nix
   archive), for instance, and then another system can register a
   substitute for that derivate.  The substitute in this case might be
   a Nix derivation that fetches the Nix archive.
*/
static TableId dbSubstitutes = 0;

/* dbDerivers :: Path -> [Path]

   This table lists the derivation used to build a path.  There can
   only be multiple such paths for fixed-output derivations (i.e.,
   derivations specifying an expected hash). */
static TableId dbDerivers = 0;


bool Substitute::operator == (const Substitute & sub)
{
    return program == sub.program
        && args == sub.args;
}


static void upgradeStore();


void openDB()
{
    if (readOnlyMode) return;

    try {
        nixDB.open(nixDBPath);
    } catch (DbNoPermission & e) {
        printMsg(lvlTalkative, "cannot access Nix database; continuing anyway");
        readOnlyMode = true;
        return;
    }

    dbValidPaths = nixDB.openTable("validpaths");
    dbReferences = nixDB.openTable("references");
    dbReferers = nixDB.openTable("referers");
    dbSubstitutes = nixDB.openTable("substitutes");
    dbDerivers = nixDB.openTable("derivers");

    int curSchema = 0;
    Path schemaFN = nixDBPath + "/schema";
    if (pathExists(schemaFN)) {
        string s = readFile(schemaFN);
        if (!string2Int(s, curSchema))
            throw Error(format("`%1%' is corrupt") % schemaFN);
    }

    if (curSchema > nixSchemaVersion)
        throw Error(format("current Nix store schema is version %1%, but I only support %2%")
            % curSchema % nixSchemaVersion);

    if (curSchema < nixSchemaVersion) {
        upgradeStore();
        writeFile(schemaFN, (format("%1%") % nixSchemaVersion).str());
    }
}
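
/* Note added for clarity: the `schema' file written above simply holds the
   decimal schema version (e.g. "2" for the schema targeted by
   upgradeStore() below); openDB() rewrites it only after a successful
   upgrade. */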


void initDB()
{
}


void createStoreTransaction(Transaction & txn)
{
    Transaction txn2(nixDB);
    txn2.moveTo(txn);
}


/* Path copying. */

struct CopySink : DumpSink
{
    int fd;
    virtual void operator () (const unsigned char * data, unsigned int len)
    {
        writeFull(fd, data, len);
    }
};


struct CopySource : RestoreSource
{
    int fd;
    virtual void operator () (unsigned char * data, unsigned int len)
    {
        readFull(fd, data, len);
    }
};


void copyPath(const Path & src, const Path & dst)
{
    debug(format("copying `%1%' to `%2%'") % src % dst);

    /* Unfortunately C++ doesn't support coroutines, so we have no
       nice way to chain CopySink and CopySource together.  Instead we
       fork off a child to run the sink.  (Fork-less platforms should
       use a thread.) */

    /* Create a pipe. */
    Pipe pipe;
    pipe.create();

    /* Fork. */
    Pid pid;
    pid = fork();
    switch (pid) {

    case -1:
        throw SysError("unable to fork");

    case 0: /* child */
        try {
            pipe.writeSide.close();
            CopySource source;
            source.fd = pipe.readSide;
            restorePath(dst, source);
            _exit(0);
        } catch (exception & e) {
            cerr << "error: " << e.what() << endl;
        }
        _exit(1);
    }

    /* Parent. */

    pipe.readSide.close();

    CopySink sink;
    sink.fd = pipe.writeSide;
    {
        SwitchToOriginalUser sw;
        dumpPath(src, sink);
    }

    /* Wait for the child to finish. */
    int status = pid.wait(true);
    if (!statusOk(status))
        throw Error(format("cannot copy `%1%' to `%2%': child %3%")
            % src % dst % statusToString(status));
}


bool isInStore(const Path & path)
{
    return path[0] == '/'
        && path.compare(0, nixStore.size(), nixStore) == 0
        && path.size() >= nixStore.size() + 2
        && path[nixStore.size()] == '/';
}


bool isStorePath(const Path & path)
{
    return isInStore(path)
        && path.find('/', nixStore.size() + 1) == Path::npos;
}


void assertStorePath(const Path & path)
{
    if (!isStorePath(path))
        throw Error(format("path `%1%' is not in the Nix store") % path);
}


Path toStorePath(const Path & path)
{
    if (!isInStore(path))
        throw Error(format("path `%1%' is not in the Nix store") % path);
    unsigned int slash = path.find('/', nixStore.size() + 1);
    if (slash == Path::npos)
        return path;
    else
        return Path(path, 0, slash);
}
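
/* Minimal usage sketch of the path predicates above (not part of the
   original source; the store path is hypothetical and nixStore is
   assumed to be "/nix/store"). */
#if 0
static void examplePathPredicates()
{
    Path p = "/nix/store/abcd1234-foo/bin/sh";
    assert(isInStore(p));                    /* somewhere inside the store */
    assert(!isStorePath(p));                 /* but not a top-level entry */
    assert(toStorePath(p) == "/nix/store/abcd1234-foo");
}
#endif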


void canonicalisePathMetaData(const Path & path)
{
    checkInterrupt();

    struct stat st;
    if (lstat(path.c_str(), &st))
        throw SysError(format("getting attributes of path `%1%'") % path);

    if (!S_ISLNK(st.st_mode)) {

        /* Mask out all type related bits. */
        mode_t mode = st.st_mode & ~S_IFMT;

        if (mode != 0444 && mode != 0555) {
            mode = (st.st_mode & S_IFMT)
                | 0444
                | (st.st_mode & S_IXUSR ? 0111 : 0);
            if (chmod(path.c_str(), mode) == -1)
                throw SysError(format("changing mode of `%1%' to %2$o") % path % mode);
        }

        if (st.st_uid != getuid() || st.st_gid != getgid()) {
            if (chown(path.c_str(), getuid(), getgid()) == -1)
                throw SysError(format("changing owner/group of `%1%' to %2%/%3%")
                    % path % getuid() % getgid());
        }

        if (st.st_mtime != 0) {
            struct utimbuf utimbuf;
            utimbuf.actime = st.st_atime;
            utimbuf.modtime = 0;
            if (utime(path.c_str(), &utimbuf) == -1)
                throw SysError(format("changing modification time of `%1%'") % path);
        }

    }

    if (S_ISDIR(st.st_mode)) {
        Strings names = readDirectory(path);
        for (Strings::iterator i = names.begin(); i != names.end(); ++i)
            canonicalisePathMetaData(path + "/" + *i);
    }
}


static bool isValidPathTxn(const Transaction & txn, const Path & path)
{
    string s;
    return nixDB.queryString(txn, dbValidPaths, path, s);
}


bool isValidPath(const Path & path)
{
    return isValidPathTxn(noTxn, path);
}


static Substitutes readSubstitutes(const Transaction & txn,
    const Path & srcPath);


static bool isRealisablePath(const Transaction & txn, const Path & path)
{
    return isValidPathTxn(txn, path)
        || readSubstitutes(txn, path).size() > 0;
}


static PathSet getReferers(const Transaction & txn, const Path & storePath)
{
    Paths referers;
    nixDB.queryStrings(txn, dbReferers, storePath, referers);
    return PathSet(referers.begin(), referers.end());
}


void setReferences(const Transaction & txn, const Path & storePath,
    const PathSet & references)
{
    if (!isRealisablePath(txn, storePath))
        throw Error(
            format("cannot set references for path `%1%' which is invalid and has no substitutes")
            % storePath);

    Paths oldReferences;
    nixDB.queryStrings(txn, dbReferences, storePath, oldReferences);

    nixDB.setStrings(txn, dbReferences, storePath,
        Paths(references.begin(), references.end()));

    /* Update the referers mappings of all referenced paths. */
    for (PathSet::const_iterator i = references.begin();
         i != references.end(); ++i)
    {
        PathSet referers = getReferers(txn, *i);
        referers.insert(storePath);
        nixDB.setStrings(txn, dbReferers, *i,
            Paths(referers.begin(), referers.end()));
    }

    /* Remove referer mappings from paths that are no longer
       references. */
    for (Paths::iterator i = oldReferences.begin();
         i != oldReferences.end(); ++i)
        if (references.find(*i) == references.end()) {
            PathSet referers = getReferers(txn, *i);
            referers.erase(storePath);
            nixDB.setStrings(txn, dbReferers, *i,
                Paths(referers.begin(), referers.end()));
        }
}
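
/* Illustrative sketch (hypothetical paths): after
   setReferences(txn, "/nix/store/aaa-foo", { "/nix/store/bbb-bar" }),
   dbReferences maps "/nix/store/aaa-foo" -> ["/nix/store/bbb-bar"], and
   dbReferers maps "/nix/store/bbb-bar" -> [..., "/nix/store/aaa-foo"],
   so the dependency graph can be walked in both directions. */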


void queryReferences(const Transaction & txn,
    const Path & storePath, PathSet & references)
{
    Paths references2;
    if (!isRealisablePath(txn, storePath))
        throw Error(format("path `%1%' is not valid") % storePath);
    nixDB.queryStrings(txn, dbReferences, storePath, references2);
    references.insert(references2.begin(), references2.end());
}


void queryReferers(const Transaction & txn,
    const Path & storePath, PathSet & referers)
{
    if (!isRealisablePath(txn, storePath))
        throw Error(format("path `%1%' is not valid") % storePath);
    PathSet referers2 = getReferers(txn, storePath);
    referers.insert(referers2.begin(), referers2.end());
}


void setDeriver(const Transaction & txn, const Path & storePath,
    const Path & deriver)
{
    assertStorePath(storePath);
    if (deriver == "") return;
    assertStorePath(deriver);
    if (!isRealisablePath(txn, storePath))
        throw Error(format("path `%1%' is not valid") % storePath);
    nixDB.setString(txn, dbDerivers, storePath, deriver);
}


Path queryDeriver(const Transaction & txn, const Path & storePath)
{
    if (!isRealisablePath(txn, storePath))
        throw Error(format("path `%1%' is not valid") % storePath);
    Path deriver;
    if (nixDB.queryString(txn, dbDerivers, storePath, deriver))
        return deriver;
    else
        return "";
}


const int substituteVersion = 2;


static Substitutes readSubstitutes(const Transaction & txn,
    const Path & srcPath)
{
    Strings ss;
    nixDB.queryStrings(txn, dbSubstitutes, srcPath, ss);

    Substitutes subs;

    for (Strings::iterator i = ss.begin(); i != ss.end(); ++i) {
        if (i->size() < 4 || (*i)[3] != 0) {
            /* Old-style substitute.  !!! remove this code
               eventually? */
            break;
        }
        Strings ss2 = unpackStrings(*i);
        if (ss2.size() == 0) continue;
        int version;
        if (!string2Int(ss2.front(), version)) continue;
        if (version != substituteVersion) continue;
        if (ss2.size() != 4) throw Error("malformed substitute");
        Strings::iterator j = ss2.begin();
        j++;
        Substitute sub;
        sub.deriver = *j++;
        sub.program = *j++;
        sub.args = unpackStrings(*j++);
        subs.push_back(sub);
    }

    return subs;
}


static void writeSubstitutes(const Transaction & txn,
    const Path & srcPath, const Substitutes & subs)
{
    Strings ss;

    for (Substitutes::const_iterator i = subs.begin();
         i != subs.end(); ++i)
    {
        Strings ss2;
        ss2.push_back((format("%1%") % substituteVersion).str());
        ss2.push_back(i->deriver);
        ss2.push_back(i->program);
        ss2.push_back(packStrings(i->args));
        ss.push_back(packStrings(ss2));
    }

    nixDB.setStrings(txn, dbSubstitutes, srcPath, ss);
}
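
/* On-disk sketch: each dbSubstitutes entry written above is
   packStrings(["<substituteVersion>", deriver, program, packStrings(args)]),
   i.e. the first packed field is the decimal version ("2"), which is what
   readSubstitutes() unpacks and checks before trusting the record. */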


void registerSubstitute(const Transaction & txn,
    const Path & srcPath, const Substitute & sub)
{
    assertStorePath(srcPath);

    Substitutes subs = readSubstitutes(txn, srcPath);

    /* New substitutes take precedence over old ones.  If the
       substitute is already present, it's moved to the front. */
    remove(subs.begin(), subs.end(), sub);
    subs.push_front(sub);

    writeSubstitutes(txn, srcPath, subs);
}
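
/* Minimal usage sketch (not part of the original source; the store path,
   program and URL below are hypothetical). */
#if 0
static void exampleRegisterSubstitute(const Transaction & txn)
{
    Substitute sub;
    sub.deriver = "";                                  /* deriver may be unknown */
    sub.program = "/usr/local/bin/fetch-nar";          /* hypothetical fetcher */
    sub.args.push_back("http://example.org/foo.nar");  /* hypothetical source */
    registerSubstitute(txn, "/nix/store/abcd1234-foo", sub);
}
#endif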


Substitutes querySubstitutes(const Transaction & txn, const Path & srcPath)
{
    return readSubstitutes(txn, srcPath);
}


void clearSubstitutes()
{
    Transaction txn(nixDB);

    /* Iterate over all paths for which there are substitutes. */
    Paths subKeys;
    nixDB.enumTable(txn, dbSubstitutes, subKeys);
    for (Paths::iterator i = subKeys.begin(); i != subKeys.end(); ++i) {
        /* Delete all substitutes for path *i. */
        nixDB.delPair(txn, dbSubstitutes, *i);
    }

    txn.commit();
}


static void setHash(const Transaction & txn, const Path & storePath,
    const Hash & hash)
{
    assert(hash.type == htSHA256);
    nixDB.setString(txn, dbValidPaths, storePath, "sha256:" + printHash(hash));
}


static Hash queryHash(const Transaction & txn, const Path & storePath)
{
    string s;
    nixDB.queryString(txn, dbValidPaths, storePath, s);
    unsigned int colon = s.find(':');
    if (colon == string::npos)
        throw Error(format("corrupt hash `%1%' in valid-path entry for `%2%'")
            % s % storePath);
    HashType ht = parseHashType(string(s, 0, colon));
    if (ht == htUnknown)
        throw Error(format("unknown hash type `%1%' in valid-path entry for `%2%'")
            % string(s, 0, colon) % storePath);
    return parseHash(ht, string(s, colon + 1));
}
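
/* Illustrative note: a dbValidPaths entry written by setHash() is the
   string "sha256:" followed by the hash as rendered by printHash();
   queryHash() splits it on the first ':' to recover the hash type and
   value. */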


Hash queryPathHash(const Path & path)
{
    if (!isValidPath(path))
        throw Error(format("path `%1%' is not valid") % path);
    return queryHash(noTxn, path);
}


void registerValidPath(const Transaction & txn,
    const Path & _path, const Hash & hash, const PathSet & references,
    const Path & deriver)
{
    Path path(canonPath(_path));
    assertStorePath(path);

    debug(format("registering path `%1%'") % path);
    setHash(txn, path, hash);

    setReferences(txn, path, references);

    /* Check that all referenced paths are also valid. */
    for (PathSet::iterator i = references.begin(); i != references.end(); ++i)
        if (!isValidPathTxn(txn, *i))
            throw Error(format("cannot register path `%1%' as valid, since its reference `%2%' is invalid")
                % path % *i);

    setDeriver(txn, path, deriver);
}


/* Invalidate a path.  The caller is responsible for checking that
   there are no referers. */
static void invalidatePath(const Path & path, Transaction & txn)
{
    debug(format("unregistering path `%1%'") % path);

    /* Clear the `references' entry for this path, as well as the
       inverse `referers' entries, and the `derivers' entry; but only
       if there are no substitutes for this path.  This maintains the
       cleanup invariant. */
    if (querySubstitutes(txn, path).size() == 0) {
        setReferences(txn, path, PathSet());
        nixDB.delPair(txn, dbDerivers, path);
    }

    nixDB.delPair(txn, dbValidPaths, path);
}


Path makeStorePath(const string & type,
    const Hash & hash, const string & suffix)
{
    /* e.g., "source:sha256:1abc...:/nix/store:foo.tar.gz" */
    string s = type + ":sha256:" + printHash(hash) + ":"
        + nixStore + ":" + suffix;

    return nixStore + "/"
        + printHash32(compressHash(hashString(htSHA256, s), 20))
        + "-" + suffix;
}
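
/* Illustrative example (hypothetical digest): with nixStore set to
   "/nix/store", makeStorePath("source", h, "foo.tar.gz") yields a path
   of the form "/nix/store/<32-character base-32 digest>-foo.tar.gz",
   the digest being the SHA-256 of the string s above compressed to
   160 bits (20 bytes). */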


Path addToStore(const Path & _srcPath)
{
    Path srcPath(absPath(_srcPath));
    debug(format("adding `%1%' to the store") % srcPath);

    Hash h(htSHA256);
    {
        SwitchToOriginalUser sw;
        h = hashPath(htSHA256, srcPath);
    }

    string baseName = baseNameOf(srcPath);
    Path dstPath = makeStorePath("source", h, baseName);

    addTempRoot(dstPath);

    if (!readOnlyMode && !isValidPath(dstPath)) {

        /* The first check above is an optimisation to prevent
           unnecessary lock acquisition. */

        PathSet lockPaths;
        lockPaths.insert(dstPath);
        PathLocks outputLock(lockPaths);

        if (!isValidPath(dstPath)) {

            if (pathExists(dstPath)) deletePath(dstPath);

            /* !!! race: srcPath might change between hashPath() and
               here! */

            copyPath(srcPath, dstPath);

            Hash h2 = hashPath(htSHA256, dstPath);
            if (h != h2)
                throw Error(format("contents of `%1%' changed while copying it to `%2%' (%3% -> %4%)")
                    % srcPath % dstPath % printHash(h) % printHash(h2));

            canonicalisePathMetaData(dstPath);

            Transaction txn(nixDB);
            registerValidPath(txn, dstPath, h, PathSet(), "");
            txn.commit();
        }

        outputLock.setDeletion(true);
    }

    return dstPath;
}


Path addTextToStore(const string & suffix, const string & s,
    const PathSet & references)
{
    Hash hash = hashString(htSHA256, s);

    Path dstPath = makeStorePath("text", hash, suffix);

    addTempRoot(dstPath);

    if (!readOnlyMode && !isValidPath(dstPath)) {

        PathSet lockPaths;
        lockPaths.insert(dstPath);
        PathLocks outputLock(lockPaths);

        if (!isValidPath(dstPath)) {

            if (pathExists(dstPath)) deletePath(dstPath);

            writeStringToFile(dstPath, s);

            canonicalisePathMetaData(dstPath);

            Transaction txn(nixDB);
            registerValidPath(txn, dstPath,
                hashPath(htSHA256, dstPath), references, "");
            txn.commit();
        }

        outputLock.setDeletion(true);
    }

    return dstPath;
}
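
/* Minimal usage sketch (not part of the original source; the file name
   and script text are hypothetical). */
#if 0
static void exampleAddToStore()
{
    Path p1 = addToStore("./foo.tar.gz");   /* copy a file or tree into the store */
    Path p2 = addTextToStore("builder.sh", "echo hello", PathSet());
    /* both return content-addressed paths of the form <store>/<hash>-<name> */
}
#endif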


void deleteFromStore(const Path & _path)
{
    Path path(canonPath(_path));

    assertStorePath(path);

    Transaction txn(nixDB);
    if (isValidPathTxn(txn, path)) {
        PathSet referers = getReferers(txn, path);
        if (referers.size() > 1 ||
            (referers.size() == 1 &&
             *referers.begin() != path))
            throw Error(format("cannot delete path `%1%' because it is in use") % path);
        invalidatePath(path, txn);
    }
    txn.commit();

    deletePath(path);
}


void verifyStore(bool checkContents)
{
    Transaction txn(nixDB);

    Paths paths;
    PathSet validPaths;
    nixDB.enumTable(txn, dbValidPaths, paths);

    for (Paths::iterator i = paths.begin(); i != paths.end(); ++i) {
        if (!pathExists(*i)) {
            printMsg(lvlError, format("path `%1%' disappeared") % *i);
            invalidatePath(*i, txn);
        } else if (!isStorePath(*i)) {
            printMsg(lvlError, format("path `%1%' is not in the Nix store") % *i);
            invalidatePath(*i, txn);
        } else {
            if (checkContents) {
                Hash expected = queryHash(txn, *i);
                Hash current = hashPath(expected.type, *i);
                if (current != expected) {
                    printMsg(lvlError, format("path `%1%' was modified! "
                        "expected hash `%2%', got `%3%'")
                        % *i % printHash(expected) % printHash(current));
                }
            }
            validPaths.insert(*i);
        }
    }

    /* "Usable" paths are those that are valid or have a
       substitute. */
    PathSet usablePaths(validPaths);

    /* Check that the values of the substitute mappings are valid
       paths. */
    Paths subKeys;
    nixDB.enumTable(txn, dbSubstitutes, subKeys);
    for (Paths::iterator i = subKeys.begin(); i != subKeys.end(); ++i) {
        Substitutes subs = readSubstitutes(txn, *i);
        if (!isStorePath(*i)) {
            printMsg(lvlError, format("found substitutes for non-store path `%1%'") % *i);
            nixDB.delPair(txn, dbSubstitutes, *i);
        }
        else if (subs.size() == 0)
            nixDB.delPair(txn, dbSubstitutes, *i);
        else
            usablePaths.insert(*i);
    }

    /* Check the cleanup invariant: only usable paths can have
       `references', `referers', or `derivers' entries. */

    /* Check the `derivers' table. */
    Paths deriversKeys;
    nixDB.enumTable(txn, dbDerivers, deriversKeys);
    for (Paths::iterator i = deriversKeys.begin();
         i != deriversKeys.end(); ++i)
    {
        if (usablePaths.find(*i) == usablePaths.end()) {
            printMsg(lvlError, format("found deriver entry for unusable path `%1%'")
                % *i);
            nixDB.delPair(txn, dbDerivers, *i);
        }
        else {
            Path deriver = queryDeriver(txn, *i);
            if (!isStorePath(deriver)) {
                printMsg(lvlError, format("found corrupt deriver `%1%' for `%2%'")
                    % deriver % *i);
                nixDB.delPair(txn, dbDerivers, *i);
            }
        }
    }

    /* Check the `references' table. */
    Paths referencesKeys;
    nixDB.enumTable(txn, dbReferences, referencesKeys);
    for (Paths::iterator i = referencesKeys.begin();
         i != referencesKeys.end(); ++i)
    {
        if (usablePaths.find(*i) == usablePaths.end()) {
            printMsg(lvlError, format("found references entry for unusable path `%1%'")
                % *i);
            nixDB.delPair(txn, dbReferences, *i);
        }
        else {
            bool isValid = validPaths.find(*i) != validPaths.end();
            PathSet references;
            queryReferences(txn, *i, references);
            for (PathSet::iterator j = references.begin();
                 j != references.end(); ++j)
            {
                PathSet referers = getReferers(txn, *j);
                if (referers.find(*i) == referers.end()) {
                    printMsg(lvlError, format("missing referer mapping from `%1%' to `%2%'")
                        % *j % *i);
                }
                if (isValid && validPaths.find(*j) == validPaths.end()) {
                    printMsg(lvlError, format("incomplete closure: `%1%' needs missing `%2%'")
                        % *i % *j);
                }
            }
        }
    }

    /* Check the `referers' table. */
    Paths referersKeys;
    nixDB.enumTable(txn, dbReferers, referersKeys);
    for (Paths::iterator i = referersKeys.begin();
         i != referersKeys.end(); ++i)
    {
        if (usablePaths.find(*i) == usablePaths.end()) {
            printMsg(lvlError, format("found referers entry for unusable path `%1%'")
                % *i);
            nixDB.delPair(txn, dbReferers, *i);
        }
        else {
            PathSet referers;
            queryReferers(txn, *i, referers);
            for (PathSet::iterator j = referers.begin();
                 j != referers.end(); ++j)
            {
                Paths references;
                nixDB.queryStrings(txn, dbReferences, *j, references);
                if (find(references.begin(), references.end(), *i) == references.end()) {
                    printMsg(lvlError, format("missing reference mapping from `%1%' to `%2%'")
                        % *j % *i);
                }
            }
        }
    }

    txn.commit();
}


#include "aterm.hh"
#include "derivations-ast.hh"


/* Upgrade from schema 1 (Nix <= 0.7) to schema 2 (Nix >= 0.8). */
static void upgradeStore()
{
    printMsg(lvlError, "upgrading Nix store to new schema (this may take a while)...");

    Transaction txn(nixDB);

    Paths validPaths2;
    nixDB.enumTable(txn, dbValidPaths, validPaths2);
    PathSet validPaths(validPaths2.begin(), validPaths2.end());

    cerr << "hashing paths...";
    int n = 0;
    for (PathSet::iterator i = validPaths.begin(); i != validPaths.end(); ++i) {
        checkInterrupt();
        string s;
        nixDB.queryString(txn, dbValidPaths, *i, s);
        if (s == "") {
            Hash hash = hashPath(htSHA256, *i);
            setHash(txn, *i, hash);
            cerr << ".";
            if (++n % 1000 == 0) {
                txn.commit();
                txn.begin(nixDB);
            }
        }
    }
    cerr << "\n";

    txn.commit();

    txn.begin(nixDB);

    cerr << "processing closures...";
    for (PathSet::iterator i = validPaths.begin(); i != validPaths.end(); ++i) {
        checkInterrupt();
        if (i->size() > 6 && string(*i, i->size() - 6) == ".store") {
            ATerm t = ATreadFromNamedFile(i->c_str());
            if (!t) throw Error(format("cannot read aterm from `%1%'") % *i);

            ATermList roots, elems;
            if (!matchOldClosure(t, roots, elems)) continue;

            for (ATermIterator j(elems); j; ++j) {

                ATerm path2;
                ATermList references2;
                if (!matchOldClosureElem(*j, path2, references2)) continue;

                Path path = aterm2String(path2);
                if (validPaths.find(path) == validPaths.end())
                    /* Skip this path; it's invalid.  This is a normal
                       condition (Nix <= 0.7 did not enforce closure
                       on closure store expressions). */
                    continue;

                PathSet references;
                for (ATermIterator k(references2); k; ++k) {
                    Path reference = aterm2String(*k);
                    if (validPaths.find(reference) == validPaths.end())
                        /* Bad reference.  Set it anyway and let the
                           user fix it. */
                        printMsg(lvlError, format("closure `%1%' contains reference from `%2%' "
                            "to invalid path `%3%' (run `nix-store --verify')")
                            % *i % path % reference);
                    references.insert(reference);
                }

                PathSet prevReferences;
                queryReferences(txn, path, prevReferences);
                if (prevReferences.size() > 0 && references != prevReferences)
                    printMsg(lvlError, format("warning: conflicting references for `%1%'") % path);

                if (references != prevReferences)
                    setReferences(txn, path, references);
            }

            cerr << ".";
        }
    }
    cerr << "\n";

    /* !!! maybe this transaction is way too big */
    txn.commit();
}