Fix whitespace
parent 66a3ac6a56
commit 6183cf2f19
1 changed file with 71 additions and 71 deletions
@@ -6,7 +6,7 @@
#include "worker-protocol.hh"
#include "derivations.hh"
#include "immutable.hh"

#include <iostream>
#include <algorithm>

@@ -147,11 +147,11 @@ struct SQLiteStmtUse
};


struct SQLiteTxn
{
    bool active;
    sqlite3 * db;

    SQLiteTxn(sqlite3 * db) : active(false) {
        this->db = db;
        if (sqlite3_exec(db, "begin;", 0, 0, 0) != SQLITE_OK)
@@ -159,14 +159,14 @@ struct SQLiteTxn
        active = true;
    }

    void commit()
    {
        if (sqlite3_exec(db, "commit;", 0, 0, 0) != SQLITE_OK)
            throwSQLiteError(db, "committing transaction");
        active = false;
    }

    ~SQLiteTxn()
    {
        try {
            if (active && sqlite3_exec(db, "rollback;", 0, 0, 0) != SQLITE_OK)
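Aside, not part of the commit: the SQLiteTxn helper in the hunks above is a plain RAII transaction guard — "begin" is issued in the constructor, "commit" only on an explicit commit(), and the destructor rolls back whatever was left uncommitted. A minimal standalone sketch of the same pattern against the public SQLite C API; the Txn name, the in-memory database and the demo table are illustrative, not taken from this file:

#include <sqlite3.h>
#include <stdexcept>
#include <iostream>

/* Illustrative RAII transaction guard in the spirit of SQLiteTxn above. */
struct Txn {
    sqlite3 * db;
    bool active = false;
    explicit Txn(sqlite3 * db) : db(db) {
        if (sqlite3_exec(db, "begin;", 0, 0, 0) != SQLITE_OK)
            throw std::runtime_error(sqlite3_errmsg(db));
        active = true;
    }
    void commit() {
        if (sqlite3_exec(db, "commit;", 0, 0, 0) != SQLITE_OK)
            throw std::runtime_error(sqlite3_errmsg(db));
        active = false;
    }
    ~Txn() {
        /* Roll back anything left uncommitted; never throw from a destructor. */
        if (active) sqlite3_exec(db, "rollback;", 0, 0, 0);
    }
};

int main() {
    sqlite3 * db;
    if (sqlite3_open(":memory:", &db) != SQLITE_OK) return 1;
    sqlite3_exec(db, "create table t (x integer);", 0, 0, 0);
    try {
        Txn txn(db);
        sqlite3_exec(db, "insert into t values (42);", 0, 0, 0);
        txn.commit();          // without this call, ~Txn would roll back
    } catch (std::exception & e) {
        std::cerr << e.what() << "\n";
    }
    sqlite3_close(db);
    return 0;
}

The point of the pattern is that any exception thrown between construction and commit() automatically undoes the partial work; the sketch builds with g++ -std=c++11 txn.cc -lsqlite3.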
@@ -199,9 +199,9 @@ void checkStoreNotSymlink()
LocalStore::LocalStore(bool reserveSpace)
{
    substitutablePathsLoaded = false;

    schemaPath = nixDBPath + "/schema";

    if (readOnlyMode) {
        openDB(false);
        return;
@@ -220,7 +220,7 @@ LocalStore::LocalStore(bool reserveSpace)
        if (symlink(profilesDir.c_str(), (gcRootsDir + "/profiles").c_str()) == -1)
            throw SysError(format("creating symlink to `%1%'") % profilesDir);
    }

    checkStoreNotSymlink();

    /* We can't open a SQLite database if the disk is full. Since
@@ -252,7 +252,7 @@ LocalStore::LocalStore(bool reserveSpace)
        openDB(false);
        return;
    }

    if (!lockFile(globalLock, ltRead, false)) {
        printMsg(lvlError, "waiting for the big Nix store lock...");
        lockFile(globalLock, ltRead, true);
@@ -264,20 +264,20 @@ LocalStore::LocalStore(bool reserveSpace)
    if (curSchema > nixSchemaVersion)
        throw Error(format("current Nix store schema is version %1%, but I only support %2%")
            % curSchema % nixSchemaVersion);

    else if (curSchema == 0) { /* new store */
        curSchema = nixSchemaVersion;
        openDB(true);
        writeFile(schemaPath, (format("%1%") % nixSchemaVersion).str());
    }

    else if (curSchema < nixSchemaVersion) {
        if (curSchema < 5)
            throw Error(
                "Your Nix store has a database in Berkeley DB format,\n"
                "which is no longer supported. To convert to the new format,\n"
                "please upgrade Nix to version 0.12 first.");

        if (!lockFile(globalLock, ltWrite, false)) {
            printMsg(lvlError, "waiting for exclusive access to the Nix store...");
            lockFile(globalLock, ltWrite, true);
@@ -293,7 +293,7 @@ LocalStore::LocalStore(bool reserveSpace)

        lockFile(globalLock, ltRead, true);
    }

    else openDB(false);
}

@@ -339,7 +339,7 @@ void LocalStore::openDB(bool create)

    /* !!! check whether sqlite has been built with foreign key
       support */

    /* Whether SQLite should fsync(). "Normal" synchronous mode
       should be safe enough. If the user asks for it, don't sync at
       all. This can cause database corruption if the system
@@ -368,7 +368,7 @@ void LocalStore::openDB(bool create)
       derivation is done in a single fsync(). */
    if (mode == "wal" && sqlite3_exec(db, "pragma wal_autocheckpoint = 8192;", 0, 0, 0) != SQLITE_OK)
        throwSQLiteError(db, "setting autocheckpoint interval");

    /* Initialise the database schema, if necessary. */
    if (create) {
#include "schema.sql.hh"
@@ -423,7 +423,7 @@ void canonicalisePathMetaData(const Path & path, bool recurse)

    struct stat st;
    if (lstat(path.c_str(), &st))
        throw SysError(format("getting attributes of path `%1%'") % path);

    /* Really make sure that the path is of a supported type. This
       has already been checked in dumpPath(). */
@@ -451,7 +451,7 @@ void canonicalisePathMetaData(const Path & path, bool recurse)

        /* Mask out all type related bits. */
        mode_t mode = st.st_mode & ~S_IFMT;

        if (mode != 0444 && mode != 0555) {
            mode = (st.st_mode & S_IFMT)
                | 0444
@@ -461,7 +461,7 @@ void canonicalisePathMetaData(const Path & path, bool recurse)
        }

    }

    if (st.st_mtime != mtimeStore) {
        struct timeval times[2];
        times[0].tv_sec = st.st_atime;
@@ -472,14 +472,14 @@ void canonicalisePathMetaData(const Path & path, bool recurse)
        if (lutimes(path.c_str(), times) == -1)
#else
        if (!S_ISLNK(st.st_mode) && utimes(path.c_str(), times) == -1)
#endif
            throw SysError(format("changing modification time of `%1%'") % path);
    }

    if (recurse && S_ISDIR(st.st_mode)) {
        Strings names = readDirectory(path);
        foreach (Strings::iterator, i, names)
            canonicalisePathMetaData(path + "/" + *i, true);
    }

    makeImmutable(path);
@@ -494,7 +494,7 @@ void canonicalisePathMetaData(const Path & path)
       be a symlink, since we can't change its ownership. */
    struct stat st;
    if (lstat(path.c_str(), &st))
        throw SysError(format("getting attributes of path `%1%'") % path);

    if (st.st_uid != geteuid()) {
        assert(S_ISLNK(st.st_mode));
@@ -508,7 +508,7 @@ void LocalStore::checkDerivationOutputs(const Path & drvPath, const Derivation &
    string drvName = storePathToName(drvPath);
    assert(isDerivation(drvName));
    drvName = string(drvName, 0, drvName.size() - drvExtension.size());

    if (isFixedOutputDrv(drv)) {
        DerivationOutputs::const_iterator out = drv.outputs.find("out");
        if (out == drv.outputs.end())
@@ -532,7 +532,7 @@ void LocalStore::checkDerivationOutputs(const Path & drvPath, const Derivation &
        }

        Hash h = hashDerivationModulo(*this, drvCopy);

        foreach (DerivationOutputs::const_iterator, i, drv.outputs) {
            Path outPath = makeOutputPath(i->first, h, drvName);
            StringPairs::const_iterator j = drv.env.find(i->first);
@@ -568,14 +568,14 @@ unsigned long long LocalStore::addValidPath(const ValidPathInfo & info, bool che
       derivation. */
    if (isDerivation(info.path)) {
        Derivation drv = parseDerivation(readFile(info.path));

        /* Verify that the output paths in the derivation are correct
           (i.e., follow the scheme for computing output paths from
           derivations). Note that if this throws an error, then the
           DB transaction is rolled back, so the path validity
           registration above is undone. */
        if (checkOutputs) checkDerivationOutputs(info.path, drv);

        foreach (DerivationOutputs::iterator, i, drv.outputs) {
            SQLiteStmtUse use(stmtAddDerivationOutput);
            stmtAddDerivationOutput.bind(id);
@@ -681,7 +681,7 @@ ValidPathInfo LocalStore::queryPathInfo(const Path & path)
    SQLiteStmtUse use1(stmtQueryPathInfo);

    stmtQueryPathInfo.bind(path);

    int r = sqlite3_step(stmtQueryPathInfo);
    if (r == SQLITE_DONE) throw Error(format("path `%1%' is not valid") % path);
    if (r != SQLITE_ROW) throwSQLiteError(db, "querying path in database");
@@ -691,7 +691,7 @@ ValidPathInfo LocalStore::queryPathInfo(const Path & path)
    const char * s = (const char *) sqlite3_column_text(stmtQueryPathInfo, 1);
    assert(s);
    info.hash = parseHashField(path, s);

    info.registrationTime = sqlite3_column_int(stmtQueryPathInfo, 2);

    s = (const char *) sqlite3_column_text(stmtQueryPathInfo, 3);
@@ -769,9 +769,9 @@ PathSet LocalStore::queryAllValidPaths()
{
    SQLiteStmt stmt;
    stmt.create(db, "select path from ValidPaths");

    PathSet res;

    int r;
    while ((r = sqlite3_step(stmt)) == SQLITE_ROW) {
        const char * s = (const char *) sqlite3_column_text(stmt, 0);
@@ -834,10 +834,10 @@ PathSet LocalStore::queryValidDerivers(const Path & path)
        assert(s);
        derivers.insert(s);
    }

    if (r != SQLITE_DONE)
        throwSQLiteError(db, format("error getting valid derivers of `%1%'") % path);

    return derivers;
}

@@ -845,10 +845,10 @@ PathSet LocalStore::queryValidDerivers(const Path & path)
PathSet LocalStore::queryDerivationOutputs(const Path & path)
{
    SQLiteTxn txn(db);

    SQLiteStmtUse use(stmtQueryDerivationOutputs);
    stmtQueryDerivationOutputs.bind(queryValidPathId(path));

    PathSet outputs;
    int r;
    while ((r = sqlite3_step(stmtQueryDerivationOutputs)) == SQLITE_ROW) {
@@ -856,7 +856,7 @@ PathSet LocalStore::queryDerivationOutputs(const Path & path)
        assert(s);
        outputs.insert(s);
    }

    if (r != SQLITE_DONE)
        throwSQLiteError(db, format("error getting outputs of `%1%'") % path);

@@ -867,10 +867,10 @@ PathSet LocalStore::queryDerivationOutputs(const Path & path)
StringSet LocalStore::queryDerivationOutputNames(const Path & path)
{
    SQLiteTxn txn(db);

    SQLiteStmtUse use(stmtQueryDerivationOutputs);
    stmtQueryDerivationOutputs.bind(queryValidPathId(path));

    StringSet outputNames;
    int r;
    while ((r = sqlite3_step(stmtQueryDerivationOutputs)) == SQLITE_ROW) {
@@ -878,7 +878,7 @@ StringSet LocalStore::queryDerivationOutputNames(const Path & path)
        assert(s);
        outputNames.insert(s);
    }

    if (r != SQLITE_DONE)
        throwSQLiteError(db, format("error getting output names of `%1%'") % path);

@@ -889,11 +889,11 @@ StringSet LocalStore::queryDerivationOutputNames(const Path & path)
Path LocalStore::queryPathFromHashPart(const string & hashPart)
{
    if (hashPart.size() != 32) throw Error("invalid hash part");

    SQLiteTxn txn(db);

    Path prefix = nixStore + "/" + hashPart;

    SQLiteStmtUse use(stmtQueryPathFromHashPart);
    stmtQueryPathFromHashPart.bind(prefix);

@@ -909,16 +909,16 @@ Path LocalStore::queryPathFromHashPart(const string & hashPart)
void LocalStore::startSubstituter(const Path & substituter, RunningSubstituter & run)
{
    if (run.pid != -1) return;

    debug(format("starting substituter program `%1%'") % substituter);

    Pipe toPipe, fromPipe;

    toPipe.create();
    fromPipe.create();

    run.pid = fork();

    switch (run.pid) {

        case -1:
@@ -932,7 +932,7 @@ void LocalStore::startSubstituter(const Path & substituter, RunningSubstituter &
               library named libutil. As a result, substituters
               written in Perl (i.e. all of them) fail. */
            unsetenv("DYLD_LIBRARY_PATH");

            fromPipe.readSide.close();
            toPipe.writeSide.close();
            if (dup2(toPipe.readSide, STDIN_FILENO) == -1)
@@ -949,7 +949,7 @@ void LocalStore::startSubstituter(const Path & substituter, RunningSubstituter &
    }

    /* Parent. */

    run.to = toPipe.writeSide.borrow();
    run.from = fromPipe.readSide.borrow();
}
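Aside, not part of the commit: the startSubstituter hunks above follow the usual fork-plus-pipes idiom — create a pipe for each direction, fork, dup2 the child's stdin and stdout onto the pipe ends, exec the helper program, and keep the opposite ends in the parent. A minimal standalone sketch of that idiom, using /bin/cat as a stand-in for the substituter program (variable names here are illustrative):

#include <sys/types.h>
#include <sys/wait.h>
#include <unistd.h>
#include <cstdio>

int main() {
    int toChild[2], fromChild[2];          // [0] = read end, [1] = write end
    if (pipe(toChild) == -1 || pipe(fromChild) == -1) { perror("pipe"); return 1; }

    pid_t pid = fork();
    if (pid == -1) { perror("fork"); return 1; }

    if (pid == 0) {                        /* child */
        close(toChild[1]);
        close(fromChild[0]);
        if (dup2(toChild[0], STDIN_FILENO) == -1) _exit(1);
        if (dup2(fromChild[1], STDOUT_FILENO) == -1) _exit(1);
        execl("/bin/cat", "cat", (char *) 0);   // stand-in for the substituter
        _exit(1);                               // only reached if exec failed
    }

    /* parent: keep the write end towards the child and the read end from it */
    close(toChild[0]);
    close(fromChild[1]);
    const char msg[] = "hello\n";
    write(toChild[1], msg, sizeof msg - 1);
    close(toChild[1]);                     // EOF for the child

    char buf[64];
    ssize_t n = read(fromChild[0], buf, sizeof buf - 1);
    if (n > 0) { buf[n] = '\0'; fputs(buf, stdout); }
    close(fromChild[0]);
    waitpid(pid, 0, 0);
    return 0;
}

Closing the unused pipe ends in each process matters: if the parent kept the write end of the child's stdin pipe open, the child would never see end-of-file.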
@@ -1054,7 +1054,7 @@ void LocalStore::registerValidPaths(const ValidPathInfos & infos)
    try {
        SQLiteTxn txn(db);
        PathSet paths;

        foreach (ValidPathInfos::const_iterator, i, infos) {
            assert(i->hash.type == htSHA256);
            /* !!! Maybe the registration info should be updated if the
@@ -1145,7 +1145,7 @@ Path LocalStore::addToStoreFromDump(const string & dump, const string & name,
                hash = hashPath(htSHA256, dstPath);

            optimisePath(dstPath); // FIXME: combine with hashPath()

            ValidPathInfo info;
            info.path = dstPath;
            info.hash = hash.first;
@@ -1183,7 +1183,7 @@ Path LocalStore::addTextToStore(const string & name, const string & s,
    const PathSet & references)
{
    Path dstPath = computeStorePathForText(name, s, references);

    addTempRoot(dstPath);

    if (!isValidPath(dstPath)) {
@@ -1201,7 +1201,7 @@ Path LocalStore::addTextToStore(const string & name, const string & s,
            HashResult hash = hashPath(htSHA256, dstPath);

            optimisePath(dstPath);

            ValidPathInfo info;
            info.path = dstPath;
            info.hash = hash.first;
@@ -1259,7 +1259,7 @@ void LocalStore::exportPath(const Path & path, bool sign,
        throw Error(format("path `%1%' is not valid") % path);

    HashAndWriteSink hashAndWriteSink(sink);

    dumpPath(path, hashAndWriteSink);

    /* Refuse to export paths that have changed. This prevents
@@ -1274,7 +1274,7 @@ void LocalStore::exportPath(const Path & path, bool sign,
    writeInt(EXPORT_MAGIC, hashAndWriteSink);

    writeString(path, hashAndWriteSink);

    PathSet references;
    queryReferences(path, references);
    writeStrings(references, hashAndWriteSink);
@@ -1284,9 +1284,9 @@ void LocalStore::exportPath(const Path & path, bool sign,

    if (sign) {
        Hash hash = hashAndWriteSink.currentHash();

        writeInt(1, hashAndWriteSink);

        Path tmpDir = createTempDir();
        AutoDelete delTmp(tmpDir);
        Path hashFile = tmpDir + "/hash";
@@ -1305,7 +1305,7 @@ void LocalStore::exportPath(const Path & path, bool sign,
        string signature = runProgram(OPENSSL_PATH, true, args);

        writeString(signature, hashAndWriteSink);

    } else
        writeInt(0, hashAndWriteSink);
}
@@ -1348,7 +1348,7 @@ Path LocalStore::createTempDirInStore()
Path LocalStore::importPath(bool requireSignature, Source & source)
{
    HashAndReadSource hashAndReadSource(source);

    /* We don't yet know what store path this archive contains (the
       store path follows the archive data proper), and besides, we
       don't know yet whether the signature is valid. */
@@ -1378,7 +1378,7 @@ Path LocalStore::importPath(bool requireSignature, Source & source)

    if (requireSignature && !haveSignature)
        throw Error(format("imported archive of `%1%' lacks a signature") % dstPath);

    if (haveSignature) {
        string signature = readString(hashAndReadSource);

@@ -1432,13 +1432,13 @@ Path LocalStore::importPath(bool requireSignature, Source & source)
                % unpacked % dstPath);

            canonicalisePathMetaData(dstPath);

            /* !!! if we were clever, we could prevent the hashPath()
               here. */
            HashResult hash = hashPath(htSHA256, dstPath);

            optimisePath(dstPath); // FIXME: combine with hashPath()

            ValidPathInfo info;
            info.path = dstPath;
            info.hash = hash.first;
@@ -1447,10 +1447,10 @@ Path LocalStore::importPath(bool requireSignature, Source & source)
            info.deriver = deriver != "" && isValidPath(deriver) ? deriver : "";
            registerValidPath(info);
        }

        outputLock.setDeletion(true);
    }

    return dstPath;
}

@@ -1498,7 +1498,7 @@ void LocalStore::verifyStore(bool checkContents)

    /* Acquire the global GC lock to prevent a garbage collection. */
    AutoCloseFD fdGCLock = openGCLock(ltWrite);

    Paths entries = readDirectory(nixStore);
    PathSet store(entries.begin(), entries.end());

@@ -1527,7 +1527,7 @@ void LocalStore::verifyStore(bool checkContents)
                /* Check the content hash (optionally - slow). */
                printMsg(lvlTalkative, format("checking contents of `%1%'") % *i);
                HashResult current = hashPath(info.hash.type, *i);

                if (info.hash != nullHash && info.hash != current.first) {
                    printMsg(lvlError, format("path `%1%' was modified! "
                        "expected hash `%2%', got `%3%'")
@@ -1542,18 +1542,18 @@ void LocalStore::verifyStore(bool checkContents)
                    info.hash = current.first;
                    update = true;
                }

                /* Fill in missing narSize fields (from old stores). */
                if (info.narSize == 0) {
                    printMsg(lvlError, format("updating size field on `%1%' to %2%") % *i % current.second);
                    info.narSize = current.second;
                    update = true;
                }

                if (update) updatePathInfo(info);

            }

        } catch (Error & e) {
            /* It's possible that the path got GC'ed, so ignore
               errors on invalid paths. */
@@ -1569,7 +1569,7 @@ void LocalStore::verifyPath(const Path & path, const PathSet & store,
    PathSet & done, PathSet & validPaths)
{
    checkInterrupt();

    if (done.find(path) != done.end()) return;
    done.insert(path);

@@ -1596,10 +1596,10 @@ void LocalStore::verifyPath(const Path & path, const PathSet & store,
            invalidatePath(path);
        } else
            printMsg(lvlError, format("path `%1%' disappeared, but it still has valid referrers!") % path);

        return;
    }

    validPaths.insert(path);
}

@@ -1665,14 +1665,14 @@ void LocalStore::upgradeStore6()
    PathSet validPaths = queryValidPathsOld();

    SQLiteTxn txn(db);

    foreach (PathSet::iterator, i, validPaths) {
        addValidPath(queryPathInfoOld(*i), false);
        std::cerr << ".";
    }

    std::cerr << "|";

    foreach (PathSet::iterator, i, validPaths) {
        ValidPathInfo info = queryPathInfoOld(*i);
        unsigned long long referrer = queryValidPathId(*i);