* nix-store --dump-db / --load-db to dump/load the Nix DB.
* nix-store --register-validity: option to supply the content hash of each path.
* Removed compatibility with Nix <= 0.7 stores.

parent 5b5a3af983
commit 66c51dc215
9 changed files with 130 additions and 137 deletions
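
Taken together, the two new operations serialise and restore the store's validity data. A minimal usage sketch inferred from the code in this commit (the dump file name is just a placeholder): `nix-store --dump-db > validity.dump` writes a validity registration for every valid path to standard output, and `nix-store --load-db < validity.dump` reads it back from standard input, re-registering the paths and trusting the content hashes given in the dump rather than rehashing them.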

@@ -6,6 +6,19 @@

<!--==================================================================-->

<section xml:id="ssec-relnotes-0.12"><title>Release 0.12 (TBA)</title>

<itemizedlist>

  <listitem><para><command>nix-store --dump-db / --load-db</command>.</para></listitem>

</itemizedlist>

</section>


<!--==================================================================-->

<section xml:id="ssec-relnotes-0.11"><title>Release 0.11 (December 31,

@@ -1263,34 +1263,6 @@ string showPaths(const PathSet & paths)
}


/* Return a string accepted by `nix-store --register-validity' that
   registers the specified paths as valid.  Note: it's the
   responsibility of the caller to provide a closure. */
static string makeValidityRegistration(const PathSet & paths,
    bool showDerivers)
{
    string s = "";

    for (PathSet::iterator i = paths.begin(); i != paths.end(); ++i) {
        s += *i + "\n";

        Path deriver = showDerivers ? store->queryDeriver(*i) : "";
        s += deriver + "\n";

        PathSet references;
        store->queryReferences(*i, references);

        s += (format("%1%\n") % references.size()).str();

        for (PathSet::iterator j = references.begin();
             j != references.end(); ++j)
            s += *j + "\n";
    }

    return s;
}


DerivationGoal::HookReply DerivationGoal::tryBuildHook()
{
    if (!useBuildHook) return rpDecline;

@@ -1417,7 +1389,7 @@ DerivationGoal::HookReply DerivationGoal::tryBuildHook()
    /* The `references' file has exactly the format accepted by
       `nix-store --register-validity'. */
    writeStringToFile(referencesFN,
        makeValidityRegistration(allInputs, true));
        makeValidityRegistration(allInputs, true, false));

    /* Tell the hook to proceed. */
    writeLine(toHook.writeSide, "okay");

@@ -1662,7 +1634,7 @@ void DerivationGoal::startBuilder()
        /* !!! in secure Nix, the writing should be done on the
           build uid for security (maybe). */
        writeStringToFile(tmpDir + "/" + fileName,
            makeValidityRegistration(refs, false));
            makeValidityRegistration(refs, false, false));
    }

    // The same for derivations

@@ -1701,7 +1673,7 @@ void DerivationGoal::startBuilder()
        /* !!! in secure Nix, the writing should be done on the
           build uid for security (maybe). */
        writeStringToFile(tmpDir + "/" + fileName,
            makeValidityRegistration(refs, false));
            makeValidityRegistration(refs, false, false));
    }

@@ -56,7 +56,6 @@ static TableId dbReferrers = 0;
static TableId dbDerivers = 0;


static void upgradeStore07();
static void upgradeStore09();
static void upgradeStore11();

@@ -128,12 +127,12 @@ LocalStore::LocalStore(bool reserveSpace)
            % curSchema % nixSchemaVersion);

    if (curSchema < nixSchemaVersion) {
        if (curSchema == 0) /* new store */
            curSchema = nixSchemaVersion;
        if (curSchema <= 1)
            upgradeStore07();
        if (curSchema == 2)
            upgradeStore09();
        if (curSchema == 3)
            upgradeStore11();
            throw Error("your Nix store is no longer supported");
        if (curSchema <= 2) upgradeStore09();
        if (curSchema <= 3) upgradeStore11();
        writeFile(schemaFN, (format("%1%") % nixSchemaVersion).str());
    }
}
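
Two things change in this cascade: a schema-1 store (Nix <= 0.7) is now rejected with an error instead of being upgraded in place, and the `<=` comparisons mean a schema-2 store is taken through both upgradeStore09() and upgradeStore11() in sequence.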

@@ -261,6 +260,14 @@ bool LocalStore::isValidPath(const Path & path)
}


PathSet LocalStore::queryValidPaths()
{
    Paths paths;
    nixDB.enumTable(noTxn, dbValidPaths, paths);
    return PathSet(paths.begin(), paths.end());
}


static string addPrefix(const string & prefix, const string & s)
{
    return prefix + string(1, (char) 0) + s;

@@ -1069,93 +1076,6 @@ void LocalStore::optimiseStore(bool dryRun, OptimiseStats & stats)
}


/* Upgrade from schema 1 (Nix <= 0.7) to schema 2 (Nix >= 0.8). */
static void upgradeStore07()
{
    printMsg(lvlError, "upgrading Nix store to new schema (this may take a while)...");

    Transaction txn(nixDB);

    Paths validPaths2;
    nixDB.enumTable(txn, dbValidPaths, validPaths2);
    PathSet validPaths(validPaths2.begin(), validPaths2.end());

    std::cerr << "hashing paths...";
    int n = 0;
    for (PathSet::iterator i = validPaths.begin(); i != validPaths.end(); ++i) {
        checkInterrupt();
        string s;
        nixDB.queryString(txn, dbValidPaths, *i, s);
        if (s == "") {
            Hash hash = hashPath(htSHA256, *i);
            setHash(txn, *i, hash);
            std::cerr << ".";
            if (++n % 1000 == 0) {
                txn.commit();
                txn.begin(nixDB);
            }
        }
    }
    std::cerr << std::endl;

    txn.commit();

    txn.begin(nixDB);

    std::cerr << "processing closures...";
    for (PathSet::iterator i = validPaths.begin(); i != validPaths.end(); ++i) {
        checkInterrupt();
        if (i->size() > 6 && string(*i, i->size() - 6) == ".store") {
            ATerm t = ATreadFromNamedFile(i->c_str());
            if (!t) throw Error(format("cannot read aterm from `%1%'") % *i);

            ATermList roots, elems;
            if (!matchOldClosure(t, roots, elems)) continue;

            for (ATermIterator j(elems); j; ++j) {

                ATerm path2;
                ATermList references2;
                if (!matchOldClosureElem(*j, path2, references2)) continue;

                Path path = aterm2String(path2);
                if (validPaths.find(path) == validPaths.end())
                    /* Skip this path; it's invalid.  This is a normal
                       condition (Nix <= 0.7 did not enforce closure
                       on closure store expressions). */
                    continue;

                PathSet references;
                for (ATermIterator k(references2); k; ++k) {
                    Path reference = aterm2String(*k);
                    if (validPaths.find(reference) == validPaths.end())
                        /* Bad reference.  Set it anyway and let the
                           user fix it. */
                        printMsg(lvlError, format("closure `%1%' contains reference from `%2%' "
                            "to invalid path `%3%' (run `nix-store --verify')")
                            % *i % path % reference);
                    references.insert(reference);
                }

                PathSet prevReferences;
                queryReferences(txn, path, prevReferences);
                if (prevReferences.size() > 0 && references != prevReferences)
                    printMsg(lvlError, format("warning: conflicting references for `%1%'") % path);

                if (references != prevReferences)
                    setReferences(txn, path, references);
            }

            std::cerr << ".";
        }
    }
    std::cerr << std::endl;

    /* !!! maybe this transaction is way too big */
    txn.commit();
}


/* Upgrade from schema 2 (0.8 <= Nix <= 0.9) to schema 3 (Nix >=
   0.10).  The only thing to do here is to upgrade the old `referer'
   table (which causes quadratic complexity in some cases) to the new

@@ -59,6 +59,8 @@ public:

    bool isValidPath(const Path & path);

    PathSet queryValidPaths();

    Hash queryPathHash(const Path & path);

    void queryReferences(const Path & path, PathSet & references);

@@ -185,6 +185,12 @@ bool RemoteStore::isValidPath(const Path & path)
}


PathSet RemoteStore::queryValidPaths()
{
    throw Error("not implemented");
}


bool RemoteStore::hasSubstitutes(const Path & path)
{
    writeInt(wopHasSubstitutes, to);
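
RemoteStore::queryValidPaths() simply throws, so judging from this diff alone, nix-store --dump-db works only when the store is accessed directly through LocalStore, not via the remote store interface.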

@@ -27,6 +27,8 @@ public:

    bool isValidPath(const Path & path);

    PathSet queryValidPaths();

    Hash queryPathHash(const Path & path);

    void queryReferences(const Path & path, PathSet & references);

@@ -151,11 +151,47 @@ Path computeStorePathForText(const string & suffix, const string & s,
}


ValidPathInfo decodeValidPathInfo(std::istream & str)
/* Return a string accepted by decodeValidPathInfo() that
   registers the specified paths as valid.  Note: it's the
   responsibility of the caller to provide a closure. */
string makeValidityRegistration(const PathSet & paths,
    bool showDerivers, bool showHash)
{
    string s = "";

    for (PathSet::iterator i = paths.begin(); i != paths.end(); ++i) {
        s += *i + "\n";

        if (showHash)
            s += printHash(store->queryPathHash(*i)) + "\n";

        Path deriver = showDerivers ? store->queryDeriver(*i) : "";
        s += deriver + "\n";

        PathSet references;
        store->queryReferences(*i, references);

        s += (format("%1%\n") % references.size()).str();

        for (PathSet::iterator j = references.begin();
             j != references.end(); ++j)
            s += *j + "\n";
    }

    return s;
}


ValidPathInfo decodeValidPathInfo(std::istream & str, bool hashGiven)
{
    ValidPathInfo info;
    getline(str, info.path);
    if (str.eof()) { info.path = ""; return info; }
    if (hashGiven) {
        string s;
        getline(str, s);
        info.hash = parseHash(htSHA256, s);
    }
    getline(str, info.deriver);
    string s; int n;
    getline(str, s);
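
For illustration, each entry in the format produced by makeValidityRegistration() and parsed by decodeValidPathInfo() consists of the store path, the SHA-256 content hash (only when showHash / hashGiven is in effect), the deriver (possibly an empty line), the number of references, and one reference per line. With hypothetical placeholder paths, an entry with two references would look roughly like:

/nix/store/<hash>-foo-1.0
<sha256 hash as printed by printHash()>
/nix/store/<hash>-foo-1.0.drv
2
/nix/store/<hash>-foo-1.0
/nix/store/<hash>-libbar-2.1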

@@ -35,6 +35,9 @@ public:
    /* Checks whether a path is valid. */
    virtual bool isValidPath(const Path & path) = 0;

    /* Query the set of valid paths. */
    virtual PathSet queryValidPaths() = 0;

    /* Queries the hash of a valid path. */
    virtual Hash queryPathHash(const Path & path) = 0;

@@ -249,6 +252,9 @@ extern boost::shared_ptr<StoreAPI> store;
boost::shared_ptr<StoreAPI> openStore(bool reserveSpace = true);


string makeValidityRegistration(const PathSet & paths,
    bool showDerivers, bool showHash);

struct ValidPathInfo
{
    Path path;

@@ -257,7 +263,8 @@ struct ValidPathInfo
    PathSet references;
};

ValidPathInfo decodeValidPathInfo(std::istream & str);
ValidPathInfo decodeValidPathInfo(std::istream & str,
    bool hashGiven = false);


}

@@ -401,25 +401,30 @@ static void opReadLog(Strings opFlags, Strings opArgs)
}


static void opRegisterValidity(Strings opFlags, Strings opArgs)
static void opDumpDB(Strings opFlags, Strings opArgs)
{
    bool reregister = false; // !!! maybe this should be the default
    if (!opFlags.empty()) throw UsageError("unknown flag");
    if (!opArgs.empty())
        throw UsageError("no arguments expected");
    PathSet validPaths = store->queryValidPaths();
    /* !!! this isn't streamy; makeValidityRegistration() builds a
       potentially gigantic string. */
    cout << makeValidityRegistration(validPaths, true, true);
}

    for (Strings::iterator i = opFlags.begin();
         i != opFlags.end(); ++i)
        if (*i == "--reregister") reregister = true;
        else throw UsageError(format("unknown flag `%1%'") % *i);

    if (!opArgs.empty()) throw UsageError("no arguments expected");

static void registerValidity(bool reregister, bool hashGiven, bool canonicalise)
{
    ValidPathInfos infos;

    while (1) {
        ValidPathInfo info = decodeValidPathInfo(cin);
        ValidPathInfo info = decodeValidPathInfo(cin, hashGiven);
        if (info.path == "") break;
        if (!store->isValidPath(info.path) || reregister) {
            /* !!! races */
            if (canonicalise)
                canonicalisePathMetaData(info.path);
            if (!hashGiven)
                info.hash = hashPath(htSHA256, info.path);
            infos.push_back(info);
        }

@@ -432,6 +437,32 @@ static void opRegisterValidity(Strings opFlags, Strings opArgs)
}


static void opLoadDB(Strings opFlags, Strings opArgs)
{
    if (!opFlags.empty()) throw UsageError("unknown flag");
    if (!opArgs.empty())
        throw UsageError("no arguments expected");
    registerValidity(true, true, false);
}


static void opRegisterValidity(Strings opFlags, Strings opArgs)
{
    bool reregister = false; // !!! maybe this should be the default
    bool hashGiven = false;

    for (Strings::iterator i = opFlags.begin();
         i != opFlags.end(); ++i)
        if (*i == "--reregister") reregister = true;
        else if (*i == "--hash-given") hashGiven = true;
        else throw UsageError(format("unknown flag `%1%'") % *i);

    if (!opArgs.empty()) throw UsageError("no arguments expected");

    registerValidity(reregister, hashGiven, true);
}


static void opCheckValidity(Strings opFlags, Strings opArgs)
{
    bool printInvalid = false;
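
As the two wrappers above show, --load-db is effectively --register-validity with --reregister and --hash-given, minus the canonicalisation step: opLoadDB calls registerValidity(true, true, false), whereas opRegisterValidity always passes canonicalise = true and therefore runs canonicalisePathMetaData() on each path it registers.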

@@ -681,6 +712,10 @@ void run(Strings args)
            op = opQuery;
        else if (arg == "--read-log" || arg == "-l")
            op = opReadLog;
        else if (arg == "--dump-db")
            op = opDumpDB;
        else if (arg == "--load-db")
            op = opLoadDB;
        else if (arg == "--register-validity")
            op = opRegisterValidity;
        else if (arg == "--check-validity")