Add a Store::addToStore() variant that accepts a NAR
As a side effect, this ensures that signatures are propagated when copying paths between stores. The import/export code has also been refactored to make use of the new method.
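In outline, the new primitive lets any two stores exchange a path by serialising it to a NAR on the source side and replaying that NAR, together with its ValidPathInfo (references, deriver, signatures), on the destination side. The following is a minimal sketch of the calling pattern, condensed from the updated copyStorePath() further down; the wrapper name copyOnePath is made up for illustration, and the usual libstore headers ("store-api.hh", "serialise.hh") are assumed to be in scope.

    void copyOnePath(ref<Store> srcStore, ref<Store> dstStore, const Path & storePath)
    {
        // Metadata (references, deriver, narHash, signatures) from the source store.
        auto info = srcStore->queryPathInfo(storePath);

        // Serialise the path contents as a NAR into memory.
        StringSink sink;
        srcStore->narFromPath(storePath, sink);

        // The new primitive: register the NAR plus its ValidPathInfo in the
        // destination store. This is what carries signatures across stores.
        dstStore->addToStore(*info, *sink.s);
    }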
parent b6c768fb6a
commit 538a64e8c3

15 changed files with 235 additions and 338 deletions
@@ -46,8 +46,10 @@ Path BinaryCacheStore::narInfoFileFor(const Path & storePath)
     return storePathToHash(storePath) + ".narinfo";
 }

-void BinaryCacheStore::addToCache(const ValidPathInfo & info, ref<std::string> nar)
+void BinaryCacheStore::addToStore(const ValidPathInfo & info, const std::string & nar, bool repair)
 {
+    if (!repair && isValidPath(info.path)) return;
+
     /* Verify that all references are valid. This may do some .narinfo
        reads, but typically they'll already be cached. */
     for (auto & ref : info.references)
@@ -60,14 +62,13 @@ void BinaryCacheStore::addToCache(const ValidPathInfo & info, ref<std::string> n
         }

     auto narInfoFile = narInfoFileFor(info.path);
-    if (fileExists(narInfoFile)) return;

-    assert(nar->compare(0, narMagic.size(), narMagic) == 0);
+    assert(nar.compare(0, narMagic.size(), narMagic) == 0);

     auto narInfo = make_ref<NarInfo>(info);

-    narInfo->narSize = nar->size();
-    narInfo->narHash = hashString(htSHA256, *nar);
+    narInfo->narSize = nar.size();
+    narInfo->narHash = hashString(htSHA256, nar);

     if (info.narHash && info.narHash != narInfo->narHash)
         throw Error(format("refusing to copy corrupted path ‘%1%’ to binary cache") % info.path);
@@ -83,7 +84,7 @@ void BinaryCacheStore::addToCache(const ValidPathInfo & info, ref<std::string> n
     auto duration = std::chrono::duration_cast<std::chrono::milliseconds>(now2 - now1).count();
     printMsg(lvlTalkative, format("copying path ‘%1%’ (%2% bytes, compressed %3$.1f%% in %4% ms) to binary cache")
         % narInfo->path % narInfo->narSize
-        % ((1.0 - (double) narCompressed->size() / nar->size()) * 100.0)
+        % ((1.0 - (double) narCompressed->size() / nar.size()) * 100.0)
         % duration);

     /* Atomically write the NAR file. */
@@ -91,13 +92,13 @@ void BinaryCacheStore::addToCache(const ValidPathInfo & info, ref<std::string> n
         + (compression == "xz" ? ".xz" :
            compression == "bzip2" ? ".bz2" :
            "");
-    if (!fileExists(narInfo->url)) {
+    if (repair || !fileExists(narInfo->url)) {
         stats.narWrite++;
         upsertFile(narInfo->url, *narCompressed);
     } else
         stats.narWriteAverted++;

-    stats.narWriteBytes += nar->size();
+    stats.narWriteBytes += nar.size();
     stats.narWriteCompressedBytes += narCompressed->size();
     stats.narWriteCompressionTimeMs += duration;

@@ -141,7 +142,7 @@ void BinaryCacheStore::narFromPath(const Path & storePath, Sink & sink)
     /* Decompress the NAR. FIXME: would be nice to have the remote
        side do this. */
     try {
-        nar = decompress(info->compression, ref<std::string>(nar));
+        nar = decompress(info->compression, *nar);
     } catch (UnknownCompressionMethod &) {
         throw Error(format("binary cache path ‘%s’ uses unknown compression method ‘%s’")
             % storePath % info->compression);
@@ -156,51 +157,6 @@ void BinaryCacheStore::narFromPath(const Path & storePath, Sink & sink)
     sink((unsigned char *) nar->c_str(), nar->size());
 }

-void BinaryCacheStore::exportPath(const Path & storePath, Sink & sink)
-{
-    auto res = queryPathInfo(storePath);
-
-    narFromPath(storePath, sink);
-
-    // FIXME: check integrity of NAR.
-
-    sink << exportMagic << storePath << res->references << res->deriver << 0;
-}
-
-Paths BinaryCacheStore::importPaths(Source & source,
-    std::shared_ptr<FSAccessor> accessor)
-{
-    Paths res;
-    while (true) {
-        unsigned long long n = readLongLong(source);
-        if (n == 0) break;
-        if (n != 1) throw Error("input doesn't look like something created by ‘nix-store --export’");
-        res.push_back(importPath(source, accessor));
-    }
-    return res;
-}
-
-struct TeeSource : Source
-{
-    Source & readSource;
-    ref<std::string> data;
-    TeeSource(Source & readSource)
-        : readSource(readSource)
-        , data(make_ref<std::string>())
-    {
-    }
-    size_t read(unsigned char * data, size_t len)
-    {
-        size_t n = readSource.read(data, len);
-        this->data->append((char *) data, n);
-        return n;
-    }
-};
-
-struct NopSink : ParseSink
-{
-};
-
 std::shared_ptr<ValidPathInfo> BinaryCacheStore::queryPathInfoUncached(const Path & storePath)
 {
     auto narInfoFile = narInfoFileFor(storePath);
@@ -260,8 +216,7 @@ Path BinaryCacheStore::addToStore(const string & name, const Path & srcPath,
     ValidPathInfo info;
     info.path = makeFixedOutputPath(recursive, hashAlgo, h, name);

-    if (repair || !isValidPath(info.path))
-        addToCache(info, sink.s);
+    addToStore(info, *sink.s, repair);

     return info.path;
 }
@@ -276,7 +231,7 @@ Path BinaryCacheStore::addTextToStore(const string & name, const string & s,
     if (repair || !isValidPath(info.path)) {
         StringSink sink;
         dumpString(s, sink);
-        addToCache(info, sink.s);
+        addToStore(info, *sink.s, repair);
     }

     return info.path;
@@ -306,7 +261,7 @@ void BinaryCacheStore::buildPaths(const PathSet & paths, BuildMode buildMode)
         StringSink sink;
         dumpPath(storePath, sink);

-        addToCache(*info, sink.s);
+        addToStore(*info, *sink.s, buildMode == bmRepair);
     }
 }

@@ -343,7 +298,7 @@ struct BinaryCacheStoreAccessor : public FSAccessor
         if (i != nars.end()) return {i->second, restPath};

         StringSink sink;
-        store->exportPath(storePath, sink);
+        store->narFromPath(storePath, sink);

         auto accessor = makeNarAccessor(sink.s);
         nars.emplace(storePath, accessor);
@@ -381,36 +336,4 @@ ref<FSAccessor> BinaryCacheStore::getFSAccessor()
         std::dynamic_pointer_cast<BinaryCacheStore>(shared_from_this())));
 }

-Path BinaryCacheStore::importPath(Source & source, std::shared_ptr<FSAccessor> accessor)
-{
-    /* FIXME: some cut&paste of LocalStore::importPath(). */
-
-    /* Extract the NAR from the source. */
-    TeeSource tee(source);
-    NopSink sink;
-    parseDump(sink, tee);
-
-    uint32_t magic = readInt(source);
-    if (magic != exportMagic)
-        throw Error("Nix archive cannot be imported; wrong format");
-
-    ValidPathInfo info;
-    info.path = readStorePath(source);
-
-    info.references = readStorePaths<PathSet>(source);
-
-    readString(source); // deriver, don't care
-
-    bool haveSignature = readInt(source) == 1;
-    assert(!haveSignature);
-
-    addToCache(info, tee.data);
-
-    auto accessor_ = std::dynamic_pointer_cast<BinaryCacheStoreAccessor>(accessor);
-    if (accessor_)
-        accessor_->nars.emplace(info.path, makeNarAccessor(tee.data));
-
-    return info.path;
-}
-
 }

@@ -46,8 +46,6 @@ private:

     std::string narInfoFileFor(const Path & storePath);

-    void addToCache(const ValidPathInfo & info, ref<std::string> nar);
-
 public:

     bool isValidPathUncached(const Path & path) override;
@@ -82,6 +80,9 @@ public:
     void querySubstitutablePathInfos(const PathSet & paths,
         SubstitutablePathInfos & infos) override;

+    void addToStore(const ValidPathInfo & info, const std::string & nar,
+        bool repair = false) override;
+
     Path addToStore(const string & name, const Path & srcPath,
         bool recursive = true, HashType hashAlgo = htSHA256,
         PathFilter & filter = defaultPathFilter, bool repair = false) override;
@@ -91,13 +92,6 @@ public:

     void narFromPath(const Path & path, Sink & sink) override;

-    void exportPath(const Path & path, Sink & sink) override;
-
-    Paths importPaths(Source & source,
-        std::shared_ptr<FSAccessor> accessor) override;
-
-    Path importPath(Source & source, std::shared_ptr<FSAccessor> accessor);
-
     void buildPaths(const PathSet & paths, BuildMode buildMode = bmNormal) override;

     BuildResult buildDerivation(const Path & drvPath, const BasicDerivation & drv,

@@ -31,7 +31,7 @@ void builtinFetchurl(const BasicDerivation & drv)
     auto unpack = drv.env.find("unpack");
     if (unpack != drv.env.end() && unpack->second == "1") {
         if (string(*data.data, 0, 6) == string("\xfd" "7zXZ\0", 6))
-            data.data = decompress("xz", ref<std::string>(data.data));
+            data.data = decompress("xz", *data.data);
         StringSource source(*data.data);
         restorePath(storePath, source);
     } else

src/libstore/export-import.cc (new file, 136 lines)
@@ -0,0 +1,136 @@
+#include "store-api.hh"
+#include "archive.hh"
+#include "worker-protocol.hh"
+
+#include <algorithm>
+
+namespace nix {
+
+struct HashAndWriteSink : Sink
+{
+    Sink & writeSink;
+    HashSink hashSink;
+    HashAndWriteSink(Sink & writeSink) : writeSink(writeSink), hashSink(htSHA256)
+    {
+    }
+    virtual void operator () (const unsigned char * data, size_t len)
+    {
+        writeSink(data, len);
+        hashSink(data, len);
+    }
+    Hash currentHash()
+    {
+        return hashSink.currentHash().first;
+    }
+};
+
+void Store::exportPaths(const Paths & paths, Sink & sink)
+{
+    Paths sorted = topoSortPaths(PathSet(paths.begin(), paths.end()));
+    std::reverse(sorted.begin(), sorted.end());
+
+    std::string doneLabel("paths exported");
+    logger->incExpected(doneLabel, sorted.size());
+
+    for (auto & path : sorted) {
+        Activity act(*logger, lvlInfo, format("exporting path ‘%s’") % path);
+        sink << 1;
+        exportPath(path, sink);
+        logger->incProgress(doneLabel);
+    }
+
+    sink << 0;
+}
+
+void Store::exportPath(const Path & path, Sink & sink)
+{
+    auto info = queryPathInfo(path);
+
+    HashAndWriteSink hashAndWriteSink(sink);
+
+    narFromPath(path, hashAndWriteSink);
+
+    /* Refuse to export paths that have changed. This prevents
+       filesystem corruption from spreading to other machines.
+       Don't complain if the stored hash is zero (unknown). */
+    Hash hash = hashAndWriteSink.currentHash();
+    if (hash != info->narHash && info->narHash != Hash(info->narHash.type))
+        throw Error(format("hash of path ‘%1%’ has changed from ‘%2%’ to ‘%3%’!") % path
+            % printHash(info->narHash) % printHash(hash));
+
+    hashAndWriteSink << exportMagic << path << info->references << info->deriver << 0;
+}
+
+struct TeeSource : Source
+{
+    Source & readSource;
+    ref<std::string> data;
+    TeeSource(Source & readSource)
+        : readSource(readSource)
+        , data(make_ref<std::string>())
+    {
+    }
+    size_t read(unsigned char * data, size_t len)
+    {
+        size_t n = readSource.read(data, len);
+        this->data->append((char *) data, n);
+        return n;
+    }
+};
+
+struct NopSink : ParseSink
+{
+};
+
+Paths Store::importPaths(Source & source, std::shared_ptr<FSAccessor> accessor)
+{
+    Paths res;
+    while (true) {
+        unsigned long long n = readLongLong(source);
+        if (n == 0) break;
+        if (n != 1) throw Error("input doesn't look like something created by ‘nix-store --export’");
+
+        /* Extract the NAR from the source. */
+        TeeSource tee(source);
+        NopSink sink;
+        parseDump(sink, tee);
+
+        uint32_t magic = readInt(source);
+        if (magic != exportMagic)
+            throw Error("Nix archive cannot be imported; wrong format");
+
+        ValidPathInfo info;
+
+        info.path = readStorePath(source);
+
+        Activity act(*logger, lvlInfo, format("importing path ‘%s’") % info.path);
+
+        info.references = readStorePaths<PathSet>(source);
+
+        info.deriver = readString(source);
+        if (info.deriver != "") assertStorePath(info.deriver);
+
+        info.narHash = hashString(htSHA256, *tee.data);
+        info.narSize = tee.data->size();
+
+        // Ignore optional legacy signature.
+        if (readInt(source) == 1)
+            readString(source);
+
+        addToStore(info, *tee.data);
+
+        // FIXME: implement accessors?
+        assert(!accessor);
+#if 0
+        auto accessor_ = std::dynamic_pointer_cast<BinaryCacheStoreAccessor>(accessor);
+        if (accessor_)
+            accessor_->nars.emplace(info.path, makeNarAccessor(tee.data));
+#endif
+
+        res.push_back(info.path);
+    }
+
+    return res;
+}
+
+}

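For orientation, the stream format produced by Store::exportPaths() above (and consumed by Store::importPaths()) can be summarised as follows. This is an illustrative sketch derived from the code, not text from the commit; integer fields use the usual Nix wire serialisation.

    // Per exported path, the sink receives:
    //   1                        -- "another path follows" marker
    //   <NAR dump of the path>   -- written by narFromPath(); captured on import
    //                               via TeeSource while parseDump() validates it
    //   exportMagic (0x4558494e) -- sanity check
    //   <store path>             -- string
    //   <references>             -- list of store paths
    //   <deriver>                -- string, possibly empty
    //   0                        -- legacy "no signature" flag (a 1 followed by a
    //                               string is tolerated and ignored on import)
    // The whole stream is terminated by a single 0 in place of the leading 1.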
@@ -901,6 +901,40 @@ void LocalStore::invalidatePath(State & state, const Path & path)
 }


+void LocalStore::addToStore(const ValidPathInfo & info, const std::string & nar, bool repair)
+{
+    addTempRoot(info.path);
+
+    if (repair || !isValidPath(info.path)) {
+
+        PathLocks outputLock;
+
+        /* Lock the output path. But don't lock if we're being called
+           from a build hook (whose parent process already acquired a
+           lock on this path). */
+        Strings locksHeld = tokenizeString<Strings>(getEnv("NIX_HELD_LOCKS"));
+        if (find(locksHeld.begin(), locksHeld.end(), info.path) == locksHeld.end())
+            outputLock.lockPaths({info.path});
+
+        if (repair || !isValidPath(info.path)) {
+
+            deletePath(info.path);
+
+            StringSource source(nar);
+            restorePath(info.path, source);
+
+            canonicalisePathMetaData(info.path, -1);
+
+            optimisePath(info.path); // FIXME: combine with hashPath()
+
+            registerValidPath(info);
+        }
+
+        outputLock.setDeletion(true);
+    }
+}
+
+
 Path LocalStore::addToStoreFromDump(const string & dump, const string & name,
     bool recursive, HashType hashAlgo, bool repair)
 {
@@ -1016,69 +1050,6 @@ Path LocalStore::addTextToStore(const string & name, const string & s,
 }


-struct HashAndWriteSink : Sink
-{
-    Sink & writeSink;
-    HashSink hashSink;
-    HashAndWriteSink(Sink & writeSink) : writeSink(writeSink), hashSink(htSHA256)
-    {
-    }
-    virtual void operator () (const unsigned char * data, size_t len)
-    {
-        writeSink(data, len);
-        hashSink(data, len);
-    }
-    Hash currentHash()
-    {
-        return hashSink.currentHash().first;
-    }
-};
-
-
-void LocalStore::exportPath(const Path & path, Sink & sink)
-{
-    assertStorePath(path);
-
-    printMsg(lvlTalkative, format("exporting path ‘%1%’") % path);
-
-    auto info = queryPathInfo(path);
-
-    HashAndWriteSink hashAndWriteSink(sink);
-
-    dumpPath(path, hashAndWriteSink);
-
-    /* Refuse to export paths that have changed. This prevents
-       filesystem corruption from spreading to other machines.
-       Don't complain if the stored hash is zero (unknown). */
-    Hash hash = hashAndWriteSink.currentHash();
-    if (hash != info->narHash && info->narHash != Hash(info->narHash.type))
-        throw Error(format("hash of path ‘%1%’ has changed from ‘%2%’ to ‘%3%’!") % path
-            % printHash(info->narHash) % printHash(hash));
-
-    hashAndWriteSink << exportMagic << path << info->references << info->deriver;
-
-    hashAndWriteSink << 0; // backwards compatibility
-}
-
-
-struct HashAndReadSource : Source
-{
-    Source & readSource;
-    HashSink hashSink;
-    bool hashing;
-    HashAndReadSource(Source & readSource) : readSource(readSource), hashSink(htSHA256)
-    {
-        hashing = true;
-    }
-    size_t read(unsigned char * data, size_t len)
-    {
-        size_t n = readSource.read(data, len);
-        if (hashing) hashSink(data, n);
-        return n;
-    }
-};
-
-
 /* Create a temporary directory in the store that won't be
    garbage-collected. */
 Path LocalStore::createTempDirInStore()
@@ -1095,103 +1066,6 @@ Path LocalStore::createTempDirInStore()
 }


-Path LocalStore::importPath(Source & source)
-{
-    HashAndReadSource hashAndReadSource(source);
-
-    /* We don't yet know what store path this archive contains (the
-       store path follows the archive data proper), and besides, we
-       don't know yet whether the signature is valid. */
-    Path tmpDir = createTempDirInStore();
-    AutoDelete delTmp(tmpDir);
-    Path unpacked = tmpDir + "/unpacked";
-
-    restorePath(unpacked, hashAndReadSource);
-
-    uint32_t magic = readInt(hashAndReadSource);
-    if (magic != exportMagic)
-        throw Error("Nix archive cannot be imported; wrong format");
-
-    Path dstPath = readStorePath(hashAndReadSource);
-
-    printMsg(lvlTalkative, format("importing path ‘%1%’") % dstPath);
-
-    PathSet references = readStorePaths<PathSet>(hashAndReadSource);
-
-    Path deriver = readString(hashAndReadSource);
-    if (deriver != "") assertStorePath(deriver);
-
-    Hash hash = hashAndReadSource.hashSink.finish().first;
-    hashAndReadSource.hashing = false;
-
-    bool haveSignature = readInt(hashAndReadSource) == 1;
-
-    if (haveSignature)
-        // Ignore legacy signature.
-        readString(hashAndReadSource);
-
-    /* Do the actual import. */
-
-    /* !!! way too much code duplication with addTextToStore() etc. */
-    addTempRoot(dstPath);
-
-    if (!isValidPath(dstPath)) {
-
-        PathLocks outputLock;
-
-        /* Lock the output path. But don't lock if we're being called
-           from a build hook (whose parent process already acquired a
-           lock on this path). */
-        Strings locksHeld = tokenizeString<Strings>(getEnv("NIX_HELD_LOCKS"));
-        if (find(locksHeld.begin(), locksHeld.end(), dstPath) == locksHeld.end())
-            outputLock.lockPaths(singleton<PathSet, Path>(dstPath));
-
-        if (!isValidPath(dstPath)) {
-
-            deletePath(dstPath);
-
-            if (rename(unpacked.c_str(), dstPath.c_str()) == -1)
-                throw SysError(format("cannot move ‘%1%’ to ‘%2%’")
-                    % unpacked % dstPath);
-
-            canonicalisePathMetaData(dstPath, -1);
-
-            /* !!! if we were clever, we could prevent the hashPath()
-               here. */
-            HashResult hash = hashPath(htSHA256, dstPath);
-
-            optimisePath(dstPath); // FIXME: combine with hashPath()
-
-            ValidPathInfo info;
-            info.path = dstPath;
-            info.narHash = hash.first;
-            info.narSize = hash.second;
-            info.references = references;
-            info.deriver = deriver != "" && isValidPath(deriver) ? deriver : "";
-            registerValidPath(info);
-        }
-
-        outputLock.setDeletion(true);
-    }
-
-    return dstPath;
-}
-
-
-Paths LocalStore::importPaths(Source & source,
-    std::shared_ptr<FSAccessor> accessor)
-{
-    Paths res;
-    while (true) {
-        unsigned long long n = readLongLong(source);
-        if (n == 0) break;
-        if (n != 1) throw Error("input doesn't look like something created by ‘nix-store --export’");
-        res.push_back(importPath(source));
-    }
-    return res;
-}
-
-
 void LocalStore::invalidatePathChecked(const Path & path)
 {
     assertStorePath(path);

@@ -112,6 +112,9 @@ public:
     void querySubstitutablePathInfos(const PathSet & paths,
         SubstitutablePathInfos & infos) override;

+    void addToStore(const ValidPathInfo & info, const std::string & nar,
+        bool repair) override;
+
     Path addToStore(const string & name, const Path & srcPath,
         bool recursive = true, HashType hashAlgo = htSHA256,
         PathFilter & filter = defaultPathFilter, bool repair = false) override;
@@ -126,11 +129,6 @@ public:
     Path addTextToStore(const string & name, const string & s,
         const PathSet & references, bool repair = false) override;

-    void exportPath(const Path & path, Sink & sink) override;
-
-    Paths importPaths(Source & source,
-        std::shared_ptr<FSAccessor> accessor) override;
-
     void buildPaths(const PathSet & paths, BuildMode buildMode) override;

     BuildResult buildDerivation(const Path & drvPath, const BasicDerivation & drv,
@@ -229,8 +227,6 @@ private:

     Path createTempDirInStore();

-    Path importPath(Source & source);
-
     void checkDerivationOutputs(const Path & drvPath, const Derivation & drv);

     typedef std::unordered_set<ino_t> InodeHash;

@@ -326,6 +326,12 @@ Path RemoteStore::queryPathFromHashPart(const string & hashPart)
 }


+void RemoteStore::addToStore(const ValidPathInfo & info, const std::string & nar, bool repair)
+{
+    throw Error("RemoteStore::addToStore() not implemented");
+}
+
+
 Path RemoteStore::addToStore(const string & name, const Path & _srcPath,
     bool recursive, HashType hashAlgo, PathFilter & filter, bool repair)
 {
@@ -373,25 +379,6 @@ Path RemoteStore::addTextToStore(const string & name, const string & s,
 }


-void RemoteStore::exportPath(const Path & path, Sink & sink)
-{
-    auto conn(connections->get());
-    conn->to << wopExportPath << path << 0;
-    conn->processStderr(&sink); /* sink receives the actual data */
-    readInt(conn->from);
-}
-
-
-Paths RemoteStore::importPaths(Source & source,
-    std::shared_ptr<FSAccessor> accessor)
-{
-    auto conn(connections->get());
-    conn->to << wopImportPaths;
-    conn->processStderr(0, &source);
-    return readStorePaths<Paths>(conn->from);
-}
-
-
 void RemoteStore::buildPaths(const PathSet & drvPaths, BuildMode buildMode)
 {
     auto conn(connections->get());

@@ -51,6 +51,9 @@ public:
     void querySubstitutablePathInfos(const PathSet & paths,
         SubstitutablePathInfos & infos) override;

+    void addToStore(const ValidPathInfo & info, const std::string & nar,
+        bool repair) override;
+
     Path addToStore(const string & name, const Path & srcPath,
         bool recursive = true, HashType hashAlgo = htSHA256,
         PathFilter & filter = defaultPathFilter, bool repair = false) override;
@@ -58,11 +61,6 @@ public:
     Path addTextToStore(const string & name, const string & s,
         const PathSet & references, bool repair = false) override;

-    void exportPath(const Path & path, Sink & sink) override;
-
-    Paths importPaths(Source & source,
-        std::shared_ptr<FSAccessor> accessor) override;
-
     void buildPaths(const PathSet & paths, BuildMode buildMode) override;

     BuildResult buildDerivation(const Path & drvPath, const BasicDerivation & drv,

@@ -363,10 +363,9 @@ void copyStorePath(ref<Store> srcStore, ref<Store> dstStore,
     auto info = srcStore->queryPathInfo(storePath);

     StringSink sink;
-    srcStore->exportPaths({storePath}, sink);
+    srcStore->narFromPath({storePath}, sink);

-    StringSource source(*sink.s);
-    dstStore->importPaths(source, 0);
+    dstStore->addToStore(*info, *sink.s);
 }


@@ -406,16 +405,6 @@ string showPaths(const PathSet & paths)
 }


-void Store::exportPaths(const Paths & paths, Sink & sink)
-{
-    for (auto & i : paths) {
-        sink << 1;
-        exportPath(i, sink);
-    }
-    sink << 0;
-}
-
-
 std::string ValidPathInfo::fingerprint() const
 {
     if (narSize == 0 || !narHash)

@@ -19,7 +19,7 @@ namespace nix {
 /* Size of the hash part of store paths, in base-32 characters. */
 const size_t storePathHashLen = 32; // i.e. 160 bits

-/* Magic header of exportPath() output. */
+/* Magic header of exportPath() output (obsolete). */
 const uint32_t exportMagic = 0x4558494e;


@@ -253,6 +253,10 @@ public:
     virtual void querySubstitutablePathInfos(const PathSet & paths,
         SubstitutablePathInfos & infos) = 0;

+    /* Import a path into the store. */
+    virtual void addToStore(const ValidPathInfo & info, const std::string & nar,
+        bool repair = false) = 0;
+
     /* Copy the contents of a path to the store and register the
        validity the resulting path. The resulting path is returned.
        The function object `filter' can be used to exclude files (see
@@ -269,21 +273,6 @@ public:
     /* Write a NAR dump of a store path. */
     virtual void narFromPath(const Path & path, Sink & sink) = 0;

-    /* Export a store path, that is, create a NAR dump of the store
-       path and append its references and its deriver. */
-    virtual void exportPath(const Path & path, Sink & sink) = 0;
-
-    /* Export multiple paths in the format expected by ‘nix-store
-       --import’. */
-    void exportPaths(const Paths & paths, Sink & sink);
-
-    /* Import a sequence of NAR dumps created by exportPaths() into
-       the Nix store. Optionally, the contents of the NARs are
-       preloaded into the specified FS accessor to speed up subsequent
-       access. */
-    virtual Paths importPaths(Source & source,
-        std::shared_ptr<FSAccessor> accessor) = 0;
-
     /* For each path, if it's a derivation, build it. Building a
        derivation means ensuring that the output paths are valid. If
        they are already valid, this is a no-op. Otherwise, validity
@@ -397,6 +386,19 @@ public:
        relation. If p refers to q, then p preceeds q in this list. */
     Paths topoSortPaths(const PathSet & paths);

+    /* Export multiple paths in the format expected by ‘nix-store
+       --import’. */
+    void exportPaths(const Paths & paths, Sink & sink);
+
+    void exportPath(const Path & path, Sink & sink);
+
+    /* Import a sequence of NAR dumps created by exportPaths() into
+       the Nix store. Optionally, the contents of the NARs are
+       preloaded into the specified FS accessor to speed up subsequent
+       access. */
+    Paths importPaths(Source & source,
+        std::shared_ptr<FSAccessor> accessor);
+
     struct Stats
     {
         std::atomic<uint64_t> narInfoRead{0};

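Taken together, the header changes above shrink the per-store surface to two pure-virtual primitives, with export/import becoming shared helpers on Store. A condensed, illustrative excerpt (not the complete class) of how the interface looks after this commit:

    // Illustrative excerpt of class Store; all other members elided.
    class Store
    {
    public:
        /* Import a path into the store. */
        virtual void addToStore(const ValidPathInfo & info, const std::string & nar,
            bool repair = false) = 0;

        /* Write a NAR dump of a store path. */
        virtual void narFromPath(const Path & path, Sink & sink) = 0;

        /* Export/import are now non-virtual helpers (see export-import.cc above)
           built on the two primitives. */
        void exportPaths(const Paths & paths, Sink & sink);
        void exportPath(const Path & path, Sink & sink);
        Paths importPaths(Source & source, std::shared_ptr<FSAccessor> accessor);
    };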
@@ -25,7 +25,7 @@ typedef enum {
     wopAddIndirectRoot = 12,
     wopSyncWithGC = 13,
     wopFindRoots = 14,
-    wopExportPath = 16,
+    wopExportPath = 16, // obsolete
     wopQueryDeriver = 18, // obsolete
     wopSetOptions = 19,
     wopCollectGarbage = 20,
@@ -35,7 +35,7 @@ typedef enum {
     wopQueryFailedPaths = 24,
     wopClearFailedPaths = 25,
     wopQueryPathInfo = 26,
-    wopImportPaths = 27,
+    wopImportPaths = 27, // obsolete
     wopQueryDerivationOutputNames = 28,
     wopQueryPathFromHashPart = 29,
     wopQuerySubstitutablePathInfos = 30,

@@ -173,26 +173,26 @@ static ref<std::string> decompressBzip2(const std::string & in)
     }
 }

-ref<std::string> compress(const std::string & method, ref<std::string> in)
+ref<std::string> compress(const std::string & method, const std::string & in)
 {
     if (method == "none")
-        return in;
+        return make_ref<std::string>(in);
     else if (method == "xz")
-        return compressXZ(*in);
+        return compressXZ(in);
     else if (method == "bzip2")
-        return compressBzip2(*in);
+        return compressBzip2(in);
     else
         throw UnknownCompressionMethod(format("unknown compression method ‘%s’") % method);
 }

-ref<std::string> decompress(const std::string & method, ref<std::string> in)
+ref<std::string> decompress(const std::string & method, const std::string & in)
 {
     if (method == "none")
-        return in;
+        return make_ref<std::string>(in);
     else if (method == "xz")
-        return decompressXZ(*in);
+        return decompressXZ(in);
     else if (method == "bzip2")
-        return decompressBzip2(*in);
+        return decompressBzip2(in);
     else
         throw UnknownCompressionMethod(format("unknown compression method ‘%s’") % method);
 }

@@ -7,9 +7,9 @@

 namespace nix {

-ref<std::string> compress(const std::string & method, ref<std::string> in);
+ref<std::string> compress(const std::string & method, const std::string & in);

-ref<std::string> decompress(const std::string & method, ref<std::string> in);
+ref<std::string> decompress(const std::string & method, const std::string & in);

 MakeError(UnknownCompressionMethod, Error);

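As a usage note, the libutil compression helpers now take their input by const reference instead of as a ref<std::string>, while still returning a ref<std::string>. A minimal sketch using the signatures from compression.hh above; the function and variable names are made up for illustration:

    #include <cassert>
    #include "compression.hh"   // also pulls in ref<T>

    // Round-trip an in-memory NAR through xz using the updated signatures.
    void roundTrip(const std::string & nar)
    {
        ref<std::string> compressed = compress("xz", nar);
        ref<std::string> restored   = decompress("xz", *compressed);
        assert(*restored == nar);
    }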
@@ -503,7 +503,7 @@ static void opReadLog(Strings opFlags, Strings opArgs)
         }

         else if (pathExists(logBz2Path)) {
-            std::cout << *decompress("bzip2", make_ref<std::string>(readFile(logBz2Path)));
+            std::cout << *decompress("bzip2", readFile(logBz2Path));
             found = true;
             break;
         }
@@ -703,9 +703,7 @@ static void opExport(Strings opFlags, Strings opArgs)
             throw UsageError(format("unknown flag ‘%1%’") % i);

     FdSink sink(STDOUT_FILENO);
-    Paths sorted = store->topoSortPaths(PathSet(opArgs.begin(), opArgs.end()));
-    reverse(sorted.begin(), sorted.end());
-    store->exportPaths(sorted, sink);
+    store->exportPaths(opArgs, sink);
 }


@@ -108,7 +108,7 @@ void StorePathsCommand::run(ref<Store> store)
             PathSet closure;
             for (auto & storePath : storePaths)
                 store->computeFSClosure(storePath, closure, false, false);
-            storePaths = store->topoSortPaths(closure);
+            storePaths = Paths(closure.begin(), closure.end());
         }
     }
