* Refactoring on the file names.
parent 7952a8053c
commit 224c585aba

7 changed files with 17 additions and 60 deletions
@@ -20,7 +20,7 @@ test_LDADD = libnix.a -ldb_cxx-4 -lATerm
 noinst_LIBRARIES = libnix.a
 
 libnix_a_SOURCES = util.cc hash.cc archive.cc md5.c \
- eval.cc values.cc globals.cc db.cc
+ fstate.cc store.cc globals.cc db.cc
 
 install-data-local:
 	$(INSTALL) -d $(localstatedir)/nix

@@ -2,8 +2,8 @@
 #include <iostream>
 
 #include "globals.hh"
-#include "eval.hh"
-#include "values.hh"
+#include "fstate.hh"
+#include "store.hh"
 #include "shared.hh"
 
 

@@ -7,9 +7,9 @@
 #include <unistd.h>
 #include <fcntl.h>
 
-#include "eval.hh"
+#include "fstate.hh"
 #include "globals.hh"
-#include "values.hh"
+#include "store.hh"
 #include "db.hh"
 
 

@@ -1,8 +1,8 @@
 #include <iostream>
 
 #include "globals.hh"
-#include "values.hh"
-#include "eval.hh"
+#include "store.hh"
+#include "fstate.hh"
 #include "archive.hh"
 #include "shared.hh"
 

@@ -3,7 +3,7 @@
 #include <sys/types.h>
 #include <sys/wait.h>
 
-#include "values.hh"
+#include "store.hh"
 #include "globals.hh"
 #include "db.hh"
 #include "archive.hh"

@@ -105,28 +105,6 @@ void addToStore(string srcPath, string & dstPath, Hash & hash)
 }
 
 
-#if 0
-/* Download object referenced by the given URL into the sources
-   directory. Return the file name it was downloaded to. */
-string fetchURL(string url)
-{
-    string filename = baseNameOf(url);
-    string fullname = nixSourcesDir + "/" + filename;
-    struct stat st;
-    if (stat(fullname.c_str(), &st)) {
-        cerr << "fetching " << url << endl;
-        /* !!! quoting */
-        string shellCmd =
-            "cd " + nixSourcesDir + " && wget --quiet -N \"" + url + "\"";
-        int res = system(shellCmd.c_str());
-        if (WEXITSTATUS(res) != 0)
-            throw Error("cannot fetch " + url);
-    }
-    return fullname;
-}
-#endif
-
-
 void deleteFromStore(const string & path)
 {
     string prefix = nixStore + "/";

@@ -139,10 +117,6 @@ void deleteFromStore(const string & path)
 
 string queryFromStore(Hash hash)
 {
-    bool checkedNet = false;
-
-    while (1) {
-
     string fn, url;
 
     if (queryDB(nixDB, dbRefs, hash, fn)) {

@@ -154,22 +128,5 @@ string queryFromStore(Hash hash)
         return fn;
     }
 
-    throw Error("a file with hash " + (string) hash + " is required, "
-        "but it is not known to exist locally or on the network");
-#if 0
-    if (checkedNet)
-        throw Error("consistency problem: file fetched from " + url +
-            " should have hash " + (string) hash + ", but it doesn't");
-
-    if (!queryDB(nixDB, dbNetSources, hash, url))
-        throw Error("a file with hash " + (string) hash + " is required, "
-            "but it is not known to exist locally or on the network");
-
-    checkedNet = true;
-
-    fn = fetchURL(url);
-
-    setDB(nixDB, dbRefs, hash, fn);
-#endif
-    }
+    throw Error(format("don't know a path with hash `%1%'") % (string) hash);
 }