Implement a TTL on cached fetchurl/fetchTarball results
This avoids performing an HTTP request on every evaluation: even though the cached ETag lets us prevent a full redownload, the conditional request itself is still costly. The default TTL is one hour.
This commit is contained in:
parent
60340ce3e2
commit
c1f04fae35
3 changed files with 34 additions and 13 deletions
|
@ -1631,14 +1631,22 @@ void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,
|
||||||
|
|
||||||
string expectedETag;
|
string expectedETag;
|
||||||
|
|
||||||
|
int ttl = settings.get("tarball-ttl", 60 * 60);
|
||||||
|
bool skip = false;
|
||||||
|
|
||||||
if (pathExists(fileLink) && pathExists(dataFile)) {
|
if (pathExists(fileLink) && pathExists(dataFile)) {
|
||||||
storePath = readLink(fileLink);
|
storePath = readLink(fileLink);
|
||||||
store->addTempRoot(storePath);
|
store->addTempRoot(storePath);
|
||||||
if (store->isValidPath(storePath)) {
|
if (store->isValidPath(storePath)) {
|
||||||
auto ss = tokenizeString<vector<string>>(readFile(dataFile), "\n");
|
auto ss = tokenizeString<vector<string>>(readFile(dataFile), "\n");
|
||||||
if (ss.size() >= 2 && ss[0] == url) {
|
if (ss.size() >= 3 && ss[0] == url) {
|
||||||
printMsg(lvlDebug, format("verifying previous ETag ‘%1%’") % ss[1]);
|
time_t lastChecked;
|
||||||
expectedETag = ss[1];
|
if (string2Int(ss[2], lastChecked) && lastChecked + ttl >= time(0))
|
||||||
|
skip = true;
|
||||||
|
else if (!ss[1].empty()) {
|
||||||
|
printMsg(lvlDebug, format("verifying previous ETag ‘%1%’") % ss[1]);
|
||||||
|
expectedETag = ss[1];
|
||||||
|
}
|
||||||
}
|
}
|
||||||
} else
|
} else
|
||||||
storePath = "";
|
storePath = "";
|
||||||
|
@ -1648,19 +1656,22 @@ void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,
|
||||||
auto p = url.rfind('/');
|
auto p = url.rfind('/');
|
||||||
if (p != string::npos) name = string(url, p + 1);
|
if (p != string::npos) name = string(url, p + 1);
|
||||||
|
|
||||||
if (expectedETag.empty())
|
if (!skip) {
|
||||||
printMsg(lvlInfo, format("downloading ‘%1%’...") % url);
|
|
||||||
else
|
|
||||||
printMsg(lvlInfo, format("checking ‘%1%’...") % url);
|
|
||||||
Curl curl;
|
|
||||||
|
|
||||||
if (curl.fetch(url, expectedETag))
|
if (expectedETag.empty())
|
||||||
storePath = store->addTextToStore(name, curl.data, PathSet(), state.repair);
|
printMsg(lvlInfo, format("downloading ‘%1%’...") % url);
|
||||||
|
else
|
||||||
|
printMsg(lvlInfo, format("checking ‘%1%’...") % url);
|
||||||
|
Curl curl;
|
||||||
|
|
||||||
assert(!storePath.empty());
|
if (curl.fetch(url, expectedETag))
|
||||||
replaceSymlink(storePath, fileLink);
|
storePath = store->addTextToStore(name, curl.data, PathSet(), state.repair);
|
||||||
|
|
||||||
writeFile(dataFile, url + "\n" + curl.etag + "\n");
|
assert(!storePath.empty());
|
||||||
|
replaceSymlink(storePath, fileLink);
|
||||||
|
|
||||||
|
writeFile(dataFile, url + "\n" + curl.etag + "\n" + int2String(time(0)) + "\n");
|
||||||
|
}
|
||||||
|
|
||||||
if (unpack) {
|
if (unpack) {
|
||||||
Path unpackedLink = cacheDir + "/" + baseNameOf(storePath) + "-unpacked";
|
Path unpackedLink = cacheDir + "/" + baseNameOf(storePath) + "-unpacked";
|
||||||
|
|
|
@ -143,6 +143,14 @@ bool Settings::get(const string & name, bool def)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
int Settings::get(const string & name, int def)
|
||||||
|
{
|
||||||
|
int res = def;
|
||||||
|
_get(res, name);
|
||||||
|
return res;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
void Settings::update()
|
void Settings::update()
|
||||||
{
|
{
|
||||||
_get(tryFallback, "build-fallback");
|
_get(tryFallback, "build-fallback");
|
||||||
|
|
|
@ -27,6 +27,8 @@ struct Settings {
|
||||||
|
|
||||||
bool get(const string & name, bool def);
|
bool get(const string & name, bool def);
|
||||||
|
|
||||||
|
int get(const string & name, int def);
|
||||||
|
|
||||||
void update();
|
void update();
|
||||||
|
|
||||||
string pack();
|
string pack();
|
||||||
|
|
Loading…
Reference in a new issue