nix-prefetch-url: Rewrite in C++
commit bec3c31608
parent bdc4a0b54d

6 changed files with 141 additions and 133 deletions
Makefile (1 addition)

@@ -13,6 +13,7 @@ makefiles = \
   src/nix-collect-garbage/local.mk \
   src/download-via-ssh/local.mk \
   src/nix-log2xml/local.mk \
+  src/nix-prefetch-url/local.mk \
   src/bsdiff-4.3/local.mk \
   perl/local.mk \
   scripts/local.mk \
scripts/local.mk (1 deletion)

@@ -4,7 +4,6 @@ nix_bin_scripts := \
   $(d)/nix-copy-closure \
   $(d)/nix-generate-patches \
   $(d)/nix-install-package \
-  $(d)/nix-prefetch-url \
   $(d)/nix-pull \
   $(d)/nix-push
scripts/nix-prefetch-url.in (deleted, 132 lines)

@@ -1,132 +0,0 @@
-#! @perl@ -w @perlFlags@
-
-use utf8;
-use strict;
-use File::Basename;
-use File::stat;
-use Nix::Store;
-use Nix::Config;
-use Nix::Utils;
-
-binmode STDERR, ":encoding(utf8)";
-
-
-my $hashType = $ENV{'NIX_HASH_ALGO'} || "sha256"; # obsolete
-my $cacheDir = $ENV{'NIX_DOWNLOAD_CACHE'};
-
-my @args;
-my $arg;
-while ($arg = shift) {
-    if ($arg eq "--help") {
-        exec "man nix-prefetch-url" or die;
-    } elsif ($arg eq "--type") {
-        $hashType = shift;
-        die "$0: ‘$arg’ requires an argument\n" unless defined $hashType;
-    } elsif (substr($arg, 0, 1) eq "-") {
-        die "$0: unknown flag ‘$arg’\n";
-    } else {
-        push @args, $arg;
-    }
-}
-
-my $url = $args[0];
-my $expHash = $args[1];
-
-
-if (!defined $url || $url eq "") {
-    print STDERR <<EOF
-Usage: nix-prefetch-url URL [EXPECTED-HASH]
-EOF
-    ;
-    exit 1;
-}
-
-my $tmpDir = mkTempDir("nix-prefetch-url");
-
-# Hack to support the mirror:// scheme from Nixpkgs.
-if ($url =~ /^mirror:\/\//) {
-    system("$Nix::Config::binDir/nix-build '<nixpkgs>' -A resolveMirrorURLs --argstr url '$url' -o $tmpDir/urls > /dev/null") == 0
-        or die "$0: nix-build failed; maybe \$NIX_PATH is not set properly\n";
-    my @expanded = split ' ', readFile("$tmpDir/urls");
-    die "$0: cannot resolve ‘$url’" unless scalar @expanded > 0;
-    print STDERR "$url expands to $expanded[0]\n";
-    $url = $expanded[0];
-}
-
-# Handle escaped characters in the URI.  `+', `=' and `?' are the only
-# characters that are valid in Nix store path names but have a special
-# meaning in URIs.
-my $name = basename $url;
-die "cannot figure out file name for ‘$url’\n" if $name eq "";
-$name =~ s/%2b/+/g;
-$name =~ s/%3d/=/g;
-$name =~ s/%3f/?/g;
-
-my $finalPath;
-my $hash;
-
-# If the hash was given, a file with that hash may already be in the
-# store.
-if (defined $expHash) {
-    $finalPath = makeFixedOutputPath(0, $hashType, $expHash, $name);
-    if (isValidPath($finalPath)) { $hash = $expHash; } else { $finalPath = undef; }
-}
-
-# If we don't know the hash or a file with that hash doesn't exist,
-# download the file and add it to the store.
-if (!defined $finalPath) {
-
-    my $tmpFile = "$tmpDir/$name";
-
-    # Optionally do timestamp-based caching of the download.
-    # Actually, the only thing that we cache in $NIX_DOWNLOAD_CACHE is
-    # the hash and the timestamp of the file at $url.  The caching of
-    # the file *contents* is done in Nix store, where it can be
-    # garbage-collected independently.
-    my ($cachedTimestampFN, $cachedHashFN, @cacheFlags);
-    if (defined $cacheDir) {
-        my $urlHash = hashString("sha256", 1, $url);
-        writeFile "$cacheDir/$urlHash.url", $url;
-        $cachedHashFN = "$cacheDir/$urlHash.$hashType";
-        $cachedTimestampFN = "$cacheDir/$urlHash.stamp";
-        @cacheFlags = ("--time-cond", $cachedTimestampFN) if -f $cachedHashFN && -f $cachedTimestampFN;
-    }
-
-    # Perform the download.
-    my @curlFlags = ("curl", $url, "-o", $tmpFile, "--fail", "--location", "--max-redirs", "20", "--disable-epsv", "--cookie-jar", "$tmpDir/cookies", "--remote-time", (split " ", ($ENV{NIX_CURL_FLAGS} || "")));
-    (system $Nix::Config::curl @curlFlags, @cacheFlags) == 0 or die "$0: download of ‘$url’ failed\n";
-
-    if (defined $cacheDir && ! -e $tmpFile) {
-        # Curl didn't create $tmpFile, so apparently there's no newer
-        # file on the server.
-        $hash = readFile $cachedHashFN or die;
-        $finalPath = makeFixedOutputPath(0, $hashType, $hash, $name);
-        unless (isValidPath $finalPath) {
-            print STDERR "cached contents of ‘$url’ disappeared, redownloading...\n";
-            $finalPath = undef;
-            (system $Nix::Config::curl @curlFlags) == 0 or die "$0: download of ‘$url’ failed\n";
-        }
-    }
-
-    if (!defined $finalPath) {
-
-        # Compute the hash.
-        $hash = hashFile($hashType, $hashType ne "md5", $tmpFile);
-
-        if (defined $cacheDir) {
-            writeFile $cachedHashFN, $hash;
-            my $st = stat($tmpFile) or die;
-            open STAMP, ">$cachedTimestampFN" or die; close STAMP;
-            utime($st->atime, $st->mtime, $cachedTimestampFN) or die;
-        }
-
-        # Add the downloaded file to the Nix store.
-        $finalPath = addToStore($tmpFile, 0, $hashType);
-    }
-
-    die "$0: hash mismatch for ‘$url’\n" if defined $expHash && $expHash ne $hash;
-}
-
-print STDERR "path is ‘$finalPath’\n" unless $ENV{'QUIET'};
-print "$hash\n";
-print "$finalPath\n" if $ENV{'PRINT_PATH'};
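The deleted script unescaped the three URI-escaped characters that are valid in Nix store path names (`+`, `=` and `?`) before using the URL's basename as the store name. A minimal standalone C++ sketch of that same substitution; replaceAll and the sample name are hypothetical and not part of this commit:

#include <iostream>
#include <string>

// Replace every occurrence of `from` in `s` with `to`.
static void replaceAll(std::string & s, const std::string & from, const std::string & to)
{
    for (std::string::size_type pos = 0; (pos = s.find(from, pos)) != std::string::npos; pos += to.size())
        s.replace(pos, from.size(), to);
}

int main()
{
    std::string name = "hello%2bworld%3d1.0%3f";
    replaceAll(name, "%2b", "+");   // `+' is valid in store path names
    replaceAll(name, "%3d", "=");   // `='
    replaceAll(name, "%3f", "?");   // `?'
    std::cout << name << "\n";      // prints: hello+world=1.0?
}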
src/libutil/util.hh (1 addition)

@@ -202,6 +202,7 @@ public:
     AutoDelete(const Path & p, bool recursive = true);
     ~AutoDelete();
     void cancel();
+    operator Path() const { return path; }
 };
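The added operator Path() lets an AutoDelete be spliced directly into a larger path, which the new tool below uses as (Path) tmpDir + "/tmp". A minimal standalone sketch of the same conversion-operator idiom, with a hypothetical AutoDeleteSketch and std::string standing in for Path (the real AutoDelete also removes the path in its destructor):

#include <iostream>
#include <string>

// Hypothetical stand-in for Nix's AutoDelete: wraps a path and exposes it
// through a conversion operator, mirroring the `operator Path() const` above.
struct AutoDeleteSketch {
    std::string path;
    explicit AutoDeleteSketch(std::string p) : path(std::move(p)) {}
    operator std::string() const { return path; }  // same idiom as operator Path()
};

int main()
{
    AutoDeleteSketch tmpDir("/tmp/nix-prefetch-example");
    // The cast invokes the conversion operator, so the wrapper can be
    // spliced into a longer path without a separate accessor.
    std::string tmpFile = (std::string) tmpDir + "/tmp";
    std::cout << tmpFile << "\n";  // prints: /tmp/nix-prefetch-example/tmp
}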
src/nix-prefetch-url/local.mk (new file, 7 lines)

@@ -0,0 +1,7 @@
+programs += nix-prefetch-url
+
+nix-prefetch-url_DIR := $(d)
+
+nix-prefetch-url_SOURCES := $(d)/nix-prefetch-url.cc
+
+nix-prefetch-url_LIBS = libmain libexpr libstore libutil libformat
src/nix-prefetch-url/nix-prefetch-url.cc (new file, 132 lines)

@@ -0,0 +1,132 @@
+#include "hash.hh"
+#include "shared.hh"
+#include "download.hh"
+#include "store-api.hh"
+#include "eval.hh"
+#include "eval-inline.hh"
+#include "common-opts.hh"
+
+#include <iostream>
+
+using namespace nix;
+
+
+/* If ‘uri’ starts with ‘mirror://’, then resolve it using the list of
+   mirrors defined in Nixpkgs. */
+string resolveMirrorUri(EvalState & state, string uri)
+{
+    if (string(uri, 0, 9) != "mirror://") return uri;
+
+    string s(uri, 9);
+    auto p = s.find('/');
+    if (p == string::npos) throw Error("invalid mirror URI");
+    string mirrorName(s, 0, p);
+
+    Value vMirrors;
+    state.eval(state.parseExprFromString("import <nixpkgs/pkgs/build-support/fetchurl/mirrors.nix>", "."), vMirrors);
+    state.forceAttrs(vMirrors);
+
+    auto mirrorList = vMirrors.attrs->find(state.symbols.create(mirrorName));
+    if (mirrorList == vMirrors.attrs->end())
+        throw Error(format("unknown mirror name ‘%1%’") % mirrorName);
+    state.forceList(*mirrorList->value);
+
+    if (mirrorList->value->listSize() < 1)
+        throw Error(format("mirror URI ‘%1%’ did not expand to anything") % uri);
+
+    string mirror = state.forceString(*mirrorList->value->listElems()[0]);
+    return mirror + (hasSuffix(mirror, "/") ? "" : "/") + string(s, p + 1);
+}
+
+
+int main(int argc, char * * argv)
+{
+    return handleExceptions(argv[0], [&]() {
+        initNix();
+        initGC();
+
+        HashType ht = htSHA256;
+        std::vector<string> args;
+        Strings searchPath;
+
+        parseCmdLine(argc, argv, [&](Strings::iterator & arg, const Strings::iterator & end) {
+            if (*arg == "--help")
+                showManPage("nix-prefetch-url");
+            else if (*arg == "--version")
+                printVersion("nix-prefetch-url");
+            else if (*arg == "--type") {
+                string s = getArg(*arg, arg, end);
+                ht = parseHashType(s);
+                if (ht == htUnknown)
+                    throw UsageError(format("unknown hash type ‘%1%’") % s);
+            }
+            else if (parseSearchPathArg(arg, end, searchPath))
+                ;
+            else if (*arg != "" && arg->at(0) == '-')
+                return false;
+            else
+                args.push_back(*arg);
+            return true;
+        });
+
+        if (args.size() < 1 || args.size() > 2)
+            throw UsageError("nix-prefetch-url expects one argument");
+
+        store = openStore();
+
+        EvalState state(searchPath);
+
+        /* Figure out a name in the Nix store. */
+        auto uri = args[0];
+        auto name = baseNameOf(uri);
+        if (name.empty())
+            throw Error(format("cannot figure out file name for ‘%1%’") % uri);
+
+        /* If an expected hash is given, the file may already exist in
+           the store. */
+        Hash hash, expectedHash(ht);
+        Path storePath;
+        if (args.size() == 2) {
+            expectedHash = parseHash16or32(ht, args[1]);
+            storePath = makeFixedOutputPath(false, ht, expectedHash, name);
+            if (store->isValidPath(storePath))
+                hash = expectedHash;
+            else
+                storePath.clear();
+        }
+
+        if (storePath.empty()) {
+
+            auto actualUri = resolveMirrorUri(state, uri);
+
+            if (uri != actualUri)
+                printMsg(lvlInfo, format("‘%1%’ expands to ‘%2%’") % uri % actualUri);
+
+            /* Download the file. */
+            auto result = downloadFile(actualUri);
+
+            /* Copy the file to the Nix store. FIXME: if RemoteStore
+               implemented addToStoreFromDump() and downloadFile()
+               supported a sink, we could stream the download directly
+               into the Nix store. */
+            AutoDelete tmpDir(createTempDir(), true);
+            Path tmpFile = (Path) tmpDir + "/tmp";
+            writeFile(tmpFile, result.data);
+
+            /* FIXME: inefficient; addToStore() will also hash
+               this. */
+            hash = hashString(ht, result.data);
+
+            if (expectedHash != Hash(ht) && expectedHash != hash)
+                throw Error(format("hash mismatch for ‘%1%’") % uri);
+
+            storePath = store->addToStore(name, tmpFile, false, ht);
+        }
+
+        printMsg(lvlInfo, format("path is ‘%1%’") % storePath);
+
+        std::cout << printHash16or32(hash) << std::endl;
+        if (getEnv("PRINT_PATH") != "")
+            std::cout << storePath << std::endl;
+    });
+}
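resolveMirrorUri() above expands a mirror://<name>/<rest> URI by evaluating <nixpkgs/pkgs/build-support/fetchurl/mirrors.nix> and prepending the first mirror registered under <name>. A simplified standalone sketch of the same splitting and lookup, with a hard-coded map standing in for the evaluated mirror lists (the map contents are illustrative only):

#include <iostream>
#include <map>
#include <stdexcept>
#include <string>
#include <vector>

// Simplified sketch of the mirror:// expansion done by resolveMirrorUri():
// split "mirror://<name>/<rest>" and prepend the first mirror registered
// for <name>. The real code evaluates mirrors.nix from Nixpkgs instead of
// consulting a hard-coded map.
std::string resolveMirrorSketch(const std::string & uri,
    const std::map<std::string, std::vector<std::string>> & mirrors)
{
    const std::string prefix = "mirror://";
    if (uri.compare(0, prefix.size(), prefix) != 0) return uri;

    std::string s = uri.substr(prefix.size());
    auto slash = s.find('/');
    if (slash == std::string::npos) throw std::runtime_error("invalid mirror URI");

    auto i = mirrors.find(s.substr(0, slash));
    if (i == mirrors.end() || i->second.empty())
        throw std::runtime_error("unknown mirror name or empty mirror list");

    const std::string & mirror = i->second.front();
    bool endsWithSlash = !mirror.empty() && mirror.back() == '/';
    return mirror + (endsWithSlash ? "" : "/") + s.substr(slash + 1);
}

int main()
{
    // Illustrative mirror list only; the real lists live in Nixpkgs.
    std::map<std::string, std::vector<std::string>> mirrors =
        {{"gnu", {"https://ftpmirror.gnu.org/"}}};
    std::cout << resolveMirrorSketch("mirror://gnu/hello/hello-2.10.tar.gz", mirrors) << "\n";
}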