* Patch deployment. `download.pl' (intended to be used in the
  substitute mechanism) creates a store path by downloading full NAR
  archives and/or patches specified in the available manifests.

  Any combination of present paths, full downloads, and patches can be
  used to construct the target path.  In particular, patches can be
  chained in sequence; and full NAR archives of the target path can be
  omitted (i.e., patch-only deployment is possible).  A shortest path
  algorithm is used to find the smallest set of files to be downloaded
  (the edge weights are currently file sizes, but one can imagine
  taking the network speed to the various sources into account).
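
  As a rough illustration of the idea (this is not the code in
  `download.pl'; the function name and the `$present' argument are
  invented, while `%narFiles' and `%patches' follow the shape built by
  `readManifest' below), a Dijkstra-style search over a graph whose
  nodes are store paths, with full downloads and patches as edges
  weighted by their sizes, could look like this:

    use strict;

    # Hypothetical sketch: find the cheapest way to obtain $target.
    # Nodes are store paths plus a virtual "start" node; an edge from
    # "start" to a path is a full NAR download (weight = NAR size), an
    # edge between two paths is a patch (weight = patch size), and
    # locally present paths are reachable from "start" at zero cost.
    sub cheapestPlan {
        my ($target, $narFiles, $patches, $present) = @_;

        my %dist = ("start" => 0);
        my (%prev, %done);

        while (1) {
            # Pick the unfinished node with the smallest known distance.
            my $u;
            foreach my $v (keys %dist) {
                next if $done{$v};
                $u = $v if !defined $u || $dist{$v} < $dist{$u};
            }
            last unless defined $u;
            $done{$u} = 1;

            # Collect the edges leaving $u.
            my @edges;
            if ($u eq "start") {
                push @edges, [$_, 0, "present"] foreach @{$present};
                foreach my $path (keys %{$narFiles}) {
                    push @edges, [$path, $_->{size}, "narfile"]
                        foreach @{$narFiles->{$path}};
                }
            } else {
                foreach my $path (keys %{$patches}) {
                    foreach my $patch (@{$patches->{$path}}) {
                        push @edges, [$path, $patch->{size}, "patch"]
                            if $patch->{basePath} eq $u;
                    }
                }
            }

            # Relax each edge.
            foreach my $e (@edges) {
                my ($v, $w, $how) = @{$e};
                if (!defined $dist{$v} || $dist{$u} + $w < $dist{$v}) {
                    $dist{$v} = $dist{$u} + $w;
                    $prev{$v} = [$u, $how];
                }
            }
        }

        # Walk back from the target to obtain the download plan.
        return undef unless defined $dist{$target};
        my @plan;
        for (my $v = $target; $v ne "start"; $v = $prev{$v}->[0]) {
            unshift @plan, { path => $v, via => $prev{$v}->[1] };
        }
        return \@plan;
    }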

  Patches are binary deltas between two store paths.  To be precise,
  they are the output of the `bsdiff' program applied to the NAR
  archives obtained by dumping (`nix-store --dump') the two store
  paths.  The advantage of diff'ing NAR archives (and not, say, doing
  file-by-file diffs) is that file renames/moves are handled
  automatically.  The disadvantage is that we cannot optimise creation
  of unchanged files (by hard-linking).
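
  Concretely, producing and applying such a patch can be pictured
  roughly like this (a sketch only; the real tools work in temporary
  directories, verify hashes, and compress the archives):

    use strict;

    # Sketch: make a binary delta between two store paths on the server,
    # and rebuild the new path on a client that already has the old one.
    sub makePatch {
        my ($oldPath, $newPath, $patchFile) = @_;
        system("nix-store --dump '$oldPath' > old.nar") == 0 or die "dump failed";
        system("nix-store --dump '$newPath' > new.nar") == 0 or die "dump failed";
        # bsdiff OLDFILE NEWFILE PATCHFILE writes the binary delta.
        system("bsdiff old.nar new.nar '$patchFile'") == 0 or die "bsdiff failed";
    }

    sub applyPatch {
        my ($oldPath, $newPath, $patchFile) = @_;
        system("nix-store --dump '$oldPath' > old.nar") == 0 or die "dump failed";
        # bspatch OLDFILE NEWFILE PATCHFILE regenerates the new NAR archive.
        system("bspatch old.nar new.nar '$patchFile'") == 0 or die "bspatch failed";
        system("nix-store --restore '$newPath' < new.nar") == 0 or die "restore failed";
    }
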
commit 862f4c154e (parent dca48aed34)
Eelco Dolstra, 2004-12-13 13:47:38 +00:00
3 changed files with 323 additions and 34 deletions


@@ -19,14 +19,24 @@ my $confFile = "@sysconfdir@/nix/prebuilts.conf";
my %storePaths2urls;
my %urls2hashes;
my %successors;
sub doURL {
sub processURL {
my $url = shift;
processURL $manifest, $url, \%storePaths2urls, \%urls2hashes, \%successors;
$url =~ s/\/$//;
print "obtaining list of Nix archives at $url...\n";
system("@curl@ --fail --silent --show-error --location --max-redirs 20 " .
"'$url' > '$manifest'") == 0
or die "curl failed: $?";
readManifest $manifest, \%storePaths2urls, \%urls2hashes, \%successors;
}
if (scalar @ARGV > 0) {
while (@ARGV) {
my $url = shift @ARGV;
doURL $url;
processURL $url;
}
} else {
open CONFFILE, "<$confFile";
@@ -34,7 +44,7 @@ if (scalar @ARGV > 0) {
chomp;
if (/^\s*(\S+)\s*(\#.*)?$/) {
my $url = $1;
doURL $url;
processURL $url;
}
}
close CONFFILE;
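
Untangling the hunk above: the new processURL in nix-pull now fetches
the manifest itself and delegates parsing to readManifest.  Reassembled
from the lines shown (so a sketch, not necessarily the verbatim
result), it reads roughly:

    sub processURL {
        my $url = shift;
        $url =~ s/\/$//;

        print "obtaining list of Nix archives at $url...\n";

        # Fetch the manifest; parsing is now the job of readManifest.
        system("@curl@ --fail --silent --show-error --location --max-redirs 20 " .
            "'$url' > '$manifest'") == 0
            or die "curl failed: $?";

        readManifest $manifest, \%storePaths2urls, \%urls2hashes, \%successors;
    }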


@@ -1,27 +1,24 @@
use strict;
sub processURL {
sub readManifest {
my $manifest = shift;
my $url = shift;
my $storePaths2urls = shift;
my $urls2hashes = shift;
my $narFiles = shift;
my $patches = shift;
my $successors = shift;
$url =~ s/\/$//;
print "obtaining list of Nix archives at $url...\n";
system("@curl@ --fail --silent --show-error --location --max-redirs 20 " .
"'$url' > '$manifest'") == 0
or die "curl failed: $?";
open MANIFEST, "<$manifest";
my $inside = 0;
my $type;
my $storePath;
my $narurl;
my $url;
my $hash;
my $size;
my @preds;
my $basePath;
my $baseHash;
my $patchType;
while (<MANIFEST>) {
chomp;
@@ -29,38 +26,102 @@ sub processURL {
next if (/^$/);
if (!$inside) {
if (/^\{$/) {
if (/^\{$/) {
$type = "narfile";
$inside = 1;
undef $storePath;
undef $narurl;
undef $url;
undef $hash;
$size = 999999999;
@preds = ();
}
elsif (/^patch \{$/) {
$type = "patch";
$inside = 1;
undef $url;
undef $hash;
undef $size;
undef $basePath;
undef $baseHash;
undef $patchType;
}
else { die "bad line: $_"; }
} else {
if (/^\}$/) {
$inside = 0;
$$storePaths2urls{$storePath} = $narurl;
$$urls2hashes{$narurl} = $hash;
if ($type eq "narfile") {
foreach my $p (@preds) {
$$successors{$p} = $storePath;
$$narFiles{$storePath} = []
unless defined $$narFiles{$storePath};
my $narFileList = $$narFiles{$storePath};
my $found = 0;
foreach my $narFile (@{$narFileList}) {
if ($narFile->{url} eq $url) {
if ($narFile->{hash} eq $hash) {
$found = 1;
} else {
die "conflicting hashes for URL $url, " .
"namely $narFile->{hash} and $hash";
}
}
}
if (!$found) {
push @{$narFileList},
{url => $url, hash => $hash, size => $size};
}
foreach my $p (@preds) {
$$successors{$p} = $storePath;
}
}
elsif ($type eq "patch") {
$$patches{$storePath} = []
unless defined $$patches{$storePath};
my $patchList = $$patches{$storePath};
my $found = 0;
foreach my $patch (@{$patchList}) {
if ($patch->{url} eq $url) {
if ($patch->{hash} eq $hash) {
$found = 1 if ($patch->{basePath} eq $basePath);
} else {
die "conflicting hashes for URL $url, " .
"namely $patch->{hash} and $hash";
}
}
}
if (!$found) {
push @{$patchList},
{ url => $url, hash => $hash, size => $size
, basePath => $basePath, baseHash => $baseHash
};
}
}
}
elsif (/^\s*StorePath:\s*(\/\S+)\s*$/) {
$storePath = $1;
}
elsif (/^\s*NarURL:\s*(\S+)\s*$/) {
$narurl = $1;
}
elsif (/^\s*MD5:\s*(\S+)\s*$/) {
$hash = $1;
}
elsif (/^\s*SuccOf:\s*(\/\S+)\s*$/) {
push @preds, $1;
}
elsif (/^\s*StorePath:\s*(\/\S+)\s*$/) { $storePath = $1; }
elsif (/^\s*Hash:\s*(\S+)\s*$/) { $hash = $1; }
elsif (/^\s*URL:\s*(\S+)\s*$/) { $url = $1; }
elsif (/^\s*Size:\s*(\d+)\s*$/) { $size = $1; }
elsif (/^\s*SuccOf:\s*(\/\S+)\s*$/) { push @preds, $1; }
elsif (/^\s*BasePath:\s*(\/\S+)\s*$/) { $basePath = $1; }
elsif (/^\s*BaseHash:\s*(\S+)\s*$/) { $baseHash = $1; }
elsif (/^\s*Type:\s*(\S+)\s*$/) { $patchType = $1; }
# Compatibility;
elsif (/^\s*NarURL:\s*(\S+)\s*$/) { $url = $1; }
elsif (/^\s*MD5:\s*(\S+)\s*$/) { $hash = $1; }
else { die "bad line: $_"; }
}
}
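
For reference, a manifest in the format parsed above consists of bare
`{ ... }' blocks describing full NAR archives and `patch { ... }'
blocks describing binary deltas.  The excerpt and the resulting
in-memory structure below are invented (paths, URLs, and hashes are
made up) but follow the fields the parser recognises; the braces must
sit at the start of a line, the indentation here is only for display:

    {
      StorePath: /nix/store/0a1b2c3d-hello-2.1.1
      URL: http://example.org/nars/0a1b2c3d.nar.bz2
      Hash: 0123456789abcdef0123456789abcdef
      Size: 123456
    }

    patch {
      StorePath: /nix/store/0a1b2c3d-hello-2.1.1
      URL: http://example.org/patches/4e5f6a7b.patch
      Hash: fedcba9876543210fedcba9876543210
      Size: 2345
      BasePath: /nix/store/8c9d0e1f-hello-2.1.0
      BaseHash: 00112233445566778899aabbccddeeff
      Type: bsdiff
    }

After readManifest has processed such a manifest, the hashes passed in
by the caller would hold roughly:

    %narFiles = (
        "/nix/store/0a1b2c3d-hello-2.1.1" => [
            { url  => "http://example.org/nars/0a1b2c3d.nar.bz2",
              hash => "0123456789abcdef0123456789abcdef",
              size => 123456 },
        ],
    );

    %patches = (
        "/nix/store/0a1b2c3d-hello-2.1.1" => [
            { url      => "http://example.org/patches/4e5f6a7b.patch",
              hash     => "fedcba9876543210fedcba9876543210",
              size     => 2345,
              basePath => "/nix/store/8c9d0e1f-hello-2.1.0",
              baseHash => "00112233445566778899aabbccddeeff" },
        ],
    );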