Merge branch 'master' into new-cli

Author: Eelco Dolstra
Date:   2016-02-25 11:25:11 +01:00
Commit: f1bdeac986
74 changed files with 2119 additions and 540 deletions

View file

@ -11,6 +11,12 @@
 <variablelist xml:id="env-common">
+<varlistentry><term><envar>IN_NIX_SHELL</envar></term>
+<listitem><para>Indicator that tells if the current environment was set up by
+<command>nix-shell</command>.</para></listitem>
+</varlistentry>
 <varlistentry xml:id="env-NIX_PATH"><term><envar>NIX_PATH</envar></term>

View file

@ -267,7 +267,7 @@ dependencies in Nixpkgs.</para>
 <para>The lines starting with <literal>#! nix-shell</literal> specify
 <command>nix-shell</command> options (see above). Note that you cannot
-write <literal>#1 /usr/bin/env nix-shell -i ...</literal> because
+write <literal>#! /usr/bin/env nix-shell -i ...</literal> because
 many operating systems only allow one argument in
 <literal>#!</literal> lines.</para>

View file

@ -32,7 +32,7 @@ available as <function>builtins.derivation</function>.</para>
 <varlistentry><term><function>builtins.add</function>
 <replaceable>e1</replaceable> <replaceable>e2</replaceable></term>
-<listitem><para>Return the sum of the integers
+<listitem><para>Return the sum of the numbers
 <replaceable>e1</replaceable> and
 <replaceable>e2</replaceable>.</para></listitem>

@ -204,7 +204,7 @@ if builtins ? getEnv then builtins.getEnv "PATH" else ""</programlisting>
 <varlistentry><term><function>builtins.div</function>
 <replaceable>e1</replaceable> <replaceable>e2</replaceable></term>
-<listitem><para>Return the quotient of the integers
+<listitem><para>Return the quotient of the numbers
 <replaceable>e1</replaceable> and
 <replaceable>e2</replaceable>.</para></listitem>

@ -620,12 +620,12 @@ x: x + 456</programlisting>
 <varlistentry><term><function>builtins.lessThan</function>
 <replaceable>e1</replaceable> <replaceable>e2</replaceable></term>
-<listitem><para>Return <literal>true</literal> if the integer
-<replaceable>e1</replaceable> is less than the integer
+<listitem><para>Return <literal>true</literal> if the number
+<replaceable>e1</replaceable> is less than the number
 <replaceable>e2</replaceable>, and <literal>false</literal>
 otherwise. Evaluation aborts if either
 <replaceable>e1</replaceable> or <replaceable>e2</replaceable>
-does not evaluate to an integer.</para></listitem>
+does not evaluate to a number.</para></listitem>
 </varlistentry>

@ -676,7 +676,7 @@ map (x: "foo" + x) [ "bar" "bla" "abc" ]</programlisting>
 <varlistentry><term><function>builtins.mul</function>
 <replaceable>e1</replaceable> <replaceable>e2</replaceable></term>
-<listitem><para>Return the product of the integers
+<listitem><para>Return the product of the numbers
 <replaceable>e1</replaceable> and
 <replaceable>e2</replaceable>.</para></listitem>

@ -833,7 +833,7 @@ builtins.sort builtins.lessThan [ 483 249 526 147 42 77 ]
 <varlistentry><term><function>builtins.sub</function>
 <replaceable>e1</replaceable> <replaceable>e2</replaceable></term>
-<listitem><para>Return the difference between the integers
+<listitem><para>Return the difference between the numbers
 <replaceable>e1</replaceable> and
 <replaceable>e2</replaceable>.</para></listitem>

@ -960,7 +960,7 @@ in foo</programlisting>
 <varlistentry><term><function>builtins.toJSON</function> <replaceable>e</replaceable></term>
 <listitem><para>Return a string containing a JSON representation
-of <replaceable>e</replaceable>. Strings, integers, booleans,
+of <replaceable>e</replaceable>. Strings, integers, floats, booleans,
 nulls and lists are mapped to their JSON equivalents. Sets
 (except derivations) are represented as objects. Derivations are
 translated to a JSON string containing the derivations output

View file

@ -43,7 +43,7 @@ of which specify the inputs of the build.</para>
 <itemizedlist>
-<listitem><para>Strings and integers are just passed
+<listitem><para>Strings and numbers are just passed
 verbatim.</para></listitem>
 <listitem><para>A <emphasis>path</emphasis> (e.g.,

View file

@ -140,8 +140,13 @@ stdenv.mkDerivation {
 </listitem>
-<listitem><para><emphasis>Integers</emphasis>, e.g.,
-<literal>123</literal>.</para></listitem>
+<listitem><para>Numbers, which can be <emphasis>integers</emphasis> (like
+<literal>123</literal>) or <emphasis>floating point</emphasis> (like
+<literal>123.43</literal> or <literal>.27e13</literal>).</para>
+<para>Numbers are type-compatible: pure integer operations will always
+return integers, whereas any operation involving at least one floating point
+number will have a floating point number as a result.</para></listitem>
 <listitem><para><emphasis>Paths</emphasis>, e.g.,
 <filename>/bin/sh</filename> or <filename>./builder.sh</filename>.
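The promotion rule described in the new paragraph can be illustrated with a small standalone sketch; it mirrors the prim_add/prim_sub changes further down in this commit, but the Number struct and helpers are made up for illustration:

    #include <cstdio>

    typedef long NixInt;     // as in src/libexpr/value.hh
    typedef float NixFloat;  // added by this commit

    // Hypothetical tagged number, only for demonstrating the rule.
    struct Number { bool isFloat; NixInt i; NixFloat f; };

    static Number mkI(NixInt n)   { return {false, n, 0}; }
    static Number mkF(NixFloat n) { return {true, 0, n}; }

    static Number add(Number a, Number b)
    {
        if (a.isFloat || b.isFloat) {               // at least one float: promote
            NixFloat fa = a.isFloat ? a.f : (NixFloat) a.i;
            NixFloat fb = b.isFloat ? b.f : (NixFloat) b.i;
            return mkF(fa + fb);                    // result is a float
        }
        return mkI(a.i + b.i);                      // pure integer operation stays integral
    }

    int main()
    {
        Number r1 = add(mkI(1), mkI(2));            // integer 3
        Number r2 = add(mkI(1), mkF(2.5));          // float 3.5
        std::printf("%ld %g\n", r1.i, (double) r2.f);
        return 0;
    }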

View file

@ -62,9 +62,10 @@ directories such as
 so if a package builds correctly on your system, this is because you
 specified the dependency explicitly.</para>
-<para>Runtime dependencies are found by scanning binaries for the hash
-parts of Nix store paths (such as <literal>r8vvq9kq…</literal>). This
-sounds risky, but it works extremely well.</para>
+<para>Once a package is built, runtime dependencies are found by
+scanning binaries for the hash parts of Nix store paths (such as
+<literal>r8vvq9kq…</literal>). This sounds risky, but it works
+extremely well.</para>
 </simplesect>
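A minimal sketch of the scanning idea described above (illustrative only, not Nix's actual reference scanner; the function name and the candidate list are assumptions):

    #include <fstream>
    #include <iterator>
    #include <set>
    #include <string>
    #include <utility>
    #include <vector>

    // Each candidate pairs a hash part (e.g. "r8vvq9kq...") with its full store path.
    std::set<std::string> scanForReferences(
        const std::string & file,
        const std::vector<std::pair<std::string, std::string>> & candidates)
    {
        std::ifstream in(file, std::ios::binary);
        std::string data((std::istreambuf_iterator<char>(in)),
                          std::istreambuf_iterator<char>());

        std::set<std::string> refs;
        for (auto & c : candidates)
            if (data.find(c.first) != std::string::npos)   // hash part occurs in the binary
                refs.insert(c.second);
        return refs;
    }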

View file

@ -121,6 +121,13 @@ $ diffoscope /nix/store/11a27shh6n2i…-zlib-1.2.8 /nix/store/11a27shh6n2i…-zl
 also improves performance.</para>
 </listitem>
+<listitem>
+<para>The Nix language now supports floating point numbers. They are
+based on regular C++ <literal>float</literal> and compatible with
+existing integers and number-related operations. Export and import to and
+from JSON and XML works, too.
+</para>
+</listitem>
 <listitem>
 <para>All "chroot"-containing strings got renamed to "sandbox".
 In particular, some Nix options got renamed, but the old names

View file

@ -10,6 +10,7 @@
 #include "globals.hh"
 #include "store-api.hh"
 #include "util.hh"
+#include "crypto.hh"
 #if HAVE_SODIUM
 #include <sodium.h>

@ -108,7 +109,7 @@ SV * queryPathInfo(char * path, int base32)
             XPUSHs(&PL_sv_undef);
         else
             XPUSHs(sv_2mortal(newSVpv(info.deriver.c_str(), 0)));
-        string s = "sha256:" + (base32 ? printHash32(info.hash) : printHash(info.hash));
+        string s = "sha256:" + (base32 ? printHash32(info.narHash) : printHash(info.narHash));
         XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
         mXPUSHi(info.registrationTime);
         mXPUSHi(info.narSize);

@ -160,6 +161,7 @@ SV * topoSortPaths(...)
 SV * followLinksToStorePath(char * path)
     CODE:
         try {
+            store();
            RETVAL = newSVpv(followLinksToStorePath(path).c_str(), 0);
         } catch (Error & e) {
             croak("%s", e.what());

@ -234,19 +236,12 @@ SV * convertHash(char * algo, char * s, int toBase32)
     }
-SV * signString(SV * secretKey_, char * msg)
+SV * signString(char * secretKey_, char * msg)
     PPCODE:
         try {
 #if HAVE_SODIUM
-            STRLEN secretKeyLen;
-            unsigned char * secretKey = (unsigned char *) SvPV(secretKey_, secretKeyLen);
-            if (secretKeyLen != crypto_sign_SECRETKEYBYTES)
-                throw Error("secret key is not valid");
-            unsigned char sig[crypto_sign_BYTES];
-            unsigned long long sigLen;
-            crypto_sign_detached(sig, &sigLen, (unsigned char *) msg, strlen(msg), secretKey);
-            XPUSHs(sv_2mortal(newSVpv((char *) sig, sigLen)));
+            auto sig = SecretKey(secretKey_).signDetached(msg);
+            XPUSHs(sv_2mortal(newSVpv(sig.c_str(), sig.size())));
 #else
             throw Error("Nix was not compiled with libsodium, required for signed binary cache support");
 #endif

View file

@ -180,8 +180,6 @@ let
   };
-  rpm_fedora18i386 = makeRPM_i686 (diskImageFuns: diskImageFuns.fedora18i386) [];
-  rpm_fedora18x86_64 = makeRPM_x86_64 (diskImageFunsFun: diskImageFunsFun.fedora18x86_64) [];
   rpm_fedora19i386 = makeRPM_i686 (diskImageFuns: diskImageFuns.fedora19i386) [];
   rpm_fedora19x86_64 = makeRPM_x86_64 (diskImageFunsFun: diskImageFunsFun.fedora19x86_64) [];
   rpm_fedora20i386 = makeRPM_i686 (diskImageFuns: diskImageFuns.fedora20i386) [];

@ -190,15 +188,9 @@ let
   rpm_fedora21x86_64 = makeRPM_x86_64 (diskImageFunsFun: diskImageFunsFun.fedora21x86_64) [ "libsodium-devel" ];
-  deb_debian7i386 = makeDeb_i686 (diskImageFuns: diskImageFuns.debian7i386) [];
-  deb_debian7x86_64 = makeDeb_x86_64 (diskImageFunsFun: diskImageFunsFun.debian7x86_64) [];
   deb_debian8i386 = makeDeb_i686 (diskImageFuns: diskImageFuns.debian8i386) [ "libsodium-dev" ];
   deb_debian8x86_64 = makeDeb_x86_64 (diskImageFunsFun: diskImageFunsFun.debian8x86_64) [ "libsodium-dev" ];
-  deb_ubuntu1210i386 = makeDeb_i686 (diskImageFuns: diskImageFuns.ubuntu1210i386) [];
-  deb_ubuntu1210x86_64 = makeDeb_x86_64 (diskImageFuns: diskImageFuns.ubuntu1210x86_64) [];
-  deb_ubuntu1304i386 = makeDeb_i686 (diskImageFuns: diskImageFuns.ubuntu1304i386) [];
-  deb_ubuntu1304x86_64 = makeDeb_x86_64 (diskImageFuns: diskImageFuns.ubuntu1304x86_64) [];
   deb_ubuntu1310i386 = makeDeb_i686 (diskImageFuns: diskImageFuns.ubuntu1310i386) [];
   deb_ubuntu1310x86_64 = makeDeb_x86_64 (diskImageFuns: diskImageFuns.ubuntu1310x86_64) [];
   deb_ubuntu1404i386 = makeDeb_i686 (diskImageFuns: diskImageFuns.ubuntu1404i386) [];

@ -272,8 +264,8 @@ let
         binaryTarball.x86_64-darwin
         #binaryTarball.x86_64-freebsd
         binaryTarball.x86_64-linux
-        deb_debian7i386
-        deb_debian7x86_64
+        deb_debian8i386
+        deb_debian8x86_64
         deb_ubuntu1404i386 # LTS
         deb_ubuntu1404x86_64 # LTS
         deb_ubuntu1504i386

View file

@ -33,7 +33,7 @@ if ! [ -e $dest ]; then
 fi
 if ! [ -w $dest ]; then
-    echo "$0: directory $dest exists, but is not writable by you; please run chown -R $USER $dest as root" >&2
+    echo "$0: directory $dest exists, but is not writable by you. This could indicate that another user has already performed a single-user installation of Nix on this system. If you wish to enable multi-user support see http://nixos.org/nix/manual/#ssec-multi-user. If you wish to continue with a single-user install for $USER please run chown -R $USER $dest as root." >&2
     exit 1
 fi

@ -92,7 +92,7 @@ p=$NIX_LINK/etc/profile.d/nix.sh
 added=
 for i in .bash_profile .bash_login .profile; do
     fn="$HOME/$i"
-    if [ -e "$fn" ]; then
+    if [ -w "$fn" ]; then
         if ! grep -q "$p" "$fn"; then
             echo "modifying $fn..." >&2
             echo "if [ -e $p ]; then . $p; fi # added by Nix installer" >> $fn

View file

@ -11,8 +11,8 @@ if [ -n "$HOME" ]; then
     export PATH=$NIX_LINK/bin:$NIX_LINK/sbin:$PATH
     # Subscribe the user to the Nixpkgs channel by default.
-    if [ ! -e $HOME/.nix-channels ]; then
-        echo "https://nixos.org/channels/nixpkgs-unstable nixpkgs" > $HOME/.nix-channels
+    if [ ! -e "$HOME/.nix-channels" ]; then
+        echo "https://nixos.org/channels/nixpkgs-unstable nixpkgs" > "$HOME/.nix-channels"
     fi
     # Append ~/.nix-defexpr/channels/nixpkgs to $NIX_PATH so that

@ -23,6 +23,8 @@ if [ -n "$HOME" ]; then
     # Set $SSL_CERT_FILE so that Nixpkgs applications like curl work.
     if [ -e /etc/ssl/certs/ca-certificates.crt ]; then # NixOS, Ubuntu, Debian, Gentoo, Arch
         export SSL_CERT_FILE=/etc/ssl/certs/ca-certificates.crt
+    elif [ -e /etc/ssl/ca-bundle.pem ]; then # openSUSE Tumbleweed
+        export SSL_CERT_FILE=/etc/ssl/ca-bundle.pem
     elif [ -e /etc/ssl/certs/ca-bundle.crt ]; then # Old NixOS
         export SSL_CERT_FILE=/etc/ssl/certs/ca-bundle.crt
     elif [ -e /etc/pki/tls/certs/ca-bundle.crt ]; then # Fedora, CentOS

View file

@ -258,13 +258,10 @@ for (my $n = 0; $n < scalar @storePaths2; $n++) {
     }
     if (defined $secretKeyFile) {
-        my $s = readFile $secretKeyFile;
-        chomp $s;
-        my ($keyName, $secretKey) = split ":", $s;
-        die "invalid secret key file $secretKeyFile\n" unless defined $keyName && defined $secretKey;
+        my $secretKey = readFile $secretKeyFile;
         my $fingerprint = fingerprintPath($storePath, $narHash, $narSize, $refs);
-        my $sig = encode_base64(signString(decode_base64($secretKey), $fingerprint), "");
-        $info .= "Sig: $keyName:$sig\n";
+        my $sig = signString($secretKey, $fingerprint);
+        $info .= "Sig: $sig\n";
     }
     my $pathHash = substr(basename($storePath), 0, 32);

View file

@ -128,6 +128,9 @@ static void printValue(std::ostream & str, std::set<const Value *> & active, con
case tExternal: case tExternal:
str << *v.external; str << *v.external;
break; break;
case tFloat:
str << v.fpoint;
break;
default: default:
throw Error("invalid value"); throw Error("invalid value");
} }
@ -161,6 +164,7 @@ string showType(const Value & v)
case tPrimOp: return "a built-in function"; case tPrimOp: return "a built-in function";
case tPrimOpApp: return "a partially applied built-in function"; case tPrimOpApp: return "a partially applied built-in function";
case tExternal: return v.external->showType(); case tExternal: return v.external->showType();
case tFloat: return "a float";
} }
abort(); abort();
} }
@ -579,6 +583,12 @@ Value * ExprInt::maybeThunk(EvalState & state, Env & env)
return &v; return &v;
} }
Value * ExprFloat::maybeThunk(EvalState & state, Env & env)
{
nrAvoided++;
return &v;
}
Value * ExprPath::maybeThunk(EvalState & state, Env & env) Value * ExprPath::maybeThunk(EvalState & state, Env & env)
{ {
nrAvoided++; nrAvoided++;
@ -666,6 +676,11 @@ void ExprInt::eval(EvalState & state, Env & env, Value & v)
} }
void ExprFloat::eval(EvalState & state, Env & env, Value & v)
{
v = this->v;
}
void ExprString::eval(EvalState & state, Env & env, Value & v) void ExprString::eval(EvalState & state, Env & env, Value & v)
{ {
v = this->v; v = this->v;
@ -1211,6 +1226,7 @@ void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v)
     PathSet context;
     std::ostringstream s;
     NixInt n = 0;
+    NixFloat nf = 0;
     bool first = !forceString;
     ValueType firstType = tString;

@ -1229,15 +1245,30 @@ void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v)
         }
         if (firstType == tInt) {
-            if (vTmp.type != tInt)
+            if (vTmp.type == tInt) {
+                n += vTmp.integer;
+            } else if (vTmp.type == tFloat) {
+                // Upgrade the type from int to float;
+                firstType = tFloat;
+                nf = n;
+                nf += vTmp.fpoint;
+            } else
                 throwEvalError("cannot add %1% to an integer, at %2%", showType(vTmp), pos);
-            n += vTmp.integer;
+        } else if (firstType == tFloat) {
+            if (vTmp.type == tInt) {
+                nf += vTmp.integer;
+            } else if (vTmp.type == tFloat) {
+                nf += vTmp.fpoint;
+            } else
+                throwEvalError("cannot add %1% to a float, at %2%", showType(vTmp), pos);
         } else
             s << state.coerceToString(pos, vTmp, context, false, firstType == tString);
     }
     if (firstType == tInt)
         mkInt(v, n);
+    else if (firstType == tFloat)
+        mkFloat(v, nf);
     else if (firstType == tPath) {
         if (!context.empty())
             throwEvalError("a string that refers to a store path cannot be appended to a path, at %1%", pos);
@ -1295,6 +1326,17 @@ NixInt EvalState::forceInt(Value & v, const Pos & pos)
} }
NixFloat EvalState::forceFloat(Value & v, const Pos & pos)
{
forceValue(v, pos);
if (v.type == tInt)
return v.integer;
else if (v.type != tFloat)
throwTypeError("value is %1% while a float was expected, at %2%", v, pos);
return v.fpoint;
}
bool EvalState::forceBool(Value & v) bool EvalState::forceBool(Value & v)
{ {
forceValue(v); forceValue(v);
@ -1413,6 +1455,7 @@ string EvalState::coerceToString(const Pos & pos, Value & v, PathSet & context,
if (v.type == tBool && v.boolean) return "1"; if (v.type == tBool && v.boolean) return "1";
if (v.type == tBool && !v.boolean) return ""; if (v.type == tBool && !v.boolean) return "";
if (v.type == tInt) return std::to_string(v.integer); if (v.type == tInt) return std::to_string(v.integer);
if (v.type == tFloat) return std::to_string(v.fpoint);
if (v.type == tNull) return ""; if (v.type == tNull) return "";
if (v.isList()) { if (v.isList()) {
@ -1474,6 +1517,13 @@ bool EvalState::eqValues(Value & v1, Value & v2)
uniqList on a list of sets.) Will remove this eventually. */ uniqList on a list of sets.) Will remove this eventually. */
if (&v1 == &v2) return true; if (&v1 == &v2) return true;
// Special case type-compatibility between float and int
if (v1.type == tInt && v2.type == tFloat)
return v1.integer == v2.fpoint;
if (v1.type == tFloat && v2.type == tInt)
return v1.fpoint == v2.integer;
// All other types are not compatible with each other.
if (v1.type != v2.type) return false; if (v1.type != v2.type) return false;
switch (v1.type) { switch (v1.type) {
@ -1531,6 +1581,9 @@ bool EvalState::eqValues(Value & v1, Value & v2)
case tExternal: case tExternal:
return *v1.external == *v2.external; return *v1.external == *v2.external;
case tFloat:
return v1.fpoint == v2.fpoint;
default: default:
throwEvalError("cannot compare %1% with %2%", showType(v1), showType(v2)); throwEvalError("cannot compare %1% with %2%", showType(v1), showType(v2));
} }

View file

@ -147,6 +147,7 @@ public:
/* Force `v', and then verify that it has the expected type. */ /* Force `v', and then verify that it has the expected type. */
NixInt forceInt(Value & v, const Pos & pos); NixInt forceInt(Value & v, const Pos & pos);
NixFloat forceFloat(Value & v, const Pos & pos);
bool forceBool(Value & v); bool forceBool(Value & v);
inline void forceAttrs(Value & v); inline void forceAttrs(Value & v);
inline void forceAttrs(Value & v, const Pos & pos); inline void forceAttrs(Value & v, const Pos & pos);

View file

@ -106,7 +106,8 @@ bool DrvInfo::checkMeta(Value & v)
         if (!checkMeta(*i.value)) return false;
         return true;
     }
-    else return v.type == tInt || v.type == tBool || v.type == tString;
+    else return v.type == tInt || v.type == tBool || v.type == tString ||
+                v.type == tFloat;
 }

@ -127,7 +128,7 @@ string DrvInfo::queryMetaString(const string & name)
 }
-int DrvInfo::queryMetaInt(const string & name, int def)
+NixInt DrvInfo::queryMetaInt(const string & name, NixInt def)
 {
     Value * v = queryMeta(name);
     if (!v) return def;

@ -135,12 +136,26 @@ int DrvInfo::queryMetaInt(const string & name, int def)
     if (v->type == tString) {
         /* Backwards compatibility with before we had support for
            integer meta fields. */
-        int n;
+        NixInt n;
         if (string2Int(v->string.s, n)) return n;
     }
     return def;
 }
+NixFloat DrvInfo::queryMetaFloat(const string & name, NixFloat def)
+{
+    Value * v = queryMeta(name);
+    if (!v) return def;
+    if (v->type == tFloat) return v->fpoint;
+    if (v->type == tString) {
+        /* Backwards compatibility with before we had support for
+           float meta fields. */
+        NixFloat n;
+        if (string2Float(v->string.s, n)) return n;
+    }
+    return def;
+}
 bool DrvInfo::queryMetaBool(const string & name, bool def)
 {

View file

@ -47,7 +47,8 @@
     StringSet queryMetaNames();
     Value * queryMeta(const string & name);
     string queryMetaString(const string & name);
-    int queryMetaInt(const string & name, int def);
+    NixInt queryMetaInt(const string & name, NixInt def);
+    NixFloat queryMetaFloat(const string & name, NixFloat def);
     bool queryMetaBool(const string & name, bool def);
     void setMeta(const string & name, Value * v);

View file

@ -105,17 +105,21 @@ static void parseJSON(EvalState & state, const char * & s, Value & v)
         mkString(v, parseJSONString(s));
     }
-    else if (isdigit(*s) || *s == '-') {
-        bool neg = false;
-        if (*s == '-') {
-            neg = true;
-            if (!*++s) throw JSONParseError("unexpected end of JSON number");
+    else if (isdigit(*s) || *s == '-' || *s == '.' ) {
+        // Buffer into a string first, then use built-in C++ conversions
+        std::string tmp_number;
+        ValueType number_type = tInt;
+        while (isdigit(*s) || *s == '-' || *s == '.' || *s == 'e' || *s == 'E') {
+            if (*s == '.' || *s == 'e' || *s == 'E')
+                number_type = tFloat;
+            tmp_number += *s++;
         }
-        NixInt n = 0;
-        // FIXME: detect overflow
-        while (isdigit(*s)) n = n * 10 + (*s++ - '0');
-        if (*s == '.' || *s == 'e') throw JSONParseError("floating point JSON numbers are not supported");
-        mkInt(v, neg ? -n : n);
+        if (number_type == tFloat)
+            mkFloat(v, stod(tmp_number));
+        else
+            mkInt(v, stoi(tmp_number));
     }
     else if (strncmp(s, "true", 4) == 0) {

View file

@ -86,6 +86,7 @@ static Expr * unescapeStr(SymbolTable & symbols, const char * s)
 ID [a-zA-Z\_][a-zA-Z0-9\_\'\-]*
 INT [0-9]+
+FLOAT (([1-9][0-9]*\.[0-9]*)|(0?\.[0-9]+))([Ee][+-]?[0-9]+)?
 PATH [a-zA-Z0-9\.\_\-\+]*(\/[a-zA-Z0-9\.\_\-\+]+)+
 HPATH \~(\/[a-zA-Z0-9\.\_\-\+]+)+
 SPATH \<[a-zA-Z0-9\.\_\-\+]+(\/[a-zA-Z0-9\.\_\-\+]+)*\>

@ -126,6 +127,12 @@ or { return OR_KW; }
               throw ParseError(format("invalid integer %1%") % yytext);
           return INT;
         }
+{FLOAT}     { errno = 0;
+              yylval->nf = strtod(yytext, 0);
+              if (errno != 0)
+                  throw ParseError(format("invalid float %1%") % yytext);
+              return FLOAT;
+            }
 \$\{ { PUSH_STATE(INSIDE_DOLLAR_CURLY); return DOLLAR_CURLY; }
 }

@ -188,5 +195,7 @@ or { return OR_KW; }
 }
+<<EOF>> { data->atEnd = true; return 0; }
 %%
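A quick way to sanity-check which literals the new FLOAT pattern accepts is to run the same regular expression outside the lexer (an illustrative test, not part of the commit):

    #include <cassert>
    #include <regex>

    int main()
    {
        std::regex floatRe("(([1-9][0-9]*\\.[0-9]*)|(0?\\.[0-9]+))([Ee][+-]?[0-9]+)?");

        assert(std::regex_match("123.43", floatRe));   // integer part plus fraction
        assert(std::regex_match(".27e13", floatRe));   // leading-dot form with exponent
        assert(std::regex_match("0.5",    floatRe));   // optional leading zero
        assert(!std::regex_match("123",   floatRe));   // no dot: still lexed as INT
        assert(!std::regex_match("1e5",   floatRe));   // exponent without a dot is not a FLOAT

        return 0;
    }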

View file

@ -68,6 +68,11 @@ void ExprInt::show(std::ostream & str)
str << n; str << n;
} }
void ExprFloat::show(std::ostream & str)
{
str << nf;
}
void ExprString::show(std::ostream & str) void ExprString::show(std::ostream & str)
{ {
showString(str, s); showString(str, s);
@ -226,6 +231,10 @@ void ExprInt::bindVars(const StaticEnv & env)
{ {
} }
void ExprFloat::bindVars(const StaticEnv & env)
{
}
void ExprString::bindVars(const StaticEnv & env) void ExprString::bindVars(const StaticEnv & env)
{ {
} }

View file

@ -11,6 +11,7 @@ namespace nix {
MakeError(EvalError, Error) MakeError(EvalError, Error)
MakeError(ParseError, Error) MakeError(ParseError, Error)
MakeError(IncompleteParseError, ParseError)
MakeError(AssertionError, EvalError) MakeError(AssertionError, EvalError)
MakeError(ThrownError, AssertionError) MakeError(ThrownError, AssertionError)
MakeError(Abort, EvalError) MakeError(Abort, EvalError)
@ -98,6 +99,15 @@ struct ExprInt : Expr
Value * maybeThunk(EvalState & state, Env & env); Value * maybeThunk(EvalState & state, Env & env);
}; };
struct ExprFloat : Expr
{
NixFloat nf;
Value v;
ExprFloat(NixFloat nf) : nf(nf) { mkFloat(v, nf); };
COMMON_METHODS
Value * maybeThunk(EvalState & state, Env & env);
};
struct ExprString : Expr struct ExprString : Expr
{ {
Symbol s; Symbol s;

View file

@ -31,10 +31,12 @@ namespace nix {
Path basePath; Path basePath;
Symbol path; Symbol path;
string error; string error;
bool atEnd;
Symbol sLetBody; Symbol sLetBody;
ParseData(EvalState & state) ParseData(EvalState & state)
: state(state) : state(state)
, symbols(state.symbols) , symbols(state.symbols)
, atEnd(false)
, sLetBody(symbols.create("<let-body>")) , sLetBody(symbols.create("<let-body>"))
{ }; { };
}; };
@ -244,6 +246,7 @@ void yyerror(YYLTYPE * loc, yyscan_t scanner, ParseData * data, const char * err
nix::Formals * formals; nix::Formals * formals;
nix::Formal * formal; nix::Formal * formal;
nix::NixInt n; nix::NixInt n;
nix::NixFloat nf;
const char * id; // !!! -> Symbol const char * id; // !!! -> Symbol
char * path; char * path;
char * uri; char * uri;
@ -264,6 +267,7 @@ void yyerror(YYLTYPE * loc, yyscan_t scanner, ParseData * data, const char * err
%token <id> ID ATTRPATH %token <id> ID ATTRPATH
%token <e> STR IND_STR %token <e> STR IND_STR
%token <n> INT %token <n> INT
%token <nf> FLOAT
%token <path> PATH HPATH SPATH %token <path> PATH HPATH SPATH
%token <uri> URI %token <uri> URI
%token IF THEN ELSE ASSERT WITH LET IN REC INHERIT EQ NEQ AND OR IMPL OR_KW %token IF THEN ELSE ASSERT WITH LET IN REC INHERIT EQ NEQ AND OR IMPL OR_KW
@ -366,6 +370,7 @@ expr_simple
$$ = new ExprVar(CUR_POS, data->symbols.create($1)); $$ = new ExprVar(CUR_POS, data->symbols.create($1));
} }
| INT { $$ = new ExprInt($1); } | INT { $$ = new ExprInt($1); }
| FLOAT { $$ = new ExprFloat($1); }
| '"' string_parts '"' { $$ = $2; } | '"' string_parts '"' { $$ = $2; }
| IND_STRING_OPEN ind_string_parts IND_STRING_CLOSE { | IND_STRING_OPEN ind_string_parts IND_STRING_CLOSE {
$$ = stripIndentation(CUR_POS, data->symbols, *$2); $$ = stripIndentation(CUR_POS, data->symbols, *$2);
@ -536,7 +541,12 @@ Expr * EvalState::parse(const char * text,
     int res = yyparse(scanner, &data);
     yylex_destroy(scanner);
-    if (res) throw ParseError(data.error);
+    if (res) {
+        if (data.atEnd)
+            throw IncompleteParseError(data.error);
+        else
+            throw ParseError(data.error);
+    }
     data.result->bindVars(staticEnv);

View file

@ -195,6 +195,7 @@ static void prim_typeOf(EvalState & state, const Pos & pos, Value * * args, Valu
case tExternal: case tExternal:
t = args[0]->external->typeOf(); t = args[0]->external->typeOf();
break; break;
case tFloat: t = "float"; break;
default: abort(); default: abort();
} }
mkString(v, state.symbols.create(t)); mkString(v, state.symbols.create(t));
@ -224,6 +225,12 @@ static void prim_isInt(EvalState & state, const Pos & pos, Value * * args, Value
mkBool(v, args[0]->type == tInt); mkBool(v, args[0]->type == tInt);
} }
/* Determine whether the argument is a float. */
static void prim_isFloat(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
state.forceValue(*args[0]);
mkBool(v, args[0]->type == tFloat);
}
/* Determine whether the argument is a string. */ /* Determine whether the argument is a string. */
static void prim_isString(EvalState & state, const Pos & pos, Value * * args, Value & v) static void prim_isString(EvalState & state, const Pos & pos, Value * * args, Value & v)
@ -245,11 +252,17 @@ struct CompareValues
{ {
bool operator () (const Value * v1, const Value * v2) const bool operator () (const Value * v1, const Value * v2) const
{ {
if (v1->type == tFloat && v2->type == tInt)
return v1->fpoint < v2->integer;
if (v1->type == tInt && v2->type == tFloat)
return v1->integer < v2->fpoint;
if (v1->type != v2->type) if (v1->type != v2->type)
throw EvalError(format("cannot compare %1% with %2%") % showType(*v1) % showType(*v2)); throw EvalError(format("cannot compare %1% with %2%") % showType(*v1) % showType(*v2));
switch (v1->type) { switch (v1->type) {
case tInt: case tInt:
return v1->integer < v2->integer; return v1->integer < v2->integer;
case tFloat:
return v1->fpoint < v2->fpoint;
case tString: case tString:
return strcmp(v1->string.s, v2->string.s) < 0; return strcmp(v1->string.s, v2->string.s) < 0;
case tPath: case tPath:
@ -1423,27 +1436,40 @@ static void prim_sort(EvalState & state, const Pos & pos, Value * * args, Value
 static void prim_add(EvalState & state, const Pos & pos, Value * * args, Value & v)
 {
-    mkInt(v, state.forceInt(*args[0], pos) + state.forceInt(*args[1], pos));
+    if (args[0]->type == tFloat || args[1]->type == tFloat)
+        mkFloat(v, state.forceFloat(*args[0], pos) + state.forceFloat(*args[1], pos));
+    else
+        mkInt(v, state.forceInt(*args[0], pos) + state.forceInt(*args[1], pos));
 }

 static void prim_sub(EvalState & state, const Pos & pos, Value * * args, Value & v)
 {
-    mkInt(v, state.forceInt(*args[0], pos) - state.forceInt(*args[1], pos));
+    if (args[0]->type == tFloat || args[1]->type == tFloat)
+        mkFloat(v, state.forceFloat(*args[0], pos) - state.forceFloat(*args[1], pos));
+    else
+        mkInt(v, state.forceInt(*args[0], pos) - state.forceInt(*args[1], pos));
 }

 static void prim_mul(EvalState & state, const Pos & pos, Value * * args, Value & v)
 {
-    mkInt(v, state.forceInt(*args[0], pos) * state.forceInt(*args[1], pos));
+    if (args[0]->type == tFloat || args[1]->type == tFloat)
+        mkFloat(v, state.forceFloat(*args[0], pos) * state.forceFloat(*args[1], pos));
+    else
+        mkInt(v, state.forceInt(*args[0], pos) * state.forceInt(*args[1], pos));
 }

 static void prim_div(EvalState & state, const Pos & pos, Value * * args, Value & v)
 {
-    NixInt i2 = state.forceInt(*args[1], pos);
-    if (i2 == 0) throw EvalError(format("division by zero, at %1%") % pos);
-    mkInt(v, state.forceInt(*args[0], pos) / i2);
+    NixFloat f2 = state.forceFloat(*args[1], pos);
+    if (f2 == 0) throw EvalError(format("division by zero, at %1%") % pos);
+    if (args[0]->type == tFloat || args[1]->type == tFloat)
+        mkFloat(v, state.forceFloat(*args[0], pos) / state.forceFloat(*args[1], pos));
+    else
+        mkInt(v, state.forceInt(*args[0], pos) / state.forceInt(*args[1], pos));
 }

@ -1735,7 +1761,7 @@ void EvalState::createBaseEnv()
        language feature gets added. It's not necessary to increase it
        when primops get added, because you can just use `builtins ?
        primOp' to check. */
-    mkInt(v, 3);
+    mkInt(v, 4);
     addConstant("__langVersion", v);
     // Miscellaneous

@ -1752,6 +1778,7 @@ void EvalState::createBaseEnv()
     addPrimOp("__isFunction", 1, prim_isFunction);
     addPrimOp("__isString", 1, prim_isString);
     addPrimOp("__isInt", 1, prim_isInt);
+    addPrimOp("__isFloat", 1, prim_isFloat);
     addPrimOp("__isBool", 1, prim_isBool);
     addPrimOp("__genericClosure", 1, prim_genericClosure);
     addPrimOp("abort", 1, prim_abort);

View file

@ -84,6 +84,10 @@ void printValueAsJSON(EvalState & state, bool strict,
v.external->printValueAsJSON(state, strict, str, context); v.external->printValueAsJSON(state, strict, str, context);
break; break;
case tFloat:
str << v.fpoint;
break;
default: default:
throw TypeError(format("cannot convert %1% to JSON") % showType(v)); throw TypeError(format("cannot convert %1% to JSON") % showType(v));
} }

View file

@ -36,7 +36,18 @@ struct JSONObject
         attr(s);
         escapeJSON(str, t);
     }
-    void attr(const string & s, int n)
+    void attr(const string & s, const char * t)
+    {
+        attr(s);
+        escapeJSON(str, t);
+    }
+    void attr(const string & s, bool b)
+    {
+        attr(s);
+        str << (b ? "true" : "false");
+    }
+    template<typename T>
+    void attr(const string & s, const T & n)
     {
         attr(s);
         str << n;
     }
View file

@ -148,6 +148,10 @@ static void printValueAsXML(EvalState & state, bool strict, bool location,
v.external->printValueAsXML(state, strict, location, doc, context, drvsSeen); v.external->printValueAsXML(state, strict, location, doc, context, drvsSeen);
break; break;
case tFloat:
doc.writeEmptyElement("float", singletonAttrs("value", (format("%1%") % v.fpoint).str()));
break;
default: default:
doc.writeEmptyElement("unevaluated"); doc.writeEmptyElement("unevaluated");
} }

View file

@ -22,6 +22,7 @@ typedef enum {
tPrimOp, tPrimOp,
tPrimOpApp, tPrimOpApp,
tExternal, tExternal,
tFloat
} ValueType; } ValueType;
@ -38,6 +39,7 @@ class XMLWriter;
typedef long NixInt; typedef long NixInt;
typedef float NixFloat;
/* External values must descend from ExternalValueBase, so that /* External values must descend from ExternalValueBase, so that
* type-agnostic nix functions (e.g. showType) can be implemented * type-agnostic nix functions (e.g. showType) can be implemented
@ -141,6 +143,7 @@ struct Value
Value * left, * right; Value * left, * right;
} primOpApp; } primOpApp;
ExternalValueBase * external; ExternalValueBase * external;
NixFloat fpoint;
}; };
bool isList() const bool isList() const
@ -181,6 +184,14 @@ static inline void mkInt(Value & v, NixInt n)
} }
static inline void mkFloat(Value & v, NixFloat n)
{
clearValue(v);
v.type = tFloat;
v.fpoint = n;
}
static inline void mkBool(Value & v, bool b) static inline void mkBool(Value & v, bool b)
{ {
clearValue(v); clearValue(v);

View file

@ -6,6 +6,8 @@ libmain_DIR := $(d)
libmain_SOURCES := $(wildcard $(d)/*.cc) libmain_SOURCES := $(wildcard $(d)/*.cc)
libmain_LDFLAGS = $(OPENSSL_LIBS)
libmain_LIBS = libstore libutil libformat libmain_LIBS = libstore libutil libformat
libmain_ALLOW_UNDEFINED = 1 libmain_ALLOW_UNDEFINED = 1

View file

@ -6,10 +6,11 @@
#include "store-api.hh" #include "store-api.hh"
#include "util.hh" #include "util.hh"
#include <iostream> #include <algorithm>
#include <cctype> #include <cctype>
#include <exception> #include <exception>
#include <algorithm> #include <iostream>
#include <mutex>
#include <cstdlib> #include <cstdlib>
#include <sys/time.h> #include <sys/time.h>
@ -17,7 +18,7 @@
#include <unistd.h> #include <unistd.h>
#include <signal.h> #include <signal.h>
extern char * * environ; #include <openssl/crypto.h>
namespace nix { namespace nix {
@ -94,7 +95,18 @@ string getArg(const string & opt,
} }
void detectStackOverflow(); /* OpenSSL is not thread-safe by default - it will randomly crash
unless the user supplies a mutex locking function. So let's do
that. */
static std::vector<std::mutex> opensslLocks;
static void opensslLockCallback(int mode, int type, const char * file, int line)
{
if (mode & CRYPTO_LOCK)
opensslLocks[type].lock();
else
opensslLocks[type].unlock();
}
void initNix() void initNix()
@ -105,11 +117,16 @@ void initNix()
std::cerr.rdbuf()->pubsetbuf(buf, sizeof(buf)); std::cerr.rdbuf()->pubsetbuf(buf, sizeof(buf));
#endif #endif
// FIXME: do we need this? It's not thread-safe.
std::ios::sync_with_stdio(false); std::ios::sync_with_stdio(false);
if (getEnv("IN_SYSTEMD") == "1") if (getEnv("IN_SYSTEMD") == "1")
logType = ltSystemd; logType = ltSystemd;
/* Initialise OpenSSL locking. */
opensslLocks = std::vector<std::mutex>(CRYPTO_num_locks());
CRYPTO_set_locking_callback(opensslLockCallback);
settings.processEnvironment(); settings.processEnvironment();
settings.loadConfFile(); settings.loadConfFile();

View file

@ -65,6 +65,7 @@ template<class N> N getIntArg(const string & opt,
return n * multiplier; return n * multiplier;
} }
/* Show the manual page for the specified program. */ /* Show the manual page for the specified program. */
void showManPage(const string & name); void showManPage(const string & name);
@ -99,4 +100,8 @@ struct PrintFreed
}; };
/* Install a SIGSEGV handler to detect stack overflows. */
void detectStackOverflow();
} }

View file

@ -0,0 +1,347 @@
#include "binary-cache-store.hh"
#include "sync.hh"
#include "archive.hh"
#include "compression.hh"
#include "derivations.hh"
#include "globals.hh"
#include "nar-info.hh"
#include "worker-protocol.hh"
#include <chrono>
namespace nix {
BinaryCacheStore::BinaryCacheStore(std::shared_ptr<Store> localStore,
const Path & secretKeyFile, const Path & publicKeyFile)
: localStore(localStore)
{
if (secretKeyFile != "")
secretKey = std::unique_ptr<SecretKey>(new SecretKey(readFile(secretKeyFile)));
if (publicKeyFile != "") {
publicKeys = std::unique_ptr<PublicKeys>(new PublicKeys);
auto key = PublicKey(readFile(publicKeyFile));
publicKeys->emplace(key.name, key);
}
StringSink sink;
sink << narVersionMagic1;
narMagic = sink.s;
}
void BinaryCacheStore::init()
{
std::string cacheInfoFile = "nix-cache-info";
if (!fileExists(cacheInfoFile))
upsertFile(cacheInfoFile, "StoreDir: " + settings.nixStore + "\n");
}
void BinaryCacheStore::notImpl()
{
throw Error("operation not implemented for binary cache stores");
}
const BinaryCacheStore::Stats & BinaryCacheStore::getStats()
{
return stats;
}
Path BinaryCacheStore::narInfoFileFor(const Path & storePath)
{
assertStorePath(storePath);
return storePathToHash(storePath) + ".narinfo";
}
void BinaryCacheStore::addToCache(const ValidPathInfo & info,
const string & nar)
{
auto narInfoFile = narInfoFileFor(info.path);
if (fileExists(narInfoFile)) return;
assert(nar.compare(0, narMagic.size(), narMagic) == 0);
auto narInfo = make_ref<NarInfo>(info);
narInfo->narSize = nar.size();
narInfo->narHash = hashString(htSHA256, nar);
if (info.narHash.type != htUnknown && info.narHash != narInfo->narHash)
throw Error(format("refusing to copy corrupted path %1% to binary cache") % info.path);
/* Compress the NAR. */
narInfo->compression = "xz";
auto now1 = std::chrono::steady_clock::now();
string narXz = compressXZ(nar);
auto now2 = std::chrono::steady_clock::now();
narInfo->fileHash = hashString(htSHA256, narXz);
narInfo->fileSize = narXz.size();
auto duration = std::chrono::duration_cast<std::chrono::milliseconds>(now2 - now1).count();
printMsg(lvlTalkative, format("copying path %1% (%2% bytes, compressed %3$.1f%% in %4% ms) to binary cache")
% narInfo->path % narInfo->narSize
% ((1.0 - (double) narXz.size() / nar.size()) * 100.0)
% duration);
/* Atomically write the NAR file. */
narInfo->url = "nar/" + printHash32(narInfo->fileHash) + ".nar.xz";
if (!fileExists(narInfo->url)) {
stats.narWrite++;
upsertFile(narInfo->url, narXz);
} else
stats.narWriteAverted++;
stats.narWriteBytes += nar.size();
stats.narWriteCompressedBytes += narXz.size();
stats.narWriteCompressionTimeMs += duration;
/* Atomically write the NAR info file.*/
if (secretKey) narInfo->sign(*secretKey);
upsertFile(narInfoFile, narInfo->to_string());
{
auto state_(state.lock());
state_->narInfoCache.upsert(narInfo->path, narInfo);
stats.narInfoCacheSize = state_->narInfoCache.size();
}
stats.narInfoWrite++;
}
NarInfo BinaryCacheStore::readNarInfo(const Path & storePath)
{
{
auto state_(state.lock());
auto res = state_->narInfoCache.get(storePath);
if (res) {
stats.narInfoReadAverted++;
return **res;
}
}
auto narInfoFile = narInfoFileFor(storePath);
auto narInfo = make_ref<NarInfo>(getFile(narInfoFile), narInfoFile);
assert(narInfo->path == storePath);
stats.narInfoRead++;
if (publicKeys) {
if (!narInfo->checkSignature(*publicKeys))
throw Error(format("invalid signature on NAR info file %1%") % narInfoFile);
}
{
auto state_(state.lock());
state_->narInfoCache.upsert(storePath, narInfo);
stats.narInfoCacheSize = state_->narInfoCache.size();
}
return *narInfo;
}
bool BinaryCacheStore::isValidPath(const Path & storePath)
{
return fileExists(narInfoFileFor(storePath));
}
void BinaryCacheStore::exportPath(const Path & storePath, bool sign, Sink & sink)
{
assert(!sign);
auto res = readNarInfo(storePath);
auto nar = getFile(res.url);
stats.narRead++;
stats.narReadCompressedBytes += nar.size();
/* Decompress the NAR. FIXME: would be nice to have the remote
side do this. */
if (res.compression == "none")
;
else if (res.compression == "xz")
nar = decompressXZ(nar);
else
throw Error(format("unknown NAR compression type %1%") % nar);
stats.narReadBytes += nar.size();
printMsg(lvlTalkative, format("exporting path %1% (%2% bytes)") % storePath % nar.size());
assert(nar.size() % 8 == 0);
sink((unsigned char *) nar.c_str(), nar.size());
// FIXME: check integrity of NAR.
sink << exportMagic << storePath << res.references << res.deriver << 0;
}
Paths BinaryCacheStore::importPaths(bool requireSignature, Source & source)
{
assert(!requireSignature);
Paths res;
while (true) {
unsigned long long n = readLongLong(source);
if (n == 0) break;
if (n != 1) throw Error("input doesn't look like something created by nix-store --export");
res.push_back(importPath(source));
}
return res;
}
struct TeeSource : Source
{
Source & readSource;
std::string data;
TeeSource(Source & readSource) : readSource(readSource)
{
}
size_t read(unsigned char * data, size_t len)
{
size_t n = readSource.read(data, len);
this->data.append((char *) data, n);
return n;
}
};
struct NopSink : ParseSink
{
};
Path BinaryCacheStore::importPath(Source & source)
{
/* FIXME: some cut&paste of LocalStore::importPath(). */
/* Extract the NAR from the source. */
TeeSource tee(source);
NopSink sink;
parseDump(sink, tee);
uint32_t magic = readInt(source);
if (magic != exportMagic)
throw Error("Nix archive cannot be imported; wrong format");
ValidPathInfo info;
info.path = readStorePath(source);
info.references = readStorePaths<PathSet>(source);
readString(source); // deriver, don't care
bool haveSignature = readInt(source) == 1;
assert(!haveSignature);
addToCache(info, tee.data);
return info.path;
}
ValidPathInfo BinaryCacheStore::queryPathInfo(const Path & storePath)
{
return ValidPathInfo(readNarInfo(storePath));
}
void BinaryCacheStore::querySubstitutablePathInfos(const PathSet & paths,
SubstitutablePathInfos & infos)
{
PathSet left;
if (!localStore) return;
for (auto & storePath : paths) {
if (!localStore->isValidPath(storePath)) {
left.insert(storePath);
continue;
}
ValidPathInfo info = localStore->queryPathInfo(storePath);
SubstitutablePathInfo sub;
sub.references = info.references;
sub.downloadSize = 0;
sub.narSize = info.narSize;
infos.emplace(storePath, sub);
}
if (settings.useSubstitutes)
localStore->querySubstitutablePathInfos(left, infos);
}
Path BinaryCacheStore::addToStore(const string & name, const Path & srcPath,
bool recursive, HashType hashAlgo, PathFilter & filter, bool repair)
{
// FIXME: some cut&paste from LocalStore::addToStore().
/* Read the whole path into memory. This is not a very scalable
method for very large paths, but `copyPath' is mainly used for
small files. */
StringSink sink;
Hash h;
if (recursive) {
dumpPath(srcPath, sink, filter);
h = hashString(hashAlgo, sink.s);
} else {
auto s = readFile(srcPath);
dumpString(s, sink);
h = hashString(hashAlgo, s);
}
ValidPathInfo info;
info.path = makeFixedOutputPath(recursive, hashAlgo, h, name);
if (repair || !isValidPath(info.path))
addToCache(info, sink.s);
return info.path;
}
Path BinaryCacheStore::addTextToStore(const string & name, const string & s,
const PathSet & references, bool repair)
{
ValidPathInfo info;
info.path = computeStorePathForText(name, s, references);
info.references = references;
if (repair || !isValidPath(info.path)) {
StringSink sink;
dumpString(s, sink);
addToCache(info, sink.s);
}
return info.path;
}
void BinaryCacheStore::buildPaths(const PathSet & paths, BuildMode buildMode)
{
for (auto & storePath : paths) {
assert(!isDerivation(storePath));
if (isValidPath(storePath)) continue;
if (!localStore)
throw Error(format("don't know how to realise path %1% in a binary cache") % storePath);
localStore->addTempRoot(storePath);
if (!localStore->isValidPath(storePath))
localStore->ensurePath(storePath);
ValidPathInfo info = localStore->queryPathInfo(storePath);
for (auto & ref : info.references)
if (ref != storePath)
ensurePath(ref);
StringSink sink;
dumpPath(storePath, sink);
addToCache(info, sink.s);
}
}
void BinaryCacheStore::ensurePath(const Path & path)
{
buildPaths({path});
}
}

View file

@ -0,0 +1,172 @@
#pragma once
#include "crypto.hh"
#include "store-api.hh"
#include "lru-cache.hh"
#include "sync.hh"
#include "pool.hh"
#include <atomic>
namespace nix {
struct NarInfo;
class BinaryCacheStore : public Store
{
private:
std::unique_ptr<SecretKey> secretKey;
std::unique_ptr<PublicKeys> publicKeys;
std::shared_ptr<Store> localStore;
struct State
{
LRUCache<Path, ref<NarInfo>> narInfoCache{32 * 1024};
};
Sync<State> state;
protected:
BinaryCacheStore(std::shared_ptr<Store> localStore,
const Path & secretKeyFile, const Path & publicKeyFile);
[[noreturn]] void notImpl();
virtual bool fileExists(const std::string & path) = 0;
virtual void upsertFile(const std::string & path, const std::string & data) = 0;
virtual std::string getFile(const std::string & path) = 0;
public:
virtual void init();
struct Stats
{
std::atomic<uint64_t> narInfoRead{0};
std::atomic<uint64_t> narInfoReadAverted{0};
std::atomic<uint64_t> narInfoWrite{0};
std::atomic<uint64_t> narInfoCacheSize{0};
std::atomic<uint64_t> narRead{0};
std::atomic<uint64_t> narReadBytes{0};
std::atomic<uint64_t> narReadCompressedBytes{0};
std::atomic<uint64_t> narWrite{0};
std::atomic<uint64_t> narWriteAverted{0};
std::atomic<uint64_t> narWriteBytes{0};
std::atomic<uint64_t> narWriteCompressedBytes{0};
std::atomic<uint64_t> narWriteCompressionTimeMs{0};
};
const Stats & getStats();
private:
Stats stats;
std::string narMagic;
std::string narInfoFileFor(const Path & storePath);
void addToCache(const ValidPathInfo & info, const string & nar);
protected:
NarInfo readNarInfo(const Path & storePath);
public:
bool isValidPath(const Path & path) override;
PathSet queryValidPaths(const PathSet & paths) override
{ notImpl(); }
PathSet queryAllValidPaths() override
{ notImpl(); }
ValidPathInfo queryPathInfo(const Path & path) override;
Hash queryPathHash(const Path & path) override
{ notImpl(); }
void queryReferrers(const Path & path,
PathSet & referrers) override
{ notImpl(); }
Path queryDeriver(const Path & path) override
{ return ""; }
PathSet queryValidDerivers(const Path & path) override
{ return {}; }
PathSet queryDerivationOutputs(const Path & path) override
{ notImpl(); }
StringSet queryDerivationOutputNames(const Path & path) override
{ notImpl(); }
Path queryPathFromHashPart(const string & hashPart) override
{ notImpl(); }
PathSet querySubstitutablePaths(const PathSet & paths) override
{ return {}; }
void querySubstitutablePathInfos(const PathSet & paths,
SubstitutablePathInfos & infos) override;
Path addToStore(const string & name, const Path & srcPath,
bool recursive = true, HashType hashAlgo = htSHA256,
PathFilter & filter = defaultPathFilter, bool repair = false) override;
Path addTextToStore(const string & name, const string & s,
const PathSet & references, bool repair = false) override;
void exportPath(const Path & path, bool sign,
Sink & sink) override;
Paths importPaths(bool requireSignature, Source & source) override;
Path importPath(Source & source);
void buildPaths(const PathSet & paths, BuildMode buildMode = bmNormal) override;
BuildResult buildDerivation(const Path & drvPath, const BasicDerivation & drv,
BuildMode buildMode = bmNormal) override
{ notImpl(); }
void ensurePath(const Path & path) override;
void addTempRoot(const Path & path) override
{ notImpl(); }
void addIndirectRoot(const Path & path) override
{ notImpl(); }
void syncWithGC() override
{ }
Roots findRoots() override
{ notImpl(); }
void collectGarbage(const GCOptions & options, GCResults & results) override
{ notImpl(); }
PathSet queryFailedPaths() override
{ return {}; }
void clearFailedPaths(const PathSet & paths) override
{ }
void optimiseStore() override
{ }
bool verifyStore(bool checkContents, bool repair) override
{ return true; }
};
}
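For orientation, a hedged sketch of what a concrete backend could look like, assuming only the three virtual file operations declared above and the readFile/writeFile/pathExists helpers from util.hh; the class name and directory layout are made up and this is not part of the commit:

    #include "binary-cache-store.hh"
    #include "util.hh"

    namespace nix {

    /* A binary cache backed by a plain local directory: .narinfo and NAR
       files are stored as ordinary files under 'dir'. */
    class LocalDirBinaryCacheStore : public BinaryCacheStore
    {
        Path dir;

    public:
        LocalDirBinaryCacheStore(std::shared_ptr<Store> localStore,
            const Path & secretKeyFile, const Path & publicKeyFile,
            const Path & dir)
            : BinaryCacheStore(localStore, secretKeyFile, publicKeyFile)
            , dir(dir)
        { }

    protected:
        bool fileExists(const std::string & path) override
        {
            return pathExists(dir + "/" + path);
        }

        void upsertFile(const std::string & path, const std::string & data) override
        {
            /* Not atomic; a production store would write to a temporary
               file and rename it into place. */
            writeFile(dir + "/" + path, data);
        }

        std::string getFile(const std::string & path) override
        {
            return readFile(dir + "/" + path);
        }
    };

    }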

View file

@ -1310,7 +1310,6 @@ void DerivationGoal::tryToBuild()
for (auto & i : drv->outputs) { for (auto & i : drv->outputs) {
Path path = i.second.path; Path path = i.second.path;
if (worker.store.isValidPath(path)) continue; if (worker.store.isValidPath(path)) continue;
if (!pathExists(path)) continue;
debug(format("removing invalid path %1%") % path); debug(format("removing invalid path %1%") % path);
deletePath(path); deletePath(path);
} }
@ -1390,8 +1389,7 @@ void replaceValidPath(const Path & storePath, const Path tmpPath)
rename(storePath.c_str(), oldPath.c_str()); rename(storePath.c_str(), oldPath.c_str());
if (rename(tmpPath.c_str(), storePath.c_str()) == -1) if (rename(tmpPath.c_str(), storePath.c_str()) == -1)
throw SysError(format("moving %1% to %2%") % tmpPath % storePath); throw SysError(format("moving %1% to %2%") % tmpPath % storePath);
if (pathExists(oldPath)) deletePath(oldPath);
deletePath(oldPath);
} }
@ -1490,7 +1488,7 @@ void DerivationGoal::buildDone()
/* Delete unused redirected outputs (when doing hash rewriting). */ /* Delete unused redirected outputs (when doing hash rewriting). */
for (auto & i : redirectedOutputs) for (auto & i : redirectedOutputs)
if (pathExists(i.second)) deletePath(i.second); deletePath(i.second);
/* Delete the chroot (if we were using one). */ /* Delete the chroot (if we were using one). */
autoDelChroot.reset(); /* this runs the destructor */ autoDelChroot.reset(); /* this runs the destructor */
@ -1939,7 +1937,7 @@ void DerivationGoal::startBuilder()
to ensure that we can create hard-links to non-directory to ensure that we can create hard-links to non-directory
inputs in the fake Nix store in the chroot (see below). */ inputs in the fake Nix store in the chroot (see below). */
chrootRootDir = drvPath + ".chroot"; chrootRootDir = drvPath + ".chroot";
if (pathExists(chrootRootDir)) deletePath(chrootRootDir); deletePath(chrootRootDir);
/* Clean up the chroot directory automatically. */ /* Clean up the chroot directory automatically. */
autoDelChroot = std::make_shared<AutoDelete>(chrootRootDir); autoDelChroot = std::make_shared<AutoDelete>(chrootRootDir);
@ -2514,7 +2512,7 @@ void DerivationGoal::runChild()
debug(sandboxProfile); debug(sandboxProfile);
Path sandboxFile = drvPath + ".sb"; Path sandboxFile = drvPath + ".sb";
if (pathExists(sandboxFile)) deletePath(sandboxFile); deletePath(sandboxFile);
autoDelSandbox.reset(sandboxFile, false); autoDelSandbox.reset(sandboxFile, false);
writeFile(sandboxFile, sandboxProfile); writeFile(sandboxFile, sandboxProfile);
@ -2706,8 +2704,7 @@ void DerivationGoal::registerOutputs()
return; return;
if (actualPath != dest) { if (actualPath != dest) {
PathLocks outputLocks({dest}); PathLocks outputLocks({dest});
if (pathExists(dest)) deletePath(dest);
deletePath(dest);
if (rename(actualPath.c_str(), dest.c_str()) == -1) if (rename(actualPath.c_str(), dest.c_str()) == -1)
throw SysError(format("moving %1% to %2%") % actualPath % dest); throw SysError(format("moving %1% to %2%") % actualPath % dest);
} }
@ -2735,10 +2732,10 @@ void DerivationGoal::registerOutputs()
if (buildMode == bmCheck) { if (buildMode == bmCheck) {
if (!worker.store.isValidPath(path)) continue; if (!worker.store.isValidPath(path)) continue;
ValidPathInfo info = worker.store.queryPathInfo(path); ValidPathInfo info = worker.store.queryPathInfo(path);
if (hash.first != info.hash) { if (hash.first != info.narHash) {
if (settings.keepFailed) { if (settings.keepFailed) {
Path dst = path + checkSuffix; Path dst = path + checkSuffix;
if (pathExists(dst)) deletePath(dst); deletePath(dst);
if (rename(actualPath.c_str(), dst.c_str())) if (rename(actualPath.c_str(), dst.c_str()))
throw SysError(format("renaming %1% to %2%") % actualPath % dst); throw SysError(format("renaming %1% to %2%") % actualPath % dst);
throw Error(format("derivation %1% may not be deterministic: output %2% differs from %3%") throw Error(format("derivation %1% may not be deterministic: output %2% differs from %3%")
@ -2799,7 +2796,7 @@ void DerivationGoal::registerOutputs()
ValidPathInfo info; ValidPathInfo info;
info.path = path; info.path = path;
info.hash = hash.first; info.narHash = hash.first;
info.narSize = hash.second; info.narSize = hash.second;
info.references = references; info.references = references;
info.deriver = drvPath; info.deriver = drvPath;
@ -2830,7 +2827,7 @@ void DerivationGoal::registerOutputs()
if (settings.keepFailed) { if (settings.keepFailed) {
for (auto & i : drv->outputs) { for (auto & i : drv->outputs) {
Path prev = i.second.path + checkSuffix; Path prev = i.second.path + checkSuffix;
if (pathExists(prev)) deletePath(prev); deletePath(prev);
if (curRound < nrRounds) { if (curRound < nrRounds) {
Path dst = i.second.path + checkSuffix; Path dst = i.second.path + checkSuffix;
if (rename(i.second.path.c_str(), dst.c_str())) if (rename(i.second.path.c_str(), dst.c_str()))
@ -2998,7 +2995,7 @@ Path DerivationGoal::addHashRewrite(const Path & path)
string h1 = string(path, settings.nixStore.size() + 1, 32); string h1 = string(path, settings.nixStore.size() + 1, 32);
string h2 = string(printHash32(hashString(htSHA256, "rewrite:" + drvPath + ":" + path)), 0, 32); string h2 = string(printHash32(hashString(htSHA256, "rewrite:" + drvPath + ":" + path)), 0, 32);
Path p = settings.nixStore + "/" + h2 + string(path, settings.nixStore.size() + 33); Path p = settings.nixStore + "/" + h2 + string(path, settings.nixStore.size() + 33);
if (pathExists(p)) deletePath(p); deletePath(p);
assert(path.size() == p.size()); assert(path.size() == p.size());
rewritesToTmp[h1] = h2; rewritesToTmp[h1] = h2;
rewritesFromTmp[h2] = h1; rewritesFromTmp[h2] = h1;
@ -3259,8 +3256,7 @@ void SubstitutionGoal::tryToRun()
destPath = repair ? storePath + ".tmp" : storePath; destPath = repair ? storePath + ".tmp" : storePath;
/* Remove the (stale) output path if it exists. */ /* Remove the (stale) output path if it exists. */
if (pathExists(destPath)) deletePath(destPath);
deletePath(destPath);
worker.store.setSubstituterEnv(); worker.store.setSubstituterEnv();
@ -3369,7 +3365,7 @@ void SubstitutionGoal::finished()
ValidPathInfo info2; ValidPathInfo info2;
info2.path = storePath; info2.path = storePath;
info2.hash = hash.first; info2.narHash = hash.first;
info2.narSize = hash.second; info2.narSize = hash.second;
info2.references = info.references; info2.references = info.references;
info2.deriver = info.deriver; info2.deriver = info.deriver;
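
The recurring change in this file from "if (pathExists(p)) deletePath(p);" to a bare "deletePath(p);" assumes that deletePath() now treats a missing path as a no-op (the corresponding util change is outside this excerpt). A minimal sketch of such an idempotent recursive delete, using plain POSIX calls; the helper name is made up:

#include <dirent.h>
#include <sys/stat.h>
#include <unistd.h>
#include <cerrno>
#include <stdexcept>
#include <string>

/* Recursively delete 'path'; silently succeed if it does not exist. */
static void deletePathIdempotent(const std::string & path)
{
    struct stat st;
    if (lstat(path.c_str(), &st) == -1) {
        if (errno == ENOENT) return; /* already gone, nothing to do */
        throw std::runtime_error("cannot stat " + path);
    }
    if (S_ISDIR(st.st_mode)) {
        if (DIR * dir = opendir(path.c_str())) {
            while (struct dirent * e = readdir(dir)) {
                std::string name = e->d_name;
                if (name != "." && name != "..")
                    deletePathIdempotent(path + "/" + name);
            }
            closedir(dir);
        }
        if (rmdir(path.c_str()) == -1 && errno != ENOENT)
            throw std::runtime_error("cannot remove directory " + path);
    } else if (unlink(path.c_str()) == -1 && errno != ENOENT) {
        throw std::runtime_error("cannot unlink " + path);
    }
}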

src/libstore/crypto.cc (new file, 88 lines added)

@ -0,0 +1,88 @@
#include "crypto.hh"
#include "util.hh"
#if HAVE_SODIUM
#include <sodium.h>
#endif
namespace nix {
static std::pair<std::string, std::string> split(const string & s)
{
size_t colon = s.find(':');
if (colon == std::string::npos || colon == 0)
return {"", ""};
return {std::string(s, 0, colon), std::string(s, colon + 1)};
}
Key::Key(const string & s)
{
auto ss = split(s);
name = ss.first;
key = ss.second;
if (name == "" || key == "")
throw Error("secret key is corrupt");
key = base64Decode(key);
}
SecretKey::SecretKey(const string & s)
: Key(s)
{
#if HAVE_SODIUM
if (key.size() != crypto_sign_SECRETKEYBYTES)
throw Error("secret key is not valid");
#endif
}
[[noreturn]] static void noSodium()
{
throw Error("Nix was not compiled with libsodium, required for signed binary cache support");
}
std::string SecretKey::signDetached(const std::string & data) const
{
#if HAVE_SODIUM
unsigned char sig[crypto_sign_BYTES];
unsigned long long sigLen;
crypto_sign_detached(sig, &sigLen, (unsigned char *) data.data(), data.size(),
(unsigned char *) key.data());
return name + ":" + base64Encode(std::string((char *) sig, sigLen));
#else
noSodium();
#endif
}
PublicKey::PublicKey(const string & s)
: Key(s)
{
#if HAVE_SODIUM
if (key.size() != crypto_sign_PUBLICKEYBYTES)
throw Error("public key is not valid");
#endif
}
bool verifyDetached(const std::string & data, const std::string & sig,
const PublicKeys & publicKeys)
{
#if HAVE_SODIUM
auto ss = split(sig);
auto key = publicKeys.find(ss.first);
if (key == publicKeys.end()) return false;
auto sig2 = base64Decode(ss.second);
if (sig2.size() != crypto_sign_BYTES)
throw Error("signature is not valid");
return crypto_sign_verify_detached((unsigned char *) sig2.data(),
(unsigned char *) data.data(), data.size(),
(unsigned char *) key->second.key.data()) == 0;
#else
noSodium();
#endif
}
}

src/libstore/crypto.hh (new file, 40 lines added)

@ -0,0 +1,40 @@
#pragma once
#include "types.hh"
#include <map>
namespace nix {
struct Key
{
std::string name;
std::string key;
/* Construct Key from a string in the format
<name>:<key-in-base64>. */
Key(const std::string & s);
};
struct SecretKey : Key
{
SecretKey(const std::string & s);
/* Return a detached signature of the given string. */
std::string signDetached(const std::string & s) const;
};
struct PublicKey : Key
{
PublicKey(const std::string & data);
};
typedef std::map<std::string, PublicKey> PublicKeys;
/* Return true iff sig is a correct signature over data using one
of the given public keys. */
bool verifyDetached(const std::string & data, const std::string & sig,
const PublicKeys & publicKeys);
}
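
A rough sketch of how these classes fit together, signing a string with a named secret key and verifying it against a set of trusted public keys; the key strings are placeholders, not working keys, and key generation itself is outside this API:

#include "crypto.hh"
#include <iostream>
#include <string>

void signingExample()
{
    /* Keys are given as "<name>:<base64>"; both values below are placeholders. */
    nix::SecretKey secretKey("cache.example.org-1:<base64-encoded secret key>");
    nix::PublicKey publicKey("cache.example.org-1:<base64-encoded public key>");

    nix::PublicKeys trusted;
    trusted.emplace(publicKey.name, publicKey);

    std::string data = "1;/nix/store/...;sha256:...;12345;..."; /* e.g. a NarInfo fingerprint */
    std::string sig = secretKey.signDetached(data); /* "cache.example.org-1:<base64 signature>" */

    std::cout << (nix::verifyDetached(data, sig, trusted) ? "good" : "bad") << std::endl;
}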

View file

@ -114,6 +114,8 @@ struct Curl
curl_easy_setopt(curl, CURLOPT_PROGRESSFUNCTION, progressCallback_); curl_easy_setopt(curl, CURLOPT_PROGRESSFUNCTION, progressCallback_);
curl_easy_setopt(curl, CURLOPT_PROGRESSDATA, (void *) &curl); curl_easy_setopt(curl, CURLOPT_PROGRESSDATA, (void *) &curl);
curl_easy_setopt(curl, CURLOPT_NOPROGRESS, 0); curl_easy_setopt(curl, CURLOPT_NOPROGRESS, 0);
curl_easy_setopt(curl, CURLOPT_NOSIGNAL, 1);
} }
~Curl() ~Curl()

View file

@ -83,7 +83,7 @@ void LocalStore::addIndirectRoot(const Path & path)
} }
Path addPermRoot(ref<Store> store, const Path & _storePath, Path Store::addPermRoot(const Path & _storePath,
const Path & _gcRoot, bool indirect, bool allowOutsideRootsDir) const Path & _gcRoot, bool indirect, bool allowOutsideRootsDir)
{ {
Path storePath(canonPath(_storePath)); Path storePath(canonPath(_storePath));
@ -101,7 +101,7 @@ Path addPermRoot(ref<Store> store, const Path & _storePath,
if (pathExists(gcRoot) && (!isLink(gcRoot) || !isInStore(readLink(gcRoot)))) if (pathExists(gcRoot) && (!isLink(gcRoot) || !isInStore(readLink(gcRoot))))
throw Error(format("cannot create symlink %1%; already exists") % gcRoot); throw Error(format("cannot create symlink %1%; already exists") % gcRoot);
makeSymlink(gcRoot, storePath); makeSymlink(gcRoot, storePath);
store->addIndirectRoot(gcRoot); addIndirectRoot(gcRoot);
} }
else { else {
@ -127,7 +127,7 @@ Path addPermRoot(ref<Store> store, const Path & _storePath,
check if the root is in a directory in or linked from the check if the root is in a directory in or linked from the
gcroots directory. */ gcroots directory. */
if (settings.checkRootReachability) { if (settings.checkRootReachability) {
Roots roots = store->findRoots(); Roots roots = findRoots();
if (roots.find(gcRoot) == roots.end()) if (roots.find(gcRoot) == roots.end())
printMsg(lvlError, printMsg(lvlError,
format( format(
@ -139,7 +139,7 @@ Path addPermRoot(ref<Store> store, const Path & _storePath,
/* Grab the global GC root, causing us to block while a GC is in /* Grab the global GC root, causing us to block while a GC is in
progress. This prevents the set of permanent roots from progress. This prevents the set of permanent roots from
increasing while a GC is in progress. */ increasing while a GC is in progress. */
store->syncWithGC(); syncWithGC();
return gcRoot; return gcRoot;
} }
@ -608,6 +608,9 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results)
state.shouldDelete = options.action == GCOptions::gcDeleteDead || options.action == GCOptions::gcDeleteSpecific; state.shouldDelete = options.action == GCOptions::gcDeleteDead || options.action == GCOptions::gcDeleteSpecific;
if (state.shouldDelete)
deletePath(reservedPath);
/* Acquire the global GC root. This prevents /* Acquire the global GC root. This prevents
a) New roots from being added. a) New roots from being added.
b) Processes from creating new temporary root files. */ b) Processes from creating new temporary root files. */
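
addPermRoot() is now a method on Store, alongside the existing addIndirectRoot() and syncWithGC(), instead of a free function taking a ref<Store>. A small sketch of registering an indirect root for a build output; the paths are illustrative:

#include "store-api.hh"
#include "util.hh"

void pinBuildOutput(nix::ref<nix::Store> store, const nix::Path & outPath)
{
    /* Create ./result -> outPath and register it as an indirect GC root, so
       the output is kept alive for as long as the symlink exists. */
    nix::Path root = nix::absPath("./result"); /* illustrative location */
    store->addPermRoot(outPath, root, true /* indirect */);
}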

View file

@ -0,0 +1,44 @@
#include "local-binary-cache-store.hh"
namespace nix {
LocalBinaryCacheStore::LocalBinaryCacheStore(std::shared_ptr<Store> localStore,
const Path & secretKeyFile, const Path & publicKeyFile,
const Path & binaryCacheDir)
: BinaryCacheStore(localStore, secretKeyFile, publicKeyFile)
, binaryCacheDir(binaryCacheDir)
{
}
void LocalBinaryCacheStore::init()
{
createDirs(binaryCacheDir + "/nar");
BinaryCacheStore::init();
}
static void atomicWrite(const Path & path, const std::string & s)
{
Path tmp = path + ".tmp." + std::to_string(getpid());
AutoDelete del(tmp, false);
writeFile(tmp, s);
if (rename(tmp.c_str(), path.c_str()))
throw SysError(format("renaming %1% to %2%") % tmp % path);
del.cancel();
}
bool LocalBinaryCacheStore::fileExists(const std::string & path)
{
return pathExists(binaryCacheDir + "/" + path);
}
void LocalBinaryCacheStore::upsertFile(const std::string & path, const std::string & data)
{
atomicWrite(binaryCacheDir + "/" + path, data);
}
std::string LocalBinaryCacheStore::getFile(const std::string & path)
{
return readFile(binaryCacheDir + "/" + path);
}
}

View file

@ -0,0 +1,31 @@
#pragma once
#include "binary-cache-store.hh"
namespace nix {
class LocalBinaryCacheStore : public BinaryCacheStore
{
private:
Path binaryCacheDir;
public:
LocalBinaryCacheStore(std::shared_ptr<Store> localStore,
const Path & secretKeyFile, const Path & publicKeyFile,
const Path & binaryCacheDir);
void init() override;
protected:
bool fileExists(const std::string & path) override;
void upsertFile(const std::string & path, const std::string & data) override;
std::string getFile(const std::string & path) override;
};
}

View file

@ -40,10 +40,7 @@ MakeError(SQLiteError, Error);
MakeError(SQLiteBusy, SQLiteError); MakeError(SQLiteBusy, SQLiteError);
static void throwSQLiteError(sqlite3 * db, const format & f) [[noreturn]] static void throwSQLiteError(sqlite3 * db, const format & f)
__attribute__ ((noreturn));
static void throwSQLiteError(sqlite3 * db, const format & f)
{ {
int err = sqlite3_errcode(db); int err = sqlite3_errcode(db);
if (err == SQLITE_BUSY || err == SQLITE_PROTOCOL) { if (err == SQLITE_BUSY || err == SQLITE_PROTOCOL) {
@ -219,8 +216,9 @@ void checkStoreNotSymlink()
} }
LocalStore::LocalStore(bool reserveSpace) LocalStore::LocalStore()
: didSetSubstituterEnv(false) : reservedPath(settings.nixDBPath + "/reserved")
, didSetSubstituterEnv(false)
{ {
schemaPath = settings.nixDBPath + "/schema"; schemaPath = settings.nixDBPath + "/schema";
@ -279,25 +277,20 @@ LocalStore::LocalStore(bool reserveSpace)
needed, we reserve some dummy space that we can free just needed, we reserve some dummy space that we can free just
before doing a garbage collection. */ before doing a garbage collection. */
try { try {
Path reservedPath = settings.nixDBPath + "/reserved"; struct stat st;
if (reserveSpace) { if (stat(reservedPath.c_str(), &st) == -1 ||
struct stat st; st.st_size != settings.reservedSize)
if (stat(reservedPath.c_str(), &st) == -1 || {
st.st_size != settings.reservedSize) AutoCloseFD fd = open(reservedPath.c_str(), O_WRONLY | O_CREAT, 0600);
{ int res = -1;
AutoCloseFD fd = open(reservedPath.c_str(), O_WRONLY | O_CREAT, 0600);
int res = -1;
#if HAVE_POSIX_FALLOCATE #if HAVE_POSIX_FALLOCATE
res = posix_fallocate(fd, 0, settings.reservedSize); res = posix_fallocate(fd, 0, settings.reservedSize);
#endif #endif
if (res == -1) { if (res == -1) {
writeFull(fd, string(settings.reservedSize, 'X')); writeFull(fd, string(settings.reservedSize, 'X'));
ftruncate(fd, settings.reservedSize); ftruncate(fd, settings.reservedSize);
}
} }
} }
else
deletePath(reservedPath);
} catch (SysError & e) { /* don't care about errors */ } catch (SysError & e) { /* don't care about errors */
} }
@ -694,7 +687,7 @@ unsigned long long LocalStore::addValidPath(const ValidPathInfo & info, bool che
{ {
SQLiteStmtUse use(stmtRegisterValidPath); SQLiteStmtUse use(stmtRegisterValidPath);
stmtRegisterValidPath.bind(info.path); stmtRegisterValidPath.bind(info.path);
stmtRegisterValidPath.bind("sha256:" + printHash(info.hash)); stmtRegisterValidPath.bind("sha256:" + printHash(info.narHash));
stmtRegisterValidPath.bind(info.registrationTime == 0 ? time(0) : info.registrationTime); stmtRegisterValidPath.bind(info.registrationTime == 0 ? time(0) : info.registrationTime);
if (info.deriver != "") if (info.deriver != "")
stmtRegisterValidPath.bind(info.deriver); stmtRegisterValidPath.bind(info.deriver);
@ -845,7 +838,7 @@ ValidPathInfo LocalStore::queryPathInfo(const Path & path)
const char * s = (const char *) sqlite3_column_text(stmtQueryPathInfo, 1); const char * s = (const char *) sqlite3_column_text(stmtQueryPathInfo, 1);
assert(s); assert(s);
info.hash = parseHashField(path, s); info.narHash = parseHashField(path, s);
info.registrationTime = sqlite3_column_int(stmtQueryPathInfo, 2); info.registrationTime = sqlite3_column_int(stmtQueryPathInfo, 2);
@ -883,7 +876,7 @@ void LocalStore::updatePathInfo(const ValidPathInfo & info)
stmtUpdatePathInfo.bind64(info.narSize); stmtUpdatePathInfo.bind64(info.narSize);
else else
stmtUpdatePathInfo.bind(); // null stmtUpdatePathInfo.bind(); // null
stmtUpdatePathInfo.bind("sha256:" + printHash(info.hash)); stmtUpdatePathInfo.bind("sha256:" + printHash(info.narHash));
stmtUpdatePathInfo.bind(info.path); stmtUpdatePathInfo.bind(info.path);
if (sqlite3_step(stmtUpdatePathInfo) != SQLITE_DONE) if (sqlite3_step(stmtUpdatePathInfo) != SQLITE_DONE)
throwSQLiteError(db, format("updating info of path %1% in database") % info.path); throwSQLiteError(db, format("updating info of path %1% in database") % info.path);
@ -953,14 +946,6 @@ PathSet LocalStore::queryAllValidPaths()
} }
void LocalStore::queryReferences(const Path & path,
PathSet & references)
{
ValidPathInfo info = queryPathInfo(path);
references.insert(info.references.begin(), info.references.end());
}
void LocalStore::queryReferrers_(const Path & path, PathSet & referrers) void LocalStore::queryReferrers_(const Path & path, PathSet & referrers)
{ {
SQLiteStmtUse use(stmtQueryReferrers); SQLiteStmtUse use(stmtQueryReferrers);
@ -1064,7 +1049,7 @@ StringSet LocalStore::queryDerivationOutputNames(const Path & path)
Path LocalStore::queryPathFromHashPart(const string & hashPart) Path LocalStore::queryPathFromHashPart(const string & hashPart)
{ {
if (hashPart.size() != 32) throw Error("invalid hash part"); if (hashPart.size() != storePathHashLen) throw Error("invalid hash part");
Path prefix = settings.nixStore + "/" + hashPart; Path prefix = settings.nixStore + "/" + hashPart;
@ -1282,7 +1267,7 @@ void LocalStore::querySubstitutablePathInfos(const PathSet & paths,
Hash LocalStore::queryPathHash(const Path & path) Hash LocalStore::queryPathHash(const Path & path)
{ {
return queryPathInfo(path).hash; return queryPathInfo(path).narHash;
} }
@ -1306,7 +1291,7 @@ void LocalStore::registerValidPaths(const ValidPathInfos & infos)
PathSet paths; PathSet paths;
for (auto & i : infos) { for (auto & i : infos) {
assert(i.hash.type == htSHA256); assert(i.narHash.type == htSHA256);
if (isValidPath_(i.path)) if (isValidPath_(i.path))
updatePathInfo(i); updatePathInfo(i);
else else
@ -1380,7 +1365,7 @@ Path LocalStore::addToStoreFromDump(const string & dump, const string & name,
if (repair || !isValidPath(dstPath)) { if (repair || !isValidPath(dstPath)) {
if (pathExists(dstPath)) deletePath(dstPath); deletePath(dstPath);
if (recursive) { if (recursive) {
StringSource source(dump); StringSource source(dump);
@ -1405,7 +1390,7 @@ Path LocalStore::addToStoreFromDump(const string & dump, const string & name,
ValidPathInfo info; ValidPathInfo info;
info.path = dstPath; info.path = dstPath;
info.hash = hash.first; info.narHash = hash.first;
info.narSize = hash.second; info.narSize = hash.second;
registerValidPath(info); registerValidPath(info);
} }
@ -1449,20 +1434,22 @@ Path LocalStore::addTextToStore(const string & name, const string & s,
if (repair || !isValidPath(dstPath)) { if (repair || !isValidPath(dstPath)) {
if (pathExists(dstPath)) deletePath(dstPath); deletePath(dstPath);
writeFile(dstPath, s); writeFile(dstPath, s);
canonicalisePathMetaData(dstPath, -1); canonicalisePathMetaData(dstPath, -1);
HashResult hash = hashPath(htSHA256, dstPath); StringSink sink;
dumpString(s, sink);
auto hash = hashString(htSHA256, sink.s);
optimisePath(dstPath); optimisePath(dstPath);
ValidPathInfo info; ValidPathInfo info;
info.path = dstPath; info.path = dstPath;
info.hash = hash.first; info.narHash = hash;
info.narSize = hash.second; info.narSize = sink.s.size();
info.references = references; info.references = references;
registerValidPath(info); registerValidPath(info);
} }
@ -1493,9 +1480,6 @@ struct HashAndWriteSink : Sink
}; };
#define EXPORT_MAGIC 0x4558494e
static void checkSecrecy(const Path & path) static void checkSecrecy(const Path & path)
{ {
struct stat st; struct stat st;
@ -1532,7 +1516,7 @@ void LocalStore::exportPath(const Path & path, bool sign,
PathSet references; PathSet references;
queryReferences(path, references); queryReferences(path, references);
hashAndWriteSink << EXPORT_MAGIC << path << references << queryDeriver(path); hashAndWriteSink << exportMagic << path << references << queryDeriver(path);
if (sign) { if (sign) {
Hash hash = hashAndWriteSink.currentHash(); Hash hash = hashAndWriteSink.currentHash();
@ -1608,8 +1592,8 @@ Path LocalStore::importPath(bool requireSignature, Source & source)
restorePath(unpacked, hashAndReadSource); restorePath(unpacked, hashAndReadSource);
unsigned int magic = readInt(hashAndReadSource); uint32_t magic = readInt(hashAndReadSource);
if (magic != EXPORT_MAGIC) if (magic != exportMagic)
throw Error("Nix archive cannot be imported; wrong format"); throw Error("Nix archive cannot be imported; wrong format");
Path dstPath = readStorePath(hashAndReadSource); Path dstPath = readStorePath(hashAndReadSource);
@ -1675,7 +1659,7 @@ Path LocalStore::importPath(bool requireSignature, Source & source)
if (!isValidPath(dstPath)) { if (!isValidPath(dstPath)) {
if (pathExists(dstPath)) deletePath(dstPath); deletePath(dstPath);
if (rename(unpacked.c_str(), dstPath.c_str()) == -1) if (rename(unpacked.c_str(), dstPath.c_str()) == -1)
throw SysError(format("cannot move %1% to %2%") throw SysError(format("cannot move %1% to %2%")
@ -1691,7 +1675,7 @@ Path LocalStore::importPath(bool requireSignature, Source & source)
ValidPathInfo info; ValidPathInfo info;
info.path = dstPath; info.path = dstPath;
info.hash = hash.first; info.narHash = hash.first;
info.narSize = hash.second; info.narSize = hash.second;
info.references = references; info.references = references;
info.deriver = deriver != "" && isValidPath(deriver) ? deriver : ""; info.deriver = deriver != "" && isValidPath(deriver) ? deriver : "";
@ -1775,21 +1759,21 @@ bool LocalStore::verifyStore(bool checkContents, bool repair)
/* Check the content hash (optionally - slow). */ /* Check the content hash (optionally - slow). */
printMsg(lvlTalkative, format("checking contents of %1%") % i); printMsg(lvlTalkative, format("checking contents of %1%") % i);
HashResult current = hashPath(info.hash.type, i); HashResult current = hashPath(info.narHash.type, i);
if (info.hash != nullHash && info.hash != current.first) { if (info.narHash != nullHash && info.narHash != current.first) {
printMsg(lvlError, format("path %1% was modified! " printMsg(lvlError, format("path %1% was modified! "
"expected hash %2%, got %3%") "expected hash %2%, got %3%")
% i % printHash(info.hash) % printHash(current.first)); % i % printHash(info.narHash) % printHash(current.first));
if (repair) repairPath(i); else errors = true; if (repair) repairPath(i); else errors = true;
} else { } else {
bool update = false; bool update = false;
/* Fill in missing hashes. */ /* Fill in missing hashes. */
if (info.hash == nullHash) { if (info.narHash == nullHash) {
printMsg(lvlError, format("fixing missing hash on %1%") % i); printMsg(lvlError, format("fixing missing hash on %1%") % i);
info.hash = current.first; info.narHash = current.first;
update = true; update = true;
} }
@ -1878,9 +1862,9 @@ bool LocalStore::pathContentsGood(const Path & path)
if (!pathExists(path)) if (!pathExists(path))
res = false; res = false;
else { else {
HashResult current = hashPath(info.hash.type, path); HashResult current = hashPath(info.narHash.type, path);
Hash nullHash(htSHA256); Hash nullHash(htSHA256);
res = info.hash == nullHash || info.hash == current.first; res = info.narHash == nullHash || info.narHash == current.first;
} }
pathContentsGoodCache[path] = res; pathContentsGoodCache[path] = res;
if (!res) printMsg(lvlError, format("path %1% is corrupted or missing!") % path); if (!res) printMsg(lvlError, format("path %1% is corrupted or missing!") % path);
@ -1932,7 +1916,7 @@ ValidPathInfo LocalStore::queryPathInfoOld(const Path & path)
} else if (name == "Deriver") { } else if (name == "Deriver") {
res.deriver = value; res.deriver = value;
} else if (name == "Hash") { } else if (name == "Hash") {
res.hash = parseHashField(path, value); res.narHash = parseHashField(path, value);
} else if (name == "Registered-At") { } else if (name == "Registered-At") {
int n = 0; int n = 0;
string2Int(value, n); string2Int(value, n);

View file

@ -88,11 +88,13 @@ private:
Path linksDir; Path linksDir;
Path reservedPath;
public: public:
/* Initialise the local store, upgrading the schema if /* Initialise the local store, upgrading the schema if
necessary. */ necessary. */
LocalStore(bool reserveSpace = true); LocalStore();
~LocalStore(); ~LocalStore();
@ -108,8 +110,6 @@ public:
Hash queryPathHash(const Path & path) override; Hash queryPathHash(const Path & path) override;
void queryReferences(const Path & path, PathSet & references) override;
void queryReferrers(const Path & path, PathSet & referrers) override; void queryReferrers(const Path & path, PathSet & referrers) override;
Path queryDeriver(const Path & path) override; Path queryDeriver(const Path & path) override;

View file

@ -8,7 +8,7 @@ libstore_SOURCES := $(wildcard $(d)/*.cc)
libstore_LIBS = libutil libformat libstore_LIBS = libutil libformat
libstore_LDFLAGS = $(SQLITE3_LIBS) -lbz2 $(LIBCURL_LIBS) libstore_LDFLAGS = $(SQLITE3_LIBS) -lbz2 $(LIBCURL_LIBS) $(SODIUM_LIBS)
ifeq ($(OS), SunOS) ifeq ($(OS), SunOS)
libstore_LDFLAGS += -lsocket libstore_LDFLAGS += -lsocket

src/libstore/nar-info.cc (new file, 134 lines added)

@ -0,0 +1,134 @@
#include "crypto.hh"
#include "globals.hh"
#include "nar-info.hh"
namespace nix {
NarInfo::NarInfo(const std::string & s, const std::string & whence)
{
auto corrupt = [&]() {
throw Error("NAR info file %1% is corrupt");
};
auto parseHashField = [&](const string & s) {
string::size_type colon = s.find(':');
if (colon == string::npos) corrupt();
HashType ht = parseHashType(string(s, 0, colon));
if (ht == htUnknown) corrupt();
return parseHash16or32(ht, string(s, colon + 1));
};
size_t pos = 0;
while (pos < s.size()) {
size_t colon = s.find(':', pos);
if (colon == std::string::npos) corrupt();
std::string name(s, pos, colon - pos);
size_t eol = s.find('\n', colon + 2);
if (eol == std::string::npos) corrupt();
std::string value(s, colon + 2, eol - colon - 2);
if (name == "StorePath") {
if (!isStorePath(value)) corrupt();
path = value;
}
else if (name == "URL")
url = value;
else if (name == "Compression")
compression = value;
else if (name == "FileHash")
fileHash = parseHashField(value);
else if (name == "FileSize") {
if (!string2Int(value, fileSize)) corrupt();
}
else if (name == "NarHash")
narHash = parseHashField(value);
else if (name == "NarSize") {
if (!string2Int(value, narSize)) corrupt();
}
else if (name == "References") {
auto refs = tokenizeString<Strings>(value, " ");
if (!references.empty()) corrupt();
for (auto & r : refs) {
auto r2 = settings.nixStore + "/" + r;
if (!isStorePath(r2)) corrupt();
references.insert(r2);
}
}
else if (name == "Deriver") {
auto p = settings.nixStore + "/" + value;
if (!isStorePath(p)) corrupt();
deriver = p;
}
else if (name == "System")
system = value;
else if (name == "Sig")
sig = value;
pos = eol + 1;
}
if (compression == "") compression = "bzip2";
if (path.empty() || url.empty()) corrupt();
}
std::string NarInfo::to_string() const
{
std::string res;
res += "StorePath: " + path + "\n";
res += "URL: " + url + "\n";
assert(compression != "");
res += "Compression: " + compression + "\n";
assert(fileHash.type == htSHA256);
res += "FileHash: sha256:" + printHash32(fileHash) + "\n";
res += "FileSize: " + std::to_string(fileSize) + "\n";
assert(narHash.type == htSHA256);
res += "NarHash: sha256:" + printHash32(narHash) + "\n";
res += "NarSize: " + std::to_string(narSize) + "\n";
res += "References: " + concatStringsSep(" ", shortRefs()) + "\n";
if (!deriver.empty())
res += "Deriver: " + baseNameOf(deriver) + "\n";
if (!system.empty())
res += "System: " + system + "\n";
if (!sig.empty())
res += "Sig: " + sig + "\n";
return res;
}
std::string NarInfo::fingerprint() const
{
return
"1;" + path + ";"
+ printHashType(narHash.type) + ":" + printHash32(narHash) + ";"
+ std::to_string(narSize) + ";"
+ concatStringsSep(",", references);
}
Strings NarInfo::shortRefs() const
{
Strings refs;
for (auto & r : references)
refs.push_back(baseNameOf(r));
return refs;
}
void NarInfo::sign(const SecretKey & secretKey)
{
sig = secretKey.signDetached(fingerprint());
}
bool NarInfo::checkSignature(const PublicKeys & publicKeys) const
{
return sig != "" && verifyDetached(fingerprint(), sig, publicKeys);
}
}

src/libstore/nar-info.hh (new file, 43 lines added)

@ -0,0 +1,43 @@
#pragma once
#include "types.hh"
#include "hash.hh"
#include "store-api.hh"
namespace nix {
struct NarInfo : ValidPathInfo
{
std::string url;
std::string compression;
Hash fileHash;
uint64_t fileSize = 0;
std::string system;
std::string sig; // FIXME: support multiple signatures
NarInfo() { }
NarInfo(const ValidPathInfo & info) : ValidPathInfo(info) { }
NarInfo(const std::string & s, const std::string & whence);
std::string to_string() const;
/* Return a fingerprint of the store path to be used in binary
cache signatures. It contains the store path, the base-32
SHA-256 hash of the NAR serialisation of the path, the size of
the NAR, and the sorted references. The size field is strictly
speaking superfluous, but might prevent endless/excessive data
attacks. */
std::string fingerprint() const;
void sign(const SecretKey & secretKey);
/* Return true iff this .narinfo is signed by one of the specified
keys. */
bool checkSignature(const PublicKeys & publicKeys) const;
private:
Strings shortRefs() const;
};
}
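
For orientation, a .narinfo as produced by NarInfo::to_string() looks roughly like this; every hash, name and number below is a made-up placeholder:

StorePath: /nix/store/<32-char hash>-hello-2.10
URL: nar/<hash>.nar.bz2
Compression: bzip2
FileHash: sha256:<base-32 hash of the compressed NAR>
FileSize: 61000
NarHash: sha256:<base-32 hash of the uncompressed NAR>
NarSize: 206000
References: <32-char hash>-glibc-2.21 <32-char hash>-hello-2.10
Deriver: <32-char hash>-hello-2.10.drv
Sig: cache.example.org-1:<base64 Ed25519 signature>

The string actually covered by the signature is fingerprint(), i.e. "1;<full store path>;sha256:<NarHash>;<NarSize>;<comma-separated full store paths of the references>"; note that, unlike the References field above, the fingerprint spells out full store paths.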

View file

@ -108,7 +108,7 @@ Path createGeneration(ref<Store> store, Path profile, Path outPath)
user environment etc. we've just built. */ user environment etc. we've just built. */
Path generation; Path generation;
makeName(profile, num + 1, generation); makeName(profile, num + 1, generation);
addPermRoot(store, outPath, generation, false, true); store->addPermRoot(outPath, generation, false, true);
return generation; return generation;
} }

View file

@ -6,6 +6,7 @@
#include "affinity.hh" #include "affinity.hh"
#include "globals.hh" #include "globals.hh"
#include "derivations.hh" #include "derivations.hh"
#include "pool.hh"
#include <sys/types.h> #include <sys/types.h>
#include <sys/stat.h> #include <sys/stat.h>
@ -13,9 +14,8 @@
#include <sys/un.h> #include <sys/un.h>
#include <errno.h> #include <errno.h>
#include <fcntl.h> #include <fcntl.h>
#include <iostream>
#include <unistd.h> #include <unistd.h>
#include <cstring> #include <cstring>
namespace nix { namespace nix {
@ -39,62 +39,25 @@ template<class T> T readStorePaths(Source & from)
template PathSet readStorePaths(Source & from); template PathSet readStorePaths(Source & from);
RemoteStore::RemoteStore() RemoteStore::RemoteStore(size_t maxConnections)
: connections(make_ref<Pool<Connection>>(
maxConnections,
[this]() { return openConnection(); },
[](const ref<Connection> & r) { return r->to.good() && r->from.good(); }
))
{ {
initialised = false;
} }
void RemoteStore::openConnection(bool reserveSpace) ref<RemoteStore::Connection> RemoteStore::openConnection()
{ {
if (initialised) return; auto conn = make_ref<Connection>();
initialised = true;
/* Connect to a daemon that does the privileged work for us. */ /* Connect to a daemon that does the privileged work for us. */
connectToDaemon(); conn->fd = socket(PF_UNIX, SOCK_STREAM, 0);
if (conn->fd == -1)
from.fd = fdSocket;
to.fd = fdSocket;
/* Send the magic greeting, check for the reply. */
try {
to << WORKER_MAGIC_1;
to.flush();
unsigned int magic = readInt(from);
if (magic != WORKER_MAGIC_2) throw Error("protocol mismatch");
daemonVersion = readInt(from);
if (GET_PROTOCOL_MAJOR(daemonVersion) != GET_PROTOCOL_MAJOR(PROTOCOL_VERSION))
throw Error("Nix daemon protocol version not supported");
to << PROTOCOL_VERSION;
if (GET_PROTOCOL_MINOR(daemonVersion) >= 14) {
int cpu = settings.lockCPU ? lockToCurrentCPU() : -1;
if (cpu != -1)
to << 1 << cpu;
else
to << 0;
}
if (GET_PROTOCOL_MINOR(daemonVersion) >= 11)
to << reserveSpace;
processStderr();
}
catch (Error & e) {
throw Error(format("cannot start daemon worker: %1%") % e.msg());
}
setOptions();
}
void RemoteStore::connectToDaemon()
{
fdSocket = socket(PF_UNIX, SOCK_STREAM, 0);
if (fdSocket == -1)
throw SysError("cannot create Unix domain socket"); throw SysError("cannot create Unix domain socket");
closeOnExec(fdSocket); closeOnExec(conn->fd);
string socketPath = settings.nixDaemonSocketFile; string socketPath = settings.nixDaemonSocketFile;
@ -111,111 +74,135 @@ void RemoteStore::connectToDaemon()
addr.sun_family = AF_UNIX; addr.sun_family = AF_UNIX;
if (socketPathRel.size() >= sizeof(addr.sun_path)) if (socketPathRel.size() >= sizeof(addr.sun_path))
throw Error(format("socket path %1% is too long") % socketPathRel); throw Error(format("socket path %1% is too long") % socketPathRel);
using namespace std;
strcpy(addr.sun_path, socketPathRel.c_str()); strcpy(addr.sun_path, socketPathRel.c_str());
if (connect(fdSocket, (struct sockaddr *) &addr, sizeof(addr)) == -1) if (connect(conn->fd, (struct sockaddr *) &addr, sizeof(addr)) == -1)
throw SysError(format("cannot connect to daemon at %1%") % socketPath); throw SysError(format("cannot connect to daemon at %1%") % socketPath);
if (fchdir(fdPrevDir) == -1) if (fchdir(fdPrevDir) == -1)
throw SysError("couldn't change back to previous directory"); throw SysError("couldn't change back to previous directory");
}
conn->from.fd = conn->fd;
conn->to.fd = conn->fd;
RemoteStore::~RemoteStore() /* Send the magic greeting, check for the reply. */
{
try { try {
to.flush(); conn->to << WORKER_MAGIC_1;
fdSocket.close(); conn->to.flush();
} catch (...) { unsigned int magic = readInt(conn->from);
ignoreException(); if (magic != WORKER_MAGIC_2) throw Error("protocol mismatch");
conn->daemonVersion = readInt(conn->from);
if (GET_PROTOCOL_MAJOR(conn->daemonVersion) != GET_PROTOCOL_MAJOR(PROTOCOL_VERSION))
throw Error("Nix daemon protocol version not supported");
conn->to << PROTOCOL_VERSION;
if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 14) {
int cpu = settings.lockCPU ? lockToCurrentCPU() : -1;
if (cpu != -1)
conn->to << 1 << cpu;
else
conn->to << 0;
}
if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 11)
conn->to << false;
conn->processStderr();
} }
catch (Error & e) {
throw Error(format("cannot start daemon worker: %1%") % e.msg());
}
setOptions(conn);
return conn;
} }
void RemoteStore::setOptions() void RemoteStore::setOptions(ref<Connection> conn)
{ {
to << wopSetOptions conn->to << wopSetOptions
<< settings.keepFailed << settings.keepFailed
<< settings.keepGoing << settings.keepGoing
<< settings.tryFallback << settings.tryFallback
<< verbosity << verbosity
<< settings.maxBuildJobs << settings.maxBuildJobs
<< settings.maxSilentTime; << settings.maxSilentTime;
if (GET_PROTOCOL_MINOR(daemonVersion) >= 2) if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 2)
to << settings.useBuildHook; conn->to << settings.useBuildHook;
if (GET_PROTOCOL_MINOR(daemonVersion) >= 4) if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 4)
to << settings.buildVerbosity conn->to << settings.buildVerbosity
<< logType << logType
<< settings.printBuildTrace; << settings.printBuildTrace;
if (GET_PROTOCOL_MINOR(daemonVersion) >= 6) if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 6)
to << settings.buildCores; conn->to << settings.buildCores;
if (GET_PROTOCOL_MINOR(daemonVersion) >= 10) if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 10)
to << settings.useSubstitutes; conn->to << settings.useSubstitutes;
if (GET_PROTOCOL_MINOR(daemonVersion) >= 12) { if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 12) {
Settings::SettingsMap overrides = settings.getOverrides(); Settings::SettingsMap overrides = settings.getOverrides();
if (overrides["ssh-auth-sock"] == "") if (overrides["ssh-auth-sock"] == "")
overrides["ssh-auth-sock"] = getEnv("SSH_AUTH_SOCK"); overrides["ssh-auth-sock"] = getEnv("SSH_AUTH_SOCK");
to << overrides.size(); conn->to << overrides.size();
for (auto & i : overrides) for (auto & i : overrides)
to << i.first << i.second; conn->to << i.first << i.second;
} }
processStderr(); conn->processStderr();
} }
bool RemoteStore::isValidPath(const Path & path) bool RemoteStore::isValidPath(const Path & path)
{ {
openConnection(); auto conn(connections->get());
to << wopIsValidPath << path; conn->to << wopIsValidPath << path;
processStderr(); conn->processStderr();
unsigned int reply = readInt(from); unsigned int reply = readInt(conn->from);
return reply != 0; return reply != 0;
} }
PathSet RemoteStore::queryValidPaths(const PathSet & paths) PathSet RemoteStore::queryValidPaths(const PathSet & paths)
{ {
openConnection(); auto conn(connections->get());
if (GET_PROTOCOL_MINOR(daemonVersion) < 12) { if (GET_PROTOCOL_MINOR(conn->daemonVersion) < 12) {
PathSet res; PathSet res;
for (auto & i : paths) for (auto & i : paths)
if (isValidPath(i)) res.insert(i); if (isValidPath(i)) res.insert(i);
return res; return res;
} else { } else {
to << wopQueryValidPaths << paths; conn->to << wopQueryValidPaths << paths;
processStderr(); conn->processStderr();
return readStorePaths<PathSet>(from); return readStorePaths<PathSet>(conn->from);
} }
} }
PathSet RemoteStore::queryAllValidPaths() PathSet RemoteStore::queryAllValidPaths()
{ {
openConnection(); auto conn(connections->get());
to << wopQueryAllValidPaths; conn->to << wopQueryAllValidPaths;
processStderr(); conn->processStderr();
return readStorePaths<PathSet>(from); return readStorePaths<PathSet>(conn->from);
} }
PathSet RemoteStore::querySubstitutablePaths(const PathSet & paths) PathSet RemoteStore::querySubstitutablePaths(const PathSet & paths)
{ {
openConnection(); auto conn(connections->get());
if (GET_PROTOCOL_MINOR(daemonVersion) < 12) { if (GET_PROTOCOL_MINOR(conn->daemonVersion) < 12) {
PathSet res; PathSet res;
for (auto & i : paths) { for (auto & i : paths) {
to << wopHasSubstitutes << i; conn->to << wopHasSubstitutes << i;
processStderr(); conn->processStderr();
if (readInt(from)) res.insert(i); if (readInt(conn->from)) res.insert(i);
} }
return res; return res;
} else { } else {
to << wopQuerySubstitutablePaths << paths; conn->to << wopQuerySubstitutablePaths << paths;
processStderr(); conn->processStderr();
return readStorePaths<PathSet>(from); return readStorePaths<PathSet>(conn->from);
} }
} }
@ -225,39 +212,39 @@ void RemoteStore::querySubstitutablePathInfos(const PathSet & paths,
{ {
if (paths.empty()) return; if (paths.empty()) return;
openConnection(); auto conn(connections->get());
if (GET_PROTOCOL_MINOR(daemonVersion) < 3) return; if (GET_PROTOCOL_MINOR(conn->daemonVersion) < 3) return;
if (GET_PROTOCOL_MINOR(daemonVersion) < 12) { if (GET_PROTOCOL_MINOR(conn->daemonVersion) < 12) {
for (auto & i : paths) { for (auto & i : paths) {
SubstitutablePathInfo info; SubstitutablePathInfo info;
to << wopQuerySubstitutablePathInfo << i; conn->to << wopQuerySubstitutablePathInfo << i;
processStderr(); conn->processStderr();
unsigned int reply = readInt(from); unsigned int reply = readInt(conn->from);
if (reply == 0) continue; if (reply == 0) continue;
info.deriver = readString(from); info.deriver = readString(conn->from);
if (info.deriver != "") assertStorePath(info.deriver); if (info.deriver != "") assertStorePath(info.deriver);
info.references = readStorePaths<PathSet>(from); info.references = readStorePaths<PathSet>(conn->from);
info.downloadSize = readLongLong(from); info.downloadSize = readLongLong(conn->from);
info.narSize = GET_PROTOCOL_MINOR(daemonVersion) >= 7 ? readLongLong(from) : 0; info.narSize = GET_PROTOCOL_MINOR(conn->daemonVersion) >= 7 ? readLongLong(conn->from) : 0;
infos[i] = info; infos[i] = info;
} }
} else { } else {
to << wopQuerySubstitutablePathInfos << paths; conn->to << wopQuerySubstitutablePathInfos << paths;
processStderr(); conn->processStderr();
unsigned int count = readInt(from); unsigned int count = readInt(conn->from);
for (unsigned int n = 0; n < count; n++) { for (unsigned int n = 0; n < count; n++) {
Path path = readStorePath(from); Path path = readStorePath(conn->from);
SubstitutablePathInfo & info(infos[path]); SubstitutablePathInfo & info(infos[path]);
info.deriver = readString(from); info.deriver = readString(conn->from);
if (info.deriver != "") assertStorePath(info.deriver); if (info.deriver != "") assertStorePath(info.deriver);
info.references = readStorePaths<PathSet>(from); info.references = readStorePaths<PathSet>(conn->from);
info.downloadSize = readLongLong(from); info.downloadSize = readLongLong(conn->from);
info.narSize = readLongLong(from); info.narSize = readLongLong(conn->from);
} }
} }
@ -266,27 +253,27 @@ void RemoteStore::querySubstitutablePathInfos(const PathSet & paths,
ValidPathInfo RemoteStore::queryPathInfo(const Path & path) ValidPathInfo RemoteStore::queryPathInfo(const Path & path)
{ {
openConnection(); auto conn(connections->get());
to << wopQueryPathInfo << path; conn->to << wopQueryPathInfo << path;
processStderr(); conn->processStderr();
ValidPathInfo info; ValidPathInfo info;
info.path = path; info.path = path;
info.deriver = readString(from); info.deriver = readString(conn->from);
if (info.deriver != "") assertStorePath(info.deriver); if (info.deriver != "") assertStorePath(info.deriver);
info.hash = parseHash(htSHA256, readString(from)); info.narHash = parseHash(htSHA256, readString(conn->from));
info.references = readStorePaths<PathSet>(from); info.references = readStorePaths<PathSet>(conn->from);
info.registrationTime = readInt(from); info.registrationTime = readInt(conn->from);
info.narSize = readLongLong(from); info.narSize = readLongLong(conn->from);
return info; return info;
} }
Hash RemoteStore::queryPathHash(const Path & path) Hash RemoteStore::queryPathHash(const Path & path)
{ {
openConnection(); auto conn(connections->get());
to << wopQueryPathHash << path; conn->to << wopQueryPathHash << path;
processStderr(); conn->processStderr();
string hash = readString(from); string hash = readString(conn->from);
return parseHash(htSHA256, hash); return parseHash(htSHA256, hash);
} }
@ -294,10 +281,10 @@ Hash RemoteStore::queryPathHash(const Path & path)
void RemoteStore::queryReferences(const Path & path, void RemoteStore::queryReferences(const Path & path,
PathSet & references) PathSet & references)
{ {
openConnection(); auto conn(connections->get());
to << wopQueryReferences << path; conn->to << wopQueryReferences << path;
processStderr(); conn->processStderr();
PathSet references2 = readStorePaths<PathSet>(from); PathSet references2 = readStorePaths<PathSet>(conn->from);
references.insert(references2.begin(), references2.end()); references.insert(references2.begin(), references2.end());
} }
@ -305,20 +292,20 @@ void RemoteStore::queryReferences(const Path & path,
void RemoteStore::queryReferrers(const Path & path, void RemoteStore::queryReferrers(const Path & path,
PathSet & referrers) PathSet & referrers)
{ {
openConnection(); auto conn(connections->get());
to << wopQueryReferrers << path; conn->to << wopQueryReferrers << path;
processStderr(); conn->processStderr();
PathSet referrers2 = readStorePaths<PathSet>(from); PathSet referrers2 = readStorePaths<PathSet>(conn->from);
referrers.insert(referrers2.begin(), referrers2.end()); referrers.insert(referrers2.begin(), referrers2.end());
} }
Path RemoteStore::queryDeriver(const Path & path) Path RemoteStore::queryDeriver(const Path & path)
{ {
openConnection(); auto conn(connections->get());
to << wopQueryDeriver << path; conn->to << wopQueryDeriver << path;
processStderr(); conn->processStderr();
Path drvPath = readString(from); Path drvPath = readString(conn->from);
if (drvPath != "") assertStorePath(drvPath); if (drvPath != "") assertStorePath(drvPath);
return drvPath; return drvPath;
} }
@ -326,37 +313,37 @@ Path RemoteStore::queryDeriver(const Path & path)
PathSet RemoteStore::queryValidDerivers(const Path & path) PathSet RemoteStore::queryValidDerivers(const Path & path)
{ {
openConnection(); auto conn(connections->get());
to << wopQueryValidDerivers << path; conn->to << wopQueryValidDerivers << path;
processStderr(); conn->processStderr();
return readStorePaths<PathSet>(from); return readStorePaths<PathSet>(conn->from);
} }
PathSet RemoteStore::queryDerivationOutputs(const Path & path) PathSet RemoteStore::queryDerivationOutputs(const Path & path)
{ {
openConnection(); auto conn(connections->get());
to << wopQueryDerivationOutputs << path; conn->to << wopQueryDerivationOutputs << path;
processStderr(); conn->processStderr();
return readStorePaths<PathSet>(from); return readStorePaths<PathSet>(conn->from);
} }
PathSet RemoteStore::queryDerivationOutputNames(const Path & path) PathSet RemoteStore::queryDerivationOutputNames(const Path & path)
{ {
openConnection(); auto conn(connections->get());
to << wopQueryDerivationOutputNames << path; conn->to << wopQueryDerivationOutputNames << path;
processStderr(); conn->processStderr();
return readStrings<PathSet>(from); return readStrings<PathSet>(conn->from);
} }
Path RemoteStore::queryPathFromHashPart(const string & hashPart) Path RemoteStore::queryPathFromHashPart(const string & hashPart)
{ {
openConnection(); auto conn(connections->get());
to << wopQueryPathFromHashPart << hashPart; conn->to << wopQueryPathFromHashPart << hashPart;
processStderr(); conn->processStderr();
Path path = readString(from); Path path = readString(conn->from);
if (!path.empty()) assertStorePath(path); if (!path.empty()) assertStorePath(path);
return path; return path;
} }
@ -367,32 +354,32 @@ Path RemoteStore::addToStore(const string & name, const Path & _srcPath,
{ {
if (repair) throw Error("repairing is not supported when building through the Nix daemon"); if (repair) throw Error("repairing is not supported when building through the Nix daemon");
openConnection(); auto conn(connections->get());
Path srcPath(absPath(_srcPath)); Path srcPath(absPath(_srcPath));
to << wopAddToStore << name conn->to << wopAddToStore << name
<< ((hashAlgo == htSHA256 && recursive) ? 0 : 1) /* backwards compatibility hack */ << ((hashAlgo == htSHA256 && recursive) ? 0 : 1) /* backwards compatibility hack */
<< (recursive ? 1 : 0) << (recursive ? 1 : 0)
<< printHashType(hashAlgo); << printHashType(hashAlgo);
try { try {
to.written = 0; conn->to.written = 0;
to.warn = true; conn->to.warn = true;
dumpPath(srcPath, to, filter); dumpPath(srcPath, conn->to, filter);
to.warn = false; conn->to.warn = false;
processStderr(); conn->processStderr();
} catch (SysError & e) { } catch (SysError & e) {
/* Daemon closed while we were sending the path. Probably OOM /* Daemon closed while we were sending the path. Probably OOM
or I/O error. */ or I/O error. */
if (e.errNo == EPIPE) if (e.errNo == EPIPE)
try { try {
processStderr(); conn->processStderr();
} catch (EndOfFile & e) { } } catch (EndOfFile & e) { }
throw; throw;
} }
return readStorePath(from); return readStorePath(conn->from);
} }
@ -401,43 +388,43 @@ Path RemoteStore::addTextToStore(const string & name, const string & s,
{ {
if (repair) throw Error("repairing is not supported when building through the Nix daemon"); if (repair) throw Error("repairing is not supported when building through the Nix daemon");
openConnection(); auto conn(connections->get());
to << wopAddTextToStore << name << s << references; conn->to << wopAddTextToStore << name << s << references;
processStderr(); conn->processStderr();
return readStorePath(from); return readStorePath(conn->from);
} }
void RemoteStore::exportPath(const Path & path, bool sign, void RemoteStore::exportPath(const Path & path, bool sign,
Sink & sink) Sink & sink)
{ {
openConnection(); auto conn(connections->get());
to << wopExportPath << path << (sign ? 1 : 0); conn->to << wopExportPath << path << (sign ? 1 : 0);
processStderr(&sink); /* sink receives the actual data */ conn->processStderr(&sink); /* sink receives the actual data */
readInt(from); readInt(conn->from);
} }
Paths RemoteStore::importPaths(bool requireSignature, Source & source) Paths RemoteStore::importPaths(bool requireSignature, Source & source)
{ {
openConnection(); auto conn(connections->get());
to << wopImportPaths; conn->to << wopImportPaths;
/* We ignore requireSignature, since the worker forces it to true /* We ignore requireSignature, since the worker forces it to true
anyway. */ anyway. */
processStderr(0, &source); conn->processStderr(0, &source);
return readStorePaths<Paths>(from); return readStorePaths<Paths>(conn->from);
} }
void RemoteStore::buildPaths(const PathSet & drvPaths, BuildMode buildMode) void RemoteStore::buildPaths(const PathSet & drvPaths, BuildMode buildMode)
{ {
openConnection(); auto conn(connections->get());
to << wopBuildPaths; conn->to << wopBuildPaths;
if (GET_PROTOCOL_MINOR(daemonVersion) >= 13) { if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 13) {
to << drvPaths; conn->to << drvPaths;
if (GET_PROTOCOL_MINOR(daemonVersion) >= 15) if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 15)
to << buildMode; conn->to << buildMode;
else else
/* Old daemons did not take a 'buildMode' parameter, so we /* Old daemons did not take a 'buildMode' parameter, so we
need to validate it here on the client side. */ need to validate it here on the client side. */
@ -449,22 +436,22 @@ void RemoteStore::buildPaths(const PathSet & drvPaths, BuildMode buildMode)
PathSet drvPaths2; PathSet drvPaths2;
for (auto & i : drvPaths) for (auto & i : drvPaths)
drvPaths2.insert(string(i, 0, i.find('!'))); drvPaths2.insert(string(i, 0, i.find('!')));
to << drvPaths2; conn->to << drvPaths2;
} }
processStderr(); conn->processStderr();
readInt(from); readInt(conn->from);
} }
BuildResult RemoteStore::buildDerivation(const Path & drvPath, const BasicDerivation & drv, BuildResult RemoteStore::buildDerivation(const Path & drvPath, const BasicDerivation & drv,
BuildMode buildMode) BuildMode buildMode)
{ {
openConnection(); auto conn(connections->get());
to << wopBuildDerivation << drvPath << drv << buildMode; conn->to << wopBuildDerivation << drvPath << drv << buildMode;
processStderr(); conn->processStderr();
BuildResult res; BuildResult res;
unsigned int status; unsigned int status;
from >> status >> res.errorMsg; conn->from >> status >> res.errorMsg;
res.status = (BuildResult::Status) status; res.status = (BuildResult::Status) status;
return res; return res;
} }
@ -472,50 +459,50 @@ BuildResult RemoteStore::buildDerivation(const Path & drvPath, const BasicDeriva
void RemoteStore::ensurePath(const Path & path) void RemoteStore::ensurePath(const Path & path)
{ {
openConnection(); auto conn(connections->get());
to << wopEnsurePath << path; conn->to << wopEnsurePath << path;
processStderr(); conn->processStderr();
readInt(from); readInt(conn->from);
} }
void RemoteStore::addTempRoot(const Path & path) void RemoteStore::addTempRoot(const Path & path)
{ {
openConnection(); auto conn(connections->get());
to << wopAddTempRoot << path; conn->to << wopAddTempRoot << path;
processStderr(); conn->processStderr();
readInt(from); readInt(conn->from);
} }
void RemoteStore::addIndirectRoot(const Path & path) void RemoteStore::addIndirectRoot(const Path & path)
{ {
openConnection(); auto conn(connections->get());
to << wopAddIndirectRoot << path; conn->to << wopAddIndirectRoot << path;
processStderr(); conn->processStderr();
readInt(from); readInt(conn->from);
} }
void RemoteStore::syncWithGC() void RemoteStore::syncWithGC()
{ {
openConnection(); auto conn(connections->get());
to << wopSyncWithGC; conn->to << wopSyncWithGC;
processStderr(); conn->processStderr();
readInt(from); readInt(conn->from);
} }
Roots RemoteStore::findRoots() Roots RemoteStore::findRoots()
{ {
openConnection(); auto conn(connections->get());
to << wopFindRoots; conn->to << wopFindRoots;
processStderr(); conn->processStderr();
unsigned int count = readInt(from); unsigned int count = readInt(conn->from);
Roots result; Roots result;
while (count--) { while (count--) {
Path link = readString(from); Path link = readString(conn->from);
Path target = readStorePath(from); Path target = readStorePath(conn->from);
result[link] = target; result[link] = target;
} }
return result; return result;
@ -524,56 +511,68 @@ Roots RemoteStore::findRoots()
void RemoteStore::collectGarbage(const GCOptions & options, GCResults & results) void RemoteStore::collectGarbage(const GCOptions & options, GCResults & results)
{ {
openConnection(false); auto conn(connections->get());
to << wopCollectGarbage << options.action << options.pathsToDelete << options.ignoreLiveness conn->to << wopCollectGarbage << options.action << options.pathsToDelete << options.ignoreLiveness
<< options.maxFreed << 0; << options.maxFreed << 0;
if (GET_PROTOCOL_MINOR(daemonVersion) >= 5) if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 5)
/* removed options */ /* removed options */
to << 0 << 0; conn->to << 0 << 0;
processStderr(); conn->processStderr();
results.paths = readStrings<PathSet>(from); results.paths = readStrings<PathSet>(conn->from);
results.bytesFreed = readLongLong(from); results.bytesFreed = readLongLong(conn->from);
readLongLong(from); // obsolete readLongLong(conn->from); // obsolete
} }
PathSet RemoteStore::queryFailedPaths() PathSet RemoteStore::queryFailedPaths()
{ {
openConnection(); auto conn(connections->get());
to << wopQueryFailedPaths; conn->to << wopQueryFailedPaths;
processStderr(); conn->processStderr();
return readStorePaths<PathSet>(from); return readStorePaths<PathSet>(conn->from);
} }
void RemoteStore::clearFailedPaths(const PathSet & paths) void RemoteStore::clearFailedPaths(const PathSet & paths)
{ {
openConnection(); auto conn(connections->get());
to << wopClearFailedPaths << paths; conn->to << wopClearFailedPaths << paths;
processStderr(); conn->processStderr();
readInt(from); readInt(conn->from);
} }
void RemoteStore::optimiseStore() void RemoteStore::optimiseStore()
{ {
openConnection(); auto conn(connections->get());
to << wopOptimiseStore; conn->to << wopOptimiseStore;
processStderr(); conn->processStderr();
readInt(from); readInt(conn->from);
} }
bool RemoteStore::verifyStore(bool checkContents, bool repair) bool RemoteStore::verifyStore(bool checkContents, bool repair)
{ {
openConnection(); auto conn(connections->get());
to << wopVerifyStore << checkContents << repair; conn->to << wopVerifyStore << checkContents << repair;
processStderr(); conn->processStderr();
return readInt(from) != 0; return readInt(conn->from) != 0;
} }
void RemoteStore::processStderr(Sink * sink, Source * source)
RemoteStore::Connection::~Connection()
{
try {
to.flush();
fd.close();
} catch (...) {
ignoreException();
}
}
void RemoteStore::Connection::processStderr(Sink * sink, Source * source)
{ {
to.flush(); to.flush();
unsigned int msg; unsigned int msg;

View file

@ -1,5 +1,6 @@
#pragma once #pragma once
#include <limits>
#include <string> #include <string>
#include "store-api.hh" #include "store-api.hh"
@ -12,15 +13,14 @@ class Pipe;
class Pid; class Pid;
struct FdSink; struct FdSink;
struct FdSource; struct FdSource;
template<typename T> class Pool;
class RemoteStore : public Store class RemoteStore : public Store
{ {
public: public:
RemoteStore(); RemoteStore(size_t maxConnections = std::numeric_limits<size_t>::max());
~RemoteStore();
/* Implementations of abstract store API methods. */ /* Implementations of abstract store API methods. */
@ -91,19 +91,24 @@ public:
bool verifyStore(bool checkContents, bool repair) override; bool verifyStore(bool checkContents, bool repair) override;
private: private:
AutoCloseFD fdSocket;
FdSink to;
FdSource from;
unsigned int daemonVersion;
bool initialised;
void openConnection(bool reserveSpace = true); struct Connection
{
AutoCloseFD fd;
FdSink to;
FdSource from;
unsigned int daemonVersion;
void processStderr(Sink * sink = 0, Source * source = 0); ~Connection();
void connectToDaemon(); void processStderr(Sink * sink = 0, Source * source = 0);
};
void setOptions(); ref<Pool<Connection>> connections;
ref<Connection> openConnection();
void setOptions(ref<Connection> conn);
}; };
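
The client now keeps a pool of daemon connections instead of one lazily opened socket. pool.hh itself is not part of this excerpt; judging only from the call sites (make_ref<Pool<Connection>>(maxConnections, factory, validator) and auto conn(connections->get())), its interface looks roughly like the single-threaded sketch below. This is an illustration of the idea, not the real class, which additionally has to be thread-safe:

#include <functional>
#include <list>
#include <memory>

template<class R>
class PoolSketch
{
    size_t capacity;
    std::function<std::shared_ptr<R>()> create;               /* opens a new resource */
    std::function<bool(const std::shared_ptr<R> &)> validate; /* is an idle resource still usable? */
    std::list<std::shared_ptr<R>> idle;

public:

    PoolSketch(size_t capacity,
        std::function<std::shared_ptr<R>()> create,
        std::function<bool(const std::shared_ptr<R> &)> validate)
        : capacity(capacity), create(create), validate(validate) { }

    /* RAII handle: gives access to a resource and puts it back when it
       goes out of scope. */
    class Handle
    {
        PoolSketch & pool;
        std::shared_ptr<R> r;
    public:
        Handle(PoolSketch & pool, std::shared_ptr<R> r) : pool(pool), r(std::move(r)) { }
        Handle(Handle && other) : pool(other.pool), r(std::move(other.r)) { }
        Handle(const Handle &) = delete;
        ~Handle()
        {
            if (r && pool.idle.size() < pool.capacity) pool.idle.push_back(r);
        }
        R * operator -> () { return r.get(); }
        R & operator * () { return *r; }
    };

    Handle get()
    {
        /* Reuse a healthy idle resource if there is one; drop stale ones. */
        while (!idle.empty()) {
            auto r = idle.back();
            idle.pop_back();
            if (validate(r)) return Handle(*this, r);
        }
        return Handle(*this, create());
    }
};

In RemoteStore, openConnection() plays the role of the factory and the lambda checking to.good() && from.good() the role of the validator; every worker-protocol method then holds one handle for the duration of a single request.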

View file

@ -61,7 +61,14 @@ Path followLinksToStorePath(const Path & path)
string storePathToName(const Path & path) string storePathToName(const Path & path)
{ {
assertStorePath(path); assertStorePath(path);
return string(path, settings.nixStore.size() + 34); return string(path, settings.nixStore.size() + storePathHashLen + 2);
}
string storePathToHash(const Path & path)
{
assertStorePath(path);
return string(path, settings.nixStore.size() + 1, storePathHashLen);
} }
@ -217,6 +224,13 @@ Path computeStorePathForText(const string & name, const string & s,
} }
void Store::queryReferences(const Path & path, PathSet & references)
{
ValidPathInfo info = queryPathInfo(path);
references.insert(info.references.begin(), info.references.end());
}
/* Return a string accepted by decodeValidPathInfo() that /* Return a string accepted by decodeValidPathInfo() that
registers the specified paths as valid. Note: it's the registers the specified paths as valid. Note: it's the
responsibility of the caller to provide a closure. */ responsibility of the caller to provide a closure. */
@ -231,7 +245,7 @@ string Store::makeValidityRegistration(const PathSet & paths,
ValidPathInfo info = queryPathInfo(i); ValidPathInfo info = queryPathInfo(i);
if (showHash) { if (showHash) {
s += printHash(info.hash) + "\n"; s += printHash(info.narHash) + "\n";
s += (format("%1%\n") % info.narSize).str(); s += (format("%1%\n") % info.narSize).str();
} }
@ -256,7 +270,7 @@ ValidPathInfo decodeValidPathInfo(std::istream & str, bool hashGiven)
if (hashGiven) { if (hashGiven) {
string s; string s;
getline(str, s); getline(str, s);
info.hash = parseHash(htSHA256, s); info.narHash = parseHash(htSHA256, s);
getline(str, s); getline(str, s);
if (!string2Int(s, info.narSize)) throw Error("number expected"); if (!string2Int(s, info.narSize)) throw Error("number expected");
} }
@ -299,18 +313,26 @@ void Store::exportPaths(const Paths & paths,
#include "local-store.hh" #include "local-store.hh"
#include "serialise.hh"
#include "remote-store.hh" #include "remote-store.hh"
#include "local-binary-cache-store.hh"
namespace nix { namespace nix {
ref<Store> openStore(bool reserveSpace) ref<Store> openStoreAt(const std::string & uri)
{ {
if (std::string(uri, 0, 7) == "file://") {
auto store = make_ref<LocalBinaryCacheStore>(std::shared_ptr<Store>(0),
"", "", // FIXME: allow the signing key to be set
std::string(uri, 7));
store->init();
return store;
}
enum { mDaemon, mLocal, mAuto } mode; enum { mDaemon, mLocal, mAuto } mode;
mode = getEnv("NIX_REMOTE") == "daemon" ? mDaemon : mAuto; mode = uri == "daemon" ? mDaemon : mAuto;
if (mode == mAuto) { if (mode == mAuto) {
if (LocalStore::haveWriteAccess()) if (LocalStore::haveWriteAccess())
@ -322,8 +344,14 @@ ref<Store> openStore(bool reserveSpace)
} }
return mode == mDaemon return mode == mDaemon
? make_ref<Store, RemoteStore>() ? (ref<Store>) make_ref<RemoteStore>()
: make_ref<Store, LocalStore>(reserveSpace); : (ref<Store>) make_ref<LocalStore>();
}
ref<Store> openStore()
{
return openStoreAt(getEnv("NIX_REMOTE"));
} }

View file

@ -12,6 +12,13 @@
namespace nix { namespace nix {
/* Size of the hash part of store paths, in base-32 characters. */
const size_t storePathHashLen = 32; // i.e. 160 bits
/* Magic header of exportPath() output. */
const uint32_t exportMagic = 0x4558494e;
typedef std::map<Path, Path> Roots; typedef std::map<Path, Path> Roots;
@ -85,7 +92,7 @@ struct ValidPathInfo
{ {
Path path; Path path;
Path deriver; Path deriver;
Hash hash; Hash narHash;
PathSet references; PathSet references;
time_t registrationTime = 0; time_t registrationTime = 0;
unsigned long long narSize = 0; // 0 = unknown unsigned long long narSize = 0; // 0 = unknown
@ -95,7 +102,7 @@ struct ValidPathInfo
{ {
return return
path == i.path path == i.path
&& hash == i.hash && narHash == i.narHash
&& references == i.references; && references == i.references;
} }
}; };
@ -156,10 +163,9 @@ public:
/* Query the hash of a valid path. */ /* Query the hash of a valid path. */
virtual Hash queryPathHash(const Path & path) = 0; virtual Hash queryPathHash(const Path & path) = 0;
/* Query the set of outgoing FS references for a store path. The /* Query the set of outgoing FS references for a store path. The
result is not cleared. */ result is not cleared. */
virtual void queryReferences(const Path & path, virtual void queryReferences(const Path & path, PathSet & references);
PathSet & references) = 0;
/* Queries the set of incoming FS references for a store path. /* Queries the set of incoming FS references for a store path.
The result is not cleared. */ The result is not cleared. */
@ -255,6 +261,10 @@ public:
`path' has disappeared. */ `path' has disappeared. */
virtual void addIndirectRoot(const Path & path) = 0; virtual void addIndirectRoot(const Path & path) = 0;
/* Register a permanent GC root. */
Path addPermRoot(const Path & storePath,
const Path & gcRoot, bool indirect, bool allowOutsideRootsDir = false);
/* Acquire the global GC lock, then immediately release it. This /* Acquire the global GC lock, then immediately release it. This
function must be called after registering a new permanent root, function must be called after registering a new permanent root,
but before exiting. Otherwise, it is possible that a running but before exiting. Otherwise, it is possible that a running
@ -346,6 +356,9 @@ bool isStorePath(const Path & path);
/* Extract the name part of the given store path. */ /* Extract the name part of the given store path. */
string storePathToName(const Path & path); string storePathToName(const Path & path);
/* Extract the hash part of the given store path. */
string storePathToHash(const Path & path);
void checkStoreName(const string & name); void checkStoreName(const string & name);
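storePathToHash() complements storePathToName(): it extracts the 32-character base-32 hash part of a store path, which the binary-cache code uses as its key. A minimal sketch of the pair, assuming the declarations above and the default /nix/store prefix (the example path is made up):

#include "store-api.hh"

#include <iostream>

int main()
{
    nix::Path p = "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-hello-2.10";
    std::cout << nix::storePathToHash(p) << "\n"; // the 32-char hash part
    std::cout << nix::storePathToName(p) << "\n"; // "hello-2.10"
    return 0;
}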
@ -406,14 +419,25 @@ Path computeStorePathForText(const string & name, const string & s,
void removeTempRoots(); void removeTempRoots();
/* Register a permanent GC root. */ /* Return a Store object to access the Nix store denoted by
Path addPermRoot(ref<Store> store, const Path & storePath, uri (slight misnomer...). Supported values are:
const Path & gcRoot, bool indirect, bool allowOutsideRootsDir = false);
* direct: The Nix store in /nix/store and database in
/nix/var/nix/db, accessed directly.
* daemon: The Nix store accessed via a Unix domain socket
connection to nix-daemon.
* file://<path>: A binary cache stored in <path>.
If uri is empty, it defaults to direct or daemon depending on
whether the user has write access to the local Nix store/database.
set to true *unless* you're going to collect garbage. */
ref<Store> openStoreAt(const std::string & uri);
/* Factory method: open the Nix database, either through the local or /* Open the store indicated by the NIX_REMOTE environment variable. */
remote implementation. */ ref<Store> openStore();
ref<Store> openStore(bool reserveSpace = true);
/* Display a set of paths in human-readable form (i.e., between quotes /* Display a set of paths in human-readable form (i.e., between quotes
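The comment above is the contract for the new factory functions: openStoreAt() selects a store implementation from a URI, and openStore() is the NIX_REMOTE-driven wrapper that replaces the old openStore(bool reserveSpace) signature. A usage sketch, assuming the declarations in this hunk (the file:// path and the store path queried are illustrative only):

#include "store-api.hh"

#include <iostream>

using namespace nix;

int main()
{
    // Honour NIX_REMOTE ("", "daemon", or a supported URI).
    ref<Store> store = openStore();

    // Or address a particular store explicitly, e.g. a local binary cache.
    ref<Store> cache = openStoreAt("file:///tmp/binary-cache");

    Path p = "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-hello-2.10";
    std::cout << p << (store->isValidPath(p) ? " is" : " is not")
              << " valid in the default store\n";
    return 0;
}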
View file
@ -29,7 +29,7 @@ bool useCaseHack =
false; false;
#endif #endif
static string archiveVersion1 = "nix-archive-1"; const std::string narVersionMagic1 = "nix-archive-1";
static string caseHackSuffix = "~nix~case~hack~"; static string caseHackSuffix = "~nix~case~hack~";
@ -113,11 +113,17 @@ static void dump(const Path & path, Sink & sink, PathFilter & filter)
void dumpPath(const Path & path, Sink & sink, PathFilter & filter) void dumpPath(const Path & path, Sink & sink, PathFilter & filter)
{ {
sink << archiveVersion1; sink << narVersionMagic1;
dump(path, sink, filter); dump(path, sink, filter);
} }
void dumpString(const std::string & s, Sink & sink)
{
sink << narVersionMagic1 << "(" << "type" << "regular" << "contents" << s << ")";
}
static SerialisationError badArchive(string s) static SerialisationError badArchive(string s)
{ {
return SerialisationError("bad archive: " + s); return SerialisationError("bad archive: " + s);
@ -214,7 +220,8 @@ static void parse(ParseSink & sink, Source & source, const Path & path)
} }
else if (s == "executable" && type == tpRegular) { else if (s == "executable" && type == tpRegular) {
readString(source); auto s = readString(source);
if (s != "") throw badArchive("executable marker has non-empty value");
sink.isExecutable(); sink.isExecutable();
} }
@ -275,7 +282,7 @@ void parseDump(ParseSink & sink, Source & source)
/* This generally means the integer at the start couldn't be /* This generally means the integer at the start couldn't be
decoded. Ignore and throw the exception below. */ decoded. Ignore and throw the exception below. */
} }
if (version != archiveVersion1) if (version != narVersionMagic1)
throw badArchive("input doesn't look like a Nix archive"); throw badArchive("input doesn't look like a Nix archive");
parse(sink, source, ""); parse(sink, source, "");
} }
View file
@ -55,6 +55,8 @@ extern PathFilter defaultPathFilter;
void dumpPath(const Path & path, Sink & sink, void dumpPath(const Path & path, Sink & sink,
PathFilter & filter = defaultPathFilter); PathFilter & filter = defaultPathFilter);
void dumpString(const std::string & s, Sink & sink);
struct ParseSink struct ParseSink
{ {
virtual void createDirectory(const Path & path) { }; virtual void createDirectory(const Path & path) { };
@ -76,4 +78,7 @@ void restorePath(const Path & path, Source & source);
extern bool useCaseHack; extern bool useCaseHack;
extern const std::string narVersionMagic1;
} }
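dumpString() serialises a string as a NAR containing a single regular file, which is what a binary cache needs for small blobs. A sketch of pairing it with a StringSink and hashing the result, assuming serialise.hh and hash.hh are available as in the rest of libutil:

#include "archive.hh"
#include "hash.hh"
#include "serialise.hh"

#include <iostream>

int main()
{
    nix::StringSink sink;
    nix::dumpString("hello world", sink); // one-file NAR of the given contents

    std::cout << "nar size " << sink.s.size() << ", sha256:"
              << nix::printHash32(nix::hashString(nix::htSHA256, sink.s))
              << "\n";
    return 0;
}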
View file
@ -6,34 +6,83 @@
namespace nix { namespace nix {
/* RAII wrapper around lzma_stream. */
struct LzmaStream
{
lzma_stream strm;
LzmaStream() : strm(LZMA_STREAM_INIT) { };
~LzmaStream() { lzma_end(&strm); };
lzma_stream & operator()() { return strm; }
};
std::string compressXZ(const std::string & in)
{
LzmaStream strm;
// FIXME: apply the x86 BCJ filter?
lzma_ret ret = lzma_easy_encoder(
&strm(), 6, LZMA_CHECK_CRC64);
if (ret != LZMA_OK)
throw Error("unable to initialise lzma encoder");
lzma_action action = LZMA_RUN;
uint8_t outbuf[BUFSIZ];
string res;
strm().next_in = (uint8_t *) in.c_str();
strm().avail_in = in.size();
strm().next_out = outbuf;
strm().avail_out = sizeof(outbuf);
while (true) {
if (strm().avail_in == 0)
action = LZMA_FINISH;
lzma_ret ret = lzma_code(&strm(), action);
if (strm().avail_out == 0 || ret == LZMA_STREAM_END) {
res.append((char *) outbuf, sizeof(outbuf) - strm().avail_out);
strm().next_out = outbuf;
strm().avail_out = sizeof(outbuf);
}
if (ret == LZMA_STREAM_END)
return res;
if (ret != LZMA_OK)
throw Error("error while decompressing xz file");
}
}
std::string decompressXZ(const std::string & in) std::string decompressXZ(const std::string & in)
{ {
lzma_stream strm = LZMA_STREAM_INIT; LzmaStream strm;
lzma_ret ret = lzma_stream_decoder( lzma_ret ret = lzma_stream_decoder(
&strm, UINT64_MAX, LZMA_CONCATENATED); &strm(), UINT64_MAX, LZMA_CONCATENATED);
if (ret != LZMA_OK) if (ret != LZMA_OK)
throw Error("unable to initialise lzma decoder"); throw Error("unable to initialise lzma decoder");
lzma_action action = LZMA_RUN; lzma_action action = LZMA_RUN;
uint8_t outbuf[BUFSIZ]; uint8_t outbuf[BUFSIZ];
string res; string res;
strm.next_in = (uint8_t *) in.c_str(); strm().next_in = (uint8_t *) in.c_str();
strm.avail_in = in.size(); strm().avail_in = in.size();
strm.next_out = outbuf; strm().next_out = outbuf;
strm.avail_out = sizeof(outbuf); strm().avail_out = sizeof(outbuf);
while (true) { while (true) {
if (strm.avail_in == 0) if (strm().avail_in == 0)
action = LZMA_FINISH; action = LZMA_FINISH;
lzma_ret ret = lzma_code(&strm, action); lzma_ret ret = lzma_code(&strm(), action);
if (strm.avail_out == 0 || ret == LZMA_STREAM_END) { if (strm().avail_out == 0 || ret == LZMA_STREAM_END) {
res.append((char *) outbuf, sizeof(outbuf) - strm.avail_out); res.append((char *) outbuf, sizeof(outbuf) - strm().avail_out);
strm.next_out = outbuf; strm().next_out = outbuf;
strm.avail_out = sizeof(outbuf); strm().avail_out = sizeof(outbuf);
} }
if (ret == LZMA_STREAM_END) if (ret == LZMA_STREAM_END)
View file
@ -4,6 +4,8 @@
namespace nix { namespace nix {
std::string compressXZ(const std::string & in);
std::string decompressXZ(const std::string & in); std::string decompressXZ(const std::string & in);
} }
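With compressXZ() added alongside decompressXZ(), a round trip is the quickest sanity check. A sketch, assuming compression.hh is on the include path and liblzma is linked:

#include "compression.hh"

#include <cassert>
#include <string>

int main()
{
    std::string data(1 << 20, 'x');           // 1 MiB of highly compressible input
    std::string xz = nix::compressXZ(data);   // should come out far smaller
    assert(xz.size() < data.size());
    assert(nix::decompressXZ(xz) == data);    // and decompress losslessly
    return 0;
}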
84
src/libutil/lru-cache.hh Normal file
View file
@ -0,0 +1,84 @@
#pragma once
#include <map>
#include <list>
namespace nix {
/* A simple least-recently used cache. Not thread-safe. */
template<typename Key, typename Value>
class LRUCache
{
private:
size_t maxSize;
// Stupid wrapper to get around circular dependency between Data
// and LRU.
struct LRUIterator;
using Data = std::map<Key, std::pair<LRUIterator, Value>>;
using LRU = std::list<typename Data::iterator>;
struct LRUIterator { typename LRU::iterator it; };
Data data;
LRU lru;
public:
LRUCache(size_t maxSize) : maxSize(maxSize) { }
/* Insert or update (upsert) an item in the cache. */
void upsert(const Key & key, const Value & value)
{
erase(key);
if (data.size() >= maxSize) {
/* Retire the oldest item. */
auto oldest = lru.begin();
data.erase(*oldest);
lru.erase(oldest);
}
auto res = data.emplace(key, std::make_pair(LRUIterator(), value));
assert(res.second);
auto & i(res.first);
auto j = lru.insert(lru.end(), i);
i->second.first.it = j;
}
bool erase(const Key & key)
{
auto i = data.find(key);
if (i == data.end()) return false;
lru.erase(i->second.first.it);
data.erase(i);
return true;
}
/* Look up an item in the cache. If it exists, it becomes the most
recently used item. */
// FIXME: use boost::optional?
Value * get(const Key & key)
{
auto i = data.find(key);
if (i == data.end()) return 0;
/* Move this item to the back of the LRU list. */
lru.erase(i->second.first.it);
auto j = lru.insert(lru.end(), i);
i->second.first.it = j;
return &i->second.second;
}
size_t size()
{
return data.size();
}
};
}
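The cache keeps at most maxSize entries and get() refreshes recency, so the eviction order can be checked directly. A small sketch (key/value types and the capacity of 2 are arbitrary):

#include "lru-cache.hh"

#include <cassert>
#include <string>

int main()
{
    nix::LRUCache<std::string, int> cache(2);

    cache.upsert("a", 1);
    cache.upsert("b", 2);
    assert(*cache.get("a") == 1); // "a" becomes the most recently used entry

    cache.upsert("c", 3);         // evicts "b", the least recently used
    assert(!cache.get("b"));
    assert(cache.size() == 2);
    return 0;
}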
151
src/libutil/pool.hh Normal file
View file
@ -0,0 +1,151 @@
#pragma once
#include <functional>
#include <limits>
#include <list>
#include <memory>
#include <cassert>
#include "sync.hh"
#include "ref.hh"
namespace nix {
/* This template class implements a simple pool manager of resources
of some type R, such as database connections. It is used as
follows:
class Connection { ... };
Pool<Connection> pool;
{
auto conn(pool.get());
conn->exec("select ...");
}
Here, the Connection object referenced by conn is automatically
returned to the pool when conn goes out of scope.
*/
template <class R>
class Pool
{
public:
/* A function that produces new instances of R on demand. */
typedef std::function<ref<R>()> Factory;
/* A function that checks whether an instance of R is still
usable. Unusable instances are removed from the pool. */
typedef std::function<bool(const ref<R> &)> Validator;
private:
Factory factory;
Validator validator;
struct State
{
size_t inUse = 0;
size_t max;
std::vector<ref<R>> idle;
};
Sync<State> state;
std::condition_variable wakeup;
public:
Pool(size_t max = std::numeric_limits<size_t>::max(),
const Factory & factory = []() { return make_ref<R>(); },
const Validator & validator = [](ref<R> r) { return true; })
: factory(factory)
, validator(validator)
{
auto state_(state.lock());
state_->max = max;
}
~Pool()
{
auto state_(state.lock());
assert(!state_->inUse);
state_->max = 0;
state_->idle.clear();
}
class Handle
{
private:
Pool & pool;
std::shared_ptr<R> r;
friend Pool;
Handle(Pool & pool, std::shared_ptr<R> r) : pool(pool), r(r) { }
public:
Handle(Handle && h) : pool(h.pool), r(h.r) { h.r.reset(); }
Handle(const Handle & l) = delete;
~Handle()
{
if (!r) return;
{
auto state_(pool.state.lock());
state_->idle.push_back(ref<R>(r));
assert(state_->inUse);
state_->inUse--;
}
pool.wakeup.notify_one();
}
R * operator -> () { return &*r; }
R & operator * () { return *r; }
};
Handle get()
{
{
auto state_(state.lock());
/* If we're over the maximum number of instances, we need
to wait until a slot becomes available. */
while (state_->idle.empty() && state_->inUse >= state_->max)
state_.wait(wakeup);
while (!state_->idle.empty()) {
auto p = state_->idle.back();
state_->idle.pop_back();
if (validator(p)) {
state_->inUse++;
return Handle(*this, p);
}
}
state_->inUse++;
}
/* We need to create a new instance. Because that might take a
while, we don't hold the lock in the meantime. */
try {
Handle h(*this, factory());
return h;
} catch (...) {
auto state_(state.lock());
state_->inUse--;
throw;
}
}
unsigned int count()
{
auto state_(state.lock());
return state_->idle.size() + state_->inUse;
}
};
}
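Beyond the usage in the header comment, the factory and validator hooks let the pool cap, create, and discard resources. A sketch with a stand-in Connection type (the type and its members are made up for illustration):

#include "pool.hh"

#include <iostream>
#include <string>

struct Connection
{
    bool ok = true;
    void exec(const std::string & stmt) { std::cout << stmt << "\n"; }
};

int main()
{
    nix::Pool<Connection> pool(
        /* max */ 2,
        /* factory */ [] { return nix::make_ref<Connection>(); },
        /* validator */ [] (const nix::ref<Connection> & c) { return c->ok; });

    {
        auto conn(pool.get()); // blocks if two handles are already in use
        conn->exec("select 1");
    }                          // the handle goes back to the pool here

    std::cout << pool.count() << " connection(s) pooled\n";
    return 0;
}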
67
src/libutil/ref.hh Normal file
View file
@ -0,0 +1,67 @@
#pragma once
#include <memory>
#include <exception>
namespace nix {
/* A simple non-nullable reference-counted pointer. Actually a wrapper
around std::shared_ptr that prevents non-null constructions. */
template<typename T>
class ref
{
private:
std::shared_ptr<T> p;
public:
ref<T>(const ref<T> & r)
: p(r.p)
{ }
explicit ref<T>(const std::shared_ptr<T> & p)
: p(p)
{
if (!p)
throw std::invalid_argument("null pointer cast to ref");
}
T* operator ->() const
{
return &*p;
}
T& operator *() const
{
return *p;
}
operator std::shared_ptr<T> ()
{
return p;
}
template<typename T2>
operator ref<T2> ()
{
return ref<T2>((std::shared_ptr<T2>) p);
}
private:
template<typename T2, typename... Args>
friend ref<T2>
make_ref(Args&&... args);
};
template<typename T, typename... Args>
inline ref<T>
make_ref(Args&&... args)
{
auto p = std::make_shared<T>(std::forward<Args>(args)...);
return ref<T>(p);
}
}
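ref<T> differs from a bare std::shared_ptr in that a null value is rejected at construction, so code holding a ref never has to check for null. A minimal sketch:

#include "ref.hh"

#include <iostream>
#include <memory>
#include <stdexcept>
#include <string>

int main()
{
    auto s = nix::make_ref<std::string>("hello"); // guaranteed non-null
    std::cout << *s << " has " << s->size() << " chars\n";

    std::shared_ptr<std::string> empty;
    try {
        nix::ref<std::string> bad(empty); // throws std::invalid_argument
    } catch (const std::invalid_argument & e) {
        std::cout << "rejected: " << e.what() << "\n";
    }
    return 0;
}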
View file
@ -72,7 +72,17 @@ void FdSink::write(const unsigned char * data, size_t len)
warned = true; warned = true;
} }
} }
writeFull(fd, data, len); try {
writeFull(fd, data, len);
} catch (SysError & e) {
_good = false; // remember the failure so that good() reports it afterwards
}
}
bool FdSink::good()
{
return _good;
} }
@ -119,12 +129,18 @@ size_t FdSource::readUnbuffered(unsigned char * data, size_t len)
checkInterrupt(); checkInterrupt();
n = ::read(fd, (char *) data, bufSize); n = ::read(fd, (char *) data, bufSize);
} while (n == -1 && errno == EINTR); } while (n == -1 && errno == EINTR);
if (n == -1) throw SysError("reading from file"); if (n == -1) { _good = false; throw SysError("reading from file"); }
if (n == 0) throw EndOfFile("unexpected end-of-file"); if (n == 0) { _good = false; throw EndOfFile("unexpected end-of-file"); }
return n; return n;
} }
bool FdSource::good()
{
return _good;
}
size_t StringSource::read(unsigned char * data, size_t len) size_t StringSource::read(unsigned char * data, size_t len)
{ {
if (pos == s.size()) throw EndOfFile("end of string reached"); if (pos == s.size()) throw EndOfFile("end of string reached");
View file
@ -12,6 +12,7 @@ struct Sink
{ {
virtual ~Sink() { } virtual ~Sink() { }
virtual void operator () (const unsigned char * data, size_t len) = 0; virtual void operator () (const unsigned char * data, size_t len) = 0;
virtual bool good() { return true; }
}; };
@ -25,7 +26,7 @@ struct BufferedSink : Sink
: bufSize(bufSize), bufPos(0), buffer(0) { } : bufSize(bufSize), bufPos(0), buffer(0) { }
~BufferedSink(); ~BufferedSink();
void operator () (const unsigned char * data, size_t len); void operator () (const unsigned char * data, size_t len) override;
void flush(); void flush();
@ -47,6 +48,8 @@ struct Source
return the number of bytes stored. If blocks until at least return the number of bytes stored. If blocks until at least
one byte is available. */ one byte is available. */
virtual size_t read(unsigned char * data, size_t len) = 0; virtual size_t read(unsigned char * data, size_t len) = 0;
virtual bool good() { return true; }
}; };
@ -60,7 +63,7 @@ struct BufferedSource : Source
: bufSize(bufSize), bufPosIn(0), bufPosOut(0), buffer(0) { } : bufSize(bufSize), bufPosIn(0), bufPosOut(0), buffer(0) { }
~BufferedSource(); ~BufferedSource();
size_t read(unsigned char * data, size_t len); size_t read(unsigned char * data, size_t len) override;
/* Underlying read call, to be overridden. */ /* Underlying read call, to be overridden. */
virtual size_t readUnbuffered(unsigned char * data, size_t len) = 0; virtual size_t readUnbuffered(unsigned char * data, size_t len) = 0;
@ -80,7 +83,12 @@ struct FdSink : BufferedSink
FdSink(int fd) : fd(fd), warn(false), written(0) { } FdSink(int fd) : fd(fd), warn(false), written(0) { }
~FdSink(); ~FdSink();
void write(const unsigned char * data, size_t len); void write(const unsigned char * data, size_t len) override;
bool good() override;
private:
bool _good = true;
}; };
@ -90,7 +98,10 @@ struct FdSource : BufferedSource
int fd; int fd;
FdSource() : fd(-1) { } FdSource() : fd(-1) { }
FdSource(int fd) : fd(fd) { } FdSource(int fd) : fd(fd) { }
size_t readUnbuffered(unsigned char * data, size_t len); size_t readUnbuffered(unsigned char * data, size_t len) override;
bool good() override;
private:
bool _good = true;
}; };
@ -98,7 +109,7 @@ struct FdSource : BufferedSource
struct StringSink : Sink struct StringSink : Sink
{ {
string s; string s;
void operator () (const unsigned char * data, size_t len); void operator () (const unsigned char * data, size_t len) override;
}; };
@ -108,7 +119,7 @@ struct StringSource : Source
const string & s; const string & s;
size_t pos; size_t pos;
StringSource(const string & _s) : s(_s), pos(0) { } StringSource(const string & _s) : s(_s), pos(0) { }
size_t read(unsigned char * data, size_t len); size_t read(unsigned char * data, size_t len) override;
}; };
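The good() flag exists so that a writer can keep streaming even if the peer goes away and report the failure afterwards, rather than letting a SysError escape mid-dump. A sketch of the intended pattern, assuming an already-open file descriptor (the function name is made up):

#include "archive.hh"
#include "serialise.hh"

#include <iostream>

void exportTo(int fd, const nix::Path & path)
{
    nix::FdSink sink(fd);
    nix::dumpPath(path, sink); // write errors are recorded, not thrown
    sink.flush();
    if (!sink.good())
        std::cerr << "warning: receiver stopped reading before the dump finished\n";
}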
78
src/libutil/sync.hh Normal file
View file
@ -0,0 +1,78 @@
#pragma once
#include <mutex>
#include <condition_variable>
#include <cassert>
namespace nix {
/* This template class ensures synchronized access to a value of type
T. It is used as follows:
struct Data { int x; ... };
Sync<Data> data;
{
auto data_(data.lock());
data_->x = 123;
}
Here, "data" is automatically unlocked when "data_" goes out of
scope.
*/
template<class T>
class Sync
{
private:
std::mutex mutex;
T data;
public:
Sync() { }
Sync(const T & data) : data(data) { }
class Lock
{
private:
Sync * s;
std::unique_lock<std::mutex> lk;
friend Sync;
Lock(Sync * s) : s(s), lk(s->mutex) { }
public:
Lock(Lock && l) : s(l.s) { abort(); }
Lock(const Lock & l) = delete;
~Lock() { }
T * operator -> () { return &s->data; }
T & operator * () { return s->data; }
void wait(std::condition_variable & cv)
{
assert(s);
cv.wait(lk);
}
template<class Rep, class Period, class Predicate>
bool wait_for(std::condition_variable & cv,
const std::chrono::duration<Rep, Period> & duration,
Predicate pred)
{
assert(s);
return cv.wait_for(lk, duration, pred);
}
template<class Clock, class Duration>
std::cv_status wait_until(std::condition_variable & cv,
const std::chrono::time_point<Clock, Duration> & duration)
{
assert(s);
return cv.wait_until(lk, duration);
}
};
Lock lock() { return Lock(this); }
};
}
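Combined with the wait() helpers and a condition_variable, Sync<T> covers the usual producer/consumer handshake without any manual unlock/lock pairs. A sketch (the queue contents are arbitrary):

#include "sync.hh"

#include <condition_variable>
#include <iostream>
#include <thread>
#include <vector>

int main()
{
    struct State { std::vector<int> queue; bool done = false; };
    nix::Sync<State> state;
    std::condition_variable wakeup;

    std::thread producer([&] {
        for (int i = 0; i < 3; ++i) {
            { auto s(state.lock()); s->queue.push_back(i); }
            wakeup.notify_one();
        }
        { auto s(state.lock()); s->done = true; }
        wakeup.notify_one();
    });

    while (true) {
        auto s(state.lock());
        if (!s->queue.empty()) {
            std::cout << "got " << s->queue.back() << "\n";
            s->queue.pop_back();
        } else if (s->done) {
            break;
        } else {
            s.wait(wakeup); // releases the lock while blocked
        }
    }

    producer.join();
    return 0;
}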
View file
@ -2,6 +2,8 @@
#include "config.h" #include "config.h"
#include "ref.hh"
#include <string> #include <string>
#include <list> #include <list>
#include <set> #include <set>
@ -97,70 +99,4 @@ typedef enum {
} Verbosity; } Verbosity;
/* A simple non-nullable reference-counted pointer. Actually a wrapper
around std::shared_ptr that prevents non-null constructions. */
template<typename T>
class ref
{
private:
std::shared_ptr<T> p;
public:
ref<T>(const ref<T> & r)
: p(r.p)
{ }
explicit ref<T>(const std::shared_ptr<T> & p)
: p(p)
{
if (!p)
throw std::invalid_argument("null pointer cast to ref");
}
T* operator ->() const
{
return &*p;
}
T& operator *() const
{
return *p;
}
operator std::shared_ptr<T> ()
{
return p;
}
private:
template<typename T2, typename... Args>
friend ref<T2>
make_ref(Args&&... args);
template<typename T2, typename T3, typename... Args>
friend ref<T2>
make_ref(Args&&... args);
};
template<typename T, typename... Args>
inline ref<T>
make_ref(Args&&... args)
{
auto p = std::make_shared<T>(std::forward<Args>(args)...);
return ref<T>(p);
}
template<typename T, typename T2, typename... Args>
inline ref<T>
make_ref(Args&&... args)
{
auto p = std::make_shared<T2>(std::forward<Args>(args)...);
return ref<T>(p);
}
} }
View file
@ -320,9 +320,11 @@ static void _deletePath(const Path & path, unsigned long long & bytesFreed)
{ {
checkInterrupt(); checkInterrupt();
printMsg(lvlVomit, format("%1%") % path); struct stat st;
if (lstat(path.c_str(), &st) == -1) {
struct stat st = lstat(path); if (errno == ENOENT) return;
throw SysError(format("getting status of %1%") % path);
}
if (!S_ISDIR(st.st_mode) && st.st_nlink == 1) if (!S_ISDIR(st.st_mode) && st.st_nlink == 1)
bytesFreed += st.st_blocks * 512; bytesFreed += st.st_blocks * 512;
@ -338,8 +340,10 @@ static void _deletePath(const Path & path, unsigned long long & bytesFreed)
_deletePath(path + "/" + i.name, bytesFreed); _deletePath(path + "/" + i.name, bytesFreed);
} }
if (remove(path.c_str()) == -1) if (remove(path.c_str()) == -1) {
if (errno == ENOENT) return;
throw SysError(format("cannot unlink %1%") % path); throw SysError(format("cannot unlink %1%") % path);
}
} }
View file
@ -92,8 +92,8 @@ string readLine(int fd);
void writeLine(int fd, string s); void writeLine(int fd, string s);
/* Delete a path; i.e., in the case of a directory, it is deleted /* Delete a path; i.e., in the case of a directory, it is deleted
recursively. Don't use this at home, kids. The second variant recursively. It's not an error if the path does not exist. The
returns the number of bytes and blocks freed. */ second variant returns the number of bytes and blocks freed. */
void deletePath(const Path & path); void deletePath(const Path & path);
void deletePath(const Path & path, unsigned long long & bytesFreed); void deletePath(const Path & path, unsigned long long & bytesFreed);
@ -366,6 +366,14 @@ template<class N> bool string2Int(const string & s, N & n)
return str && str.get() == EOF; return str && str.get() == EOF;
} }
/* Parse a string into a float. */
template<class N> bool string2Float(const string & s, N & n)
{
std::istringstream str(s);
str >> n;
return str && str.get() == EOF;
}
/* Return true iff `s' ends in `suffix'. */ /* Return true iff `s' ends in `suffix'. */
bool hasSuffix(const string & s, const string & suffix); bool hasSuffix(const string & s, const string & suffix);
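string2Float() follows the same contract as string2Int() just above it: the conversion succeeds only if the entire string parses. A tiny sketch, assuming util.hh is on the include path:

#include "util.hh"

#include <cassert>

int main()
{
    double d;
    assert(nix::string2Float("37.7668", d));
    assert(!nix::string2Float("1.5x", d)); // trailing junk is rejected
    return 0;
}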
View file
@ -82,7 +82,7 @@ int main(int argc, char * * argv)
// Run the actual garbage collector. // Run the actual garbage collector.
if (!dryRun) { if (!dryRun) {
auto store = openStore(false); auto store = openStore();
options.action = GCOptions::gcDeleteDead; options.action = GCOptions::gcDeleteDead;
GCResults results; GCResults results;
PrintFreed freed(true, results); PrintFreed freed(true, results);
View file
@ -515,7 +515,7 @@ static void performOp(ref<LocalStore> store, bool trusted, unsigned int clientVe
startWork(); startWork();
ValidPathInfo info = store->queryPathInfo(path); ValidPathInfo info = store->queryPathInfo(path);
stopWork(); stopWork();
to << info.deriver << printHash(info.hash) << info.references to << info.deriver << printHash(info.narHash) << info.references
<< info.registrationTime << info.narSize; << info.registrationTime << info.narSize;
break; break;
} }
@ -562,9 +562,8 @@ static void processConnection(bool trusted)
if (GET_PROTOCOL_MINOR(clientVersion) >= 14 && readInt(from)) if (GET_PROTOCOL_MINOR(clientVersion) >= 14 && readInt(from))
setAffinityTo(readInt(from)); setAffinityTo(readInt(from));
bool reserveSpace = true;
if (GET_PROTOCOL_MINOR(clientVersion) >= 11) if (GET_PROTOCOL_MINOR(clientVersion) >= 11)
reserveSpace = readInt(from) != 0; readInt(from); // obsolete reserveSpace
/* Send startup error messages to the client. */ /* Send startup error messages to the client. */
startWork(); startWork();
@ -582,7 +581,7 @@ static void processConnection(bool trusted)
#endif #endif
/* Open the store. */ /* Open the store. */
auto store = make_ref<LocalStore>(reserveSpace); auto store = make_ref<LocalStore>();
stopWork(); stopWork();
to.flush(); to.flush();
View file
@ -1127,6 +1127,10 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs)
attrs2["type"] = "int"; attrs2["type"] = "int";
attrs2["value"] = (format("%1%") % v->integer).str(); attrs2["value"] = (format("%1%") % v->integer).str();
xml.writeEmptyElement("meta", attrs2); xml.writeEmptyElement("meta", attrs2);
} else if (v->type == tFloat) {
attrs2["type"] = "float";
attrs2["value"] = (format("%1%") % v->fpoint).str();
xml.writeEmptyElement("meta", attrs2);
} else if (v->type == tBool) { } else if (v->type == tBool) {
attrs2["type"] = "bool"; attrs2["type"] = "bool";
attrs2["value"] = v->boolean ? "true" : "false"; attrs2["value"] = v->boolean ? "true" : "false";
View file
@ -32,7 +32,7 @@ static bool indirectRoot = false;
enum OutputKind { okPlain, okXML, okJSON }; enum OutputKind { okPlain, okXML, okJSON };
void processExpr(ref<Store> store, EvalState & state, const Strings & attrPaths, void processExpr(EvalState & state, const Strings & attrPaths,
bool parseOnly, bool strict, Bindings & autoArgs, bool parseOnly, bool strict, Bindings & autoArgs,
bool evalOnly, OutputKind output, bool location, Expr * e) bool evalOnly, OutputKind output, bool location, Expr * e)
{ {
@ -79,7 +79,7 @@ void processExpr(ref<Store> store, EvalState & state, const Strings & attrPaths,
else { else {
Path rootName = gcRoot; Path rootName = gcRoot;
if (++rootNr > 1) rootName += "-" + std::to_string(rootNr); if (++rootNr > 1) rootName += "-" + std::to_string(rootNr);
drvPath = addPermRoot(store, drvPath, rootName, indirectRoot); drvPath = state.store->addPermRoot(drvPath, rootName, indirectRoot);
} }
std::cout << format("%1%%2%\n") % drvPath % (outputName != "out" ? "!" + outputName : ""); std::cout << format("%1%%2%\n") % drvPath % (outputName != "out" ? "!" + outputName : "");
} }
@ -177,7 +177,7 @@ int main(int argc, char * * argv)
if (readStdin) { if (readStdin) {
Expr * e = parseStdin(state); Expr * e = parseStdin(state);
processExpr(store, state, attrPaths, parseOnly, strict, autoArgs, processExpr(state, attrPaths, parseOnly, strict, autoArgs,
evalOnly, outputKind, xmlOutputSourceLocation, e); evalOnly, outputKind, xmlOutputSourceLocation, e);
} else if (files.empty() && !fromArgs) } else if (files.empty() && !fromArgs)
files.push_back("./default.nix"); files.push_back("./default.nix");
@ -186,7 +186,7 @@ int main(int argc, char * * argv)
Expr * e = fromArgs Expr * e = fromArgs
? state.parseExprFromString(i, absPath(".")) ? state.parseExprFromString(i, absPath("."))
: state.parseExprFromFile(resolveExprPath(lookupFileArg(state, i))); : state.parseExprFromFile(resolveExprPath(lookupFileArg(state, i)));
processExpr(store, state, attrPaths, parseOnly, strict, autoArgs, processExpr(state, attrPaths, parseOnly, strict, autoArgs,
evalOnly, outputKind, xmlOutputSourceLocation, e); evalOnly, outputKind, xmlOutputSourceLocation, e);
} }
View file
@ -84,7 +84,7 @@ static PathSet realisePath(Path path, bool build = true)
Path rootName = gcRoot; Path rootName = gcRoot;
if (rootNr > 1) rootName += "-" + std::to_string(rootNr); if (rootNr > 1) rootName += "-" + std::to_string(rootNr);
if (i->first != "out") rootName += "-" + i->first; if (i->first != "out") rootName += "-" + i->first;
outPath = addPermRoot(ref<Store>(store), outPath, rootName, indirectRoot); outPath = store->addPermRoot(outPath, rootName, indirectRoot);
} }
outputs.insert(outPath); outputs.insert(outPath);
} }
@ -100,7 +100,7 @@ static PathSet realisePath(Path path, bool build = true)
Path rootName = gcRoot; Path rootName = gcRoot;
rootNr++; rootNr++;
if (rootNr > 1) rootName += "-" + std::to_string(rootNr); if (rootNr > 1) rootName += "-" + std::to_string(rootNr);
path = addPermRoot(ref<Store>(store), path, rootName, indirectRoot); path = store->addPermRoot(path, rootName, indirectRoot);
} }
return singleton<PathSet>(path); return singleton<PathSet>(path);
} }
@ -374,8 +374,8 @@ static void opQuery(Strings opFlags, Strings opArgs)
for (auto & j : paths) { for (auto & j : paths) {
ValidPathInfo info = store->queryPathInfo(j); ValidPathInfo info = store->queryPathInfo(j);
if (query == qHash) { if (query == qHash) {
assert(info.hash.type == htSHA256); assert(info.narHash.type == htSHA256);
cout << format("sha256:%1%\n") % printHash32(info.hash); cout << format("sha256:%1%\n") % printHash32(info.narHash);
} else if (query == qSize) } else if (query == qSize)
cout << format("%1%\n") % info.narSize; cout << format("%1%\n") % info.narSize;
} }
@ -567,7 +567,7 @@ static void registerValidity(bool reregister, bool hashGiven, bool canonicalise)
canonicalisePathMetaData(info.path, -1); canonicalisePathMetaData(info.path, -1);
if (!hashGiven) { if (!hashGiven) {
HashResult hash = hashPath(htSHA256, info.path); HashResult hash = hashPath(htSHA256, info.path);
info.hash = hash.first; info.narHash = hash.first;
info.narSize = hash.second; info.narSize = hash.second;
} }
infos.push_back(info); infos.push_back(info);
@ -783,11 +783,11 @@ static void opVerifyPath(Strings opFlags, Strings opArgs)
Path path = followLinksToStorePath(i); Path path = followLinksToStorePath(i);
printMsg(lvlTalkative, format("checking path %1%...") % path); printMsg(lvlTalkative, format("checking path %1%...") % path);
ValidPathInfo info = store->queryPathInfo(path); ValidPathInfo info = store->queryPathInfo(path);
HashResult current = hashPath(info.hash.type, path); HashResult current = hashPath(info.narHash.type, path);
if (current.first != info.hash) { if (current.first != info.narHash) {
printMsg(lvlError, printMsg(lvlError,
format("path %1% was modified! expected hash %2%, got %3%") format("path %1% was modified! expected hash %2%, got %3%")
% path % printHash(info.hash) % printHash(current.first)); % path % printHash(info.narHash) % printHash(current.first));
status = 1; status = 1;
} }
} }
@ -1131,7 +1131,7 @@ int main(int argc, char * * argv)
if (!op) throw UsageError("no operation specified"); if (!op) throw UsageError("no operation specified");
if (op != opDump && op != opRestore) /* !!! hack */ if (op != opDump && op != opRestore) /* !!! hack */
store = openStore(op != opGC); store = openStore();
op(opFlags, opArgs); op(opFlags, opArgs);
}); });
View file
@ -15,7 +15,7 @@ in
as.a.b.c or as.x.y.z as.a.b.c or as.x.y.z
as.x.y.bla or bs.f-o-o.bar or "xyzzy" as.x.y.bla or bs.f-o-o.bar or "xyzzy"
as.x.y.bla or bs.bar.foo or "xyzzy" as.x.y.bla or bs.bar.foo or "xyzzy"
123.bla or null.foo or "xyzzy" (123).bla or null.foo or "xyzzy"
# Backwards compatibility test. # Backwards compatibility test.
(fold or [] [true false false]) (fold or [] [true false false])
] ]
View file
@ -12,7 +12,9 @@ builtins.fromJSON
"Width": 100 "Width": 100
}, },
"Animated" : false, "Animated" : false,
"IDs": [116, 943, 234, 38793, true ,false,null, -100] "IDs": [116, 943, 234, 38793, true ,false,null, -100],
"Latitude": 37.7668,
"Longitude": -122.3959
} }
} }
'' ''
@ -28,5 +30,7 @@ builtins.fromJSON
}; };
Animated = false; Animated = false;
IDs = [ 116 943 234 38793 true false null (0-100) ]; IDs = [ 116 943 234 38793 true false null (0-100) ];
Latitude = 37.7668;
Longitude = -122.3959;
}; };
} }
View file
@ -1 +1 @@
"{\"a\":123,\"b\":-456,\"c\":\"foo\",\"d\":\"foo\\n\\\"bar\\\"\",\"e\":true,\"f\":false,\"g\":[1,2,3],\"h\":[\"a\",[\"b\",{\"foo\\nbar\":{}}]],\"i\":3}" "{\"a\":123,\"b\":-456,\"c\":\"foo\",\"d\":\"foo\\n\\\"bar\\\"\",\"e\":true,\"f\":false,\"g\":[1,2,3],\"h\":[\"a\",[\"b\",{\"foo\\nbar\":{}}]],\"i\":3,\"j\":1.44}"
View file
@ -8,4 +8,5 @@ builtins.toJSON
g = [ 1 2 3 ]; g = [ 1 2 3 ];
h = [ "a" [ "b" { "foo\nbar" = {}; } ] ]; h = [ "a" [ "b" { "foo\nbar" = {}; } ] ];
i = 1 + 2; i = 1 + 2;
j = 1.44;
} }
View file
@ -1 +1 @@
[ true false true false true false true false true false true false "int" "bool" "string" "null" "set" "list" "lambda" "lambda" "lambda" "lambda" ] [ true false true false true false true false true true true true true true true true true true true false true false "int" "bool" "string" "null" "set" "list" "lambda" "lambda" "lambda" "lambda" ]
View file
@ -8,6 +8,16 @@ with builtins;
(isString [ "x" ]) (isString [ "x" ])
(isInt (1 + 2)) (isInt (1 + 2))
(isInt { x = 123; }) (isInt { x = 123; })
(isInt (1 / 2))
(isInt (1 + 1))
(isInt (1 / 2))
(isInt (1 * 2))
(isInt (1 - 2))
(isFloat (1.2))
(isFloat (1 + 1.0))
(isFloat (1 / 2.0))
(isFloat (1 * 2.0))
(isFloat (1 - 2.0))
(isBool (true && false)) (isBool (true && false))
(isBool null) (isBool null)
(isAttrs { x = 123; }) (isAttrs { x = 123; })
View file
@ -45,5 +45,8 @@
<attr name="x"> <attr name="x">
<int value="123" /> <int value="123" />
</attr> </attr>
<attr name="y">
<float value="567.89" />
</attr>
</attrs> </attrs>
</expr> </expr>
View file
@ -2,6 +2,8 @@ rec {
x = 123; x = 123;
y = 567.890;
a = "foo"; a = "foo";
b = "bar"; b = "bar";