style: format entire depot with nixpkgs-fmt
This CL can be used to compare the style of nixpkgs-fmt against other
formatters (nixpkgs, alejandra).

Change-Id: I87c6abff6bcb546b02ead15ad0405f81e01b6d9e
Reviewed-on: https://cl.tvl.fyi/c/depot/+/4397
Tested-by: BuildkiteCI
Reviewed-by: sterni <sternenseemann@systemli.org>
Reviewed-by: lukegb <lukegb@tvl.fyi>
Reviewed-by: wpcarro <wpcarro@gmail.com>
Reviewed-by: Profpatsch <mail@profpatsch.de>
Reviewed-by: kanepyork <rikingcoding@gmail.com>
Reviewed-by: tazjin <tazjin@tvl.su>
Reviewed-by: cynthia <cynthia@tvl.fyi>
Reviewed-by: edef <edef@edef.eu>
Reviewed-by: eta <tvl@eta.st>
Reviewed-by: grfn <grfn@gws.fyi>
parent 2d10d60fac
commit aa122cbae7
310 changed files with 7278 additions and 5490 deletions
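The recurring rewrites, distilled into a minimal before/after sketch for
orientation (hand-written here, not part of the commit itself):

    # before
    { pkgs ? import <nixpkgs> {}, ... }:
    let readTree = import ./nix/readTree {};
    in readTree.fix (self: self)

    # after nixpkgs-fmt: empty {} gains an inner space, and a top-level
    # `in` moves onto its own line
    { pkgs ? import <nixpkgs> { }, ... }:
    let readTree = import ./nix/readTree { };
    in
    readTree.fix (self: self)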
@@ -30,7 +30,8 @@ let
     </style>
   '';
 };
-in pkgs.runCommandNoCC "corp-website" {} ''
+in
+pkgs.runCommandNoCC "corp-website" { } ''
   mkdir $out
   cp ${index} $out/index.html
 ''
default.nix (19 lines changed)
@@ -4,14 +4,16 @@

 { nixpkgsBisectPath ? null
 , parentTargetMap ? null
-, nixpkgsConfig ? {}, ... }@args:
+, nixpkgsConfig ? { }
+, ...
+}@args:

 let
   inherit (builtins)
     filter
     ;

-  readTree = import ./nix/readTree {};
+  readTree = import ./nix/readTree { };

   # Disallow access to //users from other depot parts.
   usersFilter = readTree.restrictFolder {
@@ -70,7 +72,8 @@ let
   # Is this tree node eligible for build inclusion?
   eligible = node: (node ? outPath) && !(node.meta.ci.skip or false);

-in readTree.fix(self: (readDepot {
+in
+readTree.fix (self: (readDepot {
   depot = self;

   # Pass third_party as 'pkgs' (for compatibility with external
@@ -110,8 +113,10 @@ in readTree.fix(self: (readDepot {
   });

   # Derivation that gcroots all depot targets.
-  ci.gcroot = with self.third_party.nixpkgs; makeSetupHook {
-    name = "depot-gcroot";
-    deps = self.ci.targets;
-  } emptyFile;
+  ci.gcroot = with self.third_party.nixpkgs; makeSetupHook
+    {
+      name = "depot-gcroot";
+      deps = self.ci.targets;
+    }
+    emptyFile;
 })
@@ -33,7 +33,8 @@ let
     cp ${frontend} $out/index.html
   ''}/")
 '';
-in depot.nix.buildLisp.program {
+in
+depot.nix.buildLisp.program {
   name = "gemma";

   deps = with depot.third_party.lisp; [
@@ -4,19 +4,20 @@ let
   inherit (pkgs) python3 python3Packages;

   opts = {
     pname = "idualctl";
     version = "0.1";
     src = ./.;

     propagatedBuildInputs = [
       depot.third_party.python.broadlink
     ];
   };
   package = python3Packages.buildPythonPackage opts;
   script = python3Packages.buildPythonApplication opts;
-in depot.nix.readTree.drvTargets {
+in
+depot.nix.readTree.drvTargets {
   inherit script;
   python = python3.withPackages (_: [ package ]);
   setAlarm = pkgs.writeShellScriptBin "set-alarm" ''
     echo "setting an alarm for ''${1}"
     ${pkgs.systemd}/bin/systemd-run --user --on-calendar="''${1} Europe/London" --unit=light-alarm.service
@@ -1,6 +1,7 @@
-{ depot ? (import ../../../. {})
+{ depot ? (import ../../../. { })
 , pkgs ? depot.third_party.nixpkgs
-, ... }:
+, ...
+}:

 let
   basePkg = pkgs.haskellPackages.callPackage ./pkg.nix { };
@@ -1,5 +1,15 @@
-{ mkDerivation, base, bytestring, chatter, containers, envy
-, irc-client, lens, lib, random, relude, text
+{ mkDerivation
+, base
+, bytestring
+, chatter
+, containers
+, envy
+, irc-client
+, lens
+, lib
+, random
+, relude
+, text
 }:
 mkDerivation {
   pname = "owothia";
@@ -8,8 +18,16 @@ mkDerivation {
   isLibrary = false;
   isExecutable = true;
   executableHaskellDepends = [
-    base bytestring chatter containers envy irc-client lens random
-    relude text
+    base
+    bytestring
+    chatter
+    containers
+    envy
+    irc-client
+    lens
+    random
+    relude
+    text
   ];
   license = "unknown";
   hydraPlatforms = lib.platforms.none;
@@ -1,4 +1,4 @@
-{ pkgs ? (import ../../../. {}).third_party, ... }:
+{ pkgs ? (import ../../../. { }).third_party, ... }:

 let
   inherit (pkgs)
@@ -12,7 +12,8 @@ let
     gopkgs."github.com".pkg.browser.gopkg
   ];
 };
-in uggc.overrideAttrs(old: {
+in
+uggc.overrideAttrs (old: {
   buildCommand = old.buildCommand + ''
     install -D ${./uggc.desktop} $out/share/applications/uggc.desktop
     sed "s|@out@|$out|g" -i $out/share/applications/uggc.desktop
@@ -38,6 +38,7 @@ let
     "ecl" # refuses to create non-ASCII paths even on POSIX…
   ];
 };
-in bin // {
+in
+bin // {
   inherit lib;
 }
@@ -10,7 +10,7 @@
 # with `binify { exe = …; name = "hello" }`.
 { exe, name }:

-pkgs.runCommandLocal "${name}-bin" {} ''
+pkgs.runCommandLocal "${name}-bin" { } ''
   mkdir -p $out/bin
   ln -sT ${lib.escapeShellArg exe} $out/bin/${lib.escapeShellArg name}
 ''
@@ -4,8 +4,9 @@
 # buildGo provides Nix functions to build Go packages in the style of Bazel's
 # rules_go.

-{ pkgs ? import <nixpkgs> {}
-, ... }:
+{ pkgs ? import <nixpkgs> { }
+, ...
+}:

 let
   inherit (builtins)
@@ -40,7 +41,7 @@ let

   xFlags = x_defs: spaceOut (map (k: "-X ${k}=${x_defs."${k}"}") (attrNames x_defs));

-  pathToName = p: replaceStrings ["/"] ["_"] (toString p);
+  pathToName = p: replaceStrings [ "/" ] [ "_" ] (toString p);

   # Add an `overrideGo` attribute to a function result that works
   # similar to `overrideAttrs`, but is used specifically for the
@@ -52,49 +53,50 @@ let
   # High-level build functions

   # Build a Go program out of the specified files and dependencies.
-  program = { name, srcs, deps ? [], x_defs ? {} }:
+  program = { name, srcs, deps ? [ ], x_defs ? { } }:
     let uniqueDeps = allDeps (map (d: d.gopkg) deps);
-    in runCommand name {} ''
+    in runCommand name { } ''
       ${go}/bin/go tool compile -o ${name}.a -trimpath=$PWD -trimpath=${go} ${includeSources uniqueDeps} ${spaceOut srcs}
       mkdir -p $out/bin
       export GOROOT_FINAL=go
       ${go}/bin/go tool link -o $out/bin/${name} -buildid nix ${xFlags x_defs} ${includeLibs uniqueDeps} ${name}.a
     '';

   # Build a Go library assembled out of the specified files.
   #
   # This outputs both the sources and compiled binary, as both are
   # needed when downstream packages depend on it.
-  package = { name, srcs, deps ? [], path ? name, sfiles ? [] }:
+  package = { name, srcs, deps ? [ ], path ? name, sfiles ? [ ] }:
     let
       uniqueDeps = allDeps (map (d: d.gopkg) deps);

       # The build steps below need to be executed conditionally for Go
       # assembly if the analyser detected any *.s files.
       #
       # This is required for several popular packages (e.g. x/sys).
-      ifAsm = do: lib.optionalString (sfiles != []) do;
+      ifAsm = do: lib.optionalString (sfiles != [ ]) do;
       asmBuild = ifAsm ''
         ${go}/bin/go tool asm -trimpath $PWD -I $PWD -I ${go}/share/go/pkg/include -D GOOS_linux -D GOARCH_amd64 -gensymabis -o ./symabis ${spaceOut sfiles}
         ${go}/bin/go tool asm -trimpath $PWD -I $PWD -I ${go}/share/go/pkg/include -D GOOS_linux -D GOARCH_amd64 -o ./asm.o ${spaceOut sfiles}
       '';
       asmLink = ifAsm "-symabis ./symabis -asmhdr $out/go_asm.h";
       asmPack = ifAsm ''
         ${go}/bin/go tool pack r $out/${path}.a ./asm.o
       '';

-      gopkg = (runCommand "golib-${name}" {} ''
+      gopkg = (runCommand "golib-${name}" { } ''
         mkdir -p $out/${path}
         ${srcList path (map (s: "${s}") srcs)}
         ${asmBuild}
         ${go}/bin/go tool compile -pack ${asmLink} -o $out/${path}.a -trimpath=$PWD -trimpath=${go} -p ${path} ${includeSources uniqueDeps} ${spaceOut srcs}
         ${asmPack}
       '') // {
         inherit gopkg;
         goDeps = uniqueDeps;
         goImportPath = path;
       };
-    in gopkg;
+    in
+    gopkg;

   # Build a tree of Go libraries out of an external Go source
   # directory that follows the standard Go layout and was not built
@@ -110,10 +112,10 @@ let
   };

   # Build a Go library out of the specified protobuf definition.
-  proto = { name, proto, path ? name, goPackage ? name, extraDeps ? [] }: (makeOverridable package) {
+  proto = { name, proto, path ? name, goPackage ? name, extraDeps ? [ ] }: (makeOverridable package) {
     inherit name path;
     deps = [ protoLibs.goProto.proto.gopkg ] ++ extraDeps;
-    srcs = lib.singleton (runCommand "goproto-${name}.pb.go" {} ''
+    srcs = lib.singleton (runCommand "goproto-${name}.pb.go" { } ''
       cp ${proto} ${baseNameOf proto}
       ${protobuf}/bin/protoc --plugin=${protoLibs.goProto.protoc-gen-go.gopkg}/bin/protoc-gen-go \
         --go_out=plugins=grpc,import_path=${baseNameOf path}:. ${baseNameOf proto}
@@ -124,7 +126,8 @@ let
   # Build a Go library out of the specified gRPC definition.
   grpc = args: proto (args // { extraDeps = [ protoLibs.goGrpc.gopkg ]; });

-in {
+in
+{
   # Only the high-level builder functions are exposed, but made
   # overrideable.
   program = makeOverridable program;
@@ -8,7 +8,7 @@
 # users a quick introduction to how to use buildGo.

 let
-  buildGo = import ../default.nix {};
+  buildGo = import ../default.nix { };

   # Example use of buildGo.package, which creates an importable Go
   # package from the specified source files.
@@ -29,7 +29,8 @@ let
   # Example use of buildGo.program, which builds an executable using
   # the specified name and dependencies (which in turn must have been
   # created via buildGo.package etc.)
-in buildGo.program {
+in
+buildGo.program {
   name = "example";

   srcs = [
nix/buildGo/external/default.nix (48 lines changed, vendored)
@@ -17,12 +17,12 @@ let

   inherit (pkgs) lib runCommand go jq ripgrep;

-  pathToName = p: replaceStrings ["/"] ["_"] (toString p);
+  pathToName = p: replaceStrings [ "/" ] [ "_" ] (toString p);

   # Collect all non-vendored dependencies from the Go standard library
   # into a file that can be used to filter them out when processing
   # dependencies.
-  stdlibPackages = runCommand "stdlib-pkgs.json" {} ''
+  stdlibPackages = runCommand "stdlib-pkgs.json" { } ''
     export HOME=$PWD
     export GOPATH=/dev/null
     ${go}/bin/go list std | \
@@ -45,20 +45,28 @@ let
   };

   mkset = path: value:
-    if path == [] then { gopkg = value; }
+    if path == [ ] then { gopkg = value; }
     else { "${head path}" = mkset (tail path) value; };

   last = l: elemAt l ((length l) - 1);

   toPackage = self: src: path: depMap: entry:
     let
-      localDeps = map (d: lib.attrByPath (d ++ [ "gopkg" ]) (
-        throw "missing local dependency '${lib.concatStringsSep "." d}' in '${path}'"
-      ) self) entry.localDeps;
+      localDeps = map
+        (d: lib.attrByPath (d ++ [ "gopkg" ])
+          (
+            throw "missing local dependency '${lib.concatStringsSep "." d}' in '${path}'"
+          )
+          self)
+        entry.localDeps;

-      foreignDeps = map (d: lib.attrByPath [ d.path ] (
-        throw "missing foreign dependency '${d.path}' in '${path}, imported at ${d.position}'"
-      ) depMap) entry.foreignDeps;
+      foreignDeps = map
+        (d: lib.attrByPath [ d.path ]
+          (
+            throw "missing foreign dependency '${d.path}' in '${path}, imported at ${d.position}'"
+          )
+          depMap)
+        entry.foreignDeps;

       args = {
         srcs = map (f: src + ("/" + f)) entry.files;
@@ -74,22 +82,28 @@ let
       binArgs = args // {
         name = (last ((lib.splitString "/" path) ++ entry.locator));
       };
-    in if entry.isCommand then (program binArgs) else (package libArgs);
+    in
+    if entry.isCommand then (program binArgs) else (package libArgs);

-in { src, path, deps ? [] }: let
+in
+{ src, path, deps ? [ ] }:
+let
   # Build a map of dependencies (from their import paths to their
   # derivation) so that they can be conditionally imported only in
   # sub-packages that require them.
-  depMap = listToAttrs (map (d: {
-    name = d.goImportPath;
-    value = d;
-  }) (map (d: d.gopkg) deps));
+  depMap = listToAttrs (map
+    (d: {
+      name = d.goImportPath;
+      value = d;
+    })
+    (map (d: d.gopkg) deps));

   name = pathToName path;
-  analysisOutput = runCommand "${name}-structure.json" {} ''
+  analysisOutput = runCommand "${name}-structure.json" { } ''
     ${analyser}/bin/analyser -path ${path} -source ${src} > $out
   '';
   analysis = fromJSON (readFile analysisOutput);
-in lib.fix(self: foldl' lib.recursiveUpdate {} (
+in
+lib.fix (self: foldl' lib.recursiveUpdate { } (
   map (entry: mkset entry.locator (toPackage self src path depMap entry)) analysis
 ))
@@ -8,7 +8,8 @@

 let
   inherit (builtins) fetchGit map;
-in rec {
+in
+rec {
   goProto = external {
     path = "github.com/golang/protobuf";
     src = fetchGit {
@@ -4,7 +4,7 @@
 # buildLisp is designed to enforce conventions and do away with the
 # free-for-all of existing Lisp build systems.

-{ pkgs ? import <nixpkgs> {}, ... }:
+{ pkgs ? import <nixpkgs> { }, ... }:

 let
   inherit (builtins) map elemAt match filter;
@@ -70,11 +70,16 @@ let
   implFilter = impl: xs:
     let
       isFilterSet = x: builtins.isAttrs x && !(lib.isDerivation x);
-    in builtins.map (
-      x: if isFilterSet x then x.${impl.name} or x.default else x
-    ) (builtins.filter (
-      x: !(isFilterSet x) || x ? ${impl.name} || x ? default
-    ) xs);
+    in
+    builtins.map
+      (
+        x: if isFilterSet x then x.${impl.name} or x.default else x
+      )
+      (builtins.filter
+        (
+          x: !(isFilterSet x) || x ? ${impl.name} || x ? default
+        )
+        xs);

   # Generates lisp code which instructs the given lisp implementation to load
   # all the given dependencies.
@@ -103,17 +108,21 @@ let
   # 'allDeps' flattens the list of dependencies (and their
   # dependencies) into one ordered list of unique deps which
   # all use the given implementation.
-  allDeps = impl: deps: let
-    # The override _should_ propagate itself recursively, as every derivation
-    # would only expose its actually used dependencies. Use implementation
-    # attribute created by withExtras if present, override in all other cases
-    # (mainly bundled).
-    deps' = builtins.map (dep: dep."${impl.name}" or (dep.overrideLisp (_: {
-      implementation = impl;
-    }))) deps;
-  in (lib.toposort dependsOn (lib.unique (
-    lib.flatten (deps' ++ (map (d: d.lispDeps) deps'))
-  ))).result;
+  allDeps = impl: deps:
+    let
+      # The override _should_ propagate itself recursively, as every derivation
+      # would only expose its actually used dependencies. Use implementation
+      # attribute created by withExtras if present, override in all other cases
+      # (mainly bundled).
+      deps' = builtins.map
+        (dep: dep."${impl.name}" or (dep.overrideLisp (_: {
+          implementation = impl;
+        })))
+        deps;
+    in
+    (lib.toposort dependsOn (lib.unique (
+      lib.flatten (deps' ++ (map (d: d.lispDeps) deps'))
+    ))).result;

   # 'allNative' extracts all native dependencies of a dependency list
   # to ensure that library load paths are set correctly during all
@@ -138,42 +147,49 @@ let
   withExtras = f: args:
     let
       drv = (makeOverridable f) args;
-    in lib.fix (self:
-      drv.overrideLisp (old:
-        let
-          implementation = old.implementation or defaultImplementation;
-          brokenOn = old.brokenOn or [];
-          targets = lib.subtractLists (brokenOn ++ [ implementation.name ])
-            (builtins.attrNames impls);
-        in {
-          passthru = (old.passthru or {}) // {
-            repl = implementation.lispWith [ self ];
+    in
+    lib.fix (self:
+      drv.overrideLisp
+        (old:
+          let
+            implementation = old.implementation or defaultImplementation;
+            brokenOn = old.brokenOn or [ ];
+            targets = lib.subtractLists (brokenOn ++ [ implementation.name ])
+              (builtins.attrNames impls);
+          in
+          {
+            passthru = (old.passthru or { }) // {
+              repl = implementation.lispWith [ self ];

-            # meta is done via passthru to minimize rebuilds caused by overriding
-            meta = (old.passthru.meta or {}) // {
-              inherit targets;
-            };
-          } // builtins.listToAttrs (builtins.map (impl: {
-            inherit (impl) name;
-            value = self.overrideLisp (_: {
-              implementation = impl;
-            });
-          }) (builtins.attrValues impls));
-        }) // {
-      overrideLisp = new: withExtras f (args // new args);
-    });
+              # meta is done via passthru to minimize rebuilds caused by overriding
+              meta = (old.passthru.meta or { }) // {
+                inherit targets;
+              };
+            } // builtins.listToAttrs (builtins.map
+              (impl: {
+                inherit (impl) name;
+                value = self.overrideLisp (_: {
+                  implementation = impl;
+                });
+              })
+              (builtins.attrValues impls));
+          }) // {
+      overrideLisp = new: withExtras f (args // new args);
+    });

   # 'testSuite' builds a Common Lisp test suite that loads all of srcs and deps,
   # and then executes expression to check its result
-  testSuite = { name, expression, srcs, deps ? [], native ? [], implementation }:
+  testSuite = { name, expression, srcs, deps ? [ ], native ? [ ], implementation }:
     let
       lispDeps = allDeps implementation (implFilter implementation deps);
       lispNativeDeps = allNative native lispDeps;
       filteredSrcs = implFilter implementation srcs;
-    in runCommandNoCC name {
-      LD_LIBRARY_PATH = lib.makeLibraryPath lispNativeDeps;
-      LANG = "C.UTF-8";
-    } ''
+    in
+    runCommandNoCC name
+      {
+        LD_LIBRARY_PATH = lib.makeLibraryPath lispNativeDeps;
+        LANG = "C.UTF-8";
+      } ''
       echo "Running test suite ${name}"

       ${implementation.runScript} ${
@@ -452,15 +468,16 @@ let
       } $@
     '';

-    bundled = name: runCommandNoCC "${name}-cllib" {
-      passthru = {
-        lispName = name;
-        lispNativeDeps = [];
-        lispDeps = [];
-        lispBinary = false;
-        repl = impls.ecl.lispWith [ (impls.ecl.bundled name) ];
-      };
-    } ''
+    bundled = name: runCommandNoCC "${name}-cllib"
+      {
+        passthru = {
+          lispName = name;
+          lispNativeDeps = [ ];
+          lispDeps = [ ];
+          lispBinary = false;
+          repl = impls.ecl.lispWith [ (impls.ecl.bundled name) ];
+        };
+      } ''
       mkdir -p "$out"
       ln -s "${ecl-static}/lib/ecl-${ecl-static.version}/${name}.${impls.ecl.faslExt}" -t "$out"
       ln -s "${ecl-static}/lib/ecl-${ecl-static.version}/lib${name}.a" "$out/${name}.a"
@@ -489,7 +506,8 @@ let

       # See https://ccl.clozure.com/docs/ccl.html#building-definitions
       faslExt =
-        /**/ if targetPlatform.isPowerPC && targetPlatform.is32bit then "pfsl"
+        /**/
+        if targetPlatform.isPowerPC && targetPlatform.is32bit then "pfsl"
         else if targetPlatform.isPowerPC && targetPlatform.is64bit then "p64fsl"
         else if targetPlatform.isx86_64 && targetPlatform.isLinux then "lx64fsl"
         else if targetPlatform.isx86_32 && targetPlatform.isLinux then "lx32fsl"
@@ -572,7 +590,7 @@ let
         lib.optionalString (deps != [])
           "--load ${writeText "load.lisp" (impls.ccl.genLoadLisp lispDeps)}"
       } "$@"
     '';
   };
 };

@@ -586,37 +604,42 @@ let
   library =
     { name
     , implementation ? defaultImplementation
-    , brokenOn ? [] # TODO(sterni): make this a warning
+    , brokenOn ? [ ] # TODO(sterni): make this a warning
     , srcs
-    , deps ? []
-    , native ? []
+    , deps ? [ ]
+    , native ? [ ]
     , tests ? null
-    , passthru ? {}
+    , passthru ? { }
     }:
     let
       filteredDeps = implFilter implementation deps;
       filteredSrcs = implFilter implementation srcs;
       lispNativeDeps = (allNative native filteredDeps);
       lispDeps = allDeps implementation filteredDeps;
-      testDrv = if ! isNull tests
-        then testSuite {
-          name = tests.name or "${name}-test";
-          srcs = filteredSrcs ++ (tests.srcs or []);
-          deps = filteredDeps ++ (tests.deps or []);
-          expression = tests.expression;
-          inherit implementation;
-        }
+      testDrv =
+        if ! isNull tests
+        then
+          testSuite
+            {
+              name = tests.name or "${name}-test";
+              srcs = filteredSrcs ++ (tests.srcs or [ ]);
+              deps = filteredDeps ++ (tests.deps or [ ]);
+              expression = tests.expression;
+              inherit implementation;
+            }
         else null;
-    in lib.fix (self: runCommandNoCC "${name}-cllib" {
-      LD_LIBRARY_PATH = lib.makeLibraryPath lispNativeDeps;
-      LANG = "C.UTF-8";
-      passthru = passthru // {
-        inherit lispNativeDeps lispDeps;
-        lispName = name;
-        lispBinary = false;
-        tests = testDrv;
-      };
-    } ''
+    in
+    lib.fix (self: runCommandNoCC "${name}-cllib"
+      {
+        LD_LIBRARY_PATH = lib.makeLibraryPath lispNativeDeps;
+        LANG = "C.UTF-8";
+        passthru = passthru // {
+          inherit lispNativeDeps lispDeps;
+          lispName = name;
+          lispBinary = false;
+          tests = testDrv;
+        };
+      } ''
       ${if ! isNull testDrv
         then "echo 'Test ${testDrv} succeeded'"
         else "echo 'No tests run'"}
@@ -637,13 +660,13 @@ let
   program =
     { name
     , implementation ? defaultImplementation
-    , brokenOn ? [] # TODO(sterni): make this a warning
+    , brokenOn ? [ ] # TODO(sterni): make this a warning
     , main ? "${name}:main"
     , srcs
-    , deps ? []
-    , native ? []
+    , deps ? [ ]
+    , native ? [ ]
     , tests ? null
-    , passthru ? {}
+    , passthru ? { }
     }:
     let
      filteredSrcs = implFilter implementation srcs;
@@ -656,45 +679,53 @@ let
         deps = lispDeps;
         srcs = filteredSrcs;
       };
-      testDrv = if ! isNull tests
-        then testSuite {
-          name = tests.name or "${name}-test";
-          srcs =
-            ( # testSuite does run implFilter as well
-            filteredSrcs ++ (tests.srcs or []));
-          deps = filteredDeps ++ (tests.deps or []);
-          expression = tests.expression;
-          inherit implementation;
-        }
+      testDrv =
+        if ! isNull tests
+        then
+          testSuite
+            {
+              name = tests.name or "${name}-test";
+              srcs =
+                (
+                  # testSuite does run implFilter as well
+                  filteredSrcs ++ (tests.srcs or [ ])
+                );
+              deps = filteredDeps ++ (tests.deps or [ ]);
+              expression = tests.expression;
+              inherit implementation;
+            }
         else null;
-    in lib.fix (self: runCommandNoCC "${name}" {
-      nativeBuildInputs = [ makeWrapper ];
-      LD_LIBRARY_PATH = libPath;
-      LANG = "C.UTF-8";
-      passthru = passthru // {
-        lispName = name;
-        lispDeps = [ selfLib ];
-        lispNativeDeps = native;
-        lispBinary = true;
-        tests = testDrv;
-      };
-    } (''
-      ${if ! isNull testDrv
-        then "echo 'Test ${testDrv} succeeded'"
-        else ""}
-      mkdir -p $out/bin
-
-      ${implementation.runScript} ${
-        implementation.genDumpLisp {
-          inherit name main;
-          deps = ([ selfLib ] ++ lispDeps);
-        }
-      }
-    '' + lib.optionalString implementation.wrapProgram ''
-      wrapProgram $out/bin/${name} \
-        --prefix LD_LIBRARY_PATH : "${libPath}" \
-        --add-flags "\$NIX_BUILDLISP_LISP_ARGS --"
-    ''));
+    in
+    lib.fix (self: runCommandNoCC "${name}"
+      {
+        nativeBuildInputs = [ makeWrapper ];
+        LD_LIBRARY_PATH = libPath;
+        LANG = "C.UTF-8";
+        passthru = passthru // {
+          lispName = name;
+          lispDeps = [ selfLib ];
+          lispNativeDeps = native;
+          lispBinary = true;
+          tests = testDrv;
+        };
+      }
+      (''
+        ${if ! isNull testDrv
+          then "echo 'Test ${testDrv} succeeded'"
+          else ""}
+        mkdir -p $out/bin
+
+        ${implementation.runScript} ${
+          implementation.genDumpLisp {
+            inherit name main;
+            deps = ([ selfLib ] ++ lispDeps);
+          }
+        }
+      '' + lib.optionalString implementation.wrapProgram ''
+        wrapProgram $out/bin/${name} \
+          --prefix LD_LIBRARY_PATH : "${libPath}" \
+          --add-flags "\$NIX_BUILDLISP_LISP_ARGS --"
+      ''));

   # 'bundled' creates a "library" which makes a built-in package available,
   # such as any of SBCL's sb-* packages or ASDF. By default this is done
@@ -714,11 +745,13 @@ let
     }:
     implementation.bundled or (defaultBundled implementation) name;

-  in (makeOverridable bundled') {
+  in
+  (makeOverridable bundled') {
     inherit name;
   };

-in {
+in
+{
   library = withExtras library;
   program = withExtras program;
   inherit bundled;
@@ -14,15 +14,16 @@ let
   ];
 };

 # Example Lisp program.
 #
 # This builds & writes an executable for a program using the library
 # above to disk.
 #
 # By default, buildLisp.program expects the entry point to be
 # `$name:main`. This can be overridden by configuring the `main`
 # attribute.
-in buildLisp.program {
+in
+buildLisp.program {
   name = "example";
   deps = [ libExample ];

@@ -13,9 +13,9 @@ let
     ;

   bins = getBins mandoc [ "mandoc" ]
     // getBins gzip [ "gzip" ]
     // getBins coreutils [ "mkdir" "ln" "cp" ]
     ;

   defaultGzip = true;

@@ -35,41 +35,68 @@ let
     }:
     { content
     , ...
-    }@page: let
+    }@page:
+    let
       source = builtins.toFile (basename false page) content;
-    in runExecline (basename gzip page) {} ([
-      (if requireLint then "if" else "foreground") [
-        bins.mandoc "-mdoc" "-T" "lint" source
+    in
+    runExecline (basename gzip page) { } ([
+      (if requireLint then "if" else "foreground")
+      [
+        bins.mandoc
+        "-mdoc"
+        "-T"
+        "lint"
+        source
       ]
-      "importas" "out" "out"
+      "importas"
+      "out"
+      "out"
     ] ++ (if gzip then [
-      "redirfd" "-w" "1" "$out"
-      bins.gzip "-c" source
+      "redirfd"
+      "-w"
+      "1"
+      "$out"
+      bins.gzip
+      "-c"
+      source
     ] else [
-      bins.cp "--reflink=auto" source "$out"
+      bins.cp
+      "--reflink=auto"
+      source
+      "$out"
     ]));

   buildManPages =
     name:
-    { derivationArgs ? {}
+    { derivationArgs ? { }
     , gzip ? defaultGzip
     , ...
     }@args:
     pages:
-    runExecline "${name}-man-pages" {
-      inherit derivationArgs;
-    } ([
-      "importas" "out" "out"
-    ] ++ lib.concatMap ({ name, section, content }@page: [
-      "if" [ bins.mkdir "-p" (manDir page) ]
-      "if" [
-        bins.ln "-s"
-        (buildManPage args page)
-        (target gzip page)
-      ]
-    ]) pages);
+    runExecline "${name}-man-pages"
+      {
+        inherit derivationArgs;
+      }
+      ([
+        "importas"
+        "out"
+        "out"
+      ] ++ lib.concatMap
+        ({ name, section, content }@page: [
+          "if"
+          [ bins.mkdir "-p" (manDir page) ]
+          "if"
+          [
+            bins.ln
+            "-s"
+            (buildManPage args page)
+            (target gzip page)
+          ]
+        ])
+        pages);

-in {
+in
+{
   __functor = _: buildManPages;

   single = buildManPage;
@@ -29,7 +29,8 @@ let
     unsafeDiscardStringContext;

   inherit (pkgs) lib runCommandNoCC writeText;
-in rec {
+in
+rec {
   # Creates a Nix expression that yields the target at the specified
   # location in the repository.
   #
@@ -42,14 +43,15 @@ in rec {
       descend = expr: attr: "builtins.getAttr \"${attr}\" (${expr})";
       targetExpr = foldl' descend "import ./. {}" target.__readTree;
       subtargetExpr = descend targetExpr target.__subtarget;
-    in if target ? __subtarget then subtargetExpr else targetExpr;
+    in
+    if target ? __subtarget then subtargetExpr else targetExpr;

   # Create a pipeline label from the target's tree location.
   mkLabel = target:
     let label = concatStringsSep "/" target.__readTree;
     in if target ? __subtarget
     then "${label}:${target.__subtarget}"
     else label;

   # Determine whether to skip a target if it has not diverged from the
   # HEAD branch.
@@ -74,33 +76,36 @@ in rec {

   # Create a pipeline step from a single target.
   mkStep = headBranch: parentTargetMap: target:
     let
       label = mkLabel target;
       drvPath = unsafeDiscardStringContext target.drvPath;
       shouldSkip' = shouldSkip parentTargetMap;
-    in {
+    in
+    {
       label = ":nix: " + label;
       key = hashString "sha1" label;
       skip = shouldSkip' label drvPath;
       command = mkBuildCommand target drvPath;
       env.READTREE_TARGET = label;

       # Add a dependency on the initial static pipeline step which
       # always runs. This allows build steps uploaded in batches to
       # start running before all batches have been uploaded.
       depends_on = ":init:";
     };

   # Helper function to inelegantly divide a list into chunks of at
   # most n elements.
   #
   # This works by assigning each element a chunk ID based on its
   # index, and then grouping all elements by their chunk ID.
-  chunksOf = n: list: let
-    chunkId = idx: toString (idx / n + 1);
-    assigned = lib.imap1 (idx: value: { inherit value ; chunk = chunkId idx; }) list;
-    unchunk = mapAttrs (_: elements: map (e: e.value) elements);
-  in unchunk (lib.groupBy (e: e.chunk) assigned);
+  chunksOf = n: list:
+    let
+      chunkId = idx: toString (idx / n + 1);
+      assigned = lib.imap1 (idx: value: { inherit value; chunk = chunkId idx; }) list;
+      unchunk = mapAttrs (_: elements: map (e: e.value) elements);
+    in
+    unchunk (lib.groupBy (e: e.chunk) assigned);

   # Define a build pipeline chunk as a JSON file, using the pipeline
   # format documented on
@@ -120,104 +125,112 @@ in rec {
     attrValues (mapAttrs (makePipelineChunk name) (chunksOf 192 steps));

   # Create a pipeline structure for the given targets.
-  mkPipeline = {
-    # HEAD branch of the repository on which release steps, GC
-    # anchoring and other "mainline only" steps should run.
-    headBranch,
-
-    # List of derivations as read by readTree (in most cases just the
-    # output of readTree.gather) that should be built in Buildkite.
-    #
-    # These are scheduled as the first build steps and run as fast as
-    # possible, in order, without any concurrency restrictions.
-    drvTargets,
-
-    # Derivation map of a parent commit. Only targets which no longer
-    # correspond to the content of this map will be built. Passing an
-    # empty map will always build all targets.
-    parentTargetMap ? {},
-
-    # A list of plain Buildkite step structures to run alongside the
-    # build for all drvTargets, but before proceeding with any
-    # post-build actions such as status reporting.
-    #
-    # Can be used for things like code formatting checks.
-    additionalSteps ? [],
-
-    # A list of plain Buildkite step structures to run after all
-    # previous steps succeeded.
-    #
-    # Can be used for status reporting steps and the like.
-    postBuildSteps ? []
-  }: let
-    # Convert a target into all of its build and post-build steps,
-    # treated separately as they need to be in different chunks.
-    targetToSteps = target: let
-      step = mkStep headBranch parentTargetMap target;
-
-      # Split build/post-build steps
-      splitExtraSteps = partition ({ postStep, ... }: postStep)
-        (attrValues (mapAttrs (name: value: {
-          inherit name value;
-          postStep = (value ? prompt) || (value.postBuild or false);
-        }) (target.meta.ci.extraSteps or {})));
-
-      mkExtraStep' = { name, value, ... }: mkExtraStep step name value;
-      extraBuildSteps = map mkExtraStep' splitExtraSteps.wrong; # 'wrong' -> no prompt
-      extraPostSteps = map mkExtraStep' splitExtraSteps.right; # 'right' -> has prompt
-    in {
-      buildSteps = [ step ] ++ extraBuildSteps;
-      postSteps = extraPostSteps;
-    };
-
-    # Combine all target steps into separate build and post-build step lists.
-    steps = foldl' (acc: t: {
-      buildSteps = acc.buildSteps ++ t.buildSteps;
-      postSteps = acc.postSteps ++ t.postSteps;
-    }) { buildSteps = []; postSteps = []; } (map targetToSteps drvTargets);
-
-    buildSteps =
-      # Add build steps for each derivation target and their extra
-      # steps.
-      steps.buildSteps
-
-      # Add additional steps (if set).
-      ++ additionalSteps;
-
-    postSteps =
-      # Add post-build steps for each derivation target.
-      steps.postSteps
-
-      # Add any globally defined post-build steps.
-      ++ postBuildSteps;
-
-    buildChunks = pipelineChunks "build" buildSteps;
-    postBuildChunks = pipelineChunks "post" postSteps;
-    chunks = buildChunks ++ postBuildChunks;
-  in runCommandNoCC "buildkite-pipeline" {} ''
-    mkdir $out
-    echo "Generated ${toString (length chunks)} pipeline chunks"
-    ${
-      lib.concatMapStringsSep "\n"
-        (chunk: "cp ${chunk.path} $out/${chunk.filename}") chunks
-    }
-  '';
+  mkPipeline =
+    {
+      # HEAD branch of the repository on which release steps, GC
+      # anchoring and other "mainline only" steps should run.
+      headBranch
+    , # List of derivations as read by readTree (in most cases just the
+      # output of readTree.gather) that should be built in Buildkite.
+      #
+      # These are scheduled as the first build steps and run as fast as
+      # possible, in order, without any concurrency restrictions.
+      drvTargets
+    , # Derivation map of a parent commit. Only targets which no longer
+      # correspond to the content of this map will be built. Passing an
+      # empty map will always build all targets.
+      parentTargetMap ? { }
+    , # A list of plain Buildkite step structures to run alongside the
+      # build for all drvTargets, but before proceeding with any
+      # post-build actions such as status reporting.
+      #
+      # Can be used for things like code formatting checks.
+      additionalSteps ? [ ]
+    , # A list of plain Buildkite step structures to run after all
+      # previous steps succeeded.
+      #
+      # Can be used for status reporting steps and the like.
+      postBuildSteps ? [ ]
+    }:
+    let
+      # Convert a target into all of its build and post-build steps,
+      # treated separately as they need to be in different chunks.
+      targetToSteps = target:
+        let
+          step = mkStep headBranch parentTargetMap target;
+
+          # Split build/post-build steps
+          splitExtraSteps = partition ({ postStep, ... }: postStep)
+            (attrValues (mapAttrs
+              (name: value: {
+                inherit name value;
+                postStep = (value ? prompt) || (value.postBuild or false);
+              })
+              (target.meta.ci.extraSteps or { })));
+
+          mkExtraStep' = { name, value, ... }: mkExtraStep step name value;
+          extraBuildSteps = map mkExtraStep' splitExtraSteps.wrong; # 'wrong' -> no prompt
+          extraPostSteps = map mkExtraStep' splitExtraSteps.right; # 'right' -> has prompt
+        in
+        {
+          buildSteps = [ step ] ++ extraBuildSteps;
+          postSteps = extraPostSteps;
+        };
+
+      # Combine all target steps into separate build and post-build step lists.
+      steps = foldl'
+        (acc: t: {
+          buildSteps = acc.buildSteps ++ t.buildSteps;
+          postSteps = acc.postSteps ++ t.postSteps;
+        })
+        { buildSteps = [ ]; postSteps = [ ]; }
+        (map targetToSteps drvTargets);
+
+      buildSteps =
+        # Add build steps for each derivation target and their extra
+        # steps.
+        steps.buildSteps
+
+        # Add additional steps (if set).
+        ++ additionalSteps;
+
+      postSteps =
+        # Add post-build steps for each derivation target.
+        steps.postSteps
+
+        # Add any globally defined post-build steps.
+        ++ postBuildSteps;
+
+      buildChunks = pipelineChunks "build" buildSteps;
+      postBuildChunks = pipelineChunks "post" postSteps;
+      chunks = buildChunks ++ postBuildChunks;
+    in
+    runCommandNoCC "buildkite-pipeline" { } ''
+      mkdir $out
+      echo "Generated ${toString (length chunks)} pipeline chunks"
+      ${
+        lib.concatMapStringsSep "\n"
+          (chunk: "cp ${chunk.path} $out/${chunk.filename}") chunks
+      }
+    '';

   # Create a drvmap structure for the given targets, containing the
   # mapping of all target paths to their derivations. The mapping can
   # be persisted for future use.
-  mkDrvmap = drvTargets: writeText "drvmap.json" (toJSON (listToAttrs (map (target: {
-    name = mkLabel target;
-    value = {
-      drvPath = unsafeDiscardStringContext target.drvPath;
+  mkDrvmap = drvTargets: writeText "drvmap.json" (toJSON (listToAttrs (map
+    (target: {
+      name = mkLabel target;
+      value = {
+        drvPath = unsafeDiscardStringContext target.drvPath;

-      # Include the attrPath in the output to reconstruct the drv
-      # without parsing the human-readable label.
-      attrPath = target.__readTree ++ lib.optionals (target ? __subtarget) [
-        target.__subtarget
-      ];
-    };
-  }) drvTargets)));
+        # Include the attrPath in the output to reconstruct the drv
+        # without parsing the human-readable label.
+        attrPath = target.__readTree ++ lib.optionals (target ? __subtarget) [
+          target.__subtarget
+        ];
+      };
+    })
+    drvTargets)));

   # Implementation of extra step logic.
   #
@@ -278,34 +291,37 @@ in rec {

   # Create the Buildkite configuration for an extra step, optionally
   # wrapping it in a gate group.
-  mkExtraStep = parent: key: {
-    command,
-    label ? key,
-    prompt ? false,
-    needsOutput ? false,
-    branches ? null,
-    alwaysRun ? false,
-    postBuild ? false
-  }@cfg: let
+  mkExtraStep = parent: key: { command
+                             , label ? key
+                             , prompt ? false
+                             , needsOutput ? false
+                             , branches ? null
+                             , alwaysRun ? false
+                             , postBuild ? false
+                             }@cfg:
+    let
       parentLabel = parent.env.READTREE_TARGET;

       step = {
         label = ":gear: ${label} (from ${parentLabel})";
         skip = if alwaysRun then false else parent.skip or false;
         depends_on = lib.optional (!alwaysRun && !needsOutput) parent.key;
         branches = if branches != null then lib.concatStringsSep " " branches else null;

         command = pkgs.writeShellScript "${key}-script" ''
           set -ueo pipefail
           ${lib.optionalString needsOutput "echo '~~~ Preparing build output of ${parentLabel}'"}
           ${lib.optionalString needsOutput parent.command}
           echo '+++ Running extra step command'
           exec ${command}
         '';
       };
-  in if (isString prompt)
-    then mkGatedStep {
-      inherit step label parent prompt;
-    }
+    in
+    if (isString prompt)
+    then
+      mkGatedStep
+        {
+          inherit step label parent prompt;
+        }
     else step;
 }
@@ -17,9 +17,10 @@ let
   drvSeqL = defun [ (list drv) drv drv ]
     (drvDeps: drvOut:
      let
-        drvOutOutputs = drvOut.outputs or ["out"];
+        drvOutOutputs = drvOut.outputs or [ "out" ];
      in
-      pkgs.runCommandLocal drvOut.name {
+      pkgs.runCommandLocal drvOut.name
+        {
          # we inherit all attributes in order to replicate
          # the original derivation as much as possible
          outputs = drvOutOutputs;
@@ -29,15 +30,18 @@ let
        }
        # the outputs of the original derivation are replicated
        # by creating a symlink to the old output path
-       (lib.concatMapStrings (output: ''
-         target=${lib.escapeShellArg drvOut.${output}}
-         # if the target is already a symlink, follow it until it’s not;
-         # this is done to prevent too many dereferences
-         target=$(readlink -e "$target")
-         # link to the output
-         ln -s "$target" "${"$"}${output}"
-       '') drvOutOutputs));
+       (lib.concatMapStrings
+         (output: ''
+           target=${lib.escapeShellArg drvOut.${output}}
+           # if the target is already a symlink, follow it until it’s not;
+           # this is done to prevent too many dereferences
+           target=$(readlink -e "$target")
+           # link to the output
+           ln -s "$target" "${"$"}${output}"
+         '')
+         drvOutOutputs));

-in {
+in
+{
   __functor = _: drvSeqL;
 }
@@ -14,7 +14,8 @@ let
     inherit (depot.nix.runTestsuite) runTestsuite it assertEq;
   };

-in {
+in
+{
   __functor = _: emptyDerivation;
   inherit tests;
 }
@ -11,7 +11,7 @@
|
||||||
|
|
||||||
let
|
let
|
||||||
bins = getBins pkgs.s6-portable-utils [ "s6-touch" ]
|
bins = getBins pkgs.s6-portable-utils [ "s6-touch" ]
|
||||||
// getBins pkgs.execline [ "importas" "exec" ];
|
// getBins pkgs.execline [ "importas" "exec" ];
|
||||||
|
|
||||||
emptiness = {
|
emptiness = {
|
||||||
name = "empty-derivation";
|
name = "empty-derivation";
|
||||||
|
@ -21,12 +21,16 @@ let
|
||||||
|
|
||||||
builder = bins.exec;
|
builder = bins.exec;
|
||||||
args = [
|
args = [
|
||||||
bins.importas "out" "out"
|
bins.importas
|
||||||
bins.s6-touch "$out"
|
"out"
|
||||||
|
"out"
|
||||||
|
bins.s6-touch
|
||||||
|
"$out"
|
||||||
];
|
];
|
||||||
};
|
};
|
||||||
|
|
||||||
in (derivation emptiness) // {
|
in
|
||||||
|
(derivation emptiness) // {
|
||||||
# This allows us to call the empty derivation
|
# This allows us to call the empty derivation
|
||||||
# like a function and override fields/add new fields.
|
# like a function and override fields/add new fields.
|
||||||
__functor = _: overrides:
|
__functor = _: overrides:
|
||||||
|
|
|
@ -10,10 +10,17 @@ let
|
||||||
];
|
];
|
||||||
|
|
||||||
fooOut = emptyDerivation {
|
fooOut = emptyDerivation {
|
||||||
builder = writeExecline "foo-builder" {} [
|
builder = writeExecline "foo-builder" { } [
|
||||||
"importas" "out" "out"
|
"importas"
|
||||||
"redirfd" "-w" "1" "$out"
|
"out"
|
||||||
bins.s6-echo "-n" "foo"
|
"out"
|
||||||
|
"redirfd"
|
||||||
|
"-w"
|
||||||
|
"1"
|
||||||
|
"$out"
|
||||||
|
bins.s6-echo
|
||||||
|
"-n"
|
||||||
|
"foo"
|
||||||
];
|
];
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -26,7 +33,8 @@ let
|
||||||
"bar")
|
"bar")
|
||||||
];
|
];
|
||||||
|
|
||||||
in runTestsuite "emptyDerivation" [
|
in
|
||||||
|
runTestsuite "emptyDerivation" [
|
||||||
empty
|
empty
|
||||||
overrideBuilder
|
overrideBuilder
|
||||||
]
|
]
|
||||||
|
|
|
@ -16,14 +16,17 @@ let
|
||||||
# escapeExecline [ "if" [ "somecommand" ] "true" ]
|
# escapeExecline [ "if" [ "somecommand" ] "true" ]
|
||||||
# == ''"if" { "somecommand" } "true"''
|
# == ''"if" { "somecommand" } "true"''
|
||||||
escapeExecline = execlineList: lib.concatStringsSep " "
|
escapeExecline = execlineList: lib.concatStringsSep " "
|
||||||
(let
|
(
|
||||||
go = arg:
|
let
|
||||||
if builtins.isString arg then [(escapeExeclineArg arg)]
|
go = arg:
|
||||||
else if builtins.isPath arg then [(escapeExeclineArg "${arg}")]
|
if builtins.isString arg then [ (escapeExeclineArg arg) ]
|
||||||
else if lib.isDerivation arg then [(escapeExeclineArg arg)]
|
else if builtins.isPath arg then [ (escapeExeclineArg "${arg}") ]
|
||||||
else if builtins.isList arg then [ "{" ] ++ builtins.concatMap go arg ++ [ "}" ]
|
else if lib.isDerivation arg then [ (escapeExeclineArg arg) ]
|
||||||
else abort "escapeExecline can only hande nested lists of strings, was ${lib.generators.toPretty {} arg}";
|
else if builtins.isList arg then [ "{" ] ++ builtins.concatMap go arg ++ [ "}" ]
|
||||||
in builtins.concatMap go execlineList);
|
else abort "escapeExecline can only hande nested lists of strings, was ${lib.generators.toPretty {} arg}";
|
||||||
|
in
|
||||||
|
builtins.concatMap go execlineList
|
||||||
|
);
|
||||||
|
|
||||||
in
|
in
|
||||||
escapeExecline
|
escapeExecline
|
||||||
|
|
|
@ -26,14 +26,16 @@
|
||||||
|
|
||||||
let
|
let
|
||||||
getBins = drv: xs:
|
getBins = drv: xs:
|
||||||
let f = x:
|
let
|
||||||
# TODO(Profpatsch): typecheck
|
f = x:
|
||||||
let x' = if builtins.isString x then { use = x; as = x; } else x;
|
# TODO(Profpatsch): typecheck
|
||||||
in {
|
let x' = if builtins.isString x then { use = x; as = x; } else x;
|
||||||
name = x'.as;
|
in {
|
||||||
value = "${lib.getBin drv}/bin/${x'.use}";
|
name = x'.as;
|
||||||
};
|
value = "${lib.getBin drv}/bin/${x'.use}";
|
||||||
in builtins.listToAttrs (builtins.map f xs);
|
};
|
||||||
|
in
|
||||||
|
builtins.listToAttrs (builtins.map f xs);
|
||||||
|
|
||||||
|
|
||||||
tests = import ./tests.nix {
|
tests = import ./tests.nix {
|
||||||
|
@ -42,7 +44,8 @@ let
|
||||||
inherit (depot.nix.runTestsuite) assertEq it runTestsuite;
|
inherit (depot.nix.runTestsuite) assertEq it runTestsuite;
|
||||||
};
|
};
|
||||||
|
|
||||||
in {
|
in
|
||||||
|
{
|
||||||
__functor = _: getBins;
|
__functor = _: getBins;
|
||||||
inherit tests;
|
inherit tests;
|
||||||
}
|
}
|
||||||
|
|
|
@ -5,11 +5,11 @@ let
|
||||||
drv2 = writeScriptBin "goodbye" "tschau";
|
drv2 = writeScriptBin "goodbye" "tschau";
|
||||||
|
|
||||||
bins = getBins drv [
|
bins = getBins drv [
|
||||||
"hello"
|
"hello"
|
||||||
{ use = "hello"; as = "also-hello"; }
|
{ use = "hello"; as = "also-hello"; }
|
||||||
]
|
]
|
||||||
// getBins drv2 [ "goodbye" ]
|
// getBins drv2 [ "goodbye" ]
|
||||||
;
|
;
|
||||||
|
|
||||||
simple = it "path is equal to the executable name" [
|
simple = it "path is equal to the executable name" [
|
||||||
(assertEq "path"
|
(assertEq "path"
|
||||||
|
@ -33,8 +33,8 @@ let
|
||||||
];
|
];
|
||||||
|
|
||||||
in
|
in
|
||||||
runTestsuite "getBins" [
|
runTestsuite "getBins" [
|
||||||
simple
|
simple
|
||||||
useAs
|
useAs
|
||||||
secondDrv
|
secondDrv
|
||||||
]
|
]
|
||||||
|
|
|
@ -8,31 +8,31 @@
|
||||||
For example, given the following original document:
|
For example, given the following original document:
|
||||||
|
|
||||||
{
|
{
|
||||||
a = "b";
|
a = "b";
|
||||||
c = {
|
c = {
|
||||||
d = "e";
|
d = "e";
|
||||||
f = "g";
|
f = "g";
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
Changing the value of `a` and removing `f` can be achieved by merging the patch
|
Changing the value of `a` and removing `f` can be achieved by merging the patch
|
||||||
|
|
||||||
{
|
{
|
||||||
a = "z";
|
a = "z";
|
||||||
c.f = null;
|
c.f = null;
|
||||||
}
|
}
|
||||||
|
|
||||||
which results in
|
which results in
|
||||||
|
|
||||||
{
|
{
|
||||||
a = "z";
|
a = "z";
|
||||||
c = {
|
c = {
|
||||||
d = "e";
|
d = "e";
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
Pseudo-code:
|
Pseudo-code:
|
||||||
define MergePatch(Target, Patch):
|
define MergePatch(Target, Patch):
|
||||||
if Patch is an Object:
|
if Patch is an Object:
|
||||||
if Target is not an Object:
|
if Target is not an Object:
|
||||||
Target = {} # Ignore the contents and set it to an empty Object
|
Target = {} # Ignore the contents and set it to an empty Object
|
||||||
|
@ -55,19 +55,19 @@ let
|
||||||
mergePatch = target: patch:
|
mergePatch = target: patch:
|
||||||
if lib.isAttrs patch
|
if lib.isAttrs patch
|
||||||
then
|
then
|
||||||
let target' = if lib.isAttrs target then target else {};
|
let target' = if lib.isAttrs target then target else { };
|
||||||
in foldlAttrs
|
in foldlAttrs
|
||||||
(acc: patchEl:
|
(acc: patchEl:
|
||||||
if patchEl.value == null
|
if patchEl.value == null
|
||||||
then removeAttrs acc [ patchEl.name ]
|
then removeAttrs acc [ patchEl.name ]
|
||||||
else acc // {
|
else acc // {
|
||||||
${patchEl.name} =
|
${patchEl.name} =
|
||||||
mergePatch
|
mergePatch
|
||||||
(acc.${patchEl.name} or "unnused")
|
(acc.${patchEl.name} or "unnused")
|
||||||
patchEl.value;
|
patchEl.value;
|
||||||
})
|
})
|
||||||
target'
|
target'
|
||||||
patch
|
patch
|
||||||
else patch;
|
else patch;
|
||||||
|
|
||||||
inherit (depot.nix.runTestsuite)
|
inherit (depot.nix.runTestsuite)
|
||||||
|
@ -93,46 +93,49 @@ let
|
||||||
};
|
};
|
||||||
emptyPatch = it "the empty patch returns the original target" [
|
emptyPatch = it "the empty patch returns the original target" [
|
||||||
(assertEq "id"
|
(assertEq "id"
|
||||||
(mergePatch testTarget {})
|
(mergePatch testTarget { })
|
||||||
testTarget)
|
testTarget)
|
||||||
];
|
];
|
||||||
nonAttrs = it "one side is a non-attrset value" [
|
nonAttrs = it "one side is a non-attrset value" [
|
||||||
(assertEq "target is a value means the value is replaced by the patch"
|
(assertEq "target is a value means the value is replaced by the patch"
|
||||||
(mergePatch 42 testPatch)
|
(mergePatch 42 testPatch)
|
||||||
(mergePatch {} testPatch))
|
(mergePatch { } testPatch))
|
||||||
(assertEq "patch is a value means it replaces target alltogether"
|
(assertEq "patch is a value means it replaces target alltogether"
|
||||||
(mergePatch testTarget 42)
|
(mergePatch testTarget 42)
|
||||||
42)
|
42)
|
||||||
];
|
];
|
||||||
rfcExamples = it "the examples from the RFC" [
|
rfcExamples = it "the examples from the RFC" [
|
||||||
(assertEq "a subset is deleted and overwritten"
|
(assertEq "a subset is deleted and overwritten"
|
||||||
(mergePatch testTarget testPatch) {
|
(mergePatch testTarget testPatch)
|
||||||
|
{
|
||||||
a = "z";
|
a = "z";
|
||||||
c = {
|
c = {
|
||||||
d = "e";
|
d = "e";
|
||||||
};
|
};
|
||||||
})
|
})
|
||||||
(assertEq "a more complicated example from the example section"
|
(assertEq "a more complicated example from the example section"
|
||||||
(mergePatch {
|
(mergePatch
|
||||||
title = "Goodbye!";
|
{
|
||||||
|
title = "Goodbye!";
|
||||||
author = {
|
author = {
|
||||||
givenName = "John";
|
givenName = "John";
|
||||||
familyName = "Doe";
|
familyName = "Doe";
|
||||||
};
|
};
|
||||||
tags = [ "example" "sample" ];
|
tags = [ "example" "sample" ];
|
||||||
content = "This will be unchanged";
|
content = "This will be unchanged";
|
||||||
} {
|
}
|
||||||
title = "Hello!";
|
{
|
||||||
phoneNumber = "+01-123-456-7890";
|
title = "Hello!";
|
||||||
author.familyName = null;
|
phoneNumber = "+01-123-456-7890";
|
||||||
tags = [ "example" ];
|
author.familyName = null;
|
||||||
})
|
tags = [ "example" ];
|
||||||
|
})
|
||||||
{
|
{
|
||||||
title = "Hello!";
|
title = "Hello!";
|
||||||
phoneNumber = "+01-123-456-7890";
|
phoneNumber = "+01-123-456-7890";
|
||||||
author = {
|
author = {
|
||||||
givenName = "John";
|
givenName = "John";
|
||||||
};
|
};
|
||||||
tags = [ "example" ];
|
tags = [ "example" ];
|
||||||
content = "This will be unchanged";
|
content = "This will be unchanged";
|
||||||
})
|
})
|
||||||
|
@ -144,42 +147,45 @@ let
|
||||||
(assertEq "test number ${toString index}"
|
(assertEq "test number ${toString index}"
|
||||||
(mergePatch target patch)
|
(mergePatch target patch)
|
||||||
res);
|
res);
|
||||||
in it "the test suite from the RFC" [
|
in
|
||||||
(r 1 {"a" = "b";} {"a" = "c";} {"a" = "c";})
|
it "the test suite from the RFC" [
|
||||||
(r 2 {"a" = "b";} {"b" = "c";} {"a" = "b"; "b" = "c";})
|
(r 1 { "a" = "b"; } { "a" = "c"; } { "a" = "c"; })
|
||||||
(r 3 {"a" = "b";} {"a" = null;} {})
|
(r 2 { "a" = "b"; } { "b" = "c"; } { "a" = "b"; "b" = "c"; })
|
||||||
(r 4 {"a" = "b"; "b" = "c";}
|
(r 3 { "a" = "b"; } { "a" = null; } { })
|
||||||
{"a" = null;}
|
(r 4 { "a" = "b"; "b" = "c"; }
|
||||||
{"b" = "c";})
|
{ "a" = null; }
|
||||||
(r 5 {"a" = ["b"];} {"a" = "c";} {"a" = "c";})
|
{ "b" = "c"; })
|
||||||
(r 6 {"a" = "c";} {"a" = ["b"];} {"a" = ["b"];})
|
(r 5 { "a" = [ "b" ]; } { "a" = "c"; } { "a" = "c"; })
|
||||||
(r 7 {"a" = {"b" = "c";}; }
|
(r 6 { "a" = "c"; } { "a" = [ "b" ]; } { "a" = [ "b" ]; })
|
||||||
{"a" = {"b" = "d"; "c" = null;};}
|
(r 7 { "a" = { "b" = "c"; }; }
|
||||||
{"a" = {"b" = "d";};})
|
{ "a" = { "b" = "d"; "c" = null; }; }
|
||||||
(r 8 {"a" = [{"b" = "c";}];}
|
{ "a" = { "b" = "d"; }; })
|
||||||
{"a" = [1];}
|
(r 8 { "a" = [{ "b" = "c"; }]; }
|
||||||
{"a" = [1];})
|
{ "a" = [ 1 ]; }
|
||||||
(r 9 ["a" "b"] ["c" "d"] ["c" "d"])
|
{ "a" = [ 1 ]; })
|
||||||
(r 10 {"a" = "b";} ["c"] ["c"])
|
(r 9 [ "a" "b" ] [ "c" "d" ] [ "c" "d" ])
|
||||||
(r 11 {"a" = "foo";} null null)
|
(r 10 { "a" = "b"; } [ "c" ] [ "c" ])
|
||||||
(r 12 {"a" = "foo";} "bar" "bar")
|
(r 11 { "a" = "foo"; } null null)
|
||||||
(r 13 {"e" = null;} {"a" = 1;} {"e" = null; "a" = 1;})
|
(r 12 { "a" = "foo"; } "bar" "bar")
|
||||||
(r 14 [1 2]
|
(r 13 { "e" = null; } { "a" = 1; } { "e" = null; "a" = 1; })
|
||||||
{"a" = "b"; "c" = null;}
|
(r 14 [ 1 2 ]
|
||||||
{"a" = "b";})
|
{ "a" = "b"; "c" = null; }
|
||||||
(r 15 {}
|
{ "a" = "b"; })
|
||||||
{"a" = {"bb" = {"ccc" = null;};};}
|
(r 15 { }
|
||||||
{"a" = {"bb" = {};};})
|
{ "a" = { "bb" = { "ccc" = null; }; }; }
|
||||||
];
|
{ "a" = { "bb" = { }; }; })
|
||||||
|
];
|
||||||
|
|
||||||
in runTestsuite "mergePatch" [
|
in
|
||||||
|
runTestsuite "mergePatch" [
|
||||||
emptyPatch
|
emptyPatch
|
||||||
nonAttrs
|
nonAttrs
|
||||||
rfcExamples
|
rfcExamples
|
||||||
rfcTests
|
rfcTests
|
||||||
];
|
];
|
||||||
|
|
||||||
in {
|
in
|
||||||
|
{
|
||||||
__functor = _: mergePatch;
|
__functor = _: mergePatch;
|
||||||
|
|
||||||
inherit tests;
|
inherit tests;
|
||||||
|
|
|
@ -28,6 +28,6 @@ attrs:
|
||||||
lib.concatStrings
|
lib.concatStrings
|
||||||
(lib.mapAttrsToList
|
(lib.mapAttrsToList
|
||||||
(k: v: depot.nix.netstring.fromString
|
(k: v: depot.nix.netstring.fromString
|
||||||
( depot.nix.netstring.fromString k
|
(depot.nix.netstring.fromString k
|
||||||
+ depot.nix.netstring.fromString v))
|
+ depot.nix.netstring.fromString v))
|
||||||
attrs)
|
attrs)
|
||||||
|
|
|
@ -6,9 +6,11 @@ let
|
||||||
;
|
;
|
||||||
in
|
in
|
||||||
|
|
||||||
rustSimpleBin {
|
rustSimpleBin
|
||||||
name = "nint";
|
{
|
||||||
dependencies = [
|
name = "nint";
|
||||||
depot.third_party.rust-crates.serde_json
|
dependencies = [
|
||||||
];
|
depot.third_party.rust-crates.serde_json
|
||||||
} (builtins.readFile ./nint.rs)
|
];
|
||||||
|
}
|
||||||
|
(builtins.readFile ./nint.rs)
|
||||||
|
|
|
@ -43,10 +43,13 @@ let
|
||||||
children = readDir path;
|
children = readDir path;
|
||||||
isVisible = f: f == ".skip-subtree" || (substring 0 1 f) != ".";
|
isVisible = f: f == ".skip-subtree" || (substring 0 1 f) != ".";
|
||||||
names = filter isVisible (attrNames children);
|
names = filter isVisible (attrNames children);
|
||||||
in listToAttrs (map (name: {
|
in
|
||||||
inherit name;
|
listToAttrs (map
|
||||||
value = children.${name};
|
(name: {
|
||||||
}) names);
|
inherit name;
|
||||||
|
value = children.${name};
|
||||||
|
})
|
||||||
|
names);
|
||||||
|
|
||||||
# Create a mark containing the location of this attribute and
|
# Create a mark containing the location of this attribute and
|
||||||
# a list of all child attribute names added by readTree.
|
# a list of all child attribute names added by readTree.
|
||||||
|
@ -57,12 +60,13 @@ let
|
||||||
|
|
||||||
# Import a file and enforce our calling convention
|
# Import a file and enforce our calling convention
|
||||||
importFile = args: scopedArgs: path: parts: filter:
|
importFile = args: scopedArgs: path: parts: filter:
|
||||||
let
|
let
|
||||||
importedFile = if scopedArgs != {}
|
importedFile =
|
||||||
then builtins.scopedImport scopedArgs path
|
if scopedArgs != { }
|
||||||
else import path;
|
then builtins.scopedImport scopedArgs path
|
||||||
|
else import path;
|
||||||
pathType = builtins.typeOf importedFile;
|
pathType = builtins.typeOf importedFile;
|
||||||
in
|
in
|
||||||
if pathType != "lambda"
|
if pathType != "lambda"
|
||||||
then builtins.throw "readTree: trying to import ${toString path}, but it’s a ${pathType}, you need to make it a function like { depot, pkgs, ... }"
|
then builtins.throw "readTree: trying to import ${toString path}, but it’s a ${pathType}, you need to make it a function like { depot, pkgs, ... }"
|
||||||
else importedFile (filter parts (argsWithPath args parts));
|
else importedFile (filter parts (argsWithPath args parts));
|
||||||
|
@ -76,8 +80,9 @@ let
|
||||||
dir = readDirVisible initPath;
|
dir = readDirVisible initPath;
|
||||||
joinChild = c: initPath + ("/" + c);
|
joinChild = c: initPath + ("/" + c);
|
||||||
|
|
||||||
self = if rootDir
|
self =
|
||||||
then { __readTree = []; }
|
if rootDir
|
||||||
|
then { __readTree = [ ]; }
|
||||||
else importFile args scopedArgs initPath parts argsFilter;
|
else importFile args scopedArgs initPath parts argsFilter;
|
||||||
|
|
||||||
# Import subdirectories of the current one, unless the special
|
# Import subdirectories of the current one, unless the special
|
||||||
|
@ -88,33 +93,41 @@ let
|
||||||
# should be ignored, but its content is not inspected by
|
# should be ignored, but its content is not inspected by
|
||||||
# readTree
|
# readTree
|
||||||
filterDir = f: dir."${f}" == "directory";
|
filterDir = f: dir."${f}" == "directory";
|
||||||
children = if hasAttr ".skip-subtree" dir then [] else map (c: {
|
children = if hasAttr ".skip-subtree" dir then [ ] else
|
||||||
name = c;
|
map
|
||||||
value = readTree {
|
(c: {
|
||||||
inherit argsFilter scopedArgs;
|
name = c;
|
||||||
args = args;
|
value = readTree {
|
||||||
initPath = (joinChild c);
|
inherit argsFilter scopedArgs;
|
||||||
rootDir = false;
|
args = args;
|
||||||
parts = (parts ++ [ c ]);
|
initPath = (joinChild c);
|
||||||
};
|
rootDir = false;
|
||||||
}) (filter filterDir (attrNames dir));
|
parts = (parts ++ [ c ]);
|
||||||
|
};
|
||||||
|
})
|
||||||
|
(filter filterDir (attrNames dir));
|
||||||
|
|
||||||
# Import Nix files
|
# Import Nix files
|
||||||
nixFiles = if hasAttr ".skip-subtree" dir then []
|
nixFiles =
|
||||||
|
if hasAttr ".skip-subtree" dir then [ ]
|
||||||
else filter (f: f != null) (map nixFileName (attrNames dir));
|
else filter (f: f != null) (map nixFileName (attrNames dir));
|
||||||
nixChildren = map (c: let
|
nixChildren = map
|
||||||
p = joinChild (c + ".nix");
|
(c:
|
||||||
childParts = parts ++ [ c ];
|
let
|
||||||
imported = importFile args scopedArgs p childParts argsFilter;
|
p = joinChild (c + ".nix");
|
||||||
in {
|
childParts = parts ++ [ c ];
|
||||||
name = c;
|
imported = importFile args scopedArgs p childParts argsFilter;
|
||||||
value =
|
in
|
||||||
if isAttrs imported
|
{
|
||||||
then imported // marker childParts {}
|
name = c;
|
||||||
else imported;
|
value =
|
||||||
}) nixFiles;
|
if isAttrs imported
|
||||||
|
then imported // marker childParts { }
|
||||||
|
else imported;
|
||||||
|
})
|
||||||
|
nixFiles;
|
||||||
|
|
||||||
nodeValue = if dir ? "default.nix" then self else {};
|
nodeValue = if dir ? "default.nix" then self else { };
|
||||||
|
|
||||||
allChildren = listToAttrs (
|
allChildren = listToAttrs (
|
||||||
if dir ? "default.nix"
|
if dir ? "default.nix"
|
||||||
|
@ -123,9 +136,9 @@ let
|
||||||
);
|
);
|
||||||
|
|
||||||
in
|
in
|
||||||
if isAttrs nodeValue
|
if isAttrs nodeValue
|
||||||
then nodeValue // allChildren // (marker parts allChildren)
|
then nodeValue // allChildren // (marker parts allChildren)
|
||||||
else nodeValue;
|
else nodeValue;
|
||||||
|
|
||||||
# Function which can be used to find all readTree targets within an
|
# Function which can be used to find all readTree targets within an
|
||||||
# attribute set.
|
# attribute set.
|
||||||
|
@ -143,40 +156,42 @@ let
|
||||||
# should be included in the build.
|
# should be included in the build.
|
||||||
gather = eligible: node:
|
gather = eligible: node:
|
||||||
if node ? __readTree then
|
if node ? __readTree then
|
||||||
# Include the node itself if it is eligible.
|
# Include the node itself if it is eligible.
|
||||||
(if eligible node then [ node ] else [])
|
(if eligible node then [ node ] else [ ])
|
||||||
# Include eligible children of the node
|
# Include eligible children of the node
|
||||||
++ concatMap (gather eligible) (map (attr: node."${attr}") node.__readTreeChildren)
|
++ concatMap (gather eligible) (map (attr: node."${attr}") node.__readTreeChildren)
|
||||||
# Include specified sub-targets of the node
|
# Include specified sub-targets of the node
|
||||||
++ filter eligible (map
|
++ filter eligible (map
|
||||||
(k: (node."${k}" or {}) // {
|
(k: (node."${k}" or { }) // {
|
||||||
# Keep the same tree location, but explicitly mark this
|
# Keep the same tree location, but explicitly mark this
|
||||||
# node as a subtarget.
|
# node as a subtarget.
|
||||||
__readTree = node.__readTree;
|
__readTree = node.__readTree;
|
||||||
__readTreeChildren = [];
|
__readTreeChildren = [ ];
|
||||||
__subtarget = k;
|
__subtarget = k;
|
||||||
})
|
})
|
||||||
(node.meta.targets or []))
|
(node.meta.targets or [ ]))
|
||||||
else [];
|
else [ ];
|
||||||
|
|
||||||
# Determine whether a given value is a derivation.
|
# Determine whether a given value is a derivation.
|
||||||
# Copied from nixpkgs/lib for cases where lib is not available yet.
|
# Copied from nixpkgs/lib for cases where lib is not available yet.
|
||||||
isDerivation = x: isAttrs x && x ? type && x.type == "derivation";
|
isDerivation = x: isAttrs x && x ? type && x.type == "derivation";
|
||||||
in {
|
in
|
||||||
|
{
|
||||||
inherit gather;
|
inherit gather;
|
||||||
|
|
||||||
__functor = _:
|
__functor = _:
|
||||||
{ path
|
{ path
|
||||||
, args
|
, args
|
||||||
, filter ? (_parts: x: x)
|
, filter ? (_parts: x: x)
|
||||||
, scopedArgs ? {} }:
|
, scopedArgs ? { }
|
||||||
readTree {
|
}:
|
||||||
inherit args scopedArgs;
|
readTree {
|
||||||
argsFilter = filter;
|
inherit args scopedArgs;
|
||||||
initPath = path;
|
argsFilter = filter;
|
||||||
rootDir = true;
|
initPath = path;
|
||||||
parts = [];
|
rootDir = true;
|
||||||
};
|
parts = [ ];
|
||||||
|
};
|
||||||
|
|
||||||
# In addition to readTree itself, some functionality is exposed that
|
# In addition to readTree itself, some functionality is exposed that
|
||||||
# is useful for users of readTree.
|
# is useful for users of readTree.
|
||||||
|
@ -193,7 +208,7 @@ in {
|
||||||
# which should be able to access the restricted folder.
|
# which should be able to access the restricted folder.
|
||||||
#
|
#
|
||||||
# reason: Textual explanation for the restriction (included in errors)
|
# reason: Textual explanation for the restriction (included in errors)
|
||||||
restrictFolder = { folder, exceptions ? [], reason }: parts: args:
|
restrictFolder = { folder, exceptions ? [ ], reason }: parts: args:
|
||||||
if (elemAt parts 0) == folder || elem parts exceptions
|
if (elemAt parts 0) == folder || elem parts exceptions
|
||||||
then args
|
then args
|
||||||
else args // {
|
else args // {
|
||||||
|
@ -224,8 +239,8 @@ in {
|
||||||
drvTargets = attrs: attrs // {
|
drvTargets = attrs: attrs // {
|
||||||
meta = {
|
meta = {
|
||||||
targets = builtins.filter
|
targets = builtins.filter
|
||||||
(x: isDerivation attrs."${x}")
|
(x: isDerivation attrs."${x}")
|
||||||
(builtins.attrNames attrs);
|
(builtins.attrNames attrs);
|
||||||
} // (attrs.meta or {});
|
} // (attrs.meta or { });
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
|
@ -10,13 +10,13 @@ let
|
||||||
|
|
||||||
tree-ex = depot.nix.readTree {
|
tree-ex = depot.nix.readTree {
|
||||||
path = ./test-example;
|
path = ./test-example;
|
||||||
args = {};
|
args = { };
|
||||||
};
|
};
|
||||||
|
|
||||||
example = it "corresponds to the README example" [
|
example = it "corresponds to the README example" [
|
||||||
(assertEq "third_party attrset"
|
(assertEq "third_party attrset"
|
||||||
(lib.isAttrs tree-ex.third_party
|
(lib.isAttrs tree-ex.third_party
|
||||||
&& (! lib.isDerivation tree-ex.third_party))
|
&& (! lib.isDerivation tree-ex.third_party))
|
||||||
true)
|
true)
|
||||||
(assertEq "third_party attrset other attribute"
|
(assertEq "third_party attrset other attribute"
|
||||||
tree-ex.third_party.favouriteColour
|
tree-ex.third_party.favouriteColour
|
||||||
|
@ -37,7 +37,7 @@ let
|
||||||
|
|
||||||
tree-tl = depot.nix.readTree {
|
tree-tl = depot.nix.readTree {
|
||||||
path = ./test-tree-traversal;
|
path = ./test-tree-traversal;
|
||||||
args = {};
|
args = { };
|
||||||
};
|
};
|
||||||
|
|
||||||
traversal-logic = it "corresponds to the traversal logic in the README" [
|
traversal-logic = it "corresponds to the traversal logic in the README" [
|
||||||
|
@ -82,7 +82,7 @@ let
|
||||||
"Picked up through the drv")
|
"Picked up through the drv")
|
||||||
(assertEq "default.nix drv is not changed by readTree"
|
(assertEq "default.nix drv is not changed by readTree"
|
||||||
tree-tl.default-nix.can-be-drv
|
tree-tl.default-nix.can-be-drv
|
||||||
(import ./test-tree-traversal/default-nix/can-be-drv/default.nix {}))
|
(import ./test-tree-traversal/default-nix/can-be-drv/default.nix { }))
|
||||||
];
|
];
|
||||||
|
|
||||||
# these each call readTree themselves because the throws have to happen inside assertThrows
|
# these each call readTree themselves because the throws have to happen inside assertThrows
|
||||||
|
@ -90,7 +90,7 @@ let
|
||||||
(assertThrows "this file is not a function"
|
(assertThrows "this file is not a function"
|
||||||
(depot.nix.readTree {
|
(depot.nix.readTree {
|
||||||
path = ./test-wrong-not-a-function;
|
path = ./test-wrong-not-a-function;
|
||||||
args = {};
|
args = { };
|
||||||
}).not-a-function)
|
}).not-a-function)
|
||||||
# can’t test for that, assertThrows can’t catch this error
|
# can’t test for that, assertThrows can’t catch this error
|
||||||
# (assertThrows "this file is a function but doesn’t have dots"
|
# (assertThrows "this file is a function but doesn’t have dots"
|
||||||
|
@ -99,12 +99,13 @@ let
|
||||||
|
|
||||||
read-markers = depot.nix.readTree {
|
read-markers = depot.nix.readTree {
|
||||||
path = ./test-marker;
|
path = ./test-marker;
|
||||||
args = {};
|
args = { };
|
||||||
};
|
};
|
||||||
|
|
||||||
assertMarkerByPath = path:
|
assertMarkerByPath = path:
|
||||||
assertEq "${lib.concatStringsSep "." path} is marked correctly"
|
assertEq "${lib.concatStringsSep "." path} is marked correctly"
|
||||||
(lib.getAttrFromPath path read-markers).__readTree path;
|
(lib.getAttrFromPath path read-markers).__readTree
|
||||||
|
path;
|
||||||
|
|
||||||
markers = it "marks nodes correctly" [
|
markers = it "marks nodes correctly" [
|
||||||
(assertMarkerByPath [ "directory-marked" ])
|
(assertMarkerByPath [ "directory-marked" ])
|
||||||
|
@ -119,7 +120,8 @@ let
|
||||||
read-markers.directory-marked.nested.__readTreeChildren [ ])
|
read-markers.directory-marked.nested.__readTreeChildren [ ])
|
||||||
];
|
];
|
||||||
|
|
||||||
in runTestsuite "readTree" [
|
in
|
||||||
|
runTestsuite "readTree" [
|
||||||
example
|
example
|
||||||
traversal-logic
|
traversal-logic
|
||||||
wrong
|
wrong
|
||||||
|
|
|
@ -1,3 +1,3 @@
|
||||||
{ ... }:
|
{ ... }:
|
||||||
|
|
||||||
{}
|
{ }
|
||||||
|
|
|
@ -1,3 +1,3 @@
|
||||||
{ ... }:
|
{ ... }:
|
||||||
|
|
||||||
{}
|
{ }
|
||||||
|
|
|
@ -1,3 +1,3 @@
|
||||||
{ ... }:
|
{ ... }:
|
||||||
|
|
||||||
{}
|
{ }
|
||||||
|
|
|
@ -1,3 +1,3 @@
|
||||||
{ ... }:
|
{ ... }:
|
||||||
|
|
||||||
{}
|
{ }
|
||||||
|
|
|
@ -3,6 +3,6 @@
|
||||||
|
|
||||||
with depot.nix.yants;
|
with depot.nix.yants;
|
||||||
|
|
||||||
defun [ path drv ] (file: pkgs.runCommandNoCC "${file}.rendered.html" {} ''
|
defun [ path drv ] (file: pkgs.runCommandNoCC "${file}.rendered.html" { } ''
|
||||||
cat ${file} | ${depot.tools.cheddar}/bin/cheddar --about-filter ${file} > $out
|
cat ${file} | ${depot.tools.cheddar}/bin/cheddar --about-filter ${file} > $out
|
||||||
'')
|
'')
|
||||||
|
|
|
@ -9,7 +9,7 @@ let
|
||||||
runExeclineLocal = name: args: execline:
|
runExeclineLocal = name: args: execline:
|
||||||
runExecline name
|
runExecline name
|
||||||
(args // {
|
(args // {
|
||||||
derivationArgs = args.derivationArgs or {} // {
|
derivationArgs = args.derivationArgs or { } // {
|
||||||
preferLocalBuild = true;
|
preferLocalBuild = true;
|
||||||
allowSubstitutes = false;
|
allowSubstitutes = false;
|
||||||
};
|
};
|
||||||
|
@ -23,7 +23,8 @@ let
|
||||||
inherit pkgs;
|
inherit pkgs;
|
||||||
};
|
};
|
||||||
|
|
||||||
in {
|
in
|
||||||
|
{
|
||||||
__functor = _: runExecline;
|
__functor = _: runExecline;
|
||||||
local = runExeclineLocal;
|
local = runExeclineLocal;
|
||||||
inherit tests;
|
inherit tests;
|
||||||
|
|
|
@ -35,32 +35,32 @@
|
||||||
|
|
||||||
let
|
let
|
||||||
bins = getBins pkgs.execline [
|
bins = getBins pkgs.execline [
|
||||||
"execlineb"
|
"execlineb"
|
||||||
{ use = "if"; as = "execlineIf"; }
|
{ use = "if"; as = "execlineIf"; }
|
||||||
"redirfd"
|
"redirfd"
|
||||||
"importas"
|
"importas"
|
||||||
"exec"
|
"exec"
|
||||||
]
|
]
|
||||||
// getBins pkgs.s6-portable-utils [
|
// getBins pkgs.s6-portable-utils [
|
||||||
"s6-cat"
|
"s6-cat"
|
||||||
"s6-grep"
|
"s6-grep"
|
||||||
"s6-touch"
|
"s6-touch"
|
||||||
"s6-test"
|
"s6-test"
|
||||||
"s6-chmod"
|
"s6-chmod"
|
||||||
];
|
];
|
||||||
|
|
||||||
in
|
in
|
||||||
|
|
||||||
# TODO: move name into the attrset
|
# TODO: move name into the attrset
|
||||||
name:
|
name:
|
||||||
{
|
{
|
||||||
# a string to pass as stdin to the execline script
|
# a string to pass as stdin to the execline script
|
||||||
stdin ? ""
|
stdin ? ""
|
||||||
# a program wrapping the acutal execline invocation;
|
# a program wrapping the acutal execline invocation;
|
||||||
# should be in Bernstein-chaining style
|
# should be in Bernstein-chaining style
|
||||||
, builderWrapper ? bins.exec
|
, builderWrapper ? bins.exec
|
||||||
# additional arguments to pass to the derivation
|
# additional arguments to pass to the derivation
|
||||||
, derivationArgs ? {}
|
, derivationArgs ? { }
|
||||||
}:
|
}:
|
||||||
# the execline script as a nested list of string,
|
# the execline script as a nested list of string,
|
||||||
# representing the blocks;
|
# representing the blocks;
|
||||||
|
@ -90,33 +90,33 @@ derivation (derivationArgs // {
|
||||||
passAsFile = [
|
passAsFile = [
|
||||||
"_runExeclineScript"
|
"_runExeclineScript"
|
||||||
"_runExeclineStdin"
|
"_runExeclineStdin"
|
||||||
] ++ derivationArgs.passAsFile or [];
|
] ++ derivationArgs.passAsFile or [ ];
|
||||||
|
|
||||||
# the default, exec acts as identity executable
|
# the default, exec acts as identity executable
|
||||||
builder = builderWrapper;
|
builder = builderWrapper;
|
||||||
|
|
||||||
args = [
|
args = [
|
||||||
bins.importas # import script file as $script
|
bins.importas # import script file as $script
|
||||||
"-ui" # drop the envvar afterwards
|
"-ui" # drop the envvar afterwards
|
||||||
"script" # substitution name
|
"script" # substitution name
|
||||||
"_runExeclineScriptPath" # passed script file
|
"_runExeclineScriptPath" # passed script file
|
||||||
|
|
||||||
bins.importas # do the same for $stdin
|
bins.importas # do the same for $stdin
|
||||||
"-ui"
|
"-ui"
|
||||||
"stdin"
|
"stdin"
|
||||||
"_runExeclineStdinPath"
|
"_runExeclineStdinPath"
|
||||||
|
|
||||||
bins.redirfd # now we
|
bins.redirfd # now we
|
||||||
"-r" # read the file
|
"-r" # read the file
|
||||||
"0" # into the stdin of execlineb
|
"0" # into the stdin of execlineb
|
||||||
"$stdin" # that was given via stdin
|
"$stdin" # that was given via stdin
|
||||||
|
|
||||||
bins.execlineb # the actual invocation
|
bins.execlineb # the actual invocation
|
||||||
# TODO(Profpatsch): depending on the use-case, -S0 might not be enough
|
# TODO(Profpatsch): depending on the use-case, -S0 might not be enough
|
||||||
# in all use-cases, then a wrapper for execlineb arguments
|
# in all use-cases, then a wrapper for execlineb arguments
|
||||||
# should be added (-P, -S, -s).
|
# should be added (-P, -S, -s).
|
||||||
"-S0" # set $@ inside the execline script
|
"-S0" # set $@ inside the execline script
|
||||||
"-W" # die on syntax error
|
"-W" # die on syntax error
|
||||||
"$script" # substituted by importas
|
"$script" # substituted by importas
|
||||||
];
|
];
|
||||||
})
|
})
|
||||||
|
|
|
@ -1,23 +1,29 @@
|
||||||
{ stdenv, pkgs, runExecline, runExeclineLocal, getBins, writeScript
|
{ stdenv
|
||||||
# https://www.mail-archive.com/skaware@list.skarnet.org/msg01256.html
|
, pkgs
|
||||||
, coreutils }:
|
, runExecline
|
||||||
|
, runExeclineLocal
|
||||||
|
, getBins
|
||||||
|
, writeScript
|
||||||
|
# https://www.mail-archive.com/skaware@list.skarnet.org/msg01256.html
|
||||||
|
, coreutils
|
||||||
|
}:
|
||||||
|
|
||||||
let
|
let
|
||||||
|
|
||||||
bins = getBins coreutils [ "mv" ]
|
bins = getBins coreutils [ "mv" ]
|
||||||
// getBins pkgs.execline [
|
// getBins pkgs.execline [
|
||||||
"execlineb"
|
"execlineb"
|
||||||
{ use = "if"; as = "execlineIf"; }
|
{ use = "if"; as = "execlineIf"; }
|
||||||
"redirfd"
|
"redirfd"
|
||||||
"importas"
|
"importas"
|
||||||
]
|
]
|
||||||
// getBins pkgs.s6-portable-utils [
|
// getBins pkgs.s6-portable-utils [
|
||||||
"s6-chmod"
|
"s6-chmod"
|
||||||
"s6-grep"
|
"s6-grep"
|
||||||
"s6-touch"
|
"s6-touch"
|
||||||
"s6-cat"
|
"s6-cat"
|
||||||
"s6-test"
|
"s6-test"
|
||||||
];
|
];
|
||||||
|
|
||||||
# execline block of depth 1
|
# execline block of depth 1
|
||||||
block = args: builtins.map (arg: " ${arg}") args ++ [ "" ];
|
block = args: builtins.map (arg: " ${arg}") args ++ [ "" ];
|
||||||
|
@ -31,49 +37,80 @@ let
|
||||||
builder = bins.execlineIf;
|
builder = bins.execlineIf;
|
||||||
args =
|
args =
|
||||||
(block [
|
(block [
|
||||||
bins.redirfd "-r" "0" file # read file to stdin
|
bins.redirfd
|
||||||
bins.s6-grep "-F" "-q" line # and grep for the line
|
"-r"
|
||||||
|
"0"
|
||||||
|
file # read file to stdin
|
||||||
|
bins.s6-grep
|
||||||
|
"-F"
|
||||||
|
"-q"
|
||||||
|
line # and grep for the line
|
||||||
])
|
])
|
||||||
++ [
|
++ [
|
||||||
# if the block succeeded, touch $out
|
# if the block succeeded, touch $out
|
||||||
bins.importas "-ui" "out" "out"
|
bins.importas
|
||||||
bins.s6-touch "$out"
|
"-ui"
|
||||||
|
"out"
|
||||||
|
"out"
|
||||||
|
bins.s6-touch
|
||||||
|
"$out"
|
||||||
];
|
];
|
||||||
preferLocalBuild = true;
|
preferLocalBuild = true;
|
||||||
allowSubstitutes = false;
|
allowSubstitutes = false;
|
||||||
};
|
};
|
||||||
|
|
||||||
# basic test that touches out
|
# basic test that touches out
|
||||||
basic = runExeclineLocal "run-execline-test-basic" {
|
basic = runExeclineLocal "run-execline-test-basic"
|
||||||
} [
|
{ } [
|
||||||
"importas" "-ui" "out" "out"
|
"importas"
|
||||||
"${bins.s6-touch}" "$out"
|
"-ui"
|
||||||
|
"out"
|
||||||
|
"out"
|
||||||
|
"${bins.s6-touch}"
|
||||||
|
"$out"
|
||||||
];
|
];
|
||||||
|
|
||||||
# whether the stdin argument works as intended
|
# whether the stdin argument works as intended
|
||||||
stdin = fileHasLine "foo" (runExeclineLocal "run-execline-test-stdin" {
|
stdin = fileHasLine "foo" (runExeclineLocal "run-execline-test-stdin"
|
||||||
stdin = "foo\nbar\nfoo";
|
{
|
||||||
} [
|
stdin = "foo\nbar\nfoo";
|
||||||
"importas" "-ui" "out" "out"
|
} [
|
||||||
# this pipes stdout of s6-cat to $out
|
"importas"
|
||||||
# and s6-cat redirects from stdin to stdout
|
"-ui"
|
||||||
"redirfd" "-w" "1" "$out" bins.s6-cat
|
"out"
|
||||||
|
"out"
|
||||||
|
# this pipes stdout of s6-cat to $out
|
||||||
|
# and s6-cat redirects from stdin to stdout
|
||||||
|
"redirfd"
|
||||||
|
"-w"
|
||||||
|
"1"
|
||||||
|
"$out"
|
||||||
|
bins.s6-cat
|
||||||
]);
|
]);
|
||||||
|
|
||||||
|
|
||||||
wrapWithVar = runExeclineLocal "run-execline-test-wrap-with-var" {
|
wrapWithVar = runExeclineLocal "run-execline-test-wrap-with-var"
|
||||||
builderWrapper = writeScript "var-wrapper" ''
|
{
|
||||||
#!${bins.execlineb} -S0
|
builderWrapper = writeScript "var-wrapper" ''
|
||||||
export myvar myvalue $@
|
#!${bins.execlineb} -S0
|
||||||
'';
|
export myvar myvalue $@
|
||||||
} [
|
'';
|
||||||
"importas" "-ui" "v" "myvar"
|
} [
|
||||||
"if" [ bins.s6-test "myvalue" "=" "$v" ]
|
"importas"
|
||||||
"importas" "out" "out"
|
"-ui"
|
||||||
bins.s6-touch "$out"
|
"v"
|
||||||
|
"myvar"
|
||||||
|
"if"
|
||||||
|
[ bins.s6-test "myvalue" "=" "$v" ]
|
||||||
|
"importas"
|
||||||
|
"out"
|
||||||
|
"out"
|
||||||
|
bins.s6-touch
|
||||||
|
"$out"
|
||||||
];
|
];
|
||||||
|
|
||||||
in [
|
in
|
||||||
|
[
|
||||||
basic
|
basic
|
||||||
stdin
|
stdin
|
||||||
wrapWithVar
|
wrapWithVar
|
||||||
|
|
|
@ -38,11 +38,11 @@ let
|
||||||
;
|
;
|
||||||
|
|
||||||
bins = depot.nix.getBins pkgs.coreutils [ "printf" ]
|
bins = depot.nix.getBins pkgs.coreutils [ "printf" ]
|
||||||
// depot.nix.getBins pkgs.s6-portable-utils [ "s6-touch" "s6-false" "s6-cat" ];
|
// depot.nix.getBins pkgs.s6-portable-utils [ "s6-touch" "s6-false" "s6-cat" ];
|
||||||
|
|
||||||
# Returns true if the given expression throws when `deepSeq`-ed
|
# Returns true if the given expression throws when `deepSeq`-ed
|
||||||
throws = expr:
|
throws = expr:
|
||||||
!(builtins.tryEval (builtins.deepSeq expr {})).success;
|
!(builtins.tryEval (builtins.deepSeq expr { })).success;
|
||||||
|
|
||||||
# rewrite the builtins.partition result
|
# rewrite the builtins.partition result
|
||||||
# to use `ok` and `err` instead of `right` and `wrong`.
|
# to use `ok` and `err` instead of `right` and `wrong`.
|
||||||
|
@ -99,11 +99,12 @@ let
|
||||||
(context: desc: res:
|
(context: desc: res:
|
||||||
if res
|
if res
|
||||||
then { yep = { test = desc; }; }
|
then { yep = { test = desc; }; }
|
||||||
else { nope = {
|
else {
|
||||||
test = desc;
|
nope = {
|
||||||
inherit context;
|
test = desc;
|
||||||
};
|
inherit context;
|
||||||
});
|
};
|
||||||
|
});
|
||||||
|
|
||||||
# assert that left and right values are equal
|
# assert that left and right values are equal
|
||||||
assertEq = defun [ string any any AssertResult ]
|
assertEq = defun [ string any any AssertResult ]
|
||||||
|
@ -111,7 +112,7 @@ let
|
||||||
let
|
let
|
||||||
context = { not-equal = { inherit left right; }; };
|
context = { not-equal = { inherit left right; }; };
|
||||||
in
|
in
|
||||||
assertBoolContext context desc (left == right));
|
assertBoolContext context desc (left == right));
|
||||||
|
|
||||||
# assert that the expression throws when `deepSeq`-ed
|
# assert that the expression throws when `deepSeq`-ed
|
||||||
assertThrows = defun [ string any AssertResult ]
|
assertThrows = defun [ string any AssertResult ]
|
||||||
|
@ -119,7 +120,7 @@ let
|
||||||
let
|
let
|
||||||
context = { should-throw = { inherit expr; }; };
|
context = { should-throw = { inherit expr; }; };
|
||||||
in
|
in
|
||||||
assertBoolContext context desc (throws expr));
|
assertBoolContext context desc (throws expr));
|
||||||
|
|
||||||
# assert that the expression does not throw when `deepSeq`-ed
|
# assert that the expression does not throw when `deepSeq`-ed
|
||||||
assertDoesNotThrow = defun [ string any AssertResult ]
|
assertDoesNotThrow = defun [ string any AssertResult ]
|
||||||
|
@ -144,31 +145,50 @@ let
|
||||||
yep = _: true;
|
yep = _: true;
|
||||||
nope = _: false;
|
nope = _: false;
|
||||||
};
|
};
|
||||||
res = partitionTests (it:
|
res = partitionTests
|
||||||
(partitionTests goodAss it.asserts).err == []
|
(it:
|
||||||
) itResults;
|
(partitionTests goodAss it.asserts).err == [ ]
|
||||||
prettyRes = lib.generators.toPretty {} res;
|
)
|
||||||
|
itResults;
|
||||||
|
prettyRes = lib.generators.toPretty { } res;
|
||||||
in
|
in
|
||||||
if res.err == []
|
if res.err == [ ]
|
||||||
then depot.nix.runExecline.local "testsuite-${name}-successful" {} [
|
then
|
||||||
"importas" "out" "out"
|
depot.nix.runExecline.local "testsuite-${name}-successful" { } [
|
||||||
|
"importas"
|
||||||
|
"out"
|
||||||
|
"out"
|
||||||
# force derivation to rebuild if test case list changes
|
# force derivation to rebuild if test case list changes
|
||||||
"ifelse" [ bins.s6-false ] [
|
"ifelse"
|
||||||
bins.printf "" (builtins.hashString "sha512" prettyRes)
|
[ bins.s6-false ]
|
||||||
|
[
|
||||||
|
bins.printf
|
||||||
|
""
|
||||||
|
(builtins.hashString "sha512" prettyRes)
|
||||||
]
|
]
|
||||||
"if" [ bins.printf "%s\n" "testsuite ${name} successful!" ]
|
"if"
|
||||||
bins.s6-touch "$out"
|
[ bins.printf "%s\n" "testsuite ${name} successful!" ]
|
||||||
|
bins.s6-touch
|
||||||
|
"$out"
|
||||||
]
|
]
|
||||||
else depot.nix.runExecline.local "testsuite-${name}-failed" {
|
else
|
||||||
stdin = prettyRes + "\n";
|
depot.nix.runExecline.local "testsuite-${name}-failed"
|
||||||
} [
|
{
|
||||||
"importas" "out" "out"
|
stdin = prettyRes + "\n";
|
||||||
"if" [ bins.printf "%s\n" "testsuite ${name} failed!" ]
|
} [
|
||||||
"if" [ bins.s6-cat ]
|
"importas"
|
||||||
"exit" "1"
|
"out"
|
||||||
|
"out"
|
||||||
|
"if"
|
||||||
|
[ bins.printf "%s\n" "testsuite ${name} failed!" ]
|
||||||
|
"if"
|
||||||
|
[ bins.s6-cat ]
|
||||||
|
"exit"
|
||||||
|
"1"
|
||||||
]);
|
]);
|
||||||
|
|
||||||
in {
|
in
|
||||||
|
{
|
||||||
inherit
|
inherit
|
||||||
assertEq
|
assertEq
|
||||||
assertThrows
|
assertThrows
|
||||||
|
|
|
@ -45,14 +45,16 @@ let
|
||||||
let
|
let
|
||||||
withLeading = p: if builtins.substring 0 1 p == "/" then p else "/" + p;
|
withLeading = p: if builtins.substring 0 1 p == "/" then p else "/" + p;
|
||||||
fullPath =
|
fullPath =
|
||||||
/**/ if builtins.isPath path then path
|
/**/
|
||||||
|
if builtins.isPath path then path
|
||||||
else if builtins.isString path then (root + withLeading path)
|
else if builtins.isString path then (root + withLeading path)
|
||||||
else builtins.throw "Unsupported path type ${builtins.typeOf path}";
|
else builtins.throw "Unsupported path type ${builtins.typeOf path}";
|
||||||
strPath = toString fullPath;
|
strPath = toString fullPath;
|
||||||
contextPath = "${fullPath}";
|
contextPath = "${fullPath}";
|
||||||
belowRoot = builtins.substring rootLength (-1) strPath;
|
belowRoot = builtins.substring rootLength (-1) strPath;
|
||||||
prefix = builtins.substring 0 rootLength strPath;
|
prefix = builtins.substring 0 rootLength strPath;
|
||||||
in assert toString root == prefix; {
|
in
|
||||||
|
assert toString root == prefix; {
|
||||||
src = contextPath;
|
src = contextPath;
|
||||||
dst = belowRoot;
|
dst = belowRoot;
|
||||||
};
|
};
|
||||||
|
@ -61,10 +63,12 @@ let
|
||||||
in
|
in
|
||||||
|
|
||||||
# TODO(sterni): teach readTree to also read symlinked directories,
|
# TODO(sterni): teach readTree to also read symlinked directories,
|
||||||
# so we ln -sT instead of cp -aT.
|
# so we ln -sT instead of cp -aT.
|
||||||
pkgs.runCommandNoCC "sparse-${builtins.baseNameOf root}" {} (
|
pkgs.runCommandNoCC "sparse-${builtins.baseNameOf root}" { } (
|
||||||
lib.concatMapStrings ({ src, dst }: ''
|
lib.concatMapStrings
|
||||||
mkdir -p "$(dirname "$out${dst}")"
|
({ src, dst }: ''
|
||||||
cp -aT --reflink=auto "${src}" "$out${dst}"
|
mkdir -p "$(dirname "$out${dst}")"
|
||||||
'') symlinks
|
cp -aT --reflink=auto "${src}" "$out${dst}"
|
||||||
|
'')
|
||||||
|
symlinks
|
||||||
)
|
)
|
||||||
|
|
|
@ -4,22 +4,24 @@ let
|
||||||
# if so sets `isTag` to `true` and sets the name and value.
|
# if so sets `isTag` to `true` and sets the name and value.
|
||||||
# If not, sets `isTag` to `false` and sets `errmsg`.
|
# If not, sets `isTag` to `false` and sets `errmsg`.
|
||||||
verifyTag = tag:
|
verifyTag = tag:
|
||||||
let cases = builtins.attrNames tag;
|
let
|
||||||
len = builtins.length cases;
|
cases = builtins.attrNames tag;
|
||||||
|
len = builtins.length cases;
|
||||||
in
|
in
|
||||||
if builtins.length cases == 1
|
if builtins.length cases == 1
|
||||||
then let name = builtins.head cases; in {
|
then
|
||||||
isTag = true;
|
let name = builtins.head cases; in {
|
||||||
name = name;
|
isTag = true;
|
||||||
val = tag.${name};
|
name = name;
|
||||||
errmsg = null;
|
val = tag.${name};
|
||||||
}
|
errmsg = null;
|
||||||
|
}
|
||||||
else {
|
else {
|
||||||
isTag = false;
|
isTag = false;
|
||||||
errmsg =
|
errmsg =
|
||||||
( "match: an instance of a sum is an attrset "
|
("match: an instance of a sum is an attrset "
|
||||||
+ "with exactly one element, yours had ${toString len}"
|
+ "with exactly one element, yours had ${toString len}"
|
||||||
+ ", namely: ${lib.generators.toPretty {} cases}" );
|
+ ", namely: ${lib.generators.toPretty {} cases}");
|
||||||
name = null;
|
name = null;
|
||||||
val = null;
|
val = null;
|
||||||
};
|
};
|
||||||
|
@ -63,21 +65,22 @@ let
|
||||||
# ] 1
|
# ] 1
|
||||||
# => { smol = 1; }
|
# => { smol = 1; }
|
||||||
discrDef = defTag: fs: v:
|
discrDef = defTag: fs: v:
|
||||||
let res = lib.findFirst
|
let
|
||||||
(t: t.val v)
|
res = lib.findFirst
|
||||||
null
|
(t: t.val v)
|
||||||
(map assertIsTag fs);
|
null
|
||||||
|
(map assertIsTag fs);
|
||||||
in
|
in
|
||||||
if res == null
|
if res == null
|
||||||
then { ${defTag} = v; }
|
then { ${defTag} = v; }
|
||||||
else { ${res.name} = v; };
|
else { ${res.name} = v; };
|
||||||
|
|
||||||
# Like `discrDef`, but fail if there is no match.
|
# Like `discrDef`, but fail if there is no match.
|
||||||
discr = fs: v:
|
discr = fs: v:
|
||||||
let res = discrDef null fs v; in
|
let res = discrDef null fs v; in
|
||||||
assert lib.assertMsg (res != null)
|
assert lib.assertMsg (res != null)
|
||||||
"tag.discr: No predicate found that matches ${lib.generators.toPretty {} v}";
|
"tag.discr: No predicate found that matches ${lib.generators.toPretty {} v}";
|
||||||
res;
|
res;
|
||||||
|
|
||||||
# The canonical pattern matching primitive.
|
# The canonical pattern matching primitive.
|
||||||
# A sum value is an attribute set with one element,
|
# A sum value is an attribute set with one element,
|
||||||
|
@ -104,17 +107,17 @@ let
|
||||||
match = sum: matcher:
|
match = sum: matcher:
|
||||||
let cases = builtins.attrNames sum;
|
let cases = builtins.attrNames sum;
|
||||||
in assert
|
in assert
|
||||||
let len = builtins.length cases; in
|
let len = builtins.length cases; in
|
||||||
lib.assertMsg (len == 1)
|
lib.assertMsg (len == 1)
|
||||||
( "match: an instance of a sum is an attrset "
|
("match: an instance of a sum is an attrset "
|
||||||
+ "with exactly one element, yours had ${toString len}"
|
+ "with exactly one element, yours had ${toString len}"
|
||||||
+ ", namely: ${lib.generators.toPretty {} cases}" );
|
+ ", namely: ${lib.generators.toPretty {} cases}");
|
||||||
let case = builtins.head cases;
|
let case = builtins.head cases;
|
||||||
in assert
|
in assert
|
||||||
lib.assertMsg (matcher ? ${case})
|
lib.assertMsg (matcher ? ${case})
|
||||||
( "match: \"${case}\" is not a valid case of this sum, "
|
("match: \"${case}\" is not a valid case of this sum, "
|
||||||
+ "the matcher accepts: ${lib.generators.toPretty {}
|
+ "the matcher accepts: ${lib.generators.toPretty {}
|
||||||
(builtins.attrNames matcher)}" );
|
(builtins.attrNames matcher)}");
|
||||||
matcher.${case} sum.${case};
|
matcher.${case} sum.${case};
|
||||||
|
|
||||||
# A `match` with the arguments flipped.
|
# A `match` with the arguments flipped.
|
||||||
|
@ -148,15 +151,16 @@ let
|
||||||
;
|
;
|
||||||
};
|
};
|
||||||
|
|
||||||
in {
|
in
|
||||||
inherit
|
{
|
||||||
verifyTag
|
inherit
|
||||||
tagName
|
verifyTag
|
||||||
tagValue
|
tagName
|
||||||
discr
|
tagValue
|
||||||
discrDef
|
discr
|
||||||
match
|
discrDef
|
||||||
matchLam
|
match
|
||||||
tests
|
matchLam
|
||||||
;
|
tests
|
||||||
|
;
|
||||||
}
|
}
|
||||||
|
|
|
@ -17,7 +17,7 @@ let
|
||||||
errmsg = null;
|
errmsg = null;
|
||||||
})
|
})
|
||||||
(assertEq "is not Tag"
|
(assertEq "is not Tag"
|
||||||
(removeAttrs (verifyTag { foo = "bar"; baz = 42; }) ["errmsg"])
|
(removeAttrs (verifyTag { foo = "bar"; baz = 42; }) [ "errmsg" ])
|
||||||
{
|
{
|
||||||
isTag = false;
|
isTag = false;
|
||||||
name = null;
|
name = null;
|
||||||
|
@ -41,7 +41,8 @@ let
|
||||||
(discr [
|
(discr [
|
||||||
{ bool = lib.isBool; }
|
{ bool = lib.isBool; }
|
||||||
{ int = lib.isInt; }
|
{ int = lib.isInt; }
|
||||||
] true)
|
]
|
||||||
|
true)
|
||||||
{ bool = true; })
|
{ bool = true; })
|
||||||
(assertEq "fallback to default"
|
(assertEq "fallback to default"
|
||||||
(discrDef "def" [
|
(discrDef "def" [
|
||||||
|
@ -53,19 +54,24 @@ let
|
||||||
|
|
||||||
match-test = it "can match things" [
|
match-test = it "can match things" [
|
||||||
(assertEq "match example"
|
(assertEq "match example"
|
||||||
(let
|
(
|
||||||
success = { res = 42; };
|
let
|
||||||
failure = { err = "no answer"; };
|
success = { res = 42; };
|
||||||
matcher = {
|
failure = { err = "no answer"; };
|
||||||
res = i: i + 1;
|
matcher = {
|
||||||
err = _: 0;
|
res = i: i + 1;
|
||||||
};
|
err = _: 0;
|
||||||
in {
|
};
|
||||||
one = match success matcher;
|
in
|
||||||
two = match failure matcher;
|
{
|
||||||
|
one = match success matcher;
|
||||||
|
two = match failure matcher;
|
||||||
|
}
|
||||||
|
)
|
||||||
|
{
|
||||||
|
one = 43;
|
||||||
|
two = 0;
|
||||||
})
|
})
|
||||||
{ one = 43;
|
|
||||||
two = 0; })
|
|
||||||
(assertEq "matchLam & pipe"
|
(assertEq "matchLam & pipe"
|
||||||
(lib.pipe { foo = 42; } [
|
(lib.pipe { foo = 42; } [
|
||||||
(matchLam {
|
(matchLam {
|
||||||
|
@ -81,8 +87,8 @@ let
|
||||||
];
|
];
|
||||||
|
|
||||||
in
|
in
|
||||||
runTestsuite "tag" [
|
runTestsuite "tag" [
|
||||||
isTag-test
|
isTag-test
|
||||||
discr-test
|
discr-test
|
||||||
match-test
|
match-test
|
||||||
]
|
]
|
||||||
|
|
|
@ -27,4 +27,5 @@ let
|
||||||
# Actual ACL entries
|
# Actual ACL entries
|
||||||
ACLs = list acl;
|
ACLs = list acl;
|
||||||
};
|
};
|
||||||
in config: pkgs.writeText "tailscale-acl.json" (toJSON (aclConfig config))
|
in
|
||||||
|
config: pkgs.writeText "tailscale-acl.json" (toJSON (aclConfig config))
|
||||||
|
|
|
@ -34,14 +34,14 @@ let
|
||||||
basename = builtins.unsafeDiscardStringContext
|
basename = builtins.unsafeDiscardStringContext
|
||||||
(builtins.baseNameOf strPath);
|
(builtins.baseNameOf strPath);
|
||||||
in
|
in
|
||||||
# If p is a direct child of storeDir, we need to remove
|
# If p is a direct child of storeDir, we need to remove
|
||||||
# the leading hash as well to make sure that:
|
# the leading hash as well to make sure that:
|
||||||
# `storePathName drv == storePathName (toString drv)`.
|
# `storePathName drv == storePathName (toString drv)`.
|
||||||
if noStoreDir == basename
|
if noStoreDir == basename
|
||||||
then builtins.substring 33 (-1) basename
|
then builtins.substring 33 (-1) basename
|
||||||
else basename
|
else basename
|
||||||
else builtins.throw "Don't know how to get (base)name of "
|
else builtins.throw "Don't know how to get (base)name of "
|
||||||
+ lib.generators.toPretty {} p;
|
+ lib.generators.toPretty { } p;
|
||||||
|
|
||||||
/* Query the type of a path exposing the same information as would be by
|
/* Query the type of a path exposing the same information as would be by
|
||||||
`builtins.readDir`, but for a single, specific target path.
|
`builtins.readDir`, but for a single, specific target path.
|
||||||
|
@ -106,7 +106,7 @@ let
|
||||||
# We need to call toString to prevent unsafeDiscardStringContext
|
# We need to call toString to prevent unsafeDiscardStringContext
|
||||||
# from importing a path into store which messes with base- and
|
# from importing a path into store which messes with base- and
|
||||||
# dirname of course.
|
# dirname of course.
|
||||||
path'= builtins.unsafeDiscardStringContext (toString path);
|
path' = builtins.unsafeDiscardStringContext (toString path);
|
||||||
# To read the containing directory we absolutely need
|
# To read the containing directory we absolutely need
|
||||||
# to keep the string context, otherwise a derivation
|
# to keep the string context, otherwise a derivation
|
||||||
# would not be realized before our check (at eval time)
|
# would not be realized before our check (at eval time)
|
||||||
|
@ -120,20 +120,22 @@ let
|
||||||
# directory. If not, either the target doesn't exist or is a regular file.
|
# directory. If not, either the target doesn't exist or is a regular file.
|
||||||
# TODO(sterni): is there a way to check reliably if the symlink target exists?
|
# TODO(sterni): is there a way to check reliably if the symlink target exists?
|
||||||
isSymlinkDir = builtins.pathExists (path' + "/.");
|
isSymlinkDir = builtins.pathExists (path' + "/.");
|
||||||
in {
|
in
|
||||||
|
{
|
||||||
${thisPathType} =
|
${thisPathType} =
|
||||||
/**/ if thisPathType != "symlink" then true
|
/**/
|
||||||
else if isSymlinkDir then "directory"
|
if thisPathType != "symlink" then true
|
||||||
else "regular-or-missing";
|
else if isSymlinkDir then "directory"
|
||||||
|
else "regular-or-missing";
|
||||||
};
|
};
|
||||||
|
|
||||||
pathType' = path:
|
pathType' = path:
|
||||||
let
|
let
|
||||||
p = pathType path;
|
p = pathType path;
|
||||||
in
|
in
|
||||||
if p ? missing
|
if p ? missing
|
||||||
then builtins.throw "${lib.generators.toPretty {} path} does not exist"
|
then builtins.throw "${lib.generators.toPretty {} path} does not exist"
|
||||||
else p;
|
else p;
|
||||||
|
|
||||||
/* Check whether the given path is a directory.
|
/* Check whether the given path is a directory.
|
||||||
Throws if the path in question doesn't exist.
|
Throws if the path in question doesn't exist.
|
||||||
|
@ -151,9 +153,11 @@ let
|
||||||
|
|
||||||
Type: path(-like) -> bool
|
Type: path(-like) -> bool
|
||||||
*/
|
*/
|
||||||
realPathIsDirectory = path: let
|
realPathIsDirectory = path:
|
||||||
pt = pathType' path;
|
let
|
||||||
in pt ? directory || pt.symlink or null == "directory";
|
pt = pathType' path;
|
||||||
|
in
|
||||||
|
pt ? directory || pt.symlink or null == "directory";
|
||||||
|
|
||||||
/* Check whether the given path is a regular file.
|
/* Check whether the given path is a regular file.
|
||||||
Throws if the path in question doesn't exist.
|
Throws if the path in question doesn't exist.
|
||||||
|
@ -169,7 +173,8 @@ let
|
||||||
*/
|
*/
|
||||||
isSymlink = path: pathType' path ? symlink;
|
   isSymlink = path: pathType' path ? symlink;

-in {
+in
+{
   inherit
     storePathName
     pathType

@ -26,38 +26,53 @@ let
   pathPredicates = it "judges paths correctly" (lib.flatten [
     # isDirectory
     (assertUtilsPred "directory isDirectory"
-      (isDirectory ./directory) true)
+      (isDirectory ./directory)
+      true)
     (assertUtilsPred "symlink not isDirectory"
-      (isDirectory ./symlink-directory) false)
+      (isDirectory ./symlink-directory)
+      false)
     (assertUtilsPred "file not isDirectory"
-      (isDirectory ./directory/file) false)
+      (isDirectory ./directory/file)
+      false)
     # realPathIsDirectory
     (assertUtilsPred "directory realPathIsDirectory"
-      (realPathIsDirectory ./directory) true)
+      (realPathIsDirectory ./directory)
+      true)
     (assertUtilsPred "symlink to directory realPathIsDirectory"
-      (realPathIsDirectory ./symlink-directory) true)
+      (realPathIsDirectory ./symlink-directory)
+      true)
     (assertUtilsPred "realPathIsDirectory resolves chained symlinks"
-      (realPathIsDirectory ./symlink-symlink-directory) true)
+      (realPathIsDirectory ./symlink-symlink-directory)
+      true)
     # isRegularFile
     (assertUtilsPred "file isRegularFile"
-      (isRegularFile ./directory/file) true)
+      (isRegularFile ./directory/file)
+      true)
     (assertUtilsPred "symlink not isRegularFile"
-      (isRegularFile ./symlink-file) false)
+      (isRegularFile ./symlink-file)
+      false)
     (assertUtilsPred "directory not isRegularFile"
-      (isRegularFile ./directory) false)
+      (isRegularFile ./directory)
+      false)
     # isSymlink
     (assertUtilsPred "symlink to file isSymlink"
-      (isSymlink ./symlink-file) true)
+      (isSymlink ./symlink-file)
+      true)
     (assertUtilsPred "symlink to directory isSymlink"
-      (isSymlink ./symlink-directory) true)
+      (isSymlink ./symlink-directory)
+      true)
     (assertUtilsPred "symlink to symlink isSymlink"
-      (isSymlink ./symlink-symlink-file) true)
+      (isSymlink ./symlink-symlink-file)
+      true)
     (assertUtilsPred "symlink to missing file isSymlink"
-      (isSymlink ./missing) true)
+      (isSymlink ./missing)
+      true)
     (assertUtilsPred "directory not isSymlink"
-      (isSymlink ./directory) false)
+      (isSymlink ./directory)
+      false)
     (assertUtilsPred "file not isSymlink"
-      (isSymlink ./directory/file) false)
+      (isSymlink ./directory/file)
+      false)
     # missing files throw
     (assertThrows "isDirectory throws on missing file"
       (isDirectory ./does-not-exist))

@ -89,15 +104,18 @@ let

   storePathNameTests = it "correctly gets the basename of a store path" [
     (assertEq "base name of a derivation"
-      (storePathName depot.tools.cheddar) depot.tools.cheddar.name)
+      (storePathName depot.tools.cheddar)
+      depot.tools.cheddar.name)
     (assertEq "base name of a store path string"
-      (storePathName cheddarStorePath) depot.tools.cheddar.name)
+      (storePathName cheddarStorePath)
+      depot.tools.cheddar.name)
     (assertEq "base name of a path within a store path"
       (storePathName "${cheddarStorePath}/bin/cheddar") "cheddar")
     (assertEq "base name of a path"
       (storePathName ../default.nix) "default.nix")
     (assertEq "base name of a cleanSourced path"
-      (storePathName cleanedSource) cleanedSource.name)
+      (storePathName cleanedSource)
+      cleanedSource.name)
   ];
 in
@ -1,6 +1,6 @@
 { depot, pkgs, ... }:

-{ name, src, deps ? (_: []), emacs ? pkgs.emacs27-nox }:
+{ name, src, deps ? (_: [ ]), emacs ? pkgs.emacs27-nox }:

 let
   inherit (pkgs) emacsPackages emacsPackagesGen;

@ -8,11 +8,13 @@ let

   finalEmacs = (emacsPackagesGen emacs).emacsWithPackages deps;

-  srcFile = if isString src
+  srcFile =
+    if isString src
     then toFile "${name}.el" src
     else src;

-in depot.nix.writeScriptBin name ''
+in
+depot.nix.writeScriptBin name ''
   #!/bin/sh
   ${finalEmacs}/bin/emacs --batch --no-site-file --script ${srcFile} $@
 ''
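
For orientation, a minimal usage sketch of the wrapper above (the attribute path depot.nix.writeElispBin and all example values are assumptions for illustration, not part of this change):

  depot.nix.writeElispBin {
    name = "greet";
    src = ''(message "hello from emacs")'';
    deps = epkgs: [ epkgs.dash ];
  }

The deps function receives the Emacs package set and the src string is written out as greet.el before being run in batch mode.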
@ -14,9 +14,10 @@ name:
 # "env": don’t substitute, set # and 0…n environment vaariables, where n=$#
 # "none": don’t substitute or set any positional arguments
 # "env-no-push": like "env", but bypass the push-phase. Not recommended.
-argMode ? "var",
-# Number of arguments to be substituted as variables (passed to "var"/"-s" or "var-full"/"-S"
-readNArgs ? 0,
+argMode ? "var"
+, # Number of arguments to be substituted as variables (passed to "var"/"-s" or "var-full"/"-S"
+  readNArgs ? 0
+,
 }:
 # Nested list of lists of commands.
 # Inner lists are translated to execline blocks.

@ -24,7 +25,7 @@ argList:

 let
   env =
     if argMode == "var" then "s${toString readNArgs}"
     else if argMode == "var-full" then "S${toString readNArgs}"
     else if argMode == "env" then ""
     else if argMode == "none" then "P"

@ -32,7 +33,7 @@ let
     else abort ''"${toString argMode}" is not a valid argMode, use one of "var", "var-full", "env", "none", "env-no-push".'';

 in
 depot.nix.writeScript name ''
   #!${pkgs.execline}/bin/execlineb -W${env}
   ${depot.nix.escapeExecline argList}
 ''
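
A hedged usage sketch of this builder (the call site and argument values are assumed, not taken from this change): with the default argMode = "var" and readNArgs = 1, env evaluates to "s1", so the generated script runs under execlineb -Ws1 and $1 is substituted from the first command-line argument:

  depot.nix.writeExecline "greet" { readNArgs = 1; } [
    [ "echo" "hello" "$1" ]
    [ "echo" "done" ]
  ]

Each inner list becomes one execline block in the emitted script.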
@ -5,25 +5,31 @@

 let
   bins = depot.nix.getBins pkgs.s6-portable-utils [
     "s6-cat"
     "s6-chmod"
   ];

 in
 name:
 # string of the executable script that is put in $out
 script:

-depot.nix.runExecline name {
+depot.nix.runExecline name
+{
   stdin = script;
   derivationArgs = {
     preferLocalBuild = true;
     allowSubstitutes = false;
   };
 } [
-  "importas" "out" "out"
+  "importas"
+  "out"
+  "out"
   # this pipes stdout of s6-cat to $out
   # and s6-cat redirects from stdin to stdout
-  "if" [ "redirfd" "-w" "1" "$out" bins.s6-cat ]
-  bins.s6-chmod "0755" "$out"
+  "if"
+  [ "redirfd" "-w" "1" "$out" bins.s6-cat ]
+  bins.s6-chmod
+  "0755"
+  "$out"
 ]
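
A minimal sketch of the resulting helper in use (the example script is hypothetical): the string is fed in on stdin, copied into $out via s6-cat, and made executable by s6-chmod, yielding an executable store path:

  depot.nix.writeScript "hello" ''
    #!/bin/sh
    echo hello
  ''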
@ -2,62 +2,71 @@

 let
   bins = depot.nix.getBins pkgs.s6-portable-utils [ "s6-ln" "s6-ls" "s6-touch" ]
     ;

-  linkTo = name: path: depot.nix.runExecline.local name {} [
-    "importas" "out" "out"
-    bins.s6-ln "-s" path "$out"
+  linkTo = name: path: depot.nix.runExecline.local name { } [
+    "importas"
+    "out"
+    "out"
+    bins.s6-ln
+    "-s"
+    path
+    "$out"
   ];

   # Build a rust executable, $out is the executable.
-  rustSimple = args@{name, ...}: src:
+  rustSimple = args@{ name, ... }: src:
     linkTo name "${rustSimpleBin args src}/bin/${name}";

   # Like `rustSimple`, but put the binary in `$out/bin/`.
-  rustSimpleBin = {
-    name,
-    dependencies ? [],
-    doCheck ? true,
-  }: src:
+  rustSimpleBin =
+    { name
+    , dependencies ? [ ]
+    , doCheck ? true
+    ,
+    }: src:
     (if doCheck then testRustSimple else pkgs.lib.id)
       (pkgs.buildRustCrate ({
         pname = name;
         version = "1.0.0";
         crateName = name;
         crateBin = [ name ];
         dependencies = dependencies;
-        src = pkgs.runCommandLocal "write-main.rs" {
-          src = src;
-          passAsFile = [ "src" ];
-        } ''
+        src = pkgs.runCommandLocal "write-main.rs"
+          {
+            src = src;
+            passAsFile = [ "src" ];
+          } ''
           mkdir -p $out/src/bin
           cp "$srcPath" $out/src/bin/${name}.rs
           find $out
         '';
       }));

   # Build a rust library, that can be used as dependency to `rustSimple`.
   # Wrapper around `pkgs.buildRustCrate`, takes all its arguments.
-  rustSimpleLib = {
-    name,
-    dependencies ? [],
-    doCheck ? true,
-  }: src:
+  rustSimpleLib =
+    { name
+    , dependencies ? [ ]
+    , doCheck ? true
+    ,
+    }: src:
     (if doCheck then testRustSimple else pkgs.lib.id)
       (pkgs.buildRustCrate ({
         pname = name;
         version = "1.0.0";
         crateName = name;
         dependencies = dependencies;
-        src = pkgs.runCommandLocal "write-lib.rs" {
-          src = src;
-          passAsFile = [ "src" ];
-        } ''
+        src = pkgs.runCommandLocal "write-lib.rs"
+          {
+            src = src;
+            passAsFile = [ "src" ];
+          } ''
           mkdir -p $out/src
           cp "$srcPath" $out/src/lib.rs
           find $out
         '';
       }));

   /* Takes a `buildRustCrate` derivation as an input,
    * builds it with `{ buildTests = true; }` and runs

@ -72,19 +81,30 @@ let
   testRustSimple = rustDrv:
     let
       crate = buildTests: rustDrv.override { inherit buildTests; };
-      tests = depot.nix.runExecline.local "${rustDrv.name}-tests-run" {} [
-        "importas" "out" "out"
-        "if" [
-          "pipeline" [ bins.s6-ls "${crate true}/tests" ]
-          "forstdin" "-o0" "test"
-          "importas" "test" "test"
+      tests = depot.nix.runExecline.local "${rustDrv.name}-tests-run" { } [
+        "importas"
+        "out"
+        "out"
+        "if"
+        [
+          "pipeline"
+          [ bins.s6-ls "${crate true}/tests" ]
+          "forstdin"
+          "-o0"
+          "test"
+          "importas"
+          "test"
+          "test"
           "${crate true}/tests/$test"
         ]
-        bins.s6-touch "$out"
+        bins.s6-touch
+        "$out"
       ];
-    in depot.nix.drvSeqL [ tests ] (crate false);
+    in
+    depot.nix.drvSeqL [ tests ] (crate false);

-in {
+in
+{
   inherit
     rustSimple
     rustSimpleBin
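
As a hedged sketch of the entry points defined above (example names made up for illustration): rustSimple yields the executable itself as $out, while rustSimpleBin places it under $out/bin:

  rustSimple { name = "hello"; } ''
    fn main() { println!("hello"); }
  ''

  # The doCheck plumbing relies on depot.nix.drvSeqL; assuming its usual
  # contract, `drvSeqL [ tests ] (crate false)` evaluates to the plain
  # crate, but building it also forces the tests derivation to build
  # (and hence the test binaries to pass) first.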
@ -11,15 +11,20 @@ let
     coreutils
     ;

-  run = drv: depot.nix.runExecline.local "run-${drv.name}" {} [
-    "if" [ drv ]
-    "importas" "out" "out"
-    "${coreutils}/bin/touch" "$out"
+  run = drv: depot.nix.runExecline.local "run-${drv.name}" { } [
+    "if"
+    [ drv ]
+    "importas"
+    "out"
+    "out"
+    "${coreutils}/bin/touch"
+    "$out"
   ];

-  rustTransitiveLib = rustSimpleLib {
-    name = "transitive";
-  } ''
+  rustTransitiveLib = rustSimpleLib
+    {
+      name = "transitive";
+    } ''
     pub fn transitive(s: &str) -> String {
       let mut new = s.to_string();
       new.push_str(" 1 2 3");

@ -37,10 +42,11 @@ let
     }
   '';

-  rustTestLib = rustSimpleLib {
-    name = "test_lib";
-    dependencies = [ rustTransitiveLib ];
-  } ''
+  rustTestLib = rustSimpleLib
+    {
+      name = "test_lib";
+      dependencies = [ rustTransitiveLib ];
+    } ''
     extern crate transitive;
     use transitive::{transitive};
     pub fn test() -> String {

@ -48,10 +54,11 @@ let
     }
   '';

-  rustWithLib = run (rustSimple {
-    name = "rust-with-lib";
-    dependencies = [ rustTestLib ];
-  } ''
+  rustWithLib = run (rustSimple
+    {
+      name = "rust-with-lib";
+      dependencies = [ rustTestLib ];
+    } ''
     extern crate test_lib;

     fn main() {

@ -60,7 +67,8 @@ let
   '');


-in depot.nix.readTree.drvTargets {
+in
+depot.nix.readTree.drvTargets {
   inherit
     rustTransitiveLib
     rustWithLib
@ -6,10 +6,10 @@
 #
 # All types (should) compose as expected.

-{ lib ? (import <nixpkgs> {}).lib, ... }:
+{ lib ? (import <nixpkgs> { }).lib, ... }:

 with builtins; let
-  prettyPrint = lib.generators.toPretty {};
+  prettyPrint = lib.generators.toPretty { };

   # typedef' :: struct {
   #   name = string;

@ -34,41 +34,44 @@ with builtins; let
   #
   # This function is the low-level primitive used to create types. For
   # many cases the higher-level 'typedef' function is more appropriate.
-  typedef' = { name, checkType
-             , checkToBool ? (result: result.ok)
-             , toError ? (_: result: result.err)
-             , def ? null
-             , match ? null }: {
-    inherit name checkToBool toError;
+  typedef' =
+    { name
+    , checkType
+    , checkToBool ? (result: result.ok)
+    , toError ? (_: result: result.err)
+    , def ? null
+    , match ? null
+    }: {
+      inherit name checkToBool toError;

       # check :: a -> bool
       #
       # This function is used to determine whether a given type is
       # conformant.
       check = value: checkToBool (checkType value);

       # checkType :: a -> struct { ok = bool; err = option string; }
       #
       # This function checks whether the passed value is type conformant
       # and returns an optional type error string otherwise.
       inherit checkType;

       # __functor :: a -> a
       #
       # This function checks whether the passed value is type conformant
       # and throws an error if it is not.
       #
       # The name of this function is a special attribute in Nix that
       # makes it possible to execute a type attribute set like a normal
       # function.
       __functor = self: value:
         let result = self.checkType value;
         in if checkToBool result then value
         else throw (toError value result);
     };

   typeError = type: val:
     "expected type '${type}', but value '${prettyPrint val}' is of type '${typeOf val}'";

   # typedef :: string -> (a -> bool) -> type
   #

@ -85,27 +88,34 @@ with builtins; let
     });
   };

-  checkEach = name: t: l: foldl' (acc: e:
-    let res = t.checkType e;
-        isT = t.checkToBool res;
-    in {
-      ok = acc.ok && isT;
-      err = if isT
-            then acc.err
-            else acc.err + "${prettyPrint e}: ${t.toError e res}\n";
-    }) { ok = true; err = "expected type ${name}, but found:\n"; } l;
-in lib.fix (self: {
+  checkEach = name: t: l: foldl'
+    (acc: e:
+      let
+        res = t.checkType e;
+        isT = t.checkToBool res;
+      in
+      {
+        ok = acc.ok && isT;
+        err =
+          if isT
+          then acc.err
+          else acc.err + "${prettyPrint e}: ${t.toError e res}\n";
+      })
+    { ok = true; err = "expected type ${name}, but found:\n"; }
+    l;
+in
+lib.fix (self: {
   # Primitive types
   any = typedef "any" (_: true);
-  unit = typedef "unit" (v: v == {});
+  unit = typedef "unit" (v: v == { });
   int = typedef "int" isInt;
   bool = typedef "bool" isBool;
   float = typedef "float" isFloat;
   string = typedef "string" isString;
   path = typedef "path" (x: typeOf x == "path");
   drv = typedef "derivation" (x: isAttrs x && x ? "type" && x.type == "derivation");
   function = typedef "function" (x: isFunction x || (isAttrs x && x ? "__functor"
     && isFunction x.__functor));

   # Type for types themselves. Useful when defining polymorphic types.
   type = typedef "type" (x:

@ -124,7 +134,7 @@ in lib.fix (self: {
       in {
         ok = isNull v || (self.type t).checkToBool res;
         err = "expected type ${name}, but value does not conform to '${t.name}': "
           + t.toError v res;
       };
   };

@ -136,7 +146,8 @@ in lib.fix (self: {
   list = t: typedef' rec {
     name = "list<${t.name}>";

-    checkType = v: if isList v
+    checkType = v:
+      if isList v
       then checkEach name (self.type t) v
       else {
         ok = false;

@ -147,7 +158,8 @@ in lib.fix (self: {
   attrs = t: typedef' rec {
     name = "attrs<${t.name}>";

-    checkType = v: if isAttrs v
+    checkType = v:
+      if isAttrs v
       then checkEach name (self.type t) (attrValues v)
       else {
         ok = false;

@ -172,20 +184,23 @@ in lib.fix (self: {
       # checkField checks an individual field of the struct against
       # its definition and creates a typecheck result. These results
       # are aggregated during the actual checking.
-      checkField = def: name: value: let result = def.checkType value; in rec {
-        ok = def.checkToBool result;
-        err = if !ok && isNull value
-              then "missing required ${def.name} field '${name}'\n"
-              else "field '${name}': ${def.toError value result}\n";
-      };
+      checkField = def: name: value:
+        let result = def.checkType value; in rec {
+          ok = def.checkToBool result;
+          err =
+            if !ok && isNull value
+            then "missing required ${def.name} field '${name}'\n"
+            else "field '${name}': ${def.toError value result}\n";
+        };

       # checkExtraneous determines whether a (closed) struct contains
       # any fields that are not part of the definition.
       checkExtraneous = def: has: acc:
         if (length has) == 0 then acc
         else if (hasAttr (head has) def)
         then checkExtraneous def (tail has) acc
-        else checkExtraneous def (tail has) {
+        else
+          checkExtraneous def (tail has) {
             ok = false;
             err = acc.err + "unexpected struct field '${head has}'\n";
           };

@ -197,85 +212,102 @@ in lib.fix (self: {
           init = { ok = true; err = ""; };
           extraneous = checkExtraneous def (attrNames value) init;

-          checkedFields = map (n:
-            let v = if hasAttr n value then value."${n}" else null;
-            in checkField def."${n}" n v) (attrNames def);
+          checkedFields = map
+            (n:
+              let v = if hasAttr n value then value."${n}" else null;
+              in checkField def."${n}" n v)
+            (attrNames def);

-          combined = foldl' (acc: res: {
-            ok = acc.ok && res.ok;
-            err = if !res.ok then acc.err + res.err else acc.err;
-          }) init checkedFields;
-        in {
+          combined = foldl'
+            (acc: res: {
+              ok = acc.ok && res.ok;
+              err = if !res.ok then acc.err + res.err else acc.err;
+            })
+            init
+            checkedFields;
+        in
+        {
           ok = combined.ok && extraneous.ok;
           err = combined.err + extraneous.err;
         };

       struct' = name: def: typedef' {
         inherit name def;
-        checkType = value: if isAttrs value
+        checkType = value:
+          if isAttrs value
           then (checkStruct (self.attrs self.type def) value)
           else { ok = false; err = typeError name value; };

         toError = _: result: "expected '${name}'-struct, but found:\n" + result.err;
       };
-    in arg: if isString arg then (struct' arg) else (struct' "anon" arg);
+    in
+    arg: if isString arg then (struct' arg) else (struct' "anon" arg);

   # Enums & pattern matching
   enum =
     let
       plain = name: def: typedef' {
         inherit name def;

         checkType = (x: isString x && elem x def);
         checkToBool = x: x;
         toError = value: _: "'${prettyPrint value} is not a member of enum ${name}";
       };
       enum' = name: def: lib.fix (e: (plain name def) // {
         match = x: actions: deepSeq (map e (attrNames actions)) (
           let
             actionKeys = attrNames actions;
-            missing = foldl' (m: k: if (elem k actionKeys) then m else m ++ [ k ]) [] def;
-          in if (length missing) > 0
-          then throw "Missing match action for members: ${prettyPrint missing}"
-          else actions."${e x}");
-      });
-    in arg: if isString arg then (enum' arg) else (enum' "anon" arg);
+            missing = foldl' (m: k: if (elem k actionKeys) then m else m ++ [ k ]) [ ] def;
+          in
+          if (length missing) > 0
+          then throw "Missing match action for members: ${prettyPrint missing}"
+          else actions."${e x}"
+        );
+      });
+    in
+    arg: if isString arg then (enum' arg) else (enum' "anon" arg);

   # Sum types
   #
   # The representation of a sum type is an attribute set with only one
   # value, where the key of the value denotes the variant of the type.
   sum =
     let
       plain = name: def: typedef' {
         inherit name def;
         checkType = (x:
           let variant = elemAt (attrNames x) 0;
           in if isAttrs x && length (attrNames x) == 1 && hasAttr variant def
-          then let t = def."${variant}";
-                   v = x."${variant}";
-                   res = t.checkType v;
-               in if t.checkToBool res
-                  then { ok = true; }
-                  else {
-                    ok = false;
-                    err = "while checking '${name}' variant '${variant}': "
-                      + t.toError v res;
-                  }
+          then
+            let
+              t = def."${variant}";
+              v = x."${variant}";
+              res = t.checkType v;
+            in
+            if t.checkToBool res
+            then { ok = true; }
+            else {
+              ok = false;
+              err = "while checking '${name}' variant '${variant}': "
+                + t.toError v res;
+            }
           else { ok = false; err = typeError name x; }
         );
       };
       sum' = name: def: lib.fix (s: (plain name def) // {
         match = x: actions:
-          let variant = deepSeq (s x) (elemAt (attrNames x) 0);
-              actionKeys = attrNames actions;
-              defKeys = attrNames def;
-              missing = foldl' (m: k: if (elem k actionKeys) then m else m ++ [ k ]) [] defKeys;
-          in if (length missing) > 0
-          then throw "Missing match action for variants: ${prettyPrint missing}"
-          else actions."${variant}" x."${variant}";
+          let
+            variant = deepSeq (s x) (elemAt (attrNames x) 0);
+            actionKeys = attrNames actions;
+            defKeys = attrNames def;
+            missing = foldl' (m: k: if (elem k actionKeys) then m else m ++ [ k ]) [ ] defKeys;
+          in
+          if (length missing) > 0
+          then throw "Missing match action for variants: ${prettyPrint missing}"
+          else actions."${variant}" x."${variant}";
       });
-    in arg: if isString arg then (sum' arg) else (sum' "anon" arg);
+    in
+    arg: if isString arg then (sum' arg) else (sum' "anon" arg);

   # Typed function definitions
   #

@ -289,15 +321,19 @@ in lib.fix (self: {
     mkFunc = sig: f: {
       inherit sig;
       __toString = self: foldl' (s: t: "${s} -> ${t.name}")
-        "λ :: ${(head self.sig).name}" (tail self.sig);
+        "λ :: ${(head self.sig).name}"
+        (tail self.sig);
       __functor = _: f;
     };

-    defun' = sig: func: if length sig > 2
+    defun' = sig: func:
+      if length sig > 2
       then mkFunc sig (x: defun' (tail sig) (func ((head sig) x)))
       else mkFunc sig (x: ((head (tail sig)) (func ((head sig) x))));

-  in sig: func: if length sig < 2
+  in
+  sig: func:
+    if length sig < 2
     then (throw "Signature must at least have two types (a -> b)")
     else defun' sig func;

@ -311,21 +347,22 @@ in lib.fix (self: {
   # depend on the value being of the wrapped type.
   restrict = name: pred: t:
     let restriction = "${t.name}[${name}]"; in typedef' {
       name = restriction;
       checkType = v:
         let res = t.checkType v;
         in
         if !(t.checkToBool res)
         then res
         else
           let
             iok = pred v;
-          in if isBool iok then {
+          in
+          if isBool iok then {
             ok = iok;
             err = "${prettyPrint v} does not conform to restriction '${restriction}'";
           } else
             # use throw here to avoid spamming the build log
             throw "restriction '${restriction}' predicate returned unexpected value '${prettyPrint iok}' instead of boolean";
     };

 })
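
Taken together, the combinators above give a small runtime type system: types are attribute sets that act as checking functions via __functor. A hedged usage sketch (the import path and all names here are assumed for illustration, not part of this change):

  with depot.nix.yants;
  let
    colour = enum "colour" [ "red" "green" ];
    user = struct "user" { name = string; admin = bool; };
    # defun checks the argument against `user` and the result against `string`.
    greet = defun [ user string ] (u: "hello, ${u.name}");
  in
  greet (user { name = "ada"; admin = true; })  # => "hello, ada"

Passing a value that fails a check throws with the error text built by toError, as the test file below exercises.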
@ -25,7 +25,7 @@ let
   };

   testPrimitives = it "checks that all primitive types match" [
-    (assertDoesNotThrow "unit type" (unit {}))
+    (assertDoesNotThrow "unit type" (unit { }))
     (assertDoesNotThrow "int type" (int 15))
     (assertDoesNotThrow "bool type" (bool false))
     (assertDoesNotThrow "float type" (float 13.37))

@ -44,7 +44,7 @@ let
   # Test that structures work as planned.
   person = struct "person" {
     name = string;
     age = int;

     contact = option (struct {
       email = string;

@ -55,7 +55,7 @@ let
   testStruct = it "checks that structures work as intended" [
     (assertDoesNotThrow "person struct" (person {
       name = "Brynhjulf";
       age = 42;
       contact.email = "brynhjulf@yants.nix";
     }))
   ];

@ -70,7 +70,8 @@ let

   testEnum = it "checks enum definitions and matching" [
     (assertEq "enum is matched correctly"
-      "It is in fact red!" (colour.match "red" colourMatcher))
+      "It is in fact red!"
+      (colour.match "red" colourMatcher))
     (assertThrows "out of bounds enum fails"
       (colour.match "alpha" (colourMatcher // {
         alpha = "This should never happen";

@ -97,7 +98,8 @@ let
   testSum = it "checks sum types definitions and matching" [
     (assertDoesNotThrow "creature sum type" some-human)
     (assertEq "sum type is matched correctly"
-      "It's a human named Brynhjulf" (creature.match some-human {
+      "It's a human named Brynhjulf"
+      (creature.match some-human {
         human = v: "It's a human named ${v.name}";
         pet = v: "It's not supposed to be a pet!";
       })

@ -106,7 +108,7 @@ let

   # Test curried function definitions
   func = defun [ string int string ]
     (name: age: "${name} is ${toString age} years old");

   testFunctions = it "checks function definitions" [
     (assertDoesNotThrow "function application" (func "Brynhjulf" 42))

@ -144,13 +146,13 @@ let
   ];

 in
 runTestsuite "yants" [
   testPrimitives
   testPoly
   testStruct
   testEnum
   testSum
   testFunctions
   testTypes
   testRestrict
 ]
@ -2,11 +2,12 @@
 { depot, pkgs, ... }:

 let
-  checkZone = zone: file: pkgs.runCommandNoCC "${zone}-check" {} ''
+  checkZone = zone: file: pkgs.runCommandNoCC "${zone}-check" { } ''
     ${pkgs.bind}/bin/named-checkzone -i local ${zone} ${file} | tee $out
   '';

-in depot.nix.readTree.drvTargets {
+in
+depot.nix.readTree.drvTargets {
   nixery-dev = checkZone "nixery.dev" ./nixery.dev.zone;
   tvl-fyi = checkZone "tvl.fyi" ./tvl.fyi.zone;
   tvl-su = checkZone "tvl.su" ./tvl.su.zone;
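
Each target is just a named invocation of checkZone, so the derivation builds only if named-checkzone accepts the zone file. A hypothetical extra target (not part of this change) would follow the same shape:

  example-org = checkZone "example.org" ./example.org.zone;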
@ -2,7 +2,7 @@

 depot.nix.readTree.drvTargets {
   # Provide a Terraform wrapper with the right provider installed.
-  terraform = pkgs.terraform.withPlugins(_: [
+  terraform = pkgs.terraform.withPlugins (_: [
     depot.third_party.terraform-provider-glesys
   ]);
 }
@ -4,6 +4,8 @@ depot.third_party.naersk.buildPackage {
   src = ./.;

   buildInputs = with pkgs; [
-    pkgconfig openssl systemd.dev
+    pkgconfig
+    openssl
+    systemd.dev
   ];
 }
@ -2,7 +2,7 @@

 depot.nix.readTree.drvTargets {
   # Provide a Terraform wrapper with the right provider installed.
-  terraform = pkgs.terraform.withPlugins(p: [
+  terraform = pkgs.terraform.withPlugins (p: [
     p.keycloak
   ]);
 }
@ -10,13 +10,17 @@
 # This file is the Nix derivation used to build release binaries for
 # several different architectures and operating systems.

-let pkgs = import ((import <nixpkgs> {}).fetchFromGitHub {
-  owner = "NixOS";
-  repo = "nixpkgs-channels";
-  rev = "541d9cce8af7a490fb9085305939569567cb58e6";
-  sha256 = "0jgz72hhzkd5vyq5v69vpljjlnf0lqaz7fh327bvb3cvmwbfxrja";
-}) {};
-in with pkgs; buildGoPackage rec {
+let
+  pkgs = import
+    ((import <nixpkgs> { }).fetchFromGitHub {
+      owner = "NixOS";
+      repo = "nixpkgs-channels";
+      rev = "541d9cce8af7a490fb9085305939569567cb58e6";
+      sha256 = "0jgz72hhzkd5vyq5v69vpljjlnf0lqaz7fh327bvb3cvmwbfxrja";
+    })
+    { };
+in
+with pkgs; buildGoPackage rec {
   name = "kontemplate-${version}";
   version = "canon";
   src = ./.;

@ -29,8 +33,8 @@ in with pkgs; buildGoPackage rec {
   # reason for setting the 'allowGoReference' flag.
   dontStrip = true; # Linker configuration handles stripping
   allowGoReference = true;
-  CGO_ENABLED="0";
-  GOCACHE="off";
+  CGO_ENABLED = "0";
+  GOCACHE = "off";

   # Configure release builds via the "build-matrix" script:
   buildInputs = [ git ];
@ -4,7 +4,8 @@
 let
   inherit (builtins) listToAttrs;
   inherit (lib) range;
-in {
+in
+{
   imports = [
     "${depot.path}/ops/modules/atward.nix"
     "${depot.path}/ops/modules/clbot.nix"

@ -55,7 +56,13 @@ in {

     initrd = {
       availableKernelModules = [
-        "igb" "xhci_pci" "nvme" "ahci" "usbhid" "usb_storage" "sr_mod"
+        "igb"
+        "xhci_pci"
+        "nvme"
+        "ahci"
+        "usbhid"
+        "usb_storage"
+        "sr_mod"
       ];

       # Enable SSH in the initrd so that we can enter disk encryption

@ -189,7 +196,7 @@ in {
       ++ lukegb.keys.all
       ++ [ grfn.keys.whitby ]
       ++ sterni.keys.all
       ;
     };
   };

@ -205,7 +212,8 @@ in {
   age.secrets =
     let
       secretFile = name: depot.ops.secrets."${name}.age";
-    in {
+    in
+    {
       clbot.file = secretFile "clbot";
       gerrit-queue.file = secretFile "gerrit-queue";
       grafana.file = secretFile "grafana";

@ -509,15 +517,16 @@ in {
       job_name = "node";
       scrape_interval = "5s";
       static_configs = [{
-        targets = ["localhost:${toString config.services.prometheus.exporters.node.port}"];
+        targets = [ "localhost:${toString config.services.prometheus.exporters.node.port}" ];
       }];
-    } {
+    }
+    {
       job_name = "nginx";
       scrape_interval = "5s";
       static_configs = [{
-        targets = ["localhost:${toString config.services.prometheus.exporters.nginx.port}"];
+        targets = [ "localhost:${toString config.services.prometheus.exporters.nginx.port}" ];
       }];
     }];
   };

   services.grafana = {

@ -526,58 +535,62 @@ in {
     domain = "status.tvl.su";
     rootUrl = "https://status.tvl.su";
     analytics.reporting.enable = false;
-    extraOptions = let
-      options = {
+    extraOptions =
+      let
+        options = {
          auth = {
            generic_oauth = {
              enabled = true;
              client_id = "grafana";
              scopes = "openid profile email";
              name = "TVL";
              email_attribute_path = "mail";
              login_attribute_path = "sub";
              name_attribute_path = "displayName";
              auth_url = "https://auth.tvl.fyi/auth/realms/TVL/protocol/openid-connect/auth";
              token_url = "https://auth.tvl.fyi/auth/realms/TVL/protocol/openid-connect/token";
              api_url = "https://auth.tvl.fyi/auth/realms/TVL/protocol/openid-connect/userinfo";

              # Give lukegb, grfn, tazjin "Admin" rights.
              role_attribute_path = "((sub == 'lukegb' || sub == 'grfn' || sub == 'tazjin') && 'Admin') || 'Editor'";

              # Allow creating new Grafana accounts from OAuth accounts.
              allow_sign_up = true;
            };

            anonymous = {
              enabled = true;
              org_name = "The Virus Lounge";
              org_role = "Viewer";
            };

            basic.enabled = false;
            oauth_auto_login = true;
            disable_login_form = true;
          };
        };
        inherit (builtins) typeOf replaceStrings listToAttrs concatLists;
        inherit (lib) toUpper mapAttrsToList nameValuePair concatStringsSep;

        # Take ["auth" "generic_oauth" "enabled"] and turn it into OPTIONS_GENERIC_OAUTH_ENABLED.
-       encodeName = raw: replaceStrings ["."] ["_"] (toUpper (concatStringsSep "_" raw));
+       encodeName = raw: replaceStrings [ "." ] [ "_" ] (toUpper (concatStringsSep "_" raw));

        # Turn an option value into a string, but we want bools to be sensible strings and not "1" or "".
        optionToString = value:
          if (typeOf value) == "bool" then
            if value then "true" else "false"
          else builtins.toString value;

        # Turn an nested options attrset into a flat listToAttrs-compatible list.
-       encodeOptions = prefix: inp: concatLists (mapAttrsToList (name: value:
-         if (typeOf value) == "set"
-         then encodeOptions (prefix ++ [name]) value
-         else [ (nameValuePair (encodeName (prefix ++ [name])) (optionToString value)) ]
-       ) inp);
-     in listToAttrs (encodeOptions [] options);
+       encodeOptions = prefix: inp: concatLists (mapAttrsToList
+         (name: value:
+           if (typeOf value) == "set"
+           then encodeOptions (prefix ++ [ name ]) value
+           else [ (nameValuePair (encodeName (prefix ++ [ name ])) (optionToString value)) ]
+         )
+         inp);
+     in
+     listToAttrs (encodeOptions [ ] options);

     provision = {
       enable = true;

@ -623,8 +636,8 @@ in {

   security.sudo.extraRules = [
     {
-      groups = ["wheel"];
-      commands = [{ command = "ALL"; options = ["NOPASSWD"]; }];
+      groups = [ "wheel" ];
+      commands = [{ command = "ALL"; options = [ "NOPASSWD" ]; }];
     }
   ];

@ -705,7 +718,7 @@ in {
   };

   # Set up a user & group for git shenanigans
-  groups.git = {};
+  groups.git = { };
   users.git = {
     group = "git";
     isSystemUser = true;
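
The encodeName/encodeOptions pair above flattens the nested options set into name/value pairs for Grafana's environment-based configuration. A worked sketch of what the helpers produce (the input value is illustrative):

  # encodeOptions [ ] { auth.basic.enabled = false; }
  #   => [ { name = "AUTH_BASIC_ENABLED"; value = "false"; } ]
  # listToAttrs then yields { AUTH_BASIC_ENABLED = "false"; },
  # which the Grafana module presumably exports as environment
  # variables for the service.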
@ -3,7 +3,8 @@
 let
   cfg = config.services.depot.atward;
   description = "atward - (attempt to) cleverly route queries";
-in {
+in
+{
   options.services.depot.atward = {
     enable = lib.mkEnableOption description;
@ -45,7 +45,8 @@ let
     # NixOS in $STATE_DIRECTORY
     (cd / && ${rebuild-system}/bin/rebuild-system)
   '';
-in {
+in
+{
   options.services.depot.auto-deploy = {
     enable = lib.mkEnableOption description;
@ -29,7 +29,8 @@ let
       echo "Skipping GC, enough space available"
     fi
   '';
-in {
+in
+{
   options.services.depot.automatic-gc = {
     enable = lib.mkEnableOption description;
@ -21,7 +21,7 @@ let
     (attrValues (mapAttrs (key: value: "-${key} \"${toString value}\"") flags));

   # Escapes a unit name for use in systemd
-  systemdEscape = name: removeSuffix "\n" (readFile (runCommandNoCC "unit-name" {} ''
+  systemdEscape = name: removeSuffix "\n" (readFile (runCommandNoCC "unit-name" { } ''
     ${pkgs.systemd}/bin/systemd-escape '${name}' >> $out
   ''));

@ -42,7 +42,8 @@ let
       };
     };
   };
-in {
+in
+{
   options.services.depot.clbot = {
     enable = mkEnableOption description;

@ -68,7 +69,7 @@ in {
     # (notably the SSH private key) readable by this user outside of
     # the module.
     users = {
-      groups.clbot = {};
+      groups.clbot = { };

       users.clbot = {
         group = "clbot";
@ -1,2 +1,2 @@
 # Make readTree happy at this level.
-_: {}
+_: { }
@ -8,7 +8,8 @@ let
     inherit default;
     type = lib.types.str;
   };
-in {
+in
+{
   options.services.depot.gerrit-queue = {
     enable = lib.mkEnableOption description;
     gerritUrl = mkStringOption "https://cl.tvl.fyi";
@ -12,7 +12,8 @@

 let
   cfg = config.services.depot.git-serving;
-in {
+in
+{
   options.services.depot.git-serving = with lib; {
     enable = mkEnableOption "Enable cgit & josh configuration";
@ -27,7 +27,8 @@ let

     exec ${depot.third_party.irccat}/bin/irccat
   '';
-in {
+in
+{
   options.services.depot.irccat = {
     enable = lib.mkEnableOption description;
@ -9,12 +9,13 @@ let
     exec -a ${name} ${depot.ops.besadii}/bin/besadii "$@"
   '';

-  gerritHooks = pkgs.runCommandNoCC "gerrit-hooks" {} ''
+  gerritHooks = pkgs.runCommandNoCC "gerrit-hooks" { } ''
     mkdir -p $out
     ln -s ${besadiiWithConfig "change-merged"} $out/change-merged
     ln -s ${besadiiWithConfig "patchset-created"} $out/patchset-created
   '';
-in {
+in
+{
   services.gerrit = {
     enable = true;
     listenAddress = "[::]:4778"; # 4778 - grrt
@ -6,7 +6,8 @@ let
   cfg = config.services.depot.nixery;
   description = "Nixery - container images on-demand";
   storagePath = "/var/lib/nixery/${pkgs.nixpkgsCommits.unstable}";
-in {
+in
+{
   options.services.depot.nixery = {
     enable = lib.mkEnableOption description;
@ -19,7 +19,8 @@ let
     reverse_proxy = true
     set_xauthrequest = true
   '';
-in {
+in
+{
   options.services.depot.oauth2_proxy = {
     enable = lib.mkEnableOption description;
@ -4,7 +4,8 @@
 let
   cfg = config.services.depot.owothia;
   description = "owothia - i'm a service owo";
-in {
+in
+{
   options.services.depot.owothia = {
     enable = lib.mkEnableOption description;
@ -2,7 +2,8 @@

 let
   cfg = config.services.depot.panettone;
-in {
+in
+{
   options.services.depot.panettone = with lib; {
     enable = mkEnableOption "Panettone issue tracker";

@ -62,23 +63,26 @@ in {
       assertion =
         cfg.dbHost != "localhost" || config.services.postgresql.enable;
       message = "Panettone requires a postgresql database";
-    } {
+    }
+    {
       assertion =
         cfg.dbHost != "localhost" || config.services.postgresql.enableTCPIP;
       message = "Panettone can only connect to the postgresql database over TCP";
-    } {
+    }
+    {
       assertion =
         cfg.dbHost != "localhost" || (lib.any
           (user: user.name == cfg.dbUser)
           config.services.postgresql.ensureUsers);
       message = "Panettone requires a database user";
-    } {
+    }
+    {
       assertion =
         cfg.dbHost != "localhost" || (lib.any
           (db: db == cfg.dbName)
           config.services.postgresql.ensureDatabases);
       message = "Panettone requires a database";
     }];

     systemd.services.panettone = {
       wantedBy = [ "multi-user.target" ];
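
These entries use the stock NixOS assertions mechanism: evaluation of the system aborts with the given message whenever an assertion is false. A minimal illustrative entry (hypothetical, not part of this module):

  { assertion = cfg.dbPort != 0; message = "Panettone needs a database port"; }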
@ -3,7 +3,8 @@
 let
   cfg = config.services.depot.paroxysm;
   description = "TVL's majestic IRC bot";
-in {
+in
+{
   options.services.depot.paroxysm.enable = lib.mkEnableOption description;

   config = lib.mkIf cfg.enable {
@ -8,7 +8,8 @@ let
     enableDaemon = true;
     withKDE = false;
   };
-in {
+in
+{
   options.services.depot.quassel = with lib; {
     enable = mkEnableOption "Quassel IRC daemon";

@ -70,7 +71,7 @@ in {
       group = "quassel";
     };

-    groups.quassel = {};
+    groups.quassel = { };
   };
 };
 }
@ -14,7 +14,8 @@ let
     inherit default;
     type = lib.types.str;
   };
-in {
+in
+{
   options.services.depot.restic = {
     enable = lib.mkEnableOption description;
     bucketEndpoint = mkStringOption "objects.dc-sto1.glesys.net";
@ -27,8 +27,9 @@ let
   prepareArgs = args:
     concatStringsSep " "
       (attrValues (mapAttrs (key: value: "-${key} \"${toString value}\"")
         (args // overrideArgs)));
-in {
+in
+{
   options.services.depot.smtprelay = {
     enable = mkEnableOption description;
@@ -4,7 +4,8 @@

 let
   cfg = config.services.depot.sourcegraph;
-in {
+in
+{
   options.services.depot.sourcegraph = with lib; {
     enable = mkEnableOption "SourceGraph code search engine";
@@ -51,7 +52,8 @@ in {
       # Sourcegraph needs a higher nofile limit, it logs warnings
       # otherwise (unclear whether it actually affects the service).
       extraOptions = [
-        "--ulimit" "nofile=10000:10000"
+        "--ulimit"
+        "nofile=10000:10000"
       ];
     };
   };
@@ -13,7 +13,7 @@ let

   # All Buildkite hooks are actually besadii, but it's being invoked
   # with different names.
-  buildkiteHooks = pkgs.runCommandNoCC "buildkite-hooks" {} ''
+  buildkiteHooks = pkgs.runCommandNoCC "buildkite-hooks" { } ''
     mkdir -p $out/bin
     ln -s ${besadiiWithConfig "post-command"} $out/bin/post-command
   '';

@@ -22,7 +22,8 @@ let
     echo 'username=buildkite'
     echo "password=$(jq -r '.gerritPassword' /run/agenix/buildkite-besadii-config)"
   '';
-in {
+in
+{
   options.services.depot.buildkite = {
     enable = lib.mkEnableOption description;
     agentCount = lib.mkOption {
@@ -33,39 +34,43 @@ in {

   config = lib.mkIf cfg.enable {
     # Run the Buildkite agents using the default upstream module.
-    services.buildkite-agents = builtins.listToAttrs (map (n: rec {
-      name = "whitby-${toString n}";
-      value = {
-        inherit name;
-        enable = true;
-        tokenPath = "/run/agenix/buildkite-agent-token";
-        hooks.post-command = "${buildkiteHooks}/bin/post-command";
+    services.buildkite-agents = builtins.listToAttrs (map
+      (n: rec {
+        name = "whitby-${toString n}";
+        value = {
+          inherit name;
+          enable = true;
+          tokenPath = "/run/agenix/buildkite-agent-token";
+          hooks.post-command = "${buildkiteHooks}/bin/post-command";

          runtimePackages = with pkgs; [
            bash
            coreutils
            credentialHelper
            curl
            git
            gnutar
            gzip
            jq
            nix
          ];
        };
-    }) agents);
+      })
+      agents);

     # Set up a group for all Buildkite agent users
     users = {
-      groups.buildkite-agents = {};
-      users = builtins.listToAttrs (map (n: rec {
-        name = "buildkite-agent-whitby-${toString n}";
-        value = {
-          isSystemUser = true;
-          group = lib.mkForce "buildkite-agents";
-          extraGroups = [ name "docker" ];
-        };
-      }) agents);
+      groups.buildkite-agents = { };
+      users = builtins.listToAttrs (map
+        (n: rec {
+          name = "buildkite-agent-whitby-${toString n}";
+          value = {
+            isSystemUser = true;
+            group = lib.mkForce "buildkite-agents";
+            extraGroups = [ name "docker" ];
+          };
+        })
+        agents);
     };
   };
 }
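The restructuring above is purely cosmetic: the expression is the usual listToAttrs-over-map pattern that generates one agent definition per element of `agents`. A reduced sketch of what it produces, assuming `agents = [ 0 1 ]`:

builtins.listToAttrs (map
  (n: rec {
    name = "whitby-${toString n}";
    value = { inherit name; enable = true; };
  })
  [ 0 1 ])
# => { whitby-0 = { name = "whitby-0"; enable = true; };
#      whitby-1 = { name = "whitby-1"; enable = true; }; }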
@@ -26,7 +26,8 @@ let

   inherit (depot.ops) users;

-in {
+in
+{
   services.openldap = {
     enable = true;

@@ -48,7 +49,7 @@ in {

     "cn=schema".includes =
       map (schema: "${pkgs.openldap}/etc/schema/${schema}.ldif")
         [ "core" "cosine" "inetorgperson" "nis" ];
   };

   # Contents are immutable at runtime, and adding user accounts etc.
@@ -16,9 +16,10 @@ let
   drvTargets = depot.ci.targets;
   additionalSteps = [ protoCheck ];

-  parentTargetMap = if (externalArgs ? parentTargetMap)
+  parentTargetMap =
+    if (externalArgs ? parentTargetMap)
     then builtins.fromJSON (builtins.readFile externalArgs.parentTargetMap)
-    else {};
+    else { };

   postBuildSteps = [
     # After successful builds, create a gcroot for builds on canon.

@@ -40,7 +41,8 @@ let
   };

   drvmap = depot.nix.buildkite.mkDrvmap depot.ci.targets;
-in pkgs.runCommandNoCC "depot-pipeline" {} ''
+in
+pkgs.runCommandNoCC "depot-pipeline" { } ''
   mkdir $out
   cp -r ${pipeline}/* $out
   cp ${drvmap} $out/drvmap.json
@@ -22,6 +22,6 @@ in

 defun [ path (attrs agenixSecret) (attrs any) ]
   (path: secrets:
     depot.nix.readTree.drvTargets
       # Import each secret into the Nix store
       (builtins.mapAttrs (name: _: "${path}/${name}") secrets))
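The `defun` wrapper above gives the secret-importing function a checked type: it takes a path plus an attribute set of agenix secret declarations, and returns readTree build targets. A hedged sketch of a call site, assuming the function is bound as `mkSecrets` and the declarations live in `secrets.nix` (both names hypothetical):

# Hypothetical names; only the shape is taken from the code above.
mkSecrets ./. (import ./secrets.nix)
# => one build target per declared *.age file, each pointing at the
#    copy of the encrypted secret imported into the Nix store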
@@ -15,7 +15,8 @@ let
   whitby = "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAILNh/w4BSKov0jdz3gKBc98tpoLta5bb87fQXWBhAl2I";

   default.publicKeys = tazjin ++ grfn ++ sterni ++ [ whitby ];
-in {
+in
+{
   "besadii.age" = default;
   "buildkite-agent-token.age" = default;
   "buildkite-graphql-token.age" = default;
third_party/abseil_cpp/default.nix | 16

@@ -8,7 +8,7 @@ in pkgs.abseil-cpp.override {

 /* TODO(tazjin): update abseil subtree

 fullLlvm11Stdenv.mkDerivation rec {
   pname = "abseil-cpp";
   version = "20200519-768eb2ca+tvl-1";
   src = ./.;

@@ -17,15 +17,15 @@ fullLlvm11Stdenv.mkDerivation rec {
   # doCheck = true;

   cmakeFlags = [
     "-DCMAKE_CXX_STANDARD=17"
     #"-DABSL_RUN_TESTS=1"
   ];

   meta = with lib; {
     description = "An open-source collection of C++ code designed to augment the C++ standard library";
     homepage = https://abseil.io/;
     license = licenses.asl20;
     maintainers = [ maintainers.andersk ];
   };
 }
 */
third_party/agenix/default.nix | 3

@@ -9,7 +9,8 @@ let
   agenix = import src {
     inherit pkgs;
   };
-in {
+in
+{
   inherit src;
   cli = agenix.agenix;
 }
third_party/arion/default.nix | 14

@@ -1,8 +1,10 @@
 { pkgs, ... }:

-(import (pkgs.fetchFromGitHub {
-  owner = "hercules-ci";
-  repo = "arion";
-  rev = "db6d4d7490dff363de60cebbece3ae9361e3ce43";
-  sha256 = "0d8nqmc7fjshigax2g47ips262v8ml27x0ksq59kmprgb7ckzi5l";
-}) { inherit pkgs; }).arion
+(import
+  (pkgs.fetchFromGitHub {
+    owner = "hercules-ci";
+    repo = "arion";
+    rev = "db6d4d7490dff363de60cebbece3ae9361e3ce43";
+    sha256 = "0d8nqmc7fjshigax2g47ips262v8ml27x0ksq59kmprgb7ckzi5l";
+  })
+  { inherit pkgs; }).arion
third_party/bat_syntaxes/default.nix | 3

@@ -8,7 +8,8 @@

 let
   inherit (pkgs) bat runCommandNoCC;
-in runCommandNoCC "bat-syntaxes.bin" {} ''
+in
+runCommandNoCC "bat-syntaxes.bin" { } ''
   export HOME=$PWD
   mkdir -p .config/bat/syntaxes
   cp ${./Prolog.sublime-syntax} .config/bat/syntaxes
third_party/cgit/default.nix | 3

@@ -2,7 +2,8 @@

 let
   inherit (pkgs) stdenv gzip bzip2 xz luajit zlib autoconf openssl pkgconfig;
-in stdenv.mkDerivation rec {
+in
+stdenv.mkDerivation rec {
   pname = "cgit";
   version = "master";
   src = ./.;
third_party/clj2nix/default.nix | 3

@@ -5,4 +5,5 @@ pkgs.callPackage "${(pkgs.fetchFromGitHub {
   repo = "clj2nix";
   rev = "3d0a38c954c8e0926f57de1d80d357df05fc2f94";
   sha256 = "0y77b988qdgsrp4w72v1f5rrh33awbps2qdgp2wr2nmmi44541w5";
-})}/clj2nix.nix" {}
+})}/clj2nix.nix"
+  { }
third_party/default.nix | 55

@@ -24,32 +24,33 @@
   # be able to pass `specialArgs`. We depend on this because `depot`
   # needs to be partially evaluated in NixOS configuration before
   # module imports are resolved.
-  nixos = {
-    configuration,
-    specialArgs ? {},
-    system ? builtins.currentSystem,
-    ...
-  }:
+  nixos =
+    { configuration
+    , specialArgs ? { }
+    , system ? builtins.currentSystem
+    , ...
+    }:
     let
       eval = import "${pkgs.path}/nixos/lib/eval-config.nix" {
         inherit specialArgs system;
         modules = [
           configuration
           (import "${depot.path + "/ops/modules/default-imports.nix"}")
         ];
       };

       # This is for `nixos-rebuild build-vm'.
       vmConfig = (import "${pkgs.path}/nixos/lib/eval-config.nix" {
         inherit specialArgs system;
         modules = [
           configuration
           "${pkgs.path}/nixos/modules/virtualisation/qemu-vm.nix"
         ];
       }).config;
-    in {
-      inherit (eval) pkgs config options;
-      system = eval.config.system.build.toplevel;
-      vm = vmConfig.system.build.vm;
-    };
+    in
+    {
+      inherit (eval) pkgs config options;
+      system = eval.config.system.build.toplevel;
+      vm = vmConfig.system.build.vm;
+    };
 }
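As the comment in this hunk notes, the custom `nixos` wrapper exists so that `specialArgs` can be forwarded into `eval-config.nix`. A sketch of a call site, with a hypothetical machine configuration path:

depot.third_party.nixos {
  configuration = ./ops/machines/example.nix; # hypothetical path
  specialArgs = { inherit depot; };
}
# returns { pkgs, config, options, system, vm }: `system` is the
# toplevel system derivation, `vm` the `nixos-rebuild build-vm` image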
third_party/dhall/default.nix | 13

@@ -5,11 +5,14 @@ let
   # broken most of the time. The binaries are also fully static
   # builds, instead of the half-static crap that nixpkgs produces.
   easy-dhall-nix =
-    import (builtins.fetchTarball {
-      url = "https://github.com/justinwoo/easy-dhall-nix/archive/eae7f64c4d6c70681e5a56c84198236930ba425e.tar.gz";
-      sha256 = "1y2x15v8a679vlpxazjpibfwajp6zph60f8wjcm4xflbvazk0dx7";
-    }) { inherit pkgs; };
-in {
+    import
+      (builtins.fetchTarball {
+        url = "https://github.com/justinwoo/easy-dhall-nix/archive/eae7f64c4d6c70681e5a56c84198236930ba425e.tar.gz";
+        sha256 = "1y2x15v8a679vlpxazjpibfwajp6zph60f8wjcm4xflbvazk0dx7";
+      })
+      { inherit pkgs; };
+in
+{
   dhall = easy-dhall-nix.dhall-simple;
   dhall-bash = easy-dhall-nix.dhall-bash-simple;
   dhall-docs = easy-dhall-nix.dhall-docs-simple;
third_party/elmPackages_0_18/default.nix | 14

@@ -7,9 +7,11 @@

 { pkgs, ... }:

-(import (pkgs.fetchFromGitHub {
-  owner = "NixOS";
-  repo = "nixpkgs";
-  rev = "14f9ee66e63077539252f8b4550049381a082518";
-  sha256 = "1wn7nmb1cqfk2j91l3rwc6yhimfkzxprb8wknw5wi57yhq9m6lv1";
-}) {}).elmPackages
+(import
+  (pkgs.fetchFromGitHub {
+    owner = "NixOS";
+    repo = "nixpkgs";
+    rev = "14f9ee66e63077539252f8b4550049381a082518";
+    sha256 = "1wn7nmb1cqfk2j91l3rwc6yhimfkzxprb8wknw5wi57yhq9m6lv1";
+  })
+  { }).elmPackages
third_party/gerrit_plugins/builder.nix | 58

@@ -1,33 +1,35 @@
 { depot, pkgs, ... }:
 {
-  buildGerritBazelPlugin = {
-    name,
-    src,
-    depsOutputHash,
-    overlayPluginCmd ? ''
-      cp -R "${src}" "$out/plugins/${name}"
-    '',
-    postPatch ? "",
-  }: ((depot.third_party.gerrit.override {
-    name = "${name}.jar";
+  buildGerritBazelPlugin =
+    { name
+    , src
+    , depsOutputHash
+    , overlayPluginCmd ? ''
+        cp -R "${src}" "$out/plugins/${name}"
+      ''
+    , postPatch ? ""
+    ,
+    }: ((depot.third_party.gerrit.override {
+      name = "${name}.jar";

-    src = pkgs.runCommandLocal "${name}-src" {} ''
+      src = pkgs.runCommandLocal "${name}-src" { } ''
       cp -R "${depot.third_party.gerrit.src}" "$out"
       chmod +w "$out/plugins"
       ${overlayPluginCmd}
     '';

     bazelTarget = "//plugins/${name}";
   }).overrideAttrs (super: {
     deps = super.deps.overrideAttrs (superDeps: {
       outputHash = depsOutputHash;
     });
     installPhase = ''
       cp "bazel-bin/plugins/${name}/${name}.jar" "$out"
     '';
-    postPatch = if super ? postPatch then ''
-      ${super.postPatch}
-      ${postPatch}
-    '' else postPatch;
+    postPatch =
+      if super ? postPatch then ''
+        ${super.postPatch}
+        ${postPatch}
+      '' else postPatch;
   }));
 }
third_party/gerrit_plugins/default.nix | 3

@@ -2,7 +2,8 @@

 let
   inherit (import ./builder.nix args) buildGerritBazelPlugin;
-in depot.nix.readTree.drvTargets {
+in
+depot.nix.readTree.drvTargets {
   # https://gerrit.googlesource.com/plugins/owners
   owners = buildGerritBazelPlugin rec {
     name = "owners";
third_party/gerrit_plugins/oauth/default.nix | 3

@@ -2,7 +2,8 @@

 let
   inherit (import ../builder.nix args) buildGerritBazelPlugin;
-in buildGerritBazelPlugin rec {
+in
+buildGerritBazelPlugin rec {
   name = "oauth";
   depsOutputHash = "sha256:0j86amkw54y177s522hc988hqg034fsrkywbsb9a7h14zwcqbran";
   src = pkgs.fetchgit {
third_party/git/default.nix | 4

@@ -2,8 +2,8 @@
 # `pkgs.srcOnly`.
 { pkgs, ... }:

-pkgs.git.overrideAttrs(old: {
-  patches = (old.patches or []) ++ [
+pkgs.git.overrideAttrs (old: {
+  patches = (old.patches or [ ]) ++ [
     ./0001-feat-third_party-git-date-add-dottime-format.patch
   ];
 })
third_party/gitignoreSource/default.nix | 17

@@ -1,14 +1,17 @@
 { pkgs, ... }:

 let
-  gitignoreNix = import (pkgs.fetchFromGitHub {
-    owner = "hercules-ci";
-    repo = "gitignore";
-    rev = "f9e996052b5af4032fe6150bba4a6fe4f7b9d698";
-    sha256 = "0jrh5ghisaqdd0vldbywags20m2cxpkbbk5jjjmwaw0gr8nhsafv";
-  }) { inherit (pkgs) lib; };
+  gitignoreNix = import
+    (pkgs.fetchFromGitHub {
+      owner = "hercules-ci";
+      repo = "gitignore";
+      rev = "f9e996052b5af4032fe6150bba4a6fe4f7b9d698";
+      sha256 = "0jrh5ghisaqdd0vldbywags20m2cxpkbbk5jjjmwaw0gr8nhsafv";
+    })
+    { inherit (pkgs) lib; };

-in {
+in
+{
   __functor = _: gitignoreNix.gitignoreSource;

   # expose extra functions here
@@ -3,15 +3,17 @@
 depot.nix.buildGo.external {
   path = "github.com/charmbracelet/bubbletea";
   src =
-    let gitSrc = pkgs.fetchFromGitHub {
-      owner = "charmbracelet";
-      repo = "bubbletea";
-      rev = "v0.13.1";
-      sha256 = "0yf2fjkvx8ym9n6f3qp2z7sxs0qsfpj148sfvbrp38k67s3h20cs";
-    };
+    let
+      gitSrc = pkgs.fetchFromGitHub {
+        owner = "charmbracelet";
+        repo = "bubbletea";
+        rev = "v0.13.1";
+        sha256 = "0yf2fjkvx8ym9n6f3qp2z7sxs0qsfpj148sfvbrp38k67s3h20cs";
+      };
     # The examples/ directory is fairly extensive,
     # but it also adds most of the dependencies.
-    in pkgs.runCommand gitSrc.name {} ''
+    in
+    pkgs.runCommand gitSrc.name { } ''
       mkdir -p $out
       ln -s "${gitSrc}"/* $out
       rm -r $out/examples
third_party/grpc/default.nix | 2

@@ -5,7 +5,7 @@
   stdenv = pkgs.fullLlvm11Stdenv;
   abseil-cpp = depot.third_party.abseil_cpp;
   re2 = depot.third_party.re2;
-}).overrideAttrs(orig: rec {
+}).overrideAttrs (orig: rec {
   cmakeFlags = orig.cmakeFlags ++ [
     "-DCMAKE_CXX_STANDARD_REQUIRED=ON"
     "-DCMAKE_CXX_STANDARD=17"
third_party/gtest/default.nix | 2

@@ -2,7 +2,7 @@

 (pkgs.gtest.override {
   stdenv = pkgs.fullLlvm11Stdenv;
-}).overrideAttrs(_: {
+}).overrideAttrs (_: {
   src = pkgs.fetchFromGitHub {
     owner = "google";
     repo = "googletest";
third_party/josh/default.nix | 12

@@ -8,7 +8,8 @@ let
   rev = "69dc986e506ba5631c8bbf52835da076a18ec8dc";
   hash = "sha256:0ybc6ivjkm7bchaszs9lhbl1gbjnyhwq7a3vw6jml3ama84l52lb";
 };
-in depot.third_party.naersk.buildPackage {
+in
+depot.third_party.naersk.buildPackage {
   inherit src;

   buildInputs = with pkgs; [

@@ -18,8 +19,11 @@ in depot.third_party.naersk.buildPackage {
   ];

   cargoBuildOptions = x: x ++ [
-    "-p" "josh"
-    "-p" "josh-proxy"
-    "-p" "josh-ui"
+    "-p"
+    "josh"
+    "-p"
+    "josh-proxy"
+    "-p"
+    "josh-ui"
   ];
 }
third_party/lisp/bordeaux-threads.nix | 3

@@ -5,7 +5,8 @@
 let
   src = with pkgs; srcOnly lispPackages.bordeaux-threads;
   getSrc = f: "${src}/src/${f}";
-in depot.nix.buildLisp.library {
+in
+depot.nix.buildLisp.library {
   name = "bordeaux-threads";
   deps = [ depot.third_party.lisp.alexandria ];
Some files were not shown because too many files have changed in this diff.