style: format entire depot with nixpkgs-fmt
This CL can be used to compare the style of nixpkgs-fmt against other
formatters (nixpkgs, alejandra).

Change-Id: I87c6abff6bcb546b02ead15ad0405f81e01b6d9e
Reviewed-on: https://cl.tvl.fyi/c/depot/+/4397
Tested-by: BuildkiteCI
Reviewed-by: sterni <sternenseemann@systemli.org>
Reviewed-by: lukegb <lukegb@tvl.fyi>
Reviewed-by: wpcarro <wpcarro@gmail.com>
Reviewed-by: Profpatsch <mail@profpatsch.de>
Reviewed-by: kanepyork <rikingcoding@gmail.com>
Reviewed-by: tazjin <tazjin@tvl.su>
Reviewed-by: cynthia <cynthia@tvl.fyi>
Reviewed-by: edef <edef@edef.eu>
Reviewed-by: eta <tvl@eta.st>
Reviewed-by: grfn <grfn@gws.fyi>
Parent: 2d10d60fac
Commit: aa122cbae7
310 changed files with 7278 additions and 5490 deletions
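The style changes repeated throughout the diff are: empty attribute sets and
lists gain an inner space ({} becomes { }, [] becomes [ ]), `in` moves onto
its own line with the body dedented below it, and multi-argument function
calls are broken onto one line per argument. A minimal before/after sketch of
these rules (an illustrative example, not a file from this CL; ./lib.nix is
hypothetical):

    # before
    { pkgs ? import <nixpkgs> {}, ... }:
    let f = import ./lib.nix {};
    in f { deps = []; }

    # after nixpkgs-fmt
    { pkgs ? import <nixpkgs> { }, ... }:
    let f = import ./lib.nix { };
    in
    f { deps = [ ]; }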
corp/website
  default.nix
fun
nix
  binify
  buildGo
  buildLisp
  buildManPages
  buildkite
  drvSeqL
  emptyDerivation
  escapeExecline
  getBins
  mergePatch
  netstring
  nint
  readTree
    default.nix
    tests
  renderMarkdown
  runExecline
  runTestsuite
  sparseTree
  tag
  tailscale
  utils
  writeElispBin
  writeExecline
  writeScript
  writers
  yants
ops
  dns
  glesys
  journaldriver
  keycloak
  kontemplate
  machines/whitby
  modules
    atward.nix
    auto-deploy.nix
    automatic-gc.nix
    clbot.nix
    default.nix
    gerrit-queue.nix
    git-serving.nix
    irccat.nix
    monorepo-gerrit.nix
    nixery.nix
    oauth2_proxy.nix
    owothia.nix
    panettone.nix
    paroxysm.nix
    quassel.nix
    restic.nix
    smtprelay.nix
    sourcegraph.nix
    tvl-buildkite.nix
    tvl-slapd
  pipelines
  secrets
third_party
  abseil_cpp
  agenix
  arion
  bat_syntaxes
  cgit
  clj2nix
  default.nix
  dhall
  elmPackages_0_18
  gerrit_plugins
  git
  gitignoreSource
  gopkgs/github.com/charmbracelet/bubbletea
  grpc
  gtest
  josh
  lisp
|
@@ -30,7 +30,8 @@ let
       </style>
     '';
   };
-in pkgs.runCommandNoCC "corp-website" {} ''
+in
+pkgs.runCommandNoCC "corp-website" { } ''
   mkdir $out
   cp ${index} $out/index.html
 ''
default.nix (19 lines changed)
@@ -4,14 +4,16 @@
 { nixpkgsBisectPath ? null
 , parentTargetMap ? null
-, nixpkgsConfig ? {}, ... }@args:
+, nixpkgsConfig ? { }
+, ...
+}@args:

 let
   inherit (builtins)
     filter
     ;

-  readTree = import ./nix/readTree {};
+  readTree = import ./nix/readTree { };

   # Disallow access to //users from other depot parts.
   usersFilter = readTree.restrictFolder {
@@ -70,7 +72,8 @@ let
   # Is this tree node eligible for build inclusion?
   eligible = node: (node ? outPath) && !(node.meta.ci.skip or false);

-in readTree.fix(self: (readDepot {
+in
+readTree.fix (self: (readDepot {
   depot = self;

   # Pass third_party as 'pkgs' (for compatibility with external
@@ -110,8 +113,10 @@ in readTree.fix(self: (readDepot {
   });

   # Derivation that gcroots all depot targets.
-  ci.gcroot = with self.third_party.nixpkgs; makeSetupHook {
-    name = "depot-gcroot";
-    deps = self.ci.targets;
-  } emptyFile;
+  ci.gcroot = with self.third_party.nixpkgs; makeSetupHook
+    {
+      name = "depot-gcroot";
+      deps = self.ci.targets;
+    }
+    emptyFile;
 })
@@ -33,7 +33,8 @@ let
       cp ${frontend} $out/index.html
     ''}/")
   '';
-in depot.nix.buildLisp.program {
+in
+depot.nix.buildLisp.program {
   name = "gemma";

   deps = with depot.third_party.lisp; [
@@ -4,19 +4,20 @@ let
   inherit (pkgs) python3 python3Packages;

   opts = {
-    pname   = "idualctl";
+    pname = "idualctl";
     version = "0.1";
-    src     = ./.;
+    src = ./.;

     propagatedBuildInputs = [
       depot.third_party.python.broadlink
     ];
   };
   package = python3Packages.buildPythonPackage opts;
-  script  = python3Packages.buildPythonApplication opts;
-in depot.nix.readTree.drvTargets {
+  script = python3Packages.buildPythonApplication opts;
+in
+depot.nix.readTree.drvTargets {
   inherit script;
-  python   = python3.withPackages (_: [ package ]);
+  python = python3.withPackages (_: [ package ]);
   setAlarm = pkgs.writeShellScriptBin "set-alarm" ''
     echo "setting an alarm for ''${1}"
     ${pkgs.systemd}/bin/systemd-run --user --on-calendar="''${1} Europe/London" --unit=light-alarm.service
@@ -1,6 +1,7 @@
-{ depot ? (import ../../../. {})
+{ depot ? (import ../../../. { })
 , pkgs ? depot.third_party.nixpkgs
-, ... }:
+, ...
+}:

 let
   basePkg = pkgs.haskellPackages.callPackage ./pkg.nix { };
@@ -1,5 +1,15 @@
-{ mkDerivation, base, bytestring, chatter, containers, envy
-, irc-client, lens, lib, random, relude, text
+{ mkDerivation
+, base
+, bytestring
+, chatter
+, containers
+, envy
+, irc-client
+, lens
+, lib
+, random
+, relude
+, text
 }:
 mkDerivation {
   pname = "owothia";
@@ -8,8 +18,16 @@ mkDerivation {
   isLibrary = false;
   isExecutable = true;
   executableHaskellDepends = [
-    base bytestring chatter containers envy irc-client lens random
-    relude text
+    base
+    bytestring
+    chatter
+    containers
+    envy
+    irc-client
+    lens
+    random
+    relude
+    text
   ];
   license = "unknown";
   hydraPlatforms = lib.platforms.none;
@@ -1,4 +1,4 @@
-{ pkgs ? (import ../../../. {}).third_party, ... }:
+{ pkgs ? (import ../../../. { }).third_party, ... }:

 let
   inherit (pkgs)
@@ -12,7 +12,8 @@ let
       gopkgs."github.com".pkg.browser.gopkg
     ];
   };
-in uggc.overrideAttrs(old: {
+in
+uggc.overrideAttrs (old: {
   buildCommand = old.buildCommand + ''
     install -D ${./uggc.desktop} $out/share/applications/uggc.desktop
     sed "s|@out@|$out|g" -i $out/share/applications/uggc.desktop
@@ -38,6 +38,7 @@ let
       "ecl" # refuses to create non-ASCII paths even on POSIX…
     ];
   };
-in bin // {
+in
+bin // {
   inherit lib;
 }
@@ -10,7 +10,7 @@
 # with `binify { exe = …; name = "hello" }`.
 { exe, name }:

-pkgs.runCommandLocal "${name}-bin" {} ''
+pkgs.runCommandLocal "${name}-bin" { } ''
   mkdir -p $out/bin
   ln -sT ${lib.escapeShellArg exe} $out/bin/${lib.escapeShellArg name}
 ''
@@ -4,8 +4,9 @@
 # buildGo provides Nix functions to build Go packages in the style of Bazel's
 # rules_go.

-{ pkgs ? import <nixpkgs> {}
-, ... }:
+{ pkgs ? import <nixpkgs> { }
+, ...
+}:

 let
   inherit (builtins)
@@ -40,7 +41,7 @@ let

   xFlags = x_defs: spaceOut (map (k: "-X ${k}=${x_defs."${k}"}") (attrNames x_defs));

-  pathToName = p: replaceStrings ["/"] ["_"] (toString p);
+  pathToName = p: replaceStrings [ "/" ] [ "_" ] (toString p);

   # Add an `overrideGo` attribute to a function result that works
   # similar to `overrideAttrs`, but is used specifically for the
@@ -52,49 +53,50 @@ let
   # High-level build functions

   # Build a Go program out of the specified files and dependencies.
-  program = { name, srcs, deps ? [], x_defs ? {} }:
-    let uniqueDeps = allDeps (map (d: d.gopkg) deps);
-    in runCommand name {} ''
-      ${go}/bin/go tool compile -o ${name}.a -trimpath=$PWD -trimpath=${go} ${includeSources uniqueDeps} ${spaceOut srcs}
-      mkdir -p $out/bin
-      export GOROOT_FINAL=go
-      ${go}/bin/go tool link -o $out/bin/${name} -buildid nix ${xFlags x_defs} ${includeLibs uniqueDeps} ${name}.a
-    '';
+  program = { name, srcs, deps ? [ ], x_defs ? { } }:
+    let uniqueDeps = allDeps (map (d: d.gopkg) deps);
+    in runCommand name { } ''
+      ${go}/bin/go tool compile -o ${name}.a -trimpath=$PWD -trimpath=${go} ${includeSources uniqueDeps} ${spaceOut srcs}
+      mkdir -p $out/bin
+      export GOROOT_FINAL=go
+      ${go}/bin/go tool link -o $out/bin/${name} -buildid nix ${xFlags x_defs} ${includeLibs uniqueDeps} ${name}.a
+    '';

   # Build a Go library assembled out of the specified files.
   #
   # This outputs both the sources and compiled binary, as both are
   # needed when downstream packages depend on it.
-  package = { name, srcs, deps ? [], path ? name, sfiles ? [] }:
+  package = { name, srcs, deps ? [ ], path ? name, sfiles ? [ ] }:
     let
       uniqueDeps = allDeps (map (d: d.gopkg) deps);

-      # The build steps below need to be executed conditionally for Go
-      # assembly if the analyser detected any *.s files.
-      #
-      # This is required for several popular packages (e.g. x/sys).
-      ifAsm = do: lib.optionalString (sfiles != []) do;
-      asmBuild = ifAsm ''
-        ${go}/bin/go tool asm -trimpath $PWD -I $PWD -I ${go}/share/go/pkg/include -D GOOS_linux -D GOARCH_amd64 -gensymabis -o ./symabis ${spaceOut sfiles}
-        ${go}/bin/go tool asm -trimpath $PWD -I $PWD -I ${go}/share/go/pkg/include -D GOOS_linux -D GOARCH_amd64 -o ./asm.o ${spaceOut sfiles}
-      '';
-      asmLink = ifAsm "-symabis ./symabis -asmhdr $out/go_asm.h";
-      asmPack = ifAsm ''
-        ${go}/bin/go tool pack r $out/${path}.a ./asm.o
-      '';
+      # The build steps below need to be executed conditionally for Go
+      # assembly if the analyser detected any *.s files.
+      #
+      # This is required for several popular packages (e.g. x/sys).
+      ifAsm = do: lib.optionalString (sfiles != [ ]) do;
+      asmBuild = ifAsm ''
+        ${go}/bin/go tool asm -trimpath $PWD -I $PWD -I ${go}/share/go/pkg/include -D GOOS_linux -D GOARCH_amd64 -gensymabis -o ./symabis ${spaceOut sfiles}
+        ${go}/bin/go tool asm -trimpath $PWD -I $PWD -I ${go}/share/go/pkg/include -D GOOS_linux -D GOARCH_amd64 -o ./asm.o ${spaceOut sfiles}
+      '';
+      asmLink = ifAsm "-symabis ./symabis -asmhdr $out/go_asm.h";
+      asmPack = ifAsm ''
+        ${go}/bin/go tool pack r $out/${path}.a ./asm.o
+      '';

-      gopkg = (runCommand "golib-${name}" {} ''
-        mkdir -p $out/${path}
-        ${srcList path (map (s: "${s}") srcs)}
-        ${asmBuild}
-        ${go}/bin/go tool compile -pack ${asmLink} -o $out/${path}.a -trimpath=$PWD -trimpath=${go} -p ${path} ${includeSources uniqueDeps} ${spaceOut srcs}
-        ${asmPack}
-      '') // {
-        inherit gopkg;
-        goDeps = uniqueDeps;
-        goImportPath = path;
-      };
-    in gopkg;
+      gopkg = (runCommand "golib-${name}" { } ''
+        mkdir -p $out/${path}
+        ${srcList path (map (s: "${s}") srcs)}
+        ${asmBuild}
+        ${go}/bin/go tool compile -pack ${asmLink} -o $out/${path}.a -trimpath=$PWD -trimpath=${go} -p ${path} ${includeSources uniqueDeps} ${spaceOut srcs}
+        ${asmPack}
+      '') // {
+        inherit gopkg;
+        goDeps = uniqueDeps;
+        goImportPath = path;
+      };
+    in
+    gopkg;

   # Build a tree of Go libraries out of an external Go source
   # directory that follows the standard Go layout and was not built
@@ -110,10 +112,10 @@ let
   };

   # Build a Go library out of the specified protobuf definition.
-  proto = { name, proto, path ? name, goPackage ? name, extraDeps ? [] }: (makeOverridable package) {
+  proto = { name, proto, path ? name, goPackage ? name, extraDeps ? [ ] }: (makeOverridable package) {
     inherit name path;
     deps = [ protoLibs.goProto.proto.gopkg ] ++ extraDeps;
-    srcs = lib.singleton (runCommand "goproto-${name}.pb.go" {} ''
+    srcs = lib.singleton (runCommand "goproto-${name}.pb.go" { } ''
       cp ${proto} ${baseNameOf proto}
       ${protobuf}/bin/protoc --plugin=${protoLibs.goProto.protoc-gen-go.gopkg}/bin/protoc-gen-go \
         --go_out=plugins=grpc,import_path=${baseNameOf path}:. ${baseNameOf proto}
@@ -124,7 +126,8 @@ let
   # Build a Go library out of the specified gRPC definition.
   grpc = args: proto (args // { extraDeps = [ protoLibs.goGrpc.gopkg ]; });

-in {
+in
+{
   # Only the high-level builder functions are exposed, but made
   # overrideable.
   program = makeOverridable program;
@@ -8,7 +8,7 @@
 # users a quick introduction to how to use buildGo.

 let
-  buildGo = import ../default.nix {};
+  buildGo = import ../default.nix { };

   # Example use of buildGo.package, which creates an importable Go
   # package from the specified source files.
@@ -29,7 +29,8 @@ let
   # Example use of buildGo.program, which builds an executable using
   # the specified name and dependencies (which in turn must have been
   # created via buildGo.package etc.)
-in buildGo.program {
+in
+buildGo.program {
   name = "example";

   srcs = [
nix/buildGo/external/default.nix (48 lines changed, vendored)
@@ -17,12 +17,12 @@ let

   inherit (pkgs) lib runCommand go jq ripgrep;

-  pathToName = p: replaceStrings ["/"] ["_"] (toString p);
+  pathToName = p: replaceStrings [ "/" ] [ "_" ] (toString p);

   # Collect all non-vendored dependencies from the Go standard library
   # into a file that can be used to filter them out when processing
   # dependencies.
-  stdlibPackages = runCommand "stdlib-pkgs.json" {} ''
+  stdlibPackages = runCommand "stdlib-pkgs.json" { } ''
     export HOME=$PWD
     export GOPATH=/dev/null
     ${go}/bin/go list std | \
@@ -45,20 +45,28 @@ let
   };

   mkset = path: value:
-    if path == [] then { gopkg = value; }
+    if path == [ ] then { gopkg = value; }
     else { "${head path}" = mkset (tail path) value; };

   last = l: elemAt l ((length l) - 1);

   toPackage = self: src: path: depMap: entry:
     let
-      localDeps = map (d: lib.attrByPath (d ++ [ "gopkg" ]) (
-        throw "missing local dependency '${lib.concatStringsSep "." d}' in '${path}'"
-      ) self) entry.localDeps;
+      localDeps = map
+        (d: lib.attrByPath (d ++ [ "gopkg" ])
+          (
+            throw "missing local dependency '${lib.concatStringsSep "." d}' in '${path}'"
+          )
+          self)
+        entry.localDeps;

-      foreignDeps = map (d: lib.attrByPath [ d.path ] (
-        throw "missing foreign dependency '${d.path}' in '${path}, imported at ${d.position}'"
-      ) depMap) entry.foreignDeps;
+      foreignDeps = map
+        (d: lib.attrByPath [ d.path ]
+          (
+            throw "missing foreign dependency '${d.path}' in '${path}, imported at ${d.position}'"
+          )
+          depMap)
+        entry.foreignDeps;

       args = {
         srcs = map (f: src + ("/" + f)) entry.files;
@@ -74,22 +82,28 @@ let
       binArgs = args // {
         name = (last ((lib.splitString "/" path) ++ entry.locator));
       };
-    in if entry.isCommand then (program binArgs) else (package libArgs);
+    in
+    if entry.isCommand then (program binArgs) else (package libArgs);

-in { src, path, deps ? [] }: let
+in
+{ src, path, deps ? [ ] }:
+let
   # Build a map of dependencies (from their import paths to their
   # derivation) so that they can be conditionally imported only in
   # sub-packages that require them.
-  depMap = listToAttrs (map (d: {
-    name = d.goImportPath;
-    value = d;
-  }) (map (d: d.gopkg) deps));
+  depMap = listToAttrs (map
+    (d: {
+      name = d.goImportPath;
+      value = d;
+    })
+    (map (d: d.gopkg) deps));

   name = pathToName path;
-  analysisOutput = runCommand "${name}-structure.json" {} ''
+  analysisOutput = runCommand "${name}-structure.json" { } ''
     ${analyser}/bin/analyser -path ${path} -source ${src} > $out
   '';
   analysis = fromJSON (readFile analysisOutput);
-in lib.fix(self: foldl' lib.recursiveUpdate {} (
+in
+lib.fix (self: foldl' lib.recursiveUpdate { } (
   map (entry: mkset entry.locator (toPackage self src path depMap entry)) analysis
 ))
@@ -8,7 +8,8 @@

 let
   inherit (builtins) fetchGit map;
-in rec {
+in
+rec {
   goProto = external {
     path = "github.com/golang/protobuf";
     src = fetchGit {
@@ -4,7 +4,7 @@
 # buildLisp is designed to enforce conventions and do away with the
 # free-for-all of existing Lisp build systems.

-{ pkgs ? import <nixpkgs> {}, ... }:
+{ pkgs ? import <nixpkgs> { }, ... }:

 let
   inherit (builtins) map elemAt match filter;
@@ -70,11 +70,16 @@ let
   implFilter = impl: xs:
     let
       isFilterSet = x: builtins.isAttrs x && !(lib.isDerivation x);
-    in builtins.map (
-      x: if isFilterSet x then x.${impl.name} or x.default else x
-    ) (builtins.filter (
-      x: !(isFilterSet x) || x ? ${impl.name} || x ? default
-    ) xs);
+    in
+    builtins.map
+      (
+        x: if isFilterSet x then x.${impl.name} or x.default else x
+      )
+      (builtins.filter
+        (
+          x: !(isFilterSet x) || x ? ${impl.name} || x ? default
+        )
+        xs);

   # Generates lisp code which instructs the given lisp implementation to load
   # all the given dependencies.
@@ -103,17 +108,21 @@ let
   # 'allDeps' flattens the list of dependencies (and their
   # dependencies) into one ordered list of unique deps which
   # all use the given implementation.
-  allDeps = impl: deps: let
-    # The override _should_ propagate itself recursively, as every derivation
-    # would only expose its actually used dependencies. Use implementation
-    # attribute created by withExtras if present, override in all other cases
-    # (mainly bundled).
-    deps' = builtins.map (dep: dep."${impl.name}" or (dep.overrideLisp (_: {
-      implementation = impl;
-    }))) deps;
-  in (lib.toposort dependsOn (lib.unique (
-    lib.flatten (deps' ++ (map (d: d.lispDeps) deps'))
-  ))).result;
+  allDeps = impl: deps:
+    let
+      # The override _should_ propagate itself recursively, as every derivation
+      # would only expose its actually used dependencies. Use implementation
+      # attribute created by withExtras if present, override in all other cases
+      # (mainly bundled).
+      deps' = builtins.map
+        (dep: dep."${impl.name}" or (dep.overrideLisp (_: {
+          implementation = impl;
+        })))
+        deps;
+    in
+    (lib.toposort dependsOn (lib.unique (
+      lib.flatten (deps' ++ (map (d: d.lispDeps) deps'))
+    ))).result;

   # 'allNative' extracts all native dependencies of a dependency list
   # to ensure that library load paths are set correctly during all
@@ -138,42 +147,49 @@ let
   withExtras = f: args:
     let
      drv = (makeOverridable f) args;
-    in lib.fix (self:
-      drv.overrideLisp (old:
-        let
-          implementation = old.implementation or defaultImplementation;
-          brokenOn = old.brokenOn or [];
-          targets = lib.subtractLists (brokenOn ++ [ implementation.name ])
-            (builtins.attrNames impls);
-        in {
-          passthru = (old.passthru or {}) // {
-            repl = implementation.lispWith [ self ];
+    in
+    lib.fix (self:
+      drv.overrideLisp
+        (old:
+          let
+            implementation = old.implementation or defaultImplementation;
+            brokenOn = old.brokenOn or [ ];
+            targets = lib.subtractLists (brokenOn ++ [ implementation.name ])
+              (builtins.attrNames impls);
+          in
+          {
+            passthru = (old.passthru or { }) // {
+              repl = implementation.lispWith [ self ];

-            # meta is done via passthru to minimize rebuilds caused by overriding
-            meta = (old.passthru.meta or {}) // {
-              inherit targets;
-            };
-          } // builtins.listToAttrs (builtins.map (impl: {
-            inherit (impl) name;
-            value = self.overrideLisp (_: {
-              implementation = impl;
-            });
-          }) (builtins.attrValues impls));
-        }) // {
-      overrideLisp = new: withExtras f (args // new args);
-    });
+              # meta is done via passthru to minimize rebuilds caused by overriding
+              meta = (old.passthru.meta or { }) // {
+                inherit targets;
+              };
+            } // builtins.listToAttrs (builtins.map
+              (impl: {
+                inherit (impl) name;
+                value = self.overrideLisp (_: {
+                  implementation = impl;
+                });
+              })
+              (builtins.attrValues impls));
+          }) // {
+        overrideLisp = new: withExtras f (args // new args);
+      });

   # 'testSuite' builds a Common Lisp test suite that loads all of srcs and deps,
   # and then executes expression to check its result
-  testSuite = { name, expression, srcs, deps ? [], native ? [], implementation }:
+  testSuite = { name, expression, srcs, deps ? [ ], native ? [ ], implementation }:
     let
       lispDeps = allDeps implementation (implFilter implementation deps);
       lispNativeDeps = allNative native lispDeps;
       filteredSrcs = implFilter implementation srcs;
-    in runCommandNoCC name {
-      LD_LIBRARY_PATH = lib.makeLibraryPath lispNativeDeps;
-      LANG = "C.UTF-8";
-    } ''
+    in
+    runCommandNoCC name
+      {
+        LD_LIBRARY_PATH = lib.makeLibraryPath lispNativeDeps;
+        LANG = "C.UTF-8";
+      } ''
      echo "Running test suite ${name}"

      ${implementation.runScript} ${
@@ -452,15 +468,16 @@ let
     } $@
   '';

-  bundled = name: runCommandNoCC "${name}-cllib" {
-    passthru = {
-      lispName = name;
-      lispNativeDeps = [];
-      lispDeps = [];
-      lispBinary = false;
-      repl = impls.ecl.lispWith [ (impls.ecl.bundled name) ];
-    };
-  } ''
+  bundled = name: runCommandNoCC "${name}-cllib"
+    {
+      passthru = {
+        lispName = name;
+        lispNativeDeps = [ ];
+        lispDeps = [ ];
+        lispBinary = false;
+        repl = impls.ecl.lispWith [ (impls.ecl.bundled name) ];
+      };
+    } ''
     mkdir -p "$out"
     ln -s "${ecl-static}/lib/ecl-${ecl-static.version}/${name}.${impls.ecl.faslExt}" -t "$out"
     ln -s "${ecl-static}/lib/ecl-${ecl-static.version}/lib${name}.a" "$out/${name}.a"
@@ -489,7 +506,8 @@ let

   # See https://ccl.clozure.com/docs/ccl.html#building-definitions
   faslExt =
-    /**/ if targetPlatform.isPowerPC && targetPlatform.is32bit then "pfsl"
+    /**/
+    if targetPlatform.isPowerPC && targetPlatform.is32bit then "pfsl"
     else if targetPlatform.isPowerPC && targetPlatform.is64bit then "p64fsl"
     else if targetPlatform.isx86_64 && targetPlatform.isLinux then "lx64fsl"
     else if targetPlatform.isx86_32 && targetPlatform.isLinux then "lx32fsl"
@@ -572,7 +590,7 @@ let
       lib.optionalString (deps != [])
         "--load ${writeText "load.lisp" (impls.ccl.genLoadLisp lispDeps)}"
     } "$@"
-    '';
+  '';
   };
 };
@@ -586,37 +604,42 @@ let
   library =
     { name
     , implementation ? defaultImplementation
-    , brokenOn ? [] # TODO(sterni): make this a warning
+    , brokenOn ? [ ] # TODO(sterni): make this a warning
     , srcs
-    , deps ? []
-    , native ? []
+    , deps ? [ ]
+    , native ? [ ]
     , tests ? null
-    , passthru ? {}
+    , passthru ? { }
     }:
     let
       filteredDeps = implFilter implementation deps;
       filteredSrcs = implFilter implementation srcs;
       lispNativeDeps = (allNative native filteredDeps);
       lispDeps = allDeps implementation filteredDeps;
-      testDrv = if ! isNull tests
-        then testSuite {
-          name = tests.name or "${name}-test";
-          srcs = filteredSrcs ++ (tests.srcs or []);
-          deps = filteredDeps ++ (tests.deps or []);
-          expression = tests.expression;
-          inherit implementation;
-        }
+      testDrv =
+        if ! isNull tests
+        then
+          testSuite
+            {
+              name = tests.name or "${name}-test";
+              srcs = filteredSrcs ++ (tests.srcs or [ ]);
+              deps = filteredDeps ++ (tests.deps or [ ]);
+              expression = tests.expression;
+              inherit implementation;
+            }
         else null;
-    in lib.fix (self: runCommandNoCC "${name}-cllib" {
-      LD_LIBRARY_PATH = lib.makeLibraryPath lispNativeDeps;
-      LANG = "C.UTF-8";
-      passthru = passthru // {
-        inherit lispNativeDeps lispDeps;
-        lispName = name;
-        lispBinary = false;
-        tests = testDrv;
-      };
-    } ''
+    in
+    lib.fix (self: runCommandNoCC "${name}-cllib"
+      {
+        LD_LIBRARY_PATH = lib.makeLibraryPath lispNativeDeps;
+        LANG = "C.UTF-8";
+        passthru = passthru // {
+          inherit lispNativeDeps lispDeps;
+          lispName = name;
+          lispBinary = false;
+          tests = testDrv;
+        };
+      } ''
      ${if ! isNull testDrv
        then "echo 'Test ${testDrv} succeeded'"
        else "echo 'No tests run'"}
@@ -637,13 +660,13 @@ let
   program =
     { name
     , implementation ? defaultImplementation
-    , brokenOn ? [] # TODO(sterni): make this a warning
+    , brokenOn ? [ ] # TODO(sterni): make this a warning
     , main ? "${name}:main"
     , srcs
-    , deps ? []
-    , native ? []
+    , deps ? [ ]
+    , native ? [ ]
     , tests ? null
-    , passthru ? {}
+    , passthru ? { }
     }:
     let
       filteredSrcs = implFilter implementation srcs;
@@ -656,45 +679,53 @@ let
        deps = lispDeps;
        srcs = filteredSrcs;
      };
-      testDrv = if ! isNull tests
-        then testSuite {
-          name = tests.name or "${name}-test";
-          srcs =
-            ( # testSuite does run implFilter as well
-            filteredSrcs ++ (tests.srcs or []));
-          deps = filteredDeps ++ (tests.deps or []);
-          expression = tests.expression;
-          inherit implementation;
-        }
+      testDrv =
+        if ! isNull tests
+        then
+          testSuite
+            {
+              name = tests.name or "${name}-test";
+              srcs =
+                (
+                  # testSuite does run implFilter as well
+                  filteredSrcs ++ (tests.srcs or [ ])
+                );
+              deps = filteredDeps ++ (tests.deps or [ ]);
+              expression = tests.expression;
+              inherit implementation;
+            }
         else null;
-    in lib.fix (self: runCommandNoCC "${name}" {
-      nativeBuildInputs = [ makeWrapper ];
-      LD_LIBRARY_PATH = libPath;
-      LANG = "C.UTF-8";
-      passthru = passthru // {
-        lispName = name;
-        lispDeps = [ selfLib ];
-        lispNativeDeps = native;
-        lispBinary = true;
-        tests = testDrv;
-      };
-    } (''
-      ${if ! isNull testDrv
-        then "echo 'Test ${testDrv} succeeded'"
-        else ""}
-      mkdir -p $out/bin
-
-      ${implementation.runScript} ${
-        implementation.genDumpLisp {
-          inherit name main;
-          deps = ([ selfLib ] ++ lispDeps);
-        }
-      }
-    '' + lib.optionalString implementation.wrapProgram ''
-      wrapProgram $out/bin/${name} \
-        --prefix LD_LIBRARY_PATH : "${libPath}" \
-        --add-flags "\$NIX_BUILDLISP_LISP_ARGS --"
-    ''));
+    in
+    lib.fix (self: runCommandNoCC "${name}"
+      {
+        nativeBuildInputs = [ makeWrapper ];
+        LD_LIBRARY_PATH = libPath;
+        LANG = "C.UTF-8";
+        passthru = passthru // {
+          lispName = name;
+          lispDeps = [ selfLib ];
+          lispNativeDeps = native;
+          lispBinary = true;
+          tests = testDrv;
+        };
+      }
+      (''
+        ${if ! isNull testDrv
+          then "echo 'Test ${testDrv} succeeded'"
+          else ""}
+        mkdir -p $out/bin
+
+        ${implementation.runScript} ${
+          implementation.genDumpLisp {
+            inherit name main;
+            deps = ([ selfLib ] ++ lispDeps);
+          }
+        }
+      '' + lib.optionalString implementation.wrapProgram ''
+        wrapProgram $out/bin/${name} \
+          --prefix LD_LIBRARY_PATH : "${libPath}" \
+          --add-flags "\$NIX_BUILDLISP_LISP_ARGS --"
+      ''));

   # 'bundled' creates a "library" which makes a built-in package available,
   # such as any of SBCL's sb-* packages or ASDF. By default this is done
@@ -714,11 +745,13 @@ let
     }:
     implementation.bundled or (defaultBundled implementation) name;

-  in (makeOverridable bundled') {
+  in
+  (makeOverridable bundled') {
     inherit name;
   };

-in {
+in
+{
   library = withExtras library;
   program = withExtras program;
   inherit bundled;
@@ -14,15 +14,16 @@ let
     ];
   };

   # Example Lisp program.
   #
   # This builds & writes an executable for a program using the library
   # above to disk.
   #
   # By default, buildLisp.program expects the entry point to be
   # `$name:main`. This can be overridden by configuring the `main`
   # attribute.
-in buildLisp.program {
+in
+buildLisp.program {
   name = "example";
   deps = [ libExample ];
@@ -13,9 +13,9 @@ let
     ;

   bins = getBins mandoc [ "mandoc" ]
-      // getBins gzip [ "gzip" ]
-      // getBins coreutils [ "mkdir" "ln" "cp" ]
-      ;
+    // getBins gzip [ "gzip" ]
+    // getBins coreutils [ "mkdir" "ln" "cp" ]
+    ;

   defaultGzip = true;
@@ -35,41 +35,68 @@ let
     }:
     { content
     , ...
-    }@page: let
+    }@page:
+    let
       source = builtins.toFile (basename false page) content;
-    in runExecline (basename gzip page) {} ([
-      (if requireLint then "if" else "foreground") [
-        bins.mandoc "-mdoc" "-T" "lint" source
+    in
+    runExecline (basename gzip page) { } ([
+      (if requireLint then "if" else "foreground")
+      [
+        bins.mandoc
+        "-mdoc"
+        "-T"
+        "lint"
+        source
       ]
-      "importas" "out" "out"
+      "importas"
+      "out"
+      "out"
     ] ++ (if gzip then [
-      "redirfd" "-w" "1" "$out"
-      bins.gzip "-c" source
+      "redirfd"
+      "-w"
+      "1"
+      "$out"
+      bins.gzip
+      "-c"
+      source
     ] else [
-      bins.cp "--reflink=auto" source "$out"
+      bins.cp
+      "--reflink=auto"
+      source
+      "$out"
     ]));

   buildManPages =
     name:
-    { derivationArgs ? {}
+    { derivationArgs ? { }
     , gzip ? defaultGzip
     , ...
     }@args:
     pages:
-    runExecline "${name}-man-pages" {
-      inherit derivationArgs;
-    } ([
-      "importas" "out" "out"
-    ] ++ lib.concatMap ({ name, section, content }@page: [
-      "if" [ bins.mkdir "-p" (manDir page) ]
-      "if" [
-        bins.ln "-s"
-        (buildManPage args page)
-        (target gzip page)
-      ]
-    ]) pages);
+    runExecline "${name}-man-pages"
+      {
+        inherit derivationArgs;
+      }
+      ([
+        "importas"
+        "out"
+        "out"
+      ] ++ lib.concatMap
+        ({ name, section, content }@page: [
+          "if"
+          [ bins.mkdir "-p" (manDir page) ]
+          "if"
+          [
+            bins.ln
+            "-s"
+            (buildManPage args page)
+            (target gzip page)
+          ]
+        ])
+        pages);

-in {
+in
+{
   __functor = _: buildManPages;

   single = buildManPage;
@@ -29,7 +29,8 @@ let
     unsafeDiscardStringContext;

   inherit (pkgs) lib runCommandNoCC writeText;
-in rec {
+in
+rec {
   # Creates a Nix expression that yields the target at the specified
   # location in the repository.
   #
@@ -42,14 +43,15 @@ in rec {
       descend = expr: attr: "builtins.getAttr \"${attr}\" (${expr})";
       targetExpr = foldl' descend "import ./. {}" target.__readTree;
       subtargetExpr = descend targetExpr target.__subtarget;
-    in if target ? __subtarget then subtargetExpr else targetExpr;
+    in
+    if target ? __subtarget then subtargetExpr else targetExpr;

   # Create a pipeline label from the target's tree location.
   mkLabel = target:
     let label = concatStringsSep "/" target.__readTree;
     in if target ? __subtarget
-      then "${label}:${target.__subtarget}"
-      else label;
+    then "${label}:${target.__subtarget}"
+    else label;

   # Determine whether to skip a target if it has not diverged from the
   # HEAD branch.
@@ -74,33 +76,36 @@ in rec {

   # Create a pipeline step from a single target.
   mkStep = headBranch: parentTargetMap: target:
     let
       label = mkLabel target;
       drvPath = unsafeDiscardStringContext target.drvPath;
       shouldSkip' = shouldSkip parentTargetMap;
-    in {
+    in
+    {
       label = ":nix: " + label;
       key = hashString "sha1" label;
       skip = shouldSkip' label drvPath;
       command = mkBuildCommand target drvPath;
       env.READTREE_TARGET = label;

       # Add a dependency on the initial static pipeline step which
       # always runs. This allows build steps uploaded in batches to
       # start running before all batches have been uploaded.
       depends_on = ":init:";
     };

   # Helper function to inelegantly divide a list into chunks of at
   # most n elements.
   #
   # This works by assigning each element a chunk ID based on its
   # index, and then grouping all elements by their chunk ID.
-  chunksOf = n: list: let
-    chunkId = idx: toString (idx / n + 1);
-    assigned = lib.imap1 (idx: value: { inherit value ; chunk = chunkId idx; }) list;
-    unchunk = mapAttrs (_: elements: map (e: e.value) elements);
-  in unchunk (lib.groupBy (e: e.chunk) assigned);
+  chunksOf = n: list:
+    let
+      chunkId = idx: toString (idx / n + 1);
+      assigned = lib.imap1 (idx: value: { inherit value; chunk = chunkId idx; }) list;
+      unchunk = mapAttrs (_: elements: map (e: e.value) elements);
+    in
+    unchunk (lib.groupBy (e: e.chunk) assigned);

   # Define a build pipeline chunk as a JSON file, using the pipeline
   # format documented on
@@ -120,104 +125,112 @@ in rec {
     attrValues (mapAttrs (makePipelineChunk name) (chunksOf 192 steps));

   # Create a pipeline structure for the given targets.
-  mkPipeline = {
-    # HEAD branch of the repository on which release steps, GC
-    # anchoring and other "mainline only" steps should run.
-    headBranch,
-
-    # List of derivations as read by readTree (in most cases just the
-    # output of readTree.gather) that should be built in Buildkite.
-    #
-    # These are scheduled as the first build steps and run as fast as
-    # possible, in order, without any concurrency restrictions.
-    drvTargets,
-
-    # Derivation map of a parent commit. Only targets which no longer
-    # correspond to the content of this map will be built. Passing an
-    # empty map will always build all targets.
-    parentTargetMap ? {},
-
-    # A list of plain Buildkite step structures to run alongside the
-    # build for all drvTargets, but before proceeding with any
-    # post-build actions such as status reporting.
-    #
-    # Can be used for things like code formatting checks.
-    additionalSteps ? [],
-
-    # A list of plain Buildkite step structures to run after all
-    # previous steps succeeded.
-    #
-    # Can be used for status reporting steps and the like.
-    postBuildSteps ? []
-  }: let
-    # Convert a target into all of its build and post-build steps,
-    # treated separately as they need to be in different chunks.
-    targetToSteps = target: let
-      step = mkStep headBranch parentTargetMap target;
-
-      # Split build/post-build steps
-      splitExtraSteps = partition ({ postStep, ... }: postStep)
-        (attrValues (mapAttrs (name: value: {
-          inherit name value;
-          postStep = (value ? prompt) || (value.postBuild or false);
-        }) (target.meta.ci.extraSteps or {})));
-
-      mkExtraStep' = { name, value, ... }: mkExtraStep step name value;
-      extraBuildSteps = map mkExtraStep' splitExtraSteps.wrong; # 'wrong' -> no prompt
-      extraPostSteps = map mkExtraStep' splitExtraSteps.right; # 'right' -> has prompt
-    in {
-      buildSteps = [ step ] ++ extraBuildSteps;
-      postSteps = extraPostSteps;
-    };
-
-    # Combine all target steps into separate build and post-build step lists.
-    steps = foldl' (acc: t: {
-      buildSteps = acc.buildSteps ++ t.buildSteps;
-      postSteps = acc.postSteps ++ t.postSteps;
-    }) { buildSteps = []; postSteps = []; } (map targetToSteps drvTargets);
-
-    buildSteps =
-      # Add build steps for each derivation target and their extra
-      # steps.
-      steps.buildSteps
-
-      # Add additional steps (if set).
-      ++ additionalSteps;
-
-    postSteps =
-      # Add post-build steps for each derivation target.
-      steps.postSteps
-
-      # Add any globally defined post-build steps.
-      ++ postBuildSteps;
-
-    buildChunks = pipelineChunks "build" buildSteps;
-    postBuildChunks = pipelineChunks "post" postSteps;
-    chunks = buildChunks ++ postBuildChunks;
-  in runCommandNoCC "buildkite-pipeline" {} ''
-    mkdir $out
-    echo "Generated ${toString (length chunks)} pipeline chunks"
-    ${
-      lib.concatMapStringsSep "\n"
-        (chunk: "cp ${chunk.path} $out/${chunk.filename}") chunks
-    }
-  '';
+  mkPipeline =
+    {
+      # HEAD branch of the repository on which release steps, GC
+      # anchoring and other "mainline only" steps should run.
+      headBranch
+    , # List of derivations as read by readTree (in most cases just the
+      # output of readTree.gather) that should be built in Buildkite.
+      #
+      # These are scheduled as the first build steps and run as fast as
+      # possible, in order, without any concurrency restrictions.
+      drvTargets
+    , # Derivation map of a parent commit. Only targets which no longer
+      # correspond to the content of this map will be built. Passing an
+      # empty map will always build all targets.
+      parentTargetMap ? { }
+    , # A list of plain Buildkite step structures to run alongside the
+      # build for all drvTargets, but before proceeding with any
+      # post-build actions such as status reporting.
+      #
+      # Can be used for things like code formatting checks.
+      additionalSteps ? [ ]
+    , # A list of plain Buildkite step structures to run after all
+      # previous steps succeeded.
+      #
+      # Can be used for status reporting steps and the like.
+      postBuildSteps ? [ ]
+    }:
+    let
+      # Convert a target into all of its build and post-build steps,
+      # treated separately as they need to be in different chunks.
+      targetToSteps = target:
+        let
+          step = mkStep headBranch parentTargetMap target;
+
+          # Split build/post-build steps
+          splitExtraSteps = partition ({ postStep, ... }: postStep)
+            (attrValues (mapAttrs
+              (name: value: {
+                inherit name value;
+                postStep = (value ? prompt) || (value.postBuild or false);
+              })
+              (target.meta.ci.extraSteps or { })));
+
+          mkExtraStep' = { name, value, ... }: mkExtraStep step name value;
+          extraBuildSteps = map mkExtraStep' splitExtraSteps.wrong; # 'wrong' -> no prompt
+          extraPostSteps = map mkExtraStep' splitExtraSteps.right; # 'right' -> has prompt
+        in
+        {
+          buildSteps = [ step ] ++ extraBuildSteps;
+          postSteps = extraPostSteps;
+        };
+
+      # Combine all target steps into separate build and post-build step lists.
+      steps = foldl'
+        (acc: t: {
+          buildSteps = acc.buildSteps ++ t.buildSteps;
+          postSteps = acc.postSteps ++ t.postSteps;
+        })
+        { buildSteps = [ ]; postSteps = [ ]; }
+        (map targetToSteps drvTargets);
+
+      buildSteps =
+        # Add build steps for each derivation target and their extra
+        # steps.
+        steps.buildSteps
+
+        # Add additional steps (if set).
+        ++ additionalSteps;
+
+      postSteps =
+        # Add post-build steps for each derivation target.
+        steps.postSteps
+
+        # Add any globally defined post-build steps.
+        ++ postBuildSteps;
+
+      buildChunks = pipelineChunks "build" buildSteps;
+      postBuildChunks = pipelineChunks "post" postSteps;
+      chunks = buildChunks ++ postBuildChunks;
+    in
+    runCommandNoCC "buildkite-pipeline" { } ''
+      mkdir $out
+      echo "Generated ${toString (length chunks)} pipeline chunks"
+      ${
+        lib.concatMapStringsSep "\n"
+          (chunk: "cp ${chunk.path} $out/${chunk.filename}") chunks
+      }
+    '';

   # Create a drvmap structure for the given targets, containing the
   # mapping of all target paths to their derivations. The mapping can
   # be persisted for future use.
-  mkDrvmap = drvTargets: writeText "drvmap.json" (toJSON (listToAttrs (map (target: {
-    name = mkLabel target;
-    value = {
-      drvPath = unsafeDiscardStringContext target.drvPath;
-
-      # Include the attrPath in the output to reconstruct the drv
-      # without parsing the human-readable label.
-      attrPath = target.__readTree ++ lib.optionals (target ? __subtarget) [
-        target.__subtarget
-      ];
-    };
-  }) drvTargets)));
+  mkDrvmap = drvTargets: writeText "drvmap.json" (toJSON (listToAttrs (map
+    (target: {
+      name = mkLabel target;
+      value = {
+        drvPath = unsafeDiscardStringContext target.drvPath;
+
+        # Include the attrPath in the output to reconstruct the drv
+        # without parsing the human-readable label.
+        attrPath = target.__readTree ++ lib.optionals (target ? __subtarget) [
+          target.__subtarget
+        ];
+      };
+    })
+    drvTargets)));

   # Implementation of extra step logic.
   #
@@ -278,34 +291,37 @@ in rec {

   # Create the Buildkite configuration for an extra step, optionally
   # wrapping it in a gate group.
-  mkExtraStep = parent: key: {
-    command,
-    label ? key,
-    prompt ? false,
-    needsOutput ? false,
-    branches ? null,
-    alwaysRun ? false,
-    postBuild ? false
-  }@cfg: let
-    parentLabel = parent.env.READTREE_TARGET;
+  mkExtraStep = parent: key: { command
+                             , label ? key
+                             , prompt ? false
+                             , needsOutput ? false
+                             , branches ? null
+                             , alwaysRun ? false
+                             , postBuild ? false
+                             }@cfg:
+    let
+      parentLabel = parent.env.READTREE_TARGET;

       step = {
         label = ":gear: ${label} (from ${parentLabel})";
         skip = if alwaysRun then false else parent.skip or false;
         depends_on = lib.optional (!alwaysRun && !needsOutput) parent.key;
         branches = if branches != null then lib.concatStringsSep " " branches else null;

         command = pkgs.writeShellScript "${key}-script" ''
           set -ueo pipefail
           ${lib.optionalString needsOutput "echo '~~~ Preparing build output of ${parentLabel}'"}
           ${lib.optionalString needsOutput parent.command}
           echo '+++ Running extra step command'
           exec ${command}
         '';
       };
-  in if (isString prompt)
-    then mkGatedStep {
-      inherit step label parent prompt;
-    }
+    in
+    if (isString prompt)
+    then
+      mkGatedStep
+        {
+          inherit step label parent prompt;
+        }
     else step;
 }
@@ -17,9 +17,10 @@ let
   drvSeqL = defun [ (list drv) drv drv ]
     (drvDeps: drvOut:
       let
-        drvOutOutputs = drvOut.outputs or ["out"];
+        drvOutOutputs = drvOut.outputs or [ "out" ];
       in
-      pkgs.runCommandLocal drvOut.name {
+      pkgs.runCommandLocal drvOut.name
+        {
         # we inherit all attributes in order to replicate
         # the original derivation as much as possible
         outputs = drvOutOutputs;
@@ -29,15 +30,18 @@ let
         }
         # the outputs of the original derivation are replicated
         # by creating a symlink to the old output path
-        (lib.concatMapStrings (output: ''
-          target=${lib.escapeShellArg drvOut.${output}}
-          # if the target is already a symlink, follow it until it’s not;
-          # this is done to prevent too many dereferences
-          target=$(readlink -e "$target")
-          # link to the output
-          ln -s "$target" "${"$"}${output}"
-        '') drvOutOutputs));
+        (lib.concatMapStrings
+          (output: ''
+            target=${lib.escapeShellArg drvOut.${output}}
+            # if the target is already a symlink, follow it until it’s not;
+            # this is done to prevent too many dereferences
+            target=$(readlink -e "$target")
+            # link to the output
+            ln -s "$target" "${"$"}${output}"
+          '')
+          drvOutOutputs));

-in {
+in
+{
   __functor = _: drvSeqL;
 }
@@ -14,7 +14,8 @@ let
     inherit (depot.nix.runTestsuite) runTestsuite it assertEq;
   };

-in {
+in
+{
   __functor = _: emptyDerivation;
   inherit tests;
 }
@@ -11,7 +11,7 @@

 let
   bins = getBins pkgs.s6-portable-utils [ "s6-touch" ]
-      // getBins pkgs.execline [ "importas" "exec" ];
+    // getBins pkgs.execline [ "importas" "exec" ];

   emptiness = {
     name = "empty-derivation";
@@ -21,12 +21,16 @@ let

   builder = bins.exec;
   args = [
-    bins.importas "out" "out"
-    bins.s6-touch "$out"
+    bins.importas
+    "out"
+    "out"
+    bins.s6-touch
+    "$out"
   ];
   };

-in (derivation emptiness) // {
+in
+(derivation emptiness) // {
   # This allows us to call the empty derivation
   # like a function and override fields/add new fields.
   __functor = _: overrides:
@@ -10,10 +10,17 @@ let
   ];

   fooOut = emptyDerivation {
-    builder = writeExecline "foo-builder" {} [
-      "importas" "out" "out"
-      "redirfd" "-w" "1" "$out"
-      bins.s6-echo "-n" "foo"
+    builder = writeExecline "foo-builder" { } [
+      "importas"
+      "out"
+      "out"
+      "redirfd"
+      "-w"
+      "1"
+      "$out"
+      bins.s6-echo
+      "-n"
+      "foo"
     ];
   };
@@ -26,7 +33,8 @@ let
       "bar")
   ];

-in runTestsuite "emptyDerivation" [
+in
+runTestsuite "emptyDerivation" [
   empty
   overrideBuilder
 ]
|
@ -16,14 +16,17 @@ let
|
|||
# escapeExecline [ "if" [ "somecommand" ] "true" ]
|
||||
# == ''"if" { "somecommand" } "true"''
|
||||
escapeExecline = execlineList: lib.concatStringsSep " "
|
||||
(let
|
||||
go = arg:
|
||||
if builtins.isString arg then [(escapeExeclineArg arg)]
|
||||
else if builtins.isPath arg then [(escapeExeclineArg "${arg}")]
|
||||
else if lib.isDerivation arg then [(escapeExeclineArg arg)]
|
||||
else if builtins.isList arg then [ "{" ] ++ builtins.concatMap go arg ++ [ "}" ]
|
||||
else abort "escapeExecline can only hande nested lists of strings, was ${lib.generators.toPretty {} arg}";
|
||||
in builtins.concatMap go execlineList);
|
||||
(
|
||||
let
|
||||
go = arg:
|
||||
if builtins.isString arg then [ (escapeExeclineArg arg) ]
|
||||
else if builtins.isPath arg then [ (escapeExeclineArg "${arg}") ]
|
||||
else if lib.isDerivation arg then [ (escapeExeclineArg arg) ]
|
||||
else if builtins.isList arg then [ "{" ] ++ builtins.concatMap go arg ++ [ "}" ]
|
||||
else abort "escapeExecline can only hande nested lists of strings, was ${lib.generators.toPretty {} arg}";
|
||||
in
|
||||
builtins.concatMap go execlineList
|
||||
);
|
||||
|
||||
in
|
||||
escapeExecline
|
||||
|
|
|
@ -26,14 +26,16 @@
|
|||
|
||||
let
|
||||
getBins = drv: xs:
|
||||
let f = x:
|
||||
# TODO(Profpatsch): typecheck
|
||||
let x' = if builtins.isString x then { use = x; as = x; } else x;
|
||||
in {
|
||||
name = x'.as;
|
||||
value = "${lib.getBin drv}/bin/${x'.use}";
|
||||
};
|
||||
in builtins.listToAttrs (builtins.map f xs);
|
||||
let
|
||||
f = x:
|
||||
# TODO(Profpatsch): typecheck
|
||||
let x' = if builtins.isString x then { use = x; as = x; } else x;
|
||||
in {
|
||||
name = x'.as;
|
||||
value = "${lib.getBin drv}/bin/${x'.use}";
|
||||
};
|
||||
in
|
||||
builtins.listToAttrs (builtins.map f xs);
|
||||
|
||||
|
||||
tests = import ./tests.nix {
|
||||
|
@@ -42,7 +44,8 @@ let
    inherit (depot.nix.runTestsuite) assertEq it runTestsuite;
  };

-in {
+in
+{
   __functor = _: getBins;
   inherit tests;
 }
@@ -5,11 +5,11 @@ let
   drv2 = writeScriptBin "goodbye" "tschau";

   bins = getBins drv [
-      "hello"
-      { use = "hello"; as = "also-hello"; }
-    ]
-    // getBins drv2 [ "goodbye" ]
-    ;
+    "hello"
+    { use = "hello"; as = "also-hello"; }
+  ]
+    // getBins drv2 [ "goodbye" ]
+  ;

   simple = it "path is equal to the executable name" [
     (assertEq "path"
@@ -33,8 +33,8 @@ let
   ];

 in
-  runTestsuite "getBins" [
-    simple
-    useAs
-    secondDrv
-  ]
+runTestsuite "getBins" [
+  simple
+  useAs
+  secondDrv
+]
@@ -8,31 +8,31 @@
   For example, given the following original document:

   {
     a = "b";
     c = {
       d = "e";
       f = "g";
     }
   }

   Changing the value of `a` and removing `f` can be achieved by merging the patch

   {
     a = "z";
     c.f = null;
   }

   which results in

   {
     a = "z";
     c = {
       d = "e";
     };
   }

   Pseudo-code:
   define MergePatch(Target, Patch):
     if Patch is an Object:
       if Target is not an Object:
         Target = {} # Ignore the contents and set it to an empty Object
@@ -55,19 +55,19 @@ let
   mergePatch = target: patch:
     if lib.isAttrs patch
     then
-      let target' = if lib.isAttrs target then target else {};
+      let target' = if lib.isAttrs target then target else { };
       in foldlAttrs
-          (acc: patchEl:
-            if patchEl.value == null
-            then removeAttrs acc [ patchEl.name ]
-            else acc // {
-              ${patchEl.name} =
-                mergePatch
-                  (acc.${patchEl.name} or "unnused")
-                  patchEl.value;
-            })
-          target'
-          patch
+        (acc: patchEl:
+          if patchEl.value == null
+          then removeAttrs acc [ patchEl.name ]
+          else acc // {
+            ${patchEl.name} =
+              mergePatch
+                (acc.${patchEl.name} or "unnused")
+                patchEl.value;
+          })
+        target'
+        patch
     else patch;

   inherit (depot.nix.runTestsuite)
@@ -93,46 +93,49 @@ let
   };
   emptyPatch = it "the empty patch returns the original target" [
     (assertEq "id"
-      (mergePatch testTarget {})
+      (mergePatch testTarget { })
       testTarget)
   ];
   nonAttrs = it "one side is a non-attrset value" [
     (assertEq "target is a value means the value is replaced by the patch"
       (mergePatch 42 testPatch)
-      (mergePatch {} testPatch))
+      (mergePatch { } testPatch))
     (assertEq "patch is a value means it replaces target alltogether"
       (mergePatch testTarget 42)
       42)
   ];
   rfcExamples = it "the examples from the RFC" [
     (assertEq "a subset is deleted and overwritten"
-      (mergePatch testTarget testPatch) {
+      (mergePatch testTarget testPatch)
+      {
         a = "z";
         c = {
           d = "e";
         };
       })
     (assertEq "a more complicated example from the example section"
-      (mergePatch {
-        title = "Goodbye!";
+      (mergePatch
+        {
+          title = "Goodbye!";
          author = {
            givenName = "John";
            familyName = "Doe";
          };
-        tags = [ "example" "sample" ];
-        content = "This will be unchanged";
-      } {
-        title = "Hello!";
-        phoneNumber = "+01-123-456-7890";
-        author.familyName = null;
-        tags = [ "example" ];
-      })
+          tags = [ "example" "sample" ];
+          content = "This will be unchanged";
+        }
+        {
+          title = "Hello!";
+          phoneNumber = "+01-123-456-7890";
+          author.familyName = null;
+          tags = [ "example" ];
+        })
       {
         title = "Hello!";
         phoneNumber = "+01-123-456-7890";
         author = {
           givenName = "John";
         };
         tags = [ "example" ];
         content = "This will be unchanged";
       })
@ -144,42 +147,45 @@ let
|
|||
(assertEq "test number ${toString index}"
|
||||
(mergePatch target patch)
|
||||
res);
|
||||
in it "the test suite from the RFC" [
|
||||
(r 1 {"a" = "b";} {"a" = "c";} {"a" = "c";})
|
||||
(r 2 {"a" = "b";} {"b" = "c";} {"a" = "b"; "b" = "c";})
|
||||
(r 3 {"a" = "b";} {"a" = null;} {})
|
||||
(r 4 {"a" = "b"; "b" = "c";}
|
||||
{"a" = null;}
|
||||
{"b" = "c";})
|
||||
(r 5 {"a" = ["b"];} {"a" = "c";} {"a" = "c";})
|
||||
(r 6 {"a" = "c";} {"a" = ["b"];} {"a" = ["b"];})
|
||||
(r 7 {"a" = {"b" = "c";}; }
|
||||
{"a" = {"b" = "d"; "c" = null;};}
|
||||
{"a" = {"b" = "d";};})
|
||||
(r 8 {"a" = [{"b" = "c";}];}
|
||||
{"a" = [1];}
|
||||
{"a" = [1];})
|
||||
(r 9 ["a" "b"] ["c" "d"] ["c" "d"])
|
||||
(r 10 {"a" = "b";} ["c"] ["c"])
|
||||
(r 11 {"a" = "foo";} null null)
|
||||
(r 12 {"a" = "foo";} "bar" "bar")
|
||||
(r 13 {"e" = null;} {"a" = 1;} {"e" = null; "a" = 1;})
|
||||
(r 14 [1 2]
|
||||
{"a" = "b"; "c" = null;}
|
||||
{"a" = "b";})
|
||||
(r 15 {}
|
||||
{"a" = {"bb" = {"ccc" = null;};};}
|
||||
{"a" = {"bb" = {};};})
|
||||
];
|
||||
in
|
||||
it "the test suite from the RFC" [
|
||||
(r 1 { "a" = "b"; } { "a" = "c"; } { "a" = "c"; })
|
||||
(r 2 { "a" = "b"; } { "b" = "c"; } { "a" = "b"; "b" = "c"; })
|
||||
(r 3 { "a" = "b"; } { "a" = null; } { })
|
||||
(r 4 { "a" = "b"; "b" = "c"; }
|
||||
{ "a" = null; }
|
||||
{ "b" = "c"; })
|
||||
(r 5 { "a" = [ "b" ]; } { "a" = "c"; } { "a" = "c"; })
|
||||
(r 6 { "a" = "c"; } { "a" = [ "b" ]; } { "a" = [ "b" ]; })
|
||||
(r 7 { "a" = { "b" = "c"; }; }
|
||||
{ "a" = { "b" = "d"; "c" = null; }; }
|
||||
{ "a" = { "b" = "d"; }; })
|
||||
(r 8 { "a" = [{ "b" = "c"; }]; }
|
||||
{ "a" = [ 1 ]; }
|
||||
{ "a" = [ 1 ]; })
|
||||
(r 9 [ "a" "b" ] [ "c" "d" ] [ "c" "d" ])
|
||||
(r 10 { "a" = "b"; } [ "c" ] [ "c" ])
|
||||
(r 11 { "a" = "foo"; } null null)
|
||||
(r 12 { "a" = "foo"; } "bar" "bar")
|
||||
(r 13 { "e" = null; } { "a" = 1; } { "e" = null; "a" = 1; })
|
||||
(r 14 [ 1 2 ]
|
||||
{ "a" = "b"; "c" = null; }
|
||||
{ "a" = "b"; })
|
||||
(r 15 { }
|
||||
{ "a" = { "bb" = { "ccc" = null; }; }; }
|
||||
{ "a" = { "bb" = { }; }; })
|
||||
];
|
||||
|
||||
in runTestsuite "mergePatch" [
|
||||
in
|
||||
runTestsuite "mergePatch" [
|
||||
emptyPatch
|
||||
nonAttrs
|
||||
rfcExamples
|
||||
rfcTests
|
||||
];
|
||||
|
||||
in {
|
||||
in
|
||||
{
|
||||
__functor = _: mergePatch;
|
||||
|
||||
inherit tests;
|
||||
|
|
|
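For orientation between the files: a minimal usage sketch (not part of the CL) of the RFC 7386 merge patch reformatted above, assuming it is exposed as depot.nix.mergePatch via readTree:

depot.nix.mergePatch
{ a = "b"; author = { name = "Jane"; email = "jane@example.com"; }; }
{ a = "c"; author.email = null; }
# attrsets merge recursively, null deletes a key, anything else replaces:
# => { a = "c"; author = { name = "Jane"; }; }
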
@@ -28,6 +28,6 @@ attrs:
lib.concatStrings
(lib.mapAttrsToList
(k: v: depot.nix.netstring.fromString
( depot.nix.netstring.fromString k
+ depot.nix.netstring.fromString v))
(depot.nix.netstring.fromString k
+ depot.nix.netstring.fromString v))
attrs)

@@ -6,9 +6,11 @@ let
;
in

rustSimpleBin {
name = "nint";
dependencies = [
depot.third_party.rust-crates.serde_json
];
} (builtins.readFile ./nint.rs)
rustSimpleBin
{
name = "nint";
dependencies = [
depot.third_party.rust-crates.serde_json
];
}
(builtins.readFile ./nint.rs)

@@ -43,10 +43,13 @@ let
children = readDir path;
isVisible = f: f == ".skip-subtree" || (substring 0 1 f) != ".";
names = filter isVisible (attrNames children);
in listToAttrs (map (name: {
inherit name;
value = children.${name};
}) names);
in
listToAttrs (map
(name: {
inherit name;
value = children.${name};
})
names);

# Create a mark containing the location of this attribute and
# a list of all child attribute names added by readTree.

@@ -57,12 +60,13 @@ let

# Import a file and enforce our calling convention
importFile = args: scopedArgs: path: parts: filter:
let
importedFile = if scopedArgs != {}
then builtins.scopedImport scopedArgs path
else import path;
let
importedFile =
if scopedArgs != { }
then builtins.scopedImport scopedArgs path
else import path;
pathType = builtins.typeOf importedFile;
in
in
if pathType != "lambda"
then builtins.throw "readTree: trying to import ${toString path}, but it’s a ${pathType}, you need to make it a function like { depot, pkgs, ... }"
else importedFile (filter parts (argsWithPath args parts));

@@ -76,8 +80,9 @@ let
dir = readDirVisible initPath;
joinChild = c: initPath + ("/" + c);

self = if rootDir
then { __readTree = []; }
self =
if rootDir
then { __readTree = [ ]; }
else importFile args scopedArgs initPath parts argsFilter;

# Import subdirectories of the current one, unless the special

@@ -88,33 +93,41 @@ let
# should be ignored, but its content is not inspected by
# readTree
filterDir = f: dir."${f}" == "directory";
children = if hasAttr ".skip-subtree" dir then [] else map (c: {
name = c;
value = readTree {
inherit argsFilter scopedArgs;
args = args;
initPath = (joinChild c);
rootDir = false;
parts = (parts ++ [ c ]);
};
}) (filter filterDir (attrNames dir));
children = if hasAttr ".skip-subtree" dir then [ ] else
map
(c: {
name = c;
value = readTree {
inherit argsFilter scopedArgs;
args = args;
initPath = (joinChild c);
rootDir = false;
parts = (parts ++ [ c ]);
};
})
(filter filterDir (attrNames dir));

# Import Nix files
nixFiles = if hasAttr ".skip-subtree" dir then []
nixFiles =
if hasAttr ".skip-subtree" dir then [ ]
else filter (f: f != null) (map nixFileName (attrNames dir));
nixChildren = map (c: let
p = joinChild (c + ".nix");
childParts = parts ++ [ c ];
imported = importFile args scopedArgs p childParts argsFilter;
in {
name = c;
value =
if isAttrs imported
then imported // marker childParts {}
else imported;
}) nixFiles;
nixChildren = map
(c:
let
p = joinChild (c + ".nix");
childParts = parts ++ [ c ];
imported = importFile args scopedArgs p childParts argsFilter;
in
{
name = c;
value =
if isAttrs imported
then imported // marker childParts { }
else imported;
})
nixFiles;

nodeValue = if dir ? "default.nix" then self else {};
nodeValue = if dir ? "default.nix" then self else { };

allChildren = listToAttrs (
if dir ? "default.nix"

@@ -123,9 +136,9 @@ let
);

in
if isAttrs nodeValue
then nodeValue // allChildren // (marker parts allChildren)
else nodeValue;
if isAttrs nodeValue
then nodeValue // allChildren // (marker parts allChildren)
else nodeValue;

# Function which can be used to find all readTree targets within an
# attribute set.

@@ -143,40 +156,42 @@ let
# should be included in the build.
gather = eligible: node:
if node ? __readTree then
# Include the node itself if it is eligible.
(if eligible node then [ node ] else [])
# Include the node itself if it is eligible.
(if eligible node then [ node ] else [ ])
# Include eligible children of the node
++ concatMap (gather eligible) (map (attr: node."${attr}") node.__readTreeChildren)
# Include specified sub-targets of the node
++ filter eligible (map
(k: (node."${k}" or {}) // {
# Keep the same tree location, but explicitly mark this
# node as a subtarget.
__readTree = node.__readTree;
__readTreeChildren = [];
__subtarget = k;
})
(node.meta.targets or []))
else [];
(k: (node."${k}" or { }) // {
# Keep the same tree location, but explicitly mark this
# node as a subtarget.
__readTree = node.__readTree;
__readTreeChildren = [ ];
__subtarget = k;
})
(node.meta.targets or [ ]))
else [ ];

# Determine whether a given value is a derivation.
# Copied from nixpkgs/lib for cases where lib is not available yet.
isDerivation = x: isAttrs x && x ? type && x.type == "derivation";
in {
in
{
inherit gather;

__functor = _:
{ path
, args
, filter ? (_parts: x: x)
, scopedArgs ? {} }:
readTree {
inherit args scopedArgs;
argsFilter = filter;
initPath = path;
rootDir = true;
parts = [];
};
, scopedArgs ? { }
}:
readTree {
inherit args scopedArgs;
argsFilter = filter;
initPath = path;
rootDir = true;
parts = [ ];
};

# In addition to readTree itself, some functionality is exposed that
# is useful for users of readTree.

@@ -193,7 +208,7 @@ in {
# which should be able to access the restricted folder.
#
# reason: Textual explanation for the restriction (included in errors)
restrictFolder = { folder, exceptions ? [], reason }: parts: args:
restrictFolder = { folder, exceptions ? [ ], reason }: parts: args:
if (elemAt parts 0) == folder || elem parts exceptions
then args
else args // {

@@ -224,8 +239,8 @@ in {
drvTargets = attrs: attrs // {
meta = {
targets = builtins.filter
(x: isDerivation attrs."${x}")
(builtins.attrNames attrs);
} // (attrs.meta or {});
(x: isDerivation attrs."${x}")
(builtins.attrNames attrs);
} // (attrs.meta or { });
};
}

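As a reading aid for the readTree hunks above, a hedged sketch of calling it on a hypothetical tree (paths and attribute names here are illustrative only, not from the depot):

let
readTree = import ./nix/readTree { };
# every imported file must be a function like { depot, pkgs, ... }: ...
tree = readTree {
path = ./my-tree;
args = { depot = tree; }; # lazily self-referential, as in the depot's fix-point
};
in
tree.some-dir # value of ./my-tree/some-dir/default.nix, plus __readTree markers
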
@@ -10,13 +10,13 @@ let

tree-ex = depot.nix.readTree {
path = ./test-example;
args = {};
args = { };
};

example = it "corresponds to the README example" [
(assertEq "third_party attrset"
(lib.isAttrs tree-ex.third_party
&& (! lib.isDerivation tree-ex.third_party))
&& (! lib.isDerivation tree-ex.third_party))
true)
(assertEq "third_party attrset other attribute"
tree-ex.third_party.favouriteColour

@@ -37,7 +37,7 @@ let

tree-tl = depot.nix.readTree {
path = ./test-tree-traversal;
args = {};
args = { };
};

traversal-logic = it "corresponds to the traversal logic in the README" [

@@ -82,7 +82,7 @@ let
"Picked up through the drv")
(assertEq "default.nix drv is not changed by readTree"
tree-tl.default-nix.can-be-drv
(import ./test-tree-traversal/default-nix/can-be-drv/default.nix {}))
(import ./test-tree-traversal/default-nix/can-be-drv/default.nix { }))
];

# these each call readTree themselves because the throws have to happen inside assertThrows

@@ -90,7 +90,7 @@ let
(assertThrows "this file is not a function"
(depot.nix.readTree {
path = ./test-wrong-not-a-function;
args = {};
args = { };
}).not-a-function)
# can’t test for that, assertThrows can’t catch this error
# (assertThrows "this file is a function but doesn’t have dots"

@@ -99,12 +99,13 @@ let

read-markers = depot.nix.readTree {
path = ./test-marker;
args = {};
args = { };
};

assertMarkerByPath = path:
assertEq "${lib.concatStringsSep "." path} is marked correctly"
(lib.getAttrFromPath path read-markers).__readTree path;
(lib.getAttrFromPath path read-markers).__readTree
path;

markers = it "marks nodes correctly" [
(assertMarkerByPath [ "directory-marked" ])

@@ -119,7 +120,8 @@ let
read-markers.directory-marked.nested.__readTreeChildren [ ])
];

in runTestsuite "readTree" [
in
runTestsuite "readTree" [
example
traversal-logic
wrong

@@ -1,3 +1,3 @@
{ ... }:

{}
{ }

@@ -1,3 +1,3 @@
{ ... }:

{}
{ }

@@ -1,3 +1,3 @@
{ ... }:

{}
{ }

@@ -1,3 +1,3 @@
{ ... }:

{}
{ }

@@ -3,6 +3,6 @@

with depot.nix.yants;

defun [ path drv ] (file: pkgs.runCommandNoCC "${file}.rendered.html" {} ''
defun [ path drv ] (file: pkgs.runCommandNoCC "${file}.rendered.html" { } ''
cat ${file} | ${depot.tools.cheddar}/bin/cheddar --about-filter ${file} > $out
'')

@@ -9,7 +9,7 @@ let
runExeclineLocal = name: args: execline:
runExecline name
(args // {
derivationArgs = args.derivationArgs or {} // {
derivationArgs = args.derivationArgs or { } // {
preferLocalBuild = true;
allowSubstitutes = false;
};

@@ -23,7 +23,8 @@ let
inherit pkgs;
};

in {
in
{
__functor = _: runExecline;
local = runExeclineLocal;
inherit tests;

@@ -35,32 +35,32 @@

let
bins = getBins pkgs.execline [
"execlineb"
{ use = "if"; as = "execlineIf"; }
"redirfd"
"importas"
"exec"
]
// getBins pkgs.s6-portable-utils [
"s6-cat"
"s6-grep"
"s6-touch"
"s6-test"
"s6-chmod"
];
"execlineb"
{ use = "if"; as = "execlineIf"; }
"redirfd"
"importas"
"exec"
]
// getBins pkgs.s6-portable-utils [
"s6-cat"
"s6-grep"
"s6-touch"
"s6-test"
"s6-chmod"
];

in

# TODO: move name into the attrset
name:
{
# a string to pass as stdin to the execline script
stdin ? ""
# a program wrapping the actual execline invocation;
# should be in Bernstein-chaining style
# a string to pass as stdin to the execline script
stdin ? ""
# a program wrapping the actual execline invocation;
# should be in Bernstein-chaining style
, builderWrapper ? bins.exec
# additional arguments to pass to the derivation
, derivationArgs ? {}
# additional arguments to pass to the derivation
, derivationArgs ? { }
}:
# the execline script as a nested list of string,
# representing the blocks;

@@ -90,33 +90,33 @@ derivation (derivationArgs // {
passAsFile = [
"_runExeclineScript"
"_runExeclineStdin"
] ++ derivationArgs.passAsFile or [];
] ++ derivationArgs.passAsFile or [ ];

# the default, exec acts as identity executable
builder = builderWrapper;

args = [
bins.importas # import script file as $script
"-ui" # drop the envvar afterwards
"script" # substitution name
bins.importas # import script file as $script
"-ui" # drop the envvar afterwards
"script" # substitution name
"_runExeclineScriptPath" # passed script file

bins.importas # do the same for $stdin
bins.importas # do the same for $stdin
"-ui"
"stdin"
"_runExeclineStdinPath"

bins.redirfd # now we
"-r" # read the file
"0" # into the stdin of execlineb
"$stdin" # that was given via stdin
bins.redirfd # now we
"-r" # read the file
"0" # into the stdin of execlineb
"$stdin" # that was given via stdin

bins.execlineb # the actual invocation
bins.execlineb # the actual invocation
# TODO(Profpatsch): depending on the use-case, -S0 might not be enough
# in all use-cases, then a wrapper for execlineb arguments
# should be added (-P, -S, -s).
"-S0" # set $@ inside the execline script
"-W" # die on syntax error
"$script" # substituted by importas
"-S0" # set $@ inside the execline script
"-W" # die on syntax error
"$script" # substituted by importas
];
})

@@ -1,23 +1,29 @@
{ stdenv, pkgs, runExecline, runExeclineLocal, getBins, writeScript
# https://www.mail-archive.com/skaware@list.skarnet.org/msg01256.html
, coreutils }:
{ stdenv
, pkgs
, runExecline
, runExeclineLocal
, getBins
, writeScript
# https://www.mail-archive.com/skaware@list.skarnet.org/msg01256.html
, coreutils
}:

let

bins = getBins coreutils [ "mv" ]
// getBins pkgs.execline [
"execlineb"
{ use = "if"; as = "execlineIf"; }
"redirfd"
"importas"
]
// getBins pkgs.s6-portable-utils [
"s6-chmod"
"s6-grep"
"s6-touch"
"s6-cat"
"s6-test"
];
// getBins pkgs.execline [
"execlineb"
{ use = "if"; as = "execlineIf"; }
"redirfd"
"importas"
]
// getBins pkgs.s6-portable-utils [
"s6-chmod"
"s6-grep"
"s6-touch"
"s6-cat"
"s6-test"
];

# execline block of depth 1
block = args: builtins.map (arg: " ${arg}") args ++ [ "" ];

@@ -31,49 +37,80 @@ let
builder = bins.execlineIf;
args =
(block [
bins.redirfd "-r" "0" file # read file to stdin
bins.s6-grep "-F" "-q" line # and grep for the line
bins.redirfd
"-r"
"0"
file # read file to stdin
bins.s6-grep
"-F"
"-q"
line # and grep for the line
])
++ [
# if the block succeeded, touch $out
bins.importas "-ui" "out" "out"
bins.s6-touch "$out"
bins.importas
"-ui"
"out"
"out"
bins.s6-touch
"$out"
];
preferLocalBuild = true;
allowSubstitutes = false;
};

# basic test that touches out
basic = runExeclineLocal "run-execline-test-basic" {
} [
"importas" "-ui" "out" "out"
"${bins.s6-touch}" "$out"
basic = runExeclineLocal "run-execline-test-basic"
{ } [
"importas"
"-ui"
"out"
"out"
"${bins.s6-touch}"
"$out"
];

# whether the stdin argument works as intended
stdin = fileHasLine "foo" (runExeclineLocal "run-execline-test-stdin" {
stdin = "foo\nbar\nfoo";
} [
"importas" "-ui" "out" "out"
# this pipes stdout of s6-cat to $out
# and s6-cat redirects from stdin to stdout
"redirfd" "-w" "1" "$out" bins.s6-cat
stdin = fileHasLine "foo" (runExeclineLocal "run-execline-test-stdin"
{
stdin = "foo\nbar\nfoo";
} [
"importas"
"-ui"
"out"
"out"
# this pipes stdout of s6-cat to $out
# and s6-cat redirects from stdin to stdout
"redirfd"
"-w"
"1"
"$out"
bins.s6-cat
]);


wrapWithVar = runExeclineLocal "run-execline-test-wrap-with-var" {
builderWrapper = writeScript "var-wrapper" ''
#!${bins.execlineb} -S0
export myvar myvalue $@
'';
} [
"importas" "-ui" "v" "myvar"
"if" [ bins.s6-test "myvalue" "=" "$v" ]
"importas" "out" "out"
bins.s6-touch "$out"
wrapWithVar = runExeclineLocal "run-execline-test-wrap-with-var"
{
builderWrapper = writeScript "var-wrapper" ''
#!${bins.execlineb} -S0
export myvar myvalue $@
'';
} [
"importas"
"-ui"
"v"
"myvar"
"if"
[ bins.s6-test "myvalue" "=" "$v" ]
"importas"
"out"
"out"
bins.s6-touch
"$out"
];

in [
in
[
basic
stdin
wrapWithVar

@@ -38,11 +38,11 @@ let
;

bins = depot.nix.getBins pkgs.coreutils [ "printf" ]
// depot.nix.getBins pkgs.s6-portable-utils [ "s6-touch" "s6-false" "s6-cat" ];
// depot.nix.getBins pkgs.s6-portable-utils [ "s6-touch" "s6-false" "s6-cat" ];

# Returns true if the given expression throws when `deepSeq`-ed
throws = expr:
!(builtins.tryEval (builtins.deepSeq expr {})).success;
!(builtins.tryEval (builtins.deepSeq expr { })).success;

# rewrite the builtins.partition result
# to use `ok` and `err` instead of `right` and `wrong`.

@@ -99,11 +99,12 @@ let
(context: desc: res:
if res
then { yep = { test = desc; }; }
else { nope = {
test = desc;
inherit context;
};
});
else {
nope = {
test = desc;
inherit context;
};
});

# assert that left and right values are equal
assertEq = defun [ string any any AssertResult ]

@@ -111,7 +112,7 @@ let
let
context = { not-equal = { inherit left right; }; };
in
assertBoolContext context desc (left == right));
assertBoolContext context desc (left == right));

# assert that the expression throws when `deepSeq`-ed
assertThrows = defun [ string any AssertResult ]

@@ -119,7 +120,7 @@ let
let
context = { should-throw = { inherit expr; }; };
in
assertBoolContext context desc (throws expr));
assertBoolContext context desc (throws expr));

# assert that the expression does not throw when `deepSeq`-ed
assertDoesNotThrow = defun [ string any AssertResult ]

@@ -144,31 +145,50 @@ let
yep = _: true;
nope = _: false;
};
res = partitionTests (it:
(partitionTests goodAss it.asserts).err == []
) itResults;
prettyRes = lib.generators.toPretty {} res;
res = partitionTests
(it:
(partitionTests goodAss it.asserts).err == [ ]
)
itResults;
prettyRes = lib.generators.toPretty { } res;
in
if res.err == []
then depot.nix.runExecline.local "testsuite-${name}-successful" {} [
"importas" "out" "out"
if res.err == [ ]
then
depot.nix.runExecline.local "testsuite-${name}-successful" { } [
"importas"
"out"
"out"
# force derivation to rebuild if test case list changes
"ifelse" [ bins.s6-false ] [
bins.printf "" (builtins.hashString "sha512" prettyRes)
"ifelse"
[ bins.s6-false ]
[
bins.printf
""
(builtins.hashString "sha512" prettyRes)
]
"if" [ bins.printf "%s\n" "testsuite ${name} successful!" ]
bins.s6-touch "$out"
"if"
[ bins.printf "%s\n" "testsuite ${name} successful!" ]
bins.s6-touch
"$out"
]
else depot.nix.runExecline.local "testsuite-${name}-failed" {
stdin = prettyRes + "\n";
} [
"importas" "out" "out"
"if" [ bins.printf "%s\n" "testsuite ${name} failed!" ]
"if" [ bins.s6-cat ]
"exit" "1"
else
depot.nix.runExecline.local "testsuite-${name}-failed"
{
stdin = prettyRes + "\n";
} [
"importas"
"out"
"out"
"if"
[ bins.printf "%s\n" "testsuite ${name} failed!" ]
"if"
[ bins.s6-cat ]
"exit"
"1"
]);

in {
in
{
inherit
assertEq
assertThrows

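A small sketch of the testsuite API reformatted above, using only names visible in the hunks (it, assertEq, runTestsuite); the arithmetic example itself is hypothetical:

let
inherit (depot.nix.runTestsuite) runTestsuite it assertEq;

addition = it "adds numbers" [
(assertEq "1 + 1 is 2" (1 + 1) 2)
];
in
runTestsuite "arithmetic" [ addition ]
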
@@ -45,14 +45,16 @@ let
let
withLeading = p: if builtins.substring 0 1 p == "/" then p else "/" + p;
fullPath =
/**/ if builtins.isPath path then path
/**/
if builtins.isPath path then path
else if builtins.isString path then (root + withLeading path)
else builtins.throw "Unsupported path type ${builtins.typeOf path}";
strPath = toString fullPath;
contextPath = "${fullPath}";
belowRoot = builtins.substring rootLength (-1) strPath;
prefix = builtins.substring 0 rootLength strPath;
in assert toString root == prefix; {
in
assert toString root == prefix; {
src = contextPath;
dst = belowRoot;
};

@@ -61,10 +63,12 @@ let
in

# TODO(sterni): teach readTree to also read symlinked directories,
# so we ln -sT instead of cp -aT.
pkgs.runCommandNoCC "sparse-${builtins.baseNameOf root}" {} (
lib.concatMapStrings ({ src, dst }: ''
mkdir -p "$(dirname "$out${dst}")"
cp -aT --reflink=auto "${src}" "$out${dst}"
'') symlinks
# so we ln -sT instead of cp -aT.
pkgs.runCommandNoCC "sparse-${builtins.baseNameOf root}" { } (
lib.concatMapStrings
({ src, dst }: ''
mkdir -p "$(dirname "$out${dst}")"
cp -aT --reflink=auto "${src}" "$out${dst}"
'')
symlinks
)

@@ -4,22 +4,24 @@ let
# if so sets `isTag` to `true` and sets the name and value.
# If not, sets `isTag` to `false` and sets `errmsg`.
verifyTag = tag:
let cases = builtins.attrNames tag;
len = builtins.length cases;
let
cases = builtins.attrNames tag;
len = builtins.length cases;
in
if builtins.length cases == 1
then let name = builtins.head cases; in {
isTag = true;
name = name;
val = tag.${name};
errmsg = null;
}
then
let name = builtins.head cases; in {
isTag = true;
name = name;
val = tag.${name};
errmsg = null;
}
else {
isTag = false;
errmsg =
( "match: an instance of a sum is an attrset "
+ "with exactly one element, yours had ${toString len}"
+ ", namely: ${lib.generators.toPretty {} cases}" );
("match: an instance of a sum is an attrset "
+ "with exactly one element, yours had ${toString len}"
+ ", namely: ${lib.generators.toPretty {} cases}");
name = null;
val = null;
};

@@ -63,21 +65,22 @@ let
# ] 1
# => { smol = 1; }
discrDef = defTag: fs: v:
let res = lib.findFirst
(t: t.val v)
null
(map assertIsTag fs);
let
res = lib.findFirst
(t: t.val v)
null
(map assertIsTag fs);
in
if res == null
then { ${defTag} = v; }
else { ${res.name} = v; };
if res == null
then { ${defTag} = v; }
else { ${res.name} = v; };

# Like `discrDef`, but fail if there is no match.
discr = fs: v:
let res = discrDef null fs v; in
assert lib.assertMsg (res != null)
"tag.discr: No predicate found that matches ${lib.generators.toPretty {} v}";
res;
assert lib.assertMsg (res != null)
"tag.discr: No predicate found that matches ${lib.generators.toPretty {} v}";
res;

# The canonical pattern matching primitive.
# A sum value is an attribute set with one element,

@@ -104,17 +107,17 @@ let
match = sum: matcher:
let cases = builtins.attrNames sum;
in assert
let len = builtins.length cases; in
lib.assertMsg (len == 1)
( "match: an instance of a sum is an attrset "
+ "with exactly one element, yours had ${toString len}"
+ ", namely: ${lib.generators.toPretty {} cases}" );
let len = builtins.length cases; in
lib.assertMsg (len == 1)
("match: an instance of a sum is an attrset "
+ "with exactly one element, yours had ${toString len}"
+ ", namely: ${lib.generators.toPretty {} cases}");
let case = builtins.head cases;
in assert
lib.assertMsg (matcher ? ${case})
( "match: \"${case}\" is not a valid case of this sum, "
lib.assertMsg (matcher ? ${case})
("match: \"${case}\" is not a valid case of this sum, "
+ "the matcher accepts: ${lib.generators.toPretty {}
(builtins.attrNames matcher)}" );
(builtins.attrNames matcher)}");
matcher.${case} sum.${case};

# A `match` with the arguments flipped.

@@ -148,15 +151,16 @@ let
;
};

in {
inherit
verifyTag
tagName
tagValue
discr
discrDef
match
matchLam
tests
;
in
{
inherit
verifyTag
tagName
tagValue
discr
discrDef
match
matchLam
tests
;
}

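A brief sketch of how discr and match from this file compose (the concrete values are illustrative):

let
inherit (depot.nix.tag) discr match;

tagged = discr [
{ bool = builtins.isBool; }
{ int = builtins.isInt; }
] 42;
# => { int = 42; }
in
match tagged {
bool = b: if b then "yes" else "no";
int = i: toString i;
}
# => "42"
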
@@ -17,7 +17,7 @@ let
errmsg = null;
})
(assertEq "is not Tag"
(removeAttrs (verifyTag { foo = "bar"; baz = 42; }) ["errmsg"])
(removeAttrs (verifyTag { foo = "bar"; baz = 42; }) [ "errmsg" ])
{
isTag = false;
name = null;

@@ -41,7 +41,8 @@ let
(discr [
{ bool = lib.isBool; }
{ int = lib.isInt; }
] true)
]
true)
{ bool = true; })
(assertEq "fallback to default"
(discrDef "def" [

@@ -53,19 +54,24 @@ let

match-test = it "can match things" [
(assertEq "match example"
(let
success = { res = 42; };
failure = { err = "no answer"; };
matcher = {
res = i: i + 1;
err = _: 0;
};
in {
one = match success matcher;
two = match failure matcher;
(
let
success = { res = 42; };
failure = { err = "no answer"; };
matcher = {
res = i: i + 1;
err = _: 0;
};
in
{
one = match success matcher;
two = match failure matcher;
}
)
{
one = 43;
two = 0;
})
{ one = 43;
two = 0; })
(assertEq "matchLam & pipe"
(lib.pipe { foo = 42; } [
(matchLam {

@@ -81,8 +87,8 @@ let
];

in
runTestsuite "tag" [
isTag-test
discr-test
match-test
]
runTestsuite "tag" [
isTag-test
discr-test
match-test
]

@@ -27,4 +27,5 @@ let
# Actual ACL entries
ACLs = list acl;
};
in config: pkgs.writeText "tailscale-acl.json" (toJSON (aclConfig config))
in
config: pkgs.writeText "tailscale-acl.json" (toJSON (aclConfig config))

@@ -34,14 +34,14 @@ let
basename = builtins.unsafeDiscardStringContext
(builtins.baseNameOf strPath);
in
# If p is a direct child of storeDir, we need to remove
# If p is a direct child of storeDir, we need to remove
# the leading hash as well to make sure that:
# `storePathName drv == storePathName (toString drv)`.
if noStoreDir == basename
then builtins.substring 33 (-1) basename
else basename
if noStoreDir == basename
then builtins.substring 33 (-1) basename
else basename
else builtins.throw "Don't know how to get (base)name of "
+ lib.generators.toPretty {} p;
+ lib.generators.toPretty { } p;

/* Query the type of a path exposing the same information as would be by
`builtins.readDir`, but for a single, specific target path.

@@ -106,7 +106,7 @@ let
# We need to call toString to prevent unsafeDiscardStringContext
# from importing a path into store which messes with base- and
# dirname of course.
path'= builtins.unsafeDiscardStringContext (toString path);
path' = builtins.unsafeDiscardStringContext (toString path);
# To read the containing directory we absolutely need
# to keep the string context, otherwise a derivation
# would not be realized before our check (at eval time)

@@ -120,20 +120,22 @@ let
# directory. If not, either the target doesn't exist or is a regular file.
# TODO(sterni): is there a way to check reliably if the symlink target exists?
isSymlinkDir = builtins.pathExists (path' + "/.");
in {
in
{
${thisPathType} =
/**/ if thisPathType != "symlink" then true
else if isSymlinkDir then "directory"
else "regular-or-missing";
/**/
if thisPathType != "symlink" then true
else if isSymlinkDir then "directory"
else "regular-or-missing";
};

pathType' = path:
let
p = pathType path;
in
if p ? missing
then builtins.throw "${lib.generators.toPretty {} path} does not exist"
else p;
if p ? missing
then builtins.throw "${lib.generators.toPretty {} path} does not exist"
else p;

/* Check whether the given path is a directory.
Throws if the path in question doesn't exist.

@@ -151,9 +153,11 @@ let

Type: path(-like) -> bool
*/
realPathIsDirectory = path: let
pt = pathType' path;
in pt ? directory || pt.symlink or null == "directory";
realPathIsDirectory = path:
let
pt = pathType' path;
in
pt ? directory || pt.symlink or null == "directory";

/* Check whether the given path is a regular file.
Throws if the path in question doesn't exist.

@@ -169,7 +173,8 @@ let
*/
isSymlink = path: pathType' path ? symlink;

in {
in
{
inherit
storePathName
pathType

@@ -26,38 +26,53 @@ let
pathPredicates = it "judges paths correctly" (lib.flatten [
# isDirectory
(assertUtilsPred "directory isDirectory"
(isDirectory ./directory) true)
(isDirectory ./directory)
true)
(assertUtilsPred "symlink not isDirectory"
(isDirectory ./symlink-directory) false)
(isDirectory ./symlink-directory)
false)
(assertUtilsPred "file not isDirectory"
(isDirectory ./directory/file) false)
(isDirectory ./directory/file)
false)
# realPathIsDirectory
(assertUtilsPred "directory realPathIsDirectory"
(realPathIsDirectory ./directory) true)
(realPathIsDirectory ./directory)
true)
(assertUtilsPred "symlink to directory realPathIsDirectory"
(realPathIsDirectory ./symlink-directory) true)
(realPathIsDirectory ./symlink-directory)
true)
(assertUtilsPred "realPathIsDirectory resolves chained symlinks"
(realPathIsDirectory ./symlink-symlink-directory) true)
(realPathIsDirectory ./symlink-symlink-directory)
true)
# isRegularFile
(assertUtilsPred "file isRegularFile"
(isRegularFile ./directory/file) true)
(isRegularFile ./directory/file)
true)
(assertUtilsPred "symlink not isRegularFile"
(isRegularFile ./symlink-file) false)
(isRegularFile ./symlink-file)
false)
(assertUtilsPred "directory not isRegularFile"
(isRegularFile ./directory) false)
(isRegularFile ./directory)
false)
# isSymlink
(assertUtilsPred "symlink to file isSymlink"
(isSymlink ./symlink-file) true)
(isSymlink ./symlink-file)
true)
(assertUtilsPred "symlink to directory isSymlink"
(isSymlink ./symlink-directory) true)
(isSymlink ./symlink-directory)
true)
(assertUtilsPred "symlink to symlink isSymlink"
(isSymlink ./symlink-symlink-file) true)
(isSymlink ./symlink-symlink-file)
true)
(assertUtilsPred "symlink to missing file isSymlink"
(isSymlink ./missing) true)
(isSymlink ./missing)
true)
(assertUtilsPred "directory not isSymlink"
(isSymlink ./directory) false)
(isSymlink ./directory)
false)
(assertUtilsPred "file not isSymlink"
(isSymlink ./directory/file) false)
(isSymlink ./directory/file)
false)
# missing files throw
(assertThrows "isDirectory throws on missing file"
(isDirectory ./does-not-exist))

@@ -89,15 +104,18 @@ let

storePathNameTests = it "correctly gets the basename of a store path" [
(assertEq "base name of a derivation"
(storePathName depot.tools.cheddar) depot.tools.cheddar.name)
(storePathName depot.tools.cheddar)
depot.tools.cheddar.name)
(assertEq "base name of a store path string"
(storePathName cheddarStorePath) depot.tools.cheddar.name)
(storePathName cheddarStorePath)
depot.tools.cheddar.name)
(assertEq "base name of a path within a store path"
(storePathName "${cheddarStorePath}/bin/cheddar") "cheddar")
(assertEq "base name of a path"
(storePathName ../default.nix) "default.nix")
(assertEq "base name of a cleanSourced path"
(storePathName cleanedSource) cleanedSource.name)
(storePathName cleanedSource)
cleanedSource.name)
];
in

@@ -1,6 +1,6 @@
{ depot, pkgs, ... }:

{ name, src, deps ? (_: []), emacs ? pkgs.emacs27-nox }:
{ name, src, deps ? (_: [ ]), emacs ? pkgs.emacs27-nox }:

let
inherit (pkgs) emacsPackages emacsPackagesGen;

@@ -8,11 +8,13 @@ let

finalEmacs = (emacsPackagesGen emacs).emacsWithPackages deps;

srcFile = if isString src
srcFile =
if isString src
then toFile "${name}.el" src
else src;

in depot.nix.writeScriptBin name ''
in
depot.nix.writeScriptBin name ''
#!/bin/sh
${finalEmacs}/bin/emacs --batch --no-site-file --script ${srcFile} $@
''

@@ -14,9 +14,10 @@ name:
# "env": don’t substitute, set # and 0…n environment variables, where n=$#
# "none": don’t substitute or set any positional arguments
# "env-no-push": like "env", but bypass the push-phase. Not recommended.
argMode ? "var",
# Number of arguments to be substituted as variables (passed to "var"/"-s" or "var-full"/"-S"
readNArgs ? 0,
argMode ? "var"
, # Number of arguments to be substituted as variables (passed to "var"/"-s" or "var-full"/"-S"
readNArgs ? 0
,
}:
# Nested list of lists of commands.
# Inner lists are translated to execline blocks.

@@ -24,7 +25,7 @@ argList:

let
env =
if argMode == "var" then "s${toString readNArgs}"
if argMode == "var" then "s${toString readNArgs}"
else if argMode == "var-full" then "S${toString readNArgs}"
else if argMode == "env" then ""
else if argMode == "none" then "P"

@@ -32,7 +33,7 @@ let
else abort ''"${toString argMode}" is not a valid argMode, use one of "var", "var-full", "env", "none", "env-no-push".'';

in
depot.nix.writeScript name ''
#!${pkgs.execline}/bin/execlineb -W${env}
${depot.nix.escapeExecline argList}
''
depot.nix.writeScript name ''
#!${pkgs.execline}/bin/execlineb -W${env}
${depot.nix.escapeExecline argList}
''

@@ -5,25 +5,31 @@

let
bins = depot.nix.getBins pkgs.s6-portable-utils [
"s6-cat"
"s6-chmod"
];
"s6-cat"
"s6-chmod"
];

in
name:
# string of the executable script that is put in $out
script:

depot.nix.runExecline name {
depot.nix.runExecline name
{
stdin = script;
derivationArgs = {
preferLocalBuild = true;
allowSubstitutes = false;
};
} [
"importas" "out" "out"
"importas"
"out"
"out"
# this pipes stdout of s6-cat to $out
# and s6-cat redirects from stdin to stdout
"if" [ "redirfd" "-w" "1" "$out" bins.s6-cat ]
bins.s6-chmod "0755" "$out"
"if"
[ "redirfd" "-w" "1" "$out" bins.s6-cat ]
bins.s6-chmod
"0755"
"$out"
]

@@ -2,62 +2,71 @@

let
bins = depot.nix.getBins pkgs.s6-portable-utils [ "s6-ln" "s6-ls" "s6-touch" ]
;
;

linkTo = name: path: depot.nix.runExecline.local name {} [
"importas" "out" "out"
bins.s6-ln "-s" path "$out"
linkTo = name: path: depot.nix.runExecline.local name { } [
"importas"
"out"
"out"
bins.s6-ln
"-s"
path
"$out"
];

# Build a rust executable, $out is the executable.
rustSimple = args@{name, ...}: src:
rustSimple = args@{ name, ... }: src:
linkTo name "${rustSimpleBin args src}/bin/${name}";

# Like `rustSimple`, but put the binary in `$out/bin/`.
rustSimpleBin = {
name,
dependencies ? [],
doCheck ? true,
}: src:
rustSimpleBin =
{ name
, dependencies ? [ ]
, doCheck ? true
,
}: src:
(if doCheck then testRustSimple else pkgs.lib.id)
(pkgs.buildRustCrate ({
pname = name;
version = "1.0.0";
crateName = name;
crateBin = [ name ];
dependencies = dependencies;
src = pkgs.runCommandLocal "write-main.rs" {
src = src;
passAsFile = [ "src" ];
} ''
mkdir -p $out/src/bin
cp "$srcPath" $out/src/bin/${name}.rs
find $out
'';
}));
(pkgs.buildRustCrate ({
pname = name;
version = "1.0.0";
crateName = name;
crateBin = [ name ];
dependencies = dependencies;
src = pkgs.runCommandLocal "write-main.rs"
{
src = src;
passAsFile = [ "src" ];
} ''
mkdir -p $out/src/bin
cp "$srcPath" $out/src/bin/${name}.rs
find $out
'';
}));

# Build a rust library, that can be used as dependency to `rustSimple`.
# Wrapper around `pkgs.buildRustCrate`, takes all its arguments.
rustSimpleLib = {
name,
dependencies ? [],
doCheck ? true,
}: src:
rustSimpleLib =
{ name
, dependencies ? [ ]
, doCheck ? true
,
}: src:
(if doCheck then testRustSimple else pkgs.lib.id)
(pkgs.buildRustCrate ({
pname = name;
version = "1.0.0";
crateName = name;
dependencies = dependencies;
src = pkgs.runCommandLocal "write-lib.rs" {
src = src;
passAsFile = [ "src" ];
} ''
mkdir -p $out/src
cp "$srcPath" $out/src/lib.rs
find $out
'';
}));
(pkgs.buildRustCrate ({
pname = name;
version = "1.0.0";
crateName = name;
dependencies = dependencies;
src = pkgs.runCommandLocal "write-lib.rs"
{
src = src;
passAsFile = [ "src" ];
} ''
mkdir -p $out/src
cp "$srcPath" $out/src/lib.rs
find $out
'';
}));

/* Takes a `buildRustCrate` derivation as an input,
* builds it with `{ buildTests = true; }` and runs

@@ -72,19 +81,30 @@ let
testRustSimple = rustDrv:
let
crate = buildTests: rustDrv.override { inherit buildTests; };
tests = depot.nix.runExecline.local "${rustDrv.name}-tests-run" {} [
"importas" "out" "out"
"if" [
"pipeline" [ bins.s6-ls "${crate true}/tests" ]
"forstdin" "-o0" "test"
"importas" "test" "test"
tests = depot.nix.runExecline.local "${rustDrv.name}-tests-run" { } [
"importas"
"out"
"out"
"if"
[
"pipeline"
[ bins.s6-ls "${crate true}/tests" ]
"forstdin"
"-o0"
"test"
"importas"
"test"
"test"
"${crate true}/tests/$test"
]
bins.s6-touch "$out"
bins.s6-touch
"$out"
];
in depot.nix.drvSeqL [ tests ] (crate false);
in
depot.nix.drvSeqL [ tests ] (crate false);

in {
in
{
inherit
rustSimple
rustSimpleBin

@@ -11,15 +11,20 @@ let
coreutils
;

run = drv: depot.nix.runExecline.local "run-${drv.name}" {} [
"if" [ drv ]
"importas" "out" "out"
"${coreutils}/bin/touch" "$out"
run = drv: depot.nix.runExecline.local "run-${drv.name}" { } [
"if"
[ drv ]
"importas"
"out"
"out"
"${coreutils}/bin/touch"
"$out"
];

rustTransitiveLib = rustSimpleLib {
name = "transitive";
} ''
rustTransitiveLib = rustSimpleLib
{
name = "transitive";
} ''
pub fn transitive(s: &str) -> String {
let mut new = s.to_string();
new.push_str(" 1 2 3");

@@ -37,10 +42,11 @@ let
}
'';

rustTestLib = rustSimpleLib {
name = "test_lib";
dependencies = [ rustTransitiveLib ];
} ''
rustTestLib = rustSimpleLib
{
name = "test_lib";
dependencies = [ rustTransitiveLib ];
} ''
extern crate transitive;
use transitive::{transitive};
pub fn test() -> String {

@@ -48,10 +54,11 @@ let
}
'';

rustWithLib = run (rustSimple {
name = "rust-with-lib";
dependencies = [ rustTestLib ];
} ''
rustWithLib = run (rustSimple
{
name = "rust-with-lib";
dependencies = [ rustTestLib ];
} ''
extern crate test_lib;

fn main() {

@@ -60,7 +67,8 @@ let
'');


in depot.nix.readTree.drvTargets {
in
depot.nix.readTree.drvTargets {
inherit
rustTransitiveLib
rustWithLib

@ -6,10 +6,10 @@
|
|||
#
|
||||
# All types (should) compose as expected.
|
||||
|
||||
{ lib ? (import <nixpkgs> {}).lib, ... }:
|
||||
{ lib ? (import <nixpkgs> { }).lib, ... }:
|
||||
|
||||
with builtins; let
|
||||
prettyPrint = lib.generators.toPretty {};
|
||||
prettyPrint = lib.generators.toPretty { };
|
||||
|
||||
# typedef' :: struct {
|
||||
# name = string;
|
||||
|
@ -34,41 +34,44 @@ with builtins; let
|
|||
#
|
||||
# This function is the low-level primitive used to create types. For
|
||||
# many cases the higher-level 'typedef' function is more appropriate.
|
||||
typedef' = { name, checkType
|
||||
, checkToBool ? (result: result.ok)
|
||||
, toError ? (_: result: result.err)
|
||||
, def ? null
|
||||
, match ? null }: {
|
||||
inherit name checkToBool toError;
|
||||
typedef' =
|
||||
{ name
|
||||
, checkType
|
||||
, checkToBool ? (result: result.ok)
|
||||
, toError ? (_: result: result.err)
|
||||
, def ? null
|
||||
, match ? null
|
||||
}: {
|
||||
inherit name checkToBool toError;
|
||||
|
||||
# check :: a -> bool
|
||||
#
|
||||
# This function is used to determine whether a given type is
|
||||
# conformant.
|
||||
check = value: checkToBool (checkType value);
|
||||
# check :: a -> bool
|
||||
#
|
||||
# This function is used to determine whether a given type is
|
||||
# conformant.
|
||||
check = value: checkToBool (checkType value);
|
||||
|
||||
# checkType :: a -> struct { ok = bool; err = option string; }
|
||||
#
|
||||
# This function checks whether the passed value is type conformant
|
||||
# and returns an optional type error string otherwise.
|
||||
inherit checkType;
|
||||
# checkType :: a -> struct { ok = bool; err = option string; }
|
||||
#
|
||||
# This function checks whether the passed value is type conformant
|
||||
# and returns an optional type error string otherwise.
|
||||
inherit checkType;
|
||||
|
||||
# __functor :: a -> a
|
||||
#
|
||||
# This function checks whether the passed value is type conformant
|
||||
# and throws an error if it is not.
|
||||
#
|
||||
# The name of this function is a special attribute in Nix that
|
||||
# makes it possible to execute a type attribute set like a normal
|
||||
# function.
|
||||
__functor = self: value:
|
||||
let result = self.checkType value;
|
||||
in if checkToBool result then value
|
||||
else throw (toError value result);
|
||||
};
|
||||
# __functor :: a -> a
|
||||
#
|
||||
# This function checks whether the passed value is type conformant
|
||||
# and throws an error if it is not.
|
||||
#
|
||||
# The name of this function is a special attribute in Nix that
|
||||
# makes it possible to execute a type attribute set like a normal
|
||||
# function.
|
||||
__functor = self: value:
|
||||
let result = self.checkType value;
|
||||
in if checkToBool result then value
|
||||
else throw (toError value result);
|
||||
};
|
||||
|
||||
typeError = type: val:
|
||||
"expected type '${type}', but value '${prettyPrint val}' is of type '${typeOf val}'";
|
||||
"expected type '${type}', but value '${prettyPrint val}' is of type '${typeOf val}'";
|
||||
|
||||
# typedef :: string -> (a -> bool) -> type
|
||||
#
|
||||
|
@ -85,27 +88,34 @@ with builtins; let
|
|||
});
|
||||
};
|
||||
|
||||
checkEach = name: t: l: foldl' (acc: e:
|
||||
let res = t.checkType e;
|
||||
checkEach = name: t: l: foldl'
|
||||
(acc: e:
|
||||
let
|
||||
res = t.checkType e;
|
||||
isT = t.checkToBool res;
|
||||
in {
|
||||
ok = acc.ok && isT;
|
||||
err = if isT
|
||||
then acc.err
|
||||
else acc.err + "${prettyPrint e}: ${t.toError e res}\n";
|
||||
}) { ok = true; err = "expected type ${name}, but found:\n"; } l;
|
||||
in lib.fix (self: {
|
||||
in
|
||||
{
|
||||
ok = acc.ok && isT;
|
||||
err =
|
||||
if isT
|
||||
then acc.err
|
||||
else acc.err + "${prettyPrint e}: ${t.toError e res}\n";
|
||||
})
|
||||
{ ok = true; err = "expected type ${name}, but found:\n"; }
|
||||
l;
|
||||
in
|
||||
lib.fix (self: {
|
||||
# Primitive types
|
||||
any = typedef "any" (_: true);
|
||||
unit = typedef "unit" (v: v == {});
|
||||
int = typedef "int" isInt;
|
||||
bool = typedef "bool" isBool;
|
||||
float = typedef "float" isFloat;
|
||||
string = typedef "string" isString;
|
||||
path = typedef "path" (x: typeOf x == "path");
|
||||
drv = typedef "derivation" (x: isAttrs x && x ? "type" && x.type == "derivation");
|
||||
any = typedef "any" (_: true);
|
||||
unit = typedef "unit" (v: v == { });
|
||||
int = typedef "int" isInt;
|
||||
bool = typedef "bool" isBool;
|
||||
float = typedef "float" isFloat;
|
||||
string = typedef "string" isString;
|
||||
path = typedef "path" (x: typeOf x == "path");
|
||||
drv = typedef "derivation" (x: isAttrs x && x ? "type" && x.type == "derivation");
|
||||
function = typedef "function" (x: isFunction x || (isAttrs x && x ? "__functor"
|
||||
&& isFunction x.__functor));
|
||||
&& isFunction x.__functor));
|
||||
|
||||
# Type for types themselves. Useful when defining polymorphic types.
|
||||
type = typedef "type" (x:
|
||||
|
@ -124,7 +134,7 @@ in lib.fix (self: {
|
|||
in {
|
||||
ok = isNull v || (self.type t).checkToBool res;
|
||||
err = "expected type ${name}, but value does not conform to '${t.name}': "
|
||||
+ t.toError v res;
|
||||
+ t.toError v res;
|
||||
};
|
||||
};
|
||||
|
||||
|
@ -136,7 +146,8 @@ in lib.fix (self: {
|
|||
list = t: typedef' rec {
|
||||
name = "list<${t.name}>";
|
||||
|
||||
checkType = v: if isList v
|
||||
checkType = v:
|
||||
if isList v
|
||||
then checkEach name (self.type t) v
|
||||
else {
|
||||
ok = false;
|
||||
|
@ -147,7 +158,8 @@ in lib.fix (self: {
|
|||
attrs = t: typedef' rec {
|
||||
name = "attrs<${t.name}>";
|
||||
|
||||
checkType = v: if isAttrs v
|
||||
checkType = v:
|
||||
if isAttrs v
|
||||
then checkEach name (self.type t) (attrValues v)
|
||||
else {
|
||||
ok = false;
|
||||
|
@ -172,20 +184,23 @@ in lib.fix (self: {
|
|||
# checkField checks an individual field of the struct against
|
||||
# its definition and creates a typecheck result. These results
|
||||
# are aggregated during the actual checking.
|
||||
checkField = def: name: value: let result = def.checkType value; in rec {
|
||||
ok = def.checkToBool result;
|
||||
err = if !ok && isNull value
|
||||
then "missing required ${def.name} field '${name}'\n"
|
||||
else "field '${name}': ${def.toError value result}\n";
|
||||
};
|
||||
checkField = def: name: value:
|
||||
let result = def.checkType value; in rec {
|
||||
ok = def.checkToBool result;
|
||||
err =
|
||||
if !ok && isNull value
|
||||
then "missing required ${def.name} field '${name}'\n"
|
||||
else "field '${name}': ${def.toError value result}\n";
|
||||
};
|
||||
|
||||
# checkExtraneous determines whether a (closed) struct contains
|
||||
# any fields that are not part of the definition.
|
||||
checkExtraneous = def: has: acc:
|
||||
if (length has) == 0 then acc
|
||||
else if (hasAttr (head has) def)
|
||||
then checkExtraneous def (tail has) acc
|
||||
else checkExtraneous def (tail has) {
|
||||
then checkExtraneous def (tail has) acc
|
||||
else
|
||||
checkExtraneous def (tail has) {
|
||||
ok = false;
|
||||
err = acc.err + "unexpected struct field '${head has}'\n";
|
||||
};
|
||||
|
@ -197,85 +212,102 @@ in lib.fix (self: {
|
|||
init = { ok = true; err = ""; };
|
||||
extraneous = checkExtraneous def (attrNames value) init;
|
||||
|
||||
checkedFields = map (n:
|
||||
let v = if hasAttr n value then value."${n}" else null;
|
||||
in checkField def."${n}" n v) (attrNames def);
|
||||
checkedFields = map
|
||||
(n:
|
||||
let v = if hasAttr n value then value."${n}" else null;
|
||||
in checkField def."${n}" n v)
|
||||
(attrNames def);
|
||||
|
||||
combined = foldl' (acc: res: {
|
||||
ok = acc.ok && res.ok;
|
||||
err = if !res.ok then acc.err + res.err else acc.err;
|
||||
}) init checkedFields;
|
||||
in {
|
||||
combined = foldl'
|
||||
(acc: res: {
|
||||
ok = acc.ok && res.ok;
|
||||
err = if !res.ok then acc.err + res.err else acc.err;
|
||||
})
|
||||
init
|
||||
checkedFields;
|
||||
in
|
||||
{
|
||||
ok = combined.ok && extraneous.ok;
|
||||
err = combined.err + extraneous.err;
|
||||
};
|
||||
|
||||
struct' = name: def: typedef' {
|
||||
inherit name def;
|
||||
checkType = value: if isAttrs value
|
||||
checkType = value:
|
||||
if isAttrs value
|
||||
then (checkStruct (self.attrs self.type def) value)
|
||||
else { ok = false; err = typeError name value; };
|
||||
|
||||
toError = _: result: "expected '${name}'-struct, but found:\n" + result.err;
|
||||
toError = _: result: "expected '${name}'-struct, but found:\n" + result.err;
|
||||
};
|
||||
in arg: if isString arg then (struct' arg) else (struct' "anon" arg);
|
||||
in
|
||||
arg: if isString arg then (struct' arg) else (struct' "anon" arg);
|
||||
|
||||
# Enums & pattern matching
|
||||
enum =
|
||||
let
|
||||
plain = name: def: typedef' {
|
||||
inherit name def;
|
||||
let
|
||||
plain = name: def: typedef' {
|
||||
inherit name def;
|
||||
|
||||
checkType = (x: isString x && elem x def);
|
||||
checkToBool = x: x;
|
||||
toError = value: _: "'${prettyPrint value} is not a member of enum ${name}";
|
||||
};
|
||||
enum' = name: def: lib.fix (e: (plain name def) // {
|
||||
match = x: actions: deepSeq (map e (attrNames actions)) (
|
||||
let
|
||||
actionKeys = attrNames actions;
|
||||
missing = foldl' (m: k: if (elem k actionKeys) then m else m ++ [ k ]) [] def;
|
||||
in if (length missing) > 0
|
||||
then throw "Missing match action for members: ${prettyPrint missing}"
|
||||
else actions."${e x}");
|
||||
});
|
||||
in arg: if isString arg then (enum' arg) else (enum' "anon" arg);
|
||||
checkType = (x: isString x && elem x def);
|
||||
checkToBool = x: x;
|
||||
toError = value: _: "'${prettyPrint value} is not a member of enum ${name}";
|
||||
};
|
||||
enum' = name: def: lib.fix (e: (plain name def) // {
|
||||
match = x: actions: deepSeq (map e (attrNames actions)) (
|
||||
let
|
||||
actionKeys = attrNames actions;
|
||||
missing = foldl' (m: k: if (elem k actionKeys) then m else m ++ [ k ]) [ ] def;
|
||||
in
|
||||
if (length missing) > 0
|
||||
then throw "Missing match action for members: ${prettyPrint missing}"
|
||||
else actions."${e x}"
|
||||
);
|
||||
});
|
||||
in
|
||||
arg: if isString arg then (enum' arg) else (enum' "anon" arg);
|
||||
|
||||
# Sum types
#
# The representation of a sum type is an attribute set with only one
# value, where the key of the value denotes the variant of the type.
sum =
let
plain = name: def: typedef' {
inherit name def;
checkType = (x:
let variant = elemAt (attrNames x) 0;
in if isAttrs x && length (attrNames x) == 1 && hasAttr variant def
then let t = def."${variant}";
v = x."${variant}";
res = t.checkType v;
in if t.checkToBool res
then { ok = true; }
else {
ok = false;
err = "while checking '${name}' variant '${variant}': "
+ t.toError v res;
}
let
plain = name: def: typedef' {
inherit name def;
checkType = (x:
let variant = elemAt (attrNames x) 0;
in if isAttrs x && length (attrNames x) == 1 && hasAttr variant def
then
let
t = def."${variant}";
v = x."${variant}";
res = t.checkType v;
in
if t.checkToBool res
then { ok = true; }
else {
ok = false;
err = "while checking '${name}' variant '${variant}': "
+ t.toError v res;
}
else { ok = false; err = typeError name x; }
);
};
sum' = name: def: lib.fix (s: (plain name def) // {
match = x: actions:
let variant = deepSeq (s x) (elemAt (attrNames x) 0);
actionKeys = attrNames actions;
defKeys = attrNames def;
missing = foldl' (m: k: if (elem k actionKeys) then m else m ++ [ k ]) [] defKeys;
in if (length missing) > 0
then throw "Missing match action for variants: ${prettyPrint missing}"
else actions."${variant}" x."${variant}";
});
in arg: if isString arg then (sum' arg) else (sum' "anon" arg);
);
};
sum' = name: def: lib.fix (s: (plain name def) // {
match = x: actions:
let
variant = deepSeq (s x) (elemAt (attrNames x) 0);
actionKeys = attrNames actions;
defKeys = attrNames def;
missing = foldl' (m: k: if (elem k actionKeys) then m else m ++ [ k ]) [ ] defKeys;
in
if (length missing) > 0
then throw "Missing match action for variants: ${prettyPrint missing}"
else actions."${variant}" x."${variant}";
});
in
arg: if isString arg then (sum' arg) else (sum' "anon" arg);

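A usage sketch for the sum type shown above, assuming the `struct` and `string` combinators from the same library (the names mirror the tests later in this CL):

  creature = sum "creature" {
    human = struct { name = string; };
    pet = string;
  };
  some-human = { human.name = "Brynhjulf"; }; # exactly one variant key
  greeting = creature.match some-human {
    human = v: "It's a human named ${v.name}";
    pet = v: "It's a pet called ${v}";
  };
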
# Typed function definitions
#

@@ -289,15 +321,19 @@ in lib.fix (self: {
mkFunc = sig: f: {
inherit sig;
__toString = self: foldl' (s: t: "${s} -> ${t.name}")
"λ :: ${(head self.sig).name}" (tail self.sig);
"λ :: ${(head self.sig).name}"
(tail self.sig);
__functor = _: f;
};

defun' = sig: func: if length sig > 2
defun' = sig: func:
if length sig > 2
then mkFunc sig (x: defun' (tail sig) (func ((head sig) x)))
else mkFunc sig (x: ((head (tail sig)) (func ((head sig) x))));

in sig: func: if length sig < 2
in
sig: func:
if length sig < 2
then (throw "Signature must at least have two types (a -> b)")
else defun' sig func;

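A sketch of the curried typed functions this defines: each argument is checked against its position in the signature, and the final type checks the result (same shape as the `func` test further down):

  func = defun [ string int string ]
    (name: age: "${name} is ${toString age} years old");

  func "Brynhjulf" 42 # => "Brynhjulf is 42 years old"
  # Applying a non-string first argument throws a type error.
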
@@ -311,21 +347,22 @@ in lib.fix (self: {
# depend on the value being of the wrapped type.
restrict = name: pred: t:
let restriction = "${t.name}[${name}]"; in typedef' {
name = restriction;
checkType = v:
let res = t.checkType v;
in
name = restriction;
checkType = v:
let res = t.checkType v;
in
if !(t.checkToBool res)
then res
else
let
iok = pred v;
in if isBool iok then {
in
if isBool iok then {
ok = iok;
err = "${prettyPrint v} does not conform to restriction '${restriction}'";
} else
# use throw here to avoid spamming the build log
# use throw here to avoid spamming the build log
throw "restriction '${restriction}' predicate returned unexpected value '${prettyPrint iok}' instead of boolean";
};
};

})

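A hypothetical use of `restrict`, wrapping an existing type with a predicate (the "positive" name is illustrative, not part of this CL):

  positive = restrict "positive" (i: i > 0) int;
  positive 42 # passes both the int check and the predicate
  # positive (-3) fails: "-3 does not conform to restriction 'int[positive]'"
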
@@ -25,7 +25,7 @@ let
};

testPrimitives = it "checks that all primitive types match" [
(assertDoesNotThrow "unit type" (unit {}))
(assertDoesNotThrow "unit type" (unit { }))
(assertDoesNotThrow "int type" (int 15))
(assertDoesNotThrow "bool type" (bool false))
(assertDoesNotThrow "float type" (float 13.37))

@@ -44,7 +44,7 @@ let
# Test that structures work as planned.
person = struct "person" {
name = string;
age = int;
age = int;

contact = option (struct {
email = string;

@@ -55,7 +55,7 @@ let
testStruct = it "checks that structures work as intended" [
(assertDoesNotThrow "person struct" (person {
name = "Brynhjulf";
age = 42;
age = 42;
contact.email = "brynhjulf@yants.nix";
}))
];

@@ -70,7 +70,8 @@ let

testEnum = it "checks enum definitions and matching" [
(assertEq "enum is matched correctly"
"It is in fact red!" (colour.match "red" colourMatcher))
"It is in fact red!"
(colour.match "red" colourMatcher))
(assertThrows "out of bounds enum fails"
(colour.match "alpha" (colourMatcher // {
alpha = "This should never happen";

@@ -97,7 +98,8 @@ let
testSum = it "checks sum types definitions and matching" [
(assertDoesNotThrow "creature sum type" some-human)
(assertEq "sum type is matched correctly"
"It's a human named Brynhjulf" (creature.match some-human {
"It's a human named Brynhjulf"
(creature.match some-human {
human = v: "It's a human named ${v.name}";
pet = v: "It's not supposed to be a pet!";
})

@@ -106,7 +108,7 @@ let

# Test curried function definitions
func = defun [ string int string ]
(name: age: "${name} is ${toString age} years old");
(name: age: "${name} is ${toString age} years old");

testFunctions = it "checks function definitions" [
(assertDoesNotThrow "function application" (func "Brynhjulf" 42))

@@ -144,13 +146,13 @@ let
];

in
runTestsuite "yants" [
testPrimitives
testPoly
testStruct
testEnum
testSum
testFunctions
testTypes
testRestrict
]
runTestsuite "yants" [
testPrimitives
testPoly
testStruct
testEnum
testSum
testFunctions
testTypes
testRestrict
]

@@ -2,11 +2,12 @@
{ depot, pkgs, ... }:

let
checkZone = zone: file: pkgs.runCommandNoCC "${zone}-check" {} ''
checkZone = zone: file: pkgs.runCommandNoCC "${zone}-check" { } ''
${pkgs.bind}/bin/named-checkzone -i local ${zone} ${file} | tee $out
'';

in depot.nix.readTree.drvTargets {
in
depot.nix.readTree.drvTargets {
nixery-dev = checkZone "nixery.dev" ./nixery.dev.zone;
tvl-fyi = checkZone "tvl.fyi" ./tvl.fyi.zone;
tvl-su = checkZone "tvl.su" ./tvl.su.zone;

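Adding a zone to this set is one more `checkZone` call; the zone name and file below are hypothetical:

  example-com = checkZone "example.com" ./example.com.zone;
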
@@ -2,7 +2,7 @@

depot.nix.readTree.drvTargets {
# Provide a Terraform wrapper with the right provider installed.
terraform = pkgs.terraform.withPlugins(_: [
terraform = pkgs.terraform.withPlugins (_: [
depot.third_party.terraform-provider-glesys
]);
}

@@ -4,6 +4,8 @@ depot.third_party.naersk.buildPackage {
src = ./.;

buildInputs = with pkgs; [
pkgconfig openssl systemd.dev
pkgconfig
openssl
systemd.dev
];
}

@@ -2,7 +2,7 @@

depot.nix.readTree.drvTargets {
# Provide a Terraform wrapper with the right provider installed.
terraform = pkgs.terraform.withPlugins(p: [
terraform = pkgs.terraform.withPlugins (p: [
p.keycloak
]);
}

@@ -10,13 +10,17 @@
# This file is the Nix derivation used to build release binaries for
# several different architectures and operating systems.

let pkgs = import ((import <nixpkgs> {}).fetchFromGitHub {
owner = "NixOS";
repo = "nixpkgs-channels";
rev = "541d9cce8af7a490fb9085305939569567cb58e6";
sha256 = "0jgz72hhzkd5vyq5v69vpljjlnf0lqaz7fh327bvb3cvmwbfxrja";
}) {};
in with pkgs; buildGoPackage rec {
let
pkgs = import
((import <nixpkgs> { }).fetchFromGitHub {
owner = "NixOS";
repo = "nixpkgs-channels";
rev = "541d9cce8af7a490fb9085305939569567cb58e6";
sha256 = "0jgz72hhzkd5vyq5v69vpljjlnf0lqaz7fh327bvb3cvmwbfxrja";
})
{ };
in
with pkgs; buildGoPackage rec {
name = "kontemplate-${version}";
version = "canon";
src = ./.;

@@ -29,8 +33,8 @@ in with pkgs; buildGoPackage rec {
# reason for setting the 'allowGoReference' flag.
dontStrip = true; # Linker configuration handles stripping
allowGoReference = true;
CGO_ENABLED="0";
GOCACHE="off";
CGO_ENABLED = "0";
GOCACHE = "off";

# Configure release builds via the "build-matrix" script:
buildInputs = [ git ];

@@ -4,7 +4,8 @@
let
inherit (builtins) listToAttrs;
inherit (lib) range;
in {
in
{
imports = [
"${depot.path}/ops/modules/atward.nix"
"${depot.path}/ops/modules/clbot.nix"

@@ -55,7 +56,13 @@ in {

initrd = {
availableKernelModules = [
"igb" "xhci_pci" "nvme" "ahci" "usbhid" "usb_storage" "sr_mod"
"igb"
"xhci_pci"
"nvme"
"ahci"
"usbhid"
"usb_storage"
"sr_mod"
];

# Enable SSH in the initrd so that we can enter disk encryption

@@ -189,7 +196,7 @@ in {
++ lukegb.keys.all
++ [ grfn.keys.whitby ]
++ sterni.keys.all
;
;
};
};

@@ -205,7 +212,8 @@ in {
age.secrets =
let
secretFile = name: depot.ops.secrets."${name}.age";
in {
in
{
clbot.file = secretFile "clbot";
gerrit-queue.file = secretFile "gerrit-queue";
grafana.file = secretFile "grafana";

@@ -509,15 +517,16 @@ in {
job_name = "node";
scrape_interval = "5s";
static_configs = [{
targets = ["localhost:${toString config.services.prometheus.exporters.node.port}"];
targets = [ "localhost:${toString config.services.prometheus.exporters.node.port}" ];
}];
} {
job_name = "nginx";
scrape_interval = "5s";
static_configs = [{
targets = ["localhost:${toString config.services.prometheus.exporters.nginx.port}"];
}
{
job_name = "nginx";
scrape_interval = "5s";
static_configs = [{
targets = [ "localhost:${toString config.services.prometheus.exporters.nginx.port}" ];
}];
}];
}];
};

services.grafana = {

@@ -526,58 +535,62 @@ in {
domain = "status.tvl.su";
rootUrl = "https://status.tvl.su";
analytics.reporting.enable = false;
extraOptions = let
options = {
auth = {
generic_oauth = {
enabled = true;
client_id = "grafana";
scopes = "openid profile email";
name = "TVL";
email_attribute_path = "mail";
login_attribute_path = "sub";
name_attribute_path = "displayName";
auth_url = "https://auth.tvl.fyi/auth/realms/TVL/protocol/openid-connect/auth";
token_url = "https://auth.tvl.fyi/auth/realms/TVL/protocol/openid-connect/token";
api_url = "https://auth.tvl.fyi/auth/realms/TVL/protocol/openid-connect/userinfo";
extraOptions =
let
options = {
auth = {
generic_oauth = {
enabled = true;
client_id = "grafana";
scopes = "openid profile email";
name = "TVL";
email_attribute_path = "mail";
login_attribute_path = "sub";
name_attribute_path = "displayName";
auth_url = "https://auth.tvl.fyi/auth/realms/TVL/protocol/openid-connect/auth";
token_url = "https://auth.tvl.fyi/auth/realms/TVL/protocol/openid-connect/token";
api_url = "https://auth.tvl.fyi/auth/realms/TVL/protocol/openid-connect/userinfo";

# Give lukegb, grfn, tazjin "Admin" rights.
role_attribute_path = "((sub == 'lukegb' || sub == 'grfn' || sub == 'tazjin') && 'Admin') || 'Editor'";
# Give lukegb, grfn, tazjin "Admin" rights.
role_attribute_path = "((sub == 'lukegb' || sub == 'grfn' || sub == 'tazjin') && 'Admin') || 'Editor'";

# Allow creating new Grafana accounts from OAuth accounts.
allow_sign_up = true;
# Allow creating new Grafana accounts from OAuth accounts.
allow_sign_up = true;
};

anonymous = {
enabled = true;
org_name = "The Virus Lounge";
org_role = "Viewer";
};

basic.enabled = false;
oauth_auto_login = true;
disable_login_form = true;
};

anonymous = {
enabled = true;
org_name = "The Virus Lounge";
org_role = "Viewer";
};

basic.enabled = false;
oauth_auto_login = true;
disable_login_form = true;
};
};
inherit (builtins) typeOf replaceStrings listToAttrs concatLists;
inherit (lib) toUpper mapAttrsToList nameValuePair concatStringsSep;
inherit (builtins) typeOf replaceStrings listToAttrs concatLists;
inherit (lib) toUpper mapAttrsToList nameValuePair concatStringsSep;

# Take ["auth" "generic_oauth" "enabled"] and turn it into OPTIONS_GENERIC_OAUTH_ENABLED.
encodeName = raw: replaceStrings ["."] ["_"] (toUpper (concatStringsSep "_" raw));
# Take ["auth" "generic_oauth" "enabled"] and turn it into OPTIONS_GENERIC_OAUTH_ENABLED.
encodeName = raw: replaceStrings [ "." ] [ "_" ] (toUpper (concatStringsSep "_" raw));

# Turn an option value into a string, but we want bools to be sensible strings and not "1" or "".
optionToString = value:
if (typeOf value) == "bool" then
if value then "true" else "false"
else builtins.toString value;
# Turn an option value into a string, but we want bools to be sensible strings and not "1" or "".
optionToString = value:
if (typeOf value) == "bool" then
if value then "true" else "false"
else builtins.toString value;

# Turn an nested options attrset into a flat listToAttrs-compatible list.
encodeOptions = prefix: inp: concatLists (mapAttrsToList (name: value:
if (typeOf value) == "set"
then encodeOptions (prefix ++ [name]) value
else [ (nameValuePair (encodeName (prefix ++ [name])) (optionToString value)) ]
) inp);
in listToAttrs (encodeOptions [] options);
# Turn an nested options attrset into a flat listToAttrs-compatible list.
encodeOptions = prefix: inp: concatLists (mapAttrsToList
(name: value:
if (typeOf value) == "set"
then encodeOptions (prefix ++ [ name ]) value
else [ (nameValuePair (encodeName (prefix ++ [ name ])) (optionToString value)) ]
)
inp);
in
listToAttrs (encodeOptions [ ] options);

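To illustrate what the flattening helpers above compute (derived from the code shown here, not part of the CL):

  encodeName [ "auth" "generic_oauth" "enabled" ]
  # => "AUTH_GENERIC_OAUTH_ENABLED"

  optionToString true # => "true", not "1"

  listToAttrs (encodeOptions [ ] { auth.anonymous.enabled = true; })
  # => { AUTH_ANONYMOUS_ENABLED = "true"; }
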
provision = {
enable = true;

@@ -623,8 +636,8 @@ in {

security.sudo.extraRules = [
{
groups = ["wheel"];
commands = [{ command = "ALL"; options = ["NOPASSWD"]; }];
groups = [ "wheel" ];
commands = [{ command = "ALL"; options = [ "NOPASSWD" ]; }];
}
];

@@ -705,7 +718,7 @@ in {
};

# Set up a user & group for git shenanigans
groups.git = {};
groups.git = { };
users.git = {
group = "git";
isSystemUser = true;

@@ -3,7 +3,8 @@
let
cfg = config.services.depot.atward;
description = "atward - (attempt to) cleverly route queries";
in {
in
{
options.services.depot.atward = {
enable = lib.mkEnableOption description;

@@ -45,7 +45,8 @@ let
# NixOS in $STATE_DIRECTORY
(cd / && ${rebuild-system}/bin/rebuild-system)
'';
in {
in
{
options.services.depot.auto-deploy = {
enable = lib.mkEnableOption description;

@@ -29,7 +29,8 @@ let
echo "Skipping GC, enough space available"
fi
'';
in {
in
{
options.services.depot.automatic-gc = {
enable = lib.mkEnableOption description;

@@ -21,7 +21,7 @@ let
(attrValues (mapAttrs (key: value: "-${key} \"${toString value}\"") flags));

# Escapes a unit name for use in systemd
systemdEscape = name: removeSuffix "\n" (readFile (runCommandNoCC "unit-name" {} ''
systemdEscape = name: removeSuffix "\n" (readFile (runCommandNoCC "unit-name" { } ''
${pkgs.systemd}/bin/systemd-escape '${name}' >> $out
''));

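Note that systemdEscape shells out to systemd-escape at evaluation time (the escaped name is read back out of a built derivation). A sketch with a hypothetical input; systemd-escape replaces "/" with "-" and hex-escapes most other special characters:

  systemdEscape "foo/bar" # => "foo-bar"
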
@@ -42,7 +42,8 @@ let
};
};
};
in {
in
{
options.services.depot.clbot = {
enable = mkEnableOption description;

@@ -68,7 +69,7 @@ in {
# (notably the SSH private key) readable by this user outside of
# the module.
users = {
groups.clbot = {};
groups.clbot = { };

users.clbot = {
group = "clbot";

@@ -1,2 +1,2 @@
# Make readTree happy at this level.
_: {}
_: { }

@@ -8,7 +8,8 @@ let
inherit default;
type = lib.types.str;
};
in {
in
{
options.services.depot.gerrit-queue = {
enable = lib.mkEnableOption description;
gerritUrl = mkStringOption "https://cl.tvl.fyi";

@@ -12,7 +12,8 @@

let
cfg = config.services.depot.git-serving;
in {
in
{
options.services.depot.git-serving = with lib; {
enable = mkEnableOption "Enable cgit & josh configuration";

@@ -27,7 +27,8 @@ let

exec ${depot.third_party.irccat}/bin/irccat
'';
in {
in
{
options.services.depot.irccat = {
enable = lib.mkEnableOption description;

@@ -9,12 +9,13 @@ let
exec -a ${name} ${depot.ops.besadii}/bin/besadii "$@"
'';

gerritHooks = pkgs.runCommandNoCC "gerrit-hooks" {} ''
gerritHooks = pkgs.runCommandNoCC "gerrit-hooks" { } ''
mkdir -p $out
ln -s ${besadiiWithConfig "change-merged"} $out/change-merged
ln -s ${besadiiWithConfig "patchset-created"} $out/patchset-created
'';
in {
in
{
services.gerrit = {
enable = true;
listenAddress = "[::]:4778"; # 4778 - grrt

@@ -6,7 +6,8 @@ let
cfg = config.services.depot.nixery;
description = "Nixery - container images on-demand";
storagePath = "/var/lib/nixery/${pkgs.nixpkgsCommits.unstable}";
in {
in
{
options.services.depot.nixery = {
enable = lib.mkEnableOption description;

@@ -19,7 +19,8 @@ let
reverse_proxy = true
set_xauthrequest = true
'';
in {
in
{
options.services.depot.oauth2_proxy = {
enable = lib.mkEnableOption description;

@@ -4,7 +4,8 @@
let
cfg = config.services.depot.owothia;
description = "owothia - i'm a service owo";
in {
in
{
options.services.depot.owothia = {
enable = lib.mkEnableOption description;

@@ -2,7 +2,8 @@

let
cfg = config.services.depot.panettone;
in {
in
{
options.services.depot.panettone = with lib; {
enable = mkEnableOption "Panettone issue tracker";

@@ -62,23 +63,26 @@ in {
assertion =
cfg.dbHost != "localhost" || config.services.postgresql.enable;
message = "Panettone requires a postgresql database";
} {
assertion =
cfg.dbHost != "localhost" || config.services.postgresql.enableTCPIP;
message = "Panettone can only connect to the postgresql database over TCP";
} {
assertion =
cfg.dbHost != "localhost" || (lib.any
(user: user.name == cfg.dbUser)
config.services.postgresql.ensureUsers);
message = "Panettone requires a database user";
} {
assertion =
cfg.dbHost != "localhost" || (lib.any
(db: db == cfg.dbName)
config.services.postgresql.ensureDatabases);
message = "Panettone requires a database";
}];
}
{
assertion =
cfg.dbHost != "localhost" || config.services.postgresql.enableTCPIP;
message = "Panettone can only connect to the postgresql database over TCP";
}
{
assertion =
cfg.dbHost != "localhost" || (lib.any
(user: user.name == cfg.dbUser)
config.services.postgresql.ensureUsers);
message = "Panettone requires a database user";
}
{
assertion =
cfg.dbHost != "localhost" || (lib.any
(db: db == cfg.dbName)
config.services.postgresql.ensureDatabases);
message = "Panettone requires a database";
}];

systemd.services.panettone = {
wantedBy = [ "multi-user.target" ];

@@ -3,7 +3,8 @@
let
cfg = config.services.depot.paroxysm;
description = "TVL's majestic IRC bot";
in {
in
{
options.services.depot.paroxysm.enable = lib.mkEnableOption description;

config = lib.mkIf cfg.enable {

@@ -8,7 +8,8 @@ let
enableDaemon = true;
withKDE = false;
};
in {
in
{
options.services.depot.quassel = with lib; {
enable = mkEnableOption "Quassel IRC daemon";

@@ -70,7 +71,7 @@ in {
group = "quassel";
};

groups.quassel = {};
groups.quassel = { };
};
};
}

@@ -14,7 +14,8 @@ let
inherit default;
type = lib.types.str;
};
in {
in
{
options.services.depot.restic = {
enable = lib.mkEnableOption description;
bucketEndpoint = mkStringOption "objects.dc-sto1.glesys.net";

@@ -27,8 +27,9 @@ let
prepareArgs = args:
concatStringsSep " "
(attrValues (mapAttrs (key: value: "-${key} \"${toString value}\"")
(args // overrideArgs)));
in {
(args // overrideArgs)));
in
{
options.services.depot.smtprelay = {
enable = mkEnableOption description;

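What `prepareArgs` renders, for hypothetical flags (overrideArgs, defined outside this hunk, is merged in the same way):

  prepareArgs { hostname = "localhost"; listen = "127.0.0.1:2525"; }
  # => -hostname "localhost" -listen "127.0.0.1:2525"
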
@@ -4,7 +4,8 @@

let
cfg = config.services.depot.sourcegraph;
in {
in
{
options.services.depot.sourcegraph = with lib; {
enable = mkEnableOption "SourceGraph code search engine";

@@ -51,7 +52,8 @@ in {
# Sourcegraph needs a higher nofile limit, it logs warnings
# otherwise (unclear whether it actually affects the service).
extraOptions = [
"--ulimit" "nofile=10000:10000"
"--ulimit"
"nofile=10000:10000"
];
};
};

@@ -13,7 +13,7 @@ let

# All Buildkite hooks are actually besadii, but it's being invoked
# with different names.
buildkiteHooks = pkgs.runCommandNoCC "buildkite-hooks" {} ''
buildkiteHooks = pkgs.runCommandNoCC "buildkite-hooks" { } ''
mkdir -p $out/bin
ln -s ${besadiiWithConfig "post-command"} $out/bin/post-command
'';

@@ -22,7 +22,8 @@ let
echo 'username=buildkite'
echo "password=$(jq -r '.gerritPassword' /run/agenix/buildkite-besadii-config)"
'';
in {
in
{
options.services.depot.buildkite = {
enable = lib.mkEnableOption description;
agentCount = lib.mkOption {

@@ -33,39 +34,43 @@ in {

config = lib.mkIf cfg.enable {
# Run the Buildkite agents using the default upstream module.
services.buildkite-agents = builtins.listToAttrs (map (n: rec {
name = "whitby-${toString n}";
value = {
inherit name;
enable = true;
tokenPath = "/run/agenix/buildkite-agent-token";
hooks.post-command = "${buildkiteHooks}/bin/post-command";
services.buildkite-agents = builtins.listToAttrs (map
(n: rec {
name = "whitby-${toString n}";
value = {
inherit name;
enable = true;
tokenPath = "/run/agenix/buildkite-agent-token";
hooks.post-command = "${buildkiteHooks}/bin/post-command";

runtimePackages = with pkgs; [
bash
coreutils
credentialHelper
curl
git
gnutar
gzip
jq
nix
];
};
}) agents);
runtimePackages = with pkgs; [
bash
coreutils
credentialHelper
curl
git
gnutar
gzip
jq
nix
];
};
})
agents);

# Set up a group for all Buildkite agent users
users = {
groups.buildkite-agents = {};
users = builtins.listToAttrs (map (n: rec {
name = "buildkite-agent-whitby-${toString n}";
value = {
isSystemUser = true;
group = lib.mkForce "buildkite-agents";
extraGroups = [ name "docker" ];
};
}) agents);
groups.buildkite-agents = { };
users = builtins.listToAttrs (map
(n: rec {
name = "buildkite-agent-whitby-${toString n}";
value = {
isSystemUser = true;
group = lib.mkForce "buildkite-agents";
extraGroups = [ name "docker" ];
};
})
agents);
};
};
}

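Either way it is formatted, the listToAttrs/map pattern above expands to one agent definition per element of `agents` (presumably a list of integers; its definition is outside this hunk). Roughly:

  # for agents = [ 1 2 ]:
  services.buildkite-agents = {
    whitby-1 = { name = "whitby-1"; enable = true; /* ... */ };
    whitby-2 = { name = "whitby-2"; enable = true; /* ... */ };
  };
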
@@ -26,7 +26,8 @@ let

inherit (depot.ops) users;

in {
in
{
services.openldap = {
enable = true;

@@ -48,7 +49,7 @@ in {

"cn=schema".includes =
map (schema: "${pkgs.openldap}/etc/schema/${schema}.ldif")
[ "core" "cosine" "inetorgperson" "nis" ];
[ "core" "cosine" "inetorgperson" "nis" ];
};

# Contents are immutable at runtime, and adding user accounts etc.

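The schema map above expands to the store paths of the bundled OpenLDAP schema files, i.e. roughly:

  "cn=schema".includes = [
    "${pkgs.openldap}/etc/schema/core.ldif"
    "${pkgs.openldap}/etc/schema/cosine.ldif"
    "${pkgs.openldap}/etc/schema/inetorgperson.ldif"
    "${pkgs.openldap}/etc/schema/nis.ldif"
  ];
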
@@ -16,9 +16,10 @@ let
drvTargets = depot.ci.targets;
additionalSteps = [ protoCheck ];

parentTargetMap = if (externalArgs ? parentTargetMap)
parentTargetMap =
if (externalArgs ? parentTargetMap)
then builtins.fromJSON (builtins.readFile externalArgs.parentTargetMap)
else {};
else { };

postBuildSteps = [
# After successful builds, create a gcroot for builds on canon.

@@ -40,7 +41,8 @@ let
};

drvmap = depot.nix.buildkite.mkDrvmap depot.ci.targets;
in pkgs.runCommandNoCC "depot-pipeline" {} ''
in
pkgs.runCommandNoCC "depot-pipeline" { } ''
mkdir $out
cp -r ${pipeline}/* $out
cp ${drvmap} $out/drvmap.json

@@ -22,6 +22,6 @@ in

defun [ path (attrs agenixSecret) (attrs any) ]
(path: secrets:
depot.nix.readTree.drvTargets
# Import each secret into the Nix store
(builtins.mapAttrs (name: _: "${path}/${name}") secrets))
depot.nix.readTree.drvTargets
# Import each secret into the Nix store
(builtins.mapAttrs (name: _: "${path}/${name}") secrets))

@@ -15,7 +15,8 @@ let
whitby = "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAILNh/w4BSKov0jdz3gKBc98tpoLta5bb87fQXWBhAl2I";

default.publicKeys = tazjin ++ grfn ++ sterni ++ [ whitby ];
in {
in
{
"besadii.age" = default;
"buildkite-agent-token.age" = default;
"buildkite-graphql-token.age" = default;

third_party/abseil_cpp/default.nix (vendored, 16 lines changed)
@@ -8,7 +8,7 @@ in pkgs.abseil-cpp.override {

/* TODO(tazjin): update abseil subtree

fullLlvm11Stdenv.mkDerivation rec {
fullLlvm11Stdenv.mkDerivation rec {
pname = "abseil-cpp";
version = "20200519-768eb2ca+tvl-1";
src = ./.;

@@ -17,15 +17,15 @@ fullLlvm11Stdenv.mkDerivation rec {
# doCheck = true;

cmakeFlags = [
"-DCMAKE_CXX_STANDARD=17"
#"-DABSL_RUN_TESTS=1"
"-DCMAKE_CXX_STANDARD=17"
#"-DABSL_RUN_TESTS=1"
];

meta = with lib; {
description = "An open-source collection of C++ code designed to augment the C++ standard library";
homepage = https://abseil.io/;
license = licenses.asl20;
maintainers = [ maintainers.andersk ];
description = "An open-source collection of C++ code designed to augment the C++ standard library";
homepage = https://abseil.io/;
license = licenses.asl20;
maintainers = [ maintainers.andersk ];
};
}
}
*/

third_party/agenix/default.nix (vendored, 3 lines changed)
@@ -9,7 +9,8 @@ let
agenix = import src {
inherit pkgs;
};
in {
in
{
inherit src;
cli = agenix.agenix;
}

third_party/arion/default.nix (vendored, 14 lines changed)
@@ -1,8 +1,10 @@
{ pkgs, ... }:

(import (pkgs.fetchFromGitHub {
owner = "hercules-ci";
repo = "arion";
rev = "db6d4d7490dff363de60cebbece3ae9361e3ce43";
sha256 = "0d8nqmc7fjshigax2g47ips262v8ml27x0ksq59kmprgb7ckzi5l";
}) { inherit pkgs; }).arion
(import
(pkgs.fetchFromGitHub {
owner = "hercules-ci";
repo = "arion";
rev = "db6d4d7490dff363de60cebbece3ae9361e3ce43";
sha256 = "0d8nqmc7fjshigax2g47ips262v8ml27x0ksq59kmprgb7ckzi5l";
})
{ inherit pkgs; }).arion

third_party/bat_syntaxes/default.nix (vendored, 3 lines changed)
@@ -8,7 +8,8 @@

let
inherit (pkgs) bat runCommandNoCC;
in runCommandNoCC "bat-syntaxes.bin" {} ''
in
runCommandNoCC "bat-syntaxes.bin" { } ''
export HOME=$PWD
mkdir -p .config/bat/syntaxes
cp ${./Prolog.sublime-syntax} .config/bat/syntaxes

third_party/cgit/default.nix (vendored, 3 lines changed)
@@ -2,7 +2,8 @@

let
inherit (pkgs) stdenv gzip bzip2 xz luajit zlib autoconf openssl pkgconfig;
in stdenv.mkDerivation rec {
in
stdenv.mkDerivation rec {
pname = "cgit";
version = "master";
src = ./.;

third_party/clj2nix/default.nix (vendored, 3 lines changed)
@@ -5,4 +5,5 @@ pkgs.callPackage "${(pkgs.fetchFromGitHub {
repo = "clj2nix";
rev = "3d0a38c954c8e0926f57de1d80d357df05fc2f94";
sha256 = "0y77b988qdgsrp4w72v1f5rrh33awbps2qdgp2wr2nmmi44541w5";
})}/clj2nix.nix" {}
})}/clj2nix.nix"
{ }

third_party/default.nix (vendored, 55 lines changed)
@@ -24,32 +24,33 @@
# be able to pass `specialArgs`. We depend on this because `depot`
# needs to be partially evaluated in NixOS configuration before
# module imports are resolved.
nixos = {
configuration,
specialArgs ? {},
system ? builtins.currentSystem,
...
}:
let
eval = import "${pkgs.path}/nixos/lib/eval-config.nix" {
inherit specialArgs system;
modules = [
configuration
(import "${depot.path + "/ops/modules/default-imports.nix"}")
];
};
nixos =
{ configuration
, specialArgs ? { }
, system ? builtins.currentSystem
, ...
}:
let
eval = import "${pkgs.path}/nixos/lib/eval-config.nix" {
inherit specialArgs system;
modules = [
configuration
(import "${depot.path + "/ops/modules/default-imports.nix"}")
];
};

# This is for `nixos-rebuild build-vm'.
vmConfig = (import "${pkgs.path}/nixos/lib/eval-config.nix" {
inherit specialArgs system;
modules = [
configuration
"${pkgs.path}/nixos/modules/virtualisation/qemu-vm.nix"
];
}).config;
in {
inherit (eval) pkgs config options;
system = eval.config.system.build.toplevel;
vm = vmConfig.system.build.vm;
};
# This is for `nixos-rebuild build-vm'.
vmConfig = (import "${pkgs.path}/nixos/lib/eval-config.nix" {
inherit specialArgs system;
modules = [
configuration
"${pkgs.path}/nixos/modules/virtualisation/qemu-vm.nix"
];
}).config;
in
{
inherit (eval) pkgs config options;
system = eval.config.system.build.toplevel;
vm = vmConfig.system.build.vm;
};
}

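A usage sketch of the `nixos` wrapper defined above, assuming it is exposed as depot.third_party.nixos (the configuration path is hypothetical):

  machine = depot.third_party.nixos {
    configuration = ./my-machine.nix;
  };
  # machine.system is the toplevel derivation; machine.vm the build-vm variant.
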
third_party/dhall/default.nix (vendored, 13 lines changed)
@@ -5,11 +5,14 @@ let
# broken most of the time. The binaries are also fully static
# builds, instead of the half-static crap that nixpkgs produces.
easy-dhall-nix =
import (builtins.fetchTarball {
url = "https://github.com/justinwoo/easy-dhall-nix/archive/eae7f64c4d6c70681e5a56c84198236930ba425e.tar.gz";
sha256 = "1y2x15v8a679vlpxazjpibfwajp6zph60f8wjcm4xflbvazk0dx7";
}) { inherit pkgs; };
in {
import
(builtins.fetchTarball {
url = "https://github.com/justinwoo/easy-dhall-nix/archive/eae7f64c4d6c70681e5a56c84198236930ba425e.tar.gz";
sha256 = "1y2x15v8a679vlpxazjpibfwajp6zph60f8wjcm4xflbvazk0dx7";
})
{ inherit pkgs; };
in
{
dhall = easy-dhall-nix.dhall-simple;
dhall-bash = easy-dhall-nix.dhall-bash-simple;
dhall-docs = easy-dhall-nix.dhall-docs-simple;

third_party/elmPackages_0_18/default.nix (vendored, 14 lines changed)
@@ -7,9 +7,11 @@

{ pkgs, ... }:

(import (pkgs.fetchFromGitHub {
owner = "NixOS";
repo = "nixpkgs";
rev = "14f9ee66e63077539252f8b4550049381a082518";
sha256 = "1wn7nmb1cqfk2j91l3rwc6yhimfkzxprb8wknw5wi57yhq9m6lv1";
}) {}).elmPackages
(import
(pkgs.fetchFromGitHub {
owner = "NixOS";
repo = "nixpkgs";
rev = "14f9ee66e63077539252f8b4550049381a082518";
sha256 = "1wn7nmb1cqfk2j91l3rwc6yhimfkzxprb8wknw5wi57yhq9m6lv1";
})
{ }).elmPackages

third_party/gerrit_plugins/builder.nix (vendored, 58 lines changed)
@@ -1,33 +1,35 @@
{ depot, pkgs, ... }:
{
buildGerritBazelPlugin = {
name,
src,
depsOutputHash,
overlayPluginCmd ? ''
cp -R "${src}" "$out/plugins/${name}"
'',
postPatch ? "",
}: ((depot.third_party.gerrit.override {
name = "${name}.jar";
buildGerritBazelPlugin =
{ name
, src
, depsOutputHash
, overlayPluginCmd ? ''
cp -R "${src}" "$out/plugins/${name}"
''
, postPatch ? ""
,
}: ((depot.third_party.gerrit.override {
name = "${name}.jar";

src = pkgs.runCommandLocal "${name}-src" {} ''
cp -R "${depot.third_party.gerrit.src}" "$out"
chmod +w "$out/plugins"
${overlayPluginCmd}
'';
src = pkgs.runCommandLocal "${name}-src" { } ''
cp -R "${depot.third_party.gerrit.src}" "$out"
chmod +w "$out/plugins"
${overlayPluginCmd}
'';

bazelTarget = "//plugins/${name}";
}).overrideAttrs (super: {
deps = super.deps.overrideAttrs (superDeps: {
outputHash = depsOutputHash;
});
installPhase = ''
cp "bazel-bin/plugins/${name}/${name}.jar" "$out"
'';
postPatch = if super ? postPatch then ''
${super.postPatch}
${postPatch}
'' else postPatch;
}));
bazelTarget = "//plugins/${name}";
}).overrideAttrs (super: {
deps = super.deps.overrideAttrs (superDeps: {
outputHash = depsOutputHash;
});
installPhase = ''
cp "bazel-bin/plugins/${name}/${name}.jar" "$out"
'';
postPatch =
if super ? postPatch then ''
${super.postPatch}
${postPatch}
'' else postPatch;
}));
}

third_party/gerrit_plugins/default.nix (vendored, 3 lines changed)
@@ -2,7 +2,8 @@

let
inherit (import ./builder.nix args) buildGerritBazelPlugin;
in depot.nix.readTree.drvTargets {
in
depot.nix.readTree.drvTargets {
# https://gerrit.googlesource.com/plugins/owners
owners = buildGerritBazelPlugin rec {
name = "owners";

third_party/gerrit_plugins/oauth/default.nix (vendored, 3 lines changed)
@@ -2,7 +2,8 @@

let
inherit (import ../builder.nix args) buildGerritBazelPlugin;
in buildGerritBazelPlugin rec {
in
buildGerritBazelPlugin rec {
name = "oauth";
depsOutputHash = "sha256:0j86amkw54y177s522hc988hqg034fsrkywbsb9a7h14zwcqbran";
src = pkgs.fetchgit {

third_party/git/default.nix (vendored, 4 lines changed)
@@ -2,8 +2,8 @@
# `pkgs.srcOnly`.
{ pkgs, ... }:

pkgs.git.overrideAttrs(old: {
patches = (old.patches or []) ++ [
pkgs.git.overrideAttrs (old: {
patches = (old.patches or [ ]) ++ [
./0001-feat-third_party-git-date-add-dottime-format.patch
];
})

third_party/gitignoreSource/default.nix (vendored, 17 lines changed)
@@ -1,14 +1,17 @@
{ pkgs, ... }:

let
gitignoreNix = import (pkgs.fetchFromGitHub {
owner = "hercules-ci";
repo = "gitignore";
rev = "f9e996052b5af4032fe6150bba4a6fe4f7b9d698";
sha256 = "0jrh5ghisaqdd0vldbywags20m2cxpkbbk5jjjmwaw0gr8nhsafv";
}) { inherit (pkgs) lib; };
gitignoreNix = import
(pkgs.fetchFromGitHub {
owner = "hercules-ci";
repo = "gitignore";
rev = "f9e996052b5af4032fe6150bba4a6fe4f7b9d698";
sha256 = "0jrh5ghisaqdd0vldbywags20m2cxpkbbk5jjjmwaw0gr8nhsafv";
})
{ inherit (pkgs) lib; };

in {
in
{
__functor = _: gitignoreNix.gitignoreSource;

# expose extra functions here

@@ -3,15 +3,17 @@
depot.nix.buildGo.external {
path = "github.com/charmbracelet/bubbletea";
src =
let gitSrc = pkgs.fetchFromGitHub {
let
gitSrc = pkgs.fetchFromGitHub {
owner = "charmbracelet";
repo = "bubbletea";
rev = "v0.13.1";
sha256 = "0yf2fjkvx8ym9n6f3qp2z7sxs0qsfpj148sfvbrp38k67s3h20cs";
};
# The examples/ directory is fairly extensive,
# but it also adds most of the dependencies.
in pkgs.runCommand gitSrc.name {} ''
# The examples/ directory is fairly extensive,
# but it also adds most of the dependencies.
in
pkgs.runCommand gitSrc.name { } ''
mkdir -p $out
ln -s "${gitSrc}"/* $out
rm -r $out/examples

third_party/grpc/default.nix (vendored, 2 lines changed)
@@ -5,7 +5,7 @@
stdenv = pkgs.fullLlvm11Stdenv;
abseil-cpp = depot.third_party.abseil_cpp;
re2 = depot.third_party.re2;
}).overrideAttrs(orig: rec {
}).overrideAttrs (orig: rec {
cmakeFlags = orig.cmakeFlags ++ [
"-DCMAKE_CXX_STANDARD_REQUIRED=ON"
"-DCMAKE_CXX_STANDARD=17"

third_party/gtest/default.nix (vendored, 2 lines changed)
@@ -2,7 +2,7 @@

(pkgs.gtest.override {
stdenv = pkgs.fullLlvm11Stdenv;
}).overrideAttrs(_: {
}).overrideAttrs (_: {
src = pkgs.fetchFromGitHub {
owner = "google";
repo = "googletest";

third_party/josh/default.nix (vendored, 12 lines changed)
@@ -8,7 +8,8 @@ let
rev = "69dc986e506ba5631c8bbf52835da076a18ec8dc";
hash = "sha256:0ybc6ivjkm7bchaszs9lhbl1gbjnyhwq7a3vw6jml3ama84l52lb";
};
in depot.third_party.naersk.buildPackage {
in
depot.third_party.naersk.buildPackage {
inherit src;

buildInputs = with pkgs; [

@@ -18,8 +19,11 @@ in depot.third_party.naersk.buildPackage {
];

cargoBuildOptions = x: x ++ [
"-p" "josh"
"-p" "josh-proxy"
"-p" "josh-ui"
"-p"
"josh"
"-p"
"josh-proxy"
"-p"
"josh-ui"
];
}

third_party/lisp/bordeaux-threads.nix (vendored, 3 lines changed)
@@ -5,7 +5,8 @@
let
src = with pkgs; srcOnly lispPackages.bordeaux-threads;
getSrc = f: "${src}/src/${f}";
in depot.nix.buildLisp.library {
in
depot.nix.buildLisp.library {
name = "bordeaux-threads";
deps = [ depot.third_party.lisp.alexandria ];

Some files were not shown because too many files have changed in this diff.