Compare commits

...

65 commits

Author SHA1 Message Date
96e8bfff5b
feat(django-apps): Internalize
All checks were successful
2024-11-25 23:29:39 +01:00
sinavir
aa154d1b1b
fix(web03/dj-apps): Use secret tokens
All checks were successful
2024-11-25 16:06:08 +01:00
sinavir
f37a7449cb
fix(netbox-agent): Disable as it is broken
All checks were successful
2024-11-25 15:18:28 +01:00
030803ba29
chore(patches): Reorganize
All checks were successful
2024-11-25 11:15:20 +01:00
13abd5400b
fix(web03/dj-wikiens): Update dependencies
All checks were successful
2024-11-23 11:33:56 +01:00
f6c933f374
fix(kanidm): Use last usable version: 1.3.3
All checks were successful
2024-11-22 23:12:18 +01:00
c9839d4be6 chore(npins): Update
All checks were successful
2024-11-22 15:27:34 +00:00
fe52f0ebe2
feat(forgejo-runners): Use /data/slow/nix and not /data/slow/nix/nix
All checks were successful
The upstream module should be updated to reflect this: use rootPath
instead of storePath.
2024-11-22 14:04:35 +01:00
1f9f56ac91
chore(lix): Use the global patch infrastructure for lix
Some checks failed
2024-11-22 12:56:39 +01:00
75b621e298
fix(ds-fr): Switch to nix-pkgs
All checks were successful
2024-11-21 15:13:25 +01:00
sinavir
32f68a54a9
chore(workflows): regenerate
All checks were successful
2024-11-19 00:53:33 +01:00
sinavir
b00e47ec85
chore(netbox): Upgrade
Some checks failed
2024-11-19 00:40:15 +01:00
a50637d55e
chore(stirling-pdf): New version
Some checks failed
2024-11-19 00:13:49 +01:00
aa4f972085
fix(forgejo-runners): Let the shell handle colmena
Some checks failed
2024-11-19 00:01:06 +01:00
8a5de73b47 chore(npins): Update
Some checks failed
2024-11-18 15:29:55 +00:00
3fecacb482
chore(npins): Update nix-modules
All checks were successful
2024-11-18 13:03:13 +01:00
ded867d274
feat(dj-interludes): Add a real secret 2024-11-18 13:02:54 +01:00
f61bd85d63
chore(forgejo): Disallow more domains 2024-11-17 19:01:26 +01:00
bf06d2573b
feat(nix): Use passthru for shells and return to importing the scripts
All checks were successful
2024-11-14 22:18:40 +01:00
6fbda40e5e
feat(scripts): Unify behaviour and improve generation
All checks were successful
2024-11-14 22:01:58 +01:00
2ffd7732ba
feat(django-apps/interludes): Allow base webapps domain
All checks were successful
2024-11-12 15:08:17 +01:00
d45b044b22
feat(django-apps/interludes): Switch to interludes.ens.fr
All checks were successful
2024-11-12 14:55:04 +01:00
21b422b1ad
fix(nix-lib): Allow defining top-level imports
All checks were successful
2024-11-12 12:29:36 +01:00
420fe99984
fix(django-apps/interludes): Don't run in debug mode
All checks were successful
2024-11-12 10:52:49 +01:00
32f13adaad
feat(web03): Deploy interludes.webapps.dgnum.eu
Some checks failed
2024-11-12 10:40:09 +01:00
a816c81125
chore: Update nix-actions
All checks were successful
2024-11-11 22:05:16 +01:00
6ab3e4b685
fix(workflows): Correct typos
All checks were successful
2024-11-11 18:01:44 +01:00
5f1436e4bf
feat(workflows): Switch to a nix-based definition of workflows
Some checks failed
2024-11-11 17:57:23 +01:00
sinavir
d8f90dd940
fix(patches): Label nextcloud patch
All checks were successful
2024-11-11 00:57:49 +01:00
89b22a34da
feat(forgejo): Add blocklist for registering e-mails
All checks were successful
2024-11-10 16:24:51 +01:00
32d28ed351
feat(organization): Added Antoine Groudiev and Matthieu Boyer to organization
All checks were successful
2024-11-09 22:55:24 +01:00
46657a7f74
fix(tvix-cache): Turn down the log vomit, and increase the limit of file handles
All checks were successful
2024-11-09 19:11:10 +01:00
0a40fbbda0
Revert "chore(npins): Update"
All checks were successful
This reverts commit 045554b2e6.
2024-11-09 18:43:56 +01:00
045554b2e6 chore(npins): Update
Some checks failed
2024-11-09 15:29:23 +00:00
sinavir
2cee8006d3
feat(access-control): Remove luj from admins
All checks were successful
2024-11-08 00:18:14 +01:00
sinavir
9e5be2a279
fix(web01): www.lanuit.ens.fr redirection is not used anymore
All checks were successful
2024-11-04 20:07:33 +01:00
0576d1ecf8
fix(web03): Use a different domain for webhooks endpoint
All checks were successful
2024-10-31 10:58:07 +01:00
06bbe99769
feat(meta/dns): Add private subdomain linking to the netbird ips 2024-10-31 10:58:07 +01:00
sinavir
45f2f59055 feat(keys): Add a key for mdebray
All checks were successful
2024-10-30 23:21:36 +01:00
0e3463102c
feat(metis): Update and validate providers
All checks were successful
2024-10-24 00:27:15 +02:00
d2f039755b
chore(metis): Update
All checks were successful
2024-10-23 23:40:21 +02:00
a6aac2b0b4
feat(web03): Deploy www-bocal on bocal.webapps.dgnum.eu
All checks were successful
2024-10-23 13:56:47 +02:00
ae7aaabf29
feat(meta/network): Add web03 netbirdIp
All checks were successful
2024-10-23 11:04:28 +02:00
7ab63fb4a5
fix(netbox-agent): Use the latest version, which returns the correct value to the shell
All checks were successful
2024-10-23 10:05:25 +02:00
2bb03126cf
feat(web03): Deploy django-wiki on wiki.webapps.dgnum.eu
All checks were successful
2024-10-22 14:19:58 +02:00
2b858bbae4
feat(web03): Switch to nixos-unstable 2024-10-22 14:09:54 +02:00
4f18e8d387
feat(meta/dns): Add apps-webhook domain, to separate from the apps
All checks were successful
2024-10-22 13:55:30 +02:00
4a102117a4
feat(storage01): Init victoria-metrics DB
All checks were successful
2024-10-22 13:07:07 +02:00
969f59fbc4
feat(web03): Deploy the new annuaire
All checks were successful
2024-10-21 20:03:20 +02:00
972b9554b7 feat(netbox-agent): Internalize
All checks were successful
2024-10-21 19:38:11 +02:00
sinavir
e993d6de34 fix(stateless-uptime-kuma): Typo in domain name
All checks were successful
2024-10-21 12:25:18 +02:00
sinavir
e0eb7bbf7c fix(stateless-uptime-kuma): Correct probes for ollama and s3-api
Some checks failed
2024-10-21 12:16:00 +02:00
7875007a4f
feat(meta/dns): Add victoria-metrics
All checks were successful
2024-10-21 11:15:31 +02:00
b5fc554f0f
fix(patches): Update commit for netbox-qrcode
All checks were successful
2024-10-21 09:41:01 +02:00
a93a64d747
feat(forgejo): Send email to admins when new users appear
All checks were successful
2024-10-21 09:33:40 +02:00
51133e6e5f
feat(netbird): Update
All checks were successful
2024-10-20 23:01:14 +02:00
5f0c7d4e22
feat(meta/nodes): Sort nodes 2024-10-20 22:35:35 +02:00
39abf0b62d chore(npins): Update
All checks were successful
2024-10-20 13:29:01 +00:00
63c9f02b16
fix(meta): Use correct username
All checks were successful
(Sorry @jemagius)
2024-10-18 18:46:33 +02:00
f0b3d4b490
feat(kanidm): Use kanidm-provision to setup active members
All checks were successful
2024-10-18 14:36:38 +02:00
sinavir
e7edf29e11 fix(ollama-proxy): Use ip instead of vpn domain name
All checks were successful
VPN DNS is slow to start, so nginx would fail to resolve the name at startup
2024-10-18 12:45:27 +02:00
sinavir
c0435e694d fix(modules/dgn-records): Add enable option
All checks were successful
build configuration / build_and_cache_web02 (push) Successful in 1m9s
lint / check (push) Successful in 23s
build configuration / build_and_cache_web03 (push) Successful in 1m1s
build configuration / build_and_cache_bridge01 (push) Successful in 1m2s
Check meta / check_dns (push) Successful in 19s
Check meta / check_meta (push) Successful in 19s
build configuration / build_and_cache_rescue01 (push) Successful in 1m17s
build configuration / build_and_cache_geo01 (push) Successful in 1m1s
build configuration / build_and_cache_storage01 (push) Successful in 1m21s
build configuration / build_and_cache_geo02 (push) Successful in 1m2s
build configuration / build_and_cache_vault01 (push) Successful in 1m23s
build configuration / build_and_cache_web01 (push) Successful in 1m42s
build configuration / build_and_cache_compute01 (push) Successful in 1m37s
2024-10-18 11:53:32 +02:00
sinavir
1a05ea3a9a feat(krz01): Move to lab-infra repo 2024-10-18 11:53:32 +02:00
113c83bb9c
feat: laptop change and smartphone add to authorized MACs
All checks were successful
Check meta / check_meta (pull_request) Successful in 17s
Check meta / check_dns (pull_request) Successful in 15s
build configuration / build_and_cache_storage01 (pull_request) Successful in 1m14s
build configuration / build_and_cache_geo01 (pull_request) Successful in 1m1s
build configuration / build_and_cache_rescue01 (pull_request) Successful in 1m8s
build configuration / build_and_cache_compute01 (pull_request) Successful in 1m21s
build configuration / build_and_cache_krz01 (pull_request) Successful in 1m25s
build configuration / build_and_cache_geo02 (pull_request) Successful in 59s
lint / check (pull_request) Successful in 22s
build configuration / build_and_cache_bridge01 (pull_request) Successful in 1m0s
build configuration / build_and_cache_web03 (pull_request) Successful in 1m4s
build configuration / build_and_cache_web02 (pull_request) Successful in 1m10s
build configuration / build_and_cache_vault01 (pull_request) Successful in 1m21s
build configuration / build_and_cache_web01 (pull_request) Successful in 1m47s
build configuration / build_and_cache_geo02 (push) Successful in 1m7s
build configuration / build_and_cache_geo01 (push) Successful in 1m10s
build configuration / build_and_cache_rescue01 (push) Successful in 1m19s
build configuration / build_and_cache_storage01 (push) Successful in 1m21s
build configuration / build_and_cache_krz01 (push) Successful in 1m41s
build configuration / build_and_cache_compute01 (push) Successful in 1m44s
lint / check (push) Successful in 24s
build configuration / build_and_cache_web03 (push) Successful in 1m6s
build configuration / build_and_cache_vault01 (push) Successful in 1m21s
build configuration / build_and_cache_web02 (push) Successful in 1m11s
build configuration / build_and_cache_bridge01 (push) Successful in 1m1s
build configuration / build_and_cache_web01 (push) Successful in 1m43s
2024-10-18 11:12:14 +02:00
ac0aaa9228
fix(npins): Update nix-pkgs
All checks were successful
build configuration / build_and_cache_rescue01 (push) Successful in 1m20s
build configuration / build_and_cache_krz01 (push) Successful in 1m42s
build configuration / build_and_cache_compute01 (push) Successful in 1m46s
lint / check (push) Successful in 25s
build configuration / build_and_cache_web03 (push) Successful in 1m10s
build configuration / build_and_cache_web02 (push) Successful in 1m13s
build configuration / build_and_cache_vault01 (push) Successful in 1m23s
build configuration / build_and_cache_bridge01 (push) Successful in 1m3s
build configuration / build_and_cache_web01 (push) Successful in 1m42s
build configuration / build_and_cache_geo02 (push) Successful in 1m10s
build configuration / build_and_cache_geo01 (push) Successful in 1m13s
build configuration / build_and_cache_storage01 (push) Successful in 1m19s
2024-10-18 11:10:47 +02:00
107 changed files with 2522 additions and 3051 deletions

.envrc
View file

@ -1 +1,2 @@
watch_file workflows/*
use nix
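watch_file workflows/* asks direnv to re-evaluate the environment whenever one of the generated workflow files changes, so the shell hook that installs the workflows (added in default.nix later in this diff) runs again automatically.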

View file

@ -1,3 +1,16 @@
jobs:
check_dns:
runs-on: nix
steps:
- uses: actions/checkout@v3
- name: Check the validity of the DNS configuration
run: nix-build meta/verify.nix -A dns
check_meta:
runs-on: nix
steps:
- uses: actions/checkout@v3
- name: Check the validity of meta options
run: nix-build meta/verify.nix -A meta
name: Check meta
on:
pull_request:
@ -5,21 +18,4 @@ on:
- main
push:
paths:
- 'meta/*'
jobs:
check_meta:
runs-on: nix
steps:
- uses: actions/checkout@v3
- name: Check the validity of meta options
run: nix-build meta/verify.nix -A meta
check_dns:
runs-on: nix
steps:
- uses: actions/checkout@v3
- name: Check the validity of the DNS configuration
run: nix-build meta/verify.nix -A dns --no-out-link
- meta/*

View file

@ -0,0 +1,16 @@
jobs:
check_workflows:
runs-on: nix
steps:
- uses: actions/checkout@v3
- name: Check that the workflows are up to date
run: nix-shell -A check-workflows --run '[ $(git status --porcelain | wc -l)
-eq 0 ]'
name: Check workflows
on:
pull_request:
branches:
- main
push:
paths:
- workflows/*
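This check enters the check-workflows shell defined in default.nix below, whose shell hook regenerates the workflow files, and fails if git reports any resulting changes, i.e. if the committed YAML no longer matches the Nix sources under workflows/.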

View file

@ -1,56 +0,0 @@
name: ds-fr update
on:
schedule:
- cron: "26 18 * * wed"
jobs:
npins_update:
runs-on: nix
steps:
- uses: actions/checkout@v3
with:
token: ${{ secrets.TEA_DGNUM_CHORES_TOKEN }}
- name: Update DS and open PR if necessary
run: |
# Fetch the latest release tag
VERSION=$(curl -L \
-H "Accept: application/vnd.github+json" \
-H "X-GitHub-Api-Version: 2022-11-28" \
https://api.github.com/repos/demarches-simplifiees/demarches-simplifiees.fr/releases/latest \
| jq -r '.tag_name')
# Move to the ds-fr directory
cd machines/compute01/ds-fr/package
# Run the update script
./update.sh -v "$VERSION"
if [ ! -z "$(git diff --name-only)" ]; then
echo "[+] Changes detected, pushing updates."
git switch -C ds-update
git add .
git config user.name "DGNum Chores"
git config user.email "tech@dgnum.eu"
git commit --message "chore(ds-fr): Update"
git push --set-upstream origin ds-update --force
# Connect to the server with the cli
tea login add \
-n dgnum-chores \
-t '${{ secrets.TEA_DGNUM_CHORES_TOKEN }}' \
-u https://git.dgnum.eu
# Create a pull request if needed
# i.e. no PR with the same title exists
if [ -z "$(tea pr ls -f='title,author' -o simple | grep 'chore(ds-fr): Update dgnum-chores')" ]; then
tea pr create \
--description "Automatic ds-fr update" \
--title "chore(ds-fr): Update" \
--head ds-update
fi
fi

View file

@ -0,0 +1,119 @@
jobs:
bridge01:
runs-on: nix
steps:
- uses: actions/checkout@v3
- env:
BUILD_NODE: bridge01
STORE_ENDPOINT: https://tvix-store.dgnum.eu/infra-signing/
STORE_PASSWORD: ${{ secrets.STORE_PASSWORD }}
STORE_USER: admin
name: Build and cache bridge01
run: nix-shell -A eval-nodes --run cache-node
compute01:
runs-on: nix
steps:
- uses: actions/checkout@v3
- env:
BUILD_NODE: compute01
STORE_ENDPOINT: https://tvix-store.dgnum.eu/infra-signing/
STORE_PASSWORD: ${{ secrets.STORE_PASSWORD }}
STORE_USER: admin
name: Build and cache compute01
run: nix-shell -A eval-nodes --run cache-node
geo01:
runs-on: nix
steps:
- uses: actions/checkout@v3
- env:
BUILD_NODE: geo01
STORE_ENDPOINT: https://tvix-store.dgnum.eu/infra-signing/
STORE_PASSWORD: ${{ secrets.STORE_PASSWORD }}
STORE_USER: admin
name: Build and cache geo01
run: nix-shell -A eval-nodes --run cache-node
geo02:
runs-on: nix
steps:
- uses: actions/checkout@v3
- env:
BUILD_NODE: geo02
STORE_ENDPOINT: https://tvix-store.dgnum.eu/infra-signing/
STORE_PASSWORD: ${{ secrets.STORE_PASSWORD }}
STORE_USER: admin
name: Build and cache geo02
run: nix-shell -A eval-nodes --run cache-node
rescue01:
runs-on: nix
steps:
- uses: actions/checkout@v3
- env:
BUILD_NODE: rescue01
STORE_ENDPOINT: https://tvix-store.dgnum.eu/infra-signing/
STORE_PASSWORD: ${{ secrets.STORE_PASSWORD }}
STORE_USER: admin
name: Build and cache rescue01
run: nix-shell -A eval-nodes --run cache-node
storage01:
runs-on: nix
steps:
- uses: actions/checkout@v3
- env:
BUILD_NODE: storage01
STORE_ENDPOINT: https://tvix-store.dgnum.eu/infra-signing/
STORE_PASSWORD: ${{ secrets.STORE_PASSWORD }}
STORE_USER: admin
name: Build and cache storage01
run: nix-shell -A eval-nodes --run cache-node
vault01:
runs-on: nix
steps:
- uses: actions/checkout@v3
- env:
BUILD_NODE: vault01
STORE_ENDPOINT: https://tvix-store.dgnum.eu/infra-signing/
STORE_PASSWORD: ${{ secrets.STORE_PASSWORD }}
STORE_USER: admin
name: Build and cache vault01
run: nix-shell -A eval-nodes --run cache-node
web01:
runs-on: nix
steps:
- uses: actions/checkout@v3
- env:
BUILD_NODE: web01
STORE_ENDPOINT: https://tvix-store.dgnum.eu/infra-signing/
STORE_PASSWORD: ${{ secrets.STORE_PASSWORD }}
STORE_USER: admin
name: Build and cache web01
run: nix-shell -A eval-nodes --run cache-node
web02:
runs-on: nix
steps:
- uses: actions/checkout@v3
- env:
BUILD_NODE: web02
STORE_ENDPOINT: https://tvix-store.dgnum.eu/infra-signing/
STORE_PASSWORD: ${{ secrets.STORE_PASSWORD }}
STORE_USER: admin
name: Build and cache web02
run: nix-shell -A eval-nodes --run cache-node
web03:
runs-on: nix
steps:
- uses: actions/checkout@v3
- env:
BUILD_NODE: web03
STORE_ENDPOINT: https://tvix-store.dgnum.eu/infra-signing/
STORE_PASSWORD: ${{ secrets.STORE_PASSWORD }}
STORE_USER: admin
name: Build and cache web03
run: nix-shell -A eval-nodes --run cache-node
name: Build all the nodes
on:
pull_request:
branches:
- main
push:
branches:
- main

View file

@ -1,219 +0,0 @@
name: build configuration
on:
pull_request:
types: [opened, synchronize, edited, reopened]
branches:
- main
push:
branches:
- main
jobs:
build_and_cache_krz01:
runs-on: nix
steps:
- uses: actions/checkout@v3
- name: Build and cache the node
run: nix-shell --run cache-node
env:
STORE_ENDPOINT: "https://tvix-store.dgnum.eu/infra-signing/"
STORE_USER: "admin"
STORE_PASSWORD: ${{ secrets.STORE_PASSWORD }}
BUILD_NODE: "krz01"
- uses: actions/upload-artifact@v3
if: always()
with:
name: outputs_krz01
path: paths.txt
build_and_cache_compute01:
runs-on: nix
steps:
- uses: actions/checkout@v3
- name: Build and cache the node
run: nix-shell --run cache-node
env:
STORE_ENDPOINT: "https://tvix-store.dgnum.eu/infra-signing/"
STORE_USER: "admin"
STORE_PASSWORD: ${{ secrets.STORE_PASSWORD }}
BUILD_NODE: "compute01"
- uses: actions/upload-artifact@v3
if: always()
with:
name: outputs_compute01
path: paths.txt
build_and_cache_storage01:
runs-on: nix
steps:
- uses: actions/checkout@v3
- name: Build and cache the node
run: nix-shell --run cache-node
env:
STORE_ENDPOINT: "https://tvix-store.dgnum.eu/infra-signing/"
STORE_USER: "admin"
STORE_PASSWORD: ${{ secrets.STORE_PASSWORD }}
BUILD_NODE: "storage01"
- uses: actions/upload-artifact@v3
if: always()
with:
name: outputs_storage01
path: paths.txt
build_and_cache_rescue01:
runs-on: nix
steps:
- uses: actions/checkout@v3
- name: Build and cache the node
run: nix-shell --run cache-node
env:
STORE_ENDPOINT: "https://tvix-store.dgnum.eu/infra-signing/"
STORE_USER: "admin"
STORE_PASSWORD: ${{ secrets.STORE_PASSWORD }}
BUILD_NODE: "rescue01"
- uses: actions/upload-artifact@v3
if: always()
with:
name: outputs_rescue01
path: paths.txt
build_and_cache_geo01:
runs-on: nix
steps:
- uses: actions/checkout@v3
- name: Build and cache the node
run: nix-shell --run cache-node
env:
STORE_ENDPOINT: "https://tvix-store.dgnum.eu/infra-signing/"
STORE_USER: "admin"
STORE_PASSWORD: ${{ secrets.STORE_PASSWORD }}
BUILD_NODE: "geo01"
- uses: actions/upload-artifact@v3
if: always()
with:
name: outputs_geo01
path: paths.txt
build_and_cache_geo02:
runs-on: nix
steps:
- uses: actions/checkout@v3
- name: Build and cache the node
run: nix-shell --run cache-node
env:
STORE_ENDPOINT: "https://tvix-store.dgnum.eu/infra-signing/"
STORE_USER: "admin"
STORE_PASSWORD: ${{ secrets.STORE_PASSWORD }}
BUILD_NODE: "geo02"
- uses: actions/upload-artifact@v3
if: always()
with:
name: outputs_geo02
path: paths.txt
build_and_cache_vault01:
runs-on: nix
steps:
- uses: actions/checkout@v3
- name: Build and cache the node
run: nix-shell --run cache-node
env:
STORE_ENDPOINT: "https://tvix-store.dgnum.eu/infra-signing/"
STORE_USER: "admin"
STORE_PASSWORD: ${{ secrets.STORE_PASSWORD }}
BUILD_NODE: "vault01"
- uses: actions/upload-artifact@v3
if: always()
with:
name: outputs_vault01
path: paths.txt
build_and_cache_web01:
runs-on: nix
steps:
- uses: actions/checkout@v3
- name: Build and cache the node
run: nix-shell --run cache-node
env:
STORE_ENDPOINT: "https://tvix-store.dgnum.eu/infra-signing/"
STORE_USER: "admin"
STORE_PASSWORD: ${{ secrets.STORE_PASSWORD }}
BUILD_NODE: "web01"
- uses: actions/upload-artifact@v3
if: always()
with:
name: outputs_web01
path: paths.txt
build_and_cache_web02:
runs-on: nix
steps:
- uses: actions/checkout@v3
- name: Build and cache the node
run: nix-shell --run cache-node
env:
STORE_ENDPOINT: "https://tvix-store.dgnum.eu/infra-signing/"
STORE_USER: "admin"
STORE_PASSWORD: ${{ secrets.STORE_PASSWORD }}
BUILD_NODE: "web02"
- uses: actions/upload-artifact@v3
if: always()
with:
name: outputs_web02
path: paths.txt
build_and_cache_web03:
runs-on: nix
steps:
- uses: actions/checkout@v3
- name: Build and cache the node
run: nix-shell --run cache-node
env:
STORE_ENDPOINT: "https://tvix-store.dgnum.eu/infra-signing/"
STORE_USER: "admin"
STORE_PASSWORD: ${{ secrets.STORE_PASSWORD }}
BUILD_NODE: "web03"
- uses: actions/upload-artifact@v3
if: always()
with:
name: outputs_web02
path: paths.txt
build_and_cache_bridge01:
runs-on: nix
steps:
- uses: actions/checkout@v3
- name: Build and cache the node
run: nix-shell --run cache-node
env:
STORE_ENDPOINT: "https://tvix-store.dgnum.eu/infra-signing/"
STORE_USER: "admin"
STORE_PASSWORD: ${{ secrets.STORE_PASSWORD }}
BUILD_NODE: "bridge01"
- uses: actions/upload-artifact@v3
if: always()
with:
name: outputs_web02
path: paths.txt

View file

@ -1,11 +0,0 @@
name: lint
on: [push, pull_request]
jobs:
check:
runs-on: nix
steps:
- uses: actions/checkout@v3
- name: Run pre-commit on all files
run: nix-shell --run 'pre-commit run --all-files --hook-stage pre-push --show-diff-on-failure' -A shells.pre-commit ./.

View file

@ -0,0 +1,25 @@
jobs:
npins_update:
runs-on: nix
steps:
- uses: actions/checkout@v3
with:
depth: 0
token: ${{ secrets.TEA_DGNUM_CHORES_TOKEN }}
- name: Update dependencies and open PR if necessary
run: "npins update\n\nif [ ! -z \"$(git diff --name-only)\" ]; then\n echo
\"[+] Changes detected, pushing updates.\"\n\n git switch -C npins-update\n\
\n git add npins\n\n git config user.name \"DGNum Chores\"\n git config
user.email \"tech@dgnum.eu\"\n\n git commit --message \"chore(npins): Update\"\
\n git push --set-upstream origin npins-update --force\n\n # Connect to
the server with the cli\n tea login add \\\n -n dgnum-chores \\\n -t
\"${{ secrets.TEA_DGNUM_CHORES_TOKEN }}\" \\\n -u https://git.dgnum.eu\n\
\n # Create a pull request if needed\n # i.e. no PR with the same title
exists\n if [ -z \"$(tea pr ls -f='title,author' -o simple | grep 'chore(npins):
Update dgnum-chores')\" ]; then\n tea pr create \\\n --description
\"Automatic npins update\" \\\n --title \"chore(npins): Update\" \\\n\
\ --head npins-update\n fi\nfi\n"
name: npins update
on:
schedule:
- cron: 25 15 * * *
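The escaped run script above is the YAML serialization of a short shell script: it runs npins update and, if anything changed, commits the result to an npins-update branch, force-pushes it, logs in to git.dgnum.eu with tea, and opens a pull request titled "chore(npins): Update" unless one with the same title already exists.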

View file

@ -0,0 +1,12 @@
jobs:
check:
runs-on: nix
steps:
- uses: actions/checkout@v3
- name: Run pre-commit on all files
run: nix-shell -A pre-commit --run 'pre-commit run --all-files --hook-stage
pre-push --show-diff-on-failure'
name: Run pre-commit on all files
on:
- push
- pull_request

View file

@ -41,7 +41,15 @@
}:
let
git-checks = (import (builtins.storePath sources.git-hooks)).run {
inherit (pkgs.lib)
isFunction
mapAttrs
mapAttrs'
nameValuePair
removeSuffix
;
git-checks = (import sources.git-hooks).run {
src = ./.;
hooks = {
@ -67,6 +75,22 @@ let
commitizen.enable = true;
};
};
workflows = (import sources.nix-actions { inherit pkgs; }).install {
src = ./.;
workflows = mapAttrs' (
name: _:
nameValuePair (removeSuffix ".nix" name) (
let
w = import ./workflows/${name};
in
if isFunction w then w { inherit (pkgs) lib; } else w
)
) (builtins.readDir ./workflows);
};
scripts = import ./scripts { inherit pkgs; };
in
{
@ -78,36 +102,35 @@ in
mkCacheSettings = import ./machines/storage01/tvix-cache/cache-settings.nix;
shells = {
default = pkgs.mkShell {
devShell = pkgs.mkShell {
name = "dgnum-infra";
packages = [
(pkgs.nixos-generators.overrideAttrs (_: {
version = "1.8.0-unstable";
src = builtins.storePath sources.nixos-generators;
src = sources.nixos-generators;
}))
pkgs.npins
(pkgs.callPackage ./lib/colmena { inherit (nix-pkgs) colmena; })
(pkgs.callPackage "${sources.agenix}/pkgs/agenix.nix" { })
(pkgs.callPackage "${sources.lon}/nix/packages/lon.nix" { })
] ++ (import ./scripts { inherit pkgs; });
] ++ (builtins.attrValues scripts);
shellHook = ''
${git-checks.shellHook}
${workflows.shellHook}
'';
preferLocalBuild = true;
};
pre-commit = pkgs.mkShell {
name = "pre-commit-shell";
###
# Alternative shells
shellHook = ''
${git-checks.shellHook}
'';
passthru = mapAttrs (name: value: pkgs.mkShell (value // { inherit name; })) {
pre-commit.shellHook = git-checks.shellHook;
check-workflows.shellHook = workflows.shellHook;
eval-nodes.packages = [ scripts.cache-node ];
};
};
}
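For context, the workflows attribute above imports every file under ./workflows and installs the generated YAML shown earlier in this diff. The sketch below is a guess at what such a file could look like: the schema is inferred from the generated "Build all the nodes" workflow and from the fact that default.nix accepts either an attribute set or a function of { lib }, so treat it as an illustration rather than the repository's actual workflows/ file.

# Hypothetical workflows/build-nodes.nix (illustration only)
{ lib }:
{
  name = "Build all the nodes";
  on = {
    pull_request.branches = [ "main" ];
    push.branches = [ "main" ];
  };
  # One identical job per node, as in the generated YAML above.
  jobs = lib.genAttrs
    [
      "bridge01"
      "compute01"
      "geo01"
      "geo02"
      "rescue01"
      "storage01"
      "vault01"
      "web01"
      "web02"
      "web03"
    ]
    (node: {
      "runs-on" = "nix";
      steps = [
        { uses = "actions/checkout@v3"; }
        {
          name = "Build and cache ${node}";
          run = "nix-shell -A eval-nodes --run cache-node";
          env = {
            BUILD_NODE = node;
            STORE_ENDPOINT = "https://tvix-store.dgnum.eu/infra-signing/";
            STORE_USER = "admin";
            STORE_PASSWORD = "\${{ secrets.STORE_PASSWORD }}";
          };
        }
      ];
    });
}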

View file

@ -64,23 +64,12 @@ in
};
defaults =
{
pkgs,
name,
nodeMeta,
...
}:
{ name, nodeMeta, ... }:
{
# Import the default modules
imports = [
./modules
(import "${sources.lix-module}/module.nix" {
lix = pkgs.applyPatches {
name = "lix-2.90.patched";
src = sources.lix;
patches = [ ./patches/00-disable-installChecks-lix.patch ];
};
})
(import "${sources.lix-module}/module.nix" { inherit (sources) lix; })
];
# Include default secrets

View file

@ -17,7 +17,6 @@ rec {
compute01 = [ "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIE/YluSVS+4h3oV8CIUj0OmquyJXju8aEQy0Jz210vTu" ];
geo01 = [ "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIEl6Pubbau+usQkemymoSKrTBbrX8JU5m5qpZbhNx8p4" ];
geo02 = [ "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIFNXaCS0/Nsu5npqQk1TP6wMHCVIOaj4pblp2tIg6Ket" ];
krz01 = [ "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIP4o65gWOgNrxbSd3kiQIGZUM+YD6kuZOQtblvzUGsfB" ];
rescue01 = [ "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIEJa02Annu8o7ggPjTH/9ttotdNGyghlWfU9E8pnuLUf" ];
storage01 = [ "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIA0s+rPcEcfWCqZ4B2oJiWT/60awOI8ijL1rtDM2glXZ" ];
vault01 = [ "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIAJA6VA7LENvTRlKdcrqt8DxDOPvX3bg3Gjy9mNkdFEW" ];
@ -26,6 +25,9 @@ rec {
web03 = [ "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAICrWsMEfK86iaO9SubMqE2UvZNtHkLY5VUod/bbqKC0L" ];
# SSH keys of the DGNum members
agroudiev = [
"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDgyt3ntpcoI/I2n97R1hzjBiNL6R98S73fSi7pkSE/8mQbI8r9GzsPUBcxQ+tIg0FgwkLxTwF8DwLf0E+Le/rPznxBS5LUQaAktSQSrxz/IIID1+jN8b03vf5PjfKS8H2Tu3Q8jZXa8HNsj3cpySpGMqGrE3ieUmknd/YfppRRf+wM4CsGKZeS3ZhB9oZi3Jn22A0U/17AOJTnv4seq+mRZWRQt3pvQvpp8/2M7kEqizie/gTr/DnwxUr45wisqYYH4tat9Cw6iDr7LK10VCrK37BfFagMIZ08Hkh3c46jghjYNQWe+mBUWJByWYhTJ0AtYrbaYeUV1HVYbsRJ6bNx25K6794QQPaE/vc2Z/VK/ILgvJ+9myFSAWVylCWdyYpwUu07RH/jDBl2aqH62ESwAG7SDUUcte6h9N+EryAQLWc8OhsGAYLpshhBpiqZwzX90m+nkbhx1SqMbtt6TS+RPDEHKFYn8E6FBrf1FK34482ndq/hHXZ88mqzGb1nOnM="
];
catvayor = [
"ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIAA16foz+XzwKwyIR4wFgNIAE3Y7AfXyEsUZFVVz8Rie catvayor@katvayor"
];
@ -45,8 +47,10 @@ rec {
"ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIDMBW7rTtfZL9wtrpCVgariKdpN60/VeAzXkh9w3MwbO julien@enigma"
"ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIGa+7n7kNzb86pTqaMn554KiPrkHRGeTJ0asY1NjSbpr julien@tower"
];
mboyer = [ "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIGYnwZaFYvUxtJeNvpaA20rLfq8fOO4dFp7cIXsD8YNx" ];
mdebray = [
"ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIEpwF+XD3HgX64kqD42pcEZRNYAWoO4YNiOm5KO4tH6o maurice@polaris"
"ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIFdDnSl3cyWil+S5JiyGqOvBR3wVh+lduw58S5WvraoL maurice@fekda"
];
raito = [
"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDcEkYM1r8QVNM/G5CxJInEdoBCWjEHHDdHlzDYNSUIdHHsn04QY+XI67AdMCm8w30GZnLUIj5RiJEWXREUApby0GrfxGGcy8otforygfgtmuUKAUEHdU2MMwrQI7RtTZ8oQ0USRGuqvmegxz3l5caVU7qGvBllJ4NUHXrkZSja2/51vq80RF4MKkDGiz7xUTixI2UcBwQBCA/kQedKV9G28EH+1XfvePqmMivZjl+7VyHsgUVj9eRGA1XWFw59UPZG8a7VkxO/Eb3K9NF297HUAcFMcbY6cPFi9AaBgu3VC4eetDnoN/+xT1owiHi7BReQhGAy/6cdf7C/my5ehZwD"

View file

@ -190,8 +190,11 @@ rec {
recursiveFuse [
(enableModules enabledModules)
{ imports = mkImports root ([ "_hardware-configuration" ] ++ enabledServices); }
{
imports =
(extraConfig.imports or [ ]) ++ (mkImports root ([ "_hardware-configuration" ] ++ enabledServices));
}
extraConfig
(removeAttrs extraConfig [ "imports" ])
];
}
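The change above lets callers pass an imports list inside extraConfig: it is concatenated with the generated imports and stripped from the attribute set before the remaining options are merged. A minimal usage sketch, with a hypothetical module path:

lib.extra.mkConfig {
  enabledModules = [ ];
  enabledServices = [ "networking" ];
  extraConfig = {
    # This key is now spliced into the generated imports instead of being
    # passed through the recursive merge.
    imports = [ ./extra-module.nix ];
    services.netbird.enable = true;
  };
  root = ./.;
}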

View file

@ -1,7 +1,14 @@
{ config, ... }:
{
config,
pkgs,
sources,
...
}:
let
host = "demarches.dgnum.eu";
dgn-id = "fca8f72cd60c00e74d7735ec13e4e3a22e8e1244";
in
{
imports = [ ./module.nix ];
@ -11,6 +18,18 @@ in
services.demarches-simplifiees = {
enable = true;
package =
((import sources.nix-pkgs { inherit pkgs; }).demarches-simplifiees.override {
initialDeploymentDate = "20230923";
}).overrideAttrs
(old: {
dsModules = old.dsModules.overrideAttrs {
prePatch = ''
${pkgs.lib.getExe pkgs.git} apply -p1 < ${builtins.fetchurl "https://git.dgnum.eu/DGNum/demarches-normaliennes/commit/${dgn-id}.patch"}
'';
};
});
secretFile = config.age.secrets."ds-fr-secret_file".path;
initialDeploymentDate = "20230923";

View file

@ -1,12 +1,19 @@
{
config,
lib,
nixpkgs,
meta,
...
}:
let
inherit (lib) escapeRegex concatStringsSep;
inherit (lib)
attrValues
catAttrs
escapeRegex
concatStringsSep
mapAttrs'
nameValuePair
;
domain = "sso.dgnum.eu";
port = 8443;
@ -28,12 +35,14 @@ let
"netbird-beta.hubrecht.ovh"
]
);
usernameFor = member: meta.organization.members.${member}.username;
in
{
services.kanidm = {
enableServer = true;
package = nixpkgs.unstable.kanidm;
# package = nixpkgs.unstable.kanidm;
serverSettings = {
inherit domain;
@ -48,6 +57,107 @@ in
tls_chain = "${cert.directory}/fullchain.pem";
tls_key = "${cert.directory}/key.pem";
};
provision = {
enable = true;
persons = mapAttrs' (
_:
{
email,
name,
username,
...
}:
nameValuePair username {
displayName = name;
mailAddresses = [ email ];
}
) meta.organization.members;
groups =
{
grp_active.members = catAttrs "username" (attrValues meta.organization.members);
}
// (mapAttrs' (
name: members: nameValuePair "grp_${name}" { members = builtins.map usernameFor members; }
) meta.organization.groups);
# INFO: The authentication resources declared here can only be for internal services,
# as regular members cannot be statically known.
systems.oauth2 = {
dgn_grafana = {
displayName = "Grafana [Analysis]";
originLanding = "https://grafana.dgnum.eu";
originUrl = "https://grafana.dgnum.eu/";
preferShortUsername = true;
scopeMaps.grp_active = [
"openid"
"profile"
"email"
];
};
dgn_librenms = {
allowInsecureClientDisablePkce = true;
displayName = "LibreNMS [Network]";
enableLegacyCrypto = true;
originLanding = "https://nms.dgnum.eu";
originUrl = "https://nms.dgnum.eu/";
preferShortUsername = true;
scopeMaps.grp_active = [
"openid"
"profile"
"email"
];
};
dgn_netbird = {
displayName = "Netbird [VPN]";
enableLocalhostRedirects = true;
originLanding = "https://netbird.dgnum.eu";
originUrl = "https://netbird.dgnum.eu/";
preferShortUsername = true;
public = true;
scopeMaps.grp_active = [
"openid"
"profile"
"email"
];
};
dgn_netbox = {
allowInsecureClientDisablePkce = true;
displayName = "Netbox [Inventory]";
enableLegacyCrypto = true;
originLanding = "https://netbox.dgnum.eu";
originUrl = "https://netbox.dgnum.eu/";
preferShortUsername = true;
scopeMaps.grp_active = [
"openid"
"profile"
"email"
];
};
dgn_outline = {
displayName = "Outline [Docs]";
originUrl = "https://docs.dgnum.eu/";
originLanding = "https://docs.dgnum.eu";
preferShortUsername = true;
scopeMaps.grp_active = [
"openid"
"profile"
"email"
];
};
};
};
};
users.users.kanidm.extraGroups = [ cert.group ];
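To make the mapping above concrete, here is roughly what the provision attribute set evaluates to for a single, entirely hypothetical member entry; the shape follows the mapAttrs' calls and the grp_ naming used above:

# Assuming meta.organization.members contained
#   jdoe = { username = "jdoe"; name = "Jane Doe"; email = "jdoe@dgnum.eu"; };
{
  provision.persons.jdoe = {
    displayName = "Jane Doe";
    mailAddresses = [ "jdoe@dgnum.eu" ];
  };
  # Every member username lands in grp_active; meta.organization.groups are
  # mapped to grp_<name> groups containing their members' usernames.
  provision.groups.grp_active.members = [ "jdoe" ];
}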

View file

@ -1,16 +1,11 @@
{
pkgs,
nodes,
meta,
...
}:
{ pkgs, ... }:
{
services.nginx = {
virtualHosts."ollama01.beta.dgnum.eu" = {
enableACME = true;
forceSSL = true;
locations."/" = {
proxyPass = "http://${meta.network.krz01.netbirdIp}:${toString nodes.krz01.config.services.ollama.port}";
proxyPass = "http://100.80.103.206:11434";
basicAuthFile = pkgs.writeText "ollama-htpasswd" ''
raito:$y$j9T$UDEHpLtM52hRGK0I4qT6M0$N75AhENLqgtJnTGaPzq51imhjZvuPr.ow81Co1ZTcX2
'';

View file

@ -10,7 +10,7 @@ let
# - push to a new branch dgn-v0.A.B where A.B is the new version
# - finally, update the commit hash of the customization patch
dgn-id = "8f19cb1c9623f8da71f6512c1528d83acc35db57";
dgn-id = "d73e347b1cefe23092bfcb2d3f8a23903410203e";
port = 8084;
in

View file

@ -1,41 +0,0 @@
{ config, lib, ... }:
lib.extra.mkConfig {
enabledModules = [
# INFO: This list needs to stay sorted alphabetically
];
enabledServices = [
# INFO: This list needs to stay sorted alphabetically
# Machine learning API machine
# "microvm-ml01"
# "microvm-router01"
"nvidia-tesla-k80"
"ollama"
"whisper"
"proxmox"
"networking"
];
extraConfig = {
microvm = {
host.enable = true;
};
dgn-hardware = {
useZfs = true;
zfsPools = [
"dpool"
"ppool0"
];
};
# We are going to use CUDA here.
nixpkgs.config.cudaSupport = true;
hardware.graphics.enable = true;
services.netbird.enable = true;
networking.firewall.trustedInterfaces = [ "wt0" ];
};
root = ./.;
}

View file

@ -1,50 +0,0 @@
{
config,
lib,
modulesPath,
...
}:
{
imports = [ (modulesPath + "/installer/scan/not-detected.nix") ];
boot = {
initrd = {
availableKernelModules = [
"ehci_pci"
"ahci"
"mpt3sas"
"usbhid"
"sd_mod"
];
kernelModules = [ ];
};
kernelModules = [ "kvm-intel" ];
extraModulePackages = [ ];
};
fileSystems."/" = {
device = "/dev/disk/by-uuid/92bf4d66-2693-4eca-9b26-f86ae09d468d";
fsType = "ext4";
};
boot.initrd.luks.devices."mainfs" = {
device = "/dev/disk/by-uuid/26f9737b-28aa-4c3f-bd3b-b028283cef88";
keyFileSize = 1;
keyFile = "/dev/zero";
};
fileSystems."/boot" = {
device = "/dev/disk/by-uuid/280C-8844";
fsType = "vfat";
options = [
"fmask=0022"
"dmask=0022"
];
};
swapDevices = [ ];
nixpkgs.hostPlatform = lib.mkDefault "x86_64-linux";
hardware.cpu.intel.updateMicrocode = lib.mkDefault config.hardware.enableRedistributableFirmware;
}

View file

@ -1,22 +0,0 @@
_: {
microvm.autostart = [ "ml01" ];
microvm.vms.ml01 = {
config = {
networking.hostName = "ml01";
microvm = {
hypervisor = "cloud-hypervisor";
vcpu = 4;
mem = 4096;
balloonMem = 2048;
shares = [
{
source = "/nix/store";
mountPoint = "/nix/.ro-store";
tag = "ro-store";
proto = "virtiofs";
}
];
};
};
};
}

View file

@ -1,16 +0,0 @@
_: {
microvm.autostart = [ "router01" ];
microvm.vms.router01 = {
config = {
networking.hostName = "router01";
microvm.shares = [
{
source = "/nix/store";
mountPoint = "/nix/.ro-store";
tag = "ro-store";
proto = "virtiofs";
}
];
};
};
}

View file

@ -1,22 +0,0 @@
{
systemd.network.networks = {
"10-eno1" = {
matchConfig.Name = [ "eno1" ];
networkConfig = {
Bridge = "vmbr0";
};
};
"10-vmbr0" = {
matchConfig.Name = "vmbr0";
linkConfig.RequiredForOnline = "routable";
};
};
systemd.network.netdevs."vmbr0" = {
netdevConfig = {
Name = "vmbr0";
Kind = "bridge";
};
};
}

View file

@ -1,8 +0,0 @@
{ config, ... }:
{
nixpkgs.config.nvidia.acceptLicense = true;
# Tesla K80 is not supported by the latest driver.
hardware.nvidia.package = config.boot.kernelPackages.nvidia_x11_legacy470;
# Don't ask.
services.xserver.videoDrivers = [ "nvidia" ];
}

View file

@ -1,179 +0,0 @@
From 2abd226ff3093c5a9e18a618fba466853e7ebaf7 Mon Sep 17 00:00:00 2001
From: Raito Bezarius <masterancpp@gmail.com>
Date: Tue, 8 Oct 2024 18:27:41 +0200
Subject: [PATCH] K80 support
Signed-off-by: Raito Bezarius <masterancpp@gmail.com>
---
docs/development.md | 6 +++-
docs/gpu.md | 1 +
gpu/amd_linux.go | 6 +++-
gpu/gpu.go | 63 ++++++++++++++++++++++++++++++++++++-----
scripts/build_docker.sh | 2 +-
scripts/build_linux.sh | 2 +-
6 files changed, 69 insertions(+), 11 deletions(-)
diff --git a/docs/development.md b/docs/development.md
index 2f7b9ecf..9da35931 100644
--- a/docs/development.md
+++ b/docs/development.md
@@ -51,7 +51,11 @@ Typically the build scripts will auto-detect CUDA, however, if your Linux distro
or installation approach uses unusual paths, you can specify the location by
specifying an environment variable `CUDA_LIB_DIR` to the location of the shared
libraries, and `CUDACXX` to the location of the nvcc compiler. You can customize
-a set of target CUDA architectures by setting `CMAKE_CUDA_ARCHITECTURES` (e.g. "50;60;70")
+a set of target CUDA architectures by setting `CMAKE_CUDA_ARCHITECTURES` (e.g. "35;37;50;60;70")
+
+To support GPUs older than Compute Capability 5.0, you will need to use an older version of
+the Driver from [Unix Driver Archive](https://www.nvidia.com/en-us/drivers/unix/) (tested with 470) and [CUDA Toolkit Archive](https://developer.nvidia.com/cuda-toolkit-archive) (tested with cuda V11). When you build Ollama, you will need to set two environment variable to adjust the minimum compute capability Ollama supports via `export GOFLAGS="'-ldflags=-w -s \"-X=github.com/ollama/ollama/gpu.CudaComputeMajorMin=3\" \"-X=github.com/ollama/ollama/gpu.CudaComputeMinorMin=5\"'"` and the `CMAKE_CUDA_ARCHITECTURES`. To find the Compute Capability of your older GPU, refer to [GPU Compute Capability](https://developer.nvidia.com/cuda-gpus).
+
Then generate dependencies:
diff --git a/docs/gpu.md b/docs/gpu.md
index a6b559f0..66627611 100644
--- a/docs/gpu.md
+++ b/docs/gpu.md
@@ -28,6 +28,7 @@ Check your compute compatibility to see if your card is supported:
| 5.0 | GeForce GTX | `GTX 750 Ti` `GTX 750` `NVS 810` |
| | Quadro | `K2200` `K1200` `K620` `M1200` `M520` `M5000M` `M4000M` `M3000M` `M2000M` `M1000M` `K620M` `M600M` `M500M` |
+For building locally to support older GPUs, see [developer.md](./development.md#linux-cuda-nvidia)
### GPU Selection
diff --git a/gpu/amd_linux.go b/gpu/amd_linux.go
index 6b08ac2e..768fb97a 100644
--- a/gpu/amd_linux.go
+++ b/gpu/amd_linux.go
@@ -159,7 +159,11 @@ func AMDGetGPUInfo() []GpuInfo {
return []GpuInfo{}
}
- if int(major) < RocmComputeMin {
+ minVer, err := strconv.Atoi(RocmComputeMajorMin)
+ if err != nil {
+ slog.Error("invalid RocmComputeMajorMin setting", "value", RocmComputeMajorMin, "error", err)
+ }
+ if int(major) < minVer {
slog.Warn(fmt.Sprintf("amdgpu too old gfx%d%x%x", major, minor, patch), "gpu", gpuID)
continue
}
diff --git a/gpu/gpu.go b/gpu/gpu.go
index 781e23df..60d68c33 100644
--- a/gpu/gpu.go
+++ b/gpu/gpu.go
@@ -16,6 +16,7 @@ import (
"os"
"path/filepath"
"runtime"
+ "strconv"
"strings"
"sync"
"unsafe"
@@ -38,9 +39,11 @@ const (
var gpuMutex sync.Mutex
// With our current CUDA compile flags, older than 5.0 will not work properly
-var CudaComputeMin = [2]C.int{5, 0}
+// (string values used to allow ldflags overrides at build time)
+var CudaComputeMajorMin = "5"
+var CudaComputeMinorMin = "0"
-var RocmComputeMin = 9
+var RocmComputeMajorMin = "9"
// TODO find a better way to detect iGPU instead of minimum memory
const IGPUMemLimit = 1 * format.GibiByte // 512G is what they typically report, so anything less than 1G must be iGPU
@@ -175,11 +178,57 @@ func GetGPUInfo() GpuInfoList {
var memInfo C.mem_info_t
resp := []GpuInfo{}
- // NVIDIA first
- for i := 0; i < gpuHandles.deviceCount; i++ {
- // TODO once we support CPU compilation variants of GPU libraries refine this...
- if cpuVariant == "" && runtime.GOARCH == "amd64" {
- continue
+ // Load ALL libraries
+ cHandles = initCudaHandles()
+ minMajorVer, err := strconv.Atoi(CudaComputeMajorMin)
+ if err != nil {
+ slog.Error("invalid CudaComputeMajorMin setting", "value", CudaComputeMajorMin, "error", err)
+ }
+ minMinorVer, err := strconv.Atoi(CudaComputeMinorMin)
+ if err != nil {
+ slog.Error("invalid CudaComputeMinorMin setting", "value", CudaComputeMinorMin, "error", err)
+ }
+
+ // NVIDIA
+ for i := range cHandles.deviceCount {
+ if cHandles.cudart != nil || cHandles.nvcuda != nil {
+ gpuInfo := CudaGPUInfo{
+ GpuInfo: GpuInfo{
+ Library: "cuda",
+ },
+ index: i,
+ }
+ var driverMajor int
+ var driverMinor int
+ if cHandles.cudart != nil {
+ C.cudart_bootstrap(*cHandles.cudart, C.int(i), &memInfo)
+ } else {
+ C.nvcuda_bootstrap(*cHandles.nvcuda, C.int(i), &memInfo)
+ driverMajor = int(cHandles.nvcuda.driver_major)
+ driverMinor = int(cHandles.nvcuda.driver_minor)
+ }
+ if memInfo.err != nil {
+ slog.Info("error looking up nvidia GPU memory", "error", C.GoString(memInfo.err))
+ C.free(unsafe.Pointer(memInfo.err))
+ continue
+ }
+
+ if int(memInfo.major) < minMajorVer || (int(memInfo.major) == minMajorVer && int(memInfo.minor) < minMinorVer) {
+ slog.Info(fmt.Sprintf("[%d] CUDA GPU is too old. Compute Capability detected: %d.%d", i, memInfo.major, memInfo.minor))
+ continue
+ }
+ gpuInfo.TotalMemory = uint64(memInfo.total)
+ gpuInfo.FreeMemory = uint64(memInfo.free)
+ gpuInfo.ID = C.GoString(&memInfo.gpu_id[0])
+ gpuInfo.Compute = fmt.Sprintf("%d.%d", memInfo.major, memInfo.minor)
+ gpuInfo.MinimumMemory = cudaMinimumMemory
+ gpuInfo.DependencyPath = depPath
+ gpuInfo.Name = C.GoString(&memInfo.gpu_name[0])
+ gpuInfo.DriverMajor = driverMajor
+ gpuInfo.DriverMinor = driverMinor
+
+ // TODO potentially sort on our own algorithm instead of what the underlying GPU library does...
+ cudaGPUs = append(cudaGPUs, gpuInfo)
}
gpuInfo := GpuInfo{
Library: "cuda",
diff --git a/scripts/build_docker.sh b/scripts/build_docker.sh
index e91c56ed..c03bc25f 100755
--- a/scripts/build_docker.sh
+++ b/scripts/build_docker.sh
@@ -3,7 +3,7 @@
set -eu
export VERSION=${VERSION:-$(git describe --tags --first-parent --abbrev=7 --long --dirty --always | sed -e "s/^v//g")}
-export GOFLAGS="'-ldflags=-w -s \"-X=github.com/ollama/ollama/version.Version=$VERSION\" \"-X=github.com/ollama/ollama/server.mode=release\"'"
+export GOFLAGS=${GOFLAGS:-"'-ldflags=-w -s \"-X=github.com/ollama/ollama/version.Version=$VERSION\" \"-X=github.com/ollama/ollama/server.mode=release\"'"}
# We use 2 different image repositories to handle combining architecture images into multiarch manifest
# (The ROCm image is x86 only and is not a multiarch manifest)
diff --git a/scripts/build_linux.sh b/scripts/build_linux.sh
index 27c4ff1f..e7e6d0dd 100755
--- a/scripts/build_linux.sh
+++ b/scripts/build_linux.sh
@@ -3,7 +3,7 @@
set -eu
export VERSION=${VERSION:-$(git describe --tags --first-parent --abbrev=7 --long --dirty --always | sed -e "s/^v//g")}
-export GOFLAGS="'-ldflags=-w -s \"-X=github.com/ollama/ollama/version.Version=$VERSION\" \"-X=github.com/ollama/ollama/server.mode=release\"'"
+export GOFLAGS=${GOFLAGS:-"'-ldflags=-w -s \"-X=github.com/ollama/ollama/version.Version=$VERSION\" \"-X=github.com/ollama/ollama/server.mode=release\"'"}
BUILD_ARCH=${BUILD_ARCH:-"amd64 arm64"}
export AMDGPU_TARGETS=${AMDGPU_TARGETS:=""}
--
2.46.0

View file

@ -1,26 +0,0 @@
From 2278389ef9ac9231349440aa68f9544ddc69cdc7 Mon Sep 17 00:00:00 2001
From: Raito Bezarius <masterancpp@gmail.com>
Date: Wed, 9 Oct 2024 13:37:08 +0200
Subject: [PATCH] fix: sm_37 for nvcc
Signed-off-by: Raito Bezarius <masterancpp@gmail.com>
---
Makefile | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/Makefile b/Makefile
index 2ccb750..70dfd9b 100644
--- a/Makefile
+++ b/Makefile
@@ -537,7 +537,7 @@ endif #GGML_CUDA_NVCC
ifdef CUDA_DOCKER_ARCH
MK_NVCCFLAGS += -Wno-deprecated-gpu-targets -arch=$(CUDA_DOCKER_ARCH)
else ifndef CUDA_POWER_ARCH
- MK_NVCCFLAGS += -arch=native
+ MK_NVCCFLAGS += -arch=sm_37
endif # CUDA_DOCKER_ARCH
ifdef GGML_CUDA_FORCE_DMMV
--
2.46.0

View file

@ -1,20 +0,0 @@
{
config,
pkgs,
meta,
name,
...
}:
{
services = {
ollama = {
enable = true;
host = meta.network.${name}.netbirdIp;
package = pkgs.callPackage ./package.nix {
cudaPackages = pkgs.cudaPackages_11;
# We need to thread our nvidia x11 driver for CUDA.
extraLibraries = [ config.hardware.nvidia.package ];
};
};
};
}

View file

@ -1,20 +0,0 @@
diff --git c/llm/generate/gen_common.sh i/llm/generate/gen_common.sh
index 3825c155..238a74a7 100644
--- c/llm/generate/gen_common.sh
+++ i/llm/generate/gen_common.sh
@@ -69,6 +69,7 @@ git_module_setup() {
}
apply_patches() {
+ return
# apply temporary patches until fix is upstream
for patch in ../patches/*.patch; do
git -c 'user.name=nobody' -c 'user.email=<>' -C ${LLAMACPP_DIR} am ${patch}
@@ -133,6 +134,7 @@ install() {
# Keep the local tree clean after we're done with the build
cleanup() {
+ return
(cd ${LLAMACPP_DIR}/ && git checkout CMakeLists.txt)
if [ -n "$(ls -A ../patches/*.diff)" ]; then

View file

@ -1,34 +0,0 @@
From 51568b61ef63ecd97867562571411082c32751d3 Mon Sep 17 00:00:00 2001
From: Raito Bezarius <masterancpp@gmail.com>
Date: Wed, 9 Oct 2024 13:36:51 +0200
Subject: [PATCH] fix: avx & f16c in Makefile
Signed-off-by: Raito Bezarius <masterancpp@gmail.com>
---
Makefile | 8 ++++----
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/Makefile b/Makefile
index 32b7cbb..2ccb750 100644
--- a/Makefile
+++ b/Makefile
@@ -361,12 +361,12 @@ ifndef RISCV
ifeq ($(UNAME_M),$(filter $(UNAME_M),x86_64 i686 amd64))
# Use all CPU extensions that are available:
- MK_CFLAGS += -march=native -mtune=native
- HOST_CXXFLAGS += -march=native -mtune=native
+ # MK_CFLAGS += -march=native -mtune=native
+ # HOST_CXXFLAGS += -march=native -mtune=native
# Usage AVX-only
- #MK_CFLAGS += -mfma -mf16c -mavx
- #MK_CXXFLAGS += -mfma -mf16c -mavx
+ MK_CFLAGS += -mf16c -mavx
+ MK_CXXFLAGS += -mf16c -mavx
# Usage SSSE3-only (Not is SSE3!)
#MK_CFLAGS += -mssse3
--
2.46.0

View file

@ -1,243 +0,0 @@
{
lib,
buildGoModule,
fetchFromGitHub,
buildEnv,
linkFarm,
overrideCC,
makeWrapper,
stdenv,
addDriverRunpath,
nix-update-script,
cmake,
gcc11,
clblast,
libdrm,
rocmPackages,
cudaPackages,
darwin,
autoAddDriverRunpath,
extraLibraries ? [ ],
nixosTests,
testers,
ollama,
ollama-rocm,
ollama-cuda,
config,
# one of `[ null false "rocm" "cuda" ]`
acceleration ? null,
}:
assert builtins.elem acceleration [
null
false
"rocm"
"cuda"
];
let
pname = "ollama";
version = "2024-09-10-cc35";
src = fetchFromGitHub {
owner = "aliotard";
repo = "ollama";
rev = "34827c01f7723c7f5f9f5e392fe85f5a4a5d5fc0";
hash = "sha256-xFNuqcW7YWeyCyw5QLBnCHHTSMITR6LJkJT0CXZC+Y8=";
fetchSubmodules = true;
};
vendorHash = "sha256-hSxcREAujhvzHVNwnRTfhi0MKI3s8HNavER2VLz6SYk=";
validateFallback = lib.warnIf (config.rocmSupport && config.cudaSupport) (lib.concatStrings [
"both `nixpkgs.config.rocmSupport` and `nixpkgs.config.cudaSupport` are enabled, "
"but they are mutually exclusive; falling back to cpu"
]) (!(config.rocmSupport && config.cudaSupport));
shouldEnable =
mode: fallback: (acceleration == mode) || (fallback && acceleration == null && validateFallback);
rocmRequested = shouldEnable "rocm" config.rocmSupport;
cudaRequested = shouldEnable "cuda" config.cudaSupport;
enableRocm = rocmRequested && stdenv.isLinux;
enableCuda = cudaRequested && stdenv.isLinux;
rocmLibs = [
rocmPackages.clr
rocmPackages.hipblas
rocmPackages.rocblas
rocmPackages.rocsolver
rocmPackages.rocsparse
rocmPackages.rocm-device-libs
rocmPackages.rocm-smi
];
rocmClang = linkFarm "rocm-clang" { llvm = rocmPackages.llvm.clang; };
rocmPath = buildEnv {
name = "rocm-path";
paths = rocmLibs ++ [ rocmClang ];
};
cudaLibs = [
cudaPackages.cuda_cudart
cudaPackages.libcublas
cudaPackages.cuda_cccl
];
cudaToolkit = buildEnv {
name = "cuda-merged";
paths = map lib.getLib cudaLibs ++ [
(lib.getOutput "static" cudaPackages.cuda_cudart)
(lib.getBin (cudaPackages.cuda_nvcc.__spliced.buildHost or cudaPackages.cuda_nvcc))
];
};
metalFrameworks = with darwin.apple_sdk_11_0.frameworks; [
Accelerate
Metal
MetalKit
MetalPerformanceShaders
];
wrapperOptions =
[
# ollama embeds llama-cpp binaries which actually run the ai models
# these llama-cpp binaries are unaffected by the ollama binary's DT_RUNPATH
# LD_LIBRARY_PATH is temporarily required to use the gpu
# until these llama-cpp binaries can have their runpath patched
"--suffix LD_LIBRARY_PATH : '${addDriverRunpath.driverLink}/lib'"
"--suffix LD_LIBRARY_PATH : '${lib.makeLibraryPath (map lib.getLib extraLibraries)}'"
]
++ lib.optionals enableRocm [
"--suffix LD_LIBRARY_PATH : '${rocmPath}/lib'"
"--set-default HIP_PATH '${rocmPath}'"
]
++ lib.optionals enableCuda [
"--suffix LD_LIBRARY_PATH : '${lib.makeLibraryPath (map lib.getLib cudaLibs)}'"
];
wrapperArgs = builtins.concatStringsSep " " wrapperOptions;
goBuild =
if enableCuda then buildGoModule.override { stdenv = overrideCC stdenv gcc11; } else buildGoModule;
inherit (lib) licenses platforms maintainers;
in
goBuild {
inherit
pname
version
src
vendorHash
;
env =
lib.optionalAttrs enableRocm {
ROCM_PATH = rocmPath;
CLBlast_DIR = "${clblast}/lib/cmake/CLBlast";
}
// lib.optionalAttrs enableCuda { CUDA_LIB_DIR = "${cudaToolkit}/lib"; }
// {
CMAKE_CUDA_ARCHITECTURES = "35;37";
};
nativeBuildInputs =
[ cmake ]
++ lib.optionals enableRocm [ rocmPackages.llvm.bintools ]
++ lib.optionals enableCuda [ cudaPackages.cuda_nvcc ]
++ lib.optionals (enableRocm || enableCuda) [
makeWrapper
autoAddDriverRunpath
]
++ lib.optionals stdenv.isDarwin metalFrameworks;
buildInputs =
lib.optionals enableRocm (rocmLibs ++ [ libdrm ])
++ lib.optionals enableCuda cudaLibs
++ lib.optionals stdenv.isDarwin metalFrameworks;
patches = [
# disable uses of `git` in the `go generate` script
# ollama's build script assumes the source is a git repo, but nix removes the git directory
# this also disables necessary patches contained in `ollama/llm/patches/`
# those patches are applied in `postPatch`
./disable-git.patch
];
postPatch = ''
# replace inaccurate version number with actual release version
substituteInPlace version/version.go --replace-fail 0.0.0 '${version}'
# apply ollama's patches to `llama.cpp` submodule
for diff in llm/patches/*; do
patch -p1 -d llm/llama.cpp < $diff
done
'';
overrideModAttrs = _: _: {
# don't run llama.cpp build in the module fetch phase
preBuild = "";
};
preBuild = ''
# disable uses of `git`, since nix removes the git directory
export OLLAMA_SKIP_PATCHING=true
# build llama.cpp libraries for ollama
go generate ./...
'';
postFixup =
''
# the app doesn't appear functional at the moment, so hide it
mv "$out/bin/app" "$out/bin/.ollama-app"
''
+ lib.optionalString (enableRocm || enableCuda) ''
# expose runtime libraries necessary to use the gpu
wrapProgram "$out/bin/ollama" ${wrapperArgs}
'';
ldflags = [
"-s"
"-w"
"-X=github.com/ollama/ollama/version.Version=${version}"
"-X=github.com/ollama/ollama/server.mode=release"
"-X=github.com/ollama/ollama/gpu.CudaComputeMajorMin=3"
"-X=github.com/ollama/ollama/gpu.CudaComputeMinorMin=5"
];
passthru = {
tests =
{
inherit ollama;
version = testers.testVersion {
inherit version;
package = ollama;
};
}
// lib.optionalAttrs stdenv.isLinux {
inherit ollama-rocm ollama-cuda;
service = nixosTests.ollama;
service-cuda = nixosTests.ollama-cuda;
service-rocm = nixosTests.ollama-rocm;
};
updateScript = nix-update-script { };
};
meta = {
description =
"Get up and running with large language models locally"
+ lib.optionalString rocmRequested ", using ROCm for AMD GPU acceleration"
+ lib.optionalString cudaRequested ", using CUDA for NVIDIA GPU acceleration";
homepage = "https://github.com/ollama/ollama";
changelog = "https://github.com/ollama/ollama/releases/tag/v${version}";
license = licenses.mit;
platforms = if (rocmRequested || cudaRequested) then platforms.linux else platforms.unix;
mainProgram = "ollama";
maintainers = with maintainers; [
abysssol
dit7ya
elohmeier
roydubnium
];
};
}

View file

@ -1,12 +0,0 @@
{ sources, ... }:
let
proxmox-nixos = import sources.proxmox-nixos;
in
{
imports = [ proxmox-nixos.nixosModules.proxmox-ve ];
services.proxmox-ve = {
enable = true;
openFirewall = false;
};
nixpkgs.overlays = [ proxmox-nixos.overlays.x86_64-linux ];
}

View file

@ -1,3 +0,0 @@
(import ../../../keys).mkSecrets [ "krz01" ] [
# List of secrets for krz01
]

View file

@ -1,26 +0,0 @@
From 2278389ef9ac9231349440aa68f9544ddc69cdc7 Mon Sep 17 00:00:00 2001
From: Raito Bezarius <masterancpp@gmail.com>
Date: Wed, 9 Oct 2024 13:37:08 +0200
Subject: [PATCH] fix: sm_37 for nvcc
Signed-off-by: Raito Bezarius <masterancpp@gmail.com>
---
Makefile | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/Makefile b/Makefile
index 2ccb750..70dfd9b 100644
--- a/Makefile
+++ b/Makefile
@@ -537,7 +537,7 @@ endif #GGML_CUDA_NVCC
ifdef CUDA_DOCKER_ARCH
MK_NVCCFLAGS += -Wno-deprecated-gpu-targets -arch=$(CUDA_DOCKER_ARCH)
else ifndef CUDA_POWER_ARCH
- MK_NVCCFLAGS += -arch=native
+ MK_NVCCFLAGS += -arch=sm_37
endif # CUDA_DOCKER_ARCH
ifdef GGML_CUDA_FORCE_DMMV
--
2.46.0

View file

@ -1,25 +0,0 @@
{ pkgs, ... }:
{
environment.systemPackages = [
((pkgs.openai-whisper-cpp.override { cudaPackages = pkgs.cudaPackages_11; }).overrideAttrs (old: {
src = pkgs.fetchFromGitHub {
owner = "ggerganov";
repo = "whisper.cpp";
rev = "v1.7.1";
hash = "sha256-EDFUVjud79ZRCzGbOh9L9NcXfN3ikvsqkVSOME9F9oo=";
};
env = {
WHISPER_CUBLAS = "";
GGML_CUDA = "1";
};
# We only need Compute Capability 3.7.
CUDA_ARCH_FLAGS = [ "sm_37" ];
# We are GPU-only anyway.
patches = (old.patches or [ ]) ++ [
./no-weird-microarch.patch
./all-nvcc-arch.patch
];
}))
];
}

View file

@ -1,34 +0,0 @@
From 51568b61ef63ecd97867562571411082c32751d3 Mon Sep 17 00:00:00 2001
From: Raito Bezarius <masterancpp@gmail.com>
Date: Wed, 9 Oct 2024 13:36:51 +0200
Subject: [PATCH] fix: avx & f16c in Makefile
Signed-off-by: Raito Bezarius <masterancpp@gmail.com>
---
Makefile | 8 ++++----
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/Makefile b/Makefile
index 32b7cbb..2ccb750 100644
--- a/Makefile
+++ b/Makefile
@@ -361,12 +361,12 @@ ifndef RISCV
ifeq ($(UNAME_M),$(filter $(UNAME_M),x86_64 i686 amd64))
# Use all CPU extensions that are available:
- MK_CFLAGS += -march=native -mtune=native
- HOST_CXXFLAGS += -march=native -mtune=native
+ # MK_CFLAGS += -march=native -mtune=native
+ # HOST_CXXFLAGS += -march=native -mtune=native
# Usage AVX-only
- #MK_CFLAGS += -mfma -mf16c -mavx
- #MK_CXXFLAGS += -mfma -mf16c -mavx
+ MK_CFLAGS += -mf16c -mavx
+ MK_CXXFLAGS += -mf16c -mavx
# Usage SSSE3-only (Not is SSE3!)
#MK_CFLAGS += -mssse3
--
2.46.0

View file

@ -46,6 +46,16 @@ let
accepted_statuscodes = [ "401" ];
};
"ollama01.beta.dgnum.eu" = {
type = mkForce "http";
accepted_statuscodes = [ "401" ];
};
"s3-admin.dgnum.eu" = {
type = mkForce "http";
accepted_statuscodes = [ "400" ];
};
"api.meet.dgnum.eu" = {
keyword = "Crab Fit API";
};

View file

@ -18,6 +18,7 @@ lib.extra.mkConfig {
"peertube"
"prometheus"
"redirections"
"victoria-metrics"
];
extraConfig = {

View file

@ -1,10 +1,4 @@
{
config,
pkgs,
nixpkgs,
sources,
...
}:
{ config, pkgs, ... }:
let
url = "https://git.dgnum.eu";
@ -30,8 +24,6 @@ let
options = "--cpus=4";
};
};
nix-pkgs = import sources.nix-pkgs { inherit pkgs; };
in
{
services.forgejo-nix-runners = {
@ -39,14 +31,12 @@ in
inherit url;
storePath = "/data/slow/nix";
storePath = "/data/slow";
tokenFile = config.age.secrets."forgejo_runners-token_file".path;
dependencies = [
nix-pkgs.colmena
pkgs.npins
pkgs.tea
nixpkgs.unstable.nixfmt-rfc-style
];
containerOptions = [ "--cpus=4" ];

View file

@ -31,6 +31,7 @@ in
admin = {
DEFAULT_EMAIL_NOTIFICATIONS = "enabled";
SEND_NOTIFICATION_EMAIL_ON_NEW_USER = true;
};
log.LEVEL = "Warn";
@ -60,6 +61,7 @@ in
service = {
EMAIL_DOMAIN_ALLOWLIST = "dgnum.eu,*";
EMAIL_DOMAIN_BLOCKLIST = "*.shop,*.online,*.store";
ENABLE_NOTIFY_MAIL = true;
DISABLE_REGISTRATION = false;

View file

@ -0,0 +1,82 @@
{
config,
lib,
nixpkgs,
...
}:
let
domain = "netbird.dgnum.eu";
s = name: config.age.secrets.${name}.path;
in
{
services = {
netbird.server = {
enable = true;
package = nixpkgs.unstable.netbird;
inherit domain;
enableNginx = true;
coturn.enable = lib.mkForce false;
relay = {
environmentFile = s "netbird-relay_environment_file";
metricsPort = 9094;
};
dashboard = {
settings = {
AUTH_AUTHORITY = "https://sso.dgnum.eu/oauth2/openid/dgn_netbird";
AUTH_AUDIENCE = "dgn_netbird";
AUTH_CLIENT_ID = "dgn_netbird";
};
};
management = {
oidcConfigEndpoint = "https://sso.dgnum.eu/oauth2/openid/dgn_netbird/.well-known/openid-configuration";
dnsDomain = "dgnum";
metricsPort = 9092;
settings = {
DataStoreEncryptionKey._secret = s "netbird-data_store_encryption_key_file";
PKCEAuthorizationFlow.ProviderConfig = {
Audience = "dgn_netbird";
ClientID = "dgn_netbird";
AuthorizationEndpoint = "https://sso.dgnum.eu/ui/oauth2";
TokenEndpoint = "https://sso.dgnum.eu/oauth2/token";
};
IdpManagerConfig.ClientConfig.ClientID = "dgn_netbird";
DeviceAuthorizationFlow = {
Provider = "none";
ProviderConfig = {
Audience = "dgn_netbird";
ClientID = "dgn_netbird";
};
};
Relay = {
Addresses = [ "rels://${domain}:443" ];
CredentialsTTL = "24h";
Secret._secret = s "netbird-relay_secret_file";
};
};
};
};
nginx.virtualHosts.${domain} = {
enableACME = true;
forceSSL = true;
};
};
dgn-backups.jobs.netbird.settings.paths = [ "/var/lib/netbird-mgmt" ];
}
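This new file switches to the services.netbird.server module (with the netbird package taken from nixpkgs.unstable) and replaces the custom netbird-server module and its old machine configuration, both removed below; the dgn-backups path stays the same.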

View file

@ -1,47 +0,0 @@
{ config, ... }:
let
domain = "netbird.dgnum.eu";
in
{
imports = [ ./module.nix ];
services.netbird-server = {
enable = true;
logLevel = "DEBUG";
enableDeviceAuthorizationFlow = false;
enableNginx = true;
enableCoturn = true;
setupAutoOidc = true;
management.dnsDomain = "dgnum";
secretFiles.AUTH_CLIENT_SECRET = config.age.secrets."netbird-auth_client_secret_file".path;
settings = {
NETBIRD_DOMAIN = domain;
TURN_PASSWORD = "tototest1234";
NETBIRD_AUTH_OIDC_CONFIGURATION_ENDPOINT = "https://sso.dgnum.eu/oauth2/openid/netbird_dgn/.well-known/openid-configuration";
NETBIRD_AUTH_PKCE_USE_ID_TOKEN = true;
NETBIRD_AUTH_AUDIENCE = "netbird_dgn";
NETBIRD_AUTH_CLIENT_ID = "netbird_dgn";
NETBIRD_AUTH_USER_ID_CLAIM = "sub";
# Updates the preference to use id tokens instead of access token on dashboard
# Okta and Gitlab IDPs can benefit from this
NETBIRD_TOKEN_SOURCE = "idToken";
# NETBIRD_AUTH_PKCE_REDIRECT_URLS = builtins.map (p: "http://localhost:${p}") [
# "53000"
# "54000"
# ];
NETBIRD_STORE_CONFIG_ENGINE = "sqlite";
};
};
dgn-backups.jobs.netbird.settings.paths = [ "/var/lib/netbird-mgmt" ];
}

View file

@ -1,643 +0,0 @@
{
config,
lib,
pkgs,
...
}:
let
inherit (lib)
filterAttrs
literalExpression
maintainers
mkDefault
mkEnableOption
mkIf
mkMerge
mkOption
optionalAttrs
optionalString
optionals
types
;
inherit ((import ./package { inherit pkgs; })) dashboard;
cfg = config.services.netbird-server;
stateDir = "/var/lib/netbird-mgmt";
settingsFormat = pkgs.formats.keyValue { };
managementFormat = pkgs.formats.json { };
settingsFile = settingsFormat.generate "setup.env" (
builtins.mapAttrs (
_: val: if builtins.isList val then ''"${builtins.concatStringsSep " " val}"'' else val
) settings
);
managementFile = managementFormat.generate "config.json" cfg.managementConfig;
settings =
rec {
TURN_DOMAIN = cfg.settings.NETBIRD_DOMAIN;
TURN_PORT = 3478;
TURN_USER = "netbird";
TURN_MIN_PORT = 49152;
TURN_MAX_PORT = 65535;
TURN_PASSWORD = if cfg.secretFiles.TURN_PASSWORD != null then "$TURN_PASSWORD" else null;
TURN_SECRET = if cfg.secretFiles.TURN_SECRET != null then "$TURN_SECRET" else "secret";
STUN_USERNAME = "";
STUN_PASSWORD = if cfg.secretFiles.STUN_PASSWORD != null then "$STUN_PASSWORD" else null;
NETBIRD_DASHBOARD_ENDPOINT = "https://${cfg.settings.NETBIRD_DOMAIN}:443";
NETBIRD_MGMT_API_ENDPOINT = "https://${cfg.settings.NETBIRD_DOMAIN}:${
builtins.toString cfg.settings.NETBIRD_MGMT_API_PORT or NETBIRD_MGMT_API_PORT
}";
NETBIRD_SIGNAL_ENDPOINT = "https://${cfg.settings.NETBIRD_DOMAIN}:${
builtins.toString cfg.settings.NETBIRD_SIGNAL_PORT or NETBIRD_SIGNAL_PORT
}";
NETBIRD_SIGNAL_PROTOCOL = "https";
NETBIRD_SIGNAL_PORT = 443;
NETBIRD_AUTH_USER_ID_CLAIM = "sub";
NETBIRD_AUTH_CLIENT_SECRET =
if cfg.secretFiles.AUTH_CLIENT_SECRET != null then "$AUTH_CLIENT_SECRET" else "";
NETBIRD_AUTH_SUPPORTED_SCOPES = [
"openid"
"profile"
"email"
"offline_access"
"api"
];
NETBIRD_AUTH_REDIRECT_URI = "";
NETBIRD_AUTH_SILENT_REDIRECT_URI = "";
NETBIRD_AUTH_DEVICE_AUTH_PROVIDER = "none";
NETBIRD_AUTH_DEVICE_AUTH_CLIENT_ID = cfg.settings.NETBIRD_AUTH_CLIENT_ID;
NETBIRD_AUTH_DEVICE_AUTH_AUDIENCE = cfg.settings.NETBIRD_AUTH_AUDIENCE;
NETBIRD_AUTH_DEVICE_AUTH_SCOPE = [
"openid"
"profile"
"email"
"offline_access"
"api"
];
NETBIRD_AUTH_DEVICE_AUTH_USE_ID_TOKEN = false;
NETBIRD_MGMT_API_PORT = 443;
NETBIRD_MGMT_IDP = "none";
NETBIRD_IDP_MGMT_CLIENT_ID = cfg.settings.NETBIRD_AUTH_CLIENT_ID;
NETBIRD_IDP_MGMT_CLIENT_SECRET =
if cfg.secretFiles.IDP_MGMT_CLIENT_SECRET != null then
"$IDP_MGMT_CLIENT_SECRET"
else
cfg.settings.NETBIRD_AUTH_CLIENT_SECRET;
NETBIRD_IDP_MGMT_GRANT_TYPE = "client_credentials";
NETBIRD_TOKEN_SOURCE = "accessToken";
NETBIRD_DRAG_QUERY_PARAMS = false;
NETBIRD_USE_AUTH0 = false;
NETBIRD_AUTH_DEVICE_AUTH_ENDPOINT = "";
NETBIRD_AUTH_PKCE_REDIRECT_URL_PORTS = [ "53000" ];
NETBIRD_AUTH_PKCE_REDIRECT_URLS = builtins.map (
p: "http://localhost:${p}"
) cfg.settings.NETBIRD_AUTH_PKCE_REDIRECT_URL_PORTS or NETBIRD_AUTH_PKCE_REDIRECT_URL_PORTS;
}
// (optionalAttrs cfg.setupAutoOidc {
NETBIRD_AUTH_PKCE_AUTHORIZATION_ENDPOINT = "$NETBIRD_AUTH_PKCE_AUTHORIZATION_ENDPOINT";
NETBIRD_AUTH_DEVICE_AUTH_ENDPOINT = "$NETBIRD_AUTH_DEVICE_AUTH_ENDPOINT";
NETBIRD_AUTH_TOKEN_ENDPOINT = "$NETBIRD_AUTH_TOKEN_ENDPOINT";
NETBIRD_AUTH_JWT_CERTS = "$NETBIRD_AUTH_JWT_CERTS";
NETBIRD_AUTH_AUTHORITY = "$NETBIRD_AUTH_AUTHORITY";
})
// cfg.settings;
in
{
meta = {
maintainers = with maintainers; [ thubrecht ];
};
options.services.netbird-server = {
enable = mkEnableOption (lib.mdDoc "netbird management service.");
package = mkOption {
type = types.package;
default = pkgs.netbird;
defaultText = literalExpression "pkgs.netbird";
description = lib.mdDoc "The package to use for netbird";
};
settings = mkOption {
type =
with types;
attrsOf (
nullOr (oneOf [
(listOf str)
bool
int
float
str
])
);
defaultText = lib.literalExpression ''
{
TURN_DOMAIN = cfg.settings.NETBIRD_DOMAIN;
TURN_PORT = 3478;
TURN_USER = "netbird";
TURN_MIN_PORT = 49152;
TURN_MAX_PORT = 65535;
TURN_PASSWORD = if cfg.secretFiles.TURN_PASSWORD != null then "$TURN_PASSWORD" else null;
TURN_SECRET = if cfg.secretFiles.TURN_SECRET != null then "$TURN_SECRET" else "secret";
STUN_USERNAME = "";
STUN_PASSWORD = if cfg.secretFiles.STUN_PASSWORD != null then "$STUN_PASSWORD" else null;
NETBIRD_DASHBOARD_ENDPOINT = "https://''${cfg.settings.NETBIRD_DOMAIN}:443";
NETBIRD_MGMT_API_ENDPOINT = "https://''${cfg.settings.NETBIRD_DOMAIN}:''${builtins.toString cfg.settings.NETBIRD_MGMT_API_PORT or NETBIRD_MGMT_API_PORT}";
NETBIRD_SIGNAL_ENDPOINT = "https://''${cfg.settings.NETBIRD_DOMAIN}:''${builtins.toString cfg.settings.NETBIRD_SIGNAL_PORT or NETBIRD_SIGNAL_PORT}";
NETBIRD_SIGNAL_PROTOCOL = "https";
NETBIRD_SIGNAL_PORT = 443;
NETBIRD_AUTH_USER_ID_CLAIM = "sub";
NETBIRD_AUTH_CLIENT_SECRET = if cfg.secretFiles.AUTH_CLIENT_SECRET != null then "$AUTH_CLIENT_SECRET" else "";
NETBIRD_AUTH_SUPPORTED_SCOPES = [ "openid" "profile" "email" "offline_access" "api" ];
NETBIRD_AUTH_REDIRECT_URI = "";
NETBIRD_AUTH_SILENT_REDIRECT_URI = "";
NETBIRD_AUTH_DEVICE_AUTH_PROVIDER = "none";
NETBIRD_AUTH_DEVICE_AUTH_CLIENT_ID = cfg.settings.NETBIRD_AUTH_CLIENT_ID;
NETBIRD_AUTH_DEVICE_AUTH_AUDIENCE = cfg.settings.NETBIRD_AUTH_AUDIENCE;
NETBIRD_AUTH_DEVICE_AUTH_SCOPE = [ "openid" "profile" "email" "offline_access" "api" ];
NETBIRD_AUTH_DEVICE_AUTH_USE_ID_TOKEN = false;
NETBIRD_MGMT_API_PORT = 443;
NETBIRD_MGMT_IDP = "none";
NETBIRD_IDP_MGMT_CLIENT_ID = cfg.settings.NETBIRD_AUTH_CLIENT_ID;
NETBIRD_IDP_MGMT_CLIENT_SECRET = if cfg.secretFiles.IDP_MGMT_CLIENT_SECRET != null then "$IDP_MGMT_CLIENT_SECRET" else cfg.settings.NETBIRD_AUTH_CLIENT_SECRET;
NETBIRD_IDP_MGMT_GRANT_TYPE = "client_credentials";
NETBIRD_TOKEN_SOURCE = "accessToken";
NETBIRD_DRAG_QUERY_PARAMS = false;
NETBIRD_USE_AUTH0 = false;
NETBIRD_AUTH_DEVICE_AUTH_ENDPOINT = "";
NETBIRD_AUTH_PKCE_REDIRECT_URL_PORTS = [ "53000" ];
NETBIRD_AUTH_PKCE_REDIRECT_URLS = builtins.map (p: "http://localhost:''${p}") cfg.settings.NETBIRD_AUTH_PKCE_REDIRECT_URL_PORTS or NETBIRD_AUTH_PKCE_REDIRECT_URL_PORTS;
}
'';
description = lib.mdDoc ''
Configuration settings for netbird.
Example config values can be found in [setup.env.example](https://github.com/netbirdio/netbird/blob/main/infrastructure_files/setup.env.example)
List of strings [ a b ] will be concatenated as "a b", useful for setting the supported scopes.
'';
};
managementConfig = mkOption {
inherit (managementFormat) type;
description = lib.mdDoc "Configuration of the netbird management server.";
};
idpManagerExtraConfig = mkOption {
type = types.attrsOf types.str;
default = { };
description = lib.mdDoc "Extra options passed to the IdpManagerConfig.";
};
ports.management = mkOption {
type = types.port;
default = 8011;
description = lib.mdDoc "Internal port of the management server.";
};
ports.signal = mkOption {
type = types.port;
default = 8012;
description = lib.mdDoc "Internal port of the signal server.";
};
logLevel = mkOption {
type = types.enum [
"ERROR"
"WARN"
"INFO"
"DEBUG"
];
default = "INFO";
description = lib.mdDoc "Log level of the netbird services.";
};
enableDeviceAuthorizationFlow = mkEnableOption "device authorization flow for netbird." // {
default = true;
};
enableNginx = mkEnableOption "NGINX reverse-proxy for the netbird server.";
enableCoturn = mkEnableOption "a Coturn server used for Netbird.";
setupAutoOidc = mkEnableOption "the automatic setup of the OIDC.";
management = {
dnsDomain = mkOption {
type = types.str;
default = "netbird.selfhosted";
description = lib.mdDoc "Domain used for peer resolution.";
};
singleAccountModeDomain = mkOption {
type = types.str;
default = "netbird.selfhosted";
description = lib.mdDoc ''
Enables single account mode.
This means that all users will be grouped under the same account, identified by the specified domain.
If the installation already has more than one account, this property has no effect.
'';
};
disableAnonymousMetrics = mkOption {
type = types.bool;
default = true;
description = lib.mdDoc "Disables push of anonymous usage metrics to NetBird.";
};
disableSingleAccountMode = mkOption {
type = types.bool;
default = false;
description = lib.mdDoc ''
If set to true, disables single account mode.
The `singleAccountModeDomain` property will be ignored and every new user will have a separate NetBird account.
'';
};
};
secretFiles = {
TURN_PASSWORD = mkOption {
type = with types; nullOr path;
default = null;
description = lib.mdDoc "Path to a file containing the secret TURN_PASSWORD.";
};
TURN_SECRET = mkOption {
type = with types; nullOr path;
default = null;
description = lib.mdDoc "Path to a file containing the secret TURN_SECRET.";
};
STUN_PASSWORD = mkOption {
type = with types; nullOr path;
default = null;
description = lib.mdDoc "Path to a file containing the secret STUN_PASSWORD.";
};
AUTH_CLIENT_SECRET = mkOption {
type = with types; nullOr path;
default = null;
description = lib.mdDoc "Path to a file containing the secret NETBIRD_AUTH_CLIENT_SECRET.";
};
IDP_MGMT_CLIENT_SECRET = mkOption {
type = with types; nullOr path;
default = cfg.secretFiles.AUTH_CLIENT_SECRET;
defaultText = lib.literalExpression "cfg.secretFiles.AUTH_CLIENT_SECRET;";
description = lib.mdDoc "Path to a file containing the secret NETBIRD_IDP_MGMT_CLIENT_SECRET.";
};
};
};
config = mkMerge [
(mkIf cfg.enable {
services.netbird-server.managementConfig = with settings; {
Stuns = mkDefault [
{
Proto = "udp";
URI = "stun:${TURN_DOMAIN}:${builtins.toString TURN_PORT}";
Username = STUN_USERNAME;
Password = STUN_PASSWORD;
}
];
TURNConfig = {
Turns = [
{
Proto = "udp";
URI = "turn:${TURN_DOMAIN}:${builtins.toString TURN_PORT}";
Username = TURN_USER;
Password = TURN_PASSWORD;
}
];
CredentialsTTL = "12h";
Secret = TURN_SECRET;
TimeBasedCredentials = false;
};
Signal = {
Proto = NETBIRD_SIGNAL_PROTOCOL;
URI = "${NETBIRD_DOMAIN}:${builtins.toString NETBIRD_SIGNAL_PORT}";
Username = "";
Password = null;
};
Datadir = "${stateDir}/data";
HttpConfig = {
Address = "127.0.0.1:${builtins.toString cfg.ports.management}";
AuthIssuer = NETBIRD_AUTH_AUTHORITY;
AuthAudience = NETBIRD_AUTH_AUDIENCE;
AuthKeysLocation = NETBIRD_AUTH_JWT_CERTS;
AuthUserIDClaim = NETBIRD_AUTH_USER_ID_CLAIM;
OIDCConfigEndpoint = NETBIRD_AUTH_OIDC_CONFIGURATION_ENDPOINT;
};
IdpManagerConfig = {
ManagerType = NETBIRD_MGMT_IDP;
ClientConfig = {
Issuer = NETBIRD_AUTH_AUTHORITY;
TokenEndpoint = NETBIRD_AUTH_TOKEN_ENDPOINT;
ClientID = NETBIRD_IDP_MGMT_CLIENT_ID;
ClientSecret = NETBIRD_IDP_MGMT_CLIENT_SECRET;
GrantType = NETBIRD_IDP_MGMT_GRANT_TYPE;
};
ExtraConfig = cfg.idpManagerExtraConfig;
};
DeviceAuthorizationFlow = mkIf cfg.enableDeviceAuthorizationFlow {
Provider = NETBIRD_AUTH_DEVICE_AUTH_PROVIDER;
ProviderConfig = {
Audience = NETBIRD_AUTH_DEVICE_AUTH_AUDIENCE;
Domain = NETBIRD_AUTH_AUTHORITY;
ClientID = NETBIRD_AUTH_DEVICE_AUTH_CLIENT_ID;
TokenEndpoint = NETBIRD_AUTH_TOKEN_ENDPOINT;
DeviceAuthEndpoint = NETBIRD_AUTH_DEVICE_AUTH_ENDPOINT;
Scope = builtins.concatStringsSep " " NETBIRD_AUTH_DEVICE_AUTH_SCOPE;
UseIDToken = NETBIRD_AUTH_DEVICE_AUTH_USE_ID_TOKEN;
};
};
PKCEAuthorizationFlow = {
ProviderConfig = {
Audience = NETBIRD_AUTH_AUDIENCE;
ClientID = NETBIRD_AUTH_CLIENT_ID;
ClientSecret = NETBIRD_AUTH_CLIENT_SECRET;
AuthorizationEndpoint = NETBIRD_AUTH_PKCE_AUTHORIZATION_ENDPOINT;
TokenEndpoint = NETBIRD_AUTH_TOKEN_ENDPOINT;
Scope = builtins.concatStringsSep " " NETBIRD_AUTH_SUPPORTED_SCOPES;
RedirectURLs = NETBIRD_AUTH_PKCE_REDIRECT_URLS;
UseIDToken = NETBIRD_AUTH_PKCE_USE_ID_TOKEN;
};
};
};
services.nginx.virtualHosts = mkIf cfg.enableNginx {
${cfg.settings.NETBIRD_DOMAIN} = {
forceSSL = true;
enableACME = true;
locations = {
"/" = {
root = "${stateDir}/web-ui/";
tryFiles = "$uri /index.html";
};
"/signalexchange.SignalExchange/".extraConfig = ''
grpc_pass grpc://localhost:${builtins.toString cfg.ports.signal};
grpc_read_timeout 1d;
grpc_send_timeout 1d;
grpc_socket_keepalive on;
'';
"/api".proxyPass = "http://localhost:${builtins.toString cfg.ports.management}";
"/management.ManagementService/".extraConfig = ''
grpc_pass grpc://localhost:${builtins.toString cfg.ports.management};
grpc_read_timeout 1d;
grpc_send_timeout 1d;
grpc_socket_keepalive on;
'';
};
};
};
systemd.services = {
netbird-setup = {
wantedBy = [
"netbird-management.service"
"netbird-signal.service"
"multi-user.target"
];
serviceConfig = {
Type = "oneshot";
RuntimeDirectory = "netbird-mgmt";
StateDirectory = "netbird-mgmt";
WorkingDirectory = stateDir;
EnvironmentFile = [ settingsFile ];
};
unitConfig = {
StartLimitInterval = 5;
StartLimitBurst = 10;
};
path =
(with pkgs; [
coreutils
findutils
gettext
gnused
])
++ (optionals cfg.setupAutoOidc (
with pkgs;
[
curl
jq
]
));
script =
''
cp ${managementFile} ${stateDir}/management.json.copy
''
+ (optionalString cfg.setupAutoOidc ''
mv ${stateDir}/management.json.copy ${stateDir}/management.json
echo "loading OpenID configuration from $NETBIRD_AUTH_OIDC_CONFIGURATION_ENDPOINT to the openid-configuration.json file"
curl "$NETBIRD_AUTH_OIDC_CONFIGURATION_ENDPOINT" -q -o ${stateDir}/openid-configuration.json
export NETBIRD_AUTH_AUTHORITY=$(jq -r '.issuer' ${stateDir}/openid-configuration.json)
export NETBIRD_AUTH_JWT_CERTS=$(jq -r '.jwks_uri' ${stateDir}/openid-configuration.json)
export NETBIRD_AUTH_TOKEN_ENDPOINT=$(jq -r '.token_endpoint' ${stateDir}/openid-configuration.json)
export NETBIRD_AUTH_DEVICE_AUTH_ENDPOINT=$(jq -r '.device_authorization_endpoint' ${stateDir}/openid-configuration.json)
export NETBIRD_AUTH_PKCE_AUTHORIZATION_ENDPOINT=$(jq -r '.authorization_endpoint' ${stateDir}/openid-configuration.json)
envsubst '$NETBIRD_AUTH_AUTHORITY $NETBIRD_AUTH_JWT_CERTS $NETBIRD_AUTH_TOKEN_ENDPOINT $NETBIRD_AUTH_DEVICE_AUTH_ENDPOINT $NETBIRD_AUTH_PKCE_AUTHORIZATION_ENDPOINT' < ${stateDir}/management.json > ${stateDir}/management.json.copy
'')
+ ''
# Update secrets in management.json
${builtins.concatStringsSep "\n" (
builtins.attrValues (
builtins.mapAttrs (name: path: "export ${name}=$(cat ${path})") (
filterAttrs (_: p: p != null) cfg.secretFiles
)
)
)}
envsubst '$TURN_PASSWORD $TURN_SECRET $STUN_PASSWORD $AUTH_CLIENT_SECRET $IDP_MGMT_CLIENT_SECRET' < ${stateDir}/management.json.copy > ${stateDir}/management.json
rm -rf ${stateDir}/web-ui
mkdir -p ${stateDir}/web-ui
cp -R ${dashboard}/* ${stateDir}/web-ui
export AUTH_AUTHORITY="$NETBIRD_AUTH_AUTHORITY"
export AUTH_CLIENT_ID="$NETBIRD_AUTH_CLIENT_ID"
${optionalString (
cfg.secretFiles.AUTH_CLIENT_SECRET == null
) ''export AUTH_CLIENT_SECRET="$NETBIRD_AUTH_CLIENT_SECRET"''}
export AUTH_AUDIENCE="$NETBIRD_AUTH_AUDIENCE"
export AUTH_REDIRECT_URI="$NETBIRD_AUTH_REDIRECT_URI"
export AUTH_SILENT_REDIRECT_URI="$NETBIRD_AUTH_SILENT_REDIRECT_URI"
export USE_AUTH0="$NETBIRD_USE_AUTH0"
export AUTH_SUPPORTED_SCOPES=$(echo $NETBIRD_AUTH_SUPPORTED_SCOPES | sed -E 's/"//g')
export NETBIRD_MGMT_API_ENDPOINT=$(echo $NETBIRD_MGMT_API_ENDPOINT | sed -E 's/(:80|:443)$//')
MAIN_JS=$(find ${stateDir}/web-ui/static/js/main.*js)
OIDC_TRUSTED_DOMAINS=${stateDir}/web-ui/OidcTrustedDomains.js
mv "$MAIN_JS" "$MAIN_JS".copy
envsubst '$USE_AUTH0 $AUTH_AUTHORITY $AUTH_CLIENT_ID $AUTH_CLIENT_SECRET $AUTH_SUPPORTED_SCOPES $AUTH_AUDIENCE $NETBIRD_MGMT_API_ENDPOINT $NETBIRD_MGMT_GRPC_API_ENDPOINT $NETBIRD_HOTJAR_TRACK_ID $AUTH_REDIRECT_URI $AUTH_SILENT_REDIRECT_URI $NETBIRD_TOKEN_SOURCE $NETBIRD_DRAG_QUERY_PARAMS' < "$MAIN_JS".copy > "$MAIN_JS"
envsubst '$NETBIRD_MGMT_API_ENDPOINT' < "$OIDC_TRUSTED_DOMAINS".tmpl > "$OIDC_TRUSTED_DOMAINS"
'';
};
netbird-signal = {
after = [ "network.target" ];
wantedBy = [ "netbird-management.service" ];
restartTriggers = [
settingsFile
managementFile
];
serviceConfig = {
ExecStart = ''
${cfg.package}/bin/netbird-signal run \
--port ${builtins.toString cfg.ports.signal} \
--log-file console \
--log-level ${cfg.logLevel}
'';
Restart = "always";
RuntimeDirectory = "netbird-mgmt";
StateDirectory = "netbird-mgmt";
WorkingDirectory = stateDir;
};
unitConfig = {
StartLimitInterval = 5;
StartLimitBurst = 10;
};
stopIfChanged = false;
};
netbird-management = {
description = "The management server for Netbird, a wireguard VPN";
documentation = [ "https://netbird.io/docs/" ];
after = [
"network.target"
"netbird-setup.service"
];
wantedBy = [ "multi-user.target" ];
wants = [
"netbird-signal.service"
"netbird-setup.service"
];
restartTriggers = [
settingsFile
managementFile
];
serviceConfig = {
ExecStart = ''
${cfg.package}/bin/netbird-mgmt management \
--config ${stateDir}/management.json \
--datadir ${stateDir}/data \
${optionalString cfg.management.disableAnonymousMetrics "--disable-anonymous-metrics"} \
${optionalString cfg.management.disableSingleAccountMode "--disable-single-account-mode"} \
--dns-domain ${cfg.management.dnsDomain} \
--single-account-mode-domain ${cfg.management.singleAccountModeDomain} \
--idp-sign-key-refresh-enabled \
--port ${builtins.toString cfg.ports.management} \
--log-file console \
--log-level ${cfg.logLevel}
'';
Restart = "always";
RuntimeDirectory = "netbird-mgmt";
StateDirectory = [
"netbird-mgmt"
"netbird-mgmt/data"
];
WorkingDirectory = stateDir;
};
unitConfig = {
StartLimitInterval = 5;
StartLimitBurst = 10;
};
stopIfChanged = false;
};
};
})
(mkIf cfg.enableCoturn {
services.coturn = {
enable = true;
realm = settings.NETBIRD_DOMAIN;
lt-cred-mech = true;
no-cli = true;
extraConfig = ''
fingerprint
user=${settings.TURN_USER}:${builtins.toString settings.TURN_PASSWORD}
no-software-attribute
'';
};
networking.firewall = {
allowedUDPPorts = with settings; [
TURN_PORT
(TURN_PORT + 1)
5349
5350
];
allowedTCPPorts = with settings; [
TURN_PORT
(TURN_PORT + 1)
];
allowedUDPPortRanges = [
{
from = settings.TURN_MIN_PORT;
to = settings.TURN_MAX_PORT;
}
];
};
})
(mkIf (cfg.enableNginx && cfg.enableCoturn) {
services.coturn =
let
cert = config.security.acme.certs.${settings.TURN_DOMAIN};
in
{
cert = "${cert.directory}/fullchain.pem";
pkey = "${cert.directory}/key.pem";
};
users.users.nginx.extraGroups = [ "turnserver" ];
# share certs with coturn and restart on renewal
security.acme.certs.${settings.TURN_DOMAIN} = {
group = "turnserver";
postRun = "systemctl reload nginx.service; systemctl restart coturn.service";
};
})
];
}
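
A minimal sketch of how the module above could be enabled on a host. All domain names, client identifiers and secret names below are placeholders rather than values from this repository, and the set of `settings` keys shown is only an assumption based on the defaults documented above.

# Hedged usage sketch for services.netbird-server (placeholder values only).
{ config, ... }:
{
  services.netbird-server = {
    enable = true;
    enableNginx = true;
    enableCoturn = true;
    setupAutoOidc = true;
    settings = {
      # Placeholder domain and OIDC client; replace with real values.
      NETBIRD_DOMAIN = "netbird.example.org";
      NETBIRD_AUTH_CLIENT_ID = "netbird";
      NETBIRD_AUTH_AUDIENCE = "netbird";
      NETBIRD_AUTH_OIDC_CONFIGURATION_ENDPOINT = "https://sso.example.org/.well-known/openid-configuration";
    };
    # Hypothetical agenix secret; any readable file path works here.
    secretFiles.TURN_SECRET = config.age.secrets."netbird-turn_secret_file".path;
  };
}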


@ -1,31 +0,0 @@
{
lib,
buildNpmPackage,
fetchFromGitHub,
}:
buildNpmPackage rec {
pname = "netbird-dashboard";
version = "1.17.6";
src = fetchFromGitHub {
owner = "netbirdio";
repo = "dashboard";
rev = "v${version}";
hash = "sha256-MDxN/58dv6OqPYnNgDVZ+YRzfw2dER7x8mEWe14rQ40=";
};
npmDepsHash = "sha256-x7YyzBPAiXyxaIcAvUrXBexYaw0TaYnKgQKT3KadW8w=";
npmFlags = [ "--legacy-peer-deps" ];
installPhase = ''
cp -R build $out
'';
meta = with lib; {
description = "NetBird Management Service Web UI Panel";
homepage = "https://github.com/netbirdio/dashboard";
license = licenses.bsd3;
maintainers = with maintainers; [ thubrecht ];
};
}


@ -1,7 +0,0 @@
{
pkgs ? import <nixpkgs> { },
}:
{
dashboard = pkgs.callPackage ./dashboard.nix { };
}


@ -0,0 +1,30 @@
age-encryption.org/v1
-> ssh-ed25519 jIXfPA xId0d57S+YmTeZzTTNOs7Pt3RPQ7MLNiKg6Mox2MEFo
hFUYZMNoxZQBEKz4SYDC4nLDDXRftXtUtCLCX2kvwZ8
-> ssh-ed25519 QlRB9Q kmsgaV+FRbqcKkhttlbmY22M6pO6kMCqLUYsq1yGSyA
VmprdWLh380qm6aarum1q17pDrMF0KLyXV/PN1OmEO8
-> ssh-ed25519 r+nK/Q XVeZFVNLv0FlL/lPhXrvVJcHAubE1tTfSxl5iiixtF0
Udm/qZMOzNcg2LMffkns+jUlrtXAC8Mk8ofCSD6zf/0
-> ssh-rsa krWCLQ
OJlswMZEz2ONsqvFH8aMo4cRXzNiSkqtOmNQuWbRcAI4sXKCNuNtNcv6WPcpBMPZ
8eTvoIOf8triUwGBWLZ9oRvYOeoucyWCqx0zf11VwOclRBeziRPOQ5Uon+5gpsg2
H1FO7Sk0sVjME/2INUjd1Q4TlPF9tlUOcEDBgyc81cLI0JrR7S2D6Hl/rAN9Gees
D9c+q5PJkvbw7KQPEu7WOxPNCi1gRyHSlKv5ef5gToNOl/c8GAJR5FutO/bTgTTl
P+yLysKXK+r2IwNNMHGFBDVbsp09IjQ+H623Sfr6H0pR7FYShohfzcM6JA3ydztN
Gy5MiJasx3nWCUYJZUL1Fw
-> ssh-ed25519 /vwQcQ OelREEMNnpUXuJ8BA1VPVM8yqEd8PS9m81sw5gaq8U8
wPUQOWxzsj55/hii7Cd4+P1eFWVDQANwIcImOliOqog
-> ssh-ed25519 0R97PA 9NzXGY3sZb8srqaVWWbZhbNJdDfCfeZIhJHPWy9U4FU
+LvE5cI8heO8XhsejCWaJrwaRGYGCziymPZLrYTOXtg
-> ssh-ed25519 JGx7Ng 1jWoS1sqmY9MxZT7fAMsg5QbokAMNlTg9jmpxzr1ekQ
7MndRQ0ruZP2/cOKaid60rQg8Q3ljy2oknf0czOLGSo
-> ssh-ed25519 5SY7Kg Bm19KVQA8DkrDxiYsVRdKVubML7J9L/apLoUs+otehk
kQMv/7uijZlyGDbDt2aNF85vp4nYM9o3fIetvnykX6I
-> ssh-ed25519 p/Mg4Q /vhTds9k+5uwSDjLyKp18ge+bu/Aeg72nHx2joWUTw0
zeim4NPL7floIvZ296vYuyk5XAVFCCaWRc0iRQQxbyg
-> ssh-ed25519 rHotTw YbKb6NyxsknA125fdWj5/RJjmaY22yDwNx+bLKV6ZW4
jJw+YJqQC/B+UMLYAtTAIZuON2hiZAY171ovJ0ceKjg
-> @K'k$-grease x>ie }CH4sS h|s
bVzOpc2vPj8ldZskVlQSmOE7wHR2q/dXcdC6vrPXSvYWCKK8Rg
--- uDaSBMjg5lvDnZyTKHqveb5B+y71HjrDzOqtsJycuBs
1Ò¨Rq¢<>nýµ{”ý5?HXH1¢ Ê%)Í01RGr׿fÖNT4å2B(í);ìíÿ‰íÁœ


@ -0,0 +1,31 @@
age-encryption.org/v1
-> ssh-ed25519 jIXfPA lI9DxAFp/gbF+77Sofv9KIrs3kMTYTLEm8C6AsZBPyI
8RFGt1aJnZbd7Lpr4iy1VlMr3yzpPf6sI79cik5X77c
-> ssh-ed25519 QlRB9Q eMENLAMY+eNXJhduTnJoyPimbThM7VA+4m6BrnZa8RE
NpwcJhh0U8pMU1hnXFz2bfwSmCQra1CI5Tr2cbXGMT0
-> ssh-ed25519 r+nK/Q eyuD/hYyYmG96AcPEZVNsohXgK9WD+g+ZyMpIyaiYjY
Ef+R/eXkqvOmYJvjz4muTjGamkXzgHzD31vXDXsgo3M
-> ssh-rsa krWCLQ
BuBMUp5uijNV71OYvMGS9NhBBplfFugJy14EOHclJ2TKjQ19RVKHPj0wX0AxuPCT
iV6j6Po/oKSsGuoKy6JMTLKjYtROPF70Ld8PlC4tFI5i0xQagEFhKONfk1Rd/mF0
2qGriQhSUMvkMirbkhE3CxrAzSqcjuoGji+ZWwpz2LYUVsF89nnoLsTRri+Sg5ZW
4qhoo23UTU+IlrVtqjB7W1rNAwHKhWPZnjc08x1x/qnLATemmDMsFmTEGljJNGMR
kEg+oUdwdvLjDsnGBWkE+Ck/mrEGwjcsDTmZmCYcH/Q11EMdj5hnCfG68PRhLF9K
b28fHveM3i5/jHrrTxWbrA
-> ssh-ed25519 /vwQcQ 1xQWlLW6xCrheirHSKcGEu+KM644y8NP1KYvwOganQc
IFVYj83X1uLvgIRlnDvnLiaoZNM9viLT7X11vIHdLxY
-> ssh-ed25519 0R97PA I8K03IKgC59zmHqVr8h8TaxuuTSbmYsyap830JyhIhw
AGxW9sq7PQNgs9WFcbINI2CnE3lJJ0rDmseN83YSeT0
-> ssh-ed25519 JGx7Ng syz/pzdj3Lg1VwulZhT8UQncgXjOH1nlbtqHgASLAws
IKaU32zbjFc319PctmGPtHt4RXjgzun0K+9HeuGS3FU
-> ssh-ed25519 5SY7Kg 06EjOyKw1zIWcdZGC7EfNt9mFix+fVcy1iS+SBhPgCQ
ZxcNbC1QmTPJkWlwBnD9YjuzekGZtSDeI7RYxq0uwgw
-> ssh-ed25519 p/Mg4Q uCbjjN5S0ZoZtsj5jva9mTrlZ2UE02A3DysxV1PZ/lM
7jWWiWp4ei5VjftKZz29osbaFxfpId+X3GLzgWZ9Wgo
-> ssh-ed25519 rHotTw Q1/zZpGbUCbXiEELad5710uNkllrFuQlhonSLfIoQVo
h6iW26rADPn1MRqNoD33ZVVDRDr2DBoNK+BjrDxwZik
-> ss-grease
A3WDPMHgipAaXF0MStKGx8CAbFTqks74CRTKButwwJYvgnMFp2Yglx3D2NOWTdJm
yde7gp5XInweYf2TjvQK88l0MD0VYlG9Lu7+wbWGFElCpQ
--- 0d/8UVX6ubUZpKG3LzJsFKbsZNRKUwQq7LuWMiyezKo
P?j@¦Hˆ´ßš¥¼ówgêìÚ©L¥_ã+ì|ζãÙ¦Ö#“fu#c涯„IæS†|¨À²å 


@ -8,7 +8,9 @@
"influxdb2-initial_password_file"
"influxdb2-initial_token_file"
"influxdb2-telegraf_token_file"
"netbird-auth_client_secret_file"
"netbird-data_store_encryption_key_file"
"netbird-relay_environment_file"
"netbird-relay_secret_file"
"nginx-tvix-store-password"
"nginx-tvix-store-password-ci"
"peertube-secrets_file"


@ -135,10 +135,11 @@ in
systemd.services."tvix-store" = {
wantedBy = [ "multi-user.target" ];
environment = {
RUST_LOG = "debug";
RUST_LOG = "info";
};
serviceConfig = {
UMask = "007";
LimitNOFILE = 1048576;
ExecStart = "${package}/bin/multitier-tvix-cache --endpoints-config ${toml.endpoints} --store-composition ${toml.composition}";
StateDirectory = "tvix-store";
RuntimeDirectory = "tvix-store";


@ -0,0 +1,16 @@
let
host = "victoria-metrics.dgnum.eu";
port = 9099;
in
{
services.victoriametrics = {
enable = true;
listenAddress = "127.0.0.1:${builtins.toString port}";
};
dgn-web.simpleProxies.victoria-metrics = {
inherit host port;
};
}


@ -239,7 +239,7 @@ in
chain postrouting {
type nat hook postrouting priority 100;
ip saddr 10.0.0.0/16 ip saddr != 10.0.255.0/24 snat ip to 129.199.195.130-129.199.195.158
ether saddr e0:2b:e9:b5:b4:cc snat to 129.199.195.130 comment "Elias"
ether saddr { e0:2e:0b:bd:97:73, e8:d5:2b:0d:fe:4a } snat to 129.199.195.130 comment "Elias"
ether saddr { 1c:1b:b5:14:9c:e5, e6:ce:e2:b6:e3:82 } snat to 129.199.195.131 comment "Lubin"
ether saddr d0:49:7c:46:f6:39 snat to 129.199.195.132 comment "Jean-Marc"
ether saddr { 5c:64:8e:f4:09:06 } snat to 129.199.195.158 comment "APs"


@ -6,30 +6,35 @@
}:
let
metis = import sources.metis { inherit pkgs; };
inherit (lib) mapAttrsToList match;
inherit (metis) providers;
metis = import sources.metis { inherit pkgs; };
in
{
services.nginx.virtualHosts."calendrier.dgnum.eu" = {
enableACME = true;
forceSSL = true;
root = metis.production;
root = metis.package;
locations = lib.mapAttrs' (
name: value:
name: domain:
lib.nameValuePair "/cal/${name}/" {
extraConfig = ''
proxy_set_header X-Forwarded-Host $host;
proxy_set_header X-Forwarded-Server $host;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_pass ${value};
proxy_pass https://${domain}/remote.php/dav/public-calendars/;
'';
}
) providers;
) metis.providers;
extraConfig = ''
rewrite ^/calendrier(.*)$ $1 permanent;
'';
};
assertions = mapAttrsToList (name: domain: {
assertion = (match "[a-z-]+" name == [ ]) && (match "[a-z.-]+" domain == [ ]);
message = "The provider `${name}` associated to the domain `${domain}` seems to have an incorrect definition.";
}) metis.providers;
}


@ -13,7 +13,7 @@ in
services = {
netbox = {
enable = true;
package = nixpkgs.unstable.netbox_3_7;
package = nixpkgs.unstable.netbox_4_1;
secretKeyFile = "/dev/null";
listenAddress = "127.0.0.1";
plugins = p: [ p.netbox-qrcode ];


@ -29,7 +29,6 @@ in
"bds.wp.dgnum.eu" = "bds.ens.fr";
"www.tuteurs.ens.fr" = "tuteurs.ens.fr";
"www.interq.ens.fr" = "interq.ens.fr";
"www.lanuit.ens.fr" = "lanuit.ens.fr";
};
temporary = {


@ -0,0 +1,57 @@
{
pkgs,
sources,
config,
...
}:
let
nix-pkgs = import sources.nix-pkgs { inherit pkgs; };
in
{
services.django-apps.sites.annuaire = {
source = "https://git.dgnum.eu/DGNum/annuaire-eleves";
branch = "main";
domain = "annuaire-ens.webapps.dgnum.eu";
nginx = {
enableACME = true;
forceSSL = true;
};
webHookSecret = config.age.secrets."webhook-annuaire_token".path;
python = pkgs.python3.override {
packageOverrides = _: _: { inherit (nix-pkgs) authens loadcredential; };
};
dependencies = ps: [
ps.django
ps.pillow
ps.loadcredential
ps.authens
ps.python-dateutil
];
credentials = {
SECRET_KEY = config.age.secrets."dj_annuaire-secret_key_file".path;
};
environment = {
ANNUAIRE_ALLOWED_HOSTS = [ "annuaire-ens.webapps.dgnum.eu" ];
ANNUAIRE_LDAP = {
SPI = {
PROTOCOL = "ldaps";
URL = "ldap.spi.ens.fr";
PORT = 636;
};
CRI = {
PROTOCOL = "ldaps";
URL = "annuaire.ens.fr";
PORT = 636;
};
};
};
};
}


@ -0,0 +1,49 @@
{
pkgs,
sources,
config,
...
}:
let
nix-pkgs = import sources.nix-pkgs { inherit pkgs; };
in
{
services.django-apps.sites.bocal = {
source = "https://git.dgnum.eu/DGNum/www-bocal";
branch = "main";
domain = "bocal.webapps.dgnum.eu";
nginx = {
enableACME = true;
forceSSL = true;
};
webHookSecret = config.age.secrets."webhook-bocal_token".path;
python = pkgs.python3.override {
packageOverrides = _: _: { inherit (nix-pkgs) django-cas-ng django-solo loadcredential; };
};
dependencies = ps: [
ps.django
ps.django-cas-ng
ps.django-markdownx
ps.django-solo
ps.markdown
ps.pillow
ps.loadcredential
];
credentials = {
SECRET_KEY = config.age.secrets."dj_bocal-secret_key_file".path;
};
environment = {
DJANGO_SETTINGS_MODULE = "app.settings";
BOCAL_ALLOWED_HOSTS = [ "bocal.webapps.dgnum.eu" ];
BOCAL_RHOSTS_PATH = "/var/lib/django-apps/bocal/.rhosts";
};
};
}


@ -1,11 +1,18 @@
{
imports = [ ./gestiojeux.nix ];
imports = [
./annuaire.nix
./bocal.nix
./gestiojeux.nix
./interludes.nix
./wikiens.nix
];
services.django-apps = {
enable = true;
webhook = {
domain = "hooks.webapps.dgnum.eu";
domain = "apps-webhook.dgnum.eu";
nginx = {
enableACME = true;
forceSSL = true;


@ -1,4 +1,9 @@
{ pkgs, sources, ... }:
{
pkgs,
sources,
config,
...
}:
let
nix-pkgs = import sources.nix-pkgs { inherit pkgs; };
@ -15,7 +20,7 @@ in
forceSSL = true;
};
webHookSecret = builtins.toFile "insecure-secret" "T5hNeDraMivRZLUkrekv&QeM";
webHookSecret = config.age.secrets."webhook-gestiojeux_token".path;
application = {
type = "wsgi";
@ -54,7 +59,7 @@ in
mediaDirectory = "source/public/media";
credentials = {
SECRET_KEY = builtins.toFile "insecure-key" "insecure-key";
SECRET_KEY = config.age.secrets."dj_gestiojeux-secret_key_file".path;
};
environment = {


@ -0,0 +1,66 @@
{
config,
pkgs,
sources,
...
}:
let
nix-pkgs = import sources.nix-pkgs { inherit pkgs; };
in
{
services.webhook.extraArgs = [ "-debug" ];
services.django-apps.sites.interludes = {
source = "https://git.eleves.ens.fr/dlesbre/site-interludes";
branch = "master";
domain = "interludes.ens.fr";
nginx = {
enableACME = true;
forceSSL = true;
serverAliases = [ "interludes.webapps.dgnum.eu" ];
};
webHookSecret = config.age.secrets."webhook-interludes_token".path;
application = {
type = "wsgi";
module = "interludes";
};
dbType = "sqlite";
python = pkgs.python3.override {
packageOverrides = _: _: { inherit (nix-pkgs) python-cas loadcredential; };
};
django = ps: ps.django_4;
dependencies = ps: [
ps.loadcredential
ps.python-ldap
ps.python-cas
];
credentials = {
SECRET_KEY = config.age.secrets."dj_interludes-secret_key_file".path;
EMAIL_HOST_PASSWORD = config.age.secrets."dj_interludes-email_host_password_file".path;
};
environment = {
INTERLUDES_ALLOWED_HOSTS = [
"interludes.ens.fr"
"interludes.webapps.dgnum.eu"
];
# E-mail configuration
INTERLUDES_SERVER_EMAIL = "noreply-interludes-admin@ens.fr";
INTERLUDES_DEFAULT_FROM_EMAIL = "noreply-interludes@ens.fr";
INTERLUDES_EMAIL_HOST = "clipper.ens.fr";
INTERLUDES_EMAIL_PORT = 465;
INTERLUDES_EMAIL_HOST_USER = "interludes";
INTERLUDES_DEBUG = false;
};
};
}


@ -0,0 +1,55 @@
{
pkgs,
sources,
config,
...
}:
let
nix-pkgs = import sources.nix-pkgs { inherit pkgs; };
in
{
services.django-apps.sites.wikiens = {
source = "https://git.dgnum.eu/DGNum/wiki-eleves";
branch = "main";
domain = "wiki.eleves.ens.fr";
nginx = {
enableACME = true;
forceSSL = true;
};
webHookSecret = config.age.secrets."webhook-wikiens_token".path;
python = pkgs.python3.override {
packageOverrides = _: _: {
inherit (nix-pkgs)
django-allauth
django-allauth-ens
django-wiki
loadcredential
;
};
};
dependencies =
ps:
[
ps.django
ps.django-allauth-ens
ps.django-wiki
ps.loadcredential
ps.tinycss2
]
++ ps.django-allauth.optional-dependencies.socialaccount;
credentials = {
SECRET_KEY = config.age.secrets."dj_wikiens-secret_key_file".path;
};
environment = {
WIKIENS_ALLOWED_HOSTS = [ "wiki.eleves.ens.fr" ];
};
};
}

Binary file not shown.


@ -0,0 +1,30 @@
age-encryption.org/v1
-> ssh-ed25519 jIXfPA HF+w4Kuk7Wo2s94SeNxAB3zFZhKNn1fPabJhUK/xGH0
KY5tknNrICYq0HTfNRX760OPyWPJ8B4Sasq8BjN9a6k
-> ssh-ed25519 QlRB9Q OGcCe/S1aIQckJGzt4Wz+DFebTZpNV+YCevnVOPDMXQ
keDckjD4Vjhj3gmQnW0V8nJ1Soubkhb9WP28fsanhMA
-> ssh-ed25519 r+nK/Q lO6xwuhfQ6gMlJzFBF5J9c2elEg1J3leAt5x1uTYGSk
HQG0VQXvn72CIOqe6FRGrSX8TIa7sBB3cOZZQzXBl8w
-> ssh-rsa krWCLQ
pvF18GVS3dHr2jiss4sn00UqVVM2f/6BmkpYMgAVQ3FNpgnimQGsgCssuBo3Hjrc
BTO4v2U6cQ28LTUsruWdPhRChT0zfGRtx1QIn0tPzy3XKUxjt2XkBeblxtLhCHmI
muQ0yA15bP+aQfZn0dE1Eb4krw1unKWE4f82L/BQ5Y/i1P2rubhyBhBoQRb6atHv
S2EWBafaNr3orbFl9FPMjhWW3WZX/zKJxlu0saN88I6ZU2967mdR4PogMpL9iqST
atraraA1jG6mR9Ojloyrf8FG6wTlplDlZk8Sgtg88FD1iHMN1q0DQv1LwRoD3QUa
ywIn9MABMufNXQ+jm/DQpw
-> ssh-ed25519 /vwQcQ 83MxgOJhIBBGU6IRcTQPtxtyR4MapAxhdKT634w/em4
scNxodN5j1HXOIPCB3glvc08Gb4wW9gmZ5gkWMCbm4E
-> ssh-ed25519 0R97PA LBFUS7zx26+rjiWqVwQ4UBqRxr+3Sx+j+GGrRaBbz08
fnFwvJz36SiKnEoJr+0+enNVcT7wduZUrYe7bWhyxfE
-> ssh-ed25519 JGx7Ng iXjAn4Y7+yHASx4ZbIrvFffLzgX52DbQy9hIcTScHAs
6AJZoV33mBryiCaquKTAkw8yB1NQs38QlG2p4LIcoMc
-> ssh-ed25519 bUjjig 0cqMXUVHqhyYhygR7meIyWRr/c7H8ZGB5eO7tTHhRUk
GYKKGB02ElJXpObmBJKF4Bvoswd3o83vvVYIHIpDprg
-> ssh-ed25519 VQSaNw xHhzKnYeKxrN2MJz84v7Mjg3Nh69UJ6Q/eAyVAvC3V0
/bvauGesQw9/tl4DhCNFY9Rq+qWv12O4TcqzdxTCWzk
-> T:){{-grease NuQ <}vLGT%
0JSFYPMWs6LXpWacfiHNdwqvs/eHecFwj6cg0eLZEQe96shxy8/WSUBMpgasKufB
Nc4tpfiOVWVRGm4arhunwJ+1sgg37X35PWde89Qpg5g
--- Y6N6GuCpRLdD25EWW+05qbUAadrT3z2Pzc5golCBHJw
ßNê¯3'8ú³€@/¨0,zWêS¦‘ï;ßñì)§e<C2A7>ßÉïèÞí
qMjÏŒrçHBÇR2šš E2H+d­% ¶Ò–®


@ -0,0 +1,30 @@
age-encryption.org/v1
-> ssh-ed25519 jIXfPA tuq63SvMOBnLOZNkIA5RenFt0DTg6bwCX4zJ8ISYRxc
B1K+kEO/JC0t2EL+2od+UiVNlzBbpRg29lsp2L1DhHw
-> ssh-ed25519 QlRB9Q r3M3DQi3xJiP+3nTpwm+2PQipnAaRyaWSH+mb0es6kE
codqvk7AgptYBRyz2BFVH0FcQ7ebZGGdJ6PJmoWWXTk
-> ssh-ed25519 r+nK/Q Ah4Oim/N0Tdkz1KPbQiHJQaqx614/jjlMqCxtYqjBy0
aTrlmm3TbWN6pyDEHf9uGy9H9CyyChXGKL0RZr7U3W4
-> ssh-rsa krWCLQ
ZbbBqvj7L2XFfJBCQrn799m7FQDrFDg96Moev+Uab/U5caQoJIljMldkfD7VphEt
56dyeJ7IdKdnwyt07213ua2gZ8Cmjyffi4b0mYhHkvRI5aSmfUtfiomXU0HkgZvK
rk4+AVQYXTLZKlGaq5KkTt4i0ltwzjA9ECNirciqi5JmORkUD1T41xBKCSb+7N5b
34Z/uka+oacxt7q27GnSonyFQIm7/owS4bTWV7vxoWLoOYTJcg4Oki/Op4gE9GkK
1y4RDpdVsHcRZbi7ewB9UKbvMzH44TN5VJARUf0mFQ/OHUo5IJcm/glS898fSLu/
mrjVT6XGAmPELB8uaVhSkg
-> ssh-ed25519 /vwQcQ 2mD6dstuZmOkYlBajNevQkeCYAGWshp0h0F1TzdcJSY
pzjxW+RZDSqPAHm+c5cMJZOdIfkwTmSLw2BktGh/kHk
-> ssh-ed25519 0R97PA /vOiTSDwQVYTX+tFuJD0M8Enk+4b0ViZUnrZ/WhUKiI
83r35uyZ/XELwTXZXzlU1yq+xzsNTUYNwK9aGGlOSAA
-> ssh-ed25519 JGx7Ng V6Xnn5q1hSvWHjiWtWJAD7as5N2fdtWNKWi3JwhfYgQ
aL3fX67spVrgguVtNNrfJ20fy3LRaDgMZldw5D1fKuE
-> ssh-ed25519 bUjjig RdTpxQYpmEtG2Cn1EACf85/ZynfPbZhGfoSF+sfw1AA
YovrKYRtwRPco3luRBVA0IA1qAq1jKxoS1UdoouhLGE
-> ssh-ed25519 VQSaNw F4hYo2UaLzV8leVHx/oY9aIcZkZ9Fap5HiuTvZy+Hko
Qwf9JDKqLXmIzId7gAtG5ERirfwZlQWCV6YiKgbexS4
-> v>[->`-grease O {|u& 2o9 {w&!Ev
jZPBNd6e20KQYli80kXK9D+qfmIVbOw9Y0aKXB3uvyNJPWDOoYTbzanjeXLuJdN+
pB/fgMX7znIg+VP87n2qMR5jFVj/x4g4vNgKTUtglw
--- j4kt4DFy3r3y6IMvNakNkmlkeb6iHYI5xAK8CZtbPD4
EWS¦|p^/<2F> Ž?<7F>Np%åeFU/>Ží¸0bccývr(ˆ‰Œº
“.èýVŸdgðáADZ3"® ‡Ù(½\5Ó§q<


@ -0,0 +1,29 @@
age-encryption.org/v1
-> ssh-ed25519 jIXfPA iJSzsbA8RiEhUIyhlKWCASQKoSQstjK4drMYl+PsChw
8THrknrBu0WGFEb4xTZiJxEY26q7sW83rwViDjyTE24
-> ssh-ed25519 QlRB9Q e7PRE212Ggt8nO6Bb+BabO85FOARsJGs9cPJmZNI9kg
ubKIBxI1ZBXttA7TWj401siKNT1HyB+N2MsZ+ldkgb8
-> ssh-ed25519 r+nK/Q EWV24Emm9hENa+yUAuQpkuJ0uJ0zIv+vRIbWpM4Wtg4
J59wnHRytgNqpX4+5HaJ9KZ5GvhckgtRK6TzfX7Ci8Y
-> ssh-rsa krWCLQ
AvmrzShR+XTpUpKaScoqvgFQ40PTSqh8p383p98xjG5LIz5kqJoWBnxJK7JabBpq
JkqVeq5XdH5RX4weobieG4KYUV8EDheLfOMXH5BrPgeJO4yhJ1rzH+oHBw4TwvFM
UvEZEAVgi3G1/suPfJAkO7QRkZjE7fRppEo5RAI0gMlM43YyJavrfqVIqB40Uugk
h0b0ybChUbKpXlZjqhYAAMN45jTAvW1emO0DMeIk6dbmnbZNdibul8f+NNdWKbI1
9NN5iH2IzuqTdc6gkE4912hdDeUJ4NZ6x/Fxp1/u3d1z/Yg7daUQUXUIoDX0Hyvb
+01dH0D/7kzRhEdNLO2NXA
-> ssh-ed25519 /vwQcQ GAsAj2i65KDQeFhe69YR2ycdGskop1wu3Lzrxp59sTg
wCSUqEtWv0i6sNg1RVtHI/jZh3VeNX3qtnbagXoNGT4
-> ssh-ed25519 0R97PA mFZ3q/3jd1guXl8bhRWyYjgsgE4JErJEels6vdmpfCs
7oIAT0MTsaKxbf26PSDBk7KqfyFgcBq09FGJ9v/rXqE
-> ssh-ed25519 JGx7Ng tpslfMWMJMUH46EGycbLiXotVdXlP4xmK0slb7XKYS8
wLLfX4jX4mIxzI8zr2GBlpBcPztTrHqKngi/ON0TExg
-> ssh-ed25519 bUjjig zLoniLfwKGH9Ctu34103WHBvjIyImtPyKx8O+5UMLUU
sYsterVGvCg6JWA0z3AO5sSlj9DBfj8u5o5jH9K2xeA
-> ssh-ed25519 VQSaNw oHzU9Lc/7p+MZAjVylzC63h586vOcffXkkpAi4XB8Q0
7T8CREpaCxM58KMYW28FY2i+ELjrx3eC3K7xaBy7O6A
-> (_o61>U-grease .P>ZRrj~ -=7S;N
6vnQVKKZwp4JowIwVb4klrhaR6NZjwlZYnngVQ0wqVenMZPj9oyhIXthLRqE1Q6/
k+sGxA
--- +yT0o8oZJS+32MeUAl8T9zREh31rq77pSVsSoFjHO5A
è ™ñΗ´ä!î^ûØÖ8ÔzøÑaÒÓ ÐàÔ@Ö¡s\ ˜_ÃÃúoÖö<C396>wõÖ¥Cr)¾€fû¿AÃ'•3D€â


@ -0,0 +1,29 @@
age-encryption.org/v1
-> ssh-ed25519 jIXfPA 7v2qJ+2ZSp0tf4m6gcK2ShFF9ulNm/g3aHu3Wqe4Sxo
ZyVqTqBCK51/U5yxtp23nywprQv46yL90zwx6+DqKRg
-> ssh-ed25519 QlRB9Q IePmluoRImtaDplOoVqNiwfTQMKF1CuF4M6AzurXGRY
JjtOeyvARlc9t5Q+LS2+TZwAUgV4Qn2L8SFkw9YLnaU
-> ssh-ed25519 r+nK/Q LGPI7PmVPnZDQe6Su5MZQauxRHZkBKehyNbMq+BKlGQ
3RvcfLAFKaScusYKf47zFNAtnot7wySvytuD81s6TwM
-> ssh-rsa krWCLQ
xGH7rl+r8L5HEp6JUlAm04ktn9rQsWfBBlSRp7UsOi6ojwCfjjIA91yUrYw8TYRs
Ci60uoLS7cuMtSE/jQVU/FuVtR5kwjhOkWmQDHrC7rUWb6CufusxPIVJ0xanp3wo
cc2t+EfSdpVyGIx5N8BEMhQ6sR2EfERHGfUrnKCpcL5hM5L8ZHnVh6CkRBtvZaq0
Zy44Ob4pqH6fDz7EziM1hBkfg9myN+/Iqfvg5OUnfSrqooLZ8l0gDvGafS5fok91
uqb0PGDiv6lwzpaj87jKUCaXAF3ag2KAa6j8sbZ4+fSsQeB/jhH7hTlWcAR/oEFW
fuPQDFKxMucAsPjv1H1iaQ
-> ssh-ed25519 /vwQcQ +5+xDNQyRwBWXT6c593S01OG9IemNul/81G4ie1hTVg
Tzkq0toOCqdHOZNPiy/rUrO2eQXTDHi7g+jKbrWU/hU
-> ssh-ed25519 0R97PA WEMs0phnuvw1kQaqeSkovwFUL6w7J6wh+V7D82NxfDs
V5npmkeTPVcnaNwDtoy7PqBRllPTuQjvF9Qu14V59os
-> ssh-ed25519 JGx7Ng 3bty0WCf+ElvPEFt7fSpgYf5MeFUPaZ4vVGWPUAjn0I
ggl5CgXaUx4T6qbA9EG1oaF9NbfFYye4davm7lKqUvI
-> ssh-ed25519 bUjjig zFlaOVzFEkPG+J3Yz7alPgSiCVbC/7u/hCTVIP8X/Ho
3PBIRu9ZKfb9lkzijw6kKjX0ztXBkiwVaQUx8rxuYJc
-> ssh-ed25519 VQSaNw btusrepFF5Jhl3x2YWs6wVrHwzb6qBXfDXESclQJAXo
HwfOU3tyP9OsNjTkaMMmJnd4b+0ZfxJLkP6xe5jsAZE
-> Tp-grease s03Py `u6"4 E|5 _
3CvcQ6NEZKLY1F6y0cTMQPwV9mJvHB0T7dauvWJAYKkfb95TymqfDYGWwW1veND2
n1XD/arAJHVwva95K7TaQdsNLPGo8/VePQGUnYqi
--- qe75UTWqdDd0gGg0nm054SFZ2AgqVBw/bbycvcZSfQY
ãñêÕ]¹¦zÂg©;Ê¡îñ˜öÓ´0éÅYëÀHãŒ!@ìp­ö¸T«?£iÞ‰áèÚ>I^ül·o5”¯ë:{¬gJk£vø>€W8ði

Binary file not shown.


@ -1,3 +1,14 @@
(import ../../../keys).mkSecrets [ "web03" ] [
# List of secrets for web03
"dj_annuaire-secret_key_file"
"dj_bocal-secret_key_file"
"dj_gestiojeux-secret_key_file"
"dj_interludes-email_host_password_file"
"dj_interludes-secret_key_file"
"dj_wikiens-secret_key_file"
"webhook-annuaire_token"
"webhook-bocal_token"
"webhook-gestiojeux_token"
"webhook-interludes_token"
"webhook-wikiens_token"
]


@ -0,0 +1,29 @@
age-encryption.org/v1
-> ssh-ed25519 jIXfPA NovhLzllQnEbnI7bno+zDoSRFJyZMfVVYPQMReUIymw
sefGtZ8fbYVqtKgMhrEj9AlwP70YM5MGkQ+o8Dmfb/Q
-> ssh-ed25519 QlRB9Q 9mh3vQVo5tPorLYBVCcZUJOlcEftQKA94PxNhh+pDwg
GXM67qitYqnxbFoHbsfa1lNNLIahPqshosIY7h0fDBA
-> ssh-ed25519 r+nK/Q BOXck7k9AH+KvmoicI/fmGzWcna0nwnJ+uyteUjIukE
Hyts1/6EAdruuBilhifl/HwPTWEBe+Kr1RL6SDjHaaM
-> ssh-rsa krWCLQ
1ROqUHCkbkEgRTQUha0cVJVAqLu0nvfKik9yI392sbEQYgmpuf7F0gzA97BXcoi3
2BdZWu/cJ6m6bfMvXdZ04cUjRcNrnpPHsoqie3G9s9p6aa9XIrLO5K6kH7S6f5DZ
pZdOqfSYldtJKRx7F8k0D/pscN5qB1Tb1x0CIULJVo7uKf9X1MnZwapOOCY2q40U
Ip2aefr40h3EO7jBlswx2/fB8aqW95BR4JQzJZ/uiIsBUQDqvn39GU7R0JaLdAPB
6kJXaJ3ORaDDtslcaAVZWLqFbOlINXYHr/mqYNTZMubE4BmNjvJL3aRozQQWraoJ
q5rDvgwUXVhpGpcaNf4/xw
-> ssh-ed25519 /vwQcQ FHYnfCad1imFiV5tRIfe9mtJ2ouiu2l19th2UD7j3gw
Xu+Sk9GEQ9Wyf7iU790yxv80vLYHp2StArPkfRqfRhI
-> ssh-ed25519 0R97PA etwCsiGmvzufJGMw8aDN+M931lPlE9fTUBQmk0X4DFk
o6xJbfNjQ3Lko1MSJ9JBu6FefZ8267dZ+vL1Gpd1eH8
-> ssh-ed25519 JGx7Ng h0XzejD/c5F2M7sWS4vTQL9OoRG73ACwlWCtK51Dcyo
diMDy201IpwL6Ec+Zb4pH5f1yyMOMHT3jg6yriopCRU
-> ssh-ed25519 bUjjig 2Oh5FhWfrbA9c5TisXuxasyYF41YOlNdurZR9QowETA
706/MLiPT9+9xHZPZQYtvKm8zbN5qS/9XJ+TK15etIs
-> ssh-ed25519 VQSaNw YbtnCoySon7jNBq7IFOl8UfxuJXRjzLrgXp238q4RRE
10au0QwFP9ntPMU4u2bMl3KLYBIPy09xVoKNLxWvpw0
-> Vu-grease !oqb p1-QmV
i1WmaOmxmdAX/se60fnUL41n57c8tN1gnUjjBjSV7GkQGzhKnxTplJTUpifP9Js3
8D+xe86sN2l2JQ5R9QFOAbsvSa5eXSo
--- JE+yvBRH9Jz6Sdz46AzWuhVI0kXWObODKSiNWz5L9As
_n´(I 6ÔÃPèCa\³U¼=é @ “†?6—P[Tò³ñˆjk<6A>0ãrÒ…°“ƒ¼-É(]/³a¿É õ8¶=é¤i²<69>


@ -0,0 +1,29 @@
age-encryption.org/v1
-> ssh-ed25519 jIXfPA Ju7YL9wvvYr9VPLmYtYTniyuj9JTVqe2V8eRLISkIH8
EJjZPLOhspyyrx7a+fYlPPH+1pr93KzW7E2Ztkic0cY
-> ssh-ed25519 QlRB9Q X+TAfiEk1d67rkz6CgIO66bBrahY39ZTnmj0cBGGrSo
kBLFu6DnN7rIzP3mSlPEc+yBN+yU5toLeA069vuNW6g
-> ssh-ed25519 r+nK/Q wcXXCuAS9bOp3GM6c0pU7sxpylFEHFPmnibQTEwJ1x4
fR41b7fhZCzuNP1jst3vx3wUjIkBDsz54VzubwNX6+M
-> ssh-rsa krWCLQ
ySG+OgB3gMW/ijdWqlGr1LnkfqeFD53ChxkOUfAe4+Z1VsK0FkVaBmqvW38SFMw9
S4dcOkO6Km8umsaZBZi2QaItm+p8Rf/j7+W2WZPoyoKE1l1KW1ic/wGOY7uqeucn
YZRq7rWX+DaH2VLbkl12wUlVgYwJGcH6VrpRizbq2z0jcdTak6hgzcXo7WhcNAit
DY8W8X5Zv34mpj1VO7n2LJs5V7gzfSLq+KVMIi++QphVv2VkFpvaOqlEP2neVXnV
C3YNJTkVx+R6wANCao+9a5VHC261Bkm81dKgzceW2OCHkwOP6XTbDpj59sMRxRuU
B7jrvre5S1WZN9jc16Dv/Q
-> ssh-ed25519 /vwQcQ TW560PIrbJV3ZB55w+EvH2PEYOoYM93x3aaeeShYKE8
LC6pydBK3yCq/Vs7MUoa0xjDSn3WjRaZuqwvhX24YJQ
-> ssh-ed25519 0R97PA zyerO6EIwW90XVSBVP3Y/7Q8hK+7uPe6kKENGCdDJRw
WEpgo8Y64YXnat1OJU5qtpecf+Zu2P2LmB7DEtmUuAU
-> ssh-ed25519 JGx7Ng 7h4q8ztQ0BFJSfavV4l1pKjbNRZveOPIJG0KF98vh28
mYcUEL4n2+bkjpvJylIvzXSxoa71YZKMSgN21ONnvko
-> ssh-ed25519 bUjjig 9wKWtLWD+9LlAOO24iQiOdvpSDIWpL6Xo0Wt3QOLIQY
Kq2QLFB7E5tiqZQlsn5pZRM52v8XqUyYsvwNHXZspRs
-> ssh-ed25519 VQSaNw 3tJNtvi0WK9iAzx3Q7Q0Ogj1TGH0Zrm5v0ERhQILBVk
4232/j+xnbhQpId7ZS6+xAQBDxtumeOp4c1HVeMRqB4
-> Pug13&(-grease 'w0JG}JF .t`9lMF v)8}4qW
yRriwE//abKvQgu962F7URbOAiHDFMipnsq22itGkLDvmwIRY6Bi83xOzx72EV4y
27GNdxQOni+z8NPt0YTskqq4fHfZky/EMFUvXTfteB7izYxEliHLRKA
--- JNvexaDwzwOIUCxanJRLunfhBh1/PE8ssFCytr8nPjo
TX¹Þxòšd˜~KS?ìIò…Ce þ—3ÑJõ ¹ŸýCíÓF6qœv~Dùq¢T<>©55€bjˆfÕ5”ñëã"ø£ÅŽp


@ -0,0 +1,29 @@
age-encryption.org/v1
-> ssh-ed25519 jIXfPA dBBF9o4SBTHNv495PFZa6dszbs9nEARwg0EfOlfFwhc
GkqX8sjLqFHGm4UA+zyVRB7FGGgAxilFYHarEQB0YAk
-> ssh-ed25519 QlRB9Q DEu91DA+qho3Zs3gSQbWH/hOKUfgP5Qd90+9ZzYs1So
aIw1ygo/e0tpqW2N27Fl8WRe362ronzqy52vSzD35Tc
-> ssh-ed25519 r+nK/Q JUurf12UYuJKvKusUh/GOJryFbA8lWaS8v+/pRb0kys
VsgsBSwjBXTD+tmP3jxCPVeDY7AHVFx5o57y+ubEjts
-> ssh-rsa krWCLQ
o08ZnFZIj37p5hpWgl8FXwPwHKjoBD7Z0UxMRsF4CUF0sLOpwVHD4L57hAA8a80S
063e48OJ5OsrtueqqJwPT+wjXfmEarLUqC+rP0X+JDW8OLwSImBcYC5DQJZLUFSK
doF8S8Bo0MbuB4eKnXUAJlhdZOk/iqYK8TYuuSIwWQxHwF/fT43hrYIkj6lmqdmG
IqSXA04KpQFoL15INIAtsnj5xXJlI0gCPp0pxMNUmVyTTrNLfaEiKH191D+Elmjd
xcdvMX1yzIPI/mI/+/OjeYspijY0XpRHLJ9ljfEK7E2N8IgpyzBx2BzxYhRHoQmi
6SbZu9Tirw+yv5wv8oIaHA
-> ssh-ed25519 /vwQcQ M6QID8DMaFMnF97UWwbSYJ7Sh0wvj/fq7cszu82/oHI
T+aT4NCbVfGXnvPK7w8fbojAwDTE41h40q0tDwnGyhE
-> ssh-ed25519 0R97PA XyZvyy80nv2tGe1fBzM0LeiIAGuyV22CzBoCPFMMrw8
9VPiRV3GCWbH1So5LBrjBeRzEtErPM7BwOF/zaD/yGk
-> ssh-ed25519 JGx7Ng OPlQBKO+Wub+PPMNPoRGWTeSZfGF3kYCD8HLbLbPR0k
ZhBUT5ig0FnLCau+da9bfEkVjFxfZXG0mXW1o0yZ+JQ
-> ssh-ed25519 bUjjig T5/dZtIRaXmNg8pajSAM76cVANM7MvQ7f32fz2fEqx0
+6kRffMJX+8QAOf5jA5acGihgw4q8yJda0EzVGePD+I
-> ssh-ed25519 VQSaNw InflFPtAwYwQFWqd+KK+ILwMa0XTNkVB+xEMtUXW8Us
XZ6LVMCpvq+QBo0EHAlnC8uBhQssixTLVCpul6ov4Dk
-> YKmn+c&-grease EA5d$ ="1d }cP
3u46NE2SdfO9ugNN/41PeU/65CRgmDiO54B9ZQLNRQtVyyLlcmvaYHCQach+s+Rs
tE0Gc8MD23hPw5ZhWj0nq7xF8VHtRQSTLQ
--- UkbfAVgnLkeg6Zdb3bsdPtx9Wh6HOjdB+qmTvrAWFuE
5_E¼ñ/e)±žÑÊC×ÈY<C388>wPŽöTášt6>l_0:[èP»ÎH5·¼j—<6A> ¸â=vèFýÉIÄ4¹ÿÏD쪘ýp£§

Binary file not shown.

Binary file not shown.


@ -1,7 +1,7 @@
{ lib, dns, ... }:
let
inherit (lib) mapAttrs' nameValuePair;
inherit (lib) mapAttrs' nameValuePair optional;
inherit (lib.extra) fuseAttrs mapSingleFuse;
inherit (dns.lib.combinators) mx spf ttl;
@ -85,6 +85,7 @@ let
"influx" # InfluxDB
"netbird" # Netbird
"prometheus" # Prometheus
"victoria-metrics" # Victoria Metrics
"videos" # Peertube
# Garage S3
@ -137,7 +138,9 @@ let
];
web03.dual = [
"*.webapps" # Django apps
# Django Apps
"*.webapps"
"apps-webhook"
];
}
)
@ -214,17 +217,19 @@ in
subdomains = mapAttrs' (
host:
{ site, ... }:
nameValuePair "${host}.${site}" (
with meta.network.${host}.addresses;
{
let
net = meta.network.${host};
inherit (net.addresses) ipv4 ipv6;
in
nameValuePair "${host}.${site}" {
A = ipv4;
AAAA = ipv6;
subdomains = {
v4.A = ipv4;
v6.AAAA = ipv6;
private.A = optional (net.netbirdIp != null) net.netbirdIp;
};
}
)
) meta.nodes;
};
};


@ -29,30 +29,6 @@
netbirdIp = "100.80.75.197";
};
krz01 = {
interfaces = {
# see also machines/krz01/networking.nix
vmbr0 = {
ipv4 = [
{
address = "129.199.146.21";
prefixLength = 24;
}
{
address = "192.168.1.145";
prefixLength = 24;
}
];
gateways = [ "129.199.146.254" ];
enableDefaultDNS = true;
};
};
hostId = "bd11e8fc";
netbirdIp = "100.80.103.206";
};
geo01 = {
interfaces = {
eno1 = {
@ -195,7 +171,7 @@
};
hostId = "8afc7749";
netbirdIp = null; # web03 is not yet connected to the VPN
netbirdIp = "100.80.157.46";
};
rescue01 = {


@ -37,19 +37,6 @@
};
};
web01 = {
site = "rat01";
deployment.tags = [ "web" ];
hashedPassword = "$y$j9T$9YqXO93VJE/GP3z8Sh4h51$hrBsEPL2O1eP/wBZTrNT8XV906V4JKbQ0g04IWBcyd2";
stateVersion = "23.05";
vm-cluster = "Hyperviseur NPS";
nixpkgs = "24.05";
};
compute01 = {
site = "pav01";
@ -80,15 +67,15 @@
nixpkgs = "24.05";
};
krz01 = {
site = "pav01";
rescue01 = {
site = "luj01";
hashedPassword = "$y$j9T$eNZQgDN.J5y7KTG2hXgat1$J1i5tjx5dnSZu.C9B7swXi5zMFIkUnmRrnmyLHFAt8/";
deployment.targetHost = "v6.rescue01.luj01.infra.dgnum.eu";
stateVersion = "24.05";
nixpkgs = "unstable";
hashedPassword = "$y$j9T$nqoMMu/axrD0m8AlUFdbs.$UFVmIdPAOHBe2jJv5HJJTcDgINC7LTnSGRQNs9zS1mC";
adminGroups = [ "lab" ];
stateVersion = "23.11";
vm-cluster = "Hyperviseur Luj";
};
storage01 = {
@ -99,7 +86,10 @@
stateVersion = "23.11";
nixpkgs = "24.05";
nix-modules = [ "services/forgejo-nix-runners" ];
nix-modules = [
"services/forgejo-nix-runners"
"services/netbird/server.nix"
];
};
vault01 = {
@ -114,6 +104,19 @@
adminGroups = [ "fai" ];
};
web01 = {
site = "rat01";
deployment.tags = [ "web" ];
hashedPassword = "$y$j9T$9YqXO93VJE/GP3z8Sh4h51$hrBsEPL2O1eP/wBZTrNT8XV906V4JKbQ0g04IWBcyd2";
stateVersion = "23.05";
vm-cluster = "Hyperviseur NPS";
nixpkgs = "24.05";
};
web02 = {
site = "rat01";
@ -129,21 +132,8 @@
hashedPassword = "$y$j9T$Un/tcX5SPKNXG.sy/BcTa.$kyNHELjb1GAOWnauJfcjyVi5tacWcuEBKflZDCUC6x4";
nix-modules = [ "services/django-apps" ];
stateVersion = "24.05";
nixpkgs = "24.05";
nixpkgs = "unstable";
vm-cluster = "Hyperviseur NPS";
};
rescue01 = {
site = "luj01";
deployment.targetHost = "v6.rescue01.luj01.infra.dgnum.eu";
hashedPassword = "$y$j9T$nqoMMu/axrD0m8AlUFdbs.$UFVmIdPAOHBe2jJv5HJJTcDgINC7LTnSGRQNs9zS1mC";
stateVersion = "23.11";
vm-cluster = "Hyperviseur Luj";
};
}


@ -41,7 +41,10 @@ in
options = {
organization = {
members = mkOption {
type = attrsOf (submodule {
type = attrsOf (
submodule (
{ name, ... }:
{
options = {
name = mkOption {
type = str;
@ -56,8 +59,19 @@ in
Main e-mail address of the member.
'';
};
username = mkOption {
type = str;
default = name;
description = ''
The username used for authentication.
WARNING: this must be the same as the ENS login!
'';
};
});
};
}
)
);
description = ''
Members of the DGNum organization.


@ -5,14 +5,21 @@
{
members = {
agroudiev = {
name = "Antoine Groudiev";
email = "antoine.groudiev@dgnum.eu";
};
catvayor = {
name = "Lubin Bailly";
email = "catvayor@dgnum.eu";
username = "lbailly";
};
cst1 = {
name = "Constantin Gierczak--Galle";
email = "cst1@dgnum.eu";
username = "cgierczakgalle";
};
ecoppens = {
@ -23,11 +30,19 @@
jemagius = {
name = "Jean-Marc Gailis";
email = "jm@dgnum.eu";
username = "jgailis";
};
luj = {
name = "Julien Malka";
email = "luj@dgnum.eu";
username = "jmalka";
};
mboyer = {
name = "Matthieu Boyer";
email = "matthieu.boyer@dgnum.eu";
username = "mboyer02";
};
mdebray = {
@ -38,6 +53,7 @@
raito = {
name = "Ryan Lahfa";
email = "ryan@dgnum.eu";
username = "rlahfa";
};
thubrecht = {
@ -52,7 +68,6 @@
"thubrecht"
"raito"
"mdebray"
"luj"
];
# members of this group are root on the fai infrastructure


@ -58,6 +58,7 @@
"dgn-ssh"
"dgn-vm-variant"
"dgn-web"
"django-apps"
])
++ [
"${sources.agenix}/modules/age.nix"


@ -1,35 +0,0 @@
diff --git a/netbox_agent/network.py b/netbox_agent/network.py
index 673dfc1..8ef60aa 100644
--- a/netbox_agent/network.py
+++ b/netbox_agent/network.py
@@ -1,7 +1,7 @@
import logging
import os
import re
-from itertools import chain
+from itertools import chain, islice
import netifaces
from netaddr import IPAddress
@@ -413,11 +413,17 @@ class Network(object):
# delete IP on netbox that are not known on this server
if len(nb_nics):
- netbox_ips = nb.ipam.ip_addresses.filter(
- **{self.intf_type: [x.id for x in nb_nics]}
- )
+
+ def batched(it, n):
+ while batch := tuple(islice(it, n)):
+ yield batch
+
+ netbox_ips = []
+ for ids in batched((x.id for x in nb_nics), 25):
+ netbox_ips += list(
+ nb.ipam.ip_addresses.filter(**{self.intf_type: ids})
+ )
- netbox_ips = list(netbox_ips)
all_local_ips = list(chain.from_iterable([
x['ip'] for x in self.nics if x['ip'] is not None
]))


@ -7,23 +7,17 @@
let
inherit (config.networking) hostName domain;
in
{
imports = [ ./module.nix ];
options.dgn-netbox-agent = {
enable = lib.mkEnableOption "DGNum netbox agent setup." // {
default = true;
default = false;
};
};
config = lib.mkIf config.dgn-netbox-agent.enable {
nixpkgs.overlays = [
(_: super: {
netbox-agent = super.netbox-agent.overrideAttrs (old: {
patches = (old.patches or [ ]) ++ [ ./01-batch-filter.patch ];
});
})
];
services.netbox-agent = {
enable = true;
@ -51,6 +45,7 @@ in
randomizedDelaySec = "3h";
environmentFile = config.age.secrets."netbox-agent".path;
};
age-secrets.sources = [ ./. ];
age-secrets.sources = [ ./secrets ];
};
}


@ -0,0 +1,115 @@
{
config,
pkgs,
lib,
utils,
...
}:
let
inherit (lib)
getExe
mkEnableOption
mkIf
mkOption
mkPackageOption
;
inherit (lib.types)
either
listOf
nullOr
path
str
;
settingsFormat = pkgs.formats.yaml { };
cfg = config.services.netbox-agent;
in
{
options.services.netbox-agent = {
enable = mkEnableOption "Netbox-agent";
package = (mkPackageOption pkgs "netbox-agent" { }) // {
default = pkgs.callPackage ./package.nix { };
};
startAt = mkOption {
type = either str (listOf str);
default = "*-*-* 00:00:00";
description = ''
Automatically start this unit at the given date/time, which
must be in the format described in
{manpage}`systemd.time(7)`.
'';
};
randomizedDelaySec = mkOption {
type = str;
default = "0";
example = "45min";
description = ''
Add a randomized delay before each netbox-agent run.
The delay will be chosen between zero and this value.
This value must be a time span in the format specified by
{manpage}`systemd.time(7)`.
'';
};
settings = mkOption {
inherit (settingsFormat) type;
description = ''
Settings to be passed to the netbox agent. They will be converted to a YAML
config file.
'';
default = { };
};
environmentFile = mkOption {
type = nullOr path;
default = null;
description = ''
Environment file to pass to netbox-agent. See `netbox-agent --help` for
possible environment variables.
'';
};
};
config = mkIf cfg.enable {
systemd.services.netbox-agent = {
description = "Netbox-agent service. It generates an existing infrastructure on Netbox and have the ability to update it regularly through this service.";
wants = [ "network-online.target" ];
after = [ "network-online.target" ];
serviceConfig = {
Type = "oneshot";
# Run the agent once, passing the generated YAML configuration file.
ExecStart = utils.escapeSystemdExecArgs [
(getExe cfg.package)
"-c"
(settingsFormat.generate "config.yaml" cfg.settings)
];
EnvironmentFile = cfg.environmentFile;
LockPersonality = true;
MemoryDenyWriteExecute = true;
NoNewPrivileges = true;
PrivateTmp = true;
ProtectControlGroups = true;
ProtectHome = true;
ProtectKernelModules = true;
ProtectKernelTunables = true;
ProtectSystem = "strict";
RestrictAddressFamilies = "AF_UNIX AF_INET AF_INET6 AF_NETLINK";
RestrictNamespaces = true;
RestrictRealtime = true;
RestrictSUIDSGID = true;
};
inherit (cfg) startAt;
};
systemd.timers.netbox-agent.timerConfig.RandomizedDelaySec = cfg.randomizedDelaySec;
};
}
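
A minimal sketch of enabling the module above. The `netbox.url` key and the idea that the API token is supplied through the environment file are assumptions about the agent's configuration, and the secret name is hypothetical.

# Hedged usage sketch for services.netbox-agent (placeholder values only).
{ config, ... }:
{
  services.netbox-agent = {
    enable = true;
    startAt = "*-*-* 02:00:00";
    randomizedDelaySec = "1h";
    # Assumed YAML layout; the module serializes this attrset to config.yaml.
    settings.netbox.url = "https://netbox.example.org";
    # The API token is assumed to be provided via this environment file
    # (hypothetical agenix secret) rather than in the Nix store.
    environmentFile = config.age.secrets."netbox-agent".path;
  };
}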


@ -0,0 +1,46 @@
{
lib,
buildPythonPackage,
fetchFromGitHub,
cargo,
rustPlatform,
rustc,
typing-extensions,
}:
buildPythonPackage rec {
pname = "netifaces-2";
version = "0.0.22";
pyproject = true;
src = fetchFromGitHub {
owner = "SamuelYvon";
repo = "netifaces-2";
rev = "V${version}";
hash = "sha256-XO3HWq8FOVzvpbK8mIBOup6hFMnhDpqOK/5bPziPZQ8=";
};
cargoDeps = rustPlatform.fetchCargoTarball {
inherit src;
name = "${pname}-${version}";
hash = "sha256-uoUa6DSBuIV3RrE7svT1TVLxPHdx8BFu/C6mbpRmor0=";
};
build-system = [
cargo
rustPlatform.cargoSetupHook
rustPlatform.maturinBuildHook
rustc
];
dependencies = [ typing-extensions ];
pythonImportsCheck = [ "netifaces" ];
meta = {
description = "Netifaces reborn";
homepage = "https://github.com/SamuelYvon/netifaces-2.git";
license = lib.licenses.mit;
maintainers = with lib.maintainers; [ ];
};
}


@ -0,0 +1,64 @@
{
lib,
python3,
fetchgit,
ethtool,
dmidecode,
ipmitool,
lldpd,
lshw,
}:
python3.pkgs.buildPythonApplication {
pname = "netbox-agent";
version = "unstable-2023-03-19";
pyproject = true;
src = fetchgit {
url = "https://git.dgnum.eu/DGNum/netbox-agent";
rev = "424283239658516feb34c0f68496775350b1bf22";
hash = "sha256-sp1QVy8AIezR2LRDDYS9G0g0GQRwGKGmEE7ykITPxtY=";
};
nativeBuildInputs = with python3.pkgs; [
setuptools
wheel
pythonRelaxDepsHook
];
pythonRelaxDeps = true;
propagatedBuildInputs = with python3.pkgs; [
distro
jsonargparse
netaddr
(callPackage ./netifaces2.nix { })
packaging
pynetbox
python-slugify
pyyaml
];
postInstall = ''
wrapProgram $out/bin/netbox_agent \
--prefix PATH ":" ${
lib.makeBinPath [
ethtool
dmidecode
ipmitool
lldpd
lshw
]
}
'';
pythonImportsCheck = [ "netbox_agent" ];
meta = with lib; {
description = "Netbox agent to run on your infrastructure's servers";
homepage = "https://git.dgnum.eu/DGNum/netbox-agent";
license = licenses.asl20;
maintainers = [ ];
mainProgram = "netbox_agent";
};
}


@ -1,6 +1,17 @@
{ config, ... }:
{ config, lib, ... }:
let
inherit (lib) mkEnableOption mkIf;
cfg = config.dgn-records;
in
{
options.dgn-records.enable = mkEnableOption "Arkheon deployment recording." // {
default = true;
};
config = mkIf cfg.enable {
services.arkheon.record = {
enable = true;
@ -10,4 +21,5 @@
};
age-secrets.sources = [ ./. ];
};
}


@ -0,0 +1,67 @@
diff --git a/internal/hook/hook.go b/internal/hook/hook.go
index 0510095..0347f26 100644
--- a/internal/hook/hook.go
+++ b/internal/hook/hook.go
@@ -13,12 +13,12 @@ import (
"errors"
"fmt"
"hash"
- "io/ioutil"
"log"
"math"
"net"
"net/textproto"
"os"
+ "path"
"reflect"
"regexp"
"strconv"
@@ -750,14 +750,18 @@ func (h *Hooks) LoadFromFile(path string, asTemplate bool) error {
}
// parse hook file for hooks
- file, e := ioutil.ReadFile(path)
+ file, e := os.ReadFile(path)
if e != nil {
return e
}
if asTemplate {
- funcMap := template.FuncMap{"getenv": getenv}
+ funcMap := template.FuncMap{
+ "cat": cat,
+ "credential": credential,
+ "getenv": getenv,
+ }
tmpl, err := template.New("hooks").Funcs(funcMap).Parse(string(file))
if err != nil {
@@ -956,3 +960,27 @@ func compare(a, b string) bool {
func getenv(s string) string {
return os.Getenv(s)
}
+
+// cat provides a template function to retrieve content of files
+// Similarly to getenv, if no file is found, it returns the empty string
+func cat(s string) string {
+ data, e := os.ReadFile(s)
+
+ if e != nil {
+ return ""
+ }
+
+ return strings.TrimSuffix(string(data), "\n")
+}
+
+// credential provides a template function to retrieve secrets using systemd's LoadCredential mechanism
+func credential(s string) string {
+ dir := getenv("CREDENTIALS_DIRECTORY")
+
+ // If no credential directory is found, fallback to the env variable
+ if dir == "" {
+ return getenv(s)
+ }
+
+ return cat(path.Join(dir, s))
+}


@ -0,0 +1,710 @@
{
config,
lib,
options,
pkgs,
utils,
...
}:
let
inherit (lib)
attrNames
concatLists
concatMapAttrs
filterAttrs
getExe
getExe'
literalExpression
mapAttrs
mapAttrs'
mapAttrsToList
mkEnableOption
mkIf
mkMerge
mkOption
mkPackageOption
nameValuePair
optional
optionals
recursiveUpdate
toUpper
;
inherit (lib.types)
attrs
attrsOf
enum
functionTo
ints
listOf
nullOr
package
path
str
submodule
;
inherit (utils) escapeSystemdExecArgs;
cfg = config.services.django-apps;
# Alias the global config to allow its use when the identifier is shadowed
config' = config;
systemctl = getExe' config.systemd.package "systemctl";
in
{
options.services.django-apps = {
enable = mkEnableOption "automatic django apps management";
webhook = {
domain = mkOption {
type = str;
description = ''
The domain where the webhook service will listen.
'';
};
nginx = mkOption {
type = nullOr options.services.nginx.virtualHosts.type.nestedTypes.elemType;
default = null;
description = ''
With this option, you can customize the nginx virtualHost settings.
'';
example = literalExpression ''
{
# To enable encryption and let Let's Encrypt take care of certificates
forceSSL = true;
enableACME = true;
}
'';
};
};
sites = mkOption {
type = attrsOf (
submodule (
{ name, config, ... }:
{
options = {
source = mkOption {
type = str;
description = ''
The URI where the source of the app can be publicly fetched via git.
'';
};
branch = mkOption {
type = str;
default = "production";
description = ''
Branch to follow for updates to the source.
'';
};
domain = mkOption {
type = str;
description = ''
The domain where the web app will be served.
'';
};
nginx = mkOption {
type = nullOr options.services.nginx.virtualHosts.type.nestedTypes.elemType;
default = null;
description = ''
With this option, you can customize the nginx virtualHost settings.
'';
example = literalExpression ''
{
# To enable encryption and let Let's Encrypt take care of certificates
forceSSL = true;
enableACME = true;
}
'';
};
env_prefix = mkOption {
type = str;
default = toUpper name;
description = ''
The prefix to use for environment settings declaration.
'';
};
application = {
type = mkOption {
type = enum [
"asgi"
"wsgi"
"daphne"
];
default = "wsgi";
description = ''
How the Django application is served: `wsgi` or `asgi` through gunicorn, or `daphne`.
'';
};
module = mkOption {
type = str;
default = "app";
description = ''
Name of the module containing the application interface.
'';
};
settingsModule = mkOption {
type = str;
default = "${config.application.module}.settings";
description = ''
The Django settings module; it will be passed as an environment variable to the app.
'';
};
workers = mkOption {
type = ints.positive;
default = 4;
description = ''
Number of worker processes to use.
'';
};
channelLayer = mkOption {
type = str;
default = "channel_layer";
description = ''
Channel layer to use when running the application with daphne.
'';
};
};
python = mkPackageOption pkgs "python3" { };
django = mkOption {
type = functionTo package;
default = ps: ps.django;
defaultText = literalExpression "ps: ps.django";
description = ''
The django version to use to run the app.
'';
};
djangoEnv = mkOption {
type = package;
default = config.python.withPackages (
ps:
[ (config.django ps) ]
++ (optional (config.application.type != "daphne") ps.gunicorn)
++ (optional (config.application.type == "asgi") ps.uvicorn)
++ (optional (config.dbType == "postgresql") ps.psycopg)
++ (config.dependencies ps)
);
description = ''
The Python environment used to run the app, with the required dependencies.
'';
};
dependencies = mkOption {
type = functionTo (listOf package);
default = _: [ ];
example = literalExpression "ps: [ ps.requests ]";
description = ''
Python dependencies of the app.
'';
};
extraPackages = mkOption {
type = listOf package;
default = [ ];
description = ''
Packages that will be added to the path of the app.
'';
};
credentials = mkOption {
type = attrsOf path;
default = { };
description = ''
The files containing credentials to pass through `LoadCredential` to the application.
'';
};
environment = mkOption {
type = attrsOf (pkgs.formats.json { }).type;
default = { };
description = ''
Environment variables to pass to the app.
'';
};
managePath = mkOption {
type = str;
default = "manage.py";
description = ''
Path to the manage.py file inside the source.
'';
};
extraServices = mkOption {
type = attrs;
default = { };
description = ''
Extra services to run in parallel with the application.
May be used to run background tasks and/or workers.
'';
};
manageScript = mkOption {
type = package;
default = pkgs.writeShellApplication {
name = "${name}-manage";
runtimeInputs = [
pkgs.util-linux
config'.systemd.package
config.djangoEnv
] ++ config.extraPackages;
text = ''
MainPID=$(systemctl show -p MainPID --value dj-${name}.service)
nsenter -e -a -t "$MainPID" -G follow -S follow python /var/lib/django-apps/${name}/source/${config.managePath} "$@"
'';
};
description = ''
Script to run manage.py-related tasks.
'';
};
updateScript = mkOption {
type = package;
default = pkgs.writeShellApplication {
name = "dj-${name}-update-source";
runtimeInputs = [
config.djangoEnv
pkgs.git
];
text = ''
git pull
python3 ${config.managePath} migrate
python3 ${config.managePath} collectstatic --no-input
'';
};
description = ''
Script to run when updating the app source.
'';
};
webHookSecret = mkOption {
type = path;
description = ''
Path to the webhook secret.
'';
};
dbType = mkOption {
type = enum [
"manual"
"postgresql"
"sqlite"
];
default = "postgresql";
description = ''
Which database backend to use; set to `manual` for a custom declaration.
'';
};
baseDirectory = mkOption {
type = str;
readOnly = true;
default = "/var/lib/django-apps/${name}";
};
sourceDirectory = mkOption {
type = str;
readOnly = true;
default = "${config.baseDirectory}/source";
};
staticDirectory = mkOption {
type = str;
default = "static";
description = ''
Path to the staticfiles directory.
This is relative to the base directory, i.e. the parent of the source directory.
'';
};
mediaDirectory = mkOption {
type = str;
default = "media";
description = ''
Path to the media files directory.
This is relative to the base directory, i.e. the parent of the source directory.
'';
};
};
}
)
);
};
};
config = mkIf cfg.enable {
security.sudo.extraRules = [
{
users = [ "webhook" ];
commands = builtins.map (name: {
command = "${systemctl} start dj-${name}-update.service";
options = [ "NOPASSWD" ];
}) (attrNames cfg.sites);
}
];
environment.systemPackages = mapAttrsToList (_: { manageScript, ... }: manageScript) cfg.sites;
services = {
webhook = {
enable = true;
package = pkgs.webhook.overrideAttrs (old: {
patches = (old.patches or [ ]) ++ [ ./01-webhook.patch ];
});
# extraArgs = [ "-debug" ];
# Only listen on localhost
ip = "127.0.0.1";
hooksTemplated = mapAttrs' (
name:
{ branch, ... }:
nameValuePair "dj-${name}" (
# Avoid issues when quoting "dj-name" through builtins.toJSON
builtins.replaceStrings [ "\\" ] [ "" ] (
builtins.toJSON {
id = "dj-${name}";
execute-command = "/run/wrappers/bin/sudo";
pass-arguments-to-command =
builtins.map
(name: {
inherit name;
source = "string";
})
[
systemctl
"start"
"dj-${name}-update.service"
];
# command-working-directory = "/var/lib/django-apps/${name}";
trigger-rule = {
and = [
{
or = [
{
match = {
type = "payload-hmac-sha256";
secret = ''{{ credential "dj-${name}" | js }}'';
parameter = {
source = "header";
name = "X-Hub-Signature-256";
};
};
}
{
match = {
type = "value";
value = ''{{ credential "dj-${name}" | js }}'';
parameter = {
source = "header";
name = "X-Gitlab-Token";
};
};
}
];
}
{
match = {
type = "value";
value = "refs/heads/${branch}";
parameter = {
source = "payload";
name = "ref";
};
};
}
];
};
}
)
)
) cfg.sites;
};
nginx = mkMerge [
(mkIf (cfg.webhook.nginx != null) {
enable = true;
virtualHosts = {
${cfg.webhook.domain} = mkMerge [
{ locations."/".proxyPass = "http://127.0.0.1:${builtins.toString config.services.webhook.port}"; }
cfg.webhook.nginx
];
};
})
{
virtualHosts = mapAttrs' (
name:
{ domain, nginx, ... }:
nameValuePair domain (
recursiveUpdate {
locations = {
"/".proxyPass = "http://unix:/run/django-apps/${name}.sock";
"/static/".root = "/run/django-apps/${name}";
"/media/".root = "/run/django-apps/${name}";
};
} nginx
)
) cfg.sites;
}
];
postgresql =
let
apps = builtins.map (name: "dj-${name}") (
attrNames (filterAttrs (_: { dbType, ... }: dbType == "postgresql") cfg.sites)
);
in
mkIf (apps != [ ]) {
enable = true;
ensureDatabases = apps;
ensureUsers = builtins.map (name: {
inherit name;
ensureDBOwnership = true;
}) apps;
};
};
users = {
users.nginx.extraGroups = [ "django-apps" ];
groups.django-apps = { };
};
systemd = {
sockets = mapAttrs' (
name: _:
nameValuePair "dj-${name}" {
description = "Socket for the ${name} Django Application";
wantedBy = [ "sockets.target" ];
socketConfig = {
ListenStream = "/run/django-apps/${name}.sock";
SocketMode = "600";
SocketUser = config'.services.nginx.user;
};
}
) cfg.sites;
mounts = concatLists (
mapAttrsToList (
name:
{ mediaDirectory, staticDirectory, ... }:
[
{
where = "/run/django-apps/${name}/static";
what = "/var/lib/django-apps/${name}/${staticDirectory}";
options = "bind";
after = [ "dj-${name}.service" ];
partOf = [ "dj-${name}.service" ];
upheldBy = [ "dj-${name}.service" ];
}
{
where = "/run/django-apps/${name}/media";
what = "/var/lib/django-apps/${name}/${mediaDirectory}";
options = "bind";
after = [ "dj-${name}.service" ];
partOf = [ "dj-${name}.service" ];
upheldBy = [ "dj-${name}.service" ];
}
]
) cfg.sites
);
services =
{
webhook.serviceConfig.LoadCredential = mapAttrsToList (
name: { webHookSecret, ... }: "dj-${name}:${webHookSecret}"
) cfg.sites;
}
// (concatMapAttrs (
name: config:
let
mkDatabase =
name: type:
if type == "postgresql" then
{
ENGINE = "django.db.backends.postgresql";
NAME = "dj-${name}";
}
else if type == "sqlite" then
{
ENGINE = "django.db.backends.sqlite3";
NAME = "/var/lib/django-apps/${name}/db.sqlite3";
}
else
throw "Invalid database type !";
# Systemd Service Configuration
Group = "django-apps";
LoadCredential = mapAttrsToList (credential: path: "${credential}:${path}") config.credentials;
RuntimeDirectory = "django-apps/${name}";
StateDirectory = "django-apps/${name}";
UMask = "0027";
User = "dj-${name}";
WorkingDirectory = "/var/lib/django-apps/${name}";
environment =
let
mkValue = v: if builtins.isString v then v else builtins.toJSON v;
in
(mapAttrs' (key: value: nameValuePair "${config.env_prefix}_${key}" (mkValue value)) {
DATABASES =
if (config.dbType != "manual") then { default = mkDatabase name config.dbType; } else null;
STATIC_ROOT = "/var/lib/django-apps/${name}/${config.staticDirectory}";
MEDIA_ROOT = "/var/lib/django-apps/${name}/${config.mediaDirectory}";
ALLOWED_HOSTS = [ config.domain ];
})
// {
DJANGO_SETTINGS_MODULE = config.application.settingsModule;
}
// (mapAttrs (_: mkValue) config.environment);
path = config.extraPackages ++ [ config.djangoEnv ];
after = [ "network.target" ] ++ (optional (config.dbType == "postgresql") "postgresql.service");
in
{
"dj-${name}" = {
inherit after environment path;
preStart = ''
if [ ! -f .initialized ]; then
# The previous initialization might have failed, so restart from the beginning
rm -rf source
# We need to download the application source and run the migrations first
${lib.getExe pkgs.git} clone --single-branch --branch ${config.branch} ${config.source} source
(cd source && python ${config.managePath} migrate --no-input && python ${config.managePath} collectstatic --no-input)
touch .initialized
fi
# Create the necessary directories with the correct user/group
mkdir -p ${config.mediaDirectory} ${config.staticDirectory}
'';
requires = [ "dj-${name}.socket" ];
wantedBy = [ "multi-user.target" ];
serviceConfig = {
inherit
Group
LoadCredential
RuntimeDirectory
StateDirectory
User
UMask
WorkingDirectory
;
DynamicUser = true;
ExecStart = escapeSystemdExecArgs (
if (config.application.type == "daphne") then
[
(getExe' config.djangoEnv "daphne")
"-u"
"/run/django-apps/${name}.sock"
"${config.application.module}.asgi:${config.application.channelLayer}"
]
else
(
[
(getExe' config.djangoEnv "gunicorn")
"--workers"
config.application.workers
"--bind"
"unix:/run/django-apps/${name}.sock"
"--pythonpath"
"source"
]
++ (optionals (config.application.type == "asgi") [
"--worker-class"
"uvicorn.workers.UvicornWorker"
])
++ [ "${config.application.module}.${config.application.type}" ]
)
);
ExecReload = "${getExe' pkgs.coreutils "kill"} -s HUP $MAINPID";
KillMode = "mixed";
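# gunicorn signals readiness via sd_notify; daphne does not, so it keeps the
# default service type.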
Type = mkIf (config.application.type != "daphne") "notify";
};
};
"dj-${name}-update" = {
inherit environment path;
serviceConfig = {
inherit
Group
LoadCredential
StateDirectory
UMask
User
;
DynamicUser = true;
ExecStart = "${getExe config.updateScript}";
Type = "oneshot";
WorkingDirectory = "/var/lib/django-apps/${name}/source";
};
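# Conflicts= stops the running application while the update executes; the
# still-listening socket unit starts it again on the next request.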
unitConfig = {
After = "dj-${name}.service";
Conflicts = "dj-${name}.service";
};
};
}
// (mapAttrs' (
serviceName: serviceContent:
nameValuePair "dj-${name}_${serviceName}" (
recursiveUpdate {
inherit after environment path;
partOf = [ "dj-${name}.service" ];
wantedBy = [ "multi-user.target" ];
upheldBy = [ "dj-${name}.service" ];
serviceConfig = {
inherit
Group
LoadCredential
RuntimeDirectory
StateDirectory
UMask
User
;
DynamicUser = true;
};
} serviceContent
)
) config.extraServices)
) cfg.sites);
};
};
}
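
For orientation, a minimal sketch of what declaring one site against this module could look like. The option names are taken from the destructurings above; the `services.django-apps` attribute path, the option types and which options carry defaults are assumptions, since the options declaration is outside this hunk.

```nix
# Hypothetical site declaration (illustration only, not taken from the repository).
{ pkgs, ... }:
{
  services.django-apps.sites.blog = {
    domain = "blog.example.org";
    branch = "main";
    source = "https://git.example.org/org/blog.git";
    dbType = "postgresql"; # "postgresql", "sqlite" or "manual"
    webHookSecret = "/run/secrets/blog-webhook"; # exposed to webhook as credential "dj-blog"
    staticDirectory = "static";
    mediaDirectory = "media";
    env_prefix = "BLOG"; # settings are injected as BLOG_DATABASES, BLOG_ALLOWED_HOSTS, ...
    managePath = "manage.py"; # run from the cloned source checkout
    djangoEnv = pkgs.python3.withPackages (ps: [
      ps.django
      ps.gunicorn
    ]);
    application = {
      type = "wsgi"; # "wsgi", "asgi" or "daphne"
      module = "blog"; # gunicorn serves blog.wsgi
      settingsModule = "blog.settings";
      workers = 2;
    };
  };
}
```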


@ -59,10 +59,10 @@
"pre_releases": false,
"version_upper_bound": null,
"release_prefix": null,
"version": "v1.8.0",
"revision": "624fd86460e482017ed9c3c3c55a3758c06a4e7f",
"url": "https://api.github.com/repos/nix-community/disko/tarball/v1.8.0",
"hash": "06ifryv6rw25cz8zda4isczajdgrvcl3aqr145p8njxx5jya2d77"
"version": "v1.9.0",
"revision": "49a4936cee640e27d74baee6fd1278285d29b100",
"url": "https://api.github.com/repos/nix-community/disko/tarball/v1.9.0",
"hash": "0j76ar4qz320fakdii4659w5lww8wiz6yb7g47npywqvf2lbp388"
},
"dns.nix": {
"type": "GitRelease",
@ -87,9 +87,9 @@
"repo": "git-hooks.nix"
},
"branch": "master",
"revision": "1211305a5b237771e13fcca0c51e60ad47326a9a",
"url": "https://github.com/cachix/git-hooks.nix/archive/1211305a5b237771e13fcca0c51e60ad47326a9a.tar.gz",
"hash": "1qz8d9g7rhwjk4p2x0rx59alsf0dpjrb6kpzs681gi3rjr685ivq"
"revision": "3308484d1a443fc5bc92012435d79e80458fe43c",
"url": "https://github.com/cachix/git-hooks.nix/archive/3308484d1a443fc5bc92012435d79e80458fe43c.tar.gz",
"hash": "0qdhcqisil8zhnf600y0vpa1mayrca8z2bja79p4j5vajy7dnx4s"
},
"kadenios": {
"type": "Git",
@ -144,9 +144,9 @@
"url": "https://git.lix.systems/lix-project/lix.git"
},
"branch": "main",
"revision": "ed9b7f4f84fd60ad8618645cc1bae2d686ff0db6",
"revision": "66f6dbda32959dd5cf3a9aaba15af72d037ab7ff",
"url": null,
"hash": "05kxga8fs9h4qm0yvp5l7jvsda7hzqs7rvxcn8r52dqg3c80hva9"
"hash": "10mfry8k0jab4ngnhvx9d7ia8m7qf4va4395ylwg3qlsxziqvc8z"
},
"lix-module": {
"type": "Git",
@ -155,9 +155,9 @@
"url": "https://git.lix.systems/lix-project/nixos-module.git"
},
"branch": "main",
"revision": "fd186f535a4ac7ae35d98c1dd5d79f0a81b7976d",
"revision": "aa2846680fa9a2032939d720487942567fd9eb63",
"url": null,
"hash": "0jxpqaz12lqibg03iv36sa0shfvamn2yhg937llv3kl4csijd34f"
"hash": "0gb174800sgh6y6sir23nxsx85xrk478hbwqbzyd46ac34clz9wz"
},
"lon": {
"type": "Git",
@ -178,9 +178,9 @@
"url": "https://git.dgnum.eu/DGNum/metis"
},
"branch": "master",
"revision": "f631751da44a330c41398356ce2295e17ed45b7b",
"revision": "ed6fafda45d638b1bafd5deaee098b80156b41e8",
"url": null,
"hash": "1laclhq6jz9mni4vxsxk3c143c2x478z1lqj115mqyj0d62rz8mg"
"hash": "086spyhn2x1x2h31b0y4an501fdhph1nk64riybqnh6mqjkzlq4m"
},
"microvm.nix": {
"type": "Git",
@ -194,6 +194,20 @@
"url": "https://github.com/RaitoBezarius/microvm.nix/archive/49899c9a4fdf75320785e79709bf1608c34caeb8.tar.gz",
"hash": "0sz6azdpiz4bd36x23bcdhx6mwyqj8zl5cczjgv48xqfmysy8zwy"
},
"nix-actions": {
"type": "GitRelease",
"repository": {
"type": "Git",
"url": "https://git.dgnum.eu/DGNum/nix-actions.git"
},
"pre_releases": false,
"version_upper_bound": null,
"release_prefix": null,
"version": "v0.2.2",
"revision": "b9cb5d6f945d1e3fd7b70d63848c70335e9912e8",
"url": null,
"hash": "0m6bw5qlrchsigx7x4nz3xkcn3dnr14k5j0ws9lbggnldnz9qg2w"
},
"nix-modules": {
"type": "Git",
"repository": {
@ -201,9 +215,9 @@
"url": "https://git.hubrecht.ovh/hubrecht/nix-modules.git"
},
"branch": "main",
"revision": "2fd7c7810b2a901020ddd2d0cc82810b83a313fc",
"revision": "75e8d70a051dd19d126b5248b62f61d6f8ce4361",
"url": null,
"hash": "0rag870ll745r5isnk6hlxv0b0sbgriba5k6nihahcwsal2f4830"
"hash": "0yx5by3v2cshiidyh27n75lcqy9d1kk5zz5mchmfv63s9p0cjzqn"
},
"nix-patches": {
"type": "GitRelease",
@ -226,15 +240,15 @@
"url": "https://git.hubrecht.ovh/hubrecht/nix-pkgs"
},
"branch": "main",
"revision": "3e731378f3984313ef902c5e5a49e002e6e2c27e",
"revision": "fe54340f49449f01c2ee489abf7016d97706eb59",
"url": null,
"hash": "1vy2dj9fyy653w6idvi1r73s0nd2a332a1xkppddjip6rk0i030p"
"hash": "1sv9nqhzcqn8anqfgf63i2j5qcqzyy4vl0a45rvllv7rhbhw9adq"
},
"nixos-24.05": {
"type": "Channel",
"name": "nixos-24.05",
"url": "https://releases.nixos.org/nixos/24.05/nixos-24.05.5518.ecbc1ca8ffd6/nixexprs.tar.xz",
"hash": "1yr2v17d8jg9567rvadv62bpr6i47fp73by2454yjxh1m9ric2cm"
"url": "https://releases.nixos.org/nixos/24.05/nixos-24.05.6668.e8c38b73aeb2/nixexprs.tar.xz",
"hash": "0lhh36z3fvd3b64dz7an08y3c3shb67aj17ny9z28bs21i3dc5yh"
},
"nixos-generators": {
"type": "Git",
@ -244,21 +258,21 @@
"repo": "nixos-generators"
},
"branch": "master",
"revision": "9ae128172f823956e54947fe471bc6dfa670ecb4",
"url": "https://github.com/nix-community/nixos-generators/archive/9ae128172f823956e54947fe471bc6dfa670ecb4.tar.gz",
"hash": "1zn3lykymimzh21q4fixw6ql42n8j82dqwm5axifhcnl8dsdgrvr"
"revision": "3280fdde8c8f0276c9f5286ad5c0f433dfa5d56c",
"url": "https://github.com/nix-community/nixos-generators/archive/3280fdde8c8f0276c9f5286ad5c0f433dfa5d56c.tar.gz",
"hash": "12v6lxls3bfkj20rwxy62l8g6zlkhsp29m6wd7764j1wwfwjk274"
},
"nixos-unstable": {
"type": "Channel",
"name": "nixos-unstable",
"url": "https://releases.nixos.org/nixos/unstable/nixos-24.11pre688563.bc947f541ae5/nixexprs.tar.xz",
"hash": "1jsaxwi128fiach3dj8rdj5agqivsr4sidb8lmdnl7g07fl9x0kj"
"url": "https://releases.nixos.org/nixos/unstable/nixos-25.05beta710087.23e89b7da85c/nixexprs.tar.xz",
"hash": "0b695yx17sarr7d3ypb9z6njd0qhiga1682wjxidl053lvx6g33b"
},
"nixpkgs": {
"type": "Channel",
"name": "nixpkgs-unstable",
"url": "https://releases.nixos.org/nixpkgs/nixpkgs-24.11pre689466.7d49afd36b55/nixexprs.tar.xz",
"hash": "0r4zb6j8in4dk7gxciapfm49dqbdd0c7ajjzj9iy2xrrj5aj32qp"
"url": "https://releases.nixos.org/nixpkgs/nixpkgs-25.05pre709559.5083ec887760/nixexprs.tar.xz",
"hash": "1z912j1lmrg8zp2hpmmi69dls9zlpvqfvdkvh5xc3x6iqkqwn0cd"
},
"proxmox-nixos": {
"type": "Git",
@ -268,9 +282,9 @@
"repo": "proxmox-nixos"
},
"branch": "main",
"revision": "950e4cccac0f942076e8558f7f9f4d496cabfb18",
"url": "https://github.com/SaumonNet/proxmox-nixos/archive/950e4cccac0f942076e8558f7f9f4d496cabfb18.tar.gz",
"hash": "0bhqw42ydc0jfkfqw64xsg518a1pbxnvpqw92nna7lm8mzpxm6d4"
"revision": "c6a126238e8f8efc3660b523d314a0074e61fe44",
"url": "https://github.com/SaumonNet/proxmox-nixos/archive/c6a126238e8f8efc3660b523d314a0074e61fe44.tar.gz",
"hash": "0apinc8iiqsjazlj3nh75m4w5f93fd53xs4nj1s06qay4nq1h49w"
},
"signal-irc-bridge": {
"type": "Git",
@ -290,9 +304,9 @@
"url": "https://git.dgnum.eu/mdebray/stateless-uptime-kuma"
},
"branch": "master",
"revision": "390363e6a977d71a96c53d7f8b252038dfee2e2e",
"revision": "880f444ff7862d6127b051cf1a993ad1585b1652",
"url": null,
"hash": "11vvfxw2sznc155x0xlgl00g6n9sr90xa0b1hr14vchg7gkz46r5"
"hash": "166057469hhxnyqbpd7jjlccdmigzch51616n1d5r617xg0y1mwp"
},
"wp4nix": {
"type": "Git",
@ -302,9 +316,9 @@
"server": "https://git.helsinki.tools/"
},
"branch": "master",
"revision": "4c47608f349dd45e4895e1f61f19ad9e8dfcc0bf",
"url": "https://git.helsinki.tools/api/v4/projects/helsinki-systems%2Fwp4nix/repository/archive.tar.gz?sha=4c47608f349dd45e4895e1f61f19ad9e8dfcc0bf",
"hash": "1pnjhbljihf2ras9lbp1f6izzxghccfygkkf2ikkahjr1vbicdbq"
"revision": "cce6f7961eb99fd56a039623c4d9e561d9a98928",
"url": "https://git.helsinki.tools/api/v4/projects/helsinki-systems%2Fwp4nix/repository/archive.tar.gz?sha=cce6f7961eb99fd56a039623c4d9e561d9a98928",
"hash": "0ggqc92mh4xbsrrdv8j0jl6f3cagwizd93sdl8p8mqpxv9445xrf"
}
},
"version": 3


@ -1,808 +0,0 @@
From 3b656cbdf40c6056983e95ac5c87839a68571096 Mon Sep 17 00:00:00 2001
From: Alexander Tomokhov <alexoundos@gmail.com>
Date: Tue, 3 Oct 2023 22:20:59 +0400
Subject: [PATCH 1/8] castopod: 1.6.4 -> 1.6.5
---
pkgs/applications/audio/castopod/default.nix | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/pkgs/applications/audio/castopod/default.nix b/pkgs/applications/audio/castopod/default.nix
index 9d9f83e2ecce40..83c70f9b36646d 100644
--- a/pkgs/applications/audio/castopod/default.nix
+++ b/pkgs/applications/audio/castopod/default.nix
@@ -7,11 +7,11 @@
}:
stdenv.mkDerivation {
pname = "castopod";
- version = "1.6.4";
+ version = "1.6.5";
src = fetchurl {
- url = "https://code.castopod.org/adaures/castopod/uploads/ce56d4f149242f12bedd20f9a2b0916d/castopod-1.6.4.tar.gz";
- sha256 = "080jj91yxbn3xsbs0sywzwa2f5in9bp9qi2zwqcfqpaxlq9ga62v";
+ url = "https://code.castopod.org/adaures/castopod/uploads/5aaaa6cf2edaed25bd7253449e5f8584/castopod-1.6.5.tar.gz";
+ sha256 = "04gcq2vmfy5aa2fmsm1qqv1k8g024nikmysdrhy33wj460d529b5";
};
dontBuild = true;
From 4cd096c27c52ff9948bc7d9ebc05490147ca9675 Mon Sep 17 00:00:00 2001
From: Alexander Tomokhov <alexoundos@gmail.com>
Date: Tue, 3 Oct 2023 22:19:36 +0400
Subject: [PATCH 2/8] nixos/castopod: fix startup, displaying images, uploads
up to 500 MiB
- new maxUploadSize option
- new dataDir option (with ReadWritePaths systemd support)
- admin page reports correct free disk space (instead of /nix/store)
- fix example configuration in documentation
- now podcast creation and file upload are tested during NixOS test
- move castopod from audio to web-apps folder
- verbose logging from the browser test
---
nixos/modules/module-list.nix | 2 +-
.../services/{audio => web-apps}/castopod.md | 11 +-
.../services/{audio => web-apps}/castopod.nix | 60 ++--
nixos/tests/castopod.nix | 263 +++++++++++++-----
pkgs/applications/audio/castopod/default.nix | 13 +-
5 files changed, 256 insertions(+), 93 deletions(-)
rename nixos/modules/services/{audio => web-apps}/castopod.md (72%)
rename nixos/modules/services/{audio => web-apps}/castopod.nix (80%)
diff --git a/nixos/modules/module-list.nix b/nixos/modules/module-list.nix
index 627427262da632..97043c965400c5 100644
--- a/nixos/modules/module-list.nix
+++ b/nixos/modules/module-list.nix
@@ -337,7 +337,6 @@
./services/amqp/rabbitmq.nix
./services/audio/alsa.nix
./services/audio/botamusique.nix
- ./services/audio/castopod.nix
./services/audio/gmediarender.nix
./services/audio/gonic.nix
./services/audio/goxlr-utility.nix
@@ -1282,6 +1281,7 @@
./services/web-apps/bookstack.nix
./services/web-apps/c2fmzq-server.nix
./services/web-apps/calibre-web.nix
+ ./services/web-apps/castopod.nix
./services/web-apps/coder.nix
./services/web-apps/changedetection-io.nix
./services/web-apps/chatgpt-retrieval-plugin.nix
diff --git a/nixos/modules/services/audio/castopod.md b/nixos/modules/services/web-apps/castopod.md
similarity index 72%
rename from nixos/modules/services/audio/castopod.md
rename to nixos/modules/services/web-apps/castopod.md
index ee8590737a7c73..f61bf1166a4d24 100644
--- a/nixos/modules/services/audio/castopod.md
+++ b/nixos/modules/services/web-apps/castopod.md
@@ -4,6 +4,7 @@ Castopod is an open-source hosting platform made for podcasters who want to enga
## Quickstart {#module-services-castopod-quickstart}
+Configure ACME (https://nixos.org/manual/nixos/unstable/#module-security-acme).
Use the following configuration to start a public instance of Castopod on `castopod.example.com` domain:
```nix
@@ -11,11 +12,11 @@ networking.firewall.allowedTCPPorts = [ 80 443 ];
services.castopod = {
enable = true;
database.createLocally = true;
- nginx.virtualHost = {
- serverName = "castopod.example.com";
- enableACME = true;
- forceSSL = true;
- };
+ localDomain = "castopod.example.com";
+};
+services.nginx.virtualHosts."castopod.example.com" = {
+ enableACME = true;
+ forceSSL = true;
};
```
diff --git a/nixos/modules/services/audio/castopod.nix b/nixos/modules/services/web-apps/castopod.nix
similarity index 80%
rename from nixos/modules/services/audio/castopod.nix
rename to nixos/modules/services/web-apps/castopod.nix
index b782b548914795..7c99551c83183f 100644
--- a/nixos/modules/services/audio/castopod.nix
+++ b/nixos/modules/services/web-apps/castopod.nix
@@ -4,7 +4,6 @@ let
fpm = config.services.phpfpm.pools.castopod;
user = "castopod";
- stateDirectory = "/var/lib/castopod";
# https://docs.castopod.org/getting-started/install.html#requirements
phpPackage = pkgs.php.withExtensions ({ enabled, all }: with all; [
@@ -29,6 +28,15 @@ in
defaultText = lib.literalMD "pkgs.castopod";
description = lib.mdDoc "Which Castopod package to use.";
};
+ dataDir = lib.mkOption {
+ type = lib.types.path;
+ default = "/var/lib/castopod";
+ description = lib.mdDoc ''
+ The path where castopod stores all data. This path must be in sync
+ with the castopod package (where it is hardcoded during the build in
+ accordance with its own `dataDir` argument).
+ '';
+ };
database = {
createLocally = lib.mkOption {
type = lib.types.bool;
@@ -111,6 +119,18 @@ in
Options for Castopod's PHP pool. See the documentation on `php-fpm.conf` for details on configuration directives.
'';
};
+ maxUploadSize = lib.mkOption {
+ type = lib.types.int;
+ default = 512;
+ description = lib.mdDoc ''
+ Maximum supported size for a file upload in MiB. Maximum HTTP body
+ size is set to this value for nginx and PHP (because castopod doesn't
+ support chunked uploads yet:
+ https://code.castopod.org/adaures/castopod/-/issues/330). Note, that
+ practical upload size limit is smaller. For example, with 512 MiB
+ setting - around 500 MiB is possible.
+ '';
+ };
};
};
@@ -120,13 +140,13 @@ in
sslEnabled = with config.services.nginx.virtualHosts.${cfg.localDomain}; addSSL || forceSSL || onlySSL || enableACME || useACMEHost != null;
baseURL = "http${lib.optionalString sslEnabled "s"}://${cfg.localDomain}";
in
- lib.mapAttrs (name: lib.mkDefault) {
+ lib.mapAttrs (_name: lib.mkDefault) {
"app.forceGlobalSecureRequests" = sslEnabled;
"app.baseURL" = baseURL;
- "media.baseURL" = "/";
+ "media.baseURL" = baseURL;
"media.root" = "media";
- "media.storage" = stateDirectory;
+ "media.storage" = cfg.dataDir;
"admin.gateway" = "admin";
"auth.gateway" = "auth";
@@ -142,13 +162,13 @@ in
services.phpfpm.pools.castopod = {
inherit user;
group = config.services.nginx.group;
- phpPackage = phpPackage;
+ inherit phpPackage;
phpOptions = ''
- # https://code.castopod.org/adaures/castopod/-/blob/main/docker/production/app/uploads.ini
+ # https://code.castopod.org/adaures/castopod/-/blob/develop/docker/production/common/uploads.template.ini
file_uploads = On
memory_limit = 512M
- upload_max_filesize = 500M
- post_max_size = 512M
+ upload_max_filesize = ${toString cfg.maxUploadSize}M
+ post_max_size = ${toString cfg.maxUploadSize}M
max_execution_time = 300
max_input_time = 300
'';
@@ -165,25 +185,25 @@ in
path = [ pkgs.openssl phpPackage ];
script =
let
- envFile = "${stateDirectory}/.env";
+ envFile = "${cfg.dataDir}/.env";
media = "${cfg.settings."media.storage"}/${cfg.settings."media.root"}";
in
''
- mkdir -p ${stateDirectory}/writable/{cache,logs,session,temp,uploads}
+ mkdir -p ${cfg.dataDir}/writable/{cache,logs,session,temp,uploads}
if [ ! -d ${lib.escapeShellArg media} ]; then
cp --no-preserve=mode,ownership -r ${cfg.package}/share/castopod/public/media ${lib.escapeShellArg media}
fi
- if [ ! -f ${stateDirectory}/salt ]; then
- openssl rand -base64 33 > ${stateDirectory}/salt
+ if [ ! -f ${cfg.dataDir}/salt ]; then
+ openssl rand -base64 33 > ${cfg.dataDir}/salt
fi
cat <<'EOF' > ${envFile}
${lib.generators.toKeyValue { } cfg.settings}
EOF
- echo "analytics.salt=$(cat ${stateDirectory}/salt)" >> ${envFile}
+ echo "analytics.salt=$(cat ${cfg.dataDir}/salt)" >> ${envFile}
${if (cfg.database.passwordFile != null) then ''
echo "database.default.password=$(cat ${lib.escapeShellArg cfg.database.passwordFile})" >> ${envFile}
@@ -192,10 +212,10 @@ in
''}
${lib.optionalString (cfg.environmentFile != null) ''
- cat ${lib.escapeShellArg cfg.environmentFile}) >> ${envFile}
+ cat ${lib.escapeShellArg cfg.environmentFile} >> ${envFile}
''}
- php spark castopod:database-update
+ php ${cfg.package}/share/castopod/spark castopod:database-update
'';
serviceConfig = {
StateDirectory = "castopod";
@@ -204,6 +224,7 @@ in
RemainAfterExit = true;
User = user;
Group = config.services.nginx.group;
+ ReadWritePaths = cfg.dataDir;
};
};
@@ -212,9 +233,7 @@ in
wantedBy = [ "multi-user.target" ];
path = [ phpPackage ];
script = ''
- php public/index.php scheduled-activities
- php public/index.php scheduled-websub-publish
- php public/index.php scheduled-video-clips
+ php ${cfg.package}/share/castopod/spark tasks:run
'';
serviceConfig = {
StateDirectory = "castopod";
@@ -222,6 +241,8 @@ in
Type = "oneshot";
User = user;
Group = config.services.nginx.group;
+ ReadWritePaths = cfg.dataDir;
+ LogLevelMax = "notice"; # otherwise periodic tasks flood the journal
};
};
@@ -251,6 +272,7 @@ in
extraConfig = ''
try_files $uri $uri/ /index.php?$args;
index index.php index.html;
+ client_max_body_size ${toString cfg.maxUploadSize}M;
'';
locations."^~ /${cfg.settings."media.root"}/" = {
@@ -278,7 +300,7 @@ in
};
};
- users.users.${user} = lib.mapAttrs (name: lib.mkDefault) {
+ users.users.${user} = lib.mapAttrs (_name: lib.mkDefault) {
description = "Castopod user";
isSystemUser = true;
group = config.services.nginx.group;
diff --git a/nixos/tests/castopod.nix b/nixos/tests/castopod.nix
index 4435ec617d4e67..2db7aa0bda6507 100644
--- a/nixos/tests/castopod.nix
+++ b/nixos/tests/castopod.nix
@@ -4,74 +4,211 @@ import ./make-test-python.nix ({ pkgs, lib, ... }:
meta = with lib.maintainers; {
maintainers = [ alexoundos misuzu ];
};
+
nodes.castopod = { nodes, ... }: {
+ # otherwise 500 MiB file upload fails!
+ virtualisation.diskSize = 512 + 3 * 512;
+
networking.firewall.allowedTCPPorts = [ 80 ];
- networking.extraHosts = ''
- 127.0.0.1 castopod.example.com
- '';
+ networking.extraHosts =
+ lib.strings.concatStringsSep "\n"
+ (lib.attrsets.mapAttrsToList
+ (name: _: "127.0.0.1 ${name}")
+ nodes.castopod.services.nginx.virtualHosts);
+
services.castopod = {
enable = true;
database.createLocally = true;
localDomain = "castopod.example.com";
+ maxUploadSize = 512;
};
- environment.systemPackages =
- let
- username = "admin";
- email = "admin@castood.example.com";
- password = "v82HmEp5";
- testRunner = pkgs.writers.writePython3Bin "test-runner"
- {
- libraries = [ pkgs.python3Packages.selenium ];
- flakeIgnore = [
- "E501"
- ];
- } ''
- from selenium.webdriver.common.by import By
- from selenium.webdriver import Firefox
- from selenium.webdriver.firefox.options import Options
- from selenium.webdriver.support.ui import WebDriverWait
- from selenium.webdriver.support import expected_conditions as EC
-
- options = Options()
- options.add_argument('--headless')
- driver = Firefox(options=options)
- try:
- driver.implicitly_wait(20)
- driver.get('http://castopod.example.com/cp-install')
-
- wait = WebDriverWait(driver, 10)
-
- wait.until(EC.title_contains("installer"))
-
- driver.find_element(By.CSS_SELECTOR, '#username').send_keys(
- '${username}'
- )
- driver.find_element(By.CSS_SELECTOR, '#email').send_keys(
- '${email}'
- )
- driver.find_element(By.CSS_SELECTOR, '#password').send_keys(
- '${password}'
- )
- driver.find_element(By.XPATH, "//button[contains(., 'Finish install')]").click()
-
- wait.until(EC.title_contains("Auth"))
-
- driver.find_element(By.CSS_SELECTOR, '#email').send_keys(
- '${email}'
- )
- driver.find_element(By.CSS_SELECTOR, '#password').send_keys(
- '${password}'
- )
- driver.find_element(By.XPATH, "//button[contains(., 'Login')]").click()
-
- wait.until(EC.title_contains("Admin dashboard"))
- finally:
- driver.close()
- driver.quit()
- '';
- in
- [ pkgs.firefox-unwrapped pkgs.geckodriver testRunner ];
};
+
+ nodes.client = { nodes, pkgs, lib, ... }:
+ let
+ domain = nodes.castopod.services.castopod.localDomain;
+
+ getIP = node:
+ (builtins.head node.networking.interfaces.eth1.ipv4.addresses).address;
+
+ targetPodcastSize = 500 * 1024 * 1024;
+ lameMp3Bitrate = 348300;
+ lameMp3FileAdjust = -800;
+ targetPodcastDuration = toString
+ ((targetPodcastSize + lameMp3FileAdjust) / (lameMp3Bitrate / 8));
+ mp3file = with pkgs;
+ runCommand "gen-castopod.mp3" { nativeBuildInputs = [ sox lame ]; } ''
+ sox -n -r 48000 -t wav - synth ${targetPodcastDuration} sine 440 `
+ `| lame --noreplaygain -cbr -q 9 -b 320 - $out
+ FILESIZE="$(stat -c%s $out)"
+ [ "$FILESIZE" -gt 0 ]
+ [ "$FILESIZE" -le "${toString targetPodcastSize}" ]
+ '';
+
+ bannerWidth = 3000;
+ banner = pkgs.runCommand "gen-castopod-cover.jpg" { } ''
+ ${pkgs.imagemagick}/bin/magick `
+ `-background green -bordercolor white -gravity northwest xc:black `
+ `-duplicate 99 `
+ `-seed 1 -resize "%[fx:rand()*72+24]" `
+ `-seed 0 -rotate "%[fx:rand()*360]" -border 6x6 -splice 16x36 `
+ `-seed 0 -rotate "%[fx:floor(rand()*4)*90]" -resize "150x50!" `
+ `+append -crop 10x1@ +repage -roll "+%[fx:(t%2)*72]+0" -append `
+ `-resize ${toString bannerWidth} -quality 1 $out
+ '';
+
+ coverWidth = toString 3000;
+ cover = pkgs.runCommand "gen-castopod-banner.jpg" { } ''
+ ${pkgs.imagemagick}/bin/magick `
+ `-background white -bordercolor white -gravity northwest xc:black `
+ `-duplicate 99 `
+ `-seed 1 -resize "%[fx:rand()*72+24]" `
+ `-seed 0 -rotate "%[fx:rand()*360]" -border 6x6 -splice 36x36 `
+ `-seed 0 -rotate "%[fx:floor(rand()*4)*90]" -resize "144x144!" `
+ `+append -crop 10x1@ +repage -roll "+%[fx:(t%2)*72]+0" -append `
+ `-resize ${coverWidth} -quality 1 $out
+ '';
+ in
+ {
+ networking.extraHosts =
+ lib.strings.concatStringsSep "\n"
+ (lib.attrsets.mapAttrsToList
+ (name: _: "${getIP nodes.castopod} ${name}")
+ nodes.castopod.services.nginx.virtualHosts);
+
+ environment.systemPackages =
+ let
+ username = "admin";
+ email = "admin@${domain}";
+ password = "Abcd1234";
+ podcastTitle = "Some Title";
+ episodeTitle = "Episode Title";
+ browser-test = pkgs.writers.writePython3Bin "browser-test"
+ {
+ libraries = [ pkgs.python3Packages.selenium ];
+ flakeIgnore = [ "E124" "E501" ];
+ } ''
+ from selenium.webdriver.common.by import By
+ from selenium.webdriver import Firefox
+ from selenium.webdriver.firefox.options import Options
+ from selenium.webdriver.firefox.service import Service
+ from selenium.webdriver.support.ui import WebDriverWait
+ from selenium.webdriver.support import expected_conditions as EC
+ from subprocess import STDOUT
+ import logging
+
+ selenium_logger = logging.getLogger("selenium")
+ selenium_logger.setLevel(logging.DEBUG)
+ selenium_logger.addHandler(logging.StreamHandler())
+
+ options = Options()
+ options.add_argument('--headless')
+ service = Service(log_output=STDOUT)
+ driver = Firefox(options=options, service=service)
+ driver = Firefox(options=options)
+ driver.implicitly_wait(20)
+
+ # install ##########################################################
+
+ driver.get('http://${domain}/cp-install')
+
+ wait = WebDriverWait(driver, 10)
+
+ wait.until(EC.title_contains("installer"))
+
+ driver.find_element(By.CSS_SELECTOR, '#username').send_keys(
+ '${username}'
+ )
+ driver.find_element(By.CSS_SELECTOR, '#email').send_keys(
+ '${email}'
+ )
+ driver.find_element(By.CSS_SELECTOR, '#password').send_keys(
+ '${password}'
+ )
+ driver.find_element(By.XPATH,
+ "//button[contains(., 'Finish install')]"
+ ).click()
+
+ wait.until(EC.title_contains("Auth"))
+
+ driver.find_element(By.CSS_SELECTOR, '#email').send_keys(
+ '${email}'
+ )
+ driver.find_element(By.CSS_SELECTOR, '#password').send_keys(
+ '${password}'
+ )
+ driver.find_element(By.XPATH,
+ "//button[contains(., 'Login')]"
+ ).click()
+
+ wait.until(EC.title_contains("Admin dashboard"))
+
+ # create podcast ###################################################
+
+ driver.get('http://${domain}/admin/podcasts/new')
+
+ wait.until(EC.title_contains("Create podcast"))
+
+ driver.find_element(By.CSS_SELECTOR, '#cover').send_keys(
+ '${cover}'
+ )
+ driver.find_element(By.CSS_SELECTOR, '#banner').send_keys(
+ '${banner}'
+ )
+ driver.find_element(By.CSS_SELECTOR, '#title').send_keys(
+ '${podcastTitle}'
+ )
+ driver.find_element(By.CSS_SELECTOR, '#handle').send_keys(
+ 'some_handle'
+ )
+ driver.find_element(By.CSS_SELECTOR, '#description').send_keys(
+ 'Some description'
+ )
+ driver.find_element(By.CSS_SELECTOR, '#owner_name').send_keys(
+ 'Owner Name'
+ )
+ driver.find_element(By.CSS_SELECTOR, '#owner_email').send_keys(
+ 'owner@email.xyz'
+ )
+ driver.find_element(By.XPATH,
+ "//button[contains(., 'Create podcast')]"
+ ).click()
+
+ wait.until(EC.title_contains("${podcastTitle}"))
+
+ driver.find_element(By.XPATH,
+ "//span[contains(., 'Add an episode')]"
+ ).click()
+
+ wait.until(EC.title_contains("Add an episode"))
+
+ # upload podcast ###################################################
+
+ driver.find_element(By.CSS_SELECTOR, '#audio_file').send_keys(
+ '${mp3file}'
+ )
+ driver.find_element(By.CSS_SELECTOR, '#cover').send_keys(
+ '${cover}'
+ )
+ driver.find_element(By.CSS_SELECTOR, '#description').send_keys(
+ 'Episode description'
+ )
+ driver.find_element(By.CSS_SELECTOR, '#title').send_keys(
+ '${episodeTitle}'
+ )
+ driver.find_element(By.XPATH,
+ "//button[contains(., 'Create episode')]"
+ ).click()
+
+ wait.until(EC.title_contains("${episodeTitle}"))
+
+ driver.close()
+ driver.quit()
+ '';
+ in
+ [ pkgs.firefox-unwrapped pkgs.geckodriver browser-test ];
+ };
+
testScript = ''
start_all()
castopod.wait_for_unit("castopod-setup.service")
@@ -79,9 +216,9 @@ import ./make-test-python.nix ({ pkgs, lib, ... }:
castopod.wait_for_unit("nginx.service")
castopod.wait_for_open_port(80)
castopod.wait_until_succeeds("curl -sS -f http://castopod.example.com")
- castopod.succeed("curl -s http://localhost/cp-install | grep 'Create your Super Admin account' > /dev/null")
- with subtest("Create superadmin and log in"):
- castopod.succeed("PYTHONUNBUFFERED=1 systemd-cat -t test-runner test-runner")
+ with subtest("Create superadmin, log in, create and upload a podcast"):
+ client.succeed(\
+ "PYTHONUNBUFFERED=1 systemd-cat -t browser-test browser-test")
'';
})
diff --git a/pkgs/applications/audio/castopod/default.nix b/pkgs/applications/audio/castopod/default.nix
index 83c70f9b36646d..badace09587d2b 100644
--- a/pkgs/applications/audio/castopod/default.nix
+++ b/pkgs/applications/audio/castopod/default.nix
@@ -3,7 +3,7 @@
, ffmpeg-headless
, lib
, nixosTests
-, stateDirectory ? "/var/lib/castopod"
+, dataDir ? "/var/lib/castopod"
}:
stdenv.mkDerivation {
pname = "castopod";
@@ -20,13 +20,16 @@ stdenv.mkDerivation {
postPatch = ''
# not configurable at runtime unfortunately:
substituteInPlace app/Config/Paths.php \
- --replace "__DIR__ . '/../../writable'" "'${stateDirectory}/writable'"
+ --replace "__DIR__ . '/../../writable'" "'${dataDir}/writable'"
- # configuration file must be writable, place it to ${stateDirectory}
+ substituteInPlace modules/Admin/Controllers/DashboardController.php \
+ --replace "disk_total_space('./')" "disk_total_space('${dataDir}')"
+
+ # configuration file must be writable, place it to ${dataDir}
substituteInPlace modules/Install/Controllers/InstallController.php \
- --replace "ROOTPATH" "'${stateDirectory}/'"
+ --replace "ROOTPATH" "'${dataDir}/'"
substituteInPlace public/index.php spark \
- --replace "DotEnv(ROOTPATH)" "DotEnv('${stateDirectory}')"
+ --replace "DotEnv(ROOTPATH)" "DotEnv('${dataDir}')"
# ffmpeg is required for Video Clips feature
substituteInPlace modules/MediaClipper/VideoClipper.php \
From 45d43fe39fa3167d5cf7ba9a2cb9fcd6fbe2c5c3 Mon Sep 17 00:00:00 2001
From: Alexander Tomokhov <alexoundos@gmail.com>
Date: Mon, 11 Dec 2023 09:00:26 +0400
Subject: [PATCH 3/8] nixos/castopod: little documentation fix
---
nixos/modules/services/web-apps/castopod.nix | 7 ++++---
1 file changed, 4 insertions(+), 3 deletions(-)
diff --git a/nixos/modules/services/web-apps/castopod.nix b/nixos/modules/services/web-apps/castopod.nix
index 7c99551c83183f..11cf4b36aeb385 100644
--- a/nixos/modules/services/web-apps/castopod.nix
+++ b/nixos/modules/services/web-apps/castopod.nix
@@ -126,9 +126,10 @@ in
Maximum supported size for a file upload in MiB. Maximum HTTP body
size is set to this value for nginx and PHP (because castopod doesn't
support chunked uploads yet:
- https://code.castopod.org/adaures/castopod/-/issues/330). Note, that
- practical upload size limit is smaller. For example, with 512 MiB
- setting - around 500 MiB is possible.
+ https://code.castopod.org/adaures/castopod/-/issues/330).
+
+ Note, that practical upload size limit is smaller. For example, with
+ 512 MiB setting - around 500 MiB is possible.
'';
};
};
From 4aafd48b7e76748eaf0ff7409b12b455d1db31ec Mon Sep 17 00:00:00 2001
From: sinavir <sinavir@sinavir.fr>
Date: Fri, 23 Feb 2024 22:02:10 +0100
Subject: [PATCH 4/8] castopod: 1.6.5 -> 1.10.3
---
pkgs/applications/audio/castopod/default.nix | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/pkgs/applications/audio/castopod/default.nix b/pkgs/applications/audio/castopod/default.nix
index badace09587d2b..438f1d728f9624 100644
--- a/pkgs/applications/audio/castopod/default.nix
+++ b/pkgs/applications/audio/castopod/default.nix
@@ -10,8 +10,8 @@ stdenv.mkDerivation {
version = "1.6.5";
src = fetchurl {
- url = "https://code.castopod.org/adaures/castopod/uploads/5aaaa6cf2edaed25bd7253449e5f8584/castopod-1.6.5.tar.gz";
- sha256 = "04gcq2vmfy5aa2fmsm1qqv1k8g024nikmysdrhy33wj460d529b5";
+ url = "https://code.castopod.org/adaures/castopod/uploads/2bb52d4607a772ac8b397efa3559a3ae/castopod-1.10.3.tar.gz";
+ sha256 = "0w1yl14v3aajm089vwpq9wkiibv3w312y004ggdbf7xwzsrmjs51";
};
dontBuild = true;
From 6205595efbdcc2f3440022b4ff7258e2f50a6427 Mon Sep 17 00:00:00 2001
From: sinavir <sinavir@sinavir.fr>
Date: Fri, 23 Feb 2024 22:27:24 +0100
Subject: [PATCH 6/8] nixos/castopod: use LoadCredentials
---
nixos/modules/services/web-apps/castopod.nix | 12 ++++++++++--
1 file changed, 10 insertions(+), 2 deletions(-)
diff --git a/nixos/modules/services/web-apps/castopod.nix b/nixos/modules/services/web-apps/castopod.nix
index 11cf4b36aeb385..042fb3954d2b73 100644
--- a/nixos/modules/services/web-apps/castopod.nix
+++ b/nixos/modules/services/web-apps/castopod.nix
@@ -67,6 +67,8 @@ in
description = lib.mdDoc ''
A file containing the password corresponding to
[](#opt-services.castopod.database.user).
+
+ This file is loaded using systemd LoadCredentials.
'';
};
};
@@ -93,6 +95,8 @@ in
Environment file to inject e.g. secrets into the configuration.
See [](https://code.castopod.org/adaures/castopod/-/blob/main/.env.example)
for available environment variables.
+
+ This file is loaded using systemd LoadCredentials.
'';
};
configureNginx = lib.mkOption {
@@ -207,19 +211,23 @@ in
echo "analytics.salt=$(cat ${cfg.dataDir}/salt)" >> ${envFile}
${if (cfg.database.passwordFile != null) then ''
- echo "database.default.password=$(cat ${lib.escapeShellArg cfg.database.passwordFile})" >> ${envFile}
+ echo "database.default.password=$(cat "$CREDENTIALS_DIRECTORY/dbpasswordfile)" >> ${envFile}
'' else ''
echo "database.default.password=" >> ${envFile}
''}
${lib.optionalString (cfg.environmentFile != null) ''
- cat ${lib.escapeShellArg cfg.environmentFile} >> ${envFile}
+ cat "$CREDENTIALS_DIRECTORY/envfile" >> ${envFile}
''}
php ${cfg.package}/share/castopod/spark castopod:database-update
'';
serviceConfig = {
StateDirectory = "castopod";
+ LoadCredential = lib.optional (cfg.environmentFile != null)
+ "envfile:${cfg.environmentFile}"
+ ++ (lib.optional (cfg.database.passwordFile != null)
+ "dbpasswordfile:${cfg.database.passwordFile}");
WorkingDirectory = "${cfg.package}/share/castopod";
Type = "oneshot";
RemainAfterExit = true;
From 9b03fc35a30671e5d4146bbcbe6b5536fa9baacc Mon Sep 17 00:00:00 2001
From: sinavir <sinavir@sinavir.fr>
Date: Sat, 2 Mar 2024 18:01:54 +0100
Subject: [PATCH 7/8] nixos/castopod: build mp3 in the test
---
nixos/tests/castopod.nix | 31 +++++++++++++++++++++----------
1 file changed, 21 insertions(+), 10 deletions(-)
diff --git a/nixos/tests/castopod.nix b/nixos/tests/castopod.nix
index 2db7aa0bda6507..2bdc6941c23815 100644
--- a/nixos/tests/castopod.nix
+++ b/nixos/tests/castopod.nix
@@ -37,14 +37,7 @@ import ./make-test-python.nix ({ pkgs, lib, ... }:
targetPodcastDuration = toString
((targetPodcastSize + lameMp3FileAdjust) / (lameMp3Bitrate / 8));
mp3file = with pkgs;
- runCommand "gen-castopod.mp3" { nativeBuildInputs = [ sox lame ]; } ''
- sox -n -r 48000 -t wav - synth ${targetPodcastDuration} sine 440 `
- `| lame --noreplaygain -cbr -q 9 -b 320 - $out
- FILESIZE="$(stat -c%s $out)"
- [ "$FILESIZE" -gt 0 ]
- [ "$FILESIZE" -le "${toString targetPodcastSize}" ]
- '';
-
+ runCommand ;
bannerWidth = 3000;
banner = pkgs.runCommand "gen-castopod-cover.jpg" { } ''
${pkgs.imagemagick}/bin/magick `
@@ -185,7 +178,7 @@ import ./make-test-python.nix ({ pkgs, lib, ... }:
# upload podcast ###################################################
driver.find_element(By.CSS_SELECTOR, '#audio_file').send_keys(
- '${mp3file}'
+ '/tmp/podcast.mp3'
)
driver.find_element(By.CSS_SELECTOR, '#cover').send_keys(
'${cover}'
@@ -206,7 +199,23 @@ import ./make-test-python.nix ({ pkgs, lib, ... }:
driver.quit()
'';
in
- [ pkgs.firefox-unwrapped pkgs.geckodriver browser-test ];
+ [
+ pkgs.firefox-unwrapped
+ pkgs.geckodriver
+ browser-test
+ (pkgs.writeShellApplication {
+ name = "build-mp3";
+ runtimeInputs = with pkgs; [ sox lame ];
+ text = ''
+ out=/tmp/podcast.mp3
+ sox -n -r 48000 -t wav - synth ${targetPodcastDuration} sine 440 `
+ `| lame --noreplaygain -cbr -q 9 -b 320 - $out
+ FILESIZE="$(stat -c%s $out)"
+ [ "$FILESIZE" -gt 0 ]
+ [ "$FILESIZE" -le "${toString targetPodcastSize}" ]
+ '';
+ })
+ ];
};
testScript = ''
@@ -217,6 +226,8 @@ import ./make-test-python.nix ({ pkgs, lib, ... }:
castopod.wait_for_open_port(80)
castopod.wait_until_succeeds("curl -sS -f http://castopod.example.com")
+ client.succeed("build-mp3")
+
with subtest("Create superadmin, log in, create and upload a podcast"):
client.succeed(\
"PYTHONUNBUFFERED=1 systemd-cat -t browser-test browser-test")
From 538281e8be427f820371f4005e991e0281872e12 Mon Sep 17 00:00:00 2001
From: sinavir <sinavir@sinavir.fr>
Date: Sat, 2 Mar 2024 18:04:35 +0100
Subject: [PATCH 8/8] nixos/castopod: Increase test timeouts
---
nixos/tests/castopod.nix | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/nixos/tests/castopod.nix b/nixos/tests/castopod.nix
index 2bdc6941c23815..2c5c745f7da0df 100644
--- a/nixos/tests/castopod.nix
+++ b/nixos/tests/castopod.nix
@@ -105,7 +105,7 @@ import ./make-test-python.nix ({ pkgs, lib, ... }:
driver.get('http://${domain}/cp-install')
- wait = WebDriverWait(driver, 10)
+ wait = WebDriverWait(driver, 20)
wait.until(EC.title_contains("installer"))


@ -1,28 +0,0 @@
--- a/nixos/modules/services/web-apps/nextcloud.nix
+++ b/nixos/modules/services/web-apps/nextcloud.nix
@@ -131,6 +131,7 @@
(mkRemovedOptionModule [ "services" "nextcloud" "disableImagemagick" ] ''
Use services.nextcloud.enableImagemagick instead.
'')
+ (mkRenamedOptionModule [ "services" "nextcloud" "config" "objectstore" "s3" "autocreate" ] [ "services" "nextcloud" "config" "objectstore" "s3" "verify_bucket_exists" ])
];
options.services.nextcloud = {
@@ -487,7 +487,7 @@
The name of the S3 bucket.
'';
};
- autocreate = mkOption {
+ verify_bucket_exists = mkOption {
type = types.bool;
description = lib.mdDoc ''
Create the objectstore if it does not exist.
@@ -820,7 +820,7 @@
'class' => '\\OC\\Files\\ObjectStore\\S3',
'arguments' => [
'bucket' => '${s3.bucket}',
- 'autocreate' => ${boolToString s3.autocreate},
+ 'verify_bucket_exists' => ${boolToString s3.verify_bucket_exists},
'key' => '${s3.key}',
'secret' => nix_read_secret('${s3.secretFile}'),
${optionalString (s3.hostname != null) "'hostname' => '${s3.hostname}',"}


@ -1,21 +1,19 @@
let
netboxAgent = {
id = "244549";
hash = "sha256-SePkKEYQGDj6FpuyxZ+1ASeVPA02mCHf0G5i3koMdNw=";
local = path: {
_type = "static";
inherit path;
};
in
{
lix = [
(local ./lix/01-disable-installChecks.patch)
];
"nixos-24.05" = [
# netbox qrcode plugin
{
_type = "commit";
sha = "ae4bf4c110378ebacb3989c9533726859cfebbfa";
hash = "sha256-SgHhW9HCkDQsxT3eG4P9q68c43e3sbDHRY9qs7oSt8o=";
}
netboxAgent
(local ./nixpkgs/06-netbox-qrcode.patch)
# nixos/nextcloud: Rename autocreate (a no-op) to verify_bucket_exists
{
id = "275165";
hash = "sha256-9a26V3Pi8yLD3N9+mC1kvJoruxRTp/qOHapnt6VX7pw=";
@ -29,10 +27,7 @@ in
}
# Crabfit: don't depend on all google-fonts
{
_type = "static";
path = ./04-crabfit-karla.patch;
}
(local ./nixpkgs/04-crabfit-karla.patch)
# nixos/kanidm: add basic provisioning
{
@ -56,13 +51,10 @@ in
];
"nixos-unstable" = [
netboxAgent
# netbox qrcode plugin
{
_type = "commit";
sha = "ae4bf4c110378ebacb3989c9533726859cfebbfa";
hash = "sha256-SgHhW9HCkDQsxT3eG4P9q68c43e3sbDHRY9qs7oSt8o=";
}
(local ./nixpkgs/06-netbox-qrcode.patch)
# Build netbird-relay
(local ./nixpkgs/05-netbird-relay.patch)
];
}


@ -0,0 +1,21 @@
diff --git a/pkgs/tools/networking/netbird/default.nix b/pkgs/tools/networking/netbird/default.nix
index 07a1e906dad3..d5799446628b 100644
--- a/pkgs/tools/networking/netbird/default.nix
+++ b/pkgs/tools/networking/netbird/default.nix
@@ -26,6 +26,7 @@ let
} else {
client = "netbird";
management = "netbird-mgmt";
+ relay = "netbird-relay";
signal = "netbird-signal";
};
in
@@ -82,7 +83,7 @@ buildGoModule rec {
(lib.mapAttrsToList
(module: binary: ''
mv $out/bin/${lib.last (lib.splitString "/" module)} $out/bin/${binary}
- '' + lib.optionalString (!ui) ''
+ '' + lib.optionalString (!ui && module != "relay") ''
installShellCompletion --cmd ${binary} \
--bash <($out/bin/${binary} completion bash) \
--fish <($out/bin/${binary} completion fish) \


@ -0,0 +1,70 @@
diff --git a/pkgs/development/python-modules/netbox-qrcode/default.nix b/pkgs/development/python-modules/netbox-qrcode/default.nix
new file mode 100644
index 000000000000..b378b839a8dc
--- /dev/null
+++ b/pkgs/development/python-modules/netbox-qrcode/default.nix
@@ -0,0 +1,51 @@
+{ lib
+, buildPythonPackage
+, fetchFromGitHub
+, setuptools
+, wheel
+, pillow
+, qrcode
+, netbox
+}:
+
+buildPythonPackage rec {
+ pname = "netbox-qrcode";
+ version = "0.0.13";
+ pyproject = true;
+
+ src = fetchFromGitHub {
+ owner = "netbox-community";
+ repo = "netbox-qrcode";
+ rev = "v${version}";
+ hash = "sha256-/labSZyB1SkU/uemuL946RDk8IVEAgCYJY2vrJFney0=";
+ };
+
+ nativeBuildInputs = [
+ setuptools
+ wheel
+ ];
+
+ propagatedBuildInputs = [
+ qrcode
+ pillow
+ ];
+
+ checkInputs = [
+ netbox
+ ];
+
+ preFixup = ''
+ export PYTHONPATH=${netbox}/opt/netbox/netbox:$PYTHONPATH
+ '';
+
+ pythonImportsCheck = [
+ "netbox_qrcode"
+ ];
+
+ meta = with lib; {
+ description = "NetBox Plugin for generate QR Codes";
+ homepage = "https://github.com/netbox-community/netbox-qrcode";
+ license = licenses.asl20;
+ maintainers = with maintainers; [ sinavir ];
+ };
+}
diff --git a/pkgs/top-level/python-packages.nix b/pkgs/top-level/python-packages.nix
index 9999d745e3ac..b226e0063672 100644
--- a/pkgs/top-level/python-packages.nix
+++ b/pkgs/top-level/python-packages.nix
@@ -8989,6 +8989,8 @@ self: super: with self; {
netbox-reorder-rack = callPackage ../development/python-modules/netbox-reorder-rack { };
+ netbox-qrcode = callPackage ../development/python-modules/netbox-qrcode { };
+
netcdf4 = callPackage ../development/python-modules/netcdf4 { };
netdata = callPackage ../development/python-modules/netdata { };
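
For context, this packages the QR-code plugin so a NetBox instance can load it; on NixOS the usual wiring goes through `services.netbox.plugins`. A sketch under that assumption (values are illustrative, not this repository's actual configuration):

```nix
# Sketch: make the plugin importable by the NetBox service (illustrative).
{
  services.netbox = {
    enable = true;
    # Takes a function over NetBox's Python package set; NetBox's own
    # PLUGINS setting must additionally list "netbox_qrcode".
    plugins = python3Packages: with python3Packages; [ netbox-qrcode ];
  };
}
```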


@ -1,6 +1,9 @@
set -eu -o pipefail
set -o errexit
set -o nounset
set -o pipefail
shopt -s lastpipe
drv=$("@colmena@/bin/colmena" eval --instantiate -E "{ nodes, ... }: nodes.${BUILD_NODE}.config.system.build.toplevel")
drv=$(colmena eval --instantiate -E "{ nodes, ... }: nodes.${BUILD_NODE}.config.system.build.toplevel")
# Build the derivation and send it to the great beyond
nix-store --query --requisites --force-realise --include-outputs "$drv" | grep -v '.*\.drv' >paths.txt


@ -1,7 +1,3 @@
#!/usr/bin/env bash
#!@bash@/bin/bash
# shellcheck shell=bash
set -o errexit
set -o nounset
set -o pipefail
@ -82,19 +78,18 @@ retrieve_current_system () {
ssh -n "root@$1" "readlink -f /run/current-system"
}
return_status=0
echo "$RESULTS" | @jq@/bin/jq -c '.[]' |
echo "$RESULTS" | jq -c '.[]' |
while IFS=$'\n' read -r c; do
machine=$(echo "$c" | @jq@/bin/jq -r '.machine')
machine=$(echo "$c" | jq -r '.machine')
if [[ -n ${node-} ]] && [[ "$machine" != "$node" ]]; then
echo "Skipping ${machine}"
continue
fi
expected_path=$(echo "$c" | @jq@/bin/jq -r '.path')
domain=$(echo "$c" | @jq@/bin/jq -r '.domain')
drv_path=$(echo "$c" | @jq@/bin/jq -r '.drv')
expected_path=$(echo "$c" | jq -r '.path')
domain=$(echo "$c" | jq -r '.domain')
drv_path=$(echo "$c" | jq -r '.drv')
err=0
current_path=$(retrieve_current_system "$domain") || err=1
@ -109,7 +104,7 @@ while IFS=$'\n' read -r c; do
nix-copy-closure --from "root@$domain" "$current_path"
nix-store -r "$drv_path"
echo "$machine -> error. nvd output:"
@nvd@/bin/nvd diff "$expected_path" "$current_path"
nvd diff "$expected_path" "$current_path"
return_status=1
else
echo "☠️ $machine -> error:"
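
Judging from the jq queries, `$RESULTS` is expected to be a JSON array with one entry per machine. A sketch of that assumed shape, written as the Nix value it would be serialized from (field names come from the script; all values are placeholders):

```nix
# Assumed input shape for check-deployment (placeholders, not real paths).
builtins.toJSON [
  {
    machine = "web01";
    domain = "web01.example.org"; # SSH target: root@<domain>
    path = "/nix/store/...-nixos-system-web01"; # expected running system
    drv = "/nix/store/...-nixos-system-web01.drv"; # derivation realised for the nvd diff
  }
]
```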


@ -1,39 +1,32 @@
{ pkgs, ... }:
{ pkgs }:
let
substitutions = {
inherit (pkgs.lib) mapAttrs;
inherit (pkgs)
bash
writeShellApplication
colmena
coreutils
nvd
git
jq
nvd
;
};
mkShellScript =
name:
(pkgs.substituteAll (
{
inherit name;
src = ./. + "/${name}.sh";
dir = "/bin/";
isExecutable = true;
checkPhase = ''
${pkgs.stdenv.shellDryRun} "$target"
'';
}
// substitutions
));
scripts = [
"cache-node"
"check-deployment"
"launch-vm"
"list-nodes"
scripts = {
cache-node = [ colmena ];
check-deployment = [
colmena
jq
nvd
];
launch-vm = [ colmena ];
list-nodes = [ jq ];
};
in
builtins.map mkShellScript scripts
mapAttrs (
name: runtimeInputs:
writeShellApplication {
inherit name runtimeInputs;
text = builtins.readFile ./${name}.sh;
}
) scripts
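
Each attribute in `scripts` now yields a `writeShellApplication` package: the listed runtimeInputs end up on PATH at run time, which is why the scripts call plain `colmena`, `jq` and `nvd` instead of `@...@`-substituted store paths. One generated entry is roughly equivalent to this sketch:

```nix
# Roughly what the mapAttrs call produces for one entry (sketch).
pkgs.writeShellApplication {
  name = "check-deployment";
  runtimeInputs = with pkgs; [ colmena jq nvd ]; # placed on PATH for the script
  text = builtins.readFile ./check-deployment.sh;
}
```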


@ -1,8 +1,7 @@
#!@bash@/bin/bash
# shellcheck shell=bash
set -o errexit
set -o nounset
set -o pipefail
shopt -s lastpipe
MACHINE=""
HOSTFWD=""
@ -25,9 +24,12 @@ while getopts 'p:o:h' opt; do
done
shift "$((OPTIND - 1))"
if [ -z "$MACHINE" ]; then echo "-o option needed"; exit 1; fi
if [ -z "$MACHINE" ]; then
echo "-o option needed"
exit 1
fi
DRV_PATH=$(@colmena@/bin/colmena eval --instantiate -E "{nodes, ...}: nodes.$MACHINE.config.system.build.vm")
DRV_PATH=$(colmena eval --instantiate -E "{nodes, ...}: nodes.$MACHINE.config.system.build.vm")
echo "Realising $DRV_PATH"
RESULT=$(nix-store -r "$DRV_PATH")

Some files were not shown because too many files have changed in this diff.