# Host configuration built via the project-local `lib.extra.mkConfig` helper.
# Enables the microvm host role, ZFS hardware support, netbird, and a
# CUDA-enabled ollama instance tuned for an NVIDIA Tesla K80.
{ lib, pkgs, ... }:

lib.extra.mkConfig {
  enabledModules = [
    # INFO: This list needs to stay sorted alphabetically
  ];

  enabledServices = [
    # INFO: This list needs to stay sorted alphabetically
    # Machine learning API machine
    "microvm-ml01"
    "microvm-router01"
    "nvidia-tesla-k80"
    "proxmox"
  ];

  extraConfig = {
    microvm = {
      host.enable = true;
    };

    dgn-hardware = {
      useZfs = true;
      zfsPools = [
        "dpool"
        "ppool0"
      ];
    };

    services.netbird.enable = true;

    # We are going to use CUDA here.
    nixpkgs.config.cudaSupport = true;

    services.ollama = {
      enable = true;
      package =
        (pkgs.ollama.override {
          # The K80 is only supported up to CUDA 11, which in turn
          # requires an older host compiler than the default gcc12.
          cudaPackages = pkgs.cudaPackages_11;
          gcc12 = pkgs.gcc11;
        }).overrideAttrs
          (old: {
            # K80 is compute capability 3.5
            CMAKE_CUDA_ARCHITECTURES = "35";
            ldflags = old.ldflags ++ [
              # K80 is 3.5
              "-X=github.com/ollama/ollama/gpu.CudaComputeMajorMin=3"
              "-X=github.com/ollama/ollama/gpu.CudaComputeMinorMin=5"
            ];
            patches = (old.patches or [ ]) ++ [ ./K80-support.patch ];
          });
    };

    users.users.root.hashedPassword = "$y$j9T$eNZQgDN.J5y7KTG2hXgat1$J1i5tjx5dnSZu.C9B7swXi5zMFIkUnmRrnmyLHFAt8/";
  };

  root = ./.;
}