{
  config,
  lib,
  pkgs,
  ...
}:
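
# lib.extra.mkConfig is a helper local to this repository (not from nixpkgs);
# judging from its arguments, it assembles the machine configuration from the
# module and service lists and the extraConfig attribute set below.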
lib.extra.mkConfig {
  enabledModules = [
    # INFO: This list needs to stay sorted alphabetically
  ];

  enabledServices = [
    # INFO: This list needs to stay sorted alphabetically

    # Machine learning API machine
    "microvm-ml01"
    "microvm-router01"
    "nvidia-tesla-k80"
    "proxmox"
  ];

  extraConfig = {
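    # This machine hosts the MicroVM guests enabled above; host.enable is
    # assumed to be provided by the microvm.nix host module.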
    microvm = {
      host.enable = true;
    };

    dgn-hardware = {
      useZfs = true;
      zfsPools = [
        "dpool"
        "ppool0"
      ];
    };

    services.netbird.enable = true;
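
    # CUDA on this machine targets the Tesla K80, which tops out at compute
    # capability 3.7; CUDA 12 dropped it, hence the cudaPackages_11 pins below.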
    # We are going to use CUDA here.
    nixpkgs.config.cudaSupport = true;
    hardware.graphics.enable = true;
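
    # Pattern note: `override` swaps the package's function arguments (here,
    # the CUDA toolchain), while `overrideAttrs` edits the resulting
    # derivation (source pin, build environment, patches).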
    environment.systemPackages = [
      ((pkgs.openai-whisper-cpp.override { cudaPackages = pkgs.cudaPackages_11; }).overrideAttrs (old: {
        src = pkgs.fetchFromGitHub {
          owner = "ggerganov";
          repo = "whisper.cpp";
          rev = "v1.7.1";
          hash = "sha256-EDFUVjud79ZRCzGbOh9L9NcXfN3ikvsqkVSOME9F9oo=";
        };
        env = {
          # WHISPER_CUBLAS is deprecated upstream in favour of GGML_CUDA;
          # clear the old flag and set the new one.
          WHISPER_CUBLAS = "";
          GGML_CUDA = "1";
        };
        # We only need Compute Capability 3.7.
        CUDA_ARCH_FLAGS = [ "sm_37" ];
        # We are GPU-only anyway.
        patches = (old.patches or [ ]) ++ [
          ./no-weird-microarch.patch
          ./all-nvcc-arch.patch
        ];
      }))
    ];

    services = {
      nginx = {
        enable = true;
        recommendedProxySettings = true;
        virtualHosts."ollama01.beta.dgnum.eu" = {
          enableACME = true;
          forceSSL = true;
          locations."/" = {
            proxyPass = "http://${config.services.ollama.host}:${toString config.services.ollama.port}";
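            # htpasswd-format credentials; the $y$ (yescrypt) hash can be
            # regenerated with e.g. `mkpasswd -m yescrypt`.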
            basicAuthFile = pkgs.writeText "ollama-htpasswd" ''
              raito:$y$j9T$UDEHpLtM52hRGK0I4qT6M0$N75AhENLqgtJnTGaPzq51imhjZvuPr.ow81Co1ZTcX2
            '';
          };
        };
      };
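
      # ollama is built from a local expression so the CUDA 11 toolchain and
      # the host's NVIDIA driver libraries can be injected into it.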
      ollama = {
        enable = true;
        package = pkgs.callPackage ./ollama.nix {
          cudaPackages = pkgs.cudaPackages_11;
          # We need to pass our NVIDIA X11 driver through for CUDA.
          extraLibraries = [ config.hardware.nvidia.package ];
        };
      };
    };
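
    # Port 80 is needed for the ACME HTTP-01 challenge, 443 for the TLS vhost.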
    networking.firewall.allowedTCPPorts = [
      80
      443
    ];
  };

  root = ./.;
}