feat(krz01): introduce ML01 -- a machine learning VM

I will add Ollama to it later and pass the GPU through to it.

Signed-off-by: Ryan Lahfa <ryan@dgnum.eu>
Ryan Lahfa 2024-10-08 17:29:29 +02:00
parent e200ae53a4
commit ebed6462f6
3 changed files with 31 additions and 1 deletion


@@ -10,6 +10,8 @@ lib.extra.mkConfig {
     "microvm-router01"
     "nvidia-tesla-k80"
     "proxmox"
+    # Machine learning API machine
+    "microvm-ml01"
   ];
   extraConfig = {
@@ -24,6 +26,9 @@ lib.extra.mkConfig {
     services.netbird.enable = true;
+    # We are going to use CUDA here.
+    nixpkgs.config.cudaSupport = true;
     users.users.root.hashedPassword = "$y$j9T$eNZQgDN.J5y7KTG2hXgat1$J1i5tjx5dnSZu.C9B7swXi5zMFIkUnmRrnmyLHFAt8/";
   };


@@ -0,0 +1,22 @@
+_: {
+  microvm.autostart = [ "ml01" ];
+  microvm.vms.ml01 = {
+    config = {
+      networking.hostName = "ml01";
+      services.ollama = {
+        enable = true;
+        listenAddress = "0.0.0.0:11434";
+        sandbox = true;
+        acceleration = "cuda";
+      };
+      microvm.shares = [
+        {
+          source = "/nix/store";
+          mountPoint = "/nix/.ro-store";
+          tag = "ro-store";
+          proto = "virtiofs";
+        }
+      ];
+    };
+  };
+}

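The commit message says the GPU will be passed through to this VM later. A minimal sketch of what that could look like with the microvm.devices option from microvm.nix; this is not part of this commit, it assumes the Tesla K80 is bound to vfio-pci on the host, and 0000:xx:00.0 is a placeholder for its real PCI address:

# Hypothetical follow-up, not in this commit: PCI passthrough of the K80
# into the ml01 guest via microvm.nix.
microvm.vms.ml01.config = {
  microvm.devices = [
    {
      bus = "pci";
      path = "0000:xx:00.0"; # placeholder, find the real address with lspci
    }
  ];
};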

@@ -1,5 +1,8 @@
 { config, ... }:
 {
+  nixpkgs.config.nvidia.acceptLicense = true;
   # Tesla K80 is not supported by the latest driver.
-  hardware.nvidia.package = config.boot.kernelPackages.nvidiaPackages_legacy_470;
+  hardware.nvidia.package = config.boot.kernelPackages.nvidia_x11_legacy470;
+  # Don't ask.
+  services.xserver.videoDrivers = [ "nvidia" ];
 }
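If the passthrough sketched above materialises, the ml01 guest would presumably need the same legacy driver pin as the host in order to drive the K80 and let services.ollama actually use CUDA. A speculative guest-side sketch, reusing only options that already appear in this commit:

# Speculative, not part of this commit: guest-side driver configuration for
# ml01, mirroring the host's Tesla K80 settings.
microvm.vms.ml01.config = { config, ... }: {
  nixpkgs.config.nvidia.acceptLicense = true;
  nixpkgs.config.cudaSupport = true;
  hardware.nvidia.package = config.boot.kernelPackages.nvidia_x11_legacy470;
  services.xserver.videoDrivers = [ "nvidia" ];
};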