feat(krz01): add an NGINX in front of ollama, protected by a password

This way, you can make direct requests to ollama from other machines.
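For reference, a minimal sketch of such a request, assuming the "raito" user from the htpasswd file below, a password supplied via an environment variable, and an arbitrary model name (none of these values are fixed by this commit):

# Sketch: query ollama through the password-protected NGINX proxy.
# The "raito" user comes from the htpasswd file in this commit; the
# password handling and the model name are illustrative assumptions.
import os

import requests

OLLAMA_URL = "https://ollama01.beta.dgnum.eu/api/generate"
AUTH = ("raito", os.environ["OLLAMA_PASSWORD"])

resp = requests.post(
    OLLAMA_URL,
    auth=AUTH,
    json={"model": "llama3", "prompt": "Hello from outside krz01", "stream": False},
    timeout=120,
)
resp.raise_for_status()
print(resp.json()["response"])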

Signed-off-by: Ryan Lahfa <ryan@dgnum.eu>
Ryan Lahfa 2024-10-10 16:38:17 +02:00
parent 7d70beb1f0
commit d76e655174
2 changed files with 33 additions and 8 deletions


@@ -12,11 +12,11 @@ lib.extra.mkConfig {
  enabledServices = [
    # INFO: This list needs to stay sorted alphabetically
    # Machine learning API machine
    "microvm-ml01"
    "microvm-router01"
    "nvidia-tesla-k80"
    "proxmox"
    # Machine learning API machine
    "microvm-ml01"
  ];
  extraConfig = {
@@ -57,7 +57,23 @@ lib.extra.mkConfig {
        ];
      }))
    ];
    services.ollama = {
    services = {
      nginx = {
        enable = true;
        recommendedProxySettings = true;
        virtualHosts."ollama01.beta.dgnum.eu" = {
          enableACME = true;
          forceSSL = true;
          locations."/" = {
            proxyPass = "http://${config.services.ollama.host}:${toString config.services.ollama.port}";
            basicAuthFile = pkgs.writeText "ollama-htpasswd" ''
              raito:$y$j9T$UDEHpLtM52hRGK0I4qT6M0$N75AhENLqgtJnTGaPzq51imhjZvuPr.ow81Co1ZTcX2
            '';
          };
        };
      };
      ollama = {
        enable = true;
        package = pkgs.callPackage ./ollama.nix {
          cudaPackages = pkgs.cudaPackages_11;
@@ -66,6 +82,7 @@ lib.extra.mkConfig {
        };
      };
    };
  };
  root = ./.;
}


@@ -127,6 +127,14 @@ let
        "cas-eleves"
        "vote"
      ];
      krz01.dual = [
        # Beta-grade machine learning API servers
        "ollama01.beta"
        "openui.beta"
        "whisper.beta"
        "stable-diffusion.beta"
      ];
    }
  )
);