From f40323bfc0a97b36f4e321a69673b3e2a627c429 Mon Sep 17 00:00:00 2001
From: Ryan Lahfa
Date: Thu, 10 Oct 2024 18:12:57 +0200
Subject: [PATCH] feat(chatops/takumi): give it ollama powers

Those are not superpowers and should be used sparingly and
responsibly. LLMs are not bulletproof. They are mostly bullshit
generators. But even a bullshit generator has its uses.

Signed-off-by: Ryan Lahfa
---
 modules/dgn-chatops/default.nix |  2 ++
 modules/dgn-chatops/takumi.py   | 15 ++++++++++++++-
 2 files changed, 16 insertions(+), 1 deletion(-)

diff --git a/modules/dgn-chatops/default.nix b/modules/dgn-chatops/default.nix
index d3b030d..ed2bb05 100644
--- a/modules/dgn-chatops/default.nix
+++ b/modules/dgn-chatops/default.nix
@@ -47,7 +47,9 @@ let
   ps = python3Pkgs.makePythonPath [
     ircrobots
     tortoise-orm
+    python3Pkgs.ollama
     python3Pkgs.aiohttp
+    python3Pkgs.loadcredential
   ];
 in
 {
diff --git a/modules/dgn-chatops/takumi.py b/modules/dgn-chatops/takumi.py
index 918237c..f8d6915 100644
--- a/modules/dgn-chatops/takumi.py
+++ b/modules/dgn-chatops/takumi.py
@@ -1,6 +1,11 @@
 #!/usr/bin/env python3
 import asyncio
+from ircrobots.interface import IBot
+from ollama import Client as OllamaClient
+from loadcredential import Credentials
+import base64
+
 from irctokens.line import build, Line
 from ircrobots.bot import Bot as BaseBot
 from ircrobots.server import Server as BaseServer

@@ -56,6 +61,10 @@ def bridge_stripped(possible_command: str, origin_nick: str) -> str | None:
     return possible_command if possible_command.startswith(TRIGGER) else None
 
 class Server(BaseServer):
+    def __init__(self, bot: IBot, name: str, llm_client: OllamaClient):
+        super().__init__(bot, name)
+        self.llm_client = llm_client
+
     def extract_valid_command(self, line: Line) -> str | None:
         me = self.nickname_lower
         if line.command == "PRIVMSG" and \
@@ -106,7 +115,11 @@
 class Bot(BaseBot):
     def create_server(self, name: str):
-        return Server(self, name)
+        credentials = Credentials()
+        password = credentials["OLLAMA_PROXY_PASSWORD"]
+        token = base64.b64encode(f"takumi:{password}".encode()).decode()
+        llm_client = OllamaClient(host='https://ollama01.beta.dgnum.eu', headers={'Authorization': f'Basic {token}'})
+        return Server(self, name, llm_client)
 
 
 async def main():
     bot = Bot()
-- 
2.47.0
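
A follow-up note, not part of the patch itself: create_server() now hands
llm_client to Server, but nothing in takumi.py calls it yet. Below is a
minimal sketch of what a call site could look like, using ollama's
synchronous chat() API; the respond() method, the model name, and the
three-line cap are illustrative assumptions, not anything this patch
defines.

    import asyncio
    from irctokens.line import build

    # Hypothetical method to add to the Server class from this patch.
    async def respond(self, target: str, prompt: str) -> None:
        # ollama's Client is synchronous: run it off the event loop so
        # the IRC connection keeps being serviced in the meantime.
        reply = await asyncio.to_thread(
            self.llm_client.chat,
            model="llama3.1",  # hypothetical model name
            messages=[{"role": "user", "content": prompt}],
        )
        # IRC is line-oriented: forward at most a few single-line replies.
        for text in reply["message"]["content"].splitlines()[:3]:
            await self.send(build("PRIVMSG", [target, text]))

extract_valid_command() already isolates the command string, so it could
hand its result straight to respond().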
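
On the credential and auth wiring: the Authorization header is standard
HTTP Basic auth, i.e. base64 over the whole "user:password" string, not
over the password alone, and b64encode() returns bytes, so the result has
to be decoded before interpolation (otherwise a literal b'...' leaks into
the header). A stdlib-only sanity check with a stand-in secret:

    import base64

    password = "hunter2"  # stand-in value, not the real credential
    token = base64.b64encode(f"takumi:{password}".encode()).decode()
    assert token == "dGFrdW1pOmh1bnRlcjI="
    print(f"Authorization: Basic {token}")

At runtime the real password comes from loadcredential, which reads the
systemd credentials directory (LoadCredential=) and appears to fall back
to plain environment variables, which should make testing outside systemd
possible.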