feat(tvldb): Import the tvldb/paroxysm source, add a Nix derivation
- This imports the tvldb (actually a thing called 'paroxysm') code from
  https://git.theta.eu.org/eta/paroxysm into the monorepo.
- Additionally, I did a nix thing, yay! \o/ (well, with tazjin's help)
- 3p/default.nix needed modifying to whitelist pgsql.

Change-Id: Icdf13ca221650dde376f632bd2dd8a087af451bf
Reviewed-on: https://cl.tvl.fyi/c/depot/+/389
Reviewed-by: tazjin <mail@tazj.in>
parent 4c22cf3169
commit c3abbb5e2d

17 changed files with 2315 additions and 0 deletions

fun/tvldb/.gitignore (new file, 5 lines)
@@ -0,0 +1,5 @@
/target
irc.toml
paroxysm-irc.toml
paroxysm.toml
**/*.rs.bk

fun/tvldb/Cargo.lock (new file, generated, 1631 lines)
(diff suppressed because it is too large)

fun/tvldb/Cargo.toml (new file, 22 lines)
@@ -0,0 +1,22 @@
[package]
authors = ["eeeeeta <eta@theta.eu.org>"]
edition = "2018"
name = "paroxysm"
version = "0.1.0"

[dependencies]
chrono = "0.4.6"
config = "0.9.1"
env_logger = "0.6.0"
failure = "0.1.3"
irc = "0.13.6"
lazy_static = "1.2.0"
log = "0.4.6"
rand = "0.7.3"
regex = "1.1.0"
serde = "1.0.81"
serde_derive = "1.0.81"

[dependencies.diesel]
features = ["postgres", "chrono", "r2d2"]
version = "1.3.3"

fun/tvldb/OWNERS (new file, 3 lines)
@@ -0,0 +1,3 @@
inherited: true
owners:
  - eta

fun/tvldb/README.md (new file, 19 lines)
@@ -0,0 +1,19 @@
paroxysm
========

`paroxysm` is a bot for [internet relay chat
(IRC)](https://en.wikipedia.org/wiki/Internet_Relay_Chat) that lets you store
small pieces of information, called *factoids*, and retrieve them later. It's
useful for organising frequently-used information to avoid repeating oneself in
a busy chatroom, as well as making little todo lists or notes to self in a
private chatroom.

It was directly inspired by the
[LearnDB](https://github.com/crawl/sequell/blob/master/docs/learndb.md)
functionality offered in `##crawl` on chat.freenode.net, and uses similar
syntax.

## Usage instructions

Will come soon; the project is very much still in beta, and is subject to
change.
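
Since the README defers usage documentation, here is a brief, unofficial illustration of the LearnDB-style syntax implemented by the regexes in src/main.rs below (nick, channel and factoid text are invented; IRC formatting codes omitted):

    <eta> ??tvl: a monorepo for the virus lounge
    -tvldb- tvl[1/1]: a monorepo for the virus lounge [2020-06-11]
    <eta> ??tvl
    -tvldb- tvl[1/1]: a monorepo for the virus lounge [2020-06-11]

Sending `??tvl: more text` again appends a second entry, and `??tvl[2]` retrieves that entry specifically.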

fun/tvldb/default.nix (new file, 11 lines)
@@ -0,0 +1,11 @@
{ depot, ... }:

let
  pkgs = depot.third_party;
in
pkgs.naersk.buildPackage {
  name = "tvldb";
  version = "0.0.1";
  src = ./.;
  buildInputs = [pkgs.openssl pkgs.pkgconfig pkgs.postgresql.lib];
}
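
For orientation, this derivation should be buildable from the depot root with something like the following (a sketch; it assumes the depot's usual attribute-per-directory layout, under which this file is exposed as the `fun.tvldb` attribute):

    nix-build -A fun.tvldb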

fun/tvldb/docker/default.nix (new file, 7 lines)
@@ -0,0 +1,7 @@
{ depot, ... }:

depot.third_party.dockerTools.buildLayeredImage {
  name = "tvldb";
  contents = [ depot.fun.tvldb ];
  config.Entrypoint = [ "${depot.fun.tvldb}/bin/paroxysm" ];
}
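
One possible way to run the resulting image, assuming it has been loaded into Docker under the name given above and that the IRC config is mounted in (illustrative values only; the PARX_* variables are the ones read at startup by src/main.rs and src/cfg.rs):

    docker run \
      -e PARX_DATABASE_URL=postgres://paroxysm@db/paroxysm \
      -e PARX_IRC_CONFIG_PATH=/config/irc.toml \
      tvldb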

fun/tvldb/migrations/20181209140247_initial/down.sql (new file, 2 lines)
@@ -0,0 +1,2 @@
DROP TABLE entries;
DROP TABLE keywords;

fun/tvldb/migrations/20181209140247_initial/up.sql (new file, 15 lines)
@@ -0,0 +1,15 @@
CREATE TABLE keywords (
    id SERIAL PRIMARY KEY,
    name VARCHAR UNIQUE NOT NULL,
    chan VARCHAR NOT NULL,
    UNIQUE(name, chan)
);

CREATE TABLE entries (
    id SERIAL PRIMARY KEY,
    keyword_id INT NOT NULL REFERENCES keywords ON DELETE CASCADE,
    idx INT NOT NULL,
    text VARCHAR NOT NULL,
    creation_ts TIMESTAMP NOT NULL,
    created_by VARCHAR NOT NULL
);
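
To make the relationship between the two tables concrete, a factoid lookup against this schema boils down to a query along these lines (illustration only, not part of the migration; the keyword and channel values are invented):

    SELECT e.idx, e.text, e.created_by
      FROM keywords k
      JOIN entries e ON e.keyword_id = k.id
     WHERE k.name ILIKE 'tvl' AND (k.chan = '#tvl' OR k.chan = '*')
     ORDER BY e.idx ASC;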

fun/tvldb/migrations/20181218142013_fix_unique/down.sql (new file, 1 line)
@@ -0,0 +1 @@
-- This file should undo anything in `up.sql`

fun/tvldb/migrations/20181218142013_fix_unique/up.sql (new file, 1 line)
@@ -0,0 +1 @@
ALTER TABLE keywords DROP CONSTRAINT IF EXISTS keywords_name_key;

fun/tvldb/src/cfg.rs (new file, 11 lines)
@@ -0,0 +1,11 @@
use std::collections::HashSet;

#[derive(Deserialize)]
pub struct Config {
    pub database_url: String,
    pub irc_config_path: String,
    #[serde(default)]
    pub admins: HashSet<String>,
    #[serde(default)]
    pub log_filter: Option<String>,
}
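
Because main.rs (below) loads this struct from the environment via `config::Environment::with_prefix("PARX")`, deployment configuration is just environment variables named after these fields, for example (values are placeholders; `admins` and `log_filter` may be omitted thanks to `#[serde(default)]`):

    PARX_DATABASE_URL=postgres://paroxysm@localhost/paroxysm
    PARX_IRC_CONFIG_PATH=/etc/paroxysm/irc.toml
    PARX_LOG_FILTER=paroxysm=debug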

fun/tvldb/src/keyword.rs (new file, 182 lines)
@@ -0,0 +1,182 @@
use crate::models::{Entry, Keyword, NewEntry, NewKeyword};
use diesel::pg::PgConnection;
use diesel::prelude::*;
use failure::Error;
use std::borrow::Cow;

pub struct KeywordDetails {
    pub keyword: Keyword,
    pub entries: Vec<Entry>,
}
impl KeywordDetails {
    pub fn learn(&mut self, nick: &str, text: &str, dbc: &PgConnection) -> Result<usize, Error> {
        let now = ::chrono::Utc::now().naive_utc();
        let ins = NewEntry {
            keyword_id: self.keyword.id,
            idx: (self.entries.len() + 1) as _,
            text,
            creation_ts: now,
            created_by: nick,
        };
        let new = {
            use crate::schema::entries;
            ::diesel::insert_into(entries::table)
                .values(ins)
                .get_result(dbc)?
        };
        self.entries.push(new);
        Ok(self.entries.len())
    }
    pub fn process_moves(&mut self, moves: &[(i32, i32)], dbc: &PgConnection) -> Result<(), Error> {
        for (oid, new_idx) in moves {
            {
                use crate::schema::entries::dsl::*;
                ::diesel::update(entries.filter(id.eq(oid)))
                    .set(idx.eq(new_idx))
                    .execute(dbc)?;
            }
        }
        self.entries = Self::get_entries(self.keyword.id, dbc)?;
        Ok(())
    }
    pub fn swap(&mut self, idx_a: usize, idx_b: usize, dbc: &PgConnection) -> Result<(), Error> {
        let mut moves = vec![];
        for ent in self.entries.iter() {
            if ent.idx == idx_a as i32 {
                moves.push((ent.id, idx_b as i32));
            }
            if ent.idx == idx_b as i32 {
                moves.push((ent.id, idx_a as i32));
            }
        }
        if moves.len() != 2 {
            Err(format_err!("Invalid swap operation."))?;
        }
        self.process_moves(&moves, dbc)?;
        Ok(())
    }
    pub fn update(&mut self, idx: usize, val: &str, dbc: &PgConnection) -> Result<(), Error> {
        let ent = self
            .entries
            .get_mut(idx.saturating_sub(1))
            .ok_or(format_err!("No such element to update."))?;
        {
            use crate::schema::entries::dsl::*;
            ::diesel::update(entries.filter(id.eq(ent.id)))
                .set(text.eq(val))
                .execute(dbc)?;
        }
        ent.text = val.to_string();
        Ok(())
    }
    pub fn delete(&mut self, idx: usize, dbc: &PgConnection) -> Result<(), Error> {
        // step 1: delete the element
        {
            let ent = self
                .entries
                .get(idx.saturating_sub(1))
                .ok_or(format_err!("No such element to delete."))?;
            {
                use crate::schema::entries::dsl::*;
                ::diesel::delete(entries.filter(id.eq(ent.id))).execute(dbc)?;
            }
        }
        // step 2: move all the elements in front of it back one
        let mut moves = vec![];
        for ent in self.entries.iter() {
            if ent.idx > idx as _ {
                moves.push((ent.id, ent.idx.saturating_sub(1)));
            }
        }
        self.process_moves(&moves, dbc)?;
        Ok(())
    }
    pub fn add_zwsp_to_name(name: &str) -> Option<String> {
        let second_index = name.char_indices().nth(1).map(|(i, _)| i)?;
        let (start, end) = name.split_at(second_index);
        // Insert a zero-width space (U+200B) between the first and second
        // characters, so echoing a keyword name back does not ping a matching nick.
        Some(format!("{}\u{200B}{}", start, end))
    }
    pub fn format_entry(&self, idx: usize) -> Option<String> {
        if let Some(ent) = self.entries.get(idx.saturating_sub(1)) {
            let gen_clr = if self.keyword.chan == "*" {
                "\x0307"
            } else {
                ""
            };
            let zwsp_name = Self::add_zwsp_to_name(&self.keyword.name)
                .unwrap_or_else(|| self.keyword.name.clone());
            Some(format!(
                "\x02{}{}\x0f\x0315[{}/{}]\x0f: {} \x0f\x0314[{}]\x0f",
                gen_clr,
                zwsp_name,
                idx,
                self.entries.len(),
                ent.text,
                ent.creation_ts.date()
            ))
        } else {
            None
        }
    }
    pub fn get_or_create(word: &str, c: &str, dbc: &PgConnection) -> Result<Self, Error> {
        if let Some(ret) = Self::get(word, c, dbc)? {
            Ok(ret)
        } else {
            Ok(Self::create(word, c, dbc)?)
        }
    }
    pub fn create(word: &str, c: &str, dbc: &PgConnection) -> Result<Self, Error> {
        let val = NewKeyword {
            name: word,
            chan: c,
        };
        let ret: Keyword = {
            use crate::schema::keywords;
            ::diesel::insert_into(keywords::table)
                .values(val)
                .get_result(dbc)?
        };
        Ok(KeywordDetails {
            keyword: ret,
            entries: vec![],
        })
    }
    fn get_entries(kid: i32, dbc: &PgConnection) -> Result<Vec<Entry>, Error> {
        let entries: Vec<Entry> = {
            use crate::schema::entries::dsl::*;
            entries
                .filter(keyword_id.eq(kid))
                .order_by(idx.asc())
                .load(dbc)?
        };
        Ok(entries)
    }
    pub fn get<'a, T: Into<Cow<'a, str>>>(
        word: T,
        c: &str,
        dbc: &PgConnection,
    ) -> Result<Option<Self>, Error> {
        let word = word.into();
        let keyword: Option<Keyword> = {
            use crate::schema::keywords::dsl::*;
            keywords
                .filter(name.ilike(word).and(chan.eq(c).or(chan.eq("*"))))
                .first(dbc)
                .optional()?
        };
        if let Some(k) = keyword {
            let entries = Self::get_entries(k.id, dbc)?;
            if let Some(e0) = entries.get(0) {
                if e0.text.starts_with("see: ") {
                    return Self::get(e0.text.replace("see: ", ""), c, dbc);
                }
            }
            Ok(Some(KeywordDetails {
                keyword: k,
                entries,
            }))
        } else {
            Ok(None)
        }
    }
}
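
For readers unfamiliar with the crate layout, the intended call sequence of this API looks roughly like the following sketch (illustration only, written as if inside this crate; it assumes an already-established diesel `PgConnection`):

    use crate::keyword::KeywordDetails;

    fn demo(db: &diesel::pg::PgConnection) -> Result<(), failure::Error> {
        // Look up (or create) a keyword scoped to a channel, attach an entry,
        // then render it the way the bot would send it to IRC.
        let mut kwd = KeywordDetails::get_or_create("tvl", "#tvl", db)?;
        let idx = kwd.learn("eta", "a monorepo for the virus lounge", db)?;
        if let Some(line) = kwd.format_entry(idx) {
            println!("{}", line);
        }
        Ok(())
    }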

fun/tvldb/src/main.rs (new file, 353 lines)
@@ -0,0 +1,353 @@
extern crate irc;
extern crate serde;
#[macro_use]
extern crate serde_derive;
#[macro_use]
extern crate diesel;
extern crate chrono;
extern crate config;
extern crate env_logger;
#[macro_use]
extern crate log;
#[macro_use]
extern crate failure;
extern crate regex;
#[macro_use]
extern crate lazy_static;
extern crate rand;

use crate::cfg::Config;
use crate::keyword::KeywordDetails;
use diesel::pg::PgConnection;
use diesel::r2d2::{ConnectionManager, Pool};
use failure::Error;
use irc::client::prelude::*;
use rand::rngs::ThreadRng;
use rand::{thread_rng, Rng};
use regex::{Captures, Regex};
use std::collections::HashMap;
use std::fmt::Display;

mod cfg;
mod keyword;
mod models;
mod schema;

pub struct App {
    client: IrcClient,
    pg: Pool<ConnectionManager<PgConnection>>,
    rng: ThreadRng,
    cfg: Config,
    last_msgs: HashMap<String, HashMap<String, String>>,
}

impl App {
    pub fn report_error<T: Display>(
        &mut self,
        nick: &str,
        chan: &str,
        msg: T,
    ) -> Result<(), Error> {
        self.client
            .send_notice(nick, format!("[{}] \x0304Error:\x0f {}", chan, msg))?;
        Ok(())
    }
    pub fn keyword_from_captures(
        &mut self,
        learn: &::regex::Captures,
        nick: &str,
        chan: &str,
    ) -> Result<KeywordDetails, Error> {
        let db = self.pg.get()?;
        debug!("Fetching keyword for captures: {:?}", learn);
        let subj = &learn["subj"];
        let learn_chan = if learn.name("gen").is_some() {
            "*"
        } else {
            chan
        };
        if !chan.starts_with("#") && learn_chan != "*" {
            Err(format_err!("Only general entries may be taught via PM."))?;
        }
        debug!("Fetching keyword '{}' for chan {}", subj, learn_chan);
        let kwd = KeywordDetails::get_or_create(subj, learn_chan, &db)?;
        if kwd.keyword.chan == "*" && !self.cfg.admins.contains(nick) {
            Err(format_err!(
                "Only administrators can create or modify general entries."
            ))?;
        }
        Ok(kwd)
    }
    pub fn handle_move(
        &mut self,
        target: &str,
        nick: &str,
        chan: &str,
        mv: Captures,
    ) -> Result<(), Error> {
        let db = self.pg.get()?;
        let idx = &mv["idx"];
        let idx = match idx[1..(idx.len() - 1)].parse::<usize>() {
            Ok(i) => i,
            Err(e) => Err(format_err!("Could not parse index: {}", e))?,
        };
        let new_idx = match mv["new_idx"].parse::<i32>() {
            Ok(i) => i,
            Err(e) => Err(format_err!("Could not parse target index: {}", e))?,
        };
        let mut kwd = self.keyword_from_captures(&mv, nick, chan)?;
        if new_idx < 0 {
            kwd.delete(idx, &db)?;
            self.client.send_notice(
                target,
                format!("\x02{}\x0f: Deleted entry {}.", kwd.keyword.name, idx),
            )?;
        } else {
            kwd.swap(idx, new_idx as _, &db)?;
            self.client.send_notice(
                target,
                format!(
                    "\x02{}\x0f: Swapped entries {} and {}.",
                    kwd.keyword.name, idx, new_idx
                ),
            )?;
        }
        Ok(())
    }
    pub fn handle_learn(
        &mut self,
        target: &str,
        nick: &str,
        chan: &str,
        learn: Captures,
    ) -> Result<(), Error> {
        let db = self.pg.get()?;
        let val = &learn["val"];
        let mut kwd = self.keyword_from_captures(&learn, nick, chan)?;
        let idx = kwd.learn(nick, val, &db)?;
        self.client
            .send_notice(target, kwd.format_entry(idx).unwrap())?;
        Ok(())
    }
    pub fn handle_insert_last_quote(
        &mut self,
        target: &str,
        nick: &str,
        chan: &str,
        qlast: Captures,
    ) -> Result<(), Error> {
        let db = self.pg.get()?;
        let mut kwd = self.keyword_from_captures(&qlast, nick, chan)?;
        let chan_lastmsgs = self
            .last_msgs
            .entry(chan.to_string())
            .or_insert(HashMap::new());
        let val = if let Some(last) = chan_lastmsgs.get(&kwd.keyword.name.to_ascii_lowercase()) {
            format!("<{}> {}", &kwd.keyword.name, last)
        } else {
            Err(format_err!("I dunno what {} said...", kwd.keyword.name))?
        };
        let idx = kwd.learn(nick, &val, &db)?;
        self.client
            .send_notice(target, kwd.format_entry(idx).unwrap())?;
        Ok(())
    }
    pub fn handle_increment(
        &mut self,
        target: &str,
        nick: &str,
        chan: &str,
        icr: Captures,
    ) -> Result<(), Error> {
        let db = self.pg.get()?;
        let mut kwd = self.keyword_from_captures(&icr, nick, chan)?;
        let is_incr = &icr["incrdecr"] == "++";
        let now = chrono::Utc::now().naive_utc().date();
        let mut idx = None;
        for (i, ent) in kwd.entries.iter().enumerate() {
            if ent.creation_ts.date() == now {
                if let Ok(val) = ent.text.parse::<i32>() {
                    let val = if is_incr { val + 1 } else { val - 1 };
                    idx = Some((i + 1, val));
                }
            }
        }
        if let Some((i, val)) = idx {
            kwd.update(i, &val.to_string(), &db)?;
            self.client.send_notice(target, kwd.format_entry(i).unwrap())?;
        } else {
            let val = if is_incr { 1 } else { -1 };
            let idx = kwd.learn(nick, &val.to_string(), &db)?;
            self.client
                .send_notice(target, kwd.format_entry(idx).unwrap())?;
        }
        Ok(())
    }
    pub fn handle_query(
        &mut self,
        target: &str,
        nick: &str,
        chan: &str,
        query: Captures,
    ) -> Result<(), Error> {
        let db = self.pg.get()?;
        let subj = &query["subj"];
        let idx = match query.name("idx") {
            Some(i) => {
                let i = i.as_str();
                match &i[1..(i.len() - 1)] {
                    "*" => Some(-1),
                    x => x.parse::<usize>().map(|x| x as i32).ok(),
                }
            }
            None => None,
        };
        debug!("Querying {} with idx {:?}", subj, idx);
        match KeywordDetails::get(subj, chan, &db)? {
            Some(kwd) => {
                if let Some(mut idx) = idx {
                    if idx == -1 {
                        // 'get all entries' ('*' parses into this)
                        for i in 0..kwd.entries.len() {
                            self.client.send_notice(
                                nick,
                                format!("[{}] {}", chan, kwd.format_entry(i + 1).unwrap()),
                            )?;
                        }
                    } else {
                        if idx == 0 {
                            idx = 1;
                        }
                        if let Some(ent) = kwd.format_entry(idx as _) {
                            self.client.send_notice(target, ent)?;
                        } else {
                            let pluralised = if kwd.entries.len() == 1 {
                                "entry"
                            } else {
                                "entries"
                            };
                            self.client.send_notice(
                                target,
                                format!(
                                    "\x02{}\x0f: only has \x02\x0304{}\x0f {}",
                                    subj,
                                    kwd.entries.len(),
                                    pluralised
                                ),
                            )?;
                        }
                    }
                } else {
                    let entry = if kwd.entries.len() < 2 {
                        1 // because [1, 1) does not a range make
                    } else {
                        self.rng.gen_range(1, kwd.entries.len())
                    };
                    if let Some(ent) = kwd.format_entry(entry) {
                        self.client.send_notice(target, ent)?;
                    } else {
                        self.client
                            .send_notice(target, format!("\x02{}\x0f: no entries yet", subj))?;
                    }
                }
            }
            None => {
                self.client
                    .send_notice(target, format!("\x02{}\x0f: never heard of it", subj))?;
            }
        }
        Ok(())
    }
    pub fn handle_privmsg(&mut self, from: &str, chan: &str, msg: &str) -> Result<(), Error> {
        lazy_static! {
            static ref LEARN_RE: Regex =
                Regex::new(r#"^\?\?(?P<gen>!)?\s*(?P<subj>[^\[:]*):\s*(?P<val>.*)"#).unwrap();
            static ref QUERY_RE: Regex =
                Regex::new(r#"^\?\?\s*(?P<subj>[^\[:]*)(?P<idx>\[[^\]]+\])?"#).unwrap();
            static ref QLAST_RE: Regex = Regex::new(r#"^\?\?\s*(?P<subj>[^\[:]*)!"#).unwrap();
            static ref INCREMENT_RE: Regex =
                Regex::new(r#"^\?\?(?P<gen>!)?\s*(?P<subj>[^\[:]*)(?P<incrdecr>\+\+|\-\-)"#)
                    .unwrap();
            static ref MOVE_RE: Regex = Regex::new(
                r#"^\?\?(?P<gen>!)?\s*(?P<subj>[^\[:]*)(?P<idx>\[[^\]]+\])->(?P<new_idx>.*)"#
            )
            .unwrap();
        }
        let nick = from.split("!").next().ok_or(format_err!(
            "Received PRIVMSG from a source without nickname (failed to split n!u@h)"
        ))?;
        let target = if chan.starts_with("#") { chan } else { nick };
        debug!("[{}] <{}> {}", chan, nick, msg);
        if let Some(learn) = LEARN_RE.captures(msg) {
            self.handle_learn(target, nick, chan, learn)?;
        } else if let Some(qlast) = QLAST_RE.captures(msg) {
            self.handle_insert_last_quote(target, nick, chan, qlast)?;
        } else if let Some(mv) = MOVE_RE.captures(msg) {
            self.handle_move(target, nick, chan, mv)?;
        } else if let Some(icr) = INCREMENT_RE.captures(msg) {
            self.handle_increment(target, nick, chan, icr)?;
        } else if let Some(query) = QUERY_RE.captures(msg) {
            self.handle_query(target, nick, chan, query)?;
        } else {
            let chan_lastmsgs = self
                .last_msgs
                .entry(chan.to_string())
                .or_insert(HashMap::new());
            chan_lastmsgs.insert(nick.to_string().to_ascii_lowercase(), msg.to_string());
        }
        Ok(())
    }
    pub fn handle_msg(&mut self, m: Message) -> Result<(), Error> {
        match m.command {
            Command::PRIVMSG(channel, message) => {
                if let Some(src) = m.prefix {
                    if let Err(e) = self.handle_privmsg(&src, &channel, &message) {
                        warn!("error handling command in {} (src {}): {}", channel, src, e);
                        if let Some(nick) = src.split("!").next() {
                            self.report_error(nick, &channel, e)?;
                        }
                    }
                }
            }
            Command::INVITE(nick, channel) => {
                if self.cfg.admins.contains(&nick) {
                    info!("Joining {} after admin invite", channel);
                    self.client.send_join(channel)?;
                }
            }
            _ => {}
        }
        Ok(())
    }
}
fn main() -> Result<(), Error> {
    println!("[+] loading configuration");
    let default_log_filter = "paroxysm=info".to_string();
    let mut settings = config::Config::default();
    settings.merge(config::Environment::with_prefix("PARX"))?;
    let cfg: Config = settings.try_into()?;
    let env = env_logger::Env::new()
        .default_filter_or(cfg.log_filter.clone().unwrap_or(default_log_filter));
    env_logger::init_from_env(env);
    info!("paroxysm starting up");
    info!("connecting to database at {}", cfg.database_url);
    let pg = Pool::new(ConnectionManager::new(&cfg.database_url))?;
    info!("connecting to IRC using config {}", cfg.irc_config_path);
    let client = IrcClient::new(&cfg.irc_config_path)?;
    client.identify()?;
    let st = client.stream();
    let mut app = App {
        client,
        pg,
        cfg,
        rng: thread_rng(),
        last_msgs: HashMap::new(),
    };
    info!("running!");
    st.for_each_incoming(|m| {
        if let Err(e) = app.handle_msg(m) {
            warn!("Error processing message: {}", e);
        }
    })?;
    Ok(())
}
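
Taken together, the regexes in handle_privmsg give the bot the following command surface (subject names and values here are invented examples):

    ??foo: some text        adds an entry to the factoid 'foo' in this channel
    ??!foo: some text       same, but as a general (all-channel) entry; admins only
    ??foo                   recalls a random entry for 'foo'
    ??foo[2]                recalls entry 2; ??foo[*] lists every entry via notice
    ??somenick!             quotes somenick's last channel message into a factoid
    ??foo++ / ??foo--       increments or decrements today's counter entry for 'foo'
    ??foo[2]->1             swaps entries 2 and 1; a negative target index deletes entry 2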

fun/tvldb/src/models.rs (new file, 33 lines)
@@ -0,0 +1,33 @@
use crate::schema::{entries, keywords};
use chrono::NaiveDateTime;

#[derive(Queryable)]
pub struct Keyword {
    pub id: i32,
    pub name: String,
    pub chan: String,
}
#[derive(Queryable)]
pub struct Entry {
    pub id: i32,
    pub keyword_id: i32,
    pub idx: i32,
    pub text: String,
    pub creation_ts: NaiveDateTime,
    pub created_by: String,
}
#[derive(Insertable)]
#[table_name = "keywords"]
pub struct NewKeyword<'a> {
    pub name: &'a str,
    pub chan: &'a str,
}
#[derive(Insertable)]
#[table_name = "entries"]
pub struct NewEntry<'a> {
    pub keyword_id: i32,
    pub idx: i32,
    pub text: &'a str,
    pub creation_ts: NaiveDateTime,
    pub created_by: &'a str,
}

fun/tvldb/src/schema.rs (new file, 18 lines)
@@ -0,0 +1,18 @@
table! {
    entries (id) {
        id -> Int4,
        keyword_id -> Int4,
        idx -> Int4,
        text -> Varchar,
        creation_ts -> Timestamp,
        created_by -> Varchar,
    }
}

table! {
    keywords (id) {
        id -> Int4,
        name -> Varchar,
        chan -> Varchar,
    }
}

third_party/default.nix (1 addition)
@@ -98,6 +98,7 @@ let
 pandoc
 parallel
 pkgconfig
+postgresql
 pounce
 python3
 python3Packages
|
|
Loading…
Reference in a new issue