chore(users/Profpatsch/sync-abfall): park

Change-Id: I9284417cb88f0eb2a0525db789069ca6507a500f
Reviewed-on: https://cl.tvl.fyi/c/depot/+/12583
Tested-by: BuildkiteCI
Reviewed-by: Profpatsch <mail@profpatsch.de>
Profpatsch 2024-10-05 14:42:11 +02:00
parent 2b5a10a45c
commit 96a78877eb
4 changed files with 0 additions and 0 deletions

@@ -0,0 +1,3 @@
# sync-abfall-ics-aichach-friedberg
A small tool to sync the ICS files with the local trash collection times, published at https://abfallwirtschaft.lra-aic-fdb.de/.
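
Configuration happens via environment variables (read by `sync-ics-to-dir.py`):

* `ICS_DIRECTORY`: directory the fetched `.ics` files are written to (required)
* `ICS_START_YEAR`: first year to fetch (defaults to `2022`)
* `ICS_FUTURE_YEARS`: how many years beyond the current one to fetch (defaults to `2`)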

@@ -0,0 +1,31 @@
{ depot, pkgs, ... }:

let
  # Script that fetches the ICS files and writes them to $ICS_DIRECTORY
  # (see sync-ics-to-dir.py).
  sync-to-dir = depot.users.Profpatsch.writers.python3
    {
      name = "sync-ics-to-dir";
      libraries = (py: [
        py.httpx
        py.icalendar
      ]);
    } ./sync-ics-to-dir.py;

  # vdirsyncer-style INI config, rendered from the dhall definitions in
  # ics-to-caldav.dhall.
  config =
    depot.users.Profpatsch.importDhall.importDhall
      {
        root = ./..;
        files = [
          "sync-abfall-ics-aichach-friedberg/ics-to-caldav.dhall"
          "dhall/lib.dhall"
          "ini/ini.dhall"
        ];
        main = "sync-abfall-ics-aichach-friedberg/ics-to-caldav.dhall";
        deps = [ ];
      }
      depot.users.Profpatsch.ini.externs;

in
{ inherit config; }

@@ -0,0 +1,139 @@
let Ini = ../ini/ini.dhall

let Lib = ../dhall/lib.dhall

in  \(Ini/externs : Ini.Externs) ->
      let Vdirsyncer =
            let StorageType =
                  < FileSystem : { path : Text, fileext : < ICS > }
                  | Http : { url : Text }
                  >

            let Collection = < FromA | FromB | Collection : Text >

            let Collections =
                  < Unspecified | TheseCollections : List Collection >

            let Storage = { storageName : Text, storage : StorageType }

            in  { Storage
                , StorageType
                , Collection
                , Collections
                , Pair =
                    { pairName : Text
                    , a : Storage
                    , b : Storage
                    , collections : Collections
                    }
                }

      let toIniSections
          : Vdirsyncer.Pair -> Ini.Sections
          = \(pair : Vdirsyncer.Pair) ->
              let
                -- we assume the names are [a-zA-Z_]
                renderList =
                  \(l : List Text) ->
                    "["
                    ++ Lib.Text/concatMapSep
                         ", "
                         Text
                         (\(t : Text) -> "\"${t}\"")
                         l
                    ++ "]"

              in  let nv = \(name : Text) -> \(value : Text) -> { name, value }

                  let mkStorage =
                        \(storage : Vdirsyncer.Storage) ->
                          { name = "storage ${storage.storageName}"
                          , value =
                              merge
                                { FileSystem =
                                    \ ( fs
                                      : { path : Text, fileext : < ICS > }
                                      ) ->
                                      [ nv "type" "filesystem"
                                      , nv
                                          "fileext"
                                          (merge { ICS = ".ics" } fs.fileext)
                                      , nv "path" fs.path
                                      ]
                                , Http =
                                    \(http : { url : Text }) ->
                                      [ nv "type" "http", nv "url" http.url ]
                                }
                                storage.storage
                          }

                  in  [ { name = "pair ${pair.pairName}"
                        , value =
                            [ nv "a" pair.a.storageName
                            , nv "b" pair.b.storageName
                            , nv
                                "collections"
                                ( merge
                                    { Unspecified = "none"
                                    , TheseCollections =
                                        \(colls : List Vdirsyncer.Collection) ->
                                          renderList
                                            ( Lib.List/map
                                                Vdirsyncer.Collection
                                                Text
                                                ( \ ( coll
                                                    : Vdirsyncer.Collection
                                                    ) ->
                                                    merge
                                                      { FromA = "from a"
                                                      , FromB = "from b"
                                                      , Collection =
                                                          \(t : Text) -> t
                                                      }
                                                      coll
                                                )
                                                colls
                                            )
                                    }
                                    pair.collections
                                )
                            ]
                        }
                      , mkStorage pair.a
                      , mkStorage pair.b
                      ]
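
      -- Rough shape of the INI this produces (exact quoting and spacing are
      -- up to Ini/externs.renderIni): one `[pair <pairName>]` section with
      -- `a`, `b` and `collections` entries, followed by one
      -- `[storage <storageName>]` section per storage.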
      in  { example =
              Ini/externs.renderIni
                ( Ini.appendInis
                    ( Lib.List/map
                        Vdirsyncer.Pair
                        Ini.Ini
                        ( \(pair : Vdirsyncer.Pair) ->
                            { globalSection = [] : Ini.Section
                            , sections = toIniSections pair
                            }
                        )
                        ( [ { pairName = "testPair"
                            , a =
                                { storageName = "mystor"
                                , storage =
                                    Vdirsyncer.StorageType.FileSystem
                                      { path = "./test-ics"
                                      , fileext = < ICS >.ICS
                                      }
                                }
                            , b =
                                { storageName = "mystor"
                                , storage =
                                    Vdirsyncer.StorageType.Http
                                      { url = "https://profpatsch.de" }
                                }
                            , collections = Vdirsyncer.Collections.Unspecified
                            }
                          ]
                          : List Vdirsyncer.Pair
                        )
                    )
                )
          }

@@ -0,0 +1,133 @@
# Horrible little module that fetches the ICS files for the local public
# waste collection service.
#
# It tries its best not to overwrite existing ICS files in case the upstream
# goes down or returns empty ICS files.
import sys
import httpx
import asyncio
import icalendar
from datetime import datetime
import syslog
import os.path

# Internal id for the street (extracted from the ics download url)
ortsteil_id = "e9c32ab3-df25-4660-b88e-abda91897d7a"

# The service uses a numeric encoding to refer to the different kinds of trash
fraktionen = {
    "restmüll": "1",
    "bio": "5",
    "papier": "7",
    "gelbe_tonne": "13",
    "problemmüllsammlung": "20"
}


def ics_url(year):
    frakt = ','.join(fraktionen.values())
    return f'https://awido.cubefour.de/Customer/aic-fdb/KalenderICS.aspx?oid={ortsteil_id}&jahr={year}&fraktionen={frakt}&reminder=1.12:00'
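
# For example, ics_url(2024) expands (with the fraktionen above) to:
#   https://awido.cubefour.de/Customer/aic-fdb/KalenderICS.aspx?oid=e9c32ab3-df25-4660-b88e-abda91897d7a&jahr=2024&fraktionen=1,5,7,13,20&reminder=1.12:00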


def fetchers_for_years(start_year, no_of_years_in_future):
    """Given a starting year and a number of years in the future,
    return the years for which to fetch ics files."""
    current_year = datetime.now().year
    max_year = current_year + no_of_years_in_future
    return {
        "passed_years": range(start_year, current_year),
        "this_and_future_years": range(current_year, 1 + max_year)
    }
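
# For example, assuming the current year is 2024:
#   fetchers_for_years(2022, no_of_years_in_future=2)
#   == {"passed_years": range(2022, 2024),            # 2022, 2023
#       "this_and_future_years": range(2024, 2027)}   # 2024, 2025, 2026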


async def fetch_ics(c, url):
    """Fetch an ICS file from a URL."""
    try:
        resp = await c.get(url)
    except Exception as e:
        return { "ics_does_not_exist_exc": e }
    if resp.is_error:
        return { "ics_does_not_exist": resp }
    else:
        try:
            ics = icalendar.Calendar.from_ical(resp.content)
            return { "ics": { "ics_parsed": ics, "ics_bytes": resp.content } }
        except ValueError as e:
            return { "ics_cannot_be_parsed": e }
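
# fetch_ics returns exactly one of these dict shapes, which the match
# statements in main() below dispatch on:
#   { "ics_does_not_exist_exc": <exception> }  -- the request itself failed
#   { "ics_does_not_exist": <response> }       -- the server returned an error status
#   { "ics_cannot_be_parsed": <exception> }    -- the body was not valid ICS
#   { "ics": { "ics_parsed": ..., "ics_bytes": ... } }  -- success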


def ics_has_events(ics):
    """Determine if there is any event in the ICS; otherwise we can assume it's an empty file."""
    for item in ics.walk():
        if isinstance(item, icalendar.Event):
            return True
    return False


async def write_nonempty_ics(directory, year, ics):
    # only overwrite if the new ics has any events
    if ics_has_events(ics['ics_parsed']):
        path = os.path.join(directory, f"{year}.ics")
        with open(path, "wb") as f:
            f.write(ics['ics_bytes'])
        info(f"wrote ics for year {year} to file {path}")
    else:
        info(f"ics for year {year} was empty, skipping")


def main():
    ics_directory = os.getenv("ICS_DIRECTORY", None)
    if not ics_directory:
        critical("please set ICS_DIRECTORY")
    start_year = int(os.getenv("ICS_START_YEAR", 2022))
    future_years = int(os.getenv("ICS_FUTURE_YEARS", 2))

    years = fetchers_for_years(start_year, no_of_years_in_future=future_years)

    async def go():
        async with httpx.AsyncClient(follow_redirects=True) as c:
            info(f"fetching ics for passed years: {years['passed_years']}")
            for year in years["passed_years"]:
                match await fetch_ics(c, ics_url(year)):
                    case { "ics_does_not_exist_exc": error }:
                        warn(f"The ics for the year {year} is gone, error when requesting: {error} for url {ics_url(year)}")
                    case { "ics_does_not_exist": resp }:
                        warn(f"The ics for the year {year} is gone, server returned status {resp.status_code} for url {ics_url(year)}")
                    case { "ics_cannot_be_parsed": error }:
                        warn(f"The returned ICS could not be parsed: {error} for url {ics_url(year)}")
                    case { "ics": ics }:
                        info(f"fetched ics from {ics_url(year)}")
                        await write_nonempty_ics(ics_directory, year, ics)
                    case _:
                        critical("unknown case for ics result")

            info(f"fetching ics for current and upcoming years: {years['this_and_future_years']}")
            for year in years["this_and_future_years"]:
                match await fetch_ics(c, ics_url(year)):
                    case { "ics_does_not_exist_exc": error }:
                        critical(f"The ics for the year {year} is not available, error when requesting: {error} for url {ics_url(year)}")
                    case { "ics_does_not_exist": resp }:
                        critical(f"The ics for the year {year} is not available, server returned status {resp.status_code} for url {ics_url(year)}")
                    case { "ics_cannot_be_parsed": error }:
                        critical(f"The returned ICS could not be parsed: {error} for url {ics_url(year)}")
                    case { "ics": ics }:
                        info(f"fetched ics from {ics_url(year)}")
                        await write_nonempty_ics(ics_directory, year, ics)
                    case _:
                        critical("unknown case for ics result")

    asyncio.run(go())


def info(msg):
    syslog.syslog(syslog.LOG_INFO, msg)


def critical(msg):
    syslog.syslog(syslog.LOG_CRIT, msg)
    sys.exit(1)


def warn(msg):
    syslog.syslog(syslog.LOG_WARNING, msg)


def debug(msg):
    syslog.syslog(syslog.LOG_DEBUG, msg)


if __name__ == "__main__":
    main()