# uptime-kuma-api/scripts/build_models.py
import re
from pprint import pprint

from utils import deduplicate_list

ROOT = "uptime-kuma"
def parse_json_keys(data):
    """Extract the key names from the body of a JS object literal.

    Handles both explicit entries ("type: this.type,") and shorthand
    property entries ("notificationIDList,"). Blank lines are ignored.
    """
    keys = []
    for raw_line in data.split("\n"):
        entry = raw_line.strip()
        if not entry:
            continue
        colon_match = re.match(r'^([^:]+):', entry)  # example: "type: this.type,"
        # Explicit "key: value" pair -> take the key part; otherwise the
        # entry is a shorthand property, so only drop the trailing comma.
        keys.append(colon_match.group(1) if colon_match else entry.rstrip(","))
    return keys
# def parse_object_keys(code, object_name):
# match = re.findall(object_name + r'\.[0-9a-zA-Z_$]+', code)
# keys = []
# for m in match:
# key = m.replace(object_name + ".", "")
# keys.append(key)
# return list(set(keys))
def parse_heartbeat():
    """Collect the JSON keys of the heartbeat model.

    Reads uptime-kuma's heartbeat.js and merges the keys of the object
    literals returned by toJSON() and toPublicJSON(), deduplicated while
    preserving first-seen order.
    """
    with open(f'{ROOT}/server/model/heartbeat.js') as f:
        content = f.read()
    all_keys = []
    # Both serializers contribute keys; the extraction logic is identical,
    # so loop over the method names instead of duplicating the regex code.
    # NOTE: raises AttributeError if a method is missing from the source.
    for method in ("toJSON", "toPublicJSON"):
        match = re.search(method + r'\(\) {\s+return.*{([^}]+)}', content)
        all_keys.extend(parse_json_keys(match.group(1)))
    return deduplicate_list(all_keys)
def parse_incident():
    """Return the JSON keys of the incident model's toPublicJSON()."""
    with open(f'{ROOT}/server/model/incident.js') as f:
        source = f.read()
    # Grab the body of the object literal returned by toPublicJSON().
    body = re.search(r'toPublicJSON\(\) {\s+return.*{([^}]+)}', source).group(1)
    return parse_json_keys(body)
def parse_monitor():
    """Return the deduplicated keys of every `data = {...}` literal in monitor.js."""
    # todo: toPublicJSON ???
    with open(f'{ROOT}/server/model/monitor.js') as f:
        source = f.read()
    collected = []
    for body in re.findall(r'data = {([^}]+)}', source):
        # "...data" is a JS spread entry, not a real key -- drop it.
        collected.extend(key for key in parse_json_keys(body) if key != "...data")
    return deduplicate_list(collected)
def parse_proxy():
    """Return the JSON keys of the proxy model's toJSON()."""
    with open(f'{ROOT}/server/model/proxy.js') as f:
        source = f.read()
    # Grab the body of the object literal returned by toJSON().
    body = re.search(r'toJSON\(\) {\s+return.*{([^}]+)}', source).group(1)
    return parse_json_keys(body)
# def parse_function(regex_name, content):
# match = re.search(regex_name, content)
# name = match.group(0)
# rest = "".join(content.split(name)[1:])
#
# brackets = 0
# opening_bracket_found = False
# code = ""
# for i in rest:
# code += i
# if i == "{":
# opening_bracket_found = True
# brackets += 1
# if i == "}":
# opening_bracket_found = True
# brackets -= 1
# if opening_bracket_found and brackets == 0:
# break
# return code
# # input (add, edit proxy)
# def parse_proxy2():
# with open(f'{ROOT}/server/proxy.js') as f:
# content = f.read()
#
# code = parse_function(r'async save\([^)]+\) ', content)
# keys = parse_object_keys(code, "proxy")
# return keys
def parse_status_page():
    """Collect the JSON keys of the status page model.

    Reads uptime-kuma's status_page.js and merges the keys of the object
    literals returned by toJSON() and toPublicJSON(), deduplicated while
    preserving first-seen order.
    """
    with open(f'{ROOT}/server/model/status_page.js') as f:
        content = f.read()
    all_keys = []
    # Both serializers contribute keys; the extraction logic is identical,
    # so loop over the method names instead of duplicating the regex code.
    # NOTE: raises AttributeError if a method is missing from the source.
    for method in ("toJSON", "toPublicJSON"):
        match = re.search(method + r'\(\) {\s+return.*{([^}]+)}', content)
        all_keys.extend(parse_json_keys(match.group(1)))
    return deduplicate_list(all_keys)
def parse_tag():
    """Return the JSON keys of the tag model's toJSON()."""
    with open(f'{ROOT}/server/model/tag.js') as f:
        source = f.read()
    # Grab the body of the object literal returned by toJSON().
    body = re.search(r'toJSON\(\) {\s+return.*{([^}]+)}', source).group(1)
    return parse_json_keys(body)
# Print the parsed keys for each model, one labelled section per model.
# Data-driven loop instead of seven copy-pasted print/pprint/print triples;
# output is identical (label, pprinted key list, blank separator line).
for label, parser in [
    ("heartbeat", parse_heartbeat),
    ("incident", parse_incident),
    ("monitor", parse_monitor),
    ("proxy", parse_proxy),
    # ("prox2", parse_proxy2),
    ("status page", parse_status_page),
    ("tag", parse_tag),
]:
    print(label)
    pprint(parser())
    print("")
# TODO:
# https://github.com/louislam/uptime-kuma/blob/2adb142ae25984ecebfa4b51c739fec5e492763a/server/proxy.js#L20
# https://github.com/louislam/uptime-kuma/blob/239611a016a85712305100818d4c7b88a14664a9/server/socket-handlers/status-page-socket-handler.js#L118