# Compare the JSON keys exposed by the uptime-kuma model classes between an
# old and a new checkout of the repository.
import re

from utils import deduplicate_list, diff
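# deduplicate_list and diff are not shown in this file; a minimal sketch of
# the behaviour this script relies on (an assumption, the real utils module
# may differ):
#
#     def deduplicate_list(items):
#         # drop duplicates while preserving the original order
#         return list(dict.fromkeys(items))
#
#     def diff(old, new):
#         # print keys that disappeared from or were added to the new list
#         for key in old:
#             if key not in new:
#                 print(f"- {key}")
#         for key in new:
#             if key not in old:
#                 print(f"+ {key}")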


def parse_json_keys(data):
    """Extract the key names from the body of a JavaScript object literal."""
    keys = []
    for line in data.split("\n"):
        line = line.strip()
        if not line:
            continue
        match = re.match(r'^([^:]+):', line)  # example: "type: this.type,"
        if match:
            key = match.group(1)
        else:
            key = line.rstrip(",")  # shorthand property, example: "notificationIDList,"
        keys.append(key)
    return keys
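
# A worked example of the parser above, built from the two comment examples
# (illustrative input only):
#
#     parse_json_keys("type: this.type,\n  notificationIDList,")
#
# returns ["type", "notificationIDList"].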


def parse_heartbeat(root):
    with open(f'{root}/server/model/heartbeat.js') as f:
        content = f.read()

    all_keys = []
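    # The regexes below assume the model methods look roughly like this in
    # the JS source (illustrative shape, not copied from uptime-kuma):
    #
    #     toJSON() {
    #         return {
    #             monitorID: this.monitorID,
    #             status: this.status,
    #         };
    #     }
    #
    # i.e. they return a single object literal with no nested braces, so
    # "[^}]+" can capture the whole body up to the first closing brace.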
    match = re.search(r'toJSON\(\) {\s+return.*{([^}]+)}', content)
    data = match.group(1)
    keys = parse_json_keys(data)
    all_keys.extend(keys)

    match = re.search(r'toPublicJSON\(\) {\s+return.*{([^}]+)}', content)
    data = match.group(1)
    keys = parse_json_keys(data)
    all_keys.extend(keys)

    all_keys = deduplicate_list(all_keys)
    return all_keys


def parse_incident(root):
    with open(f'{root}/server/model/incident.js') as f:
        content = f.read()

    match = re.search(r'toPublicJSON\(\) {\s+return.*{([^}]+)}', content)
    data = match.group(1)
    keys = parse_json_keys(data)
    return keys


def parse_monitor(root):
    # TODO: does toPublicJSON() need to be parsed here as well?
    with open(f'{root}/server/model/monitor.js') as f:
        content = f.read()

    matches = re.findall(r'data = {([^}]+)}', content)
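    # Note: rather than scraping toJSON(), the findall above collects the
    # body of every "data = {...}" object literal in monitor.js; spread
    # entries such as "...data" are filtered out below.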

    all_keys = []
    for match in matches:
        keys = parse_json_keys(match)
        # drop JavaScript spread entries, which are not keys
        keys = [i for i in keys if i != "...data"]
        all_keys.extend(keys)

    all_keys = deduplicate_list(all_keys)
    return all_keys


def parse_proxy(root):
    with open(f'{root}/server/model/proxy.js') as f:
        content = f.read()

    match = re.search(r'toJSON\(\) {\s+return.*{([^}]+)}', content)
    data = match.group(1)
    keys = parse_json_keys(data)
    return keys


# def parse_function(regex_name, content):
#     match = re.search(regex_name, content)
#     name = match.group(0)
#     rest = "".join(content.split(name)[1:])
#
#     brackets = 0
#     opening_bracket_found = False
#     code = ""
#     for i in rest:
#         code += i
#         if i == "{":
#             opening_bracket_found = True
#             brackets += 1
#         if i == "}":
#             opening_bracket_found = True
#             brackets -= 1
#         if opening_bracket_found and brackets == 0:
#             break
#     return code


# # input (add, edit proxy)
# def parse_proxy2():
#     with open(f'{root}/server/proxy.js') as f:
#         content = f.read()
#
#     code = parse_function(r'async save\([^)]+\) ', content)
#     keys = parse_object_keys(code, "proxy")
#     return keys


def parse_status_page(root):
    with open(f'{root}/server/model/status_page.js') as f:
        content = f.read()

    all_keys = []

    match = re.search(r'toJSON\(\) {\s+return.*{([^}]+)}', content)
    data = match.group(1)
    keys = parse_json_keys(data)
    all_keys.extend(keys)

    match = re.search(r'toPublicJSON\(\) {\s+return.*{([^}]+)}', content)
    data = match.group(1)
    keys = parse_json_keys(data)
    all_keys.extend(keys)

    all_keys = deduplicate_list(all_keys)
    return all_keys


def parse_tag(root):
    with open(f'{root}/server/model/tag.js') as f:
        content = f.read()

    match = re.search(r'toJSON\(\) {\s+return.*{([^}]+)}', content)
    data = match.group(1)
    keys = parse_json_keys(data)
    return keys


if __name__ == "__main__":
    root_old = "uptime-kuma-old"
    root_new = "uptime-kuma"

    for name, func in [
        ("heartbeat", parse_heartbeat),
        ("incident", parse_incident),
        ("monitor", parse_monitor),
        ("proxy", parse_proxy),
        ("status page", parse_status_page),
        ("tag", parse_tag),
    ]:
        keys_old = func(root_old)
        keys_new = func(root_new)
        print(f"{name}:")
        diff(keys_old, keys_new)
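
# Expected report shape, assuming diff() prints one line per changed key
# (hypothetical keys shown; actual output depends on the two checkouts):
#
#     heartbeat:
#     - duration
#     + retries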


# TODO:
# https://github.com/louislam/uptime-kuma/blob/2adb142ae25984ecebfa4b51c739fec5e492763a/server/proxy.js#L20
# https://github.com/louislam/uptime-kuma/blob/239611a016a85712305100818d4c7b88a14664a9/server/socket-handlers/status-page-socket-handler.js#L118