update scripts

This commit is contained in:
lucasheld 2022-09-07 13:04:16 +02:00
parent a7f571f508
commit 384fd21726
12 changed files with 231 additions and 113 deletions

View file

@@ -1,8 +1,10 @@
import re
from pprint import pprint
from utils import deduplicate_list
def parse_data_keys(data):
def parse_json_keys(data):
keys = []
for line in data.split("\n"):
line = line.strip()
@@ -17,19 +19,28 @@ def parse_data_keys(data):
return keys
# def parse_object_keys(code, object_name):
# match = re.findall(object_name + r'\.[0-9a-zA-Z_$]+', code)
# keys = []
# for m in match:
# key = m.replace(object_name + ".", "")
# keys.append(key)
# return list(set(keys))
def parse_heartbeat():
with open('uptime-kuma/server/model/heartbeat.js') as f:
content = f.read()
all_keys = []
match = re.search(r'toJSON\(\) {\s+return.*{([^}]+)}', content)
data = match.group(1)
keys = parse_data_keys(data)
keys = parse_json_keys(data)
all_keys.extend(keys)
match = re.search(r'toPublicJSON\(\) {\s+return.*{([^}]+)}', content)
data = match.group(1)
keys = parse_data_keys(data)
keys = parse_json_keys(data)
all_keys.extend(keys)
all_keys = list(set(all_keys))
all_keys = deduplicate_list(all_keys)
return all_keys
@@ -38,7 +49,7 @@ def parse_incident():
content = f.read()
match = re.search(r'toPublicJSON\(\) {\s+return.*{([^}]+)}', content)
data = match.group(1)
keys = parse_data_keys(data)
keys = parse_json_keys(data)
return keys
@@ -49,9 +60,10 @@ def parse_monitor():
matches = re.findall(r'data = {([^}]+)}', content)
all_keys = []
for match in matches:
keys = parse_data_keys(match)
keys = parse_json_keys(match)
keys = [i for i in keys if i != "...data"]
all_keys.extend(keys)
all_keys = deduplicate_list(all_keys)
return all_keys
@@ -60,23 +72,54 @@ def parse_proxy():
content = f.read()
match = re.search(r'toJSON\(\) {\s+return.*{([^}]+)}', content)
data = match.group(1)
keys = parse_data_keys(data)
keys = parse_json_keys(data)
return keys
# def parse_function(regex_name, content):
# match = re.search(regex_name, content)
# name = match.group(0)
# rest = "".join(content.split(name)[1:])
#
# brackets = 0
# opening_bracket_found = False
# code = ""
# for i in rest:
# code += i
# if i == "{":
# opening_bracket_found = True
# brackets += 1
# if i == "}":
# opening_bracket_found = True
# brackets -= 1
# if opening_bracket_found and brackets == 0:
# break
# return code
# # input (add, edit proxy)
# def parse_proxy2():
# with open('uptime-kuma/server/proxy.js') as f:
# content = f.read()
#
# code = parse_function(r'async save\([^)]+\) ', content)
# keys = parse_object_keys(code, "proxy")
# return keys
def parse_status_page():
with open('uptime-kuma/server/model/status_page.js') as f:
content = f.read()
all_keys = []
match = re.search(r'toJSON\(\) {\s+return.*{([^}]+)}', content)
data = match.group(1)
keys = parse_data_keys(data)
keys = parse_json_keys(data)
all_keys.extend(keys)
match = re.search(r'toPublicJSON\(\) {\s+return.*{([^}]+)}', content)
data = match.group(1)
keys = parse_data_keys(data)
keys = parse_json_keys(data)
all_keys.extend(keys)
all_keys = list(set(all_keys))
all_keys = deduplicate_list(all_keys)
return all_keys
@@ -85,16 +128,37 @@ def parse_tag():
content = f.read()
match = re.search(r'toJSON\(\) {\s+return.*{([^}]+)}', content)
data = match.group(1)
keys = parse_data_keys(data)
keys = parse_json_keys(data)
return keys
print("heartbeat")
pprint(parse_heartbeat())
print("")
print("incident")
pprint(parse_incident())
print("")
print("monitor")
pprint(parse_monitor())
print("")
print("proxy")
pprint(parse_proxy())
print("")
# print("prox2")
# pprint(parse_proxy2())
# print("")
print("status page")
pprint(parse_status_page())
print("")
print("tag")
pprint(parse_tag())
print("")
# TODO: