2023-05-01 18:57:55 +02:00
|
|
|
from __future__ import annotations
|
|
|
|
|
2022-12-29 00:22:53 +01:00
|
|
|
import datetime
|
2022-07-05 22:12:37 +02:00
|
|
|
import json
|
2022-09-12 20:47:45 +02:00
|
|
|
import random
|
2022-10-04 18:38:17 +02:00
|
|
|
import string
|
|
|
|
import time
|
|
|
|
from contextlib import contextmanager
|
|
|
|
from copy import deepcopy
|
2022-12-29 01:17:45 +01:00
|
|
|
from typing import Any
|
2022-07-02 16:00:54 +02:00
|
|
|
|
2022-08-26 14:04:43 +02:00
|
|
|
import requests
|
2022-07-02 16:00:54 +02:00
|
|
|
import socketio
|
2023-07-07 22:28:20 +02:00
|
|
|
from packaging.version import parse as parse_version
|
2022-07-02 16:00:54 +02:00
|
|
|
|
2023-05-25 18:37:49 +02:00
|
|
|
from . import (
|
|
|
|
AuthMethod,
|
|
|
|
DockerType,
|
|
|
|
Event,
|
|
|
|
IncidentStyle,
|
|
|
|
MaintenanceStrategy,
|
|
|
|
MonitorStatus,
|
|
|
|
MonitorType,
|
|
|
|
NotificationType,
|
|
|
|
ProxyProtocol,
|
|
|
|
Timeout,
|
|
|
|
UptimeKumaException,
|
|
|
|
notification_provider_conditions,
|
|
|
|
notification_provider_options
|
|
|
|
)
|
2023-05-01 18:57:55 +02:00
|
|
|
|
2023-05-25 18:37:49 +02:00
|
|
|
from .docstrings import (
|
|
|
|
append_docstring,
|
|
|
|
docker_host_docstring,
|
|
|
|
maintenance_docstring,
|
|
|
|
monitor_docstring,
|
|
|
|
notification_docstring,
|
|
|
|
proxy_docstring,
|
|
|
|
tag_docstring
|
|
|
|
)
|
2022-08-03 11:56:02 +02:00
|
|
|
|
2023-05-06 13:36:26 +02:00
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
def int_to_bool(data, keys) -> None:
    """Convert integer truth values to booleans in place.

    :param data: A dict, or a list of dicts, to modify in place.
    :param keys: Keys whose values should be converted. A value becomes
        ``True`` only when it equals ``1``; every other value becomes ``False``.
    """
    if isinstance(data, list):
        for d in data:
            int_to_bool(d, keys)
    else:
        for key in keys:
            if key in data:
                # the comparison already yields a bool; no conditional expression needed
                data[key] = data[key] == 1
|
|
|
|
|
|
|
|
|
2023-05-25 23:51:09 +02:00
|
|
|
def parse_value(data, key, type_, default=None) -> None:
    """Cast ``data[key]`` to ``type_`` in place, recursing into lists of dicts.

    A ``None`` value is replaced with ``default`` (when one is given); a value
    that cannot be converted is left untouched. Falsy ``data`` is ignored.
    """
    if not data:
        return
    if isinstance(data, list):
        for entry in data:
            parse_value(entry, key, type_, default)
        return
    if key not in data:
        return
    value = data[key]
    if value is None:
        if default is not None:
            data[key] = default
        return
    try:
        data[key] = type_(value)
    except ValueError:
        # todo: add warning to logs
        pass
|
2023-05-02 20:34:26 +02:00
|
|
|
|
|
|
|
|
2023-05-25 21:26:54 +02:00
|
|
|
# monitor
|
2023-05-02 20:34:26 +02:00
|
|
|
def parse_monitor_status(data) -> None:
    """Convert the ``status`` field to a :class:`MonitorStatus` enum in place."""
    parse_value(data, "status", MonitorStatus)
|
|
|
|
|
|
|
|
|
|
|
|
def parse_monitor_type(data) -> None:
    """Convert the ``type`` field to a :class:`MonitorType` enum in place."""
    parse_value(data, "type", MonitorType)
|
|
|
|
|
|
|
|
|
|
|
|
def parse_auth_method(data) -> None:
    """Convert the ``authMethod`` field to an :class:`AuthMethod` enum in place.

    A ``None`` value is replaced with ``AuthMethod.NONE``.
    """
    parse_value(data, "authMethod", AuthMethod, AuthMethod.NONE)
|
2023-05-25 21:26:54 +02:00
|
|
|
|
|
|
|
|
|
|
|
# notification
|
|
|
|
def parse_notification_type(data) -> None:
    """Convert the ``type`` field to a :class:`NotificationType` enum in place."""
    parse_value(data, "type", NotificationType)
|
|
|
|
|
|
|
|
|
|
|
|
# docker host
|
|
|
|
def parse_docker_type(data) -> None:
    """Convert the ``dockerType`` field to a :class:`DockerType` enum in place."""
    parse_value(data, "dockerType", DockerType)
|
|
|
|
|
|
|
|
|
|
|
|
# status page
|
|
|
|
def parse_incident_style(data) -> None:
    """Convert the ``style`` field to an :class:`IncidentStyle` enum in place."""
    parse_value(data, "style", IncidentStyle)
|
|
|
|
|
|
|
|
|
|
|
|
# maintenance
|
|
|
|
def parse_maintenance_strategy(data) -> None:
    """Convert the ``strategy`` field to a :class:`MaintenanceStrategy` enum in place."""
    parse_value(data, "strategy", MaintenanceStrategy)
|
|
|
|
|
|
|
|
|
|
|
|
# proxy
|
|
|
|
def parse_proxy_protocol(data) -> None:
    """Convert the ``protocol`` field to a :class:`ProxyProtocol` enum in place."""
    parse_value(data, "protocol", ProxyProtocol)
|
2023-05-02 20:34:26 +02:00
|
|
|
|
|
|
|
|
2022-09-12 20:47:45 +02:00
|
|
|
def gen_secret(length: int) -> str:
    """Generate a random alphanumeric secret of the given length.

    Uses :mod:`secrets` instead of :mod:`random` because the result is used
    as an access token (e.g. the push monitor token) and must therefore be
    unpredictable.

    :param int length: Number of characters in the returned secret.
    :return: A random string of ASCII letters and digits.
    """
    import secrets  # local import to keep the module's import block untouched
    chars = string.ascii_uppercase + string.ascii_lowercase + string.digits
    return ''.join(secrets.choice(chars) for _ in range(length))
|
|
|
|
|
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
def _convert_monitor_return(monitor) -> None:
|
2023-05-01 18:57:55 +02:00
|
|
|
if isinstance(monitor["notificationIDList"], dict):
|
2022-09-18 14:49:10 +02:00
|
|
|
monitor["notificationIDList"] = [int(i) for i in monitor["notificationIDList"].keys()]
|
2022-09-12 22:45:43 +02:00
|
|
|
|
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
def _convert_monitor_input(kwargs) -> None:
    """Prepare user supplied monitor arguments for the server in place.

    Fills in defaults for ``accepted_statuscodes`` and type specific database
    connection strings, converts ``notificationIDList`` to the mapping format
    the server expects, and generates a push token for push monitors that do
    not have one yet.
    """
    if not kwargs["accepted_statuscodes"]:
        kwargs["accepted_statuscodes"] = ["200-299"]

    # server side format: {<notification id>: True, ...}
    kwargs["notificationIDList"] = {
        notification_id: True
        for notification_id in (kwargs["notificationIDList"] or [])
    }

    # placeholder connection strings shown by the web ui for database monitors
    default_connection_strings = {
        MonitorType.SQLSERVER: "Server=<hostname>,<port>;Database=<your database>;User Id=<your user id>;Password=<your password>;Encrypt=<true/false>;TrustServerCertificate=<Yes/No>;Connection Timeout=<int>",
        MonitorType.POSTGRES: "postgres://username:password@host:port/database",
        MonitorType.MYSQL: "mysql://username:password@host:port/database",
        MonitorType.REDIS: "redis://user:password@host:port",
        MonitorType.MONGODB: "mongodb://username:password@host:port/database",
    }
    if not kwargs["databaseConnectionString"] and kwargs["type"] in default_connection_strings:
        kwargs["databaseConnectionString"] = default_connection_strings[kwargs["type"]]

    if kwargs["type"] == MonitorType.PUSH and not kwargs.get("pushToken"):
        kwargs["pushToken"] = gen_secret(10)
|
2022-07-05 22:12:37 +02:00
|
|
|
|
|
|
|
|
2022-08-02 11:58:49 +02:00
|
|
|
def _build_notification_data(
    name: str,
    type: NotificationType,
    isDefault: bool = False,
    applyExisting: bool = False,
    **kwargs
) -> dict:
    """Assemble the payload dict for adding or editing a notification.

    :raises TypeError: When a keyword argument is not a known option of any
        notification provider.
    """
    # every option of every provider is acceptable here; provider specific
    # validation happens separately
    allowed_kwargs = {
        option
        for provider_options in notification_provider_options.values()
        for option in provider_options
    }
    for key in kwargs.keys():
        if key not in allowed_kwargs:
            raise TypeError(f"unknown argument '{key}'")

    return {
        "name": name,
        "type": type,
        "isDefault": isDefault,
        "applyExisting": applyExisting,
        **kwargs
    }
|
2022-07-05 22:12:37 +02:00
|
|
|
|
|
|
|
|
|
|
|
def _build_proxy_data(
|
2022-12-29 00:22:53 +01:00
|
|
|
protocol: ProxyProtocol,
|
|
|
|
host: str,
|
|
|
|
port: str,
|
|
|
|
auth: bool = False,
|
|
|
|
username: str = None,
|
|
|
|
password: str = None,
|
|
|
|
active: bool = True,
|
|
|
|
default: bool = False,
|
|
|
|
applyExisting: bool = False,
|
2022-12-16 21:39:18 +01:00
|
|
|
) -> dict:
|
2022-07-07 16:08:19 +02:00
|
|
|
data = {
|
2022-07-05 22:12:37 +02:00
|
|
|
"protocol": protocol,
|
|
|
|
"host": host,
|
|
|
|
"port": port,
|
|
|
|
"auth": auth,
|
|
|
|
"username": username,
|
|
|
|
"password": password,
|
|
|
|
"active": active,
|
|
|
|
"default": default,
|
2022-08-03 11:56:02 +02:00
|
|
|
"applyExisting": applyExisting
|
2022-07-05 22:12:37 +02:00
|
|
|
}
|
2022-07-07 16:08:19 +02:00
|
|
|
return data
|
2022-07-02 16:00:54 +02:00
|
|
|
|
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
def _convert_docker_host_input(kwargs) -> None:
    """Fill in a default ``dockerDaemon`` address in place when none is set.

    The default depends on the connection type: the standard unix socket
    path for socket connections, a localhost tcp url for tcp connections.
    """
    if not kwargs["dockerDaemon"]:
        if kwargs["dockerType"] == DockerType.SOCKET:
            kwargs["dockerDaemon"] = "/var/run/docker.sock"
        elif kwargs["dockerType"] == DockerType.TCP:
            kwargs["dockerDaemon"] = "tcp://localhost:2375"
|
|
|
|
|
|
|
|
|
|
|
|
def _build_docker_host_data(
|
2022-12-29 00:22:53 +01:00
|
|
|
name: str,
|
|
|
|
dockerType: DockerType,
|
|
|
|
dockerDaemon: str = None
|
2022-12-16 21:39:18 +01:00
|
|
|
) -> dict:
|
2022-09-07 13:03:10 +02:00
|
|
|
data = {
|
|
|
|
"name": name,
|
|
|
|
"dockerType": dockerType,
|
|
|
|
"dockerDaemon": dockerDaemon
|
|
|
|
}
|
|
|
|
return data
|
|
|
|
|
|
|
|
|
2023-02-13 22:51:21 +01:00
|
|
|
def _build_tag_data(
|
|
|
|
name: str,
|
|
|
|
color: str
|
|
|
|
) -> dict:
|
|
|
|
data = {
|
|
|
|
"new": True,
|
|
|
|
"name": name,
|
|
|
|
"color": color
|
|
|
|
}
|
|
|
|
return data
|
|
|
|
|
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
def _check_missing_arguments(required_params, kwargs) -> None:
|
2022-07-09 22:15:41 +02:00
|
|
|
missing_arguments = []
|
|
|
|
for required_param in required_params:
|
2022-08-03 11:56:02 +02:00
|
|
|
if kwargs.get(required_param) is None:
|
2022-07-09 22:15:41 +02:00
|
|
|
missing_arguments.append(required_param)
|
|
|
|
if missing_arguments:
|
|
|
|
missing_arguments_str = ", ".join([f"'{i}'" for i in missing_arguments])
|
|
|
|
raise TypeError(f"missing {len(missing_arguments)} required argument: {missing_arguments_str}")
|
|
|
|
|
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
def _check_argument_conditions(valid_params, kwargs) -> None:
|
2022-07-10 18:07:11 +02:00
|
|
|
for valid_param in valid_params:
|
2022-08-03 11:56:02 +02:00
|
|
|
if valid_param in kwargs:
|
|
|
|
value = kwargs[valid_param]
|
2022-12-29 00:17:57 +01:00
|
|
|
if value is None:
|
|
|
|
continue
|
2022-07-10 18:07:11 +02:00
|
|
|
conditions = valid_params[valid_param]
|
|
|
|
min_ = conditions.get("min")
|
|
|
|
max_ = conditions.get("max")
|
|
|
|
if min_ is not None and value < min_:
|
|
|
|
raise ValueError(f"the value of {valid_param} must not be less than {min_}")
|
|
|
|
if max_ is not None and value > max_:
|
|
|
|
raise ValueError(f"the value of {valid_param} must not be larger than {max_}")
|
|
|
|
|
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
def _check_arguments_monitor(kwargs) -> None:
    """Validate the arguments for adding or editing a monitor.

    Checks the generally required arguments, the arguments required by the
    specific monitor type, numeric ranges, and the allowed values of
    ``accepted_statuscodes``, ``dns_resolve_type`` and the kafka sasl
    mechanism.

    :raises TypeError: When a required argument is missing.
    :raises ValueError: When an argument has an invalid value.
    """
    required_args = [
        "type",
        "name",
        "interval",
        "maxretries",
        "retryInterval"
    ]
    _check_missing_arguments(required_args, kwargs)

    required_args_by_type = {
        MonitorType.HTTP: ["url", "maxredirects"],
        MonitorType.PORT: ["hostname", "port"],
        MonitorType.PING: ["hostname"],
        MonitorType.KEYWORD: ["url", "keyword", "maxredirects"],
        MonitorType.GRPC_KEYWORD: ["grpcUrl", "keyword", "grpcServiceName", "grpcMethod"],
        MonitorType.DNS: ["hostname", "dns_resolve_server", "port"],
        MonitorType.DOCKER: ["docker_container", "docker_host"],
        MonitorType.PUSH: [],
        MonitorType.STEAM: ["hostname", "port"],
        MonitorType.GAMEDIG: ["game", "hostname", "port"],
        MonitorType.MQTT: ["hostname", "port", "mqttTopic"],
        MonitorType.SQLSERVER: [],
        MonitorType.POSTGRES: [],
        MonitorType.MYSQL: [],
        MonitorType.MONGODB: [],
        MonitorType.RADIUS: ["radiusUsername", "radiusPassword", "radiusSecret", "radiusCalledStationId", "radiusCallingStationId"],
        MonitorType.REDIS: [],
        MonitorType.GROUP: [],
        MonitorType.JSON_QUERY: ["url", "jsonPath", "expectedValue"],
        MonitorType.REAL_BROWSER: ["url"],
        MonitorType.KAFKA_PRODUCER: ["kafkaProducerTopic", "kafkaProducerMessage"],
        MonitorType.TAILSCALE_PING: ["hostname"],
    }
    type_ = kwargs["type"]
    required_args = required_args_by_type[type_]
    _check_missing_arguments(required_args, kwargs)

    conditions = dict(
        interval=dict(
            min=20,
        ),
        maxretries=dict(
            min=0,
        ),
        retryInterval=dict(
            min=20,
        ),
        maxredirects=dict(
            min=0,
        ),
        port=dict(
            min=0,
            max=65535,
        ),
    )
    _check_argument_conditions(conditions, kwargs)

    # either one of the predefined ranges or a concrete three digit status code
    allowed_accepted_statuscodes = [
        "100-199",
        "200-299",
        "300-399",
        "400-499",
        "500-599",
    ] + [
        str(i) for i in range(100, 999 + 1)
    ]
    accepted_statuscodes = kwargs["accepted_statuscodes"]
    for accepted_statuscode in accepted_statuscodes:
        if accepted_statuscode not in allowed_accepted_statuscodes:
            # bugfix: report the offending value, not the list of allowed values
            raise ValueError(f"Unknown accepted_statuscodes value: {accepted_statuscode}")

    dns_resolve_type = kwargs["dns_resolve_type"]
    if dns_resolve_type not in [
        "A",
        "AAAA",
        "CAA",
        "CNAME",
        "MX",
        "NS",
        "PTR",
        "SOA",
        "SRV",
        "TXT",
    ]:
        raise ValueError(f"Unknown dns_resolve_type value: {dns_resolve_type}")

    if type_ == MonitorType.KAFKA_PRODUCER:
        kafkaProducerSaslOptions_mechanism = kwargs["kafkaProducerSaslOptions"]["mechanism"]
        if kafkaProducerSaslOptions_mechanism not in [
            "None",
            "plain",
            "scram-sha-256",
            "scram-sha-512",
            "aws",
        ]:
            raise ValueError(f"Unknown kafkaProducerSaslOptions[\"mechanism\"] value: {kafkaProducerSaslOptions_mechanism}")
|
2023-08-12 18:20:59 +02:00
|
|
|
|
2022-07-10 18:07:11 +02:00
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
def _check_arguments_notification(kwargs) -> None:
    """Validate the arguments for adding or editing a notification.

    :raises TypeError: When a required argument is missing.
    :raises ValueError: When an argument violates a provider condition.
    """
    _check_missing_arguments(["type", "name"], kwargs)

    provider = kwargs["type"]
    provider_required = [
        option
        for option, info in notification_provider_options[provider].items()
        if info["required"]
    ]
    _check_missing_arguments(provider_required, kwargs)

    _check_argument_conditions(notification_provider_conditions, kwargs)
|
2022-07-09 22:15:41 +02:00
|
|
|
|
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
def _check_arguments_proxy(kwargs) -> None:
    """Validate the arguments for adding or editing a proxy.

    ``username`` and ``password`` are only required when ``auth`` is enabled.

    :raises TypeError: When a required argument is missing.
    :raises ValueError: When the port is out of range.
    """
    required = ["protocol", "host", "port"]
    if kwargs.get("auth"):
        required = required + ["username", "password"]
    _check_missing_arguments(required, kwargs)

    _check_argument_conditions(
        dict(port=dict(min=0, max=65535)),
        kwargs
    )
|
2022-07-09 22:15:41 +02:00
|
|
|
|
|
|
|
|
2022-12-29 00:22:53 +01:00
|
|
|
def _check_arguments_maintenance(kwargs) -> None:
    """Validate the arguments for adding or editing a maintenance.

    Recurring strategies additionally require a ``dateRange``.

    :raises TypeError: When a required argument is missing.
    :raises ValueError: When ``intervalDay`` is out of range.
    """
    _check_missing_arguments(["title", "strategy"], kwargs)

    recurring_strategies = (
        MaintenanceStrategy.RECURRING_INTERVAL,
        MaintenanceStrategy.RECURRING_WEEKDAY,
        MaintenanceStrategy.RECURRING_DAY_OF_MONTH,
    )
    if kwargs["strategy"] in recurring_strategies:
        _check_missing_arguments(["dateRange"], kwargs)

    _check_argument_conditions(
        dict(intervalDay=dict(min=1, max=3650)),
        kwargs
    )
|
|
|
|
|
2023-01-17 21:01:55 +01:00
|
|
|
|
2023-02-13 22:51:21 +01:00
|
|
|
def _check_arguments_tag(kwargs) -> None:
    """Validate the arguments for creating or editing a tag.

    :raises TypeError: When ``name`` or ``color`` is missing.
    """
    _check_missing_arguments(["name", "color"], kwargs)
|
|
|
|
|
|
|
|
|
2022-07-02 16:00:54 +02:00
|
|
|
class UptimeKumaApi(object):
|
2022-12-16 21:39:18 +01:00
|
|
|
"""This class is used to communicate with Uptime Kuma.
|
|
|
|
|
|
|
|
Example::
|
|
|
|
|
|
|
|
Import UptimeKumaApi from the library and specify the Uptime Kuma server url (e.g. 'http://127.0.0.1:3001'), username and password to initialize the connection.
|
|
|
|
|
|
|
|
>>> from uptime_kuma_api import UptimeKumaApi
|
|
|
|
>>> api = UptimeKumaApi('INSERT_URL')
|
|
|
|
>>> api.login('INSERT_USERNAME', 'INSERT_PASSWORD')
|
2023-05-01 18:40:14 +02:00
|
|
|
{
|
|
|
|
'token': 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VybmFtZSI6ImFkbWluIiwiaWF0IjoxNjgyOTU4OTU4fQ.Xb81nuKXeNyE1D_XoQowYgsgZHka-edONdwHmIznJdk'
|
|
|
|
}
|
2022-12-16 21:39:18 +01:00
|
|
|
|
|
|
|
Now you can call one of the existing methods of the instance. For example create a new monitor:
|
|
|
|
|
|
|
|
>>> api.add_monitor(
|
|
|
|
... type=MonitorType.HTTP,
|
|
|
|
... name="Google",
|
|
|
|
... url="https://google.com"
|
|
|
|
... )
|
|
|
|
{
|
|
|
|
'msg': 'Added Successfully.',
|
|
|
|
'monitorId': 1
|
|
|
|
}
|
|
|
|
|
|
|
|
At the end, the connection to the API must be disconnected so that the program does not block.
|
|
|
|
|
|
|
|
>>> api.disconnect()
|
|
|
|
|
2023-05-01 18:40:14 +02:00
|
|
|
With a context manager, the disconnect method is called automatically:
|
|
|
|
|
|
|
|
.. code-block:: python
|
|
|
|
|
|
|
|
from uptime_kuma_api import UptimeKumaApi
|
|
|
|
|
|
|
|
with UptimeKumaApi('INSERT_URL') as api:
|
|
|
|
api.login('INSERT_USERNAME', 'INSERT_PASSWORD')
|
|
|
|
api.add_monitor(
|
|
|
|
type=MonitorType.HTTP,
|
|
|
|
name="Google",
|
|
|
|
url="https://google.com"
|
|
|
|
)
|
|
|
|
|
2022-12-17 15:09:27 +01:00
|
|
|
:param str url: The url to the Uptime Kuma instance. For example ``http://127.0.0.1:3001``
|
2023-05-19 13:50:39 +02:00
|
|
|
:param float timeout: How many seconds the client should wait for the connection, an expected event or a server
|
|
|
|
response. Default is ``10``.
|
2023-03-30 12:19:27 +02:00
|
|
|
:param dict headers: Headers that are passed to the socketio connection, defaults to None
|
2023-04-28 00:07:01 +02:00
|
|
|
:param bool ssl_verify: ``True`` to verify SSL certificates, or ``False`` to skip SSL certificate
|
|
|
|
verification, allowing connections to servers with self signed certificates.
|
|
|
|
Default is ``True``.
|
2023-04-30 16:48:33 +02:00
|
|
|
:param float wait_events: How many seconds the client should wait for the next event of the same type.
|
|
|
|
There is no way to determine when the last message of a certain type has arrived.
|
|
|
|
Therefore, a timeout is required. If no further message has arrived within this time,
|
|
|
|
it is assumed that it was the last message. Defaults is ``0.2``.
|
2022-12-17 15:30:29 +01:00
|
|
|
:raises UptimeKumaException: When connection to server failed.
|
2022-12-16 21:39:18 +01:00
|
|
|
"""
|
2022-12-23 14:07:46 +01:00
|
|
|
    def __init__(
        self,
        url: str,
        timeout: float = 10,
        headers: dict = None,
        ssl_verify: bool = True,
        wait_events: float = 0.2
    ) -> None:
        # strip a trailing slash; connect() appends "/socket.io/" to this url
        self.url = url.rstrip("/")
        self.timeout = timeout
        self.headers = headers
        self.wait_events = wait_events
        self.sio = socketio.Client(ssl_verify=ssl_verify)

        # latest received payload per event type; None means that no event
        # of this type has been received yet
        self._event_data: dict = {
            Event.MONITOR_LIST: None,
            Event.NOTIFICATION_LIST: None,
            Event.PROXY_LIST: None,
            Event.STATUS_PAGE_LIST: None,
            Event.HEARTBEAT_LIST: None,
            Event.IMPORTANT_HEARTBEAT_LIST: None,
            Event.AVG_PING: None,
            Event.UPTIME: None,
            Event.INFO: None,
            Event.CERT_INFO: None,
            Event.DOCKER_HOST_LIST: None,
            Event.AUTO_LOGIN: None,
            Event.MAINTENANCE_LIST: None,
            Event.API_KEY_LIST: None
        }

        # register all event handlers before connecting
        self.sio.on(Event.CONNECT, self._event_connect)
        self.sio.on(Event.DISCONNECT, self._event_disconnect)
        self.sio.on(Event.MONITOR_LIST, self._event_monitor_list)
        self.sio.on(Event.NOTIFICATION_LIST, self._event_notification_list)
        self.sio.on(Event.PROXY_LIST, self._event_proxy_list)
        self.sio.on(Event.STATUS_PAGE_LIST, self._event_status_page_list)
        self.sio.on(Event.HEARTBEAT_LIST, self._event_heartbeat_list)
        self.sio.on(Event.IMPORTANT_HEARTBEAT_LIST, self._event_important_heartbeat_list)
        self.sio.on(Event.AVG_PING, self._event_avg_ping)
        self.sio.on(Event.UPTIME, self._event_uptime)
        self.sio.on(Event.HEARTBEAT, self._event_heartbeat)
        self.sio.on(Event.INFO, self._event_info)
        self.sio.on(Event.CERT_INFO, self._event_cert_info)
        self.sio.on(Event.DOCKER_HOST_LIST, self._event_docker_host_list)
        self.sio.on(Event.AUTO_LOGIN, self._event_auto_login)
        self.sio.on(Event.INIT_SERVER_TIMEZONE, self._event_init_server_timezone)
        self.sio.on(Event.MAINTENANCE_LIST, self._event_maintenance_list)
        self.sio.on(Event.API_KEY_LIST, self._event_api_key_list)

        # establish the socket.io connection immediately
        self.connect()
|
2022-07-02 16:00:54 +02:00
|
|
|
|
2023-05-01 18:40:14 +02:00
|
|
|
    def __enter__(self):
        """Enter the context manager; returns the api instance itself."""
        return self
|
|
|
|
|
|
|
|
    def __exit__(self, exc_type, exc_val, exc_tb):
        """Disconnect from Uptime Kuma when the with-block ends."""
        self.disconnect()
|
|
|
|
|
2022-10-04 18:38:17 +02:00
|
|
|
@contextmanager
|
2022-12-16 21:39:18 +01:00
|
|
|
def wait_for_event(self, event: Event) -> None:
|
2023-05-19 13:50:39 +02:00
|
|
|
# waits for the first event of the given type to arrive
|
2022-10-04 18:38:17 +02:00
|
|
|
|
|
|
|
try:
|
|
|
|
yield
|
|
|
|
except:
|
|
|
|
raise
|
|
|
|
else:
|
2023-05-19 13:50:39 +02:00
|
|
|
timestamp = time.time()
|
2023-04-07 20:07:52 +02:00
|
|
|
while self._event_data[event] is None:
|
2023-05-19 13:50:39 +02:00
|
|
|
if time.time() - timestamp > self.timeout:
|
|
|
|
raise Timeout(f"Timed out while waiting for event {event}")
|
|
|
|
time.sleep(0.01)
|
2022-10-04 18:38:17 +02:00
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
def _get_event_data(self, event) -> Any:
|
2022-08-05 15:52:19 +02:00
|
|
|
monitor_events = [Event.AVG_PING, Event.UPTIME, Event.HEARTBEAT_LIST, Event.IMPORTANT_HEARTBEAT_LIST, Event.CERT_INFO, Event.HEARTBEAT]
|
2023-05-19 13:50:39 +02:00
|
|
|
timestamp = time.time()
|
2022-07-05 22:12:37 +02:00
|
|
|
while self._event_data[event] is None:
|
2023-05-19 13:50:39 +02:00
|
|
|
if time.time() - timestamp > self.timeout:
|
|
|
|
raise Timeout(f"Timed out while waiting for event {event}")
|
2022-08-05 15:48:02 +02:00
|
|
|
# do not wait for events that are not sent
|
2022-08-05 15:52:19 +02:00
|
|
|
if self._event_data[Event.MONITOR_LIST] == {} and event in monitor_events:
|
2022-08-05 15:48:02 +02:00
|
|
|
return []
|
2022-07-02 16:00:54 +02:00
|
|
|
time.sleep(0.01)
|
2023-04-30 16:48:33 +02:00
|
|
|
time.sleep(self.wait_events) # wait for multiple messages
|
2023-05-19 14:07:34 +02:00
|
|
|
return deepcopy(self._event_data[event].copy())
|
2022-07-02 16:00:54 +02:00
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
def _call(self, event, data=None) -> Any:
|
2023-05-19 13:50:39 +02:00
|
|
|
r = self.sio.call(event, data, timeout=self.timeout)
|
2023-05-01 18:57:55 +02:00
|
|
|
if isinstance(r, dict) and "ok" in r:
|
2022-07-07 22:17:47 +02:00
|
|
|
if not r["ok"]:
|
2023-05-01 18:57:55 +02:00
|
|
|
raise UptimeKumaException(r.get("msg"))
|
2022-07-07 22:17:47 +02:00
|
|
|
r.pop("ok")
|
2022-07-07 13:29:06 +02:00
|
|
|
return r
|
|
|
|
|
2022-07-02 16:00:54 +02:00
|
|
|
# event handlers
|
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
    def _event_connect(self) -> None:
        """Handler for the connect event; intentionally a no-op."""
        pass
|
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
    def _event_disconnect(self) -> None:
        """Handler for the disconnect event; intentionally a no-op."""
        pass
|
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
    def _event_monitor_list(self, data) -> None:
        """Cache the monitor list payload sent by the server."""
        self._event_data[Event.MONITOR_LIST] = data
|
2022-07-02 16:00:54 +02:00
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
    def _event_notification_list(self, data) -> None:
        """Cache the notification list payload sent by the server."""
        self._event_data[Event.NOTIFICATION_LIST] = data
|
2022-07-02 16:00:54 +02:00
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
    def _event_proxy_list(self, data) -> None:
        """Cache the proxy list payload sent by the server."""
        self._event_data[Event.PROXY_LIST] = data
|
2022-07-02 16:00:54 +02:00
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
    def _event_status_page_list(self, data) -> None:
        """Cache the status page list payload sent by the server."""
        self._event_data[Event.STATUS_PAGE_LIST] = data
|
2022-07-02 16:00:54 +02:00
|
|
|
|
2023-05-19 14:07:34 +02:00
|
|
|
def _event_heartbeat_list(self, monitor_id, data, overwrite) -> None:
|
|
|
|
monitor_id = int(monitor_id)
|
|
|
|
|
2022-08-05 15:52:19 +02:00
|
|
|
if self._event_data[Event.HEARTBEAT_LIST] is None:
|
2023-05-19 14:07:34 +02:00
|
|
|
self._event_data[Event.HEARTBEAT_LIST] = {}
|
|
|
|
if monitor_id not in self._event_data[Event.HEARTBEAT_LIST] or overwrite:
|
|
|
|
self._event_data[Event.HEARTBEAT_LIST][monitor_id] = data
|
|
|
|
else:
|
|
|
|
self._event_data[Event.HEARTBEAT_LIST][monitor_id].append(data)
|
|
|
|
|
|
|
|
def _event_important_heartbeat_list(self, monitor_id, data, overwrite) -> None:
|
|
|
|
monitor_id = int(monitor_id)
|
2022-07-02 16:00:54 +02:00
|
|
|
|
2022-08-05 15:52:19 +02:00
|
|
|
if self._event_data[Event.IMPORTANT_HEARTBEAT_LIST] is None:
|
2023-05-19 14:07:34 +02:00
|
|
|
self._event_data[Event.IMPORTANT_HEARTBEAT_LIST] = {}
|
|
|
|
if monitor_id not in self._event_data[Event.IMPORTANT_HEARTBEAT_LIST] or overwrite:
|
|
|
|
self._event_data[Event.IMPORTANT_HEARTBEAT_LIST][monitor_id] = data
|
|
|
|
else:
|
|
|
|
self._event_data[Event.IMPORTANT_HEARTBEAT_LIST][monitor_id].append(data)
|
|
|
|
|
|
|
|
    def _event_avg_ping(self, monitor_id, data) -> None:
        """Cache the latest average ping payload per monitor id."""
        monitor_id = int(monitor_id)

        if self._event_data[Event.AVG_PING] is None:
            self._event_data[Event.AVG_PING] = {}
        self._event_data[Event.AVG_PING][monitor_id] = data
|
|
|
|
|
|
|
|
def _event_uptime(self, monitor_id, type_, data) -> None:
|
|
|
|
monitor_id = int(monitor_id)
|
2022-07-02 16:00:54 +02:00
|
|
|
|
2022-08-05 15:52:19 +02:00
|
|
|
if self._event_data[Event.UPTIME] is None:
|
2023-05-19 14:07:34 +02:00
|
|
|
self._event_data[Event.UPTIME] = {}
|
|
|
|
if monitor_id not in self._event_data[Event.UPTIME]:
|
|
|
|
self._event_data[Event.UPTIME][monitor_id] = {}
|
|
|
|
self._event_data[Event.UPTIME][monitor_id][type_] = data
|
2022-07-02 16:00:54 +02:00
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
def _event_heartbeat(self, data) -> None:
|
2023-05-20 20:31:39 +02:00
|
|
|
if self._event_data[Event.HEARTBEAT_LIST] is None:
|
|
|
|
self._event_data[Event.HEARTBEAT_LIST] = {}
|
2023-05-19 14:07:34 +02:00
|
|
|
monitor_id = data["monitorID"]
|
2023-05-20 20:31:39 +02:00
|
|
|
if monitor_id not in self._event_data[Event.HEARTBEAT_LIST]:
|
|
|
|
self._event_data[Event.HEARTBEAT_LIST][monitor_id] = []
|
|
|
|
self._event_data[Event.HEARTBEAT_LIST][monitor_id].append(data)
|
|
|
|
if len(self._event_data[Event.HEARTBEAT_LIST][monitor_id]) >= 150:
|
|
|
|
self._event_data[Event.HEARTBEAT_LIST][monitor_id].pop(0)
|
2023-05-19 14:07:34 +02:00
|
|
|
|
|
|
|
# add heartbeat to important heartbeat list
|
|
|
|
if data["important"]:
|
|
|
|
if self._event_data[Event.IMPORTANT_HEARTBEAT_LIST] is None:
|
|
|
|
self._event_data[Event.IMPORTANT_HEARTBEAT_LIST] = {}
|
|
|
|
if monitor_id not in self._event_data[Event.IMPORTANT_HEARTBEAT_LIST]:
|
|
|
|
self._event_data[Event.IMPORTANT_HEARTBEAT_LIST][monitor_id] = []
|
|
|
|
self._event_data[Event.IMPORTANT_HEARTBEAT_LIST][monitor_id] = [data] + self._event_data[Event.IMPORTANT_HEARTBEAT_LIST][monitor_id]
|
2022-07-02 16:00:54 +02:00
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
    def _event_info(self, data) -> None:
        """Cache the server info payload once it contains the version field."""
        if "version" not in data:
            # wait for the info event that is sent after login and contains the version
            return
        self._event_data[Event.INFO] = data
|
2022-07-02 16:00:54 +02:00
|
|
|
|
2023-05-19 14:07:34 +02:00
|
|
|
    def _event_cert_info(self, monitor_id, data) -> None:
        """Cache certificate info per monitor id.

        ``data`` arrives as a JSON string and is decoded before caching.
        """
        monitor_id = int(monitor_id)

        if self._event_data[Event.CERT_INFO] is None:
            self._event_data[Event.CERT_INFO] = {}
        self._event_data[Event.CERT_INFO][monitor_id] = json.loads(data)
|
2022-08-05 15:48:02 +02:00
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
    def _event_docker_host_list(self, data) -> None:
        """Cache the docker host list payload sent by the server."""
        self._event_data[Event.DOCKER_HOST_LIST] = data
|
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
    def _event_auto_login(self) -> None:
        """Remember that the server allows login without credentials."""
        self._event_data[Event.AUTO_LOGIN] = True
|
|
|
|
|
2022-12-29 00:22:53 +01:00
|
|
|
    def _event_init_server_timezone(self) -> None:
        """Handler for the init server timezone event; intentionally a no-op."""
        pass
|
|
|
|
|
|
|
|
    def _event_maintenance_list(self, data) -> None:
        """Cache the maintenance list payload sent by the server."""
        self._event_data[Event.MAINTENANCE_LIST] = data
|
|
|
|
|
2023-03-20 15:14:39 +01:00
|
|
|
    def _event_api_key_list(self, data) -> None:
        """Cache the api key list payload sent by the server."""
        self._event_data[Event.API_KEY_LIST] = data
|
|
|
|
|
2022-07-02 16:00:54 +02:00
|
|
|
# connection
|
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
def connect(self) -> None:
|
|
|
|
"""
|
|
|
|
Connects to Uptime Kuma.
|
|
|
|
|
|
|
|
Called automatically when the UptimeKumaApi instance is created.
|
2022-12-17 15:30:29 +01:00
|
|
|
|
|
|
|
:raises UptimeKumaException: When connection to server failed.
|
2022-12-16 21:39:18 +01:00
|
|
|
"""
|
2022-08-26 14:01:29 +02:00
|
|
|
try:
|
2023-08-12 16:39:44 +02:00
|
|
|
self.sio.connect(f'{self.url}/socket.io/', wait_timeout=self.timeout, headers=self.headers)
|
2022-08-26 14:01:29 +02:00
|
|
|
except:
|
2022-12-16 21:39:18 +01:00
|
|
|
raise UptimeKumaException("unable to connect")
|
|
|
|
|
|
|
|
    def disconnect(self) -> None:
        """
        Disconnects from Uptime Kuma.

        Needs to be called to prevent blocking the program.
        """
        self.sio.disconnect()
|
|
|
|
|
2022-09-07 13:03:10 +02:00
|
|
|
# builder
|
|
|
|
|
|
|
|
    @property
    def version(self) -> str:
        """The Uptime Kuma server version, read from the cached info event.

        May be ``None`` when the info payload has no ``version`` field.
        """
        info = self.info()
        return info.get("version")
|
2022-09-07 13:03:10 +02:00
|
|
|
|
|
|
|
    def _build_monitor_data(
            self,
            type: MonitorType,
            name: str,
            parent: int = None,
            description: str = None,
            interval: int = 60,
            retryInterval: int = 60,
            resendInterval: int = 0,
            maxretries: int = 1,
            upsideDown: bool = False,
            notificationIDList: list = None,
            httpBodyEncoding: str = "json",

            # HTTP, KEYWORD, JSON_QUERY, REAL_BROWSER
            url: str = None,

            # HTTP, KEYWORD, GRPC_KEYWORD
            maxredirects: int = 10,
            accepted_statuscodes: list[str] = None,

            # HTTP, KEYWORD, JSON_QUERY
            expiryNotification: bool = False,
            ignoreTls: bool = False,
            proxyId: int = None,
            method: str = "GET",
            body: str = None,
            headers: str = None,
            authMethod: AuthMethod = AuthMethod.NONE,
            tlsCert: str = None,
            tlsKey: str = None,
            tlsCa: str = None,
            basic_auth_user: str = None,
            basic_auth_pass: str = None,
            authDomain: str = None,
            authWorkstation: str = None,
            oauth_auth_method: str = "client_secret_basic",
            oauth_token_url: str = None,
            oauth_client_id: str = None,
            oauth_client_secret: str = None,
            oauth_scopes: str = None,
            timeout: int = 48,

            # KEYWORD
            keyword: str = None,
            invertKeyword: bool = False,

            # GRPC_KEYWORD
            grpcUrl: str = None,
            grpcEnableTls: bool = False,
            grpcServiceName: str = None,
            grpcMethod: str = None,
            grpcProtobuf: str = None,
            grpcBody: str = None,
            grpcMetadata: str = None,

            # PORT, PING, DNS, STEAM, MQTT, RADIUS, TAILSCALE_PING
            hostname: str = None,

            # PING
            packetSize: int = 56,

            # PORT, DNS, STEAM, MQTT, RADIUS
            port: int = None,

            # DNS
            dns_resolve_server: str = "1.1.1.1",
            dns_resolve_type: str = "A",

            # MQTT
            mqttUsername: str = "",
            mqttPassword: str = "",
            mqttTopic: str = "",
            mqttSuccessMessage: str = "",

            # SQLSERVER, POSTGRES, MYSQL, MONGODB, REDIS
            databaseConnectionString: str = None,

            # SQLSERVER, POSTGRES, MYSQL
            databaseQuery: str = None,

            # DOCKER
            docker_container: str = "",
            docker_host: int = None,

            # RADIUS
            radiusUsername: str = None,
            radiusPassword: str = None,
            radiusSecret: str = None,
            radiusCalledStationId: str = None,
            radiusCallingStationId: str = None,

            # GAMEDIG
            game: str = None,
            gamedigGivenPortOnly: bool = True,

            # JSON_QUERY
            jsonPath: str = None,
            expectedValue: str = None,

            # KAFKA_PRODUCER
            kafkaProducerBrokers: list[str] = None,
            kafkaProducerTopic: str = None,
            kafkaProducerMessage: str = None,
            kafkaProducerSsl: bool = False,
            kafkaProducerAllowAutoTopicCreation: bool = False,
            kafkaProducerSaslOptions: dict = None,
    ) -> dict:
        """
        Build the monitor payload dict sent with add/edit monitor calls.

        Common fields are always included; type-specific fields are only
        added for the matching monitor ``type``, and fields introduced in
        newer Uptime Kuma releases are included only when the connected
        server version supports them (checked via ``self.version``).
        """
        if accepted_statuscodes is None:
            accepted_statuscodes = ["200-299"]

        if notificationIDList is None:
            # NOTE(review): the parameter is annotated as list, but the
            # default here is a dict — presumably the server expects a
            # mapping of notification id -> enabled; confirm against callers.
            notificationIDList = {}

        # fields shared by all monitor types
        data = {
            "type": type,
            "name": name,
            "interval": interval,
            "retryInterval": retryInterval,
            "maxretries": maxretries,
            "notificationIDList": notificationIDList,
            "upsideDown": upsideDown,
            "resendInterval": resendInterval,
            "description": description,
            "httpBodyEncoding": httpBodyEncoding,
        }

        # monitor groups ("parent") require server version >= 1.22
        if parse_version(self.version) >= parse_version("1.22"):
            data.update({
                "parent": parent,
            })

        if type in [MonitorType.KEYWORD, MonitorType.GRPC_KEYWORD]:
            data.update({
                "keyword": keyword,
            })
            # keyword inversion requires server version >= 1.23
            if parse_version(self.version) >= parse_version("1.23"):
                data.update({
                    "invertKeyword": invertKeyword,
                })

        # HTTP, KEYWORD, JSON_QUERY, REAL_BROWSER
        data.update({
            "url": url,
        })

        # HTTP, KEYWORD, GRPC_KEYWORD
        data.update({
            "maxredirects": maxredirects,
            "accepted_statuscodes": accepted_statuscodes,
        })

        data.update({
            "expiryNotification": expiryNotification,
            "ignoreTls": ignoreTls,
            "proxyId": proxyId,
            "method": method,
            "body": body,
            "headers": headers,
            "authMethod": authMethod,
        })

        # request timeout field requires server version >= 1.23
        if parse_version(self.version) >= parse_version("1.23"):
            data.update({
                "timeout": timeout,
            })

        if authMethod in [AuthMethod.HTTP_BASIC, AuthMethod.NTLM]:
            data.update({
                "basic_auth_user": basic_auth_user,
                "basic_auth_pass": basic_auth_pass,
            })

        if authMethod == AuthMethod.NTLM:
            data.update({
                "authDomain": authDomain,
                "authWorkstation": authWorkstation,
            })

        if authMethod == AuthMethod.MTLS:
            data.update({
                "tlsCert": tlsCert,
                "tlsKey": tlsKey,
                "tlsCa": tlsCa,
            })

        if authMethod == AuthMethod.OAUTH2_CC:
            data.update({
                "oauth_auth_method": oauth_auth_method,
                "oauth_token_url": oauth_token_url,
                "oauth_client_id": oauth_client_id,
                "oauth_client_secret": oauth_client_secret,
                "oauth_scopes": oauth_scopes,
            })

        # GRPC_KEYWORD
        if type == MonitorType.GRPC_KEYWORD:
            data.update({
                "grpcUrl": grpcUrl,
                "grpcEnableTls": grpcEnableTls,
                "grpcServiceName": grpcServiceName,
                "grpcMethod": grpcMethod,
                "grpcProtobuf": grpcProtobuf,
                "grpcBody": grpcBody,
                "grpcMetadata": grpcMetadata,
            })

        # PORT, PING, DNS, STEAM, MQTT, RADIUS, TAILSCALE_PING
        data.update({
            "hostname": hostname,
        })

        # PING
        data.update({
            "packetSize": packetSize,
        })

        # PORT, DNS, STEAM, MQTT, RADIUS
        # fall back to the protocol's well-known port if none was given
        if not port:
            if type == MonitorType.DNS:
                port = 53
            elif type == MonitorType.RADIUS:
                port = 1812
        data.update({
            "port": port,
        })

        # DNS
        data.update({
            "dns_resolve_server": dns_resolve_server,
            "dns_resolve_type": dns_resolve_type,
        })

        # MQTT
        data.update({
            "mqttUsername": mqttUsername,
            "mqttPassword": mqttPassword,
            "mqttTopic": mqttTopic,
            "mqttSuccessMessage": mqttSuccessMessage,
        })

        # SQLSERVER, POSTGRES, MYSQL, MONGODB, REDIS
        data.update({
            "databaseConnectionString": databaseConnectionString
        })

        # SQLSERVER, POSTGRES, MYSQL
        if type in [MonitorType.SQLSERVER, MonitorType.POSTGRES, MonitorType.MYSQL]:
            data.update({
                "databaseQuery": databaseQuery,
            })

        # DOCKER
        if type == MonitorType.DOCKER:
            data.update({
                "docker_container": docker_container,
                "docker_host": docker_host,
            })

        # RADIUS
        if type == MonitorType.RADIUS:
            data.update({
                "radiusUsername": radiusUsername,
                "radiusPassword": radiusPassword,
                "radiusSecret": radiusSecret,
                "radiusCalledStationId": radiusCalledStationId,
                "radiusCallingStationId": radiusCallingStationId,
            })

        # GAMEDIG
        if type == MonitorType.GAMEDIG:
            data.update({
                "game": game,
            })
            # requires server version >= 1.23
            if parse_version(self.version) >= parse_version("1.23"):
                data.update({
                    "gamedigGivenPortOnly": gamedigGivenPortOnly,
                })

        # JSON_QUERY
        if type == MonitorType.JSON_QUERY:
            data.update({
                "jsonPath": jsonPath,
                "expectedValue": expectedValue,
            })

        # KAFKA_PRODUCER
        if type == MonitorType.KAFKA_PRODUCER:
            if kafkaProducerBrokers is None:
                kafkaProducerBrokers = []
            if not kafkaProducerSaslOptions:
                kafkaProducerSaslOptions = {
                    "mechanism": "None",
                }
            data.update({
                "kafkaProducerBrokers": kafkaProducerBrokers,
                "kafkaProducerTopic": kafkaProducerTopic,
                "kafkaProducerMessage": kafkaProducerMessage,
                "kafkaProducerSsl": kafkaProducerSsl,
                "kafkaProducerAllowAutoTopicCreation": kafkaProducerAllowAutoTopicCreation,
                "kafkaProducerSaslOptions": kafkaProducerSaslOptions,
            })
        return data
|
|
|
|
|
2023-04-07 21:03:33 +02:00
|
|
|
def _build_maintenance_data(
|
|
|
|
self,
|
|
|
|
title: str,
|
|
|
|
strategy: MaintenanceStrategy,
|
|
|
|
active: bool = True,
|
|
|
|
description: str = "",
|
|
|
|
dateRange: list = None,
|
|
|
|
intervalDay: int = 1,
|
|
|
|
weekdays: list = None,
|
|
|
|
daysOfMonth: list = None,
|
|
|
|
timeRange: list = None,
|
|
|
|
cron: str = "30 3 * * *",
|
|
|
|
durationMinutes: int = 60,
|
2023-05-19 13:49:36 +02:00
|
|
|
timezoneOption: str = None
|
2023-04-07 21:03:33 +02:00
|
|
|
) -> dict:
|
|
|
|
if not dateRange:
|
|
|
|
dateRange = [
|
|
|
|
datetime.date.today().strftime("%Y-%m-%d 00:00:00")
|
|
|
|
]
|
|
|
|
if not timeRange:
|
|
|
|
timeRange = [
|
|
|
|
{
|
|
|
|
"hours": 2,
|
|
|
|
"minutes": 0,
|
|
|
|
}, {
|
|
|
|
"hours": 3,
|
|
|
|
"minutes": 0,
|
|
|
|
}
|
|
|
|
]
|
|
|
|
if not weekdays:
|
|
|
|
weekdays = []
|
|
|
|
if not daysOfMonth:
|
|
|
|
daysOfMonth = []
|
|
|
|
data = {
|
|
|
|
"title": title,
|
|
|
|
"active": active,
|
|
|
|
"intervalDay": intervalDay,
|
|
|
|
"dateRange": dateRange,
|
|
|
|
"description": description,
|
|
|
|
"strategy": strategy,
|
|
|
|
"weekdays": weekdays,
|
|
|
|
"daysOfMonth": daysOfMonth,
|
2023-05-20 12:43:57 +02:00
|
|
|
"timeRange": timeRange,
|
|
|
|
"cron": cron,
|
|
|
|
"durationMinutes": durationMinutes,
|
|
|
|
"timezoneOption": timezoneOption
|
2023-04-07 21:03:33 +02:00
|
|
|
}
|
|
|
|
return data
|
|
|
|
|
2023-07-07 22:28:20 +02:00
|
|
|
def _build_status_page_data(
|
|
|
|
self,
|
|
|
|
slug: str,
|
|
|
|
|
|
|
|
# config
|
|
|
|
id: int,
|
|
|
|
title: str,
|
|
|
|
description: str = None,
|
|
|
|
theme: str = None,
|
|
|
|
published: bool = True,
|
|
|
|
showTags: bool = False,
|
|
|
|
domainNameList: list = None,
|
|
|
|
googleAnalyticsId: str = None,
|
|
|
|
customCSS: str = "",
|
|
|
|
footerText: str = None,
|
|
|
|
showPoweredBy: bool = True,
|
2023-08-29 18:37:27 +02:00
|
|
|
showCertificateExpiry: bool = False,
|
2023-07-07 22:28:20 +02:00
|
|
|
|
|
|
|
icon: str = "/icon.svg",
|
|
|
|
publicGroupList: list = None
|
|
|
|
) -> tuple[str, dict, str, list]:
|
|
|
|
if not theme:
|
|
|
|
if parse_version(self.version) >= parse_version("1.22"):
|
|
|
|
theme = "auto"
|
|
|
|
else:
|
|
|
|
theme = "light"
|
|
|
|
if theme not in ["auto", "light", "dark"]:
|
|
|
|
raise ValueError
|
|
|
|
if not domainNameList:
|
|
|
|
domainNameList = []
|
|
|
|
if not publicGroupList:
|
|
|
|
publicGroupList = []
|
|
|
|
config = {
|
|
|
|
"id": id,
|
|
|
|
"slug": slug,
|
|
|
|
"title": title,
|
|
|
|
"description": description,
|
|
|
|
"icon": icon,
|
|
|
|
"theme": theme,
|
|
|
|
"published": published,
|
|
|
|
"showTags": showTags,
|
|
|
|
"domainNameList": domainNameList,
|
|
|
|
"googleAnalyticsId": googleAnalyticsId,
|
|
|
|
"customCSS": customCSS,
|
|
|
|
"footerText": footerText,
|
2023-08-29 18:37:27 +02:00
|
|
|
"showPoweredBy": showPoweredBy,
|
2023-07-07 22:28:20 +02:00
|
|
|
}
|
2023-08-29 18:37:27 +02:00
|
|
|
if parse_version(self.version) >= parse_version("1.23"):
|
|
|
|
config.update({
|
|
|
|
"showCertificateExpiry": showCertificateExpiry,
|
|
|
|
})
|
2023-07-07 22:28:20 +02:00
|
|
|
return slug, config, icon, publicGroupList
|
|
|
|
|
2022-10-04 18:38:17 +02:00
|
|
|
# monitor
|
2022-07-02 16:00:54 +02:00
|
|
|
|
2023-05-01 18:57:55 +02:00
|
|
|
def get_monitors(self) -> list[dict]:
|
2022-12-16 21:39:18 +01:00
|
|
|
"""
|
|
|
|
Get all monitors.
|
|
|
|
|
|
|
|
:return: A list of monitors.
|
2022-12-17 15:30:29 +01:00
|
|
|
:rtype: list
|
2022-12-16 21:39:18 +01:00
|
|
|
|
|
|
|
Example::
|
|
|
|
|
|
|
|
>>> api.get_monitors()
|
|
|
|
[
|
|
|
|
{
|
2022-12-30 21:37:10 +01:00
|
|
|
'accepted_statuscodes': ['200-299'],
|
2022-12-16 21:39:18 +01:00
|
|
|
'active': True,
|
2022-12-30 21:37:10 +01:00
|
|
|
'authDomain': None,
|
2023-05-25 21:26:54 +02:00
|
|
|
'authMethod': <AuthMethod.NONE: ''>,
|
2022-12-30 21:37:10 +01:00
|
|
|
'authWorkstation': None,
|
|
|
|
'basic_auth_pass': None,
|
|
|
|
'basic_auth_user': None,
|
|
|
|
'body': None,
|
2023-07-07 22:28:20 +02:00
|
|
|
'childrenIDs': [],
|
2022-12-30 21:37:10 +01:00
|
|
|
'databaseConnectionString': None,
|
|
|
|
'databaseQuery': None,
|
2023-07-07 22:28:20 +02:00
|
|
|
'description': None,
|
2022-12-16 21:39:18 +01:00
|
|
|
'dns_last_result': None,
|
2022-12-30 21:37:10 +01:00
|
|
|
'dns_resolve_server': '1.1.1.1',
|
|
|
|
'dns_resolve_type': 'A',
|
2022-12-16 21:39:18 +01:00
|
|
|
'docker_container': None,
|
|
|
|
'docker_host': None,
|
2022-12-30 21:37:10 +01:00
|
|
|
'expiryNotification': False,
|
2023-07-07 22:28:20 +02:00
|
|
|
'forceInactive': False,
|
2023-02-13 22:51:21 +01:00
|
|
|
'game': None,
|
2022-12-30 21:37:10 +01:00
|
|
|
'grpcBody': None,
|
|
|
|
'grpcEnableTls': False,
|
|
|
|
'grpcMetadata': None,
|
|
|
|
'grpcMethod': None,
|
|
|
|
'grpcProtobuf': None,
|
|
|
|
'grpcServiceName': None,
|
|
|
|
'grpcUrl': None,
|
|
|
|
'headers': None,
|
|
|
|
'hostname': None,
|
2023-07-07 22:28:20 +02:00
|
|
|
'httpBodyEncoding': 'json',
|
2022-12-30 21:37:10 +01:00
|
|
|
'id': 1,
|
|
|
|
'ignoreTls': False,
|
|
|
|
'includeSensitiveData': True,
|
|
|
|
'interval': 60,
|
|
|
|
'keyword': None,
|
2022-12-29 00:22:53 +01:00
|
|
|
'maintenance': False,
|
2022-12-30 21:37:10 +01:00
|
|
|
'maxredirects': 10,
|
2023-07-07 22:28:20 +02:00
|
|
|
'maxretries': 0,
|
2022-12-30 21:37:10 +01:00
|
|
|
'method': 'GET',
|
2022-12-16 21:39:18 +01:00
|
|
|
'mqttPassword': None,
|
|
|
|
'mqttSuccessMessage': None,
|
2022-12-30 21:37:10 +01:00
|
|
|
'mqttTopic': None,
|
|
|
|
'mqttUsername': None,
|
|
|
|
'name': 'monitor 1',
|
|
|
|
'notificationIDList': [1, 2],
|
2023-02-13 22:51:21 +01:00
|
|
|
'packetSize': 56,
|
2023-07-07 22:28:20 +02:00
|
|
|
'parent': None,
|
|
|
|
'pathName': 'monitor 1',
|
2022-12-30 21:37:10 +01:00
|
|
|
'port': None,
|
|
|
|
'proxyId': None,
|
|
|
|
'pushToken': None,
|
2022-12-16 21:39:18 +01:00
|
|
|
'radiusCalledStationId': None,
|
|
|
|
'radiusCallingStationId': None,
|
2022-12-30 21:37:10 +01:00
|
|
|
'radiusPassword': None,
|
2022-12-16 21:39:18 +01:00
|
|
|
'radiusSecret': None,
|
2022-12-30 21:37:10 +01:00
|
|
|
'radiusUsername': None,
|
|
|
|
'resendInterval': 0,
|
|
|
|
'retryInterval': 60,
|
|
|
|
'tags': [],
|
2023-07-07 22:28:20 +02:00
|
|
|
'tlsCa': None,
|
|
|
|
'tlsCert': None,
|
|
|
|
'tlsKey': None,
|
|
|
|
'type': <MonitorType.HTTP: 'http'>,
|
2022-12-30 21:37:10 +01:00
|
|
|
'upsideDown': False,
|
|
|
|
'url': 'http://127.0.0.1',
|
|
|
|
'weight': 2000
|
2022-12-16 21:39:18 +01:00
|
|
|
}
|
|
|
|
]
|
|
|
|
"""
|
2023-03-20 15:14:39 +01:00
|
|
|
|
|
|
|
# TODO: replace with getMonitorList?
|
|
|
|
|
2022-08-05 15:52:19 +02:00
|
|
|
r = list(self._get_event_data(Event.MONITOR_LIST).values())
|
2022-09-12 22:45:43 +02:00
|
|
|
for monitor in r:
|
|
|
|
_convert_monitor_return(monitor)
|
2022-07-07 13:29:06 +02:00
|
|
|
int_to_bool(r, ["active"])
|
2023-05-25 21:26:54 +02:00
|
|
|
parse_monitor_type(r)
|
|
|
|
parse_auth_method(r)
|
2022-07-07 13:29:06 +02:00
|
|
|
return r
|
2022-07-02 16:00:54 +02:00
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
def get_monitor(self, id_: int) -> dict:
|
|
|
|
"""
|
|
|
|
Get a monitor.
|
|
|
|
|
2022-12-17 15:09:27 +01:00
|
|
|
:param int id_: The monitor id.
|
2022-12-16 21:39:18 +01:00
|
|
|
:return: The monitor.
|
2022-12-17 15:30:29 +01:00
|
|
|
:rtype: dict
|
|
|
|
:raises UptimeKumaException: If the server returns an error.
|
2022-12-16 21:39:18 +01:00
|
|
|
|
|
|
|
Example::
|
|
|
|
|
|
|
|
>>> api.get_monitor(1)
|
|
|
|
{
|
2022-12-30 21:37:10 +01:00
|
|
|
'accepted_statuscodes': ['200-299'],
|
2022-12-16 21:39:18 +01:00
|
|
|
'active': True,
|
2022-12-30 21:37:10 +01:00
|
|
|
'authDomain': None,
|
2023-05-25 21:26:54 +02:00
|
|
|
'authMethod': <AuthMethod.NONE: ''>,
|
2022-12-30 21:37:10 +01:00
|
|
|
'authWorkstation': None,
|
|
|
|
'basic_auth_pass': None,
|
|
|
|
'basic_auth_user': None,
|
|
|
|
'body': None,
|
2023-07-07 22:28:20 +02:00
|
|
|
'childrenIDs': [],
|
2022-12-30 21:37:10 +01:00
|
|
|
'databaseConnectionString': None,
|
|
|
|
'databaseQuery': None,
|
2023-07-07 22:28:20 +02:00
|
|
|
'description': None,
|
2022-12-16 21:39:18 +01:00
|
|
|
'dns_last_result': None,
|
2022-12-30 21:37:10 +01:00
|
|
|
'dns_resolve_server': '1.1.1.1',
|
|
|
|
'dns_resolve_type': 'A',
|
2022-12-16 21:39:18 +01:00
|
|
|
'docker_container': None,
|
|
|
|
'docker_host': None,
|
2023-08-29 18:37:27 +02:00
|
|
|
'expectedValue': None,
|
2022-12-30 21:37:10 +01:00
|
|
|
'expiryNotification': False,
|
2023-07-07 22:28:20 +02:00
|
|
|
'forceInactive': False,
|
2023-02-13 22:51:21 +01:00
|
|
|
'game': None,
|
2023-08-29 18:37:27 +02:00
|
|
|
'gamedigGivenPortOnly': True,
|
2022-12-30 21:37:10 +01:00
|
|
|
'grpcBody': None,
|
|
|
|
'grpcEnableTls': False,
|
|
|
|
'grpcMetadata': None,
|
|
|
|
'grpcMethod': None,
|
|
|
|
'grpcProtobuf': None,
|
|
|
|
'grpcServiceName': None,
|
|
|
|
'grpcUrl': None,
|
|
|
|
'headers': None,
|
|
|
|
'hostname': None,
|
2023-07-07 22:28:20 +02:00
|
|
|
'httpBodyEncoding': 'json',
|
2022-12-30 21:37:10 +01:00
|
|
|
'id': 1,
|
|
|
|
'ignoreTls': False,
|
|
|
|
'includeSensitiveData': True,
|
|
|
|
'interval': 60,
|
2023-08-29 18:37:27 +02:00
|
|
|
'invertKeyword': False,
|
|
|
|
'jsonPath': None,
|
|
|
|
'kafkaProducerAllowAutoTopicCreation': False,
|
|
|
|
'kafkaProducerBrokers': None,
|
|
|
|
'kafkaProducerMessage': None,
|
|
|
|
'kafkaProducerSaslOptions': None,
|
|
|
|
'kafkaProducerSsl': False,
|
|
|
|
'kafkaProducerTopic': None,
|
2022-12-30 21:37:10 +01:00
|
|
|
'keyword': None,
|
2022-12-29 00:22:53 +01:00
|
|
|
'maintenance': False,
|
2022-12-30 21:37:10 +01:00
|
|
|
'maxredirects': 10,
|
2023-07-07 22:28:20 +02:00
|
|
|
'maxretries': 0,
|
2022-12-30 21:37:10 +01:00
|
|
|
'method': 'GET',
|
2023-08-29 18:37:27 +02:00
|
|
|
'mqttPassword': '',
|
|
|
|
'mqttSuccessMessage': '',
|
|
|
|
'mqttTopic': '',
|
|
|
|
'mqttUsername': '',
|
2022-12-30 21:37:10 +01:00
|
|
|
'name': 'monitor 1',
|
|
|
|
'notificationIDList': [1, 2],
|
2023-08-29 18:37:27 +02:00
|
|
|
'oauth_auth_method': None,
|
|
|
|
'oauth_client_id': None,
|
|
|
|
'oauth_client_secret': None,
|
|
|
|
'oauth_scopes': None,
|
|
|
|
'oauth_token_url': None,
|
2023-02-13 22:51:21 +01:00
|
|
|
'packetSize': 56,
|
2023-07-07 22:28:20 +02:00
|
|
|
'parent': None,
|
|
|
|
'pathName': 'monitor 1',
|
2022-12-30 21:37:10 +01:00
|
|
|
'port': None,
|
|
|
|
'proxyId': None,
|
|
|
|
'pushToken': None,
|
2022-12-16 21:39:18 +01:00
|
|
|
'radiusCalledStationId': None,
|
|
|
|
'radiusCallingStationId': None,
|
2022-12-30 21:37:10 +01:00
|
|
|
'radiusPassword': None,
|
2022-12-16 21:39:18 +01:00
|
|
|
'radiusSecret': None,
|
2022-12-30 21:37:10 +01:00
|
|
|
'radiusUsername': None,
|
|
|
|
'resendInterval': 0,
|
|
|
|
'retryInterval': 60,
|
2023-08-29 18:37:27 +02:00
|
|
|
'screenshot': None,
|
2022-12-30 21:37:10 +01:00
|
|
|
'tags': [],
|
2023-08-29 18:37:27 +02:00
|
|
|
'timeout': 48,
|
2023-07-07 22:28:20 +02:00
|
|
|
'tlsCa': None,
|
|
|
|
'tlsCert': None,
|
|
|
|
'tlsKey': None,
|
|
|
|
'type': <MonitorType.HTTP: 'http'>,
|
2022-12-30 21:37:10 +01:00
|
|
|
'upsideDown': False,
|
|
|
|
'url': 'http://127.0.0.1',
|
|
|
|
'weight': 2000
|
2022-12-16 21:39:18 +01:00
|
|
|
}
|
|
|
|
"""
|
2022-07-07 13:29:06 +02:00
|
|
|
r = self._call('getMonitor', id_)["monitor"]
|
2022-09-12 22:45:43 +02:00
|
|
|
_convert_monitor_return(r)
|
2022-07-07 13:29:06 +02:00
|
|
|
int_to_bool(r, ["active"])
|
2023-05-25 21:26:54 +02:00
|
|
|
parse_monitor_type(r)
|
|
|
|
parse_auth_method(r)
|
2022-07-07 13:29:06 +02:00
|
|
|
return r
|
2022-07-02 16:00:54 +02:00
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
def pause_monitor(self, id_: int) -> dict:
|
|
|
|
"""
|
|
|
|
Pauses a monitor.
|
|
|
|
|
2022-12-17 15:09:27 +01:00
|
|
|
:param int id_: The monitor id.
|
2022-12-16 21:39:18 +01:00
|
|
|
:return: The server response.
|
2022-12-17 15:30:29 +01:00
|
|
|
:rtype: dict
|
|
|
|
:raises UptimeKumaException: If the server returns an error.
|
2022-12-16 21:39:18 +01:00
|
|
|
|
|
|
|
Example::
|
|
|
|
|
|
|
|
>>> api.pause_monitor(1)
|
|
|
|
{
|
|
|
|
'msg': 'Paused Successfully.'
|
|
|
|
}
|
|
|
|
"""
|
2022-07-09 19:52:21 +02:00
|
|
|
return self._call('pauseMonitor', id_)
|
2022-07-02 16:00:54 +02:00
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
def resume_monitor(self, id_: int) -> dict:
|
|
|
|
"""
|
|
|
|
Resumes a monitor.
|
|
|
|
|
2022-12-17 15:09:27 +01:00
|
|
|
:param int id_: The monitor id.
|
2022-12-16 21:39:18 +01:00
|
|
|
:return: The server response.
|
2022-12-17 15:30:29 +01:00
|
|
|
:rtype: dict
|
|
|
|
:raises UptimeKumaException: If the server returns an error.
|
2022-12-16 21:39:18 +01:00
|
|
|
|
|
|
|
Example::
|
|
|
|
|
|
|
|
>>> api.resume_monitor(1)
|
|
|
|
{
|
|
|
|
'msg': 'Resumed Successfully.'
|
|
|
|
}
|
|
|
|
"""
|
2022-07-09 19:52:21 +02:00
|
|
|
return self._call('resumeMonitor', id_)
|
2022-07-02 16:00:54 +02:00
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
def delete_monitor(self, id_: int) -> dict:
|
|
|
|
"""
|
|
|
|
Deletes a monitor.
|
|
|
|
|
2022-12-17 15:09:27 +01:00
|
|
|
:param int id_: The monitor id.
|
2022-12-16 21:39:18 +01:00
|
|
|
:return: The server response.
|
2022-12-17 15:30:29 +01:00
|
|
|
:rtype: dict
|
|
|
|
:raises UptimeKumaException: If the server returns an error.
|
2022-12-16 21:39:18 +01:00
|
|
|
|
|
|
|
Example::
|
|
|
|
|
|
|
|
>>> api.delete_monitor(1)
|
|
|
|
{
|
|
|
|
'msg': 'Deleted Successfully.'
|
|
|
|
}
|
|
|
|
"""
|
2022-10-04 18:38:17 +02:00
|
|
|
with self.wait_for_event(Event.MONITOR_LIST):
|
2023-05-20 14:09:09 +02:00
|
|
|
if id_ not in [i["id"] for i in self.get_monitors()]:
|
|
|
|
raise UptimeKumaException("monitor does not exist")
|
2022-10-04 18:38:17 +02:00
|
|
|
return self._call('deleteMonitor', id_)
|
2022-07-02 16:00:54 +02:00
|
|
|
|
2023-05-01 18:57:55 +02:00
|
|
|
def get_monitor_beats(self, id_: int, hours: int) -> list[dict]:
|
2022-12-16 21:39:18 +01:00
|
|
|
"""
|
|
|
|
Get monitor beats for a specific monitor in a time range.
|
|
|
|
|
2022-12-17 15:09:27 +01:00
|
|
|
:param int id_: The monitor id.
|
|
|
|
:param int hours: Period time in hours from now.
|
2022-12-16 21:39:18 +01:00
|
|
|
:return: The server response.
|
2022-12-17 15:30:29 +01:00
|
|
|
:rtype: list
|
|
|
|
:raises UptimeKumaException: If the server returns an error.
|
2022-12-16 21:39:18 +01:00
|
|
|
|
|
|
|
Example::
|
|
|
|
|
|
|
|
>>> api.get_monitor_beats(1, 6)
|
|
|
|
[
|
|
|
|
{
|
|
|
|
'down_count': 0,
|
|
|
|
'duration': 0,
|
|
|
|
'id': 25,
|
|
|
|
'important': True,
|
|
|
|
'monitor_id': 1,
|
|
|
|
'msg': '200 - OK',
|
|
|
|
'ping': 201,
|
2023-05-02 20:34:26 +02:00
|
|
|
'status': <MonitorStatus.UP: 1>,
|
2022-12-16 21:39:18 +01:00
|
|
|
'time': '2022-12-15 12:38:42.661'
|
|
|
|
},
|
|
|
|
{
|
|
|
|
'down_count': 0,
|
|
|
|
'duration': 60,
|
|
|
|
'id': 26,
|
|
|
|
'important': False,
|
|
|
|
'monitor_id': 1,
|
|
|
|
'msg': '200 - OK',
|
|
|
|
'ping': 193,
|
2023-05-02 20:34:26 +02:00
|
|
|
'status': <MonitorStatus.UP: 1>,
|
2022-12-16 21:39:18 +01:00
|
|
|
'time': '2022-12-15 12:39:42.878'
|
|
|
|
},
|
|
|
|
...
|
|
|
|
]
|
|
|
|
"""
|
2022-07-07 13:29:06 +02:00
|
|
|
r = self._call('getMonitorBeats', (id_, hours))["data"]
|
2023-05-02 20:34:26 +02:00
|
|
|
int_to_bool(r, ["important"])
|
|
|
|
parse_monitor_status(r)
|
2022-07-07 13:29:06 +02:00
|
|
|
return r
|
2022-07-02 16:00:54 +02:00
|
|
|
|
2023-05-01 18:57:55 +02:00
|
|
|
def get_game_list(self) -> list[dict]:
|
2023-02-13 22:51:21 +01:00
|
|
|
"""
|
|
|
|
Get a list of games that are supported by the GameDig monitor type.
|
|
|
|
|
|
|
|
:return: The server response.
|
|
|
|
:rtype: list
|
|
|
|
:raises UptimeKumaException: If the server returns an error.
|
|
|
|
|
|
|
|
Example::
|
|
|
|
|
|
|
|
>>> api.get_game_list()
|
|
|
|
[
|
|
|
|
{
|
|
|
|
'extra': {},
|
|
|
|
'keys': ['7d2d'],
|
|
|
|
'options': {
|
|
|
|
'port': 26900,
|
|
|
|
'port_query_offset': 1,
|
|
|
|
'protocol': 'valve'
|
|
|
|
},
|
|
|
|
'pretty': '7 Days to Die (2013)'
|
|
|
|
},
|
|
|
|
{
|
|
|
|
'extra': {},
|
|
|
|
'keys': ['arma2'],
|
|
|
|
'options': {
|
|
|
|
'port': 2302,
|
|
|
|
'port_query_offset': 1,
|
|
|
|
'protocol': 'valve'
|
|
|
|
},
|
|
|
|
'pretty': 'ARMA 2 (2009)'
|
|
|
|
},
|
|
|
|
...
|
|
|
|
]
|
|
|
|
"""
|
|
|
|
r = self._call('getGameList')
|
2023-05-01 18:57:55 +02:00
|
|
|
return r.get("gameList")
|
2023-02-13 22:51:21 +01:00
|
|
|
|
2023-08-29 18:37:27 +02:00
|
|
|
def test_chrome(self, executable) -> dict:
|
|
|
|
"""
|
|
|
|
Test if the chrome executable is valid and return the version.
|
|
|
|
|
|
|
|
:return: The server response.
|
|
|
|
:rtype: dict
|
|
|
|
:raises UptimeKumaException: If the server returns an error.
|
|
|
|
|
|
|
|
Example::
|
|
|
|
|
|
|
|
>>> api.test_chrome("/usr/bin/chromium")
|
|
|
|
{
|
|
|
|
'msg': 'Found Chromium/Chrome. Version: 90.0.4430.212'
|
|
|
|
}
|
|
|
|
"""
|
|
|
|
return self._call('testChrome', executable)
|
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
@append_docstring(monitor_docstring("add"))
|
|
|
|
def add_monitor(self, **kwargs) -> dict:
|
|
|
|
"""
|
|
|
|
Adds a new monitor.
|
|
|
|
|
|
|
|
:return: The server response.
|
2022-12-17 15:09:27 +01:00
|
|
|
:rtype: dict
|
2022-12-17 15:30:29 +01:00
|
|
|
:raises UptimeKumaException: If the server returns an error.
|
2022-12-16 21:39:18 +01:00
|
|
|
|
|
|
|
Example::
|
|
|
|
|
|
|
|
>>> api.add_monitor(
|
|
|
|
... type=MonitorType.HTTP,
|
|
|
|
... name="Google",
|
|
|
|
... url="https://google.com"
|
|
|
|
... )
|
|
|
|
{
|
|
|
|
'msg': 'Added Successfully.',
|
|
|
|
'monitorID': 1
|
|
|
|
}
|
|
|
|
"""
|
2022-09-07 13:03:10 +02:00
|
|
|
data = self._build_monitor_data(**kwargs)
|
2022-09-12 22:45:43 +02:00
|
|
|
_convert_monitor_input(data)
|
2022-07-10 18:07:11 +02:00
|
|
|
_check_arguments_monitor(data)
|
2022-10-04 18:38:17 +02:00
|
|
|
with self.wait_for_event(Event.MONITOR_LIST):
|
|
|
|
return self._call('add', data)
|
2022-07-02 16:00:54 +02:00
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
@append_docstring(monitor_docstring("edit"))
|
|
|
|
def edit_monitor(self, id_: int, **kwargs) -> dict:
|
|
|
|
"""
|
|
|
|
Edits an existing monitor.
|
|
|
|
|
2022-12-17 15:09:27 +01:00
|
|
|
:param int id_: The monitor id.
|
2022-12-16 21:39:18 +01:00
|
|
|
:return: The server response.
|
2022-12-17 15:30:29 +01:00
|
|
|
:rtype: dict
|
|
|
|
:raises UptimeKumaException: If the server returns an error.
|
2022-12-16 21:39:18 +01:00
|
|
|
|
|
|
|
Example::
|
|
|
|
|
2023-02-13 22:51:21 +01:00
|
|
|
>>> api.edit_monitor(1,
|
|
|
|
... interval=20
|
|
|
|
... )
|
2022-12-16 21:39:18 +01:00
|
|
|
{
|
|
|
|
'monitorID': 1,
|
|
|
|
'msg': 'Saved.'
|
|
|
|
}
|
|
|
|
"""
|
2022-07-07 13:29:06 +02:00
|
|
|
data = self.get_monitor(id_)
|
2022-07-09 19:52:21 +02:00
|
|
|
data.update(kwargs)
|
2022-09-12 22:45:43 +02:00
|
|
|
_convert_monitor_input(data)
|
2022-07-10 18:07:11 +02:00
|
|
|
_check_arguments_monitor(data)
|
2022-10-04 18:38:17 +02:00
|
|
|
with self.wait_for_event(Event.MONITOR_LIST):
|
|
|
|
return self._call('editMonitor', data)
|
2022-07-02 16:00:54 +02:00
|
|
|
|
|
|
|
# monitor tags
|
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
def add_monitor_tag(self, tag_id: int, monitor_id: int, value: str = "") -> dict:
|
|
|
|
"""
|
|
|
|
Add a tag to a monitor.
|
|
|
|
|
2022-12-17 15:09:27 +01:00
|
|
|
:param int tag_id: Id of the tag.
|
|
|
|
:param int monitor_id: Id of the monitor to add the tag to.
|
|
|
|
:param str, optional value: Value of the tag., defaults to ""
|
2022-12-16 21:39:18 +01:00
|
|
|
:return: The server response.
|
2022-12-17 15:30:29 +01:00
|
|
|
:rtype: dict
|
|
|
|
:raises UptimeKumaException: If the server returns an error.
|
2022-12-16 21:39:18 +01:00
|
|
|
|
|
|
|
Example::
|
|
|
|
|
|
|
|
>>> api.add_monitor_tag(
|
|
|
|
... tag_id=1,
|
|
|
|
... monitor_id=1,
|
|
|
|
... value="test"
|
|
|
|
... )
|
|
|
|
{
|
|
|
|
'msg': 'Added Successfully.'
|
|
|
|
}
|
|
|
|
"""
|
2022-10-04 18:38:17 +02:00
|
|
|
r = self._call('addMonitorTag', (tag_id, monitor_id, value))
|
|
|
|
# the monitor list event does not send the updated tags
|
|
|
|
self._event_data[Event.MONITOR_LIST][str(monitor_id)] = self.get_monitor(monitor_id)
|
|
|
|
return r
|
2022-07-02 16:00:54 +02:00
|
|
|
|
2022-07-07 13:29:06 +02:00
|
|
|
# editMonitorTag is unused in uptime-kuma
|
2022-07-05 22:12:37 +02:00
|
|
|
# def edit_monitor_tag(self, tag_id: int, monitor_id: int, value=""):
|
2022-07-07 13:29:06 +02:00
|
|
|
# return self._call('editMonitorTag', (tag_id, monitor_id, value))
|
2022-07-02 16:00:54 +02:00
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
def delete_monitor_tag(self, tag_id: int, monitor_id: int, value: str = "") -> dict:
    """
    Delete a tag from a monitor.

    :param int tag_id: Id of the tag to remove.
    :param int monitor_id: Id of monitor to remove the tag from.
    :param str, optional value: Value of the tag., defaults to ""
    :return: The server response.
    :rtype: dict
    :raises UptimeKumaException: If the monitor tag does not exist or the server returns an error.
    """
    with self.wait_for_event(Event.MONITOR_LIST):
        # Collect every existing (monitor, tag, value) triple so we can fail
        # early with a clear client-side error message.
        tags = [
            {
                "monitor_id": y["monitor_id"],
                "tag_id": y["tag_id"],
                "value": y["value"]
            }
            for i in self.get_monitors()
            # "tags" may be missing on a monitor (``.get`` can return None);
            # treat that as an empty list instead of raising a TypeError.
            for y in (i.get("tags") or [])
        ]
        if {"monitor_id": monitor_id, "tag_id": tag_id, "value": value} not in tags:
            raise UptimeKumaException("monitor tag does not exist")
        r = self._call('deleteMonitorTag', (tag_id, monitor_id, value))
        # the monitor list event does not send the updated tags
        self._event_data[Event.MONITOR_LIST][str(monitor_id)] = self.get_monitor(monitor_id)
        return r
|
2022-07-02 16:00:54 +02:00
|
|
|
|
2022-10-04 18:38:17 +02:00
|
|
|
# notification
|
2022-07-02 16:00:54 +02:00
|
|
|
|
2023-05-01 18:57:55 +02:00
|
|
|
def get_notifications(self) -> list[dict]:
    """
    Get all notifications.

    The ``config`` field of each raw notification is a JSON string; it is
    decoded and its keys are merged into the returned dict.

    :return: All notifications.
    :rtype: list
    """
    raw_notifications = self._get_event_data(Event.NOTIFICATION_LIST)
    result = []
    for raw in raw_notifications:
        # keep everything except the serialized config ...
        entry = {key: value for key, value in raw.items() if key != "config"}
        # ... then flatten the decoded config into the entry
        entry.update(json.loads(raw["config"]))
        result.append(entry)
    parse_notification_type(result)
    return result
|
2022-07-05 22:12:37 +02:00
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
def get_notification(self, id_: int) -> dict:
    """
    Get a notification.

    :param int id_: Id of the notification to get.
    :return: The notification.
    :rtype: dict
    :raises UptimeKumaException: If the notification does not exist.
    """
    found = next(
        (n for n in self.get_notifications() if n["id"] == id_),
        None
    )
    if found is None:
        raise UptimeKumaException("notification does not exist")
    return found
|
2022-07-02 16:00:54 +02:00
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
@append_docstring(notification_docstring("test"))
def test_notification(self, **kwargs) -> dict:
    """
    Test a notification.

    :return: The server response.
    :rtype: dict
    :raises UptimeKumaException: If the server returns an error.
    """
    # Build and validate the payload locally before contacting the server.
    payload = _build_notification_data(**kwargs)
    _check_arguments_notification(payload)
    return self._call('testNotification', payload)
|
2022-07-02 16:00:54 +02:00
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
@append_docstring(notification_docstring("add"))
def add_notification(self, **kwargs) -> dict:
    """
    Add a notification.

    :return: The server response.
    :rtype: dict
    :raises UptimeKumaException: If the server returns an error.
    """
    # Build and validate the payload locally before contacting the server.
    payload = _build_notification_data(**kwargs)
    _check_arguments_notification(payload)
    with self.wait_for_event(Event.NOTIFICATION_LIST):
        # the second tuple element is the notification id; None means "create"
        return self._call('addNotification', (payload, None))
|
2022-07-02 16:00:54 +02:00
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
@append_docstring(notification_docstring("edit"))
def edit_notification(self, id_: int, **kwargs) -> dict:
    """
    Edit a notification.

    :param int id_: Id of the notification to edit.
    :return: The server response.
    :rtype: dict
    :raises UptimeKumaException: If the server returns an error.
    """
    notification = self.get_notification(id_)

    # When the provider type changes, strip the options that belong to the
    # previous provider(s) so stale keys do not linger on the object.
    if "type" in kwargs and kwargs["type"] != notification["type"]:
        stale_options = {
            option
            for provider, options in notification_provider_options.items()
            if provider != kwargs["type"]
            for option in options
        }
        for option in stale_options:
            notification.pop(option, None)

    notification.update(kwargs)
    _check_arguments_notification(notification)
    with self.wait_for_event(Event.NOTIFICATION_LIST):
        # passing an existing id makes 'addNotification' act as an update
        return self._call('addNotification', (notification, id_))
|
2022-07-02 16:00:54 +02:00
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
def delete_notification(self, id_: int) -> dict:
    """
    Delete a notification.

    :param int id_: Id of the notification to delete.
    :return: The server response.
    :rtype: dict
    :raises UptimeKumaException: If the notification does not exist or the server returns an error.
    """
    with self.wait_for_event(Event.NOTIFICATION_LIST):
        known_ids = [n["id"] for n in self.get_notifications()]
        if id_ not in known_ids:
            raise UptimeKumaException("notification does not exist")
        return self._call('deleteNotification', id_)
|
2022-07-02 16:00:54 +02:00
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
def check_apprise(self) -> bool:
    """
    Check if apprise exists.

    :return: The server response.
    :rtype: bool
    :raises UptimeKumaException: If the server returns an error.
    """
    # Thin pass-through; the server reports whether apprise is available.
    return self._call('checkApprise')
|
2022-07-02 16:00:54 +02:00
|
|
|
|
|
|
|
# proxy
|
|
|
|
|
2023-05-01 18:57:55 +02:00
|
|
|
def get_proxies(self) -> list[dict]:
    """
    Get all proxies.

    Integer flags from the server are normalized to booleans and the
    protocol string is converted to a :class:`ProxyProtocol` value.

    :return: All proxies.
    :rtype: list
    """
    proxies = self._get_event_data(Event.PROXY_LIST)
    int_to_bool(proxies, ["auth", "active", "default", "applyExisting"])
    parse_proxy_protocol(proxies)
    return proxies
|
2022-07-05 22:12:37 +02:00
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
def get_proxy(self, id_: int) -> dict:
    """
    Get a proxy.

    :param int id_: Id of the proxy to get.
    :return: The proxy.
    :rtype: dict
    :raises UptimeKumaException: If the proxy does not exist.
    """
    found = next(
        (p for p in self.get_proxies() if p.get("id") == id_),
        None
    )
    if found is None:
        raise UptimeKumaException("proxy does not exist")
    return found
|
2022-07-02 16:00:54 +02:00
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
@append_docstring(proxy_docstring("add"))
def add_proxy(self, **kwargs) -> dict:
    """
    Add a proxy.

    :return: The server response.
    :rtype: dict
    :raises UptimeKumaException: If the server returns an error.
    """
    # Build and validate the payload locally before contacting the server.
    proxy = _build_proxy_data(**kwargs)
    _check_arguments_proxy(proxy)
    with self.wait_for_event(Event.PROXY_LIST):
        # the second tuple element is the proxy id; None means "create"
        return self._call('addProxy', (proxy, None))
|
2022-07-02 20:26:18 +02:00
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
@append_docstring(proxy_docstring("edit"))
def edit_proxy(self, id_: int, **kwargs) -> dict:
    """
    Edit a proxy.

    :param int id_: Id of the proxy to edit.
    :return: The server response.
    :rtype: dict
    :raises UptimeKumaException: If the server returns an error.
    """
    # Start from the current proxy state and overlay the changed fields.
    proxy = {**self.get_proxy(id_), **kwargs}
    _check_arguments_proxy(proxy)
    with self.wait_for_event(Event.PROXY_LIST):
        # passing an existing id makes 'addProxy' act as an update
        return self._call('addProxy', (proxy, id_))
|
2022-07-02 20:26:18 +02:00
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
def delete_proxy(self, id_: int) -> dict:
    """
    Delete a proxy.

    :param int id_: Id of the proxy to delete.
    :return: The server response.
    :rtype: dict
    :raises UptimeKumaException: If the proxy does not exist or the server returns an error.
    """
    with self.wait_for_event(Event.PROXY_LIST):
        known_ids = [p["id"] for p in self.get_proxies()]
        if id_ not in known_ids:
            raise UptimeKumaException("proxy does not exist")
        return self._call('deleteProxy', id_)
|
2022-07-02 20:26:18 +02:00
|
|
|
|
2022-07-02 16:00:54 +02:00
|
|
|
# status page
|
|
|
|
|
2023-05-01 18:57:55 +02:00
|
|
|
def get_status_pages(self) -> list[dict]:
    """
    Get all status pages.

    :return: All status pages.
    :rtype: list
    """
    # The event cache maps status page ids to status page dicts; callers
    # only need the values.
    pages = self._get_event_data(Event.STATUS_PAGE_LIST)
    return [*pages.values()]
|
2022-07-02 20:40:14 +02:00
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
def get_status_page(self, slug: str) -> dict:
    """
    Get a status page.

    Combines the socket.io config with the public REST endpoint, which
    additionally provides the incident, the public group list and the
    maintenance list.

    :param str slug: Slug
    :return: The status page.
    :rtype: dict
    :raises UptimeKumaException: If the server returns an error.
    """
    socket_response = self._call('getStatusPage', slug)
    try:
        rest_response = requests.get(
            f"{self.url}/api/status-page/{slug}", timeout=self.timeout
        ).json()
    except requests.exceptions.Timeout as e:
        raise Timeout(e)

    # REST config values take precedence over the socket.io config values.
    merged_config = socket_response["config"]
    merged_config.update(rest_response["config"])

    data = {
        **merged_config,
        "incident": rest_response["incident"],
        "publicGroupList": rest_response["publicGroupList"],
        "maintenanceList": rest_response["maintenanceList"]
    }
    parse_incident_style(data["incident"])
    # convert sendUrl from int to bool
    for group in data["publicGroupList"]:
        for monitor in group["monitorList"]:
            int_to_bool(monitor, ["sendUrl"])
    return data
|
2022-07-06 21:29:40 +02:00
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
def add_status_page(self, slug: str, title: str) -> dict:
    """
    Add a status page.

    :param str slug: Slug
    :param str title: Title
    :return: The server response.
    :rtype: dict
    :raises UptimeKumaException: If the server returns an error.
    """
    with self.wait_for_event(Event.STATUS_PAGE_LIST):
        # note: the server expects (title, slug), not (slug, title)
        return self._call('addStatusPage', (title, slug))
|
2022-07-05 22:12:37 +02:00
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
def delete_status_page(self, slug: str) -> dict:
    """
    Delete a status page.

    :param str slug: Slug
    :return: The server response.
    :rtype: dict
    :raises UptimeKumaException: If the status page does not exist or the server returns an error.
    """
    with self.wait_for_event(Event.STATUS_PAGE_LIST):
        if slug not in (p["slug"] for p in self.get_status_pages()):
            raise UptimeKumaException("status page does not exist")
        response = self._call('deleteStatusPage', slug)

        # uptime kuma does not send the status page list event when a status
        # page is deleted, so drop the cached entry ourselves
        cache = self._event_data[Event.STATUS_PAGE_LIST]
        for page in cache.values():
            if page["slug"] == slug:
                del cache[str(page["id"])]
                break

        return response
|
2022-07-05 22:12:37 +02:00
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
def save_status_page(self, slug: str, **kwargs) -> dict:
    """
    Save a status page.

    :param str slug: Slug
    :param int id: Id of the status page to save
    :param str, optional title: Title, defaults to None
    :param str, optional description: Description, defaults to None
    :param str, optional theme: Switch Theme, defaults to "auto"
    :param bool, optional published: Published, defaults to True
    :param bool, optional showTags: Show Tags, defaults to False
    :param list, optional domainNameList: Domain Names, defaults to None
    :param str, optional googleAnalyticsId: Google Analytics ID, defaults to None
    :param str, optional customCSS: Custom CSS, defaults to ""
    :param str, optional footerText: Custom Footer, defaults to None
    :param bool, optional showPoweredBy: Show Powered By, defaults to True
    :param bool, optional showCertificateExpiry: Show Certificate Expiry, defaults to False
    :param str, optional icon: Icon, defaults to "/icon.svg"
    :param list, optional publicGroupList: Public Group List, defaults to None
    :return: The server response.
    :rtype: dict
    :raises UptimeKumaException: If the server returns an error.
    """
    status_page = self.get_status_page(slug)
    # incident and maintenanceList are returned by get_status_page but are
    # not part of the save payload
    for transient_key in ("incident", "maintenanceList"):
        status_page.pop(transient_key)
    status_page.update(kwargs)
    payload = self._build_status_page_data(**status_page)
    response = self._call('saveStatusPage', payload)

    # uptime kuma does not send the status page list event when a status
    # page is saved, so refresh the cached entry manually
    saved_config = self._call('getStatusPage', slug)["config"]
    if self._event_data[Event.STATUS_PAGE_LIST] is None:
        self._event_data[Event.STATUS_PAGE_LIST] = {}
    self._event_data[Event.STATUS_PAGE_LIST][str(saved_config["id"])] = saved_config

    return response
|
2022-07-02 20:40:14 +02:00
|
|
|
|
2022-07-05 22:12:37 +02:00
|
|
|
def post_incident(
    self,
    slug: str,
    title: str,
    content: str,
    style: IncidentStyle = IncidentStyle.PRIMARY
) -> dict:
    """
    Post an incident to status page.

    :param str slug: Slug
    :param str title: Title
    :param str content: Content
    :param IncidentStyle, optional style: Style, defaults to :attr:`~.IncidentStyle.PRIMARY`
    :return: The server response.
    :rtype: dict
    :raises UptimeKumaException: If the server returns an error.
    """
    payload = {
        "title": title,
        "content": content,
        "style": style
    }
    incident = self._call('postIncident', (slug, payload))["incident"]
    # persist the page so the incident becomes visible
    self.save_status_page(slug)
    parse_incident_style(incident)
    return incident
|
2022-07-05 22:12:37 +02:00
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
def unpin_incident(self, slug: str) -> dict:
    """
    Unpin an incident from a status page.

    :param str slug: Slug
    :return: The server response.
    :rtype: dict
    :raises UptimeKumaException: If the server returns an error.
    """
    response = self._call('unpinIncident', slug)
    # persist the page so the change becomes visible
    self.save_status_page(slug)
    return response
|
2022-07-02 20:40:14 +02:00
|
|
|
|
2022-07-02 16:00:54 +02:00
|
|
|
# heartbeat
|
|
|
|
|
2023-05-19 14:07:34 +02:00
|
|
|
def get_heartbeats(self) -> dict:
    """
    Get heartbeats.

    The ``important`` flag is normalized to a boolean and the status is
    converted to a :class:`MonitorStatus` value.

    :return: The heartbeats for each monitor id.
    :rtype: dict
    """
    heartbeats = self._get_event_data(Event.HEARTBEAT_LIST)
    for beats in heartbeats.values():
        int_to_bool(beats, ["important"])
        parse_monitor_status(beats)
    return heartbeats
|
2022-07-02 16:00:54 +02:00
|
|
|
|
2023-05-19 14:07:34 +02:00
|
|
|
def get_important_heartbeats(self) -> dict:
    """
    Get important heartbeats.

    The ``important`` flag is normalized to a boolean and the status is
    converted to a :class:`MonitorStatus` value.

    :return: The important heartbeats for each monitor id.
    :rtype: dict
    """
    heartbeats = self._get_event_data(Event.IMPORTANT_HEARTBEAT_LIST)
    for beats in heartbeats.values():
        int_to_bool(beats, ["important"])
        parse_monitor_status(beats)
    return heartbeats
|
2022-07-02 16:00:54 +02:00
|
|
|
|
|
|
|
# avg ping
|
|
|
|
|
2023-05-19 14:07:34 +02:00
|
|
|
def avg_ping(self) -> dict:
    """
    Get average ping.

    :return: The average ping for each monitor id.
    :rtype: dict
    """
    # Plain pass-through of the cached AVG_PING event data.
    return self._get_event_data(Event.AVG_PING)
|
2022-07-02 16:00:54 +02:00
|
|
|
|
2022-08-05 15:48:02 +02:00
|
|
|
# cert info
|
|
|
|
|
2023-05-19 14:07:34 +02:00
|
|
|
def cert_info(self) -> dict:
    """
    Get certificate info.

    :return: Certificate info for each monitor id for which a certificate
        can be extracted.
    :rtype: dict

    Example (abbreviated; ``certInfo`` additionally contains issuer
    details, fingerprints, the raw DER bytes and the full
    ``issuerCertificate`` chain)::

        >>> api.cert_info()
        {
            1: {
                'valid': True,
                'certInfo': {
                    'subject': {
                        'CN': 'www.google.de'
                    },
                    'valid_from': 'Apr  3 08:24:23 2023 GMT',
                    'valid_to': 'Jun 26 08:24:22 2023 GMT',
                    'validTo': '2023-06-26T08:24:22.000Z',
                    'validFor': ['www.google.de'],
                    'daysRemaining': 56
                }
            }
        }
    """
    # Pass the CERT_INFO event payload through untouched.
    return self._get_event_data(Event.CERT_INFO)
|
2022-08-05 15:48:02 +02:00
|
|
|
|
2022-07-02 16:00:54 +02:00
|
|
|
# uptime
|
|
|
|
|
2023-05-19 14:07:34 +02:00
|
|
|
def uptime(self) -> dict:
    """
    Get monitor uptime.

    :return: Monitor uptime.
    :rtype: dict

    Example::

        >>> api.uptime()
        {
            1: {
                24: 1,
                720: 1
            }
        }
    """
    # The UPTIME event payload needs no post-processing.
    result = self._get_event_data(Event.UPTIME)
    return result
|
2022-07-02 16:00:54 +02:00
|
|
|
|
|
|
|
# info
|
|
|
|
|
2022-09-07 13:03:10 +02:00
|
|
|
def info(self) -> dict:
    """
    Get server info.

    :return: Server info.
    :rtype: dict

    Example::

        >>> api.info()
        {
            'isContainer': True,
            'latestVersion': '1.23.1',
            'primaryBaseURL': '',
            'serverTimezone': 'Europe/Berlin',
            'serverTimezoneOffset': '+02:00',
            'version': '1.23.1'
        }
    """
    # Return the INFO event payload as-is.
    return self._get_event_data(Event.INFO)
|
2022-07-02 16:00:54 +02:00
|
|
|
|
|
|
|
# clear
|
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
def clear_events(self, monitor_id: int) -> dict:
    """
    Clear monitor events.

    :param int monitor_id: Id of the monitor to clear events.
    :return: The server response.
    :rtype: dict
    :raises UptimeKumaException: If the server returns an error.

    Example::

        >>> api.clear_events(1)
        {}
    """
    # Delegate straight to the socket.io "clearEvents" endpoint.
    response = self._call('clearEvents', monitor_id)
    return response
|
2022-07-02 16:00:54 +02:00
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
def clear_heartbeats(self, monitor_id: int) -> dict:
    """
    Clear all heartbeats of a monitor.

    :param int monitor_id: Id of the monitor to clear heartbeats.
    :return: The server response.
    :rtype: dict
    :raises UptimeKumaException: If the server returns an error.
    """
    response = self._call('clearHeartbeats', monitor_id)
    return response
|
2022-07-02 16:00:54 +02:00
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
def clear_statistics(self) -> dict:
    """
    Clear all statistics.

    :return: The server response.
    :rtype: dict
    :raises UptimeKumaException: If the server returns an error.
    """
    response = self._call('clearStatistics')
    return response
|
2022-07-02 16:00:54 +02:00
|
|
|
|
|
|
|
# tags
|
|
|
|
|
2023-05-01 18:57:55 +02:00
|
|
|
def get_tags(self) -> list[dict]:
    """
    Get all tags.

    :return: All tags.
    :rtype: list
    :raises UptimeKumaException: If the server returns an error.
    """
    # The server wraps the list in a {"tags": [...]} envelope.
    response = self._call('getTags')
    return response["tags"]
|
2022-07-02 16:00:54 +02:00
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
def get_tag(self, id_: int) -> dict:
    """
    Get a tag.

    :param int id_: Id of the tag to get.
    :return: The tag.
    :rtype: dict
    :raises UptimeKumaException: If the tag does not exist.

    Example::

        >>> api.get_tag(1)
        {
            'color': '#ffffff',
            'id': 1,
            'name': 'tag 1'
        }
    """
    # Fix: the original docstring described id_ as "Id of the monitor to
    # get" although this method looks up a tag, not a monitor.
    for tag in self.get_tags():
        if tag["id"] == id_:
            return tag
    raise UptimeKumaException("tag does not exist")
|
2022-07-05 22:12:37 +02:00
|
|
|
|
2023-02-13 22:51:21 +01:00
|
|
|
@append_docstring(tag_docstring("add"))
def add_tag(self, **kwargs) -> dict:
    """
    Add a tag.

    :return: The new tag.
    :rtype: dict
    :raises UptimeKumaException: If the server returns an error.
    """
    # Normalize the keyword arguments into the payload shape the server
    # expects, validate it, then unwrap the created tag from the response.
    payload = _build_tag_data(**kwargs)
    _check_arguments_tag(payload)
    response = self._call('addTag', payload)
    return response["tag"]
|
2022-07-02 16:00:54 +02:00
|
|
|
|
2023-02-13 22:51:21 +01:00
|
|
|
@append_docstring(tag_docstring("edit"))
def edit_tag(self, id_: int, **kwargs) -> dict:
    """
    Edit an existing tag.

    :param int id_: Id of the tag to edit.
    :return: The server response.
    :rtype: dict
    :raises UptimeKumaException: If the server returns an error.
    """
    # Start from the tag's current values so unspecified fields keep
    # their existing settings, then overlay the requested changes.
    payload = self.get_tag(id_)
    payload.update(kwargs)
    _check_arguments_tag(payload)
    return self._call('editTag', payload)
|
|
|
|
|
|
|
|
def delete_tag(self, id_: int) -> dict:
    """
    Delete a tag.

    :param int id_: Id of the tag to delete.
    :return: The server response.
    :rtype: dict
    :raises UptimeKumaException: If the tag does not exist or the server
        returns an error.

    Example::

        >>> api.delete_tag(1)
        {
            'msg': 'Deleted Successfully.'
        }
    """
    # Fix: the original docstring described id_ as "Id of the monitor to
    # delete" although this method deletes a tag, not a monitor.
    # Check existence client-side first so a clear error is raised.
    if not any(tag["id"] == id_ for tag in self.get_tags()):
        raise UptimeKumaException("tag does not exist")
    return self._call('deleteTag', id_)
|
2022-07-02 16:00:54 +02:00
|
|
|
|
|
|
|
# settings
|
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
def get_settings(self) -> dict:
    """
    Get settings.

    :return: Settings.
    :rtype: dict
    :raises UptimeKumaException: If the server returns an error.
    """
    # The server wraps the settings in a {"data": {...}} envelope.
    response = self._call('getSettings')
    return response["data"]
|
|
|
|
|
|
|
|
def set_settings(
    self,
    password: str = None,  # only required if disableAuth is true

    # about
    checkUpdate: bool = True,
    checkBeta: bool = False,

    # monitor history
    keepDataPeriodDays: int = 180,

    # general
    serverTimezone: str = "",
    entryPage: str = "dashboard",
    searchEngineIndex: bool = False,
    primaryBaseURL: str = "",
    steamAPIKey: str = "",
    nscd: bool = False,
    dnsCache: bool = False,
    chromeExecutable: str = "",

    # notifications
    tlsExpiryNotifyDays: list = None,

    # security
    disableAuth: bool = False,

    # reverse proxy
    trustProxy: bool = False
) -> dict:
    """
    Set settings.

    :param str, optional password: Password, defaults to None
    :param bool, optional checkUpdate: Show update if available, defaults to True
    :param bool, optional checkBeta: Also check beta release, defaults to False
    :param int, optional keepDataPeriodDays: Keep monitor history data for X days. Set to 0 for infinite retention., defaults to 180
    :param str, optional serverTimezone: Server Timezone, defaults to ""
    :param str, optional entryPage: Entry Page, defaults to "dashboard"
    :param bool, optional searchEngineIndex: Search Engine Visibility, defaults to False
    :param str, optional primaryBaseURL: Primary Base URL, defaults to ""
    :param str, optional steamAPIKey: Steam API Key. For monitoring a Steam Game Server you need a Steam Web-API key., defaults to ""
    :param bool, optional nscd: Enable NSCD (Name Service Cache Daemon) for caching all DNS requests, defaults to False
    :param bool, optional dnsCache: True to enable DNS Cache. It may be not working in some IPv6 environments, disable it if you encounter any issues., defaults to False
    :param str, optional chromeExecutable: Chrome/Chromium Executable, defaults to ""
    :param list, optional tlsExpiryNotifyDays: TLS Certificate Expiry. HTTPS Monitors trigger notification when TLS certificate expires in., defaults to None
    :param bool, optional disableAuth: Disable Authentication, defaults to False
    :param bool, optional trustProxy: Trust Proxy. Trust 'X-Forwarded-\\*' headers. If you want to get the correct client IP and your Uptime Kuma is behind such as Nginx or Apache, you should enable this., defaults to False
    :return: The server response.
    :rtype: dict
    :raises UptimeKumaException: If the server returns an error.
    """
    # NOTE: a falsy value (None or []) for tlsExpiryNotifyDays is replaced
    # by the server default of [7, 14, 21], matching the original behavior.
    settings = {
        "checkUpdate": checkUpdate,
        "checkBeta": checkBeta,
        "keepDataPeriodDays": keepDataPeriodDays,
        "serverTimezone": serverTimezone,
        "entryPage": entryPage,
        "searchEngineIndex": searchEngineIndex,
        "primaryBaseURL": primaryBaseURL,
        "steamAPIKey": steamAPIKey,
        "dnsCache": dnsCache,
        "tlsExpiryNotifyDays": tlsExpiryNotifyDays if tlsExpiryNotifyDays else [7, 14, 21],
        "disableAuth": disableAuth,
        "trustProxy": trustProxy
    }

    # Newer settings keys are only accepted by newer server versions.
    server_version = parse_version(self.version)
    if server_version >= parse_version("1.23"):
        settings["chromeExecutable"] = chromeExecutable
    if server_version >= parse_version("1.23.1"):
        settings["nscd"] = nscd

    return self._call('setSettings', (settings, password))
|
2022-07-02 16:00:54 +02:00
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
def change_password(self, old_password: str, new_password: str) -> dict:
    """
    Change password of the logged-in user.

    :param str old_password: Old password
    :param str new_password: New password
    :return: The server response.
    :rtype: dict
    :raises UptimeKumaException: If the server returns an error.
    """
    payload = {
        "currentPassword": old_password,
        "newPassword": new_password,
    }
    return self._call('changePassword', payload)
|
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
def upload_backup(self, json_data: str, import_handle: str = "skip") -> dict:
    """
    Import a backup.

    :param str json_data: Backup data as json string.
    :param str, optional import_handle: Choose "skip" if you want to skip every monitor or notification with the same name. "overwrite" will delete every existing monitor and notification. "keep" will keep both., defaults to "skip"
    :return: The server response.
    :rtype: dict
    :raises ValueError: If import_handle is not one of "overwrite", "skip", "keep".
    :raises UptimeKumaException: If the server returns an error.
    """
    # Guard clause: reject unknown merge strategies before contacting the server.
    valid_handles = ("overwrite", "skip", "keep")
    if import_handle not in valid_handles:
        raise ValueError(f"Unknown import_handle value: {import_handle}")
    return self._call('uploadBackup', (json_data, import_handle))
|
2022-07-02 16:00:54 +02:00
|
|
|
|
|
|
|
# 2FA
|
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
def twofa_status(self) -> dict:
    """
    Get the current 2FA status.

    :return: The server response.
    :rtype: dict
    :raises UptimeKumaException: If the server returns an error.
    """
    response = self._call('twoFAStatus')
    return response
|
2022-07-02 16:00:54 +02:00
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
def prepare_2fa(self, password: str) -> dict:
    """
    Prepare the 2FA configuration.

    Returns an ``otpauth://`` URI containing the generated TOTP secret.

    :param str password: Current password.
    :return: The server response.
    :rtype: dict
    :raises UptimeKumaException: If the server returns an error.
    """
    response = self._call('prepare2FA', password)
    return response
|
2022-07-02 16:00:54 +02:00
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
def verify_token(self, token: str, password: str) -> dict:
    """
    Verify the provided 2FA token.

    :param str token: 2FA token.
    :param str password: Current password.
    :return: The server response.
    :rtype: dict
    :raises UptimeKumaException: If the server returns an error.
    """
    payload = (token, password)
    return self._call('verifyToken', payload)
|
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
def save_2fa(self, password: str) -> dict:
    """
    Save (enable) the current 2FA configuration.

    :param str password: Current password.
    :return: The server response.
    :rtype: dict
    :raises UptimeKumaException: If the server returns an error.
    """
    response = self._call('save2FA', password)
    return response
|
2022-07-02 16:00:54 +02:00
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
def disable_2fa(self, password: str) -> dict:
    """
    Disable 2FA for this user.

    :param str password: Current password.
    :return: The server response.
    :rtype: dict
    :raises UptimeKumaException: If the server returns an error.
    """
    response = self._call('disable2FA', password)
    return response
|
2022-07-02 16:00:54 +02:00
|
|
|
|
|
|
|
# login
|
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
def login(self, username: str = None, password: str = None, token: str = "") -> dict:
    """
    Login.

    If username and password is not provided, auto login is performed if
    disableAuth is enabled on the server.

    :param str, optional username: Username. Must be None if disableAuth is enabled., defaults to None
    :param str, optional password: Password. Must be None if disableAuth is enabled., defaults to None
    :param str, optional token: 2FA Token. Required if 2FA is enabled., defaults to ""
    :return: The server response.
    :rtype: dict
    :raises UptimeKumaException: If the server returns an error.
    """
    # Auto-login path: with no credentials, wait for the server's
    # autoLogin event (emitted when authentication is disabled).
    if username is None and password is None:
        with self.wait_for_event(Event.AUTO_LOGIN):
            return {}

    credentials = {
        "username": username,
        "password": password,
        "token": token
    }
    return self._call('login', credentials)
|
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
def login_by_token(self, token: str) -> dict:
    """
    Login by token.

    :param str token: Login token generated by :meth:`~login`
    :return: The server response.
    :rtype: dict
    :raises UptimeKumaException: If the server returns an error.
    """
    response = self._call('loginByToken', token)
    return response
|
2022-07-02 16:00:54 +02:00
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
def logout(self) -> None:
    """
    Logout.

    :return: The server response.
    :rtype: None
    :raises UptimeKumaException: If the server returns an error.
    """
    response = self._call('logout')
    return response
|
2022-07-02 16:00:54 +02:00
|
|
|
|
|
|
|
# setup
|
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
def need_setup(self) -> bool:
    """
    Check if the server still needs initial setup.

    :return: True if the server has not been set up yet.
    :rtype: bool
    :raises UptimeKumaException: If the server returns an error.
    """
    response = self._call('needSetup')
    return response
|
2022-07-02 16:00:54 +02:00
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
def setup(self, username: str, password: str) -> dict:
|
|
|
|
"""
|
|
|
|
Set up the server.
|
|
|
|
|
2022-12-17 15:09:27 +01:00
|
|
|
:param str username: Username
|
|
|
|
:param str password: Password
|
2022-12-16 21:39:18 +01:00
|
|
|
:return: The server response.
|
2022-12-17 15:30:29 +01:00
|
|
|
:rtype: dict
|
|
|
|
:raises UptimeKumaException: If the server returns an error.
|
2022-12-16 21:39:18 +01:00
|
|
|
|
|
|
|
Example::
|
|
|
|
|
|
|
|
>>> api.setup(username, password)
|
|
|
|
{
|
|
|
|
'msg': 'Added Successfully.'
|
|
|
|
}
|
|
|
|
"""
|
2022-07-07 13:29:06 +02:00
|
|
|
return self._call("setup", (username, password))
|
2022-07-02 20:40:14 +02:00
|
|
|
|
|
|
|
# database
|
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
def get_database_size(self) -> dict:
    """
    Get the database size in bytes.

    :return: The server response.
    :rtype: dict
    :raises UptimeKumaException: If the server returns an error.
    """
    response = self._call('getDatabaseSize')
    return response
|
2022-07-02 20:40:14 +02:00
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
def shrink_database(self) -> dict:
    """
    Shrink the database.

    Triggers database VACUUM for SQLite. If your database was created
    after 1.10.0, AUTO_VACUUM is already enabled and this action is not
    needed.

    :return: The server response.
    :rtype: dict
    :raises UptimeKumaException: If the server returns an error.
    """
    response = self._call('shrinkDatabase')
    return response
|
2022-09-07 13:03:10 +02:00
|
|
|
|
|
|
|
# docker host
|
|
|
|
|
2023-05-01 18:57:55 +02:00
|
|
|
def get_docker_hosts(self) -> list[dict]:
    """
    Get all docker hosts.

    :return: All docker hosts.
    :rtype: list
    """
    # Docker hosts arrive via the dockerHostList event; convert the raw
    # dockerType strings into DockerType enum members in place.
    hosts = self._get_event_data(Event.DOCKER_HOST_LIST)
    parse_docker_type(hosts)
    return hosts
|
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
def get_docker_host(self, id_: int) -> dict:
    """
    Get a docker host.

    :param int id_: Id of the docker host to get.
    :return: The docker host.
    :rtype: dict
    :raises UptimeKumaException: If the docker host does not exist.
    """
    match = next(
        (host for host in self.get_docker_hosts() if host["id"] == id_),
        None
    )
    if match is None:
        raise UptimeKumaException("docker host does not exist")
    return match
|
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
@append_docstring(docker_host_docstring("test"))
def test_docker_host(self, **kwargs) -> dict:
    """
    Test a docker host connection.

    :return: The server response.
    :rtype: dict
    :raises UptimeKumaException: If the server returns an error.
    """
    payload = _build_docker_host_data(**kwargs)
    return self._call('testDockerHost', payload)
|
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
@append_docstring(docker_host_docstring("add"))
def add_docker_host(self, **kwargs) -> dict:
    """
    Add a docker host.

    :return: The server response.
    :rtype: dict
    :raises UptimeKumaException: If the server returns an error.
    """
    payload = _build_docker_host_data(**kwargs)
    _convert_docker_host_input(payload)
    # A None id tells the server to create a new docker host; wait until
    # the updated dockerHostList event arrives before returning.
    with self.wait_for_event(Event.DOCKER_HOST_LIST):
        return self._call('addDockerHost', (payload, None))
|
2022-09-07 13:03:10 +02:00
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
@append_docstring(docker_host_docstring("edit"))
def edit_docker_host(self, id_: int, **kwargs) -> dict:
    """
    Edit a docker host.

    :param int id_: Id of the docker host to edit.
    :return: The server response.
    :rtype: dict
    :raises UptimeKumaException: If the server returns an error.
    """
    # Start from the current values so unspecified fields are preserved.
    payload = self.get_docker_host(id_)
    payload.update(kwargs)
    _convert_docker_host_input(payload)
    # Passing an existing id makes addDockerHost update in place.
    with self.wait_for_event(Event.DOCKER_HOST_LIST):
        return self._call('addDockerHost', (payload, id_))
|
2022-09-07 13:03:10 +02:00
|
|
|
|
2022-12-16 21:39:18 +01:00
|
|
|
def delete_docker_host(self, id_: int) -> dict:
    """
    Delete a docker host.

    :param int id_: Id of the docker host to delete.
    :return: The server response.
    :rtype: dict
    :raises UptimeKumaException: If the docker host does not exist or the
        server returns an error.
    """
    with self.wait_for_event(Event.DOCKER_HOST_LIST):
        known_ids = {host["id"] for host in self.get_docker_hosts()}
        if id_ not in known_ids:
            raise UptimeKumaException("docker host does not exist")
        return self._call('deleteDockerHost', id_)
|
2022-12-29 00:22:53 +01:00
|
|
|
|
2023-03-20 15:14:39 +01:00
|
|
|
# maintenance
|
|
|
|
|
2023-05-01 18:57:55 +02:00
|
|
|
def get_maintenances(self) -> list[dict]:
    """
    Get all maintenances.

    :return: All maintenances.
    :rtype: list
    :raises UptimeKumaException: If the server returns an error.
    """
    # The maintenanceList event delivers a dict keyed by id; flatten it
    # to a list and convert strategy strings to MaintenanceStrategy.
    event_data = self._get_event_data(Event.MAINTENANCE_LIST)
    maintenances = list(event_data.values())
    parse_maintenance_strategy(maintenances)
    return maintenances
|
2022-12-29 00:22:53 +01:00
|
|
|
|
|
|
|
def get_maintenance(self, id_: int) -> dict:
    """
    Get a maintenance.

    :param int id_: Id of the maintenance to get.
    :return: The maintenance.
    :rtype: dict
    :raises UptimeKumaException: If the server returns an error.
    """
    # Unwrap the {"maintenance": {...}} envelope and convert the raw
    # strategy string to a MaintenanceStrategy enum member in place.
    maintenance = self._call('getMaintenance', id_)["maintenance"]
    parse_maintenance_strategy(maintenance)
    return maintenance
|
2022-12-29 00:22:53 +01:00
|
|
|
|
|
|
|
@append_docstring(maintenance_docstring("add"))
def add_maintenance(self, **kwargs) -> dict:
    """
    Adds a maintenance.

    :return: The server response.
    :rtype: dict
    :raises UptimeKumaException: If the server returns an error.

    Example (strategy: :attr:`~.MaintenanceStrategy.MANUAL`)::

        >>> api.add_maintenance(
        ...     title="test",
        ...     description="test",
        ...     strategy=MaintenanceStrategy.MANUAL,
        ...     active=True,
        ...     intervalDay=1,
        ...     dateRange=[
        ...         "2022-12-27 00:00:00"
        ...     ],
        ...     weekdays=[],
        ...     daysOfMonth=[]
        ... )
        {
            "msg": "Added Successfully.",
            "maintenanceID": 1
        }

    Example (strategy: :attr:`~.MaintenanceStrategy.SINGLE`)::

        >>> api.add_maintenance(
        ...     title="test",
        ...     description="test",
        ...     strategy=MaintenanceStrategy.SINGLE,
        ...     active=True,
        ...     intervalDay=1,
        ...     dateRange=[
        ...         "2022-12-27 22:36:00",
        ...         "2022-12-29 22:36:00"
        ...     ],
        ...     weekdays=[],
        ...     daysOfMonth=[],
        ...     timezoneOption="Europe/Berlin"
        ... )
        {
            "msg": "Added Successfully.",
            "maintenanceID": 1
        }

    Example (strategy: :attr:`~.MaintenanceStrategy.RECURRING_INTERVAL`)::

        >>> api.add_maintenance(
        ...     title="test",
        ...     description="test",
        ...     strategy=MaintenanceStrategy.RECURRING_INTERVAL,
        ...     active=True,
        ...     intervalDay=1,
        ...     dateRange=[
        ...         "2022-12-27 22:37:00",
        ...         "2022-12-31 22:37:00"
        ...     ],
        ...     timeRange=[
        ...         {
        ...             "hours": 2,
        ...             "minutes": 0,
        ...             "seconds": 0
        ...         },
        ...         {
        ...             "hours": 3,
        ...             "minutes": 0,
        ...             "seconds": 0
        ...         }
        ...     ],
        ...     weekdays=[],
        ...     daysOfMonth=[],
        ...     timezoneOption="Europe/Berlin"
        ... )
        {
            "msg": "Added Successfully.",
            "maintenanceID": 1
        }

    Example (strategy: :attr:`~.MaintenanceStrategy.RECURRING_WEEKDAY`)::

        >>> api.add_maintenance(
        ...     title="test",
        ...     description="test",
        ...     strategy=MaintenanceStrategy.RECURRING_WEEKDAY,
        ...     active=True,
        ...     intervalDay=1,
        ...     dateRange=[
        ...         "2022-12-27 22:38:00",
        ...         "2022-12-31 22:38:00"
        ...     ],
        ...     timeRange=[
        ...         {
        ...             "hours": 2,
        ...             "minutes": 0,
        ...             "seconds": 0
        ...         },
        ...         {
        ...             "hours": 3,
        ...             "minutes": 0,
        ...             "seconds": 0
        ...         }
        ...     ],
        ...     weekdays=[
        ...         1,
        ...         3,
        ...         5,
        ...         0
        ...     ],
        ...     daysOfMonth=[],
        ...     timezoneOption="Europe/Berlin"
        ... )
        {
            "msg": "Added Successfully.",
            "maintenanceID": 1
        }

    Example (strategy: :attr:`~.MaintenanceStrategy.RECURRING_DAY_OF_MONTH`)::

        >>> api.add_maintenance(
        ...     title="test",
        ...     description="test",
        ...     strategy=MaintenanceStrategy.RECURRING_DAY_OF_MONTH,
        ...     active=True,
        ...     intervalDay=1,
        ...     dateRange=[
        ...         "2022-12-27 22:39:00",
        ...         "2022-12-31 22:39:00"
        ...     ],
        ...     timeRange=[
        ...         {
        ...             "hours": 2,
        ...             "minutes": 0,
        ...             "seconds": 0
        ...         },
        ...         {
        ...             "hours": 3,
        ...             "minutes": 0,
        ...             "seconds": 0
        ...         }
        ...     ],
        ...     weekdays=[],
        ...     daysOfMonth=[
        ...         1,
        ...         10,
        ...         20,
        ...         30,
        ...         "lastDay1"
        ...     ],
        ...     timezoneOption="Europe/Berlin"
        ... )
        {
            "msg": "Added Successfully.",
            "maintenanceID": 1
        }

    Example (strategy: :attr:`~.MaintenanceStrategy.CRON`)::

        >>> api.add_maintenance(
        ...     title="test",
        ...     description="test",
        ...     strategy=MaintenanceStrategy.CRON,
        ...     active=True,
        ...     intervalDay=1,
        ...     dateRange=[
        ...         "2022-12-27 22:39:00",
        ...         "2022-12-31 22:39:00"
        ...     ],
        ...     weekdays=[],
        ...     daysOfMonth=[],
        ...     cron="50 5 * * *",
        ...     durationMinutes=120,
        ...     timezoneOption="Europe/Berlin"
        ... )
        {
            "msg": "Added Successfully.",
            "maintenanceID": 1
        }
    """
    payload = self._build_maintenance_data(**kwargs)
    # Validate client-side before sending the request to the server.
    _check_arguments_maintenance(payload)
    return self._call('addMaintenance', payload)
|
|
|
|
|
|
|
|
@append_docstring(maintenance_docstring("edit"))
def edit_maintenance(self, id_: int, **kwargs) -> dict:
    """
    Edits a maintenance.

    :param int id_: Id of the maintenance to edit.
    :return: The server response.
    :rtype: dict
    :raises UptimeKumaException: If the server returns an error.

    Example::

        >>> api.edit_maintenance(1,
        ...     title="test",
        ...     description="test",
        ...     strategy=MaintenanceStrategy.RECURRING_INTERVAL,
        ...     active=True,
        ...     intervalDay=1,
        ...     dateRange=[
        ...         "2022-12-27 22:37:00",
        ...         "2022-12-31 22:37:00"
        ...     ],
        ...     timeRange=[
        ...         {
        ...             "hours": 2,
        ...             "minutes": 0,
        ...             "seconds": 0
        ...         },
        ...         {
        ...             "hours": 3,
        ...             "minutes": 0,
        ...             "seconds": 0
        ...         }
        ...     ],
        ...     weekdays=[],
        ...     daysOfMonth=[]
        ... )
        {
            "msg": "Saved.",
            "maintenanceID": 1
        }
    """
    # Start from the current server-side state so that any field not passed
    # in kwargs keeps its existing value.
    data = self.get_maintenance(id_)
    data.update(kwargs)
    _check_arguments_maintenance(data)
    return self._call('editMaintenance', data)
|
|
|
|
|
|
|
|
def delete_maintenance(self, id_: int) -> dict:
    """
    Deletes a maintenance.

    :param int id_: Id of the maintenance to delete.
    :return: The server response.
    :rtype: dict
    :raises UptimeKumaException: If the maintenance does not exist or the server returns an error.

    Example::

        >>> api.delete_maintenance(1)
        {
            "msg": "Deleted Successfully."
        }
    """
    with self.wait_for_event(Event.MAINTENANCE_LIST):
        # Fail fast with a clear error if the id is unknown instead of
        # relying on the server response.
        known_ids = [maintenance["id"] for maintenance in self.get_maintenances()]
        if id_ not in known_ids:
            raise UptimeKumaException("maintenance does not exist")
        return self._call('deleteMaintenance', id_)
|
2022-12-29 00:22:53 +01:00
|
|
|
|
|
|
|
def pause_maintenance(self, id_: int) -> dict:
    """
    Pauses a maintenance.

    :param int id_: Id of the maintenance to pause.
    :return: The server response.
    :rtype: dict
    :raises UptimeKumaException: If the server returns an error.

    Example::

        >>> api.pause_maintenance(1)
        {
            "msg": "Paused Successfully."
        }
    """
    response = self._call('pauseMaintenance', id_)
    return response
|
|
|
|
|
|
|
|
def resume_maintenance(self, id_: int) -> dict:
    """
    Resumes a maintenance.

    :param int id_: Id of the maintenance to resume.
    :return: The server response.
    :rtype: dict
    :raises UptimeKumaException: If the server returns an error.

    Example::

        >>> api.resume_maintenance(1)
        {
            "msg": "Resume Successfully"
        }
    """
    response = self._call('resumeMaintenance', id_)
    return response
|
|
|
|
|
2023-05-01 18:57:55 +02:00
|
|
|
def get_monitor_maintenance(self, id_: int) -> list[dict]:
    """
    Gets all monitors of a maintenance.

    :param int id_: Id of the maintenance to get the monitors from.
    :return: All monitors of the maintenance.
    :rtype: list
    :raises UptimeKumaException: If the server returns an error.

    Example::

        >>> api.get_monitor_maintenance(1)
        [
            {
                "id": 1
            },
            {
                "id": 2
            }
        ]
    """
    response = self._call('getMonitorMaintenance', id_)
    # The server wraps the list in a "monitors" key.
    return response["monitors"]
|
|
|
|
|
|
|
|
def add_monitor_maintenance(self, id_: int, monitors: list) -> dict:
    """
    Adds monitors to a maintenance.

    :param int id_: Id of the maintenance to add the monitors to.
    :param list monitors: The list of monitors to add to the maintenance.
    :return: The server response.
    :rtype: dict
    :raises UptimeKumaException: If the server returns an error.

    Example::

        >>> monitors = [
        ...     {
        ...         "id": 1
        ...     },
        ...     {
        ...         "id": 2
        ...     }
        ... ]
        >>> api.add_monitor_maintenance(1, monitors)
        {
            "msg": "Added Successfully."
        }
    """
    # The socket.io handler takes the maintenance id and the monitor list
    # as a single positional pair.
    return self._call('addMonitorMaintenance', (id_, monitors))
|
|
|
|
|
2023-05-01 18:57:55 +02:00
|
|
|
def get_status_page_maintenance(self, id_: int) -> list[dict]:
    """
    Gets all status pages of a maintenance.

    :param int id_: Id of the maintenance to get the status pages from.
    :return: All status pages of the maintenance.
    :rtype: list
    :raises UptimeKumaException: If the server returns an error.

    Example::

        >>> api.get_status_page_maintenance(1)
        [
            {
                "id": 1,
                "title": "test"
            }
        ]
    """
    response = self._call('getMaintenanceStatusPage', id_)
    # The server wraps the list in a "statusPages" key.
    return response["statusPages"]
|
|
|
|
|
|
|
|
def add_status_page_maintenance(
        self,
        id_: int,
        status_pages: list,
) -> dict:
    """
    Adds status pages to a maintenance.

    :param int id_: Id of the maintenance to add the status pages to.
    :param list status_pages: The list of status pages to add to the maintenance.
    :return: The server response.
    :rtype: dict
    :raises UptimeKumaException: If the server returns an error.

    Example::

        >>> status_pages = [
        ...     {
        ...         "id": 1
        ...     },
        ...     {
        ...         "id": 2
        ...     }
        ... ]
        >>> api.add_status_page_maintenance(1, status_pages)
        {
            "msg": "Added Successfully."
        }
    """
    # The socket.io handler takes the maintenance id and the status page
    # list as a single positional pair.
    return self._call('addMaintenanceStatusPage', (id_, status_pages))
|
2023-03-20 15:14:39 +01:00
|
|
|
|
|
|
|
# api key
|
|
|
|
|
2023-05-01 18:57:55 +02:00
|
|
|
def get_api_keys(self) -> list[dict]:
    """
    Get all api keys.

    :return: All api keys.
    :rtype: list
    :raises UptimeKumaException: If the server returns an error.

    Example::

        >>> api.get_api_keys()
        [
            {
                "id": 1,
                "name": "test",
                "userID": 1,
                "createdDate": "2023-03-20 11:15:05",
                "active": False,
                "expires": null,
                "status": "inactive"
            },
            {
                "id": 2,
                "name": "test2",
                "userID": 1,
                "createdDate": "2023-03-20 11:20:29",
                "active": True,
                "expires": "2023-03-30 12:20:00",
                "status": "active"
            }
        ]
    """

    # TODO: replace with getAPIKeyList?

    r = self._get_event_data(Event.API_KEY_LIST)
    # The server encodes booleans as 0/1; normalize "active" to real bools.
    int_to_bool(r, ["active"])
    return r
|
|
|
|
|
|
|
|
def get_api_key(self, id_: int) -> dict:
    """
    Get an api key.

    :param int id_: Id of the api key to get.
    :return: The api key.
    :rtype: dict
    :raises UptimeKumaException: If the api key does not exist.

    Example::

        >>> api.get_api_key(1)
        {
            "id": 1,
            "name": "test",
            "userID": 1,
            "createdDate": "2023-03-20 11:15:05",
            "active": False,
            "expires": null,
            "status": "inactive"
        }
    """
    api_keys = self.get_api_keys()
    for api_key in api_keys:
        if api_key["id"] == id_:
            return api_key
    # Bug fix: the message previously said "notification does not exist"
    # (copy-paste from the notification helper).
    raise UptimeKumaException("api key does not exist")
|
|
|
|
|
|
|
|
def add_api_key(self, name: str, expires: str, active: bool) -> dict:
    """
    Adds a new api key.

    :param str name: Name of the api key.
    :param str expires: Expiration date of the api key. Set to ``None`` to disable expiration.
    :param bool active: True to activate api key.
    :return: The server response.
    :rtype: dict
    :raises UptimeKumaException: If the server returns an error.

    Example::

        >>> api.add_api_key(
        ...     name="test",
        ...     expires="2023-03-30 12:20:00",
        ...     active=True
        ... )
        {
            "msg": "Added Successfully.",
            "key": "uk1_9XPRjV7ilGj9CvWRKYiBPq9GLtQs74UzTxKfCxWY",
            "keyID": 1
        }

        >>> api.add_api_key(
        ...     name="test2",
        ...     expires=None,
        ...     active=True
        ... )
        {
            "msg": "Added Successfully.",
            "key": "uk2_jsB9H1Zmt9eEjycNFMTKgse1B0Vfvb944H4_aRqW",
            "keyID": 2
        }
    """
    # The server expects "active" as a 0/1 int flag rather than a bool.
    payload = {
        "name": name,
        "expires": expires,
        "active": int(bool(active)),
    }
    with self.wait_for_event(Event.API_KEY_LIST):
        return self._call('addAPIKey', payload)
|
|
|
|
|
|
|
|
def enable_api_key(self, id_: int) -> dict:
    """
    Enable an api key.

    :param int id_: Id of the api key to enable.
    :return: The server response.
    :rtype: dict
    :raises UptimeKumaException: If the server returns an error.

    Example::

        >>> api.enable_api_key(1)
        {
            "msg": "Enabled Successfully"
        }
    """
    # Wait for the updated api key list event before returning.
    with self.wait_for_event(Event.API_KEY_LIST):
        response = self._call('enableAPIKey', id_)
    return response
|
|
|
|
|
|
|
|
def disable_api_key(self, id_: int) -> dict:
    """
    Disable an api key.

    :param int id_: Id of the api key to disable.
    :return: The server response.
    :rtype: dict
    :raises UptimeKumaException: If the server returns an error.

    Example::

        >>> api.disable_api_key(1)
        {
            "msg": "Disabled Successfully."
        }
    """
    # Wait for the updated api key list event before returning.
    with self.wait_for_event(Event.API_KEY_LIST):
        response = self._call('disableAPIKey', id_)
    return response
|
|
|
|
|
|
|
|
def delete_api_key(self, id_: int) -> dict:
    """
    Deletes an api key.

    :param int id_: Id of the api key to delete.
    :return: The server response.
    :rtype: dict
    :raises UptimeKumaException: If the api key does not exist or the server returns an error.

    Example::

        >>> api.delete_api_key(1)
        {
            "msg": "Deleted Successfully."
        }
    """
    # Docstring fix: this method deletes (the summary previously said "Enable").
    with self.wait_for_event(Event.API_KEY_LIST):
        # Fail fast with a clear error if the id is unknown.
        if id_ not in [i["id"] for i in self.get_api_keys()]:
            raise UptimeKumaException("api key does not exist")
        return self._call('deleteAPIKey', id_)
|
2023-05-02 20:36:49 +02:00
|
|
|
|
|
|
|
# helper methods
|
|
|
|
|
|
|
|
def get_monitor_status(self, monitor_id: int) -> MonitorStatus:
    """
    Get the monitor status.

    :param int monitor_id: Id of the monitor.
    :return: The monitor status.
    :rtype: MonitorStatus
    :raises UptimeKumaException: If the monitor does not exist.

    Example::

        >>> api.get_monitor_status(1)
        <MonitorStatus.PENDING: 2>
    """
    # heartbeats maps monitor id -> list of heartbeat dicts; use a direct
    # membership test instead of the previous O(n) scan over all keys.
    heartbeats = self.get_heartbeats()
    if monitor_id not in heartbeats:
        raise UptimeKumaException("monitor does not exist")
    # The last heartbeat carries the current status.
    status = heartbeats[monitor_id][-1]["status"]
    return MonitorStatus(status)
|