from __future__ import annotations

import datetime
import json
import random
import string
import time
from contextlib import contextmanager
from copy import deepcopy
from typing import Any

import requests
import socketio
from packaging.version import parse as parse_version

from . import (AuthMethod,
               DockerType,
               Event,
               IncidentStyle,
               MaintenanceStrategy,
               MonitorType,
               NotificationType,
               ProxyProtocol,
               UptimeKumaException,
               notification_provider_conditions,
               notification_provider_options)

from .docstrings import (append_docstring,
                         docker_host_docstring,
                         maintenance_docstring,
                         monitor_docstring,
                         notification_docstring,
                         proxy_docstring,
                         tag_docstring)


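# Helper: the socket.io API reports boolean flags as the integers 0 and 1.
# Convert the given keys back to Python bools, for a single dict or a list of dicts.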
def int_to_bool(data, keys) -> None:
    if isinstance(data, list):
        for d in data:
            int_to_bool(d, keys)
    else:
        for key in keys:
            if key in data:
                data[key] = True if data[key] == 1 else False


def gen_secret(length: int) -> str:
    chars = string.ascii_uppercase + string.ascii_lowercase + string.digits
    return ''.join(random.choice(chars) for _ in range(length))


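# The server returns notificationIDList as a dict of {notification_id: True};
# flatten it to a plain list of ints before handing the monitor back to the caller.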
def _convert_monitor_return(monitor) -> None:
    if isinstance(monitor["notificationIDList"], dict):
        monitor["notificationIDList"] = [int(i) for i in monitor["notificationIDList"].keys()]


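# Normalize monitor kwargs before they are sent to the server: apply the default accepted
# status codes, convert notificationIDList to the dict format the server expects, fill in
# placeholder database connection strings and generate a push token for push monitors.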
def _convert_monitor_input(kwargs) -> None:
    if not kwargs["accepted_statuscodes"]:
        kwargs["accepted_statuscodes"] = ["200-299"]

    dict_notification_ids = {}
    if kwargs["notificationIDList"]:
        for notification_id in kwargs["notificationIDList"]:
            dict_notification_ids[notification_id] = True
    kwargs["notificationIDList"] = dict_notification_ids

    if not kwargs["databaseConnectionString"]:
        if kwargs["type"] == MonitorType.SQLSERVER:
            kwargs["databaseConnectionString"] = "Server=<hostname>,<port>;Database=<your database>;User Id=<your user id>;Password=<your password>;Encrypt=<true/false>;TrustServerCertificate=<Yes/No>;Connection Timeout=<int>"
        elif kwargs["type"] == MonitorType.POSTGRES:
            kwargs["databaseConnectionString"] = "postgres://username:password@host:port/database"
        elif kwargs["type"] == MonitorType.MYSQL:
            kwargs["databaseConnectionString"] = "mysql://username:password@host:port/database"
        elif kwargs["type"] == MonitorType.MONGODB:
            kwargs["databaseConnectionString"] = "mongodb://username:password@host:port/database"
        elif kwargs["type"] == MonitorType.REDIS:
            kwargs["databaseConnectionString"] = "redis://user:password@host:port"

    if kwargs["type"] == MonitorType.PUSH and not kwargs.get("pushToken"):
        kwargs["pushToken"] = gen_secret(10)


def _build_notification_data(
        name: str,
        type: NotificationType,
        isDefault: bool = False,
        applyExisting: bool = False,
        **kwargs
) -> dict:
    allowed_kwargs = []
    for keys in notification_provider_options.values():
        allowed_kwargs.extend(keys)

    for key in kwargs.keys():
        if key not in allowed_kwargs:
            raise TypeError(f"unknown argument '{key}'")

    data = {
        "name": name,
        "type": type,
        "isDefault": isDefault,
        "applyExisting": applyExisting,
        **kwargs
    }
    return data


def _build_proxy_data(
        protocol: ProxyProtocol,
        host: str,
        port: str,
        auth: bool = False,
        username: str = None,
        password: str = None,
        active: bool = True,
        default: bool = False,
        applyExisting: bool = False,
) -> dict:
    data = {
        "protocol": protocol,
        "host": host,
        "port": port,
        "auth": auth,
        "username": username,
        "password": password,
        "active": active,
        "default": default,
        "applyExisting": applyExisting
    }
    return data


def _build_status_page_data(
        slug: str,

        # config
        id: int,
        title: str,
        description: str = None,
        theme: str = "light",
        published: bool = True,
        showTags: bool = False,
        domainNameList: list = None,
        googleAnalyticsId: str = None,
        customCSS: str = "",
        footerText: str = None,
        showPoweredBy: bool = True,

        icon: str = "/icon.svg",
        publicGroupList: list = None
) -> tuple[str, dict, str, list]:
    if theme not in ["light", "dark"]:
        raise ValueError
    if not domainNameList:
        domainNameList = []
    if not publicGroupList:
        publicGroupList = []
    config = {
        "id": id,
        "slug": slug,
        "title": title,
        "description": description,
        "icon": icon,
        "theme": theme,
        "published": published,
        "showTags": showTags,
        "domainNameList": domainNameList,
        "googleAnalyticsId": googleAnalyticsId,
        "customCSS": customCSS,
        "footerText": footerText,
        "showPoweredBy": showPoweredBy
    }
    return slug, config, icon, publicGroupList


def _convert_docker_host_input(kwargs) -> None:
    if not kwargs["dockerDaemon"]:
        if kwargs["dockerType"] == DockerType.SOCKET:
            kwargs["dockerDaemon"] = "/var/run/docker.sock"
        elif kwargs["dockerType"] == DockerType.TCP:
            kwargs["dockerDaemon"] = "tcp://localhost:2375"


def _build_docker_host_data(
        name: str,
        dockerType: DockerType,
        dockerDaemon: str = None
) -> dict:
    data = {
        "name": name,
        "dockerType": dockerType,
        "dockerDaemon": dockerDaemon
    }
    return data


def _build_tag_data(
        name: str,
        color: str
) -> dict:
    data = {
        "new": True,
        "name": name,
        "color": color
    }
    return data


def _check_missing_arguments(required_params, kwargs) -> None:
    missing_arguments = []
    for required_param in required_params:
        if kwargs.get(required_param) is None:
            missing_arguments.append(required_param)
    if missing_arguments:
        missing_arguments_str = ", ".join([f"'{i}'" for i in missing_arguments])
        raise TypeError(f"missing {len(missing_arguments)} required argument: {missing_arguments_str}")


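# Validate numeric kwargs against the per-parameter "min"/"max" conditions; None values are skipped.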
def _check_argument_conditions(valid_params, kwargs) -> None:
    for valid_param in valid_params:
        if valid_param in kwargs:
            value = kwargs[valid_param]
            if value is None:
                continue
            conditions = valid_params[valid_param]
            min_ = conditions.get("min")
            max_ = conditions.get("max")
            if min_ is not None and value < min_:
                raise ValueError(f"the value of {valid_param} must not be less than {min_}")
            if max_ is not None and value > max_:
                raise ValueError(f"the value of {valid_param} must not be larger than {max_}")


def _check_arguments_monitor(kwargs) -> None:
    required_args = [
        "type",
        "name",
        "interval",
        "maxretries",
        "retryInterval"
    ]
    _check_missing_arguments(required_args, kwargs)

    required_args_by_type = {
        MonitorType.HTTP: ["url", "maxredirects"],
        MonitorType.PORT: ["hostname", "port"],
        MonitorType.PING: ["hostname"],
        MonitorType.KEYWORD: ["url", "keyword", "maxredirects"],
        MonitorType.GRPC_KEYWORD: ["grpcUrl", "keyword", "grpcServiceName", "grpcMethod"],
        MonitorType.DNS: ["hostname", "dns_resolve_server", "port"],
        MonitorType.DOCKER: ["docker_container", "docker_host"],
        MonitorType.PUSH: [],
        MonitorType.STEAM: ["hostname", "port"],
        MonitorType.GAMEDIG: ["game", "hostname", "port"],
        MonitorType.MQTT: ["hostname", "port", "mqttTopic"],
        MonitorType.SQLSERVER: [],
        MonitorType.POSTGRES: [],
        MonitorType.MYSQL: [],
        MonitorType.MONGODB: [],
        MonitorType.RADIUS: ["radiusUsername", "radiusPassword", "radiusSecret", "radiusCalledStationId", "radiusCallingStationId"],
        MonitorType.REDIS: []
    }
    type_ = kwargs["type"]
    required_args = required_args_by_type[type_]
    _check_missing_arguments(required_args, kwargs)

    conditions = dict(
        interval=dict(
            min=20,
        ),
        maxretries=dict(
            min=0,
        ),
        retryInterval=dict(
            min=20,
        ),
        maxredirects=dict(
            min=0,
        ),
        port=dict(
            min=0,
            max=65535,
        ),
    )
    _check_argument_conditions(conditions, kwargs)


def _check_arguments_notification(kwargs) -> None:
    required_args = ["type", "name"]
    _check_missing_arguments(required_args, kwargs)

    # TODO: collect required notification args from /src/components/notifications/*
    # type_ = kwargs["type"]
    # required_args = notification_provider_options[type_]
    # _check_missing_arguments(required_args, kwargs)
    _check_argument_conditions(notification_provider_conditions, kwargs)


def _check_arguments_proxy(kwargs) -> None:
    required_args = ["protocol", "host", "port"]
    if kwargs.get("auth"):
        required_args.extend(["username", "password"])
    _check_missing_arguments(required_args, kwargs)

    conditions = dict(
        port=dict(
            min=0,
            max=65535,
        )
    )
    _check_argument_conditions(conditions, kwargs)


def _check_arguments_maintenance(kwargs) -> None:
    required_args = ["title", "strategy"]
    _check_missing_arguments(required_args, kwargs)

    strategy = kwargs["strategy"]
    if strategy in [MaintenanceStrategy.RECURRING_INTERVAL, MaintenanceStrategy.RECURRING_WEEKDAY, MaintenanceStrategy.RECURRING_DAY_OF_MONTH]:
        required_args = ["dateRange"]
        _check_missing_arguments(required_args, kwargs)

    conditions = dict(
        intervalDay=dict(
            min=1,
            max=3650,
        )
    )
    _check_argument_conditions(conditions, kwargs)


def _check_arguments_tag(kwargs) -> None:
    required_args = [
        "name",
        "color"
    ]
    _check_missing_arguments(required_args, kwargs)


class UptimeKumaApi(object):
    """This class is used to communicate with Uptime Kuma.

    Example::

        Import UptimeKumaApi from the library and specify the Uptime Kuma server url (e.g. 'http://127.0.0.1:3001'), username and password to initialize the connection.

        >>> from uptime_kuma_api import UptimeKumaApi
        >>> api = UptimeKumaApi('INSERT_URL')
        >>> api.login('INSERT_USERNAME', 'INSERT_PASSWORD')
        {
            'token': 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VybmFtZSI6ImFkbWluIiwiaWF0IjoxNjgyOTU4OTU4fQ.Xb81nuKXeNyE1D_XoQowYgsgZHka-edONdwHmIznJdk'
        }

        Now you can call one of the existing methods of the instance. For example create a new monitor:

        >>> api.add_monitor(
        ...     type=MonitorType.HTTP,
        ...     name="Google",
        ...     url="https://google.com"
        ... )
        {
            'msg': 'Added Successfully.',
            'monitorID': 1
        }

        At the end, the connection to the API must be disconnected so that the program does not block.

        >>> api.disconnect()

    With a context manager, the disconnect method is called automatically:

    .. code-block:: python

        from uptime_kuma_api import UptimeKumaApi

        with UptimeKumaApi('INSERT_URL') as api:
            api.login('INSERT_USERNAME', 'INSERT_PASSWORD')
            api.add_monitor(
                type=MonitorType.HTTP,
                name="Google",
                url="https://google.com"
            )

    :param str url: The url to the Uptime Kuma instance. For example ``http://127.0.0.1:3001``
    :param float wait_timeout: How many seconds the client should wait for the connection, defaults to 1
    :param dict headers: Headers that are passed to the socketio connection, defaults to None
    :param bool ssl_verify: ``True`` to verify SSL certificates, or ``False`` to skip SSL certificate
        verification, allowing connections to servers with self signed certificates.
        Default is ``True``.
    :param float wait_events: How many seconds the client should wait for the next event of the same type.
        There is no way to determine when the last message of a certain type has arrived.
        Therefore, a timeout is required. If no further message has arrived within this time,
        it is assumed that it was the last message. Default is ``0.2``.
    :raises UptimeKumaException: When connection to server failed.
    """
    def __init__(
            self,
            url: str,
            wait_timeout: float = 1,
            headers: dict = None,
            ssl_verify: bool = True,
            wait_events: float = 0.2
    ) -> None:
        self.url = url
        self.wait_timeout = wait_timeout
        self.headers = headers
        self.wait_events = wait_events
        self.sio = socketio.Client(ssl_verify=ssl_verify)

        self._event_data: dict = {
            Event.MONITOR_LIST: None,
            Event.NOTIFICATION_LIST: None,
            Event.PROXY_LIST: None,
            Event.STATUS_PAGE_LIST: None,
            Event.HEARTBEAT_LIST: None,
            Event.IMPORTANT_HEARTBEAT_LIST: None,
            Event.AVG_PING: None,
            Event.UPTIME: None,
            Event.HEARTBEAT: None,
            Event.INFO: None,
            Event.CERT_INFO: None,
            Event.DOCKER_HOST_LIST: None,
            Event.AUTO_LOGIN: None,
            Event.MAINTENANCE_LIST: None,
            Event.API_KEY_LIST: None
        }

        self.sio.on(Event.CONNECT, self._event_connect)
        self.sio.on(Event.DISCONNECT, self._event_disconnect)
        self.sio.on(Event.MONITOR_LIST, self._event_monitor_list)
        self.sio.on(Event.NOTIFICATION_LIST, self._event_notification_list)
        self.sio.on(Event.PROXY_LIST, self._event_proxy_list)
        self.sio.on(Event.STATUS_PAGE_LIST, self._event_status_page_list)
        self.sio.on(Event.HEARTBEAT_LIST, self._event_heartbeat_list)
        self.sio.on(Event.IMPORTANT_HEARTBEAT_LIST, self._event_important_heartbeat_list)
        self.sio.on(Event.AVG_PING, self._event_avg_ping)
        self.sio.on(Event.UPTIME, self._event_uptime)
        self.sio.on(Event.HEARTBEAT, self._event_heartbeat)
        self.sio.on(Event.INFO, self._event_info)
        self.sio.on(Event.CERT_INFO, self._event_cert_info)
        self.sio.on(Event.DOCKER_HOST_LIST, self._event_docker_host_list)
        self.sio.on(Event.AUTO_LOGIN, self._event_auto_login)
        self.sio.on(Event.INIT_SERVER_TIMEZONE, self._event_init_server_timezone)
        self.sio.on(Event.MAINTENANCE_LIST, self._event_maintenance_list)
        self.sio.on(Event.API_KEY_LIST, self._event_api_key_list)

        self.connect()

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.disconnect()

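    # Context manager used around calls that change server-side lists: after the wrapped
    # block has run, wait (up to ~10 seconds) until data for the given event has arrived.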
    @contextmanager
    def wait_for_event(self, event: Event) -> None:
        # 200 * 0.05 seconds = 10 seconds
        retries = 200
        sleep = 0.05

        try:
            yield
        except:
            raise
        else:
            counter = 0
            while self._event_data[event] is None:
                time.sleep(sleep)
                counter += 1
                if counter >= retries:
                    print(f"wait_for_event {event} timeout")
                    break

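    # Block until data for the requested event has been received, wait a short grace period
    # for trailing messages, then return a copy of the collected data. Monitor-related events
    # are never sent when there are no monitors, so an empty list is returned in that case.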
    def _get_event_data(self, event) -> Any:
        monitor_events = [Event.AVG_PING, Event.UPTIME, Event.HEARTBEAT_LIST, Event.IMPORTANT_HEARTBEAT_LIST, Event.CERT_INFO, Event.HEARTBEAT]
        while self._event_data[event] is None:
            # do not wait for events that are not sent
            if self._event_data[Event.MONITOR_LIST] == {} and event in monitor_events:
                return []
            time.sleep(0.01)
        time.sleep(self.wait_events)  # wait for multiple messages
        return deepcopy(self._event_data[event])

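    # Send a socket.io request and unwrap the response: raise UptimeKumaException when the
    # server answers with ok=False, otherwise strip the "ok" key and return the payload.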
    def _call(self, event, data=None) -> Any:
        r = self.sio.call(event, data)
        if isinstance(r, dict) and "ok" in r:
            if not r["ok"]:
                raise UptimeKumaException(r.get("msg"))
            r.pop("ok")
        return r

    # event handlers

    def _event_connect(self) -> None:
        pass

    def _event_disconnect(self) -> None:
        pass

    def _event_monitor_list(self, data) -> None:
        self._event_data[Event.MONITOR_LIST] = data

    def _event_notification_list(self, data) -> None:
        self._event_data[Event.NOTIFICATION_LIST] = data

    def _event_proxy_list(self, data) -> None:
        self._event_data[Event.PROXY_LIST] = data

    def _event_status_page_list(self, data) -> None:
        self._event_data[Event.STATUS_PAGE_LIST] = data

    def _event_heartbeat_list(self, id_, data, bool_) -> None:
        if self._event_data[Event.HEARTBEAT_LIST] is None:
            self._event_data[Event.HEARTBEAT_LIST] = []
        self._event_data[Event.HEARTBEAT_LIST].append({
            "id": id_,
            "data": data,
            "bool": bool_,
        })

    def _event_important_heartbeat_list(self, id_, data, bool_) -> None:
        if self._event_data[Event.IMPORTANT_HEARTBEAT_LIST] is None:
            self._event_data[Event.IMPORTANT_HEARTBEAT_LIST] = []
        self._event_data[Event.IMPORTANT_HEARTBEAT_LIST].append({
            "id": id_,
            "data": data,
            "bool": bool_,
        })

    def _event_avg_ping(self, id_, data) -> None:
        if self._event_data[Event.AVG_PING] is None:
            self._event_data[Event.AVG_PING] = []
        self._event_data[Event.AVG_PING].append({
            "id": id_,
            "data": data,
        })

    def _event_uptime(self, monitor_id, duration, uptime) -> None:
        if self._event_data[Event.UPTIME] is None:
            self._event_data[Event.UPTIME] = []
        self._event_data[Event.UPTIME].append({
            "id": monitor_id,
            "duration": duration,
            "uptime": uptime,
        })

    def _event_heartbeat(self, data) -> None:
        if self._event_data[Event.HEARTBEAT] is None:
            self._event_data[Event.HEARTBEAT] = []
        self._event_data[Event.HEARTBEAT].append(data)

    def _event_info(self, data) -> None:
        self._event_data[Event.INFO] = data

    def _event_cert_info(self, id_, data) -> None:
        if self._event_data[Event.CERT_INFO] is None:
            self._event_data[Event.CERT_INFO] = []
        self._event_data[Event.CERT_INFO].append({
            "id": id_,
            "data": data,
        })

    def _event_docker_host_list(self, data) -> None:
        self._event_data[Event.DOCKER_HOST_LIST] = data

    def _event_auto_login(self) -> None:
        self._event_data[Event.AUTO_LOGIN] = True

    def _event_init_server_timezone(self) -> None:
        pass

    def _event_maintenance_list(self, data) -> None:
        self._event_data[Event.MAINTENANCE_LIST] = data

    def _event_api_key_list(self, data) -> None:
        self._event_data[Event.API_KEY_LIST] = data

    # connection

    def connect(self) -> None:
        """
        Connects to Uptime Kuma.

        Called automatically when the UptimeKumaApi instance is created.

        :raises UptimeKumaException: When connection to server failed.
        """
        url = self.url.rstrip("/")
        try:
            self.sio.connect(f'{url}/socket.io/', wait_timeout=self.wait_timeout, headers=self.headers)
        except:
            raise UptimeKumaException("unable to connect")

    def disconnect(self) -> None:
        """
        Disconnects from Uptime Kuma.

        Needs to be called to prevent blocking the program.
        """
        self.sio.disconnect()

    # builder

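    # Version of the connected Uptime Kuma server, taken from the info event;
    # used below to build version-dependent payloads.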
    @property
    def version(self) -> str:
        info = self.info()
        return info.get("version")

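    # Assemble the payload for add/edit monitor calls. Only the fields that are relevant for
    # the given monitor type and supported by the connected server version are included.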
    def _build_monitor_data(
            self,
            type: MonitorType,
            name: str,
            description: str = None,
            interval: int = 60,
            retryInterval: int = 60,
            resendInterval: int = 0,
            maxretries: int = 1,
            upsideDown: bool = False,
            notificationIDList: list = None,
            httpBodyEncoding: str = "json",

            # HTTP, KEYWORD
            url: str = None,
            expiryNotification: bool = False,
            ignoreTls: bool = False,
            maxredirects: int = 10,
            accepted_statuscodes: list = None,
            proxyId: int = None,
            method: str = "GET",
            body: str = None,
            headers: str = None,
            authMethod: AuthMethod = AuthMethod.NONE,
            tlsCert: str = None,
            tlsKey: str = None,
            tlsCa: str = None,
            basic_auth_user: str = None,
            basic_auth_pass: str = None,
            authDomain: str = None,
            authWorkstation: str = None,

            # KEYWORD
            keyword: str = None,

            # GRPC_KEYWORD
            grpcUrl: str = None,
            grpcEnableTls: bool = False,
            grpcServiceName: str = None,
            grpcMethod: str = None,
            grpcProtobuf: str = None,
            grpcBody: str = None,
            grpcMetadata: str = None,

            # DNS, PING, STEAM, MQTT
            hostname: str = None,

            # PING
            packetSize: int = 56,

            # DNS, STEAM, MQTT, RADIUS
            port: int = None,

            # DNS
            dns_resolve_server: str = "1.1.1.1",
            dns_resolve_type: str = "A",

            # MQTT
            mqttUsername: str = None,
            mqttPassword: str = None,
            mqttTopic: str = None,
            mqttSuccessMessage: str = None,

            # SQLSERVER, POSTGRES, MYSQL, MONGODB, REDIS
            databaseConnectionString: str = None,

            # SQLSERVER, POSTGRES, MYSQL
            databaseQuery: str = None,

            # DOCKER
            docker_container: str = "",
            docker_host: int = None,

            # RADIUS
            radiusUsername: str = None,
            radiusPassword: str = None,
            radiusSecret: str = None,
            radiusCalledStationId: str = None,
            radiusCallingStationId: str = None,

            # GAMEDIG
            game: str = None
    ) -> dict:
        # https://github.com/louislam/uptime-kuma/compare/1.21.1...1.21.2#diff-f672603317047f3e6f27b0d7a44f6f244b7dbb5d0d0a85f1059a6b0bc2cb9aa0L910
        if parse_version(self.version) < parse_version("1.21.2"):
            maxretries = 0

        data = {
            "type": type,
            "name": name,
            "interval": interval,
            "retryInterval": retryInterval,
            "maxretries": maxretries,
            "notificationIDList": notificationIDList,
            "upsideDown": upsideDown,
        }

        if parse_version(self.version) >= parse_version("1.18"):
            data.update({
                "resendInterval": resendInterval
            })

        if parse_version(self.version) >= parse_version("1.21"):
            data.update({
                "description": description,
                "httpBodyEncoding": httpBodyEncoding
            })

        if type in [MonitorType.KEYWORD, MonitorType.GRPC_KEYWORD]:
            data.update({
                "keyword": keyword,
            })

        # HTTP, KEYWORD
        data.update({
            "url": url,
            "expiryNotification": expiryNotification,
            "ignoreTls": ignoreTls,
            "maxredirects": maxredirects,
            "accepted_statuscodes": accepted_statuscodes,
            "proxyId": proxyId,
            "method": method,
            "body": body,
            "headers": headers,
            "authMethod": authMethod,
        })

        if authMethod in [AuthMethod.HTTP_BASIC, AuthMethod.NTLM]:
            data.update({
                "basic_auth_user": basic_auth_user,
                "basic_auth_pass": basic_auth_pass,
            })

        if authMethod == AuthMethod.NTLM:
            data.update({
                "authDomain": authDomain,
                "authWorkstation": authWorkstation,
            })

        if authMethod == AuthMethod.MTLS:
            data.update({
                "tlsCert": tlsCert,
                "tlsKey": tlsKey,
                "tlsCa": tlsCa,
            })

        # GRPC_KEYWORD
        if type == MonitorType.GRPC_KEYWORD:
            data.update({
                "grpcUrl": grpcUrl,
                "grpcEnableTls": grpcEnableTls,
                "grpcServiceName": grpcServiceName,
                "grpcMethod": grpcMethod,
                "grpcProtobuf": grpcProtobuf,
                "grpcBody": grpcBody,
                "grpcMetadata": grpcMetadata,
            })

        # PORT, PING, DNS, STEAM, MQTT
        data.update({
            "hostname": hostname,
        })

        # PING
        if parse_version(self.version) >= parse_version("1.20"):
            data.update({
                "packetSize": packetSize,
            })

        # PORT, DNS, STEAM, MQTT, RADIUS
        if not port:
            if type == MonitorType.DNS:
                port = 53
            elif type == MonitorType.RADIUS:
                port = 1812
        data.update({
            "port": port,
        })

        # DNS
        data.update({
            "dns_resolve_server": dns_resolve_server,
            "dns_resolve_type": dns_resolve_type,
        })

        # MQTT
        data.update({
            "mqttUsername": mqttUsername,
            "mqttPassword": mqttPassword,
            "mqttTopic": mqttTopic,
            "mqttSuccessMessage": mqttSuccessMessage,
        })

        # SQLSERVER, POSTGRES, MYSQL, MONGODB, REDIS
        data.update({
            "databaseConnectionString": databaseConnectionString
        })

        # SQLSERVER, POSTGRES, MYSQL
        if type in [MonitorType.SQLSERVER, MonitorType.POSTGRES, MonitorType.MYSQL]:
            data.update({
                "databaseQuery": databaseQuery,
            })

        # DOCKER
        if type == MonitorType.DOCKER:
            data.update({
                "docker_container": docker_container,
                "docker_host": docker_host
            })

        # RADIUS
        if type == MonitorType.RADIUS:
            data.update({
                "radiusUsername": radiusUsername,
                "radiusPassword": radiusPassword,
                "radiusSecret": radiusSecret,
                "radiusCalledStationId": radiusCalledStationId,
                "radiusCallingStationId": radiusCallingStationId
            })

        # GAMEDIG
        if type == MonitorType.GAMEDIG:
            data.update({
                "game": game
            })

        return data

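    # Assemble the payload for add/edit maintenance calls. When not provided, the date range
    # defaults to today and the time range to 02:00-03:00.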
    def _build_maintenance_data(
            self,
            title: str,
            strategy: MaintenanceStrategy,
            active: bool = True,
            description: str = "",
            dateRange: list = None,
            intervalDay: int = 1,
            weekdays: list = None,
            daysOfMonth: list = None,
            timeRange: list = None,
            cron: str = "30 3 * * *",
            durationMinutes: int = 60,
            timezone: str = None
    ) -> dict:
        if not dateRange:
            dateRange = [
                datetime.date.today().strftime("%Y-%m-%d 00:00:00")
            ]
        if not timeRange:
            timeRange = [
                {
                    "hours": 2,
                    "minutes": 0,
                }, {
                    "hours": 3,
                    "minutes": 0,
                }
            ]
        if not weekdays:
            weekdays = []
        if not daysOfMonth:
            daysOfMonth = []
        data = {
            "title": title,
            "active": active,
            "intervalDay": intervalDay,
            "dateRange": dateRange,
            "description": description,
            "strategy": strategy,
            "weekdays": weekdays,
            "daysOfMonth": daysOfMonth,
            "timeRange": timeRange
        }
        if parse_version(self.version) >= parse_version("1.21.2"):
            data.update({
                "cron": cron,
                "durationMinutes": durationMinutes,
                "timezone": timezone,
            })
        return data

    # monitor

    def get_monitors(self) -> list[dict]:
        """
        Get all monitors.

        :return: A list of monitors.
        :rtype: list

        Example::

            >>> api.get_monitors()
            [
                {
                    'accepted_statuscodes': ['200-299'],
                    'active': True,
                    'authDomain': None,
                    'authMethod': '',
                    'authWorkstation': None,
                    'basic_auth_pass': None,
                    'basic_auth_user': None,
                    'body': None,
                    'databaseConnectionString': None,
                    'databaseQuery': None,
                    'dns_last_result': None,
                    'dns_resolve_server': '1.1.1.1',
                    'dns_resolve_type': 'A',
                    'docker_container': None,
                    'docker_host': None,
                    'expiryNotification': False,
                    'game': None,
                    'grpcBody': None,
                    'grpcEnableTls': False,
                    'grpcMetadata': None,
                    'grpcMethod': None,
                    'grpcProtobuf': None,
                    'grpcServiceName': None,
                    'grpcUrl': None,
                    'headers': None,
                    'hostname': None,
                    'id': 1,
                    'ignoreTls': False,
                    'includeSensitiveData': True,
                    'interval': 60,
                    'keyword': None,
                    'maintenance': False,
                    'maxredirects': 10,
                    'maxretries': 1,
                    'method': 'GET',
                    'mqttPassword': None,
                    'mqttSuccessMessage': None,
                    'mqttTopic': None,
                    'mqttUsername': None,
                    'name': 'monitor 1',
                    'notificationIDList': [1, 2],
                    'packetSize': 56,
                    'port': None,
                    'proxyId': None,
                    'pushToken': None,
                    'radiusCalledStationId': None,
                    'radiusCallingStationId': None,
                    'radiusPassword': None,
                    'radiusSecret': None,
                    'radiusUsername': None,
                    'resendInterval': 0,
                    'retryInterval': 60,
                    'tags': [],
                    'type': 'http',
                    'upsideDown': False,
                    'url': 'http://127.0.0.1',
                    'weight': 2000
                }
            ]
        """

        # TODO: replace with getMonitorList?

        r = list(self._get_event_data(Event.MONITOR_LIST).values())
        for monitor in r:
            _convert_monitor_return(monitor)
        int_to_bool(r, ["active"])
        return r

    def get_monitor(self, id_: int) -> dict:
        """
        Get a monitor.

        :param int id_: The monitor id.
        :return: The monitor.
        :rtype: dict
        :raises UptimeKumaException: If the server returns an error.

        Example::

            >>> api.get_monitor(1)
            {
                'accepted_statuscodes': ['200-299'],
                'active': True,
                'authDomain': None,
                'authMethod': '',
                'authWorkstation': None,
                'basic_auth_pass': None,
                'basic_auth_user': None,
                'body': None,
                'databaseConnectionString': None,
                'databaseQuery': None,
                'dns_last_result': None,
                'dns_resolve_server': '1.1.1.1',
                'dns_resolve_type': 'A',
                'docker_container': None,
                'docker_host': None,
                'expiryNotification': False,
                'game': None,
                'grpcBody': None,
                'grpcEnableTls': False,
                'grpcMetadata': None,
                'grpcMethod': None,
                'grpcProtobuf': None,
                'grpcServiceName': None,
                'grpcUrl': None,
                'headers': None,
                'hostname': None,
                'id': 1,
                'ignoreTls': False,
                'includeSensitiveData': True,
                'interval': 60,
                'keyword': None,
                'maintenance': False,
                'maxredirects': 10,
                'maxretries': 1,
                'method': 'GET',
                'mqttPassword': None,
                'mqttSuccessMessage': None,
                'mqttTopic': None,
                'mqttUsername': None,
                'name': 'monitor 1',
                'notificationIDList': [1, 2],
                'packetSize': 56,
                'port': None,
                'proxyId': None,
                'pushToken': None,
                'radiusCalledStationId': None,
                'radiusCallingStationId': None,
                'radiusPassword': None,
                'radiusSecret': None,
                'radiusUsername': None,
                'resendInterval': 0,
                'retryInterval': 60,
                'tags': [],
                'type': 'http',
                'upsideDown': False,
                'url': 'http://127.0.0.1',
                'weight': 2000
            }
        """
        r = self._call('getMonitor', id_)["monitor"]
        _convert_monitor_return(r)
        int_to_bool(r, ["active"])
        return r

    def pause_monitor(self, id_: int) -> dict:
        """
        Pauses a monitor.

        :param int id_: The monitor id.
        :return: The server response.
        :rtype: dict
        :raises UptimeKumaException: If the server returns an error.

        Example::

            >>> api.pause_monitor(1)
            {
                'msg': 'Paused Successfully.'
            }
        """
        return self._call('pauseMonitor', id_)

    def resume_monitor(self, id_: int) -> dict:
        """
        Resumes a monitor.

        :param int id_: The monitor id.
        :return: The server response.
        :rtype: dict
        :raises UptimeKumaException: If the server returns an error.

        Example::

            >>> api.resume_monitor(1)
            {
                'msg': 'Resumed Successfully.'
            }
        """
        return self._call('resumeMonitor', id_)

    def delete_monitor(self, id_: int) -> dict:
        """
        Deletes a monitor.

        :param int id_: The monitor id.
        :return: The server response.
        :rtype: dict
        :raises UptimeKumaException: If the server returns an error.

        Example::

            >>> api.delete_monitor(1)
            {
                'msg': 'Deleted Successfully.'
            }
        """
        with self.wait_for_event(Event.MONITOR_LIST):
            return self._call('deleteMonitor', id_)

    def get_monitor_beats(self, id_: int, hours: int) -> list[dict]:
        """
        Get monitor beats for a specific monitor in a time range.

        :param int id_: The monitor id.
        :param int hours: Period of time in hours from now.
        :return: The server response.
        :rtype: list
        :raises UptimeKumaException: If the server returns an error.

        Example::

            >>> api.get_monitor_beats(1, 6)
            [
                {
                    'down_count': 0,
                    'duration': 0,
                    'id': 25,
                    'important': True,
                    'monitor_id': 1,
                    'msg': '200 - OK',
                    'ping': 201,
                    'status': True,
                    'time': '2022-12-15 12:38:42.661'
                },
                {
                    'down_count': 0,
                    'duration': 60,
                    'id': 26,
                    'important': False,
                    'monitor_id': 1,
                    'msg': '200 - OK',
                    'ping': 193,
                    'status': True,
                    'time': '2022-12-15 12:39:42.878'
                },
                ...
            ]
        """
        r = self._call('getMonitorBeats', (id_, hours))["data"]
        int_to_bool(r, ["important", "status"])
        return r

    def get_game_list(self) -> list[dict]:
        """
        Get a list of games that are supported by the GameDig monitor type.

        :return: The server response.
        :rtype: list
        :raises UptimeKumaException: If the server returns an error.

        Example::

            >>> api.get_game_list()
            [
                {
                    'extra': {},
                    'keys': ['7d2d'],
                    'options': {
                        'port': 26900,
                        'port_query_offset': 1,
                        'protocol': 'valve'
                    },
                    'pretty': '7 Days to Die (2013)'
                },
                {
                    'extra': {},
                    'keys': ['arma2'],
                    'options': {
                        'port': 2302,
                        'port_query_offset': 1,
                        'protocol': 'valve'
                    },
                    'pretty': 'ARMA 2 (2009)'
                },
                ...
            ]
        """
        r = self._call('getGameList')
        # Workaround, gamelist is not available on first call.
        # Fixed in https://github.com/louislam/uptime-kuma/commit/7b8ed01f272fc4c6b69ff6299185e936a5e63735
        # Exists in 1.20.0 - 1.21.0
        if not r:
            r = self._call('getGameList')
        return r.get("gameList")

    @append_docstring(monitor_docstring("add"))
    def add_monitor(self, **kwargs) -> dict:
        """
        Adds a new monitor.

        :return: The server response.
        :rtype: dict
        :raises UptimeKumaException: If the server returns an error.

        Example::

            >>> api.add_monitor(
            ...     type=MonitorType.HTTP,
            ...     name="Google",
            ...     url="https://google.com"
            ... )
            {
                'msg': 'Added Successfully.',
                'monitorID': 1
            }
        """
        data = self._build_monitor_data(**kwargs)
        _convert_monitor_input(data)
        _check_arguments_monitor(data)
        with self.wait_for_event(Event.MONITOR_LIST):
            return self._call('add', data)

    @append_docstring(monitor_docstring("edit"))
    def edit_monitor(self, id_: int, **kwargs) -> dict:
        """
        Edits an existing monitor.

        :param int id_: The monitor id.
        :return: The server response.
        :rtype: dict
        :raises UptimeKumaException: If the server returns an error.

        Example::

            >>> api.edit_monitor(1,
            ...     interval=20
            ... )
            {
                'monitorID': 1,
                'msg': 'Saved.'
            }
        """
        data = self.get_monitor(id_)
        data.update(kwargs)
        _convert_monitor_input(data)
        _check_arguments_monitor(data)
        with self.wait_for_event(Event.MONITOR_LIST):
            return self._call('editMonitor', data)

    # monitor tags

    def add_monitor_tag(self, tag_id: int, monitor_id: int, value: str = "") -> dict:
        """
        Add a tag to a monitor.

        :param int tag_id: Id of the tag.
        :param int monitor_id: Id of the monitor to add the tag to.
        :param str, optional value: Value of the tag, defaults to ""
        :return: The server response.
        :rtype: dict
        :raises UptimeKumaException: If the server returns an error.

        Example::

            >>> api.add_monitor_tag(
            ...     tag_id=1,
            ...     monitor_id=1,
            ...     value="test"
            ... )
            {
                'msg': 'Added Successfully.'
            }
        """
        r = self._call('addMonitorTag', (tag_id, monitor_id, value))
        # the monitor list event does not send the updated tags
        self._event_data[Event.MONITOR_LIST][str(monitor_id)] = self.get_monitor(monitor_id)
        return r

    # editMonitorTag is unused in uptime-kuma
    # def edit_monitor_tag(self, tag_id: int, monitor_id: int, value=""):
    #     return self._call('editMonitorTag', (tag_id, monitor_id, value))

    def delete_monitor_tag(self, tag_id: int, monitor_id: int, value: str = "") -> dict:
        """
        Delete a tag from a monitor.

        :param int tag_id: Id of the tag to remove.
        :param int monitor_id: Id of monitor to remove the tag from.
        :param str, optional value: Value of the tag, defaults to ""
        :return: The server response.
        :rtype: dict
        :raises UptimeKumaException: If the server returns an error.

        Example::

            >>> api.delete_monitor_tag(
            ...     tag_id=1,
            ...     monitor_id=1,
            ...     value="test"
            ... )
            {
                'msg': 'Deleted Successfully.'
            }
        """
        r = self._call('deleteMonitorTag', (tag_id, monitor_id, value))
        # the monitor list event does not send the updated tags
        self._event_data[Event.MONITOR_LIST][str(monitor_id)] = self.get_monitor(monitor_id)
        return r

    # notification

    def get_notifications(self) -> list[dict]:
        """
        Get all notifications.

        :return: All notifications.
        :rtype: list

        Example::

            >>> api.get_notifications()
            [
                {
                    'active': True,
                    'applyExisting': True,
                    'id': 1,
                    'isDefault': True,
                    'name': 'notification 1',
                    'pushAPIKey': '123456789',
                    'type': 'PushByTechulus',
                    'userId': 1
                }
            ]
        """
        notifications = self._get_event_data(Event.NOTIFICATION_LIST)
        r = []
        for notification_raw in notifications:
            notification = notification_raw.copy()
            config = json.loads(notification["config"])
            del notification["config"]
            notification.update(config)
            r.append(notification)
        return r

    def get_notification(self, id_: int) -> dict:
        """
        Get a notification.

        :param int id_: Id of the notification to get.
        :return: The notification.
        :rtype: dict
        :raises UptimeKumaException: If the notification does not exist.

        Example::

            >>> api.get_notification(1)
            {
                'active': True,
                'applyExisting': True,
                'id': 1,
                'isDefault': True,
                'name': 'notification 1',
                'pushAPIKey': '123456789',
                'type': 'PushByTechulus',
                'userId': 1
            }
        """
        notifications = self.get_notifications()
        for notification in notifications:
            if notification["id"] == id_:
                return notification
        raise UptimeKumaException("notification does not exist")

    @append_docstring(notification_docstring("test"))
    def test_notification(self, **kwargs) -> dict:
        """
        Test a notification.

        :return: The server response.
        :rtype: dict
        :raises UptimeKumaException: If the server returns an error.

        Example::

            >>> api.test_notification(
            ...     name="notification 1",
            ...     isDefault=True,
            ...     applyExisting=True,
            ...     type=NotificationType.PUSHBYTECHULUS,
            ...     pushAPIKey="INSERT_PUSH_API_KEY"
            ... )
            {
                'ok': True,
                'msg': 'Sent Successfully.'
            }
        """
        data = _build_notification_data(**kwargs)

        _check_arguments_notification(data)
        return self._call('testNotification', data)

    @append_docstring(notification_docstring("add"))
    def add_notification(self, **kwargs) -> dict:
        """
        Add a notification.

        :return: The server response.
        :rtype: dict
        :raises UptimeKumaException: If the server returns an error.

        Example::

            >>> api.add_notification(
            ...     name="notification 1",
            ...     isDefault=True,
            ...     applyExisting=True,
            ...     type=NotificationType.PUSHBYTECHULUS,
            ...     pushAPIKey="123456789"
            ... )
            {
                'id': 1,
                'msg': 'Saved'
            }
        """
        data = _build_notification_data(**kwargs)

        _check_arguments_notification(data)
        with self.wait_for_event(Event.NOTIFICATION_LIST):
            return self._call('addNotification', (data, None))

@append_docstring(notification_docstring("edit"))
|
|
|
|
def edit_notification(self, id_: int, **kwargs) -> dict:
|
|
|
|
"""
|
|
|
|
Edit a notification.
|
|
|
|
|
2022-12-17 15:09:27 +01:00
|
|
|
:param int id_: Id of the notification to edit.
|
2022-12-16 21:39:18 +01:00
|
|
|
:return: The server response.
|
2022-12-17 15:30:29 +01:00
|
|
|
:rtype: dict
|
|
|
|
:raises UptimeKumaException: If the server returns an error.
|
2022-12-16 21:39:18 +01:00
|
|
|
|
|
|
|
Example::
|
|
|
|
|
2023-02-13 22:51:21 +01:00
|
|
|
>>> api.edit_notification(1,
|
2022-12-16 21:39:18 +01:00
|
|
|
... name="notification 1 edited",
|
|
|
|
... isDefault=False,
|
|
|
|
... applyExisting=False,
|
|
|
|
... type=NotificationType.PUSHDEER,
|
|
|
|
... pushdeerKey="987654321"
|
|
|
|
... )
|
|
|
|
{
|
|
|
|
'id': 1,
|
|
|
|
'msg': 'Saved'
|
|
|
|
}
|
|
|
|
"""
|
2022-07-05 22:12:37 +02:00
|
|
|
notification = self.get_notification(id_)
|
2022-07-06 22:35:05 +02:00
|
|
|
|
2022-08-03 11:56:02 +02:00
|
|
|
# remove old notification provider options from notification object
|
|
|
|
if "type" in kwargs and kwargs["type"] != notification["type"]:
|
2022-07-06 22:35:05 +02:00
|
|
|
for provider in notification_provider_options:
|
|
|
|
provider_options = notification_provider_options[provider]
|
2022-08-03 11:56:02 +02:00
|
|
|
if provider != kwargs["type"]:
|
2022-07-06 22:35:05 +02:00
|
|
|
for option in provider_options:
|
|
|
|
if option in notification:
|
|
|
|
del notification[option]
|
|
|
|
|
2022-07-07 13:29:06 +02:00
|
|
|
notification.update(kwargs)
|
2022-07-10 18:07:11 +02:00
|
|
|
_check_arguments_notification(notification)
|
2022-10-04 18:38:17 +02:00
|
|
|
with self.wait_for_event(Event.NOTIFICATION_LIST):
|
|
|
|
return self._call('addNotification', (notification, id_))
|
2022-07-02 16:00:54 +02:00
|
|
|
|
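    # Illustrative usage sketch (not part of the class API; values taken from the
    # docstring example above): because edit_notification() drops the previous
    # provider's options when the type changes, switching providers only requires
    # passing the new provider's fields.
    #
    #     api.edit_notification(1,
    #         type=NotificationType.PUSHDEER,
    #         pushdeerKey="987654321"
    #     )
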
    def delete_notification(self, id_: int) -> dict:
        """
        Delete a notification.

        :param int id_: Id of the notification to delete.
        :return: The server response.
        :rtype: dict
        :raises UptimeKumaException: If the server returns an error.

        Example::

            >>> api.delete_notification(1)
            {
                'msg': 'Deleted'
            }
        """
        with self.wait_for_event(Event.NOTIFICATION_LIST):
            return self._call('deleteNotification', id_)

    def check_apprise(self) -> bool:
        """
        Check if apprise exists.

        :return: The server response.
        :rtype: bool
        :raises UptimeKumaException: If the server returns an error.

        Example::

            >>> api.check_apprise()
            True
        """
        return self._call('checkApprise')

    # proxy

    def get_proxies(self) -> list[dict]:
        """
        Get all proxies.

        :return: All proxies.
        :rtype: list

        Example::

            >>> api.get_proxies()
            [
                {
                    'active': True,
                    'auth': True,
                    'createdDate': '2022-12-15 16:24:24',
                    'default': False,
                    'host': '127.0.0.1',
                    'id': 1,
                    'password': 'password',
                    'port': 8080,
                    'protocol': 'http',
                    'userId': 1,
                    'username': 'username'
                }
            ]
        """
        r = self._get_event_data(Event.PROXY_LIST)
        int_to_bool(r, ["auth", "active", "default", "applyExisting"])
        return r

    def get_proxy(self, id_: int) -> dict:
        """
        Get a proxy.

        :param int id_: Id of the proxy to get.
        :return: The proxy.
        :rtype: dict
        :raises UptimeKumaException: If the proxy does not exist.

        Example::

            >>> api.get_proxy(1)
            {
                'active': True,
                'auth': True,
                'createdDate': '2022-12-15 16:24:24',
                'default': False,
                'host': '127.0.0.1',
                'id': 1,
                'password': 'password',
                'port': 8080,
                'protocol': 'http',
                'userId': 1,
                'username': 'username'
            }
        """
        proxies = self.get_proxies()
        for proxy in proxies:
            if proxy.get("id") == id_:
                return proxy
        raise UptimeKumaException("proxy does not exist")

    @append_docstring(proxy_docstring("add"))
    def add_proxy(self, **kwargs) -> dict:
        """
        Add a proxy.

        :return: The server response.
        :rtype: dict
        :raises UptimeKumaException: If the server returns an error.

        Example::

            >>> api.add_proxy(
            ...     protocol=ProxyProtocol.HTTP,
            ...     host="127.0.0.1",
            ...     port=8080,
            ...     auth=True,
            ...     username="username",
            ...     password="password",
            ...     active=True,
            ...     default=False,
            ...     applyExisting=False
            ... )
            {
                'id': 1,
                'msg': 'Saved'
            }
        """
        data = _build_proxy_data(**kwargs)

        _check_arguments_proxy(data)
        with self.wait_for_event(Event.PROXY_LIST):
            return self._call('addProxy', (data, None))

    @append_docstring(proxy_docstring("edit"))
    def edit_proxy(self, id_: int, **kwargs) -> dict:
        """
        Edit a proxy.

        :param int id_: Id of the proxy to edit.
        :return: The server response.
        :rtype: dict
        :raises UptimeKumaException: If the server returns an error.

        Example::

            >>> api.edit_proxy(1,
            ...     protocol=ProxyProtocol.HTTPS,
            ...     host="127.0.0.2",
            ...     port=8888
            ... )
            {
                'id': 1,
                'msg': 'Saved'
            }
        """
        proxy = self.get_proxy(id_)
        proxy.update(kwargs)
        _check_arguments_proxy(proxy)
        with self.wait_for_event(Event.PROXY_LIST):
            return self._call('addProxy', (proxy, id_))

    def delete_proxy(self, id_: int) -> dict:
        """
        Delete a proxy.

        :param int id_: Id of the proxy to delete.
        :return: The server response.
        :rtype: dict
        :raises UptimeKumaException: If the server returns an error.

        Example::

            >>> api.delete_proxy(1)
            {
                'msg': 'Deleted'
            }
        """
        with self.wait_for_event(Event.PROXY_LIST):
            return self._call('deleteProxy', id_)

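    # Usage sketch (assumes a connected `api` instance; host/port values are
    # placeholders): the proxy helpers above compose into a create/update/delete
    # cycle, each call returning the server response shown in its docstring.
    #
    #     proxy_id = api.add_proxy(protocol=ProxyProtocol.HTTP, host="127.0.0.1",
    #                              port=8080, active=True)["id"]
    #     api.edit_proxy(proxy_id, port=8888)
    #     api.delete_proxy(proxy_id)
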
    # status page

    def get_status_pages(self) -> list[dict]:
        """
        Get all status pages.

        :return: All status pages.
        :rtype: list

        Example::

            >>> api.get_status_pages()
            [
                {
                    'customCSS': '',
                    'description': 'description 1',
                    'domainNameList': [],
                    'footerText': None,
                    'icon': '/icon.svg',
                    'googleAnalyticsId': '',
                    'id': 1,
                    'published': True,
                    'showPoweredBy': False,
                    'showTags': False,
                    'slug': 'slug1',
                    'theme': 'light',
                    'title': 'status page 1'
                }
            ]
        """
        return list(self._get_event_data(Event.STATUS_PAGE_LIST).values())

    def get_status_page(self, slug: str) -> dict:
        """
        Get a status page.

        :param str slug: Slug
        :return: The status page.
        :rtype: dict
        :raises UptimeKumaException: If the server returns an error.

        Example::

            >>> api.get_status_page("slug1")
            {
                'customCSS': '',
                'description': 'description 1',
                'domainNameList': [],
                'footerText': None,
                'icon': '/icon.svg',
                'googleAnalyticsId': '',
                'id': 1,
                'incident': {
                    'content': 'content 1',
                    'createdDate': '2022-12-15 16:51:43',
                    'id': 1,
                    'lastUpdatedDate': None,
                    'pin': 1,
                    'style': 'danger',
                    'title': 'title 1'
                },
                'maintenanceList': [],
                'publicGroupList': [
                    {
                        'id': 1,
                        'monitorList': [
                            {
                                'id': 1,
                                'maintenance': False,
                                'name': 'monitor 1',
                                'sendUrl': 0
                            }
                        ],
                        'name': 'Services',
                        'weight': 1
                    }
                ],
                'published': True,
                'showPoweredBy': False,
                'showTags': False,
                'slug': 'slug1',
                'theme': 'light',
                'title': 'status page 1'
            }
        """
        r1 = self._call('getStatusPage', slug)
        r2 = requests.get(f"{self.url}/api/status-page/{slug}").json()

        config = r1["config"]
        config.update(r2["config"])

        data = {
            **config,
            "incident": r2["incident"],
            "publicGroupList": r2["publicGroupList"],
        }
        if parse_version(self.version) >= parse_version("1.19"):
            data.update({
                "maintenanceList": r2["maintenanceList"]
            })
        return data

    def add_status_page(self, slug: str, title: str) -> dict:
        """
        Add a status page.

        :param str slug: Slug
        :param str title: Title
        :return: The server response.
        :rtype: dict
        :raises UptimeKumaException: If the server returns an error.

        Example::

            >>> api.add_status_page("slug1", "status page 1")
            {
                'msg': 'OK!'
            }
        """
        with self.wait_for_event(Event.STATUS_PAGE_LIST):
            return self._call('addStatusPage', (title, slug))

    def delete_status_page(self, slug: str) -> dict:
        """
        Delete a status page.

        :param str slug: Slug
        :return: The server response.
        :rtype: dict

        Example::

            >>> api.delete_status_page("slug1")
            {}
        """
        r = self._call('deleteStatusPage', slug)

        # uptime kuma does not send the status page list event when a status page is deleted
        for status_page in self._event_data[Event.STATUS_PAGE_LIST].values():
            if status_page["slug"] == slug:
                status_page_id = status_page["id"]
                del self._event_data[Event.STATUS_PAGE_LIST][str(status_page_id)]
                break

        return r

    def save_status_page(self, slug: str, **kwargs) -> dict:
        """
        Save a status page.

        :param str slug: Slug
        :param int id: Id of the status page to save
        :param str title: Title
        :param str, optional description: Description, defaults to None
        :param str, optional theme: Switch Theme, defaults to "light"
        :param bool, optional published: Published, defaults to True
        :param bool, optional showTags: Show Tags, defaults to False
        :param list, optional domainNameList: Domain Names, defaults to None
        :param str, optional googleAnalyticsId: Google Analytics ID, defaults to None
        :param str, optional customCSS: Custom CSS, defaults to ""
        :param str, optional footerText: Custom Footer, defaults to None
        :param bool, optional showPoweredBy: Show Powered By, defaults to True
        :param str, optional icon: Icon, defaults to "/icon.svg"
        :param list, optional publicGroupList: Public Group List, defaults to None
        :return: The server response.
        :rtype: dict
        :raises UptimeKumaException: If the server returns an error.

        Example::

            >>> monitor_id = 1
            >>> api.save_status_page(
            ...     slug="slug1",
            ...     title="status page 1",
            ...     description="description 1",
            ...     publicGroupList=[
            ...         {
            ...             'name': 'Services',
            ...             'weight': 1,
            ...             'monitorList': [
            ...                 {
            ...                     "id": monitor_id
            ...                 }
            ...             ]
            ...         }
            ...     ]
            ... )
            {
                'publicGroupList': [
                    {
                        'id': 1,
                        'monitorList': [
                            {
                                'id': 1
                            }
                        ],
                        'name': 'Services',
                        'weight': 1
                    }
                ]
            }
        """
        status_page = self.get_status_page(slug)
        status_page.pop("incident")
        if parse_version(self.version) >= parse_version("1.19"):
            status_page.pop("maintenanceList")
        status_page.update(kwargs)
        data = _build_status_page_data(**status_page)
        r = self._call('saveStatusPage', data)

        # uptime kuma does not send the status page list event when a status page is saved
        status_page = self._call('getStatusPage', slug)["config"]
        status_page_id = status_page["id"]
        if self._event_data[Event.STATUS_PAGE_LIST] is None:
            self._event_data[Event.STATUS_PAGE_LIST] = {}
        self._event_data[Event.STATUS_PAGE_LIST][str(status_page_id)] = status_page

        return r

    def post_incident(
            self,
            slug: str,
            title: str,
            content: str,
            style: IncidentStyle = IncidentStyle.PRIMARY
    ) -> dict:
        """
        Post an incident to status page.

        :param str slug: Slug
        :param str title: Title
        :param str content: Content
        :param IncidentStyle, optional style: Style, defaults to :attr:`~.IncidentStyle.PRIMARY`
        :return: The server response.
        :rtype: dict
        :raises UptimeKumaException: If the server returns an error.

        Example::

            >>> api.post_incident(
            ...     slug="slug1",
            ...     title="title 1",
            ...     content="content 1",
            ...     style=IncidentStyle.DANGER
            ... )
            {
                'content': 'content 1',
                'createdDate': '2022-12-15 16:51:43',
                'id': 1,
                'pin': True,
                'style': 'danger',
                'title': 'title 1'
            }
        """
        incident = {
            "title": title,
            "content": content,
            "style": style
        }
        r = self._call('postIncident', (slug, incident))["incident"]
        self.save_status_page(slug)
        return r

    def unpin_incident(self, slug: str) -> dict:
        """
        Unpin an incident from a status page.

        :param str slug: Slug
        :return: The server response.
        :rtype: dict
        :raises UptimeKumaException: If the server returns an error.

        Example::

            >>> api.unpin_incident(slug="slug1")
            {}
        """
        r = self._call('unpinIncident', slug)
        self.save_status_page(slug)
        return r

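    # Usage sketch (slug and texts are placeholders): post_incident() pins an
    # incident to the status page and unpin_incident() clears it again; both
    # helpers re-save the status page so the change becomes visible immediately.
    #
    #     api.post_incident(slug="slug1", title="title 1", content="content 1",
    #                       style=IncidentStyle.DANGER)
    #     api.unpin_incident(slug="slug1")
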
    # heartbeat

    def get_heartbeats(self) -> list[dict]:
        """
        Get heartbeats.

        :return: The heartbeats.
        :rtype: list

        Example::

            >>> api.get_heartbeats()
            [
                {
                    'bool': False,
                    'data': [
                        {
                            'down_count': 0,
                            'duration': 0,
                            'id': 1,
                            'important': True,
                            'monitor_id': 1,
                            'msg': 'connect ECONNREFUSED 127.0.0.1:80',
                            'ping': None,
                            'status': False,
                            'time': '2022-12-15 16:51:41.782'
                        },
                        {
                            'down_count': 0,
                            'duration': 60,
                            'id': 2,
                            'important': False,
                            'monitor_id': 1,
                            'msg': 'connect ECONNREFUSED 127.0.0.1:80',
                            'ping': None,
                            'status': False,
                            'time': '2022-12-15 16:52:41.799'
                        },
                        ...
                    ],
                    'id': '1'
                }
            ]
        """
        r = self._get_event_data(Event.HEARTBEAT_LIST)
        for i in r:
            int_to_bool(i["data"], ["important", "status"])
        return r

    def get_important_heartbeats(self) -> list[dict]:
        """
        Get important heartbeats.

        :return: The important heartbeats.
        :rtype: list

        Example::

            >>> api.get_important_heartbeats()
            [
                {
                    'bool': False,
                    'data': [
                        {
                            'duration': 0,
                            'important': True,
                            'monitorID': 1,
                            'msg': 'connect ECONNREFUSED 127.0.0.1:80',
                            'ping': None,
                            'status': False,
                            'time': '2022-12-15 16:51:41.782'
                        }
                    ],
                    'id': '1'
                }
            ]
        """
        r = self._get_event_data(Event.IMPORTANT_HEARTBEAT_LIST)
        for i in r:
            int_to_bool(i["data"], ["important", "status"])
        return r

    def get_heartbeat(self) -> list[dict]:
        """
        Get heartbeat.

        :return: The heartbeat.
        :rtype: list

        Example::

            >>> api.get_heartbeat()
            [
                {
                    'duration': 60,
                    'important': False,
                    'monitorID': 1,
                    'msg': 'connect ECONNREFUSED 127.0.0.1:80',
                    'status': False,
                    'time': '2022-12-15 17:17:42.099'
                }
            ]
        """
        r = self._get_event_data(Event.HEARTBEAT)
        int_to_bool(r, ["important", "status"])
        return r

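    # Illustrative sketch: get_heartbeats() returns a list of
    # {"id": <monitor id as str>, "data": [<heartbeats>]} entries, so a client-side
    # view of the latest status per monitor can be built like this (assuming the
    # last entry in "data" is the most recent one, as in the docstring examples):
    #
    #     latest_status = {
    #         hb["id"]: hb["data"][-1]["status"]
    #         for hb in api.get_heartbeats() if hb["data"]
    #     }
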
    # avg ping

    def avg_ping(self) -> list[dict]:
        """
        Get average ping.

        :return: The average ping.
        :rtype: list

        Example::

            >>> api.avg_ping()
            [
                {
                    'id': '1',
                    'data': 67
                }
            ]
        """
        return self._get_event_data(Event.AVG_PING)

    # cert info

    def cert_info(self) -> list[dict]:
        """
        Get certificate info.

        :return: Certificate info.
        :rtype: list

        Example::

            >>> api.cert_info()
            [
                {
                    'id': '2',
'data': '{"valid":true,"certInfo":{"subject":{"C":"US","ST":"California","L":"San Francisco","O":"Cloudflare, Inc.","CN":"cloudflare-dns.com"},"issuer":{"C":"US","O":"DigiCert Inc","CN":"DigiCert TLS Hybrid ECC SHA384 2020 CA1"},"subjectaltname":"DNS:cloudflare-dns.com, DNS:*.cloudflare-dns.com, DNS:one.one.one.one, IP Address:1.0.0.1, IP Address:1.1.1.1, IP Address:162.159.36.1, IP Address:162.159.46.1, IP Address:2606:4700:4700:0:0:0:0:1001, IP Address:2606:4700:4700:0:0:0:0:1111, IP Address:2606:4700:4700:0:0:0:0:64, IP Address:2606:4700:4700:0:0:0:0:6400","infoAccess":{"OCSP - URI":["http://ocsp.digicert.com"],"CA Issuers - URI":["http://cacerts.digicert.com/DigiCertTLSHybridECCSHA3842020CA1-1.crt"]},"bits":256,"pubkey":{"type":"Buffer","data":[4,252,62,81,239,116,29,198,218,120,186,174,165,138,74,221,217,11,230,226,91,87,49,87,222,211,191,182,217,138,59,79,210,84,84,136,207,189,46,101,231,102,235,197,223,208,49,84,82,167,44,238,18,134,163,154,102,193,234,6,121,3,186,27,240]},"asn1Curve":"prime256v1","nistCurve":"P-256","valid_from":"Sep 13 00:00:00 2022 GMT","valid_to":"Sep 13 23:59:59 2023 GMT","fingerprint":"D1:D4:67:E7:BC:0E:AC:CD:C8:87:A6:12:B8:B2:BC:15:C1:69:04:6B","fingerprint256":"66:67:73:19:84:78:03:0C:56:FB:23:76:8E:48:19:C2:B7:5C:32:2C:D3:BE:A4:A8:34:6B:B0:3C:22:8D:4F:18","fingerprint512":"C2:76:3A:C5:AC:64:76:BB:BF:9F:AB:3A:B9:04:55:06:A4:8D:13:67:08:26:10:A0:FE:22:B5:E2:26:E0:67:1F:EC:17:B7:C2:59:18:1E:7B:46:99:7C:54:A4:9E:4B:C6:58:B4:16:B4:88:6F:0C:5B:60:D1:78:AD:E9:CE:28:1C","ext_key_usage":["1.3.6.1.5.5.7.3.1","1.3.6.1.5.5.7.3.2"],"serialNumber":"0D1C7AF28E5F2717DBB27F410820BDF7","raw":{"type":"Buffer","data":[48,130,5,247,48,130,5,125,160,3,2,1,2,2,16,13,28,122,242,142,95,39,23,219,178,127,65,8,32,189,247,48,10,6,8,42,134,72,206,61,4,3,3,48,86,49,11,48,9,6,3,85,4,6,19,2,85,83,49,21,48,19,6,3,85,4,10,19,12,68,105,103,105,67,101,114,116,32,73,110,99,49,48,48,46,6,3,85,4,3,19,39,68,105,103,105,67,101,114,116,32,84,76,83,32,72,121,98,114,105,100,32,69,67,67,32,83,72,65,51,56,52,32,50,48,50,48,32,67,65,49,48,30,23,13,50,50,48,57,49,51,48,48,48,48,48,48,90,23,13,50,51,48,57,49,51,50,51,53,57,53,57,90,48,114,49,11,48,9,6,3,85,4,6,19,2,85,83,49,19,48,17,6,3,85,4,8,19,10,67,97,108,105,102,111,114,110,105,97,49,22,48,20,6,3,85,4,7,19,13,83,97,110,32,70,114,97,110,99,105,115,99,111,49,25,48,23,6,3,85,4,10,19,16,67,108,111,117,100,102,108,97,114,101,44,32,73,110,99,46,49,27,48,25,6,3,85,4,3,19,18,99,108,111,117,100,102,108,97,114,101,45,100,110,115,46,99,111,109,48,89,48,19,6,7,42,134,72,206,61,2,1,6,8,42,134,72,206,61,3,1,7,3,66,0,4,252,62,81,239,116,29,198,218,120,186,174,165,138,74,221,217,11,230,226,91,87,49,87,222,211,191,182,217,138,59,79,210,84,84,136,207,189,46,101,231,102,235,197,223,208,49,84,82,167,44,238,18,134,163,154,102,193,234,6,121,3,186,27,240,163,130,4,15,48,130,4,11,48,31,6,3,85,29,35,4,24,48,22,128,20,10,188,8,41,23,140,165,57,109,122,14,206,51,199,46,179,237,251,195,122,48,29,6,3,85,29,14,4,22,4,20,210,99,186,148,214,84,127,76,133,20,8,58,28,133,86,41,239,89,143,204,48,129,166,6,3,85,29,17,4,129,158,48,129,155,130,18,99,108,111,117,100,102,108,97,114,101,45,100,110,115,46,99,111,109,130,20,42,46,99,108,111,117,100,102,108,97,114,101,45,100,110,115,46,99,111,109,130,15,111,110,101,46,111,110,101,46,111,110,101,46,111,110,101,135,4,1,0,0,1,135,4,1,1,1,1,135,4,162,159,36,1,135,4,162,159,46,1,135,16,38,6,71,0,71,0,0,0,0,0,0,0,0,0,16,1,135,16,38,6,71,0,71,0,0,0,0,0,0,0,0,0,17,17,135,16,38,6,71,0,71,0,0,0,0,0,0,0,0,0,0,100,135,16,38,6,71,0,71,0,0,0,0,0,0,0,0,0,100,
0,48,14,6,3,85,29,15,1,1,255,4,4,3,2,7,128,48,29,6,3,85,29,37,4,22,48,20,6,8,43,6,1,5,5,7,3,1,6,8,43,6,1,5,5,7,3,2,48,129,155,6,3,85,29,31,4,129,147,48,129,144,48,70,160,68,160,66,134,64,104,116,116,112,58,47,47,99,114,108,51,46,100,105,103,105,99,101,114,116,46,99,111,109,47,68,105,103,105,67,101,114,116,84,76,83,72,121,98,114,105,100,69,67,67,83,72,65,51,56,52,50,48,50,48,67,65,49,45,49,46,99,114,108,48,70,160,68,160,66,134,64,104,116,116,112,58,47,47,99,114,108,52,46,100,105,103,105,99,101,114,116,46,99,111,109,
                }
            ]
        """
        return self._get_event_data(Event.CERT_INFO)

    # uptime

    def uptime(self) -> list[dict]:
        """
        Get monitor uptime.

        :return: Monitor uptime.
        :rtype: list

        Example::

            >>> api.uptime()
            [
                {
                    'id': '2',
                    'duration': 24,
                    'uptime': 1
                },
                {
                    'id': '2',
                    'duration': 720,
                    'uptime': 1
                }
            ]
        """
        return self._get_event_data(Event.UPTIME)

    # info

    def info(self) -> dict:
        """
        Get server info.

        :return: Server info.
        :rtype: dict

        Example::

            >>> api.info()
            {
                'version': '1.19.2',
                'latestVersion': '1.19.2',
                'primaryBaseURL': None,
                'serverTimezone': 'Europe/Berlin',
                'serverTimezoneOffset': '+01:00'
            }
        """
        r = self._get_event_data(Event.INFO)
        return r

    # clear

    def clear_events(self, monitor_id: int) -> dict:
        """
        Clear monitor events.

        :param int monitor_id: Id of the monitor whose events should be cleared.
        :return: The server response.
        :rtype: dict
        :raises UptimeKumaException: If the server returns an error.

        Example::

            >>> api.clear_events(1)
            {}
        """
        return self._call('clearEvents', monitor_id)

    def clear_heartbeats(self, monitor_id: int) -> dict:
        """
        Clear monitor heartbeats.

        :param int monitor_id: Id of the monitor whose heartbeats should be cleared.
        :return: The server response.
        :rtype: dict
        :raises UptimeKumaException: If the server returns an error.

        Example::

            >>> api.clear_heartbeats(1)
            {}
        """
        return self._call('clearHeartbeats', monitor_id)

    def clear_statistics(self) -> dict:
        """
        Clear statistics.

        :return: The server response.
        :rtype: dict
        :raises UptimeKumaException: If the server returns an error.

        Example::

            >>> api.clear_statistics()
            {}
        """
        return self._call('clearStatistics')

    # tags

    def get_tags(self) -> list[dict]:
        """
        Get all tags.

        :return: All tags.
        :rtype: list
        :raises UptimeKumaException: If the server returns an error.

        Example::

            >>> api.get_tags()
            [
                {
                    'color': '#ffffff',
                    'id': 1,
                    'name': 'tag 1'
                }
            ]
        """
        return self._call('getTags')["tags"]

    def get_tag(self, id_: int) -> dict:
        """
        Get a tag.

        :param int id_: Id of the tag to get.
        :return: The tag.
        :rtype: dict
        :raises UptimeKumaException: If the tag does not exist.

        Example::

            >>> api.get_tag(1)
            {
                'color': '#ffffff',
                'id': 1,
                'name': 'tag 1'
            }
        """

        tags = self.get_tags()
        for tag in tags:
            if tag["id"] == id_:
                return tag
        raise UptimeKumaException("tag does not exist")

    @append_docstring(tag_docstring("add"))
    def add_tag(self, **kwargs) -> dict:
        """
        Add a tag.

        :return: The server response.
        :rtype: dict
        :raises UptimeKumaException: If the server returns an error.

        Example::

            >>> api.add_tag(
            ...     name="tag 1",
            ...     color="#ffffff"
            ... )
            {
                'color': '#ffffff',
                'id': 1,
                'name': 'tag 1'
            }
        """
        data = _build_tag_data(**kwargs)
        _check_arguments_tag(data)
        return self._call('addTag', data)["tag"]

    @append_docstring(tag_docstring("edit"))
    def edit_tag(self, id_: int, **kwargs) -> dict:
        """
        Edits an existing tag.

        :param int id_: Id of the tag to edit.
        :return: The server response.
        :rtype: dict
        :raises UptimeKumaException: If the server returns an error.

        Example::

            >>> api.edit_tag(1,
            ...     name="tag 1 new",
            ...     color="#000000"
            ... )
            {
                'msg': 'Saved',
                'tag': {
                    'id': 1,
                    'name': 'tag 1 new',
                    'color': '#000000'
                }
            }
        """
        data = self.get_tag(id_)
        data.update(kwargs)
        _check_arguments_tag(data)
        return self._call('editTag', data)

    def delete_tag(self, id_: int) -> dict:
        """
        Delete a tag.

        :param int id_: Id of the tag to delete.
        :return: The server response.
        :rtype: dict
        :raises UptimeKumaException: If the server returns an error.

        Example::

            >>> api.delete_tag(1)
            {
                'msg': 'Deleted Successfully.'
            }
        """
        return self._call('deleteTag', id_)

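    # Usage sketch (name/color values are placeholders): a minimal tag lifecycle
    # built from the helpers above; add_tag() returns the created tag, so its id
    # can be reused for the follow-up calls.
    #
    #     tag_id = api.add_tag(name="tag 1", color="#ffffff")["id"]
    #     api.edit_tag(tag_id, name="tag 1 new")
    #     api.delete_tag(tag_id)
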
    # settings

    def get_settings(self) -> dict:
        """
        Get settings.

        :return: Settings.
        :rtype: dict
        :raises UptimeKumaException: If the server returns an error.

        Example::

            >>> api.get_settings()
            {
                'checkBeta': False,
                'checkUpdate': False,
                'disableAuth': False,
                'dnsCache': True,
                'entryPage': 'dashboard',
                'keepDataPeriodDays': 180,
                'primaryBaseURL': '',
                'searchEngineIndex': False,
                'serverTimezone': 'Europe/Berlin',
                'steamAPIKey': '',
                'tlsExpiryNotifyDays': [
                    7,
                    14,
                    21
                ],
                'trustProxy': False
            }
        """
        r = self._call('getSettings')["data"]
        return r

    def set_settings(
            self,
            password: str = None,  # only required if disableAuth is true

            # about
            checkUpdate: bool = True,
            checkBeta: bool = False,

            # monitor history
            keepDataPeriodDays: int = 180,

            # general
            serverTimezone: str = "",
            entryPage: str = "dashboard",
            searchEngineIndex: bool = False,
            primaryBaseURL: str = "",
            steamAPIKey: str = "",
            dnsCache: bool = False,

            # notifications
            tlsExpiryNotifyDays: list = None,

            # security
            disableAuth: bool = False,

            # reverse proxy
            trustProxy: bool = False
    ) -> dict:
        """
        Set settings.

        :param str, optional password: Password, defaults to None
        :param bool, optional checkUpdate: Show update if available, defaults to True
        :param bool, optional checkBeta: Also check beta release, defaults to False
        :param int, optional keepDataPeriodDays: Keep monitor history data for X days. Set to 0 for infinite retention., defaults to 180
        :param str, optional serverTimezone: Server Timezone, defaults to ""
        :param str, optional entryPage: Entry Page, defaults to "dashboard"
        :param bool, optional searchEngineIndex: Search Engine Visibility, defaults to False
        :param str, optional primaryBaseURL: Primary Base URL, defaults to ""
        :param str, optional steamAPIKey: Steam API Key. For monitoring a Steam Game Server you need a Steam Web-API key., defaults to ""
        :param bool, optional dnsCache: True to enable DNS Cache. It may be not working in some IPv6 environments, disable it if you encounter any issues., defaults to False
        :param list, optional tlsExpiryNotifyDays: TLS Certificate Expiry. HTTPS Monitors trigger notification when TLS certificate expires in., defaults to None
        :param bool, optional disableAuth: Disable Authentication, defaults to False
        :param bool, optional trustProxy: Trust Proxy. Trust 'X-Forwarded-\*' headers. If you want to get the correct client IP and your Uptime Kuma is behind such as Nginx or Apache, you should enable this., defaults to False
        :return: The server response.
        :rtype: dict
        :raises UptimeKumaException: If the server returns an error.

        Example::

            >>> api.set_settings(
            ...     checkUpdate=False,
            ...     checkBeta=False,
            ...     keepDataPeriodDays=180,
            ...     serverTimezone="Europe/Berlin",
            ...     entryPage="dashboard",
            ...     searchEngineIndex=False,
            ...     primaryBaseURL="",
            ...     steamAPIKey="",
            ...     dnsCache=False,
            ...     tlsExpiryNotifyDays=[
            ...         7,
            ...         14,
            ...         21
            ...     ],
            ...     disableAuth=False,
            ...     trustProxy=False
            ... )
            {
                'msg': 'Saved'
            }
        """

        if not tlsExpiryNotifyDays:
            tlsExpiryNotifyDays = [7, 14, 21]

        data = {
            "checkUpdate": checkUpdate,
            "checkBeta": checkBeta,
            "keepDataPeriodDays": keepDataPeriodDays,
            "serverTimezone": serverTimezone,
            "entryPage": entryPage,
            "searchEngineIndex": searchEngineIndex,
            "primaryBaseURL": primaryBaseURL,
            "steamAPIKey": steamAPIKey,
            "dnsCache": dnsCache,
            "tlsExpiryNotifyDays": tlsExpiryNotifyDays,
            "disableAuth": disableAuth
        }
        if parse_version(self.version) >= parse_version("1.18"):
            data.update({
                "trustProxy": trustProxy
            })
        return self._call('setSettings', (data, password))

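    # Usage sketch: get_settings() returns the same keys that set_settings()
    # accepts, so a read-modify-write update of a single option can look like the
    # snippet below. This is an illustration, not a guaranteed round-trip; extra
    # keys returned by other server versions would have to be filtered out first.
    #
    #     settings = api.get_settings()
    #     settings["checkBeta"] = True
    #     api.set_settings(**settings)
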
    def change_password(self, old_password: str, new_password: str) -> dict:
        """
        Change password.

        :param str old_password: Old password
        :param str new_password: New password
        :return: The server response.
        :rtype: dict
        :raises UptimeKumaException: If the server returns an error.

        Example::

            >>> api.change_password(
            ...     old_password="secret123",
            ...     new_password="321terces"
            ... )
            {
                'msg': 'Password has been updated successfully.'
            }
        """
        return self._call('changePassword', {
            "currentPassword": old_password,
            "newPassword": new_password,
        })

    def upload_backup(self, json_data: str, import_handle: str = "skip") -> dict:
        """
        Import Backup.

        :param str json_data: Backup data as json string.
        :param str, optional import_handle: Choose "skip" if you want to skip every monitor or notification with the same name. "overwrite" will delete every existing monitor and notification. "keep" will keep both., defaults to "skip"
        :return: The server response.
        :rtype: dict
        :raises UptimeKumaException: If the server returns an error.

        Example::

            >>> json_data = json.dumps({
            ...     "version": "1.17.1",
            ...     "notificationList": [],
            ...     "monitorList": [],
            ...     "proxyList": []
            ... })
            >>> api.upload_backup(
            ...     json_data=json_data,
            ...     import_handle="overwrite"
            ... )
            {
                'msg': 'Backup successfully restored.'
            }
        """
        if import_handle not in ["overwrite", "skip", "keep"]:
            raise ValueError()
        return self._call('uploadBackup', (json_data, import_handle))

    # 2FA

    def twofa_status(self) -> dict:
        """
        Get current 2FA status.

        :return: The server response.
        :rtype: dict
        :raises UptimeKumaException: If the server returns an error.

        Example::

            >>> api.twofa_status()
            {
                'status': False
            }
        """
        return self._call('twoFAStatus')

    def prepare_2fa(self, password: str) -> dict:
        """
        Prepare 2FA configuration.

        :param str password: Current password.
        :return: The server response.
        :rtype: dict
        :raises UptimeKumaException: If the server returns an error.

        Example::

            >>> password = "secret123"
            >>> r = api.prepare_2fa(password)
            >>> r
            {
                'uri': 'otpauth://totp/Uptime%20Kuma:admin?secret=NBGVQNSNNRXWQ3LJJN4DIWSWIIYW45CZJRXXORSNOY3USSKXO5RG4MDPI5ZUK5CWJFIFOVCBGZVG24TSJ5LDE2BTMRLXOZBSJF3TISA'
            }
            >>> uri = r["uri"]
            >>>
            >>> from urllib import parse
            >>> def parse_secret(uri):
            ...     query = parse.urlsplit(uri).query
            ...     params = dict(parse.parse_qsl(query))
            ...     return params["secret"]
            >>> secret = parse_secret(uri)
            >>> secret
            NBGVQNSNNRXWQ3LJJN4DIWSWIIYW45CZJRXXORSNOY3USSKXO5RG4MDPI5ZUK5CWJFIFOVCBGZVG24TSJ5LDE2BTMRLXOZBSJF3TISA
        """
        return self._call('prepare2FA', password)

    def verify_token(self, token: str, password: str) -> dict:
        """
        Verify the provided 2FA token.

        :param str token: 2FA token.
        :param str password: Current password.
        :return: The server response.
        :rtype: dict
        :raises UptimeKumaException: If the server returns an error.

        Example::

            >>> import pyotp
            >>> def generate_token(secret):
            ...     totp = pyotp.TOTP(secret)
            ...     return totp.now()
            >>> token = generate_token(secret)
            >>> token
            526564
            >>> api.verify_token(token, password)
            {
                'valid': True
            }
        """
        return self._call('verifyToken', (token, password))

    def save_2fa(self, password: str) -> dict:
        """
        Save the current 2FA configuration.

        :param str password: Current password.
        :return: The server response.
        :rtype: dict
        :raises UptimeKumaException: If the server returns an error.

        Example::

            >>> api.save_2fa(password)
            {
                'msg': '2FA Enabled.'
            }
        """
        return self._call('save2FA', password)

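    # End-to-end sketch of enabling 2FA with the three helpers above, following
    # their docstring examples (assumes the `pyotp` package and that `password`
    # holds the current account password):
    #
    #     import pyotp
    #     from urllib import parse
    #
    #     uri = api.prepare_2fa(password)["uri"]
    #     secret = dict(parse.parse_qsl(parse.urlsplit(uri).query))["secret"]
    #     api.verify_token(pyotp.TOTP(secret).now(), password)
    #     api.save_2fa(password)
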
    def disable_2fa(self, password: str) -> dict:
        """
        Disable 2FA for this user.

        :param str password: Current password.
        :return: The server response.
        :rtype: dict
        :raises UptimeKumaException: If the server returns an error.

        Example::

            >>> api.disable_2fa(password)
            {
                'msg': '2FA Disabled.'
            }
        """
        return self._call('disable2FA', password)

    # login

    def login(self, username: str = None, password: str = None, token: str = "") -> dict:
        """
        Login.

        If username and password are not provided, auto login is performed if disableAuth is enabled.

        :param str, optional username: Username. Must be None if disableAuth is enabled., defaults to None
        :param str, optional password: Password. Must be None if disableAuth is enabled., defaults to None
        :param str, optional token: 2FA Token. Required if 2FA is enabled., defaults to ""
        :return: The server response.
        :rtype: dict
        :raises UptimeKumaException: If the server returns an error.

        Example::

            >>> username = "admin"
            >>> password = "secret123"
            >>> api.login(username, password)
            {
                'token': 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VybmFtZSI6ImFkbWluIiwiaWF0IjoxNjcxMTk3MjkzfQ.lpho_LuKMnoltXOdA7-jZ98gXOU-UbEIuxMwMRm4Nz0'
            }
        """
        # autologin
        if username is None and password is None:
            with self.wait_for_event(Event.AUTO_LOGIN):
                return {}

        return self._call('login', {
            "username": username,
            "password": password,
            "token": token
        })

    def login_by_token(self, token: str) -> dict:
        """
        Login by token.

        :param str token: Login token generated by :meth:`~login`
        :return: The server response.
        :rtype: dict
        :raises UptimeKumaException: If the server returns an error.

        Example::

            >>> api.login_by_token(token)
            {}
        """
        return self._call('loginByToken', token)

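    # Usage sketch: the token returned by login() can be stored and replayed with
    # login_by_token() so credentials do not have to be sent again (illustration
    # only; secure storage of the token is up to the caller).
    #
    #     token = api.login("admin", "secret123")["token"]
    #     ...
    #     api.login_by_token(token)
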
    def logout(self) -> None:
        """
        Logout.

        :return: The server response.
        :rtype: None
        :raises UptimeKumaException: If the server returns an error.

        Example::

            >>> api.logout()
            None
        """
        return self._call('logout')

    # setup

    def need_setup(self) -> bool:
        """
        Check if the server has already been set up.

        :return: The server response.
        :rtype: bool
        :raises UptimeKumaException: If the server returns an error.

        Example::

            >>> api.need_setup()
            True
        """
        return self._call('needSetup')

    def setup(self, username: str, password: str) -> dict:
        """
        Set up the server.

        :param str username: Username
        :param str password: Password
        :return: The server response.
        :rtype: dict
        :raises UptimeKumaException: If the server returns an error.

        Example::

            >>> api.setup(username, password)
            {
                'msg': 'Added Successfully.'
            }
        """
        return self._call("setup", (username, password))

    # database

    def get_database_size(self) -> dict:
        """
        Get database size.

        :return: The server response.
        :rtype: dict
        :raises UptimeKumaException: If the server returns an error.

        Example::

            >>> api.get_database_size()
            {
                'size': 61440
            }
        """
        return self._call('getDatabaseSize')

    def shrink_database(self) -> dict:
        """
        Shrink database.

        Trigger database VACUUM for SQLite. If your database is created after 1.10.0, AUTO_VACUUM is already enabled and this action is not needed.

        :return: The server response.
        :rtype: dict
        :raises UptimeKumaException: If the server returns an error.

        Example::

            >>> api.shrink_database()
            {}
        """
        return self._call('shrinkDatabase')

    # docker host

    def get_docker_hosts(self) -> list[dict]:
        """
        Get all docker hosts.

        :return: All docker hosts.
        :rtype: list

        Example::

            >>> api.get_docker_hosts()
            [
                {
                    'dockerDaemon': '/var/run/docker.sock',
                    'dockerType': 'socket',
                    'id': 1,
                    'name': 'name 1',
                    'userID': 1
                }
            ]
        """
        r = self._get_event_data(Event.DOCKER_HOST_LIST)
        return r

    def get_docker_host(self, id_: int) -> dict:
        """
        Get a docker host.

        :param int id_: Id of the docker host to get.
        :return: The docker host.
        :rtype: dict
        :raises UptimeKumaException: If the docker host does not exist.

        Example::

            >>> api.get_docker_host(1)
            {
                'dockerDaemon': '/var/run/docker.sock',
                'dockerType': 'socket',
                'id': 1,
                'name': 'name 1',
                'userID': 1
            }
        """
        docker_hosts = self.get_docker_hosts()
        for docker_host in docker_hosts:
            if docker_host["id"] == id_:
                return docker_host
        raise UptimeKumaException("docker host does not exist")

    @append_docstring(docker_host_docstring("test"))
    def test_docker_host(self, **kwargs) -> dict:
        """
        Test a docker host.

        :return: The server response.
        :rtype: dict
        :raises UptimeKumaException: If the server returns an error.

        Example::

            >>> api.test_docker_host(
            ...     name="name 1",
            ...     dockerType=DockerType.SOCKET,
            ...     dockerDaemon="/var/run/docker.sock"
            ... )
            {
                'msg': 'Connected Successfully. Amount of containers: 10'
            }
        """
        data = _build_docker_host_data(**kwargs)
        return self._call('testDockerHost', data)

    @append_docstring(docker_host_docstring("add"))
    def add_docker_host(self, **kwargs) -> dict:
        """
        Add a docker host.

        :return: The server response.
        :rtype: dict
        :raises UptimeKumaException: If the server returns an error.

        Example::

            >>> api.add_docker_host(
            ...     name="name 1",
            ...     dockerType=DockerType.SOCKET,
            ...     dockerDaemon="/var/run/docker.sock"
            ... )
            {
                'id': 1,
                'msg': 'Saved'
            }
        """
        data = _build_docker_host_data(**kwargs)
        _convert_docker_host_input(data)
        with self.wait_for_event(Event.DOCKER_HOST_LIST):
            return self._call('addDockerHost', (data, None))

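    # Usage sketch (socket path as in the docstring examples): a docker host
    # configuration can be validated with test_docker_host() before it is
    # persisted with add_docker_host(); both raise UptimeKumaException on errors.
    #
    #     config = dict(name="name 1", dockerType=DockerType.SOCKET,
    #                   dockerDaemon="/var/run/docker.sock")
    #     api.test_docker_host(**config)
    #     api.add_docker_host(**config)
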
    @append_docstring(docker_host_docstring("edit"))
    def edit_docker_host(self, id_: int, **kwargs) -> dict:
        """
        Edit a docker host.

        :param int id_: Id of the docker host to edit.
        :return: The server response.
        :rtype: dict
        :raises UptimeKumaException: If the server returns an error.

        Example::

            >>> api.edit_docker_host(1,
            ...     name="name 2"
            ... )
            {
                'id': 1,
                'msg': 'Saved'
            }
        """
        data = self.get_docker_host(id_)
        data.update(kwargs)
        _convert_docker_host_input(data)
        with self.wait_for_event(Event.DOCKER_HOST_LIST):
            return self._call('addDockerHost', (data, id_))

    def delete_docker_host(self, id_: int) -> dict:
        """
        Delete a docker host.

        :param int id_: Id of the docker host to delete.
        :return: The server response.
        :rtype: dict
        :raises UptimeKumaException: If the server returns an error.

        Example::

            >>> api.delete_docker_host(1)
            {
                'msg': 'Deleted'
            }
        """
        with self.wait_for_event(Event.DOCKER_HOST_LIST):
            return self._call('deleteDockerHost', id_)

2023-03-20 15:14:39 +01:00
|
|
|
# maintenance
|
|
|
|
|
2023-05-01 18:57:55 +02:00
|
|
|

    def get_maintenances(self) -> list[dict]:
        """
        Get all maintenances.

        :return: All maintenances.
        :rtype: list
        :raises UptimeKumaException: If the server returns an error.

        Example::

            >>> api.get_maintenances()
            [
                {
                    "id": 1,
                    "title": "title",
                    "description": "description",
                    "strategy": "single",
                    "intervalDay": 1,
                    "active": true,
                    "dateRange": [
                        "2022-12-27 15:39:00",
                        "2022-12-30 15:39:00"
                    ],
                    "timeRange": [
                        {
                            "hours": 0,
                            "minutes": 0
                        },
                        {
                            "hours": 0,
                            "minutes": 0
                        }
                    ],
                    "weekdays": [],
                    "daysOfMonth": [],
                    "timeslotList": [
                        {
                            "startDate": "2022-12-27 22:36:00",
                            "endDate": "2022-12-29 22:36:00"
                        }
                    ],
                    "cron": "",
                    "durationMinutes": null,
                    "timezone": "Europe/Berlin",
                    "timezoneOffset": "+02:00",
                    "status": "ended"
                }
            ]
        """
        return list(self._get_event_data(Event.MAINTENANCE_LIST).values())
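
    # Added note: get_maintenances() is answered from the client-side cache that
    # the MAINTENANCE_LIST socket.io event keeps up to date, so it does not hit
    # the server; get_maintenance() below performs an explicit server call.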

    def get_maintenance(self, id_: int) -> dict:
        """
        Get a maintenance.

        :param int id_: Id of the maintenance to get.
        :return: The maintenance.
        :rtype: dict
        :raises UptimeKumaException: If the server returns an error.

        Example::

            >>> api.get_maintenance(1)
            {
                "id": 1,
                "title": "title",
                "description": "description",
                "strategy": "single",
                "intervalDay": 1,
                "active": true,
                "dateRange": [
                    "2022-12-27 15:39:00",
                    "2022-12-30 15:39:00"
                ],
                "timeRange": [
                    {
                        "hours": 0,
                        "minutes": 0
                    },
                    {
                        "hours": 0,
                        "minutes": 0
                    }
                ],
                "weekdays": [],
                "daysOfMonth": [],
                "timeslotList": [
                    {
                        "startDate": "2022-12-27 22:36:00",
                        "endDate": "2022-12-29 22:36:00"
                    }
                ],
                "cron": null,
                "duration": null,
                "durationMinutes": 0,
                "timezone": "Europe/Berlin",
                "timezoneOffset": "+02:00",
                "status": "ended"
            }
        """
        return self._call('getMaintenance', id_)["maintenance"]

    @append_docstring(maintenance_docstring("add"))
    def add_maintenance(self, **kwargs) -> dict:
        """
        Adds a maintenance.

        :return: The server response.
        :rtype: dict
        :raises UptimeKumaException: If the server returns an error.

        Example (strategy: :attr:`~.MaintenanceStrategy.MANUAL`)::

            >>> api.add_maintenance(
            ...     title="test",
            ...     description="test",
            ...     strategy=MaintenanceStrategy.MANUAL,
            ...     active=True,
            ...     intervalDay=1,
            ...     dateRange=[
            ...         "2022-12-27 00:00:00"
            ...     ],
            ...     weekdays=[],
            ...     daysOfMonth=[]
            ... )
            {
                "msg": "Added Successfully.",
                "maintenanceID": 1
            }

        Example (strategy: :attr:`~.MaintenanceStrategy.SINGLE`)::

            >>> api.add_maintenance(
            ...     title="test",
            ...     description="test",
            ...     strategy=MaintenanceStrategy.SINGLE,
            ...     active=True,
            ...     intervalDay=1,
            ...     dateRange=[
            ...         "2022-12-27 22:36:00",
            ...         "2022-12-29 22:36:00"
            ...     ],
            ...     weekdays=[],
            ...     daysOfMonth=[],
            ...     timezone="Europe/Berlin"
            ... )
            {
                "msg": "Added Successfully.",
                "maintenanceID": 1
            }

        Example (strategy: :attr:`~.MaintenanceStrategy.RECURRING_INTERVAL`)::

            >>> api.add_maintenance(
            ...     title="test",
            ...     description="test",
            ...     strategy=MaintenanceStrategy.RECURRING_INTERVAL,
            ...     active=True,
            ...     intervalDay=1,
            ...     dateRange=[
            ...         "2022-12-27 22:37:00",
            ...         "2022-12-31 22:37:00"
            ...     ],
            ...     timeRange=[
            ...         {
            ...             "hours": 2,
            ...             "minutes": 0,
            ...             "seconds": 0
            ...         },
            ...         {
            ...             "hours": 3,
            ...             "minutes": 0,
            ...             "seconds": 0
            ...         }
            ...     ],
            ...     weekdays=[],
            ...     daysOfMonth=[],
            ...     timezone="Europe/Berlin"
            ... )
            {
                "msg": "Added Successfully.",
                "maintenanceID": 1
            }

        Example (strategy: :attr:`~.MaintenanceStrategy.RECURRING_WEEKDAY`)::

            >>> api.add_maintenance(
            ...     title="test",
            ...     description="test",
            ...     strategy=MaintenanceStrategy.RECURRING_WEEKDAY,
            ...     active=True,
            ...     intervalDay=1,
            ...     dateRange=[
            ...         "2022-12-27 22:38:00",
            ...         "2022-12-31 22:38:00"
            ...     ],
            ...     timeRange=[
            ...         {
            ...             "hours": 2,
            ...             "minutes": 0,
            ...             "seconds": 0
            ...         },
            ...         {
            ...             "hours": 3,
            ...             "minutes": 0,
            ...             "seconds": 0
            ...         }
            ...     ],
            ...     weekdays=[
            ...         1,
            ...         3,
            ...         5,
            ...         0
            ...     ],
            ...     daysOfMonth=[],
            ...     timezone="Europe/Berlin"
            ... )
            {
                "msg": "Added Successfully.",
                "maintenanceID": 1
            }

        Example (strategy: :attr:`~.MaintenanceStrategy.RECURRING_DAY_OF_MONTH`)::

            >>> api.add_maintenance(
            ...     title="test",
            ...     description="test",
            ...     strategy=MaintenanceStrategy.RECURRING_DAY_OF_MONTH,
            ...     active=True,
            ...     intervalDay=1,
            ...     dateRange=[
            ...         "2022-12-27 22:39:00",
            ...         "2022-12-31 22:39:00"
            ...     ],
            ...     timeRange=[
            ...         {
            ...             "hours": 2,
            ...             "minutes": 0,
            ...             "seconds": 0
            ...         },
            ...         {
            ...             "hours": 3,
            ...             "minutes": 0,
            ...             "seconds": 0
            ...         }
            ...     ],
            ...     weekdays=[],
            ...     daysOfMonth=[
            ...         1,
            ...         10,
            ...         20,
            ...         30,
            ...         "lastDay1"
            ...     ],
            ...     timezone="Europe/Berlin"
            ... )
            {
                "msg": "Added Successfully.",
                "maintenanceID": 1
            }

        Example (strategy: :attr:`~.MaintenanceStrategy.CRON`)::

            >>> api.add_maintenance(
            ...     title="test",
            ...     description="test",
            ...     strategy=MaintenanceStrategy.CRON,
            ...     active=True,
            ...     intervalDay=1,
            ...     dateRange=[
            ...         "2022-12-27 22:39:00",
            ...         "2022-12-31 22:39:00"
            ...     ],
            ...     weekdays=[],
            ...     daysOfMonth=[],
            ...     cron="50 5 * * *",
            ...     durationMinutes=120,
            ...     timezone="Europe/Berlin"
            ... )
            {
                "msg": "Added Successfully.",
                "maintenanceID": 1
            }
        """
        data = self._build_maintenance_data(**kwargs)
        _check_arguments_maintenance(data)
        return self._call('addMaintenance', data)
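
    # Added note: self._build_maintenance_data() and _check_arguments_maintenance()
    # are private helpers assumed to be defined elsewhere in this module; the first
    # presumably maps the keyword arguments onto the payload the server expects,
    # and the second validates strategy-dependent fields (for example cron and
    # durationMinutes for MaintenanceStrategy.CRON) before 'addMaintenance' is
    # emitted.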

    @append_docstring(maintenance_docstring("edit"))
    def edit_maintenance(self, id_: int, **kwargs) -> dict:
        """
        Edits a maintenance.

        :param int id_: Id of the maintenance to edit.
        :return: The server response.
        :rtype: dict
        :raises UptimeKumaException: If the server returns an error.

        Example::

            >>> api.edit_maintenance(1,
            ...     title="test",
            ...     description="test",
            ...     strategy=MaintenanceStrategy.RECURRING_INTERVAL,
            ...     active=True,
            ...     intervalDay=1,
            ...     dateRange=[
            ...         "2022-12-27 22:37:00",
            ...         "2022-12-31 22:37:00"
            ...     ],
            ...     timeRange=[
            ...         {
            ...             "hours": 2,
            ...             "minutes": 0,
            ...             "seconds": 0
            ...         },
            ...         {
            ...             "hours": 3,
            ...             "minutes": 0,
            ...             "seconds": 0
            ...         }
            ...     ],
            ...     weekdays=[],
            ...     daysOfMonth=[]
            ... )
            {
                "msg": "Saved.",
                "maintenanceID": 1
            }
        """
        maintenance = self.get_maintenance(id_)
        maintenance.update(kwargs)
        _check_arguments_maintenance(maintenance)
        return self._call('editMaintenance', maintenance)
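
    # Added note: edit_maintenance() follows a read-modify-write pattern: it
    # fetches the current maintenance, overlays the given keyword arguments,
    # re-validates the merged data and sends the whole object back, so fields
    # that are not passed keep their previous values.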

    def delete_maintenance(self, id_: int) -> dict:
        """
        Deletes a maintenance.

        :param int id_: Id of the maintenance to delete.
        :return: The server response.
        :rtype: dict
        :raises UptimeKumaException: If the server returns an error.

        Example::

            >>> api.delete_maintenance(1)
            {
                "msg": "Deleted Successfully."
            }
        """
        return self._call('deleteMaintenance', id_)

    def pause_maintenance(self, id_: int) -> dict:
        """
        Pauses a maintenance.

        :param int id_: Id of the maintenance to pause.
        :return: The server response.
        :rtype: dict
        :raises UptimeKumaException: If the server returns an error.

        Example::

            >>> api.pause_maintenance(1)
            {
                "msg": "Paused Successfully."
            }
        """
        return self._call('pauseMaintenance', id_)

    def resume_maintenance(self, id_: int) -> dict:
        """
        Resumes a maintenance.

        :param int id_: Id of the maintenance to resume.
        :return: The server response.
        :rtype: dict
        :raises UptimeKumaException: If the server returns an error.

        Example::

            >>> api.resume_maintenance(1)
            {
                "msg": "Resume Successfully"
            }
        """
        return self._call('resumeMaintenance', id_)
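
    # Added note (assumption): pausing and resuming appear to toggle the
    # maintenance's active state on the server rather than deleting and
    # recreating it, so a paused maintenance keeps its schedule and can be
    # resumed unchanged. This client only forwards the events and does not
    # enforce that behaviour.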

    def get_monitor_maintenance(self, id_: int) -> list[dict]:
        """
        Gets all monitors of a maintenance.

        :param int id_: Id of the maintenance to get the monitors from.
        :return: All monitors of the maintenance.
        :rtype: list
        :raises UptimeKumaException: If the server returns an error.

        Example::

            >>> api.get_monitor_maintenance(1)
            [
                {
                    "id": 1,
                    "name": "monitor 1"
                },
                {
                    "id": 2,
                    "name": "monitor 2"
                }
            ]
        """
        return self._call('getMonitorMaintenance', id_)["monitors"]

    def add_monitor_maintenance(
            self,
            id_: int,
            monitors: list,
    ) -> dict:
        """
        Adds monitors to a maintenance.

        :param int id_: Id of the maintenance to add the monitors to.
        :param list monitors: The list of monitors to add to the maintenance.
        :return: The server response.
        :rtype: dict
        :raises UptimeKumaException: If the server returns an error.

        Example::

            >>> monitors = [
            ...     {
            ...         "id": 1,
            ...         "name": "monitor 1"
            ...     },
            ...     {
            ...         "id": 2,
            ...         "name": "monitor 2"
            ...     }
            ... ]
            >>> api.add_monitor_maintenance(1, monitors)
            {
                "msg": "Added Successfully."
            }
        """
        return self._call('addMonitorMaintenance', (id_, monitors))
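
    # Added note: the monitors argument uses the same shape as the list returned
    # by get_monitor_maintenance() (dicts carrying the monitor id and name), so a
    # common workflow is to create the maintenance first and then attach monitors
    # and status pages with these helper calls.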

    def get_status_page_maintenance(self, id_: int) -> list[dict]:
        """
        Gets all status pages of a maintenance.

        :param int id_: Id of the maintenance to get the status pages from.
        :return: All status pages of the maintenance.
        :rtype: list
        :raises UptimeKumaException: If the server returns an error.

        Example::

            >>> api.get_status_page_maintenance(1)
            [
                {
                    "id": 1,
                    "title": "test"
                }
            ]
        """
        return self._call('getMaintenanceStatusPage', id_)["statusPages"]

    def add_status_page_maintenance(
            self,
            id_: int,
            status_pages: list,
    ) -> dict:
        """
        Adds status pages to a maintenance.

        :param int id_: Id of the maintenance to add the status pages to.
        :param list status_pages: The list of status pages to add to the maintenance.
        :return: The server response.
        :rtype: dict
        :raises UptimeKumaException: If the server returns an error.

        Example::

            >>> status_pages = [
            ...     {
            ...         "id": 1,
            ...         "name": "status page 1"
            ...     },
            ...     {
            ...         "id": 2,
            ...         "name": "status page 2"
            ...     }
            ... ]
            >>> api.add_status_page_maintenance(1, status_pages)
            {
                "msg": "Added Successfully."
            }
        """
        return self._call('addMaintenanceStatusPage', (id_, status_pages))

    # api key

    def get_api_keys(self) -> list[dict]:
        """
        Get all api keys.

        :return: All api keys.
        :rtype: list
        :raises UptimeKumaException: If the server returns an error.

        Example::

            >>> api.get_api_keys()
            [
                {
                    "id": 1,
                    "name": "test",
                    "userID": 1,
                    "createdDate": "2023-03-20 11:15:05",
                    "active": False,
                    "expires": None,
                    "status": "inactive"
                },
                {
                    "id": 2,
                    "name": "test2",
                    "userID": 1,
                    "createdDate": "2023-03-20 11:20:29",
                    "active": True,
                    "expires": "2023-03-30 12:20:00",
                    "status": "active"
                }
            ]
        """
        # TODO: replace with getAPIKeyList?

        r = self._get_event_data(Event.API_KEY_LIST)
        int_to_bool(r, ["active"])
        return r
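
    # Added note: the api key list is read from the cached API_KEY_LIST event
    # data, and the module-level int_to_bool() converts the server's 0/1 "active"
    # field into a Python bool, which is why the example above shows True/False
    # instead of raw integers.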

    def get_api_key(self, id_: int) -> dict:
        """
        Get an api key.

        :param int id_: Id of the api key to get.
        :return: The api key.
        :rtype: dict
        :raises UptimeKumaException: If the api key does not exist.

        Example::

            >>> api.get_api_key(1)
            {
                "id": 1,
                "name": "test",
                "userID": 1,
                "createdDate": "2023-03-20 11:15:05",
                "active": False,
                "expires": None,
                "status": "inactive"
            }
        """
        api_keys = self.get_api_keys()
        for api_key in api_keys:
            if api_key["id"] == id_:
                return api_key
        raise UptimeKumaException("api key does not exist")
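
    # Added note: get_api_key() filters the cached list client-side instead of
    # requesting a single key from the server, so an unknown id raises
    # UptimeKumaException locally without any extra network traffic.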

    def add_api_key(self, name: str, expires: str, active: bool) -> dict:
        """
        Adds a new api key.

        :param str name: Name of the api key.
        :param str expires: Expiration date of the api key. Set to ``None`` to disable expiration.
        :param bool active: True to activate the api key.
        :return: The server response.
        :rtype: dict
        :raises UptimeKumaException: If the server returns an error.

        Example::

            >>> api.add_api_key(
            ...     name="test",
            ...     expires="2023-03-30 12:20:00",
            ...     active=True
            ... )
            {
                "msg": "Added Successfully.",
                "key": "uk1_9XPRjV7ilGj9CvWRKYiBPq9GLtQs74UzTxKfCxWY",
                "keyID": 1
            }

            >>> api.add_api_key(
            ...     name="test2",
            ...     expires=None,
            ...     active=True
            ... )
            {
                "msg": "Added Successfully.",
                "key": "uk2_jsB9H1Zmt9eEjycNFMTKgse1B0Vfvb944H4_aRqW",
                "keyID": 2
            }
        """
        data = {
            "name": name,
            "expires": expires,
            "active": 1 if active else 0
        }
        with self.wait_for_event(Event.API_KEY_LIST):
            return self._call('addAPIKey', data)
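
    # Added note: "active" is serialized to 0/1 because the server stores it as
    # an integer; get_api_keys() converts it back to a bool on the way out. The
    # plain-text key appears to be returned only by this call (an assumption
    # based on how Uptime Kuma handles api keys), so store it right away if it
    # is needed later.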

    def enable_api_key(self, id_: int) -> dict:
        """
        Enable an api key.

        :param int id_: Id of the api key to enable.
        :return: The server response.
        :rtype: dict
        :raises UptimeKumaException: If the server returns an error.

        Example::

            >>> api.enable_api_key(1)
            {
                "msg": "Enabled Successfully"
            }
        """
        with self.wait_for_event(Event.API_KEY_LIST):
            return self._call('enableAPIKey', id_)

    def disable_api_key(self, id_: int) -> dict:
        """
        Disable an api key.

        :param int id_: Id of the api key to disable.
        :return: The server response.
        :rtype: dict
        :raises UptimeKumaException: If the server returns an error.

        Example::

            >>> api.disable_api_key(1)
            {
                "msg": "Disabled Successfully."
            }
        """
        with self.wait_for_event(Event.API_KEY_LIST):
            return self._call('disableAPIKey', id_)

    def delete_api_key(self, id_: int) -> dict:
        """
        Delete an api key.

        :param int id_: Id of the api key to delete.
        :return: The server response.
        :rtype: dict
        :raises UptimeKumaException: If the server returns an error.

        Example::

            >>> api.delete_api_key(1)
            {
                "msg": "Deleted Successfully."
            }
        """
        with self.wait_for_event(Event.API_KEY_LIST):
            return self._call('deleteAPIKey', id_)