2017-04-04 18:11:15 +02:00
|
|
|
from datetime import date, datetime, time, timedelta
|
2017-04-03 03:12:52 +02:00
|
|
|
|
2018-10-06 12:35:49 +02:00
|
|
|
from dateutil.relativedelta import relativedelta
|
|
|
|
from django.utils import timezone
|
2017-01-17 17:16:53 +01:00
|
|
|
|
2020-03-09 15:10:02 +01:00
|
|
|
KFET_WAKES_UP_AT = time(5, 0) # The K-Fêt opens at 5 a.m. (UTC)
|
2017-01-17 17:16:53 +01:00
|
|
|
|
2017-04-03 00:40:52 +02:00
|
|
|
|
|
|
|
def kfet_day(year, month, day, start_at=KFET_WAKES_UP_AT):
    """Return the `datetime` at which the given 'K-Fêt day' begins.

    The K-Fêt day of a calendar date starts at `start_at` on that date.
    """
    calendar_day = date(year, month, day)
    return datetime.combine(calendar_day, start_at)
|
2017-04-03 00:40:52 +02:00
|
|
|
|
|
|
|
|
|
|
|
def to_kfet_day(dt, start_at=KFET_WAKES_UP_AT):
    """Return the start of the 'K-Fêt day' containing `dt`.

    A K-Fêt day runs from `start_at` on one calendar date to `start_at` on
    the next: times earlier than `start_at` belong to the previous day's
    K-Fêt day.
    """
    # Bug fix: forward `start_at` to `kfet_day` so that a custom opening
    # time is applied consistently to both the generated day start and the
    # comparison below (previously the default was always used here).
    kfet_dt = kfet_day(year=dt.year, month=dt.month, day=dt.day, start_at=start_at)
    if dt.time() < start_at:
        kfet_dt -= timedelta(days=1)
    return kfet_dt
|
|
|
|
|
|
|
|
|
2017-04-04 18:11:15 +02:00
|
|
|
class Scale:
    """
    Subdivide a queryset (e.g. operations) over a given time scale, using a
    fixed time step.

    The scale can be specified:
    - by a begin and an end, or
    - by a begin/an end and a number of subdivisions (`n_steps`).

    If the `std_chunk` flag is set, the start of the first subdivision is
    normalized with `get_chunk_start` (which concrete subclasses must
    provide, along with `name`, `step` and `label_fmt`).
    """

    # Identifier used by `by_name` to look the scale class up.
    name = None
    # Length of one subdivision (timedelta/relativedelta); set by subclasses.
    step = None

    def __init__(self, n_steps=0, begin=None, end=None, last=False, std_chunk=True):
        """
        Build the scale boundaries from two of: `n_steps`, `begin`, `end`.

        `last=True` is shorthand for `end=timezone.now()`; combine it with
        `n_steps` to get the most recent `n_steps` subdivisions.

        Raises:
            ValueError: if fewer than two of the arguments are given.
        """
        self.std_chunk = std_chunk
        if last:
            end = timezone.now()
        if std_chunk:
            # Align boundaries on "standard" chunk starts (e.g. K-Fêt days).
            if begin is not None:
                begin = self.get_chunk_start(begin)
            if end is not None:
                # Step once past the chunk containing `end` so it is included.
                end = self.do_step(self.get_chunk_start(end))

        if begin is not None and n_steps:
            self.begin = begin
            self.end = self.do_step(self.begin, n_steps=n_steps)
        elif end is not None and n_steps:
            self.end = end
            self.begin = self.do_step(self.end, n_steps=-n_steps)
        elif begin is not None and end is not None:
            self.begin = begin
            self.end = end
        else:
            # ValueError is more precise than a bare Exception and remains
            # backward-compatible with callers catching Exception.
            raise ValueError(
                "Two of these args must be specified: "
                "n_steps, begin, end; "
                "or use last and n_steps"
            )

        self._gen_datetimes()

    @staticmethod
    def by_name(name):
        """Return the Scale subclass whose `name` matches, or None."""
        for cls in Scale.__subclasses__():
            if cls.name == name:
                return cls
        return None

    def __getitem__(self, i):
        """Return the (begin, end) pair of the i-th subdivision."""
        return self.datetimes[i], self.datetimes[i + 1]

    def __len__(self):
        """Number of subdivisions."""
        return len(self.datetimes) - 1

    def do_step(self, dt, n_steps=1):
        """Shift `dt` by `n_steps` steps (`n_steps` may be negative)."""
        return dt + self.step * n_steps

    def _gen_datetimes(self):
        """Compute the subdivision boundaries (fencepost datetimes)."""
        datetimes = [self.begin]
        tmp = self.begin
        while tmp < self.end:
            tmp = self.do_step(tmp)
            datetimes.append(tmp)
        self.datetimes = datetimes

    def get_labels(self, label_fmt=None):
        """
        Format one label per subdivision from its begin datetime.

        `label_fmt` may contain `{i}` (subdivision index) and `{rev_i}`
        (index counted from the end); it defaults to `self.label_fmt`.
        """
        if label_fmt is None:
            label_fmt = self.label_fmt
        return [
            begin.strftime(label_fmt.format(i=i, rev_i=len(self) - i))
            for i, (begin, end) in enumerate(self)
        ]

    def chunkify_qs(self, qs, field="at", aggregate=None):
        """
        Split a queryset into per-subdivision chunks, with optional
        aggregation of the results.

        NB: this could be done in a single query, at the cost of readability.
        NOTE(review): both bounds use inclusive lookups (gte/lte), so an
        object falling exactly on a boundary is counted in two adjacent
        chunks — confirm whether `__lt` was intended for the upper bound.
        """
        begin_f = "{}__gte".format(field)
        end_f = "{}__lte".format(field)
        chunks = [qs.filter(**{begin_f: begin, end_f: end}) for begin, end in self]
        if aggregate is None:
            return chunks
        else:
            return [chunk.aggregate(agg=aggregate)["agg"] or 0 for chunk in chunks]
|
2017-04-13 14:11:44 +02:00
|
|
|
|
2017-04-03 00:40:52 +02:00
|
|
|
|
2017-04-04 18:11:15 +02:00
|
|
|
class DayScale(Scale):
    """Scale whose subdivisions are single K-Fêt days."""

    name = "day"
    step = timedelta(days=1)
    label_fmt = "%A"

    @classmethod
    def get_chunk_start(cls, dt):
        # Snap to the start of the K-Fêt day containing `dt`.
        return to_kfet_day(dt)
|
|
|
|
|
|
|
|
|
2017-04-04 18:11:15 +02:00
|
|
|
class WeekScale(Scale):
    """Scale whose subdivisions are weeks of K-Fêt days, starting on Monday."""

    name = "week"
    step = timedelta(days=7)
    label_fmt = "%d %b."

    @classmethod
    def get_chunk_start(cls, dt):
        # Rewind the K-Fêt day of `dt` back to the Monday of its week.
        day_start = to_kfet_day(dt)
        return day_start - timedelta(days=day_start.weekday())
|
2017-04-03 00:40:52 +02:00
|
|
|
|
|
|
|
|
2017-04-04 18:11:15 +02:00
|
|
|
class MonthScale(Scale):
    """Scale whose subdivisions are calendar months."""

    name = "month"
    step = relativedelta(months=1)
    label_fmt = "%B"

    @classmethod
    def get_chunk_start(cls, dt):
        # Snap to day 1 of the month of `dt`'s K-Fêt day.
        return to_kfet_day(dt).replace(day=1)
|
2016-12-09 21:45:34 +01:00
|
|
|
|
|
|
|
|
2020-09-16 17:16:49 +02:00
|
|
|
# Bug fix: the choices were previously a generator expression, i.e. a
# one-shot iterable that is empty after its first full iteration; a tuple
# can be iterated any number of times (as Django field choices require).
# NOTE: must be evaluated after all Scale subclasses are defined.
SCALE_CLASS_CHOICES = tuple((cls.name, cls.name) for cls in Scale.__subclasses__())
SCALE_DICT = {cls.name: cls for cls in Scale.__subclasses__()}
|
|
|
|
|
|
|
|
|
|
|
|
def scale_url_params(scales_def):
    """
    Convert a scales specification into GET parameters usable by ScaleMixin.

    The specification has the following form:
    - scales_def: list of tuples (label, scale, scale_args, default)
      - scale_args: arguments passed on to Scale.__init__
      - default: whether this graph is the one shown by default
    """
    params_list = []
    for label, scale_cls, scale_args, default in scales_def:
        url_params = {"scale-name": scale_cls.name}
        for key, value in scale_args.items():
            url_params["scale-" + key] = value
        params_list.append(
            {"label": label, "url_params": url_params, "default": default}
        )
    return params_list
|