diff --git a/CHANGELOG.md b/CHANGELOG.md
index 6af67f68..639a9a0b 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -27,6 +27,7 @@ Liste des changements notables dans GestioCOF depuis la version 0.1 (septembre
- Les transferts apparaissent maintenant dans l'historique K-Fêt et l'historique
personnel.
+- Les statistiques K-Fêt remontent à plus d'un an (et le code est simplifié)
## Version 0.4.1 - 17/01/2020
diff --git a/kfet/static/kfet/js/statistic.js b/kfet/static/kfet/js/statistic.js
index 9baa08c4..4da17672 100644
--- a/kfet/static/kfet/js/statistic.js
+++ b/kfet/static/kfet/js/statistic.js
@@ -1,28 +1,15 @@
-(function($){
+(function ($) {
window.StatsGroup = function (url, target) {
// a class to properly display statictics
// url : points to an ObjectResumeStat that lists the options through JSON
// target : element of the DOM where to put the stats
- var self = this;
var element = $(target);
    var content = $("<div>");
var buttons;
- function dictToArray (dict, start) {
- // converts the dicts returned by JSONResponse to Arrays
- // necessary because for..in does not guarantee the order
- if (start === undefined) start = 0;
- var array = new Array();
- for (var k in dict) {
- array[k] = dict[k];
- }
- array.splice(0, start);
- return array;
- }
-
- function handleTimeChart (data) {
+ function handleTimeChart(data) {
// reads the balance data and put it into chartjs formatting
chart_data = new Array();
for (var i = 0; i < data.length; i++) {
@@ -36,7 +23,7 @@
return chart_data;
}
- function showStats () {
+ function showStats() {
// CALLBACK : called when a button is selected
// shows the focus on the correct button
@@ -44,24 +31,20 @@
$(this).addClass("focus");
// loads data and shows it
- $.getJSON(this.stats_target_url, {format: 'json'}, displayStats);
+ $.getJSON(this.stats_target_url, displayStats);
}
- function displayStats (data) {
+ function displayStats(data) {
// reads the json data and updates the chart display
var chart_datasets = [];
- var charts = dictToArray(data.charts);
-
// are the points indexed by timestamps?
var is_time_chart = data.is_time_chart || false;
// reads the charts data
- for (var i = 0; i < charts.length; i++) {
- var chart = charts[i];
-
+ for (let chart of data.charts) {
// format the data
- var chart_data = is_time_chart ? handleTimeChart(chart.values) : dictToArray(chart.values, 0);
+ var chart_data = is_time_chart ? handleTimeChart(chart.values) : chart.values;
chart_datasets.push(
{
@@ -76,29 +59,24 @@
// options for chartjs
var chart_options =
- {
- responsive: true,
- maintainAspectRatio: false,
- tooltips: {
- mode: 'index',
- intersect: false,
- },
- hover: {
- mode: 'nearest',
- intersect: false,
- }
- };
+ {
+ responsive: true,
+ maintainAspectRatio: false,
+ tooltips: {
+ mode: 'index',
+ intersect: false,
+ },
+ hover: {
+ mode: 'nearest',
+ intersect: false,
+ }
+ };
// additionnal options for time-indexed charts
if (is_time_chart) {
chart_options['scales'] = {
xAxes: [{
type: "time",
- display: true,
- scaleLabel: {
- display: false,
- labelString: 'Date'
- },
time: {
tooltipFormat: 'll HH:mm',
displayFormats: {
@@ -115,26 +93,19 @@
}
}],
- yAxes: [{
- display: true,
- scaleLabel: {
- display: false,
- labelString: 'value'
- }
- }]
};
}
// global object for the options
var chart_model =
- {
- type: 'line',
- options: chart_options,
- data: {
- labels: data.labels || [],
- datasets: chart_datasets,
- }
- };
+ {
+ type: 'line',
+ options: chart_options,
+ data: {
+ labels: data.labels || [],
+ datasets: chart_datasets,
+ }
+ };
// saves the previous charts to be destroyed
var prev_chart = content.children();
@@ -151,27 +122,30 @@
}
// initialize the interface
- function initialize (data) {
+ function initialize(data) {
// creates the bar with the buttons
        buttons = $("<ul>",
- {class: "nav stat-nav",
- "aria-label": "select-period"});
+ {
+ class: "nav stat-nav",
+ "aria-label": "select-period"
+ });
var to_click;
- var context = data.stats;
- for (var i = 0; i < context.length; i++) {
+ for (let stat of data.stats) {
// creates the button
-            var btn_wrapper = $("<li>", {role:"presentation"});
+            var btn_wrapper = $("<li>", { role: "presentation" });
            var btn = $("<button>",
- {class: "btn btn-nav",
- type: "button"})
- .text(context[i].label)
- .prop("stats_target_url", context[i].url)
+ {
+ class: "btn btn-nav",
+ type: "button"
+ })
+ .text(stat.label)
+ .prop("stats_target_url", stat.url)
.on("click", showStats);
// saves the default option to select
- if (i == data.default_stat || i == 0)
+ if (stat.default)
to_click = btn;
// append the elements to the parent
@@ -189,7 +163,7 @@
// constructor
(function () {
- $.getJSON(url, {format: 'json'}, initialize);
+ $.getJSON(url, initialize);
})();
};
})(jQuery);
diff --git a/kfet/statistic.py b/kfet/statistic.py
index 02171267..b2c1d882 100644
--- a/kfet/statistic.py
+++ b/kfet/statistic.py
@@ -1,21 +1,22 @@
from datetime import date, datetime, time, timedelta
-import pytz
from dateutil.parser import parse as dateutil_parse
from dateutil.relativedelta import relativedelta
-from django.db.models import Sum
from django.utils import timezone
-KFET_WAKES_UP_AT = time(7, 0)
+KFET_WAKES_UP_AT = time(5, 0) # La K-Fêt ouvre à 5h (UTC) du matin
def kfet_day(year, month, day, start_at=KFET_WAKES_UP_AT):
- """datetime wrapper with time offset."""
- naive = datetime.combine(date(year, month, day), start_at)
- return pytz.timezone("Europe/Paris").localize(naive, is_dst=None)
+ """Étant donné une date, renvoie un objet `datetime`
+ correspondant au début du 'jour K-Fêt' correspondant."""
+ return datetime.combine(date(year, month, day), start_at)
def to_kfet_day(dt, start_at=KFET_WAKES_UP_AT):
+ """
+ Retourne le 'jour K-Fêt' correspondant à un objet `datetime` donné
+ """
kfet_dt = kfet_day(year=dt.year, month=dt.month, day=dt.day)
if dt.time() < start_at:
kfet_dt -= timedelta(days=1)
@@ -23,6 +24,17 @@ def to_kfet_day(dt, start_at=KFET_WAKES_UP_AT):
class Scale(object):
+ """
+ Classe utilisée pour subdiviser un QuerySet (e.g. des opérations) sur
+ une échelle de temps donnée, avec un pas de temps fixe.
+ Cette échelle peut être spécifiée :
+ - par un début et une fin,
+ - par un début/une fin et un nombre de subdivisions.
+
+ Si le booléen `std_chunk` est activé, le début de la première subdivision
+ est généré via la fonction `get_chunk_start`.
+ """
+
name = None
step = None
@@ -52,7 +64,7 @@ class Scale(object):
"or use last and n_steps"
)
- self.datetimes = self.get_datetimes()
+ self._gen_datetimes()
@staticmethod
def by_name(name):
@@ -61,9 +73,6 @@ class Scale(object):
return cls
return None
- def get_from(self, dt):
- return self.std_chunk and self.get_chunk_start(dt) or dt
-
def __getitem__(self, i):
return self.datetimes[i], self.datetimes[i + 1]
@@ -73,13 +82,13 @@ class Scale(object):
def do_step(self, dt, n_steps=1):
return dt + self.step * n_steps
- def get_datetimes(self):
+ def _gen_datetimes(self):
datetimes = [self.begin]
tmp = self.begin
while tmp < self.end:
tmp = self.do_step(tmp)
datetimes.append(tmp)
- return datetimes
+ self.datetimes = datetimes
def get_labels(self, label_fmt=None):
if label_fmt is None:
@@ -89,93 +98,18 @@ class Scale(object):
for i, (begin, end) in enumerate(self)
]
- def chunkify_qs(self, qs, field=None):
- if field is None:
- field = "at"
+ def chunkify_qs(self, qs, field="at", aggregate=None):
+ """
+ Découpe un queryset en subdivisions, avec agrégation optionnelle des résultats
+ NB : on pourrait faire ça en une requête, au détriment de la lisibilité...
+ """
begin_f = "{}__gte".format(field)
end_f = "{}__lte".format(field)
- return [qs.filter(**{begin_f: begin, end_f: end}) for begin, end in self]
-
- def get_by_chunks(self, qs, field_callback=None, field_db="at"):
- """Objects of queryset ranked according to the scale.
-
- Returns a generator whose each item, corresponding to a scale chunk,
- is a generator of objects from qs for this chunk.
-
- Args:
- qs: Queryset of source objects, must be ordered *first* on the
- same field returned by `field_callback`.
- field_callback: Callable which gives value from an object used
- to compare against limits of the scale chunks.
- Default to: lambda obj: getattr(obj, field_db)
- field_db: Used to filter against `scale` limits.
- Default to 'at'.
-
- Examples:
- If queryset `qs` use `values()`, `field_callback` must be set and
- could be: `lambda d: d['at']`
- If `field_db` use foreign attributes (eg with `__`), it should be
- something like: `lambda obj: obj.group.at`.
-
- """
- if field_callback is None:
-
- def field_callback(obj):
- return getattr(obj, field_db)
-
- begin_f = "{}__gte".format(field_db)
- end_f = "{}__lte".format(field_db)
-
- qs = qs.filter(**{begin_f: self.begin, end_f: self.end})
-
- obj_iter = iter(qs)
-
- last_obj = None
-
- def _objects_until(obj_iter, field_callback, end):
- """Generator of objects until `end`.
-
- Ends if objects source is empty or when an object not verifying
- field_callback(obj) <= end is met.
-
- If this object exists, it is stored in `last_obj` which is found
- from outer scope.
- Also, if this same variable is non-empty when the function is
- called, it first yields its content.
-
- Args:
- obj_iter: Source used to get objects.
- field_callback: Returned value, when it is called on an object
- will be used to test ordering against `end`.
- end
-
- """
- nonlocal last_obj
-
- if last_obj is not None:
- yield last_obj
- last_obj = None
-
- for obj in obj_iter:
- if field_callback(obj) <= end:
- yield obj
- else:
- last_obj = obj
- return
-
- for begin, end in self:
- # forward last seen object, if it exists, to the right chunk,
- # and fill with empty generators for intermediate chunks of scale
- if last_obj is not None:
- if field_callback(last_obj) > end:
- yield iter(())
- continue
-
- # yields generator for this chunk
- # this set last_obj to None if obj_iter reach its end, otherwise
- # it's set to the first met object from obj_iter which doesn't
- # belong to this chunk
- yield _objects_until(obj_iter, field_callback, end)
+ chunks = [qs.filter(**{begin_f: begin, end_f: end}) for begin, end in self]
+ if aggregate is None:
+ return chunks
+ else:
+ return [chunk.aggregate(agg=aggregate)["agg"] or 0 for chunk in chunks]
class DayScale(Scale):
@@ -191,7 +125,7 @@ class DayScale(Scale):
class WeekScale(Scale):
name = "week"
step = timedelta(days=7)
- label_fmt = "Semaine %W"
+ label_fmt = "%d %b."
@classmethod
def get_chunk_start(cls, dt):
@@ -210,111 +144,67 @@ class MonthScale(Scale):
return to_kfet_day(dt).replace(day=1)
-def stat_manifest(
- scales_def=None, scale_args=None, scale_prefix=None, **other_url_params
-):
- if scale_prefix is None:
- scale_prefix = "scale_"
- if scales_def is None:
- scales_def = []
- if scale_args is None:
- scale_args = {}
- manifest = []
- for label, cls in scales_def:
- url_params = {scale_prefix + "name": cls.name}
- url_params.update(
- {scale_prefix + key: value for key, value in scale_args.items()}
- )
+def scale_url_params(scales_def, **other_url_params):
+ """
+ Convertit une spécification de scales en arguments GET utilisables par ScaleMixin.
+ La spécification est de la forme suivante :
+ - scales_def : liste de champs de la forme (label, scale)
+ - scale_args : arguments à passer à Scale.__init__
+ - other_url_params : paramètres GET supplémentaires
+ """
+
+ params_list = []
+ for label, cls, params, default in scales_def:
+ url_params = {"scale_name": cls.name}
+ url_params.update({"scale_" + key: value for key, value in params.items()})
url_params.update(other_url_params)
- manifest.append(dict(label=label, url_params=url_params))
- return manifest
+ params_list.append(dict(label=label, url_params=url_params, default=default))
-
-def last_stats_manifest(
- scales_def=None, scale_args=None, scale_prefix=None, **url_params
-):
- scales_def = [
- ("Derniers mois", MonthScale),
- ("Dernières semaines", WeekScale),
- ("Derniers jours", DayScale),
- ]
- if scale_args is None:
- scale_args = {}
- scale_args.update(dict(last=True, n_steps=7))
- return stat_manifest(
- scales_def=scales_def,
- scale_args=scale_args,
- scale_prefix=scale_prefix,
- **url_params
- )
-
-
-# Étant donné un queryset d'operations
-# rend la somme des article_nb
-def tot_ventes(queryset):
- res = queryset.aggregate(Sum("article_nb"))["article_nb__sum"]
- return res and res or 0
+ return params_list
class ScaleMixin(object):
- scale_args_prefix = "scale_"
-
- def get_scale_args(self, params=None, prefix=None):
- """Retrieve scale args from params.
-
- Should search the same args of Scale constructor.
-
- Args:
- params (dict, optional): Scale args are searched in this.
- Default to GET params of request.
- prefix (str, optional): Appended at the begin of scale args names.
- Default to `self.scale_args_prefix`.
-
+ def parse_scale_args(self):
+ """
+ Récupère les paramètres de subdivision encodés dans une requête GET.
"""
- if params is None:
- params = self.request.GET
- if prefix is None:
- prefix = self.scale_args_prefix
-
scale_args = {}
- name = params.get(prefix + "name", None)
+ name = self.request.GET.get("scale_name", None)
if name is not None:
scale_args["name"] = name
- n_steps = params.get(prefix + "n_steps", None)
+ n_steps = self.request.GET.get("scale_n_steps", None)
if n_steps is not None:
scale_args["n_steps"] = int(n_steps)
- begin = params.get(prefix + "begin", None)
+ begin = self.request.GET.get("scale_begin", None)
if begin is not None:
scale_args["begin"] = dateutil_parse(begin)
- end = params.get(prefix + "send", None)
+ end = self.request.GET.get("scale_send", None)
if end is not None:
scale_args["end"] = dateutil_parse(end)
- last = params.get(prefix + "last", None)
+ last = self.request.GET.get("scale_last", None)
if last is not None:
scale_args["last"] = last in ["true", "True", "1"] and True or False
return scale_args
def get_context_data(self, *args, **kwargs):
- context = super().get_context_data(*args, **kwargs)
+ # On n'hérite pas
- scale_args = self.get_scale_args()
+ scale_args = self.parse_scale_args()
scale_name = scale_args.pop("name", None)
scale_cls = Scale.by_name(scale_name)
if scale_cls is None:
- scale = self.get_default_scale()
+ self.scale = self.get_default_scale()
else:
- scale = scale_cls(**scale_args)
+ self.scale = scale_cls(**scale_args)
- self.scale = scale
- context["labels"] = scale.get_labels()
- return context
+ return {"labels": self.scale.get_labels()}
def get_default_scale(self):
return DayScale(n_steps=7, last=True)
diff --git a/kfet/tests/test_views.py b/kfet/tests/test_views.py
index 08d2cb32..bcd9a9b4 100644
--- a/kfet/tests/test_views.py
+++ b/kfet/tests/test_views.py
@@ -628,37 +628,51 @@ class AccountStatOperationListViewTests(ViewTestCaseMixin, TestCase):
expected_stats = [
{
- "label": "Derniers mois",
+ "label": "Tout le temps",
"url": {
"path": base_url,
"query": {
- "scale_n_steps": ["7"],
+ "types": ["['purchase']"],
"scale_name": ["month"],
+ "scale_last": ["True"],
+ "scale_begin": [
+ self.accounts["user1"].created_at.isoformat(" ")
+ ],
+ },
+ },
+ },
+ {
+ "label": "1 an",
+ "url": {
+ "path": base_url,
+ "query": {
"types": ["['purchase']"],
+ "scale_n_steps": ["12"],
+ "scale_name": ["month"],
"scale_last": ["True"],
},
},
},
{
- "label": "Dernières semaines",
+ "label": "3 mois",
"url": {
"path": base_url,
"query": {
- "scale_n_steps": ["7"],
+ "types": ["['purchase']"],
+ "scale_n_steps": ["13"],
"scale_name": ["week"],
- "types": ["['purchase']"],
"scale_last": ["True"],
},
},
},
{
- "label": "Derniers jours",
+ "label": "2 semaines",
"url": {
"path": base_url,
"query": {
- "scale_n_steps": ["7"],
- "scale_name": ["day"],
"types": ["['purchase']"],
+ "scale_n_steps": ["14"],
+ "scale_name": ["day"],
"scale_last": ["True"],
},
},
@@ -1524,6 +1538,21 @@ class ArticleStatSalesListViewTests(ViewTestCaseMixin, TestCase):
self.article = Article.objects.create(
name="Article", category=ArticleCategory.objects.create(name="Category")
)
+ checkout = Checkout.objects.create(
+ name="Checkout",
+ created_by=self.accounts["team"],
+ balance=5,
+ valid_from=self.now,
+ valid_to=self.now + timedelta(days=5),
+ )
+
+ self.opegroup = create_operation_group(
+ on_acc=self.accounts["user"],
+ checkout=checkout,
+ content=[
+ {"type": Operation.PURCHASE, "article": self.article, "article_nb": 2},
+ ],
+ )
def test_ok(self):
r = self.client.get(self.url)
@@ -1535,33 +1564,44 @@ class ArticleStatSalesListViewTests(ViewTestCaseMixin, TestCase):
expected_stats = [
{
- "label": "Derniers mois",
+ "label": "Tout le temps",
"url": {
"path": base_url,
"query": {
- "scale_n_steps": ["7"],
+ "scale_name": ["month"],
+ "scale_last": ["True"],
+ "scale_begin": [self.opegroup.at.isoformat(" ")],
+ },
+ },
+ },
+ {
+ "label": "1 an",
+ "url": {
+ "path": base_url,
+ "query": {
+ "scale_n_steps": ["12"],
"scale_name": ["month"],
"scale_last": ["True"],
},
},
},
{
- "label": "Dernières semaines",
+ "label": "3 mois",
"url": {
"path": base_url,
"query": {
- "scale_n_steps": ["7"],
+ "scale_n_steps": ["13"],
"scale_name": ["week"],
"scale_last": ["True"],
},
},
},
{
- "label": "Derniers jours",
+ "label": "2 semaines",
"url": {
"path": base_url,
"query": {
- "scale_n_steps": ["7"],
+ "scale_n_steps": ["14"],
"scale_name": ["day"],
"scale_last": ["True"],
},
diff --git a/kfet/views.py b/kfet/views.py
index a04cda24..b6c49f72 100644
--- a/kfet/views.py
+++ b/kfet/views.py
@@ -2,6 +2,7 @@ import ast
import heapq
import statistics
from collections import defaultdict
+from datetime import timedelta
from decimal import Decimal
from typing import List
from urllib.parse import urlencode
@@ -76,7 +77,7 @@ from kfet.models import (
Transfer,
TransferGroup,
)
-from kfet.statistic import ScaleMixin, WeekScale, last_stats_manifest
+from kfet.statistic import DayScale, MonthScale, ScaleMixin, WeekScale, scale_url_params
from .auth import KFET_GENERIC_TRIGRAMME
from .auth.views import ( # noqa
@@ -2199,7 +2200,7 @@ class SupplierUpdate(SuccessMessageMixin, UpdateView):
# Vues génériques
# ---------------
# source : docs.djangoproject.com/fr/1.10/topics/class-based-views/mixins/
-class JSONResponseMixin(object):
+class JSONResponseMixin:
"""
A mixin that can be used to render a JSON response.
"""
@@ -2228,34 +2229,39 @@ class JSONDetailView(JSONResponseMixin, BaseDetailView):
return self.render_to_json_response(context)
-class PkUrlMixin(object):
- def get_object(self, *args, **kwargs):
- get_by = self.kwargs.get(self.pk_url_kwarg)
- return get_object_or_404(self.model, **{self.pk_url_kwarg: get_by})
-
-
class SingleResumeStat(JSONDetailView):
- """Manifest for a kind of a stat about an object.
+ """
+ Génère l'interface de sélection pour les statistiques d'un compte/article.
+ L'interface est constituée d'une série de boutons, qui récupèrent et graphent
+ des statistiques du même type, sur le même objet mais avec des arguments différents.
- Returns JSON whose payload is an array containing descriptions of a stat:
- url to retrieve data, label, ...
+ Attributs :
+ - url_stat : URL où récupérer les statistiques
+ - stats : liste de dictionnaires avec les clés suivantes :
+ - label : texte du bouton
+ - url_params : paramètres GET à rajouter à `url_stat`
+ - default : si `True`, graphe à montrer par défaut
+ On peut aussi définir `stats` dynamiquement, via la fonction `get_stats`.
"""
- id_prefix = ""
- nb_default = 0
-
- stats = []
url_stat = None
+ stats = []
+
+ def get_stats(self):
+ return self.stats
def get_context_data(self, **kwargs):
# On n'hérite pas
- object_id = self.object.id
context = {}
stats = []
- prefix = "{}_{}".format(self.id_prefix, object_id)
- for i, stat_def in enumerate(self.stats):
+ # On peut avoir récupéré self.object via pk ou slug
+ if self.pk_url_kwarg in self.kwargs:
url_pk = getattr(self.object, self.pk_url_kwarg)
+ else:
+ url_pk = getattr(self.object, self.slug_url_kwarg)
+
+ for stat_def in self.get_stats():
url_params_d = stat_def.get("url_params", {})
if len(url_params_d) > 0:
url_params = "?{}".format(urlencode(url_params_d))
@@ -2264,42 +2270,21 @@ class SingleResumeStat(JSONDetailView):
stats.append(
{
"label": stat_def["label"],
- "btn": "btn_{}_{}".format(prefix, i),
"url": "{url}{params}".format(
url=reverse(self.url_stat, args=[url_pk]), params=url_params
),
+ "default": stat_def.get("default", False),
}
)
- context["id_prefix"] = prefix
- context["content_id"] = "content_%s" % prefix
context["stats"] = stats
- context["default_stat"] = self.nb_default
- context["object_id"] = object_id
return context
-# -----------------------
-# Evolution Balance perso
-# -----------------------
-ID_PREFIX_ACC_BALANCE = "balance_acc"
-
-
-class AccountStatBalanceList(PkUrlMixin, SingleResumeStat):
- """Manifest for balance stats of an account."""
-
- model = Account
- context_object_name = "account"
- pk_url_kwarg = "trigramme"
- url_stat = "kfet.account.stat.balance"
- id_prefix = ID_PREFIX_ACC_BALANCE
- stats = [
- {"label": "Tout le temps"},
- {"label": "1 an", "url_params": {"last_days": 365}},
- {"label": "6 mois", "url_params": {"last_days": 183}},
- {"label": "3 mois", "url_params": {"last_days": 90}},
- {"label": "30 jours", "url_params": {"last_days": 30}},
- ]
- nb_default = 0
+class UserAccountMixin:
+ """
+ Mixin qui vérifie que le compte traité par la vue est celui de l'utilisateur·ice
+ actuel·le. Dans le cas contraire, renvoie un Http404.
+ """
def get_object(self, *args, **kwargs):
obj = super().get_object(*args, **kwargs)
@@ -2307,21 +2292,41 @@ class AccountStatBalanceList(PkUrlMixin, SingleResumeStat):
raise Http404
return obj
- @method_decorator(login_required)
- def dispatch(self, *args, **kwargs):
- return super().dispatch(*args, **kwargs)
+
+# -----------------------
+# Evolution Balance perso
+# -----------------------
-class AccountStatBalance(PkUrlMixin, JSONDetailView):
- """Datasets of balance of an account.
-
- Operations and Transfers are taken into account.
-
+@method_decorator(login_required, name="dispatch")
+class AccountStatBalanceList(UserAccountMixin, SingleResumeStat):
+ """
+ Menu général pour l'historique de balance d'un compte
"""
model = Account
- pk_url_kwarg = "trigramme"
- context_object_name = "account"
+ slug_url_kwarg = "trigramme"
+ slug_field = "trigramme"
+ url_stat = "kfet.account.stat.balance"
+ stats = [
+ {"label": "Tout le temps"},
+ {"label": "1 an", "url_params": {"last_days": 365}},
+ {"label": "6 mois", "url_params": {"last_days": 183}},
+ {"label": "3 mois", "url_params": {"last_days": 90}, "default": True},
+ {"label": "30 jours", "url_params": {"last_days": 30}},
+ ]
+
+
+@method_decorator(login_required, name="dispatch")
+class AccountStatBalance(UserAccountMixin, JSONDetailView):
+ """
+ Statistiques (JSON) d'historique de balance d'un compte.
+ Prend en compte les opérations et transferts sur la période donnée.
+ """
+
+ model = Account
+ slug_url_kwarg = "trigramme"
+ slug_field = "trigramme"
def get_changes_list(self, last_days=None, begin_date=None, end_date=None):
account = self.object
@@ -2420,57 +2425,50 @@ class AccountStatBalance(PkUrlMixin, JSONDetailView):
# TODO: offset
return context
- def get_object(self, *args, **kwargs):
- obj = super().get_object(*args, **kwargs)
- if self.request.user != obj.user:
- raise Http404
- return obj
-
- @method_decorator(login_required)
- def dispatch(self, *args, **kwargs):
- return super().dispatch(*args, **kwargs)
-
# ------------------------
# Consommation personnelle
# ------------------------
-ID_PREFIX_ACC_LAST = "last_acc"
-ID_PREFIX_ACC_LAST_DAYS = "last_days_acc"
-ID_PREFIX_ACC_LAST_WEEKS = "last_weeks_acc"
-ID_PREFIX_ACC_LAST_MONTHS = "last_months_acc"
-class AccountStatOperationList(PkUrlMixin, SingleResumeStat):
- """Manifest for operations stats of an account."""
+@method_decorator(login_required, name="dispatch")
+class AccountStatOperationList(UserAccountMixin, SingleResumeStat):
+ """
+ Menu général pour l'historique de consommation d'un compte
+ """
model = Account
- context_object_name = "account"
- pk_url_kwarg = "trigramme"
- id_prefix = ID_PREFIX_ACC_LAST
- nb_default = 2
- stats = last_stats_manifest(types=[Operation.PURCHASE])
+ slug_url_kwarg = "trigramme"
+ slug_field = "trigramme"
url_stat = "kfet.account.stat.operation"
- def get_object(self, *args, **kwargs):
- obj = super().get_object(*args, **kwargs)
- if self.request.user != obj.user:
- raise Http404
- return obj
+ def get_stats(self):
+ scales_def = [
+ (
+ "Tout le temps",
+ MonthScale,
+ {"last": True, "begin": self.object.created_at},
+ False,
+ ),
+ ("1 an", MonthScale, {"last": True, "n_steps": 12}, False),
+ ("3 mois", WeekScale, {"last": True, "n_steps": 13}, True),
+ ("2 semaines", DayScale, {"last": True, "n_steps": 14}, False),
+ ]
- @method_decorator(login_required)
- def dispatch(self, *args, **kwargs):
- return super().dispatch(*args, **kwargs)
+ return scale_url_params(scales_def, types=[Operation.PURCHASE])
-class AccountStatOperation(ScaleMixin, PkUrlMixin, JSONDetailView):
- """Datasets of operations of an account."""
+@method_decorator(login_required, name="dispatch")
+class AccountStatOperation(UserAccountMixin, ScaleMixin, JSONDetailView):
+ """
+ Statistiques (JSON) de consommation (nb d'items achetés) d'un compte.
+ """
model = Account
- pk_url_kwarg = "trigramme"
- context_object_name = "account"
- id_prefix = ""
+ slug_url_kwarg = "trigramme"
+ slug_field = "trigramme"
- def get_operations(self, scale, types=None):
+ def get_operations(self, types=None):
# On selectionne les opérations qui correspondent
# à l'article en question et qui ne sont pas annulées
# puis on choisi pour chaques intervalle les opérations
@@ -2482,28 +2480,20 @@ class AccountStatOperation(ScaleMixin, PkUrlMixin, JSONDetailView):
)
if types is not None:
all_operations = all_operations.filter(type__in=types)
- chunks = scale.get_by_chunks(
- all_operations,
- field_db="group__at",
- field_callback=(lambda d: d["group__at"]),
- )
- return chunks
+ return all_operations
def get_context_data(self, *args, **kwargs):
- old_ctx = super().get_context_data(*args, **kwargs)
- context = {"labels": old_ctx["labels"]}
- scale = self.scale
+ context = super().get_context_data(*args, **kwargs)
types = self.request.GET.get("types", None)
if types is not None:
types = ast.literal_eval(types)
- operations = self.get_operations(types=types, scale=scale)
+ operations = self.get_operations(types=types)
# On compte les opérations
- nb_ventes = []
- for chunk in operations:
- ventes = sum(ope["article_nb"] for ope in chunk)
- nb_ventes.append(ventes)
+ nb_ventes = self.scale.chunkify_qs(
+ operations, field="group__at", aggregate=Sum("article_nb")
+ )
context["charts"] = [
{
@@ -2514,50 +2504,54 @@ class AccountStatOperation(ScaleMixin, PkUrlMixin, JSONDetailView):
]
return context
- def get_object(self, *args, **kwargs):
- obj = super().get_object(*args, **kwargs)
- if self.request.user != obj.user:
- raise Http404
- return obj
-
- @method_decorator(login_required)
- def dispatch(self, *args, **kwargs):
- return super().dispatch(*args, **kwargs)
-
# ------------------------
# Article Satistiques Last
# ------------------------
-ID_PREFIX_ART_LAST = "last_art"
-ID_PREFIX_ART_LAST_DAYS = "last_days_art"
-ID_PREFIX_ART_LAST_WEEKS = "last_weeks_art"
-ID_PREFIX_ART_LAST_MONTHS = "last_months_art"
+@method_decorator(teamkfet_required, name="dispatch")
class ArticleStatSalesList(SingleResumeStat):
- """Manifest for sales stats of an article."""
+ """
+ Menu pour les statistiques de vente d'un article.
+ """
model = Article
- context_object_name = "article"
- id_prefix = ID_PREFIX_ART_LAST
nb_default = 2
url_stat = "kfet.article.stat.sales"
- stats = last_stats_manifest()
- @method_decorator(teamkfet_required)
- def dispatch(self, *args, **kwargs):
- return super().dispatch(*args, **kwargs)
+ def get_stats(self):
+ first_conso = (
+ Operation.objects.filter(article=self.object)
+ .order_by("group__at")
+ .values_list("group__at", flat=True)
+ .first()
+ )
+ if first_conso is None:
+ # On le crée dans le passé au cas où
+ first_conso = timezone.now() - timedelta(seconds=1)
+ scales_def = [
+ ("Tout le temps", MonthScale, {"last": True, "begin": first_conso}, False),
+ ("1 an", MonthScale, {"last": True, "n_steps": 12}, False),
+ ("3 mois", WeekScale, {"last": True, "n_steps": 13}, True),
+ ("2 semaines", DayScale, {"last": True, "n_steps": 14}, False),
+ ]
+
+ return scale_url_params(scales_def)
+@method_decorator(teamkfet_required, name="dispatch")
class ArticleStatSales(ScaleMixin, JSONDetailView):
- """Datasets of sales of an article."""
+ """
+ Statistiques (JSON) de vente d'un article.
+ Sépare LIQ et les comptes K-Fêt, et rajoute le total.
+ """
model = Article
context_object_name = "article"
def get_context_data(self, *args, **kwargs):
- old_ctx = super().get_context_data(*args, **kwargs)
- context = {"labels": old_ctx["labels"]}
+ context = super().get_context_data(*args, **kwargs)
scale = self.scale
all_purchases = (
@@ -2570,23 +2564,13 @@ class ArticleStatSales(ScaleMixin, JSONDetailView):
liq_only = all_purchases.filter(group__on_acc__trigramme="LIQ")
liq_exclude = all_purchases.exclude(group__on_acc__trigramme="LIQ")
- chunks_liq = scale.get_by_chunks(
- liq_only, field_db="group__at", field_callback=lambda d: d["group__at"]
+ nb_liq = scale.chunkify_qs(
+ liq_only, field="group__at", aggregate=Sum("article_nb")
)
- chunks_no_liq = scale.get_by_chunks(
- liq_exclude, field_db="group__at", field_callback=lambda d: d["group__at"]
+ nb_accounts = scale.chunkify_qs(
+ liq_exclude, field="group__at", aggregate=Sum("article_nb")
)
-
- # On compte les opérations
- nb_ventes = []
- nb_accounts = []
- nb_liq = []
- for chunk_liq, chunk_no_liq in zip(chunks_liq, chunks_no_liq):
- sum_accounts = sum(ope["article_nb"] for ope in chunk_no_liq)
- sum_liq = sum(ope["article_nb"] for ope in chunk_liq)
- nb_ventes.append(sum_accounts + sum_liq)
- nb_accounts.append(sum_accounts)
- nb_liq.append(sum_liq)
+ nb_ventes = [n1 + n2 for n1, n2 in zip(nb_liq, nb_accounts)]
context["charts"] = [
{
@@ -2602,7 +2586,3 @@ class ArticleStatSales(ScaleMixin, JSONDetailView):
},
]
return context
-
- @method_decorator(teamkfet_required)
- def dispatch(self, *args, **kwargs):
- return super().dispatch(*args, **kwargs)