diff --git a/kfet/static/kfet/js/statistic.js b/kfet/static/kfet/js/statistic.js
index f210c11d..db31e0e8 100644
--- a/kfet/static/kfet/js/statistic.js
+++ b/kfet/static/kfet/js/statistic.js
@@ -61,7 +61,7 @@
         var chart = charts[i];
 
         // format the data
-        var chart_data = is_time_chart ? handleTimeChart(chart.values) : dictToArray(chart.values, 1);
+        var chart_data = is_time_chart ? handleTimeChart(chart.values) : dictToArray(chart.values, 0);
 
         chart_datasets.push(
             {
@@ -132,7 +132,7 @@
         type: 'line',
         options: chart_options,
         data: {
-            labels: (data.labels || []).slice(1),
+            labels: data.labels || [],
             datasets: chart_datasets,
         }
     };
diff --git a/kfet/statistic.py b/kfet/statistic.py
index fe948f73..5ff169ff 100644
--- a/kfet/statistic.py
+++ b/kfet/statistic.py
@@ -4,6 +4,7 @@
 from datetime import date, datetime, time, timedelta
 from dateutil.relativedelta import relativedelta
 from dateutil.parser import parse as dateutil_parse
+import pytz
 
 from django.utils import timezone
 from django.db.models import Sum
@@ -13,7 +14,8 @@ KFET_WAKES_UP_AT = time(7, 0)
 
 def kfet_day(year, month, day, start_at=KFET_WAKES_UP_AT):
     """datetime wrapper with time offset."""
-    return datetime.combine(date(year, month, day), start_at)
+    naive = datetime.combine(date(year, month, day), start_at)
+    return pytz.timezone('Europe/Paris').localize(naive, is_dst=None)
 
 
 def to_kfet_day(dt, start_at=KFET_WAKES_UP_AT):
@@ -32,16 +34,21 @@ class Scale(object):
         self.std_chunk = std_chunk
         if last:
             end = timezone.now()
+        if std_chunk:
+            if begin is not None:
+                begin = self.get_chunk_start(begin)
+            if end is not None:
+                end = self.do_step(self.get_chunk_start(end))
 
         if begin is not None and n_steps != 0:
-            self.begin = self.get_from(begin)
+            self.begin = begin
             self.end = self.do_step(self.begin, n_steps=n_steps)
         elif end is not None and n_steps != 0:
-            self.end = self.get_from(end)
+            self.end = end
             self.begin = self.do_step(self.end, n_steps=-n_steps)
         elif begin is not None and end is not None:
-            self.begin = self.get_from(begin)
-            self.end = self.get_from(end)
+            self.begin = begin
+            self.end = end
         else:
             raise Exception('Two of these args must be specified: '
                             'n_steps, begin, end; '
@@ -71,7 +78,7 @@ class Scale(object):
     def get_datetimes(self):
         datetimes = [self.begin]
         tmp = self.begin
-        while tmp <= self.end:
+        while tmp < self.end:
             tmp = self.do_step(tmp)
             datetimes.append(tmp)
         return datetimes
@@ -232,3 +239,97 @@ class ScaleMixin(object):
             qs.filter(**{begin_f: begin, end_f: end})
             for begin, end in scale
         ]
+
+    def get_by_chunks(self, qs, scale, field_callback=None, field_db='at'):
+        """Objects of queryset ranked according to a given scale.
+
+        Returns a generator whose items, one per scale chunk, are
+        generators of objects from qs for that chunk.
+
+        Args:
+            qs: Queryset of source objects, must be ordered *first* on the
+                same field returned by `field_callback`.
+            scale: Used to rank objects.
+            field_callback: Callable which gives the value of an object
+                used to compare against the limits of the scale chunks.
+                Defaults to: lambda obj: getattr(obj, field_db)
+            field_db: Used to filter against `scale` limits.
+                Defaults to 'at'.
+
+        Examples:
+            If queryset `qs` uses `values()`, `field_callback` must be set and
+            could be: `lambda d: d['at']`
+            If `field_db` uses foreign attributes (e.g. with `__`), it should be
+            something like: `lambda obj: obj.group.at`.
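+
+            A minimal sketch of intended use (`WeekScale` and the `Sale`
+            model are illustrative assumptions):
+
+                scale = WeekScale(last=True, n_steps=4)
+                qs = Sale.objects.order_by('at')
+                chunks = self.get_by_chunks(qs, scale)
+                # Chunks share a single iterator over `qs`, so they
+                # must be consumed in the order they are yielded.
+                counts = [sum(1 for obj in chunk) for chunk in chunks]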
+ + """ + if field_callback is None: + def field_callback(obj): + return getattr(obj, field_db) + + begin_f = '{}__gte'.format(field_db) + end_f = '{}__lte'.format(field_db) + + qs = ( + qs + .filter(**{begin_f: scale.begin, end_f: scale.end}) + ) + + obj_iter = iter(qs) + + last_obj = None + + def _objects_until(obj_iter, field_callback, end): + """Generator of objects until `end`. + + Ends if objects source is empty or when an object not verifying + field_callback(obj) <= end is met. + + If this object exists, it is stored in `last_obj` which is found + from outer scope. + Also, if this same variable is non-empty when the function is + called, it first yields its content. + + Args: + obj_iter: Source used to get objects. + field_callback: Returned value, when it is called on an object + will be used to test ordering against `end`. + end + + """ + nonlocal last_obj + + if last_obj is not None: + yield last_obj + last_obj = None + + for obj in obj_iter: + if field_callback(obj) <= end: + yield obj + else: + last_obj = obj + return + + for begin, end in scale: + # forward last seen object, if it exists, to the right chunk, + # and fill with empty generators for intermediate chunks of scale + if last_obj is not None: + if field_callback(last_obj) > end: + yield iter(()) + continue + + # yields generator for this chunk + # this set last_obj to None if obj_iter reach its end, otherwise + # it's set to the first met object from obj_iter which doesn't + # belong to this chunk + yield _objects_until(obj_iter, field_callback, end) diff --git a/kfet/templates/kfet/article_read.html b/kfet/templates/kfet/article_read.html index 6fe025f6..19a11094 100644 --- a/kfet/templates/kfet/article_read.html +++ b/kfet/templates/kfet/article_read.html @@ -104,7 +104,7 @@ $(document).ready(function() { var stat_last = new StatsGroup( "{% url 'kfet.article.stat.sales.list' article.id %}", - $("#stat_last"), + $("#stat_last") ); }); diff --git a/kfet/views.py b/kfet/views.py index cfc58aa0..1df78d1e 100644 --- a/kfet/views.py +++ b/kfet/views.py @@ -2369,13 +2369,19 @@ class AccountStatOperation(ScaleMixin, PkUrlMixin, JSONDetailView): # à l'article en question et qui ne sont pas annulées # puis on choisi pour chaques intervalle les opérations # effectuées dans ces intervalles de temps - all_operations = (Operation.objects - .filter(group__on_acc=self.object) - .filter(canceled_at=None) - ) + all_operations = ( + Operation.objects + .filter(group__on_acc=self.object, + canceled_at=None) + .values('article_nb', 'group__at') + .order_by('group__at') + ) if types is not None: all_operations = all_operations.filter(type__in=types) - chunks = self.chunkify_qs(all_operations, scale, field='group__at') + chunks = self.get_by_chunks( + all_operations, scale, field_db='group__at', + field_callback=(lambda d: d['group__at']), + ) return chunks def get_context_data(self, *args, **kwargs): @@ -2391,7 +2397,8 @@ class AccountStatOperation(ScaleMixin, PkUrlMixin, JSONDetailView): # On compte les opérations nb_ventes = [] for chunk in operations: - nb_ventes.append(tot_ventes(chunk)) + ventes = sum(ope['article_nb'] for ope in chunk) + nb_ventes.append(ventes) context['charts'] = [{"color": "rgb(255, 99, 132)", "label": "NB items achetés", @@ -2442,29 +2449,39 @@ class ArticleStatSales(ScaleMixin, JSONDetailView): context = {'labels': old_ctx['labels']} scale = self.scale - # On selectionne les opérations qui correspondent - # à l'article en question et qui ne sont pas annulées - # puis on choisi pour chaques intervalle les 
-        # On selectionne les opérations qui correspondent
-        # à l'article en question et qui ne sont pas annulées
-        # puis on choisi pour chaques intervalle les opérations
-        # effectuées dans ces intervalles de temps
-        all_operations = (
+        all_purchases = (
             Operation.objects
-            .filter(type=Operation.PURCHASE,
-                    article=self.object,
-                    canceled_at=None,
-                    )
+            .filter(
+                type=Operation.PURCHASE,
+                article=self.object,
+                canceled_at=None,
+            )
+            .values('group__at', 'article_nb')
+            .order_by('group__at')
         )
-        chunks = self.chunkify_qs(all_operations, scale, field='group__at')
+        liq_only = all_purchases.filter(group__on_acc__trigramme='LIQ')
+        liq_exclude = all_purchases.exclude(group__on_acc__trigramme='LIQ')
+
+        chunks_liq = self.get_by_chunks(
+            liq_only, scale, field_db='group__at',
+            field_callback=lambda d: d['group__at'],
+        )
+        chunks_no_liq = self.get_by_chunks(
+            liq_exclude, scale, field_db='group__at',
+            field_callback=lambda d: d['group__at'],
+        )
+
         # On compte les opérations
         nb_ventes = []
         nb_accounts = []
         nb_liq = []
-        for qs in chunks:
-            nb_ventes.append(
-                tot_ventes(qs))
-            nb_liq.append(
-                tot_ventes(qs.filter(group__on_acc__trigramme='LIQ')))
-            nb_accounts.append(
-                tot_ventes(qs.exclude(group__on_acc__trigramme='LIQ')))
+        for chunk_liq, chunk_no_liq in zip(chunks_liq, chunks_no_liq):
+            sum_accounts = sum(ope['article_nb'] for ope in chunk_no_liq)
+            sum_liq = sum(ope['article_nb'] for ope in chunk_liq)
+            nb_ventes.append(sum_accounts + sum_liq)
+            nb_accounts.append(sum_accounts)
+            nb_liq.append(sum_liq)
+
         context['charts'] = [{"color": "rgb(255, 99, 132)",
                               "label": "Toutes consommations",
                               "values": nb_ventes},
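
A standalone sketch of the single-pass chunking pattern that
`get_by_chunks` implements above, in plain Python without Django
(`by_chunks`, its sample data and bounds are illustrative, not part of
the patch):

    def by_chunks(objs, bounds, key):
        # `objs` must be sorted on `key`; `bounds` is a sorted iterable
        # of (begin, end) pairs tiling the range covered by `objs`.
        # As in the patch, chunks share one iterator, so they must be
        # consumed in the order they are yielded.
        it = iter(objs)
        pending = None  # first object read that overflows the current chunk

        def until(end):
            nonlocal pending
            if pending is not None:
                yield pending
                pending = None
            for obj in it:
                if key(obj) <= end:
                    yield obj
                else:
                    pending = obj  # belongs to a later chunk
                    return

        for begin, end in bounds:
            if pending is not None and key(pending) > end:
                yield iter(())  # empty chunk: `pending` lies further on
                continue
            yield until(end)

    data = [1, 2, 5, 9, 10]
    bounds = [(0, 3), (4, 7), (8, 11)]
    print([list(c) for c in by_chunks(data, bounds, key=lambda x: x)])
    # -> [[1, 2], [5], [9, 10]]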