forked from DGNum/gestioCOF
Fewer queries on stats/scales + Fix
Scales:
- Fix the number of chunks when used with std_chunk=True (there was one
  too many at the beginning).
- Scale.end now gives the end of the last chunk (instead of its start),
  so scale.begin -> scale.end covers the full range of the scale.

`kfet_day` now returns an aware datetime.

ScaleMixin:
- New method `get_by_chunks`, which uses only one query and ranks
  elements according to the scale. The elements of each scale chunk are
  returned as a generator, and the chunks themselves are returned as a
  generator too.

ArticleStatSales and AccountStatOperation use this new method to avoid
issuing one query per scale chunk.

ArticleStat:
- Fixed rendering on Chrome.
This commit is contained in:
parent e97e0081d7
commit 3f4a1adbb9

4 changed files with 140 additions and 32 deletions
@@ -61,7 +61,7 @@
         var chart = charts[i];

         // format the data
-        var chart_data = is_time_chart ? handleTimeChart(chart.values) : dictToArray(chart.values, 1);
+        var chart_data = is_time_chart ? handleTimeChart(chart.values) : dictToArray(chart.values, 0);

         chart_datasets.push(
             {
@@ -132,7 +132,7 @@
         type: 'line',
         options: chart_options,
         data: {
-            labels: (data.labels || []).slice(1),
+            labels: data.labels || [],
             datasets: chart_datasets,
         }
     };
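Both chart changes follow from the scale fix: the scale no longer emits a
spurious leading chunk, so the client no longer needs to skip the first
element; `dictToArray` starts from index 0 and `data.labels` is used without
`.slice(1)`. (Reading the second argument of `dictToArray` as a start offset
is an assumption; its body is not part of this diff.)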
@@ -4,6 +4,7 @@ from datetime import date, datetime, time, timedelta

 from dateutil.relativedelta import relativedelta
 from dateutil.parser import parse as dateutil_parse
+import pytz

 from django.utils import timezone
 from django.db.models import Sum
@@ -13,7 +14,8 @@ KFET_WAKES_UP_AT = time(7, 0)

 def kfet_day(year, month, day, start_at=KFET_WAKES_UP_AT):
     """datetime wrapper with time offset."""
-    return datetime.combine(date(year, month, day), start_at)
+    naive = datetime.combine(date(year, month, day), start_at)
+    return pytz.timezone('Europe/Paris').localize(naive, is_dst=None)


 def to_kfet_day(dt, start_at=KFET_WAKES_UP_AT):
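A quick check of the new behaviour, as a sketch (the import path is
assumed):

    # kfet_day now returns an aware datetime localized to Europe/Paris,
    # with the day starting at 07:00 by default.
    from kfet.statistic import kfet_day  # import path assumed

    dt = kfet_day(2017, 1, 15)
    print(dt)                     # 2017-01-15 07:00:00+01:00
    print(dt.tzinfo is not None)  # True: the datetime is aware

Note that `is_dst=None` makes pytz raise AmbiguousTimeError or
NonExistentTimeError instead of silently guessing when 07:00 falls inside a
DST transition.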
@@ -32,16 +34,21 @@ class Scale(object):
         self.std_chunk = std_chunk
         if last:
             end = timezone.now()
+        if std_chunk:
+            if begin is not None:
+                begin = self.get_chunk_start(begin)
+            if end is not None:
+                end = self.do_step(self.get_chunk_start(end))

         if begin is not None and n_steps != 0:
-            self.begin = self.get_from(begin)
+            self.begin = begin
             self.end = self.do_step(self.begin, n_steps=n_steps)
         elif end is not None and n_steps != 0:
-            self.end = self.get_from(end)
+            self.end = end
             self.begin = self.do_step(self.end, n_steps=-n_steps)
         elif begin is not None and end is not None:
-            self.begin = self.get_from(begin)
-            self.end = self.get_from(end)
+            self.begin = begin
+            self.end = end
         else:
             raise Exception('Two of these args must be specified: '
                             'n_steps, begin, end; '
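The resulting invariants, sketched below (DayScale stands for any concrete
Scale subclass defining get_chunk_start/do_step; only behaviour stated in
this commit is relied on):

    scale = DayScale(begin=timezone.now(), n_steps=7, std_chunk=True)

    # With std_chunk=True, begin is snapped to the start of its chunk and
    # end is the *end* of the last chunk, so [scale.begin, scale.end]
    # spans the whole range.
    chunks = list(scale)       # (begin, end) pairs, one per chunk
    assert len(chunks) == 7    # no extra leading chunk any more
    assert chunks[0][0] == scale.begin
    assert chunks[-1][1] == scale.end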
@@ -71,7 +78,7 @@ class Scale(object):
     def get_datetimes(self):
         datetimes = [self.begin]
         tmp = self.begin
-        while tmp <= self.end:
+        while tmp < self.end:
             tmp = self.do_step(tmp)
             datetimes.append(tmp)
         return datetimes
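Concretely, with a one-day step, begin = D0 07:00 and end = D2 07:00 (two
chunks), the loop now appends D1 and D2 and stops, giving the three
fenceposts [D0, D1, D2]. With the old `<=`, `do_step` ran once more and
appended D3, one datetime past the end of the scale, which produced the
extra chunk.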
@@ -232,3 +239,87 @@ class ScaleMixin(object):
             qs.filter(**{begin_f: begin, end_f: end})
             for begin, end in scale
         ]
+
+    def get_by_chunks(self, qs, scale, field_callback=None, field_db='at'):
+        """Objects of a queryset, ranked according to a given scale.
+
+        Returns a generator whose items, one per scale chunk, are
+        generators of the objects of `qs` which belong to that chunk.
+
+        Args:
+            qs: Queryset of source objects; it must be ordered *first* on
+                the same field returned by `field_callback`.
+            scale: Used to rank objects.
+            field_callback: Callable giving, for an object, the value used
+                to compare it against the limits of the scale chunks.
+                Defaults to: lambda obj: getattr(obj, field_db)
+            field_db: Field used to filter against the `scale` limits.
+                Defaults to 'at'.
+
+        Examples:
+            If the queryset `qs` uses `values()`, `field_callback` must be
+            set and could be: `lambda d: d['at']`.
+            If `field_db` uses foreign attributes (e.g. with `__`), it
+            should be something like: `lambda obj: obj.group.at`.
+
+        """
+        if field_callback is None:
+            def field_callback(obj):
+                return getattr(obj, field_db)
+
+        begin_f = '{}__gte'.format(field_db)
+        end_f = '{}__lte'.format(field_db)
+
+        qs = qs.filter(**{begin_f: scale.begin, end_f: scale.end})
+
+        obj_iter = iter(qs)
+
+        last_obj = None
+
+        def _objects_until(obj_iter, field_callback, end):
+            """Generator of objects until `end`.
+
+            Ends when the objects source is exhausted or when an object
+            not verifying field_callback(obj) <= end is met.
+
+            If such an object is met, it is stored in `last_obj`, found in
+            the enclosing scope. Conversely, if this variable is non-empty
+            when the function is called, its content is yielded first.
+
+            Args:
+                obj_iter: Source used to get objects.
+                field_callback: Called on each object; the returned value
+                    is used to test ordering against `end`.
+                end: Upper bound (inclusive) of the current chunk.
+
+            """
+            nonlocal last_obj
+
+            if last_obj is not None:
+                yield last_obj
+                last_obj = None
+
+            for obj in obj_iter:
+                if field_callback(obj) <= end:
+                    yield obj
+                else:
+                    last_obj = obj
+                    return
+
+        for begin, end in scale:
+            # Forward the last seen object, if any, to the chunk it belongs
+            # to, yielding empty generators for intermediate scale chunks.
+            if last_obj is not None:
+                if field_callback(last_obj) > end:
+                    yield iter(())
+                    continue
+
+            # Yield the generator for this chunk. Consuming it sets
+            # last_obj to None if obj_iter reaches its end, or to the first
+            # object met in obj_iter which doesn't belong to this chunk.
+            yield _objects_until(obj_iter, field_callback, end)
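A consumption sketch, mirroring the updated call sites below (the queryset
shape is taken from this commit):

    ops = (Operation.objects
           .filter(canceled_at=None)
           .values('article_nb', 'group__at')
           .order_by('group__at'))  # ordering required by get_by_chunks

    totals = []
    for chunk in self.get_by_chunks(
            ops, scale, field_db='group__at',
            field_callback=lambda d: d['group__at']):
        # Each chunk is a generator over the single shared queryset
        # iterator, so chunks must be consumed in order, exactly once.
        totals.append(sum(ope['article_nb'] for ope in chunk))

The whole walk issues a single query, where `chunkify_qs` issued one
filtered query per scale chunk.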
@@ -104,7 +104,7 @@
   $(document).ready(function() {
     var stat_last = new StatsGroup(
       "{% url 'kfet.article.stat.sales.list' article.id %}",
-      $("#stat_last"),
+      $("#stat_last")
     );
   });
 </script>
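This is the "fixed on Chrome" item from the commit message: a trailing
comma after the last argument of a function call is a syntax error in
JavaScript engines without ES2017 trailing-comma support, which is the
likely cause of the breakage (an inference; the original bug report is not
quoted here).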
@@ -2369,13 +2369,19 @@ class AccountStatOperation(ScaleMixin, PkUrlMixin, JSONDetailView):
         # to the article in question and which are not canceled,
         # then, for each interval, pick the operations
         # performed within those time spans
-        all_operations = (Operation.objects
-                          .filter(group__on_acc=self.object)
-                          .filter(canceled_at=None)
-                          )
+        all_operations = (
+            Operation.objects
+            .filter(group__on_acc=self.object,
+                    canceled_at=None)
+            .values('article_nb', 'group__at')
+            .order_by('group__at')
+        )
         if types is not None:
             all_operations = all_operations.filter(type__in=types)
-        chunks = self.chunkify_qs(all_operations, scale, field='group__at')
+        chunks = self.get_by_chunks(
+            all_operations, scale, field_db='group__at',
+            field_callback=lambda d: d['group__at'],
+        )
         return chunks

     def get_context_data(self, *args, **kwargs):
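Since the queryset now goes through `values()`, iterating yields dicts
rather than model instances, hence the dict-style `field_callback`; the
explicit `order_by('group__at')` satisfies the ordering requirement
documented on `get_by_chunks`.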
@@ -2391,7 +2397,8 @@
         # Count the operations
         nb_ventes = []
         for chunk in operations:
-            nb_ventes.append(tot_ventes(chunk))
+            ventes = sum(ope['article_nb'] for ope in chunk)
+            nb_ventes.append(ventes)

         context['charts'] = [{"color": "rgb(255, 99, 132)",
                               "label": "NB items achetés",
@@ -2442,29 +2449,39 @@ class ArticleStatSales(ScaleMixin, JSONDetailView):
         context = {'labels': old_ctx['labels']}
         scale = self.scale

-        # Select the operations which relate
-        # to the article in question and are not canceled,
-        # then, for each interval, pick the operations
-        # performed within those time spans
-        all_operations = (
+        all_purchases = (
             Operation.objects
-            .filter(type=Operation.PURCHASE,
-                    article=self.object,
-                    canceled_at=None,
-                    )
+            .filter(
+                type=Operation.PURCHASE,
+                article=self.object,
+                canceled_at=None,
+            )
+            .values('group__at', 'article_nb')
+            .order_by('group__at')
         )
-        chunks = self.chunkify_qs(all_operations, scale, field='group__at')
+        liq_only = all_purchases.filter(group__on_acc__trigramme='LIQ')
+        liq_exclude = all_purchases.exclude(group__on_acc__trigramme='LIQ')
+
+        chunks_liq = self.get_by_chunks(
+            liq_only, scale, field_db='group__at',
+            field_callback=lambda d: d['group__at'],
+        )
+        chunks_no_liq = self.get_by_chunks(
+            liq_exclude, scale, field_db='group__at',
+            field_callback=lambda d: d['group__at'],
+        )
+
         # Count the operations
         nb_ventes = []
         nb_accounts = []
         nb_liq = []
-        for qs in chunks:
-            nb_ventes.append(
-                tot_ventes(qs))
-            nb_liq.append(
-                tot_ventes(qs.filter(group__on_acc__trigramme='LIQ')))
-            nb_accounts.append(
-                tot_ventes(qs.exclude(group__on_acc__trigramme='LIQ')))
+        for chunk_liq, chunk_no_liq in zip(chunks_liq, chunks_no_liq):
+            sum_accounts = sum(ope['article_nb'] for ope in chunk_no_liq)
+            sum_liq = sum(ope['article_nb'] for ope in chunk_liq)
+            nb_ventes.append(sum_accounts + sum_liq)
+            nb_accounts.append(sum_accounts)
+            nb_liq.append(sum_liq)

         context['charts'] = [{"color": "rgb(255, 99, 132)",
                               "label": "Toutes consommations",
                               "values": nb_ventes},
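Splitting `all_purchases` into LIQ and non-LIQ streams up front costs two
queries in total instead of the previous per-chunk `filter`/`exclude` pair,
and `zip` walks the two chunk streams in lockstep, so each chunk generator
is fully consumed before the loop advances, as `get_by_chunks` requires.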