diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index a8bece7d..6bb31a5f 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1,4 +1,4 @@ -image: "python:3.5" +image: "python:3.7" variables: # GestioCOF settings @@ -18,7 +18,8 @@ variables: # psql password authentication PGPASSWORD: $POSTGRES_PASSWORD -.test_template: +test: + stage: test before_script: - mkdir -p vendor/{pip,apt} - apt-get update -q && apt-get -o dir::cache::archives="vendor/apt" install -yqq postgresql-client @@ -33,7 +34,7 @@ variables: after_script: - coverage report services: - - postgres:9.6 + - postgres:11.7 - redis:latest cache: key: test @@ -43,27 +44,16 @@ variables: # Keep this disabled for now, as it may kill GitLab... # coverage: '/TOTAL.*\s(\d+\.\d+)\%$/' -test35: - extends: ".test_template" - image: "python:3.5" - stage: test - -test37: - extends: ".test_template" - image: "python:3.7" - stage: test - linters: - image: python:3.6 stage: test before_script: - mkdir -p vendor/pip - pip install --upgrade black isort flake8 script: - black --check . 
- - isort --recursive --check-only --diff bda bds clubs cof events gestioncof kfet petitscours provisioning shared utils + - isort --recursive --check-only --diff bda bds clubs cof events gestioncof kfet petitscours provisioning shared # Print errors only - - flake8 --exit-zero bda bds clubs cof events gestioncof kfet petitscours provisioning shared utils + - flake8 --exit-zero bda bds clubs cof events gestioncof kfet petitscours provisioning shared cache: key: linters paths: @@ -81,7 +71,7 @@ migration_checks: script: python manage.py makemigrations --dry-run --check services: # this should not be necessary… - - postgres:9.6 + - postgres:11.7 cache: key: migration_checks paths: diff --git a/CHANGELOG.md b/CHANGELOG.md index 269e5194..639a9a0b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -8,6 +8,7 @@ Liste des changements notables dans GestioCOF depuis la version 0.1 (septembre - Nouveau module de gestion des événements - Nouveau module BDS - Nouveau module clubs +- Module d'autocomplétion indépendant des apps ## Upcoming @@ -19,6 +20,14 @@ Liste des changements notables dans GestioCOF depuis la version 0.1 (septembre - Les montants en K-Fêt sont à nouveau affichés en UKF (et non en €). - Les boutons "afficher/cacher" des mails et noms des participant⋅e⋅s à un spectacle BdA fonctionnent à nouveau. +- on ne peut plus compter de consos sur ☠☠☠, ni éditer les comptes spéciaux +(LIQ, GNR, ☠☠☠, #13). + +### Nouvelles fonctionnalités + +- Les transferts apparaissent maintenant dans l'historique K-Fêt et l'historique + personnel. 
+- les statistiques K-Fêt remontent à plus d'un an (et le code est simplifié) ## Version 0.4.1 - 17/01/2020 diff --git a/bda/views.py b/bda/views.py index f33b7013..f799360d 100644 --- a/bda/views.py +++ b/bda/views.py @@ -42,7 +42,7 @@ from bda.models import ( Tirage, ) from gestioncof.decorators import BuroRequiredMixin, buro_required, cof_required -from utils.views.autocomplete import Select2QuerySetView +from shared.views.autocomplete import Select2QuerySetView @cof_required diff --git a/gestioncof/autocomplete.py b/gestioncof/autocomplete.py index e27cdb92..239317f8 100644 --- a/gestioncof/autocomplete.py +++ b/gestioncof/autocomplete.py @@ -1,94 +1,56 @@ -from django import shortcuts -from django.conf import settings -from django.contrib.auth.models import User +from django.contrib.auth import get_user_model from django.db.models import Q from django.http import Http404 +from django.views.generic import TemplateView from gestioncof.decorators import buro_required -from gestioncof.models import CofProfile +from shared.views import autocomplete -if getattr(settings, "LDAP_SERVER_URL", None): - from ldap3 import Connection -else: - # shared.tests.testcases.TestCaseMixin.mockLDAP needs - # Connection to be defined in order to mock it. 
- Connection = None +User = get_user_model() -class Clipper(object): - def __init__(self, clipper, fullname): - if fullname is None: - fullname = "" - assert isinstance(clipper, str) - assert isinstance(fullname, str) - self.clipper = clipper - self.fullname = fullname +class COFMemberSearch(autocomplete.ModelSearch): + model = User + search_fields = ["username", "first_name", "last_name"] - def __str__(self): - return "{} ({})".format(self.clipper, self.fullname) - - def __eq__(self, other): - return self.clipper == other.clipper and self.fullname == other.fullname + def get_queryset_filter(self, *args, **kwargs): + qset_filter = super().get_queryset_filter(*args, **kwargs) + qset_filter &= Q(profile__is_cof=True) + return qset_filter -@buro_required -def autocomplete(request): - if "q" not in request.GET: - raise Http404 - q = request.GET["q"] - data = {"q": q} +class COFOthersSearch(autocomplete.ModelSearch): + model = User + search_fields = ["username", "first_name", "last_name"] - queries = {} - bits = q.split() + def get_queryset_filter(self, *args, **kwargs): + qset_filter = super().get_queryset_filter(*args, **kwargs) + qset_filter &= Q(profile__is_cof=False) + return qset_filter - # Fetching data from User and CofProfile tables - queries["members"] = CofProfile.objects.filter(is_cof=True) - queries["users"] = User.objects.filter(profile__is_cof=False) - for bit in bits: - queries["members"] = queries["members"].filter( - Q(user__first_name__icontains=bit) - | Q(user__last_name__icontains=bit) - | Q(user__username__icontains=bit) - | Q(login_clipper__icontains=bit) - ) - queries["users"] = queries["users"].filter( - Q(first_name__icontains=bit) - | Q(last_name__icontains=bit) - | Q(username__icontains=bit) - ) - queries["members"] = queries["members"].distinct() - queries["users"] = queries["users"].distinct() - # Clearing redundancies - usernames = set(queries["members"].values_list("login_clipper", flat="True")) | set( - 
queries["users"].values_list("profile__login_clipper", flat="True") - ) +class COFSearch(autocomplete.Compose): + search_units = [ + ("members", "username", COFMemberSearch), + ("others", "username", COFOthersSearch), + ("clippers", "clipper", autocomplete.LDAPSearch), + ] - # Fetching data from the SPI - if getattr(settings, "LDAP_SERVER_URL", None): - # Fetching - ldap_query = "(&{:s})".format( - "".join( - "(|(cn=*{bit:s}*)(uid=*{bit:s}*))".format(bit=bit) - for bit in bits - if bit.isalnum() - ) - ) - if ldap_query != "(&)": - # If none of the bits were legal, we do not perform the query - entries = None - with Connection(settings.LDAP_SERVER_URL) as conn: - conn.search("dc=spi,dc=ens,dc=fr", ldap_query, attributes=["uid", "cn"]) - entries = conn.entries - # Clearing redundancies - queries["clippers"] = [ - Clipper(entry.uid.value, entry.cn.value) - for entry in entries - if entry.uid.value and entry.uid.value not in usernames - ] - # Resulting data - data.update(queries) - data["options"] = sum(len(query) for query in queries) +cof_search = COFSearch() - return shortcuts.render(request, "autocomplete_user.html", data) + +class AutocompleteView(TemplateView): + template_name = "gestioncof/search_results.html" + + def get_context_data(self, *args, **kwargs): + ctx = super().get_context_data(*args, **kwargs) + if "q" not in self.request.GET: + raise Http404 + q = self.request.GET["q"] + ctx["q"] = q + ctx.update(cof_search.search(q.split())) + return ctx + + +autocomplete = buro_required(AutocompleteView.as_view()) diff --git a/gestioncof/templates/autocomplete_user.html b/gestioncof/templates/autocomplete_user.html deleted file mode 100644 index face824d..00000000 --- a/gestioncof/templates/autocomplete_user.html +++ /dev/null @@ -1,29 +0,0 @@ -{% load utils %} - diff --git a/gestioncof/templates/gestioncof/search_results.html b/gestioncof/templates/gestioncof/search_results.html new file mode 100644 index 00000000..ba8b6580 --- /dev/null +++ 
b/gestioncof/templates/gestioncof/search_results.html @@ -0,0 +1,56 @@ +{% load utils %} + + diff --git a/gestioncof/tests/test_views.py b/gestioncof/tests/test_views.py index 31cb8d8a..f757b4c2 100644 --- a/gestioncof/tests/test_views.py +++ b/gestioncof/tests/test_views.py @@ -15,9 +15,9 @@ from django.test import Client, TestCase, override_settings from django.urls import reverse from bda.models import Salle, Tirage -from gestioncof.autocomplete import Clipper from gestioncof.models import CalendarSubscription, Club, Event, Survey, SurveyAnswer from gestioncof.tests.testcases import ViewTestCaseMixin +from shared.views.autocomplete import Clipper from .utils import create_member, create_root, create_user @@ -285,21 +285,19 @@ class RegistrationAutocompleteViewTests(ViewTestCaseMixin, TestCase): self.mockLDAP([]) - def _test(self, query, expected_users, expected_members, expected_clippers): + def _test(self, query, expected_others, expected_members, expected_clippers): r = self.client.get(self.url, {"q": query}) self.assertEqual(r.status_code, 200) self.assertQuerysetEqual( - r.context["users"], map(repr, expected_users), ordered=False + r.context["others"], map(repr, expected_others), ordered=False ) self.assertQuerysetEqual( - r.context["members"], - map(lambda u: repr(u.profile), expected_members), - ordered=False, + r.context["members"], map(repr, expected_members), ordered=False, ) self.assertCountEqual( - map(str, r.context.get("clippers", [])), map(str, expected_clippers) + map(str, r.context["clippers"]), map(str, expected_clippers) ) def test_username(self): @@ -322,7 +320,7 @@ class RegistrationAutocompleteViewTests(ViewTestCaseMixin, TestCase): mock_ldap.search.assert_called_once_with( "dc=spi,dc=ens,dc=fr", "(&(|(cn=*aa*)(uid=*aa*))(|(cn=*bb*)(uid=*bb*)))", - attributes=["uid", "cn"], + attributes=["cn", "uid"], ) def test_clipper_escaped(self): @@ -333,14 +331,14 @@ class RegistrationAutocompleteViewTests(ViewTestCaseMixin, TestCase): 
mock_ldap.search.assert_not_called() def test_clipper_no_duplicate(self): - self.mockLDAP([("uid", "uu_u1")]) + self.mockLDAP([("uid", "abc")]) - self._test("uu u1", [self.u1], [], [Clipper("uid", "uu_u1")]) + self._test("abc", [self.u1], [], [Clipper("uid", "abc")]) - self.u1.profile.login_clipper = "uid" - self.u1.profile.save() + self.u1.username = "uid" + self.u1.save() - self._test("uu u1", [self.u1], [], []) + self._test("abc", [self.u1], [], []) class HomeViewTests(ViewTestCaseMixin, TestCase): diff --git a/gestioncof/views.py b/gestioncof/views.py index ced35cfc..07a0ae03 100644 --- a/gestioncof/views.py +++ b/gestioncof/views.py @@ -58,7 +58,7 @@ from gestioncof.models import ( SurveyQuestion, SurveyQuestionAnswer, ) -from utils.views.autocomplete import Select2QuerySetView +from shared.views.autocomplete import Select2QuerySetView class HomeView(LoginRequiredMixin, TemplateView): diff --git a/kfet/forms.py b/kfet/forms.py index b6fad26f..9419d9f8 100644 --- a/kfet/forms.py +++ b/kfet/forms.py @@ -24,6 +24,8 @@ from kfet.models import ( TransferGroup, ) +from . 
import KFET_DELETED_TRIGRAMME +from .auth import KFET_GENERIC_TRIGRAMME from .auth.forms import UserGroupForm # noqa # ----- @@ -324,7 +326,10 @@ class KPsulOperationGroupForm(forms.ModelForm): widget=forms.HiddenInput(), ) on_acc = forms.ModelChoiceField( - queryset=Account.objects.exclude(trigramme="GNR"), widget=forms.HiddenInput() + queryset=Account.objects.exclude( + trigramme__in=[KFET_DELETED_TRIGRAMME, KFET_GENERIC_TRIGRAMME] + ), + widget=forms.HiddenInput(), ) class Meta: diff --git a/kfet/models.py b/kfet/models.py index 814f857a..2eacf06f 100644 --- a/kfet/models.py +++ b/kfet/models.py @@ -150,6 +150,15 @@ class Account(models.Model): def readable(self): return self.trigramme not in [KFET_DELETED_TRIGRAMME, KFET_GENERIC_TRIGRAMME] + @property + def editable(self): + return self.trigramme not in [ + KFET_DELETED_TRIGRAMME, + KFET_GENERIC_TRIGRAMME, + "LIQ", + "#13", + ] + @property def is_team(self): return self.has_perm("kfet.is_team") diff --git a/kfet/static/kfet/css/history.css b/kfet/static/kfet/css/history.css index 9cd4cd28..42e73527 100644 --- a/kfet/static/kfet/css/history.css +++ b/kfet/static/kfet/css/history.css @@ -20,7 +20,7 @@ z-index:10; } -#history .opegroup { +#history .group { height:30px; line-height:30px; background-color: #c63b52; @@ -30,29 +30,29 @@ overflow:auto; } -#history .opegroup .time { +#history .group .time { width:70px; } -#history .opegroup .trigramme { +#history .group .trigramme { width:55px; text-align:right; } -#history .opegroup .amount { +#history .group .amount { text-align:right; width:90px; } -#history .opegroup .valid_by { +#history .group .valid_by { padding-left:20px } -#history .opegroup .comment { +#history .group .comment { padding-left:20px; } -#history .ope { +#history .entry { position:relative; height:25px; line-height:24px; @@ -61,38 +61,38 @@ overflow:auto; } -#history .ope .amount { +#history .entry .amount { width:50px; text-align:right; } -#history .ope .infos1 { +#history .entry .infos1 { 
width:80px; text-align:right; } -#history .ope .infos2 { +#history .entry .infos2 { padding-left:15px; } -#history .ope .addcost { +#history .entry .addcost { padding-left:20px; } -#history .ope .canceled { +#history .entry .canceled { padding-left:20px; } -#history div.ope.ui-selected, #history div.ope.ui-selecting { +#history div.entry.ui-selected, #history div.entry.ui-selecting { background-color:rgba(200,16,46,0.6); color:#FFF; } -#history .ope.canceled, #history .transfer.canceled { +#history .entry.canceled { color:#444; } -#history .ope.canceled::before, #history.transfer.canceled::before { +#history .entry.canceled::before { position: absolute; content: ' '; width:100%; @@ -101,10 +101,11 @@ border-top: 1px solid rgba(200,16,46,0.5); } -#history .transfer .amount { - width:80px; +#history .group .infos { + text-align:center; + width:145px; } -#history .transfer .from_acc { - padding-left:10px; +#history .entry .glyphicon { + padding-left:15px; } diff --git a/kfet/static/kfet/js/history.js b/kfet/static/kfet/js/history.js index a7372b87..540c8239 100644 --- a/kfet/static/kfet/js/history.js +++ b/kfet/static/kfet/js/history.js @@ -2,31 +2,59 @@ function dateUTCToParis(date) { return moment.tz(date, 'UTC').tz('Europe/Paris'); } +// TODO : classifier (later) function KHistory(options = {}) { $.extend(this, KHistory.default_options, options); this.$container = $(this.container); + this.$container.selectable({ + filter: 'div.group, div.entry', + selected: function (e, ui) { + $(ui.selected).each(function () { + if ($(this).hasClass('group')) { + var id = $(this).data('id'); + $(this).siblings('.entry').filter(function () { + return $(this).data('group_id') == id + }).addClass('ui-selected'); + } + }); + }, + }); + this.reset = function () { this.$container.html(''); }; - this.addOpeGroup = function (opegroup) { - var $day = this._getOrCreateDay(opegroup['at']); - var $opegroup = this._opeGroupHtml(opegroup); + this.add_history_group = function (group) { + var 
$day = this._get_or_create_day(group['at']); + var $group = this._group_html(group); - $day.after($opegroup); + $day.after($group); - var trigramme = opegroup['on_acc_trigramme']; - var is_cof = opegroup['is_cof']; - for (var i = 0; i < opegroup['opes'].length; i++) { - var $ope = this._opeHtml(opegroup['opes'][i], is_cof, trigramme); - $ope.data('opegroup', opegroup['id']); - $opegroup.after($ope); + var trigramme = group['on_acc_trigramme']; + var is_cof = group['is_cof']; + var type = group['type'] + // TODO : simplifier ça ? + switch (type) { + case 'operation': + for (let ope of group['entries']) { + var $ope = this._ope_html(ope, is_cof, trigramme); + $ope.data('group_id', group['id']); + $group.after($ope); + } + break; + case 'transfer': + for (let transfer of group['entries']) { + var $transfer = this._transfer_html(transfer); + $transfer.data('group_id', group['id']); + $group.after($transfer); + } + break; } } - this._opeHtml = function (ope, is_cof, trigramme) { + this._ope_html = function (ope, is_cof, trigramme) { var $ope_html = $(this.template_ope); var parsed_amount = parseFloat(ope['amount']); var amount = amountDisplay(parsed_amount, is_cof, trigramme); @@ -54,7 +82,8 @@ function KHistory(options = {}) { } $ope_html - .data('ope', ope['id']) + .data('type', 'operation') + .data('id', ope['id']) .find('.amount').text(amount).end() .find('.infos1').text(infos1).end() .find('.infos2').text(infos2).end(); @@ -62,54 +91,89 @@ function KHistory(options = {}) { var addcost_for = ope['addcost_for__trigramme']; if (addcost_for) { var addcost_amount = parseFloat(ope['addcost_amount']); - $ope_html.find('.addcost').text('(' + amountDisplay(addcost_amount, is_cof) + 'UKF pour ' + addcost_for + ')'); + $ope_html.find('.addcost').text('(' + amountDisplay(addcost_amount, is_cof) + ' UKF pour ' + addcost_for + ')'); } if (ope['canceled_at']) - this.cancelOpe(ope, $ope_html); + this.cancel_entry(ope, $ope_html); return $ope_html; } - this.cancelOpe = function 
(ope, $ope = null) { - if (!$ope) - $ope = this.findOpe(ope['id']); + this._transfer_html = function (transfer) { + var $transfer_html = $(this.template_transfer); + var parsed_amount = parseFloat(transfer['amount']); + var amount = parsed_amount.toFixed(2) + '€'; - var cancel = 'Annulé'; - var canceled_at = dateUTCToParis(ope['canceled_at']); - if (ope['canceled_by__trigramme']) - cancel += ' par ' + ope['canceled_by__trigramme']; - cancel += ' le ' + canceled_at.format('DD/MM/YY à HH:mm:ss'); + $transfer_html + .data('type', 'transfer') + .data('id', transfer['id']) + .find('.amount').text(amount).end() + .find('.infos1').text(transfer['from_acc']).end() + .find('.infos2').text(transfer['to_acc']).end(); - $ope.addClass('canceled').find('.canceled').text(cancel); + if (transfer['canceled_at']) + this.cancel_entry(transfer, $transfer_html); + + return $transfer_html; } - this._opeGroupHtml = function (opegroup) { - var $opegroup_html = $(this.template_opegroup); - var at = dateUTCToParis(opegroup['at']).format('HH:mm:ss'); - var trigramme = opegroup['on_acc__trigramme']; - var amount = amountDisplay( - parseFloat(opegroup['amount']), opegroup['is_cof'], trigramme); - var comment = opegroup['comment'] || ''; + this.cancel_entry = function (entry, $entry = null) { + if (!$entry) + $entry = this.find_entry(entry["id"], entry["type"]); - $opegroup_html - .data('opegroup', opegroup['id']) + var cancel = 'Annulé'; + var canceled_at = dateUTCToParis(entry['canceled_at']); + if (entry['canceled_by__trigramme']) + cancel += ' par ' + entry['canceled_by__trigramme']; + cancel += ' le ' + canceled_at.format('DD/MM/YY à HH:mm:ss'); + + $entry.addClass('canceled').find('.canceled').text(cancel); + } + + this._group_html = function (group) { + var type = group['type']; + + + switch (type) { + case 'operation': + var $group_html = $(this.template_opegroup); + var trigramme = group['on_acc__trigramme']; + var amount = amountDisplay( + parseFloat(group['amount']), group['is_cof'], 
trigramme); + break; + case 'transfer': + var $group_html = $(this.template_transfergroup); + $group_html.find('.infos').text('Transferts').end() + var trigramme = ''; + var amount = ''; + break; + } + + + var at = dateUTCToParis(group['at']).format('HH:mm:ss'); + var comment = group['comment'] || ''; + + $group_html + .data('type', type) + .data('id', group['id']) .find('.time').text(at).end() .find('.amount').text(amount).end() .find('.comment').text(comment).end() .find('.trigramme').text(trigramme).end(); if (!this.display_trigramme) - $opegroup_html.find('.trigramme').remove(); + $group_html.find('.trigramme').remove(); + $group_html.find('.info').remove(); - if (opegroup['valid_by__trigramme']) - $opegroup_html.find('.valid_by').text('Par ' + opegroup['valid_by__trigramme']); + if (group['valid_by__trigramme']) + $group_html.find('.valid_by').text('Par ' + group['valid_by__trigramme']); - return $opegroup_html; + return $group_html; } - this._getOrCreateDay = function (date) { + this._get_or_create_day = function (date) { var at = dateUTCToParis(date); var at_ser = at.format('YYYY-MM-DD'); var $day = this.$container.find('.day').filter(function () { @@ -118,35 +182,123 @@ function KHistory(options = {}) { if ($day.length == 1) return $day; var $day = $(this.template_day).prependTo(this.$container); - return $day.data('date', at_ser).text(at.format('D MMMM')); + return $day.data('date', at_ser).text(at.format('D MMMM YYYY')); } - this.findOpeGroup = function (id) { - return this.$container.find('.opegroup').filter(function () { - return $(this).data('opegroup') == id + this.find_group = function (id, type = "operation") { + return this.$container.find('.group').filter(function () { + return ($(this).data('id') == id && $(this).data("type") == type) }); } - this.findOpe = function (id) { - return this.$container.find('.ope').filter(function () { - return $(this).data('ope') == id + this.find_entry = function (id, type = 'operation') { + return 
this.$container.find('.entry').filter(function () { + return ($(this).data('id') == id && $(this).data('type') == type) }); } - this.cancelOpeGroup = function (opegroup) { - var $opegroup = this.findOpeGroup(opegroup['id']); - var trigramme = $opegroup.find('.trigramme').text(); + this.update_opegroup = function (group, type = "operation") { + var $group = this.find_group(group['id'], type); + var trigramme = $group.find('.trigramme').text(); var amount = amountDisplay( - parseFloat(opegroup['amount']), opegroup['is_cof'], trigramme); - $opegroup.find('.amount').text(amount); + parseFloat(group['amount']), group['is_cof'], trigramme); + $group.find('.amount').text(amount); } + this.fetch = function (fetch_options) { + options = $.extend({}, this.fetch_options, fetch_options); + var that = this; + return $.ajax({ + dataType: "json", + url: django_urls["kfet.history.json"](), + method: "POST", + data: options, + }).done(function (data) { + for (let group of data['groups']) { + that.add_history_group(group); + } + }); + } + + this._cancel = function (type, opes, password = "") { + if (window.lock == 1) + return false + window.lock = 1; + var that = this; + return $.ajax({ + dataType: "json", + url: django_urls[`kfet.${type}s.cancel`](), + method: "POST", + data: opes, + beforeSend: function ($xhr) { + $xhr.setRequestHeader("X-CSRFToken", csrftoken); + if (password != '') + $xhr.setRequestHeader("KFetPassword", password); + }, + + }).done(function (data) { + window.lock = 0; + that.$container.find('.ui-selected').removeClass('ui-selected'); + for (let entry of data["canceled"]) { + entry["type"] = type; + that.cancel_entry(entry); + } + if (type == "operation") { + for (let opegroup of data["opegroups_to_update"]) { + that.update_opegroup(opegroup) + } + } + }).fail(function ($xhr) { + var data = $xhr.responseJSON; + switch ($xhr.status) { + case 403: + requestAuth(data, function (password) { + this.cancel(opes, password); + }); + break; + case 400: + 
displayErrors(getErrorsHtml(data)); + break; + } + window.lock = 0; + }); + } + + this.cancel_selected = function () { + var opes_to_cancel = { + "transfers": [], + "operations": [], + } + this.$container.find('.entry.ui-selected').each(function () { + type = $(this).data("type"); + opes_to_cancel[`${type}s`].push($(this).data("id")); + }); + if (opes_to_cancel["transfers"].length > 0 && opes_to_cancel["operations"].length > 0) { + // Lancer 2 requêtes AJAX et gérer tous les cas d'erreurs possibles est trop complexe + $.alert({ + title: 'Erreur', + content: "Impossible de supprimer des transferts et des opérations en même temps !", + backgroundDismiss: true, + animation: 'top', + closeAnimation: 'bottom', + keyboardEnabled: true, + }); + } else if (opes_to_cancel["transfers"].length > 0) { + delete opes_to_cancel["operations"]; + this._cancel("transfer", opes_to_cancel); + } else if (opes_to_cancel["operations"].length > 0) { + delete opes_to_cancel["transfers"]; + this._cancel("operation", opes_to_cancel); + } + } } KHistory.default_options = { container: '#history', template_day: '
', - template_opegroup: '
', - template_ope: '
', + template_opegroup: '
', + template_transfergroup: '
', + template_ope: '
', + template_transfer: '
', display_trigramme: true, } diff --git a/kfet/static/kfet/js/statistic.js b/kfet/static/kfet/js/statistic.js index 9baa08c4..4da17672 100644 --- a/kfet/static/kfet/js/statistic.js +++ b/kfet/static/kfet/js/statistic.js @@ -1,28 +1,15 @@ -(function($){ +(function ($) { window.StatsGroup = function (url, target) { // a class to properly display statictics // url : points to an ObjectResumeStat that lists the options through JSON // target : element of the DOM where to put the stats - var self = this; var element = $(target); var content = $("
"); var buttons; - function dictToArray (dict, start) { - // converts the dicts returned by JSONResponse to Arrays - // necessary because for..in does not guarantee the order - if (start === undefined) start = 0; - var array = new Array(); - for (var k in dict) { - array[k] = dict[k]; - } - array.splice(0, start); - return array; - } - - function handleTimeChart (data) { + function handleTimeChart(data) { // reads the balance data and put it into chartjs formatting chart_data = new Array(); for (var i = 0; i < data.length; i++) { @@ -36,7 +23,7 @@ return chart_data; } - function showStats () { + function showStats() { // CALLBACK : called when a button is selected // shows the focus on the correct button @@ -44,24 +31,20 @@ $(this).addClass("focus"); // loads data and shows it - $.getJSON(this.stats_target_url, {format: 'json'}, displayStats); + $.getJSON(this.stats_target_url, displayStats); } - function displayStats (data) { + function displayStats(data) { // reads the json data and updates the chart display var chart_datasets = []; - var charts = dictToArray(data.charts); - // are the points indexed by timestamps? var is_time_chart = data.is_time_chart || false; // reads the charts data - for (var i = 0; i < charts.length; i++) { - var chart = charts[i]; - + for (let chart of data.charts) { // format the data - var chart_data = is_time_chart ? handleTimeChart(chart.values) : dictToArray(chart.values, 0); + var chart_data = is_time_chart ? 
handleTimeChart(chart.values) : chart.values; chart_datasets.push( { @@ -76,29 +59,24 @@ // options for chartjs var chart_options = - { - responsive: true, - maintainAspectRatio: false, - tooltips: { - mode: 'index', - intersect: false, - }, - hover: { - mode: 'nearest', - intersect: false, - } - }; + { + responsive: true, + maintainAspectRatio: false, + tooltips: { + mode: 'index', + intersect: false, + }, + hover: { + mode: 'nearest', + intersect: false, + } + }; // additionnal options for time-indexed charts if (is_time_chart) { chart_options['scales'] = { xAxes: [{ type: "time", - display: true, - scaleLabel: { - display: false, - labelString: 'Date' - }, time: { tooltipFormat: 'll HH:mm', displayFormats: { @@ -115,26 +93,19 @@ } }], - yAxes: [{ - display: true, - scaleLabel: { - display: false, - labelString: 'value' - } - }] }; } // global object for the options var chart_model = - { - type: 'line', - options: chart_options, - data: { - labels: data.labels || [], - datasets: chart_datasets, - } - }; + { + type: 'line', + options: chart_options, + data: { + labels: data.labels || [], + datasets: chart_datasets, + } + }; // saves the previous charts to be destroyed var prev_chart = content.children(); @@ -151,27 +122,30 @@ } // initialize the interface - function initialize (data) { + function initialize(data) { // creates the bar with the buttons buttons = $("
{% endif %} -
+
@@ -93,29 +94,22 @@ $(document).ready(function() { khistory = new KHistory({ display_trigramme: false, - }); - - function getHistory() { - var data = { + fetch_options: { 'accounts': [{{ account.pk }}], } + }); - $.ajax({ - dataType: "json", - url : "{% url 'kfet.history.json' %}", - method : "POST", - data : data, - }) - .done(function(data) { - for (var i=0; i diff --git a/kfet/templates/kfet/history.html b/kfet/templates/kfet/history.html index ae63358e..94bba48c 100644 --- a/kfet/templates/kfet/history.html +++ b/kfet/templates/kfet/history.html @@ -5,6 +5,7 @@ {{ filter_form.media }} + {% endblock %} @@ -27,6 +28,9 @@
  • Comptes {{ filter_form.accounts }}
  • +
    + +
    {% endblock %} @@ -40,6 +44,8 @@ $(document).ready(function() { settings = { 'subvention_cof': parseFloat({{ kfet_config.subvention_cof|unlocalize }})} + window.lock = 0; + khistory = new KHistory(); var $from_date = $('#id_from_date'); @@ -67,17 +73,8 @@ $(document).ready(function() { var accounts = getSelectedMultiple($accounts); data['accounts'] = accounts; - $.ajax({ - dataType: "json", - url : "{% url 'kfet.history.json' %}", - method : "POST", - data : data, - }) - .done(function(data) { - for (var i=0; i 0) - confirmCancel(opes_to_cancel); + khistory.cancel_selected() } }); - - function confirmCancel(opes_to_cancel) { - var nb = opes_to_cancel.length; - var content = nb+" opérations vont être annulées"; - $.confirm({ - title: 'Confirmation', - content: content, - backgroundDismiss: true, - animation: 'top', - closeAnimation: 'bottom', - keyboardEnabled: true, - confirm: function() { - cancelOperations(opes_to_cancel); - } - }); - } - - function requestAuth(data, callback) { - var content = getErrorsHtml(data); - content += '', - $.confirm({ - title: 'Authentification requise', - content: content, - backgroundDismiss: true, - animation:'top', - closeAnimation:'bottom', - keyboardEnabled: true, - confirm: function() { - var password = this.$content.find('input').val(); - callback(password); - }, - onOpen: function() { - var that = this; - this.$content.find('input').on('keypress', function(e) { - if (e.keyCode == 13) - that.$confirmButton.click(); - }); - }, - }); - } - - function getErrorsHtml(data) { - var content = ''; - if ('missing_perms' in data['errors']) { - content += 'Permissions manquantes'; - content += '
      '; - for (var i=0; i'; - content += '
    '; - } - if ('negative' in data['errors']) { - var url_base = "{% url 'kfet.account.update' LIQ}"; - url_base = base_url(0, url_base.length-8); - for (var i=0; iAutorisation de négatif requise pour '+data['errors']['negative'][i]+'
    '; - } - } - return content; - } - - function cancelOperations(opes_array, password = '') { - var data = { 'operations' : opes_array } - $.ajax({ - dataType: "json", - url : "{% url 'kfet.kpsul.cancel_operations' %}", - method : "POST", - data : data, - beforeSend: function ($xhr) { - $xhr.setRequestHeader("X-CSRFToken", csrftoken); - if (password != '') - $xhr.setRequestHeader("KFetPassword", password); - }, - - }) - .done(function(data) { - khistory.$container.find('.ui-selected').removeClass('ui-selected'); - }) - .fail(function($xhr) { - var data = $xhr.responseJSON; - switch ($xhr.status) { - case 403: - requestAuth(data, function(password) { - cancelOperations(opes_array, password); - }); - break; - case 400: - displayErrors(getErrorsHtml(data)); - break; - } - - }); - } - - getHistory(); }); diff --git a/kfet/templates/kfet/kpsul.html b/kfet/templates/kfet/kpsul.html index 171c7030..7b292087 100644 --- a/kfet/templates/kfet/kpsul.html +++ b/kfet/templates/kfet/kpsul.html @@ -189,7 +189,7 @@ $(document).ready(function() { // ----- // Lock to avoid multiple requests - lock = 0; + window.lock = 0; // Retrieve settings @@ -479,9 +479,9 @@ $(document).ready(function() { var operations = $('#operation_formset'); function performOperations(password = '') { - if (lock == 1) + if (window.lock == 1) return false; - lock = 1; + window.lock = 1; var data = operationGroup.serialize() + '&' + operations.serialize(); $.ajax({ dataType: "json", @@ -497,7 +497,7 @@ $(document).ready(function() { .done(function(data) { updatePreviousOp(); coolReset(); - lock = 0; + window.lock = 0; }) .fail(function($xhr) { var data = $xhr.responseJSON; @@ -513,7 +513,7 @@ $(document).ready(function() { } break; } - lock = 0; + window.lock = 0; }); } @@ -522,55 +522,6 @@ $(document).ready(function() { performOperations(); }); - // ----- - // Cancel operations - // ----- - - var cancelButton = $('#cancel_operations'); - var cancelForm = $('#cancel_form'); - - function 
cancelOperations(opes_array, password = '') { - if (lock == 1) - return false - lock = 1; - var data = { 'operations' : opes_array } - $.ajax({ - dataType: "json", - url : "{% url 'kfet.kpsul.cancel_operations' %}", - method : "POST", - data : data, - beforeSend: function ($xhr) { - $xhr.setRequestHeader("X-CSRFToken", csrftoken); - if (password != '') - $xhr.setRequestHeader("KFetPassword", password); - }, - - }) - .done(function(data) { - coolReset(); - lock = 0; - }) - .fail(function($xhr) { - var data = $xhr.responseJSON; - switch ($xhr.status) { - case 403: - requestAuth(data, function(password) { - cancelOperations(opes_array, password); - }, triInput); - break; - case 400: - displayErrors(getErrorsHtml(data)); - break; - } - lock = 0; - }); - } - - // Event listeners - cancelButton.on('click', function() { - cancelOperations(); - }); - // ----- // Articles data // ----- @@ -1189,24 +1140,12 @@ $(document).ready(function() { // History // ----- - khistory = new KHistory(); - - function getHistory() { - var data = { + khistory = new KHistory({ + fetch_options: { from: moment().subtract(1, 'days').format('YYYY-MM-DD HH:mm:ss'), - }; - $.ajax({ - dataType: "json", - url : "{% url 'kfet.history.json' %}", - method : "POST", - data : data, - }) - .done(function(data) { - for (var i=0; i 0) - cancelOperations(opes_to_cancel); + khistory.cancel_selected() } }); @@ -1333,16 +1253,9 @@ $(document).ready(function() { // ----- OperationWebSocket.add_handler(function(data) { - for (var i=0; i + + +{% endblock %} + {% block fixed %}
    @@ -16,109 +23,31 @@ {% block main %} -
    - {% for transfergroup in transfergroups %} -
    - {{ transfergroup.at }} - {{ transfergroup.valid_by.trigramme }} - {{ transfergroup.comment }} -
    - {% for transfer in transfergroup.transfers.all %} -
    - {{ transfer.amount }} € - {{ transfer.from_acc.trigramme }} - - {{ transfer.to_acc.trigramme }} -
    - {% endfor %} - {% endfor %} -
    + +
    diff --git a/kfet/tests/test_views.py b/kfet/tests/test_views.py index 0a5c4e49..bcd9a9b4 100644 --- a/kfet/tests/test_views.py +++ b/kfet/tests/test_views.py @@ -3,7 +3,7 @@ from datetime import datetime, timedelta from decimal import Decimal from unittest import mock -from django.contrib.auth.models import Group +from django.contrib.auth.models import Group, User from django.test import Client, TestCase from django.urls import reverse from django.utils import timezone @@ -628,37 +628,51 @@ class AccountStatOperationListViewTests(ViewTestCaseMixin, TestCase): expected_stats = [ { - "label": "Derniers mois", + "label": "Tout le temps", "url": { "path": base_url, "query": { - "scale_n_steps": ["7"], + "types": ["['purchase']"], "scale_name": ["month"], + "scale_last": ["True"], + "scale_begin": [ + self.accounts["user1"].created_at.isoformat(" ") + ], + }, + }, + }, + { + "label": "1 an", + "url": { + "path": base_url, + "query": { "types": ["['purchase']"], + "scale_n_steps": ["12"], + "scale_name": ["month"], "scale_last": ["True"], }, }, }, { - "label": "Dernières semaines", + "label": "3 mois", "url": { "path": base_url, "query": { - "scale_n_steps": ["7"], + "types": ["['purchase']"], + "scale_n_steps": ["13"], "scale_name": ["week"], - "types": ["['purchase']"], "scale_last": ["True"], }, }, }, { - "label": "Derniers jours", + "label": "2 semaines", "url": { "path": base_url, "query": { - "scale_n_steps": ["7"], - "scale_name": ["day"], "types": ["['purchase']"], + "scale_n_steps": ["14"], + "scale_name": ["day"], "scale_last": ["True"], }, }, @@ -1524,6 +1538,21 @@ class ArticleStatSalesListViewTests(ViewTestCaseMixin, TestCase): self.article = Article.objects.create( name="Article", category=ArticleCategory.objects.create(name="Category") ) + checkout = Checkout.objects.create( + name="Checkout", + created_by=self.accounts["team"], + balance=5, + valid_from=self.now, + valid_to=self.now + timedelta(days=5), + ) + + self.opegroup = create_operation_group( 
+ on_acc=self.accounts["user"], + checkout=checkout, + content=[ + {"type": Operation.PURCHASE, "article": self.article, "article_nb": 2}, + ], + ) def test_ok(self): r = self.client.get(self.url) @@ -1535,33 +1564,44 @@ class ArticleStatSalesListViewTests(ViewTestCaseMixin, TestCase): expected_stats = [ { - "label": "Derniers mois", + "label": "Tout le temps", "url": { "path": base_url, "query": { - "scale_n_steps": ["7"], + "scale_name": ["month"], + "scale_last": ["True"], + "scale_begin": [self.opegroup.at.isoformat(" ")], + }, + }, + }, + { + "label": "1 an", + "url": { + "path": base_url, + "query": { + "scale_n_steps": ["12"], "scale_name": ["month"], "scale_last": ["True"], }, }, }, { - "label": "Dernières semaines", + "label": "3 mois", "url": { "path": base_url, "query": { - "scale_n_steps": ["7"], + "scale_n_steps": ["13"], "scale_name": ["week"], "scale_last": ["True"], }, }, }, { - "label": "Derniers jours", + "label": "2 semaines", "url": { "path": base_url, "query": { - "scale_n_steps": ["7"], + "scale_n_steps": ["14"], "scale_name": ["day"], "scale_last": ["True"], }, @@ -1997,9 +2037,10 @@ class KPsulPerformOperationsViewTests(ViewTestCaseMixin, TestCase): self.kpsul_consumer_mock.group_send.assert_called_once_with( "kfet.kpsul", { - "opegroups": [ + "groups": [ { "add": True, + "type": "operation", "at": mock.ANY, "amount": Decimal("-5.00"), "checkout__name": "Checkout", @@ -2008,7 +2049,7 @@ class KPsulPerformOperationsViewTests(ViewTestCaseMixin, TestCase): "is_cof": False, "on_acc__trigramme": "000", "valid_by__trigramme": None, - "opes": [ + "entries": [ { "id": operation.pk, "addcost_amount": None, @@ -2269,9 +2310,10 @@ class KPsulPerformOperationsViewTests(ViewTestCaseMixin, TestCase): self.kpsul_consumer_mock.group_send.assert_called_once_with( "kfet.kpsul", { - "opegroups": [ + "groups": [ { "add": True, + "type": "operation", "at": mock.ANY, "amount": Decimal("10.75"), "checkout__name": "Checkout", @@ -2280,7 +2322,7 @@ class 
KPsulPerformOperationsViewTests(ViewTestCaseMixin, TestCase): "is_cof": False, "on_acc__trigramme": "000", "valid_by__trigramme": "100", - "opes": [ + "entries": [ { "id": operation.pk, "addcost_amount": None, @@ -2443,9 +2485,10 @@ class KPsulPerformOperationsViewTests(ViewTestCaseMixin, TestCase): self.kpsul_consumer_mock.group_send.assert_called_once_with( "kfet.kpsul", { - "opegroups": [ + "groups": [ { "add": True, + "type": "operation", "at": mock.ANY, "amount": Decimal("-10.75"), "checkout__name": "Checkout", @@ -2454,7 +2497,7 @@ class KPsulPerformOperationsViewTests(ViewTestCaseMixin, TestCase): "is_cof": False, "on_acc__trigramme": "000", "valid_by__trigramme": None, - "opes": [ + "entries": [ { "id": operation.pk, "addcost_amount": None, @@ -2601,9 +2644,10 @@ class KPsulPerformOperationsViewTests(ViewTestCaseMixin, TestCase): self.kpsul_consumer_mock.group_send.assert_called_once_with( "kfet.kpsul", { - "opegroups": [ + "groups": [ { "add": True, + "type": "operation", "at": mock.ANY, "amount": Decimal("10.75"), "checkout__name": "Checkout", @@ -2612,7 +2656,7 @@ class KPsulPerformOperationsViewTests(ViewTestCaseMixin, TestCase): "is_cof": False, "on_acc__trigramme": "000", "valid_by__trigramme": "100", - "opes": [ + "entries": [ { "id": operation.pk, "addcost_amount": None, @@ -2712,9 +2756,9 @@ class KPsulPerformOperationsViewTests(ViewTestCaseMixin, TestCase): self.checkout.refresh_from_db() self.assertEqual(self.checkout.balance, Decimal("100.00")) - ws_data_ope = self.kpsul_consumer_mock.group_send.call_args[0][1]["opegroups"][ - 0 - ]["opes"][0] + ws_data_ope = self.kpsul_consumer_mock.group_send.call_args[0][1]["groups"][0][ + "entries" + ][0] self.assertEqual(ws_data_ope["addcost_amount"], Decimal("1.00")) self.assertEqual(ws_data_ope["addcost_for__trigramme"], "ADD") @@ -2752,9 +2796,9 @@ class KPsulPerformOperationsViewTests(ViewTestCaseMixin, TestCase): self.checkout.refresh_from_db() self.assertEqual(self.checkout.balance, Decimal("100.00")) 
- ws_data_ope = self.kpsul_consumer_mock.group_send.call_args[0][1]["opegroups"][ - 0 - ]["opes"][0] + ws_data_ope = self.kpsul_consumer_mock.group_send.call_args[0][1]["groups"][0][ + "entries" + ][0] self.assertEqual(ws_data_ope["addcost_amount"], Decimal("0.80")) self.assertEqual(ws_data_ope["addcost_for__trigramme"], "ADD") @@ -2790,9 +2834,9 @@ class KPsulPerformOperationsViewTests(ViewTestCaseMixin, TestCase): self.checkout.refresh_from_db() self.assertEqual(self.checkout.balance, Decimal("106.00")) - ws_data_ope = self.kpsul_consumer_mock.group_send.call_args[0][1]["opegroups"][ - 0 - ]["opes"][0] + ws_data_ope = self.kpsul_consumer_mock.group_send.call_args[0][1]["groups"][0][ + "entries" + ][0] self.assertEqual(ws_data_ope["addcost_amount"], Decimal("1.00")) self.assertEqual(ws_data_ope["addcost_for__trigramme"], "ADD") @@ -2826,9 +2870,9 @@ class KPsulPerformOperationsViewTests(ViewTestCaseMixin, TestCase): self.accounts["addcost"].refresh_from_db() self.assertEqual(self.accounts["addcost"].balance, Decimal("15.00")) - ws_data_ope = self.kpsul_consumer_mock.group_send.call_args[0][1]["opegroups"][ - 0 - ]["opes"][0] + ws_data_ope = self.kpsul_consumer_mock.group_send.call_args[0][1]["groups"][0][ + "entries" + ][0] self.assertEqual(ws_data_ope["addcost_amount"], None) self.assertEqual(ws_data_ope["addcost_for__trigramme"], None) @@ -2861,9 +2905,9 @@ class KPsulPerformOperationsViewTests(ViewTestCaseMixin, TestCase): self.accounts["addcost"].refresh_from_db() self.assertEqual(self.accounts["addcost"].balance, Decimal("0.00")) - ws_data_ope = self.kpsul_consumer_mock.group_send.call_args[0][1]["opegroups"][ - 0 - ]["opes"][0] + ws_data_ope = self.kpsul_consumer_mock.group_send.call_args[0][1]["groups"][0][ + "entries" + ][0] self.assertEqual(ws_data_ope["addcost_amount"], None) self.assertEqual(ws_data_ope["addcost_for__trigramme"], None) @@ -3170,9 +3214,10 @@ class KPsulPerformOperationsViewTests(ViewTestCaseMixin, TestCase): 
self.kpsul_consumer_mock.group_send.assert_called_once_with( "kfet.kpsul", { - "opegroups": [ + "groups": [ { "add": True, + "type": "operation", "at": mock.ANY, "amount": Decimal("-9.00"), "checkout__name": "Checkout", @@ -3181,7 +3226,7 @@ class KPsulPerformOperationsViewTests(ViewTestCaseMixin, TestCase): "is_cof": False, "on_acc__trigramme": "000", "valid_by__trigramme": None, - "opes": [ + "entries": [ { "id": operation_list[0].pk, "addcost_amount": None, @@ -3234,7 +3279,7 @@ class KPsulCancelOperationsViewTests(ViewTestCaseMixin, TestCase): """ - url_name = "kfet.kpsul.cancel_operations" + url_name = "kfet.operations.cancel" url_expected = "/k-fet/k-psul/cancel_operations" http_methods = ["POST"] @@ -3353,7 +3398,26 @@ class KPsulCancelOperationsViewTests(ViewTestCaseMixin, TestCase): ) self.assertDictEqual( - json_data, {"canceled": [operation.pk], "errors": {}, "warnings": {}} + json_data, + { + "canceled": [ + { + "id": operation.id, + # l'encodage des dates en JSON est relou... 
+ "canceled_at": mock.ANY, + "canceled_by__trigramme": None, + } + ], + "errors": {}, + "warnings": {}, + "opegroups_to_update": [ + { + "id": group.pk, + "amount": str(group.amount), + "is_cof": group.is_cof, + } + ], + }, ) self.account.refresh_from_db() @@ -3365,26 +3429,7 @@ class KPsulCancelOperationsViewTests(ViewTestCaseMixin, TestCase): self.kpsul_consumer_mock.group_send.assert_called_with( "kfet.kpsul", - { - "opegroups": [ - { - "cancellation": True, - "id": group.pk, - "amount": Decimal("0.00"), - "is_cof": False, - } - ], - "opes": [ - { - "cancellation": True, - "id": operation.pk, - "canceled_by__trigramme": None, - "canceled_at": self.now + timedelta(seconds=15), - } - ], - "checkouts": [], - "articles": [{"id": self.article.pk, "stock": 22}], - }, + {"checkouts": [], "articles": [{"id": self.article.pk, "stock": 22}]}, ) def test_purchase_with_addcost(self): @@ -3541,7 +3586,26 @@ class KPsulCancelOperationsViewTests(ViewTestCaseMixin, TestCase): ) self.assertDictEqual( - json_data, {"canceled": [operation.pk], "errors": {}, "warnings": {}} + json_data, + { + "canceled": [ + { + "id": operation.id, + # l'encodage des dates en JSON est relou... 
+ "canceled_at": mock.ANY, + "canceled_by__trigramme": None, + } + ], + "errors": {}, + "warnings": {}, + "opegroups_to_update": [ + { + "id": group.pk, + "amount": str(group.amount), + "is_cof": group.is_cof, + } + ], + }, ) self.account.refresh_from_db() @@ -3554,22 +3618,6 @@ class KPsulCancelOperationsViewTests(ViewTestCaseMixin, TestCase): self.kpsul_consumer_mock.group_send.assert_called_with( "kfet.kpsul", { - "opegroups": [ - { - "cancellation": True, - "id": group.pk, - "amount": Decimal("0.00"), - "is_cof": False, - } - ], - "opes": [ - { - "cancellation": True, - "id": operation.pk, - "canceled_by__trigramme": None, - "canceled_at": self.now + timedelta(seconds=15), - } - ], "checkouts": [{"id": self.checkout.pk, "balance": Decimal("89.25")}], "articles": [], }, @@ -3625,7 +3673,26 @@ class KPsulCancelOperationsViewTests(ViewTestCaseMixin, TestCase): ) self.assertDictEqual( - json_data, {"canceled": [operation.pk], "errors": {}, "warnings": {}} + json_data, + { + "canceled": [ + { + "id": operation.id, + # l'encodage des dates en JSON est relou... 
+ "canceled_at": mock.ANY, + "canceled_by__trigramme": None, + } + ], + "errors": {}, + "warnings": {}, + "opegroups_to_update": [ + { + "id": group.pk, + "amount": str(group.amount), + "is_cof": group.is_cof, + } + ], + }, ) self.account.refresh_from_db() @@ -3638,22 +3705,6 @@ class KPsulCancelOperationsViewTests(ViewTestCaseMixin, TestCase): self.kpsul_consumer_mock.group_send.assert_called_with( "kfet.kpsul", { - "opegroups": [ - { - "cancellation": True, - "id": group.pk, - "amount": Decimal("0.00"), - "is_cof": False, - } - ], - "opes": [ - { - "cancellation": True, - "id": operation.pk, - "canceled_by__trigramme": None, - "canceled_at": self.now + timedelta(seconds=15), - } - ], "checkouts": [{"id": self.checkout.pk, "balance": Decimal("110.75")}], "articles": [], }, @@ -3709,7 +3760,26 @@ class KPsulCancelOperationsViewTests(ViewTestCaseMixin, TestCase): ) self.assertDictEqual( - json_data, {"canceled": [operation.pk], "errors": {}, "warnings": {}} + json_data, + { + "canceled": [ + { + "id": operation.id, + # l'encodage des dates en JSON est relou... 
+ "canceled_at": mock.ANY, + "canceled_by__trigramme": None, + } + ], + "errors": {}, + "warnings": {}, + "opegroups_to_update": [ + { + "id": group.pk, + "amount": str(group.amount), + "is_cof": group.is_cof, + } + ], + }, ) self.account.refresh_from_db() @@ -3720,27 +3790,7 @@ class KPsulCancelOperationsViewTests(ViewTestCaseMixin, TestCase): self.assertEqual(self.checkout.balance, Decimal("100.00")) self.kpsul_consumer_mock.group_send.assert_called_with( - "kfet.kpsul", - { - "opegroups": [ - { - "cancellation": True, - "id": group.pk, - "amount": Decimal("0.00"), - "is_cof": False, - } - ], - "opes": [ - { - "cancellation": True, - "id": operation.pk, - "canceled_by__trigramme": None, - "canceled_at": self.now + timedelta(seconds=15), - } - ], - "checkouts": [], - "articles": [], - }, + "kfet.kpsul", {"checkouts": [], "articles": []}, ) @mock.patch("django.utils.timezone.now") @@ -3961,13 +4011,33 @@ class KPsulCancelOperationsViewTests(ViewTestCaseMixin, TestCase): group.refresh_from_db() self.assertEqual(group.amount, Decimal("10.75")) self.assertEqual(group.opes.exclude(canceled_at=None).count(), 3) - + self.maxDiff = None self.assertDictEqual( json_data, { - "canceled": [operation1.pk, operation2.pk], - "warnings": {"already_canceled": [operation3.pk]}, + "canceled": [ + { + "id": operation1.id, + # l'encodage des dates en JSON est relou... + "canceled_at": mock.ANY, + "canceled_by__trigramme": None, + }, + { + "id": operation2.id, + # l'encodage des dates en JSON est relou... 
+ "canceled_at": mock.ANY, + "canceled_by__trigramme": None, + }, + ], "errors": {}, + "warnings": {"already_canceled": [operation3.pk]}, + "opegroups_to_update": [ + { + "id": group.pk, + "amount": str(group.amount), + "is_cof": group.is_cof, + } + ], }, ) @@ -4121,12 +4191,18 @@ class HistoryJSONViewTests(ViewTestCaseMixin, TestCase): url_expected = "/k-fet/history.json" auth_user = "user" - auth_forbidden = [None] + auth_forbidden = [None, "noaccount"] def test_ok(self): r = self.client.post(self.url) self.assertEqual(r.status_code, 200) + def get_users_extra(self): + noaccount = User.objects.create(username="noaccount") + noaccount.set_password("noaccount") + noaccount.save() + return {"noaccount": noaccount} + class AccountReadJSONViewTests(ViewTestCaseMixin, TestCase): url_name = "kfet.account.read.json" diff --git a/kfet/urls.py b/kfet/urls.py index 03c174f3..12c06d26 100644 --- a/kfet/urls.py +++ b/kfet/urls.py @@ -219,8 +219,8 @@ urlpatterns = [ ), path( "k-psul/cancel_operations", - views.kpsul_cancel_operations, - name="kfet.kpsul.cancel_operations", + views.cancel_operations, + name="kfet.operations.cancel", ), path( "k-psul/articles_data", @@ -252,7 +252,7 @@ urlpatterns = [ # ----- # Transfers urls # ----- - path("transfers/", views.transfers, name="kfet.transfers"), + path("transfers/", views.TransferView.as_view(), name="kfet.transfers"), path("transfers/new", views.transfers_create, name="kfet.transfers.create"), path("transfers/perform", views.perform_transfers, name="kfet.transfers.perform"), path("transfers/cancel", views.cancel_transfers, name="kfet.transfers.cancel"), diff --git a/kfet/views.py b/kfet/views.py index 655e856d..b6c49f72 100644 --- a/kfet/views.py +++ b/kfet/views.py @@ -2,6 +2,7 @@ import ast import heapq import statistics from collections import defaultdict +from datetime import timedelta from decimal import Decimal from typing import List from urllib.parse import urlencode @@ -12,7 +13,7 @@ from django.contrib.auth.mixins 
import PermissionRequiredMixin from django.contrib.auth.models import Permission, User from django.contrib.messages.views import SuccessMessageMixin from django.db import transaction -from django.db.models import Count, F, Prefetch, Sum +from django.db.models import Count, F, Prefetch, Q, Sum from django.forms import formset_factory from django.http import Http404, JsonResponse from django.shortcuts import get_object_or_404, redirect, render @@ -76,7 +77,7 @@ from kfet.models import ( Transfer, TransferGroup, ) -from kfet.statistic import ScaleMixin, WeekScale, last_stats_manifest +from kfet.statistic import DayScale, MonthScale, ScaleMixin, WeekScale, scale_url_params from .auth import KFET_GENERIC_TRIGRAMME from .auth.views import ( # noqa @@ -328,7 +329,9 @@ def account_update(request, trigramme): account = get_object_or_404(Account, trigramme=trigramme) # Checking permissions - if not request.user.has_perm("kfet.is_team") and request.user != account.user: + if not account.editable or ( + not request.user.has_perm("kfet.is_team") and request.user != account.user + ): raise Http404 user_info_form = UserInfoForm(instance=account.user) @@ -911,6 +914,8 @@ def kpsul_get_settings(request): @teamkfet_required def account_read_json(request, trigramme): account = get_object_or_404(Account, trigramme=trigramme) + if not account.readable: + raise Http404 data = { "id": account.pk, "name": account.name, @@ -1156,9 +1161,10 @@ def kpsul_perform_operations(request): # Websocket data websocket_data = {} - websocket_data["opegroups"] = [ + websocket_data["groups"] = [ { "add": True, + "type": "operation", "id": operationgroup.pk, "amount": operationgroup.amount, "checkout__name": operationgroup.checkout.name, @@ -1169,7 +1175,7 @@ def kpsul_perform_operations(request): operationgroup.valid_by and operationgroup.valid_by.trigramme or None ), "on_acc__trigramme": operationgroup.on_acc.trigramme, - "opes": [], + "entries": [], } ] for operation in operations: @@ -1187,7 +1193,7 
@@ def kpsul_perform_operations(request): "canceled_by__trigramme": None, "canceled_at": None, } - websocket_data["opegroups"][0]["opes"].append(ope_data) + websocket_data["groups"][0]["entries"].append(ope_data) # Need refresh from db cause we used update on queryset operationgroup.checkout.refresh_from_db() websocket_data["checkouts"] = [ @@ -1207,7 +1213,7 @@ def kpsul_perform_operations(request): @teamkfet_required @kfet_password_auth -def kpsul_cancel_operations(request): +def cancel_operations(request): # Pour la réponse data = {"canceled": [], "warnings": {}, "errors": {}} @@ -1363,7 +1369,11 @@ def kpsul_cancel_operations(request): .filter(pk__in=opegroups_pk) .order_by("pk") ) - opes = sorted(opes) + opes = ( + Operation.objects.values("id", "canceled_at", "canceled_by__trigramme") + .filter(pk__in=opes) + .order_by("pk") + ) checkouts_pk = [checkout.pk for checkout in to_checkouts_balances] checkouts = ( Checkout.objects.values("id", "balance") @@ -1374,27 +1384,7 @@ def kpsul_cancel_operations(request): articles = Article.objects.values("id", "stock").filter(pk__in=articles_pk) # Websocket data - websocket_data = {"opegroups": [], "opes": [], "checkouts": [], "articles": []} - - for opegroup in opegroups: - websocket_data["opegroups"].append( - { - "cancellation": True, - "id": opegroup["id"], - "amount": opegroup["amount"], - "is_cof": opegroup["is_cof"], - } - ) - canceled_by__trigramme = canceled_by and canceled_by.trigramme or None - for ope in opes: - websocket_data["opes"].append( - { - "cancellation": True, - "id": ope, - "canceled_by__trigramme": canceled_by__trigramme, - "canceled_at": canceled_at, - } - ) + websocket_data = {"checkouts": [], "articles": []} for checkout in checkouts: websocket_data["checkouts"].append( {"id": checkout["id"], "balance": checkout["balance"]} @@ -1405,7 +1395,8 @@ def kpsul_cancel_operations(request): ) consumers.KPsul.group_send("kfet.kpsul", websocket_data) - data["canceled"] = opes + data["canceled"] = 
list(opes) + data["opegroups_to_update"] = list(opegroups) if opes_already_canceled: data["warnings"]["already_canceled"] = opes_already_canceled return JsonResponse(data) @@ -1416,49 +1407,86 @@ def history_json(request): # Récupération des paramètres from_date = request.POST.get("from", None) to_date = request.POST.get("to", None) - limit = request.POST.get("limit", None) checkouts = request.POST.getlist("checkouts[]", None) accounts = request.POST.getlist("accounts[]", None) + transfers_only = request.POST.get("transfersonly", False) + opes_only = request.POST.get("opesonly", False) + + # Construction de la requête (sur les transferts) pour le prefetch + + transfer_queryset_prefetch = Transfer.objects.select_related( + "from_acc", "to_acc", "canceled_by" + ) + + # Le check sur les comptes est dans le prefetch pour les transferts + if accounts: + transfer_queryset_prefetch = transfer_queryset_prefetch.filter( + Q(from_acc__in=accounts) | Q(to_acc__in=accounts) + ) + + if not request.user.has_perm("kfet.is_team"): + try: + acc = request.user.profile.account_kfet + transfer_queryset_prefetch = transfer_queryset_prefetch.filter( + Q(from_acc=acc) | Q(to_acc=acc) + ) + except Account.DoesNotExist: + return JsonResponse({}, status=403) + + transfer_prefetch = Prefetch( + "transfers", queryset=transfer_queryset_prefetch, to_attr="filtered_transfers" + ) # Construction de la requête (sur les opérations) pour le prefetch - queryset_prefetch = Operation.objects.select_related( + ope_queryset_prefetch = Operation.objects.select_related( "article", "canceled_by", "addcost_for" ) + ope_prefetch = Prefetch("opes", queryset=ope_queryset_prefetch) # Construction de la requête principale opegroups = ( - OperationGroup.objects.prefetch_related( - Prefetch("opes", queryset=queryset_prefetch) - ) + OperationGroup.objects.prefetch_related(ope_prefetch) .select_related("on_acc", "valid_by") .order_by("at") ) + transfergroups = ( + 
TransferGroup.objects.prefetch_related(transfer_prefetch) + .select_related("valid_by") + .order_by("at") + ) + # Application des filtres if from_date: opegroups = opegroups.filter(at__gte=from_date) + transfergroups = transfergroups.filter(at__gte=from_date) if to_date: opegroups = opegroups.filter(at__lt=to_date) + transfergroups = transfergroups.filter(at__lt=to_date) if checkouts: - opegroups = opegroups.filter(checkout_id__in=checkouts) + opegroups = opegroups.filter(checkout__in=checkouts) + transfergroups = TransferGroup.objects.none() + if transfers_only: + opegroups = OperationGroup.objects.none() + if opes_only: + transfergroups = TransferGroup.objects.none() if accounts: - opegroups = opegroups.filter(on_acc_id__in=accounts) + opegroups = opegroups.filter(on_acc__in=accounts) # Un non-membre de l'équipe n'a que accès à son historique if not request.user.has_perm("kfet.is_team"): opegroups = opegroups.filter(on_acc=request.user.profile.account_kfet) - if limit: - opegroups = opegroups[:limit] # Construction de la réponse - opegroups_list = [] + history_groups = [] for opegroup in opegroups: opegroup_dict = { + "type": "operation", "id": opegroup.id, "amount": opegroup.amount, "at": opegroup.at, "checkout_id": opegroup.checkout_id, "is_cof": opegroup.is_cof, "comment": opegroup.comment, - "opes": [], + "entries": [], "on_acc__trigramme": opegroup.on_acc and opegroup.on_acc.trigramme or None, } if request.user.has_perm("kfet.is_team"): @@ -1482,9 +1510,40 @@ def history_json(request): ope_dict["canceled_by__trigramme"] = ( ope.canceled_by and ope.canceled_by.trigramme or None ) - opegroup_dict["opes"].append(ope_dict) - opegroups_list.append(opegroup_dict) - return JsonResponse({"opegroups": opegroups_list}) + opegroup_dict["entries"].append(ope_dict) + history_groups.append(opegroup_dict) + for transfergroup in transfergroups: + if transfergroup.filtered_transfers: + transfergroup_dict = { + "type": "transfer", + "id": transfergroup.id, + "at": 
transfergroup.at, + "comment": transfergroup.comment, + "entries": [], + } + if request.user.has_perm("kfet.is_team"): + transfergroup_dict["valid_by__trigramme"] = ( + transfergroup.valid_by and transfergroup.valid_by.trigramme or None + ) + + for transfer in transfergroup.filtered_transfers: + transfer_dict = { + "id": transfer.id, + "amount": transfer.amount, + "canceled_at": transfer.canceled_at, + "from_acc": transfer.from_acc.trigramme, + "to_acc": transfer.to_acc.trigramme, + } + if request.user.has_perm("kfet.is_team"): + transfer_dict["canceled_by__trigramme"] = ( + transfer.canceled_by and transfer.canceled_by.trigramme or None + ) + transfergroup_dict["entries"].append(transfer_dict) + history_groups.append(transfergroup_dict) + + history_groups.sort(key=lambda group: group["at"]) + + return JsonResponse({"groups": history_groups}) @teamkfet_required @@ -1544,18 +1603,9 @@ config_update = permission_required("kfet.change_config")(SettingsUpdate.as_view # ----- -@teamkfet_required -def transfers(request): - transfers_pre = Prefetch( - "transfers", queryset=(Transfer.objects.select_related("from_acc", "to_acc")) - ) - - transfergroups = ( - TransferGroup.objects.select_related("valid_by") - .prefetch_related(transfers_pre) - .order_by("-at") - ) - return render(request, "kfet/transfers.html", {"transfergroups": transfergroups}) +@method_decorator(teamkfet_required, name="dispatch") +class TransferView(TemplateView): + template_name = "kfet/transfers.html" @teamkfet_required @@ -1746,7 +1796,12 @@ def cancel_transfers(request): elif hasattr(account, "negative") and not account.negative.balance_offset: account.negative.delete() - data["canceled"] = transfers + transfers = ( + Transfer.objects.values("id", "canceled_at", "canceled_by__trigramme") + .filter(pk__in=transfers) + .order_by("pk") + ) + data["canceled"] = list(transfers) if transfers_already_canceled: data["warnings"]["already_canceled"] = transfers_already_canceled return JsonResponse(data) @@ 
-2145,7 +2200,7 @@ class SupplierUpdate(SuccessMessageMixin, UpdateView): # Vues génériques # --------------- # source : docs.djangoproject.com/fr/1.10/topics/class-based-views/mixins/ -class JSONResponseMixin(object): +class JSONResponseMixin: """ A mixin that can be used to render a JSON response. """ @@ -2174,34 +2229,39 @@ class JSONDetailView(JSONResponseMixin, BaseDetailView): return self.render_to_json_response(context) -class PkUrlMixin(object): - def get_object(self, *args, **kwargs): - get_by = self.kwargs.get(self.pk_url_kwarg) - return get_object_or_404(self.model, **{self.pk_url_kwarg: get_by}) - - class SingleResumeStat(JSONDetailView): - """Manifest for a kind of a stat about an object. + """ + Génère l'interface de sélection pour les statistiques d'un compte/article. + L'interface est constituée d'une série de boutons, qui récupèrent et graphent + des statistiques du même type, sur le même objet mais avec des arguments différents. - Returns JSON whose payload is an array containing descriptions of a stat: - url to retrieve data, label, ... + Attributs : + - url_stat : URL où récupérer les statistiques + - stats : liste de dictionnaires avec les clés suivantes : + - label : texte du bouton + - url_params : paramètres GET à rajouter à `url_stat` + - default : si `True`, graphe à montrer par défaut + On peut aussi définir `stats` dynamiquement, via la fonction `get_stats`. 
""" - id_prefix = "" - nb_default = 0 - - stats = [] url_stat = None + stats = [] + + def get_stats(self): + return self.stats def get_context_data(self, **kwargs): # On n'hérite pas - object_id = self.object.id context = {} stats = [] - prefix = "{}_{}".format(self.id_prefix, object_id) - for i, stat_def in enumerate(self.stats): + # On peut avoir récupéré self.object via pk ou slug + if self.pk_url_kwarg in self.kwargs: url_pk = getattr(self.object, self.pk_url_kwarg) + else: + url_pk = getattr(self.object, self.slug_url_kwarg) + + for stat_def in self.get_stats(): url_params_d = stat_def.get("url_params", {}) if len(url_params_d) > 0: url_params = "?{}".format(urlencode(url_params_d)) @@ -2210,42 +2270,21 @@ class SingleResumeStat(JSONDetailView): stats.append( { "label": stat_def["label"], - "btn": "btn_{}_{}".format(prefix, i), "url": "{url}{params}".format( url=reverse(self.url_stat, args=[url_pk]), params=url_params ), + "default": stat_def.get("default", False), } ) - context["id_prefix"] = prefix - context["content_id"] = "content_%s" % prefix context["stats"] = stats - context["default_stat"] = self.nb_default - context["object_id"] = object_id return context -# ----------------------- -# Evolution Balance perso -# ----------------------- -ID_PREFIX_ACC_BALANCE = "balance_acc" - - -class AccountStatBalanceList(PkUrlMixin, SingleResumeStat): - """Manifest for balance stats of an account.""" - - model = Account - context_object_name = "account" - pk_url_kwarg = "trigramme" - url_stat = "kfet.account.stat.balance" - id_prefix = ID_PREFIX_ACC_BALANCE - stats = [ - {"label": "Tout le temps"}, - {"label": "1 an", "url_params": {"last_days": 365}}, - {"label": "6 mois", "url_params": {"last_days": 183}}, - {"label": "3 mois", "url_params": {"last_days": 90}}, - {"label": "30 jours", "url_params": {"last_days": 30}}, - ] - nb_default = 0 +class UserAccountMixin: + """ + Mixin qui vérifie que le compte traité par la vue est celui de l'utilisateur·ice + actuel·le. 
Dans le cas contraire, renvoie un Http404. + """ def get_object(self, *args, **kwargs): obj = super().get_object(*args, **kwargs) @@ -2253,21 +2292,41 @@ class AccountStatBalanceList(PkUrlMixin, SingleResumeStat): raise Http404 return obj - @method_decorator(login_required) - def dispatch(self, *args, **kwargs): - return super().dispatch(*args, **kwargs) + +# ----------------------- +# Evolution Balance perso +# ----------------------- -class AccountStatBalance(PkUrlMixin, JSONDetailView): - """Datasets of balance of an account. - - Operations and Transfers are taken into account. - +@method_decorator(login_required, name="dispatch") +class AccountStatBalanceList(UserAccountMixin, SingleResumeStat): + """ + Menu général pour l'historique de balance d'un compte """ model = Account - pk_url_kwarg = "trigramme" - context_object_name = "account" + slug_url_kwarg = "trigramme" + slug_field = "trigramme" + url_stat = "kfet.account.stat.balance" + stats = [ + {"label": "Tout le temps"}, + {"label": "1 an", "url_params": {"last_days": 365}}, + {"label": "6 mois", "url_params": {"last_days": 183}}, + {"label": "3 mois", "url_params": {"last_days": 90}, "default": True}, + {"label": "30 jours", "url_params": {"last_days": 30}}, + ] + + +@method_decorator(login_required, name="dispatch") +class AccountStatBalance(UserAccountMixin, JSONDetailView): + """ + Statistiques (JSON) d'historique de balance d'un compte. + Prend en compte les opérations et transferts sur la période donnée. 
+ """ + + model = Account + slug_url_kwarg = "trigramme" + slug_field = "trigramme" def get_changes_list(self, last_days=None, begin_date=None, end_date=None): account = self.object @@ -2366,57 +2425,50 @@ class AccountStatBalance(PkUrlMixin, JSONDetailView): # TODO: offset return context - def get_object(self, *args, **kwargs): - obj = super().get_object(*args, **kwargs) - if self.request.user != obj.user: - raise Http404 - return obj - - @method_decorator(login_required) - def dispatch(self, *args, **kwargs): - return super().dispatch(*args, **kwargs) - # ------------------------ # Consommation personnelle # ------------------------ -ID_PREFIX_ACC_LAST = "last_acc" -ID_PREFIX_ACC_LAST_DAYS = "last_days_acc" -ID_PREFIX_ACC_LAST_WEEKS = "last_weeks_acc" -ID_PREFIX_ACC_LAST_MONTHS = "last_months_acc" -class AccountStatOperationList(PkUrlMixin, SingleResumeStat): - """Manifest for operations stats of an account.""" +@method_decorator(login_required, name="dispatch") +class AccountStatOperationList(UserAccountMixin, SingleResumeStat): + """ + Menu général pour l'historique de consommation d'un compte + """ model = Account - context_object_name = "account" - pk_url_kwarg = "trigramme" - id_prefix = ID_PREFIX_ACC_LAST - nb_default = 2 - stats = last_stats_manifest(types=[Operation.PURCHASE]) + slug_url_kwarg = "trigramme" + slug_field = "trigramme" url_stat = "kfet.account.stat.operation" - def get_object(self, *args, **kwargs): - obj = super().get_object(*args, **kwargs) - if self.request.user != obj.user: - raise Http404 - return obj + def get_stats(self): + scales_def = [ + ( + "Tout le temps", + MonthScale, + {"last": True, "begin": self.object.created_at}, + False, + ), + ("1 an", MonthScale, {"last": True, "n_steps": 12}, False), + ("3 mois", WeekScale, {"last": True, "n_steps": 13}, True), + ("2 semaines", DayScale, {"last": True, "n_steps": 14}, False), + ] - @method_decorator(login_required) - def dispatch(self, *args, **kwargs): - return 
super().dispatch(*args, **kwargs) + return scale_url_params(scales_def, types=[Operation.PURCHASE]) -class AccountStatOperation(ScaleMixin, PkUrlMixin, JSONDetailView): - """Datasets of operations of an account.""" +@method_decorator(login_required, name="dispatch") +class AccountStatOperation(UserAccountMixin, ScaleMixin, JSONDetailView): + """ + Statistiques (JSON) de consommation (nb d'items achetés) d'un compte. + """ model = Account - pk_url_kwarg = "trigramme" - context_object_name = "account" - id_prefix = "" + slug_url_kwarg = "trigramme" + slug_field = "trigramme" - def get_operations(self, scale, types=None): + def get_operations(self, types=None): # On selectionne les opérations qui correspondent # à l'article en question et qui ne sont pas annulées # puis on choisi pour chaques intervalle les opérations @@ -2428,28 +2480,20 @@ class AccountStatOperation(ScaleMixin, PkUrlMixin, JSONDetailView): ) if types is not None: all_operations = all_operations.filter(type__in=types) - chunks = scale.get_by_chunks( - all_operations, - field_db="group__at", - field_callback=(lambda d: d["group__at"]), - ) - return chunks + return all_operations def get_context_data(self, *args, **kwargs): - old_ctx = super().get_context_data(*args, **kwargs) - context = {"labels": old_ctx["labels"]} - scale = self.scale + context = super().get_context_data(*args, **kwargs) types = self.request.GET.get("types", None) if types is not None: types = ast.literal_eval(types) - operations = self.get_operations(types=types, scale=scale) + operations = self.get_operations(types=types) # On compte les opérations - nb_ventes = [] - for chunk in operations: - ventes = sum(ope["article_nb"] for ope in chunk) - nb_ventes.append(ventes) + nb_ventes = self.scale.chunkify_qs( + operations, field="group__at", aggregate=Sum("article_nb") + ) context["charts"] = [ { @@ -2460,50 +2504,54 @@ class AccountStatOperation(ScaleMixin, PkUrlMixin, JSONDetailView): ] return context - def get_object(self, *args, 
**kwargs): - obj = super().get_object(*args, **kwargs) - if self.request.user != obj.user: - raise Http404 - return obj - - @method_decorator(login_required) - def dispatch(self, *args, **kwargs): - return super().dispatch(*args, **kwargs) - # ------------------------ # Article Satistiques Last # ------------------------ -ID_PREFIX_ART_LAST = "last_art" -ID_PREFIX_ART_LAST_DAYS = "last_days_art" -ID_PREFIX_ART_LAST_WEEKS = "last_weeks_art" -ID_PREFIX_ART_LAST_MONTHS = "last_months_art" +@method_decorator(teamkfet_required, name="dispatch") class ArticleStatSalesList(SingleResumeStat): - """Manifest for sales stats of an article.""" + """ + Menu pour les statistiques de vente d'un article. + """ model = Article - context_object_name = "article" - id_prefix = ID_PREFIX_ART_LAST nb_default = 2 url_stat = "kfet.article.stat.sales" - stats = last_stats_manifest() - @method_decorator(teamkfet_required) - def dispatch(self, *args, **kwargs): - return super().dispatch(*args, **kwargs) + def get_stats(self): + first_conso = ( + Operation.objects.filter(article=self.object) + .order_by("group__at") + .values_list("group__at", flat=True) + .first() + ) + if first_conso is None: + # On le crée dans le passé au cas où + first_conso = timezone.now() - timedelta(seconds=1) + scales_def = [ + ("Tout le temps", MonthScale, {"last": True, "begin": first_conso}, False), + ("1 an", MonthScale, {"last": True, "n_steps": 12}, False), + ("3 mois", WeekScale, {"last": True, "n_steps": 13}, True), + ("2 semaines", DayScale, {"last": True, "n_steps": 14}, False), + ] + + return scale_url_params(scales_def) +@method_decorator(teamkfet_required, name="dispatch") class ArticleStatSales(ScaleMixin, JSONDetailView): - """Datasets of sales of an article.""" + """ + Statistiques (JSON) de vente d'un article. + Sépare LIQ et les comptes K-Fêt, et rajoute le total. 
+ """ model = Article context_object_name = "article" def get_context_data(self, *args, **kwargs): - old_ctx = super().get_context_data(*args, **kwargs) - context = {"labels": old_ctx["labels"]} + context = super().get_context_data(*args, **kwargs) scale = self.scale all_purchases = ( @@ -2516,23 +2564,13 @@ class ArticleStatSales(ScaleMixin, JSONDetailView): liq_only = all_purchases.filter(group__on_acc__trigramme="LIQ") liq_exclude = all_purchases.exclude(group__on_acc__trigramme="LIQ") - chunks_liq = scale.get_by_chunks( - liq_only, field_db="group__at", field_callback=lambda d: d["group__at"] + nb_liq = scale.chunkify_qs( + liq_only, field="group__at", aggregate=Sum("article_nb") ) - chunks_no_liq = scale.get_by_chunks( - liq_exclude, field_db="group__at", field_callback=lambda d: d["group__at"] + nb_accounts = scale.chunkify_qs( + liq_exclude, field="group__at", aggregate=Sum("article_nb") ) - - # On compte les opérations - nb_ventes = [] - nb_accounts = [] - nb_liq = [] - for chunk_liq, chunk_no_liq in zip(chunks_liq, chunks_no_liq): - sum_accounts = sum(ope["article_nb"] for ope in chunk_no_liq) - sum_liq = sum(ope["article_nb"] for ope in chunk_liq) - nb_ventes.append(sum_accounts + sum_liq) - nb_accounts.append(sum_accounts) - nb_liq.append(sum_liq) + nb_ventes = [n1 + n2 for n1, n2 in zip(nb_liq, nb_accounts)] context["charts"] = [ { @@ -2548,7 +2586,3 @@ class ArticleStatSales(ScaleMixin, JSONDetailView): }, ] return context - - @method_decorator(teamkfet_required) - def dispatch(self, *args, **kwargs): - return super().dispatch(*args, **kwargs) diff --git a/setup.cfg b/setup.cfg index 100ddb22..1a9901cb 100644 --- a/setup.cfg +++ b/setup.cfg @@ -9,7 +9,6 @@ source = kfet petitscours shared - utils omit = *migrations* *test*.py @@ -37,7 +36,7 @@ default_section = THIRDPARTY force_grid_wrap = 0 include_trailing_comma = true known_django = django -known_first_party = bda,bds,clubs,cof,events,gestioncof,kfet,petitscours,shared,utils +known_first_party = 
bda,bds,clubs,cof,events,gestioncof,kfet,petitscours,shared line_length = 88 multi_line_output = 3 not_skip = __init__.py diff --git a/utils/__init__.py b/shared/__init__.py similarity index 100% rename from utils/__init__.py rename to shared/__init__.py diff --git a/shared/tests/testcases.py b/shared/tests/testcases.py index 35d697e7..507e1361 100644 --- a/shared/tests/testcases.py +++ b/shared/tests/testcases.py @@ -111,7 +111,7 @@ class TestCaseMixin: mock_context_manager.return_value.__enter__.return_value = mock_connection patcher = mock.patch( - "gestioncof.autocomplete.Connection", new=mock_context_manager + "shared.views.autocomplete.Connection", new=mock_context_manager ) patcher.start() self.addCleanup(patcher.stop) diff --git a/shared/views/autocomplete.py b/shared/views/autocomplete.py new file mode 100644 index 00000000..af5e3980 --- /dev/null +++ b/shared/views/autocomplete.py @@ -0,0 +1,184 @@ +from collections import namedtuple + +from dal import autocomplete +from django.conf import settings +from django.db.models import Q + +if getattr(settings, "LDAP_SERVER_URL", None): + from ldap3 import Connection +else: + # shared.tests.testcases.TestCaseMixin.mockLDAP needs + # Connection to be defined + Connection = None + + +class SearchUnit: + """Base class for all the search utilities. + + A search unit should implement a `search` method taking a list of keywords as + argument and returning an iterable of search results. + """ + + def search(self, _keywords): + raise NotImplementedError( + "Class implementing the SearchUnit interface should implement the search " + "method" + ) + + +# --- +# Model-based search +# --- + + +class ModelSearch(SearchUnit): + """Basic search engine for models based on filtering. + + The class should be configured through its `model` class attribute: the `search` + method will return a queryset of instances of this model. The `search_fields` + attribute indicates which fields to search in. 
+ + Example: + + >>> from django.contrib.auth.models import User + >>> + >>> class UserSearch(ModelSearch): + ... model = User + ... search_fields = ["username", "first_name", "last_name"] + >>> + >>> user_search = UserSearch() # has type ModelSearch[User] + >>> user_search.search(["toto", "foo"]) # returns a queryset of Users + """ + + model = None + search_fields = [] + + def get_queryset_filter(self, keywords): + filter_q = Q() + + if not keywords: + return filter_q + + for keyword in keywords: + kw_filter = Q() + for field in self.search_fields: + kw_filter |= Q(**{"{}__icontains".format(field): keyword}) + filter_q &= kw_filter + + return filter_q + + def search(self, keywords): + """Returns the queryset of model instances matching all the keywords. + + The semantic of the search is the following: a model instance appears in the + search results iff all of the keywords given as arguments occur in at least one + of the search fields. + """ + + return self.model.objects.filter(self.get_queryset_filter(keywords)) + + +class Select2QuerySetView(ModelSearch, autocomplete.Select2QuerySetView): + """Compatibility layer between ModelSearch and Select2QuerySetView.""" + + def get_queryset(self): + keywords = self.q.split() + return super().search(keywords) + + +# --- +# LDAP search +# --- + +Clipper = namedtuple("Clipper", ["clipper", "fullname"]) + + +class LDAPSearch(SearchUnit): + ldap_server_url = getattr(settings, "LDAP_SERVER_URL", None) + domain_component = "dc=spi,dc=ens,dc=fr" + search_fields = ["cn", "uid"] + + def get_ldap_query(self, keywords): + """Return a search query with the following semantics: + + A Clipper appears in the search results iff all of the keywords given as + arguments occur in at least one of the search fields. 
+ """ + + # Dumb but safe + keywords = filter(str.isalnum, keywords) + + ldap_filters = [] + + for keyword in keywords: + ldap_filter = "(|{})".format( + "".join( + "({}=*{}*)".format(field, keyword) for field in self.search_fields + ) + ) + ldap_filters.append(ldap_filter) + + return "(&{})".format("".join(ldap_filters)) + + def search(self, keywords): + """Return a list of Clipper objects matching all the keywords.""" + + query = self.get_ldap_query(keywords) + + if Connection is None or query == "(&)": + return [] + + with Connection(self.ldap_server_url) as conn: + conn.search(self.domain_component, query, attributes=self.search_fields) + return [Clipper(entry.uid.value, entry.cn.value) for entry in conn.entries] + + +# --- +# Composition of autocomplete units +# --- + + +class Compose: + """Search with several units and remove duplicate results. + + The `search_units` class attribute should be a list of tuples of the form `(name, + uniq_key, search_unit)`. + + The `search` method produces a dictionary whose keys are the `name`s given in + `search_units` and whose values are iterables produced by the different search + units. + + The `uniq_key`s are used to remove duplicates: for instance, say that search unit + 1 has `uniq_key = "username"` and search unit 2 has `uniq_key = "clipper"`, then + search results from unit 2 whose `.clipper` attribute is equal to the + `.username` attribute of some result from unit 1 are omitted. + + Typical Example: + + >>> from django.contrib.auth.models import User + >>> + >>> class UserSearch(ModelSearch): + ... model = User + ... search_fields = ["username", "first_name", "last_name"] + >>> + >>> class UserAndClipperSearch(Compose): + ... search_units = [ + ... ("users", "username", UserSearch), + ... ("clippers", "clipper", LDAPSearch), + ... ] + + In this example, clipper accounts that already have an associated user (i.e. with a + username equal to the clipper login), will not appear in the results. 
+ """ + + search_units = [] + + def search(self, keywords): + uniq_results = set() + results = {} + for name, uniq_key, search_unit in self.search_units: + res = search_unit().search(keywords) + res = [r for r in res if getattr(r, uniq_key) not in uniq_results] + uniq_results |= set((getattr(r, uniq_key) for r in res)) + results[name] = res + return results diff --git a/utils/views/__init__.py b/utils/views/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/utils/views/autocomplete.py b/utils/views/autocomplete.py deleted file mode 100644 index c5d51343..00000000 --- a/utils/views/autocomplete.py +++ /dev/null @@ -1,25 +0,0 @@ -from dal import autocomplete -from django.db.models import Q - - -class Select2QuerySetView(autocomplete.Select2QuerySetView): - model = None - search_fields = [] - - def get_queryset_filter(self): - q = self.q - filter_q = Q() - - if not q: - return filter_q - - words = q.split() - - for word in words: - for field in self.search_fields: - filter_q |= Q(**{"{}__icontains".format(field): word}) - - return filter_q - - def get_queryset(self): - return self.model.objects.filter(self.get_queryset_filter())