Merge branch 'master' into Evarin/sitecof-improvements

commit 67d7dafc14
Author: Martin Pépin
Date:   2020-05-08 15:56:42 +02:00
27 changed files with 1144 additions and 1139 deletions

View file

@@ -1,4 +1,4 @@
-image: "python:3.5"
+image: "python:3.7"
 
 variables:
   # GestioCOF settings
@@ -18,7 +18,8 @@ variables:
   # psql password authentication
   PGPASSWORD: $POSTGRES_PASSWORD
 
-.test_template:
+test:
+  stage: test
   before_script:
     - mkdir -p vendor/{pip,apt}
    - apt-get update -q && apt-get -o dir::cache::archives="vendor/apt" install -yqq postgresql-client
@@ -33,7 +34,7 @@ variables:
   after_script:
     - coverage report
   services:
-    - postgres:9.6
+    - postgres:11.7
     - redis:latest
   cache:
     key: test
@@ -43,27 +44,16 @@ variables:
   # Keep this disabled for now, as it may kill GitLab...
   # coverage: '/TOTAL.*\s(\d+\.\d+)\%$/'
 
-test35:
-  extends: ".test_template"
-  image: "python:3.5"
-  stage: test
-
-test37:
-  extends: ".test_template"
-  image: "python:3.7"
-  stage: test
-
 linters:
-  image: python:3.6
   stage: test
   before_script:
     - mkdir -p vendor/pip
     - pip install --upgrade black isort flake8
   script:
     - black --check .
-    - isort --recursive --check-only --diff bda bds clubs cof events gestioncof kfet petitscours provisioning shared utils
+    - isort --recursive --check-only --diff bda bds clubs cof events gestioncof kfet petitscours provisioning shared
     # Print errors only
-    - flake8 --exit-zero bda bds clubs cof events gestioncof kfet petitscours provisioning shared utils
+    - flake8 --exit-zero bda bds clubs cof events gestioncof kfet petitscours provisioning shared
   cache:
     key: linters
     paths:
@@ -81,7 +71,7 @@ migration_checks:
   script: python manage.py makemigrations --dry-run --check
   services:
     # this should not be necessary…
-    - postgres:9.6
+    - postgres:11.7
   cache:
     key: migration_checks
     paths:

View file

@@ -8,6 +8,7 @@ Liste des changements notables dans GestioCOF depuis la version 0.1 (septembre
 - Nouveau module de gestion des événements
 - Nouveau module BDS
 - Nouveau module clubs
+- Module d'autocomplétion indépendant des apps
 
 ## Upcoming
 
@@ -19,6 +20,14 @@ Liste des changements notables dans GestioCOF depuis la version 0.1 (septembre
 - Les montants en K-Fêt sont à nouveau affichés en UKF (et non en €).
 - Les boutons "afficher/cacher" des mails et noms des participant⋅e⋅s à un
   spectacle BdA fonctionnent à nouveau.
+- on ne peut plus compter de consos sur ☠☠☠, ni éditer les comptes spéciaux
+  (LIQ, GNR, ☠☠☠, #13).
+
+### Nouvelles fonctionnalités
+
+- Les transferts apparaissent maintenant dans l'historique K-Fêt et l'historique
+  personnel.
+- les statistiques K-Fêt remontent à plus d'un an (et le code est simplifié)
 
 ## Version 0.4.1 - 17/01/2020

View file

@@ -42,7 +42,7 @@ from bda.models import (
     Tirage,
 )
 from gestioncof.decorators import BuroRequiredMixin, buro_required, cof_required
-from utils.views.autocomplete import Select2QuerySetView
+from shared.views.autocomplete import Select2QuerySetView
 
 
 @cof_required

View file

@@ -1,94 +1,56 @@
-from django import shortcuts
-from django.conf import settings
-from django.contrib.auth.models import User
-from django.db.models import Q
-from django.http import Http404
-
-from gestioncof.decorators import buro_required
-from gestioncof.models import CofProfile
-
-if getattr(settings, "LDAP_SERVER_URL", None):
-    from ldap3 import Connection
-else:
-    # shared.tests.testcases.TestCaseMixin.mockLDAP needs
-    # Connection to be defined in order to mock it.
-    Connection = None
-
-
-class Clipper(object):
-    def __init__(self, clipper, fullname):
-        if fullname is None:
-            fullname = ""
-        assert isinstance(clipper, str)
-        assert isinstance(fullname, str)
-        self.clipper = clipper
-        self.fullname = fullname
-
-    def __str__(self):
-        return "{} ({})".format(self.clipper, self.fullname)
-
-    def __eq__(self, other):
-        return self.clipper == other.clipper and self.fullname == other.fullname
-
-
-@buro_required
-def autocomplete(request):
-    if "q" not in request.GET:
-        raise Http404
-    q = request.GET["q"]
-    data = {"q": q}
-
-    queries = {}
-    bits = q.split()
-
-    # Fetching data from User and CofProfile tables
-    queries["members"] = CofProfile.objects.filter(is_cof=True)
-    queries["users"] = User.objects.filter(profile__is_cof=False)
-    for bit in bits:
-        queries["members"] = queries["members"].filter(
-            Q(user__first_name__icontains=bit)
-            | Q(user__last_name__icontains=bit)
-            | Q(user__username__icontains=bit)
-            | Q(login_clipper__icontains=bit)
-        )
-        queries["users"] = queries["users"].filter(
-            Q(first_name__icontains=bit)
-            | Q(last_name__icontains=bit)
-            | Q(username__icontains=bit)
-        )
-    queries["members"] = queries["members"].distinct()
-    queries["users"] = queries["users"].distinct()
-
-    # Clearing redundancies
-    usernames = set(queries["members"].values_list("login_clipper", flat="True")) | set(
-        queries["users"].values_list("profile__login_clipper", flat="True")
-    )
-
-    # Fetching data from the SPI
-    if getattr(settings, "LDAP_SERVER_URL", None):
-        # Fetching
-        ldap_query = "(&{:s})".format(
-            "".join(
-                "(|(cn=*{bit:s}*)(uid=*{bit:s}*))".format(bit=bit)
-                for bit in bits
-                if bit.isalnum()
-            )
-        )
-        if ldap_query != "(&)":
-            # If none of the bits were legal, we do not perform the query
-            entries = None
-            with Connection(settings.LDAP_SERVER_URL) as conn:
-                conn.search("dc=spi,dc=ens,dc=fr", ldap_query, attributes=["uid", "cn"])
-                entries = conn.entries
-            # Clearing redundancies
-            queries["clippers"] = [
-                Clipper(entry.uid.value, entry.cn.value)
-                for entry in entries
-                if entry.uid.value and entry.uid.value not in usernames
-            ]
-
-    # Resulting data
-    data.update(queries)
-    data["options"] = sum(len(query) for query in queries)
-
-    return shortcuts.render(request, "autocomplete_user.html", data)
+from django.contrib.auth import get_user_model
+from django.db.models import Q
+from django.http import Http404
+from django.views.generic import TemplateView
+
+from gestioncof.decorators import buro_required
+from shared.views import autocomplete
+
+User = get_user_model()
+
+
+class COFMemberSearch(autocomplete.ModelSearch):
+    model = User
+    search_fields = ["username", "first_name", "last_name"]
+
+    def get_queryset_filter(self, *args, **kwargs):
+        qset_filter = super().get_queryset_filter(*args, **kwargs)
+        qset_filter &= Q(profile__is_cof=True)
+        return qset_filter
+
+
+class COFOthersSearch(autocomplete.ModelSearch):
+    model = User
+    search_fields = ["username", "first_name", "last_name"]
+
+    def get_queryset_filter(self, *args, **kwargs):
+        qset_filter = super().get_queryset_filter(*args, **kwargs)
+        qset_filter &= Q(profile__is_cof=False)
+        return qset_filter
+
+
+class COFSearch(autocomplete.Compose):
+    search_units = [
+        ("members", "username", COFMemberSearch),
+        ("others", "username", COFOthersSearch),
+        ("clippers", "clipper", autocomplete.LDAPSearch),
+    ]
+
+
+cof_search = COFSearch()
+
+
+class AutocompleteView(TemplateView):
+    template_name = "gestioncof/search_results.html"
+
+    def get_context_data(self, *args, **kwargs):
+        ctx = super().get_context_data(*args, **kwargs)
+        if "q" not in self.request.GET:
+            raise Http404
+        q = self.request.GET["q"]
+        ctx["q"] = q
+        ctx.update(cof_search.search(q.split()))
+        return ctx
+
+
+autocomplete = buro_required(AutocompleteView.as_view())
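
The new view delegates all querying to `cof_search`. A minimal sketch of driving the composed search directly; the dict-shaped return value keyed by the names in `search_units` is an assumption inferred from `ctx.update(cof_search.search(q.split()))` above, not a documented API:

```python
# Hypothetical interactive use of the composed search; the return shape
# (a dict with "members", "others" and "clippers" keys) is inferred from
# COFSearch.search_units and the way AutocompleteView feeds its context.
from gestioncof.autocomplete import cof_search

results = cof_search.search(["dupont"])  # one search word per list item
for section in ("members", "others", "clippers"):
    for hit in results.get(section, []):
        print(section, hit)
```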

View file

@@ -1,29 +0,0 @@
-{% load utils %}
-
-<ul>
-{% if members %}
-<li class="autocomplete-header">Membres du COF</li>
-{% for member in members %}{% if forloop.counter < 5 %}
-<li class="autocomplete-value"><a href="{% url 'user-registration' member.user.username %}">{{ member.user|highlight_user:q }}</a></li>
-{% elif forloop.counter == 5 %}<li class="autocomplete-more">...</a>{% endif %}{% endfor %}
-{% endif %}
-{% if users %}
-<li class="autocomplete-header">Utilisateurs de GestioCOF</li>
-{% for user in users %}{% if forloop.counter < 5 %}
-<li class="autocomplete-value"><a href="{% url 'user-registration' user.username %}">{{ user|highlight_user:q }}</a></li>
-{% elif forloop.counter == 5 %}<li class="autocomplete-more">...</a>{% endif %}{% endfor %}
-{% endif %}
-{% if clippers %}
-<li class="autocomplete-header">Utilisateurs <tt>clipper</tt></li>
-{% for clipper in clippers %}{% if forloop.counter < 5 %}
-<li class="autocomplete-value"><a href="{% url 'clipper-registration' clipper.clipper clipper.fullname %}">{{ clipper|highlight_clipper:q }}</a></li>
-{% elif forloop.counter == 5 %}<li class="autocomplete-more">...</a>{% endif %}{% endfor %}
-{% endif %}
-
-{% if not options %}
-<li class="autocomplete-header">Aucune correspondance trouvée</li>
-{% else %}
-<li class="autocomplete-header">Pas dans la liste ?</li>
-{% endif %}
-<li><a href="{% url 'empty-registration' %}">Créer un compte</a></li>
-</ul>

View file

@@ -0,0 +1,56 @@
+{% load utils %}
+
+<ul>
+  {% if members %}
+  <li class="autocomplete-header">Membres</li>
+  {% for user in members %}
+  {% if forloop.counter < 5 %}
+  <li class="autocomplete-value">
+    <a href="{% url "user-registration" user.username %}">
+      {{ user|highlight_user:q }}
+    </a>
+  </li>
+  {% elif forloop.counter == 5 %}
+  <li class="autocomplete-more">...</li>
+  {% endif %}
+  {% endfor %}
+  {% endif %}
+
+  {% if others %}
+  <li class="autocomplete-header">Non-membres</li>
+  {% for user in others %}
+  {% if forloop.counter < 5 %}
+  <li class="autocomplete-value">
+    <a href="{% url "user-registration" user.username %}">
+      {{ user|highlight_user:q }}
+    </a>
+  </li>
+  {% elif forloop.counter == 5 %}
+  <li class="autocomplete-more">...</li>
+  {% endif %}
+  {% endfor %}
+  {% endif %}
+
+  {% if clippers %}
+  <li class="autocomplete-header">Utilisateurs <tt>clipper</tt></li>
+  {% for clipper in clippers %}
+  {% if forloop.counter < 5 %}
+  <li class="autocomplete-value">
+    <a href="{% url "clipper-registration" clipper.clipper clipper.fullname %}">
+      {{ clipper|highlight_clipper:q }}
+    </a>
+  </li>
+  {% elif forloop.counter == 5 %}
+  <li class="autocomplete-more">...</li>
+  {% endif %}
+  {% endfor %}
+  {% endif %}
+
+  {% if total %}
+  <li class="autocomplete-header">Pas dans la liste ?</li>
+  {% else %}
+  <li class="autocomplete-header">Aucune correspondance trouvée</li>
+  {% endif %}
+
+  <li><a href="{% url "empty-registration" %}">Créer un compte</a></li>
+</ul>

View file

@@ -15,9 +15,9 @@ from django.test import Client, TestCase, override_settings
 from django.urls import reverse
 
 from bda.models import Salle, Tirage
-from gestioncof.autocomplete import Clipper
 from gestioncof.models import CalendarSubscription, Club, Event, Survey, SurveyAnswer
 from gestioncof.tests.testcases import ViewTestCaseMixin
+from shared.views.autocomplete import Clipper
 
 from .utils import create_member, create_root, create_user
@@ -285,21 +285,19 @@ class RegistrationAutocompleteViewTests(ViewTestCaseMixin, TestCase):
         self.mockLDAP([])
 
-    def _test(self, query, expected_users, expected_members, expected_clippers):
+    def _test(self, query, expected_others, expected_members, expected_clippers):
         r = self.client.get(self.url, {"q": query})
         self.assertEqual(r.status_code, 200)
 
         self.assertQuerysetEqual(
-            r.context["users"], map(repr, expected_users), ordered=False
+            r.context["others"], map(repr, expected_others), ordered=False
         )
         self.assertQuerysetEqual(
-            r.context["members"],
-            map(lambda u: repr(u.profile), expected_members),
-            ordered=False,
+            r.context["members"], map(repr, expected_members), ordered=False,
         )
         self.assertCountEqual(
-            map(str, r.context.get("clippers", [])), map(str, expected_clippers)
+            map(str, r.context["clippers"]), map(str, expected_clippers)
         )
 
     def test_username(self):
@@ -322,7 +320,7 @@ class RegistrationAutocompleteViewTests(ViewTestCaseMixin, TestCase):
         mock_ldap.search.assert_called_once_with(
             "dc=spi,dc=ens,dc=fr",
             "(&(|(cn=*aa*)(uid=*aa*))(|(cn=*bb*)(uid=*bb*)))",
-            attributes=["uid", "cn"],
+            attributes=["cn", "uid"],
         )
 
     def test_clipper_escaped(self):
@@ -333,14 +331,14 @@ class RegistrationAutocompleteViewTests(ViewTestCaseMixin, TestCase):
         mock_ldap.search.assert_not_called()
 
     def test_clipper_no_duplicate(self):
-        self.mockLDAP([("uid", "uu_u1")])
+        self.mockLDAP([("uid", "abc")])
 
-        self._test("uu u1", [self.u1], [], [Clipper("uid", "uu_u1")])
+        self._test("abc", [self.u1], [], [Clipper("uid", "abc")])
 
-        self.u1.profile.login_clipper = "uid"
-        self.u1.profile.save()
+        self.u1.username = "uid"
+        self.u1.save()
 
-        self._test("uu u1", [self.u1], [], [])
+        self._test("abc", [self.u1], [], [])
 
 
 class HomeViewTests(ViewTestCaseMixin, TestCase):

View file

@@ -58,7 +58,7 @@ from gestioncof.models import (
     SurveyQuestion,
     SurveyQuestionAnswer,
 )
-from utils.views.autocomplete import Select2QuerySetView
+from shared.views.autocomplete import Select2QuerySetView
 
 
 class HomeView(LoginRequiredMixin, TemplateView):

View file

@@ -24,6 +24,8 @@ from kfet.models import (
     TransferGroup,
 )
 
+from . import KFET_DELETED_TRIGRAMME
+from .auth import KFET_GENERIC_TRIGRAMME
 from .auth.forms import UserGroupForm  # noqa
 
 # -----
@@ -324,7 +326,10 @@ class KPsulOperationGroupForm(forms.ModelForm):
         widget=forms.HiddenInput(),
     )
     on_acc = forms.ModelChoiceField(
-        queryset=Account.objects.exclude(trigramme="GNR"), widget=forms.HiddenInput()
+        queryset=Account.objects.exclude(
+            trigramme__in=[KFET_DELETED_TRIGRAMME, KFET_GENERIC_TRIGRAMME]
+        ),
+        widget=forms.HiddenInput(),
     )
 
     class Meta:
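
A quick regression test for the tightened queryset could look like the sketch below; the constants' import paths follow the relative imports added above:

```python
# Sketch: the hidden on_acc field should now reject both special accounts.
from kfet import KFET_DELETED_TRIGRAMME
from kfet.auth import KFET_GENERIC_TRIGRAMME
from kfet.forms import KPsulOperationGroupForm


def test_on_acc_excludes_special_accounts():
    qs = KPsulOperationGroupForm().fields["on_acc"].queryset
    for trigramme in (KFET_DELETED_TRIGRAMME, KFET_GENERIC_TRIGRAMME):
        assert not qs.filter(trigramme=trigramme).exists()
```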

View file

@@ -150,6 +150,15 @@ class Account(models.Model):
     def readable(self):
         return self.trigramme not in [KFET_DELETED_TRIGRAMME, KFET_GENERIC_TRIGRAMME]
 
+    @property
+    def editable(self):
+        return self.trigramme not in [
+            KFET_DELETED_TRIGRAMME,
+            KFET_GENERIC_TRIGRAMME,
+            "LIQ",
+            "#13",
+        ]
+
     @property
     def is_team(self):
         return self.has_perm("kfet.is_team")

View file

@@ -20,7 +20,7 @@
     z-index:10;
 }
 
-#history .opegroup {
+#history .group {
     height:30px;
     line-height:30px;
     background-color: #c63b52;
@@ -30,29 +30,29 @@
     overflow:auto;
 }
 
-#history .opegroup .time {
+#history .group .time {
     width:70px;
 }
 
-#history .opegroup .trigramme {
+#history .group .trigramme {
     width:55px;
     text-align:right;
 }
 
-#history .opegroup .amount {
+#history .group .amount {
     text-align:right;
     width:90px;
 }
 
-#history .opegroup .valid_by {
+#history .group .valid_by {
     padding-left:20px
 }
 
-#history .opegroup .comment {
+#history .group .comment {
     padding-left:20px;
 }
 
-#history .ope {
+#history .entry {
     position:relative;
     height:25px;
     line-height:24px;
@@ -61,38 +61,38 @@
     overflow:auto;
 }
 
-#history .ope .amount {
+#history .entry .amount {
     width:50px;
     text-align:right;
 }
 
-#history .ope .infos1 {
+#history .entry .infos1 {
     width:80px;
     text-align:right;
 }
 
-#history .ope .infos2 {
+#history .entry .infos2 {
     padding-left:15px;
 }
 
-#history .ope .addcost {
+#history .entry .addcost {
     padding-left:20px;
 }
 
-#history .ope .canceled {
+#history .entry .canceled {
     padding-left:20px;
 }
 
-#history div.ope.ui-selected, #history div.ope.ui-selecting {
+#history div.entry.ui-selected, #history div.entry.ui-selecting {
     background-color:rgba(200,16,46,0.6);
     color:#FFF;
 }
 
-#history .ope.canceled, #history .transfer.canceled {
+#history .entry.canceled {
     color:#444;
 }
 
-#history .ope.canceled::before, #history.transfer.canceled::before {
+#history .entry.canceled::before {
     position: absolute;
     content: ' ';
     width:100%;
@@ -101,10 +101,11 @@
     border-top: 1px solid rgba(200,16,46,0.5);
 }
 
-#history .transfer .amount {
-    width:80px;
+#history .group .infos {
+    text-align:center;
+    width:145px;
 }
 
-#history .transfer .from_acc {
-    padding-left:10px;
+#history .entry .glyphicon {
+    padding-left:15px;
 }

View file

@@ -2,31 +2,59 @@ function dateUTCToParis(date) {
     return moment.tz(date, 'UTC').tz('Europe/Paris');
 }
 
+// TODO : classifier (later)
 function KHistory(options = {}) {
     $.extend(this, KHistory.default_options, options);
 
     this.$container = $(this.container);
 
+    this.$container.selectable({
+        filter: 'div.group, div.entry',
+        selected: function (e, ui) {
+            $(ui.selected).each(function () {
+                if ($(this).hasClass('group')) {
+                    var id = $(this).data('id');
+                    $(this).siblings('.entry').filter(function () {
+                        return $(this).data('group_id') == id
+                    }).addClass('ui-selected');
+                }
+            });
+        },
+    });
+
     this.reset = function () {
         this.$container.html('');
     };
 
-    this.addOpeGroup = function (opegroup) {
-        var $day = this._getOrCreateDay(opegroup['at']);
-        var $opegroup = this._opeGroupHtml(opegroup);
+    this.add_history_group = function (group) {
+        var $day = this._get_or_create_day(group['at']);
+        var $group = this._group_html(group);
 
-        $day.after($opegroup);
+        $day.after($group);
 
-        var trigramme = opegroup['on_acc_trigramme'];
-        var is_cof = opegroup['is_cof'];
-        for (var i = 0; i < opegroup['opes'].length; i++) {
-            var $ope = this._opeHtml(opegroup['opes'][i], is_cof, trigramme);
-            $ope.data('opegroup', opegroup['id']);
-            $opegroup.after($ope);
+        var trigramme = group['on_acc_trigramme'];
+        var is_cof = group['is_cof'];
+        var type = group['type']
+
+        // TODO : simplifier ça ?
+        switch (type) {
+            case 'operation':
+                for (let ope of group['entries']) {
+                    var $ope = this._ope_html(ope, is_cof, trigramme);
+                    $ope.data('group_id', group['id']);
+                    $group.after($ope);
+                }
+                break;
+            case 'transfer':
+                for (let transfer of group['entries']) {
+                    var $transfer = this._transfer_html(transfer);
+                    $transfer.data('group_id', group['id']);
+                    $group.after($transfer);
+                }
+                break;
         }
     }
 
-    this._opeHtml = function (ope, is_cof, trigramme) {
+    this._ope_html = function (ope, is_cof, trigramme) {
         var $ope_html = $(this.template_ope);
         var parsed_amount = parseFloat(ope['amount']);
         var amount = amountDisplay(parsed_amount, is_cof, trigramme);
@@ -54,7 +82,8 @@ function KHistory(options = {}) {
         }
 
         $ope_html
-            .data('ope', ope['id'])
+            .data('type', 'operation')
+            .data('id', ope['id'])
             .find('.amount').text(amount).end()
             .find('.infos1').text(infos1).end()
             .find('.infos2').text(infos2).end();
@@ -62,54 +91,89 @@ function KHistory(options = {}) {
         var addcost_for = ope['addcost_for__trigramme'];
         if (addcost_for) {
             var addcost_amount = parseFloat(ope['addcost_amount']);
-            $ope_html.find('.addcost').text('(' + amountDisplay(addcost_amount, is_cof) + 'UKF pour ' + addcost_for + ')');
+            $ope_html.find('.addcost').text('(' + amountDisplay(addcost_amount, is_cof) + ' UKF pour ' + addcost_for + ')');
         }
 
         if (ope['canceled_at'])
-            this.cancelOpe(ope, $ope_html);
+            this.cancel_entry(ope, $ope_html);
 
         return $ope_html;
     }
 
-    this.cancelOpe = function (ope, $ope = null) {
-        if (!$ope)
-            $ope = this.findOpe(ope['id']);
+    this._transfer_html = function (transfer) {
+        var $transfer_html = $(this.template_transfer);
+        var parsed_amount = parseFloat(transfer['amount']);
+        var amount = parsed_amount.toFixed(2) + '€';
 
-        var cancel = 'Annulé';
-        var canceled_at = dateUTCToParis(ope['canceled_at']);
-        if (ope['canceled_by__trigramme'])
-            cancel += ' par ' + ope['canceled_by__trigramme'];
-        cancel += ' le ' + canceled_at.format('DD/MM/YY à HH:mm:ss');
+        $transfer_html
+            .data('type', 'transfer')
+            .data('id', transfer['id'])
+            .find('.amount').text(amount).end()
+            .find('.infos1').text(transfer['from_acc']).end()
+            .find('.infos2').text(transfer['to_acc']).end();
 
-        $ope.addClass('canceled').find('.canceled').text(cancel);
+        if (transfer['canceled_at'])
+            this.cancel_entry(transfer, $transfer_html);
+
+        return $transfer_html;
     }
 
-    this._opeGroupHtml = function (opegroup) {
-        var $opegroup_html = $(this.template_opegroup);
+    this.cancel_entry = function (entry, $entry = null) {
+        if (!$entry)
+            $entry = this.find_entry(entry["id"], entry["type"]);
 
-        var at = dateUTCToParis(opegroup['at']).format('HH:mm:ss');
-        var trigramme = opegroup['on_acc__trigramme'];
-        var amount = amountDisplay(
-            parseFloat(opegroup['amount']), opegroup['is_cof'], trigramme);
-        var comment = opegroup['comment'] || '';
+        var cancel = 'Annulé';
+        var canceled_at = dateUTCToParis(entry['canceled_at']);
+        if (entry['canceled_by__trigramme'])
+            cancel += ' par ' + entry['canceled_by__trigramme'];
+        cancel += ' le ' + canceled_at.format('DD/MM/YY à HH:mm:ss');
 
-        $opegroup_html
-            .data('opegroup', opegroup['id'])
+        $entry.addClass('canceled').find('.canceled').text(cancel);
+    }
+
+    this._group_html = function (group) {
+        var type = group['type'];
+        switch (type) {
+            case 'operation':
+                var $group_html = $(this.template_opegroup);
+                var trigramme = group['on_acc__trigramme'];
+                var amount = amountDisplay(
+                    parseFloat(group['amount']), group['is_cof'], trigramme);
+                break;
+            case 'transfer':
+                var $group_html = $(this.template_transfergroup);
+                $group_html.find('.infos').text('Transferts').end()
+                var trigramme = '';
+                var amount = '';
+                break;
+        }
+
+        var at = dateUTCToParis(group['at']).format('HH:mm:ss');
+        var comment = group['comment'] || '';
+
+        $group_html
+            .data('type', type)
+            .data('id', group['id'])
             .find('.time').text(at).end()
             .find('.amount').text(amount).end()
             .find('.comment').text(comment).end()
             .find('.trigramme').text(trigramme).end();
 
         if (!this.display_trigramme)
-            $opegroup_html.find('.trigramme').remove();
+            $group_html.find('.trigramme').remove();
+        $group_html.find('.info').remove();
 
-        if (opegroup['valid_by__trigramme'])
-            $opegroup_html.find('.valid_by').text('Par ' + opegroup['valid_by__trigramme']);
+        if (group['valid_by__trigramme'])
+            $group_html.find('.valid_by').text('Par ' + group['valid_by__trigramme']);
 
-        return $opegroup_html;
+        return $group_html;
     }
 
-    this._getOrCreateDay = function (date) {
+    this._get_or_create_day = function (date) {
         var at = dateUTCToParis(date);
         var at_ser = at.format('YYYY-MM-DD');
         var $day = this.$container.find('.day').filter(function () {
@@ -118,35 +182,123 @@ function KHistory(options = {}) {
         if ($day.length == 1)
             return $day;
 
         var $day = $(this.template_day).prependTo(this.$container);
-        return $day.data('date', at_ser).text(at.format('D MMMM'));
+        return $day.data('date', at_ser).text(at.format('D MMMM YYYY'));
     }
 
-    this.findOpeGroup = function (id) {
-        return this.$container.find('.opegroup').filter(function () {
-            return $(this).data('opegroup') == id
+    this.find_group = function (id, type = "operation") {
+        return this.$container.find('.group').filter(function () {
+            return ($(this).data('id') == id && $(this).data("type") == type)
         });
     }
 
-    this.findOpe = function (id) {
-        return this.$container.find('.ope').filter(function () {
-            return $(this).data('ope') == id
+    this.find_entry = function (id, type = 'operation') {
+        return this.$container.find('.entry').filter(function () {
+            return ($(this).data('id') == id && $(this).data('type') == type)
         });
     }
 
-    this.cancelOpeGroup = function (opegroup) {
-        var $opegroup = this.findOpeGroup(opegroup['id']);
-        var trigramme = $opegroup.find('.trigramme').text();
+    this.update_opegroup = function (group, type = "operation") {
+        var $group = this.find_group(group['id'], type);
+        var trigramme = $group.find('.trigramme').text();
         var amount = amountDisplay(
-            parseFloat(opegroup['amount']), opegroup['is_cof'], trigramme);
+            parseFloat(group['amount']), group['is_cof'], trigramme);
 
-        $opegroup.find('.amount').text(amount);
+        $group.find('.amount').text(amount);
+    }
+
+    this.fetch = function (fetch_options) {
+        options = $.extend({}, this.fetch_options, fetch_options);
+        var that = this;
+        return $.ajax({
+            dataType: "json",
+            url: django_urls["kfet.history.json"](),
+            method: "POST",
+            data: options,
+        }).done(function (data) {
+            for (let group of data['groups']) {
+                that.add_history_group(group);
+            }
+        });
+    }
+
+    this._cancel = function (type, opes, password = "") {
+        if (window.lock == 1)
+            return false
+        window.lock = 1;
+        var that = this;
+        return $.ajax({
+            dataType: "json",
+            url: django_urls[`kfet.${type}s.cancel`](),
+            method: "POST",
+            data: opes,
+            beforeSend: function ($xhr) {
+                $xhr.setRequestHeader("X-CSRFToken", csrftoken);
+                if (password != '')
+                    $xhr.setRequestHeader("KFetPassword", password);
+            },
+        }).done(function (data) {
+            window.lock = 0;
+            that.$container.find('.ui-selected').removeClass('ui-selected');
+            for (let entry of data["canceled"]) {
+                entry["type"] = type;
+                that.cancel_entry(entry);
+            }
+            if (type == "operation") {
+                for (let opegroup of data["opegroups_to_update"]) {
+                    that.update_opegroup(opegroup)
+                }
+            }
+        }).fail(function ($xhr) {
+            var data = $xhr.responseJSON;
+            switch ($xhr.status) {
+                case 403:
+                    requestAuth(data, function (password) {
+                        this.cancel(opes, password);
+                    });
+                    break;
+                case 400:
+                    displayErrors(getErrorsHtml(data));
+                    break;
+            }
+            window.lock = 0;
+        });
+    }
+
+    this.cancel_selected = function () {
+        var opes_to_cancel = {
+            "transfers": [],
+            "operations": [],
+        }
+        this.$container.find('.entry.ui-selected').each(function () {
+            type = $(this).data("type");
+            opes_to_cancel[`${type}s`].push($(this).data("id"));
+        });
+        if (opes_to_cancel["transfers"].length > 0 && opes_to_cancel["operations"].length > 0) {
+            // Lancer 2 requêtes AJAX et gérer tous les cas d'erreurs possibles est trop complexe
+            $.alert({
+                title: 'Erreur',
+                content: "Impossible de supprimer des transferts et des opérations en même temps !",
+                backgroundDismiss: true,
+                animation: 'top',
+                closeAnimation: 'bottom',
+                keyboardEnabled: true,
+            });
+        } else if (opes_to_cancel["transfers"].length > 0) {
+            delete opes_to_cancel["operations"];
+            this._cancel("transfer", opes_to_cancel);
+        } else if (opes_to_cancel["operations"].length > 0) {
+            delete opes_to_cancel["transfers"];
+            this._cancel("operation", opes_to_cancel);
+        }
     }
 }
 
 KHistory.default_options = {
     container: '#history',
     template_day: '<div class="day"></div>',
-    template_opegroup: '<div class="opegroup"><span class="time"></span><span class="trigramme"></span><span class="amount"></span><span class="valid_by"></span><span class="comment"></span></div>',
-    template_ope: '<div class="ope"><span class="amount"></span><span class="infos1"></span><span class="infos2"></span><span class="addcost"></span><span class="canceled"></span></div>',
+    template_opegroup: '<div class="group"><span class="time"></span><span class="trigramme"></span><span class="amount"></span><span class="valid_by"></span><span class="comment"></span></div>',
+    template_transfergroup: '<div class="group"><span class="time"></span><span class="infos"></span><span class="valid_by"></span><span class="comment"></span></div>',
+    template_ope: '<div class="entry"><span class="amount"></span><span class="infos1"></span><span class="infos2"></span><span class="addcost"></span><span class="canceled"></span></div>',
+    template_transfer: '<div class="entry"><span class="amount"></span><span class="infos1"></span><span class="glyphicon glyphicon-arrow-right"></span><span class="infos2"></span><span class="canceled"></span></div>',
     display_trigramme: true,
 }
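
For reference, a sketch of the JSON payload that `KHistory.fetch()` consumes, reconstructed from the fields read in `add_history_group()`, `_group_html()` and `_ope_html()` above; this is indicative only, not a server-side schema:

```python
# Assumed shape of one history group as read by the JavaScript above.
example_group = {
    "type": "operation",           # or "transfer"
    "id": 42,
    "at": "2020-05-08T13:37:00Z",  # UTC; converted by dateUTCToParis()
    "on_acc_trigramme": "ABC",
    "on_acc__trigramme": "ABC",    # read by _group_html()
    "is_cof": True,
    "amount": "-1.50",
    "comment": "",
    "valid_by__trigramme": None,
    "entries": [
        {"id": 1337, "amount": "-1.50", "canceled_at": None},
    ],
}
```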

View file

@@ -1,28 +1,15 @@
-(function($){
+(function ($) {
     window.StatsGroup = function (url, target) {
         // a class to properly display statictics
 
         // url : points to an ObjectResumeStat that lists the options through JSON
         // target : element of the DOM where to put the stats
 
-        var self = this;
         var element = $(target);
         var content = $("<div class='full'>");
         var buttons;
 
-        function dictToArray (dict, start) {
-            // converts the dicts returned by JSONResponse to Arrays
-            // necessary because for..in does not guarantee the order
-            if (start === undefined) start = 0;
-            var array = new Array();
-            for (var k in dict) {
-                array[k] = dict[k];
-            }
-            array.splice(0, start);
-            return array;
-        }
-
-        function handleTimeChart (data) {
+        function handleTimeChart(data) {
             // reads the balance data and put it into chartjs formatting
             chart_data = new Array();
             for (var i = 0; i < data.length; i++) {
@@ -36,7 +23,7 @@
             return chart_data;
         }
 
-        function showStats () {
+        function showStats() {
             // CALLBACK : called when a button is selected
 
             // shows the focus on the correct button
@@ -44,24 +31,20 @@
             $(this).addClass("focus");
 
             // loads data and shows it
-            $.getJSON(this.stats_target_url, {format: 'json'}, displayStats);
+            $.getJSON(this.stats_target_url, displayStats);
         }
 
-        function displayStats (data) {
+        function displayStats(data) {
             // reads the json data and updates the chart display
             var chart_datasets = [];
-            var charts = dictToArray(data.charts);
 
             // are the points indexed by timestamps?
             var is_time_chart = data.is_time_chart || false;
 
             // reads the charts data
-            for (var i = 0; i < charts.length; i++) {
-                var chart = charts[i];
-
+            for (let chart of data.charts) {
                 // format the data
-                var chart_data = is_time_chart ? handleTimeChart(chart.values) : dictToArray(chart.values, 0);
+                var chart_data = is_time_chart ? handleTimeChart(chart.values) : chart.values;
 
                 chart_datasets.push(
                     {
@@ -76,29 +59,24 @@
 
             // options for chartjs
             var chart_options =
                 {
                     responsive: true,
                     maintainAspectRatio: false,
                     tooltips: {
                         mode: 'index',
                         intersect: false,
                     },
                     hover: {
                         mode: 'nearest',
                         intersect: false,
                     }
                 };
 
             // additionnal options for time-indexed charts
             if (is_time_chart) {
                 chart_options['scales'] = {
                     xAxes: [{
                         type: "time",
-                        display: true,
-                        scaleLabel: {
-                            display: false,
-                            labelString: 'Date'
-                        },
                         time: {
                             tooltipFormat: 'll HH:mm',
                             displayFormats: {
@@ -115,26 +93,19 @@
                         }
                     }],
-                    yAxes: [{
-                        display: true,
-                        scaleLabel: {
-                            display: false,
-                            labelString: 'value'
-                        }
-                    }]
                 };
             }
 
             // global object for the options
             var chart_model =
                 {
                     type: 'line',
                     options: chart_options,
                     data: {
                         labels: data.labels || [],
                         datasets: chart_datasets,
                     }
                 };
 
             // saves the previous charts to be destroyed
             var prev_chart = content.children();
@@ -151,27 +122,30 @@
         }
 
         // initialize the interface
-        function initialize (data) {
+        function initialize(data) {
             // creates the bar with the buttons
             buttons = $("<ul>",
-                {class: "nav stat-nav",
-                 "aria-label": "select-period"});
+                {
+                    class: "nav stat-nav",
+                    "aria-label": "select-period"
+                });
 
             var to_click;
-            var context = data.stats;
 
-            for (var i = 0; i < context.length; i++) {
+            for (let stat of data.stats) {
                 // creates the button
-                var btn_wrapper = $("<li>", {role:"presentation"});
+                var btn_wrapper = $("<li>", { role: "presentation" });
                 var btn = $("<a>",
-                    {class: "btn btn-nav",
-                     type: "button"})
-                    .text(context[i].label)
-                    .prop("stats_target_url", context[i].url)
+                    {
+                        class: "btn btn-nav",
+                        type: "button"
+                    })
+                    .text(stat.label)
+                    .prop("stats_target_url", stat.url)
                     .on("click", showStats);
 
                 // saves the default option to select
-                if (i == data.default_stat || i == 0)
+                if (stat.default)
                     to_click = btn;
 
                 // append the elements to the parent
@@ -189,7 +163,7 @@
 
         // constructor
         (function () {
-            $.getJSON(url, {format: 'json'}, initialize);
+            $.getJSON(url, initialize);
         })();
     };
 })(jQuery);

View file

@@ -1,21 +1,22 @@
 from datetime import date, datetime, time, timedelta
 
-import pytz
 from dateutil.parser import parse as dateutil_parse
 from dateutil.relativedelta import relativedelta
-from django.db.models import Sum
 from django.utils import timezone
 
-KFET_WAKES_UP_AT = time(7, 0)
+KFET_WAKES_UP_AT = time(5, 0)  # La K-Fêt ouvre à 5h (UTC) du matin
 
 
 def kfet_day(year, month, day, start_at=KFET_WAKES_UP_AT):
-    """datetime wrapper with time offset."""
-    naive = datetime.combine(date(year, month, day), start_at)
-    return pytz.timezone("Europe/Paris").localize(naive, is_dst=None)
+    """Étant donné une date, renvoie un objet `datetime`
+    correspondant au début du 'jour K-Fêt' correspondant."""
+    return datetime.combine(date(year, month, day), start_at)
 
 
 def to_kfet_day(dt, start_at=KFET_WAKES_UP_AT):
+    """
+    Retourne le 'jour K-Fêt' correspondant à un objet `datetime` donné
+    """
     kfet_dt = kfet_day(year=dt.year, month=dt.month, day=dt.day)
     if dt.time() < start_at:
         kfet_dt -= timedelta(days=1)
@@ -23,6 +24,17 @@ def to_kfet_day(dt, start_at=KFET_WAKES_UP_AT):
 
 
 class Scale(object):
+    """
+    Classe utilisée pour subdiviser un QuerySet (e.g. des opérations) sur
+    une échelle de temps donnée, avec un pas de temps fixe.
+
+    Cette échelle peut être spécifiée :
+    - par un début et une fin,
+    - par un début/une fin et un nombre de subdivisions.
+
+    Si le booléen `std_chunk` est activé, le début de la première subdivision
+    est généré via la fonction `get_chunk_start`.
+    """
     name = None
     step = None
 
@@ -52,7 +64,7 @@ class Scale(object):
                 "or use last and n_steps"
             )
 
-        self.datetimes = self.get_datetimes()
+        self._gen_datetimes()
 
     @staticmethod
     def by_name(name):
@@ -61,9 +73,6 @@ class Scale(object):
                 return cls
         return None
 
-    def get_from(self, dt):
-        return self.std_chunk and self.get_chunk_start(dt) or dt
-
     def __getitem__(self, i):
         return self.datetimes[i], self.datetimes[i + 1]
 
@@ -73,13 +82,13 @@ class Scale(object):
     def do_step(self, dt, n_steps=1):
         return dt + self.step * n_steps
 
-    def get_datetimes(self):
+    def _gen_datetimes(self):
         datetimes = [self.begin]
         tmp = self.begin
         while tmp < self.end:
             tmp = self.do_step(tmp)
             datetimes.append(tmp)
-        return datetimes
+        self.datetimes = datetimes
 
     def get_labels(self, label_fmt=None):
         if label_fmt is None:
@@ -89,93 +98,18 @@ class Scale(object):
             for i, (begin, end) in enumerate(self)
         ]
 
-    def chunkify_qs(self, qs, field=None):
-        if field is None:
-            field = "at"
+    def chunkify_qs(self, qs, field="at", aggregate=None):
+        """
+        Découpe un queryset en subdivisions, avec agrégation optionnelle des résultats
+        NB : on pourrait faire ça en une requête, au détriment de la lisibilité...
+        """
         begin_f = "{}__gte".format(field)
         end_f = "{}__lte".format(field)
-        return [qs.filter(**{begin_f: begin, end_f: end}) for begin, end in self]
-
-    def get_by_chunks(self, qs, field_callback=None, field_db="at"):
-        """Objects of queryset ranked according to the scale.
-
-        Returns a generator whose each item, corresponding to a scale chunk,
-        is a generator of objects from qs for this chunk.
-
-        Args:
-            qs: Queryset of source objects, must be ordered *first* on the
-                same field returned by `field_callback`.
-            field_callback: Callable which gives value from an object used
-                to compare against limits of the scale chunks.
-                Default to: lambda obj: getattr(obj, field_db)
-            field_db: Used to filter against `scale` limits.
-                Default to 'at'.
-
-        Examples:
-            If queryset `qs` use `values()`, `field_callback` must be set and
-            could be: `lambda d: d['at']`
-            If `field_db` use foreign attributes (eg with `__`), it should be
-            something like: `lambda obj: obj.group.at`.
-
-        """
-        if field_callback is None:
-
-            def field_callback(obj):
-                return getattr(obj, field_db)
-
-        begin_f = "{}__gte".format(field_db)
-        end_f = "{}__lte".format(field_db)
-
-        qs = qs.filter(**{begin_f: self.begin, end_f: self.end})
-
-        obj_iter = iter(qs)
-
-        last_obj = None
-
-        def _objects_until(obj_iter, field_callback, end):
-            """Generator of objects until `end`.
-
-            Ends if objects source is empty or when an object not verifying
-            field_callback(obj) <= end is met.
-
-            If this object exists, it is stored in `last_obj` which is found
-            from outer scope.
-
-            Also, if this same variable is non-empty when the function is
-            called, it first yields its content.
-
-            Args:
-                obj_iter: Source used to get objects.
-                field_callback: Returned value, when it is called on an object
-                    will be used to test ordering against `end`.
-                end
-
-            """
-            nonlocal last_obj
-
-            if last_obj is not None:
-                yield last_obj
-                last_obj = None
-
-            for obj in obj_iter:
-                if field_callback(obj) <= end:
-                    yield obj
-                else:
-                    last_obj = obj
-                    return
-
-        for begin, end in self:
-            # forward last seen object, if it exists, to the right chunk,
-            # and fill with empty generators for intermediate chunks of scale
-            if last_obj is not None:
-                if field_callback(last_obj) > end:
-                    yield iter(())
-                    continue
-
-            # yields generator for this chunk
-            # this set last_obj to None if obj_iter reach its end, otherwise
-            # it's set to the first met object from obj_iter which doesn't
-            # belong to this chunk
-            yield _objects_until(obj_iter, field_callback, end)
+        chunks = [qs.filter(**{begin_f: begin, end_f: end}) for begin, end in self]
+        if aggregate is None:
+            return chunks
+        else:
+            return [chunk.aggregate(agg=aggregate)["agg"] or 0 for chunk in chunks]
 
 
 class DayScale(Scale):
@@ -191,7 +125,7 @@ class DayScale(Scale):
 class WeekScale(Scale):
     name = "week"
     step = timedelta(days=7)
-    label_fmt = "Semaine %W"
+    label_fmt = "%d %b."
 
     @classmethod
     def get_chunk_start(cls, dt):
@@ -210,111 +144,67 @@ class MonthScale(Scale):
         return to_kfet_day(dt).replace(day=1)
 
 
-def stat_manifest(
-    scales_def=None, scale_args=None, scale_prefix=None, **other_url_params
-):
-    if scale_prefix is None:
-        scale_prefix = "scale_"
-    if scales_def is None:
-        scales_def = []
-    if scale_args is None:
-        scale_args = {}
-    manifest = []
-    for label, cls in scales_def:
-        url_params = {scale_prefix + "name": cls.name}
-        url_params.update(
-            {scale_prefix + key: value for key, value in scale_args.items()}
-        )
-        url_params.update(other_url_params)
-        manifest.append(dict(label=label, url_params=url_params))
-    return manifest
-
-
-def last_stats_manifest(
-    scales_def=None, scale_args=None, scale_prefix=None, **url_params
-):
-    scales_def = [
-        ("Derniers mois", MonthScale),
-        ("Dernières semaines", WeekScale),
-        ("Derniers jours", DayScale),
-    ]
-    if scale_args is None:
-        scale_args = {}
-    scale_args.update(dict(last=True, n_steps=7))
-    return stat_manifest(
-        scales_def=scales_def,
-        scale_args=scale_args,
-        scale_prefix=scale_prefix,
-        **url_params
-    )
-
-
-# Étant donné un queryset d'operations
-# rend la somme des article_nb
-def tot_ventes(queryset):
-    res = queryset.aggregate(Sum("article_nb"))["article_nb__sum"]
-    return res and res or 0
+def scale_url_params(scales_def, **other_url_params):
+    """
+    Convertit une spécification de scales en arguments GET utilisables par ScaleMixin.
+    La spécification est de la forme suivante :
+    - scales_def : liste de champs de la forme (label, scale)
+    - scale_args : arguments à passer à Scale.__init__
+    - other_url_params : paramètres GET supplémentaires
+    """
+    params_list = []
+    for label, cls, params, default in scales_def:
+        url_params = {"scale_name": cls.name}
+        url_params.update({"scale_" + key: value for key, value in params.items()})
+        url_params.update(other_url_params)
+        params_list.append(dict(label=label, url_params=url_params, default=default))
+
+    return params_list
 
 
 class ScaleMixin(object):
-    scale_args_prefix = "scale_"
-
-    def get_scale_args(self, params=None, prefix=None):
-        """Retrieve scale args from params.
-
-        Should search the same args of Scale constructor.
-
-        Args:
-            params (dict, optional): Scale args are searched in this.
-                Default to GET params of request.
-            prefix (str, optional): Appended at the begin of scale args names.
-                Default to `self.scale_args_prefix`.
-
-        """
-        if params is None:
-            params = self.request.GET
-        if prefix is None:
-            prefix = self.scale_args_prefix
-
+    def parse_scale_args(self):
+        """
+        Récupère les paramètres de subdivision encodés dans une requête GET.
+        """
         scale_args = {}
-
-        name = params.get(prefix + "name", None)
+        name = self.request.GET.get("scale_name", None)
         if name is not None:
             scale_args["name"] = name
-
-        n_steps = params.get(prefix + "n_steps", None)
+        n_steps = self.request.GET.get("scale_n_steps", None)
         if n_steps is not None:
             scale_args["n_steps"] = int(n_steps)
-
-        begin = params.get(prefix + "begin", None)
+        begin = self.request.GET.get("scale_begin", None)
         if begin is not None:
             scale_args["begin"] = dateutil_parse(begin)
-
-        end = params.get(prefix + "send", None)
+        end = self.request.GET.get("scale_send", None)
         if end is not None:
             scale_args["end"] = dateutil_parse(end)
-
-        last = params.get(prefix + "last", None)
+        last = self.request.GET.get("scale_last", None)
         if last is not None:
             scale_args["last"] = last in ["true", "True", "1"] and True or False
-
         return scale_args
 
     def get_context_data(self, *args, **kwargs):
-        context = super().get_context_data(*args, **kwargs)
-
-        scale_args = self.get_scale_args()
+        # On n'hérite pas
+        scale_args = self.parse_scale_args()
         scale_name = scale_args.pop("name", None)
         scale_cls = Scale.by_name(scale_name)
-
         if scale_cls is None:
-            scale = self.get_default_scale()
+            self.scale = self.get_default_scale()
         else:
-            scale = scale_cls(**scale_args)
-
-        self.scale = scale
-        context["labels"] = scale.get_labels()
-        return context
+            self.scale = scale_cls(**scale_args)
+        return {"labels": self.scale.get_labels()}
 
     def get_default_scale(self):
         return DayScale(n_steps=7, last=True)
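
The new `chunkify_qs(aggregate=...)` path replaces both `get_by_chunks` and the ad-hoc `tot_ventes` helper; a sketch of the intended usage follows, where the `Operation` queryset, the `group__at` lookup and the module path of `WeekScale` are assumptions based on the rest of the kfet app:

```python
# Illustrative: weekly sales totals over the last 7 K-Fêt weeks.
from django.db.models import Sum

from kfet.models import Operation          # assumed model location
from kfet.statistic import WeekScale       # assumed module path

scale = WeekScale(last=True, n_steps=7)
labels = scale.get_labels()                 # one label per chunk
totals = scale.chunkify_qs(
    Operation.objects.all(),
    field="group__at",                      # assumed datetime lookup
    aggregate=Sum("article_nb"),
)                                           # one number per chunk, 0 if empty
```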

View file

@@ -5,6 +5,7 @@
 
 {% block extra_head %}
 <script type="text/javascript" src="{% static 'kfet/js/history.js' %}"></script>
+<script type="text/javascript" src="{% url 'js_reverse' %}" ></script>
 <script type="text/javascript" src="{% static 'kfet/vendor/moment/moment.min.js' %}"></script>
 <script type="text/javascript" src="{% static 'kfet/vendor/moment/fr.js' %}"></script>
 <script type="text/javascript" src="{% static 'kfet/vendor/moment/moment-timezone-with-data-2012-2022.min.js' %}"></script>
@@ -81,7 +82,7 @@ $(document).ready(function() {
     </ul>
   </div>
   {% endif %}
-  <div id="history" class="full"></div>
+  <div id="history"></div>
 </section>
 
 </div><!-- history tab -->
@@ -93,29 +94,22 @@ $(document).ready(function() {
     khistory = new KHistory({
         display_trigramme: false,
-    });
-
-    function getHistory() {
-        var data = {
-            'accounts': [{{ account.pk }}],
-        }
-
-        $.ajax({
-            dataType: "json",
-            url : "{% url 'kfet.history.json' %}",
-            method : "POST",
-            data : data,
-        })
-        .done(function(data) {
-            for (var i=0; i<data['opegroups'].length; i++) {
-                khistory.addOpeGroup(data['opegroups'][i]);
-            }
-
-            var nb_opes = khistory.$container.find('.ope:not(.canceled)').length;
-            $('#nb_opes').text(nb_opes);
-        });
-    }
-
-    getHistory();
+        fetch_options: {
+            'accounts': [{{ account.pk }}],
+        }
+    });
+
+    $(document).on('keydown', function (e) {
+        if (e.keyCode == 46) {
+            // DEL (Suppr)
+            khistory.cancel_selected()
+        }
+    });
+
+    khistory.fetch().done(function () {
+        var nb_opes = khistory.$container.find('.ope:not(.canceled)').length;
+        $('#nb_opes').text(nb_opes);
+    });
 });
 </script>

View file

@@ -5,6 +5,7 @@
 <link rel="stylesheet" type="text/css" href="{% static 'kfet/vendor/multiple-select/multiple-select.css' %}">
 <script type="text/javascript" src="{% static 'kfet/vendor/multiple-select/multiple-select.js' %}"></script>
 {{ filter_form.media }}
+<script type="text/javascript" src="{% url 'js_reverse' %}" ></script>
 <script type="text/javascript" src="{% static 'kfet/js/history.js' %}"></script>
 <script type="text/javascript" src="{% static 'kfet/vendor/moment/moment-timezone-with-data-2012-2022.min.js' %}"></script>
 {% endblock %}
@@ -27,6 +28,9 @@
       <li><b>Comptes</b> {{ filter_form.accounts }}</li>
     </ul>
   </div>
+  <div class="buttons">
+    <button class="btn btn-primary" id="btn-fetch">Valider</button>
+  </div>
 </aside>
 {% endblock %}
@@ -40,6 +44,8 @@
 $(document).ready(function() {
     settings = { 'subvention_cof': parseFloat({{ kfet_config.subvention_cof|unlocalize }})}
 
+    window.lock = 0;
+
     khistory = new KHistory();
 
     var $from_date = $('#id_from_date');
@@ -67,17 +73,8 @@ $(document).ready(function() {
         var accounts = getSelectedMultiple($accounts);
         data['accounts'] = accounts;
 
-        $.ajax({
-            dataType: "json",
-            url : "{% url 'kfet.history.json' %}",
-            method : "POST",
-            data : data,
-        })
-        .done(function(data) {
-            for (var i=0; i<data['opegroups'].length; i++) {
-                khistory.addOpeGroup(data['opegroups'][i]);
-            }
-
-            var nb_opes = khistory.$container.find('.ope:not(.canceled)').length;
+        khistory.fetch(data).done(function () {
+            var nb_opes = khistory.$container.find('.entry:not(.canceled)').length;
             $('#nb_opes').text(nb_opes);
         });
     }
@@ -112,130 +109,17 @@ $(document).ready(function() {
         countSelected: "# sur %"
     });
 
-    $("input").on('dp.change change', function() {
+    $("#btn-fetch").on('click', function() {
         khistory.reset();
         getHistory();
     });
 
-    khistory.$container.selectable({
-        filter: 'div.opegroup, div.ope',
-        selected: function(e, ui) {
-            $(ui.selected).each(function() {
-                if ($(this).hasClass('opegroup')) {
-                    var opegroup = $(this).data('opegroup');
-                    $(this).siblings('.ope').filter(function() {
-                        return $(this).data('opegroup') == opegroup
-                    }).addClass('ui-selected');
-                }
-            });
-        },
-    });
-
     $(document).on('keydown', function (e) {
         if (e.keyCode == 46) {
             // DEL (Suppr)
-            var opes_to_cancel = [];
-            khistory.$container.find('.ope.ui-selected').each(function () {
-                opes_to_cancel.push($(this).data('ope'));
-            });
-            if (opes_to_cancel.length > 0)
-                confirmCancel(opes_to_cancel);
+            khistory.cancel_selected()
         }
     });
 
-    function confirmCancel(opes_to_cancel) {
-        var nb = opes_to_cancel.length;
-        var content = nb+" opérations vont être annulées";
-        $.confirm({
-            title: 'Confirmation',
-            content: content,
-            backgroundDismiss: true,
-            animation: 'top',
-            closeAnimation: 'bottom',
-            keyboardEnabled: true,
-            confirm: function() {
-                cancelOperations(opes_to_cancel);
-            }
-        });
-    }
-
-    function requestAuth(data, callback) {
-        var content = getErrorsHtml(data);
-        content += '<input type="password" name="password" autofocus>',
-        $.confirm({
-            title: 'Authentification requise',
-            content: content,
-            backgroundDismiss: true,
-            animation:'top',
-            closeAnimation:'bottom',
-            keyboardEnabled: true,
-            confirm: function() {
-                var password = this.$content.find('input').val();
-                callback(password);
-            },
-            onOpen: function() {
-                var that = this;
-                this.$content.find('input').on('keypress', function(e) {
-                    if (e.keyCode == 13)
-                        that.$confirmButton.click();
-                });
-            },
-        });
-    }
-
-    function getErrorsHtml(data) {
-        var content = '';
-        if ('missing_perms' in data['errors']) {
-            content += 'Permissions manquantes';
-            content += '<ul>';
-            for (var i=0; i<data['errors']['missing_perms'].length; i++)
-                content += '<li>'+data['errors']['missing_perms'][i]+'</li>';
-            content += '</ul>';
-        }
-        if ('negative' in data['errors']) {
-            var url_base = "{% url 'kfet.account.update' LIQ %}";
-            url_base = base_url(0, url_base.length-8);
-            for (var i=0; i<data['errors']['negative'].length; i++) {
-                content += '<a class="btn btn-primary" href="'+url_base+data['errors']['negative'][i]+'/edit" target="_blank">Autorisation de négatif requise pour '+data['errors']['negative'][i]+'</a>';
-            }
-        }
-        return content;
-    }
-
-    function cancelOperations(opes_array, password = '') {
-        var data = { 'operations' : opes_array }
-
-        $.ajax({
-            dataType: "json",
-            url : "{% url 'kfet.kpsul.cancel_operations' %}",
-            method : "POST",
-            data : data,
-            beforeSend: function ($xhr) {
-                $xhr.setRequestHeader("X-CSRFToken", csrftoken);
-                if (password != '')
-                    $xhr.setRequestHeader("KFetPassword", password);
-            },
-        })
-        .done(function(data) {
-            khistory.$container.find('.ui-selected').removeClass('ui-selected');
-        })
-        .fail(function($xhr) {
-            var data = $xhr.responseJSON;
-            switch ($xhr.status) {
-                case 403:
-                    requestAuth(data, function(password) {
-                        cancelOperations(opes_array, password);
-                    });
-                    break;
-                case 400:
-                    displayErrors(getErrorsHtml(data));
-                    break;
-            }
-        });
-    }
-
     getHistory();
 });
 </script>

View file

@@ -189,7 +189,7 @@ $(document).ready(function() {
     // -----
 
     // Lock to avoid multiple requests
-    lock = 0;
+    window.lock = 0;
 
     // Retrieve settings
@@ -479,9 +479,9 @@ $(document).ready(function() {
     var operations = $('#operation_formset');
 
     function performOperations(password = '') {
-        if (lock == 1)
+        if (window.lock == 1)
             return false;
-        lock = 1;
+        window.lock = 1;
         var data = operationGroup.serialize() + '&' + operations.serialize();
         $.ajax({
             dataType: "json",
@@ -497,7 +497,7 @@ $(document).ready(function() {
         .done(function(data) {
             updatePreviousOp();
             coolReset();
-            lock = 0;
+            window.lock = 0;
         })
         .fail(function($xhr) {
             var data = $xhr.responseJSON;
@@ -513,7 +513,7 @@ $(document).ready(function() {
                 }
                 break;
             }
-            lock = 0;
+            window.lock = 0;
         });
     }
 
@@ -522,55 +522,6 @@ $(document).ready(function() {
         performOperations();
     });
 
-    // -----
-    // Cancel operations
-    // -----
-
-    var cancelButton = $('#cancel_operations');
-    var cancelForm = $('#cancel_form');
-
-    function cancelOperations(opes_array, password = '') {
-        if (lock == 1)
-            return false
-        lock = 1;
-        var data = { 'operations' : opes_array }
-
-        $.ajax({
-            dataType: "json",
-            url : "{% url 'kfet.kpsul.cancel_operations' %}",
-            method : "POST",
-            data : data,
-            beforeSend: function ($xhr) {
-                $xhr.setRequestHeader("X-CSRFToken", csrftoken);
-                if (password != '')
-                    $xhr.setRequestHeader("KFetPassword", password);
-            },
-        })
-        .done(function(data) {
-            coolReset();
-            lock = 0;
-        })
-        .fail(function($xhr) {
-            var data = $xhr.responseJSON;
-            switch ($xhr.status) {
-                case 403:
-                    requestAuth(data, function(password) {
-                        cancelOperations(opes_array, password);
-                    }, triInput);
-                    break;
-                case 400:
-                    displayErrors(getErrorsHtml(data));
-                    break;
-            }
-            lock = 0;
-        });
-    }
-
-    // Event listeners
-    cancelButton.on('click', function() {
-        cancelOperations();
-    });
-
     // -----
     // Articles data
     // -----
@@ -1189,24 +1140,12 @@ $(document).ready(function() {
     // History
     // -----
 
-    khistory = new KHistory();
-
-    function getHistory() {
-        var data = {
-            from: moment().subtract(1, 'days').format('YYYY-MM-DD HH:mm:ss'),
-        };
-
-        $.ajax({
-            dataType: "json",
-            url : "{% url 'kfet.history.json' %}",
-            method : "POST",
-            data : data,
-        })
-        .done(function(data) {
-            for (var i=0; i<data['opegroups'].length; i++) {
-                khistory.addOpeGroup(data['opegroups'][i]);
-            }
-        });
-    }
+    khistory = new KHistory({
+        fetch_options: {
+            from: moment().subtract(1, 'days').format('YYYY-MM-DD HH:mm:ss'),
+            opesonly: true,
+        },
+    });
 
     var previousop_container = $('#previous_op');
@@ -1302,29 +1241,10 @@ $(document).ready(function() {
     // Cancel from history
     // -----
 
-    khistory.$container.selectable({
-        filter: 'div.opegroup, div.ope',
-        selected: function(e, ui) {
-            $(ui.selected).each(function() {
-                if ($(this).hasClass('opegroup')) {
-                    var opegroup = $(this).data('opegroup');
-                    $(this).siblings('.ope').filter(function() {
-                        return $(this).data('opegroup') == opegroup
-                    }).addClass('ui-selected');
-                }
-            });
-        },
-    });
-
     $(document).on('keydown', function (e) {
         if (e.keyCode == 46) {
             // DEL (Suppr)
-            var opes_to_cancel = [];
-            khistory.$container.find('.ope.ui-selected').each(function () {
-                opes_to_cancel.push($(this).data('ope'));
-            });
-            if (opes_to_cancel.length > 0)
-                cancelOperations(opes_to_cancel);
+            khistory.cancel_selected()
         }
     });
 
@@ -1333,16 +1253,9 @@ $(document).ready(function() {
     // -----
 
     OperationWebSocket.add_handler(function(data) {
-        for (var i=0; i<data['opegroups'].length; i++) {
-            if (data['opegroups'][i]['add']) {
-                khistory.addOpeGroup(data['opegroups'][i]);
-            } else if (data['opegroups'][i]['cancellation']) {
-                khistory.cancelOpeGroup(data['opegroups'][i]);
-            }
-        }
-        for (var i=0; i<data['opes'].length; i++) {
-            if (data['opes'][i]['cancellation']) {
-                khistory.cancelOpe(data['opes'][i]);
+        for (var i=0; i<data['groups'].length; i++) {
+            if (data['groups'][i]['add']) {
+                khistory.add_history_group(data['groups'][i]);
             }
         }
         for (var i=0; i<data['checkouts'].length; i++) {
@@ -1396,7 +1309,7 @@ $(document).ready(function() {
         khistory.reset();
         resetSettings().done(function (){
             getArticles();
-            getHistory();
+            khistory.fetch();
             displayAddcost();
         });
     }
View file
@ -1,9 +1,16 @@
{% extends 'kfet/base_col_2.html' %} {% extends 'kfet/base_col_2.html' %}
{% load staticfiles %} {% load staticfiles %}
{% load l10n staticfiles widget_tweaks %}
{% block title %}Transferts{% endblock %} {% block title %}Transferts{% endblock %}
{% block header-title %}Transferts{% endblock %} {% block header-title %}Transferts{% endblock %}
{% block extra_head %}
<script type="text/javascript" src="{% url 'js_reverse' %}" ></script>
<script type="text/javascript" src="{% static 'kfet/js/history.js' %}"></script>
<script type="text/javascript" src="{% static 'kfet/vendor/moment/moment-timezone-with-data-2012-2022.min.js' %}"></script>
{% endblock %}
{% block fixed %} {% block fixed %}
<div class="buttons"> <div class="buttons">
@ -16,109 +23,31 @@
{% block main %} {% block main %}
<div id="history"> <table id="history" class="table">
{% for transfergroup in transfergroups %} </table>
<div class="opegroup transfergroup" data-transfergroup="{{ transfergroup.pk }}">
<span>{{ transfergroup.at }}</span>
<span>{{ transfergroup.valid_by.trigramme }}</span>
<span>{{ transfergroup.comment }}</span>
</div>
{% for transfer in transfergroup.transfers.all %}
<div class="ope transfer{% if transfer.canceled_at %} canceled{% endif %}" data-transfer="{{ transfer.pk }}" data-transfergroup="{{ transfergroup.pk }}">
<span class="amount">{{ transfer.amount }} €</span>
<span class="from_acc">{{ transfer.from_acc.trigramme }}</span>
<span class="glyphicon glyphicon-arrow-right"></span>
<span class="to_acc">{{ transfer.to_acc.trigramme }}</span>
</div>
{% endfor %}
{% endfor %}
</div>
<script type="text/javascript"> <script type="text/javascript">
$(document).ready(function() { $(document).ready(function() {
lock = 0; window.lock = 0;
settings = { 'subvention_cof': parseFloat({{ kfet_config.subvention_cof|unlocalize }})}
function displayErrors(html) {
$.alert({
title: 'Erreurs',
content: html,
backgroundDismiss: true,
animation: 'top',
closeAnimation: 'bottom',
keyboardEnabled: true,
});
}
function cancelTransfers(transfers_array, password = '') { var khistory = new KHistory({
if (lock == 1) fetch_options:{
return false transfersonly: true,
lock = 1; }
var data = { 'transfers' : transfers_array }
$.ajax({
dataType: "json",
url : "{% url 'kfet.transfers.cancel' %}",
method : "POST",
data : data,
beforeSend: function ($xhr) {
$xhr.setRequestHeader("X-CSRFToken", csrftoken);
if (password != '')
$xhr.setRequestHeader("KFetPassword", password);
},
})
.done(function(data) {
for (var i=0; i<data['canceled'].length; i++) {
$('#history').find('.transfer[data-transfer='+data['canceled'][i]+']')
.addClass('canceled');
}
$('#history').find('.ui-selected').removeClass('ui-selected');
lock = 0;
})
.fail(function($xhr) {
var data = $xhr.responseJSON;
switch ($xhr.status) {
case 403:
requestAuth(data, function(password) {
cancelTransfers(transfers_array, password);
});
break;
case 400:
displayErrors(getErrorsHtml(data));
break;
}
lock = 0;
});
}
$('#history').selectable({
filter: 'div.transfergroup, div.transfer',
selected: function(e, ui) {
$(ui.selected).each(function() {
if ($(this).hasClass('transfergroup')) {
var transfergroup = $(this).attr('data-transfergroup');
$(this).siblings('.ope').filter(function() {
return $(this).attr('data-transfergroup') == transfergroup
}).addClass('ui-selected');
}
});
},
}); });
$(document).on('keydown', function (e) { $(document).on('keydown', function (e) {
if (e.keyCode == 46) { if (e.keyCode == 46) {
// DEL (Suppr) // DEL (Suppr)
var transfers_to_cancel = []; khistory.cancel_selected()
$('#history').find('.transfer.ui-selected').each(function () {
transfers_to_cancel.push($(this).attr('data-transfer'));
});
if (transfers_to_cancel.length > 0)
cancelTransfers(transfers_to_cancel);
} }
}); });
khistory.fetch()
}); });
</script> </script>
View file
@ -3,7 +3,7 @@ from datetime import datetime, timedelta
from decimal import Decimal from decimal import Decimal
from unittest import mock from unittest import mock
from django.contrib.auth.models import Group from django.contrib.auth.models import Group, User
from django.test import Client, TestCase from django.test import Client, TestCase
from django.urls import reverse from django.urls import reverse
from django.utils import timezone from django.utils import timezone
@ -628,37 +628,51 @@ class AccountStatOperationListViewTests(ViewTestCaseMixin, TestCase):
expected_stats = [ expected_stats = [
{ {
"label": "Derniers mois", "label": "Tout le temps",
"url": { "url": {
"path": base_url, "path": base_url,
"query": { "query": {
"scale_n_steps": ["7"], "types": ["['purchase']"],
"scale_name": ["month"], "scale_name": ["month"],
"scale_last": ["True"],
"scale_begin": [
self.accounts["user1"].created_at.isoformat(" ")
],
},
},
},
{
"label": "1 an",
"url": {
"path": base_url,
"query": {
"types": ["['purchase']"], "types": ["['purchase']"],
"scale_n_steps": ["12"],
"scale_name": ["month"],
"scale_last": ["True"], "scale_last": ["True"],
}, },
}, },
}, },
{ {
"label": "Dernières semaines", "label": "3 mois",
"url": { "url": {
"path": base_url, "path": base_url,
"query": { "query": {
"scale_n_steps": ["7"], "types": ["['purchase']"],
"scale_n_steps": ["13"],
"scale_name": ["week"], "scale_name": ["week"],
"types": ["['purchase']"],
"scale_last": ["True"], "scale_last": ["True"],
}, },
}, },
}, },
{ {
"label": "Derniers jours", "label": "2 semaines",
"url": { "url": {
"path": base_url, "path": base_url,
"query": { "query": {
"scale_n_steps": ["7"],
"scale_name": ["day"],
"types": ["['purchase']"], "types": ["['purchase']"],
"scale_n_steps": ["14"],
"scale_name": ["day"],
"scale_last": ["True"], "scale_last": ["True"],
}, },
}, },
@ -1524,6 +1538,21 @@ class ArticleStatSalesListViewTests(ViewTestCaseMixin, TestCase):
self.article = Article.objects.create( self.article = Article.objects.create(
name="Article", category=ArticleCategory.objects.create(name="Category") name="Article", category=ArticleCategory.objects.create(name="Category")
) )
checkout = Checkout.objects.create(
name="Checkout",
created_by=self.accounts["team"],
balance=5,
valid_from=self.now,
valid_to=self.now + timedelta(days=5),
)
self.opegroup = create_operation_group(
on_acc=self.accounts["user"],
checkout=checkout,
content=[
{"type": Operation.PURCHASE, "article": self.article, "article_nb": 2},
],
)
def test_ok(self): def test_ok(self):
r = self.client.get(self.url) r = self.client.get(self.url)
@ -1535,33 +1564,44 @@ class ArticleStatSalesListViewTests(ViewTestCaseMixin, TestCase):
expected_stats = [ expected_stats = [
{ {
"label": "Derniers mois", "label": "Tout le temps",
"url": { "url": {
"path": base_url, "path": base_url,
"query": { "query": {
"scale_n_steps": ["7"], "scale_name": ["month"],
"scale_last": ["True"],
"scale_begin": [self.opegroup.at.isoformat(" ")],
},
},
},
{
"label": "1 an",
"url": {
"path": base_url,
"query": {
"scale_n_steps": ["12"],
"scale_name": ["month"], "scale_name": ["month"],
"scale_last": ["True"], "scale_last": ["True"],
}, },
}, },
}, },
{ {
"label": "Dernières semaines", "label": "3 mois",
"url": { "url": {
"path": base_url, "path": base_url,
"query": { "query": {
"scale_n_steps": ["7"], "scale_n_steps": ["13"],
"scale_name": ["week"], "scale_name": ["week"],
"scale_last": ["True"], "scale_last": ["True"],
}, },
}, },
}, },
{ {
"label": "Derniers jours", "label": "2 semaines",
"url": { "url": {
"path": base_url, "path": base_url,
"query": { "query": {
"scale_n_steps": ["7"], "scale_n_steps": ["14"],
"scale_name": ["day"], "scale_name": ["day"],
"scale_last": ["True"], "scale_last": ["True"],
}, },
@ -1997,9 +2037,10 @@ class KPsulPerformOperationsViewTests(ViewTestCaseMixin, TestCase):
self.kpsul_consumer_mock.group_send.assert_called_once_with( self.kpsul_consumer_mock.group_send.assert_called_once_with(
"kfet.kpsul", "kfet.kpsul",
{ {
"opegroups": [ "groups": [
{ {
"add": True, "add": True,
"type": "operation",
"at": mock.ANY, "at": mock.ANY,
"amount": Decimal("-5.00"), "amount": Decimal("-5.00"),
"checkout__name": "Checkout", "checkout__name": "Checkout",
@ -2008,7 +2049,7 @@ class KPsulPerformOperationsViewTests(ViewTestCaseMixin, TestCase):
"is_cof": False, "is_cof": False,
"on_acc__trigramme": "000", "on_acc__trigramme": "000",
"valid_by__trigramme": None, "valid_by__trigramme": None,
"opes": [ "entries": [
{ {
"id": operation.pk, "id": operation.pk,
"addcost_amount": None, "addcost_amount": None,
@ -2269,9 +2310,10 @@ class KPsulPerformOperationsViewTests(ViewTestCaseMixin, TestCase):
self.kpsul_consumer_mock.group_send.assert_called_once_with( self.kpsul_consumer_mock.group_send.assert_called_once_with(
"kfet.kpsul", "kfet.kpsul",
{ {
"opegroups": [ "groups": [
{ {
"add": True, "add": True,
"type": "operation",
"at": mock.ANY, "at": mock.ANY,
"amount": Decimal("10.75"), "amount": Decimal("10.75"),
"checkout__name": "Checkout", "checkout__name": "Checkout",
@ -2280,7 +2322,7 @@ class KPsulPerformOperationsViewTests(ViewTestCaseMixin, TestCase):
"is_cof": False, "is_cof": False,
"on_acc__trigramme": "000", "on_acc__trigramme": "000",
"valid_by__trigramme": "100", "valid_by__trigramme": "100",
"opes": [ "entries": [
{ {
"id": operation.pk, "id": operation.pk,
"addcost_amount": None, "addcost_amount": None,
@ -2443,9 +2485,10 @@ class KPsulPerformOperationsViewTests(ViewTestCaseMixin, TestCase):
self.kpsul_consumer_mock.group_send.assert_called_once_with( self.kpsul_consumer_mock.group_send.assert_called_once_with(
"kfet.kpsul", "kfet.kpsul",
{ {
"opegroups": [ "groups": [
{ {
"add": True, "add": True,
"type": "operation",
"at": mock.ANY, "at": mock.ANY,
"amount": Decimal("-10.75"), "amount": Decimal("-10.75"),
"checkout__name": "Checkout", "checkout__name": "Checkout",
@ -2454,7 +2497,7 @@ class KPsulPerformOperationsViewTests(ViewTestCaseMixin, TestCase):
"is_cof": False, "is_cof": False,
"on_acc__trigramme": "000", "on_acc__trigramme": "000",
"valid_by__trigramme": None, "valid_by__trigramme": None,
"opes": [ "entries": [
{ {
"id": operation.pk, "id": operation.pk,
"addcost_amount": None, "addcost_amount": None,
@ -2601,9 +2644,10 @@ class KPsulPerformOperationsViewTests(ViewTestCaseMixin, TestCase):
self.kpsul_consumer_mock.group_send.assert_called_once_with( self.kpsul_consumer_mock.group_send.assert_called_once_with(
"kfet.kpsul", "kfet.kpsul",
{ {
"opegroups": [ "groups": [
{ {
"add": True, "add": True,
"type": "operation",
"at": mock.ANY, "at": mock.ANY,
"amount": Decimal("10.75"), "amount": Decimal("10.75"),
"checkout__name": "Checkout", "checkout__name": "Checkout",
@ -2612,7 +2656,7 @@ class KPsulPerformOperationsViewTests(ViewTestCaseMixin, TestCase):
"is_cof": False, "is_cof": False,
"on_acc__trigramme": "000", "on_acc__trigramme": "000",
"valid_by__trigramme": "100", "valid_by__trigramme": "100",
"opes": [ "entries": [
{ {
"id": operation.pk, "id": operation.pk,
"addcost_amount": None, "addcost_amount": None,
@ -2712,9 +2756,9 @@ class KPsulPerformOperationsViewTests(ViewTestCaseMixin, TestCase):
self.checkout.refresh_from_db() self.checkout.refresh_from_db()
self.assertEqual(self.checkout.balance, Decimal("100.00")) self.assertEqual(self.checkout.balance, Decimal("100.00"))
ws_data_ope = self.kpsul_consumer_mock.group_send.call_args[0][1]["opegroups"][ ws_data_ope = self.kpsul_consumer_mock.group_send.call_args[0][1]["groups"][0][
0 "entries"
]["opes"][0] ][0]
self.assertEqual(ws_data_ope["addcost_amount"], Decimal("1.00")) self.assertEqual(ws_data_ope["addcost_amount"], Decimal("1.00"))
self.assertEqual(ws_data_ope["addcost_for__trigramme"], "ADD") self.assertEqual(ws_data_ope["addcost_for__trigramme"], "ADD")
@ -2752,9 +2796,9 @@ class KPsulPerformOperationsViewTests(ViewTestCaseMixin, TestCase):
self.checkout.refresh_from_db() self.checkout.refresh_from_db()
self.assertEqual(self.checkout.balance, Decimal("100.00")) self.assertEqual(self.checkout.balance, Decimal("100.00"))
ws_data_ope = self.kpsul_consumer_mock.group_send.call_args[0][1]["opegroups"][ ws_data_ope = self.kpsul_consumer_mock.group_send.call_args[0][1]["groups"][0][
0 "entries"
]["opes"][0] ][0]
self.assertEqual(ws_data_ope["addcost_amount"], Decimal("0.80")) self.assertEqual(ws_data_ope["addcost_amount"], Decimal("0.80"))
self.assertEqual(ws_data_ope["addcost_for__trigramme"], "ADD") self.assertEqual(ws_data_ope["addcost_for__trigramme"], "ADD")
@ -2790,9 +2834,9 @@ class KPsulPerformOperationsViewTests(ViewTestCaseMixin, TestCase):
self.checkout.refresh_from_db() self.checkout.refresh_from_db()
self.assertEqual(self.checkout.balance, Decimal("106.00")) self.assertEqual(self.checkout.balance, Decimal("106.00"))
ws_data_ope = self.kpsul_consumer_mock.group_send.call_args[0][1]["opegroups"][ ws_data_ope = self.kpsul_consumer_mock.group_send.call_args[0][1]["groups"][0][
0 "entries"
]["opes"][0] ][0]
self.assertEqual(ws_data_ope["addcost_amount"], Decimal("1.00")) self.assertEqual(ws_data_ope["addcost_amount"], Decimal("1.00"))
self.assertEqual(ws_data_ope["addcost_for__trigramme"], "ADD") self.assertEqual(ws_data_ope["addcost_for__trigramme"], "ADD")
@ -2826,9 +2870,9 @@ class KPsulPerformOperationsViewTests(ViewTestCaseMixin, TestCase):
self.accounts["addcost"].refresh_from_db() self.accounts["addcost"].refresh_from_db()
self.assertEqual(self.accounts["addcost"].balance, Decimal("15.00")) self.assertEqual(self.accounts["addcost"].balance, Decimal("15.00"))
ws_data_ope = self.kpsul_consumer_mock.group_send.call_args[0][1]["opegroups"][ ws_data_ope = self.kpsul_consumer_mock.group_send.call_args[0][1]["groups"][0][
0 "entries"
]["opes"][0] ][0]
self.assertEqual(ws_data_ope["addcost_amount"], None) self.assertEqual(ws_data_ope["addcost_amount"], None)
self.assertEqual(ws_data_ope["addcost_for__trigramme"], None) self.assertEqual(ws_data_ope["addcost_for__trigramme"], None)
@ -2861,9 +2905,9 @@ class KPsulPerformOperationsViewTests(ViewTestCaseMixin, TestCase):
self.accounts["addcost"].refresh_from_db() self.accounts["addcost"].refresh_from_db()
self.assertEqual(self.accounts["addcost"].balance, Decimal("0.00")) self.assertEqual(self.accounts["addcost"].balance, Decimal("0.00"))
ws_data_ope = self.kpsul_consumer_mock.group_send.call_args[0][1]["opegroups"][ ws_data_ope = self.kpsul_consumer_mock.group_send.call_args[0][1]["groups"][0][
0 "entries"
]["opes"][0] ][0]
self.assertEqual(ws_data_ope["addcost_amount"], None) self.assertEqual(ws_data_ope["addcost_amount"], None)
self.assertEqual(ws_data_ope["addcost_for__trigramme"], None) self.assertEqual(ws_data_ope["addcost_for__trigramme"], None)
@ -3170,9 +3214,10 @@ class KPsulPerformOperationsViewTests(ViewTestCaseMixin, TestCase):
self.kpsul_consumer_mock.group_send.assert_called_once_with( self.kpsul_consumer_mock.group_send.assert_called_once_with(
"kfet.kpsul", "kfet.kpsul",
{ {
"opegroups": [ "groups": [
{ {
"add": True, "add": True,
"type": "operation",
"at": mock.ANY, "at": mock.ANY,
"amount": Decimal("-9.00"), "amount": Decimal("-9.00"),
"checkout__name": "Checkout", "checkout__name": "Checkout",
@ -3181,7 +3226,7 @@ class KPsulPerformOperationsViewTests(ViewTestCaseMixin, TestCase):
"is_cof": False, "is_cof": False,
"on_acc__trigramme": "000", "on_acc__trigramme": "000",
"valid_by__trigramme": None, "valid_by__trigramme": None,
"opes": [ "entries": [
{ {
"id": operation_list[0].pk, "id": operation_list[0].pk,
"addcost_amount": None, "addcost_amount": None,
@ -3234,7 +3279,7 @@ class KPsulCancelOperationsViewTests(ViewTestCaseMixin, TestCase):
""" """
url_name = "kfet.kpsul.cancel_operations" url_name = "kfet.operations.cancel"
url_expected = "/k-fet/k-psul/cancel_operations" url_expected = "/k-fet/k-psul/cancel_operations"
http_methods = ["POST"] http_methods = ["POST"]
@ -3353,7 +3398,26 @@ class KPsulCancelOperationsViewTests(ViewTestCaseMixin, TestCase):
) )
self.assertDictEqual( self.assertDictEqual(
json_data, {"canceled": [operation.pk], "errors": {}, "warnings": {}} json_data,
{
"canceled": [
{
"id": operation.id,
# JSON date encoding is a pain...
"canceled_at": mock.ANY,
"canceled_by__trigramme": None,
}
],
"errors": {},
"warnings": {},
"opegroups_to_update": [
{
"id": group.pk,
"amount": str(group.amount),
"is_cof": group.is_cof,
}
],
},
) )
self.account.refresh_from_db() self.account.refresh_from_db()
@ -3365,26 +3429,7 @@ class KPsulCancelOperationsViewTests(ViewTestCaseMixin, TestCase):
self.kpsul_consumer_mock.group_send.assert_called_with( self.kpsul_consumer_mock.group_send.assert_called_with(
"kfet.kpsul", "kfet.kpsul",
{ {"checkouts": [], "articles": [{"id": self.article.pk, "stock": 22}]},
"opegroups": [
{
"cancellation": True,
"id": group.pk,
"amount": Decimal("0.00"),
"is_cof": False,
}
],
"opes": [
{
"cancellation": True,
"id": operation.pk,
"canceled_by__trigramme": None,
"canceled_at": self.now + timedelta(seconds=15),
}
],
"checkouts": [],
"articles": [{"id": self.article.pk, "stock": 22}],
},
) )
def test_purchase_with_addcost(self): def test_purchase_with_addcost(self):
@ -3541,7 +3586,26 @@ class KPsulCancelOperationsViewTests(ViewTestCaseMixin, TestCase):
) )
self.assertDictEqual( self.assertDictEqual(
json_data, {"canceled": [operation.pk], "errors": {}, "warnings": {}} json_data,
{
"canceled": [
{
"id": operation.id,
# JSON date encoding is a pain...
"canceled_at": mock.ANY,
"canceled_by__trigramme": None,
}
],
"errors": {},
"warnings": {},
"opegroups_to_update": [
{
"id": group.pk,
"amount": str(group.amount),
"is_cof": group.is_cof,
}
],
},
) )
self.account.refresh_from_db() self.account.refresh_from_db()
@ -3554,22 +3618,6 @@ class KPsulCancelOperationsViewTests(ViewTestCaseMixin, TestCase):
self.kpsul_consumer_mock.group_send.assert_called_with( self.kpsul_consumer_mock.group_send.assert_called_with(
"kfet.kpsul", "kfet.kpsul",
{ {
"opegroups": [
{
"cancellation": True,
"id": group.pk,
"amount": Decimal("0.00"),
"is_cof": False,
}
],
"opes": [
{
"cancellation": True,
"id": operation.pk,
"canceled_by__trigramme": None,
"canceled_at": self.now + timedelta(seconds=15),
}
],
"checkouts": [{"id": self.checkout.pk, "balance": Decimal("89.25")}], "checkouts": [{"id": self.checkout.pk, "balance": Decimal("89.25")}],
"articles": [], "articles": [],
}, },
@ -3625,7 +3673,26 @@ class KPsulCancelOperationsViewTests(ViewTestCaseMixin, TestCase):
) )
self.assertDictEqual( self.assertDictEqual(
json_data, {"canceled": [operation.pk], "errors": {}, "warnings": {}} json_data,
{
"canceled": [
{
"id": operation.id,
# JSON date encoding is a pain...
"canceled_at": mock.ANY,
"canceled_by__trigramme": None,
}
],
"errors": {},
"warnings": {},
"opegroups_to_update": [
{
"id": group.pk,
"amount": str(group.amount),
"is_cof": group.is_cof,
}
],
},
) )
self.account.refresh_from_db() self.account.refresh_from_db()
@ -3638,22 +3705,6 @@ class KPsulCancelOperationsViewTests(ViewTestCaseMixin, TestCase):
self.kpsul_consumer_mock.group_send.assert_called_with( self.kpsul_consumer_mock.group_send.assert_called_with(
"kfet.kpsul", "kfet.kpsul",
{ {
"opegroups": [
{
"cancellation": True,
"id": group.pk,
"amount": Decimal("0.00"),
"is_cof": False,
}
],
"opes": [
{
"cancellation": True,
"id": operation.pk,
"canceled_by__trigramme": None,
"canceled_at": self.now + timedelta(seconds=15),
}
],
"checkouts": [{"id": self.checkout.pk, "balance": Decimal("110.75")}], "checkouts": [{"id": self.checkout.pk, "balance": Decimal("110.75")}],
"articles": [], "articles": [],
}, },
@ -3709,7 +3760,26 @@ class KPsulCancelOperationsViewTests(ViewTestCaseMixin, TestCase):
) )
self.assertDictEqual( self.assertDictEqual(
json_data, {"canceled": [operation.pk], "errors": {}, "warnings": {}} json_data,
{
"canceled": [
{
"id": operation.id,
# JSON date encoding is a pain...
"canceled_at": mock.ANY,
"canceled_by__trigramme": None,
}
],
"errors": {},
"warnings": {},
"opegroups_to_update": [
{
"id": group.pk,
"amount": str(group.amount),
"is_cof": group.is_cof,
}
],
},
) )
self.account.refresh_from_db() self.account.refresh_from_db()
@ -3720,27 +3790,7 @@ class KPsulCancelOperationsViewTests(ViewTestCaseMixin, TestCase):
self.assertEqual(self.checkout.balance, Decimal("100.00")) self.assertEqual(self.checkout.balance, Decimal("100.00"))
self.kpsul_consumer_mock.group_send.assert_called_with( self.kpsul_consumer_mock.group_send.assert_called_with(
"kfet.kpsul", "kfet.kpsul", {"checkouts": [], "articles": []},
{
"opegroups": [
{
"cancellation": True,
"id": group.pk,
"amount": Decimal("0.00"),
"is_cof": False,
}
],
"opes": [
{
"cancellation": True,
"id": operation.pk,
"canceled_by__trigramme": None,
"canceled_at": self.now + timedelta(seconds=15),
}
],
"checkouts": [],
"articles": [],
},
) )
@mock.patch("django.utils.timezone.now") @mock.patch("django.utils.timezone.now")
@ -3961,13 +4011,33 @@ class KPsulCancelOperationsViewTests(ViewTestCaseMixin, TestCase):
group.refresh_from_db() group.refresh_from_db()
self.assertEqual(group.amount, Decimal("10.75")) self.assertEqual(group.amount, Decimal("10.75"))
self.assertEqual(group.opes.exclude(canceled_at=None).count(), 3) self.assertEqual(group.opes.exclude(canceled_at=None).count(), 3)
self.maxDiff = None
self.assertDictEqual( self.assertDictEqual(
json_data, json_data,
{ {
"canceled": [operation1.pk, operation2.pk], "canceled": [
"warnings": {"already_canceled": [operation3.pk]}, {
"id": operation1.id,
# JSON date encoding is a pain...
"canceled_at": mock.ANY,
"canceled_by__trigramme": None,
},
{
"id": operation2.id,
# JSON date encoding is a pain...
"canceled_at": mock.ANY,
"canceled_by__trigramme": None,
},
],
"errors": {}, "errors": {},
"warnings": {"already_canceled": [operation3.pk]},
"opegroups_to_update": [
{
"id": group.pk,
"amount": str(group.amount),
"is_cof": group.is_cof,
}
],
}, },
) )
@ -4121,12 +4191,18 @@ class HistoryJSONViewTests(ViewTestCaseMixin, TestCase):
url_expected = "/k-fet/history.json" url_expected = "/k-fet/history.json"
auth_user = "user" auth_user = "user"
auth_forbidden = [None] auth_forbidden = [None, "noaccount"]
def test_ok(self): def test_ok(self):
r = self.client.post(self.url) r = self.client.post(self.url)
self.assertEqual(r.status_code, 200) self.assertEqual(r.status_code, 200)
def get_users_extra(self):
noaccount = User.objects.create(username="noaccount")
noaccount.set_password("noaccount")
noaccount.save()
return {"noaccount": noaccount}
class AccountReadJSONViewTests(ViewTestCaseMixin, TestCase): class AccountReadJSONViewTests(ViewTestCaseMixin, TestCase):
url_name = "kfet.account.read.json" url_name = "kfet.account.read.json"
View file
@ -219,8 +219,8 @@ urlpatterns = [
), ),
path( path(
"k-psul/cancel_operations", "k-psul/cancel_operations",
views.kpsul_cancel_operations, views.cancel_operations,
name="kfet.kpsul.cancel_operations", name="kfet.operations.cancel",
), ),
path( path(
"k-psul/articles_data", "k-psul/articles_data",
@ -252,7 +252,7 @@ urlpatterns = [
# ----- # -----
# Transfers urls # Transfers urls
# ----- # -----
path("transfers/", views.transfers, name="kfet.transfers"), path("transfers/", views.TransferView.as_view(), name="kfet.transfers"),
path("transfers/new", views.transfers_create, name="kfet.transfers.create"), path("transfers/new", views.transfers_create, name="kfet.transfers.create"),
path("transfers/perform", views.perform_transfers, name="kfet.transfers.perform"), path("transfers/perform", views.perform_transfers, name="kfet.transfers.perform"),
path("transfers/cancel", views.cancel_transfers, name="kfet.transfers.cancel"), path("transfers/cancel", views.cancel_transfers, name="kfet.transfers.cancel"),
View file
@ -2,6 +2,7 @@ import ast
import heapq import heapq
import statistics import statistics
from collections import defaultdict from collections import defaultdict
from datetime import timedelta
from decimal import Decimal from decimal import Decimal
from typing import List from typing import List
from urllib.parse import urlencode from urllib.parse import urlencode
@ -12,7 +13,7 @@ from django.contrib.auth.mixins import PermissionRequiredMixin
from django.contrib.auth.models import Permission, User from django.contrib.auth.models import Permission, User
from django.contrib.messages.views import SuccessMessageMixin from django.contrib.messages.views import SuccessMessageMixin
from django.db import transaction from django.db import transaction
from django.db.models import Count, F, Prefetch, Sum from django.db.models import Count, F, Prefetch, Q, Sum
from django.forms import formset_factory from django.forms import formset_factory
from django.http import Http404, JsonResponse from django.http import Http404, JsonResponse
from django.shortcuts import get_object_or_404, redirect, render from django.shortcuts import get_object_or_404, redirect, render
@ -76,7 +77,7 @@ from kfet.models import (
Transfer, Transfer,
TransferGroup, TransferGroup,
) )
from kfet.statistic import ScaleMixin, WeekScale, last_stats_manifest from kfet.statistic import DayScale, MonthScale, ScaleMixin, WeekScale, scale_url_params
from .auth import KFET_GENERIC_TRIGRAMME from .auth import KFET_GENERIC_TRIGRAMME
from .auth.views import ( # noqa from .auth.views import ( # noqa
@ -328,7 +329,9 @@ def account_update(request, trigramme):
account = get_object_or_404(Account, trigramme=trigramme) account = get_object_or_404(Account, trigramme=trigramme)
# Checking permissions # Checking permissions
if not request.user.has_perm("kfet.is_team") and request.user != account.user: if not account.editable or (
not request.user.has_perm("kfet.is_team") and request.user != account.user
):
raise Http404 raise Http404
user_info_form = UserInfoForm(instance=account.user) user_info_form = UserInfoForm(instance=account.user)
@ -911,6 +914,8 @@ def kpsul_get_settings(request):
@teamkfet_required @teamkfet_required
def account_read_json(request, trigramme): def account_read_json(request, trigramme):
account = get_object_or_404(Account, trigramme=trigramme) account = get_object_or_404(Account, trigramme=trigramme)
if not account.readable:
raise Http404
data = { data = {
"id": account.pk, "id": account.pk,
"name": account.name, "name": account.name,
@ -1156,9 +1161,10 @@ def kpsul_perform_operations(request):
# Websocket data # Websocket data
websocket_data = {} websocket_data = {}
websocket_data["opegroups"] = [ websocket_data["groups"] = [
{ {
"add": True, "add": True,
"type": "operation",
"id": operationgroup.pk, "id": operationgroup.pk,
"amount": operationgroup.amount, "amount": operationgroup.amount,
"checkout__name": operationgroup.checkout.name, "checkout__name": operationgroup.checkout.name,
@ -1169,7 +1175,7 @@ def kpsul_perform_operations(request):
operationgroup.valid_by and operationgroup.valid_by.trigramme or None operationgroup.valid_by and operationgroup.valid_by.trigramme or None
), ),
"on_acc__trigramme": operationgroup.on_acc.trigramme, "on_acc__trigramme": operationgroup.on_acc.trigramme,
"opes": [], "entries": [],
} }
] ]
for operation in operations: for operation in operations:
@ -1187,7 +1193,7 @@ def kpsul_perform_operations(request):
"canceled_by__trigramme": None, "canceled_by__trigramme": None,
"canceled_at": None, "canceled_at": None,
} }
websocket_data["opegroups"][0]["opes"].append(ope_data) websocket_data["groups"][0]["entries"].append(ope_data)
# Need refresh from db cause we used update on queryset # Need refresh from db cause we used update on queryset
operationgroup.checkout.refresh_from_db() operationgroup.checkout.refresh_from_db()
websocket_data["checkouts"] = [ websocket_data["checkouts"] = [
@ -1207,7 +1213,7 @@ def kpsul_perform_operations(request):
@teamkfet_required @teamkfet_required
@kfet_password_auth @kfet_password_auth
def kpsul_cancel_operations(request): def cancel_operations(request):
# For the response # For the response
data = {"canceled": [], "warnings": {}, "errors": {}} data = {"canceled": [], "warnings": {}, "errors": {}}
@ -1363,7 +1369,11 @@ def kpsul_cancel_operations(request):
.filter(pk__in=opegroups_pk) .filter(pk__in=opegroups_pk)
.order_by("pk") .order_by("pk")
) )
opes = sorted(opes) opes = (
Operation.objects.values("id", "canceled_at", "canceled_by__trigramme")
.filter(pk__in=opes)
.order_by("pk")
)
checkouts_pk = [checkout.pk for checkout in to_checkouts_balances] checkouts_pk = [checkout.pk for checkout in to_checkouts_balances]
checkouts = ( checkouts = (
Checkout.objects.values("id", "balance") Checkout.objects.values("id", "balance")
@ -1374,27 +1384,7 @@ def kpsul_cancel_operations(request):
articles = Article.objects.values("id", "stock").filter(pk__in=articles_pk) articles = Article.objects.values("id", "stock").filter(pk__in=articles_pk)
# Websocket data # Websocket data
websocket_data = {"opegroups": [], "opes": [], "checkouts": [], "articles": []} websocket_data = {"checkouts": [], "articles": []}
for opegroup in opegroups:
websocket_data["opegroups"].append(
{
"cancellation": True,
"id": opegroup["id"],
"amount": opegroup["amount"],
"is_cof": opegroup["is_cof"],
}
)
canceled_by__trigramme = canceled_by and canceled_by.trigramme or None
for ope in opes:
websocket_data["opes"].append(
{
"cancellation": True,
"id": ope,
"canceled_by__trigramme": canceled_by__trigramme,
"canceled_at": canceled_at,
}
)
for checkout in checkouts: for checkout in checkouts:
websocket_data["checkouts"].append( websocket_data["checkouts"].append(
{"id": checkout["id"], "balance": checkout["balance"]} {"id": checkout["id"], "balance": checkout["balance"]}
@ -1405,7 +1395,8 @@ def kpsul_cancel_operations(request):
) )
consumers.KPsul.group_send("kfet.kpsul", websocket_data) consumers.KPsul.group_send("kfet.kpsul", websocket_data)
data["canceled"] = opes data["canceled"] = list(opes)
data["opegroups_to_update"] = list(opegroups)
if opes_already_canceled: if opes_already_canceled:
data["warnings"]["already_canceled"] = opes_already_canceled data["warnings"]["already_canceled"] = opes_already_canceled
return JsonResponse(data) return JsonResponse(data)
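With this change, the cancel endpoint's JSON response carries per-operation cancellation details plus the groups to refresh client-side. A sketch of the shape built above, mirroring the updated tests earlier in this diff (values illustrative, Python-side as asserted in the tests):

{
    "canceled": [
        {"id": 42, "canceled_at": "2020-05-08T15:56:42", "canceled_by__trigramme": None},
    ],
    "errors": {},
    "warnings": {},  # may contain {"already_canceled": [...]}
    "opegroups_to_update": [
        {"id": 7, "amount": "10.75", "is_cof": False},
    ],
}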
@ -1416,49 +1407,86 @@ def history_json(request):
# Fetch the request parameters # Fetch the request parameters
from_date = request.POST.get("from", None) from_date = request.POST.get("from", None)
to_date = request.POST.get("to", None) to_date = request.POST.get("to", None)
limit = request.POST.get("limit", None)
checkouts = request.POST.getlist("checkouts[]", None) checkouts = request.POST.getlist("checkouts[]", None)
accounts = request.POST.getlist("accounts[]", None) accounts = request.POST.getlist("accounts[]", None)
transfers_only = request.POST.get("transfersonly", False)
opes_only = request.POST.get("opesonly", False)
# Build the (transfer) queryset for the prefetch
transfer_queryset_prefetch = Transfer.objects.select_related(
"from_acc", "to_acc", "canceled_by"
)
# For transfers, the account check happens in the prefetch
if accounts:
transfer_queryset_prefetch = transfer_queryset_prefetch.filter(
Q(from_acc__in=accounts) | Q(to_acc__in=accounts)
)
if not request.user.has_perm("kfet.is_team"):
try:
acc = request.user.profile.account_kfet
transfer_queryset_prefetch = transfer_queryset_prefetch.filter(
Q(from_acc=acc) | Q(to_acc=acc)
)
except Account.DoesNotExist:
return JsonResponse({}, status=403)
transfer_prefetch = Prefetch(
"transfers", queryset=transfer_queryset_prefetch, to_attr="filtered_transfers"
)
# Build the (operation) queryset for the prefetch # Build the (operation) queryset for the prefetch
queryset_prefetch = Operation.objects.select_related( ope_queryset_prefetch = Operation.objects.select_related(
"article", "canceled_by", "addcost_for" "article", "canceled_by", "addcost_for"
) )
ope_prefetch = Prefetch("opes", queryset=ope_queryset_prefetch)
# Build the main queryset # Build the main queryset
opegroups = ( opegroups = (
OperationGroup.objects.prefetch_related( OperationGroup.objects.prefetch_related(ope_prefetch)
Prefetch("opes", queryset=queryset_prefetch)
)
.select_related("on_acc", "valid_by") .select_related("on_acc", "valid_by")
.order_by("at") .order_by("at")
) )
transfergroups = (
TransferGroup.objects.prefetch_related(transfer_prefetch)
.select_related("valid_by")
.order_by("at")
)
# Apply the filters # Apply the filters
if from_date: if from_date:
opegroups = opegroups.filter(at__gte=from_date) opegroups = opegroups.filter(at__gte=from_date)
transfergroups = transfergroups.filter(at__gte=from_date)
if to_date: if to_date:
opegroups = opegroups.filter(at__lt=to_date) opegroups = opegroups.filter(at__lt=to_date)
transfergroups = transfergroups.filter(at__lt=to_date)
if checkouts: if checkouts:
opegroups = opegroups.filter(checkout_id__in=checkouts) opegroups = opegroups.filter(checkout__in=checkouts)
transfergroups = TransferGroup.objects.none()
if transfers_only:
opegroups = OperationGroup.objects.none()
if opes_only:
transfergroups = TransferGroup.objects.none()
if accounts: if accounts:
opegroups = opegroups.filter(on_acc_id__in=accounts) opegroups = opegroups.filter(on_acc__in=accounts)
# Non-team members only have access to their own history # Non-team members only have access to their own history
if not request.user.has_perm("kfet.is_team"): if not request.user.has_perm("kfet.is_team"):
opegroups = opegroups.filter(on_acc=request.user.profile.account_kfet) opegroups = opegroups.filter(on_acc=request.user.profile.account_kfet)
if limit:
opegroups = opegroups[:limit]
# Build the response # Build the response
opegroups_list = [] history_groups = []
for opegroup in opegroups: for opegroup in opegroups:
opegroup_dict = { opegroup_dict = {
"type": "operation",
"id": opegroup.id, "id": opegroup.id,
"amount": opegroup.amount, "amount": opegroup.amount,
"at": opegroup.at, "at": opegroup.at,
"checkout_id": opegroup.checkout_id, "checkout_id": opegroup.checkout_id,
"is_cof": opegroup.is_cof, "is_cof": opegroup.is_cof,
"comment": opegroup.comment, "comment": opegroup.comment,
"opes": [], "entries": [],
"on_acc__trigramme": opegroup.on_acc and opegroup.on_acc.trigramme or None, "on_acc__trigramme": opegroup.on_acc and opegroup.on_acc.trigramme or None,
} }
if request.user.has_perm("kfet.is_team"): if request.user.has_perm("kfet.is_team"):
@ -1482,9 +1510,40 @@ def history_json(request):
ope_dict["canceled_by__trigramme"] = ( ope_dict["canceled_by__trigramme"] = (
ope.canceled_by and ope.canceled_by.trigramme or None ope.canceled_by and ope.canceled_by.trigramme or None
) )
opegroup_dict["opes"].append(ope_dict) opegroup_dict["entries"].append(ope_dict)
opegroups_list.append(opegroup_dict) history_groups.append(opegroup_dict)
return JsonResponse({"opegroups": opegroups_list}) for transfergroup in transfergroups:
if transfergroup.filtered_transfers:
transfergroup_dict = {
"type": "transfer",
"id": transfergroup.id,
"at": transfergroup.at,
"comment": transfergroup.comment,
"entries": [],
}
if request.user.has_perm("kfet.is_team"):
transfergroup_dict["valid_by__trigramme"] = (
transfergroup.valid_by and transfergroup.valid_by.trigramme or None
)
for transfer in transfergroup.filtered_transfers:
transfer_dict = {
"id": transfer.id,
"amount": transfer.amount,
"canceled_at": transfer.canceled_at,
"from_acc": transfer.from_acc.trigramme,
"to_acc": transfer.to_acc.trigramme,
}
if request.user.has_perm("kfet.is_team"):
transfer_dict["canceled_by__trigramme"] = (
transfer.canceled_by and transfer.canceled_by.trigramme or None
)
transfergroup_dict["entries"].append(transfer_dict)
history_groups.append(transfergroup_dict)
history_groups.sort(key=lambda group: group["at"])
return JsonResponse({"groups": history_groups})
@teamkfet_required @teamkfet_required
@ -1544,18 +1603,9 @@ config_update = permission_required("kfet.change_config")(SettingsUpdate.as_view
# ----- # -----
@teamkfet_required @method_decorator(teamkfet_required, name="dispatch")
def transfers(request): class TransferView(TemplateView):
transfers_pre = Prefetch( template_name = "kfet/transfers.html"
"transfers", queryset=(Transfer.objects.select_related("from_acc", "to_acc"))
)
transfergroups = (
TransferGroup.objects.select_related("valid_by")
.prefetch_related(transfers_pre)
.order_by("-at")
)
return render(request, "kfet/transfers.html", {"transfergroups": transfergroups})
@teamkfet_required @teamkfet_required
@ -1746,7 +1796,12 @@ def cancel_transfers(request):
elif hasattr(account, "negative") and not account.negative.balance_offset: elif hasattr(account, "negative") and not account.negative.balance_offset:
account.negative.delete() account.negative.delete()
data["canceled"] = transfers transfers = (
Transfer.objects.values("id", "canceled_at", "canceled_by__trigramme")
.filter(pk__in=transfers)
.order_by("pk")
)
data["canceled"] = list(transfers)
if transfers_already_canceled: if transfers_already_canceled:
data["warnings"]["already_canceled"] = transfers_already_canceled data["warnings"]["already_canceled"] = transfers_already_canceled
return JsonResponse(data) return JsonResponse(data)
@ -2145,7 +2200,7 @@ class SupplierUpdate(SuccessMessageMixin, UpdateView):
# Generic views # Generic views
# --------------- # ---------------
# source : docs.djangoproject.com/fr/1.10/topics/class-based-views/mixins/ # source : docs.djangoproject.com/fr/1.10/topics/class-based-views/mixins/
class JSONResponseMixin(object): class JSONResponseMixin:
""" """
A mixin that can be used to render a JSON response. A mixin that can be used to render a JSON response.
""" """
@ -2174,34 +2229,39 @@ class JSONDetailView(JSONResponseMixin, BaseDetailView):
return self.render_to_json_response(context) return self.render_to_json_response(context)
class PkUrlMixin(object):
def get_object(self, *args, **kwargs):
get_by = self.kwargs.get(self.pk_url_kwarg)
return get_object_or_404(self.model, **{self.pk_url_kwarg: get_by})
class SingleResumeStat(JSONDetailView): class SingleResumeStat(JSONDetailView):
"""Manifest for a kind of a stat about an object. """
Génère l'interface de sélection pour les statistiques d'un compte/article.
L'interface est constituée d'une série de boutons, qui récupèrent et graphent
des statistiques du même type, sur le même objet mais avec des arguments différents.
Returns JSON whose payload is an array containing descriptions of a stat: Attributs :
url to retrieve data, label, ... - url_stat : URL récupérer les statistiques
- stats : liste de dictionnaires avec les clés suivantes :
- label : texte du bouton
- url_params : paramètres GET à rajouter à `url_stat`
- default : si `True`, graphe à montrer par défaut
On peut aussi définir `stats` dynamiquement, via la fonction `get_stats`.
""" """
id_prefix = ""
nb_default = 0
stats = []
url_stat = None url_stat = None
stats = []
def get_stats(self):
return self.stats
def get_context_data(self, **kwargs): def get_context_data(self, **kwargs):
# On n'hérite pas # On n'hérite pas
object_id = self.object.id
context = {} context = {}
stats = [] stats = []
prefix = "{}_{}".format(self.id_prefix, object_id) # On peut avoir récupéré self.object via pk ou slug
for i, stat_def in enumerate(self.stats): if self.pk_url_kwarg in self.kwargs:
url_pk = getattr(self.object, self.pk_url_kwarg) url_pk = getattr(self.object, self.pk_url_kwarg)
else:
url_pk = getattr(self.object, self.slug_url_kwarg)
for stat_def in self.get_stats():
url_params_d = stat_def.get("url_params", {}) url_params_d = stat_def.get("url_params", {})
if len(url_params_d) > 0: if len(url_params_d) > 0:
url_params = "?{}".format(urlencode(url_params_d)) url_params = "?{}".format(urlencode(url_params_d))
@ -2210,42 +2270,21 @@ class SingleResumeStat(JSONDetailView):
stats.append( stats.append(
{ {
"label": stat_def["label"], "label": stat_def["label"],
"btn": "btn_{}_{}".format(prefix, i),
"url": "{url}{params}".format( "url": "{url}{params}".format(
url=reverse(self.url_stat, args=[url_pk]), params=url_params url=reverse(self.url_stat, args=[url_pk]), params=url_params
), ),
"default": stat_def.get("default", False),
} }
) )
context["id_prefix"] = prefix
context["content_id"] = "content_%s" % prefix
context["stats"] = stats context["stats"] = stats
context["default_stat"] = self.nb_default
context["object_id"] = object_id
return context return context
# ----------------------- class UserAccountMixin:
# Personal balance history """
# ----------------------- Mixin that checks that the account handled by the view is the current
ID_PREFIX_ACC_BALANCE = "balance_acc" user's. Otherwise, raises Http404.
"""
class AccountStatBalanceList(PkUrlMixin, SingleResumeStat):
"""Manifest for balance stats of an account."""
model = Account
context_object_name = "account"
pk_url_kwarg = "trigramme"
url_stat = "kfet.account.stat.balance"
id_prefix = ID_PREFIX_ACC_BALANCE
stats = [
{"label": "Tout le temps"},
{"label": "1 an", "url_params": {"last_days": 365}},
{"label": "6 mois", "url_params": {"last_days": 183}},
{"label": "3 mois", "url_params": {"last_days": 90}},
{"label": "30 jours", "url_params": {"last_days": 30}},
]
nb_default = 0
def get_object(self, *args, **kwargs): def get_object(self, *args, **kwargs):
obj = super().get_object(*args, **kwargs) obj = super().get_object(*args, **kwargs)
@ -2253,21 +2292,41 @@ class AccountStatBalanceList(PkUrlMixin, SingleResumeStat):
raise Http404 raise Http404
return obj return obj
@method_decorator(login_required)
def dispatch(self, *args, **kwargs): # -----------------------
return super().dispatch(*args, **kwargs) # Personal balance history
# -----------------------
class AccountStatBalance(PkUrlMixin, JSONDetailView): @method_decorator(login_required, name="dispatch")
"""Datasets of balance of an account. class AccountStatBalanceList(UserAccountMixin, SingleResumeStat):
"""
Operations and Transfers are taken into account. General menu for the balance history of an account
""" """
model = Account model = Account
pk_url_kwarg = "trigramme" slug_url_kwarg = "trigramme"
context_object_name = "account" slug_field = "trigramme"
url_stat = "kfet.account.stat.balance"
stats = [
{"label": "Tout le temps"},
{"label": "1 an", "url_params": {"last_days": 365}},
{"label": "6 mois", "url_params": {"last_days": 183}},
{"label": "3 mois", "url_params": {"last_days": 90}, "default": True},
{"label": "30 jours", "url_params": {"last_days": 30}},
]
@method_decorator(login_required, name="dispatch")
class AccountStatBalance(UserAccountMixin, JSONDetailView):
"""
Balance history statistics (JSON) for an account.
Takes into account operations and transfers over the given period.
"""
model = Account
slug_url_kwarg = "trigramme"
slug_field = "trigramme"
def get_changes_list(self, last_days=None, begin_date=None, end_date=None): def get_changes_list(self, last_days=None, begin_date=None, end_date=None):
account = self.object account = self.object
@ -2366,57 +2425,50 @@ class AccountStatBalance(PkUrlMixin, JSONDetailView):
# TODO: offset # TODO: offset
return context return context
def get_object(self, *args, **kwargs):
obj = super().get_object(*args, **kwargs)
if self.request.user != obj.user:
raise Http404
return obj
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super().dispatch(*args, **kwargs)
# ------------------------ # ------------------------
# Personal consumption # Personal consumption
# ------------------------ # ------------------------
ID_PREFIX_ACC_LAST = "last_acc"
ID_PREFIX_ACC_LAST_DAYS = "last_days_acc"
ID_PREFIX_ACC_LAST_WEEKS = "last_weeks_acc"
ID_PREFIX_ACC_LAST_MONTHS = "last_months_acc"
class AccountStatOperationList(PkUrlMixin, SingleResumeStat): @method_decorator(login_required, name="dispatch")
"""Manifest for operations stats of an account.""" class AccountStatOperationList(UserAccountMixin, SingleResumeStat):
"""
General menu for the consumption history of an account
"""
model = Account model = Account
context_object_name = "account" slug_url_kwarg = "trigramme"
pk_url_kwarg = "trigramme" slug_field = "trigramme"
id_prefix = ID_PREFIX_ACC_LAST
nb_default = 2
stats = last_stats_manifest(types=[Operation.PURCHASE])
url_stat = "kfet.account.stat.operation" url_stat = "kfet.account.stat.operation"
def get_object(self, *args, **kwargs): def get_stats(self):
obj = super().get_object(*args, **kwargs) scales_def = [
if self.request.user != obj.user: (
raise Http404 "Tout le temps",
return obj MonthScale,
{"last": True, "begin": self.object.created_at},
False,
),
("1 an", MonthScale, {"last": True, "n_steps": 12}, False),
("3 mois", WeekScale, {"last": True, "n_steps": 13}, True),
("2 semaines", DayScale, {"last": True, "n_steps": 14}, False),
]
@method_decorator(login_required) return scale_url_params(scales_def, types=[Operation.PURCHASE])
def dispatch(self, *args, **kwargs):
return super().dispatch(*args, **kwargs)
class AccountStatOperation(ScaleMixin, PkUrlMixin, JSONDetailView): @method_decorator(login_required, name="dispatch")
"""Datasets of operations of an account.""" class AccountStatOperation(UserAccountMixin, ScaleMixin, JSONDetailView):
"""
Consumption statistics (JSON) (number of items bought) for an account.
"""
model = Account model = Account
pk_url_kwarg = "trigramme" slug_url_kwarg = "trigramme"
context_object_name = "account" slug_field = "trigramme"
id_prefix = ""
def get_operations(self, scale, types=None): def get_operations(self, types=None):
# Select the operations matching # Select the operations matching
# the article in question that are not canceled, # the article in question that are not canceled,
# then, for each interval, pick the operations # then, for each interval, pick the operations
@ -2428,28 +2480,20 @@ class AccountStatOperation(ScaleMixin, PkUrlMixin, JSONDetailView):
) )
if types is not None: if types is not None:
all_operations = all_operations.filter(type__in=types) all_operations = all_operations.filter(type__in=types)
chunks = scale.get_by_chunks( return all_operations
all_operations,
field_db="group__at",
field_callback=(lambda d: d["group__at"]),
)
return chunks
def get_context_data(self, *args, **kwargs): def get_context_data(self, *args, **kwargs):
old_ctx = super().get_context_data(*args, **kwargs) context = super().get_context_data(*args, **kwargs)
context = {"labels": old_ctx["labels"]}
scale = self.scale
types = self.request.GET.get("types", None) types = self.request.GET.get("types", None)
if types is not None: if types is not None:
types = ast.literal_eval(types) types = ast.literal_eval(types)
operations = self.get_operations(types=types, scale=scale) operations = self.get_operations(types=types)
# Count the operations # Count the operations
nb_ventes = [] nb_ventes = self.scale.chunkify_qs(
for chunk in operations: operations, field="group__at", aggregate=Sum("article_nb")
ventes = sum(ope["article_nb"] for ope in chunk) )
nb_ventes.append(ventes)
context["charts"] = [ context["charts"] = [
{ {
@ -2460,50 +2504,54 @@ class AccountStatOperation(ScaleMixin, PkUrlMixin, JSONDetailView):
] ]
return context return context
def get_object(self, *args, **kwargs):
obj = super().get_object(*args, **kwargs)
if self.request.user != obj.user:
raise Http404
return obj
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super().dispatch(*args, **kwargs)
# ------------------------ # ------------------------
# Article sales statistics # Article sales statistics
# ------------------------ # ------------------------
ID_PREFIX_ART_LAST = "last_art"
ID_PREFIX_ART_LAST_DAYS = "last_days_art"
ID_PREFIX_ART_LAST_WEEKS = "last_weeks_art"
ID_PREFIX_ART_LAST_MONTHS = "last_months_art"
@method_decorator(teamkfet_required, name="dispatch")
class ArticleStatSalesList(SingleResumeStat): class ArticleStatSalesList(SingleResumeStat):
"""Manifest for sales stats of an article.""" """
Menu pour les statistiques de vente d'un article.
"""
model = Article model = Article
context_object_name = "article"
id_prefix = ID_PREFIX_ART_LAST
nb_default = 2 nb_default = 2
url_stat = "kfet.article.stat.sales" url_stat = "kfet.article.stat.sales"
stats = last_stats_manifest()
@method_decorator(teamkfet_required) def get_stats(self):
def dispatch(self, *args, **kwargs): first_conso = (
return super().dispatch(*args, **kwargs) Operation.objects.filter(article=self.object)
.order_by("group__at")
.values_list("group__at", flat=True)
.first()
)
if first_conso is None:
# Create it in the past, just in case
first_conso = timezone.now() - timedelta(seconds=1)
scales_def = [
("Tout le temps", MonthScale, {"last": True, "begin": first_conso}, False),
("1 an", MonthScale, {"last": True, "n_steps": 12}, False),
("3 mois", WeekScale, {"last": True, "n_steps": 13}, True),
("2 semaines", DayScale, {"last": True, "n_steps": 14}, False),
]
return scale_url_params(scales_def)
@method_decorator(teamkfet_required, name="dispatch")
class ArticleStatSales(ScaleMixin, JSONDetailView): class ArticleStatSales(ScaleMixin, JSONDetailView):
"""Datasets of sales of an article.""" """
Statistiques (JSON) de vente d'un article.
Sépare LIQ et les comptes K-Fêt, et rajoute le total.
"""
model = Article model = Article
context_object_name = "article" context_object_name = "article"
def get_context_data(self, *args, **kwargs): def get_context_data(self, *args, **kwargs):
old_ctx = super().get_context_data(*args, **kwargs) context = super().get_context_data(*args, **kwargs)
context = {"labels": old_ctx["labels"]}
scale = self.scale scale = self.scale
all_purchases = ( all_purchases = (
@ -2516,23 +2564,13 @@ class ArticleStatSales(ScaleMixin, JSONDetailView):
liq_only = all_purchases.filter(group__on_acc__trigramme="LIQ") liq_only = all_purchases.filter(group__on_acc__trigramme="LIQ")
liq_exclude = all_purchases.exclude(group__on_acc__trigramme="LIQ") liq_exclude = all_purchases.exclude(group__on_acc__trigramme="LIQ")
chunks_liq = scale.get_by_chunks( nb_liq = scale.chunkify_qs(
liq_only, field_db="group__at", field_callback=lambda d: d["group__at"] liq_only, field="group__at", aggregate=Sum("article_nb")
) )
chunks_no_liq = scale.get_by_chunks( nb_accounts = scale.chunkify_qs(
liq_exclude, field_db="group__at", field_callback=lambda d: d["group__at"] liq_exclude, field="group__at", aggregate=Sum("article_nb")
) )
nb_ventes = [n1 + n2 for n1, n2 in zip(nb_liq, nb_accounts)]
# Count the operations
nb_ventes = []
nb_accounts = []
nb_liq = []
for chunk_liq, chunk_no_liq in zip(chunks_liq, chunks_no_liq):
sum_accounts = sum(ope["article_nb"] for ope in chunk_no_liq)
sum_liq = sum(ope["article_nb"] for ope in chunk_liq)
nb_ventes.append(sum_accounts + sum_liq)
nb_accounts.append(sum_accounts)
nb_liq.append(sum_liq)
context["charts"] = [ context["charts"] = [
{ {
@ -2548,7 +2586,3 @@ class ArticleStatSales(ScaleMixin, JSONDetailView):
}, },
] ]
return context return context
@method_decorator(teamkfet_required)
def dispatch(self, *args, **kwargs):
return super().dispatch(*args, **kwargs)
View file
@ -9,7 +9,6 @@ source =
kfet kfet
petitscours petitscours
shared shared
utils
omit = omit =
*migrations* *migrations*
*test*.py *test*.py
@ -37,7 +36,7 @@ default_section = THIRDPARTY
force_grid_wrap = 0 force_grid_wrap = 0
include_trailing_comma = true include_trailing_comma = true
known_django = django known_django = django
known_first_party = bda,bds,clubs,cof,events,gestioncof,kfet,petitscours,shared,utils known_first_party = bda,bds,clubs,cof,events,gestioncof,kfet,petitscours,shared
line_length = 88 line_length = 88
multi_line_output = 3 multi_line_output = 3
not_skip = __init__.py not_skip = __init__.py
View file
@ -111,7 +111,7 @@ class TestCaseMixin:
mock_context_manager.return_value.__enter__.return_value = mock_connection mock_context_manager.return_value.__enter__.return_value = mock_connection
patcher = mock.patch( patcher = mock.patch(
"gestioncof.autocomplete.Connection", new=mock_context_manager "shared.views.autocomplete.Connection", new=mock_context_manager
) )
patcher.start() patcher.start()
self.addCleanup(patcher.stop) self.addCleanup(patcher.stop)
View file
@ -0,0 +1,184 @@
from collections import namedtuple
from dal import autocomplete
from django.conf import settings
from django.db.models import Q
if getattr(settings, "LDAP_SERVER_URL", None):
from ldap3 import Connection
else:
# shared.tests.testcases.TestCaseMixin.mockLDAP needs
# Connection to be defined
Connection = None
class SearchUnit:
"""Base class for all the search utilities.
A search unit should implement a `search` method taking a list of keywords as
argument and returning an iterable of search results.
"""
def search(self, _keywords):
raise NotImplementedError(
"Class implementing the SeachUnit interface should implement the search "
"method"
)
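As an illustration only (not part of this commit), a minimal unit satisfying this interface could search an in-memory list:

class StaticSearch(SearchUnit):
    """Toy search unit over a hard-coded list of strings."""

    choices = ["chocolat chaud", "café", "thé"]

    def search(self, keywords):
        # A choice matches iff every keyword occurs in it.
        return [c for c in self.choices if all(kw in c for kw in keywords)]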
# ---
# Model-based search
# ---
class ModelSearch(SearchUnit):
"""Basic search engine for models based on filtering.
The class should be configured through its `model` class attribute: the `search`
method will return a queryset of instances of this model. The `search_fields`
attributes indicates which fields to search in.
Example:
>>> from django.contrib.auth.models import User
>>>
>>> class UserSearch(ModelSearch):
... model = User
... search_fields = ["username", "first_name", "last_name"]
>>>
>>> user_search = UserSearch() # has type ModelSearch[User]
>>> user_search.search(["toto", "foo"]) # returns a queryset of Users
"""
model = None
search_fields = []
def get_queryset_filter(self, keywords):
filter_q = Q()
if not keywords:
return filter_q
for keyword in keywords:
kw_filter = Q()
for field in self.search_fields:
kw_filter |= Q(**{"{}__icontains".format(field): keyword})
filter_q &= kw_filter
return filter_q
def search(self, keywords):
"""Returns the queryset of model instances matching all the keywords.
The semantic of the search is the following: a model instance appears in the
search results iff all of the keywords given as arguments occur in at least one
of the search fields.
"""
return self.model.objects.filter(self.get_queryset_filter(keywords))
class Select2QuerySetView(ModelSearch, autocomplete.Select2QuerySetView):
"""Compatibility layer between ModelSearch and Select2QuerySetView."""
def get_queryset(self):
keywords = self.q.split()
return super().search(keywords)
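A concrete autocomplete endpoint then only needs the two class attributes; a hypothetical view reusing the fields from the ModelSearch docstring above:

from django.contrib.auth.models import User

class UserAutocompleteView(Select2QuerySetView):
    model = User
    search_fields = ["username", "first_name", "last_name"]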
# ---
# LDAP search
# ---
Clipper = namedtuple("Clipper", ["clipper", "fullname"])
class LDAPSearch(SearchUnit):
ldap_server_url = getattr(settings, "LDAP_SERVER_URL", None)
domain_component = "dc=spi,dc=ens,dc=fr"
search_fields = ["cn", "uid"]
def get_ldap_query(self, keywords):
"""Return a search query with the following semantics:
A Clipper appears in the search results iff all of the keywords given as
arguments occur in at least one of the search fields.
"""
# Dumb but safe
keywords = filter(str.isalnum, keywords)
ldap_filters = []
for keyword in keywords:
ldap_filter = "(|{})".format(
"".join(
"({}=*{}*)".format(field, keyword) for field in self.search_fields
)
)
ldap_filters.append(ldap_filter)
return "(&{})".format("".join(ldap_filters))
def search(self, keywords):
"""Return a list of Clipper objects matching all the keywords."""
query = self.get_ldap_query(keywords)
if Connection is None or query == "(&)":
return []
with Connection(self.ldap_server_url) as conn:
conn.search(self.domain_component, query, attributes=self.search_fields)
return [Clipper(entry.uid.value, entry.cn.value) for entry in conn.entries]
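The generated query nests one OR per keyword (over the search fields) inside a global AND, for example:

>>> LDAPSearch().get_ldap_query(["dupont", "jean"])
'(&(|(cn=*dupont*)(uid=*dupont*))(|(cn=*jean*)(uid=*jean*)))'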
# ---
# Composition of autocomplete units
# ---
class Compose:
"""Search with several units and remove duplicate results.
The `search_units` class attribute should be a list of tuples of the form `(name,
uniq_key, search_unit)`.
The `search` method produces a dictionary whose keys are the `name`s given in
`search_units` and whose values are iterables produced by the different search
units.
The `uniq_key`s are used to remove duplicates: for instance, say that search unit
1 has `uniq_key = "username"` and search unit 2 has `uniq_key = "clipper"`, then
search results from unit 2 whose `.clipper` attribute is equal to the
`.username` attribute of some result from unit 1 are omitted.
Typical Example:
>>> from django.contrib.auth.models import User
>>>
>>> class UserSearch(ModelSearch):
... model = User
... search_fields = ["username", "first_name", "last_name"]
>>>
>>> class UserAndClipperSearch(Compose):
... search_units = [
... ("users", "username", UserSearch),
... ("clippers", "clipper", LDAPSearch),
... ]
In this example, clipper accounts that already have an associated user (i.e. with a
username equal to the clipper login) will not appear in the results.
"""
search_units = []
def search(self, keywords):
uniq_results = set()
results = {}
for name, uniq_key, search_unit in self.search_units:
res = search_unit().search(keywords)
res = [r for r in res if getattr(r, uniq_key) not in uniq_results]
uniq_results |= set((getattr(r, uniq_key) for r in res))
results[name] = res
return results
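Typical usage, building on the UserAndClipperSearch sketch from the docstring:

>>> results = UserAndClipperSearch().search(["dupont"])
>>> results["users"]     # queryset of matching User objects
>>> results["clippers"]  # Clipper tuples whose login matches no existing username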
View file
@ -1,25 +0,0 @@
from dal import autocomplete
from django.db.models import Q
class Select2QuerySetView(autocomplete.Select2QuerySetView):
model = None
search_fields = []
def get_queryset_filter(self):
q = self.q
filter_q = Q()
if not q:
return filter_q
words = q.split()
for word in words:
for field in self.search_fields:
filter_q |= Q(**{"{}__icontains".format(field): word})
return filter_q
def get_queryset(self):
return self.model.objects.filter(self.get_queryset_filter())