@@ -16,109 +23,31 @@
{% block main %}
-
- {% for transfergroup in transfergroups %}
-
- {{ transfergroup.at }}
- {{ transfergroup.valid_by.trigramme }}
- {{ transfergroup.comment }}
-
- {% for transfer in transfergroup.transfers.all %}
-
- {{ transfer.amount }} €
- {{ transfer.from_acc.trigramme }}
-
- {{ transfer.to_acc.trigramme }}
-
- {% endfor %}
- {% endfor %}
-
+
diff --git a/kfet/tests/test_views.py b/kfet/tests/test_views.py
index 0a5c4e49..bcd9a9b4 100644
--- a/kfet/tests/test_views.py
+++ b/kfet/tests/test_views.py
@@ -3,7 +3,7 @@ from datetime import datetime, timedelta
from decimal import Decimal
from unittest import mock
-from django.contrib.auth.models import Group
+from django.contrib.auth.models import Group, User
from django.test import Client, TestCase
from django.urls import reverse
from django.utils import timezone
@@ -628,37 +628,51 @@ class AccountStatOperationListViewTests(ViewTestCaseMixin, TestCase):
expected_stats = [
{
- "label": "Derniers mois",
+ "label": "Tout le temps",
"url": {
"path": base_url,
"query": {
- "scale_n_steps": ["7"],
+ "types": ["['purchase']"],
"scale_name": ["month"],
+ "scale_last": ["True"],
+ "scale_begin": [
+ self.accounts["user1"].created_at.isoformat(" ")
+ ],
+ },
+ },
+ },
+ {
+ "label": "1 an",
+ "url": {
+ "path": base_url,
+ "query": {
"types": ["['purchase']"],
+ "scale_n_steps": ["12"],
+ "scale_name": ["month"],
"scale_last": ["True"],
},
},
},
{
- "label": "Dernières semaines",
+ "label": "3 mois",
"url": {
"path": base_url,
"query": {
- "scale_n_steps": ["7"],
+ "types": ["['purchase']"],
+ "scale_n_steps": ["13"],
"scale_name": ["week"],
- "types": ["['purchase']"],
"scale_last": ["True"],
},
},
},
{
- "label": "Derniers jours",
+ "label": "2 semaines",
"url": {
"path": base_url,
"query": {
- "scale_n_steps": ["7"],
- "scale_name": ["day"],
"types": ["['purchase']"],
+ "scale_n_steps": ["14"],
+ "scale_name": ["day"],
"scale_last": ["True"],
},
},
@@ -1524,6 +1538,21 @@ class ArticleStatSalesListViewTests(ViewTestCaseMixin, TestCase):
self.article = Article.objects.create(
name="Article", category=ArticleCategory.objects.create(name="Category")
)
+ checkout = Checkout.objects.create(
+ name="Checkout",
+ created_by=self.accounts["team"],
+ balance=5,
+ valid_from=self.now,
+ valid_to=self.now + timedelta(days=5),
+ )
+
+ self.opegroup = create_operation_group(
+ on_acc=self.accounts["user"],
+ checkout=checkout,
+ content=[
+ {"type": Operation.PURCHASE, "article": self.article, "article_nb": 2},
+ ],
+ )
def test_ok(self):
r = self.client.get(self.url)
@@ -1535,33 +1564,44 @@ class ArticleStatSalesListViewTests(ViewTestCaseMixin, TestCase):
expected_stats = [
{
- "label": "Derniers mois",
+ "label": "Tout le temps",
"url": {
"path": base_url,
"query": {
- "scale_n_steps": ["7"],
+ "scale_name": ["month"],
+ "scale_last": ["True"],
+ "scale_begin": [self.opegroup.at.isoformat(" ")],
+ },
+ },
+ },
+ {
+ "label": "1 an",
+ "url": {
+ "path": base_url,
+ "query": {
+ "scale_n_steps": ["12"],
"scale_name": ["month"],
"scale_last": ["True"],
},
},
},
{
- "label": "Dernières semaines",
+ "label": "3 mois",
"url": {
"path": base_url,
"query": {
- "scale_n_steps": ["7"],
+ "scale_n_steps": ["13"],
"scale_name": ["week"],
"scale_last": ["True"],
},
},
},
{
- "label": "Derniers jours",
+ "label": "2 semaines",
"url": {
"path": base_url,
"query": {
- "scale_n_steps": ["7"],
+ "scale_n_steps": ["14"],
"scale_name": ["day"],
"scale_last": ["True"],
},
@@ -1997,9 +2037,10 @@ class KPsulPerformOperationsViewTests(ViewTestCaseMixin, TestCase):
self.kpsul_consumer_mock.group_send.assert_called_once_with(
"kfet.kpsul",
{
- "opegroups": [
+ "groups": [
{
"add": True,
+ "type": "operation",
"at": mock.ANY,
"amount": Decimal("-5.00"),
"checkout__name": "Checkout",
@@ -2008,7 +2049,7 @@ class KPsulPerformOperationsViewTests(ViewTestCaseMixin, TestCase):
"is_cof": False,
"on_acc__trigramme": "000",
"valid_by__trigramme": None,
- "opes": [
+ "entries": [
{
"id": operation.pk,
"addcost_amount": None,
@@ -2269,9 +2310,10 @@ class KPsulPerformOperationsViewTests(ViewTestCaseMixin, TestCase):
self.kpsul_consumer_mock.group_send.assert_called_once_with(
"kfet.kpsul",
{
- "opegroups": [
+ "groups": [
{
"add": True,
+ "type": "operation",
"at": mock.ANY,
"amount": Decimal("10.75"),
"checkout__name": "Checkout",
@@ -2280,7 +2322,7 @@ class KPsulPerformOperationsViewTests(ViewTestCaseMixin, TestCase):
"is_cof": False,
"on_acc__trigramme": "000",
"valid_by__trigramme": "100",
- "opes": [
+ "entries": [
{
"id": operation.pk,
"addcost_amount": None,
@@ -2443,9 +2485,10 @@ class KPsulPerformOperationsViewTests(ViewTestCaseMixin, TestCase):
self.kpsul_consumer_mock.group_send.assert_called_once_with(
"kfet.kpsul",
{
- "opegroups": [
+ "groups": [
{
"add": True,
+ "type": "operation",
"at": mock.ANY,
"amount": Decimal("-10.75"),
"checkout__name": "Checkout",
@@ -2454,7 +2497,7 @@ class KPsulPerformOperationsViewTests(ViewTestCaseMixin, TestCase):
"is_cof": False,
"on_acc__trigramme": "000",
"valid_by__trigramme": None,
- "opes": [
+ "entries": [
{
"id": operation.pk,
"addcost_amount": None,
@@ -2601,9 +2644,10 @@ class KPsulPerformOperationsViewTests(ViewTestCaseMixin, TestCase):
self.kpsul_consumer_mock.group_send.assert_called_once_with(
"kfet.kpsul",
{
- "opegroups": [
+ "groups": [
{
"add": True,
+ "type": "operation",
"at": mock.ANY,
"amount": Decimal("10.75"),
"checkout__name": "Checkout",
@@ -2612,7 +2656,7 @@ class KPsulPerformOperationsViewTests(ViewTestCaseMixin, TestCase):
"is_cof": False,
"on_acc__trigramme": "000",
"valid_by__trigramme": "100",
- "opes": [
+ "entries": [
{
"id": operation.pk,
"addcost_amount": None,
@@ -2712,9 +2756,9 @@ class KPsulPerformOperationsViewTests(ViewTestCaseMixin, TestCase):
self.checkout.refresh_from_db()
self.assertEqual(self.checkout.balance, Decimal("100.00"))
- ws_data_ope = self.kpsul_consumer_mock.group_send.call_args[0][1]["opegroups"][
- 0
- ]["opes"][0]
+ ws_data_ope = self.kpsul_consumer_mock.group_send.call_args[0][1]["groups"][0][
+ "entries"
+ ][0]
self.assertEqual(ws_data_ope["addcost_amount"], Decimal("1.00"))
self.assertEqual(ws_data_ope["addcost_for__trigramme"], "ADD")
@@ -2752,9 +2796,9 @@ class KPsulPerformOperationsViewTests(ViewTestCaseMixin, TestCase):
self.checkout.refresh_from_db()
self.assertEqual(self.checkout.balance, Decimal("100.00"))
- ws_data_ope = self.kpsul_consumer_mock.group_send.call_args[0][1]["opegroups"][
- 0
- ]["opes"][0]
+ ws_data_ope = self.kpsul_consumer_mock.group_send.call_args[0][1]["groups"][0][
+ "entries"
+ ][0]
self.assertEqual(ws_data_ope["addcost_amount"], Decimal("0.80"))
self.assertEqual(ws_data_ope["addcost_for__trigramme"], "ADD")
@@ -2790,9 +2834,9 @@ class KPsulPerformOperationsViewTests(ViewTestCaseMixin, TestCase):
self.checkout.refresh_from_db()
self.assertEqual(self.checkout.balance, Decimal("106.00"))
- ws_data_ope = self.kpsul_consumer_mock.group_send.call_args[0][1]["opegroups"][
- 0
- ]["opes"][0]
+ ws_data_ope = self.kpsul_consumer_mock.group_send.call_args[0][1]["groups"][0][
+ "entries"
+ ][0]
self.assertEqual(ws_data_ope["addcost_amount"], Decimal("1.00"))
self.assertEqual(ws_data_ope["addcost_for__trigramme"], "ADD")
@@ -2826,9 +2870,9 @@ class KPsulPerformOperationsViewTests(ViewTestCaseMixin, TestCase):
self.accounts["addcost"].refresh_from_db()
self.assertEqual(self.accounts["addcost"].balance, Decimal("15.00"))
- ws_data_ope = self.kpsul_consumer_mock.group_send.call_args[0][1]["opegroups"][
- 0
- ]["opes"][0]
+ ws_data_ope = self.kpsul_consumer_mock.group_send.call_args[0][1]["groups"][0][
+ "entries"
+ ][0]
self.assertEqual(ws_data_ope["addcost_amount"], None)
self.assertEqual(ws_data_ope["addcost_for__trigramme"], None)
@@ -2861,9 +2905,9 @@ class KPsulPerformOperationsViewTests(ViewTestCaseMixin, TestCase):
self.accounts["addcost"].refresh_from_db()
self.assertEqual(self.accounts["addcost"].balance, Decimal("0.00"))
- ws_data_ope = self.kpsul_consumer_mock.group_send.call_args[0][1]["opegroups"][
- 0
- ]["opes"][0]
+ ws_data_ope = self.kpsul_consumer_mock.group_send.call_args[0][1]["groups"][0][
+ "entries"
+ ][0]
self.assertEqual(ws_data_ope["addcost_amount"], None)
self.assertEqual(ws_data_ope["addcost_for__trigramme"], None)
@@ -3170,9 +3214,10 @@ class KPsulPerformOperationsViewTests(ViewTestCaseMixin, TestCase):
self.kpsul_consumer_mock.group_send.assert_called_once_with(
"kfet.kpsul",
{
- "opegroups": [
+ "groups": [
{
"add": True,
+ "type": "operation",
"at": mock.ANY,
"amount": Decimal("-9.00"),
"checkout__name": "Checkout",
@@ -3181,7 +3226,7 @@ class KPsulPerformOperationsViewTests(ViewTestCaseMixin, TestCase):
"is_cof": False,
"on_acc__trigramme": "000",
"valid_by__trigramme": None,
- "opes": [
+ "entries": [
{
"id": operation_list[0].pk,
"addcost_amount": None,
@@ -3234,7 +3279,7 @@ class KPsulCancelOperationsViewTests(ViewTestCaseMixin, TestCase):
"""
- url_name = "kfet.kpsul.cancel_operations"
+ url_name = "kfet.operations.cancel"
url_expected = "/k-fet/k-psul/cancel_operations"
http_methods = ["POST"]
@@ -3353,7 +3398,26 @@ class KPsulCancelOperationsViewTests(ViewTestCaseMixin, TestCase):
)
self.assertDictEqual(
- json_data, {"canceled": [operation.pk], "errors": {}, "warnings": {}}
+ json_data,
+ {
+ "canceled": [
+ {
+ "id": operation.id,
+ # l'encodage des dates en JSON est relou...
+ "canceled_at": mock.ANY,
+ "canceled_by__trigramme": None,
+ }
+ ],
+ "errors": {},
+ "warnings": {},
+ "opegroups_to_update": [
+ {
+ "id": group.pk,
+ "amount": str(group.amount),
+ "is_cof": group.is_cof,
+ }
+ ],
+ },
)
self.account.refresh_from_db()
@@ -3365,26 +3429,7 @@ class KPsulCancelOperationsViewTests(ViewTestCaseMixin, TestCase):
self.kpsul_consumer_mock.group_send.assert_called_with(
"kfet.kpsul",
- {
- "opegroups": [
- {
- "cancellation": True,
- "id": group.pk,
- "amount": Decimal("0.00"),
- "is_cof": False,
- }
- ],
- "opes": [
- {
- "cancellation": True,
- "id": operation.pk,
- "canceled_by__trigramme": None,
- "canceled_at": self.now + timedelta(seconds=15),
- }
- ],
- "checkouts": [],
- "articles": [{"id": self.article.pk, "stock": 22}],
- },
+ {"checkouts": [], "articles": [{"id": self.article.pk, "stock": 22}]},
)
def test_purchase_with_addcost(self):
@@ -3541,7 +3586,26 @@ class KPsulCancelOperationsViewTests(ViewTestCaseMixin, TestCase):
)
self.assertDictEqual(
- json_data, {"canceled": [operation.pk], "errors": {}, "warnings": {}}
+ json_data,
+ {
+ "canceled": [
+ {
+ "id": operation.id,
+ # l'encodage des dates en JSON est relou...
+ "canceled_at": mock.ANY,
+ "canceled_by__trigramme": None,
+ }
+ ],
+ "errors": {},
+ "warnings": {},
+ "opegroups_to_update": [
+ {
+ "id": group.pk,
+ "amount": str(group.amount),
+ "is_cof": group.is_cof,
+ }
+ ],
+ },
)
self.account.refresh_from_db()
@@ -3554,22 +3618,6 @@ class KPsulCancelOperationsViewTests(ViewTestCaseMixin, TestCase):
self.kpsul_consumer_mock.group_send.assert_called_with(
"kfet.kpsul",
{
- "opegroups": [
- {
- "cancellation": True,
- "id": group.pk,
- "amount": Decimal("0.00"),
- "is_cof": False,
- }
- ],
- "opes": [
- {
- "cancellation": True,
- "id": operation.pk,
- "canceled_by__trigramme": None,
- "canceled_at": self.now + timedelta(seconds=15),
- }
- ],
"checkouts": [{"id": self.checkout.pk, "balance": Decimal("89.25")}],
"articles": [],
},
@@ -3625,7 +3673,26 @@ class KPsulCancelOperationsViewTests(ViewTestCaseMixin, TestCase):
)
self.assertDictEqual(
- json_data, {"canceled": [operation.pk], "errors": {}, "warnings": {}}
+ json_data,
+ {
+ "canceled": [
+ {
+ "id": operation.id,
+ # l'encodage des dates en JSON est relou...
+ "canceled_at": mock.ANY,
+ "canceled_by__trigramme": None,
+ }
+ ],
+ "errors": {},
+ "warnings": {},
+ "opegroups_to_update": [
+ {
+ "id": group.pk,
+ "amount": str(group.amount),
+ "is_cof": group.is_cof,
+ }
+ ],
+ },
)
self.account.refresh_from_db()
@@ -3638,22 +3705,6 @@ class KPsulCancelOperationsViewTests(ViewTestCaseMixin, TestCase):
self.kpsul_consumer_mock.group_send.assert_called_with(
"kfet.kpsul",
{
- "opegroups": [
- {
- "cancellation": True,
- "id": group.pk,
- "amount": Decimal("0.00"),
- "is_cof": False,
- }
- ],
- "opes": [
- {
- "cancellation": True,
- "id": operation.pk,
- "canceled_by__trigramme": None,
- "canceled_at": self.now + timedelta(seconds=15),
- }
- ],
"checkouts": [{"id": self.checkout.pk, "balance": Decimal("110.75")}],
"articles": [],
},
@@ -3709,7 +3760,26 @@ class KPsulCancelOperationsViewTests(ViewTestCaseMixin, TestCase):
)
self.assertDictEqual(
- json_data, {"canceled": [operation.pk], "errors": {}, "warnings": {}}
+ json_data,
+ {
+ "canceled": [
+ {
+ "id": operation.id,
+ # l'encodage des dates en JSON est relou...
+ "canceled_at": mock.ANY,
+ "canceled_by__trigramme": None,
+ }
+ ],
+ "errors": {},
+ "warnings": {},
+ "opegroups_to_update": [
+ {
+ "id": group.pk,
+ "amount": str(group.amount),
+ "is_cof": group.is_cof,
+ }
+ ],
+ },
)
self.account.refresh_from_db()
@@ -3720,27 +3790,7 @@ class KPsulCancelOperationsViewTests(ViewTestCaseMixin, TestCase):
self.assertEqual(self.checkout.balance, Decimal("100.00"))
self.kpsul_consumer_mock.group_send.assert_called_with(
- "kfet.kpsul",
- {
- "opegroups": [
- {
- "cancellation": True,
- "id": group.pk,
- "amount": Decimal("0.00"),
- "is_cof": False,
- }
- ],
- "opes": [
- {
- "cancellation": True,
- "id": operation.pk,
- "canceled_by__trigramme": None,
- "canceled_at": self.now + timedelta(seconds=15),
- }
- ],
- "checkouts": [],
- "articles": [],
- },
+ "kfet.kpsul", {"checkouts": [], "articles": []},
)
@mock.patch("django.utils.timezone.now")
@@ -3961,13 +4011,33 @@ class KPsulCancelOperationsViewTests(ViewTestCaseMixin, TestCase):
group.refresh_from_db()
self.assertEqual(group.amount, Decimal("10.75"))
self.assertEqual(group.opes.exclude(canceled_at=None).count(), 3)
-
+ self.maxDiff = None
self.assertDictEqual(
json_data,
{
- "canceled": [operation1.pk, operation2.pk],
- "warnings": {"already_canceled": [operation3.pk]},
+ "canceled": [
+ {
+ "id": operation1.id,
+ # l'encodage des dates en JSON est relou...
+ "canceled_at": mock.ANY,
+ "canceled_by__trigramme": None,
+ },
+ {
+ "id": operation2.id,
+ # l'encodage des dates en JSON est relou...
+ "canceled_at": mock.ANY,
+ "canceled_by__trigramme": None,
+ },
+ ],
"errors": {},
+ "warnings": {"already_canceled": [operation3.pk]},
+ "opegroups_to_update": [
+ {
+ "id": group.pk,
+ "amount": str(group.amount),
+ "is_cof": group.is_cof,
+ }
+ ],
},
)
@@ -4121,12 +4191,18 @@ class HistoryJSONViewTests(ViewTestCaseMixin, TestCase):
url_expected = "/k-fet/history.json"
auth_user = "user"
- auth_forbidden = [None]
+ auth_forbidden = [None, "noaccount"]
def test_ok(self):
r = self.client.post(self.url)
self.assertEqual(r.status_code, 200)
+ def get_users_extra(self):
+ noaccount = User.objects.create(username="noaccount")
+ noaccount.set_password("noaccount")
+ noaccount.save()
+ return {"noaccount": noaccount}
+
class AccountReadJSONViewTests(ViewTestCaseMixin, TestCase):
url_name = "kfet.account.read.json"
diff --git a/kfet/urls.py b/kfet/urls.py
index 03c174f3..12c06d26 100644
--- a/kfet/urls.py
+++ b/kfet/urls.py
@@ -219,8 +219,8 @@ urlpatterns = [
),
path(
"k-psul/cancel_operations",
- views.kpsul_cancel_operations,
- name="kfet.kpsul.cancel_operations",
+ views.cancel_operations,
+ name="kfet.operations.cancel",
),
path(
"k-psul/articles_data",
@@ -252,7 +252,7 @@ urlpatterns = [
# -----
# Transfers urls
# -----
- path("transfers/", views.transfers, name="kfet.transfers"),
+ path("transfers/", views.TransferView.as_view(), name="kfet.transfers"),
path("transfers/new", views.transfers_create, name="kfet.transfers.create"),
path("transfers/perform", views.perform_transfers, name="kfet.transfers.perform"),
path("transfers/cancel", views.cancel_transfers, name="kfet.transfers.cancel"),
diff --git a/kfet/views.py b/kfet/views.py
index 655e856d..b6c49f72 100644
--- a/kfet/views.py
+++ b/kfet/views.py
@@ -2,6 +2,7 @@ import ast
import heapq
import statistics
from collections import defaultdict
+from datetime import timedelta
from decimal import Decimal
from typing import List
from urllib.parse import urlencode
@@ -12,7 +13,7 @@ from django.contrib.auth.mixins import PermissionRequiredMixin
from django.contrib.auth.models import Permission, User
from django.contrib.messages.views import SuccessMessageMixin
from django.db import transaction
-from django.db.models import Count, F, Prefetch, Sum
+from django.db.models import Count, F, Prefetch, Q, Sum
from django.forms import formset_factory
from django.http import Http404, JsonResponse
from django.shortcuts import get_object_or_404, redirect, render
@@ -76,7 +77,7 @@ from kfet.models import (
Transfer,
TransferGroup,
)
-from kfet.statistic import ScaleMixin, WeekScale, last_stats_manifest
+from kfet.statistic import DayScale, MonthScale, ScaleMixin, WeekScale, scale_url_params
from .auth import KFET_GENERIC_TRIGRAMME
from .auth.views import ( # noqa
@@ -328,7 +329,9 @@ def account_update(request, trigramme):
account = get_object_or_404(Account, trigramme=trigramme)
# Checking permissions
- if not request.user.has_perm("kfet.is_team") and request.user != account.user:
+ if not account.editable or (
+ not request.user.has_perm("kfet.is_team") and request.user != account.user
+ ):
raise Http404
user_info_form = UserInfoForm(instance=account.user)
@@ -911,6 +914,8 @@ def kpsul_get_settings(request):
@teamkfet_required
def account_read_json(request, trigramme):
account = get_object_or_404(Account, trigramme=trigramme)
+ if not account.readable:
+ raise Http404
data = {
"id": account.pk,
"name": account.name,
@@ -1156,9 +1161,10 @@ def kpsul_perform_operations(request):
# Websocket data
websocket_data = {}
- websocket_data["opegroups"] = [
+ websocket_data["groups"] = [
{
"add": True,
+ "type": "operation",
"id": operationgroup.pk,
"amount": operationgroup.amount,
"checkout__name": operationgroup.checkout.name,
@@ -1169,7 +1175,7 @@ def kpsul_perform_operations(request):
operationgroup.valid_by and operationgroup.valid_by.trigramme or None
),
"on_acc__trigramme": operationgroup.on_acc.trigramme,
- "opes": [],
+ "entries": [],
}
]
for operation in operations:
@@ -1187,7 +1193,7 @@ def kpsul_perform_operations(request):
"canceled_by__trigramme": None,
"canceled_at": None,
}
- websocket_data["opegroups"][0]["opes"].append(ope_data)
+ websocket_data["groups"][0]["entries"].append(ope_data)
# Need refresh from db cause we used update on queryset
operationgroup.checkout.refresh_from_db()
websocket_data["checkouts"] = [
@@ -1207,7 +1213,7 @@ def kpsul_perform_operations(request):
@teamkfet_required
@kfet_password_auth
-def kpsul_cancel_operations(request):
+def cancel_operations(request):
# Pour la réponse
data = {"canceled": [], "warnings": {}, "errors": {}}
@@ -1363,7 +1369,11 @@ def kpsul_cancel_operations(request):
.filter(pk__in=opegroups_pk)
.order_by("pk")
)
- opes = sorted(opes)
+ opes = (
+ Operation.objects.values("id", "canceled_at", "canceled_by__trigramme")
+ .filter(pk__in=opes)
+ .order_by("pk")
+ )
checkouts_pk = [checkout.pk for checkout in to_checkouts_balances]
checkouts = (
Checkout.objects.values("id", "balance")
@@ -1374,27 +1384,7 @@ def kpsul_cancel_operations(request):
articles = Article.objects.values("id", "stock").filter(pk__in=articles_pk)
# Websocket data
- websocket_data = {"opegroups": [], "opes": [], "checkouts": [], "articles": []}
-
- for opegroup in opegroups:
- websocket_data["opegroups"].append(
- {
- "cancellation": True,
- "id": opegroup["id"],
- "amount": opegroup["amount"],
- "is_cof": opegroup["is_cof"],
- }
- )
- canceled_by__trigramme = canceled_by and canceled_by.trigramme or None
- for ope in opes:
- websocket_data["opes"].append(
- {
- "cancellation": True,
- "id": ope,
- "canceled_by__trigramme": canceled_by__trigramme,
- "canceled_at": canceled_at,
- }
- )
+ websocket_data = {"checkouts": [], "articles": []}
for checkout in checkouts:
websocket_data["checkouts"].append(
{"id": checkout["id"], "balance": checkout["balance"]}
@@ -1405,7 +1395,8 @@ def kpsul_cancel_operations(request):
)
consumers.KPsul.group_send("kfet.kpsul", websocket_data)
- data["canceled"] = opes
+ data["canceled"] = list(opes)
+ data["opegroups_to_update"] = list(opegroups)
if opes_already_canceled:
data["warnings"]["already_canceled"] = opes_already_canceled
return JsonResponse(data)
@@ -1416,49 +1407,86 @@ def history_json(request):
# Récupération des paramètres
from_date = request.POST.get("from", None)
to_date = request.POST.get("to", None)
- limit = request.POST.get("limit", None)
checkouts = request.POST.getlist("checkouts[]", None)
accounts = request.POST.getlist("accounts[]", None)
+ transfers_only = request.POST.get("transfersonly", False)
+ opes_only = request.POST.get("opesonly", False)
+
+ # Construction de la requête (sur les transferts) pour le prefetch
+
+ transfer_queryset_prefetch = Transfer.objects.select_related(
+ "from_acc", "to_acc", "canceled_by"
+ )
+
+ # Le check sur les comptes est dans le prefetch pour les transferts
+ if accounts:
+ transfer_queryset_prefetch = transfer_queryset_prefetch.filter(
+ Q(from_acc__in=accounts) | Q(to_acc__in=accounts)
+ )
+
+ if not request.user.has_perm("kfet.is_team"):
+ try:
+ acc = request.user.profile.account_kfet
+ transfer_queryset_prefetch = transfer_queryset_prefetch.filter(
+ Q(from_acc=acc) | Q(to_acc=acc)
+ )
+ except Account.DoesNotExist:
+ return JsonResponse({}, status=403)
+
+ transfer_prefetch = Prefetch(
+ "transfers", queryset=transfer_queryset_prefetch, to_attr="filtered_transfers"
+ )
# Construction de la requête (sur les opérations) pour le prefetch
- queryset_prefetch = Operation.objects.select_related(
+ ope_queryset_prefetch = Operation.objects.select_related(
"article", "canceled_by", "addcost_for"
)
+ ope_prefetch = Prefetch("opes", queryset=ope_queryset_prefetch)
# Construction de la requête principale
opegroups = (
- OperationGroup.objects.prefetch_related(
- Prefetch("opes", queryset=queryset_prefetch)
- )
+ OperationGroup.objects.prefetch_related(ope_prefetch)
.select_related("on_acc", "valid_by")
.order_by("at")
)
+ transfergroups = (
+ TransferGroup.objects.prefetch_related(transfer_prefetch)
+ .select_related("valid_by")
+ .order_by("at")
+ )
+
# Application des filtres
if from_date:
opegroups = opegroups.filter(at__gte=from_date)
+ transfergroups = transfergroups.filter(at__gte=from_date)
if to_date:
opegroups = opegroups.filter(at__lt=to_date)
+ transfergroups = transfergroups.filter(at__lt=to_date)
if checkouts:
- opegroups = opegroups.filter(checkout_id__in=checkouts)
+ opegroups = opegroups.filter(checkout__in=checkouts)
+ transfergroups = TransferGroup.objects.none()
+ if transfers_only:
+ opegroups = OperationGroup.objects.none()
+ if opes_only:
+ transfergroups = TransferGroup.objects.none()
if accounts:
- opegroups = opegroups.filter(on_acc_id__in=accounts)
+ opegroups = opegroups.filter(on_acc__in=accounts)
# Un non-membre de l'équipe n'a que accès à son historique
if not request.user.has_perm("kfet.is_team"):
opegroups = opegroups.filter(on_acc=request.user.profile.account_kfet)
- if limit:
- opegroups = opegroups[:limit]
# Construction de la réponse
- opegroups_list = []
+ history_groups = []
for opegroup in opegroups:
opegroup_dict = {
+ "type": "operation",
"id": opegroup.id,
"amount": opegroup.amount,
"at": opegroup.at,
"checkout_id": opegroup.checkout_id,
"is_cof": opegroup.is_cof,
"comment": opegroup.comment,
- "opes": [],
+ "entries": [],
"on_acc__trigramme": opegroup.on_acc and opegroup.on_acc.trigramme or None,
}
if request.user.has_perm("kfet.is_team"):
@@ -1482,9 +1510,40 @@ def history_json(request):
ope_dict["canceled_by__trigramme"] = (
ope.canceled_by and ope.canceled_by.trigramme or None
)
- opegroup_dict["opes"].append(ope_dict)
- opegroups_list.append(opegroup_dict)
- return JsonResponse({"opegroups": opegroups_list})
+ opegroup_dict["entries"].append(ope_dict)
+ history_groups.append(opegroup_dict)
+ for transfergroup in transfergroups:
+ if transfergroup.filtered_transfers:
+ transfergroup_dict = {
+ "type": "transfer",
+ "id": transfergroup.id,
+ "at": transfergroup.at,
+ "comment": transfergroup.comment,
+ "entries": [],
+ }
+ if request.user.has_perm("kfet.is_team"):
+ transfergroup_dict["valid_by__trigramme"] = (
+ transfergroup.valid_by and transfergroup.valid_by.trigramme or None
+ )
+
+ for transfer in transfergroup.filtered_transfers:
+ transfer_dict = {
+ "id": transfer.id,
+ "amount": transfer.amount,
+ "canceled_at": transfer.canceled_at,
+ "from_acc": transfer.from_acc.trigramme,
+ "to_acc": transfer.to_acc.trigramme,
+ }
+ if request.user.has_perm("kfet.is_team"):
+ transfer_dict["canceled_by__trigramme"] = (
+ transfer.canceled_by and transfer.canceled_by.trigramme or None
+ )
+ transfergroup_dict["entries"].append(transfer_dict)
+ history_groups.append(transfergroup_dict)
+
+ history_groups.sort(key=lambda group: group["at"])
+
+ return JsonResponse({"groups": history_groups})
@teamkfet_required
@@ -1544,18 +1603,9 @@ config_update = permission_required("kfet.change_config")(SettingsUpdate.as_view
# -----
-@teamkfet_required
-def transfers(request):
- transfers_pre = Prefetch(
- "transfers", queryset=(Transfer.objects.select_related("from_acc", "to_acc"))
- )
-
- transfergroups = (
- TransferGroup.objects.select_related("valid_by")
- .prefetch_related(transfers_pre)
- .order_by("-at")
- )
- return render(request, "kfet/transfers.html", {"transfergroups": transfergroups})
+@method_decorator(teamkfet_required, name="dispatch")
+class TransferView(TemplateView):
+ template_name = "kfet/transfers.html"
@teamkfet_required
@@ -1746,7 +1796,12 @@ def cancel_transfers(request):
elif hasattr(account, "negative") and not account.negative.balance_offset:
account.negative.delete()
- data["canceled"] = transfers
+ transfers = (
+ Transfer.objects.values("id", "canceled_at", "canceled_by__trigramme")
+ .filter(pk__in=transfers)
+ .order_by("pk")
+ )
+ data["canceled"] = list(transfers)
if transfers_already_canceled:
data["warnings"]["already_canceled"] = transfers_already_canceled
return JsonResponse(data)
@@ -2145,7 +2200,7 @@ class SupplierUpdate(SuccessMessageMixin, UpdateView):
# Vues génériques
# ---------------
# source : docs.djangoproject.com/fr/1.10/topics/class-based-views/mixins/
-class JSONResponseMixin(object):
+class JSONResponseMixin:
"""
A mixin that can be used to render a JSON response.
"""
@@ -2174,34 +2229,39 @@ class JSONDetailView(JSONResponseMixin, BaseDetailView):
return self.render_to_json_response(context)
-class PkUrlMixin(object):
- def get_object(self, *args, **kwargs):
- get_by = self.kwargs.get(self.pk_url_kwarg)
- return get_object_or_404(self.model, **{self.pk_url_kwarg: get_by})
-
-
class SingleResumeStat(JSONDetailView):
- """Manifest for a kind of a stat about an object.
+ """
+ Génère l'interface de sélection pour les statistiques d'un compte/article.
+ L'interface est constituée d'une série de boutons, qui récupèrent et graphent
+ des statistiques du même type, sur le même objet mais avec des arguments différents.
- Returns JSON whose payload is an array containing descriptions of a stat:
- url to retrieve data, label, ...
+ Attributs :
+ - url_stat : URL où récupérer les statistiques
+ - stats : liste de dictionnaires avec les clés suivantes :
+ - label : texte du bouton
+ - url_params : paramètres GET à rajouter à `url_stat`
+ - default : si `True`, graphe à montrer par défaut
+ On peut aussi définir `stats` dynamiquement, via la fonction `get_stats`.
"""
- id_prefix = ""
- nb_default = 0
-
- stats = []
url_stat = None
+ stats = []
+
+ def get_stats(self):
+ return self.stats
def get_context_data(self, **kwargs):
# On n'hérite pas
- object_id = self.object.id
context = {}
stats = []
- prefix = "{}_{}".format(self.id_prefix, object_id)
- for i, stat_def in enumerate(self.stats):
+ # On peut avoir récupéré self.object via pk ou slug
+ if self.pk_url_kwarg in self.kwargs:
url_pk = getattr(self.object, self.pk_url_kwarg)
+ else:
+ url_pk = getattr(self.object, self.slug_url_kwarg)
+
+ for stat_def in self.get_stats():
url_params_d = stat_def.get("url_params", {})
if len(url_params_d) > 0:
url_params = "?{}".format(urlencode(url_params_d))
@@ -2210,42 +2270,21 @@ class SingleResumeStat(JSONDetailView):
stats.append(
{
"label": stat_def["label"],
- "btn": "btn_{}_{}".format(prefix, i),
"url": "{url}{params}".format(
url=reverse(self.url_stat, args=[url_pk]), params=url_params
),
+ "default": stat_def.get("default", False),
}
)
- context["id_prefix"] = prefix
- context["content_id"] = "content_%s" % prefix
context["stats"] = stats
- context["default_stat"] = self.nb_default
- context["object_id"] = object_id
return context
-# -----------------------
-# Evolution Balance perso
-# -----------------------
-ID_PREFIX_ACC_BALANCE = "balance_acc"
-
-
-class AccountStatBalanceList(PkUrlMixin, SingleResumeStat):
- """Manifest for balance stats of an account."""
-
- model = Account
- context_object_name = "account"
- pk_url_kwarg = "trigramme"
- url_stat = "kfet.account.stat.balance"
- id_prefix = ID_PREFIX_ACC_BALANCE
- stats = [
- {"label": "Tout le temps"},
- {"label": "1 an", "url_params": {"last_days": 365}},
- {"label": "6 mois", "url_params": {"last_days": 183}},
- {"label": "3 mois", "url_params": {"last_days": 90}},
- {"label": "30 jours", "url_params": {"last_days": 30}},
- ]
- nb_default = 0
+class UserAccountMixin:
+ """
+ Mixin qui vérifie que le compte traité par la vue est celui de l'utilisateur·ice
+ actuel·le. Dans le cas contraire, renvoie un Http404.
+ """
def get_object(self, *args, **kwargs):
obj = super().get_object(*args, **kwargs)
@@ -2253,21 +2292,41 @@ class AccountStatBalanceList(PkUrlMixin, SingleResumeStat):
raise Http404
return obj
- @method_decorator(login_required)
- def dispatch(self, *args, **kwargs):
- return super().dispatch(*args, **kwargs)
+
+# -----------------------
+# Evolution Balance perso
+# -----------------------
-class AccountStatBalance(PkUrlMixin, JSONDetailView):
- """Datasets of balance of an account.
-
- Operations and Transfers are taken into account.
-
+@method_decorator(login_required, name="dispatch")
+class AccountStatBalanceList(UserAccountMixin, SingleResumeStat):
+ """
+ Menu général pour l'historique de balance d'un compte
"""
model = Account
- pk_url_kwarg = "trigramme"
- context_object_name = "account"
+ slug_url_kwarg = "trigramme"
+ slug_field = "trigramme"
+ url_stat = "kfet.account.stat.balance"
+ stats = [
+ {"label": "Tout le temps"},
+ {"label": "1 an", "url_params": {"last_days": 365}},
+ {"label": "6 mois", "url_params": {"last_days": 183}},
+ {"label": "3 mois", "url_params": {"last_days": 90}, "default": True},
+ {"label": "30 jours", "url_params": {"last_days": 30}},
+ ]
+
+
+@method_decorator(login_required, name="dispatch")
+class AccountStatBalance(UserAccountMixin, JSONDetailView):
+ """
+ Statistiques (JSON) d'historique de balance d'un compte.
+ Prend en compte les opérations et transferts sur la période donnée.
+ """
+
+ model = Account
+ slug_url_kwarg = "trigramme"
+ slug_field = "trigramme"
def get_changes_list(self, last_days=None, begin_date=None, end_date=None):
account = self.object
@@ -2366,57 +2425,50 @@ class AccountStatBalance(PkUrlMixin, JSONDetailView):
# TODO: offset
return context
- def get_object(self, *args, **kwargs):
- obj = super().get_object(*args, **kwargs)
- if self.request.user != obj.user:
- raise Http404
- return obj
-
- @method_decorator(login_required)
- def dispatch(self, *args, **kwargs):
- return super().dispatch(*args, **kwargs)
-
# ------------------------
# Consommation personnelle
# ------------------------
-ID_PREFIX_ACC_LAST = "last_acc"
-ID_PREFIX_ACC_LAST_DAYS = "last_days_acc"
-ID_PREFIX_ACC_LAST_WEEKS = "last_weeks_acc"
-ID_PREFIX_ACC_LAST_MONTHS = "last_months_acc"
-class AccountStatOperationList(PkUrlMixin, SingleResumeStat):
- """Manifest for operations stats of an account."""
+@method_decorator(login_required, name="dispatch")
+class AccountStatOperationList(UserAccountMixin, SingleResumeStat):
+ """
+ Menu général pour l'historique de consommation d'un compte
+ """
model = Account
- context_object_name = "account"
- pk_url_kwarg = "trigramme"
- id_prefix = ID_PREFIX_ACC_LAST
- nb_default = 2
- stats = last_stats_manifest(types=[Operation.PURCHASE])
+ slug_url_kwarg = "trigramme"
+ slug_field = "trigramme"
url_stat = "kfet.account.stat.operation"
- def get_object(self, *args, **kwargs):
- obj = super().get_object(*args, **kwargs)
- if self.request.user != obj.user:
- raise Http404
- return obj
+ def get_stats(self):
+ scales_def = [
+ (
+ "Tout le temps",
+ MonthScale,
+ {"last": True, "begin": self.object.created_at},
+ False,
+ ),
+ ("1 an", MonthScale, {"last": True, "n_steps": 12}, False),
+ ("3 mois", WeekScale, {"last": True, "n_steps": 13}, True),
+ ("2 semaines", DayScale, {"last": True, "n_steps": 14}, False),
+ ]
- @method_decorator(login_required)
- def dispatch(self, *args, **kwargs):
- return super().dispatch(*args, **kwargs)
+ return scale_url_params(scales_def, types=[Operation.PURCHASE])
-class AccountStatOperation(ScaleMixin, PkUrlMixin, JSONDetailView):
- """Datasets of operations of an account."""
+@method_decorator(login_required, name="dispatch")
+class AccountStatOperation(UserAccountMixin, ScaleMixin, JSONDetailView):
+ """
+ Statistiques (JSON) de consommation (nb d'items achetés) d'un compte.
+ """
model = Account
- pk_url_kwarg = "trigramme"
- context_object_name = "account"
- id_prefix = ""
+ slug_url_kwarg = "trigramme"
+ slug_field = "trigramme"
- def get_operations(self, scale, types=None):
+ def get_operations(self, types=None):
# On selectionne les opérations qui correspondent
# à l'article en question et qui ne sont pas annulées
# puis on choisi pour chaques intervalle les opérations
@@ -2428,28 +2480,20 @@ class AccountStatOperation(ScaleMixin, PkUrlMixin, JSONDetailView):
)
if types is not None:
all_operations = all_operations.filter(type__in=types)
- chunks = scale.get_by_chunks(
- all_operations,
- field_db="group__at",
- field_callback=(lambda d: d["group__at"]),
- )
- return chunks
+ return all_operations
def get_context_data(self, *args, **kwargs):
- old_ctx = super().get_context_data(*args, **kwargs)
- context = {"labels": old_ctx["labels"]}
- scale = self.scale
+ context = super().get_context_data(*args, **kwargs)
types = self.request.GET.get("types", None)
if types is not None:
types = ast.literal_eval(types)
- operations = self.get_operations(types=types, scale=scale)
+ operations = self.get_operations(types=types)
# On compte les opérations
- nb_ventes = []
- for chunk in operations:
- ventes = sum(ope["article_nb"] for ope in chunk)
- nb_ventes.append(ventes)
+ nb_ventes = self.scale.chunkify_qs(
+ operations, field="group__at", aggregate=Sum("article_nb")
+ )
context["charts"] = [
{
@@ -2460,50 +2504,54 @@ class AccountStatOperation(ScaleMixin, PkUrlMixin, JSONDetailView):
]
return context
- def get_object(self, *args, **kwargs):
- obj = super().get_object(*args, **kwargs)
- if self.request.user != obj.user:
- raise Http404
- return obj
-
- @method_decorator(login_required)
- def dispatch(self, *args, **kwargs):
- return super().dispatch(*args, **kwargs)
-
# ------------------------
# Article Satistiques Last
# ------------------------
-ID_PREFIX_ART_LAST = "last_art"
-ID_PREFIX_ART_LAST_DAYS = "last_days_art"
-ID_PREFIX_ART_LAST_WEEKS = "last_weeks_art"
-ID_PREFIX_ART_LAST_MONTHS = "last_months_art"
+@method_decorator(teamkfet_required, name="dispatch")
class ArticleStatSalesList(SingleResumeStat):
- """Manifest for sales stats of an article."""
+ """
+ Menu pour les statistiques de vente d'un article.
+ """
model = Article
- context_object_name = "article"
- id_prefix = ID_PREFIX_ART_LAST
nb_default = 2
url_stat = "kfet.article.stat.sales"
- stats = last_stats_manifest()
- @method_decorator(teamkfet_required)
- def dispatch(self, *args, **kwargs):
- return super().dispatch(*args, **kwargs)
+ def get_stats(self):
+ first_conso = (
+ Operation.objects.filter(article=self.object)
+ .order_by("group__at")
+ .values_list("group__at", flat=True)
+ .first()
+ )
+ if first_conso is None:
+ # On le crée dans le passé au cas où
+ first_conso = timezone.now() - timedelta(seconds=1)
+ scales_def = [
+ ("Tout le temps", MonthScale, {"last": True, "begin": first_conso}, False),
+ ("1 an", MonthScale, {"last": True, "n_steps": 12}, False),
+ ("3 mois", WeekScale, {"last": True, "n_steps": 13}, True),
+ ("2 semaines", DayScale, {"last": True, "n_steps": 14}, False),
+ ]
+
+ return scale_url_params(scales_def)
+@method_decorator(teamkfet_required, name="dispatch")
class ArticleStatSales(ScaleMixin, JSONDetailView):
- """Datasets of sales of an article."""
+ """
+ Statistiques (JSON) de vente d'un article.
+ Sépare LIQ et les comptes K-Fêt, et rajoute le total.
+ """
model = Article
context_object_name = "article"
def get_context_data(self, *args, **kwargs):
- old_ctx = super().get_context_data(*args, **kwargs)
- context = {"labels": old_ctx["labels"]}
+ context = super().get_context_data(*args, **kwargs)
scale = self.scale
all_purchases = (
@@ -2516,23 +2564,13 @@ class ArticleStatSales(ScaleMixin, JSONDetailView):
liq_only = all_purchases.filter(group__on_acc__trigramme="LIQ")
liq_exclude = all_purchases.exclude(group__on_acc__trigramme="LIQ")
- chunks_liq = scale.get_by_chunks(
- liq_only, field_db="group__at", field_callback=lambda d: d["group__at"]
+ nb_liq = scale.chunkify_qs(
+ liq_only, field="group__at", aggregate=Sum("article_nb")
)
- chunks_no_liq = scale.get_by_chunks(
- liq_exclude, field_db="group__at", field_callback=lambda d: d["group__at"]
+ nb_accounts = scale.chunkify_qs(
+ liq_exclude, field="group__at", aggregate=Sum("article_nb")
)
-
- # On compte les opérations
- nb_ventes = []
- nb_accounts = []
- nb_liq = []
- for chunk_liq, chunk_no_liq in zip(chunks_liq, chunks_no_liq):
- sum_accounts = sum(ope["article_nb"] for ope in chunk_no_liq)
- sum_liq = sum(ope["article_nb"] for ope in chunk_liq)
- nb_ventes.append(sum_accounts + sum_liq)
- nb_accounts.append(sum_accounts)
- nb_liq.append(sum_liq)
+ nb_ventes = [n1 + n2 for n1, n2 in zip(nb_liq, nb_accounts)]
context["charts"] = [
{
@@ -2548,7 +2586,3 @@ class ArticleStatSales(ScaleMixin, JSONDetailView):
},
]
return context
-
- @method_decorator(teamkfet_required)
- def dispatch(self, *args, **kwargs):
- return super().dispatch(*args, **kwargs)
diff --git a/setup.cfg b/setup.cfg
index 100ddb22..1a9901cb 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -9,7 +9,6 @@ source =
kfet
petitscours
shared
- utils
omit =
*migrations*
*test*.py
@@ -37,7 +36,7 @@ default_section = THIRDPARTY
force_grid_wrap = 0
include_trailing_comma = true
known_django = django
-known_first_party = bda,bds,clubs,cof,events,gestioncof,kfet,petitscours,shared,utils
+known_first_party = bda,bds,clubs,cof,events,gestioncof,kfet,petitscours,shared
line_length = 88
multi_line_output = 3
not_skip = __init__.py
diff --git a/utils/__init__.py b/shared/__init__.py
similarity index 100%
rename from utils/__init__.py
rename to shared/__init__.py
diff --git a/shared/tests/testcases.py b/shared/tests/testcases.py
index 35d697e7..507e1361 100644
--- a/shared/tests/testcases.py
+++ b/shared/tests/testcases.py
@@ -111,7 +111,7 @@ class TestCaseMixin:
mock_context_manager.return_value.__enter__.return_value = mock_connection
patcher = mock.patch(
- "gestioncof.autocomplete.Connection", new=mock_context_manager
+ "shared.views.autocomplete.Connection", new=mock_context_manager
)
patcher.start()
self.addCleanup(patcher.stop)
diff --git a/shared/views/autocomplete.py b/shared/views/autocomplete.py
new file mode 100644
index 00000000..af5e3980
--- /dev/null
+++ b/shared/views/autocomplete.py
@@ -0,0 +1,184 @@
+from collections import namedtuple
+
+from dal import autocomplete
+from django.conf import settings
+from django.db.models import Q
+
+if getattr(settings, "LDAP_SERVER_URL", None):
+ from ldap3 import Connection
+else:
+ # shared.tests.testcases.TestCaseMixin.mockLDAP needs
+ # Connection to be defined
+ Connection = None
+
+
+class SearchUnit:
+ """Base class for all the search utilities.
+
+ A search unit should implement a `search` method taking a list of keywords as
+ argument and returning an iterable of search results.
+ """
+
+ def search(self, _keywords):
+ raise NotImplementedError(
+            "Classes implementing the SearchUnit interface should implement the search "
+ "method"
+ )
+
+
+# ---
+# Model-based search
+# ---
+
+
+class ModelSearch(SearchUnit):
+ """Basic search engine for models based on filtering.
+
+ The class should be configured through its `model` class attribute: the `search`
+ method will return a queryset of instances of this model. The `search_fields`
+ attributes indicates which fields to search in.
+
+ Example:
+
+ >>> from django.contrib.auth.models import User
+ >>>
+ >>> class UserSearch(ModelSearch):
+ ... model = User
+ ... search_fields = ["username", "first_name", "last_name"]
+ >>>
+ >>> user_search = UserSearch() # has type ModelSearch[User]
+ >>> user_search.search(["toto", "foo"]) # returns a queryset of Users
+ """
+
+ model = None
+ search_fields = []
+
+ def get_queryset_filter(self, keywords):
+ filter_q = Q()
+
+ if not keywords:
+ return filter_q
+
+ for keyword in keywords:
+ kw_filter = Q()
+ for field in self.search_fields:
+ kw_filter |= Q(**{"{}__icontains".format(field): keyword})
+ filter_q &= kw_filter
+
+ return filter_q
+
+ def search(self, keywords):
+ """Returns the queryset of model instances matching all the keywords.
+
+ The semantic of the search is the following: a model instance appears in the
+ search results iff all of the keywords given as arguments occur in at least one
+ of the search fields.
+ """
+
+ return self.model.objects.filter(self.get_queryset_filter(keywords))
+
+
+class Select2QuerySetView(ModelSearch, autocomplete.Select2QuerySetView):
+ """Compatibility layer between ModelSearch and Select2QuerySetView."""
+
+ def get_queryset(self):
+ keywords = self.q.split()
+ return super().search(keywords)
+
+
+# ---
+# LDAP search
+# ---
+
+Clipper = namedtuple("Clipper", ["clipper", "fullname"])
+
+
+class LDAPSearch(SearchUnit):
+ ldap_server_url = getattr(settings, "LDAP_SERVER_URL", None)
+ domain_component = "dc=spi,dc=ens,dc=fr"
+ search_fields = ["cn", "uid"]
+
+ def get_ldap_query(self, keywords):
+ """Return a search query with the following semantics:
+
+ A Clipper appears in the search results iff all of the keywords given as
+ arguments occur in at least one of the search fields.
+ """
+
+ # Dumb but safe
+ keywords = filter(str.isalnum, keywords)
+
+ ldap_filters = []
+
+ for keyword in keywords:
+ ldap_filter = "(|{})".format(
+ "".join(
+ "({}=*{}*)".format(field, keyword) for field in self.search_fields
+ )
+ )
+ ldap_filters.append(ldap_filter)
+
+ return "(&{})".format("".join(ldap_filters))
+
+ def search(self, keywords):
+ """Return a list of Clipper objects matching all the keywords."""
+
+ query = self.get_ldap_query(keywords)
+
+ if Connection is None or query == "(&)":
+ return []
+
+ with Connection(self.ldap_server_url) as conn:
+ conn.search(self.domain_component, query, attributes=self.search_fields)
+ return [Clipper(entry.uid.value, entry.cn.value) for entry in conn.entries]
+
+
+# ---
+# Composition of autocomplete units
+# ---
+
+
+class Compose:
+ """Search with several units and remove duplicate results.
+
+ The `search_units` class attribute should be a list of tuples of the form `(name,
+ uniq_key, search_unit)`.
+
+ The `search` method produces a dictionary whose keys are the `name`s given in
+ `search_units` and whose values are iterables produced by the different search
+ units.
+
+ The `uniq_key`s are used to remove duplicates: for instance, say that search unit
+ 1 has `uniq_key = "username"` and search unit 2 has `uniq_key = "clipper"`, then
+ search results from unit 2 whose `.clipper` attribute is equal to the
+ `.username` attribute of some result from unit 1 are omitted.
+
+ Typical Example:
+
+ >>> from django.contrib.auth.models import User
+ >>>
+ >>> class UserSearch(ModelSearch):
+ ... model = User
+ ... search_fields = ["username", "first_name", "last_name"]
+ >>>
+ >>> class UserAndClipperSearch(Compose):
+ ... search_units = [
+ ... ("users", "username", UserSearch),
+ ... ("clippers", "clipper", LDAPSearch),
+ ... ]
+
+ In this example, clipper accounts that already have an associated user (i.e. with a
+ username equal to the clipper login), will not appear in the results.
+ """
+
+ search_units = []
+
+ def search(self, keywords):
+ uniq_results = set()
+ results = {}
+ for name, uniq_key, search_unit in self.search_units:
+ res = search_unit().search(keywords)
+ res = [r for r in res if getattr(r, uniq_key) not in uniq_results]
+ uniq_results |= set((getattr(r, uniq_key) for r in res))
+ results[name] = res
+ return results
diff --git a/utils/views/__init__.py b/utils/views/__init__.py
deleted file mode 100644
index e69de29b..00000000
diff --git a/utils/views/autocomplete.py b/utils/views/autocomplete.py
deleted file mode 100644
index c5d51343..00000000
--- a/utils/views/autocomplete.py
+++ /dev/null
@@ -1,25 +0,0 @@
-from dal import autocomplete
-from django.db.models import Q
-
-
-class Select2QuerySetView(autocomplete.Select2QuerySetView):
- model = None
- search_fields = []
-
- def get_queryset_filter(self):
- q = self.q
- filter_q = Q()
-
- if not q:
- return filter_q
-
- words = q.split()
-
- for word in words:
- for field in self.search_fields:
- filter_q |= Q(**{"{}__icontains".format(field): word})
-
- return filter_q
-
- def get_queryset(self):
- return self.model.objects.filter(self.get_queryset_filter())