class StatsController < ApplicationController
  layout "new_application"

  before_action :authenticate_administration!, only: [:download]

  MEAN_NUMBER_OF_CHAMPS_IN_A_FORM = 24.0

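  # Gathers every dataset rendered by the stats view: headline counts,
  # cumulative and monthly series, and a few usage indicators.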
  def index
    procedures = Procedure.publiees_ou_archivees
    dossiers = Dossier.state_not_brouillon

    @procedures_numbers = procedures_numbers(procedures)
    @dossiers_numbers = dossiers_numbers(dossiers)

    @satisfaction_usagers = satisfaction_usagers
    @dossiers_states = dossiers_states

    @procedures_cumulative = cumulative_hash(procedures, :published_at)
    @procedures_in_the_last_4_months = last_four_months_hash(procedures, :published_at)

    @dossiers_cumulative = cumulative_hash(dossiers, :en_construction_at)
    @dossiers_in_the_last_4_months = last_four_months_hash(dossiers, :en_construction_at)

    @procedures_count_per_administrateur = procedures_count_per_administrateur(procedures)

    @dossier_instruction_mean_time = Rails.cache.fetch("dossier_instruction_mean_time", expires_in: 1.day) do
      dossier_instruction_mean_time(dossiers)
    end

    @dossier_filling_mean_time = Rails.cache.fetch("dossier_filling_mean_time", expires_in: 1.day) do
      dossier_filling_mean_time(dossiers)
    end

    @avis_usage = avis_usage
    @avis_average_answer_time = avis_average_answer_time
    @avis_answer_percentages = avis_answer_percentages

    @motivation_usage_dossier = motivation_usage_dossier
    @motivation_usage_procedure = motivation_usage_procedure

    @cloned_from_library_procedures_ratio = cloned_from_library_procedures_ratio
  end

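  # Builds and sends a spreadsheet with one row per dossier and the time spent
  # in each state (brouillon, construction, instruction).
  # Restricted to administration users by the before_action above.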
  def download
    headers = [
      'ID du dossier',
      'ID de la démarche',
      'Nom de la démarche',
      'ID utilisateur',
      'Etat du fichier',
      'Durée en brouillon',
      'Durée en construction',
      'Durée en instruction'
    ]

    data = Dossier
      .includes(:procedure, :user)
      .in_batches
      .flat_map do |dossiers|
        dossiers
          .pluck(
            "dossiers.id",
            "procedures.id",
            "procedures.libelle",
            "users.id",
            "dossiers.state",
            "dossiers.en_construction_at - dossiers.created_at",
            "dossiers.en_instruction_at - dossiers.en_construction_at",
            "dossiers.processed_at - dossiers.en_instruction_at"
          )
      end

    respond_to do |format|
      format.csv { send_data(SpreadsheetArchitect.to_csv(headers: headers, data: data), filename: "statistiques.csv") }
    end
  end

  private

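  # Total number of procedures, number published over the last 30 days,
  # and the evolution (in %) compared to the 30 days before that.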
  def procedures_numbers(procedures)
    total = procedures.count
    last_30_days_count = procedures.where(published_at: 1.month.ago..Time.now).count
    previous_count = procedures.where(published_at: 2.months.ago..1.month.ago).count
    if previous_count != 0
      evolution = (((last_30_days_count.to_f / previous_count) - 1) * 100).round(0)
    else
      evolution = 0
    end
    formatted_evolution = sprintf("%+d", evolution)

    {
      total: total.to_s,
      last_30_days_count: last_30_days_count.to_s,
      evolution: formatted_evolution
    }
  end

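  # Same as procedures_numbers, but for dossiers (based on en_construction_at).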
  def dossiers_numbers(dossiers)
    total = dossiers.count
    last_30_days_count = dossiers.where(en_construction_at: 1.month.ago..Time.now).count
    previous_count = dossiers.where(en_construction_at: 2.months.ago..1.month.ago).count
    if previous_count != 0
      evolution = (((last_30_days_count.to_f / previous_count) - 1) * 100).round(0)
    else
      evolution = 0
    end
    formatted_evolution = sprintf("%+d", evolution)

    {
      total: total.to_s,
      last_30_days_count: last_30_days_count.to_s,
      evolution: formatted_evolution
    }
  end

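  # Number of dossiers currently in each state.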
  def dossiers_states
    {
      'Brouillon' => Dossier.state_brouillon.count,
      'En construction' => Dossier.state_en_construction.count,
      'En instruction' => Dossier.state_en_instruction.count,
      'Terminé' => Dossier.state_termine.count
    }
  end

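  # Weekly share (in %) of each Feedback rating (happy, neutral, unhappy),
  # returned as one { name:, data: } series per rating.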
  def satisfaction_usagers
    legend = {
      Feedback.ratings.fetch(:happy) => "Satisfaits",
      Feedback.ratings.fetch(:neutral) => "Neutres",
      Feedback.ratings.fetch(:unhappy) => "Mécontents"
    }

    interval = 6.weeks.ago.beginning_of_week..1.week.ago.beginning_of_week

    totals = Feedback
      .where(created_at: interval)
      .group_by_week(:created_at)
      .count

    Feedback.ratings.values.map do |rating|
      data = Feedback
        .where(created_at: interval, rating: rating)
        .group_by_week(:created_at)
        .count
        .map do |week, count|
          total = totals[week]

          if total > 0
            [week, (count.to_f / total * 100).round(2)]
          else
            [week, 0]
          end
        end.to_h

      {
        name: legend[rating],
        data: data
      }
    end
  end

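  # For each of the last three full weeks, the percentage of procedures
  # created that week that were cloned from the library.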
  def cloned_from_library_procedures_ratio
    [3.weeks.ago, 2.weeks.ago, 1.week.ago].map do |date|
      min_date = date.beginning_of_week
      max_date = min_date.end_of_week

      all_procedures = Procedure.created_during(min_date..max_date)
      cloned_from_library_procedures = all_procedures.cloned_from_library

      denominator = [1, all_procedures.count].max

      ratio = percentage(cloned_from_library_procedures.count, denominator)

      [l(max_date, format: '%d/%m/%Y'), ratio]
    end
  end

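  # Administrators see up-to-date data; everyone else only sees data
  # up to the end of the previous month.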
  def max_date
    if administration_signed_in?
      Time.now.to_date
    else
      Time.now.beginning_of_month - 1.second
    end
  end

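  # Monthly counts of the association's records over the last four months
  # (up to max_date), keyed by localized month name.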
  def last_four_months_hash(association, date_attribute)
    min_date = 3.months.ago.beginning_of_month.to_date

    association
      .where(date_attribute => min_date..max_date)
      .group("DATE_TRUNC('month', #{date_attribute})")
      .count
      .to_a
      .sort_by { |a| a[0] }
      .map { |e| [I18n.l(e.first, format: "%B %Y"), e.last] }
  end

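  # Running (cumulative) total of the association's records per month,
  # up to max_date.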
  def cumulative_hash(association, date_attribute)
    sum = 0
    association
      .where("#{date_attribute} < ?", max_date)
      .group("DATE_TRUNC('month', #{date_attribute})")
      .count
      .to_a
      .sort_by { |a| a[0] }
      .map { |x, y| { x => (sum += y) } }
      .reduce({}, :merge)
  end

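  # Distribution of administrateurs by the number of procedures they manage.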
  def procedures_count_per_administrateur(procedures)
    count_per_administrateur = procedures.group(:administrateur_id).count.values
    {
      'Une démarche' => count_per_administrateur.select { |count| count == 1 }.count,
      'Entre deux et cinq démarches' => count_per_administrateur.select { |count| count.in?(2..5) }.count,
      'Plus de cinq démarches' => count_per_administrateur.select { |count| count > 5 }.count
    }
  end

  def mean(collection)
    (collection.sum.to_f / collection.size).round(2)
  end

  def percentage(numerator, denominator)
    ((numerator.to_f / denominator) * 100).round(2)
  end

  def dossier_instruction_mean_time(dossiers)
    # Over the last 12 months, we compute for each month the average time
    # it took to instruct a dossier.
    # We compute monthly averages by first making an average per procedure,
    # and then averaging over all the procedures.

    min_date = 11.months.ago
    max_date = Time.now.to_date

    processed_dossiers = dossiers
      .where(processed_at: min_date..max_date)
      .pluck(:procedure_id, :en_construction_at, :processed_at)

    # Group dossiers by month
    processed_dossiers_by_month = processed_dossiers
      .group_by do |dossier|
        dossier[2].beginning_of_month.to_s
      end

    processed_dossiers_by_month.map do |month, value|
      # Group the dossiers of this month by procedure
      dossiers_grouped_by_procedure = value.group_by { |dossier| dossier[0] }

      # Compute the mean instruction time for each procedure
      procedure_processing_times = dossiers_grouped_by_procedure.map do |procedure_id, procedure_dossiers|
        procedure_dossiers_processing_time = procedure_dossiers.map do |dossier|
          (dossier[2] - dossier[1]).to_f / (3600 * 24)
        end

        mean(procedure_dossiers_processing_time)
      end

      # Compute the average mean time for all the procedures of this month
      month_average = mean(procedure_processing_times)

      [month, month_average]
    end.to_h
  end

  def dossier_filling_mean_time(dossiers)
    # Over the last 12 months, we compute for each month the average time
    # it took to fill a dossier.
    # We compute monthly averages by first making an average per procedure,
    # and then averaging over all the procedures.
    # For each procedure, we normalize the data: the time is scaled to
    # a 24-champs form (the current mean form length).

    min_date = 11.months.ago
    max_date = Time.now.to_date

    processed_dossiers = dossiers
      .where(processed_at: min_date..max_date)
      .pluck(
        :procedure_id,
        Arel.sql('EXTRACT(EPOCH FROM (en_construction_at - created_at)) / 60 AS processing_time'),
        :processed_at
      )

    # Group dossiers by month
    processed_dossiers_by_month = processed_dossiers
      .group_by do |(*_, processed_at)|
        processed_at.beginning_of_month.to_s
      end

    procedure_id_type_de_champs_count = TypeDeChamp
      .where(private: false)
      .group(:procedure_id)
      .count

    processed_dossiers_by_month.map do |month, dossier_plucks|
      # Group the dossiers of this month by procedure
      dossiers_grouped_by_procedure = dossier_plucks.group_by { |(procedure_id, *_)| procedure_id }

      # Compute the normalized mean filling time for each procedure
      procedure_processing_times = dossiers_grouped_by_procedure.map do |procedure_id, procedure_dossiers|
        procedure_fields_count = procedure_id_type_de_champs_count[procedure_id]

        if procedure_fields_count.nil? || procedure_fields_count == 0
          next
        end

        procedure_dossiers_processing_time = procedure_dossiers.map { |_, processing_time, _| processing_time }
        procedure_mean = mean(procedure_dossiers_processing_time)

        # We normalize the data for 24 fields
        procedure_mean * (MEAN_NUMBER_OF_CHAMPS_IN_A_FORM / procedure_fields_count)
      end.compact

      # Compute the average mean time for all the procedures of this month
      month_average = mean(procedure_processing_times)

      [month, month_average]
    end.to_h
  end

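  # For each of the last three weeks, the percentage of dossiers created
  # that week that received at least one avis.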
  def avis_usage
    [3.weeks.ago, 2.weeks.ago, 1.week.ago].map do |min_date|
      max_date = min_date + 1.week

      weekly_dossiers = Dossier.includes(:avis).where(created_at: min_date..max_date).to_a

      weekly_dossiers_count = weekly_dossiers.count

      if weekly_dossiers_count == 0
        result = 0
      else
        weekly_dossier_with_avis_count = weekly_dossiers.select { |dossier| dossier.avis.present? }.count
        result = percentage(weekly_dossier_with_avis_count, weekly_dossiers_count)
      end

      [min_date.to_i, result]
    end
  end

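  # For each of the last three weeks, the average time (in days) taken to
  # answer an avis, approximated by (updated_at - created_at).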
  def avis_average_answer_time
    [3.weeks.ago, 2.weeks.ago, 1.week.ago].map do |min_date|
      max_date = min_date + 1.week

      average = Avis.with_answer
        .where(created_at: min_date..max_date)
        .average("EXTRACT(EPOCH FROM avis.updated_at - avis.created_at) / 86400")

      result = average ? average.to_f.round(2) : 0

      [min_date.to_i, result]
    end
  end

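  # For each of the last three weeks, the percentage of avis created
  # that week that received an answer.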
  def avis_answer_percentages
    [3.weeks.ago, 2.weeks.ago, 1.week.ago].map do |min_date|
      max_date = min_date + 1.week

      weekly_avis = Avis.where(created_at: min_date..max_date)

      weekly_avis_count = weekly_avis.count

      if weekly_avis_count == 0
        [min_date.to_i, 0]
      else
        answered_weekly_avis_count = weekly_avis.with_answer.count
        result = percentage(answered_weekly_avis_count, weekly_avis_count)

        [min_date.to_i, result]
      end
    end
  end

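  # For each of the last three full weeks, the percentage of dossiers
  # processed that week that carry a motivation.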
  def motivation_usage_dossier
    [3.weeks.ago, 2.weeks.ago, 1.week.ago].map do |date|
      min_date = date.beginning_of_week
      max_date = date.end_of_week

      weekly_termine_dossiers = Dossier.where(processed_at: min_date..max_date)
      weekly_termine_dossiers_count = weekly_termine_dossiers.count
      weekly_termine_dossiers_with_motivation_count = weekly_termine_dossiers.where.not(motivation: nil).count

      if weekly_termine_dossiers_count == 0
        result = 0
      else
        result = percentage(weekly_termine_dossiers_with_motivation_count, weekly_termine_dossiers_count)
      end

      [l(max_date, format: '%d/%m/%Y'), result]
    end
  end

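  # For each of the last three full weeks: among procedures with dossiers
  # processed that week, the percentage that had at least one motivated dossier.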
  def motivation_usage_procedure
    [3.weeks.ago, 2.weeks.ago, 1.week.ago].map do |date|
      min_date = date.beginning_of_week
      max_date = date.end_of_week

      procedures_with_dossier_processed_this_week = Procedure
        .joins(:dossiers)
        .where(dossiers: { processed_at: min_date..max_date })

      procedures_with_dossier_processed_this_week_count = procedures_with_dossier_processed_this_week
        .uniq
        .count

      procedures_with_dossier_processed_this_week_and_with_motivation_count = procedures_with_dossier_processed_this_week
        .where
        .not(dossiers: { motivation: nil })
        .uniq
        .count

      if procedures_with_dossier_processed_this_week_count == 0
        result = 0
      else
        result = percentage(procedures_with_dossier_processed_this_week_and_with_motivation_count, procedures_with_dossier_processed_this_week_count)
      end

      [l(max_date, format: '%d/%m/%Y'), result]
    end
  end
end