# frozen_string_literal: true

class DossierProjectionService
  class DossierProjection < Struct.new(:dossier, :columns)
  end

  def self.for_tiers_translation(array)
    for_tiers, email, first_name, last_name = array
    if for_tiers == true
      "#{email} #{I18n.t('views.instructeurs.dossiers.acts_on_behalf')} #{first_name} #{last_name}"
    else
      email
    end
  end

  TABLE = 'table'
  COLUMN = 'column'
  STABLE_ID = 'stable_id'

  # Returns [DossierProjection(dossier, columns)] ordered by dossiers_ids
  # and the columns ordered by fields.
  #
  # It tries to be fast by using `pluck` (or at least `select`)
  # to avoid deserializing entire records.
  #
  # It stores its intermediary query results in a hash on the corresponding field.
  # ex: field_email[:id_value_h] = { dossier_id_1: email_1, dossier_id_3: email_3 }
  #
  # Those hashes are needed because:
  # - the order of the intermediary query results is unknown
  # - some values can be missing (if a revision added or removed them)
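  #
  # Usage sketch (hypothetical caller; `columns` is assumed to be an array of
  # Column / Columns::ChampColumn objects built elsewhere, e.g. from an
  # instructeur's displayed-columns configuration):
  #
  #   projections = DossierProjectionService.project(dossiers.ids, columns)
  #   projections.each do |projection|
  #     projection.dossier # => the Dossier record
  #     projection.columns # => one value per requested column, in the same order
  #   end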
  def self.project(dossiers_ids, columns)
    fields = columns.map do |c|
      if c.is_a?(Columns::ChampColumn)
        { TABLE => c.table, STABLE_ID => c.stable_id, original_column: c }
      else
        { TABLE => c.table, COLUMN => c.column }
      end
    end
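
    # champ_value is a lambda: given a Champ, it returns the value formatted with
    # the type_de_champ of that champ's dossier revision (see champ_value_formatter below)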
    champ_value = champ_value_formatter(dossiers_ids, fields)

    fields
      .each { |f| f[:id_value_h] = {} }
      .group_by { |f| f[TABLE] } # one query per table
      .each do |table, fields|
        case table
        when 'type_de_champ'
          Champ
            .where(
              stable_id: fields.map { |f| f[STABLE_ID] },
              dossier_id: dossiers_ids
            )
            .select(:dossier_id, :value, :stable_id, :type, :external_id, :data, :value_json) # we cannot pluck :value, as we need the champ.to_s method
            .group_by(&:stable_id) # the champs are redispatched to their respective fields
            .map do |stable_id, champs|
              fields
                .filter { |f| f[STABLE_ID] == stable_id }
                .each do |field|
                  column = field[:original_column]
                  field[:id_value_h] = champs.to_h { [_1.dossier_id, column.is_a?(Columns::JSONPathColumn) ? column.value(_1) : champ_value.(_1)] }
                end
            end
        when 'self'
          Dossier
            .where(id: dossiers_ids)
            .pluck(:id, *fields.map { |f| f[COLUMN].to_sym })
            .each do |id, *columns|
              fields.zip(columns).each do |field, value|
                # SVA must remain a date: other columns compute the remaining delay from it
                field[:id_value_h][id] = if value.respond_to?(:strftime)
                  I18n.l(value.to_date)
                else
                  value
                end
              end
            end
        when 'individual'
          Individual
            .where(dossier_id: dossiers_ids)
            .pluck(:dossier_id, *fields.map { |f| f[COLUMN].to_sym })
            .each { |id, *columns| fields.zip(columns).each { |field, value| field[:id_value_h][id] = value } }
        when 'etablissement'
          Etablissement
            .where(dossier_id: dossiers_ids)
            .pluck(:dossier_id, *fields.map { |f| f[COLUMN].to_sym })
            .each { |id, *columns| fields.zip(columns).each { |field, value| field[:id_value_h][id] = value } }
        when 'user'
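          # either the account email, or "email <acts_on_behalf> first_name last_name"
          # when the dossier was filed for a third party (for_tiers)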
          fields[0][:id_value_h] = Dossier # there is only one field available for the user table
            .joins(:user)
            .includes(:individual)
            .where(id: dossiers_ids)
            .pluck('dossiers.id, dossiers.for_tiers, users.email, individuals.prenom, individuals.nom')
            .to_h { |dossier_id, *array| [dossier_id, for_tiers_translation(array)] }
        when 'groupe_instructeur'
          fields[0][:id_value_h] = Dossier
            .joins(:groupe_instructeur)
            .where(id: dossiers_ids)
            .pluck('dossiers.id, groupe_instructeurs.label')
            .to_h
        when 'dossier_corrections'
          columns = fields.map { _1[COLUMN].to_sym }

          id_value_h = DossierCorrection.where(dossier_id: dossiers_ids)
            .pluck(:dossier_id, *columns)
            .group_by(&:first) # group corrections by dossier_id
            .transform_values do |values| # build each correction as a hash of column => value
              values.map { Hash[columns.zip(_1[1..-1])] }
            end

          fields[0][:id_value_h] = id_value_h
        when 'dossier_labels'
          columns = fields.map { _1[COLUMN].to_sym }

          id_value_h =
            DossierLabel
              .includes(:label)
              .where(dossier_id: dossiers_ids)
              .pluck('dossier_id, labels.name, labels.color')
              .group_by { |dossier_id, _| dossier_id }

          fields[0][:id_value_h] = id_value_h.transform_values { |v| { value: v, type: :label } }
        when 'procedure'
          Dossier
            .joins(:procedure)
            .where(id: dossiers_ids)
            .pluck(:id, *fields.map { |f| f[COLUMN].to_sym })
            .each { |id, *columns| fields.zip(columns).each { |field, value| field[:id_value_h][id] = value } }
        when 'followers_instructeurs'
          # rubocop:disable Style/HashTransformValues
          fields[0][:id_value_h] = Follow
            .active
            .joins(instructeur: :user)
            .where(dossier_id: dossiers_ids)
            .pluck('dossier_id, users.email')
            .group_by { |dossier_id, _| dossier_id }
            .to_h { |dossier_id, dossier_id_emails| [dossier_id, dossier_id_emails.sort.map { |_, email| email }&.join(', ')] }
          # rubocop:enable Style/HashTransformValues
        when 'avis'
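          # per dossier, tally the avis answers and render each answer/count pair
          # through I18n, joined with ' / '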
          # rubocop:disable Style/HashTransformValues
          fields[0][:id_value_h] = Avis
            .where(dossier_id: dossiers_ids)
            .pluck('dossier_id', 'question_answer')
            .group_by { |dossier_id, _| dossier_id }
            .to_h { |dossier_id, question_answer| [dossier_id, question_answer.map { |_, answer| answer }&.compact&.tally&.map { |k, v| I18n.t("helpers.label.question_answer_with_count.#{k}", count: v) }&.join(' / ')] }
          # rubocop:enable Style/HashTransformValues
        end
      end

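    # load the dossiers and rebuild one projection per requested id,
    # preserving the order of dossiers_ids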
    dossiers = Dossier.includes(:corrections, :pending_corrections).find(dossiers_ids)

    dossiers_ids.map do |dossier_id|
      DossierProjection.new(
        dossiers.find { _1.id == dossier_id },
        fields.map { |f| f[:id_value_h][dossier_id] }
      )
    end
  end

  class << self
    private

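    # Builds a lambda Champ -> formatted value: the champ is formatted with the
    # type_de_champ belonging to its dossier's revision (dossiers of the same
    # procedure may be on different revisions).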
    def champ_value_formatter(dossiers_ids, fields)
      stable_ids = fields.filter { _1[TABLE].in?(['type_de_champ']) }.map { _1[STABLE_ID] }
      revision_ids_by_dossier_ids = Dossier.where(id: dossiers_ids).pluck(:id, :revision_id).to_h
      stable_ids_and_types_de_champ_by_revision_ids = ProcedureRevisionTypeDeChamp.includes(:type_de_champ)
        .where(revision_id: revision_ids_by_dossier_ids.values.uniq, type_de_champ: { stable_id: stable_ids })
        .map { [_1.revision_id, _1.type_de_champ] }
        .group_by(&:first)
        .transform_values { _1.map { |_, type_de_champ| [type_de_champ.stable_id, type_de_champ] }.to_h }
      stable_ids_and_types_de_champ_by_dossier_ids = revision_ids_by_dossier_ids.transform_values { stable_ids_and_types_de_champ_by_revision_ids[_1] }.compact
      -> (champ) {
        type_de_champ = stable_ids_and_types_de_champ_by_dossier_ids
          .fetch(champ.dossier_id, {})[champ.stable_id]
        type_de_champ&.champ_value(champ)
      }
    end
  end
end