# == Schema Information
#
# Table name: batch_operations
#
#  id                  :bigint           not null, primary key
#  failed_dossier_ids  :bigint           default([]), not null, is an Array
#  finished_at         :datetime
#  operation           :string           not null
#  payload             :jsonb            not null
#  run_at              :datetime
#  seen_at             :datetime
#  success_dossier_ids :bigint           default([]), not null, is an Array
#  created_at          :datetime         not null
#  updated_at          :datetime         not null
#  instructeur_id      :bigint           not null
#
class BatchOperation < ApplicationRecord
  # Kinds of bulk actions an instructeur can run on a set of dossiers.
  enum operation: {
    archiver: 'archiver',
    passer_en_instruction: 'passer_en_instruction'
  }

  has_many :dossiers, dependent: :nullify
  has_and_belongs_to_many :groupe_instructeurs
  belongs_to :instructeur

  validates :operation, presence: true

  # How long a finished batch is kept before being considered stale.
  RETENTION_DURATION = 4.hours
  # A batch still unfinished after this long is considered stuck.
  MAX_DUREE_GENERATION = 24.hours

  # Finished batches that have not been touched for RETENTION_DURATION.
  scope :stale, lambda {
    where.not(finished_at: nil)
      .where('updated_at < ?', (Time.zone.now - RETENTION_DURATION))
  }

  # Unfinished batches that have not been touched for MAX_DUREE_GENERATION.
  scope :stuck, lambda {
    where(finished_at: nil)
      .where('updated_at < ?', (Time.zone.now - MAX_DUREE_GENERATION))
  }

  # Restricts the given dossier ids to dossiers the instructeur can actually
  # act on, filtered to the states valid for this batch's operation.
  # Returns an ActiveRecord::Relation (nil if operation matches no branch —
  # should not happen since operation is validated against the enum).
  def dossiers_safe_scope(dossier_ids = self.dossier_ids)
    query = instructeur
      .dossiers
      .visible_by_administration
      .where(id: dossier_ids)
    case operation
    when BatchOperation.operations.fetch(:archiver) then
      query.not_archived.state_termine
    when BatchOperation.operations.fetch(:passer_en_instruction) then
      query.state_en_construction
    end
  end

  # Enqueues one BatchOperationProcessOneJob per dossier in the batch.
  def enqueue_all
    dossiers_safe_scope # processed later, one job per dossier
      .map { |dossier| BatchOperationProcessOneJob.perform_later(self, dossier) }
  end

  # Applies this batch's operation to a single dossier.
  def process_one(dossier)
    case operation
    when BatchOperation.operations.fetch(:archiver)
      dossier.archiver!(instructeur)
    when BatchOperation.operations.fetch(:passer_en_instruction)
      dossier.passer_en_instruction(instructeur: instructeur)
    end
  end

  # Records the outcome (success/failure) of processing one dossier.
  # Uses Arel::UpdateManager with array_append/array_remove (inspired by
  # atomic_append) so the id-array columns are mutated atomically in SQL,
  # avoiding read-modify-write races between concurrent jobs.
  # See: https://www.rubydoc.info/gems/arel/Arel/UpdateManager
  def track_processed_dossier(success, dossier)
    transaction do
      # Detach the dossier first so called_for_last_time? sees the remaining set.
      dossier.update(batch_operation: nil)
      manager = Arel::UpdateManager.new.table(arel_table).where(arel_table[:id].eq(id))
      values = []
      values.push([arel_table[:run_at], Time.zone.now]) if called_for_first_time?
      values.push([arel_table[:finished_at], Time.zone.now]) if called_for_last_time?(dossier)
      values.push([arel_table[:updated_at], Time.zone.now])
      if success
        # On success, also remove the id from failures (a retry may have failed before).
        values.push([arel_table[:success_dossier_ids], Arel::Nodes::NamedFunction.new('array_append', [arel_table[:success_dossier_ids], dossier.id])])
        values.push([arel_table[:failed_dossier_ids], Arel::Nodes::NamedFunction.new('array_remove', [arel_table[:failed_dossier_ids], dossier.id])])
      else
        values.push([arel_table[:failed_dossier_ids], Arel::Nodes::NamedFunction.new('array_append', [arel_table[:failed_dossier_ids], dossier.id])])
      end
      manager.set(values)
      ActiveRecord::Base.connection.update(manager.to_sql)
    end
  end

  # When an instructeur wants to create a batch from the interface,
  # another instructeur might have already acted on one of the dossiers.
  # Re-filter through dossiers_safe_scope inside a transaction so the
  # batch is created only with dossiers still eligible and unclaimed.
  # Returns the saved instance, or nil when no eligible dossier remains.
  def self.safe_create!(params)
    transaction do
      instance = new(params)
      instance.dossiers = instance.dossiers_safe_scope(params[:dossier_ids])
        .not_having_batch_operation
      if instance.dossiers.present?
        instance.save!
        BatchOperationEnqueueAllJob.perform_later(instance)
        instance
      end
    end
  end

  # True until the first dossier of the batch has been processed (run_at set).
  def called_for_first_time?
    run_at.nil?
  end

  # True when no dossier other than the one being processed remains attached.
  # Beware: the record must be reloaded first for the association to be fresh.
  def called_for_last_time?(dossier_to_ignore)
    dossiers.where.not(id: dossier_to_ignore.id).empty?
  end

  # Total dossiers in the batch: processed ones plus, while still running,
  # the ones not yet processed (still attached via the association).
  def total_count
    total = failed_dossier_ids.size + success_dossier_ids.size
    if finished_at.blank?
      total += dossiers.count
    end
    total
  end

  # Number of dossiers already processed (success or failure).
  def progress_count
    failed_dossier_ids.size + success_dossier_ids.size
  end

  private

  def arel_table
    BatchOperation.arel_table
  end
end
|