poc(batch_operation_job): implement archive dossiers with async behaviour, lock, run_at, finished_at, failed_dossier_ids, success_dossier_ids

Martin 2022-11-18 16:59:46 +01:00 committed by mfo
parent 61f4cded75
commit b11dbafc59
5 changed files with 152 additions and 20 deletions
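
A note on schema: the migration adding the run_at, finished_at, failed_dossier_ids and success_dossier_ids columns (and the dossiers.batch_operation_id column used as the lock) is not among the files shown below. A minimal sketch of what it might look like; the column names come from this commit, but the types (Postgres array columns, datetime timestamps) and the Rails migration version are assumptions:

# Hypothetical migration, not part of this diff; column types are assumed.
class AddBatchTrackingToBatchOperations < ActiveRecord::Migration[6.1]
  def change
    add_column :batch_operations, :run_at, :datetime
    add_column :batch_operations, :finished_at, :datetime
    add_column :batch_operations, :success_dossier_ids, :bigint, array: true, default: [], null: false
    add_column :batch_operations, :failed_dossier_ids, :bigint, array: true, default: [], null: false
    # the presence of batch_operation_id on a dossier acts as the per-dossier lock
    add_reference :dossiers, :batch_operation, foreign_key: true, null: true
  end
end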


@@ -0,0 +1,25 @@
class BatchOperationJob < ApplicationJob
# open question: should all of this be wrapped in a transaction?
# beware of nesting, though: batch_operation.process_one(dossier) may itself open a transaction
def perform(batch_operation, dossier)
success = true
begin
batch_operation.process_one(dossier)
dossier.update(batch_operation: nil)
rescue => error
success = false
raise error
ensure
batch_operation.reload # reload before deciding if it has been finished
batch_operation.run_at = Time.zone.now if batch_operation.called_for_first_time?
batch_operation.finished_at = Time.zone.now if batch_operation.called_for_last_time?
if success # beware: this will be refactored for stronger atomicity
batch_operation.success_dossier_ids.push(dossier.id)
batch_operation.failed_dossier_ids = batch_operation.failed_dossier_ids.reject { |id| id == dossier.id } # drop this dossier from the failed list if a retry succeeded
else
batch_operation.failed_dossier_ids.push(dossier.id)
end
batch_operation.save!
end
end
end
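
The ensure block above reads the id arrays, mutates them in Ruby and then calls save!, so two jobs finishing at the same moment can overwrite each other's bookkeeping, which is what the "stronger atomicity" comment warns about. A possible refactor (a sketch only, reusing the column and method names from this commit) would serialize that bookkeeping behind a row lock:

# Sketch of a more atomic ensure block: with_lock reloads the record and
# holds a database row lock while the counters are updated.
batch_operation.with_lock do
  batch_operation.run_at ||= Time.zone.now
  if success
    batch_operation.success_dossier_ids |= [dossier.id]
    batch_operation.failed_dossier_ids -= [dossier.id]
  else
    batch_operation.failed_dossier_ids |= [dossier.id]
  end
  batch_operation.finished_at = Time.zone.now if batch_operation.dossiers.count.zero?
  batch_operation.save!
end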


@@ -13,6 +13,7 @@
# updated_at :datetime not null
# instructeur_id :bigint not null
#
class BatchOperation < ApplicationRecord
enum operation: {
archiver: 'archiver'
@@ -22,10 +23,26 @@ class BatchOperation < ApplicationRecord
belongs_to :instructeur
validates :operation, presence: true
def process
def enqueue_all
Dossier.joins(:procedure)
.where(procedure: { id: instructeur.procedures.ids })
.where(id: dossiers.ids)
.map { |dossier| BatchOperationProcessOneJob.perform_later(self, dossier) }
end
def process_one(dossier)
case operation
when BatchOperation.operations.fetch(:archiver)
dossiers.map { |dossier| dossier.archiver!(instructeur) }
dossier.archiver!(instructeur)
end
true
end
def called_for_first_time?
run_at.nil?
end
def called_for_last_time? # beware, must be reloaded first
dossiers.count.zero?
end
end
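
The call site is not part of this commit. A hypothetical controller-side usage, assuming the instructeur and dossier ids come from the request, would create the batch, take the lock by assigning the dossiers, then fan out one job per dossier via enqueue_all:

# Hypothetical call site (not in this diff): current_instructeur and
# params[:dossier_ids] are assumed names, used only for illustration.
batch_operation = BatchOperation.create!(
  instructeur: current_instructeur,
  operation: BatchOperation.operations.fetch(:archiver),
  dossiers: Dossier.where(id: params[:dossier_ids])
)
batch_operation.enqueue_all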


@@ -1,4 +1,14 @@
FactoryBot.define do
factory :batch_operation do
trait :archiver do
operation { BatchOperation.operations.fetch(:archiver) }
dossiers do
[
association(:dossier, :accepte),
association(:dossier, :refuse),
association(:dossier, :sans_suite)
]
end
end
end
end


@@ -0,0 +1,79 @@
describe BatchOperationJob, type: :job do
describe 'perform' do
let(:batch_operation) do
create(:batch_operation, :archiver,
options.merge(instructeur: create(:instructeur)))
end
let(:dossier_job) { batch_operation.dossiers.first }
subject { BatchOperationJob.new(batch_operation, dossier_job) }
let(:options) { {} }
it 'just calls process_one' do
expect { subject.perform_now }
.to change { dossier_job.reload.archived }
.from(false)
.to(true)
end
it 'unlocks the dossier' do
expect { subject.perform_now }
.to change { dossier_job.reload.batch_operation }
.from(batch_operation)
.to(nil)
end
context 'when it succeeds' do
it 'pushes dossier_job id to batch_operation.success_dossier_ids' do
expect { subject.perform_now }
.to change { batch_operation.success_dossier_ids }
.from([])
.to([dossier_job.id])
end
end
context 'when it fails' do
it 'pushes dossier_job id to batch_operation.failed_dossier_ids' do
expect(batch_operation).to receive(:process_one).with(dossier_job).and_raise("KO")
expect { subject.perform_now }.to raise_error("KO")
expect(batch_operation.reload.failed_dossier_ids).to eq([dossier_job.id])
end
end
context 'when it is the first job' do
it 'sets run_at the first time it runs' do
run_at = 2.minutes.ago
Timecop.freeze(run_at) do
expect { subject.perform_now }
.to change { batch_operation.run_at }
.from(nil)
.to(run_at)
end
end
end
context 'when it is the second job (meaning run_at was already set) but not the last' do
let(:previous_run_at) { 2.days.ago }
let(:options) { { run_at: previous_run_at } }
it 'does not change run_at' do
expect { subject.perform_now }.not_to change { batch_operation.reload.run_at }
end
end
context 'when it is the last job' do
before do
batch_operation.dossiers
.where.not(id: dossier_job.id)
.update_all(batch_operation_id: nil)
end
it 'sets finished_at' do
finished_at = Time.zone.now
Timecop.freeze(finished_at) do
expect { subject.perform_now }
.to change { batch_operation.reload.finished_at }
.from(nil)
.to(finished_at)
end
end
end
end
end


@@ -18,28 +18,29 @@ describe BatchOperation, type: :model do
it { is_expected.to validate_presence_of(:operation) }
end
describe 'process' do
let(:procedure) { create(:procedure, :with_instructeur) }
describe 'enqueue_all' do
context 'given dossier_ids not in instructeur procedures' do
subject do
create(:batch_operation, :archiver, instructeur: create(:instructeur), invalid_instructeur: create(:instructeur))
end
subject do
create(:batch_operation, instructeur: procedure.instructeurs.first,
operation: operation,
dossiers: dossiers)
it 'does not enqueue any BatchOperationProcessOneJob' do
expect { subject.enqueue_all() }
.not_to have_enqueued_job(BatchOperationProcessOneJob)
end
end
context 'archive' do
context 'given dossier_ids in instructeur procedures' do
subject do
create(:batch_operation, :archiver, instructeur: create(:instructeur))
end
let(:operation) { BatchOperation.operations.fetch(:archiver) }
let(:dossier_accepte) { create(:dossier, :accepte, procedure: procedure) }
let(:dossier_refuse) { create(:dossier, :refuse, procedure: procedure) }
let(:dossier_classe_sans_suite) { create(:dossier, :sans_suite, procedure: procedure) }
let(:dossiers) { [dossier_accepte, dossier_refuse, dossier_classe_sans_suite] }
it 'works' do
expect { subject.process() }
.to change { dossiers.map(&:reload).map(&:archived) }
.from(dossiers.map { false })
.to(dossiers.map { true })
it 'enqueues as many BatchOperationProcessOneJob jobs as there are dossier ids' do
expect { subject.enqueue_all() }
.to have_enqueued_job(BatchOperationProcessOneJob)
.with(subject, subject.dossiers.first)
.with(subject, subject.dossiers.second)
.with(subject, subject.dossiers.third)
end
end
end