Merge pull request #3666 from betagouv/frederic/migrate_pj
Migration des PJ en champs PJ
This commit is contained in:
commit
6517d890b3
7 changed files with 525 additions and 1 deletions
143
app/services/carrierwave_active_storage_migration_service.rb
Normal file
143
app/services/carrierwave_active_storage_migration_service.rb
Normal file
|
@ -0,0 +1,143 @@
|
||||||
|
# Helpers for migrating file attachments from CarrierWave to ActiveStorage
# by copying the underlying objects server-side inside an OpenStack Swift
# object store (no download/re-upload round trip).
class CarrierwaveActiveStorageMigrationService
  # Checks that both ActiveStorage and the given CarrierWave uploader point
  # to the same OpenStack object store, so that a server-side COPY is possible.
  #
  # Raises StandardError when any precondition fails.
  def ensure_openstack_copy_possible!(uploader)
    ensure_active_storage_openstack!
    ensure_carrierwave_openstack!(uploader)
    ensure_active_storage_and_carrierwave_credentials_match(uploader)
  end

  def ensure_active_storage_openstack!
    # If we manage to get the client, it means that ActiveStorage is on OpenStack
    openstack_client!
  end

  # Memoized OpenStack client taken from the ActiveStorage service.
  def openstack_client!
    @openstack_client ||= active_storage_openstack_client!
  end

  def active_storage_openstack_client!
    service = ActiveStorage::Blob.service

    if !defined?(ActiveStorage::Service::OpenStackService) ||
       !service.is_a?(ActiveStorage::Service::OpenStackService)
      raise StandardError, 'ActiveStorage must be backed by OpenStack'
    end

    service.client
  end

  def ensure_carrierwave_openstack!(uploader)
    storage = fog_client!(uploader)

    if !defined?(Fog::OpenStack::Storage::Real) ||
       !storage.is_a?(Fog::OpenStack::Storage::Real)
      raise StandardError, 'Carrierwave must be backed by OpenStack'
    end
  end

  # Returns the Fog connection behind the CarrierWave uploader,
  # raising if the uploader is not configured with Fog storage.
  def fog_client!(uploader)
    storage = uploader.new.send(:storage)

    if !defined?(CarrierWave::Storage::Fog) ||
       !storage.is_a?(CarrierWave::Storage::Fog)
      raise StandardError, 'Carrierwave must be backed by a Fog provider'
    end

    storage.connection
  end

  # OpenStack Swift's COPY object command works across different buckets, but they still need
  # to be on the same object store. This method tries to ensure that Carrierwave and ActiveStorage
  # are indeed pointing to the same Swift store.
  def ensure_active_storage_and_carrierwave_credentials_match(uploader)
    auth_keys = [
      :openstack_tenant,
      :openstack_api_key,
      :openstack_username,
      :openstack_region,
      :openstack_management_url
    ]

    active_storage_creds = openstack_client!.credentials.slice(*auth_keys)
    carrierwave_creds = fog_client!(uploader).credentials.slice(*auth_keys)

    if active_storage_creds != carrierwave_creds
      raise StandardError, "Active Storage and Carrierwave credentials must match"
    end
  end

  # Backward-compatible alias for the original (misspelled) method name.
  alias_method :ensure_active_storage_and_carrierwave_credetials_match,
               :ensure_active_storage_and_carrierwave_credentials_match

  # If identify is true, force ActiveStorage to examine the beginning of the file
  # to determine its MIME type. This identification does not happen immediately,
  # but when the first attachment that references this blob is created.
  def make_blob(uploader, created_at, filename: nil, identify: false)
    content_type = uploader.content_type

    ActiveStorage::Blob.create(
      filename: filename || uploader.filename,
      content_type: content_type,
      identified: content_type.present? && !identify,
      byte_size: uploader.size,
      checksum: checksum(uploader),
      created_at: created_at
    )
  end

  # Reuses the ETag reported by the remote store as the blob checksum,
  # converted to the base64 format ActiveStorage expects.
  def checksum(uploader)
    hex_to_base64(uploader.file.send(:file).etag)
  end

  # Converts a hex digest string to the strict base64 encoding of its raw bytes.
  def hex_to_base64(hexdigest)
    [[hexdigest].pack("H*")].pack("m0")
  end

  # Server-side copy of an object from the CarrierWave container to the
  # ActiveStorage container, stored under the blob's key.
  def copy_from_carrierwave_to_active_storage!(source_name, blob)
    openstack_client!.copy_object(
      carrierwave_container_name,
      source_name,
      active_storage_container_name,
      blob.key
    )

    fix_content_type(blob)
  end

  def carrierwave_container_name
    Rails.application.secrets.fog[:directory]
  end

  def active_storage_container_name
    ENV['FOG_ACTIVESTORAGE_DIRECTORY']
  end

  def delete_from_active_storage!(blob)
    openstack_client!.delete_object(
      active_storage_container_name,
      blob.key
    )
  end

  # Before calling this method, you must make sure the file has been uploaded for the blob.
  # Otherwise, this method might fail if it needs to read the beginning of the file to
  # update the blob’s MIME type.
  def make_attachment(model, attachment_name, blob)
    attachment = ActiveStorage::Attachment.create(
      name: attachment_name,
      record_type: model.class.base_class.name,
      record_id: model.id,
      blob: blob,
      created_at: model.updated_at.iso8601
    )

    # Making the attachment may have triggered MIME type auto detection on the blob,
    # so we make sure to sync that potentially new MIME type to the object in OpenStack
    fix_content_type(blob)

    attachment
  end

  def fix_content_type(blob)
    # In OpenStack, ActiveStorage cannot inject the MIME type on the fly during direct
    # download. Instead, the MIME type needs to be stored statically on the file object
    # in OpenStack. This is what this call does.
    blob.service.change_content_type(blob.key, blob.content_type)
  end
end
|
|
@ -0,0 +1,104 @@
|
||||||
|
# Migrates a procedure's legacy "pièces justificatives" (PJ) to the newer
# "champ pièce jointe" model, copying the attached files from CarrierWave
# to ActiveStorage along the way.
class PieceJustificativeToChampPieceJointeMigrationService
  # Accepts keyword overrides for collaborators (e.g. storage_service:) and
  # stores each one in an instance variable of the same name.
  def initialize(**params)
    params.each do |key, value|
      instance_variable_set("@#{key}", value)
    end
  end

  def ensure_correct_storage_configuration!
    storage_service.ensure_openstack_copy_possible!(PieceJustificativeUploader)
  end

  def convert_procedure_pjs_to_champ_pjs(procedure)
    types_de_champ_pj = PiecesJustificativesService.types_pj_as_types_de_champ(procedure)
    populate_champs_pjs!(procedure, types_de_champ_pj)

    # Only destroy the old types PJ once everything has been safely migrated to
    # champs PJs. Destroying the types PJ will cascade and destroy the PJs,
    # and delete the linked objects from remote storage. This means that no other
    # cleanup action is required.
    procedure.types_de_piece_justificative.destroy_all
  end

  # Lazily built storage helper; can be injected through the constructor.
  def storage_service
    @storage_service ||= CarrierwaveActiveStorageMigrationService.new
  end

  def populate_champs_pjs!(procedure, types_de_champ_pj)
    procedure.types_de_champ += types_de_champ_pj

    # Unscope to make sure all dossiers are migrated, even the soft-deleted ones
    procedure.dossiers.unscope(where: :hidden_at).find_each do |dossier|
      champs_pj = types_de_champ_pj.map(&:build_champ)
      dossier.champs += champs_pj

      champs_pj.each do |champ|
        type_pj_id = champ.type_de_champ.old_pj&.fetch('stable_id', nil)
        pj = dossier.retrieve_last_piece_justificative_by_type(type_pj_id)

        if pj.present?
          # Preserve the original PJ timestamps on the new champ.
          champ.update(
            updated_at: pj.updated_at,
            created_at: pj.created_at
          )

          convert_pj_to_champ!(pj, champ)
        else
          champ.update(
            updated_at: dossier.updated_at,
            created_at: dossier.created_at
          )
        end
      end
    end
  rescue
    # If anything goes wrong, we roll back the migration by destroying the newly created
    # types de champ, champs, blobs and attachments.
    types_de_champ_pj.each do |type_champ|
      # NOTE(review): `champ` (singular) looks like the association name declared
      # on TypeDeChamp — confirm against the model before renaming.
      type_champ.champ.each { |c| c.piece_justificative_file.purge }
      type_champ.destroy
    end

    # Reraise the exception to abort the migration.
    raise
  end

  def convert_pj_to_champ!(pj, champ)
    blob = make_blob(pj)

    # Upload the file before creating the attachment to make sure MIME type
    # identification doesn’t fail.
    storage_service.copy_from_carrierwave_to_active_storage!(pj.content.path, blob)
    attachment = storage_service.make_attachment(champ, 'piece_justificative_file', blob)

    # By reloading, we force ActiveStorage to look at the attachment again, and see
    # that one exists now. We do this so that, if we need to roll back and destroy the champ,
    # the blob, the attachment and the actual file on OpenStack also get deleted.
    champ.reload
  rescue
    # Destroy partially attached objects that the more general rescue in
    # `populate_champs_pjs!` might not be able to handle.

    if blob&.key.present?
      begin
        storage_service.delete_from_active_storage!(blob)
      rescue => e
        # The cleanup attempt failed, perhaps because the object had not been
        # successfully copied to the Active Storage bucket yet.
        # Log and continue trying to clean up the rest anyway.
        Rails.logger.error(e.message)
      end
    end

    blob&.destroy
    attachment&.destroy
    champ.reload

    # Reraise the exception to abort the migration.
    raise
  end

  def make_blob(pj)
    storage_service.make_blob(pj.content, pj.updated_at.iso8601, filename: pj.original_filename)
  end
end
|
|
@ -33,7 +33,13 @@ class PiecesJustificativesService
|
||||||
end
|
end
|
||||||
|
|
||||||
def self.types_pj_as_types_de_champ(procedure)
|
def self.types_pj_as_types_de_champ(procedure)
|
||||||
order_place = procedure.types_de_champ.last&.order_place || 0
|
last_champ = procedure.types_de_champ.last
|
||||||
|
if last_champ.present?
|
||||||
|
order_place = last_champ.order_place + 1
|
||||||
|
else
|
||||||
|
order_place = 0
|
||||||
|
end
|
||||||
|
|
||||||
types_de_champ = [
|
types_de_champ = [
|
||||||
TypeDeChamp.new(
|
TypeDeChamp.new(
|
||||||
libelle: "Pièces jointes",
|
libelle: "Pièces jointes",
|
||||||
|
|
8
lib/tasks/2019_03_13_migrate_pjs_to_champs.rake
Normal file
8
lib/tasks/2019_03_13_migrate_pjs_to_champs.rake
Normal file
|
@ -0,0 +1,8 @@
|
||||||
|
# One-off migration task: converts a procedure's legacy PJs into champ PJs.
# Usage: PROCEDURE_ID=42 bin/rake 2019_03_13_migrate_pjs_to_champs:run
namespace :'2019_03_13_migrate_pjs_to_champs' do
  task run: :environment do
    # Fail fast with a clear KeyError when PROCEDURE_ID is missing, instead of
    # passing nil down to Procedure.find.
    procedure_id = ENV.fetch('PROCEDURE_ID')
    service = PieceJustificativeToChampPieceJointeMigrationService.new
    # Abort before touching any data if the object stores are not copy-compatible.
    service.ensure_correct_storage_configuration!
    service.convert_procedure_pjs_to_champ_pjs(Procedure.find(procedure_id))
  end
end
|
|
@ -0,0 +1,9 @@
|
||||||
|
require 'spec_helper'

describe CarrierwaveActiveStorageMigrationService do
  describe '#hex_to_base64' do
    let(:service) { CarrierwaveActiveStorageMigrationService.new }

    # 0xdeadbeef packed to raw bytes, then strict ("m0") base64-encoded.
    it { expect(service.hex_to_base64('deadbeef')).to eq('3q2+7w==') }
  end
end
|
|
@ -0,0 +1,222 @@
|
||||||
|
require 'spec_helper'

# Specs for the PJ -> champ PJ migration. The storage service is a real
# instance whose expensive methods are mocked per-example, so no actual
# OpenStack calls are made.
describe PieceJustificativeToChampPieceJointeMigrationService do
  let(:service) { PieceJustificativeToChampPieceJointeMigrationService.new(storage_service: storage_service) }
  let(:storage_service) { CarrierwaveActiveStorageMigrationService.new }
  let(:pj_uploader) { class_double(PieceJustificativeUploader) }
  let(:pj_service) { class_double(PiecesJustificativesService) }

  let(:procedure) { create(:procedure, types_de_piece_justificative: types_pj) }
  let(:types_pj) { [create(:type_de_piece_justificative)] }

  let!(:dossier) do
    create(
      :dossier,
      procedure: procedure,
      pieces_justificatives: pjs
    )
  end

  # Default: no PJs attached; overridden per context.
  let(:pjs) { [] }

  # Builds one PJ per type de PJ of the procedure.
  def make_pjs
    types_pj.map do |tpj|
      create(:piece_justificative, :contrat, type_de_piece_justificative: tpj)
    end
  end

  # Stubs the three storage operations performed for each converted PJ.
  def expect_storage_service_to_convert_object
    expect(storage_service).to receive(:make_blob)
    expect(storage_service).to receive(:copy_from_carrierwave_to_active_storage!)
    expect(storage_service).to receive(:make_attachment)
  end

  context 'when conversion succeeds' do
    context 'for the procedure' do
      it 'types de champ are created for the "pièces jointes" header and for each PJ' do
        expect { service.convert_procedure_pjs_to_champ_pjs(procedure) }
          .to change { procedure.types_de_champ.count }
          .by(types_pj.count + 1)
      end

      it 'the old types de pj are removed' do
        expect { service.convert_procedure_pjs_to_champ_pjs(procedure) }
          .to change { procedure.types_de_piece_justificative.count }
          .to(0)
      end
    end

    context 'no notifications are sent to instructeurs' do
      context 'when there is a PJ' do
        let(:pjs) { make_pjs }

        before do
          # Reload PJ because the resolution of in-database timestamps is
          # different from the resolution of in-memory timestamps, causing the
          # tests to fail on fractional time differences.
          pjs.last.reload

          expect_storage_service_to_convert_object
          # Travel forward so that unchanged timestamps prove the champ kept
          # the PJ's original created_at/updated_at.
          Timecop.travel(1.hour) { service.convert_procedure_pjs_to_champ_pjs(procedure) }

          # Reload the dossier to see the newly created champs
          dossier.reload
        end

        it 'the champ has the same created_at as the PJ' do
          expect(dossier.champs.last.created_at).to eq(pjs.last.created_at)
        end

        it 'the champ has the same updated_at as the PJ' do
          expect(dossier.champs.last.updated_at).to eq(pjs.last.updated_at)
        end
      end

      context 'when there is no PJ' do
        let!(:expected_updated_at) do
          # Reload dossier because the resolution of in-database timestamps is
          # different from the resolution of in-memory timestamps, causing the
          # tests to fail on fractional time differences.
          dossier.reload
          dossier.updated_at
        end

        before do
          Timecop.travel(1.hour) { service.convert_procedure_pjs_to_champ_pjs(procedure) }

          # Reload the dossier to see the newly created champs
          dossier.reload
        end

        it 'the champ has the same created_at as the dossier' do
          expect(dossier.champs.last.created_at).to eq(dossier.created_at)
        end

        it 'the champ has the same updated_at as the dossier' do
          expect(dossier.champs.last.updated_at).to eq(expected_updated_at)
        end
      end
    end

    context 'for the dossier' do
      let(:pjs) { make_pjs }

      before { expect_storage_service_to_convert_object }

      it 'champs are created for the "pièces jointes" header and for each PJ' do
        expect { service.convert_procedure_pjs_to_champ_pjs(procedure) }
          .to change { dossier.champs.count }
          .by(types_pj.count + 1)
      end

      it 'the old pjs are removed' do
        expect { service.convert_procedure_pjs_to_champ_pjs(procedure) }
          .to change { dossier.pieces_justificatives.count }
          .to(0)
      end
    end

    context 'when the dossier is soft-deleted it still gets converted' do
      let(:pjs) { make_pjs }

      # Overrides the outer dossier with a soft-deleted one (hidden_at set).
      let!(:dossier) do
        create(
          :dossier,
          procedure: procedure,
          pieces_justificatives: pjs,
          hidden_at: Time.zone.now
        )
      end

      before { expect_storage_service_to_convert_object }

      it 'champs are created for the "pièces jointes" header and for each PJ' do
        expect { service.convert_procedure_pjs_to_champ_pjs(procedure) }
          .to change { dossier.champs.count }
          .by(types_pj.count + 1)
      end

      it 'the old pjs are removed' do
        expect { service.convert_procedure_pjs_to_champ_pjs(procedure) }
          .to change { dossier.pieces_justificatives.count }
          .to(0)
      end
    end

    context 'when there are several pjs for one type' do
      let(:pjs) { make_pjs + make_pjs }

      it 'only converts the most recent PJ for each type PJ' do
        # NOTE(review): `exactly(n)` is normally finalized with `.times` in
        # RSpec (`exactly(n).times`) — confirm this constraint is applied.
        expect(storage_service).to receive(:make_blob).exactly(types_pj.count)
        expect(storage_service).to receive(:copy_from_carrierwave_to_active_storage!).exactly(types_pj.count)
        expect(storage_service).to receive(:make_attachment).exactly(types_pj.count)

        service.convert_procedure_pjs_to_champ_pjs(procedure)
      end
    end
  end

  context 'cleanup when conversion fails' do
    let(:pjs) { make_pjs }

    # A second dossier whose conversion is the one that will fail.
    let!(:failing_dossier) do
      create(
        :dossier,
        procedure: procedure,
        pieces_justificatives: make_pjs
      )
    end

    before do
      allow(storage_service).to receive(:checksum).and_return('cafe')
      allow(storage_service).to receive(:fix_content_type)

      # NOTE(review): two successive `expect(...).to receive` on the same
      # method — presumably intended as "first call succeeds, second raises";
      # verify the second expectation does not simply replace the first.
      expect(storage_service).to receive(:copy_from_carrierwave_to_active_storage!)
      expect(storage_service).to receive(:copy_from_carrierwave_to_active_storage!)
        .and_raise('LOL no!')

      expect(storage_service).to receive(:delete_from_active_storage!)
    end

    # Runs the conversion and returns (instead of raising) any error.
    def try_convert(procedure)
      service.convert_procedure_pjs_to_champ_pjs(procedure)
    rescue => e
      e
    end

    it 'passes on the exception' do
      expect { service.convert_procedure_pjs_to_champ_pjs(procedure) }
        .to raise_error('LOL no!')
    end

    it 'does not create champs' do
      expect { try_convert(procedure) }
        .not_to change { dossier.champs.count }
    end

    it 'does not remove any old pjs' do
      expect { try_convert(procedure) }
        .not_to change { dossier.pieces_justificatives.count }
    end

    it 'does not creates types de champ' do
      expect { try_convert(procedure) }
        .not_to change { procedure.types_de_champ.count }
    end

    it 'does not remove old types de pj' do
      expect { try_convert(procedure) }
        .not_to change { procedure.types_de_piece_justificative.count }
    end

    it 'does not leave stale blobs behind' do
      expect { try_convert(procedure) }
        .not_to change { ActiveStorage::Blob.count }
    end

    it 'does not leave stale attachments behind' do
      expect { try_convert(procedure) }
        .not_to change { ActiveStorage::Attachment.count }
    end
  end
end
|
|
@ -89,4 +89,36 @@ describe PiecesJustificativesService do
|
||||||
it { expect(errors).to match([]) }
|
it { expect(errors).to match([]) }
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
|
# Specs for PiecesJustificativesService.types_pj_as_types_de_champ.
# NOTE(review): `procedure`, `tpjs` and `tpj_not_mandatory` are lets defined
# in the enclosing describe (outside this excerpt) — verify against the file.
describe 'types_pj_as_types_de_champ' do
  subject { PiecesJustificativesService.types_pj_as_types_de_champ(procedure) }

  it 'generates one header champ, plus one champ per PJ' do
    expect(subject.pluck(:libelle)).to contain_exactly("Pièces jointes", "not mandatory")
  end

  it 'remembers the id of the PJ that got converted into a champ' do
    expect(subject.map(&:old_pj)).to include({ 'stable_id' => tpj_not_mandatory.id })
  end

  context 'without pre-existing champs' do
    it 'generates a sequence of order_places incrementing from zero' do
      expect(subject.pluck(:order_place)).to contain_exactly(0, 1)
    end
  end

  context 'with pre-existing champs' do
    # Procedure that already has one type de champ at order_place 0, so the
    # generated champs must continue the sequence from 1.
    let(:procedure) do
      create(
        :procedure,
        types_de_piece_justificative: tpjs,
        types_de_champ: [build(:type_de_champ, order_place: 0)]
      )
    end

    it 'generates a sequence of incrementing order_places that continues where the last type de champ left off' do
      expect(subject.pluck(:order_place)).to contain_exactly(1, 2)
    end
  end
end
|
||||||
end
|
end
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue