tasks: remove task to migrate old pieces justificatives
This commit is contained in:
parent
31ebe41025
commit
e975fe4ade
8 changed files with 0 additions and 919 deletions
|
@ -165,18 +165,6 @@ class Dossier < ApplicationRecord
|
||||||
self.private_search_terms = champs_private.flat_map(&:search_terms).compact.join(' ')
|
self.private_search_terms = champs_private.flat_map(&:search_terms).compact.join(' ')
|
||||||
end
|
end
|
||||||
|
|
||||||
def was_piece_justificative_uploaded_for_type_id?(type_id)
|
|
||||||
pieces_justificatives.where(type_de_piece_justificative_id: type_id).count > 0
|
|
||||||
end
|
|
||||||
|
|
||||||
def retrieve_last_piece_justificative_by_type(type)
|
|
||||||
pieces_justificatives.where(type_de_piece_justificative_id: type).last
|
|
||||||
end
|
|
||||||
|
|
||||||
def retrieve_all_piece_justificative_by_type(type)
|
|
||||||
pieces_justificatives.where(type_de_piece_justificative_id: type).order(created_at: :DESC)
|
|
||||||
end
|
|
||||||
|
|
||||||
def build_default_champs
|
def build_default_champs
|
||||||
procedure.build_champs.each do |champ|
|
procedure.build_champs.each do |champ|
|
||||||
champs << champ
|
champs << champ
|
||||||
|
|
|
@ -1,173 +0,0 @@
|
||||||
class CarrierwaveActiveStorageMigrationService
|
|
||||||
def ensure_openstack_copy_possible!(uploader)
|
|
||||||
ensure_active_storage_openstack!
|
|
||||||
ensure_carrierwave_openstack!(uploader)
|
|
||||||
ensure_active_storage_and_carrierwave_credetials_match(uploader)
|
|
||||||
end
|
|
||||||
|
|
||||||
def ensure_active_storage_openstack!
|
|
||||||
# If we manage to get the client, it means that ActiveStorage is on OpenStack
|
|
||||||
openstack_client!
|
|
||||||
end
|
|
||||||
|
|
||||||
def openstack_client!
|
|
||||||
@openstack_client ||= active_storage_openstack_client!
|
|
||||||
end
|
|
||||||
|
|
||||||
def active_storage_openstack_client!
|
|
||||||
service = ActiveStorage::Blob.service
|
|
||||||
|
|
||||||
if defined?(ActiveStorage::Service::DsProxyService) &&
|
|
||||||
service.is_a?(ActiveStorage::Service::DsProxyService)
|
|
||||||
service = service.wrapped
|
|
||||||
end
|
|
||||||
|
|
||||||
if !defined?(ActiveStorage::Service::OpenStackService) ||
|
|
||||||
!service.is_a?(ActiveStorage::Service::OpenStackService)
|
|
||||||
raise StandardError, 'ActiveStorage must be backed by OpenStack'
|
|
||||||
end
|
|
||||||
|
|
||||||
service.client
|
|
||||||
end
|
|
||||||
|
|
||||||
def ensure_carrierwave_openstack!(uploader)
|
|
||||||
storage = fog_client!(uploader)
|
|
||||||
|
|
||||||
if !defined?(Fog::OpenStack::Storage::Real) ||
|
|
||||||
!storage.is_a?(Fog::OpenStack::Storage::Real)
|
|
||||||
raise StandardError, 'Carrierwave must be backed by OpenStack'
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
def fog_client!(uploader)
|
|
||||||
storage = uploader.new.send(:storage)
|
|
||||||
|
|
||||||
if !defined?(CarrierWave::Storage::Fog) ||
|
|
||||||
!storage.is_a?(CarrierWave::Storage::Fog)
|
|
||||||
raise StandardError, 'Carrierwave must be backed by a Fog provider'
|
|
||||||
end
|
|
||||||
|
|
||||||
storage.connection
|
|
||||||
end
|
|
||||||
|
|
||||||
# OpenStack Swift's COPY object command works across different buckets, but they still need
|
|
||||||
# to be on the same object store. This method tries to ensure that Carrierwave and ActiveStorage
|
|
||||||
# are indeed pointing to the same Swift store.
|
|
||||||
def ensure_active_storage_and_carrierwave_credetials_match(uploader)
|
|
||||||
auth_keys = [
|
|
||||||
:openstack_tenant,
|
|
||||||
:openstack_api_key,
|
|
||||||
:openstack_username,
|
|
||||||
:openstack_region,
|
|
||||||
:openstack_management_url
|
|
||||||
]
|
|
||||||
|
|
||||||
active_storage_creds = openstack_client!.credentials.slice(*auth_keys)
|
|
||||||
carrierwave_creds = fog_client!(uploader).credentials.slice(*auth_keys)
|
|
||||||
|
|
||||||
if active_storage_creds != carrierwave_creds
|
|
||||||
raise StandardError, "Active Storage and Carrierwave credentials must match"
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
# If identify is true, force ActiveStorage to examine the beginning of the file
|
|
||||||
# to determine its MIME type. This identification does not happen immediately,
|
|
||||||
# but when the first attachment that references this blob is created.
|
|
||||||
def make_blob(uploader, created_at, filename: nil, identify: false)
|
|
||||||
content_type = uploader.content_type
|
|
||||||
identified = content_type.present? && !identify
|
|
||||||
|
|
||||||
ActiveStorage::Blob.create(
|
|
||||||
filename: filename || uploader.filename,
|
|
||||||
content_type: content_type,
|
|
||||||
byte_size: uploader.size,
|
|
||||||
checksum: checksum(uploader),
|
|
||||||
created_at: created_at,
|
|
||||||
metadata: { identified: identified, virus_scan_result: ActiveStorage::VirusScanner::SAFE }
|
|
||||||
)
|
|
||||||
end
|
|
||||||
|
|
||||||
def make_empty_blob(uploader, created_at, filename: nil)
|
|
||||||
content_type = uploader.content_type || 'text/plain'
|
|
||||||
|
|
||||||
blob = ActiveStorage::Blob.build_after_upload(
|
|
||||||
io: StringIO.new('File not found when migrating from CarrierWave.'),
|
|
||||||
filename: filename || uploader.filename,
|
|
||||||
content_type: content_type || 'text/plain',
|
|
||||||
metadata: { virus_scan_result: ActiveStorage::VirusScanner::SAFE }
|
|
||||||
)
|
|
||||||
blob.created_at = created_at
|
|
||||||
blob.save!
|
|
||||||
blob
|
|
||||||
end
|
|
||||||
|
|
||||||
def checksum(uploader)
|
|
||||||
hex_to_base64(uploader.file.send(:file).etag)
|
|
||||||
end
|
|
||||||
|
|
||||||
def hex_to_base64(hexdigest)
|
|
||||||
[[hexdigest].pack("H*")].pack("m0")
|
|
||||||
end
|
|
||||||
|
|
||||||
def copy_from_carrierwave_to_active_storage!(source_name, blob)
|
|
||||||
openstack_client!.copy_object(
|
|
||||||
carrierwave_container_name,
|
|
||||||
source_name,
|
|
||||||
active_storage_container_name,
|
|
||||||
blob.key
|
|
||||||
)
|
|
||||||
|
|
||||||
fix_content_type(blob)
|
|
||||||
end
|
|
||||||
|
|
||||||
def carrierwave_container_name
|
|
||||||
Rails.application.secrets.fog[:directory]
|
|
||||||
end
|
|
||||||
|
|
||||||
def active_storage_container_name
|
|
||||||
ENV['FOG_ACTIVESTORAGE_DIRECTORY']
|
|
||||||
end
|
|
||||||
|
|
||||||
def delete_from_active_storage!(blob)
|
|
||||||
openstack_client!.delete_object(
|
|
||||||
active_storage_container_name,
|
|
||||||
blob.key
|
|
||||||
)
|
|
||||||
end
|
|
||||||
|
|
||||||
# Before calling this method, you must make sure the file has been uploaded for the blob.
|
|
||||||
# Otherwise, this method might fail if it needs to read the beginning of the file to
|
|
||||||
# update the blob’s MIME type.
|
|
||||||
def make_attachment(model, attachment_name, blob)
|
|
||||||
attachment = ActiveStorage::Attachment.create(
|
|
||||||
name: attachment_name,
|
|
||||||
record_type: model.class.base_class.name,
|
|
||||||
record_id: model.id,
|
|
||||||
blob: blob,
|
|
||||||
created_at: model.updated_at.iso8601
|
|
||||||
)
|
|
||||||
|
|
||||||
# Making the attachment may have triggerred MIME type auto detection on the blob,
|
|
||||||
# so we make sure to sync that potentially new MIME type to the object in OpenStack
|
|
||||||
fix_content_type(blob)
|
|
||||||
|
|
||||||
attachment
|
|
||||||
end
|
|
||||||
|
|
||||||
def fix_content_type(blob, retry_delay: 5)
|
|
||||||
retries ||= 0
|
|
||||||
# In OpenStack, ActiveStorage cannot inject the MIME type on the fly during direct
|
|
||||||
# download. Instead, the MIME type needs to be stored statically on the file object
|
|
||||||
# in OpenStack. This is what this call does.
|
|
||||||
blob.service.change_content_type(blob.key, blob.content_type)
|
|
||||||
rescue
|
|
||||||
# When we quickly create a new attachment, and then change its content type,
|
|
||||||
# the Object Storage may not be synchronized yet. It this cas, it will return a
|
|
||||||
# "409 Conflict" error.
|
|
||||||
#
|
|
||||||
# Wait for a while, then try again twice (before giving up).
|
|
||||||
sleep(retry_delay)
|
|
||||||
retry if (retries += 1) < 3
|
|
||||||
raise
|
|
||||||
end
|
|
||||||
end
|
|
|
@ -1,187 +0,0 @@
|
||||||
require Rails.root.join("lib", "tasks", "task_helper")
|
|
||||||
|
|
||||||
class PieceJustificativeToChampPieceJointeMigrationService
|
|
||||||
def initialize(**params)
|
|
||||||
params.each do |key, value|
|
|
||||||
instance_variable_set("@#{key}", value)
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
def ensure_correct_storage_configuration!
|
|
||||||
storage_service.ensure_openstack_copy_possible!(PieceJustificativeUploader)
|
|
||||||
end
|
|
||||||
|
|
||||||
def procedures_with_pjs_in_range(ids_range)
|
|
||||||
procedures_with_pj = Procedure.unscope(where: :hidden_at).joins(:types_de_piece_justificative).distinct
|
|
||||||
procedures_with_pj.where(id: ids_range)
|
|
||||||
end
|
|
||||||
|
|
||||||
def number_of_champs_to_migrate(procedure)
|
|
||||||
(procedure.types_de_piece_justificative.count + 1) * procedure.dossiers.unscope(where: :hidden_at).count
|
|
||||||
end
|
|
||||||
|
|
||||||
def convert_procedure_pjs_to_champ_pjs(procedure, &progress)
|
|
||||||
types_de_champ_pj = PiecesJustificativesService.types_pj_as_types_de_champ(procedure)
|
|
||||||
populate_champs_pjs!(procedure, types_de_champ_pj, &progress)
|
|
||||||
|
|
||||||
# Only destroy the old types PJ once everything has been safely migrated to
|
|
||||||
# champs PJs.
|
|
||||||
|
|
||||||
# First destroy the individual PJ champs on all dossiers.
|
|
||||||
# It will cascade and destroy the PJs, and delete the linked objects from remote storage.
|
|
||||||
procedure.dossiers.unscope(where: :hidden_at).includes(:champs).find_each do |dossier|
|
|
||||||
destroy_pieces_justificatives(dossier)
|
|
||||||
end
|
|
||||||
|
|
||||||
# Now we can destroy the type de champ themselves,
|
|
||||||
# without cascading the timestamp update on all attached dossiers.
|
|
||||||
procedure.types_de_piece_justificative.destroy_all
|
|
||||||
end
|
|
||||||
|
|
||||||
def storage_service
|
|
||||||
@storage_service ||= CarrierwaveActiveStorageMigrationService.new
|
|
||||||
end
|
|
||||||
|
|
||||||
def populate_champs_pjs!(procedure, types_de_champ_pj, &progress)
|
|
||||||
procedure.types_de_champ += types_de_champ_pj
|
|
||||||
|
|
||||||
# Unscope to make sure all dossiers are migrated, even the soft-deleted ones
|
|
||||||
procedure.dossiers.unscope(where: :hidden_at).includes(:champs).find_each do |dossier|
|
|
||||||
migrate_dossier!(dossier, types_de_champ_pj, &progress)
|
|
||||||
end
|
|
||||||
|
|
||||||
rescue StandardError, SignalException
|
|
||||||
# If anything goes wrong, we roll back the migration by destroying the newly created
|
|
||||||
# types de champ, champs blobs and attachments.
|
|
||||||
rake_puts "Error received. Rolling back migration of procedure #{procedure.id}…"
|
|
||||||
rollback_migration!(types_de_champ_pj)
|
|
||||||
rake_puts "Migration of procedure #{procedure.id} rolled back."
|
|
||||||
|
|
||||||
# Reraise the exception to abort the migration.
|
|
||||||
raise
|
|
||||||
end
|
|
||||||
|
|
||||||
def migrate_dossier!(dossier, types_de_champ_pj)
|
|
||||||
# Add the new pieces justificatives champs to the dossier
|
|
||||||
champs_pj = types_de_champ_pj.map(&:build_champ)
|
|
||||||
preserving_updated_at(dossier) do
|
|
||||||
dossier.champs += champs_pj
|
|
||||||
end
|
|
||||||
|
|
||||||
# Copy the dossier old pieces jointes to the new champs
|
|
||||||
# (even if the champs already existed, so that we ensure a clean state)
|
|
||||||
champs_pj.each do |champ|
|
|
||||||
type_pj_id = champ.type_de_champ.old_pj&.fetch('stable_id', nil)
|
|
||||||
pj = dossier.retrieve_last_piece_justificative_by_type(type_pj_id)
|
|
||||||
|
|
||||||
if pj.present?
|
|
||||||
preserving_updated_at(dossier) do
|
|
||||||
convert_pj_to_champ!(pj, champ)
|
|
||||||
end
|
|
||||||
|
|
||||||
champ.update_columns(
|
|
||||||
updated_at: pj.updated_at,
|
|
||||||
created_at: pj.created_at
|
|
||||||
)
|
|
||||||
else
|
|
||||||
champ.update_columns(
|
|
||||||
created_at: dossier.created_at,
|
|
||||||
# Set an updated_at date that won't cause notifications to appear
|
|
||||||
# on gestionnaires' dashboard.
|
|
||||||
updated_at: dossier.created_at
|
|
||||||
)
|
|
||||||
end
|
|
||||||
|
|
||||||
yield if block_given?
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
def convert_pj_to_champ!(pj, champ)
|
|
||||||
actual_file_exists = pj.content.file.send(:file)
|
|
||||||
if actual_file_exists
|
|
||||||
blob = make_blob(pj)
|
|
||||||
|
|
||||||
# Upload the file before creating the attachment to make sure MIME type
|
|
||||||
# identification doesn’t fail.
|
|
||||||
storage_service.copy_from_carrierwave_to_active_storage!(pj.content.path, blob)
|
|
||||||
attachment = storage_service.make_attachment(champ, 'piece_justificative_file', blob)
|
|
||||||
|
|
||||||
else
|
|
||||||
make_empty_blob(pj)
|
|
||||||
rake_puts "Notice: attached file for champ #{champ.id} not found. An empty blob has been attached instead."
|
|
||||||
end
|
|
||||||
|
|
||||||
# By reloading, we force ActiveStorage to look at the attachment again, and see
|
|
||||||
# that one exists now. We do this so that, if we need to roll back and destroy the champ,
|
|
||||||
# the blob, the attachment and the actual file on OpenStack also get deleted.
|
|
||||||
champ.reload
|
|
||||||
rescue StandardError, SignalException
|
|
||||||
# Destroy partially attached object that the more general rescue in `populate_champs_pjs!`
|
|
||||||
# might not be able to handle.
|
|
||||||
|
|
||||||
if blob&.key.present?
|
|
||||||
begin
|
|
||||||
storage_service.delete_from_active_storage!(blob)
|
|
||||||
rescue => e
|
|
||||||
# The cleanup attempt failed, perhaps because the object had not been
|
|
||||||
# successfully copied to the Active Storage bucket yet.
|
|
||||||
# Continue trying to clean up the rest anyway.
|
|
||||||
pp e
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
blob&.destroy
|
|
||||||
attachment&.destroy
|
|
||||||
champ.reload
|
|
||||||
|
|
||||||
# Reraise the exception to abort the migration.
|
|
||||||
raise
|
|
||||||
end
|
|
||||||
|
|
||||||
def rollback_migration!(types_de_champ_pj)
|
|
||||||
types_de_champ_pj.each do |type_champ|
|
|
||||||
# First destroy all the individual champs on dossiers
|
|
||||||
type_champ.champ.each do |champ|
|
|
||||||
begin
|
|
||||||
destroy_champ_pj(Dossier.unscope(where: :hidden_at).find(champ.dossier_id), champ)
|
|
||||||
rescue => e
|
|
||||||
rake_puts e
|
|
||||||
rake_puts "Rolling back of champ #{champ.id} failed. Continuing to roll back…"
|
|
||||||
end
|
|
||||||
end
|
|
||||||
# Now we can destroy the type de champ itself,
|
|
||||||
# without cascading the timestamp update on all attached dossiers.
|
|
||||||
type_champ.reload.destroy
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
def make_blob(pj)
|
|
||||||
storage_service.make_blob(pj.content, pj.updated_at.iso8601, filename: pj.original_filename)
|
|
||||||
end
|
|
||||||
|
|
||||||
def make_empty_blob(pj)
|
|
||||||
storage_service.make_empty_blob(pj.content, pj.updated_at.iso8601, filename: pj.original_filename)
|
|
||||||
end
|
|
||||||
|
|
||||||
def preserving_updated_at(model)
|
|
||||||
original_modification_date = model.updated_at
|
|
||||||
begin
|
|
||||||
yield
|
|
||||||
ensure
|
|
||||||
model.update_column(:updated_at, original_modification_date)
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
def destroy_pieces_justificatives(dossier)
|
|
||||||
preserving_updated_at(dossier) do
|
|
||||||
dossier.pieces_justificatives.destroy_all
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
def destroy_champ_pj(dossier, champ)
|
|
||||||
preserving_updated_at(dossier) do
|
|
||||||
champ.piece_justificative_file.purge
|
|
||||||
champ.destroy
|
|
||||||
end
|
|
||||||
end
|
|
||||||
end
|
|
|
@ -1,52 +0,0 @@
|
||||||
require Rails.root.join("lib", "tasks", "task_helper")
|
|
||||||
|
|
||||||
namespace :pieces_justificatives do
|
|
||||||
desc <<~EOD
|
|
||||||
Migrate the PJ to champs for a single PROCEDURE_ID.
|
|
||||||
EOD
|
|
||||||
task migrate_procedure_to_champs: :environment do
|
|
||||||
procedure_id = ENV['PROCEDURE_ID']
|
|
||||||
procedure = Procedure.find(procedure_id)
|
|
||||||
|
|
||||||
service = PieceJustificativeToChampPieceJointeMigrationService.new
|
|
||||||
service.ensure_correct_storage_configuration!
|
|
||||||
|
|
||||||
progress = ProgressReport.new(service.number_of_champs_to_migrate(procedure))
|
|
||||||
|
|
||||||
service.convert_procedure_pjs_to_champ_pjs(procedure) do
|
|
||||||
progress.inc
|
|
||||||
end
|
|
||||||
|
|
||||||
progress.finish
|
|
||||||
end
|
|
||||||
|
|
||||||
desc <<~EOD
|
|
||||||
Migrate the PJ to champs for several procedures ids, from RANGE_START to RANGE_END.
|
|
||||||
EOD
|
|
||||||
task migrate_procedures_range_to_champs: :environment do
|
|
||||||
if ENV['RANGE_START'].nil? || ENV['RANGE_END'].nil?
|
|
||||||
fail "RANGE_START and RANGE_END must be specified"
|
|
||||||
end
|
|
||||||
procedures_range = ENV['RANGE_START']..ENV['RANGE_END']
|
|
||||||
|
|
||||||
service = PieceJustificativeToChampPieceJointeMigrationService.new
|
|
||||||
service.ensure_correct_storage_configuration!
|
|
||||||
procedures_to_migrate = service.procedures_with_pjs_in_range(procedures_range)
|
|
||||||
|
|
||||||
total_number_of_champs_to_migrate = procedures_to_migrate
|
|
||||||
.map { |p| service.number_of_champs_to_migrate(p) }
|
|
||||||
.sum
|
|
||||||
progress = ProgressReport.new(total_number_of_champs_to_migrate)
|
|
||||||
|
|
||||||
procedures_to_migrate.find_each do |procedure|
|
|
||||||
rake_puts ''
|
|
||||||
rake_puts "Migrating procedure #{procedure.id}…"
|
|
||||||
|
|
||||||
service.convert_procedure_pjs_to_champ_pjs(procedure) do
|
|
||||||
progress.inc
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
progress.finish
|
|
||||||
end
|
|
||||||
end
|
|
|
@ -1,51 +0,0 @@
|
||||||
describe 'pieces_justificatives' do
|
|
||||||
describe 'migrate_procedure_to_champs' do
|
|
||||||
let(:rake_task) { Rake::Task['pieces_justificatives:migrate_procedure_to_champs'] }
|
|
||||||
let(:procedure) { create(:procedure, :with_two_type_de_piece_justificative) }
|
|
||||||
|
|
||||||
before do
|
|
||||||
ENV['PROCEDURE_ID'] = procedure.id.to_s
|
|
||||||
|
|
||||||
allow_any_instance_of(PieceJustificativeToChampPieceJointeMigrationService).to receive(:ensure_correct_storage_configuration!)
|
|
||||||
|
|
||||||
rake_task.invoke
|
|
||||||
end
|
|
||||||
|
|
||||||
after { rake_task.reenable }
|
|
||||||
|
|
||||||
it 'migrates the procedure' do
|
|
||||||
expect(procedure.reload.types_de_piece_justificative).to be_empty
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
describe 'migrate_procedures_range_to_champs' do
|
|
||||||
let(:rake_task) { Rake::Task['pieces_justificatives:migrate_procedures_range_to_champs'] }
|
|
||||||
let(:procedure_in_range_1) { create(:procedure, :with_two_type_de_piece_justificative) }
|
|
||||||
let(:procedure_in_range_2) { create(:procedure, :with_two_type_de_piece_justificative) }
|
|
||||||
let(:procedure_out_of_range) { create(:procedure, :with_two_type_de_piece_justificative) }
|
|
||||||
|
|
||||||
before do
|
|
||||||
procedure_in_range_1
|
|
||||||
procedure_in_range_2
|
|
||||||
procedure_out_of_range
|
|
||||||
|
|
||||||
ENV['RANGE_START'] = procedure_in_range_1.id.to_s
|
|
||||||
ENV['RANGE_END'] = procedure_in_range_2.id.to_s
|
|
||||||
|
|
||||||
allow_any_instance_of(PieceJustificativeToChampPieceJointeMigrationService).to receive(:ensure_correct_storage_configuration!)
|
|
||||||
|
|
||||||
rake_task.invoke
|
|
||||||
end
|
|
||||||
|
|
||||||
after { rake_task.reenable }
|
|
||||||
|
|
||||||
it 'migrates procedures in the ids range' do
|
|
||||||
expect(procedure_in_range_1.reload.types_de_piece_justificative).to be_empty
|
|
||||||
expect(procedure_in_range_2.reload.types_de_piece_justificative).to be_empty
|
|
||||||
end
|
|
||||||
|
|
||||||
it 'doesn’t migrate procedures not in the range' do
|
|
||||||
expect(procedure_out_of_range.reload.types_de_piece_justificative).to be_present
|
|
||||||
end
|
|
||||||
end
|
|
||||||
end
|
|
|
@ -107,34 +107,6 @@ describe Dossier do
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
describe '#retrieve_last_piece_justificative_by_type', vcr: { cassette_name: 'models_dossier_retrieve_last_piece_justificative_by_type' } do
|
|
||||||
let(:types_de_pj_dossier) { dossier.procedure.types_de_piece_justificative }
|
|
||||||
|
|
||||||
subject { dossier.retrieve_last_piece_justificative_by_type types_de_pj_dossier.first }
|
|
||||||
|
|
||||||
before do
|
|
||||||
create :piece_justificative, :rib, dossier: dossier, type_de_piece_justificative: types_de_pj_dossier.first
|
|
||||||
end
|
|
||||||
|
|
||||||
it 'returns piece justificative with given type' do
|
|
||||||
expect(subject.type).to eq(types_de_pj_dossier.first.id)
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
describe '#retrieve_all_piece_justificative_by_type' do
|
|
||||||
let(:types_de_pj_dossier) { dossier.procedure.types_de_piece_justificative }
|
|
||||||
|
|
||||||
subject { dossier.retrieve_all_piece_justificative_by_type types_de_pj_dossier.first }
|
|
||||||
|
|
||||||
before do
|
|
||||||
create :piece_justificative, :rib, dossier: dossier, type_de_piece_justificative: types_de_pj_dossier.first
|
|
||||||
end
|
|
||||||
|
|
||||||
it 'returns a list of the piece justificative' do
|
|
||||||
expect(subject).not_to be_empty
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
describe '#build_default_champs' do
|
describe '#build_default_champs' do
|
||||||
context 'when dossier is linked to a procedure with type_de_champ_public and private' do
|
context 'when dossier is linked to a procedure with type_de_champ_public and private' do
|
||||||
let(:dossier) { create(:dossier, user: user) }
|
let(:dossier) { create(:dossier, user: user) }
|
||||||
|
|
|
@ -1,99 +0,0 @@
|
||||||
require 'spec_helper'
|
|
||||||
|
|
||||||
describe CarrierwaveActiveStorageMigrationService do
|
|
||||||
let(:service) { CarrierwaveActiveStorageMigrationService.new }
|
|
||||||
|
|
||||||
describe '#hex_to_base64' do
|
|
||||||
it { expect(service.hex_to_base64('deadbeef')).to eq('3q2+7w==') }
|
|
||||||
end
|
|
||||||
|
|
||||||
describe '.make_blob' do
|
|
||||||
let(:pj) { create(:piece_justificative, :rib, updated_at: Time.zone.local(2019, 01, 01, 12, 00)) }
|
|
||||||
let(:identify) { false }
|
|
||||||
|
|
||||||
before do
|
|
||||||
allow(service).to receive(:checksum).and_return('cafe')
|
|
||||||
end
|
|
||||||
|
|
||||||
subject(:blob) { service.make_blob(pj.content, pj.updated_at.iso8601, filename: pj.original_filename, identify: identify) }
|
|
||||||
|
|
||||||
it { expect(blob.created_at).to eq pj.updated_at }
|
|
||||||
|
|
||||||
it 'marks the blob as already scanned by the antivirus' do
|
|
||||||
expect(blob.metadata[:virus_scan_result]).to eq(ActiveStorage::VirusScanner::SAFE)
|
|
||||||
end
|
|
||||||
|
|
||||||
it 'sets the blob MIME type from the file' do
|
|
||||||
expect(blob.identified).to be true
|
|
||||||
expect(blob.content_type).to eq 'application/pdf'
|
|
||||||
end
|
|
||||||
|
|
||||||
context 'when asking for explicit MIME type identification' do
|
|
||||||
let(:identify) { true }
|
|
||||||
|
|
||||||
it 'marks the file as needing MIME type detection' do
|
|
||||||
expect(blob.identified).to be false
|
|
||||||
end
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
describe '.make_empty_blob' do
|
|
||||||
let(:pj) { create(:piece_justificative, :rib, updated_at: Time.zone.local(2019, 01, 01, 12, 00)) }
|
|
||||||
|
|
||||||
before 'set the underlying stored file as missing' do
|
|
||||||
allow(pj.content.file).to receive(:file).and_return(nil)
|
|
||||||
end
|
|
||||||
|
|
||||||
subject(:blob) { service.make_empty_blob(pj.content, pj.updated_at.iso8601, filename: pj.original_filename) }
|
|
||||||
|
|
||||||
it { expect(blob.created_at).to eq pj.updated_at }
|
|
||||||
|
|
||||||
it 'marks the blob as already scanned by the antivirus' do
|
|
||||||
expect(blob.metadata[:virus_scan_result]).to eq(ActiveStorage::VirusScanner::SAFE)
|
|
||||||
end
|
|
||||||
|
|
||||||
it 'sets the blob MIME type from the file' do
|
|
||||||
expect(blob.identified).to be true
|
|
||||||
expect(blob.content_type).to eq 'application/pdf'
|
|
||||||
end
|
|
||||||
|
|
||||||
context 'when the file metadata are also missing' do
|
|
||||||
before do
|
|
||||||
allow(pj).to receive(:original_filename).and_return(nil)
|
|
||||||
allow(pj.content).to receive(:content_type).and_return(nil)
|
|
||||||
end
|
|
||||||
|
|
||||||
it 'fallbacks on default values' do
|
|
||||||
expect(blob.filename).to eq pj.content.filename
|
|
||||||
expect(blob.content_type).to eq 'text/plain'
|
|
||||||
end
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
describe '.fix_content_type' do
|
|
||||||
let(:pj) { create(:piece_justificative, :rib, updated_at: Time.zone.local(2019, 01, 01, 12, 00)) }
|
|
||||||
let(:blob) { service.make_empty_blob(pj.content, pj.updated_at.iso8601, filename: pj.original_filename) }
|
|
||||||
|
|
||||||
context 'when the request is ok' do
|
|
||||||
it 'succeeds' do
|
|
||||||
expect(blob.service).to receive(:change_content_type).and_return(true)
|
|
||||||
expect { service.fix_content_type(blob) }.not_to raise_error
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
context 'when the request fails initially' do
|
|
||||||
it 'retries the request' do
|
|
||||||
expect(blob.service).to receive(:change_content_type).and_raise(StandardError).ordered
|
|
||||||
expect(blob.service).to receive(:change_content_type).and_return(true).ordered
|
|
||||||
expect { service.fix_content_type(blob, retry_delay: 0.01) }.not_to raise_error
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
context 'when the request fails too many times' do
|
|
||||||
it 'gives up' do
|
|
||||||
expect(blob.service).to receive(:change_content_type).and_raise(StandardError).thrice
|
|
||||||
expect { service.fix_content_type(blob, retry_delay: 0.01) }.to raise_error(StandardError)
|
|
||||||
end
|
|
||||||
end
|
|
||||||
end
|
|
||||||
end
|
|
|
@ -1,317 +0,0 @@
|
||||||
require 'spec_helper'
|
|
||||||
|
|
||||||
describe PieceJustificativeToChampPieceJointeMigrationService do
|
|
||||||
let(:service) { PieceJustificativeToChampPieceJointeMigrationService.new(storage_service: storage_service) }
|
|
||||||
let(:storage_service) { CarrierwaveActiveStorageMigrationService.new }
|
|
||||||
let(:pj_uploader) { class_double(PieceJustificativeUploader) }
|
|
||||||
let(:pj_service) { class_double(PiecesJustificativesService) }
|
|
||||||
|
|
||||||
let(:procedure) { create(:procedure, types_de_piece_justificative: types_pj) }
|
|
||||||
let(:types_pj) { [create(:type_de_piece_justificative)] }
|
|
||||||
|
|
||||||
let!(:dossier) { make_dossier }
|
|
||||||
|
|
||||||
let(:pjs) { [] }
|
|
||||||
|
|
||||||
def make_dossier(hidden: false)
|
|
||||||
create(:dossier,
|
|
||||||
procedure: procedure,
|
|
||||||
pieces_justificatives: pjs,
|
|
||||||
hidden_at: hidden ? Time.zone.now : nil)
|
|
||||||
end
|
|
||||||
|
|
||||||
def make_pjs
|
|
||||||
types_pj.map do |tpj|
|
|
||||||
create(:piece_justificative, :contrat, type_de_piece_justificative: tpj)
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
def timestamps(dossier)
|
|
||||||
# Reload dossier because the resolution of in-database timestamps is
|
|
||||||
# different from the resolution of in-memory timestamps, causing the
|
|
||||||
# tests to fail on fractional time differences.
|
|
||||||
dossier.reload
|
|
||||||
|
|
||||||
{
|
|
||||||
created_at: dossier.created_at,
|
|
||||||
updated_at: dossier.updated_at
|
|
||||||
}
|
|
||||||
end
|
|
||||||
|
|
||||||
def expect_storage_service_to_convert_object
|
|
||||||
expect(storage_service).to receive(:make_blob)
|
|
||||||
expect(storage_service).to receive(:copy_from_carrierwave_to_active_storage!)
|
|
||||||
expect(storage_service).to receive(:make_attachment)
|
|
||||||
end
|
|
||||||
|
|
||||||
describe '.number_of_champs_to_migrate' do
|
|
||||||
let!(:other_dossier) { make_dossier }
|
|
||||||
|
|
||||||
it 'reports the numbers of champs to be migrated' do
|
|
||||||
expect(service.number_of_champs_to_migrate(procedure)).to eq(4)
|
|
||||||
end
|
|
||||||
|
|
||||||
context 'when the procedure has hidden dossiers' do
|
|
||||||
let!(:hidden_dossier) { make_dossier(hidden: true) }
|
|
||||||
|
|
||||||
it 'reports the numbers of champs including those of hidden dossiers' do
|
|
||||||
expect(service.number_of_champs_to_migrate(procedure)).to eq(6)
|
|
||||||
end
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
context 'when conversion succeeds' do
|
|
||||||
context 'for the procedure' do
|
|
||||||
it 'types de champ are created for the "pièces jointes" header and for each PJ' do
|
|
||||||
expect { service.convert_procedure_pjs_to_champ_pjs(procedure) }
|
|
||||||
.to change { procedure.types_de_champ.count }
|
|
||||||
.by(types_pj.count + 1)
|
|
||||||
end
|
|
||||||
|
|
||||||
it 'the old types de pj are removed' do
|
|
||||||
expect { service.convert_procedure_pjs_to_champ_pjs(procedure) }
|
|
||||||
.to change { procedure.types_de_piece_justificative.count }
|
|
||||||
.to(0)
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
context 'no notifications are sent to instructeurs' do
|
|
||||||
let!(:initial_dossier_timestamps) { timestamps(dossier) }
|
|
||||||
|
|
||||||
context 'when there is a PJ' do
|
|
||||||
let(:pjs) { make_pjs }
|
|
||||||
|
|
||||||
before do
|
|
||||||
# Reload PJ because the resolution of in-database timestamps is
|
|
||||||
# different from the resolution of in-memory timestamps, causing the
|
|
||||||
# tests to fail on fractional time differences.
|
|
||||||
pjs.last.reload
|
|
||||||
|
|
||||||
expect_storage_service_to_convert_object
|
|
||||||
Timecop.travel(1.hour) { service.convert_procedure_pjs_to_champ_pjs(procedure) }
|
|
||||||
|
|
||||||
# Reload the dossier to see the newly created champs
|
|
||||||
dossier.reload
|
|
||||||
end
|
|
||||||
|
|
||||||
it 'the champ has the same timestamps as the PJ' do
|
|
||||||
expect(dossier.champs.last.created_at).to eq(pjs.last.created_at)
|
|
||||||
expect(dossier.champs.last.updated_at).to eq(pjs.last.updated_at)
|
|
||||||
end
|
|
||||||
|
|
||||||
it 'does not change the dossier timestamps' do
|
|
||||||
expect(dossier.created_at).to eq(initial_dossier_timestamps[:created_at])
|
|
||||||
expect(dossier.updated_at).to eq(initial_dossier_timestamps[:updated_at])
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
context 'when there is no PJ' do
|
|
||||||
before do
|
|
||||||
Timecop.travel(1.hour) { service.convert_procedure_pjs_to_champ_pjs(procedure) }
|
|
||||||
|
|
||||||
# Reload the dossier to see the newly created champs
|
|
||||||
dossier.reload
|
|
||||||
end
|
|
||||||
|
|
||||||
it 'the champ doesn’t trigger a notification' do
|
|
||||||
expect(dossier.champs.last.created_at).to eq(initial_dossier_timestamps[:created_at])
|
|
||||||
expect(dossier.champs.last.updated_at).to eq(initial_dossier_timestamps[:created_at])
|
|
||||||
end
|
|
||||||
|
|
||||||
it 'does not change the dossier timestamps' do
|
|
||||||
expect(dossier.created_at).to eq(initial_dossier_timestamps[:created_at])
|
|
||||||
expect(dossier.updated_at).to eq(initial_dossier_timestamps[:updated_at])
|
|
||||||
end
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
context 'for the dossier' do
|
|
||||||
let(:pjs) { make_pjs }
|
|
||||||
|
|
||||||
before { expect_storage_service_to_convert_object }
|
|
||||||
|
|
||||||
it 'champs are created for the "pièces jointes" header and for each PJ' do
|
|
||||||
expect { service.convert_procedure_pjs_to_champ_pjs(procedure) }
|
|
||||||
.to change { dossier.champs.count }
|
|
||||||
.by(types_pj.count + 1)
|
|
||||||
end
|
|
||||||
|
|
||||||
it 'the old pjs are removed' do
|
|
||||||
expect { service.convert_procedure_pjs_to_champ_pjs(procedure) }
|
|
||||||
.to change { dossier.pieces_justificatives.count }
|
|
||||||
.to(0)
|
|
||||||
end
|
|
||||||
|
|
||||||
context 'when the procedure has several dossiers' do
|
|
||||||
let!(:other_dossier) { make_dossier }
|
|
||||||
|
|
||||||
it 'sends progress callback for each migrated champ' do
|
|
||||||
number_of_champs_to_migrate = service.number_of_champs_to_migrate(procedure)
|
|
||||||
|
|
||||||
progress_count = 0
|
|
||||||
service.convert_procedure_pjs_to_champ_pjs(procedure) do
|
|
||||||
progress_count += 1
|
|
||||||
end
|
|
||||||
|
|
||||||
expect(progress_count).to eq(number_of_champs_to_migrate)
|
|
||||||
end
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
context 'when the dossier is soft-deleted it still gets converted' do
|
|
||||||
let(:pjs) { make_pjs }
|
|
||||||
let!(:dossier) { make_dossier(hidden: true) }
|
|
||||||
|
|
||||||
before { expect_storage_service_to_convert_object }
|
|
||||||
|
|
||||||
it 'champs are created for the "pièces jointes" header and for each PJ' do
|
|
||||||
expect { service.convert_procedure_pjs_to_champ_pjs(procedure) }
|
|
||||||
.to change { dossier.champs.count }
|
|
||||||
.by(types_pj.count + 1)
|
|
||||||
end
|
|
||||||
|
|
||||||
it 'the old pjs are removed' do
|
|
||||||
expect { service.convert_procedure_pjs_to_champ_pjs(procedure) }
|
|
||||||
.to change { dossier.pieces_justificatives.count }
|
|
||||||
.to(0)
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
context 'when there are several pjs for one type' do
|
|
||||||
let(:pjs) { make_pjs + make_pjs }
|
|
||||||
|
|
||||||
it 'only converts the most recent PJ for each type PJ' do
|
|
||||||
expect(storage_service).to receive(:make_blob).exactly(types_pj.count)
|
|
||||||
expect(storage_service).to receive(:copy_from_carrierwave_to_active_storage!).exactly(types_pj.count)
|
|
||||||
expect(storage_service).to receive(:make_attachment).exactly(types_pj.count)
|
|
||||||
|
|
||||||
service.convert_procedure_pjs_to_champ_pjs(procedure)
|
|
||||||
end
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
context 'cleanup when conversion fails' do
  let(:pjs) { make_pjs }
  # Error raised by the second copy; overridden in sub-contexts below.
  let(:exception) { 'LOL no!' }

  # A second dossier on the same procedure whose conversion will fail,
  # so that cross-dossier rollback behaviour can be observed.
  let!(:failing_dossier) do
    create(
      :dossier,
      procedure: procedure,
      pieces_justificatives: make_pjs
    )
  end

  # Snapshot timestamps before conversion so we can assert they are untouched.
  let!(:initial_dossier_timestamps) { timestamps(dossier) }
  let!(:initial_failing_dossier_timestamps) { timestamps(failing_dossier) }

  before do
    allow(storage_service).to receive(:checksum).and_return('cafe')
    allow(storage_service).to receive(:fix_content_type)

    # First copy succeeds; the second raises `exception`, triggering rollback.
    expect(storage_service).to receive(:copy_from_carrierwave_to_active_storage!)
    expect(storage_service).to receive(:copy_from_carrierwave_to_active_storage!)
      .and_raise(exception)

    # Rollback is expected to clean up the blob copied before the failure.
    expect(storage_service).to receive(:delete_from_active_storage!)
  end

  # Runs the conversion one hour in the future (so any timestamp change would
  # be detectable), swallows the expected failure, reloads both dossiers, and
  # returns the caught exception.
  def try_convert(procedure)
    Timecop.travel(1.hour) { service.convert_procedure_pjs_to_champ_pjs(procedure) }
  rescue StandardError, SignalException => e
    dossier.reload
    failing_dossier.reload
    e
  end

  it 'passes on the exception' do
    expect { service.convert_procedure_pjs_to_champ_pjs(procedure) }
      .to raise_error('LOL no!')
  end

  it 'does not create champs' do
    expect { try_convert(procedure) }
      .not_to change { dossier.champs.count }
  end

  it 'does not remove any old pjs' do
    expect { try_convert(procedure) }
      .not_to change { dossier.pieces_justificatives.count }
  end

  it 'does not creates types de champ' do
    expect { try_convert(procedure) }
      .not_to change { procedure.types_de_champ.count }
  end

  it 'does not remove old types de pj' do
    expect { try_convert(procedure) }
      .not_to change { procedure.types_de_piece_justificative.count }
  end

  it 'does not change the dossiers timestamps' do
    try_convert(procedure)

    expect(dossier.updated_at).to eq(initial_dossier_timestamps[:updated_at])
    expect(failing_dossier.updated_at).to eq(initial_failing_dossier_timestamps[:updated_at])
  end

  it 'does not leave stale blobs behind' do
    expect { try_convert(procedure) }
      .not_to change { ActiveStorage::Blob.count }
  end

  it 'does not leave stale attachments behind' do
    expect { try_convert(procedure) }
      .not_to change { ActiveStorage::Attachment.count }
  end

  context 'when some dossiers to roll back are hidden' do
    before do
      # update_column skips callbacks, so hiding doesn't touch updated_at.
      dossier.update_column(:hidden_at, Time.zone.now)
    end

    it 'does not create champs' do
      expect { try_convert(procedure) }
        .not_to change { dossier.champs.count }
    end

    it 'does not change the hidden dossier timestamps' do
      try_convert(procedure)
      expect(dossier.updated_at).to eq(initial_dossier_timestamps[:updated_at])
    end
  end

  context 'when receiving a Signal interruption (like Ctrl+C)' do
    let(:exception) { Interrupt }

    it 'handles the exception as well' do
      # FIX: was `raise_error { Interrupt }` — the braces pass a block to the
      # matcher, and a block's return value is ignored by raise_error, so the
      # expectation matched ANY raised error. Pass the class as an argument
      # so the example actually verifies an Interrupt is propagated.
      expect { service.convert_procedure_pjs_to_champ_pjs(procedure) }.to raise_error(Interrupt)
    end

    it 'does not create champs' do
      expect { try_convert(procedure) }.not_to change { dossier.champs.count }
    end
  end

  context 'when rolling back a dossier fails' do
    before do
      # Rolling back the first dossier blows up…
      allow(service).to receive(:destroy_champ_pj)
        .with(having_attributes(id: dossier.id), anything)
        .and_raise(StandardError)
      # …while every other dossier rolls back normally.
      allow(service).to receive(:destroy_champ_pj)
        .with(any_args)
        .and_call_original
    end

    it 'continues to roll back the other dossiers' do
      expect { try_convert(procedure) }
        .not_to change { failing_dossier.champs.count }
    end

    it 'does not creates types de champ on the procedure' do
      expect { try_convert(procedure) }
        .not_to change { procedure.types_de_champ.count }
    end
  end
end
|
|
||||||
end
|
|
Loading…
Add table
Add a link
Reference in a new issue