Fixes to the PJ migration task (#3902)

Fixes to the migration task for pièces justificatives (supporting documents)
Commit f1875e9c25 by Pierre de La Morinerie, 2019-05-29 11:07:54 +02:00 (committed via GitHub)
6 changed files with 89 additions and 14 deletions

@@ -79,7 +79,7 @@ class CarrierwaveActiveStorageMigrationService
     ActiveStorage::Blob.create(
       filename: filename || uploader.filename,
-      content_type: uploader.content_type,
+      content_type: content_type,
       byte_size: uploader.size,
       checksum: checksum(uploader),
       created_at: created_at,
@@ -87,6 +87,20 @@ class CarrierwaveActiveStorageMigrationService
     )
   end

+  def make_empty_blob(uploader, created_at, filename: nil)
+    content_type = uploader.content_type || 'text/plain'
+
+    blob = ActiveStorage::Blob.build_after_upload(
+      io: StringIO.new('File not found when migrating from CarrierWave.'),
+      filename: filename || uploader.filename,
+      content_type: content_type,
+      metadata: { virus_scan_result: ActiveStorage::VirusScanner::SAFE }
+    )
+    blob.created_at = created_at
+    blob.save!
+    blob
+  end
+
   def checksum(uploader)
     hex_to_base64(uploader.file.send(:file).etag)
   end
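
Note: the checksum above relies on a hex_to_base64 helper defined elsewhere in the service and not shown in this diff. As background, ActiveStorage stores blob checksums as base64-encoded MD5 digests, while the S3 ETag used here is (for non-multipart uploads) a hex-encoded MD5 digest, hence the conversion. A minimal sketch of such a helper, offered only as an illustration and not necessarily the project's actual implementation:

    require 'base64'

    # Hypothetical helper (not part of this commit): converts a hex-encoded MD5
    # digest, such as an S3 ETag, into the base64 encoding ActiveStorage expects.
    def hex_to_base64(hexdigest)
      Base64.strict_encode64([hexdigest].pack('H*'))
    end

    hex_to_base64('d41d8cd98f00b204e9800998ecf8427e') # => "1B2M2Y8AsgTpgAmY7PhCfg=="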

@@ -75,12 +75,18 @@ class PieceJustificativeToChampPieceJointeMigrationService
   end

   def convert_pj_to_champ!(pj, champ)
-    blob = make_blob(pj)
-
-    # Upload the file before creating the attachment to make sure MIME type
-    # identification doesn't fail.
-    storage_service.copy_from_carrierwave_to_active_storage!(pj.content.path, blob)
-    attachment = storage_service.make_attachment(champ, 'piece_justificative_file', blob)
+    actual_file_exists = pj.content.file.send(:file)
+
+    if actual_file_exists
+      blob = make_blob(pj)
+
+      # Upload the file before creating the attachment to make sure MIME type
+      # identification doesn't fail.
+      storage_service.copy_from_carrierwave_to_active_storage!(pj.content.path, blob)
+      attachment = storage_service.make_attachment(champ, 'piece_justificative_file', blob)
+    else
+      make_empty_blob(pj)
+    end

     # By reloading, we force ActiveStorage to look at the attachment again, and see
     # that one exists now. We do this so that, if we need to roll back and destroy the champ,
@@ -112,4 +118,8 @@ class PieceJustificativeToChampPieceJointeMigrationService
   def make_blob(pj)
     storage_service.make_blob(pj.content, pj.updated_at.iso8601, filename: pj.original_filename)
   end
+
+  def make_empty_blob(pj)
+    storage_service.make_empty_blob(pj.content, pj.updated_at.iso8601, filename: pj.original_filename)
+  end
 end
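
For context on the reload mentioned in the comment above, here is a rough sketch of the behaviour being relied on. The attachment name comes from the diff, but the surrounding rollback code is not part of this commit and the sketch is illustrative only:

    # The attachment row was created directly by the storage service, so the
    # champ's in-memory association does not yet know about it.
    champ.piece_justificative_file.attached?   # => false (stale, cached state)

    champ.reload
    champ.piece_justificative_file.attached?   # => true

    # If the migration has to roll back, destroying the reloaded champ now also
    # cleans up the attachment it is aware of.
    champ.destroy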

@@ -33,12 +33,8 @@ class PiecesJustificativesService
   end

   def self.types_pj_as_types_de_champ(procedure)
-    last_champ = procedure.types_de_champ.last
-    if last_champ.present?
-      order_place = last_champ.order_place + 1
-    else
-      order_place = 0
-    end
+    max_order_place = procedure.types_de_champ.pluck(:order_place).compact.max || -1
+    order_place = max_order_place + 1

     types_de_champ = [
       TypeDeChamp.new(
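
A quick worked illustration of the new order_place computation (the arrays below are stand-ins for the order_place values plucked from the existing types de champ):

    [0, 1].compact.max || -1     # => 1, so the first PJ-derived champ gets order_place 2
    [0, nil].compact.max || -1   # => 0, a champ without an order_place is simply ignored
    [].compact.max || -1         # => -1, so with no existing champs numbering starts at 0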

@@ -1,6 +1,9 @@
 require Rails.root.join("lib", "tasks", "task_helper")

 namespace :pieces_justificatives do
+  desc <<~EOD
+    Migrate the PJ to champs for a single PROCEDURE_ID.
+  EOD
   task migrate_procedure_to_champs: :environment do
     procedure_id = ENV['PROCEDURE_ID']
     procedure = Procedure.find(procedure_id)
@@ -17,6 +20,9 @@ namespace :pieces_justificatives do
     progress.finish
   end

+  desc <<~EOD
+    Migrate the PJ to champs for several procedure ids, from RANGE_START to RANGE_END.
+  EOD
   task migrate_procedures_range_to_champs: :environment do
     if ENV['RANGE_START'].nil? || ENV['RANGE_END'].nil?
       fail "RANGE_START and RANGE_END must be specified"

@@ -8,7 +8,7 @@ describe CarrierwaveActiveStorageMigrationService do
   end

   describe '.make_blob' do
-    let(:pj) { create(:piece_justificative, :rib) }
+    let(:pj) { create(:piece_justificative, :rib, updated_at: Time.zone.local(2019, 01, 01, 12, 00)) }
     let(:identify) { false }

     before do
@@ -17,6 +17,8 @@ describe CarrierwaveActiveStorageMigrationService do
     subject(:blob) { service.make_blob(pj.content, pj.updated_at.iso8601, filename: pj.original_filename, identify: identify) }

+    it { expect(blob.created_at).to eq pj.updated_at }
+
     it 'marks the blob as already scanned by the antivirus' do
       expect(blob.metadata[:virus_scan_result]).to eq(ActiveStorage::VirusScanner::SAFE)
     end
@@ -34,4 +36,37 @@ describe CarrierwaveActiveStorageMigrationService do
       end
     end
   end
+
+  describe '.make_empty_blob' do
+    let(:pj) { create(:piece_justificative, :rib, updated_at: Time.zone.local(2019, 01, 01, 12, 00)) }
+
+    before 'set the underlying stored file as missing' do
+      allow(pj.content.file).to receive(:file).and_return(nil)
+    end
+
+    subject(:blob) { service.make_empty_blob(pj.content, pj.updated_at.iso8601, filename: pj.original_filename) }
+
+    it { expect(blob.created_at).to eq pj.updated_at }
+
+    it 'marks the blob as already scanned by the antivirus' do
+      expect(blob.metadata[:virus_scan_result]).to eq(ActiveStorage::VirusScanner::SAFE)
+    end
+
+    it 'sets the blob MIME type from the file' do
+      expect(blob.identified).to be true
+      expect(blob.content_type).to eq 'application/pdf'
+    end
+
+    context 'when the file metadata are also missing' do
+      before do
+        allow(pj).to receive(:original_filename).and_return(nil)
+        allow(pj.content).to receive(:content_type).and_return(nil)
+      end
+
+      it 'falls back to default values' do
+        expect(blob.filename).to eq pj.content.filename
+        expect(blob.content_type).to eq 'text/plain'
+      end
+    end
+  end
 end

@@ -112,11 +112,25 @@ describe PiecesJustificativesService do
         create(
           :procedure,
           types_de_piece_justificative: tpjs,
-          types_de_champ: [build(:type_de_champ, order_place: 0)]
+          types_de_champ: [build(:type_de_champ, order_place: 0), build(:type_de_champ, order_place: 1)]
         )
       end

+      it 'generates a sequence of incrementing order_places that continues where the last type de champ left off' do
+        expect(subject.pluck(:order_place)).to contain_exactly(2, 3)
+      end
     end
+
+    context 'with pre-existing champs without an order place' do
+      let(:procedure) do
+        create(
+          :procedure,
+          types_de_piece_justificative: tpjs,
+          types_de_champ: [build(:type_de_champ, order_place: 0), build(:type_de_champ, order_place: nil)]
+        )
+      end
+
+      it 'ignores champs without an order place' do
+        expect(subject.pluck(:order_place)).to contain_exactly(1, 2)
+      end
+    end