Merge pull request #7121 from betagouv/US/retry-once-when-call-to-script-fails

feat(ArchiveUploader.upload_with_chunking_wrapper): expect to retry call to wrapper once
This commit is contained in:
mfo 2022-04-05 15:38:30 +02:00 committed by GitHub
commit 4baa9ee43c
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
2 changed files with 37 additions and 6 deletions

View file

@@ -48,11 +48,11 @@ class ArchiveUploader
params = blob_default_params(filepath).merge(byte_size: File.size(filepath), params = blob_default_params(filepath).merge(byte_size: File.size(filepath),
checksum: Digest::SHA256.file(filepath).hexdigest) checksum: Digest::SHA256.file(filepath).hexdigest)
blob = ActiveStorage::Blob.create_before_direct_upload!(**params) blob = ActiveStorage::Blob.create_before_direct_upload!(**params)
if syscall_to_custom_uploader(blob) if retryable_syscall_to_custom_uploader(blob)
return blob return blob
else else
blob.purge blob.purge
fail "custom archive attachment failed, should it be retried ?" fail "custom archive attachment failed twice, retry later"
end end
end end
@@ -73,6 +73,18 @@ class ArchiveUploader
@namespaced_object_key ||= "archives/#{Date.today.strftime("%Y-%m-%d")}/#{SecureRandom.uuid}" @namespaced_object_key ||= "archives/#{Date.today.strftime("%Y-%m-%d")}/#{SecureRandom.uuid}"
end end
# Calls the custom uploader script, retrying once on failure.
#
# The upload proxy is occasionally flaky with very large archives, so a
# single transient failure is absorbed by one bounded retry.
#
# Returns the result of syscall_to_custom_uploader on success, or false
# when both attempts raised — callers treat any falsy return as a
# definitive failure (the blob is purged and an error is raised there).
def retryable_syscall_to_custom_uploader(blob)
  retries_left = 1
  begin
    syscall_to_custom_uploader(blob)
  rescue StandardError
    if retries_left > 0
      retries_left -= 1
      retry
    end
    # Both attempts failed: deliberately swallow the exception and
    # signal failure with an explicit falsy value (was implicit nil).
    false
  end
end
# Shells out to the chunking/encryption upload script whose path is
# configured via ACTIVE_STORAGE_BIG_FILE_UPLOADER_WITH_ENCRYPTION_PATH,
# passing the local file path and the destination blob key.
# Raises (exception: true) when the script exits with a non-zero status;
# ENV.fetch raises KeyError when the variable is unset.
def syscall_to_custom_uploader(blob)
  uploader_script = ENV.fetch('ACTIVE_STORAGE_BIG_FILE_UPLOADER_WITH_ENCRYPTION_PATH').to_s
  system(uploader_script, filepath, blob.key, exception: true)
end

View file

@@ -57,14 +57,33 @@ describe ProcedureArchiveService do
let(:fake_blob_bytesize) { 100.gigabytes }

# Stub the filesystem and digest calls so the specs never need a real
# 100 GB file on disk. The syscall stub now lives in each context
# (success / fails-once / fails-twice set up their own expectations).
before do
  expect(File).to receive(:size).with(file.path).and_return(fake_blob_bytesize)
  expect(Digest::SHA256).to receive(:file).with(file.path).and_return(double(hexdigest: fake_blob_checksum.hexdigest))
end
context 'when it just works' do
  it 'creates a blob' do
    # Happy path: the uploader script succeeds on the first call.
    expect(uploader).to receive(:syscall_to_custom_uploader).and_return(true)
    expect { uploader.send(:upload_with_chunking_wrapper) }
      .to change { ActiveStorage::Blob.where(checksum: fake_blob_checksum.hexdigest, byte_size: fake_blob_bytesize).count }.by(1)
  end
end
# The DS proxy can be flaky with archives above ~20 GB: one transient
# failure should be absorbed by the single retry.
context 'when it fails once (flaky proxy on big archives) and the retry succeeds' do
  it 'retries' do
    # First invocation raises, second returns true: the retry wrapper
    # absorbs the transient failure and the blob is still created.
    expect(uploader).to receive(:syscall_to_custom_uploader).with(anything).once.and_raise(StandardError, "BOOM")
    expect(uploader).to receive(:syscall_to_custom_uploader).with(anything).once.and_return(true)
    expect { uploader.send(:upload_with_chunking_wrapper) }
      .to change { ActiveStorage::Blob.where(checksum: fake_blob_checksum.hexdigest, byte_size: fake_blob_bytesize).count }.by(1)
  end
end
context 'when it fails twice' do
  it 'does not retry more than once' do
    # Both invocations raise: the first triggers the single bounded
    # retry, the second exhausts it, so upload_with_chunking_wrapper
    # purges the blob and fails with the message asserted below.
    expect(uploader).to receive(:syscall_to_custom_uploader).with(anything).twice.and_raise(StandardError, "BOOM")
    expect { uploader.send(:upload_with_chunking_wrapper) }
      .to raise_error(RuntimeError, "custom archive attachment failed twice, retry later")
  end
end
end
end end