Merge pull request #7123 from tchak/feat-archive-with-filters
Export zip files
commit 1dbd8f3e4a
12 changed files with 173 additions and 145 deletions
@@ -18,8 +18,9 @@ class Export < ApplicationRecord
   enum format: {
     csv: 'csv',
     ods: 'ods',
-    xlsx: 'xlsx'
-  }
+    xlsx: 'xlsx',
+    zip: 'zip'
+  }, _prefix: true
 
   enum time_span_type: {
     everything: 'everything',
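Note on the `_prefix: true` option added above: with a prefixed enum, Rails generates predicates and scopes named `format_csv?`, `format_zip?`, `Export.format_zip`, and so on, rather than the bare `csv?`/`zip?`. That is why the hand-written `xlsx?`/`ods?`/`csv?` helpers are deleted further down and their call sites renamed. A minimal, illustrative sketch of the generated methods (these are produced by Rails, not defined anywhere in this diff):

```ruby
# Assuming the enum declaration above on the Export model.
export = Export.new(format: 'zip')

export.format_zip?  # => true   (would have been `export.zip?` without _prefix)
export.format_csv?  # => false
Export.format_zip   # scope selecting exports whose format is 'zip'
```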
@@ -49,11 +50,11 @@ class Export < ApplicationRecord
 
   FORMATS_WITH_TIME_SPAN = [:xlsx, :ods, :csv].flat_map do |format|
     time_span_types.keys.map do |time_span_type|
-      { format: format.to_sym, time_span_type: time_span_type }
+      { format: format, time_span_type: time_span_type }
     end
   end
-  FORMATS = [:xlsx, :ods, :csv].map do |format|
-    { format: format.to_sym }
+  FORMATS = [:xlsx, :ods, :csv, :zip].map do |format|
+    { format: format }
   end
 
   def compute_async
@@ -63,13 +64,7 @@ class Export < ApplicationRecord
   def compute
     load_snapshot!
 
-    file.attach(
-      io: io,
-      filename: filename,
-      content_type: content_type,
-      # We generate the exports ourselves, so they are safe
-      metadata: { virus_scan_result: ActiveStorage::VirusScanner::SAFE }
-    )
+    file.attach(blob)
   end
 
   def since
@@ -92,18 +87,6 @@ class Export < ApplicationRecord
     procedure_presentation_id.present?
   end
 
-  def xlsx?
-    format == self.class.formats.fetch(:xlsx)
-  end
-
-  def ods?
-    format == self.class.formats.fetch(:ods)
-  end
-
-  def csv?
-    format == self.class.formats.fetch(:csv)
-  end
-
   def self.find_or_create_export(format, groupe_instructeurs, time_span_type: time_span_types.fetch(:everything), statut: statuts.fetch(:tous), procedure_presentation: nil)
     create_with(groupe_instructeurs: groupe_instructeurs, procedure_presentation: procedure_presentation, procedure_presentation_snapshot: procedure_presentation&.snapshot)
       .includes(:procedure_presentation)
@@ -124,16 +107,20 @@ class Export < ApplicationRecord
 
     {
       xlsx: {
-        time_span_type: not_filtered.filter(&:xlsx?).index_by(&:time_span_type),
-        statut: filtered.filter(&:xlsx?).index_by(&:statut)
+        time_span_type: not_filtered.filter(&:format_xlsx?).index_by(&:time_span_type),
+        statut: filtered.filter(&:format_xlsx?).index_by(&:statut)
       },
       ods: {
-        time_span_type: not_filtered.filter(&:ods?).index_by(&:time_span_type),
-        statut: filtered.filter(&:ods?).index_by(&:statut)
+        time_span_type: not_filtered.filter(&:format_ods?).index_by(&:time_span_type),
+        statut: filtered.filter(&:format_ods?).index_by(&:statut)
       },
       csv: {
-        time_span_type: not_filtered.filter(&:csv?).index_by(&:time_span_type),
-        statut: filtered.filter(&:csv?).index_by(&:statut)
+        time_span_type: not_filtered.filter(&:format_csv?).index_by(&:time_span_type),
+        statut: filtered.filter(&:format_csv?).index_by(&:statut)
+      },
+      zip: {
+        time_span_type: {},
+        statut: filtered.filter(&:format_zip?).index_by(&:statut)
       }
     }
   end
@@ -177,32 +164,18 @@ class Export < ApplicationRecord
     end
   end
 
-  def filename
-    procedure_identifier = procedure.path || "procedure-#{procedure.id}"
-    "dossiers_#{procedure_identifier}_#{statut}_#{Time.zone.now.strftime('%Y-%m-%d_%H-%M')}.#{format}"
-  end
-
-  def io
+  def blob
     service = ProcedureExportService.new(procedure, dossiers_for_export)
 
     case format.to_sym
     when :csv
-      StringIO.new(service.to_csv)
+      service.to_csv
     when :xlsx
-      StringIO.new(service.to_xlsx)
+      service.to_xlsx
     when :ods
-      StringIO.new(service.to_ods)
-    end
-  end
-
-  def content_type
-    case format.to_sym
-    when :csv
-      'text/csv'
-    when :xlsx
-      'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
-    when :ods
-      'application/vnd.oasis.opendocument.spreadsheet'
+      service.to_ods
+    when :zip
+      service.to_zip
     end
   end
 
@@ -4,7 +4,7 @@ class ArchiveUploader
   # when file size is bigger, active storage expects the chunks + a manifest.
   MAX_FILE_SIZE_FOR_BACKEND_BEFORE_CHUNKING = ENV.fetch('ACTIVE_STORAGE_FILE_SIZE_THRESHOLD_BEFORE_CUSTOM_UPLOAD') { 4.gigabytes }.to_i
 
-  def upload
+  def upload(archive)
     uploaded_blob = create_and_upload_blob
     begin
       archive.file.purge if archive.file.attached?
@@ -21,9 +21,13 @@ class ArchiveUploader
     )
   end
 
+  def blob
+    create_and_upload_blob
+  end
+
   private
 
-  attr_reader :procedure, :archive, :filepath
+  attr_reader :procedure, :filename, :filepath
 
   def create_and_upload_blob
     if active_storage_service_local? || File.size(filepath) < MAX_FILE_SIZE_FOR_BACKEND_BEFORE_CHUNKING
@@ -62,7 +66,7 @@ class ArchiveUploader
   def blob_default_params(filepath)
     {
       key: namespaced_object_key,
-      filename: archive.filename(procedure),
+      filename: filename,
       content_type: 'application/zip',
       metadata: { virus_scan_result: ActiveStorage::VirusScanner::SAFE }
     }
@@ -89,9 +93,9 @@ class ArchiveUploader
     system(ENV.fetch('ACTIVE_STORAGE_BIG_FILE_UPLOADER_WITH_ENCRYPTION_PATH').to_s, filepath, blob.key, exception: true)
   end
 
-  def initialize(procedure:, archive:, filepath:)
+  def initialize(procedure:, filename:, filepath:)
     @procedure = procedure
-    @archive = archive
+    @filename = filename
     @filepath = filepath
   end
 end
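After these changes `ArchiveUploader` no longer holds a reference to the archive record: `upload(archive)` creates the blob and attaches it to the given archive, while the new `blob` method only creates and uploads the blob and leaves the attachment to the caller (this is what `ProcedureExportService#to_zip` relies on further down). A rough sketch of the two entry points, with `procedure`, `archive`, and `zip_path` assumed to exist as in the surrounding code:

```ruby
uploader = ArchiveUploader.new(
  procedure: procedure,
  filename: archive.filename(procedure),
  filepath: zip_path
)

uploader.upload(archive)  # creates + uploads the blob, then attaches it to archive.file
blob = uploader.blob      # only creates + uploads the blob; the caller decides what to attach it to
```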
app/services/downloadable_file_service.rb (new file, +27)
@@ -0,0 +1,27 @@
+class DownloadableFileService
+  ARCHIVE_CREATION_DIR = ENV.fetch('ARCHIVE_CREATION_DIR') { '/tmp' }
+
+  def self.download_and_zip(procedure, attachments, filename, &block)
+    Dir.mktmpdir(nil, ARCHIVE_CREATION_DIR) do |tmp_dir|
+      export_dir = File.join(tmp_dir, filename)
+      zip_path = File.join(ARCHIVE_CREATION_DIR, "#{filename}.zip")
+
+      begin
+        FileUtils.remove_entry_secure(export_dir) if Dir.exist?(export_dir)
+        Dir.mkdir(export_dir)
+
+        download_manager = DownloadManager::ProcedureAttachmentsExport.new(procedure, attachments, export_dir)
+        download_manager.download_all
+
+        Dir.chdir(tmp_dir) do
+          File.delete(zip_path) if File.exist?(zip_path)
+          system 'zip', '-0', '-r', zip_path, filename
+        end
+        yield(zip_path)
+      ensure
+        FileUtils.remove_entry_secure(export_dir) if Dir.exist?(export_dir)
+        File.delete(zip_path) if File.exist?(zip_path)
+      end
+    end
+  end
+end
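The new `DownloadableFileService.download_and_zip` extracts the temp-dir and zip bookkeeping that previously lived in `ProcedureArchiveService`, so both archive creation and the new zip export can share it. The zip only exists while the block runs and is deleted in the `ensure` clause, so callers must persist it inside the block. A minimal usage sketch mirroring the call sites in this PR (`procedure` and `dossiers` are assumed, and the file name is illustrative):

```ruby
attachments = ActiveStorage::DownloadableFile.create_list_from_dossiers(dossiers, true)

DownloadableFileService.download_and_zip(procedure, attachments, "dossiers_export") do |zip_path|
  # The file at zip_path is removed once the block returns, so upload it here.
  ArchiveUploader.new(procedure: procedure, filename: "dossiers_export.zip", filepath: zip_path).blob
end
```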
@@ -1,8 +1,6 @@
 require 'tempfile'
 
 class ProcedureArchiveService
-  ARCHIVE_CREATION_DIR = ENV.fetch('ARCHIVE_CREATION_DIR') { '/tmp' }
-
   def initialize(procedure)
     @procedure = procedure
   end
@@ -27,9 +25,9 @@ class ProcedureArchiveService
 
     attachments = ActiveStorage::DownloadableFile.create_list_from_dossiers(dossiers)
 
-    download_and_zip(archive, attachments) do |zip_filepath|
-      ArchiveUploader.new(procedure: @procedure, archive: archive, filepath: zip_filepath)
-        .upload
+    DownloadableFileService.download_and_zip(@procedure, attachments, zip_root_folder(archive)) do |zip_filepath|
+      ArchiveUploader.new(procedure: @procedure, filename: archive.filename(@procedure), filepath: zip_filepath)
+        .upload(archive)
     end
   end
 
@@ -45,30 +43,6 @@ class ProcedureArchiveService
 
   private
 
-  def download_and_zip(archive, attachments, &block)
-    Dir.mktmpdir(nil, ARCHIVE_CREATION_DIR) do |tmp_dir|
-      archive_dir = File.join(tmp_dir, zip_root_folder(archive))
-      zip_path = File.join(ARCHIVE_CREATION_DIR, "#{zip_root_folder(archive)}.zip")
-
-      begin
-        FileUtils.remove_entry_secure(archive_dir) if Dir.exist?(archive_dir)
-        Dir.mkdir(archive_dir)
-
-        download_manager = DownloadManager::ProcedureAttachmentsExport.new(@procedure, attachments, archive_dir)
-        download_manager.download_all
-
-        Dir.chdir(tmp_dir) do
-          File.delete(zip_path) if File.exist?(zip_path)
-          system 'zip', '-0', '-r', zip_path, zip_root_folder(archive)
-        end
-        yield(zip_path)
-      ensure
-        FileUtils.remove_entry_secure(archive_dir) if Dir.exist?(archive_dir)
-        File.delete(zip_path) if File.exist?(zip_path)
-      end
-    end
-  end
-
   def zip_root_folder(archive)
     "procedure-#{@procedure.id}-#{archive.id}"
   end
@@ -1,5 +1,5 @@
 class ProcedureExportService
-  attr_reader :dossiers
+  attr_reader :procedure, :dossiers
 
   def initialize(procedure, dossiers)
     @procedure = procedure
@@ -8,25 +8,72 @@ class ProcedureExportService
   end
 
   def to_csv
-    SpreadsheetArchitect.to_csv(options_for(:dossiers, :csv))
+    io = StringIO.new(SpreadsheetArchitect.to_csv(options_for(:dossiers, :csv)))
+    create_blob(io, :csv)
   end
 
   def to_xlsx
     # We recursively build multi page spreadsheet
-    @tables.reduce(nil) do |package, table|
+    io = @tables.reduce(nil) do |package, table|
       SpreadsheetArchitect.to_axlsx_package(options_for(table, :xlsx), package)
-    end.to_stream.read
+    end.to_stream
+    create_blob(io, :xlsx)
   end
 
   def to_ods
     # We recursively build multi page spreadsheet
-    @tables.reduce(nil) do |spreadsheet, table|
+    io = StringIO.new(@tables.reduce(nil) do |spreadsheet, table|
       SpreadsheetArchitect.to_rodf_spreadsheet(options_for(table, :ods), spreadsheet)
-    end.bytes
+    end.bytes)
+    create_blob(io, :ods)
+  end
+
+  def to_zip
+    attachments = ActiveStorage::DownloadableFile.create_list_from_dossiers(dossiers, true)
+
+    DownloadableFileService.download_and_zip(procedure, attachments, base_filename) do |zip_filepath|
+      ArchiveUploader.new(procedure: procedure, filename: filename(:zip), filepath: zip_filepath).blob
+    end
   end
 
   private
 
+  def create_blob(io, format)
+    ActiveStorage::Blob.create_and_upload!(
+      io: io,
+      filename: filename(format),
+      content_type: content_type(format),
+      identify: false,
+      # We generate the exports ourselves, so they are safe
+      metadata: { virus_scan_result: ActiveStorage::VirusScanner::SAFE }
+    )
+  end
+
+  def base_filename
+    @base_filename ||= "dossiers_#{procedure_identifier}_#{Time.zone.now.strftime('%Y-%m-%d_%H-%M')}"
+  end
+
+  def filename(format)
+    "#{base_filename}.#{format}"
+  end
+
+  def procedure_identifier
+    procedure.path || "procedure-#{procedure.id}"
+  end
+
+  def content_type(format)
+    case format
+    when :csv
+      'text/csv'
+    when :xlsx
+      'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
+    when :ods
+      'application/vnd.oasis.opendocument.spreadsheet'
+    when :zip
+      'application/zip'
+    end
+  end
+
   def etablissements
     @etablissements ||= dossiers.flat_map do |dossier|
       [dossier.champs, dossier.champs_private]
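With this hunk every `to_*` method returns an uploaded `ActiveStorage::Blob` rather than a raw string or stream, which is what lets `Export#compute` shrink to `file.attach(blob)` and lets the specs read the result back through `Blob#open`. A short sketch of the new call pattern, with `procedure`, `dossiers`, and `export` assumed from the surrounding code:

```ruby
service = ProcedureExportService.new(procedure, dossiers)

blob = service.to_csv               # ActiveStorage::Blob, already uploaded and tagged virus-safe
export.file.attach(blob)            # what Export#compute now does with the returned blob

blob.open { |f| CSV.read(f.path) }  # how the updated specs consume the export
```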
@@ -40,12 +87,12 @@ class ProcedureExportService
   end
 
   def champs_repetables_options
-    revision = @procedure.active_revision
+    revision = procedure.active_revision
     champs_by_stable_id = dossiers
       .flat_map { |dossier| (dossier.champs + dossier.champs_private).filter(&:repetition?) }
       .group_by(&:stable_id)
 
-    @procedure.types_de_champ_for_procedure_presentation.repetition
+    procedure.types_de_champ_for_procedure_presentation.repetition
       .map { |type_de_champ_repetition| [type_de_champ_repetition, type_de_champ_repetition.types_de_champ_for_revision(revision).to_a] }
       .filter { |(_, types_de_champ)| types_de_champ.present? }
       .map do |(type_de_champ_repetition, types_de_champ)|
@@ -85,7 +132,7 @@ class ProcedureExportService
   end
 
   def spreadsheet_columns(format)
-    types_de_champ = @procedure.types_de_champ_for_procedure_presentation.not_repetition.to_a
+    types_de_champ = procedure.types_de_champ_for_procedure_presentation.not_repetition.to_a
 
     Proc.new do |instance|
       instance.send(:"spreadsheet_columns_#{format}", types_de_champ: types_de_champ)
@@ -5,6 +5,7 @@ fr:
   everything_csv_html: Demander un export au format .csv<br>(uniquement les dossiers, sans les champs répétables)
   everything_xlsx_html: Demander un export au format .xlsx
   everything_ods_html: Demander un export au format .ods
+  everything_zip_html: Demander un export au format .zip
   everything_short: Demander un export au format %{export_format}
   everything_pending_html: Un export au format %{export_format} est en train d’être généré<br>(demandé il y a %{export_time})
   everything_ready_html: Télécharger l’export au format %{export_format}<br>(généré il y a %{export_time})
@@ -11,7 +11,7 @@ describe ArchiveCreationJob, type: :job do
       before { expect(InstructeurMailer).not_to receive(:send_archive) }
 
       it 'does not send email and forward error for retry' do
-        allow_any_instance_of(ProcedureArchiveService).to receive(:download_and_zip).and_raise(StandardError, "kaboom")
+        allow(DownloadableFileService).to receive(:download_and_zip).and_raise(StandardError, "kaboom")
         expect { job.perform_now }.to raise_error(StandardError, "kaboom")
         expect(archive.reload.failed?).to eq(true)
       end
@@ -20,7 +20,7 @@ describe ArchiveCreationJob, type: :job do
     context 'when it works' do
       let(:mailer) { double('mailer', deliver_later: true) }
       before do
-        allow_any_instance_of(ProcedureArchiveService).to receive(:download_and_zip).and_return(true)
+        allow(DownloadableFileService).to receive(:download_and_zip).and_return(true)
         expect(InstructeurMailer).to receive(:send_archive).and_return(mailer)
       end
 
@@ -48,9 +48,9 @@ RSpec.describe Export, type: :model do
     context 'when an export is made for one groupe instructeur' do
       let!(:export) { create(:export, groupe_instructeurs: [gi_1, gi_2]) }
 
-      it { expect(Export.find_for_groupe_instructeurs([gi_1.id], nil)).to eq({ csv: { statut: {}, time_span_type: {} }, xlsx: { statut: {}, time_span_type: {} }, ods: { statut: {}, time_span_type: {} } }) }
-      it { expect(Export.find_for_groupe_instructeurs([gi_2.id, gi_1.id], nil)).to eq({ csv: { statut: {}, time_span_type: { 'everything' => export } }, xlsx: { statut: {}, time_span_type: {} }, ods: { statut: {}, time_span_type: {} } }) }
-      it { expect(Export.find_for_groupe_instructeurs([gi_1.id, gi_2.id, gi_3.id], nil)).to eq({ csv: { statut: {}, time_span_type: {} }, xlsx: { statut: {}, time_span_type: {} }, ods: { statut: {}, time_span_type: {} } }) }
+      it { expect(Export.find_for_groupe_instructeurs([gi_1.id], nil)).to eq({ csv: { statut: {}, time_span_type: {} }, xlsx: { statut: {}, time_span_type: {} }, ods: { statut: {}, time_span_type: {} }, zip: { statut: {}, time_span_type: {} } }) }
+      it { expect(Export.find_for_groupe_instructeurs([gi_2.id, gi_1.id], nil)).to eq({ csv: { statut: {}, time_span_type: { 'everything' => export } }, xlsx: { statut: {}, time_span_type: {} }, ods: { statut: {}, time_span_type: {} }, zip: { statut: {}, time_span_type: {} } }) }
+      it { expect(Export.find_for_groupe_instructeurs([gi_1.id, gi_2.id, gi_3.id], nil)).to eq({ csv: { statut: {}, time_span_type: {} }, xlsx: { statut: {}, time_span_type: {} }, ods: { statut: {}, time_span_type: {} }, zip: { statut: {}, time_span_type: {} } }) }
     end
   end
 end
@@ -4,18 +4,18 @@ describe ProcedureArchiveService do
   let(:file) { Tempfile.new }
   let(:fixture_blob) { ActiveStorage::Blob.create_before_direct_upload!(filename: File.basename(file.path), byte_size: file.size, checksum: 'osf') }
 
-  let(:uploader) { ArchiveUploader.new(procedure: procedure, archive: archive, filepath: file.path) }
+  let(:uploader) { ArchiveUploader.new(procedure: procedure, filename: archive.filename(procedure), filepath: file.path) }
 
   describe '.upload' do
     context 'when active storage service is local' do
       it 'uploads with upload_with_active_storage' do
         expect(uploader).to receive(:active_storage_service_local?).and_return(true)
         expect(uploader).to receive(:upload_with_active_storage).and_return(fixture_blob)
-        uploader.upload
+        uploader.upload(archive)
       end
 
       it 'link the created blob as an attachment to the current archive instance' do
-        expect { uploader.upload }
+        expect { uploader.upload(archive) }
           .to change { ActiveStorage::Attachment.where(name: 'file', record_type: 'Archive', record_id: archive.id).count }.by(1)
       end
     end
@@ -31,7 +31,7 @@ describe ProcedureArchiveService do
 
       it 'uploads with upload_with_active_storage' do
         expect(uploader).to receive(:upload_with_active_storage).and_return(fixture_blob)
-        uploader.upload
+        uploader.upload(archive)
       end
     end
 
@@ -40,12 +40,12 @@ describe ProcedureArchiveService do
 
       it 'uploads with upload_with_chunking_wrapper' do
         expect(uploader).to receive(:upload_with_chunking_wrapper).and_return(fixture_blob)
-        uploader.upload
+        uploader.upload(archive)
       end
 
       it 'link the created blob as an attachment to the current archive instance' do
         expect(uploader).to receive(:upload_with_chunking_wrapper).and_return(fixture_blob)
-        expect { uploader.upload }
+        expect { uploader.upload(archive) }
           .to change { ActiveStorage::Attachment.where(name: 'file', record_type: 'Archive', record_id: archive.id).count }.by(1)
       end
     end
spec/services/downloadable_file_service_spec.rb (new file, +37)
@@ -0,0 +1,37 @@
+describe DownloadableFileService do
+  let(:procedure) { create(:procedure, :published) }
+  let(:service) { ProcedureArchiveService.new(procedure) }
+
+  describe '#download_and_zip' do
+    let(:archive) { build(:archive, id: '3') }
+    let(:filename) { service.send(:zip_root_folder, archive) }
+
+    it 'create a tmpdir while block is running' do
+      previous_dir_list = Dir.entries(DownloadableFileService::ARCHIVE_CREATION_DIR)
+
+      DownloadableFileService.download_and_zip(procedure, [], filename) do |_zip_file|
+        new_dir_list = Dir.entries(DownloadableFileService::ARCHIVE_CREATION_DIR)
+        expect(previous_dir_list).not_to eq(new_dir_list)
+      end
+    end
+
+    it 'cleans up its tmpdir after block execution' do
+      expect { DownloadableFileService.download_and_zip(procedure, [], filename) { |zip_file| } }
+        .not_to change { Dir.entries(DownloadableFileService::ARCHIVE_CREATION_DIR) }
+    end
+
+    it 'creates a zip with zip utility' do
+      expected_zip_path = File.join(DownloadableFileService::ARCHIVE_CREATION_DIR, "#{service.send(:zip_root_folder, archive)}.zip")
+      expect(DownloadableFileService).to receive(:system).with('zip', '-0', '-r', expected_zip_path, an_instance_of(String))
+      DownloadableFileService.download_and_zip(procedure, [], filename) { |zip_path| }
+    end
+
+    it 'cleans up its generated zip' do
+      expected_zip_path = File.join(DownloadableFileService::ARCHIVE_CREATION_DIR, "#{service.send(:zip_root_folder, archive)}.zip")
+      DownloadableFileService.download_and_zip(procedure, [], filename) do |_zip_path|
+        expect(File.exist?(expected_zip_path)).to be_truthy
+      end
+      expect(File.exist?(expected_zip_path)).to be_falsey
+    end
+  end
+end
@@ -172,37 +172,6 @@ describe ProcedureArchiveService do
     end
   end
 
-  describe '#download_and_zip' do
-    let(:archive) { build(:archive, id: '3') }
-    it 'create a tmpdir while block is running' do
-      previous_dir_list = Dir.entries(ProcedureArchiveService::ARCHIVE_CREATION_DIR)
-
-      service.send(:download_and_zip, archive, []) do |_zip_file|
-        new_dir_list = Dir.entries(ProcedureArchiveService::ARCHIVE_CREATION_DIR)
-        expect(previous_dir_list).not_to eq(new_dir_list)
-      end
-    end
-
-    it 'cleans up its tmpdir after block execution' do
-      expect { service.send(:download_and_zip, archive, []) { |zip_file| } }
-        .not_to change { Dir.entries(ProcedureArchiveService::ARCHIVE_CREATION_DIR) }
-    end
-
-    it 'creates a zip with zip utility' do
-      expected_zip_path = File.join(ProcedureArchiveService::ARCHIVE_CREATION_DIR, "#{service.send(:zip_root_folder, archive)}.zip")
-      expect(service).to receive(:system).with('zip', '-0', '-r', expected_zip_path, an_instance_of(String))
-      service.send(:download_and_zip, archive, []) { |zip_path| }
-    end
-
-    it 'cleans up its generated zip' do
-      expected_zip_path = File.join(ProcedureArchiveService::ARCHIVE_CREATION_DIR, "#{service.send(:zip_root_folder, archive)}.zip")
-      service.send(:download_and_zip, archive, []) do |_zip_path|
-        expect(File.exist?(expected_zip_path)).to be_truthy
-      end
-      expect(File.exist?(expected_zip_path)).to be_falsey
-    end
-  end
-
   private
 
   def create_dossier_for_month(year, month)
@@ -4,11 +4,9 @@ describe ProcedureExportService do
   describe 'to_data' do
     let(:procedure) { create(:procedure, :published, :for_individual, :with_all_champs) }
     subject do
-      Tempfile.create do |f|
-        f << ProcedureExportService.new(procedure, procedure.dossiers).to_xlsx
-        f.rewind
-        SimpleXlsxReader.open(f.path)
-      end
+      ProcedureExportService.new(procedure, procedure.dossiers)
+        .to_xlsx
+        .open { |f| SimpleXlsxReader.open(f.path) }
     end
 
     let(:dossiers_sheet) { subject.sheets.first }
@@ -178,11 +176,9 @@ describe ProcedureExportService do
 
     context 'as csv' do
       subject do
-        Tempfile.create do |f|
-          f << ProcedureExportService.new(procedure, procedure.dossiers).to_csv
-          f.rewind
-          CSV.read(f.path)
-        end
+        ProcedureExportService.new(procedure, procedure.dossiers)
+          .to_csv
+          .open { |f| CSV.read(f.path) }
       end
 
       let(:nominal_headers) do