commit ec8106dafe
5 changed files with 49 additions and 26 deletions
@@ -22,6 +22,8 @@ class ApiEntreprise::API
     call(RNA_RESOURCE_NAME, siret, procedure_id)
   end

+  private
+
   def self.call(resource_name, siret_or_siren, procedure_id)
     url = url(resource_name, siret_or_siren)
     params = params(siret_or_siren, procedure_id)
@@ -38,7 +40,9 @@ class ApiEntreprise::API
   end

   def self.url(resource_name, siret_or_siren)
-    [API_ENTREPRISE_URL, resource_name, siret_or_siren].join("/")
+    base_url = [API_ENTREPRISE_URL, resource_name, siret_or_siren].join("/")
+
+    "#{base_url}?with_insee_v3=true"
   end

   def self.params(siret_or_siren, procedure_id)
@@ -50,8 +54,6 @@ class ApiEntreprise::API
     }
   end

-  private
-
   def self.token
     Rails.application.secrets.api_entreprise[:key]
   end
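Note on the change above: every API Entreprise resource URL now carries the with_insee_v3=true flag. A rough standalone sketch of the resulting URL shape (the base URL value and SIRET below are illustrative, not taken from this commit):

    # Hypothetical base endpoint; the real value comes from API_ENTREPRISE_URL.
    API_ENTREPRISE_URL = "https://entreprise.api.gouv.fr/v2"

    def url(resource_name, siret_or_siren)
      base_url = [API_ENTREPRISE_URL, resource_name, siret_or_siren].join("/")

      "#{base_url}?with_insee_v3=true"
    end

    url("etablissements", "41816609600051")
    # => "https://entreprise.api.gouv.fr/v2/etablissements/41816609600051?with_insee_v3=true"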
@@ -37,7 +37,7 @@ module ActiveStorage
     def delete_prefixed(prefix)
       instrument :delete_prefixed, prefix: prefix do
         @adapter.session do |s|
-          keys = s.list_prefixed(prefix)
+          keys = s.list_prefixed(prefix).map(&:first)
           s.delete_keys(keys)
         end
       end
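The map(&:first) above follows from the Cellar listing change further down: list_prefixed now yields [key, last_modified] pairs instead of bare keys, so delete_prefixed has to peel the keys back out before deleting. Roughly, with a stubbed listing:

    require 'date'

    # Stubbed result of list_prefixed after this commit: [key, last_modified] pairs.
    listing = [
      ["sample1.jpg", DateTime.new(2011, 2, 26, 1, 56, 20)],
      ["sample2.jpg", DateTime.new(2014, 3, 21, 17, 44, 7)]
    ]

    keys = listing.map(&:first)
    # => ["sample1.jpg", "sample2.jpg"]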
@@ -91,7 +91,7 @@ module Cellar
         if response.is_a?(Net::HTTPSuccess)
           (listing, truncated) = parse_bucket_listing(response.body)
           result += listing
-          marker = listing.last
+          marker = listing.last.first
         else
           # TODO: error handling
           return nil
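Same reasoning for the pagination marker: each listing entry is now a pair, while the bucket listing request expects the marker to be the last key seen, hence listing.last.first. For example:

    listing = [["sample1.jpg", Time.utc(2011, 2, 26)], ["sample2.jpg", Time.utc(2014, 3, 21)]]
    marker = listing.last.first
    # => "sample2.jpg"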
@@ -139,8 +139,13 @@ module Cellar
     def parse_bucket_listing(bucket_listing_xml)
       doc = Nokogiri::XML(bucket_listing_xml)
       listing = doc
-        .xpath('//xmlns:Contents/xmlns:Key')
-        .map(&:text)
+        .xpath('//xmlns:Contents')
+        .map do |node|
+          [
+            node.xpath('xmlns:Key').text,
+            DateTime.iso8601(node.xpath('xmlns:LastModified').text)
+          ]
+        end
       truncated = doc.xpath('//xmlns:IsTruncated').text == 'true'
       [listing, truncated]
     end
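For reference, a self-contained sketch of what the rewritten parse_bucket_listing extracts, using a trimmed S3-style fixture similar to the one in the spec below (the XML here is illustrative):

    require 'date'
    require 'nokogiri'

    xml = <<-XML
      <ListBucketResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
        <IsTruncated>false</IsTruncated>
        <Contents>
          <Key>sample1.jpg</Key>
          <LastModified>2011-02-26T01:56:20.000Z</LastModified>
        </Contents>
      </ListBucketResult>
    XML

    doc = Nokogiri::XML(xml)
    listing = doc
      .xpath('//xmlns:Contents')
      .map do |node|
        [
          node.xpath('xmlns:Key').text,
          DateTime.iso8601(node.xpath('xmlns:LastModified').text)
        ]
      end
    truncated = doc.xpath('//xmlns:IsTruncated').text == 'true'
    # listing   => [["sample1.jpg", #<DateTime: 2011-02-26T01:56:20+00:00>]]
    # truncated => false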
@@ -67,23 +67,32 @@ namespace :'2018_12_03_finish_piece_jointe_transfer' do
   # This task ports them to the new storage after the switch, while being careful not to
   # overwrite attachments that may have changed in the new storage after the switch.
   def refresh_outdated_files
-    rake_puts "Refresh outdated attachments"
-
     refreshed_keys = []
-    missing_keys = []
     old_pj_adapter.session do |old_pjs|
-      keys = old_pjs.list_prefixed('')
-      progress = ProgressReport.new(keys.count)
-      keys.each do |key|
-        new_pj_metadata = new_pjs.files.head(key)
+      rake_puts "List old PJs"
+      old_pj_listing = old_pjs.list_prefixed('')

+      rake_puts "List new PJs"
+      new_pj_listing = {}
+      progress = ProgressReport.new(new_pjs.count.to_i)
+      new_pjs.files.each do |f|
+        new_pj_listing[f.key] = f.last_modified.in_time_zone
+        progress.inc
+      end
+      progress.finish
+
+      rake_puts "Refresh outdated attachments"
+      progress = ProgressReport.new(old_pj_listing.count)
+      old_pj_listing.each do |key, old_pj_last_modified|
+        new_pj_last_modified = new_pj_listing[key]
+
+        if new_pj_last_modified.nil? || new_pj_last_modified < old_pj_last_modified
+          # Looks like we need to refresh this PJ.
+          # Fetch fresh metadata to avoid overwriting a last-minute change
+          new_pj_metadata = new_pjs.files.head(key)
          refresh_needed = new_pj_metadata.nil?
          if !refresh_needed
            new_pj_last_modified = new_pj_metadata.last_modified.in_time_zone
-            old_pj_last_modified = old_pjs.last_modified(key)
-            if old_pj_last_modified.nil?
-              missing_keys.push(key)
-            else
            refresh_needed = new_pj_last_modified < old_pj_last_modified
          end
        end
@@ -114,9 +123,6 @@ namespace :'2018_12_03_finish_piece_jointe_transfer' do
     if verbose?
       rake_puts "Refreshed #{refreshed_keys.count} attachments\n#{refreshed_keys.join(', ')}"
     end
-    if missing_keys.present?
-      rake_puts "Failed to refresh #{missing_keys.count} attachments\n#{missing_keys.join(', ')}"
-    end
   end

   # For OpenStack, the content type cannot be forced dynamically from a direct download URL.
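The net effect of the reworked rake task: both stores are listed up front, and an attachment is re-copied only when it is missing from the new storage or older there than in the old one (double-checked against fresh metadata before copying). The decision itself boils down to something like this sketch (the method name is made up for illustration, not code from the commit):

    require 'date'

    def refresh_needed?(old_last_modified, new_last_modified)
      new_last_modified.nil? || new_last_modified < old_last_modified
    end

    refresh_needed?(DateTime.new(2018, 12, 3), nil)                       # => true
    refresh_needed?(DateTime.new(2018, 12, 3), DateTime.new(2018, 12, 1)) # => true
    refresh_needed?(DateTime.new(2018, 12, 3), DateTime.new(2018, 12, 5)) # => false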
@@ -43,7 +43,7 @@ describe 'CellarAdapter' do
         </Contents>
         <Contents>
           <Key>sample2.jpg</Key>
-          <LastModified>2011-02-26T01:56:20.000Z</LastModified>
+          <LastModified>2014-03-21T17:44:07.000Z</LastModified>
           <ETag>"bf1d737a4d46a19f3bced6905cc8b902"</ETag>
           <Size>142863</Size>
           <StorageClass>STANDARD</StorageClass>
@@ -54,7 +54,17 @@ describe 'CellarAdapter' do

     subject { session.send(:parse_bucket_listing, response) }

-    it { is_expected.to eq([["sample1.jpg", "sample2.jpg"], false]) }
+    it do
+      is_expected.to eq(
+        [
+          [
+            ["sample1.jpg", DateTime.new(2011, 2, 26, 1, 56, 20, 0)],
+            ["sample2.jpg", DateTime.new(2014, 3, 21, 17, 44, 7, 0)]
+          ],
+          false
+        ]
+      )
+    end
   end

   describe 'bulk_deletion_request_body' do