Merge pull request #2412 from betagouv/dev

deploy-rage
Pierre de La Morinerie 2018-08-16 15:32:26 +02:00 committed by GitHub
commit ef4f9c28ee
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
6 changed files with 81 additions and 14 deletions

View file

@@ -69,7 +69,6 @@ gem 'leaflet-draw-rails'
gem 'chartkick'
gem 'logstasher'
gem 'lograge'
gem 'logstash-event'

View file

@@ -463,10 +463,6 @@ GEM
      railties (>= 4)
      request_store (~> 1.0)
    logstash-event (1.2.02)
    logstasher (1.2.2)
      activesupport (>= 4.0)
      logstash-event (~> 1.2.0)
      request_store
    loofah (2.2.2)
      crass (~> 1.0.2)
      nokogiri (>= 1.5.9)
@@ -852,7 +848,6 @@ DEPENDENCIES
  leaflet-rails
  lograge
  logstash-event
  logstasher
  mailjet
  maruku
  mina!

View file

@@ -45,6 +45,7 @@ set :rails_env, ENV["to"]
# Manually create these paths in shared/ (eg: shared/config/database.yml) in your server.
# They will be linked in the 'deploy:link_shared_paths' step.
set :shared_paths, [
  '.env',
  'log',
  'uploads',
  'tmp/pids',

View file

@@ -0,0 +1,72 @@
require 'active_job/logging'
require 'logstash-event'

class ActiveJobLogSubscriber < ::ActiveJob::Logging::LogSubscriber
  def enqueue(event)
    process_event(event, 'enqueue')
  end

  def enqueue_at(event)
    process_event(event, 'enqueue_at')
  end

  def perform(event)
    process_event(event, 'perform')
  end

  def perform_start(event)
    process_event(event, 'perform_start')
  end

  def log(data)
    event = LogStash::Event.new(data)
    event['message'] = "#{data[:job_class]}##{data[:job_id]} at #{data[:scheduled_at]}"
    logger.send(Lograge.log_level, event.to_json)
  end

  def logger
    Lograge.logger.presence || super
  end

  private

  def process_event(event, type)
    data = extract_metadata(event)
    data.merge! extract_exception(event)
    data.merge! extract_scheduled_at(event) if type == 'enqueue_at'
    data.merge! extract_duration(event) if type == 'perform'

    tags = ['job', type]
    tags.push('exception') if data[:exception]

    data[:tags] = tags
    data[:type] = 'tps'

    log(data)
  end

  def extract_metadata(event)
    {
      job_id: event.payload[:job].job_id,
      queue_name: queue_name(event),
      job_class: event.payload[:job].class.to_s,
      job_args: args_info(event.payload[:job]),
    }
  end

  def extract_duration(event)
    { duration: event.duration.to_f.round(2) }
  end

  def extract_exception(event)
    event.payload.slice(:exception)
  end

  def extract_scheduled_at(event)
    { scheduled_at: scheduled_at(event) }
  end

  # The default args_info makes a string. We need objects to turn into JSON.
  def args_info(job)
    job.arguments.map { |arg| arg.try(:to_global_id).try(:to_s) || arg }
  end
end
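
For illustration only (not part of this commit): the subscriber's log method wraps the collected data in a LogStash::Event and writes it as a single JSON line. A minimal sketch of that shape, reusing the same logstash-event calls as the code above; all field values here are invented.

# Illustrative sketch only; not part of the diff above.
require 'logstash-event'

# Hand-built payload mimicking what process_event assembles for a 'perform'
# event (values invented for this sketch).
data = {
  job_id: 'abc123',
  queue_name: 'default',
  job_class: 'SomeJob',
  job_args: [],
  tags: ['job', 'perform'],
  type: 'tps',
  duration: 12.34
}

event = LogStash::Event.new(data)
event['message'] = "#{data[:job_class]}##{data[:job_id]} at #{data[:scheduled_at]}"
puts event.to_json
# => one JSON line containing the fields above plus the @timestamp and @version
#    keys that LogStash::Event adds, ready to be shipped to Logstash.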

View file

@@ -1,3 +1,5 @@
require_relative './active_job_log_subscriber'

Rails.application.configure do
  config.lograge.formatter = Lograge::Formatters::Logstash.new
  config.lograge.base_controller_class = ['ActionController::Base', 'Manager::ApplicationController']
@@ -6,17 +8,16 @@ Rails.application.configure do
  # injected by ansible.
  if !config.lograge.custom_options
    config.lograge.custom_options = lambda do |event|
      exception_object = event.payload[:exception_object]
      {
        type: 'tps',
        tags: ['request', event.payload[:exception] ? 'exception' : nil].compact,
        user_id: event.payload[:user_id],
        user_email: event.payload[:user_email],
        user_roles: event.payload[:user_roles],
        user_agent: event.payload[:user_agent],
        browser: event.payload[:browser],
        browser_version: event.payload[:browser_version],
        platform: event.payload[:platform],
        backtrace: exception_object ? exception_object.backtrace.join("\n") : nil
        platform: event.payload[:platform]
      }.compact
    end
@@ -29,4 +30,8 @@ Rails.application.configure do
  config.lograge.keep_original_rails_log = true
  config.lograge.logger = ActiveSupport::Logger.new Rails.root.join('log', "logstash_#{Rails.env}.log")

  if config.lograge.enabled
    ActiveJobLogSubscriber.attach_to :active_job
  end
end
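
For context (not shown in this diff): the custom_options lambda above reads keys such as user_id, user_email, browser and platform from event.payload. Rails controllers typically place such values in the payload by overriding append_info_to_payload. A hedged sketch of that pattern follows; current_user is an assumption about the application and does not appear in this commit.

# Illustrative sketch only; not part of this commit. Shows one common way the
# payload keys read by the lambda above can be populated.
class ApplicationController < ActionController::Base
  private

  def append_info_to_payload(payload)
    super
    payload[:user_id]    = current_user&.id     # current_user is assumed (e.g. Devise)
    payload[:user_email] = current_user&.email
    payload[:user_agent] = request.user_agent
  end
end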

View file

@@ -1,5 +0,0 @@
if LogStasher.enabled
  LogStasher.add_custom_fields do |fields|
    fields[:type] = "tps"
  end
end