Sha256: d1ae80fc519c4ac2681b08d5d6ad938a14916a3567ec599353238eced4856d40
Contents?: true
Size: 1.62 KB
Versions: 8
Compression:
Stored size: 1.62 KB
Contents
# frozen_string_literal: true

require_relative 'periodic_sync'

module Prefab
  # Accumulates (logger path, severity) usage counts in memory and
  # periodically ships them to the Prefab API via PeriodicSync.
  #
  # Counts are held in a Concurrent::Map keyed by [path, severity];
  # once @max_paths distinct keys exist, further new paths are dropped.
  class LogPathAggregator
    LOG = Prefab::InternalLogger.new(self)

    include Prefab::PeriodicSync

    # Atomic increment block for Concurrent::Map#compute; treats a
    # missing entry (nil) as zero.
    INCREMENT = ->(count) { (count || 0) + 1 }

    # Maps ::Logger severity constants to the matching count field name
    # on PrefabProto::Logger.
    SEVERITY_KEY = {
      ::Logger::DEBUG => 'debugs',
      ::Logger::INFO => 'infos',
      ::Logger::WARN => 'warns',
      ::Logger::ERROR => 'errors',
      ::Logger::FATAL => 'fatals'
    }.freeze

    attr_reader :data

    # @param client [Object] provides #instance_hash and #namespace for payloads
    # @param max_paths [Integer] cap on distinct [path, severity] keys tracked
    # @param sync_interval [Numeric] seconds between periodic flushes
    def initialize(client:, max_paths:, sync_interval:)
      @max_paths = max_paths
      @client = client
      @name = 'log_path_aggregator'
      @data = Concurrent::Map.new
      @last_data_sent = nil
      @last_request = nil
      start_periodic_sync(sync_interval)
    end

    # Record one log event for the given path/severity pair.
    # Silently drops new entries once the @max_paths cap is reached.
    def push(path, severity)
      return if @data.size >= @max_paths
      @data.compute([path, severity], &INCREMENT)
    end

    private

    # Upload a drained snapshot of counts (to_ship) on a background pool
    # thread. start_at_was marks the beginning of the aggregation window;
    # end_at is stamped at upload time.
    def flush(to_ship, start_at_was)
      pool.post do
        LOG.debug "Uploading stats for #{to_ship.size} paths"

        # Fold per-severity counts into one PrefabProto::Logger per path.
        by_path = Hash.new { |h, k| h[k] = PrefabProto::Logger.new }
        to_ship.each_with_object(by_path) do |((path, severity), count), acc|
          proto = acc[path]
          proto['logger_name'] = path
          proto[SEVERITY_KEY[severity]] = count
        end

        payload = PrefabProto::Loggers.new(
          loggers: by_path.values,
          start_at: start_at_was,
          end_at: Prefab::TimeHelpers.now_in_ms,
          instance_hash: @client.instance_hash,
          namespace: @client.namespace
        )

        result = post('/api/v1/known-loggers', payload)

        LOG.debug "Uploaded #{to_ship.size} paths: #{result.status}"
      end
    end
  end
end
Version data entries
8 entries across 8 versions & 1 rubygems