#!/usr/bin/env ruby
require 'rubygems'
require 'timeout'
require 'optparse'
require 'time'
require 'multi_json'
require 'hashlib'
require 'set'

# top-level DSL helper: report plugin files call `report do ... end` and the
# block is evaluated by Onering::Reporter.add
def report(&block)
  Onering::Reporter.add(&block)
end

module Onering
  class Reporter
    DEFAULT_PLUGIN_GEMNAMES = [
      'onering-report-plugins'
    ]

    DEFAULT_PLUGIN_PATH = [
      '/var/lib/onering/reporter'
    ]

    DEFAULT_FACTER_PATH = [
      '/etc/facter'
    ]

    DEFAULT_CACHE_FILE = '/var/tmp/.onering-report-cache.json'
    DEFAULT_CACHE_MAXAGE = 600

    class << self
      # minimal setup, assuming only the state the methods below rely on
      def setup(options={})
        @_report = {}
        @options = options
        @path = DEFAULT_PLUGIN_PATH.dup
        @facter_path = DEFAULT_FACTER_PATH.dup
        @detected_gems = []

        # detect installed plugin gems (assumption: any installed gem whose
        # name starts with "onering-report-" provides reporter plugins)
        begin
          @detected_gems = Gem::Specification.select{|g|
            g.name.start_with?('onering-report-')
          }.collect{|g|
            g.name
          }
        rescue Exception => e
          Onering::Logger.warn("Unable to detect plugin gems: #{e.class.name} - #{e.message}", "Onering::Reporter")
        end

        # add gem paths to @path and @facter_path
        ([*Onering::Config.get('reporter.plugin_gems', [])] + @detected_gems + DEFAULT_PLUGIN_GEMNAMES).compact.each do |g|
          begin
            p = File.join(Util.gem_path(g), 'lib')
            @path << File.join(p, 'reporter')
            @facter_path << File.join(p, 'facter')
          rescue Gem::LoadError => e
            Onering::Logger.warn("Error loading gem: #{e.message}", "Onering::Reporter")
            next
          end
        end

        begin
          ENV['FACTERLIB'] = @facter_path.join(':')
          require 'facter'
          Onering::Logger.debug("Facter loaded successfully, FACTERLIB is #{ENV['FACTERLIB']}", "Onering::Reporter")
        rescue LoadError
          Onering::Logger.error("Unable to load Facter library", "Onering::Reporter")
        end
      end

      def load_plugins()
        # load plugins from @path
        @path.compact.each do |root|
          begin
            Dir["#{root}/*"].each do |directory|
              # only process top-level directories
              if File.directory?(directory)
                d = File.basename(directory)

                # allow plugins to be conditionally loaded based on fact values:
                #   default          - always load
                #   <fact>-<value>   - load only if the Facter fact <fact> equals <value>
                #                      (e.g. a directory named "kernel-linux" is only
                #                      loaded when the "kernel" fact is "linux")
                if d == 'default' or Facter.value(d.split('-', 2).first).to_s.downcase.nil_empty == d.split('-', 2).last.to_s.downcase.nil_empty
                  Dir[File.join(directory, '*.rb')].each do |plugin|
                    plugin = File.basename(plugin, '.rb')

                    begin
                      Timeout.timeout((@options[:plugin_timeout] || 10).to_i) do
                        Onering::Logger.debug("Loading plugin #{plugin}", "Onering::Reporter")
                        require "#{directory}/#{plugin}"
                      end
                    rescue Timeout::Error
                      Onering::Logger.warn("Plugin #{plugin} took too long to return, skipping", "Onering::Reporter")
                    end
                  end
                end
              end
            end
          rescue Exception => e
            # re-raise the overall report timeout; log and skip anything else
            raise e if e.is_a?(Timeout::Error)
            Onering::Logger.warn(e.message, "Onering::Reporter/#{e.class.name}")

            e.backtrace.each do |eb|
              Onering::Logger.debug(eb, "Onering::Reporter/#{e.class.name}")
            end

            next
          end
        end
      end

      def add(&block)
        if block_given?
          instance_eval(&block)
        end
      end

      def property(name, value=nil)
        unless value.nil?
          Onering::Logger.debug3("-> Set property #{name.to_s} (was: #{@_report[:properties].get(name.to_s, 'null')})", "Onering::Reporter")
          @_report[:properties].set(name.to_s, value)
        end
      end

      def stat(name, value=nil)
        unless value.nil?
          @_report[:properties][:metrics] ||= {}
          Onering::Logger.debug3("-> Set metric #{name.to_s}", "Onering::Reporter")
          @_report[:properties][:metrics].set(name.to_s, value)
        end
      end

      def report()
        @id = (@options[:id] || Onering::Util.fact('hardwareid', nil))

        if not @id.nil?
          if @options[:nocache]
            return _generated_report()
          else
            rv = _cached_report()
            return _generated_report() if rv.nil? or rv.empty?
            return rv
          end
        else
          Onering::Logger.fatal!("Cannot generate report without a hardware ID", "Onering::Reporter")
        end

        return {}
      end

      def _generated_report()
        Timeout.timeout((@options[:timeout] || 60).to_i) do
          hostname = (Facter.value('fqdn') rescue %x{hostname -f}.strip.chomp)

          @_report = {
            :id         => @id,
            :name       => hostname,
            :aliases    => @options[:aliases],
            :tags       => @options[:tags],
            :status     => (@options[:status] || 'online'),
            :inventory  => true,
            :properties => {}
          }

          # loads plugins and populates @_report
          load_plugins

          return @_report.stringify_keys()
        end

        return {}
      end

      def _cached_report()
        cachefile = (@options[:cachefile] || DEFAULT_CACHE_FILE)

        # :retry is thrown once the cache file has been (re)generated; the caller
        # falls back to a freshly generated report when an empty hash is returned
        catch(:retry) do
          if File.readable?(cachefile)
            Onering::Logger.debug("Loading cache file at #{cachefile}", "Onering::Reporter")
            cache = File.read(cachefile)
            cache = (MultiJson.load(cache) rescue {})

            if _cache_expired?(cache, @options[:maxage])
              Onering::Logger.debug("Cache expired, regenerating...", "Onering::Reporter")
              throw :retry if _update_cache_file(cachefile)
            end

            # remove cached_at key
            Onering::Logger.debug("Using cached data (#{Time.now.to_i - Time.parse(cache.get('cached_at')).to_i} seconds old)", "Onering::Reporter")
            cache.delete('cached_at')
            return cache
          else
            Onering::Logger.debug("Report cache file could not be read at #{cachefile}", "Onering::Reporter")
            throw :retry if _update_cache_file(cachefile)
          end
        end

        return {}
      end

      def _update_cache_file(cachefile=DEFAULT_CACHE_FILE)
        begin
          File.open(cachefile, 'w+') do |file|
            Onering::Logger.debug("Regenerating cache file at #{cachefile}", "Onering::Reporter")
            report = _generated_report()
            report['cached_at'] = Time.now.strftime('%Y-%m-%dT%H:%M:%S%z')

            json = MultiJson.dump(report, :pretty => true)
            file.puts(json)
          end

          return true
        rescue Exception => e
          Onering::Logger.info("Unable to write cache file #{cachefile}: #{e.class.name} - #{e.message}", "Onering::Reporter")
          return false
        end
      end

      def _cache_expired?(cache, age=DEFAULT_CACHE_MAXAGE)
        # treat a missing maxage option as the default, not as "always expired"
        age = DEFAULT_CACHE_MAXAGE if age.nil?

        if cache.is_a?(Hash)
          return (Time.parse(cache.get('cached_at')) < (Time.now - age) rescue true)
        else
          return true
        end
      end
    end
  end
end