lib/onering/plugins/reporter.rb in onering-client-0.1.7 vs lib/onering/plugins/reporter.rb in onering-client-0.2.0
- only in onering-client-0.1.7 (removed)
+ only in onering-client-0.2.0 (added)
@@ -3,14 +3,10 @@
require 'timeout'
require 'optparse'
require 'hashlib'
require 'set'
-def report(&block)
- Onering::Reporter.add(&block)
-end
-
module Onering
class Reporter
DEFAULT_PLUGIN_GEMNAMES=[
'onering-report-plugins'
]
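
The hunk above drops the top-level report(&block) helper that forwarded a plugin's block to Onering::Reporter.add. A minimal sketch of the 0.1.7 calling convention that relied on that helper, assuming a hypothetical plugin file named example.rb in the always-loaded default directory:

  # default/example.rb (0.1.7 style, loaded via require)
  # `report` here is the global helper removed above; it hands the block to
  # Onering::Reporter.add, which instance_eval's it against the Reporter
  # singleton, where `property` is defined.
  report do
    property 'example.loaded', true
  end
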
@@ -24,238 +20,266 @@
]
DEFAULT_CACHE_FILE='/var/tmp/.onering-report-cache.json'
DEFAULT_CACHE_MAXAGE=600
- class<<self
- include Onering::Util
+ include Onering::Util
- attr_reader :facter_path
+ attr_reader :facter_path
- def setup(config={})
- @options = config
- @facter_path = DEFAULT_FACTER_PATH
- @detected_gems = []
+ class PluginDelegate
+ def initialize(reporter, options={})
+ @_name = options.get(:plugin)
+ @_path = options.get(:path)
- @path = [*Onering::Config.get('reporter.plugin_path',[])]
- @path += DEFAULT_PLUGIN_PATH
+ Onering::Logger.debug3("Creating plugin delegate for plugin #{@_name}", "Onering::Reporter::PluginDelegate")
+ @_reporter = reporter
+ end
+ def get_binding()
+ return binding()
+ end
- begin
- specs = Set.new()
- @detected_gems = []
+ # DSL methods
+ # -------------------------------------------------------------------------
+ def report(&block)
+ if block_given?
+ start = Time.now.to_f
+ yield
- Gem::Specification.each do |spec|
- specs << spec.name
- end
+ finish = (Time.now.to_f - start.to_f)
+ finish = (finish.round(4) rescue finish)
+ Onering::Logger.debug3("Finished evaluating report for plugin #{@_name}, took #{finish} seconds", "Onering::Reporter::PluginDelegate")
+ end
+ end
- @detected_gems = (specs.to_a.select{|i|
- i =~ /^onering-report-/
- } - DEFAULT_PLUGIN_GEMNAMES)
- rescue Exception => e
- Onering::Logger.warn("Unable to detect plugin gems: #{e.class.name} - #{e.message}", "Onering::Reporter")
+ def property(name, value=nil)
+ @_reporter.property(name, value)
+ end
+
+ def stat(name, value=nil)
+ unless value.nil?
+ @_reporter.property((['metrics']+name.to_s.split('.')).join('.'), value)
end
+ end
+ end
- # add gem paths to the @path
- ([*Onering::Config.get('reporter.plugin_gems',[])]+@detected_gems+DEFAULT_PLUGIN_GEMNAMES).compact.each do |g|
- begin
- p = File.join(Util.gem_path(g), 'lib')
- @path << File.join(p, 'reporter')
- @facter_path << File.join(p, 'facter')
- rescue Gem::LoadError => e
- Onering::Logger.warn("Error loading gem: #{e.message}", "Onering::Reporter")
- next
- end
+ def initialize(config={})
+ @options = config
+ @facter_path = DEFAULT_FACTER_PATH
+ @detected_gems = []
+
+ @path = [*Onering::Config.get('reporter.plugin_path',[])]
+ @path += DEFAULT_PLUGIN_PATH
+
+
+ begin
+ specs = Set.new()
+ @detected_gems = []
+
+ Gem::Specification.each do |spec|
+ specs << spec.name
end
- begin
- ENV['FACTERLIB'] = @facter_path.join(':')
- require 'facter'
- Onering::Logger.debug("Facter loaded successfully, FACTERLIB is #{ENV['FACTERLIB']}", "Onering::Reporter")
+ @detected_gems = (specs.to_a.select{|i|
+ i =~ /^onering-report-/
+ } - DEFAULT_PLUGIN_GEMNAMES)
+ rescue Exception => e
+ Onering::Logger.warn("Unable to detect plugin gems: #{e.class.name} - #{e.message}", "Onering::Reporter")
+ end
- rescue LoadError
- Onering::Logger.error("Unable to load Facter library", "Onering::Reporter")
+ # add gem paths to the @path
+ ([*Onering::Config.get('reporter.plugin_gems',[])]+@detected_gems+DEFAULT_PLUGIN_GEMNAMES).compact.each do |g|
+ begin
+ p = File.join(Util.gem_path(g), 'lib')
+ @path << File.join(p, 'reporter')
+ @facter_path << File.join(p, 'facter')
+ rescue Gem::LoadError => e
+ Onering::Logger.warn("Error loading gem: #{e.message}", "Onering::Reporter")
+ next
end
end
- def load_plugins()
- # load plugins from @path
- @path.compact.each do |root|
- begin
- Dir["#{root}/*"].each do |directory|
+ begin
+ ENV['FACTERLIB'] = @facter_path.join(':')
+ require 'facter'
+ Onering::Logger.debug("Facter loaded successfully, FACTERLIB is #{ENV['FACTERLIB']}", "Onering::Reporter")
- # only process top-level directories
- if File.directory?(directory)
- d = File.basename(directory)
+ rescue LoadError
+ Onering::Logger.error("Unable to load Facter library", "Onering::Reporter")
+ end
+ end
- # allow plugins to be conditionally loaded based on fact values:
- # default - always load
- # <fact>-<fact_value> - load if <fact> == <fact_value>
- #
- if d == 'default' or Facter.value(d.split('-',2).first).to_s.downcase.nil_empty == d.split('-',2).last.to_s.downcase.nil_empty
- Dir[File.join(directory, '*.rb')].each do |plugin|
- plugin = File.basename(plugin, '.rb')
+ def load_plugins()
- begin
- Timeout.timeout((@options[:plugin_timeout] || 10).to_i) do
- Onering::Logger.debug("Loading plugin #{plugin}", "Onering::Reporter")
- require "#{directory}/#{plugin}"
- end
- rescue Timeout::Error
- Onering::Logger.warn("Plugin #{plugin} took too long to return, skipping", "Onering::Reporter")
+ # load plugins from @path
+ @path.compact.each do |root|
+ begin
+ Dir["#{root}/*"].each do |directory|
+
+ # only process top-level directories
+ if File.directory?(directory)
+ d = File.basename(directory)
+
+ Onering::Logger.debug("Loading plugins from path #{directory}", "Onering::Reporter")
+
+ # allow plugins to be conditionally loaded based on fact values:
+ # default - always load
+ # <fact>-<fact_value> - load if <fact> == <fact_value>
+ #
+ if d == 'default' or Facter.value(d.split('-',2).first).to_s.downcase.nil_empty == d.split('-',2).last.to_s.downcase.nil_empty
+
+ Dir[File.join(directory, '*.rb')].each do |plugin|
+ plugin = File.basename(plugin, '.rb')
+
+ begin
+ Timeout.timeout((@options[:plugin_timeout] || 10).to_i) do
+ Onering::Logger.debug("Loading plugin #{directory}/#{plugin}.rb", "Onering::Reporter")
+ eval(File.read("#{directory}/#{plugin}.rb"), PluginDelegate.new(self, {
+ :plugin => plugin,
+ :path => "#{directory}/#{plugin}.rb"
+ }).get_binding())
end
+ rescue Timeout::Error
+ Onering::Logger.warn("Plugin #{plugin} took too long to return, skipping", "Onering::Reporter")
end
end
end
end
- rescue Exception => e
- raise e if e.class === Timeout::Error
+ end
+ rescue Exception => e
+ raise e if e.class === Timeout::Error
- Onering::Logger.warn(e.message, "Onering::Reporter/#{e.class.name}")
+ Onering::Logger.warn(e.message, "Onering::Reporter/#{e.class.name}")
- e.backtrace.each do |eb|
- Onering::Logger.debug(eb, "Onering::Reporter/#{e.class.name}")
- end
-
- next
+ e.backtrace.each do |eb|
+ Onering::Logger.debug(eb, "Onering::Reporter/#{e.class.name}")
end
- end
- end
- def add(&block)
- if block_given?
- instance_eval(&block)
+ next
end
end
+ end
- def property(name, value=nil)
- unless value.nil?
- Onering::Logger.debug3("-> Set property #{name.to_s} (was: #{@_report[:properties].get(name.to_s,'null')})", "Onering::Reporter")
- @_report[:properties].set(name.to_s, value)
- end
+ def property(name, value=nil)
+ unless value.nil?
+ Onering::Logger.debug3("-> Set property #{name.to_s} (was: #{@_report[:properties].get(name.to_s,'null')})", "Onering::Reporter")
+ @_report[:properties].set(name.to_s, value)
end
+ end
- def stat(name, value=nil)
- unless value.nil?
- @_report[:properties][:metrics] ||= {}
- Onering::Logger.debug3("-> Set metric #{name.to_s}", "Onering::Reporter")
- @_report[:properties][:metrics].set(name.to_s, value)
- end
- end
+ def report(options={})
+ options = @options.merge(options)
+ @id = (@options[:id] || Onering::Util.fact('hardwareid', nil))
- def report(options={})
- options = @options.merge(options)
- @id = (@options[:id] || Onering::Util.fact('hardwareid', nil))
-
- if not @id.nil?
- if options[:nocache]
- return _generated_report()
- else
- rv = _cached_report(options)
- return _generated_report() if rv.nil? or rv.empty?
- return rv
- end
+ if not @id.nil?
+ if options[:nocache]
+ return _generated_report()
else
- Onering::Logger.fatal!("Cannot generate report without a hardware ID", "Onering::Reporter")
+ rv = _cached_report(options)
+ return _generated_report() if rv.nil? or rv.empty?
+ return rv
end
-
- return {}
+ else
+ Onering::Logger.fatal!("Cannot generate report without a hardware ID", "Onering::Reporter")
end
+ return {}
+ end
- def _generated_report()
- Timeout.timeout((@options[:timeout] || 60).to_i) do
- hostname = (Facter.value('fqdn') rescue %x{hostname -f}.strip.chomp)
- @_report = {
- :id => @id,
- :name => hostname,
- :aliases => @options[:aliases],
- :tags => @options[:tags],
- :status => (@options[:status] || 'online'),
- :inventory => true,
- :properties => {}
- }
+ def _generated_report()
+ Timeout.timeout((@options[:timeout] || 60).to_i) do
+ hostname = (Facter.value('fqdn') rescue %x{hostname -f}.strip.chomp)
- # loads plugins and populates @_report
- load_plugins()
+ @_report = {
+ :id => @id,
+ :name => hostname,
+ :aliases => @options[:aliases],
+ :tags => @options[:tags],
+ :status => (@options[:status] || 'online'),
+ :inventory => true,
+ :properties => {}
+ }
- return @_report.stringify_keys()
- end
+ # loads plugins and populates @_report
+ load_plugins()
- return {}
+ return @_report.stringify_keys()
end
- def _cached_report(options={})
- options = @options.merge(options)
- cachefile = (options[:cachefile] || DEFAULT_CACHE_FILE)
- tries = 0
+ return {}
+ end
- catch(:retry) do
- tries += 1
+ def _cached_report(options={})
+ options = @options.merge(options)
+ cachefile = (options[:cachefile] || DEFAULT_CACHE_FILE)
+ tries = 0
- if tries > 10
- Onering::Logger.error("Too many retries reading cache #{cachefile}, generating report", "Onering::Reporter")
- return _generated_report()
- end
+ catch(:retry) do
+ tries += 1
- if File.readable?(cachefile)
- Onering::Logger.debug("Loading cache file at #{cachefile}", "Onering::Reporter")
- cache = File.read(cachefile)
- cache = (MultiJson.load(cache) rescue {})
+ if tries > 10
+ Onering::Logger.error("Too many retries reading cache #{cachefile}, generating report", "Onering::Reporter")
+ return _generated_report()
+ end
- if _cache_expired?(cache, options[:maxage])
- Onering::Logger.debug("Cache expired, regenerating", "Onering::Reporter")
- throw :retry if _update_cache_file(cachefile)
- end
+ if File.readable?(cachefile)
+ Onering::Logger.debug("Loading cache file at #{cachefile}", "Onering::Reporter")
+ cache = File.read(cachefile)
+ cache = (MultiJson.load(cache) rescue {})
- if options[:cacheregen] == true
- Onering::Logger.debug("Forcing cache regeneration", "Onering::Reporter")
- cache = _update_cache_file(cachefile)
- end
+ if _cache_expired?(cache, options[:maxage])
+ Onering::Logger.debug("Cache expired, regenerating", "Onering::Reporter")
+ throw :retry if _update_cache_file(cachefile)
+ end
- if cache
- # remove cached_at key
- Onering::Logger.debug("Using cached data (#{Time.now.to_i - Time.parse(cache.get('cached_at')).to_i} seconds old)", "Onering::Reporter")
- cache.delete('cached_at')
- return cache
- end
- else
- Onering::Logger.debug("Report cache file could not be read at #{cachefile}", "Onering::Reporter")
- throw :retry if _update_cache_file(cachefile)
+ if options[:cacheregen] == true
+ Onering::Logger.debug("Forcing cache regeneration", "Onering::Reporter")
+ cache = _update_cache_file(cachefile)
end
- end
- return {}
+ if cache
+ # remove cached_at key
+ Onering::Logger.debug("Using cached data (#{Time.now.to_i - Time.parse(cache.get('cached_at')).to_i} seconds old)", "Onering::Reporter")
+ cache.delete('cached_at')
+ return cache
+ end
+ else
+ Onering::Logger.debug("Report cache file could not be read at #{cachefile}", "Onering::Reporter")
+ throw :retry if _update_cache_file(cachefile)
+ end
end
+ return {}
+ end
- def _update_cache_file(cachefile=DEFAULT_CACHE_FILE)
- begin
- report = nil
- File.open(cachefile, 'w+') do |file|
- Onering::Logger.debug("Regenerating cache file at #{cachefile}", "Onering::Reporter")
- report = _generated_report()
- report['cached_at'] = Time.now.strftime('%Y-%m-%dT%H:%M:%S%z')
- json = MultiJson.dump(report, :pretty => true)
- file.puts(json)
- end
+ def _update_cache_file(cachefile=DEFAULT_CACHE_FILE)
+ begin
+ report = nil
- return report
- rescue Exception => e
- Onering::Logger.info("Unable to write cache file #{cachefile}: #{e.class.name} - #{e.message}", "Onering::Reporter")
- return false
+ File.open(cachefile, 'w+') do |file|
+ Onering::Logger.debug("Regenerating cache file at #{cachefile}", "Onering::Reporter")
+ report = _generated_report()
+ report['cached_at'] = Time.now.strftime('%Y-%m-%dT%H:%M:%S%z')
+ json = MultiJson.dump(report, :pretty => true)
+ file.puts(json)
end
+
+ return report
+ rescue Exception => e
+ Onering::Logger.info("Unable to write cache file #{cachefile}: #{e.class.name} - #{e.message}", "Onering::Reporter")
+ return false
end
+ end
- def _cache_expired?(cache, age=DEFAULT_CACHE_MAXAGE)
- if cache.is_a?(Hash)
- return (Time.parse(cache.get('cached_at')) < (Time.now - age) rescue true)
- else
- return true
- end
+ def _cache_expired?(cache, age=DEFAULT_CACHE_MAXAGE)
+ if cache.is_a?(Hash)
+ return (Time.parse(cache.get('cached_at')) < (Time.now - age) rescue true)
+ else
+ return true
end
end
end
end
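
Taken together, the 0.2.0 side replaces the class-level singleton (class<<self) with instance methods and loads plugin files by eval'ing their source inside a PluginDelegate binding instead of require'ing them, so report, property and stat inside a plugin now resolve to the delegate's DSL methods. A sketch of the new flow, with the on-disk path and the caller shown purely as assumptions:

  # <plugin_path>/default/example.rb (0.2.0 style, eval'd via PluginDelegate#get_binding)
  report do
    # PluginDelegate#property forwards to Reporter#property, which writes the
    # value into @_report[:properties]
    property 'example.loaded', true

    # PluginDelegate#stat prefixes the key with "metrics." before forwarding
    stat 'example.load_time', 0.01
  end

  # Hypothetical caller (e.g. the onering CLI): Reporter is now instantiated
  # with a config hash, and report() returns the cached report when it is
  # still fresh, regenerating it otherwise.
  reporter = Onering::Reporter.new(:plugin_timeout => 10, :timeout => 60)
  data = reporter.report(:nocache => false)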