require "mobile_metrics/version" require "mobile_metrics/junit" require 'time' require 'json' require 'securerandom' require 'fileutils' require 'pathname' # Metric types METRIC_TIMER = 'x.mobile_metric.timer' METRIC_COUNTER = 'x.mobile_metric.counter' METRIC_RATIO = 'x.mobile_metric.ratio' METRIC_SIZE = 'x.mobile_metric.size' # Misc Constants UNKNOWN = 'UNKNOWN' LOG_PREFIX = 'CI-METRICS:' LOGGING_DIR = '/tmp/cimetricslogs' UPLOAD_ENV_VAR = 'MOBILE_METRICS_UPLOAD_LOCATION' module MobileMetrics class Log @@in_progress_timers = {} @@log_file = nil @@env_values = nil @@verbose = false @@upload_location = nil #======================================== # Configuration #========================================= # Named params, with ability to override / leave out stuff we don't care about. def self.set_default_values( # Required params project:, # These params either aren't required, or have sensible defaults. level: "info", platform: "iOS", buildUrl: ENV['BUILD_URL'], gitUrl: ENV['GIT_URL'], gitBranch: ENV['DOTCI_BRANCH'], gitSha: ENV['DOTCI_SHA'], # no sensible defaults for these, we'll strip them later buildType: UNKNOWN, brand: UNKNOWN, # Controls log levels within this class verbose: false, # Upload location for logs upload_location: nil ) #TODO: May be able to support overridding these at some point in the future. # It won't be threadsafe, but our builds aren't parallelized at the build script level anyway. if @@env_values print 'Can only override default values once! Aborting!' return end if project.nil? print 'Project value for logging MUST be non-nil, failing build.' exit 14 return end @@verbose = verbose # Upload location @@upload_location = (upload_location || ENV[UPLOAD_ENV_VAR]) if @@upload_location.nil? print 'Upload location value for logging MUST not be nil, exiting.' exit 15 return end # Create the logging dir + filename FileUtils.mkdir_p(LOGGING_DIR) filename = "metrics_#{project}_#{build_number}.log" @@log_file = Pathname.new(LOGGING_DIR) + filename print @@log_file if @@verbose # Populate our env values @@env_values = {} # Required @@env_values[:project] = project # Optional @@env_values[:level] = level @@env_values[:platform] = platform @@env_values[:gitUrl] = gitUrl @@env_values[:gitBranch] = gitBranch @@env_values[:gitSha] = gitSha @@env_values[:buildUrl] = buildUrl #TODO: Should any of these be required? 
      @@env_values[:buildType] = buildType if buildType != UNKNOWN
      @@env_values[:brand] = brand if brand != UNKNOWN
    end

    # Sample timer metric:
    # {
    #   "time": "",
    #   "name": "x.mobile_metric.",
    #   "level": "info",
    #   "data": {
    #     "name": "",
    #     ... // additional data per metric type
    #   },
    #   "context": {
    #     "platform": "",
    #     "project": "",
    #     "gitUrl": "",
    #     "gitBranch": "",
    #     "gitSha": "",
    #     "buildType": "",
    #     "brand": "",
    #     "host": "",
    #     "processId": ""
    #   }
    # }

    #=========================================
    # Public Logging Methods
    #=========================================

    def self.log_ratio_metric(name:, ratio:)
      overrides = { name: METRIC_RATIO, data: { name: name, ratio: ratio.to_f } }
      log_partial_metric(overrides)
    end

    def self.log_counter_metric(name:, count:)
      overrides = { name: METRIC_COUNTER, data: { name: name, count: count.to_i } }
      log_partial_metric(overrides)
    end

    def self.log_size_metric(name:, sizeInBytes:, filename:, artifactUrl:)
      overrides = { name: METRIC_SIZE, data: { name: name, filename: filename, sizeInBytes: sizeInBytes, artifactUrl: artifactUrl } }
      log_partial_metric(overrides)
    end

    def self.start_timer_metric(name:)
      if @@in_progress_timers.has_key?(name)
        print "WARNING: #{name} already has a running timer, refusing to start a new timer"
        return
      end
      @@in_progress_timers[name] = monotonic_timestamp
    end

    def self.end_timer_metric(name:)
      if !@@in_progress_timers.has_key?(name)
        print "WARNING: #{name} does not have a running timer, the end_timer_metric call has no effect"
        return
      end

      # Calculate delta
      start = @@in_progress_timers[name]
      now = monotonic_timestamp
      delta_in_ms = ((now - start) * 1000).to_i

      # Remove existing timer
      @@in_progress_timers.delete(name)

      # Log to file
      overrides = { name: METRIC_TIMER, data: { name: name, duration: delta_in_ms } }
      log_partial_metric(overrides)
    end

    # Block-based timer. This is the recommended way to time operations.
    def self.time(name:, &block)
      self.start_timer_metric(name: name)
      yield
      self.end_timer_metric(name: name)
    end

    def self.upload_logs()
      # upload_logs removes the log file when it finishes, so a second call is a no-op.
      if @@log_file.nil? || !@@log_file.file?
        print "WARN: Log file is empty or doesn't exist. Was upload_logs called previously?"
        return
      end

      # Skip uploads for local dev machines
      if !should_upload_logs()
        print 'Detected local machine, refusing to upload build metrics. Removing intermediate log file'
        remove_log_file()
        return
      end

      # Warn for any open timers
      if @@in_progress_timers.size > 0
        @@in_progress_timers.each { |k, v| print "WARN: Timer not closed when upload_logs was called: #{k}" }
      end

      # Upload
      upload_log_file()

      # Remove log file
      remove_log_file()
    end

    def self.log_file()
      @@log_file
    end

    def self.upload_location()
      @@upload_location
    end

    # Tears down the logging instance and removes any intermediate log files.
    # Does not finalize or upload any in-progress logs.
    # To use the logger again, you'll have to call set_default_values before any other methods.
    def self.reset()
      if @@log_file
        remove_log_file()
      end
      @@log_file = nil
      @@env_values = nil
      @@in_progress_timers = {}
      @@verbose = false
      @@upload_location = nil
    end

    def self.remove_all_log_files()
      FileUtils.rm_rf(LOGGING_DIR)
    end

    private

    # Creates a stubbed metric with common fields
    def self.default_metric()
      if !@@env_values
        print 'default_metric called before env_values initialized, aborting!'
        exit 12
      end

      context = {
        platform: @@env_values[:platform],
        project: @@env_values[:project],
        # name is overridden per metric, so not present here.
        gitUrl: @@env_values[:gitUrl],
        gitBranch: @@env_values[:gitBranch],
        gitSha: @@env_values[:gitSha],
        buildType: @@env_values[:buildType],
        brand: @@env_values[:brand],
        buildUrl: @@env_values[:buildUrl],
        host: hostname(),
        processId: build_number()
      }.select { |k, v| !v.nil? }

      { time: timestamp(), level: @@env_values[:level], context: context }
    end

    def self.append_to_log(value)
      # Create the log file if it doesn't exist
      if !@@log_file.file?
        FileUtils.touch(@@log_file)
      end
      File.open(@@log_file, 'a+') { |f| f.puts(value + "\n") }
    end

    def self.remove_log_file()
      FileUtils.rm_rf(@@log_file)
    end

    def self.upload_log_file()
      destination = @@upload_location + "metrics_#{@@env_values[:project]}_#{build_number}.log"
      command = "scp -v -o StrictHostKeyChecking=no #{@@log_file} #{destination}"
      # This feels icky
      print `#{command}`
    end

    # Returns a timestamp as a string in the correct logging format.
    # Note: This format doesn't currently include milliseconds.
    # Example: "2018-03-27T21:12:21Z"
    # Note: DO NOT use this for benchmarking / measuring timers; see monotonic_timestamp instead.
    def self.timestamp()
      Time.now.utc.iso8601.to_s
    end

    # Returns a floating point monotonic timestamp suitable for timers.
    # Monotonic means it never goes backwards for things like leap seconds.
    # The unit is floating point seconds.
    def self.monotonic_timestamp()
      Process.clock_gettime(Process::CLOCK_MONOTONIC)
    end

    # Each metric should call this method.
    # It merges the metric hash with the default hash, generates the JSON
    # with the keys in the right order, then appends to the log.
    def self.log_partial_metric(overrides)
      merged_hash = merge_hashes(default_metric, overrides)
      key_order = ['time', 'name', 'level', 'data', 'context']
      ordered_hash = Hash[merged_hash.sort_by { |key, val| key_order.index(key.to_s) || 999 }]
      metric = ordered_hash.to_json
      append_to_log(metric)
    end

    # Util method for merging the default hash values and overrides recursively as we want them
    def self.merge_hashes(default, overrides)
      default.merge(overrides) { |key, oldval, newval|
        if key == :data || key == :context
          oldval.merge(newval)
        else
          newval
        end
      }
    end

    # Returns the host name of the machine we are currently running on
    def self.hostname()
      ENV['NODE_NAME'] || 'local-dev'
    end

    def self.build_number()
      ENV['BUILD_NUMBER']
    end

    # Checks a handful of environment variables that DotCI sets by default.
    # The intent is to never upload timing logs from a local machine, so we require
    # env variables that are unlikely to be present on a dev's local machine.
    def self.should_upload_logs()
      have_workspace = ENV['WORKSPACE'] && !ENV['WORKSPACE'].empty?
      have_build_number = ENV['BUILD_NUMBER'] && !ENV['BUILD_NUMBER'].empty?
      have_jenkins_url = ENV['JENKINS_URL'] && !ENV['JENKINS_URL'].empty?
      have_ci_var = ENV['CI'] && !ENV['CI'].empty?
      have_workspace && have_build_number && have_jenkins_url && have_ci_var
    end

    def self.print(val)
      puts("#{LOG_PREFIX} #{val}")
    end
  end
end
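
# Minimal usage sketch (illustrative only, not part of the library). It assumes a
# Jenkins/DotCI-style environment for uploads and uses hypothetical values for the
# project name, upload destination, and artifact URL. Run this file directly (with
# the gem's lib directory on the load path) to exercise the logger end to end; on a
# local machine upload_logs will detect the missing CI env vars and discard the log.
if __FILE__ == $PROGRAM_NAME
  MobileMetrics::Log.set_default_values(
    project: 'SampleApp',                                       # hypothetical project name
    upload_location: 'ci-user@metrics-host:/var/log/metrics/',  # hypothetical scp destination
    verbose: true
  )

  # Time a block of work; the duration is logged in milliseconds.
  MobileMetrics::Log.time(name: 'sample_build_step') do
    sleep(0.1)
  end

  # Record a counter, a ratio, and an artifact size.
  MobileMetrics::Log.log_counter_metric(name: 'unit_tests_run', count: 1200)
  MobileMetrics::Log.log_ratio_metric(name: 'code_coverage', ratio: 0.83)
  MobileMetrics::Log.log_size_metric(
    name: 'app_binary',
    sizeInBytes: 45_000_000,
    filename: 'SampleApp.ipa',
    artifactUrl: 'https://ci.example.com/artifacts/SampleApp.ipa' # hypothetical URL
  )

  # Upload the accumulated log file (or, on a local machine, remove it).
  MobileMetrics::Log.upload_logs
end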