lib/openstudio/analysis/translator/excel.rb in openstudio-analysis-0.1.10 vs lib/openstudio/analysis/translator/excel.rb in openstudio-analysis-0.1.11

- old (removed; present only in openstudio-analysis-0.1.10)
+ new (added in openstudio-analysis-0.1.11)

@@ -3,17 +3,20 @@
     module Translator
       class Excel
         attr_reader :version
         attr_reader :settings
         attr_reader :variables
+        attr_reader :outputs
         attr_reader :models
         attr_reader :weather_files
         attr_reader :measure_path
         attr_reader :export_path
+        attr_reader :cluster_name
         attr_reader :variables
         attr_reader :algorithm
         attr_reader :problem
+        attr_reader :run_setup

         # remove these once we have classes to construct the JSON file
         attr_reader :name
         attr_reader :machine_name
         attr_reader :template_json
@@ -34,30 +37,35 @@
           # Initialize some other instance variables
           @version = '0.0.1'
           @name = nil
           @machine_name = nil
+          @cluster_name = nil
           @settings = {}
           @weather_files = []
           @models = []
           @other_files = []
           @export_path = "./export"
           @measure_path = "./measures"
           @number_of_samples = 0 # todo: remove this
           @problem = {}
           @algorithm = {}
           @template_json = nil
+          @outputs = {}
+          @run_setup = {}
         end

         def process
           @setup = parse_setup()

           @version = Semantic::Version.new @version
           raise "Spreadsheet version #{@version} is no longer supported. Please upgrade your spreadsheet to at least 0.1.9" if @version < '0.1.9'

           @variables = parse_variables()
+          @outputs = parse_outputs()
+
           # call validate to make sure everything that is needed exists (i.e. directories)
           validate_analysis()
         end

         # Save off the legacy format of the JSON file
@@ -182,12 +190,12 @@
           puts "Analysis name is #{@name}"
           openstudio_analysis_json = JSON.parse(analysis_template.result(get_binding))

           openstudio_analysis_json['analysis']['problem'].merge!(@problem)
           openstudio_analysis_json['analysis']['problem']['algorithm'].merge!(@algorithm)
+          openstudio_analysis_json['analysis'].merge!(@outputs)
-
           @measure_index = -1
           @variables['data'].each do |measure|
             # With OpenStudio server we need to create the workflow with all the measure instances
             if measure['enabled'] && measure['name'] != 'baseline'
               @measure_index += 1
@@ -245,22 +253,22 @@
                 puts @variable.inspect
                 weights = nil
                 if @variable['distribution']['discrete_weights'] && @variable['distribution']['discrete_weights'] != ''
                   weights = eval(@variable['distribution']['discrete_weights'])
                 end
-
+
                 values = nil
                 if variable['type'].downcase == 'bool'
                   values = eval(@variable['distribution']['discrete_values'])
-                  values.map!{|v| v.downcase == 'true'}
+                  values.map! { |v| v.downcase == 'true' }
                 else
                   values = eval(@variable['distribution']['discrete_values'])
                 end

                 if weights
                   raise "Discrete variable #{@variable['name']} does not have equal length of values and weights" if values.size != weights.size
-                  @values_and_weights = values.zip( weights ).map { |v,w| {value: v, weight: w} }.to_json
+                  @values_and_weights = values.zip(weights).map { |v, w| {value: v, weight: w} }.to_json
                 else
                   @values_and_weights = values.map { |v| {value: v} }.to_json
                 end

                 vr = JSON.parse(discrete_uncertain_variable_template.result(get_binding))
@@ -421,19 +429,26 @@
           next if row[0].nil?

           if b_settings
             @version = row[1].chomp if row[0] == "Spreadsheet Version"
             @settings["#{row[0].snake_case}"] = row[1] if row[0]
-
+            @cluster_name = @settings["cluster_name"].snake_case if @settings["cluster_name"]
+
             # type some of the values that we know
             @settings["proxy_port"] = @settings["proxy_port"].to_i if @settings["proxy_port"]
-
           elsif b_run_setup
             @name = row[1].chomp if row[0] == "Analysis Name"
             @machine_name = @name.snake_case
             @export_path = File.expand_path(File.join(@root_path, row[1])) if row[0] == "Export Directory"
             @measure_path = File.expand_path(File.join(@root_path, row[1])) if row[0] == "Measure Directory"
+
+            @run_setup["#{row[0].snake_case}"] = row[1] if row[0]
+
+            # type cast
+            @run_setup["allow_multiple_jobs"] = @run_setup["allow_multiple_jobs"].to_s.to_bool if @run_setup["allow_multiple_jobs"]
+            @run_setup["use_server_as_worker"] = @run_setup["use_server_as_worker"].to_s.to_bool if @run_setup["use_server_as_worker"]
+
           elsif b_problem_setup
             if row[0]
               v = row[1]
               v.to_i if v % 1 == 0
               @problem["#{row[0].snake_case}"] = v
@@ -455,11 +470,10 @@
                 @other_files << {lib_zip_name: row[1], path: row[2]}
               end
             end
           end
         end
-
         # parse_variables will parse the XLS spreadsheet and save the data into
         # a higher level JSON file. The JSON file is historic and it should really
         # be omitted as an intermediate step
         def parse_variables()
           rows = @xls.sheet('Variables').parse()
@@ -561,9 +575,43 @@
           end

           data
         end

+        def parse_outputs()
+          rows = @xls.sheet('Outputs').parse()
+
+          if !rows
+            raise "Could not find the sheet name 'Outputs' in excel file #{@root_path}"
+          end
+
+          data = {}
+          data['output_variables'] = []
+
+          icnt = 0
+          variable_index = -1
+          @algorithm['objective_functions'] = []
+
+          rows.each do |row|
+            icnt += 1
+            # puts "Parsing line: #{icnt}"
+            next if icnt <= 3 # skip the first 3 lines of the file
+            variable_index += 1
+            var = {}
+            var['display_name'] = row[0].strip
+            var['name'] = row[1]
+            var['units'] = row[2]
+            var['objective_function'] = row[3].downcase == "true" ? true : false
+            if var['objective_function'] == true
+              @algorithm['objective_functions'] << var['name']
+            end
+            var['objective_function_target'] = row[4]
+            var['objective_function_index'] = variable_index
+            data['output_variables'] << var
+          end
+
+          data
+        end
       end
     end
   end
 end
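
The main functional addition in 0.1.11 is the new 'Outputs' worksheet parsing. As a minimal sketch (not part of the diff itself), the snippet below shows the shape of the hash `parse_outputs` builds for a single hypothetical output row; the display name, variable name, units, and target values are invented for illustration and do not come from the gem.

```ruby
# Sketch only: shape of the hash returned by parse_outputs for one made-up
# row of the 'Outputs' sheet (columns: display name, name, units,
# objective-function flag, target). This hash is stored in @outputs.
outputs = {
  'output_variables' => [
    {
      'display_name' => 'Total Site Energy',   # row[0] (hypothetical)
      'name' => 'total_site_energy',           # row[1] (hypothetical)
      'units' => 'MJ/m2',                      # row[2] (hypothetical)
      'objective_function' => true,            # row[3].downcase == "true"
      'objective_function_target' => 0,        # row[4] (hypothetical)
      'objective_function_index' => 0          # running index over data rows
    }
  ]
}

# Per the diff, the translator merges this hash into the analysis JSON via
#   openstudio_analysis_json['analysis'].merge!(@outputs)
# and each output flagged as an objective function is also appended to
# @algorithm['objective_functions'].
puts outputs['output_variables'].first['name'] # => "total_site_energy"
```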