lib/openstudio/analysis/server_api.rb in openstudio-analysis-0.4.4 vs lib/openstudio/analysis/server_api.rb in openstudio-analysis-0.4.5
- old
+ new
@@ -3,10 +3,13 @@
module OpenStudio
module Analysis
class ServerApi
attr_reader :hostname
+ # Define the set of analysis methods that require batch_run to be queued after them
+ BATCH_RUN_METHODS = %w(lhs preflight single_run repeat_run doe diag baseline_perturbation batch_datapoints)
+
def initialize(options = {})
defaults = { hostname: 'http://localhost:8080' }
options = defaults.merge(options)
@logger = ::Logger.new('faraday.log')
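For context, a minimal usage sketch of the new constant (the require path and hostname are assumptions based on the gem name and the default shown above, not part of this diff):

require 'openstudio-analysis'

# ServerApi defaults to a local server; the hostname below just restates that default.
api = OpenStudio::Analysis::ServerApi.new(hostname: 'http://localhost:8080')

# The new constant is a plain Array of Strings and can be inspected directly:
OpenStudio::Analysis::ServerApi::BATCH_RUN_METHODS.include?('batch_datapoints') # => true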
@@ -760,10 +763,84 @@
start_analysis(analysis_id, run_options)
analysis_id
end
+ def run_baseline_perturbation(formulation_filename, analysis_zip_filename)
+ project_options = {}
+ project_id = new_project(project_options)
+
+ analysis_options = {
+ formulation_file: formulation_filename,
+ upload_file: analysis_zip_filename,
+ reset_uuids: true
+ }
+ analysis_id = new_analysis(project_id, analysis_options)
+
+ run_options = {
+ analysis_action: 'start',
+ without_delay: false,
+ analysis_type: 'baseline_perturbation',
+ allow_multiple_jobs: true,
+ use_server_as_worker: true,
+ simulate_data_point_filename: 'simulate_data_point.rb',
+ run_data_point_filename: 'run_openstudio_workflow_monthly.rb'
+ }
+ start_analysis(analysis_id, run_options)
+
+ run_options = {
+ analysis_action: 'start',
+ without_delay: false, # run in background
+ analysis_type: 'batch_run',
+ allow_multiple_jobs: true,
+ use_server_as_worker: true,
+ simulate_data_point_filename: 'simulate_data_point.rb',
+ run_data_point_filename: 'run_openstudio_workflow_monthly.rb'
+ }
+ start_analysis(analysis_id, run_options)
+
+ analysis_id
+ end
+
+ def run_batch_datapoints(formulation_filename, analysis_zip_filename)
+ project_options = {}
+ project_id = new_project(project_options)
+
+ puts 'In run_batch_datapoints'
+
+ analysis_options = {
+ formulation_file: formulation_filename,
+ upload_file: analysis_zip_filename,
+ reset_uuids: true
+ }
+ analysis_id = new_analysis(project_id, analysis_options)
+
+ run_options = {
+ analysis_action: 'start',
+ without_delay: false,
+ analysis_type: 'batch_datapoints',
+ allow_multiple_jobs: true,
+ use_server_as_worker: true,
+ simulate_data_point_filename: 'simulate_data_point.rb',
+ run_data_point_filename: 'run_openstudio_workflow_monthly.rb'
+ }
+ start_analysis(analysis_id, run_options)
+
+ run_options = {
+ analysis_action: 'start',
+ without_delay: false, # run in background
+ analysis_type: 'batch_run',
+ allow_multiple_jobs: true,
+ use_server_as_worker: true,
+ simulate_data_point_filename: 'simulate_data_point.rb',
+ run_data_point_filename: 'run_openstudio_workflow_monthly.rb'
+ }
+ start_analysis(analysis_id, run_options)
+
+ analysis_id
+ end
+
def run_analysis_detailed(formulation_filename, analysis_zip_filename, analysis_type,
allow_multiple_jobs = true, server_as_worker = true,
run_data_point_filename = 'run_openstudio_workflow_monthly.rb')
warn 'run_analysis_detailed will be deprecated in 0.5.0. Use run(...)'
project_options = {}
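A hedged sketch of how the two new helpers might be called (the file names are placeholders, not values taken from this diff); both upload the formulation JSON and analysis zip, start the named analysis, queue a follow-up batch_run, and return the analysis id:

api = OpenStudio::Analysis::ServerApi.new
analysis_id = api.run_baseline_perturbation('analysis.json', 'analysis.zip')

# run_batch_datapoints takes the same arguments and follows the same
# start-analysis-then-batch_run pattern:
# analysis_id = api.run_batch_datapoints('analysis.json', 'analysis.zip')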
@@ -772,10 +849,11 @@
analysis_options = {
formulation_file: formulation_filename,
upload_file: analysis_zip_filename,
reset_uuids: true
}
+
analysis_id = new_analysis(project_id, analysis_options)
server_as_worker = true if analysis_type == 'optim' || analysis_type == 'rgenoud'
run_options = {
analysis_action: 'start',
@@ -788,10 +866,10 @@
}
start_analysis(analysis_id, run_options)
# If the analysis is a staged analysis, then go ahead and run batch run because there is
# no explicit way to tell the system to do it
- if %w(lhs preflight single_run repeat_run doe).include? analysis_type
+ if BATCH_RUN_METHODS.include? analysis_type
run_options = {
analysis_action: 'start',
without_delay: false,
analysis_type: 'batch_run',
allow_multiple_jobs: allow_multiple_jobs,
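The net effect of replacing the hard-coded list with BATCH_RUN_METHODS: analysis types such as 'diag', 'baseline_perturbation', and 'batch_datapoints' were not covered in 0.4.4, so no batch_run was queued after them. A sketch with placeholder file names:

# In 0.4.5 this also queues the follow-up batch_run, because 'diag' is in
# BATCH_RUN_METHODS; in 0.4.4 the analysis would start but its data points
# would never be batch-run automatically.
api.run_analysis_detailed('analysis.json', 'analysis.zip', 'diag')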