lib/openstudio/analysis/server_api.rb in openstudio-analysis-0.4.2 vs lib/openstudio/analysis/server_api.rb in openstudio-analysis-0.4.3
- old
+ new
@@ -170,10 +170,48 @@
end
status
end
+ # Check if the machine is alive
+ #
+ # @return [Boolean, nil] true if the machine reports an awake value, nil if the status could not be retrieved
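+ #
+ # @example Usage sketch (the hostname is an assumption; adjust for your server)
+ #   api = OpenStudio::Analysis::ServerApi.new(hostname: 'http://localhost:8080')
+ #   api.alive? #=> true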
+ def alive?
+ m = machine_status
+
+ # if a status was returned, check whether the server reported an awake time
+ m = !m[:status][:awake].nil? if m
+
+ m
+ end
+
+ # Retrieve the machine status from status.json
+ #
+ # @return [Hash, nil] the parsed status, or nil if the server did not respond
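+ #
+ # @example Usage sketch (the fields under :status depend on the server version)
+ #   status = api.machine_status
+ #   status[:status][:awake] if status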
+ def machine_status
+ status = nil
+
+ begin
+ resp = @conn.get do |req|
+ req.url 'status.json'
+ req.options.timeout = 10
+ req.options.open_timeout = 10
+ end
+
+ if resp.status == 200
+ j = JSON.parse resp.body, symbolize_names: true
+ status = j if j
+ end
+
+ rescue Faraday::ConnectionFailed
+ # the server is unreachable; fall through and return nil
+ rescue Net::ReadTimeout
+ # the server did not respond in time; fall through and return nil
+ end
+
+ status
+ end
+
def get_analysis_status_and_json(analysis_id, analysis_type)
status = nil
j = nil
# sleep 2 # super cheesy---need to update how this works. Right now there is a good chance to get a
@@ -408,11 +446,11 @@
}
puts formulation_json
analysis_id = SecureRandom.uuid
formulation_json[:analysis][:uuid] = analysis_id
end
- fail "No analysis id defined in analyis.json #{options[:formulation_file]}" if analysis_id.nil?
+ fail "No analysis id defined in analysis.json #{options[:formulation_file]}" if analysis_id.nil?
# save out this file to compare
# File.open('formulation_merge.json', 'w') { |f| f << JSON.pretty_generate(formulation_json) }
response = @conn.post do |req|
@@ -501,11 +539,13 @@
else
fail "could not create new datapoints #{response.body}"
end
end
+ # TODO: this should be called 'start_analysis'
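+ # @example Start a batch run (a sketch; the option values mirror those used elsewhere in this file)
+ #   api.start_analysis(analysis_id, analysis_action: 'start', analysis_type: 'batch_run')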
def run_analysis(analysis_id, options)
+ warn 'In 0.5.0, OpenStudio::Analysis::ServerApi#run_analysis will be renamed to #start_analysis. Use start_analysis instead.'
defaults = { analysis_action: 'start', without_delay: false }
options = defaults.merge(options)
puts "Run analysis is configured with #{options.to_json}"
response = @conn.post do |req|
@@ -520,10 +560,12 @@
else
fail 'Could not start the analysis'
end
end
+ alias_method :start_analysis, :run_analysis
+
def kill_analysis(analysis_id)
analysis_action = { analysis_action: 'stop' }
response = @conn.post do |req|
req.url "analyses/#{analysis_id}/action.json"
@@ -550,21 +592,43 @@
kill_analysis(analysis_id)
end
end
end
+ # Get a list of analyses and their data points
+ #
+ # @param analysis_id [String] Optional analysis ID; if nil, the data points for all analyses are returned
+ # @return [Array, nil] the analyses with their data points, or nil if the request failed
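+ #
+ # @example Usage sketch
+ #   api.data_point_status              # data points across all analyses
+ #   api.data_point_status(analysis_id) # data points for one analysis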
+ def data_point_status(analysis_id = nil)
+ data_points = nil
+ call_string = nil
+ if analysis_id
+ call_string = "analyses/#{analysis_id}/status.json"
+ else
+ call_string = 'analyses/status.json'
+ end
+
+ resp = @conn.get call_string, version: 2
+ if resp.status == 200
+ data_points = JSON.parse(resp.body, symbolize_names: true)[:analyses]
+ end
+
+ data_points
+ end
+
+ # This is the former version of get data point status. The new data_point_status method is
+ # preferred because it also allows checking data points across all analyses.
def get_datapoint_status(analysis_id, filter = nil)
data_points = nil
# get the status of the entire analysis
unless analysis_id.nil?
if filter.nil? || filter == ''
resp = @conn.get "analyses/#{analysis_id}/status.json"
if resp.status == 200
data_points = JSON.parse(resp.body, symbolize_names: true)[:data_points]
end
else
- resp = @conn.get "#{@hostname}/analyses/#{analysis_id}/status.json", jobs: filter
+ resp = @conn.get "analyses/#{analysis_id}/status.json", jobs: filter
if resp.status == 200
data_points = JSON.parse(resp.body, symbolize_names: true)[:data_points]
end
end
end
@@ -584,10 +648,24 @@
data_point
end
## here are a bunch of runs that really don't belong here.
+ # Submit a generic analysis. This uses the options configured in the JSON file, including
+ # the analysis type. Note that this may not work in cases where multiple analyses need to run
+ # (e.g. single_run, queue_model, lhs)
+ #
+ # @param formulation_filename [String] FQP to the formulation file
+ # @param analysis_zip_filename [String] FQP to the zip file with the supporting files
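+ #
+ # @example Usage sketch (the file paths are placeholders)
+ #   api.run_file('analysis.json', 'analysis.zip')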
+ def run_file(formulation_filename, analysis_zip_filename)
+ # parse the formulation file to grab the analysis type
+ j = JSON.parse(File.read(formulation_filename), symbolize_names: true)
+ analysis_type = j[:analysis][:problem][:analysis_type]
+
+ run(formulation_filename, analysis_zip_filename, analysis_type)
+ end
+
# create a new analysis and run a single model
def run_single_model(formulation_filename, analysis_zip_filename, run_data_point_filename = 'run_openstudio_workflow_monthly.rb')
project_options = {}
project_id = new_project(project_options)
@@ -606,22 +684,22 @@
allow_multiple_jobs: true,
use_server_as_worker: true,
simulate_data_point_filename: 'simulate_data_point.rb',
run_data_point_filename: run_data_point_filename
}
- run_analysis(analysis_id, run_options)
+ start_analysis(analysis_id, run_options)
run_options = {
analysis_action: 'start',
without_delay: false, # run in background
analysis_type: 'batch_run',
allow_multiple_jobs: true,
use_server_as_worker: true,
simulate_data_point_filename: 'simulate_data_point.rb',
run_data_point_filename: run_data_point_filename
}
- run_analysis(analysis_id, run_options)
+ start_analysis(analysis_id, run_options)
analysis_id
end
# creates a new analysis and runs rgenoud optimization - number of generations isn't used right now
@@ -643,11 +721,11 @@
allow_multiple_jobs: true,
use_server_as_worker: true,
simulate_data_point_filename: 'simulate_data_point.rb',
run_data_point_filename: 'run_openstudio_workflow_monthly.rb'
}
- run_analysis(analysis_id, run_options)
+ start_analysis(analysis_id, run_options)
analysis_id
end
def run_lhs(formulation_filename, analysis_zip_filename)
@@ -668,28 +746,30 @@
allow_multiple_jobs: true,
use_server_as_worker: true,
simulate_data_point_filename: 'simulate_data_point.rb',
run_data_point_filename: 'run_openstudio_workflow_monthly.rb'
}
- run_analysis(analysis_id, run_options)
+ start_analysis(analysis_id, run_options)
run_options = {
analysis_action: 'start',
without_delay: false, # run in background
analysis_type: 'batch_run',
allow_multiple_jobs: true,
use_server_as_worker: true,
simulate_data_point_filename: 'simulate_data_point.rb',
run_data_point_filename: 'run_openstudio_workflow_monthly.rb'
}
- run_analysis(analysis_id, run_options)
+ start_analysis(analysis_id, run_options)
analysis_id
end
- def run_analysis_detailed(formulation_filename, analysis_zip_filename,
- analysis_type, allow_multiple_jobs, server_as_worker, run_data_point_filename)
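+ # Create a new project and analysis, upload the files, and start the analysis.
+ # Aliased below as `run`, which is the preferred name going forward.
+ #
+ # @example Run an LHS analysis (a sketch; the file paths are placeholders)
+ #   api.run('analysis.json', 'analysis.zip', 'lhs')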
+ def run_analysis_detailed(formulation_filename, analysis_zip_filename, analysis_type,
+ allow_multiple_jobs = true, server_as_worker = true,
+ run_data_point_filename = 'run_openstudio_workflow_monthly.rb')
+ warn 'run_analysis_detailed will be deprecated in 0.5.0. Use run(...)'
project_options = {}
project_id = new_project(project_options)
analysis_options = {
formulation_file: formulation_filename,
@@ -706,25 +786,84 @@
allow_multiple_jobs: allow_multiple_jobs,
use_server_as_worker: server_as_worker,
simulate_data_point_filename: 'simulate_data_point.rb',
run_data_point_filename: run_data_point_filename
}
- run_analysis(analysis_id, run_options)
+ start_analysis(analysis_id, run_options)
# If the analysis is LHS, then go ahead and run batch run because there is
# no explicit way to tell the system to do it
- if analysis_type == 'lhs' || analysis_type == 'preflight' || analysis_type == 'single_run'
+ if analysis_type == 'lhs' || analysis_type == 'preflight' || analysis_type == 'single_run' || analysis_type == 'doe'
run_options = {
analysis_action: 'start',
without_delay: false,
analysis_type: 'batch_run',
allow_multiple_jobs: allow_multiple_jobs,
use_server_as_worker: server_as_worker,
simulate_data_point_filename: 'simulate_data_point.rb',
run_data_point_filename: run_data_point_filename
}
- run_analysis(analysis_id, run_options)
+ start_analysis(analysis_id, run_options)
end
+
+ analysis_id
+ end
+
+ alias_method :run, :run_analysis_detailed
+
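+ # Create a new analysis and queue it to run in the background. Unlike run, this does not
+ # submit the follow-up batch_run for lhs/preflight/single_run/doe analyses.
+ #
+ # @example Usage sketch (the file paths are placeholders)
+ #   api.queue_single_run('analysis.json', 'analysis.zip', 'single_run')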
+ def queue_single_run(formulation_filename, analysis_zip_filename, analysis_type,
+ allow_multiple_jobs = true, server_as_worker = true,
+ run_data_point_filename = 'run_openstudio_workflow_monthly.rb')
+ project_options = {}
+ project_id = new_project(project_options)
+
+ analysis_options = {
+ formulation_file: formulation_filename,
+ upload_file: analysis_zip_filename,
+ reset_uuids: true
+ }
+ analysis_id = new_analysis(project_id, analysis_options)
+
+ server_as_worker = true if analysis_type == 'optim' || analysis_type == 'rgenoud'
+ run_options = {
+ analysis_action: 'start',
+ without_delay: false,
+ analysis_type: analysis_type,
+ allow_multiple_jobs: allow_multiple_jobs,
+ use_server_as_worker: server_as_worker,
+ simulate_data_point_filename: 'simulate_data_point.rb',
+ run_data_point_filename: run_data_point_filename
+ }
+ start_analysis(analysis_id, run_options)
+
+ analysis_id
+ end
+
+ # TODO: this should take no arguments
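+ # Start the batch run across all analyses on the server.
+ #
+ # @example Usage sketch (the first two arguments are ignored; see the TODO above)
+ #   api.run_batch_run_across_analyses(nil, nil)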
+ def run_batch_run_across_analyses(formulation_filename, analysis_zip_filename,
+ allow_multiple_jobs = true, server_as_worker = true,
+ run_data_point_filename = 'run_openstudio_workflow_monthly.rb')
+ project_options = {}
+ project_id = new_project(project_options)
+
+ analysis_options = {
+ formulation_file: nil,
+ upload_file: nil,
+ reset_uuids: true,
+ # { analysis: { name: 'something', display_name: 'something else' }}
+ }
+ analysis_id = new_analysis(project_id, analysis_options)
+
+ run_options = {
+ analysis_action: 'start',
+ without_delay: false,
+ analysis_type: 'batch_run_analyses',
+ allow_multiple_jobs: allow_multiple_jobs,
+ use_server_as_worker: server_as_worker,
+ simulate_data_point_filename: 'simulate_data_point.rb',
+ run_data_point_filename: run_data_point_filename
+ }
+ start_analysis(analysis_id, run_options)
analysis_id
end
end
end