lib/openstudio/analysis/server_api.rb in openstudio-analysis-0.3.1 vs lib/openstudio/analysis/server_api.rb in openstudio-analysis-0.3.2
- old
+ new
@@ -4,11 +4,11 @@
module Analysis
class ServerApi
attr_reader :hostname
def initialize(options = {})
- defaults = { hostname: 'http://localhost:8080' }
+ defaults = {hostname: 'http://localhost:8080'}
options = defaults.merge(options)
@logger = Logger.new('faraday.log')
@hostname = options[:hostname]
@@ -64,16 +64,16 @@
end
end
end
def new_project(options = {})
- defaults = { project_name: "Project #{Time.now.strftime('%Y-%m-%d %H:%M:%S')}" }
+ defaults = {project_name: "Project #{Time.now.strftime('%Y-%m-%d %H:%M:%S')}"}
options = defaults.merge(options)
project_id = nil
# TODO: make this a display name and a machine name
- project_hash = { project: { name: "#{options[:project_name]}" } }
+ project_hash = {project: {name: "#{options[:project_name]}"}}
response = @conn.post do |req|
req.url '/projects.json'
req.headers['Content-Type'] = 'application/json'
req.body = project_hash.to_json
@@ -107,75 +107,158 @@
end
analysis_ids
end
+ # return the entire analysis JSON
+ def get_analysis(analysis_id)
+ result = nil
+ response = @conn.get "/analyses/#{analysis_id}.json"
+ if response.status == 200
+ result = JSON.parse(response.body, symbolize_names: true, max_nesting: false)[:analysis]
+ end
+
+ result
+ end
+
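A minimal usage sketch for the new get_analysis call, assuming the class is available as OpenStudio::Analysis::ServerApi, a server is running at the default hostname, and the require path and analysis id shown are illustrative:

    require 'openstudio-analysis' # require path assumed for the gem

    api = OpenStudio::Analysis::ServerApi.new(hostname: 'http://localhost:8080')
    analysis_id = 'bbd57e90-ce59-0131-35de-080027880ca6' # illustrative id

    # Returns the symbolized :analysis hash on HTTP 200, otherwise nil.
    analysis = api.get_analysis(analysis_id)
    puts analysis[:name] if analysis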
+ # Check the status of the analysis. The response format should be:
+ # {
+ # analysis: {
+ # status: "completed",
+ # analysis_type: "batch_run"
+ # },
+ # data_points: [
+ # {
+ # _id: "bbd57e90-ce59-0131-35de-080027880ca6",
+ # status: "completed"
+ # }
+ # ]
+ # }
+ def get_analysis_status(analysis_id, analysis_type)
+ status = nil
+
+ # sleep 2 # super cheesy; need to update how this works. Right now there is a good chance of a
+ # race condition when the analysis state changes.
+ unless analysis_id.nil?
+ resp = @conn.get "analyses/#{analysis_id}/status.json"
+ if resp.status == 200
+ j = JSON.parse resp.body, symbolize_names: true
+ if j && j[:analysis] && j[:analysis][:analysis_type] == analysis_type
+ status = j[:analysis][:status]
+ end
+ end
+ end
+
+ status
+ end
+
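A sketch of polling get_analysis_status until the run finishes; the second argument must match the analysis_type the analysis was started with, and nil is returned until it does (api and analysis_id reused from the sketch above):

    loop do
      status = api.get_analysis_status(analysis_id, 'batch_run')
      break if status == 'completed'
      sleep 5
    end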
+ # return the data point results in JSON format
+ def get_analysis_results(analysis_id)
+ analysis = nil
+
+ response = @conn.get "/analyses/#{analysis_id}/analysis_data.json"
+ if response.status == 200
+ analysis = JSON.parse(response.body, symbolize_names: true, max_nesting: false)
+ end
+
+ analysis
+ end
+
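Once the run is finished, get_analysis_results returns the parsed analysis_data.json payload; a sketch that simply pretty-prints it, reusing api and analysis_id from above:

    require 'json'

    results = api.get_analysis_results(analysis_id)
    puts JSON.pretty_generate(results) if results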
def download_dataframe(analysis_id, format='rdata', save_directory=".")
- response = @conn.get "/analyses/#{analysis_id}/download_data.#{format}"
+ # Set the export=true flag to retrieve all of the variables for the export (not just the visualization variables)
+ response = @conn.get "/analyses/#{analysis_id}/download_data.#{format}?export=true"
if response.status == 200
filename = response['content-disposition'].match(/filename=(\"?)(.+)\1/)[2]
puts "File #{filename} already exists, overwriting" if File.exist?("#{save_directory}/#{filename}")
- File.open("#{save_directory}/#{filename}",'w') {|f| f << response.body}
+ if format == 'rdata'
+ File.open("#{save_directory}/#{filename}", 'wb') { |f| f << response.body }
+ else
+ File.open("#{save_directory}/#{filename}", 'w') { |f| f << response.body }
+ end
end
end
def download_variables(analysis_id, format='rdata', save_directory=".")
response = @conn.get "/analyses/#{analysis_id}/variables/download_variables.#{format}"
if response.status == 200
filename = response['content-disposition'].match(/filename=(\"?)(.+)\1/)[2]
puts "File #{filename} already exists, overwriting" if File.exist?("#{save_directory}/#{filename}")
- File.open("#{save_directory}/#{filename}",'w') {|f| f << response.body}
+ if format == 'rdata'
+ File.open("#{save_directory}/#{filename}", 'wb') { |f| f << response.body }
+ else
+ File.open("#{save_directory}/#{filename}", 'w') { |f| f << response.body }
+ end
end
end
+ def download_datapoint(datapoint_id, save_directory=".")
+ response = @conn.get "/data_points/#{datapoint_id}/download"
+ if response.status == 200
+ filename = response['content-disposition'].match(/filename=(\"?)(.+)\1/)[2]
+ puts "File #{filename} already exists, overwriting" if File.exist?("#{save_directory}/#{filename}")
+ File.open("#{save_directory}/#{filename}", 'wb') { |f| f << response.body }
+ end
+ end
+
def download_all_data_points(analysis_id, save_directory=".")
response = @conn.get "/analyses/#{analysis_id}/download_all_data_points"
if response.status == 200
filename = response['content-disposition'].match(/filename=(\"?)(.+)\1/)[2]
puts "File #{filename} already exists, overwriting" if File.exist?("#{save_directory}/#{filename}")
- File.open("#{save_directory}/#{filename}",'w') {|f| f << response.body}
+ File.open("#{save_directory}/#{filename}", 'wb') { |f| f << response.body }
end
end
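The download helpers all write into save_directory using the filename from the Content-Disposition header, opening the file in binary mode for the binary formats; a sketch, with ids and paths as placeholders:

    datapoint_id = '0a1b2c30-ce59-0131-35df-080027880ca6' # illustrative data point id

    api.download_dataframe(analysis_id, 'csv', '/tmp')   # all export variables as CSV text
    api.download_variables(analysis_id, 'rdata', '/tmp') # variable definitions as binary R data
    api.download_datapoint(datapoint_id, '/tmp')         # one data point archive (binary)
    api.download_all_data_points(analysis_id, '/tmp')    # every data point archive (binary)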
def new_analysis(project_id, options)
- defaults = { analysis_name: nil, reset_uuids: false }
+ defaults = {analysis_name: nil, reset_uuids: false}
options = defaults.merge(options)
fail 'No project id passed' if project_id.nil?
- fail 'no formulation passed to new_analysis' unless options[:formulation_file]
- fail "No formulation exists #{options[:formulation_file]}" unless File.exist?(options[:formulation_file])
- formulation_json = JSON.parse(File.read(options[:formulation_file]), symbolize_names: true)
+ formulation_json = nil
+ if options[:formulation_file]
+ fail "No formulation exists #{options[:formulation_file]}" unless File.exist?(options[:formulation_file])
+ formulation_json = JSON.parse(File.read(options[:formulation_file]), symbolize_names: true)
+ end
# read in the analysis id from the analysis.json file
analysis_id = nil
- if options[:reset_uuids]
- analysis_id = UUID.new.generate
- formulation_json[:analysis][:uuid] = analysis_id
+ if formulation_json
+ if options[:reset_uuids]
+ analysis_id = UUID.new.generate
+ formulation_json[:analysis][:uuid] = analysis_id
- formulation_json[:analysis][:problem][:workflow].each do |wf|
- wf[:uuid] = UUID.new.generate
- if wf[:arguments]
- wf[:arguments].each do |arg|
- arg[:uuid] = UUID.new.generate
+ formulation_json[:analysis][:problem][:workflow].each do |wf|
+ wf[:uuid] = UUID.new.generate
+ if wf[:arguments]
+ wf[:arguments].each do |arg|
+ arg[:uuid] = UUID.new.generate
+ end
end
- end
- if wf[:variables]
- wf[:variables].each do |var|
- var[:uuid] = UUID.new.generate
- var[:argument][:uuid] = UUID.new.generate if var[:argument]
+ if wf[:variables]
+ wf[:variables].each do |var|
+ var[:uuid] = UUID.new.generate
+ var[:argument][:uuid] = UUID.new.generate if var[:argument]
+ end
end
end
+ else
+ analysis_id = formulation_json[:analysis][:uuid]
end
+
+ # set the analysis name
+ formulation_json[:analysis][:name] = "#{options[:analysis_name]}" unless options[:analysis_name].nil?
else
- analysis_id = formulation_json[:analysis][:uuid]
+ formulation_json = {
+ analysis: options
+ }
+ puts formulation_json
+ analysis_id = UUID.new.generate
+ formulation_json[:analysis][:uuid] = analysis_id
end
fail "No analysis id defined in analyis.json #{options[:formulation_file]}" if analysis_id.nil?
- # set the analysis name
- formulation_json[:analysis][:name] = "#{options[:analysis_name]}" unless options[:analysis_name].nil?
-
# save out this file to compare
# File.open('formulation_merge.json', 'w') { |f| f << JSON.pretty_generate(formulation_json) }
response = @conn.post do |req|
req.url "projects/#{project_id}/analyses.json"
@@ -195,11 +278,11 @@
# check if we need to upload the analysis zip file
if options[:upload_file]
fail "upload file does not exist #{options[:upload_file]}" unless File.exist?(options[:upload_file])
- payload = { file: Faraday::UploadIO.new(options[:upload_file], 'application/zip') }
+ payload = {file: Faraday::UploadIO.new(options[:upload_file], 'application/zip')}
response = @conn_multipart.post "analyses/#{analysis_id}/upload.json", payload
if response.status == 201
puts 'Successfully uploaded ZIP file'
else
@@ -209,11 +292,11 @@
analysis_id
end
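In 0.3.2, new_analysis accepts either a formulation file or, when :formulation_file is omitted, a raw options hash that becomes the analysis definition; a sketch of both paths, with file names illustrative:

    project_id = api.new_project

    # Path 1: formulation JSON on disk, optional ZIP of supporting files, fresh UUIDs
    analysis_id = api.new_analysis(project_id,
                                   formulation_file: 'analysis.json',
                                   upload_file: 'analysis.zip',
                                   reset_uuids: true,
                                   analysis_name: 'Office Retrofit Study')

    # Path 2: no formulation file; the options hash itself is posted as the analysis
    analysis_id = api.new_analysis(project_id, name: 'Ad-hoc Analysis')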
def upload_datapoint(analysis_id, options)
- defaults = { reset_uuids: false }
+ defaults = {reset_uuids: false}
options = defaults.merge(options)
fail 'No analysis id passed' if analysis_id.nil?
fail 'No datapoints file passed to new_analysis' unless options[:datapoint_file]
fail "No datapoints_file exists #{options[:datapoint_file]}" unless File.exist?(options[:datapoint_file])
@@ -262,11 +345,11 @@
fail "could not create new datapoints #{response.body}"
end
end
def run_analysis(analysis_id, options)
- defaults = { analysis_action: 'start', without_delay: false }
+ defaults = {analysis_action: 'start', without_delay: false}
options = defaults.merge(options)
puts "Run analysis is configured with #{options.to_json}"
response = @conn.post do |req|
req.url "analyses/#{analysis_id}/action.json"
@@ -281,11 +364,11 @@
fail 'Could not start the analysis'
end
end
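run_analysis and kill_analysis post actions to the same endpoint; a short sketch, with option values mirroring those used by the run_* helpers below:

    api.run_analysis(analysis_id,
                     analysis_action: 'start',
                     without_delay: false,      # queue in the background
                     analysis_type: 'batch_run')

    api.kill_analysis(analysis_id)              # posts analysis_action: 'stop'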
def kill_analysis(analysis_id)
- analysis_action = { analysis_action: 'stop' }
+ analysis_action = {analysis_action: 'stop'}
response = @conn.post do |req|
req.url "analyses/#{analysis_id}/action.json"
req.headers['Content-Type'] = 'application/json'
req.body = analysis_action.to_json
@@ -311,19 +394,140 @@
end
end
end
def get_datapoint_status(analysis_id, filter = nil)
+ data_points = nil
# get the status of the entire analysis
unless analysis_id.nil?
if filter.nil? || filter == ''
resp = @conn.get "analyses/#{analysis_id}/status.json"
- puts "Data points (all): #{resp}"
+ if resp.status == 200
+ data_points = JSON.parse(resp.body, symbolize_names: true)[:data_points]
+ end
else
resp = @conn.get "#{@hostname}/analyses/#{analysis_id}/status.json", jobs: filter
- puts "Data points (#{filter}): #{resp}"
+ if resp.status == 200
+ data_points = JSON.parse(resp.body, symbolize_names: true)[:data_points]
+ end
end
end
+
+ data_points
+ end
+
+ # Return the full JSON of the datapoint
+ def get_datapoint(data_point_id)
+ data_point = nil
+
+ resp = @conn.get "/data_points/#{data_point_id}/show_full.json"
+ if resp.status == 200
+ data_point = JSON.parse resp.body, symbolize_names: true
+ end
+
+ data_point
+ end
+
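A sketch that lists data point states and pulls the full JSON for each completed point; the :_id and :status keys follow the format documented above, and the optional filter argument is a job state such as 'completed':

    data_points = api.get_datapoint_status(analysis_id)
    (data_points || []).each do |dp|
      next unless dp[:status] == 'completed'
      full = api.get_datapoint(dp[:_id])
      puts full.inspect if full
    end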
+ ## Convenience methods that create a project and analysis and run it. These arguably do not belong in this class.
+
+ # create a new analysis and run a single model
+ def run_single_model(formulation_filename, analysis_zip_filename)
+ project_options = {}
+ project_id = new_project(project_options)
+
+ analysis_options = {
+ formulation_file: formulation_filename,
+ upload_file: analysis_zip_filename,
+ reset_uuids: true
+ }
+ analysis_id = new_analysis(project_id, analysis_options)
+
+ # Force this to run in the foreground for now until we can deal with checking the analysis state of the various analyses
+ run_options = {
+ analysis_action: "start",
+ without_delay: true, # run this in the foreground
+ analysis_type: 'single_run',
+ allow_multiple_jobs: true,
+ use_server_as_worker: true,
+ simulate_data_point_filename: 'simulate_data_point.rb',
+ run_data_point_filename: 'run_openstudio_workflow_monthly.rb'
+ }
+ run_analysis(analysis_id, run_options)
+
+ run_options = {
+ analysis_action: "start",
+ without_delay: false, # run in background
+ analysis_type: 'batch_run',
+ allow_multiple_jobs: true,
+ use_server_as_worker: true,
+ simulate_data_point_filename: 'simulate_data_point.rb',
+ run_data_point_filename: 'run_openstudio_workflow_monthly.rb'
+ }
+ run_analysis(analysis_id, run_options)
+
+ analysis_id
+ end
+
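A sketch of the single-model helper, assuming the formulation JSON and analysis ZIP were produced elsewhere; it returns the analysis id, so the status calls above can be used afterwards:

    analysis_id = api.run_single_model('analysis.json', 'analysis.zip')
    puts api.get_analysis_status(analysis_id, 'batch_run')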
+ # Creates a new analysis and runs rgenoud optimization; the number_of_generations argument is not used right now
+ def run_rgenoud(formulation_filename, analysis_zip_filename, number_of_generations)
+ project_options = {}
+ project_id = new_project(project_options)
+
+ analysis_options = {
+ formulation_file: formulation_filename,
+ upload_file: analysis_zip_filename,
+ reset_uuids: true
+ }
+ analysis_id = new_analysis(project_id, analysis_options)
+
+ run_options = {
+ analysis_action: "start",
+ without_delay: false,
+ analysis_type: 'rgenoud',
+ allow_multiple_jobs: true,
+ use_server_as_worker: true,
+ simulate_data_point_filename: 'simulate_data_point.rb',
+ run_data_point_filename: 'run_openstudio_workflow_monthly.rb'
+ }
+ run_analysis(analysis_id, run_options)
+
+ analysis_id
+ end
+
+ def run_lhs(formulation_filename, analysis_zip_filename)
+ project_options = {}
+ project_id = new_project(project_options)
+
+ analysis_options = {
+ formulation_file: formulation_filename,
+ upload_file: analysis_zip_filename,
+ reset_uuids: true
+ }
+ analysis_id = new_analysis(project_id, analysis_options)
+
+ run_options = {
+ analysis_action: "start",
+ without_delay: false,
+ analysis_type: 'lhs',
+ allow_multiple_jobs: true,
+ use_server_as_worker: true,
+ simulate_data_point_filename: 'simulate_data_point.rb',
+ run_data_point_filename: 'run_openstudio_workflow_monthly.rb'
+ }
+ run_analysis(analysis_id, run_options)
+
+ run_options = {
+ analysis_action: "start",
+ without_delay: false, # run in background
+ analysis_type: 'batch_run',
+ allow_multiple_jobs: true,
+ use_server_as_worker: true,
+ simulate_data_point_filename: 'simulate_data_point.rb',
+ run_data_point_filename: 'run_openstudio_workflow_monthly.rb'
+ }
+ run_analysis(analysis_id, run_options)
+
+ analysis_id
end
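The LHS and rgenoud helpers follow the same calling pattern; per the code above, run_lhs queues a follow-up batch_run while run_rgenoud does not, and the generations argument is currently unused:

    api.run_lhs('analysis.json', 'analysis.zip')
    api.run_rgenoud('analysis.json', 'analysis.zip', 30)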
end
end
end