lib/openstudio/analysis/server_api.rb in openstudio-analysis-0.3.4 vs lib/openstudio/analysis/server_api.rb in openstudio-analysis-0.3.5
- removed (present in 0.3.4 only)
+ added (present in 0.3.5 only)
@@ -4,11 +4,11 @@
module Analysis
class ServerApi
attr_reader :hostname
def initialize(options = {})
- defaults = {hostname: 'http://localhost:8080'}
+ defaults = { hostname: 'http://localhost:8080' }
options = defaults.merge(options)
@logger = Logger.new('faraday.log')
@hostname = options[:hostname]
@@ -50,30 +50,43 @@
def get_project_ids
ids = get_projects
ids.map { |project| project[:uuid] }
end
+ def delete_project(id)
+ deleted = false
+ response = @conn.delete "/projects/#{id}.json"
+ if response.status == 204
+ puts "Successfully deleted project #{id}"
+ deleted = true
+ else
+ puts "ERROR deleting project #{id}"
+ deleted = false
+ end
+
+ deleted
+ end
+
def delete_all
ids = get_project_ids
- puts "Deleting Projects #{ids}"
+ puts "deleting projects with IDs: #{ids}"
+ success = true
ids.each do |id|
- response = @conn.delete "/projects/#{id}.json"
- if response.status == 204
- puts "Successfully deleted project #{id}"
- else
- puts "ERROR deleting project #{id}"
- end
+ r = delete_project id
+ success = false if r == false
end
+
+ success
end
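
Review note: `delete_project` is now a standalone call that returns true/false, and `delete_all` folds those results into a single boolean. A minimal usage sketch against a local server (the require name, class constant, and project UUID are assumptions, not taken from this diff):

    require 'openstudio-analysis' # assumed top-level require for the gem

    api = OpenStudio::Analysis::ServerApi.new(hostname: 'http://localhost:8080')

    # returns true on HTTP 204, false otherwise
    api.delete_project('0123-example-project-uuid')

    # returns false if any individual delete failed
    puts 'some projects could not be deleted' unless api.delete_all
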
def new_project(options = {})
- defaults = {project_name: "Project #{Time.now.strftime('%Y-%m-%d %H:%M:%S')}"}
+ defaults = { project_name: "Project #{Time.now.strftime('%Y-%m-%d %H:%M:%S')}" }
options = defaults.merge(options)
project_id = nil
# TODO: make this a display name and a machine name
- project_hash = {project: {name: "#{options[:project_name]}"}}
+ project_hash = { project: { name: "#{options[:project_name]}" } }
response = @conn.post do |req|
req.url '/projects.json'
req.headers['Content-Type'] = 'application/json'
req.body = project_hash.to_json
@@ -94,12 +107,10 @@
def get_analyses(project_id)
analysis_ids = []
response = @conn.get "/projects/#{project_id}.json"
if response.status == 200
- puts 'received the list of analyses for the project'
-
analyses = JSON.parse(response.body, symbolize_names: true, max_nesting: false)
if analyses[:analyses]
analyses[:analyses].each do |analysis|
analysis_ids << analysis[:_id]
end
@@ -107,10 +118,20 @@
end
analysis_ids
end
+ def get_analyses_detailed(project_id)
+ analyses = nil
+ response = @conn.get "/projects/#{project_id}.json"
+ if response.status == 200
+ analyses = JSON.parse(response.body, symbolize_names: true, max_nesting: false)[:analyses]
+ end
+
+ analyses
+ end
+
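
Review note: the new `get_analyses_detailed` returns the parsed analysis hashes for a project, where `get_analyses` only returns the `_id` strings. A sketch of the difference, assuming `api` is a `ServerApi` instance as in the earlier sketch:

    project_id = api.get_project_ids.first # placeholder: first project on the server

    ids      = api.get_analyses(project_id)          # => array of _id strings
    detailed = api.get_analyses_detailed(project_id) # => array of analysis hashes, or nil on error

    # field names depend on the server response; :_id is the one used elsewhere in this class
    detailed.each { |a| puts a[:_id] } if detailed
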
# return the entire analysis JSON
def get_analysis(analysis_id)
result = nil
response = @conn.get "/analyses/#{analysis_id}.json"
if response.status == 200
@@ -134,12 +155,12 @@
# ]
# }
def get_analysis_status(analysis_id, analysis_type)
status = nil
- #sleep 2 # super cheesy---need to update how this works. Right now there is a good chance to get a
- # race condition when the analysis state changes.
+ # sleep 2 # super cheesy---need to update how this works. Right now there is a good chance to get a
+ # race condition when the analysis state changes.
unless analysis_id.nil?
resp = @conn.get "analyses/#{analysis_id}/status.json"
if resp.status == 200
j = JSON.parse resp.body, symbolize_names: true
if j && j[:analysis] && j[:analysis][:analysis_type] == analysis_type
@@ -149,10 +170,29 @@
end
status
end
+ def get_analysis_status_and_json(analysis_id, analysis_type)
+ status = nil
+ j = nil
+
+ # sleep 2 # super cheesy---need to update how this works. Right now there is a good chance to get a
+ # race condition when the analysis state changes.
+ unless analysis_id.nil?
+ resp = @conn.get "analyses/#{analysis_id}/status.json"
+ if resp.status == 200
+ j = JSON.parse resp.body, symbolize_names: true
+ if j && j[:analysis] && j[:analysis][:analysis_type] == analysis_type
+ status = j[:analysis][:status]
+ end
+ end
+ end
+
+ [status, j]
+ end
+
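
Review note: `get_analysis_status_and_json` returns both the status string and the parsed status payload, so callers polling an analysis do not have to hit the endpoint twice. Sketch (analysis UUID and type are placeholders):

    status, json = api.get_analysis_status_and_json('example-analysis-uuid', 'batch_run')
    puts "analysis state: #{status.inspect}"
    # `json` is the full parsed status document, or nil if the request did not return 200
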
# return the data point results in JSON format
def get_analysis_results(analysis_id)
analysis = nil
response = @conn.get "/analyses/#{analysis_id}/analysis_data.json"
@@ -161,57 +201,172 @@
end
analysis
end
- def download_dataframe(analysis_id, format='rdata', save_directory=".")
- # Set the export = true flag to retrieve all the variables for the export (not just the visualize variables)
- response = @conn.get "/analyses/#{analysis_id}/download_data.#{format}?export=true"
+ def download_dataframe(analysis_id, format = 'rdata', save_directory = '.')
+ downloaded = false
+ file_path_and_name = nil
+
+ response = @conn.get do |r|
+ r.url "/analyses/#{analysis_id}/download_data.#{format}?export=true"
+ r.options.timeout = 3600 # 60 minutes
+ end
if response.status == 200
filename = response['content-disposition'].match(/filename=(\"?)(.+)\1/)[2]
- puts "File #{filename} already exists, overwriting" if File.exist?("#{save_directory}/#{filename}")
+ downloaded = true
+ file_path_and_name = "#{save_directory}/#{filename}"
+ puts "File #{filename} already exists, overwriting" if File.exist?(file_path_and_name)
if format == 'rdata'
- File.open("#{save_directory}/#{filename}", 'wb') { |f| f << response.body }
+ File.open(file_path_and_name, 'wb') { |f| f << response.body }
else
- File.open("#{save_directory}/#{filename}", 'w') { |f| f << response.body }
+ File.open(file_path_and_name, 'w') { |f| f << response.body }
end
end
+
+ [downloaded, file_path_and_name]
end
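
Review note: the download helpers now return a `[downloaded, file_path_and_name]` pair instead of nothing, so callers can tell whether the file was written and where. Sketch ('csv' is an illustrative format the server is assumed to support; the directory is a placeholder):

    ok, path = api.download_dataframe('example-analysis-uuid', 'csv', '/tmp')
    if ok
      puts "wrote #{path}"
    else
      warn 'dataframe download failed'
    end
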
- def download_variables(analysis_id, format='rdata', save_directory=".")
+ def download_variables(analysis_id, format = 'rdata', save_directory = '.')
+ downloaded = false
+ file_path_and_name = nil
+
response = @conn.get "/analyses/#{analysis_id}/variables/download_variables.#{format}"
if response.status == 200
filename = response['content-disposition'].match(/filename=(\"?)(.+)\1/)[2]
- puts "File #{filename} already exists, overwriting" if File.exist?("#{save_directory}/#{filename}")
+ downloaded = true
+ file_path_and_name = "#{save_directory}/#{filename}"
+ puts "File #{filename} already exists, overwriting" if File.exist?(file_path_and_name)
if format == 'rdata'
- File.open("#{save_directory}/#{filename}", 'wb') { |f| f << response.body }
+ File.open(file_path_and_name, 'wb') { |f| f << response.body }
else
- File.open("#{save_directory}/#{filename}", 'w') { |f| f << response.body }
+ File.open(file_path_and_name, 'w') { |f| f << response.body }
end
end
+
+ [downloaded, file_path_and_name]
end
- def download_datapoint(datapoint_id, save_directory=".")
+ def download_datapoint(datapoint_id, save_directory = '.')
+ downloaded = false
+ file_path_and_name = nil
+
response = @conn.get "/data_points/#{datapoint_id}/download"
if response.status == 200
filename = response['content-disposition'].match(/filename=(\"?)(.+)\1/)[2]
- puts "File #{filename} already exists, overwriting" if File.exist?("#{save_directory}/#{filename}")
- File.open("#{save_directory}/#{filename}", 'wb') { |f| f << response.body }
+ downloaded = true
+ file_path_and_name = "#{save_directory}/#{filename}"
+ puts "File #{filename} already exists, overwriting" if File.exist?(file_path_and_name)
+ File.open(file_path_and_name, 'wb') { |f| f << response.body }
end
+
+ [downloaded, file_path_and_name]
end
- def download_all_data_points(analysis_id, save_directory=".")
- response = @conn.get "/analyses/#{analysis_id}/download_all_data_points"
+ # Download a MongoDB Snapshot. This database can get large. For 13,000 simulations with
+ # DEnCity reporting, the size is around 325MB
+ def download_database(save_directory = '.')
+ downloaded = false
+ file_path_and_name = nil
+
+ response = @conn.get do |r|
+ r.url '/admin/backup_database?full_backup=true'
+ r.options.timeout = 3600 # 60 minutes
+ end
+
if response.status == 200
filename = response['content-disposition'].match(/filename=(\"?)(.+)\1/)[2]
- puts "File #{filename} already exists, overwriting" if File.exist?("#{save_directory}/#{filename}")
- File.open("#{save_directory}/#{filename}", 'wb') { |f| f << response.body }
+ downloaded = true
+ file_path_and_name = "#{save_directory}/#{filename}"
+ puts "File #{filename} already exists, overwriting" if File.exist?(file_path_and_name)
+ File.open(file_path_and_name, 'wb') { |f| f << response.body }
end
+
+ [downloaded, file_path_and_name]
end
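
Review note: `download_database` pulls a full MongoDB backup and uses a 60-minute Faraday timeout because the archive can be hundreds of megabytes. Sketch (directory is a placeholder):

    ok, path = api.download_database('/tmp')
    puts ok ? "backup saved to #{path}" : 'backup failed'
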
+ def download_datapoint_reports(datapoint_id, save_directory = '.')
+ downloaded = false
+ file_path_and_name = nil
+
+ response = @conn.get "/data_points/#{datapoint_id}/download_reports"
+ if response.status == 200
+ filename = response['content-disposition'].match(/filename=(\"?)(.+)\1/)[2]
+ downloaded = true
+ file_path_and_name = "#{save_directory}/#{filename}"
+ puts "File #{filename} already exists, overwriting" if File.exist?(file_path_and_name)
+ File.open(file_path_and_name, 'wb') { |f| f << response.body }
+ end
+
+ [downloaded, file_path_and_name]
+ end
+
+ def download_datapoints_reports(analysis_id, save_directory = '.')
+ # get the list of all the datapoints
+ dps = get_datapoint_status(analysis_id)
+ dps.each do |dp|
+ if dp[:status] == 'completed'
+ download_datapoint_reports(dp[:_id], save_directory)
+ end
+ end
+ end
+
+ def download_datapoint_jsons(analysis_id, save_directory = '.')
+ # get the list of all the datapoints
+ dps = get_datapoint_status(analysis_id)
+ dps.each do |dp|
+ if dp[:status] == 'completed'
+ dp_h = get_datapoint(dp[:_id])
+ File.open("#{save_directory}/data_point_#{dp[:_id]}.json", 'w') { |f| f << JSON.pretty_generate(dp_h) }
+ end
+ end
+ end
+
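
Review note: `download_datapoints_reports` and `download_datapoint_jsons` both walk the datapoint list for an analysis and only touch datapoints whose status is 'completed'. Sketch (paths are placeholders; the target directory must already exist because the helpers write directly into it):

    require 'fileutils'

    analysis_id = 'example-analysis-uuid'
    out_dir = '/tmp/osa_output'
    FileUtils.mkdir_p(out_dir)

    api.download_datapoints_reports(analysis_id, out_dir) # report archive per completed datapoint
    api.download_datapoint_jsons(analysis_id, out_dir)    # pretty-printed JSON per completed datapoint
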
+ def datapoint_dencity(datapoint_id)
+ # Return the JSON (Full) of the datapoint
+ data_point = nil
+
+ resp = @conn.get "/data_points/#{datapoint_id}/dencity.json"
+ if resp.status == 200
+ data_point = JSON.parse resp.body, symbolize_names: true
+ end
+
+ data_point
+ end
+
+ def analysis_dencity_json(analysis_id)
+ # Return the hash of the dencity format for the analysis
+ dencity = nil
+
+ resp = @conn.get "/analyses/#{analysis_id}/dencity.json"
+ if resp.status == 200
+ dencity = JSON.parse resp.body, symbolize_names: true
+ end
+
+ dencity
+ end
+
+ def download_dencity_json(analysis_id, save_directory = '.')
+ a_h = analysis_dencity_json(analysis_id)
+ if a_h
+ File.open("#{save_directory}/analysis_#{analysis_id}_dencity.json", 'w') { |f| f << JSON.pretty_generate(a_h) }
+ end
+ end
+
+ def download_datapoint_dencity_jsons(analysis_id, save_directory = '.')
+ # get the list of all the datapoints
+ dps = get_datapoint_status(analysis_id)
+ dps.each do |dp|
+ if dp[:status] == 'completed'
+ dp_h = datapoint_dencity(dp[:_id])
+ File.open("#{save_directory}/data_point_#{dp[:_id]}_dencity.json", 'w') { |f| f << JSON.pretty_generate(dp_h) }
+ end
+ end
+ end
+
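
Review note: the DEnCity helpers follow the same pattern: the two getters return parsed hashes (or nil on a non-200 response) and the two download helpers write pretty-printed JSON to disk. Sketch (UUID and directory are placeholders):

    dencity = api.analysis_dencity_json('example-analysis-uuid')
    puts dencity.keys.inspect if dencity

    api.download_dencity_json('example-analysis-uuid', '/tmp')             # analysis-level JSON
    api.download_datapoint_dencity_jsons('example-analysis-uuid', '/tmp')  # one file per completed datapoint
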
def new_analysis(project_id, options)
- defaults = {analysis_name: nil, reset_uuids: false}
+ defaults = { analysis_name: nil, reset_uuids: false }
options = defaults.merge(options)
fail 'No project id passed' if project_id.nil?
formulation_json = nil
@@ -247,11 +402,11 @@
# set the analysis name
formulation_json[:analysis][:name] = "#{options[:analysis_name]}" unless options[:analysis_name].nil?
else
formulation_json = {
- analysis: options
+ analysis: options
}
puts formulation_json
analysis_id = UUID.new.generate
formulation_json[:analysis][:uuid] = analysis_id
end
@@ -278,13 +433,16 @@
# check if we need to upload the analysis zip file
if options[:upload_file]
fail "upload file does not exist #{options[:upload_file]}" unless File.exist?(options[:upload_file])
- payload = {file: Faraday::UploadIO.new(options[:upload_file], 'application/zip')}
- response = @conn_multipart.post "analyses/#{analysis_id}/upload.json", payload
+ payload = { file: Faraday::UploadIO.new(options[:upload_file], 'application/zip') }
+ response = @conn_multipart.post "analyses/#{analysis_id}/upload.json", payload do |req|
+ req.options[:timeout] = 1800 # seconds
+ end
+
if response.status == 201
puts 'Successfully uploaded ZIP file'
else
fail response.inspect
end
@@ -292,11 +450,11 @@
analysis_id
end
def upload_datapoint(analysis_id, options)
- defaults = {reset_uuids: false}
+ defaults = { reset_uuids: false }
options = defaults.merge(options)
fail 'No analysis id passed' if analysis_id.nil?
fail 'No datapoints file passed to new_analysis' unless options[:datapoint_file]
fail "No datapoints_file exists #{options[:datapoint_file]}" unless File.exist?(options[:datapoint_file])
@@ -345,11 +503,11 @@
fail "could not create new datapoints #{response.body}"
end
end
def run_analysis(analysis_id, options)
- defaults = {analysis_action: 'start', without_delay: false}
+ defaults = { analysis_action: 'start', without_delay: false }
options = defaults.merge(options)
puts "Run analysis is configured with #{options.to_json}"
response = @conn.post do |req|
req.url "analyses/#{analysis_id}/action.json"
@@ -364,11 +522,11 @@
fail 'Could not start the analysis'
end
end
def kill_analysis(analysis_id)
- analysis_action = {analysis_action: 'stop'}
+ analysis_action = { analysis_action: 'stop' }
response = @conn.post do |req|
req.url "analyses/#{analysis_id}/action.json"
req.headers['Content-Type'] = 'application/json'
req.body = analysis_action.to_json
@@ -428,67 +586,67 @@
end
## here are a bunch of runs that really don't belong here.
# create a new analysis and run a single model
- def run_single_model(formulation_filename, analysis_zip_filename)
+ def run_single_model(formulation_filename, analysis_zip_filename, run_data_point_filename = 'run_openstudio_workflow_monthly.rb')
project_options = {}
project_id = new_project(project_options)
analysis_options = {
- formulation_file: formulation_filename,
- upload_file: analysis_zip_filename,
- reset_uuids: true
+ formulation_file: formulation_filename,
+ upload_file: analysis_zip_filename,
+ reset_uuids: true
}
analysis_id = new_analysis(project_id, analysis_options)
# Force this to run in the foreground for now until we can deal with checking the analysis state of the various analyses
run_options = {
- analysis_action: "start",
- without_delay: true, # run this in the foreground
- analysis_type: 'single_run',
- allow_multiple_jobs: true,
- use_server_as_worker: true,
- simulate_data_point_filename: 'simulate_data_point.rb',
- run_data_point_filename: 'run_openstudio_workflow_monthly.rb'
+ analysis_action: 'start',
+ without_delay: true, # run this in the foreground
+ analysis_type: 'single_run',
+ allow_multiple_jobs: true,
+ use_server_as_worker: true,
+ simulate_data_point_filename: 'simulate_data_point.rb',
+ run_data_point_filename: run_data_point_filename
}
run_analysis(analysis_id, run_options)
run_options = {
- analysis_action: "start",
- without_delay: false, # run in background
- analysis_type: 'batch_run',
- allow_multiple_jobs: true,
- use_server_as_worker: true,
- simulate_data_point_filename: 'simulate_data_point.rb',
- run_data_point_filename: 'run_openstudio_workflow_monthly.rb'
+ analysis_action: 'start',
+ without_delay: false, # run in background
+ analysis_type: 'batch_run',
+ allow_multiple_jobs: true,
+ use_server_as_worker: true,
+ simulate_data_point_filename: 'simulate_data_point.rb',
+ run_data_point_filename: run_data_point_filename
}
run_analysis(analysis_id, run_options)
analysis_id
end
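
Review note: `run_single_model` now accepts the data point workflow script as an optional third argument, defaulting to 'run_openstudio_workflow_monthly.rb'. Sketch (file paths are placeholders; the alternate script name is hypothetical and must exist in the analysis zip / worker image):

    # default monthly workflow
    api.run_single_model('analysis.json', 'analysis.zip')

    # hypothetical alternate workflow script
    api.run_single_model('analysis.json', 'analysis.zip', 'run_openstudio_workflow.rb')
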
# creates a new analysis and runs rgenoud optimization - number of generations isn't used right now
- def run_rgenoud(formulation_filename, analysis_zip_filename, number_of_generations)
+ def run_rgenoud(formulation_filename, analysis_zip_filename, _number_of_generations)
project_options = {}
project_id = new_project(project_options)
analysis_options = {
- formulation_file: formulation_filename,
- upload_file: analysis_zip_filename,
- reset_uuids: true
+ formulation_file: formulation_filename,
+ upload_file: analysis_zip_filename,
+ reset_uuids: true
}
analysis_id = new_analysis(project_id, analysis_options)
run_options = {
- analysis_action: "start",
- without_delay: false,
- analysis_type: 'rgenoud',
- allow_multiple_jobs: true,
- use_server_as_worker: true,
- simulate_data_point_filename: 'simulate_data_point.rb',
- run_data_point_filename: 'run_openstudio_workflow_monthly.rb'
+ analysis_action: 'start',
+ without_delay: false,
+ analysis_type: 'rgenoud',
+ allow_multiple_jobs: true,
+ use_server_as_worker: true,
+ simulate_data_point_filename: 'simulate_data_point.rb',
+ run_data_point_filename: 'run_openstudio_workflow_monthly.rb'
}
run_analysis(analysis_id, run_options)
analysis_id
end
@@ -496,36 +654,78 @@
def run_lhs(formulation_filename, analysis_zip_filename)
project_options = {}
project_id = new_project(project_options)
analysis_options = {
- formulation_file: formulation_filename,
- upload_file: analysis_zip_filename,
- reset_uuids: true
+ formulation_file: formulation_filename,
+ upload_file: analysis_zip_filename,
+ reset_uuids: true
}
analysis_id = new_analysis(project_id, analysis_options)
run_options = {
- analysis_action: "start",
- without_delay: false,
- analysis_type: 'lhs',
- allow_multiple_jobs: true,
- use_server_as_worker: true,
- simulate_data_point_filename: 'simulate_data_point.rb',
- run_data_point_filename: 'run_openstudio_workflow_monthly.rb'
+ analysis_action: 'start',
+ without_delay: false,
+ analysis_type: 'lhs',
+ allow_multiple_jobs: true,
+ use_server_as_worker: true,
+ simulate_data_point_filename: 'simulate_data_point.rb',
+ run_data_point_filename: 'run_openstudio_workflow_monthly.rb'
}
run_analysis(analysis_id, run_options)
run_options = {
- analysis_action: "start",
- without_delay: false, # run in background
- analysis_type: 'batch_run',
- allow_multiple_jobs: true,
- use_server_as_worker: true,
- simulate_data_point_filename: 'simulate_data_point.rb',
- run_data_point_filename: 'run_openstudio_workflow_monthly.rb'
+ analysis_action: 'start',
+ without_delay: false, # run in background
+ analysis_type: 'batch_run',
+ allow_multiple_jobs: true,
+ use_server_as_worker: true,
+ simulate_data_point_filename: 'simulate_data_point.rb',
+ run_data_point_filename: 'run_openstudio_workflow_monthly.rb'
}
run_analysis(analysis_id, run_options)
+
+ analysis_id
+ end
+
+ def run_analysis_detailed(formulation_filename, analysis_zip_filename,
+ analysis_type, allow_multiple_jobs, server_as_worker, run_data_point_filename)
+ project_options = {}
+ project_id = new_project(project_options)
+
+ analysis_options = {
+ formulation_file: formulation_filename,
+ upload_file: analysis_zip_filename,
+ reset_uuids: true
+ }
+ analysis_id = new_analysis(project_id, analysis_options)
+
+ server_as_worker = true if analysis_type == 'optim' || analysis_type == 'rgenoud'
+ run_options = {
+ analysis_action: 'start',
+ without_delay: false,
+ analysis_type: analysis_type,
+ allow_multiple_jobs: allow_multiple_jobs,
+ use_server_as_worker: server_as_worker,
+ simulate_data_point_filename: 'simulate_data_point.rb',
+ run_data_point_filename: run_data_point_filename
+ }
+ run_analysis(analysis_id, run_options)
+
+ # If the analysis is LHS, then go ahead and run batch run because there is
+ # no explicit way to tell the system to do it
+ if analysis_type == 'lhs' || analysis_type == 'preflight' || analysis_type == 'single_run'
+ run_options = {
+ analysis_action: 'start',
+ without_delay: false,
+ analysis_type: 'batch_run',
+ allow_multiple_jobs: allow_multiple_jobs,
+ use_server_as_worker: server_as_worker,
+ simulate_data_point_filename: 'simulate_data_point.rb',
+ run_data_point_filename: run_data_point_filename
+ }
+ run_analysis(analysis_id, run_options)
+ end
analysis_id
end
end
end
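
Review note: `run_analysis_detailed` generalizes the run_* wrappers above: it forces server-as-worker for 'optim' and 'rgenoud', and it chains a follow-up 'batch_run' submission for 'lhs', 'preflight', and 'single_run' because those samplers do not run the simulations themselves. Sketch (paths are placeholders):

    analysis_id = api.run_analysis_detailed(
      'analysis.json',                      # formulation file
      'analysis.zip',                       # support files zip
      'lhs',                                # analysis type
      true,                                 # allow_multiple_jobs
      false,                                # server_as_worker (forced to true for optim/rgenoud)
      'run_openstudio_workflow_monthly.rb'  # data point workflow script
    )
    puts api.get_analysis_status(analysis_id, 'batch_run')
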