lib/sushi_fabric/sushiApp.rb in sushi_fabric-0.4.9 vs lib/sushi_fabric/sushiApp.rb in sushi_fabric-0.5.0
- old
+ new
@@ -1,8 +1,8 @@
#!/usr/bin/env ruby
# encoding: utf-8
-# Version = '20151029-161011'
+# Version = '20151106-140410'
require 'csv'
require 'fileutils'
require 'yaml'
require 'drb/drb'
@@ -380,10 +380,11 @@
end
end
@out.print <<-EOF
cd #{SCRATCH_DIR}
rm -rf #{@scratch_dir} || exit 1
+
EOF
end
def job_main
@out.print "#### NOW THE ACTUAL JOBS STARTS\n"
@@ -494,12 +495,16 @@
def default_node
@workflow_manager||=DRbObject.new_with_uri(WORKFLOW_MANAGER)
@workflow_manager.default_node
end
- def make_job_script
- @out = open(@job_script, 'w')
+ def make_job_script(append = false)
+ @out = if append
+ open(@job_script, 'a')
+ else
+ open(@job_script, 'w')
+ end
job_header
job_main
job_footer
@out.close
end
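A minimal sketch of the new flag's effect, using only the two call forms that appear in this diff: any truthy argument (batch_mode passes the string 'append', not true) switches the script file from overwrite to append, which is what lets several header/main/footer blocks accumulate in a single file.

    make_job_script            # sample_mode / dataset_mode: opens @job_script with 'w', one job per file
    make_job_script('append')  # batch_mode: opens @job_script with 'a', repeated calls grow the same file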
@@ -526,10 +531,23 @@
end
make_job_script
@job_scripts << @job_script
@result_dataset << next_dataset
end
+ def batch_mode
+ @job_script = if @dataset_sushi_id and dataset = DataSet.find_by_id(@dataset_sushi_id.to_i)
+ File.join(@job_script_dir, @analysis_category + '_' + dataset.name.gsub(/\s+/,'_') + '.sh')
+ else
+ File.join(@job_script_dir, @analysis_category + '_' + 'job_script.sh')
+ end
+ @dataset_hash.each do |row|
+ @dataset = Hash[*row.map{|key,value| [key.gsub(/\[.+\]/,'').strip, value]}.flatten]
+ make_job_script('append')
+ @result_dataset << next_dataset
+ end
+ @job_scripts << @job_script
+ end
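batch_mode reuses the per-row loop of sample_mode but funnels every row into one shared script: @job_scripts receives the single script path once, after the loop, while @result_dataset still gains one entry per row. The key-cleanup line strips the bracketed column tags before each iteration; a quick illustration with made-up row data (column names and values are assumptions, not from the gem):

    row = { 'Name' => 's1', 'Read1 [File]' => 'p1001/s1_R1.fastq.gz' }
    Hash[*row.map{|key,value| [key.gsub(/\[.+\]/,'').strip, value]}.flatten]
    # => {"Name"=>"s1", "Read1"=>"p1001/s1_R1.fastq.gz"}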
def save_data_set(data_set_arr, headers, rows, user=nil)
data_set_hash = Hash[*data_set_arr]
unless project = Project.find_by_number(data_set_hash['ProjectNumber'].to_i)
project = Project.new
project.number = data_set_hash['ProjectNumber'].to_i
@@ -579,9 +597,11 @@
@job_scripts = []
if @params['process_mode'] == 'SAMPLE'
sample_mode
elsif @params['process_mode'] == 'DATASET'
dataset_mode
+ elsif @params['process_mode'] == 'BATCH'
+ batch_mode
else
#stop
warn "the process mode (#{@params['process_mode']}) is not defined"
raise "stop job submitting"
end