lib/boxgrinder-build/plugins/delivery/s3/s3-plugin.rb in boxgrinder-build-0.9.4 vs lib/boxgrinder-build/plugins/delivery/s3/s3-plugin.rb in boxgrinder-build-0.9.5
- old
+ new
@@ -25,11 +25,11 @@
module BoxGrinder
class S3Plugin < BasePlugin
def after_init
- register_supported_os("fedora", ['13', '14', '15'])
+ register_supported_os("fedora", ['13', '14', '15', '16'])
register_supported_os("centos", ['5'])
register_supported_os("rhel", ['5', '6'])
register_supported_os("sl", ['5', '6'])
@ami_build_dir = "#{@dir.base}/ami"
@@ -43,68 +43,71 @@
validate_plugin_config(['bucket', 'access_key', 'secret_access_key'], 'http://boxgrinder.org/tutorials/boxgrinder-build-plugins/#S3_Delivery_Plugin')
subtype(:ami) do
set_default_config_value('snapshot', false)
validate_plugin_config(['cert_file', 'key_file', 'account_number'], 'http://boxgrinder.org/tutorials/boxgrinder-build-plugins/#S3_Delivery_Plugin')
+
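+ # Fail fast if the configured certificate or key path does not point to an existing local file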
+ raise PluginValidationError, "AWS certificate file doesn't exists, please check the path: '#{@plugin_config['cert_file']}'." unless File.exists?(File.expand_path(@plugin_config['cert_file']))
+ raise PluginValidationError, "AWS key file doesn't exists, please check the path: '#{@plugin_config['key_file']}'." unless File.exists?(File.expand_path(@plugin_config['key_file']))
end
@s3_endpoints = S3Helper::endpoints
- raise PluginValidationError, "Invalid region specified: #{@plugin_config['region']}. This plugin is only aware of the following regions: #{@s3_endpoints.keys.join(", ")}" unless @s3_endpoints.has_key?(@plugin_config['region'])
+ raise PluginValidationError, "Invalid region specified: #{@plugin_config['region']}. This plugin is only aware of the following regions: #{@s3_endpoints.keys.join(", ")}." unless @s3_endpoints.has_key?(@plugin_config['region'])
@plugin_config['account_number'] = @plugin_config['account_number'].to_s.gsub(/-/, '')
# Set global AWS configuration
AWS.config(:access_key_id => @plugin_config['access_key'],
:secret_access_key => @plugin_config['secret_access_key'],
:ec2_endpoint => EC2Helper::endpoints[@plugin_config['region']][:endpoint],
:s3_endpoint => @s3_endpoints[@plugin_config['region']][:endpoint],
:max_retries => 5,
:use_ssl => @plugin_config['use_ssl'])
- #:logger => @log) need to modify our logger to accept blah.log(:level, 'message')
@ec2 = AWS::EC2.new
@s3 = AWS::S3.new
@s3helper = S3Helper.new(@ec2, @s3, :log => @log)
@ec2helper = EC2Helper.new(@ec2, :log => @log)
+
+ subtype(:ami) do
+ # If there is an existing bucket, determine whether its location_constraint matches the region selected
+ if existing_bucket = asset_bucket(false)
+ raise PluginValidationError, "Existing bucket #{@plugin_config['bucket']} has a location constraint that does not match the region selected. " <<
+ "AMI region and bucket location constraint must match." unless constraint_equal?(@s3_endpoints[@plugin_config['region']][:location], existing_bucket.location_constraint)
+ end
+ end
+
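+ # Resolve the bucket once up front; asset_bucket(true) creates it if missing, so later code can simply reuse @bucket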
+ @bucket = asset_bucket(true)
end
def execute
case @type
when :s3
upload_to_bucket(@previous_deliverables)
when :cloudfront
upload_to_bucket(@previous_deliverables, :public_read)
when :ami
ami_dir = ami_key(@appliance_config.name, @plugin_config['path'])
- ami_manifest_key = @s3helper.stub_s3obj(asset_bucket, "#{ami_dir}/#{@appliance_config.name}.ec2.manifest.xml")
+ ami_manifest_key = @s3helper.stub_s3obj(@bucket, "#{ami_dir}/#{@appliance_config.name}.ec2.manifest.xml")
@log.debug "Going to check whether s3 object exists"
- if @s3helper.object_exists?(ami_manifest_key) and @plugin_config['overwrite']
+ if ami_manifest_key.exists? and @plugin_config['overwrite']
@log.info "Object exists, attempting to deregister an existing image"
deregister_image(ami_manifest_key) # Remove existing image
- @s3helper.delete_folder(asset_bucket, ami_dir) # Avoid triggering dupe detection
+ @s3helper.delete_folder(@bucket, ami_dir) # Avoid triggering dupe detection
end
- if !@s3helper.object_exists?(ami_manifest_key) or @plugin_config['snapshot']
+ if !ami_manifest_key.exists? or @plugin_config['snapshot']
@log.info "Doing bundle/snapshot"
bundle_image(@previous_deliverables)
- fix_sha1_sum
upload_image(ami_dir)
end
register_image(ami_manifest_key)
end
end
- # https://jira.jboss.org/browse/BGBUILD-34
- def fix_sha1_sum
- ami_manifest = File.open(@ami_manifest).read
- ami_manifest.gsub!('(stdin)= ', '')
-
- File.open(@ami_manifest, "w") { |f| f.write(ami_manifest) }
- end
-
def upload_to_bucket(previous_deliverables, permissions = :private)
register_deliverable(
:package => "#{@appliance_config.name}-#{@appliance_config.version}.#{@appliance_config.release}-#{@appliance_config.os.name}-#{@appliance_config.os.version}-#{@appliance_config.hardware.arch}-#{current_platform}.tgz"
)
@@ -113,13 +116,13 @@
PackageHelper.new(@config, @appliance_config, :log => @log, :exec_helper => @exec_helper).package(File.dirname(previous_deliverables[:disk]), @deliverables[:package])
remote_path = "#{@s3helper.parse_path(@plugin_config['path'])}#{File.basename(@deliverables[:package])}"
size_m = File.size(@deliverables[:package])/1024**2
- s3_obj = @s3helper.stub_s3obj(asset_bucket,remote_path.gsub(/^\//, '').gsub(/\/\//, ''))
+ s3_obj = @s3helper.stub_s3obj(@bucket,remote_path.gsub(/^\//, '').gsub(/\/\//, ''))
# Does it really exist?
- obj_exists = @s3helper.object_exists?(s3_obj)
+ obj_exists = s3_obj.exists?
if !obj_exists or @plugin_config['overwrite']
@log.info "Will overwrite existing file #{remote_path}" if obj_exists and @plugin_config['overwrite']
@log.info "Uploading #{File.basename(@deliverables[:package])} (#{size_m}MB) to '#{@plugin_config['bucket']}#{remote_path}' path..."
s3_obj.write(:file => @deliverables[:package],
@@ -130,12 +133,12 @@
end
end
def asset_bucket(create_if_missing = true, permissions = :private)
@s3helper.bucket(:bucket => @plugin_config['bucket'],
+ :create_if_missing => create_if_missing,
:acl => permissions,
- :create_of_missing => create_if_missing,
:location_constraint => @s3_endpoints[@plugin_config['region']][:location]
)
end
def bundle_image(deliverables)
@@ -154,11 +157,10 @@
@log.info "Bundling AMI finished."
end
def upload_image(ami_dir)
- asset_bucket(true,:private) # this will create the bucket if needed
@log.info "Uploading #{@appliance_config.name} AMI to bucket '#{@plugin_config['bucket']}'..."
@exec_helper.execute("euca-upload-bundle -U #{@plugin_config['url'].nil? ? "http://#{@s3_endpoints[@plugin_config['region']][:endpoint]}" : @plugin_config['url']} -b #{@plugin_config['bucket']}/#{ami_dir} -m #{@ami_manifest} -a #{@plugin_config['access_key']} -s #{@plugin_config['secret_access_key']}", :redacted => [@plugin_config['access_key'], @plugin_config['secret_access_key']])
end
@@ -194,18 +196,22 @@
return "#{base_path}/#{@appliance_config.hardware.arch}" unless @plugin_config['snapshot']
@log.info "Determining snapshot name"
snapshot = 1
- while @s3helper.object_exists?(
- @s3helper.stub_s3obj(asset_bucket, "#{base_path}-SNAPSHOT-#{snapshot}/#{@appliance_config.hardware.arch}/")
- )
+ while @s3helper.stub_s3obj(@bucket, "#{base_path}-SNAPSHOT-#{snapshot}/#{@appliance_config.hardware.arch}/").exists?
snapshot += 1
end
# Reuse the last key (if there was one)
snapshot -=1 if snapshot > 1 and @plugin_config['overwrite']
"#{base_path}-SNAPSHOT-#{snapshot}/#{@appliance_config.hardware.arch}"
+ end
+
+ # The US standard location constraint is often represented as '' or nil, so normalise both values before comparing
+ def constraint_equal?(a, b)
+ a, b = [a, b].collect { |c| c.nil? ? '' : c }
+ a == b
end
end
end