require 'open-uri'
require 'fog'
require 'digest/sha1'

module EY
  # Thin wrapper around a single S3 bucket used for storing backups.
  class BucketMinder
    def initialize(secret_id, secret_key, bucket_name, region = 'us-east-1')
      @s3 = Fog::Storage.new(
        :provider              => 'AWS',
        :aws_access_key_id     => secret_id,
        :aws_secret_access_key => secret_key,
        :region                => region
      )
      @region = region
      # Default to a deterministic, per-account bucket name when none is given.
      @bucket_name = bucket_name || "ey-backup-#{Digest::SHA1.hexdigest(secret_id)[0..11]}"
      setup_bucket
    end

    attr_reader :bucket_name

    # The Fog directory backing this bucket (nil if it does not exist yet).
    def bucket
      @bucket ||= @s3.directories.get(@bucket_name)
    end

    def file
      bucket
    end

    def files
      bucket.files
    end

    # Create the bucket on first use if it does not already exist.
    def setup_bucket
      unless bucket
        @s3.directories.create(s3_params(:key => @bucket_name))
      end
    end

    # S3 only needs an explicit location constraint outside us-east-1;
    # eu-west-1 uses the legacy 'EU' constraint.
    def s3_params(params = {})
      return params if @region == 'us-east-1'

      if @region == 'eu-west-1'
        params.merge(:location => 'EU')
      else
        params.merge(:location => @region)
      end
    end

    def remove_object(key)
      @s3.delete_object(bucket.key, key)
    end

    # Stream the object's body in chunks to the given block.
    def stream(key, &block)
      files.get(key, &block)
    end

    def list(prefix)
      listing = files.all(:prefix => prefix)
      s3merge(listing)
    end

    # Merge the S3 file listing so that split files named *.part\d\d are
    # grouped under a single logical name carrying all of their keys.
    def s3merge(list)
      return list if list.empty?

      distinct_files = []
      list.each do |item|
        fname = item.key.gsub(/\.part\d+$/, '')
        match = false
        distinct_files.each_with_index do |b, i|
          if b[:name] == fname
            distinct_files[i][:keys] << item.key
            match = true
          end
        end
        unless match
          distinct_files << {:name => fname, :keys => [item.key]}
        end
      end
      distinct_files
    end

    # Upload contents privately with server-side encryption, using
    # multipart uploads in 100MB chunks for large files.
    def put(filename, contents)
      files.create(
        :key                           => filename,
        :body                          => contents,
        :public                        => false,
        :multipart_chunk_size          => 100*1024*1024, # 100MB
        'x-amz-server-side-encryption' => 'AES256'
      )
    end
  end
end
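
# Example usage (a minimal sketch; the credentials, bucket name, and file
# names below are placeholders, not values taken from this codebase):
#
#   minder = EY::BucketMinder.new('ACCESS_KEY_ID', 'SECRET_ACCESS_KEY', nil, 'us-east-1')
#   minder.put('db.dump.part01', File.open('/tmp/db.dump.part01'))
#   minder.list('db.dump')
#   # => [{:name => 'db.dump', :keys => ['db.dump.part01', ...]}]
#   minder.stream('db.dump.part01') { |chunk, remaining, total| $stdout.write(chunk) }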