Sha256: 56fffd4e8769a771b1cba02f6f63b0e08bd4c8e9ace6342bd555dae9f0392cef
Contents?: true
Size: 1.88 KB
Versions: 26
Compression:
Stored size: 1.88 KB
Contents
require 'open-uri'
require 'digest' # Digest::SHA1 is used below; require it explicitly

module EY
  # Manages a single S3 bucket (via Fog) used for storing backups:
  # creates the bucket on demand, lists/streams/uploads/deletes objects,
  # and merges split-file listings back into logical files.
  class BucketMinder
    # @param secret_id   [String] AWS access key id
    # @param secret_key  [String] AWS secret access key
    # @param bucket_name [String, nil] explicit bucket name; when nil a
    #   deterministic name is derived from the access key id
    # @param region      [String] AWS region (defaults to 'us-east-1')
    def initialize(secret_id, secret_key, bucket_name, region = 'us-east-1')
      @s3 = Fog::Storage.new(
        :provider              => 'AWS',
        :aws_access_key_id     => secret_id,
        :aws_secret_access_key => secret_key,
        :region                => region
      )
      @region      = region
      @bucket_name = bucket_name || "ey-backup-#{Digest::SHA1.hexdigest(secret_id)[0..11]}"
      setup_bucket
    end

    attr_reader :bucket_name

    # Lazily fetched Fog directory for the configured bucket (nil if absent).
    def bucket
      @bucket ||= @s3.directories.get(@bucket_name)
    end

    # File collection of the bucket.
    def files
      bucket.files
    end

    # Creates the bucket if it does not already exist.
    def setup_bucket
      @s3.directories.create(s3_params(:key => @bucket_name)) unless bucket
    end

    # Adds the Fog :location option needed for non-default regions.
    # us-east-1 must not send a location constraint; eu-west-1 maps to 'EU'.
    def s3_params(params = {})
      return params if @region == 'us-east-1'

      location = @region == 'eu-west-1' ? 'EU' : @region
      params.merge(:location => location)
    end

    # Deletes a single object from the bucket.
    def remove_object(key)
      @s3.delete_object(bucket.key, key)
    end

    # Streams an object's body in chunks to the given block.
    def stream(key, &block)
      files.get(key, &block)
    end

    # Lists objects under +prefix+, merging split parts into logical files.
    def list(prefix)
      s3merge(files.all(:prefix => prefix))
    end

    # Merge an S3 file listing so split files named "<name>.part<NN>" are
    # grouped under their logical name. Returns an array of
    # {:name => String, :keys => [String, ...]} hashes, preserving the order
    # in which each logical name first appears in +list+.
    #
    # FIX: the previous regex /.part\d+$/ left the dot unescaped, so ANY
    # character preceding "part<NN>" was stripped (e.g. "rapart3" -> "r").
    # The dot is now escaped so only a literal ".part<NN>" suffix is removed.
    # Also replaced the O(n^2) rescan of merged entries with a hash index.
    def s3merge(list)
      return list if list.empty?

      merged = []
      index  = {}
      list.each do |item|
        name = item.key.sub(/\.part\d+$/, '')
        if (entry = index[name])
          entry[:keys] << item.key
        else
          entry = { :name => name, :keys => [item.key] }
          index[name] = entry
          merged << entry
        end
      end
      merged
    end

    # Uploads +contents+ to the bucket under +filename+.
    def put(filename, contents)
      files.create(:key => filename, :body => contents)
    end
  end
end
Version data entries
26 entries across 26 versions & 1 rubygems