lib/fakes3/file_store.rb in fakes3-1.0.0 vs lib/fakes3/file_store.rb in fakes3-1.1.0

- old
+ new

@@ -14,14 +14,15 @@
     # without any sub second precision (e.g. jets3t v0.7.2), and the examples
     # given in the official AWS S3 documentation specify three (3) decimals for
     # sub second precision.
     SUBSECOND_PRECISION = 3
 
-    def initialize(root)
+    def initialize(root, quiet_mode)
       @root = root
       @buckets = []
       @bucket_hash = {}
+      @quiet_mode = quiet_mode
       Dir[File.join(root,"*")].each do |bucket|
         bucket_name = File.basename(bucket)
         bucket_obj = Bucket.new(bucket_name,Time.now,[])
         @buckets << bucket_obj
         @bucket_hash[bucket_name] = bucket_obj
@@ -83,22 +84,24 @@
         obj_root = File.join(@root,bucket,object_name,FAKE_S3_METADATA_DIR)
         metadata = File.open(File.join(obj_root, "metadata")) { |file| YAML::load(file) }
         real_obj.name = object_name
         real_obj.md5 = metadata[:md5]
         real_obj.content_type = metadata.fetch(:content_type) { "application/octet-stream" }
-        real_obj.content_encoding = metadata.fetch(:content_encoding)
+        real_obj.content_encoding = metadata.fetch(:content_encoding) # if metadata.fetch(:content_encoding)
         real_obj.io = RateLimitableFile.open(File.join(obj_root, "content"), 'rb')
         real_obj.size = metadata.fetch(:size) { 0 }
         real_obj.creation_date = File.ctime(obj_root).utc.iso8601(SUBSECOND_PRECISION)
         real_obj.modified_date = metadata.fetch(:modified_date) do
           File.mtime(File.join(obj_root, "content")).utc.iso8601(SUBSECOND_PRECISION)
         end
         real_obj.custom_metadata = metadata.fetch(:custom_metadata) { {} }
         return real_obj
       rescue
-        puts $!
-        $!.backtrace.each { |line| puts line }
+        unless @quiet_mode
+          puts $!
+          $!.backtrace.each { |line| puts line }
+        end
         return nil
       end
     end
 
     def object_metadata(bucket, object)
@@ -133,11 +136,11 @@
         end
       end
 
       metadata_directive = request.header["x-amz-metadata-directive"].first
       if metadata_directive == "REPLACE"
-        metadata_struct = create_metadata(content,request)
+        metadata_struct = create_metadata(content, request)
         File.open(metadata,'w') do |f|
           f << YAML::dump(metadata_struct)
         end
       end
 
@@ -146,11 +149,11 @@
 
       obj = S3Object.new
       obj.name = dst_name
       obj.md5 = src_metadata[:md5]
       obj.content_type = src_metadata[:content_type]
-      obj.content_encoding = src_metadata[:content_encoding]
+      obj.content_encoding = src_metadata[:content_encoding] # if src_metadata[:content_encoding]
       obj.size = src_metadata[:size]
       obj.modified_date = src_metadata[:modified_date]
 
       src_bucket.find(src_name)
       dst_bucket.add(obj)
@@ -201,19 +204,21 @@
 
         obj = S3Object.new
         obj.name = object_name
         obj.md5 = metadata_struct[:md5]
         obj.content_type = metadata_struct[:content_type]
-        obj.content_encoding = metadata_struct[:content_encoding]
+        obj.content_encoding = metadata_struct[:content_encoding] # if metadata_struct[:content_encoding]
         obj.size = metadata_struct[:size]
         obj.modified_date = metadata_struct[:modified_date]
 
         bucket.add(obj)
         return obj
       rescue
-        puts $!
-        $!.backtrace.each { |line| puts line }
+        unless @quiet_mode
+          puts $!
+          $!.backtrace.each { |line| puts line }
+        end
        return nil
       end
     end
 
     def combine_object_parts(bucket, upload_id, object_name, parts, request)
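Note: the recurring change in the hunks above is the new quiet_mode flag. The constructor now stores it, and the rescue blocks in get_object and store_object consult it before printing the exception and backtrace, so error output can be silenced. Below is a minimal sketch of that pattern only; QuietStore and read_yaml are illustrative names and are not part of the fakes3 API.

require 'yaml'

# Sketch of the quiet_mode pattern used in the diff above.
class QuietStore
  def initialize(root, quiet_mode)
    @root = root
    @quiet_mode = quiet_mode
  end

  # Load a YAML file, returning nil on failure; only log when not quiet.
  def read_yaml(path)
    YAML.load(File.read(path))
  rescue
    unless @quiet_mode
      puts $!                                  # exception message
      $!.backtrace.each { |line| puts line }   # full backtrace
    end
    nil
  end
end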
@@ -262,10 +267,14 @@
     # TODO: abstract getting meta data from request.
     def create_metadata(content, request)
       metadata = {}
       metadata[:md5] = Digest::MD5.file(content).hexdigest
       metadata[:content_type] = request.header["content-type"].first
-      metadata[:content_encoding] = request.header["content-encoding"].first
+      content_encoding = request.header["content-encoding"].first
+      metadata[:content_encoding] = content_encoding
+      #if content_encoding
+      #  metadata[:content_encoding] = content_encoding
+      #end
       metadata[:size] = File.size(content)
       metadata[:modified_date] = File.mtime(content).utc.iso8601(SUBSECOND_PRECISION)
       metadata[:amazon_metadata] = {}
       metadata[:custom_metadata] = {}
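Note: the content_encoding edits in this diff are behavior-neutral; the trailing "# if ..." comments and the commented-out conditional in create_metadata leave the stored metadata unchanged. The API-visible change is the constructor: quiet_mode is a second positional argument with no default, so 1.0.0-style FileStore.new(root) calls now raise ArgumentError. A hedged usage sketch follows, assuming the class lives at FakeS3::FileStore and that requiring 'fakes3' loads it; the root path is illustrative.

require 'fakes3'
require 'fileutils'

root = '/tmp/fakes3_root'            # illustrative path, any writable directory works
FileUtils.mkdir_p(root)

quiet = FakeS3::FileStore.new(root, true)   # new in 1.1.0: suppress rescue output
noisy = FakeS3::FileStore.new(root, false)  # 1.0.0-style behaviour: print $! and backtrace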