Sha256: bedfb85e79b71c805bd965f81233c437fac89a3752fcda06a3c6a832312279c5

Contents?: true

Size: 1.56 KB

Versions: 2

Stored size: 1.56 KB

Contents

module S3Stream
  class Upload

    # Streams writes to +s3object+ as a multipart upload and returns the
    # number of bytes written. Example:
    #
    # S3Stream::Upload.to(:s3object => s3object, :log_to => $stdout) do |out|
    #   out.write("abc")
    #   out.write("123")
    # end
    def self.to(args={})
      stream = new(args)
      begin
        yield stream
        stream.close
      rescue
        stream.cancel # abort the partial upload on any error, then re-raise
        raise
      end
      stream.size
    end

    attr_reader :size

    def initialize(args={})
      @s3object = args[:s3object] || raise("Missing :s3object")
      @log = args[:log_to] # optional
      reset
      @upload = @s3object.multipart_upload
    end

    # Buffers +data+ and uploads a part whenever the buffer reaches the
    # current threshold; once the part counter hits MAX_CHUNKS, flushing
    # stops and the remainder goes out as the final part at close.
    def write(data)
      @buffer << data
      flush if @chunk < S3Stream::MAX_CHUNKS && @buffer.size >= @buffer_size
      @size += data.size
    end

    # Uploads any remaining buffered data as the last part and completes
    # the multipart upload.
    def close
      flush if @buffer.size > 0
      unless @upload.close.nil?
        log "Done uploading #{size} bytes to #{location}."
      end
    end

    # Aborts the in-progress multipart upload and resets internal state.
    def cancel
      @upload.abort unless @upload.nil?
      reset
      log "Canceled upload to #{location}."
    end

    private

    def location
      "s3://#{@s3object.bucket.name}/#{@s3object.key}"
    end

    def reset
      @buffer = ""
      @buffer_size = S3Stream::INITIAL_BUFFER_SIZE
      @size = 0
      @chunk = 1
      @upload = nil
    end

    # Uploads the current buffer as the next part, then grows the flush
    # threshold so long streams stay within the part limit.
    def flush
      log "Uploading part #{@chunk} (#{@buffer.size} bytes)."
      @upload.add_part(@buffer)
      @buffer_size = (@buffer_size * S3Stream::BUFFER_GROWTH_FACTOR).to_i
      @buffer.clear
      @chunk += 1
      nil
    end

    def log(msg)
      unless @log.nil?
        @log.puts(msg)
        @log.flush
      end
    end
  end
end
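
The Upload class above references three constants, S3Stream::MAX_CHUNKS, S3Stream::INITIAL_BUFFER_SIZE, and S3Stream::BUFFER_GROWTH_FACTOR, that are defined elsewhere in the gem, and its multipart_upload / add_part / close / abort calls match the aws-sdk v1 S3 API. The sketch below is a minimal illustration of how the pieces might fit together; the constant values, require path, bucket, key, and file names are assumptions for the example, not the gem's actual code.

# A plausible lib/s3stream.rb -- illustrative values only, not the gem's
# real numbers. S3 allows at most 10,000 parts per upload and requires
# every part but the last to be at least 5 MB.
require 'aws-sdk' # aws-sdk v1: S3Object#multipart_upload returns an upload with add_part/close/abort

module S3Stream
  MAX_CHUNKS           = 10_000
  INITIAL_BUFFER_SIZE  = 5 * 1024 * 1024
  BUFFER_GROWTH_FACTOR = 1.5
end

require 's3stream/upload' # the Upload class shown above (assumed require path)

# A call site: stream a local file to S3 without holding it all in memory
# (bucket, key, and file names are placeholders).
s3object = AWS::S3.new.buckets['my-bucket'].objects['exports/data.csv']

bytes = S3Stream::Upload.to(:s3object => s3object, :log_to => $stdout) do |out|
  File.open('data.csv', 'rb') do |file|
    while (chunk = file.read(64 * 1024))
      out.write(chunk)
    end
  end
end

puts "Uploaded #{bytes} bytes."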

Version data entries

2 entries across 2 versions & 1 rubygem

Version           Path
s3stream-0.0.9    lib/s3stream/upload.rb
s3stream-0.0.8    lib/s3stream/upload.rb