Sha256: f1dfb87a7108bbf7e0979cb5682266a268e9ca471a383298ddafb1333b579402

Contents?: true

Size: 1.44 KB

Versions: 1

Compression:

Stored size: 1.44 KB

Contents

#!/usr/bin/env ruby

$LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', 'lib'))
require 's3streambackup'

# Reads backup data from STDIN and streams it to an S3 object via multipart
# upload, then prunes older backups so that at most `keep` remain.
S3StreamBackup.new do
	cli do
		description 'Store backup from STDIN to S3'
		stdin :data
		option :keep,
			short: :K,
			description: 'how many backup files to keep',
			cast: Integer,
			default: 2
		option :postfix,
			short: :P,
			description: 'postfix which is appended to backup objects',
			default: ''
	end

	main do |settings, log, s3|
		# Timestamp format %y%m%d_%H%M%S sorts lexicographically, so S3's
		# key-ordered listing below is also chronological (oldest first).
		upload_date = Time.now.utc.strftime "%y%m%d_%H%M%S"
		prefix = "#{settings.prefix}#{settings.name}.backup."
		path = "#{prefix}#{upload_date}#{settings.postfix}"

		bucket = s3.buckets[settings.bucket]
		backup = bucket.objects[path]
		log.info "writing to: #{path}"

		# Passing estimated_content_length (10 GiB here; the true size is
		# unknown since we stream from STDIN) makes the SDK use multipart upload.
		# NOTE(review): "ProgeressLogger" is misspelled, but the name must match
		# the class defined in lib/ — fix it there before renaming it here.
		total_bytes = ProgeressLogger.new
		backup.write(
			content_type: 'application/octet-stream',
			estimated_content_length: 10 * 1024 ** 3
		) do |buffer, bytes|
			total_bytes.log(log, ' written...')
			data = settings.stdin.read(bytes)
			# IO#read(n) returns nil at EOF; writing nothing signals the SDK
			# that the upload is complete. The previous code called
			# nil.bytesize here, raising NoMethodError at end of input.
			if data
				total_bytes << data.bytesize
				buffer.write data
			end
		end
		log.info "total upload size: #{total_bytes.in_bytes_auto}"

		# All backups share the timestamped prefix, so this listing is
		# ordered oldest-first by key.
		backups = bucket.objects.with_prefix(prefix).to_a

		log.info "keeping maximum #{settings.keep} latest backups of #{backups.length} stored"

		if backups.length > settings.keep
			# Distinct name avoids shadowing the `backup` object created above.
			backups.take(backups.length - settings.keep).each do |old_backup|
				log.info "removing oldest backup: #{old_backup.key}"
				old_backup.delete
			end
		end
	end
end

Version data entries

1 entries across 1 versions & 1 rubygems

Version Path
s3streambackup-0.2.0 bin/s3streambackup