module S33r
# Use this class to do operations on the Service, e.g.
# creating buckets, deleting buckets, listing all buckets,
# returning a single bucket.
class Client
# Options used to create this Client.
attr_reader :created_with_options
#-- These are used for creating URLs.
# Use SSL for requests.
attr_accessor :use_ssl
# Use subdomains (set on host header)
attr_accessor :subdomain
# Default expiry for authenticated URLs.
attr_accessor :expires
# Default canned ACL string to apply to all put requests.
attr_accessor :canned_acl
# Amazon keys.
attr_accessor :access, :secret
# Get default options passed to every call to do_request.
def request_defaults
defaults = {}
defaults[:use_ssl] = @use_ssl
defaults[:subdomain] = @subdomain
defaults[:expires] = @expires
defaults[:access] = @access
defaults[:secret] = @secret
defaults[:canned_acl] = @canned_acl
defaults
end
# Get the settings for this client.
def settings
request_defaults.merge(:dump_requests => @dump_requests,
:chunk_size => @chunk_size, :persistent => @persistent)
end
# Create a plain Client.
def initialize(options={})
set_options(options)
end
# Set up a client from a YAML file.
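#
# For example (the path is a placeholder):
#
#   client = S33r::Client.init('/path/to/s3-config.yml')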
def self.init(yaml_file)
config, options = S33r.load_config(yaml_file)
config.merge!(options)
self.new(config)
end
# Set options for the client.
#
# +options+ may include the following which alter how the Client interacts with S3; they also
# influence URLs you may generate from the Client:
# * :access => 'aws access key' (defaults to nil)
# * :secret => 'aws secret access key' (defaults to nil)
# * :use_ssl => false: to use plain HTTP for requests
# sent by this client (default=true). If a client has :use_ssl => true,
# any URLs you generate from it will be SSL URLs unless you explicitly
# disable this behaviour (see url for details).
# * :expires => expiry: set the default value to be passed as the :expires
# option when generating authenticated URLs. Should be parseable by S33r.parse_expiry.
# * :canned_acl => 'public-read': set a default canned acl to apply to all put
# requests.
# * :subdomain => true: use subdomains (set on the host header) in URLs (default=false)
#
# These options change the behaviour of the HTTP client which actually sends the request:
# * :chunk_size => Integer: use a non-standard chunk size;
# default is to use S33r::DEFAULT_CHUNK_SIZE.
# * :persistent => true: use persistent HTTP connections
# (default=false).
# * :dump_requests => true: to dump all request headers before the request is sent.
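#
# For example, a client which talks plain HTTP and applies a default
# canned ACL to every put (the key values here are placeholders):
#
#   client = S33r::Client.new(:access => 'my-access-key',
#     :secret => 'my-secret-key',
#     :use_ssl => false,
#     :canned_acl => 'public-read')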
def set_options(options={})
# General client options.
@access = options[:access]
@secret = options[:secret]
@subdomain = (true == options[:subdomain])
@use_ssl = (options[:use_ssl] != false)
@expires = options[:expires] || 'never'
@canned_acl = options[:canned_acl] || nil
# Options specific to the mechanics of the HTTP request.
@dump_requests = options[:dump_requests] || false
@chunk_size = options[:chunk_size]
@persistent = options[:persistent] || false
@created_with_options = options
end
# List all buckets.
#
# Returns a hash mapping bucket names to Bucket instances; the hash
# will be empty if the bucket listing parse fails for any reason
# (e.g. no Bucket elements occur in it).
#
# +options+ is passed through to get_bucket, making it possible to detach
# retrieved buckets from the Client instance, and to pass other options to
# the bucket.
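#
# For example, printing each bucket name ('client' is an existing
# Client instance):
#
#   client.buckets.each do |name, bucket|
#     puts name
#   end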
def buckets(options={})
resp = do_get
bucket_list_xml = resp.body
doc = XML.get_xml_doc(S33r.remove_namespace(bucket_list_xml))
buckets = {}
doc.find("//Bucket").to_a.each do |node|
bucket_name = node.xget('Name')
if bucket_name
# CreationDate is a string in format '2006-10-17T15:14:39.000Z'.
creation_date = Time.parse(node.xget('CreationDate'))
# The Bucket instances inherit the request dumping behaviour
# of this client.
buckets[bucket_name] = get_bucket(bucket_name, options)
end
end
buckets
end
alias :list_buckets :buckets
# Just get a sorted array of names of buckets.
def bucket_names
buckets.keys.sort
end
# Get a Bucket instance bound to this Client.
#
# +options+:
# * :orphan => true: create the Bucket in isolation from
# this Client and don't inherit any of its instance settings.
#
# Other options are passed through to Bucket.new.
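#
# For example ('mybucket' is a placeholder):
#
#   bucket = client.get_bucket('mybucket')
#   client.get_bucket('mybucket') { |b| puts b.name }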
def get_bucket(bucket_name, options={})
orphan = options.delete(:orphan)
unless orphan
options.merge!(settings) { |key, old_val, new_val| old_val }
end
bucket = Bucket.new(bucket_name, options)
yield bucket if block_given?
bucket
end
# Create a new Bucket.
def create_bucket(name, options={})
options[:create] = true
get_bucket(name, options)
end
# List entries in a bucket.
#
# +options+: hash of options on the bucket listing request, passed as querystring parameters to S3
# (see http://docs.amazonwebservices.com/AmazonS3/2006-03-01/).
# * :prefix => 'some_string': restrict results to keys beginning with 'some_string'
# * :marker => 'some_string': restrict results to keys occurring lexicographically after 'some_string'
# * :max_keys => Integer: return at most this number of keys (maximum possible value is 1000)
# * :delimiter => 'some_string': keys containing the same string between prefix and the delimiter
# are rolled up into a CommonPrefixes element inside the response
#
# NB if you pass a :marker, this takes up one of your :max_keys; so if you are fetching page
# two from a bucket, and you want 10 items, you need to set :max_keys to 11.
#
# To page through a bucket 10 keys at a time, you can do:
#
# listing = list_bucket('mybucket', :max_keys => 10)
# listing = list_bucket('mybucket', :max_keys => 11, :marker => listing.last_key)
# listing = list_bucket('mybucket', :max_keys => 11, :marker => listing.last_key)
# etc.
#
# Note in the example code, +listing+ is a BucketListing instance; call its contents method
# to get a hash of the keys in the bucket, along with associated objects.
#
# Returns BucketListing instance.
#-- TODO: testing
def listing(options={})
querystring = options[:querystring] || {}
# Check :max_keys isn't higher than the maximum allowed by S3.
if options[:max_keys]
max_keys = options[:max_keys].to_i
if max_keys > BUCKET_LIST_MAX_MAX_KEYS
raise BucketListingMaxKeysError, "max_keys option to list bucket cannot be > #{BUCKET_LIST_MAX_MAX_KEYS}"
end
querystring['max-keys'] = max_keys
end
['prefix', 'marker', 'delimiter'].each do |key|
key_sym = key.to_sym
querystring[key] = options[key_sym] if options[key_sym]
end
options[:querystring] = querystring
resp = do_get(options)
if resp.ok?
@listing = BucketListing.new(resp.body)
else
raise resp.s3_error
end
end
alias :objects :listing
# List the contents of a bucket.
def list_bucket(bucket_name, options={})
options[:bucket] = bucket_name
listing(options)
end
# Delete a Bucket.
#
# +options+:
# * :force => true: delete the contents of the bucket first (a bucket must be
# empty before S3 will delete it). This option also sets :escape => true so keys
# are escaped correctly when they are deleted.
# * :escape => true: CGI::escape keys when they are appended to the path.
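#
# For example, deleting a bucket and everything in it ('mybucket'
# is a placeholder):
#
#   client.delete_bucket('mybucket', :force => true)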
def delete_bucket(bucket_name, options={})
options[:bucket] = bucket_name
if options[:force]
options[:escape] = true
listing(options).keys.each { |key| do_delete(options.merge(:key => key)) }
end
do_delete(options).ok?
end
# Check whether a bucket exists on S3.
def bucket_exists?(name, options={})
options[:bucket] = name
do_head(options).ok?
end
# Put a "thing" onto S3.
#
# +thing+ may be a string of data, an S3Object, an S3ACL::Policy,
# a LoggingResource, or (with :file => true) the name of a file to load.
#
# Anything you pass in +options+ will override any values
# inferred from the +thing+ (e.g. content type, key).
#
# +options+:
# * :key => 'some-key' (required unless thing is an S3Object).
# * :bucket => 'some-bucket'
# * :content_type => 'text/plain'
# * :render_as_attachment => Boolean
# * :file => true: thing is a filename, so load it as a file
# * :canned_acl => 'public-read': one of S33r::CANNED_ACLS, to set a canned
# acl on a put.
#
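# For example, putting a string as a text file (bucket and key names
# are placeholders):
#
#   client.put('hello world', :bucket => 'mybucket',
#     :key => 'greeting.txt', :content_type => 'text/plain')
#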
#-- TODO: finish documentation for options
def put(thing, options={}, headers={})
is_file = options[:file]
# thing is a file, so load it.
if is_file and thing.is_a?(String)
# Use the filename as the key unless it is set already.
options[:key] ||= thing
# Guess the content type unless it's been set.
unless options[:content_type]
mime_type = guess_mime_type(thing)
content_type = mime_type.simplified
options[:content_type] = content_type
end
elsif thing.is_a?(S3Object)
options[:key] ||= thing.key
data = thing.value
options[:content_type] ||= thing.content_type
options[:render_as_attachment] ||= thing.render_as_attachment
headers = metadata_headers(thing.meta)
elsif thing.is_a?(Policy) || thing.is_a?(LoggingResource)
data = thing.to_xml
options[:content_type] = 'text/xml'
else
data = thing
end
key = options[:key]
# Headers for content type etc.
headers.merge! content_headers(options[:content_type], key, options[:render_as_attachment])
if is_file
File.open(thing) do |data|
do_put(data, options, headers).ok?
end
else
do_put(data, options, headers).ok?
end
end
# Put a file onto S3 (shortcut to put).
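#
# For example (the filename and bucket are placeholders):
#
#   client.put_file('/tmp/photo.jpg', :bucket => 'mybucket',
#     :canned_acl => 'public-read')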
def put_file(filename, options={}, headers={})
options[:file] = true
put(filename, options, headers)
end
# Get an ACL.
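#
# For example, fetching the ACL for a key, assuming the usual :bucket
# and :key addressing options (names here are placeholders):
#
#   policy = client.get_acl(:bucket => 'mybucket', :key => 'photo.jpg')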
def get_acl(options={})
options[:acl] = true
resp = do_get(options)
if resp.ok?
S3ACL::Policy.from_xml(resp.body)
else
nil
end
end
alias :acl :get_acl
# Set an ACL.
def set_acl(policy, options={})
options[:acl] = true
put(policy, options)
end
alias :acl= :set_acl
# Is a resource public?
def public?(options={})
get_acl(options).public_readable?
end
# Make a resource public
def make_public
set_acl(get_acl().add_public_read_grant)
end
# Make a resource private
def make_private
set_acl(get_acl().remove_public_read_grant)
end
# Get a URL for a thing.
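#
# For example, assuming the resource is addressed with :bucket and :key
# options passed through to s3_url (names here are placeholders):
#
#   client.url(:bucket => 'mybucket', :key => 'photo.jpg')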
def url(options={})
options = request_defaults.merge(options)
s3_url(options)
end
# Change the status of a bucket as a target for logging.
#
# +state+ = :on to allow log delivery to the bucket (default),
# :off to disallow it.
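#
# For example, allowing 'my-log-bucket' (a placeholder) to receive logs:
#
#   client.change_log_target_status('my-log-bucket', :on)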
def change_log_target_status(bucket_name, state=:on)
logging_on = (:on == state)
bucket = get_bucket(bucket_name)
policy = bucket.acl
logging_on ? policy.add_log_target_grants : policy.remove_log_target_grants
bucket.acl = policy
logging_on == policy.log_targetable?
end
# Get the logging status for a resource.
#
# +options+:
# * :for_bucket => 'bucket': get the logging status for a bucket.
# (Alias for :bucket; if both supplied, :bucket takes preference.)
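#
# For example ('mybucket' is a placeholder):
#
#   logging_resource = client.logging(:for_bucket => 'mybucket')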
def logging(options={})
options[:logging] = true
options[:bucket] ||= options[:for_bucket]
resp = do_get(options)
if resp.ok?
LoggingResource.from_xml(resp.body)
else
nil
end
end
# Enable logging for a bucket.
#
# +target_bucket+ is the target for the logs.
# The bucket you want to log is passed in +options+.
#
# +options+
# * :bucket => 'bucket': bucket to log.
# * :for_bucket => 'bucket': syntactic sugar; alias for :bucket.
# If :bucket and :for_bucket are provided, :bucket takes preference.
# * :prefix => 'some-prefix-': specify a prefix for log files;
# otherwise 'log-<target bucket name>-' is used.
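#
# For example, logging 'mybucket' to 'my-log-bucket' (both names are
# placeholders):
#
#   client.logs_to('my-log-bucket', :for_bucket => 'mybucket',
#     :prefix => 'mybucket-access-')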
def logs_to(target_bucket, options={})
target_bucket_name = target_bucket.is_a?(Bucket) ? target_bucket.name : target_bucket
log_prefix = options[:prefix] || "log-#{target_bucket_name}-"
options[:bucket] ||= options[:for_bucket]
target_bucket_acl = get_acl(:bucket => target_bucket_name)
unless target_bucket_acl.log_targetable?
raise BucketNotLogTargetable, "The bucket #{target_bucket_name} cannot be specified as a log target"
end
logging_resource = LoggingResource.new(target_bucket_name, log_prefix)
options[:logging] = true
put(logging_resource, options)
end
# Turn off logging for a bucket.
#
# +options+:
# * :bucket => 'bucket': bucket to turn logging off for.
def logs_off(options={})
options[:logging] = true
put(LoggingResource.new, options)
end
end
end