lib/s33r/client.rb in s33r-0.4.1 vs lib/s33r/client.rb in s33r-0.4.2
- old
+ new
@@ -1,11 +1,12 @@
require 'net/https'
require 'cgi'
require 'erb'
require 'yaml'
-require File.join(File.dirname(__FILE__), 's3_acl')
-require File.join(File.dirname(__FILE__), 's33r_exception')
+base = File.dirname(__FILE__)
+require File.join(base, 's3_acl')
+require File.join(base, 's33r_exception')
module S33r
include Net
# The client performs operations over the network,
@@ -34,20 +35,25 @@
# Whether client dumps headers from requests.
attr_accessor :dump_requests
# Default log bucket location.
attr_accessor :log_bucket
+
+ # The options used to create the client (useful when spawning
+ # NamedBucket instances from Client instances).
+ attr_reader :options
# Configure either an SSL-enabled or plain HTTP client.
# (If using SSL, no verification of server certificate is performed.)
#
# +options+: hash of optional client config.:
# * <tt>:use_ssl => false</tt>: only use plain HTTP for connections
# * <tt>:dump_requests => true</tt>: dump each request's initial line and headers to STDOUT
def initialize(aws_access_key, aws_secret_access_key, options={})
@use_ssl = true
@use_ssl = false if (false == options[:use_ssl])
+ options[:use_ssl] = @use_ssl
@dump_requests = (true == options[:dump_requests])
# set default chunk size for streaming request body
@chunk_size = DEFAULT_CHUNK_SIZE
@@ -58,10 +64,13 @@
@aws_access_key = aws_access_key
@aws_secret_access_key = aws_secret_access_key
# headers sent with every request made by this client
@client_headers = {}
+
+ # keep a record of the options used to create this instance
+ @options = options
end
# Get an HTTP client instance.
#
# NB this has been moved here so that client instances are
@@ -93,13 +102,13 @@
# :include: test/files/namedbucket_config.yml
#
# Note that the loader also runs the config. file through ERB, so you can
# add dynamic blocks of ERB (Ruby) code into your files.
#
- # The +options+ section contains settings specific to Client and NamedClient instances; +custom+
- # contains extra settings specific to your application.
- # +options+ and +custom+ sections can be omitted, but settings for AWS keys are required.
+ # The +options+ section contains other settings, either specific to the Client or
+ # NamedBucket classes, or general application settings.
+ # The +options+ section can be omitted, but settings for AWS keys are required.
#
# Returns an array <tt>[aws_access_key, aws_secret_access_key, options]</tt>, where +options+
# is a hash.
def Client.load_config(config_file)
config = YAML::load(ERB.new(IO.read(config_file)).result)
@@ -158,11 +167,12 @@
# Run the request.
client = get_client
client.start do
response = client.request(req, data)
- # Check the response to see whether S3 is down.
+ # Check the response to see whether S3 is down;
+ # raises an S3FallenOver error if S3 returns a 500-503 response code
response.check_s3_availability
response
end
end
@@ -173,21 +183,27 @@
eval("HTTP::" + method[0,1].upcase + method[1..-1].downcase + ".new('#{path}')")
end
# List all buckets.
#
- # Returns an Array of NamedBucket instances.
+ # Returns an array of NamedBucket instances; the array will be empty if
+ # the BucketListing parse fails for any reason (e.g. no <Bucket> elements
+ # occur in it).
def list_buckets
bucket_list_xml = do_get('/').body
doc = XML.get_xml_doc(S33r.remove_namespace(bucket_list_xml))
named_buckets = []
doc.find("//Bucket").to_a.each do |node|
bucket_name = node.xget('Name')
- named_buckets << NamedBucket.new(@aws_access_key, @aws_secret_access_key,
- {:default_bucket => bucket_name, :dump_request => self.dump_requests})
+ if bucket_name
+ # The NamedBucket instances inherit the request dumping behaviour
+ # of this client.
+ named_buckets << NamedBucket.new(@aws_access_key, @aws_secret_access_key,
+ {:default_bucket => bucket_name, :dump_request => self.dump_requests})
+ end
end
named_buckets
end
@@ -224,11 +240,11 @@
if query_params[:max_keys]
max_keys = query_params[:max_keys].to_i
raise S33rException::BucketListingMaxKeysError, "max_keys option to list bucket cannot be > #{BUCKET_LIST_MAX_MAX_KEYS}" \
if max_keys > BUCKET_LIST_MAX_MAX_KEYS
- # convert max_keys parameter to :max-keys parameter
+ # convert :max_keys querystring parameter to 'max-keys' parameter
query_params['max-keys'] = query_params.delete(:max_keys)
end
resp = do_get("/#{bucket_name}" + generate_querystring(query_params))
@@ -259,40 +275,55 @@
do_delete("/#{bucket_name}", headers)
end
# Check whether a bucket exists or not.
- #
- # Returns true if bucket exists.
def bucket_exists?(bucket_name)
resource_exists?(bucket_name)
end
+ # Create a NamedBucket instance.
+ #
+ # +options+ is a hash of extra options to use when creating
+ # the NamedBucket instance (see NamedBucket.initialize);
+ # pass the symbol :parent instead of a hash to reuse the same options
+ # that were used to create this Client instance.
+ def get_named_bucket(bucket_name, options={}, &block)
+ options = @options if :parent == options
+ options[:default_bucket] = bucket_name
+ named_bucket = NamedBucket.new(@aws_access_key, @aws_secret_access_key, options)
+ yield named_bucket if block_given?
+ named_bucket
+ end
+
# Fetch a resource.
+ #
+ # Returns a plain response, not an S3Object: if you want an object back,
+ # use get_object instead.
def get_resource(bucket_name, resource_key, headers={})
do_get("/#{bucket_name}/#{resource_key}", headers)
end
# Check whether a bucket contains a key.
#
- # Returns true if resource_key exists inside bucket_name exists.
+ # Returns true if resource_key exists inside bucket_name.
def resource_exists?(bucket_name, resource_key=nil)
path = "/#{bucket_name}"
path += "/#{resource_key}" unless resource_key.nil?
do_head(path).ok?
end
- # Fetch an object.
- #
- # TODO: return S3Object
- def get_object(bucket_name, resource_key, headers)
+ # Fetch an object. Note that this actually pulls down the
+ # object from S3 and uses the response to instantiate the S3Object.
+ def get_object(bucket_name, resource_key, headers={})
response = get_resource(bucket_name, resource_key, headers)
+ S3Object.from_response(resource_key, response)
end
# Fetch the ACL document for a resource.
#
- # Returns nil if there is a problem with the resource
+ # Raises an exception if there is a problem with the resource
# (e.g. it doesn't exist).
def get_acl(bucket_name, resource_key='')
path = s3_acl_path(bucket_name, resource_key)
response = do_get(path)
if response.ok?
@@ -300,11 +331,11 @@
else
raise S33rException::MissingResource, "Tried to get an ACL from a non-existent resource [#{path}]"
end
end
- # Put the ACL document for a resource.
+ # Put the ACL document back to a resource.
#
# +acl_doc+ is an S33r::S3ACL::ACLDoc instance.
#
# Returns true if response had a 200 code, false otherwise.
# If you get a 400 Bad Request back, it means a CanonicalUser
@@ -336,11 +367,11 @@
if !acl.nil? and acl.add_public_read_grants
set_acl(acl, bucket_name, resource_key)
end
end
- # TODO
+ #-- TODO
def make_private
end
# Make a bucket capable of being a target for access logging.
#
@@ -367,23 +398,25 @@
acl.remove_log_target
set_acl(acl, bucket_name)
!acl.log_targetable?
end
- # Enable logging for a resource (bucket or key).
+ # Enable logging for a resource (only buckets are supported presently).
#
# +log_prefix+ is the prefix for the logs.
# +bucket_name+ is the bucket to log.
# +log_bucket+ is the bucket to put logs into.
#
# options:
# +:for_key => 'key'+ is the (optional) resource to log in the bucket
# (NB this is not currently supported by S3).
# +:log_prefix => 'prefix'+ is the (optional) log file prefix
# (defaults to bucket_name + '-')
- #
+ #
+ #-- TODO: tests
def enable_logging(bucket_name, log_bucket=nil, options={})
+ # Set to the default log_bucket if not set explicitly.
log_bucket ||= @log_bucket
resource_key = options[:for_key]
resource_key ||= ''
@@ -396,21 +429,28 @@
end
logging_resource = LoggingResource.new(log_bucket, log_prefix)
set_logging(logging_resource, bucket_name, resource_key)
end
- # TODO
+ # Turn off logging of a resource.
+ #-- TODO
def disable_logging
end
- # TODO
+ # Get the logging status of a resource.
+ #-- TODO
def get_logging
end
# Put some generic resource onto S3.
+ #
+ # To stream with this method, +data+ should respond to the +stat+
+ # method (as File instances do, for example).
def put_resource(bucket_name, resource_key, data, headers={})
- do_put("/#{bucket_name}/" + "#{CGI::escape(resource_key)}", data, headers)
+ raise S33r::S33rException::TryingToPutEmptyResource, "No data to put for key '#{resource_key}'" unless data
+ resp = do_put("/#{bucket_name}/" + "#{CGI::escape(resource_key)}", data, headers)
+ resp.ok?
end
# Put a string onto S3.
def put_text(string, bucket_name, resource_key, headers={})
headers["Content-Type"] = "text/plain"
@@ -445,26 +485,28 @@
if headers[:content_type]
# use the first MIME type corresponding to this content type string
# (MIME::Types returns an array of possible MIME types)
mime_type = MIME::Types[headers[:content_type]][0]
else
+ # we're not going to use this much, just for parsing the content type etc.
mime_type = guess_mime_type(filename)
end
content_type = mime_type.simplified
headers['Content-Type'] = content_type
headers['Content-Transfer-Encoding'] = 'binary' if mime_type.binary?
- # the data we want to put (handle to file, so we can stream from it)
+ # Open the file, and pass the handle to the HTTP client so content
+ # can be streamed.
File.open(filename) do |data|
# send the put request
put_resource(bucket_name, resource_key, data, headers)
end
end
# Delete a resource from S3.
#
- # Note that S3 returns the same response code () regardless
+ # Note that S3 returns the same response code regardless
# of whether the resource was successfully deleted, or didn't exist
# in the first place.
def delete_resource(bucket_name, resource_key, headers={})
do_delete("/#{bucket_name}/#{resource_key}", headers)
end
@@ -498,6 +540,6 @@
def do_delete(path, headers={})
do_request('DELETE', path, nil, headers)
end
end
-end
\ No newline at end of file
+end