diff options
96 files changed, 1454 insertions, 696 deletions
diff --git a/MANIFEST.in b/MANIFEST.in index da3dfb3a..0b29a23f 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -3,6 +3,7 @@ include README.rst include boto/file/README include .gitignore include pylintrc +include boto/endpoints.json include boto/pyami/copybot.cfg include boto/services/sonofmmm.cfg include boto/mturk/test/*.doctest @@ -1,9 +1,9 @@ #### boto #### -boto 2.23.0 +boto 2.24.0 -Released: 10-January-2014 +Released: 29-January-2014 .. image:: https://travis-ci.org/boto/boto.png?branch=develop :target: https://travis-ci.org/boto/boto diff --git a/bin/elbadmin b/bin/elbadmin index 423088b0..816c7327 100755 --- a/bin/elbadmin +++ b/bin/elbadmin @@ -108,18 +108,19 @@ def get(elb, name): print # Make map of all instance Id's to Name tags + import boto if not options.region: ec2 = boto.connect_ec2() else: - import boto.ec2.elb ec2 = boto.ec2.connect_to_region(options.region) instance_health = b.get_instance_health() instances = [state.instance_id for state in instance_health] - names = {} - for i in ec2.get_only_instances(instances): - names[i.id] = i.tags.get('Name', '') + names = dict((k,'') for k in instances) + for i in ec2.get_only_instances(): + if i.id in instances: + names[i.id] = i.tags.get('Name', '') name_column_width = max([4] + [len(v) for k,v in names.iteritems()]) + 2 diff --git a/boto/__init__.py b/boto/__init__.py index a4d6c35f..05cae44c 100644 --- a/boto/__init__.py +++ b/boto/__init__.py @@ -37,7 +37,7 @@ import logging.config import urlparse from boto.exception import InvalidUriError -__version__ = '2.23.0' +__version__ = '2.24.0' Version = __version__ # for backware compatibility # http://bugs.python.org/issue7980 @@ -58,6 +58,7 @@ TOO_LONG_DNS_NAME_COMP = re.compile(r'[-_a-z0-9]{64}') GENERATION_RE = re.compile(r'(?P<versionless_uri_str>.+)' r'#(?P<generation>[0-9]+)$') VERSION_RE = re.compile('(?P<versionless_uri_str>.+)#(?P<version_id>.+)$') +ENDPOINTS_PATH = os.path.join(os.path.dirname(__file__), 'endpoints.json') def init_logging(): 
diff --git a/boto/auth.py b/boto/auth.py index ba4f9508..cc58e3c3 100644 --- a/boto/auth.py +++ b/boto/auth.py @@ -36,6 +36,7 @@ import copy import datetime from email.utils import formatdate import hmac +import os import sys import time import urllib @@ -903,6 +904,12 @@ def detect_potential_sigv4(func): def detect_potential_s3sigv4(func): def _wrapper(self): + if os.environ.get('S3_USE_SIGV4', False): + return ['hmac-v4-s3'] + + if boto.config.get('s3', 'use-sigv4', False): + return ['hmac-v4-s3'] + if hasattr(self, 'host'): if '.cn-' in self.host: return ['hmac-v4-s3'] diff --git a/boto/beanstalk/__init__.py b/boto/beanstalk/__init__.py index 904d855e..c3928bcd 100644 --- a/boto/beanstalk/__init__.py +++ b/boto/beanstalk/__init__.py @@ -20,7 +20,7 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. # -from boto.regioninfo import RegionInfo +from boto.regioninfo import RegionInfo, get_regions def regions(): @@ -31,31 +31,10 @@ def regions(): :return: A list of :class:`boto.regioninfo.RegionInfo` """ import boto.beanstalk.layer1 - return [RegionInfo(name='us-east-1', - endpoint='elasticbeanstalk.us-east-1.amazonaws.com', - connection_cls=boto.beanstalk.layer1.Layer1), - RegionInfo(name='us-west-1', - endpoint='elasticbeanstalk.us-west-1.amazonaws.com', - connection_cls=boto.beanstalk.layer1.Layer1), - RegionInfo(name='us-west-2', - endpoint='elasticbeanstalk.us-west-2.amazonaws.com', - connection_cls=boto.beanstalk.layer1.Layer1), - RegionInfo(name='ap-northeast-1', - endpoint='elasticbeanstalk.ap-northeast-1.amazonaws.com', - connection_cls=boto.beanstalk.layer1.Layer1), - RegionInfo(name='ap-southeast-1', - endpoint='elasticbeanstalk.ap-southeast-1.amazonaws.com', - connection_cls=boto.beanstalk.layer1.Layer1), - RegionInfo(name='ap-southeast-2', - endpoint='elasticbeanstalk.ap-southeast-2.amazonaws.com', - connection_cls=boto.beanstalk.layer1.Layer1), - RegionInfo(name='eu-west-1', - 
endpoint='elasticbeanstalk.eu-west-1.amazonaws.com', - connection_cls=boto.beanstalk.layer1.Layer1), - RegionInfo(name='sa-east-1', - endpoint='elasticbeanstalk.sa-east-1.amazonaws.com', - connection_cls=boto.beanstalk.layer1.Layer1), - ] + return get_regions( + 'elasticbeanstalk', + connection_cls=boto.beanstalk.layer1.Layer1 + ) def connect_to_region(region_name, **kw_params): diff --git a/boto/beanstalk/layer1.py b/boto/beanstalk/layer1.py index f70a6b28..5963f50e 100644 --- a/boto/beanstalk/layer1.py +++ b/boto/beanstalk/layer1.py @@ -40,7 +40,7 @@ class Layer1(AWSQueryConnection): proxy=None, proxy_port=None, proxy_user=None, proxy_pass=None, debug=0, https_connection_factory=None, region=None, path='/', - api_version=None, security_token=None): + api_version=None, security_token=None, profile_name=None): if not region: region = RegionInfo(self, self.DefaultRegionName, self.DefaultRegionEndpoint) @@ -51,7 +51,7 @@ class Layer1(AWSQueryConnection): proxy_user, proxy_pass, self.region.endpoint, debug, https_connection_factory, path, - security_token) + security_token, profile_name=profile_name) def _required_auth_capability(self): return ['hmac-v4'] diff --git a/boto/cloudformation/__init__.py b/boto/cloudformation/__init__.py index cf6679f9..84047e2b 100644 --- a/boto/cloudformation/__init__.py +++ b/boto/cloudformation/__init__.py @@ -21,19 +21,9 @@ # IN THE SOFTWARE. 
from connection import CloudFormationConnection -from boto.regioninfo import RegionInfo +from boto.regioninfo import RegionInfo, get_regions, load_regions -RegionData = { - 'us-east-1': 'cloudformation.us-east-1.amazonaws.com', - 'us-west-1': 'cloudformation.us-west-1.amazonaws.com', - 'us-west-2': 'cloudformation.us-west-2.amazonaws.com', - 'sa-east-1': 'cloudformation.sa-east-1.amazonaws.com', - 'eu-west-1': 'cloudformation.eu-west-1.amazonaws.com', - 'ap-northeast-1': 'cloudformation.ap-northeast-1.amazonaws.com', - 'ap-southeast-1': 'cloudformation.ap-southeast-1.amazonaws.com', - 'ap-southeast-2': 'cloudformation.ap-southeast-2.amazonaws.com', - 'cn-north-1': 'cloudformation.cn-north-1.amazonaws.com.cn', -} +RegionData = load_regions().get('cloudformation') def regions(): @@ -43,13 +33,10 @@ def regions(): :rtype: list :return: A list of :class:`boto.RegionInfo` instances """ - regions = [] - for region_name in RegionData: - region = RegionInfo(name=region_name, - endpoint=RegionData[region_name], - connection_cls=CloudFormationConnection) - regions.append(region) - return regions + return get_regions( + 'cloudformation', + connection_cls=CloudFormationConnection + ) def connect_to_region(region_name, **kw_params): diff --git a/boto/cloudformation/connection.py b/boto/cloudformation/connection.py index 5e6325d5..9ebc5f18 100644 --- a/boto/cloudformation/connection.py +++ b/boto/cloudformation/connection.py @@ -52,7 +52,8 @@ class CloudFormationConnection(AWSQueryConnection): is_secure=True, port=None, proxy=None, proxy_port=None, proxy_user=None, proxy_pass=None, debug=0, https_connection_factory=None, region=None, path='/', - converter=None, security_token=None, validate_certs=True): + converter=None, security_token=None, validate_certs=True, + profile_name=None): if not region: region = RegionInfo(self, self.DefaultRegionName, self.DefaultRegionEndpoint, CloudFormationConnection) @@ -64,7 +65,8 @@ class CloudFormationConnection(AWSQueryConnection): 
self.region.endpoint, debug, https_connection_factory, path, security_token, - validate_certs=validate_certs) + validate_certs=validate_certs, + profile_name=profile_name) def _required_auth_capability(self): return ['hmac-v4'] diff --git a/boto/cloudfront/__init__.py b/boto/cloudfront/__init__.py index 0fa4ae81..42f70601 100644 --- a/boto/cloudfront/__init__.py +++ b/boto/cloudfront/__init__.py @@ -43,12 +43,13 @@ class CloudFrontConnection(AWSAuthConnection): def __init__(self, aws_access_key_id=None, aws_secret_access_key=None, port=None, proxy=None, proxy_port=None, host=DefaultHost, debug=0, security_token=None, - validate_certs=True): + validate_certs=True, profile_name=None): super(CloudFrontConnection, self).__init__(host, aws_access_key_id, aws_secret_access_key, True, port, proxy, proxy_port, debug=debug, security_token=security_token, - validate_certs=validate_certs) + validate_certs=validate_certs, + profile_name=profile_name) def get_etag(self, response): response_headers = response.msg diff --git a/boto/cloudsearch/__init__.py b/boto/cloudsearch/__init__.py index 466ad426..451a6bfa 100644 --- a/boto/cloudsearch/__init__.py +++ b/boto/cloudsearch/__init__.py @@ -21,7 +21,7 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. 
# -from boto.regioninfo import RegionInfo +from boto.regioninfo import RegionInfo, get_regions def regions(): @@ -32,23 +32,10 @@ def regions(): :return: A list of :class:`boto.regioninfo.RegionInfo` """ import boto.cloudsearch.layer1 - return [RegionInfo(name='us-east-1', - endpoint='cloudsearch.us-east-1.amazonaws.com', - connection_cls=boto.cloudsearch.layer1.Layer1), - RegionInfo(name='eu-west-1', - endpoint='cloudsearch.eu-west-1.amazonaws.com', - connection_cls=boto.cloudsearch.layer1.Layer1), - RegionInfo(name='us-west-1', - endpoint='cloudsearch.us-west-1.amazonaws.com', - connection_cls=boto.cloudsearch.layer1.Layer1), - RegionInfo(name='us-west-2', - endpoint='cloudsearch.us-west-2.amazonaws.com', - connection_cls=boto.cloudsearch.layer1.Layer1), - RegionInfo(name='ap-southeast-1', - endpoint='cloudsearch.ap-southeast-1.amazonaws.com', - connection_cls=boto.cloudsearch.layer1.Layer1), - - ] + return get_regions( + 'cloudsearch', + connection_cls=boto.cloudsearch.layer1.Layer1 + ) def connect_to_region(region_name, **kw_params): diff --git a/boto/cloudsearch/layer1.py b/boto/cloudsearch/layer1.py index 1e0e7f99..92ebe082 100644 --- a/boto/cloudsearch/layer1.py +++ b/boto/cloudsearch/layer1.py @@ -46,7 +46,7 @@ class Layer1(AWSQueryConnection): proxy_user=None, proxy_pass=None, debug=0, https_connection_factory=None, region=None, path='/', api_version=None, security_token=None, - validate_certs=True): + validate_certs=True, profile_name=None): if not region: region = RegionInfo(self, self.DefaultRegionName, self.DefaultRegionEndpoint) @@ -66,7 +66,8 @@ class Layer1(AWSQueryConnection): https_connection_factory=https_connection_factory, path=path, security_token=security_token, - validate_certs=validate_certs) + validate_certs=validate_certs, + profile_name=profile_name) def _required_auth_capability(self): return ['hmac-v4'] diff --git a/boto/cloudtrail/__init__.py b/boto/cloudtrail/__init__.py index 836f57fc..263caffa 100644 --- 
a/boto/cloudtrail/__init__.py +++ b/boto/cloudtrail/__init__.py @@ -20,7 +20,7 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. # -from boto.regioninfo import RegionInfo +from boto.regioninfo import RegionInfo, get_regions def regions(): @@ -31,14 +31,7 @@ def regions(): :return: A list of :class:`boto.regioninfo.RegionInfo` """ from boto.cloudtrail.layer1 import CloudTrailConnection - - return [RegionInfo(name='us-east-1', - endpoint='cloudtrail.us-east-1.amazonaws.com', - connection_cls=CloudTrailConnection), - RegionInfo(name='us-west-2', - endpoint='cloudtrail.us-west-2.amazonaws.com', - connection_cls=CloudTrailConnection), - ] + return get_regions('cloudtrail', connection_cls=CloudTrailConnection) def connect_to_region(region_name, **kw_params): diff --git a/boto/connection.py b/boto/connection.py index 9f8cb019..9951778c 100644 --- a/boto/connection.py +++ b/boto/connection.py @@ -423,7 +423,7 @@ class AWSAuthConnection(object): https_connection_factory=None, path='/', provider='aws', security_token=None, suppress_consec_slashes=True, - validate_certs=True): + validate_certs=True, profile_name=None): """ :type host: str :param host: The host to make the connection to @@ -468,6 +468,10 @@ class AWSAuthConnection(object): :type validate_certs: bool :param validate_certs: Controls whether SSL certificates will be validated or not. Defaults to True. + + :type profile_name: str + :param profile_name: Override usual Credentials section in config + file to use a named set of keys instead. """ self.suppress_consec_slashes = suppress_consec_slashes self.num_retries = 6 @@ -546,7 +550,8 @@ class AWSAuthConnection(object): self.provider = Provider(self._provider_type, aws_access_key_id, aws_secret_access_key, - security_token) + security_token, + profile_name) # Allow config file to override default host, port, and host header. 
if self.provider.host: @@ -603,6 +608,10 @@ class AWSAuthConnection(object): gs_secret_access_key = aws_secret_access_key secret_key = aws_secret_access_key + def profile_name(self): + return self.provider.profile_name + profile_name = property(profile_name) + def get_path(self, path='/'): # The default behavior is to suppress consecutive slashes for reasons # discussed at @@ -891,8 +900,8 @@ class AWSAuthConnection(object): # the port info. All others should be now be up to date and # not include the port. if 's3' not in self._required_auth_capability(): - self.set_host_header(request) - + if not getattr(self, 'anon', False): + self.set_host_header(request) if callable(sender): response = sender(connection, request.method, request.path, request.body, request.headers) @@ -1037,14 +1046,15 @@ class AWSQueryConnection(AWSAuthConnection): is_secure=True, port=None, proxy=None, proxy_port=None, proxy_user=None, proxy_pass=None, host=None, debug=0, https_connection_factory=None, path='/', security_token=None, - validate_certs=True): + validate_certs=True, profile_name=None): super(AWSQueryConnection, self).__init__(host, aws_access_key_id, aws_secret_access_key, is_secure, port, proxy, proxy_port, proxy_user, proxy_pass, debug, https_connection_factory, path, security_token=security_token, - validate_certs=validate_certs) + validate_certs=validate_certs, + profile_name=profile_name) def _required_auth_capability(self): return [] diff --git a/boto/directconnect/__init__.py b/boto/directconnect/__init__.py index 0fa314ca..2603177d 100644 --- a/boto/directconnect/__init__.py +++ b/boto/directconnect/__init__.py @@ -20,7 +20,7 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. 
# -from boto.regioninfo import RegionInfo +from boto.regioninfo import RegionInfo, get_regions def regions(): @@ -31,32 +31,7 @@ def regions(): :return: A list of :class:`boto.regioninfo.RegionInfo` """ from boto.directconnect.layer1 import DirectConnectConnection - - return [RegionInfo(name='us-east-1', - endpoint='directconnect.us-east-1.amazonaws.com', - connection_cls=DirectConnectConnection), - RegionInfo(name='us-west-1', - endpoint='directconnect.us-west-1.amazonaws.com', - connection_cls=DirectConnectConnection), - RegionInfo(name='us-west-2', - endpoint='directconnect.us-west-2.amazonaws.com', - connection_cls=DirectConnectConnection), - RegionInfo(name='eu-west-1', - endpoint='directconnect.eu-west-1.amazonaws.com', - connection_cls=DirectConnectConnection), - RegionInfo(name='ap-southeast-1', - endpoint='directconnect.ap-southeast-1.amazonaws.com', - connection_cls=DirectConnectConnection), - RegionInfo(name='ap-southeast-2', - endpoint='directconnect.ap-southeast-2.amazonaws.com', - connection_cls=DirectConnectConnection), - RegionInfo(name='ap-southeast-3', - endpoint='directconnect.ap-southeast-3.amazonaws.com', - connection_cls=DirectConnectConnection), - RegionInfo(name='sa-east-1', - endpoint='directconnect.sa-east-1.amazonaws.com', - connection_cls=DirectConnectConnection), - ] + return get_regions('directconnect', connection_cls=DirectConnectConnection) def connect_to_region(region_name, **kw_params): diff --git a/boto/dynamodb/__init__.py b/boto/dynamodb/__init__.py index a6bd2739..8d548167 100644 --- a/boto/dynamodb/__init__.py +++ b/boto/dynamodb/__init__.py @@ -21,7 +21,7 @@ # IN THE SOFTWARE. 
# -from boto.regioninfo import RegionInfo +from boto.regioninfo import RegionInfo, get_regions def regions(): @@ -32,37 +32,7 @@ def regions(): :return: A list of :class:`boto.regioninfo.RegionInfo` """ import boto.dynamodb.layer2 - return [RegionInfo(name='us-east-1', - endpoint='dynamodb.us-east-1.amazonaws.com', - connection_cls=boto.dynamodb.layer2.Layer2), - RegionInfo(name='us-gov-west-1', - endpoint='dynamodb.us-gov-west-1.amazonaws.com', - connection_cls=boto.dynamodb.layer2.Layer2), - RegionInfo(name='us-west-1', - endpoint='dynamodb.us-west-1.amazonaws.com', - connection_cls=boto.dynamodb.layer2.Layer2), - RegionInfo(name='us-west-2', - endpoint='dynamodb.us-west-2.amazonaws.com', - connection_cls=boto.dynamodb.layer2.Layer2), - RegionInfo(name='ap-northeast-1', - endpoint='dynamodb.ap-northeast-1.amazonaws.com', - connection_cls=boto.dynamodb.layer2.Layer2), - RegionInfo(name='ap-southeast-1', - endpoint='dynamodb.ap-southeast-1.amazonaws.com', - connection_cls=boto.dynamodb.layer2.Layer2), - RegionInfo(name='ap-southeast-2', - endpoint='dynamodb.ap-southeast-2.amazonaws.com', - connection_cls=boto.dynamodb.layer2.Layer2), - RegionInfo(name='eu-west-1', - endpoint='dynamodb.eu-west-1.amazonaws.com', - connection_cls=boto.dynamodb.layer2.Layer2), - RegionInfo(name='sa-east-1', - endpoint='dynamodb.sa-east-1.amazonaws.com', - connection_cls=boto.dynamodb.layer2.Layer2), - RegionInfo(name='cn-north-1', - endpoint='dynamodb.cn-north-1.amazonaws.com.cn', - connection_cls=boto.dynamodb.layer2.Layer2), - ] + return get_regions('dynamodb', connection_cls=boto.dynamodb.layer2.Layer2) def connect_to_region(region_name, **kw_params): diff --git a/boto/dynamodb/layer1.py b/boto/dynamodb/layer1.py index ca11ca43..317cf433 100644 --- a/boto/dynamodb/layer1.py +++ b/boto/dynamodb/layer1.py @@ -74,7 +74,7 @@ class Layer1(AWSAuthConnection): def __init__(self, aws_access_key_id=None, aws_secret_access_key=None, is_secure=True, port=None, proxy=None, proxy_port=None, 
debug=0, security_token=None, region=None, - validate_certs=True, validate_checksums=True): + validate_certs=True, validate_checksums=True, profile_name=None): if not region: region_name = boto.config.get('DynamoDB', 'region', self.DefaultRegionName) @@ -89,7 +89,8 @@ class Layer1(AWSAuthConnection): aws_secret_access_key, is_secure, port, proxy, proxy_port, debug=debug, security_token=security_token, - validate_certs=validate_certs) + validate_certs=validate_certs, + profile_name=profile_name) self.throughput_exceeded_events = 0 self._validate_checksums = boto.config.getbool( 'DynamoDB', 'validate_checksums', validate_checksums) diff --git a/boto/dynamodb/layer2.py b/boto/dynamodb/layer2.py index 16fcdbbb..743c7055 100644 --- a/boto/dynamodb/layer2.py +++ b/boto/dynamodb/layer2.py @@ -145,11 +145,13 @@ class Layer2(object): def __init__(self, aws_access_key_id=None, aws_secret_access_key=None, is_secure=True, port=None, proxy=None, proxy_port=None, debug=0, security_token=None, region=None, - validate_certs=True, dynamizer=LossyFloatDynamizer): + validate_certs=True, dynamizer=LossyFloatDynamizer, + profile_name=None): self.layer1 = Layer1(aws_access_key_id, aws_secret_access_key, is_secure, port, proxy, proxy_port, debug, security_token, region, - validate_certs=validate_certs) + validate_certs=validate_certs, + profile_name=profile_name) self.dynamizer = dynamizer() def use_decimals(self): diff --git a/boto/dynamodb2/__init__.py b/boto/dynamodb2/__init__.py index 23f4c5ab..aa07e5cc 100644 --- a/boto/dynamodb2/__init__.py +++ b/boto/dynamodb2/__init__.py @@ -21,7 +21,7 @@ # IN THE SOFTWARE. 
# -from boto.regioninfo import RegionInfo +from boto.regioninfo import RegionInfo, get_regions def regions(): @@ -32,37 +32,7 @@ def regions(): :return: A list of :class:`boto.regioninfo.RegionInfo` """ from boto.dynamodb2.layer1 import DynamoDBConnection - return [RegionInfo(name='us-east-1', - endpoint='dynamodb.us-east-1.amazonaws.com', - connection_cls=DynamoDBConnection), - RegionInfo(name='us-gov-west-1', - endpoint='dynamodb.us-gov-west-1.amazonaws.com', - connection_cls=DynamoDBConnection), - RegionInfo(name='us-west-1', - endpoint='dynamodb.us-west-1.amazonaws.com', - connection_cls=DynamoDBConnection), - RegionInfo(name='us-west-2', - endpoint='dynamodb.us-west-2.amazonaws.com', - connection_cls=DynamoDBConnection), - RegionInfo(name='eu-west-1', - endpoint='dynamodb.eu-west-1.amazonaws.com', - connection_cls=DynamoDBConnection), - RegionInfo(name='ap-northeast-1', - endpoint='dynamodb.ap-northeast-1.amazonaws.com', - connection_cls=DynamoDBConnection), - RegionInfo(name='ap-southeast-1', - endpoint='dynamodb.ap-southeast-1.amazonaws.com', - connection_cls=DynamoDBConnection), - RegionInfo(name='ap-southeast-2', - endpoint='dynamodb.ap-southeast-2.amazonaws.com', - connection_cls=DynamoDBConnection), - RegionInfo(name='sa-east-1', - endpoint='dynamodb.sa-east-1.amazonaws.com', - connection_cls=DynamoDBConnection), - RegionInfo(name='cn-north-1', - endpoint='dynamodb.cn-north-1.amazonaws.com.cn', - connection_cls=DynamoDBConnection), - ] + return get_regions('dynamodb', connection_cls=DynamoDBConnection) def connect_to_region(region_name, **kw_params): diff --git a/boto/dynamodb2/results.py b/boto/dynamodb2/results.py index 6ec333cd..98da8a6a 100644 --- a/boto/dynamodb2/results.py +++ b/boto/dynamodb2/results.py @@ -128,9 +128,18 @@ class ResultSet(object): if self._last_key_seen is not None: kwargs[self.first_key] = self._last_key_seen + # If the page size is greater than limit set them + # to the same value + if self._limit and self._max_page_size > 
self._limit: + self._max_page_size = self._limit + # Put in the max page size. if self._max_page_size is not None: kwargs['limit'] = self._max_page_size + elif self._limit is not None: + # If max_page_size is not set and limit is available + # use it as the page size + kwargs['limit'] = self._limit results = self.the_callable(*args, **kwargs) self._fetches += 1 diff --git a/boto/dynamodb2/table.py b/boto/dynamodb2/table.py index a5db6152..7d40ad5a 100644 --- a/boto/dynamodb2/table.py +++ b/boto/dynamodb2/table.py @@ -170,7 +170,7 @@ class Table(object): ... ], ... throughput={ ... 'read':10, - ... 'write":10, + ... 'write':10, ... }), ... ]) diff --git a/boto/ec2/__init__.py b/boto/ec2/__init__.py index d0e18bf5..c3976da1 100644 --- a/boto/ec2/__init__.py +++ b/boto/ec2/__init__.py @@ -24,21 +24,10 @@ This module provides an interface to the Elastic Compute Cloud (EC2) service from AWS. """ from boto.ec2.connection import EC2Connection -from boto.regioninfo import RegionInfo +from boto.regioninfo import RegionInfo, get_regions, load_regions -RegionData = { - 'us-east-1': 'ec2.us-east-1.amazonaws.com', - 'us-gov-west-1': 'ec2.us-gov-west-1.amazonaws.com', - 'us-west-1': 'ec2.us-west-1.amazonaws.com', - 'us-west-2': 'ec2.us-west-2.amazonaws.com', - 'sa-east-1': 'ec2.sa-east-1.amazonaws.com', - 'eu-west-1': 'ec2.eu-west-1.amazonaws.com', - 'ap-northeast-1': 'ec2.ap-northeast-1.amazonaws.com', - 'ap-southeast-1': 'ec2.ap-southeast-1.amazonaws.com', - 'ap-southeast-2': 'ec2.ap-southeast-2.amazonaws.com', - 'cn-north-1': 'ec2.cn-north-1.amazonaws.com.cn', -} +RegionData = load_regions().get('ec2', {}) def regions(**kw_params): @@ -51,13 +40,7 @@ def regions(**kw_params): :rtype: list :return: A list of :class:`boto.ec2.regioninfo.RegionInfo` """ - regions = [] - for region_name in RegionData: - region = RegionInfo(name=region_name, - endpoint=RegionData[region_name], - connection_cls=EC2Connection) - regions.append(region) - return regions + return get_regions('ec2', 
connection_cls=EC2Connection) def connect_to_region(region_name, **kw_params): diff --git a/boto/ec2/autoscale/__init__.py b/boto/ec2/autoscale/__init__.py index 15386a4f..40e37c51 100644 --- a/boto/ec2/autoscale/__init__.py +++ b/boto/ec2/autoscale/__init__.py @@ -31,7 +31,7 @@ import base64 import boto from boto.connection import AWSQueryConnection -from boto.ec2.regioninfo import RegionInfo +from boto.regioninfo import RegionInfo, get_regions, load_regions from boto.ec2.autoscale.request import Request from boto.ec2.autoscale.launchconfig import LaunchConfiguration from boto.ec2.autoscale.group import AutoScalingGroup @@ -46,18 +46,7 @@ from boto.ec2.autoscale.scheduled import ScheduledUpdateGroupAction from boto.ec2.autoscale.tag import Tag from boto.ec2.autoscale.limits import AccountLimits -RegionData = { - 'us-east-1': 'autoscaling.us-east-1.amazonaws.com', - 'us-gov-west-1': 'autoscaling.us-gov-west-1.amazonaws.com', - 'us-west-1': 'autoscaling.us-west-1.amazonaws.com', - 'us-west-2': 'autoscaling.us-west-2.amazonaws.com', - 'sa-east-1': 'autoscaling.sa-east-1.amazonaws.com', - 'eu-west-1': 'autoscaling.eu-west-1.amazonaws.com', - 'ap-northeast-1': 'autoscaling.ap-northeast-1.amazonaws.com', - 'ap-southeast-1': 'autoscaling.ap-southeast-1.amazonaws.com', - 'ap-southeast-2': 'autoscaling.ap-southeast-2.amazonaws.com', - 'cn-north-1': 'autoscaling.cn-north-1.amazonaws.com.cn', -} +RegionData = load_regions().get('autoscaling', {}) def regions(): @@ -67,13 +56,7 @@ def regions(): :rtype: list :return: A list of :class:`boto.RegionInfo` instances """ - regions = [] - for region_name in RegionData: - region = RegionInfo(name=region_name, - endpoint=RegionData[region_name], - connection_cls=AutoScaleConnection) - regions.append(region) - return regions + return get_regions('autoscaling', connection_cls=AutoScaleConnection) def connect_to_region(region_name, **kw_params): @@ -104,7 +87,7 @@ class AutoScaleConnection(AWSQueryConnection): is_secure=True, port=None, 
proxy=None, proxy_port=None, proxy_user=None, proxy_pass=None, debug=0, https_connection_factory=None, region=None, path='/', - security_token=None, validate_certs=True): + security_token=None, validate_certs=True, profile_name=None): """ Init method to create a new connection to the AutoScaling service. @@ -123,7 +106,8 @@ class AutoScaleConnection(AWSQueryConnection): self.region.endpoint, debug, https_connection_factory, path=path, security_token=security_token, - validate_certs=validate_certs) + validate_certs=validate_certs, + profile_name=profile_name) def _required_auth_capability(self): return ['hmac-v4'] diff --git a/boto/ec2/cloudwatch/__init__.py b/boto/ec2/cloudwatch/__init__.py index 41417570..ba3376b1 100644 --- a/boto/ec2/cloudwatch/__init__.py +++ b/boto/ec2/cloudwatch/__init__.py @@ -28,21 +28,10 @@ from boto.connection import AWSQueryConnection from boto.ec2.cloudwatch.metric import Metric from boto.ec2.cloudwatch.alarm import MetricAlarm, MetricAlarms, AlarmHistoryItem from boto.ec2.cloudwatch.datapoint import Datapoint -from boto.regioninfo import RegionInfo +from boto.regioninfo import RegionInfo, get_regions, load_regions import boto -RegionData = { - 'us-east-1': 'monitoring.us-east-1.amazonaws.com', - 'us-gov-west-1': 'monitoring.us-gov-west-1.amazonaws.com', - 'us-west-1': 'monitoring.us-west-1.amazonaws.com', - 'us-west-2': 'monitoring.us-west-2.amazonaws.com', - 'sa-east-1': 'monitoring.sa-east-1.amazonaws.com', - 'eu-west-1': 'monitoring.eu-west-1.amazonaws.com', - 'ap-northeast-1': 'monitoring.ap-northeast-1.amazonaws.com', - 'ap-southeast-1': 'monitoring.ap-southeast-1.amazonaws.com', - 'ap-southeast-2': 'monitoring.ap-southeast-2.amazonaws.com', - 'cn-north-1': 'monitoring.cn-north-1.amazonaws.com.cn', -} +RegionData = load_regions().get('cloudwatch', {}) def regions(): @@ -52,13 +41,7 @@ def regions(): :rtype: list :return: A list of :class:`boto.RegionInfo` instances """ - regions = [] - for region_name in RegionData: - region = 
RegionInfo(name=region_name, - endpoint=RegionData[region_name], - connection_cls=CloudWatchConnection) - regions.append(region) - return regions + return get_regions('cloudwatch', connection_cls=CloudWatchConnection) def connect_to_region(region_name, **kw_params): @@ -91,7 +74,7 @@ class CloudWatchConnection(AWSQueryConnection): is_secure=True, port=None, proxy=None, proxy_port=None, proxy_user=None, proxy_pass=None, debug=0, https_connection_factory=None, region=None, path='/', - security_token=None, validate_certs=True): + security_token=None, validate_certs=True, profile_name=None): """ Init method to create a new connection to EC2 Monitoring Service. @@ -115,7 +98,8 @@ class CloudWatchConnection(AWSQueryConnection): self.region.endpoint, debug, https_connection_factory, path, security_token, - validate_certs=validate_certs) + validate_certs=validate_certs, + profile_name=profile_name) def _required_auth_capability(self): return ['hmac-v4'] diff --git a/boto/ec2/connection.py b/boto/ec2/connection.py index 4afe2482..23db665f 100644 --- a/boto/ec2/connection.py +++ b/boto/ec2/connection.py @@ -83,7 +83,7 @@ class EC2Connection(AWSQueryConnection): proxy_user=None, proxy_pass=None, debug=0, https_connection_factory=None, region=None, path='/', api_version=None, security_token=None, - validate_certs=True): + validate_certs=True, profile_name=None): """ Init method to create a new connection to EC2. """ @@ -98,7 +98,8 @@ class EC2Connection(AWSQueryConnection): self.region.endpoint, debug, https_connection_factory, path, security_token, - validate_certs=validate_certs) + validate_certs=validate_certs, + profile_name=profile_name) if api_version: self.APIVersion = api_version @@ -734,8 +735,8 @@ class EC2Connection(AWSQueryConnection): launch instances. :type security_groups: list of strings - :param security_groups: The names of the security groups with which to - associate instances. 
+ :param security_groups: The names of the EC2 classic security groups + with which to associate instances :type user_data: string :param user_data: The Base64-encoded MIME user data to be made @@ -749,6 +750,8 @@ class EC2Connection(AWSQueryConnection): * m1.medium * m1.large * m1.xlarge + * m3.medium + * m3.large * m3.xlarge * m3.2xlarge * c1.medium @@ -1442,6 +1445,8 @@ class EC2Connection(AWSQueryConnection): * m1.medium * m1.large * m1.xlarge + * m3.medium + * m3.large * m3.xlarge * m3.2xlarge * c1.medium diff --git a/boto/ec2/elb/__init__.py b/boto/ec2/elb/__init__.py index 2b740a47..35f4d8ec 100644 --- a/boto/ec2/elb/__init__.py +++ b/boto/ec2/elb/__init__.py @@ -31,21 +31,10 @@ from boto.ec2.elb.loadbalancer import LoadBalancer, LoadBalancerZones from boto.ec2.elb.instancestate import InstanceState from boto.ec2.elb.healthcheck import HealthCheck from boto.ec2.elb.listelement import ListElement -from boto.regioninfo import RegionInfo +from boto.regioninfo import RegionInfo, get_regions, load_regions import boto -RegionData = { - 'us-east-1': 'elasticloadbalancing.us-east-1.amazonaws.com', - 'us-gov-west-1': 'elasticloadbalancing.us-gov-west-1.amazonaws.com', - 'us-west-1': 'elasticloadbalancing.us-west-1.amazonaws.com', - 'us-west-2': 'elasticloadbalancing.us-west-2.amazonaws.com', - 'sa-east-1': 'elasticloadbalancing.sa-east-1.amazonaws.com', - 'eu-west-1': 'elasticloadbalancing.eu-west-1.amazonaws.com', - 'ap-northeast-1': 'elasticloadbalancing.ap-northeast-1.amazonaws.com', - 'ap-southeast-1': 'elasticloadbalancing.ap-southeast-1.amazonaws.com', - 'ap-southeast-2': 'elasticloadbalancing.ap-southeast-2.amazonaws.com', - 'cn-north-1': 'elasticloadbalancing.cn-north-1.amazonaws.com.cn', -} +RegionData = load_regions().get('elasticloadbalancing', {}) def regions(): @@ -55,13 +44,7 @@ def regions(): :rtype: list :return: A list of :class:`boto.RegionInfo` instances """ - regions = [] - for region_name in RegionData: - region = RegionInfo(name=region_name, - 
endpoint=RegionData[region_name], - connection_cls=ELBConnection) - regions.append(region) - return regions + return get_regions('elasticloadbalancing', connection_cls=ELBConnection) def connect_to_region(region_name, **kw_params): @@ -92,7 +75,7 @@ class ELBConnection(AWSQueryConnection): is_secure=True, port=None, proxy=None, proxy_port=None, proxy_user=None, proxy_pass=None, debug=0, https_connection_factory=None, region=None, path='/', - security_token=None, validate_certs=True): + security_token=None, validate_certs=True, profile_name=None): """ Init method to create a new connection to EC2 Load Balancing Service. @@ -110,7 +93,8 @@ class ELBConnection(AWSQueryConnection): self.region.endpoint, debug, https_connection_factory, path, security_token, - validate_certs=validate_certs) + validate_certs=validate_certs, + profile_name=profile_name) def _required_auth_capability(self): return ['ec2'] diff --git a/boto/ec2/image.py b/boto/ec2/image.py index 08203a25..807811dc 100644 --- a/boto/ec2/image.py +++ b/boto/ec2/image.py @@ -208,6 +208,8 @@ class Image(TaggedEC2Object): * m1.medium * m1.large * m1.xlarge + * m3.medium + * m3.large * m3.xlarge * m3.2xlarge * c1.medium diff --git a/boto/ec2/regioninfo.py b/boto/ec2/regioninfo.py index 1b6c6ad9..78cd757c 100644 --- a/boto/ec2/regioninfo.py +++ b/boto/ec2/regioninfo.py @@ -28,7 +28,8 @@ class EC2RegionInfo(RegionInfo): Represents an EC2 Region """ - def __init__(self, connection=None, name=None, endpoint=None): + def __init__(self, connection=None, name=None, endpoint=None, + connection_cls=None): from boto.ec2.connection import EC2Connection super(EC2RegionInfo, self).__init__(connection, name, endpoint, EC2Connection) diff --git a/boto/ecs/__init__.py b/boto/ecs/__init__.py index 96d4b670..d643afc7 100644 --- a/boto/ecs/__init__.py +++ b/boto/ecs/__init__.py @@ -41,10 +41,13 @@ class ECSConnection(AWSQueryConnection): def __init__(self, aws_access_key_id=None, aws_secret_access_key=None, is_secure=True, 
port=None, proxy=None, proxy_port=None, proxy_user=None, proxy_pass=None, host='ecs.amazonaws.com', - debug=0, https_connection_factory=None, path='/'): + debug=0, https_connection_factory=None, path='/', + security_token=None, profile_name=None): super(ECSConnection, self).__init__(aws_access_key_id, aws_secret_access_key, is_secure, port, proxy, proxy_port, proxy_user, proxy_pass, - host, debug, https_connection_factory, path) + host, debug, https_connection_factory, path, + security_token=security_token, + profile_name=profile_name) def _required_auth_capability(self): return ['ecs'] diff --git a/boto/elasticache/__init__.py b/boto/elasticache/__init__.py index 1759a17e..73d28c9f 100644 --- a/boto/elasticache/__init__.py +++ b/boto/elasticache/__init__.py @@ -20,7 +20,7 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. # -from boto.regioninfo import RegionInfo +from boto.regioninfo import RegionInfo, get_regions def regions(): @@ -31,34 +31,7 @@ def regions(): :return: A list of :class:`boto.regioninfo.RegionInfo` """ from boto.elasticache.layer1 import ElastiCacheConnection - return [RegionInfo(name='us-east-1', - endpoint='elasticache.us-east-1.amazonaws.com', - connection_cls=ElastiCacheConnection), - RegionInfo(name='us-west-1', - endpoint='elasticache.us-west-1.amazonaws.com', - connection_cls=ElastiCacheConnection), - RegionInfo(name='us-west-2', - endpoint='elasticache.us-west-2.amazonaws.com', - connection_cls=ElastiCacheConnection), - RegionInfo(name='eu-west-1', - endpoint='elasticache.eu-west-1.amazonaws.com', - connection_cls=ElastiCacheConnection), - RegionInfo(name='ap-northeast-1', - endpoint='elasticache.ap-northeast-1.amazonaws.com', - connection_cls=ElastiCacheConnection), - RegionInfo(name='ap-southeast-1', - endpoint='elasticache.ap-southeast-1.amazonaws.com', - connection_cls=ElastiCacheConnection), - RegionInfo(name='ap-southeast-2', - endpoint='elasticache.ap-southeast-2.amazonaws.com', - 
connection_cls=ElastiCacheConnection), - RegionInfo(name='sa-east-1', - endpoint='elasticache.sa-east-1.amazonaws.com', - connection_cls=ElastiCacheConnection), - RegionInfo(name='cn-north-1', - endpoint='elasticache.cn-north-1.amazonaws.com.cn', - connection_cls=ElastiCacheConnection), - ] + return get_regions('elasticache', connection_cls=ElastiCacheConnection) def connect_to_region(region_name, **kw_params): diff --git a/boto/elastictranscoder/__init__.py b/boto/elastictranscoder/__init__.py index c53bc0ce..afb23e56 100644 --- a/boto/elastictranscoder/__init__.py +++ b/boto/elastictranscoder/__init__.py @@ -20,7 +20,7 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. # -from boto.regioninfo import RegionInfo +from boto.regioninfo import RegionInfo, get_regions def regions(): @@ -31,27 +31,10 @@ def regions(): :return: A list of :class:`boto.regioninfo.RegionInfo` """ from boto.elastictranscoder.layer1 import ElasticTranscoderConnection - cls = ElasticTranscoderConnection - return [ - RegionInfo(name='us-east-1', - endpoint='elastictranscoder.us-east-1.amazonaws.com', - connection_cls=cls), - RegionInfo(name='us-west-1', - endpoint='elastictranscoder.us-west-1.amazonaws.com', - connection_cls=cls), - RegionInfo(name='us-west-2', - endpoint='elastictranscoder.us-west-2.amazonaws.com', - connection_cls=cls), - RegionInfo(name='ap-northeast-1', - endpoint='elastictranscoder.ap-northeast-1.amazonaws.com', - connection_cls=cls), - RegionInfo(name='ap-southeast-1', - endpoint='elastictranscoder.ap-southeast-1.amazonaws.com', - connection_cls=cls), - RegionInfo(name='eu-west-1', - endpoint='elastictranscoder.eu-west-1.amazonaws.com', - connection_cls=cls), - ] + return get_regions( + 'elastictranscoder', + connection_cls=ElasticTranscoderConnection + ) def connect_to_region(region_name, **kw_params): diff --git a/boto/emr/__init__.py b/boto/emr/__init__.py index 0bd48ad3..b04d08fe 100644 --- a/boto/emr/__init__.py +++ 
b/boto/emr/__init__.py @@ -29,7 +29,7 @@ service from AWS. from connection import EmrConnection from step import Step, StreamingStep, JarStep from bootstrap_action import BootstrapAction -from boto.regioninfo import RegionInfo +from boto.regioninfo import RegionInfo, get_regions def regions(): @@ -39,34 +39,7 @@ def regions(): :rtype: list :return: A list of :class:`boto.regioninfo.RegionInfo` """ - return [RegionInfo(name='us-east-1', - endpoint='elasticmapreduce.us-east-1.amazonaws.com', - connection_cls=EmrConnection), - RegionInfo(name='us-west-1', - endpoint='us-west-1.elasticmapreduce.amazonaws.com', - connection_cls=EmrConnection), - RegionInfo(name='us-west-2', - endpoint='us-west-2.elasticmapreduce.amazonaws.com', - connection_cls=EmrConnection), - RegionInfo(name='ap-northeast-1', - endpoint='ap-northeast-1.elasticmapreduce.amazonaws.com', - connection_cls=EmrConnection), - RegionInfo(name='ap-southeast-1', - endpoint='ap-southeast-1.elasticmapreduce.amazonaws.com', - connection_cls=EmrConnection), - RegionInfo(name='ap-southeast-2', - endpoint='ap-southeast-2.elasticmapreduce.amazonaws.com', - connection_cls=EmrConnection), - RegionInfo(name='eu-west-1', - endpoint='eu-west-1.elasticmapreduce.amazonaws.com', - connection_cls=EmrConnection), - RegionInfo(name='sa-east-1', - endpoint='sa-east-1.elasticmapreduce.amazonaws.com', - connection_cls=EmrConnection), - RegionInfo(name='cn-north-1', - endpoint='elasticmapreduce.cn-north-1.amazonaws.com.cn', - connection_cls=EmrConnection), - ] + return get_regions('elasticmapreduce', connection_cls=EmrConnection) def connect_to_region(region_name, **kw_params): diff --git a/boto/emr/connection.py b/boto/emr/connection.py index e7b5b747..6c5222ad 100644 --- a/boto/emr/connection.py +++ b/boto/emr/connection.py @@ -55,7 +55,7 @@ class EmrConnection(AWSQueryConnection): is_secure=True, port=None, proxy=None, proxy_port=None, proxy_user=None, proxy_pass=None, debug=0, https_connection_factory=None, region=None, 
path='/', - security_token=None, validate_certs=True): + security_token=None, validate_certs=True, profile_name=None): if not region: region = RegionInfo(self, self.DefaultRegionName, self.DefaultRegionEndpoint) @@ -67,7 +67,8 @@ class EmrConnection(AWSQueryConnection): self.region.endpoint, debug, https_connection_factory, path, security_token, - validate_certs=validate_certs) + validate_certs=validate_certs, + profile_name=profile_name) # Many of the EMR hostnames are of the form: # <region>.<service_name>.amazonaws.com # rather than the more common: @@ -265,7 +266,7 @@ class EmrConnection(AWSQueryConnection): if step_states: self.build_list_params(params, step_states, 'StepStateList.member') - self.get_object('ListSteps', params, StepSummaryList) + return self.get_object('ListSteps', params, StepSummaryList) def add_tags(self, resource_id, tags): """ diff --git a/boto/endpoints.json b/boto/endpoints.json new file mode 100644 index 00000000..2dbdff96 --- /dev/null +++ b/boto/endpoints.json @@ -0,0 +1,300 @@ +{ + "autoscaling": { + "ap-northeast-1": "autoscaling.ap-northeast-1.amazonaws.com", + "ap-southeast-1": "autoscaling.ap-southeast-1.amazonaws.com", + "ap-southeast-2": "autoscaling.ap-southeast-2.amazonaws.com", + "cn-north-1": "autoscaling.cn-north-1.amazonaws.com.cn", + "eu-west-1": "autoscaling.eu-west-1.amazonaws.com", + "sa-east-1": "autoscaling.sa-east-1.amazonaws.com", + "us-east-1": "autoscaling.us-east-1.amazonaws.com", + "us-gov-west-1": "autoscaling.us-gov-west-1.amazonaws.com", + "us-west-1": "autoscaling.us-west-1.amazonaws.com", + "us-west-2": "autoscaling.us-west-2.amazonaws.com" + }, + "cloudformation": { + "ap-northeast-1": "cloudformation.ap-northeast-1.amazonaws.com", + "ap-southeast-1": "cloudformation.ap-southeast-1.amazonaws.com", + "ap-southeast-2": "cloudformation.ap-southeast-2.amazonaws.com", + "cn-north-1": "cloudformation.cn-north-1.amazonaws.com.cn", + "eu-west-1": "cloudformation.eu-west-1.amazonaws.com", + "sa-east-1": 
"cloudformation.sa-east-1.amazonaws.com", + "us-east-1": "cloudformation.us-east-1.amazonaws.com", + "us-west-1": "cloudformation.us-west-1.amazonaws.com", + "us-west-2": "cloudformation.us-west-2.amazonaws.com" + }, + "cloudfront": { + "ap-northeast-1": "cloudfront.amazonaws.com", + "ap-southeast-1": "cloudfront.amazonaws.com", + "ap-southeast-2": "cloudfront.amazonaws.com", + "eu-west-1": "cloudfront.amazonaws.com", + "sa-east-1": "cloudfront.amazonaws.com", + "us-east-1": "cloudfront.amazonaws.com", + "us-west-1": "cloudfront.amazonaws.com", + "us-west-2": "cloudfront.amazonaws.com" + }, + "cloudsearch": { + "ap-southeast-1": "cloudsearch.ap-southeast-1.amazonaws.com", + "eu-west-1": "cloudsearch.eu-west-1.amazonaws.com", + "us-east-1": "cloudsearch.us-east-1.amazonaws.com", + "us-west-1": "cloudsearch.us-west-1.amazonaws.com", + "us-west-2": "cloudsearch.us-west-2.amazonaws.com" + }, + "cloudtrail": { + "us-east-1": "cloudtrail.us-east-1.amazonaws.com", + "us-west-2": "cloudtrail.us-west-2.amazonaws.com" + }, + "cloudwatch": { + "ap-northeast-1": "monitoring.ap-northeast-1.amazonaws.com", + "ap-southeast-1": "monitoring.ap-southeast-1.amazonaws.com", + "ap-southeast-2": "monitoring.ap-southeast-2.amazonaws.com", + "cn-north-1": "monitoring.cn-north-1.amazonaws.com.cn", + "eu-west-1": "monitoring.eu-west-1.amazonaws.com", + "sa-east-1": "monitoring.sa-east-1.amazonaws.com", + "us-east-1": "monitoring.us-east-1.amazonaws.com", + "us-gov-west-1": "monitoring.us-gov-west-1.amazonaws.com", + "us-west-1": "monitoring.us-west-1.amazonaws.com", + "us-west-2": "monitoring.us-west-2.amazonaws.com" + }, + "datapipeline": { + "us-east-1": "datapipeline.us-east-1.amazonaws.com" + }, + "directconnect": { + "ap-northeast-1": "directconnect.ap-northeast-1.amazonaws.com", + "ap-southeast-1": "directconnect.ap-southeast-1.amazonaws.com", + "ap-southeast-2": "directconnect.ap-southeast-2.amazonaws.com", + "eu-west-1": "directconnect.eu-west-1.amazonaws.com", + "sa-east-1": 
"directconnect.sa-east-1.amazonaws.com", + "us-east-1": "directconnect.us-east-1.amazonaws.com", + "us-west-1": "directconnect.us-west-1.amazonaws.com", + "us-west-2": "directconnect.us-west-2.amazonaws.com" + }, + "dynamodb": { + "ap-northeast-1": "dynamodb.ap-northeast-1.amazonaws.com", + "ap-southeast-1": "dynamodb.ap-southeast-1.amazonaws.com", + "ap-southeast-2": "dynamodb.ap-southeast-2.amazonaws.com", + "cn-north-1": "dynamodb.cn-north-1.amazonaws.com.cn", + "eu-west-1": "dynamodb.eu-west-1.amazonaws.com", + "sa-east-1": "dynamodb.sa-east-1.amazonaws.com", + "us-east-1": "dynamodb.us-east-1.amazonaws.com", + "us-gov-west-1": "dynamodb.us-gov-west-1.amazonaws.com", + "us-west-1": "dynamodb.us-west-1.amazonaws.com", + "us-west-2": "dynamodb.us-west-2.amazonaws.com" + }, + "ec2": { + "ap-northeast-1": "ec2.ap-northeast-1.amazonaws.com", + "ap-southeast-1": "ec2.ap-southeast-1.amazonaws.com", + "ap-southeast-2": "ec2.ap-southeast-2.amazonaws.com", + "cn-north-1": "ec2.cn-north-1.amazonaws.com.cn", + "eu-west-1": "ec2.eu-west-1.amazonaws.com", + "sa-east-1": "ec2.sa-east-1.amazonaws.com", + "us-east-1": "ec2.us-east-1.amazonaws.com", + "us-gov-west-1": "ec2.us-gov-west-1.amazonaws.com", + "us-west-1": "ec2.us-west-1.amazonaws.com", + "us-west-2": "ec2.us-west-2.amazonaws.com" + }, + "elasticache": { + "ap-northeast-1": "elasticache.ap-northeast-1.amazonaws.com", + "ap-southeast-1": "elasticache.ap-southeast-1.amazonaws.com", + "ap-southeast-2": "elasticache.ap-southeast-2.amazonaws.com", + "cn-north-1": "elasticache.cn-north-1.amazonaws.com.cn", + "eu-west-1": "elasticache.eu-west-1.amazonaws.com", + "sa-east-1": "elasticache.sa-east-1.amazonaws.com", + "us-east-1": "elasticache.us-east-1.amazonaws.com", + "us-west-1": "elasticache.us-west-1.amazonaws.com", + "us-west-2": "elasticache.us-west-2.amazonaws.com" + }, + "elasticbeanstalk": { + "ap-northeast-1": "elasticbeanstalk.ap-northeast-1.amazonaws.com", + "ap-southeast-1": 
"elasticbeanstalk.ap-southeast-1.amazonaws.com", + "ap-southeast-2": "elasticbeanstalk.ap-southeast-2.amazonaws.com", + "eu-west-1": "elasticbeanstalk.eu-west-1.amazonaws.com", + "sa-east-1": "elasticbeanstalk.sa-east-1.amazonaws.com", + "us-east-1": "elasticbeanstalk.us-east-1.amazonaws.com", + "us-west-1": "elasticbeanstalk.us-west-1.amazonaws.com", + "us-west-2": "elasticbeanstalk.us-west-2.amazonaws.com" + }, + "elasticloadbalancing": { + "ap-northeast-1": "elasticloadbalancing.ap-northeast-1.amazonaws.com", + "ap-southeast-1": "elasticloadbalancing.ap-southeast-1.amazonaws.com", + "ap-southeast-2": "elasticloadbalancing.ap-southeast-2.amazonaws.com", + "cn-north-1": "elasticloadbalancing.cn-north-1.amazonaws.com.cn", + "eu-west-1": "elasticloadbalancing.eu-west-1.amazonaws.com", + "sa-east-1": "elasticloadbalancing.sa-east-1.amazonaws.com", + "us-east-1": "elasticloadbalancing.us-east-1.amazonaws.com", + "us-gov-west-1": "elasticloadbalancing.us-gov-west-1.amazonaws.com", + "us-west-1": "elasticloadbalancing.us-west-1.amazonaws.com", + "us-west-2": "elasticloadbalancing.us-west-2.amazonaws.com" + }, + "elasticmapreduce": { + "ap-northeast-1": "elasticmapreduce.ap-northeast-1.amazonaws.com", + "ap-southeast-1": "elasticmapreduce.ap-southeast-1.amazonaws.com", + "ap-southeast-2": "elasticmapreduce.ap-southeast-2.amazonaws.com", + "cn-north-1": "elasticmapreduce.cn-north-1.amazonaws.com.cn", + "eu-west-1": "elasticmapreduce.eu-west-1.amazonaws.com", + "sa-east-1": "elasticmapreduce.sa-east-1.amazonaws.com", + "us-east-1": "elasticmapreduce.us-east-1.amazonaws.com", + "us-gov-west-1": "elasticmapreduce.us-gov-west-1.amazonaws.com", + "us-west-1": "elasticmapreduce.us-west-1.amazonaws.com", + "us-west-2": "elasticmapreduce.us-west-2.amazonaws.com" + }, + "elastictranscoder": { + "ap-northeast-1": "elastictranscoder.ap-northeast-1.amazonaws.com", + "ap-southeast-1": "elastictranscoder.ap-southeast-1.amazonaws.com", + "eu-west-1": 
"elastictranscoder.eu-west-1.amazonaws.com", + "us-east-1": "elastictranscoder.us-east-1.amazonaws.com", + "us-west-1": "elastictranscoder.us-west-1.amazonaws.com", + "us-west-2": "elastictranscoder.us-west-2.amazonaws.com" + }, + "glacier": { + "ap-northeast-1": "glacier.ap-northeast-1.amazonaws.com", + "ap-southeast-2": "glacier.ap-southeast-2.amazonaws.com", + "cn-north-1": "glacier.cn-north-1.amazonaws.com.cn", + "eu-west-1": "glacier.eu-west-1.amazonaws.com", + "us-east-1": "glacier.us-east-1.amazonaws.com", + "us-west-1": "glacier.us-west-1.amazonaws.com", + "us-west-2": "glacier.us-west-2.amazonaws.com" + }, + "iam": { + "ap-northeast-1": "iam.amazonaws.com", + "ap-southeast-1": "iam.amazonaws.com", + "ap-southeast-2": "iam.amazonaws.com", + "cn-north-1": "iam.cn-north-1.amazonaws.com.cn", + "eu-west-1": "iam.amazonaws.com", + "sa-east-1": "iam.amazonaws.com", + "us-east-1": "iam.amazonaws.com", + "us-gov-west-1": "iam.us-gov.amazonaws.com", + "us-west-1": "iam.amazonaws.com", + "us-west-2": "iam.amazonaws.com" + }, + "importexport": { + "ap-northeast-1": "importexport.amazonaws.com", + "ap-southeast-1": "importexport.amazonaws.com", + "ap-southeast-2": "importexport.amazonaws.com", + "eu-west-1": "importexport.amazonaws.com", + "sa-east-1": "importexport.amazonaws.com", + "us-east-1": "importexport.amazonaws.com", + "us-west-1": "importexport.amazonaws.com", + "us-west-2": "importexport.amazonaws.com" + }, + "opsworks": { + "us-east-1": "opsworks.us-east-1.amazonaws.com" + }, + "rds": { + "ap-northeast-1": "rds.ap-northeast-1.amazonaws.com", + "ap-southeast-1": "rds.ap-southeast-1.amazonaws.com", + "ap-southeast-2": "rds.ap-southeast-2.amazonaws.com", + "cn-north-1": "rds.cn-north-1.amazonaws.com.cn", + "eu-west-1": "rds.eu-west-1.amazonaws.com", + "sa-east-1": "rds.sa-east-1.amazonaws.com", + "us-east-1": "rds.us-east-1.amazonaws.com", + "us-gov-west-1": "rds.us-gov-west-1.amazonaws.com", + "us-west-1": "rds.us-west-1.amazonaws.com", + "us-west-2": 
"rds.us-west-2.amazonaws.com" + }, + "redshift": { + "ap-northeast-1": "redshift.ap-northeast-1.amazonaws.com", + "ap-southeast-1": "redshift.ap-southeast-1.amazonaws.com", + "ap-southeast-2": "redshift.ap-southeast-2.amazonaws.com", + "eu-west-1": "redshift.eu-west-1.amazonaws.com", + "us-east-1": "redshift.us-east-1.amazonaws.com", + "us-west-2": "redshift.us-west-2.amazonaws.com" + }, + "route53": { + "ap-northeast-1": "route53.amazonaws.com", + "ap-southeast-1": "route53.amazonaws.com", + "ap-southeast-2": "route53.amazonaws.com", + "eu-west-1": "route53.amazonaws.com", + "sa-east-1": "route53.amazonaws.com", + "us-east-1": "route53.amazonaws.com", + "us-west-1": "route53.amazonaws.com", + "us-west-2": "route53.amazonaws.com" + }, + "s3": { + "ap-northeast-1": "s3-ap-northeast-1.amazonaws.com", + "ap-southeast-1": "s3-ap-southeast-1.amazonaws.com", + "ap-southeast-2": "s3-ap-southeast-2.amazonaws.com", + "cn-north-1": "s3.cn-north-1.amazonaws.com.cn", + "eu-west-1": "s3-eu-west-1.amazonaws.com", + "sa-east-1": "s3-sa-east-1.amazonaws.com", + "us-east-1": "s3.amazonaws.com", + "us-gov-west-1": "s3-us-gov-west-1.amazonaws.com", + "us-west-1": "s3-us-west-1.amazonaws.com", + "us-west-2": "s3-us-west-2.amazonaws.com" + }, + "sdb": { + "ap-northeast-1": "sdb.ap-northeast-1.amazonaws.com", + "ap-southeast-1": "sdb.ap-southeast-1.amazonaws.com", + "ap-southeast-2": "sdb.ap-southeast-2.amazonaws.com", + "eu-west-1": "sdb.eu-west-1.amazonaws.com", + "sa-east-1": "sdb.sa-east-1.amazonaws.com", + "us-east-1": "sdb.amazonaws.com", + "us-west-1": "sdb.us-west-1.amazonaws.com", + "us-west-2": "sdb.us-west-2.amazonaws.com" + }, + "ses": { + "eu-west-1": "email.eu-west-1.amazonaws.com", + "us-east-1": "email.us-east-1.amazonaws.com", + "us-west-2": "email.us-west-2.amazonaws.com" + }, + "sns": { + "ap-northeast-1": "sns.ap-northeast-1.amazonaws.com", + "ap-southeast-1": "sns.ap-southeast-1.amazonaws.com", + "ap-southeast-2": "sns.ap-southeast-2.amazonaws.com", + "cn-north-1": 
"sns.cn-north-1.amazonaws.com.cn", + "eu-west-1": "sns.eu-west-1.amazonaws.com", + "sa-east-1": "sns.sa-east-1.amazonaws.com", + "us-east-1": "sns.us-east-1.amazonaws.com", + "us-gov-west-1": "sns.us-gov-west-1.amazonaws.com", + "us-west-1": "sns.us-west-1.amazonaws.com", + "us-west-2": "sns.us-west-2.amazonaws.com" + }, + "sqs": { + "ap-northeast-1": "sqs.ap-northeast-1.amazonaws.com", + "ap-southeast-1": "sqs.ap-southeast-1.amazonaws.com", + "ap-southeast-2": "sqs.ap-southeast-2.amazonaws.com", + "cn-north-1": "sqs.cn-north-1.amazonaws.com.cn", + "eu-west-1": "sqs.eu-west-1.amazonaws.com", + "sa-east-1": "sqs.sa-east-1.amazonaws.com", + "us-east-1": "sqs.us-east-1.amazonaws.com", + "us-gov-west-1": "sqs.us-gov-west-1.amazonaws.com", + "us-west-1": "sqs.us-west-1.amazonaws.com", + "us-west-2": "sqs.us-west-2.amazonaws.com" + }, + "storagegateway": { + "ap-northeast-1": "storagegateway.ap-northeast-1.amazonaws.com", + "ap-southeast-1": "storagegateway.ap-southeast-1.amazonaws.com", + "ap-southeast-2": "storagegateway.ap-southeast-2.amazonaws.com", + "eu-west-1": "storagegateway.eu-west-1.amazonaws.com", + "sa-east-1": "storagegateway.sa-east-1.amazonaws.com", + "us-east-1": "storagegateway.us-east-1.amazonaws.com", + "us-west-1": "storagegateway.us-west-1.amazonaws.com", + "us-west-2": "storagegateway.us-west-2.amazonaws.com" + }, + "sts": { + "ap-northeast-1": "sts.amazonaws.com", + "ap-southeast-1": "sts.amazonaws.com", + "ap-southeast-2": "sts.amazonaws.com", + "cn-north-1": "sts.cn-north-1.amazonaws.com.cn", + "eu-west-1": "sts.amazonaws.com", + "sa-east-1": "sts.amazonaws.com", + "us-east-1": "sts.amazonaws.com", + "us-gov-west-1": "sts.us-gov-west-1.amazonaws.com", + "us-west-1": "sts.amazonaws.com", + "us-west-2": "sts.amazonaws.com" + }, + "support": { + "us-east-1": "support.us-east-1.amazonaws.com" + }, + "swf": { + "ap-northeast-1": "swf.ap-northeast-1.amazonaws.com", + "ap-southeast-1": "swf.ap-southeast-1.amazonaws.com", + "ap-southeast-2": 
"swf.ap-southeast-2.amazonaws.com", + "cn-north-1": "swf.cn-north-1.amazonaws.com.cn", + "eu-west-1": "swf.eu-west-1.amazonaws.com", + "sa-east-1": "swf.sa-east-1.amazonaws.com", + "us-east-1": "swf.us-east-1.amazonaws.com", + "us-gov-west-1": "swf.us-gov-west-1.amazonaws.com", + "us-west-1": "swf.us-west-1.amazonaws.com", + "us-west-2": "swf.us-west-2.amazonaws.com" + } +} diff --git a/boto/fps/response.py b/boto/fps/response.py index ef12b00f..94c8d151 100644 --- a/boto/fps/response.py +++ b/boto/fps/response.py @@ -1,3 +1,26 @@ +# Copyright (c) 2012 Andy Davidoff http://www.disruptek.com/ +# Copyright (c) 2010 Jason R. Coombs http://www.jaraco.com/ +# Copyright (c) 2008 Chris Moyer http://coredumped.org/ +# Copyright (c) 2006-2009 Mitch Garnaat http://garnaat.org/ +# +# Permission is hereby granted, free of charge, to any person obtaining a +# copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, dis- +# tribute, sublicense, and/or sell copies of the Software, and to permit +# persons to whom the Software is furnished to do so, subject to the fol- +# lowing conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL- +# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT +# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. 
from decimal import Decimal diff --git a/boto/glacier/__init__.py b/boto/glacier/__init__.py index ccf35b32..713740b4 100644 --- a/boto/glacier/__init__.py +++ b/boto/glacier/__init__.py @@ -21,7 +21,7 @@ # IN THE SOFTWARE. # -from boto.ec2.regioninfo import RegionInfo +from boto.regioninfo import RegionInfo, get_regions def regions(): @@ -32,28 +32,7 @@ def regions(): :return: A list of :class:`boto.regioninfo.RegionInfo` """ from boto.glacier.layer2 import Layer2 - return [RegionInfo(name='us-east-1', - endpoint='glacier.us-east-1.amazonaws.com', - connection_cls=Layer2), - RegionInfo(name='us-west-1', - endpoint='glacier.us-west-1.amazonaws.com', - connection_cls=Layer2), - RegionInfo(name='us-west-2', - endpoint='glacier.us-west-2.amazonaws.com', - connection_cls=Layer2), - RegionInfo(name='ap-northeast-1', - endpoint='glacier.ap-northeast-1.amazonaws.com', - connection_cls=Layer2), - RegionInfo(name='eu-west-1', - endpoint='glacier.eu-west-1.amazonaws.com', - connection_cls=Layer2), - RegionInfo(name='ap-southeast-2', - endpoint='glacier.ap-southeast-2.amazonaws.com', - connection_cls=Layer2), - RegionInfo(name='cn-north-1', - endpoint='glacier.cn-north-1.amazonaws.com.cn', - connection_cls=Layer2), - ] + return get_regions('glacier', connection_cls=Layer2) def connect_to_region(region_name, **kw_params): diff --git a/boto/glacier/layer1.py b/boto/glacier/layer1.py index 716cea37..f46fbf09 100644 --- a/boto/glacier/layer1.py +++ b/boto/glacier/layer1.py @@ -77,7 +77,8 @@ class Layer1(AWSAuthConnection): https_connection_factory=None, path='/', provider='aws', security_token=None, suppress_consec_slashes=True, - region=None, region_name='us-east-1'): + region=None, region_name='us-east-1', + profile_name=None): if not region: for reg in boto.glacier.regions(): @@ -93,7 +94,7 @@ class Layer1(AWSAuthConnection): proxy_user, proxy_pass, debug, https_connection_factory, path, provider, security_token, - suppress_consec_slashes) + suppress_consec_slashes, 
profile_name=profile_name) def _required_auth_capability(self): return ['hmac-v4'] diff --git a/boto/gs/bucketlistresultset.py b/boto/gs/bucketlistresultset.py index 5e717a50..db634cfd 100644 --- a/boto/gs/bucketlistresultset.py +++ b/boto/gs/bucketlistresultset.py @@ -38,7 +38,7 @@ def versioned_bucket_lister(bucket, prefix='', delimiter='', generation_marker = rs.next_generation_marker more_results= rs.is_truncated -class VersionedBucketListResultSet: +class VersionedBucketListResultSet(object): """ A resultset for listing versions within a bucket. Uses the bucket_lister generator function and implements the iterator interface. This diff --git a/boto/gs/connection.py b/boto/gs/connection.py index 4c31979c..104ed45d 100755 --- a/boto/gs/connection.py +++ b/boto/gs/connection.py @@ -25,7 +25,7 @@ from boto.s3.connection import SubdomainCallingFormat from boto.s3.connection import check_lowercase_bucketname from boto.utils import get_utf8_value -class Location: +class Location(object): DEFAULT = 'US' EU = 'EU' diff --git a/boto/gs/user.py b/boto/gs/user.py index 62f2cf56..c3072952 100755 --- a/boto/gs/user.py +++ b/boto/gs/user.py @@ -20,7 +20,7 @@ # IN THE SOFTWARE. 
-class User: +class User(object): def __init__(self, parent=None, id='', name=''): if parent: parent.owner = self diff --git a/boto/iam/__init__.py b/boto/iam/__init__.py index fac7b5db..3df44f29 100644 --- a/boto/iam/__init__.py +++ b/boto/iam/__init__.py @@ -22,8 +22,8 @@ # this is here for backward compatibility # originally, the IAMConnection class was defined here -from connection import IAMConnection -from boto.regioninfo import RegionInfo +from boto.iam.connection import IAMConnection +from boto.regioninfo import RegionInfo, get_regions class IAMRegionInfo(RegionInfo): @@ -50,16 +50,22 @@ def regions(): :rtype: list :return: A list of :class:`boto.regioninfo.RegionInfo` instances """ - return [IAMRegionInfo(name='universal', - endpoint='iam.amazonaws.com', - connection_cls=IAMConnection), - IAMRegionInfo(name='us-gov-west-1', - endpoint='iam.us-gov.amazonaws.com', - connection_cls=IAMConnection), - IAMRegionInfo(name='cn-north-1', - endpoint='iam.cn-north-1.amazonaws.com.cn', - connection_cls=IAMConnection) - ] + regions = get_regions( + 'iam', + region_cls=IAMRegionInfo, + connection_cls=IAMConnection + ) + + # For historical reasons, we had a "universal" endpoint as well. 
+ regions.append( + IAMRegionInfo( + name='universal', + endpoint='iam.amazonaws.com', + connection_cls=IAMConnection + ) + ) + + return regions def connect_to_region(region_name, **kw_params): diff --git a/boto/iam/connection.py b/boto/iam/connection.py index 32b3ab2e..4872b274 100644 --- a/boto/iam/connection.py +++ b/boto/iam/connection.py @@ -40,15 +40,16 @@ class IAMConnection(AWSQueryConnection): def __init__(self, aws_access_key_id=None, aws_secret_access_key=None, is_secure=True, port=None, proxy=None, proxy_port=None, proxy_user=None, proxy_pass=None, host='iam.amazonaws.com', - debug=0, https_connection_factory=None, - path='/', security_token=None, validate_certs=True): + debug=0, https_connection_factory=None, path='/', + security_token=None, validate_certs=True, profile_name=None): super(IAMConnection, self).__init__(aws_access_key_id, aws_secret_access_key, is_secure, port, proxy, proxy_port, proxy_user, proxy_pass, host, debug, https_connection_factory, path, security_token, - validate_certs=validate_certs) + validate_certs=validate_certs, + profile_name=profile_name) def _required_auth_capability(self): return ['hmac-v4'] diff --git a/boto/kinesis/__init__.py b/boto/kinesis/__init__.py index 1c19a3b2..5fc33c74 100644 --- a/boto/kinesis/__init__.py +++ b/boto/kinesis/__init__.py @@ -20,7 +20,7 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. 
# -from boto.regioninfo import RegionInfo +from boto.regioninfo import RegionInfo, get_regions def regions(): @@ -31,11 +31,7 @@ def regions(): :return: A list of :class:`boto.regioninfo.RegionInfo` """ from boto.kinesis.layer1 import KinesisConnection - - return [RegionInfo(name='us-east-1', - endpoint='kinesis.us-east-1.amazonaws.com', - connection_cls=KinesisConnection), - ] + return get_regions('kinesis', connection_cls=KinesisConnection) def connect_to_region(region_name, **kw_params): diff --git a/boto/mturk/connection.py b/boto/mturk/connection.py index ed7ba9d2..ff011ff6 100644 --- a/boto/mturk/connection.py +++ b/boto/mturk/connection.py @@ -46,7 +46,8 @@ class MTurkConnection(AWSQueryConnection): is_secure=True, port=None, proxy=None, proxy_port=None, proxy_user=None, proxy_pass=None, host=None, debug=0, - https_connection_factory=None): + https_connection_factory=None, security_token=None, + profile_name=None): if not host: if config.has_option('MTurk', 'sandbox') and config.get('MTurk', 'sandbox') == 'True': host = 'mechanicalturk.sandbox.amazonaws.com' @@ -58,7 +59,9 @@ class MTurkConnection(AWSQueryConnection): aws_secret_access_key, is_secure, port, proxy, proxy_port, proxy_user, proxy_pass, host, debug, - https_connection_factory) + https_connection_factory, + security_token=security_token, + profile_name=profile_name) def _required_auth_capability(self): return ['mturk'] diff --git a/boto/opsworks/__init__.py b/boto/opsworks/__init__.py index e69de29b..71bc7209 100644 --- a/boto/opsworks/__init__.py +++ b/boto/opsworks/__init__.py @@ -0,0 +1,41 @@ +# Copyright (c) 2014 Amazon.com, Inc. or its affiliates. 
+# All Rights Reserved +# +# Permission is hereby granted, free of charge, to any person obtaining a +# copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, dis- +# tribute, sublicense, and/or sell copies of the Software, and to permit +# persons to whom the Software is furnished to do so, subject to the fol- +# lowing conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL- +# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT +# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +from boto.regioninfo import RegionInfo, get_regions + + +def regions(): + """ + Get all available regions for the AWS OpsWorks service.
+ + :rtype: list + :return: A list of :class:`boto.regioninfo.RegionInfo` + """ + from boto.opsworks.layer1 import OpsWorksConnection + return get_regions('opsworks', connection_cls=OpsWorksConnection) + + +def connect_to_region(region_name, **kw_params): + for region in regions(): + if region.name == region_name: + return region.connect(**kw_params) + return None diff --git a/boto/provider.py b/boto/provider.py index 75400638..a7ea2028 100644 --- a/boto/provider.py +++ b/boto/provider.py @@ -57,6 +57,7 @@ STORAGE_CLASS_HEADER_KEY = 'storage-class' MFA_HEADER_KEY = 'mfa-header' SERVER_SIDE_ENCRYPTION_KEY = 'server-side-encryption-header' VERSION_ID_HEADER_KEY = 'version-id-header' +RESTORE_HEADER_KEY = 'restore-header' STORAGE_COPY_ERROR = 'StorageCopyError' STORAGE_CREATE_ERROR = 'StorageCreateError' @@ -124,6 +125,7 @@ class Provider(object): VERSION_ID_HEADER_KEY: AWS_HEADER_PREFIX + 'version-id', STORAGE_CLASS_HEADER_KEY: AWS_HEADER_PREFIX + 'storage-class', MFA_HEADER_KEY: AWS_HEADER_PREFIX + 'mfa', + RESTORE_HEADER_KEY: AWS_HEADER_PREFIX + 'restore', }, 'google': { HEADER_PREFIX_KEY: GOOG_HEADER_PREFIX, @@ -146,6 +148,7 @@ class Provider(object): VERSION_ID_HEADER_KEY: GOOG_HEADER_PREFIX + 'version-id', STORAGE_CLASS_HEADER_KEY: None, MFA_HEADER_KEY: None, + RESTORE_HEADER_KEY: None, } } @@ -167,18 +170,19 @@ class Provider(object): } def __init__(self, name, access_key=None, secret_key=None, - security_token=None): + security_token=None, profile_name=None): self.host = None self.port = None self.host_header = None self.access_key = access_key self.secret_key = secret_key self.security_token = security_token + self.profile_name = profile_name self.name = name self.acl_class = self.AclClassMap[self.name] self.canned_acls = self.CannedAclsMap[self.name] self._credential_expiry_time = None - self.get_credentials(access_key, secret_key, security_token) + self.get_credentials(access_key, secret_key, security_token, profile_name) self.configure_headers() 
self.configure_errors() # Allow config file to override default host and port. @@ -242,7 +246,7 @@ class Provider(object): return False def get_credentials(self, access_key=None, secret_key=None, - security_token=None): + security_token=None, profile_name=None): access_key_name, secret_key_name, security_token_name = self.CredentialMap[self.name] if access_key is not None: self.access_key = access_key @@ -250,6 +254,9 @@ class Provider(object): elif access_key_name.upper() in os.environ: self.access_key = os.environ[access_key_name.upper()] boto.log.debug("Using access key found in environment variable.") + elif config.has_option("profile %s" % profile_name, access_key_name): + self.access_key = config.get("profile %s" % profile_name, access_key_name) + boto.log.debug("Using access key found in config file: profile %s." % profile_name) elif config.has_option('Credentials', access_key_name): self.access_key = config.get('Credentials', access_key_name) boto.log.debug("Using access key found in config file.") @@ -260,6 +267,9 @@ class Provider(object): elif secret_key_name.upper() in os.environ: self.secret_key = os.environ[secret_key_name.upper()] boto.log.debug("Using secret key found in environment variable.") + elif config.has_option("profile %s" % profile_name, secret_key_name): + self.secret_key = config.get("profile %s" % profile_name, secret_key_name) + boto.log.debug("Using secret key found in config file: profile %s." 
% profile_name) elif config.has_option('Credentials', secret_key_name): self.secret_key = config.get('Credentials', secret_key_name) boto.log.debug("Using secret key found in config file.") @@ -348,6 +358,7 @@ class Provider(object): self.storage_class_header = header_info_map[STORAGE_CLASS_HEADER_KEY] self.version_id = header_info_map[VERSION_ID_HEADER_KEY] self.mfa_header = header_info_map[MFA_HEADER_KEY] + self.restore_header = header_info_map[RESTORE_HEADER_KEY] def configure_errors(self): error_map = self.ErrorMap[self.name] diff --git a/boto/pyami/config.py b/boto/pyami/config.py index 48314e26..6669cc05 100644 --- a/boto/pyami/config.py +++ b/boto/pyami/config.py @@ -42,7 +42,7 @@ BotoConfigLocations = [BotoConfigPath] UserConfigPath = os.path.join(expanduser('~'), '.boto') BotoConfigLocations.append(UserConfigPath) -# If there's a BOTO_CONFIG variable set, we load ONLY +# If there's a BOTO_CONFIG variable set, we load ONLY # that variable if 'BOTO_CONFIG' in os.environ: BotoConfigLocations = [expanduser(os.environ['BOTO_CONFIG'])] @@ -149,14 +149,14 @@ class Config(ConfigParser.SafeConfigParser): except: val = default return val - + def getint(self, section, name, default=0): try: val = ConfigParser.SafeConfigParser.getint(self, section, name) except: val = int(default) return val - + def getfloat(self, section, name, default=0.0): try: val = ConfigParser.SafeConfigParser.getfloat(self, section, name) @@ -174,13 +174,13 @@ class Config(ConfigParser.SafeConfigParser): else: val = default return val - + def setbool(self, section, name, value): if value: self.set(section, name, 'true') else: self.set(section, name, 'false') - + def dump(self): s = StringIO.StringIO() self.write(s) @@ -196,7 +196,7 @@ class Config(ConfigParser.SafeConfigParser): fp.write('%s = xxxxxxxxxxxxxxxxxx\n' % option) else: fp.write('%s = %s\n' % (option, self.get(section, option))) - + def dump_to_sdb(self, domain_name, item_name): from boto.compat import json sdb = boto.connect_sdb() 
diff --git a/boto/rds/__init__.py b/boto/rds/__init__.py index 775c1acf..c577a1bb 100644 --- a/boto/rds/__init__.py +++ b/boto/rds/__init__.py @@ -31,6 +31,7 @@ from boto.rds.event import Event from boto.rds.regioninfo import RDSRegionInfo from boto.rds.dbsubnetgroup import DBSubnetGroup from boto.rds.vpcsecuritygroupmembership import VPCSecurityGroupMembership +from boto.regioninfo import get_regions def regions(): """ @@ -39,27 +40,11 @@ def regions(): :rtype: list :return: A list of :class:`boto.rds.regioninfo.RDSRegionInfo` """ - return [RDSRegionInfo(name='us-east-1', - endpoint='rds.amazonaws.com'), - RDSRegionInfo(name='us-gov-west-1', - endpoint='rds.us-gov-west-1.amazonaws.com'), - RDSRegionInfo(name='eu-west-1', - endpoint='rds.eu-west-1.amazonaws.com'), - RDSRegionInfo(name='us-west-1', - endpoint='rds.us-west-1.amazonaws.com'), - RDSRegionInfo(name='us-west-2', - endpoint='rds.us-west-2.amazonaws.com'), - RDSRegionInfo(name='sa-east-1', - endpoint='rds.sa-east-1.amazonaws.com'), - RDSRegionInfo(name='ap-northeast-1', - endpoint='rds.ap-northeast-1.amazonaws.com'), - RDSRegionInfo(name='ap-southeast-1', - endpoint='rds.ap-southeast-1.amazonaws.com'), - RDSRegionInfo(name='ap-southeast-2', - endpoint='rds.ap-southeast-2.amazonaws.com'), - RDSRegionInfo(name='cn-north-1', - endpoint='rds.cn-north-1.amazonaws.com.cn'), - ] + return get_regions( + 'rds', + region_cls=RDSRegionInfo, + connection_cls=RDSConnection + ) def connect_to_region(region_name, **kw_params): @@ -94,7 +79,8 @@ class RDSConnection(AWSQueryConnection): is_secure=True, port=None, proxy=None, proxy_port=None, proxy_user=None, proxy_pass=None, debug=0, https_connection_factory=None, region=None, path='/', - security_token=None, validate_certs=True): + security_token=None, validate_certs=True, + profile_name=None): if not region: region = RDSRegionInfo(self, self.DefaultRegionName, self.DefaultRegionEndpoint) @@ -106,7 +92,8 @@ class RDSConnection(AWSQueryConnection): self.region.endpoint, 
debug, https_connection_factory, path, security_token, - validate_certs=validate_certs) + validate_certs=validate_certs, + profile_name=profile_name) def _required_auth_capability(self): return ['hmac-v4'] diff --git a/boto/rds/regioninfo.py b/boto/rds/regioninfo.py index 376dc9f0..5019aca9 100644 --- a/boto/rds/regioninfo.py +++ b/boto/rds/regioninfo.py @@ -26,7 +26,8 @@ from boto.regioninfo import RegionInfo class RDSRegionInfo(RegionInfo): - def __init__(self, connection=None, name=None, endpoint=None): + def __init__(self, connection=None, name=None, endpoint=None, + connection_cls=None): from boto.rds import RDSConnection super(RDSRegionInfo, self).__init__(connection, name, endpoint, RDSConnection) diff --git a/boto/redshift/__init__.py b/boto/redshift/__init__.py index 1019e895..f98ececd 100644 --- a/boto/redshift/__init__.py +++ b/boto/redshift/__init__.py @@ -20,7 +20,7 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. # -from boto.regioninfo import RegionInfo +from boto.regioninfo import RegionInfo, get_regions def regions(): @@ -31,27 +31,7 @@ def regions(): :return: A list of :class:`boto.regioninfo.RegionInfo` """ from boto.redshift.layer1 import RedshiftConnection - cls = RedshiftConnection - return [ - RegionInfo(name='us-east-1', - endpoint='redshift.us-east-1.amazonaws.com', - connection_cls=cls), - RegionInfo(name='us-west-2', - endpoint='redshift.us-west-2.amazonaws.com', - connection_cls=cls), - RegionInfo(name='eu-west-1', - endpoint='redshift.eu-west-1.amazonaws.com', - connection_cls=cls), - RegionInfo(name='ap-northeast-1', - endpoint='redshift.ap-northeast-1.amazonaws.com', - connection_cls=cls), - RegionInfo(name='ap-southeast-1', - endpoint='redshift.ap-southeast-1.amazonaws.com', - connection_cls=cls), - RegionInfo(name='ap-southeast-2', - endpoint='redshift.ap-southeast-2.amazonaws.com', - connection_cls=cls), - ] + return get_regions('redshift', connection_cls=RedshiftConnection) def 
connect_to_region(region_name, **kw_params):
diff --git a/boto/regioninfo.py b/boto/regioninfo.py
index 6e936b37..29ebb1e3 100644
--- a/boto/regioninfo.py
+++ b/boto/regioninfo.py
@@ -20,6 +20,131 @@
 # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 # IN THE SOFTWARE.
+from __future__ import with_statement
+import os
+
+import boto
+from boto.compat import json
+from boto.exception import BotoClientError
+
+
+def load_endpoint_json(path):
+    """
+    Loads a given JSON file & returns it.
+
+    :param path: The path to the JSON file
+    :type path: string
+
+    :returns: The loaded data
+    """
+    with open(path, 'r') as endpoints_file:
+        return json.load(endpoints_file)
+
+
+def merge_endpoints(defaults, additions):
+    """
+    Given an existing set of endpoint data, this will deep-update it with
+    any similarly structured data in the additions.
+
+    :param defaults: The existing endpoints data
+    :type defaults: dict
+
+    :param additions: The additional endpoints data
+    :type additions: dict
+
+    :returns: The modified endpoints data
+    :rtype: dict
+    """
+    # We can't just do a ``defaults.update(...)`` here, as that could
+    # *overwrite* regions if present in both.
+    # We'll iterate instead, essentially doing a deeper merge.
+    for service, region_info in additions.items():
+        # Set the default, if not present, to an empty dict.
+        defaults.setdefault(service, {})
+        defaults[service].update(region_info)
+
+    return defaults
+
+
+def load_regions():
+    """
+    Actually load the region/endpoint information from the JSON files.
+
+    By default, this loads from the default included ``boto/endpoints.json``
+    file.
+
+    Users can override/extend this by supplying either a ``BOTO_ENDPOINTS``
+    environment variable or an ``endpoints_path`` config variable, either of
+    which should be an absolute path to the user's JSON file.
+
+    :returns: The endpoints data
+    :rtype: dict
+    """
+    # Load the defaults first. 
+ endpoints = load_endpoint_json(boto.ENDPOINTS_PATH) + additional_path = None + + # Try the ENV var. If not, check the config file. + if os.environ.get('BOTO_ENDPOINTS'): + additional_path = os.environ['BOTO_ENDPOINTS'] + elif boto.config.get('boto', 'endpoints_path'): + additional_path = boto.config.get('boto', 'endpoints_path') + + # If there's a file provided, we'll load it & additively merge it into + # the endpoints. + if additional_path: + additional = load_endpoint_json(additional_path) + endpoints = merge_endpoints(endpoints, additional) + + return endpoints + + +def get_regions(service_name, region_cls=None, connection_cls=None): + """ + Given a service name (like ``ec2``), returns a list of ``RegionInfo`` + objects for that service. + + This leverages the ``endpoints.json`` file (+ optional user overrides) to + configure/construct all the objects. + + :param service_name: The name of the service to construct the ``RegionInfo`` + objects for. Ex: ``ec2``, ``s3``, ``sns``, etc. + :type service_name: string + + :param region_cls: (Optional) The class to use when constructing. By + default, this is ``RegionInfo``. + :type region_cls: class + + :param connection_cls: (Optional) The connection class for the + ``RegionInfo`` object. Providing this allows the ``connect`` method on + the ``RegionInfo`` to work. Default is ``None`` (no connection). + :type connection_cls: class + + :returns: A list of configured ``RegionInfo`` objects + :rtype: list + """ + endpoints = load_regions() + + if not service_name in endpoints: + raise BotoClientError( + "Service '%s' not found in endpoints." 
% service_name + ) + + if region_cls is None: + region_cls = RegionInfo + + region_objs = [] + + for region_name, endpoint in endpoints.get(service_name, {}).items(): + region_objs.append( + region_cls( + name=region_name, + endpoint=endpoint, + connection_cls=connection_cls + ) + ) + + return region_objs class RegionInfo(object): diff --git a/boto/route53/__init__.py b/boto/route53/__init__.py index 3546d25d..7b131f92 100644 --- a/boto/route53/__init__.py +++ b/boto/route53/__init__.py @@ -23,8 +23,8 @@ # this is here for backward compatibility # originally, the Route53Connection class was defined here -from connection import Route53Connection -from boto.regioninfo import RegionInfo +from boto.route53.connection import Route53Connection +from boto.regioninfo import RegionInfo, get_regions class Route53RegionInfo(RegionInfo): @@ -51,10 +51,22 @@ def regions(): :rtype: list :return: A list of :class:`boto.regioninfo.RegionInfo` instances """ - return [Route53RegionInfo(name='universal', - endpoint='route53.amazonaws.com', - connection_cls=Route53Connection) - ] + regions = get_regions( + 'route53', + region_cls=Route53RegionInfo, + connection_cls=Route53Connection + ) + + # For historical reasons, we had a "universal" endpoint as well. 
+ regions.append( + Route53RegionInfo( + name='universal', + endpoint='route53.amazonaws.com', + connection_cls=Route53Connection + ) + ) + + return regions def connect_to_region(region_name, **kw_params): diff --git a/boto/route53/connection.py b/boto/route53/connection.py index 398ff870..f9ba5bea 100644 --- a/boto/route53/connection.py +++ b/boto/route53/connection.py @@ -63,13 +63,15 @@ class Route53Connection(AWSAuthConnection): def __init__(self, aws_access_key_id=None, aws_secret_access_key=None, port=None, proxy=None, proxy_port=None, host=DefaultHost, debug=0, security_token=None, - validate_certs=True, https_connection_factory=None): + validate_certs=True, https_connection_factory=None, + profile_name=None): super(Route53Connection, self).__init__(host, aws_access_key_id, aws_secret_access_key, True, port, proxy, proxy_port, debug=debug, security_token=security_token, validate_certs=validate_certs, - https_connection_factory=https_connection_factory) + https_connection_factory=https_connection_factory, + profile_name=profile_name) def _required_auth_capability(self): return ['route53'] diff --git a/boto/s3/__init__.py b/boto/s3/__init__.py index 49a73ea7..271c1047 100644 --- a/boto/s3/__init__.py +++ b/boto/s3/__init__.py @@ -22,7 +22,7 @@ # IN THE SOFTWARE. 
# -from boto.regioninfo import RegionInfo +from boto.regioninfo import RegionInfo, get_regions class S3RegionInfo(RegionInfo): @@ -50,37 +50,11 @@ def regions(): :return: A list of :class:`boto.regioninfo.RegionInfo` """ from .connection import S3Connection - return [S3RegionInfo(name='us-east-1', - endpoint='s3.amazonaws.com', - connection_cls=S3Connection), - S3RegionInfo(name='us-gov-west-1', - endpoint='s3-us-gov-west-1.amazonaws.com', - connection_cls=S3Connection), - S3RegionInfo(name='us-west-1', - endpoint='s3-us-west-1.amazonaws.com', - connection_cls=S3Connection), - S3RegionInfo(name='us-west-2', - endpoint='s3-us-west-2.amazonaws.com', - connection_cls=S3Connection), - S3RegionInfo(name='ap-northeast-1', - endpoint='s3-ap-northeast-1.amazonaws.com', - connection_cls=S3Connection), - S3RegionInfo(name='ap-southeast-1', - endpoint='s3-ap-southeast-1.amazonaws.com', - connection_cls=S3Connection), - S3RegionInfo(name='ap-southeast-2', - endpoint='s3-ap-southeast-2.amazonaws.com', - connection_cls=S3Connection), - S3RegionInfo(name='eu-west-1', - endpoint='s3-eu-west-1.amazonaws.com', - connection_cls=S3Connection), - S3RegionInfo(name='sa-east-1', - endpoint='s3-sa-east-1.amazonaws.com', - connection_cls=S3Connection), - S3RegionInfo(name='cn-north-1', - endpoint='s3.cn-north-1.amazonaws.com.cn', - connection_cls=S3Connection), - ] + return get_regions( + 's3', + region_cls=S3RegionInfo, + connection_cls=S3Connection + ) def connect_to_region(region_name, **kw_params): diff --git a/boto/s3/bucket.py b/boto/s3/bucket.py index a14fb8a7..ed409703 100644 --- a/boto/s3/bucket.py +++ b/boto/s3/bucket.py @@ -143,24 +143,46 @@ class Bucket(object): return self.get_key(key_name, headers=headers) def get_key(self, key_name, headers=None, version_id=None, - response_headers=None): + response_headers=None, validate=True): """ Check to see if a particular key exists within the bucket. This method uses a HEAD request to check for the existance of the key. 
Returns: An instance of a Key object or None - :type key_name: string :param key_name: The name of the key to retrieve + :type key_name: string + + :param headers: The headers to send when retrieving the key + :type headers: dict + + :param version_id: + :type version_id: string - :type response_headers: dict :param response_headers: A dictionary containing HTTP headers/values that will override any headers associated with the stored object in the response. See http://goo.gl/EWOPb for details. + :type response_headers: dict + + :param validate: Verifies whether the key exists. If ``False``, this + will not hit the service, constructing an in-memory object. + Default is ``True``. + :type validate: bool :rtype: :class:`boto.s3.key.Key` :returns: A Key object from this bucket. """ + if validate is False: + if headers or version_id or response_headers: + raise BotoClientError( + "When providing 'validate=False', no other params " + \ + "are allowed." + ) + + # This leans on the default behavior of ``new_key`` (not hitting + # the service). If that changes, that behavior should migrate here. + return self.new_key(key_name) + query_args_l = [] if version_id: query_args_l.append('versionId=%s' % version_id) @@ -545,6 +567,7 @@ class Bucket(object): list only if they have an upload ID lexicographically greater than the specified upload_id_marker. + :type encoding_type: string :param encoding_type: Requests Amazon S3 to encode the response and specifies the encoding method to use. @@ -555,14 +578,31 @@ class Bucket(object): encode the keys in the response. Valid options: ``url`` - :type encoding_type: string + + :type delimiter: string + :param delimiter: Character you use to group keys. + All keys that contain the same string between the prefix, if + specified, and the first occurrence of the delimiter after the + prefix are grouped under a single result element, CommonPrefixes. 
+ If you don't specify the prefix parameter, then the substring + starts at the beginning of the key. The keys that are grouped + under CommonPrefixes result element are not returned elsewhere + in the response. + + :type prefix: string + :param prefix: Lists in-progress uploads only for those keys that + begin with the specified prefix. You can use prefixes to separate + a bucket into different grouping of keys. (You can think of using + prefix to make groups in the same way you'd use a folder in a + file system.) :rtype: ResultSet :return: The result from S3 listing the uploads requested """ self.validate_kwarg_names(params, ['max_uploads', 'key_marker', - 'upload_id_marker', 'encoding_type']) + 'upload_id_marker', 'encoding_type', + 'delimiter', 'prefix']) return self._get_all([('Upload', MultiPartUpload), ('CommonPrefixes', Prefix)], 'uploads', headers, **params) diff --git a/boto/s3/connection.py b/boto/s3/connection.py index b876b8f1..a84c701d 100644 --- a/boto/s3/connection.py +++ b/boto/s3/connection.py @@ -148,6 +148,16 @@ class Location(object): CNNorth1 = 'cn-north-1' +class NoHostProvided(object): + # An identifying object to help determine whether the user provided a + # ``host`` or not. Never instantiated. 
+ pass + + +class HostRequiredError(BotoClientError): + pass + + class S3Connection(AWSAuthConnection): DefaultHost = boto.config.get('s3', 'host', 's3.amazonaws.com') @@ -157,11 +167,15 @@ class S3Connection(AWSAuthConnection): def __init__(self, aws_access_key_id=None, aws_secret_access_key=None, is_secure=True, port=None, proxy=None, proxy_port=None, proxy_user=None, proxy_pass=None, - host=DefaultHost, debug=0, https_connection_factory=None, + host=NoHostProvided, debug=0, https_connection_factory=None, calling_format=DefaultCallingFormat, path='/', provider='aws', bucket_class=Bucket, security_token=None, suppress_consec_slashes=True, anon=False, - validate_certs=None): + validate_certs=None, profile_name=None): + no_host_provided = False + if host is NoHostProvided: + no_host_provided = True + host = self.DefaultHost if isinstance(calling_format, basestring): calling_format=boto.utils.find_class(calling_format)() self.calling_format = calling_format @@ -173,7 +187,14 @@ class S3Connection(AWSAuthConnection): debug=debug, https_connection_factory=https_connection_factory, path=path, provider=provider, security_token=security_token, suppress_consec_slashes=suppress_consec_slashes, - validate_certs=validate_certs) + validate_certs=validate_certs, profile_name=profile_name) + # We need to delay until after the call to ``super`` before checking + # to see if SigV4 is in use. + if no_host_provided: + if 'hmac-v4-s3' in self._required_auth_capability(): + raise HostRequiredError( + "When using SigV4, you must specify a 'host' parameter." 
+ ) @detect_potential_s3sigv4 def _required_auth_capability(self): diff --git a/boto/s3/key.py b/boto/s3/key.py index 5c9f076f..ba20c41a 100644 --- a/boto/s3/key.py +++ b/boto/s3/key.py @@ -217,7 +217,8 @@ class Key(object): self.delete_marker = False def handle_restore_headers(self, response): - header = response.getheader('x-amz-restore') + provider = self.bucket.connection.provider + header = response.getheader(provider.restore_header) if header is None: return parts = header.split(',', 1) @@ -299,6 +300,7 @@ class Key(object): self.content_disposition = value self.handle_version_headers(self.resp) self.handle_encryption_headers(self.resp) + self.handle_restore_headers(self.resp) self.handle_addl_headers(self.resp.getheaders()) def open_write(self, headers=None, override_num_retries=None): @@ -1416,6 +1418,14 @@ class Key(object): headers/values that will override any headers associated with the stored object in the response. See http://goo.gl/EWOPb for details. + + :type version_id: str + :param version_id: The ID of a particular version of the object. + If this parameter is not supplied but the Key object has + a ``version_id`` attribute, that value will be used when + retrieving the object. You can set the Key object's + ``version_id`` attribute to None to always grab the latest + version from a version-enabled bucket. """ self._get_file_internal(fp, headers=headers, cb=cb, num_cb=num_cb, torrent=torrent, version_id=version_id, @@ -1573,6 +1583,14 @@ class Key(object): headers/values that will override any headers associated with the stored object in the response. See http://goo.gl/EWOPb for details. + + :type version_id: str + :param version_id: The ID of a particular version of the object. + If this parameter is not supplied but the Key object has + a ``version_id`` attribute, that value will be used when + retrieving the object. 
You can set the Key object's + ``version_id`` attribute to None to always grab the latest + version from a version-enabled bucket. """ if self.bucket is not None: if res_download_handler: @@ -1629,6 +1647,14 @@ class Key(object): headers/values that will override any headers associated with the stored object in the response. See http://goo.gl/EWOPb for details. + + :type version_id: str + :param version_id: The ID of a particular version of the object. + If this parameter is not supplied but the Key object has + a ``version_id`` attribute, that value will be used when + retrieving the object. You can set the Key object's + ``version_id`` attribute to None to always grab the latest + version from a version-enabled bucket. """ try: with open(filename, 'wb') as fp: @@ -1687,6 +1713,14 @@ class Key(object): with the stored object in the response. See http://goo.gl/EWOPb for details. + :type version_id: str + :param version_id: The ID of a particular version of the object. + If this parameter is not supplied but the Key object has + a ``version_id`` attribute, that value will be used when + retrieving the object. You can set the Key object's + ``version_id`` attribute to None to always grab the latest + version from a version-enabled bucket. + :rtype: string :returns: The contents of the file as a string """ diff --git a/boto/s3/lifecycle.py b/boto/s3/lifecycle.py index 58126e6d..dadc1d32 100644 --- a/boto/s3/lifecycle.py +++ b/boto/s3/lifecycle.py @@ -23,16 +23,18 @@ class Rule(object): """ - A Lifcycle rule for an S3 bucket. + A Lifecycle rule for an S3 bucket. :ivar id: Unique identifier for the rule. The value cannot be longer - than 255 characters. + than 255 characters. This value is optional. The server will + generate a unique value for the rule if no value is provided. :ivar prefix: Prefix identifying one or more objects to which the - rule applies. + rule applies. If prefix is not provided, Boto generates a default + prefix which will match all objects. 
- :ivar status: If Enabled, the rule is currently being applied. - If Disabled, the rule is not currently being applied. + :ivar status: If 'Enabled', the rule is currently being applied. + If 'Disabled', the rule is not currently being applied. :ivar expiration: An instance of `Expiration`. This indicates the lifetime of the objects that are subject to the rule. @@ -44,7 +46,7 @@ class Rule(object): def __init__(self, id=None, prefix=None, status=None, expiration=None, transition=None): self.id = id - self.prefix = prefix + self.prefix = '' if prefix is None else prefix self.status = status if isinstance(expiration, (int, long)): # retain backwards compatibility??? @@ -78,7 +80,8 @@ class Rule(object): def to_xml(self): s = '<Rule>' - s += '<ID>%s</ID>' % self.id + if self.id is not None: + s += '<ID>%s</ID>' % self.id s += '<Prefix>%s</Prefix>' % self.prefix s += '<Status>%s</Status>' % self.status if self.expiration is not None: @@ -199,7 +202,8 @@ class Lifecycle(list): s += '</LifecycleConfiguration>' return s - def add_rule(self, id, prefix, status, expiration, transition=None): + def add_rule(self, id=None, prefix='', status='Enabled', + expiration=None, transition=None): """ Add a rule to this Lifecycle configuration. This only adds the rule to the local copy. To install the new rule(s) on @@ -208,7 +212,8 @@ class Lifecycle(list): :type id: str :param id: Unique identifier for the rule. The value cannot be longer - than 255 characters. + than 255 characters. This value is optional. The server will + generate a unique value for the rule if no value is provided. 
:type prefix: str :iparam prefix: Prefix identifying one or more objects to which the diff --git a/boto/sdb/__init__.py b/boto/sdb/__init__.py index bebc1522..6cb30050 100644 --- a/boto/sdb/__init__.py +++ b/boto/sdb/__init__.py @@ -21,6 +21,7 @@ # from .regioninfo import SDBRegionInfo +from boto.regioninfo import get_regions def regions(): @@ -30,23 +31,10 @@ def regions(): :rtype: list :return: A list of :class:`boto.sdb.regioninfo.RegionInfo` instances """ - return [SDBRegionInfo(name='us-east-1', - endpoint='sdb.amazonaws.com'), - SDBRegionInfo(name='eu-west-1', - endpoint='sdb.eu-west-1.amazonaws.com'), - SDBRegionInfo(name='us-west-1', - endpoint='sdb.us-west-1.amazonaws.com'), - SDBRegionInfo(name='sa-east-1', - endpoint='sdb.sa-east-1.amazonaws.com'), - SDBRegionInfo(name='us-west-2', - endpoint='sdb.us-west-2.amazonaws.com'), - SDBRegionInfo(name='ap-northeast-1', - endpoint='sdb.ap-northeast-1.amazonaws.com'), - SDBRegionInfo(name='ap-southeast-1', - endpoint='sdb.ap-southeast-1.amazonaws.com'), - SDBRegionInfo(name='ap-southeast-2', - endpoint='sdb.ap-southeast-2.amazonaws.com') - ] + return get_regions( + 'sdb', + region_cls=SDBRegionInfo + ) def connect_to_region(region_name, **kw_params): diff --git a/boto/sdb/connection.py b/boto/sdb/connection.py index bfbce921..c7370b67 100644 --- a/boto/sdb/connection.py +++ b/boto/sdb/connection.py @@ -86,7 +86,8 @@ class SDBConnection(AWSQueryConnection): is_secure=True, port=None, proxy=None, proxy_port=None, proxy_user=None, proxy_pass=None, debug=0, https_connection_factory=None, region=None, path='/', - converter=None, security_token=None, validate_certs=True): + converter=None, security_token=None, validate_certs=True, + profile_name=None): """ For any keywords that aren't documented, refer to the parent class, :py:class:`boto.connection.AWSAuthConnection`. 
You can avoid having @@ -118,7 +119,8 @@ class SDBConnection(AWSQueryConnection): self.region.endpoint, debug, https_connection_factory, path, security_token=security_token, - validate_certs=validate_certs) + validate_certs=validate_certs, + profile_name=profile_name) self.box_usage = 0.0 self.converter = converter self.item_cls = Item diff --git a/boto/sdb/regioninfo.py b/boto/sdb/regioninfo.py index 769be555..cb0211e1 100644 --- a/boto/sdb/regioninfo.py +++ b/boto/sdb/regioninfo.py @@ -26,7 +26,8 @@ from boto.regioninfo import RegionInfo class SDBRegionInfo(RegionInfo): - def __init__(self, connection=None, name=None, endpoint=None): + def __init__(self, connection=None, name=None, endpoint=None, + connection_cls=None): from boto.sdb.connection import SDBConnection super(SDBRegionInfo, self).__init__(connection, name, endpoint, SDBConnection) diff --git a/boto/ses/__init__.py b/boto/ses/__init__.py index b3d03ae3..81d4206d 100644 --- a/boto/ses/__init__.py +++ b/boto/ses/__init__.py @@ -21,7 +21,7 @@ # IN THE SOFTWARE. 
from connection import SESConnection -from boto.regioninfo import RegionInfo +from boto.regioninfo import RegionInfo, get_regions def regions(): @@ -31,9 +31,7 @@ def regions(): :rtype: list :return: A list of :class:`boto.regioninfo.RegionInfo` instances """ - return [RegionInfo(name='us-east-1', - endpoint='email.us-east-1.amazonaws.com', - connection_cls=SESConnection)] + return get_regions('ses', connection_cls=SESConnection) def connect_to_region(region_name, **kw_params): diff --git a/boto/ses/connection.py b/boto/ses/connection.py index 5425ef35..df115232 100644 --- a/boto/ses/connection.py +++ b/boto/ses/connection.py @@ -42,7 +42,7 @@ class SESConnection(AWSAuthConnection): is_secure=True, port=None, proxy=None, proxy_port=None, proxy_user=None, proxy_pass=None, debug=0, https_connection_factory=None, region=None, path='/', - security_token=None, validate_certs=True): + security_token=None, validate_certs=True, profile_name=None): if not region: region = RegionInfo(self, self.DefaultRegionName, self.DefaultRegionEndpoint) @@ -53,7 +53,8 @@ class SESConnection(AWSAuthConnection): proxy_user, proxy_pass, debug, https_connection_factory, path, security_token=security_token, - validate_certs=validate_certs) + validate_certs=validate_certs, + profile_name=profile_name) def _required_auth_capability(self): return ['ses'] diff --git a/boto/sns/__init__.py b/boto/sns/__init__.py index 4764a94f..1517f5f1 100644 --- a/boto/sns/__init__.py +++ b/boto/sns/__init__.py @@ -23,7 +23,7 @@ # this is here for backward compatibility # originally, the SNSConnection class was defined here from connection import SNSConnection -from boto.regioninfo import RegionInfo +from boto.regioninfo import RegionInfo, get_regions def regions(): @@ -33,37 +33,7 @@ def regions(): :rtype: list :return: A list of :class:`boto.regioninfo.RegionInfo` instances """ - return [RegionInfo(name='us-east-1', - endpoint='sns.us-east-1.amazonaws.com', - connection_cls=SNSConnection), - 
RegionInfo(name='eu-west-1', - endpoint='sns.eu-west-1.amazonaws.com', - connection_cls=SNSConnection), - RegionInfo(name='us-gov-west-1', - endpoint='sns.us-gov-west-1.amazonaws.com', - connection_cls=SNSConnection), - RegionInfo(name='us-west-1', - endpoint='sns.us-west-1.amazonaws.com', - connection_cls=SNSConnection), - RegionInfo(name='sa-east-1', - endpoint='sns.sa-east-1.amazonaws.com', - connection_cls=SNSConnection), - RegionInfo(name='us-west-2', - endpoint='sns.us-west-2.amazonaws.com', - connection_cls=SNSConnection), - RegionInfo(name='ap-northeast-1', - endpoint='sns.ap-northeast-1.amazonaws.com', - connection_cls=SNSConnection), - RegionInfo(name='ap-southeast-1', - endpoint='sns.ap-southeast-1.amazonaws.com', - connection_cls=SNSConnection), - RegionInfo(name='ap-southeast-2', - endpoint='sns.ap-southeast-2.amazonaws.com', - connection_cls=SNSConnection), - RegionInfo(name='cn-north-1', - endpoint=' sns.cn-north-1.amazonaws.com.cn', - connection_cls=SNSConnection), - ] + return get_regions('sns', connection_cls=SNSConnection) def connect_to_region(region_name, **kw_params): diff --git a/boto/sns/connection.py b/boto/sns/connection.py index 1d459311..c98793f5 100644 --- a/boto/sns/connection.py +++ b/boto/sns/connection.py @@ -48,15 +48,18 @@ class SNSConnection(AWSQueryConnection): requests, and handling error responses. For a list of available SDKs, go to `Tools for Amazon Web Services`_. 
""" - DefaultRegionName = 'us-east-1' - DefaultRegionEndpoint = 'sns.us-east-1.amazonaws.com' - APIVersion = '2010-03-31' + DefaultRegionName = boto.config.get('Boto', 'sns_region_name', 'us-east-1') + DefaultRegionEndpoint = boto.config.get('Boto', 'sns_region_endpoint', + 'sns.us-east-1.amazonaws.com') + APIVersion = boto.config.get('Boto', 'sns_version', '2010-03-31') + def __init__(self, aws_access_key_id=None, aws_secret_access_key=None, is_secure=True, port=None, proxy=None, proxy_port=None, proxy_user=None, proxy_pass=None, debug=0, https_connection_factory=None, region=None, path='/', - security_token=None, validate_certs=True): + security_token=None, validate_certs=True, + profile_name=None): if not region: region = RegionInfo(self, self.DefaultRegionName, self.DefaultRegionEndpoint, @@ -69,7 +72,8 @@ class SNSConnection(AWSQueryConnection): self.region.endpoint, debug, https_connection_factory, path, security_token=security_token, - validate_certs=validate_certs) + validate_certs=validate_certs, + profile_name=profile_name) def _build_dict_as_list_params(self, params, dictionary, name): """ @@ -264,7 +268,7 @@ class SNSConnection(AWSQueryConnection): :type protocol: string :param protocol: The protocol used to communicate with the subscriber. Current choices are: - email|email-json|http|https|sqs|sms + email|email-json|http|https|sqs|sms|application :type endpoint: string :param endpoint: The location of the endpoint for @@ -274,7 +278,10 @@ class SNSConnection(AWSQueryConnection): * For http, this would be a URL beginning with http * For https, this would be a URL beginning with https * For sqs, this would be the ARN of an SQS Queue - * For sms, this would be a phone number of an SMS-enabled device + * For sms, this would be a phone number of an + SMS-enabled device + * For application, the endpoint is the EndpointArn + of a mobile app and device. 
""" params = {'TopicArn': topic, 'Protocol': protocol, diff --git a/boto/sqs/__init__.py b/boto/sqs/__init__.py index 35e6c2bd..526a34cc 100644 --- a/boto/sqs/__init__.py +++ b/boto/sqs/__init__.py @@ -21,6 +21,7 @@ # from regioninfo import SQSRegionInfo +from boto.regioninfo import get_regions def regions(): @@ -30,27 +31,10 @@ def regions(): :rtype: list :return: A list of :class:`boto.sqs.regioninfo.RegionInfo` """ - return [SQSRegionInfo(name='us-east-1', - endpoint='queue.amazonaws.com'), - SQSRegionInfo(name='us-gov-west-1', - endpoint='sqs.us-gov-west-1.amazonaws.com'), - SQSRegionInfo(name='eu-west-1', - endpoint='eu-west-1.queue.amazonaws.com'), - SQSRegionInfo(name='us-west-1', - endpoint='us-west-1.queue.amazonaws.com'), - SQSRegionInfo(name='us-west-2', - endpoint='us-west-2.queue.amazonaws.com'), - SQSRegionInfo(name='sa-east-1', - endpoint='sa-east-1.queue.amazonaws.com'), - SQSRegionInfo(name='ap-northeast-1', - endpoint='ap-northeast-1.queue.amazonaws.com'), - SQSRegionInfo(name='ap-southeast-1', - endpoint='ap-southeast-1.queue.amazonaws.com'), - SQSRegionInfo(name='ap-southeast-2', - endpoint='ap-southeast-2.queue.amazonaws.com'), - SQSRegionInfo(name='cn-north-1', - endpoint='sqs.cn-north-1.amazonaws.com.cn'), - ] + return get_regions( + 'sqs', + region_cls=SQSRegionInfo + ) def connect_to_region(region_name, **kw_params): diff --git a/boto/sqs/connection.py b/boto/sqs/connection.py index f403d639..8444c84c 100644 --- a/boto/sqs/connection.py +++ b/boto/sqs/connection.py @@ -19,6 +19,7 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. +import boto from boto.connection import AWSQueryConnection from boto.sqs.regioninfo import SQSRegionInfo from boto.sqs.queue import Queue @@ -32,9 +33,10 @@ class SQSConnection(AWSQueryConnection): """ A Connection to the SQS Service. 
""" - DefaultRegionName = 'us-east-1' - DefaultRegionEndpoint = 'queue.amazonaws.com' - APIVersion = '2012-11-05' + DefaultRegionName = boto.config.get('Boto', 'sqs_region_name', 'us-east-1') + DefaultRegionEndpoint = boto.config.get('Boto', 'sqs_region_endpoint', + 'queue.amazonaws.com') + APIVersion = boto.config.get('Boto', 'sqs_version', '2012-11-05') DefaultContentType = 'text/plain' ResponseError = SQSError AuthServiceName = 'sqs' @@ -127,6 +129,7 @@ class SQSConnection(AWSQueryConnection): * LastModifiedTimestamp * Policy * ReceiveMessageWaitTimeSeconds + * RedrivePolicy :rtype: :class:`boto.sqs.attributes.Attributes` :return: An Attributes object containing request value(s). @@ -357,6 +360,19 @@ class SQSConnection(AWSQueryConnection): lookup = get_queue + def get_dead_letter_source_queues(self, queue): + """ + Retrieves the dead letter source queues for a given queue. + + :type queue: A :class:`boto.sqs.queue.Queue` object. + :param queue: The queue for which to get DL source queues + :rtype: list + :returns: A list of :py:class:`boto.sqs.queue.Queue` instances. + """ + params = {'QueueUrl': queue.url} + return self.get_list('ListDeadLetterSourceQueues', params, + [('QueueUrl', Queue)]) + # # Permissions methods # diff --git a/boto/sqs/regioninfo.py b/boto/sqs/regioninfo.py index 9f7d7f12..d21dff9c 100644 --- a/boto/sqs/regioninfo.py +++ b/boto/sqs/regioninfo.py @@ -26,7 +26,8 @@ from boto.regioninfo import RegionInfo class SQSRegionInfo(RegionInfo): - def __init__(self, connection=None, name=None, endpoint=None): + def __init__(self, connection=None, name=None, endpoint=None, + connection_cls=None): from boto.sqs.connection import SQSConnection super(SQSRegionInfo, self).__init__(connection, name, endpoint, SQSConnection) diff --git a/boto/sts/__init__.py b/boto/sts/__init__.py index d7c26f56..a130b7fe 100644 --- a/boto/sts/__init__.py +++ b/boto/sts/__init__.py @@ -21,7 +21,7 @@ # IN THE SOFTWARE. 
from connection import STSConnection -from boto.regioninfo import RegionInfo +from boto.regioninfo import RegionInfo, get_regions def regions(): @@ -31,16 +31,7 @@ def regions(): :rtype: list :return: A list of :class:`boto.regioninfo.RegionInfo` instances """ - return [RegionInfo(name='us-east-1', - endpoint='sts.amazonaws.com', - connection_cls=STSConnection), - RegionInfo(name='us-gov-west-1', - endpoint='sts.us-gov-west-1.amazonaws.com', - connection_cls=STSConnection), - RegionInfo(name='cn-north-1', - endpoint='sts.cn-north-1.amazonaws.com.cn', - connection_cls=STSConnection), - ] + return get_regions('sts', connection_cls=STSConnection) def connect_to_region(region_name, **kw_params): diff --git a/boto/sts/connection.py b/boto/sts/connection.py index 7c480a3f..ef4d52ed 100644 --- a/boto/sts/connection.py +++ b/boto/sts/connection.py @@ -70,7 +70,7 @@ class STSConnection(AWSQueryConnection): proxy_user=None, proxy_pass=None, debug=0, https_connection_factory=None, region=None, path='/', converter=None, validate_certs=True, anon=False, - security_token=None): + security_token=None, profile_name=None): if not region: region = RegionInfo(self, self.DefaultRegionName, self.DefaultRegionEndpoint, @@ -85,7 +85,8 @@ class STSConnection(AWSQueryConnection): self.region.endpoint, debug, https_connection_factory, path, validate_certs=validate_certs, - security_token=security_token) + security_token=security_token, + profile_name=profile_name) def _required_auth_capability(self): if self.anon: diff --git a/boto/support/__init__.py b/boto/support/__init__.py index 6d59b375..c114a9a3 100644 --- a/boto/support/__init__.py +++ b/boto/support/__init__.py @@ -20,7 +20,7 @@ # IN THE SOFTWARE. 
# -from boto.regioninfo import RegionInfo +from boto.regioninfo import RegionInfo, get_regions def regions(): @@ -31,13 +31,7 @@ def regions(): :return: A list of :class:`boto.regioninfo.RegionInfo` """ from boto.support.layer1 import SupportConnection - return [ - RegionInfo( - name='us-east-1', - endpoint='support.us-east-1.amazonaws.com', - connection_cls=SupportConnection - ), - ] + return get_regions('support', connection_cls=SupportConnection) def connect_to_region(region_name, **kw_params): diff --git a/boto/swf/__init__.py b/boto/swf/__init__.py index fc6ebfe2..bebbd696 100644 --- a/boto/swf/__init__.py +++ b/boto/swf/__init__.py @@ -23,20 +23,10 @@ # from boto.ec2.regioninfo import RegionInfo +from boto.regioninfo import get_regions, load_regions import boto.swf.layer1 -REGION_ENDPOINTS = { - 'us-east-1': 'swf.us-east-1.amazonaws.com', - 'us-gov-west-1': 'swf.us-gov-west-1.amazonaws.com', - 'us-west-1': 'swf.us-west-1.amazonaws.com', - 'us-west-2': 'swf.us-west-2.amazonaws.com', - 'sa-east-1': 'swf.sa-east-1.amazonaws.com', - 'eu-west-1': 'swf.eu-west-1.amazonaws.com', - 'ap-northeast-1': 'swf.ap-northeast-1.amazonaws.com', - 'ap-southeast-1': 'swf.ap-southeast-1.amazonaws.com', - 'ap-southeast-2': 'swf.ap-southeast-2.amazonaws.com', - 'cn-north-1': 'swf.cn-north-1.amazonaws.com.cn', -} +REGION_ENDPOINTS = load_regions().get('swf', {}) def regions(**kw_params): @@ -46,9 +36,7 @@ def regions(**kw_params): :rtype: list :return: A list of :class:`boto.regioninfo.RegionInfo` """ - return [RegionInfo(name=region_name, endpoint=REGION_ENDPOINTS[region_name], - connection_cls=boto.swf.layer1.Layer1) - for region_name in REGION_ENDPOINTS] + return get_regions('swf', connection_cls=boto.swf.layer1.Layer1) def connect_to_region(region_name, **kw_params): diff --git a/boto/swf/layer1.py b/boto/swf/layer1.py index d027abd4..73a809b2 100644 --- a/boto/swf/layer1.py +++ b/boto/swf/layer1.py @@ -69,7 +69,7 @@ class Layer1(AWSAuthConnection): def __init__(self, 
aws_access_key_id=None, aws_secret_access_key=None, is_secure=True, port=None, proxy=None, proxy_port=None, - debug=0, session_token=None, region=None): + debug=0, session_token=None, region=None, profile_name=None): if not region: region_name = boto.config.get('SWF', 'region', self.DefaultRegionName) @@ -82,7 +82,7 @@ class Layer1(AWSAuthConnection): super(Layer1, self).__init__(self.region.endpoint, aws_access_key_id, aws_secret_access_key, is_secure, port, proxy, proxy_port, - debug, session_token) + debug, session_token, profile_name=profile_name) def _required_auth_capability(self): return ['hmac-v4'] diff --git a/boto/vpc/__init__.py b/boto/vpc/__init__.py index 7e7c0ef9..4025d667 100644 --- a/boto/vpc/__init__.py +++ b/boto/vpc/__init__.py @@ -35,7 +35,7 @@ from boto.vpc.dhcpoptions import DhcpOptions from boto.vpc.subnet import Subnet from boto.vpc.vpnconnection import VpnConnection from boto.ec2 import RegionData -from boto.regioninfo import RegionInfo +from boto.regioninfo import RegionInfo, get_regions def regions(**kw_params): @@ -48,16 +48,7 @@ def regions(**kw_params): :rtype: list :return: A list of :class:`boto.ec2.regioninfo.RegionInfo` """ - regions = [] - for region_name in RegionData: - region = RegionInfo(name=region_name, - endpoint=RegionData[region_name], - connection_cls=VPCConnection) - regions.append(region) - regions.append(RegionInfo(name='us-gov-west-1', - endpoint=RegionData[region_name], - connection_cls=VPCConnection)) - return regions + return get_regions('ec2', connection_cls=VPCConnection) def connect_to_region(region_name, **kw_params): @@ -1125,7 +1116,7 @@ class VPCConnection(EC2Connection): - *state*, a list of states of the Subnet (pending,available) - - *vpcId*, a list of IDs of teh VPC the subnet is in. + - *vpcId*, a list of IDs of the VPC that the subnet is in. - *cidrBlock*, a list of CIDR blocks of the subnet - *availabilityZone*, list of the Availability Zones the subnet is in. 
diff --git a/docs/source/autoscale_tut.rst b/docs/source/autoscale_tut.rst index d1eaf3f9..abc60957 100644 --- a/docs/source/autoscale_tut.rst +++ b/docs/source/autoscale_tut.rst @@ -199,7 +199,7 @@ To retrieve the instances in your autoscale group: >>> import boto.ec2 >>> ec2 = boto.ec2.connect_to_region('us-west-2) ->>> conn.get_all_groups(names=['my_group'])[0] +>>> group = conn.get_all_groups(names=['my_group'])[0] >>> instance_ids = [i.instance_id for i in group.instances] >>> instances = ec2.get_only_instances(instance_ids) diff --git a/docs/source/boto_config_tut.rst b/docs/source/boto_config_tut.rst index dc8000e7..bdfc489b 100644 --- a/docs/source/boto_config_tut.rst +++ b/docs/source/boto_config_tut.rst @@ -52,9 +52,10 @@ Credentials The Credentials section is used to specify the AWS credentials used for all boto requests. The order of precedence for authentication credentials is: -* Credentials passed into Connection class constructor. +* Credentials passed into the Connection class constructor. * Credentials specified by environment variables -* Credentials specified as options in the config file. +* Credentials specified as named profiles in the config file. +* Credentials specified by default in the config file. This section defines the following options: ``aws_access_key_id`` and ``aws_secret_access_key``. The former being your AWS key id and the latter @@ -62,12 +63,23 @@ being the secret key. For example:: + [profile name_goes_here] + aws_access_key_id = <access key for this profile> + aws_secret_access_key = <secret key for this profile> + [Credentials] - aws_access_key_id = <your access key> - aws_secret_access_key = <your secret key> + aws_access_key_id = <your default access key> + aws_secret_access_key = <your default secret key> Please notice that quote characters are not used to either side of the '=' -operator even when both your AWS access key id and secret key are strings. 
+operator even when both your AWS access key ID and secret key are strings. + +If you have multiple AWS keypairs that you use for different purposes, +use the ``profile`` style shown above. You can set an arbitrary number +of profiles within your configuration files and then reference them by name +when you instantiate your connection. If you specify a profile that does not +exist in the configuration, the keys used under the ``[Credentials]`` heading +will be applied by default. For greater security, the secret key can be stored in a keyring and retrieved via the keyring package. To use a keyring, use ``keyring``, @@ -140,6 +152,10 @@ For example:: timeout for httplib . :send_crlf_after_proxy_auth_headers: Change line ending behaviour with proxies. For more details see this `discussion <https://groups.google.com/forum/?fromgroups=#!topic/boto-dev/teenFvOq2Cc>`_ +:endpoints_path: Allows customizing the regions/endpoints available in Boto. + Provide an absolute path to a custom JSON file, which gets merged into the + defaults. (This can also be specified with the ``BOTO_ENDPOINTS`` + environment variable instead.) These settings will default to:: @@ -150,6 +166,7 @@ These settings will default to:: ca_certificates_file = cacerts.txt http_socket_timeout = 60 send_crlf_after_proxy_auth_headers = False + endpoints_path = /path/to/my/boto/endpoints.json You can control the timeouts and number of retries used when retrieving information from the Metadata Service (this is used for retrieving credentials diff --git a/docs/source/elb_tut.rst b/docs/source/elb_tut.rst index 4d5661c4..0cff8ac8 100644 --- a/docs/source/elb_tut.rst +++ b/docs/source/elb_tut.rst @@ -92,7 +92,7 @@ works very much like a list. 
>>> balancers = conn.get_all_load_balancers() >>> balancers[0] -[LoadBalancer:load-balancer-prod] +LoadBalancer:load-balancer-prod Creating a Load Balancer ------------------------ diff --git a/docs/source/releasenotes/v2.24.0.rst b/docs/source/releasenotes/v2.24.0.rst new file mode 100644 index 00000000..737a1f1a --- /dev/null +++ b/docs/source/releasenotes/v2.24.0.rst @@ -0,0 +1,36 @@ +boto v2.24.0 +============ + +:date: 2014/01/29 + +This release adds M3 instance types to Amazon EC2, adds support for dead letter queues to Amazon Simple Queue Service (SQS), adds a single JSON file for all region and endpoint information and provides several fixes to a handful of services and documentation. Additionally, the SDK now supports using AWS Signature Version 4 with Amazon S3. + +Features +-------- +* Load region and endpoint information from a JSON file (:sha:`b9dbaad`) +* Return the x-amz-restore header with GET KEY and fix provider prefix. (:issue:`1990`, :sha:`43e8e0a`) +* Make S3 key validation optional with the ``validate`` parameter (:issue:`2013`, :issue:`1996`, :sha:`fd6b632`) +* Adding new eu-west-1 and eu-west-2 endpoints for SES. (:issue:`2015`, :sha:`d5ef862`, :sha:`56ba3e5`) +* Google Storage now uses new-style Python classes (:issue:`1927`, :sha:`86c9f77`) +* Add support for step summary list to Elastic MapReduce (:issue:`2011`, :sha:`d3af158`) +* Added the M3 instance types. (:issue:`2012`, :sha:`7c82f57`) +* Add credential profile configuration (:issue:`1979`, :sha:`e3ab708`) +* Add support for dead letter queues to SQS (:sha:`93c7d05`) + +Bugfixes +-------- +* Make the Lifecycle Id optional and fix prefix=None in XML generation. (:issue:`2021`, :sha:`362a04a`) +* Fix DynamoDB query limit bug (:issue:`2014`, :sha:`7ecb3f7`) +* Add documentation about the version_id behavior of Key objects. (:issue:`2026`, :sha:`b6b242c`) +* Fixed typo in Table.create example (:issue:`2023`, :sha:`d81a660`) +* Adding a license/copyright header. 
(:issue:`2025`, :sha:`26ded39`) +* Update the docstring for the SNS subscribe method (:issue:`2017`, :sha:`4c806de`) +* Renamed unit test with duplicate name (:issue:`2016`, :sha:`c7bd0bd`) +* Use UTC instead of local time in ``test_refresh_credentials`` (:issue:`2020`, :sha:`b5a2eaf`) +* Fix missing ``security_token`` option in some connection classes (:issue:`1989`, :issue:`1942`, :sha:`2b72f32`) +* Fix listing S3 multipart uploads with some parameter combinations (:issue:`2000`, :sha:`49045bc`) +* Fix ``elbadmin`` crash because of non-extant instances in load balancer (:issue:`2001`, :sha:`d47cc14`) +* Fix anonymous S3 fetch test case (:issue:`1988`, :issue:`1992`, :sha:`8fb1666`) +* Fix ``elbadmin`` boto import (:issue:`2002`, :sha:`674c3a6`) +* Fixing SQS tutorial to correctly describe behavior of the write operation (:issue:`1986`, :sha:`6147d86`) +* Fix various grammar mistakes (:issue:`1980`, :sha:`ada40b5`) diff --git a/docs/source/s3_tut.rst b/docs/source/s3_tut.rst index aff667e3..c87c4f73 100644 --- a/docs/source/s3_tut.rst +++ b/docs/source/s3_tut.rst @@ -143,6 +143,24 @@ guessing. The other thing to note is that boto does stream the content to and from S3 so you should be able to send and receive large files without any problem. +When fetching a key that already exists, you have two options. If you're +uncertain whether a key exists (or if you need the metadata set on it), you can +call ``Bucket.get_key(key_name_here)``. However, if you're sure a key already +exists within a bucket, you can skip the check for a key on the server. + +:: + + >>> import boto + >>> c = boto.connect_s3() + >>> b = c.get_bucket('mybucket') # substitute your bucket name here + + # Will hit the API to check if it exists. + >>> possible_key = b.get_key('mykey') # substitute your key name here + + # Won't hit the API. 
+ >>> key_we_know_is_there = b.get_key('mykey', validate=False) + + Accessing A Bucket ------------------ diff --git a/docs/source/sqs_tut.rst b/docs/source/sqs_tut.rst index 72ccca1d..f4ea0254 100644 --- a/docs/source/sqs_tut.rst +++ b/docs/source/sqs_tut.rst @@ -107,12 +107,13 @@ So, first we need to create a Message object:: >>> from boto.sqs.message import Message >>> m = Message() >>> m.set_body('This is my first message.') ->>> status = q.write(m) +>>> q.write(m) -The write method returns a True if everything went well. If the write -didn't succeed it will either return a False (meaning SQS simply chose -not to write the message for some reason) or an exception if there was -some sort of problem with the request. +The write method will return the ``Message`` object. The ``id`` and +``md5`` attribute of the ``Message`` object will be updated with the +values of the message that was written to the queue. + +If the message cannot be written an ``SQSError`` exception will be raised. Writing Messages (Custom Format) -------------------------------- @@ -135,7 +136,7 @@ default boto Message object. To register your message class, you would:: >>> q.set_message_class(MyMessage) >>> m = MyMessage() >>> m.set_body('This is my first message.') ->>> status = q.write(m) +>>> q.write(m) where MyMessage is the class definition for your message class. Your message class should subclass the boto Message because there is a small diff --git a/scripts/rebuild_endpoints.py b/scripts/rebuild_endpoints.py new file mode 100644 index 00000000..6ec71537 --- /dev/null +++ b/scripts/rebuild_endpoints.py @@ -0,0 +1,54 @@ +from __future__ import print_function + +import json +from pyquery import PyQuery as pq +import requests + + +class FetchError(Exception): + pass + + +def fetch_endpoints(): + # We utilize what the Java SDK publishes as a baseline. 
+ resp = requests.get('https://raw2.github.com/aws/aws-sdk-java/master/src/main/resources/etc/regions.xml') + + if int(resp.status_code) != 200: + raise FetchError("Failed to fetch the endpoints. Got {0}: {1}".format( + resp.status_code, + resp.text + )) + + return resp.text + +def parse_xml(raw_xml): + return pq(raw_xml, parser='xml') + + +def build_data(doc): + data = {} + + # Run through all the regions. These have all the data we need. + for region_elem in doc('Regions').find('Region'): + region = pq(region_elem, parser='xml') + region_name = region.find('Name').text() + + for endp in region.find('Endpoint'): + service_name = endp.find('ServiceName').text + endpoint = endp.find('Hostname').text + + data.setdefault(service_name, {}) + data[service_name][region_name] = endpoint + + return data + + +def main(): + raw_xml = fetch_endpoints() + doc = parse_xml(raw_xml) + data = build_data(doc) + print(json.dumps(data, indent=4, sort_keys=True)) + + +if __name__ == '__main__': + main() @@ -76,7 +76,10 @@ setup(name = "boto", "boto.elastictranscoder", "boto.opsworks", "boto.redshift", "boto.dynamodb2", "boto.support", "boto.cloudtrail", "boto.directconnect", "boto.kinesis"], - package_data = {"boto.cacerts": ["cacerts.txt"]}, + package_data = { + "boto.cacerts": ["cacerts.txt"], + "boto": ["endpoints.json"], + }, license = "MIT", platforms = "Posix; MacOS X; Windows", classifiers = ["Development Status :: 5 - Production/Stable", diff --git a/tests/integration/s3/test_bucket.py b/tests/integration/s3/test_bucket.py index 6d295256..be87570e 100644 --- a/tests/integration/s3/test_bucket.py +++ b/tests/integration/s3/test_bucket.py @@ -34,10 +34,10 @@ from boto.s3.connection import S3Connection from boto.s3.bucketlogging import BucketLogging from boto.s3.lifecycle import Lifecycle from boto.s3.lifecycle import Transition +from boto.s3.lifecycle import Expiration from boto.s3.lifecycle import Rule from boto.s3.acl import Grant from boto.s3.tagging import Tags, TagSet -from 
boto.s3.lifecycle import Lifecycle, Expiration, Transition from boto.s3.website import RedirectLocation @@ -261,3 +261,22 @@ class S3BucketTest (unittest.TestCase): self.assertEqual(rule.expiration.days, days) #Note: Boto seems correct? AWS seems broken? #self.assertEqual(rule.prefix, prefix) + + def test_lifecycle_with_defaults(self): + lifecycle = Lifecycle() + lifecycle.add_rule(expiration=30) + self.assertTrue(self.bucket.configure_lifecycle(lifecycle)) + response = self.bucket.get_lifecycle_config() + self.assertEqual(len(response), 1) + actual_lifecycle = response[0] + self.assertNotEqual(len(actual_lifecycle.id), 0) + self.assertEqual(actual_lifecycle.prefix, '') + + def test_lifecycle_rule_xml(self): + # create a rule directly with id, prefix defaults + rule = Rule(status='Enabled', expiration=30) + s = rule.to_xml() + # Confirm no ID is set in the rule. + self.assertEqual(s.find("<ID>"), -1) + # Confirm Prefix is '' and not set to 'None' + self.assertNotEqual(s.find("<Prefix></Prefix>"), -1) diff --git a/tests/integration/s3/test_connection.py b/tests/integration/s3/test_connection.py index 5d7473ee..c870f6a2 100644 --- a/tests/integration/s3/test_connection.py +++ b/tests/integration/s3/test_connection.py @@ -15,7 +15,7 @@ # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL- # ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. 
@@ -196,7 +196,7 @@ class S3ConnectionTest (unittest.TestCase): # now try to inject a response header data = k.get_contents_as_string(response_headers={'response-content-type' : 'foo/bar'}) assert k.content_type == 'foo/bar' - + # now delete all keys in bucket for k in bucket: if k.name == 'reduced_redundancy': @@ -224,6 +224,7 @@ class S3ConnectionTest (unittest.TestCase): # give bucket anon user access and anon read again auth_bucket.set_acl('public-read') + time.sleep(5) try: iter(anon_bucket.list()).next() self.fail("not expecting contents") diff --git a/tests/integration/s3/test_multipart.py b/tests/integration/s3/test_multipart.py index b603c141..f0a77d68 100644 --- a/tests/integration/s3/test_multipart.py +++ b/tests/integration/s3/test_multipart.py @@ -100,6 +100,17 @@ class S3MultiPartUploadTest(unittest.TestCase): self.assertEqual(lmpu.id, ompu.id) self.assertEqual(0, len(mpus)) + def test_get_all_multipart_uploads(self): + key1 = 'a' + key2 = 'b/c' + mpu1 = self.bucket.initiate_multipart_upload(key1) + mpu2 = self.bucket.initiate_multipart_upload(key2) + rs = self.bucket.get_all_multipart_uploads(prefix='b/', delimiter='/') + for lmpu in rs: + # only expect upload for key2 (mpu2) returned + self.assertEqual(lmpu.key_name, mpu2.key_name) + self.assertEqual(lmpu.id, mpu2.id) + def test_four_part_file(self): key_name = "k" contents = "01234567890123456789" diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py index cea79ebf..007a989c 100644 --- a/tests/unit/__init__.py +++ b/tests/unit/__init__.py @@ -4,6 +4,7 @@ except ImportError: import unittest import httplib +import mock from mock import Mock @@ -77,3 +78,36 @@ class AWSMockServiceTestCase(unittest.TestCase): def default_body(self): return '' + + +class MockServiceWithConfigTestCase(AWSMockServiceTestCase): + def setUp(self): + super(MockServiceWithConfigTestCase, self).setUp() + self.environ = {} + self.config = {} + self.config_patch = mock.patch('boto.provider.config.get', + self.get_config) 
+        self.has_config_patch = mock.patch('boto.provider.config.has_option',
+                                           self.has_config)
+        self.environ_patch = mock.patch('os.environ', self.environ)
+        self.config_patch.start()
+        self.has_config_patch.start()
+        self.environ_patch.start()
+
+    def tearDown(self):
+        self.config_patch.stop()
+        self.has_config_patch.stop()
+        self.environ_patch.stop()
+
+    def has_config(self, section_name, key):
+        try:
+            self.config[section_name][key]
+            return True
+        except KeyError:
+            return False
+
+    def get_config(self, section_name, key, default=None):
+        try:
+            return self.config[section_name][key]
+        except KeyError:
+            return default
diff --git a/tests/unit/auth/test_sigv4.py b/tests/unit/auth/test_sigv4.py
index 86d21f1d..a35bfd68 100644
--- a/tests/unit/auth/test_sigv4.py
+++ b/tests/unit/auth/test_sigv4.py
@@ -20,11 +20,14 @@
 # IN THE SOFTWARE.
 #
 import copy
+import mock
 from mock import Mock
-from tests.unit import unittest
+import os
+from tests.unit import unittest, MockServiceWithConfigTestCase
 
 from boto.auth import HmacAuthV4Handler
 from boto.auth import S3HmacAuthV4Handler
+from boto.auth import detect_potential_s3sigv4
 from boto.connection import HTTPRequest
 
 
@@ -431,3 +434,45 @@ e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"""
         request = self.auth.mangle_path_and_params(request)
         authed_req = self.auth.canonical_request(request)
         self.assertEqual(authed_req, expected)
+
+
+class FakeS3Connection(object):
+    def __init__(self, *args, **kwargs):
+        self.host = kwargs.pop('host', None)
+
+    @detect_potential_s3sigv4
+    def _required_auth_capability(self):
+        return ['nope']
+
+    def _mexe(self, *args, **kwargs):
+        pass
+
+
+class TestSigV4OptIn(MockServiceWithConfigTestCase):
+    connection_class = FakeS3Connection
+
+    def test_sigv4_opt_out(self):
+        # Default is opt-out.
+        fake = FakeS3Connection(host='s3.amazonaws.com')
+        self.assertEqual(fake._required_auth_capability(), ['nope'])
+
+    def test_sigv4_non_optional(self):
+        # SigV4 is mandatory for this region; no opt-out is possible.
+ fake = FakeS3Connection(host='s3.cn-north-1.amazonaws.com.cn') + self.assertEqual(fake._required_auth_capability(), ['hmac-v4-s3']) + + def test_sigv4_opt_in_config(self): + # Opt-in via the config. + self.config = { + 's3': { + 'use-sigv4': True, + }, + } + fake = FakeS3Connection() + self.assertEqual(fake._required_auth_capability(), ['hmac-v4-s3']) + + def test_sigv4_opt_in_env(self): + # Opt-in via the ENV. + self.environ['S3_USE_SIGV4'] = True + fake = FakeS3Connection(host='s3.amazonaws.com') + self.assertEqual(fake._required_auth_capability(), ['hmac-v4-s3']) diff --git a/tests/unit/dynamodb2/test_table.py b/tests/unit/dynamodb2/test_table.py index b7b40b95..a77bba6c 100644 --- a/tests/unit/dynamodb2/test_table.py +++ b/tests/unit/dynamodb2/test_table.py @@ -821,11 +821,33 @@ class ResultSetTestCase(unittest.TestCase): def setUp(self): super(ResultSetTestCase, self).setUp() self.results = ResultSet() - self.results.to_call(fake_results, 'john', greeting='Hello', limit=20) + self.result_function = mock.MagicMock(side_effect=fake_results) + self.results.to_call(self.result_function, 'john', greeting='Hello', limit=20) def test_first_key(self): self.assertEqual(self.results.first_key, 'exclusive_start_key') + def test_max_page_size_fetch_more(self): + self.results = ResultSet(max_page_size=10) + self.results.to_call(self.result_function, 'john', greeting='Hello') + self.results.fetch_more() + self.result_function.assert_called_with('john', greeting='Hello', limit=10) + self.result_function.reset_mock() + + def test_max_page_size_and_smaller_limit_fetch_more(self): + self.results = ResultSet(max_page_size=10) + self.results.to_call(self.result_function, 'john', greeting='Hello', limit=5) + self.results.fetch_more() + self.result_function.assert_called_with('john', greeting='Hello', limit=5) + self.result_function.reset_mock() + + def test_max_page_size_and_bigger_limit_fetch_more(self): + self.results = ResultSet(max_page_size=10) + 
self.results.to_call(self.result_function, 'john', greeting='Hello', limit=15) + self.results.fetch_more() + self.result_function.assert_called_with('john', greeting='Hello', limit=10) + self.result_function.reset_mock() + def test_fetch_more(self): # First "page". self.results.fetch_more() @@ -837,6 +859,9 @@ class ResultSetTestCase(unittest.TestCase): 'Hello john #4', ]) + self.result_function.assert_called_with('john', greeting='Hello', limit=20) + self.result_function.reset_mock() + # Fake in a last key. self.results._last_key_seen = 4 # Second "page". @@ -849,6 +874,9 @@ class ResultSetTestCase(unittest.TestCase): 'Hello john #9', ]) + self.result_function.assert_called_with('john', greeting='Hello', limit=20, exclusive_start_key=4) + self.result_function.reset_mock() + # Fake in a last key. self.results._last_key_seen = 9 # Last "page". diff --git a/tests/unit/ec2/test_blockdevicemapping.py b/tests/unit/ec2/test_blockdevicemapping.py index 78539744..7b0e922d 100644 --- a/tests/unit/ec2/test_blockdevicemapping.py +++ b/tests/unit/ec2/test_blockdevicemapping.py @@ -65,7 +65,7 @@ class BlockDeviceMappingTests(unittest.TestCase): retval = self.block_device_mapping.startElement("virtualName", None, None) assert self.block_device_type_eq(retval, BlockDeviceType(self.block_device_mapping)) - def test_endElement_with_name_device_sets_current_name(self): + def test_endElement_with_name_device_sets_current_name_dev_null(self): self.block_device_mapping.endElement("device", "/dev/null", None) self.assertEqual(self.block_device_mapping.current_name, "/dev/null") diff --git a/tests/unit/emr/test_connection.py b/tests/unit/emr/test_connection.py index 189e6741..8116a121 100644 --- a/tests/unit/emr/test_connection.py +++ b/tests/unit/emr/test_connection.py @@ -27,7 +27,7 @@ from datetime import datetime from tests.unit import AWSMockServiceTestCase from boto.emr.connection import EmrConnection -from boto.emr.emrobject import JobFlowStepList +from boto.emr.emrobject import 
JobFlowStepList, StepSummaryList # These tests are just checking the basic structure of # the Elastic MapReduce code, by picking a few calls @@ -162,7 +162,7 @@ class TestListSteps(AWSMockServiceTestCase): connection_class = EmrConnection def default_body(self): - return """<ListStepsOutput><Steps></Steps></ListStepsOutput>""" + return """<ListStepsOutput><Steps><member><Name>Step 1</Name></member></Steps></ListStepsOutput>""" def test_list_steps(self): self.set_http_response(200) @@ -177,6 +177,8 @@ class TestListSteps(AWSMockServiceTestCase): 'ClusterId': 'j-123', 'Version': '2009-03-31' }) + self.assertTrue(isinstance(response, StepSummaryList)) + self.assertEqual(response.steps[0].name, 'Step 1') def test_list_steps_with_states(self): self.set_http_response(200) @@ -193,7 +195,8 @@ class TestListSteps(AWSMockServiceTestCase): 'StepStateList.member.2': 'FAILED', 'Version': '2009-03-31' }) - + self.assertTrue(isinstance(response, StepSummaryList)) + self.assertEqual(response.steps[0].name, 'Step 1') class TestListBootstrapActions(AWSMockServiceTestCase): connection_class = EmrConnection diff --git a/tests/unit/provider/test_provider.py b/tests/unit/provider/test_provider.py index 20fd0efe..0162b8ab 100644 --- a/tests/unit/provider/test_provider.py +++ b/tests/unit/provider/test_provider.py @@ -80,6 +80,29 @@ class TestProvider(unittest.TestCase): self.assertEqual(p.secret_key, 'env_secret_key') self.assertEqual(p.security_token, 'env_security_token') + def test_config_profile_values_are_used(self): + self.config = { + 'profile dev': { + 'aws_access_key_id': 'dev_access_key', + 'aws_secret_access_key': 'dev_secret_key', + }, 'profile prod': { + 'aws_access_key_id': 'prod_access_key', + 'aws_secret_access_key': 'prod_secret_key', + }, 'Credentials': { + 'aws_access_key_id': 'default_access_key', + 'aws_secret_access_key': 'default_secret_key' + } + } + p = provider.Provider('aws', profile_name='prod') + self.assertEqual(p.access_key, 'prod_access_key') + 
self.assertEqual(p.secret_key, 'prod_secret_key') + q = provider.Provider('aws', profile_name='dev') + self.assertEqual(q.access_key, 'dev_access_key') + self.assertEqual(q.secret_key, 'dev_secret_key') + r = provider.Provider('aws', profile_name='doesntexist') + self.assertEqual(r.access_key, 'default_access_key') + self.assertEqual(r.secret_key, 'default_secret_key') + def test_config_values_are_used(self): self.config = { 'Credentials': { @@ -173,7 +196,7 @@ class TestProvider(unittest.TestCase): 'meta-data/iam/security-credentials/') def test_refresh_credentials(self): - now = datetime.now() + now = datetime.utcnow() first_expiration = (now + timedelta(seconds=10)).strftime( "%Y-%m-%dT%H:%M:%SZ") credentials = { diff --git a/tests/unit/s3/test_bucket.py b/tests/unit/s3/test_bucket.py index 5bbb2121..2b36f254 100644 --- a/tests/unit/s3/test_bucket.py +++ b/tests/unit/s3/test_bucket.py @@ -4,6 +4,7 @@ from mock import patch from tests.unit import unittest from tests.unit import AWSMockServiceTestCase +from boto.exception import BotoClientError from boto.s3.connection import S3Connection from boto.s3.bucket import Bucket from boto.s3.deletemarker import DeleteMarker @@ -176,3 +177,21 @@ class TestS3Bucket(AWSMockServiceTestCase): ], 'uploads', None, encoding_type='url' ) + + @patch.object(Bucket, 'get_all_keys') + @patch.object(Bucket, '_get_key_internal') + def test_bucket_get_key_no_validate(self, mock_gki, mock_gak): + self.set_http_response(status_code=200) + bucket = self.service_connection.get_bucket('mybucket') + key = bucket.get_key('mykey', validate=False) + + self.assertEqual(len(mock_gki.mock_calls), 0) + self.assertTrue(isinstance(key, Key)) + self.assertEqual(key.name, 'mykey') + + with self.assertRaises(BotoClientError): + bucket.get_key( + 'mykey', + version_id='something', + validate=False + ) diff --git a/tests/unit/s3/test_connection.py b/tests/unit/s3/test_connection.py index 5fe47712..f4a1d51d 100644 --- a/tests/unit/s3/test_connection.py +++ 
b/tests/unit/s3/test_connection.py @@ -19,10 +19,13 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. # +import mock + from tests.unit import unittest from tests.unit import AWSMockServiceTestCase +from tests.unit import MockServiceWithConfigTestCase -from boto.s3.connection import S3Connection +from boto.s3.connection import S3Connection, HostRequiredError class TestSignatureAlteration(AWSMockServiceTestCase): @@ -46,6 +49,49 @@ class TestSignatureAlteration(AWSMockServiceTestCase): ) +class TestSigV4HostError(MockServiceWithConfigTestCase): + connection_class = S3Connection + + def test_historical_behavior(self): + self.assertEqual( + self.service_connection._required_auth_capability(), + ['s3'] + ) + self.assertEqual(self.service_connection.host, 's3.amazonaws.com') + + def test_sigv4_opt_in(self): + # Switch it at the config, so we can check to see how the host is + # handled. + self.config = { + 's3': { + 'use-sigv4': True, + } + } + + with self.assertRaises(HostRequiredError): + # No host+SigV4 == KABOOM + self.connection_class( + aws_access_key_id='less', + aws_secret_access_key='more' + ) + + # Ensure passing a ``host`` still works. 
+ conn = self.connection_class( + aws_access_key_id='less', + aws_secret_access_key='more', + host='s3.cn-north-1.amazonaws.com.cn' + ) + self.assertEqual( + conn._required_auth_capability(), + ['hmac-v4-s3'] + ) + self.assertEqual( + conn.host, + 's3.cn-north-1.amazonaws.com.cn' + ) + + + class TestUnicodeCallingFormat(AWSMockServiceTestCase): connection_class = S3Connection diff --git a/tests/unit/test_connection.py b/tests/unit/test_connection.py index 6f3a6053..9d4b106c 100644 --- a/tests/unit/test_connection.py +++ b/tests/unit/test_connection.py @@ -82,7 +82,7 @@ class MockAWSService(AWSQueryConnection): proxy_user=None, proxy_pass=None, debug=0, https_connection_factory=None, region=None, path='/', api_version=None, security_token=None, - validate_certs=True): + validate_certs=True, profile_name=None): self.region = region if host is None: host = self.region.endpoint @@ -93,7 +93,8 @@ class MockAWSService(AWSQueryConnection): host, debug, https_connection_factory, path, security_token, - validate_certs=validate_certs) + validate_certs=validate_certs, + profile_name=profile_name) class TestAWSAuthConnection(unittest.TestCase): def test_get_path(self): diff --git a/tests/unit/test_endpoints.json b/tests/unit/test_endpoints.json new file mode 100644 index 00000000..325176a7 --- /dev/null +++ b/tests/unit/test_endpoints.json @@ -0,0 +1,5 @@ +{ + "ec2": { + "test-1": "ec2.test-1.amazonaws.com" + } +} diff --git a/tests/unit/test_regioninfo.py b/tests/unit/test_regioninfo.py new file mode 100644 index 00000000..0f492788 --- /dev/null +++ b/tests/unit/test_regioninfo.py @@ -0,0 +1,146 @@ +# Copyright (c) 2014 Amazon.com, Inc. or its affiliates. 
All Rights Reserved +# +# Permission is hereby granted, free of charge, to any person obtaining a +# copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, dis- +# tribute, sublicense, and/or sell copies of the Software, and to permit +# persons to whom the Software is furnished to do so, subject to the fol- +# lowing conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL- +# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT +# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. 
+# +import os +from tests.unit import unittest + +import boto +from boto.compat import json +from boto.exception import BotoServerError +from boto.regioninfo import RegionInfo, load_endpoint_json, merge_endpoints +from boto.regioninfo import load_regions, get_regions + + +class TestRegionInfo(object): + def __init__(self, connection=None, name=None, endpoint=None, + connection_cls=None): + self.connection = connection + self.name = name + self.endpoint = endpoint + self.connection_cls = connection_cls + + +class FakeConn(object): + pass + + +class TestEndpointLoading(unittest.TestCase): + def setUp(self): + super(TestEndpointLoading, self).setUp() + + def test_load_endpoint_json(self): + endpoints = load_endpoint_json(boto.ENDPOINTS_PATH) + self.assertTrue('ec2' in endpoints) + self.assertEqual( + endpoints['ec2']['us-east-1'], + 'ec2.us-east-1.amazonaws.com' + ) + + def test_merge_endpoints(self): + defaults = { + 'ec2': { + 'us-east-1': 'ec2.us-east-1.amazonaws.com', + 'us-west-1': 'ec2.us-west-1.amazonaws.com', + } + } + additions = { + # Top-level addition. + 's3': { + 'us-east-1': 's3.amazonaws.com' + }, + 'ec2': { + # Overwrite. This doesn't exist, just test data. + 'us-east-1': 'ec2.auto-resolve.amazonaws.com', + # Deep addition. + 'us-west-2': 'ec2.us-west-2.amazonaws.com', + } + } + + endpoints = merge_endpoints(defaults, additions) + self.assertEqual(endpoints, { + 'ec2': { + 'us-east-1': 'ec2.auto-resolve.amazonaws.com', + 'us-west-1': 'ec2.us-west-1.amazonaws.com', + 'us-west-2': 'ec2.us-west-2.amazonaws.com', + }, + 's3': { + 'us-east-1': 's3.amazonaws.com' + } + }) + + def test_load_regions(self): + # Just the defaults. + endpoints = load_regions() + self.assertTrue('us-east-1' in endpoints['ec2']) + self.assertFalse('test-1' in endpoints['ec2']) + + # With ENV overrides. 
+ os.environ['BOTO_ENDPOINTS'] = os.path.join( + os.path.dirname(__file__), + 'test_endpoints.json' + ) + self.addCleanup(os.environ.pop, 'BOTO_ENDPOINTS') + endpoints = load_regions() + self.assertTrue('us-east-1' in endpoints['ec2']) + self.assertTrue('test-1' in endpoints['ec2']) + self.assertEqual(endpoints['ec2']['test-1'], 'ec2.test-1.amazonaws.com') + + def test_get_regions(self): + # With defaults. + ec2_regions = get_regions('ec2') + self.assertEqual(len(ec2_regions), 10) + west_2 = None + + for region_info in ec2_regions: + if region_info.name == 'us-west-2': + west_2 = region_info + break + + self.assertNotEqual(west_2, None, "Couldn't find the us-west-2 region!") + self.assertTrue(isinstance(west_2, RegionInfo)) + self.assertEqual(west_2.name, 'us-west-2') + self.assertEqual(west_2.endpoint, 'ec2.us-west-2.amazonaws.com') + self.assertEqual(west_2.connection_cls, None) + + def test_get_regions_overrides(self): + ec2_regions = get_regions( + 'ec2', + region_cls=TestRegionInfo, + connection_cls=FakeConn + ) + self.assertEqual(len(ec2_regions), 10) + west_2 = None + + for region_info in ec2_regions: + if region_info.name == 'us-west-2': + west_2 = region_info + break + + self.assertNotEqual(west_2, None, "Couldn't find the us-west-2 region!") + self.assertFalse(isinstance(west_2, RegionInfo)) + self.assertTrue(isinstance(west_2, TestRegionInfo)) + self.assertEqual(west_2.name, 'us-west-2') + self.assertEqual(west_2.endpoint, 'ec2.us-west-2.amazonaws.com') + self.assertEqual(west_2.connection_cls, FakeConn) + + +if __name__ == '__main__': + unittest.main() |