-rw-r--r--  README.rst                                    |   4
-rw-r--r--  boto/__init__.py                              |   2
-rw-r--r--  boto/beanstalk/layer1.py                      |  16
-rw-r--r--  boto/cloudtrail/layer1.py                     |   4
-rw-r--r--  boto/connection.py                            |   3
-rw-r--r--  boto/ec2/cloudwatch/__init__.py               |   2
-rw-r--r--  boto/ec2/connection.py                        |  14
-rw-r--r--  boto/ec2/elb/__init__.py                      |  70
-rw-r--r--  boto/ec2/elb/attributes.py                    |  61
-rw-r--r--  boto/ec2/elb/loadbalancer.py                  |  53
-rw-r--r--  boto/gs/bucket.py                             |   8
-rw-r--r--  boto/iam/connection.py                        | 125
-rw-r--r--  boto/rds/__init__.py                          |  42
-rw-r--r--  boto/s3/bucket.py                             |  37
-rw-r--r--  boto/s3/key.py                                |  13
-rw-r--r--  boto/sts/connection.py                        | 109
-rw-r--r--  docs/source/dynamodb2_tut.rst                 |   4
-rw-r--r--  docs/source/index.rst                         |   1
-rw-r--r--  docs/source/releasenotes/v2.18.0.rst          |  41
-rw-r--r--  setup.cfg                                     |   2
-rw-r--r--  tests/integration/rds/test_promote_modify.py  | 138
-rw-r--r--  tests/integration/s3/test_key.py              |  18
-rw-r--r--  tests/integration/s3/test_multipart.py        |   8
-rw-r--r--  tests/unit/ec2/elb/test_attribute.py          | 178
-rw-r--r--  tests/unit/iam/__init__.py                    |   0
-rw-r--r--  tests/unit/iam/test_connection.py             | 166
-rw-r--r--  tests/unit/s3/test_key.py                     |  16
-rw-r--r--  tests/unit/sts/test_connection.py             |  59
28 files changed, 1148 insertions, 46 deletions
diff --git a/README.rst b/README.rst
index 0cbaee4e..1c15dea3 100644
--- a/README.rst
+++ b/README.rst
@@ -1,9 +1,9 @@
####
boto
####
-boto 2.17.0
+boto 2.18.0
-Released: 14-November-2013
+Released: 22-November-2013
.. image:: https://travis-ci.org/boto/boto.png?branch=develop
:target: https://travis-ci.org/boto/boto
diff --git a/boto/__init__.py b/boto/__init__.py
index 786f0f85..6e41b110 100644
--- a/boto/__init__.py
+++ b/boto/__init__.py
@@ -36,7 +36,7 @@ import logging.config
import urlparse
from boto.exception import InvalidUriError
-__version__ = '2.17.0'
+__version__ = '2.18.0'
Version = __version__ # for backware compatibility
UserAgent = 'Boto/%s Python/%s %s/%s' % (
diff --git a/boto/beanstalk/layer1.py b/boto/beanstalk/layer1.py
index e63f70e7..09612f14 100644
--- a/boto/beanstalk/layer1.py
+++ b/boto/beanstalk/layer1.py
@@ -245,14 +245,6 @@ class Layer1(AWSQueryConnection):
version to be deployed. If no application is found with this name,
CreateEnvironment returns an InvalidParameterValue error.
- :type version_label: string
- :param version_label: The name of the application version to deploy. If
- the specified application has no associated application versions,
- AWS Elastic Beanstalk UpdateEnvironment returns an
- InvalidParameterValue error. Default: If not specified, AWS
- Elastic Beanstalk attempts to launch the most recently created
- application version.
-
:type environment_name: string
:param environment_name: A unique name for the deployment environment.
Used in the application URL. Constraint: Must be from 4 to 23
@@ -264,6 +256,14 @@ class Layer1(AWSQueryConnection):
name becomes part of the CNAME, and therefore part of the visible
URL for your application.
+ :type version_label: string
+ :param version_label: The name of the application version to deploy. If
+ the specified application has no associated application versions,
+ AWS Elastic Beanstalk UpdateEnvironment returns an
+ InvalidParameterValue error. Default: If not specified, AWS
+ Elastic Beanstalk attempts to launch the most recently created
+ application version.
+
:type template_name: string
:param template_name: The name of the configuration template to
use in deployment. If no configuration template is found with this
diff --git a/boto/cloudtrail/layer1.py b/boto/cloudtrail/layer1.py
index e1e21453..1ee41d4a 100644
--- a/boto/cloudtrail/layer1.py
+++ b/boto/cloudtrail/layer1.py
@@ -128,7 +128,7 @@ class CloudTrailConnection(AWSQueryConnection):
**TrailAlreadyExists**
- At attempt was made to create a trail with a name that already
+ An attempt was made to create a trail with a name that already
exists.
**S3BucketDoesNotExist**
@@ -138,7 +138,7 @@ class CloudTrailConnection(AWSQueryConnection):
**InsufficientS3BucketPolicy**
Policy on Amazon S3 bucket does not permit CloudTrail to write
- to your bucket. See the AWS AWS CloudTrail User Guide for the
+ to your bucket. See the AWS CloudTrail User Guide for the
required bucket policy.
**InsufficientSnsTopicPolicy**
diff --git a/boto/connection.py b/boto/connection.py
index 78c7a223..7d699eaa 100644
--- a/boto/connection.py
+++ b/boto/connection.py
@@ -372,7 +372,8 @@ class HTTPRequest(object):
for key in self.headers:
val = self.headers[key]
if isinstance(val, unicode):
- self.headers[key] = urllib.quote_plus(val.encode('utf-8'))
+ safe = '!"#$%&\'()*+,/:;<=>?@[\\]^`{|}~'
+ self.headers[key] = urllib.quote_plus(val.encode('utf-8'), safe)
connection._auth_handler.add_auth(self, **kwargs)
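For reference, a minimal sketch of what the widened safe set changes, assuming a unicode Content-Disposition value (the filename is only an example): reserved ASCII characters such as '=' and ',' stay readable and only non-ASCII bytes are percent-encoded, which matches the new S3 integration test.

    # -*- coding: utf-8 -*-
    import urllib

    safe = '!"#$%&\'()*+,/:;<=>?@[\\]^`{|}~'
    value = u'filename=Schöne Zeit.txt'          # example header value
    print urllib.quote_plus(value.encode('utf-8'), safe)
    # -> filename=Sch%C3%B6ne+Zeit.txt (previously '=' and ',' were escaped too)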
diff --git a/boto/ec2/cloudwatch/__init__.py b/boto/ec2/cloudwatch/__init__.py
index 82c529e4..646f71e9 100644
--- a/boto/ec2/cloudwatch/__init__.py
+++ b/boto/ec2/cloudwatch/__init__.py
@@ -117,7 +117,7 @@ class CloudWatchConnection(AWSQueryConnection):
validate_certs=validate_certs)
def _required_auth_capability(self):
- return ['ec2']
+ return ['hmac-v4']
def build_dimension_param(self, dimension, params):
prefix = 'Dimensions.member'
diff --git a/boto/ec2/connection.py b/boto/ec2/connection.py
index 803f3fed..2b75fde2 100644
--- a/boto/ec2/connection.py
+++ b/boto/ec2/connection.py
@@ -62,6 +62,7 @@ from boto.ec2.instancestatus import InstanceStatusSet
from boto.ec2.volumestatus import VolumeStatusSet
from boto.ec2.networkinterface import NetworkInterface
from boto.ec2.attributes import AccountAttribute, VPCAttribute
+from boto.ec2.blockdevicemapping import BlockDeviceMapping, BlockDeviceType
from boto.exception import EC2ResponseError
#boto.set_stream_logger('ec2')
@@ -260,7 +261,8 @@ class EC2Connection(AWSQueryConnection):
def register_image(self, name=None, description=None, image_location=None,
architecture=None, kernel_id=None, ramdisk_id=None,
root_device_name=None, block_device_map=None,
- dry_run=False, virtualization_type=None):
+ dry_run=False, virtualization_type=None,
+ snapshot_id=None):
"""
Register an image.
@@ -299,6 +301,11 @@ class EC2Connection(AWSQueryConnection):
* paravirtual
* hvm
+ :type snapshot_id: string
+ :param snapshot_id: A snapshot ID for the snapshot to be used
+ as root device for the image. Mutually exclusive with
+ block_device_map, requires root_device_name
+
:rtype: string
:return: The new image id
"""
@@ -317,12 +324,17 @@ class EC2Connection(AWSQueryConnection):
params['ImageLocation'] = image_location
if root_device_name:
params['RootDeviceName'] = root_device_name
+ if snapshot_id:
+ root_vol = BlockDeviceType(snapshot_id=snapshot_id)
+ block_device_map = BlockDeviceMapping()
+ block_device_map[root_device_name] = root_vol
if block_device_map:
block_device_map.ec2_build_list_params(params)
if dry_run:
params['DryRun'] = 'true'
if virtualization_type:
params['VirtualizationType'] = virtualization_type
+
rs = self.get_object('RegisterImage', params, ResultSet, verb='POST')
image_id = getattr(rs, 'imageId', None)
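A minimal usage sketch of the new snapshot_id shortcut; the region, snapshot ID and names below are placeholders. It builds a one-entry block device mapping rooted at root_device_name, so it cannot be combined with an explicit block_device_map.

    from boto.ec2 import connect_to_region

    conn = connect_to_region('us-east-1')        # placeholder region
    image_id = conn.register_image(
        name='my-ebs-image',
        description='AMI registered from an EBS snapshot',
        root_device_name='/dev/sda1',            # required with snapshot_id
        snapshot_id='snap-12345678')             # placeholder snapshot
    print image_id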
diff --git a/boto/ec2/elb/__init__.py b/boto/ec2/elb/__init__.py
index e5ae5886..d36e1372 100644
--- a/boto/ec2/elb/__init__.py
+++ b/boto/ec2/elb/__init__.py
@@ -390,6 +390,76 @@ class ELBConnection(AWSQueryConnection):
params, LoadBalancerZones)
return obj.zones
+ def modify_lb_attribute(self, load_balancer_name, attribute, value):
+ """Changes an attribute of a Load Balancer
+
+ :type load_balancer_name: string
+ :param load_balancer_name: The name of the Load Balancer
+
+ :type attribute: string
+ :param attribute: The attribute you wish to change.
+
+ * crossZoneLoadBalancing - Boolean (true)
+
+ :type value: string
+ :param value: The new value for the attribute
+
+ :rtype: bool
+ :return: Whether the operation succeeded or not
+ """
+
+ bool_reqs = ('crosszoneloadbalancing',)
+ if attribute.lower() in bool_reqs:
+ if isinstance(value, bool):
+ if value:
+ value = 'true'
+ else:
+ value = 'false'
+
+ params = {'LoadBalancerName': load_balancer_name}
+ if attribute.lower() == 'crosszoneloadbalancing':
+ params['LoadBalancerAttributes.CrossZoneLoadBalancing.Enabled'
+ ] = value
+ else:
+ raise ValueError('InvalidAttribute', attribute)
+ return self.get_status('ModifyLoadBalancerAttributes', params,
+ verb='GET')
+
+ def get_all_lb_attributes(self, load_balancer_name):
+ """Gets all Attributes of a Load Balancer
+
+ :type load_balancer_name: string
+ :param load_balancer_name: The name of the Load Balancer
+
+        :rtype: boto.ec2.elb.attributes.LbAttributes
+ :return: The attribute object of the ELB.
+ """
+ from boto.ec2.elb.attributes import LbAttributes
+ params = {'LoadBalancerName': load_balancer_name}
+ return self.get_object('DescribeLoadBalancerAttributes',
+ params, LbAttributes)
+
+ def get_lb_attribute(self, load_balancer_name, attribute):
+ """Gets an attribute of a Load Balancer
+
+ This will make an EC2 call for each method call.
+
+ :type load_balancer_name: string
+ :param load_balancer_name: The name of the Load Balancer
+
+ :type attribute: string
+ :param attribute: The attribute you wish to see.
+
+ * crossZoneLoadBalancing - Boolean
+
+ :rtype: Attribute dependent
+ :return: The new value for the attribute
+ """
+ attributes = self.get_all_lb_attributes(load_balancer_name)
+ if attribute.lower() == 'crosszoneloadbalancing':
+ return attributes.cross_zone_load_balancing.enabled
+ return None
+
def register_instances(self, load_balancer_name, instances):
"""
Add new Instances to an existing Load Balancer.
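A short sketch of the new connection-level attribute calls; the region and load balancer name are placeholders, and crossZoneLoadBalancing is the only attribute supported here.

    import boto.ec2.elb

    elb = boto.ec2.elb.connect_to_region('us-east-1')
    elb.modify_lb_attribute('my-elb', 'crossZoneLoadBalancing', True)
    attrs = elb.get_all_lb_attributes('my-elb')
    print attrs.cross_zone_load_balancing.enabled                   # True
    print elb.get_lb_attribute('my-elb', 'crossZoneLoadBalancing')  # True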
diff --git a/boto/ec2/elb/attributes.py b/boto/ec2/elb/attributes.py
new file mode 100644
index 00000000..0d70a642
--- /dev/null
+++ b/boto/ec2/elb/attributes.py
@@ -0,0 +1,61 @@
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish, dis-
+# tribute, sublicense, and/or sell copies of the Software, and to permit
+# persons to whom the Software is furnished to do so, subject to the fol-
+# lowing conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
+# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+#
+# Created by Chris Huegle for TellApart, Inc.
+
+class CrossZoneLoadBalancingAttribute(object):
+ """
+    Represents the CrossZoneLoadBalancing segment of ELB Attributes.
+ """
+ def __init__(self, connection=None):
+ self.enabled = None
+
+ def __repr__(self):
+ return 'CrossZoneLoadBalancingAttribute(%s)' % (
+ self.enabled)
+
+ def startElement(self, name, attrs, connection):
+ pass
+
+ def endElement(self, name, value, connection):
+ if name == 'Enabled':
+ if value.lower() == 'true':
+ self.enabled = True
+ else:
+ self.enabled = False
+
+class LbAttributes(object):
+ """
+ Represents the Attributes of an Elastic Load Balancer.
+ """
+ def __init__(self, connection=None):
+ self.connection = connection
+ self.cross_zone_load_balancing = CrossZoneLoadBalancingAttribute(
+ self.connection)
+
+ def __repr__(self):
+ return 'LbAttributes(%s)' % (
+ repr(self.cross_zone_load_balancing))
+
+ def startElement(self, name, attrs, connection):
+ if name == 'CrossZoneLoadBalancing':
+ return self.cross_zone_load_balancing
+
+ def endElement(self, name, value, connection):
+ pass
diff --git a/boto/ec2/elb/loadbalancer.py b/boto/ec2/elb/loadbalancer.py
index fde9ac1f..f6ed3dec 100644
--- a/boto/ec2/elb/loadbalancer.py
+++ b/boto/ec2/elb/loadbalancer.py
@@ -122,6 +122,7 @@ class LoadBalancer(object):
self.vpc_id = None
self.scheme = None
self.backends = None
+ self._attributes = None
def __repr__(self):
return 'LoadBalancer:%s' % self.name
@@ -203,6 +204,58 @@ class LoadBalancer(object):
new_zones = self.connection.disable_availability_zones(self.name, zones)
self.availability_zones = new_zones
+ def get_attributes(self, force=False):
+ """
+ Gets the LbAttributes. The Attributes will be cached.
+
+ :type force: bool
+ :param force: Ignore cache value and reload.
+
+ :rtype: boto.ec2.elb.attributes.LbAttributes
+        :return: The LbAttributes object
+ """
+ if not self._attributes or force:
+ self._attributes = self.connection.get_all_lb_attributes(self.name)
+ return self._attributes
+
+ def is_cross_zone_load_balancing(self, force=False):
+ """
+        Identifies if the ELB is currently configured to do CrossZone Balancing.
+
+ :type force: bool
+ :param force: Ignore cache value and reload.
+
+ :rtype: bool
+ :return: True if balancing is enabled, False if not.
+ """
+ return self.get_attributes(force).cross_zone_load_balancing.enabled
+
+ def enable_cross_zone_load_balancing(self):
+ """
+ Turns on CrossZone Load Balancing for this ELB.
+
+ :rtype: bool
+ :return: True if successful, False if not.
+ """
+ success = self.connection.modify_lb_attribute(
+ self.name, 'crossZoneLoadBalancing', True)
+ if success and self._attributes:
+ self._attributes.cross_zone_load_balancing.enabled = True
+ return success
+
+ def disable_cross_zone_load_balancing(self):
+ """
+ Turns off CrossZone Load Balancing for this ELB.
+
+ :rtype: bool
+ :return: True if successful, False if not.
+ """
+ success = self.connection.modify_lb_attribute(
+ self.name, 'crossZoneLoadBalancing', False)
+ if success and self._attributes:
+ self._attributes.cross_zone_load_balancing.enabled = False
+ return success
+
def register_instances(self, instances):
"""
Adds instances to this load balancer. All instances must be in the same
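The same attributes are also reachable from a LoadBalancer object. A minimal sketch (names are placeholders), noting that get_attributes() caches its result unless force=True is passed:

    import boto.ec2.elb

    elb = boto.ec2.elb.connect_to_region('us-east-1')
    lb = elb.get_all_load_balancers(load_balancer_names=['my-elb'])[0]
    if not lb.is_cross_zone_load_balancing():
        lb.enable_cross_zone_load_balancing()
    print lb.get_attributes(force=True)    # re-fetch, bypassing the cache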
diff --git a/boto/gs/bucket.py b/boto/gs/bucket.py
index 9e989258..3b706408 100644
--- a/boto/gs/bucket.py
+++ b/boto/gs/bucket.py
@@ -221,6 +221,14 @@ class Bucket(S3Bucket):
marker, generation_marker,
headers)
+ def validate_get_all_versions_params(self, params):
+ """
+ See documentation in boto/s3/bucket.py.
+ """
+ self.validate_kwarg_names(params,
+ ['version_id_marker', 'delimiter', 'marker',
+ 'generation_marker', 'prefix', 'max_keys'])
+
def delete_key(self, key_name, headers=None, version_id=None,
mfa_token=None, generation=None):
"""
diff --git a/boto/iam/connection.py b/boto/iam/connection.py
index 9cc15c6f..7176ceda 100644
--- a/boto/iam/connection.py
+++ b/boto/iam/connection.py
@@ -65,11 +65,16 @@ class IAMConnection(AWSQueryConnection):
body = response.read()
boto.log.debug(body)
if response.status == 200:
- e = boto.jsonresponse.Element(list_marker=list_marker,
- pythonize_name=True)
- h = boto.jsonresponse.XmlHandler(e, parent)
- h.parse(body)
- return e
+ if body:
+ e = boto.jsonresponse.Element(list_marker=list_marker,
+ pythonize_name=True)
+ h = boto.jsonresponse.XmlHandler(e, parent)
+ h.parse(body)
+ return e
+ else:
+ # Support empty responses, e.g. deleting a SAML provider
+ # according to the official documentation.
+ return {}
else:
boto.log.error('%s %s' % (response.status, response.reason))
boto.log.error('%s' % body)
@@ -1318,3 +1323,113 @@ class IAMConnection(AWSQueryConnection):
return self.get_response('UpdateAssumeRolePolicy',
{'RoleName': role_name,
'PolicyDocument': policy_document})
+
+ def create_saml_provider(self, saml_metadata_document, name):
+ """
+ Creates an IAM entity to describe an identity provider (IdP)
+ that supports SAML 2.0.
+
+ The SAML provider that you create with this operation can be
+ used as a principal in a role's trust policy to establish a
+ trust relationship between AWS and a SAML identity provider.
+ You can create an IAM role that supports Web-based single
+ sign-on (SSO) to the AWS Management Console or one that
+ supports API access to AWS.
+
+        When you create the SAML provider, you upload a SAML
+ metadata document that you get from your IdP and that includes
+ the issuer's name, expiration information, and keys that can
+ be used to validate the SAML authentication response
+ (assertions) that are received from the IdP. You must generate
+ the metadata document using the identity management software
+ that is used as your organization's IdP.
+ This operation requires `Signature Version 4`_.
+ For more information, see `Giving Console Access Using SAML`_
+ and `Creating Temporary Security Credentials for SAML
+ Federation`_ in the Using Temporary Credentials guide.
+
+ :type saml_metadata_document: string
+ :param saml_metadata_document: An XML document generated by an identity
+ provider (IdP) that supports SAML 2.0. The document includes the
+ issuer's name, expiration information, and keys that can be used to
+ validate the SAML authentication response (assertions) that are
+ received from the IdP. You must generate the metadata document
+ using the identity management software that is used as your
+ organization's IdP.
+ For more information, see `Creating Temporary Security Credentials for
+ SAML Federation`_ in the Using Temporary Security Credentials
+ guide.
+
+ :type name: string
+ :param name: The name of the provider to create.
+
+ """
+ params = {
+ 'SAMLMetadataDocument': saml_metadata_document,
+ 'Name': name,
+ }
+ return self.get_response('CreateSAMLProvider', params)
+
+ def list_saml_providers(self):
+ """
+ Lists the SAML providers in the account.
+ This operation requires `Signature Version 4`_.
+ """
+ return self.get_response('ListSAMLProviders', {})
+
+ def get_saml_provider(self, saml_provider_arn):
+ """
+ Returns the SAML provider metadocument that was uploaded when
+ the provider was created or updated.
+ This operation requires `Signature Version 4`_.
+
+ :type saml_provider_arn: string
+ :param saml_provider_arn: The Amazon Resource Name (ARN) of the SAML
+ provider to get information about.
+
+ """
+        params = {'SAMLProviderArn': saml_provider_arn}
+ return self.get_response('GetSAMLProvider', params)
+
+ def update_saml_provider(self, saml_provider_arn, saml_metadata_document):
+ """
+ Updates the metadata document for an existing SAML provider.
+ This operation requires `Signature Version 4`_.
+
+ :type saml_provider_arn: string
+ :param saml_provider_arn: The Amazon Resource Name (ARN) of the SAML
+ provider to update.
+
+ :type saml_metadata_document: string
+ :param saml_metadata_document: An XML document generated by an identity
+ provider (IdP) that supports SAML 2.0. The document includes the
+ issuer's name, expiration information, and keys that can be used to
+ validate the SAML authentication response (assertions) that are
+ received from the IdP. You must generate the metadata document
+ using the identity management software that is used as your
+ organization's IdP.
+
+ """
+ params = {
+ 'SAMLMetadataDocument': saml_metadata_document,
+ 'SAMLProviderArn': saml_provider_arn,
+ }
+ return self.get_response('UpdateSAMLProvider', params)
+
+ def delete_saml_provider(self, saml_provider_arn):
+ """
+ Deletes a SAML provider.
+
+ Deleting the provider does not update any roles that reference
+ the SAML provider as a principal in their trust policies. Any
+ attempt to assume a role that references a SAML provider that
+ has been deleted will fail.
+ This operation requires `Signature Version 4`_.
+
+ :type saml_provider_arn: string
+ :param saml_provider_arn: The Amazon Resource Name (ARN) of the SAML
+ provider to delete.
+
+ """
+        params = {'SAMLProviderArn': saml_provider_arn}
+ return self.get_response('DeleteSAMLProvider', params)
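A minimal round-trip sketch of the new SAML provider calls; the metadata file name and provider name are placeholders, and the create response is parsed the same way the new unit test does.

    from boto.iam.connection import IAMConnection

    iam = IAMConnection()
    metadata = open('idp-metadata.xml').read()   # placeholder IdP metadata
    created = iam.create_saml_provider(metadata, 'MyIdP')
    arn = created['create_saml_provider_response'] \
                 ['create_saml_provider_result']['saml_provider_arn']
    print iam.list_saml_providers()
    iam.update_saml_provider(arn, metadata)
    iam.delete_saml_provider(arn)                # returns {} on the empty body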
diff --git a/boto/rds/__init__.py b/boto/rds/__init__.py
index 751c5d51..1cf371eb 100644
--- a/boto/rds/__init__.py
+++ b/boto/rds/__init__.py
@@ -516,6 +516,42 @@ class RDSConnection(AWSQueryConnection):
return self.get_object('CreateDBInstanceReadReplica',
params, DBInstance)
+
+ def promote_read_replica(self, id,
+ backup_retention_period=None,
+ preferred_backup_window=None):
+ """
+ Promote a Read Replica to a standalone DB Instance.
+
+ :type id: str
+ :param id: Unique identifier for the new instance.
+ Must contain 1-63 alphanumeric characters.
+ First character must be a letter.
+ May not end with a hyphen or contain two consecutive hyphens
+
+ :type backup_retention_period: int
+ :param backup_retention_period: The number of days for which automated
+ backups are retained. Setting this to
+ zero disables automated backups.
+
+ :type preferred_backup_window: str
+ :param preferred_backup_window: The daily time range during which
+ automated backups are created (if
+                                        enabled). Must be in hh24:mi-hh24:mi
+ format (UTC).
+
+ :rtype: :class:`boto.rds.dbinstance.DBInstance`
+ :return: The new db instance.
+ """
+ params = {'DBInstanceIdentifier': id}
+ if backup_retention_period is not None:
+ params['BackupRetentionPeriod'] = backup_retention_period
+ if preferred_backup_window:
+ params['PreferredBackupWindow'] = preferred_backup_window
+
+ return self.get_object('PromoteReadReplica', params, DBInstance)
+
+
def modify_dbinstance(self, id, param_group=None, security_groups=None,
preferred_maintenance_window=None,
master_password=None, allocated_storage=None,
@@ -526,6 +562,7 @@ class RDSConnection(AWSQueryConnection):
apply_immediately=False,
iops=None,
vpc_security_groups=None,
+ new_instance_id=None,
):
"""
Modify an existing DBInstance.
@@ -606,6 +643,9 @@ class RDSConnection(AWSQueryConnection):
:param vpc_security_groups: List of VPC security group ids or a
VPCSecurityGroupMembership object this DBInstance should be a member of
+ :type new_instance_id: str
+ :param new_instance_id: New name to rename the DBInstance to.
+
:rtype: :class:`boto.rds.dbinstance.DBInstance`
:return: The modified db instance.
"""
@@ -648,6 +688,8 @@ class RDSConnection(AWSQueryConnection):
params['ApplyImmediately'] = 'true'
if iops:
params['Iops'] = iops
+ if new_instance_id:
+ params['NewDBInstanceIdentifier'] = new_instance_id
return self.get_object('ModifyDBInstance', params, DBInstance)
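A compressed sketch of the promote-then-rename flow exercised by the new integration test; the instance identifiers are placeholders, and in practice each call only completes once the instance reports 'available'.

    from boto.rds import RDSConnection

    rds = RDSConnection()
    # Promote the replica to a standalone instance, keeping 1 day of backups.
    rds.promote_read_replica('my-replica', backup_retention_period=1)
    # ...wait for status 'available', then rename it.
    rds.modify_dbinstance('my-replica',
                          new_instance_id='my-standalone-db',
                          apply_immediately=True)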
diff --git a/boto/s3/bucket.py b/boto/s3/bucket.py
index 03d21e13..756915ca 100644
--- a/boto/s3/bucket.py
+++ b/boto/s3/bucket.py
@@ -342,10 +342,19 @@ class Bucket(object):
raise self.connection.provider.storage_response_error(
response.status, response.reason, body)
- def _validate_kwarg_names(self, kwargs, names):
+ def validate_kwarg_names(self, kwargs, names):
+ """
+ Checks that all named arguments are in the specified list of names.
+
+ :type kwargs: dict
+ :param kwargs: Dictionary of kwargs to validate.
+
+ :type names: list
+ :param names: List of possible named arguments.
+ """
for kwarg in kwargs:
if kwarg not in names:
- raise TypeError('Invalid argument %s!' % kwarg)
+ raise TypeError('Invalid argument "%s"!' % kwarg)
def get_all_keys(self, headers=None, **params):
"""
@@ -376,8 +385,8 @@ class Bucket(object):
:return: The result from S3 listing the keys requested
"""
- self._validate_kwarg_names(params, ['maxkeys', 'max_keys', 'prefix',
- 'marker', 'delimiter'])
+ self.validate_kwarg_names(params, ['maxkeys', 'max_keys', 'prefix',
+ 'marker', 'delimiter'])
return self._get_all([('Contents', self.key_class),
('CommonPrefixes', Prefix)],
'', headers, **params)
@@ -415,14 +424,24 @@ class Bucket(object):
:rtype: ResultSet
:return: The result from S3 listing the keys requested
"""
- self._validate_kwarg_names(params, ['maxkeys', 'max_keys', 'prefix',
- 'key_marker', 'version_id_marker',
- 'delimiter'])
+ self.validate_get_all_versions_params(params)
return self._get_all([('Version', self.key_class),
('CommonPrefixes', Prefix),
('DeleteMarker', DeleteMarker)],
'versions', headers, **params)
+ def validate_get_all_versions_params(self, params):
+ """
+ Validate that the parameters passed to get_all_versions are valid.
+ Overridden by subclasses that allow a different set of parameters.
+
+ :type params: dict
+ :param params: Parameters to validate.
+ """
+ self.validate_kwarg_names(
+ params, ['maxkeys', 'max_keys', 'prefix', 'key_marker',
+ 'version_id_marker', 'delimiter'])
+
def get_all_multipart_uploads(self, headers=None, **params):
"""
A lower-level, version-aware method for listing active
@@ -461,8 +480,8 @@ class Bucket(object):
:return: The result from S3 listing the uploads requested
"""
- self._validate_kwarg_names(params, ['max_uploads', 'key_marker',
- 'upload_id_marker'])
+ self.validate_kwarg_names(params, ['max_uploads', 'key_marker',
+ 'upload_id_marker'])
return self._get_all([('Upload', MultiPartUpload),
('CommonPrefixes', Prefix)],
'uploads', headers, **params)
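For illustration, a sketch of how the now-public validator behaves for callers (the bucket name is a placeholder): unknown keyword arguments fail fast with a TypeError instead of being silently sent to S3.

    from boto.s3.connection import S3Connection

    bucket = S3Connection().get_bucket('my-bucket')        # placeholder bucket
    bucket.get_all_versions(prefix='logs/', max_keys=10)   # accepted
    bucket.get_all_versions(bogus='x')  # TypeError: Invalid argument "bogus"!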
diff --git a/boto/s3/key.py b/boto/s3/key.py
index 2b7ae73a..493e2e88 100644
--- a/boto/s3/key.py
+++ b/boto/s3/key.py
@@ -1623,17 +1623,16 @@ class Key(object):
with the stored object in the response. See
http://goo.gl/EWOPb for details.
"""
- fp = open(filename, 'wb')
try:
- self.get_contents_to_file(fp, headers, cb, num_cb, torrent=torrent,
- version_id=version_id,
- res_download_handler=res_download_handler,
- response_headers=response_headers)
+ with open(filename, 'wb') as fp:
+ self.get_contents_to_file(fp, headers, cb, num_cb,
+ torrent=torrent,
+ version_id=version_id,
+ res_download_handler=res_download_handler,
+ response_headers=response_headers)
except Exception:
os.remove(filename)
raise
- finally:
- fp.close()
# if last_modified date was sent from s3, try to set file's timestamp
if self.last_modified != None:
try:
diff --git a/boto/sts/connection.py b/boto/sts/connection.py
index 5f488e26..49dcd771 100644
--- a/boto/sts/connection.py
+++ b/boto/sts/connection.py
@@ -338,6 +338,115 @@ class STSConnection(AWSQueryConnection):
params['ExternalId'] = external_id
return self.get_object('AssumeRole', params, AssumedRole, verb='POST')
+ def assume_role_with_saml(self, role_arn, principal_arn, saml_assertion,
+ policy=None, duration_seconds=None):
+ """
+ Returns a set of temporary security credentials for users who
+ have been authenticated via a SAML authentication response.
+ This operation provides a mechanism for tying an enterprise
+ identity store or directory to role-based AWS access without
+ user-specific credentials or configuration.
+
+ The temporary security credentials returned by this operation
+ consist of an access key ID, a secret access key, and a
+ security token. Applications can use these temporary security
+ credentials to sign calls to AWS services. The credentials are
+ valid for the duration that you specified when calling
+ `AssumeRoleWithSAML`, which can be up to 3600 seconds (1 hour)
+ or until the time specified in the SAML authentication
+ response's `NotOnOrAfter` value, whichever is shorter.
+
+ The maximum duration for a session is 1 hour, and the minimum
+ duration is 15 minutes, even if values outside this range are
+ specified.
+
+ Optionally, you can pass an AWS IAM access policy to this
+ operation. The temporary security credentials that are
+ returned by the operation have the permissions that are
+ associated with the access policy of the role being assumed,
+ except for any permissions explicitly denied by the policy you
+ pass. This gives you a way to further restrict the permissions
+ for the federated user. These policies and any applicable
+ resource-based policies are evaluated when calls to AWS are
+ made using the temporary security credentials.
+
+ Before your application can call `AssumeRoleWithSAML`, you
+ must configure your SAML identity provider (IdP) to issue the
+ claims required by AWS. Additionally, you must use AWS
+ Identity and Access Management (AWS IAM) to create a SAML
+ provider entity in your AWS account that represents your
+ identity provider, and create an AWS IAM role that specifies
+ this SAML provider in its trust policy.
+
+ Calling `AssumeRoleWithSAML` does not require the use of AWS
+ security credentials. The identity of the caller is validated
+ by using keys in the metadata document that is uploaded for
+ the SAML provider entity for your identity provider.
+
+ For more information, see the following resources:
+
+
+ + `Creating Temporary Security Credentials for SAML
+ Federation`_ in the Using Temporary Security Credentials
+ guide.
+ + `SAML Providers`_ in the Using IAM guide.
+        + `Configuring a Relying Party and Claims`_ in the Using IAM
+          guide.
+ + `Creating a Role for SAML-Based Federation`_ in the Using
+ IAM guide.
+
+ :type role_arn: string
+ :param role_arn: The Amazon Resource Name (ARN) of the role that the
+ caller is assuming.
+
+ :type principal_arn: string
+ :param principal_arn: The Amazon Resource Name (ARN) of the SAML
+ provider in AWS IAM that describes the IdP.
+
+ :type saml_assertion: string
+ :param saml_assertion: The base-64 encoded SAML authentication response
+ provided by the IdP.
+ For more information, see `Configuring a Relying Party and Adding
+ Claims`_ in the Using IAM guide.
+
+ :type policy: string
+ :param policy:
+ An AWS IAM policy in JSON format.
+
+ The temporary security credentials that are returned by this operation
+ have the permissions that are associated with the access policy of
+ the role being assumed, except for any permissions explicitly
+ denied by the policy you pass. These policies and any applicable
+ resource-based policies are evaluated when calls to AWS are made
+ using the temporary security credentials.
+
+ The policy must be 2048 bytes or shorter, and its packed size must be
+ less than 450 bytes.
+
+ :type duration_seconds: integer
+ :param duration_seconds:
+ The duration, in seconds, of the role session. The value can range from
+ 900 seconds (15 minutes) to 3600 seconds (1 hour). By default, the
+ value is set to 3600 seconds. An expiration can also be specified
+ in the SAML authentication response's `NotOnOrAfter` value. The
+ actual expiration time is whichever value is shorter.
+
+ The maximum duration for a session is 1 hour, and the minimum duration
+ is 15 minutes, even if values outside this range are specified.
+
+ """
+ params = {
+ 'RoleArn': role_arn,
+ 'PrincipalArn': principal_arn,
+ 'SAMLAssertion': saml_assertion,
+ }
+ if policy is not None:
+ params['Policy'] = policy
+ if duration_seconds is not None:
+ params['DurationSeconds'] = duration_seconds
+ return self.get_object('AssumeRoleWithSAML', params, AssumedRole,
+ verb='POST')
+
def assume_role_with_web_identity(self, role_arn, role_session_name,
web_identity_token, provider_id=None,
policy=None, duration_seconds=None):
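A minimal call sketch; the ARNs are placeholders and saml_assertion stands for the base64-encoded response obtained from the IdP. The returned object mirrors assume_role, as the new unit test shows.

    from boto.sts import STSConnection

    saml_assertion = '...base64 SAML response from the IdP...'  # placeholder
    sts = STSConnection()
    token = sts.assume_role_with_saml(
        role_arn='arn:aws:iam::123456789012:role/MyRole',
        principal_arn='arn:aws:iam::123456789012:saml-provider/MyIdP',
        saml_assertion=saml_assertion,
        duration_seconds=900)
    print token.credentials.access_key, token.credentials.session_token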
diff --git a/docs/source/dynamodb2_tut.rst b/docs/source/dynamodb2_tut.rst
index 27ed9e48..a398481b 100644
--- a/docs/source/dynamodb2_tut.rst
+++ b/docs/source/dynamodb2_tut.rst
@@ -204,8 +204,8 @@ three choices.
The first is sending all the data with the expectation nothing has changed
since you read the data. DynamoDB will verify the data is in the original state
-and, if so, will all of the item's data. If that expectation fails, the call
-will fail::
+and, if so, will send all of the item's data. If that expectation fails, the
+call will fail::
>>> johndoe = users.get_item(username='johndoe')
>>> johndoe['first_name'] = 'Johann'
diff --git a/docs/source/index.rst b/docs/source/index.rst
index ab838685..75ddae9b 100644
--- a/docs/source/index.rst
+++ b/docs/source/index.rst
@@ -115,6 +115,7 @@ Release Notes
.. toctree::
:titlesonly:
+ releasenotes/v2.18.0
releasenotes/v2.17.0
releasenotes/v2.16.0
releasenotes/v2.15.0
diff --git a/docs/source/releasenotes/v2.18.0.rst b/docs/source/releasenotes/v2.18.0.rst
new file mode 100644
index 00000000..268a5625
--- /dev/null
+++ b/docs/source/releasenotes/v2.18.0.rst
@@ -0,0 +1,41 @@
+boto v2.18.0
+============
+
+:date: 2013/11/22
+
+This release adds support for new AWS Identity and Access Management (IAM),
+AWS Security Token Service (STS), Elastic Load Balancing (ELB), Amazon Elastic
+Compute Cloud (EC2), Amazon Relational Database Service (RDS), and Amazon
+Elastic Transcoder APIs and parameters. Amazon Redshift SNS notifications are
+now supported. CloudWatch is updated to use Signature Version 4, issues with
+encoding HTTP headers are fixed, and several services received documentation
+fixes.
+
+
+Features
+--------
+* Add support for new STS and IAM calls related to SAML. (:issue:`1867`,
+ :issue:`1867`, :sha:`1c51d17`)
+* Add SigV4 support to Cloudwatch (:sha:`ef43035`)
+* Add support for ELB Attributes and Cross Zone Balancing. (:issue:`1852`,
+ :issue:`1852`, :sha:`76f8b7f`)
+* Add RDS promote and rename support. (:issue:`1857`, :issue:`1857`,
+ :sha:`0b62c70`)
+* Update EC2 ``get_all_snapshots`` and add support for registering an image
+ with a snapshot. (:issue:`1850`, :issue:`1850`, :sha:`3007956`)
+
+
+Bugfixes
+--------
+* Fix issues related to encoding of values in HTTP headers when using
+ unicode. (:issue:`1864`, :issue:`1864`, :issue:`1839`, :issue:`1829`,
+ :issue:`1828`, :issue:`702`, :sha:`5610dd7`)
+* Fix order of Beanstalk documentation to match param order. (:issue:`1863`,
+ :issue:`1863`, :sha:`a3a29f8`)
+* Make sure file is closed before attempting to delete it when downloading
+ an S3 key. (:issue:`1791`, :sha:`0e6dcbe`)
+* Fix minor CloudTrail documentation typos. (:issue:`1861`, :issue:`1861`,
+ :sha:`256a115`)
+* Fix DynamoDBv2 tutorial sentence with missing verb. (:issue:`1859`,
+ :issue:`1825`, :issue:`1859`, :sha:`0fd5300`)
+* Fix parameter validation for gs (:issue:`1858`, :sha:`6b9a869`)
diff --git a/setup.cfg b/setup.cfg
new file mode 100644
index 00000000..5e409001
--- /dev/null
+++ b/setup.cfg
@@ -0,0 +1,2 @@
+[wheel]
+universal = 1
diff --git a/tests/integration/rds/test_promote_modify.py b/tests/integration/rds/test_promote_modify.py
new file mode 100644
index 00000000..20963ed2
--- /dev/null
+++ b/tests/integration/rds/test_promote_modify.py
@@ -0,0 +1,138 @@
+# Author: Bruce Pennypacker
+#
+# Create a temporary RDS database instance, then create a read-replica of the
+# instance. Once the replica is available, promote it and verify that the
+# promotion succeeds, then rename it. Delete the databases upon completion
+# of the tests.
+#
+# For each step (creating the databases, promoting, etc) we loop for up
+# to 15 minutes to wait for the instance to become available. It should
+# never take that long for any of the steps to complete.
+
+"""
+Check that promotion of read replicas and renaming instances works as expected
+"""
+
+import unittest
+import time
+from boto.rds import RDSConnection
+
+class PromoteReadReplicaTest(unittest.TestCase):
+ rds = True
+
+ def setUp(self):
+ self.conn = RDSConnection()
+ self.masterDB_name = "boto-db-%s" % str(int(time.time()))
+ self.replicaDB_name = "replica-%s" % self.masterDB_name
+ self.renamedDB_name = "renamed-replica-%s" % self.masterDB_name
+
+
+ def tearDown(self):
+ instances = self.conn.get_all_dbinstances()
+ for db in [self.masterDB_name, self.replicaDB_name, self.renamedDB_name]:
+ for i in instances:
+ if i.id == db:
+ self.conn.delete_dbinstance(db, skip_final_snapshot=True)
+
+ def test_promote(self):
+ print '--- running RDS promotion & renaming tests ---'
+ self.masterDB = self.conn.create_dbinstance(self.masterDB_name, 5, 'db.t1.micro', 'root', 'bototestpw')
+
+ # Wait up to 15 minutes for the masterDB to become available
+ print '--- waiting for "%s" to become available ---' % self.masterDB_name
+ wait_timeout = time.time() + (15 * 60)
+ time.sleep(60)
+
+ instances = self.conn.get_all_dbinstances(self.masterDB_name)
+ inst = instances[0]
+
+ while wait_timeout > time.time() and inst.status != 'available':
+ time.sleep(15)
+ instances = self.conn.get_all_dbinstances(self.masterDB_name)
+ inst = instances[0]
+
+ self.assertTrue(inst.status == 'available')
+
+ self.replicaDB = self.conn.create_dbinstance_read_replica(self.replicaDB_name, self.masterDB_name)
+
+ # Wait up to 15 minutes for the replicaDB to become available
+ print '--- waiting for "%s" to become available ---' % self.replicaDB_name
+ wait_timeout = time.time() + (15 * 60)
+ time.sleep(60)
+
+ instances = self.conn.get_all_dbinstances(self.replicaDB_name)
+ inst = instances[0]
+
+ while wait_timeout > time.time() and inst.status != 'available':
+ time.sleep(15)
+ instances = self.conn.get_all_dbinstances(self.replicaDB_name)
+ inst = instances[0]
+
+ self.assertTrue(inst.status == 'available')
+
+ # Promote the replicaDB and wait for it to become available
+ self.replicaDB = self.conn.promote_read_replica(self.replicaDB_name)
+
+ # Wait up to 15 minutes for the replicaDB to become available
+ print '--- waiting for "%s" to be promoted and available ---' % self.replicaDB_name
+ wait_timeout = time.time() + (15 * 60)
+ time.sleep(60)
+
+ instances = self.conn.get_all_dbinstances(self.replicaDB_name)
+ inst = instances[0]
+
+ while wait_timeout > time.time() and inst.status != 'available':
+ time.sleep(15)
+ instances = self.conn.get_all_dbinstances(self.replicaDB_name)
+ inst = instances[0]
+
+ # Verify that the replica is now a standalone instance and no longer
+ # functioning as a read replica
+ self.assertTrue(inst)
+ self.assertTrue(inst.status == 'available')
+ self.assertFalse(inst.status_infos)
+
+ # Verify that the master no longer has any read replicas
+ instances = self.conn.get_all_dbinstances(self.masterDB_name)
+ inst = instances[0]
+ self.assertFalse(inst.read_replica_dbinstance_identifiers)
+
+ print '--- renaming "%s" to "%s" ---' % ( self.replicaDB_name, self.renamedDB_name )
+
+ self.renamedDB = self.conn.modify_dbinstance(self.replicaDB_name, new_instance_id=self.renamedDB_name, apply_immediately=True)
+
+ # Wait up to 15 minutes for the masterDB to become available
+ print '--- waiting for "%s" to exist ---' % self.renamedDB_name
+
+ wait_timeout = time.time() + (15 * 60)
+ time.sleep(60)
+
+ # Wait up to 15 minutes until the new name shows up in the instance table
+ found = False
+ while found == False and wait_timeout > time.time():
+ instances = self.conn.get_all_dbinstances()
+ for i in instances:
+ if i.id == self.renamedDB_name:
+ found = True
+ if found == False:
+ time.sleep(15)
+
+ self.assertTrue(found)
+
+ print '--- waiting for "%s" to become available ---' % self.renamedDB_name
+
+ instances = self.conn.get_all_dbinstances(self.renamedDB_name)
+ inst = instances[0]
+
+ # Now wait for the renamed instance to become available
+ while wait_timeout > time.time() and inst.status != 'available':
+ time.sleep(15)
+ instances = self.conn.get_all_dbinstances(self.renamedDB_name)
+ inst = instances[0]
+
+ self.assertTrue(inst.status == 'available')
+
+ # Since the replica DB was renamed...
+ self.replicaDB = None
+
+ print '--- tests completed ---'
diff --git a/tests/integration/s3/test_key.py b/tests/integration/s3/test_key.py
index ef5831a9..a3c29d13 100644
--- a/tests/integration/s3/test_key.py
+++ b/tests/integration/s3/test_key.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
# Copyright (c) 2012 Mitch Garnaat http://garnaat.org/
# All rights reserved.
#
@@ -27,6 +28,7 @@ Some unit tests for S3 Key
from tests.unit import unittest
import time
import StringIO
+import urllib
from boto.s3.connection import S3Connection
from boto.s3.key import Key
from boto.exception import S3ResponseError
@@ -394,3 +396,19 @@ class S3KeyTest(unittest.TestCase):
check = self.bucket.get_key('test_header_case')
self.assertEqual(check.content_type, 'application/json')
+
+ def test_header_encoding(self):
+ key = self.bucket.new_key('test_header_encoding')
+
+ key.set_metadata('Cache-control', 'public, max-age=500')
+ key.set_metadata('Content-disposition', u'filename=Schöne Zeit.txt')
+ key.set_contents_from_string('foo')
+
+ check = self.bucket.get_key('test_header_encoding')
+
+ self.assertEqual(check.cache_control, 'public, max-age=500')
+ self.assertEqual(check.content_disposition, 'filename=Sch%C3%B6ne+Zeit.txt')
+ self.assertEqual(
+ urllib.unquote_plus(check.content_disposition).decode('utf-8'),
+ 'filename=Schöne Zeit.txt'.decode('utf-8')
+ )
diff --git a/tests/integration/s3/test_multipart.py b/tests/integration/s3/test_multipart.py
index 2ca42b5c..b603c141 100644
--- a/tests/integration/s3/test_multipart.py
+++ b/tests/integration/s3/test_multipart.py
@@ -73,13 +73,7 @@ class S3MultiPartUploadTest(unittest.TestCase):
mpu.upload_part_from_file(fp, part_num=1)
fp.close()
cmpu = mpu.complete_upload()
- # LOL... just found an Amazon bug when it returns the
- # key in the completemultipartupload result. AWS returns
- # ??? instead of the correctly encoded key name. We should
- # fix this to the comment line below when amazon fixes this
- # and this test starts failing due to below assertion.
- self.assertEqual(cmpu.key_name, "???")
- #self.assertEqual(cmpu.key_name, key_name)
+ self.assertEqual(cmpu.key_name, key_name)
self.assertNotEqual(cmpu.etag, None)
def test_list_japanese(self):
diff --git a/tests/unit/ec2/elb/test_attribute.py b/tests/unit/ec2/elb/test_attribute.py
new file mode 100644
index 00000000..7bd499d2
--- /dev/null
+++ b/tests/unit/ec2/elb/test_attribute.py
@@ -0,0 +1,178 @@
+from tests.unit import unittest
+
+import mock
+
+from boto.ec2.elb import ELBConnection
+from boto.ec2.elb import LoadBalancer
+from boto.ec2.elb.attributes import LbAttributes
+
+ATTRIBUTE_GET_TRUE_CZL_RESPONSE = r"""<?xml version="1.0" encoding="UTF-8"?>
+<DescribeLoadBalancerAttributesResponse xmlns="http://elasticloadbalancing.amazonaws.com/doc/2012-06-01/">
+ <DescribeLoadBalancerAttributesResult>
+ <LoadBalancerAttributes>
+ <CrossZoneLoadBalancing>
+ <Enabled>true</Enabled>
+ </CrossZoneLoadBalancing>
+ </LoadBalancerAttributes>
+ </DescribeLoadBalancerAttributesResult>
+<ResponseMetadata>
+ <RequestId>83c88b9d-12b7-11e3-8b82-87b12EXAMPLE</RequestId>
+</ResponseMetadata>
+</DescribeLoadBalancerAttributesResponse>
+"""
+
+ATTRIBUTE_GET_FALSE_CZL_RESPONSE = r"""<?xml version="1.0" encoding="UTF-8"?>
+<DescribeLoadBalancerAttributesResponse xmlns="http://elasticloadbalancing.amazonaws.com/doc/2012-06-01/">
+ <DescribeLoadBalancerAttributesResult>
+ <LoadBalancerAttributes>
+ <CrossZoneLoadBalancing>
+ <Enabled>false</Enabled>
+ </CrossZoneLoadBalancing>
+ </LoadBalancerAttributes>
+ </DescribeLoadBalancerAttributesResult>
+<ResponseMetadata>
+ <RequestId>83c88b9d-12b7-11e3-8b82-87b12EXAMPLE</RequestId>
+</ResponseMetadata>
+</DescribeLoadBalancerAttributesResponse>
+"""
+
+ATTRIBUTE_SET_RESPONSE = r"""<?xml version="1.0" encoding="UTF-8"?>
+<ModifyLoadBalancerAttributesResponse xmlns="http://elasticloadbalancing.amazonaws.com/doc/2012-06-01/">
+<ModifyLoadBalancerAttributesResult/>
+<ResponseMetadata>
+ <RequestId>83c88b9d-12b7-11e3-8b82-87b12EXAMPLE</RequestId>
+</ResponseMetadata>
+</ModifyLoadBalancerAttributesResponse>
+"""
+
+# make_request arguments for setting attributes.
+# Format: (API_COMMAND, API_PARAMS, API_PATH, API_METHOD)
+ATTRIBUTE_SET_CZL_TRUE_REQUEST = (
+ 'ModifyLoadBalancerAttributes',
+ {'LoadBalancerAttributes.CrossZoneLoadBalancing.Enabled': 'true',
+ 'LoadBalancerName': 'test_elb'}, mock.ANY, mock.ANY)
+ATTRIBUTE_SET_CZL_FALSE_REQUEST = (
+ 'ModifyLoadBalancerAttributes',
+ {'LoadBalancerAttributes.CrossZoneLoadBalancing.Enabled': 'false',
+ 'LoadBalancerName': 'test_elb'}, mock.ANY, mock.ANY)
+
+# Tests to be run on an LbAttributes
+# Format:
+# (EC2_RESPONSE_STRING, list( (string_of_attribute_to_test, value) ) )
+ATTRIBUTE_TESTS = [
+ (ATTRIBUTE_GET_TRUE_CZL_RESPONSE,
+ [('cross_zone_load_balancing.enabled', True)]),
+ (ATTRIBUTE_GET_FALSE_CZL_RESPONSE,
+ [('cross_zone_load_balancing.enabled', False)]),
+ ]
+
+class TestLbAttributes(unittest.TestCase):
+ """Tests LB Attributes."""
+ def _setup_mock(self):
+ """Sets up a mock elb request.
+ Returns: response, elb connection and LoadBalancer
+ """
+ mock_response = mock.Mock()
+ mock_response.status = 200
+ elb = ELBConnection(aws_access_key_id='aws_access_key_id',
+ aws_secret_access_key='aws_secret_access_key')
+ elb.make_request = mock.Mock(return_value=mock_response)
+ return mock_response, elb, LoadBalancer(elb, 'test_elb')
+
+ def _verify_attributes(self, attributes, attr_tests):
+ """Verifies an LbAttributes object."""
+ for attr, result in attr_tests:
+ attr_result = attributes
+ for sub_attr in attr.split('.'):
+ attr_result = getattr(attr_result, sub_attr, None)
+ self.assertEqual(attr_result, result)
+
+ def test_get_all_lb_attributes(self):
+ """Tests getting the LbAttributes from the elb.connection."""
+ mock_response, elb, _ = self._setup_mock()
+
+ for response, attr_tests in ATTRIBUTE_TESTS:
+ mock_response.read.return_value = response
+ attributes = elb.get_all_lb_attributes('test_elb')
+ self.assertTrue(isinstance(attributes, LbAttributes))
+ self._verify_attributes(attributes, attr_tests)
+
+ def test_get_lb_attribute(self):
+ """Tests getting a single attribute from elb.connection."""
+ mock_response, elb, _ = self._setup_mock()
+
+ tests = [
+ ('crossZoneLoadBalancing', True, ATTRIBUTE_GET_TRUE_CZL_RESPONSE),
+ ('crossZoneLoadBalancing', False, ATTRIBUTE_GET_FALSE_CZL_RESPONSE),
+ ]
+
+
+ for attr, value, response in tests:
+ mock_response.read.return_value = response
+ status = elb.get_lb_attribute('test_elb', attr)
+ self.assertEqual(status, value)
+
+ def test_modify_lb_attribute(self):
+ """Tests setting the attributes from elb.connection."""
+ mock_response, elb, _ = self._setup_mock()
+
+ tests = [
+ ('crossZoneLoadBalancing', True, ATTRIBUTE_SET_CZL_TRUE_REQUEST),
+ ('crossZoneLoadBalancing', False, ATTRIBUTE_SET_CZL_FALSE_REQUEST),
+ ]
+
+ for attr, value, args in tests:
+ mock_response.read.return_value = ATTRIBUTE_SET_RESPONSE
+ result = elb.modify_lb_attribute('test_elb', attr, value)
+ self.assertTrue(result)
+ elb.make_request.assert_called_with(*args)
+
+ def test_lb_get_attributes(self):
+ """Tests the LbAttributes from the ELB object."""
+ mock_response, _, lb = self._setup_mock()
+
+ for response, attr_tests in ATTRIBUTE_TESTS:
+ mock_response.read.return_value = response
+ attributes = lb.get_attributes(force=True)
+ self.assertTrue(isinstance(attributes, LbAttributes))
+ self._verify_attributes(attributes, attr_tests)
+
+ def test_lb_is_cross_zone_load_balancing(self):
+ """Tests checking is_cross_zone_load_balancing."""
+ mock_response, _, lb = self._setup_mock()
+
+ tests = [
+ # Format: (method, args, result, response)
+ # Gets a true result.
+ (lb.is_cross_zone_load_balancing, [], True,
+ ATTRIBUTE_GET_TRUE_CZL_RESPONSE),
+            # Returns the previous call's cached value.
+ (lb.is_cross_zone_load_balancing, [], True,
+ ATTRIBUTE_GET_FALSE_CZL_RESPONSE),
+ # Gets a false result.
+ (lb.is_cross_zone_load_balancing, [True], False,
+ ATTRIBUTE_GET_FALSE_CZL_RESPONSE),
+ ]
+
+ for method, args, result, response in tests:
+ mock_response.read.return_value = response
+ self.assertEqual(method(*args), result)
+
+ def test_lb_enable_cross_zone_load_balancing(self):
+ """Tests enabling cross zone balancing from LoadBalancer."""
+ mock_response, elb, lb = self._setup_mock()
+
+ mock_response.read.return_value = ATTRIBUTE_SET_RESPONSE
+ self.assertTrue(lb.enable_cross_zone_load_balancing())
+ elb.make_request.assert_called_with(*ATTRIBUTE_SET_CZL_TRUE_REQUEST)
+
+ def test_lb_disable_cross_zone_load_balancing(self):
+ """Tests disabling cross zone balancing from LoadBalancer."""
+ mock_response, elb, lb = self._setup_mock()
+
+ mock_response.read.return_value = ATTRIBUTE_SET_RESPONSE
+ self.assertTrue(lb.disable_cross_zone_load_balancing())
+ elb.make_request.assert_called_with(*ATTRIBUTE_SET_CZL_FALSE_REQUEST)
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/tests/unit/iam/__init__.py b/tests/unit/iam/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/tests/unit/iam/__init__.py
diff --git a/tests/unit/iam/test_connection.py b/tests/unit/iam/test_connection.py
new file mode 100644
index 00000000..2e3e8a4b
--- /dev/null
+++ b/tests/unit/iam/test_connection.py
@@ -0,0 +1,166 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 Amazon.com, Inc. or its affiliates. All Rights Reserved
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish, dis-
+# tribute, sublicense, and/or sell copies of the Software, and to permit
+# persons to whom the Software is furnished to do so, subject to the fol-
+# lowing conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
+# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+#
+
+from tests.unit import unittest
+from boto.iam.connection import IAMConnection
+from tests.unit import AWSMockServiceTestCase
+
+
+class TestCreateSamlProvider(AWSMockServiceTestCase):
+ connection_class = IAMConnection
+
+ def default_body(self):
+ return """
+ <CreateSAMLProviderResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
+ <CreateSAMLProviderResult>
+ <SAMLProviderArn>arn</SAMLProviderArn>
+ </CreateSAMLProviderResult>
+ <ResponseMetadata>
+ <RequestId>29f47818-99f5-11e1-a4c3-27EXAMPLE804</RequestId>
+ </ResponseMetadata>
+ </CreateSAMLProviderResponse>
+ """
+
+ def test_create_saml_provider(self):
+ self.set_http_response(status_code=200)
+ response = self.service_connection.create_saml_provider('document', 'name')
+
+ self.assert_request_parameters(
+ {'Action': 'CreateSAMLProvider',
+ 'SAMLMetadataDocument': 'document',
+ 'Name': 'name'},
+ ignore_params_values=['Version'])
+
+ self.assertEqual(response['create_saml_provider_response']\
+ ['create_saml_provider_result']\
+ ['saml_provider_arn'], 'arn')
+
+
+class TestListSamlProviders(AWSMockServiceTestCase):
+ connection_class = IAMConnection
+
+ def default_body(self):
+ return """
+ <ListSAMLProvidersResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
+ <ListSAMLProvidersResult>
+ <SAMLProviderList>
+ <member>
+ <Arn>arn:aws:iam::123456789012:instance-profile/application_abc/component_xyz/Database</Arn>
+ <ValidUntil>2032-05-09T16:27:11Z</ValidUntil>
+ <CreateDate>2012-05-09T16:27:03Z</CreateDate>
+ </member>
+ <member>
+ <Arn>arn:aws:iam::123456789012:instance-profile/application_abc/component_xyz/Webserver</Arn>
+ <ValidUntil>2015-03-11T13:11:02Z</ValidUntil>
+ <CreateDate>2012-05-09T16:27:11Z</CreateDate>
+ </member>
+ </SAMLProviderList>
+ </ListSAMLProvidersResult>
+ <ResponseMetadata>
+ <RequestId>fd74fa8d-99f3-11e1-a4c3-27EXAMPLE804</RequestId>
+ </ResponseMetadata>
+ </ListSAMLProvidersResponse>
+ """
+
+ def test_list_saml_providers(self):
+ self.set_http_response(status_code=200)
+ response = self.service_connection.list_saml_providers()
+
+ self.assert_request_parameters(
+ {'Action': 'ListSAMLProviders'},
+ ignore_params_values=['Version'])
+
+
+class TestGetSamlProvider(AWSMockServiceTestCase):
+ connection_class = IAMConnection
+
+ def default_body(self):
+ return """
+ <GetSAMLProviderResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
+ <GetSAMLProviderResult>
+ <CreateDate>2012-05-09T16:27:11Z</CreateDate>
+              <ValidUntil>2015-12-31T21:59:59Z</ValidUntil>
+ <SAMLMetadataDocument>Pd9fexDssTkRgGNqs...DxptfEs==</SAMLMetadataDocument>
+ </GetSAMLProviderResult>
+ <ResponseMetadata>
+ <RequestId>29f47818-99f5-11e1-a4c3-27EXAMPLE804</RequestId>
+ </ResponseMetadata>
+ </GetSAMLProviderResponse>
+ """
+
+ def test_get_saml_provider(self):
+ self.set_http_response(status_code=200)
+ response = self.service_connection.get_saml_provider('arn')
+
+ self.assert_request_parameters(
+ {
+ 'Action': 'GetSAMLProvider',
+ 'SAMLProviderArn': 'arn'
+ },
+ ignore_params_values=['Version'])
+
+
+class TestUpdateSamlProvider(AWSMockServiceTestCase):
+ connection_class = IAMConnection
+
+ def default_body(self):
+ return """
+ <UpdateSAMLProviderResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
+ <UpdateSAMLProviderResult>
+ <SAMLProviderArn>arn:aws:iam::123456789012:saml-metadata/MyUniversity</SAMLProviderArn>
+ </UpdateSAMLProviderResult>
+ <ResponseMetadata>
+ <RequestId>29f47818-99f5-11e1-a4c3-27EXAMPLE804</RequestId>
+ </ResponseMetadata>
+ </UpdateSAMLProviderResponse>
+ """
+
+ def test_update_saml_provider(self):
+ self.set_http_response(status_code=200)
+ response = self.service_connection.update_saml_provider('arn', 'doc')
+
+ self.assert_request_parameters(
+ {
+ 'Action': 'UpdateSAMLProvider',
+ 'SAMLMetadataDocument': 'doc',
+ 'SAMLProviderArn': 'arn'
+ },
+ ignore_params_values=['Version'])
+
+
+class TestDeleteSamlProvider(AWSMockServiceTestCase):
+ connection_class = IAMConnection
+
+ def default_body(self):
+ return ""
+
+ def test_delete_saml_provider(self):
+ self.set_http_response(status_code=200)
+ response = self.service_connection.delete_saml_provider('arn')
+
+ self.assert_request_parameters(
+ {
+ 'Action': 'DeleteSAMLProvider',
+ 'SAMLProviderArn': 'arn'
+ },
+ ignore_params_values=['Version'])
diff --git a/tests/unit/s3/test_key.py b/tests/unit/s3/test_key.py
index 68d487ac..60aadad3 100644
--- a/tests/unit/s3/test_key.py
+++ b/tests/unit/s3/test_key.py
@@ -20,6 +20,8 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
+from __future__ import with_statement
+
try:
from cStringIO import StringIO
except ImportError:
@@ -32,6 +34,7 @@ from tests.unit import AWSMockServiceTestCase
from boto.exception import BotoServerError
from boto.s3.connection import S3Connection
from boto.s3.bucket import Bucket
+from boto.s3.key import Key
class TestS3Key(AWSMockServiceTestCase):
@@ -147,5 +150,18 @@ class TestS3KeyRetries(AWSMockServiceTestCase):
self.assertTrue(k.should_retry.count, 1)
+class TestFileError(unittest.TestCase):
+ def test_file_error(self):
+ key = Key()
+
+ class CustomException(Exception): pass
+
+ key.get_contents_to_file = mock.Mock(
+ side_effect=CustomException('File blew up!'))
+
+ # Ensure our exception gets raised instead of a file or IO error
+ with self.assertRaises(CustomException):
+ key.get_contents_to_filename('foo.txt')
+
if __name__ == '__main__':
unittest.main()
diff --git a/tests/unit/sts/test_connection.py b/tests/unit/sts/test_connection.py
index de0ab261..19f62bbb 100644
--- a/tests/unit/sts/test_connection.py
+++ b/tests/unit/sts/test_connection.py
@@ -158,5 +158,64 @@ class TestSTSWebIdentityConnection(AWSMockServiceTestCase):
)
+class TestSTSSAMLConnection(AWSMockServiceTestCase):
+ connection_class = STSConnection
+
+ def setUp(self):
+ super(TestSTSSAMLConnection, self).setUp()
+
+ def default_body(self):
+ return """
+<AssumeRoleWithSAMLResponse xmlns="https://sts.amazonaws.com/doc/
+2011-06-15/">
+ <AssumeRoleWithSAMLResult>
+ <Credentials>
+ <SessionToken>session_token</SessionToken>
+ <SecretAccessKey>secretkey</SecretAccessKey>
+ <Expiration>2011-07-15T23:28:33.359Z</Expiration>
+ <AccessKeyId>accesskey</AccessKeyId>
+ </Credentials>
+ <AssumedRoleUser>
+ <Arn>arn:role</Arn>
+ <AssumedRoleId>roleid:myrolesession</AssumedRoleId>
+ </AssumedRoleUser>
+ <PackedPolicySize>6</PackedPolicySize>
+ </AssumeRoleWithSAMLResult>
+ <ResponseMetadata>
+ <RequestId>c6104cbe-af31-11e0-8154-cbc7ccf896c7</RequestId>
+ </ResponseMetadata>
+</AssumeRoleWithSAMLResponse>
+"""
+
+ def test_assume_role_with_saml(self):
+ arn = 'arn:aws:iam::000240903217:role/Test'
+ principal = 'arn:aws:iam::000240903217:role/Principal'
+ assertion = 'test'
+
+ self.set_http_response(status_code=200)
+ response = self.service_connection.assume_role_with_saml(
+ role_arn=arn,
+ principal_arn=principal,
+ saml_assertion=assertion
+ )
+ self.assert_request_parameters({
+ 'RoleArn': arn,
+ 'PrincipalArn': principal,
+ 'SAMLAssertion': assertion,
+ 'Action': 'AssumeRoleWithSAML'
+ }, ignore_params_values=[
+ 'AWSAccessKeyId',
+ 'SignatureMethod',
+ 'Timestamp',
+ 'SignatureVersion',
+ 'Version',
+ ])
+ self.assertEqual(response.credentials.access_key, 'accesskey')
+ self.assertEqual(response.credentials.secret_key, 'secretkey')
+ self.assertEqual(response.credentials.session_token, 'session_token')
+ self.assertEqual(response.user.arn, 'arn:role')
+ self.assertEqual(response.user.assume_role_id, 'roleid:myrolesession')
+
+
if __name__ == '__main__':
unittest.main()