author     JordonPhillips <phjordon@amazon.com>   2016-01-18 17:44:30 -0800
committer  JordonPhillips <phjordon@amazon.com>   2016-01-18 17:44:30 -0800
commit     3f8f02caf92917eafeac830ca705694405f90307 (patch)
tree       5c7e76a3de569c24ab92fee841929623a2410f32
parent     5cc5326fe6791bd4cc8f8d90ecfe6609ed482cb0 (diff)
parent     30abc140efb75fd0fa7b6e96647cb9a067c1ea11 (diff)
download   boto-3f8f02caf92917eafeac830ca705694405f90307.tar.gz
Merge branch 'release-2.39.0' (tag: 2.39.0)
-rw-r--r--  .travis.yml | 3
-rw-r--r--  CONTRIBUTING | 45
-rw-r--r--  README.rst | 31
-rwxr-xr-x  bin/cfadmin | 2
-rwxr-xr-x  bin/cq | 4
-rwxr-xr-x  bin/cwutil | 2
-rwxr-xr-x  bin/dynamodb_dump | 3
-rwxr-xr-x  bin/dynamodb_load | 3
-rwxr-xr-x  bin/elbadmin | 3
-rwxr-xr-x  bin/route53 | 2
-rw-r--r--  boto/__init__.py | 2
-rw-r--r--  boto/auth.py | 8
-rw-r--r--  boto/cacerts/cacerts.txt | 32
-rw-r--r--  boto/cloudfront/distribution.py | 4
-rw-r--r--  boto/cloudfront/invalidation.py | 4
-rw-r--r--  boto/compat.py | 12
-rw-r--r--  boto/configservice/layer1.py | 6
-rw-r--r--  boto/connection.py | 2
-rw-r--r--  boto/contrib/ymlmessage.py | 2
-rw-r--r--  boto/dynamodb/types.py | 2
-rw-r--r--  boto/dynamodb2/table.py | 7
-rw-r--r--  boto/ec2/cloudwatch/__init__.py | 11
-rw-r--r--  boto/ec2/cloudwatch/metric.py | 7
-rw-r--r--  boto/ec2/connection.py | 19
-rw-r--r--  boto/ec2/image.py | 5
-rw-r--r--  boto/ec2/instance.py | 1
-rw-r--r--  boto/ec2/snapshot.py | 15
-rw-r--r--  boto/ec2containerservice/__init__.py | 7
-rw-r--r--  boto/emr/connection.py | 12
-rw-r--r--  boto/endpoints.json | 87
-rw-r--r--  boto/gs/key.py | 2
-rw-r--r--  boto/iam/connection.py | 290
-rw-r--r--  boto/kms/layer1.py | 4
-rw-r--r--  boto/mturk/connection.py | 2
-rw-r--r--  boto/pyami/installers/ubuntu/ebs.py | 4
-rw-r--r--  boto/rds/__init__.py | 2
-rw-r--r--  boto/rds2/layer1.py | 39
-rw-r--r--  boto/s3/bucket.py | 3
-rw-r--r--  boto/s3/bucketlistresultset.py | 6
-rw-r--r--  boto/s3/connection.py | 3
-rw-r--r--  boto/s3/key.py | 14
-rw-r--r--  boto/sns/connection.py | 4
-rw-r--r--  boto/sqs/connection.py | 61
-rw-r--r--  boto/sqs/queue.py | 65
-rwxr-xr-x  boto/storage_uri.py | 5
-rw-r--r--  boto/swf/layer1.py | 7
-rw-r--r--  boto/utils.py | 38
-rw-r--r--  boto/vendored/six.py | 200
-rw-r--r--  boto/vpc/routetable.py | 3
-rw-r--r--  docs/source/_templates/page.html | 14
-rw-r--r--  docs/source/boto_config_tut.rst | 10
-rw-r--r--  docs/source/cloudfront_tut.rst | 4
-rw-r--r--  docs/source/cloudsearch_tut.rst | 13
-rw-r--r--  docs/source/conf.py | 2
-rw-r--r--  docs/source/contributing.rst | 2
-rw-r--r--  docs/source/dynamodb2_tut.rst | 2
-rw-r--r--  docs/source/ec2_tut.rst | 2
-rw-r--r--  docs/source/index.rst | 11
-rw-r--r--  docs/source/rds_tut.rst | 2
-rw-r--r--  docs/source/releasenotes/v2.35.2.rst | 2
-rw-r--r--  docs/source/releasenotes/v2.39.0.rst | 27
-rw-r--r--  docs/source/route53_tut.rst | 16
-rw-r--r--  docs/source/s3_tut.rst | 5
-rw-r--r--  docs/source/ses_tut.rst | 4
-rw-r--r--  docs/source/simpledb_tut.rst | 6
-rw-r--r--  docs/source/swf_tut.rst | 2
-rw-r--r--  requirements.txt | 2
-rwxr-xr-x  tests/fps/test.py | 2
-rw-r--r--  tests/integration/gs/test_resumable_downloads.py | 8
-rw-r--r--  tests/integration/gs/test_resumable_uploads.py | 20
-rw-r--r--  tests/integration/gs/testcase.py | 18
-rw-r--r--  tests/integration/gs/util.py | 2
-rw-r--r--  tests/integration/iam/test_policy.py | 112
-rw-r--r--  tests/integration/s3/test_bucket.py | 7
-rw-r--r--  tests/integration/s3/test_connect_to_region.py | 5
-rw-r--r--  tests/integration/s3/test_connection.py | 5
-rw-r--r--  tests/integration/s3/test_https_cert_validation.py | 2
-rw-r--r--  tests/integration/s3/test_key.py | 13
-rw-r--r--  tests/integration/s3/test_multidelete.py | 18
-rw-r--r--  tests/mturk/selenium_support.py | 2
-rwxr-xr-x  tests/test.py | 52
-rw-r--r--  tests/unit/auth/test_sigv4.py | 16
-rw-r--r--  tests/unit/cloudfront/test_invalidation.py | 23
-rw-r--r--  tests/unit/cloudsearchdomain/test_cloudsearchdomain.py | 6
-rw-r--r--  tests/unit/dynamodb/test_types.py | 9
-rwxr-xr-x  tests/unit/ec2/test_connection.py | 17
-rw-r--r--  tests/unit/ec2containerservice/__init__.py | 0
-rwxr-xr-x  tests/unit/ec2containerservice/test_connection.py | 33
-rw-r--r--  tests/unit/emr/test_connection.py | 8
-rw-r--r--  tests/unit/glacier/test_concurrent.py | 10
-rw-r--r--  tests/unit/iam/test_policy.py | 687
-rw-r--r--  tests/unit/kms/test_kms.py | 2
-rw-r--r--  tests/unit/s3/test_bucket.py | 10
-rw-r--r--  tests/unit/utils/test_utils.py | 96
-rw-r--r--  tests/unit/vpc/test_routetable.py | 8
95 files changed, 2085 insertions, 327 deletions
diff --git a/.travis.yml b/.travis.yml
index 67de5019..e7eee360 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -4,7 +4,10 @@ python:
- "2.7"
- "3.3"
- "3.4"
+ - "3.5"
- "pypy"
+env:
+ - BOTO_CONFIG=/tmp/nowhere
before_install:
- sudo apt-get update
- sudo apt-get --reinstall install -qq language-pack-en language-pack-de
diff --git a/CONTRIBUTING b/CONTRIBUTING
index d3014eef..fc8f22c7 100644
--- a/CONTRIBUTING
+++ b/CONTRIBUTING
@@ -45,3 +45,48 @@ Reporting An Issue/Feature
resolved sooner.
+Maintenance
+===========
+
+You do not have to be labeled as a "maintainer" to be able to help with
+the triaging, resolving, and reviewing of boto issues and pull requests.
+
+These are the processes that the maintainers follow, and that you can
+also follow to help speed up the resolution of an issue or pull
+request:
+
+Pull Requests
+-------------
+There are some key points that need to be met before a pull request
+can be merged:
+
+* All tests must pass for all python versions.
+* All pull requests require tests that either test the new feature or test
+ that the specific bug is fixed. Pull requests for minor things like
+ adding a new region or fixing a typo do not need tests.
+* Must follow PEP8 conventions.
+* All changes must be backwards compatible.
+* If the pull request is for a feature, make sure a link to the corresponding
+ API is provided in order to easily check that the feature matches up
+ with the service's API.
+
+The best way to help with pull requests is to comment on them, noting which
+of these key points are missing. This both gets feedback to the author of
+the pull request sooner and makes it easier for an individual with write
+permissions to the repository to determine whether a pull request is
+ready to be merged.
+
+Issues
+------
+Here are the best ways to help with open issues:
+
+* If there is an issue without a set of instructions on how to reproduce the
+  bug, feel free to try to reproduce it and comment with the minimal set of
+  steps needed to reproduce the bug (a code snippet would be ideal). If the
+  issue cannot be reduced to a set of reproduction steps, at least make sure
+  there are debug logs that capture the unexpected behavior.
+
+* Consolidate related issues into one by closing out duplicates and linking
+  them to the single issue that outlines the general problem.
+
+* Submit pull requests for open issues.
diff --git a/README.rst b/README.rst
index d20d0459..b819374c 100644
--- a/README.rst
+++ b/README.rst
@@ -1,7 +1,7 @@
####
boto
####
-boto 2.38.0
+boto 2.39.0
Released: 9-Apr-2015
@@ -11,6 +11,24 @@ Released: 9-Apr-2015
.. image:: https://pypip.in/d/boto/badge.svg
:target: https://pypi.python.org/pypi/boto/
+******
+Boto 3
+******
+
+`Boto3 <https://github.com/boto/boto3>`__, the next version of Boto, is now
+stable and recommended for general use. It can be used side-by-side with Boto
+in the same project, so it is easy to start using Boto3 in your existing
+projects as well as new projects. Going forward, API updates and all new
+feature work will be focused on Boto3.
+
+To assist users who still depend on Boto and cannot immediately switch over, we
+will be triaging and addressing critical issues and PRs in Boto in the short
+term. As more users make the switch to Boto3, we expect to reduce our
+maintenance involvement over time. If we decide on a cutoff date or any
+significant changes to our maintenance plan, we will make pre-announcements
+well ahead of schedule to allow ample time for our users to adapt/migrate.
+
+
************
Introduction
************
@@ -26,17 +44,6 @@ and the `Porting Guide`_. If you would like, you can open an issue to let
others know about your work in progress. Tests **must** pass on Python
2.6, 2.7, 3.3, and 3.4 for pull requests to be accepted.
-******
-Boto 3
-******
-The next major version of Boto is currently in developer preview and can
-be found in the `Boto 3 <https://github.com/boto/boto3#readme>`__
-repository and installed via ``pip``. It supports the latest service APIs
-and provides a high-level object-oriented interface to many services.
-
-Please try Boto 3 and
-`leave feedback <https://github.com/boto/boto3/issues>`__ with any issues,
-suggestions, and feature requests you might have.
********
Services
diff --git a/bin/cfadmin b/bin/cfadmin
index 6fcdd86d..9d93b85a 100755
--- a/bin/cfadmin
+++ b/bin/cfadmin
@@ -103,6 +103,6 @@ if __name__ == "__main__":
cmd = help
try:
cmd(cf, *args)
- except TypeError, e:
+ except TypeError as e:
print e
help(cf, cmd.__name__)
diff --git a/bin/cq b/bin/cq
index 05bc95b9..38e94a2d 100755
--- a/bin/cq
+++ b/bin/cq
@@ -65,7 +65,7 @@ def main():
if queue_name:
try:
rs = [c.create_queue(queue_name)]
- except SQSError, e:
+ except SQSError as e:
print 'An Error Occurred:'
print '%s: %s' % (e.status, e.reason)
print e.body
@@ -73,7 +73,7 @@ def main():
else:
try:
rs = c.get_all_queues()
- except SQSError, e:
+ except SQSError as e:
print 'An Error Occurred:'
print '%s: %s' % (e.status, e.reason)
print e.body
diff --git a/bin/cwutil b/bin/cwutil
index e22b64ca..280d53f3 100755
--- a/bin/cwutil
+++ b/bin/cwutil
@@ -135,6 +135,6 @@ if __name__ == "__main__":
cmd = help
try:
cmd(*args)
- except TypeError, e:
+ except TypeError as e:
print e
help(cmd.__name__)
diff --git a/bin/dynamodb_dump b/bin/dynamodb_dump
index 8b6aada7..46efa4f6 100755
--- a/bin/dynamodb_dump
+++ b/bin/dynamodb_dump
@@ -6,6 +6,7 @@ import os
import boto
from boto.compat import json
+from boto.compat import six
DESCRIPTION = """Dump the contents of one or more DynamoDB tables to the local filesystem.
@@ -39,7 +40,7 @@ def dump_table(table, out_dir):
for item in table.scan():
# JSON can't serialize sets -- convert those to lists.
data = {}
- for k, v in item.iteritems():
+ for k, v in six.iteritems(item):
if isinstance(v, (set, frozenset)):
data[k] = list(v)
else:
diff --git a/bin/dynamodb_load b/bin/dynamodb_load
index 46a8d392..ccaefbb3 100755
--- a/bin/dynamodb_load
+++ b/bin/dynamodb_load
@@ -5,6 +5,7 @@ import os
import boto
from boto.compat import json
+from boto.compat import six
from boto.dynamodb.schema import Schema
@@ -61,7 +62,7 @@ def load_table(table, in_fd):
for i in _json_iterload(in_fd):
# Convert lists back to sets.
data = {}
- for k, v in i.iteritems():
+ for k, v in six.iteritems(i):
if isinstance(v, list):
data[k] = set(v)
else:
diff --git a/bin/elbadmin b/bin/elbadmin
index d83643f0..d340442d 100755
--- a/bin/elbadmin
+++ b/bin/elbadmin
@@ -113,6 +113,7 @@ def get(elb, name):
# Make map of all instance Id's to Name tags
import boto
+ from boto.compat.six import iteritems
if not options.region:
ec2 = boto.connect_ec2()
else:
@@ -127,7 +128,7 @@ def get(elb, name):
if i.id in instances:
names[i.id] = i.tags.get('Name', '')
- name_column_width = max([4] + [len(v) for k,v in names.iteritems()]) + 2
+ name_column_width = max([4] + [len(v) for k,v in iteritems(names)]) + 2
print "Instances"
print "---------"
diff --git a/bin/route53 b/bin/route53
index fcdea70b..f9e6898f 100755
--- a/bin/route53
+++ b/bin/route53
@@ -200,6 +200,6 @@ if __name__ == "__main__":
cmd = help
try:
cmd(conn, *args)
- except TypeError, e:
+ except TypeError as e:
print e
help(conn, cmd.__name__)
diff --git a/boto/__init__.py b/boto/__init__.py
index 75082c93..989fb4b7 100644
--- a/boto/__init__.py
+++ b/boto/__init__.py
@@ -38,7 +38,7 @@ import logging.config
from boto.compat import urlparse
from boto.exception import InvalidUriError
-__version__ = '2.38.0'
+__version__ = '2.39.0'
Version = __version__ # for backware compatibility
# http://bugs.python.org/issue7980
diff --git a/boto/auth.py b/boto/auth.py
index 636dcfdd..a3ce891c 100644
--- a/boto/auth.py
+++ b/boto/auth.py
@@ -55,9 +55,11 @@ except ImportError:
# by default.
SIGV4_DETECT = [
'.cn-',
- # In eu-central we support both host styles for S3
+ # In eu-central and ap-northeast-2 we support both host styles for S3
'.eu-central',
'-eu-central',
+ '.ap-northeast-2',
+ '-ap-northeast-2'
]
@@ -878,8 +880,8 @@ class QuerySignatureV1AuthHandler(QuerySignatureHelper, AuthHandler):
def _calc_signature(self, params, *args):
boto.log.debug('using _calc_signature_1')
hmac = self._get_hmac()
- keys = params.keys()
- keys.sort(cmp=lambda x, y: cmp(x.lower(), y.lower()))
+ keys = list(params.keys())
+ keys.sort(key=lambda x: x.lower())
pairs = []
for key in keys:
hmac.update(key.encode('utf-8'))
diff --git a/boto/cacerts/cacerts.txt b/boto/cacerts/cacerts.txt
index 3cf3f26f..514ab1bf 100644
--- a/boto/cacerts/cacerts.txt
+++ b/boto/cacerts/cacerts.txt
@@ -93,22 +93,6 @@ BIZCe/zuf6IWUrVnZ9NA2zsmWLIodz2uFHdh1voqZiegDfqnc1zqcPGUIWVEX/r87yloqaKHee95
70+sB3c4
-----END CERTIFICATE-----
-Verisign Class 3 Public Primary Certification Authority
-=======================================================
------BEGIN CERTIFICATE-----
-MIICPDCCAaUCEHC65B0Q2Sk0tjjKewPMur8wDQYJKoZIhvcNAQECBQAwXzELMAkGA1UEBhMCVVMx
-FzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAzIFB1YmxpYyBQcmltYXJ5
-IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk2MDEyOTAwMDAwMFoXDTI4MDgwMTIzNTk1OVow
-XzELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAz
-IFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGfMA0GCSqGSIb3DQEBAQUA
-A4GNADCBiQKBgQDJXFme8huKARS0EN8EQNvjV69qRUCPhAwL0TPZ2RHP7gJYHyX3KqhEBarsAx94
-f56TuZoAqiN91qyFomNFx3InzPRMxnVx0jnvT0Lwdd8KkMaOIG+YD/isI19wKTakyYbnsZogy1Ol
-hec9vn2a/iRFM9x2Fe0PonFkTGUugWhFpwIDAQABMA0GCSqGSIb3DQEBAgUAA4GBALtMEivPLCYA
-TxQT3ab7/AoRhIzzKBxnki98tsX63/Dolbwdj2wsqFHMc9ikwFPwTtYmwHYBV4GSXiHx0bH/59Ah
-WM1pF+NEHJwZRDmJXNycAA9WjQKZ7aKQRUzkuxCkPfAyAw7xzvjoyVGM5mKf5p/AfbdynMk2Omuf
-Tqj/ZA1k
------END CERTIFICATE-----
-
Verisign Class 3 Public Primary Certification Authority - G2
============================================================
-----BEGIN CERTIFICATE-----
@@ -2613,22 +2597,6 @@ MCwXEGCSn1WHElkQwg9naRHMTh5+Spqtr0CodaxWkHS4oJyleW/c6RrIaQXpuvoDs3zk4E7Czp3o
tkYNbn5XOmeUwssfnHdKZ05phkOTOPu220+DkdRgfks+KzgHVZhepA==
-----END CERTIFICATE-----
-Verisign Class 3 Public Primary Certification Authority
-=======================================================
------BEGIN CERTIFICATE-----
-MIICPDCCAaUCEDyRMcsf9tAbDpq40ES/Er4wDQYJKoZIhvcNAQEFBQAwXzELMAkGA1UEBhMCVVMx
-FzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAzIFB1YmxpYyBQcmltYXJ5
-IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk2MDEyOTAwMDAwMFoXDTI4MDgwMjIzNTk1OVow
-XzELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAz
-IFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGfMA0GCSqGSIb3DQEBAQUA
-A4GNADCBiQKBgQDJXFme8huKARS0EN8EQNvjV69qRUCPhAwL0TPZ2RHP7gJYHyX3KqhEBarsAx94
-f56TuZoAqiN91qyFomNFx3InzPRMxnVx0jnvT0Lwdd8KkMaOIG+YD/isI19wKTakyYbnsZogy1Ol
-hec9vn2a/iRFM9x2Fe0PonFkTGUugWhFpwIDAQABMA0GCSqGSIb3DQEBBQUAA4GBABByUqkFFBky
-CEHwxWsKzH4PIRnN5GfcX6kb5sroc50i2JhucwNhkcV8sEVAbkSdjbCxlnRhLQ2pRdKkkirWmnWX
-bj9T/UWZYB2oK0z5XqcJ2HUw19JlYD1n1khVdWk/kfVIC0dpImmClr7JyDiGSnoscxlIaU5rfGW/
-D/xwzoiQ
------END CERTIFICATE-----
-
Microsec e-Szigno Root CA 2009
==============================
-----BEGIN CERTIFICATE-----
diff --git a/boto/cloudfront/distribution.py b/boto/cloudfront/distribution.py
index 7131c86c..192c7c39 100644
--- a/boto/cloudfront/distribution.py
+++ b/boto/cloudfront/distribution.py
@@ -525,12 +525,12 @@ class Distribution(object):
:param expire_time: The expiry time of the URL. If provided, the URL
will expire after the time has passed. If not provided the URL will
never expire. Format is a unix epoch.
- Use time.time() + duration_in_sec.
+ Use int(time.time() + duration_in_sec).
:type valid_after_time: int
:param valid_after_time: If provided, the URL will not be valid until
after valid_after_time. Format is a unix epoch.
- Use time.time() + secs_until_valid.
+ Use int(time.time() + secs_until_valid).
:type ip_address: str
:param ip_address: If provided, only allows access from the specified
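
Usage sketch for the corrected expiry docstrings above (the distribution id,
key pair id, URL, and key file are placeholders, not values from the patch):

    import time
    import boto

    cf = boto.connect_cloudfront()
    dist = cf.get_distribution_info('EDFDVBD6EXAMPLE')  # placeholder id
    expire_time = int(time.time() + 3600)  # one hour from now, as an int epoch
    signed_url = dist.create_signed_url(
        'http://d111111abcdef8.cloudfront.net/image.jpg',
        keypair_id='APKAEXAMPLEKEYID',
        expire_time=expire_time,
        private_key_file='/path/to/pk-APKAEXAMPLEKEYID.pem')
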
diff --git a/boto/cloudfront/invalidation.py b/boto/cloudfront/invalidation.py
index 58adf81f..385d099e 100644
--- a/boto/cloudfront/invalidation.py
+++ b/boto/cloudfront/invalidation.py
@@ -68,10 +68,10 @@ class InvalidationBatch(object):
self.paths[k] = v
def escape(self, p):
- """Escape a path, make sure it begins with a slash and contains no invalid characters"""
+ """Escape a path, make sure it begins with a slash and contains no invalid characters. Retain literal wildcard characters."""
if not p[0] == "/":
p = "/%s" % p
- return urllib.parse.quote(p)
+ return urllib.parse.quote(p, safe = "/*")
def to_xml(self):
"""Get this batch as XML"""
diff --git a/boto/compat.py b/boto/compat.py
index a7503f01..43e2d2b4 100644
--- a/boto/compat.py
+++ b/boto/compat.py
@@ -54,6 +54,7 @@ from boto.vendored.six.moves import filter, http_client, map, _thread, \
from boto.vendored.six.moves.queue import Queue
from boto.vendored.six.moves.urllib.parse import parse_qs, quote, unquote, \
urlparse, urlsplit
+from boto.vendored.six.moves.urllib.parse import unquote_plus
from boto.vendored.six.moves.urllib.request import urlopen
if six.PY3:
@@ -61,7 +62,18 @@ if six.PY3:
StandardError = Exception
long_type = int
from configparser import ConfigParser
+ unquote_str = unquote_plus
else:
StandardError = StandardError
long_type = long
from ConfigParser import SafeConfigParser as ConfigParser
+
+ def unquote_str(value, encoding='utf-8'):
+ # In python2, unquote() gives us a string back that has the urldecoded
+ # bits, but not the unicode parts. We need to decode this manually.
+ # unquote has special logic in which if it receives a unicode object it
+ # will decode it to latin1. This is hard coded. To avoid this, we'll
+ # encode the string with the passed in encoding before trying to
+ # unquote it.
+ byte_string = value.encode(encoding)
+ return unquote_plus(byte_string).decode(encoding)
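
A quick sketch of the new helper's behavior, which is the same on Python 2
and Python 3:

    from boto.compat import unquote_str

    # percent-escapes and '+' are decoded, and UTF-8 bytes come back as text
    print(unquote_str('my%20key%2Bname'))   # 'my key+name'
    print(unquote_str('caf%C3%A9'))         # 'café'
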
diff --git a/boto/configservice/layer1.py b/boto/configservice/layer1.py
index fe598d98..d768b66c 100644
--- a/boto/configservice/layer1.py
+++ b/boto/configservice/layer1.py
@@ -289,7 +289,11 @@ class ConfigServiceConnection(AWSQueryConnection):
:type configuration_recorder: dict
:param configuration_recorder: The configuration recorder object that
- records each configuration change made to the resources.
+ records each configuration change made to the resources. The
+ format should follow:
+
+ {'name': 'myrecorder',
+ 'roleARN': 'arn:aws:iam::123456789012:role/trusted-aws-config'}
"""
params = {'ConfigurationRecorder': configuration_recorder, }
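
A minimal sketch of calling put_configuration_recorder with a dict in the
shape documented above; the region and role ARN are placeholders:

    import boto.configservice

    conn = boto.configservice.connect_to_region('us-east-1')
    conn.put_configuration_recorder({
        'name': 'myrecorder',
        'roleARN': 'arn:aws:iam::123456789012:role/trusted-aws-config'})
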
diff --git a/boto/connection.py b/boto/connection.py
index 28bb320a..32fecd68 100644
--- a/boto/connection.py
+++ b/boto/connection.py
@@ -726,7 +726,7 @@ class AWSAuthConnection(object):
# Make sure the host is really just the host, not including
# the port number
- host = host.split(':', 1)[0]
+ host = boto.utils.parse_host(host)
http_connection_kwargs = self.http_connection_kwargs.copy()
diff --git a/boto/contrib/ymlmessage.py b/boto/contrib/ymlmessage.py
index ae6aea48..32aecc2c 100644
--- a/boto/contrib/ymlmessage.py
+++ b/boto/contrib/ymlmessage.py
@@ -47,7 +47,7 @@ class YAMLMessage(Message):
super(YAMLMessage, self).__init__(queue, body)
def set_body(self, body):
- self.data = yaml.load(body)
+ self.data = yaml.safe_load(body)
def get_body(self):
return yaml.dump(self.data)
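
For context on the safe_load switch above: safe_load only constructs plain
Python objects, so a queue message body cannot instantiate arbitrary Python
types the way yaml.load could. A small illustration with made-up message data:

    import yaml

    body = "queue_name: tasks\nretries: 3\n"
    data = yaml.safe_load(body)   # plain dicts, lists, and scalars only
    print(data['retries'])        # 3
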
diff --git a/boto/dynamodb/types.py b/boto/dynamodb/types.py
index 6a48ae5f..d9aaaa4c 100644
--- a/boto/dynamodb/types.py
+++ b/boto/dynamodb/types.py
@@ -333,7 +333,7 @@ class Dynamizer(object):
the appropriate python type.
"""
- if len(attr) > 1 or not attr:
+ if len(attr) > 1 or not attr or is_str(attr):
return attr
dynamodb_type = list(attr.keys())[0]
if dynamodb_type.lower() == dynamodb_type:
diff --git a/boto/dynamodb2/table.py b/boto/dynamodb2/table.py
index d02ff5c7..3b187566 100644
--- a/boto/dynamodb2/table.py
+++ b/boto/dynamodb2/table.py
@@ -97,7 +97,7 @@ class Table(object):
... ],
... throughput={
... 'read':10,
- ... 'write":10,
+ ... 'write':10,
... }),
... ], connection=dynamodb2.connect_to_region('us-west-2',
... aws_access_key_id='key',
@@ -178,7 +178,8 @@ class Table(object):
... 'write': 10,
... }, indexes=[
... KeysOnlyIndex('MostRecentlyJoined', parts=[
- ... RangeKey('date_joined')
+ ... HashKey('username'),
+ ... RangeKey('date_joined'),
... ]), global_indexes=[
... GlobalAllIndex('UsersByZipcode', parts=[
... HashKey('zipcode'),
@@ -1118,7 +1119,7 @@ class Table(object):
+ `AND` - True if all filter conditions evaluate to true (default)
+ `OR` - True if at least one filter condition evaluates to true
- Returns a ``ResultSet``, which transparently handles the pagination of
+ Returns a ``ResultSet`` containing ``Item``s, which transparently handles the pagination of
results you get back.
Example::
diff --git a/boto/ec2/cloudwatch/__init__.py b/boto/ec2/cloudwatch/__init__.py
index 9b150b4e..3bc806d9 100644
--- a/boto/ec2/cloudwatch/__init__.py
+++ b/boto/ec2/cloudwatch/__init__.py
@@ -343,7 +343,7 @@ class CloudWatchConnection(AWSQueryConnection):
action.
:type action_prefix: string
- :param action_name: The action name prefix.
+ :param action_prefix: The action name prefix.
:type alarm_name_prefix: string
:param alarm_name_prefix: The alarm name prefix. AlarmNames cannot
@@ -444,7 +444,7 @@ class CloudWatchConnection(AWSQueryConnection):
or unit to filter the set of alarms further.
:type metric_name: string
- :param metric_name: The name of the metric
+ :param metric_name: The name of the metric.
:type namespace: string
:param namespace: The namespace of the metric.
@@ -456,9 +456,10 @@ class CloudWatchConnection(AWSQueryConnection):
:type statistic: string
:param statistic: The statistic for the metric.
- :param dimension_filters: A dictionary containing name/value
- pairs that will be used to filter the results. The key in
- the dictionary is the name of a Dimension. The value in
+ :type dimensions: dict
+ :param dimensions: A dictionary containing name/value
+ pairs that will be used to filter the results. The key in
+ the dictionary is the name of a Dimension. The value in
the dictionary is either a scalar value of that Dimension
name that you want to filter on, a list of values to
filter on or None if you want all metrics with that
diff --git a/boto/ec2/cloudwatch/metric.py b/boto/ec2/cloudwatch/metric.py
index f92f282a..15d1d968 100644
--- a/boto/ec2/cloudwatch/metric.py
+++ b/boto/ec2/cloudwatch/metric.py
@@ -148,9 +148,10 @@ class Metric(object):
:type statistic: string
:param statistic: The statistic for the metric.
- :param dimension_filters: A dictionary containing name/value
- pairs that will be used to filter the results. The key in
- the dictionary is the name of a Dimension. The value in
+ :type dimensions: dict
+        :param dimensions: A dictionary containing name/value
+ pairs that will be used to filter the results. The key in
+ the dictionary is the name of a Dimension. The value in
the dictionary is either a scalar value of that Dimension
name that you want to filter on, a list of values to
filter on or None if you want all metrics with that
diff --git a/boto/ec2/connection.py b/boto/ec2/connection.py
index 210de7bb..f1a49285 100644
--- a/boto/ec2/connection.py
+++ b/boto/ec2/connection.py
@@ -804,6 +804,11 @@ class EC2Connection(AWSQueryConnection):
* c3.2xlarge
* c3.4xlarge
* c3.8xlarge
+ * c4.large
+ * c4.xlarge
+ * c4.2xlarge
+ * c4.4xlarge
+ * c4.8xlarge
* i2.xlarge
* i2.2xlarge
* i2.4xlarge
@@ -1504,6 +1509,11 @@ class EC2Connection(AWSQueryConnection):
* c3.2xlarge
* c3.4xlarge
* c3.8xlarge
+ * c4.large
+ * c4.xlarge
+ * c4.2xlarge
+ * c4.4xlarge
+ * c4.8xlarge
* i2.xlarge
* i2.2xlarge
* i2.4xlarge
@@ -2265,7 +2275,7 @@ class EC2Connection(AWSQueryConnection):
return self.get_status('ModifyVolumeAttribute', params, verb='POST')
def create_volume(self, size, zone, snapshot=None, volume_type=None,
- iops=None, encrypted=False, dry_run=False):
+ iops=None, encrypted=False, kms_key_id=None, dry_run=False):
"""
Create a new EBS Volume.
@@ -2291,6 +2301,11 @@ class EC2Connection(AWSQueryConnection):
:param encrypted: Specifies whether the volume should be encrypted.
(optional)
+ :type kms_key_id: string
+        :param kms_key_id: If encrypted is True, this KMS key ID may be specified to
+            encrypt the volume with this key (optional)
+ e.g.: arn:aws:kms:us-east-1:012345678910:key/abcd1234-a123-456a-a12b-a123b4cd56ef
+
:type dry_run: bool
:param dry_run: Set to True if the operation should not actually run.
@@ -2310,6 +2325,8 @@ class EC2Connection(AWSQueryConnection):
params['Iops'] = str(iops)
if encrypted:
params['Encrypted'] = 'true'
+ if kms_key_id:
+ params['KmsKeyId'] = kms_key_id
if dry_run:
params['DryRun'] = 'true'
return self.get_object('CreateVolume', params, Volume, verb='POST')
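
Usage sketch for the new kms_key_id argument; the region, zone, and key ARN
are placeholders, and encrypted must be True for the key to apply:

    import boto.ec2

    ec2 = boto.ec2.connect_to_region('us-east-1')
    volume = ec2.create_volume(
        size=100, zone='us-east-1a', volume_type='gp2', encrypted=True,
        kms_key_id='arn:aws:kms:us-east-1:012345678910:key/'
                   'abcd1234-a123-456a-a12b-a123b4cd56ef')
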
diff --git a/boto/ec2/image.py b/boto/ec2/image.py
index 612404f0..68ab0371 100644
--- a/boto/ec2/image.py
+++ b/boto/ec2/image.py
@@ -230,6 +230,11 @@ class Image(TaggedEC2Object):
* c3.2xlarge
* c3.4xlarge
* c3.8xlarge
+ * c4.large
+ * c4.xlarge
+ * c4.2xlarge
+ * c4.4xlarge
+ * c4.8xlarge
* i2.xlarge
* i2.2xlarge
* i2.4xlarge
diff --git a/boto/ec2/instance.py b/boto/ec2/instance.py
index eb7c9ec6..ae0056c3 100644
--- a/boto/ec2/instance.py
+++ b/boto/ec2/instance.py
@@ -201,7 +201,6 @@ class Instance(TaggedEC2Object):
:ivar root_device_type: The root device type (ebs|instance-store).
:ivar block_device_mapping: The Block Device Mapping for the instance.
:ivar state_reason: The reason for the most recent state transition.
- :ivar groups: List of security Groups associated with the instance.
:ivar interfaces: List of Elastic Network Interfaces associated with
this instance.
:ivar ebs_optimized: Whether instance is using optimized EBS volumes
diff --git a/boto/ec2/snapshot.py b/boto/ec2/snapshot.py
index eaf7164c..4db301f4 100644
--- a/boto/ec2/snapshot.py
+++ b/boto/ec2/snapshot.py
@@ -28,6 +28,21 @@ from boto.ec2.zone import Zone
class Snapshot(TaggedEC2Object):
+ """
+ Represents an EBS snapshot.
+ :ivar id: The unique ID of the snapshot.
+ :ivar volume_id: The ID of the volume this snapshot was created
+ from.
+ :ivar status: The status of the snapshot.
+ :ivar progress: The percent complete of the snapshot.
+ :ivar start_time: The timestamp of when the snapshot was created.
+ :ivar owner_id: The id of the account that owns the snapshot.
+ :ivar owner_alias: The alias of the account that owns the snapshot.
+ :ivar volume_size: The size (in GB) of the volume the snapshot was created from.
+ :ivar description: The description of the snapshot.
+ :ivar encrypted: True if this snapshot is encrypted
+ """
+
AttrName = 'createVolumePermission'
def __init__(self, connection=None):
diff --git a/boto/ec2containerservice/__init__.py b/boto/ec2containerservice/__init__.py
index a8946a0e..b7abb19e 100644
--- a/boto/ec2containerservice/__init__.py
+++ b/boto/ec2containerservice/__init__.py
@@ -20,7 +20,7 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
-from boto.regioninfo import RegionInfo, get_regions
+from boto.regioninfo import get_regions
def regions():
@@ -30,8 +30,9 @@ def regions():
:rtype: list
:return: A list of :class:`boto.regioninfo.RegionInfo`
"""
- from boto.ec2containerservice import EC2ContainerServiceConnection
- return get_regions('', connection_cls=EC2ContainerServiceConnection)
+ from boto.ec2containerservice.layer1 import EC2ContainerServiceConnection
+ return get_regions('ec2containerservice',
+ connection_cls=EC2ContainerServiceConnection)
def connect_to_region(region_name, **kw_params):
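
With the corrected import and endpoint prefix above, ECS region lookup works
like any other service; a minimal sketch (region chosen for illustration):

    import boto.ec2containerservice

    ecs = boto.ec2containerservice.connect_to_region('us-west-2')
    print(ecs.list_clusters())
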
diff --git a/boto/emr/connection.py b/boto/emr/connection.py
index d15852ea..87eafbde 100644
--- a/boto/emr/connection.py
+++ b/boto/emr/connection.py
@@ -96,6 +96,10 @@ class EmrConnection(AWSQueryConnection):
def describe_jobflow(self, jobflow_id):
"""
+ This method is deprecated. We recommend you use list_clusters,
+ describe_cluster, list_steps, list_instance_groups and
+ list_bootstrap_actions instead.
+
Describes a single Elastic MapReduce job flow
:type jobflow_id: str
@@ -108,6 +112,10 @@ class EmrConnection(AWSQueryConnection):
def describe_jobflows(self, states=None, jobflow_ids=None,
created_after=None, created_before=None):
"""
+ This method is deprecated. We recommend you use list_clusters,
+ describe_cluster, list_steps, list_instance_groups and
+ list_bootstrap_actions instead.
+
Retrieve all the Elastic MapReduce job flows on your account
:type states: list
@@ -242,7 +250,7 @@ class EmrConnection(AWSQueryConnection):
if instance_group_types:
self.build_list_params(params, instance_group_types,
- 'InstanceGroupTypeList.member')
+ 'InstanceGroupTypes.member')
return self.get_object('ListInstances', params, InstanceList)
@@ -265,7 +273,7 @@ class EmrConnection(AWSQueryConnection):
params['Marker'] = marker
if step_states:
- self.build_list_params(params, step_states, 'StepStateList.member')
+ self.build_list_params(params, step_states, 'StepStates.member')
return self.get_object('ListSteps', params, StepSummaryList)
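
A hedged sketch of the replacement calls named in the deprecation notes above;
the attribute names (clusters, id) follow boto.emr's usual response objects:

    import boto.emr

    emr = boto.emr.connect_to_region('us-east-1')
    for summary in emr.list_clusters().clusters:
        cluster = emr.describe_cluster(summary.id)
        steps = emr.list_steps(summary.id)
        groups = emr.list_instance_groups(summary.id)
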
diff --git a/boto/endpoints.json b/boto/endpoints.json
index 4e4afe84..ef23f0c1 100644
--- a/boto/endpoints.json
+++ b/boto/endpoints.json
@@ -1,6 +1,7 @@
{
"autoscaling": {
"ap-northeast-1": "autoscaling.ap-northeast-1.amazonaws.com",
+ "ap-northeast-2": "autoscaling.ap-northeast-2.amazonaws.com",
"ap-southeast-1": "autoscaling.ap-southeast-1.amazonaws.com",
"ap-southeast-2": "autoscaling.ap-southeast-2.amazonaws.com",
"cn-north-1": "autoscaling.cn-north-1.amazonaws.com.cn",
@@ -15,10 +16,12 @@
"awslambda": {
"us-east-1": "lambda.us-east-1.amazonaws.com",
"us-west-2": "lambda.us-west-2.amazonaws.com",
- "eu-west-1": "lambda.eu-west-1.amazonaws.com"
+ "eu-west-1": "lambda.eu-west-1.amazonaws.com",
+ "ap-northeast-1": "lambda.ap-northeast-1.amazonaws.com"
},
"cloudformation": {
"ap-northeast-1": "cloudformation.ap-northeast-1.amazonaws.com",
+ "ap-northeast-2": "cloudformation.ap-northeast-2.amazonaws.com",
"ap-southeast-1": "cloudformation.ap-southeast-1.amazonaws.com",
"ap-southeast-2": "cloudformation.ap-southeast-2.amazonaws.com",
"cn-north-1": "cloudformation.cn-north-1.amazonaws.com.cn",
@@ -32,6 +35,7 @@
},
"cloudfront": {
"ap-northeast-1": "cloudfront.amazonaws.com",
+ "ap-northeast-2": "cloudfront.amazonaws.com",
"ap-southeast-1": "cloudfront.amazonaws.com",
"ap-southeast-2": "cloudfront.amazonaws.com",
"eu-west-1": "cloudfront.amazonaws.com",
@@ -43,6 +47,7 @@
},
"cloudhsm": {
"us-east-1": "cloudhsm.us-east-1.amazonaws.com",
+ "us-gov-west-1": "cloudhsm.us-gov-west-1.amazonaws.com",
"us-west-2": "cloudhsm.us-west-2.amazonaws.com",
"eu-west-1": "cloudhsm.eu-west-1.amazonaws.com",
"eu-central-1": "cloudhsm.eu-central-1.amazonaws.com",
@@ -72,17 +77,20 @@
},
"cloudtrail": {
"ap-northeast-1": "cloudtrail.ap-northeast-1.amazonaws.com",
+ "ap-northeast-2": "cloudtrail.ap-northeast-2.amazonaws.com",
"ap-southeast-1": "cloudtrail.ap-southeast-1.amazonaws.com",
"ap-southeast-2": "cloudtrail.ap-southeast-2.amazonaws.com",
"eu-west-1": "cloudtrail.eu-west-1.amazonaws.com",
"sa-east-1": "cloudtrail.sa-east-1.amazonaws.com",
"us-east-1": "cloudtrail.us-east-1.amazonaws.com",
+ "us-gov-west-1": "cloudtrail.us-gov-west-1.amazonaws.com",
"us-west-1": "cloudtrail.us-west-1.amazonaws.com",
"us-west-2": "cloudtrail.us-west-2.amazonaws.com",
"eu-central-1": "cloudtrail.eu-central-1.amazonaws.com"
},
"cloudwatch": {
"ap-northeast-1": "monitoring.ap-northeast-1.amazonaws.com",
+ "ap-northeast-2": "monitoring.ap-northeast-2.amazonaws.com",
"ap-southeast-1": "monitoring.ap-southeast-1.amazonaws.com",
"ap-southeast-2": "monitoring.ap-southeast-2.amazonaws.com",
"cn-north-1": "monitoring.cn-north-1.amazonaws.com.cn",
@@ -95,20 +103,31 @@
"eu-central-1": "monitoring.eu-central-1.amazonaws.com"
},
"codedeploy": {
+ "ap-southeast-2": "codedeploy.ap-southeast-2.amazonaws.com",
+ "eu-west-1": "codedeploy.eu-west-1.amazonaws.com",
"us-east-1": "codedeploy.us-east-1.amazonaws.com",
- "us-west-2": "codedeploy.us-west-2.amazonaws.com"
+ "us-west-2": "codedeploy.us-west-2.amazonaws.com",
+ "eu-west-1": "codedeploy.eu-west-1.amazonaws.com",
+ "ap-southeast-2": "codedeploy.ap-southeast-2.amazonaws.com"
},
"cognito-identity": {
+ "eu-west-1": "cognito-identity.eu-west-1.amazonaws.com",
"us-east-1": "cognito-identity.us-east-1.amazonaws.com"
},
"cognito-sync": {
+ "eu-west-1": "cognito-sync.eu-west-1.amazonaws.com",
"us-east-1": "cognito-sync.us-east-1.amazonaws.com"
},
"configservice": {
- "us-east-1": "config.us-east-1.amazonaws.com",
- "us-west-2": "config.us-west-2.amazonaws.com",
+ "ap-northeast-1": "config.ap-northeast-1.amazonaws.com",
+ "ap-southeast-1": "config.ap-southeast-1.amazonaws.com",
+ "ap-southeast-2": "config.ap-southeast-2.amazonaws.com",
+ "eu-central-1": "config.eu-central-1.amazonaws.com",
"eu-west-1": "config.eu-west-1.amazonaws.com",
- "ap-southeast-2": "config.ap-southeast-2.amazonaws.com"
+ "sa-east-1": "config.sa-east-1.amazonaws.com",
+ "us-east-1": "config.us-east-1.amazonaws.com",
+ "us-west-1": "config.us-west-1.amazonaws.com",
+ "us-west-2": "config.us-west-2.amazonaws.com"
},
"datapipeline": {
"us-east-1": "datapipeline.us-east-1.amazonaws.com",
@@ -119,6 +138,7 @@
},
"directconnect": {
"ap-northeast-1": "directconnect.ap-northeast-1.amazonaws.com",
+ "ap-northeast-2": "directconnect.ap-northeast-2.amazonaws.com",
"ap-southeast-1": "directconnect.ap-southeast-1.amazonaws.com",
"ap-southeast-2": "directconnect.ap-southeast-2.amazonaws.com",
"eu-west-1": "directconnect.eu-west-1.amazonaws.com",
@@ -130,6 +150,7 @@
},
"dynamodb": {
"ap-northeast-1": "dynamodb.ap-northeast-1.amazonaws.com",
+ "ap-northeast-2": "dynamodb.ap-northeast-2.amazonaws.com",
"ap-southeast-1": "dynamodb.ap-southeast-1.amazonaws.com",
"ap-southeast-2": "dynamodb.ap-southeast-2.amazonaws.com",
"cn-north-1": "dynamodb.cn-north-1.amazonaws.com.cn",
@@ -143,6 +164,7 @@
},
"ec2": {
"ap-northeast-1": "ec2.ap-northeast-1.amazonaws.com",
+ "ap-northeast-2": "ec2.ap-northeast-2.amazonaws.com",
"ap-southeast-1": "ec2.ap-southeast-1.amazonaws.com",
"ap-southeast-2": "ec2.ap-southeast-2.amazonaws.com",
"cn-north-1": "ec2.cn-north-1.amazonaws.com.cn",
@@ -155,22 +177,29 @@
"eu-central-1": "ec2.eu-central-1.amazonaws.com"
},
"ec2containerservice": {
- "us-east-1": "ecs.us-east-1.amazonaws.com"
+ "us-east-1": "ecs.us-east-1.amazonaws.com",
+ "us-west-2": "ecs.us-west-2.amazonaws.com",
+ "eu-west-1": "ecs.eu-west-1.amazonaws.com",
+ "ap-northeast-1": "ecs.ap-northeast-1.amazonaws.com",
+ "ap-southeast-2": "ecs.ap-southeast-2.amazonaws.com"
},
"elasticache": {
"ap-northeast-1": "elasticache.ap-northeast-1.amazonaws.com",
+ "ap-northeast-2": "elasticache.ap-northeast-2.amazonaws.com",
"ap-southeast-1": "elasticache.ap-southeast-1.amazonaws.com",
"ap-southeast-2": "elasticache.ap-southeast-2.amazonaws.com",
"cn-north-1": "elasticache.cn-north-1.amazonaws.com.cn",
"eu-west-1": "elasticache.eu-west-1.amazonaws.com",
"sa-east-1": "elasticache.sa-east-1.amazonaws.com",
"us-east-1": "elasticache.us-east-1.amazonaws.com",
+ "us-gov-west-1": "elasticache.us-gov-west-1.amazonaws.com",
"us-west-1": "elasticache.us-west-1.amazonaws.com",
"us-west-2": "elasticache.us-west-2.amazonaws.com",
"eu-central-1": "elasticache.eu-central-1.amazonaws.com"
},
"elasticbeanstalk": {
"ap-northeast-1": "elasticbeanstalk.ap-northeast-1.amazonaws.com",
+ "ap-northeast-2": "elasticbeanstalk.ap-northeast-2.amazonaws.com",
"ap-southeast-1": "elasticbeanstalk.ap-southeast-1.amazonaws.com",
"ap-southeast-2": "elasticbeanstalk.ap-southeast-2.amazonaws.com",
"eu-west-1": "elasticbeanstalk.eu-west-1.amazonaws.com",
@@ -182,6 +211,7 @@
},
"elasticloadbalancing": {
"ap-northeast-1": "elasticloadbalancing.ap-northeast-1.amazonaws.com",
+ "ap-northeast-2": "elasticloadbalancing.ap-northeast-2.amazonaws.com",
"ap-southeast-1": "elasticloadbalancing.ap-southeast-1.amazonaws.com",
"ap-southeast-2": "elasticloadbalancing.ap-southeast-2.amazonaws.com",
"cn-north-1": "elasticloadbalancing.cn-north-1.amazonaws.com.cn",
@@ -195,6 +225,7 @@
},
"elasticmapreduce": {
"ap-northeast-1": "ap-northeast-1.elasticmapreduce.amazonaws.com",
+ "ap-northeast-2": "ap-northeast-2.elasticmapreduce.amazonaws.com",
"ap-southeast-1": "ap-southeast-1.elasticmapreduce.amazonaws.com",
"ap-southeast-2": "ap-southeast-2.elasticmapreduce.amazonaws.com",
"cn-north-1": "elasticmapreduce.cn-north-1.amazonaws.com.cn",
@@ -212,11 +243,11 @@
"eu-west-1": "elastictranscoder.eu-west-1.amazonaws.com",
"us-east-1": "elastictranscoder.us-east-1.amazonaws.com",
"us-west-1": "elastictranscoder.us-west-1.amazonaws.com",
- "us-west-2": "elastictranscoder.us-west-2.amazonaws.com",
- "eu-central-1": "elastictranscoder.eu-central-1.amazonaws.com"
+ "us-west-2": "elastictranscoder.us-west-2.amazonaws.com"
},
"glacier": {
"ap-northeast-1": "glacier.ap-northeast-1.amazonaws.com",
+ "ap-northeast-2": "glacier.ap-northeast-2.amazonaws.com",
"ap-southeast-2": "glacier.ap-southeast-2.amazonaws.com",
"cn-north-1": "glacier.cn-north-1.amazonaws.com.cn",
"eu-west-1": "glacier.eu-west-1.amazonaws.com",
@@ -228,6 +259,7 @@
},
"iam": {
"ap-northeast-1": "iam.amazonaws.com",
+ "ap-northeast-2": "iam.amazonaws.com",
"ap-southeast-1": "iam.amazonaws.com",
"ap-southeast-2": "iam.amazonaws.com",
"cn-north-1": "iam.cn-north-1.amazonaws.com.cn",
@@ -241,8 +273,10 @@
},
"importexport": {
"ap-northeast-1": "importexport.amazonaws.com",
+ "ap-northeast-2": "importexport.amazonaws.com",
"ap-southeast-1": "importexport.amazonaws.com",
"ap-southeast-2": "importexport.amazonaws.com",
+ "eu-central-1": "importexport.amazonaws.com",
"eu-west-1": "importexport.amazonaws.com",
"sa-east-1": "importexport.amazonaws.com",
"us-east-1": "importexport.amazonaws.com",
@@ -251,15 +285,18 @@
},
"kinesis": {
"us-east-1": "kinesis.us-east-1.amazonaws.com",
+ "us-west-1": "kinesis.us-west-1.amazonaws.com",
"us-west-2": "kinesis.us-west-2.amazonaws.com",
"eu-west-1": "kinesis.eu-west-1.amazonaws.com",
"ap-southeast-1": "kinesis.ap-southeast-1.amazonaws.com",
"ap-southeast-2": "kinesis.ap-southeast-2.amazonaws.com",
"ap-northeast-1": "kinesis.ap-northeast-1.amazonaws.com",
+ "ap-northeast-2": "kinesis.ap-northeast-2.amazonaws.com",
"eu-central-1": "kinesis.eu-central-1.amazonaws.com"
},
"kms": {
"us-east-1": "kms.us-east-1.amazonaws.com",
+ "us-gov-west-1": "kms.us-gov-west-1.amazonaws.com",
"us-west-1": "kms.us-west-1.amazonaws.com",
"us-west-2": "kms.us-west-2.amazonaws.com",
"eu-west-1": "kms.eu-west-1.amazonaws.com",
@@ -267,24 +304,29 @@
"ap-southeast-2": "kms.ap-southeast-2.amazonaws.com",
"ap-southeast-1": "kms.ap-southeast-1.amazonaws.com",
"ap-northeast-1": "kms.ap-northeast-1.amazonaws.com",
+ "ap-northeast-2": "kms.ap-northeast-2.amazonaws.com",
"sa-east-1": "kms.sa-east-1.amazonaws.com"
},
"logs": {
"us-east-1": "logs.us-east-1.amazonaws.com",
"us-west-2": "logs.us-west-2.amazonaws.com",
+ "us-west-1": "logs.us-west-1.amazonaws.com",
"eu-west-1": "logs.eu-west-1.amazonaws.com",
- "eu-central-1": "logs.eu-central-1.amazonaws.com"
+ "eu-central-1": "logs.eu-central-1.amazonaws.com",
+ "ap-southeast-1": "logs.ap-southeast-1.amazonaws.com",
+ "ap-southeast-2": "logs.ap-southeast-2.amazonaws.com",
+ "ap-northeast-1": "logs.ap-northeast-1.amazonaws.com",
+ "ap-northeast-2": "logs.ap-northeast-2.amazonaws.com"
},
"opsworks": {
- "us-east-1": "opsworks.us-east-1.amazonaws.com",
- "eu-central-1": "opsworks.eu-central-1.amazonaws.com"
+ "us-east-1": "opsworks.us-east-1.amazonaws.com"
},
"machinelearning": {
- "us-east-1": "machinelearning.us-east-1.amazonaws.com",
- "us-west-2": "machinelearning.us-west-2.amazonaws.com"
+ "us-east-1": "machinelearning.us-east-1.amazonaws.com"
},
"rds": {
"ap-northeast-1": "rds.ap-northeast-1.amazonaws.com",
+ "ap-northeast-2": "rds.ap-northeast-2.amazonaws.com",
"ap-southeast-1": "rds.ap-southeast-1.amazonaws.com",
"ap-southeast-2": "rds.ap-southeast-2.amazonaws.com",
"cn-north-1": "rds.cn-north-1.amazonaws.com.cn",
@@ -298,15 +340,18 @@
},
"redshift": {
"ap-northeast-1": "redshift.ap-northeast-1.amazonaws.com",
+ "ap-northeast-2": "redshift.ap-northeast-2.amazonaws.com",
"ap-southeast-1": "redshift.ap-southeast-1.amazonaws.com",
"ap-southeast-2": "redshift.ap-southeast-2.amazonaws.com",
"eu-west-1": "redshift.eu-west-1.amazonaws.com",
"us-east-1": "redshift.us-east-1.amazonaws.com",
"us-west-2": "redshift.us-west-2.amazonaws.com",
- "eu-central-1": "redshift.eu-central-1.amazonaws.com"
+ "eu-central-1": "redshift.eu-central-1.amazonaws.com",
+ "us-gov-west-1": "redshift.us-gov-west-1.amazonaws.com"
},
"route53": {
"ap-northeast-1": "route53.amazonaws.com",
+ "ap-northeast-2": "route53.amazonaws.com",
"ap-southeast-1": "route53.amazonaws.com",
"ap-southeast-2": "route53.amazonaws.com",
"eu-central-1": "route53.amazonaws.com",
@@ -321,6 +366,7 @@
},
"s3": {
"ap-northeast-1": "s3-ap-northeast-1.amazonaws.com",
+ "ap-northeast-2": "s3-ap-northeast-2.amazonaws.com",
"ap-southeast-1": "s3-ap-southeast-1.amazonaws.com",
"ap-southeast-2": "s3-ap-southeast-2.amazonaws.com",
"cn-north-1": "s3.cn-north-1.amazonaws.com.cn",
@@ -340,17 +386,16 @@
"sa-east-1": "sdb.sa-east-1.amazonaws.com",
"us-east-1": "sdb.amazonaws.com",
"us-west-1": "sdb.us-west-1.amazonaws.com",
- "us-west-2": "sdb.us-west-2.amazonaws.com",
- "eu-central-1": "sdb.eu-central-1.amazonaws.com"
+ "us-west-2": "sdb.us-west-2.amazonaws.com"
},
"ses": {
"eu-west-1": "email.eu-west-1.amazonaws.com",
"us-east-1": "email.us-east-1.amazonaws.com",
- "us-west-2": "email.us-west-2.amazonaws.com",
- "eu-central-1": "email.eu-central-1.amazonaws.com"
+ "us-west-2": "email.us-west-2.amazonaws.com"
},
"sns": {
"ap-northeast-1": "sns.ap-northeast-1.amazonaws.com",
+ "ap-northeast-2": "sns.ap-northeast-2.amazonaws.com",
"ap-southeast-1": "sns.ap-southeast-1.amazonaws.com",
"ap-southeast-2": "sns.ap-southeast-2.amazonaws.com",
"cn-north-1": "sns.cn-north-1.amazonaws.com.cn",
@@ -364,6 +409,7 @@
},
"sqs": {
"ap-northeast-1": "ap-northeast-1.queue.amazonaws.com",
+ "ap-northeast-2": "ap-northeast-2.queue.amazonaws.com",
"ap-southeast-1": "ap-southeast-1.queue.amazonaws.com",
"ap-southeast-2": "ap-southeast-2.queue.amazonaws.com",
"cn-north-1": "cn-north-1.queue.amazonaws.com.cn",
@@ -388,6 +434,7 @@
},
"sts": {
"ap-northeast-1": "sts.amazonaws.com",
+ "ap-northeast-2": "sts.amazonaws.com",
"ap-southeast-1": "sts.amazonaws.com",
"ap-southeast-2": "sts.amazonaws.com",
"cn-north-1": "sts.cn-north-1.amazonaws.com.cn",
@@ -400,11 +447,11 @@
"eu-central-1": "sts.amazonaws.com"
},
"support": {
- "us-east-1": "support.us-east-1.amazonaws.com",
- "eu-central-1": "support.eu-central-1.amazonaws.com"
+ "us-east-1": "support.us-east-1.amazonaws.com"
},
"swf": {
"ap-northeast-1": "swf.ap-northeast-1.amazonaws.com",
+ "ap-northeast-2": "swf.ap-northeast-2.amazonaws.com",
"ap-southeast-1": "swf.ap-southeast-1.amazonaws.com",
"ap-southeast-2": "swf.ap-southeast-2.amazonaws.com",
"cn-north-1": "swf.cn-north-1.amazonaws.com.cn",
diff --git a/boto/gs/key.py b/boto/gs/key.py
index c4fcf01f..bc49e243 100644
--- a/boto/gs/key.py
+++ b/boto/gs/key.py
@@ -270,7 +270,7 @@ class Key(S3Key):
:type fp: file
:param fp: The file pointer to upload. The file pointer must
- point point at the offset from which you wish to upload.
+ point at the offset from which you wish to upload.
ie. if uploading the full file, it should point at the
start of the file. Normally when a file is opened for
reading, the fp will point at the first byte. See the
diff --git a/boto/iam/connection.py b/boto/iam/connection.py
index 392d3f35..f66931f4 100644
--- a/boto/iam/connection.py
+++ b/boto/iam/connection.py
@@ -1640,3 +1640,293 @@ class IAMConnection(AWSQueryConnection):
if require_uppercase_characters is not None and type(allow_users_to_change_password) is bool:
params['RequireUppercaseCharacters'] = str(require_uppercase_characters).lower()
return self.get_response('UpdateAccountPasswordPolicy', params)
+
+ def create_policy(self, policy_name, policy_document, path='/',
+ description=None):
+ """
+ Create a policy.
+
+ :type policy_name: string
+ :param policy_name: The name of the new policy
+
+    :type policy_document: string
+ :param policy_document: The document of the new policy
+
+ :type path: string
+ :param path: The path in which the policy will be created.
+ Defaults to /.
+
+ :type description: string
+    :param description: A description of the new policy.
+
+ """
+ params = {'PolicyName': policy_name,
+ 'PolicyDocument': policy_document,
+ 'Path': path}
+ if description is not None:
+ params['Description'] = str(description)
+
+ return self.get_response('CreatePolicy', params)
+
+ def create_policy_version(
+ self,
+ policy_arn,
+ policy_document,
+ set_as_default=None):
+ """
+ Create a policy version.
+
+ :type policy_arn: string
+ :param policy_arn: The ARN of the policy
+
+    :type policy_document: string
+ :param policy_document: The document of the new policy version
+
+ :type set_as_default: bool
+ :param set_as_default: Sets the policy version as default
+ Defaults to None.
+
+ """
+ params = {'PolicyArn': policy_arn,
+ 'PolicyDocument': policy_document}
+ if type(set_as_default) == bool:
+ params['SetAsDefault'] = str(set_as_default).lower()
+ return self.get_response('CreatePolicyVersion', params)
+
+ def delete_policy(self, policy_arn):
+ """
+ Delete a policy.
+
+ :type policy_arn: string
+ :param policy_arn: The ARN of the policy to delete
+
+ """
+ params = {'PolicyArn': policy_arn}
+ return self.get_response('DeletePolicy', params)
+
+ def delete_policy_version(self, policy_arn, version_id):
+ """
+ Delete a policy version.
+
+ :type policy_arn: string
+ :param policy_arn: The ARN of the policy to delete a version from
+
+ :type version_id: string
+ :param version_id: The id of the version to delete
+
+ """
+ params = {'PolicyArn': policy_arn,
+ 'VersionId': version_id}
+ return self.get_response('DeletePolicyVersion', params)
+
+ def get_policy(self, policy_arn):
+ """
+ Get policy information.
+
+ :type policy_arn: string
+ :param policy_arn: The ARN of the policy to get information for
+
+ """
+ params = {'PolicyArn': policy_arn}
+ return self.get_response('GetPolicy', params)
+
+ def get_policy_version(self, policy_arn, version_id):
+ """
+ Get policy information.
+
+ :type policy_arn: string
+ :param policy_arn: The ARN of the policy to get information for a
+ specific version
+
+ :type version_id: string
+ :param version_id: The id of the version to get information for
+
+ """
+ params = {'PolicyArn': policy_arn,
+ 'VersionId': version_id}
+ return self.get_response('GetPolicyVersion', params)
+
+ def list_policies(self, marker=None, max_items=None, only_attached=None,
+ path_prefix=None, scope=None):
+ """
+ List policies of account.
+
+ :type marker: string
+ :param marker: A marker used for pagination (received from previous
+ accesses)
+
+ :type max_items: int
+ :param max_items: Send only max_items; allows paginations
+
+ :type only_attached: bool
+ :param only_attached: Send only policies attached to other resources
+
+ :type path_prefix: string
+ :param path_prefix: Send only items prefixed by this path
+
+ :type scope: string
+ :param scope: AWS|Local. Choose between AWS policies or your own
+ """
+ params = {}
+ if path_prefix is not None:
+ params['PathPrefix'] = path_prefix
+ if marker is not None:
+ params['Marker'] = marker
+ if max_items is not None:
+ params['MaxItems'] = max_items
+ if type(only_attached) == bool:
+ params['OnlyAttached'] = str(only_attached).lower()
+ if scope is not None:
+ params['Scope'] = scope
+ return self.get_response(
+ 'ListPolicies',
+ params,
+ list_marker='Policies')
+
+ def list_policy_versions(self, policy_arn, marker=None, max_items=None):
+ """
+ List policy versions.
+
+ :type policy_arn: string
+ :param policy_arn: The ARN of the policy to get versions of
+
+ :type marker: string
+ :param marker: A marker used for pagination (received from previous
+ accesses)
+
+ :type max_items: int
+ :param max_items: Send only max_items; allows paginations
+
+ """
+ params = {'PolicyArn': policy_arn}
+ if marker is not None:
+ params['Marker'] = marker
+ if max_items is not None:
+ params['MaxItems'] = max_items
+ return self.get_response(
+ 'ListPolicyVersions',
+ params,
+ list_marker='Versions')
+
+ def set_default_policy_version(self, policy_arn, version_id):
+ """
+ Set default policy version.
+
+ :type policy_arn: string
+ :param policy_arn: The ARN of the policy to set the default version
+ for
+
+ :type version_id: string
+ :param version_id: The id of the version to set as default
+ """
+ params = {'PolicyArn': policy_arn,
+ 'VersionId': version_id}
+ return self.get_response('SetDefaultPolicyVersion', params)
+
+ def list_entities_for_policy(self, policy_arn, path_prefix=None,
+ marker=None, max_items=None,
+ entity_filter=None):
+ """
+ :type policy_arn: string
+ :param policy_arn: The ARN of the policy to get entities for
+
+ :type marker: string
+ :param marker: A marker used for pagination (received from previous
+ accesses)
+
+ :type max_items: int
+ :param max_items: Send only max_items; allows paginations
+
+ :type path_prefix: string
+ :param path_prefix: Send only items prefixed by this path
+
+ :type entity_filter: string
+ :param entity_filter: Which entity type of User | Role | Group |
+ LocalManagedPolicy | AWSManagedPolicy to return
+
+ """
+ params = {'PolicyArn': policy_arn}
+ if marker is not None:
+ params['Marker'] = marker
+ if max_items is not None:
+ params['MaxItems'] = max_items
+ if path_prefix is not None:
+ params['PathPrefix'] = path_prefix
+ if entity_filter is not None:
+ params['EntityFilter'] = entity_filter
+ return self.get_response('ListEntitiesForPolicy', params,
+ list_marker=('PolicyGroups',
+ 'PolicyUsers',
+ 'PolicyRoles'))
+
+ def attach_group_policy(self, policy_arn, group_name):
+ """
+ :type policy_arn: string
+ :param policy_arn: The ARN of the policy to attach
+
+ :type group_name: string
+ :param group_name: Group to attach the policy to
+
+ """
+ params = {'PolicyArn': policy_arn, 'GroupName': group_name}
+ return self.get_response('AttachGroupPolicy', params)
+
+ def attach_role_policy(self, policy_arn, role_name):
+ """
+ :type policy_arn: string
+ :param policy_arn: The ARN of the policy to attach
+
+ :type role_name: string
+ :param role_name: Role to attach the policy to
+
+ """
+ params = {'PolicyArn': policy_arn, 'RoleName': role_name}
+ return self.get_response('AttachRolePolicy', params)
+
+ def attach_user_policy(self, policy_arn, user_name):
+ """
+ :type policy_arn: string
+ :param policy_arn: The ARN of the policy to attach
+
+ :type user_name: string
+ :param user_name: User to attach the policy to
+
+ """
+ params = {'PolicyArn': policy_arn, 'UserName': user_name}
+ return self.get_response('AttachUserPolicy', params)
+
+ def detach_group_policy(self, policy_arn, group_name):
+ """
+ :type policy_arn: string
+ :param policy_arn: The ARN of the policy to detach
+
+ :type group_name: string
+ :param group_name: Group to detach the policy from
+
+ """
+ params = {'PolicyArn': policy_arn, 'GroupName': group_name}
+ return self.get_response('DetachGroupPolicy', params)
+
+ def detach_role_policy(self, policy_arn, role_name):
+ """
+ :type policy_arn: string
+ :param policy_arn: The ARN of the policy to detach
+
+ :type role_name: string
+ :param role_name: Role to detach the policy from
+
+ """
+ params = {'PolicyArn': policy_arn, 'RoleName': role_name}
+ return self.get_response('DetachRolePolicy', params)
+
+ def detach_user_policy(self, policy_arn, user_name):
+ """
+ :type policy_arn: string
+ :param policy_arn: The ARN of the policy to detach
+
+ :type user_name: string
+ :param user_name: User to detach the policy from
+
+ """
+ params = {'PolicyArn': policy_arn, 'UserName': user_name}
+ return self.get_response('DetachUserPolicy', params)
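
A usage sketch of the new managed-policy calls; the policy document, names,
and ARN are placeholders (the real ARN is returned by create_policy):

    import boto

    iam = boto.connect_iam()
    policy_doc = ('{"Version": "2012-10-17", "Statement": [{"Effect": "Allow",'
                  ' "Action": "s3:ListAllMyBuckets", "Resource": "*"}]}')
    iam.create_policy('list-buckets', policy_doc, description='List S3 buckets')

    arn = 'arn:aws:iam::123456789012:policy/list-buckets'  # placeholder ARN
    iam.attach_role_policy(arn, 'my-app-role')
    print(iam.list_policies(scope='Local'))
    iam.detach_role_policy(arn, 'my-app-role')
    iam.delete_policy(arn)
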
diff --git a/boto/kms/layer1.py b/boto/kms/layer1.py
index f44cd048..88ea2e0f 100644
--- a/boto/kms/layer1.py
+++ b/boto/kms/layer1.py
@@ -279,7 +279,7 @@ class KMSConnection(AWSQueryConnection):
"Value of argument ``ciphertext_blob`` "
"must be of type %s." % six.binary_type)
ciphertext_blob = base64.b64encode(ciphertext_blob)
- params = {'CiphertextBlob': ciphertext_blob, }
+ params = {'CiphertextBlob': ciphertext_blob.decode('utf-8'), }
if encryption_context is not None:
params['EncryptionContext'] = encryption_context
if grant_tokens is not None:
@@ -403,7 +403,7 @@ class KMSConnection(AWSQueryConnection):
"Value of argument ``plaintext`` "
"must be of type %s." % six.binary_type)
plaintext = base64.b64encode(plaintext)
- params = {'KeyId': key_id, 'Plaintext': plaintext, }
+ params = {'KeyId': key_id, 'Plaintext': plaintext.decode('utf-8'), }
if encryption_context is not None:
params['EncryptionContext'] = encryption_context
if grant_tokens is not None:
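
A small sketch of the encrypt/decrypt round trip these fixes affect; the key
alias is a placeholder, plaintext must be bytes, and it assumes the response's
CiphertextBlob comes back base64-decoded as boto's KMS layer does for responses:

    import boto.kms

    kms = boto.kms.connect_to_region('us-west-2')
    result = kms.encrypt('alias/my-key', b'secret data')
    plaintext = kms.decrypt(result['CiphertextBlob'])['Plaintext']
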
diff --git a/boto/mturk/connection.py b/boto/mturk/connection.py
index 4f2a23fa..506746cf 100644
--- a/boto/mturk/connection.py
+++ b/boto/mturk/connection.py
@@ -844,7 +844,7 @@ class MTurkConnection(AWSQueryConnection):
body = response.read()
if self.debug == 2:
print(body)
- if '<Errors>' not in body:
+ if '<Errors>' not in body.decode('utf-8'):
rs = ResultSet(marker_elems)
h = handler.XmlHandler(rs, self)
xml.sax.parseString(body, h)
diff --git a/boto/pyami/installers/ubuntu/ebs.py b/boto/pyami/installers/ubuntu/ebs.py
index 54a47985..a0a50704 100644
--- a/boto/pyami/installers/ubuntu/ebs.py
+++ b/boto/pyami/installers/ubuntu/ebs.py
@@ -64,10 +64,10 @@ class Backup(ScriptBase):
self.run("/usr/sbin/xfs_freeze -f ${mount_point}", exit_on_error = True)
snapshot = ec2.create_snapshot('${volume_id}')
boto.log.info("Snapshot created: %s " % snapshot)
- except Exception, e:
+ except Exception as e:
self.notify(subject="${instance_id} Backup Failed", body=traceback.format_exc())
boto.log.info("Snapshot created: ${volume_id}")
- except Exception, e:
+ except Exception as e:
self.notify(subject="${instance_id} Backup Failed", body=traceback.format_exc())
finally:
self.run("/usr/sbin/xfs_freeze -u ${mount_point}")
diff --git a/boto/rds/__init__.py b/boto/rds/__init__.py
index 8e8afa81..15c838b8 100644
--- a/boto/rds/__init__.py
+++ b/boto/rds/__init__.py
@@ -451,7 +451,7 @@ class RDSConnection(AWSQueryConnection):
self.build_list_params(params, l, 'VpcSecurityGroupIds.member')
# Remove any params set to None
- for k, v in params.items():
+ for k, v in list(params.items()):
if v is None: del(params[k])
return self.get_object('CreateDBInstance', params, DBInstance)
diff --git a/boto/rds2/layer1.py b/boto/rds2/layer1.py
index 5615f110..bbe5a778 100644
--- a/boto/rds2/layer1.py
+++ b/boto/rds2/layer1.py
@@ -192,6 +192,8 @@ class RDSConnection(AWSQueryConnection):
:type tags: list
:param tags: The tags to be assigned to the Amazon RDS resource.
+ Tags must be passed as tuples in the form
+ [('key1', 'valueForKey1'), ('key2', 'valueForKey2')]
"""
params = {'ResourceName': resource_name, }
@@ -301,8 +303,8 @@ class RDSConnection(AWSQueryConnection):
Example: `my-db-snapshot`
:type tags: list
- :param tags: A list of tags.
-
+ :param tags: A list of tags. Tags must be passed as tuples in the form
+ [('key1', 'valueForKey1'), ('key2', 'valueForKey2')]
"""
params = {
'SourceDBSnapshotIdentifier': source_db_snapshot_identifier,
@@ -645,7 +647,8 @@ class RDSConnection(AWSQueryConnection):
has not been set, the DB instance will be private.
:type tags: list
- :param tags: A list of tags.
+ :param tags: A list of tags. Tags must be passed as tuples in the form
+ [('key1', 'valueForKey1'), ('key2', 'valueForKey2')]
"""
params = {
@@ -799,7 +802,8 @@ class RDSConnection(AWSQueryConnection):
has not been set, the DB instance will be private.
:type tags: list
- :param tags: A list of tags.
+ :param tags: A list of tags. Tags must be passed as tuples in the form
+ [('key1', 'valueForKey1'), ('key2', 'valueForKey2')]
"""
params = {
@@ -874,7 +878,8 @@ class RDSConnection(AWSQueryConnection):
:param description: The description for the DB parameter group.
:type tags: list
- :param tags: A list of tags.
+ :param tags: A list of tags. Tags must be passed as tuples in the form
+ [('key1', 'valueForKey1'), ('key2', 'valueForKey2')]
"""
params = {
@@ -918,7 +923,8 @@ class RDSConnection(AWSQueryConnection):
security group.
:type tags: list
- :param tags: A list of tags.
+ :param tags: A list of tags. Tags must be passed as tuples in the form
+ [('key1', 'valueForKey1'), ('key2', 'valueForKey2')]
"""
params = {
@@ -967,7 +973,8 @@ class RDSConnection(AWSQueryConnection):
+ Cannot end with a hyphen or contain two consecutive hyphens
:type tags: list
- :param tags: A list of tags.
+ :param tags: A list of tags. Tags must be passed as tuples in the form
+ [('key1', 'valueForKey1'), ('key2', 'valueForKey2')]
"""
params = {
@@ -1007,7 +1014,8 @@ class RDSConnection(AWSQueryConnection):
:param subnet_ids: The EC2 Subnet IDs for the DB subnet group.
:type tags: list
- :param tags: A list of tags into tuples.
+ :param tags: A list of tags. Tags must be passed as tuples in the form
+ [('key1', 'valueForKey1'), ('key2', 'valueForKey2')]
"""
params = {
@@ -1106,7 +1114,8 @@ class RDSConnection(AWSQueryConnection):
active it.
:type tags: list
- :param tags: A list of tags.
+ :param tags: A list of tags. Tags must be passed as tuples in the form
+ [('key1', 'valueForKey1'), ('key2', 'valueForKey2')]
"""
params = {
@@ -1168,7 +1177,8 @@ class RDSConnection(AWSQueryConnection):
:param option_group_description: The description of the option group.
:type tags: list
- :param tags: A list of tags.
+ :param tags: A list of tags. Tags must be passed as tuples in the form
+ [('key1', 'valueForKey1'), ('key2', 'valueForKey2')]
"""
params = {
@@ -3109,7 +3119,8 @@ class RDSConnection(AWSQueryConnection):
Default: `1`
:type tags: list
- :param tags: A list of tags.
+ :param tags: A list of tags. Tags must be passed as tuples in the form
+ [('key1', 'valueForKey1'), ('key2', 'valueForKey2')]
"""
params = {
@@ -3438,7 +3449,8 @@ class RDSConnection(AWSQueryConnection):
DB instance
:type tags: list
- :param tags: A list of tags.
+ :param tags: A list of tags. Tags must be passed as tuples in the form
+ [('key1', 'valueForKey1'), ('key2', 'valueForKey2')]
"""
params = {
@@ -3646,7 +3658,8 @@ class RDSConnection(AWSQueryConnection):
DB instance
:type tags: list
- :param tags: A list of tags.
+ :param tags: A list of tags. Tags must be passed as tuples in the form
+ [('key1', 'valueForKey1'), ('key2', 'valueForKey2')]
"""
params = {
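
To illustrate the tuple form called out in the docstrings above, here is a minimal, hypothetical sketch using add_tags_to_resource; the region, resource ARN and tag values are placeholders and not part of this patch:

>>> import boto.rds2
>>> conn = boto.rds2.connect_to_region('us-west-2')
>>> conn.add_tags_to_resource(
...     'arn:aws:rds:us-west-2:123456789012:db:mydbinstance',
...     tags=[('environment', 'production'), ('owner', 'data-team')])
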
diff --git a/boto/s3/bucket.py b/boto/s3/bucket.py
index 504f24f9..061bf7a4 100644
--- a/boto/s3/bucket.py
+++ b/boto/s3/bucket.py
@@ -57,6 +57,7 @@ class S3WebsiteEndpointTranslate(object):
trans_region = defaultdict(lambda: 's3-website-us-east-1')
trans_region['eu-west-1'] = 's3-website-eu-west-1'
+ trans_region['eu-central-1'] = 's3-website.eu-central-1'
trans_region['us-west-1'] = 's3-website-us-west-1'
trans_region['us-west-2'] = 's3-website-us-west-2'
trans_region['sa-east-1'] = 's3-website-sa-east-1'
@@ -355,7 +356,7 @@ class Bucket(object):
Valid options: ``url``
:type encoding_type: string
- :rtype: :class:`boto.s3.bucketlistresultset.BucketListResultSet`
+ :rtype: :class:`boto.s3.bucketlistresultset.MultiPartUploadListResultSet`
:return: an instance of a BucketListResultSet that handles paging, etc
"""
return MultiPartUploadListResultSet(self, key_marker,
diff --git a/boto/s3/bucketlistresultset.py b/boto/s3/bucketlistresultset.py
index ab9c65e4..e9044276 100644
--- a/boto/s3/bucketlistresultset.py
+++ b/boto/s3/bucketlistresultset.py
@@ -19,7 +19,7 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
-from boto.compat import urllib, six
+from boto.compat import unquote_str
def bucket_lister(bucket, prefix='', delimiter='', marker='', headers=None,
encoding_type=None):
@@ -37,9 +37,7 @@ def bucket_lister(bucket, prefix='', delimiter='', marker='', headers=None,
if k:
marker = rs.next_marker or k.name
if marker and encoding_type == "url":
- if isinstance(marker, six.text_type):
- marker = marker.encode('utf-8')
- marker = urllib.parse.unquote(marker)
+ marker = unquote_str(marker)
more_results= rs.is_truncated
class BucketListResultSet(object):
diff --git a/boto/s3/connection.py b/boto/s3/connection.py
index 0fcc1f59..dc2aa443 100644
--- a/boto/s3/connection.py
+++ b/boto/s3/connection.py
@@ -138,7 +138,8 @@ class ProtocolIndependentOrdinaryCallingFormat(OrdinaryCallingFormat):
class Location(object):
DEFAULT = '' # US Classic Region
- EU = 'EU'
+ EU = 'EU' # Ireland
+ EUCentral1 = 'eu-central-1' # Frankfurt
USWest = 'us-west-1'
USWest2 = 'us-west-2'
SAEast = 'sa-east-1'
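
A brief sketch of how the new Frankfurt location constant would typically be used; the bucket name is a placeholder:

>>> import boto.s3
>>> from boto.s3.connection import Location
>>> conn = boto.s3.connect_to_region('eu-central-1')
>>> bucket = conn.create_bucket('my-frankfurt-bucket', location=Location.EUCentral1)
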
diff --git a/boto/s3/key.py b/boto/s3/key.py
index 194c6b6e..de865258 100644
--- a/boto/s3/key.py
+++ b/boto/s3/key.py
@@ -634,17 +634,17 @@ class Key(object):
Generate a URL to access this key.
:type expires_in: int
- :param expires_in: How long the url is valid for, in seconds
+ :param expires_in: How long the url is valid for, in seconds.
:type method: string
:param method: The method to use for retrieving the file
- (default is GET)
+ (default is GET).
:type headers: dict
- :param headers: Any headers to pass along in the request
+ :param headers: Any headers to pass along in the request.
:type query_auth: bool
- :param query_auth:
+ :param query_auth: If True, signs the request in the URL.
:type force_http: bool
:param force_http: If True, http will be used instead of https.
@@ -713,7 +713,7 @@ class Key(object):
:type fp: file
:param fp: The file pointer to upload. The file pointer must
- point point at the offset from which you wish to upload.
+ point at the offset from which you wish to upload.
ie. if uploading the full file, it should point at the
start of the file. Normally when a file is opened for
reading, the fp will point at the first byte. See the
@@ -1387,9 +1387,9 @@ class Key(object):
the second representing the size of the to be transmitted
object.
- :type cb: int
+ :type num_cb: int
:param num_cb: (optional) If a callback is specified with the
- cb parameter this parameter determines the granularity of
+ num_cb parameter this parameter determines the granularity of
the callback by defining the maximum number of times the
callback will be called during the file transfer.
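
A short sketch of generate_url with the parameters documented above; the bucket and key names are placeholders:

>>> from boto.s3.connection import S3Connection
>>> bucket = S3Connection().get_bucket('my-bucket')
>>> key = bucket.get_key('reports/2016-01.csv')
>>> url = key.generate_url(expires_in=3600, method='GET', query_auth=True)
>>> plain_url = key.generate_url(expires_in=0, query_auth=False, force_http=True)
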
diff --git a/boto/sns/connection.py b/boto/sns/connection.py
index 5a6da205..8ba7d481 100644
--- a/boto/sns/connection.py
+++ b/boto/sns/connection.py
@@ -216,10 +216,10 @@ class SNSConnection(AWSQueryConnection):
def publish(self, topic=None, message=None, subject=None, target_arn=None,
message_structure=None, message_attributes=None):
"""
- Get properties of a Topic
+ Sends a message to all of a topic's subscribed endpoints.
:type topic: string
- :param topic: The ARN of the new topic.
+ :param topic: The topic you want to publish to.
:type message: string
:param message: The message you want to send to the topic.
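
A minimal sketch of publish with the corrected parameter descriptions above; the topic ARN, region and message are placeholders:

>>> import boto.sns
>>> conn = boto.sns.connect_to_region('us-east-1')
>>> conn.publish(topic='arn:aws:sns:us-east-1:123456789012:my-topic',
...              message='deployment finished', subject='status update')
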
diff --git a/boto/sqs/connection.py b/boto/sqs/connection.py
index 0eafc1fa..bd340d15 100644
--- a/boto/sqs/connection.py
+++ b/boto/sqs/connection.py
@@ -131,7 +131,7 @@ class SQSConnection(AWSQueryConnection):
:param queue: The SQS queue to get attributes for
:type attribute: str
- :type attribute: The specific attribute requested. If not
+ :param attribute: The specific attribute requested. If not
supplied, the default is to return all attributes. Valid
attributes are:
@@ -158,6 +158,65 @@ class SQSConnection(AWSQueryConnection):
Attributes, queue.id)
def set_queue_attribute(self, queue, attribute, value):
+ """
+ Set a new value for an attribute of a Queue.
+
+ :type queue: A Queue object
+ :param queue: The SQS queue whose attribute is being set
+
+ :type attribute: String
+ :param attribute: The name of the attribute you want to set.
+
+ :param value: The new value for the attribute. Valid values depend on the attribute:
+
+ * For `DelaySeconds` the value must be an integer number of
+ seconds from 0 to 900 (15 minutes).
+ >>> connection.set_queue_attribute(queue, 'DelaySeconds', 900)
+
+ * For `MaximumMessageSize` the value must be an integer number of
+ bytes from 1024 (1 KiB) to 262144 (256 KiB).
+ >>> connection.set_queue_attribute(queue, 'MaximumMessageSize', 262144)
+
+ * For `MessageRetentionPeriod` the value must be an integer number of
+ seconds from 60 (1 minute) to 1209600 (14 days).
+ >>> connection.set_queue_attribute(queue, 'MessageRetentionPeriod', 1209600)
+
+ * For `Policy` the value must be a string that contains JSON formatted
+ parameters and values.
+ >>> connection.set_queue_attribute(queue, 'Policy', json.dumps({
+ ... 'Version': '2008-10-17',
+ ... 'Id': '/123456789012/testQueue/SQSDefaultPolicy',
+ ... 'Statement': [
+ ... {
+ ... 'Sid': 'Queue1ReceiveMessage',
+ ... 'Effect': 'Allow',
+ ... 'Principal': {
+ ... 'AWS': '*'
+ ... },
+ ... 'Action': 'SQS:ReceiveMessage',
+ ... 'Resource': 'arn:aws:sqs:us-east-1:123456789012:testQueue'
+ ... }
+ ... ]
+ ... }))
+
+ * For `ReceiveMessageWaitTimeSeconds` the value must be an integer number of
+ seconds from 0 to 20.
+ >>> connection.set_queue_attribute(queue, 'ReceiveMessageWaitTimeSeconds', 20)
+
+ * For `VisibilityTimeout` the value must be an integer number of
+ seconds from 0 to 43200 (12 hours).
+ >>> connection.set_queue_attribute(queue, 'VisibilityTimeout', 43200)
+
+ * For `RedrivePolicy` the value must be a string that contains JSON formatted
+ parameters and values. You can set maxReceiveCount to a value between 1 and 1000.
+ The deadLetterTargetArn value is the Amazon Resource Name (ARN) of the queue that
+ will receive the dead letter messages.
+ >>> connection.set_queue_attribute(queue, 'RedrivePolicy', json.dumps({
+ ... 'maxReceiveCount': 5,
+ ... 'deadLetterTargetArn': "arn:aws:sqs:us-east-1:123456789012:testDeadLetterQueue"
+ ... }))
+ """
+
params = {'Attribute.Name' : attribute, 'Attribute.Value' : value}
return self.get_status('SetQueueAttributes', params, queue.id)
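
An end-to-end sketch tying the new docstring examples together; the region and queue name are placeholders:

>>> import boto.sqs
>>> conn = boto.sqs.connect_to_region('us-east-1')
>>> queue = conn.get_queue('testQueue')
>>> conn.set_queue_attribute(queue, 'VisibilityTimeout', 120)
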
diff --git a/boto/sqs/queue.py b/boto/sqs/queue.py
index bf3720d9..c81ed76e 100644
--- a/boto/sqs/queue.py
+++ b/boto/sqs/queue.py
@@ -55,8 +55,12 @@ class Queue(object):
def _arn(self):
parts = self.id.split('/')
- return 'arn:aws:sqs:%s:%s:%s' % (
- self.connection.region.name, parts[1], parts[2])
+ if self.connection.region.name == 'cn-north-1':
+ partition = 'aws-cn'
+ else:
+ partition = 'aws'
+ return 'arn:%s:sqs:%s:%s:%s' % (
+ partition, self.connection.region.name, parts[1], parts[2])
arn = property(_arn)
def startElement(self, name, attrs, connection):
@@ -107,12 +111,57 @@ class Queue(object):
Set a new value for an attribute of the Queue.
:type attribute: String
- :param attribute: The name of the attribute you want to set. The
- only valid value at this time is: VisibilityTimeout
- :type value: int
- :param value: The new value for the attribute.
- For VisibilityTimeout the value must be an
- integer number of seconds from 0 to 86400.
+ :param attribute: The name of the attribute you want to set.
+
+ :param value: The new value for the attribute. Valid values depend on the attribute:
+
+
+ * For `DelaySeconds` the value must be an integer number of
+ seconds from 0 to 900 (15 minutes).
+ >>> queue.set_attribute('DelaySeconds', 900)
+
+ * For `MaximumMessageSize` the value must be an integer number of
+ bytes from 1024 (1 KiB) to 262144 (256 KiB).
+ >>> queue.set_attribute('MaximumMessageSize', 262144)
+
+ * For `MessageRetentionPeriod` the value must be an integer number of
+ seconds from 60 (1 minute) to 1209600 (14 days).
+ >>> queue.set_attribute('MessageRetentionPeriod', 1209600)
+
+ * For `Policy` the value must be a string that contains JSON formatted
+ parameters and values.
+ >>> queue.set_attribute('Policy', json.dumps({
+ ... 'Version': '2008-10-17',
+ ... 'Id': '/123456789012/testQueue/SQSDefaultPolicy',
+ ... 'Statement': [
+ ... {
+ ... 'Sid': 'Queue1ReceiveMessage',
+ ... 'Effect': 'Allow',
+ ... 'Principal': {
+ ... 'AWS': '*'
+ ... },
+ ... 'Action': 'SQS:ReceiveMessage',
+ ... 'Resource': 'arn:aws:sqs:us-east-1:123456789012:testQueue'
+ ... }
+ ... ]
+ ... }))
+
+ * For `ReceiveMessageWaitTimeSeconds` the value must be an integer number of
+ seconds from 0 to 20.
+ >>> queue.set_attribute('ReceiveMessageWaitTimeSeconds', 20)
+
+ * For `VisibilityTimeout` the value must be an integer number of
+ seconds from 0 to 43200 (12 hours).
+ >>> queue.set_attribute('VisibilityTimeout', 43200)
+
+ * For `RedrivePolicy` the value must be a string that contains JSON formatted
+ parameters and values. You can set maxReceiveCount to a value between 1 and 1000.
+ The deadLetterTargetArn value is the Amazon Resource Name (ARN) of the queue that
+ will receive the dead letter messages.
+ >>> queue.set_attribute('RedrivePolicy', json.dumps({
+ ... 'maxReceiveCount': 5,
+ ... 'deadLetterTargetArn': "arn:aws:sqs:us-east-1:123456789012:testDeadLetterQueue"
+ ... }))
:rtype: bool
:return: True if successful, otherwise False.
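
A minimal sketch of the partition-aware arn property added above, reusing the connection from the previous sketch; the account id is a placeholder, and only cn-north-1 switches to the aws-cn partition:

>>> queue = conn.get_queue('testQueue')      # connection made in us-east-1
>>> queue.arn
'arn:aws:sqs:us-east-1:123456789012:testQueue'
>>> # the same queue in cn-north-1 would yield 'arn:aws-cn:sqs:cn-north-1:...'
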
diff --git a/boto/storage_uri.py b/boto/storage_uri.py
index 34b7b060..c1597b11 100755
--- a/boto/storage_uri.py
+++ b/boto/storage_uri.py
@@ -432,7 +432,10 @@ class BucketStorageUri(StorageUri):
"""sets or updates a bucket's CORS XML"""
self._check_bucket_uri('set_cors ')
bucket = self.get_bucket(validate, headers)
- bucket.set_cors(cors.to_xml(), headers)
+ if self.scheme == 's3':
+ bucket.set_cors(cors, headers)
+ else:
+ bucket.set_cors(cors.to_xml(), headers)
def get_location(self, validate=False, headers=None):
self._check_bucket_uri('get_location')
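
A hedged sketch of the new s3-scheme branch in set_cors; the CORSConfiguration helper and its add_rule arguments are assumptions drawn from boto.s3.cors, and the bucket name is a placeholder:

>>> import boto
>>> from boto.s3.cors import CORSConfiguration
>>> cors = CORSConfiguration()
>>> cors.add_rule('GET', '*', allowed_header='*', max_age_seconds=3000)
>>> uri = boto.storage_uri('s3://my-bucket', 's3')
>>> uri.set_cors(cors)       # passed through unchanged for the s3 scheme
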
diff --git a/boto/swf/layer1.py b/boto/swf/layer1.py
index bba16ad2..0264befe 100644
--- a/boto/swf/layer1.py
+++ b/boto/swf/layer1.py
@@ -660,9 +660,10 @@ class Layer1(AWSAuthConnection):
def deprecate_activity_type(self, domain, activity_name, activity_version):
"""
- Returns information about the specified activity type. This
- includes configuration settings provided at registration time
- as well as other general information about the type.
+ Deprecates the specified activity type. After an activity
+ type has been deprecated, you cannot create new tasks of
+ that activity type. Tasks of this type that were scheduled
+ before the type was deprecated will continue to run.
:type domain: string
:param domain: The name of the domain in which the activity
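
A minimal sketch of the corrected deprecate_activity_type call; the domain, activity name and version are placeholders:

>>> from boto.swf.layer1 import Layer1
>>> swf = Layer1()
>>> swf.deprecate_activity_type('my-domain', 'ProcessUpload', '1.0')
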
diff --git a/boto/utils.py b/boto/utils.py
index 0e7e3a79..852aa5ab 100644
--- a/boto/utils.py
+++ b/boto/utils.py
@@ -1049,3 +1049,41 @@ class RequestHook(object):
"""
def handle_request_data(self, request, response, error=False):
pass
+
+
+def host_is_ipv6(hostname):
+ """
+ Detect (naively) if the hostname is an IPV6 host.
+ Return a boolean.
+ """
+ # empty strings or anything that is not a string is automatically not an
+ # IPV6 address
+ if not hostname or not isinstance(hostname, str):
+ return False
+
+ if hostname.startswith('['):
+ return True
+
+ if len(hostname.split(':')) > 2:
+ return True
+
+ # Anything else that doesn't start with brackets or doesn't have more than
+ # one ':' should not be an IPV6 address. This is very naive but the rest of
+ # the connection chain should error accordingly for typos or ill formed
+ # addresses
+ return False
+
+
+def parse_host(hostname):
+ """
+ Given a hostname that may include a port, return only the host portion,
+ handling IPv6 hostnames that may be wrapped in brackets.
+ """
+ # ensure that hostname does not have any whitespaces
+ hostname = hostname.strip()
+
+ if host_is_ipv6(hostname):
+ return hostname.split(']:', 1)[0].strip('[]')
+ else:
+ return hostname.split(':', 1)[0]
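
A doctest-style sketch of the two helpers added above; the expected outputs follow directly from the code shown:

>>> from boto.utils import parse_host, host_is_ipv6
>>> parse_host('s3.amazonaws.com:443')
's3.amazonaws.com'
>>> parse_host('[2001:db8::1]:8080')
'2001:db8::1'
>>> host_is_ipv6('2001:db8::1')
True
>>> host_is_ipv6('example.com')
False
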
diff --git a/boto/vendored/six.py b/boto/vendored/six.py
index 55f5c3bf..a104cb87 100644
--- a/boto/vendored/six.py
+++ b/boto/vendored/six.py
@@ -1,6 +1,6 @@
"""Utilities for writing code that runs on Python 2 and 3"""
-# Copyright (c) 2010-2014 Benjamin Peterson
+# Copyright (c) 2010-2015 Benjamin Peterson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
@@ -20,18 +20,22 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
+from __future__ import absolute_import
+
import functools
+import itertools
import operator
import sys
import types
__author__ = "Benjamin Peterson <benjamin@python.org>"
-__version__ = "1.7.2"
+__version__ = "1.9.0"
# Useful for very coarse version differentiation.
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
+PY34 = sys.version_info[0:2] >= (3, 4)
if PY3:
string_types = str,
@@ -54,6 +58,7 @@ else:
else:
# It's possible to have sizeof(long) != sizeof(Py_ssize_t).
class X(object):
+
def __len__(self):
return 1 << 31
try:
@@ -85,9 +90,13 @@ class _LazyDescr(object):
def __get__(self, obj, tp):
result = self._resolve()
- setattr(obj, self.name, result) # Invokes __set__.
- # This is a bit ugly, but it avoids running this again.
- delattr(obj.__class__, self.name)
+ setattr(obj, self.name, result) # Invokes __set__.
+ try:
+ # This is a bit ugly, but it avoids running this again by
+ # removing this descriptor.
+ delattr(obj.__class__, self.name)
+ except AttributeError:
+ pass
return result
@@ -153,12 +162,14 @@ class MovedAttribute(_LazyDescr):
class _SixMetaPathImporter(object):
+
"""
A meta path importer to import six.moves and its submodules.
This class implements a PEP302 finder and loader. It should be compatible
with Python 2.5 and all existing versions of Python3
"""
+
def __init__(self, six_module_name):
self.name = six_module_name
self.known_modules = {}
@@ -216,6 +227,7 @@ _importer = _SixMetaPathImporter(__name__)
class _MovedItems(_LazyModule):
+
"""Lazy loading of moved objects"""
__path__ = [] # mark as package
@@ -225,10 +237,14 @@ _moved_attributes = [
MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
+ MovedAttribute("intern", "__builtin__", "sys"),
MovedAttribute("map", "itertools", "builtins", "imap", "map"),
+ MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"),
+ MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"),
MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
- MovedAttribute("reload_module", "__builtin__", "imp", "reload"),
+ MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"),
MovedAttribute("reduce", "__builtin__", "functools"),
+ MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
MovedAttribute("StringIO", "StringIO", "io"),
MovedAttribute("UserDict", "UserDict", "collections"),
MovedAttribute("UserList", "UserList", "collections"),
@@ -236,7 +252,6 @@ _moved_attributes = [
MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
-
MovedModule("builtins", "__builtin__"),
MovedModule("configparser", "ConfigParser"),
MovedModule("copyreg", "copy_reg"),
@@ -248,6 +263,7 @@ _moved_attributes = [
MovedModule("html_parser", "HTMLParser", "html.parser"),
MovedModule("http_client", "httplib", "http.client"),
MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
+ MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),
MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
@@ -281,9 +297,14 @@ _moved_attributes = [
MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
- MovedModule("xmlrpc_server", "xmlrpclib", "xmlrpc.server"),
- MovedModule("winreg", "_winreg"),
+ MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
]
+# Add windows specific modules.
+if sys.platform == "win32":
+ _moved_attributes += [
+ MovedModule("winreg", "_winreg"),
+ ]
+
for attr in _moved_attributes:
setattr(_MovedItems, attr.name, attr)
if isinstance(attr, MovedModule):
@@ -297,6 +318,7 @@ _importer._add_module(moves, "moves")
class Module_six_moves_urllib_parse(_LazyModule):
+
"""Lazy loading of moved objects in six.moves.urllib_parse"""
@@ -317,6 +339,13 @@ _urllib_parse_moved_attributes = [
MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
MovedAttribute("urlencode", "urllib", "urllib.parse"),
MovedAttribute("splitquery", "urllib", "urllib.parse"),
+ MovedAttribute("splittag", "urllib", "urllib.parse"),
+ MovedAttribute("splituser", "urllib", "urllib.parse"),
+ MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
+ MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
+ MovedAttribute("uses_params", "urlparse", "urllib.parse"),
+ MovedAttribute("uses_query", "urlparse", "urllib.parse"),
+ MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
]
for attr in _urllib_parse_moved_attributes:
setattr(Module_six_moves_urllib_parse, attr.name, attr)
@@ -329,6 +358,7 @@ _importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_pa
class Module_six_moves_urllib_error(_LazyModule):
+
"""Lazy loading of moved objects in six.moves.urllib_error"""
@@ -348,6 +378,7 @@ _importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.er
class Module_six_moves_urllib_request(_LazyModule):
+
"""Lazy loading of moved objects in six.moves.urllib_request"""
@@ -397,6 +428,7 @@ _importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.
class Module_six_moves_urllib_response(_LazyModule):
+
"""Lazy loading of moved objects in six.moves.urllib_response"""
@@ -417,6 +449,7 @@ _importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib
class Module_six_moves_urllib_robotparser(_LazyModule):
+
"""Lazy loading of moved objects in six.moves.urllib_robotparser"""
@@ -434,6 +467,7 @@ _importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.url
class Module_six_moves_urllib(types.ModuleType):
+
"""Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
__path__ = [] # mark as package
parse = _importer._get_module("moves.urllib_parse")
@@ -504,6 +538,9 @@ if PY3:
create_bound_method = types.MethodType
+ def create_unbound_method(func, cls):
+ return func
+
Iterator = object
else:
def get_unbound_function(unbound):
@@ -512,6 +549,9 @@ else:
def create_bound_method(func, obj):
return types.MethodType(func, obj, obj.__class__)
+ def create_unbound_method(func, cls):
+ return types.MethodType(func, None, cls)
+
class Iterator(object):
def next(self):
@@ -542,18 +582,30 @@ if PY3:
def iterlists(d, **kw):
return iter(d.lists(**kw))
+
+ viewkeys = operator.methodcaller("keys")
+
+ viewvalues = operator.methodcaller("values")
+
+ viewitems = operator.methodcaller("items")
else:
def iterkeys(d, **kw):
- return iter(d.iterkeys(**kw))
+ return d.iterkeys(**kw)
def itervalues(d, **kw):
- return iter(d.itervalues(**kw))
+ return d.itervalues(**kw)
def iteritems(d, **kw):
- return iter(d.iteritems(**kw))
+ return d.iteritems(**kw)
def iterlists(d, **kw):
- return iter(d.iterlists(**kw))
+ return d.iterlists(**kw)
+
+ viewkeys = operator.methodcaller("viewkeys")
+
+ viewvalues = operator.methodcaller("viewvalues")
+
+ viewitems = operator.methodcaller("viewitems")
_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
@@ -566,46 +618,69 @@ _add_doc(iterlists,
if PY3:
def b(s):
return s.encode("latin-1")
+
def u(s):
return s
unichr = chr
- if sys.version_info[1] <= 1:
- def int2byte(i):
- return bytes((i,))
- else:
- # This is about 2x faster than the implementation above on 3.2+
- int2byte = operator.methodcaller("to_bytes", 1, "big")
+ import struct
+ int2byte = struct.Struct(">B").pack
+ del struct
byte2int = operator.itemgetter(0)
indexbytes = operator.getitem
iterbytes = iter
import io
StringIO = io.StringIO
BytesIO = io.BytesIO
+ _assertCountEqual = "assertCountEqual"
+ if sys.version_info[1] <= 1:
+ _assertRaisesRegex = "assertRaisesRegexp"
+ _assertRegex = "assertRegexpMatches"
+ else:
+ _assertRaisesRegex = "assertRaisesRegex"
+ _assertRegex = "assertRegex"
else:
def b(s):
return s
# Workaround for standalone backslash
+
def u(s):
return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
unichr = unichr
int2byte = chr
+
def byte2int(bs):
return ord(bs[0])
+
def indexbytes(buf, i):
return ord(buf[i])
- def iterbytes(buf):
- return (ord(byte) for byte in buf)
+ iterbytes = functools.partial(itertools.imap, ord)
import StringIO
StringIO = BytesIO = StringIO.StringIO
+ _assertCountEqual = "assertItemsEqual"
+ _assertRaisesRegex = "assertRaisesRegexp"
+ _assertRegex = "assertRegexpMatches"
_add_doc(b, """Byte literal""")
_add_doc(u, """Text literal""")
+def assertCountEqual(self, *args, **kwargs):
+ return getattr(self, _assertCountEqual)(*args, **kwargs)
+
+
+def assertRaisesRegex(self, *args, **kwargs):
+ return getattr(self, _assertRaisesRegex)(*args, **kwargs)
+
+
+def assertRegex(self, *args, **kwargs):
+ return getattr(self, _assertRegex)(*args, **kwargs)
+
+
if PY3:
exec_ = getattr(moves.builtins, "exec")
-
def reraise(tp, value, tb=None):
+ if value is None:
+ value = tp()
if value.__traceback__ is not tb:
raise value.with_traceback(tb)
raise value
@@ -623,12 +698,26 @@ else:
_locs_ = _globs_
exec("""exec _code_ in _globs_, _locs_""")
-
exec_("""def reraise(tp, value, tb=None):
raise tp, value, tb
""")
+if sys.version_info[:2] == (3, 2):
+ exec_("""def raise_from(value, from_value):
+ if from_value is None:
+ raise value
+ raise value from from_value
+""")
+elif sys.version_info[:2] > (3, 2):
+ exec_("""def raise_from(value, from_value):
+ raise value from from_value
+""")
+else:
+ def raise_from(value, from_value):
+ raise value
+
+
print_ = getattr(moves.builtins, "print", None)
if print_ is None:
def print_(*args, **kwargs):
@@ -636,13 +725,14 @@ if print_ is None:
fp = kwargs.pop("file", sys.stdout)
if fp is None:
return
+
def write(data):
if not isinstance(data, basestring):
data = str(data)
# If the file has an encoding, encode unicode with it.
if (isinstance(fp, file) and
- isinstance(data, unicode) and
- fp.encoding is not None):
+ isinstance(data, unicode) and
+ fp.encoding is not None):
errors = getattr(fp, "errors", None)
if errors is None:
errors = "strict"
@@ -683,61 +773,83 @@ if print_ is None:
write(sep)
write(arg)
write(end)
+if sys.version_info[:2] < (3, 3):
+ _print = print_
+
+ def print_(*args, **kwargs):
+ fp = kwargs.get("file", sys.stdout)
+ flush = kwargs.pop("flush", False)
+ _print(*args, **kwargs)
+ if flush and fp is not None:
+ fp.flush()
_add_doc(reraise, """Reraise an exception.""")
if sys.version_info[0:2] < (3, 4):
- def wraps(wrapped):
+ def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
+ updated=functools.WRAPPER_UPDATES):
def wrapper(f):
- f = functools.wraps(wrapped)(f)
+ f = functools.wraps(wrapped, assigned, updated)(f)
f.__wrapped__ = wrapped
return f
return wrapper
else:
wraps = functools.wraps
+
def with_metaclass(meta, *bases):
"""Create a base class with a metaclass."""
- # This requires a bit of explanation: the basic idea is to make a
- # dummy metaclass for one level of class instantiation that replaces
- # itself with the actual metaclass. Because of internal type checks
- # we also need to make sure that we downgrade the custom metaclass
- # for one level to something closer to type (that's why __call__ and
- # __init__ comes back from type etc.).
+ # This requires a bit of explanation: the basic idea is to make a dummy
+ # metaclass for one level of class instantiation that replaces itself with
+ # the actual metaclass.
class metaclass(meta):
- __call__ = type.__call__
- __init__ = type.__init__
+
def __new__(cls, name, this_bases, d):
- if this_bases is None:
- return type.__new__(cls, name, (), d)
return meta(name, bases, d)
- return metaclass('temporary_class', None, {})
+ return type.__new__(metaclass, 'temporary_class', (), {})
def add_metaclass(metaclass):
"""Class decorator for creating a class with a metaclass."""
def wrapper(cls):
orig_vars = cls.__dict__.copy()
- orig_vars.pop('__dict__', None)
- orig_vars.pop('__weakref__', None)
slots = orig_vars.get('__slots__')
if slots is not None:
if isinstance(slots, str):
slots = [slots]
for slots_var in slots:
orig_vars.pop(slots_var)
+ orig_vars.pop('__dict__', None)
+ orig_vars.pop('__weakref__', None)
return metaclass(cls.__name__, cls.__bases__, orig_vars)
return wrapper
+
+def python_2_unicode_compatible(klass):
+ """
+ A decorator that defines __unicode__ and __str__ methods under Python 2.
+ Under Python 3 it does nothing.
+
+ To support Python 2 and 3 with a single code base, define a __str__ method
+ returning text and apply this decorator to the class.
+ """
+ if PY2:
+ if '__str__' not in klass.__dict__:
+ raise ValueError("@python_2_unicode_compatible cannot be applied "
+ "to %s because it doesn't define __str__()." %
+ klass.__name__)
+ klass.__unicode__ = klass.__str__
+ klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
+ return klass
+
+
# Complete the moves implementation.
# This code is at the end of this module to speed up module loading.
# Turn this module into a package.
__path__ = [] # required for PEP 302 and PEP 451
__package__ = __name__ # see PEP 366 @ReservedAssignment
-try:
+if globals().get("__spec__") is not None:
__spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable
-except NameError:
- pass
# Remove other six meta path importers, since they cause problems. This can
# happen if six is removed from sys.modules and then reloaded. (Setuptools does
# this for some reason.)
@@ -748,7 +860,7 @@ if sys.meta_path:
# the six meta path importer, since the other six instance will have
# inserted an importer with different class.
if (type(importer).__name__ == "_SixMetaPathImporter" and
- importer.name == __name__):
+ importer.name == __name__):
del sys.meta_path[i]
break
del i, importer
diff --git a/boto/vpc/routetable.py b/boto/vpc/routetable.py
index 21060ee9..d464e2ed 100644
--- a/boto/vpc/routetable.py
+++ b/boto/vpc/routetable.py
@@ -70,6 +70,7 @@ class Route(object):
self.interface_id = None
self.vpc_peering_connection_id = None
self.state = None
+ self.origin = None
def __repr__(self):
return 'Route:%s' % self.destination_cidr_block
@@ -90,6 +91,8 @@ class Route(object):
self.vpc_peering_connection_id = value
elif name == 'state':
self.state = value
+ elif name == 'origin':
+ self.origin = value
class RouteAssociation(object):
def __init__(self, connection=None):
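
A small sketch showing how the new origin attribute on Route would surface; the region is a placeholder and the print statement follows the Python 2 style used elsewhere in these docs:

>>> import boto.vpc
>>> vpc_conn = boto.vpc.connect_to_region('us-east-1')
>>> for table in vpc_conn.get_all_route_tables():
...     for route in table.routes:
...         print route.destination_cidr_block, route.state, route.origin
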
diff --git a/docs/source/_templates/page.html b/docs/source/_templates/page.html
new file mode 100644
index 00000000..8862f15a
--- /dev/null
+++ b/docs/source/_templates/page.html
@@ -0,0 +1,14 @@
+{% extends '!page.html' %}
+{% block body %}
+<div class="admonition note">
+<p class="first admonition-title">Note</p>
+<p>You are viewing the documentation for an older version of boto (boto2).</p>
+<p class="last"><a class="reference external" href="https://github.com/boto/boto3">Boto3</a>, the next version of Boto, is now
+stable and recommended for general use. It can be used side-by-side with
+Boto in the same project, so it is easy to start using Boto3 in your existing
+projects as well as new projects. Going forward, API updates and all new
+feature work will be focused on Boto3.</p>
+<p>For more information, see the <a href="http://boto3.readthedocs.org/">documentation for boto3.</a></p>
+</div>
+{{ super() }}
+{% endblock %}
diff --git a/docs/source/boto_config_tut.rst b/docs/source/boto_config_tut.rst
index 37c22f04..302d2093 100644
--- a/docs/source/boto_config_tut.rst
+++ b/docs/source/boto_config_tut.rst
@@ -23,8 +23,14 @@ on startup, the boto library looks for configuration files in the following loca
and in the following order:
* /etc/boto.cfg - for site-wide settings that all users on this machine will use
-* ~/.boto - for user-specific settings
+* (if profile is given) ~/.aws/credentials - for credentials shared between SDKs
+* (if profile is given) ~/.boto - for user-specific settings
* ~/.aws/credentials - for credentials shared between SDKs
+* ~/.boto - for user-specific settings
+
+**Comments**
+
+You can comment out a line by putting a '#' at the beginning of the line, just like in Python code.
+
In Windows, create a text file that has any name (e.g. boto.config). It's
recommended that you put this file in your user folder. Then set
@@ -167,7 +173,7 @@ For example::
:connection_stale_duration: Amount of time to wait in seconds before a
connection will stop getting reused. AWS will disconnect connections which
have been idle for 180 seconds.
-:is_secure: Is the connection over SSL. This setting will overide passed in
+:is_secure: Is the connection over SSL. This setting will override passed in
values.
:https_validate_certificates: Validate HTTPS certificates. This is on by default
:ca_certificates_file: Location of CA certificates or the keyword "system".
diff --git a/docs/source/cloudfront_tut.rst b/docs/source/cloudfront_tut.rst
index cd33056e..fde6b1b2 100644
--- a/docs/source/cloudfront_tut.rst
+++ b/docs/source/cloudfront_tut.rst
@@ -116,7 +116,7 @@ Invalidate a list of paths in a CloudFront distribution::
This will return a :class:`boto.cloudfront.invalidation.InvalidationBatch`
object representing the invalidation request. You can also fetch a single
-invalidaton request for a given distribution using
+invalidation request for a given distribution using
``invalidation_request_status``::
>>> inval_req = c.invalidation_request_status(u'ECH69MOIW7613', u'IFCT7K03VUETK')
@@ -187,7 +187,7 @@ representing the invalidation request pointed to by a
>>> print inval_req
<InvalidationBatch: IFCT7K03VUETK>
-Simiarly you can get the parent
+Similarly you can get the parent
:class:`boto.cloudfront.distribution.Distribution` object for the invalidation
request from a :class:`boto.cloudfront.invalidation.InvalidationSummary` object
using::
diff --git a/docs/source/cloudsearch_tut.rst b/docs/source/cloudsearch_tut.rst
index 13ffd5ef..33518979 100644
--- a/docs/source/cloudsearch_tut.rst
+++ b/docs/source/cloudsearch_tut.rst
@@ -76,7 +76,7 @@ Creating index fields
Each domain can have up to twenty index fields which are indexed by the
CloudSearch service. For each index field, you will need to specify whether
-it's a text or integer field, as well as optionaly a default value::
+it's a text or integer field, as well as optionally a default value::
>>> # Create an 'text' index field called 'username'
>>> uname_field = domain.create_index_field('username', 'text')
@@ -178,11 +178,10 @@ The result is an instance of :py:class:`CommitResponse
dictionary response a nice object (ie result.adds, result.deletes) and raise an
exception for us if all of our documents weren't actually committed.
-After you have successfully committed some documents to cloudsearch, you must
-use :py:meth:`clear_sdf
-<boto.cloudsearch.document.DocumentServiceConnection.clear_sdf>`, if you wish
-to use the same document service connection again so that its internal cache is
-cleared.
+If you wish to use the same document service connection after a commit,
+you must use :py:meth:`clear_sdf
+<boto.cloudsearch.document.DocumentServiceConnection.clear_sdf>` to clear its
+internal cache.
Searching Documents
-------------------
@@ -381,7 +380,7 @@ The stopwords object has similar attributes defined above for stemming
that provide additional information about the stopwords in your domain.
-Viewing and Adjusting Stopwords for a Domain
+Viewing and Adjusting Synonyms for a Domain
--------------------------------------------
You can configure synonyms for terms that appear in the data you are
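
A minimal sketch of the clear_sdf guidance reworded above, assuming a domain configured as earlier in this tutorial; the document id, version and fields are placeholders:

>>> doc_service = domain.get_document_service()
>>> doc_service.add('1234', 10, {'id': '1234', 'username': 'dan'})
>>> result = doc_service.commit()
>>> doc_service.clear_sdf()   # clear the internal cache before reusing the connection
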
diff --git a/docs/source/conf.py b/docs/source/conf.py
index 4fbbf3fc..aa32edc0 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -31,7 +31,7 @@ github_project_url = 'https://github.com/boto/boto/'
try:
release = os.environ.get('SVN_REVISION', 'HEAD')
print release
-except Exception, e:
+except Exception as e:
print e
html_title = "boto v%s" % version
diff --git a/docs/source/contributing.rst b/docs/source/contributing.rst
index 80821995..e17d07e9 100644
--- a/docs/source/contributing.rst
+++ b/docs/source/contributing.rst
@@ -152,7 +152,7 @@ into nosetests.
Testing Supported Python Versions
-==================================
+=================================
Boto supports python 2.6 and 2.7. An easy way to verify functionality
across multiple python versions is to use tox_. A tox.ini file is included
diff --git a/docs/source/dynamodb2_tut.rst b/docs/source/dynamodb2_tut.rst
index 0e4b5b87..f367cf97 100644
--- a/docs/source/dynamodb2_tut.rst
+++ b/docs/source/dynamodb2_tut.rst
@@ -635,7 +635,7 @@ API requests necessary to access a large number of items. The
& fetches all of them, presented as an iterator interface.
This is done lazily, so if you never iterate over the results, no requests are
-executed. Additionally, if you only iterate over part of the set, the minumum
+executed. Additionally, if you only iterate over part of the set, the minimum
number of calls are made to fetch those results (typically max 100 per
response).
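
A hedged sketch of the lazy iteration described above; the table name and the assumption that account_type is a hash key or indexed attribute are placeholders, not part of this patch:

>>> from boto.dynamodb2.table import Table
>>> users = Table('users')
>>> results = users.query_2(account_type__eq='standard_user', limit=20)
>>> for user in results:          # requests are issued lazily, page by page
...     print user['username']
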
diff --git a/docs/source/ec2_tut.rst b/docs/source/ec2_tut.rst
index 140930be..ba576b30 100644
--- a/docs/source/ec2_tut.rst
+++ b/docs/source/ec2_tut.rst
@@ -237,7 +237,7 @@ If you no longer need a launch configuration, you can delete it:
If ``use_block_device_types=True`` is passed to the connection it will deserialize
Launch Configurations with Block Device Mappings into a re-usable format with
BlockDeviceType objects, similar to how AMIs are deserialized currently. Legacy
- behavior is to put them into a format that is incompatabile with creating new Launch
+ behavior is to put them into a format that is incompatible with creating new Launch
Configurations. This switch is in place to preserve backwards compatability, but
its usage is the preferred format going forward.
diff --git a/docs/source/index.rst b/docs/source/index.rst
index a5aa0b00..6be34845 100644
--- a/docs/source/index.rst
+++ b/docs/source/index.rst
@@ -4,6 +4,15 @@
boto: A Python interface to Amazon Web Services
===============================================
+.. note::
+
+ `Boto3 <https://github.com/boto/boto3>`__, the next version of Boto, is now
+ stable and recommended for general use. It can be used side-by-side with
+ Boto in the same project, so it is easy to start using Boto3 in your existing
+ projects as well as new projects. Going forward, API updates and all new
+ feature work will be focused on Boto3.
+
+
An integrated interface to current and future infrastructural services
offered by `Amazon Web Services`_.
@@ -143,6 +152,8 @@ Release Notes
.. toctree::
:titlesonly:
+ releasenotes/v2.39.0
+ releasenotes/v2.38.0
releasenotes/v2.37.0
releasenotes/v2.36.0
releasenotes/v2.35.2
diff --git a/docs/source/rds_tut.rst b/docs/source/rds_tut.rst
index e648528d..33f052d0 100644
--- a/docs/source/rds_tut.rst
+++ b/docs/source/rds_tut.rst
@@ -31,7 +31,7 @@ The recommended method of doing this is as follows::
At this point the variable conn will point to an RDSConnection object in the
US-WEST-2 region. Bear in mind that just as any other AWS service, RDS is
region-specific. In this example, the AWS access key and AWS secret key are
-passed in to the method explicitely. Alternatively, you can set the environment
+passed in to the method explicitly. Alternatively, you can set the environment
variables:
* ``AWS_ACCESS_KEY_ID`` - Your AWS Access Key ID
diff --git a/docs/source/releasenotes/v2.35.2.rst b/docs/source/releasenotes/v2.35.2.rst
index ca7ed9ab..e1f6ff81 100644
--- a/docs/source/releasenotes/v2.35.2.rst
+++ b/docs/source/releasenotes/v2.35.2.rst
@@ -1,4 +1,4 @@
-boto v2.32.2
+boto v2.35.2
============
:date: 2015/01/19
diff --git a/docs/source/releasenotes/v2.39.0.rst b/docs/source/releasenotes/v2.39.0.rst
new file mode 100644
index 00000000..b1e465b3
--- /dev/null
+++ b/docs/source/releasenotes/v2.39.0.rst
@@ -0,0 +1,27 @@
+boto v2.39.0
+============
+
+:date: 2016/01/18
+
+Add support for ap-northeast-2, update documentation, and fix several bugs.
+
+
+Changes
+-------
+* Autodetect sigv4 for ap-northeast-2 (:issue:`3461`, :sha:`c2a17ce`)
+* Added support for ap-northeast-2 (:issue:`3454`, :sha:`c3c1ddd`)
+* Remove VeriSign Class 3 CA from trusted certs (:issue:`3450`, :sha:`8a025df`)
+* Add note about boto3 on all pages of boto docs (:sha:`9bd904c`)
+* Fix for listing EMR steps based on cluster_states filter (:issue:`3399`, :sha:`0f92f35`)
+* Fixed param name in set_contents_from_string docstring (:issue:`3420`, :sha:`e30297b`)
+* Closes #3441: Remove py3 test whitelist; update rds, mturk, and cloudsearchdomain tests to pass on py3 (:issue:`3441`, :sha:`5b2f552`)
+* Run tests against py35 (:sha:`7d039d0`)
+* Fix Glacier test failure in python 3.5 due to MagicMock (:issue:`3412`, :sha:`d042f07`)
+* Undo log message change; BF(PY3): use "except ... as" syntax instead of "except ...," (:sha:`607cad7`)
+* Fix travis CI builds for PY3 (:issue:`3439`, :sha:`22ab610`)
+* Spelling fixes (:issue:`3425`, :sha:`f43bbbd`)
+* Fixed docs (:issue:`3401`, :sha:`4f66311`)
+* Add deprecation notice to emr methods (:issue:`3422`, :sha:`cee6159`)
+* Add some GovCloud endpoints (:issue:`3421`, :sha:`5afc068`)
+
+
diff --git a/docs/source/route53_tut.rst b/docs/source/route53_tut.rst
index 12e0d659..12d3e5b2 100644
--- a/docs/source/route53_tut.rst
+++ b/docs/source/route53_tut.rst
@@ -85,8 +85,22 @@ You can call the API again and ask for the current status as follows:
When the status has changed to *INSYNC*, the change has been propagated to
remote servers
+Updating a record
+-----------------
+
+You can create, upsert, or delete a single record as follows:
+
+>>> from boto.route53.record import ResourceRecordSets
+>>> zone = conn.get_zone("example.com.")
+>>> change_set = ResourceRecordSets(conn, zone.id)
+>>> changes1 = change_set.add_change("UPSERT", "www.example.com", type="CNAME", ttl=3600)
+>>> changes1.add_value("webserver.example.com")
+>>> change_set.commit()
+
+In this example we create or update the CNAME record www.example.com, depending on
+whether it already exists, and point it at webserver.example.com.
+
Working with Change Sets
------------------------
+------------------------
You can also do bulk updates using ResourceRecordSets. For example updating the TTL
diff --git a/docs/source/s3_tut.rst b/docs/source/s3_tut.rst
index 23e0350d..9d253d1c 100644
--- a/docs/source/s3_tut.rst
+++ b/docs/source/s3_tut.rst
@@ -18,7 +18,7 @@ There are two ways to do this in boto. The first is:
At this point the variable conn will point to an S3Connection object. In
this example, the AWS access key and AWS secret key are passed in to the
-method explicitely. Alternatively, you can set the environment variables:
+method explicitly. Alternatively, you can set the environment variables:
* `AWS_ACCESS_KEY_ID` - Your AWS Access Key ID
* `AWS_SECRET_ACCESS_KEY` - Your AWS Secret Access Key
@@ -81,6 +81,7 @@ boto.s3.connection module, like this::
APSoutheast2
DEFAULT
EU
+ EUCentral1
SAEast
USWest
USWest2
@@ -96,7 +97,7 @@ bucket in that location. For example::
will create the bucket in the EU region (assuming the name is available).
Storing Data
-----------------
+------------
Once you have a bucket, presumably you will want to store some data
in it. S3 doesn't care what kind of information you store in your objects
diff --git a/docs/source/ses_tut.rst b/docs/source/ses_tut.rst
index d19a4e36..10d6a278 100644
--- a/docs/source/ses_tut.rst
+++ b/docs/source/ses_tut.rst
@@ -25,10 +25,10 @@ To do so, the most straight forward way is the following::
Bear in mind that if you have your credentials in boto config in your home
directory, the two keyword arguments in the call above are not needed. More
-details on configuration can be fond in :doc:`boto_config_tut`.
+details on configuration can be found in :doc:`boto_config_tut`.
The :py:func:`boto.ses.connect_to_region` functions returns a
-:py:class:`boto.ses.connection.SESConnection` instance, which is a the boto API
+:py:class:`boto.ses.connection.SESConnection` instance, which is the boto API
for working with SES.
Notes on Sending
diff --git a/docs/source/simpledb_tut.rst b/docs/source/simpledb_tut.rst
index 6ecc087f..ed1c24c1 100644
--- a/docs/source/simpledb_tut.rst
+++ b/docs/source/simpledb_tut.rst
@@ -78,7 +78,7 @@ an exception if the domain you are looking for doesn't exist. If you set it to f
:py:class:`Domain <boto.sdb.domain.Domain>` object blindly regardless of its existence.
Getting Domain Metadata
-------------------------
+-----------------------
There are times when you might want to know your domains' machine usage, aprox. item count and other such data.
To this end, boto offers a simple and convenient way to do so as shown below::
@@ -141,7 +141,7 @@ parameter.
Retrieving Items
------------------
+----------------
To retrieve an item along with its attributes is a fairly straight forward operation and can be accomplished as follows::
>>> dom.get_item('item1')
@@ -190,7 +190,7 @@ delete_item() method, boto will take care of the rest::
Deleting Domains
------------------------------------
+----------------
To delete a domain and all items under it (i.e. be very careful), you can do it as follows::
>>> conn.delete_domain('test-domain')
diff --git a/docs/source/swf_tut.rst b/docs/source/swf_tut.rst
index ffbacfd2..7a2aef7f 100644
--- a/docs/source/swf_tut.rst
+++ b/docs/source/swf_tut.rst
@@ -338,7 +338,7 @@ The workers only need to know which task lists to poll.
try:
print 'working on activity from tasklist %s at %i' % (self.task_list, time.time())
self.activity(activity_task.get('input'))
- except Exception, error:
+ except Exception as error:
self.fail(reason=str(error))
raise error
diff --git a/requirements.txt b/requirements.txt
index c5d83577..007b79d1 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,7 +1,7 @@
nose==1.3.3
requests>=1.2.3,<=2.0.1
rsa==3.1.4
-simplejson==3.5.2
+simplejson==3.6.5
argparse==1.2.1
httpretty>=0.7.0,<=0.8.6
paramiko>=1.10.0
diff --git a/tests/fps/test.py b/tests/fps/test.py
index d5efb4b7..20af47a5 100755
--- a/tests/fps/test.py
+++ b/tests/fps/test.py
@@ -88,7 +88,7 @@ class FPSTestCase(unittest.TestCase):
try:
self.fps.write_off_debt(CreditInstrumentId='foo',
AdjustmentAmount=123.45)
- except Exception, e:
+ except Exception as e:
print e
@unittest.skip('cosmetic')
diff --git a/tests/integration/gs/test_resumable_downloads.py b/tests/integration/gs/test_resumable_downloads.py
index ba5d9830..1edd29dc 100644
--- a/tests/integration/gs/test_resumable_downloads.py
+++ b/tests/integration/gs/test_resumable_downloads.py
@@ -102,7 +102,7 @@ class ResumableDownloadTests(GSTestCase):
dst_fp, cb=harness.call,
res_download_handler=res_download_handler)
self.fail('Did not get expected ResumableDownloadException')
- except ResumableDownloadException, e:
+ except ResumableDownloadException as e:
# We'll get a ResumableDownloadException at this point because
# of CallbackTestHarness (above). Check that the tracker file was
# created correctly.
@@ -164,7 +164,7 @@ class ResumableDownloadTests(GSTestCase):
dst_fp, cb=harness.call,
res_download_handler=res_download_handler)
self.fail('Did not get expected OSError')
- except OSError, e:
+ except OSError as e:
# Ensure the error was re-raised.
self.assertEqual(e.errno, 13)
@@ -228,7 +228,7 @@ class ResumableDownloadTests(GSTestCase):
dst_fp, cb=harness.call,
res_download_handler=res_download_handler)
self.fail('Did not get expected ResumableDownloadException')
- except ResumableDownloadException, e:
+ except ResumableDownloadException as e:
self.assertEqual(e.disposition,
ResumableTransferDisposition.ABORT_CUR_PROCESS)
# Ensure a tracker file survived.
@@ -345,7 +345,7 @@ class ResumableDownloadTests(GSTestCase):
os.chmod(tmp_dir, 0)
res_download_handler = ResumableDownloadHandler(
tracker_file_name=tracker_file_name)
- except ResumableDownloadException, e:
+ except ResumableDownloadException as e:
self.assertEqual(e.disposition, ResumableTransferDisposition.ABORT)
self.assertNotEqual(
e.message.find('Couldn\'t write URI tracker file'), -1)
diff --git a/tests/integration/gs/test_resumable_uploads.py b/tests/integration/gs/test_resumable_uploads.py
index 605937f3..c729904f 100644
--- a/tests/integration/gs/test_resumable_uploads.py
+++ b/tests/integration/gs/test_resumable_uploads.py
@@ -120,7 +120,7 @@ class ResumableUploadTests(GSTestCase):
small_src_file, cb=harness.call,
res_upload_handler=res_upload_handler)
self.fail('Did not get expected ResumableUploadException')
- except ResumableUploadException, e:
+ except ResumableUploadException as e:
# We'll get a ResumableUploadException at this point because
# of CallbackTestHarness (above). Check that the tracker file was
# created correctly.
@@ -185,7 +185,7 @@ class ResumableUploadTests(GSTestCase):
small_src_file, cb=harness.call,
res_upload_handler=res_upload_handler)
self.fail('Did not get expected OSError')
- except OSError, e:
+ except OSError as e:
# Ensure the error was re-raised.
self.assertEqual(e.errno, 13)
@@ -247,7 +247,7 @@ class ResumableUploadTests(GSTestCase):
larger_src_file, cb=harness.call,
res_upload_handler=res_upload_handler)
self.fail('Did not get expected ResumableUploadException')
- except ResumableUploadException, e:
+ except ResumableUploadException as e:
self.assertEqual(e.disposition,
ResumableTransferDisposition.ABORT_CUR_PROCESS)
# Ensure a tracker file survived.
@@ -351,7 +351,7 @@ class ResumableUploadTests(GSTestCase):
larger_src_file, cb=harness.call,
res_upload_handler=res_upload_handler)
self.fail('Did not get expected ResumableUploadException')
- except ResumableUploadException, e:
+ except ResumableUploadException as e:
# First abort (from harness-forced failure) should be
# ABORT_CUR_PROCESS.
self.assertEqual(e.disposition, ResumableTransferDisposition.ABORT_CUR_PROCESS)
@@ -368,7 +368,7 @@ class ResumableUploadTests(GSTestCase):
dst_key.set_contents_from_file(
largest_src_file, res_upload_handler=res_upload_handler)
self.fail('Did not get expected ResumableUploadException')
- except ResumableUploadException, e:
+ except ResumableUploadException as e:
# This abort should be a hard abort (file size changing during
# transfer).
self.assertEqual(e.disposition, ResumableTransferDisposition.ABORT)
@@ -391,7 +391,7 @@ class ResumableUploadTests(GSTestCase):
test_file, cb=harness.call,
res_upload_handler=res_upload_handler)
self.fail('Did not get expected ResumableUploadException')
- except ResumableUploadException, e:
+ except ResumableUploadException as e:
self.assertEqual(e.disposition, ResumableTransferDisposition.ABORT)
self.assertNotEqual(
e.message.find('File changed during upload'), -1)
@@ -411,7 +411,7 @@ class ResumableUploadTests(GSTestCase):
test_file, cb=harness.call,
res_upload_handler=res_upload_handler)
return False
- except ResumableUploadException, e:
+ except ResumableUploadException as e:
self.assertEqual(e.disposition, ResumableTransferDisposition.ABORT)
# Ensure the file size didn't change.
test_file.seek(0, os.SEEK_END)
@@ -422,7 +422,7 @@ class ResumableUploadTests(GSTestCase):
try:
dst_key_uri.get_key()
self.fail('Did not get expected InvalidUriError')
- except InvalidUriError, e:
+ except InvalidUriError as e:
pass
return True
@@ -477,7 +477,7 @@ class ResumableUploadTests(GSTestCase):
small_src_file, res_upload_handler=res_upload_handler,
headers={'Content-Length' : SMALL_KEY_SIZE})
self.fail('Did not get expected ResumableUploadException')
- except ResumableUploadException, e:
+ except ResumableUploadException as e:
self.assertEqual(e.disposition, ResumableTransferDisposition.ABORT)
self.assertNotEqual(
e.message.find('Attempt to specify Content-Length header'), -1)
@@ -543,7 +543,7 @@ class ResumableUploadTests(GSTestCase):
os.chmod(tmp_dir, 0)
res_upload_handler = ResumableUploadHandler(
tracker_file_name=tracker_file_name)
- except ResumableUploadException, e:
+ except ResumableUploadException as e:
self.assertEqual(e.disposition, ResumableTransferDisposition.ABORT)
self.assertNotEqual(
e.message.find('Couldn\'t write URI tracker file'), -1)
diff --git a/tests/integration/gs/testcase.py b/tests/integration/gs/testcase.py
index b16ea8f8..4db201d7 100644
--- a/tests/integration/gs/testcase.py
+++ b/tests/integration/gs/testcase.py
@@ -55,11 +55,19 @@ class GSTestCase(unittest.TestCase):
while(len(self._buckets)):
b = self._buckets[-1]
- bucket = self._conn.get_bucket(b)
- while len(list(bucket.list_versions())) > 0:
- for k in bucket.list_versions():
- bucket.delete_key(k.name, generation=k.generation)
- bucket.delete()
+ try:
+ bucket = self._conn.get_bucket(b)
+ while len(list(bucket.list_versions())) > 0:
+ for k in bucket.list_versions():
+ try:
+ bucket.delete_key(k.name, generation=k.generation)
+ except GSResponseError as e:
+ if e.status != 404:
+ raise
+ bucket.delete()
+ except GSResponseError as e:
+ if e.status != 404:
+ raise
self._buckets.pop()
def _GetConnection(self):
diff --git a/tests/integration/gs/util.py b/tests/integration/gs/util.py
index 2b76078c..f5868195 100644
--- a/tests/integration/gs/util.py
+++ b/tests/integration/gs/util.py
@@ -70,7 +70,7 @@ def retry(ExceptionToCheck, tries=4, delay=3, backoff=2, logger=None):
return f(*args, **kwargs)
try_one_last_time = False
break
- except ExceptionToCheck, e:
+ except ExceptionToCheck as e:
msg = "%s, Retrying in %d seconds..." % (str(e), mdelay)
if logger:
logger.warning(msg)
diff --git a/tests/integration/iam/test_policy.py b/tests/integration/iam/test_policy.py
new file mode 100644
index 00000000..522daf07
--- /dev/null
+++ b/tests/integration/iam/test_policy.py
@@ -0,0 +1,112 @@
+# Copyright (c) 2015 Shaun Brady.
+# All rights reserved.
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish, dis-
+# tribute, sublicense, and/or sell copies of the Software, and to permit
+# persons to whom the Software is furnished to do so, subject to the fol-
+# lowing conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
+# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+
+import boto
+import time
+import json
+
+from tests.compat import unittest
+
+
+class TestIAMPolicy(unittest.TestCase):
+ iam = True
+
+ def test_policy_actions(self):
+ # Test managed policy create/attach/detach/delete
+ iam = boto.connect_iam()
+
+ time_suffix = time.time()
+ rolename = 'boto-test-role-%d' % time_suffix
+ groupname = 'boto-test-group-%d' % time_suffix
+ username = 'boto-test-user-%d' % time_suffix
+ policyname = 'TestPolicyName-%d' % time_suffix
+
+ iam.create_role(rolename)
+ iam.create_group(groupname)
+ iam.create_user(username)
+
+ policy_doc = {
+ "Version": "2012-10-17",
+ "Id": "TestPermission",
+ "Statement": [
+ {
+ "Sid": "TestSid",
+ "Action": "s3:*",
+ "Effect": "Deny",
+ "Resource": "arn:aws:s3:::*"
+ }
+ ]
+ }
+
+ policy_json = json.dumps(policy_doc)
+
+ # Create policy
+ policy = iam.create_policy(policyname, policy_json)
+
+ # Get it back, verify it is the same
+ policy_copy = iam.get_policy(policy.arn)
+ if not policy_copy.arn == policy.arn:
+ raise Exception("Policies not equal.")
+
+ # Show that policy is not attached
+ result = iam.list_entities_for_policy(policy.arn)[
+ 'list_entities_for_policy_response'][
+ 'list_entities_for_policy_result']
+
+ if not len(result['policy_roles']) == 0:
+ raise Exception("Roles when not expected")
+
+ if not len(result['policy_groups']) == 0:
+ raise Exception("Groups when not expected")
+
+ if not len(result['policy_users']) == 0:
+ raise Exception("Users when not expected")
+
+ # Attach the policy
+ iam.attach_role_policy(policy.arn, rolename)
+ iam.attach_group_policy(policy.arn, groupname)
+ iam.attach_user_policy(policy.arn, username)
+
+ # Show that policy is indeed attached
+ result = iam.list_entities_for_policy(policy.arn)[
+ 'list_entities_for_policy_response'][
+ 'list_entities_for_policy_result']
+
+ if not len(result['policy_roles']) == 1:
+ raise Exception("Roles expected")
+
+ if not len(result['policy_groups']) == 1:
+ raise Exception("Groups expected")
+
+ if not len(result['policy_users']) == 1:
+ raise Exception("Users expected")
+
+ # Detach the policy
+ iam.detach_role_policy(policy.arn, rolename)
+ iam.detach_group_policy(policy.arn, groupname)
+ iam.detach_user_policy(policy.arn, username)
+
+ # Clean up
+ iam.delete_policy(policy.arn)
+ iam.delete_role(rolename)
+ iam.delete_user(username)
+ iam.delete_group(groupname)
diff --git a/tests/integration/s3/test_bucket.py b/tests/integration/s3/test_bucket.py
index 84951440..8d7fdcce 100644
--- a/tests/integration/s3/test_bucket.py
+++ b/tests/integration/s3/test_bucket.py
@@ -40,7 +40,7 @@ from boto.s3.lifecycle import Rule
from boto.s3.acl import Grant
from boto.s3.tagging import Tags, TagSet
from boto.s3.website import RedirectLocation
-from boto.compat import urllib
+from boto.compat import unquote_str
class S3BucketTest (unittest.TestCase):
@@ -88,8 +88,9 @@ class S3BucketTest (unittest.TestCase):
self.assertEqual(element.name, expected.pop(0))
self.assertEqual(expected, [])
+
def test_list_with_url_encoding(self):
- expected = ["α", "β", "γ"]
+ expected = [u"α", u"β", u"γ"]
for key_name in expected:
key = self.bucket.new_key(key_name)
key.set_contents_from_string(key_name)
@@ -101,7 +102,7 @@ class S3BucketTest (unittest.TestCase):
with patch.object(self.bucket, '_get_all', getall):
rs = self.bucket.list(encoding_type="url")
for element in rs:
- name = urllib.parse.unquote(element.name.encode('utf-8'))
+ name = unquote_str(element.name)
self.assertEqual(name, expected.pop(0))
self.assertEqual(expected, [])
diff --git a/tests/integration/s3/test_connect_to_region.py b/tests/integration/s3/test_connect_to_region.py
index 5c76ada9..938de3a3 100644
--- a/tests/integration/s3/test_connect_to_region.py
+++ b/tests/integration/s3/test_connect_to_region.py
@@ -57,6 +57,11 @@ class S3SpecifyHost(unittest.TestCase):
self.assertEquals('s3.amazonaws.com', connection.host)
self.assertIsInstance(connection, S3Connection)
+ def testSuccessWithDefaultEUCentral1(self):
+ connection = connect_to_region('eu-central-1')
+ self.assertEquals('s3.eu-central-1.amazonaws.com', connection.host)
+ self.assertIsInstance(connection, S3Connection)
+
def testDefaultWithInvalidHost(self):
connect_args = dict({'host':''})
connection = connect_to_region('us-west-2', **connect_args)
diff --git a/tests/integration/s3/test_connection.py b/tests/integration/s3/test_connection.py
index d7b848bb..ac4027e1 100644
--- a/tests/integration/s3/test_connection.py
+++ b/tests/integration/s3/test_connection.py
@@ -26,6 +26,7 @@ Some unit tests for the S3Connection
import unittest
import time
import os
+import socket
from boto.s3.connection import S3Connection
from boto.s3.bucket import Bucket
@@ -222,7 +223,7 @@ class S3ConnectionTest (unittest.TestCase):
# give bucket anon user access and anon read again
auth_bucket.set_acl('public-read')
- time.sleep(5)
+ time.sleep(10) # Was 5 seconds, which turned out not to be enough
try:
next(iter(anon_bucket.list()))
self.fail("not expecting contents")
@@ -241,5 +242,7 @@ class S3ConnectionTest (unittest.TestCase):
c.create_bucket('bad$bucket$name')
except S3ResponseError as e:
self.assertEqual(e.error_code, 'InvalidBucketName')
+ except socket.gaierror:
+ pass # This is also a possible result for an invalid bucket name
else:
self.fail("S3ResponseError not raised.")
diff --git a/tests/integration/s3/test_https_cert_validation.py b/tests/integration/s3/test_https_cert_validation.py
index 9222a4a7..d55303c1 100644
--- a/tests/integration/s3/test_https_cert_validation.py
+++ b/tests/integration/s3/test_https_cert_validation.py
@@ -88,7 +88,7 @@ class CertValidationTest(unittest.TestCase):
boto.config.set('Boto', 'proxy_port', PROXY_PORT)
def assertConnectionThrows(self, connection_class, error):
- conn = connection_class()
+ conn = connection_class('fake_id', 'fake_secret')
self.assertRaises(error, conn.get_all_buckets)
def do_test_valid_cert(self):
diff --git a/tests/integration/s3/test_key.py b/tests/integration/s3/test_key.py
index 8d426a26..40bc8c32 100644
--- a/tests/integration/s3/test_key.py
+++ b/tests/integration/s3/test_key.py
@@ -27,6 +27,7 @@ Some unit tests for S3 Key
from tests.unit import unittest
import time
+import random
import boto.s3
from boto.compat import six, StringIO, urllib
@@ -40,7 +41,9 @@ class S3KeyTest(unittest.TestCase):
def setUp(self):
self.conn = S3Connection()
- self.bucket_name = 'keytest-%d' % int(time.time())
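+ # Append a random component so bucket names created in the same second stay unique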
+ random.seed()
+ self.bucket_name = 'keytest-%d-%d' % (
+ time.time(), random.randint(1, 99999999))
self.bucket = self.conn.create_bucket(self.bucket_name)
def tearDown(self):
@@ -495,15 +498,15 @@ class S3KeyVersionCopyTest(unittest.TestCase):
self.bucket_name = 'boto-key-version-copy-%d' % int(time.time())
self.bucket = self.conn.create_bucket(self.bucket_name)
self.bucket.configure_versioning(True)
-
+
def tearDown(self):
for key in self.bucket.list_versions():
key.delete()
self.bucket.delete()
-
+
def test_key_overwrite_and_copy(self):
- first_content = "abcdefghijklm"
- second_content = "nopqrstuvwxyz"
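+ # Use byte strings so the later content comparisons also work on Python 3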
+ first_content = b"abcdefghijklm"
+ second_content = b"nopqrstuvwxyz"
k = Key(self.bucket, 'testkey')
k.set_contents_from_string(first_content)
# Wait for S3's eventual consistency (may not be necessary)
diff --git a/tests/integration/s3/test_multidelete.py b/tests/integration/s3/test_multidelete.py
index b22581bb..ba7d5180 100644
--- a/tests/integration/s3/test_multidelete.py
+++ b/tests/integration/s3/test_multidelete.py
@@ -17,7 +17,7 @@
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
-# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
@@ -45,6 +45,7 @@ class S3MultiDeleteTest(unittest.TestCase):
def tearDown(self):
for key in self.bucket:
key.delete()
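+ # Delete any remaining object versions and delete markers so the bucket can be removed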
+ self.bucket.delete_keys(self.bucket.list_versions())
self.bucket.delete()
def test_delete_nothing(self):
@@ -130,13 +131,11 @@ class S3MultiDeleteTest(unittest.TestCase):
# Adding 1000 objects is painful otherwise...
key_names = ['key-%03d' % i for i in range(0, 1000)]
result = self.bucket.delete_keys(key_names)
- self.assertEqual(len(result.deleted), 1000)
- self.assertEqual(len(result.errors), 0)
+ self.assertEqual(len(result.deleted) + len(result.errors), 1000)
# delete them again to create 1000 more delete markers
result = self.bucket.delete_keys(key_names)
- self.assertEqual(len(result.deleted), 1000)
- self.assertEqual(len(result.errors), 0)
+ self.assertEqual(len(result.deleted) + len(result.errors), 1000)
# Sometimes it takes AWS some time to settle
time.sleep(10)
@@ -144,12 +143,11 @@ class S3MultiDeleteTest(unittest.TestCase):
# delete all versions to delete 2000 objects.
# this tests the 1000 limit.
result = self.bucket.delete_keys(self.bucket.list_versions())
- self.assertEqual(len(result.deleted), 2000)
- self.assertEqual(len(result.errors), 0)
+ self.assertEqual(len(result.deleted) + len(result.errors), 2000)
def test_1(self):
nkeys = 100
-
+
# create a bunch of keynames
key_names = ['key-%03d' % i for i in range(0, nkeys)]
@@ -170,9 +168,9 @@ class S3MultiDeleteTest(unittest.TestCase):
self.assertEqual(len(result.deleted), nkeys)
self.assertEqual(len(result.errors), 0)
-
+
time.sleep(5)
-
+
# now count keys in bucket
n = 0
for key in self.bucket:
diff --git a/tests/mturk/selenium_support.py b/tests/mturk/selenium_support.py
index f1552cb2..1ed760cb 100644
--- a/tests/mturk/selenium_support.py
+++ b/tests/mturk/selenium_support.py
@@ -17,7 +17,7 @@ def has_selenium():
# a little trick to see if the server is responding
try:
sel.do_command('shutdown', '')
- except Exception, e:
+ except Exception as e:
if not 'Server Exception' in str(e):
raise
result = True
diff --git a/tests/test.py b/tests/test.py
index 692ed4dd..9f5c6334 100755
--- a/tests/test.py
+++ b/tests/test.py
@@ -28,51 +28,7 @@ import sys
from nose.core import run
-# This is a whitelist of unit tests that support Python 3.
-# When porting a new module to Python 3, please update this
-# list so that its tests will run by default. See the
-# `default` target below for more information.
-# We use this instead of test attributes/tags because in
-# order to filter on tags nose must load each test - many
-# will fail to import with Python 3.
-PY3_WHITELIST = (
- 'tests/unit/auth',
- 'tests/unit/beanstalk',
- 'tests/unit/cloudformation',
- 'tests/unit/cloudfront',
- 'tests/unit/cloudsearch',
- 'tests/unit/cloudsearch2',
- 'tests/unit/cloudtrail',
- 'tests/unit/directconnect',
- 'tests/unit/dynamodb',
- 'tests/unit/dynamodb2',
- 'tests/unit/ecs',
- 'tests/unit/elasticache',
- 'tests/unit/emr',
- 'tests/unit/glacier',
- 'tests/unit/iam',
- 'tests/unit/ec2',
- 'tests/unit/logs',
- 'tests/unit/manage',
- 'tests/unit/mws',
- 'tests/unit/provider',
- 'tests/unit/rds2',
- 'tests/unit/route53',
- 'tests/unit/s3',
- 'tests/unit/sns',
- 'tests/unit/ses',
- 'tests/unit/sqs',
- 'tests/unit/sts',
- 'tests/unit/swf',
- 'tests/unit/utils',
- 'tests/unit/vpc',
- 'tests/unit/test_connection.py',
- 'tests/unit/test_exception.py',
- 'tests/unit/test_regioninfo.py',
-)
-
-
-def main(whitelist=[]):
+def main():
description = ("Runs boto unit and/or integration tests. "
"Arguments will be passed on to nosetests. "
"See nosetests --help for more information.")
@@ -98,11 +54,7 @@ def main(whitelist=[]):
for i, arg in enumerate(remaining_args):
if arg == 'default':
- if sys.version_info[0] == 3:
- del remaining_args[i]
- remaining_args += PY3_WHITELIST
- else:
- remaining_args[i] = 'tests/unit'
+ remaining_args[i] = 'tests/unit'
all_args = [__file__] + attribute_args + remaining_args
print("nose command:", ' '.join(all_args))
diff --git a/tests/unit/auth/test_sigv4.py b/tests/unit/auth/test_sigv4.py
index 8f7876b6..cf8d44ca 100644
--- a/tests/unit/auth/test_sigv4.py
+++ b/tests/unit/auth/test_sigv4.py
@@ -530,12 +530,24 @@ class TestS3SigV4OptIn(MockServiceWithConfigTestCase):
self.assertEqual(fake._required_auth_capability(), ['nope'])
def test_sigv4_non_optional(self):
- # Requires SigV4.
- for region in ['.cn-north', '.eu-central', '-eu-central']:
+ region_groups = ['.cn-north', '.eu-central', '-eu-central']
+ specific_regions = ['.ap-northeast-2', '-ap-northeast-2']
+
+ # Create a connection for a sample region in each of these groups
+ # and ensure sigv4 is used.
+ for region in region_groups:
fake = FakeS3Connection(host='s3' + region + '-1.amazonaws.com')
self.assertEqual(
fake._required_auth_capability(), ['hmac-v4-s3'])
+ # Create a connection from the specific regions and make sure
+ # that these use sigv4.
+ for region in specific_regions:
+ fake = FakeS3Connection(host='s3' + region + '.amazonaws.com')
+ self.assertEqual(
+ fake._required_auth_capability(), ['hmac-v4-s3'])
+
+
def test_sigv4_opt_in_config(self):
# Opt-in via the config.
self.config = {
diff --git a/tests/unit/cloudfront/test_invalidation.py b/tests/unit/cloudfront/test_invalidation.py
new file mode 100644
index 00000000..721c146c
--- /dev/null
+++ b/tests/unit/cloudfront/test_invalidation.py
@@ -0,0 +1,23 @@
+#!/usr/bin/env python
+from tests.compat import unittest
+
+import boto.cloudfront as cf
+
+class CFInvalidationTest(unittest.TestCase):
+
+ cloudfront = True
+
+ def test_wildcard_escape(self):
+ """
+ Test that wildcards are retained as literals
+ See: http://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/Invalidation.html#invalidation-specifying-objects-paths
+ """
+ batch = cf.invalidation.InvalidationBatch()
+ self.assertEqual(batch.escape("/*"), "/*")
+ self.assertEqual(batch.escape("/foo*"), "/foo*")
+ self.assertEqual(batch.escape("/foo/bar/*"), "/foo/bar/*")
+ self.assertEqual(batch.escape("/nowildcard"), "/nowildcard")
+ self.assertEqual(batch.escape("/other special characters"), "/other%20special%20characters")
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/tests/unit/cloudsearchdomain/test_cloudsearchdomain.py b/tests/unit/cloudsearchdomain/test_cloudsearchdomain.py
index 694e98ff..e0758a47 100644
--- a/tests/unit/cloudsearchdomain/test_cloudsearchdomain.py
+++ b/tests/unit/cloudsearchdomain/test_cloudsearchdomain.py
@@ -82,7 +82,8 @@ class CloudSearchDomainConnectionTest(AWSMockServiceTestCase):
}
- self.set_http_response(status_code=200, body=json.dumps(response))
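+ # The mocked HTTP response body is expected as bytes, so encode the JSON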
+ self.set_http_response(status_code=200,
+ body=json.dumps(response).encode('utf-8'))
search_service.domain_connection = self.service_connection
resp = search_service.search()
@@ -109,7 +110,8 @@ class CloudSearchDomainConnectionTest(AWSMockServiceTestCase):
"category": ["cat_a", "cat_b", "cat_c"]
}
- self.set_http_response(status_code=200, body=json.dumps(response))
+ self.set_http_response(status_code=200,
+ body=json.dumps(response).encode('utf-8'))
document_service.domain_connection = self.service_connection
document_service.add("1234", document)
resp = document_service.commit()
diff --git a/tests/unit/dynamodb/test_types.py b/tests/unit/dynamodb/test_types.py
index ed72cc39..a6a3eb9e 100644
--- a/tests/unit/dynamodb/test_types.py
+++ b/tests/unit/dynamodb/test_types.py
@@ -23,7 +23,7 @@
from decimal import Decimal
from tests.compat import unittest
-from boto.compat import six
+from boto.compat import six, json
from boto.dynamodb import types
from boto.dynamodb.exceptions import DynamoDBNumberError
@@ -95,6 +95,13 @@ class TestDynamizer(unittest.TestCase):
self.assertEqual(dynamizer.decode({'NS': ['1.1', '2.2', '3.3']}),
set([1.1, 2.2, 3.3]))
+ def test_decoding_full_doc(self):
+ '''Decode a full document containing a List type that had previously caused errors'''
+ dynamizer = types.Dynamizer()
+ doc = '{"__type__":{"S":"Story"},"company_tickers":{"SS":["NASDAQ-TSLA","NYSE-F","NYSE-GM"]},"modified_at":{"N":"1452525162"},"created_at":{"N":"1452525162"},"version":{"N":"1"},"categories":{"SS":["AUTOMTVE","LTRTR","MANUFCTU","PN","PRHYPE","TAXE","TJ","TL"]},"provider_categories":{"L":[{"S":"F"},{"S":"GM"},{"S":"TSLA"}]},"received_at":{"S":"2016-01-11T11:26:31Z"}}'
+ output_doc = {'provider_categories': ['F', 'GM', 'TSLA'], '__type__': 'Story', 'company_tickers': set(['NASDAQ-TSLA', 'NYSE-GM', 'NYSE-F']), 'modified_at': Decimal('1452525162'), 'version': Decimal('1'), 'received_at': '2016-01-11T11:26:31Z', 'created_at': Decimal('1452525162'), 'categories': set(['LTRTR', 'TAXE', 'MANUFCTU', 'TL', 'TJ', 'AUTOMTVE', 'PRHYPE', 'PN'])}
+ self.assertEqual(json.loads(doc, object_hook=dynamizer.decode), output_doc)
+
class TestBinary(unittest.TestCase):
def test_good_input(self):
diff --git a/tests/unit/ec2/test_connection.py b/tests/unit/ec2/test_connection.py
index b51e0e36..46148f9f 100755
--- a/tests/unit/ec2/test_connection.py
+++ b/tests/unit/ec2/test_connection.py
@@ -1640,6 +1640,23 @@ class TestCreateVolume(TestEC2ConnectionBase):
self.assertEqual(result.id, 'vol-1a2b3c4d')
self.assertTrue(result.encrypted)
+ def test_create_volume_with_specify_kms(self):
+ self.set_http_response(status_code=200)
+ result = self.ec2.create_volume(80, 'us-east-1e', snapshot='snap-1a2b3c4d',
+ encrypted=True, kms_key_id='arn:aws:kms:us-east-1:012345678910:key/abcd1234-a123-456a-a12b-a123b4cd56ef')
+ self.assert_request_parameters({
+ 'Action': 'CreateVolume',
+ 'AvailabilityZone': 'us-east-1e',
+ 'Size': 80,
+ 'SnapshotId': 'snap-1a2b3c4d',
+ 'Encrypted': 'true',
+ 'KmsKeyId': 'arn:aws:kms:us-east-1:012345678910:key/abcd1234-a123-456a-a12b-a123b4cd56ef'},
+ ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
+ 'SignatureVersion', 'Timestamp',
+ 'Version'])
+ self.assertEqual(result.id, 'vol-1a2b3c4d')
+ self.assertTrue(result.encrypted)
+
class TestGetClassicLinkInstances(TestEC2ConnectionBase):
def default_body(self):
diff --git a/tests/unit/ec2containerservice/__init__.py b/tests/unit/ec2containerservice/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/tests/unit/ec2containerservice/__init__.py
diff --git a/tests/unit/ec2containerservice/test_connection.py b/tests/unit/ec2containerservice/test_connection.py
new file mode 100755
index 00000000..6419e33f
--- /dev/null
+++ b/tests/unit/ec2containerservice/test_connection.py
@@ -0,0 +1,33 @@
+# Copyright (c) 2015 Amazon.com, Inc. or its affiliates.
+# All Rights Reserved
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish, dis-
+# tribute, sublicense, and/or sell copies of the Software, and to permit
+# persons to whom the Software is furnished to do so, subject to the fol-
+# lowing conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
+# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+#
+from tests.unit import unittest
+
+import boto.ec2containerservice
+from boto.ec2containerservice.layer1 import EC2ContainerServiceConnection
+
+
+class TestConnectToRegion(unittest.TestCase):
+
+ def test_aws_region(self):
+ ecs = boto.ec2containerservice.connect_to_region('us-east-1')
+ self.assertIsInstance(ecs, EC2ContainerServiceConnection)
diff --git a/tests/unit/emr/test_connection.py b/tests/unit/emr/test_connection.py
index 84d3ff8f..5bcbefaa 100644
--- a/tests/unit/emr/test_connection.py
+++ b/tests/unit/emr/test_connection.py
@@ -370,8 +370,8 @@ class TestListInstances(AWSMockServiceTestCase):
self.assert_request_parameters({
'Action': 'ListInstances',
'ClusterId': 'j-123',
- 'InstanceGroupTypeList.member.1': 'MASTER',
- 'InstanceGroupTypeList.member.2': 'TASK',
+ 'InstanceGroupTypes.member.1': 'MASTER',
+ 'InstanceGroupTypes.member.2': 'TASK',
'Version': '2009-03-31'
})
@@ -500,8 +500,8 @@ class TestListSteps(AWSMockServiceTestCase):
self.assert_request_parameters({
'Action': 'ListSteps',
'ClusterId': 'j-123',
- 'StepStateList.member.1': 'COMPLETED',
- 'StepStateList.member.2': 'FAILED',
+ 'StepStates.member.1': 'COMPLETED',
+ 'StepStates.member.2': 'FAILED',
'Version': '2009-03-31'
})
self.assertTrue(isinstance(response, StepSummaryList))
diff --git a/tests/unit/glacier/test_concurrent.py b/tests/unit/glacier/test_concurrent.py
index dd33e170..9ed9a89e 100644
--- a/tests/unit/glacier/test_concurrent.py
+++ b/tests/unit/glacier/test_concurrent.py
@@ -95,14 +95,20 @@ class TestConcurrentUploader(unittest.TestCase):
def test_correct_low_level_api_calls(self):
api_mock = mock.MagicMock()
+ upload_id = '0898d645-ea45-4548-9a67-578f507ead49'
+ initiate_upload_mock = mock.Mock(
+ return_value={'UploadId': upload_id})
+ # initiate_multipart_upload must return a body containing an `UploadId`
+ api_mock.attach_mock(initiate_upload_mock, 'initiate_multipart_upload')
+
uploader = FakeThreadedConcurrentUploader(api_mock, 'vault_name')
uploader.upload('foofile')
# The threads call the upload_part, so we're just verifying the
# initiate/complete multipart API calls.
- api_mock.initiate_multipart_upload.assert_called_with(
+ initiate_upload_mock.assert_called_with(
'vault_name', 4 * 1024 * 1024, None)
api_mock.complete_multipart_upload.assert_called_with(
- 'vault_name', mock.ANY, mock.ANY, 8 * 1024 * 1024)
+ 'vault_name', upload_id, mock.ANY, 8 * 1024 * 1024)
def test_downloader_work_queue_is_correctly_populated(self):
job = mock.MagicMock()
diff --git a/tests/unit/iam/test_policy.py b/tests/unit/iam/test_policy.py
new file mode 100644
index 00000000..6d47d410
--- /dev/null
+++ b/tests/unit/iam/test_policy.py
@@ -0,0 +1,687 @@
+#!/usr/bin/env python
+# Copyright (c) 2015 Shaun Brady. All Rights Reserved
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish, dis-
+# tribute, sublicense, and/or sell copies of the Software, and to permit
+# persons to whom the Software is furnished to do so, subject to the fol-
+# lowing conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
+# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+#
+
+
+from boto.compat import json
+from boto.iam.connection import IAMConnection
+from tests.unit import AWSMockServiceTestCase
+
+
+class TestCreatePolicy(AWSMockServiceTestCase):
+ connection_class = IAMConnection
+
+ def default_body(self):
+ return b"""
+<CreatePolicyResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
+ <CreatePolicyResult>
+ <Policy>
+ <PolicyName>S3-read-only-example-bucket</PolicyName>
+ <DefaultVersionId>v1</DefaultVersionId>
+ <PolicyId>AGPACKCEVSQ6C2EXAMPLE</PolicyId>
+ <Path>/</Path>
+ <Arn>arn:aws:iam::123456789012:policy/S3-read-only-example-bucket</Arn>
+ <AttachmentCount>0</AttachmentCount>
+ <CreateDate>2014-09-15T17:36:14.673Z</CreateDate>
+ <UpdateDate>2014-09-15T17:36:14.673Z</UpdateDate>
+ </Policy>
+ </CreatePolicyResult>
+ <ResponseMetadata>
+ <RequestId>ca64c9e1-3cfe-11e4-bfad-8d1c6EXAMPLE</RequestId>
+ </ResponseMetadata>
+</CreatePolicyResponse>
+ """
+
+ def test_create_policy(self):
+ self.set_http_response(status_code=200)
+ policy_doc = """
+{
+ "Version": "2012-10-17",
+ "Statement": [
+ {
+ "Sid": "Stmt1430948004000",
+ "Effect": "Deny",
+ "Action": [
+ "s3:*"
+ ],
+ "Resource": [
+ "*"
+ ]
+ }
+ ]
+}
+ """
+ response = self.service_connection.create_policy(
+ 'S3-read-only-example-bucket',
+ policy_doc)
+
+ self.assert_request_parameters(
+ {'Action': 'CreatePolicy',
+ 'PolicyDocument': policy_doc,
+ 'Path': '/',
+ 'PolicyName': 'S3-read-only-example-bucket'},
+ ignore_params_values=['Version'])
+
+ self.assertEqual(response['create_policy_response']
+ ['create_policy_result']
+ ['policy']
+ ['policy_name'],
+ 'S3-read-only-example-bucket')
+
+
+class TestCreatePolicyVersion(AWSMockServiceTestCase):
+ connection_class = IAMConnection
+
+ def default_body(self):
+ return b"""
+<CreatePolicyVersionResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
+ <CreatePolicyVersionResult>
+ <PolicyVersion>
+ <IsDefaultVersion>true</IsDefaultVersion>
+ <VersionId>v2</VersionId>
+ <CreateDate>2014-09-15T19:58:59.430Z</CreateDate>
+ </PolicyVersion>
+ </CreatePolicyVersionResult>
+ <ResponseMetadata>
+ <RequestId>bb551b92-3d12-11e4-bfad-8d1c6EXAMPLE</RequestId>
+ </ResponseMetadata>
+</CreatePolicyVersionResponse>
+ """
+
+ def test_create_policy_version(self):
+ self.set_http_response(status_code=200)
+ policy_doc = """
+{
+ "Version": "2012-10-17",
+ "Statement": [
+ {
+ "Sid": "Stmt1430948004000",
+ "Effect": "Deny",
+ "Action": [
+ "s3:*"
+ ],
+ "Resource": [
+ "*"
+ ]
+ }
+ ]
+}
+ """
+ response = self.service_connection.create_policy_version(
+ 'arn:aws:iam::123456789012:policy/S3-read-only-example-bucket',
+ policy_doc,
+ set_as_default=True)
+
+ self.assert_request_parameters(
+ {'Action': 'CreatePolicyVersion',
+ 'PolicyDocument': policy_doc,
+ 'SetAsDefault': 'true',
+ 'PolicyArn': 'arn:aws:iam::123456789012:policy/S3-read-only-example-bucket'},
+ ignore_params_values=['Version'])
+
+ self.assertEqual(response['create_policy_version_response']
+ ['create_policy_version_result']
+ ['policy_version']
+ ['is_default_version'],
+ 'true')
+
+
+class TestDeletePolicy(AWSMockServiceTestCase):
+ connection_class = IAMConnection
+
+ def default_body(self):
+ return b"""
+<DeletePolicyResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
+ <ResponseMetadata>
+ <RequestId>4706281b-3d19-11e4-a4a0-cffb9EXAMPLE</RequestId>
+ </ResponseMetadata>
+</DeletePolicyResponse>
+ """
+
+ def test_delete_policy(self):
+ self.set_http_response(status_code=200)
+ response = self.service_connection.delete_policy(
+ 'arn:aws:iam::123456789012:policy/S3-read-only-example-bucket')
+
+ self.assert_request_parameters(
+ {'Action': 'DeletePolicy',
+ 'PolicyArn': 'arn:aws:iam::123456789012:policy/S3-read-only-example-bucket'},
+ ignore_params_values=['Version'])
+
+ self.assertEqual('request_id' in response['delete_policy_response']
+ ['response_metadata'],
+ True)
+
+
+class TestDeletePolicyVersion(AWSMockServiceTestCase):
+ connection_class = IAMConnection
+
+ def default_body(self):
+ return b"""
+<DeletePolicyVersionResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
+ <ResponseMetadata>
+ <RequestId>268e1556-3d19-11e4-a4a0-cffb9EXAMPLE</RequestId>
+ </ResponseMetadata>
+</DeletePolicyVersionResponse>
+ """
+
+ def test_delete_policy_version(self):
+ self.set_http_response(status_code=200)
+ response = self.service_connection.delete_policy_version(
+ 'arn:aws:iam::123456789012:policy/S3-read-only-example-bucket',
+ 'v1')
+
+ self.assert_request_parameters(
+ {'Action': 'DeletePolicyVersion',
+ 'PolicyArn': 'arn:aws:iam::123456789012:policy/S3-read-only-example-bucket',
+ 'VersionId': 'v1'},
+ ignore_params_values=['Version'])
+
+ self.assertEqual('request_id' in response['delete_policy_version_response']
+ ['response_metadata'],
+ True)
+
+
+class TestGetPolicy(AWSMockServiceTestCase):
+ connection_class = IAMConnection
+
+ def default_body(self):
+ return b"""
+<GetPolicyResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
+ <GetPolicyResult>
+ <Policy>
+ <PolicyName>S3-read-only-example-bucket</PolicyName>
+ <DefaultVersionId>v1</DefaultVersionId>
+ <PolicyId>AGPACKCEVSQ6C2EXAMPLE</PolicyId>
+ <Path>/</Path>
+ <Arn>arn:aws:iam::123456789012:policy/S3-read-only-example-bucket</Arn>
+ <AttachmentCount>9</AttachmentCount>
+ <CreateDate>2014-09-15T17:36:14Z</CreateDate>
+ <UpdateDate>2014-09-15T20:31:47Z</UpdateDate>
+ <Description>My Awesome Policy</Description>
+ </Policy>
+ </GetPolicyResult>
+ <ResponseMetadata>
+ <RequestId>684f0917-3d22-11e4-a4a0-cffb9EXAMPLE</RequestId>
+ </ResponseMetadata>
+</GetPolicyResponse>
+ """
+
+ def test_get_policy(self):
+ self.set_http_response(status_code=200)
+ response = self.service_connection.get_policy(
+ 'arn:aws:iam::123456789012:policy/S3-read-only-example-bucket')
+
+ self.assert_request_parameters(
+ {'Action': 'GetPolicy',
+ 'PolicyArn': 'arn:aws:iam::123456789012:policy/S3-read-only-example-bucket'},
+ ignore_params_values=['Version'])
+
+ self.assertEqual(response['get_policy_response']
+ ['get_policy_result']
+ ['policy']
+ ['arn'],
+ 'arn:aws:iam::123456789012:policy/S3-read-only-example-bucket')
+
+ self.assertEqual(response['get_policy_response']
+ ['get_policy_result']
+ ['policy']
+ ['description'],
+ 'My Awesome Policy')
+
+
+class TestGetPolicyVersion(AWSMockServiceTestCase):
+ connection_class = IAMConnection
+
+ def default_body(self):
+ return b"""
+<GetPolicyVersionResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
+ <GetPolicyVersionResult>
+ <PolicyVersion>
+ <Document>
+ {"Version":"2012-10-17","Statement":[{"Effect":"Allow","Action":["s3:Get*","s3:List*"],
+ "Resource":["arn:aws:s3:::EXAMPLE-BUCKET","arn:aws:s3:::EXAMPLE-BUCKET/*"]}]}
+ </Document>
+ <IsDefaultVersion>true</IsDefaultVersion>
+ <VersionId>v1</VersionId>
+ <CreateDate>2014-09-15T20:31:47Z</CreateDate>
+ </PolicyVersion>
+ </GetPolicyVersionResult>
+ <ResponseMetadata>
+ <RequestId>d472f28e-3d23-11e4-a4a0-cffb9EXAMPLE</RequestId>
+ </ResponseMetadata>
+</GetPolicyVersionResponse>
+ """
+
+ def test_get_policy_version(self):
+ self.set_http_response(status_code=200)
+ response = self.service_connection.get_policy_version(
+ 'arn:aws:iam::123456789012:policy/S3-read-only-example-bucket',
+ 'v1')
+
+ self.assert_request_parameters(
+ {'Action': 'GetPolicyVersion',
+ 'PolicyArn': 'arn:aws:iam::123456789012:policy/S3-read-only-example-bucket',
+ 'VersionId': 'v1'},
+ ignore_params_values=['Version'])
+
+ self.assertEqual(response['get_policy_version_response']
+ ['get_policy_version_result']
+ ['policy_version']
+ ['version_id'],
+ 'v1')
+
+
+class TestListPolicies(AWSMockServiceTestCase):
+ connection_class = IAMConnection
+
+ def default_body(self):
+ return b"""
+<ListPoliciesResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
+ <ListPoliciesResult>
+ <IsTruncated>true</IsTruncated>
+ <Marker>EXAMPLEkakv9BCuUNFDtxWSyfzetYwEx2ADc8dnzfvERF5S6YMvXKx41t6gCl/eeaCX3Jo94/bKqezEAg8TEVS99EKFLxm3jtbpl25FDWEXAMPLE
+ </Marker>
+ <Policies>
+ <member>
+ <PolicyName>ExamplePolicy</PolicyName>
+ <DefaultVersionId>v1</DefaultVersionId>
+ <PolicyId>AGPACKCEVSQ6C2EXAMPLE</PolicyId>
+ <Path>/</Path>
+ <Arn>arn:aws:iam::123456789012:policy/ExamplePolicy</Arn>
+ <AttachmentCount>2</AttachmentCount>
+ <CreateDate>2014-09-15T17:36:14Z</CreateDate>
+ <UpdateDate>2014-09-15T20:31:47Z</UpdateDate>
+ </member>
+ <member>
+ <PolicyName>PowerUserAccess</PolicyName>
+ <DefaultVersionId>v1</DefaultVersionId>
+ <PolicyId>AGPACKCEVSQ6C2EXAMPLE</PolicyId>
+ <Path>/</Path>
+ <Arn>arn:aws:iam::aws:policy/PowerUserAccess</Arn>
+ <AttachmentCount>0</AttachmentCount>
+ <CreateDate>2014-08-21T20:25:01Z</CreateDate>
+ <UpdateDate>2014-08-21T20:25:01Z</UpdateDate>
+ </member>
+ <member>
+ <PolicyName>AdministratorAccess</PolicyName>
+ <DefaultVersionId>v1</DefaultVersionId>
+ <PolicyId>AGPACKCEVSQ6C2EXAMPLE</PolicyId>
+ <Path>/</Path>
+ <Arn>arn:aws:iam::aws:policy/AdministratorAccess</Arn>
+ <AttachmentCount>1</AttachmentCount>
+ <CreateDate>2014-08-21T20:11:25Z</CreateDate>
+ <UpdateDate>2014-08-21T20:11:25Z</UpdateDate>
+ </member>
+ <member>
+ <PolicyName>ReadOnlyAccess</PolicyName>
+ <DefaultVersionId>v1</DefaultVersionId>
+ <PolicyId>AGPACKCEVSQ6C2EXAMPLE</PolicyId>
+ <Path>/</Path>
+ <Arn>arn:aws:iam::aws:policy/ReadOnlyAccess</Arn>
+ <AttachmentCount>6</AttachmentCount>
+ <CreateDate>2014-08-21T20:31:44Z</CreateDate>
+ <UpdateDate>2014-08-21T20:31:44Z</UpdateDate>
+ </member>
+ </Policies>
+ </ListPoliciesResult>
+ <ResponseMetadata>
+ <RequestId>6207e832-3eb7-11e4-9d0d-6f969EXAMPLE</RequestId>
+ </ResponseMetadata>
+</ListPoliciesResponse>
+ """
+
+ def test_list_policies(self):
+ self.set_http_response(status_code=200)
+ response = self.service_connection.list_policies(
+ max_items=4)
+ self.assert_request_parameters(
+ {'Action': 'ListPolicies',
+ 'MaxItems': 4},
+ ignore_params_values=['Version'])
+
+ self.assertEqual(len(response['list_policies_response']
+ ['list_policies_result']
+ ['policies']),
+ 4)
+
+ self.assertEqual(response['list_policies_response']
+ ['list_policies_result']
+ ['is_truncated'],
+ 'true')
+
+
+class TestListPolicyVersions(AWSMockServiceTestCase):
+ connection_class = IAMConnection
+
+ def default_body(self):
+ return b"""
+<ListPolicyVersionsResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
+ <ListPolicyVersionsResult>
+ <Versions>
+ <member>
+ <IsDefaultVersion>false</IsDefaultVersion>
+ <VersionId>v3</VersionId>
+ <CreateDate>2014-09-17T22:32:43Z</CreateDate>
+ </member>
+ <member>
+ <IsDefaultVersion>true</IsDefaultVersion>
+ <VersionId>v2</VersionId>
+ <CreateDate>2014-09-15T20:31:47Z</CreateDate>
+ </member>
+ <member>
+ <IsDefaultVersion>false</IsDefaultVersion>
+ <VersionId>v1</VersionId>
+ <CreateDate>2014-09-15T17:36:14Z</CreateDate>
+ </member>
+ </Versions>
+ <IsTruncated>false</IsTruncated>
+ </ListPolicyVersionsResult>
+ <ResponseMetadata>
+ <RequestId>a31d1a86-3eba-11e4-9d0d-6f969EXAMPLE</RequestId>
+ </ResponseMetadata>
+</ListPolicyVersionsResponse>
+ """
+
+ def test_list_policy_versions(self):
+ self.set_http_response(status_code=200)
+ response = self.service_connection.list_policy_versions(
+ 'arn:aws:iam::123456789012:policy/S3-read-only-example-bucket',
+ max_items=3)
+
+ self.assert_request_parameters(
+ {'Action': 'ListPolicyVersions',
+ 'PolicyArn': 'arn:aws:iam::123456789012:policy/S3-read-only-example-bucket',
+ 'MaxItems': 3},
+ ignore_params_values=['Version'])
+
+ self.assertEqual(len(response['list_policy_versions_response']
+ ['list_policy_versions_result']
+ ['versions']),
+ 3)
+
+
+class TestSetDefaultPolicyVersion(AWSMockServiceTestCase):
+ connection_class = IAMConnection
+
+ def default_body(self):
+ return b"""
+<SetDefaultPolicyVersionResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
+ <ResponseMetadata>
+ <RequestId>35f241af-3ebc-11e4-9d0d-6f969EXAMPLE</RequestId>
+ </ResponseMetadata>
+</SetDefaultPolicyVersionResponse>
+ """
+
+ def test_set_default_policy_version(self):
+ self.set_http_response(status_code=200)
+ response = self.service_connection.set_default_policy_version(
+ 'arn:aws:iam::123456789012:policy/S3-read-only-example-bucket',
+ 'v1')
+
+ self.assert_request_parameters(
+ {'Action': 'SetDefaultPolicyVersion',
+ 'PolicyArn': 'arn:aws:iam::123456789012:policy/S3-read-only-example-bucket',
+ 'VersionId': 'v1'},
+ ignore_params_values=['Version'])
+
+ self.assertEqual('request_id' in response['set_default_policy_version_response']
+ ['response_metadata'],
+ True)
+
+
+class TestListEntitiesForPolicy(AWSMockServiceTestCase):
+ connection_class = IAMConnection
+
+ def default_body(self):
+ return b"""
+<ListEntitiesForPolicyResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
+ <ListEntitiesForPolicyResult>
+ <PolicyRoles>
+ <member>
+ <RoleName>DevRole</RoleName>
+ </member>
+ </PolicyRoles>
+ <PolicyGroups>
+ <member>
+ <GroupName>Dev</GroupName>
+ </member>
+ </PolicyGroups>
+ <IsTruncated>false</IsTruncated>
+ <PolicyUsers>
+ <member>
+ <UserName>Alice</UserName>
+ </member>
+ <member>
+ <UserName>Bob</UserName>
+ </member>
+ </PolicyUsers>
+ </ListEntitiesForPolicyResult>
+ <ResponseMetadata>
+ <RequestId>eb358e22-9d1f-11e4-93eb-190ecEXAMPLE</RequestId>
+ </ResponseMetadata>
+</ListEntitiesForPolicyResponse>
+ """
+
+ def test_list_entities_for_policy(self):
+ self.set_http_response(status_code=200)
+ response = self.service_connection.list_entities_for_policy(
+ 'arn:aws:iam::123456789012:policy/S3-read-only-example-bucket')
+
+ self.assert_request_parameters(
+ {'Action': 'ListEntitiesForPolicy',
+ 'PolicyArn': 'arn:aws:iam::123456789012:policy/S3-read-only-example-bucket'},
+ ignore_params_values=['Version'])
+
+ self.assertEqual(len(response['list_entities_for_policy_response']
+ ['list_entities_for_policy_result']
+ ['policy_roles']),
+ 1)
+
+ self.assertEqual(len(response['list_entities_for_policy_response']
+ ['list_entities_for_policy_result']
+ ['policy_groups']),
+ 1)
+
+ self.assertEqual(len(response['list_entities_for_policy_response']
+ ['list_entities_for_policy_result']
+ ['policy_users']),
+ 2)
+
+ self.assertEqual({'user_name': 'Alice'} in response['list_entities_for_policy_response']
+ ['list_entities_for_policy_result']
+ ['policy_users'],
+ True)
+
+
+class TestAttachGroupPolicy(AWSMockServiceTestCase):
+ connection_class = IAMConnection
+
+ def default_body(self):
+ return b"""
+<AttachGroupPolicyResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
+ <ResponseMetadata>
+ <RequestId>f8a7b7b9-3d01-11e4-bfad-8d1c6EXAMPLE</RequestId>
+ </ResponseMetadata>
+</AttachGroupPolicyResponse>
+ """
+
+ def test_attach_group_policy(self):
+ self.set_http_response(status_code=200)
+ response = self.service_connection.attach_group_policy(
+ 'arn:aws:iam::123456789012:policy/S3-read-only-example-bucket',
+ 'Dev')
+
+ self.assert_request_parameters(
+ {'Action': 'AttachGroupPolicy',
+ 'PolicyArn': 'arn:aws:iam::123456789012:policy/S3-read-only-example-bucket',
+ 'GroupName': 'Dev'},
+ ignore_params_values=['Version'])
+
+ self.assertEqual('request_id' in response['attach_group_policy_response']
+ ['response_metadata'],
+ True)
+
+
+class TestAttachRolePolicy(AWSMockServiceTestCase):
+ connection_class = IAMConnection
+
+ def default_body(self):
+ return b"""
+<AttachRolePolicyResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
+ <ResponseMetadata>
+ <RequestId>37a87673-3d07-11e4-bfad-8d1c6EXAMPLE</RequestId>
+ </ResponseMetadata>
+</AttachRolePolicyResponse>
+ """
+
+ def test_attach_role_policy(self):
+ self.set_http_response(status_code=200)
+ response = self.service_connection.attach_role_policy(
+ 'arn:aws:iam::123456789012:policy/S3-read-only-example-bucket',
+ 'DevRole')
+
+ self.assert_request_parameters(
+ {'Action': 'AttachRolePolicy',
+ 'PolicyArn': 'arn:aws:iam::123456789012:policy/S3-read-only-example-bucket',
+ 'RoleName': 'DevRole'},
+ ignore_params_values=['Version'])
+
+ self.assertEqual('request_id' in response['attach_role_policy_response']
+ ['response_metadata'],
+ True)
+
+
+class TestAttachUserPolicy(AWSMockServiceTestCase):
+ connection_class = IAMConnection
+
+ def default_body(self):
+ return b"""
+<AttachUserPolicyResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
+ <ResponseMetadata>
+ <RequestId>ed7e72d3-3d07-11e4-bfad-8d1c6EXAMPLE</RequestId>
+ </ResponseMetadata>
+</AttachUserPolicyResponse>
+ """
+
+ def test_attach_user_policy(self):
+ self.set_http_response(status_code=200)
+ response = self.service_connection.attach_user_policy(
+ 'arn:aws:iam::123456789012:policy/S3-read-only-example-bucket',
+ 'Alice')
+
+ self.assert_request_parameters(
+ {'Action': 'AttachUserPolicy',
+ 'PolicyArn': 'arn:aws:iam::123456789012:policy/S3-read-only-example-bucket',
+ 'UserName': 'Alice'},
+ ignore_params_values=['Version'])
+
+ self.assertEqual('request_id' in response['attach_user_policy_response']
+ ['response_metadata'],
+ True)
+
+
+class TestDetachGroupPolicy(AWSMockServiceTestCase):
+ connection_class = IAMConnection
+
+ def default_body(self):
+ return b"""
+<DetachGroupPolicyResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
+ <ResponseMetadata>
+ <RequestId>d4faa7aa-3d1d-11e4-a4a0-cffb9EXAMPLE</RequestId>
+ </ResponseMetadata>
+</DetachGroupPolicyResponse>
+ """
+
+ def test_detach_group_policy(self):
+ self.set_http_response(status_code=200)
+ response = self.service_connection.detach_group_policy(
+ 'arn:aws:iam::123456789012:policy/S3-read-only-example-bucket',
+ 'Dev')
+
+ self.assert_request_parameters(
+ {'Action': 'DetachGroupPolicy',
+ 'PolicyArn': 'arn:aws:iam::123456789012:policy/S3-read-only-example-bucket',
+ 'GroupName': 'Dev'},
+ ignore_params_values=['Version'])
+
+ self.assertEqual('request_id' in response['detach_group_policy_response']
+ ['response_metadata'],
+ True)
+
+
+class TestDetachRolePolicy(AWSMockServiceTestCase):
+ connection_class = IAMConnection
+
+ def default_body(self):
+ return b"""
+<DetachRolePolicyResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
+ <ResponseMetadata>
+ <RequestId>4c80ccf4-3d1e-11e4-a4a0-cffb9EXAMPLE</RequestId>
+ </ResponseMetadata>
+</DetachRolePolicyResponse>
+ """
+
+ def test_detach_role_policy(self):
+ self.set_http_response(status_code=200)
+ response = self.service_connection.detach_role_policy(
+ 'arn:aws:iam::123456789012:policy/S3-read-only-example-bucket',
+ 'DevRole')
+
+ self.assert_request_parameters(
+ {'Action': 'DetachRolePolicy',
+ 'PolicyArn': 'arn:aws:iam::123456789012:policy/S3-read-only-example-bucket',
+ 'RoleName': 'DevRole'},
+ ignore_params_values=['Version'])
+
+ self.assertEqual('request_id' in response['detach_role_policy_response']
+ ['response_metadata'],
+ True)
+
+
+class TestDetachUserPolicy(AWSMockServiceTestCase):
+ connection_class = IAMConnection
+
+ def default_body(self):
+ return b"""
+<DetachUserPolicyResponse xmlns="https://iam.amazonaws.com/doc/2010-05-08/">
+ <ResponseMetadata>
+ <RequestId>85ba31fa-3d1f-11e4-a4a0-cffb9EXAMPLE</RequestId>
+ </ResponseMetadata>
+</DetachUserPolicyResponse>
+ """
+
+ def test_detach_user_policy(self):
+ self.set_http_response(status_code=200)
+ response = self.service_connection.detach_user_policy(
+ 'arn:aws:iam::123456789012:policy/S3-read-only-example-bucket',
+ 'Alice')
+
+ self.assert_request_parameters(
+ {'Action': 'DetachUserPolicy',
+ 'PolicyArn': 'arn:aws:iam::123456789012:policy/S3-read-only-example-bucket',
+ 'UserName': 'Alice'},
+ ignore_params_values=['Version'])
+
+ self.assertEqual('request_id' in response['detach_user_policy_response']
+ ['response_metadata'],
+ True)
diff --git a/tests/unit/kms/test_kms.py b/tests/unit/kms/test_kms.py
index c46e831a..059e3bf3 100644
--- a/tests/unit/kms/test_kms.py
+++ b/tests/unit/kms/test_kms.py
@@ -39,7 +39,7 @@ class TestKinesis(AWSMockServiceTestCase):
self.set_http_response(status_code=200)
data = b'\x00\x01\x02\x03\x04\x05'
self.service_connection.encrypt(key_id='foo', plaintext=data)
- body = json.loads(self.actual_request.body)
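+ # The captured request body is bytes; decode it before parsing the JSON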
+ body = json.loads(self.actual_request.body.decode('utf-8'))
self.assertEqual(body['Plaintext'], 'AAECAwQF')
def test_non_binary_input_for_blobs_fails(self):
diff --git a/tests/unit/s3/test_bucket.py b/tests/unit/s3/test_bucket.py
index 72e10ed2..f35e078b 100644
--- a/tests/unit/s3/test_bucket.py
+++ b/tests/unit/s3/test_bucket.py
@@ -6,7 +6,7 @@ from tests.unit import unittest
from tests.unit import AWSMockServiceTestCase
from boto.exception import BotoClientError
-from boto.s3.connection import S3Connection
+from boto.s3.connection import Location, S3Connection
from boto.s3.bucket import Bucket
from boto.s3.deletemarker import DeleteMarker
from boto.s3.key import Key
@@ -25,6 +25,14 @@ class TestS3Bucket(AWSMockServiceTestCase):
bucket = self.service_connection.create_bucket('mybucket_create')
self.assertEqual(bucket.name, 'mybucket_create')
+ def test_bucket_create_eu_central_1_location(self):
+ self.set_http_response(status_code=200)
+ bucket = self.service_connection.create_bucket(
+ 'eu_central_1_bucket',
+ location=Location.EUCentral1
+ )
+ self.assertEqual(bucket.name, 'eu_central_1_bucket')
+
def test_bucket_constructor(self):
self.set_http_response(status_code=200)
bucket = Bucket(self.service_connection, 'mybucket_constructor')
diff --git a/tests/unit/utils/test_utils.py b/tests/unit/utils/test_utils.py
index d96978c1..db15b56d 100644
--- a/tests/unit/utils/test_utils.py
+++ b/tests/unit/utils/test_utils.py
@@ -317,5 +317,101 @@ class TestStringToDatetimeParsing(unittest.TestCase):
self.assertEqual(6, result.minute)
+class TestHostIsIPV6(unittest.TestCase):
+
+ def test_is_ipv6_no_brackets(self):
+ hostname = 'bf1d:cb48:4513:d1f1:efdd:b290:9ff9:64be'
+ result = boto.utils.host_is_ipv6(hostname)
+ self.assertTrue(result)
+
+ def test_is_ipv6_with_brackets(self):
+ hostname = '[bf1d:cb48:4513:d1f1:efdd:b290:9ff9:64be]'
+ result = boto.utils.host_is_ipv6(hostname)
+ self.assertTrue(result)
+
+ def test_is_ipv6_with_brackets_and_port(self):
+ hostname = '[bf1d:cb48:4513:d1f1:efdd:b290:9ff9:64be]:8080'
+ result = boto.utils.host_is_ipv6(hostname)
+ self.assertTrue(result)
+
+ def test_is_ipv6_no_brackets_abbreviated(self):
+ hostname = 'bf1d:cb48:4513::'
+ result = boto.utils.host_is_ipv6(hostname)
+ self.assertTrue(result)
+
+ def test_is_ipv6_with_brackets_abbreviated(self):
+ hostname = '[bf1d:cb48:4513::'
+ result = boto.utils.host_is_ipv6(hostname)
+ self.assertTrue(result)
+
+ def test_is_ipv6_with_brackets_and_port_abbreviated(self):
+ hostname = '[bf1d:cb48:4513::]:8080'
+ result = boto.utils.host_is_ipv6(hostname)
+ self.assertTrue(result)
+
+ def test_empty_string(self):
+ result = boto.utils.host_is_ipv6('')
+ self.assertFalse(result)
+
+ def test_not_of_string_type(self):
+ hostnames = [None, 0, False, [], {}]
+ for h in hostnames:
+ result = boto.utils.host_is_ipv6(h)
+ self.assertFalse(result)
+
+ def test_ipv4_no_port(self):
+ result = boto.utils.host_is_ipv6('192.168.1.1')
+ self.assertFalse(result)
+
+ def test_ipv4_with_port(self):
+ result = boto.utils.host_is_ipv6('192.168.1.1:8080')
+ self.assertFalse(result)
+
+ def test_hostnames_are_not_ipv6_with_port(self):
+ result = boto.utils.host_is_ipv6('example.org:8080')
+ self.assertFalse(result)
+
+ def test_hostnames_are_not_ipv6_without_port(self):
+ result = boto.utils.host_is_ipv6('example.org')
+ self.assertFalse(result)
+
+
+class TestParseHost(unittest.TestCase):
+
+ def test_parses_ipv6_hosts_no_brackets(self):
+ host = 'bf1d:cb48:4513:d1f1:efdd:b290:9ff9:64be'
+ result = boto.utils.parse_host(host)
+ self.assertEquals(result, host)
+
+ def test_parses_ipv6_hosts_with_brackets_stripping_them(self):
+ host = '[bf1d:cb48:4513:d1f1:efdd:b290:9ff9:64be]'
+ result = boto.utils.parse_host(host)
+ self.assertEquals(result, 'bf1d:cb48:4513:d1f1:efdd:b290:9ff9:64be')
+
+ def test_parses_ipv6_hosts_with_brackets_and_port(self):
+ host = '[bf1d:cb48:4513:d1f1:efdd:b290:9ff9:64be]:8080'
+ result = boto.utils.parse_host(host)
+ self.assertEquals(result, 'bf1d:cb48:4513:d1f1:efdd:b290:9ff9:64be')
+
+ def test_parses_ipv4_hosts(self):
+ host = '10.0.1.1'
+ result = boto.utils.parse_host(host)
+ self.assertEquals(result, host)
+
+ def test_parses_ipv4_hosts_with_port(self):
+ host = '192.168.168.200:8080'
+ result = boto.utils.parse_host(host)
+ self.assertEquals(result, '192.168.168.200')
+
+ def test_parses_hostnames_with_port(self):
+ host = 'example.org:8080'
+ result = boto.utils.parse_host(host)
+ self.assertEquals(result, 'example.org')
+
+ def test_parses_hostnames_without_port(self):
+ host = 'example.org'
+ result = boto.utils.parse_host(host)
+ self.assertEquals(result, host)
+
if __name__ == '__main__':
unittest.main()
diff --git a/tests/unit/vpc/test_routetable.py b/tests/unit/vpc/test_routetable.py
index c90e56c4..4948fa12 100644
--- a/tests/unit/vpc/test_routetable.py
+++ b/tests/unit/vpc/test_routetable.py
@@ -31,7 +31,7 @@ class TestDescribeRouteTables(AWSMockServiceTestCase):
<main>true</main>
</item>
</associationSet>
- <tagSet/>
+ <tagSet/>
</item>
<item>
<routeTableId>rtb-f9ad4890</routeTableId>
@@ -47,6 +47,7 @@ class TestDescribeRouteTables(AWSMockServiceTestCase):
<destinationCidrBlock>0.0.0.0/0</destinationCidrBlock>
<gatewayId>igw-eaad4883</gatewayId>
<state>active</state>
+ <origin>CreateRoute</origin>
</item>
<item>
<destinationCidrBlock>10.0.0.0/21</destinationCidrBlock>
@@ -94,6 +95,7 @@ class TestDescribeRouteTables(AWSMockServiceTestCase):
self.assertEquals(api_response[0].routes[0].destination_cidr_block, '10.0.0.0/22')
self.assertEquals(api_response[0].routes[0].gateway_id, 'local')
self.assertEquals(api_response[0].routes[0].state, 'active')
+ self.assertEquals(api_response[0].routes[0].origin, 'CreateRouteTable')
self.assertEquals(len(api_response[0].associations), 1)
self.assertEquals(api_response[0].associations[0].id, 'rtbassoc-12ad487b')
self.assertEquals(api_response[0].associations[0].route_table_id, 'rtb-13ad487a')
@@ -104,15 +106,19 @@ class TestDescribeRouteTables(AWSMockServiceTestCase):
self.assertEquals(api_response[1].routes[0].destination_cidr_block, '10.0.0.0/22')
self.assertEquals(api_response[1].routes[0].gateway_id, 'local')
self.assertEquals(api_response[1].routes[0].state, 'active')
+ self.assertEquals(api_response[1].routes[0].origin, 'CreateRouteTable')
self.assertEquals(api_response[1].routes[1].destination_cidr_block, '0.0.0.0/0')
self.assertEquals(api_response[1].routes[1].gateway_id, 'igw-eaad4883')
self.assertEquals(api_response[1].routes[1].state, 'active')
+ self.assertEquals(api_response[1].routes[1].origin, 'CreateRoute')
self.assertEquals(api_response[1].routes[2].destination_cidr_block, '10.0.0.0/21')
self.assertEquals(api_response[1].routes[2].interface_id, 'eni-884ec1d1')
self.assertEquals(api_response[1].routes[2].state, 'blackhole')
+ self.assertEquals(api_response[1].routes[2].origin, 'CreateRoute')
self.assertEquals(api_response[1].routes[3].destination_cidr_block, '11.0.0.0/22')
self.assertEquals(api_response[1].routes[3].vpc_peering_connection_id, 'pcx-efc52b86')
self.assertEquals(api_response[1].routes[3].state, 'blackhole')
+ self.assertEquals(api_response[1].routes[3].origin, 'CreateRoute')
self.assertEquals(len(api_response[1].associations), 1)
self.assertEquals(api_response[1].associations[0].id, 'rtbassoc-faad4893')
self.assertEquals(api_response[1].associations[0].route_table_id, 'rtb-f9ad4890')