author     Daniel G. Taylor <danielgtaylor@gmail.com>   2013-10-09 14:19:40 -0700
committer  Daniel G. Taylor <danielgtaylor@gmail.com>   2013-10-09 14:19:40 -0700
commit     5005178d88ef8e4bbe8c30949ffab46e46a716b0 (patch)
tree       9a726460c3d64e438dada4e66b584aa189058d5f
parent     3c56d13f56d4db34ea59eb526e221c1e07728c98 (diff)
parent     a00851636307f971b03e72e1ce812cd3242816f3 (diff)
download   boto-2.14.0.tar.gz
Merge branch 'release-2.14.0' (tag: 2.14.0)
-rw-r--r--  .gitignore | 2
-rw-r--r--  README.rst | 17
-rwxr-xr-x  bin/dynamodb_load | 2
-rwxr-xr-x  bin/elbadmin | 5
-rwxr-xr-x  bin/instance_events | 2
-rwxr-xr-x  bin/list_instances | 2
-rwxr-xr-x  bin/s3put | 98
-rw-r--r--  boto/__init__.py | 9
-rw-r--r--  boto/auth.py | 58
-rw-r--r--  boto/cacerts/cacerts.txt | 3289
-rwxr-xr-x [-rw-r--r--]  boto/cloudformation/stack.py | 13
-rw-r--r--  boto/cloudfront/distribution.py | 26
-rw-r--r--  boto/cloudsearch/__init__.py | 2
-rw-r--r--  boto/cloudsearch/layer1.py | 26
-rw-r--r--  boto/cloudsearch/layer2.py | 16
-rw-r--r--  boto/cloudsearch/search.py | 15
-rw-r--r--  boto/connection.py | 122
-rw-r--r--  boto/dynamodb/__init__.py | 3
-rw-r--r--  boto/dynamodb2/__init__.py | 6
-rw-r--r--  boto/dynamodb2/items.py | 202
-rw-r--r--  boto/dynamodb2/layer1.py | 17
-rw-r--r--  boto/dynamodb2/results.py | 22
-rw-r--r--  boto/dynamodb2/table.py | 66
-rw-r--r--  boto/ec2/__init__.py | 6
-rw-r--r--  boto/ec2/address.py | 33
-rw-r--r--  boto/ec2/autoscale/__init__.py | 48
-rw-r--r--  boto/ec2/autoscale/group.py | 6
-rw-r--r--  boto/ec2/autoscale/policy.py | 7
-rw-r--r--  boto/ec2/autoscale/tag.py | 6
-rw-r--r--  boto/ec2/blockdevicemapping.py | 12
-rw-r--r--  boto/ec2/cloudwatch/__init__.py | 1
-rw-r--r--  boto/ec2/cloudwatch/alarm.py | 4
-rw-r--r--  boto/ec2/connection.py | 1026
-rw-r--r--  boto/ec2/ec2object.py | 20
-rw-r--r--  boto/ec2/elb/__init__.py | 35
-rw-r--r--  boto/ec2/elb/loadbalancer.py | 41
-rw-r--r--  boto/ec2/elb/policies.py | 22
-rw-r--r--  boto/ec2/image.py | 82
-rw-r--r--  boto/ec2/instance.py | 104
-rw-r--r--  boto/ec2/keypair.py | 18
-rw-r--r--  boto/ec2/networkinterface.py | 52
-rw-r--r--  boto/ec2/placementgroup.py | 11
-rw-r--r--  boto/ec2/reservedinstance.py | 126
-rw-r--r--  boto/ec2/securitygroup.py | 93
-rw-r--r--  boto/ec2/snapshot.py | 61
-rw-r--r--  boto/ec2/spotdatafeedsubscription.py | 10
-rw-r--r--  boto/ec2/spotinstancerequest.py | 7
-rw-r--r--  boto/ec2/volume.py | 52
-rw-r--r--  boto/elasticache/__init__.py | 3
-rw-r--r--  boto/elasticache/layer1.py | 1145
-rw-r--r--  boto/emr/__init__.py | 14
-rw-r--r--  boto/emr/connection.py | 2
-rw-r--r--  boto/emr/instance_group.py | 4
-rw-r--r--  boto/exception.py | 18
-rw-r--r--  boto/glacier/layer2.py | 12
-rw-r--r--  boto/gs/bucket.py | 24
-rw-r--r--  boto/gs/key.py | 13
-rw-r--r--  boto/iam/__init__.py | 3
-rw-r--r--  boto/iam/connection.py | 5
-rw-r--r--  boto/manage/server.py | 6
-rw-r--r--  boto/mashups/server.py | 2
-rw-r--r--  boto/mws/response.py | 4
-rw-r--r--  boto/opsworks/layer1.py | 1018
-rw-r--r--  boto/provider.py | 4
-rw-r--r--  boto/pyami/installers/ubuntu/ebs.py | 2
-rw-r--r--  boto/rds/__init__.py | 178
-rw-r--r--  boto/rds/dbinstance.py | 17
-rw-r--r--  boto/rds/dbsubnetgroup.py | 69
-rw-r--r--  boto/rds/vpcsecuritygroupmembership.py | 85
-rw-r--r--  boto/redshift/exceptions.py | 8
-rw-r--r--  boto/redshift/layer1.py | 195
-rw-r--r--  boto/route53/record.py | 3
-rw-r--r--  boto/s3/__init__.py | 3
-rw-r--r--  boto/s3/key.py | 54
-rw-r--r--  boto/s3/keyfile.py | 2
-rw-r--r--  boto/s3/multipart.py | 4
-rw-r--r--  boto/sdb/db/manager/__init__.py | 4
-rw-r--r--  boto/sdb/db/manager/sdbmanager.py | 18
-rw-r--r--  boto/sdb/db/model.py | 25
-rw-r--r--  boto/ses/connection.py | 3
-rw-r--r--  boto/sns/__init__.py | 3
-rw-r--r--  boto/sns/connection.py | 544
-rw-r--r--  boto/sqs/__init__.py | 2
-rw-r--r--  boto/sqs/message.py | 3
-rw-r--r--  boto/sts/__init__.py | 4
-rw-r--r--  boto/sts/connection.py | 8
-rw-r--r--  boto/sts/credentials.py | 3
-rw-r--r--  boto/swf/__init__.py | 1
-rw-r--r--  boto/swf/layer1.py | 44
-rw-r--r--  boto/swf/layer2.py | 40
-rw-r--r--  boto/vpc/__init__.py | 329
-rw-r--r--  boto/vpc/vpc.py | 7
-rw-r--r--  boto/vpc/vpnconnection.py | 7
-rw-r--r--  boto/vpc/vpngateway.py | 16
-rw-r--r--  docs/Makefile | 8
-rw-r--r--  docs/source/apps_built_on_boto.rst | 44
-rw-r--r--  docs/source/autoscale_tut.rst | 3
-rw-r--r--  docs/source/commandline.rst | 85
-rw-r--r--  docs/source/contributing.rst | 23
-rw-r--r--  docs/source/dynamodb2_tut.rst | 8
-rw-r--r--  docs/source/ec2_tut.rst | 2
-rw-r--r--  docs/source/index.rst | 50
-rw-r--r--  docs/source/ref/cloudwatch.rst | 7
-rw-r--r--  docs/source/ref/contrib.rst | 17
-rw-r--r--  docs/source/ref/elb.rst | 26
-rw-r--r--  docs/source/ref/opsworks.rst | 28
-rw-r--r--  docs/source/ref/s3.rst | 7
-rw-r--r--  docs/source/ref/sdb_db.rst | 31
-rw-r--r--  docs/source/ref/swf.rst | 5
-rw-r--r--  docs/source/releasenotes/dev.rst | 21
-rw-r--r--  docs/source/releasenotes/releasenotes_template.rst | 21
-rw-r--r--  docs/source/releasenotes/v2.0.0.rst | 2
-rw-r--r--  docs/source/releasenotes/v2.10.0.rst | 54
-rw-r--r--  docs/source/releasenotes/v2.11.0.rst | 62
-rw-r--r--  docs/source/releasenotes/v2.12.0.rst | 32
-rw-r--r--  docs/source/releasenotes/v2.13.0.rst | 40
-rw-r--r--  docs/source/releasenotes/v2.13.2.rst | 39
-rw-r--r--  docs/source/releasenotes/v2.13.3.rst | 11
-rw-r--r--  docs/source/releasenotes/v2.14.0.rst | 63
-rw-r--r--  docs/source/releasenotes/v2.9.8.rst | 2
-rw-r--r--  docs/source/releasenotes/v2.9.9.rst | 50
-rw-r--r--  docs/source/s3_tut.rst | 2
-rw-r--r--  docs/source/sqs_tut.rst | 2
-rw-r--r--  tests/integration/__init__.py | 67
-rw-r--r--  tests/integration/cloudformation/test_cert_verification.py | 15
-rw-r--r--  tests/integration/cloudsearch/test_cert_verification.py | 17
-rw-r--r--  tests/integration/cloudsearch/test_layers.py | 75
-rw-r--r--  tests/integration/dynamodb/test_cert_verification.py | 15
-rw-r--r--  tests/integration/dynamodb2/test_cert_verification.py | 15
-rw-r--r--  tests/integration/dynamodb2/test_highlevel.py | 77
-rw-r--r--  tests/integration/ec2/autoscale/test_cert_verification.py | 15
-rw-r--r--  tests/integration/ec2/cloudwatch/test_cert_verification.py | 15
-rw-r--r--  tests/integration/ec2/elb/test_cert_verification.py | 15
-rw-r--r--  tests/integration/ec2/elb/test_connection.py | 22
-rw-r--r--  tests/integration/ec2/test_cert_verification.py | 15
-rw-r--r--  tests/integration/ec2/test_connection.py | 51
-rw-r--r--  tests/integration/ec2/vpc/test_connection.py | 49
-rw-r--r--  tests/integration/elastictranscoder/test_cert_verification.py | 15
-rw-r--r--  tests/integration/emr/test_cert_verification.py | 15
-rw-r--r--  tests/integration/glacier/test_cert_verification.py | 15
-rw-r--r--  tests/integration/gs/cb_test_harness.py | 9
-rw-r--r--  tests/integration/gs/test_resumable_uploads.py | 95
-rw-r--r--  tests/integration/gs/testcase.py | 4
-rw-r--r--  tests/integration/gs/util.py | 3
-rw-r--r--  tests/integration/iam/test_cert_verification.py | 15
-rw-r--r--  tests/integration/mws/test.py | 10
-rw-r--r--  tests/integration/rds/test_cert_verification.py | 15
-rw-r--r--  tests/integration/rds/test_db_subnet_group.py | 92
-rw-r--r--  tests/integration/redshift/test_cert_verification.py | 15
-rw-r--r--  tests/integration/route53/test_cert_verification.py | 15
-rw-r--r--  tests/integration/route53/test_resourcerecordsets.py | 48
-rw-r--r--  tests/integration/s3/test_cert_verification.py | 15
-rw-r--r--  tests/integration/s3/test_key.py | 11
-rw-r--r--  tests/integration/s3/test_multipart.py | 20
-rw-r--r--  tests/integration/sdb/test_cert_verification.py | 15
-rw-r--r--  tests/integration/ses/test_cert_verification.py | 15
-rw-r--r--  tests/integration/ses/test_connection.py | 2
-rw-r--r--  tests/integration/sns/test_cert_verification.py | 15
-rw-r--r--  tests/integration/sns/test_connection.py | 68
-rw-r--r--  tests/integration/sns/test_sns_sqs_subscription.py | 6
-rw-r--r--  tests/integration/sqs/test_cert_verification.py | 15
-rw-r--r--  tests/integration/sqs/test_connection.py | 42
-rw-r--r--  tests/integration/sts/test_cert_verification.py | 15
-rw-r--r--  tests/integration/sts/test_session_token.py | 10
-rw-r--r--  tests/integration/support/test_cert_verification.py | 15
-rw-r--r--  tests/integration/swf/test_cert_verification.py | 15
-rw-r--r--  tests/unit/auth/test_query.py | 76
-rw-r--r--  tests/unit/auth/test_sigv4.py | 81
-rwxr-xr-x [-rw-r--r--]  tests/unit/cloudformation/test_connection.py | 10
-rwxr-xr-x [-rw-r--r--]  tests/unit/cloudformation/test_stack.py | 42
-rw-r--r--  tests/unit/cloudfront/test_distribution.py | 21
-rw-r--r--  tests/unit/cloudfront/test_signed_urls.py | 17
-rw-r--r--  tests/unit/cloudsearch/test_connection.py | 15
-rw-r--r--  tests/unit/cloudsearch/test_exceptions.py | 37
-rw-r--r--  tests/unit/cloudsearch/test_search.py | 38
-rw-r--r--  tests/unit/dynamodb2/test_layer1.py | 8
-rw-r--r--  tests/unit/dynamodb2/test_table.py | 209
-rw-r--r--  tests/unit/ec2/autoscale/test_group.py | 256
-rw-r--r--  tests/unit/ec2/elb/test_loadbalancer.py | 97
-rw-r--r--  tests/unit/ec2/test_address.py | 16
-rw-r--r--  tests/unit/ec2/test_blockdevicemapping.py | 54
-rw-r--r--  tests/unit/ec2/test_connection.py | 657
-rw-r--r--  tests/unit/ec2/test_instance.py | 2
-rw-r--r--  tests/unit/ec2/test_networkinterface.py | 148
-rw-r--r--  tests/unit/ec2/test_securitygroup.py | 212
-rw-r--r--  tests/unit/ec2/test_volume.py | 37
-rw-r--r--  tests/unit/emr/test_instance_group_args.py | 57
-rw-r--r--  tests/unit/glacier/test_layer2.py | 52
-rw-r--r--  tests/unit/rds/test_connection.py | 328
-rw-r--r--  tests/unit/s3/test_key.py | 21
-rw-r--r--  tests/unit/ses/__init__.py | 0
-rw-r--r--  tests/unit/ses/test_identity.py | 82
-rw-r--r--  tests/unit/sns/test_connection.py | 135
-rw-r--r--  tests/unit/sqs/test_message.py | 35
-rw-r--r--  tests/unit/sts/__init__.py | 0
-rw-r--r--  tests/unit/sts/test_connection.py | 88
-rw-r--r--  tests/unit/sts/test_credentials.py | 38
-rw-r--r--  tests/unit/swf/__init__.py | 0
-rw-r--r--  tests/unit/swf/test_layer2_actors.py | 87
-rw-r--r--  tests/unit/swf/test_layer2_domain.py | 112
-rw-r--r--  tests/unit/swf/test_layer2_types.py | 46
-rw-r--r--  tests/unit/test_connection.py | 101
-rw-r--r--  tests/unit/test_exception.py | 48
-rw-r--r--  tests/unit/vpc/__init__.py | 3
-rw-r--r--  tests/unit/vpc/test_vpc.py | 63
205 files changed, 12627 insertions, 2416 deletions
diff --git a/.gitignore b/.gitignore
index f5685bd5..9ed67ef1 100644
--- a/.gitignore
+++ b/.gitignore
@@ -12,3 +12,5 @@ MANIFEST
.coverage
*flymake.py
venv
+venv-2.5
+env-2.5
diff --git a/README.rst b/README.rst
index cfeebef7..284438bf 100644
--- a/README.rst
+++ b/README.rst
@@ -1,13 +1,13 @@
####
boto
####
-boto 2.9.8
+boto 2.13.3
-Released: 18-July-2013
+Released: 16-September-2013
.. image:: https://travis-ci.org/boto/boto.png?branch=develop
:target: https://travis-ci.org/boto/boto
-
+
.. image:: https://pypip.in/d/boto/badge.png
:target: https://crate.io/packages/boto/
@@ -41,6 +41,7 @@ At the moment, boto supports:
* AWS Elastic Beanstalk
* AWS CloudFormation
* AWS Data Pipeline
+ * AWS Opsworks
* Identity & Access
@@ -49,6 +50,7 @@ At the moment, boto supports:
* Application Services
* Amazon CloudSearch
+ * Amazon Elastic Transcoder
* Amazon Simple Workflow Service (SWF)
* Amazon Simple Queue Service (SQS)
* Amazon Simple Notification Server (SNS)
@@ -89,9 +91,9 @@ Web Services. In addition, boto provides support for other public
services such as Google Storage in addition to private cloud systems
like Eucalyptus, OpenStack and Open Nebula.
-Boto is developed mainly using Python 2.6.6 and Python 2.7.1 on Mac OSX
+Boto is developed mainly using Python 2.6.6 and Python 2.7.3 on Mac OSX
and Ubuntu Maverick. It is known to work on other Linux distributions
-and on Windows. Boto requires no additional libraries or packages
+and on Windows. Most of Boto requires no additional libraries or packages
other than those that are distributed with Python. Efforts are made
to keep boto compatible with Python 2.5.x but no guarantees are made.
@@ -153,11 +155,6 @@ following environment variables to ascertain your credentials:
Credentials and other boto-related settings can also be stored in a
boto config file. See `this`_ for details.
-Copyright (c) 2006-2012 Mitch Garnaat <mitch@garnaat.com>
-Copyright (c) 2010-2011, Eucalyptus Systems, Inc.
-Copyright (c) 2012 Amazon.com, Inc. or its affiliates.
-All rights reserved.
-
.. _pip: http://www.pip-installer.org/
.. _release notes: https://github.com/boto/boto/wiki
.. _github.com: http://github.com/boto/boto
diff --git a/bin/dynamodb_load b/bin/dynamodb_load
index 21dfa176..46a8d392 100755
--- a/bin/dynamodb_load
+++ b/bin/dynamodb_load
@@ -66,7 +66,7 @@ def load_table(table, in_fd):
data[k] = set(v)
else:
data[k] = v
- table.new_item(attrs=i).put()
+ table.new_item(attrs=data).put()
def dynamodb_load(tables, in_dir, create_tables):
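
The one-line fix above makes the loader write the coerced data dict (with lists rebuilt as sets) instead of the raw JSON item i. A minimal sketch of the same pattern against boto's original DynamoDB Layer2 API, assuming boto 2.x with credentials configured and an existing table named 'mytable' (a hypothetical name) whose hash key is carried inside the attrs dict:

    import boto

    conn = boto.connect_dynamodb()
    table = conn.get_table('mytable')          # hypothetical table name

    raw = {'id': 'item-1', 'tags': ['a', 'b'], 'count': 3}
    data = {}
    for k, v in raw.items():
        # DynamoDB (v1 API) stores multi-valued attributes as sets, so
        # lists coming back from JSON are converted before writing.
        data[k] = set(v) if isinstance(v, list) else v

    table.new_item(attrs=data).put()           # same call the fixed script makes
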
diff --git a/bin/elbadmin b/bin/elbadmin
index 87dd2b14..fc954f02 100755
--- a/bin/elbadmin
+++ b/bin/elbadmin
@@ -118,9 +118,8 @@ def get(elb, name):
instances = [state.instance_id for state in instance_health]
names = {}
- for r in ec2.get_all_instances(instances):
- for i in r.instances:
- names[i.id] = i.tags.get('Name', '')
+ for i in ec2.get_only_instances(instances):
+ names[i.id] = i.tags.get('Name', '')
name_column_width = max([4] + [len(v) for k,v in names.iteritems()]) + 2
diff --git a/bin/instance_events b/bin/instance_events
index b36a4809..a851df66 100755
--- a/bin/instance_events
+++ b/bin/instance_events
@@ -51,7 +51,7 @@ def list(region, headers, order, completed):
ec2 = boto.connect_ec2(region=region)
- reservations = ec2.get_all_instances()
+ reservations = ec2.get_all_reservations()
instanceinfo = {}
events = {}
diff --git a/bin/list_instances b/bin/list_instances
index a8de4ada..8cb743c0 100755
--- a/bin/list_instances
+++ b/bin/list_instances
@@ -76,7 +76,7 @@ def main():
print format_string % headers
print "-" * len(format_string % headers)
- for r in ec2.get_all_instances(filters=filters):
+ for r in ec2.get_all_reservations(filters=filters):
groups = [g.name for g in r.groups]
for i in r.instances:
i.groups = ','.join(groups)
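
The elbadmin, instance_events and list_instances changes above track the EC2 connection API in this release: get_all_reservations returns Reservation objects (the old behaviour of get_all_instances), while get_only_instances skips the reservation layer and yields Instance objects directly. A short sketch of both calls, assuming boto 2.14+ with credentials configured and the us-east-1 region:

    import boto.ec2

    ec2 = boto.ec2.connect_to_region('us-east-1')

    # Reservation-oriented listing, as bin/list_instances now does.
    for r in ec2.get_all_reservations():
        for i in r.instances:
            print i.id, i.state

    # Instance-oriented listing, as bin/elbadmin now does.
    for i in ec2.get_only_instances():
        print i.id, i.tags.get('Name', '')
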
diff --git a/bin/s3put b/bin/s3put
index 01d9fcb1..60a720a5 100755
--- a/bin/s3put
+++ b/bin/s3put
@@ -37,7 +37,9 @@ try:
multipart_capable = True
usage_flag_multipart_capable = """ [--multipart]"""
usage_string_multipart_capable = """
- multipart - Upload files as multiple parts. This needs filechunkio."""
+ multipart - Upload files as multiple parts. This needs filechunkio.
+ Requires ListBucket, ListMultipartUploadParts,
+ ListBucketMultipartUploads and PutObject permissions."""
except ImportError as err:
multipart_capable = False
usage_flag_multipart_capable = ""
@@ -46,6 +48,8 @@ except ImportError as err:
'" is missing for multipart support '
+DEFAULT_REGION = 'us-east-1'
+
usage_string = """
SYNOPSIS
s3put [-a/--access_key <access_key>] [-s/--secret_key <secret_key>]
@@ -53,7 +57,8 @@ SYNOPSIS
[-d/--debug <debug_level>] [-i/--ignore <ignore_dirs>]
[-n/--no_op] [-p/--prefix <prefix>] [-k/--key_prefix <key_prefix>]
[-q/--quiet] [-g/--grant grant] [-w/--no_overwrite] [-r/--reduced]
- [--header] [--host <s3_host>]""" + usage_flag_multipart_capable + """ path [path...]
+ [--header] [--region <name>] [--host <s3_host>]""" + \
+ usage_flag_multipart_capable + """ path [path...]
Where
access_key - Your AWS Access Key ID. If not supplied, boto will
@@ -103,6 +108,9 @@ SYNOPSIS
updated.
header - key=value pairs of extra header(s) to pass along in the
request
+ region - Manually set a region for buckets that are not in the US
+ classic region. Normally the region is autodetected, but
+ setting this yourself is more efficient.
host - Hostname override, for using an endpoint other then AWS S3
""" + usage_string_multipart_capable + """
@@ -112,9 +120,9 @@ SYNOPSIS
"""
-def usage():
+def usage(status=1):
print usage_string
- sys.exit()
+ sys.exit(status)
def submit_cb(bytes_so_far, total_bytes):
@@ -168,11 +176,13 @@ def _upload_part(bucketname, aws_key, aws_secret, multipart_id, part_num,
def multipart_upload(bucketname, aws_key, aws_secret, source_path, keyname,
reduced, debug, cb, num_cb, acl='private', headers={},
- guess_mimetype=True, parallel_processes=4):
+ guess_mimetype=True, parallel_processes=4,
+ region=DEFAULT_REGION):
"""
Parallel multipart upload.
"""
- conn = S3Connection(aws_key, aws_secret)
+ conn = boto.s3.connect_to_region(region, aws_access_key_id=aws_key,
+ aws_secret_access_key=aws_secret)
conn.debug = debug
bucket = conn.get_bucket(bucketname)
@@ -242,6 +252,7 @@ def main():
headers = {}
host = None
multipart_requested = False
+ region = None
try:
opts, args = getopt.getopt(
@@ -249,14 +260,14 @@ def main():
['access_key=', 'bucket=', 'callback=', 'debug=', 'help', 'grant=',
'ignore=', 'key_prefix=', 'no_op', 'prefix=', 'quiet',
'secret_key=', 'no_overwrite', 'reduced', 'header=', 'multipart',
- 'host='])
+ 'host=', 'region='])
except:
- usage()
+ usage(1)
# parse opts
for o, a in opts:
if o in ('-h', '--help'):
- usage()
+ usage(0)
if o in ('-a', '--access_key'):
aws_access_key_id = a
if o in ('-b', '--bucket'):
@@ -288,7 +299,7 @@ def main():
if o in ('-r', '--reduced'):
reduced = True
if o in ('--header'):
- (k, v) = a.split("=")
+ (k, v) = a.split("=", 1)
headers[k] = v
if o in ('--host'):
host = a
@@ -297,23 +308,62 @@ def main():
multipart_requested = True
else:
print "multipart upload requested but not capable"
- sys.exit()
+ sys.exit(4)
+ if o in ('--region'):
+ regions = boto.s3.regions()
+ for region_info in regions:
+ if region_info.name == a:
+ region = a
+ break
+ else:
+ raise ValueError('Invalid region %s specified' % a)
if len(args) < 1:
- usage()
+ usage(2)
if not bucket_name:
print "bucket name is required!"
- usage()
+ usage(3)
+
+ connect_args = {
+ 'aws_access_key_id': aws_access_key_id,
+ 'aws_secret_access_key': aws_secret_access_key
+ }
if host:
- c = boto.connect_s3(host=host, aws_access_key_id=aws_access_key_id,
- aws_secret_access_key=aws_secret_access_key)
- else:
- c = boto.connect_s3(aws_access_key_id=aws_access_key_id,
- aws_secret_access_key=aws_secret_access_key)
+ connect_args['host'] = host
+
+ c = boto.s3.connect_to_region(region or DEFAULT_REGION, **connect_args)
c.debug = debug
- b = c.get_bucket(bucket_name)
+ b = c.get_bucket(bucket_name, validate=False)
+
+ # Attempt to determine location and warn if no --host or --region
+ # arguments were passed. Then try to automagically figure out
+ # what should have been passed and fix it.
+ if host is None and region is None:
+ try:
+ location = b.get_location()
+
+ # Classic region will be '', any other will have a name
+ if location:
+ print 'Bucket exists in %s but no host or region given!' % location
+
+ # Override for EU, which is really Ireland according to the docs
+ if location == 'EU':
+ location = 'eu-west-1'
+
+ print 'Automatically setting region to %s' % location
+
+ # Here we create a new connection, and then take the existing
+ # bucket and set it to use the new connection
+ c = boto.s3.connect_to_region(location, **connect_args)
+ c.debug = debug
+ b.connection = c
+ except Exception, e:
+ if debug > 0:
+ print e
+ print 'Could not get bucket region info, skipping...'
+
existing_keys_to_check_against = []
files_to_check_for_upload = []
@@ -350,9 +400,10 @@ def main():
key_name = get_key_name(fullpath, prefix, key_prefix)
if no_overwrite and key_name in existing_keys_to_check_against:
- if not quiet:
- print 'Skipping %s as it exists in s3' % fullpath
- continue
+ if b.get_key(key_name):
+ if not quiet:
+ print 'Skipping %s as it exists in s3' % fullpath
+ continue
if not quiet:
print 'Copying %s to %s/%s' % (fullpath, bucket_name, key_name)
@@ -364,7 +415,8 @@ def main():
multipart_upload(bucket_name, aws_access_key_id,
aws_secret_access_key, fullpath, key_name,
reduced, debug, cb, num_cb,
- grant or 'private', headers)
+ grant or 'private', headers,
+ region=region or DEFAULT_REGION)
else:
singlepart_upload(b, key_name, fullpath, cb=cb, num_cb=num_cb,
policy=grant, reduced_redundancy=reduced,
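
In outline, s3put now connects through boto.s3.connect_to_region, skips bucket validation, and falls back to the bucket's reported location when neither --host nor --region is given. A hedged sketch of that fallback, assuming credentials are configured and an existing bucket named 'mybucket' (a hypothetical name):

    import boto.s3

    conn = boto.s3.connect_to_region('us-east-1')
    bucket = conn.get_bucket('mybucket', validate=False)

    location = bucket.get_location()   # '' for US classic, otherwise a region name
    if location:
        if location == 'EU':           # S3 reports eu-west-1 buckets as 'EU'
            location = 'eu-west-1'
        # Reconnect against the right endpoint, as the script now does.
        conn = boto.s3.connect_to_region(location)
        bucket = conn.get_bucket('mybucket', validate=False)
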
diff --git a/boto/__init__.py b/boto/__init__.py
index 20c9000d..6595ada5 100644
--- a/boto/__init__.py
+++ b/boto/__init__.py
@@ -36,10 +36,15 @@ import logging.config
import urlparse
from boto.exception import InvalidUriError
-__version__ = '2.9.8'
+__version__ = '2.14.0'
Version = __version__ # for backware compatibility
-UserAgent = 'Boto/%s (%s)' % (__version__, sys.platform)
+UserAgent = 'Boto/%s Python/%s %s/%s' % (
+ __version__,
+ platform.python_version(),
+ platform.system(),
+ platform.release()
+)
config = Config()
# Regex to disallow buckets violating charset or not [3..255] chars total.
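
The new UserAgent value folds interpreter and OS details in via the standard-library platform module. A quick illustration of the resulting format (the exact values depend on the machine this runs on):

    import platform

    version = '2.14.0'
    user_agent = 'Boto/%s Python/%s %s/%s' % (
        version,
        platform.python_version(),   # e.g. '2.7.3'
        platform.system(),           # e.g. 'Linux'
        platform.release(),          # e.g. '3.2.0-4-amd64'
    )
    print user_agent                 # Boto/2.14.0 Python/2.7.3 Linux/3.2.0-4-amd64
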
diff --git a/boto/auth.py b/boto/auth.py
index 0aa299f9..0d4221d6 100644
--- a/boto/auth.py
+++ b/boto/auth.py
@@ -385,8 +385,9 @@ class HmacAuthV4Handler(AuthHandler, HmacKeys):
def canonical_uri(self, http_request):
path = http_request.auth_path
- # Normalize the path.
- normalized = posixpath.normpath(path)
+ # Normalize the path
+ # in windows normpath('/') will be '\\' so we chane it back to '/'
+ normalized = posixpath.normpath(path).replace('\\','/')
# Then urlencode whatever's left.
encoded = urllib.quote(normalized)
if len(path) > 1 and path.endswith('/'):
@@ -430,11 +431,17 @@ class HmacAuthV4Handler(AuthHandler, HmacKeys):
parts = http_request.host.split('.')
if self.region_name is not None:
region_name = self.region_name
- else:
- if len(parts) == 3:
- region_name = 'us-east-1'
+ elif len(parts) > 1:
+ if parts[1] == 'us-gov':
+ region_name = 'us-gov-west-1'
else:
- region_name = parts[1]
+ if len(parts) == 3:
+ region_name = 'us-east-1'
+ else:
+ region_name = parts[1]
+ else:
+ region_name = parts[0]
+
if self.service_name is not None:
service_name = self.service_name
else:
@@ -509,6 +516,45 @@ class HmacAuthV4Handler(AuthHandler, HmacKeys):
req.headers['Authorization'] = ','.join(l)
+class QueryAuthHandler(AuthHandler):
+ """
+ Provides pure query construction (no actual signing).
+
+ Mostly useful for STS' ``assume_role_with_web_identity``.
+
+ Does **NOT** escape query string values!
+ """
+
+ capability = ['pure-query']
+
+ def _escape_value(self, value):
+ # Would normally be ``return urllib.quote(value)``.
+ return value
+
+ def _build_query_string(self, params):
+ keys = params.keys()
+ keys.sort(cmp=lambda x, y: cmp(x.lower(), y.lower()))
+ pairs = []
+ for key in keys:
+ val = boto.utils.get_utf8_value(params[key])
+ pairs.append(key + '=' + self._escape_value(val))
+ return '&'.join(pairs)
+
+ def add_auth(self, http_request, **kwargs):
+ headers = http_request.headers
+ params = http_request.params
+ qs = self._build_query_string(
+ http_request.params
+ )
+ boto.log.debug('query_string: %s' % qs)
+ headers['Content-Type'] = 'application/json; charset=UTF-8'
+ http_request.body = ''
+ # if this is a retried request, the qs from the previous try will
+ # already be there, we need to get rid of that and rebuild it
+ http_request.path = http_request.path.split('?')[0]
+ http_request.path = http_request.path + '?' + qs
+
+
class QuerySignatureHelper(HmacKeys):
"""
Helper for Query signature based Auth handler.
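
The reworked branch above derives the SigV4 region from the endpoint hostname, with a special case for GovCloud endpoints. A standalone sketch of that rule (the helper name is illustrative, not part of boto's API), assuming hosts of the form service.region.amazonaws.com or service.amazonaws.com:

    def region_from_host(host):
        # Mirrors the host-parsing logic added in the hunk above.
        parts = host.split('.')
        if len(parts) > 1:
            if parts[1] == 'us-gov':
                return 'us-gov-west-1'   # e.g. iam.us-gov.amazonaws.com
            if len(parts) == 3:
                return 'us-east-1'       # e.g. dynamodb.amazonaws.com
            return parts[1]              # e.g. dynamodb.us-west-2.amazonaws.com
        return parts[0]

    print region_from_host('dynamodb.us-west-2.amazonaws.com')   # us-west-2
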
diff --git a/boto/cacerts/cacerts.txt b/boto/cacerts/cacerts.txt
index f6e0ee60..1b7738ff 100644
--- a/boto/cacerts/cacerts.txt
+++ b/boto/cacerts/cacerts.txt
@@ -1,135 +1,41 @@
-# Certifcate Authority certificates for validating SSL connections.
-#
-# This file contains PEM format certificates generated from
-# http://mxr.mozilla.org/seamonkey/source/security/nss/lib/ckfw/builtins/certdata.txt
-#
-# ***** BEGIN LICENSE BLOCK *****
-# Version: MPL 1.1/GPL 2.0/LGPL 2.1
-#
-# The contents of this file are subject to the Mozilla Public License Version
-# 1.1 (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-# http://www.mozilla.org/MPL/
-#
-# Software distributed under the License is distributed on an "AS IS" basis,
-# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
-# for the specific language governing rights and limitations under the
-# License.
-#
-# The Original Code is the Netscape security libraries.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 1994-2000
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#
-# Alternatively, the contents of this file may be used under the terms of
-# either the GNU General Public License Version 2 or later (the "GPL"), or
-# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
-# in which case the provisions of the GPL or the LGPL are applicable instead
-# of those above. If you wish to allow use of your version of this file only
-# under the terms of either the GPL or the LGPL, and not to allow others to
-# use your version of this file under the terms of the MPL, indicate your
-# decision by deleting the provisions above and replace them with the notice
-# and other provisions required by the GPL or the LGPL. If you do not delete
-# the provisions above, a recipient may use your version of this file under
-# the terms of any one of the MPL, the GPL or the LGPL.
-#
-# ***** END LICENSE BLOCK *****
-
-Verisign/RSA Secure Server CA
-=============================
-
------BEGIN CERTIFICATE-----
-MIICNDCCAaECEAKtZn5ORf5eV288mBle3cAwDQYJKoZIhvcNAQECBQAwXzELMAkG
-A1UEBhMCVVMxIDAeBgNVBAoTF1JTQSBEYXRhIFNlY3VyaXR5LCBJbmMuMS4wLAYD
-VQQLEyVTZWN1cmUgU2VydmVyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk0
-MTEwOTAwMDAwMFoXDTEwMDEwNzIzNTk1OVowXzELMAkGA1UEBhMCVVMxIDAeBgNV
-BAoTF1JTQSBEYXRhIFNlY3VyaXR5LCBJbmMuMS4wLAYDVQQLEyVTZWN1cmUgU2Vy
-dmVyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGbMA0GCSqGSIb3DQEBAQUAA4GJ
-ADCBhQJ+AJLOesGugz5aqomDV6wlAXYMra6OLDfO6zV4ZFQD5YRAUcm/jwjiioII
-0haGN1XpsSECrXZogZoFokvJSyVmIlZsiAeP94FZbYQHZXATcXY+m3dM41CJVphI
-uR2nKRoTLkoRWZweFdVJVCxzOmmCsZc5nG1wZ0jl3S3WyB57AgMBAAEwDQYJKoZI
-hvcNAQECBQADfgBl3X7hsuyw4jrg7HFGmhkRuNPHoLQDQCYCPgmc4RKz0Vr2N6W3
-YQO2WxZpO8ZECAyIUwxrl0nHPjXcbLm7qt9cuzovk2C2qUtN8iD3zV9/ZHuO3ABc
-1/p3yjkWWW8O6tO1g39NTUJWdrTJXwT4OPjr0l91X817/OWOgHz8UA==
------END CERTIFICATE-----
+##
+## boto/cacerts/cacerts.txt -- Bundle of CA Root Certificates
+##
+## Certificate data from Mozilla as of: Sat Dec 29 20:03:40 2012
+##
+## This is a bundle of X.509 certificates of public Certificate Authorities
+## (CA). These were automatically extracted from Mozilla's root certificates
+## file (certdata.txt). This file can be found in the mozilla source tree:
+## http://mxr.mozilla.org/mozilla/source/security/nss/lib/ckfw/builtins/certdata.txt?raw=1
+##
+## It contains the certificates in PEM format and therefore
+## can be directly used with curl / libcurl / php_curl, or with
+## an Apache+mod_ssl webserver for SSL client authentication.
+## Just configure this file as the SSLCACertificateFile.
+##
+
+# @(#) $RCSfile: certdata.txt,v $ $Revision: 1.87 $ $Date: 2012/12/29 16:32:45 $
-Thawte Personal Basic CA
-========================
-
------BEGIN CERTIFICATE-----
-MIIDITCCAoqgAwIBAgIBADANBgkqhkiG9w0BAQQFADCByzELMAkGA1UEBhMCWkEx
-FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMRowGAYD
-VQQKExFUaGF3dGUgQ29uc3VsdGluZzEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBT
-ZXJ2aWNlcyBEaXZpc2lvbjEhMB8GA1UEAxMYVGhhd3RlIFBlcnNvbmFsIEJhc2lj
-IENBMSgwJgYJKoZIhvcNAQkBFhlwZXJzb25hbC1iYXNpY0B0aGF3dGUuY29tMB4X
-DTk2MDEwMTAwMDAwMFoXDTIwMTIzMTIzNTk1OVowgcsxCzAJBgNVBAYTAlpBMRUw
-EwYDVQQIEwxXZXN0ZXJuIENhcGUxEjAQBgNVBAcTCUNhcGUgVG93bjEaMBgGA1UE
-ChMRVGhhd3RlIENvbnN1bHRpbmcxKDAmBgNVBAsTH0NlcnRpZmljYXRpb24gU2Vy
-dmljZXMgRGl2aXNpb24xITAfBgNVBAMTGFRoYXd0ZSBQZXJzb25hbCBCYXNpYyBD
-QTEoMCYGCSqGSIb3DQEJARYZcGVyc29uYWwtYmFzaWNAdGhhd3RlLmNvbTCBnzAN
-BgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAvLyTU23AUE+CFeZIlDWmWr5vQvoPR+53
-dXLdjUmbllegeNTKP1GzaQuRdhciB5dqxFGTS+CN7zeVoQxN2jSQHReJl+A1OFdK
-wPQIcOk8RHtQfmGakOMj04gRRif1CwcOu93RfyAKiLlWCy4cgNrx454p7xS9CkT7
-G1sY0b8jkyECAwEAAaMTMBEwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQQF
-AAOBgQAt4plrsD16iddZopQBHyvdEktTwq1/qqcAXJFAVyVKOKqEcLnZgA+le1z7
-c8a914phXAPjLSeoF+CEhULcXpvGt7Jtu3Sv5D/Lp7ew4F2+eIMllNLbgQ95B21P
-9DkVWlIBe94y1k049hJcBlDfBVu9FEuh3ym6O0GN92NWod8isQ==
------END CERTIFICATE-----
-
-Thawte Personal Premium CA
+GTE CyberTrust Global Root
==========================
-
------BEGIN CERTIFICATE-----
-MIIDKTCCApKgAwIBAgIBADANBgkqhkiG9w0BAQQFADCBzzELMAkGA1UEBhMCWkEx
-FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMRowGAYD
-VQQKExFUaGF3dGUgQ29uc3VsdGluZzEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBT
-ZXJ2aWNlcyBEaXZpc2lvbjEjMCEGA1UEAxMaVGhhd3RlIFBlcnNvbmFsIFByZW1p
-dW0gQ0ExKjAoBgkqhkiG9w0BCQEWG3BlcnNvbmFsLXByZW1pdW1AdGhhd3RlLmNv
-bTAeFw05NjAxMDEwMDAwMDBaFw0yMDEyMzEyMzU5NTlaMIHPMQswCQYDVQQGEwJa
-QTEVMBMGA1UECBMMV2VzdGVybiBDYXBlMRIwEAYDVQQHEwlDYXBlIFRvd24xGjAY
-BgNVBAoTEVRoYXd0ZSBDb25zdWx0aW5nMSgwJgYDVQQLEx9DZXJ0aWZpY2F0aW9u
-IFNlcnZpY2VzIERpdmlzaW9uMSMwIQYDVQQDExpUaGF3dGUgUGVyc29uYWwgUHJl
-bWl1bSBDQTEqMCgGCSqGSIb3DQEJARYbcGVyc29uYWwtcHJlbWl1bUB0aGF3dGUu
-Y29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDJZtn4B0TPuYwu8KHvE0Vs
-Bd/eJxZRNkERbGw77f4QfRKe5ZtCmv5gMcNmt3M6SK5O0DI3lIi1DbbZ8/JE2dWI
-Et12TfIa/G8jHnrx2JhFTgcQ7xZC0EN1bUre4qrJMf8fAHB8Zs8QJQi6+u4A6UYD
-ZicRFTuqW/KY3TZCstqIdQIDAQABoxMwETAPBgNVHRMBAf8EBTADAQH/MA0GCSqG
-SIb3DQEBBAUAA4GBAGk2ifc0KjNyL2071CKyuG+axTZmDhs8obF1Wub9NdP4qPIH
-b4Vnjt4rueIXsDqg8A6iAJrf8xQVbrvIhVqYgPn/vnQdPfP+MCXRNzRn+qVxeTBh
-KXLA4CxM+1bkOqhv5TJZUtt1KFBZDPgLGeSs2a+WjS9Q2wfD6h+rM+D1KzGJ
------END CERTIFICATE-----
-
-Thawte Personal Freemail CA
-===========================
-
-----BEGIN CERTIFICATE-----
-MIIDLTCCApagAwIBAgIBADANBgkqhkiG9w0BAQQFADCB0TELMAkGA1UEBhMCWkEx
-FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMRowGAYD
-VQQKExFUaGF3dGUgQ29uc3VsdGluZzEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBT
-ZXJ2aWNlcyBEaXZpc2lvbjEkMCIGA1UEAxMbVGhhd3RlIFBlcnNvbmFsIEZyZWVt
-YWlsIENBMSswKQYJKoZIhvcNAQkBFhxwZXJzb25hbC1mcmVlbWFpbEB0aGF3dGUu
-Y29tMB4XDTk2MDEwMTAwMDAwMFoXDTIwMTIzMTIzNTk1OVowgdExCzAJBgNVBAYT
-AlpBMRUwEwYDVQQIEwxXZXN0ZXJuIENhcGUxEjAQBgNVBAcTCUNhcGUgVG93bjEa
-MBgGA1UEChMRVGhhd3RlIENvbnN1bHRpbmcxKDAmBgNVBAsTH0NlcnRpZmljYXRp
-b24gU2VydmljZXMgRGl2aXNpb24xJDAiBgNVBAMTG1RoYXd0ZSBQZXJzb25hbCBG
-cmVlbWFpbCBDQTErMCkGCSqGSIb3DQEJARYccGVyc29uYWwtZnJlZW1haWxAdGhh
-d3RlLmNvbTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA1GnX1LCUZFtx6UfY
-DFG26nKRsIRefS0Nj3sS34UldSh0OkIsYyeflXtL734Zhx2G6qPduc6WZBrCFG5E
-rHzmj+hND3EfQDimAKOHePb5lIZererAXnbr2RSjXW56fAylS1V/Bhkpf56aJtVq
-uzgkCGqYx7Hao5iR/Xnb5VrEHLkCAwEAAaMTMBEwDwYDVR0TAQH/BAUwAwEB/zAN
-BgkqhkiG9w0BAQQFAAOBgQDH7JJ+Tvj1lqVnYiqk8E0RYNBvjWBYYawmu1I1XAjP
-MPuoSpaKH2JCI4wXD/S6ZJwXrEcp352YXtJsYHFcoqzceePnbgBHH7UNKOgCneSa
-/RP0ptl8sfjcXyMmCZGAc9AUG95DqYMl8uacLxXK/qarigd1iwzdUYRr5PjRznei
-gQ==
+MIICWjCCAcMCAgGlMA0GCSqGSIb3DQEBBAUAMHUxCzAJBgNVBAYTAlVTMRgwFgYD
+VQQKEw9HVEUgQ29ycG9yYXRpb24xJzAlBgNVBAsTHkdURSBDeWJlclRydXN0IFNv
+bHV0aW9ucywgSW5jLjEjMCEGA1UEAxMaR1RFIEN5YmVyVHJ1c3QgR2xvYmFsIFJv
+b3QwHhcNOTgwODEzMDAyOTAwWhcNMTgwODEzMjM1OTAwWjB1MQswCQYDVQQGEwJV
+UzEYMBYGA1UEChMPR1RFIENvcnBvcmF0aW9uMScwJQYDVQQLEx5HVEUgQ3liZXJU
+cnVzdCBTb2x1dGlvbnMsIEluYy4xIzAhBgNVBAMTGkdURSBDeWJlclRydXN0IEds
+b2JhbCBSb290MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQCVD6C28FCc6HrH
+iM3dFw4usJTQGz0O9pTAipTHBsiQl8i4ZBp6fmw8U+E3KHNgf7KXUwefU/ltWJTS
+r41tiGeA5u2ylc9yMcqlHHK6XALnZELn+aks1joNrI1CqiQBOeacPwGFVw1Yh0X4
+04Wqk2kmhXBIgD8SFcd5tB8FLztimQIDAQABMA0GCSqGSIb3DQEBBAUAA4GBAG3r
+GwnpXtlR22ciYaQqPEh346B8pt5zohQDhT37qw4wxYMWM4ETCJ57NE7fQMh017l9
+3PR2VX2bY1QY6fDq81yx2YtCHrnAlU66+tXifPVoYb+O7AWXX1uw16OFNMQkpw0P
+lZPvy5TYnh+dXIVtx6quTx8itc2VrbqnzPmrC3p/
-----END CERTIFICATE-----
Thawte Server CA
================
-
-----BEGIN CERTIFICATE-----
MIIDEzCCAnygAwIBAgIBATANBgkqhkiG9w0BAQQFADCBxDELMAkGA1UEBhMCWkEx
FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYD
@@ -152,7 +58,6 @@ qdq5snUb9kLy78fyGPmJvKP/iiMucEc=
Thawte Premium Server CA
========================
-
-----BEGIN CERTIFICATE-----
MIIDJzCCApCgAwIBAgIBATANBgkqhkiG9w0BAQQFADCBzjELMAkGA1UEBhMCWkEx
FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYD
@@ -175,7 +80,6 @@ pAwSremkrj/xw0llmozFyD4lt5SZu5IycQfwhl7tUCemDaYj+bvLpgcUQg==
Equifax Secure CA
=================
-
-----BEGIN CERTIFICATE-----
MIIDIDCCAomgAwIBAgIENd70zzANBgkqhkiG9w0BAQUFADBOMQswCQYDVQQGEwJV
UzEQMA4GA1UEChMHRXF1aWZheDEtMCsGA1UECxMkRXF1aWZheCBTZWN1cmUgQ2Vy
@@ -196,46 +100,52 @@ A4GBAFjOKer89961zgK5F7WF0bnj4JXMJTENAKaSbn+2kmOeUJXRmm/kEd5jhW6Y
1voqZiegDfqnc1zqcPGUIWVEX/r87yloqaKHee9570+sB3c4
-----END CERTIFICATE-----
-Verisign Class 1 Public Primary Certification Authority
-=======================================================
-
+Digital Signature Trust Co. Global CA 1
+=======================================
-----BEGIN CERTIFICATE-----
-MIICPTCCAaYCEQDNun9W8N/kvFT+IqyzcqpVMA0GCSqGSIb3DQEBAgUAMF8xCzAJ
-BgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE3MDUGA1UECxMuQ2xh
-c3MgMSBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw05
-NjAxMjkwMDAwMDBaFw0yODA4MDEyMzU5NTlaMF8xCzAJBgNVBAYTAlVTMRcwFQYD
-VQQKEw5WZXJpU2lnbiwgSW5jLjE3MDUGA1UECxMuQ2xhc3MgMSBQdWJsaWMgUHJp
-bWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCBnzANBgkqhkiG9w0BAQEFAAOB
-jQAwgYkCgYEA5Rm/baNWYS2ZSHH2Z965jeu3noaACpEO+jglr0aIguVzqKCbJF0N
-H8xlbgyw0FaEGIeaBpsQoXPftFg5a27B9hXVqKg/qhIGjTGsf7A01480Z4gJzRQR
-4k5FVmkfeAKA2txHkSm7NsljXMXg1y2He6G3MrB7MLoqLzGq7qNn2tsCAwEAATAN
-BgkqhkiG9w0BAQIFAAOBgQBMP7iLxmjf7kMzDl3ppssHhE16M/+SG/Q2rdiVIjZo
-EWx8QszznC7EBz8UsA9P/5CSdvnivErpj82ggAr3xSnxgiJduLHdgSOjeyUVRjB5
-FvjqBUuUfx3CHMjjt/QQQDwTw18fU+hI5Ia0e6E1sHslurjTjqs/OJ0ANACY89Fx
-lA==
+MIIDKTCCApKgAwIBAgIENnAVljANBgkqhkiG9w0BAQUFADBGMQswCQYDVQQGEwJV
+UzEkMCIGA1UEChMbRGlnaXRhbCBTaWduYXR1cmUgVHJ1c3QgQ28uMREwDwYDVQQL
+EwhEU1RDQSBFMTAeFw05ODEyMTAxODEwMjNaFw0xODEyMTAxODQwMjNaMEYxCzAJ
+BgNVBAYTAlVTMSQwIgYDVQQKExtEaWdpdGFsIFNpZ25hdHVyZSBUcnVzdCBDby4x
+ETAPBgNVBAsTCERTVENBIEUxMIGdMA0GCSqGSIb3DQEBAQUAA4GLADCBhwKBgQCg
+bIGpzzQeJN3+hijM3oMv+V7UQtLodGBmE5gGHKlREmlvMVW5SXIACH7TpWJENySZ
+j9mDSI+ZbZUTu0M7LklOiDfBu1h//uG9+LthzfNHwJmm8fOR6Hh8AMthyUQncWlV
+Sn5JTe2io74CTADKAqjuAQIxZA9SLRN0dja1erQtcQIBA6OCASQwggEgMBEGCWCG
+SAGG+EIBAQQEAwIABzBoBgNVHR8EYTBfMF2gW6BZpFcwVTELMAkGA1UEBhMCVVMx
+JDAiBgNVBAoTG0RpZ2l0YWwgU2lnbmF0dXJlIFRydXN0IENvLjERMA8GA1UECxMI
+RFNUQ0EgRTExDTALBgNVBAMTBENSTDEwKwYDVR0QBCQwIoAPMTk5ODEyMTAxODEw
+MjNagQ8yMDE4MTIxMDE4MTAyM1owCwYDVR0PBAQDAgEGMB8GA1UdIwQYMBaAFGp5
+fpFpRhgTCgJ3pVlbYJglDqL4MB0GA1UdDgQWBBRqeX6RaUYYEwoCd6VZW2CYJQ6i
++DAMBgNVHRMEBTADAQH/MBkGCSqGSIb2fQdBAAQMMAobBFY0LjADAgSQMA0GCSqG
+SIb3DQEBBQUAA4GBACIS2Hod3IEGtgllsofIH160L+nEHvI8wbsEkBFKg05+k7lN
+QseSJqBcNJo4cvj9axY+IO6CizEqkzaFI4iKPANo08kJD038bKTaKHKTDomAsH3+
+gG9lbRgzl4vCa4nuYD3Im+9/KzJic5PLPON74nZ4RbyhkwS7hp86W0N6w4pl
-----END CERTIFICATE-----
-Verisign Class 2 Public Primary Certification Authority
-=======================================================
-
+Digital Signature Trust Co. Global CA 3
+=======================================
-----BEGIN CERTIFICATE-----
-MIICPDCCAaUCEC0b/EoXjaOR6+f/9YtFvgswDQYJKoZIhvcNAQECBQAwXzELMAkG
-A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFz
-cyAyIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk2
-MDEyOTAwMDAwMFoXDTI4MDgwMTIzNTk1OVowXzELMAkGA1UEBhMCVVMxFzAVBgNV
-BAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAyIFB1YmxpYyBQcmlt
-YXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGfMA0GCSqGSIb3DQEBAQUAA4GN
-ADCBiQKBgQC2WoujDWojg4BrzzmH9CETMwZMJaLtVRKXxaeAufqDwSCg+i8VDXyh
-YGt+eSz6Bg86rvYbb7HS/y8oUl+DfUvEerf4Zh+AVPy3wo5ZShRXRtGak75BkQO7
-FYCTXOvnzAhsPz6zSvz/S2wj1VCCJkQZjiPDceoZJEcEnnW/yKYAHwIDAQABMA0G
-CSqGSIb3DQEBAgUAA4GBAIobK/o5wXTXXtgZZKJYSi034DNHD6zt96rbHuSLBlxg
-J8pFUs4W7z8GZOeUaHxgMxURaa+dYo2jA1Rrpr7l7gUYYAS/QoD90KioHgE796Nc
-r6Pc5iaAIzy4RHT3Cq5Ji2F4zCS/iIqnDupzGUH9TQPwiNHleI2lKk/2lw0Xd8rY
+MIIDKTCCApKgAwIBAgIENm7TzjANBgkqhkiG9w0BAQUFADBGMQswCQYDVQQGEwJV
+UzEkMCIGA1UEChMbRGlnaXRhbCBTaWduYXR1cmUgVHJ1c3QgQ28uMREwDwYDVQQL
+EwhEU1RDQSBFMjAeFw05ODEyMDkxOTE3MjZaFw0xODEyMDkxOTQ3MjZaMEYxCzAJ
+BgNVBAYTAlVTMSQwIgYDVQQKExtEaWdpdGFsIFNpZ25hdHVyZSBUcnVzdCBDby4x
+ETAPBgNVBAsTCERTVENBIEUyMIGdMA0GCSqGSIb3DQEBAQUAA4GLADCBhwKBgQC/
+k48Xku8zExjrEH9OFr//Bo8qhbxe+SSmJIi2A7fBw18DW9Fvrn5C6mYjuGODVvso
+LeE4i7TuqAHhzhy2iCoiRoX7n6dwqUcUP87eZfCocfdPJmyMvMa1795JJ/9IKn3o
+TQPMx7JSxhcxEzu1TdvIxPbDDyQq2gyd55FbgM2UnQIBA6OCASQwggEgMBEGCWCG
+SAGG+EIBAQQEAwIABzBoBgNVHR8EYTBfMF2gW6BZpFcwVTELMAkGA1UEBhMCVVMx
+JDAiBgNVBAoTG0RpZ2l0YWwgU2lnbmF0dXJlIFRydXN0IENvLjERMA8GA1UECxMI
+RFNUQ0EgRTIxDTALBgNVBAMTBENSTDEwKwYDVR0QBCQwIoAPMTk5ODEyMDkxOTE3
+MjZagQ8yMDE4MTIwOTE5MTcyNlowCwYDVR0PBAQDAgEGMB8GA1UdIwQYMBaAFB6C
+TShlgDzJQW6sNS5ay97u+DlbMB0GA1UdDgQWBBQegk0oZYA8yUFurDUuWsve7vg5
+WzAMBgNVHRMEBTADAQH/MBkGCSqGSIb2fQdBAAQMMAobBFY0LjADAgSQMA0GCSqG
+SIb3DQEBBQUAA4GBAEeNg61i8tuwnkUiBbmi1gMOOHLnnvx75pO2mqWilMg0HZHR
+xdf0CiUPPXiBng+xZ8SQTGPdXqfiup/1902lMXucKS1M/mQ+7LZT/uqb7YLbdHVL
+B3luHtgZg3Pe9T7Qtd7nS2h9Qy4qIOF+oHhEngj1mPnHfxsb1gYgAlihw6ID
-----END CERTIFICATE-----
Verisign Class 3 Public Primary Certification Authority
=======================================================
-
-----BEGIN CERTIFICATE-----
MIICPDCCAaUCEHC65B0Q2Sk0tjjKewPMur8wDQYJKoZIhvcNAQECBQAwXzELMAkG
A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFz
@@ -253,7 +163,6 @@ AA9WjQKZ7aKQRUzkuxCkPfAyAw7xzvjoyVGM5mKf5p/AfbdynMk2OmufTqj/ZA1k
Verisign Class 1 Public Primary Certification Authority - G2
============================================================
-
-----BEGIN CERTIFICATE-----
MIIDAjCCAmsCEEzH6qqYPnHTkxD4PTqJkZIwDQYJKoZIhvcNAQEFBQAwgcExCzAJ
BgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xh
@@ -276,7 +185,6 @@ DzFc6PLZ
Verisign Class 2 Public Primary Certification Authority - G2
============================================================
-
-----BEGIN CERTIFICATE-----
MIIDAzCCAmwCEQC5L2DMiJ+hekYJuFtwbIqvMA0GCSqGSIb3DQEBBQUAMIHBMQsw
CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xPDA6BgNVBAsTM0Ns
@@ -299,7 +207,6 @@ jBJ7xUS0rg==
Verisign Class 3 Public Primary Certification Authority - G2
============================================================
-
-----BEGIN CERTIFICATE-----
MIIDAjCCAmsCEH3Z/gfPqB63EHln+6eJNMYwDQYJKoZIhvcNAQEFBQAwgcExCzAJ
BgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xh
@@ -320,32 +227,120 @@ F6YM40AIOw7n60RzKprxaZLvcRTDOaxxp5EJb+RxBrO6WVcmeQD2+A2iMzAo1KpY
oJ2daZH9
-----END CERTIFICATE-----
-Verisign Class 4 Public Primary Certification Authority - G2
-============================================================
+GlobalSign Root CA
+==================
+-----BEGIN CERTIFICATE-----
+MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkG
+A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv
+b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAw
+MDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i
+YWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxT
+aWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZ
+jc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavp
+xy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp
+1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdG
+snUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJ
+U26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N8
+9iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8E
+BTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0B
+AQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOz
+yj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE
+38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymP
+AbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUad
+DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME
+HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A==
+-----END CERTIFICATE-----
+GlobalSign Root CA - R2
+=======================
-----BEGIN CERTIFICATE-----
-MIIDAjCCAmsCEDKIjprS9esTR/h/xCA3JfgwDQYJKoZIhvcNAQEFBQAwgcExCzAJ
-BgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xh
-c3MgNCBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcy
-MTowOAYDVQQLEzEoYykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3Jp
-emVkIHVzZSBvbmx5MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMB4X
-DTk4MDUxODAwMDAwMFoXDTI4MDgwMTIzNTk1OVowgcExCzAJBgNVBAYTAlVTMRcw
-FQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xhc3MgNCBQdWJsaWMg
-UHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcyMTowOAYDVQQLEzEo
-YykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5
-MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMIGfMA0GCSqGSIb3DQEB
-AQUAA4GNADCBiQKBgQC68OTP+cSuhVS5B1f5j8V/aBH4xBewRNzjMHPVKmIquNDM
-HO0oW369atyzkSTKQWI8/AIBvxwWMZQFl3Zuoq29YRdsTjCG8FE3KlDHqGKB3FtK
-qsGgtG7rL+VXxbErQHDbWk2hjh+9Ax/YA9SPTJlxvOKCzFjomDqG04Y48wApHwID
-AQABMA0GCSqGSIb3DQEBBQUAA4GBAIWMEsGnuVAVess+rLhDityq3RS6iYF+ATwj
-cSGIL4LcY/oCRaxFWdcqWERbt5+BO5JoPeI3JPV7bI92NZYJqFmduc4jq3TWg/0y
-cyfYaT5DdPauxYma51N86Xv2S/PBZYPejYqcPIiNOVn8qj8ijaHBZlCBckztImRP
-T8qAkbYp
+MIIDujCCAqKgAwIBAgILBAAAAAABD4Ym5g0wDQYJKoZIhvcNAQEFBQAwTDEgMB4G
+A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjIxEzARBgNVBAoTCkdsb2JhbFNp
+Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDYxMjE1MDgwMDAwWhcNMjExMjE1
+MDgwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMjETMBEG
+A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAKbPJA6+Lm8omUVCxKs+IVSbC9N/hHD6ErPL
+v4dfxn+G07IwXNb9rfF73OX4YJYJkhD10FPe+3t+c4isUoh7SqbKSaZeqKeMWhG8
+eoLrvozps6yWJQeXSpkqBy+0Hne/ig+1AnwblrjFuTosvNYSuetZfeLQBoZfXklq
+tTleiDTsvHgMCJiEbKjNS7SgfQx5TfC4LcshytVsW33hoCmEofnTlEnLJGKRILzd
+C9XZzPnqJworc5HGnRusyMvo4KD0L5CLTfuwNhv2GXqF4G3yYROIXJ/gkwpRl4pa
+zq+r1feqCapgvdzZX99yqWATXgAByUr6P6TqBwMhAo6CygPCm48CAwEAAaOBnDCB
+mTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUm+IH
+V2ccHsBqBt5ZtJot39wZhi4wNgYDVR0fBC8wLTAroCmgJ4YlaHR0cDovL2NybC5n
+bG9iYWxzaWduLm5ldC9yb290LXIyLmNybDAfBgNVHSMEGDAWgBSb4gdXZxwewGoG
+3lm0mi3f3BmGLjANBgkqhkiG9w0BAQUFAAOCAQEAmYFThxxol4aR7OBKuEQLq4Gs
+J0/WwbgcQ3izDJr86iw8bmEbTUsp9Z8FHSbBuOmDAGJFtqkIk7mpM0sYmsL4h4hO
+291xNBrBVNpGP+DTKqttVCL1OmLNIG+6KYnX3ZHu01yiPqFbQfXf5WRDLenVOavS
+ot+3i9DAgBkcRcAtjOj4LaR0VknFBbVPFd5uRHg5h6h+u/N5GJG79G+dwfCMNYxd
+AfvDbbnvRG15RjF+Cv6pgsH/76tuIMRQyV+dTZsXjAzlAcmgQWpzU/qlULRuJQ/7
+TBj0/VLZjmmx6BEP3ojY+x1J96relc8geMJgEtslQIxq/H5COEBkEveegeGTLg==
+-----END CERTIFICATE-----
+
+ValiCert Class 1 VA
+===================
+-----BEGIN CERTIFICATE-----
+MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0
+IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz
+BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDEgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y
+aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG
+9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNTIyMjM0OFoXDTE5MDYy
+NTIyMjM0OFowgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y
+azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs
+YXNzIDEgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw
+Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl
+cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDYWYJ6ibiWuqYvaG9Y
+LqdUHAZu9OqNSLwxlBfw8068srg1knaw0KWlAdcAAxIiGQj4/xEjm84H9b9pGib+
+TunRf50sQB1ZaG6m+FiwnRqP0z/x3BkGgagO4DrdyFNFCQbmD3DD+kCmDuJWBQ8Y
+TfwggtFzVXSNdnKgHZ0dwN0/cQIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAFBoPUn0
+LBwGlN+VYH+Wexf+T3GtZMjdd9LvWVXoP+iOBSoh8gfStadS/pyxtuJbdxdA6nLW
+I8sogTLDAHkY7FkXicnGah5xyf23dKUlRWnFSKsZ4UWKJWsZ7uW7EvV/96aNUcPw
+nXS3qT6gpf+2SQMT2iLM7XGCK5nPOrf1LXLI
+-----END CERTIFICATE-----
+
+ValiCert Class 2 VA
+===================
+-----BEGIN CERTIFICATE-----
+MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0
+IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz
+BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y
+aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG
+9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAwMTk1NFoXDTE5MDYy
+NjAwMTk1NFowgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y
+azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs
+YXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw
+Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl
+cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDOOnHK5avIWZJV16vY
+dA757tn2VUdZZUcOBVXc65g2PFxTXdMwzzjsvUGJ7SVCCSRrCl6zfN1SLUzm1NZ9
+WlmpZdRJEy0kTRxQb7XBhVQ7/nHk01xC+YDgkRoKWzk2Z/M/VXwbP7RfZHM047QS
+v4dk+NoS/zcnwbNDu+97bi5p9wIDAQABMA0GCSqGSIb3DQEBBQUAA4GBADt/UG9v
+UJSZSWI4OB9L+KXIPqeCgfYrx+jFzug6EILLGACOTb2oWH+heQC1u+mNr0HZDzTu
+IYEZoDJJKPTEjlbVUjP9UNV+mWwD5MlM/Mtsq2azSiGM5bUMMj4QssxsodyamEwC
+W/POuZ6lcg5Ktz885hZo+L7tdEy8W9ViH0Pd
+-----END CERTIFICATE-----
+
+RSA Root Certificate 1
+======================
+-----BEGIN CERTIFICATE-----
+MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0
+IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz
+BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDMgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y
+aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG
+9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAwMjIzM1oXDTE5MDYy
+NjAwMjIzM1owgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y
+azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs
+YXNzIDMgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw
+Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl
+cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDjmFGWHOjVsQaBalfD
+cnWTq8+epvzzFlLWLU2fNUSoLgRNB0mKOCn1dzfnt6td3zZxFJmP3MKS8edgkpfs
+2Ejcv8ECIMYkpChMMFp2bbFc893enhBxoYjHW5tBbcqwuI4V7q0zK89HBFx1cQqY
+JJgpp0lZpd34t0NiYfPT4tBVPwIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAFa7AliE
+Zwgs3x/be0kz9dNnnfS0ChCzycUs4pJqcXgn8nCDQtM+z6lU9PHYkhaM0QTLS6vJ
+n0WuPIqpsHEzXcjFV9+vqDWzf4mH6eglkrh/hXqu1rweN1gqZ8mRzyqBPu3GOd/A
+PhmcGcwTTYJBtYze4D1gCCAPRX5ron+jjBXu
-----END CERTIFICATE-----
Verisign Class 1 Public Primary Certification Authority - G3
============================================================
-
-----BEGIN CERTIFICATE-----
MIIEGjCCAwICEQCLW3VWhFSFCwDPrzhIzrGkMA0GCSqGSIb3DQEBBQUAMIHKMQsw
CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl
@@ -373,7 +368,6 @@ E1Z5T21Q6huwtVexN2ZYI/PcD98Kh8TvhgXVOBRgmaNL3gaWcSzy27YfpO8/7g==
Verisign Class 2 Public Primary Certification Authority - G3
============================================================
-
-----BEGIN CERTIFICATE-----
MIIEGTCCAwECEGFwy0mMX5hFKeewptlQW3owDQYJKoZIhvcNAQEFBQAwgcoxCzAJ
BgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjEfMB0GA1UECxMWVmVy
@@ -401,7 +395,6 @@ cjnhsUPgKM+351psE2tJs//jGHyJizNdrDPXp/naOlXJWBD5qu9ats9LS98q
Verisign Class 3 Public Primary Certification Authority - G3
============================================================
-
-----BEGIN CERTIFICATE-----
MIIEGjCCAwICEQCbfgZJoz5iudXukEhxKe9XMA0GCSqGSIb3DQEBBQUAMIHKMQsw
CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl
@@ -429,7 +422,6 @@ TxzhT5yvDwyd93gN2PQ1VoDat20Xj50egWTh/sVFuq1ruQp6Tk9LhO5L8X3dEQ==
Verisign Class 4 Public Primary Certification Authority - G3
============================================================
-
-----BEGIN CERTIFICATE-----
MIIEGjCCAwICEQDsoKeLbnVqAc/EfMwvlF7XMA0GCSqGSIb3DQEBBQUAMIHKMQsw
CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl
@@ -455,154 +447,8 @@ fjaF3H48ZwC15DtS4KjrXRX5xm3wrR0OhbepmnMUWluPQSjA1egtTaRezarZ7c7c
bLvSHgCwIe34QWKCudiyxLtGUPMxxY8BqHTr9Xgn2uf3ZkPznoM+IKrDNWCRzg==
-----END CERTIFICATE-----
-Equifax Secure Global eBusiness CA
-==================================
-
------BEGIN CERTIFICATE-----
-MIICkDCCAfmgAwIBAgIBATANBgkqhkiG9w0BAQQFADBaMQswCQYDVQQGEwJVUzEc
-MBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEtMCsGA1UEAxMkRXF1aWZheCBT
-ZWN1cmUgR2xvYmFsIGVCdXNpbmVzcyBDQS0xMB4XDTk5MDYyMTA0MDAwMFoXDTIw
-MDYyMTA0MDAwMFowWjELMAkGA1UEBhMCVVMxHDAaBgNVBAoTE0VxdWlmYXggU2Vj
-dXJlIEluYy4xLTArBgNVBAMTJEVxdWlmYXggU2VjdXJlIEdsb2JhbCBlQnVzaW5l
-c3MgQ0EtMTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAuucXkAJlsTRVPEnC
-UdXfp9E3j9HngXNBUmCbnaEXJnitx7HoJpQytd4zjTov2/KaelpzmKNc6fuKcxtc
-58O/gGzNqfTWK8D3+ZmqY6KxRwIP1ORROhI8bIpaVIRw28HFkM9yRcuoWcDNM50/
-o5brhTMhHD4ePmBudpxnhcXIw2ECAwEAAaNmMGQwEQYJYIZIAYb4QgEBBAQDAgAH
-MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUvqigdHJQa0S3ySPY+6j/s1dr
-aGwwHQYDVR0OBBYEFL6ooHRyUGtEt8kj2Puo/7NXa2hsMA0GCSqGSIb3DQEBBAUA
-A4GBADDiAVGqx+pf2rnQZQ8w1j7aDRRJbpGTJxQx78T3LUX47Me/okENI7SS+RkA
-Z70Br83gcfxaz2TE4JaY0KNA4gGK7ycH8WUBikQtBmV1UsCGECAhX2xrD2yuCRyv
-8qIYNMR1pHMc8Y3c7635s3a0kr/clRAevsvIO1qEYBlWlKlV
------END CERTIFICATE-----
-
-Equifax Secure eBusiness CA 1
-=============================
-
------BEGIN CERTIFICATE-----
-MIICgjCCAeugAwIBAgIBBDANBgkqhkiG9w0BAQQFADBTMQswCQYDVQQGEwJVUzEc
-MBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEmMCQGA1UEAxMdRXF1aWZheCBT
-ZWN1cmUgZUJ1c2luZXNzIENBLTEwHhcNOTkwNjIxMDQwMDAwWhcNMjAwNjIxMDQw
-MDAwWjBTMQswCQYDVQQGEwJVUzEcMBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5j
-LjEmMCQGA1UEAxMdRXF1aWZheCBTZWN1cmUgZUJ1c2luZXNzIENBLTEwgZ8wDQYJ
-KoZIhvcNAQEBBQADgY0AMIGJAoGBAM4vGbwXt3fek6lfWg0XTzQaDJj0ItlZ1MRo
-RvC0NcWFAyDGr0WlIVFFQesWWDYyb+JQYmT5/VGcqiTZ9J2DKocKIdMSODRsjQBu
-WqDZQu4aIZX5UkxVWsUPOE9G+m34LjXWHXzr4vCwdYDIqROsvojvOm6rXyo4YgKw
-Env+j6YDAgMBAAGjZjBkMBEGCWCGSAGG+EIBAQQEAwIABzAPBgNVHRMBAf8EBTAD
-AQH/MB8GA1UdIwQYMBaAFEp4MlIR21kWNl7fwRQ2QGpHfEyhMB0GA1UdDgQWBBRK
-eDJSEdtZFjZe38EUNkBqR3xMoTANBgkqhkiG9w0BAQQFAAOBgQB1W6ibAxHm6VZM
-zfmpTMANmvPMZWnmJXbMWbfWVMMdzZmsGd20hdXgPfxiIKeES1hl8eL5lSE/9dR+
-WB5Hh1Q+WKG1tfgq73HnvMP2sUlG4tega+VWeponmHxGYhTnyfxuAxJ5gDgdSIKN
-/Bf+KpYrtWKmpj29f5JZzVoqgrI3eQ==
------END CERTIFICATE-----
-
-Equifax Secure eBusiness CA 2
-=============================
-
------BEGIN CERTIFICATE-----
-MIIDIDCCAomgAwIBAgIEN3DPtTANBgkqhkiG9w0BAQUFADBOMQswCQYDVQQGEwJV
-UzEXMBUGA1UEChMORXF1aWZheCBTZWN1cmUxJjAkBgNVBAsTHUVxdWlmYXggU2Vj
-dXJlIGVCdXNpbmVzcyBDQS0yMB4XDTk5MDYyMzEyMTQ0NVoXDTE5MDYyMzEyMTQ0
-NVowTjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDkVxdWlmYXggU2VjdXJlMSYwJAYD
-VQQLEx1FcXVpZmF4IFNlY3VyZSBlQnVzaW5lc3MgQ0EtMjCBnzANBgkqhkiG9w0B
-AQEFAAOBjQAwgYkCgYEA5Dk5kx5SBhsoNviyoynF7Y6yEb3+6+e0dMKP/wXn2Z0G
-vxLIPw7y1tEkshHe0XMJitSxLJgJDR5QRrKDpkWNYmi7hRsgcDKqQM2mll/EcTc/
-BPO3QSQ5BxoeLmFYoBIL5aXfxavqN3HMHMg3OrmXUqesxWoklE6ce8/AatbfIb0C
-AwEAAaOCAQkwggEFMHAGA1UdHwRpMGcwZaBjoGGkXzBdMQswCQYDVQQGEwJVUzEX
-MBUGA1UEChMORXF1aWZheCBTZWN1cmUxJjAkBgNVBAsTHUVxdWlmYXggU2VjdXJl
-IGVCdXNpbmVzcyBDQS0yMQ0wCwYDVQQDEwRDUkwxMBoGA1UdEAQTMBGBDzIwMTkw
-NjIzMTIxNDQ1WjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAUUJ4L6q9euSBIplBq
-y/3YIHqngnYwHQYDVR0OBBYEFFCeC+qvXrkgSKZQasv92CB6p4J2MAwGA1UdEwQF
-MAMBAf8wGgYJKoZIhvZ9B0EABA0wCxsFVjMuMGMDAgbAMA0GCSqGSIb3DQEBBQUA
-A4GBAAyGgq3oThr1jokn4jVYPSm0B482UJW/bsGe68SQsoWou7dC4A8HOd/7npCy
-0cE+U58DRLB+S/Rv5Hwf5+Kx5Lia78O9zt4LMjTZ3ijtM2vE1Nc9ElirfQkty3D1
-E4qUoSek1nDFbZS1yX2doNLGCEnZZpum0/QL3MUmV+GRMOrN
------END CERTIFICATE-----
-
-Thawte Time Stamping CA
-=======================
-
------BEGIN CERTIFICATE-----
-MIICoTCCAgqgAwIBAgIBADANBgkqhkiG9w0BAQQFADCBizELMAkGA1UEBhMCWkEx
-FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTEUMBIGA1UEBxMLRHVyYmFudmlsbGUxDzAN
-BgNVBAoTBlRoYXd0ZTEdMBsGA1UECxMUVGhhd3RlIENlcnRpZmljYXRpb24xHzAd
-BgNVBAMTFlRoYXd0ZSBUaW1lc3RhbXBpbmcgQ0EwHhcNOTcwMTAxMDAwMDAwWhcN
-MjAxMjMxMjM1OTU5WjCBizELMAkGA1UEBhMCWkExFTATBgNVBAgTDFdlc3Rlcm4g
-Q2FwZTEUMBIGA1UEBxMLRHVyYmFudmlsbGUxDzANBgNVBAoTBlRoYXd0ZTEdMBsG
-A1UECxMUVGhhd3RlIENlcnRpZmljYXRpb24xHzAdBgNVBAMTFlRoYXd0ZSBUaW1l
-c3RhbXBpbmcgQ0EwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBANYrWHhhRYZT
-6jR7UZztsOYuGA7+4F+oJ9O0yeB8WU4WDnNUYMF/9p8u6TqFJBU820cEY8OexJQa
-Wt9MevPZQx08EHp5JduQ/vBR5zDWQQD9nyjfeb6Uu522FOMjhdepQeBMpHmwKxqL
-8vg7ij5FrHGSALSQQZj7X+36ty6K+Ig3AgMBAAGjEzARMA8GA1UdEwEB/wQFMAMB
-Af8wDQYJKoZIhvcNAQEEBQADgYEAZ9viwuaHPUCDhjc1fR/OmsMMZiCouqoEiYbC
-9RAIDb/LogWK0E02PvTX72nGXuSwlG9KuefeW4i2e9vjJ+V2w/A1wcu1J5szedyQ
-pgCed/r8zSeUQhac0xxo7L9c3eWpexAKMnRUEzGLhQOEkbdYATAUOK8oyvyxUBkZ
-CayJSdM=
------END CERTIFICATE-----
-
-thawte Primary Root CA
-======================
-
------BEGIN CERTIFICATE-----
-MIIEIDCCAwigAwIBAgIQNE7VVyDV7exJ9C/ON9srbTANBgkqhkiG9w0BAQUFADCB
-qTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf
-Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw
-MDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxHzAdBgNV
-BAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwHhcNMDYxMTE3MDAwMDAwWhcNMzYw
-NzE2MjM1OTU5WjCBqTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5j
-LjEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYG
-A1UECxMvKGMpIDIwMDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNl
-IG9ubHkxHzAdBgNVBAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwggEiMA0GCSqG
-SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCsoPD7gFnUnMekz52hWXMJEEUMDSxuaPFs
-W0hoSVk3/AszGcJ3f8wQLZU0HObrTQmnHNK4yZc2AreJ1CRfBsDMRJSUjQJib+ta
-3RGNKJpchJAQeg29dGYvajig4tVUROsdB58Hum/u6f1OCyn1PoSgAfGcq/gcfomk
-6KHYcWUNo1F77rzSImANuVud37r8UVsLr5iy6S7pBOhih94ryNdOwUxkHt3Ph1i6
-Sk/KaAcdHJ1KxtUvkcx8cXIcxcBn6zL9yZJclNqFwJu/U30rCfSMnZEfl2pSy94J
-NqR32HuHUETVPm4pafs5SSYeCaWAe0At6+gnhcn+Yf1+5nyXHdWdAgMBAAGjQjBA
-MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBR7W0XP
-r87Lev0xkhpqtvNG61dIUDANBgkqhkiG9w0BAQUFAAOCAQEAeRHAS7ORtvzw6WfU
-DW5FvlXok9LOAz/t2iWwHVfLHjp2oEzsUHboZHIMpKnxuIvW1oeEuzLlQRHAd9mz
-YJ3rG9XRbkREqaYB7FViHXe4XI5ISXycO1cRrK1zN44veFyQaEfZYGDm/Ac9IiAX
-xPcW6cTYcvnIc3zfFi8VqT79aie2oetaupgf1eNNZAqdE8hhuvU5HIe6uL17In/2
-/qxAeeWsEG89jxt5dovEN7MhGITlNgDrYyCZuen+MwS7QcjBAvlEYyCegc5C09Y/
-LHbTY5xZ3Y+m4Q6gLkH3LpVHz7z9M/P2C2F+fpErgUfCJzDupxBdN49cOSvkBPB7
-jVaMaA==
------END CERTIFICATE-----
-
-VeriSign Class 3 Public Primary Certification Authority - G5
-============================================================
-
------BEGIN CERTIFICATE-----
-MIIE0zCCA7ugAwIBAgIQGNrRniZ96LtKIVjNzGs7SjANBgkqhkiG9w0BAQUFADCB
-yjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL
-ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJp
-U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxW
-ZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0
-aG9yaXR5IC0gRzUwHhcNMDYxMTA4MDAwMDAwWhcNMzYwNzE2MjM1OTU5WjCByjEL
-MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW
-ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJpU2ln
-biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp
-U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y
-aXR5IC0gRzUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvJAgIKXo1
-nmAMqudLO07cfLw8RRy7K+D+KQL5VwijZIUVJ/XxrcgxiV0i6CqqpkKzj/i5Vbex
-t0uz/o9+B1fs70PbZmIVYc9gDaTY3vjgw2IIPVQT60nKWVSFJuUrjxuf6/WhkcIz
-SdhDY2pSS9KP6HBRTdGJaXvHcPaz3BJ023tdS1bTlr8Vd6Gw9KIl8q8ckmcY5fQG
-BO+QueQA5N06tRn/Arr0PO7gi+s3i+z016zy9vA9r911kTMZHRxAy3QkGSGT2RT+
-rCpSx4/VBEnkjWNHiDxpg8v+R70rfk/Fla4OndTRQ8Bnc+MUCH7lP59zuDMKz10/
-NIeWiu5T6CUVAgMBAAGjgbIwga8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8E
-BAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJaW1hZ2UvZ2lmMCEwHzAH
-BgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYjaHR0cDovL2xvZ28udmVy
-aXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFH/TZafC3ey78DAJ80M5+gKv
-MzEzMA0GCSqGSIb3DQEBBQUAA4IBAQCTJEowX2LP2BqYLz3q3JktvXf2pXkiOOzE
-p6B4Eq1iDkVwZMXnl2YtmAl+X6/WzChl8gGqCBpH3vn5fJJaCGkgDdk+bW48DW7Y
-5gaRQBi5+MHt39tBquCWIMnNZBU4gcmU7qKEKQsTb47bDN0lAtukixlE0kF6BWlK
-WE9gyn6CagsCqiUXObXbf+eEZSqVir2G3l6BFoMtEMze/aiCKm0oHw0LxOXnGiYZ
-4fQRbxC1lfznQgUy286dUV4otp6F01vvpX1FQHKOtw5rDgb7MzVIcbidJ4vEZV8N
-hnacRHr2lVz2XTIIM6RUthg/aFzyQkqFOFSDX9HoLPKsEdao7WNq
------END CERTIFICATE-----
-
-Entrust.net Secure Server Certification Authority
-=================================================
-
+Entrust.net Secure Server CA
+============================
-----BEGIN CERTIFICATE-----
MIIE2DCCBEGgAwIBAgIEN0rSQzANBgkqhkiG9w0BAQUFADCBwzELMAkGA1UEBhMC
VVMxFDASBgNVBAoTC0VudHJ1c3QubmV0MTswOQYDVQQLEzJ3d3cuZW50cnVzdC5u
@@ -632,145 +478,8 @@ hvcNAQEFBQADgYEAkNwwAvpkdMKnCqV8IY00F6j7Rw7/JXyNEwr75Ji174z4xRAN
2cNgQ4xYDiKWL2KjLB+6rQXvqzJ4h6BUcxm1XAX5Uj5tLUUL9wqT6u0G+bI=
-----END CERTIFICATE-----
-GTE CyberTrust Global Root
-==========================
-
------BEGIN CERTIFICATE-----
-MIICWjCCAcMCAgGlMA0GCSqGSIb3DQEBBAUAMHUxCzAJBgNVBAYTAlVTMRgwFgYD
-VQQKEw9HVEUgQ29ycG9yYXRpb24xJzAlBgNVBAsTHkdURSBDeWJlclRydXN0IFNv
-bHV0aW9ucywgSW5jLjEjMCEGA1UEAxMaR1RFIEN5YmVyVHJ1c3QgR2xvYmFsIFJv
-b3QwHhcNOTgwODEzMDAyOTAwWhcNMTgwODEzMjM1OTAwWjB1MQswCQYDVQQGEwJV
-UzEYMBYGA1UEChMPR1RFIENvcnBvcmF0aW9uMScwJQYDVQQLEx5HVEUgQ3liZXJU
-cnVzdCBTb2x1dGlvbnMsIEluYy4xIzAhBgNVBAMTGkdURSBDeWJlclRydXN0IEds
-b2JhbCBSb290MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQCVD6C28FCc6HrH
-iM3dFw4usJTQGz0O9pTAipTHBsiQl8i4ZBp6fmw8U+E3KHNgf7KXUwefU/ltWJTS
-r41tiGeA5u2ylc9yMcqlHHK6XALnZELn+aks1joNrI1CqiQBOeacPwGFVw1Yh0X4
-04Wqk2kmhXBIgD8SFcd5tB8FLztimQIDAQABMA0GCSqGSIb3DQEBBAUAA4GBAG3r
-GwnpXtlR22ciYaQqPEh346B8pt5zohQDhT37qw4wxYMWM4ETCJ57NE7fQMh017l9
-3PR2VX2bY1QY6fDq81yx2YtCHrnAlU66+tXifPVoYb+O7AWXX1uw16OFNMQkpw0P
-lZPvy5TYnh+dXIVtx6quTx8itc2VrbqnzPmrC3p/
------END CERTIFICATE-----
-
-GlobalSign Root CA
-==================
-
------BEGIN CERTIFICATE-----
-MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkG
-A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv
-b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAw
-MDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i
-YWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxT
-aWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZ
-jc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavp
-xy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp
-1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdG
-snUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJ
-U26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N8
-9iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8E
-BTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0B
-AQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOz
-yj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE
-38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymP
-AbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUad
-DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME
-HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A==
------END CERTIFICATE-----
-
-GlobalSign Root CA - R2
-=======================
-
------BEGIN CERTIFICATE-----
-MIIDujCCAqKgAwIBAgILBAAAAAABD4Ym5g0wDQYJKoZIhvcNAQEFBQAwTDEgMB4G
-A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjIxEzARBgNVBAoTCkdsb2JhbFNp
-Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDYxMjE1MDgwMDAwWhcNMjExMjE1
-MDgwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMjETMBEG
-A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI
-hvcNAQEBBQADggEPADCCAQoCggEBAKbPJA6+Lm8omUVCxKs+IVSbC9N/hHD6ErPL
-v4dfxn+G07IwXNb9rfF73OX4YJYJkhD10FPe+3t+c4isUoh7SqbKSaZeqKeMWhG8
-eoLrvozps6yWJQeXSpkqBy+0Hne/ig+1AnwblrjFuTosvNYSuetZfeLQBoZfXklq
-tTleiDTsvHgMCJiEbKjNS7SgfQx5TfC4LcshytVsW33hoCmEofnTlEnLJGKRILzd
-C9XZzPnqJworc5HGnRusyMvo4KD0L5CLTfuwNhv2GXqF4G3yYROIXJ/gkwpRl4pa
-zq+r1feqCapgvdzZX99yqWATXgAByUr6P6TqBwMhAo6CygPCm48CAwEAAaOBnDCB
-mTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUm+IH
-V2ccHsBqBt5ZtJot39wZhi4wNgYDVR0fBC8wLTAroCmgJ4YlaHR0cDovL2NybC5n
-bG9iYWxzaWduLm5ldC9yb290LXIyLmNybDAfBgNVHSMEGDAWgBSb4gdXZxwewGoG
-3lm0mi3f3BmGLjANBgkqhkiG9w0BAQUFAAOCAQEAmYFThxxol4aR7OBKuEQLq4Gs
-J0/WwbgcQ3izDJr86iw8bmEbTUsp9Z8FHSbBuOmDAGJFtqkIk7mpM0sYmsL4h4hO
-291xNBrBVNpGP+DTKqttVCL1OmLNIG+6KYnX3ZHu01yiPqFbQfXf5WRDLenVOavS
-ot+3i9DAgBkcRcAtjOj4LaR0VknFBbVPFd5uRHg5h6h+u/N5GJG79G+dwfCMNYxd
-AfvDbbnvRG15RjF+Cv6pgsH/76tuIMRQyV+dTZsXjAzlAcmgQWpzU/qlULRuJQ/7
-TBj0/VLZjmmx6BEP3ojY+x1J96relc8geMJgEtslQIxq/H5COEBkEveegeGTLg==
------END CERTIFICATE-----
-
-ValiCert Class 1 VA
-===================
-
------BEGIN CERTIFICATE-----
-MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0
-IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz
-BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDEgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y
-aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG
-9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNTIyMjM0OFoXDTE5MDYy
-NTIyMjM0OFowgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y
-azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs
-YXNzIDEgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw
-Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl
-cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDYWYJ6ibiWuqYvaG9Y
-LqdUHAZu9OqNSLwxlBfw8068srg1knaw0KWlAdcAAxIiGQj4/xEjm84H9b9pGib+
-TunRf50sQB1ZaG6m+FiwnRqP0z/x3BkGgagO4DrdyFNFCQbmD3DD+kCmDuJWBQ8Y
-TfwggtFzVXSNdnKgHZ0dwN0/cQIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAFBoPUn0
-LBwGlN+VYH+Wexf+T3GtZMjdd9LvWVXoP+iOBSoh8gfStadS/pyxtuJbdxdA6nLW
-I8sogTLDAHkY7FkXicnGah5xyf23dKUlRWnFSKsZ4UWKJWsZ7uW7EvV/96aNUcPw
-nXS3qT6gpf+2SQMT2iLM7XGCK5nPOrf1LXLI
------END CERTIFICATE-----
-
-ValiCert Class 2 VA
-===================
-
------BEGIN CERTIFICATE-----
-MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0
-IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz
-BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y
-aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG
-9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAwMTk1NFoXDTE5MDYy
-NjAwMTk1NFowgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y
-azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs
-YXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw
-Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl
-cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDOOnHK5avIWZJV16vY
-dA757tn2VUdZZUcOBVXc65g2PFxTXdMwzzjsvUGJ7SVCCSRrCl6zfN1SLUzm1NZ9
-WlmpZdRJEy0kTRxQb7XBhVQ7/nHk01xC+YDgkRoKWzk2Z/M/VXwbP7RfZHM047QS
-v4dk+NoS/zcnwbNDu+97bi5p9wIDAQABMA0GCSqGSIb3DQEBBQUAA4GBADt/UG9v
-UJSZSWI4OB9L+KXIPqeCgfYrx+jFzug6EILLGACOTb2oWH+heQC1u+mNr0HZDzTu
-IYEZoDJJKPTEjlbVUjP9UNV+mWwD5MlM/Mtsq2azSiGM5bUMMj4QssxsodyamEwC
-W/POuZ6lcg5Ktz885hZo+L7tdEy8W9ViH0Pd
------END CERTIFICATE-----
-
-RSA Root Certificate 1
-======================
-
------BEGIN CERTIFICATE-----
-MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0
-IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz
-BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDMgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y
-aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG
-9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAwMjIzM1oXDTE5MDYy
-NjAwMjIzM1owgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y
-azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs
-YXNzIDMgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw
-Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl
-cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDjmFGWHOjVsQaBalfD
-cnWTq8+epvzzFlLWLU2fNUSoLgRNB0mKOCn1dzfnt6td3zZxFJmP3MKS8edgkpfs
-2Ejcv8ECIMYkpChMMFp2bbFc893enhBxoYjHW5tBbcqwuI4V7q0zK89HBFx1cQqY
-JJgpp0lZpd34t0NiYfPT4tBVPwIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAFa7AliE
-Zwgs3x/be0kz9dNnnfS0ChCzycUs4pJqcXgn8nCDQtM+z6lU9PHYkhaM0QTLS6vJ
-n0WuPIqpsHEzXcjFV9+vqDWzf4mH6eglkrh/hXqu1rweN1gqZ8mRzyqBPu3GOd/A
-PhmcGcwTTYJBtYze4D1gCCAPRX5ron+jjBXu
------END CERTIFICATE-----
-
Entrust.net Premium 2048 Secure Server CA
=========================================
-
-----BEGIN CERTIFICATE-----
MIIEXDCCA0SgAwIBAgIEOGO5ZjANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChML
RW50cnVzdC5uZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBp
@@ -800,7 +509,6 @@ vUxFnmG6v4SBkgPR0ml8xQ==
Baltimore CyberTrust Root
=========================
-
-----BEGIN CERTIFICATE-----
MIIDdzCCAl+gAwIBAgIEAgAAuTANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJJ
RTESMBAGA1UEChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJlclRydXN0MSIwIAYD
@@ -823,9 +531,68 @@ ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS
R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp
-----END CERTIFICATE-----
+Equifax Secure Global eBusiness CA
+==================================
+-----BEGIN CERTIFICATE-----
+MIICkDCCAfmgAwIBAgIBATANBgkqhkiG9w0BAQQFADBaMQswCQYDVQQGEwJVUzEc
+MBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEtMCsGA1UEAxMkRXF1aWZheCBT
+ZWN1cmUgR2xvYmFsIGVCdXNpbmVzcyBDQS0xMB4XDTk5MDYyMTA0MDAwMFoXDTIw
+MDYyMTA0MDAwMFowWjELMAkGA1UEBhMCVVMxHDAaBgNVBAoTE0VxdWlmYXggU2Vj
+dXJlIEluYy4xLTArBgNVBAMTJEVxdWlmYXggU2VjdXJlIEdsb2JhbCBlQnVzaW5l
+c3MgQ0EtMTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAuucXkAJlsTRVPEnC
+UdXfp9E3j9HngXNBUmCbnaEXJnitx7HoJpQytd4zjTov2/KaelpzmKNc6fuKcxtc
+58O/gGzNqfTWK8D3+ZmqY6KxRwIP1ORROhI8bIpaVIRw28HFkM9yRcuoWcDNM50/
+o5brhTMhHD4ePmBudpxnhcXIw2ECAwEAAaNmMGQwEQYJYIZIAYb4QgEBBAQDAgAH
+MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUvqigdHJQa0S3ySPY+6j/s1dr
+aGwwHQYDVR0OBBYEFL6ooHRyUGtEt8kj2Puo/7NXa2hsMA0GCSqGSIb3DQEBBAUA
+A4GBADDiAVGqx+pf2rnQZQ8w1j7aDRRJbpGTJxQx78T3LUX47Me/okENI7SS+RkA
+Z70Br83gcfxaz2TE4JaY0KNA4gGK7ycH8WUBikQtBmV1UsCGECAhX2xrD2yuCRyv
+8qIYNMR1pHMc8Y3c7635s3a0kr/clRAevsvIO1qEYBlWlKlV
+-----END CERTIFICATE-----
+
+Equifax Secure eBusiness CA 1
+=============================
+-----BEGIN CERTIFICATE-----
+MIICgjCCAeugAwIBAgIBBDANBgkqhkiG9w0BAQQFADBTMQswCQYDVQQGEwJVUzEc
+MBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEmMCQGA1UEAxMdRXF1aWZheCBT
+ZWN1cmUgZUJ1c2luZXNzIENBLTEwHhcNOTkwNjIxMDQwMDAwWhcNMjAwNjIxMDQw
+MDAwWjBTMQswCQYDVQQGEwJVUzEcMBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5j
+LjEmMCQGA1UEAxMdRXF1aWZheCBTZWN1cmUgZUJ1c2luZXNzIENBLTEwgZ8wDQYJ
+KoZIhvcNAQEBBQADgY0AMIGJAoGBAM4vGbwXt3fek6lfWg0XTzQaDJj0ItlZ1MRo
+RvC0NcWFAyDGr0WlIVFFQesWWDYyb+JQYmT5/VGcqiTZ9J2DKocKIdMSODRsjQBu
+WqDZQu4aIZX5UkxVWsUPOE9G+m34LjXWHXzr4vCwdYDIqROsvojvOm6rXyo4YgKw
+Env+j6YDAgMBAAGjZjBkMBEGCWCGSAGG+EIBAQQEAwIABzAPBgNVHRMBAf8EBTAD
+AQH/MB8GA1UdIwQYMBaAFEp4MlIR21kWNl7fwRQ2QGpHfEyhMB0GA1UdDgQWBBRK
+eDJSEdtZFjZe38EUNkBqR3xMoTANBgkqhkiG9w0BAQQFAAOBgQB1W6ibAxHm6VZM
+zfmpTMANmvPMZWnmJXbMWbfWVMMdzZmsGd20hdXgPfxiIKeES1hl8eL5lSE/9dR+
+WB5Hh1Q+WKG1tfgq73HnvMP2sUlG4tega+VWeponmHxGYhTnyfxuAxJ5gDgdSIKN
+/Bf+KpYrtWKmpj29f5JZzVoqgrI3eQ==
+-----END CERTIFICATE-----
+
+Equifax Secure eBusiness CA 2
+=============================
+-----BEGIN CERTIFICATE-----
+MIIDIDCCAomgAwIBAgIEN3DPtTANBgkqhkiG9w0BAQUFADBOMQswCQYDVQQGEwJV
+UzEXMBUGA1UEChMORXF1aWZheCBTZWN1cmUxJjAkBgNVBAsTHUVxdWlmYXggU2Vj
+dXJlIGVCdXNpbmVzcyBDQS0yMB4XDTk5MDYyMzEyMTQ0NVoXDTE5MDYyMzEyMTQ0
+NVowTjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDkVxdWlmYXggU2VjdXJlMSYwJAYD
+VQQLEx1FcXVpZmF4IFNlY3VyZSBlQnVzaW5lc3MgQ0EtMjCBnzANBgkqhkiG9w0B
+AQEFAAOBjQAwgYkCgYEA5Dk5kx5SBhsoNviyoynF7Y6yEb3+6+e0dMKP/wXn2Z0G
+vxLIPw7y1tEkshHe0XMJitSxLJgJDR5QRrKDpkWNYmi7hRsgcDKqQM2mll/EcTc/
+BPO3QSQ5BxoeLmFYoBIL5aXfxavqN3HMHMg3OrmXUqesxWoklE6ce8/AatbfIb0C
+AwEAAaOCAQkwggEFMHAGA1UdHwRpMGcwZaBjoGGkXzBdMQswCQYDVQQGEwJVUzEX
+MBUGA1UEChMORXF1aWZheCBTZWN1cmUxJjAkBgNVBAsTHUVxdWlmYXggU2VjdXJl
+IGVCdXNpbmVzcyBDQS0yMQ0wCwYDVQQDEwRDUkwxMBoGA1UdEAQTMBGBDzIwMTkw
+NjIzMTIxNDQ1WjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAUUJ4L6q9euSBIplBq
+y/3YIHqngnYwHQYDVR0OBBYEFFCeC+qvXrkgSKZQasv92CB6p4J2MAwGA1UdEwQF
+MAMBAf8wGgYJKoZIhvZ9B0EABA0wCxsFVjMuMGMDAgbAMA0GCSqGSIb3DQEBBQUA
+A4GBAAyGgq3oThr1jokn4jVYPSm0B482UJW/bsGe68SQsoWou7dC4A8HOd/7npCy
+0cE+U58DRLB+S/Rv5Hwf5+Kx5Lia78O9zt4LMjTZ3ijtM2vE1Nc9ElirfQkty3D1
+E4qUoSek1nDFbZS1yX2doNLGCEnZZpum0/QL3MUmV+GRMOrN
+-----END CERTIFICATE-----
+
AddTrust Low-Value Services Root
================================
-
-----BEGIN CERTIFICATE-----
MIIEGDCCAwCgAwIBAgIBATANBgkqhkiG9w0BAQUFADBlMQswCQYDVQQGEwJTRTEU
MBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3
@@ -853,7 +620,6 @@ WiFeIc9TVPC6b4nbqKqVz4vjccweGyBECMB6tkD9xOQ14R0WHNC8K47Wcdk=
AddTrust External Root
======================
-
-----BEGIN CERTIFICATE-----
MIIENjCCAx6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBvMQswCQYDVQQGEwJTRTEU
MBIGA1UEChMLQWRkVHJ1c3QgQUIxJjAkBgNVBAsTHUFkZFRydXN0IEV4dGVybmFs
@@ -882,7 +648,6 @@ mnkPIAou1Z5jJh5VkpTYghdae9C8x49OhgQ=
AddTrust Public Services Root
=============================
-
-----BEGIN CERTIFICATE-----
MIIEFTCCAv2gAwIBAgIBATANBgkqhkiG9w0BAQUFADBkMQswCQYDVQQGEwJTRTEU
MBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3
@@ -910,7 +675,6 @@ XiLsks3/QppEIW1cxeMiHV9HEufOX1362KqxMy3ZdvJOOjMMK7MtkAY=
AddTrust Qualified Certificates Root
====================================
-
-----BEGIN CERTIFICATE-----
MIIEHjCCAwagAwIBAgIBATANBgkqhkiG9w0BAQUFADBnMQswCQYDVQQGEwJTRTEU
MBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3
@@ -939,7 +703,6 @@ xqE=
Entrust Root Certification Authority
====================================
-
-----BEGIN CERTIFICATE-----
MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMC
VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0
@@ -968,9 +731,32 @@ eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m
0vdXcDazv/wor3ElhVsT/h5/WrQ8
-----END CERTIFICATE-----
+RSA Security 2048 v3
+====================
+-----BEGIN CERTIFICATE-----
+MIIDYTCCAkmgAwIBAgIQCgEBAQAAAnwAAAAKAAAAAjANBgkqhkiG9w0BAQUFADA6
+MRkwFwYDVQQKExBSU0EgU2VjdXJpdHkgSW5jMR0wGwYDVQQLExRSU0EgU2VjdXJp
+dHkgMjA0OCBWMzAeFw0wMTAyMjIyMDM5MjNaFw0yNjAyMjIyMDM5MjNaMDoxGTAX
+BgNVBAoTEFJTQSBTZWN1cml0eSBJbmMxHTAbBgNVBAsTFFJTQSBTZWN1cml0eSAy
+MDQ4IFYzMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAt49VcdKA3Xtp
+eafwGFAyPGJn9gqVB93mG/Oe2dJBVGutn3y+Gc37RqtBaB4Y6lXIL5F4iSj7Jylg
+/9+PjDvJSZu1pJTOAeo+tWN7fyb9Gd3AIb2E0S1PRsNO3Ng3OTsor8udGuorryGl
+wSMiuLgbWhOHV4PR8CDn6E8jQrAApX2J6elhc5SYcSa8LWrg903w8bYqODGBDSnh
+AMFRD0xS+ARaqn1y07iHKrtjEAMqs6FPDVpeRrc9DvV07Jmf+T0kgYim3WBU6JU2
+PcYJk5qjEoAAVZkZR73QpXzDuvsf9/UP+Ky5tfQ3mBMY3oVbtwyCO4dvlTlYMNpu
+AWgXIszACwIDAQABo2MwYTAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+BjAfBgNVHSMEGDAWgBQHw1EwpKrpRa41JPr/JCwz0LGdjDAdBgNVHQ4EFgQUB8NR
+MKSq6UWuNST6/yQsM9CxnYwwDQYJKoZIhvcNAQEFBQADggEBAF8+hnZuuDU8TjYc
+HnmYv/3VEhF5Ug7uMYm83X/50cYVIeiKAVQNOvtUudZj1LGqlk2iQk3UUx+LEN5/
+Zb5gEydxiKRz44Rj0aRV4VCT5hsOedBnvEbIvz8XDZXmxpBp3ue0L96VfdASPz0+
+f00/FGj1EVDVwfSQpQgdMWD/YIwjVAqv/qFuxdF6Kmh4zx6CCiC0H63lhbJqaHVO
+rSU3lIW+vaHU6rcMSzyd6BIA8F+sDeGscGNz9395nzIlQnQFgCi/vcEkllgVsRch
+6YlL2weIZ/QVrXA+L02FO8K32/6YaCOJ4XQP3vTFhGMpG8zLB8kApKnXwiJPZ9d3
+7CAFYd4=
+-----END CERTIFICATE-----
+
GeoTrust Global CA
==================
-
-----BEGIN CERTIFICATE-----
MIIDVDCCAjygAwIBAgIDAjRWMA0GCSqGSIb3DQEBBQUAMEIxCzAJBgNVBAYTAlVT
MRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMRswGQYDVQQDExJHZW9UcnVzdCBHbG9i
@@ -994,7 +780,6 @@ hw4EbNX/3aBd7YdStysVAq45pmp06drE57xNNB6pXE0zX5IJL4hmXXeXxx12E6nV
GeoTrust Global CA 2
====================
-
-----BEGIN CERTIFICATE-----
MIIDZjCCAk6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBEMQswCQYDVQQGEwJVUzEW
MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEdMBsGA1UEAxMUR2VvVHJ1c3QgR2xvYmFs
@@ -1019,7 +804,6 @@ I8LO57sEAszAR6LkxCkvW0VXiVHuPOtSCP8HNR6fNWpHSlaY0VqFH4z1Ir+rzoPz
GeoTrust Universal CA
=====================
-
-----BEGIN CERTIFICATE-----
MIIFaDCCA1CgAwIBAgIBATANBgkqhkiG9w0BAQUFADBFMQswCQYDVQQGEwJVUzEW
MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEeMBwGA1UEAxMVR2VvVHJ1c3QgVW5pdmVy
@@ -1054,7 +838,6 @@ bJxPgWp6ZKy7PtXny3YuxadIwVyQD8vIP/rmMuGNG2+k5o7Y+SlIis5z/iw=
GeoTrust Universal CA 2
=======================
-
-----BEGIN CERTIFICATE-----
MIIFbDCCA1SgAwIBAgIBATANBgkqhkiG9w0BAQUFADBHMQswCQYDVQQGEwJVUzEW
MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1c3QgVW5pdmVy
@@ -1087,9 +870,37 @@ OCiNUW7dFGdTbHFcJoRNdVq2fmBWqU2t+5sel/MN2dKXVHfaPRK34B7vCAas+YWH
QMAJKOSLakhT2+zNVVXxxvjpoixMptEmX36vWkzaH6byHCx+rgIW0lbQL1dTR+iS
-----END CERTIFICATE-----
+UTN-USER First-Network Applications
+===================================
+-----BEGIN CERTIFICATE-----
+MIIEZDCCA0ygAwIBAgIQRL4Mi1AAJLQR0zYwS8AzdzANBgkqhkiG9w0BAQUFADCB
+ozELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2Ug
+Q2l0eTEeMBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExho
+dHRwOi8vd3d3LnVzZXJ0cnVzdC5jb20xKzApBgNVBAMTIlVUTi1VU0VSRmlyc3Qt
+TmV0d29yayBBcHBsaWNhdGlvbnMwHhcNOTkwNzA5MTg0ODM5WhcNMTkwNzA5MTg1
+NzQ5WjCBozELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0
+IExha2UgQ2l0eTEeMBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYD
+VQQLExhodHRwOi8vd3d3LnVzZXJ0cnVzdC5jb20xKzApBgNVBAMTIlVUTi1VU0VS
+Rmlyc3QtTmV0d29yayBBcHBsaWNhdGlvbnMwggEiMA0GCSqGSIb3DQEBAQUAA4IB
+DwAwggEKAoIBAQCz+5Gh5DZVhawGNFugmliy+LUPBXeDrjKxdpJo7CNKyXY/45y2
+N3kDuatpjQclthln5LAbGHNhSuh+zdMvZOOmfAz6F4CjDUeJT1FxL+78P/m4FoCH
+iZMlIJpDgmkkdihZNaEdwH+DBmQWICzTSaSFtMBhf1EI+GgVkYDLpdXuOzr0hARe
+YFmnjDRy7rh4xdE7EkpvfmUnuaRVxblvQ6TFHSyZwFKkeEwVs0CYCGtDxgGwenv1
+axwiP8vv/6jQOkt2FZ7S0cYu49tXGzKiuG/ohqY/cKvlcJKrRB5AUPuco2LkbG6g
+yN7igEL66S/ozjIEj3yNtxyjNTwV3Z7DrpelAgMBAAGjgZEwgY4wCwYDVR0PBAQD
+AgHGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFPqGydvguul49Uuo1hXf8NPh
+ahQ8ME8GA1UdHwRIMEYwRKBCoECGPmh0dHA6Ly9jcmwudXNlcnRydXN0LmNvbS9V
+VE4tVVNFUkZpcnN0LU5ldHdvcmtBcHBsaWNhdGlvbnMuY3JsMA0GCSqGSIb3DQEB
+BQUAA4IBAQCk8yXM0dSRgyLQzDKrm5ZONJFUICU0YV8qAhXhi6r/fWRRzwr/vH3Y
+IWp4yy9Rb/hCHTO967V7lMPDqaAt39EpHx3+jz+7qEUqf9FuVSTiuwL7MT++6Lzs
+QCv4AdRWOOTKRIK1YSAhZ2X28AvnNPilwpyjXEAfhZOVBt5P1CeptqX8Fs1zMT+4
+ZSfP1FMa8Kxun08FDAOBp4QpxFq9ZFdyrTvPNximmMatBrTcCKME1SmklpoSZ0qM
+YEWd8SOasACcaLWYUNPvji6SZbFIPiG+FTAqDbUMo2s/rn9X9R+WfN9v3YIwLGUb
+QErNaLly7HF27FSOH4UMAWr6pjisH8SE
+-----END CERTIFICATE-----
+
America Online Root Certification Authority 1
=============================================
-
-----BEGIN CERTIFICATE-----
MIIDpDCCAoygAwIBAgIBATANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEc
MBoGA1UEChMTQW1lcmljYSBPbmxpbmUgSW5jLjE2MDQGA1UEAxMtQW1lcmljYSBP
@@ -1115,7 +926,6 @@ sPmuujz9dLQR6FgNgLzTqIA6me11zEZ7
America Online Root Certification Authority 2
=============================================
-
-----BEGIN CERTIFICATE-----
MIIFpDCCA4ygAwIBAgIBATANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEc
MBoGA1UEChMTQW1lcmljYSBPbmxpbmUgSW5jLjE2MDQGA1UEAxMtQW1lcmljYSBP
@@ -1150,9 +960,55 @@ Zvqz2cK6Kb6aSDiCmfS/O0oxGfm/jiEzFMpPVF/7zvuPcX/9XhmgD0uRuMRUvAaw
RY8mkaKO/qk=
-----END CERTIFICATE-----
+Visa eCommerce Root
+===================
+-----BEGIN CERTIFICATE-----
+MIIDojCCAoqgAwIBAgIQE4Y1TR0/BvLB+WUF1ZAcYjANBgkqhkiG9w0BAQUFADBr
+MQswCQYDVQQGEwJVUzENMAsGA1UEChMEVklTQTEvMC0GA1UECxMmVmlzYSBJbnRl
+cm5hdGlvbmFsIFNlcnZpY2UgQXNzb2NpYXRpb24xHDAaBgNVBAMTE1Zpc2EgZUNv
+bW1lcmNlIFJvb3QwHhcNMDIwNjI2MDIxODM2WhcNMjIwNjI0MDAxNjEyWjBrMQsw
+CQYDVQQGEwJVUzENMAsGA1UEChMEVklTQTEvMC0GA1UECxMmVmlzYSBJbnRlcm5h
+dGlvbmFsIFNlcnZpY2UgQXNzb2NpYXRpb24xHDAaBgNVBAMTE1Zpc2EgZUNvbW1l
+cmNlIFJvb3QwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvV95WHm6h
+2mCxlCfLF9sHP4CFT8icttD0b0/Pmdjh28JIXDqsOTPHH2qLJj0rNfVIsZHBAk4E
+lpF7sDPwsRROEW+1QK8bRaVK7362rPKgH1g/EkZgPI2h4H3PVz4zHvtH8aoVlwdV
+ZqW1LS7YgFmypw23RuwhY/81q6UCzyr0TP579ZRdhE2o8mCP2w4lPJ9zcc+U30rq
+299yOIzzlr3xF7zSujtFWsan9sYXiwGd/BmoKoMWuDpI/k4+oKsGGelT84ATB+0t
+vz8KPFUgOSwsAGl0lUq8ILKpeeUYiZGo3BxN77t+Nwtd/jmliFKMAGzsGHxBvfaL
+dXe6YJ2E5/4tAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD
+AgEGMB0GA1UdDgQWBBQVOIMPPyw/cDMezUb+B4wg4NfDtzANBgkqhkiG9w0BAQUF
+AAOCAQEAX/FBfXxcCLkr4NWSR/pnXKUTwwMhmytMiUbPWU3J/qVAtmPN3XEolWcR
+zCSs00Rsca4BIGsDoo8Ytyk6feUWYFN4PMCvFYP3j1IzJL1kk5fui/fbGKhtcbP3
+LBfQdCVp9/5rPJS+TUtBjE7ic9DjkCJzQ83z7+pzzkWKsKZJ/0x9nXGIxHYdkFsd
+7v3M9+79YKWxehZx0RbQfBI8bGmX265fOZpwLwU8GUYEmSA20GBuYQa7FkKMcPcw
+++DbZqMAAb3mLNqRX6BGi01qnD093QVG/na/oAo85ADmJ7f/hC3euiInlhBx6yLt
+398znM/jra6O1I7mT1GvFpLgXPYHDw==
+-----END CERTIFICATE-----
+
+Certum Root CA
+==============
+-----BEGIN CERTIFICATE-----
+MIIDDDCCAfSgAwIBAgIDAQAgMA0GCSqGSIb3DQEBBQUAMD4xCzAJBgNVBAYTAlBM
+MRswGQYDVQQKExJVbml6ZXRvIFNwLiB6IG8uby4xEjAQBgNVBAMTCUNlcnR1bSBD
+QTAeFw0wMjA2MTExMDQ2MzlaFw0yNzA2MTExMDQ2MzlaMD4xCzAJBgNVBAYTAlBM
+MRswGQYDVQQKExJVbml6ZXRvIFNwLiB6IG8uby4xEjAQBgNVBAMTCUNlcnR1bSBD
+QTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAM6xwS7TT3zNJc4YPk/E
+jG+AanPIW1H4m9LcuwBcsaD8dQPugfCI7iNS6eYVM42sLQnFdvkrOYCJ5JdLkKWo
+ePhzQ3ukYbDYWMzhbGZ+nPMJXlVjhNWo7/OxLjBos8Q82KxujZlakE403Daaj4GI
+ULdtlkIJ89eVgw1BS7Bqa/j8D35in2fE7SZfECYPCE/wpFcozo+47UX2bu4lXapu
+Ob7kky/ZR6By6/qmW6/KUz/iDsaWVhFu9+lmqSbYf5VT7QqFiLpPKaVCjF62/IUg
+AKpoC6EahQGcxEZjgoi2IrHu/qpGWX7PNSzVttpd90gzFFS269lvzs2I1qsb2pY7
+HVkCAwEAAaMTMBEwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEA
+uI3O7+cUus/usESSbLQ5PqKEbq24IXfS1HeCh+YgQYHu4vgRt2PRFze+GXYkHAQa
+TOs9qmdvLdTN/mUxcMUbpgIKumB7bVjCmkn+YzILa+M6wKyrO7Do0wlRjBCDxjTg
+xSvgGrZgFCdsMneMvLJymM/NzD+5yCRCFNZX/OYmQ6kd5YCQzgNUKD73P9P4Te1q
+CjqTE5s7FCMTY5w/0YcneeVMUeMBrYVdGjux1XMQpNPyvG5k9VpWkKjHDkx0Dy5x
+O/fIR/RpbxXyEV6DHpx8Uq79AtoSqFlnGNu8cN2bsWntgM6JQEhqDjXKKWYVIZQs
+6GAqm4VKQPNriiTsBhYscw==
+-----END CERTIFICATE-----
+
Comodo AAA Services root
========================
-
-----BEGIN CERTIFICATE-----
MIIEMjCCAxqgAwIBAgIBATANBgkqhkiG9w0BAQUFADB7MQswCQYDVQQGEwJHQjEb
MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow
@@ -1181,7 +1037,6 @@ smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg==
Comodo Secure Services root
===========================
-
-----BEGIN CERTIFICATE-----
MIIEPzCCAyegAwIBAgIBATANBgkqhkiG9w0BAQUFADB+MQswCQYDVQQGEwJHQjEb
MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow
@@ -1210,7 +1065,6 @@ izeauan5Hb/qmZJhlv8BzaFfDbxxvA6sCx1HRR3B7Hzs/Sk=
Comodo Trusted Services root
============================
-
-----BEGIN CERTIFICATE-----
MIIEQzCCAyugAwIBAgIBATANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJHQjEb
MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow
@@ -1237,9 +1091,275 @@ xqFx7D+gIIxmOom0jtTYsU0lR+4viMi14QVFwL4Ucd56/Y57fU0IlqUSc/Atyjcn
dBInTMu2l+nZrghtWjlA3QVHdWpaIbOjGM9O9y5Xt5hwXsjEeLBi
-----END CERTIFICATE-----
+QuoVadis Root CA
+================
+-----BEGIN CERTIFICATE-----
+MIIF0DCCBLigAwIBAgIEOrZQizANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJC
+TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDElMCMGA1UECxMcUm9vdCBDZXJ0
+aWZpY2F0aW9uIEF1dGhvcml0eTEuMCwGA1UEAxMlUXVvVmFkaXMgUm9vdCBDZXJ0
+aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wMTAzMTkxODMzMzNaFw0yMTAzMTcxODMz
+MzNaMH8xCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMSUw
+IwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MS4wLAYDVQQDEyVR
+dW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG
+9w0BAQEFAAOCAQ8AMIIBCgKCAQEAv2G1lVO6V/z68mcLOhrfEYBklbTRvM16z/Yp
+li4kVEAkOPcahdxYTMukJ0KX0J+DisPkBgNbAKVRHnAEdOLB1Dqr1607BxgFjv2D
+rOpm2RgbaIr1VxqYuvXtdj182d6UajtLF8HVj71lODqV0D1VNk7feVcxKh7YWWVJ
+WCCYfqtffp/p1k3sg3Spx2zY7ilKhSoGFPlU5tPaZQeLYzcS19Dsw3sgQUSj7cug
+F+FxZc4dZjH3dgEZyH0DWLaVSR2mEiboxgx24ONmy+pdpibu5cxfvWenAScOospU
+xbF6lR1xHkopigPcakXBpBlebzbNw6Kwt/5cOOJSvPhEQ+aQuwIDAQABo4ICUjCC
+Ak4wPQYIKwYBBQUHAQEEMTAvMC0GCCsGAQUFBzABhiFodHRwczovL29jc3AucXVv
+dmFkaXNvZmZzaG9yZS5jb20wDwYDVR0TAQH/BAUwAwEB/zCCARoGA1UdIASCAREw
+ggENMIIBCQYJKwYBBAG+WAABMIH7MIHUBggrBgEFBQcCAjCBxxqBxFJlbGlhbmNl
+IG9uIHRoZSBRdW9WYWRpcyBSb290IENlcnRpZmljYXRlIGJ5IGFueSBwYXJ0eSBh
+c3N1bWVzIGFjY2VwdGFuY2Ugb2YgdGhlIHRoZW4gYXBwbGljYWJsZSBzdGFuZGFy
+ZCB0ZXJtcyBhbmQgY29uZGl0aW9ucyBvZiB1c2UsIGNlcnRpZmljYXRpb24gcHJh
+Y3RpY2VzLCBhbmQgdGhlIFF1b1ZhZGlzIENlcnRpZmljYXRlIFBvbGljeS4wIgYI
+KwYBBQUHAgEWFmh0dHA6Ly93d3cucXVvdmFkaXMuYm0wHQYDVR0OBBYEFItLbe3T
+KbkGGew5Oanwl4Rqy+/fMIGuBgNVHSMEgaYwgaOAFItLbe3TKbkGGew5Oanwl4Rq
+y+/foYGEpIGBMH8xCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1p
+dGVkMSUwIwYDVQQLExxSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MS4wLAYD
+VQQDEyVRdW9WYWRpcyBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggQ6tlCL
+MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOCAQEAitQUtf70mpKnGdSk
+fnIYj9lofFIk3WdvOXrEql494liwTXCYhGHoG+NpGA7O+0dQoE7/8CQfvbLO9Sf8
+7C9TqnN7Az10buYWnuulLsS/VidQK2K6vkscPFVcQR0kvoIgR13VRH56FmjffU1R
+cHhXHTMe/QKZnAzNCgVPx7uOpHX6Sm2xgI4JVrmcGmD+XcHXetwReNDWXcG31a0y
+mQM6isxUJTkxgXsTIlG6Rmyhu576BGxJJnSP0nPrzDCi5upZIof4l/UO/erMkqQW
+xFIY6iHOsfHmhIHluqmGKPJDWl0Snawe2ajlCmqnf6CHKc/yiU3U7MXi5nrQNiOK
+SnQ2+Q==
+-----END CERTIFICATE-----
+
+QuoVadis Root CA 2
+==================
+-----BEGIN CERTIFICATE-----
+MIIFtzCCA5+gAwIBAgICBQkwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x
+GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv
+b3QgQ0EgMjAeFw0wNjExMjQxODI3MDBaFw0zMTExMjQxODIzMzNaMEUxCzAJBgNV
+BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W
+YWRpcyBSb290IENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCa
+GMpLlA0ALa8DKYrwD4HIrkwZhR0In6spRIXzL4GtMh6QRr+jhiYaHv5+HBg6XJxg
+Fyo6dIMzMH1hVBHL7avg5tKifvVrbxi3Cgst/ek+7wrGsxDp3MJGF/hd/aTa/55J
+WpzmM+Yklvc/ulsrHHo1wtZn/qtmUIttKGAr79dgw8eTvI02kfN/+NsRE8Scd3bB
+rrcCaoF6qUWD4gXmuVbBlDePSHFjIuwXZQeVikvfj8ZaCuWw419eaxGrDPmF60Tp
++ARz8un+XJiM9XOva7R+zdRcAitMOeGylZUtQofX1bOQQ7dsE/He3fbE+Ik/0XX1
+ksOR1YqI0JDs3G3eicJlcZaLDQP9nL9bFqyS2+r+eXyt66/3FsvbzSUr5R/7mp/i
+Ucw6UwxI5g69ybR2BlLmEROFcmMDBOAENisgGQLodKcftslWZvB1JdxnwQ5hYIiz
+PtGo/KPaHbDRsSNU30R2be1B2MGyIrZTHN81Hdyhdyox5C315eXbyOD/5YDXC2Og
+/zOhD7osFRXql7PSorW+8oyWHhqPHWykYTe5hnMz15eWniN9gqRMgeKh0bpnX5UH
+oycR7hYQe7xFSkyyBNKr79X9DFHOUGoIMfmR2gyPZFwDwzqLID9ujWc9Otb+fVuI
+yV77zGHcizN300QyNQliBJIWENieJ0f7OyHj+OsdWwIDAQABo4GwMIGtMA8GA1Ud
+EwEB/wQFMAMBAf8wCwYDVR0PBAQDAgEGMB0GA1UdDgQWBBQahGK8SEwzJQTU7tD2
+A8QZRtGUazBuBgNVHSMEZzBlgBQahGK8SEwzJQTU7tD2A8QZRtGUa6FJpEcwRTEL
+MAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMT
+ElF1b1ZhZGlzIFJvb3QgQ0EgMoICBQkwDQYJKoZIhvcNAQEFBQADggIBAD4KFk2f
+BluornFdLwUvZ+YTRYPENvbzwCYMDbVHZF34tHLJRqUDGCdViXh9duqWNIAXINzn
+g/iN/Ae42l9NLmeyhP3ZRPx3UIHmfLTJDQtyU/h2BwdBR5YM++CCJpNVjP4iH2Bl
+fF/nJrP3MpCYUNQ3cVX2kiF495V5+vgtJodmVjB3pjd4M1IQWK4/YY7yarHvGH5K
+WWPKjaJW1acvvFYfzznB4vsKqBUsfU16Y8Zsl0Q80m/DShcK+JDSV6IZUaUtl0Ha
+B0+pUNqQjZRG4T7wlP0QADj1O+hA4bRuVhogzG9Yje0uRY/W6ZM/57Es3zrWIozc
+hLsib9D45MY56QSIPMO661V6bYCZJPVsAfv4l7CUW+v90m/xd2gNNWQjrLhVoQPR
+TUIZ3Ph1WVaj+ahJefivDrkRoHy3au000LYmYjgahwz46P0u05B/B5EqHdZ+XIWD
+mbA4CD/pXvk1B+TJYm5Xf6dQlfe6yJvmjqIBxdZmv3lh8zwc4bmCXF2gw+nYSL0Z
+ohEUGW6yhhtoPkg3Goi3XZZenMfvJ2II4pEZXNLxId26F0KCl3GBUzGpn/Z9Yr9y
+4aOTHcyKJloJONDO1w2AFrR4pTqHTI2KpdVGl/IsELm8VCLAAVBpQ570su9t+Oza
+8eOx79+Rj1QqCyXBJhnEUhAFZdWCEOrCMc0u
+-----END CERTIFICATE-----
+
+QuoVadis Root CA 3
+==================
+-----BEGIN CERTIFICATE-----
+MIIGnTCCBIWgAwIBAgICBcYwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x
+GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv
+b3QgQ0EgMzAeFw0wNjExMjQxOTExMjNaFw0zMTExMjQxOTA2NDRaMEUxCzAJBgNV
+BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W
+YWRpcyBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDM
+V0IWVJzmmNPTTe7+7cefQzlKZbPoFog02w1ZkXTPkrgEQK0CSzGrvI2RaNggDhoB
+4hp7Thdd4oq3P5kazethq8Jlph+3t723j/z9cI8LoGe+AaJZz3HmDyl2/7FWeUUr
+H556VOijKTVopAFPD6QuN+8bv+OPEKhyq1hX51SGyMnzW9os2l2ObjyjPtr7guXd
+8lyyBTNvijbO0BNO/79KDDRMpsMhvVAEVeuxu537RR5kFd5VAYwCdrXLoT9Cabwv
+vWhDFlaJKjdhkf2mrk7AyxRllDdLkgbvBNDInIjbC3uBr7E9KsRlOni27tyAsdLT
+mZw67mtaa7ONt9XOnMK+pUsvFrGeaDsGb659n/je7Mwpp5ijJUMv7/FfJuGITfhe
+btfZFG4ZM2mnO4SJk8RTVROhUXhA+LjJou57ulJCg54U7QVSWllWp5f8nT8KKdjc
+T5EOE7zelaTfi5m+rJsziO+1ga8bxiJTyPbH7pcUsMV8eFLI8M5ud2CEpukqdiDt
+WAEXMJPpGovgc2PZapKUSU60rUqFxKMiMPwJ7Wgic6aIDFUhWMXhOp8q3crhkODZ
+c6tsgLjoC2SToJyMGf+z0gzskSaHirOi4XCPLArlzW1oUevaPwV/izLmE1xr/l9A
+4iLItLRkT9a6fUg+qGkM17uGcclzuD87nSVL2v9A6wIDAQABo4IBlTCCAZEwDwYD
+VR0TAQH/BAUwAwEB/zCB4QYDVR0gBIHZMIHWMIHTBgkrBgEEAb5YAAMwgcUwgZMG
+CCsGAQUFBwICMIGGGoGDQW55IHVzZSBvZiB0aGlzIENlcnRpZmljYXRlIGNvbnN0
+aXR1dGVzIGFjY2VwdGFuY2Ugb2YgdGhlIFF1b1ZhZGlzIFJvb3QgQ0EgMyBDZXJ0
+aWZpY2F0ZSBQb2xpY3kgLyBDZXJ0aWZpY2F0aW9uIFByYWN0aWNlIFN0YXRlbWVu
+dC4wLQYIKwYBBQUHAgEWIWh0dHA6Ly93d3cucXVvdmFkaXNnbG9iYWwuY29tL2Nw
+czALBgNVHQ8EBAMCAQYwHQYDVR0OBBYEFPLAE+CCQz777i9nMpY1XNu4ywLQMG4G
+A1UdIwRnMGWAFPLAE+CCQz777i9nMpY1XNu4ywLQoUmkRzBFMQswCQYDVQQGEwJC
+TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDEbMBkGA1UEAxMSUXVvVmFkaXMg
+Um9vdCBDQSAzggIFxjANBgkqhkiG9w0BAQUFAAOCAgEAT62gLEz6wPJv92ZVqyM0
+7ucp2sNbtrCD2dDQ4iH782CnO11gUyeim/YIIirnv6By5ZwkajGxkHon24QRiSem
+d1o417+shvzuXYO8BsbRd2sPbSQvS3pspweWyuOEn62Iix2rFo1bZhfZFvSLgNLd
++LJ2w/w4E6oM3kJpK27zPOuAJ9v1pkQNn1pVWQvVDVJIxa6f8i+AxeoyUDUSly7B
+4f/xI4hROJ/yZlZ25w9Rl6VSDE1JUZU2Pb+iSwwQHYaZTKrzchGT5Or2m9qoXadN
+t54CrnMAyNojA+j56hl0YgCUyyIgvpSnWbWCar6ZeXqp8kokUvd0/bpO5qgdAm6x
+DYBEwa7TIzdfu4V8K5Iu6H6li92Z4b8nby1dqnuH/grdS/yO9SbkbnBCbjPsMZ57
+k8HkyWkaPcBrTiJt7qtYTcbQQcEr6k8Sh17rRdhs9ZgC06DYVYoGmRmioHfRMJ6s
+zHXug/WwYjnPbFfiTNKRCw51KBuav/0aQ/HKd/s7j2G4aSgWQgRecCocIdiP4b0j
+Wy10QJLZYxkNc91pvGJHvOB0K7Lrfb5BG7XARsWhIstfTsEokt4YutUqKLsRixeT
+mJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK
+4SVhM7JZG+Ju1zdXtg2pEto=
+-----END CERTIFICATE-----
+
+Security Communication Root CA
+==============================
+-----BEGIN CERTIFICATE-----
+MIIDWjCCAkKgAwIBAgIBADANBgkqhkiG9w0BAQUFADBQMQswCQYDVQQGEwJKUDEY
+MBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYDVQQLEx5TZWN1cml0eSBDb21t
+dW5pY2F0aW9uIFJvb3RDQTEwHhcNMDMwOTMwMDQyMDQ5WhcNMjMwOTMwMDQyMDQ5
+WjBQMQswCQYDVQQGEwJKUDEYMBYGA1UEChMPU0VDT00gVHJ1c3QubmV0MScwJQYD
+VQQLEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTEwggEiMA0GCSqGSIb3
+DQEBAQUAA4IBDwAwggEKAoIBAQCzs/5/022x7xZ8V6UMbXaKL0u/ZPtM7orw8yl8
+9f/uKuDp6bpbZCKamm8sOiZpUQWZJtzVHGpxxpp9Hp3dfGzGjGdnSj74cbAZJ6kJ
+DKaVv0uMDPpVmDvY6CKhS3E4eayXkmmziX7qIWgGmBSWh9JhNrxtJ1aeV+7AwFb9
+Ms+k2Y7CI9eNqPPYJayX5HA49LY6tJ07lyZDo6G8SVlyTCMwhwFY9k6+HGhWZq/N
+QV3Is00qVUarH9oe4kA92819uZKAnDfdDJZkndwi92SL32HeFZRSFaB9UslLqCHJ
+xrHty8OVYNEP8Ktw+N/LTX7s1vqr2b1/VPKl6Xn62dZ2JChzAgMBAAGjPzA9MB0G
+A1UdDgQWBBSgc0mZaNyFW2XjmygvV5+9M7wHSDALBgNVHQ8EBAMCAQYwDwYDVR0T
+AQH/BAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEAaECpqLvkT115swW1F7NgE+vG
+kl3g0dNq/vu+m22/xwVtWSDEHPC32oRYAmP6SBbvT6UL90qY8j+eG61Ha2POCEfr
+Uj94nK9NrvjVT8+amCoQQTlSxN3Zmw7vkwGusi7KaEIkQmywszo+zenaSMQVy+n5
+Bw+SUEmK3TGXX8npN6o7WWWXlDLJs58+OmJYxUmtYg5xpTKqL8aJdkNAExNnPaJU
+JRDL8Try2frbSVa7pv6nQTXD4IhhyYjH3zYQIphZ6rBK+1YWc26sTfcioU+tHXot
+RSflMMFe8toTyyVCUZVHA4xsIcx0Qu1T/zOLjw9XARYvz6buyXAiFL39vmwLAw==
+-----END CERTIFICATE-----
+
+Sonera Class 1 Root CA
+======================
+-----BEGIN CERTIFICATE-----
+MIIDIDCCAgigAwIBAgIBJDANBgkqhkiG9w0BAQUFADA5MQswCQYDVQQGEwJGSTEP
+MA0GA1UEChMGU29uZXJhMRkwFwYDVQQDExBTb25lcmEgQ2xhc3MxIENBMB4XDTAx
+MDQwNjEwNDkxM1oXDTIxMDQwNjEwNDkxM1owOTELMAkGA1UEBhMCRkkxDzANBgNV
+BAoTBlNvbmVyYTEZMBcGA1UEAxMQU29uZXJhIENsYXNzMSBDQTCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBALWJHytPZwp5/8Ue+H887dF+2rDNbS82rDTG
+29lkFwhjMDMiikzujrsPDUJVyZ0upe/3p4zDq7mXy47vPxVnqIJyY1MPQYx9EJUk
+oVqlBvqSV536pQHydekfvFYmUk54GWVYVQNYwBSujHxVX3BbdyMGNpfzJLWaRpXk
+3w0LBUXl0fIdgrvGE+D+qnr9aTCU89JFhfzyMlsy3uhsXR/LpCJ0sICOXZT3BgBL
+qdReLjVQCfOAl/QMF6452F/NM8EcyonCIvdFEu1eEpOdY6uCLrnrQkFEy0oaAIIN
+nvmLVz5MxxftLItyM19yejhW1ebZrgUaHXVFsculJRwSVzb9IjcCAwEAAaMzMDEw
+DwYDVR0TAQH/BAUwAwEB/zARBgNVHQ4ECgQIR+IMi/ZTiFIwCwYDVR0PBAQDAgEG
+MA0GCSqGSIb3DQEBBQUAA4IBAQCLGrLJXWG04bkruVPRsoWdd44W7hE928Jj2VuX
+ZfsSZ9gqXLar5V7DtxYvyOirHYr9qxp81V9jz9yw3Xe5qObSIjiHBxTZ/75Wtf0H
+DjxVyhbMp6Z3N/vbXB9OWQaHowND9Rart4S9Tu+fMTfwRvFAttEMpWT4Y14h21VO
+TzF2nBBhjrZTOqMRvq9tfB69ri3iDGnHhVNoomG6xT60eVR4ngrHAr5i0RGCS2Uv
+kVrCqIexVmiUefkl98HVrhq4uz2PqYo4Ffdz0Fpg0YCw8NzVUM1O7pJIae2yIx4w
+zMiUyLb1O4Z/P6Yun/Y+LLWSlj7fLJOK/4GMDw9ZIRlXvVWa
+-----END CERTIFICATE-----
+
+Sonera Class 2 Root CA
+======================
+-----BEGIN CERTIFICATE-----
+MIIDIDCCAgigAwIBAgIBHTANBgkqhkiG9w0BAQUFADA5MQswCQYDVQQGEwJGSTEP
+MA0GA1UEChMGU29uZXJhMRkwFwYDVQQDExBTb25lcmEgQ2xhc3MyIENBMB4XDTAx
+MDQwNjA3Mjk0MFoXDTIxMDQwNjA3Mjk0MFowOTELMAkGA1UEBhMCRkkxDzANBgNV
+BAoTBlNvbmVyYTEZMBcGA1UEAxMQU29uZXJhIENsYXNzMiBDQTCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAJAXSjWdyvANlsdE+hY3/Ei9vX+ALTU74W+o
+Z6m/AxxNjG8yR9VBaKQTBME1DJqEQ/xcHf+Js+gXGM2RX/uJ4+q/Tl18GybTdXnt
+5oTjV+WtKcT0OijnpXuENmmz/V52vaMtmdOQTiMofRhj8VQ7Jp12W5dCsv+u8E7s
+3TmVToMGf+dJQMjFAbJUWmYdPfz56TwKnoG4cPABi+QjVHzIrviQHgCWctRUz2Ej
+vOr7nQKV0ba5cTppCD8PtOFCx4j1P5iop7oc4HFx71hXgVB6XGt0Rg6DA5jDjqhu
+8nYybieDwnPz3BjotJPqdURrBGAgcVeHnfO+oJAjPYok4doh28MCAwEAAaMzMDEw
+DwYDVR0TAQH/BAUwAwEB/zARBgNVHQ4ECgQISqCqWITTXjwwCwYDVR0PBAQDAgEG
+MA0GCSqGSIb3DQEBBQUAA4IBAQBazof5FnIVV0sd2ZvnoiYw7JNn39Yt0jSv9zil
+zqsWuasvfDXLrNAPtEwr/IDva4yRXzZ299uzGxnq9LIR/WFxRL8oszodv7ND6J+/
+3DEIcbCdjdY0RzKQxmUk96BKfARzjzlvF4xytb1LyHr4e4PDKE6cCepnP7JnBBvD
+FNr450kkkdAdavphOe9r5yF1BgfYErQhIHBCcYHaPJo2vqZbDWpsmh+Re/n570K6
+Tk6ezAyNlNzZRZxe7EJQY670XcSxEtzKO6gunRRaBXW37Ndj4ro1tgQIkejanZz2
+ZrUYrAqmVCY0M9IbwdR/GjqOC6oybtv8TyWf2TLHllpwrN9M
+-----END CERTIFICATE-----
+
+Staat der Nederlanden Root CA
+=============================
+-----BEGIN CERTIFICATE-----
+MIIDujCCAqKgAwIBAgIEAJiWijANBgkqhkiG9w0BAQUFADBVMQswCQYDVQQGEwJO
+TDEeMBwGA1UEChMVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSYwJAYDVQQDEx1TdGFh
+dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQTAeFw0wMjEyMTcwOTIzNDlaFw0xNTEy
+MTYwOTE1MzhaMFUxCzAJBgNVBAYTAk5MMR4wHAYDVQQKExVTdGFhdCBkZXIgTmVk
+ZXJsYW5kZW4xJjAkBgNVBAMTHVN0YWF0IGRlciBOZWRlcmxhbmRlbiBSb290IENB
+MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAmNK1URF6gaYUmHFtvszn
+ExvWJw56s2oYHLZhWtVhCb/ekBPHZ+7d89rFDBKeNVU+LCeIQGv33N0iYfXCxw71
+9tV2U02PjLwYdjeFnejKScfST5gTCaI+Ioicf9byEGW07l8Y1Rfj+MX94p2i71MO
+hXeiD+EwR+4A5zN9RGcaC1Hoi6CeUJhoNFIfLm0B8mBF8jHrqTFoKbt6QZ7GGX+U
+tFE5A3+y3qcym7RHjm+0Sq7lr7HcsBthvJly3uSJt3omXdozSVtSnA71iq3DuD3o
+BmrC1SoLbHuEvVYFy4ZlkuxEK7COudxwC0barbxjiDn622r+I/q85Ej0ZytqERAh
+SQIDAQABo4GRMIGOMAwGA1UdEwQFMAMBAf8wTwYDVR0gBEgwRjBEBgRVHSAAMDww
+OgYIKwYBBQUHAgEWLmh0dHA6Ly93d3cucGtpb3ZlcmhlaWQubmwvcG9saWNpZXMv
+cm9vdC1wb2xpY3kwDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBSofeu8Y6R0E3QA
+7Jbg0zTBLL9s+DANBgkqhkiG9w0BAQUFAAOCAQEABYSHVXQ2YcG70dTGFagTtJ+k
+/rvuFbQvBgwp8qiSpGEN/KtcCFtREytNwiphyPgJWPwtArI5fZlmgb9uXJVFIGzm
+eafR2Bwp/MIgJ1HI8XxdNGdphREwxgDS1/PTfLbwMVcoEoJz6TMvplW0C5GUR5z6
+u3pCMuiufi3IvKwUv9kP2Vv8wfl6leF9fpb8cbDCTMjfRTTJzg3ynGQI0DvDKcWy
+7ZAEwbEpkcUwb8GpcjPM/l0WFywRaed+/sWDCN+83CI6LiBpIzlWYGeQiy52OfsR
+iJf2fL1LuCAWZwWN4jvBcj+UlTfHXbme2JOhF4//DGYVwSR8MnwDHTuhWEUykw==
+-----END CERTIFICATE-----
+
+TDC Internet Root CA
+====================
+-----BEGIN CERTIFICATE-----
+MIIEKzCCAxOgAwIBAgIEOsylTDANBgkqhkiG9w0BAQUFADBDMQswCQYDVQQGEwJE
+SzEVMBMGA1UEChMMVERDIEludGVybmV0MR0wGwYDVQQLExRUREMgSW50ZXJuZXQg
+Um9vdCBDQTAeFw0wMTA0MDUxNjMzMTdaFw0yMTA0MDUxNzAzMTdaMEMxCzAJBgNV
+BAYTAkRLMRUwEwYDVQQKEwxUREMgSW50ZXJuZXQxHTAbBgNVBAsTFFREQyBJbnRl
+cm5ldCBSb290IENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAxLhA
+vJHVYx/XmaCLDEAedLdInUaMArLgJF/wGROnN4NrXceO+YQwzho7+vvOi20jxsNu
+Zp+Jpd/gQlBn+h9sHvTQBda/ytZO5GhgbEaqHF1j4QeGDmUApy6mcca8uYGoOn0a
+0vnRrEvLznWv3Hv6gXPU/Lq9QYjUdLP5Xjg6PEOo0pVOd20TDJ2PeAG3WiAfAzc1
+4izbSysseLlJ28TQx5yc5IogCSEWVmb/Bexb4/DPqyQkXsN/cHoSxNK1EKC2IeGN
+eGlVRGn1ypYcNIUXJXfi9i8nmHj9eQY6otZaQ8H/7AQ77hPv01ha/5Lr7K7a8jcD
+R0G2l8ktCkEiu7vmpwIDAQABo4IBJTCCASEwEQYJYIZIAYb4QgEBBAQDAgAHMGUG
+A1UdHwReMFwwWqBYoFakVDBSMQswCQYDVQQGEwJESzEVMBMGA1UEChMMVERDIElu
+dGVybmV0MR0wGwYDVQQLExRUREMgSW50ZXJuZXQgUm9vdCBDQTENMAsGA1UEAxME
+Q1JMMTArBgNVHRAEJDAigA8yMDAxMDQwNTE2MzMxN1qBDzIwMjEwNDA1MTcwMzE3
+WjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAUbGQBx/2FbazI2p5QCIUItTxWqFAw
+HQYDVR0OBBYEFGxkAcf9hW2syNqeUAiFCLU8VqhQMAwGA1UdEwQFMAMBAf8wHQYJ
+KoZIhvZ9B0EABBAwDhsIVjUuMDo0LjADAgSQMA0GCSqGSIb3DQEBBQUAA4IBAQBO
+Q8zR3R0QGwZ/t6T609lN+yOfI1Rb5osvBCiLtSdtiaHsmGnc540mgwV5dOy0uaOX
+wTUA/RXaOYE6lTGQ3pfphqiZdwzlWqCE/xIWrG64jcN7ksKsLtB9KOy282A4aW8+
+2ARVPp7MVdK6/rtHBNcK2RYKNCn1WBPVT8+PVkuzHu7TmHnaCB4Mb7j4Fifvwm89
+9qNLPg7kbWzbO0ESm70NRyN/PErQr8Cv9u8btRXE64PECV90i9kR+8JWsTz4cMo0
+jUNAE4z9mQNUecYu6oah9jrUCbz0vGbMPVjQV0kK7iXiQe4T+Zs4NNEA9X7nlB38
+aQNiuJkFBT1reBK9sG9l
+-----END CERTIFICATE-----
+
+TDC OCES Root CA
+================
+-----BEGIN CERTIFICATE-----
+MIIFGTCCBAGgAwIBAgIEPki9xDANBgkqhkiG9w0BAQUFADAxMQswCQYDVQQGEwJE
+SzEMMAoGA1UEChMDVERDMRQwEgYDVQQDEwtUREMgT0NFUyBDQTAeFw0wMzAyMTEw
+ODM5MzBaFw0zNzAyMTEwOTA5MzBaMDExCzAJBgNVBAYTAkRLMQwwCgYDVQQKEwNU
+REMxFDASBgNVBAMTC1REQyBPQ0VTIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A
+MIIBCgKCAQEArGL2YSCyz8DGhdfjeebM7fI5kqSXLmSjhFuHnEz9pPPEXyG9VhDr
+2y5h7JNp46PMvZnDBfwGuMo2HP6QjklMxFaaL1a8z3sM8W9Hpg1DTeLpHTk0zY0s
+2RKY+ePhwUp8hjjEqcRhiNJerxomTdXkoCJHhNlktxmW/OwZ5LKXJk5KTMuPJItU
+GBxIYXvViGjaXbXqzRowwYCDdlCqT9HU3Tjw7xb04QxQBr/q+3pJoSgrHPb8FTKj
+dGqPqcNiKXEx5TukYBdedObaE+3pHx8b0bJoc8YQNHVGEBDjkAB2QMuLt0MJIf+r
+TpPGWOmlgtt3xDqZsXKVSQTwtyv6e1mO3QIDAQABo4ICNzCCAjMwDwYDVR0TAQH/
+BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwgewGA1UdIASB5DCB4TCB3gYIKoFQgSkB
+AQEwgdEwLwYIKwYBBQUHAgEWI2h0dHA6Ly93d3cuY2VydGlmaWthdC5kay9yZXBv
+c2l0b3J5MIGdBggrBgEFBQcCAjCBkDAKFgNUREMwAwIBARqBgUNlcnRpZmlrYXRl
+ciBmcmEgZGVubmUgQ0EgdWRzdGVkZXMgdW5kZXIgT0lEIDEuMi4yMDguMTY5LjEu
+MS4xLiBDZXJ0aWZpY2F0ZXMgZnJvbSB0aGlzIENBIGFyZSBpc3N1ZWQgdW5kZXIg
+T0lEIDEuMi4yMDguMTY5LjEuMS4xLjARBglghkgBhvhCAQEEBAMCAAcwgYEGA1Ud
+HwR6MHgwSKBGoESkQjBAMQswCQYDVQQGEwJESzEMMAoGA1UEChMDVERDMRQwEgYD
+VQQDEwtUREMgT0NFUyBDQTENMAsGA1UEAxMEQ1JMMTAsoCqgKIYmaHR0cDovL2Ny
+bC5vY2VzLmNlcnRpZmlrYXQuZGsvb2Nlcy5jcmwwKwYDVR0QBCQwIoAPMjAwMzAy
+MTEwODM5MzBagQ8yMDM3MDIxMTA5MDkzMFowHwYDVR0jBBgwFoAUYLWF7FZkfhIZ
+J2cdUBVLc647+RIwHQYDVR0OBBYEFGC1hexWZH4SGSdnHVAVS3OuO/kSMB0GCSqG
+SIb2fQdBAAQQMA4bCFY2LjA6NC4wAwIEkDANBgkqhkiG9w0BAQUFAAOCAQEACrom
+JkbTc6gJ82sLMJn9iuFXehHTuJTXCRBuo7E4A9G28kNBKWKnctj7fAXmMXAnVBhO
+inxO5dHKjHiIzxvTkIvmI/gLDjNDfZziChmPyQE+dF10yYscA+UYyAFMP8uXBV2Y
+caaYb7Z8vTd/vuGTJW1v8AqtFxjhA7wHKcitJuj4YfD9IQl+mo6paH1IYnK9AOoB
+mbgGglGBTvH1tJFUuSN6AJqfXY3gPGS5GhKSKseCRHI53OI8xthV9RVOyAUO28bQ
+YqbsFbS1AoLbrIyigfCbmTH1ICCoiGEKB5+U/NDXG8wuF/MEJ3Zn61SD/aSQfgY9
+BKNDLdr8C2LqL19iUw==
+-----END CERTIFICATE-----
+
UTN DATACorp SGC Root CA
========================
-
-----BEGIN CERTIFICATE-----
MIIEXjCCA0agAwIBAgIQRL4Mi1AAIbQR0ypoBqmtaTANBgkqhkiG9w0BAQUFADCB
kzELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2Ug
@@ -1267,9 +1387,38 @@ KWss5nbZqSl9Mt3JNjy9rjXxEZ4du5A/EkdOjtd+D2JzHVImOBwYSf0wdJrE5SIv
mfnGV/TJVTl4uix5yaaIK/QI
-----END CERTIFICATE-----
+UTN USERFirst Email Root CA
+===========================
+-----BEGIN CERTIFICATE-----
+MIIEojCCA4qgAwIBAgIQRL4Mi1AAJLQR0zYlJWfJiTANBgkqhkiG9w0BAQUFADCB
+rjELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2Ug
+Q2l0eTEeMBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExho
+dHRwOi8vd3d3LnVzZXJ0cnVzdC5jb20xNjA0BgNVBAMTLVVUTi1VU0VSRmlyc3Qt
+Q2xpZW50IEF1dGhlbnRpY2F0aW9uIGFuZCBFbWFpbDAeFw05OTA3MDkxNzI4NTBa
+Fw0xOTA3MDkxNzM2NThaMIGuMQswCQYDVQQGEwJVUzELMAkGA1UECBMCVVQxFzAV
+BgNVBAcTDlNhbHQgTGFrZSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5l
+dHdvcmsxITAfBgNVBAsTGGh0dHA6Ly93d3cudXNlcnRydXN0LmNvbTE2MDQGA1UE
+AxMtVVROLVVTRVJGaXJzdC1DbGllbnQgQXV0aGVudGljYXRpb24gYW5kIEVtYWls
+MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsjmFpPJ9q0E7YkY3rs3B
+YHW8OWX5ShpHornMSMxqmNVNNRm5pELlzkniii8efNIxB8dOtINknS4p1aJkxIW9
+hVE1eaROaJB7HHqkkqgX8pgV8pPMyaQylbsMTzC9mKALi+VuG6JG+ni8om+rWV6l
+L8/K2m2qL+usobNqqrcuZzWLeeEeaYji5kbNoKXqvgvOdjp6Dpvq/NonWz1zHyLm
+SGHGTPNpsaguG7bUMSAsvIKKjqQOpdeJQ/wWWq8dcdcRWdq6hw2v+vPhwvCkxWeM
+1tZUOt4KpLoDd7NlyP0e03RiqhjKaJMeoYV+9Udly/hNVyh00jT/MLbu9mIwFIws
+6wIDAQABo4G5MIG2MAsGA1UdDwQEAwIBxjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud
+DgQWBBSJgmd9xJ0mcABLtFBIfN49rgRufTBYBgNVHR8EUTBPME2gS6BJhkdodHRw
+Oi8vY3JsLnVzZXJ0cnVzdC5jb20vVVROLVVTRVJGaXJzdC1DbGllbnRBdXRoZW50
+aWNhdGlvbmFuZEVtYWlsLmNybDAdBgNVHSUEFjAUBggrBgEFBQcDAgYIKwYBBQUH
+AwQwDQYJKoZIhvcNAQEFBQADggEBALFtYV2mGn98q0rkMPxTbyUkxsrt4jFcKw7u
+7mFVbwQ+zznexRtJlOTrIEy05p5QLnLZjfWqo7NK2lYcYJeA3IKirUq9iiv/Cwm0
+xtcgBEXkzYABurorbs6q15L+5K/r9CYdFip/bDCVNy8zEqx/3cfREYxRmLLQo5HQ
+rfafnoOTHh1CuEava2bwm3/q4wMC5QJRwarVNZ1yQAOJujEdxRBoUp7fooXFXAim
+eOZTT7Hot9MUnpOmw2TjrH5xzbyf6QMbzPvprDHBr3wVdAKZw7JHpsIyYdfHb0gk
+USeh1YdV8nuPmD0Wnu51tvjQjvLzxq4oW6fw8zYX/MMF08oDSlQ=
+-----END CERTIFICATE-----
+
UTN USERFirst Hardware Root CA
==============================
-
-----BEGIN CERTIFICATE-----
MIIEdDCCA1ygAwIBAgIQRL4Mi1AAJLQR0zYq/mUK/TANBgkqhkiG9w0BAQUFADCB
lzELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2Ug
@@ -1297,9 +1446,249 @@ CJChicsZUN/KHAG8HQQZexB2lzvukJDKxA4fFm517zP4029bHpbj4HR3dHuKom4t
KqMiDP+JJn1fIytH1xUdqWqeUQ0qUZ6B+dQ7XnASfxAynB67nfhmqA==
-----END CERTIFICATE-----
+UTN USERFirst Object Root CA
+============================
+-----BEGIN CERTIFICATE-----
+MIIEZjCCA06gAwIBAgIQRL4Mi1AAJLQR0zYt4LNfGzANBgkqhkiG9w0BAQUFADCB
+lTELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2Ug
+Q2l0eTEeMBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExho
+dHRwOi8vd3d3LnVzZXJ0cnVzdC5jb20xHTAbBgNVBAMTFFVUTi1VU0VSRmlyc3Qt
+T2JqZWN0MB4XDTk5MDcwOTE4MzEyMFoXDTE5MDcwOTE4NDAzNlowgZUxCzAJBgNV
+BAYTAlVTMQswCQYDVQQIEwJVVDEXMBUGA1UEBxMOU2FsdCBMYWtlIENpdHkxHjAc
+BgNVBAoTFVRoZSBVU0VSVFJVU1QgTmV0d29yazEhMB8GA1UECxMYaHR0cDovL3d3
+dy51c2VydHJ1c3QuY29tMR0wGwYDVQQDExRVVE4tVVNFUkZpcnN0LU9iamVjdDCC
+ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAM6qgT+jo2F4qjEAVZURnicP
+HxzfOpuCaDDASmEd8S8O+r5596Uj71VRloTN2+O5bj4x2AogZ8f02b+U60cEPgLO
+KqJdhwQJ9jCdGIqXsqoc/EHSoTbL+z2RuufZcDX65OeQw5ujm9M89RKZd7G3CeBo
+5hy485RjiGpq/gt2yb70IuRnuasaXnfBhQfdDWy/7gbHd2pBnqcP1/vulBe3/IW+
+pKvEHDHd17bR5PDv3xaPslKT16HUiaEHLr/hARJCHhrh2JU022R5KP+6LhHC5ehb
+kkj7RwvCbNqtMoNB86XlQXD9ZZBt+vpRxPm9lisZBCzTbafc8H9vg2XiaquHhnUC
+AwEAAaOBrzCBrDALBgNVHQ8EBAMCAcYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4E
+FgQU2u1kdBScFDyr3ZmpvVsoTYs8ydgwQgYDVR0fBDswOTA3oDWgM4YxaHR0cDov
+L2NybC51c2VydHJ1c3QuY29tL1VUTi1VU0VSRmlyc3QtT2JqZWN0LmNybDApBgNV
+HSUEIjAgBggrBgEFBQcDAwYIKwYBBQUHAwgGCisGAQQBgjcKAwQwDQYJKoZIhvcN
+AQEFBQADggEBAAgfUrE3RHjb/c652pWWmKpVZIC1WkDdIaXFwfNfLEzIR1pp6ujw
+NTX00CXzyKakh0q9G7FzCL3Uw8q2NbtZhncxzaeAFK4T7/yxSPlrJSUtUbYsbUXB
+mMiKVl0+7kNOPmsnjtA6S4ULX9Ptaqd1y9Fahy85dRNacrACgZ++8A+EVCBibGnU
+4U3GDZlDAQ0Slox4nb9QorFEqmrPF3rPbw/U+CRVX/A0FklmPlBGyWNxODFiuGK5
+81OtbLUrohKqGU8J2l7nk8aOFAj+8DCAGKCGhU3IfdeLA/5u1fedFqySLKAj5ZyR
+Uh+U3xeUc8OzwcFxBSAAeL0TUh2oPs0AH8g=
+-----END CERTIFICATE-----
+
+Camerfirma Chambers of Commerce Root
+====================================
+-----BEGIN CERTIFICATE-----
+MIIEvTCCA6WgAwIBAgIBADANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJFVTEn
+MCUGA1UEChMeQUMgQ2FtZXJmaXJtYSBTQSBDSUYgQTgyNzQzMjg3MSMwIQYDVQQL
+ExpodHRwOi8vd3d3LmNoYW1iZXJzaWduLm9yZzEiMCAGA1UEAxMZQ2hhbWJlcnMg
+b2YgQ29tbWVyY2UgUm9vdDAeFw0wMzA5MzAxNjEzNDNaFw0zNzA5MzAxNjEzNDRa
+MH8xCzAJBgNVBAYTAkVVMScwJQYDVQQKEx5BQyBDYW1lcmZpcm1hIFNBIENJRiBB
+ODI3NDMyODcxIzAhBgNVBAsTGmh0dHA6Ly93d3cuY2hhbWJlcnNpZ24ub3JnMSIw
+IAYDVQQDExlDaGFtYmVycyBvZiBDb21tZXJjZSBSb290MIIBIDANBgkqhkiG9w0B
+AQEFAAOCAQ0AMIIBCAKCAQEAtzZV5aVdGDDg2olUkfzIx1L4L1DZ77F1c2VHfRtb
+unXF/KGIJPov7coISjlUxFF6tdpg6jg8gbLL8bvZkSM/SAFwdakFKq0fcfPJVD0d
+BmpAPrMMhe5cG3nCYsS4No41XQEMIwRHNaqbYE6gZj3LJgqcQKH0XZi/caulAGgq
+7YN6D6IUtdQis4CwPAxaUWktWBiP7Zme8a7ileb2R6jWDA+wWFjbw2Y3npuRVDM3
+0pQcakjJyfKl2qUMI/cjDpwyVV5xnIQFUZot/eZOKjRa3spAN2cMVCFVd9oKDMyX
+roDclDZK9D7ONhMeU+SsTjoF7Nuucpw4i9A5O4kKPnf+dQIBA6OCAUQwggFAMBIG
+A1UdEwEB/wQIMAYBAf8CAQwwPAYDVR0fBDUwMzAxoC+gLYYraHR0cDovL2NybC5j
+aGFtYmVyc2lnbi5vcmcvY2hhbWJlcnNyb290LmNybDAdBgNVHQ4EFgQU45T1sU3p
+26EpW1eLTXYGduHRooowDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIA
+BzAnBgNVHREEIDAegRxjaGFtYmVyc3Jvb3RAY2hhbWJlcnNpZ24ub3JnMCcGA1Ud
+EgQgMB6BHGNoYW1iZXJzcm9vdEBjaGFtYmVyc2lnbi5vcmcwWAYDVR0gBFEwTzBN
+BgsrBgEEAYGHLgoDATA+MDwGCCsGAQUFBwIBFjBodHRwOi8vY3BzLmNoYW1iZXJz
+aWduLm9yZy9jcHMvY2hhbWJlcnNyb290Lmh0bWwwDQYJKoZIhvcNAQEFBQADggEB
+AAxBl8IahsAifJ/7kPMa0QOx7xP5IV8EnNrJpY0nbJaHkb5BkAFyk+cefV/2icZd
+p0AJPaxJRUXcLo0waLIJuvvDL8y6C98/d3tGfToSJI6WjzwFCm/SlCgdbQzALogi
+1djPHRPH8EjX1wWnz8dHnjs8NMiAT9QUu/wNUPf6s+xCX6ndbcj0dc97wXImsQEc
+XCz9ek60AcUFV7nnPKoF2YjpB0ZBzu9Bga5Y34OirsrXdx/nADydb47kMgkdTXg0
+eDQ8lJsm7U9xxhl6vSAiSFr+S30Dt+dYvsYyTnQeaN2oaFuzPu5ifdmA6Ap1erfu
+tGWaIZDgqtCYvDi1czyL+Nw=
+-----END CERTIFICATE-----
+
+Camerfirma Global Chambersign Root
+==================================
+-----BEGIN CERTIFICATE-----
+MIIExTCCA62gAwIBAgIBADANBgkqhkiG9w0BAQUFADB9MQswCQYDVQQGEwJFVTEn
+MCUGA1UEChMeQUMgQ2FtZXJmaXJtYSBTQSBDSUYgQTgyNzQzMjg3MSMwIQYDVQQL
+ExpodHRwOi8vd3d3LmNoYW1iZXJzaWduLm9yZzEgMB4GA1UEAxMXR2xvYmFsIENo
+YW1iZXJzaWduIFJvb3QwHhcNMDMwOTMwMTYxNDE4WhcNMzcwOTMwMTYxNDE4WjB9
+MQswCQYDVQQGEwJFVTEnMCUGA1UEChMeQUMgQ2FtZXJmaXJtYSBTQSBDSUYgQTgy
+NzQzMjg3MSMwIQYDVQQLExpodHRwOi8vd3d3LmNoYW1iZXJzaWduLm9yZzEgMB4G
+A1UEAxMXR2xvYmFsIENoYW1iZXJzaWduIFJvb3QwggEgMA0GCSqGSIb3DQEBAQUA
+A4IBDQAwggEIAoIBAQCicKLQn0KuWxfH2H3PFIP8T8mhtxOviteePgQKkotgVvq0
+Mi+ITaFgCPS3CU6gSS9J1tPfnZdan5QEcOw/Wdm3zGaLmFIoCQLfxS+EjXqXd7/s
+QJ0lcqu1PzKY+7e3/HKE5TWH+VX6ox8Oby4o3Wmg2UIQxvi1RMLQQ3/bvOSiPGpV
+eAp3qdjqGTK3L/5cPxvusZjsyq16aUXjlg9V9ubtdepl6DJWk0aJqCWKZQbua795
+B9Dxt6/tLE2Su8CoX6dnfQTyFQhwrJLWfQTSM/tMtgsL+xrJxI0DqX5c8lCrEqWh
+z0hQpe/SyBoT+rB/sYIcd2oPX9wLlY/vQ37mRQklAgEDo4IBUDCCAUwwEgYDVR0T
+AQH/BAgwBgEB/wIBDDA/BgNVHR8EODA2MDSgMqAwhi5odHRwOi8vY3JsLmNoYW1i
+ZXJzaWduLm9yZy9jaGFtYmVyc2lnbnJvb3QuY3JsMB0GA1UdDgQWBBRDnDafsJ4w
+TcbOX60Qq+UDpfqpFDAOBgNVHQ8BAf8EBAMCAQYwEQYJYIZIAYb4QgEBBAQDAgAH
+MCoGA1UdEQQjMCGBH2NoYW1iZXJzaWducm9vdEBjaGFtYmVyc2lnbi5vcmcwKgYD
+VR0SBCMwIYEfY2hhbWJlcnNpZ25yb290QGNoYW1iZXJzaWduLm9yZzBbBgNVHSAE
+VDBSMFAGCysGAQQBgYcuCgEBMEEwPwYIKwYBBQUHAgEWM2h0dHA6Ly9jcHMuY2hh
+bWJlcnNpZ24ub3JnL2Nwcy9jaGFtYmVyc2lnbnJvb3QuaHRtbDANBgkqhkiG9w0B
+AQUFAAOCAQEAPDtwkfkEVCeR4e3t/mh/YV3lQWVPMvEYBZRqHN4fcNs+ezICNLUM
+bKGKfKX0j//U2K0X1S0E0T9YgOKBWYi+wONGkyT+kL0mojAt6JcmVzWJdJYY9hXi
+ryQZVgICsroPFOrGimbBhkVVi76SvpykBMdJPJ7oKXqJ1/6v/2j1pReQvayZzKWG
+VwlnRtvWFsJG8eSpUPWP0ZIV018+xgBJOm5YstHRJw0lyDL4IBHNfTIzSJRUTN3c
+ecQwn+uOuFW114hcxWokPbLTBQNRxgfvzBRydD1ucs4YKIxKoHflCStFREest2d/
+AYoFWpO+ocH/+OcOZ6RHSXZddZAa9SaP8A==
+-----END CERTIFICATE-----
+
+NetLock Qualified (Class QA) Root
+=================================
+-----BEGIN CERTIFICATE-----
+MIIG0TCCBbmgAwIBAgIBezANBgkqhkiG9w0BAQUFADCByTELMAkGA1UEBhMCSFUx
+ETAPBgNVBAcTCEJ1ZGFwZXN0MScwJQYDVQQKEx5OZXRMb2NrIEhhbG96YXRiaXp0
+b25zYWdpIEtmdC4xGjAYBgNVBAsTEVRhbnVzaXR2YW55a2lhZG9rMUIwQAYDVQQD
+EzlOZXRMb2NrIE1pbm9zaXRldHQgS296amVneXpvaSAoQ2xhc3MgUUEpIFRhbnVz
+aXR2YW55a2lhZG8xHjAcBgkqhkiG9w0BCQEWD2luZm9AbmV0bG9jay5odTAeFw0w
+MzAzMzAwMTQ3MTFaFw0yMjEyMTUwMTQ3MTFaMIHJMQswCQYDVQQGEwJIVTERMA8G
+A1UEBxMIQnVkYXBlc3QxJzAlBgNVBAoTHk5ldExvY2sgSGFsb3phdGJpenRvbnNh
+Z2kgS2Z0LjEaMBgGA1UECxMRVGFudXNpdHZhbnlraWFkb2sxQjBABgNVBAMTOU5l
+dExvY2sgTWlub3NpdGV0dCBLb3pqZWd5em9pIChDbGFzcyBRQSkgVGFudXNpdHZh
+bnlraWFkbzEeMBwGCSqGSIb3DQEJARYPaW5mb0BuZXRsb2NrLmh1MIIBIjANBgkq
+hkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAx1Ilstg91IRVCacbvWy5FPSKAtt2/Goq
+eKvld/Bu4IwjZ9ulZJm53QE+b+8tmjwi8F3JV6BVQX/yQ15YglMxZc4e8ia6AFQe
+r7C8HORSjKAyr7c3sVNnaHRnUPYtLmTeriZ539+Zhqurf4XsoPuAzPS4DB6TRWO5
+3Lhbm+1bOdRfYrCnjnxmOCyqsQhjF2d9zL2z8cM/z1A57dEZgxXbhxInlrfa6uWd
+vLrqOU+L73Sa58XQ0uqGURzk/mQIKAR5BevKxXEOC++r6uwSEaEYBTJp0QwsGj0l
+mT+1fMptsK6ZmfoIYOcZwvK9UdPM0wKswREMgM6r3JSda6M5UzrWhQIDAMV9o4IC
+wDCCArwwEgYDVR0TAQH/BAgwBgEB/wIBBDAOBgNVHQ8BAf8EBAMCAQYwggJ1Bglg
+hkgBhvhCAQ0EggJmFoICYkZJR1lFTEVNISBFemVuIHRhbnVzaXR2YW55IGEgTmV0
+TG9jayBLZnQuIE1pbm9zaXRldHQgU3pvbGdhbHRhdGFzaSBTemFiYWx5emF0YWJh
+biBsZWlydCBlbGphcmFzb2sgYWxhcGphbiBrZXN6dWx0LiBBIG1pbm9zaXRldHQg
+ZWxla3Ryb25pa3VzIGFsYWlyYXMgam9naGF0YXMgZXJ2ZW55ZXN1bGVzZW5laywg
+dmFsYW1pbnQgZWxmb2dhZGFzYW5hayBmZWx0ZXRlbGUgYSBNaW5vc2l0ZXR0IFN6
+b2xnYWx0YXRhc2kgU3phYmFseXphdGJhbiwgYXogQWx0YWxhbm9zIFN6ZXJ6b2Rl
+c2kgRmVsdGV0ZWxla2JlbiBlbG9pcnQgZWxsZW5vcnplc2kgZWxqYXJhcyBtZWd0
+ZXRlbGUuIEEgZG9rdW1lbnR1bW9rIG1lZ3RhbGFsaGF0b2sgYSBodHRwczovL3d3
+dy5uZXRsb2NrLmh1L2RvY3MvIGNpbWVuIHZhZ3kga2VyaGV0b2sgYXogaW5mb0Bu
+ZXRsb2NrLm5ldCBlLW1haWwgY2ltZW4uIFdBUk5JTkchIFRoZSBpc3N1YW5jZSBh
+bmQgdGhlIHVzZSBvZiB0aGlzIGNlcnRpZmljYXRlIGFyZSBzdWJqZWN0IHRvIHRo
+ZSBOZXRMb2NrIFF1YWxpZmllZCBDUFMgYXZhaWxhYmxlIGF0IGh0dHBzOi8vd3d3
+Lm5ldGxvY2suaHUvZG9jcy8gb3IgYnkgZS1tYWlsIGF0IGluZm9AbmV0bG9jay5u
+ZXQwHQYDVR0OBBYEFAlqYhaSsFq7VQ7LdTI6MuWyIckoMA0GCSqGSIb3DQEBBQUA
+A4IBAQCRalCc23iBmz+LQuM7/KbD7kPgz/PigDVJRXYC4uMvBcXxKufAQTPGtpvQ
+MznNwNuhrWw3AkxYQTvyl5LGSKjN5Yo5iWH5Upfpvfb5lHTocQ68d4bDBsxafEp+
+NFAwLvt/MpqNPfMgW/hqyobzMUwsWYACff44yTB1HLdV47yfuqhthCgFdbOLDcCR
+VCHnpgu0mfVRQdzNo0ci2ccBgcTcR08m6h/t280NmPSjnLRzMkqWmf68f8glWPhY
+83ZmiVSkpj7EUFy6iRiCdUgh0k8T6GB+B3bbELVR5qq5aKrN9p2QdRLqOBrKROi3
+macqaJVmlaut74nLYKkGEsaUR+ko
+-----END CERTIFICATE-----
+
+NetLock Notary (Class A) Root
+=============================
+-----BEGIN CERTIFICATE-----
+MIIGfTCCBWWgAwIBAgICAQMwDQYJKoZIhvcNAQEEBQAwga8xCzAJBgNVBAYTAkhV
+MRAwDgYDVQQIEwdIdW5nYXJ5MREwDwYDVQQHEwhCdWRhcGVzdDEnMCUGA1UEChMe
+TmV0TG9jayBIYWxvemF0Yml6dG9uc2FnaSBLZnQuMRowGAYDVQQLExFUYW51c2l0
+dmFueWtpYWRvazE2MDQGA1UEAxMtTmV0TG9jayBLb3pqZWd5em9pIChDbGFzcyBB
+KSBUYW51c2l0dmFueWtpYWRvMB4XDTk5MDIyNDIzMTQ0N1oXDTE5MDIxOTIzMTQ0
+N1owga8xCzAJBgNVBAYTAkhVMRAwDgYDVQQIEwdIdW5nYXJ5MREwDwYDVQQHEwhC
+dWRhcGVzdDEnMCUGA1UEChMeTmV0TG9jayBIYWxvemF0Yml6dG9uc2FnaSBLZnQu
+MRowGAYDVQQLExFUYW51c2l0dmFueWtpYWRvazE2MDQGA1UEAxMtTmV0TG9jayBL
+b3pqZWd5em9pIChDbGFzcyBBKSBUYW51c2l0dmFueWtpYWRvMIIBIjANBgkqhkiG
+9w0BAQEFAAOCAQ8AMIIBCgKCAQEAvHSMD7tM9DceqQWC2ObhbHDqeLVu0ThEDaiD
+zl3S1tWBxdRL51uUcCbbO51qTGL3cfNk1mE7PetzozfZz+qMkjvN9wfcZnSX9EUi
+3fRc4L9t875lM+QVOr/bmJBVOMTtplVjC7B4BPTjbsE/jvxReB+SnoPC/tmwqcm8
+WgD/qaiYdPv2LD4VOQ22BFWoDpggQrOxJa1+mm9dU7GrDPzr4PN6s6iz/0b2Y6LY
+Oph7tqyF/7AlT3Rj5xMHpQqPBffAZG9+pyeAlt7ULoZgx2srXnN7F+eRP2QM2Esi
+NCubMvJIH5+hCoR64sKtlz2O1cH5VqNQ6ca0+pii7pXmKgOM3wIDAQABo4ICnzCC
+ApswDgYDVR0PAQH/BAQDAgAGMBIGA1UdEwEB/wQIMAYBAf8CAQQwEQYJYIZIAYb4
+QgEBBAQDAgAHMIICYAYJYIZIAYb4QgENBIICURaCAk1GSUdZRUxFTSEgRXplbiB0
+YW51c2l0dmFueSBhIE5ldExvY2sgS2Z0LiBBbHRhbGFub3MgU3pvbGdhbHRhdGFz
+aSBGZWx0ZXRlbGVpYmVuIGxlaXJ0IGVsamFyYXNvayBhbGFwamFuIGtlc3p1bHQu
+IEEgaGl0ZWxlc2l0ZXMgZm9seWFtYXRhdCBhIE5ldExvY2sgS2Z0LiB0ZXJtZWtm
+ZWxlbG9zc2VnLWJpenRvc2l0YXNhIHZlZGkuIEEgZGlnaXRhbGlzIGFsYWlyYXMg
+ZWxmb2dhZGFzYW5hayBmZWx0ZXRlbGUgYXogZWxvaXJ0IGVsbGVub3J6ZXNpIGVs
+amFyYXMgbWVndGV0ZWxlLiBBeiBlbGphcmFzIGxlaXJhc2EgbWVndGFsYWxoYXRv
+IGEgTmV0TG9jayBLZnQuIEludGVybmV0IGhvbmxhcGphbiBhIGh0dHBzOi8vd3d3
+Lm5ldGxvY2submV0L2RvY3MgY2ltZW4gdmFneSBrZXJoZXRvIGF6IGVsbGVub3J6
+ZXNAbmV0bG9jay5uZXQgZS1tYWlsIGNpbWVuLiBJTVBPUlRBTlQhIFRoZSBpc3N1
+YW5jZSBhbmQgdGhlIHVzZSBvZiB0aGlzIGNlcnRpZmljYXRlIGlzIHN1YmplY3Qg
+dG8gdGhlIE5ldExvY2sgQ1BTIGF2YWlsYWJsZSBhdCBodHRwczovL3d3dy5uZXRs
+b2NrLm5ldC9kb2NzIG9yIGJ5IGUtbWFpbCBhdCBjcHNAbmV0bG9jay5uZXQuMA0G
+CSqGSIb3DQEBBAUAA4IBAQBIJEb3ulZv+sgoA0BO5TE5ayZrU3/b39/zcT0mwBQO
+xmd7I6gMc90Bu8bKbjc5VdXHjFYgDigKDtIqpLBJUsY4B/6+CgmM0ZjPytoUMaFP
+0jn8DxEsQ8Pdq5PHVT5HfBgaANzze9jyf1JsIPQLX2lS9O74silg6+NJMSEN1rUQ
+QeJBCWziGppWS3cC9qCbmieH6FUpccKQn0V4GuEVZD3QDtigdp+uxdAu6tYPVuxk
+f1qbFFgBJ34TUMdrKuZoPL9coAob4Q566eKAw+np9v1sEZ7Q5SgnK1QyQhSCdeZK
+8CtmdWOMovsEPoMOmzbwGOQmIMOM8CgHrTwXZoi1/baI
+-----END CERTIFICATE-----
+
+NetLock Business (Class B) Root
+===============================
+-----BEGIN CERTIFICATE-----
+MIIFSzCCBLSgAwIBAgIBaTANBgkqhkiG9w0BAQQFADCBmTELMAkGA1UEBhMCSFUx
+ETAPBgNVBAcTCEJ1ZGFwZXN0MScwJQYDVQQKEx5OZXRMb2NrIEhhbG96YXRiaXp0
+b25zYWdpIEtmdC4xGjAYBgNVBAsTEVRhbnVzaXR2YW55a2lhZG9rMTIwMAYDVQQD
+EylOZXRMb2NrIFV6bGV0aSAoQ2xhc3MgQikgVGFudXNpdHZhbnlraWFkbzAeFw05
+OTAyMjUxNDEwMjJaFw0xOTAyMjAxNDEwMjJaMIGZMQswCQYDVQQGEwJIVTERMA8G
+A1UEBxMIQnVkYXBlc3QxJzAlBgNVBAoTHk5ldExvY2sgSGFsb3phdGJpenRvbnNh
+Z2kgS2Z0LjEaMBgGA1UECxMRVGFudXNpdHZhbnlraWFkb2sxMjAwBgNVBAMTKU5l
+dExvY2sgVXpsZXRpIChDbGFzcyBCKSBUYW51c2l0dmFueWtpYWRvMIGfMA0GCSqG
+SIb3DQEBAQUAA4GNADCBiQKBgQCx6gTsIKAjwo84YM/HRrPVG/77uZmeBNwcf4xK
+gZjupNTKihe5In+DCnVMm8Bp2GQ5o+2So/1bXHQawEfKOml2mrriRBf8TKPV/riX
+iK+IA4kfpPIEPsgHC+b5sy96YhQJRhTKZPWLgLViqNhr1nGTLbO/CVRY7QbrqHvc
+Q7GhaQIDAQABo4ICnzCCApswEgYDVR0TAQH/BAgwBgEB/wIBBDAOBgNVHQ8BAf8E
+BAMCAAYwEQYJYIZIAYb4QgEBBAQDAgAHMIICYAYJYIZIAYb4QgENBIICURaCAk1G
+SUdZRUxFTSEgRXplbiB0YW51c2l0dmFueSBhIE5ldExvY2sgS2Z0LiBBbHRhbGFu
+b3MgU3pvbGdhbHRhdGFzaSBGZWx0ZXRlbGVpYmVuIGxlaXJ0IGVsamFyYXNvayBh
+bGFwamFuIGtlc3p1bHQuIEEgaGl0ZWxlc2l0ZXMgZm9seWFtYXRhdCBhIE5ldExv
+Y2sgS2Z0LiB0ZXJtZWtmZWxlbG9zc2VnLWJpenRvc2l0YXNhIHZlZGkuIEEgZGln
+aXRhbGlzIGFsYWlyYXMgZWxmb2dhZGFzYW5hayBmZWx0ZXRlbGUgYXogZWxvaXJ0
+IGVsbGVub3J6ZXNpIGVsamFyYXMgbWVndGV0ZWxlLiBBeiBlbGphcmFzIGxlaXJh
+c2EgbWVndGFsYWxoYXRvIGEgTmV0TG9jayBLZnQuIEludGVybmV0IGhvbmxhcGph
+biBhIGh0dHBzOi8vd3d3Lm5ldGxvY2submV0L2RvY3MgY2ltZW4gdmFneSBrZXJo
+ZXRvIGF6IGVsbGVub3J6ZXNAbmV0bG9jay5uZXQgZS1tYWlsIGNpbWVuLiBJTVBP
+UlRBTlQhIFRoZSBpc3N1YW5jZSBhbmQgdGhlIHVzZSBvZiB0aGlzIGNlcnRpZmlj
+YXRlIGlzIHN1YmplY3QgdG8gdGhlIE5ldExvY2sgQ1BTIGF2YWlsYWJsZSBhdCBo
+dHRwczovL3d3dy5uZXRsb2NrLm5ldC9kb2NzIG9yIGJ5IGUtbWFpbCBhdCBjcHNA
+bmV0bG9jay5uZXQuMA0GCSqGSIb3DQEBBAUAA4GBAATbrowXr/gOkDFOzT4JwG06
+sPgzTEdM43WIEJessDgVkcYplswhwG08pXTP2IKlOcNl40JwuyKQ433bNXbhoLXa
+n3BukxowOR0w2y7jfLKRstE3Kfq51hdcR0/jHTjrn9V7lagonhVK0dHQKwCXoOKS
+NitjrFgBazMpUIaD8QFI
+-----END CERTIFICATE-----
+
+NetLock Express (Class C) Root
+==============================
+-----BEGIN CERTIFICATE-----
+MIIFTzCCBLigAwIBAgIBaDANBgkqhkiG9w0BAQQFADCBmzELMAkGA1UEBhMCSFUx
+ETAPBgNVBAcTCEJ1ZGFwZXN0MScwJQYDVQQKEx5OZXRMb2NrIEhhbG96YXRiaXp0
+b25zYWdpIEtmdC4xGjAYBgNVBAsTEVRhbnVzaXR2YW55a2lhZG9rMTQwMgYDVQQD
+EytOZXRMb2NrIEV4cHJlc3N6IChDbGFzcyBDKSBUYW51c2l0dmFueWtpYWRvMB4X
+DTk5MDIyNTE0MDgxMVoXDTE5MDIyMDE0MDgxMVowgZsxCzAJBgNVBAYTAkhVMREw
+DwYDVQQHEwhCdWRhcGVzdDEnMCUGA1UEChMeTmV0TG9jayBIYWxvemF0Yml6dG9u
+c2FnaSBLZnQuMRowGAYDVQQLExFUYW51c2l0dmFueWtpYWRvazE0MDIGA1UEAxMr
+TmV0TG9jayBFeHByZXNzeiAoQ2xhc3MgQykgVGFudXNpdHZhbnlraWFkbzCBnzAN
+BgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA6+ywbGGKIyWvYCDj2Z/8kwvbXY2wobNA
+OoLO/XXgeDIDhlqGlZHtU/qdQPzm6N3ZW3oDvV3zOwzDUXmbrVWg6dADEK8KuhRC
+2VImESLH0iDMgqSaqf64gXadarfSNnU+sYYJ9m5tfk63euyucYT2BDMIJTLrdKwW
+RMbkQJMdf60CAwEAAaOCAp8wggKbMBIGA1UdEwEB/wQIMAYBAf8CAQQwDgYDVR0P
+AQH/BAQDAgAGMBEGCWCGSAGG+EIBAQQEAwIABzCCAmAGCWCGSAGG+EIBDQSCAlEW
+ggJNRklHWUVMRU0hIEV6ZW4gdGFudXNpdHZhbnkgYSBOZXRMb2NrIEtmdC4gQWx0
+YWxhbm9zIFN6b2xnYWx0YXRhc2kgRmVsdGV0ZWxlaWJlbiBsZWlydCBlbGphcmFz
+b2sgYWxhcGphbiBrZXN6dWx0LiBBIGhpdGVsZXNpdGVzIGZvbHlhbWF0YXQgYSBO
+ZXRMb2NrIEtmdC4gdGVybWVrZmVsZWxvc3NlZy1iaXp0b3NpdGFzYSB2ZWRpLiBB
+IGRpZ2l0YWxpcyBhbGFpcmFzIGVsZm9nYWRhc2FuYWsgZmVsdGV0ZWxlIGF6IGVs
+b2lydCBlbGxlbm9yemVzaSBlbGphcmFzIG1lZ3RldGVsZS4gQXogZWxqYXJhcyBs
+ZWlyYXNhIG1lZ3RhbGFsaGF0byBhIE5ldExvY2sgS2Z0LiBJbnRlcm5ldCBob25s
+YXBqYW4gYSBodHRwczovL3d3dy5uZXRsb2NrLm5ldC9kb2NzIGNpbWVuIHZhZ3kg
+a2VyaGV0byBheiBlbGxlbm9yemVzQG5ldGxvY2submV0IGUtbWFpbCBjaW1lbi4g
+SU1QT1JUQU5UISBUaGUgaXNzdWFuY2UgYW5kIHRoZSB1c2Ugb2YgdGhpcyBjZXJ0
+aWZpY2F0ZSBpcyBzdWJqZWN0IHRvIHRoZSBOZXRMb2NrIENQUyBhdmFpbGFibGUg
+YXQgaHR0cHM6Ly93d3cubmV0bG9jay5uZXQvZG9jcyBvciBieSBlLW1haWwgYXQg
+Y3BzQG5ldGxvY2submV0LjANBgkqhkiG9w0BAQQFAAOBgQAQrX/XDDKACtiG8XmY
+ta3UzbM2xJZIwVzNmtkFLp++UOv0JhQQLdRmF/iewSf98e3ke0ugbLWrmldwpu2g
+pO0u9f38vf5NNwgMvOOWgyL1SRt/Syu0VMGAfJlOHdCM7tCs5ZL6dVb+ZKATj7i4
+Fp1hBWeAyNDYpQcCNJgEjTME1A==
+-----END CERTIFICATE-----
+
XRamp Global CA Root
====================
-
-----BEGIN CERTIFICATE-----
MIIEMDCCAxigAwIBAgIQUJRs7Bjq1ZxN1ZfvdY+grTANBgkqhkiG9w0BAQUFADCB
gjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEk
@@ -1328,7 +1717,6 @@ O+7ETPTsJ3xCwnR8gooJybQDJbw=
Go Daddy Class 2 CA
===================
-
-----BEGIN CERTIFICATE-----
MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEh
MB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBE
@@ -1356,7 +1744,6 @@ ReYNnyicsbkqWletNw+vHX/bvZ8=
Starfield Class 2 CA
====================
-
-----BEGIN CERTIFICATE-----
MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzEl
MCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMp
@@ -1384,7 +1771,6 @@ WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q=
StartCom Certification Authority
================================
-
-----BEGIN CERTIFICATE-----
MIIHyTCCBbGgAwIBAgIBATANBgkqhkiG9w0BAQUFADB9MQswCQYDVQQGEwJJTDEW
MBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMiU2VjdXJlIERpZ2l0YWwg
@@ -1430,9 +1816,135 @@ um0ABj6y6koQOdjQK/W/7HW/lwLFCRsI3FU34oH7N4RDYiDK51ZLZer+bMEkkySh
NOsF/5oirpt9P/FlUQqmMGqz9IgcgA38corog14=
-----END CERTIFICATE-----
+Taiwan GRCA
+===========
+-----BEGIN CERTIFICATE-----
+MIIFcjCCA1qgAwIBAgIQH51ZWtcvwgZEpYAIaeNe9jANBgkqhkiG9w0BAQUFADA/
+MQswCQYDVQQGEwJUVzEwMC4GA1UECgwnR292ZXJubWVudCBSb290IENlcnRpZmlj
+YXRpb24gQXV0aG9yaXR5MB4XDTAyMTIwNTEzMjMzM1oXDTMyMTIwNTEzMjMzM1ow
+PzELMAkGA1UEBhMCVFcxMDAuBgNVBAoMJ0dvdmVybm1lbnQgUm9vdCBDZXJ0aWZp
+Y2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB
+AJoluOzMonWoe/fOW1mKydGGEghU7Jzy50b2iPN86aXfTEc2pBsBHH8eV4qNw8XR
+IePaJD9IK/ufLqGU5ywck9G/GwGHU5nOp/UKIXZ3/6m3xnOUT0b3EEk3+qhZSV1q
+gQdW8or5BtD3cCJNtLdBuTK4sfCxw5w/cP1T3YGq2GN49thTbqGsaoQkclSGxtKy
+yhwOeYHWtXBiCAEuTk8O1RGvqa/lmr/czIdtJuTJV6L7lvnM4T9TjGxMfptTCAts
+F/tnyMKtsc2AtJfcdgEWFelq16TheEfOhtX7MfP6Mb40qij7cEwdScevLJ1tZqa2
+jWR+tSBqnTuBto9AAGdLiYa4zGX+FVPpBMHWXx1E1wovJ5pGfaENda1UhhXcSTvx
+ls4Pm6Dso3pdvtUqdULle96ltqqvKKyskKw4t9VoNSZ63Pc78/1Fm9G7Q3hub/FC
+VGqY8A2tl+lSXunVanLeavcbYBT0peS2cWeqH+riTcFCQP5nRhc4L0c/cZyu5SHK
+YS1tB6iEfC3uUSXxY5Ce/eFXiGvviiNtsea9P63RPZYLhY3Naye7twWb7LuRqQoH
+EgKXTiCQ8P8NHuJBO9NAOueNXdpm5AKwB1KYXA6OM5zCppX7VRluTI6uSw+9wThN
+Xo+EHWbNxWCWtFJaBYmOlXqYwZE8lSOyDvR5tMl8wUohAgMBAAGjajBoMB0GA1Ud
+DgQWBBTMzO/MKWCkO7GStjz6MmKPrCUVOzAMBgNVHRMEBTADAQH/MDkGBGcqBwAE
+MTAvMC0CAQAwCQYFKw4DAhoFADAHBgVnKgMAAAQUA5vwIhP/lSg209yewDL7MTqK
+UWUwDQYJKoZIhvcNAQEFBQADggIBAECASvomyc5eMN1PhnR2WPWus4MzeKR6dBcZ
+TulStbngCnRiqmjKeKBMmo4sIy7VahIkv9Ro04rQ2JyftB8M3jh+Vzj8jeJPXgyf
+qzvS/3WXy6TjZwj/5cAWtUgBfen5Cv8b5Wppv3ghqMKnI6mGq3ZW6A4M9hPdKmaK
+ZEk9GhiHkASfQlK3T8v+R0F2Ne//AHY2RTKbxkaFXeIksB7jSJaYV0eUVXoPQbFE
+JPPB/hprv4j9wabak2BegUqZIJxIZhm1AHlUD7gsL0u8qV1bYH+Mh6XgUmMqvtg7
+hUAV/h62ZT/FS9p+tXo1KaMuephgIqP0fSdOLeq0dDzpD6QzDxARvBMB1uUO07+1
+EqLhRSPAzAhuYbeJq4PjJB7mXQfnHyA+z2fI56wwbSdLaG5LKlwCCDTb+HbkZ6Mm
+nD+iMsJKxYEYMRBWqoTvLQr/uB930r+lWKBi5NdLkXWNiYCYfm3LU05er/ayl4WX
+udpVBrkk7tfGOB5jGxI7leFYrPLfhNVfmS8NVVvmONsuP3LpSIXLuykTjx44Vbnz
+ssQwmSNOXfJIoRIM3BKQCZBUkQM8R+XVyWXgt0t97EfTsws+rZ7QdAAO671RrcDe
+LMDDav7v3Aun+kbfYNucpllQdSNpc5Oy+fwC00fmcc4QAu4njIT/rEUNE1yDMuAl
+pYYsfPQS
+-----END CERTIFICATE-----
+
+Firmaprofesional Root CA
+========================
+-----BEGIN CERTIFICATE-----
+MIIEVzCCAz+gAwIBAgIBATANBgkqhkiG9w0BAQUFADCBnTELMAkGA1UEBhMCRVMx
+IjAgBgNVBAcTGUMvIE11bnRhbmVyIDI0NCBCYXJjZWxvbmExQjBABgNVBAMTOUF1
+dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1hcHJvZmVzaW9uYWwgQ0lGIEE2
+MjYzNDA2ODEmMCQGCSqGSIb3DQEJARYXY2FAZmlybWFwcm9mZXNpb25hbC5jb20w
+HhcNMDExMDI0MjIwMDAwWhcNMTMxMDI0MjIwMDAwWjCBnTELMAkGA1UEBhMCRVMx
+IjAgBgNVBAcTGUMvIE11bnRhbmVyIDI0NCBCYXJjZWxvbmExQjBABgNVBAMTOUF1
+dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1hcHJvZmVzaW9uYWwgQ0lGIEE2
+MjYzNDA2ODEmMCQGCSqGSIb3DQEJARYXY2FAZmlybWFwcm9mZXNpb25hbC5jb20w
+ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDnIwNvbyOlXnjOlSztlB5u
+Cp4Bx+ow0Syd3Tfom5h5VtP8c9/Qit5Vj1H5WuretXDE7aTt/6MNbg9kUDGvASdY
+rv5sp0ovFy3Tc9UTHI9ZpTQsHVQERc1ouKDAA6XPhUJHlShbz++AbOCQl4oBPB3z
+hxAwJkh91/zpnZFx/0GaqUC1N5wpIE8fUuOgfRNtVLcK3ulqTgesrBlf3H5idPay
+BQC6haD9HThuy1q7hryUZzM1gywfI834yJFxzJeL764P3CkDG8A563DtwW4O2GcL
+iam8NeTvtjS0pbbELaW+0MOUJEjb35bTALVmGotmBQ/dPz/LP6pemkr4tErvlTcb
+AgMBAAGjgZ8wgZwwKgYDVR0RBCMwIYYfaHR0cDovL3d3dy5maXJtYXByb2Zlc2lv
+bmFsLmNvbTASBgNVHRMBAf8ECDAGAQH/AgEBMCsGA1UdEAQkMCKADzIwMDExMDI0
+MjIwMDAwWoEPMjAxMzEwMjQyMjAwMDBaMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4E
+FgQUMwugZtHq2s7eYpMEKFK1FH84aLcwDQYJKoZIhvcNAQEFBQADggEBAEdz/o0n
+VPD11HecJ3lXV7cVVuzH2Fi3AQL0M+2TUIiefEaxvT8Ub/GzR0iLjJcG1+p+o1wq
+u00vR+L4OQbJnC4xGgN49Lw4xiKLMzHwFgQEffl25EvXwOaD7FnMP97/T2u3Z36m
+hoEyIwOdyPdfwUpgpZKpsaSgYMN4h7Mi8yrrW6ntBas3D7Hi05V2Y1Z0jFhyGzfl
+ZKG+TQyTmAyX9odtsz/ny4Cm7YjHX1BiAuiZdBbQ5rQ58SfLyEDW44YQqSMSkuBp
+QWOnryULwMWSyx6Yo1q6xTMPoJcB3X/ge9YGVM+h4k0460tQtcsm9MracEpqoeJ5
+quGnM/b9Sh/22WA=
+-----END CERTIFICATE-----
+
+Wells Fargo Root CA
+===================
+-----BEGIN CERTIFICATE-----
+MIID5TCCAs2gAwIBAgIEOeSXnjANBgkqhkiG9w0BAQUFADCBgjELMAkGA1UEBhMC
+VVMxFDASBgNVBAoTC1dlbGxzIEZhcmdvMSwwKgYDVQQLEyNXZWxscyBGYXJnbyBD
+ZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTEvMC0GA1UEAxMmV2VsbHMgRmFyZ28gUm9v
+dCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwHhcNMDAxMDExMTY0MTI4WhcNMjEwMTE0
+MTY0MTI4WjCBgjELMAkGA1UEBhMCVVMxFDASBgNVBAoTC1dlbGxzIEZhcmdvMSww
+KgYDVQQLEyNXZWxscyBGYXJnbyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTEvMC0G
+A1UEAxMmV2VsbHMgRmFyZ28gUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwggEi
+MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDVqDM7Jvk0/82bfuUER84A4n13
+5zHCLielTWi5MbqNQ1mXx3Oqfz1cQJ4F5aHiidlMuD+b+Qy0yGIZLEWukR5zcUHE
+SxP9cMIlrCL1dQu3U+SlK93OvRw6esP3E48mVJwWa2uv+9iWsWCaSOAlIiR5NM4O
+JgALTqv9i86C1y8IcGjBqAr5dE8Hq6T54oN+J3N0Prj5OEL8pahbSCOz6+MlsoCu
+ltQKnMJ4msZoGK43YjdeUXWoWGPAUe5AeH6orxqg4bB4nVCMe+ez/I4jsNtlAHCE
+AQgAFG5Uhpq6zPk3EPbg3oQtnaSFN9OH4xXQwReQfhkhahKpdv0SAulPIV4XAgMB
+AAGjYTBfMA8GA1UdEwEB/wQFMAMBAf8wTAYDVR0gBEUwQzBBBgtghkgBhvt7hwcB
+CzAyMDAGCCsGAQUFBwIBFiRodHRwOi8vd3d3LndlbGxzZmFyZ28uY29tL2NlcnRw
+b2xpY3kwDQYJKoZIhvcNAQEFBQADggEBANIn3ZwKdyu7IvICtUpKkfnRLb7kuxpo
+7w6kAOnu5+/u9vnldKTC2FJYxHT7zmu1Oyl5GFrvm+0fazbuSCUlFLZWohDo7qd/
+0D+j0MNdJu4HzMPBJCGHHt8qElNvQRbn7a6U+oxy+hNH8Dx+rn0ROhPs7fpvcmR7
+nX1/Jv16+yWt6j4pf0zjAFcysLPp7VMX2YuyFA4w6OXVE8Zkr8QA1dhYJPz1j+zx
+x32l2w8n0cbyQIjmH/ZhqPRCyLk306m+LFZ4wnKbWV01QIroTmMatukgalHizqSQ
+33ZwmVxwQ023tqcZZE6St8WRPH9IFmV7Fv3L/PvZ1dZPIWU7Sn9Ho/s=
+-----END CERTIFICATE-----
+
+Swisscom Root CA 1
+==================
+-----BEGIN CERTIFICATE-----
+MIIF2TCCA8GgAwIBAgIQXAuFXAvnWUHfV8w/f52oNjANBgkqhkiG9w0BAQUFADBk
+MQswCQYDVQQGEwJjaDERMA8GA1UEChMIU3dpc3Njb20xJTAjBgNVBAsTHERpZ2l0
+YWwgQ2VydGlmaWNhdGUgU2VydmljZXMxGzAZBgNVBAMTElN3aXNzY29tIFJvb3Qg
+Q0EgMTAeFw0wNTA4MTgxMjA2MjBaFw0yNTA4MTgyMjA2MjBaMGQxCzAJBgNVBAYT
+AmNoMREwDwYDVQQKEwhTd2lzc2NvbTElMCMGA1UECxMcRGlnaXRhbCBDZXJ0aWZp
+Y2F0ZSBTZXJ2aWNlczEbMBkGA1UEAxMSU3dpc3Njb20gUm9vdCBDQSAxMIICIjAN
+BgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA0LmwqAzZuz8h+BvVM5OAFmUgdbI9
+m2BtRsiMMW8Xw/qabFbtPMWRV8PNq5ZJkCoZSx6jbVfd8StiKHVFXqrWW/oLJdih
+FvkcxC7mlSpnzNApbjyFNDhhSbEAn9Y6cV9Nbc5fuankiX9qUvrKm/LcqfmdmUc/
+TilftKaNXXsLmREDA/7n29uj/x2lzZAeAR81sH8A25Bvxn570e56eqeqDFdvpG3F
+EzuwpdntMhy0XmeLVNxzh+XTF3xmUHJd1BpYwdnP2IkCb6dJtDZd0KTeByy2dbco
+kdaXvij1mB7qWybJvbCXc9qukSbraMH5ORXWZ0sKbU/Lz7DkQnGMU3nn7uHbHaBu
+HYwadzVcFh4rUx80i9Fs/PJnB3r1re3WmquhsUvhzDdf/X/NTa64H5xD+SpYVUNF
+vJbNcA78yeNmuk6NO4HLFWR7uZToXTNShXEuT46iBhFRyePLoW4xCGQMwtI89Tbo
+19AOeCMgkckkKmUpWyL3Ic6DXqTz3kvTaI9GdVyDCW4pa8RwjPWd1yAv/0bSKzjC
+L3UcPX7ape8eYIVpQtPM+GP+HkM5haa2Y0EQs3MevNP6yn0WR+Kn1dCjigoIlmJW
+bjTb2QK5MHXjBNLnj8KwEUAKrNVxAmKLMb7dxiNYMUJDLXT5xp6mig/p/r+D5kNX
+JLrvRjSq1xIBOO0CAwEAAaOBhjCBgzAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0hBBYw
+FDASBgdghXQBUwABBgdghXQBUwABMBIGA1UdEwEB/wQIMAYBAf8CAQcwHwYDVR0j
+BBgwFoAUAyUv3m+CATpcLNwroWm1Z9SM0/0wHQYDVR0OBBYEFAMlL95vggE6XCzc
+K6FptWfUjNP9MA0GCSqGSIb3DQEBBQUAA4ICAQA1EMvspgQNDQ/NwNurqPKIlwzf
+ky9NfEBWMXrrpA9gzXrzvsMnjgM+pN0S734edAY8PzHyHHuRMSG08NBsl9Tpl7Ik
+Vh5WwzW9iAUPWxAaZOHHgjD5Mq2eUCzneAXQMbFamIp1TpBcahQq4FJHgmDmHtqB
+sfsUC1rxn9KVuj7QG9YVHaO+htXbD8BJZLsuUBlL0iT43R4HVtA4oJVwIHaM190e
+3p9xxCPvgxNcoyQVTSlAPGrEqdi3pkSlDfTgnXceQHAm/NrZNuR55LU/vJtlvrsR
+ls/bxig5OgjOR1tTWsWZ/l2p3e9M1MalrQLmjAcSHm8D0W+go/MpvRLHUKKwf4ip
+mXeascClOS5cfGniLLDqN2qk4Vrh9VDlg++luyqI54zb/W1elxmofmZ1a3Hqv7HH
+b6D0jqTsNFFbjCYDcKF31QESVwA12yPeDooomf2xEG9L/zgtYE4snOtnta1J7ksf
+rK/7DZBaZmBwXarNeNQk7shBoJMBkpxqnvy5JMWzFYJ+vq6VK+uxwNrjAWALXmms
+hFZhvnEX/h0TD/7Gh0Xp/jKgGg0TpJRVcaUWi7rKibCyx/yP2FS1k2Kdzs9Z+z0Y
+zirLNRWCXf9UIltxUvu3yf5gmwBBZPCqKuy2QkPOiWaByIufOVQDJdMWNY6E0F/6
+MBr1mmz0DlP5OlvRHA==
+-----END CERTIFICATE-----
+
DigiCert Assured ID Root CA
===========================
-
-----BEGIN CERTIFICATE-----
MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl
MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
@@ -1458,7 +1970,6 @@ H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe
DigiCert Global Root CA
=======================
-
-----BEGIN CERTIFICATE-----
MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh
MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
@@ -1484,7 +1995,6 @@ CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4=
DigiCert High Assurance EV Root CA
==================================
-
-----BEGIN CERTIFICATE-----
MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs
MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
@@ -1509,9 +2019,246 @@ vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep
+OkuE6N36B9K
-----END CERTIFICATE-----
+Certplus Class 2 Primary CA
+===========================
+-----BEGIN CERTIFICATE-----
+MIIDkjCCAnqgAwIBAgIRAIW9S/PY2uNp9pTXX8OlRCMwDQYJKoZIhvcNAQEFBQAw
+PTELMAkGA1UEBhMCRlIxETAPBgNVBAoTCENlcnRwbHVzMRswGQYDVQQDExJDbGFz
+cyAyIFByaW1hcnkgQ0EwHhcNOTkwNzA3MTcwNTAwWhcNMTkwNzA2MjM1OTU5WjA9
+MQswCQYDVQQGEwJGUjERMA8GA1UEChMIQ2VydHBsdXMxGzAZBgNVBAMTEkNsYXNz
+IDIgUHJpbWFyeSBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANxQ
+ltAS+DXSCHh6tlJw/W/uz7kRy1134ezpfgSN1sxvc0NXYKwzCkTsA18cgCSR5aiR
+VhKC9+Ar9NuuYS6JEI1rbLqzAr3VNsVINyPi8Fo3UjMXEuLRYE2+L0ER4/YXJQyL
+kcAbmXuZVg2v7tK8R1fjeUl7NIknJITesezpWE7+Tt9avkGtrAjFGA7v0lPubNCd
+EgETjdyAYveVqUSISnFOYFWe2yMZeVYHDD9jC1yw4r5+FfyUM1hBOHTE4Y+L3yas
+H7WLO7dDWWuwJKZtkIvEcupdM5i3y95ee++U8Rs+yskhwcWYAqqi9lt3m/V+llU0
+HGdpwPFC40es/CgcZlUCAwEAAaOBjDCBiTAPBgNVHRMECDAGAQH/AgEKMAsGA1Ud
+DwQEAwIBBjAdBgNVHQ4EFgQU43Mt38sOKAze3bOkynm4jrvoMIkwEQYJYIZIAYb4
+QgEBBAQDAgEGMDcGA1UdHwQwMC4wLKAqoCiGJmh0dHA6Ly93d3cuY2VydHBsdXMu
+Y29tL0NSTC9jbGFzczIuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQCnVM+IRBnL39R/
+AN9WM2K191EBkOvDP9GIROkkXe/nFL0gt5o8AP5tn9uQ3Nf0YtaLcF3n5QRIqWh8
+yfFC82x/xXp8HVGIutIKPidd3i1RTtMTZGnkLuPT55sJmabglZvOGtd/vjzOUrMR
+FcEPF80Du5wlFbqidon8BvEY0JNLDnyCt6X09l/+7UCmnYR0ObncHoUW2ikbhiMA
+ybuJfm6AiB4vFLQDJKgybwOaRywwvlbGp0ICcBvqQNi6BQNwB6SW//1IMwrh3KWB
+kJtN3X3n57LNXMhqlfil9o3EXXgIvnsG1knPGTZQIy4I5p4FTUcY1Rbpsda2ENW7
+l7+ijrRU
+-----END CERTIFICATE-----
+
+DST Root CA X3
+==============
+-----BEGIN CERTIFICATE-----
+MIIDSjCCAjKgAwIBAgIQRK+wgNajJ7qJMDmGLvhAazANBgkqhkiG9w0BAQUFADA/
+MSQwIgYDVQQKExtEaWdpdGFsIFNpZ25hdHVyZSBUcnVzdCBDby4xFzAVBgNVBAMT
+DkRTVCBSb290IENBIFgzMB4XDTAwMDkzMDIxMTIxOVoXDTIxMDkzMDE0MDExNVow
+PzEkMCIGA1UEChMbRGlnaXRhbCBTaWduYXR1cmUgVHJ1c3QgQ28uMRcwFQYDVQQD
+Ew5EU1QgUm9vdCBDQSBYMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB
+AN+v6ZdQCINXtMxiZfaQguzH0yxrMMpb7NnDfcdAwRgUi+DoM3ZJKuM/IUmTrE4O
+rz5Iy2Xu/NMhD2XSKtkyj4zl93ewEnu1lcCJo6m67XMuegwGMoOifooUMM0RoOEq
+OLl5CjH9UL2AZd+3UWODyOKIYepLYYHsUmu5ouJLGiifSKOeDNoJjj4XLh7dIN9b
+xiqKqy69cK3FCxolkHRyxXtqqzTWMIn/5WgTe1QLyNau7Fqckh49ZLOMxt+/yUFw
+7BZy1SbsOFU5Q9D8/RhcQPGX69Wam40dutolucbY38EVAjqr2m7xPi71XAicPNaD
+aeQQmxkqtilX4+U9m5/wAl0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNV
+HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFMSnsaR7LHH62+FLkHX/xBVghYkQMA0GCSqG
+SIb3DQEBBQUAA4IBAQCjGiybFwBcqR7uKGY3Or+Dxz9LwwmglSBd49lZRNI+DT69
+ikugdB/OEIKcdBodfpga3csTS7MgROSR6cz8faXbauX+5v3gTt23ADq1cEmv8uXr
+AvHRAosZy5Q6XkjEGB5YGV8eAlrwDPGxrancWYaLbumR9YbK+rlmM6pZW87ipxZz
+R8srzJmwN0jP41ZL9c8PDHIyh8bwRLtTcm1D9SZImlJnt1ir/md2cXjbDaJWFBM5
+JDGFoqgCWjBH4d1QB7wCCZAA62RjYJsWvIjJEubSfZGL+T0yjWW06XyxV3bqxbYo
+Ob8VZRzI9neWagqNdwvYkQsEjgfbKbYK7p2CNTUQ
+-----END CERTIFICATE-----
+
+DST ACES CA X6
+==============
+-----BEGIN CERTIFICATE-----
+MIIECTCCAvGgAwIBAgIQDV6ZCtadt3js2AdWO4YV2TANBgkqhkiG9w0BAQUFADBb
+MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXRGlnaXRhbCBTaWduYXR1cmUgVHJ1c3Qx
+ETAPBgNVBAsTCERTVCBBQ0VTMRcwFQYDVQQDEw5EU1QgQUNFUyBDQSBYNjAeFw0w
+MzExMjAyMTE5NThaFw0xNzExMjAyMTE5NThaMFsxCzAJBgNVBAYTAlVTMSAwHgYD
+VQQKExdEaWdpdGFsIFNpZ25hdHVyZSBUcnVzdDERMA8GA1UECxMIRFNUIEFDRVMx
+FzAVBgNVBAMTDkRTVCBBQ0VTIENBIFg2MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A
+MIIBCgKCAQEAuT31LMmU3HWKlV1j6IR3dma5WZFcRt2SPp/5DgO0PWGSvSMmtWPu
+ktKe1jzIDZBfZIGxqAgNTNj50wUoUrQBJcWVHAx+PhCEdc/BGZFjz+iokYi5Q1K7
+gLFViYsx+tC3dr5BPTCapCIlF3PoHuLTrCq9Wzgh1SpL11V94zpVvddtawJXa+ZH
+fAjIgrrep4c9oW24MFbCswKBXy314powGCi4ZtPLAZZv6opFVdbgnf9nKxcCpk4a
+ahELfrd755jWjHZvwTvbUJN+5dCOHze4vbrGn2zpfDPyMjwmR/onJALJfh1biEIT
+ajV8fTXpLmaRcpPVMibEdPVTo7NdmvYJywIDAQABo4HIMIHFMA8GA1UdEwEB/wQF
+MAMBAf8wDgYDVR0PAQH/BAQDAgHGMB8GA1UdEQQYMBaBFHBraS1vcHNAdHJ1c3Rk
+c3QuY29tMGIGA1UdIARbMFkwVwYKYIZIAWUDAgEBATBJMEcGCCsGAQUFBwIBFjto
+dHRwOi8vd3d3LnRydXN0ZHN0LmNvbS9jZXJ0aWZpY2F0ZXMvcG9saWN5L0FDRVMt
+aW5kZXguaHRtbDAdBgNVHQ4EFgQUCXIGThhDD+XWzMNqizF7eI+og7gwDQYJKoZI
+hvcNAQEFBQADggEBAKPYjtay284F5zLNAdMEA+V25FYrnJmQ6AgwbN99Pe7lv7Uk
+QIRJ4dEorsTCOlMwiPH1d25Ryvr/ma8kXxug/fKshMrfqfBfBC6tFr8hlxCBPeP/
+h40y3JTlR4peahPJlJU90u7INJXQgNStMgiAVDzgvVJT11J8smk/f3rPanTK+gQq
+nExaBqXpIK1FZg9p8d2/6eMyi/rgwYZNcjwu2JN4Cir42NInPRmJX1p7ijvMDNpR
+rscL9yuwNwXsvFcj4jjSm2jzVhKIT0J8uDHEtdvkyCE06UgRNe76x5JXxZ805Mf2
+9w4LTJxoeHtxMcfrHuBnQfO3oKfN5XozNmr6mis=
+-----END CERTIFICATE-----
+
+TURKTRUST Certificate Services Provider Root 1
+==============================================
+-----BEGIN CERTIFICATE-----
+MIID+zCCAuOgAwIBAgIBATANBgkqhkiG9w0BAQUFADCBtzE/MD0GA1UEAww2VMOc
+UktUUlVTVCBFbGVrdHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sx
+c8SxMQswCQYDVQQGDAJUUjEPMA0GA1UEBwwGQU5LQVJBMVYwVAYDVQQKDE0oYykg
+MjAwNSBUw5xSS1RSVVNUIEJpbGdpIMSwbGV0acWfaW0gdmUgQmlsacWfaW0gR8O8
+dmVubGnEn2kgSGl6bWV0bGVyaSBBLsWeLjAeFw0wNTA1MTMxMDI3MTdaFw0xNTAz
+MjIxMDI3MTdaMIG3MT8wPQYDVQQDDDZUw5xSS1RSVVNUIEVsZWt0cm9uaWsgU2Vy
+dGlmaWthIEhpem1ldCBTYcSfbGF5xLFjxLFzxLExCzAJBgNVBAYMAlRSMQ8wDQYD
+VQQHDAZBTktBUkExVjBUBgNVBAoMTShjKSAyMDA1IFTDnFJLVFJVU1QgQmlsZ2kg
+xLBsZXRpxZ9pbSB2ZSBCaWxpxZ9pbSBHw7x2ZW5sacSfaSBIaXptZXRsZXJpIEEu
+xZ4uMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAylIF1mMD2Bxf3dJ7
+XfIMYGFbazt0K3gNfUW9InTojAPBxhEqPZW8qZSwu5GXyGl8hMW0kWxsE2qkVa2k
+heiVfrMArwDCBRj1cJ02i67L5BuBf5OI+2pVu32Fks66WJ/bMsW9Xe8iSi9BB35J
+YbOG7E6mQW6EvAPs9TscyB/C7qju6hJKjRTP8wrgUDn5CDX4EVmt5yLqS8oUBt5C
+urKZ8y1UiBAG6uEaPj1nH/vO+3yC6BFdSsG5FOpU2WabfIl9BJpiyelSPJ6c79L1
+JuTm5Rh8i27fbMx4W09ysstcP4wFjdFMjK2Sx+F4f2VsSQZQLJ4ywtdKxnWKWU51
+b0dewQIDAQABoxAwDjAMBgNVHRMEBTADAQH/MA0GCSqGSIb3DQEBBQUAA4IBAQAV
+9VX/N5aAWSGk/KEVTCD21F/aAyT8z5Aa9CEKmu46sWrv7/hg0Uw2ZkUd82YCdAR7
+kjCo3gp2D++Vbr3JN+YaDayJSFvMgzbC9UZcWYJWtNX+I7TYVBxEq8Sn5RTOPEFh
+fEPmzcSBCYsk+1Ql1haolgxnB2+zUEfjHCQo3SqYpGH+2+oSN7wBGjSFvW5P55Fy
+B0SFHljKVETd96y5y4khctuPwGkplyqjrhgjlxxBKot8KsF8kOipKMDTkcatKIdA
+aLX/7KfS0zgYnNN9aV3wxqUeJBujR/xpB2jn5Jq07Q+hh4cCzofSSE7hvP/L8XKS
+RGQDJereW26fyfJOrN3H
+-----END CERTIFICATE-----
+
+TURKTRUST Certificate Services Provider Root 2
+==============================================
+-----BEGIN CERTIFICATE-----
+MIIEPDCCAySgAwIBAgIBATANBgkqhkiG9w0BAQUFADCBvjE/MD0GA1UEAww2VMOc
+UktUUlVTVCBFbGVrdHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sx
+c8SxMQswCQYDVQQGEwJUUjEPMA0GA1UEBwwGQW5rYXJhMV0wWwYDVQQKDFRUw5xS
+S1RSVVNUIEJpbGdpIMSwbGV0acWfaW0gdmUgQmlsacWfaW0gR8O8dmVubGnEn2kg
+SGl6bWV0bGVyaSBBLsWeLiAoYykgS2FzxLFtIDIwMDUwHhcNMDUxMTA3MTAwNzU3
+WhcNMTUwOTE2MTAwNzU3WjCBvjE/MD0GA1UEAww2VMOcUktUUlVTVCBFbGVrdHJv
+bmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sxc8SxMQswCQYDVQQGEwJU
+UjEPMA0GA1UEBwwGQW5rYXJhMV0wWwYDVQQKDFRUw5xSS1RSVVNUIEJpbGdpIMSw
+bGV0acWfaW0gdmUgQmlsacWfaW0gR8O8dmVubGnEn2kgSGl6bWV0bGVyaSBBLsWe
+LiAoYykgS2FzxLFtIDIwMDUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB
+AQCpNn7DkUNMwxmYCMjHWHtPFoylzkkBH3MOrHUTpvqeLCDe2JAOCtFp0if7qnef
+J1Il4std2NiDUBd9irWCPwSOtNXwSadktx4uXyCcUHVPr+G1QRT0mJKIx+XlZEdh
+R3n9wFHxwZnn3M5q+6+1ATDcRhzviuyV79z/rxAc653YsKpqhRgNF8k+v/Gb0AmJ
+Qv2gQrSdiVFVKc8bcLyEVK3BEx+Y9C52YItdP5qtygy/p1Zbj3e41Z55SZI/4PGX
+JHpsmxcPbe9TmJEr5A++WXkHeLuXlfSfadRYhwqp48y2WBmfJiGxxFmNskF1wK1p
+zpwACPI2/z7woQ8arBT9pmAPAgMBAAGjQzBBMB0GA1UdDgQWBBTZN7NOBf3Zz58S
+Fq62iS/rJTqIHDAPBgNVHQ8BAf8EBQMDBwYAMA8GA1UdEwEB/wQFMAMBAf8wDQYJ
+KoZIhvcNAQEFBQADggEBAHJglrfJ3NgpXiOFX7KzLXb7iNcX/nttRbj2hWyfIvwq
+ECLsqrkw9qtY1jkQMZkpAL2JZkH7dN6RwRgLn7Vhy506vvWolKMiVW4XSf/SKfE4
+Jl3vpao6+XF75tpYHdN0wgH6PmlYX63LaL4ULptswLbcoCb6dxriJNoaN+BnrdFz
+gw2lGh1uEpJ+hGIAF728JRhX8tepb1mIvDS3LoV4nZbcFMMsilKbloxSZj2GFotH
+uFEJjOp9zYhys2AzsfAKRO8P9Qk3iCQOLGsgOqL6EfJANZxEaGM7rDNvY7wsu/LS
+y3Z9fYjYHcgFHW68lKlmjHdxx/qR+i9Rnuk5UrbnBEI=
+-----END CERTIFICATE-----
+
+SwissSign Platinum CA - G2
+==========================
+-----BEGIN CERTIFICATE-----
+MIIFwTCCA6mgAwIBAgIITrIAZwwDXU8wDQYJKoZIhvcNAQEFBQAwSTELMAkGA1UE
+BhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEjMCEGA1UEAxMaU3dpc3NTaWdu
+IFBsYXRpbnVtIENBIC0gRzIwHhcNMDYxMDI1MDgzNjAwWhcNMzYxMDI1MDgzNjAw
+WjBJMQswCQYDVQQGEwJDSDEVMBMGA1UEChMMU3dpc3NTaWduIEFHMSMwIQYDVQQD
+ExpTd2lzc1NpZ24gUGxhdGludW0gQ0EgLSBHMjCCAiIwDQYJKoZIhvcNAQEBBQAD
+ggIPADCCAgoCggIBAMrfogLi2vj8Bxax3mCq3pZcZB/HL37PZ/pEQtZ2Y5Wu669y
+IIpFR4ZieIbWIDkm9K6j/SPnpZy1IiEZtzeTIsBQnIJ71NUERFzLtMKfkr4k2Htn
+IuJpX+UFeNSH2XFwMyVTtIc7KZAoNppVRDBopIOXfw0enHb/FZ1glwCNioUD7IC+
+6ixuEFGSzH7VozPY1kneWCqv9hbrS3uQMpe5up1Y8fhXSQQeol0GcN1x2/ndi5ob
+jM89o03Oy3z2u5yg+gnOI2Ky6Q0f4nIoj5+saCB9bzuohTEJfwvH6GXp43gOCWcw
+izSC+13gzJ2BbWLuCB4ELE6b7P6pT1/9aXjvCR+htL/68++QHkwFix7qepF6w9fl
++zC8bBsQWJj3Gl/QKTIDE0ZNYWqFTFJ0LwYfexHihJfGmfNtf9dng34TaNhxKFrY
+zt3oEBSa/m0jh26OWnA81Y0JAKeqvLAxN23IhBQeW71FYyBrS3SMvds6DsHPWhaP
+pZjydomyExI7C3d3rLvlPClKknLKYRorXkzig3R3+jVIeoVNjZpTxN94ypeRSCtF
+KwH3HBqi7Ri6Cr2D+m+8jVeTO9TUps4e8aCxzqv9KyiaTxvXw3LbpMS/XUz13XuW
+ae5ogObnmLo2t/5u7Su9IPhlGdpVCX4l3P5hYnL5fhgC72O00Puv5TtjjGePAgMB
+AAGjgawwgakwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O
+BBYEFFCvzAeHFUdvOMW0ZdHelarp35zMMB8GA1UdIwQYMBaAFFCvzAeHFUdvOMW0
+ZdHelarp35zMMEYGA1UdIAQ/MD0wOwYJYIV0AVkBAQEBMC4wLAYIKwYBBQUHAgEW
+IGh0dHA6Ly9yZXBvc2l0b3J5LnN3aXNzc2lnbi5jb20vMA0GCSqGSIb3DQEBBQUA
+A4ICAQAIhab1Fgz8RBrBY+D5VUYI/HAcQiiWjrfFwUF1TglxeeVtlspLpYhg0DB0
+uMoI3LQwnkAHFmtllXcBrqS3NQuB2nEVqXQXOHtYyvkv+8Bldo1bAbl93oI9ZLi+
+FHSjClTTLJUYFzX1UWs/j6KWYTl4a0vlpqD4U99REJNi54Av4tHgvI42Rncz7Lj7
+jposiU0xEQ8mngS7twSNC/K5/FqdOxa3L8iYq/6KUFkuozv8KV2LwUvJ4ooTHbG/
+u0IdUt1O2BReEMYxB+9xJ/cbOQncguqLs5WGXv312l0xpuAxtpTmREl0xRbl9x8D
+YSjFyMsSoEJL+WuICI20MhjzdZ/EfwBPBZWcoxcCw7NTm6ogOSkrZvqdr16zktK1
+puEa+S1BaYEUtLS17Yk9zvupnTVCRLEcFHOBzyoBNZox1S2PbYTfgE1X4z/FhHXa
+icYwu+uPyyIIoK6q8QNsOktNCaUOcsZWayFCTiMlFGiudgp8DAdwZPmaL/YFOSbG
+DI8Zf0NebvRbFS/bYV3mZy8/CJT5YLSYMdp08YSTcU1f+2BY0fvEwW2JorsgH51x
+kcsymxM9Pn2SUjWskpSi0xjCfMfqr3YFFt1nJ8J+HAciIfNAChs0B0QTwoRqjt8Z
+Wr9/6x3iGjjRXK9HkmuAtTClyY3YqzGBH9/CZjfTk6mFhnll0g==
+-----END CERTIFICATE-----
+
+SwissSign Gold CA - G2
+======================
+-----BEGIN CERTIFICATE-----
+MIIFujCCA6KgAwIBAgIJALtAHEP1Xk+wMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNV
+BAYTAkNIMRUwEwYDVQQKEwxTd2lzc1NpZ24gQUcxHzAdBgNVBAMTFlN3aXNzU2ln
+biBHb2xkIENBIC0gRzIwHhcNMDYxMDI1MDgzMDM1WhcNMzYxMDI1MDgzMDM1WjBF
+MQswCQYDVQQGEwJDSDEVMBMGA1UEChMMU3dpc3NTaWduIEFHMR8wHQYDVQQDExZT
+d2lzc1NpZ24gR29sZCBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC
+CgKCAgEAr+TufoskDhJuqVAtFkQ7kpJcyrhdhJJCEyq8ZVeCQD5XJM1QiyUqt2/8
+76LQwB8CJEoTlo8jE+YoWACjR8cGp4QjK7u9lit/VcyLwVcfDmJlD909Vopz2q5+
+bbqBHH5CjCA12UNNhPqE21Is8w4ndwtrvxEvcnifLtg+5hg3Wipy+dpikJKVyh+c
+6bM8K8vzARO/Ws/BtQpgvd21mWRTuKCWs2/iJneRjOBiEAKfNA+k1ZIzUd6+jbqE
+emA8atufK+ze3gE/bk3lUIbLtK/tREDFylqM2tIrfKjuvqblCqoOpd8FUrdVxyJd
+MmqXl2MT28nbeTZ7hTpKxVKJ+STnnXepgv9VHKVxaSvRAiTysybUa9oEVeXBCsdt
+MDeQKuSeFDNeFhdVxVu1yzSJkvGdJo+hB9TGsnhQ2wwMC3wLjEHXuendjIj3o02y
+MszYF9rNt85mndT9Xv+9lz4pded+p2JYryU0pUHHPbwNUMoDAw8IWh+Vc3hiv69y
+FGkOpeUDDniOJihC8AcLYiAQZzlG+qkDzAQ4embvIIO1jEpWjpEA/I5cgt6IoMPi
+aG59je883WX0XaxR7ySArqpWl2/5rX3aYT+YdzylkbYcjCbaZaIJbcHiVOO5ykxM
+gI93e2CaHt+28kgeDrpOVG2Y4OGiGqJ3UM/EY5LsRxmd6+ZrzsECAwEAAaOBrDCB
+qTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUWyV7
+lqRlUX64OfPAeGZe6Drn8O4wHwYDVR0jBBgwFoAUWyV7lqRlUX64OfPAeGZe6Drn
+8O4wRgYDVR0gBD8wPTA7BglghXQBWQECAQEwLjAsBggrBgEFBQcCARYgaHR0cDov
+L3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIBACe6
+45R88a7A3hfm5djV9VSwg/S7zV4Fe0+fdWavPOhWfvxyeDgD2StiGwC5+OlgzczO
+UYrHUDFu4Up+GC9pWbY9ZIEr44OE5iKHjn3g7gKZYbge9LgriBIWhMIxkziWMaa5
+O1M/wySTVltpkuzFwbs4AOPsF6m43Md8AYOfMke6UiI0HTJ6CVanfCU2qT1L2sCC
+bwq7EsiHSycR+R4tx5M/nttfJmtS2S6K8RTGRI0Vqbe/vd6mGu6uLftIdxf+u+yv
+GPUqUfA5hJeVbG4bwyvEdGB5JbAKJ9/fXtI5z0V9QkvfsywexcZdylU6oJxpmo/a
+77KwPJ+HbBIrZXAVUjEaJM9vMSNQH4xPjyPDdEFjHFWoFN0+4FFQz/EbMFYOkrCC
+hdiDyyJkvC24JdVUorgG6q2SpCSgwYa1ShNqR88uC1aVVMvOmttqtKay20EIhid3
+92qgQmwLOM7XdVAyksLfKzAiSNDVQTglXaTpXZ/GlHXQRf0wl0OPkKsKx4ZzYEpp
+Ld6leNcG2mqeSz53OiATIgHQv2ieY2BrNU0LbbqhPcCT4H8js1WtciVORvnSFu+w
+ZMEBnunKoGqYDs/YYPIvSbjkQuE4NRb0yG5P94FW6LqjviOvrv1vA+ACOzB2+htt
+Qc8Bsem4yWb02ybzOqR08kkkW8mw0FfB+j564ZfJ
+-----END CERTIFICATE-----
+
+SwissSign Silver CA - G2
+========================
+-----BEGIN CERTIFICATE-----
+MIIFvTCCA6WgAwIBAgIITxvUL1S7L0swDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UE
+BhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMYU3dpc3NTaWdu
+IFNpbHZlciBDQSAtIEcyMB4XDTA2MTAyNTA4MzI0NloXDTM2MTAyNTA4MzI0Nlow
+RzELMAkGA1UEBhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMY
+U3dpc3NTaWduIFNpbHZlciBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8A
+MIICCgKCAgEAxPGHf9N4Mfc4yfjDmUO8x/e8N+dOcbpLj6VzHVxumK4DV644N0Mv
+Fz0fyM5oEMF4rhkDKxD6LHmD9ui5aLlV8gREpzn5/ASLHvGiTSf5YXu6t+WiE7br
+YT7QbNHm+/pe7R20nqA1W6GSy/BJkv6FCgU+5tkL4k+73JU3/JHpMjUi0R86TieF
+nbAVlDLaYQ1HTWBCrpJH6INaUFjpiou5XaHc3ZlKHzZnu0jkg7Y360g6rw9njxcH
+6ATK72oxh9TAtvmUcXtnZLi2kUpCe2UuMGoM9ZDulebyzYLs2aFK7PayS+VFheZt
+eJMELpyCbTapxDFkH4aDCyr0NQp4yVXPQbBH6TCfmb5hqAaEuSh6XzjZG6k4sIN/
+c8HDO0gqgg8hm7jMqDXDhBuDsz6+pJVpATqJAHgE2cn0mRmrVn5bi4Y5FZGkECwJ
+MoBgs5PAKrYYC51+jUnyEEp/+dVGLxmSo5mnJqy7jDzmDrxHB9xzUfFwZC8I+bRH
+HTBsROopN4WSaGa8gzj+ezku01DwH/teYLappvonQfGbGHLy9YR0SslnxFSuSGTf
+jNFusB3hB48IHpmccelM2KX3RxIfdNFRnobzwqIjQAtz20um53MGjMGg6cFZrEb6
+5i/4z3GcRm25xBWNOHkDRUjvxF3XCO6HOSKGsg0PWEP3calILv3q1h8CAwEAAaOB
+rDCBqTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU
+F6DNweRBtjpbO8tFnb0cwpj6hlgwHwYDVR0jBBgwFoAUF6DNweRBtjpbO8tFnb0c
+wpj6hlgwRgYDVR0gBD8wPTA7BglghXQBWQEDAQEwLjAsBggrBgEFBQcCARYgaHR0
+cDovL3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIB
+AHPGgeAn0i0P4JUw4ppBf1AsX19iYamGamkYDHRJ1l2E6kFSGG9YrVBWIGrGvShp
+WJHckRE1qTodvBqlYJ7YH39FkWnZfrt4csEGDyrOj4VwYaygzQu4OSlWhDJOhrs9
+xCrZ1x9y7v5RoSJBsXECYxqCsGKrXlcSH9/L3XWgwF15kIwb4FDm3jH+mHtwX6WQ
+2K34ArZv02DdQEsixT2tOnqfGhpHkXkzuoLcMmkDlm4fS/Bx/uNncqCxv1yL5PqZ
+IseEuRuNI5c/7SXgz2W79WEE790eslpBIlqhn10s6FvJbakMDHiqYMZWjwFaDGi8
+aRl5xB9+lwW/xekkUV7U1UtT7dkjWjYDZaPBA61BMPNGG4WQr2W11bHkFlt4dR2X
+em1ZqSqPe97Dh4kQmUlzeMg9vVE1dCrV8X5pGyq7O70luJpaPXJhkGaH7gzWTdQR
+dAtq/gsD/KNVV4n+SsuuWxcFyPKNIzFTONItaj+CuY0IavdeQXRuwxF+B6wpYJE/
+OMpXEA29MC/HpeZBoNquBYeaoKRlbEwJDIm6uNO5wJOKMPqN5ZprFQFOZ6raYlY+
+hAhm0sQ2fac+EPyI4NSA5QC9qvNOBqN6avlicuMJT+ubDgEj8Z+7fNzcbBGXJbLy
+tGMU0gYqZ4yD9c7qB9iaah7s5Aq7KkzrCWA5zspi2C5u
+-----END CERTIFICATE-----
+
GeoTrust Primary Certification Authority
========================================
-
-----BEGIN CERTIFICATE-----
MIIDfDCCAmSgAwIBAgIQGKy1av1pthU6Y2yv2vrEoTANBgkqhkiG9w0BAQUFADBY
MQswCQYDVQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjExMC8GA1UEAxMo
@@ -1534,9 +2281,117 @@ UjPtp8nSOQJw+uCxQmYpqptR7TBUIhRf2asdweSU8Pj1K/fqynhG1riR/aYNKxoU
AT6A8EKglQdebc3MS6RFjasS6LPeWuWgfOgPIh1a6Vk=
-----END CERTIFICATE-----
+thawte Primary Root CA
+======================
+-----BEGIN CERTIFICATE-----
+MIIEIDCCAwigAwIBAgIQNE7VVyDV7exJ9C/ON9srbTANBgkqhkiG9w0BAQUFADCB
+qTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf
+Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw
+MDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxHzAdBgNV
+BAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwHhcNMDYxMTE3MDAwMDAwWhcNMzYw
+NzE2MjM1OTU5WjCBqTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5j
+LjEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYG
+A1UECxMvKGMpIDIwMDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNl
+IG9ubHkxHzAdBgNVBAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwggEiMA0GCSqG
+SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCsoPD7gFnUnMekz52hWXMJEEUMDSxuaPFs
+W0hoSVk3/AszGcJ3f8wQLZU0HObrTQmnHNK4yZc2AreJ1CRfBsDMRJSUjQJib+ta
+3RGNKJpchJAQeg29dGYvajig4tVUROsdB58Hum/u6f1OCyn1PoSgAfGcq/gcfomk
+6KHYcWUNo1F77rzSImANuVud37r8UVsLr5iy6S7pBOhih94ryNdOwUxkHt3Ph1i6
+Sk/KaAcdHJ1KxtUvkcx8cXIcxcBn6zL9yZJclNqFwJu/U30rCfSMnZEfl2pSy94J
+NqR32HuHUETVPm4pafs5SSYeCaWAe0At6+gnhcn+Yf1+5nyXHdWdAgMBAAGjQjBA
+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBR7W0XP
+r87Lev0xkhpqtvNG61dIUDANBgkqhkiG9w0BAQUFAAOCAQEAeRHAS7ORtvzw6WfU
+DW5FvlXok9LOAz/t2iWwHVfLHjp2oEzsUHboZHIMpKnxuIvW1oeEuzLlQRHAd9mz
+YJ3rG9XRbkREqaYB7FViHXe4XI5ISXycO1cRrK1zN44veFyQaEfZYGDm/Ac9IiAX
+xPcW6cTYcvnIc3zfFi8VqT79aie2oetaupgf1eNNZAqdE8hhuvU5HIe6uL17In/2
+/qxAeeWsEG89jxt5dovEN7MhGITlNgDrYyCZuen+MwS7QcjBAvlEYyCegc5C09Y/
+LHbTY5xZ3Y+m4Q6gLkH3LpVHz7z9M/P2C2F+fpErgUfCJzDupxBdN49cOSvkBPB7
+jVaMaA==
+-----END CERTIFICATE-----
+
+VeriSign Class 3 Public Primary Certification Authority - G5
+============================================================
+-----BEGIN CERTIFICATE-----
+MIIE0zCCA7ugAwIBAgIQGNrRniZ96LtKIVjNzGs7SjANBgkqhkiG9w0BAQUFADCB
+yjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL
+ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJp
+U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxW
+ZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0
+aG9yaXR5IC0gRzUwHhcNMDYxMTA4MDAwMDAwWhcNMzYwNzE2MjM1OTU5WjCByjEL
+MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW
+ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJpU2ln
+biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp
+U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y
+aXR5IC0gRzUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvJAgIKXo1
+nmAMqudLO07cfLw8RRy7K+D+KQL5VwijZIUVJ/XxrcgxiV0i6CqqpkKzj/i5Vbex
+t0uz/o9+B1fs70PbZmIVYc9gDaTY3vjgw2IIPVQT60nKWVSFJuUrjxuf6/WhkcIz
+SdhDY2pSS9KP6HBRTdGJaXvHcPaz3BJ023tdS1bTlr8Vd6Gw9KIl8q8ckmcY5fQG
+BO+QueQA5N06tRn/Arr0PO7gi+s3i+z016zy9vA9r911kTMZHRxAy3QkGSGT2RT+
+rCpSx4/VBEnkjWNHiDxpg8v+R70rfk/Fla4OndTRQ8Bnc+MUCH7lP59zuDMKz10/
+NIeWiu5T6CUVAgMBAAGjgbIwga8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8E
+BAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJaW1hZ2UvZ2lmMCEwHzAH
+BgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYjaHR0cDovL2xvZ28udmVy
+aXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFH/TZafC3ey78DAJ80M5+gKv
+MzEzMA0GCSqGSIb3DQEBBQUAA4IBAQCTJEowX2LP2BqYLz3q3JktvXf2pXkiOOzE
+p6B4Eq1iDkVwZMXnl2YtmAl+X6/WzChl8gGqCBpH3vn5fJJaCGkgDdk+bW48DW7Y
+5gaRQBi5+MHt39tBquCWIMnNZBU4gcmU7qKEKQsTb47bDN0lAtukixlE0kF6BWlK
+WE9gyn6CagsCqiUXObXbf+eEZSqVir2G3l6BFoMtEMze/aiCKm0oHw0LxOXnGiYZ
+4fQRbxC1lfznQgUy286dUV4otp6F01vvpX1FQHKOtw5rDgb7MzVIcbidJ4vEZV8N
+hnacRHr2lVz2XTIIM6RUthg/aFzyQkqFOFSDX9HoLPKsEdao7WNq
+-----END CERTIFICATE-----
+
+SecureTrust CA
+==============
+-----BEGIN CERTIFICATE-----
+MIIDuDCCAqCgAwIBAgIQDPCOXAgWpa1Cf/DrJxhZ0DANBgkqhkiG9w0BAQUFADBI
+MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x
+FzAVBgNVBAMTDlNlY3VyZVRydXN0IENBMB4XDTA2MTEwNzE5MzExOFoXDTI5MTIz
+MTE5NDA1NVowSDELMAkGA1UEBhMCVVMxIDAeBgNVBAoTF1NlY3VyZVRydXN0IENv
+cnBvcmF0aW9uMRcwFQYDVQQDEw5TZWN1cmVUcnVzdCBDQTCCASIwDQYJKoZIhvcN
+AQEBBQADggEPADCCAQoCggEBAKukgeWVzfX2FI7CT8rU4niVWJxB4Q2ZQCQXOZEz
+Zum+4YOvYlyJ0fwkW2Gz4BERQRwdbvC4u/jep4G6pkjGnx29vo6pQT64lO0pGtSO
+0gMdA+9tDWccV9cGrcrI9f4Or2YlSASWC12juhbDCE/RRvgUXPLIXgGZbf2IzIao
+wW8xQmxSPmjL8xk037uHGFaAJsTQ3MBv396gwpEWoGQRS0S8Hvbn+mPeZqx2pHGj
+7DaUaHp3pLHnDi+BeuK1cobvomuL8A/b01k/unK8RCSc43Oz969XL0Imnal0ugBS
+8kvNU3xHCzaFDmapCJcWNFfBZveA4+1wVMeT4C4oFVmHursCAwEAAaOBnTCBmjAT
+BgkrBgEEAYI3FAIEBh4EAEMAQTALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB
+/zAdBgNVHQ4EFgQUQjK2FvoE/f5dS3rD/fdMQB1aQ68wNAYDVR0fBC0wKzApoCeg
+JYYjaHR0cDovL2NybC5zZWN1cmV0cnVzdC5jb20vU1RDQS5jcmwwEAYJKwYBBAGC
+NxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBADDtT0rhWDpSclu1pqNlGKa7UTt3
+6Z3q059c4EVlew3KW+JwULKUBRSuSceNQQcSc5R+DCMh/bwQf2AQWnL1mA6s7Ll/
+3XpvXdMc9P+IBWlCqQVxyLesJugutIxq/3HcuLHfmbx8IVQr5Fiiu1cprp6poxkm
+D5kuCLDv/WnPmRoJjeOnnyvJNjR7JLN4TJUXpAYmHrZkUjZfYGfZnMUFdAvnZyPS
+CPyI6a6Lf+Ew9Dd+/cYy2i2eRDAwbO4H3tI0/NL/QPZL9GZGBlSm8jIKYyYwa5vR
+3ItHuuG51WLQoqD0ZwV4KWMabwTW+MZMo5qxN7SN5ShLHZ4swrhovO0C7jE=
+-----END CERTIFICATE-----
+
+Secure Global CA
+================
+-----BEGIN CERTIFICATE-----
+MIIDvDCCAqSgAwIBAgIQB1YipOjUiolN9BPI8PjqpTANBgkqhkiG9w0BAQUFADBK
+MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x
+GTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwHhcNMDYxMTA3MTk0MjI4WhcNMjkx
+MjMxMTk1MjA2WjBKMQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3Qg
+Q29ycG9yYXRpb24xGTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwggEiMA0GCSqG
+SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvNS7YrGxVaQZx5RNoJLNP2MwhR/jxYDiJ
+iQPpvepeRlMJ3Fz1Wuj3RSoC6zFh1ykzTM7HfAo3fg+6MpjhHZevj8fcyTiW89sa
+/FHtaMbQbqR8JNGuQsiWUGMu4P51/pinX0kuleM5M2SOHqRfkNJnPLLZ/kG5VacJ
+jnIFHovdRIWCQtBJwB1g8NEXLJXr9qXBkqPFwqcIYA1gBBCWeZ4WNOaptvolRTnI
+HmX5k/Wq8VLcmZg9pYYaDDUz+kulBAYVHDGA76oYa8J719rO+TMg1fW9ajMtgQT7
+sFzUnKPiXB3jqUJ1XnvUd+85VLrJChgbEplJL4hL/VBi0XPnj3pDAgMBAAGjgZ0w
+gZowEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0PBAQDAgGGMA8GA1UdEwEB/wQF
+MAMBAf8wHQYDVR0OBBYEFK9EBMJBfkiD2045AuzshHrmzsmkMDQGA1UdHwQtMCsw
+KaAnoCWGI2h0dHA6Ly9jcmwuc2VjdXJldHJ1c3QuY29tL1NHQ0EuY3JsMBAGCSsG
+AQQBgjcVAQQDAgEAMA0GCSqGSIb3DQEBBQUAA4IBAQBjGghAfaReUw132HquHw0L
+URYD7xh8yOOvaliTFGCRsoTciE6+OYo68+aCiV0BN7OrJKQVDpI1WkpEXk5X+nXO
+H0jOZvQ8QCaSmGwb7iRGDBezUqXbpZGRzzfTb+cnCDpOGR86p1hcF895P4vkp9Mm
+I50mD1hp/Ed+stCNi5O/KU9DaXR2Z0vPB4zmAve14bRDtUstFJ/53CYNv6ZHdAbY
+iNE6KTCEztI5gGIbqMdXSbxqVVFnFUq+NQfk1XWYN3kwFNspnWzFacxHVaIw98xc
+f8LDmBxrThaA63p4ZUWiABqvDA1VZDRIuJK58bRQKfJPIx/abKwfROHdI3hRW8cW
+-----END CERTIFICATE-----
+
COMODO Certification Authority
==============================
-
-----BEGIN CERTIFICATE-----
MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB
gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G
@@ -1565,7 +2420,6 @@ ZQ==
Network Solutions Certificate Authority
=======================================
-
-----BEGIN CERTIFICATE-----
MIID5jCCAs6gAwIBAgIQV8szb8JcFuZHFhfjkDFo4DANBgkqhkiG9w0BAQUFADBi
MQswCQYDVQQGEwJVUzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMu
@@ -1590,9 +2444,39 @@ wKeI8lN3s2Berq4o2jUsbzRF0ybh3uxbTydrFny9RAQYgrOJeRcQcT16ohZO9QHN
pGxlaKFJdlxDydi8NmdspZS11My5vWo1ViHe2MPr+8ukYEywVaCge1ey
-----END CERTIFICATE-----
+WellsSecure Public Root Certificate Authority
+=============================================
+-----BEGIN CERTIFICATE-----
+MIIEvTCCA6WgAwIBAgIBATANBgkqhkiG9w0BAQUFADCBhTELMAkGA1UEBhMCVVMx
+IDAeBgNVBAoMF1dlbGxzIEZhcmdvIFdlbGxzU2VjdXJlMRwwGgYDVQQLDBNXZWxs
+cyBGYXJnbyBCYW5rIE5BMTYwNAYDVQQDDC1XZWxsc1NlY3VyZSBQdWJsaWMgUm9v
+dCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwHhcNMDcxMjEzMTcwNzU0WhcNMjIxMjE0
+MDAwNzU0WjCBhTELMAkGA1UEBhMCVVMxIDAeBgNVBAoMF1dlbGxzIEZhcmdvIFdl
+bGxzU2VjdXJlMRwwGgYDVQQLDBNXZWxscyBGYXJnbyBCYW5rIE5BMTYwNAYDVQQD
+DC1XZWxsc1NlY3VyZSBQdWJsaWMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkw
+ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDub7S9eeKPCCGeOARBJe+r
+WxxTkqxtnt3CxC5FlAM1iGd0V+PfjLindo8796jE2yljDpFoNoqXjopxaAkH5OjU
+Dk/41itMpBb570OYj7OeUt9tkTmPOL13i0Nj67eT/DBMHAGTthP796EfvyXhdDcs
+HqRePGj4S78NuR4uNuip5Kf4D8uCdXw1LSLWwr8L87T8bJVhHlfXBIEyg1J55oNj
+z7fLY4sR4r1e6/aN7ZVyKLSsEmLpSjPmgzKuBXWVvYSV2ypcm44uDLiBK0HmOFaf
+SZtsdvqKXfcBeYF8wYNABf5x/Qw/zE5gCQ5lRxAvAcAFP4/4s0HvWkJ+We/Slwxl
+AgMBAAGjggE0MIIBMDAPBgNVHRMBAf8EBTADAQH/MDkGA1UdHwQyMDAwLqAsoCqG
+KGh0dHA6Ly9jcmwucGtpLndlbGxzZmFyZ28uY29tL3dzcHJjYS5jcmwwDgYDVR0P
+AQH/BAQDAgHGMB0GA1UdDgQWBBQmlRkQ2eihl5H/3BnZtQQ+0nMKajCBsgYDVR0j
+BIGqMIGngBQmlRkQ2eihl5H/3BnZtQQ+0nMKaqGBi6SBiDCBhTELMAkGA1UEBhMC
+VVMxIDAeBgNVBAoMF1dlbGxzIEZhcmdvIFdlbGxzU2VjdXJlMRwwGgYDVQQLDBNX
+ZWxscyBGYXJnbyBCYW5rIE5BMTYwNAYDVQQDDC1XZWxsc1NlY3VyZSBQdWJsaWMg
+Um9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHmCAQEwDQYJKoZIhvcNAQEFBQADggEB
+ALkVsUSRzCPIK0134/iaeycNzXK7mQDKfGYZUMbVmO2rvwNa5U3lHshPcZeG1eMd
+/ZDJPHV3V3p9+N701NX3leZ0bh08rnyd2wIDBSxxSyU+B+NemvVmFymIGjifz6pB
+A4SXa5M4esowRBskRDPQ5NHcKDj0E0M1NSljqHyita04pO2t/caaH/+Xc/77szWn
+k4bGdpEA5qxRFsQnMlzbc9qlk1eOPm01JghZ1edE13YgY+esE2fDbbFwRnzVlhE9
+iW9dqKHrjQrawx0zbKPqZxmamX9LPYNRKh3KL4YMon4QLSvUFpULB6ouFJJJtylv
+2G0xffX8oRAHh84vWdw+WNs=
+-----END CERTIFICATE-----
+
COMODO ECC Certification Authority
==================================
-
-----BEGIN CERTIFICATE-----
MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL
MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE
@@ -1610,9 +2494,226 @@ fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv
GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY=
-----END CERTIFICATE-----
+IGC/A
+=====
+-----BEGIN CERTIFICATE-----
+MIIEAjCCAuqgAwIBAgIFORFFEJQwDQYJKoZIhvcNAQEFBQAwgYUxCzAJBgNVBAYT
+AkZSMQ8wDQYDVQQIEwZGcmFuY2UxDjAMBgNVBAcTBVBhcmlzMRAwDgYDVQQKEwdQ
+TS9TR0ROMQ4wDAYDVQQLEwVEQ1NTSTEOMAwGA1UEAxMFSUdDL0ExIzAhBgkqhkiG
+9w0BCQEWFGlnY2FAc2dkbi5wbS5nb3V2LmZyMB4XDTAyMTIxMzE0MjkyM1oXDTIw
+MTAxNzE0MjkyMlowgYUxCzAJBgNVBAYTAkZSMQ8wDQYDVQQIEwZGcmFuY2UxDjAM
+BgNVBAcTBVBhcmlzMRAwDgYDVQQKEwdQTS9TR0ROMQ4wDAYDVQQLEwVEQ1NTSTEO
+MAwGA1UEAxMFSUdDL0ExIzAhBgkqhkiG9w0BCQEWFGlnY2FAc2dkbi5wbS5nb3V2
+LmZyMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsh/R0GLFMzvABIaI
+s9z4iPf930Pfeo2aSVz2TqrMHLmh6yeJ8kbpO0px1R2OLc/mratjUMdUC24SyZA2
+xtgv2pGqaMVy/hcKshd+ebUyiHDKcMCWSo7kVc0dJ5S/znIq7Fz5cyD+vfcuiWe4
+u0dzEvfRNWk68gq5rv9GQkaiv6GFGvm/5P9JhfejcIYyHF2fYPepraX/z9E0+X1b
+F8bc1g4oa8Ld8fUzaJ1O/Id8NhLWo4DoQw1VYZTqZDdH6nfK0LJYBcNdfrGoRpAx
+Vs5wKpayMLh35nnAvSk7/ZR3TL0gzUEl4C7HG7vupARB0l2tEmqKm0f7yd1GQOGd
+PDPQtQIDAQABo3cwdTAPBgNVHRMBAf8EBTADAQH/MAsGA1UdDwQEAwIBRjAVBgNV
+HSAEDjAMMAoGCCqBegF5AQEBMB0GA1UdDgQWBBSjBS8YYFDCiQrdKyFP/45OqDAx
+NjAfBgNVHSMEGDAWgBSjBS8YYFDCiQrdKyFP/45OqDAxNjANBgkqhkiG9w0BAQUF
+AAOCAQEABdwm2Pp3FURo/C9mOnTgXeQp/wYHE4RKq89toB9RlPhJy3Q2FLwV3duJ
+L92PoF189RLrn544pEfMs5bZvpwlqwN+Mw+VgQ39FuCIvjfwbF3QMZsyK10XZZOY
+YLxuj7GoPB7ZHPOpJkL5ZB3C55L29B5aqhlSXa/oovdgoPaN8In1buAKBQGVyYsg
+Crpa/JosPL3Dt8ldeCUFP1YUmwza+zpI/pdpXsoQhvdOlgQITeywvl3cO45Pwf2a
+NjSaTFR+FwNIlQgRHAdvhQh+XU3Endv7rs6y0bO4g2wdsrN58dhwmX7wEwLOXt1R
+0982gaEbeC9xs/FZTEYYKKuF0mBWWg==
+-----END CERTIFICATE-----
+
+Security Communication EV RootCA1
+=================================
+-----BEGIN CERTIFICATE-----
+MIIDfTCCAmWgAwIBAgIBADANBgkqhkiG9w0BAQUFADBgMQswCQYDVQQGEwJKUDEl
+MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEqMCgGA1UECxMh
+U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBFViBSb290Q0ExMB4XDTA3MDYwNjAyMTIz
+MloXDTM3MDYwNjAyMTIzMlowYDELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09N
+IFRydXN0IFN5c3RlbXMgQ08uLExURC4xKjAoBgNVBAsTIVNlY3VyaXR5IENvbW11
+bmljYXRpb24gRVYgUm9vdENBMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC
+ggEBALx/7FebJOD+nLpCeamIivqA4PUHKUPqjgo0No0c+qe1OXj/l3X3L+SqawSE
+RMqm4miO/VVQYg+kcQ7OBzgtQoVQrTyWb4vVog7P3kmJPdZkLjjlHmy1V4qe70gO
+zXppFodEtZDkBp2uoQSXWHnvIEqCa4wiv+wfD+mEce3xDuS4GBPMVjZd0ZoeUWs5
+bmB2iDQL87PRsJ3KYeJkHcFGB7hj3R4zZbOOCVVSPbW9/wfrrWFVGCypaZhKqkDF
+MxRldAD5kd6vA0jFQFTcD4SQaCDFkpbcLuUCRarAX1T4bepJz11sS6/vmsJWXMY1
+VkJqMF/Cq/biPT+zyRGPMUzXn0kCAwEAAaNCMEAwHQYDVR0OBBYEFDVK9U2vP9eC
+OKyrcWUXdYydVZPmMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0G
+CSqGSIb3DQEBBQUAA4IBAQCoh+ns+EBnXcPBZsdAS5f8hxOQWsTvoMpfi7ent/HW
+tWS3irO4G8za+6xmiEHO6Pzk2x6Ipu0nUBsCMCRGef4Eh3CXQHPRwMFXGZpppSeZ
+q51ihPZRwSzJIxXYKLerJRO1RuGGAv8mjMSIkh1W/hln8lXkgKNrnKt34VFxDSDb
+EJrbvXZ5B3eZKK2aXtqxT0QsNY6llsf9g/BYxnnWmHyojf6GPgcWkuF75x3sM3Z+
+Qi5KhfmRiWiEA4Glm5q+4zfFVKtWOxgtQaQM+ELbmaDgcm+7XeEWT1MKZPlO9L9O
+VL14bIjqv5wTJMJwaaJ/D8g8rQjJsJhAoyrniIPtd490
+-----END CERTIFICATE-----
+
+OISTE WISeKey Global Root GA CA
+===============================
+-----BEGIN CERTIFICATE-----
+MIID8TCCAtmgAwIBAgIQQT1yx/RrH4FDffHSKFTfmjANBgkqhkiG9w0BAQUFADCB
+ijELMAkGA1UEBhMCQ0gxEDAOBgNVBAoTB1dJU2VLZXkxGzAZBgNVBAsTEkNvcHly
+aWdodCAoYykgMjAwNTEiMCAGA1UECxMZT0lTVEUgRm91bmRhdGlvbiBFbmRvcnNl
+ZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwgUm9vdCBHQSBDQTAeFw0w
+NTEyMTExNjAzNDRaFw0zNzEyMTExNjA5NTFaMIGKMQswCQYDVQQGEwJDSDEQMA4G
+A1UEChMHV0lTZUtleTEbMBkGA1UECxMSQ29weXJpZ2h0IChjKSAyMDA1MSIwIAYD
+VQQLExlPSVNURSBGb3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBX
+SVNlS2V5IEdsb2JhbCBSb290IEdBIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A
+MIIBCgKCAQEAy0+zAJs9Nt350UlqaxBJH+zYK7LG+DKBKUOVTJoZIyEVRd7jyBxR
+VVuuk+g3/ytr6dTqvirdqFEr12bDYVxgAsj1znJ7O7jyTmUIms2kahnBAbtzptf2
+w93NvKSLtZlhuAGio9RN1AU9ka34tAhxZK9w8RxrfvbDd50kc3vkDIzh2TbhmYsF
+mQvtRTEJysIA2/dyoJaqlYfQjse2YXMNdmaM3Bu0Y6Kff5MTMPGhJ9vZ/yxViJGg
+4E8HsChWjBgbl0SOid3gF27nKu+POQoxhILYQBRJLnpB5Kf+42TMwVlxSywhp1t9
+4B3RLoGbw9ho972WG6xwsRYUC9tguSYBBQIDAQABo1EwTzALBgNVHQ8EBAMCAYYw
+DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUswN+rja8sHnR3JQmthG+IbJphpQw
+EAYJKwYBBAGCNxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBAEuh/wuHbrP5wUOx
+SPMowB0uyQlB+pQAHKSkq0lPjz0e701vvbyk9vImMMkQyh2I+3QZH4VFvbBsUfk2
+ftv1TDI6QU9bR8/oCy22xBmddMVHxjtqD6wU2zz0c5ypBd8A3HR4+vg1YFkCExh8
+vPtNsCBtQ7tgMHpnM1zFmdH4LTlSc/uMqpclXHLZCB6rTjzjgTGfA6b7wP4piFXa
+hNVQA7bihKOmNqoROgHhGEvWRGizPflTdISzRpFGlgC3gCy24eMQ4tui5yiPAZZi
+Fj4A4xylNoEYokxSdsARo27mHbrjWr42U8U+dY+GaSlYU7Wcu2+fXMUY7N0v4ZjJ
+/L7fCg0=
+-----END CERTIFICATE-----
+
+S-TRUST Authentication and Encryption Root CA 2005 PN
+=====================================================
+-----BEGIN CERTIFICATE-----
+MIIEezCCA2OgAwIBAgIQNxkY5lNUfBq1uMtZWts1tzANBgkqhkiG9w0BAQUFADCB
+rjELMAkGA1UEBhMCREUxIDAeBgNVBAgTF0JhZGVuLVd1ZXJ0dGVtYmVyZyAoQlcp
+MRIwEAYDVQQHEwlTdHV0dGdhcnQxKTAnBgNVBAoTIERldXRzY2hlciBTcGFya2Fz
+c2VuIFZlcmxhZyBHbWJIMT4wPAYDVQQDEzVTLVRSVVNUIEF1dGhlbnRpY2F0aW9u
+IGFuZCBFbmNyeXB0aW9uIFJvb3QgQ0EgMjAwNTpQTjAeFw0wNTA2MjIwMDAwMDBa
+Fw0zMDA2MjEyMzU5NTlaMIGuMQswCQYDVQQGEwJERTEgMB4GA1UECBMXQmFkZW4t
+V3VlcnR0ZW1iZXJnIChCVykxEjAQBgNVBAcTCVN0dXR0Z2FydDEpMCcGA1UEChMg
+RGV1dHNjaGVyIFNwYXJrYXNzZW4gVmVybGFnIEdtYkgxPjA8BgNVBAMTNVMtVFJV
+U1QgQXV0aGVudGljYXRpb24gYW5kIEVuY3J5cHRpb24gUm9vdCBDQSAyMDA1OlBO
+MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2bVKwdMz6tNGs9HiTNL1
+toPQb9UY6ZOvJ44TzbUlNlA0EmQpoVXhOmCTnijJ4/Ob4QSwI7+Vio5bG0F/WsPo
+TUzVJBY+h0jUJ67m91MduwwA7z5hca2/OnpYH5Q9XIHV1W/fuJvS9eXLg3KSwlOy
+ggLrra1fFi2SU3bxibYs9cEv4KdKb6AwajLrmnQDaHgTncovmwsdvs91DSaXm8f1
+XgqfeN+zvOyauu9VjxuapgdjKRdZYgkqeQd3peDRF2npW932kKvimAoA0SVtnteF
+hy+S8dF2g08LOlk3KC8zpxdQ1iALCvQm+Z845y2kuJuJja2tyWp9iRe79n+Ag3rm
+7QIDAQABo4GSMIGPMBIGA1UdEwEB/wQIMAYBAf8CAQAwDgYDVR0PAQH/BAQDAgEG
+MCkGA1UdEQQiMCCkHjAcMRowGAYDVQQDExFTVFJvbmxpbmUxLTIwNDgtNTAdBgNV
+HQ4EFgQUD8oeXHngovMpttKFswtKtWXsa1IwHwYDVR0jBBgwFoAUD8oeXHngovMp
+ttKFswtKtWXsa1IwDQYJKoZIhvcNAQEFBQADggEBAK8B8O0ZPCjoTVy7pWMciDMD
+pwCHpB8gq9Yc4wYfl35UvbfRssnV2oDsF9eK9XvCAPbpEW+EoFolMeKJ+aQAPzFo
+LtU96G7m1R08P7K9n3frndOMusDXtk3sU5wPBG7qNWdX4wple5A64U8+wwCSersF
+iXOMy6ZNwPv2AtawB6MDwidAnwzkhYItr5pCHdDHjfhA7p0GVxzZotiAFP7hYy0y
+h9WUUpY6RsZxlj33mA6ykaqP2vROJAA5VeitF7nTNCtKqUDMFypVZUF0Qn71wK/I
+k63yGFs9iQzbRzkk+OBM8h+wPQrKBU6JIRrjKpms/H+h8Q8bHz2eBIPdltkdOpQ=
+-----END CERTIFICATE-----
+
+Microsec e-Szigno Root CA
+=========================
+-----BEGIN CERTIFICATE-----
+MIIHqDCCBpCgAwIBAgIRAMy4579OKRr9otxmpRwsDxEwDQYJKoZIhvcNAQEFBQAw
+cjELMAkGA1UEBhMCSFUxETAPBgNVBAcTCEJ1ZGFwZXN0MRYwFAYDVQQKEw1NaWNy
+b3NlYyBMdGQuMRQwEgYDVQQLEwtlLVN6aWdubyBDQTEiMCAGA1UEAxMZTWljcm9z
+ZWMgZS1Temlnbm8gUm9vdCBDQTAeFw0wNTA0MDYxMjI4NDRaFw0xNzA0MDYxMjI4
+NDRaMHIxCzAJBgNVBAYTAkhVMREwDwYDVQQHEwhCdWRhcGVzdDEWMBQGA1UEChMN
+TWljcm9zZWMgTHRkLjEUMBIGA1UECxMLZS1Temlnbm8gQ0ExIjAgBgNVBAMTGU1p
+Y3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAw
+ggEKAoIBAQDtyADVgXvNOABHzNuEwSFpLHSQDCHZU4ftPkNEU6+r+ICbPHiN1I2u
+uO/TEdyB5s87lozWbxXGd36hL+BfkrYn13aaHUM86tnsL+4582pnS4uCzyL4ZVX+
+LMsvfUh6PXX5qqAnu3jCBspRwn5mS6/NoqdNAoI/gqyFxuEPkEeZlApxcpMqyabA
+vjxWTHOSJ/FrtfX9/DAFYJLG65Z+AZHCabEeHXtTRbjcQR/Ji3HWVBTji1R4P770
+Yjtb9aPs1ZJ04nQw7wHb4dSrmZsqa/i9phyGI0Jf7Enemotb9HI6QMVJPqW+jqpx
+62z69Rrkav17fVVA71hu5tnVvCSrwe+3AgMBAAGjggQ3MIIEMzBnBggrBgEFBQcB
+AQRbMFkwKAYIKwYBBQUHMAGGHGh0dHBzOi8vcmNhLmUtc3ppZ25vLmh1L29jc3Aw
+LQYIKwYBBQUHMAKGIWh0dHA6Ly93d3cuZS1zemlnbm8uaHUvUm9vdENBLmNydDAP
+BgNVHRMBAf8EBTADAQH/MIIBcwYDVR0gBIIBajCCAWYwggFiBgwrBgEEAYGoGAIB
+AQEwggFQMCgGCCsGAQUFBwIBFhxodHRwOi8vd3d3LmUtc3ppZ25vLmh1L1NaU1ov
+MIIBIgYIKwYBBQUHAgIwggEUHoIBEABBACAAdABhAG4A+gBzAO0AdAB2AOEAbgB5
+ACAA6QByAHQAZQBsAG0AZQB6AOkAcwDpAGgAZQB6ACAA6QBzACAAZQBsAGYAbwBn
+AGEAZADhAHMA4QBoAG8AegAgAGEAIABTAHoAbwBsAGcA4QBsAHQAYQB0APMAIABT
+AHoAbwBsAGcA4QBsAHQAYQB0AOEAcwBpACAAUwB6AGEAYgDhAGwAeQB6AGEAdABh
+ACAAcwB6AGUAcgBpAG4AdAAgAGsAZQBsAGwAIABlAGwAagDhAHIAbgBpADoAIABo
+AHQAdABwADoALwAvAHcAdwB3AC4AZQAtAHMAegBpAGcAbgBvAC4AaAB1AC8AUwBa
+AFMAWgAvMIHIBgNVHR8EgcAwgb0wgbqggbeggbSGIWh0dHA6Ly93d3cuZS1zemln
+bm8uaHUvUm9vdENBLmNybIaBjmxkYXA6Ly9sZGFwLmUtc3ppZ25vLmh1L0NOPU1p
+Y3Jvc2VjJTIwZS1Temlnbm8lMjBSb290JTIwQ0EsT1U9ZS1Temlnbm8lMjBDQSxP
+PU1pY3Jvc2VjJTIwTHRkLixMPUJ1ZGFwZXN0LEM9SFU/Y2VydGlmaWNhdGVSZXZv
+Y2F0aW9uTGlzdDtiaW5hcnkwDgYDVR0PAQH/BAQDAgEGMIGWBgNVHREEgY4wgYuB
+EGluZm9AZS1zemlnbm8uaHWkdzB1MSMwIQYDVQQDDBpNaWNyb3NlYyBlLVN6aWdu
+w7MgUm9vdCBDQTEWMBQGA1UECwwNZS1TemlnbsOzIEhTWjEWMBQGA1UEChMNTWlj
+cm9zZWMgS2Z0LjERMA8GA1UEBxMIQnVkYXBlc3QxCzAJBgNVBAYTAkhVMIGsBgNV
+HSMEgaQwgaGAFMegSXUWYYTbMUuE0vE3QJDvTtz3oXakdDByMQswCQYDVQQGEwJI
+VTERMA8GA1UEBxMIQnVkYXBlc3QxFjAUBgNVBAoTDU1pY3Jvc2VjIEx0ZC4xFDAS
+BgNVBAsTC2UtU3ppZ25vIENBMSIwIAYDVQQDExlNaWNyb3NlYyBlLVN6aWdubyBS
+b290IENBghEAzLjnv04pGv2i3GalHCwPETAdBgNVHQ4EFgQUx6BJdRZhhNsxS4TS
+8TdAkO9O3PcwDQYJKoZIhvcNAQEFBQADggEBANMTnGZjWS7KXHAM/IO8VbH0jgds
+ZifOwTsgqRy7RlRw7lrMoHfqaEQn6/Ip3Xep1fvj1KcExJW4C+FEaGAHQzAxQmHl
+7tnlJNUb3+FKG6qfx1/4ehHqE5MAyopYse7tDk2016g2JnzgOsHVV4Lxdbb9iV/a
+86g4nzUGCM4ilb7N1fy+W955a9x6qWVmvrElWl/tftOsRm1M9DKHtCAE4Gx4sHfR
+hUZLphK3dehKyVZs15KrnfVJONJPU+NVkBHbmJbGSfI+9J8b4PeI3CVimUTYc78/
+MPMMNz7UwiiAc7EBt51alhQBS6kRnSlqLtBdgcDPsiBDxwPgN05dCtxZICU=
+-----END CERTIFICATE-----
+
+Certigna
+========
+-----BEGIN CERTIFICATE-----
+MIIDqDCCApCgAwIBAgIJAP7c4wEPyUj/MA0GCSqGSIb3DQEBBQUAMDQxCzAJBgNV
+BAYTAkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hMB4X
+DTA3MDYyOTE1MTMwNVoXDTI3MDYyOTE1MTMwNVowNDELMAkGA1UEBhMCRlIxEjAQ
+BgNVBAoMCURoaW15b3RpczERMA8GA1UEAwwIQ2VydGlnbmEwggEiMA0GCSqGSIb3
+DQEBAQUAA4IBDwAwggEKAoIBAQDIaPHJ1tazNHUmgh7stL7qXOEm7RFHYeGifBZ4
+QCHkYJ5ayGPhxLGWkv8YbWkj4Sti993iNi+RB7lIzw7sebYs5zRLcAglozyHGxny
+gQcPOJAZ0xH+hrTy0V4eHpbNgGzOOzGTtvKg0KmVEn2lmsxryIRWijOp5yIVUxbw
+zBfsV1/pogqYCd7jX5xv3EjjhQsVWqa6n6xI4wmy9/Qy3l40vhx4XUJbzg4ij02Q
+130yGLMLLGq/jj8UEYkgDncUtT2UCIf3JR7VsmAA7G8qKCVuKj4YYxclPz5EIBb2
+JsglrgVKtOdjLPOMFlN+XPsRGgjBRmKfIrjxwo1p3Po6WAbfAgMBAAGjgbwwgbkw
+DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUGu3+QTmQtCRZvgHyUtVF9lo53BEw
+ZAYDVR0jBF0wW4AUGu3+QTmQtCRZvgHyUtVF9lo53BGhOKQ2MDQxCzAJBgNVBAYT
+AkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hggkA/tzj
+AQ/JSP8wDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIABzANBgkqhkiG
+9w0BAQUFAAOCAQEAhQMeknH2Qq/ho2Ge6/PAD/Kl1NqV5ta+aDY9fm4fTIrv0Q8h
+bV6lUmPOEvjvKtpv6zf+EwLHyzs+ImvaYS5/1HI93TDhHkxAGYwP15zRgzB7mFnc
+fca5DClMoTOi62c6ZYTTluLtdkVwj7Ur3vkj1kluPBS1xp81HlDQwY9qcEQCYsuu
+HWhBp6pX6FOqB9IG9tUUBguRA3UsbHK1YZWaDYu5Def131TN3ubY1gkIl2PlwS6w
+t0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw
+WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg==
+-----END CERTIFICATE-----
+
+AC Ra\xC3\xADz Certic\xC3\xA1mara S.A.
+======================================
+-----BEGIN CERTIFICATE-----
+MIIGZjCCBE6gAwIBAgIPB35Sk3vgFeNX8GmMy+wMMA0GCSqGSIb3DQEBBQUAMHsx
+CzAJBgNVBAYTAkNPMUcwRQYDVQQKDD5Tb2NpZWRhZCBDYW1lcmFsIGRlIENlcnRp
+ZmljYWNpw7NuIERpZ2l0YWwgLSBDZXJ0aWPDoW1hcmEgUy5BLjEjMCEGA1UEAwwa
+QUMgUmHDrXogQ2VydGljw6FtYXJhIFMuQS4wHhcNMDYxMTI3MjA0NjI5WhcNMzAw
+NDAyMjE0MjAyWjB7MQswCQYDVQQGEwJDTzFHMEUGA1UECgw+U29jaWVkYWQgQ2Ft
+ZXJhbCBkZSBDZXJ0aWZpY2FjacOzbiBEaWdpdGFsIC0gQ2VydGljw6FtYXJhIFMu
+QS4xIzAhBgNVBAMMGkFDIFJhw616IENlcnRpY8OhbWFyYSBTLkEuMIICIjANBgkq
+hkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAq2uJo1PMSCMI+8PPUZYILrgIem08kBeG
+qentLhM0R7LQcNzJPNCNyu5LF6vQhbCnIwTLqKL85XXbQMpiiY9QngE9JlsYhBzL
+fDe3fezTf3MZsGqy2IiKLUV0qPezuMDU2s0iiXRNWhU5cxh0T7XrmafBHoi0wpOQ
+Y5fzp6cSsgkiBzPZkc0OnB8OIMfuuzONj8LSWKdf/WU34ojC2I+GdV75LaeHM/J4
+Ny+LvB2GNzmxlPLYvEqcgxhaBvzz1NS6jBUJJfD5to0EfhcSM2tXSExP2yYe68yQ
+54v5aHxwD6Mq0Do43zeX4lvegGHTgNiRg0JaTASJaBE8rF9ogEHMYELODVoqDA+b
+MMCm8Ibbq0nXl21Ii/kDwFJnmxL3wvIumGVC2daa49AZMQyth9VXAnow6IYm+48j
+ilSH5L887uvDdUhfHjlvgWJsxS3EF1QZtzeNnDeRyPYL1epjb4OsOMLzP96a++Ej
+YfDIJss2yKHzMI+ko6Kh3VOz3vCaMh+DkXkwwakfU5tTohVTP92dsxA7SH2JD/zt
+A/X7JWR1DhcZDY8AFmd5ekD8LVkH2ZD6mq093ICK5lw1omdMEWux+IBkAC1vImHF
+rEsm5VoQgpukg3s0956JkSCXjrdCx2bD0Omk1vUgjcTDlaxECp1bczwmPS9KvqfJ
+pxAe+59QafMCAwEAAaOB5jCB4zAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQE
+AwIBBjAdBgNVHQ4EFgQU0QnQ6dfOeXRU+Tows/RtLAMDG2gwgaAGA1UdIASBmDCB
+lTCBkgYEVR0gADCBiTArBggrBgEFBQcCARYfaHR0cDovL3d3dy5jZXJ0aWNhbWFy
+YS5jb20vZHBjLzBaBggrBgEFBQcCAjBOGkxMaW1pdGFjaW9uZXMgZGUgZ2FyYW50
+7WFzIGRlIGVzdGUgY2VydGlmaWNhZG8gc2UgcHVlZGVuIGVuY29udHJhciBlbiBs
+YSBEUEMuMA0GCSqGSIb3DQEBBQUAA4ICAQBclLW4RZFNjmEfAygPU3zmpFmps4p6
+xbD/CHwso3EcIRNnoZUSQDWDg4902zNc8El2CoFS3UnUmjIz75uny3XlesuXEpBc
+unvFm9+7OSPI/5jOCk0iAUgHforA1SBClETvv3eiiWdIG0ADBaGJ7M9i4z0ldma/
+Jre7Ir5v/zlXdLp6yQGVwZVR6Kss+LGGIOk/yzVb0hfpKv6DExdA7ohiZVvVO2Dp
+ezy4ydV/NgIlqmjCMRW3MGXrfx1IebHPOeJCgBbT9ZMj/EyXyVo3bHwi2ErN0o42
+gzmRkBDI8ck1fj+404HGIGQatlDCIaR43NAvO2STdPCWkPHv+wlaNECW8DYSwaN0
+jJN+Qd53i+yG2dIPPy3RzECiiWZIHiCznCNZc6lEc7wkeZBWN7PGKX6jD/EpOe9+
+XCgycDWs2rjIdWb8m0w5R44bb5tNAlQiM+9hup4phO9OSzNHdpdqy35f/RWmnkJD
+W2ZaiogN9xa5P1FlK2Zqi9E4UqLWRhH6/JocdJ6PlwsCT2TG9WjTSy3/pDceiz+/
+RL5hRqGEPQgnTIEgd4kI6mdAXmwIUV80WoyWaM3X94nCHNMyAK9Sy9NgWyo6R35r
+MDOhYil/SrnhLecUIw4OGEfhefwVVdCx/CVxY3UzHCMrr1zZ7Ud3YA47Dx7SwNxk
+BYn8eNZcLCZDqQ==
+-----END CERTIFICATE-----
+
TC TrustCenter Class 2 CA II
============================
-
-----BEGIN CERTIFICATE-----
MIIEqjCCA5KgAwIBAgIOLmoAAQACH9dSISwRXDswDQYJKoZIhvcNAQEFBQAwdjEL
MAkGA1UEBhMCREUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxIjAgBgNV
@@ -1643,7 +2744,6 @@ dUpWoNIFOqTmjZKILPPy4cHGYdtBxceb9w4aUUXCYWvcZCcXjFq32nQozZfkvQ==
TC TrustCenter Class 3 CA II
============================
-
-----BEGIN CERTIFICATE-----
MIIEqjCCA5KgAwIBAgIOSkcAAQAC5aBd1j8AUb8wDQYJKoZIhvcNAQEFBQAwdjEL
MAkGA1UEBhMCREUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxIjAgBgNV
@@ -1674,7 +2774,6 @@ S+opvaqCZh77gaqnN60TGOaSw4HBM7uIHqHn4rS9MWwOUT1v+5ZWgOI2F9Hc5A==
TC TrustCenter Universal CA I
=============================
-
-----BEGIN CERTIFICATE-----
MIID3TCCAsWgAwIBAgIOHaIAAQAC7LdggHiNtgYwDQYJKoZIhvcNAQEFBQAweTEL
MAkGA1UEBhMCREUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxJDAiBgNV
@@ -1699,9 +2798,83 @@ ujdEWBF6AmqI8Dc08BnprNRlc/ZpjGSUOnmFKbAWKwyCPwacx/0QK54PLLae4xW/
2TYcuiUaUj0a7CIMHOCkoj3w6DnPgcB77V0fb8XQC9eY
-----END CERTIFICATE-----
+Deutsche Telekom Root CA 2
+==========================
+-----BEGIN CERTIFICATE-----
+MIIDnzCCAoegAwIBAgIBJjANBgkqhkiG9w0BAQUFADBxMQswCQYDVQQGEwJERTEc
+MBoGA1UEChMTRGV1dHNjaGUgVGVsZWtvbSBBRzEfMB0GA1UECxMWVC1UZWxlU2Vj
+IFRydXN0IENlbnRlcjEjMCEGA1UEAxMaRGV1dHNjaGUgVGVsZWtvbSBSb290IENB
+IDIwHhcNOTkwNzA5MTIxMTAwWhcNMTkwNzA5MjM1OTAwWjBxMQswCQYDVQQGEwJE
+RTEcMBoGA1UEChMTRGV1dHNjaGUgVGVsZWtvbSBBRzEfMB0GA1UECxMWVC1UZWxl
+U2VjIFRydXN0IENlbnRlcjEjMCEGA1UEAxMaRGV1dHNjaGUgVGVsZWtvbSBSb290
+IENBIDIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCrC6M14IspFLEU
+ha88EOQ5bzVdSq7d6mGNlUn0b2SjGmBmpKlAIoTZ1KXleJMOaAGtuU1cOs7TuKhC
+QN/Po7qCWWqSG6wcmtoIKyUn+WkjR/Hg6yx6m/UTAtB+NHzCnjwAWav12gz1Mjwr
+rFDa1sPeg5TKqAyZMg4ISFZbavva4VhYAUlfckE8FQYBjl2tqriTtM2e66foai1S
+NNs671x1Udrb8zH57nGYMsRUFUQM+ZtV7a3fGAigo4aKSe5TBY8ZTNXeWHmb0moc
+QqvF1afPaA+W5OFhmHZhyJF81j4A4pFQh+GdCuatl9Idxjp9y7zaAzTVjlsB9WoH
+txa2bkp/AgMBAAGjQjBAMB0GA1UdDgQWBBQxw3kbuvVT1xfgiXotF2wKsyudMzAP
+BgNVHRMECDAGAQH/AgEFMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOC
+AQEAlGRZrTlk5ynrE/5aw4sTV8gEJPB0d8Bg42f76Ymmg7+Wgnxu1MM9756Abrsp
+tJh6sTtU6zkXR34ajgv8HzFZMQSyzhfzLMdiNlXiItiJVbSYSKpk+tYcNthEeFpa
+IzpXl/V6ME+un2pMSyuOoAPjPuCp1NJ70rOo4nI8rZ7/gFnkm0W09juwzTkZmDLl
+6iFhkOQxIY40sfcvNUqFENrnijchvllj4PKFiDFT1FQUhXB59C4Gdyd1Lx+4ivn+
+xbrYNuSD7Odlt79jWvNGr4GUN9RBjNYj1h7P9WgbRGOiWrqnNVmh5XAFmw4jV5mU
+Cm26OWMohpLzGITY+9HPBVZkVw==
+-----END CERTIFICATE-----
+
+ComSign CA
+==========
+-----BEGIN CERTIFICATE-----
+MIIDkzCCAnugAwIBAgIQFBOWgxRVjOp7Y+X8NId3RDANBgkqhkiG9w0BAQUFADA0
+MRMwEQYDVQQDEwpDb21TaWduIENBMRAwDgYDVQQKEwdDb21TaWduMQswCQYDVQQG
+EwJJTDAeFw0wNDAzMjQxMTMyMThaFw0yOTAzMTkxNTAyMThaMDQxEzARBgNVBAMT
+CkNvbVNpZ24gQ0ExEDAOBgNVBAoTB0NvbVNpZ24xCzAJBgNVBAYTAklMMIIBIjAN
+BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA8ORUaSvTx49qROR+WCf4C9DklBKK
+8Rs4OC8fMZwG1Cyn3gsqrhqg455qv588x26i+YtkbDqthVVRVKU4VbirgwTyP2Q2
+98CNQ0NqZtH3FyrV7zb6MBBC11PN+fozc0yz6YQgitZBJzXkOPqUm7h65HkfM/sb
+2CEJKHxNGGleZIp6GZPKfuzzcuc3B1hZKKxC+cX/zT/npfo4sdAMx9lSGlPWgcxC
+ejVb7Us6eva1jsz/D3zkYDaHL63woSV9/9JLEYhwVKZBqGdTUkJe5DSe5L6j7Kpi
+Xd3DTKaCQeQzC6zJMw9kglcq/QytNuEMrkvF7zuZ2SOzW120V+x0cAwqTwIDAQAB
+o4GgMIGdMAwGA1UdEwQFMAMBAf8wPQYDVR0fBDYwNDAyoDCgLoYsaHR0cDovL2Zl
+ZGlyLmNvbXNpZ24uY28uaWwvY3JsL0NvbVNpZ25DQS5jcmwwDgYDVR0PAQH/BAQD
+AgGGMB8GA1UdIwQYMBaAFEsBmz5WGmU2dst7l6qSBe4y5ygxMB0GA1UdDgQWBBRL
+AZs+VhplNnbLe5eqkgXuMucoMTANBgkqhkiG9w0BAQUFAAOCAQEA0Nmlfv4pYEWd
+foPPbrxHbvUanlR2QnG0PFg/LUAlQvaBnPGJEMgOqnhPOAlXsDzACPw1jvFIUY0M
+cXS6hMTXcpuEfDhOZAYnKuGntewImbQKDdSFc8gS4TXt8QUxHXOZDOuWyt3T5oWq
+8Ir7dcHyCTxlZWTzTNity4hp8+SDtwy9F1qWF8pb/627HOkthIDYIb6FUtnUdLlp
+hbpN7Sgy6/lhSuTENh4Z3G+EER+V9YMoGKgzkkMn3V0TBEVPh9VGzT2ouvDzuFYk
+Res3x+F2T3I5GN9+dHLHcy056mDmrRGiVod7w2ia/viMcKjfZTL0pECMocJEAw6U
+AGegcQCCSA==
+-----END CERTIFICATE-----
+
+ComSign Secured CA
+==================
+-----BEGIN CERTIFICATE-----
+MIIDqzCCApOgAwIBAgIRAMcoRwmzuGxFjB36JPU2TukwDQYJKoZIhvcNAQEFBQAw
+PDEbMBkGA1UEAxMSQ29tU2lnbiBTZWN1cmVkIENBMRAwDgYDVQQKEwdDb21TaWdu
+MQswCQYDVQQGEwJJTDAeFw0wNDAzMjQxMTM3MjBaFw0yOTAzMTYxNTA0NTZaMDwx
+GzAZBgNVBAMTEkNvbVNpZ24gU2VjdXJlZCBDQTEQMA4GA1UEChMHQ29tU2lnbjEL
+MAkGA1UEBhMCSUwwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDGtWhf
+HZQVw6QIVS3joFd67+l0Kru5fFdJGhFeTymHDEjWaueP1H5XJLkGieQcPOqs49oh
+gHMhCu95mGwfCP+hUH3ymBvJVG8+pSjsIQQPRbsHPaHA+iqYHU4Gk/v1iDurX8sW
+v+bznkqH7Rnqwp9D5PGBpX8QTz7RSmKtUxvLg/8HZaWSLWapW7ha9B20IZFKF3ue
+Mv5WJDmyVIRD9YTC2LxBkMyd1mja6YJQqTtoz7VdApRgFrFD2UNd3V2Hbuq7s8lr
+9gOUCXDeFhF6K+h2j0kQmHe5Y1yLM5d19guMsqtb3nQgJT/j8xH5h2iGNXHDHYwt
+6+UarA9z1YJZQIDTAgMBAAGjgacwgaQwDAYDVR0TBAUwAwEB/zBEBgNVHR8EPTA7
+MDmgN6A1hjNodHRwOi8vZmVkaXIuY29tc2lnbi5jby5pbC9jcmwvQ29tU2lnblNl
+Y3VyZWRDQS5jcmwwDgYDVR0PAQH/BAQDAgGGMB8GA1UdIwQYMBaAFMFL7XC29z58
+ADsAj8c+DkWfHl3sMB0GA1UdDgQWBBTBS+1wtvc+fAA7AI/HPg5Fnx5d7DANBgkq
+hkiG9w0BAQUFAAOCAQEAFs/ukhNQq3sUnjO2QiBq1BW9Cav8cujvR3qQrFHBZE7p
+iL1DRYHjZiM/EoZNGeQFsOY3wo3aBijJD4mkU6l1P7CW+6tMM1X5eCZGbxs2mPtC
+dsGCuY7e+0X5YxtiOzkGynd6qDwJz2w2PQ8KRUtpFhpFfTMDZflScZAmlaxMDPWL
+kz/MdXSFmLr/YnpNH4n+rr2UAJm/EaXc4HnFFgt9AmEd6oX5AhVP51qJThRv4zdL
+hfXBPGHg/QVBspJ/wx2g0K5SZGBrGMYmnNj1ZOQ2GmKfig8+/21OGVZOIJFsnzQz
+OjRXUDpvgV4GxvU+fE6OK85lBi5d0ipTdF7Tbieejw==
+-----END CERTIFICATE-----
+
Cybertrust Global Root
======================
-
-----BEGIN CERTIFICATE-----
MIIDoTCCAomgAwIBAgILBAAAAAABD4WqLUgwDQYJKoZIhvcNAQEFBQAwOzEYMBYG
A1UEChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2Jh
@@ -1725,9 +2898,231 @@ A06dGiBh+4E37F78CkWr1+cXVdCg6mCbpvbjjFspwgZgFJ0tl0ypkxWdYcQBX0jW
WL1WMRJOEcgh4LMRkWXbtKaIOM5V
-----END CERTIFICATE-----
+ePKI Root Certification Authority
+=================================
+-----BEGIN CERTIFICATE-----
+MIIFsDCCA5igAwIBAgIQFci9ZUdcr7iXAF7kBtK8nTANBgkqhkiG9w0BAQUFADBe
+MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0
+ZC4xKjAoBgNVBAsMIWVQS0kgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe
+Fw0wNDEyMjAwMjMxMjdaFw0zNDEyMjAwMjMxMjdaMF4xCzAJBgNVBAYTAlRXMSMw
+IQYDVQQKDBpDaHVuZ2h3YSBUZWxlY29tIENvLiwgTHRkLjEqMCgGA1UECwwhZVBL
+SSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIICIjANBgkqhkiG9w0BAQEF
+AAOCAg8AMIICCgKCAgEA4SUP7o3biDN1Z82tH306Tm2d0y8U82N0ywEhajfqhFAH
+SyZbCUNsIZ5qyNUD9WBpj8zwIuQf5/dqIjG3LBXy4P4AakP/h2XGtRrBp0xtInAh
+ijHyl3SJCRImHJ7K2RKilTza6We/CKBk49ZCt0Xvl/T29de1ShUCWH2YWEtgvM3X
+DZoTM1PRYfl61dd4s5oz9wCGzh1NlDivqOx4UXCKXBCDUSH3ET00hl7lSM2XgYI1
+TBnsZfZrxQWh7kcT1rMhJ5QQCtkkO7q+RBNGMD+XPNjX12ruOzjjK9SXDrkb5wdJ
+fzcq+Xd4z1TtW0ado4AOkUPB1ltfFLqfpo0kR0BZv3I4sjZsN/+Z0V0OWQqraffA
+sgRFelQArr5T9rXn4fg8ozHSqf4hUmTFpmfwdQcGlBSBVcYn5AGPF8Fqcde+S/uU
+WH1+ETOxQvdibBjWzwloPn9s9h6PYq2lY9sJpx8iQkEeb5mKPtf5P0B6ebClAZLS
+nT0IFaUQAS2zMnaolQ2zepr7BxB4EW/hj8e6DyUadCrlHJhBmd8hh+iVBmoKs2pH
+dmX2Os+PYhcZewoozRrSgx4hxyy/vv9haLdnG7t4TY3OZ+XkwY63I2binZB1NJip
+NiuKmpS5nezMirH4JYlcWrYvjB9teSSnUmjDhDXiZo1jDiVN1Rmy5nk3pyKdVDEC
+AwEAAaNqMGgwHQYDVR0OBBYEFB4M97Zn8uGSJglFwFU5Lnc/QkqiMAwGA1UdEwQF
+MAMBAf8wOQYEZyoHAAQxMC8wLQIBADAJBgUrDgMCGgUAMAcGBWcqAwAABBRFsMLH
+ClZ87lt4DJX5GFPBphzYEDANBgkqhkiG9w0BAQUFAAOCAgEACbODU1kBPpVJufGB
+uvl2ICO1J2B01GqZNF5sAFPZn/KmsSQHRGoqxqWOeBLoR9lYGxMqXnmbnwoqZ6Yl
+PwZpVnPDimZI+ymBV3QGypzqKOg4ZyYr8dW1P2WT+DZdjo2NQCCHGervJ8A9tDkP
+JXtoUHRVnAxZfVo9QZQlUgjgRywVMRnVvwdVxrsStZf0X4OFunHB2WyBEXYKCrC/
+gpf36j36+uwtqSiUO1bd0lEursC9CBWMd1I0ltabrNMdjmEPNXubrjlpC2JgQCA2
+j6/7Nu4tCEoduL+bXPjqpRugc6bY+G7gMwRfaKonh+3ZwZCc7b3jajWvY9+rGNm6
+5ulK6lCKD2GTHuItGeIwlDWSXQ62B68ZgI9HkFFLLk3dheLSClIKF5r8GrBQAuUB
+o2M3IUxExJtRmREOc5wGj1QupyheRDmHVi03vYVElOEMSyycw5KFNGHLD7ibSkNS
+/jQ6fbjpKdx2qcgw+BRxgMYeNkh0IkFch4LoGHGLQYlE535YW6i4jRPpp2zDR+2z
+Gp1iro2C6pSe3VkQw63d4k3jMdXH7OjysP6SHhYKGvzZ8/gntsm+HbRsZJB/9OTE
+W9c3rkIO3aQab3yIVMUWbuF6aC74Or8NpDyJO3inTmODBCEIZ43ygknQW/2xzQ+D
+hNQ+IIX3Sj0rnP0qCglN6oH4EZw=
+-----END CERTIFICATE-----
+
+T\xc3\x9c\x42\xC4\xB0TAK UEKAE K\xC3\xB6k Sertifika Hizmet Sa\xC4\x9Flay\xc4\xb1\x63\xc4\xb1s\xc4\xb1 - S\xC3\xBCr\xC3\xBCm 3
+=============================================================================================================================
+-----BEGIN CERTIFICATE-----
+MIIFFzCCA/+gAwIBAgIBETANBgkqhkiG9w0BAQUFADCCASsxCzAJBgNVBAYTAlRS
+MRgwFgYDVQQHDA9HZWJ6ZSAtIEtvY2FlbGkxRzBFBgNVBAoMPlTDvHJraXllIEJp
+bGltc2VsIHZlIFRla25vbG9qaWsgQXJhxZ90xLFybWEgS3VydW11IC0gVMOcQsSw
+VEFLMUgwRgYDVQQLDD9VbHVzYWwgRWxla3Ryb25payB2ZSBLcmlwdG9sb2ppIEFy
+YcWfdMSxcm1hIEVuc3RpdMO8c8O8IC0gVUVLQUUxIzAhBgNVBAsMGkthbXUgU2Vy
+dGlmaWthc3lvbiBNZXJrZXppMUowSAYDVQQDDEFUw5xCxLBUQUsgVUVLQUUgS8O2
+ayBTZXJ0aWZpa2EgSGl6bWV0IFNhxJ9sYXnEsWPEsXPEsSAtIFPDvHLDvG0gMzAe
+Fw0wNzA4MjQxMTM3MDdaFw0xNzA4MjExMTM3MDdaMIIBKzELMAkGA1UEBhMCVFIx
+GDAWBgNVBAcMD0dlYnplIC0gS29jYWVsaTFHMEUGA1UECgw+VMO8cmtpeWUgQmls
+aW1zZWwgdmUgVGVrbm9sb2ppayBBcmHFn3TEsXJtYSBLdXJ1bXUgLSBUw5xCxLBU
+QUsxSDBGBgNVBAsMP1VsdXNhbCBFbGVrdHJvbmlrIHZlIEtyaXB0b2xvamkgQXJh
+xZ90xLFybWEgRW5zdGl0w7xzw7wgLSBVRUtBRTEjMCEGA1UECwwaS2FtdSBTZXJ0
+aWZpa2FzeW9uIE1lcmtlemkxSjBIBgNVBAMMQVTDnELEsFRBSyBVRUtBRSBLw7Zr
+IFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sxc8SxIC0gU8O8csO8bSAzMIIB
+IjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAim1L/xCIOsP2fpTo6iBkcK4h
+gb46ezzb8R1Sf1n68yJMlaCQvEhOEav7t7WNeoMojCZG2E6VQIdhn8WebYGHV2yK
+O7Rm6sxA/OOqbLLLAdsyv9Lrhc+hDVXDWzhXcLh1xnnRFDDtG1hba+818qEhTsXO
+fJlfbLm4IpNQp81McGq+agV/E5wrHur+R84EpW+sky58K5+eeROR6Oqeyjh1jmKw
+lZMq5d/pXpduIF9fhHpEORlAHLpVK/swsoHvhOPc7Jg4OQOFCKlUAwUp8MmPi+oL
+hmUZEdPpCSPeaJMDyTYcIW7OjGbxmTDY17PDHfiBLqi9ggtm/oLL4eAagsNAgQID
+AQABo0IwQDAdBgNVHQ4EFgQUvYiHyY/2pAoLquvF/pEjnatKijIwDgYDVR0PAQH/
+BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEBAB18+kmP
+NOm3JpIWmgV050vQbTlswyb2zrgxvMTfvCr4N5EY3ATIZJkrGG2AA1nJrvhY0D7t
+wyOfaTyGOBye79oneNGEN3GKPEs5z35FBtYt2IpNeBLWrcLTy9LQQfMmNkqblWwM
+7uXRQydmwYj3erMgbOqwaSvHIOgMA8RBBZniP+Rr+KCGgceExh/VS4ESshYhLBOh
+gLJeDEoTniDYYkCrkOpkSi+sDQESeUWoL4cZaMjihccwsnX5OD+ywJO0a+IDRM5n
+oN+J1q2MdqMTw5RhK2vZbMEHCiIHhWyFJEapvj+LeISCfiQMnf2BN+MlqO02TpUs
+yZyQ2uypQjyttgI=
+-----END CERTIFICATE-----
+
+Buypass Class 2 CA 1
+====================
+-----BEGIN CERTIFICATE-----
+MIIDUzCCAjugAwIBAgIBATANBgkqhkiG9w0BAQUFADBLMQswCQYDVQQGEwJOTzEd
+MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxHTAbBgNVBAMMFEJ1eXBhc3Mg
+Q2xhc3MgMiBDQSAxMB4XDTA2MTAxMzEwMjUwOVoXDTE2MTAxMzEwMjUwOVowSzEL
+MAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MR0wGwYD
+VQQDDBRCdXlwYXNzIENsYXNzIDIgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEP
+ADCCAQoCggEBAIs8B0XY9t/mx8q6jUPFR42wWsE425KEHK8T1A9vNkYgxC7McXA0
+ojTTNy7Y3Tp3L8DrKehc0rWpkTSHIln+zNvnma+WwajHQN2lFYxuyHyXA8vmIPLX
+l18xoS830r7uvqmtqEyeIWZDO6i88wmjONVZJMHCR3axiFyCO7srpgTXjAePzdVB
+HfCuuCkslFJgNJQ72uA40Z0zPhX0kzLFANq1KWYOOngPIVJfAuWSeyXTkh4vFZ2B
+5J2O6O+JzhRMVB0cgRJNcKi+EAUXfh/RuFdV7c27UsKwHnjCTTZoy1YmwVLBvXb3
+WNVyfh9EdrsAiR0WnVE1703CVu9r4Iw7DekCAwEAAaNCMEAwDwYDVR0TAQH/BAUw
+AwEB/zAdBgNVHQ4EFgQUP42aWYv8e3uco684sDntkHGA1sgwDgYDVR0PAQH/BAQD
+AgEGMA0GCSqGSIb3DQEBBQUAA4IBAQAVGn4TirnoB6NLJzKyQJHyIdFkhb5jatLP
+gcIV1Xp+DCmsNx4cfHZSldq1fyOhKXdlyTKdqC5Wq2B2zha0jX94wNWZUYN/Xtm+
+DKhQ7SLHrQVMdvvt7h5HZPb3J31cKA9FxVxiXqaakZG3Uxcu3K1gnZZkOb1naLKu
+BctN518fV4bVIJwo+28TOPX2EZL2fZleHwzoq0QkKXJAPTZSr4xYkHPB7GEseaHs
+h7U/2k3ZIQAw3pDaDtMaSKk+hQsUi4y8QZ5q9w5wwDX3OaJdZtB7WZ+oRxKaJyOk
+LY4ng5IgodcVf/EuGO70SH8vf/GhGLWhC5SgYiAynB321O+/TIho
+-----END CERTIFICATE-----
+
+Buypass Class 3 CA 1
+====================
+-----BEGIN CERTIFICATE-----
+MIIDUzCCAjugAwIBAgIBAjANBgkqhkiG9w0BAQUFADBLMQswCQYDVQQGEwJOTzEd
+MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxHTAbBgNVBAMMFEJ1eXBhc3Mg
+Q2xhc3MgMyBDQSAxMB4XDTA1MDUwOTE0MTMwM1oXDTE1MDUwOTE0MTMwM1owSzEL
+MAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MR0wGwYD
+VQQDDBRCdXlwYXNzIENsYXNzIDMgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEP
+ADCCAQoCggEBAKSO13TZKWTeXx+HgJHqTjnmGcZEC4DVC69TB4sSveZn8AKxifZg
+isRbsELRwCGoy+Gb72RRtqfPFfV0gGgEkKBYouZ0plNTVUhjP5JW3SROjvi6K//z
+NIqeKNc0n6wv1g/xpC+9UrJJhW05NfBEMJNGJPO251P7vGGvqaMU+8IXF4Rs4HyI
++MkcVyzwPX6UvCWThOiaAJpFBUJXgPROztmuOfbIUxAMZTpHe2DC1vqRycZxbL2R
+hzyRhkmr8w+gbCZ2Xhysm3HljbybIR6c1jh+JIAVMYKWsUnTYjdbiAwKYjT+p0h+
+mbEwi5A3lRyoH6UsjfRVyNvdWQrCrXig9IsCAwEAAaNCMEAwDwYDVR0TAQH/BAUw
+AwEB/zAdBgNVHQ4EFgQUOBTmyPCppAP0Tj4io1vy1uCtQHQwDgYDVR0PAQH/BAQD
+AgEGMA0GCSqGSIb3DQEBBQUAA4IBAQABZ6OMySU9E2NdFm/soT4JXJEVKirZgCFP
+Bdy7pYmrEzMqnji3jG8CcmPHc3ceCQa6Oyh7pEfJYWsICCD8igWKH7y6xsL+z27s
+EzNxZy5p+qksP2bAEllNC1QCkoS72xLvg3BweMhT+t/Gxv/ciC8HwEmdMldg0/L2
+mSlf56oBzKwzqBwKu5HEA6BvtjT5htOzdlSY9EqBs1OdTUDs5XcTRa9bqh/YL0yC
+e/4qxFi7T/ye/QNlGioOw6UgFpRreaaiErS7GqQjel/wroQk5PMr+4okoyeYZdow
+dXb8GZHo2+ubPzK/QJcHJrrM85SFSnonk8+QQtS4Wxam58tAA915
+-----END CERTIFICATE-----
+
+EBG Elektronik Sertifika Hizmet Sa\xC4\x9Flay\xc4\xb1\x63\xc4\xb1s\xc4\xb1
+==========================================================================
+-----BEGIN CERTIFICATE-----
+MIIF5zCCA8+gAwIBAgIITK9zQhyOdAIwDQYJKoZIhvcNAQEFBQAwgYAxODA2BgNV
+BAMML0VCRyBFbGVrdHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sx
+c8SxMTcwNQYDVQQKDC5FQkcgQmlsacWfaW0gVGVrbm9sb2ppbGVyaSB2ZSBIaXpt
+ZXRsZXJpIEEuxZ4uMQswCQYDVQQGEwJUUjAeFw0wNjA4MTcwMDIxMDlaFw0xNjA4
+MTQwMDMxMDlaMIGAMTgwNgYDVQQDDC9FQkcgRWxla3Ryb25payBTZXJ0aWZpa2Eg
+SGl6bWV0IFNhxJ9sYXnEsWPEsXPEsTE3MDUGA1UECgwuRUJHIEJpbGnFn2ltIFRl
+a25vbG9qaWxlcmkgdmUgSGl6bWV0bGVyaSBBLsWeLjELMAkGA1UEBhMCVFIwggIi
+MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDuoIRh0DpqZhAy2DE4f6en5f2h
+4fuXd7hxlugTlkaDT7byX3JWbhNgpQGR4lvFzVcfd2NR/y8927k/qqk153nQ9dAk
+tiHq6yOU/im/+4mRDGSaBUorzAzu8T2bgmmkTPiab+ci2hC6X5L8GCcKqKpE+i4s
+tPtGmggDg3KriORqcsnlZR9uKg+ds+g75AxuetpX/dfreYteIAbTdgtsApWjluTL
+dlHRKJ2hGvxEok3MenaoDT2/F08iiFD9rrbskFBKW5+VQarKD7JK/oCZTqNGFav4
+c0JqwmZ2sQomFd2TkuzbqV9UIlKRcF0T6kjsbgNs2d1s/OsNA/+mgxKb8amTD8Um
+TDGyY5lhcucqZJnSuOl14nypqZoaqsNW2xCaPINStnuWt6yHd6i58mcLlEOzrz5z
++kI2sSXFCjEmN1ZnuqMLfdb3ic1nobc6HmZP9qBVFCVMLDMNpkGMvQQxahByCp0O
+Lna9XvNRiYuoP1Vzv9s6xiQFlpJIqkuNKgPlV5EQ9GooFW5Hd4RcUXSfGenmHmMW
+OeMRFeNYGkS9y8RsZteEBt8w9DeiQyJ50hBs37vmExH8nYQKE3vwO9D8owrXieqW
+fo1IhR5kX9tUoqzVegJ5a9KK8GfaZXINFHDk6Y54jzJ0fFfy1tb0Nokb+Clsi7n2
+l9GkLqq+CxnCRelwXQIDAJ3Zo2MwYTAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB
+/wQEAwIBBjAdBgNVHQ4EFgQU587GT/wWZ5b6SqMHwQSny2re2kcwHwYDVR0jBBgw
+FoAU587GT/wWZ5b6SqMHwQSny2re2kcwDQYJKoZIhvcNAQEFBQADggIBAJuYml2+
+8ygjdsZs93/mQJ7ANtyVDR2tFcU22NU57/IeIl6zgrRdu0waypIN30ckHrMk2pGI
+6YNw3ZPX6bqz3xZaPt7gyPvT/Wwp+BVGoGgmzJNSroIBk5DKd8pNSe/iWtkqvTDO
+TLKBtjDOWU/aWR1qeqRFsIImgYZ29fUQALjuswnoT4cCB64kXPBfrAowzIpAoHME
+wfuJJPaaHFy3PApnNgUIMbOv2AFoKuB4j3TeuFGkjGwgPaL7s9QJ/XvCgKqTbCmY
+Iai7FvOpEl90tYeY8pUm3zTvilORiF0alKM/fCL414i6poyWqD1SNGKfAB5UVUJn
+xk1Gj7sURT0KlhaOEKGXmdXTMIXM3rRyt7yKPBgpaP3ccQfuJDlq+u2lrDgv+R4Q
+DgZxGhBM/nV+/x5XOULK1+EVoVZVWRvRo68R2E7DpSvvkL/A7IITW43WciyTTo9q
+Kd+FPNMN4KIYEsxVL0e3p5sC/kH2iExt2qkBR4NkJ2IQgtYSe14DHzSpyZH+r11t
+hie3I6p1GMog57AP14kOpmciY/SDQSsGS7tY1dHXt7kQY9iJSrSq3RZj9W6+YKH4
+7ejWkE8axsWgKdOnIaj1Wjz3x0miIZpKlVIglnKaZsv30oZDfCK+lvm9AahH3eU7
+QPl1K5srRmSGjR70j/sHd9DqSaIcjVIUpgqT
+-----END CERTIFICATE-----
+
+certSIGN ROOT CA
+================
+-----BEGIN CERTIFICATE-----
+MIIDODCCAiCgAwIBAgIGIAYFFnACMA0GCSqGSIb3DQEBBQUAMDsxCzAJBgNVBAYT
+AlJPMREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBD
+QTAeFw0wNjA3MDQxNzIwMDRaFw0zMTA3MDQxNzIwMDRaMDsxCzAJBgNVBAYTAlJP
+MREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBDQTCC
+ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALczuX7IJUqOtdu0KBuqV5Do
+0SLTZLrTk+jUrIZhQGpgV2hUhE28alQCBf/fm5oqrl0Hj0rDKH/v+yv6efHHrfAQ
+UySQi2bJqIirr1qjAOm+ukbuW3N7LBeCgV5iLKECZbO9xSsAfsT8AzNXDe3i+s5d
+RdY4zTW2ssHQnIFKquSyAVwdj1+ZxLGt24gh65AIgoDzMKND5pCCrlUoSe1b16kQ
+OA7+j0xbm0bqQfWwCHTD0IgztnzXdN/chNFDDnU5oSVAKOp4yw4sLjmdjItuFhwv
+JoIQ4uNllAoEwF73XVv4EOLQunpL+943AAAaWyjj0pxzPjKHmKHJUS/X3qwzs08C
+AwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAcYwHQYDVR0O
+BBYEFOCMm9slSbPxfIbWskKHC9BroNnkMA0GCSqGSIb3DQEBBQUAA4IBAQA+0hyJ
+LjX8+HXd5n9liPRyTMks1zJO890ZeUe9jjtbkw9QSSQTaxQGcu8J06Gh40CEyecY
+MnQ8SG4Pn0vU9x7Tk4ZkVJdjclDVVc/6IJMCopvDI5NOFlV2oHB5bc0hH88vLbwZ
+44gx+FkagQnIl6Z0x2DEW8xXjrJ1/RsCCdtZb3KTafcxQdaIOL+Hsr0Wefmq5L6I
+Jd1hJyMctTEHBDa0GpC9oHRxUIltvBTjD4au8as+x6AJzKNI0eDbZOeStc+vckNw
+i/nDhDwTqn6Sm1dTk/pwwpEOMfmbZ13pljheX7NzTogVZ96edhBiIL5VaZVDADlN
+9u6wWk5JRFRYX0KD
+-----END CERTIFICATE-----
+
+CNNIC ROOT
+==========
+-----BEGIN CERTIFICATE-----
+MIIDVTCCAj2gAwIBAgIESTMAATANBgkqhkiG9w0BAQUFADAyMQswCQYDVQQGEwJD
+TjEOMAwGA1UEChMFQ05OSUMxEzARBgNVBAMTCkNOTklDIFJPT1QwHhcNMDcwNDE2
+MDcwOTE0WhcNMjcwNDE2MDcwOTE0WjAyMQswCQYDVQQGEwJDTjEOMAwGA1UEChMF
+Q05OSUMxEzARBgNVBAMTCkNOTklDIFJPT1QwggEiMA0GCSqGSIb3DQEBAQUAA4IB
+DwAwggEKAoIBAQDTNfc/c3et6FtzF8LRb+1VvG7q6KR5smzDo+/hn7E7SIX1mlwh
+IhAsxYLO2uOabjfhhyzcuQxauohV3/2q2x8x6gHx3zkBwRP9SFIhxFXf2tizVHa6
+dLG3fdfA6PZZxU3Iva0fFNrfWEQlMhkqx35+jq44sDB7R3IJMfAw28Mbdim7aXZO
+V/kbZKKTVrdvmW7bCgScEeOAH8tjlBAKqeFkgjH5jCftppkA9nCTGPihNIaj3XrC
+GHn2emU1z5DrvTOTn1OrczvmmzQgLx3vqR1jGqCA2wMv+SYahtKNu6m+UjqHZ0gN
+v7Sg2Ca+I19zN38m5pIEo3/PIKe38zrKy5nLAgMBAAGjczBxMBEGCWCGSAGG+EIB
+AQQEAwIABzAfBgNVHSMEGDAWgBRl8jGtKvf33VKWCscCwQ7vptU7ETAPBgNVHRMB
+Af8EBTADAQH/MAsGA1UdDwQEAwIB/jAdBgNVHQ4EFgQUZfIxrSr3991SlgrHAsEO
+76bVOxEwDQYJKoZIhvcNAQEFBQADggEBAEs17szkrr/Dbq2flTtLP1se31cpolnK
+OOK5Gv+e5m4y3R6u6jW39ZORTtpC4cMXYFDy0VwmuYK36m3knITnA3kXr5g9lNvH
+ugDnuL8BV8F3RTIMO/G0HAiw/VGgod2aHRM2mm23xzy54cXZF/qD1T0VoDy7Hgvi
+yJA/qIYM/PmLXoXLT1tLYhFHxUV8BS9BsZ4QaRuZluBVeftOhpm4lNqGOGqTo+fL
+buXf6iFViZx9fX+Y9QCJ7uOEwFyWtcVG6kbghVW2G8kS1sHNzYDzAgE8yGnLRUhj
+2JTQ7IUOO04RZfSCjKY9ri4ilAnIXOo8gV0WKgOXFlUJ24pBgp5mmxE=
+-----END CERTIFICATE-----
+
+ApplicationCA - Japanese Government
+===================================
+-----BEGIN CERTIFICATE-----
+MIIDoDCCAoigAwIBAgIBMTANBgkqhkiG9w0BAQUFADBDMQswCQYDVQQGEwJKUDEc
+MBoGA1UEChMTSmFwYW5lc2UgR292ZXJubWVudDEWMBQGA1UECxMNQXBwbGljYXRp
+b25DQTAeFw0wNzEyMTIxNTAwMDBaFw0xNzEyMTIxNTAwMDBaMEMxCzAJBgNVBAYT
+AkpQMRwwGgYDVQQKExNKYXBhbmVzZSBHb3Zlcm5tZW50MRYwFAYDVQQLEw1BcHBs
+aWNhdGlvbkNBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAp23gdE6H
+j6UG3mii24aZS2QNcfAKBZuOquHMLtJqO8F6tJdhjYq+xpqcBrSGUeQ3DnR4fl+K
+f5Sk10cI/VBaVuRorChzoHvpfxiSQE8tnfWuREhzNgaeZCw7NCPbXCbkcXmP1G55
+IrmTwcrNwVbtiGrXoDkhBFcsovW8R0FPXjQilbUfKW1eSvNNcr5BViCH/OlQR9cw
+FO5cjFW6WY2H/CPek9AEjP3vbb3QesmlOmpyM8ZKDQUXKi17safY1vC+9D/qDiht
+QWEjdnjDuGWk81quzMKq2edY3rZ+nYVunyoKb58DKTCXKB28t89UKU5RMfkntigm
+/qJj5kEW8DOYRwIDAQABo4GeMIGbMB0GA1UdDgQWBBRUWssmP3HMlEYNllPqa0jQ
+k/5CdTAOBgNVHQ8BAf8EBAMCAQYwWQYDVR0RBFIwUKROMEwxCzAJBgNVBAYTAkpQ
+MRgwFgYDVQQKDA/ml6XmnKzlm73mlL/lupwxIzAhBgNVBAsMGuOCouODl+ODquOC
+seODvOOCt+ODp+ODs0NBMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD
+ggEBADlqRHZ3ODrso2dGD/mLBqj7apAxzn7s2tGJfHrrLgy9mTLnsCTWw//1sogJ
+hyzjVOGjprIIC8CFqMjSnHH2HZ9g/DgzE+Ge3Atf2hZQKXsvcJEPmbo0NI2VdMV+
+eKlmXb3KIXdCEKxmJj3ekav9FfBv7WxfEPjzFvYDio+nEhEMy/0/ecGc/WLuo89U
+DNErXxc+4z6/wCs+CZv+iKZ+tJIX/COUgb1up8WMwusRRdv4QcmWdupwX3kSa+Sj
+B1oF7ydJzyGfikwJcGapJsErEU4z0g781mzSDjJkaP+tBXhfAx2o45CsJOAPQKdL
+rosot4LKGAfmt1t06SAZf7IbiVQ=
+-----END CERTIFICATE-----
+
GeoTrust Primary Certification Authority - G3
=============================================
-
-----BEGIN CERTIFICATE-----
MIID/jCCAuagAwIBAgIQFaxulBmyeUtB9iepwxgPHzANBgkqhkiG9w0BAQsFADCB
mDELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsT
@@ -1755,7 +3150,6 @@ spki4cErx5z481+oghLrGREt
thawte Primary Root CA - G2
===========================
-
-----BEGIN CERTIFICATE-----
MIICiDCCAg2gAwIBAgIQNfwmXNmET8k9Jj1Xm67XVjAKBggqhkjOPQQDAzCBhDEL
MAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjE4MDYGA1UECxMvKGMp
@@ -1775,7 +3169,6 @@ XZ3Krr0TKUQNJ1uo52icEvdYPy5yAlejj6EULg==
thawte Primary Root CA - G3
===========================
-
-----BEGIN CERTIFICATE-----
MIIEKjCCAxKgAwIBAgIQYAGXt0an6rS0mtZLL/eQ+zANBgkqhkiG9w0BAQsFADCB
rjELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf
@@ -1804,7 +3197,6 @@ MdRAGmI0Nj81Aa6sY6A=
GeoTrust Primary Certification Authority - G2
=============================================
-
-----BEGIN CERTIFICATE-----
MIICrjCCAjWgAwIBAgIQPLL0SAoA4v7rJDteYD7DazAKBggqhkjOPQQDAzCBmDEL
MAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsTMChj
@@ -1825,7 +3217,6 @@ rD6ogRLQy7rQkgu2npaqBA+K
VeriSign Universal Root Certification Authority
===============================================
-
-----BEGIN CERTIFICATE-----
MIIEuTCCA6GgAwIBAgIQQBrEZCGzEyEDDrvkEhrFHTANBgkqhkiG9w0BAQsFADCB
vTELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL
@@ -1857,7 +3248,6 @@ lRQOfc2VNNnSj3BzgXucfr2YYdhFh5iQxeuGMMY1v/D/w1WIg0vvBZIGcfK4mJO3
VeriSign Class 3 Public Primary Certification Authority - G4
============================================================
-
-----BEGIN CERTIFICATE-----
MIIDhDCCAwqgAwIBAgIQL4D+I4wOIg9IZxIokYesszAKBggqhkjOPQQDAzCByjEL
MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW
@@ -1880,9 +3270,299 @@ kf3upm7ktS5Jj4d4gYDs5bG1MAoGCCqGSM49BAMDA2gAMGUCMGYhDBgmYFo4e1ZC
FRJZap7v1VmyHVIsmXHNxynfGyphe3HR3vPA5Q06Sqotp9iGKt0uEA==
-----END CERTIFICATE-----
-GlobalSign Root CA - R3
+NetLock Arany (Class Gold) Főtanúsítvány
+============================================
+-----BEGIN CERTIFICATE-----
+MIIEFTCCAv2gAwIBAgIGSUEs5AAQMA0GCSqGSIb3DQEBCwUAMIGnMQswCQYDVQQG
+EwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFTATBgNVBAoMDE5ldExvY2sgS2Z0LjE3
+MDUGA1UECwwuVGFuw7pzw610dsOhbnlraWFkw7NrIChDZXJ0aWZpY2F0aW9uIFNl
+cnZpY2VzKTE1MDMGA1UEAwwsTmV0TG9jayBBcmFueSAoQ2xhc3MgR29sZCkgRsWR
+dGFuw7pzw610dsOhbnkwHhcNMDgxMjExMTUwODIxWhcNMjgxMjA2MTUwODIxWjCB
+pzELMAkGA1UEBhMCSFUxETAPBgNVBAcMCEJ1ZGFwZXN0MRUwEwYDVQQKDAxOZXRM
+b2NrIEtmdC4xNzA1BgNVBAsMLlRhbsO6c8OtdHbDoW55a2lhZMOzayAoQ2VydGlm
+aWNhdGlvbiBTZXJ2aWNlcykxNTAzBgNVBAMMLE5ldExvY2sgQXJhbnkgKENsYXNz
+IEdvbGQpIEbFkXRhbsO6c8OtdHbDoW55MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A
+MIIBCgKCAQEAxCRec75LbRTDofTjl5Bu0jBFHjzuZ9lk4BqKf8owyoPjIMHj9DrT
+lF8afFttvzBPhCf2nx9JvMaZCpDyD/V/Q4Q3Y1GLeqVw/HpYzY6b7cNGbIRwXdrz
+AZAj/E4wqX7hJ2Pn7WQ8oLjJM2P+FpD/sLj916jAwJRDC7bVWaaeVtAkH3B5r9s5
+VA1lddkVQZQBr17s9o3x/61k/iCa11zr/qYfCGSji3ZVrR47KGAuhyXoqq8fxmRG
+ILdwfzzeSNuWU7c5d+Qa4scWhHaXWy+7GRWF+GmF9ZmnqfI0p6m2pgP8b4Y9VHx2
+BJtr+UBdADTHLpl1neWIA6pN+APSQnbAGwIDAKiLo0UwQzASBgNVHRMBAf8ECDAG
+AQH/AgEEMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUzPpnk/C2uNClwB7zU/2M
+U9+D15YwDQYJKoZIhvcNAQELBQADggEBAKt/7hwWqZw8UQCgwBEIBaeZ5m8BiFRh
+bvG5GK1Krf6BQCOUL/t1fC8oS2IkgYIL9WHxHG64YTjrgfpioTtaYtOUZcTh5m2C
++C8lcLIhJsFyUR+MLMOEkMNaj7rP9KdlpeuY0fsFskZ1FSNqb4VjMIDw1Z4fKRzC
+bLBQWV2QWzuoDTDPv31/zvGdg73JRm4gpvlhUbohL3u+pRVjodSVh/GeufOJ8z2F
+uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2
+XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E=
+-----END CERTIFICATE-----
+
+Staat der Nederlanden Root CA - G2
+==================================
+-----BEGIN CERTIFICATE-----
+MIIFyjCCA7KgAwIBAgIEAJiWjDANBgkqhkiG9w0BAQsFADBaMQswCQYDVQQGEwJO
+TDEeMBwGA1UECgwVU3RhYXQgZGVyIE5lZGVybGFuZGVuMSswKQYDVQQDDCJTdGFh
+dCBkZXIgTmVkZXJsYW5kZW4gUm9vdCBDQSAtIEcyMB4XDTA4MDMyNjExMTgxN1oX
+DTIwMDMyNTExMDMxMFowWjELMAkGA1UEBhMCTkwxHjAcBgNVBAoMFVN0YWF0IGRl
+ciBOZWRlcmxhbmRlbjErMCkGA1UEAwwiU3RhYXQgZGVyIE5lZGVybGFuZGVuIFJv
+b3QgQ0EgLSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMVZ5291
+qj5LnLW4rJ4L5PnZyqtdj7U5EILXr1HgO+EASGrP2uEGQxGZqhQlEq0i6ABtQ8Sp
+uOUfiUtnvWFI7/3S4GCI5bkYYCjDdyutsDeqN95kWSpGV+RLufg3fNU254DBtvPU
+Z5uW6M7XxgpT0GtJlvOjCwV3SPcl5XCsMBQgJeN/dVrlSPhOewMHBPqCYYdu8DvE
+pMfQ9XQ+pV0aCPKbJdL2rAQmPlU6Yiile7Iwr/g3wtG61jj99O9JMDeZJiFIhQGp
+5Rbn3JBV3w/oOM2ZNyFPXfUib2rFEhZgF1XyZWampzCROME4HYYEhLoaJXhena/M
+UGDWE4dS7WMfbWV9whUYdMrhfmQpjHLYFhN9C0lK8SgbIHRrxT3dsKpICT0ugpTN
+GmXZK4iambwYfp/ufWZ8Pr2UuIHOzZgweMFvZ9C+X+Bo7d7iscksWXiSqt8rYGPy
+5V6548r6f1CGPqI0GAwJaCgRHOThuVw+R7oyPxjMW4T182t0xHJ04eOLoEq9jWYv
+6q012iDTiIJh8BIitrzQ1aTsr1SIJSQ8p22xcik/Plemf1WvbibG/ufMQFxRRIEK
+eN5KzlW/HdXZt1bv8Hb/C3m1r737qWmRRpdogBQ2HbN/uymYNqUg+oJgYjOk7Na6
+B6duxc8UpufWkjTYgfX8HV2qXB72o007uPc5AgMBAAGjgZcwgZQwDwYDVR0TAQH/
+BAUwAwEB/zBSBgNVHSAESzBJMEcGBFUdIAAwPzA9BggrBgEFBQcCARYxaHR0cDov
+L3d3dy5wa2lvdmVyaGVpZC5ubC9wb2xpY2llcy9yb290LXBvbGljeS1HMjAOBgNV
+HQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJFoMocVHYnitfGsNig0jQt8YojrMA0GCSqG
+SIb3DQEBCwUAA4ICAQCoQUpnKpKBglBu4dfYszk78wIVCVBR7y29JHuIhjv5tLyS
+CZa59sCrI2AGeYwRTlHSeYAz+51IvuxBQ4EffkdAHOV6CMqqi3WtFMTC6GY8ggen
+5ieCWxjmD27ZUD6KQhgpxrRW/FYQoAUXvQwjf/ST7ZwaUb7dRUG/kSS0H4zpX897
+IZmflZ85OkYcbPnNe5yQzSipx6lVu6xiNGI1E0sUOlWDuYaNkqbG9AclVMwWVxJK
+gnjIFNkXgiYtXSAfea7+1HAWFpWD2DU5/1JddRwWxRNVz0fMdWVSSt7wsKfkCpYL
++63C4iWEst3kvX5ZbJvw8NjnyvLplzh+ib7M+zkXYT9y2zqR2GUBGR2tUKRXCnxL
+vJxxcypFURmFzI79R6d0lR2o0a9OF7FpJsKqeFdbxU2n5Z4FF5TKsl+gSRiNNOkm
+bEgeqmiSBeGCc1qb3AdbCG19ndeNIdn8FCCqwkXfP+cAslHkwvgFuXkajDTznlvk
+N1trSt8sV4pAWja63XVECDdCcAz+3F4hoKOKwJCcaNpQ5kUQR3i2TtJlycM33+FC
+Y7BXN0Ute4qcvwXqZVUz9zkQxSgqIXobisQk+T8VyJoVIPVVYpbtbZNQvOSqeK3Z
+ywplh6ZmwcSBo3c6WB4L7oOLnR7SUqTMHW+wmG2UMbX4cQrcufx9MmDm66+KAQ==
+-----END CERTIFICATE-----
+
+CA Disig
+========
+-----BEGIN CERTIFICATE-----
+MIIEDzCCAvegAwIBAgIBATANBgkqhkiG9w0BAQUFADBKMQswCQYDVQQGEwJTSzET
+MBEGA1UEBxMKQnJhdGlzbGF2YTETMBEGA1UEChMKRGlzaWcgYS5zLjERMA8GA1UE
+AxMIQ0EgRGlzaWcwHhcNMDYwMzIyMDEzOTM0WhcNMTYwMzIyMDEzOTM0WjBKMQsw
+CQYDVQQGEwJTSzETMBEGA1UEBxMKQnJhdGlzbGF2YTETMBEGA1UEChMKRGlzaWcg
+YS5zLjERMA8GA1UEAxMIQ0EgRGlzaWcwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAw
+ggEKAoIBAQCS9jHBfYj9mQGp2HvycXXxMcbzdWb6UShGhJd4NLxs/LxFWYgmGErE
+Nx+hSkS943EE9UQX4j/8SFhvXJ56CbpRNyIjZkMhsDxkovhqFQ4/61HhVKndBpnX
+mjxUizkDPw/Fzsbrg3ICqB9x8y34dQjbYkzo+s7552oftms1grrijxaSfQUMbEYD
+XcDtab86wYqg6I7ZuUUohwjstMoVvoLdtUSLLa2GDGhibYVW8qwUYzrG0ZmsNHhW
+S8+2rT+MitcE5eN4TPWGqvWP+j1scaMtymfraHtuM6kMgiioTGohQBUgDCZbg8Kp
+FhXAJIJdKxatymP2dACw30PEEGBWZ2NFAgMBAAGjgf8wgfwwDwYDVR0TAQH/BAUw
+AwEB/zAdBgNVHQ4EFgQUjbJJaJ1yCCW5wCf1UJNWSEZx+Y8wDgYDVR0PAQH/BAQD
+AgEGMDYGA1UdEQQvMC2BE2Nhb3BlcmF0b3JAZGlzaWcuc2uGFmh0dHA6Ly93d3cu
+ZGlzaWcuc2svY2EwZgYDVR0fBF8wXTAtoCugKYYnaHR0cDovL3d3dy5kaXNpZy5z
+ay9jYS9jcmwvY2FfZGlzaWcuY3JsMCygKqAohiZodHRwOi8vY2EuZGlzaWcuc2sv
+Y2EvY3JsL2NhX2Rpc2lnLmNybDAaBgNVHSAEEzARMA8GDSuBHpGT5goAAAABAQEw
+DQYJKoZIhvcNAQEFBQADggEBAF00dGFMrzvY/59tWDYcPQuBDRIrRhCA/ec8J9B6
+yKm2fnQwM6M6int0wHl5QpNt/7EpFIKrIYwvF/k/Ji/1WcbvgAa3mkkp7M5+cTxq
+EEHA9tOasnxakZzArFvITV734VP/Q3f8nktnbNfzg9Gg4H8l37iYC5oyOGwwoPP/
+CBUz91BKez6jPiCp3C9WgArtQVCwyfTssuMmRAAOb54GvCKWU3BlxFAKRmukLyeB
+EicTXxChds6KezfqwzlhA5WYOudsiCUI/HloDYd9Yvi0X/vF2Ey9WLw/Q1vUHgFN
+PGO+I++MzVpQuGhU+QqZMxEA4Z7CRneC9VkGjCFMhwnN5ag=
+-----END CERTIFICATE-----
+
+Juur-SK
+=======
+-----BEGIN CERTIFICATE-----
+MIIE5jCCA86gAwIBAgIEO45L/DANBgkqhkiG9w0BAQUFADBdMRgwFgYJKoZIhvcN
+AQkBFglwa2lAc2suZWUxCzAJBgNVBAYTAkVFMSIwIAYDVQQKExlBUyBTZXJ0aWZp
+dHNlZXJpbWlza2Vza3VzMRAwDgYDVQQDEwdKdXVyLVNLMB4XDTAxMDgzMDE0MjMw
+MVoXDTE2MDgyNjE0MjMwMVowXTEYMBYGCSqGSIb3DQEJARYJcGtpQHNrLmVlMQsw
+CQYDVQQGEwJFRTEiMCAGA1UEChMZQVMgU2VydGlmaXRzZWVyaW1pc2tlc2t1czEQ
+MA4GA1UEAxMHSnV1ci1TSzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB
+AIFxNj4zB9bjMI0TfncyRsvPGbJgMUaXhvSYRqTCZUXP00B841oiqBB4M8yIsdOB
+SvZiF3tfTQou0M+LI+5PAk676w7KvRhj6IAcjeEcjT3g/1tf6mTll+g/mX8MCgkz
+ABpTpyHhOEvWgxutr2TC+Rx6jGZITWYfGAriPrsfB2WThbkasLnE+w0R9vXW+RvH
+LCu3GFH+4Hv2qEivbDtPL+/40UceJlfwUR0zlv/vWT3aTdEVNMfqPxZIe5EcgEMP
+PbgFPtGzlc3Yyg/CQ2fbt5PgIoIuvvVoKIO5wTtpeyDaTpxt4brNj3pssAki14sL
+2xzVWiZbDcDq5WDQn/413z8CAwEAAaOCAawwggGoMA8GA1UdEwEB/wQFMAMBAf8w
+ggEWBgNVHSAEggENMIIBCTCCAQUGCisGAQQBzh8BAQEwgfYwgdAGCCsGAQUFBwIC
+MIHDHoHAAFMAZQBlACAAcwBlAHIAdABpAGYAaQBrAGEAYQB0ACAAbwBuACAAdgDk
+AGwAagBhAHMAdABhAHQAdQBkACAAQQBTAC0AaQBzACAAUwBlAHIAdABpAGYAaQB0
+AHMAZQBlAHIAaQBtAGkAcwBrAGUAcwBrAHUAcwAgAGEAbABhAG0ALQBTAEsAIABz
+AGUAcgB0AGkAZgBpAGsAYQBhAHQAaQBkAGUAIABrAGkAbgBuAGkAdABhAG0AaQBz
+AGUAawBzMCEGCCsGAQUFBwIBFhVodHRwOi8vd3d3LnNrLmVlL2Nwcy8wKwYDVR0f
+BCQwIjAgoB6gHIYaaHR0cDovL3d3dy5zay5lZS9qdXVyL2NybC8wHQYDVR0OBBYE
+FASqekej5ImvGs8KQKcYP2/v6X2+MB8GA1UdIwQYMBaAFASqekej5ImvGs8KQKcY
+P2/v6X2+MA4GA1UdDwEB/wQEAwIB5jANBgkqhkiG9w0BAQUFAAOCAQEAe8EYlFOi
+CfP+JmeaUOTDBS8rNXiRTHyoERF5TElZrMj3hWVcRrs7EKACr81Ptcw2Kuxd/u+g
+kcm2k298gFTsxwhwDY77guwqYHhpNjbRxZyLabVAyJRld/JXIWY7zoVAtjNjGr95
+HvxcHdMdkxuLDF2FvZkwMhgJkVLpfKG6/2SSmuz+Ne6ML678IIbsSt4beDI3poHS
+na9aEhbKmVv8b20OxaAehsmR0FyYgl9jDIpaq9iVpszLita/ZEuOyoqysOkhMp6q
+qIWYNIE5ITuoOlIyPfZrN4YGWhWY3PARZv40ILcD9EEQfTmEeZZyY7aWAuVrua0Z
+TbvGRNs2yyqcjg==
+-----END CERTIFICATE-----
+
+Hongkong Post Root CA 1
=======================
+-----BEGIN CERTIFICATE-----
+MIIDMDCCAhigAwIBAgICA+gwDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UEBhMCSEsx
+FjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdrb25nIFBvc3Qg
+Um9vdCBDQSAxMB4XDTAzMDUxNTA1MTMxNFoXDTIzMDUxNTA0NTIyOVowRzELMAkG
+A1UEBhMCSEsxFjAUBgNVBAoTDUhvbmdrb25nIFBvc3QxIDAeBgNVBAMTF0hvbmdr
+b25nIFBvc3QgUm9vdCBDQSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
+AQEArP84tulmAknjorThkPlAj3n54r15/gK97iSSHSL22oVyaf7XPwnU3ZG1ApzQ
+jVrhVcNQhrkpJsLj2aDxaQMoIIBFIi1WpztUlVYiWR8o3x8gPW2iNr4joLFutbEn
+PzlTCeqrauh0ssJlXI6/fMN4hM2eFvz1Lk8gKgifd/PFHsSaUmYeSF7jEAaPIpjh
+ZY4bXSNmO7ilMlHIhqqhqZ5/dpTCpmy3QfDVyAY45tQM4vM7TG1QjMSDJ8EThFk9
+nnV0ttgCXjqQesBCNnLsak3c78QA3xMYV18meMjWCnl3v/evt3a5pQuEF10Q6m/h
+q5URX208o1xNg1vysxmKgIsLhwIDAQABoyYwJDASBgNVHRMBAf8ECDAGAQH/AgED
+MA4GA1UdDwEB/wQEAwIBxjANBgkqhkiG9w0BAQUFAAOCAQEADkbVPK7ih9legYsC
+mEEIjEy82tvuJxuC52pF7BaLT4Wg87JwvVqWuspube5Gi27nKi6Wsxkz67SfqLI3
+7piol7Yutmcn1KZJ/RyTZXaeQi/cImyaT/JaFTmxcdcrUehtHJjA2Sr0oYJ71clB
+oiMBdDhViw+5LmeiIAQ32pwL0xch4I+XeTRvhEgCIDMb5jREn5Fw9IBehEPCKdJs
+EhTkYY2sEJCehFC78JZvRZ+K88psT/oROhUVRsPNH4NbLUES7VBnQRM9IauUiqpO
+fMGx+6fWtScvl6tu4B3i0RwsH0Ti/L6RoZz71ilTc4afU9hDDl3WY4JxHYB0yvbi
+AmvZWg==
+-----END CERTIFICATE-----
+
+SecureSign RootCA11
+===================
+-----BEGIN CERTIFICATE-----
+MIIDbTCCAlWgAwIBAgIBATANBgkqhkiG9w0BAQUFADBYMQswCQYDVQQGEwJKUDEr
+MCkGA1UEChMiSmFwYW4gQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcywgSW5jLjEcMBoG
+A1UEAxMTU2VjdXJlU2lnbiBSb290Q0ExMTAeFw0wOTA0MDgwNDU2NDdaFw0yOTA0
+MDgwNDU2NDdaMFgxCzAJBgNVBAYTAkpQMSswKQYDVQQKEyJKYXBhbiBDZXJ0aWZp
+Y2F0aW9uIFNlcnZpY2VzLCBJbmMuMRwwGgYDVQQDExNTZWN1cmVTaWduIFJvb3RD
+QTExMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA/XeqpRyQBTvLTJsz
+i1oURaTnkBbR31fSIRCkF/3frNYfp+TbfPfs37gD2pRY/V1yfIw/XwFndBWW4wI8
+h9uuywGOwvNmxoVF9ALGOrVisq/6nL+k5tSAMJjzDbaTj6nU2DbysPyKyiyhFTOV
+MdrAG/LuYpmGYz+/3ZMqg6h2uRMft85OQoWPIucuGvKVCbIFtUROd6EgvanyTgp9
+UK31BQ1FT0Zx/Sg+U/sE2C3XZR1KG/rPO7AxmjVuyIsG0wCR8pQIZUyxNAYAeoni
+8McDWc/V1uinMrPmmECGxc0nEovMe863ETxiYAcjPitAbpSACW22s293bzUIUPsC
+h8U+iQIDAQABo0IwQDAdBgNVHQ4EFgQUW/hNT7KlhtQ60vFjmqC+CfZXt94wDgYD
+VR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB
+AKChOBZmLqdWHyGcBvod7bkixTgm2E5P7KN/ed5GIaGHd48HCJqypMWvDzKYC3xm
+KbabfSVSSUOrTC4rbnpwrxYO4wJs+0LmGJ1F2FXI6Dvd5+H0LgscNFxsWEr7jIhQ
+X5Ucv+2rIrVls4W6ng+4reV6G4pQOh29Dbx7VFALuUKvVaAYga1lme++5Jy/xIWr
+QbJUb9wlze144o4MjQlJ3WN7WmmWAiGovVJZ6X01y8hSyn+B/tlr0/cR7SXf+Of5
+pPpyl4RTDaXQMhhRdlkUbA/r7F+AjHVDg8OFmP9Mni0N5HeDk061lgeLKBObjBmN
+QSdJQO7e5iNEOdyhIta6A/I=
+-----END CERTIFICATE-----
+ACEDICOM Root
+=============
+-----BEGIN CERTIFICATE-----
+MIIFtTCCA52gAwIBAgIIYY3HhjsBggUwDQYJKoZIhvcNAQEFBQAwRDEWMBQGA1UE
+AwwNQUNFRElDT00gUm9vdDEMMAoGA1UECwwDUEtJMQ8wDQYDVQQKDAZFRElDT00x
+CzAJBgNVBAYTAkVTMB4XDTA4MDQxODE2MjQyMloXDTI4MDQxMzE2MjQyMlowRDEW
+MBQGA1UEAwwNQUNFRElDT00gUm9vdDEMMAoGA1UECwwDUEtJMQ8wDQYDVQQKDAZF
+RElDT00xCzAJBgNVBAYTAkVTMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKC
+AgEA/5KV4WgGdrQsyFhIyv2AVClVYyT/kGWbEHV7w2rbYgIB8hiGtXxaOLHkWLn7
+09gtn70yN78sFW2+tfQh0hOR2QetAQXW8713zl9CgQr5auODAKgrLlUTY4HKRxx7
+XBZXehuDYAQ6PmXDzQHe3qTWDLqO3tkE7hdWIpuPY/1NFgu3e3eM+SW10W2ZEi5P
+Grjm6gSSrj0RuVFCPYewMYWveVqc/udOXpJPQ/yrOq2lEiZmueIM15jO1FillUAK
+t0SdE3QrwqXrIhWYENiLxQSfHY9g5QYbm8+5eaA9oiM/Qj9r+hwDezCNzmzAv+Yb
+X79nuIQZ1RXve8uQNjFiybwCq0Zfm/4aaJQ0PZCOrfbkHQl/Sog4P75n/TSW9R28
+MHTLOO7VbKvU/PQAtwBbhTIWdjPp2KOZnQUAqhbm84F9b32qhm2tFXTTxKJxqvQU
+fecyuB+81fFOvW8XAjnXDpVCOscAPukmYxHqC9FK/xidstd7LzrZlvvoHpKuE1XI
+2Sf23EgbsCTBheN3nZqk8wwRHQ3ItBTutYJXCb8gWH8vIiPYcMt5bMlL8qkqyPyH
+K9caUPgn6C9D4zq92Fdx/c6mUlv53U3t5fZvie27k5x2IXXwkkwp9y+cAS7+UEae
+ZAwUswdbxcJzbPEHXEUkFDWug/FqTYl6+rPYLWbwNof1K1MCAwEAAaOBqjCBpzAP
+BgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKaz4SsrSbbXc6GqlPUB53NlTKxQ
+MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUprPhKytJttdzoaqU9QHnc2VMrFAw
+RAYDVR0gBD0wOzA5BgRVHSAAMDEwLwYIKwYBBQUHAgEWI2h0dHA6Ly9hY2VkaWNv
+bS5lZGljb21ncm91cC5jb20vZG9jMA0GCSqGSIb3DQEBBQUAA4ICAQDOLAtSUWIm
+fQwng4/F9tqgaHtPkl7qpHMyEVNEskTLnewPeUKzEKbHDZ3Ltvo/Onzqv4hTGzz3
+gvoFNTPhNahXwOf9jU8/kzJPeGYDdwdY6ZXIfj7QeQCM8htRM5u8lOk6e25SLTKe
+I6RF+7YuE7CLGLHdztUdp0J/Vb77W7tH1PwkzQSulgUV1qzOMPPKC8W64iLgpq0i
+5ALudBF/TP94HTXa5gI06xgSYXcGCRZj6hitoocf8seACQl1ThCojz2GuHURwCRi
+ipZ7SkXp7FnFvmuD5uHorLUwHv4FB4D54SMNUI8FmP8sX+g7tq3PgbUhh8oIKiMn
+MCArz+2UW6yyetLHKKGKC5tNSixthT8Jcjxn4tncB7rrZXtaAWPWkFtPF2Y9fwsZ
+o5NjEFIqnxQWWOLcpfShFosOkYuByptZ+thrkQdlVV9SH686+5DdaaVbnG0OLLb6
+zqylfDJKZ0DcMDQj3dcEI2bw/FWAp/tmGYI1Z2JwOV5vx+qQQEQIHriy1tvuWacN
+GHk0vFQYXlPKNFHtRQrmjseCNj6nOGOpMCwXEGCSn1WHElkQwg9naRHMTh5+Spqt
+r0CodaxWkHS4oJyleW/c6RrIaQXpuvoDs3zk4E7Czp3otkYNbn5XOmeUwssfnHdK
+Z05phkOTOPu220+DkdRgfks+KzgHVZhepA==
+-----END CERTIFICATE-----
+
+Verisign Class 1 Public Primary Certification Authority
+=======================================================
+-----BEGIN CERTIFICATE-----
+MIICPDCCAaUCED9pHoGc8JpK83P/uUii5N0wDQYJKoZIhvcNAQEFBQAwXzELMAkG
+A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFz
+cyAxIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk2
+MDEyOTAwMDAwMFoXDTI4MDgwMjIzNTk1OVowXzELMAkGA1UEBhMCVVMxFzAVBgNV
+BAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAxIFB1YmxpYyBQcmlt
+YXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGfMA0GCSqGSIb3DQEBAQUAA4GN
+ADCBiQKBgQDlGb9to1ZhLZlIcfZn3rmN67eehoAKkQ76OCWvRoiC5XOooJskXQ0f
+zGVuDLDQVoQYh5oGmxChc9+0WDlrbsH2FdWoqD+qEgaNMax/sDTXjzRniAnNFBHi
+TkVWaR94AoDa3EeRKbs2yWNcxeDXLYd7obcysHswuiovMaruo2fa2wIDAQABMA0G
+CSqGSIb3DQEBBQUAA4GBAFgVKTk8d6PaXCUDfGD67gmZPCcQcMgMCeazh88K4hiW
+NWLMv5sneYlfycQJ9M61Hd8qveXbhpxoJeUwfLaJFf5n0a3hUKw8fGJLj7qE1xIV
+Gx/KXQ/BUpQqEZnae88MNhPVNdwQGVnqlMEAv3WP2fr9dgTbYruQagPZRjXZ+Hxb
+-----END CERTIFICATE-----
+
+Verisign Class 3 Public Primary Certification Authority
+=======================================================
+-----BEGIN CERTIFICATE-----
+MIICPDCCAaUCEDyRMcsf9tAbDpq40ES/Er4wDQYJKoZIhvcNAQEFBQAwXzELMAkG
+A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFz
+cyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk2
+MDEyOTAwMDAwMFoXDTI4MDgwMjIzNTk1OVowXzELMAkGA1UEBhMCVVMxFzAVBgNV
+BAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAzIFB1YmxpYyBQcmlt
+YXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGfMA0GCSqGSIb3DQEBAQUAA4GN
+ADCBiQKBgQDJXFme8huKARS0EN8EQNvjV69qRUCPhAwL0TPZ2RHP7gJYHyX3KqhE
+BarsAx94f56TuZoAqiN91qyFomNFx3InzPRMxnVx0jnvT0Lwdd8KkMaOIG+YD/is
+I19wKTakyYbnsZogy1Olhec9vn2a/iRFM9x2Fe0PonFkTGUugWhFpwIDAQABMA0G
+CSqGSIb3DQEBBQUAA4GBABByUqkFFBkyCEHwxWsKzH4PIRnN5GfcX6kb5sroc50i
+2JhucwNhkcV8sEVAbkSdjbCxlnRhLQ2pRdKkkirWmnWXbj9T/UWZYB2oK0z5XqcJ
+2HUw19JlYD1n1khVdWk/kfVIC0dpImmClr7JyDiGSnoscxlIaU5rfGW/D/xwzoiQ
+-----END CERTIFICATE-----
+
+Microsec e-Szigno Root CA 2009
+==============================
+-----BEGIN CERTIFICATE-----
+MIIECjCCAvKgAwIBAgIJAMJ+QwRORz8ZMA0GCSqGSIb3DQEBCwUAMIGCMQswCQYD
+VQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFjAUBgNVBAoMDU1pY3Jvc2VjIEx0
+ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EgMjAwOTEfMB0G
+CSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5odTAeFw0wOTA2MTYxMTMwMThaFw0y
+OTEyMzAxMTMwMThaMIGCMQswCQYDVQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3Qx
+FjAUBgNVBAoMDU1pY3Jvc2VjIEx0ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3pp
+Z25vIFJvb3QgQ0EgMjAwOTEfMB0GCSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5o
+dTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOn4j/NjrdqG2KfgQvvP
+kd6mJviZpWNwrZuuyjNAfW2WbqEORO7hE52UQlKavXWFdCyoDh2Tthi3jCyoz/tc
+cbna7P7ofo/kLx2yqHWH2Leh5TvPmUpG0IMZfcChEhyVbUr02MelTTMuhTlAdX4U
+fIASmFDHQWe4oIBhVKZsTh/gnQ4H6cm6M+f+wFUoLAKApxn1ntxVUwOXewdI/5n7
+N4okxFnMUBBjjqqpGrCEGob5X7uxUG6k0QrM1XF+H6cbfPVTbiJfyyvm1HxdrtbC
+xkzlBQHZ7Vf8wSN5/PrIJIOV87VqUQHQd9bpEqH5GoP7ghu5sJf0dgYzQ0mg/wu1
++rUCAwEAAaOBgDB+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G
+A1UdDgQWBBTLD8bfQkPMPcu1SCOhGnqmKrs0aDAfBgNVHSMEGDAWgBTLD8bfQkPM
+Pcu1SCOhGnqmKrs0aDAbBgNVHREEFDASgRBpbmZvQGUtc3ppZ25vLmh1MA0GCSqG
+SIb3DQEBCwUAA4IBAQDJ0Q5eLtXMs3w+y/w9/w0olZMEyL/azXm4Q5DwpL7v8u8h
+mLzU1F0G9u5C7DBsoKqpyvGvivo/C3NqPuouQH4frlRheesuCDfXI/OMn74dseGk
+ddug4lQUsbocKaQY9hK6ohQU4zE1yED/t+AFdlfBHFny+L/k7SViXITwfn4fs775
+tyERzAMBVnCnEJIeGzSBHq2cGsMEPO0CYdYeBvNfOofyK/FFh+U9rNHHV4S9a67c
+2Pm2G2JwCz02yULyMtd6YebS2z3PyKnJm9zbWETXbzivf3jTo60adbocwTZ8jx5t
+HMN1Rq41Bab2XD0h7lbwyYIiLXpUq3DDfSJlgnCW
+-----END CERTIFICATE-----
+
+E-Guven Kok Elektronik Sertifika Hizmet Saglayicisi
+===================================================
+-----BEGIN CERTIFICATE-----
+MIIDtjCCAp6gAwIBAgIQRJmNPMADJ72cdpW56tustTANBgkqhkiG9w0BAQUFADB1
+MQswCQYDVQQGEwJUUjEoMCYGA1UEChMfRWxla3Ryb25payBCaWxnaSBHdXZlbmxp
+Z2kgQS5TLjE8MDoGA1UEAxMzZS1HdXZlbiBLb2sgRWxla3Ryb25payBTZXJ0aWZp
+a2EgSGl6bWV0IFNhZ2xheWljaXNpMB4XDTA3MDEwNDExMzI0OFoXDTE3MDEwNDEx
+MzI0OFowdTELMAkGA1UEBhMCVFIxKDAmBgNVBAoTH0VsZWt0cm9uaWsgQmlsZ2kg
+R3V2ZW5saWdpIEEuUy4xPDA6BgNVBAMTM2UtR3V2ZW4gS29rIEVsZWt0cm9uaWsg
+U2VydGlmaWthIEhpem1ldCBTYWdsYXlpY2lzaTCCASIwDQYJKoZIhvcNAQEBBQAD
+ggEPADCCAQoCggEBAMMSIJ6wXgBljU5Gu4Bc6SwGl9XzcslwuedLZYDBS75+PNdU
+MZTe1RK6UxYC6lhj71vY8+0qGqpxSKPcEC1fX+tcS5yWCEIlKBHMilpiAVDV6wlT
+L/jDj/6z/P2douNffb7tC+Bg62nsM+3YjfsSSYMAyYuXjDtzKjKzEve5TfL0TW3H
+5tYmNwjy2f1rXKPlSFxYvEK+A1qBuhw1DADT9SN+cTAIJjjcJRFHLfO6IxClv7wC
+90Nex/6wN1CZew+TzuZDLMN+DfIcQ2Zgy2ExR4ejT669VmxMvLz4Bcpk9Ok0oSy1
+c+HCPujIyTQlCFzz7abHlJ+tiEMl1+E5YP6sOVkCAwEAAaNCMEAwDgYDVR0PAQH/
+BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFJ/uRLOU1fqRTy7ZVZoE
+VtstxNulMA0GCSqGSIb3DQEBBQUAA4IBAQB/X7lTW2M9dTLn+sR0GstG30ZpHFLP
+qk/CaOv/gKlR6D1id4k9CnU58W5dF4dvaAXBlGzZXd/aslnLpRCKysw5zZ/rTt5S
+/wzw9JKp8mxTq5vSR6AfdPebmvEvFZ96ZDAYBzwqD2fK/A+JYZ1lpTzlvBNbCNvj
+/+27BrtqBrF6T2XGgv0enIu1De5Iu7i9qgi0+6N8y5/NkHZchpZ4Vwpm+Vganf2X
+KWDeEaaQHBkc7gGWIjQ0LpH5t8Qn0Xvmv/uARFoW5evg1Ao4vOSR49XrXMGs3xtq
+fJ7lddK2l4fbzIcrQzqECK+rPNv3PGYxhrCdU3nt+CPeQuMtgvEP5fqX
+-----END CERTIFICATE-----
+
+GlobalSign Root CA - R3
+=======================
-----BEGIN CERTIFICATE-----
MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G
A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp
@@ -1907,7 +3587,6 @@ WD9f
TC TrustCenter Universal CA III
===============================
-
-----BEGIN CERTIFICATE-----
MIID4TCCAsmgAwIBAgIOYyUAAQACFI0zFQLkbPQwDQYJKoZIhvcNAQEFBQAwezEL
MAkGA1UEBhMCREUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxJDAiBgNV
@@ -1932,9 +3611,172 @@ CIgJwcyRGSmwIC3/yzikQOEXvnlhgP8HA4ZMTnsGnxGGjYnuJ8Tb4rwZjgvDwxPH
LQNjO9Po5KIqwoIIlBZU8O8fJ5AluA0OKBtHd0e9HKgl8ZS0Zg==
-----END CERTIFICATE-----
+Autoridad de Certificacion Firmaprofesional CIF A62634068
+=========================================================
+-----BEGIN CERTIFICATE-----
+MIIGFDCCA/ygAwIBAgIIU+w77vuySF8wDQYJKoZIhvcNAQEFBQAwUTELMAkGA1UE
+BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h
+cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0wOTA1MjAwODM4MTVaFw0zMDEy
+MzEwODM4MTVaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg
+Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi
+MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9
+thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM
+cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG
+L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i
+NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h
+X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b
+m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy
+Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja
+EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T
+KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF
+6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh
+OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMBIGA1UdEwEB/wQIMAYBAf8CAQEwDgYD
+VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRlzeurNR4APn7VdMActHNHDhpkLzCBpgYD
+VR0gBIGeMIGbMIGYBgRVHSAAMIGPMC8GCCsGAQUFBwIBFiNodHRwOi8vd3d3LmZp
+cm1hcHJvZmVzaW9uYWwuY29tL2NwczBcBggrBgEFBQcCAjBQHk4AUABhAHMAZQBv
+ACAAZABlACAAbABhACAAQgBvAG4AYQBuAG8AdgBhACAANAA3ACAAQgBhAHIAYwBl
+AGwAbwBuAGEAIAAwADgAMAAxADcwDQYJKoZIhvcNAQEFBQADggIBABd9oPm03cXF
+661LJLWhAqvdpYhKsg9VSytXjDvlMd3+xDLx51tkljYyGOylMnfX40S2wBEqgLk9
+am58m9Ot/MPWo+ZkKXzR4Tgegiv/J2Wv+xYVxC5xhOW1//qkR71kMrv2JYSiJ0L1
+ILDCExARzRAVukKQKtJE4ZYm6zFIEv0q2skGz3QeqUvVhyj5eTSSPi5E6PaPT481
+PyWzOdxjKpBrIF/EUhJOlywqrJ2X3kjyo2bbwtKDlaZmp54lD+kLM5FlClrD2VQS
+3a/DTg4fJl4N3LON7NWBcN7STyQF82xO9UxJZo3R/9ILJUFI/lGExkKvgATP0H5k
+SeTy36LssUzAKh3ntLFlosS88Zj0qnAHY7S42jtM+kAiMFsRpvAFDsYCA0irhpuF
+3dvd6qJ2gHN99ZwExEWN57kci57q13XRcrHedUTnQn3iV2t93Jm8PYMo6oCTjcVM
+ZcFwgbg4/EMxsvYDNEeyrPsiBsse3RdHHF9mudMaotoRsaS8I8nkvof/uZS2+F0g
+StRf571oe2XyFR7SOqkt6dhrJKyXWERHrVkY8SFlcN7ONGCoQPHzPKTDKCOM/icz
+Q0CgFzzr6juwcqajuUpLXhZI9LK8yIySxZ2frHI2vDSANGupi5LAuBft7HZT9SQB
+jLMi6Et8Vcad+qMUu2WFbm5PEn4KPJ2V
+-----END CERTIFICATE-----
+
+Izenpe.com
+==========
+-----BEGIN CERTIFICATE-----
+MIIF8TCCA9mgAwIBAgIQALC3WhZIX7/hy/WL1xnmfTANBgkqhkiG9w0BAQsFADA4
+MQswCQYDVQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6
+ZW5wZS5jb20wHhcNMDcxMjEzMTMwODI4WhcNMzcxMjEzMDgyNzI1WjA4MQswCQYD
+VQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6ZW5wZS5j
+b20wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDJ03rKDx6sp4boFmVq
+scIbRTJxldn+EFvMr+eleQGPicPK8lVx93e+d5TzcqQsRNiekpsUOqHnJJAKClaO
+xdgmlOHZSOEtPtoKct2jmRXagaKH9HtuJneJWK3W6wyyQXpzbm3benhB6QiIEn6H
+LmYRY2xU+zydcsC8Lv/Ct90NduM61/e0aL6i9eOBbsFGb12N4E3GVFWJGjMxCrFX
+uaOKmMPsOzTFlUFpfnXCPCDFYbpRR6AgkJOhkEvzTnyFRVSa0QUmQbC1TR0zvsQD
+yCV8wXDbO/QJLVQnSKwv4cSsPsjLkkxTOTcj7NMB+eAJRE1NZMDhDVqHIrytG6P+
+JrUV86f8hBnp7KGItERphIPzidF0BqnMC9bC3ieFUCbKF7jJeodWLBoBHmy+E60Q
+rLUk9TiRodZL2vG70t5HtfG8gfZZa88ZU+mNFctKy6lvROUbQc/hhqfK0GqfvEyN
+BjNaooXlkDWgYlwWTvDjovoDGrQscbNYLN57C9saD+veIR8GdwYDsMnvmfzAuU8L
+hij+0rnq49qlw0dpEuDb8PYZi+17cNcC1u2HGCgsBCRMd+RIihrGO5rUD8r6ddIB
+QFqNeb+Lz0vPqhbBleStTIo+F5HUsWLlguWABKQDfo2/2n+iD5dPDNMN+9fR5XJ+
+HMh3/1uaD7euBUbl8agW7EekFwIDAQABo4H2MIHzMIGwBgNVHREEgagwgaWBD2lu
+Zm9AaXplbnBlLmNvbaSBkTCBjjFHMEUGA1UECgw+SVpFTlBFIFMuQS4gLSBDSUYg
+QTAxMzM3MjYwLVJNZXJjLlZpdG9yaWEtR2FzdGVpeiBUMTA1NSBGNjIgUzgxQzBB
+BgNVBAkMOkF2ZGEgZGVsIE1lZGl0ZXJyYW5lbyBFdG9yYmlkZWEgMTQgLSAwMTAx
+MCBWaXRvcmlhLUdhc3RlaXowDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC
+AQYwHQYDVR0OBBYEFB0cZQ6o8iV7tJHP5LGx5r1VdGwFMA0GCSqGSIb3DQEBCwUA
+A4ICAQB4pgwWSp9MiDrAyw6lFn2fuUhfGI8NYjb2zRlrrKvV9pF9rnHzP7MOeIWb
+laQnIUdCSnxIOvVFfLMMjlF4rJUT3sb9fbgakEyrkgPH7UIBzg/YsfqikuFgba56
+awmqxinuaElnMIAkejEWOVt+8Rwu3WwJrfIxwYJOubv5vr8qhT/AQKM6WfxZSzwo
+JNu0FXWuDYi6LnPAvViH5ULy617uHjAimcs30cQhbIHsvm0m5hzkQiCeR7Csg1lw
+LDXWrzY0tM07+DKo7+N4ifuNRSzanLh+QBxh5z6ikixL8s36mLYp//Pye6kfLqCT
+VyvehQP5aTfLnnhqBbTFMXiJ7HqnheG5ezzevh55hM6fcA5ZwjUukCox2eRFekGk
+LhObNA5me0mrZJfQRsN5nXJQY6aYWwa9SG3YOYNw6DXwBdGqvOPbyALqfP2C2sJb
+UjWumDqtujWTI6cfSN01RpiyEGjkpTHCClguGYEQyVB1/OpaFs4R1+7vUIgtYf8/
+QnMFlEPVjjxOAToZpR9GTnfQXeWBIiGH/pR9hNiTrdZoQ0iy2+tzJOeRf1SktoA+
+naM8THLCV8Sg1Mw4J87VBp6iSNnpn86CcDaTmjvfliHjWbcM2pE38P1ZWrOZyGls
+QyYBNWNgVYkDOnXYukrZVP/u3oDYLdE41V4tC5h9Pmzb/CaIxw==
+-----END CERTIFICATE-----
+
+Chambers of Commerce Root - 2008
+================================
+-----BEGIN CERTIFICATE-----
+MIIHTzCCBTegAwIBAgIJAKPaQn6ksa7aMA0GCSqGSIb3DQEBBQUAMIGuMQswCQYD
+VQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0
+IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3
+MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xKTAnBgNVBAMTIENoYW1iZXJz
+IG9mIENvbW1lcmNlIFJvb3QgLSAyMDA4MB4XDTA4MDgwMTEyMjk1MFoXDTM4MDcz
+MTEyMjk1MFowga4xCzAJBgNVBAYTAkVVMUMwQQYDVQQHEzpNYWRyaWQgKHNlZSBj
+dXJyZW50IGFkZHJlc3MgYXQgd3d3LmNhbWVyZmlybWEuY29tL2FkZHJlc3MpMRIw
+EAYDVQQFEwlBODI3NDMyODcxGzAZBgNVBAoTEkFDIENhbWVyZmlybWEgUy5BLjEp
+MCcGA1UEAxMgQ2hhbWJlcnMgb2YgQ29tbWVyY2UgUm9vdCAtIDIwMDgwggIiMA0G
+CSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCvAMtwNyuAWko6bHiUfaN/Gh/2NdW9
+28sNRHI+JrKQUrpjOyhYb6WzbZSm891kDFX29ufyIiKAXuFixrYp4YFs8r/lfTJq
+VKAyGVn+H4vXPWCGhSRv4xGzdz4gljUha7MI2XAuZPeEklPWDrCQiorjh40G072Q
+DuKZoRuGDtqaCrsLYVAGUvGef3bsyw/QHg3PmTA9HMRFEFis1tPo1+XqxQEHd9ZR
+5gN/ikilTWh1uem8nk4ZcfUyS5xtYBkL+8ydddy/Js2Pk3g5eXNeJQ7KXOt3EgfL
+ZEFHcpOrUMPrCXZkNNI5t3YRCQ12RcSprj1qr7V9ZS+UWBDsXHyvfuK2GNnQm05a
+Sd+pZgvMPMZ4fKecHePOjlO+Bd5gD2vlGts/4+EhySnB8esHnFIbAURRPHsl18Tl
+UlRdJQfKFiC4reRB7noI/plvg6aRArBsNlVq5331lubKgdaX8ZSD6e2wsWsSaR6s
++12pxZjptFtYer49okQ6Y1nUCyXeG0+95QGezdIp1Z8XGQpvvwyQ0wlf2eOKNcx5
+Wk0ZN5K3xMGtr/R5JJqyAQuxr1yW84Ay+1w9mPGgP0revq+ULtlVmhduYJ1jbLhj
+ya6BXBg14JC7vjxPNyK5fuvPnnchpj04gftI2jE9K+OJ9dC1vX7gUMQSibMjmhAx
+hduub+84Mxh2EQIDAQABo4IBbDCCAWgwEgYDVR0TAQH/BAgwBgEB/wIBDDAdBgNV
+HQ4EFgQU+SSsD7K1+HnA+mCIG8TZTQKeFxkwgeMGA1UdIwSB2zCB2IAU+SSsD7K1
++HnA+mCIG8TZTQKeFxmhgbSkgbEwga4xCzAJBgNVBAYTAkVVMUMwQQYDVQQHEzpN
+YWRyaWQgKHNlZSBjdXJyZW50IGFkZHJlc3MgYXQgd3d3LmNhbWVyZmlybWEuY29t
+L2FkZHJlc3MpMRIwEAYDVQQFEwlBODI3NDMyODcxGzAZBgNVBAoTEkFDIENhbWVy
+ZmlybWEgUy5BLjEpMCcGA1UEAxMgQ2hhbWJlcnMgb2YgQ29tbWVyY2UgUm9vdCAt
+IDIwMDiCCQCj2kJ+pLGu2jAOBgNVHQ8BAf8EBAMCAQYwPQYDVR0gBDYwNDAyBgRV
+HSAAMCowKAYIKwYBBQUHAgEWHGh0dHA6Ly9wb2xpY3kuY2FtZXJmaXJtYS5jb20w
+DQYJKoZIhvcNAQEFBQADggIBAJASryI1wqM58C7e6bXpeHxIvj99RZJe6dqxGfwW
+PJ+0W2aeaufDuV2I6A+tzyMP3iU6XsxPpcG1Lawk0lgH3qLPaYRgM+gQDROpI9CF
+5Y57pp49chNyM/WqfcZjHwj0/gF/JM8rLFQJ3uIrbZLGOU8W6jx+ekbURWpGqOt1
+glanq6B8aBMz9p0w8G8nOSQjKpD9kCk18pPfNKXG9/jvjA9iSnyu0/VU+I22mlaH
+FoI6M6taIgj3grrqLuBHmrS1RaMFO9ncLkVAO+rcf+g769HsJtg1pDDFOqxXnrN2
+pSB7+R5KBWIBpih1YJeSDW4+TTdDDZIVnBgizVGZoCkaPF+KMjNbMMeJL0eYD6MD
+xvbxrN8y8NmBGuScvfaAFPDRLLmF9dijscilIeUcE5fuDr3fKanvNFNb0+RqE4QG
+tjICxFKuItLcsiFCGtpA8CnJ7AoMXOLQusxI0zcKzBIKinmwPQN/aUv0NCB9szTq
+jktk9T79syNnFQ0EuPAtwQlRPLJsFfClI9eDdOTlLsn+mCdCxqvGnrDQWzilm1De
+fhiYtUU79nm06PcaewaD+9CL2rvHvRirCG88gGtAPxkZumWK5r7VXNM21+9AUiRg
+OGcEMeyP84LG3rlV8zsxkVrctQgVrXYlCg17LofiDKYGvCYQbTed7N14jHyAxfDZ
+d0jQ
+-----END CERTIFICATE-----
+
+Global Chambersign Root - 2008
+==============================
+-----BEGIN CERTIFICATE-----
+MIIHSTCCBTGgAwIBAgIJAMnN0+nVfSPOMA0GCSqGSIb3DQEBBQUAMIGsMQswCQYD
+VQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3VycmVudCBhZGRyZXNzIGF0
+IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAGA1UEBRMJQTgyNzQzMjg3
+MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xJzAlBgNVBAMTHkdsb2JhbCBD
+aGFtYmVyc2lnbiBSb290IC0gMjAwODAeFw0wODA4MDExMjMxNDBaFw0zODA3MzEx
+MjMxNDBaMIGsMQswCQYDVQQGEwJFVTFDMEEGA1UEBxM6TWFkcmlkIChzZWUgY3Vy
+cmVudCBhZGRyZXNzIGF0IHd3dy5jYW1lcmZpcm1hLmNvbS9hZGRyZXNzKTESMBAG
+A1UEBRMJQTgyNzQzMjg3MRswGQYDVQQKExJBQyBDYW1lcmZpcm1hIFMuQS4xJzAl
+BgNVBAMTHkdsb2JhbCBDaGFtYmVyc2lnbiBSb290IC0gMjAwODCCAiIwDQYJKoZI
+hvcNAQEBBQADggIPADCCAgoCggIBAMDfVtPkOpt2RbQT2//BthmLN0EYlVJH6xed
+KYiONWwGMi5HYvNJBL99RDaxccy9Wglz1dmFRP+RVyXfXjaOcNFccUMd2drvXNL7
+G706tcuto8xEpw2uIRU/uXpbknXYpBI4iRmKt4DS4jJvVpyR1ogQC7N0ZJJ0YPP2
+zxhPYLIj0Mc7zmFLmY/CDNBAspjcDahOo7kKrmCgrUVSY7pmvWjg+b4aqIG7HkF4
+ddPB/gBVsIdU6CeQNR1MM62X/JcumIS/LMmjv9GYERTtY/jKmIhYF5ntRQOXfjyG
+HoiMvvKRhI9lNNgATH23MRdaKXoKGCQwoze1eqkBfSbW+Q6OWfH9GzO1KTsXO0G2
+Id3UwD2ln58fQ1DJu7xsepeY7s2MH/ucUa6LcL0nn3HAa6x9kGbo1106DbDVwo3V
+yJ2dwW3Q0L9R5OP4wzg2rtandeavhENdk5IMagfeOx2YItaswTXbo6Al/3K1dh3e
+beksZixShNBFks4c5eUzHdwHU1SjqoI7mjcv3N2gZOnm3b2u/GSFHTynyQbehP9r
+6GsaPMWis0L7iwk+XwhSx2LE1AVxv8Rk5Pihg+g+EpuoHtQ2TS9x9o0o9oOpE9Jh
+wZG7SMA0j0GMS0zbaRL/UJScIINZc+18ofLx/d33SdNDWKBWY8o9PeU1VlnpDsog
+zCtLkykPAgMBAAGjggFqMIIBZjASBgNVHRMBAf8ECDAGAQH/AgEMMB0GA1UdDgQW
+BBS5CcqcHtvTbDprru1U8VuTBjUuXjCB4QYDVR0jBIHZMIHWgBS5CcqcHtvTbDpr
+ru1U8VuTBjUuXqGBsqSBrzCBrDELMAkGA1UEBhMCRVUxQzBBBgNVBAcTOk1hZHJp
+ZCAoc2VlIGN1cnJlbnQgYWRkcmVzcyBhdCB3d3cuY2FtZXJmaXJtYS5jb20vYWRk
+cmVzcykxEjAQBgNVBAUTCUE4Mjc0MzI4NzEbMBkGA1UEChMSQUMgQ2FtZXJmaXJt
+YSBTLkEuMScwJQYDVQQDEx5HbG9iYWwgQ2hhbWJlcnNpZ24gUm9vdCAtIDIwMDiC
+CQDJzdPp1X0jzjAOBgNVHQ8BAf8EBAMCAQYwPQYDVR0gBDYwNDAyBgRVHSAAMCow
+KAYIKwYBBQUHAgEWHGh0dHA6Ly9wb2xpY3kuY2FtZXJmaXJtYS5jb20wDQYJKoZI
+hvcNAQEFBQADggIBAICIf3DekijZBZRG/5BXqfEv3xoNa/p8DhxJJHkn2EaqbylZ
+UohwEurdPfWbU1Rv4WCiqAm57OtZfMY18dwY6fFn5a+6ReAJ3spED8IXDneRRXoz
+X1+WLGiLwUePmJs9wOzL9dWCkoQ10b42OFZyMVtHLaoXpGNR6woBrX/sdZ7LoR/x
+fxKxueRkf2fWIyr0uDldmOghp+G9PUIadJpwr2hsUF1Jz//7Dl3mLEfXgTpZALVz
+a2Mg9jFFCDkO9HB+QHBaP9BrQql0PSgvAm11cpUJjUhjxsYjV5KTXjXBjfkK9yyd
+Yhz2rXzdpjEetrHHfoUm+qRqtdpjMNHvkzeyZi99Bffnt0uYlDXA2TopwZ2yUDMd
+SqlapskD7+3056huirRXhOukP9DuqqqHW2Pok+JrqNS4cnhrG+055F3Lm6qH1U9O
+AP7Zap88MQ8oAgF9mOinsKJknnn4SPIVqczmyETrP3iZ8ntxPjzxmKfFGBI/5rso
+M0LpRQp8bfKGeS/Fghl9CYl8slR2iK7ewfPM4W7bMdaTrpmg7yVqc5iJWzouE4ge
+v8CSlDQb4ye3ix5vQv/n6TebUB0tovkC7stYWDpxvGjjqsGvHCgfotwjZT+B6q6Z
+09gwzxMNTxXJhLynSC34MCN32EZLeW32jO06f2ARePTpm67VVMB0gNELQp/B
+-----END CERTIFICATE-----
+
Go Daddy Root Certificate Authority - G2
========================================
-
-----BEGIN CERTIFICATE-----
MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx
EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT
@@ -1961,7 +3803,6 @@ LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI
Starfield Root Certificate Authority - G2
=========================================
-
-----BEGIN CERTIFICATE-----
MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx
EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT
@@ -1988,7 +3829,6 @@ mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0
Starfield Services Root Certificate Authority - G2
==================================================
-
-----BEGIN CERTIFICATE-----
MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx
EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT
@@ -2016,7 +3856,6 @@ sSi6
AffirmTrust Commercial
======================
-
-----BEGIN CERTIFICATE-----
MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UE
BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz
@@ -2040,7 +3879,6 @@ nlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8=
AffirmTrust Networking
======================
-
-----BEGIN CERTIFICATE-----
MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UE
BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz
@@ -2064,7 +3902,6 @@ x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s=
AffirmTrust Premium
===================
-
-----BEGIN CERTIFICATE-----
MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UE
BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVz
@@ -2099,7 +3936,6 @@ KeC2uAloGRwYQw==
AffirmTrust Premium ECC
=======================
-
-----BEGIN CERTIFICATE-----
MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMC
VVMxFDASBgNVBAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQ
@@ -2114,9 +3950,304 @@ aobgxCd05DhT1wV/GzTjxi+zygk8N53X57hG8f2h4nECMEJZh0PUUd+60wkyWs6I
flc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKMeQ==
-----END CERTIFICATE-----
+Certum Trusted Network CA
+=========================
+-----BEGIN CERTIFICATE-----
+MIIDuzCCAqOgAwIBAgIDBETAMA0GCSqGSIb3DQEBBQUAMH4xCzAJBgNVBAYTAlBM
+MSIwIAYDVQQKExlVbml6ZXRvIFRlY2hub2xvZ2llcyBTLkEuMScwJQYDVQQLEx5D
+ZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxIjAgBgNVBAMTGUNlcnR1bSBU
+cnVzdGVkIE5ldHdvcmsgQ0EwHhcNMDgxMDIyMTIwNzM3WhcNMjkxMjMxMTIwNzM3
+WjB+MQswCQYDVQQGEwJQTDEiMCAGA1UEChMZVW5pemV0byBUZWNobm9sb2dpZXMg
+Uy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MSIw
+IAYDVQQDExlDZXJ0dW0gVHJ1c3RlZCBOZXR3b3JrIENBMIIBIjANBgkqhkiG9w0B
+AQEFAAOCAQ8AMIIBCgKCAQEA4/t9o3K6wvDJFIf1awFO4W5AB7ptJ11/91sts1rH
+UV+rpDKmYYe2bg+G0jACl/jXaVehGDldamR5xgFZrDwxSjh80gTSSyjoIF87B6LM
+TXPb865Px1bVWqeWifrzq2jUI4ZZJ88JJ7ysbnKDHDBy3+Ci6dLhdHUZvSqeexVU
+BBvXQzmtVSjF4hq79MDkrjhJM8x2hZ85RdKknvISjFH4fOQtf/WsX+sWn7Et0brM
+kUJ3TCXJkDhv2/DM+44el1k+1WBO5gUo7Ul5E0u6SNsv+XLTOcr+H9g0cvW0QM8x
+AcPs3hEtF10fuFDRXhmnad4HMyjKUJX5p1TLVIZQRan5SQIDAQABo0IwQDAPBgNV
+HRMBAf8EBTADAQH/MB0GA1UdDgQWBBQIds3LB/8k9sXN7buQvOKEN0Z19zAOBgNV
+HQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggEBAKaorSLOAT2mo/9i0Eidi15y
+sHhE49wcrwn9I0j6vSrEuVUEtRCjjSfeC4Jj0O7eDDd5QVsisrCaQVymcODU0HfL
+I9MA4GxWL+FpDQ3Zqr8hgVDZBqWo/5U30Kr+4rP1mS1FhIrlQgnXdAIv94nYmem8
+J9RHjboNRhx3zxSkHLmkMcScKHQDNP8zGSal6Q10tz6XxnboJ5ajZt3hrvJBW8qY
+VoNzcOSGGtIxQbovvi0TWnZvTuhOgQ4/WwMioBK+ZlgRSssDxLQqKi2WF+A5VLxI
+03YnnZotBqbJ7DnSq9ufmgsnAjUpsUCV5/nonFWIGUbWtzT1fs45mtk48VH3Tyw=
+-----END CERTIFICATE-----
+
+Certinomis - Autorité Racine
+=============================
+-----BEGIN CERTIFICATE-----
+MIIFnDCCA4SgAwIBAgIBATANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJGUjET
+MBEGA1UEChMKQ2VydGlub21pczEXMBUGA1UECxMOMDAwMiA0MzM5OTg5MDMxJjAk
+BgNVBAMMHUNlcnRpbm9taXMgLSBBdXRvcml0w6kgUmFjaW5lMB4XDTA4MDkxNzA4
+Mjg1OVoXDTI4MDkxNzA4Mjg1OVowYzELMAkGA1UEBhMCRlIxEzARBgNVBAoTCkNl
+cnRpbm9taXMxFzAVBgNVBAsTDjAwMDIgNDMzOTk4OTAzMSYwJAYDVQQDDB1DZXJ0
+aW5vbWlzIC0gQXV0b3JpdMOpIFJhY2luZTCCAiIwDQYJKoZIhvcNAQEBBQADggIP
+ADCCAgoCggIBAJ2Fn4bT46/HsmtuM+Cet0I0VZ35gb5j2CN2DpdUzZlMGvE5x4jY
+F1AMnmHawE5V3udauHpOd4cN5bjr+p5eex7Ezyh0x5P1FMYiKAT5kcOrJ3NqDi5N
+8y4oH3DfVS9O7cdxbwlyLu3VMpfQ8Vh30WC8Tl7bmoT2R2FFK/ZQpn9qcSdIhDWe
+rP5pqZ56XjUl+rSnSTV3lqc2W+HN3yNw2F1MpQiD8aYkOBOo7C+ooWfHpi2GR+6K
+/OybDnT0K0kCe5B1jPyZOQE51kqJ5Z52qz6WKDgmi92NjMD2AR5vpTESOH2VwnHu
+7XSu5DaiQ3XV8QCb4uTXzEIDS3h65X27uK4uIJPT5GHfceF2Z5c/tt9qc1pkIuVC
+28+BA5PY9OMQ4HL2AHCs8MF6DwV/zzRpRbWT5BnbUhYjBYkOjUjkJW+zeL9i9Qf6
+lSTClrLooyPCXQP8w9PlfMl1I9f09bze5N/NgL+RiH2nE7Q5uiy6vdFrzPOlKO1E
+nn1So2+WLhl+HPNbxxaOu2B9d2ZHVIIAEWBsMsGoOBvrbpgT1u449fCfDu/+MYHB
+0iSVL1N6aaLwD4ZFjliCK0wi1F6g530mJ0jfJUaNSih8hp75mxpZuWW/Bd22Ql09
+5gBIgl4g9xGC3srYn+Y3RyYe63j3YcNBZFgCQfna4NH4+ej9Uji29YnfAgMBAAGj
+WzBZMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBQN
+jLZh2kS40RR9w759XkjwzspqsDAXBgNVHSAEEDAOMAwGCiqBegFWAgIAAQEwDQYJ
+KoZIhvcNAQEFBQADggIBACQ+YAZ+He86PtvqrxyaLAEL9MW12Ukx9F1BjYkMTv9s
+ov3/4gbIOZ/xWqndIlgVqIrTseYyCYIDbNc/CMf4uboAbbnW/FIyXaR/pDGUu7ZM
+OH8oMDX/nyNTt7buFHAAQCvaR6s0fl6nVjBhK4tDrP22iCj1a7Y+YEq6QpA0Z43q
+619FVDsXrIvkxmUP7tCMXWY5zjKn2BCXwH40nJ+U8/aGH88bc62UeYdocMMzpXDn
+2NU4lG9jeeu/Cg4I58UvD0KgKxRA/yHgBcUn4YQRE7rWhh1BCxMjidPJC+iKunqj
+o3M3NYB9Ergzd0A4wPpeMNLytqOx1qKVl4GbUu1pTP+A5FPbVFsDbVRfsbjvJL1v
+nxHDx2TCDyhihWZeGnuyt++uNckZM6i4J9szVb9o4XVIRFb7zdNIu0eJOqxp9YDG
+5ERQL1TEqkPFMTFYvZbF6nVsmnWxTfj3l/+WFvKXTej28xH5On2KOG4Ey+HTRRWq
+pdEdnV1j6CTmNhTih60bWfVEm/vXd3wfAXBioSAaosUaKPQhA+4u2cGA6rnZgtZb
+dsLLO7XSAPCjDuGtbkD326C00EauFddEwk01+dIL8hf2rGbVJLJP0RyZwG71fet0
+BLj5TXcJ17TPBzAJ8bgAVtkXFhYKK4bfjwEZGuW7gmP/vgt2Fl43N+bYdJeimUV5
+-----END CERTIFICATE-----
+
+Root CA Generalitat Valenciana
+==============================
+-----BEGIN CERTIFICATE-----
+MIIGizCCBXOgAwIBAgIEO0XlaDANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJF
+UzEfMB0GA1UEChMWR2VuZXJhbGl0YXQgVmFsZW5jaWFuYTEPMA0GA1UECxMGUEtJ
+R1ZBMScwJQYDVQQDEx5Sb290IENBIEdlbmVyYWxpdGF0IFZhbGVuY2lhbmEwHhcN
+MDEwNzA2MTYyMjQ3WhcNMjEwNzAxMTUyMjQ3WjBoMQswCQYDVQQGEwJFUzEfMB0G
+A1UEChMWR2VuZXJhbGl0YXQgVmFsZW5jaWFuYTEPMA0GA1UECxMGUEtJR1ZBMScw
+JQYDVQQDEx5Sb290IENBIEdlbmVyYWxpdGF0IFZhbGVuY2lhbmEwggEiMA0GCSqG
+SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDGKqtXETcvIorKA3Qdyu0togu8M1JAJke+
+WmmmO3I2F0zo37i7L3bhQEZ0ZQKQUgi0/6iMweDHiVYQOTPvaLRfX9ptI6GJXiKj
+SgbwJ/BXufjpTjJ3Cj9BZPPrZe52/lSqfR0grvPXdMIKX/UIKFIIzFVd0g/bmoGl
+u6GzwZTNVOAydTGRGmKy3nXiz0+J2ZGQD0EbtFpKd71ng+CT516nDOeB0/RSrFOy
+A8dEJvt55cs0YFAQexvba9dHq198aMpunUEDEO5rmXteJajCq+TA81yc477OMUxk
+Hl6AovWDfgzWyoxVjr7gvkkHD6MkQXpYHYTqWBLI4bft75PelAgxAgMBAAGjggM7
+MIIDNzAyBggrBgEFBQcBAQQmMCQwIgYIKwYBBQUHMAGGFmh0dHA6Ly9vY3NwLnBr
+aS5ndmEuZXMwEgYDVR0TAQH/BAgwBgEB/wIBAjCCAjQGA1UdIASCAiswggInMIIC
+IwYKKwYBBAG/VQIBADCCAhMwggHoBggrBgEFBQcCAjCCAdoeggHWAEEAdQB0AG8A
+cgBpAGQAYQBkACAAZABlACAAQwBlAHIAdABpAGYAaQBjAGEAYwBpAPMAbgAgAFIA
+YQDtAHoAIABkAGUAIABsAGEAIABHAGUAbgBlAHIAYQBsAGkAdABhAHQAIABWAGEA
+bABlAG4AYwBpAGEAbgBhAC4ADQAKAEwAYQAgAEQAZQBjAGwAYQByAGEAYwBpAPMA
+bgAgAGQAZQAgAFAAcgDhAGMAdABpAGMAYQBzACAAZABlACAAQwBlAHIAdABpAGYA
+aQBjAGEAYwBpAPMAbgAgAHEAdQBlACAAcgBpAGcAZQAgAGUAbAAgAGYAdQBuAGMA
+aQBvAG4AYQBtAGkAZQBuAHQAbwAgAGQAZQAgAGwAYQAgAHAAcgBlAHMAZQBuAHQA
+ZQAgAEEAdQB0AG8AcgBpAGQAYQBkACAAZABlACAAQwBlAHIAdABpAGYAaQBjAGEA
+YwBpAPMAbgAgAHMAZQAgAGUAbgBjAHUAZQBuAHQAcgBhACAAZQBuACAAbABhACAA
+ZABpAHIAZQBjAGMAaQDzAG4AIAB3AGUAYgAgAGgAdAB0AHAAOgAvAC8AdwB3AHcA
+LgBwAGsAaQAuAGcAdgBhAC4AZQBzAC8AYwBwAHMwJQYIKwYBBQUHAgEWGWh0dHA6
+Ly93d3cucGtpLmd2YS5lcy9jcHMwHQYDVR0OBBYEFHs100DSHHgZZu90ECjcPk+y
+eAT8MIGVBgNVHSMEgY0wgYqAFHs100DSHHgZZu90ECjcPk+yeAT8oWykajBoMQsw
+CQYDVQQGEwJFUzEfMB0GA1UEChMWR2VuZXJhbGl0YXQgVmFsZW5jaWFuYTEPMA0G
+A1UECxMGUEtJR1ZBMScwJQYDVQQDEx5Sb290IENBIEdlbmVyYWxpdGF0IFZhbGVu
+Y2lhbmGCBDtF5WgwDQYJKoZIhvcNAQEFBQADggEBACRhTvW1yEICKrNcda3Fbcrn
+lD+laJWIwVTAEGmiEi8YPyVQqHxK6sYJ2fR1xkDar1CdPaUWu20xxsdzCkj+IHLt
+b8zog2EWRpABlUt9jppSCS/2bxzkoXHPjCpaF3ODR00PNvsETUlR4hTJZGH71BTg
+9J63NI8KJr2XXPR5OkowGcytT6CYirQxlyric21+eLj4iIlPsSKRZEv1UN4D2+XF
+ducTZnV+ZfsBn5OHiJ35Rld8TWCvmHMTI6QgkYH60GFmuH3Rr9ZvHmw96RH9qfmC
+IoaZM3Fa6hlXPZHNqcCjbgcTpsnt+GijnsNacgmHKNHEc8RzGF9QdRYxn7fofMM=
+-----END CERTIFICATE-----
+
+A-Trust-nQual-03
+================
+-----BEGIN CERTIFICATE-----
+MIIDzzCCAregAwIBAgIDAWweMA0GCSqGSIb3DQEBBQUAMIGNMQswCQYDVQQGEwJB
+VDFIMEYGA1UECgw/QS1UcnVzdCBHZXMuIGYuIFNpY2hlcmhlaXRzc3lzdGVtZSBp
+bSBlbGVrdHIuIERhdGVudmVya2VociBHbWJIMRkwFwYDVQQLDBBBLVRydXN0LW5R
+dWFsLTAzMRkwFwYDVQQDDBBBLVRydXN0LW5RdWFsLTAzMB4XDTA1MDgxNzIyMDAw
+MFoXDTE1MDgxNzIyMDAwMFowgY0xCzAJBgNVBAYTAkFUMUgwRgYDVQQKDD9BLVRy
+dXN0IEdlcy4gZi4gU2ljaGVyaGVpdHNzeXN0ZW1lIGltIGVsZWt0ci4gRGF0ZW52
+ZXJrZWhyIEdtYkgxGTAXBgNVBAsMEEEtVHJ1c3QtblF1YWwtMDMxGTAXBgNVBAMM
+EEEtVHJ1c3QtblF1YWwtMDMwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB
+AQCtPWFuA/OQO8BBC4SAzewqo51ru27CQoT3URThoKgtUaNR8t4j8DRE/5TrzAUj
+lUC5B3ilJfYKvUWG6Nm9wASOhURh73+nyfrBJcyFLGM/BWBzSQXgYHiVEEvc+RFZ
+znF/QJuKqiTfC0Li21a8StKlDJu3Qz7dg9MmEALP6iPESU7l0+m0iKsMrmKS1GWH
+2WrX9IWf5DMiJaXlyDO6w8dB3F/GaswADm0yqLaHNgBid5seHzTLkDx4iHQF63n1
+k3Flyp3HaxgtPVxO59X4PzF9j4fsCiIvI+n+u33J4PTs63zEsMMtYrWacdaxaujs
+2e3Vcuy+VwHOBVWf3tFgiBCzAgMBAAGjNjA0MA8GA1UdEwEB/wQFMAMBAf8wEQYD
+VR0OBAoECERqlWdVeRFPMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQUFAAOC
+AQEAVdRU0VlIXLOThaq/Yy/kgM40ozRiPvbY7meIMQQDbwvUB/tOdQ/TLtPAF8fG
+KOwGDREkDg6lXb+MshOWcdzUzg4NCmgybLlBMRmrsQd7TZjTXLDR8KdCoLXEjq/+
+8T/0709GAHbrAvv5ndJAlseIOrifEXnzgGWovR/TeIGgUUw3tKZdJXDRZslo+S4R
+FGjxVJgIrCaSD96JntT6s3kr0qN51OyLrIdTaEJMUVF0HhsnLuP1Hyl0Te2v9+GS
+mYHovjrHF1D2t8b8m7CKa9aIA5GPBnc6hQLdmNVDeD/GMBWsm2vLV7eJUYs66MmE
+DNuxUCAKGkq6ahq97BvIxYSazQ==
+-----END CERTIFICATE-----
+
+TWCA Root Certification Authority
+=================================
+-----BEGIN CERTIFICATE-----
+MIIDezCCAmOgAwIBAgIBATANBgkqhkiG9w0BAQUFADBfMQswCQYDVQQGEwJUVzES
+MBAGA1UECgwJVEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFU
+V0NBIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwODI4MDcyNDMz
+WhcNMzAxMjMxMTU1OTU5WjBfMQswCQYDVQQGEwJUVzESMBAGA1UECgwJVEFJV0FO
+LUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NBIFJvb3QgQ2VydGlm
+aWNhdGlvbiBBdXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB
+AQCwfnK4pAOU5qfeCTiRShFAh6d8WWQUe7UREN3+v9XAu1bihSX0NXIP+FPQQeFE
+AcK0HMMxQhZHhTMidrIKbw/lJVBPhYa+v5guEGcevhEFhgWQxFnQfHgQsIBct+HH
+K3XLfJ+utdGdIzdjp9xCoi2SBBtQwXu4PhvJVgSLL1KbralW6cH/ralYhzC2gfeX
+RfwZVzsrb+RH9JlF/h3x+JejiB03HFyP4HYlmlD4oFT/RJB2I9IyxsOrBr/8+7/z
+rX2SYgJbKdM1o5OaQ2RgXbL6Mv87BK9NQGr5x+PvI/1ry+UPizgN7gr8/g+YnzAx
+3WxSZfmLgb4i4RxYA7qRG4kHAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV
+HRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqOFsmjd6LWvJPelSDGRjjCDWmujANBgkq
+hkiG9w0BAQUFAAOCAQEAPNV3PdrfibqHDAhUaiBQkr6wQT25JmSDCi/oQMCXKCeC
+MErJk/9q56YAf4lCmtYR5VPOL8zy2gXE/uJQxDqGfczafhAJO5I1KlOy/usrBdls
+XebQ79NqZp4VKIV66IIArB6nCWlWQtNoURi+VJq/REG6Sb4gumlc7rh3zc5sH62D
+lhh9DrUUOYTxKOkto557HnpyWoOzeW/vtPzQCqVYT0bf+215WfKEIlKuD8z7fDvn
+aspHYcN6+NOSBB+4IIThNlQWx0DeO4pz3N/GCUzf7Nr/1FNCocnyYh0igzyXxfkZ
+YiesZSLX0zzG5Y6yU8xJzrww/nsOM5D77dIUkR8Hrw==
+-----END CERTIFICATE-----
+
+Security Communication RootCA2
+==============================
+-----BEGIN CERTIFICATE-----
+MIIDdzCCAl+gAwIBAgIBADANBgkqhkiG9w0BAQsFADBdMQswCQYDVQQGEwJKUDEl
+MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UECxMe
+U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBSb290Q0EyMB4XDTA5MDUyOTA1MDAzOVoX
+DTI5MDUyOTA1MDAzOVowXTELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09NIFRy
+dXN0IFN5c3RlbXMgQ08uLExURC4xJzAlBgNVBAsTHlNlY3VyaXR5IENvbW11bmlj
+YXRpb24gUm9vdENBMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANAV
+OVKxUrO6xVmCxF1SrjpDZYBLx/KWvNs2l9amZIyoXvDjChz335c9S672XewhtUGr
+zbl+dp+++T42NKA7wfYxEUV0kz1XgMX5iZnK5atq1LXaQZAQwdbWQonCv/Q4EpVM
+VAX3NuRFg3sUZdbcDE3R3n4MqzvEFb46VqZab3ZpUql6ucjrappdUtAtCms1FgkQ
+hNBqyjoGADdH5H5XTz+L62e4iKrFvlNVspHEfbmwhRkGeC7bYRr6hfVKkaHnFtWO
+ojnflLhwHyg/i/xAXmODPIMqGplrz95Zajv8bxbXH/1KEOtOghY6rCcMU/Gt1SSw
+awNQwS08Ft1ENCcadfsCAwEAAaNCMEAwHQYDVR0OBBYEFAqFqXdlBZh8QIH4D5cs
+OPEK7DzPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3
+DQEBCwUAA4IBAQBMOqNErLlFsceTfsgLCkLfZOoc7llsCLqJX2rKSpWeeo8HxdpF
+coJxDjrSzG+ntKEju/Ykn8sX/oymzsLS28yN/HH8AynBbF0zX2S2ZTuJbxh2ePXc
+okgfGT+Ok+vx+hfuzU7jBBJV1uXk3fs+BXziHV7Gp7yXT2g69ekuCkO2r1dcYmh8
+t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6qtnRGEmyR7jTV7JqR50S+kDFy
+1UkC9gLl9B/rfNmWVan/7Ir5mUf/NVoCqgTLiluHcSmRvaS0eg29mvVXIwAHIRc/
+SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03
+-----END CERTIFICATE-----
+
+EC-ACC
+======
+-----BEGIN CERTIFICATE-----
+MIIFVjCCBD6gAwIBAgIQ7is969Qh3hSoYqwE893EATANBgkqhkiG9w0BAQUFADCB
+8zELMAkGA1UEBhMCRVMxOzA5BgNVBAoTMkFnZW5jaWEgQ2F0YWxhbmEgZGUgQ2Vy
+dGlmaWNhY2lvIChOSUYgUS0wODAxMTc2LUkpMSgwJgYDVQQLEx9TZXJ2ZWlzIFB1
+YmxpY3MgZGUgQ2VydGlmaWNhY2lvMTUwMwYDVQQLEyxWZWdldSBodHRwczovL3d3
+dy5jYXRjZXJ0Lm5ldC92ZXJhcnJlbCAoYykwMzE1MDMGA1UECxMsSmVyYXJxdWlh
+IEVudGl0YXRzIGRlIENlcnRpZmljYWNpbyBDYXRhbGFuZXMxDzANBgNVBAMTBkVD
+LUFDQzAeFw0wMzAxMDcyMzAwMDBaFw0zMTAxMDcyMjU5NTlaMIHzMQswCQYDVQQG
+EwJFUzE7MDkGA1UEChMyQWdlbmNpYSBDYXRhbGFuYSBkZSBDZXJ0aWZpY2FjaW8g
+KE5JRiBRLTA4MDExNzYtSSkxKDAmBgNVBAsTH1NlcnZlaXMgUHVibGljcyBkZSBD
+ZXJ0aWZpY2FjaW8xNTAzBgNVBAsTLFZlZ2V1IGh0dHBzOi8vd3d3LmNhdGNlcnQu
+bmV0L3ZlcmFycmVsIChjKTAzMTUwMwYDVQQLEyxKZXJhcnF1aWEgRW50aXRhdHMg
+ZGUgQ2VydGlmaWNhY2lvIENhdGFsYW5lczEPMA0GA1UEAxMGRUMtQUNDMIIBIjAN
+BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsyLHT+KXQpWIR4NA9h0X84NzJB5R
+85iKw5K4/0CQBXCHYMkAqbWUZRkiFRfCQ2xmRJoNBD45b6VLeqpjt4pEndljkYRm
+4CgPukLjbo73FCeTae6RDqNfDrHrZqJyTxIThmV6PttPB/SnCWDaOkKZx7J/sxaV
+HMf5NLWUhdWZXqBIoH7nF2W4onW4HvPlQn2v7fOKSGRdghST2MDk/7NQcvJ29rNd
+QlB50JQ+awwAvthrDk4q7D7SzIKiGGUzE3eeml0aE9jD2z3Il3rucO2n5nzbcc8t
+lGLfbdb1OL4/pYUKGbio2Al1QnDE6u/LDsg0qBIimAy4E5S2S+zw0JDnJwIDAQAB
+o4HjMIHgMB0GA1UdEQQWMBSBEmVjX2FjY0BjYXRjZXJ0Lm5ldDAPBgNVHRMBAf8E
+BTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUoMOLRKo3pUW/l4Ba0fF4
+opvpXY0wfwYDVR0gBHgwdjB0BgsrBgEEAfV4AQMBCjBlMCwGCCsGAQUFBwIBFiBo
+dHRwczovL3d3dy5jYXRjZXJ0Lm5ldC92ZXJhcnJlbDA1BggrBgEFBQcCAjApGidW
+ZWdldSBodHRwczovL3d3dy5jYXRjZXJ0Lm5ldC92ZXJhcnJlbCAwDQYJKoZIhvcN
+AQEFBQADggEBAKBIW4IB9k1IuDlVNZyAelOZ1Vr/sXE7zDkJlF7W2u++AVtd0x7Y
+/X1PzaBB4DSTv8vihpw3kpBWHNzrKQXlxJ7HNd+KDM3FIUPpqojlNcAZQmNaAl6k
+SBg6hW/cnbw/nZzBh7h6YQjpdwt/cKt63dmXLGQehb+8dJahw3oS7AwaboMMPOhy
+Rp/7SNVel+axofjk70YllJyJ22k4vuxcDlbHZVHlUIiIv0LVKz3l+bqeLrPK9HOS
+Agu+TGbrIP65y7WZf+a2E/rKS03Z7lNGBjvGTq2TWoF+bCpLagVFjPIhpDGQh2xl
+nJ2lYJU6Un/10asIbvPuW/mIPX64b24D5EI=
+-----END CERTIFICATE-----
+
+Hellenic Academic and Research Institutions RootCA 2011
+=======================================================
+-----BEGIN CERTIFICATE-----
+MIIEMTCCAxmgAwIBAgIBADANBgkqhkiG9w0BAQUFADCBlTELMAkGA1UEBhMCR1Ix
+RDBCBgNVBAoTO0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1
+dGlvbnMgQ2VydC4gQXV0aG9yaXR5MUAwPgYDVQQDEzdIZWxsZW5pYyBBY2FkZW1p
+YyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIFJvb3RDQSAyMDExMB4XDTExMTIw
+NjEzNDk1MloXDTMxMTIwMTEzNDk1MlowgZUxCzAJBgNVBAYTAkdSMUQwQgYDVQQK
+EztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0dXRpb25zIENl
+cnQuIEF1dGhvcml0eTFAMD4GA1UEAxM3SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl
+c2VhcmNoIEluc3RpdHV0aW9ucyBSb290Q0EgMjAxMTCCASIwDQYJKoZIhvcNAQEB
+BQADggEPADCCAQoCggEBAKlTAOMupvaO+mDYLZU++CwqVE7NuYRhlFhPjz2L5EPz
+dYmNUeTDN9KKiE15HrcS3UN4SoqS5tdI1Q+kOilENbgH9mgdVc04UfCMJDGFr4PJ
+fel3r+0ae50X+bOdOFAPplp5kYCvN66m0zH7tSYJnTxa71HFK9+WXesyHgLacEns
+bgzImjeN9/E2YEsmLIKe0HjzDQ9jpFEw4fkrJxIH2Oq9GGKYsFk3fb7u8yBRQlqD
+75O6aRXxYp2fmTmCobd0LovUxQt7L/DICto9eQqakxylKHJzkUOap9FNhYS5qXSP
+FEDH3N6sQWRstBmbAmNtJGSPRLIl6s5ddAxjMlyNh+UCAwEAAaOBiTCBhjAPBgNV
+HRMBAf8EBTADAQH/MAsGA1UdDwQEAwIBBjAdBgNVHQ4EFgQUppFC/RNhSiOeCKQp
+5dgTBCPuQSUwRwYDVR0eBEAwPqA8MAWCAy5ncjAFggMuZXUwBoIELmVkdTAGggQu
+b3JnMAWBAy5ncjAFgQMuZXUwBoEELmVkdTAGgQQub3JnMA0GCSqGSIb3DQEBBQUA
+A4IBAQAf73lB4XtuP7KMhjdCSk4cNx6NZrokgclPEg8hwAOXhiVtXdMiKahsog2p
+6z0GW5k6x8zDmjR/qw7IThzh+uTczQ2+vyT+bOdrwg3IBp5OjWEopmr95fZi6hg8
+TqBTnbI6nOulnJEWtk2C4AwFSKls9cz4y51JtPACpf1wA+2KIaWuE4ZJwzNzvoc7
+dIsXRSZMFpGD/md9zU1jZ/rzAxKWeAaNsWftjj++n08C9bMJL/NMh98qy5V8Acys
+Nnq/onN694/BtZqhFLKPM58N7yLcZnuEvUUXBj08yrl3NI/K6s8/MT7jiOOASSXI
+l7WdmplNsDz4SgCbZN2fOUvRJ9e4
+-----END CERTIFICATE-----
+
+Actalis Authentication Root CA
+==============================
+-----BEGIN CERTIFICATE-----
+MIIFuzCCA6OgAwIBAgIIVwoRl0LE48wwDQYJKoZIhvcNAQELBQAwazELMAkGA1UE
+BhMCSVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8w
+MzM1ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290
+IENBMB4XDTExMDkyMjExMjIwMloXDTMwMDkyMjExMjIwMlowazELMAkGA1UEBhMC
+SVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8wMzM1
+ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290IENB
+MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp8bEpSmkLO/lGMWwUKNv
+UTufClrJwkg4CsIcoBh/kbWHuUA/3R1oHwiD1S0eiKD4j1aPbZkCkpAW1V8IbInX
+4ay8IMKx4INRimlNAJZaby/ARH6jDuSRzVju3PvHHkVH3Se5CAGfpiEd9UEtL0z9
+KK3giq0itFZljoZUj5NDKd45RnijMCO6zfB9E1fAXdKDa0hMxKufgFpbOr3JpyI/
+gCczWw63igxdBzcIy2zSekciRDXFzMwujt0q7bd9Zg1fYVEiVRvjRuPjPdA1Yprb
+rxTIW6HMiRvhMCb8oJsfgadHHwTrozmSBp+Z07/T6k9QnBn+locePGX2oxgkg4YQ
+51Q+qDp2JE+BIcXjDwL4k5RHILv+1A7TaLndxHqEguNTVHnd25zS8gebLra8Pu2F
+be8lEfKXGkJh90qX6IuxEAf6ZYGyojnP9zz/GPvG8VqLWeICrHuS0E4UT1lF9gxe
+KF+w6D9Fz8+vm2/7hNN3WpVvrJSEnu68wEqPSpP4RCHiMUVhUE4Q2OM1fEwZtN4F
+v6MGn8i1zeQf1xcGDXqVdFUNaBr8EBtiZJ1t4JWgw5QHVw0U5r0F+7if5t+L4sbn
+fpb2U8WANFAoWPASUHEXMLrmeGO89LKtmyuy/uE5jF66CyCU3nuDuP/jVo23Eek7
+jPKxwV2dpAtMK9myGPW1n0sCAwEAAaNjMGEwHQYDVR0OBBYEFFLYiDrIn3hm7Ynz
+ezhwlMkCAjbQMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUUtiIOsifeGbt
+ifN7OHCUyQICNtAwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAL
+e3KHwGCmSUyIWOYdiPcUZEim2FgKDk8TNd81HdTtBjHIgT5q1d07GjLukD0R0i70
+jsNjLiNmsGe+b7bAEzlgqqI0JZN1Ut6nna0Oh4lScWoWPBkdg/iaKWW+9D+a2fDz
+WochcYBNy+A4mz+7+uAwTc+G02UQGRjRlwKxK3JCaKygvU5a2hi/a5iB0P2avl4V
+SM0RFbnAKVy06Ij3Pjaut2L9HmLecHgQHEhb2rykOLpn7VU+Xlff1ANATIGk0k9j
+pwlCCRT8AKnCgHNPLsBA2RF7SOp6AsDT6ygBJlh0wcBzIm2Tlf05fbsq4/aC4yyX
+X04fkZT6/iyj2HYauE2yOE+b+h1IYHkm4vP9qdCa6HCPSXrW5b0KDtst842/6+Ok
+fcvHlXHo2qN8xcL4dJIEG4aspCJTQLas/kx2z/uUMsA1n3Y/buWQbqCmJqK4LL7R
+K4X9p2jIugErsWx0Hbhzlefut8cl8ABMALJ+tguLHPPAUJ4lueAI3jZm/zel0btU
+ZCzJJ7VLkn5l/9Mt4blOvH+kQSGQQXemOR/qnuOf0GZvBeyqdn6/axag67XH/JJU
+LysRJyU3eExRarDzzFhdFPFqSBX/wge2sY0PjlxQRrM9vwGYT7JZVEc+NHt4bVaT
+LnPqZih4zR0Uv6CPLy64Lo7yFIrM6bV8+2ydDKXhlg==
+-----END CERTIFICATE-----
+
+Trustis FPS Root CA
+===================
+-----BEGIN CERTIFICATE-----
+MIIDZzCCAk+gAwIBAgIQGx+ttiD5JNM2a/fH8YygWTANBgkqhkiG9w0BAQUFADBF
+MQswCQYDVQQGEwJHQjEYMBYGA1UEChMPVHJ1c3RpcyBMaW1pdGVkMRwwGgYDVQQL
+ExNUcnVzdGlzIEZQUyBSb290IENBMB4XDTAzMTIyMzEyMTQwNloXDTI0MDEyMTEx
+MzY1NFowRTELMAkGA1UEBhMCR0IxGDAWBgNVBAoTD1RydXN0aXMgTGltaXRlZDEc
+MBoGA1UECxMTVHJ1c3RpcyBGUFMgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQAD
+ggEPADCCAQoCggEBAMVQe547NdDfxIzNjpvto8A2mfRC6qc+gIMPpqdZh8mQRUN+
+AOqGeSoDvT03mYlmt+WKVoaTnGhLaASMk5MCPjDSNzoiYYkchU59j9WvezX2fihH
+iTHcDnlkH5nSW7r+f2C/revnPDgpai/lkQtV/+xvWNUtyd5MZnGPDNcE2gfmHhjj
+vSkCqPoc4Vu5g6hBSLwacY3nYuUtsuvffM/bq1rKMfFMIvMFE/eC+XN5DL7XSxzA
+0RU8k0Fk0ea+IxciAIleH2ulrG6nS4zto3Lmr2NNL4XSFDWaLk6M6jKYKIahkQlB
+OrTh4/L68MkKokHdqeMDx4gVOxzUGpTXn2RZEm0CAwEAAaNTMFEwDwYDVR0TAQH/
+BAUwAwEB/zAfBgNVHSMEGDAWgBS6+nEleYtXQSUhhgtx67JkDoshZzAdBgNVHQ4E
+FgQUuvpxJXmLV0ElIYYLceuyZA6LIWcwDQYJKoZIhvcNAQEFBQADggEBAH5Y//01
+GX2cGE+esCu8jowU/yyg2kdbw++BLa8F6nRIW/M+TgfHbcWzk88iNVy2P3UnXwmW
+zaD+vkAMXBJV+JOCyinpXj9WV4s4NvdFGkwozZ5BuO1WTISkQMi4sKUraXAEasP4
+1BIy+Q7DsdwyhEQsb8tGD+pmQQ9P8Vilpg0ND2HepZ5dfWWhPBfnqFVO76DH7cZE
+f1T1o+CP8HxVIo8ptoGj4W1OLBuAZ+ytIJ8MYmHVl/9D7S3B2l0pKoU/rGXuhg8F
+jZBf3+6f9L/uHfuY5H+QK4R4EA5sSVPvFVtlRkpdr7r7OnIdzfYliB6XzCGcKQEN
+ZetX2fNXlrtIzYE=
+-----END CERTIFICATE-----
+
StartCom Certification Authority
================================
-
-----BEGIN CERTIFICATE-----
MIIHhzCCBW+gAwIBAgIBLTANBgkqhkiG9w0BAQsFADB9MQswCQYDVQQGEwJJTDEW
MBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMiU2VjdXJlIERpZ2l0YWwg
@@ -2163,7 +4294,6 @@ fyWl8kgAwKQB2j8=
StartCom Certification Authority G2
===================================
-
-----BEGIN CERTIFICATE-----
MIIFYzCCA0ugAwIBAgIBOzANBgkqhkiG9w0BAQsFADBTMQswCQYDVQQGEwJJTDEW
MBQGA1UEChMNU3RhcnRDb20gTHRkLjEsMCoGA1UEAxMjU3RhcnRDb20gQ2VydGlm
@@ -2195,3 +4325,124 @@ IxKVCCIcl85bBu4M4ru8H0ST9tg4RQUh7eStqxK2A6RCLi3ECToDZ2mEmuFZkIoo
hdVddLHRDiBYmxOlsGOm7XtH/UVVMKTumtTm4ofvmMkyghEpIrwACjFeLQ/Ajulr
so8uBtjRkcfGEvRM/TAXw8HaOFvjqermobp573PYtlNXLfbQ4ddI
-----END CERTIFICATE-----
+
+Buypass Class 2 Root CA
+=======================
+-----BEGIN CERTIFICATE-----
+MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd
+MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg
+Q2xhc3MgMiBSb290IENBMB4XDTEwMTAyNjA4MzgwM1oXDTQwMTAyNjA4MzgwM1ow
+TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw
+HgYDVQQDDBdCdXlwYXNzIENsYXNzIDIgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB
+BQADggIPADCCAgoCggIBANfHXvfBB9R3+0Mh9PT1aeTuMgHbo4Yf5FkNuud1g1Lr
+6hxhFUi7HQfKjK6w3Jad6sNgkoaCKHOcVgb/S2TwDCo3SbXlzwx87vFKu3MwZfPV
+L4O2fuPn9Z6rYPnT8Z2SdIrkHJasW4DptfQxh6NR/Md+oW+OU3fUl8FVM5I+GC91
+1K2GScuVr1QGbNgGE41b/+EmGVnAJLqBcXmQRFBoJJRfuLMR8SlBYaNByyM21cHx
+MlAQTn/0hpPshNOOvEu/XAFOBz3cFIqUCqTqc/sLUegTBxj6DvEr0VQVfTzh97QZ
+QmdiXnfgolXsttlpF9U6r0TtSsWe5HonfOV116rLJeffawrbD02TTqigzXsu8lkB
+arcNuAeBfos4GzjmCleZPe4h6KP1DBbdi+w0jpwqHAAVF41og9JwnxgIzRFo1clr
+Us3ERo/ctfPYV3Me6ZQ5BL/T3jjetFPsaRyifsSP5BtwrfKi+fv3FmRmaZ9JUaLi
+FRhnBkp/1Wy1TbMz4GHrXb7pmA8y1x1LPC5aAVKRCfLf6o3YBkBjqhHk/sM3nhRS
+P/TizPJhk9H9Z2vXUq6/aKtAQ6BXNVN48FP4YUIHZMbXb5tMOA1jrGKvNouicwoN
+9SG9dKpN6nIDSdvHXx1iY8f93ZHsM+71bbRuMGjeyNYmsHVee7QHIJihdjK4TWxP
+AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMmAd+BikoL1Rpzz
+uvdMw964o605MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAU18h
+9bqwOlI5LJKwbADJ784g7wbylp7ppHR/ehb8t/W2+xUbP6umwHJdELFx7rxP462s
+A20ucS6vxOOto70MEae0/0qyexAQH6dXQbLArvQsWdZHEIjzIVEpMMpghq9Gqx3t
+OluwlN5E40EIosHsHdb9T7bWR9AUC8rmyrV7d35BH16Dx7aMOZawP5aBQW9gkOLo
++fsicdl9sz1Gv7SEr5AcD48Saq/v7h56rgJKihcrdv6sVIkkLE8/trKnToyokZf7
+KcZ7XC25y2a2t6hbElGFtQl+Ynhw/qlqYLYdDnkM/crqJIByw5c/8nerQyIKx+u2
+DISCLIBrQYoIwOula9+ZEsuK1V6ADJHgJgg2SMX6OBE1/yWDLfJ6v9r9jv6ly0Us
+H8SIU653DtmadsWOLB2jutXsMq7Aqqz30XpN69QH4kj3Io6wpJ9qzo6ysmD0oyLQ
+I+uUWnpp3Q+/QFesa1lQ2aOZ4W7+jQF5JyMV3pKdewlNWudLSDBaGOYKbeaP4NK7
+5t98biGCwWg5TbSYWGZizEqQXsP6JwSxeRV0mcy+rSDeJmAc61ZRpqPq5KM/p/9h
+3PFaTWwyI0PurKju7koSCTxdccK+efrCh2gdC/1cacwG0Jp9VJkqyTkaGa9LKkPz
+Y11aWOIv4x3kqdbQCtCev9eBCfHJxyYNrJgWVqA=
+-----END CERTIFICATE-----
+
+Buypass Class 3 Root CA
+=======================
+-----BEGIN CERTIFICATE-----
+MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd
+MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg
+Q2xhc3MgMyBSb290IENBMB4XDTEwMTAyNjA4Mjg1OFoXDTQwMTAyNjA4Mjg1OFow
+TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw
+HgYDVQQDDBdCdXlwYXNzIENsYXNzIDMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB
+BQADggIPADCCAgoCggIBAKXaCpUWUOOV8l6ddjEGMnqb8RB2uACatVI2zSRHsJ8Y
+ZLya9vrVediQYkwiL944PdbgqOkcLNt4EemOaFEVcsfzM4fkoF0LXOBXByow9c3E
+N3coTRiR5r/VUv1xLXA+58bEiuPwKAv0dpihi4dVsjoT/Lc+JzeOIuOoTyrvYLs9
+tznDDgFHmV0ST9tD+leh7fmdvhFHJlsTmKtdFoqwNxxXnUX/iJY2v7vKB3tvh2PX
+0DJq1l1sDPGzbjniazEuOQAnFN44wOwZZoYS6J1yFhNkUsepNxz9gjDthBgd9K5c
+/3ATAOux9TN6S9ZV+AWNS2mw9bMoNlwUxFFzTWsL8TQH2xc519woe2v1n/MuwU8X
+KhDzzMro6/1rqy6any2CbgTUUgGTLT2G/H783+9CHaZr77kgxve9oKeV/afmiSTY
+zIw0bOIjL9kSGiG5VZFvC5F5GQytQIgLcOJ60g7YaEi7ghM5EFjp2CoHxhLbWNvS
+O1UQRwUVZ2J+GGOmRj8JDlQyXr8NYnon74Do29lLBlo3WiXQCBJ31G8JUJc9yB3D
+34xFMFbG02SrZvPAXpacw8Tvw3xrizp5f7NJzz3iiZ+gMEuFuZyUJHmPfWupRWgP
+K9Dx2hzLabjKSWJtyNBjYt1gD1iqj6G8BaVmos8bdrKEZLFMOVLAMLrwjEsCsLa3
+AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFEe4zf/lb+74suwv
+Tg75JbCOPGvDMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAACAj
+QTUEkMJAYmDv4jVM1z+s4jSQuKFvdvoWFqRINyzpkMLyPPgKn9iB5btb2iUspKdV
+cSQy9sgL8rxq+JOssgfCX5/bzMiKqr5qb+FJEMwx14C7u8jYog5kV+qi9cKpMRXS
+IGrs/CIBKM+GuIAeqcwRpTzyFrNHnfzSgCHEy9BHcEGhyoMZCCxt8l13nIoUE9Q2
+HJLw5QY33KbmkJs4j1xrG0aGQ0JfPgEHU1RdZX33inOhmlRaHylDFCfChQ+1iHsa
+O5S3HWCntZznKWlXWpuTekMwGwPXYshApqr8ZORK15FTAaggiG6cX0S5y2CBNOxv
+033aSF/rtJC8LakcC6wc1aJoIIAE1vyxjy+7SjENSoYc6+I2KSb12tjE8nVhz36u
+dmNKekBlk4f4HoCMhuWG1o8O/FMsYOgWYRqiPkN7zTlgVGr18okmAWiDSKIz6MkE
+kbIRNBE+6tBDGR8Dk5AM/1E9V/RBbuHLoL7ryWPNbczk+DaqaJ3tvV2XcEQNtg41
+3OEMXbugUZTLfhbrES+jkkXITHHZvMmZUldGL1DPvTVp9D0VzgalLA8+9oG6lLvD
+u79leNKGef9JOxqDDPDeeOzI8k1MGt6CKfjBWtrt7uYnXuhF0J0cUahoq0Tj0Itq
+4/g7u9xN12TyUb7mqqta6THuBrxzvxNiCp/HuZc=
+-----END CERTIFICATE-----
+
+T-TeleSec GlobalRoot Class 3
+============================
+-----BEGIN CERTIFICATE-----
+MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx
+KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd
+BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl
+YyBHbG9iYWxSb290IENsYXNzIDMwHhcNMDgxMDAxMTAyOTU2WhcNMzMxMDAxMjM1
+OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy
+aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50
+ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDMwggEiMA0G
+CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC9dZPwYiJvJK7genasfb3ZJNW4t/zN
+8ELg63iIVl6bmlQdTQyK9tPPcPRStdiTBONGhnFBSivwKixVA9ZIw+A5OO3yXDw/
+RLyTPWGrTs0NvvAgJ1gORH8EGoel15YUNpDQSXuhdfsaa3Ox+M6pCSzyU9XDFES4
+hqX2iys52qMzVNn6chr3IhUciJFrf2blw2qAsCTz34ZFiP0Zf3WHHx+xGwpzJFu5
+ZeAsVMhg02YXP+HMVDNzkQI6pn97djmiH5a2OK61yJN0HZ65tOVgnS9W0eDrXltM
+EnAMbEQgqxHY9Bn20pxSN+f6tsIxO0rUFJmtxxr1XV/6B7h8DR/Wgx6zAgMBAAGj
+QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS1
+A/d2O2GCahKqGFPrAyGUv/7OyjANBgkqhkiG9w0BAQsFAAOCAQEAVj3vlNW92nOy
+WL6ukK2YJ5f+AbGwUgC4TeQbIXQbfsDuXmkqJa9c1h3a0nnJ85cp4IaH3gRZD/FZ
+1GSFS5mvJQQeyUapl96Cshtwn5z2r3Ex3XsFpSzTucpH9sry9uetuUg/vBa3wW30
+6gmv7PO15wWeph6KU1HWk4HMdJP2udqmJQV0eVp+QD6CSyYRMG7hP0HHRwA11fXT
+91Q+gT3aSWqas+8QPebrb9HIIkfLzM8BMZLZGOMivgkeGj5asuRrDFR6fUNOuIml
+e9eiPZaGzPImNC1qkp2aGtAw4l1OBLBfiyB+d8E9lYLRRpo7PHi4b6HQDWSieB4p
+TpPDpFQUWw==
+-----END CERTIFICATE-----
+
+EE Certification Centre Root CA
+===============================
+-----BEGIN CERTIFICATE-----
+MIIEAzCCAuugAwIBAgIQVID5oHPtPwBMyonY43HmSjANBgkqhkiG9w0BAQUFADB1
+MQswCQYDVQQGEwJFRTEiMCAGA1UECgwZQVMgU2VydGlmaXRzZWVyaW1pc2tlc2t1
+czEoMCYGA1UEAwwfRUUgQ2VydGlmaWNhdGlvbiBDZW50cmUgUm9vdCBDQTEYMBYG
+CSqGSIb3DQEJARYJcGtpQHNrLmVlMCIYDzIwMTAxMDMwMTAxMDMwWhgPMjAzMDEy
+MTcyMzU5NTlaMHUxCzAJBgNVBAYTAkVFMSIwIAYDVQQKDBlBUyBTZXJ0aWZpdHNl
+ZXJpbWlza2Vza3VzMSgwJgYDVQQDDB9FRSBDZXJ0aWZpY2F0aW9uIENlbnRyZSBS
+b290IENBMRgwFgYJKoZIhvcNAQkBFglwa2lAc2suZWUwggEiMA0GCSqGSIb3DQEB
+AQUAA4IBDwAwggEKAoIBAQDIIMDs4MVLqwd4lfNE7vsLDP90jmG7sWLqI9iroWUy
+euuOF0+W2Ap7kaJjbMeMTC55v6kF/GlclY1i+blw7cNRfdCT5mzrMEvhvH2/UpvO
+bntl8jixwKIy72KyaOBhU8E2lf/slLo2rpwcpzIP5Xy0xm90/XsY6KxX7QYgSzIw
+WFv9zajmofxwvI6Sc9uXp3whrj3B9UiHbCe9nyV0gVWw93X2PaRka9ZP585ArQ/d
+MtO8ihJTmMmJ+xAdTX7Nfh9WDSFwhfYggx/2uh8Ej+p3iDXE/+pOoYtNP2MbRMNE
+1CV2yreN1x5KZmTNXMWcg+HCCIia7E6j8T4cLNlsHaFLAgMBAAGjgYowgYcwDwYD
+VR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBLyWj7qVhy/
+zQas8fElyalL1BSZMEUGA1UdJQQ+MDwGCCsGAQUFBwMCBggrBgEFBQcDAQYIKwYB
+BQUHAwMGCCsGAQUFBwMEBggrBgEFBQcDCAYIKwYBBQUHAwkwDQYJKoZIhvcNAQEF
+BQADggEBAHv25MANqhlHt01Xo/6tu7Fq1Q+e2+RjxY6hUFaTlrg4wCQiZrxTFGGV
+v9DHKpY5P30osxBAIWrEr7BSdxjhlthWXePdNl4dp1BUoMUq5KqMlIpPnTX/dqQG
+E5Gion0ARD9V04I8GtVbvFZMIi5GQ4okQC3zErg7cBqklrkar4dBGmoYDQZPxz5u
+uSlNDUmJEYcyW+ZLBMjkXOZ0c5RdFpgTlf7727FE5TpwrDdr5rMzcijJs1eg9gIW
+iAYLtqZLICjU3j2LrTcFU3T+bsy8QxdxXvnFzBqpYe73dgzzcvRyrc9yAjYHR8/v
+GVCJYMzpJJUPwssd8m92kMfMdcGWxZ0=
+-----END CERTIFICATE-----
diff --git a/boto/cloudformation/stack.py b/boto/cloudformation/stack.py
index 5d35e891..c173de66 100644..100755
--- a/boto/cloudformation/stack.py
+++ b/boto/cloudformation/stack.py
@@ -48,7 +48,10 @@ class Stack(object):
elif name == "Description":
self.description = value
elif name == "DisableRollback":
- self.disable_rollback = bool(value)
+ if str(value).lower() == 'true':
+ self.disable_rollback = True
+ else:
+ self.disable_rollback = False
elif name == 'StackId':
self.stack_id = value
elif name == 'StackName':
@@ -292,7 +295,7 @@ class StackResource(object):
class StackResourceSummary(object):
def __init__(self, connection=None):
self.connection = connection
- self.last_updated_timestamp = None
+ self.last_updated_time = None
self.logical_resource_id = None
self.physical_resource_id = None
self.resource_status = None
@@ -303,14 +306,14 @@ class StackResourceSummary(object):
return None
def endElement(self, name, value, connection):
- if name == "LastUpdatedTimestamp":
+ if name == "LastUpdatedTime":
try:
- self.last_updated_timestamp = datetime.strptime(
+ self.last_updated_time = datetime.strptime(
value,
'%Y-%m-%dT%H:%M:%SZ'
)
except ValueError:
- self.last_updated_timestamp = datetime.strptime(
+ self.last_updated_time = datetime.strptime(
value,
'%Y-%m-%dT%H:%M:%S.%fZ'
)
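
The string comparison above is needed because ``bool()`` on any non-empty string is truthy, so the value ``'false'`` coming back from CloudFormation used to be parsed as ``True``. A minimal sketch, not part of the commit:

    # bool() cannot parse boolean strings from the API response:
    assert bool('false') is True                       # old behaviour: always truthy
    assert (str('false').lower() == 'true') is False   # comparison used in the hunk
    assert (str('true').lower() == 'true') is True
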
diff --git a/boto/cloudfront/distribution.py b/boto/cloudfront/distribution.py
index 78b26240..423cb201 100644
--- a/boto/cloudfront/distribution.py
+++ b/boto/cloudfront/distribution.py
@@ -30,7 +30,7 @@ from boto.cloudfront.logging import LoggingInfo
from boto.cloudfront.origin import S3Origin, CustomOrigin
from boto.s3.acl import ACL
-class DistributionConfig:
+class DistributionConfig(object):
def __init__(self, connection=None, origin=None, enabled=False,
caller_reference='', cnames=None, comment='',
@@ -100,7 +100,7 @@ class DistributionConfig:
self.cnames = cnames
self.comment = comment
self.trusted_signers = trusted_signers
- self.logging = None
+ self.logging = logging
self.default_root_object = default_root_object
def to_xml(self):
@@ -214,7 +214,7 @@ class StreamingDistributionConfig(DistributionConfig):
s += '</StreamingDistributionConfig>\n'
return s
-class DistributionSummary:
+class DistributionSummary(object):
def __init__(self, connection=None, domain_name='', id='',
last_modified_time=None, status='', origin=None,
@@ -279,7 +279,7 @@ class StreamingDistributionSummary(DistributionSummary):
def get_distribution(self):
return self.connection.get_streaming_distribution_info(self.id)
-class Distribution:
+class Distribution(object):
def __init__(self, connection=None, config=None, domain_name='',
id='', last_modified_time=None, status=''):
@@ -362,14 +362,14 @@ class Distribution:
def enable(self):
"""
- Deactivate the Distribution. A convenience wrapper around
+ Activate the Distribution. A convenience wrapper around
the update method.
"""
self.update(enabled=True)
def disable(self):
"""
- Activate the Distribution. A convenience wrapper around
+ Deactivate the Distribution. A convenience wrapper around
the update method.
"""
self.update(enabled=False)
@@ -654,12 +654,14 @@ class Distribution:
raise ValueError("Only specify the private_key_file or the private_key_string not both")
if not private_key_file and not private_key_string:
raise ValueError("You must specify one of private_key_file or private_key_string")
- # If private_key_file is a file, read its contents. Otherwise, open it and then read it
- if isinstance(private_key_file, file):
- private_key_string = private_key_file.read()
- elif private_key_file:
- with open(private_key_file, 'r') as file_handle:
- private_key_string = file_handle.read()
+ # If private_key_file is a file name, open it and read it
+ if private_key_string is None:
+ if isinstance(private_key_file, basestring):
+ with open(private_key_file, 'r') as file_handle:
+ private_key_string = file_handle.read()
+ # Otherwise, treat it like a file
+ else:
+ private_key_string = private_key_file.read()
# Sign it!
private_key = rsa.PrivateKey.load_pkcs1(private_key_string)
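
A hedged usage sketch of the private-key handling above: exactly one of ``private_key_file`` and ``private_key_string`` must be given, and ``private_key_file`` may now be either a filename string or an already-open file object. The ``create_signed_url`` call, key pair id and paths below are illustrative assumptions.

    import time

    expires = int(time.time()) + 3600
    # Pass a path to the .pem file...
    url = dist.create_signed_url('http://d12345.cloudfront.net/video.mp4',
                                 'PK123456789754', expire_time=expires,
                                 private_key_file='/path/to/pk.pem')
    # ...or an open file object; both reach the branch above.
    with open('/path/to/pk.pem') as fh:
        url = dist.create_signed_url('http://d12345.cloudfront.net/video.mp4',
                                     'PK123456789754', expire_time=expires,
                                     private_key_file=fh)
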
diff --git a/boto/cloudsearch/__init__.py b/boto/cloudsearch/__init__.py
index 01fa41df..466ad426 100644
--- a/boto/cloudsearch/__init__.py
+++ b/boto/cloudsearch/__init__.py
@@ -21,7 +21,7 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
-from boto.ec2.regioninfo import RegionInfo
+from boto.regioninfo import RegionInfo
def regions():
diff --git a/boto/cloudsearch/layer1.py b/boto/cloudsearch/layer1.py
index ff712932..4ca763e9 100644
--- a/boto/cloudsearch/layer1.py
+++ b/boto/cloudsearch/layer1.py
@@ -51,17 +51,25 @@ class Layer1(AWSQueryConnection):
region = RegionInfo(self, self.DefaultRegionName,
self.DefaultRegionEndpoint)
self.region = region
- AWSQueryConnection.__init__(self, aws_access_key_id,
- aws_secret_access_key,
- is_secure, port, proxy, proxy_port,
- proxy_user, proxy_pass,
- self.region.endpoint, debug,
- https_connection_factory, path,
- security_token,
- validate_certs=validate_certs)
+ AWSQueryConnection.__init__(
+ self,
+ host=self.region.endpoint,
+ aws_access_key_id=aws_access_key_id,
+ aws_secret_access_key=aws_secret_access_key,
+ is_secure=is_secure,
+ port=port,
+ proxy=proxy,
+ proxy_port=proxy_port,
+ proxy_user=proxy_user,
+ proxy_pass=proxy_pass,
+ debug=debug,
+ https_connection_factory=https_connection_factory,
+ path=path,
+ security_token=security_token,
+ validate_certs=validate_certs)
def _required_auth_capability(self):
- return ['sign-v2']
+ return ['hmac-v4']
def get_response(self, doc_path, action, params, path='/',
parent=None, verb='GET', list_marker=None):
diff --git a/boto/cloudsearch/layer2.py b/boto/cloudsearch/layer2.py
index af5c4d17..4189b5bb 100644
--- a/boto/cloudsearch/layer2.py
+++ b/boto/cloudsearch/layer2.py
@@ -32,10 +32,18 @@ class Layer2(object):
is_secure=True, port=None, proxy=None, proxy_port=None,
host=None, debug=0, session_token=None, region=None,
validate_certs=True):
- self.layer1 = Layer1(aws_access_key_id, aws_secret_access_key,
- is_secure, port, proxy, proxy_port,
- host, debug, session_token, region,
- validate_certs=validate_certs)
+ self.layer1 = Layer1(
+ aws_access_key_id=aws_access_key_id,
+ aws_secret_access_key=aws_secret_access_key,
+ is_secure=is_secure,
+ port=port,
+ proxy=proxy,
+ proxy_port=proxy_port,
+ host=host,
+ debug=debug,
+ security_token=session_token,
+ region=region,
+ validate_certs=validate_certs)
def list_domains(self, domain_names=None):
"""
diff --git a/boto/cloudsearch/search.py b/boto/cloudsearch/search.py
index 69a1981e..ece623a8 100644
--- a/boto/cloudsearch/search.py
+++ b/boto/cloudsearch/search.py
@@ -37,7 +37,6 @@ class CommitMismatchError(Exception):
class SearchResults(object):
-
def __init__(self, **attrs):
self.rid = attrs['info']['rid']
# self.doc_coverage_pct = attrs['info']['doc-coverage-pct']
@@ -289,7 +288,19 @@ class SearchConnection(object):
params = query.to_params()
r = requests.get(url, params=params)
- data = json.loads(r.content)
+ try:
+ data = json.loads(r.content)
+ except ValueError, e:
+ if r.status_code == 403:
+ msg = ''
+ import re
+ g = re.search('<html><body><h1>403 Forbidden</h1>([^<]+)<', r.content)
+ try:
+ msg = ': %s' % (g.groups()[0].strip())
+ except AttributeError:
+ pass
+ raise SearchServiceException('Authentication error from Amazon%s' % msg)
+ raise SearchServiceException("Got non-json response from Amazon")
data['query'] = query
data['search_service'] = self
diff --git a/boto/connection.py b/boto/connection.py
index 375a9ca7..ed57fc4a 100644
--- a/boto/connection.py
+++ b/boto/connection.py
@@ -101,7 +101,7 @@ DEFAULT_CA_CERTS_FILE = os.path.join(os.path.dirname(os.path.abspath(boto.cacert
class HostConnectionPool(object):
"""
- A pool of connections for one remote (host,is_secure).
+ A pool of connections for one remote (host,port,is_secure).
When connections are added to the pool, they are put into a
pending queue. The _mexe method returns connections to the pool
@@ -145,7 +145,7 @@ class HostConnectionPool(object):
def get(self):
"""
Returns the next connection in this pool that is ready to be
- reused. Returns None of there aren't any.
+ reused. Returns None if there aren't any.
"""
# Discard ready connections that are too old.
self.clean()
@@ -234,7 +234,7 @@ class ConnectionPool(object):
STALE_DURATION = 60.0
def __init__(self):
- # Mapping from (host,is_secure) to HostConnectionPool.
+ # Mapping from (host,port,is_secure) to HostConnectionPool.
# If a pool becomes empty, it is removed.
self.host_to_pool = {}
# The last time the pool was cleaned.
@@ -259,7 +259,7 @@ class ConnectionPool(object):
"""
return sum(pool.size() for pool in self.host_to_pool.values())
- def get_http_connection(self, host, is_secure):
+ def get_http_connection(self, host, port, is_secure):
"""
Gets a connection from the pool for the named host. Returns
None if there is no connection that can be reused. It's the caller's
@@ -268,18 +268,18 @@ class ConnectionPool(object):
"""
self.clean()
with self.mutex:
- key = (host, is_secure)
+ key = (host, port, is_secure)
if key not in self.host_to_pool:
return None
return self.host_to_pool[key].get()
- def put_http_connection(self, host, is_secure, conn):
+ def put_http_connection(self, host, port, is_secure, conn):
"""
Adds a connection to the pool of connections that can be
reused for the named host.
"""
with self.mutex:
- key = (host, is_secure)
+ key = (host, port, is_secure)
if key not in self.host_to_pool:
self.host_to_pool[key] = HostConnectionPool()
self.host_to_pool[key].put(conn)
@@ -517,6 +517,7 @@ class AWSAuthConnection(object):
self.port = port
else:
self.port = PORTS_BY_SECURITY[is_secure]
+ self.host_header = None
# Timeout used to tell httplib how long to wait for socket timeouts.
# Default is to leave timeout unchanged, which will in turn result in
@@ -541,14 +542,16 @@ class AWSAuthConnection(object):
aws_secret_access_key,
security_token)
- # Allow config file to override default host and port.
+ # Allow config file to override default host, port, and host header.
if self.provider.host:
self.host = self.provider.host
if self.provider.port:
self.port = self.provider.port
+ if self.provider.host_header:
+ self.host_header = self.provider.host_header
self._pool = ConnectionPool()
- self._connection = (self.server_name(), self.is_secure)
+ self._connection = (self.host, self.port, self.is_secure)
self._last_rs = None
self._auth_handler = auth.get_auth_handler(
host, config, self.provider, self._required_auth_capability())
@@ -673,60 +676,92 @@ class AWSAuthConnection(object):
print "http_proxy environment variable does not specify " \
"a port, using default"
self.proxy_port = self.port
+
+ self.no_proxy = os.environ.get('no_proxy', '') or os.environ.get('NO_PROXY', '')
self.use_proxy = (self.proxy != None)
- def get_http_connection(self, host, is_secure):
- conn = self._pool.get_http_connection(host, is_secure)
+ def get_http_connection(self, host, port, is_secure):
+ conn = self._pool.get_http_connection(host, port, is_secure)
if conn is not None:
return conn
else:
- return self.new_http_connection(host, is_secure)
+ return self.new_http_connection(host, port, is_secure)
+
+ def skip_proxy(self, host):
+ if not self.no_proxy:
+ return False
+
+ if self.no_proxy == "*":
+ return True
+
+ hostonly = host
+ hostonly = host.split(':')[0]
+
+ for name in self.no_proxy.split(','):
+ if name and (hostonly.endswith(name) or host.endswith(name)):
+ return True
- def new_http_connection(self, host, is_secure):
- if self.use_proxy and not is_secure:
- host = '%s:%d' % (self.proxy, int(self.proxy_port))
+ return False
+
+ def new_http_connection(self, host, port, is_secure):
if host is None:
host = self.server_name()
+
+ # Make sure the host is really just the host, not including
+ # the port number
+ host = host.split(':', 1)[0]
+
+ http_connection_kwargs = self.http_connection_kwargs.copy()
+
+ # Connection factories below expect a port keyword argument
+ http_connection_kwargs['port'] = port
+
+ # Override host with proxy settings if needed
+ if self.use_proxy and not is_secure and \
+ not self.skip_proxy(host):
+ host = self.proxy
+ http_connection_kwargs['port'] = int(self.proxy_port)
+
if is_secure:
boto.log.debug(
'establishing HTTPS connection: host=%s, kwargs=%s',
- host, self.http_connection_kwargs)
- if self.use_proxy:
+ host, http_connection_kwargs)
+ if self.use_proxy and not self.skip_proxy(host):
connection = self.proxy_ssl(host, is_secure and 443 or 80)
elif self.https_connection_factory:
connection = self.https_connection_factory(host)
elif self.https_validate_certificates and HAVE_HTTPS_CONNECTION:
connection = https_connection.CertValidatingHTTPSConnection(
host, ca_certs=self.ca_certificates_file,
- **self.http_connection_kwargs)
+ **http_connection_kwargs)
else:
connection = httplib.HTTPSConnection(host,
- **self.http_connection_kwargs)
+ **http_connection_kwargs)
else:
boto.log.debug('establishing HTTP connection: kwargs=%s' %
- self.http_connection_kwargs)
+ http_connection_kwargs)
if self.https_connection_factory:
# even though the factory says https, this is too handy
# to not be able to allow overriding for http also.
connection = self.https_connection_factory(host,
- **self.http_connection_kwargs)
+ **http_connection_kwargs)
else:
connection = httplib.HTTPConnection(host,
- **self.http_connection_kwargs)
+ **http_connection_kwargs)
if self.debug > 1:
connection.set_debuglevel(self.debug)
# self.connection must be maintained for backwards-compatibility
# however, it must be dynamically pulled from the connection pool
# set a private variable which will enable that
if host.split(':')[0] == self.host and is_secure == self.is_secure:
- self._connection = (host, is_secure)
+ self._connection = (host, port, is_secure)
# Set the response class of the http connection to use our custom
# class.
connection.response_class = HTTPResponse
return connection
- def put_http_connection(self, host, is_secure, connection):
- self._pool.put_http_connection(host, is_secure, connection)
+ def put_http_connection(self, host, port, is_secure, connection):
+ self._pool.put_http_connection(host, port, is_secure, connection)
def proxy_ssl(self, host=None, port=None):
if host and port:
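
For readers skimming the hunk, the proxy-bypass rule added in ``skip_proxy`` above reads like this as a standalone sketch (same logic, plain-function form):

    def _skip_proxy(no_proxy, host):
        # '*' bypasses the proxy for everything; otherwise any comma-separated
        # entry that suffix-matches the host (with or without port) wins.
        if not no_proxy:
            return False
        if no_proxy == '*':
            return True
        hostonly = host.split(':')[0]
        return any(name and (hostonly.endswith(name) or host.endswith(name))
                   for name in no_proxy.split(','))

    # _skip_proxy('localhost,.internal', 'db.internal:8000')  -> True
    # _skip_proxy('localhost,.internal', 's3.amazonaws.com')  -> False
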
@@ -819,6 +854,7 @@ class AWSAuthConnection(object):
boto.log.debug('Data: %s' % request.body)
boto.log.debug('Headers: %s' % request.headers)
boto.log.debug('Host: %s' % request.host)
+ boto.log.debug('Port: %s' % request.port)
boto.log.debug('Params: %s' % request.params)
response = None
body = None
@@ -828,7 +864,8 @@ class AWSAuthConnection(object):
else:
num_retries = override_num_retries
i = 0
- connection = self.get_http_connection(request.host, self.is_secure)
+ connection = self.get_http_connection(request.host, request.port,
+ self.is_secure)
while i <= num_retries:
# Use binary exponential backoff to desynchronize client requests.
next_sleep = random.random() * (2 ** i)
@@ -836,6 +873,12 @@ class AWSAuthConnection(object):
# we now re-sign each request before it is retried
boto.log.debug('Token: %s' % self.provider.security_token)
request.authorize(connection=self)
+ # Only force header for non-s3 connections, because s3 uses
+ # an older signing method + bucket resource URLs that include
+                # the port info. All others should now be up to date and
+ # not include the port.
+ if 's3' not in self._required_auth_capability():
+ request.headers['Host'] = self.host.split(':', 1)[0]
if callable(sender):
response = sender(connection, request.method, request.path,
request.body, request.headers)
@@ -865,24 +908,38 @@ class AWSAuthConnection(object):
body = response.read()
elif response.status < 300 or response.status >= 400 or \
not location:
- self.put_http_connection(request.host, self.is_secure,
- connection)
+ # don't return connection to the pool if response contains
+ # Connection:close header, because the connection has been
+ # closed and default reconnect behavior may do something
+ # different than new_http_connection. Also, it's probably
+ # less efficient to try to reuse a closed connection.
+ conn_header_value = response.getheader('connection')
+ if conn_header_value == 'close':
+ connection.close()
+ else:
+ self.put_http_connection(request.host, request.port,
+ self.is_secure, connection)
return response
else:
scheme, request.host, request.path, \
params, query, fragment = urlparse.urlparse(location)
if query:
request.path += '?' + query
+ # urlparse can return both host and port in netloc, so if
+ # that's the case we need to split them up properly
+ if ':' in request.host:
+ request.host, request.port = request.host.split(':', 1)
msg = 'Redirecting: %s' % scheme + '://'
msg += request.host + request.path
boto.log.debug(msg)
connection = self.get_http_connection(request.host,
+ request.port,
scheme == 'https')
response = None
continue
except PleaseRetryException, e:
boto.log.debug('encountered a retry exception: %s' % e)
- connection = self.new_http_connection(request.host,
+ connection = self.new_http_connection(request.host, request.port,
self.is_secure)
response = e.response
except self.http_exceptions, e:
@@ -894,7 +951,7 @@ class AWSAuthConnection(object):
raise e
boto.log.debug('encountered %s exception, reconnecting' % \
e.__class__.__name__)
- connection = self.new_http_connection(request.host,
+ connection = self.new_http_connection(request.host, request.port,
self.is_secure)
time.sleep(next_sleep)
i += 1
@@ -923,6 +980,9 @@ class AWSAuthConnection(object):
headers = {}
else:
headers = headers.copy()
+ if (self.host_header and
+ not boto.utils.find_matching_headers('host', headers)):
+ headers['host'] = self.host_header
host = host or self.host
if self.use_proxy:
if not auth_path:
@@ -981,7 +1041,7 @@ class AWSQueryConnection(AWSAuthConnection):
def make_request(self, action, params=None, path='/', verb='GET'):
http_request = self.build_base_http_request(verb, path, None,
params, {}, '',
- self.server_name())
+ self.host)
if action:
http_request.params['Action'] = action
if self.APIVersion:
diff --git a/boto/dynamodb/__init__.py b/boto/dynamodb/__init__.py
index 12204361..46199732 100644
--- a/boto/dynamodb/__init__.py
+++ b/boto/dynamodb/__init__.py
@@ -35,6 +35,9 @@ def regions():
return [RegionInfo(name='us-east-1',
endpoint='dynamodb.us-east-1.amazonaws.com',
connection_cls=boto.dynamodb.layer2.Layer2),
+ RegionInfo(name='us-gov-west-1',
+ endpoint='dynamodb.us-gov-west-1.amazonaws.com',
+ connection_cls=boto.dynamodb.layer2.Layer2),
RegionInfo(name='us-west-1',
endpoint='dynamodb.us-west-1.amazonaws.com',
connection_cls=boto.dynamodb.layer2.Layer2),
diff --git a/boto/dynamodb2/__init__.py b/boto/dynamodb2/__init__.py
index 8cdfcace..837f5620 100644
--- a/boto/dynamodb2/__init__.py
+++ b/boto/dynamodb2/__init__.py
@@ -35,6 +35,9 @@ def regions():
return [RegionInfo(name='us-east-1',
endpoint='dynamodb.us-east-1.amazonaws.com',
connection_cls=DynamoDBConnection),
+ RegionInfo(name='us-gov-west-1',
+ endpoint='dynamodb.us-gov-west-1.amazonaws.com',
+ connection_cls=DynamoDBConnection),
RegionInfo(name='us-west-1',
endpoint='dynamodb.us-west-1.amazonaws.com',
connection_cls=DynamoDBConnection),
@@ -50,6 +53,9 @@ def regions():
RegionInfo(name='ap-southeast-1',
endpoint='dynamodb.ap-southeast-1.amazonaws.com',
connection_cls=DynamoDBConnection),
+ RegionInfo(name='ap-southeast-2',
+ endpoint='dynamodb.ap-southeast-2.amazonaws.com',
+ connection_cls=DynamoDBConnection),
RegionInfo(name='sa-east-1',
endpoint='dynamodb.sa-east-1.amazonaws.com',
connection_cls=DynamoDBConnection),
diff --git a/boto/dynamodb2/items.py b/boto/dynamodb2/items.py
index 8df51026..9f076afa 100644
--- a/boto/dynamodb2/items.py
+++ b/boto/dynamodb2/items.py
@@ -1,3 +1,5 @@
+from copy import deepcopy
+
from boto.dynamodb2.types import Dynamizer
@@ -18,7 +20,7 @@ class Item(object):
data. It also tries to intelligently track how data has changed throughout
the life of the instance, to be as efficient as possible about updates.
"""
- def __init__(self, table, data=None):
+ def __init__(self, table, data=None, loaded=False):
"""
Constructs an (unsaved) ``Item`` instance.
@@ -32,6 +34,10 @@ class Item(object):
Optionally accepts a ``data`` parameter, which should be a dictionary
of the fields & values of the item.
+ Optionally accepts a ``loaded`` parameter, which should be a boolean.
+ ``True`` if it was preexisting data loaded from DynamoDB, ``False`` if
+ it's new data from the user. Default is ``False``.
+
Example::
>>> users = Table('users')
@@ -57,41 +63,28 @@ class Item(object):
"""
self.table = table
- self._data = {}
+ self._loaded = loaded
self._orig_data = {}
- self._is_dirty = False
+ self._data = data
self._dynamizer = Dynamizer()
- if data:
- self._data = data
- self._is_dirty = True
+ if self._data is None:
+ self._data = {}
- for key in data.keys():
- self._orig_data[key] = NEWVALUE
+ if self._loaded:
+ self._orig_data = deepcopy(self._data)
def __getitem__(self, key):
return self._data.get(key, None)
def __setitem__(self, key, value):
- # Stow the original value if present, so we can track what's changed.
- if key in self._data:
- self._orig_data[key] = self._data[key]
- else:
- # Use a marker to indicate we've never seen a value for this key.
- self._orig_data[key] = NEWVALUE
-
self._data[key] = value
- self._is_dirty = True
def __delitem__(self, key):
if not key in self._data:
return
- # Stow the original value, so we can track what's changed.
- value = self._data[key]
del self._data[key]
- self._orig_data[key] = value
- self._is_dirty = True
def keys(self):
return self._data.keys()
@@ -112,10 +105,50 @@ class Item(object):
def __contains__(self, key):
return key in self._data
- def needs_save(self):
+ def _determine_alterations(self):
+ """
+        Checks the ``_orig_data`` against the ``_data`` to determine what
+ changes to the data are present.
+
+ Returns a dictionary containing the keys ``adds``, ``changes`` &
+ ``deletes``, containing the updated data.
+ """
+ alterations = {
+ 'adds': {},
+ 'changes': {},
+ 'deletes': [],
+ }
+
+ orig_keys = set(self._orig_data.keys())
+ data_keys = set(self._data.keys())
+
+ # Run through keys we know are in both for changes.
+ for key in orig_keys.intersection(data_keys):
+ if self._data[key] != self._orig_data[key]:
+ if self._is_storable(self._data[key]):
+ alterations['changes'][key] = self._data[key]
+ else:
+ alterations['deletes'].append(key)
+
+ # Run through additions.
+ for key in data_keys.difference(orig_keys):
+ if self._is_storable(self._data[key]):
+ alterations['adds'][key] = self._data[key]
+
+ # Run through deletions.
+ for key in orig_keys.difference(data_keys):
+ alterations['deletes'].append(key)
+
+ return alterations
+
+ def needs_save(self, data=None):
"""
Returns whether or not the data has changed on the ``Item``.
+ Optionally accepts a ``data`` argument, which accepts the output from
+ ``self._determine_alterations()`` if you've already called it. Typically
+ unnecessary to do. Default is ``None``.
+
Example:
>>> user.needs_save()
@@ -125,7 +158,17 @@ class Item(object):
True
"""
- return self._is_dirty
+ if data is None:
+ data = self._determine_alterations()
+
+ needs_save = False
+
+ for kind in ['adds', 'changes', 'deletes']:
+ if len(data[kind]):
+ needs_save = True
+ break
+
+ return needs_save
def mark_clean(self):
"""
@@ -143,23 +186,16 @@ class Item(object):
False
"""
- self._orig_data = {}
- self._is_dirty = False
+ self._orig_data = deepcopy(self._data)
def mark_dirty(self):
"""
- Marks an ``Item`` instance as needing to be saved.
-
- Example:
-
- >>> user.needs_save()
- False
- >>> user.mark_dirty()
- >>> user.needs_save()
- True
+ DEPRECATED: Marks an ``Item`` instance as needing to be saved.
+ This method is no longer necessary, as the state tracking on ``Item``
+ has been improved to automatically detect proper state.
"""
- self._is_dirty = True
+ return
def load(self, data):
"""
@@ -175,7 +211,8 @@ class Item(object):
for field_name, field_value in data.get('Item', {}).items():
self[field_name] = self._dynamizer.decode(field_value)
- self.mark_clean()
+ self._loaded = True
+ self._orig_data = deepcopy(self._data)
def get_keys(self):
"""
@@ -229,30 +266,42 @@ class Item(object):
raise ValueError("Unknown key %s provided." % key)
# States:
- # * New field (_data & _orig_data w/ marker)
- # * Unchanged field (only _data)
- # * Modified field (_data & _orig_data)
- # * Deleted field (only _orig_data)
- if not key in self._orig_data:
+ # * New field (only in _data)
+ # * Unchanged field (in both _data & _orig_data, same data)
+ # * Modified field (in both _data & _orig_data, different data)
+ # * Deleted field (only in _orig_data)
+ orig_value = self._orig_data.get(key, NEWVALUE)
+ current_value = self._data.get(key, NEWVALUE)
+
+ if orig_value == current_value:
# Existing field unchanged.
- value = self._data[key]
+ value = current_value
else:
if key in self._data:
- if self._orig_data[key] is NEWVALUE:
+ if not key in self._orig_data:
# New field.
expects[key]['Exists'] = False
else:
# Existing field modified.
- value = self._orig_data[key]
+ value = orig_value
else:
# Existing field deleted.
- value = self._orig_data[key]
+ value = orig_value
if value is not None:
expects[key]['Value'] = self._dynamizer.encode(value)
return expects
+ def _is_storable(self, value):
+ # We need to prevent ``None``, empty string & empty set from
+        # heading to DDB, but allow false-y values like 0 & False to make it through.
+ if not value:
+ if not value in (0, 0.0, False):
+ return False
+
+ return True
+
def prepare_full(self):
"""
Runs through all fields & encodes them to be handed off to DynamoDB
@@ -265,6 +314,9 @@ class Item(object):
final_data = {}
for key, value in self._data.items():
+ if not self._is_storable(value):
+ continue
+
final_data[key] = self._dynamizer.encode(value)
return final_data
@@ -280,22 +332,30 @@ class Item(object):
# This doesn't save on it's own. Rather, we prepare the datastructure
# and hand-off to the table to handle creation/update.
final_data = {}
+ fields = set()
+ alterations = self._determine_alterations()
- # Loop over ``_orig_data`` so that we only build up data that's changed.
- for key, value in self._orig_data.items():
- if key in self._data:
- # It changed.
- final_data[key] = {
- 'Action': 'PUT',
- 'Value': self._dynamizer.encode(self._data[key])
- }
- else:
- # It was deleted.
- final_data[key] = {
- 'Action': 'DELETE',
- }
+ for key, value in alterations['adds'].items():
+ final_data[key] = {
+ 'Action': 'PUT',
+ 'Value': self._dynamizer.encode(self._data[key])
+ }
+ fields.add(key)
- return final_data
+ for key, value in alterations['changes'].items():
+ final_data[key] = {
+ 'Action': 'PUT',
+ 'Value': self._dynamizer.encode(self._data[key])
+ }
+ fields.add(key)
+
+ for key in alterations['deletes']:
+ final_data[key] = {
+ 'Action': 'DELETE',
+ }
+ fields.add(key)
+
+ return final_data, fields
def partial_save(self):
"""
@@ -316,14 +376,28 @@ class Item(object):
>>> user.partial_save()
"""
- if not self.needs_save():
- return False
-
key = self.get_keys()
# Build a new dict of only the data we're changing.
- final_data = self.prepare_partial()
+ final_data, fields = self.prepare_partial()
+
+ if not final_data:
+ return False
+
+ # Remove the key(s) from the ``final_data`` if present.
+ # They should only be present if this is a new item, in which
+        # case we shouldn't send them as part of the data to update.
+ for fieldname, value in key.items():
+ if fieldname in final_data:
+ del final_data[fieldname]
+
+ try:
+ # It's likely also in ``fields``, so remove it there too.
+ fields.remove(fieldname)
+ except KeyError:
+ pass
+
# Build expectations of only the fields we're planning to update.
- expects = self.build_expects(fields=self._orig_data.keys())
+ expects = self.build_expects(fields=fields)
returned = self.table._update_item(key, final_data, expects=expects)
# Mark the object as clean.
self.mark_clean()
@@ -359,7 +433,7 @@ class Item(object):
>>> user.save(overwrite=True)
"""
- if not self.needs_save():
+ if not self.needs_save() and not overwrite:
return False
final_data = self.prepare_full()
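
A hedged usage sketch of the reworked state tracking: items loaded from DynamoDB start clean, changes are detected by diffing ``_data`` against ``_orig_data``, and ``partial_save`` only transmits the computed adds, changes and deletes. The table name and attributes below are assumptions.

    from boto.dynamodb2.table import Table

    users = Table('users')                     # assumed table, hash key 'username'
    user = users.get_item(username='johndoe')  # constructed with loaded=True
    user.needs_save()                          # False - nothing altered yet
    user['last_name'] = 'Doe'                  # a change
    del user['nickname']                       # a delete
    user.needs_save()                          # True
    user.partial_save()                        # sends only the altered fields
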
diff --git a/boto/dynamodb2/layer1.py b/boto/dynamodb2/layer1.py
index 532e2f6e..796a6a79 100644
--- a/boto/dynamodb2/layer1.py
+++ b/boto/dynamodb2/layer1.py
@@ -21,7 +21,11 @@
#
from binascii import crc32
-import json
+try:
+ import json
+except ImportError:
+ import simplejson as json
+
import boto
from boto.connection import AWSQueryConnection
from boto.regioninfo import RegionInfo
@@ -67,7 +71,11 @@ class DynamoDBConnection(AWSQueryConnection):
if reg.name == region_name:
region = reg
break
- kwargs['host'] = region.endpoint
+
+ # Only set host if it isn't manually overwritten
+ if 'host' not in kwargs:
+ kwargs['host'] = region.endpoint
+
AWSQueryConnection.__init__(self, **kwargs)
self.region = region
self._validate_checksums = boto.config.getbool(
@@ -1467,13 +1475,13 @@ class DynamoDBConnection(AWSQueryConnection):
def make_request(self, action, body):
headers = {
'X-Amz-Target': '%s.%s' % (self.TargetPrefix, action),
- 'Host': self.region.endpoint,
+ 'Host': self.host,
'Content-Type': 'application/x-amz-json-1.0',
'Content-Length': str(len(body)),
}
http_request = self.build_base_http_request(
method='POST', path='/', auth_path='/', params={},
- headers=headers, data=body)
+ headers=headers, data=body, host=self.host)
response = self._mexe(http_request, sender=None,
override_num_retries=self.NumberRetries,
retry_handler=self._retry_handler)
@@ -1491,6 +1499,7 @@ class DynamoDBConnection(AWSQueryConnection):
def _retry_handler(self, response, i, next_sleep):
status = None
+ boto.log.debug("Saw HTTP status: %s" % response.status)
if response.status == 400:
response_body = response.read()
boto.log.debug(response_body)
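
Because an explicitly supplied ``host`` is no longer clobbered by the region lookup, a non-AWS endpoint such as DynamoDB Local can be targeted directly; a sketch with placeholder port and credentials:

    from boto.dynamodb2.layer1 import DynamoDBConnection

    conn = DynamoDBConnection(host='localhost', port=8000, is_secure=False,
                              aws_access_key_id='local',
                              aws_secret_access_key='local')
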
diff --git a/boto/dynamodb2/results.py b/boto/dynamodb2/results.py
index 23f64046..8ddf3022 100644
--- a/boto/dynamodb2/results.py
+++ b/boto/dynamodb2/results.py
@@ -58,6 +58,12 @@ class ResultSet(object):
self.fetch_more()
+        # It's possible that a previous call to ``fetch_more`` may not return
+ # anything useful but there may be more results. Loop until we get
+ # something back, making sure we guard for no results left.
+ while not len(self._results) and self._results_left:
+ self.fetch_more()
+
if self._offset < len(self._results):
return self._results[self._offset]
else:
@@ -106,16 +112,11 @@ class ResultSet(object):
kwargs[self.first_key] = self._last_key_seen
results = self.the_callable(*args, **kwargs)
-
- if not len(results.get('results', [])):
- self._results_left = False
- return
-
- self._results.extend(results['results'])
+ new_results = results.get('results', [])
self._last_key_seen = results.get('last_key', None)
- if self._last_key_seen is None:
- self._results_left = False
+ if len(new_results):
+ self._results.extend(results['results'])
# Decrease the limit, if it's present.
if self.call_kwargs.get('limit'):
@@ -124,7 +125,10 @@ class ResultSet(object):
# results to look for
if 0 == self.call_kwargs['limit']:
self._results_left = False
-
+
+ if self._last_key_seen is None:
+ self._results_left = False
+
class BatchGetResultSet(ResultSet):
def __init__(self, *args, **kwargs):
diff --git a/boto/dynamodb2/table.py b/boto/dynamodb2/table.py
index c1de437d..5d6803ce 100644
--- a/boto/dynamodb2/table.py
+++ b/boto/dynamodb2/table.py
@@ -1,3 +1,4 @@
+import boto
from boto.dynamodb2 import exceptions
from boto.dynamodb2.fields import (HashKey, RangeKey,
AllIndex, KeysOnlyIndex, IncludeIndex)
@@ -57,7 +58,7 @@ class Table(object):
>>> conn = Table('users')
# The full, minimum-extra-calls case.
- >>> from boto.dynamodb2.layer1 import DynamoDBConnection
+ >>> from boto import dynamodb2
>>> users = Table('users', schema=[
... HashKey('username'),
... RangeKey('date_joined', data_type=NUMBER)
@@ -69,11 +70,10 @@ class Table(object):
... RangeKey('date_joined')
... ]),
... ],
- ... connection=DynamoDBConnection(
- ... aws_access_key_id='key',
- ... aws_secret_access_key='key',
- ... region='us-west-2'
- ... ))
+ ... connection=dynamodb2.connect_to_region('us-west-2',
+ ... aws_access_key_id='key',
+ ... aws_secret_access_key='key',
+ ... ))
"""
self.table_name = table_name
@@ -133,7 +133,7 @@ class Table(object):
Example::
- >>> users = Table.create_table('users', schema=[
+ >>> users = Table.create('users', schema=[
... HashKey('username'),
... RangeKey('date_joined', data_type=NUMBER)
... ], throughput={
@@ -611,7 +611,7 @@ class Table(object):
'AttributeValueList': [],
'ComparisonOperator': op,
}
-
+
# Special-case the ``NULL/NOT_NULL`` case.
if field_bits[-1] == 'null':
del lookup['AttributeValueList']
@@ -1071,17 +1071,19 @@ class BatchTable(object):
self.table = table
self._to_put = []
self._to_delete = []
+ self._unprocessed = []
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
- if not self._to_put and not self._to_delete:
- return False
+ if self._to_put or self._to_delete:
+ # Flush anything that's left.
+ self.flush()
- # Flush anything that's left.
- self.flush()
- return True
+ if self._unprocessed:
+ # Finally, handle anything that wasn't processed.
+ self.resend_unprocessed()
def put_item(self, data, overwrite=False):
self._to_put.append(data)
@@ -1123,7 +1125,43 @@ class BatchTable(object):
}
})
- self.table.connection.batch_write_item(batch_data)
+ resp = self.table.connection.batch_write_item(batch_data)
+ self.handle_unprocessed(resp)
+
self._to_put = []
self._to_delete = []
return True
+
+ def handle_unprocessed(self, resp):
+ if len(resp.get('UnprocessedItems', [])):
+ table_name = self.table.table_name
+ unprocessed = resp['UnprocessedItems'].get(table_name, [])
+
+ # Some items have not been processed. Stow them for now &
+ # re-attempt processing on ``__exit__``.
+ msg = "%s items were unprocessed. Storing for later."
+ boto.log.info(msg % len(unprocessed))
+ self._unprocessed.extend(unprocessed)
+
+ def resend_unprocessed(self):
+ # If there are unprocessed records (for instance, the user was over
+ # their throughput limitations), iterate over them & send until they're
+ # all there.
+ boto.log.info(
+ "Re-sending %s unprocessed items." % len(self._unprocessed)
+ )
+
+ while len(self._unprocessed):
+ # Again, do 25 at a time.
+ to_resend = self._unprocessed[:25]
+ # Remove them from the list.
+ self._unprocessed = self._unprocessed[25:]
+ batch_data = {
+ self.table.table_name: to_resend
+ }
+ boto.log.info("Sending %s items" % len(to_resend))
+ resp = self.table.connection.batch_write_item(batch_data)
+ self.handle_unprocessed(resp)
+ boto.log.info(
+ "%s unprocessed items left" % len(self._unprocessed)
+ ) \ No newline at end of file
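
A hedged sketch of how the unprocessed-item handling surfaces to callers: ``batch_write`` stows anything DynamoDB reports back as unprocessed and re-sends it in chunks of 25 when the context manager exits.

    # Assumes the 'users' table from the earlier sketch; throttled writes end
    # up in _unprocessed and are retried by resend_unprocessed() on __exit__.
    with users.batch_write() as batch:
        for i in range(500):
            batch.put_item(data={
                'username': 'user-%s' % i,
                'date_joined': 1380000000 + i,
            })
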
diff --git a/boto/ec2/__init__.py b/boto/ec2/__init__.py
index cc1e582f..4220c92e 100644
--- a/boto/ec2/__init__.py
+++ b/boto/ec2/__init__.py
@@ -29,6 +29,7 @@ from boto.regioninfo import RegionInfo
RegionData = {
'us-east-1': 'ec2.us-east-1.amazonaws.com',
+ 'us-gov-west-1': 'ec2.us-gov-west-1.amazonaws.com',
'us-west-1': 'ec2.us-west-1.amazonaws.com',
'us-west-2': 'ec2.us-west-2.amazonaws.com',
'sa-east-1': 'ec2.sa-east-1.amazonaws.com',
@@ -72,9 +73,14 @@ def connect_to_region(region_name, **kw_params):
:return: A connection to the given region, or None if an invalid region
name is given
"""
+ if 'region' in kw_params and isinstance(kw_params['region'], RegionInfo)\
+ and region_name == kw_params['region'].name:
+ return EC2Connection(**kw_params)
+
for region in regions(**kw_params):
if region.name == region_name:
return region.connect(**kw_params)
+
return None
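
The early return above lets a caller hand in a pre-built ``RegionInfo`` (for example, to pin a custom endpoint) without triggering a full region-list lookup; a sketch with an illustrative endpoint:

    import boto.ec2
    from boto.regioninfo import RegionInfo

    region = RegionInfo(name='us-east-1',
                        endpoint='ec2.us-east-1.amazonaws.com')
    conn = boto.ec2.connect_to_region('us-east-1', region=region)
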
diff --git a/boto/ec2/address.py b/boto/ec2/address.py
index 9eadfaa3..27608a4a 100644
--- a/boto/ec2/address.py
+++ b/boto/ec2/address.py
@@ -71,33 +71,50 @@ class Address(EC2Object):
else:
setattr(self, name, value)
- def release(self):
+ def release(self, dry_run=False):
"""
Free up this Elastic IP address.
:see: :meth:`boto.ec2.connection.EC2Connection.release_address`
"""
if self.allocation_id:
- return self.connection.release_address(None, self.allocation_id)
+ return self.connection.release_address(
+ None,
+ self.allocation_id,
+ dry_run=dry_run)
else:
- return self.connection.release_address(self.public_ip)
+ return self.connection.release_address(
+ self.public_ip,
+ dry_run=dry_run
+ )
delete = release
- def associate(self, instance_id):
+ def associate(self, instance_id, dry_run=False):
"""
Associate this Elastic IP address with a currently running instance.
:see: :meth:`boto.ec2.connection.EC2Connection.associate_address`
"""
- return self.connection.associate_address(instance_id, self.public_ip)
+ return self.connection.associate_address(
+ instance_id,
+ self.public_ip,
+ dry_run=dry_run
+ )
- def disassociate(self):
+ def disassociate(self, dry_run=False):
"""
Disassociate this Elastic IP address from a currently running instance.
:see: :meth:`boto.ec2.connection.EC2Connection.disassociate_address`
"""
if self.association_id:
- return self.connection.disassociate_address(None, self.association_id)
+ return self.connection.disassociate_address(
+ None,
+ self.association_id,
+ dry_run=dry_run
+ )
else:
- return self.connection.disassociate_address(self.public_ip)
+ return self.connection.disassociate_address(
+ self.public_ip,
+ dry_run=dry_run
+ )
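
With ``dry_run`` threaded through the ``Address`` helpers, a request can be validated without side effects. AWS reports a passing dry run as an error response, so callers should expect an ``EC2ResponseError``; the instance id below is illustrative.

    from boto.exception import EC2ResponseError

    addr = conn.get_all_addresses()[0]               # assumes an EC2Connection `conn`
    try:
        addr.associate('i-12345678', dry_run=True)   # validated, never executed
    except EC2ResponseError, e:
        assert e.error_code == 'DryRunOperation'
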
diff --git a/boto/ec2/autoscale/__init__.py b/boto/ec2/autoscale/__init__.py
index 17a89e11..440718e1 100644
--- a/boto/ec2/autoscale/__init__.py
+++ b/boto/ec2/autoscale/__init__.py
@@ -47,6 +47,7 @@ from boto.ec2.autoscale.tag import Tag
RegionData = {
'us-east-1': 'autoscaling.us-east-1.amazonaws.com',
+ 'us-gov-west-1': 'autoscaling.us-gov-west-1.amazonaws.com',
'us-west-1': 'autoscaling.us-west-1.amazonaws.com',
'us-west-2': 'autoscaling.us-west-2.amazonaws.com',
'sa-east-1': 'autoscaling.sa-east-1.amazonaws.com',
@@ -224,8 +225,7 @@ class AutoScaleConnection(AWSQueryConnection):
if launch_config.ramdisk_id:
params['RamdiskId'] = launch_config.ramdisk_id
if launch_config.block_device_mappings:
- self.build_list_params(params, launch_config.block_device_mappings,
- 'BlockDeviceMappings')
+ [x.autoscale_build_list_params(params) for x in launch_config.block_device_mappings]
if launch_config.security_groups:
self.build_list_params(params, launch_config.security_groups,
'SecurityGroups')
@@ -255,6 +255,11 @@ class AutoScaleConnection(AWSQueryConnection):
'AutoScalingGroupName': scaling_policy.as_name,
'PolicyName': scaling_policy.name,
'ScalingAdjustment': scaling_policy.scaling_adjustment}
+
+ if scaling_policy.adjustment_type == "PercentChangeInCapacity" and \
+ scaling_policy.min_adjustment_step is not None:
+ params['MinAdjustmentStep'] = scaling_policy.min_adjustment_step
+
if scaling_policy.cooldown is not None:
params['Cooldown'] = scaling_policy.cooldown
@@ -487,15 +492,19 @@ class AutoScaleConnection(AWSQueryConnection):
If no group name or list of policy names are provided, all
available policies are returned.
- :type as_name: str
- :param as_name: The name of the
+ :type as_group: str
+ :param as_group: The name of the
:class:`boto.ec2.autoscale.group.AutoScalingGroup` to filter for.
- :type names: list
- :param names: List of policy names which should be searched for.
+ :type policy_names: list
+ :param policy_names: List of policy names which should be searched for.
:type max_records: int
:param max_records: Maximum amount of groups to return.
+
+ :type next_token: str
+ :param next_token: If you have more results than can be returned
+ at once, pass in this parameter to page through all results.
"""
params = {}
if as_group:
@@ -676,9 +685,9 @@ class AutoScaleConnection(AWSQueryConnection):
Configures an Auto Scaling group to send notifications when
specified events take place.
- :type as_group: str or
+ :type autoscale_group: str or
:class:`boto.ec2.autoscale.group.AutoScalingGroup` object
- :param as_group: The Auto Scaling group to put notification
+ :param autoscale_group: The Auto Scaling group to put notification
configuration on.
:type topic: str
@@ -699,6 +708,29 @@ class AutoScaleConnection(AWSQueryConnection):
self.build_list_params(params, notification_types, 'NotificationTypes')
return self.get_status('PutNotificationConfiguration', params)
+ def delete_notification_configuration(self, autoscale_group, topic):
+ """
+ Deletes notifications created by put_notification_configuration.
+
+ :type autoscale_group: str or
+ :class:`boto.ec2.autoscale.group.AutoScalingGroup` object
+        :param autoscale_group: The Auto Scaling group whose notification
+            configuration should be removed.
+
+ :type topic: str
+ :param topic: The Amazon Resource Name (ARN) of the Amazon Simple
+ Notification Service (SNS) topic.
+ """
+
+ name = autoscale_group
+ if isinstance(autoscale_group, AutoScalingGroup):
+ name = autoscale_group.name
+
+ params = {'AutoScalingGroupName': name,
+ 'TopicARN': topic}
+
+ return self.get_status('DeleteNotificationConfiguration', params)
+
def set_instance_health(self, instance_id, health_status,
should_respect_grace_period=True):
"""
diff --git a/boto/ec2/autoscale/group.py b/boto/ec2/autoscale/group.py
index e9fadce1..65e6ee46 100644
--- a/boto/ec2/autoscale/group.py
+++ b/boto/ec2/autoscale/group.py
@@ -302,6 +302,12 @@ class AutoScalingGroup(object):
topic,
notification_types)
+ def delete_notification_configuration(self, topic):
+ """
+ Deletes notifications created by put_notification_configuration.
+ """
+ return self.connection.delete_notification_configuration(self, topic)
+
def suspend_processes(self, scaling_processes=None):
"""
Suspends Auto Scaling processes for an Auto Scaling group.
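
A hedged sketch pairing the new group-level wrapper with the existing ``put_notification_configuration``; the group name and SNS topic ARN are illustrative.

    topic = 'arn:aws:sns:us-east-1:123456789012:asg-events'
    asg = conn.get_all_groups(names=['web-asg'])[0]   # assumes an AutoScaleConnection
    asg.put_notification_configuration(topic, ['autoscaling:EC2_INSTANCE_LAUNCH'])
    asg.delete_notification_configuration(topic)
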
diff --git a/boto/ec2/autoscale/policy.py b/boto/ec2/autoscale/policy.py
index adcdbdc8..0538557d 100644
--- a/boto/ec2/autoscale/policy.py
+++ b/boto/ec2/autoscale/policy.py
@@ -115,6 +115,10 @@ class ScalingPolicy(object):
:type scaling_adjustment: int
:param scaling_adjustment: Value of adjustment (type specified in `adjustment_type`).
+ :type min_adjustment_step: int
+ :param min_adjustment_step: Value of min adjustment step required to
+            apply the scaling policy (only meaningful when `PercentChangeInCapacity` is used as the adjustment_type).
+
:type cooldown: int
:param cooldown: Time (in seconds) before Alarm related Scaling Activities can start after the previous Scaling Activity ends.
@@ -125,6 +129,7 @@ class ScalingPolicy(object):
self.scaling_adjustment = kwargs.get('scaling_adjustment', None)
self.cooldown = kwargs.get('cooldown', None)
self.connection = connection
+ self.min_adjustment_step = kwargs.get('min_adjustment_step', None)
def __repr__(self):
return 'ScalingPolicy(%s group:%s adjustment:%s)' % (self.name,
@@ -149,6 +154,8 @@ class ScalingPolicy(object):
self.cooldown = int(value)
elif name == 'AdjustmentType':
self.adjustment_type = value
+ elif name == 'MinAdjustmentStep':
+ self.min_adjustment_step = int(value)
def delete(self):
return self.connection.delete_policy(self.name, self.as_name)
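
Usage sketch for the new ``min_adjustment_step`` attribute, which is only sent when the adjustment type is ``PercentChangeInCapacity``; the policy and group names are illustrative.

    from boto.ec2.autoscale.policy import ScalingPolicy

    policy = ScalingPolicy(name='scale-up', as_name='web-asg',
                           adjustment_type='PercentChangeInCapacity',
                           scaling_adjustment=25, min_adjustment_step=2,
                           cooldown=300)
    conn.create_scaling_policy(policy)                # assumes an AutoScaleConnection
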
diff --git a/boto/ec2/autoscale/tag.py b/boto/ec2/autoscale/tag.py
index ad9641d5..a783edf0 100644
--- a/boto/ec2/autoscale/tag.py
+++ b/boto/ec2/autoscale/tag.py
@@ -55,11 +55,11 @@ class Tag(object):
self.key = value
elif name == 'Value':
self.value = value
- elif name == 'PropogateAtLaunch':
+ elif name == 'PropagateAtLaunch':
if value.lower() == 'true':
- self.propogate_at_launch = True
+ self.propagate_at_launch = True
else:
- self.propogate_at_launch = False
+ self.propagate_at_launch = False
elif name == 'ResourceId':
self.resource_id = value
elif name == 'ResourceType':
diff --git a/boto/ec2/blockdevicemapping.py b/boto/ec2/blockdevicemapping.py
index df774ae9..78f7e61d 100644
--- a/boto/ec2/blockdevicemapping.py
+++ b/boto/ec2/blockdevicemapping.py
@@ -115,10 +115,18 @@ class BlockDeviceMapping(dict):
elif name == 'item':
self[self.current_name] = self.current_value
- def build_list_params(self, params, prefix=''):
+ def ec2_build_list_params(self, params, prefix=''):
+ pre = '%sBlockDeviceMapping' % prefix
+ return self._build_list_params(params, prefix=pre)
+
+ def autoscale_build_list_params(self, params, prefix=''):
+ pre = '%sBlockDeviceMappings.member' % prefix
+ return self._build_list_params(params, prefix=pre)
+
+ def _build_list_params(self, params, prefix=''):
i = 1
for dev_name in self:
- pre = '%sBlockDeviceMapping.%d' % (prefix, i)
+ pre = '%s.%d' % (prefix, i)
params['%s.DeviceName' % pre] = dev_name
block_dev = self[dev_name]
if block_dev.ephemeral_name:
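
The split into two builders exists because EC2 and Auto Scaling expect different wire formats for the same mapping; a minimal sketch of the resulting parameter names:

    from boto.ec2.blockdevicemapping import BlockDeviceMapping, BlockDeviceType

    bdm = BlockDeviceMapping()
    bdm['/dev/sdb'] = BlockDeviceType(ephemeral_name='ephemeral0')

    ec2_params, as_params = {}, {}
    bdm.ec2_build_list_params(ec2_params)       # keys like BlockDeviceMapping.1.DeviceName
    bdm.autoscale_build_list_params(as_params)  # keys like BlockDeviceMappings.member.1.DeviceName
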
diff --git a/boto/ec2/cloudwatch/__init__.py b/boto/ec2/cloudwatch/__init__.py
index dd7b6811..82c529e4 100644
--- a/boto/ec2/cloudwatch/__init__.py
+++ b/boto/ec2/cloudwatch/__init__.py
@@ -33,6 +33,7 @@ import boto
RegionData = {
'us-east-1': 'monitoring.us-east-1.amazonaws.com',
+ 'us-gov-west-1': 'monitoring.us-gov-west-1.amazonaws.com',
'us-west-1': 'monitoring.us-west-1.amazonaws.com',
'us-west-2': 'monitoring.us-west-2.amazonaws.com',
'sa-east-1': 'monitoring.sa-east-1.amazonaws.com',
diff --git a/boto/ec2/cloudwatch/alarm.py b/boto/ec2/cloudwatch/alarm.py
index e0f72421..bd98e4a9 100644
--- a/boto/ec2/cloudwatch/alarm.py
+++ b/boto/ec2/cloudwatch/alarm.py
@@ -95,7 +95,7 @@ class MetricAlarm(object):
statistic is applied.
:type evaluation_periods: int
- :param evaluation_period: The number of periods over which data is
+ :param evaluation_periods: The number of periods over which data is
compared to the specified threshold.
:type unit: str
@@ -113,7 +113,7 @@ class MetricAlarm(object):
:param description: Description of MetricAlarm
:type dimensions: list of dicts
- :param description: Dimensions of alarm, such as:
+ :param dimensions: Dimensions of alarm, such as:
[{'InstanceId':['i-0123456,i-0123457']}]
:type alarm_actions: list of strs
diff --git a/boto/ec2/connection.py b/boto/ec2/connection.py
index 3d6b0205..1fe905fa 100644
--- a/boto/ec2/connection.py
+++ b/boto/ec2/connection.py
@@ -48,6 +48,9 @@ from boto.ec2.instanceinfo import InstanceInfo
from boto.ec2.reservedinstance import ReservedInstancesOffering
from boto.ec2.reservedinstance import ReservedInstance
from boto.ec2.reservedinstance import ReservedInstanceListing
+from boto.ec2.reservedinstance import ReservedInstancesConfiguration
+from boto.ec2.reservedinstance import ModifyReservedInstancesResult
+from boto.ec2.reservedinstance import ReservedInstancesModification
from boto.ec2.spotinstancerequest import SpotInstanceRequest
from boto.ec2.spotpricehistory import SpotPriceHistory
from boto.ec2.spotdatafeedsubscription import SpotDatafeedSubscription
@@ -66,7 +69,7 @@ from boto.exception import EC2ResponseError
class EC2Connection(AWSQueryConnection):
- APIVersion = boto.config.get('Boto', 'ec2_version', '2013-02-01')
+ APIVersion = boto.config.get('Boto', 'ec2_version', '2013-10-01')
DefaultRegionName = boto.config.get('Boto', 'ec2_region_name', 'us-east-1')
DefaultRegionEndpoint = boto.config.get('Boto', 'ec2_region_endpoint',
'ec2.us-east-1.amazonaws.com')
@@ -133,7 +136,7 @@ class EC2Connection(AWSQueryConnection):
# Image methods
def get_all_images(self, image_ids=None, owners=None,
- executable_by=None, filters=None):
+ executable_by=None, filters=None, dry_run=False):
"""
Retrieve all the EC2 images available on your account.
@@ -141,7 +144,10 @@ class EC2Connection(AWSQueryConnection):
:param image_ids: A list of strings with the image IDs wanted
:type owners: list
- :param owners: A list of owner IDs
+        :param owners: A list of owner IDs. The special strings 'self',
+            'amazon', and 'aws-marketplace' may be used to describe
+            images owned by you, Amazon, or AWS Marketplace,
+            respectively.
:type executable_by: list
:param executable_by: Returns AMIs for which the specified
@@ -155,6 +161,9 @@ class EC2Connection(AWSQueryConnection):
names/values is dependent on the request being performed.
Check the EC2 API guide for details.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: list
:return: A list of :class:`boto.ec2.image.Image`
"""
@@ -167,10 +176,12 @@ class EC2Connection(AWSQueryConnection):
self.build_list_params(params, executable_by, 'ExecutableBy')
if filters:
self.build_filter_params(params, filters)
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_list('DescribeImages', params,
[('item', Image)], verb='POST')
- def get_all_kernels(self, kernel_ids=None, owners=None):
+ def get_all_kernels(self, kernel_ids=None, owners=None, dry_run=False):
"""
Retrieve all the EC2 kernels available on your account.
Constructs a filter to allow the processing to happen server side.
@@ -181,6 +192,9 @@ class EC2Connection(AWSQueryConnection):
:type owners: list
:param owners: A list of owner IDs
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: list
:return: A list of :class:`boto.ec2.image.Image`
"""
@@ -191,10 +205,12 @@ class EC2Connection(AWSQueryConnection):
self.build_list_params(params, owners, 'Owner')
filter = {'image-type': 'kernel'}
self.build_filter_params(params, filter)
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_list('DescribeImages', params,
[('item', Image)], verb='POST')
- def get_all_ramdisks(self, ramdisk_ids=None, owners=None):
+ def get_all_ramdisks(self, ramdisk_ids=None, owners=None, dry_run=False):
"""
Retrieve all the EC2 ramdisks available on your account.
Constructs a filter to allow the processing to happen server side.
@@ -205,6 +221,9 @@ class EC2Connection(AWSQueryConnection):
:type owners: list
:param owners: A list of owner IDs
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: list
:return: A list of :class:`boto.ec2.image.Image`
"""
@@ -215,27 +234,33 @@ class EC2Connection(AWSQueryConnection):
self.build_list_params(params, owners, 'Owner')
filter = {'image-type': 'ramdisk'}
self.build_filter_params(params, filter)
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_list('DescribeImages', params,
[('item', Image)], verb='POST')
- def get_image(self, image_id):
+ def get_image(self, image_id, dry_run=False):
"""
Shortcut method to retrieve a specific image (AMI).
:type image_id: string
:param image_id: the ID of the Image to retrieve
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: :class:`boto.ec2.image.Image`
:return: The EC2 Image specified or None if the image is not found
"""
try:
- return self.get_all_images(image_ids=[image_id])[0]
+ return self.get_all_images(image_ids=[image_id], dry_run=dry_run)[0]
except IndexError: # None of those images available
return None
def register_image(self, name=None, description=None, image_location=None,
architecture=None, kernel_id=None, ramdisk_id=None,
- root_device_name=None, block_device_map=None):
+ root_device_name=None, block_device_map=None,
+ dry_run=False, virtualization_type=None):
"""
Register an image.
@@ -265,6 +290,15 @@ class EC2Connection(AWSQueryConnection):
:param block_device_map: A BlockDeviceMapping data structure
describing the EBS volumes associated with the Image.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
+ :type virtualization_type: string
+        :param virtualization_type: The virtualization_type of the image.
+ Valid choices are:
+ * paravirtual
+ * hvm
+
:rtype: string
:return: The new image id
"""
@@ -284,12 +318,17 @@ class EC2Connection(AWSQueryConnection):
if root_device_name:
params['RootDeviceName'] = root_device_name
if block_device_map:
- block_device_map.build_list_params(params)
+ block_device_map.ec2_build_list_params(params)
+ if dry_run:
+ params['DryRun'] = 'true'
+ if virtualization_type:
+ params['VirtualizationType'] = virtualization_type
+
rs = self.get_object('RegisterImage', params, ResultSet, verb='POST')
image_id = getattr(rs, 'imageId', None)
return image_id
- def deregister_image(self, image_id, delete_snapshot=False):
+ def deregister_image(self, image_id, delete_snapshot=False, dry_run=False):
"""
Unregister an AMI.
@@ -300,6 +339,9 @@ class EC2Connection(AWSQueryConnection):
:param delete_snapshot: Set to True if we should delete the
snapshot associated with an EBS volume mounted at /dev/sda1
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: bool
:return: True if successful
"""
@@ -310,15 +352,19 @@ class EC2Connection(AWSQueryConnection):
if key == "/dev/sda1":
snapshot_id = image.block_device_mapping[key].snapshot_id
break
-
+ params = {
+ 'ImageId': image_id,
+ }
+ if dry_run:
+ params['DryRun'] = 'true'
result = self.get_status('DeregisterImage',
- {'ImageId':image_id}, verb='POST')
+ params, verb='POST')
if result and snapshot_id:
return result and self.delete_snapshot(snapshot_id)
return result
def create_image(self, instance_id, name,
- description=None, no_reboot=False):
+ description=None, no_reboot=False, dry_run=False):
"""
Will create an AMI from the instance in the running or stopped
state.
@@ -340,6 +386,9 @@ class EC2Connection(AWSQueryConnection):
responsibility of maintaining file system integrity is
left to the owner of the instance.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: string
:return: The new image id
"""
@@ -349,12 +398,15 @@ class EC2Connection(AWSQueryConnection):
params['Description'] = description
if no_reboot:
params['NoReboot'] = 'true'
+ if dry_run:
+ params['DryRun'] = 'true'
img = self.get_object('CreateImage', params, Image, verb='POST')
return img.id
# ImageAttribute methods
- def get_image_attribute(self, image_id, attribute='launchPermission'):
+ def get_image_attribute(self, image_id, attribute='launchPermission',
+ dry_run=False):
"""
Gets an attribute from an image.
@@ -368,18 +420,23 @@ class EC2Connection(AWSQueryConnection):
* productCodes
* blockDeviceMapping
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: :class:`boto.ec2.image.ImageAttribute`
:return: An ImageAttribute object representing the value of the
attribute requested
"""
params = {'ImageId': image_id,
'Attribute': attribute}
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_object('DescribeImageAttribute', params,
ImageAttribute, verb='POST')
def modify_image_attribute(self, image_id, attribute='launchPermission',
operation='add', user_ids=None, groups=None,
- product_codes=None):
+ product_codes=None, dry_run=False):
"""
Changes an attribute of an image.
@@ -403,6 +460,10 @@ class EC2Connection(AWSQueryConnection):
:param product_codes: Amazon DevPay product code. Currently only one
product code can be associated with an AMI. Once
set, the product code cannot be changed or reset.
+
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
"""
params = {'ImageId': image_id,
'Attribute': attribute,
@@ -413,9 +474,12 @@ class EC2Connection(AWSQueryConnection):
self.build_list_params(params, groups, 'UserGroup')
if product_codes:
self.build_list_params(params, product_codes, 'ProductCode')
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_status('ModifyImageAttribute', params, verb='POST')
- def reset_image_attribute(self, image_id, attribute='launchPermission'):
+ def reset_image_attribute(self, image_id, attribute='launchPermission',
+ dry_run=False):
"""
Resets an attribute of an AMI to its default value.
@@ -425,16 +489,59 @@ class EC2Connection(AWSQueryConnection):
:type attribute: string
:param attribute: The attribute to reset
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: bool
:return: Whether the operation succeeded or not
"""
params = {'ImageId': image_id,
'Attribute': attribute}
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_status('ResetImageAttribute', params, verb='POST')
# Instance methods
- def get_all_instances(self, instance_ids=None, filters=None):
+ def get_all_instances(self, instance_ids=None, filters=None, dry_run=False):
+ """
+ Retrieve all the instance reservations associated with your account.
+
+ .. note::
+ This method's current behavior is deprecated in favor of
+ :meth:`get_all_reservations`. A future major release will change
+ :meth:`get_all_instances` to return a list of
+ :class:`boto.ec2.instance.Instance` objects as its name suggests.
+ To obtain that behavior today, use :meth:`get_only_instances`.
+
+ :type instance_ids: list
+ :param instance_ids: A list of strings of instance IDs
+
+ :type filters: dict
+ :param filters: Optional filters that can be used to limit the
+ results returned. Filters are provided in the form of a
+ dictionary consisting of filter names as the key and
+ filter values as the value. The set of allowable filter
+ names/values is dependent on the request being performed.
+ Check the EC2 API guide for details.
+
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
+ :rtype: list
+ :return: A list of :class:`boto.ec2.instance.Reservation`
+
+ """
+ warnings.warn(('The current get_all_instances implementation will be '
+ 'replaced with get_all_reservations.'),
+ PendingDeprecationWarning)
+ return self.get_all_reservations(instance_ids=instance_ids,
+ filters=filters, dry_run=dry_run)
+
+ def get_only_instances(self, instance_ids=None, filters=None,
+ dry_run=False):
+ # A future release should rename this method to get_all_instances
+ # and make get_only_instances an alias for that.
"""
Retrieve all the instances associated with your account.
@@ -449,6 +556,37 @@ class EC2Connection(AWSQueryConnection):
names/values is dependent on the request being performed.
Check the EC2 API guide for details.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
+ :rtype: list
+ :return: A list of :class:`boto.ec2.instance.Instance`
+ """
+ reservations = self.get_all_reservations(instance_ids=instance_ids,
+ filters=filters,
+ dry_run=dry_run)
+ return [instance for reservation in reservations
+ for instance in reservation.instances]
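For illustration only (region and filter values are assumptions), the new helper returns Instance objects directly instead of Reservations:

conn = boto.ec2.connect_to_region('us-east-1')
for instance in conn.get_only_instances(filters={'instance-state-name': 'running'}):
    print(instance.id, instance.state)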
+
+ def get_all_reservations(self, instance_ids=None, filters=None,
+ dry_run=False):
+ """
+ Retrieve all the instance reservations associated with your account.
+
+ :type instance_ids: list
+ :param instance_ids: A list of strings of instance IDs
+
+ :type filters: dict
+ :param filters: Optional filters that can be used to limit the
+ results returned. Filters are provided in the form of a
+ dictionary consisting of filter names as the key and
+ filter values as the value. The set of allowable filter
+ names/values is dependent on the request being performed.
+ Check the EC2 API guide for details.
+
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: list
:return: A list of :class:`boto.ec2.instance.Reservation`
"""
@@ -465,12 +603,14 @@ class EC2Connection(AWSQueryConnection):
"by group name use the 'group-name' filter instead.",
UserWarning)
self.build_filter_params(params, filters)
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_list('DescribeInstances', params,
[('item', Reservation)], verb='POST')
def get_all_instance_status(self, instance_ids=None,
max_results=None, next_token=None,
- filters=None):
+ filters=None, dry_run=False):
"""
Retrieve the status of one or more instances in your account, including any scheduled events.
@@ -495,6 +635,9 @@ class EC2Connection(AWSQueryConnection):
being performed. Check the EC2 API guide
for details.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: list
:return: A list of instances that have maintenance scheduled.
"""
@@ -507,6 +650,8 @@ class EC2Connection(AWSQueryConnection):
params['NextToken'] = next_token
if filters:
self.build_filter_params(params, filters)
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_object('DescribeInstanceStatus', params,
InstanceStatusSet, verb='POST')
@@ -524,7 +669,8 @@ class EC2Connection(AWSQueryConnection):
security_group_ids=None,
additional_info=None, instance_profile_name=None,
instance_profile_arn=None, tenancy=None,
- ebs_optimized=False, network_interfaces=None):
+ ebs_optimized=False, network_interfaces=None,
+ dry_run=False):
"""
Runs an image on EC2.
@@ -655,6 +801,9 @@ class EC2Connection(AWSQueryConnection):
:param network_interfaces: A list of
:class:`boto.ec2.networkinterface.NetworkInterfaceSpecification`
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: Reservation
:return: The :class:`boto.ec2.instance.Reservation` associated with
the request for machines
@@ -703,7 +852,7 @@ class EC2Connection(AWSQueryConnection):
if private_ip_address:
params['PrivateIpAddress'] = private_ip_address
if block_device_map:
- block_device_map.build_list_params(params)
+ block_device_map.ec2_build_list_params(params)
if disable_api_termination:
params['DisableApiTermination'] = 'true'
if instance_initiated_shutdown_behavior:
@@ -721,26 +870,33 @@ class EC2Connection(AWSQueryConnection):
params['EbsOptimized'] = 'true'
if network_interfaces:
network_interfaces.build_list_params(params)
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_object('RunInstances', params, Reservation,
verb='POST')
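A hedged sketch of launching with the new keyword (every identifier below is a placeholder):

reservation = conn.run_instances('ami-12345678',
                                 key_name='my-key',
                                 instance_type='m1.small',
                                 dry_run=False)
instance = reservation.instances[0]
print(instance.id)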
- def terminate_instances(self, instance_ids=None):
+ def terminate_instances(self, instance_ids=None, dry_run=False):
"""
Terminate the instances specified
:type instance_ids: list
:param instance_ids: A list of strings of the Instance IDs to terminate
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: list
:return: A list of the instances terminated
"""
params = {}
if instance_ids:
self.build_list_params(params, instance_ids, 'InstanceId')
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_list('TerminateInstances', params,
[('item', Instance)], verb='POST')
- def stop_instances(self, instance_ids=None, force=False):
+ def stop_instances(self, instance_ids=None, force=False, dry_run=False):
"""
Stop the instances specified
@@ -750,6 +906,9 @@ class EC2Connection(AWSQueryConnection):
:type force: bool
:param force: Forces the instance to stop
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: list
:return: A list of the instances stopped
"""
@@ -758,62 +917,88 @@ class EC2Connection(AWSQueryConnection):
params['Force'] = 'true'
if instance_ids:
self.build_list_params(params, instance_ids, 'InstanceId')
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_list('StopInstances', params,
[('item', Instance)], verb='POST')
- def start_instances(self, instance_ids=None):
+ def start_instances(self, instance_ids=None, dry_run=False):
"""
Start the instances specified
:type instance_ids: list
:param instance_ids: A list of strings of the Instance IDs to start
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: list
:return: A list of the instances started
"""
params = {}
if instance_ids:
self.build_list_params(params, instance_ids, 'InstanceId')
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_list('StartInstances', params,
[('item', Instance)], verb='POST')
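A rough lifecycle sketch tying the calls above together (instance IDs are placeholders; each call also accepts dry_run=True):

stopped = conn.stop_instances(instance_ids=['i-12345678'])
started = conn.start_instances(instance_ids=['i-12345678'])
terminated = conn.terminate_instances(instance_ids=['i-12345678'])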
- def get_console_output(self, instance_id):
+ def get_console_output(self, instance_id, dry_run=False):
"""
Retrieves the console output for the specified instance.
:type instance_id: string
:param instance_id: The instance ID of a running instance on the cloud.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: :class:`boto.ec2.instance.ConsoleOutput`
:return: The console output as a ConsoleOutput object
"""
params = {}
self.build_list_params(params, [instance_id], 'InstanceId')
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_object('GetConsoleOutput', params,
ConsoleOutput, verb='POST')
- def reboot_instances(self, instance_ids=None):
+ def reboot_instances(self, instance_ids=None, dry_run=False):
"""
Reboot the specified instances.
:type instance_ids: list
:param instance_ids: The instances to reboot
+
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
"""
params = {}
if instance_ids:
self.build_list_params(params, instance_ids, 'InstanceId')
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_status('RebootInstances', params)
- def confirm_product_instance(self, product_code, instance_id):
+ def confirm_product_instance(self, product_code, instance_id,
+ dry_run=False):
+ """
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
+ """
params = {'ProductCode': product_code,
'InstanceId': instance_id}
+ if dry_run:
+ params['DryRun'] = 'true'
rs = self.get_object('ConfirmProductInstance', params,
ResultSet, verb='POST')
return (rs.status, rs.ownerId)
# InstanceAttribute methods
- def get_instance_attribute(self, instance_id, attribute):
+ def get_instance_attribute(self, instance_id, attribute, dry_run=False):
"""
Gets an attribute from an instance.
@@ -837,6 +1022,9 @@ class EC2Connection(AWSQueryConnection):
* groupSet
* ebsOptimized
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: :class:`boto.ec2.image.InstanceAttribute`
:return: An InstanceAttribute object representing the value of the
attribute requested
@@ -844,10 +1032,85 @@ class EC2Connection(AWSQueryConnection):
params = {'InstanceId': instance_id}
if attribute:
params['Attribute'] = attribute
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_object('DescribeInstanceAttribute', params,
InstanceAttribute, verb='POST')
- def modify_instance_attribute(self, instance_id, attribute, value):
+ def modify_network_interface_attribute(self, interface_id, attr, value,
+ attachment_id=None, dry_run=False):
+ """
+ Changes an attribute of a network interface.
+
+ :type interface_id: string
+ :param interface_id: The interface id. Looks like 'eni-xxxxxxxx'
+
+ :type attr: string
+ :param attr: The attribute you wish to change.
+
+ Learn more at http://docs.aws.amazon.com/AWSEC2/latest/API\
+ Reference/ApiReference-query-ModifyNetworkInterfaceAttribute.html
+
+ * description - Textual description of interface
+ * groupSet - List of security group ids or group objects
+ * sourceDestCheck - Boolean
+ * deleteOnTermination - Boolean. Must also specify attachment_id
+
+ :type value: string
+ :param value: The new value for the attribute
+
+ :rtype: bool
+ :return: Whether the operation succeeded or not
+
+ :type attachment_id: string
+ :param attachment_id: If you're modifying DeleteOnTermination you must
+ specify the attachment_id.
+
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
+ """
+ bool_reqs = (
+ 'deleteontermination',
+ 'sourcedestcheck',
+ )
+ if attr.lower() in bool_reqs:
+ if isinstance(value, bool):
+ if value:
+ value = 'true'
+ else:
+ value = 'false'
+ elif value not in ['true', 'false']:
+ raise ValueError('%s must be a boolean, "true", or "false"!'
+ % attr)
+
+ params = {'NetworkInterfaceId': interface_id}
+
+ # groupSet is handled differently from other arguments
+ if attr.lower() == 'groupset':
+ for idx, sg in enumerate(value):
+ if isinstance(sg, SecurityGroup):
+ sg = sg.id
+ params['SecurityGroupId.%s' % (idx + 1)] = sg
+ elif attr.lower() == 'description':
+ params['Description.Value'] = value
+ elif attr.lower() == 'sourcedestcheck':
+ params['SourceDestCheck.Value'] = value
+ elif attr.lower() == 'deleteontermination':
+ params['Attachment.DeleteOnTermination'] = value
+ if not attachment_id:
+ raise ValueError('You must also specify an attachment_id')
+ params['Attachment.AttachmentId'] = attachment_id
+ else:
+ raise ValueError('Unknown attribute "%s"' % (attr,))
+
+ if dry_run:
+ params['DryRun'] = 'true'
+ return self.get_status(
+ 'ModifyNetworkInterfaceAttribute', params, verb='POST')
+
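An illustrative call into the new method (interface and attachment IDs are placeholders):

# Disable source/destination checking, e.g. for a NAT instance.
conn.modify_network_interface_attribute('eni-12345678', 'sourceDestCheck', False)
# deleteOnTermination additionally requires the attachment ID.
conn.modify_network_interface_attribute('eni-12345678', 'deleteOnTermination', True,
                                        attachment_id='eni-attach-12345678')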
+ def modify_instance_attribute(self, instance_id, attribute, value,
+ dry_run=False):
"""
Changes an attribute of an instance
@@ -871,6 +1134,9 @@ class EC2Connection(AWSQueryConnection):
:type value: string
:param value: The new value for the attribute
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: bool
:return: Whether the operation succeeded or not
"""
@@ -904,9 +1170,11 @@ class EC2Connection(AWSQueryConnection):
attribute = attribute[0].upper() + attribute[1:]
params['%s.Value' % attribute] = value
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_status('ModifyInstanceAttribute', params, verb='POST')
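A small sketch with a placeholder instance ID, assuming the instance is stopped first (EC2 only allows instanceType changes while the instance is stopped):

conn.stop_instances(instance_ids=['i-12345678'])
conn.modify_instance_attribute('i-12345678', 'instanceType', 'm1.large')
conn.start_instances(instance_ids=['i-12345678'])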
- def reset_instance_attribute(self, instance_id, attribute):
+ def reset_instance_attribute(self, instance_id, attribute, dry_run=False):
"""
Resets an attribute of an instance to its default value.
@@ -917,17 +1185,22 @@ class EC2Connection(AWSQueryConnection):
:param attribute: The attribute to reset. Valid values are:
kernel|ramdisk
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: bool
:return: Whether the operation succeeded or not
"""
params = {'InstanceId': instance_id,
'Attribute': attribute}
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_status('ResetInstanceAttribute', params, verb='POST')
# Spot Instances
def get_all_spot_instance_requests(self, request_ids=None,
- filters=None):
+ filters=None, dry_run=False):
"""
Retrieve all the spot instances requests associated with your account.
@@ -942,6 +1215,9 @@ class EC2Connection(AWSQueryConnection):
names/values is dependent on the request being performed.
Check the EC2 API guide for details.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: list
:return: A list of
:class:`boto.ec2.spotinstancerequest.SpotInstanceRequest`
@@ -959,12 +1235,14 @@ class EC2Connection(AWSQueryConnection):
"group name. Please update your filters accordingly.",
UserWarning)
self.build_filter_params(params, filters)
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_list('DescribeSpotInstanceRequests', params,
[('item', SpotInstanceRequest)], verb='POST')
def get_spot_price_history(self, start_time=None, end_time=None,
instance_type=None, product_description=None,
- availability_zone=None):
+ availability_zone=None, dry_run=False):
"""
Retrieve the recent history of spot instances pricing.
@@ -995,6 +1273,9 @@ class EC2Connection(AWSQueryConnection):
should be returned. If not specified, data for all
availability zones will be returned.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: list
:return: A list of :class:`boto.ec2.spotpricehistory.SpotPriceHistory`
"""
@@ -1009,6 +1290,8 @@ class EC2Connection(AWSQueryConnection):
params['ProductDescription'] = product_description
if availability_zone:
params['AvailabilityZone'] = availability_zone
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_list('DescribeSpotPriceHistory', params,
[('item', SpotPriceHistory)], verb='POST')
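As an illustrative sketch (the zone and product values are assumptions), each returned record exposes price and timestamp attributes:

history = conn.get_spot_price_history(instance_type='m1.small',
                                      product_description='Linux/UNIX',
                                      availability_zone='us-east-1a')
for record in history:
    print(record.timestamp, record.price)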
@@ -1026,7 +1309,7 @@ class EC2Connection(AWSQueryConnection):
instance_profile_name=None,
security_group_ids=None,
ebs_optimized=False,
- network_interfaces=None):
+ network_interfaces=None, dry_run=False):
"""
Request instances on the spot market at a particular price.
@@ -1133,6 +1416,9 @@ class EC2Connection(AWSQueryConnection):
:param network_interfaces: A list of
:class:`boto.ec2.networkinterface.NetworkInterfaceSpecification`
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: list
:return: A list of :class:`boto.ec2.spotinstancerequest.SpotInstanceRequest`
objects associated with the request for machines
@@ -1189,7 +1475,7 @@ class EC2Connection(AWSQueryConnection):
if placement_group:
params['%s.Placement.GroupName' % ls] = placement_group
if block_device_map:
- block_device_map.build_list_params(params, '%s.' % ls)
+ block_device_map.ec2_build_list_params(params, '%s.' % ls)
if instance_profile_name:
params['%s.IamInstanceProfile.Name' % ls] = instance_profile_name
if instance_profile_arn:
@@ -1198,38 +1484,51 @@ class EC2Connection(AWSQueryConnection):
params['%s.EbsOptimized' % ls] = 'true'
if network_interfaces:
network_interfaces.build_list_params(params, prefix=ls + '.')
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_list('RequestSpotInstances', params,
[('item', SpotInstanceRequest)],
verb='POST')
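A hedged spot-request sketch (price, AMI and key name are placeholders):

requests = conn.request_spot_instances(price='0.05',
                                       image_id='ami-12345678',
                                       instance_type='m1.small',
                                       key_name='my-key')
print(requests[0].id, requests[0].state)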
- def cancel_spot_instance_requests(self, request_ids):
+ def cancel_spot_instance_requests(self, request_ids, dry_run=False):
"""
Cancel the specified Spot Instance Requests.
:type request_ids: list
:param request_ids: A list of strings of the Request IDs to terminate
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: list
:return: A list of the instances terminated
"""
params = {}
if request_ids:
self.build_list_params(params, request_ids, 'SpotInstanceRequestId')
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_list('CancelSpotInstanceRequests', params,
[('item', Instance)], verb='POST')
- def get_spot_datafeed_subscription(self):
+ def get_spot_datafeed_subscription(self, dry_run=False):
"""
Return the current spot instance data feed subscription
associated with this account, if any.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: :class:`boto.ec2.spotdatafeedsubscription.SpotDatafeedSubscription`
:return: The datafeed subscription object or None
"""
+ params = {}
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_object('DescribeSpotDatafeedSubscription',
- None, SpotDatafeedSubscription, verb='POST')
+ params, SpotDatafeedSubscription, verb='POST')
- def create_spot_datafeed_subscription(self, bucket, prefix):
+ def create_spot_datafeed_subscription(self, bucket, prefix, dry_run=False):
"""
Create a spot instance datafeed subscription for this account.
@@ -1243,29 +1542,40 @@ class EC2Connection(AWSQueryConnection):
:param prefix: An optional prefix that will be pre-pended to all
data files written to the bucket.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: :class:`boto.ec2.spotdatafeedsubscription.SpotDatafeedSubscription`
:return: The datafeed subscription object or None
"""
params = {'Bucket': bucket}
if prefix:
params['Prefix'] = prefix
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_object('CreateSpotDatafeedSubscription',
params, SpotDatafeedSubscription, verb='POST')
- def delete_spot_datafeed_subscription(self):
+ def delete_spot_datafeed_subscription(self, dry_run=False):
"""
Delete the current spot instance data feed subscription
associated with this account
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: bool
:return: True if successful
"""
+ params = {}
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_status('DeleteSpotDatafeedSubscription',
- None, verb='POST')
+ params, verb='POST')
# Zone methods
- def get_all_zones(self, zones=None, filters=None):
+ def get_all_zones(self, zones=None, filters=None, dry_run=False):
"""
Get all Availability Zones associated with the current region.
@@ -1284,6 +1594,9 @@ class EC2Connection(AWSQueryConnection):
being performed. Check the EC2 API guide
for details.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: list of :class:`boto.ec2.zone.Zone`
:return: The requested Zone objects
"""
@@ -1292,12 +1605,15 @@ class EC2Connection(AWSQueryConnection):
self.build_list_params(params, zones, 'ZoneName')
if filters:
self.build_filter_params(params, filters)
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_list('DescribeAvailabilityZones', params,
[('item', Zone)], verb='POST')
# Address methods
- def get_all_addresses(self, addresses=None, filters=None, allocation_ids=None):
+ def get_all_addresses(self, addresses=None, filters=None,
+ allocation_ids=None, dry_run=False):
"""
Get all EIP's associated with the current credentials.
@@ -1321,6 +1637,9 @@ class EC2Connection(AWSQueryConnection):
present, only the Addresses associated with the given
allocation IDs will be returned.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: list of :class:`boto.ec2.address.Address`
:return: The requested Address objects
"""
@@ -1331,9 +1650,11 @@ class EC2Connection(AWSQueryConnection):
self.build_list_params(params, allocation_ids, 'AllocationId')
if filters:
self.build_filter_params(params, filters)
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_list('DescribeAddresses', params, [('item', Address)], verb='POST')
- def allocate_address(self, domain=None):
+ def allocate_address(self, domain=None, dry_run=False):
"""
Allocate a new Elastic IP address and associate it with your account.
@@ -1342,6 +1663,9 @@ class EC2Connection(AWSQueryConnection):
will be allocated to VPC. Will return address object with
allocation_id.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: :class:`boto.ec2.address.Address`
:return: The newly allocated Address
"""
@@ -1350,12 +1674,15 @@ class EC2Connection(AWSQueryConnection):
if domain is not None:
params['Domain'] = domain
+ if dry_run:
+ params['DryRun'] = 'true'
+
return self.get_object('AllocateAddress', params, Address, verb='POST')
def assign_private_ip_addresses(self, network_interface_id=None,
private_ip_addresses=None,
secondary_private_ip_address_count=None,
- allow_reassignment=False):
+ allow_reassignment=False, dry_run=False):
"""
Assigns one or more secondary private IP addresses to a network
interface in Amazon VPC.
@@ -1378,6 +1705,9 @@ class EC2Connection(AWSQueryConnection):
that is already assigned to another network interface or instance
to be reassigned to the specified network interface.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: bool
:return: True if successful
"""
@@ -1396,11 +1726,15 @@ class EC2Connection(AWSQueryConnection):
if allow_reassignment:
params['AllowReassignment'] = 'true'
+ if dry_run:
+ params['DryRun'] = 'true'
+
return self.get_status('AssignPrivateIpAddresses', params, verb='POST')
def associate_address(self, instance_id=None, public_ip=None,
allocation_id=None, network_interface_id=None,
- private_ip_address=None, allow_reassociation=False):
+ private_ip_address=None, allow_reassociation=False,
+ dry_run=False):
"""
Associate an Elastic IP address with a currently running instance.
This requires one of ``public_ip`` or ``allocation_id`` depending
@@ -1433,6 +1767,9 @@ class EC2Connection(AWSQueryConnection):
or instance to be re-associated with the specified instance or
interface.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: bool
:return: True if successful
"""
@@ -1453,9 +1790,13 @@ class EC2Connection(AWSQueryConnection):
if allow_reassociation:
params['AllowReassociation'] = 'true'
+ if dry_run:
+ params['DryRun'] = 'true'
+
return self.get_status('AssociateAddress', params, verb='POST')
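For illustration (the instance ID is a placeholder; pass domain='vpc' to allocate_address for VPC addresses):

address = conn.allocate_address()
conn.associate_address(instance_id='i-12345678', public_ip=address.public_ip)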
- def disassociate_address(self, public_ip=None, association_id=None):
+ def disassociate_address(self, public_ip=None, association_id=None,
+ dry_run=False):
"""
Disassociate an Elastic IP address from a currently running instance.
@@ -1465,6 +1806,9 @@ class EC2Connection(AWSQueryConnection):
:type association_id: string
:param association_id: The association ID for a VPC based elastic ip.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: bool
:return: True if successful
"""
@@ -1475,9 +1819,13 @@ class EC2Connection(AWSQueryConnection):
elif association_id is not None:
params['AssociationId'] = association_id
+ if dry_run:
+ params['DryRun'] = 'true'
+
return self.get_status('DisassociateAddress', params, verb='POST')
- def release_address(self, public_ip=None, allocation_id=None):
+ def release_address(self, public_ip=None, allocation_id=None,
+ dry_run=False):
"""
Free up an Elastic IP address. Pass a public IP address to
release an EC2 Elastic IP address and an AllocationId to
@@ -1497,6 +1845,9 @@ class EC2Connection(AWSQueryConnection):
:type allocation_id: string
:param allocation_id: The Allocation ID for VPC elastic IPs.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: bool
:return: True if successful
"""
@@ -1507,10 +1858,13 @@ class EC2Connection(AWSQueryConnection):
elif allocation_id is not None:
params['AllocationId'] = allocation_id
+ if dry_run:
+ params['DryRun'] = 'true'
+
return self.get_status('ReleaseAddress', params, verb='POST')
def unassign_private_ip_addresses(self, network_interface_id=None,
- private_ip_addresses=None):
+ private_ip_addresses=None, dry_run=False):
"""
Unassigns one or more secondary private IP addresses from a network
interface in Amazon VPC.
@@ -1523,6 +1877,9 @@ class EC2Connection(AWSQueryConnection):
:param private_ip_addresses: Specifies the secondary private IP
addresses that you want to unassign from the network interface.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: bool
:return: True if successful
"""
@@ -1535,12 +1892,15 @@ class EC2Connection(AWSQueryConnection):
self.build_list_params(params, private_ip_addresses,
'PrivateIpAddress')
+ if dry_run:
+ params['DryRun'] = 'true'
+
return self.get_status('UnassignPrivateIpAddresses', params,
verb='POST')
# Volume methods
- def get_all_volumes(self, volume_ids=None, filters=None):
+ def get_all_volumes(self, volume_ids=None, filters=None, dry_run=False):
"""
Get all Volumes associated with the current credentials.
@@ -1559,6 +1919,9 @@ class EC2Connection(AWSQueryConnection):
being performed. Check the EC2 API guide
for details.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: list of :class:`boto.ec2.volume.Volume`
:return: The requested Volume objects
"""
@@ -1567,12 +1930,14 @@ class EC2Connection(AWSQueryConnection):
self.build_list_params(params, volume_ids, 'VolumeId')
if filters:
self.build_filter_params(params, filters)
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_list('DescribeVolumes', params,
[('item', Volume)], verb='POST')
def get_all_volume_status(self, volume_ids=None,
max_results=None, next_token=None,
- filters=None):
+ filters=None, dry_run=False):
"""
Retrieve the status of one or more volumes.
@@ -1597,6 +1962,9 @@ class EC2Connection(AWSQueryConnection):
being performed. Check the EC2 API guide
for details.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: list
:return: A list of volume status.
"""
@@ -1609,10 +1977,12 @@ class EC2Connection(AWSQueryConnection):
params['NextToken'] = next_token
if filters:
self.build_filter_params(params, filters)
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_object('DescribeVolumeStatus', params,
VolumeStatusSet, verb='POST')
- def enable_volume_io(self, volume_id):
+ def enable_volume_io(self, volume_id, dry_run=False):
"""
Enables I/O operations for a volume that had I/O operations
disabled because the data on the volume was potentially inconsistent.
@@ -1620,14 +1990,19 @@ class EC2Connection(AWSQueryConnection):
:type volume_id: str
:param volume_id: The ID of the volume.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: bool
:return: True if successful
"""
params = {'VolumeId': volume_id}
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_status('EnableVolumeIO', params, verb='POST')
def get_volume_attribute(self, volume_id,
- attribute='autoEnableIO'):
+ attribute='autoEnableIO', dry_run=False):
"""
Describes an attribute of the volume.
@@ -1639,14 +2014,20 @@ class EC2Connection(AWSQueryConnection):
* autoEnableIO
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: list of :class:`boto.ec2.volume.VolumeAttribute`
:return: The requested Volume attribute
"""
params = {'VolumeId': volume_id, 'Attribute': attribute}
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_object('DescribeVolumeAttribute', params,
VolumeAttribute, verb='POST')
- def modify_volume_attribute(self, volume_id, attribute, new_value):
+ def modify_volume_attribute(self, volume_id, attribute, new_value,
+ dry_run=False):
"""
Changes an attribute of a Volume.
@@ -1659,14 +2040,20 @@ class EC2Connection(AWSQueryConnection):
:type new_value: string
:param new_value: The new value of the attribute.
+
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
"""
params = {'VolumeId': volume_id}
if attribute == 'AutoEnableIO':
params['AutoEnableIO.Value'] = new_value
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_status('ModifyVolumeAttribute', params, verb='POST')
def create_volume(self, size, zone, snapshot=None,
- volume_type=None, iops=None):
+ volume_type=None, iops=None, dry_run=False):
"""
Create a new EBS Volume.
@@ -1687,6 +2074,10 @@ class EC2Connection(AWSQueryConnection):
:type iops: int
:param iops: The provisioned IOPs you want to associate with
this volume. (optional)
+
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
"""
if isinstance(zone, Zone):
zone = zone.name
@@ -1701,22 +2092,29 @@ class EC2Connection(AWSQueryConnection):
params['VolumeType'] = volume_type
if iops:
params['Iops'] = str(iops)
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_object('CreateVolume', params, Volume, verb='POST')
- def delete_volume(self, volume_id):
+ def delete_volume(self, volume_id, dry_run=False):
"""
Delete an EBS volume.
:type volume_id: str
:param volume_id: The ID of the volume to be deleted.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: bool
:return: True if successful
"""
params = {'VolumeId': volume_id}
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_status('DeleteVolume', params, verb='POST')
- def attach_volume(self, volume_id, instance_id, device):
+ def attach_volume(self, volume_id, instance_id, device, dry_run=False):
"""
Attach an EBS volume to an EC2 instance.
@@ -1731,16 +2129,21 @@ class EC2Connection(AWSQueryConnection):
:param device: The device on the instance through which the
volume will be exposed (e.g. /dev/sdh)
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: bool
:return: True if successful
"""
params = {'InstanceId': instance_id,
'VolumeId': volume_id,
'Device': device}
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_status('AttachVolume', params, verb='POST')
def detach_volume(self, volume_id, instance_id=None,
- device=None, force=False):
+ device=None, force=False, dry_run=False):
"""
Detach an EBS volume from an EC2 instance.
@@ -1765,6 +2168,9 @@ class EC2Connection(AWSQueryConnection):
use this option, you must perform file system check and
repair procedures.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: bool
:return: True if successful
"""
@@ -1775,13 +2181,15 @@ class EC2Connection(AWSQueryConnection):
params['Device'] = device
if force:
params['Force'] = 'true'
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_status('DetachVolume', params, verb='POST')
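A hedged attach/detach sketch (zone, instance ID and device name are placeholders):

vol = conn.create_volume(10, 'us-east-1a')
conn.attach_volume(vol.id, 'i-12345678', '/dev/sdh')
conn.detach_volume(vol.id, instance_id='i-12345678', device='/dev/sdh')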
# Snapshot methods
def get_all_snapshots(self, snapshot_ids=None,
owner=None, restorable_by=None,
- filters=None):
+ filters=None, dry_run=False):
"""
Get all EBS Snapshots associated with the current credentials.
@@ -1812,6 +2220,9 @@ class EC2Connection(AWSQueryConnection):
being performed. Check the EC2 API guide
for details.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: list of :class:`boto.ec2.snapshot.Snapshot`
:return: The requested Snapshot objects
"""
@@ -1824,10 +2235,12 @@ class EC2Connection(AWSQueryConnection):
params['RestorableBy'] = restorable_by
if filters:
self.build_filter_params(params, filters)
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_list('DescribeSnapshots', params,
[('item', Snapshot)], verb='POST')
- def create_snapshot(self, volume_id, description=None):
+ def create_snapshot(self, volume_id, description=None, dry_run=False):
"""
Create a snapshot of an existing EBS Volume.
@@ -1838,26 +2251,38 @@ class EC2Connection(AWSQueryConnection):
:param description: A description of the snapshot.
Limited to 255 characters.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: :class:`boto.ec2.snapshot.Snapshot`
:return: The created Snapshot object
"""
params = {'VolumeId': volume_id}
if description:
params['Description'] = description[0:255]
+ if dry_run:
+ params['DryRun'] = 'true'
snapshot = self.get_object('CreateSnapshot', params,
Snapshot, verb='POST')
- volume = self.get_all_volumes([volume_id])[0]
+ volume = self.get_all_volumes([volume_id], dry_run=dry_run)[0]
volume_name = volume.tags.get('Name')
if volume_name:
snapshot.add_tag('Name', volume_name)
return snapshot
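A short sketch with a placeholder volume ID; as implemented above, the snapshot inherits the volume's 'Name' tag when one is set:

snapshot = conn.create_snapshot('vol-12345678', description='nightly backup')
print(snapshot.id, snapshot.status)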
- def delete_snapshot(self, snapshot_id):
+ def delete_snapshot(self, snapshot_id, dry_run=False):
+ """
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
+ """
params = {'SnapshotId': snapshot_id}
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_status('DeleteSnapshot', params, verb='POST')
def copy_snapshot(self, source_region, source_snapshot_id,
- description=None):
+ description=None, dry_run=False):
"""
Copies a point-in-time snapshot of an Amazon Elastic Block Store
(Amazon EBS) volume and stores it in Amazon Simple Storage Service
@@ -1876,6 +2301,9 @@ class EC2Connection(AWSQueryConnection):
:type description: str
:param description: A description of the new Amazon EBS snapshot.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: str
:return: The snapshot ID
@@ -1886,12 +2314,14 @@ class EC2Connection(AWSQueryConnection):
}
if description is not None:
params['Description'] = description
+ if dry_run:
+ params['DryRun'] = 'true'
snapshot = self.get_object('CopySnapshot', params, Snapshot,
verb='POST')
return snapshot.id
def trim_snapshots(self, hourly_backups=8, daily_backups=7,
- weekly_backups=4):
+ weekly_backups=4, monthly_backups=True):
"""
Trim excess snapshots, based on when they were taken. More current
snapshots are retained, with the number retained decreasing as you
@@ -1909,7 +2339,7 @@ class EC2Connection(AWSQueryConnection):
snapshots taken in each of the last seven days, the first snapshots
taken in the last 4 weeks (counting Midnight Sunday morning as
the start of the week), and the first snapshot from the first
- Sunday of each month forever.
+ day of each month forever.
:type hourly_backups: int
:param hourly_backups: How many recent hourly backups should be saved.
@@ -1919,6 +2349,9 @@ class EC2Connection(AWSQueryConnection):
:type weekly_backups: int
:param weekly_backups: How many recent weekly backups should be saved.
+
+ :type monthly_backups: int
+ :param monthly_backups: How many monthly backups should be saved. Use True for no limit.
"""
# This function first builds up an ordered list of target times
@@ -1953,10 +2386,14 @@ class EC2Connection(AWSQueryConnection):
target_backup_times.append(last_sunday - timedelta(weeks = week))
one_day = timedelta(days = 1)
- while start_of_month > oldest_snapshot_date:
+ monthly_snapshots_added = 0
+ while (start_of_month > oldest_snapshot_date and
+ (monthly_backups is True or
+ monthly_snapshots_added < monthly_backups)):
# append the start of the month to the list of
# snapshot dates to save:
target_backup_times.append(start_of_month)
+ monthly_snapshots_added += 1
# there's no timedelta setting for one month, so instead:
# decrement the day by one, so we go to the final day of
# the previous month...
@@ -2037,7 +2474,8 @@ class EC2Connection(AWSQueryConnection):
snap_found_for_this_time_period = False
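An illustrative call showing the new monthly limit (the counts are examples; monthly_backups=True, the default, keeps every monthly snapshot):

conn.trim_snapshots(hourly_backups=8, daily_backups=7, weekly_backups=4,
                    monthly_backups=12)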
def get_snapshot_attribute(self, snapshot_id,
- attribute='createVolumePermission'):
+ attribute='createVolumePermission',
+ dry_run=False):
"""
Get information about an attribute of a snapshot. Only one attribute
can be specified per call.
@@ -2050,18 +2488,24 @@ class EC2Connection(AWSQueryConnection):
* createVolumePermission
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: list of :class:`boto.ec2.snapshotattribute.SnapshotAttribute`
:return: The requested Snapshot attribute
"""
params = {'Attribute': attribute}
if snapshot_id:
params['SnapshotId'] = snapshot_id
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_object('DescribeSnapshotAttribute', params,
SnapshotAttribute, verb='POST')
def modify_snapshot_attribute(self, snapshot_id,
attribute='createVolumePermission',
- operation='add', user_ids=None, groups=None):
+ operation='add', user_ids=None, groups=None,
+ dry_run=False):
"""
Changes an attribute of a snapshot.
@@ -2083,6 +2527,9 @@ class EC2Connection(AWSQueryConnection):
:param groups: The groups to add/remove attributes. The only valid
value at this time is 'all'.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
"""
params = {'SnapshotId': snapshot_id,
'Attribute': attribute,
@@ -2091,10 +2538,13 @@ class EC2Connection(AWSQueryConnection):
self.build_list_params(params, user_ids, 'UserId')
if groups:
self.build_list_params(params, groups, 'UserGroup')
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_status('ModifySnapshotAttribute', params, verb='POST')
def reset_snapshot_attribute(self, snapshot_id,
- attribute='createVolumePermission'):
+ attribute='createVolumePermission',
+ dry_run=False):
"""
Resets an attribute of a snapshot to its default value.
@@ -2104,16 +2554,21 @@ class EC2Connection(AWSQueryConnection):
:type attribute: string
:param attribute: The attribute to reset
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: bool
:return: Whether the operation succeeded or not
"""
params = {'SnapshotId': snapshot_id,
'Attribute': attribute}
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_status('ResetSnapshotAttribute', params, verb='POST')
# Keypair methods
- def get_all_key_pairs(self, keynames=None, filters=None):
+ def get_all_key_pairs(self, keynames=None, filters=None, dry_run=False):
"""
Get all key pairs associated with your account.
@@ -2129,6 +2584,9 @@ class EC2Connection(AWSQueryConnection):
names/values is dependent on the request being performed.
Check the EC2 API guide for details.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: list
:return: A list of :class:`boto.ec2.keypair.KeyPair`
"""
@@ -2137,28 +2595,36 @@ class EC2Connection(AWSQueryConnection):
self.build_list_params(params, keynames, 'KeyName')
if filters:
self.build_filter_params(params, filters)
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_list('DescribeKeyPairs', params,
[('item', KeyPair)], verb='POST')
- def get_key_pair(self, keyname):
+ def get_key_pair(self, keyname, dry_run=False):
"""
Convenience method to retrieve a specific keypair (KeyPair).
:type keyname: string
:param keyname: The name of the keypair to retrieve
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: :class:`boto.ec2.keypair.KeyPair`
:return: The KeyPair specified or None if it is not found
"""
try:
- return self.get_all_key_pairs(keynames=[keyname])[0]
+ return self.get_all_key_pairs(
+ keynames=[keyname],
+ dry_run=dry_run
+ )[0]
except self.ResponseError, e:
if e.code == 'InvalidKeyPair.NotFound':
return None
else:
raise
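A small create-if-missing sketch (key name and directory are placeholders):

key = conn.get_key_pair('my-key')
if key is None:
    key = conn.create_key_pair('my-key')
    # Persist the returned private key material to my-key.pem.
    key.save('/tmp')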
- def create_key_pair(self, key_name):
+ def create_key_pair(self, key_name, dry_run=False):
"""
Create a new key pair for your account.
This will create the key pair within the region you
@@ -2167,25 +2633,36 @@ class EC2Connection(AWSQueryConnection):
:type key_name: string
:param key_name: The name of the new keypair
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: :class:`boto.ec2.keypair.KeyPair`
:return: The newly created :class:`boto.ec2.keypair.KeyPair`.
The material attribute of the new KeyPair object
will contain the unencrypted PEM encoded RSA private key.
"""
params = {'KeyName': key_name}
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_object('CreateKeyPair', params, KeyPair, verb='POST')
- def delete_key_pair(self, key_name):
+ def delete_key_pair(self, key_name, dry_run=False):
"""
Delete a key pair from your account.
:type key_name: string
:param key_name: The name of the keypair to delete
+
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
"""
params = {'KeyName': key_name}
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_status('DeleteKeyPair', params, verb='POST')
- def import_key_pair(self, key_name, public_key_material):
+ def import_key_pair(self, key_name, public_key_material, dry_run=False):
"""
Imports the public key from an RSA key pair that you created
with a third-party tool.
@@ -2212,6 +2689,9 @@ class EC2Connection(AWSQueryConnection):
the public key material before sending
it to AWS.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: :class:`boto.ec2.keypair.KeyPair`
:return: A :class:`boto.ec2.keypair.KeyPair` object representing
the newly imported key pair. This object will contain only
@@ -2220,12 +2700,14 @@ class EC2Connection(AWSQueryConnection):
public_key_material = base64.b64encode(public_key_material)
params = {'KeyName': key_name,
'PublicKeyMaterial': public_key_material}
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_object('ImportKeyPair', params, KeyPair, verb='POST')
# SecurityGroup methods
def get_all_security_groups(self, groupnames=None, group_ids=None,
- filters=None):
+ filters=None, dry_run=False):
"""
Get all security groups associated with your account in a region.
@@ -2248,6 +2730,9 @@ class EC2Connection(AWSQueryConnection):
being performed. Check the EC2 API guide
for details.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: list
:return: A list of :class:`boto.ec2.securitygroup.SecurityGroup`
"""
@@ -2258,11 +2743,13 @@ class EC2Connection(AWSQueryConnection):
self.build_list_params(params, group_ids, 'GroupId')
if filters is not None:
self.build_filter_params(params, filters)
-
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_list('DescribeSecurityGroups', params,
[('item', SecurityGroup)], verb='POST')
- def create_security_group(self, name, description, vpc_id=None):
+ def create_security_group(self, name, description, vpc_id=None,
+ dry_run=False):
"""
Create a new security group for your account.
This will create the security group within the region you
@@ -2278,6 +2765,9 @@ class EC2Connection(AWSQueryConnection):
:param vpc_id: The ID of the VPC to create the security group in,
if any.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: :class:`boto.ec2.securitygroup.SecurityGroup`
:return: The newly created :class:`boto.ec2.securitygroup.SecurityGroup`.
"""
@@ -2287,6 +2777,9 @@ class EC2Connection(AWSQueryConnection):
if vpc_id is not None:
params['VpcId'] = vpc_id
+ if dry_run:
+ params['DryRun'] = 'true'
+
group = self.get_object('CreateSecurityGroup', params,
SecurityGroup, verb='POST')
group.name = name
@@ -2295,7 +2788,7 @@ class EC2Connection(AWSQueryConnection):
group.vpc_id = vpc_id
return group
- def delete_security_group(self, name=None, group_id=None):
+ def delete_security_group(self, name=None, group_id=None, dry_run=False):
"""
Delete a security group from your account.
@@ -2306,6 +2799,9 @@ class EC2Connection(AWSQueryConnection):
:param group_id: The ID of the security group to delete within
a VPC.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: bool
:return: True if successful.
"""
@@ -2316,6 +2812,9 @@ class EC2Connection(AWSQueryConnection):
elif group_id is not None:
params['GroupId'] = group_id
+ if dry_run:
+ params['DryRun'] = 'true'
+
return self.get_status('DeleteSecurityGroup', params, verb='POST')
def authorize_security_group_deprecated(self, group_name,
@@ -2323,7 +2822,7 @@ class EC2Connection(AWSQueryConnection):
src_security_group_owner_id=None,
ip_protocol=None,
from_port=None, to_port=None,
- cidr_ip=None):
+ cidr_ip=None, dry_run=False):
"""
NOTE: This method uses the old-style request parameters
that did not allow a port to be specified when
@@ -2354,6 +2853,9 @@ class EC2Connection(AWSQueryConnection):
:param to_port: The ending port number you are enabling access to.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: bool
:return: True if successful.
"""
@@ -2370,6 +2872,8 @@ class EC2Connection(AWSQueryConnection):
params['ToPort'] = to_port
if cidr_ip:
params['CidrIp'] = cidr_ip
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_status('AuthorizeSecurityGroupIngress', params)
def authorize_security_group(self, group_name=None,
@@ -2378,7 +2882,8 @@ class EC2Connection(AWSQueryConnection):
ip_protocol=None,
from_port=None, to_port=None,
cidr_ip=None, group_id=None,
- src_security_group_group_id=None):
+ src_security_group_group_id=None,
+ dry_run=False):
"""
Add a new rule to an existing security group.
You need to pass in either src_security_group_name and
@@ -2421,6 +2926,9 @@ class EC2Connection(AWSQueryConnection):
group you are granting access to. Can be used instead of
src_security_group_name
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: bool
:return: True if successful.
"""
@@ -2457,6 +2965,8 @@ class EC2Connection(AWSQueryConnection):
for i, single_cidr_ip in enumerate(cidr_ip):
params['IpPermissions.1.IpRanges.%d.CidrIp' % (i+1)] = \
single_cidr_ip
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_status('AuthorizeSecurityGroupIngress',
params, verb='POST')
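An illustrative sketch (group name, ports and CIDR are examples only):

group = conn.create_security_group('web', 'web servers')
conn.authorize_security_group(group_name='web', ip_protocol='tcp',
                              from_port=80, to_port=80,
                              cidr_ip='0.0.0.0/0')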
@@ -2467,13 +2977,18 @@ class EC2Connection(AWSQueryConnection):
from_port=None,
to_port=None,
src_group_id=None,
- cidr_ip=None):
+ cidr_ip=None,
+ dry_run=False):
"""
The action adds one or more egress rules to a VPC security
group. Specifically, this action permits instances in a
security group to send traffic to one or more destination
CIDR IP address ranges, or to one or more destination
security groups in the same VPC.
+
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
"""
params = {
'GroupId': group_id,
@@ -2488,6 +3003,8 @@ class EC2Connection(AWSQueryConnection):
params['IpPermissions.1.Groups.1.GroupId'] = src_group_id
if cidr_ip is not None:
params['IpPermissions.1.IpRanges.1.CidrIp'] = cidr_ip
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_status('AuthorizeSecurityGroupEgress',
params, verb='POST')
@@ -2497,7 +3014,7 @@ class EC2Connection(AWSQueryConnection):
src_security_group_owner_id=None,
ip_protocol=None,
from_port=None, to_port=None,
- cidr_ip=None):
+ cidr_ip=None, dry_run=False):
"""
NOTE: This method uses the old-style request parameters
that did not allow a port to be specified when
@@ -2534,6 +3051,9 @@ class EC2Connection(AWSQueryConnection):
:param to_port: The ending port number you are revoking access to.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: bool
:return: True if successful.
"""
@@ -2550,6 +3070,8 @@ class EC2Connection(AWSQueryConnection):
params['ToPort'] = to_port
if cidr_ip:
params['CidrIp'] = cidr_ip
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_status('RevokeSecurityGroupIngress', params)
def revoke_security_group(self, group_name=None,
@@ -2557,7 +3079,7 @@ class EC2Connection(AWSQueryConnection):
src_security_group_owner_id=None,
ip_protocol=None, from_port=None, to_port=None,
cidr_ip=None, group_id=None,
- src_security_group_group_id=None):
+ src_security_group_group_id=None, dry_run=False):
"""
Remove an existing rule from an existing security group.
You need to pass in either src_security_group_name and
@@ -2600,6 +3122,9 @@ class EC2Connection(AWSQueryConnection):
for which you are revoking access. Can be used instead
of src_security_group_name
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: bool
:return: True if successful.
"""
@@ -2630,6 +3155,8 @@ class EC2Connection(AWSQueryConnection):
params['IpPermissions.1.ToPort'] = to_port
if cidr_ip:
params['IpPermissions.1.IpRanges.1.CidrIp'] = cidr_ip
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_status('RevokeSecurityGroupIngress',
params, verb='POST')
@@ -2639,7 +3166,7 @@ class EC2Connection(AWSQueryConnection):
from_port=None,
to_port=None,
src_group_id=None,
- cidr_ip=None):
+ cidr_ip=None, dry_run=False):
"""
Remove an existing egress rule from an existing VPC security
group. You need to pass in an ip_protocol, from_port and
@@ -2668,6 +3195,9 @@ class EC2Connection(AWSQueryConnection):
:param cidr_ip: The CIDR block you are revoking access to.
See http://goo.gl/Yj5QC
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: bool
:return: True if successful.
"""
@@ -2685,6 +3215,8 @@ class EC2Connection(AWSQueryConnection):
params['IpPermissions.1.Groups.1.GroupId'] = src_group_id
if cidr_ip:
params['IpPermissions.1.IpRanges.1.CidrIp'] = cidr_ip
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_status('RevokeSecurityGroupEgress',
params, verb='POST')
@@ -2692,7 +3224,7 @@ class EC2Connection(AWSQueryConnection):
# Regions
#
- def get_all_regions(self, region_names=None, filters=None):
+ def get_all_regions(self, region_names=None, filters=None, dry_run=False):
"""
Get all available regions for the EC2 service.
@@ -2709,6 +3241,9 @@ class EC2Connection(AWSQueryConnection):
being performed. Check the EC2 API guide
for details.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: list
:return: A list of :class:`boto.ec2.regioninfo.RegionInfo`
"""
@@ -2717,6 +3252,8 @@ class EC2Connection(AWSQueryConnection):
self.build_list_params(params, region_names, 'RegionName')
if filters:
self.build_filter_params(params, filters)
+ if dry_run:
+ params['DryRun'] = 'true'
regions = self.get_list('DescribeRegions', params,
[('item', RegionInfo)], verb='POST')
for region in regions:
@@ -2740,7 +3277,8 @@ class EC2Connection(AWSQueryConnection):
max_duration=None,
max_instance_count=None,
next_token=None,
- max_results=None):
+ max_results=None,
+ dry_run=False):
"""
Describes Reserved Instance offerings that are available for purchase.
@@ -2802,6 +3340,9 @@ class EC2Connection(AWSQueryConnection):
:type max_results: int
:param max_results: Maximum number of offerings to return per call.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: list
:return: A list of
:class:`boto.ec2.reservedinstance.ReservedInstancesOffering`.
@@ -2838,13 +3379,15 @@ class EC2Connection(AWSQueryConnection):
params['NextToken'] = next_token
if max_results is not None:
params['MaxResults'] = str(max_results)
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_list('DescribeReservedInstancesOfferings',
params, [('item', ReservedInstancesOffering)],
verb='POST')
def get_all_reserved_instances(self, reserved_instances_id=None,
- filters=None):
+ filters=None, dry_run=False):
"""
Describes one or more of the Reserved Instances that you purchased.
@@ -2861,6 +3404,9 @@ class EC2Connection(AWSQueryConnection):
names/values is dependent on the request being performed.
Check the EC2 API guide for details.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: list
:return: A list of :class:`boto.ec2.reservedinstance.ReservedInstance`
"""
@@ -2870,12 +3416,15 @@ class EC2Connection(AWSQueryConnection):
'ReservedInstancesId')
if filters:
self.build_filter_params(params, filters)
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_list('DescribeReservedInstances',
params, [('item', ReservedInstance)], verb='POST')
def purchase_reserved_instance_offering(self,
reserved_instances_offering_id,
- instance_count=1, limit_price=None):
+ instance_count=1, limit_price=None,
+ dry_run=False):
"""
Purchase a Reserved Instance for use with your account.
** CAUTION **
@@ -2895,6 +3444,9 @@ class EC2Connection(AWSQueryConnection):
Must be a tuple of (amount, currency_code), for example:
(100.0, 'USD').
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: :class:`boto.ec2.reservedinstance.ReservedInstance`
:return: The newly created Reserved Instance
"""
@@ -2904,11 +3456,14 @@ class EC2Connection(AWSQueryConnection):
if limit_price is not None:
params['LimitPrice.Amount'] = str(limit_price[0])
params['LimitPrice.CurrencyCode'] = str(limit_price[1])
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_object('PurchaseReservedInstancesOffering', params,
ReservedInstance, verb='POST')
- def create_reserved_instances_listing(self, reserved_instances_id, instance_count,
- price_schedules, client_token):
+ def create_reserved_instances_listing(self, reserved_instances_id,
+ instance_count, price_schedules,
+ client_token, dry_run=False):
"""Creates a new listing for Reserved Instances.
Creates a new listing for Amazon EC2 Reserved Instances that will be
@@ -2954,6 +3509,9 @@ class EC2Connection(AWSQueryConnection):
:param client_token: Unique, case-sensitive identifier you provide
to ensure idempotency of the request. Maximum 64 ASCII characters.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: list
:return: A list of
:class:`boto.ec2.reservedinstance.ReservedInstanceListing`
@@ -2968,17 +3526,23 @@ class EC2Connection(AWSQueryConnection):
price, term = schedule
params['PriceSchedules.%s.Price' % i] = str(price)
params['PriceSchedules.%s.Term' % i] = str(term)
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_list('CreateReservedInstancesListing',
params, [('item', ReservedInstanceListing)], verb='POST')
- def cancel_reserved_instances_listing(
- self, reserved_instances_listing_ids=None):
+ def cancel_reserved_instances_listing(self,
+ reserved_instances_listing_ids=None,
+ dry_run=False):
"""Cancels the specified Reserved Instance listing.
:type reserved_instances_listing_ids: List of strings
:param reserved_instances_listing_ids: The ID of the
Reserved Instance listing to be cancelled.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: list
:return: A list of
:class:`boto.ec2.reservedinstance.ReservedInstanceListing`
@@ -2988,29 +3552,120 @@ class EC2Connection(AWSQueryConnection):
if reserved_instances_listing_ids is not None:
self.build_list_params(params, reserved_instances_listing_ids,
'ReservedInstancesListingId')
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_list('CancelReservedInstancesListing',
params, [('item', ReservedInstanceListing)], verb='POST')
+ def build_configurations_param_list(self, params, target_configurations):
+ for offset, tc in enumerate(target_configurations):
+ prefix = 'ReservedInstancesConfigurationSetItemType.%d.' % offset
+ if tc.availability_zone is not None:
+ params[prefix + 'AvailabilityZone'] = tc.availability_zone
+ if tc.platform is not None:
+ params[prefix + 'Platform'] = tc.platform
+ if tc.instance_count is not None:
+ params[prefix + 'InstanceCount'] = tc.instance_count
+
+ def modify_reserved_instances(self, client_token, reserved_instance_ids,
+ target_configurations):
+ """
+ Modifies the specified Reserved Instances.
+
+ :type client_token: string
+ :param client_token: A unique, case-sensitive, token you provide to
+ ensure idempotency of your modification request.
+
+ :type reserved_instance_ids: List of strings
+ :param reserved_instance_ids: The IDs of the Reserved Instances to
+ modify.
+
+ :type target_configurations: List of :class:`boto.ec2.reservedinstance.ReservedInstancesConfiguration`
+ :param target_configurations: The configuration settings for the
+ modified Reserved Instances.
+
+ :rtype: string
+ :return: The unique ID for the submitted modification request.
+ """
+ params = {
+ 'ClientToken': client_token,
+ }
+ if reserved_instance_ids is not None:
+ self.build_list_params(params, reserved_instance_ids,
+ 'ReservedInstancesId')
+ if target_configurations is not None:
+ self.build_configurations_param_list(params, target_configurations)
+ mrir = self.get_object(
+ 'ModifyReservedInstances',
+ params,
+ ModifyReservedInstancesResult,
+ verb='POST'
+ )
+ return mrir.modification_id
+
+ def describe_reserved_instances_modifications(self,
+ reserved_instances_modification_ids=None, next_token=None,
+ filters=None):
+ """
+ A request to describe the modifications made to Reserved Instances in
+ your account.
+
+ :type reserved_instances_modification_ids: list
+ :param reserved_instances_modification_ids: An optional list of
+ Reserved Instances modification IDs to describe.
+
+ :type next_token: str
+ :param next_token: A string specifying the next paginated set
+ of results to return.
+
+ :type filters: dict
+ :param filters: Optional filters that can be used to limit the
+ results returned. Filters are provided in the form of a
+ dictionary consisting of filter names as the key and
+ filter values as the value. The set of allowable filter
+ names/values is dependent on the request being performed.
+ Check the EC2 API guide for details.
+
+ :rtype: list
+ :return: A list of :class:`boto.ec2.reservedinstance.ReservedInstancesModification`
+ """
+ params = {}
+ if reserved_instances_modification_ids:
+ self.build_list_params(params, reserved_instances_modification_ids,
+ 'ReservedInstancesModificationId')
+ if next_token:
+ params['NextToken'] = next_token
+ if filters:
+ self.build_filter_params(params, filters)
+ return self.get_list('DescribeReservedInstancesModifications',
+ params, [('item', ReservedInstancesModification)],
+ verb='POST')
+
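For reference, a minimal sketch of how the new Reserved Instance modification calls fit together; the region, client token, reservation ID and target configuration below are made-up placeholders, not values taken from this change:

    import boto.ec2
    from boto.ec2.reservedinstance import ReservedInstancesConfiguration

    conn = boto.ec2.connect_to_region('us-east-1')

    # Describe the desired layout for the reservation being modified.
    target = ReservedInstancesConfiguration(
        availability_zone='us-east-1b',
        platform='EC2-Classic',
        instance_count=1,
        instance_type='m1.small',
    )

    # Submit the modification and keep the returned modification ID.
    modification_id = conn.modify_reserved_instances(
        client_token='example-client-token',
        reserved_instance_ids=['11111111-2222-3333-4444-555555555555'],
        target_configurations=[target],
    )

    # Poll the modification until it leaves the 'processing' state.
    for mod in conn.describe_reserved_instances_modifications(
            reserved_instances_modification_ids=[modification_id]):
        print mod.modification_id, mod.status, mod.status_message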
#
# Monitoring
#
- def monitor_instances(self, instance_ids):
+ def monitor_instances(self, instance_ids, dry_run=False):
"""
Enable CloudWatch monitoring for the supplied instances.
:type instance_ids: list of strings
:param instance_ids: The instance ids
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: list
:return: A list of :class:`boto.ec2.instanceinfo.InstanceInfo`
"""
params = {}
self.build_list_params(params, instance_ids, 'InstanceId')
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_list('MonitorInstances', params,
[('item', InstanceInfo)], verb='POST')
- def monitor_instance(self, instance_id):
+ def monitor_instance(self, instance_id, dry_run=False):
"""
Deprecated Version, maintained for backward compatibility.
Enable CloudWatch monitoring for the supplied instance.
@@ -3018,27 +3673,35 @@ class EC2Connection(AWSQueryConnection):
:type instance_id: string
:param instance_id: The instance id
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: list
:return: A list of :class:`boto.ec2.instanceinfo.InstanceInfo`
"""
- return self.monitor_instances([instance_id])
+ return self.monitor_instances([instance_id], dry_run=dry_run)
- def unmonitor_instances(self, instance_ids):
+ def unmonitor_instances(self, instance_ids, dry_run=False):
"""
Disable CloudWatch monitoring for the supplied instances.
:type instance_ids: list of strings
:param instance_ids: The instance ids
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: list
:return: A list of :class:`boto.ec2.instanceinfo.InstanceInfo`
"""
params = {}
self.build_list_params(params, instance_ids, 'InstanceId')
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_list('UnmonitorInstances', params,
[('item', InstanceInfo)], verb='POST')
- def unmonitor_instance(self, instance_id):
+ def unmonitor_instance(self, instance_id, dry_run=False):
"""
Deprecated Version, maintained for backward compatibility.
Disable CloudWatch monitoring for the supplied instance.
@@ -3046,10 +3709,13 @@ class EC2Connection(AWSQueryConnection):
:type instance_id: string
:param instance_id: The instance id
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: list
:return: A list of :class:`boto.ec2.instanceinfo.InstanceInfo`
"""
- return self.unmonitor_instances([instance_id])
+ return self.unmonitor_instances([instance_id], dry_run=dry_run)
#
# Bundle Windows Instances
@@ -3058,7 +3724,7 @@ class EC2Connection(AWSQueryConnection):
def bundle_instance(self, instance_id,
s3_bucket,
s3_prefix,
- s3_upload_policy):
+ s3_upload_policy, dry_run=False):
"""
Bundle Windows instance.
@@ -3075,6 +3741,10 @@ class EC2Connection(AWSQueryConnection):
:param s3_upload_policy: Base64 encoded policy that specifies condition
and permissions for Amazon EC2 to upload the
user's image into Amazon S3.
+
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
"""
params = {'InstanceId': instance_id,
@@ -3086,10 +3756,13 @@ class EC2Connection(AWSQueryConnection):
params['Storage.S3.AWSAccessKeyId'] = self.aws_access_key_id
signature = s3auth.sign_string(s3_upload_policy)
params['Storage.S3.UploadPolicySignature'] = signature
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_object('BundleInstance', params,
BundleInstanceTask, verb='POST')
- def get_all_bundle_tasks(self, bundle_ids=None, filters=None):
+ def get_all_bundle_tasks(self, bundle_ids=None, filters=None,
+ dry_run=False):
"""
Retrieve current bundling tasks. If no bundle id is specified, all
tasks are retrieved.
@@ -3108,38 +3781,52 @@ class EC2Connection(AWSQueryConnection):
being performed. Check the EC2 API guide
for details.
- """
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+ """
params = {}
if bundle_ids:
self.build_list_params(params, bundle_ids, 'BundleId')
if filters:
self.build_filter_params(params, filters)
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_list('DescribeBundleTasks', params,
[('item', BundleInstanceTask)], verb='POST')
- def cancel_bundle_task(self, bundle_id):
+ def cancel_bundle_task(self, bundle_id, dry_run=False):
"""
Cancel a previously submitted bundle task
:type bundle_id: string
:param bundle_id: The identifier of the bundle task to cancel.
- """
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
+ """
params = {'BundleId': bundle_id}
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_object('CancelBundleTask', params,
BundleInstanceTask, verb='POST')
- def get_password_data(self, instance_id):
+ def get_password_data(self, instance_id, dry_run=False):
"""
Get encrypted administrator password for a Windows instance.
:type instance_id: string
:param instance_id: The identifier of the instance to retrieve the
password for.
- """
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
+ """
params = {'InstanceId': instance_id}
+ if dry_run:
+ params['DryRun'] = 'true'
rs = self.get_object('GetPasswordData', params, ResultSet, verb='POST')
return rs.passwordData
@@ -3147,7 +3834,8 @@ class EC2Connection(AWSQueryConnection):
# Cluster Placement Groups
#
- def get_all_placement_groups(self, groupnames=None, filters=None):
+ def get_all_placement_groups(self, groupnames=None, filters=None,
+ dry_run=False):
"""
Get all placement groups associated with your account in a region.
@@ -3166,6 +3854,9 @@ class EC2Connection(AWSQueryConnection):
being performed. Check the EC2 API guide
for details.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: list
:return: A list of :class:`boto.ec2.placementgroup.PlacementGroup`
"""
@@ -3174,10 +3865,12 @@ class EC2Connection(AWSQueryConnection):
self.build_list_params(params, groupnames, 'GroupName')
if filters:
self.build_filter_params(params, filters)
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_list('DescribePlacementGroups', params,
[('item', PlacementGroup)], verb='POST')
- def create_placement_group(self, name, strategy='cluster'):
+ def create_placement_group(self, name, strategy='cluster', dry_run=False):
"""
Create a new placement group for your account.
This will create the placement group within the region you
@@ -3190,21 +3883,32 @@ class EC2Connection(AWSQueryConnection):
:param strategy: The placement strategy of the new placement group.
Currently, the only acceptable value is "cluster".
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: bool
:return: True if successful
"""
params = {'GroupName':name, 'Strategy':strategy}
+ if dry_run:
+ params['DryRun'] = 'true'
group = self.get_status('CreatePlacementGroup', params, verb='POST')
return group
- def delete_placement_group(self, name):
+ def delete_placement_group(self, name, dry_run=False):
"""
Delete a placement group from your account.
:type name: string
:param name: The name of the placement group to delete.
+
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
"""
params = {'GroupName':name}
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_status('DeletePlacementGroup', params, verb='POST')
# Tag methods
@@ -3219,7 +3923,7 @@ class EC2Connection(AWSQueryConnection):
params['Tag.%d.Value'%i] = value
i += 1
- def get_all_tags(self, filters=None):
+ def get_all_tags(self, filters=None, dry_run=False):
"""
Retrieve all the metadata tags associated with your account.
@@ -3233,16 +3937,21 @@ class EC2Connection(AWSQueryConnection):
being performed. Check the EC2 API guide
for details.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: list
:return: A list of :class:`boto.ec2.tag.Tag` objects
"""
params = {}
if filters:
self.build_filter_params(params, filters)
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_list('DescribeTags', params,
[('item', Tag)], verb='POST')
- def create_tags(self, resource_ids, tags):
+ def create_tags(self, resource_ids, tags, dry_run=False):
"""
Create new metadata tags for the specified resource ids.
@@ -3255,13 +3964,18 @@ class EC2Connection(AWSQueryConnection):
value for that tag should be the empty string
(e.g. '').
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
"""
params = {}
self.build_list_params(params, resource_ids, 'ResourceId')
self.build_tag_param_list(params, tags)
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_status('CreateTags', params, verb='POST')
- def delete_tags(self, resource_ids, tags):
+ def delete_tags(self, resource_ids, tags, dry_run=False):
"""
Delete metadata tags for the specified resource ids.
@@ -3277,17 +3991,22 @@ class EC2Connection(AWSQueryConnection):
for the tag value, all tags with that name will
be deleted.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
"""
if isinstance(tags, list):
tags = {}.fromkeys(tags, None)
params = {}
self.build_list_params(params, resource_ids, 'ResourceId')
self.build_tag_param_list(params, tags)
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_status('DeleteTags', params, verb='POST')
# Network Interface methods
- def get_all_network_interfaces(self, filters=None):
+ def get_all_network_interfaces(self, filters=None, dry_run=False):
"""
Retrieve all of the Elastic Network Interfaces (ENIs)
associated with your account.
@@ -3302,17 +4021,22 @@ class EC2Connection(AWSQueryConnection):
being performed. Check the EC2 API guide
for details.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: list
:return: A list of :class:`boto.ec2.networkinterface.NetworkInterface`
"""
params = {}
if filters:
self.build_filter_params(params, filters)
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_list('DescribeNetworkInterfaces', params,
[('item', NetworkInterface)], verb='POST')
def create_network_interface(self, subnet_id, private_ip_address=None,
- description=None, groups=None):
+ description=None, groups=None, dry_run=False):
"""
Creates a network interface in the specified subnet.
@@ -3333,6 +4057,9 @@ class EC2Connection(AWSQueryConnection):
This can be either a list of group ID's or a list of
:class:`boto.ec2.securitygroup.SecurityGroup` objects.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: :class:`boto.ec2.networkinterface.NetworkInterface`
:return: The newly created network interface.
"""
@@ -3349,11 +4076,13 @@ class EC2Connection(AWSQueryConnection):
else:
ids.append(group)
self.build_list_params(params, ids, 'SecurityGroupId')
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_object('CreateNetworkInterface', params,
NetworkInterface, verb='POST')
def attach_network_interface(self, network_interface_id,
- instance_id, device_index):
+ instance_id, device_index, dry_run=False):
"""
Attaches a network interface to an instance.
@@ -3367,13 +4096,20 @@ class EC2Connection(AWSQueryConnection):
:type device_index: int
:param device_index: The index of the device for the network
interface attachment on the instance.
+
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
"""
params = {'NetworkInterfaceId': network_interface_id,
'InstanceId': instance_id,
'DeviceIndex': device_index}
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_status('AttachNetworkInterface', params, verb='POST')
- def detach_network_interface(self, attachment_id, force=False):
+ def detach_network_interface(self, attachment_id, force=False,
+ dry_run=False):
"""
Detaches a network interface from an instance.
@@ -3383,21 +4119,31 @@ class EC2Connection(AWSQueryConnection):
:type force: bool
:param force: Set to true to force a detachment.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
"""
params = {'AttachmentId': attachment_id}
if force:
params['Force'] = 'true'
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_status('DetachNetworkInterface', params, verb='POST')
- def delete_network_interface(self, network_interface_id):
+ def delete_network_interface(self, network_interface_id, dry_run=False):
"""
Delete the specified network interface.
:type network_interface_id: str
:param network_interface_id: The ID of the network interface to delete.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
"""
params = {'NetworkInterfaceId': network_interface_id}
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_status('DeleteNetworkInterface', params, verb='POST')
def get_all_vmtypes(self):
@@ -3410,40 +4156,65 @@ class EC2Connection(AWSQueryConnection):
params = {}
return self.get_list('DescribeVmTypes', params, [('euca:item', VmType)], verb='POST')
- def copy_image(self, source_region, source_image_id, name,
- description=None, client_token=None):
+ def copy_image(self, source_region, source_image_id, name=None,
+ description=None, client_token=None, dry_run=False):
+ """
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
+ """
params = {
'SourceRegion': source_region,
'SourceImageId': source_image_id,
- 'Name': name
}
+ if name is not None:
+ params['Name'] = name
if description is not None:
params['Description'] = description
if client_token is not None:
params['ClientToken'] = client_token
- image = self.get_object('CopyImage', params, CopyImage,
+ if dry_run:
+ params['DryRun'] = 'true'
+ return self.get_object('CopyImage', params, CopyImage,
verb='POST')
- return image
- def describe_account_attributes(self, attribute_names=None):
+ def describe_account_attributes(self, attribute_names=None, dry_run=False):
+ """
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
+ """
params = {}
if attribute_names is not None:
self.build_list_params(params, attribute_names, 'AttributeName')
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_list('DescribeAccountAttributes', params,
[('item', AccountAttribute)], verb='POST')
- def describe_vpc_attribute(self, vpc_id, attribute=None):
+ def describe_vpc_attribute(self, vpc_id, attribute=None, dry_run=False):
+ """
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
+ """
params = {
'VpcId': vpc_id
}
if attribute is not None:
params['Attribute'] = attribute
- attr = self.get_object('DescribeVpcAttribute', params,
+ if dry_run:
+ params['DryRun'] = 'true'
+ return self.get_object('DescribeVpcAttribute', params,
VPCAttribute, verb='POST')
- return attr
def modify_vpc_attribute(self, vpc_id, enable_dns_support=None,
- enable_dns_hostnames=None):
+ enable_dns_hostnames=None, dry_run=False):
+ """
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
+ """
params = {
'VpcId': vpc_id
}
@@ -3453,5 +4224,6 @@ class EC2Connection(AWSQueryConnection):
if enable_dns_hostnames is not None:
params['EnableDnsHostnames.Value'] = (
'true' if enable_dns_hostnames else 'false')
- result = self.get_status('ModifyVpcAttribute', params, verb='POST')
- return result
+ if dry_run:
+ params['DryRun'] = 'true'
+ return self.get_status('ModifyVpcAttribute', params, verb='POST')
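All of the dry_run additions above follow the same convention: the flag only adds DryRun=true to the request, and EC2 then answers with an error response (typically DryRunOperation or UnauthorizedOperation) instead of performing the action, which boto surfaces as an EC2ResponseError. A usage sketch, assuming that behaviour and using a placeholder instance ID:

    import boto.ec2
    from boto.exception import EC2ResponseError

    conn = boto.ec2.connect_to_region('us-east-1')

    try:
        # Nothing is tagged here; EC2 only reports whether the call would work.
        conn.create_tags(['i-00000000'], {'env': 'staging'}, dry_run=True)
    except EC2ResponseError, e:
        if e.error_code == 'DryRunOperation':
            print 'create_tags would have succeeded'
        elif e.error_code == 'UnauthorizedOperation':
            print 'create_tags would have been denied'
        else:
            raise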
diff --git a/boto/ec2/ec2object.py b/boto/ec2/ec2object.py
index 7756bee7..265678c6 100644
--- a/boto/ec2/ec2object.py
+++ b/boto/ec2/ec2object.py
@@ -15,7 +15,7 @@
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
-# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
@@ -40,7 +40,7 @@ class EC2Object(object):
def endElement(self, name, value, connection):
setattr(self, name, value)
-
+
class TaggedEC2Object(EC2Object):
"""
Any EC2 resource that can be tagged should be represented
@@ -62,7 +62,7 @@ class TaggedEC2Object(EC2Object):
else:
return None
- def add_tag(self, key, value=''):
+ def add_tag(self, key, value='', dry_run=False):
"""
Add a tag to this object. Tags are stored by AWS and can be used
to organize and filter resources. Adding a tag involves a round-trip
@@ -76,12 +76,16 @@ class TaggedEC2Object(EC2Object):
If you want only the tag name and no value, the
value should be the empty string.
"""
- status = self.connection.create_tags([self.id], {key : value})
+ status = self.connection.create_tags(
+ [self.id],
+ {key : value},
+ dry_run=dry_run
+ )
if self.tags is None:
self.tags = TagSet()
self.tags[key] = value
- def remove_tag(self, key, value=None):
+ def remove_tag(self, key, value=None, dry_run=False):
"""
Remove a tag from this object. Removing a tag involves a round-trip
to the EC2 service.
@@ -102,6 +106,10 @@ class TaggedEC2Object(EC2Object):
tags = {key : value}
else:
tags = [key]
- status = self.connection.delete_tags([self.id], tags)
+ status = self.connection.delete_tags(
+ [self.id],
+ tags,
+ dry_run=dry_run
+ )
if key in self.tags:
del self.tags[key]
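The object-level helpers simply forward the new keyword to create_tags/delete_tags, so tagging through a resource object is unchanged; a short sketch with a placeholder volume ID:

    import boto.ec2

    conn = boto.ec2.connect_to_region('us-east-1')
    vol = conn.get_all_volumes(['vol-00000000'])[0]

    vol.add_tag('backup', 'nightly')   # round-trips to CreateTags
    vol.remove_tag('backup')           # round-trips to DeleteTags
    # Either call also accepts dry_run=True, in which case the usual
    # dry-run error response is raised instead of changing anything.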
diff --git a/boto/ec2/elb/__init__.py b/boto/ec2/elb/__init__.py
index ed9aaeaa..be949052 100644
--- a/boto/ec2/elb/__init__.py
+++ b/boto/ec2/elb/__init__.py
@@ -36,6 +36,7 @@ import boto
RegionData = {
'us-east-1': 'elasticloadbalancing.us-east-1.amazonaws.com',
+ 'us-gov-west-1': 'elasticloadbalancing.us-gov-west-1.amazonaws.com',
'us-west-1': 'elasticloadbalancing.us-west-1.amazonaws.com',
'us-west-2': 'elasticloadbalancing.us-west-2.amazonaws.com',
'sa-east-1': 'elasticloadbalancing.sa-east-1.amazonaws.com',
@@ -87,7 +88,7 @@ class ELBConnection(AWSQueryConnection):
'elasticloadbalancing.us-east-1.amazonaws.com')
def __init__(self, aws_access_key_id=None, aws_secret_access_key=None,
- is_secure=False, port=None, proxy=None, proxy_port=None,
+ is_secure=True, port=None, proxy=None, proxy_port=None,
proxy_user=None, proxy_pass=None, debug=0,
https_connection_factory=None, region=None, path='/',
security_token=None, validate_certs=True):
@@ -535,6 +536,23 @@ class ELBConnection(AWSQueryConnection):
params['CookieExpirationPeriod'] = cookie_expiration_period
return self.get_status('CreateLBCookieStickinessPolicy', params)
+ def create_lb_policy(self, lb_name, policy_name, policy_type, policy_attributes):
+ """
+ Creates a new policy that contains the necessary attributes depending on
+ the policy type. Policies are settings that are saved for your load
+ balancer and that can be applied to the front-end listener, or
+ the back-end application server.
+ """
+ params = {'LoadBalancerName': lb_name,
+ 'PolicyName': policy_name,
+ 'PolicyTypeName': policy_type}
+ if policy_attributes:
+ for index, (name, value) in enumerate(policy_attributes.iteritems(), 1):
+ params['PolicyAttributes.member.%d.AttributeName' % index] = name
+ params['PolicyAttributes.member.%d.AttributeValue' % index] = value
+ else:
+ params['PolicyAttributes'] = ''
+ return self.get_status('CreateLoadBalancerPolicy', params)
+
def delete_lb_policy(self, lb_name, policy_name):
"""
Deletes a policy from the LoadBalancer. The specified policy must not
@@ -555,6 +573,19 @@ class ELBConnection(AWSQueryConnection):
self.build_list_params(params, policies, 'PolicyNames.member.%d')
return self.get_status('SetLoadBalancerPoliciesOfListener', params)
+ def set_lb_policies_of_backend_server(self, lb_name, instance_port, policies):
+ """
+ Replaces the current set of policies associated with a port on which
+ the back-end server is listening with a new set of policies.
+ """
+ params = {'LoadBalancerName': lb_name,
+ 'InstancePort': instance_port}
+ if policies:
+ self.build_list_params(params, policies, 'PolicyNames.member.%d')
+ else:
+ params['PolicyNames'] = ''
+ return self.get_status('SetLoadBalancerPoliciesForBackendServer', params)
+
def apply_security_groups_to_lb(self, name, security_groups):
"""
Applies security groups to the load balancer.
@@ -616,5 +647,5 @@ class ELBConnection(AWSQueryConnection):
params = {'LoadBalancerName': name}
self.build_list_params(params, subnets,
'Subnets.member.%d')
- return self.get_list('DettachLoadBalancerFromSubnets',
+ return self.get_list('DetachLoadBalancerFromSubnets',
params, None)
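The two new ELB calls are typically paired, for example to turn on proxy protocol for a back-end port; a sketch under the assumption of a load balancer named 'my-lb' with back ends on port 80:

    import boto.ec2.elb

    elb = boto.ec2.elb.connect_to_region('us-east-1')

    # Create a named policy of the ProxyProtocolPolicyType...
    elb.create_lb_policy('my-lb', 'EnableProxyProtocol',
                         'ProxyProtocolPolicyType', {'ProxyProtocol': 'true'})

    # ...and replace the policies on the back-end port with it.
    elb.set_lb_policies_of_backend_server('my-lb', 80, ['EnableProxyProtocol'])

    # Passing an empty list clears the back-end policies for that port again.
    elb.set_lb_policies_of_backend_server('my-lb', 80, [])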
diff --git a/boto/ec2/elb/loadbalancer.py b/boto/ec2/elb/loadbalancer.py
index 7b6afc7d..fde9ac1f 100644
--- a/boto/ec2/elb/loadbalancer.py
+++ b/boto/ec2/elb/loadbalancer.py
@@ -23,12 +23,34 @@
from boto.ec2.elb.healthcheck import HealthCheck
from boto.ec2.elb.listener import Listener
from boto.ec2.elb.listelement import ListElement
-from boto.ec2.elb.policies import Policies
+from boto.ec2.elb.policies import Policies, OtherPolicy
from boto.ec2.elb.securitygroup import SecurityGroup
from boto.ec2.instanceinfo import InstanceInfo
from boto.resultset import ResultSet
+class Backend(object):
+ """Backend server description"""
+
+ def __init__(self, connection=None):
+ self.connection = connection
+ self.instance_port = None
+ self.policies = None
+
+ def __repr__(self):
+ return 'Backend(%r:%r)' % (self.instance_port, self.policies)
+
+ def startElement(self, name, attrs, connection):
+ if name == 'PolicyNames':
+ self.policies = ResultSet([('member', OtherPolicy)])
+ return self.policies
+
+ def endElement(self, name, value, connection):
+ if name == 'InstancePort':
+ self.instance_port = int(value)
+ return
+
+
class LoadBalancerZones(object):
"""
Used to collect the zones for a Load Balancer when enable_zones
@@ -80,6 +102,8 @@ class LoadBalancer(object):
:ivar list security_groups: A list of additional security groups that
have been applied.
:ivar str vpc_id: The ID of the VPC that this ELB resides within.
+ :ivar list backends: A list of :py:class:`boto.ec2.elb.loadbalancer.Backend`
+ back-end server descriptions.
"""
self.connection = connection
self.name = name
@@ -97,6 +121,7 @@ class LoadBalancer(object):
self.security_groups = ListElement()
self.vpc_id = None
self.scheme = None
+ self.backends = None
def __repr__(self):
return 'LoadBalancer:%s' % self.name
@@ -125,6 +150,9 @@ class LoadBalancer(object):
return self.security_groups
elif name == 'VPCId':
pass
+ elif name == "BackendServerDescriptions":
+ self.backends = ResultSet([('member', Backend)])
+ return self.backends
else:
return None
@@ -266,6 +294,12 @@ class LoadBalancer(object):
lb_port,
policies)
+ def set_policies_of_backend_server(self, instance_port, policies):
+ return self.connection.set_lb_policies_of_backend_server(self.name,
+ instance_port,
+ policies)
+
+
def create_cookie_stickiness_policy(self, cookie_expiration_period,
policy_name):
return self.connection.create_lb_cookie_stickiness_policy(cookie_expiration_period, self.name, policy_name)
@@ -280,6 +314,9 @@ class LoadBalancer(object):
lb_port,
ssl_certificate_id)
+ def create_lb_policy(self, policy_name, policy_type, policy_attribute):
+ return self.connection.create_lb_policy(self.name, policy_name, policy_type, policy_attribute)
+
def attach_subnets(self, subnets):
"""
Attaches load balancer to one or more subnets.
@@ -305,7 +342,7 @@ class LoadBalancer(object):
"""
if isinstance(subnets, str) or isinstance(subnets, unicode):
subnets = [subnets]
- new_subnets = self.connection.detach_lb_to_subnets(self.name, subnets)
+ new_subnets = self.connection.detach_lb_from_subnets(self.name, subnets)
self.subnets = new_subnets
def apply_security_groups(self, security_groups):
diff --git a/boto/ec2/elb/policies.py b/boto/ec2/elb/policies.py
index c25a51fa..faea1c78 100644
--- a/boto/ec2/elb/policies.py
+++ b/boto/ec2/elb/policies.py
@@ -60,6 +60,20 @@ class LBCookieStickinessPolicy(object):
self.policy_name = value
+class OtherPolicy(object):
+ def __init__(self, connection=None):
+ self.policy_name = None
+
+ def __repr__(self):
+ return 'OtherPolicy(%s)' % (self.policy_name)
+
+ def startElement(self, name, attrs, connection):
+ pass
+
+ def endElement(self, name, value, connection):
+ self.policy_name = value
+
+
class Policies(object):
"""
ELB Policies
@@ -68,11 +82,13 @@ class Policies(object):
self.connection = connection
self.app_cookie_stickiness_policies = None
self.lb_cookie_stickiness_policies = None
+ self.other_policies = None
def __repr__(self):
app = 'AppCookieStickiness%s' % self.app_cookie_stickiness_policies
lb = 'LBCookieStickiness%s' % self.lb_cookie_stickiness_policies
- return 'Policies(%s,%s)' % (app, lb)
+ other = 'Other%s' % self.other_policies
+ return 'Policies(%s,%s,%s)' % (app, lb, other)
def startElement(self, name, attrs, connection):
if name == 'AppCookieStickinessPolicies':
@@ -83,6 +99,10 @@ class Policies(object):
rs = ResultSet([('member', LBCookieStickinessPolicy)])
self.lb_cookie_stickiness_policies = rs
return rs
+ elif name == 'OtherPolicies':
+ rs = ResultSet([('member', OtherPolicy)])
+ self.other_policies = rs
+ return rs
def endElement(self, name, value, connection):
return
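With OtherPolicies now parsed, custom policies such as the one created in the earlier sketch become visible on a described load balancer; for instance (same placeholder name):

    import boto.ec2.elb

    elb = boto.ec2.elb.connect_to_region('us-east-1')
    lb = elb.get_all_load_balancers(['my-lb'])[0]

    # Stickiness policies keep their own lists; everything else lands here.
    for policy in lb.policies.other_policies or []:
        print policy.policy_name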
diff --git a/boto/ec2/image.py b/boto/ec2/image.py
index 376fc869..3e4e3171 100644
--- a/boto/ec2/image.py
+++ b/boto/ec2/image.py
@@ -32,6 +32,15 @@ class ProductCodes(list):
if name == 'productCode':
self.append(value)
+class BillingProducts(list):
+
+ def startElement(self, name, attrs, connection):
+ pass
+
+ def endElement(self, name, value, connection):
+ if name == 'billingProduct':
+ self.append(value)
+
class Image(TaggedEC2Object):
"""
Represents an EC2 Image
@@ -54,6 +63,7 @@ class Image(TaggedEC2Object):
self.name = None
self.description = None
self.product_codes = ProductCodes()
+ self.billing_products = BillingProducts()
self.block_device_mapping = None
self.root_device_type = None
self.root_device_name = None
@@ -73,6 +83,8 @@ class Image(TaggedEC2Object):
return self.block_device_mapping
elif name == 'productCodes':
return self.product_codes
+ elif name == 'billingProducts':
+ return self.billing_products
else:
return None
@@ -130,7 +142,7 @@ class Image(TaggedEC2Object):
def _update(self, updated):
self.__dict__.update(updated.__dict__)
- def update(self, validate=False):
+ def update(self, validate=False, dry_run=False):
"""
Update the image's state information by making a call to fetch
the current image attributes from the service.
@@ -142,7 +154,7 @@ class Image(TaggedEC2Object):
raise a ValueError exception if no data is
returned from EC2.
"""
- rs = self.connection.get_all_images([self.id])
+ rs = self.connection.get_all_images([self.id], dry_run=dry_run)
if len(rs) > 0:
img = rs[0]
if img.id == self.id:
@@ -162,7 +174,7 @@ class Image(TaggedEC2Object):
private_ip_address=None,
placement_group=None, security_group_ids=None,
additional_info=None, instance_profile_name=None,
- instance_profile_arn=None, tenancy=None):
+ instance_profile_arn=None, tenancy=None, dry_run=False):
"""
Runs this instance.
@@ -295,40 +307,62 @@ class Image(TaggedEC2Object):
additional_info=additional_info,
instance_profile_name=instance_profile_name,
instance_profile_arn=instance_profile_arn,
- tenancy=tenancy)
-
- def deregister(self, delete_snapshot=False):
- return self.connection.deregister_image(self.id, delete_snapshot)
-
- def get_launch_permissions(self):
- img_attrs = self.connection.get_image_attribute(self.id,
- 'launchPermission')
+ tenancy=tenancy, dry_run=dry_run)
+
+ def deregister(self, delete_snapshot=False, dry_run=False):
+ return self.connection.deregister_image(
+ self.id,
+ delete_snapshot,
+ dry_run=dry_run
+ )
+
+ def get_launch_permissions(self, dry_run=False):
+ img_attrs = self.connection.get_image_attribute(
+ self.id,
+ 'launchPermission',
+ dry_run=dry_run
+ )
return img_attrs.attrs
- def set_launch_permissions(self, user_ids=None, group_names=None):
+ def set_launch_permissions(self, user_ids=None, group_names=None,
+ dry_run=False):
return self.connection.modify_image_attribute(self.id,
'launchPermission',
'add',
user_ids,
- group_names)
+ group_names,
+ dry_run=dry_run)
- def remove_launch_permissions(self, user_ids=None, group_names=None):
+ def remove_launch_permissions(self, user_ids=None, group_names=None,
+ dry_run=False):
return self.connection.modify_image_attribute(self.id,
'launchPermission',
'remove',
user_ids,
- group_names)
-
- def reset_launch_attributes(self):
- return self.connection.reset_image_attribute(self.id,
- 'launchPermission')
-
- def get_kernel(self):
- img_attrs =self.connection.get_image_attribute(self.id, 'kernel')
+ group_names,
+ dry_run=dry_run)
+
+ def reset_launch_attributes(self, dry_run=False):
+ return self.connection.reset_image_attribute(
+ self.id,
+ 'launchPermission',
+ dry_run=dry_run
+ )
+
+ def get_kernel(self, dry_run=False):
+ img_attrs = self.connection.get_image_attribute(
+ self.id,
+ 'kernel',
+ dry_run=dry_run
+ )
return img_attrs.kernel
- def get_ramdisk(self):
- img_attrs = self.connection.get_image_attribute(self.id, 'ramdisk')
+ def get_ramdisk(self, dry_run=False):
+ img_attrs = self.connection.get_image_attribute(
+ self.id,
+ 'ramdisk',
+ dry_run=dry_run
+ )
return img_attrs.ramdisk
class ImageAttribute:
diff --git a/boto/ec2/instance.py b/boto/ec2/instance.py
index 5be701f0..430647e8 100644
--- a/boto/ec2/instance.py
+++ b/boto/ec2/instance.py
@@ -149,9 +149,9 @@ class Reservation(EC2Object):
else:
setattr(self, name, value)
- def stop_all(self):
+ def stop_all(self, dry_run=False):
for instance in self.instances:
- instance.stop()
+ instance.stop(dry_run=dry_run)
class Instance(TaggedEC2Object):
@@ -340,14 +340,6 @@ class Instance(TaggedEC2Object):
self.ami_launch_index = value
elif name == 'previousState':
self.previous_state = value
- elif name == 'name':
- self.state = value
- elif name == 'code':
- try:
- self.state_code = int(value)
- except ValueError:
- boto.log.warning('Error converting code (%s) to int' % value)
- self.state_code = value
elif name == 'instanceType':
self.instance_type = value
elif name == 'rootDeviceName':
@@ -406,7 +398,7 @@ class Instance(TaggedEC2Object):
def _update(self, updated):
self.__dict__.update(updated.__dict__)
- def update(self, validate=False):
+ def update(self, validate=False, dry_run=False):
"""
Update the instance's state information by making a call to fetch
the current instance attributes from the service.
@@ -418,7 +410,7 @@ class Instance(TaggedEC2Object):
raise a ValueError exception if no data is
returned from EC2.
"""
- rs = self.connection.get_all_instances([self.id])
+ rs = self.connection.get_all_reservations([self.id], dry_run=dry_run)
if len(rs) > 0:
r = rs[0]
for i in r.instances:
@@ -428,15 +420,15 @@ class Instance(TaggedEC2Object):
raise ValueError('%s is not a valid Instance ID' % self.id)
return self.state
- def terminate(self):
+ def terminate(self, dry_run=False):
"""
Terminate the instance
"""
- rs = self.connection.terminate_instances([self.id])
+ rs = self.connection.terminate_instances([self.id], dry_run=dry_run)
if len(rs) > 0:
self._update(rs[0])
- def stop(self, force=False):
+ def stop(self, force=False, dry_run=False):
"""
Stop the instance
@@ -446,34 +438,38 @@ class Instance(TaggedEC2Object):
:rtype: list
:return: A list of the instances stopped
"""
- rs = self.connection.stop_instances([self.id], force)
+ rs = self.connection.stop_instances([self.id], force, dry_run=dry_run)
if len(rs) > 0:
self._update(rs[0])
- def start(self):
+ def start(self, dry_run=False):
"""
Start the instance.
"""
- rs = self.connection.start_instances([self.id])
+ rs = self.connection.start_instances([self.id], dry_run=dry_run)
if len(rs) > 0:
self._update(rs[0])
- def reboot(self):
- return self.connection.reboot_instances([self.id])
+ def reboot(self, dry_run=False):
+ return self.connection.reboot_instances([self.id], dry_run=dry_run)
- def get_console_output(self):
+ def get_console_output(self, dry_run=False):
"""
Retrieves the console output for the instance.
:rtype: :class:`boto.ec2.instance.ConsoleOutput`
:return: The console output as a ConsoleOutput object
"""
- return self.connection.get_console_output(self.id)
+ return self.connection.get_console_output(self.id, dry_run=dry_run)
- def confirm_product(self, product_code):
- return self.connection.confirm_product_instance(self.id, product_code)
+ def confirm_product(self, product_code, dry_run=False):
+ return self.connection.confirm_product_instance(
+ self.id,
+ product_code,
+ dry_run=dry_run
+ )
- def use_ip(self, ip_address):
+ def use_ip(self, ip_address, dry_run=False):
"""
Associates an Elastic IP to the instance.
@@ -488,15 +484,19 @@ class Instance(TaggedEC2Object):
if isinstance(ip_address, Address):
ip_address = ip_address.public_ip
- return self.connection.associate_address(self.id, ip_address)
+ return self.connection.associate_address(
+ self.id,
+ ip_address,
+ dry_run=dry_run
+ )
- def monitor(self):
- return self.connection.monitor_instance(self.id)
+ def monitor(self, dry_run=False):
+ return self.connection.monitor_instance(self.id, dry_run=dry_run)
- def unmonitor(self):
- return self.connection.unmonitor_instance(self.id)
+ def unmonitor(self, dry_run=False):
+ return self.connection.unmonitor_instance(self.id, dry_run=dry_run)
- def get_attribute(self, attribute):
+ def get_attribute(self, attribute, dry_run=False):
"""
Gets an attribute from this instance.
@@ -521,9 +521,13 @@ class Instance(TaggedEC2Object):
:return: An InstanceAttribute object representing the value of the
attribute requested
"""
- return self.connection.get_instance_attribute(self.id, attribute)
+ return self.connection.get_instance_attribute(
+ self.id,
+ attribute,
+ dry_run=dry_run
+ )
- def modify_attribute(self, attribute, value):
+ def modify_attribute(self, attribute, value, dry_run=False):
"""
Changes an attribute of this instance
@@ -546,10 +550,14 @@ class Instance(TaggedEC2Object):
:rtype: bool
:return: Whether the operation succeeded or not
"""
- return self.connection.modify_instance_attribute(self.id, attribute,
- value)
-
- def reset_attribute(self, attribute):
+ return self.connection.modify_instance_attribute(
+ self.id,
+ attribute,
+ value,
+ dry_run=dry_run
+ )
+
+ def reset_attribute(self, attribute, dry_run=False):
"""
Resets an attribute of this instance to its default value.
@@ -560,12 +568,14 @@ class Instance(TaggedEC2Object):
:rtype: bool
:return: Whether the operation succeeded or not
"""
- return self.connection.reset_instance_attribute(self.id, attribute)
-
- def create_image(
- self, name,
- description=None, no_reboot=False
- ):
+ return self.connection.reset_instance_attribute(
+ self.id,
+ attribute,
+ dry_run=dry_run
+ )
+
+ def create_image(self, name, description=None, no_reboot=False,
+ dry_run=False):
"""
Will create an AMI from the instance in the running or stopped
state.
@@ -587,7 +597,13 @@ class Instance(TaggedEC2Object):
:rtype: string
:return: The new image id
"""
- return self.connection.create_image(self.id, name, description, no_reboot)
+ return self.connection.create_image(
+ self.id,
+ name,
+ description,
+ no_reboot,
+ dry_run=dry_run
+ )
class ConsoleOutput:
diff --git a/boto/ec2/keypair.py b/boto/ec2/keypair.py
index 65c95908..c15a0984 100644
--- a/boto/ec2/keypair.py
+++ b/boto/ec2/keypair.py
@@ -14,7 +14,7 @@
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
-# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
@@ -28,7 +28,7 @@ from boto.ec2.ec2object import EC2Object
from boto.exception import BotoClientError
class KeyPair(EC2Object):
-
+
def __init__(self, connection=None):
EC2Object.__init__(self, connection)
self.name = None
@@ -48,20 +48,20 @@ class KeyPair(EC2Object):
else:
setattr(self, name, value)
- def delete(self):
+ def delete(self, dry_run=False):
"""
Delete the KeyPair.
-
+
:rtype: bool
:return: True if successful, otherwise False.
"""
- return self.connection.delete_key_pair(self.name)
+ return self.connection.delete_key_pair(self.name, dry_run=dry_run)
def save(self, directory_path):
"""
Save the material (the unencrypted PEM encoded RSA private key)
of a newly created KeyPair to a local file.
-
+
:type directory_path: string
:param directory_path: The fully qualified path to the directory
in which the keypair will be saved. The
@@ -71,7 +71,7 @@ class KeyPair(EC2Object):
name already exists in the directory, an
exception will be raised and the old file
will not be overwritten.
-
+
:rtype: bool
:return: True if successful.
"""
@@ -88,7 +88,7 @@ class KeyPair(EC2Object):
else:
raise BotoClientError('KeyPair contains no material')
- def copy_to_region(self, region):
+ def copy_to_region(self, region, dry_run=False):
"""
Create a new key pair of the same name in another region.
Note that the new key pair will use a different ssh
@@ -106,7 +106,7 @@ class KeyPair(EC2Object):
raise BotoClientError('Unable to copy to the same Region')
conn_params = self.connection.get_params()
rconn = region.connect(**conn_params)
- kp = rconn.create_key_pair(self.name)
+ kp = rconn.create_key_pair(self.name, dry_run=dry_run)
return kp
diff --git a/boto/ec2/networkinterface.py b/boto/ec2/networkinterface.py
index 5c6088f4..98368050 100644
--- a/boto/ec2/networkinterface.py
+++ b/boto/ec2/networkinterface.py
@@ -23,6 +23,7 @@
"""
Represents an EC2 Elastic Network Interface
"""
+from boto.exception import BotoClientError
from boto.ec2.ec2object import TaggedEC2Object
from boto.resultset import ResultSet
from boto.ec2.group import Group
@@ -59,6 +60,8 @@ class Attachment(object):
self.id = value
elif name == 'instanceId':
self.instance_id = value
+ elif name == 'deviceIndex':
+ self.device_index = int(value)
elif name == 'instanceOwnerId':
self.instance_owner_id = value
elif name == 'status':
@@ -163,8 +166,11 @@ class NetworkInterface(TaggedEC2Object):
else:
setattr(self, name, value)
- def delete(self):
- return self.connection.delete_network_interface(self.id)
+ def delete(self, dry_run=False):
+ return self.connection.delete_network_interface(
+ self.id,
+ dry_run=dry_run
+ )
class PrivateIPAddress(object):
@@ -194,13 +200,15 @@ class NetworkInterfaceCollection(list):
def build_list_params(self, params, prefix=''):
for i, spec in enumerate(self):
- full_prefix = '%sNetworkInterface.%s.' % (prefix, i+1)
+ full_prefix = '%sNetworkInterface.%s.' % (prefix, i)
if spec.network_interface_id is not None:
params[full_prefix + 'NetworkInterfaceId'] = \
str(spec.network_interface_id)
if spec.device_index is not None:
params[full_prefix + 'DeviceIndex'] = \
str(spec.device_index)
+ else:
+ params[full_prefix + 'DeviceIndex'] = 0
if spec.subnet_id is not None:
params[full_prefix + 'SubnetId'] = str(spec.subnet_id)
if spec.description is not None:
@@ -216,25 +224,56 @@ class NetworkInterfaceCollection(list):
str(spec.private_ip_address)
if spec.groups is not None:
for j, group_id in enumerate(spec.groups):
- query_param_key = '%sSecurityGroupId.%s' % (full_prefix, j+1)
+ query_param_key = '%sSecurityGroupId.%s' % (full_prefix, j)
params[query_param_key] = str(group_id)
if spec.private_ip_addresses is not None:
for k, ip_addr in enumerate(spec.private_ip_addresses):
query_param_key_prefix = (
- '%sPrivateIpAddresses.%s' % (full_prefix, k+1))
+ '%sPrivateIpAddresses.%s' % (full_prefix, k))
params[query_param_key_prefix + '.PrivateIpAddress'] = \
str(ip_addr.private_ip_address)
if ip_addr.primary is not None:
params[query_param_key_prefix + '.Primary'] = \
'true' if ip_addr.primary else 'false'
+ # Associating Public IPs have special logic around them:
+ #
+ # * Only assignable on an device_index of ``0``
+ # * Only on one interface
+ # * Only if there are no other interfaces being created
+ # * Only if it's a new interface (which we can't really guard
+ # against)
+ #
+ # More details on http://docs.aws.amazon.com/AWSEC2/latest/APIReference/ApiReference-query-RunInstances.html
+ if spec.associate_public_ip_address is not None:
+ if not params[full_prefix + 'DeviceIndex'] in (0, '0'):
+ raise BotoClientError(
+ "Only the interface with device index of 0 can " + \
+ "be provided when using " + \
+ "'associate_public_ip_address'."
+ )
+
+ if len(self) > 1:
+ raise BotoClientError(
+ "Only one interface can be provided when using " + \
+ "'associate_public_ip_address'."
+ )
+
+ key = full_prefix + 'AssociatePublicIpAddress'
+
+ if spec.associate_public_ip_address:
+ params[key] = 'true'
+ else:
+ params[key] = 'false'
+
class NetworkInterfaceSpecification(object):
def __init__(self, network_interface_id=None, device_index=None,
subnet_id=None, description=None, private_ip_address=None,
groups=None, delete_on_termination=None,
private_ip_addresses=None,
- secondary_private_ip_address_count=None):
+ secondary_private_ip_address_count=None,
+ associate_public_ip_address=None):
self.network_interface_id = network_interface_id
self.device_index = device_index
self.subnet_id = subnet_id
@@ -245,3 +284,4 @@ class NetworkInterfaceSpecification(object):
self.private_ip_addresses = private_ip_addresses
self.secondary_private_ip_address_count = \
secondary_private_ip_address_count
+ self.associate_public_ip_address = associate_public_ip_address
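A sketch of launching with the new flag, which as the guard above enforces only works for a single interface at device index 0; every ID here is a placeholder:

    import boto.ec2
    from boto.ec2.networkinterface import (NetworkInterfaceCollection,
                                           NetworkInterfaceSpecification)

    conn = boto.ec2.connect_to_region('us-east-1')

    spec = NetworkInterfaceSpecification(
        device_index=0,                        # required for a public IP
        subnet_id='subnet-00000000',
        groups=['sg-00000000'],
        associate_public_ip_address=True,
    )

    conn.run_instances('ami-00000000',
                       instance_type='m1.small',
                       network_interfaces=NetworkInterfaceCollection(spec))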
diff --git a/boto/ec2/placementgroup.py b/boto/ec2/placementgroup.py
index e1bbea62..79bd4c46 100644
--- a/boto/ec2/placementgroup.py
+++ b/boto/ec2/placementgroup.py
@@ -14,7 +14,7 @@
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
-# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
@@ -25,7 +25,7 @@ from boto.ec2.ec2object import EC2Object
from boto.exception import BotoClientError
class PlacementGroup(EC2Object):
-
+
def __init__(self, connection=None, name=None, strategy=None, state=None):
EC2Object.__init__(self, connection)
self.name = name
@@ -45,7 +45,10 @@ class PlacementGroup(EC2Object):
else:
setattr(self, name, value)
- def delete(self):
- return self.connection.delete_placement_group(self.name)
+ def delete(self, dry_run=False):
+ return self.connection.delete_placement_group(
+ self.name,
+ dry_run=dry_run
+ )
diff --git a/boto/ec2/reservedinstance.py b/boto/ec2/reservedinstance.py
index d92f1686..1386c414 100644
--- a/boto/ec2/reservedinstance.py
+++ b/boto/ec2/reservedinstance.py
@@ -21,6 +21,7 @@
from boto.resultset import ResultSet
from boto.ec2.ec2object import EC2Object
+from boto.utils import parse_ts
class ReservedInstancesOffering(EC2Object):
@@ -89,8 +90,12 @@ class ReservedInstancesOffering(EC2Object):
print '\tUsage Price=%s' % self.usage_price
print '\tDescription=%s' % self.description
- def purchase(self, instance_count=1):
- return self.connection.purchase_reserved_instance_offering(self.id, instance_count)
+ def purchase(self, instance_count=1, dry_run=False):
+ return self.connection.purchase_reserved_instance_offering(
+ self.id,
+ instance_count,
+ dry_run=dry_run
+ )
class RecurringCharge(object):
@@ -225,3 +230,120 @@ class PriceSchedule(object):
self.active = True if value == 'true' else False
else:
setattr(self, name, value)
+
+
+class ReservedInstancesConfiguration(object):
+ def __init__(self, connection=None, availability_zone=None, platform=None,
+ instance_count=None, instance_type=None):
+ self.connection = connection
+ self.availability_zone = availability_zone
+ self.platform = platform
+ self.instance_count = instance_count
+ self.instance_type = instance_type
+
+ def startElement(self, name, attrs, connection):
+ return None
+
+ def endElement(self, name, value, connection):
+ if name == 'availabilityZone':
+ self.availability_zone = value
+ elif name == 'platform':
+ self.platform = value
+ elif name == 'instanceCount':
+ self.instance_count = int(value)
+ elif name == 'instanceType':
+ self.instance_type = value
+ else:
+ setattr(self, name, value)
+
+
+class ModifyReservedInstancesResult(object):
+ def __init__(self, connection=None, modification_id=None):
+ self.connection = connection
+ self.modification_id = modification_id
+
+ def startElement(self, name, attrs, connection):
+ return None
+
+ def endElement(self, name, value, connection):
+ if name == 'reservedInstancesModificationId':
+ self.modification_id = value
+ else:
+ setattr(self, name, value)
+
+
+class ModificationResult(object):
+ def __init__(self, connection=None, modification_id=None,
+ availability_zone=None, platform=None, instance_count=None,
+ instance_type=None):
+ self.connection = connection
+ self.modification_id = modification_id
+ self.availability_zone = availability_zone
+ self.platform = platform
+ self.instance_count = instance_count
+ self.instance_type = instance_type
+
+ def startElement(self, name, attrs, connection):
+ return None
+
+ def endElement(self, name, value, connection):
+ if name == 'reservedInstancesModificationId':
+ self.modification_id = value
+ elif name == 'availabilityZone':
+ self.availability_zone = value
+ elif name == 'platform':
+ self.platform = value
+ elif name == 'instanceCount':
+ self.instance_count = int(value)
+ elif name == 'instanceType':
+ self.instance_type = value
+ else:
+ setattr(self, name, value)
+
+
+class ReservedInstancesModification(object):
+ def __init__(self, connection=None, modification_id=None,
+ reserved_instances=None, modification_results=None,
+ create_date=None, update_date=None, effective_date=None,
+ status=None, status_message=None, client_token=None):
+ self.connection = connection
+ self.modification_id = modification_id
+ self.reserved_instances = reserved_instances
+ self.modification_results = modification_results
+ self.create_date = create_date
+ self.update_date = update_date
+ self.effective_date = effective_date
+ self.status = status
+ self.status_message = status_message
+ self.client_token = client_token
+
+ def startElement(self, name, attrs, connection):
+ if name == 'reservedInstancesSet':
+ self.reserved_instances = ResultSet([
+ ('item', ReservedInstance)
+ ])
+ return self.reserved_instances
+ elif name == 'modificationResultSet':
+ self.modification_results = ResultSet([
+ ('item', ModificationResult)
+ ])
+ return self.modification_results
+ return None
+
+ def endElement(self, name, value, connection):
+ if name == 'reservedInstancesModificationId':
+ self.modification_id = value
+ elif name == 'createDate':
+ self.create_date = parse_ts(value)
+ elif name == 'updateDate':
+ self.update_date = parse_ts(value)
+ elif name == 'effectiveDate':
+ self.effective_date = parse_ts(value)
+ elif name == 'status':
+ self.status = value
+ elif name == 'statusMessage':
+ self.status_message = value
+ elif name == 'clientToken':
+ self.client_token = value
+ else:
+ setattr(self, name, value)
diff --git a/boto/ec2/securitygroup.py b/boto/ec2/securitygroup.py
index 1b3c0ade..9f437ebb 100644
--- a/boto/ec2/securitygroup.py
+++ b/boto/ec2/securitygroup.py
@@ -26,6 +26,7 @@ Represents an EC2 Security Group
from boto.ec2.ec2object import TaggedEC2Object
from boto.exception import BotoClientError
+
class SecurityGroup(TaggedEC2Object):
def __init__(self, connection=None, owner_id=None,
@@ -73,7 +74,7 @@ class SecurityGroup(TaggedEC2Object):
self.status = True
else:
raise Exception(
- 'Unexpected value of status %s for group %s'%(
+ 'Unexpected value of status %s for group %s' % (
value,
self.name
)
@@ -81,14 +82,21 @@ class SecurityGroup(TaggedEC2Object):
else:
setattr(self, name, value)
- def delete(self):
+ def delete(self, dry_run=False):
if self.vpc_id:
- return self.connection.delete_security_group(group_id=self.id)
+ return self.connection.delete_security_group(
+ group_id=self.id,
+ dry_run=dry_run
+ )
else:
- return self.connection.delete_security_group(self.name)
+ return self.connection.delete_security_group(
+ self.name,
+ dry_run=dry_run
+ )
def add_rule(self, ip_protocol, from_port, to_port,
- src_group_name, src_group_owner_id, cidr_ip, src_group_group_id):
+ src_group_name, src_group_owner_id, cidr_ip,
+ src_group_group_id, dry_run=False):
"""
Add a rule to the SecurityGroup object. Note that this method
only changes the local version of the object. No information
@@ -99,15 +107,25 @@ class SecurityGroup(TaggedEC2Object):
rule.from_port = from_port
rule.to_port = to_port
self.rules.append(rule)
- rule.add_grant(src_group_name, src_group_owner_id, cidr_ip, src_group_group_id)
+ rule.add_grant(
+ src_group_name,
+ src_group_owner_id,
+ cidr_ip,
+ src_group_group_id,
+ dry_run=dry_run
+ )
def remove_rule(self, ip_protocol, from_port, to_port,
- src_group_name, src_group_owner_id, cidr_ip, src_group_group_id):
+ src_group_name, src_group_owner_id, cidr_ip,
+ src_group_group_id, dry_run=False):
"""
Remove a rule from the SecurityGroup object. Note that this method
only changes the local version of the object. No information
is sent to EC2.
"""
+ if not self.rules:
+ raise ValueError("The security group has no rules")
+
target_rule = None
for rule in self.rules:
if rule.ip_protocol == ip_protocol:
@@ -122,11 +140,11 @@ class SecurityGroup(TaggedEC2Object):
target_grant = grant
if target_grant:
rule.grants.remove(target_grant)
- if len(rule.grants) == 0:
- self.rules.remove(target_rule)
+ if len(rule.grants) == 0:
+ self.rules.remove(target_rule)
def authorize(self, ip_protocol=None, from_port=None, to_port=None,
- cidr_ip=None, src_group=None):
+ cidr_ip=None, src_group=None, dry_run=False):
"""
Add a new rule to this security group.
You need to pass in either src_group_name
@@ -181,17 +199,19 @@ class SecurityGroup(TaggedEC2Object):
to_port,
cidr_ip,
group_id,
- src_group_group_id)
+ src_group_group_id,
+ dry_run=dry_run)
if status:
if not isinstance(cidr_ip, list):
cidr_ip = [cidr_ip]
for single_cidr_ip in cidr_ip:
self.add_rule(ip_protocol, from_port, to_port, src_group_name,
- src_group_owner_id, single_cidr_ip, src_group_group_id)
+ src_group_owner_id, single_cidr_ip,
+ src_group_group_id, dry_run=dry_run)
return status
def revoke(self, ip_protocol=None, from_port=None, to_port=None,
- cidr_ip=None, src_group=None):
+ cidr_ip=None, src_group=None, dry_run=False):
group_name = None
if not self.vpc_id:
group_name = self.name
@@ -219,13 +239,15 @@ class SecurityGroup(TaggedEC2Object):
to_port,
cidr_ip,
group_id,
- src_group_group_id)
+ src_group_group_id,
+ dry_run=dry_run)
if status:
self.remove_rule(ip_protocol, from_port, to_port, src_group_name,
- src_group_owner_id, cidr_ip, src_group_group_id)
+ src_group_owner_id, cidr_ip, src_group_group_id,
+ dry_run=dry_run)
return status
- def copy_to_region(self, region, name=None):
+ def copy_to_region(self, region, name=None, dry_run=False):
"""
Create a copy of this security group in another region.
Note that the new security group will be a separate entity
@@ -246,7 +268,11 @@ class SecurityGroup(TaggedEC2Object):
raise BotoClientError('Unable to copy to the same Region')
conn_params = self.connection.get_params()
rconn = region.connect(**conn_params)
- sg = rconn.create_security_group(name or self.name, self.description)
+ sg = rconn.create_security_group(
+ name or self.name,
+ self.description,
+ dry_run=dry_run
+ )
source_groups = []
for rule in self.rules:
for grant in rule.grants:
@@ -254,13 +280,14 @@ class SecurityGroup(TaggedEC2Object):
if grant_nom:
if grant_nom not in source_groups:
source_groups.append(grant_nom)
- sg.authorize(None, None, None, None, grant)
+ sg.authorize(None, None, None, None, grant,
+ dry_run=dry_run)
else:
sg.authorize(rule.ip_protocol, rule.from_port, rule.to_port,
- grant.cidr_ip)
+ grant.cidr_ip, dry_run=dry_run)
return sg
- def instances(self):
+ def instances(self, dry_run=False):
"""
Find all of the current instances that are running within this
security group.
@@ -268,16 +295,21 @@ class SecurityGroup(TaggedEC2Object):
:rtype: list of :class:`boto.ec2.instance.Instance`
:return: A list of Instance objects
"""
- # It would be more efficient to do this with filters now
- # but not all services that implement EC2 API support filters.
- instances = []
- rs = self.connection.get_all_instances()
- for reservation in rs:
- uses_group = [g.name for g in reservation.groups if g.name == self.name]
- if uses_group:
- instances.extend(reservation.instances)
+ rs = []
+ if self.vpc_id:
+ rs.extend(self.connection.get_all_reservations(
+ filters={'instance.group-id': self.id},
+ dry_run=dry_run
+ ))
+ else:
+ rs.extend(self.connection.get_all_reservations(
+ filters={'group-id': self.id},
+ dry_run=dry_run
+ ))
+ instances = [i for r in rs for i in r.instances]
return instances
+
class IPPermissionsList(list):
def startElement(self, name, attrs, connection):
@@ -289,6 +321,7 @@ class IPPermissionsList(list):
def endElement(self, name, value, connection):
pass
+
class IPPermissions(object):
def __init__(self, parent=None):
@@ -318,7 +351,8 @@ class IPPermissions(object):
else:
setattr(self, name, value)
- def add_grant(self, name=None, owner_id=None, cidr_ip=None, group_id=None):
+ def add_grant(self, name=None, owner_id=None, cidr_ip=None, group_id=None,
+ dry_run=False):
grant = GroupOrCIDR(self)
grant.owner_id = owner_id
grant.group_id = group_id
@@ -327,6 +361,7 @@ class IPPermissions(object):
self.grants.append(grant)
return grant
+
class GroupOrCIDR(object):
def __init__(self, parent=None):
diff --git a/boto/ec2/snapshot.py b/boto/ec2/snapshot.py
index d2c4b2b9..24bffe6b 100644
--- a/boto/ec2/snapshot.py
+++ b/boto/ec2/snapshot.py
@@ -15,7 +15,7 @@
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
-# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
@@ -27,9 +27,9 @@ from boto.ec2.ec2object import TaggedEC2Object
from boto.ec2.zone import Zone
class Snapshot(TaggedEC2Object):
-
+
AttrName = 'createVolumePermission'
-
+
def __init__(self, connection=None):
TaggedEC2Object.__init__(self, connection)
self.id = None
@@ -72,7 +72,7 @@ class Snapshot(TaggedEC2Object):
self.progress = updated.progress
self.status = updated.status
- def update(self, validate=False):
+ def update(self, validate=False, dry_run=False):
"""
Update the data associated with this snapshot by querying EC2.
@@ -83,39 +83,49 @@ class Snapshot(TaggedEC2Object):
raise a ValueError exception if no data is
returned from EC2.
"""
- rs = self.connection.get_all_snapshots([self.id])
+ rs = self.connection.get_all_snapshots([self.id], dry_run=dry_run)
if len(rs) > 0:
self._update(rs[0])
elif validate:
raise ValueError('%s is not a valid Snapshot ID' % self.id)
return self.progress
-
- def delete(self):
- return self.connection.delete_snapshot(self.id)
- def get_permissions(self):
- attrs = self.connection.get_snapshot_attribute(self.id, self.AttrName)
+ def delete(self, dry_run=False):
+ return self.connection.delete_snapshot(self.id, dry_run=dry_run)
+
+ def get_permissions(self, dry_run=False):
+ attrs = self.connection.get_snapshot_attribute(
+ self.id,
+ self.AttrName,
+ dry_run=dry_run
+ )
return attrs.attrs
- def share(self, user_ids=None, groups=None):
+ def share(self, user_ids=None, groups=None, dry_run=False):
return self.connection.modify_snapshot_attribute(self.id,
self.AttrName,
'add',
user_ids,
- groups)
+ groups,
+ dry_run=dry_run)
- def unshare(self, user_ids=None, groups=None):
+ def unshare(self, user_ids=None, groups=None, dry_run=False):
return self.connection.modify_snapshot_attribute(self.id,
self.AttrName,
'remove',
user_ids,
- groups)
-
- def reset_permissions(self):
- return self.connection.reset_snapshot_attribute(self.id,
- self.AttrName)
-
- def create_volume(self, zone, size=None, volume_type=None, iops=None):
+ groups,
+ dry_run=dry_run)
+
+ def reset_permissions(self, dry_run=False):
+ return self.connection.reset_snapshot_attribute(
+ self.id,
+ self.AttrName,
+ dry_run=dry_run
+ )
+
+ def create_volume(self, zone, size=None, volume_type=None, iops=None,
+ dry_run=False):
"""
Create a new EBS Volume from this Snapshot
@@ -136,7 +146,14 @@ class Snapshot(TaggedEC2Object):
"""
if isinstance(zone, Zone):
zone = zone.name
- return self.connection.create_volume(size, zone, self.id, volume_type, iops)
+ return self.connection.create_volume(
+ size,
+ zone,
+ self.id,
+ volume_type,
+ iops,
+ dry_run=dry_run
+ )
class SnapshotAttribute:
@@ -167,4 +184,4 @@ class SnapshotAttribute:
setattr(self, name, value)
-
+
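Most Snapshot methods above now accept dry_run. A hedged sketch of how that flag is typically used (the snapshot ID, zone, and size are placeholders; EC2 reports a successful permission check as a DryRunOperation error):

    import boto.ec2
    from boto.exception import EC2ResponseError

    conn = boto.ec2.connect_to_region('us-east-1')
    snapshot = conn.get_all_snapshots(['snap-12345678'])[0]

    try:
        # Permission check only; no volume is created when dry_run=True.
        snapshot.create_volume('us-east-1a', size=10, dry_run=True)
    except EC2ResponseError as e:
        print(e.error_code)  # expected: 'DryRunOperation'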
diff --git a/boto/ec2/spotdatafeedsubscription.py b/boto/ec2/spotdatafeedsubscription.py
index 9b820a3e..1b30a99f 100644
--- a/boto/ec2/spotdatafeedsubscription.py
+++ b/boto/ec2/spotdatafeedsubscription.py
@@ -26,7 +26,7 @@ from boto.ec2.ec2object import EC2Object
from boto.ec2.spotinstancerequest import SpotInstanceStateFault
class SpotDatafeedSubscription(EC2Object):
-
+
def __init__(self, connection=None, owner_id=None,
bucket=None, prefix=None, state=None,fault=None):
EC2Object.__init__(self, connection)
@@ -45,7 +45,7 @@ class SpotDatafeedSubscription(EC2Object):
return self.fault
else:
return None
-
+
def endElement(self, name, value, connection):
if name == 'ownerId':
self.owner_id = value
@@ -58,6 +58,8 @@ class SpotDatafeedSubscription(EC2Object):
else:
setattr(self, name, value)
- def delete(self):
- return self.connection.delete_spot_datafeed_subscription()
+ def delete(self, dry_run=False):
+ return self.connection.delete_spot_datafeed_subscription(
+ dry_run=dry_run
+ )
diff --git a/boto/ec2/spotinstancerequest.py b/boto/ec2/spotinstancerequest.py
index 54fba1d6..c5b8bc95 100644
--- a/boto/ec2/spotinstancerequest.py
+++ b/boto/ec2/spotinstancerequest.py
@@ -184,5 +184,8 @@ class SpotInstanceRequest(TaggedEC2Object):
else:
setattr(self, name, value)
- def cancel(self):
- self.connection.cancel_spot_instance_requests([self.id])
+ def cancel(self, dry_run=False):
+ self.connection.cancel_spot_instance_requests(
+ [self.id],
+ dry_run=dry_run
+ )
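The same pattern applies to spot requests; an illustrative dry run of cancel() (the request ID is made up):

    import boto.ec2
    from boto.exception import EC2ResponseError

    conn = boto.ec2.connect_to_region('us-east-1')
    for request in conn.get_all_spot_instance_requests(['sir-abcd1234']):
        try:
            request.cancel(dry_run=True)
        except EC2ResponseError as e:
            # A successful dry run still surfaces as an error response.
            print(request.id, e.error_code)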
diff --git a/boto/ec2/volume.py b/boto/ec2/volume.py
index bc5befc7..2127b260 100644
--- a/boto/ec2/volume.py
+++ b/boto/ec2/volume.py
@@ -98,7 +98,7 @@ class Volume(TaggedEC2Object):
def _update(self, updated):
self.__dict__.update(updated.__dict__)
- def update(self, validate=False):
+ def update(self, validate=False, dry_run=False):
"""
Update the data associated with this volume by querying EC2.
@@ -110,7 +110,10 @@ class Volume(TaggedEC2Object):
returned from EC2.
"""
# Check the resultset since Eucalyptus ignores the volumeId param
- unfiltered_rs = self.connection.get_all_volumes([self.id])
+ unfiltered_rs = self.connection.get_all_volumes(
+ [self.id],
+ dry_run=dry_run
+ )
rs = [x for x in unfiltered_rs if x.id == self.id]
if len(rs) > 0:
self._update(rs[0])
@@ -118,16 +121,16 @@ class Volume(TaggedEC2Object):
raise ValueError('%s is not a valid Volume ID' % self.id)
return self.status
- def delete(self):
+ def delete(self, dry_run=False):
"""
Delete this EBS volume.
:rtype: bool
:return: True if successful
"""
- return self.connection.delete_volume(self.id)
+ return self.connection.delete_volume(self.id, dry_run=dry_run)
- def attach(self, instance_id, device):
+ def attach(self, instance_id, device, dry_run=False):
"""
Attach this EBS volume to an EC2 instance.
@@ -142,9 +145,14 @@ class Volume(TaggedEC2Object):
:rtype: bool
:return: True if successful
"""
- return self.connection.attach_volume(self.id, instance_id, device)
-
- def detach(self, force=False):
+ return self.connection.attach_volume(
+ self.id,
+ instance_id,
+ device,
+ dry_run=dry_run
+ )
+
+ def detach(self, force=False, dry_run=False):
"""
Detach this EBS volume from an EC2 instance.
@@ -167,10 +175,15 @@ class Volume(TaggedEC2Object):
device = None
if self.attach_data:
device = self.attach_data.device
- return self.connection.detach_volume(self.id, instance_id,
- device, force)
-
- def create_snapshot(self, description=None):
+ return self.connection.detach_volume(
+ self.id,
+ instance_id,
+ device,
+ force,
+ dry_run=dry_run
+ )
+
+ def create_snapshot(self, description=None, dry_run=False):
"""
Create a snapshot of this EBS Volume.
@@ -181,7 +194,11 @@ class Volume(TaggedEC2Object):
:rtype: :class:`boto.ec2.snapshot.Snapshot`
:return: The created Snapshot object
"""
- return self.connection.create_snapshot(self.id, description)
+ return self.connection.create_snapshot(
+ self.id,
+ description,
+ dry_run=dry_run
+ )
def volume_state(self):
"""
@@ -198,7 +215,7 @@ class Volume(TaggedEC2Object):
state = self.attach_data.status
return state
- def snapshots(self, owner=None, restorable_by=None):
+ def snapshots(self, owner=None, restorable_by=None, dry_run=False):
"""
Get all snapshots related to this volume. Note that this requires
that all available snapshots for the account be retrieved from EC2
@@ -221,8 +238,11 @@ class Volume(TaggedEC2Object):
:return: The requested Snapshot objects
"""
- rs = self.connection.get_all_snapshots(owner=owner,
- restorable_by=restorable_by)
+ rs = self.connection.get_all_snapshots(
+ owner=owner,
+ restorable_by=restorable_by,
+ dry_run=dry_run
+ )
mine = []
for snap in rs:
if snap.volume_id == self.id:
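A short, hypothetical walk through the Volume methods touched above (volume, instance, and device names are placeholders):

    import boto.ec2

    conn = boto.ec2.connect_to_region('us-east-1')
    vol = conn.get_all_volumes(['vol-12345678'])[0]

    vol.attach('i-12345678', '/dev/sdf')
    snap = vol.create_snapshot(description='nightly backup')

    # snapshots() still lists account-wide and filters by volume_id locally.
    for s in vol.snapshots(owner='self'):
        print(s.id, s.start_time)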
diff --git a/boto/elasticache/__init__.py b/boto/elasticache/__init__.py
index fe35d707..acd03777 100644
--- a/boto/elasticache/__init__.py
+++ b/boto/elasticache/__init__.py
@@ -49,6 +49,9 @@ def regions():
RegionInfo(name='ap-southeast-1',
endpoint='elasticache.ap-southeast-1.amazonaws.com',
connection_cls=ElastiCacheConnection),
+ RegionInfo(name='ap-southeast-2',
+ endpoint='elasticache.ap-southeast-2.amazonaws.com',
+ connection_cls=ElastiCacheConnection),
RegionInfo(name='sa-east-1',
endpoint='elasticache.sa-east-1.amazonaws.com',
connection_cls=ElastiCacheConnection),
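The new region is reachable like any other; a quick sketch assuming credentials in the boto config:

    import boto.elasticache

    conn = boto.elasticache.connect_to_region('ap-southeast-2')
    print([r.name for r in boto.elasticache.regions()])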
diff --git a/boto/elasticache/layer1.py b/boto/elasticache/layer1.py
index 6c50438a..f1dc3a26 100644
--- a/boto/elasticache/layer1.py
+++ b/boto/elasticache/layer1.py
@@ -20,7 +20,6 @@
# IN THE SOFTWARE.
#
-
import boto
from boto.compat import json
from boto.connection import AWSQueryConnection
@@ -33,18 +32,18 @@ class ElastiCacheConnection(AWSQueryConnection):
Amazon ElastiCache is a web service that makes it easier to set
up, operate, and scale a distributed cache in the cloud.
- With Amazon ElastiCache, customers gain all of the benefits of a
- high-performance, in-memory cache with far less of the
- administrative burden of launching and managing a distributed
- cache. The service makes set-up, scaling, and cluster failure
- handling much simpler than in a self-managed cache deployment.
+ With ElastiCache, customers gain all of the benefits of a high-
+ performance, in-memory cache with far less of the administrative
+ burden of launching and managing a distributed cache. The service
+ makes set-up, scaling, and cluster failure handling much simpler
+ than in a self-managed cache deployment.
In addition, through integration with Amazon CloudWatch, customers
get enhanced visibility into the key performance statistics
associated with their cache and can receive alarms if a part of
their cache runs hot.
"""
- APIVersion = "2012-11-15"
+ APIVersion = "2013-06-15"
DefaultRegionName = "us-east-1"
DefaultRegionEndpoint = "elasticache.us-east-1.amazonaws.com"
@@ -68,26 +67,26 @@ class ElastiCacheConnection(AWSQueryConnection):
ec2_security_group_name,
ec2_security_group_owner_id):
"""
- Authorizes ingress to a CacheSecurityGroup using EC2 Security
- Groups as authorization (therefore the application using the
- cache must be running on EC2 clusters). This API requires the
- following parameters: EC2SecurityGroupName and
- EC2SecurityGroupOwnerId.
- You cannot authorize ingress from an EC2 security group in one
- Region to an Amazon Cache Cluster in another.
+ The AuthorizeCacheSecurityGroupIngress operation allows
+ network ingress to a cache security group. Applications using
+ ElastiCache must be running on Amazon EC2, and Amazon EC2
+ security groups are used as the authorization mechanism.
+ You cannot authorize ingress from an Amazon EC2 security group
+ in one Region to an ElastiCache cluster in another Region.
:type cache_security_group_name: string
- :param cache_security_group_name: The name of the Cache Security Group
- to authorize.
+ :param cache_security_group_name: The cache security group which will
+ allow network ingress.
:type ec2_security_group_name: string
- :param ec2_security_group_name: Name of the EC2 Security Group to
- include in the authorization.
+ :param ec2_security_group_name: The Amazon EC2 security group to be
+ authorized for ingress to the cache security group.
:type ec2_security_group_owner_id: string
- :param ec2_security_group_owner_id: AWS Account Number of the owner of
- the security group specified in the EC2SecurityGroupName parameter.
- The AWS Access Key ID is not an acceptable value.
+ :param ec2_security_group_owner_id: The AWS account number of the
+ Amazon EC2 security group owner. Note that this is not the same
+ thing as an AWS access key ID - you must provide a valid AWS
+ account number for this parameter.
"""
params = {
@@ -101,86 +100,146 @@ class ElastiCacheConnection(AWSQueryConnection):
path='/', params=params)
def create_cache_cluster(self, cache_cluster_id, num_cache_nodes,
- cache_node_type, engine, engine_version=None,
+ cache_node_type, engine,
+ replication_group_id=None, engine_version=None,
cache_parameter_group_name=None,
cache_subnet_group_name=None,
cache_security_group_names=None,
- security_group_ids=None,
+ security_group_ids=None, snapshot_arns=None,
preferred_availability_zone=None,
preferred_maintenance_window=None, port=None,
notification_topic_arn=None,
auto_minor_version_upgrade=None):
"""
- Creates a new Cache Cluster.
+ The CreateCacheCluster operation creates a new cache cluster.
+ All nodes in the cache cluster run the same protocol-compliant
+ cache engine software - either Memcached or Redis.
:type cache_cluster_id: string
- :param cache_cluster_id: The Cache Cluster identifier. This parameter
- is stored as a lowercase string.
+ :param cache_cluster_id:
+ The cache cluster identifier. This parameter is stored as a lowercase
+ string.
+
+ Constraints:
+
+
+ + Must contain from 1 to 20 alphanumeric characters or hyphens.
+ + First character must be a letter.
+ + Cannot end with a hyphen or contain two consecutive hyphens.
+
+ :type replication_group_id: string
+ :param replication_group_id: The replication group to which this cache
+ cluster should belong. If this parameter is specified, the cache
+ cluster will be added to the specified replication group as a read
+ replica; otherwise, the cache cluster will be a standalone primary
+ that is not part of any replication group.
:type num_cache_nodes: integer
- :param num_cache_nodes: The number of Cache Nodes the Cache Cluster
- should have.
+ :param num_cache_nodes: The initial number of cache nodes that the
+ cache cluster will have.
+ For a Memcached cluster, valid values are between 1 and 20. If you need
+ to exceed this limit, please fill out the ElastiCache Limit
+ Increase Request form at ``_ .
+
+ For Redis, only single-node cache clusters are supported at this time,
+ so the value for this parameter must be 1.
:type cache_node_type: string
- :param cache_node_type: The compute and memory capacity of nodes in a
- Cache Cluster.
+ :param cache_node_type: The compute and memory capacity of the nodes in
+ the cache cluster.
+ Valid values for Memcached:
+
+ `cache.t1.micro` | `cache.m1.small` | `cache.m1.medium` |
+ `cache.m1.large` | `cache.m1.xlarge` | `cache.m3.xlarge` |
+ `cache.m3.2xlarge` | `cache.m2.xlarge` | `cache.m2.2xlarge` |
+ `cache.m2.4xlarge` | `cache.c1.xlarge`
+
+ Valid values for Redis:
+
+ `cache.t1.micro` | `cache.m1.small` | `cache.m1.medium` |
+ `cache.m1.large` | `cache.m1.xlarge` | `cache.m2.xlarge` |
+ `cache.m2.2xlarge` | `cache.m2.4xlarge` | `cache.c1.xlarge`
+
+ For a complete listing of cache node types and specifications, see `.
:type engine: string
- :param engine: The name of the cache engine to be used for this Cache
- Cluster. Currently, memcached is the only cache engine supported
- by the service.
+ :param engine: The name of the cache engine to be used for this cache
+ cluster.
+ Valid values for this parameter are:
+
+ `memcached` | `redis`
:type engine_version: string
- :param engine_version: The version of the cache engine to be used for
- this cluster.
+ :param engine_version: The version number of the cache engine to be
+ used for this cluster. To view the supported cache engine versions,
+ use the DescribeCacheEngineVersions operation.
:type cache_parameter_group_name: string
:param cache_parameter_group_name: The name of the cache parameter
- group to associate with this Cache cluster. If this argument is
- omitted, the default CacheParameterGroup for the specified engine
+ group to associate with this cache cluster. If this argument is
+ omitted, the default cache parameter group for the specified engine
will be used.
:type cache_subnet_group_name: string
- :param cache_subnet_group_name: The name of the Cache Subnet Group to
- be used for the Cache Cluster. Use this parameter only when you
- are creating a cluster in an Amazon Virtual Private Cloud (VPC).
+ :param cache_subnet_group_name: The name of the cache subnet group to
+ be used for the cache cluster.
+ Use this parameter only when you are creating a cluster in an Amazon
+ Virtual Private Cloud (VPC).
:type cache_security_group_names: list
- :param cache_security_group_names: A list of Cache Security Group Names
- to associate with this Cache Cluster. Use this parameter only when
- you are creating a cluster outside of an Amazon Virtual Private
- Cloud (VPC).
+ :param cache_security_group_names: A list of cache security group names
+ to associate with this cache cluster.
+ Use this parameter only when you are creating a cluster outside of an
+ Amazon Virtual Private Cloud (VPC).
:type security_group_ids: list
- :param security_group_ids: Specifies the VPC Security Groups associated
- with the Cache Cluster. Use this parameter only when you are
- creating a cluster in an Amazon Virtual Private Cloud (VPC).
+ :param security_group_ids: One or more VPC security groups associated
+ with the cache cluster.
+ Use this parameter only when you are creating a cluster in an Amazon
+ Virtual Private Cloud (VPC).
+
+ :type snapshot_arns: list
+ :param snapshot_arns: A single-element string list containing an Amazon
+ Resource Name (ARN) that uniquely identifies a Redis RDB snapshot
+ file stored in Amazon S3. The snapshot file will be used to
+ populate the Redis cache in the new cache cluster. The Amazon S3
+ object name in the ARN cannot contain any commas.
+ Here is an example of an Amazon S3 ARN:
+ `arn:aws:s3:::my_bucket/snapshot1.rdb`
+
+ **Note:** This parameter is only valid if the `Engine` parameter is
+ `redis`.
:type preferred_availability_zone: string
- :param preferred_availability_zone: The EC2 Availability Zone that the
- Cache Cluster will be created in. All cache nodes belonging to a
- cache cluster are placed in the preferred availability zone.
- Default: System chosen (random) availability zone.
+ :param preferred_availability_zone: The EC2 Availability Zone in which
+ the cache cluster will be created.
+ All cache nodes belonging to a cache cluster are placed in the
+ preferred availability zone.
+
+ Default: System chosen availability zone.
:type preferred_maintenance_window: string
:param preferred_maintenance_window: The weekly time range (in UTC)
- during which system maintenance can occur. Example:
- `sun:05:00-sun:09:00`
+ during which system maintenance can occur.
+ Example: `sun:05:00-sun:09:00`
:type port: integer
- :param port: The port number on which each of the Cache Nodes will
+ :param port: The port number on which each of the cache nodes will
accept connections.
:type notification_topic_arn: string
- :param notification_topic_arn: The Amazon Resource Name (ARN) of the
- Amazon Simple Notification Service (SNS) topic to which
- notifications will be sent. The Amazon SNS topic owner must be the
- same as the Cache Cluster owner.
+ :param notification_topic_arn:
+ The Amazon Resource Name (ARN) of the Amazon Simple Notification
+ Service (SNS) topic to which notifications will be sent.
+
+ The Amazon SNS topic owner must be the same as the cache cluster owner.
:type auto_minor_version_upgrade: boolean
- :param auto_minor_version_upgrade: Indicates that minor engine upgrades
- will be applied automatically to the Cache Cluster during the
- maintenance window. Default: `True`
+ :param auto_minor_version_upgrade: Determines whether minor engine
+ upgrades will be applied automatically to the cache cluster during
+ the maintenance window. A value of `True` allows these upgrades to
+ occur; `False` disables automatic upgrades.
+ Default: `True`
"""
params = {
@@ -189,6 +248,8 @@ class ElastiCacheConnection(AWSQueryConnection):
'CacheNodeType': cache_node_type,
'Engine': engine,
}
+ if replication_group_id is not None:
+ params['ReplicationGroupId'] = replication_group_id
if engine_version is not None:
params['EngineVersion'] = engine_version
if cache_parameter_group_name is not None:
@@ -203,6 +264,10 @@ class ElastiCacheConnection(AWSQueryConnection):
self.build_list_params(params,
security_group_ids,
'SecurityGroupIds.member')
+ if snapshot_arns is not None:
+ self.build_list_params(params,
+ snapshot_arns,
+ 'SnapshotArns.member')
if preferred_availability_zone is not None:
params['PreferredAvailabilityZone'] = preferred_availability_zone
if preferred_maintenance_window is not None:
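A hypothetical create_cache_cluster call against the 2013-06-15 API documented above: a single-node Redis cluster seeded from an RDB file in S3 (the cluster ID, node type, and ARN are placeholders):

    from boto.elasticache.layer1 import ElastiCacheConnection

    conn = ElastiCacheConnection()
    conn.create_cache_cluster(
        cache_cluster_id='my-redis-node',
        num_cache_nodes=1,            # Redis clusters are single-node
        cache_node_type='cache.m1.small',
        engine='redis',
        snapshot_arns=['arn:aws:s3:::my_bucket/snapshot1.rdb'],
    )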
@@ -223,21 +288,23 @@ class ElastiCacheConnection(AWSQueryConnection):
cache_parameter_group_family,
description):
"""
- Creates a new Cache Parameter Group. Cache Parameter groups
- control the parameters for a Cache Cluster.
+ The CreateCacheParameterGroup operation creates a new cache
+ parameter group. A cache parameter group is a collection of
+ parameters that you apply to all of the nodes in a cache
+ cluster.
:type cache_parameter_group_name: string
- :param cache_parameter_group_name: The name of the Cache Parameter
- Group.
+ :param cache_parameter_group_name: A user-specified name for the cache
+ parameter group.
:type cache_parameter_group_family: string
- :param cache_parameter_group_family: The name of the Cache Parameter
- Group Family the Cache Parameter Group can be used with.
- Currently, memcached1.4 is the only cache parameter group family
- supported by the service.
+ :param cache_parameter_group_family: The name of the cache parameter
+ group family the cache parameter group can be used with.
+ Valid values are: `memcached1.4` | `redis2.6`
:type description: string
- :param description: The description for the Cache Parameter Group.
+ :param description: A user-specified description for the cache
+ parameter group.
"""
params = {
@@ -253,21 +320,26 @@ class ElastiCacheConnection(AWSQueryConnection):
def create_cache_security_group(self, cache_security_group_name,
description):
"""
- Creates a new Cache Security Group. Cache Security groups
- control access to one or more Cache Clusters.
+ The CreateCacheSecurityGroup operation creates a new cache
+ security group. Use a cache security group to control access
+ to one or more cache clusters.
- Only use cache security groups when you are creating a cluster
- outside of an Amazon Virtual Private Cloud (VPC). Inside of a
- VPC, use VPC security groups.
+ Cache security groups are only used when you are creating a
+ cluster outside of an Amazon Virtual Private Cloud (VPC). If
+ you are creating a cluster inside of a VPC, use a cache subnet
+ group instead. For more information, see
+ CreateCacheSubnetGroup .
:type cache_security_group_name: string
- :param cache_security_group_name: The name for the Cache Security
- Group. This value is stored as a lowercase string. Constraints:
- Must contain no more than 255 alphanumeric characters. Must not be
- "Default". Example: `mysecuritygroup`
+ :param cache_security_group_name: A name for the cache security group.
+ This value is stored as a lowercase string.
+ Constraints: Must contain no more than 255 alphanumeric characters.
+ Must not be the word "Default".
+
+ Example: `mysecuritygroup`
:type description: string
- :param description: The description for the Cache Security Group.
+ :param description: A description for the cache security group.
"""
params = {
@@ -282,20 +354,26 @@ class ElastiCacheConnection(AWSQueryConnection):
def create_cache_subnet_group(self, cache_subnet_group_name,
cache_subnet_group_description, subnet_ids):
"""
- Creates a new Cache Subnet Group.
+ The CreateCacheSubnetGroup operation creates a new cache
+ subnet group.
+
+ Use this parameter only when you are creating a cluster in an
+ Amazon Virtual Private Cloud (VPC).
:type cache_subnet_group_name: string
- :param cache_subnet_group_name: The name for the Cache Subnet Group.
- This value is stored as a lowercase string. Constraints: Must
- contain no more than 255 alphanumeric characters or hyphens.
- Example: `mysubnetgroup`
+ :param cache_subnet_group_name: A name for the cache subnet group. This
+ value is stored as a lowercase string.
+ Constraints: Must contain no more than 255 alphanumeric characters or
+ hyphens.
+
+ Example: `mysubnetgroup`
:type cache_subnet_group_description: string
- :param cache_subnet_group_description: The description for the Cache
- Subnet Group.
+ :param cache_subnet_group_description: A description for the cache
+ subnet group.
:type subnet_ids: list
- :param subnet_ids: The EC2 Subnet IDs for the Cache Subnet Group.
+ :param subnet_ids: A list of VPC subnet IDs for the cache subnet group.
"""
params = {
@@ -310,17 +388,66 @@ class ElastiCacheConnection(AWSQueryConnection):
verb='POST',
path='/', params=params)
+ def create_replication_group(self, replication_group_id,
+ primary_cluster_id,
+ replication_group_description):
+ """
+ The CreateReplicationGroup operation creates a replication
+ group. A replication group is a collection of cache clusters,
+ where one of the clusters is a read/write primary and the
+ other clusters are read-only replicas. Writes to the primary
+ are automatically propagated to the replicas.
+
+ When you create a replication group, you must specify an
+ existing cache cluster that is in the primary role. When the
+ replication group has been successfully created, you can add
+ one or more read replicas to it, up to a total of five
+ read replicas.
+
+ :type replication_group_id: string
+ :param replication_group_id:
+ The replication group identifier. This parameter is stored as a
+ lowercase string.
+
+ Constraints:
+
+
+ + Must contain from 1 to 20 alphanumeric characters or hyphens.
+ + First character must be a letter.
+ + Cannot end with a hyphen or contain two consecutive hyphens.
+
+ :type primary_cluster_id: string
+ :param primary_cluster_id: The identifier of the cache cluster that
+ will serve as the primary for this replication group. This cache
+ cluster must already exist and have a status of available .
+
+ :type replication_group_description: string
+ :param replication_group_description: A user-specified description for
+ the replication group.
+
+ """
+ params = {
+ 'ReplicationGroupId': replication_group_id,
+ 'PrimaryClusterId': primary_cluster_id,
+ 'ReplicationGroupDescription': replication_group_description,
+ }
+ return self._make_request(
+ action='CreateReplicationGroup',
+ verb='POST',
+ path='/', params=params)
+
def delete_cache_cluster(self, cache_cluster_id):
"""
- Deletes a previously provisioned Cache Cluster. A successful
- response from the web service indicates the request was
- received correctly. This action cannot be canceled or
- reverted. DeleteCacheCluster deletes all associated Cache
- Nodes, node endpoints and the Cache Cluster itself.
+ The DeleteCacheCluster operation deletes a previously
+ provisioned cache cluster. DeleteCacheCluster deletes all
+ associated cache nodes, node endpoints and the cache cluster
+ itself. When you receive a successful response from this
+ operation, Amazon ElastiCache immediately begins deleting the
+ cache cluster; you cannot cancel or revert this operation.
:type cache_cluster_id: string
- :param cache_cluster_id: The Cache Cluster identifier for the Cache
- Cluster to be deleted. This parameter isn't case sensitive.
+ :param cache_cluster_id: The cache cluster identifier for the cluster
+ to be deleted. This parameter is not case sensitive.
"""
params = {'CacheClusterId': cache_cluster_id, }
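A sketch of the new replication-group call; 'my-redis-primary' stands in for an existing, available Redis cache cluster:

    from boto.elasticache.layer1 import ElastiCacheConnection

    conn = ElastiCacheConnection()
    conn.create_replication_group(
        replication_group_id='my-repl-group',
        primary_cluster_id='my-redis-primary',
        replication_group_description='read replicas for my-redis-primary',
    )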
@@ -331,14 +458,16 @@ class ElastiCacheConnection(AWSQueryConnection):
def delete_cache_parameter_group(self, cache_parameter_group_name):
"""
- Deletes the specified CacheParameterGroup. The
- CacheParameterGroup cannot be deleted if it is associated with
- any cache clusters.
+ The DeleteCacheParameterGroup operation deletes the specified
+ cache parameter group. You cannot delete a cache parameter
+ group if it is associated with any cache clusters.
:type cache_parameter_group_name: string
- :param cache_parameter_group_name: The name of the Cache Parameter
- Group to delete. The specified cache security group must not be
- associated with any Cache clusters.
+ :param cache_parameter_group_name:
+ The name of the cache parameter group to delete.
+
+ The specified cache security group must not be associated with any
+ cache clusters.
"""
params = {
@@ -351,13 +480,16 @@ class ElastiCacheConnection(AWSQueryConnection):
def delete_cache_security_group(self, cache_security_group_name):
"""
- Deletes a Cache Security Group.
- The specified Cache Security Group must not be associated with
- any Cache Clusters.
+ The DeleteCacheSecurityGroup operation deletes a cache
+ security group.
+ You cannot delete a cache security group if it is associated
+ with any cache clusters.
:type cache_security_group_name: string
- :param cache_security_group_name: The name of the Cache Security Group
- to delete. You cannot delete the default security group.
+ :param cache_security_group_name:
+ The name of the cache security group to delete.
+
+ You cannot delete the default security group.
"""
params = {
@@ -370,14 +502,16 @@ class ElastiCacheConnection(AWSQueryConnection):
def delete_cache_subnet_group(self, cache_subnet_group_name):
"""
- Deletes a Cache Subnet Group.
- The specified Cache Subnet Group must not be associated with
- any Cache Clusters.
+ The DeleteCacheSubnetGroup operation deletes a cache subnet
+ group.
+ You cannot delete a cache subnet group if it is associated
+ with any cache clusters.
:type cache_subnet_group_name: string
- :param cache_subnet_group_name: The name of the Cache Subnet Group to
- delete. Constraints: Must contain no more than 255 alphanumeric
- characters or hyphens.
+ :param cache_subnet_group_name: The name of the cache subnet group to
+ delete.
+ Constraints: Must contain no more than 255 alphanumeric characters or
+ hyphens.
"""
params = {'CacheSubnetGroupName': cache_subnet_group_name, }
@@ -386,19 +520,41 @@ class ElastiCacheConnection(AWSQueryConnection):
verb='POST',
path='/', params=params)
+ def delete_replication_group(self, replication_group_id):
+ """
+ The DeleteReplicationGroup operation deletes an existing
+ replication group. DeleteReplicationGroup deletes the primary
+ cache cluster and all of the read replicas in the replication
+ group. When you receive a successful response from this
+ operation, Amazon ElastiCache immediately begins deleting the
+ entire replication group; you cannot cancel or revert this
+ operation.
+
+ :type replication_group_id: string
+ :param replication_group_id: The identifier for the replication group
+ to be deleted. This parameter is not case sensitive.
+
+ """
+ params = {'ReplicationGroupId': replication_group_id, }
+ return self._make_request(
+ action='DeleteReplicationGroup',
+ verb='POST',
+ path='/', params=params)
+
def describe_cache_clusters(self, cache_cluster_id=None,
max_records=None, marker=None,
show_cache_node_info=None):
"""
- Returns information about all provisioned Cache Clusters if no
- Cache Cluster identifier is specified, or about a specific
- Cache Cluster if a Cache Cluster identifier is supplied.
+ The DescribeCacheClusters operation returns information about
+ all provisioned cache clusters if no cache cluster identifier
+ is specified, or about a specific cache cluster if a cache
+ cluster identifier is supplied.
- Cluster information will be returned by default. An optional
- ShowDetails flag can be used to retrieve detailed information
- about the Cache Nodes associated with the Cache Cluster.
- Details include the DNS address and port for the Cache Node
- endpoint.
+ By default, abbreviated information about the cache
+ cluster(s) will be returned. You can use the optional
+ ShowDetails flag to retrieve detailed information about the
+ cache nodes associated with the cache clusters. These details
+ include the DNS address and port for the cache node endpoint.
If the cluster is in the CREATING state, only cluster level
information will be displayed until all of the nodes are
@@ -407,37 +563,40 @@ class ElastiCacheConnection(AWSQueryConnection):
If the cluster is in the DELETING state, only cluster level
information will be displayed.
- While adding Cache Nodes, node endpoint information and
- creation time for the additional nodes will not be displayed
- until they are completely provisioned. The cluster lifecycle
- tells the customer when new nodes are AVAILABLE.
-
- While removing existing Cache Nodes from an cluster, endpoint
- information for the removed nodes will not be displayed.
+ If cache nodes are currently being added to the cache cluster,
+ node endpoint information and creation time for the additional
+ nodes will not be displayed until they are completely
+ provisioned. When the cache cluster state is available , the
+ cluster is ready for use.
- DescribeCacheClusters supports pagination.
+ If cache nodes are currently being removed from the cache
+ cluster, no endpoint information for the removed nodes is
+ displayed.
:type cache_cluster_id: string
:param cache_cluster_id: The user-supplied cluster identifier. If this
- parameter is specified, only information about that specific Cache
- Cluster is returned. This parameter isn't case sensitive.
+ parameter is specified, only information about that specific cache
+ cluster is returned. This parameter isn't case sensitive.
:type max_records: integer
:param max_records: The maximum number of records to include in the
- response. If more records exist than the specified MaxRecords
+ response. If more records exist than the specified `MaxRecords`
value, a marker is included in the response so that the remaining
- results may be retrieved.
+ results can be retrieved.
+ Default: 100
+
+ Constraints: minimum 20; maximum 100.
:type marker: string
- :param marker: An optional marker provided in the previous
- DescribeCacheClusters request. If this parameter is specified, the
- response includes only records beyond the marker, up to the value
- specified by MaxRecords .
+ :param marker: An optional marker returned from a prior request. Use
+ this marker for pagination of results from this operation. If this
+ parameter is specified, the response includes only records beyond
+ the marker, up to the value specified by MaxRecords .
:type show_cache_node_info: boolean
:param show_cache_node_info: An optional flag that can be included in
- the DescribeCacheCluster request to retrieve Cache Nodes
- information.
+ the DescribeCacheCluster request to retrieve information about the
+ individual cache nodes.
"""
params = {}
@@ -461,39 +620,48 @@ class ElastiCacheConnection(AWSQueryConnection):
max_records=None, marker=None,
default_only=None):
"""
- Returns a list of the available cache engines and their
- versions.
+ The DescribeCacheEngineVersions operation returns a list of
+ the available cache engines and their versions.
:type engine: string
- :param engine: The cache engine to return.
+ :param engine: The cache engine to return. Valid values: `memcached` |
+ `redis`
:type engine_version: string
- :param engine_version: The cache engine version to return. Example:
- `1.4.14`
+ :param engine_version: The cache engine version to return.
+ Example: `1.4.14`
:type cache_parameter_group_family: string
- :param cache_parameter_group_family: The name of a specific Cache
- Parameter Group family to return details for. Constraints: +
- Must be 1 to 255 alphanumeric characters + First character must be
- a letter + Cannot end with a hyphen or contain two consecutive
- hyphens
+ :param cache_parameter_group_family:
+ The name of a specific cache parameter group family to return details
+ for.
+
+ Constraints:
+
+
+ + Must be 1 to 255 alphanumeric characters
+ + First character must be a letter
+ + Cannot end with a hyphen or contain two consecutive hyphens
:type max_records: integer
:param max_records: The maximum number of records to include in the
- response. If more records exist than the specified MaxRecords
+ response. If more records exist than the specified `MaxRecords`
value, a marker is included in the response so that the remaining
- results may be retrieved.
+ results can be retrieved.
+ Default: 100
+
+ Constraints: minimum 20; maximum 100.
:type marker: string
- :param marker: An optional marker provided in the previous
- DescribeCacheParameterGroups request. If this parameter is
- specified, the response includes only records beyond the marker, up
- to the value specified by MaxRecords .
+ :param marker: An optional marker returned from a prior request. Use
+ this marker for pagination of results from this operation. If this
+ parameter is specified, the response includes only records beyond
+ the marker, up to the value specified by MaxRecords .
:type default_only: boolean
- :param default_only: Indicates that only the default version of the
- specified engine or engine and major version combination is
- returned.
+ :param default_only: If true , specifies that only the default version
+ of the specified engine or engine and major version combination is
+ to be returned.
"""
params = {}
@@ -519,9 +687,10 @@ class ElastiCacheConnection(AWSQueryConnection):
cache_parameter_group_name=None,
max_records=None, marker=None):
"""
- Returns a list of CacheParameterGroup descriptions. If a
- CacheParameterGroupName is specified, the list will contain
- only the descriptions of the specified CacheParameterGroup.
+ The DescribeCacheParameterGroups operation returns a list of
+ cache parameter group descriptions. If a cache parameter group
+ name is specified, the list will contain only the descriptions
+ for that group.
:type cache_parameter_group_name: string
:param cache_parameter_group_name: The name of a specific cache
@@ -529,15 +698,18 @@ class ElastiCacheConnection(AWSQueryConnection):
:type max_records: integer
:param max_records: The maximum number of records to include in the
- response. If more records exist than the specified MaxRecords
+ response. If more records exist than the specified `MaxRecords`
value, a marker is included in the response so that the remaining
- results may be retrieved.
+ results can be retrieved.
+ Default: 100
+
+ Constraints: minimum 20; maximum 100.
:type marker: string
- :param marker: An optional marker provided in the previous
- DescribeCacheParameterGroups request. If this parameter is
- specified, the response includes only records beyond the marker, up
- to the value specified by MaxRecords .
+ :param marker: An optional marker returned from a prior request. Use
+ this marker for pagination of results from this operation. If this
+ parameter is specified, the response includes only records beyond
+ the marker, up to the value specified by MaxRecords .
"""
params = {}
@@ -555,28 +727,31 @@ class ElastiCacheConnection(AWSQueryConnection):
def describe_cache_parameters(self, cache_parameter_group_name,
source=None, max_records=None, marker=None):
"""
- Returns the detailed parameter list for a particular
- CacheParameterGroup.
+ The DescribeCacheParameters operation returns the detailed
+ parameter list for a particular cache parameter group.
:type cache_parameter_group_name: string
:param cache_parameter_group_name: The name of a specific cache
parameter group to return details for.
:type source: string
- :param source: The parameter types to return. Valid values: `user` |
- `system` | `engine-default`
+ :param source: The parameter types to return.
+ Valid values: `user` | `system` | `engine-default`
:type max_records: integer
:param max_records: The maximum number of records to include in the
- response. If more records exist than the specified MaxRecords
+ response. If more records exist than the specified `MaxRecords`
value, a marker is included in the response so that the remaining
- results may be retrieved.
+ results can be retrieved.
+ Default: 100
+
+ Constraints: minimum 20; maximum 100.
:type marker: string
- :param marker: An optional marker provided in the previous
- DescribeCacheClusters request. If this parameter is specified, the
- response includes only records beyond the marker, up to the value
- specified by MaxRecords .
+ :param marker: An optional marker returned from a prior request. Use
+ this marker for pagination of results from this operation. If this
+ parameter is specified, the response includes only records beyond
+ the marker, up to the value specified by MaxRecords .
"""
params = {
@@ -596,25 +771,29 @@ class ElastiCacheConnection(AWSQueryConnection):
def describe_cache_security_groups(self, cache_security_group_name=None,
max_records=None, marker=None):
"""
- Returns a list of CacheSecurityGroup descriptions. If a
- CacheSecurityGroupName is specified, the list will contain
- only the description of the specified CacheSecurityGroup.
+ The DescribeCacheSecurityGroups operation returns a list of
+ cache security group descriptions. If a cache security group
+ name is specified, the list will contain only the description
+ of that group.
:type cache_security_group_name: string
- :param cache_security_group_name: The name of the Cache Security Group
+ :param cache_security_group_name: The name of the cache security group
to return details for.
:type max_records: integer
:param max_records: The maximum number of records to include in the
- response. If more records exist than the specified MaxRecords
+ response. If more records exist than the specified `MaxRecords`
value, a marker is included in the response so that the remaining
- results may be retrieved.
+ results can be retrieved.
+ Default: 100
+
+ Constraints: minimum 20; maximum 100.
:type marker: string
- :param marker: An optional marker provided in the previous
- DescribeCacheClusters request. If this parameter is specified, the
- response includes only records beyond the marker, up to the value
- specified by MaxRecords .
+ :param marker: An optional marker returned from a prior request. Use
+ this marker for pagination of results from this operation. If this
+ parameter is specified, the response includes only records beyond
+ the marker, up to the value specified by MaxRecords .
"""
params = {}
@@ -632,26 +811,29 @@ class ElastiCacheConnection(AWSQueryConnection):
def describe_cache_subnet_groups(self, cache_subnet_group_name=None,
max_records=None, marker=None):
"""
- Returns a list of CacheSubnetGroup descriptions. If a
- CacheSubnetGroupName is specified, the list will contain only
- the description of the specified Cache Subnet Group.
+ The DescribeCacheSubnetGroups operation returns a list of
+ cache subnet group descriptions. If a subnet group name is
+ specified, the list will contain only the description of that
+ group.
:type cache_subnet_group_name: string
- :param cache_subnet_group_name: The name of the Cache Subnet Group to
+ :param cache_subnet_group_name: The name of the cache subnet group to
return details for.
:type max_records: integer
:param max_records: The maximum number of records to include in the
response. If more records exist than the specified `MaxRecords`
value, a marker is included in the response so that the remaining
- results may be retrieved. Default: 100 Constraints: minimum 20,
- maximum 100
+ results can be retrieved.
+ Default: 100
+
+ Constraints: minimum 20; maximum 100.
:type marker: string
- :param marker: An optional marker provided in the previous
- DescribeCacheSubnetGroups request. If this parameter is specified,
- the response includes only records beyond the marker, up to the
- value specified by `MaxRecords`.
+ :param marker: An optional marker returned from a prior request. Use
+ this marker for pagination of results from this operation. If this
+ parameter is specified, the response includes only records beyond
+ the marker, up to the value specified by MaxRecords .
"""
params = {}
@@ -670,25 +852,28 @@ class ElastiCacheConnection(AWSQueryConnection):
cache_parameter_group_family,
max_records=None, marker=None):
"""
- Returns the default engine and system parameter information
- for the specified cache engine.
+ The DescribeEngineDefaultParameters operation returns the
+ default engine and system parameter information for the
+ specified cache engine.
:type cache_parameter_group_family: string
- :param cache_parameter_group_family: The name of the Cache Parameter
- Group Family. Currently, memcached1.4 is the only cache parameter
- group family supported by the service.
+ :param cache_parameter_group_family: The name of the cache parameter
+ group family. Valid values are: `memcached1.4` | `redis2.6`
:type max_records: integer
:param max_records: The maximum number of records to include in the
- response. If more records exist than the specified MaxRecords
+ response. If more records exist than the specified `MaxRecords`
value, a marker is included in the response so that the remaining
- results may be retrieved.
+ results can be retrieved.
+ Default: 100
+
+ Constraints: minimum 20; maximum 100.
:type marker: string
- :param marker: An optional marker provided in the previous
- DescribeCacheClusters request. If this parameter is specified, the
- response includes only records beyond the marker, up to the value
- specified by MaxRecords .
+ :param marker: An optional marker returned from a prior request. Use
+ this marker for pagination of results from this operation. If this
+ parameter is specified, the response includes only records beyond
+ the marker, up to the value specified by MaxRecords .
"""
params = {
@@ -707,12 +892,15 @@ class ElastiCacheConnection(AWSQueryConnection):
start_time=None, end_time=None, duration=None,
max_records=None, marker=None):
"""
- Returns events related to Cache Clusters, Cache Security
- Groups, and Cache Parameter Groups for the past 14 days.
- Events specific to a particular Cache Cluster, Cache Security
- Group, or Cache Parameter Group can be obtained by providing
- the name as a parameter. By default, the past hour of events
- are returned.
+ The DescribeEvents operation returns events related to cache
+ clusters, cache security groups, and cache parameter groups.
+ You can obtain events specific to a particular cache cluster,
+ cache security group, or cache parameter group by providing
+ the name as a parameter.
+
+ By default, only the events occurring within the last hour are
+ returned; however, you can retrieve up to 14 days' worth of
+ events if necessary.
:type source_identifier: string
:param source_identifier: The identifier of the event source for which
@@ -722,29 +910,34 @@ class ElastiCacheConnection(AWSQueryConnection):
:type source_type: string
:param source_type: The event source to retrieve events for. If no
value is specified, all events are returned.
+ Valid values are: `cache-cluster` | `cache-parameter-group` | `cache-
+ security-group` | `cache-subnet-group`
- :type start_time: string
+ :type start_time: timestamp
:param start_time: The beginning of the time interval to retrieve
events for, specified in ISO 8601 format.
- :type end_time: string
+ :type end_time: timestamp
:param end_time: The end of the time interval for which to retrieve
events, specified in ISO 8601 format.
:type duration: integer
- :param duration: The number of minutes to retrieve events for.
+ :param duration: The number of minutes' worth of events to retrieve.
:type max_records: integer
:param max_records: The maximum number of records to include in the
- response. If more records exist than the specified MaxRecords
+ response. If more records exist than the specified `MaxRecords`
value, a marker is included in the response so that the remaining
- results may be retrieved.
+ results can be retrieved.
+ Default: 100
+
+ Constraints: minimum 20; maximum 100.
:type marker: string
- :param marker: An optional marker provided in the previous
- DescribeCacheClusters request. If this parameter is specified, the
- response includes only records beyond the marker, up to the value
- specified by MaxRecords .
+ :param marker: An optional marker returned from a prior request. Use
+ this marker for pagination of results from this operation. If this
+ parameter is specified, the response includes only records beyond
+ the marker, up to the value specified by MaxRecords .
"""
params = {}
@@ -767,6 +960,48 @@ class ElastiCacheConnection(AWSQueryConnection):
verb='POST',
path='/', params=params)
+ def describe_replication_groups(self, replication_group_id=None,
+ max_records=None, marker=None):
+ """
+ The DescribeReplicationGroups operation returns information
+ about a particular replication group. If no identifier is
+ specified, DescribeReplicationGroups returns information about
+ all replication groups.
+
+ :type replication_group_id: string
+ :param replication_group_id: The identifier for the replication group
+ to be described. This parameter is not case sensitive.
+ If you do not specify this parameter, information about all replication
+ groups is returned.
+
+ :type max_records: integer
+ :param max_records: The maximum number of records to include in the
+ response. If more records exist than the specified `MaxRecords`
+ value, a marker is included in the response so that the remaining
+ results can be retrieved.
+ Default: 100
+
+ Constraints: minimum 20; maximum 100.
+
+ :type marker: string
+ :param marker: An optional marker returned from a prior request. Use
+ this marker for pagination of results from this operation. If this
+ parameter is specified, the response includes only records beyond
+ the marker, up to the value specified by MaxRecords .
+
+ """
+ params = {}
+ if replication_group_id is not None:
+ params['ReplicationGroupId'] = replication_group_id
+ if max_records is not None:
+ params['MaxRecords'] = max_records
+ if marker is not None:
+ params['Marker'] = marker
+ return self._make_request(
+ action='DescribeReplicationGroups',
+ verb='POST',
+ path='/', params=params)
+
def describe_reserved_cache_nodes(self, reserved_cache_node_id=None,
reserved_cache_nodes_offering_id=None,
cache_node_type=None, duration=None,
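Paging through the new DescribeReplicationGroups operation with the marker, per the docstrings above. The nested response keys follow the usual Query-to-JSON layout and should be treated as an assumption to verify against a live response:

    from boto.elasticache.layer1 import ElastiCacheConnection

    conn = ElastiCacheConnection()
    marker = None
    while True:
        response = conn.describe_replication_groups(max_records=20,
                                                    marker=marker)
        result = (response['DescribeReplicationGroupsResponse']
                          ['DescribeReplicationGroupsResult'])
        for group in result['ReplicationGroups']:
            print(group['ReplicationGroupId'], group['Status'])
        marker = result.get('Marker')
        if not marker:
            break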
@@ -774,51 +1009,57 @@ class ElastiCacheConnection(AWSQueryConnection):
offering_type=None, max_records=None,
marker=None):
"""
- Returns information about reserved Cache Nodes for this
- account, or about a specified reserved Cache Node.
+ The DescribeReservedCacheNodes operation returns information
+ about reserved cache nodes for this account, or about a
+ specified reserved cache node.
:type reserved_cache_node_id: string
- :param reserved_cache_node_id: The reserved Cache Node identifier
- filter value. Specify this parameter to show only the reservation
- that matches the specified reservation ID.
+ :param reserved_cache_node_id: The reserved cache node identifier
+ filter value. Use this parameter to show only the reservation that
+ matches the specified reservation ID.
:type reserved_cache_nodes_offering_id: string
:param reserved_cache_nodes_offering_id: The offering identifier filter
- value. Specify this parameter to show only purchased reservations
+ value. Use this parameter to show only purchased reservations
matching the specified offering identifier.
:type cache_node_type: string
- :param cache_node_type: The Cache Node type filter value. Specify this
+ :param cache_node_type: The cache node type filter value. Use this
parameter to show only those reservations matching the specified
- Cache Nodes type.
+ cache node type.
:type duration: string
:param duration: The duration filter value, specified in years or
- seconds. Specify this parameter to show only reservations for this
- duration. Valid Values: `1 | 3 | 31536000 | 94608000`
+ seconds. Use this parameter to show only reservations for this
+ duration.
+ Valid Values: `1 | 3 | 31536000 | 94608000`
:type product_description: string
- :param product_description: The product description filter value.
- Specify this parameter to show only those reservations matching the
+ :param product_description: The product description filter value. Use
+ this parameter to show only those reservations matching the
specified product description.
:type offering_type: string
- :param offering_type: The offering type filter value. Specify this
+ :param offering_type: The offering type filter value. Use this
parameter to show only the available offerings matching the
- specified offering type. Valid Values: `"Light Utilization" |
- "Medium Utilization" | "Heavy Utilization"`
+ specified offering type.
+ Valid values: `"Light Utilization" | "Medium Utilization" | "Heavy
+ Utilization"`
:type max_records: integer
:param max_records: The maximum number of records to include in the
- response. If more than the `MaxRecords` value is available, a
- marker is included in the response so that the following results
- can be retrieved. Default: 100 Constraints: minimum 20, maximum
- 100
+ response. If more records exist than the specified `MaxRecords`
+ value, a marker is included in the response so that the remaining
+ results can be retrieved.
+ Default: 100
+
+ Constraints: minimum 20; maximum 100.
:type marker: string
- :param marker: The marker provided in the previous request. If this
- parameter is specified, the response includes records beyond the
- marker only, up to `MaxRecords`.
+ :param marker: An optional marker returned from a prior request. Use
+ this marker for pagination of results from this operation. If this
+ parameter is specified, the response includes only records beyond
+ the marker, up to the value specified by MaxRecords .
"""
params = {}
@@ -852,46 +1093,51 @@ class ElastiCacheConnection(AWSQueryConnection):
max_records=None,
marker=None):
"""
- Lists available reserved Cache Node offerings.
+ The DescribeReservedCacheNodesOfferings operation lists
+ available reserved cache node offerings.
:type reserved_cache_nodes_offering_id: string
:param reserved_cache_nodes_offering_id: The offering identifier filter
- value. Specify this parameter to show only the available offering
- that matches the specified reservation identifier. Example:
- `438012d3-4052-4cc7-b2e3-8d3372e0e706`
+ value. Use this parameter to show only the available offering that
+ matches the specified reservation identifier.
+ Example: `438012d3-4052-4cc7-b2e3-8d3372e0e706`
:type cache_node_type: string
- :param cache_node_type: The Cache Node type filter value. Specify this
+ :param cache_node_type: The cache node type filter value. Use this
parameter to show only the available offerings matching the
- specified Cache Node type.
+ specified cache node type.
:type duration: string
:param duration: Duration filter value, specified in years or seconds.
- Specify this parameter to show only reservations for this duration.
- Valid Values: `1 | 3 | 31536000 | 94608000`
+ Use this parameter to show only reservations for a given duration.
+ Valid Values: `1 | 3 | 31536000 | 94608000`
:type product_description: string
- :param product_description: Product description filter value. Specify
+ :param product_description: The product description filter value. Use
this parameter to show only the available offerings matching the
specified product description.
:type offering_type: string
- :param offering_type: The offering type filter value. Specify this
+ :param offering_type: The offering type filter value. Use this
parameter to show only the available offerings matching the
- specified offering type. Valid Values: `"Light Utilization" |
- "Medium Utilization" | "Heavy Utilization"`
+ specified offering type.
+ Valid Values: `"Light Utilization" | "Medium Utilization" | "Heavy
+ Utilization"`
:type max_records: integer
:param max_records: The maximum number of records to include in the
- response. If more than the `MaxRecords` value is available, a
- marker is included in the response so that the following results
- can be retrieved. Default: 100 Constraints: minimum 20, maximum
- 100
+ response. If more records exist than the specified `MaxRecords`
+ value, a marker is included in the response so that the remaining
+ results can be retrieved.
+ Default: 100
+
+ Constraints: minimum 20; maximum 100.
:type marker: string
- :param marker: The marker provided in the previous request. If this
- parameter is specified, the response includes records beyond the
- marker only, up to `MaxRecords`.
+ :param marker: An optional marker returned from a prior request. Use
+ this marker for pagination of results from this operation. If this
+ parameter is specified, the response includes only records beyond
+ the marker, up to the value specified by MaxRecords .
"""
params = {}
@@ -925,85 +1171,101 @@ class ElastiCacheConnection(AWSQueryConnection):
apply_immediately=None, engine_version=None,
auto_minor_version_upgrade=None):
"""
- Modifies the Cache Cluster settings. You can change one or
- more Cache Cluster configuration parameters by specifying the
- parameters and the new values in the request.
+ The ModifyCacheCluster operation modifies the settings for a
+ cache cluster. You can use this operation to change one or
+ more cluster configuration parameters by specifying the
+ parameters and the new values.
:type cache_cluster_id: string
- :param cache_cluster_id: The Cache Cluster identifier. This value is
+ :param cache_cluster_id: The cache cluster identifier. This value is
stored as a lowercase string.
:type num_cache_nodes: integer
- :param num_cache_nodes: The number of Cache Nodes the Cache Cluster
- should have. If NumCacheNodes is greater than the existing number
- of Cache Nodes, Cache Nodes will be added. If NumCacheNodes is less
- than the existing number of Cache Nodes, Cache Nodes will be
- removed. When removing Cache Nodes, the Ids of the specific Cache
- Nodes to be removed must be supplied using the CacheNodeIdsToRemove
- parameter.
+ :param num_cache_nodes: The number of cache nodes that the cache
+ cluster should have. If the value for NumCacheNodes is greater than
+ the existing number of cache nodes, then more nodes will be added.
+ If the value is less than the existing number of cache nodes, then
+ cache nodes will be removed.
+ If you are removing cache nodes, you must use the CacheNodeIdsToRemove
+ parameter to provide the IDs of the specific cache nodes to be
+ removed.
:type cache_node_ids_to_remove: list
- :param cache_node_ids_to_remove: The list of Cache Node IDs to be
- removed. This parameter is only valid when NumCacheNodes is less
- than the existing number of Cache Nodes. The number of Cache Node
- Ids supplied in this parameter must match the difference between
- the existing number of Cache Nodes in the cluster and the new
- NumCacheNodes requested.
+ :param cache_node_ids_to_remove: A list of cache node IDs to be
+ removed. A node ID is a numeric identifier (0001, 0002, etc.). This
+ parameter is only valid when NumCacheNodes is less than the
+ existing number of cache nodes. The number of cache node IDs
+ supplied in this parameter must match the difference between the
+ existing number of cache nodes in the cluster and the value of
+ NumCacheNodes in the request.
:type cache_security_group_names: list
- :param cache_security_group_names: A list of Cache Security Group Names
- to authorize on this Cache Cluster. This change is asynchronously
- applied as soon as possible. This parameter can be used only with
- clusters that are created outside of an Amazon Virtual Private
- Cloud (VPC). Constraints: Must contain no more than 255
- alphanumeric characters. Must not be "Default".
+ :param cache_security_group_names: A list of cache security group names
+ to authorize on this cache cluster. This change is asynchronously
+ applied as soon as possible.
+ This parameter can be used only with clusters that are created outside
+ of an Amazon Virtual Private Cloud (VPC).
+
+ Constraints: Must contain no more than 255 alphanumeric characters.
+ Must not be "Default".
:type security_group_ids: list
:param security_group_ids: Specifies the VPC Security Groups associated
- with the Cache Cluster. This parameter can be used only with
- clusters that are created in an Amazon Virtual Private Cloud (VPC).
+ with the cache cluster.
+ This parameter can be used only with clusters that are created in an
+ Amazon Virtual Private Cloud (VPC).
:type preferred_maintenance_window: string
:param preferred_maintenance_window: The weekly time range (in UTC)
- during which system maintenance can occur, which may result in an
- outage. This change is made immediately. If moving this window to
- the current time, there must be at least 120 minutes between the
- current time and end of the window to ensure pending changes are
- applied.
+ during which system maintenance can occur. Note that system
+ maintenance may result in an outage. This change is made
+ immediately. If you are moving this window to the current time,
+ there must be at least 120 minutes between the current time and end
+ of the window to ensure that pending changes are applied.
:type notification_topic_arn: string
- :param notification_topic_arn: The Amazon Resource Name (ARN) of the
- SNS topic to which notifications will be sent. The SNS topic owner
- must be same as the Cache Cluster owner.
+ :param notification_topic_arn:
+ The Amazon Resource Name (ARN) of the SNS topic to which notifications
+ will be sent.
+
+ The SNS topic owner must be the same as the cache cluster owner.
:type cache_parameter_group_name: string
- :param cache_parameter_group_name: The name of the Cache Parameter
- Group to apply to this Cache Cluster. This change is asynchronously
+ :param cache_parameter_group_name: The name of the cache parameter
+ group to apply to this cache cluster. This change is asynchronously
applied as soon as possible for parameters when the
ApplyImmediately parameter is specified as true for this request.
:type notification_topic_status: string
:param notification_topic_status: The status of the Amazon SNS
- notification topic. The value can be active or inactive .
- Notifications are sent only if the status is active .
+ notification topic. Notifications are sent only if the status is
+ `active`.
+ Valid values: `active` | `inactive`
:type apply_immediately: boolean
- :param apply_immediately: Specifies whether or not the modifications in
- this request and any pending modifications are asynchronously
- applied as soon as possible, regardless of the
- PreferredMaintenanceWindow setting for the Cache Cluster. If this
- parameter is passed as `False`, changes to the Cache Cluster are
- applied on the next maintenance reboot, or the next failure reboot,
- whichever occurs first. Default: `False`
+ :param apply_immediately: If `True`, this parameter causes the
+ modifications in this request and any pending modifications to be
+ applied, asynchronously and as soon as possible, regardless of the
+ PreferredMaintenanceWindow setting for the cache cluster.
+ If `False`, then changes to the cache cluster are applied on the next
+ maintenance reboot, or the next failure reboot, whichever occurs
+ first.
+
+ Valid values: `True` | `False`
+
+ Default: `False`
:type engine_version: string
- :param engine_version: The version of the cache engine to upgrade this
- cluster to.
+ :param engine_version: The upgraded version of the cache engine to be
+ run on the cache cluster nodes.
:type auto_minor_version_upgrade: boolean
- :param auto_minor_version_upgrade: Indicates that minor engine upgrades
- will be applied automatically to the Cache Cluster during the
- maintenance window. Default: `True`
+ :param auto_minor_version_upgrade: If `True`, then minor engine
+ upgrades will be applied automatically to the cache cluster during
+ the maintenance window.
+ Valid values: `True` | `False`
+
+ Default: `True`
"""
params = {'CacheClusterId': cache_cluster_id, }
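
A minimal usage sketch of the call documented above; the region, cluster ID and node count are placeholders, and apply_immediately skips the maintenance window as described in the docstring:

import boto.elasticache

# Hypothetical cluster ID; scale the cluster to 3 nodes right away.
conn = boto.elasticache.connect_to_region('us-east-1')
conn.modify_cache_cluster(cache_cluster_id='my-memcached-cluster',
                          num_cache_nodes=3,
                          apply_immediately=True)
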
@@ -1045,10 +1307,10 @@ class ElastiCacheConnection(AWSQueryConnection):
def modify_cache_parameter_group(self, cache_parameter_group_name,
parameter_name_values):
"""
- Modifies the parameters of a CacheParameterGroup. To modify
- more than one parameter, submit a list of ParameterName and
- ParameterValue parameters. A maximum of 20 parameters can be
- modified in a single request.
+ The ModifyCacheParameterGroup operation modifies the
+ parameters of a cache parameter group. You can modify up to 20
+ parameters in a single request by submitting a list of parameter
+ name and value pairs.
:type cache_parameter_group_name: string
:param cache_parameter_group_name: The name of the cache parameter
@@ -1056,9 +1318,9 @@ class ElastiCacheConnection(AWSQueryConnection):
:type parameter_name_values: list
:param parameter_name_values: An array of parameter names and values
- for the parameter update. At least one parameter name and value
- must be supplied; subsequent arguments are optional. A maximum of
- 20 parameters may be modified in a single request.
+ for the parameter update. You must supply at least one parameter
+ name and value; subsequent arguments are optional. A maximum of 20
+ parameters may be modified per request.
"""
params = {
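
A hedged sketch of a call to this method. It assumes the name/value pairs are passed as (name, value) tuples; the group and parameter names are placeholders:

import boto.elasticache

conn = boto.elasticache.connect_to_region('us-east-1')
# Assumed pair format: (ParameterName, ParameterValue) tuples,
# at most 20 per request per the docstring above.
conn.modify_cache_parameter_group(
    'my-memcached-params',
    [('max_item_size', '10485760')])
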
@@ -1077,20 +1339,23 @@ class ElastiCacheConnection(AWSQueryConnection):
cache_subnet_group_description=None,
subnet_ids=None):
"""
- Modifies an existing Cache Subnet Group.
+ The ModifyCacheSubnetGroup operation modifies an existing
+ cache subnet group.
:type cache_subnet_group_name: string
- :param cache_subnet_group_name: The name for the Cache Subnet Group.
- This value is stored as a lowercase string. Constraints: Must
- contain no more than 255 alphanumeric characters or hyphens.
- Example: `mysubnetgroup`
+ :param cache_subnet_group_name: The name for the cache subnet group.
+ This value is stored as a lowercase string.
+ Constraints: Must contain no more than 255 alphanumeric characters or
+ hyphens.
+
+ Example: `mysubnetgroup`
:type cache_subnet_group_description: string
- :param cache_subnet_group_description: The description for the Cache
- Subnet Group.
+ :param cache_subnet_group_description: A description for the cache
+ subnet group.
:type subnet_ids: list
- :param subnet_ids: The EC2 Subnet IDs for the Cache Subnet Group.
+ :param subnet_ids: The EC2 subnet IDs for the cache subnet group.
"""
params = {'CacheSubnetGroupName': cache_subnet_group_name, }
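
For example (the group name comes from the docstring's own example; the subnet ID is a placeholder):

import boto.elasticache

conn = boto.elasticache.connect_to_region('us-east-1')
# Replace the subnet list of an existing cache subnet group.
conn.modify_cache_subnet_group('mysubnetgroup',
                               subnet_ids=['subnet-12345678'])
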
@@ -1105,25 +1370,160 @@ class ElastiCacheConnection(AWSQueryConnection):
verb='POST',
path='/', params=params)
+ def modify_replication_group(self, replication_group_id,
+ replication_group_description=None,
+ cache_security_group_names=None,
+ security_group_ids=None,
+ preferred_maintenance_window=None,
+ notification_topic_arn=None,
+ cache_parameter_group_name=None,
+ notification_topic_status=None,
+ apply_immediately=None, engine_version=None,
+ auto_minor_version_upgrade=None,
+ primary_cluster_id=None):
+ """
+ The ModifyReplicationGroup operation modifies the settings for
+ a replication group.
+
+ :type replication_group_id: string
+ :param replication_group_id: The identifier of the replication group to
+ modify.
+
+ :type replication_group_description: string
+ :param replication_group_description: A description for the replication
+ group. Maximum length is 255 characters.
+
+ :type cache_security_group_names: list
+ :param cache_security_group_names: A list of cache security group names
+ to authorize for the clusters in this replication group. This
+ change is asynchronously applied as soon as possible.
+ This parameter can be used only with replication groups containing
+ cache clusters running outside of an Amazon Virtual Private Cloud
+ (VPC).
+
+ Constraints: Must contain no more than 255 alphanumeric characters.
+ Must not be "Default".
+
+ :type security_group_ids: list
+ :param security_group_ids: Specifies the VPC Security Groups associated
+ with the cache clusters in the replication group.
+ This parameter can be used only with replication groups containing
+ cache clusters running in an Amazon Virtual Private Cloud (VPC).
+
+ :type preferred_maintenance_window: string
+ :param preferred_maintenance_window: The weekly time range (in UTC)
+ during which replication group system maintenance can occur. Note
+ that system maintenance may result in an outage. This change is
+ made immediately. If you are moving this window to the current
+ time, there must be at least 120 minutes between the current time
+ and end of the window to ensure that pending changes are applied.
+
+ :type notification_topic_arn: string
+ :param notification_topic_arn:
+ The Amazon Resource Name (ARN) of the SNS topic to which notifications
+ will be sent.
+
+ The SNS topic owner must be the same as the replication group owner.
+
+ :type cache_parameter_group_name: string
+ :param cache_parameter_group_name: The name of the cache parameter
+ group to apply to all of the cache nodes in this replication group.
+ This change is asynchronously applied as soon as possible for
+ parameters when the ApplyImmediately parameter is specified as true
+ for this request.
+
+ :type notification_topic_status: string
+ :param notification_topic_status: The status of the Amazon SNS
+ notification topic for the replication group. Notifications are
+ sent only if the status is `active`.
+ Valid values: `active` | `inactive`
+
+ :type apply_immediately: boolean
+ :param apply_immediately: If `True`, this parameter causes the
+ modifications in this request and any pending modifications to be
+ applied, asynchronously and as soon as possible, regardless of the
+ PreferredMaintenanceWindow setting for the replication group.
+ If `False`, then changes to the nodes in the replication group are
+ applied on the next maintenance reboot, or the next failure reboot,
+ whichever occurs first.
+
+ Valid values: `True` | `False`
+
+ Default: `False`
+
+ :type engine_version: string
+ :param engine_version: The upgraded version of the cache engine to be
+ run on the nodes in the replication group.
+
+ :type auto_minor_version_upgrade: boolean
+ :param auto_minor_version_upgrade: Determines whether minor engine
+ upgrades will be applied automatically to all of the cache nodes in
+ the replication group during the maintenance window. A value of
+ `True` allows these upgrades to occur; `False` disables automatic
+ upgrades.
+
+ :type primary_cluster_id: string
+ :param primary_cluster_id: If this parameter is specified, ElastiCache
+ will promote each of the nodes in the specified cache cluster to
+ the primary role. The nodes of all other clusters in the
+ replication group will be read replicas.
+
+ """
+ params = {'ReplicationGroupId': replication_group_id, }
+ if replication_group_description is not None:
+ params['ReplicationGroupDescription'] = replication_group_description
+ if cache_security_group_names is not None:
+ self.build_list_params(params,
+ cache_security_group_names,
+ 'CacheSecurityGroupNames.member')
+ if security_group_ids is not None:
+ self.build_list_params(params,
+ security_group_ids,
+ 'SecurityGroupIds.member')
+ if preferred_maintenance_window is not None:
+ params['PreferredMaintenanceWindow'] = preferred_maintenance_window
+ if notification_topic_arn is not None:
+ params['NotificationTopicArn'] = notification_topic_arn
+ if cache_parameter_group_name is not None:
+ params['CacheParameterGroupName'] = cache_parameter_group_name
+ if notification_topic_status is not None:
+ params['NotificationTopicStatus'] = notification_topic_status
+ if apply_immediately is not None:
+ params['ApplyImmediately'] = str(
+ apply_immediately).lower()
+ if engine_version is not None:
+ params['EngineVersion'] = engine_version
+ if auto_minor_version_upgrade is not None:
+ params['AutoMinorVersionUpgrade'] = str(
+ auto_minor_version_upgrade).lower()
+ if primary_cluster_id is not None:
+ params['PrimaryClusterId'] = primary_cluster_id
+ return self._make_request(
+ action='ModifyReplicationGroup',
+ verb='POST',
+ path='/', params=params)
+
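A minimal sketch of the new method; the replication group and cluster IDs are placeholders:

import boto.elasticache

conn = boto.elasticache.connect_to_region('us-east-1')
# Promote the nodes of one cluster to the primary role and apply the
# change immediately rather than waiting for the maintenance window.
conn.modify_replication_group('my-repl-group',
                              primary_cluster_id='my-redis-001',
                              apply_immediately=True)
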
def purchase_reserved_cache_nodes_offering(self,
reserved_cache_nodes_offering_id,
reserved_cache_node_id=None,
cache_node_count=None):
"""
- Purchases a reserved Cache Node offering.
+ The PurchaseReservedCacheNodesOffering operation allows you to
+ purchase a reserved cache node offering.
:type reserved_cache_nodes_offering_id: string
- :param reserved_cache_nodes_offering_id: The ID of the Reserved Cache
- Node offering to purchase. Example:
- 438012d3-4052-4cc7-b2e3-8d3372e0e706
+ :param reserved_cache_nodes_offering_id: The ID of the reserved cache
+ node offering to purchase.
+ Example: 438012d3-4052-4cc7-b2e3-8d3372e0e706
:type reserved_cache_node_id: string
- :param reserved_cache_node_id: Customer-specified identifier to track
- this reservation. Example: myreservationID
+ :param reserved_cache_node_id: A customer-specified identifier to track
+ this reservation.
+ Example: myreservationID
:type cache_node_count: integer
- :param cache_node_count: The number of instances to reserve. Default:
- `1`
+ :param cache_node_count: The number of cache node instances to reserve.
+ Default: `1`
"""
params = {
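
Using the example values from the docstring itself, a purchase call looks roughly like this:

import boto.elasticache

conn = boto.elasticache.connect_to_region('us-east-1')
conn.purchase_reserved_cache_nodes_offering(
    '438012d3-4052-4cc7-b2e3-8d3372e0e706',   # offering ID from the docstring example
    reserved_cache_node_id='myreservationID',
    cache_node_count=1)
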
@@ -1141,24 +1541,28 @@ class ElastiCacheConnection(AWSQueryConnection):
def reboot_cache_cluster(self, cache_cluster_id,
cache_node_ids_to_reboot):
"""
- Reboots some (or all) of the cache cluster nodes within a
- previously provisioned ElastiCache cluster. This API results
- in the application of modified CacheParameterGroup parameters
- to the cache cluster. This action is taken as soon as
+ The RebootCacheCluster operation reboots some, or all, of the
+ cache cluster nodes within a provisioned cache cluster. This
+ API will apply any modified cache parameter groups to the
+ cache cluster. The reboot action takes place as soon as
possible, and results in a momentary outage to the cache
- cluster during which the cache cluster status is set to
- rebooting. During that momentary outage, the contents of the
- cache (for each cache cluster node being rebooted) are lost. A
- CacheCluster event is created when the reboot is completed.
+ cluster. During the reboot, the cache cluster status is set to
+ REBOOTING.
+
+ The reboot causes the contents of the cache (for each cache
+ cluster node being rebooted) to be lost.
+
+ When the reboot is complete, a cache cluster event is created.
:type cache_cluster_id: string
- :param cache_cluster_id: The Cache Cluster identifier. This parameter
+ :param cache_cluster_id: The cache cluster identifier. This parameter
is stored as a lowercase string.
:type cache_node_ids_to_reboot: list
- :param cache_node_ids_to_reboot: A list of Cache Cluster Node Ids to
- reboot. To reboot an entire cache cluster, specify all cache
- cluster node Ids.
+ :param cache_node_ids_to_reboot: A list of cache cluster node IDs to
+ reboot. A node ID is a numeric identifier (0001, 0002, etc.). To
+ reboot an entire cache cluster, specify all of the cache cluster
+ node IDs.
"""
params = {'CacheClusterId': cache_cluster_id, }
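
For instance, rebooting two nodes of a hypothetical cluster:

import boto.elasticache

conn = boto.elasticache.connect_to_region('us-east-1')
# Node IDs are the numeric identifiers described above (0001, 0002, ...).
conn.reboot_cache_cluster('my-memcached-cluster', ['0001', '0002'])
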
@@ -1174,25 +1578,27 @@ class ElastiCacheConnection(AWSQueryConnection):
parameter_name_values,
reset_all_parameters=None):
"""
- Modifies the parameters of a CacheParameterGroup to the engine
- or system default value. To reset specific parameters submit a
- list of the parameter names. To reset the entire
- CacheParameterGroup, specify the CacheParameterGroup name and
- ResetAllParameters parameters.
+ The ResetCacheParameterGroup operation modifies the parameters
+ of a cache parameter group to the engine or system default
+ value. You can reset specific parameters by submitting a list
+ of parameter names. To reset the entire cache parameter group,
+ specify the ResetAllParameters and CacheParameterGroupName
+ parameters.
:type cache_parameter_group_name: string
- :param cache_parameter_group_name: The name of the Cache Parameter
- Group.
+ :param cache_parameter_group_name: The name of the cache parameter
+ group to reset.
:type reset_all_parameters: boolean
- :param reset_all_parameters: Specifies whether ( true ) or not ( false
- ) to reset all parameters in the Cache Parameter Group to default
- values.
+ :param reset_all_parameters: If `True`, all parameters in the cache
+ parameter group will be reset to default values. If `False`, no such
+ action occurs.
+ Valid values: `True` | `False`
:type parameter_name_values: list
- :param parameter_name_values: An array of parameter names which should
- be reset. If not resetting the entire CacheParameterGroup, at least
- one parameter name must be supplied.
+ :param parameter_name_values: An array of parameter names to be reset.
+ If you are not resetting the entire cache parameter group, you must
+ specify at least one parameter name.
"""
params = {
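
A sketch of resetting an entire group; passing an empty list for the required parameter_name_values argument alongside reset_all_parameters=True is an assumption of this example, and the group name is a placeholder:

import boto.elasticache

conn = boto.elasticache.connect_to_region('us-east-1')
# Reset every parameter in the group to its engine/system default.
conn.reset_cache_parameter_group('my-memcached-params', [],
                                 reset_all_parameters=True)
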
@@ -1214,21 +1620,24 @@ class ElastiCacheConnection(AWSQueryConnection):
ec2_security_group_name,
ec2_security_group_owner_id):
"""
- Revokes ingress from a CacheSecurityGroup for previously
- authorized EC2 Security Groups.
+ The RevokeCacheSecurityGroupIngress operation revokes ingress
+ from a cache security group. Use this operation to disallow
+ access from an Amazon EC2 security group that had been
+ previously authorized.
:type cache_security_group_name: string
- :param cache_security_group_name: The name of the Cache Security Group
+ :param cache_security_group_name: The name of the cache security group
to revoke ingress from.
:type ec2_security_group_name: string
- :param ec2_security_group_name: The name of the EC2 Security Group to
- revoke access from.
+ :param ec2_security_group_name: The name of the Amazon EC2 security
+ group to revoke access from.
:type ec2_security_group_owner_id: string
- :param ec2_security_group_owner_id: The AWS Account Number of the owner
- of the security group specified in the EC2SecurityGroupName
- parameter. The AWS Access Key ID is not an acceptable value.
+ :param ec2_security_group_owner_id: The AWS account number of the
+ Amazon EC2 security group owner. Note that this is not the same
+ thing as an AWS access key ID; you must provide a valid AWS
+ account number for this parameter.
"""
params = {
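
A usage sketch; all three identifiers below are placeholders:

import boto.elasticache

conn = boto.elasticache.connect_to_region('us-east-1')
conn.revoke_cache_security_group_ingress('my-cache-secgroup',
                                         'my-ec2-secgroup',
                                         '123456789012')
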
diff --git a/boto/emr/__init__.py b/boto/emr/__init__.py
index 562c582d..e0cdf712 100644
--- a/boto/emr/__init__.py
+++ b/boto/emr/__init__.py
@@ -43,25 +43,25 @@ def regions():
endpoint='elasticmapreduce.us-east-1.amazonaws.com',
connection_cls=EmrConnection),
RegionInfo(name='us-west-1',
- endpoint='us-west-1.elasticmapreduce.amazonaws.com',
+ endpoint='elasticmapreduce.us-west-1.amazonaws.com',
connection_cls=EmrConnection),
RegionInfo(name='us-west-2',
- endpoint='us-west-2.elasticmapreduce.amazonaws.com',
+ endpoint='elasticmapreduce.us-west-2.amazonaws.com',
connection_cls=EmrConnection),
RegionInfo(name='ap-northeast-1',
- endpoint='ap-northeast-1.elasticmapreduce.amazonaws.com',
+ endpoint='elasticmapreduce.ap-northeast-1.amazonaws.com',
connection_cls=EmrConnection),
RegionInfo(name='ap-southeast-1',
- endpoint='ap-southeast-1.elasticmapreduce.amazonaws.com',
+ endpoint='elasticmapreduce.ap-southeast-1.amazonaws.com',
connection_cls=EmrConnection),
RegionInfo(name='ap-southeast-2',
- endpoint='ap-southeast-2.elasticmapreduce.amazonaws.com',
+ endpoint='elasticmapreduce.ap-southeast-2.amazonaws.com',
connection_cls=EmrConnection),
RegionInfo(name='eu-west-1',
- endpoint='eu-west-1.elasticmapreduce.amazonaws.com',
+ endpoint='elasticmapreduce.eu-west-1.amazonaws.com',
connection_cls=EmrConnection),
RegionInfo(name='sa-east-1',
- endpoint='sa-east-1.elasticmapreduce.amazonaws.com',
+ endpoint='elasticmapreduce.sa-east-1.amazonaws.com',
connection_cls=EmrConnection),
]
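
The endpoint flip above only changes which host names boto connects to; region discovery itself is unchanged. A quick sketch to inspect the new endpoints:

import boto.emr

# Each region now resolves to an
# 'elasticmapreduce.<region>.amazonaws.com' style host name.
for region in boto.emr.regions():
    print region.name, region.endpoint

conn = boto.emr.connect_to_region('us-west-2')
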
diff --git a/boto/emr/connection.py b/boto/emr/connection.py
index 95083abd..b0815f22 100644
--- a/boto/emr/connection.py
+++ b/boto/emr/connection.py
@@ -67,7 +67,7 @@ class EmrConnection(AWSQueryConnection):
validate_certs=validate_certs)
def _required_auth_capability(self):
- return ['emr']
+ return ['hmac-v4']
def describe_jobflow(self, jobflow_id):
"""
diff --git a/boto/emr/instance_group.py b/boto/emr/instance_group.py
index be229510..6ab63c5d 100644
--- a/boto/emr/instance_group.py
+++ b/boto/emr/instance_group.py
@@ -27,9 +27,9 @@ class InstanceGroup(object):
self.market = market
self.name = name
if market == 'SPOT':
- if not isinstance(bidprice, basestring):
+ if not bidprice:
raise ValueError('bidprice must be specified if market == SPOT')
- self.bidprice = bidprice
+ self.bidprice = str(bidprice)
def __repr__(self):
if self.market == 'SPOT':
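
With the relaxed check above, a numeric bid price is now accepted and coerced to a string; a short sketch (argument order follows the usual boto examples, and the values are placeholders):

from boto.emr.instance_group import InstanceGroup

# num_instances, role, type, market, name, bidprice
spot_core = InstanceGroup(2, 'CORE', 'm1.small', 'SPOT', 'core nodes', 0.10)
assert spot_core.bidprice == '0.1'   # str() is applied for SPOT groups
# Omitting the bid price for a SPOT group still raises ValueError.
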
diff --git a/boto/exception.py b/boto/exception.py
index 0c871b37..419aac15 100644
--- a/boto/exception.py
+++ b/boto/exception.py
@@ -76,7 +76,7 @@ class BotoServerError(StandardError):
self.body = body or ''
self.request_id = None
self.error_code = None
- self.error_message = None
+ self._error_message = None
self.box_usage = None
# Attempt to parse the error response. If body isn't present,
@@ -90,16 +90,22 @@ class BotoServerError(StandardError):
# in exception. But first, save self.body in self.error_message
# because occasionally we get error messages from Eucalyptus
# that are just text strings that we want to preserve.
- self.error_message = self.body
+ self.message = self.body
self.body = None
def __getattr__(self, name):
- if name == 'message':
- return self.error_message
+ if name == 'error_message':
+ return self.message
if name == 'code':
return self.error_code
raise AttributeError
+ def __setattr__(self, name, value):
+ if name == 'error_message':
+ self.message = value
+ else:
+ super(BotoServerError, self).__setattr__(name, value)
+
def __repr__(self):
return '%s: %s %s\n%s' % (self.__class__.__name__,
self.status, self.reason, self.body)
@@ -117,7 +123,7 @@ class BotoServerError(StandardError):
elif name == 'Code':
self.error_code = value
elif name == 'Message':
- self.error_message = value
+ self.message = value
elif name == 'BoxUsage':
self.box_usage = value
return None
@@ -125,7 +131,7 @@ class BotoServerError(StandardError):
def _cleanupParsedProperties(self):
self.request_id = None
self.error_code = None
- self.error_message = None
+ self.message = None
self.box_usage = None
class ConsoleOutput:
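
The net effect of the attribute changes above is that `message` and `error_message` behave as aliases of one another in both directions; a small sketch:

from boto.exception import BotoServerError

# A non-XML body (as sometimes returned by Eucalyptus) ends up in .message.
e = BotoServerError(400, 'Bad Request', body='plain text error')
assert e.message == 'plain text error'
assert e.error_message == e.message    # read via __getattr__
e.error_message = 'something else'     # write via __setattr__
assert e.message == 'something else'
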
diff --git a/boto/glacier/layer2.py b/boto/glacier/layer2.py
index e519ca89..d27f62d1 100644
--- a/boto/glacier/layer2.py
+++ b/boto/glacier/layer2.py
@@ -89,5 +89,13 @@ class Layer2(object):
:rtype: List of :class:`boto.glacier.vault.Vault`
:return: A list of Vault objects.
"""
- response_data = self.layer1.list_vaults()
- return [Vault(self.layer1, rd) for rd in response_data['VaultList']]
+ vaults = []
+ marker = None
+ while True:
+ response_data = self.layer1.list_vaults(marker=marker, limit=1000)
+ vaults.extend([Vault(self.layer1, rd) for rd in response_data['VaultList']])
+ marker = response_data.get('Marker')
+ if not marker:
+ break
+
+ return vaults
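
After this change list_vaults() pages through the Glacier ListVaults results transparently, so callers keep the same one-liner; a sketch, assuming credentials come from the usual boto configuration:

import boto

glacier = boto.connect_glacier()
for vault in glacier.list_vaults():
    print vault.name
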
diff --git a/boto/gs/bucket.py b/boto/gs/bucket.py
index a8ced49a..9e989258 100644
--- a/boto/gs/bucket.py
+++ b/boto/gs/bucket.py
@@ -19,12 +19,14 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
+import re
import urllib
import xml.sax
import boto
from boto import handler
from boto.resultset import ResultSet
+from boto.exception import GSResponseError
from boto.exception import InvalidAclError
from boto.gs.acl import ACL, CannedACLStrings
from boto.gs.acl import SupportedPermissions as GSPermissions
@@ -41,6 +43,7 @@ DEF_OBJ_ACL = 'defaultObjectAcl'
STANDARD_ACL = 'acl'
CORS_ARG = 'cors'
LIFECYCLE_ARG = 'lifecycle'
+ERROR_DETAILS_REGEX = re.compile(r'<Details>(?P<details>.*)</Details>')
class Bucket(S3Bucket):
"""Represents a Google Cloud Storage bucket."""
@@ -99,9 +102,16 @@ class Bucket(S3Bucket):
if response_headers:
for rk, rv in response_headers.iteritems():
query_args_l.append('%s=%s' % (rk, urllib.quote(rv)))
-
- key, resp = self._get_key_internal(key_name, headers,
- query_args_l=query_args_l)
+ try:
+ key, resp = self._get_key_internal(key_name, headers,
+ query_args_l=query_args_l)
+ except GSResponseError, e:
+ if e.status == 403 and 'Forbidden' in e.reason:
+ # If we failed getting an object, let the user know which object
+ # failed rather than just returning a generic 403.
+ e.reason = ("Access denied to 'gs://%s/%s'." %
+ (self.name, key_name))
+ raise
return key
def copy_key(self, new_key_name, src_bucket_name, src_key_name,
@@ -312,6 +322,14 @@ class Bucket(S3Bucket):
headers=headers)
body = response.read()
if response.status != 200:
+ if response.status == 403:
+ match = ERROR_DETAILS_REGEX.search(body)
+ details = match.group('details') if match else None
+ if details:
+ details = (('<Details>%s. Note that Full Control access'
+ ' is required to access ACLs.</Details>') %
+ details)
+ body = re.sub(ERROR_DETAILS_REGEX, details, body)
raise self.connection.provider.storage_response_error(
response.status, response.reason, body)
return body
diff --git a/boto/gs/key.py b/boto/gs/key.py
index 41ad0569..4417973b 100644
--- a/boto/gs/key.py
+++ b/boto/gs/key.py
@@ -119,6 +119,14 @@ class Key(S3Key):
self.component_count = int(value)
elif key == 'x-goog-generation':
self.generation = value
+ # Use x-goog-stored-content-encoding and
+ # x-goog-stored-content-length to indicate original content length
+ # and encoding, which are transcoding-invariant (so are preferable
+ # over using content-encoding and size headers).
+ elif key == 'x-goog-stored-content-encoding':
+ self.content_encoding = value
+ elif key == 'x-goog-stored-content-length':
+ self.size = int(value)
def open_read(self, headers=None, query_args='',
override_num_retries=None, response_headers=None):
@@ -300,9 +308,10 @@ class Key(S3Key):
chunked_transfer=chunked_transfer, size=size,
hash_algs=hash_algs)
- def delete(self):
+ def delete(self, headers=None):
return self.bucket.delete_key(self.name, version_id=self.version_id,
- generation=self.generation)
+ generation=self.generation,
+ headers=headers)
def add_email_grant(self, permission, email_address):
"""
diff --git a/boto/iam/__init__.py b/boto/iam/__init__.py
index 71cf7177..f0444ac1 100644
--- a/boto/iam/__init__.py
+++ b/boto/iam/__init__.py
@@ -52,6 +52,9 @@ def regions():
"""
return [IAMRegionInfo(name='universal',
endpoint='iam.amazonaws.com',
+ connection_cls=IAMConnection),
+ IAMRegionInfo(name='us-gov-west-1',
+ endpoint='iam.us-gov.amazonaws.com',
connection_cls=IAMConnection)
]
diff --git a/boto/iam/connection.py b/boto/iam/connection.py
index adacc8fb..f6fa6338 100644
--- a/boto/iam/connection.py
+++ b/boto/iam/connection.py
@@ -1004,7 +1004,10 @@ class IAMConnection(AWSQueryConnection):
if not alias:
raise Exception('No alias associated with this account. Please use iam.create_account_alias() first.')
- return "https://%s.signin.aws.amazon.com/console/%s" % (alias, service)
+ if self.host == 'iam.us-gov.amazonaws.com':
+ return "https://%s.signin.amazonaws-us-gov.com/console/%s" % (alias, service)
+ else:
+ return "https://%s.signin.aws.amazon.com/console/%s" % (alias, service)
def get_account_summary(self):
"""
diff --git a/boto/manage/server.py b/boto/manage/server.py
index 2a2b1f16..3acc4b2f 100644
--- a/boto/manage/server.py
+++ b/boto/manage/server.py
@@ -353,7 +353,7 @@ class Server(Model):
for region in regions:
ec2 = region.connect()
try:
- rs = ec2.get_all_instances([instance_id])
+ rs = ec2.get_all_reservations([instance_id])
except:
rs = []
if len(rs) == 1:
@@ -377,7 +377,7 @@ class Server(Model):
regions = boto.ec2.regions()
for region in regions:
ec2 = region.connect()
- rs = ec2.get_all_instances()
+ rs = ec2.get_all_reservations()
for reservation in rs:
for instance in reservation.instances:
try:
@@ -413,7 +413,7 @@ class Server(Model):
self.ec2 = region.connect()
if self.instance_id and not self._instance:
try:
- rs = self.ec2.get_all_instances([self.instance_id])
+ rs = self.ec2.get_all_reservations([self.instance_id])
if len(rs) >= 1:
for instance in rs[0].instances:
if instance.id == self.instance_id:
diff --git a/boto/mashups/server.py b/boto/mashups/server.py
index 6cea106c..aa564471 100644
--- a/boto/mashups/server.py
+++ b/boto/mashups/server.py
@@ -114,7 +114,7 @@ class Server(Model):
if not self._instance:
if self.instance_id:
try:
- rs = self.ec2.get_all_instances([self.instance_id])
+ rs = self.ec2.get_all_reservations([self.instance_id])
except:
return None
if len(rs) > 0:
diff --git a/boto/mws/response.py b/boto/mws/response.py
index 06740b56..fa25ed05 100644
--- a/boto/mws/response.py
+++ b/boto/mws/response.py
@@ -569,6 +569,10 @@ class GetMatchingProductForIdResult(ListMatchingProductsResult):
pass
+class GetMatchingProductForIdResponse(ResponseResultList):
+ _ResultClass = GetMatchingProductForIdResult
+
+
class GetCompetitivePricingForSKUResponse(ProductsBulkOperationResponse):
pass
diff --git a/boto/opsworks/layer1.py b/boto/opsworks/layer1.py
index 2e8ae436..0d79a05b 100644
--- a/boto/opsworks/layer1.py
+++ b/boto/opsworks/layer1.py
@@ -31,6 +31,29 @@ from boto.opsworks import exceptions
class OpsWorksConnection(AWSQueryConnection):
"""
AWS OpsWorks
+ Welcome to the AWS OpsWorks API Reference . This guide provides
+ descriptions, syntax, and usage examples about AWS OpsWorks
+ actions and data types, including common parameters and error
+ codes.
+
+ AWS OpsWorks is an application management service that provides an
+ integrated experience for overseeing the complete application
+ lifecycle. For information about this product, go to the `AWS
+ OpsWorks`_ details page.
+
+ **Endpoints**
+
+ AWS OpsWorks supports only one endpoint,
+ opsworks.us-east-1.amazonaws.com (HTTPS), so you must connect to that
+ endpoint. You can then use the API to direct AWS OpsWorks to
+ create stacks in any AWS Region.
+
+ **Chef Version**
+
+ When you call CreateStack, CloneStack, or UpdateStack we recommend
+ you use the `ConfigurationManager` parameter to specify the Chef
+ version, 0.9 or 11.4. The default value is currently 0.9. However,
+ we expect to change the default value to 11.4 in September 2013.
"""
APIVersion = "2013-02-18"
DefaultRegionName = "us-east-1"
@@ -57,16 +80,64 @@ class OpsWorksConnection(AWSQueryConnection):
def _required_auth_capability(self):
return ['hmac-v4']
+ def assign_volume(self, volume_id, instance_id=None):
+ """
+ Assigns one of the stack's registered Amazon EBS volumes to a
+ specified instance. The volume must first be registered with
+ the stack by calling RegisterVolume. For more information, see
+ ``_.
+
+ :type volume_id: string
+ :param volume_id: The volume ID.
+
+ :type instance_id: string
+ :param instance_id: The instance ID.
+
+ """
+ params = {'VolumeId': volume_id, }
+ if instance_id is not None:
+ params['InstanceId'] = instance_id
+ return self.make_request(action='AssignVolume',
+ body=json.dumps(params))
+
+ def associate_elastic_ip(self, elastic_ip, instance_id=None):
+ """
+ Associates one of the stack's registered Elastic IP addresses
+ with a specified instance. The address must first be
+ registered with the stack by calling RegisterElasticIp. For
+ more information, see ``_.
+
+ :type elastic_ip: string
+ :param elastic_ip: The Elastic IP address.
+
+ :type instance_id: string
+ :param instance_id: The instance ID.
+
+ """
+ params = {'ElasticIp': elastic_ip, }
+ if instance_id is not None:
+ params['InstanceId'] = instance_id
+ return self.make_request(action='AssociateElasticIp',
+ body=json.dumps(params))
+
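Both new calls assume the resources were registered with the stack beforehand (RegisterVolume / RegisterElasticIp); a sketch with placeholder IDs:

from boto.opsworks.layer1 import OpsWorksConnection

opsworks = OpsWorksConnection()          # single endpoint, us-east-1
opsworks.assign_volume('volume-id', instance_id='instance-id')
opsworks.associate_elastic_ip('192.0.2.10', instance_id='instance-id')
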
def attach_elastic_load_balancer(self, elastic_load_balancer_name,
layer_id):
"""
+ Attaches an Elastic Load Balancing load balancer to a
+ specified layer.
+ You must create the Elastic Load Balancing instance
+ separately, by using the Elastic Load Balancing console, API,
+ or CLI. For more information, see ` Elastic Load Balancing
+ Developer Guide`_.
:type elastic_load_balancer_name: string
- :param elastic_load_balancer_name:
+ :param elastic_load_balancer_name: The Elastic Load Balancing
+ instance's name.
:type layer_id: string
- :param layer_id:
+ :param layer_id: The ID of the layer that the Elastic Load Balancing
+ instance is to be attached to.
"""
params = {
@@ -77,15 +148,17 @@ class OpsWorksConnection(AWSQueryConnection):
body=json.dumps(params))
def clone_stack(self, source_stack_id, service_role_arn, name=None,
- region=None, attributes=None,
+ region=None, vpc_id=None, attributes=None,
default_instance_profile_arn=None, default_os=None,
hostname_theme=None, default_availability_zone=None,
- custom_json=None, use_custom_cookbooks=None,
+ default_subnet_id=None, custom_json=None,
+ configuration_manager=None, use_custom_cookbooks=None,
custom_cookbooks_source=None, default_ssh_key_name=None,
clone_permissions=None, clone_app_ids=None,
default_root_device_type=None):
"""
- Creates a clone of a specified stack.
+ Creates a clone of a specified stack. For more information,
+ see `Clone a Stack`_.
:type source_stack_id: string
:param source_stack_id: The source stack ID.
@@ -95,21 +168,56 @@ class OpsWorksConnection(AWSQueryConnection):
:type region: string
:param region: The cloned stack AWS region, such as "us-east-1". For
- more information about AWS regions, see `Regions and Endpoints`_
+ more information about AWS regions, see `Regions and Endpoints`_.
+
+ :type vpc_id: string
+ :param vpc_id: The ID of the VPC that the cloned stack is to be
+ launched into. It must be in the specified region. All instances
+ will be launched into this VPC, and you cannot change the ID later.
+
+ + If your account supports EC2 Classic, the default value is no VPC.
+ + If your account does not support EC2 Classic, the default value is
+ the default VPC for the specified region.
+
+
+ If the VPC ID corresponds to a default VPC and you have specified
+ either the `DefaultAvailabilityZone` or the `DefaultSubnetId`
+ parameter only, AWS OpsWorks infers the value of the other
+ parameter. If you specify neither parameter, AWS OpsWorks sets
+ these parameters to the first valid Availability Zone for the
+ specified region and the corresponding default VPC subnet ID,
+ respectively.
+
+ If you specify a nondefault VPC ID, note the following:
+
+
+ + It must belong to a VPC in your account that is in the specified
+ region.
+ + You must specify a value for `DefaultSubnetId`.
+
+
+ For more information on how to use AWS OpsWorks with a VPC, see
+ `Running a Stack in a VPC`_. For more information on default VPC
+ and EC2 Classic, see `Supported Platforms`_.
:type attributes: map
:param attributes: A list of stack attributes and values as key/value
pairs to be added to the cloned stack.
:type service_role_arn: string
- :param service_role_arn: The stack AWS Identity and Access Management
- (IAM) role, which allows OpsWorks to work with AWS resources on
- your behalf. You must set this parameter to the Amazon Resource
- Name (ARN) for an existing IAM role. If you create a stack by using
- the OpsWorks console, it creates the role for you. You can obtain
- an existing stack's IAM ARN programmatically by calling
- DescribePermissions. For more information about IAM ARNs, see
- `Using Identifiers`_.
+ :param service_role_arn:
+ The stack AWS Identity and Access Management (IAM) role, which allows
+ AWS OpsWorks to work with AWS resources on your behalf. You must
+ set this parameter to the Amazon Resource Name (ARN) for an
+ existing IAM role. If you create a stack by using the AWS OpsWorks
+ console, it creates the role for you. You can obtain an existing
+ stack's IAM ARN programmatically by calling DescribePermissions.
+ For more information about IAM ARNs, see `Using Identifiers`_.
+
+ You must set this parameter to a valid service role ARN or the action
+ will fail; there is no default value. You can specify the source
+ stack's service role ARN, if you prefer, but you must do so
+ explicitly.
:type default_instance_profile_arn: string
:param default_instance_profile_arn: The ARN of an IAM profile that is
@@ -117,15 +225,16 @@ class OpsWorksConnection(AWSQueryConnection):
information about IAM ARNs, see `Using Identifiers`_.
:type default_os: string
- :param default_os: The cloned stack default operating system, which
- must be either "Amazon Linux" or "Ubuntu 12.04 LTS".
+ :param default_os: The cloned stack's default operating system, which
+ must be set to `Amazon Linux` or `Ubuntu 12.04 LTS`. The default
+ option is `Amazon Linux`.
:type hostname_theme: string
:param hostname_theme: The stack's host name theme, with spaces are
- replaced by underscores. The theme is used to generate hostnames
+ replaced by underscores. The theme is used to generate host names
for the stack's instances. By default, `HostnameTheme` is set to
- Layer_Dependent, which creates hostnames by appending integers to
- the layer's shortname. The other themes are:
+ Layer_Dependent, which creates host names by appending integers to
+ the layer's short name. The other themes are:
+ Baked_Goods
+ Clouds
@@ -140,26 +249,48 @@ class OpsWorksConnection(AWSQueryConnection):
+ Wild_Cats
- To obtain a generated hostname, call `GetHostNameSuggestion`, which
- returns a hostname based on the current theme.
+ To obtain a generated host name, call `GetHostNameSuggestion`, which
+ returns a host name based on the current theme.
:type default_availability_zone: string
- :param default_availability_zone: The cloned stack's Availability Zone.
- For more information, see `Regions and Endpoints`_.
+ :param default_availability_zone: The cloned stack's default
+ Availability Zone, which must be in the specified region. For more
+ information, see `Regions and Endpoints`_. If you also specify a
+ value for `DefaultSubnetId`, the subnet must be in the same zone.
+ For more information, see the `VpcId` parameter description.
+
+ :type default_subnet_id: string
+ :param default_subnet_id: The stack's default subnet ID. All instances
+ will be launched into this subnet unless you specify otherwise when
+ you create the instance. If you also specify a value for
+ `DefaultAvailabilityZone`, the subnet must be in the same zone. For
+ information on default values and when this parameter is required,
+ see the `VpcId` parameter description.
:type custom_json: string
- :param custom_json:
- A string that contains user-defined, custom JSON. It is used to
- override the corresponding default stack configuration JSON values.
- The string should be in the following format and must escape
- characters such as '"'.:
- `"{\"key1\": \"value1\", \"key2\": \"value2\",...}"`
+ :param custom_json: A string that contains user-defined, custom JSON.
+ It is used to override the corresponding default stack
+ configuration JSON values. The string should be in the following
+ format and must escape characters such as '"': `"{\"key1\":
+ \"value1\", \"key2\": \"value2\",...}"`
+ For more information on custom JSON, see `Use Custom JSON to Modify the
+ Stack Configuration JSON`_
+
+ :type configuration_manager: dict
+ :param configuration_manager: The configuration manager. When you clone
+ a stack we recommend that you use the configuration manager to
+ specify the Chef version, 0.9 or 11.4. The default value is
+ currently 0.9. However, we expect to change the default value to
+ 11.4 in September 2013.
:type use_custom_cookbooks: boolean
:param use_custom_cookbooks: Whether to use custom cookbooks.
:type custom_cookbooks_source: dict
- :param custom_cookbooks_source:
+ :param custom_cookbooks_source: Contains the information required to
+ retrieve an app or cookbook from a repository. For more
+ information, see `Creating Apps`_ or `Custom Recipes and
+ Cookbooks`_.
:type default_ssh_key_name: string
:param default_ssh_key_name: A default SSH key for the stack instances.
@@ -174,7 +305,10 @@ class OpsWorksConnection(AWSQueryConnection):
the cloned stack.
:type default_root_device_type: string
- :param default_root_device_type:
+ :param default_root_device_type: The default root device type. This
+ value is used by default for all instances in the cloned stack, but
+ you can override it when you create an instance. For more
+ information, see `Storage for the Root Device`_.
"""
params = {
@@ -185,6 +319,8 @@ class OpsWorksConnection(AWSQueryConnection):
params['Name'] = name
if region is not None:
params['Region'] = region
+ if vpc_id is not None:
+ params['VpcId'] = vpc_id
if attributes is not None:
params['Attributes'] = attributes
if default_instance_profile_arn is not None:
@@ -195,8 +331,12 @@ class OpsWorksConnection(AWSQueryConnection):
params['HostnameTheme'] = hostname_theme
if default_availability_zone is not None:
params['DefaultAvailabilityZone'] = default_availability_zone
+ if default_subnet_id is not None:
+ params['DefaultSubnetId'] = default_subnet_id
if custom_json is not None:
params['CustomJson'] = custom_json
+ if configuration_manager is not None:
+ params['ConfigurationManager'] = configuration_manager
if use_custom_cookbooks is not None:
params['UseCustomCookbooks'] = use_custom_cookbooks
if custom_cookbooks_source is not None:
@@ -216,13 +356,14 @@ class OpsWorksConnection(AWSQueryConnection):
description=None, app_source=None, domains=None,
enable_ssl=None, ssl_configuration=None, attributes=None):
"""
- Creates an app for a specified stack.
+ Creates an app for a specified stack. For more information,
+ see `Creating Apps`_.
:type stack_id: string
:param stack_id: The stack ID.
:type shortname: string
- :param shortname:
+ :param shortname: The app's short name.
:type name: string
:param name: The app name.
@@ -233,7 +374,7 @@ class OpsWorksConnection(AWSQueryConnection):
:type type: string
:param type: The app type. Each supported type is associated with a
particular layer. For example, PHP applications are associated with
- a PHP layer. OpsWorks deploys an application to those instances
+ a PHP layer. AWS OpsWorks deploys an application to those instances
that are members of the corresponding layer.
:type app_source: dict
@@ -241,7 +382,7 @@ class OpsWorksConnection(AWSQueryConnection):
:type domains: list
:param domains: The app virtual host settings, with multiple domains
- separated by commas. For example: `'www.mysite.com, mysite.com'`
+ separated by commas. For example: `'www.example.com, example.com'`
:type enable_ssl: boolean
:param enable_ssl: Whether to enable SSL for the app.
@@ -285,29 +426,35 @@ class OpsWorksConnection(AWSQueryConnection):
+ Stack deployment runs the `deploy` recipes but does not
raise an event.
+
+ For more information, see `Deploying Apps`_ and `Run Stack
+ Commands`_.
+
:type stack_id: string
:param stack_id: The stack ID.
:type app_id: string
- :param app_id: The app ID, for app deployments.
+ :param app_id: The app ID. This parameter is required for app
+ deployments, but not for other deployment commands.
:type instance_ids: list
:param instance_ids: The instance IDs for the deployment targets.
:type command: dict
- :param command: A `DeploymentCommand` object that describes details of
- the operation.
+ :param command: A `DeploymentCommand` object that specifies the
+ deployment command and any associated arguments.
:type comment: string
:param comment: A user-defined comment.
:type custom_json: string
- :param custom_json:
- A string that contains user-defined, custom JSON. It is used to
- override the corresponding default stack configuration JSON values.
- The string should be in the following format and must escape
- characters such as '"'.:
- `"{\"key1\": \"value1\", \"key2\": \"value2\",...}"`
+ :param custom_json: A string that contains user-defined, custom JSON.
+ It is used to override the corresponding default stack
+ configuration JSON values. The string should be in the following
+ format and must escape characters such as '"': `"{\"key1\":
+ \"value1\", \"key2\": \"value2\",...}"`
+ For more information on custom JSON, see `Use Custom JSON to Modify the
+ Stack Configuration JSON`_.
"""
params = {'StackId': stack_id, 'Command': command, }
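
A sketch of an app deployment using the custom JSON format described above; the stack and app IDs are placeholders, and the command dict shape ({'Name': 'deploy'}) is assumed to follow the DeploymentCommand structure:

import json
from boto.opsworks.layer1 import OpsWorksConnection

opsworks = OpsWorksConnection()
opsworks.create_deployment(
    stack_id='stack-id',
    app_id='app-id',
    command={'Name': 'deploy'},
    custom_json=json.dumps({'key1': 'value1'}))   # produces the escaped-JSON string format
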
@@ -324,10 +471,13 @@ class OpsWorksConnection(AWSQueryConnection):
def create_instance(self, stack_id, layer_ids, instance_type,
auto_scaling_type=None, hostname=None, os=None,
- ssh_key_name=None, availability_zone=None,
- architecture=None, root_device_type=None):
+ ami_id=None, ssh_key_name=None,
+ availability_zone=None, subnet_id=None,
+ architecture=None, root_device_type=None,
+ install_updates_on_boot=None):
"""
- Creates an instance in a specified stack.
+ Creates an instance in a specified stack. For more
+ information, see `Adding an Instance to a Layer`_.
:type stack_id: string
:param stack_id: The stack ID.
@@ -336,26 +486,18 @@ class OpsWorksConnection(AWSQueryConnection):
:param layer_ids: An array that contains the instance layer IDs.
:type instance_type: string
- :param instance_type:
- The instance type, which can be one of the following:
-
-
- + m1.small
- + m1.medium
- + m1.large
- + m1.xlarge
- + c1.medium
- + c1.xlarge
- + m2.xlarge
- + m2.2xlarge
- + m2.4xlarge
+ :param instance_type: The instance type. AWS OpsWorks supports all
+ instance types except Cluster Compute, Cluster GPU, and High Memory
+ Cluster. For more information, see `Instance Families and Types`_.
+ The parameter values that you use to specify the various types are
+ in the API Name column of the Available Instance Types table.
:type auto_scaling_type: string
:param auto_scaling_type:
The instance auto scaling type, which has three possible values:
- + **AlwaysRunning**: A 24x7 instance, which is not affected by auto
+ + **AlwaysRunning**: A 24/7 instance, which is not affected by auto
scaling.
+ **TimeBasedAutoScaling**: A time-based auto scaling instance, which
is started and stopped based on a specified schedule. To specify
@@ -369,7 +511,25 @@ class OpsWorksConnection(AWSQueryConnection):
:param hostname: The instance host name.
:type os: string
- :param os: The instance operating system.
+ :param os: The instance operating system, which must be set to one of
+ the following.
+
+ + Standard operating systems: `Amazon Linux` or `Ubuntu 12.04 LTS`
+ + Custom AMIs: `Custom`
+
+
+ The default option is `Amazon Linux`. If you set this parameter to
+ `Custom`, you must use the CreateInstance action's AmiId parameter
+ to specify the custom AMI that you want to use. For more
+ information on the standard operating systems, see `Operating
+ Systems`_. For more information on how to use custom AMIs with
+ OpsWorks, see `Using Custom AMIs`_.
+
+ :type ami_id: string
+ :param ami_id: A custom AMI ID to be used to create the instance. The
+ AMI should be based on one of the standard AWS OpsWorks AMIs:
+ Amazon Linux or Ubuntu 12.04 LTS. For more information, see
+ `Instances`_.
:type ssh_key_name: string
:param ssh_key_name: The instance SSH key name.
@@ -378,11 +538,33 @@ class OpsWorksConnection(AWSQueryConnection):
:param availability_zone: The instance Availability Zone. For more
information, see `Regions and Endpoints`_.
+ :type subnet_id: string
+ :param subnet_id: The ID of the instance's subnet. If the stack is
+ running in a VPC, you can use this parameter to override the
+ stack's default subnet ID value and direct AWS OpsWorks to launch
+ the instance in a different subnet.
+
:type architecture: string
- :param architecture:
+ :param architecture: The instance architecture. Instance types do not
+ necessarily support both architectures. For a list of the
+ architectures that are supported by the different instance types,
+ see `Instance Families and Types`_.
:type root_device_type: string
- :param root_device_type:
+ :param root_device_type: The instance root device type. For more
+ information, see `Storage for the Root Device`_.
+
+ :type install_updates_on_boot: boolean
+ :param install_updates_on_boot:
+ Whether to install operating system and package updates when the
+ instance boots. The default value is `True`. To control when
+ updates are installed, set this value to `False`. You must then
+ update your instances manually by using CreateDeployment to run the
+ `update_dependencies` stack command or manually running `yum`
+ (Amazon Linux) or `apt-get` (Ubuntu) on the instances.
+
+ We strongly recommend using the default value of `True`, to ensure that
+ your instances have the latest security updates.
"""
params = {
@@ -396,14 +578,20 @@ class OpsWorksConnection(AWSQueryConnection):
params['Hostname'] = hostname
if os is not None:
params['Os'] = os
+ if ami_id is not None:
+ params['AmiId'] = ami_id
if ssh_key_name is not None:
params['SshKeyName'] = ssh_key_name
if availability_zone is not None:
params['AvailabilityZone'] = availability_zone
+ if subnet_id is not None:
+ params['SubnetId'] = subnet_id
if architecture is not None:
params['Architecture'] = architecture
if root_device_type is not None:
params['RootDeviceType'] = root_device_type
+ if install_updates_on_boot is not None:
+ params['InstallUpdatesOnBoot'] = install_updates_on_boot
return self.make_request(action='CreateInstance',
body=json.dumps(params))
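
A sketch that exercises the new parameters documented above; the stack and layer IDs are placeholders:

from boto.opsworks.layer1 import OpsWorksConnection

opsworks = OpsWorksConnection()
opsworks.create_instance(
    stack_id='stack-id',
    layer_ids=['layer-id'],
    instance_type='m1.small',
    os='Amazon Linux',                 # or 'Custom' together with ami_id=...
    install_updates_on_boot=True)      # keep the recommended default explicitly
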
@@ -411,23 +599,46 @@ class OpsWorksConnection(AWSQueryConnection):
custom_instance_profile_arn=None,
custom_security_group_ids=None, packages=None,
volume_configurations=None, enable_auto_healing=None,
- auto_assign_elastic_ips=None, custom_recipes=None):
+ auto_assign_elastic_ips=None,
+ auto_assign_public_ips=None, custom_recipes=None,
+ install_updates_on_boot=None):
"""
- Creates a layer.
+ Creates a layer. For more information, see `How to Create a
+ Layer`_.
+
+ You should use **CreateLayer** for noncustom layer types such
+ as PHP App Server only if the stack does not have an existing
+ layer of that type. A stack can have at most one instance of
+ each noncustom layer; if you attempt to create a second
+ instance, **CreateLayer** fails. A stack can have an arbitrary
+ number of custom layers, so you can call **CreateLayer** as
+ many times as you like for that layer type.
:type stack_id: string
:param stack_id: The layer stack ID.
:type type: string
- :param type: The layer type. A stack cannot have more than one layer of
- the same type.
+ :param type:
+ The layer type. A stack cannot have more than one layer of the same
+ type. This parameter must be set to one of the following:
+
+
+ + lb: An HAProxy layer
+ + web: A Static Web Server layer
+ + rails-app: A Rails App Server layer
+ + php-app: A PHP App Server layer
+ + nodejs-app: A Node.js App Server layer
+ + memcached: A Memcached layer
+ + db-master: A MySQL layer
+ + monitoring-master: A Ganglia layer
+ + custom: A custom layer
:type name: string
:param name: The layer name, which is used by the console.
:type shortname: string
- :param shortname: The layer short name, which is used internally by
- OpsWorks and by Chef recipes. The shortname is also used as the
+ :param shortname: The layer short name, which is used internally by AWS
+ OpsWorks and by Chef recipes. The short name is also used as the
name for the directory where your app files are installed. It can
have a maximum of 200 characters, which are limited to the
alphanumeric characters, '-', '_', and '.'.
@@ -459,12 +670,30 @@ class OpsWorksConnection(AWSQueryConnection):
:type auto_assign_elastic_ips: boolean
:param auto_assign_elastic_ips: Whether to automatically assign an
- `Elastic IP address`_ to the layer.
+ `Elastic IP address`_ to the layer's instances. For more
+ information, see `How to Edit a Layer`_.
+
+ :type auto_assign_public_ips: boolean
+ :param auto_assign_public_ips: For stacks that are running in a VPC,
+ whether to automatically assign a public IP address to the layer's
+ instances. For more information, see `How to Edit a Layer`_.
:type custom_recipes: dict
:param custom_recipes: A `LayerCustomRecipes` object that specifies the
layer custom recipes.
+ :type install_updates_on_boot: boolean
+ :param install_updates_on_boot:
+ Whether to install operating system and package updates when the
+ instance boots. The default value is `True`. To control when
+ updates are installed, set this value to `False`. You must then
+ update your instances manually by using CreateDeployment to run the
+ `update_dependencies` stack command or manually running `yum`
+ (Amazon Linux) or `apt-get` (Ubuntu) on the instances.
+
+ We strongly recommend using the default value of `True`, to ensure that
+ your instances have the latest security updates.
+
"""
params = {
'StackId': stack_id,
@@ -486,20 +715,26 @@ class OpsWorksConnection(AWSQueryConnection):
params['EnableAutoHealing'] = enable_auto_healing
if auto_assign_elastic_ips is not None:
params['AutoAssignElasticIps'] = auto_assign_elastic_ips
+ if auto_assign_public_ips is not None:
+ params['AutoAssignPublicIps'] = auto_assign_public_ips
if custom_recipes is not None:
params['CustomRecipes'] = custom_recipes
+ if install_updates_on_boot is not None:
+ params['InstallUpdatesOnBoot'] = install_updates_on_boot
return self.make_request(action='CreateLayer',
body=json.dumps(params))
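
For example, a custom layer (which, unlike the noncustom types listed above, can be created any number of times per stack); the stack ID is a placeholder:

from boto.opsworks.layer1 import OpsWorksConnection

opsworks = OpsWorksConnection()
opsworks.create_layer(
    stack_id='stack-id',
    type='custom',
    name='Background workers',
    shortname='workers',
    auto_assign_public_ips=True)       # new VPC-related option
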
def create_stack(self, name, region, service_role_arn,
- default_instance_profile_arn, attributes=None,
- default_os=None, hostname_theme=None,
- default_availability_zone=None, custom_json=None,
+ default_instance_profile_arn, vpc_id=None,
+ attributes=None, default_os=None, hostname_theme=None,
+ default_availability_zone=None, default_subnet_id=None,
+ custom_json=None, configuration_manager=None,
use_custom_cookbooks=None, custom_cookbooks_source=None,
default_ssh_key_name=None,
default_root_device_type=None):
"""
- Creates a new stack.
+ Creates a new stack. For more information, see `Create a New
+ Stack`_.
:type name: string
:param name: The stack name.
@@ -508,13 +743,43 @@ class OpsWorksConnection(AWSQueryConnection):
:param region: The stack AWS region, such as "us-east-1". For more
information about Amazon regions, see `Regions and Endpoints`_.
+ :type vpc_id: string
+ :param vpc_id: The ID of the VPC that the stack is to be launched into.
+ It must be in the specified region. All instances will be launched
+ into this VPC, and you cannot change the ID later.
+
+ + If your account supports EC2 Classic, the default value is no VPC.
+ + If your account does not support EC2 Classic, the default value is
+ the default VPC for the specified region.
+
+
+ If the VPC ID corresponds to a default VPC and you have specified
+ either the `DefaultAvailabilityZone` or the `DefaultSubnetId`
+ parameter only, AWS OpsWorks infers the value of the other
+ parameter. If you specify neither parameter, AWS OpsWorks sets
+ these parameters to the first valid Availability Zone for the
+ specified region and the corresponding default VPC subnet ID,
+ respectively.
+
+ If you specify a nondefault VPC ID, note the following:
+
+
+ + It must belong to a VPC in your account that is in the specified
+ region.
+ + You must specify a value for `DefaultSubnetId`.
+
+
+ For more information on how to use AWS OpsWorks with a VPC, see
+ `Running a Stack in a VPC`_. For more information on default VPC
+ and EC2 Classic, see `Supported Platforms`_.
+
:type attributes: map
:param attributes: One or more user-defined key/value pairs to be added
to the stack attributes bag.
:type service_role_arn: string
:param service_role_arn: The stack AWS Identity and Access Management
- (IAM) role, which allows OpsWorks to work with AWS resources on
+ (IAM) role, which allows AWS OpsWorks to work with AWS resources on
your behalf. You must set this parameter to the Amazon Resource
Name (ARN) for an existing IAM role. For more information about IAM
ARNs, see `Using Identifiers`_.
@@ -525,15 +790,16 @@ class OpsWorksConnection(AWSQueryConnection):
information about IAM ARNs, see `Using Identifiers`_.
:type default_os: string
- :param default_os: The cloned stack default operating system, which
- must be either "Amazon Linux" or "Ubuntu 12.04 LTS".
+ :param default_os: The stack's default operating system, which must be
+ set to `Amazon Linux` or `Ubuntu 12.04 LTS`. The default option is
+ `Amazon Linux`.
:type hostname_theme: string
:param hostname_theme: The stack's host name theme, with spaces are
- replaced by underscores. The theme is used to generate hostnames
+ replaced by underscores. The theme is used to generate host names
for the stack's instances. By default, `HostnameTheme` is set to
- Layer_Dependent, which creates hostnames by appending integers to
- the layer's shortname. The other themes are:
+ Layer_Dependent, which creates host names by appending integers to
+ the layer's short name. The other themes are:
+ Baked_Goods
+ Clouds
@@ -548,33 +814,58 @@ class OpsWorksConnection(AWSQueryConnection):
+ Wild_Cats
- To obtain a generated hostname, call `GetHostNameSuggestion`, which
- returns a hostname based on the current theme.
+ To obtain a generated host name, call `GetHostNameSuggestion`, which
+ returns a host name based on the current theme.
:type default_availability_zone: string
- :param default_availability_zone: The stack default Availability Zone.
- For more information, see `Regions and Endpoints`_.
+ :param default_availability_zone: The stack's default Availability
+ Zone, which must be in the specified region. For more information,
+ see `Regions and Endpoints`_. If you also specify a value for
+ `DefaultSubnetId`, the subnet must be in the same zone. For more
+ information, see the `VpcId` parameter description.
+
+ :type default_subnet_id: string
+ :param default_subnet_id: The stack's default subnet ID. All instances
+ will be launched into this subnet unless you specify otherwise when
+ you create the instance. If you also specify a value for
+ `DefaultAvailabilityZone`, the subnet must be in that zone. For
+ information on default values and when this parameter is required,
+ see the `VpcId` parameter description.
:type custom_json: string
- :param custom_json:
- A string that contains user-defined, custom JSON. It is used to
- override the corresponding default stack configuration JSON values.
- The string should be in the following format and must escape
- characters such as '"'.:
- `"{\"key1\": \"value1\", \"key2\": \"value2\",...}"`
+ :param custom_json: A string that contains user-defined, custom JSON.
+ It is used to override the corresponding default stack
+ configuration JSON values. The string should be in the following
+            format and must escape characters such as '"': `"{\"key1\":
+ \"value1\", \"key2\": \"value2\",...}"`
+ For more information on custom JSON, see `Use Custom JSON to Modify the
+ Stack Configuration JSON`_.
+
+ :type configuration_manager: dict
+ :param configuration_manager: The configuration manager. When you
+ create a stack we recommend that you use the configuration manager
+ to specify the Chef version, 0.9 or 11.4. The default value is
+ currently 0.9. However, we expect to change the default value to
+ 11.4 in September 2013.
:type use_custom_cookbooks: boolean
:param use_custom_cookbooks: Whether the stack uses custom cookbooks.
:type custom_cookbooks_source: dict
- :param custom_cookbooks_source:
+ :param custom_cookbooks_source: Contains the information required to
+ retrieve an app or cookbook from a repository. For more
+ information, see `Creating Apps`_ or `Custom Recipes and
+ Cookbooks`_.
:type default_ssh_key_name: string
:param default_ssh_key_name: A default SSH key for the stack instances.
You can override this value when you create or update an instance.
:type default_root_device_type: string
- :param default_root_device_type:
+ :param default_root_device_type: The default root device type. This
+            value is used by default for all instances in the stack, but
+ you can override it when you create an instance. For more
+ information, see `Storage for the Root Device`_.
"""
params = {
@@ -583,6 +874,8 @@ class OpsWorksConnection(AWSQueryConnection):
'ServiceRoleArn': service_role_arn,
'DefaultInstanceProfileArn': default_instance_profile_arn,
}
+ if vpc_id is not None:
+ params['VpcId'] = vpc_id
if attributes is not None:
params['Attributes'] = attributes
if default_os is not None:
@@ -591,8 +884,12 @@ class OpsWorksConnection(AWSQueryConnection):
params['HostnameTheme'] = hostname_theme
if default_availability_zone is not None:
params['DefaultAvailabilityZone'] = default_availability_zone
+ if default_subnet_id is not None:
+ params['DefaultSubnetId'] = default_subnet_id
if custom_json is not None:
params['CustomJson'] = custom_json
+ if configuration_manager is not None:
+ params['ConfigurationManager'] = configuration_manager
if use_custom_cookbooks is not None:
params['UseCustomCookbooks'] = use_custom_cookbooks
if custom_cookbooks_source is not None:
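
As an illustration of the new VPC- and Chef-related parameters, here is a minimal, untested sketch of a `create_stack` call; the stack name, region, ARNs, VPC and subnet IDs are placeholders, and the `ConfigurationManager` shape is assumed from the OpsWorks StackConfigurationManager type::

    import json
    from boto.opsworks.layer1 import OpsWorksConnection

    opsworks = OpsWorksConnection()  # assumes credentials in the environment or boto config
    # json.dumps produces the escaped string form that CustomJson expects
    custom_json = json.dumps({"key1": "value1", "key2": "value2"})
    opsworks.create_stack(
        'my-stack', 'us-east-1',
        'arn:aws:iam::111122223333:role/service-role',               # placeholder service role ARN
        'arn:aws:iam::111122223333:instance-profile/instance-role',  # placeholder instance profile ARN
        vpc_id='vpc-12345678',                # placeholder; a nondefault VPC also needs default_subnet_id
        default_subnet_id='subnet-12345678',  # placeholder
        custom_json=custom_json,
        configuration_manager={'Name': 'Chef', 'Version': '11.4'},
    )
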
@@ -607,7 +904,7 @@ class OpsWorksConnection(AWSQueryConnection):
def create_user_profile(self, iam_user_arn, ssh_username=None,
ssh_public_key=None):
"""
- Creates a new user.
+ Creates a new user profile.
:type iam_user_arn: string
:param iam_user_arn: The user's IAM ARN.
@@ -642,7 +939,9 @@ class OpsWorksConnection(AWSQueryConnection):
def delete_instance(self, instance_id, delete_elastic_ip=None,
delete_volumes=None):
"""
- Deletes a specified instance.
+ Deletes a specified instance. You must stop an instance before
+ you can delete it. For more information, see `Deleting
+ Instances`_.
:type instance_id: string
:param instance_id: The instance ID.
@@ -666,8 +965,9 @@ class OpsWorksConnection(AWSQueryConnection):
def delete_layer(self, layer_id):
"""
- Deletes a specified layer. You must first remove all
- associated instances.
+ Deletes a specified layer. You must first stop and then delete
+ all associated instances. For more information, see `How to
+ Delete a Layer`_.
:type layer_id: string
:param layer_id: The layer ID.
@@ -679,8 +979,9 @@ class OpsWorksConnection(AWSQueryConnection):
def delete_stack(self, stack_id):
"""
- Deletes a specified stack. You must first delete all instances
- and layers.
+ Deletes a specified stack. You must first delete all
+ instances, layers, and apps. For more information, see `Shut
+ Down a Stack`_.
:type stack_id: string
:param stack_id: The stack ID.
@@ -692,7 +993,7 @@ class OpsWorksConnection(AWSQueryConnection):
def delete_user_profile(self, iam_user_arn):
"""
- Deletes a user.
+ Deletes a user profile.
:type iam_user_arn: string
:param iam_user_arn: The user's IAM ARN.
@@ -702,16 +1003,48 @@ class OpsWorksConnection(AWSQueryConnection):
return self.make_request(action='DeleteUserProfile',
body=json.dumps(params))
+ def deregister_elastic_ip(self, elastic_ip):
+ """
+ Deregisters a specified Elastic IP address. The address can
+ then be registered by another stack. For more information, see
+ ``_.
+
+ :type elastic_ip: string
+ :param elastic_ip: The Elastic IP address.
+
+ """
+ params = {'ElasticIp': elastic_ip, }
+ return self.make_request(action='DeregisterElasticIp',
+ body=json.dumps(params))
+
+ def deregister_volume(self, volume_id):
+ """
+ Deregisters an Amazon EBS volume. The volume can then be
+ registered by another stack. For more information, see ``_.
+
+ :type volume_id: string
+ :param volume_id: The volume ID.
+
+ """
+ params = {'VolumeId': volume_id, }
+ return self.make_request(action='DeregisterVolume',
+ body=json.dumps(params))
+
def describe_apps(self, stack_id=None, app_ids=None):
"""
Requests a description of a specified set of apps.
+ You must specify at least one of the parameters.
+
:type stack_id: string
- :param stack_id:
- The app stack ID.
+ :param stack_id: The app stack ID. If you use this parameter,
+ `DescribeApps` returns a description of the apps in the specified
+ stack.
:type app_ids: list
- :param app_ids: An array of app IDs for the apps to be described.
+ :param app_ids: An array of app IDs for the apps to be described. If
+ you use this parameter, `DescribeApps` returns a description of the
+ specified apps. Otherwise, it returns a description of every app.
"""
params = {}
@@ -727,14 +1060,23 @@ class OpsWorksConnection(AWSQueryConnection):
"""
Describes the results of specified commands.
+ You must specify at least one of the parameters.
+
:type deployment_id: string
- :param deployment_id: The deployment ID.
+ :param deployment_id: The deployment ID. If you include this parameter,
+ `DescribeCommands` returns a description of the commands associated
+ with the specified deployment.
:type instance_id: string
- :param instance_id: The instance ID.
+ :param instance_id: The instance ID. If you include this parameter,
+ `DescribeCommands` returns a description of the commands associated
+ with the specified instance.
:type command_ids: list
- :param command_ids: An array of IDs for the commands to be described.
+ :param command_ids: An array of command IDs. If you include this
+ parameter, `DescribeCommands` returns a description of the
+ specified commands. Otherwise, it returns a description of every
+ command.
"""
params = {}
@@ -752,14 +1094,23 @@ class OpsWorksConnection(AWSQueryConnection):
"""
Requests a description of a specified set of deployments.
+ You must specify at least one of the parameters.
+
:type stack_id: string
- :param stack_id: The stack ID.
+ :param stack_id: The stack ID. If you include this parameter,
+            `DescribeDeployments` returns a description of the deployments
+ associated with the specified stack.
:type app_id: string
- :param app_id: The app ID.
+ :param app_id: The app ID. If you include this parameter,
+            `DescribeDeployments` returns a description of the deployments
+ associated with the specified app.
:type deployment_ids: list
- :param deployment_ids: An array of deployment IDs to be described.
+ :param deployment_ids: An array of deployment IDs to be described. If
+ you include this parameter, `DescribeDeployments` returns a
+ description of the specified deployments. Otherwise, it returns a
+ description of every deployment.
"""
params = {}
@@ -772,20 +1123,34 @@ class OpsWorksConnection(AWSQueryConnection):
return self.make_request(action='DescribeDeployments',
body=json.dumps(params))
- def describe_elastic_ips(self, instance_id=None, ips=None):
+ def describe_elastic_ips(self, instance_id=None, stack_id=None, ips=None):
"""
- Describes an instance's `Elastic IP addresses`_.
+ Describes `Elastic IP addresses`_.
+
+ You must specify at least one of the parameters.
:type instance_id: string
- :param instance_id: The instance ID.
+ :param instance_id: The instance ID. If you include this parameter,
+ `DescribeElasticIps` returns a description of the Elastic IP
+ addresses associated with the specified instance.
+
+ :type stack_id: string
+ :param stack_id: A stack ID. If you include this parameter,
+ `DescribeElasticIps` returns a description of the Elastic IP
+ addresses that are registered with the specified stack.
:type ips: list
- :param ips: An array of Elastic IP addresses to be described.
+ :param ips: An array of Elastic IP addresses to be described. If you
+ include this parameter, `DescribeElasticIps` returns a description
+ of the specified Elastic IP addresses. Otherwise, it returns a
+ description of every Elastic IP address.
"""
params = {}
if instance_id is not None:
params['InstanceId'] = instance_id
+ if stack_id is not None:
+ params['StackId'] = stack_id
if ips is not None:
params['Ips'] = ips
return self.make_request(action='DescribeElasticIps',
@@ -793,13 +1158,17 @@ class OpsWorksConnection(AWSQueryConnection):
def describe_elastic_load_balancers(self, stack_id=None, layer_ids=None):
"""
+ Describes a stack's Elastic Load Balancing instances.
+ You must specify at least one of the parameters.
:type stack_id: string
- :param stack_id:
+ :param stack_id: A stack ID. The action describes the stack's Elastic
+ Load Balancing instances.
:type layer_ids: list
- :param layer_ids:
+ :param layer_ids: A list of layer IDs. The action describes the Elastic
+ Load Balancing instances for the specified layers.
"""
params = {}
@@ -813,17 +1182,25 @@ class OpsWorksConnection(AWSQueryConnection):
def describe_instances(self, stack_id=None, layer_id=None,
instance_ids=None):
"""
- Requests a description of a set of instances associated with a
- specified ID or IDs.
+ Requests a description of a set of instances.
+
+ You must specify at least one of the parameters.
:type stack_id: string
- :param stack_id: A stack ID.
+ :param stack_id: A stack ID. If you use this parameter,
+ `DescribeInstances` returns descriptions of the instances
+ associated with the specified stack.
:type layer_id: string
- :param layer_id: A layer ID.
+ :param layer_id: A layer ID. If you use this parameter,
+ `DescribeInstances` returns descriptions of the instances
+ associated with the specified layer.
:type instance_ids: list
- :param instance_ids: An array of instance IDs to be described.
+ :param instance_ids: An array of instance IDs to be described. If you
+ use this parameter, `DescribeInstances` returns a description of
+ the specified instances. Otherwise, it returns a description of
+ every instance.
"""
params = {}
@@ -836,20 +1213,25 @@ class OpsWorksConnection(AWSQueryConnection):
return self.make_request(action='DescribeInstances',
body=json.dumps(params))
- def describe_layers(self, stack_id, layer_ids=None):
+ def describe_layers(self, stack_id=None, layer_ids=None):
"""
Requests a description of one or more layers in a specified
stack.
+ You must specify at least one of the parameters.
+
:type stack_id: string
:param stack_id: The stack ID.
:type layer_ids: list
:param layer_ids: An array of layer IDs that specify the layers to be
- described.
+ described. If you omit this parameter, `DescribeLayers` returns a
+ description of every layer in the specified stack.
"""
- params = {'StackId': stack_id, }
+ params = {}
+ if stack_id is not None:
+ params['StackId'] = stack_id
if layer_ids is not None:
params['LayerIds'] = layer_ids
return self.make_request(action='DescribeLayers',
@@ -860,6 +1242,8 @@ class OpsWorksConnection(AWSQueryConnection):
Describes load-based auto scaling configurations for specified
layers.
+ You must specify at least one of the parameters.
+
:type layer_ids: list
:param layer_ids: An array of layer IDs.
@@ -870,8 +1254,7 @@ class OpsWorksConnection(AWSQueryConnection):
def describe_permissions(self, iam_user_arn, stack_id):
"""
- Describes the permissions for a specified stack. You must
- specify at least one of the two request values.
+ Describes the permissions for a specified stack.
:type iam_user_arn: string
:param iam_user_arn: The user's IAM ARN. For more information about IAM
@@ -889,11 +1272,18 @@ class OpsWorksConnection(AWSQueryConnection):
"""
Describe an instance's RAID arrays.
+ You must specify at least one of the parameters.
+
:type instance_id: string
- :param instance_id: The instance ID.
+ :param instance_id: The instance ID. If you use this parameter,
+ `DescribeRaidArrays` returns descriptions of the RAID arrays
+ associated with the specified instance.
:type raid_array_ids: list
- :param raid_array_ids: An array of RAID array IDs to be described.
+ :param raid_array_ids: An array of RAID array IDs. If you use this
+ parameter, `DescribeRaidArrays` returns descriptions of the
+ specified arrays. Otherwise, it returns a description of every
+ array.
"""
params = {}
@@ -907,17 +1297,23 @@ class OpsWorksConnection(AWSQueryConnection):
def describe_service_errors(self, stack_id=None, instance_id=None,
service_error_ids=None):
"""
- Describes OpsWorks service errors.
+ Describes AWS OpsWorks service errors.
:type stack_id: string
- :param stack_id: The stack ID.
+ :param stack_id: The stack ID. If you use this parameter,
+ `DescribeServiceErrors` returns descriptions of the errors
+ associated with the specified stack.
:type instance_id: string
- :param instance_id: The instance ID.
+ :param instance_id: The instance ID. If you use this parameter,
+ `DescribeServiceErrors` returns descriptions of the errors
+ associated with the specified instance.
:type service_error_ids: list
- :param service_error_ids: An array of service error IDs to be
- described.
+ :param service_error_ids: An array of service error IDs. If you use
+ this parameter, `DescribeServiceErrors` returns descriptions of the
+ specified errors. Otherwise, it returns a description of every
+ error.
"""
params = {}
@@ -936,7 +1332,8 @@ class OpsWorksConnection(AWSQueryConnection):
:type stack_ids: list
:param stack_ids: An array of stack IDs that specify the stacks to be
- described.
+ described. If you omit this parameter, `DescribeStacks` returns a
+ description of every stack.
"""
params = {}
@@ -950,6 +1347,8 @@ class OpsWorksConnection(AWSQueryConnection):
Describes time-based auto scaling configurations for specified
instances.
+ You must specify at least one of the parameters.
+
:type instance_ids: list
:param instance_ids: An array of instance IDs.
@@ -971,24 +1370,38 @@ class OpsWorksConnection(AWSQueryConnection):
return self.make_request(action='DescribeUserProfiles',
body=json.dumps(params))
- def describe_volumes(self, instance_id=None, raid_array_id=None,
- volume_ids=None):
+ def describe_volumes(self, instance_id=None, stack_id=None,
+ raid_array_id=None, volume_ids=None):
"""
Describes an instance's Amazon EBS volumes.
+ You must specify at least one of the parameters.
+
:type instance_id: string
- :param instance_id: The instance ID.
+ :param instance_id: The instance ID. If you use this parameter,
+ `DescribeVolumes` returns descriptions of the volumes associated
+ with the specified instance.
+
+ :type stack_id: string
+ :param stack_id: A stack ID. The action describes the stack's
+ registered Amazon EBS volumes.
:type raid_array_id: string
- :param raid_array_id: The RAID array ID.
+ :param raid_array_id: The RAID array ID. If you use this parameter,
+ `DescribeVolumes` returns descriptions of the volumes associated
+ with the specified RAID array.
:type volume_ids: list
- :param volume_ids: Am array of volume IDs to be described.
+        :param volume_ids: An array of volume IDs. If you use this parameter,
+ `DescribeVolumes` returns descriptions of the specified volumes.
+ Otherwise, it returns a description of every volume.
"""
params = {}
if instance_id is not None:
params['InstanceId'] = instance_id
+ if stack_id is not None:
+ params['StackId'] = stack_id
if raid_array_id is not None:
params['RaidArrayId'] = raid_array_id
if volume_ids is not None:
@@ -999,13 +1412,16 @@ class OpsWorksConnection(AWSQueryConnection):
def detach_elastic_load_balancer(self, elastic_load_balancer_name,
layer_id):
"""
-
+ Detaches a specified Elastic Load Balancing instance from its
+ layer.
:type elastic_load_balancer_name: string
- :param elastic_load_balancer_name:
+ :param elastic_load_balancer_name: The Elastic Load Balancing
+ instance's name.
:type layer_id: string
- :param layer_id:
+ :param layer_id: The ID of the layer that the Elastic Load Balancing
+ instance is attached to.
"""
params = {
@@ -1015,10 +1431,24 @@ class OpsWorksConnection(AWSQueryConnection):
return self.make_request(action='DetachElasticLoadBalancer',
body=json.dumps(params))
+ def disassociate_elastic_ip(self, elastic_ip):
+ """
+ Disassociates an Elastic IP address from its instance. The
+ address remains registered with the stack. For more
+ information, see ``_.
+
+ :type elastic_ip: string
+ :param elastic_ip: The Elastic IP address.
+
+ """
+ params = {'ElasticIp': elastic_ip, }
+ return self.make_request(action='DisassociateElasticIp',
+ body=json.dumps(params))
+
def get_hostname_suggestion(self, layer_id):
"""
- Gets a generated hostname for the specified layer, based on
- the current hostname theme.
+ Gets a generated host name for the specified layer, based on
+ the current host name theme.
:type layer_id: string
:param layer_id: The layer ID.
@@ -1030,7 +1460,8 @@ class OpsWorksConnection(AWSQueryConnection):
def reboot_instance(self, instance_id):
"""
- Reboots a specified instance.
+ Reboots a specified instance. For more information, see
+ `Starting, Stopping, and Rebooting Instances`_.
:type instance_id: string
:param instance_id: The instance ID.
@@ -1040,11 +1471,51 @@ class OpsWorksConnection(AWSQueryConnection):
return self.make_request(action='RebootInstance',
body=json.dumps(params))
+ def register_elastic_ip(self, elastic_ip, stack_id):
+ """
+ Registers an Elastic IP address with a specified stack. An
+ address can be registered with only one stack at a time. If
+ the address is already registered, you must first deregister
+ it by calling DeregisterElasticIp. For more information, see
+ ``_.
+
+ :type elastic_ip: string
+ :param elastic_ip: The Elastic IP address.
+
+ :type stack_id: string
+ :param stack_id: The stack ID.
+
+ """
+ params = {'ElasticIp': elastic_ip, 'StackId': stack_id, }
+ return self.make_request(action='RegisterElasticIp',
+ body=json.dumps(params))
+
+ def register_volume(self, stack_id, ec_2_volume_id=None):
+ """
+ Registers an Amazon EBS volume with a specified stack. A
+ volume can be registered with only one stack at a time. If the
+ volume is already registered, you must first deregister it by
+ calling DeregisterVolume. For more information, see ``_.
+
+ :type ec_2_volume_id: string
+ :param ec_2_volume_id: The Amazon EBS volume ID.
+
+ :type stack_id: string
+ :param stack_id: The stack ID.
+
+ """
+ params = {'StackId': stack_id, }
+ if ec_2_volume_id is not None:
+ params['Ec2VolumeId'] = ec_2_volume_id
+ return self.make_request(action='RegisterVolume',
+ body=json.dumps(params))
+
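
The new register and deregister calls move Elastic IP addresses and Amazon EBS volumes in and out of a stack's resource registry. A rough sketch, with placeholder IDs, assuming the RegisterVolume response exposes the OpsWorks-assigned volume ID::

    from boto.opsworks.layer1 import OpsWorksConnection

    opsworks = OpsWorksConnection()
    stack_id = 'my-stack-id'  # placeholder OpsWorks stack ID
    # An address or volume can be registered with only one stack at a time.
    opsworks.register_elastic_ip('203.0.113.10', stack_id)                      # placeholder address
    result = opsworks.register_volume(stack_id, ec_2_volume_id='vol-12345678')  # placeholder EC2 volume ID
    # DeregisterVolume expects the OpsWorks volume ID, not the EC2 vol- ID.
    opsworks.deregister_volume(result['VolumeId'])
    opsworks.deregister_elastic_ip('203.0.113.10')
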
def set_load_based_auto_scaling(self, layer_id, enable=None,
up_scaling=None, down_scaling=None):
"""
Specify the load-based auto scaling configuration for a
- specified layer.
+ specified layer. For more information, see `Managing Load with
+ Time-based and Load-based Instances`_.
To use load-based auto scaling, you must create a set of load-
based auto scaling instances. Load-based auto scaling operates
@@ -1061,13 +1532,13 @@ class OpsWorksConnection(AWSQueryConnection):
:type up_scaling: dict
:param up_scaling: An `AutoScalingThresholds` object with the upscaling
threshold configuration. If the load exceeds these thresholds for a
- specified amount of time, OpsWorks starts a specified number of
+ specified amount of time, AWS OpsWorks starts a specified number of
instances.
:type down_scaling: dict
:param down_scaling: An `AutoScalingThresholds` object with the
downscaling threshold configuration. If the load falls below these
- thresholds for a specified amount of time, OpsWorks stops a
+ thresholds for a specified amount of time, AWS OpsWorks stops a
specified number of instances.
"""
@@ -1084,7 +1555,8 @@ class OpsWorksConnection(AWSQueryConnection):
def set_permission(self, stack_id, iam_user_arn, allow_ssh=None,
allow_sudo=None):
"""
- Specifies a stack's permissions.
+ Specifies a stack's permissions. For more information, see
+ `Security and Permissions`_.
:type stack_id: string
:param stack_id: The stack ID.
@@ -1113,7 +1585,8 @@ class OpsWorksConnection(AWSQueryConnection):
auto_scaling_schedule=None):
"""
Specify the time-based auto scaling configuration for a
- specified instance.
+ specified instance. For more information, see `Managing Load
+ with Time-based and Load-based Instances`_.
:type instance_id: string
:param instance_id: The instance ID.
@@ -1131,7 +1604,8 @@ class OpsWorksConnection(AWSQueryConnection):
def start_instance(self, instance_id):
"""
- Starts a specified instance.
+ Starts a specified instance. For more information, see
+ `Starting, Stopping, and Rebooting Instances`_.
:type instance_id: string
:param instance_id: The instance ID.
@@ -1158,7 +1632,8 @@ class OpsWorksConnection(AWSQueryConnection):
Stops a specified instance. When you stop a standard instance,
the data disappears and must be reinstalled when you restart
the instance. You can stop an Amazon EBS-backed instance
- without losing data.
+ without losing data. For more information, see `Starting,
+ Stopping, and Rebooting Instances`_.
:type instance_id: string
:param instance_id: The instance ID.
@@ -1180,6 +1655,19 @@ class OpsWorksConnection(AWSQueryConnection):
return self.make_request(action='StopStack',
body=json.dumps(params))
+ def unassign_volume(self, volume_id):
+ """
+ Unassigns an assigned Amazon EBS volume. The volume remains
+ registered with the stack. For more information, see ``_.
+
+ :type volume_id: string
+ :param volume_id: The volume ID.
+
+ """
+ params = {'VolumeId': volume_id, }
+ return self.make_request(action='UnassignVolume',
+ body=json.dumps(params))
+
def update_app(self, app_id, name=None, description=None, type=None,
app_source=None, domains=None, enable_ssl=None,
ssl_configuration=None, attributes=None):
@@ -1203,7 +1691,7 @@ class OpsWorksConnection(AWSQueryConnection):
:type domains: list
:param domains: The app's virtual host settings, with multiple domains
- separated by commas. For example: `'www.mysite.com, mysite.com'`
+ separated by commas. For example: `'www.example.com, example.com'`
:type enable_ssl: boolean
:param enable_ssl: Whether SSL is enabled for the app.
@@ -1237,10 +1725,29 @@ class OpsWorksConnection(AWSQueryConnection):
return self.make_request(action='UpdateApp',
body=json.dumps(params))
+ def update_elastic_ip(self, elastic_ip, name=None):
+ """
+ Updates a registered Elastic IP address's name. For more
+ information, see ``_.
+
+ :type elastic_ip: string
+ :param elastic_ip: The address.
+
+ :type name: string
+ :param name: The new name.
+
+ """
+ params = {'ElasticIp': elastic_ip, }
+ if name is not None:
+ params['Name'] = name
+ return self.make_request(action='UpdateElasticIp',
+ body=json.dumps(params))
+
def update_instance(self, instance_id, layer_ids=None,
instance_type=None, auto_scaling_type=None,
- hostname=None, os=None, ssh_key_name=None,
- architecture=None):
+ hostname=None, os=None, ami_id=None,
+ ssh_key_name=None, architecture=None,
+ install_updates_on_boot=None):
"""
Updates a specified instance.
@@ -1251,26 +1758,18 @@ class OpsWorksConnection(AWSQueryConnection):
:param layer_ids: The instance's layer IDs.
:type instance_type: string
- :param instance_type:
- The instance type, which can be one of the following:
-
-
- + m1.small
- + m1.medium
- + m1.large
- + m1.xlarge
- + c1.medium
- + c1.xlarge
- + m2.xlarge
- + m2.2xlarge
- + m2.4xlarge
+ :param instance_type: The instance type. AWS OpsWorks supports all
+ instance types except Cluster Compute, Cluster GPU, and High Memory
+ Cluster. For more information, see `Instance Families and Types`_.
+ The parameter values that you use to specify the various types are
+ in the API Name column of the Available Instance Types table.
:type auto_scaling_type: string
:param auto_scaling_type:
The instance's auto scaling type, which has three possible values:
- + **AlwaysRunning**: A 24x7 instance, which is not affected by auto
+ + **AlwaysRunning**: A 24/7 instance, which is not affected by auto
scaling.
+ **TimeBasedAutoScaling**: A time-based auto scaling instance, which
is started and stopped based on a specified schedule.
@@ -1281,13 +1780,46 @@ class OpsWorksConnection(AWSQueryConnection):
:param hostname: The instance host name.
:type os: string
- :param os: The instance operating system.
+ :param os: The instance operating system, which must be set to one of
+ the following.
+
+ + Standard operating systems: `Amazon Linux` or `Ubuntu 12.04 LTS`
+ + Custom AMIs: `Custom`
+
+
+ The default option is `Amazon Linux`. If you set this parameter to
+ `Custom`, you must use the CreateInstance action's AmiId parameter
+ to specify the custom AMI that you want to use. For more
+ information on the standard operating systems, see `Operating
+            Systems`_. For more information on how to use custom AMIs with
+ OpsWorks, see `Using Custom AMIs`_.
+
+ :type ami_id: string
+ :param ami_id: A custom AMI ID to be used to create the instance. The
+            AMI should be based on one of the standard AWS OpsWorks AMIs:
+            Amazon Linux or Ubuntu 12.04 LTS. For more information, see
+            `Instances`_.
:type ssh_key_name: string
:param ssh_key_name: The instance SSH key name.
:type architecture: string
- :param architecture:
+ :param architecture: The instance architecture. Instance types do not
+ necessarily support both architectures. For a list of the
+ architectures that are supported by the different instance types,
+ see `Instance Families and Types`_.
+
+ :type install_updates_on_boot: boolean
+ :param install_updates_on_boot:
+ Whether to install operating system and package updates when the
+ instance boots. The default value is `True`. To control when
+ updates are installed, set this value to `False`. You must then
+ update your instances manually by using CreateDeployment to run the
+ `update_dependencies` stack command or manually running `yum`
+ (Amazon Linux) or `apt-get` (Ubuntu) on the instances.
+
+ We strongly recommend using the default value of `True`, to ensure that
+ your instances have the latest security updates.
"""
params = {'InstanceId': instance_id, }
@@ -1301,10 +1833,14 @@ class OpsWorksConnection(AWSQueryConnection):
params['Hostname'] = hostname
if os is not None:
params['Os'] = os
+ if ami_id is not None:
+ params['AmiId'] = ami_id
if ssh_key_name is not None:
params['SshKeyName'] = ssh_key_name
if architecture is not None:
params['Architecture'] = architecture
+ if install_updates_on_boot is not None:
+ params['InstallUpdatesOnBoot'] = install_updates_on_boot
return self.make_request(action='UpdateInstance',
body=json.dumps(params))
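
A brief, hypothetical sketch of the new os/ami_id/install_updates_on_boot parameters on update_instance; the instance and AMI IDs are placeholders::

    from boto.opsworks.layer1 import OpsWorksConnection

    opsworks = OpsWorksConnection()
    opsworks.update_instance(
        'my-instance-id',               # placeholder OpsWorks instance ID
        os='Custom',                    # custom AMIs require os='Custom'
        ami_id='ami-12345678',          # placeholder; based on Amazon Linux or Ubuntu 12.04 LTS
        install_updates_on_boot=False,  # patching is then up to you: update_dependencies, yum, or apt-get
    )
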
@@ -1312,7 +1848,9 @@ class OpsWorksConnection(AWSQueryConnection):
attributes=None, custom_instance_profile_arn=None,
custom_security_group_ids=None, packages=None,
volume_configurations=None, enable_auto_healing=None,
- auto_assign_elastic_ips=None, custom_recipes=None):
+ auto_assign_elastic_ips=None,
+ auto_assign_public_ips=None, custom_recipes=None,
+ install_updates_on_boot=None):
"""
Updates a specified layer.
@@ -1323,10 +1861,10 @@ class OpsWorksConnection(AWSQueryConnection):
:param name: The layer name, which is used by the console.
:type shortname: string
- :param shortname: The layer short name, which is used internally by
- OpsWorks, by Chef. The shortname is also used as the name for the
- directory where your app files are installed. It can have a maximum
- of 200 characters and must be in the following format:
+ :param shortname: The layer short name, which is used internally by AWS
+            OpsWorks and by Chef. The short name is also used as the name for
+ the directory where your app files are installed. It can have a
+ maximum of 200 characters and must be in the following format:
/\A[a-z0-9\-\_\.]+\Z/.
:type attributes: map
@@ -1356,12 +1894,30 @@ class OpsWorksConnection(AWSQueryConnection):
:type auto_assign_elastic_ips: boolean
:param auto_assign_elastic_ips: Whether to automatically assign an
- `Elastic IP address`_ to the layer.
+ `Elastic IP address`_ to the layer's instances. For more
+ information, see `How to Edit a Layer`_.
+
+ :type auto_assign_public_ips: boolean
+ :param auto_assign_public_ips: For stacks that are running in a VPC,
+ whether to automatically assign a public IP address to the layer's
+ instances. For more information, see `How to Edit a Layer`_.
:type custom_recipes: dict
:param custom_recipes: A `LayerCustomRecipes` object that specifies the
layer's custom recipes.
+ :type install_updates_on_boot: boolean
+ :param install_updates_on_boot:
+ Whether to install operating system and package updates when the
+ instance boots. The default value is `True`. To control when
+ updates are installed, set this value to `False`. You must then
+ update your instances manually by using CreateDeployment to run the
+ `update_dependencies` stack command or manually running `yum`
+ (Amazon Linux) or `apt-get` (Ubuntu) on the instances.
+
+ We strongly recommend using the default value of `True`, to ensure that
+ your instances have the latest security updates.
+
"""
params = {'LayerId': layer_id, }
if name is not None:
@@ -1382,8 +1938,12 @@ class OpsWorksConnection(AWSQueryConnection):
params['EnableAutoHealing'] = enable_auto_healing
if auto_assign_elastic_ips is not None:
params['AutoAssignElasticIps'] = auto_assign_elastic_ips
+ if auto_assign_public_ips is not None:
+ params['AutoAssignPublicIps'] = auto_assign_public_ips
if custom_recipes is not None:
params['CustomRecipes'] = custom_recipes
+ if install_updates_on_boot is not None:
+ params['InstallUpdatesOnBoot'] = install_updates_on_boot
return self.make_request(action='UpdateLayer',
body=json.dumps(params))
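
Similarly, a short sketch of the new layer flags (placeholder layer ID); auto_assign_public_ips only matters for stacks running in a VPC::

    from boto.opsworks.layer1 import OpsWorksConnection

    opsworks = OpsWorksConnection()
    opsworks.update_layer('my-layer-id',               # placeholder layer ID
                          auto_assign_public_ips=True,
                          install_updates_on_boot=True)
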
@@ -1391,7 +1951,8 @@ class OpsWorksConnection(AWSQueryConnection):
service_role_arn=None,
default_instance_profile_arn=None, default_os=None,
hostname_theme=None, default_availability_zone=None,
- custom_json=None, use_custom_cookbooks=None,
+ default_subnet_id=None, custom_json=None,
+ configuration_manager=None, use_custom_cookbooks=None,
custom_cookbooks_source=None, default_ssh_key_name=None,
default_root_device_type=None):
"""
@@ -1408,11 +1969,17 @@ class OpsWorksConnection(AWSQueryConnection):
to the stack attributes bag.
:type service_role_arn: string
- :param service_role_arn: The stack AWS Identity and Access Management
- (IAM) role, which allows OpsWorks to work with AWS resources on
- your behalf. You must set this parameter to the Amazon Resource
- Name (ARN) for an existing IAM role. For more information about IAM
- ARNs, see `Using Identifiers`_.
+ :param service_role_arn:
+ The stack AWS Identity and Access Management (IAM) role, which allows
+ AWS OpsWorks to work with AWS resources on your behalf. You must
+ set this parameter to the Amazon Resource Name (ARN) for an
+ existing IAM role. For more information about IAM ARNs, see `Using
+ Identifiers`_.
+
+ You must set this parameter to a valid service role ARN or the action
+ will fail; there is no default value. You can specify the stack's
+ current service role ARN, if you prefer, but you must do so
+ explicitly.
:type default_instance_profile_arn: string
:param default_instance_profile_arn: The ARN of an IAM profile that is
@@ -1420,15 +1987,16 @@ class OpsWorksConnection(AWSQueryConnection):
information about IAM ARNs, see `Using Identifiers`_.
:type default_os: string
- :param default_os: The cloned stack default operating system, which
- must be either "Amazon Linux" or "Ubuntu 12.04 LTS".
+ :param default_os: The stack's default operating system, which must be
+ set to `Amazon Linux` or `Ubuntu 12.04 LTS`. The default option is
+ `Amazon Linux`.
:type hostname_theme: string
:param hostname_theme: The stack's new host name theme, with spaces are
- replaced by underscores. The theme is used to generate hostnames
+ replaced by underscores. The theme is used to generate host names
for the stack's instances. By default, `HostnameTheme` is set to
- Layer_Dependent, which creates hostnames by appending integers to
- the layer's shortname. The other themes are:
+ Layer_Dependent, which creates host names by appending integers to
+ the layer's short name. The other themes are:
+ Baked_Goods
+ Clouds
@@ -1443,33 +2011,56 @@ class OpsWorksConnection(AWSQueryConnection):
+ Wild_Cats
- To obtain a generated hostname, call `GetHostNameSuggestion`, which
- returns a hostname based on the current theme.
+ To obtain a generated host name, call `GetHostNameSuggestion`, which
+ returns a host name based on the current theme.
:type default_availability_zone: string
- :param default_availability_zone: The stack new default Availability
- Zone. For more information, see `Regions and Endpoints`_.
+ :param default_availability_zone: The stack's default Availability
+ Zone, which must be in the specified region. For more information,
+ see `Regions and Endpoints`_. If you also specify a value for
+ `DefaultSubnetId`, the subnet must be in the same zone. For more
+ information, see CreateStack.
+
+ :type default_subnet_id: string
+ :param default_subnet_id: The stack's default subnet ID. All instances
+ will be launched into this subnet unless you specify otherwise when
+ you create the instance. If you also specify a value for
+ `DefaultAvailabilityZone`, the subnet must be in that zone. For
+ more information, see CreateStack.
:type custom_json: string
- :param custom_json:
- A string that contains user-defined, custom JSON. It is used to
- override the corresponding default stack configuration JSON values.
- The string should be in the following format and must escape
- characters such as '"'.:
- `"{\"key1\": \"value1\", \"key2\": \"value2\",...}"`
+ :param custom_json: A string that contains user-defined, custom JSON.
+ It is used to override the corresponding default stack
+ configuration JSON values. The string should be in the following
+            format and must escape characters such as '"': `"{\"key1\":
+ \"value1\", \"key2\": \"value2\",...}"`
+ For more information on custom JSON, see `Use Custom JSON to Modify the
+ Stack Configuration JSON`_.
+
+ :type configuration_manager: dict
+ :param configuration_manager: The configuration manager. When you
+ update a stack you can optionally use the configuration manager to
+ specify the Chef version, 0.9 or 11.4. If you omit this parameter,
+ AWS OpsWorks does not change the Chef version.
:type use_custom_cookbooks: boolean
:param use_custom_cookbooks: Whether the stack uses custom cookbooks.
:type custom_cookbooks_source: dict
- :param custom_cookbooks_source:
+ :param custom_cookbooks_source: Contains the information required to
+ retrieve an app or cookbook from a repository. For more
+ information, see `Creating Apps`_ or `Custom Recipes and
+ Cookbooks`_.
:type default_ssh_key_name: string
:param default_ssh_key_name: A default SSH key for the stack instances.
You can override this value when you create or update an instance.
:type default_root_device_type: string
- :param default_root_device_type:
+ :param default_root_device_type: The default root device type. This
+            value is used by default for all instances in the stack, but
+ you can override it when you create an instance. For more
+ information, see `Storage for the Root Device`_.
"""
params = {'StackId': stack_id, }
@@ -1487,8 +2078,12 @@ class OpsWorksConnection(AWSQueryConnection):
params['HostnameTheme'] = hostname_theme
if default_availability_zone is not None:
params['DefaultAvailabilityZone'] = default_availability_zone
+ if default_subnet_id is not None:
+ params['DefaultSubnetId'] = default_subnet_id
if custom_json is not None:
params['CustomJson'] = custom_json
+ if configuration_manager is not None:
+ params['ConfigurationManager'] = configuration_manager
if use_custom_cookbooks is not None:
params['UseCustomCookbooks'] = use_custom_cookbooks
if custom_cookbooks_source is not None:
@@ -1503,7 +2098,7 @@ class OpsWorksConnection(AWSQueryConnection):
def update_user_profile(self, iam_user_arn, ssh_username=None,
ssh_public_key=None):
"""
- Updates a specified user's SSH name and public key.
+ Updates a specified user profile.
:type iam_user_arn: string
:param iam_user_arn: The user IAM ARN.
@@ -1523,6 +2118,29 @@ class OpsWorksConnection(AWSQueryConnection):
return self.make_request(action='UpdateUserProfile',
body=json.dumps(params))
+ def update_volume(self, volume_id, name=None, mount_point=None):
+ """
+ Updates an Amazon EBS volume's name or mount point. For more
+ information, see ``_.
+
+ :type volume_id: string
+ :param volume_id: The volume ID.
+
+ :type name: string
+ :param name: The new name.
+
+ :type mount_point: string
+ :param mount_point: The new mount point.
+
+ """
+ params = {'VolumeId': volume_id, }
+ if name is not None:
+ params['Name'] = name
+ if mount_point is not None:
+ params['MountPoint'] = mount_point
+ return self.make_request(action='UpdateVolume',
+ body=json.dumps(params))
+
def make_request(self, action, body):
headers = {
'X-Amz-Target': '%s.%s' % (self.TargetPrefix, action),
diff --git a/boto/provider.py b/boto/provider.py
index 457a87e7..e27247cd 100644
--- a/boto/provider.py
+++ b/boto/provider.py
@@ -168,6 +168,7 @@ class Provider(object):
security_token=None):
self.host = None
self.port = None
+ self.host_header = None
self.access_key = access_key
self.secret_key = secret_key
self.security_token = security_token
@@ -185,6 +186,9 @@ class Provider(object):
port_opt_name = '%s_port' % self.HostKeyMap[self.name]
if config.has_option('Credentials', port_opt_name):
self.port = config.getint('Credentials', port_opt_name)
+ host_header_opt_name = '%s_host_header' % self.HostKeyMap[self.name]
+ if config.has_option('Credentials', host_header_opt_name):
+ self.host_header = config.get('Credentials', host_header_opt_name)
def get_access_key(self):
if self._credentials_need_refresh():
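
The new host_header attribute is read from the [Credentials] section of the boto config, using the same key pattern as the existing *_host and *_port options; a minimal sketch of inspecting it (it stays None when no such option is set)::

    from boto.provider import Provider

    provider = Provider('aws')   # 'google' selects the Google Storage provider
    # None unless a matching *_host_header option is present in the boto config
    print provider.host_header
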
diff --git a/boto/pyami/installers/ubuntu/ebs.py b/boto/pyami/installers/ubuntu/ebs.py
index a52549b0..3e5b5c28 100644
--- a/boto/pyami/installers/ubuntu/ebs.py
+++ b/boto/pyami/installers/ubuntu/ebs.py
@@ -122,7 +122,7 @@ class EBSInstaller(Installer):
while volume.update() != 'available':
boto.log.info('Volume %s not yet available. Current status = %s.' % (volume.id, volume.status))
time.sleep(5)
- instance = ec2.get_all_instances([self.instance_id])[0].instances[0]
+ instance = ec2.get_only_instances([self.instance_id])[0]
attempt_attach = True
while attempt_attach:
try:
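
The installer now uses get_only_instances, which returns Instance objects directly rather than the Reservation wrappers returned by get_all_instances; a sketch of the difference (placeholder instance ID)::

    import boto

    ec2 = boto.connect_ec2()  # assumes credentials in the environment or boto config
    # get_all_instances returns Reservation objects that wrap the instances ...
    reservation = ec2.get_all_instances(['i-12345678'])[0]
    instance = reservation.instances[0]
    # ... while get_only_instances returns the Instance objects themselves.
    instance = ec2.get_only_instances(['i-12345678'])[0]
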
diff --git a/boto/rds/__init__.py b/boto/rds/__init__.py
index ec3305cf..751c5d51 100644
--- a/boto/rds/__init__.py
+++ b/boto/rds/__init__.py
@@ -29,7 +29,8 @@ from boto.rds.parametergroup import ParameterGroup
from boto.rds.dbsnapshot import DBSnapshot
from boto.rds.event import Event
from boto.rds.regioninfo import RDSRegionInfo
-
+from boto.rds.dbsubnetgroup import DBSubnetGroup
+from boto.rds.vpcsecuritygroupmembership import VPCSecurityGroupMembership
def regions():
"""
@@ -40,6 +41,8 @@ def regions():
"""
return [RDSRegionInfo(name='us-east-1',
endpoint='rds.amazonaws.com'),
+ RDSRegionInfo(name='us-gov-west-1',
+ endpoint='rds.us-gov-west-1.amazonaws.com'),
RDSRegionInfo(name='eu-west-1',
endpoint='rds.eu-west-1.amazonaws.com'),
RDSRegionInfo(name='us-west-1',
@@ -164,6 +167,7 @@ class RDSConnection(AWSQueryConnection):
license_model = None,
option_group_name = None,
iops=None,
+ vpc_security_groups=None,
):
# API version: 2012-09-17
# Parameter notes:
@@ -276,10 +280,10 @@ class RDSConnection(AWSQueryConnection):
* SQL Server:
Not applicable and must be None.
- :type param_group: str
- :param param_group: Name of DBParameterGroup to associate with
- this DBInstance. If no groups are specified
- no parameter groups will be used.
+ :type param_group: str or ParameterGroup object
+ :param param_group: Name of DBParameterGroup or ParameterGroup instance
+ to associate with this DBInstance. If no groups are
+ specified no parameter groups will be used.
:type security_groups: list of str or list of DBSecurityGroup objects
:param security_groups: List of names of DBSecurityGroup to
@@ -362,6 +366,10 @@ class RDSConnection(AWSQueryConnection):
If you specify a value, it must be at least 1000 IOPS and you must
allocate 100 GB of storage.
+        :type vpc_security_groups: list of str or list of VPCSecurityGroupMembership objects
+        :param vpc_security_groups: List of VPC security group ids or a list of
+            VPCSecurityGroupMembership objects this DBInstance should be a member of.
+
:rtype: :class:`boto.rds.dbinstance.DBInstance`
:return: The new db instance.
"""
@@ -389,6 +397,7 @@ class RDSConnection(AWSQueryConnection):
# port => Port
# preferred_backup_window => PreferredBackupWindow
# preferred_maintenance_window => PreferredMaintenanceWindow
+ # vpc_security_groups => VpcSecurityGroupIds.member.N
params = {
'AllocatedStorage': allocated_storage,
'AutoMinorVersionUpgrade': str(auto_minor_version_upgrade).lower() if auto_minor_version_upgrade else None,
@@ -398,7 +407,9 @@ class RDSConnection(AWSQueryConnection):
'DBInstanceClass': instance_class,
'DBInstanceIdentifier': id,
'DBName': db_name,
- 'DBParameterGroupName': param_group,
+ 'DBParameterGroupName': (param_group.name
+ if isinstance(param_group, ParameterGroup)
+ else param_group),
'DBSubnetGroupName': db_subnet_group_name,
'Engine': engine,
'EngineVersion': engine_version,
@@ -421,6 +432,15 @@ class RDSConnection(AWSQueryConnection):
l.append(group)
self.build_list_params(params, l, 'DBSecurityGroups.member')
+ if vpc_security_groups:
+ l = []
+ for vpc_grp in vpc_security_groups:
+ if isinstance(vpc_grp, VPCSecurityGroupMembership):
+ l.append(vpc_grp.vpc_group)
+ else:
+ l.append(vpc_grp)
+ self.build_list_params(params, l, 'VpcSecurityGroupIds.member')
+
# Remove any params set to None
for k, v in params.items():
if not v: del(params[k])
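
To illustrate the two additions to create_dbinstance, a hypothetical sketch that passes a ParameterGroup object for param_group and plain VPC security group IDs for vpc_security_groups; the group name, identifiers, and credentials are placeholders::

    import boto

    rds = boto.connect_rds()
    pg = rds.get_all_dbparameter_groups('my-param-group')[0]  # an existing ParameterGroup
    db = rds.create_dbinstance(
        'mydb', 10, 'db.m1.small', 'admin', 'mysecretpassword',  # placeholders
        param_group=pg,                       # ParameterGroup objects are now accepted as well as names
        vpc_security_groups=['sg-12345678'],  # plain VPC security group IDs also work here
    )
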
@@ -504,17 +524,19 @@ class RDSConnection(AWSQueryConnection):
preferred_backup_window=None,
multi_az=False,
apply_immediately=False,
- iops=None):
+ iops=None,
+ vpc_security_groups=None,
+ ):
"""
Modify an existing DBInstance.
:type id: str
:param id: Unique identifier for the new instance.
- :type param_group: str
- :param param_group: Name of DBParameterGroup to associate with
- this DBInstance. If no groups are specified
- no parameter groups will be used.
+ :type param_group: str or ParameterGroup object
+ :param param_group: Name of DBParameterGroup or ParameterGroup instance
+ to associate with this DBInstance. If no groups are
+ specified no parameter groups will be used.
:type security_groups: list of str or list of DBSecurityGroup objects
:param security_groups: List of names of DBSecurityGroup to authorize on
@@ -580,12 +602,18 @@ class RDSConnection(AWSQueryConnection):
If you specify a value, it must be at least 1000 IOPS and you must
allocate 100 GB of storage.
+        :type vpc_security_groups: list of str or list of VPCSecurityGroupMembership objects
+        :param vpc_security_groups: List of VPC security group ids or a list of
+            VPCSecurityGroupMembership objects this DBInstance should be a member of.
+
:rtype: :class:`boto.rds.dbinstance.DBInstance`
:return: The modified db instance.
"""
params = {'DBInstanceIdentifier': id}
if param_group:
- params['DBParameterGroupName'] = param_group
+ params['DBParameterGroupName'] = (param_group.name
+ if isinstance(param_group, ParameterGroup)
+ else param_group)
if security_groups:
l = []
for group in security_groups:
@@ -594,6 +622,14 @@ class RDSConnection(AWSQueryConnection):
else:
l.append(group)
self.build_list_params(params, l, 'DBSecurityGroups.member')
+ if vpc_security_groups:
+ l = []
+ for vpc_grp in vpc_security_groups:
+ if isinstance(vpc_grp, VPCSecurityGroupMembership):
+ l.append(vpc_grp.vpc_group)
+ else:
+ l.append(vpc_grp)
+ self.build_list_params(params, l, 'VpcSecurityGroupIds.member')
if preferred_maintenance_window:
params['PreferredMaintenanceWindow'] = preferred_maintenance_window
if master_password:
@@ -738,10 +774,10 @@ class RDSConnection(AWSQueryConnection):
:param engine: Name of database engine.
:type description: string
- :param description: The description of the new security group
+        :param description: The description of the new DBParameterGroup
- :rtype: :class:`boto.rds.dbsecuritygroup.DBSecurityGroup`
- :return: The newly created DBSecurityGroup
+ :rtype: :class:`boto.rds.parametergroup.ParameterGroup`
+ :return: The newly created ParameterGroup
"""
params = {'DBParameterGroupName': name,
'DBParameterGroupFamily': engine,
@@ -750,10 +786,10 @@ class RDSConnection(AWSQueryConnection):
def modify_parameter_group(self, name, parameters=None):
"""
- Modify a parameter group for your account.
+ Modify a ParameterGroup for your account.
:type name: string
- :param name: The name of the new parameter group
+ :param name: The name of the new ParameterGroup
:type parameters: list of :class:`boto.rds.parametergroup.Parameter`
:param parameters: The new parameters
@@ -793,10 +829,10 @@ class RDSConnection(AWSQueryConnection):
def delete_parameter_group(self, name):
"""
- Delete a DBSecurityGroup from your account.
+ Delete a ParameterGroup from your account.
:type key_name: string
- :param key_name: The name of the DBSecurityGroup to delete
+ :param key_name: The name of the ParameterGroup to delete
"""
params = {'DBParameterGroupName': name}
return self.get_status('DeleteDBParameterGroup', params)
@@ -1089,7 +1125,8 @@ class RDSConnection(AWSQueryConnection):
restore_time=None,
dbinstance_class=None,
port=None,
- availability_zone=None):
+ availability_zone=None,
+ db_subnet_group_name=None):
"""
Create a new DBInstance from a point in time.
@@ -1122,6 +1159,11 @@ class RDSConnection(AWSQueryConnection):
:param availability_zone: Name of the availability zone to place
DBInstance into.
+ :type db_subnet_group_name: str
+ :param db_subnet_group_name: A DB Subnet Group to associate with this DB Instance.
+ If there is no DB Subnet Group, then it is a non-VPC DB
+ instance.
+
:rtype: :class:`boto.rds.dbinstance.DBInstance`
:return: The newly created DBInstance
"""
@@ -1137,6 +1179,8 @@ class RDSConnection(AWSQueryConnection):
params['Port'] = port
if availability_zone:
params['AvailabilityZone'] = availability_zone
+ if db_subnet_group_name is not None:
+ params['DBSubnetGroupName'] = db_subnet_group_name
return self.get_object('RestoreDBInstanceToPointInTime',
params, DBInstance)
@@ -1199,6 +1243,98 @@ class RDSConnection(AWSQueryConnection):
params['Marker'] = marker
return self.get_list('DescribeEvents', params, [('Event', Event)])
+ def create_db_subnet_group(self, name, desc, subnet_ids):
+ """
+ Create a new Database Subnet Group.
+
+ :type name: string
+ :param name: The identifier for the db_subnet_group
+
+ :type desc: string
+ :param desc: A description of the db_subnet_group
+
+ :type subnet_ids: list
+        :param subnet_ids: A list of the subnet identifiers to include in the
+ db_subnet_group
+
+        :rtype: :class:`boto.rds.dbsubnetgroup.DBSubnetGroup`
+ :return: the created db_subnet_group
+ """
+
+ params = {'DBSubnetGroupName': name,
+ 'DBSubnetGroupDescription': desc}
+ self.build_list_params(params, subnet_ids, 'SubnetIds.member')
+
+ return self.get_object('CreateDBSubnetGroup', params, DBSubnetGroup)
+
+ def delete_db_subnet_group(self, name):
+ """
+ Delete a Database Subnet Group.
+
+ :type name: string
+ :param name: The identifier of the db_subnet_group to delete
+
+ :rtype: :class:`boto.rds.dbsubnetgroup.DBSubnetGroup`
+ :return: The deleted db_subnet_group.
+ """
+
+ params = {'DBSubnetGroupName': name}
+
+ return self.get_object('DeleteDBSubnetGroup', params, DBSubnetGroup)
+
+
+ def get_all_db_subnet_groups(self, name=None, max_records=None, marker=None):
+ """
+ Retrieve all the DBSubnetGroups in your account.
+
+ :type name: str
+        :param name: DBSubnetGroup name. If supplied, only information about
+ this DBSubnetGroup will be returned. Otherwise, info
+ about all DBSubnetGroups will be returned.
+
+ :type max_records: int
+ :param max_records: The maximum number of records to be returned.
+ If more results are available, a Token will be
+ returned in the response that can be used to
+ retrieve additional records. Default is 100.
+
+ :type marker: str
+ :param marker: The marker provided by a previous request.
+
+ :rtype: list
+ :return: A list of :class:`boto.rds.dbsubnetgroup.DBSubnetGroup`
+ """
+ params = dict()
+ if name != None:
+ params['DBSubnetGroupName'] = name
+ if max_records != None:
+ params['MaxRecords'] = max_records
+ if marker != None:
+ params['Marker'] = marker
+
+ return self.get_list('DescribeDBSubnetGroups', params, [('DBSubnetGroup',DBSubnetGroup)])
+
+ def modify_db_subnet_group(self, name, description=None, subnet_ids=None):
+        """
+        Modify a Database Subnet Group.
+
+        :type name: string
+        :param name: The name of the db_subnet_group to modify
+
+        :type description: string
+        :param description: A new description for the db_subnet_group
+
+        :type subnet_ids: list
+        :param subnet_ids: A new list of subnet identifiers to include in the
+            db_subnet_group
+
+        :rtype: :class:`boto.rds.dbsubnetgroup.DBSubnetGroup`
+        :return: The modified db_subnet_group
+        """
+ params = {'DBSubnetGroupName': name}
+ if description != None:
+ params['DBSubnetGroupDescription'] = description
+ if subnet_ids != None:
+ self.build_list_params(params, subnet_ids, 'SubnetIds.member')
+
+ return self.get_object('ModifyDBSubnetGroup', params, DBSubnetGroup)
+
def create_option_group(self, name, engine_name, major_engine_version,
description=None):
"""
@@ -1331,4 +1467,4 @@ class RDSConnection(AWSQueryConnection):
params['Marker'] = marker
return self.get_list('DescribeOptionGroupOptions', params, [
('OptionGroupOptions', OptionGroupOption)
- ]) \ No newline at end of file
+ ])
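
A short, untested sketch exercising the new DB subnet group calls end to end; the group name and subnet IDs are placeholders::

    import boto

    rds = boto.connect_rds()
    group = rds.create_db_subnet_group('my-subnet-group',
                                       'subnets for my VPC databases',
                                       ['subnet-12345678', 'subnet-87654321'])
    for g in rds.get_all_db_subnet_groups():
        print g.name, g.vpc_id, g.status
    rds.modify_db_subnet_group('my-subnet-group', description='updated description')
    rds.delete_db_subnet_group('my-subnet-group')
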
diff --git a/boto/rds/dbinstance.py b/boto/rds/dbinstance.py
index e6b51b76..043052ea 100644
--- a/boto/rds/dbinstance.py
+++ b/boto/rds/dbinstance.py
@@ -22,6 +22,7 @@
from boto.rds.dbsecuritygroup import DBSecurityGroup
from boto.rds.parametergroup import ParameterGroup
from boto.rds.statusinfo import StatusInfo
+from boto.rds.vpcsecuritygroupmembership import VPCSecurityGroupMembership
from boto.resultset import ResultSet
@@ -65,6 +66,9 @@ class DBInstance(object):
Multi-AZ deployment.
:ivar iops: The current number of provisioned IOPS for the DB Instance.
Can be None if this is a standard instance.
+ :ivar vpc_security_groups: List of VPC Security Group Membership elements
+ containing only VpcSecurityGroupMembership.VpcSecurityGroupId and
+ VpcSecurityGroupMembership.Status subelements.
:ivar pending_modified_values: Specifies that changes to the
DB Instance are pending. This element is only included when changes
are pending. Specific changes are identified by subelements.
@@ -94,6 +98,7 @@ class DBInstance(object):
self.latest_restorable_time = None
self.multi_az = False
self.iops = None
+ self.vpc_security_groups = None
self.pending_modified_values = None
self._in_endpoint = False
self._port = None
@@ -114,6 +119,10 @@ class DBInstance(object):
self.security_groups = ResultSet([('DBSecurityGroup',
DBSecurityGroup)])
return self.security_groups
+ elif name == 'VpcSecurityGroups':
+ self.vpc_security_groups = ResultSet([('VpcSecurityGroupMembership',
+ VPCSecurityGroupMembership)])
+ return self.vpc_security_groups
elif name == 'PendingModifiedValues':
self.pending_modified_values = PendingModifiedValues()
return self.pending_modified_values
@@ -264,6 +273,7 @@ class DBInstance(object):
preferred_backup_window=None,
multi_az=False,
iops=None,
+ vpc_security_groups=None,
apply_immediately=False):
"""
Modify this DBInstance.
@@ -335,6 +345,10 @@ class DBInstance(object):
If you specify a value, it must be at least 1000 IOPS and
you must allocate 100 GB of storage.
+ :type vpc_security_groups: list
+ :param vpc_security_groups: List of VPCSecurityGroupMembership
+            that this DBInstance is a member of.
+
:rtype: :class:`boto.rds.dbinstance.DBInstance`
:return: The modified db instance.
"""
@@ -349,7 +363,8 @@ class DBInstance(object):
preferred_backup_window,
multi_az,
apply_immediately,
- iops)
+ iops,
+ vpc_security_groups)
class PendingModifiedValues(dict):
diff --git a/boto/rds/dbsubnetgroup.py b/boto/rds/dbsubnetgroup.py
new file mode 100644
index 00000000..4b9fc580
--- /dev/null
+++ b/boto/rds/dbsubnetgroup.py
@@ -0,0 +1,69 @@
+# Copyright (c) 2013 Franc Carter - franc.carter@gmail.com
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish, dis-
+# tribute, sublicense, and/or sell copies of the Software, and to permit
+# persons to whom the Software is furnished to do so, subject to the fol-
+# lowing conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
+# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+
+"""
+Represents a DBSubnetGroup
+"""
+
+class DBSubnetGroup(object):
+ """
+ Represents an RDS database subnet group
+
+ Properties reference available from the AWS documentation at http://docs.amazonwebservices.com/AmazonRDS/latest/APIReference/API_DeleteDBSubnetGroup.html
+
+    :ivar status: The current status of the subnet group. Possible values are [ active, ? ]. Reference documentation lacks specifics of possibilities
+ :ivar connection: boto.rds.RDSConnection associated with the current object
+ :ivar description: The description of the subnet group
+ :ivar subnet_ids: List of subnet identifiers in the group
+ :ivar name: Name of the subnet group
+ :ivar vpc_id: The ID of the VPC the subnets are inside
+ """
+ def __init__(self, connection=None, name=None, description=None, subnet_ids=None):
+ self.connection = connection
+ self.name = name
+ self.description = description
+ if subnet_ids != None:
+ self.subnet_ids = subnet_ids
+ else:
+ self.subnet_ids = []
+ self.vpc_id = None
+ self.status = None
+
+ def __repr__(self):
+ return 'DBSubnetGroup:%s' % self.name
+
+ def startElement(self, name, attrs, connection):
+ pass
+
+ def endElement(self, name, value, connection):
+ if name == 'SubnetIdentifier':
+ self.subnet_ids.append(value)
+ elif name == 'DBSubnetGroupName':
+ self.name = value
+ elif name == 'DBSubnetGroupDescription':
+ self.description = value
+ elif name == 'VpcId':
+ self.vpc_id = value
+ elif name == 'SubnetGroupStatus':
+ self.status = value
+ else:
+ setattr(self, name, value)
+
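
A minimal sketch of how the SAX-style hooks above populate a ``DBSubnetGroup``;
normally boto's response parser drives these calls, and the values here are
illustrative::

    from boto.rds.dbsubnetgroup import DBSubnetGroup

    group = DBSubnetGroup()
    group.endElement('DBSubnetGroupName', 'private-subnets', None)
    group.endElement('DBSubnetGroupDescription', 'app tier subnets', None)
    group.endElement('VpcId', 'vpc-12345678', None)
    group.endElement('SubnetIdentifier', 'subnet-11111111', None)
    group.endElement('SubnetIdentifier', 'subnet-22222222', None)
    group.endElement('SubnetGroupStatus', 'Complete', None)
    print group.name, group.vpc_id, group.subnet_ids
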
diff --git a/boto/rds/vpcsecuritygroupmembership.py b/boto/rds/vpcsecuritygroupmembership.py
new file mode 100644
index 00000000..e0092e9c
--- /dev/null
+++ b/boto/rds/vpcsecuritygroupmembership.py
@@ -0,0 +1,85 @@
+# Copyright (c) 2013 Anthony Tonns http://www.corsis.com/
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish, dis-
+# tribute, sublicense, and/or sell copies of the Software, and to permit
+# persons to whom the Software is furnished to do so, subject to the fol-
+# lowing conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
+# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+
+"""
+Represents a VPCSecurityGroupMembership
+"""
+
+
+class VPCSecurityGroupMembership(object):
+ """
+    Represents a VPC Security Group that this RDS database is a member of
+
+ Properties reference available from the AWS documentation at
+ http://docs.aws.amazon.com/AmazonRDS/latest/APIReference/\
+ API_VpcSecurityGroupMembership.html
+
+ Example::
+ pri = "sg-abcdefgh"
+ sec = "sg-hgfedcba"
+
+ # Create with list of str
+ db = c.create_dbinstance(... vpc_security_groups=[pri], ... )
+
+ # Modify with list of str
+ db.modify(... vpc_security_groups=[pri,sec], ... )
+
+ # Create with objects
+ memberships = []
+ membership = VPCSecurityGroupMembership()
+ membership.vpc_group = pri
+ memberships.append(membership)
+
+ db = c.create_dbinstance(... vpc_security_groups=memberships, ... )
+
+ # Modify with objects
+        memberships = db.vpc_security_groups
+ membership = VPCSecurityGroupMembership()
+ membership.vpc_group = sec
+ memberships.append(membership)
+
+ db.modify(... vpc_security_groups=memberships, ... )
+
+ :ivar connection: :py:class:`boto.rds.RDSConnection` associated with the
+ current object
+    :ivar vpc_group: The ID of the VPC security group
+    :ivar status: Status of the VPC security group membership
+ """
+ def __init__(self, connection=None, status=None, vpc_group=None):
+ self.connection = connection
+ self.status = status
+ self.vpc_group = vpc_group
+
+ def __repr__(self):
+ return 'VPCSecurityGroupMembership:%s' % self.vpc_group
+
+ def startElement(self, name, attrs, connection):
+ pass
+
+ def endElement(self, name, value, connection):
+ if name == 'VpcSecurityGroupId':
+ self.vpc_group = value
+ elif name == 'Status':
+ self.status = value
+ else:
+ setattr(self, name, value)
diff --git a/boto/redshift/exceptions.py b/boto/redshift/exceptions.py
index 92779d08..b4f60dd8 100644
--- a/boto/redshift/exceptions.py
+++ b/boto/redshift/exceptions.py
@@ -180,3 +180,11 @@ class SubnetAlreadyInUse(JSONResponseError):
class InvalidParameterCombinationFault(JSONResponseError):
pass
+
+
+class AccessToSnapshotDeniedFault(JSONResponseError):
+ pass
+
+
+class UnauthorizedOperationFault(JSONResponseError):
+ pass
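
These two faults are wired into the RedshiftConnection fault map below; a minimal
sketch of catching them, assuming ``conn`` is an existing RedshiftConnection::

    from boto.redshift.exceptions import (AccessToSnapshotDeniedFault,
                                          UnauthorizedOperationFault)

    try:
        conn.authorize_snapshot_access('my-snapshot', '111122223333')
    except AccessToSnapshotDeniedFault:
        # the calling account cannot share this snapshot
        pass
    except UnauthorizedOperationFault:
        # the calling account is not allowed to perform the operation
        pass
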
diff --git a/boto/redshift/layer1.py b/boto/redshift/layer1.py
index f57ec0a6..6ba3fd3d 100644
--- a/boto/redshift/layer1.py
+++ b/boto/redshift/layer1.py
@@ -89,44 +89,47 @@ class RedshiftConnection(AWSQueryConnection):
_faults = {
"ClusterNotFound": exceptions.ClusterNotFoundFault,
- "InvalidClusterSnapshotState": exceptions.InvalidClusterSnapshotStateFault,
- "ClusterSnapshotNotFound": exceptions.ClusterSnapshotNotFoundFault,
- "ClusterSecurityGroupQuotaExceeded": exceptions.ClusterSecurityGroupQuotaExceededFault,
- "ReservedNodeOfferingNotFound": exceptions.ReservedNodeOfferingNotFoundFault,
- "InvalidSubnet": exceptions.InvalidSubnet,
- "ClusterSubnetGroupQuotaExceeded": exceptions.ClusterSubnetGroupQuotaExceededFault,
- "InvalidClusterState": exceptions.InvalidClusterStateFault,
+ "InvalidClusterSubnetState": exceptions.InvalidClusterSubnetStateFault,
"InvalidClusterParameterGroupState": exceptions.InvalidClusterParameterGroupStateFault,
- "ClusterParameterGroupAlreadyExists": exceptions.ClusterParameterGroupAlreadyExistsFault,
- "InvalidClusterSecurityGroupState": exceptions.InvalidClusterSecurityGroupStateFault,
+ "ReservedNodeQuotaExceeded": exceptions.ReservedNodeQuotaExceededFault,
+ "InvalidClusterState": exceptions.InvalidClusterStateFault,
"InvalidRestore": exceptions.InvalidRestoreFault,
- "AuthorizationNotFound": exceptions.AuthorizationNotFoundFault,
- "ResizeNotFound": exceptions.ResizeNotFoundFault,
+ "ClusterSecurityGroupAlreadyExists": exceptions.ClusterSecurityGroupAlreadyExistsFault,
"NumberOfNodesQuotaExceeded": exceptions.NumberOfNodesQuotaExceededFault,
- "ClusterSnapshotAlreadyExists": exceptions.ClusterSnapshotAlreadyExistsFault,
+ "ReservedNodeOfferingNotFound": exceptions.ReservedNodeOfferingNotFoundFault,
+ "InsufficientClusterCapacity": exceptions.InsufficientClusterCapacityFault,
+ "UnauthorizedOperation": exceptions.UnauthorizedOperationFault,
+ "ClusterQuotaExceeded": exceptions.ClusterQuotaExceededFault,
+ "InvalidVPCNetworkState": exceptions.InvalidVPCNetworkStateFault,
+ "ClusterSnapshotNotFound": exceptions.ClusterSnapshotNotFoundFault,
"AuthorizationQuotaExceeded": exceptions.AuthorizationQuotaExceededFault,
- "AuthorizationAlreadyExists": exceptions.AuthorizationAlreadyExistsFault,
+        "InvalidSubnet": exceptions.InvalidSubnet,
+ "ResizeNotFound": exceptions.ResizeNotFoundFault,
+ "ClusterSubnetGroupNotFound": exceptions.ClusterSubnetGroupNotFoundFault,
"ClusterSnapshotQuotaExceeded": exceptions.ClusterSnapshotQuotaExceededFault,
- "ReservedNodeNotFound": exceptions.ReservedNodeNotFoundFault,
- "ReservedNodeAlreadyExists": exceptions.ReservedNodeAlreadyExistsFault,
- "ClusterSecurityGroupAlreadyExists": exceptions.ClusterSecurityGroupAlreadyExistsFault,
- "ClusterParameterGroupNotFound": exceptions.ClusterParameterGroupNotFoundFault,
- "ReservedNodeQuotaExceeded": exceptions.ReservedNodeQuotaExceededFault,
- "ClusterQuotaExceeded": exceptions.ClusterQuotaExceededFault,
+ "AccessToSnapshotDenied": exceptions.AccessToSnapshotDeniedFault,
+ "InvalidClusterSecurityGroupState": exceptions.InvalidClusterSecurityGroupStateFault,
+ "NumberOfNodesPerClusterLimitExceeded": exceptions.NumberOfNodesPerClusterLimitExceededFault,
"ClusterSubnetQuotaExceeded": exceptions.ClusterSubnetQuotaExceededFault,
"UnsupportedOption": exceptions.UnsupportedOptionFault,
- "InvalidVPCNetworkState": exceptions.InvalidVPCNetworkStateFault,
"ClusterSecurityGroupNotFound": exceptions.ClusterSecurityGroupNotFoundFault,
- "InvalidClusterSubnetGroupState": exceptions.InvalidClusterSubnetGroupStateFault,
- "ClusterSubnetGroupAlreadyExists": exceptions.ClusterSubnetGroupAlreadyExistsFault,
- "NumberOfNodesPerClusterLimitExceeded": exceptions.NumberOfNodesPerClusterLimitExceededFault,
- "ClusterSubnetGroupNotFound": exceptions.ClusterSubnetGroupNotFoundFault,
- "ClusterParameterGroupQuotaExceeded": exceptions.ClusterParameterGroupQuotaExceededFault,
"ClusterAlreadyExists": exceptions.ClusterAlreadyExistsFault,
- "InsufficientClusterCapacity": exceptions.InsufficientClusterCapacityFault,
- "InvalidClusterSubnetState": exceptions.InvalidClusterSubnetStateFault,
+ "ClusterSnapshotAlreadyExists": exceptions.ClusterSnapshotAlreadyExistsFault,
+ "ReservedNodeAlreadyExists": exceptions.ReservedNodeAlreadyExistsFault,
+ "ClusterSubnetGroupQuotaExceeded": exceptions.ClusterSubnetGroupQuotaExceededFault,
+ "ClusterParameterGroupNotFound": exceptions.ClusterParameterGroupNotFoundFault,
+ "AuthorizationNotFound": exceptions.AuthorizationNotFoundFault,
+ "ClusterSecurityGroupQuotaExceeded": exceptions.ClusterSecurityGroupQuotaExceededFault,
+ "AuthorizationAlreadyExists": exceptions.AuthorizationAlreadyExistsFault,
+ "InvalidClusterSnapshotState": exceptions.InvalidClusterSnapshotStateFault,
+ "ClusterParameterGroupQuotaExceeded": exceptions.ClusterParameterGroupQuotaExceededFault,
+ "ClusterSubnetGroupAlreadyExists": exceptions.ClusterSubnetGroupAlreadyExistsFault,
+ "ReservedNodeNotFound": exceptions.ReservedNodeNotFoundFault,
+ "InvalidClusterSubnetGroupState": exceptions.InvalidClusterSubnetGroupStateFault,
+ "ClusterParameterGroupAlreadyExists": exceptions.ClusterParameterGroupAlreadyExistsFault,
"SubnetAlreadyInUse": exceptions.SubnetAlreadyInUse,
- "InvalidParameterCombination": exceptions.InvalidParameterCombinationFault,
}
@@ -199,8 +202,43 @@ class RedshiftConnection(AWSQueryConnection):
verb='POST',
path='/', params=params)
+ def authorize_snapshot_access(self, snapshot_identifier,
+ account_with_restore_access,
+ snapshot_cluster_identifier=None):
+ """
+ Authorizes the specified AWS customer account to restore the
+ specified snapshot.
+
+ For more information about working with snapshots, go to
+ `Amazon Redshift Snapshots`_ in the Amazon Redshift Management
+        Guide.
+
+ :type snapshot_identifier: string
+ :param snapshot_identifier: The identifier of the snapshot the account
+ is authorized to restore.
+
+ :type snapshot_cluster_identifier: string
+ :param snapshot_cluster_identifier:
+
+ :type account_with_restore_access: string
+ :param account_with_restore_access: The identifier of the AWS customer
+ account authorized to restore the specified snapshot.
+
+ """
+ params = {
+ 'SnapshotIdentifier': snapshot_identifier,
+ 'AccountWithRestoreAccess': account_with_restore_access,
+ }
+ if snapshot_cluster_identifier is not None:
+ params['SnapshotClusterIdentifier'] = snapshot_cluster_identifier
+ return self._make_request(
+ action='AuthorizeSnapshotAccess',
+ verb='POST',
+ path='/', params=params)
+
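
A minimal sketch of granting another account restore access with the new call;
the identifiers are illustrative and the connection is assumed to have valid
credentials::

    import boto.redshift

    conn = boto.redshift.connect_to_region('us-east-1')
    conn.authorize_snapshot_access(
        snapshot_identifier='my-manual-snapshot',
        account_with_restore_access='111122223333',
        snapshot_cluster_identifier='my-cluster')
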
def copy_cluster_snapshot(self, source_snapshot_identifier,
- target_snapshot_identifier):
+ target_snapshot_identifier,
+ source_snapshot_cluster_identifier=None):
"""
Copies the specified automated cluster snapshot to a new
manual cluster snapshot. The source must be an automated
@@ -227,6 +265,9 @@ class RedshiftConnection(AWSQueryConnection):
+ Must be the identifier for a valid automated snapshot whose state is
"available".
+ :type source_snapshot_cluster_identifier: string
+ :param source_snapshot_cluster_identifier:
+
:type target_snapshot_identifier: string
:param target_snapshot_identifier:
The identifier given to the new manual snapshot.
@@ -245,6 +286,8 @@ class RedshiftConnection(AWSQueryConnection):
'SourceSnapshotIdentifier': source_snapshot_identifier,
'TargetSnapshotIdentifier': target_snapshot_identifier,
}
+ if source_snapshot_cluster_identifier is not None:
+ params['SourceSnapshotClusterIdentifier'] = source_snapshot_cluster_identifier
return self._make_request(
action='CopyClusterSnapshot',
verb='POST',
@@ -353,6 +396,8 @@ class RedshiftConnection(AWSQueryConnection):
+ Must contain at least one uppercase letter.
+ Must contain at least one lowercase letter.
+ Must contain one number.
+ + Can be any printable ASCII character (ASCII code 33 to 126) except '
+ (single quote), " (double quote), \, /, @, or space.
:type cluster_security_groups: list
:param cluster_security_groups: A list of security groups to be
@@ -396,10 +441,7 @@ class RedshiftConnection(AWSQueryConnection):
+ **US-East (Northern Virginia) Region:** 03:00-11:00 UTC
- + **US-West (Northern California) Region:** 06:00-14:00 UTC
- + **EU (Ireland) Region:** 22:00-06:00 UTC
- + **Asia Pacific (Singapore) Region:** 14:00-22:00 UTC
- + **Asia Pacific (Tokyo) Region: ** 17:00-03:00 UTC
+ + **US-West (Oregon) Region** 06:00-14:00 UTC
Valid Days: Mon | Tue | Wed | Thu | Fri | Sat | Sun
@@ -822,15 +864,19 @@ class RedshiftConnection(AWSQueryConnection):
verb='POST',
path='/', params=params)
- def delete_cluster_snapshot(self, snapshot_identifier):
+ def delete_cluster_snapshot(self, snapshot_identifier,
+ snapshot_cluster_identifier=None):
"""
Deletes the specified manual snapshot. The snapshot must be in
- the "available" state.
+ the "available" state, with no other users authorized to
+ access the snapshot.
Unlike automated snapshots, manual snapshots are retained even
after you delete your cluster. Amazon Redshift does not delete
your manual snapshots. You must delete manual snapshot
- explicitly to avoid getting charged.
+ explicitly to avoid getting charged. If other accounts are
+ authorized to access the snapshot, you must revoke all of the
+ authorizations before you can delete the snapshot.
:type snapshot_identifier: string
:param snapshot_identifier: The unique identifier of the manual
@@ -838,8 +884,13 @@ class RedshiftConnection(AWSQueryConnection):
Constraints: Must be the name of an existing snapshot that is in the
`available` state.
+ :type snapshot_cluster_identifier: string
+ :param snapshot_cluster_identifier:
+
"""
params = {'SnapshotIdentifier': snapshot_identifier, }
+ if snapshot_cluster_identifier is not None:
+ params['SnapshotClusterIdentifier'] = snapshot_cluster_identifier
return self._make_request(
action='DeleteClusterSnapshot',
verb='POST',
@@ -1021,12 +1072,14 @@ class RedshiftConnection(AWSQueryConnection):
snapshot_identifier=None,
snapshot_type=None, start_time=None,
end_time=None, max_records=None,
- marker=None):
+ marker=None, owner_account=None):
"""
Returns one or more snapshot objects, which contain metadata
about your cluster snapshots. By default, this operation
returns information about all snapshots of all clusters that
- are owned by the AWS account.
+        are owned by your AWS customer account. No information is
+ returned for snapshots owned by inactive AWS customer
+ accounts.
:type cluster_identifier: string
:param cluster_identifier: The identifier of the cluster for which
@@ -1071,6 +1124,13 @@ class RedshiftConnection(AWSQueryConnection):
DescribeClusterSnapshots request to indicate the first snapshot
that the request will return.
+ :type owner_account: string
+ :param owner_account: The AWS customer account used to create or copy
+ the snapshot. Use this field to filter the results to snapshots
+ owned by a particular account. To describe snapshots you own,
+ either specify your AWS customer account, or do not specify the
+ parameter.
+
"""
params = {}
if cluster_identifier is not None:
@@ -1087,6 +1147,8 @@ class RedshiftConnection(AWSQueryConnection):
params['MaxRecords'] = max_records
if marker is not None:
params['Marker'] = marker
+ if owner_account is not None:
+ params['OwnerAccount'] = owner_account
return self._make_request(
action='DescribeClusterSnapshots',
verb='POST',
@@ -1658,6 +1720,8 @@ class RedshiftConnection(AWSQueryConnection):
+ Must contain at least one uppercase letter.
+ Must contain at least one lowercase letter.
+ Must contain one number.
+ + Can be any printable ASCII character (ASCII code 33 to 126) except '
+ (single quote), " (double quote), \, /, @, or space.
:type cluster_parameter_group_name: string
:param cluster_parameter_group_name: The name of the cluster parameter
@@ -1913,11 +1977,13 @@ class RedshiftConnection(AWSQueryConnection):
path='/', params=params)
def restore_from_cluster_snapshot(self, cluster_identifier,
- snapshot_identifier, port=None,
- availability_zone=None,
+ snapshot_identifier,
+ snapshot_cluster_identifier=None,
+ port=None, availability_zone=None,
allow_version_upgrade=None,
cluster_subnet_group_name=None,
- publicly_accessible=None):
+ publicly_accessible=None,
+ owner_account=None):
"""
Creates a new cluster from a snapshot. Amazon Redshift creates
the resulting cluster with the same configuration as the
@@ -1956,6 +2022,9 @@ class RedshiftConnection(AWSQueryConnection):
create the new cluster. This parameter isn't case sensitive.
Example: `my-snapshot-id`
+ :type snapshot_cluster_identifier: string
+ :param snapshot_cluster_identifier:
+
:type port: integer
:param port: The port number on which the cluster accepts connections.
Default: The same port as the original cluster.
@@ -1986,11 +2055,18 @@ class RedshiftConnection(AWSQueryConnection):
:param publicly_accessible: If `True`, the cluster can be accessed from
a public network.
+ :type owner_account: string
+ :param owner_account: The AWS customer account used to create or copy
+ the snapshot. Required if you are restoring a snapshot you do not
+ own, optional if you own the snapshot.
+
"""
params = {
'ClusterIdentifier': cluster_identifier,
'SnapshotIdentifier': snapshot_identifier,
}
+ if snapshot_cluster_identifier is not None:
+ params['SnapshotClusterIdentifier'] = snapshot_cluster_identifier
if port is not None:
params['Port'] = port
if availability_zone is not None:
@@ -2003,6 +2079,8 @@ class RedshiftConnection(AWSQueryConnection):
if publicly_accessible is not None:
params['PubliclyAccessible'] = str(
publicly_accessible).lower()
+ if owner_account is not None:
+ params['OwnerAccount'] = owner_account
return self._make_request(
action='RestoreFromClusterSnapshot',
verb='POST',
@@ -2060,6 +2138,41 @@ class RedshiftConnection(AWSQueryConnection):
verb='POST',
path='/', params=params)
+ def revoke_snapshot_access(self, snapshot_identifier,
+ account_with_restore_access,
+ snapshot_cluster_identifier=None):
+ """
+ Removes the ability of the specified AWS customer account to
+ restore the specified snapshot. If the account is currently
+ restoring the snapshot, the restore will run to completion.
+
+ For more information about working with snapshots, go to
+ `Amazon Redshift Snapshots`_ in the Amazon Redshift Management
+        Guide.
+
+ :type snapshot_identifier: string
+ :param snapshot_identifier: The identifier of the snapshot that the
+ account can no longer access.
+
+ :type snapshot_cluster_identifier: string
+ :param snapshot_cluster_identifier:
+
+ :type account_with_restore_access: string
+ :param account_with_restore_access: The identifier of the AWS customer
+ account that can no longer restore the specified snapshot.
+
+ """
+ params = {
+ 'SnapshotIdentifier': snapshot_identifier,
+ 'AccountWithRestoreAccess': account_with_restore_access,
+ }
+ if snapshot_cluster_identifier is not None:
+ params['SnapshotClusterIdentifier'] = snapshot_cluster_identifier
+ return self._make_request(
+ action='RevokeSnapshotAccess',
+ verb='POST',
+ path='/', params=params)
+
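
Together with ``authorize_snapshot_access`` and the new ``owner_account``
parameter on ``restore_from_cluster_snapshot``, this enables a cross-account
restore. A minimal sketch, with ``producer`` and ``consumer`` standing in for
RedshiftConnection objects in the two accounts and all identifiers illustrative::

    # Owning account (111122223333) shares the snapshot.
    producer.authorize_snapshot_access('shared-snap', '444455556666')

    # Receiving account restores it, naming the owner explicitly.
    consumer.restore_from_cluster_snapshot(
        cluster_identifier='restored-cluster',
        snapshot_identifier='shared-snap',
        owner_account='111122223333')

    # Owning account withdraws access; a restore already underway
    # still runs to completion.
    producer.revoke_snapshot_access('shared-snap', '444455556666')
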
def _make_request(self, action, verb, path, params):
params['ContentType'] = 'JSON'
response = self.make_request(action=action, verb='POST',
diff --git a/boto/route53/record.py b/boto/route53/record.py
index 3fe75abb..d26ca119 100644
--- a/boto/route53/record.py
+++ b/boto/route53/record.py
@@ -161,6 +161,7 @@ class ResourceRecordSets(ResultSet):
def __iter__(self):
"""Override the next function to support paging"""
results = ResultSet.__iter__(self)
+ truncated = self.is_truncated
while results:
for obj in results:
yield obj
@@ -169,6 +170,8 @@ class ResourceRecordSets(ResultSet):
results = self.connection.get_all_rrsets(self.hosted_zone_id, name=self.next_record_name, type=self.next_record_type)
else:
results = None
+ self.is_truncated = truncated
+
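
A minimal sketch of the behaviour being preserved here, assuming ``conn`` is a
Route53Connection and the zone id is illustrative::

    rrsets = conn.get_all_rrsets('Z123EXAMPLE')
    names = [r.name for r in rrsets]   # __iter__ pages transparently
    # is_truncated still reflects whether the first response was truncated,
    # instead of being clobbered by the final page fetched during iteration.
    print rrsets.is_truncated
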
diff --git a/boto/s3/__init__.py b/boto/s3/__init__.py
index 30d610d2..f7237157 100644
--- a/boto/s3/__init__.py
+++ b/boto/s3/__init__.py
@@ -53,6 +53,9 @@ def regions():
return [S3RegionInfo(name='us-east-1',
endpoint='s3.amazonaws.com',
connection_cls=S3Connection),
+ S3RegionInfo(name='us-gov-west-1',
+ endpoint='s3-us-gov-west-1.amazonaws.com',
+ connection_cls=S3Connection),
S3RegionInfo(name='us-west-1',
endpoint='s3-us-west-1.amazonaws.com',
connection_cls=S3Connection),
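
A minimal sketch showing that the GovCloud endpoint is now discoverable through
the usual region listing (connecting still requires credentials valid for that
partition)::

    import boto.s3

    assert 'us-gov-west-1' in [r.name for r in boto.s3.regions()]
    conn = boto.s3.connect_to_region('us-gov-west-1')
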
diff --git a/boto/s3/key.py b/boto/s3/key.py
index 9c7b4b27..2b7ae73a 100644
--- a/boto/s3/key.py
+++ b/boto/s3/key.py
@@ -21,6 +21,7 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
+from __future__ import with_statement
import errno
import mimetypes
import os
@@ -147,38 +148,38 @@ class Key(object):
provider = self.bucket.connection.provider
return provider
- @property
- def key(self):
+ def _get_key(self):
return self.name
- @key.setter
- def key(self, value):
+ def _set_key(self, value):
self.name = value
- @property
- def md5(self):
+    key = property(_get_key, _set_key)
+
+ def _get_md5(self):
if 'md5' in self.local_hashes and self.local_hashes['md5']:
return binascii.b2a_hex(self.local_hashes['md5'])
- @md5.setter
- def md5(self, value):
+ def _set_md5(self, value):
if value:
self.local_hashes['md5'] = binascii.a2b_hex(value)
elif 'md5' in self.local_hashes:
self.local_hashes.pop('md5', None)
- @property
- def base64md5(self):
+    md5 = property(_get_md5, _set_md5)
+
+ def _get_base64md5(self):
if 'md5' in self.local_hashes and self.local_hashes['md5']:
return binascii.b2a_base64(self.local_hashes['md5']).rstrip('\n')
- @base64md5.setter
- def base64md5(self, value):
+ def _set_base64md5(self, value):
if value:
self.local_hashes['md5'] = binascii.a2b_base64(value)
elif 'md5' in self.local_hashes:
del self.local_hashes['md5']
+    base64md5 = property(_get_base64md5, _set_base64md5)
+
def get_md5_from_hexdigest(self, md5_hexdigest):
"""
A utility function to create the 2-tuple (md5hexdigest, base64md5)
@@ -502,26 +503,34 @@ class Key(object):
else:
setattr(self, name, value)
- def exists(self):
+ def exists(self, headers=None):
"""
Returns True if the key exists
:rtype: bool
:return: Whether the key exists on S3
"""
- return bool(self.bucket.lookup(self.name))
+ return bool(self.bucket.lookup(self.name, headers=headers))
- def delete(self):
+ def delete(self, headers=None):
"""
Delete this key from S3
"""
- return self.bucket.delete_key(self.name, version_id=self.version_id)
+ return self.bucket.delete_key(self.name, version_id=self.version_id,
+ headers=headers)
def get_metadata(self, name):
return self.metadata.get(name)
def set_metadata(self, name, value):
- self.metadata[name] = value
+ # Ensure that metadata that is vital to signing is in the correct
+ # case. Applies to ``Content-Type`` & ``Content-MD5``.
+ if name.lower() == 'content-type':
+ self.metadata['Content-Type'] = value
+ elif name.lower() == 'content-md5':
+ self.metadata['Content-MD5'] = value
+ else:
+ self.metadata[name] = value
def update_metadata(self, d):
self.metadata.update(d)
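
A minimal sketch of the case normalization above, assuming ``key`` is an
existing boto.s3.key.Key::

    key.set_metadata('content-type', 'application/json')
    key.set_metadata('stage', 'test')
    # Signing-sensitive names are stored in canonical case; everything
    # else is kept exactly as passed in.
    print key.metadata
    # {'Content-Type': 'application/json', 'stage': 'test'}
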
@@ -741,7 +750,14 @@ class Key(object):
raise provider.storage_data_error(
'Cannot retry failed request. fp does not support seeking.')
- http_conn.putrequest(method, path)
+ # If the caller explicitly specified host header, tell putrequest
+ # not to add a second host header. Similarly for accept-encoding.
+ skips = {}
+ if boto.utils.find_matching_headers('host', headers):
+ skips['skip_host'] = 1
+ if boto.utils.find_matching_headers('accept-encoding', headers):
+ skips['skip_accept_encoding'] = 1
+ http_conn.putrequest(method, path, **skips)
for key in headers:
http_conn.putheader(key, headers[key])
http_conn.endheaders()
@@ -1464,7 +1480,7 @@ class Key(object):
if i == cb_count or cb_count == -1:
cb(data_len, cb_size)
i = 0
- except IOError as e:
+ except IOError, e:
if e.errno == errno.ENOSPC:
raise StorageDataError('Out of space for destination file '
'%s' % fp.name)
diff --git a/boto/s3/keyfile.py b/boto/s3/keyfile.py
index 4245413d..84858a2b 100644
--- a/boto/s3/keyfile.py
+++ b/boto/s3/keyfile.py
@@ -75,7 +75,7 @@ class KeyFile():
raise IOError('Invalid whence param (%d) passed to seek' % whence)
try:
self.key.open_read(headers={"Range": "bytes=%d-" % pos})
- except StorageResponseError as e:
+ except StorageResponseError, e:
# 416 Invalid Range means that the given starting byte was past the end
# of file. We catch this because the Python file interface allows silently
# seeking past the end of the file.
diff --git a/boto/s3/multipart.py b/boto/s3/multipart.py
index 12926781..fae3389e 100644
--- a/boto/s3/multipart.py
+++ b/boto/s3/multipart.py
@@ -235,6 +235,9 @@ class MultiPartUpload(object):
The other parameters are exactly as defined for the
:class:`boto.s3.key.Key` set_contents_from_file method.
+
+ :rtype: :class:`boto.s3.key.Key` or subclass
+ :returns: The uploaded part containing the etag.
"""
if part_num < 1:
raise ValueError('Part numbers must be greater than zero')
@@ -244,6 +247,7 @@ class MultiPartUpload(object):
cb=cb, num_cb=num_cb, md5=md5,
reduced_redundancy=False,
query_args=query_args, size=size)
+ return key
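
A minimal sketch of using the value that is now returned, assuming ``mp`` is an
in-progress MultiPartUpload and ``fp`` an open file object::

    part = mp.upload_part_from_file(fp, part_num=1)
    # The Key for the uploaded part is returned, so its etag can be
    # recorded without listing the upload's parts afterwards.
    print part.etag
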
def copy_part_from_key(self, src_bucket_name, src_key_name, part_num,
start=None, end=None, src_version_id=None,
diff --git a/boto/sdb/db/manager/__init__.py b/boto/sdb/db/manager/__init__.py
index 69fc16f7..ded1716c 100644
--- a/boto/sdb/db/manager/__init__.py
+++ b/boto/sdb/db/manager/__init__.py
@@ -74,11 +74,11 @@ def get_manager(cls):
elif hasattr(cls.__bases__[0], "_manager"):
return cls.__bases__[0]._manager
if db_type == 'SimpleDB':
- from sdbmanager import SDBManager
+ from boto.sdb.db.manager.sdbmanager import SDBManager
return SDBManager(cls, db_name, db_user, db_passwd,
db_host, db_port, db_table, sql_dir, enable_ssl)
elif db_type == 'XML':
- from xmlmanager import XMLManager
+ from boto.sdb.db.manager.xmlmanager import XMLManager
return XMLManager(cls, db_name, db_user, db_passwd,
db_host, db_port, db_table, sql_dir, enable_ssl)
else:
diff --git a/boto/sdb/db/manager/sdbmanager.py b/boto/sdb/db/manager/sdbmanager.py
index bce2e4e2..fd9777de 100644
--- a/boto/sdb/db/manager/sdbmanager.py
+++ b/boto/sdb/db/manager/sdbmanager.py
@@ -24,7 +24,6 @@ import re
from boto.utils import find_class
import uuid
from boto.sdb.db.key import Key
-from boto.sdb.db.model import Model
from boto.sdb.db.blob import Blob
from boto.sdb.db.property import ListProperty, MapProperty
from datetime import datetime, date, time
@@ -53,12 +52,17 @@ class SDBConverter(object):
called"encode_<type name>" or "decode_<type name>".
"""
def __init__(self, manager):
+ # Do a delayed import to prevent possible circular import errors.
+ from boto.sdb.db.model import Model
+ self.model_class = Model
self.manager = manager
self.type_map = {bool: (self.encode_bool, self.decode_bool),
int: (self.encode_int, self.decode_int),
long: (self.encode_long, self.decode_long),
float: (self.encode_float, self.decode_float),
- Model: (self.encode_reference, self.decode_reference),
+ self.model_class: (
+ self.encode_reference, self.decode_reference
+ ),
Key: (self.encode_reference, self.decode_reference),
datetime: (self.encode_datetime, self.decode_datetime),
date: (self.encode_date, self.decode_date),
@@ -69,8 +73,8 @@ class SDBConverter(object):
def encode(self, item_type, value):
try:
- if Model in item_type.mro():
- item_type = Model
+ if self.model_class in item_type.mro():
+ item_type = self.model_class
except:
pass
if item_type in self.type_map:
@@ -110,8 +114,8 @@ class SDBConverter(object):
new_value = []
for key in value:
item_type = getattr(prop, "item_type")
- if Model in item_type.mro():
- item_type = Model
+ if self.model_class in item_type.mro():
+ item_type = self.model_class
encoded_value = self.encode(item_type, value[key])
if encoded_value != None:
new_value.append('%s:%s' % (urllib.quote(key), encoded_value))
@@ -159,7 +163,7 @@ class SDBConverter(object):
if ":" in value:
key, value = value.split(':', 1)
key = urllib.unquote(key)
- if Model in item_type.mro():
+ if self.model_class in item_type.mro():
value = item_type(id=value)
else:
value = self.decode(item_type, value)
diff --git a/boto/sdb/db/model.py b/boto/sdb/db/model.py
index a625ad21..3d9a6b5a 100644
--- a/boto/sdb/db/model.py
+++ b/boto/sdb/db/model.py
@@ -14,12 +14,11 @@
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
-# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
-from boto.sdb.db.manager import get_manager
from boto.sdb.db.property import Property
from boto.sdb.db.key import Key
from boto.sdb.db.query import Query
@@ -32,6 +31,10 @@ class ModelMeta(type):
super(ModelMeta, cls).__init__(name, bases, dict)
# Make sure this is a subclass of Model - mainly copied from django ModelBase (thanks!)
cls.__sub_classes__ = []
+
+ # Do a delayed import to prevent possible circular import errors.
+ from boto.sdb.db.manager import get_manager
+
try:
if filter(lambda b: issubclass(b, Model), bases):
for base in bases:
@@ -52,7 +55,7 @@ class ModelMeta(type):
# 'Model' isn't defined yet, meaning we're looking at our own
# Model class, defined below.
pass
-
+
class Model(object):
__metaclass__ = ModelMeta
__consistent__ = False # Consistent is set off by default
@@ -67,13 +70,13 @@ class Model(object):
@classmethod
def kind(cls):
return cls.__name__
-
+
@classmethod
def _get_by_id(cls, id, manager=None):
if not manager:
manager = cls._manager
return manager.get_object(cls, id)
-
+
@classmethod
def get_by_id(cls, ids=None, parent=None):
if isinstance(ids, list):
@@ -102,7 +105,7 @@ class Model(object):
@classmethod
def get_or_insert(key_name, **kw):
raise NotImplementedError("get_or_insert not currently supported")
-
+
@classmethod
def properties(cls, hidden=True):
properties = []
@@ -171,7 +174,7 @@ class Model(object):
def __str__(self):
return str(self.id)
-
+
def __eq__(self, other):
return other and isinstance(other, Model) and self.id == other.id
@@ -191,8 +194,8 @@ class Model(object):
"""
Save this object as it is, with an optional expected value
- :param expected_value: Optional tuple of Attribute, and Value that
- must be the same in order to save this object. If this
+ :param expected_value: Optional tuple of Attribute, and Value that
+ must be the same in order to save this object. If this
condition is not met, an SDBResponseError will be raised with a
            Conflict status code.
:type expected_value: tuple or list
@@ -237,7 +240,7 @@ class Model(object):
return self
save_attributes = put_attributes
-
+
def delete(self):
self._manager.delete_object(self)
@@ -291,4 +294,4 @@ class Expando(Model):
return value
raise AttributeError
-
+
diff --git a/boto/ses/connection.py b/boto/ses/connection.py
index b5bd157b..8652042d 100644
--- a/boto/ses/connection.py
+++ b/boto/ses/connection.py
@@ -103,7 +103,8 @@ class SESConnection(AWSAuthConnection):
body = response.read()
if response.status == 200:
list_markers = ('VerifiedEmailAddresses', 'Identities',
- 'VerificationAttributes', 'SendDataPoints')
+ 'DkimTokens', 'VerificationAttributes',
+ 'SendDataPoints')
item_markers = ('member', 'item', 'entry')
e = boto.jsonresponse.Element(list_marker=list_markers,
diff --git a/boto/sns/__init__.py b/boto/sns/__init__.py
index 565317a1..4ed0539a 100644
--- a/boto/sns/__init__.py
+++ b/boto/sns/__init__.py
@@ -39,6 +39,9 @@ def regions():
RegionInfo(name='eu-west-1',
endpoint='sns.eu-west-1.amazonaws.com',
connection_cls=SNSConnection),
+ RegionInfo(name='us-gov-west-1',
+ endpoint='sns.us-gov-west-1.amazonaws.com',
+ connection_cls=SNSConnection),
RegionInfo(name='us-west-1',
endpoint='sns.us-west-1.amazonaws.com',
connection_cls=SNSConnection),
diff --git a/boto/sns/connection.py b/boto/sns/connection.py
index 0cdc628d..701892ce 100644
--- a/boto/sns/connection.py
+++ b/boto/sns/connection.py
@@ -30,7 +30,24 @@ import boto
class SNSConnection(AWSQueryConnection):
-
+ """
+ Amazon Simple Notification Service
+ Amazon Simple Notification Service (Amazon SNS) is a web service
+ that enables you to build distributed web-enabled applications.
+ Applications can use Amazon SNS to easily push real-time
+ notification messages to interested subscribers over multiple
+ delivery protocols. For more information about this product see
+ `http://aws.amazon.com/sns`_. For detailed information about
+ Amazon SNS features and their associated API calls, see the
+ `Amazon SNS Developer Guide`_.
+
+ We also provide SDKs that enable you to access Amazon SNS from
+ your preferred programming language. The SDKs contain
+ functionality that automatically takes care of tasks such as:
+ cryptographically signing your service requests, retrying
+ requests, and handling error responses. For a list of available
+ SDKs, go to `Tools for Amazon Web Services`_.
+ """
DefaultRegionName = 'us-east-1'
DefaultRegionEndpoint = 'sns.us-east-1.amazonaws.com'
APIVersion = '2010-03-31'
@@ -54,6 +71,33 @@ class SNSConnection(AWSQueryConnection):
security_token=security_token,
validate_certs=validate_certs)
+ def _build_dict_as_list_params(self, params, dictionary, name):
+ """
+        Serialize a parameter 'name' whose value is a 'dictionary' into a list of parameters.
+
+ See: http://docs.aws.amazon.com/sns/latest/api/API_SetPlatformApplicationAttributes.html
+ For example::
+
+ dictionary = {'PlatformPrincipal': 'foo', 'PlatformCredential': 'bar'}
+ name = 'Attributes'
+
+ would result in params dict being populated with:
+            Attributes.entry.1.key = PlatformCredential
+            Attributes.entry.1.value = bar
+            Attributes.entry.2.key = PlatformPrincipal
+            Attributes.entry.2.value = foo
+
+ :param params: the resulting parameters will be added to this dict
+ :param dictionary: dict - value of the serialized parameter
+ :param name: name of the serialized parameter
+ """
+ items = sorted(dictionary.items(), key=lambda x:x[0])
+ for kv, index in zip(items, range(1, len(items)+1)):
+ key, value = kv
+ prefix = '%s.entry.%s' % (name, index)
+ params['%s.key' % prefix] = key
+ params['%s.value' % prefix] = value
+
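
A minimal sketch of the flattening the helper performs, using the values from
its docstring (``conn`` is assumed to be an SNSConnection)::

    params = {}
    conn._build_dict_as_list_params(
        params,
        {'PlatformPrincipal': 'foo', 'PlatformCredential': 'bar'},
        'Attributes')
    # Entries are numbered in sorted key order:
    # {'Attributes.entry.1.key': 'PlatformCredential',
    #  'Attributes.entry.1.value': 'bar',
    #  'Attributes.entry.2.key': 'PlatformPrincipal',
    #  'Attributes.entry.2.value': 'foo'}
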
def _required_auth_capability(self):
return ['hmac-v4']
@@ -64,17 +108,10 @@ class SNSConnection(AWSQueryConnection):
this method.
"""
- params = {'ContentType': 'JSON'}
+ params = {}
if next_token:
params['NextToken'] = next_token
- response = self.make_request('ListTopics', params, '/', 'GET')
- body = response.read()
- if response.status == 200:
- return json.loads(body)
- else:
- boto.log.error('%s %s' % (response.status, response.reason))
- boto.log.error('%s' % body)
- raise self.ResponseError(response.status, response.reason, body)
+ return self._make_request('ListTopics', params)
def get_topic_attributes(self, topic):
"""
@@ -84,16 +121,8 @@ class SNSConnection(AWSQueryConnection):
:param topic: The ARN of the topic.
"""
- params = {'ContentType': 'JSON',
- 'TopicArn': topic}
- response = self.make_request('GetTopicAttributes', params, '/', 'GET')
- body = response.read()
- if response.status == 200:
- return json.loads(body)
- else:
- boto.log.error('%s %s' % (response.status, response.reason))
- boto.log.error('%s' % body)
- raise self.ResponseError(response.status, response.reason, body)
+ params = {'TopicArn': topic}
+ return self._make_request('GetTopicAttributes', params)
def set_topic_attributes(self, topic, attr_name, attr_value):
"""
@@ -111,18 +140,10 @@ class SNSConnection(AWSQueryConnection):
:param attr_value: The new value for the attribute.
"""
- params = {'ContentType': 'JSON',
- 'TopicArn': topic,
+ params = {'TopicArn': topic,
'AttributeName': attr_name,
'AttributeValue': attr_value}
- response = self.make_request('SetTopicAttributes', params, '/', 'GET')
- body = response.read()
- if response.status == 200:
- return json.loads(body)
- else:
- boto.log.error('%s %s' % (response.status, response.reason))
- boto.log.error('%s' % body)
- raise self.ResponseError(response.status, response.reason, body)
+ return self._make_request('SetTopicAttributes', params)
def add_permission(self, topic, label, account_ids, actions):
"""
@@ -144,19 +165,11 @@ class SNSConnection(AWSQueryConnection):
specified principal(s).
"""
- params = {'ContentType': 'JSON',
- 'TopicArn': topic,
+ params = {'TopicArn': topic,
'Label': label}
self.build_list_params(params, account_ids, 'AWSAccountId.member')
self.build_list_params(params, actions, 'ActionName.member')
- response = self.make_request('AddPermission', params, '/', 'GET')
- body = response.read()
- if response.status == 200:
- return json.loads(body)
- else:
- boto.log.error('%s %s' % (response.status, response.reason))
- boto.log.error('%s' % body)
- raise self.ResponseError(response.status, response.reason, body)
+ return self._make_request('AddPermission', params)
def remove_permission(self, topic, label):
"""
@@ -170,17 +183,9 @@ class SNSConnection(AWSQueryConnection):
to be removed.
"""
- params = {'ContentType': 'JSON',
- 'TopicArn': topic,
+ params = {'TopicArn': topic,
'Label': label}
- response = self.make_request('RemovePermission', params, '/', 'GET')
- body = response.read()
- if response.status == 200:
- return json.loads(body)
- else:
- boto.log.error('%s %s' % (response.status, response.reason))
- boto.log.error('%s' % body)
- raise self.ResponseError(response.status, response.reason, body)
+ return self._make_request('RemovePermission', params)
def create_topic(self, topic):
"""
@@ -190,16 +195,8 @@ class SNSConnection(AWSQueryConnection):
:param topic: The name of the new topic.
"""
- params = {'ContentType': 'JSON',
- 'Name': topic}
- response = self.make_request('CreateTopic', params, '/', 'GET')
- body = response.read()
- if response.status == 200:
- return json.loads(body)
- else:
- boto.log.error('%s %s' % (response.status, response.reason))
- boto.log.error('%s' % body)
- raise self.ResponseError(response.status, response.reason, body)
+ params = {'Name': topic}
+ return self._make_request('CreateTopic', params)
def delete_topic(self, topic):
"""
@@ -209,18 +206,11 @@ class SNSConnection(AWSQueryConnection):
:param topic: The ARN of the topic
"""
- params = {'ContentType': 'JSON',
- 'TopicArn': topic}
- response = self.make_request('DeleteTopic', params, '/', 'GET')
- body = response.read()
- if response.status == 200:
- return json.loads(body)
- else:
- boto.log.error('%s %s' % (response.status, response.reason))
- boto.log.error('%s' % body)
- raise self.ResponseError(response.status, response.reason, body)
+ params = {'TopicArn': topic}
+ return self._make_request('DeleteTopic', params, '/', 'GET')
- def publish(self, topic, message, subject=None):
+ def publish(self, topic=None, message=None, subject=None, target_arn=None,
+ message_structure=None):
"""
        Publish a message to a topic or, via target_arn, directly to a platform endpoint
@@ -232,24 +222,37 @@ class SNSConnection(AWSQueryConnection):
Messages must be UTF-8 encoded strings and
be at most 4KB in size.
+ :type message_structure: string
+ :param message_structure: Optional parameter. If left as ``None``,
+ plain text will be sent. If set to ``json``,
+ your message should be a JSON string that
+ matches the structure described at
+ http://docs.aws.amazon.com/sns/latest/dg/PublishTopic.html#sns-message-formatting-by-protocol
+
:type subject: string
:param subject: Optional parameter to be used as the "Subject"
line of the email notifications.
+ :type target_arn: string
+ :param target_arn: Optional parameter for either TopicArn or
+ EndpointArn, but not both.
+
"""
- params = {'ContentType': 'JSON',
- 'TopicArn': topic,
- 'Message': message}
- if subject:
+ if message is None:
+ # To be backwards compatible when message did not have
+ # a default value and topic and message were required
+ # args.
+ raise TypeError("'message' is a required parameter")
+ params = {'Message': message}
+ if subject is not None:
params['Subject'] = subject
- response = self.make_request('Publish', params, '/', 'GET')
- body = response.read()
- if response.status == 200:
- return json.loads(body)
- else:
- boto.log.error('%s %s' % (response.status, response.reason))
- boto.log.error('%s' % body)
- raise self.ResponseError(response.status, response.reason, body)
+ if topic is not None:
+ params['TopicArn'] = topic
+ if target_arn is not None:
+ params['TargetArn'] = target_arn
+ if message_structure is not None:
+ params['MessageStructure'] = message_structure
+ return self._make_request('Publish', params, '/', 'POST')
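
A minimal sketch of the extended call, sending per-protocol payloads to a topic
(the ARN is illustrative and ``conn`` is assumed to be an SNSConnection)::

    import json

    payload = {'default': 'fallback text',
               'email': 'longer text for email subscribers'}
    conn.publish(topic='arn:aws:sns:us-east-1:123456789012:my-topic',
                 message=json.dumps(payload),
                 message_structure='json')
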
def subscribe(self, topic, protocol, endpoint):
"""
@@ -261,7 +264,7 @@ class SNSConnection(AWSQueryConnection):
:type protocol: string
:param protocol: The protocol used to communicate with
the subscriber. Current choices are:
- email|email-json|http|https|sqs
+ email|email-json|http|https|sqs|sms
:type endpoint: string
:param endpoint: The location of the endpoint for
@@ -271,19 +274,12 @@ class SNSConnection(AWSQueryConnection):
* For http, this would be a URL beginning with http
* For https, this would be a URL beginning with https
* For sqs, this would be the ARN of an SQS Queue
+ * For sms, this would be a phone number of an SMS-enabled device
"""
- params = {'ContentType': 'JSON',
- 'TopicArn': topic,
+ params = {'TopicArn': topic,
'Protocol': protocol,
'Endpoint': endpoint}
- response = self.make_request('Subscribe', params, '/', 'GET')
- body = response.read()
- if response.status == 200:
- return json.loads(body)
- else:
- boto.log.error('%s %s' % (response.status, response.reason))
- boto.log.error('%s' % body)
- raise self.ResponseError(response.status, response.reason, body)
+ return self._make_request('Subscribe', params)
def subscribe_sqs_queue(self, topic, queue):
"""
@@ -356,19 +352,10 @@ class SNSConnection(AWSQueryConnection):
of the subscription.
"""
- params = {'ContentType': 'JSON',
- 'TopicArn': topic,
- 'Token': token}
+ params = {'TopicArn': topic, 'Token': token}
if authenticate_on_unsubscribe:
params['AuthenticateOnUnsubscribe'] = 'true'
- response = self.make_request('ConfirmSubscription', params, '/', 'GET')
- body = response.read()
- if response.status == 200:
- return json.loads(body)
- else:
- boto.log.error('%s %s' % (response.status, response.reason))
- boto.log.error('%s' % body)
- raise self.ResponseError(response.status, response.reason, body)
+ return self._make_request('ConfirmSubscription', params)
def unsubscribe(self, subscription):
"""
@@ -379,16 +366,8 @@ class SNSConnection(AWSQueryConnection):
:param subscription: The ARN of the subscription to be deleted.
"""
- params = {'ContentType': 'JSON',
- 'SubscriptionArn': subscription}
- response = self.make_request('Unsubscribe', params, '/', 'GET')
- body = response.read()
- if response.status == 200:
- return json.loads(body)
- else:
- boto.log.error('%s %s' % (response.status, response.reason))
- boto.log.error('%s' % body)
- raise self.ResponseError(response.status, response.reason, body)
+ params = {'SubscriptionArn': subscription}
+ return self._make_request('Unsubscribe', params)
def get_all_subscriptions(self, next_token=None):
"""
@@ -399,17 +378,10 @@ class SNSConnection(AWSQueryConnection):
this method.
"""
- params = {'ContentType': 'JSON'}
+ params = {}
if next_token:
params['NextToken'] = next_token
- response = self.make_request('ListSubscriptions', params, '/', 'GET')
- body = response.read()
- if response.status == 200:
- return json.loads(body)
- else:
- boto.log.error('%s %s' % (response.status, response.reason))
- boto.log.error('%s' % body)
- raise self.ResponseError(response.status, response.reason, body)
+ return self._make_request('ListSubscriptions', params)
def get_all_subscriptions_by_topic(self, topic, next_token=None):
"""
@@ -424,13 +396,329 @@ class SNSConnection(AWSQueryConnection):
this method.
"""
- params = {'ContentType': 'JSON',
- 'TopicArn': topic}
+ params = {'TopicArn': topic}
if next_token:
params['NextToken'] = next_token
- response = self.make_request('ListSubscriptionsByTopic', params,
- '/', 'GET')
+ return self._make_request('ListSubscriptionsByTopic', params)
+
+ def create_platform_application(self, name=None, platform=None,
+ attributes=None):
+ """
+ The `CreatePlatformApplication` action creates a platform
+ application object for one of the supported push notification
+ services, such as APNS and GCM, to which devices and mobile
+ apps may register. You must specify PlatformPrincipal and
+ PlatformCredential attributes when using the
+ `CreatePlatformApplication` action. The PlatformPrincipal is
+ received from the notification service. For APNS/APNS_SANDBOX,
+ PlatformPrincipal is "SSL certificate". For GCM,
+ PlatformPrincipal is not applicable. For ADM,
+ PlatformPrincipal is "client id". The PlatformCredential is
+ also received from the notification service. For
+ APNS/APNS_SANDBOX, PlatformCredential is "private key". For
+ GCM, PlatformCredential is "API key". For ADM,
+ PlatformCredential is "client secret". The
+ PlatformApplicationArn that is returned when using
+ `CreatePlatformApplication` is then used as an attribute for
+ the `CreatePlatformEndpoint` action. For more information, see
+ `Using Amazon SNS Mobile Push Notifications`_.
+
+ :type name: string
+ :param name: Application names must be made up of only uppercase and
+ lowercase ASCII letters, numbers, underscores, hyphens, and
+ periods, and must be between 1 and 256 characters long.
+
+ :type platform: string
+ :param platform: The following platforms are supported: ADM (Amazon
+ Device Messaging), APNS (Apple Push Notification Service),
+ APNS_SANDBOX, and GCM (Google Cloud Messaging).
+
+ :type attributes: map
+ :param attributes: For a list of attributes, see
+ `SetPlatformApplicationAttributes`_
+
+ """
+ params = {}
+ if name is not None:
+ params['Name'] = name
+ if platform is not None:
+ params['Platform'] = platform
+ if attributes is not None:
+ self._build_dict_as_list_params(params, attributes, 'Attributes')
+ return self._make_request(action='CreatePlatformApplication',
+ params=params)
+
+ def set_platform_application_attributes(self,
+ platform_application_arn=None,
+ attributes=None):
+ """
+ The `SetPlatformApplicationAttributes` action sets the
+ attributes of the platform application object for the
+ supported push notification services, such as APNS and GCM.
+ For more information, see `Using Amazon SNS Mobile Push
+ Notifications`_.
+
+ :type platform_application_arn: string
+ :param platform_application_arn: PlatformApplicationArn for
+ SetPlatformApplicationAttributes action.
+
+ :type attributes: map
+ :param attributes:
+ A map of the platform application attributes. Attributes in this map
+ include the following:
+
+
+ + `PlatformCredential` -- The credential received from the notification
+ service. For APNS/APNS_SANDBOX, PlatformCredential is "private
+ key". For GCM, PlatformCredential is "API key". For ADM,
+ PlatformCredential is "client secret".
+ + `PlatformPrincipal` -- The principal received from the notification
+ service. For APNS/APNS_SANDBOX, PlatformPrincipal is "SSL
+ certificate". For GCM, PlatformPrincipal is not applicable. For
+ ADM, PlatformPrincipal is "client id".
+ + `EventEndpointCreated` -- Topic ARN to which EndpointCreated event
+ notifications should be sent.
+ + `EventEndpointDeleted` -- Topic ARN to which EndpointDeleted event
+ notifications should be sent.
+ + `EventEndpointUpdated` -- Topic ARN to which EndpointUpdate event
+ notifications should be sent.
+ + `EventDeliveryFailure` -- Topic ARN to which DeliveryFailure event
+ notifications should be sent upon Direct Publish delivery failure
+ (permanent) to one of the application's endpoints.
+
+ """
+ params = {}
+ if platform_application_arn is not None:
+ params['PlatformApplicationArn'] = platform_application_arn
+ if attributes is not None:
+ self._build_dict_as_list_params(params, attributes, 'Attributes')
+ return self._make_request(action='SetPlatformApplicationAttributes',
+ params=params)
+
+ def get_platform_application_attributes(self,
+ platform_application_arn=None):
+ """
+ The `GetPlatformApplicationAttributes` action retrieves the
+ attributes of the platform application object for the
+ supported push notification services, such as APNS and GCM.
+ For more information, see `Using Amazon SNS Mobile Push
+ Notifications`_.
+
+ :type platform_application_arn: string
+ :param platform_application_arn: PlatformApplicationArn for
+ GetPlatformApplicationAttributesInput.
+
+ """
+ params = {}
+ if platform_application_arn is not None:
+ params['PlatformApplicationArn'] = platform_application_arn
+ return self._make_request(action='GetPlatformApplicationAttributes',
+ params=params)
+
+ def list_platform_applications(self, next_token=None):
+ """
+ The `ListPlatformApplications` action lists the platform
+ application objects for the supported push notification
+ services, such as APNS and GCM. The results for
+ `ListPlatformApplications` are paginated and return a limited
+ list of applications, up to 100. If additional records are
+ available after the first page results, then a NextToken
+ string will be returned. To receive the next page, you call
+ `ListPlatformApplications` using the NextToken string received
+ from the previous call. When there are no more records to
+ return, NextToken will be null. For more information, see
+ `Using Amazon SNS Mobile Push Notifications`_.
+
+ :type next_token: string
+ :param next_token: NextToken string is used when calling
+ ListPlatformApplications action to retrieve additional records that
+ are available after the first page results.
+
+ """
+ params = {}
+ if next_token is not None:
+ params['NextToken'] = next_token
+ return self._make_request(action='ListPlatformApplications',
+ params=params)
+
+ def list_endpoints_by_platform_application(self,
+ platform_application_arn=None,
+ next_token=None):
+ """
+ The `ListEndpointsByPlatformApplication` action lists the
+ endpoints and endpoint attributes for devices in a supported
+ push notification service, such as GCM and APNS. The results
+ for `ListEndpointsByPlatformApplication` are paginated and
+ return a limited list of endpoints, up to 100. If additional
+ records are available after the first page results, then a
+ NextToken string will be returned. To receive the next page,
+ you call `ListEndpointsByPlatformApplication` again using the
+ NextToken string received from the previous call. When there
+ are no more records to return, NextToken will be null. For
+ more information, see `Using Amazon SNS Mobile Push
+ Notifications`_.
+
+ :type platform_application_arn: string
+ :param platform_application_arn: PlatformApplicationArn for
+ ListEndpointsByPlatformApplicationInput action.
+
+ :type next_token: string
+ :param next_token: NextToken string is used when calling
+ ListEndpointsByPlatformApplication action to retrieve additional
+ records that are available after the first page results.
+
+ """
+ params = {}
+ if platform_application_arn is not None:
+ params['PlatformApplicationArn'] = platform_application_arn
+ if next_token is not None:
+ params['NextToken'] = next_token
+ return self._make_request(action='ListEndpointsByPlatformApplication',
+ params=params)
+
+ def delete_platform_application(self, platform_application_arn=None):
+ """
+ The `DeletePlatformApplication` action deletes a platform
+ application object for one of the supported push notification
+ services, such as APNS and GCM. For more information, see
+ `Using Amazon SNS Mobile Push Notifications`_.
+
+ :type platform_application_arn: string
+ :param platform_application_arn: PlatformApplicationArn of platform
+ application object to delete.
+
+ """
+ params = {}
+ if platform_application_arn is not None:
+ params['PlatformApplicationArn'] = platform_application_arn
+ return self._make_request(action='DeletePlatformApplication',
+ params=params)
+
+ def create_platform_endpoint(self, platform_application_arn=None,
+ token=None, custom_user_data=None,
+ attributes=None):
+ """
+ The `CreatePlatformEndpoint` creates an endpoint for a device
+ and mobile app on one of the supported push notification
+ services, such as GCM and APNS. `CreatePlatformEndpoint`
+ requires the PlatformApplicationArn that is returned from
+ `CreatePlatformApplication`. The EndpointArn that is returned
+ when using `CreatePlatformEndpoint` can then be used by the
+ `Publish` action to send a message to a mobile app or by the
+ `Subscribe` action for subscription to a topic. For more
+ information, see `Using Amazon SNS Mobile Push
+ Notifications`_.
+
+ :type platform_application_arn: string
+ :param platform_application_arn: PlatformApplicationArn returned from
+        CreatePlatformApplication is used to create an endpoint.
+
+ :type token: string
+ :param token: Unique identifier created by the notification service for
+ an app on a device. The specific name for Token will vary,
+ depending on which notification service is being used. For example,
+ when using APNS as the notification service, you need the device
+ token. Alternatively, when using GCM or ADM, the device token
+ equivalent is called the registration ID.
+
+ :type custom_user_data: string
+ :param custom_user_data: Arbitrary user data to associate with the
+ endpoint. SNS does not use this data. The data must be in UTF-8
+ format and less than 2KB.
+
+ :type attributes: map
+ :param attributes: For a list of attributes, see
+ `SetEndpointAttributes`_.
+
+ """
+ params = {}
+ if platform_application_arn is not None:
+ params['PlatformApplicationArn'] = platform_application_arn
+ if token is not None:
+ params['Token'] = token
+ if custom_user_data is not None:
+ params['CustomUserData'] = custom_user_data
+ if attributes is not None:
+ self._build_dict_as_list_params(params, attributes, 'Attributes')
+ return self._make_request(action='CreatePlatformEndpoint',
+ params=params)
+
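
A minimal sketch of the push flow these methods support; all names, tokens and
ARNs are illustrative, and the nested keys used to pull ARNs out of the parsed
JSON responses are an assumption about the response shape::

    app = conn.create_platform_application(
        name='my-app', platform='GCM',
        attributes={'PlatformCredential': 'my-gcm-api-key'})
    app_arn = (app['CreatePlatformApplicationResponse']          # assumed path
                  ['CreatePlatformApplicationResult']
                  ['PlatformApplicationArn'])

    endpoint = conn.create_platform_endpoint(
        platform_application_arn=app_arn,
        token='device-registration-id')
    endpoint_arn = (endpoint['CreatePlatformEndpointResponse']   # assumed path
                            ['CreatePlatformEndpointResult']
                            ['EndpointArn'])

    conn.publish(target_arn=endpoint_arn, message='hello, device')
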
+ def delete_endpoint(self, endpoint_arn=None):
+ """
+ The `DeleteEndpoint` action, which is idempotent, deletes the
+ endpoint from SNS. For more information, see `Using Amazon SNS
+ Mobile Push Notifications`_.
+
+ :type endpoint_arn: string
+ :param endpoint_arn: EndpointArn of endpoint to delete.
+
+ """
+ params = {}
+ if endpoint_arn is not None:
+ params['EndpointArn'] = endpoint_arn
+ return self._make_request(action='DeleteEndpoint', params=params)
+
+ def set_endpoint_attributes(self, endpoint_arn=None, attributes=None):
+ """
+ The `SetEndpointAttributes` action sets the attributes for an
+ endpoint for a device on one of the supported push
+ notification services, such as GCM and APNS. For more
+ information, see `Using Amazon SNS Mobile Push
+ Notifications`_.
+
+ :type endpoint_arn: string
+ :param endpoint_arn: EndpointArn used for SetEndpointAttributes action.
+
+ :type attributes: map
+ :param attributes:
+ A map of the endpoint attributes. Attributes in this map include the
+ following:
+
+
+ + `CustomUserData` -- arbitrary user data to associate with the
+ endpoint. SNS does not use this data. The data must be in UTF-8
+ format and less than 2KB.
+ + `Enabled` -- flag that enables/disables delivery to the endpoint.
+ Message Processor will set this to false when a notification
+ service indicates to SNS that the endpoint is invalid. Users can
+ set it back to true, typically after updating Token.
+ + `Token` -- device token, also referred to as a registration id, for
+ an app and mobile device. This is returned from the notification
+ service when an app and mobile device are registered with the
+ notification service.
+
+ """
+ params = {}
+ if endpoint_arn is not None:
+ params['EndpointArn'] = endpoint_arn
+ if attributes is not None:
+ self._build_dict_as_list_params(params, attributes, 'Attributes')
+ return self._make_request(action='SetEndpointAttributes',
+ params=params)
+
+ def get_endpoint_attributes(self, endpoint_arn=None):
+ """
+ The `GetEndpointAttributes` retrieves the endpoint attributes
+ for a device on one of the supported push notification
+ services, such as GCM and APNS. For more information, see
+ `Using Amazon SNS Mobile Push Notifications`_.
+
+ :type endpoint_arn: string
+ :param endpoint_arn: EndpointArn for GetEndpointAttributes input.
+
+ """
+ params = {}
+ if endpoint_arn is not None:
+ params['EndpointArn'] = endpoint_arn
+ return self._make_request(action='GetEndpointAttributes',
+ params=params)
+
+ def _make_request(self, action, params, path='/', verb='GET'):
+ params['ContentType'] = 'JSON'
+ response = self.make_request(action=action, verb=verb,
+ path=path, params=params)
body = response.read()
+ boto.log.debug(body)
if response.status == 200:
return json.loads(body)
else:
diff --git a/boto/sqs/__init__.py b/boto/sqs/__init__.py
index b59a4572..973b8ba5 100644
--- a/boto/sqs/__init__.py
+++ b/boto/sqs/__init__.py
@@ -32,6 +32,8 @@ def regions():
"""
return [SQSRegionInfo(name='us-east-1',
endpoint='queue.amazonaws.com'),
+ SQSRegionInfo(name='us-gov-west-1',
+ endpoint='sqs.us-gov-west-1.amazonaws.com'),
SQSRegionInfo(name='eu-west-1',
endpoint='eu-west-1.queue.amazonaws.com'),
SQSRegionInfo(name='us-west-1',
diff --git a/boto/sqs/message.py b/boto/sqs/message.py
index 43efee38..f0666e56 100644
--- a/boto/sqs/message.py
+++ b/boto/sqs/message.py
@@ -199,6 +199,9 @@ class MHMessage(Message):
s = s + '%s: %s\n' % (item[0], item[1])
return s
+ def __contains__(self, key):
+ return key in self._body
+
def __getitem__(self, key):
if key in self._body:
return self._body[key]
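The new __contains__ hook lets MHMessage work with Python's `in` operator instead of reaching into _body directly; a small sketch:

    from boto.sqs.message import MHMessage

    msg = MHMessage()
    msg['Priority'] = 'high'      # stored in the message body mapping
    if 'Priority' in msg:         # now supported via __contains__
        print(msg['Priority'])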
diff --git a/boto/sts/__init__.py b/boto/sts/__init__.py
index 05fd74e5..0b7a8de2 100644
--- a/boto/sts/__init__.py
+++ b/boto/sts/__init__.py
@@ -33,7 +33,11 @@ def regions():
"""
return [RegionInfo(name='us-east-1',
endpoint='sts.amazonaws.com',
+ connection_cls=STSConnection),
+ RegionInfo(name='us-gov-west-1',
+ endpoint='sts.us-gov-west-1.amazonaws.com',
connection_cls=STSConnection)
+
]
diff --git a/boto/sts/connection.py b/boto/sts/connection.py
index bdf21859..5f488e26 100644
--- a/boto/sts/connection.py
+++ b/boto/sts/connection.py
@@ -69,12 +69,13 @@ class STSConnection(AWSQueryConnection):
is_secure=True, port=None, proxy=None, proxy_port=None,
proxy_user=None, proxy_pass=None, debug=0,
https_connection_factory=None, region=None, path='/',
- converter=None, validate_certs=True):
+ converter=None, validate_certs=True, anon=False):
if not region:
region = RegionInfo(self, self.DefaultRegionName,
self.DefaultRegionEndpoint,
connection_cls=STSConnection)
self.region = region
+ self.anon = anon
self._mutex = threading.Semaphore()
AWSQueryConnection.__init__(self, aws_access_key_id,
aws_secret_access_key,
@@ -85,7 +86,10 @@ class STSConnection(AWSQueryConnection):
validate_certs=validate_certs)
def _required_auth_capability(self):
- return ['sign-v2']
+ if self.anon:
+ return ['pure-query']
+ else:
+ return ['sign-v2']
def _check_token_cache(self, token_key, duration=None, window_seconds=60):
token = _session_token_cache.get(token_key, None)
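A sketch of the new anon flag, which selects the unsigned 'pure-query' handler so that calls that do not need AWS credentials (for example assume_role_with_web_identity) can be made anonymously; the role ARN and token below are placeholders:

    from boto.sts import STSConnection

    conn = STSConnection(anon=True)   # no access key or secret key required
    assumed = conn.assume_role_with_web_identity(
        role_arn='arn:aws:iam::123456789012:role/WebAppRole',
        role_session_name='web-session',
        web_identity_token='token-from-identity-provider')
    print(assumed.credentials.access_key)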
diff --git a/boto/sts/credentials.py b/boto/sts/credentials.py
index a28d1067..21828db7 100644
--- a/boto/sts/credentials.py
+++ b/boto/sts/credentials.py
@@ -42,6 +42,7 @@ class Credentials(object):
self.secret_key = None
self.session_token = None
self.expiration = None
+ self.request_id = None
@classmethod
def from_json(cls, json_doc):
@@ -138,6 +139,7 @@ class Credentials(object):
delta = ts - now
return delta.total_seconds() <= 0
+
class FederationToken(object):
"""
:ivar credentials: A Credentials object containing the credentials.
@@ -153,6 +155,7 @@ class FederationToken(object):
self.federated_user_arn = None
self.federated_user_id = None
self.packed_policy_size = None
+ self.request_id = None
def startElement(self, name, attrs, connection):
if name == 'Credentials':
diff --git a/boto/swf/__init__.py b/boto/swf/__init__.py
index 5eab6bc0..3594444d 100644
--- a/boto/swf/__init__.py
+++ b/boto/swf/__init__.py
@@ -27,6 +27,7 @@ import boto.swf.layer1
REGION_ENDPOINTS = {
'us-east-1': 'swf.us-east-1.amazonaws.com',
+ 'us-gov-west-1': 'swf.us-gov-west-1.amazonaws.com',
'us-west-1': 'swf.us-west-1.amazonaws.com',
'us-west-2': 'swf.us-west-2.amazonaws.com',
'sa-east-1': 'swf.sa-east-1.amazonaws.com',
diff --git a/boto/swf/layer1.py b/boto/swf/layer1.py
index 8e1af903..264016bd 100644
--- a/boto/swf/layer1.py
+++ b/boto/swf/layer1.py
@@ -85,7 +85,7 @@ class Layer1(AWSAuthConnection):
debug, session_token)
def _required_auth_capability(self):
- return ['hmac-v3-http']
+ return ['hmac-v4']
@classmethod
def _normalize_request_dict(cls, data):
@@ -112,7 +112,7 @@ class Layer1(AWSAuthConnection):
:type data: dict
:param data: Specifies request parameters associated with the action.
- """
+ """
self._normalize_request_dict(data)
json_input = json.dumps(data)
return self.make_request(action, json_input, object_hook)
@@ -175,7 +175,7 @@ class Layer1(AWSAuthConnection):
:raises: UnknownResourceFault, SWFOperationNotPermittedError
"""
return self.json_request('PollForActivityTask', {
- 'domain': domain,
+ 'domain': domain,
'taskList': {'name': task_list},
'identity': identity,
})
@@ -243,7 +243,7 @@ class Layer1(AWSAuthConnection):
'taskToken': task_token,
'details': details,
})
-
+
def record_activity_task_heartbeat(self, task_token, details=None):
"""
Used by activity workers to report to the service that the
@@ -317,7 +317,7 @@ class Layer1(AWSAuthConnection):
:raises: UnknownResourceFault, SWFOperationNotPermittedError
"""
return self.json_request('PollForDecisionTask', {
- 'domain': domain,
+ 'domain': domain,
'taskList': {'name': task_list},
'identity': identity,
'maximumPageSize': maximum_page_size,
@@ -351,7 +351,7 @@ class Layer1(AWSAuthConnection):
return self.json_request('RespondDecisionTaskCompleted', {
'taskToken': task_token,
'decisions': decisions,
- 'executionContext': execution_context,
+ 'executionContext': execution_context,
})
def request_cancel_workflow_execution(self, domain, workflow_id,
@@ -378,7 +378,7 @@ class Layer1(AWSAuthConnection):
:raises: UnknownResourceFault, SWFOperationNotPermittedError
"""
return self.json_request('RequestCancelWorkflowExecution', {
- 'domain': domain,
+ 'domain': domain,
'workflowId': workflow_id,
'runId': run_id,
})
@@ -465,7 +465,7 @@ class Layer1(AWSAuthConnection):
SWFOperationNotPermittedError, DefaultUndefinedFault
"""
return self.json_request('StartWorkflowExecution', {
- 'domain': domain,
+ 'domain': domain,
'workflowId': workflow_id,
'workflowType': {'name': workflow_name,
'version': workflow_version},
@@ -509,7 +509,7 @@ class Layer1(AWSAuthConnection):
:raises: UnknownResourceFault, SWFOperationNotPermittedError
"""
return self.json_request('SignalWorkflowExecution', {
- 'domain': domain,
+ 'domain': domain,
'signalName': signal_name,
'workflowId': workflow_id,
'input': input,
@@ -567,7 +567,7 @@ class Layer1(AWSAuthConnection):
:raises: UnknownResourceFault, SWFOperationNotPermittedError
"""
return self.json_request('TerminateWorkflowExecution', {
- 'domain': domain,
+ 'domain': domain,
'workflowId': workflow_id,
'childPolicy': child_policy,
'details': details,
@@ -682,7 +682,7 @@ class Layer1(AWSAuthConnection):
'activityType': {'name': activity_name,
'version': activity_version}
})
-
+
## Workflow Management
def register_workflow_type(self, domain, name, version,
@@ -756,8 +756,8 @@ class Layer1(AWSAuthConnection):
UnknownResourceFault, SWFOperationNotPermittedError
"""
return self.json_request('RegisterWorkflowType', {
- 'domain': domain,
- 'name': name,
+ 'domain': domain,
+ 'name': name,
'version': version,
'defaultTaskList': {'name': task_list},
'defaultChildPolicy': default_child_policy,
@@ -765,7 +765,7 @@ class Layer1(AWSAuthConnection):
'defaultTaskStartToCloseTimeout': default_task_start_to_close_timeout,
'description': description,
})
-
+
def deprecate_workflow_type(self, domain, workflow_name, workflow_version):
"""
Deprecates the specified workflow type. After a workflow type
@@ -905,7 +905,7 @@ class Layer1(AWSAuthConnection):
'nextPageToken': next_page_token,
'reverseOrder': reverse_order,
})
-
+
def describe_activity_type(self, domain, activity_name, activity_version):
"""
Returns information about the specified activity type. This
@@ -975,7 +975,7 @@ class Layer1(AWSAuthConnection):
:raises: SWFOperationNotPermittedError, UnknownResourceFault
"""
return self.json_request('ListWorkflowTypes', {
- 'domain': domain,
+ 'domain': domain,
'name': name,
'registrationStatus': registration_status,
'maximumPageSize': maximum_page_size,
@@ -1031,7 +1031,7 @@ class Layer1(AWSAuthConnection):
"""
return self.json_request('DescribeWorkflowExecution', {
'domain': domain,
- 'execution': {'runId': run_id,
+ 'execution': {'runId': run_id,
'workflowId': workflow_id},
})
@@ -1080,13 +1080,13 @@ class Layer1(AWSAuthConnection):
"""
return self.json_request('GetWorkflowExecutionHistory', {
'domain': domain,
- 'execution': {'runId': run_id,
+ 'execution': {'runId': run_id,
'workflowId': workflow_id},
'maximumPageSize': maximum_page_size,
'nextPageToken': next_page_token,
'reverseOrder': reverse_order,
})
-
+
def count_open_workflow_executions(self, domain, latest_date, oldest_date,
tag=None,
workflow_id=None,
@@ -1454,7 +1454,7 @@ class Layer1(AWSAuthConnection):
'nextPageToken': next_page_token,
'reverseOrder': reverse_order,
})
-
+
def describe_domain(self, name):
"""
Returns information about the specified domain including
@@ -1486,7 +1486,7 @@ class Layer1(AWSAuthConnection):
:raises: UnknownResourceFault, SWFOperationNotPermittedError
"""
return self.json_request('CountPendingDecisionTasks', {
- 'domain': domain,
+ 'domain': domain,
'taskList': {'name': task_list}
})
@@ -1507,6 +1507,6 @@ class Layer1(AWSAuthConnection):
:raises: UnknownResourceFault, SWFOperationNotPermittedError
"""
return self.json_request('CountPendingActivityTasks', {
- 'domain': domain,
+ 'domain': domain,
'taskList': {'name': task_list}
})
diff --git a/boto/swf/layer2.py b/boto/swf/layer2.py
index cb3298e1..fbb9f33d 100644
--- a/boto/swf/layer2.py
+++ b/boto/swf/layer2.py
@@ -19,15 +19,12 @@ def set_default_credentials(aws_access_key_id, aws_secret_access_key):
class SWFBase(object):
- """SWFBase."""
-
name = None
domain = None
aws_access_key_id = None
aws_secret_access_key = None
def __init__(self, **kwargs):
- """Construct an SWF object."""
# Set default credentials.
for credkey in ('aws_access_key_id', 'aws_secret_access_key'):
if DEFAULT_CREDENTIALS.get(credkey):
@@ -40,7 +37,6 @@ class SWFBase(object):
self.aws_secret_access_key)
def __repr__(self):
- """Generate string representation."""
rep_str = str(self.name)
if hasattr(self, 'version'):
rep_str += '-' + str(getattr(self, 'version'))
@@ -106,9 +102,9 @@ class Domain(SWFBase):
def executions(self, closed=False, **kwargs):
"""List list open/closed executions.
- For more info, try:
- >>> help(boto.swf.layer1.Layer1.list_closed_workflow_executions)
- >>> help(boto.swf.layer1.Layer1.list_open_workflow_executions)
+ For a full list of available parameters refer to
+ :py:func:`boto.swf.layer1.Layer1.list_closed_workflow_executions` and
+ :py:func:`boto.swf.layer1.Layer1.list_open_workflow_executions`
"""
if closed:
executions = self._swf.list_closed_workflow_executions(self.name,
@@ -148,8 +144,6 @@ class Domain(SWFBase):
class Actor(SWFBase):
- """Simple Workflow Actor interface."""
-
task_list = None
last_tasktoken = None
domain = None
@@ -160,7 +154,7 @@ class Actor(SWFBase):
class ActivityWorker(Actor):
- """ActivityWorker."""
+ """Base class for SimpleWorkflow activity workers."""
@wraps(Layer1.respond_activity_task_canceled)
def cancel(self, task_token=None, details=None):
@@ -194,14 +188,18 @@ class ActivityWorker(Actor):
@wraps(Layer1.poll_for_activity_task)
def poll(self, **kwargs):
"""PollForActivityTask."""
- task = self._swf.poll_for_activity_task(self.domain, self.task_list,
+ task_list = self.task_list
+ if 'task_list' in kwargs:
+ task_list = kwargs.get('task_list')
+ del kwargs['task_list']
+ task = self._swf.poll_for_activity_task(self.domain, task_list,
**kwargs)
self.last_tasktoken = task.get('taskToken')
return task
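With this change an activity worker can poll a different task list per call without rebuilding the object; a minimal sketch (domain and list names are placeholders):

    from boto.swf.layer2 import ActivityWorker

    worker = ActivityWorker(domain='my-domain', task_list='default-list')
    task = worker.poll(task_list='priority-list')  # overrides self.task_list for this call only
    if 'activityId' in task:
        worker.complete(result='done')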
class Decider(Actor):
- """Simple Workflow Decider."""
+ """Base class for SimpleWorkflow deciders."""
@wraps(Layer1.respond_decision_task_completed)
def complete(self, task_token=None, decisions=None, **kwargs):
@@ -217,16 +215,18 @@ class Decider(Actor):
@wraps(Layer1.poll_for_decision_task)
def poll(self, **kwargs):
"""PollForDecisionTask."""
- result = self._swf.poll_for_decision_task(self.domain, self.task_list,
+ task_list = self.task_list
+ if 'task_list' in kwargs:
+ task_list = kwargs.get('task_list')
+ del kwargs['task_list']
+ decision_task = self._swf.poll_for_decision_task(self.domain, task_list,
**kwargs)
- # Record task token.
- self.last_tasktoken = result.get('taskToken')
- # Record the last event.
- return result
+ self.last_tasktoken = decision_task.get('taskToken')
+ return decision_task
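The same per-call override applies to deciders, and last_tasktoken is still recorded from the returned decision task; for example:

    from boto.swf.layer2 import Decider

    decider = Decider(domain='my-domain', task_list='decisions')
    decision_task = decider.poll(task_list='urgent-decisions')  # optional override
    if decision_task.get('events'):
        print(decider.last_tasktoken)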
class WorkflowType(SWFBase):
- """WorkflowType."""
+ """A versioned workflow type."""
version = None
task_list = None
@@ -274,7 +274,7 @@ class WorkflowType(SWFBase):
class WorkflowExecution(SWFBase):
- """WorkflowExecution."""
+ """An instance of a workflow."""
workflowId = None
runId = None
@@ -312,7 +312,7 @@ class WorkflowExecution(SWFBase):
class ActivityType(SWFBase):
- """ActivityType."""
+ """A versioned activity type."""
version = None
diff --git a/boto/vpc/__init__.py b/boto/vpc/__init__.py
index e529b6f3..1e76cc9d 100644
--- a/boto/vpc/__init__.py
+++ b/boto/vpc/__init__.py
@@ -52,6 +52,10 @@ def regions(**kw_params):
endpoint=RegionData[region_name],
connection_cls=VPCConnection)
regions.append(region)
+ regions.append(RegionInfo(name='us-gov-west-1',
+ endpoint='ec2.us-gov-west-1.amazonaws.com',
+ connection_cls=VPCConnection)
+ )
return regions
@@ -79,7 +83,7 @@ class VPCConnection(EC2Connection):
# VPC methods
- def get_all_vpcs(self, vpc_ids=None, filters=None):
+ def get_all_vpcs(self, vpc_ids=None, filters=None, dry_run=False):
"""
Retrieve information about your VPCs. You can filter results to
return information only about those VPCs that match your search
@@ -98,6 +102,9 @@ class VPCConnection(EC2Connection):
* *cidrBlock* - a list CIDR blocks of the VPC
* *dhcpOptionsId* - a list of IDs of a set of DHCP options
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: list
:return: A list of :class:`boto.vpc.vpc.VPC`
"""
@@ -106,37 +113,49 @@ class VPCConnection(EC2Connection):
self.build_list_params(params, vpc_ids, 'VpcId')
if filters:
self.build_filter_params(params, dict(filters))
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_list('DescribeVpcs', params, [('item', VPC)])
- def create_vpc(self, cidr_block):
+ def create_vpc(self, cidr_block, dry_run=False):
"""
Create a new Virtual Private Cloud.
:type cidr_block: str
:param cidr_block: A valid CIDR block
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: The newly created VPC
:return: A :class:`boto.vpc.vpc.VPC` object
"""
params = {'CidrBlock' : cidr_block}
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_object('CreateVpc', params, VPC)
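Every VPC call gains the same dry_run switch; when set, EC2 answers with a DryRunOperation error instead of performing the action, which makes it handy for permission checks. A sketch (region and CIDR block are arbitrary):

    import boto.vpc
    from boto.exception import EC2ResponseError

    conn = boto.vpc.connect_to_region('us-east-1')
    try:
        conn.create_vpc('10.0.0.0/16', dry_run=True)
    except EC2ResponseError as e:
        # 'DryRunOperation' indicates the request would have succeeded.
        print(e.error_code)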
- def delete_vpc(self, vpc_id):
+ def delete_vpc(self, vpc_id, dry_run=False):
"""
Delete a Virtual Private Cloud.
:type vpc_id: str
:param vpc_id: The ID of the vpc to be deleted.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: bool
:return: True if successful
"""
params = {'VpcId': vpc_id}
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_status('DeleteVpc', params)
def modify_vpc_attribute(self, vpc_id,
enable_dns_support=None,
- enable_dns_hostnames=None):
+ enable_dns_hostnames=None, dry_run=False):
"""
Modifies the specified attribute of the specified VPC.
You can only modify one attribute at a time.
@@ -153,6 +172,10 @@ class VPCConnection(EC2Connection):
provided for the instances launched in this VPC. You can only
set this attribute to ``true`` if EnableDnsSupport
is also ``true``.
+
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
"""
params = {'VpcId': vpc_id}
if enable_dns_support is not None:
@@ -165,11 +188,14 @@ class VPCConnection(EC2Connection):
params['EnableDnsHostnames.Value'] = 'true'
else:
params['EnableDnsHostnames.Value'] = 'false'
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_status('ModifyVpcAttribute', params)
# Route Tables
- def get_all_route_tables(self, route_table_ids=None, filters=None):
+ def get_all_route_tables(self, route_table_ids=None, filters=None,
+ dry_run=False):
"""
Retrieve information about your routing tables. You can filter results
to return information only about those route tables that match your
@@ -184,6 +210,9 @@ class VPCConnection(EC2Connection):
:param filters: A list of tuples containing filters. Each tuple
consists of a filter key and a filter value.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: list
:return: A list of :class:`boto.vpc.routetable.RouteTable`
"""
@@ -192,10 +221,12 @@ class VPCConnection(EC2Connection):
self.build_list_params(params, route_table_ids, "RouteTableId")
if filters:
self.build_filter_params(params, dict(filters))
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_list('DescribeRouteTables', params,
[('item', RouteTable)])
- def associate_route_table(self, route_table_id, subnet_id):
+ def associate_route_table(self, route_table_id, subnet_id, dry_run=False):
"""
Associates a route table with a specific subnet.
@@ -205,6 +236,9 @@ class VPCConnection(EC2Connection):
:type subnet_id: str
:param subnet_id: The ID of the subnet to associate with.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: str
:return: The ID of the association created
"""
@@ -212,11 +246,12 @@ class VPCConnection(EC2Connection):
'RouteTableId': route_table_id,
'SubnetId': subnet_id
}
-
+ if dry_run:
+ params['DryRun'] = 'true'
result = self.get_object('AssociateRouteTable', params, ResultSet)
return result.associationId
- def disassociate_route_table(self, association_id):
+ def disassociate_route_table(self, association_id, dry_run=False):
"""
Removes an association from a route table. This will cause all subnets
that would've used this association to now use the main routing
@@ -225,40 +260,85 @@ class VPCConnection(EC2Connection):
:type association_id: str
:param association_id: The ID of the association to disassociate.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: bool
:return: True if successful
"""
params = { 'AssociationId': association_id }
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_status('DisassociateRouteTable', params)
- def create_route_table(self, vpc_id):
+ def create_route_table(self, vpc_id, dry_run=False):
"""
Creates a new route table.
:type vpc_id: str
:param vpc_id: The VPC ID to associate this route table with.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: The newly created route table
:return: A :class:`boto.vpc.routetable.RouteTable` object
"""
params = { 'VpcId': vpc_id }
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_object('CreateRouteTable', params, RouteTable)
- def delete_route_table(self, route_table_id):
+ def delete_route_table(self, route_table_id, dry_run=False):
"""
Delete a route table.
:type route_table_id: str
:param route_table_id: The ID of the route table to delete.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: bool
:return: True if successful
"""
params = { 'RouteTableId': route_table_id }
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_status('DeleteRouteTable', params)
+ def replace_route_table_assocation(self, association_id,
+ route_table_id, dry_run=False):
+ """
+ Replaces a route association with a new route table. This can be
+ used to replace the 'main' route table by using the main route
+ table association instead of the more common subnet type
+ association.
+
+ :type association_id: str
+ :param association_id: The ID of the existing association to replace.
+
+ :type route_table_id: str
+ :param route_table_id: The ID of the route table to be used in the
+ association.
+
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
+ :rtype: bool
+ :return: True if successful
+ """
+
+ params = {
+ 'AssociationId': association_id,
+ 'RouteTableId': route_table_id
+ }
+ if dry_run:
+ params['DryRun'] = 'true'
+ return self.get_status('ReplaceRouteTableAssociation', params)
+
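A sketch of the new call, pointing an existing association (for example the VPC's main route table association) at a freshly created table; the IDs are placeholders and the method name keeps the spelling used in the code above:

    import boto.vpc

    conn = boto.vpc.connect_to_region('us-east-1')
    rt = conn.create_route_table('vpc-12345678')
    conn.replace_route_table_assocation('rtbassoc-12345678', rt.id)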
def create_route(self, route_table_id, destination_cidr_block,
- gateway_id=None, instance_id=None):
+ gateway_id=None, instance_id=None, dry_run=False):
"""
Creates a new route in the route table within a VPC. The route's target
can be either a gateway attached to the VPC or a NAT instance in the
@@ -277,6 +357,9 @@ class VPCConnection(EC2Connection):
:type instance_id: str
:param instance_id: The ID of a NAT instance in your VPC.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: bool
:return: True if successful
"""
@@ -289,11 +372,14 @@ class VPCConnection(EC2Connection):
params['GatewayId'] = gateway_id
elif instance_id is not None:
params['InstanceId'] = instance_id
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_status('CreateRoute', params)
def replace_route(self, route_table_id, destination_cidr_block,
- gateway_id=None, instance_id=None, interface_id=None):
+ gateway_id=None, instance_id=None, interface_id=None,
+ dry_run=False):
"""
Replaces an existing route within a route table in a VPC.
@@ -313,6 +399,9 @@ class VPCConnection(EC2Connection):
:type interface_id: str
:param interface_id: Allows routing to network interface attachments.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: bool
:return: True if successful
"""
@@ -327,10 +416,13 @@ class VPCConnection(EC2Connection):
params['InstanceId'] = instance_id
elif interface_id is not None:
params['NetworkInterfaceId'] = interface_id
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_status('ReplaceRoute', params)
- def delete_route(self, route_table_id, destination_cidr_block):
+ def delete_route(self, route_table_id, destination_cidr_block,
+ dry_run=False):
"""
Deletes a route from a route table within a VPC.
@@ -341,6 +433,9 @@ class VPCConnection(EC2Connection):
:param destination_cidr_block: The CIDR address block used for
destination match.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: bool
:return: True if successful
"""
@@ -348,13 +443,14 @@ class VPCConnection(EC2Connection):
'RouteTableId': route_table_id,
'DestinationCidrBlock': destination_cidr_block
}
-
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_status('DeleteRoute', params)
# Internet Gateways
def get_all_internet_gateways(self, internet_gateway_ids=None,
- filters=None):
+ filters=None, dry_run=False):
"""
Get a list of internet gateways. You can filter results to return information
about only those gateways that you're interested in.
@@ -365,6 +461,10 @@ class VPCConnection(EC2Connection):
:type filters: list of tuples
:param filters: A list of tuples containing filters. Each tuple
consists of a filter key and a filter value.
+
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
"""
params = {}
@@ -373,32 +473,46 @@ class VPCConnection(EC2Connection):
'InternetGatewayId')
if filters:
self.build_filter_params(params, dict(filters))
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_list('DescribeInternetGateways', params,
[('item', InternetGateway)])
- def create_internet_gateway(self):
+ def create_internet_gateway(self, dry_run=False):
"""
Creates an internet gateway for VPC.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: Newly created internet gateway.
:return: `boto.vpc.internetgateway.InternetGateway`
"""
- return self.get_object('CreateInternetGateway', {}, InternetGateway)
+ params = {}
+ if dry_run:
+ params['DryRun'] = 'true'
+ return self.get_object('CreateInternetGateway', params, InternetGateway)
- def delete_internet_gateway(self, internet_gateway_id):
+ def delete_internet_gateway(self, internet_gateway_id, dry_run=False):
"""
Deletes an internet gateway from the VPC.
:type internet_gateway_id: str
:param internet_gateway_id: The ID of the internet gateway to delete.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: Bool
:return: True if successful
"""
params = { 'InternetGatewayId': internet_gateway_id }
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_status('DeleteInternetGateway', params)
- def attach_internet_gateway(self, internet_gateway_id, vpc_id):
+ def attach_internet_gateway(self, internet_gateway_id, vpc_id,
+ dry_run=False):
"""
Attach an internet gateway to a specific VPC.
@@ -408,6 +522,9 @@ class VPCConnection(EC2Connection):
:type vpc_id: str
:param vpc_id: The ID of the VPC to attach to.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: Bool
:return: True if successful
"""
@@ -415,10 +532,12 @@ class VPCConnection(EC2Connection):
'InternetGatewayId': internet_gateway_id,
'VpcId': vpc_id
}
-
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_status('AttachInternetGateway', params)
- def detach_internet_gateway(self, internet_gateway_id, vpc_id):
+ def detach_internet_gateway(self, internet_gateway_id, vpc_id,
+ dry_run=False):
"""
Detach an internet gateway from a specific VPC.
@@ -428,6 +547,9 @@ class VPCConnection(EC2Connection):
:type vpc_id: str
:param vpc_id: The ID of the VPC to attach to.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: Bool
:return: True if successful
"""
@@ -435,13 +557,14 @@ class VPCConnection(EC2Connection):
'InternetGatewayId': internet_gateway_id,
'VpcId': vpc_id
}
-
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_status('DetachInternetGateway', params)
# Customer Gateways
def get_all_customer_gateways(self, customer_gateway_ids=None,
- filters=None):
+ filters=None, dry_run=False):
"""
Retrieve information about your CustomerGateways. You can filter
results to return information only about those CustomerGateways that
@@ -463,6 +586,9 @@ class VPCConnection(EC2Connection):
- *ipAddress* the IP address of customer gateway's
internet-routable external interface
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: list
:return: A list of :class:`boto.vpc.customergateway.CustomerGateway`
"""
@@ -473,10 +599,13 @@ class VPCConnection(EC2Connection):
if filters:
self.build_filter_params(params, dict(filters))
+ if dry_run:
+ params['DryRun'] = 'true'
+
return self.get_list('DescribeCustomerGateways', params,
[('item', CustomerGateway)])
- def create_customer_gateway(self, type, ip_address, bgp_asn):
+ def create_customer_gateway(self, type, ip_address, bgp_asn, dry_run=False):
"""
Create a new Customer Gateway
@@ -491,30 +620,41 @@ class VPCConnection(EC2Connection):
:param bgp_asn: Customer gateway's Border Gateway Protocol (BGP)
Autonomous System Number (ASN)
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: The newly created CustomerGateway
:return: A :class:`boto.vpc.customergateway.CustomerGateway` object
"""
params = {'Type' : type,
'IpAddress' : ip_address,
'BgpAsn' : bgp_asn}
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_object('CreateCustomerGateway', params, CustomerGateway)
- def delete_customer_gateway(self, customer_gateway_id):
+ def delete_customer_gateway(self, customer_gateway_id, dry_run=False):
"""
Delete a Customer Gateway.
:type customer_gateway_id: str
:param customer_gateway_id: The ID of the customer_gateway to be deleted.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: bool
:return: True if successful
"""
params = {'CustomerGatewayId': customer_gateway_id}
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_status('DeleteCustomerGateway', params)
# VPN Gateways
- def get_all_vpn_gateways(self, vpn_gateway_ids=None, filters=None):
+ def get_all_vpn_gateways(self, vpn_gateway_ids=None, filters=None,
+ dry_run=False):
"""
Retrieve information about your VpnGateways. You can filter results to
return information only about those VpnGateways that match your search
@@ -535,6 +675,9 @@ class VPCConnection(EC2Connection):
- *availabilityZone*, a list of Availability zones the
VPN gateway is in.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: list
:return: A list of :class:`boto.vpc.customergateway.VpnGateway`
"""
@@ -543,10 +686,12 @@ class VPCConnection(EC2Connection):
self.build_list_params(params, vpn_gateway_ids, 'VpnGatewayId')
if filters:
self.build_filter_params(params, dict(filters))
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_list('DescribeVpnGateways', params,
[('item', VpnGateway)])
- def create_vpn_gateway(self, type, availability_zone=None):
+ def create_vpn_gateway(self, type, availability_zone=None, dry_run=False):
"""
Create a new Vpn Gateway
@@ -556,28 +701,38 @@ class VPCConnection(EC2Connection):
:type availability_zone: str
:param availability_zone: The Availability Zone where you want the VPN gateway.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: The newly created VpnGateway
:return: A :class:`boto.vpc.vpngateway.VpnGateway` object
"""
params = {'Type' : type}
if availability_zone:
params['AvailabilityZone'] = availability_zone
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_object('CreateVpnGateway', params, VpnGateway)
- def delete_vpn_gateway(self, vpn_gateway_id):
+ def delete_vpn_gateway(self, vpn_gateway_id, dry_run=False):
"""
Delete a Vpn Gateway.
:type vpn_gateway_id: str
:param vpn_gateway_id: The ID of the vpn_gateway to be deleted.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: bool
:return: True if successful
"""
params = {'VpnGatewayId': vpn_gateway_id}
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_status('DeleteVpnGateway', params)
- def attach_vpn_gateway(self, vpn_gateway_id, vpc_id):
+ def attach_vpn_gateway(self, vpn_gateway_id, vpc_id, dry_run=False):
"""
Attaches a VPN gateway to a VPC.
@@ -587,16 +742,21 @@ class VPCConnection(EC2Connection):
:type vpc_id: str
:param vpc_id: The ID of the VPC you want to attach the gateway to.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: An attachment
:return: a :class:`boto.vpc.vpngateway.Attachment`
"""
params = {'VpnGatewayId': vpn_gateway_id,
'VpcId' : vpc_id}
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_object('AttachVpnGateway', params, Attachment)
# Subnets
- def get_all_subnets(self, subnet_ids=None, filters=None):
+ def get_all_subnets(self, subnet_ids=None, filters=None, dry_run=False):
"""
Retrieve information about your Subnets. You can filter results to
return information only about those Subnets that match your search
@@ -619,6 +779,9 @@ class VPCConnection(EC2Connection):
the subnet is in.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: list
:return: A list of :class:`boto.vpc.subnet.Subnet`
"""
@@ -627,9 +790,12 @@ class VPCConnection(EC2Connection):
self.build_list_params(params, subnet_ids, 'SubnetId')
if filters:
self.build_filter_params(params, dict(filters))
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_list('DescribeSubnets', params, [('item', Subnet)])
- def create_subnet(self, vpc_id, cidr_block, availability_zone=None):
+ def create_subnet(self, vpc_id, cidr_block, availability_zone=None,
+ dry_run=False):
"""
Create a new Subnet
@@ -642,6 +808,9 @@ class VPCConnection(EC2Connection):
:type availability_zone: str
:param availability_zone: The AZ you want the subnet in
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: The newly created Subnet
:return: A :class:`boto.vpc.customergateway.Subnet` object
"""
@@ -649,43 +818,55 @@ class VPCConnection(EC2Connection):
'CidrBlock' : cidr_block}
if availability_zone:
params['AvailabilityZone'] = availability_zone
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_object('CreateSubnet', params, Subnet)
- def delete_subnet(self, subnet_id):
+ def delete_subnet(self, subnet_id, dry_run=False):
"""
Delete a subnet.
:type subnet_id: str
:param subnet_id: The ID of the subnet to be deleted.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: bool
:return: True if successful
"""
params = {'SubnetId': subnet_id}
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_status('DeleteSubnet', params)
# DHCP Options
- def get_all_dhcp_options(self, dhcp_options_ids=None):
+ def get_all_dhcp_options(self, dhcp_options_ids=None, dry_run=False):
"""
Retrieve information about your DhcpOptions.
:type dhcp_options_ids: list
:param dhcp_options_ids: A list of strings with the desired DhcpOption ID's
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: list
:return: A list of :class:`boto.vpc.dhcpoptions.DhcpOptions`
"""
params = {}
if dhcp_options_ids:
self.build_list_params(params, dhcp_options_ids, 'DhcpOptionsId')
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_list('DescribeDhcpOptions', params,
[('item', DhcpOptions)])
def create_dhcp_options(self, domain_name=None, domain_name_servers=None,
ntp_servers=None, netbios_name_servers=None,
- netbios_node_type=None):
+ netbios_node_type=None, dry_run=False):
"""
Create a new DhcpOption
@@ -714,6 +895,9 @@ class VPCConnection(EC2Connection):
only use 2 at this time (broadcast and multicast are currently not
supported).
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: The newly created DhcpOption
:return: A :class:`boto.vpc.customergateway.DhcpOption` object
"""
@@ -749,23 +933,30 @@ class VPCConnection(EC2Connection):
if netbios_node_type:
key_counter = insert_option(params,
'netbios-node-type', netbios_node_type)
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_object('CreateDhcpOptions', params, DhcpOptions)
- def delete_dhcp_options(self, dhcp_options_id):
+ def delete_dhcp_options(self, dhcp_options_id, dry_run=False):
"""
Delete a DHCP Options
:type dhcp_options_id: str
:param dhcp_options_id: The ID of the DHCP Options to be deleted.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: bool
:return: True if successful
"""
params = {'DhcpOptionsId': dhcp_options_id}
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_status('DeleteDhcpOptions', params)
- def associate_dhcp_options(self, dhcp_options_id, vpc_id):
+ def associate_dhcp_options(self, dhcp_options_id, vpc_id, dry_run=False):
"""
Associate a set of Dhcp Options with a VPC.
@@ -775,16 +966,22 @@ class VPCConnection(EC2Connection):
:type vpc_id: str
:param vpc_id: The ID of the VPC.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: bool
:return: True if successful
"""
params = {'DhcpOptionsId': dhcp_options_id,
'VpcId' : vpc_id}
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_status('AssociateDhcpOptions', params)
# VPN Connection
- def get_all_vpn_connections(self, vpn_connection_ids=None, filters=None):
+ def get_all_vpn_connections(self, vpn_connection_ids=None, filters=None,
+ dry_run=False):
"""
Retrieve information about your VPN_CONNECTIONs. You can filter results to
return information only about those VPN_CONNECTIONs that match your search
@@ -807,6 +1004,9 @@ class VPCConnection(EC2Connection):
- *vpnGatewayId*, a list of IDs of the VPN gateway associated
with the VPN connection
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: list
:return: A list of :class:`boto.vpn_connection.vpnconnection.VpnConnection`
"""
@@ -816,10 +1016,13 @@ class VPCConnection(EC2Connection):
'Vpn_ConnectionId')
if filters:
self.build_filter_params(params, dict(filters))
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_list('DescribeVpnConnections', params,
[('item', VpnConnection)])
- def create_vpn_connection(self, type, customer_gateway_id, vpn_gateway_id):
+ def create_vpn_connection(self, type, customer_gateway_id, vpn_gateway_id,
+ dry_run=False):
"""
Create a new VPN Connection.
@@ -833,28 +1036,39 @@ class VPCConnection(EC2Connection):
:type vpn_gateway_id: str
:param vpn_gateway_id: The ID of the VPN gateway.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: The newly created VpnConnection
:return: A :class:`boto.vpc.vpnconnection.VpnConnection` object
"""
params = {'Type' : type,
'CustomerGatewayId' : customer_gateway_id,
'VpnGatewayId' : vpn_gateway_id}
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_object('CreateVpnConnection', params, VpnConnection)
- def delete_vpn_connection(self, vpn_connection_id):
+ def delete_vpn_connection(self, vpn_connection_id, dry_run=False):
"""
Delete a VPN Connection.
:type vpn_connection_id: str
:param vpn_connection_id: The ID of the vpn_connection to be deleted.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: bool
:return: True if successful
"""
params = {'VpnConnectionId': vpn_connection_id}
+ if dry_run:
+ params['DryRun'] = 'true'
return self.get_status('DeleteVpnConnection', params)
- def disable_vgw_route_propagation(self, route_table_id, gateway_id):
+ def disable_vgw_route_propagation(self, route_table_id, gateway_id,
+ dry_run=False):
"""
Disables a virtual private gateway (VGW) from propagating routes to the
routing tables of an Amazon VPC.
@@ -865,6 +1079,9 @@ class VPCConnection(EC2Connection):
:type gateway_id: str
:param gateway_id: The ID of the virtual private gateway.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: bool
:return: True if successful
"""
@@ -872,9 +1089,12 @@ class VPCConnection(EC2Connection):
'RouteTableId': route_table_id,
'GatewayId': gateway_id,
}
- self.get_status('DisableVgwRoutePropagation', params)
+ if dry_run:
+ params['DryRun'] = 'true'
+ return self.get_status('DisableVgwRoutePropagation', params)
- def enable_vgw_route_propagation(self, route_table_id, gateway_id):
+ def enable_vgw_route_propagation(self, route_table_id, gateway_id,
+ dry_run=False):
"""
Enables a virtual private gateway (VGW) to propagate routes to the
routing tables of an Amazon VPC.
@@ -885,6 +1105,9 @@ class VPCConnection(EC2Connection):
:type gateway_id: str
:param gateway_id: The ID of the virtual private gateway.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: bool
:return: True if successful
"""
@@ -892,10 +1115,12 @@ class VPCConnection(EC2Connection):
'RouteTableId': route_table_id,
'GatewayId': gateway_id,
}
- self.get_status('EnableVgwRoutePropagation', params)
+ if dry_run:
+ params['DryRun'] = 'true'
+ return self.get_status('EnableVgwRoutePropagation', params)
def create_vpn_connection_route(self, destination_cidr_block,
- vpn_connection_id):
+ vpn_connection_id, dry_run=False):
"""
Creates a new static route associated with a VPN connection between an
existing virtual private gateway and a VPN customer gateway. The static
@@ -909,6 +1134,9 @@ class VPCConnection(EC2Connection):
:type vpn_connection_id: str
:param vpn_connection_id: The ID of the VPN connection.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: bool
:return: True if successful
"""
@@ -916,10 +1144,12 @@ class VPCConnection(EC2Connection):
'DestinationCidrBlock': destination_cidr_block,
'VpnConnectionId': vpn_connection_id,
}
- self.get_status('CreateVpnConnectionRoute', params)
+ if dry_run:
+ params['DryRun'] = 'true'
+ return self.get_status('CreateVpnConnectionRoute', params)
def delete_vpn_connection_route(self, destination_cidr_block,
- vpn_connection_id):
+ vpn_connection_id, dry_run=False):
"""
Deletes a static route associated with a VPN connection between an
existing virtual private gateway and a VPN customer gateway. The static
@@ -933,6 +1163,9 @@ class VPCConnection(EC2Connection):
:type vpn_connection_id: str
:param vpn_connection_id: The ID of the VPN connection.
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
:rtype: bool
:return: True if successful
"""
@@ -940,4 +1173,6 @@ class VPCConnection(EC2Connection):
'DestinationCidrBlock': destination_cidr_block,
'VpnConnectionId': vpn_connection_id,
}
- self.get_status('DeleteVpnConnectionRoute', params)
+ if dry_run:
+ params['DryRun'] = 'true'
+ return self.get_status('DeleteVpnConnectionRoute', params)
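These four calls previously discarded the response; they now return the boolean status like the rest of the module, so results can be checked directly (IDs are placeholders):

    import boto.vpc

    conn = boto.vpc.connect_to_region('us-east-1')
    if conn.enable_vgw_route_propagation('rtb-12345678', 'vgw-12345678'):
        conn.create_vpn_connection_route('192.168.0.0/24', 'vpn-12345678')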
diff --git a/boto/vpc/vpc.py b/boto/vpc/vpc.py
index 8fdaa62f..2eb480d1 100644
--- a/boto/vpc/vpc.py
+++ b/boto/vpc/vpc.py
@@ -72,8 +72,11 @@ class VPC(TaggedEC2Object):
def _update(self, updated):
self.__dict__.update(updated.__dict__)
- def update(self, validate=False):
- vpc_list = self.connection.get_all_vpcs([self.id])
+ def update(self, validate=False, dry_run=False):
+ vpc_list = self.connection.get_all_vpcs(
+ [self.id],
+ dry_run=dry_run
+ )
if len(vpc_list):
updated_vpc = vpc_list[0]
self._update(updated_vpc)
diff --git a/boto/vpc/vpnconnection.py b/boto/vpc/vpnconnection.py
index aa49c36a..c36492f5 100644
--- a/boto/vpc/vpnconnection.py
+++ b/boto/vpc/vpnconnection.py
@@ -197,5 +197,8 @@ class VpnConnection(TaggedEC2Object):
else:
setattr(self, name, value)
- def delete(self):
- return self.connection.delete_vpn_connection(self.id)
+ def delete(self, dry_run=False):
+ return self.connection.delete_vpn_connection(
+ self.id,
+ dry_run=dry_run
+ )
diff --git a/boto/vpc/vpngateway.py b/boto/vpc/vpngateway.py
index 83b912ef..fe476d93 100644
--- a/boto/vpc/vpngateway.py
+++ b/boto/vpc/vpngateway.py
@@ -14,7 +14,7 @@
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
-# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
@@ -33,7 +33,7 @@ class Attachment(object):
def startElement(self, name, attrs, connection):
pass
-
+
def endElement(self, name, value, connection):
if name == 'vpcId':
self.vpc_id = value
@@ -41,7 +41,7 @@ class Attachment(object):
self.state = value
else:
setattr(self, name, value)
-
+
class VpnGateway(TaggedEC2Object):
def __init__(self, connection=None):
@@ -63,7 +63,7 @@ class VpnGateway(TaggedEC2Object):
att = Attachment()
self.attachments.append(att)
return att
-
+
def endElement(self, name, value, connection):
if name == 'vpnGatewayId':
self.id = value
@@ -78,6 +78,10 @@ class VpnGateway(TaggedEC2Object):
else:
setattr(self, name, value)
- def attach(self, vpc_id):
- return self.connection.attach_vpn_gateway(self.id, vpc_id)
+ def attach(self, vpc_id, dry_run=False):
+ return self.connection.attach_vpn_gateway(
+ self.id,
+ vpc_id,
+ dry_run=dry_run
+ )
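The object helpers forward the flag as well, so a dry run can be exercised straight from a fetched resource; a sketch assuming at least one VPN gateway already exists:

    import boto.vpc
    from boto.exception import EC2ResponseError

    conn = boto.vpc.connect_to_region('us-east-1')
    gw = conn.get_all_vpn_gateways()[0]
    try:
        gw.attach('vpc-12345678', dry_run=True)   # placeholder VPC ID
    except EC2ResponseError as e:
        print(e.error_code)   # 'DryRunOperation' if the attach would have succeeded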
diff --git a/docs/Makefile b/docs/Makefile
index 5fd1f920..91d9bf27 100644
--- a/docs/Makefile
+++ b/docs/Makefile
@@ -12,7 +12,7 @@ PAPEROPT_a4 = -D latex_paper_size=a4
PAPEROPT_letter = -D latex_paper_size=letter
ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source
-.PHONY: help clean html dirhtml pickle json htmlhelp qthelp latex changes linkcheck doctest
+.PHONY: help clean html dirhtml pickle json epub htmlhelp qthelp latex changes linkcheck doctest
help:
@echo "Please use \`make <target>' where <target> is one of"
@@ -20,6 +20,7 @@ help:
@echo " dirhtml to make HTML files named index.html in directories"
@echo " pickle to make pickle files"
@echo " json to make JSON files"
+ @echo " epub to make ePub files (sphinx >= v1.2b2)"
@echo " htmlhelp to make HTML files and a HTML help project"
@echo " qthelp to make HTML files and a qthelp project"
@echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
@@ -50,6 +51,11 @@ json:
@echo
@echo "Build finished; now you can process the JSON files."
+epub:
+ $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
+ @echo
+ @echo "Build finished. The e-Pub pages are in $(BUILDDIR)/epub."
+
htmlhelp:
$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
@echo
diff --git a/docs/source/apps_built_on_boto.rst b/docs/source/apps_built_on_boto.rst
new file mode 100644
index 00000000..0fc40666
--- /dev/null
+++ b/docs/source/apps_built_on_boto.rst
@@ -0,0 +1,44 @@
+.. _apps_built_on_boto:
+
+==========================
+Applications Built On Boto
+==========================
+
+Many people have taken Boto, layered additional functionality on top of it, and
+shared their work with the community. This is a (partial) list of applications
+that use Boto.
+
+If you have an application or utility you've open-sourced that uses Boto &
+you'd like it listed here, please submit a `pull request`_ adding it!
+
+.. _`pull request`: https://github.com/boto/boto/pulls
+
+**botornado**
+ https://pypi.python.org/pypi/botornado
+ An asynchronous AWS client on Tornado. This is a rough effort to move boto
+ onto the Tornado ioloop. Currently works with SQS and S3.
+
+**boto_rsync**
+ https://pypi.python.org/pypi/boto_rsync
+ boto-rsync is a rough adaptation of boto's s3put script which has been
+ reengineered to more closely mimic rsync. Its goal is to provide a familiar
+ rsync-like wrapper for boto's S3 and Google Storage interfaces.
+
+**boto_utils**
+ https://pypi.python.org/pypi/boto_utils
+ Command-line tools for interacting with Amazon Web Services, based on Boto.
+ Includes utils for S3, SES & Cloudwatch.
+
+**django-storages**
+ https://pypi.python.org/pypi/django-storages
+ A collection of storage backends for Django. Features the ``S3BotoStorage``
+ backend for storing media on S3.
+
+**mr.awsome**
+ https://pypi.python.org/pypi/mr.awsome
+ mr.awsome is a command-line tool (aws) to manage and control Amazon
+ Web Services EC2 instances. Once configured with your AWS key, you can
+ create, delete, monitor and ssh into instances, as well as perform scripted
+ tasks on them (via fabfiles). Examples are adding additional,
+ pre-configured webservers to a cluster (including updating the load
+ balancer), performing automated software deployments and creating backups -
+ each with just one call from the commandline.
diff --git a/docs/source/autoscale_tut.rst b/docs/source/autoscale_tut.rst
index 86fc529f..d1eaf3f9 100644
--- a/docs/source/autoscale_tut.rst
+++ b/docs/source/autoscale_tut.rst
@@ -201,8 +201,7 @@ To retrieve the instances in your autoscale group:
>>> ec2 = boto.ec2.connect_to_region('us-west-2')
>>> group = conn.get_all_groups(names=['my_group'])[0]
>>> instance_ids = [i.instance_id for i in group.instances]
->>> reservations = ec2.get_all_instances(instance_ids)
->>> instances = [i for r in reservations for i in r.instances]
+>>> instances = ec2.get_only_instances(instance_ids)
To delete your autoscale group, we first need to shutdown all the
instances:
diff --git a/docs/source/commandline.rst b/docs/source/commandline.rst
new file mode 100644
index 00000000..6b604827
--- /dev/null
+++ b/docs/source/commandline.rst
@@ -0,0 +1,85 @@
+.. _ref-boto_commandline:
+
+==================
+Command Line Tools
+==================
+
+Introduction
+============
+
+Boto ships with a number of command line utilities, which are installed along
+with the package. This guide outlines which ones are available & what they do.
+
+.. note::
+
+ If you're not already depending on these utilities, you may wish to check
+ out the AWS-CLI (http://aws.amazon.com/cli/ - `User Guide`_ &
+ `Reference Guide`_). It provides much wider & complete access to the
+ AWS services.
+
+ .. _`User Guide`: http://docs.aws.amazon.com/cli/latest/userguide/cli-chap-welcome.html
+ .. _`Reference Guide`: http://docs.aws.amazon.com/cli/latest/reference/
+
+The included utilities available are:
+
+``asadmin``
+ Works with Autoscaling
+
+``bundle_image``
+ Creates a bundled AMI in S3 based on an EC2 instance
+
+``cfadmin``
+ Works with CloudFront & invalidations
+
+``cq``
+ Works with SQS queues
+
+``cwutil``
+ Works with CloudWatch
+
+``dynamodb_dump``
+``dynamodb_load``
+ Handle dumping/loading data from DynamoDB tables
+
+``elbadmin``
+ Manages Elastic Load Balancer instances
+
+``fetch_file``
+ Downloads an S3 key to disk
+
+``glacier``
+ Lists vaults, jobs & uploads files to Glacier
+
+``instance_events``
+ Lists all events for EC2 reservations
+
+``kill_instance``
+ Kills a list of EC2 instances
+
+``launch_instance``
+ Launches an EC2 instance
+
+``list_instances``
+ Lists all of your EC2 instances
+
+``lss3``
+ Lists what keys you have within a bucket in S3
+
+``mturk``
+ Provides a number of facilities for interacting with Mechanical Turk
+
+``pyami_sendmail``
+ Sends an email from the Pyami instance
+
+``route53``
+ Interacts with the Route53 service
+
+``s3put``
+ Uploads a directory or a specific file(s) to S3
+
+``sdbadmin``
+ Allows for working with SimpleDB domains
+
+``taskadmin``
+ A tool for working with the tasks in SimpleDB
diff --git a/docs/source/contributing.rst b/docs/source/contributing.rst
index ac9305a3..80821995 100644
--- a/docs/source/contributing.rst
+++ b/docs/source/contributing.rst
@@ -202,3 +202,26 @@ and uses `restructured text`_ for the markup language.
.. _virtualenvwrapper: http://www.doughellmann.com/projects/virtualenvwrapper/
.. _sphinx: http://sphinx.pocoo.org/
.. _restructured text: http://sphinx.pocoo.org/rest.html
+
+
+Merging A Branch (Core Devs)
+============================
+
+* All features/bugfixes should go through a review.
+
+ * This includes new features added by core devs themselves. The usual
+ branch/pull-request/merge flow that happens for community contributions
+ should also apply to core.
+
+* Ensure there is proper test coverage. If there's a change in behavior, there
+ should be a test demonstrating the failure before the change & passing with
+ the change.
+
+ * This helps ensure we don't regress in the future as well.
+
+* Merging of pull requests is typically done with
+ ``git merge --no-ff <remote/branch_name>``.
+
+ * GitHub's big green button is probably OK for very small PRs (like doc
+ fixes), but you can't run tests on GH, so most things should get pulled
+ down locally.
diff --git a/docs/source/dynamodb2_tut.rst b/docs/source/dynamodb2_tut.rst
index b6e98118..3e37675c 100644
--- a/docs/source/dynamodb2_tut.rst
+++ b/docs/source/dynamodb2_tut.rst
@@ -73,8 +73,8 @@ Simple example::
A full example::
+ >>> import boto.dynamodb2
>>> from boto.dynamodb2.fields import HashKey, RangeKey, KeysOnlyIndex, AllIndex
- >>> from boto.dynamodb2.layer1 import DynamoDBConnection
>>> from boto.dynamodb2.table import Table
>>> from boto.dynamodb2.types import NUMBER
@@ -90,11 +90,7 @@ A full example::
... ])
... ],
... # If you need to specify custom parameters like keys or region info...
- ... connection=DynamoDBConnection(
- ... aws_access_key_id='key',
- ... aws_secret_access_key='key',
- ... region='us-west-2'
- ... ))
+ ... connection=boto.dynamodb2.connect_to_region('us-east-1'))
Using an Existing Table
diff --git a/docs/source/ec2_tut.rst b/docs/source/ec2_tut.rst
index d9ffe38c..6e179262 100644
--- a/docs/source/ec2_tut.rst
+++ b/docs/source/ec2_tut.rst
@@ -88,7 +88,7 @@ Checking What Instances Are Running
-----------------------------------
You can also get information on your currently running instances::
- >>> reservations = conn.get_all_instances()
+ >>> reservations = conn.get_all_reservations()
>>> reservations
[Reservation:r-00000000]
diff --git a/docs/source/index.rst b/docs/source/index.rst
index f720cf1b..92ee2bbd 100644
--- a/docs/source/index.rst
+++ b/docs/source/index.rst
@@ -24,8 +24,6 @@ Currently Supported Services
* :doc:`Elastic Compute Cloud (EC2) <ec2_tut>` -- (:doc:`API Reference <ref/ec2>`)
* :doc:`Elastic MapReduce (EMR) <emr_tut>` -- (:doc:`API Reference <ref/emr>`)
* :doc:`Auto Scaling <autoscale_tut>` -- (:doc:`API Reference <ref/autoscale>`)
- * Data Pipeline -- (:doc:`API Reference <ref/datapipeline>`)
- * Elastic Transcoder -- (:doc:`API Reference <ref/elastictranscoder>`)
* **Content Delivery**
@@ -44,6 +42,8 @@ Currently Supported Services
* CloudFormation -- (:doc:`API Reference <ref/cloudformation>`)
* Elastic Beanstalk -- (:doc:`API Reference <ref/beanstalk>`)
+ * Data Pipeline -- (:doc:`API Reference <ref/datapipeline>`)
+ * Opsworks -- (:doc:`API Reference <ref/opsworks>`)
* **Identity & Access**
@@ -52,11 +52,12 @@ Currently Supported Services
* **Application Services**
+ * :doc:`Cloudsearch <cloudsearch_tut>` -- (:doc:`API Reference <ref/cloudsearch>`)
+ * Elastic Transcoder -- (:doc:`API Reference <ref/elastictranscoder>`)
* Simple Workflow Service (SWF) -- (:doc:`API Reference <ref/swf>`)
* :doc:`Simple Queue Service (SQS) <sqs_tut>` -- (:doc:`API Reference <ref/sqs>`)
* Simple Notification Service (SNS) -- (:doc:`API Reference <ref/sns>`)
* :doc:`Simple Email Service (SES) <ses_tut>` -- (:doc:`API Reference <ref/ses>`)
- * :doc:`Cloudsearch <cloudsearch_tut>` -- (:doc:`API Reference <ref/cloudsearch>`)
* **Monitoring**
@@ -90,6 +91,8 @@ Currently Supported Services
Additional Resources
--------------------
+* :doc:`Applications Built On Boto <apps_built_on_boto>`
+* :doc:`Command Line Utilities <commandline>`
* :doc:`Boto Config Tutorial <boto_config_tut>`
* :doc:`Contributing to Boto <contributing>`
* `Boto Source Repository`_
@@ -111,6 +114,13 @@ Release Notes
.. toctree::
:titlesonly:
+ releasenotes/v2.13.3
+ releasenotes/v2.13.2
+ releasenotes/v2.13.0
+ releasenotes/v2.12.0
+ releasenotes/v2.11.0
+ releasenotes/v2.10.0
+ releasenotes/v2.9.9
releasenotes/v2.9.8
releasenotes/v2.9.7
releasenotes/v2.9.6
@@ -139,58 +149,34 @@ Release Notes
.. toctree::
:hidden:
+ :glob:
getting_started
ec2_tut
security_groups
- ref/ec2
emr_tut
- ref/emr
autoscale_tut
- ref/autoscale
cloudfront_tut
- ref/cloudfront
simpledb_tut
- ref/sdb
- ref/sdb_db
dynamodb_tut
- ref/dynamodb
rds_tut
- ref/rds
- ref/cloudformation
- ref/iam
- ref/mws
sqs_tut
- ref/sqs
- ref/sns
ses_tut
- ref/ses
cloudsearch_tut
- ref/cloudsearch
cloudwatch_tut
- ref/cloudwatch
- ref/route53
vpc_tut
- ref/vpc
elb_tut
- ref/elb
- ref/fps
s3_tut
- ref/s3
- ref/mturk
boto_config_tut
- ref/index
documentation
contributing
- ref/datapipeline
- ref/elasticache
- ref/elastictranscoder
- ref/redshift
- ref/dynamodb2
+ commandline
support_tut
- ref/support
dynamodb2_tut
migrations/dynamodb_v1_to_v2
+ apps_built_on_boto
+ ref/*
+ releasenotes/*
Indices and tables
diff --git a/docs/source/ref/cloudwatch.rst b/docs/source/ref/cloudwatch.rst
index 96c650ed..ae38d89b 100644
--- a/docs/source/ref/cloudwatch.rst
+++ b/docs/source/ref/cloudwatch.rst
@@ -25,3 +25,10 @@ boto.ec2.cloudwatch.metric
:members:
:undoc-members:
+boto.ec2.cloudwatch.alarm
+--------------------------
+
+.. automodule:: boto.ec2.cloudwatch.alarm
+ :members:
+ :undoc-members:
+
diff --git a/docs/source/ref/contrib.rst b/docs/source/ref/contrib.rst
index 9262a0dc..39ef54f8 100644
--- a/docs/source/ref/contrib.rst
+++ b/docs/source/ref/contrib.rst
@@ -8,25 +8,12 @@ boto.contrib
------------
.. automodule:: boto.contrib
- :members:
- :undoc-members:
-
-boto.contrib.m2helpers
-----------------------
-
-.. note::
-
- This module requires installation of M2Crypto__ in your Python path.
-
- __ http://sandbox.rulemaker.net/ngps/m2/
-
-.. automodule:: boto.contrib.m2helpers
- :members:
+ :members:
:undoc-members:
boto.contrib.ymlmessage
-----------------------
.. automodule:: boto.contrib.ymlmessage
- :members:
+ :members:
:undoc-members:
\ No newline at end of file
diff --git a/docs/source/ref/elb.rst b/docs/source/ref/elb.rst
index 74e77f33..aef0c5b4 100644
--- a/docs/source/ref/elb.rst
+++ b/docs/source/ref/elb.rst
@@ -8,40 +8,54 @@ boto.ec2.elb
------------
.. automodule:: boto.ec2.elb
- :members:
+ :members:
:undoc-members:
boto.ec2.elb.healthcheck
------------------------
.. automodule:: boto.ec2.elb.healthcheck
- :members:
+ :members:
:undoc-members:
boto.ec2.elb.instancestate
--------------------------
.. automodule:: boto.ec2.elb.instancestate
- :members:
+ :members:
:undoc-members:
boto.ec2.elb.listelement
------------------------
.. automodule:: boto.ec2.elb.listelement
- :members:
+ :members:
:undoc-members:
boto.ec2.elb.listener
---------------------
.. automodule:: boto.ec2.elb.listener
- :members:
+ :members:
:undoc-members:
boto.ec2.elb.loadbalancer
-------------------------
.. automodule:: boto.ec2.elb.loadbalancer
- :members:
+ :members:
+ :undoc-members:
+
+boto.ec2.elb.policies
+-------------------------
+
+.. automodule:: boto.ec2.elb.policies
+ :members:
+ :undoc-members:
+
+boto.ec2.elb.securitygroup
+--------------------------
+
+.. automodule:: boto.ec2.elb.securitygroup
+ :members:
:undoc-members:
diff --git a/docs/source/ref/opsworks.rst b/docs/source/ref/opsworks.rst
new file mode 100644
index 00000000..7c761e72
--- /dev/null
+++ b/docs/source/ref/opsworks.rst
@@ -0,0 +1,28 @@
+.. _ref-opsworks:
+
+========
+Opsworks
+========
+
+boto.opsworks
+-------------
+
+.. automodule:: boto.opsworks
+ :members:
+ :undoc-members:
+
+boto.opsworks.layer1
+--------------------
+
+.. automodule:: boto.opsworks.layer1
+ :members:
+ :undoc-members:
+
+boto.opsworks.exceptions
+------------------------
+
+.. automodule:: boto.opsworks.exceptions
+ :members:
+ :undoc-members:
+
+
diff --git a/docs/source/ref/s3.rst b/docs/source/ref/s3.rst
index ce5c9256..2cad0e95 100644
--- a/docs/source/ref/s3.rst
+++ b/docs/source/ref/s3.rst
@@ -88,13 +88,6 @@ boto.s3.lifecycle
:members:
:undoc-members:
-boto.s3.prefix
------------------
-
-.. automodule:: boto.s3.prefix
- :members:
- :undoc-members:
-
boto.s3.tagging
---------------
diff --git a/docs/source/ref/sdb_db.rst b/docs/source/ref/sdb_db.rst
index 5b77e2bc..a7594db5 100644
--- a/docs/source/ref/sdb_db.rst
+++ b/docs/source/ref/sdb_db.rst
@@ -10,74 +10,61 @@ boto.sdb.db
-----------
.. automodule:: boto.sdb.db
- :members:
+ :members:
:undoc-members:
boto.sdb.db.blob
----------------
.. automodule:: boto.sdb.db.blob
- :members:
+ :members:
:undoc-members:
boto.sdb.db.key
---------------
.. automodule:: boto.sdb.db.key
- :members:
+ :members:
:undoc-members:
boto.sdb.db.manager
-------------------
.. automodule:: boto.sdb.db.manager
- :members:
- :undoc-members:
-
-boto.sdb.db.manager.pgmanager
------------------------------
-
-.. note::
-
- This module requires psycopg2__ to be installed in the Python path.
-
- __ http://initd.org/
-
-.. automodule:: boto.sdb.db.manager.pgmanager
- :members:
+ :members:
:undoc-members:
boto.sdb.db.manager.sdbmanager
------------------------------
.. automodule:: boto.sdb.db.manager.sdbmanager
- :members:
+ :members:
:undoc-members:
boto.sdb.db.manager.xmlmanager
------------------------------
.. automodule:: boto.sdb.db.manager.xmlmanager
- :members:
+ :members:
:undoc-members:
boto.sdb.db.model
-----------------
.. automodule:: boto.sdb.db.model
- :members:
+ :members:
:undoc-members:
boto.sdb.db.property
--------------------
.. automodule:: boto.sdb.db.property
- :members:
+ :members:
:undoc-members:
boto.sdb.db.query
-----------------
.. automodule:: boto.sdb.db.query
- :members:
+ :members:
:undoc-members:
diff --git a/docs/source/ref/swf.rst b/docs/source/ref/swf.rst
index d4b0ca36..892bc07a 100644
--- a/docs/source/ref/swf.rst
+++ b/docs/source/ref/swf.rst
@@ -18,5 +18,8 @@ boto.swf.layer1
:members:
:undoc-members:
+boto.swf.layer2
+--------------------
-
+.. automodule:: boto.swf.layer2
+ :members:
diff --git a/docs/source/releasenotes/dev.rst b/docs/source/releasenotes/dev.rst
new file mode 100644
index 00000000..39b096eb
--- /dev/null
+++ b/docs/source/releasenotes/dev.rst
@@ -0,0 +1,21 @@
+boto v2.xx.x
+============
+
+:date: 2013/xx/xx
+
+This release adds ____.
+
+
+Features
+--------
+
+* . (:issue:``, :sha:``)
+
+
+Bugfixes
+--------
+
+* (:issue:``, :sha:``)
+* Several documentation improvements/fixes:
+
+ * (:issue:``, :sha:``)
diff --git a/docs/source/releasenotes/releasenotes_template.rst b/docs/source/releasenotes/releasenotes_template.rst
new file mode 100644
index 00000000..39b096eb
--- /dev/null
+++ b/docs/source/releasenotes/releasenotes_template.rst
@@ -0,0 +1,21 @@
+boto v2.xx.x
+============
+
+:date: 2013/xx/xx
+
+This release adds ____.
+
+
+Features
+--------
+
+* . (:issue:``, :sha:``)
+
+
+Bugfixes
+--------
+
+* (:issue:``, :sha:``)
+* Several documentation improvements/fixes:
+
+ * (:issue:``, :sha:``)
diff --git a/docs/source/releasenotes/v2.0.0.rst b/docs/source/releasenotes/v2.0.0.rst
index 47e563a0..28d38536 100644
--- a/docs/source/releasenotes/v2.0.0.rst
+++ b/docs/source/releasenotes/v2.0.0.rst
@@ -10,7 +10,7 @@ There have been many, many changes since the 2.0b4 release. This overview highli
* Fix connection pooling bug: don't close before reading.
* Added AddInstanceGroup and ModifyInstanceGroup to boto.emr
* Merge pull request #246 from chetan/multipart_s3put
-AddInstanceGroupsResponse class to boto.emr.emrobject.
+* AddInstanceGroupsResponse class to boto.emr.emrobject.
* Removed extra print statement
* Merge pull request #244 from ryansb/master
* Added add_instance_groups function to boto.emr.connection. Built some helper methods for it, and added AddInstanceGroupsResponse class to boto.emr.emrobject.
diff --git a/docs/source/releasenotes/v2.10.0.rst b/docs/source/releasenotes/v2.10.0.rst
new file mode 100644
index 00000000..fda15b42
--- /dev/null
+++ b/docs/source/releasenotes/v2.10.0.rst
@@ -0,0 +1,54 @@
+boto v2.10.0
+============
+
+:date: 2013/08/13
+
+This release adds Mobile Push Notification support to Amazon Simple Notification
+Service, better reporting for Amazon Redshift, SigV4 authorization for Amazon
+Elastic MapReduce & lots of bugfixes.
+
+
+Features
+--------
+
+* Added support for Mobile Push Notifications to SNS. This enables you to send
+ push notifications to mobile devices (such as iOS or Android) using SNS.
+ (:sha:`ccba574`)
+* Added support for better reporting within Redshift. (:sha:`9d55dd3`)
+* Switched Elastic MapReduce to use SigV4 for authorization. (:sha:`b80aa48`)
+
+
+Bugfixes
+--------
+
+* Added the ``MinAdjustmentType`` parameter to EC2 Autoscaling. (:issue:`1562`,
+ :issue:`1619`, :sha:`1760284`, :sha:`2a11fd9`, :sha:`2d14006` &
+ :sha:`b7f1ae1`)
+* Fixed how DynamoDB tracks changes to data in ``Item`` objects, fixing
+ failures with modified sets not being sent. (:issue:`1565`,
+ :sha:`b111fcf` & :sha:`812f9a6`)
+* Updated the CA certificates Boto ships with. (:issue:`1578`, :sha:`4dfadc8`)
+* Fixed how CloudSearch's ``Layer2`` object gets initialized. (:issue:`1629`,
+ :issue:`1630`, :sha:`40b3652` & :sha:`f797ff9`)
+* Fixed the ``-w`` flag in ``s3put``. (:issue:`1637`, :sha:`0865004` &
+ :sha:`3fe70ca`)
+* Added the ``ap-southeast-2`` endpoint for DynamoDB. (:issue:`1621`,
+ :sha:`501b637`)
+* Fixed test suite to run faster. (:sha:`243a67e`)
+* Fixed how non-JSON responses are caught from CloudSearch. (:issue:`1633`,
+ :issue:`1645`, :sha:`d5a5c01`, :sha:`954a50c`, :sha:`915d8ff` &
+ :sha:`4407fcb`)
+* Fixed how ``DeviceIndex`` is parsed from EC2. (:issue:`1632`, :issue:`1646`,
+ :sha:`ff15e1f`, :sha:`8337a0b` & :sha:`27c9b04`)
+* Fixed EC2's ``connect_to_region`` to respect the ``region`` parameter. (
+ :issue:`1616`, :issue:`1654`, :sha:`9c37256`, :sha:`5950d12` & :sha:`b7eebe8`)
+* Added ``modify_network_interface_attribute`` to EC2 connections.
+ (:issue:`1613`, :issue:`1656`, :sha:`e00b601`, :sha:`5b62f27`, :sha:`126f6e9`,
+ :sha:`bbfed1f` & :sha:`0c61293`)
+* Added support for ``param_group`` within RDS. (:issue:`1639`, :sha:`c47baf0`)
+* Added support for using ``Item.partial_save`` to create new records within
+ DynamoDBv2. (:issue:`1660`, :issue:`1521`, :sha:`bfa469f` & :sha:`58a13d7`)
+* Several documentation improvements/fixes:
+
+ * Updated guideline on how core should merge PRs. (:sha:`80a419c`)
+ * Fixed a typo in a CloudFront docstring. (:issue:`1657`, :sha:`1aa0621`) \ No newline at end of file
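A minimal sketch of the new ``Item.partial_save`` behaviour for brand-new records, mirroring the integration test added later in this patch (the ``users`` table and its attributes are illustrative)::

    from boto.dynamodb2.items import Item
    from boto.dynamodb2.table import Table

    # Assumes a 'users' table with a 'username' hash key already exists.
    users = Table('users')

    # Build a brand-new Item locally and persist it with partial_save(),
    # which previously only worked for records fetched from DynamoDB.
    new_user = Item(users, data={
        'username': 'sadie',
        'first_name': 'Sadie',
    })
    new_user.partial_save()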
diff --git a/docs/source/releasenotes/v2.11.0.rst b/docs/source/releasenotes/v2.11.0.rst
new file mode 100644
index 00000000..267d4a15
--- /dev/null
+++ b/docs/source/releasenotes/v2.11.0.rst
@@ -0,0 +1,62 @@
+boto v2.11.0
+============
+
+:date: 2013/08/29
+
+This release adds Public IP address support for VPCs created by EC2. It also
+makes the GovCloud region available for all services. Finally, this release
+also fixes a number of bugs.
+
+
+Features
+--------
+
+* Added Public IP address support within VPCs created by EC2. (:sha:`be132d1`)
+* All services can now easily use GovCloud. (:issue:`1651`, :sha:`542a301`,
+ :sha:`3c56121`, :sha:`9167d89`)
+* Added ``db_subnet_group`` to
+ ``RDSConnection.restore_dbinstance_from_point_in_time``. (:issue:`1640`,
+ :sha:`06592b9`)
+* Added ``monthly_backups`` to EC2's ``trim_snapshots``. (:issue:`1688`,
+ :sha:`a2ad606`, :sha:`2998c11`, :sha:`e32d033`)
+* Added ``get_all_reservations`` & ``get_only_instances`` methods to EC2.
+ (:issue:`1572`, :sha:`ffc6cc0`)
+
+
+Bugfixes
+--------
+
+* Fixed the parsing of CloudFormation's ``LastUpdatedTime``. (:issue:`1667`,
+  :sha:`70f363a`)
+* Fixed STS' ``assume_role_with_web_identity`` to work correctly.
+ (:issue:`1671`, :sha:`ed1f403`, :sha:`ca794d5`, :sha:`ed7e563`,
+ :sha:`859762d`)
+* Fixed how VPC security group filtering is done in EC2. (:issue:`1665`,
+ :issue:`1677`, :sha:`be00956`, :sha:`5e85dd1`, :sha:`e63aae8`)
+* Fixed fetching more than 100 records with ``ResourceRecordSet``.
+ (:issue:`1647`, :issue:`1648`, :issue:`1680`, :sha:`b64dd4f`, :sha:`276df7e`,
+ :sha:`e57cab0`, :sha:`e62a58b`, :sha:`4c81bea`, :sha:`a3c635b`)
+* Fixed how VPC Security Groups are referred to when working with RDS.
+ (:issue:`1602`, :issue:`1683`, :issue:`1685`, :issue:`1694`, :sha:`012aa0c`,
+ :sha:`d5c6dfa`, :sha:`7841230`, :sha:`0a90627`, :sha:`ed4fd8c`,
+ :sha:`61d394b`, :sha:`ebe84c9`, :sha:`a6b0f7e`)
+* Google Storage ``Key`` now uses transcoding-invariant headers where possible.
+ (:sha:`d36eac3`)
+* Doing non-multipart uploads when using ``s3put`` no longer requires having
+ the ``ListBucket`` permission. (:issue:`1642`, :issue:`1693`, :sha:`f35e914`)
+* Fixed the serialization of ``attributes`` in a variety of SNS methods.
+ (:issue:`1686`, :sha:`4afb3dd`, :sha:`a58af54`)
+* Fixed SNS to be better behaved when constructing a mobile push notification.
+ (:issue:`1692`, :sha:`62fdf34`)
+* Moved SWF to SigV4. (:sha:`ef7d255`)
+* Several documentation improvements/fixes:
+
+ * Updated the DynamoDB v2 docs to correct how the connection is built.
+ (:issue:`1662`, :sha:`047962d`)
+ * Fixed a typo in the DynamoDB v2 docstring for ``Table.create``.
+ (:sha:`be00956`)
+ * Fixed a typo in the DynamoDB v2 docstring for ``Table`` for custom
+ connections. (:issue:`1681`, :sha:`6a53020`)
+ * Fixed incorrect parameter names for ``DBParameterGroup`` in RDS.
+ (:issue:`1682`, :sha:`0d46aed`)
+ * Fixed a typo in the SQS tutorial. (:issue:`1684`, :sha:`38b7889`)
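As a quick illustration of the new EC2 convenience methods mentioned above (the region name and credentials are assumed to come from the usual boto configuration)::

    import boto.ec2

    # Assumes credentials are configured in ~/.boto or the environment.
    conn = boto.ec2.connect_to_region('us-east-1')

    # get_all_reservations() returns Reservation objects, like the old
    # get_all_instances() call, while get_only_instances() flattens the
    # result to a plain list of Instance objects.
    reservations = conn.get_all_reservations()
    instances = conn.get_only_instances()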
diff --git a/docs/source/releasenotes/v2.12.0.rst b/docs/source/releasenotes/v2.12.0.rst
new file mode 100644
index 00000000..8b713c4b
--- /dev/null
+++ b/docs/source/releasenotes/v2.12.0.rst
@@ -0,0 +1,32 @@
+boto v2.12.0
+============
+
+:date: 2013/09/04
+
+This release adds support for Redis & replication groups to Elasticache as
+well as several bug fixes.
+
+
+Features
+--------
+
+* Added support for Redis & replication groups to Elasticache. (:sha:`f744ff6`)
+
+
+Bugfixes
+--------
+
+* Boto's User-Agent string has changed; the change is mostly additive, to
+  include more information. (:sha:`edb038a`)
+* Headers that are part of S3's signing are now correctly coerced to the proper
+ case. (:issue:`1687`, :sha:`89eae8c`)
+* Altered S3 so that it's possible to track what portions of a multipart upload
+ succeeded. (:issue:`1305`, :issue:`1675`, :sha:`e9a2c59`)
+* Added ``create_lb_policy`` & ``set_lb_policies_of_backend_server`` to ELB.
+ (:issue:`1695`, :sha:`77a9458`)
+* Fixed pagination when listing vaults in Glacier. (:issue:`1699`,
+ :sha:`9afecca`)
+* Several documentation improvements/fixes:
+
+ * Added some docs about what command-line utilities ship with boto.
+ (:sha:`5d7d54d`)
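The new ELB backend-policy calls are exercised by the integration test added later in this patch; a trimmed-down sketch of the same flow (the load balancer name and backend port are illustrative)::

    import boto.ec2.elb

    conn = boto.ec2.elb.connect_to_region('us-east-1')

    # Create a ProxyProtocol policy and attach it to a backend port.
    conn.create_lb_policy('my-lb', 'enable-proxy-protocol',
                          'ProxyProtocolPolicyType', {'ProxyProtocol': True})
    conn.set_lb_policies_of_backend_server('my-lb', 8081,
                                           ['enable-proxy-protocol'])

    # Passing an empty list detaches all policies from that backend port.
    conn.set_lb_policies_of_backend_server('my-lb', 8081, [])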
diff --git a/docs/source/releasenotes/v2.13.0.rst b/docs/source/releasenotes/v2.13.0.rst
new file mode 100644
index 00000000..b28e9ba7
--- /dev/null
+++ b/docs/source/releasenotes/v2.13.0.rst
@@ -0,0 +1,40 @@
+boto v2.13.0
+============
+
+:date: 2013/09/12
+
+This release adds support for VPC within AWS Opsworks, dry-run support & the
+ability to modify reserved instances in EC2, as well as several important
+bugfixes for EC2, SNS & DynamoDBv2.
+
+
+Features
+--------
+
+* Added support for VPC within Opsworks. (:sha:`56e1df3`)
+* Added support for ``dry_run`` within EC2. (:sha:`dd7774c`)
+* Added support for ``modify_reserved_instances`` &
+ ``describe_reserved_instances_modifications`` within EC2. (:sha:`7a08672`)
+
+
+Bugfixes
+--------
+
+* Fixed EC2's ``associate_public_ip`` to work correctly. (:sha:`9db6101`)
+* Fixed a bug with ``dynamodb_load`` when working with sets. (:issue:`1664`,
+ :sha:`ef2d28b`)
+* Changed SNS ``publish`` to use POST. (:sha:`9c11772`)
+* Fixed inability to create LaunchConfigurations when using Block Device
+ Mappings. (:issue:`1709`, :issue:`1710`, :sha:`5fd728e`)
+* Fixed DynamoDBv2's ``batch_write`` to appropriately handle
+ ``UnprocessedItems``. (:issue:`1566`, :issue:`1679`, :issue:`1714`,
+ :sha:`2fc2369`)
+* Several documentation improvements/fixes:
+
+ * Added Opsworks docs to the index. (:sha:`5d48763`)
+ * Added docs on the correct string values for ``get_all_images``.
+ (:issue:`1674`, :sha:`1e4ed2e`)
+ * Removed a duplicate ``boto.s3.prefix`` entry from the docs.
+ (:issue:`1707`, :sha:`b42d34c`)
+ * Added an API reference for ``boto.swf.layer2``. (:issue:`1712`,
+ :sha:`9f7b15f`)
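The ``dry_run`` support can be seen in the EC2 integration test added below: a dry-run request raises ``EC2ResponseError``, whose message states whether the call would have succeeded. A minimal sketch (the region is illustrative)::

    import boto.ec2
    from boto.exception import EC2ResponseError

    conn = boto.ec2.connect_to_region('us-east-1')

    try:
        conn.get_all_images(dry_run=True)
    except EC2ResponseError, e:
        # e.g. "Request would have succeeded, but DryRun flag is set."
        print str(e)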
diff --git a/docs/source/releasenotes/v2.13.2.rst b/docs/source/releasenotes/v2.13.2.rst
new file mode 100644
index 00000000..4e3d6842
--- /dev/null
+++ b/docs/source/releasenotes/v2.13.2.rst
@@ -0,0 +1,39 @@
+boto v2.13.2
+============
+
+:date: 2013/09/16
+
+This release is a bugfix-only release, correcting several problems in EC2 as
+well as S3, DynamoDB v2 & SWF.
+
+.. note::
+
+ There was no v2.13.1 release made public. There was a packaging error that
+ was discovered before it was published to PyPI.
+
+ We apologise for the fault in the releases. Those responsible have been
+ sacked.
+
+
+Bugfixes
+--------
+
+* Fixed test fallout from the EC2 dry-run change. (:sha:`2159456`)
+* Added tests for more of SWF's ``layer2``. (:issue:`1718`, :sha:`35fb741`,
+ :sha:`a84d401`, :sha:`1cf1641`, :sha:`a36429c`)
+* Changed EC2 to allow ``name`` to be optional in calls to ``copy_image``.
+  (:issue:`1672`, :sha:`26285aa`)
+* Added ``billingProducts`` support to EC2 ``Image``. (:issue:`1703`,
+ :sha:`cccadaf`, :sha:`3914e91`)
+* Fixed a place where ``dry_run`` was handled in EC2. (:issue:`1722`,
+ :sha:`0a52c82`)
+* Fixed ``run_instances`` with a block device mapping. (:issue:`1723`,
+ :sha:`974743f`, :sha:`9049f05`, :sha:`d7edafc`)
+* Fixed ``s3put`` to accept headers with a ``=`` in them. (:issue:`1700`,
+ :sha:`7958c70`)
+* Fixed a bug in DynamoDB v2 where scans with filters over large sets may not
+ return all values. (:issue:`1713`, :sha:`02893e1`)
+* Cloudsearch now uses SigV4. (:sha:`b2bdbf5`)
+* Several documentation improvements/fixes:
+
+ * Added the "Apps Built On Boto" doc. (:sha:`3bd628c`)
diff --git a/docs/source/releasenotes/v2.13.3.rst b/docs/source/releasenotes/v2.13.3.rst
new file mode 100644
index 00000000..f145f75a
--- /dev/null
+++ b/docs/source/releasenotes/v2.13.3.rst
@@ -0,0 +1,11 @@
+boto v2.13.3
+============
+
+:date: 2013/09/16
+
+This release fixes a packaging error with the previous version of boto.
+The version ``v2.13.2`` was provided instead of ``2.13.2``, causing things
+like ``pip`` to incorrectly resolve the latest release.
+
+That release was only available for several minutes & was removed from PyPI
+due to the way it would break installation for users.
diff --git a/docs/source/releasenotes/v2.14.0.rst b/docs/source/releasenotes/v2.14.0.rst
new file mode 100644
index 00000000..4f235f04
--- /dev/null
+++ b/docs/source/releasenotes/v2.14.0.rst
@@ -0,0 +1,63 @@
+boto v2.14.0
+============
+
+:date: 2013/10/09
+
+This release makes ``s3put`` region-aware, adds some missing features to
+EC2 and SNS, enables EPUB documentation output, and makes the HTTP(S)
+connection pooling port-aware, which in turn enables connecting to
+e.g. mock services running on ``localhost``. It also includes support
+for the latest EC2 and OpsWorks features, as well as several
+important bugfixes for EC2, DynamoDB, MWS, and Python 2.5 support.
+
+
+Features
+--------
+
+* Add support for a ``--region`` argument to ``s3put`` and auto-detect bucket
+ regions if possible (:issue:`1731`, :sha:`d9c28f6`)
+* Add ``delete_notification_configuration`` for EC2 autoscaling
+ (:issue:`1717`, :sha:`ebb7ace`)
+* Add support for registering HVM instances (:issue:`1733`, :sha:`2afc68e`)
+* Add support for ``ReplaceRouteTableAssociation`` for EC2 (:issue:`1736`,
+ :sha:`4296835`)
+* Add ``sms`` as an option for SNS subscribe (:issue:`1744`, :sha:`8ff08e5`)
+* Allow overriding ``has_google_credentials`` (:issue:`1752`, :sha:`052cc91`)
+* Add EPUB output format for docs (:issue:`1759`, :sha:`def7c67`)
+* Add handling of ``Connection: close`` HTTP headers in responses
+ (:issue:`1773`, :sha:`1a38f32`)
+* Make connection pooling port-aware (:issue:`1764`, :issue:`1737`,
+ :sha:`b6c7330`)
+* Add support for ``instance_type`` to ``modify_reserved_instances``
+ (:sha:`bf07eee`)
+* Add support for new OpsWorks features (:sha:`f512898`)
+
+
+Bugfixes
+--------
+
+* Remove erroneous ``dry_run`` parameter (:issue:`1729`, :sha:`35a516e`)
+* Fix task_list override in poll methods of SWF Deciders and Workers (
+ :issue:`1724`, :sha:`fa8d871`)
+* Remove Content-Encoding header from metadata test (:issue:`1735`,
+ :sha:`c8b0130`)
+* Fix the ability to override DynamoDBv2 host and port when creating
+ connections (:issue:`1734`, :sha:`8d2b492`)
+* Fix UnboundLocalError (:sha:`e0e6aeb`)
+* ``self.rules`` is of type ``IPPermissionsList``, whose ``remove`` takes no
+  kwargs (:sha:`3c56b3f`)
+* Nicer error messages for 403s (:issue:`1753`, :sha:`d3d9eab`)
+* Various documentation fixes (:issue:`1762`, :sha:`76aef10`)
+* Various Python 2.5 fixes (:sha:`150aef6`, :sha:`67ae9ff`)
+* Prevent certificate tests from failing for non-govcloud accounts
+ (:sha:`2d3d9f6`)
+* Fix flaky resumable upload test (:issue:`1768`, :sha:`6aa8ae2`)
+* Force the Host HTTP header to fix an issue with older httplibs
+ (:sha:`202c456`)
+* Blacklist S3 from forced Host HTTP header (:sha:`9193226`)
+* Fix ``propagate_at_launch`` spelling error (:issue:`1739`, :sha:`e78d88a`)
+* Remove unused code that causes exceptions with bad response data
+ (:issue:`1771`, :sha:`bec5e70`)
+* Fix ``detach_subnets`` typo (:issue:`1760`, :sha:`4424e1b`)
+* Fix result list handling of ``GetMatchingProductForIdResponse`` for MWS
+ (:issue:`1751`, :sha:`977b7dc`)
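The port-aware connection pooling is what makes it practical to point boto at a mock service on ``localhost``. A hedged sketch, assuming a local fake-S3 style server on port 8000 (the host, port and use of ``OrdinaryCallingFormat`` are illustrative, not part of this changeset)::

    from boto.s3.connection import S3Connection, OrdinaryCallingFormat

    # Direct S3 calls at a local mock server instead of AWS.
    conn = S3Connection(
        aws_access_key_id='dummy',
        aws_secret_access_key='dummy',
        host='localhost',
        port=8000,
        is_secure=False,
        calling_format=OrdinaryCallingFormat(),
    )
    print conn.get_all_buckets()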
diff --git a/docs/source/releasenotes/v2.9.8.rst b/docs/source/releasenotes/v2.9.8.rst
index 36432813..0398582d 100644
--- a/docs/source/releasenotes/v2.9.8.rst
+++ b/docs/source/releasenotes/v2.9.8.rst
@@ -12,7 +12,7 @@ Features
--------
* Added support for the ``DecodeAuthorizationMessage`` in STS (:sha:`1ada5ac`).
-* Added support for creating/deleting/describing ``OptionGroup``s in RDS.
+* Added support for creating/deleting/describing ``OptionGroup`` in RDS.
(:sha:`d629228` & :sha:`d059a3b`)
* Added ``CancelUpdateStack`` to CloudFormation. (:issue:`1476`, :sha:`5bae130`)
* Added support for getting/setting lifecycle configurations on GS buckets.
diff --git a/docs/source/releasenotes/v2.9.9.rst b/docs/source/releasenotes/v2.9.9.rst
new file mode 100644
index 00000000..e9a0bfd2
--- /dev/null
+++ b/docs/source/releasenotes/v2.9.9.rst
@@ -0,0 +1,50 @@
+boto v2.9.9
+===========
+
+:date: 2013/07/24
+
+This release updates Opsworks to add AMI & Chef 11 support, adds DBSubnetGroup
+support in RDS & includes many other bugfixes.
+
+
+Features
+--------
+
+* Added AMI, configuration manager & Chef 11 support to Opsworks.
+ (:sha:`55725fc`).
+* Added ``in`` support for SQS messages. (:issue:`1593`, :sha:`e5fe1ed`)
+* Added support for the ``ap-southeast-2`` region in Elasticache.
+ (:issue:`1607`, :sha:`9986b61`)
+* Added support for block device mappings in ELB. (:issue:`1343`, :issue:`753`,
+ :issue:`1357`, :sha:`974a23a`)
+* Added support for DBSubnetGroup in RDS. (:issue:`1500`, :sha:`01eef87`,
+ :sha:`45c60a0`, :sha:`c4c859e`)
+
+
+Bugfixes
+--------
+
+* Fixed the canonicalization of paths on Windows. (:issue:`1609`,
+ :sha:`a1fa98c`)
+* Fixed how ``BotoServerException`` uses ``message``. (:issue:`1353`,
+ :sha:`b944f4b`)
+* Fixed ``DisableRollback`` always being ``True`` in a CloudFormation ``Stack``.
+ (:issue:`1379`, :sha:`32b3150`)
+* Changed EMR instance groups to no longer require a string price (can now be
+ a ``Decimal``). (:issue:`1396`, :sha:`dfc39ff`)
+* Altered ``Distribution._sign_string`` to accept any file-like object as well
+ within CloudFront. (:issue:`1349`, :sha:`8df6c14`)
+* Fixed the ``detach_lb_from_subnets`` call within ELB. (:issue:`1417`,
+ :issue:`1418` :sha:`4a397bd`, :sha:`c11d72b`, :sha:`9e595b5`, :sha:`634469d`,
+ :sha:`586dd54`)
+* Altered boto to obey ``no_proxy`` environment variables. (:issue:`1600`,
+ :issue:`1603`, :sha:`aaef5a9`)
+* Fixed ELB connections to use HTTPS by default. (:issue:`1587`, :sha:`fe158c4`)
+* Updated S3 to be Python 2.5 compatible again. (:issue:`1598`, :sha:`066009f`)
+* All calls within SES will now return *all* DKIMTokens, instead of just one.
+ (:issue:`1550`, :issue:`1610`, :sha:`1a079da`, :sha:`1e82f85`, :sha:`5c8b6b8`)
+* Fixed the ``logging`` parameter within ``DistributionConfig`` in CloudFront
+ to respect whatever is provided to the constructor. (:issue:`1457`,
+ :sha:`e76180d`)
+* Fixed CloudSearch to no longer raise an error if a non-JSON response is received.
+ (:issue:`1555`, :issue:`1614`, :sha:`5e2c292`, :sha:`6510e1f`)
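The DBSubnetGroup support referenced above is exercised by the new RDS integration test at the end of this patch; the basic shape of the API is as follows (the group name and subnet IDs are illustrative, and the subnets must belong to one VPC)::

    from boto.rds import RDSConnection

    conn = RDSConnection()

    # Create a subnet group spanning two subnets of the same VPC.
    group = conn.create_db_subnet_group(
        'my-subnet-group', 'example description',
        ['subnet-11111111', 'subnet-22222222'])

    # Both the description and the subnet list can be changed later.
    conn.modify_db_subnet_group('my-subnet-group',
                                description='new description')
    conn.delete_db_subnet_group('my-subnet-group')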
diff --git a/docs/source/s3_tut.rst b/docs/source/s3_tut.rst
index 2b40306a..aff667e3 100644
--- a/docs/source/s3_tut.rst
+++ b/docs/source/s3_tut.rst
@@ -384,7 +384,7 @@ under ``logs/*`` to transition to Glacier 30 days after the object is created.
We can now configure the bucket with this lifecycle policy::
>>> bucket.configure_lifecycle(lifecycle)
-True
+ True
You can also retrieve the current lifecycle policy for the bucket::
diff --git a/docs/source/sqs_tut.rst b/docs/source/sqs_tut.rst
index d4d69c98..72ccca1d 100644
--- a/docs/source/sqs_tut.rst
+++ b/docs/source/sqs_tut.rst
@@ -229,7 +229,7 @@ to count the number of messages in a queue:
>>> q.count()
10
-This can be handy but is command as well as the other two utility methods
+This can be handy but this command as well as the other two utility methods
I'll describe in a minute are inefficient and should be used with caution
on queues with lots of messages (e.g. many hundreds or more). Similarly,
you can clear (delete) all messages in a queue with:
diff --git a/tests/integration/__init__.py b/tests/integration/__init__.py
index e69de29b..310043f2 100644
--- a/tests/integration/__init__.py
+++ b/tests/integration/__init__.py
@@ -0,0 +1,67 @@
+# Copyright (c) 2013 Amazon.com, Inc. or its affiliates.
+# All rights reserved.
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish, dis-
+# tribute, sublicense, and/or sell copies of the Software, and to permit
+# persons to whom the Software is furnished to do so, subject to the fol-
+# lowing conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
+# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+
+"""
+Base class to make checking the certs easier.
+"""
+import httplib
+import socket
+import unittest
+
+
+# We subclass from ``object`` instead of ``TestCase`` here so that this doesn't
+# add noise to the test suite (otherwise these no-ops would run on every
+# import).
+class ServiceCertVerificationTest(object):
+ ssl = True
+
+ # SUBCLASSES MUST OVERRIDE THIS!
+ # Something like ``boto.sqs.regions()``...
+ regions = []
+
+ def test_certs(self):
+ self.assertTrue(len(self.regions) > 0)
+
+ for region in self.regions:
+ try:
+ c = region.connect()
+ self.sample_service_call(c)
+ except (socket.gaierror, httplib.BadStatusLine):
+ # This is bad (because the SSL cert failed). Re-raise the
+ # exception.
+ raise
+ except:
+ if 'gov' in region.name:
+ # Ignore it. GovCloud accounts require special permission
+ # to use.
+ continue
+
+ # Anything else is bad. Re-raise.
+ raise
+
+ def sample_service_call(self, conn):
+ """
+ Subclasses should override this method to do a service call that will
+ always succeed (like fetch a list, even if it's empty).
+ """
+ pass
+
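Any service's cert test can reuse this mixin with just two overrides, as the rewritten tests below show; a hypothetical SQS variant (not part of this patch) would look like::

    import unittest

    import boto.sqs

    from tests.integration import ServiceCertVerificationTest


    class SQSCertVerificationTest(unittest.TestCase, ServiceCertVerificationTest):
        sqs = True
        regions = boto.sqs.regions()

        def sample_service_call(self, conn):
            # A cheap call that should always succeed if the cert validates.
            conn.get_all_queues()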
diff --git a/tests/integration/cloudformation/test_cert_verification.py b/tests/integration/cloudformation/test_cert_verification.py
index 8a576e9b..6dede2f9 100644
--- a/tests/integration/cloudformation/test_cert_verification.py
+++ b/tests/integration/cloudformation/test_cert_verification.py
@@ -24,17 +24,16 @@
"""
Check that all of the certs on all service endpoints validate.
"""
-
import unittest
-import boto.cloudformation
+from tests.integration import ServiceCertVerificationTest
+
+import boto.cloudformation
-class CertVerificationTest(unittest.TestCase):
+class CloudFormationCertVerificationTest(unittest.TestCase, ServiceCertVerificationTest):
cloudformation = True
- ssl = True
+ regions = boto.cloudformation.regions()
- def test_certs(self):
- for region in boto.cloudformation.regions():
- c = region.connect()
- c.describe_stacks()
+ def sample_service_call(self, conn):
+ conn.describe_stacks()
diff --git a/tests/integration/cloudsearch/test_cert_verification.py b/tests/integration/cloudsearch/test_cert_verification.py
index 99477021..338c2ac7 100644
--- a/tests/integration/cloudsearch/test_cert_verification.py
+++ b/tests/integration/cloudsearch/test_cert_verification.py
@@ -24,17 +24,16 @@
"""
Check that all of the certs on all service endpoints validate.
"""
-
import unittest
-import boto.cloudsearch
+from tests.integration import ServiceCertVerificationTest
+
+import boto.cloudsearch
-class CertVerificationTest(unittest.TestCase):
- rds = True
- ssl = True
+class CloudSearchCertVerificationTest(unittest.TestCase, ServiceCertVerificationTest):
+ cloudsearch = True
+ regions = boto.cloudsearch.regions()
- def test_certs(self):
- for region in boto.cloudsearch.regions():
- c = region.connect()
- c.describe_domains()
+ def sample_service_call(self, conn):
+ conn.describe_domains()
diff --git a/tests/integration/cloudsearch/test_layers.py b/tests/integration/cloudsearch/test_layers.py
new file mode 100644
index 00000000..1db1d5a0
--- /dev/null
+++ b/tests/integration/cloudsearch/test_layers.py
@@ -0,0 +1,75 @@
+# Copyright (c) 2013 Amazon.com, Inc. or its affiliates.
+# All rights reserved.
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish, dis-
+# tribute, sublicense, and/or sell copies of the Software, and to permit
+# persons to whom the Software is furnished to do so, subject to the fol-
+# lowing conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
+# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+
+"""
+Tests for Layer1 of Cloudsearch
+"""
+import time
+
+from tests.unit import unittest
+from boto.cloudsearch.layer1 import Layer1
+from boto.cloudsearch.layer2 import Layer2
+from boto.regioninfo import RegionInfo
+
+
+class CloudSearchLayer1Test(unittest.TestCase):
+ cloudsearch = True
+
+ def setUp(self):
+ super(CloudSearchLayer1Test, self).setUp()
+ self.layer1 = Layer1()
+ self.domain_name = 'test-%d' % int(time.time())
+
+ def test_create_domain(self):
+ resp = self.layer1.create_domain(self.domain_name)
+ self.addCleanup(self.layer1.delete_domain, self.domain_name)
+ self.assertTrue(resp.get('created', False))
+
+
+class CloudSearchLayer2Test(unittest.TestCase):
+ cloudsearch = True
+
+ def setUp(self):
+ super(CloudSearchLayer2Test, self).setUp()
+ self.layer2 = Layer2()
+ self.domain_name = 'test-%d' % int(time.time())
+
+ def test_create_domain(self):
+ domain = self.layer2.create_domain(self.domain_name)
+ self.addCleanup(domain.delete)
+        self.assertTrue(domain.created)
+ self.assertEqual(domain.domain_name, self.domain_name)
+ self.assertEqual(domain.num_searchable_docs, 0)
+
+ def test_initialization_regression(self):
+ us_west_2 = RegionInfo(
+ name='us-west-2',
+ endpoint='cloudsearch.us-west-2.amazonaws.com'
+ )
+ self.layer2 = Layer2(
+ region=us_west_2,
+ host='cloudsearch.us-west-2.amazonaws.com'
+ )
+ self.assertEqual(
+ self.layer2.layer1.host,
+ 'cloudsearch.us-west-2.amazonaws.com'
+ )
diff --git a/tests/integration/dynamodb/test_cert_verification.py b/tests/integration/dynamodb/test_cert_verification.py
index d0e4ef40..ce94b753 100644
--- a/tests/integration/dynamodb/test_cert_verification.py
+++ b/tests/integration/dynamodb/test_cert_verification.py
@@ -24,17 +24,16 @@
"""
Check that all of the certs on all service endpoints validate.
"""
-
import unittest
-import boto.dynamodb
+from tests.integration import ServiceCertVerificationTest
+
+import boto.dynamodb
-class CertVerificationTest(unittest.TestCase):
+class DynamoDBCertVerificationTest(unittest.TestCase, ServiceCertVerificationTest):
dynamodb = True
- ssl = True
+ regions = boto.dynamodb.regions()
- def test_certs(self):
- for region in boto.dynamodb.regions():
- c = region.connect()
- c.layer1.list_tables()
+ def sample_service_call(self, conn):
+ conn.layer1.list_tables()
diff --git a/tests/integration/dynamodb2/test_cert_verification.py b/tests/integration/dynamodb2/test_cert_verification.py
index 3901c57e..2fc93fd5 100644
--- a/tests/integration/dynamodb2/test_cert_verification.py
+++ b/tests/integration/dynamodb2/test_cert_verification.py
@@ -24,17 +24,16 @@
"""
Check that all of the certs on all service endpoints validate.
"""
-
import unittest
-import boto.dynamodb2
+from tests.integration import ServiceCertVerificationTest
+
+import boto.dynamodb2
-class CertVerificationTest(unittest.TestCase):
+class DynamoDB2CertVerificationTest(unittest.TestCase, ServiceCertVerificationTest):
dynamodb2 = True
- ssl = True
+ regions = boto.dynamodb2.regions()
- def test_certs(self):
- for region in boto.dynamodb2.regions():
- c = region.connect()
- c.list_tables()
+ def sample_service_call(self, conn):
+ conn.list_tables()
diff --git a/tests/integration/dynamodb2/test_highlevel.py b/tests/integration/dynamodb2/test_highlevel.py
index eed46efa..46848fae 100644
--- a/tests/integration/dynamodb2/test_highlevel.py
+++ b/tests/integration/dynamodb2/test_highlevel.py
@@ -23,11 +23,14 @@
"""
Tests for DynamoDB v2 high-level abstractions.
"""
+from __future__ import with_statement
+
import time
from tests.unit import unittest
from boto.dynamodb2 import exceptions
from boto.dynamodb2.fields import HashKey, RangeKey, KeysOnlyIndex
+from boto.dynamodb2.items import Item
from boto.dynamodb2.table import Table
from boto.dynamodb2.types import NUMBER
@@ -43,12 +46,12 @@ class DynamoDBv2Test(unittest.TestCase):
], throughput={
'read': 5,
'write': 5,
- }, indexes={
+ }, indexes=[
KeysOnlyIndex('LastNameIndex', parts=[
HashKey('username'),
RangeKey('last_name')
]),
- })
+ ])
self.addCleanup(users.delete)
self.assertEqual(len(users.schema), 2)
@@ -146,7 +149,10 @@ class DynamoDBv2Test(unittest.TestCase):
self.assertEqual(check_name_again['first_name'], 'Joan')
# Reset it.
- jane.mark_dirty()
+ jane['username'] = 'jane'
+ jane['first_name'] = 'Jane'
+ jane['last_name'] = 'Doe'
+ jane['friend_count'] = 3
self.assertTrue(jane.save(overwrite=True))
# Test the partial update behavior.
@@ -176,9 +182,27 @@ class DynamoDBv2Test(unittest.TestCase):
self.assertEqual(partial_jane['first_name'], 'Jacqueline')
# Reset it.
- jane.mark_dirty()
+ jane['username'] = 'jane'
+ jane['first_name'] = 'Jane'
+ jane['last_name'] = 'Doe'
+ jane['friend_count'] = 3
self.assertTrue(jane.save(overwrite=True))
+ # Ensure that partial saves of a brand-new object work.
+ sadie = Item(users, data={
+ 'username': 'sadie',
+ 'first_name': 'Sadie',
+ 'favorite_band': 'Zedd',
+ 'friend_count': 7
+ })
+ self.assertTrue(sadie.partial_save())
+ serverside_sadie = users.get_item(
+ username='sadie',
+ friend_count=7,
+ consistent=True
+ )
+ self.assertEqual(serverside_sadie['first_name'], 'Sadie')
+
# Test the eventually consistent query.
results = users.query(
username__eq='johndoe',
@@ -274,3 +298,48 @@ class DynamoDBv2Test(unittest.TestCase):
)
# But it shouldn't break on more complex tables.
res = users.query(username__eq='johndoe')
+
+ # Test putting with/without sets.
+ mau5_created = users.put_item(data={
+ 'username': 'mau5',
+ 'first_name': 'dead',
+ 'last_name': 'mau5',
+ 'friend_count': 2,
+ 'friends': set(['skrill', 'penny']),
+ })
+ self.assertTrue(mau5_created)
+
+ penny_created = users.put_item(data={
+ 'username': 'penny',
+ 'first_name': 'Penny',
+ 'friend_count': 0,
+ 'friends': set([]),
+ })
+ self.assertTrue(penny_created)
+
+ def test_unprocessed_batch_writes(self):
+ # Create a very limited table w/ low throughput.
+ users = Table.create('slow_users', schema=[
+ HashKey('user_id'),
+ ], throughput={
+ 'read': 1,
+ 'write': 1,
+ })
+ self.addCleanup(users.delete)
+
+ # Wait for it.
+ time.sleep(60)
+
+ with users.batch_write() as batch:
+ for i in range(500):
+ batch.put_item(data={
+ 'user_id': str(i),
+ 'name': 'Droid #{0}'.format(i),
+ })
+
+ # Before ``__exit__`` runs, we should have a bunch of unprocessed
+ # items.
+ self.assertTrue(len(batch._unprocessed) > 0)
+
+ # Post-__exit__, they should all be gone.
+ self.assertEqual(len(batch._unprocessed), 0)
diff --git a/tests/integration/ec2/autoscale/test_cert_verification.py b/tests/integration/ec2/autoscale/test_cert_verification.py
index 99271539..c56574fe 100644
--- a/tests/integration/ec2/autoscale/test_cert_verification.py
+++ b/tests/integration/ec2/autoscale/test_cert_verification.py
@@ -24,17 +24,16 @@
"""
Check that all of the certs on all service endpoints validate.
"""
-
import unittest
-import boto.ec2.autoscale
+from tests.integration import ServiceCertVerificationTest
+
+import boto.ec2.autoscale
-class CertVerificationTest(unittest.TestCase):
+class AutoscaleCertVerificationTest(unittest.TestCase, ServiceCertVerificationTest):
autoscale = True
- ssl = True
+ regions = boto.ec2.autoscale.regions()
- def test_certs(self):
- for region in boto.ec2.autoscale.regions():
- c = region.connect()
- c.get_all_groups()
+ def sample_service_call(self, conn):
+ conn.get_all_groups()
diff --git a/tests/integration/ec2/cloudwatch/test_cert_verification.py b/tests/integration/ec2/cloudwatch/test_cert_verification.py
index 4dfb56d0..f3c84be6 100644
--- a/tests/integration/ec2/cloudwatch/test_cert_verification.py
+++ b/tests/integration/ec2/cloudwatch/test_cert_verification.py
@@ -24,17 +24,16 @@
"""
Check that all of the certs on all service endpoints validate.
"""
-
import unittest
-import boto.ec2.cloudwatch
+from tests.integration import ServiceCertVerificationTest
+
+import boto.ec2.cloudwatch
-class CertVerificationTest(unittest.TestCase):
+class CloudWatchCertVerificationTest(unittest.TestCase, ServiceCertVerificationTest):
cloudwatch = True
- ssl = True
+ regions = boto.ec2.cloudwatch.regions()
- def test_certs(self):
- for region in boto.ec2.cloudwatch.regions():
- c = region.connect()
- c.describe_alarms()
+ def sample_service_call(self, conn):
+ conn.describe_alarms()
diff --git a/tests/integration/ec2/elb/test_cert_verification.py b/tests/integration/ec2/elb/test_cert_verification.py
index a574f679..5ec364c9 100644
--- a/tests/integration/ec2/elb/test_cert_verification.py
+++ b/tests/integration/ec2/elb/test_cert_verification.py
@@ -24,17 +24,16 @@
"""
Check that all of the certs on all service endpoints validate.
"""
-
import unittest
-import boto.ec2.elb
+from tests.integration import ServiceCertVerificationTest
+
+import boto.ec2.elb
-class CertVerificationTest(unittest.TestCase):
+class ELBCertVerificationTest(unittest.TestCase, ServiceCertVerificationTest):
elb = True
- ssl = True
+ regions = boto.ec2.elb.regions()
- def test_certs(self):
- for region in boto.ec2.elb.regions():
- c = region.connect()
- c.get_all_load_balancers()
+ def sample_service_call(self, conn):
+ conn.get_all_load_balancers()
diff --git a/tests/integration/ec2/elb/test_connection.py b/tests/integration/ec2/elb/test_connection.py
index 1661899b..5d11a399 100644
--- a/tests/integration/ec2/elb/test_connection.py
+++ b/tests/integration/ec2/elb/test_connection.py
@@ -118,6 +118,28 @@ class ELBConnectionTest(unittest.TestCase):
# Policy names should be checked here once they are supported
# in the Listener object.
+ def test_create_load_balancer_backend_with_policies(self):
+ other_policy_name = 'enable-proxy-protocol'
+ backend_port = 8081
+ self.conn.create_lb_policy(self.name, other_policy_name,
+ 'ProxyProtocolPolicyType', {'ProxyProtocol': True})
+ self.conn.set_lb_policies_of_backend_server(self.name, backend_port, [other_policy_name])
+
+ balancers = self.conn.get_all_load_balancers(load_balancer_names=[self.name])
+ self.assertEqual([lb.name for lb in balancers], [self.name])
+ self.assertEqual(len(balancers[0].policies.other_policies), 1)
+ self.assertEqual(balancers[0].policies.other_policies[0].policy_name, other_policy_name)
+ self.assertEqual(len(balancers[0].backends), 1)
+ self.assertEqual(balancers[0].backends[0].instance_port, backend_port)
+ self.assertEqual(balancers[0].backends[0].policies[0].policy_name, other_policy_name)
+
+ self.conn.set_lb_policies_of_backend_server(self.name, backend_port, [])
+
+ balancers = self.conn.get_all_load_balancers(load_balancer_names=[self.name])
+ self.assertEqual([lb.name for lb in balancers], [self.name])
+ self.assertEqual(len(balancers[0].policies.other_policies), 1)
+ self.assertEqual(len(balancers[0].backends), 0)
+
def test_create_load_balancer_complex_listeners(self):
complex_listeners = [
(8080, 80, 'HTTP', 'HTTP'),
diff --git a/tests/integration/ec2/test_cert_verification.py b/tests/integration/ec2/test_cert_verification.py
index d2428fa0..5ca8ceca 100644
--- a/tests/integration/ec2/test_cert_verification.py
+++ b/tests/integration/ec2/test_cert_verification.py
@@ -24,17 +24,16 @@
"""
Check that all of the certs on all service endpoints validate.
"""
-
import unittest
-import boto.ec2
+from tests.integration import ServiceCertVerificationTest
+
+import boto.ec2
-class CertVerificationTest(unittest.TestCase):
+class EC2CertVerificationTest(unittest.TestCase, ServiceCertVerificationTest):
ec2 = True
- ssl = True
+ regions = boto.ec2.regions()
- def test_certs(self):
- for region in boto.ec2.regions():
- c = region.connect()
- c.get_all_instances()
+ def sample_service_call(self, conn):
+ conn.get_all_reservations()
diff --git a/tests/integration/ec2/test_connection.py b/tests/integration/ec2/test_connection.py
index ef1080b0..fc7137ab 100644
--- a/tests/integration/ec2/test_connection.py
+++ b/tests/integration/ec2/test_connection.py
@@ -32,9 +32,10 @@ import socket
from nose.plugins.attrib import attr
from boto.ec2.connection import EC2Connection
+from boto.exception import EC2ResponseError
-class EC2ConnectionTest (unittest.TestCase):
+class EC2ConnectionTest(unittest.TestCase):
ec2 = True
@attr('notdefault')
@@ -190,3 +191,51 @@ class EC2ConnectionTest (unittest.TestCase):
assert l[0].product_codes[0] == demo_paid_ami_product_code
print '--- tests completed ---'
+
+ def test_dry_run(self):
+ c = EC2Connection()
+ dry_run_msg = 'Request would have succeeded, but DryRun flag is set.'
+
+ try:
+ rs = c.get_all_images(dry_run=True)
+ self.fail("Should have gotten an exception")
+ except EC2ResponseError, e:
+ self.assertTrue(dry_run_msg in str(e))
+
+ try:
+ rs = c.run_instances(
+ image_id='ami-a0cd60c9',
+ instance_type='m1.small',
+ dry_run=True
+ )
+ self.fail("Should have gotten an exception")
+ except EC2ResponseError, e:
+ self.assertTrue(dry_run_msg in str(e))
+
+ # Need an actual instance for the rest of this...
+ rs = c.run_instances(
+ image_id='ami-a0cd60c9',
+ instance_type='m1.small'
+ )
+ time.sleep(120)
+
+ try:
+ rs = c.stop_instances(
+ instance_ids=[rs.instances[0].id],
+ dry_run=True
+ )
+ self.fail("Should have gotten an exception")
+ except EC2ResponseError, e:
+ self.assertTrue(dry_run_msg in str(e))
+
+ try:
+ rs = c.terminate_instances(
+ instance_ids=[rs.instances[0].id],
+ dry_run=True
+ )
+ self.fail("Should have gotten an exception")
+ except EC2ResponseError, e:
+ self.assertTrue(dry_run_msg in str(e))
+
+ # And kill it.
+ rs.instances[0].terminate()
diff --git a/tests/integration/ec2/vpc/test_connection.py b/tests/integration/ec2/vpc/test_connection.py
index 59c07343..56bab462 100644
--- a/tests/integration/ec2/vpc/test_connection.py
+++ b/tests/integration/ec2/vpc/test_connection.py
@@ -35,9 +35,14 @@ class TestVPCConnection(unittest.TestCase):
vpc = self.api.create_vpc('10.0.0.0/16')
self.addCleanup(self.api.delete_vpc, vpc.id)
+ # Need time for the VPC to be in place. :/
+ time.sleep(5)
self.subnet = self.api.create_subnet(vpc.id, '10.0.0.0/24')
self.addCleanup(self.api.delete_subnet, self.subnet.id)
+ # Need time for the subnet to be in place.
+ time.sleep(10)
+
def terminate_instance(self, instance):
instance.terminate()
for i in xrange(300):
@@ -69,7 +74,7 @@ class TestVPCConnection(unittest.TestCase):
time.sleep(10)
instance = reservation.instances[0]
self.addCleanup(self.terminate_instance, instance)
- retrieved = self.api.get_all_instances(instance_ids=[instance.id])
+ retrieved = self.api.get_all_reservations(instance_ids=[instance.id])
self.assertEqual(len(retrieved), 1)
retrieved_instances = retrieved[0].instances
self.assertEqual(len(retrieved_instances), 1)
@@ -77,7 +82,7 @@ class TestVPCConnection(unittest.TestCase):
self.assertEqual(len(retrieved_instance.interfaces), 1)
interface = retrieved_instance.interfaces[0]
-
+
private_ip_addresses = interface.private_ip_addresses
self.assertEqual(len(private_ip_addresses), 4)
self.assertEqual(private_ip_addresses[0].private_ip_address,
@@ -90,6 +95,46 @@ class TestVPCConnection(unittest.TestCase):
self.assertEqual(private_ip_addresses[3].private_ip_address,
'10.0.0.24')
+ def test_associate_public_ip(self):
+ # Supplying basically nothing ought to work.
+ interface = NetworkInterfaceSpecification(
+ associate_public_ip_address=True,
+ subnet_id=self.subnet.id,
+ # Just for testing.
+ delete_on_termination=True
+ )
+ interfaces = NetworkInterfaceCollection(interface)
+
+ reservation = self.api.run_instances(
+ image_id='ami-a0cd60c9',
+ instance_type='m1.small',
+ network_interfaces=interfaces
+ )
+ instance = reservation.instances[0]
+ self.addCleanup(self.terminate_instance, instance)
+
+ # Give it a **LONG** time to start up.
+ # Because the public IP won't be there right away.
+ time.sleep(60)
+
+ retrieved = self.api.get_all_reservations(
+ instance_ids=[
+ instance.id
+ ]
+ )
+ self.assertEqual(len(retrieved), 1)
+ retrieved_instances = retrieved[0].instances
+ self.assertEqual(len(retrieved_instances), 1)
+ retrieved_instance = retrieved_instances[0]
+
+ self.assertEqual(len(retrieved_instance.interfaces), 1)
+ interface = retrieved_instance.interfaces[0]
+
+ # There ought to be a public IP there.
+ # We can't reason about the IP itself, so just make sure it vaguely
+ # resembles an IP (& isn't empty/``None``)...
+ self.assertTrue(interface.publicIp.count('.') >= 3)
+
if __name__ == '__main__':
unittest.main()
diff --git a/tests/integration/elastictranscoder/test_cert_verification.py b/tests/integration/elastictranscoder/test_cert_verification.py
index adf2e8f8..e87a278e 100644
--- a/tests/integration/elastictranscoder/test_cert_verification.py
+++ b/tests/integration/elastictranscoder/test_cert_verification.py
@@ -19,17 +19,16 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
+import unittest
-from tests.unit import unittest
-import boto.elastictranscoder
+from tests.integration import ServiceCertVerificationTest
+import boto.elastictranscoder
-class CertVerificationTest(unittest.TestCase):
+class ElasticTranscoderCertVerificationTest(unittest.TestCase, ServiceCertVerificationTest):
elastictranscoder = True
- ssl = True
+ regions = boto.elastictranscoder.regions()
- def test_certs(self):
- for region in boto.elastictranscoder.regions():
- c = region.connect()
- c.list_pipelines()
+ def sample_service_call(self, conn):
+ conn.list_pipelines()
diff --git a/tests/integration/emr/test_cert_verification.py b/tests/integration/emr/test_cert_verification.py
index 7c098139..b29b5257 100644
--- a/tests/integration/emr/test_cert_verification.py
+++ b/tests/integration/emr/test_cert_verification.py
@@ -24,17 +24,16 @@
"""
Check that all of the certs on all EMR endpoints validate.
"""
-
import unittest
-import boto.emr
+from tests.integration import ServiceCertVerificationTest
+
+import boto.emr
-class EMRCertVerificationTest(unittest.TestCase):
+class EMRCertVerificationTest(unittest.TestCase, ServiceCertVerificationTest):
emr = True
- ssl = True
+ regions = boto.emr.regions()
- def test_certs(self):
- for region in boto.emr.regions():
- c = region.connect()
- c.describe_jobflows()
+ def sample_service_call(self, conn):
+ conn.describe_jobflows()
diff --git a/tests/integration/glacier/test_cert_verification.py b/tests/integration/glacier/test_cert_verification.py
index abb818e2..fea1a64a 100644
--- a/tests/integration/glacier/test_cert_verification.py
+++ b/tests/integration/glacier/test_cert_verification.py
@@ -24,17 +24,16 @@
"""
Check that all of the certs on all service endpoints validate.
"""
-
import unittest
-import boto.glacier
+from tests.integration import ServiceCertVerificationTest
+
+import boto.glacier
-class CertVerificationTest(unittest.TestCase):
+class GlacierCertVerificationTest(unittest.TestCase, ServiceCertVerificationTest):
glacier = True
- ssl = True
+ regions = boto.glacier.regions()
- def test_certs(self):
- for region in boto.glacier.regions():
- c = region.connect()
- c.list_vaults()
+ def sample_service_call(self, conn):
+ conn.list_vaults()
diff --git a/tests/integration/gs/cb_test_harness.py b/tests/integration/gs/cb_test_harness.py
index 195b5eb4..024b0cf0 100644
--- a/tests/integration/gs/cb_test_harness.py
+++ b/tests/integration/gs/cb_test_harness.py
@@ -28,13 +28,15 @@ allowing testing of various file upload/download conditions.
"""
import socket
+import time
class CallbackTestHarness(object):
def __init__(self, fail_after_n_bytes=0, num_times_to_fail=1,
exception=socket.error('mock socket error', 0),
- fp_to_change=None, fp_change_pos=None):
+ fp_to_change=None, fp_change_pos=None,
+ delay_after_change=None):
self.fail_after_n_bytes = fail_after_n_bytes
self.num_times_to_fail = num_times_to_fail
self.exception = exception
@@ -42,6 +44,7 @@ class CallbackTestHarness(object):
# written at that position just before the first exception is thrown.
self.fp_to_change = fp_to_change
self.fp_change_pos = fp_change_pos
+ self.delay_after_change = delay_after_change
self.num_failures = 0
self.transferred_seq_before_first_failure = []
self.transferred_seq_after_first_failure = []
@@ -67,5 +70,7 @@ class CallbackTestHarness(object):
self.fp_to_change.seek(self.fp_change_pos)
self.fp_to_change.write('abc')
self.fp_to_change.seek(cur_pos)
+ if self.delay_after_change:
+ time.sleep(self.delay_after_change)
self.called = True
- raise self.exception
+ raise self.exception \ No newline at end of file
diff --git a/tests/integration/gs/test_resumable_uploads.py b/tests/integration/gs/test_resumable_uploads.py
index 7c601451..605937f3 100644
--- a/tests/integration/gs/test_resumable_uploads.py
+++ b/tests/integration/gs/test_resumable_uploads.py
@@ -308,8 +308,8 @@ class ResumableUploadTests(GSTestCase):
Tests that resumable upload correctly sets passed metadata
"""
res_upload_handler = ResumableUploadHandler()
- headers = {'Content-Type' : 'text/plain', 'Content-Encoding' : 'gzip',
- 'x-goog-meta-abc' : 'my meta', 'x-goog-acl' : 'public-read'}
+ headers = {'Content-Type' : 'text/plain', 'x-goog-meta-abc' : 'my meta',
+ 'x-goog-acl' : 'public-read'}
small_src_file_as_string, small_src_file = self.make_small_file()
small_src_file.seek(0)
dst_key = self._MakeKey(set_contents=False)
@@ -321,7 +321,6 @@ class ResumableUploadTests(GSTestCase):
dst_key.get_contents_as_string())
dst_key.open_read()
self.assertEqual('text/plain', dst_key.content_type)
- self.assertEqual('gzip', dst_key.content_encoding)
self.assertTrue('abc' in dst_key.metadata)
self.assertEqual('my meta', str(dst_key.metadata['abc']))
acl = dst_key.get_acl()
@@ -400,38 +399,68 @@ class ResumableUploadTests(GSTestCase):
def test_upload_with_file_content_change_during_upload(self):
"""
Tests resumable upload on a file that changes one byte of content
- (so, size stays the same) while upload in progress
+ (so, size stays the same) while upload in progress.
"""
- test_file_size = 500 * 1024 # 500 KB.
- test_file = self.build_input_file(test_file_size)[1]
- harness = CallbackTestHarness(fail_after_n_bytes=test_file_size/2,
- fp_to_change=test_file,
- # Write to byte 1, as the CallbackTestHarness writes
- # 3 bytes. This will result in the data on the server
- # being different than the local file.
- fp_change_pos=1)
- res_upload_handler = ResumableUploadHandler(num_retries=1)
- dst_key = self._MakeKey(set_contents=False)
- bucket_uri = storage_uri('gs://' + dst_key.bucket.name)
- dst_key_uri = bucket_uri.clone_replace_name(dst_key.name)
- try:
- dst_key.set_contents_from_file(
- test_file, cb=harness.call,
- res_upload_handler=res_upload_handler)
- self.fail('Did not get expected ResumableUploadException')
- except ResumableUploadException, e:
- self.assertEqual(e.disposition, ResumableTransferDisposition.ABORT)
- # Ensure the file size didn't change.
- test_file.seek(0, os.SEEK_END)
- self.assertEqual(test_file_size, test_file.tell())
- self.assertNotEqual(
- e.message.find('md5 signature doesn\'t match etag'), -1)
- # Ensure the bad data wasn't left around.
+ def Execute():
+ res_upload_handler = ResumableUploadHandler(num_retries=1)
+ dst_key = self._MakeKey(set_contents=False)
+ bucket_uri = storage_uri('gs://' + dst_key.bucket.name)
+ dst_key_uri = bucket_uri.clone_replace_name(dst_key.name)
try:
- dst_key_uri.get_key()
- self.fail('Did not get expected InvalidUriError')
- except InvalidUriError, e:
- pass
+ dst_key.set_contents_from_file(
+ test_file, cb=harness.call,
+ res_upload_handler=res_upload_handler)
+ return False
+ except ResumableUploadException, e:
+ self.assertEqual(e.disposition, ResumableTransferDisposition.ABORT)
+ # Ensure the file size didn't change.
+ test_file.seek(0, os.SEEK_END)
+ self.assertEqual(test_file_size, test_file.tell())
+ self.assertNotEqual(
+ e.message.find('md5 signature doesn\'t match etag'), -1)
+ # Ensure the bad data wasn't left around.
+ try:
+ dst_key_uri.get_key()
+ self.fail('Did not get expected InvalidUriError')
+ except InvalidUriError, e:
+ pass
+ return True
+
+ test_file_size = 500 * 1024 # 500 KB
+ # The sizes of all the blocks written, except the final block, must be a
+ # multiple of 256K bytes. We need to trigger a failure after the first
+ # 256K bytes have been uploaded so that at least one block of data is
+ # written on the server.
+ # See https://developers.google.com/storage/docs/concepts-techniques#resumable
+ # for more information about chunking of uploads.
+ n_bytes = 300 * 1024 # 300 KB
+ delay = 0
+ # First, try the test without a delay. If that fails, try it with a
+ # 15-second delay. The first attempt may fail to recognize that the
+ # server has a block if the server hasn't yet committed that block
+ # when we resume the transfer. This would cause a restarted upload
+ # instead of a resumed upload.
+ for attempt in range(2):
+ test_file = self.build_input_file(test_file_size)[1]
+ harness = CallbackTestHarness(
+ fail_after_n_bytes=n_bytes,
+ fp_to_change=test_file,
+ # Write to byte 1, as the CallbackTestHarness writes
+ # 3 bytes. This will result in the data on the server
+ # being different than the local file.
+ fp_change_pos=1,
+ delay_after_change=delay)
+ if Execute():
+ break
+ if (attempt == 0 and
+ 0 in harness.transferred_seq_after_first_failure):
+ # We can confirm the upload was restarted instead of resumed
+ # by determining if there is an entry of 0 in the
+ # transferred_seq_after_first_failure list.
+ # In that case, try again with a 15 second delay.
+ delay = 15
+ continue
+ self.fail('Did not get expected ResumableUploadException')
def test_upload_with_content_length_header_set(self):
"""
diff --git a/tests/integration/gs/testcase.py b/tests/integration/gs/testcase.py
index a6c1e087..b16ea8f8 100644
--- a/tests/integration/gs/testcase.py
+++ b/tests/integration/gs/testcase.py
@@ -29,11 +29,11 @@ import time
from boto.exception import GSResponseError
from boto.gs.connection import GSConnection
-from tests.integration.gs.util import has_google_credentials
+from tests.integration.gs import util
from tests.integration.gs.util import retry
from tests.unit import unittest
-@unittest.skipUnless(has_google_credentials(),
+@unittest.skipUnless(util.has_google_credentials(),
"Google credentials are required to run the Google "
"Cloud Storage tests. Update your boto.cfg to run "
"these tests.")
diff --git a/tests/integration/gs/util.py b/tests/integration/gs/util.py
index 5c99ac08..2b76078c 100644
--- a/tests/integration/gs/util.py
+++ b/tests/integration/gs/util.py
@@ -32,7 +32,8 @@ def has_google_credentials():
global _HAS_GOOGLE_CREDENTIALS
if _HAS_GOOGLE_CREDENTIALS is None:
provider = Provider('google')
- if provider.access_key is None or provider.secret_key is None:
+ if (provider.get_access_key() is None or
+ provider.get_secret_key() is None):
_HAS_GOOGLE_CREDENTIALS = False
else:
_HAS_GOOGLE_CREDENTIALS = True
diff --git a/tests/integration/iam/test_cert_verification.py b/tests/integration/iam/test_cert_verification.py
index 5791ac12..53349de6 100644
--- a/tests/integration/iam/test_cert_verification.py
+++ b/tests/integration/iam/test_cert_verification.py
@@ -24,17 +24,16 @@
"""
Check that all of the certs on all service endpoints validate.
"""
-
import unittest
-import boto.iam
+from tests.integration import ServiceCertVerificationTest
+
+import boto.iam
-class CertVerificationTest(unittest.TestCase):
+class IAMCertVerificationTest(unittest.TestCase, ServiceCertVerificationTest):
iam = True
- ssl = True
+ regions = boto.iam.regions()
- def test_certs(self):
- for region in boto.iam.regions():
- c = region.connect()
- c.get_all_users()
+ def sample_service_call(self, conn):
+ conn.get_all_users()
diff --git a/tests/integration/mws/test.py b/tests/integration/mws/test.py
index 4818258f..fd4b6b83 100644
--- a/tests/integration/mws/test.py
+++ b/tests/integration/mws/test.py
@@ -85,6 +85,16 @@ class MWSTestCase(unittest.TestCase):
ASINList=[asin,])
product = response._result[0].Product
+ @unittest.skipUnless(simple and isolator, "skipping simple test")
+ def test_get_matching_product_for_id(self):
+ asins = ['B001UDRNHO', '144930544X']
+ response = self.mws.get_matching_product_for_id(\
+ MarketplaceId=self.marketplace_id,
+ IdType='ASIN',
+ IdList=asins)
+ self.assertEqual(len(response._result), 2)
+ for result in response._result:
+ self.assertEqual(len(result.Products.Product), 1)
@unittest.skipUnless(simple and isolator, "skipping simple test")
def test_get_lowest_offer_listings_for_asin(self):
diff --git a/tests/integration/rds/test_cert_verification.py b/tests/integration/rds/test_cert_verification.py
index 1efe8f3f..0cef27d7 100644
--- a/tests/integration/rds/test_cert_verification.py
+++ b/tests/integration/rds/test_cert_verification.py
@@ -24,17 +24,16 @@
"""
Check that all of the certs on all service endpoints validate.
"""
-
import unittest
-import boto.rds
+from tests.integration import ServiceCertVerificationTest
+
+import boto.rds
-class CertVerificationTest(unittest.TestCase):
+class RDSCertVerificationTest(unittest.TestCase, ServiceCertVerificationTest):
rds = True
- ssl = True
+ regions = boto.rds.regions()
- def test_certs(self):
- for region in boto.rds.regions():
- c = region.connect()
- c.get_all_dbinstances()
+ def sample_service_call(self, conn):
+ conn.get_all_dbinstances()
diff --git a/tests/integration/rds/test_db_subnet_group.py b/tests/integration/rds/test_db_subnet_group.py
new file mode 100644
index 00000000..52d63739
--- /dev/null
+++ b/tests/integration/rds/test_db_subnet_group.py
@@ -0,0 +1,92 @@
+# Copyright (c) 2013 Franc Carter franc.carter@gmail.com
+# All rights reserved.
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish, dis-
+# tribute, sublicense, and/or sell copies of the Software, and to permit
+# persons to whom the Software is furnished to do so, subject to the fol-
+# lowing conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
+# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+
+"""
+Check that db_subnet_groups behave sanely
+"""
+
+import time
+import unittest
+import boto.rds
+from boto.vpc import VPCConnection
+from boto.rds import RDSConnection
+
+def _is_ok(subnet_group, vpc_id, description, subnets):
+ if subnet_group.vpc_id != vpc_id:
+ print 'vpc_id is', subnet_group.vpc_id, 'but should be', vpc_id
+ return 0
+ if subnet_group.description != description:
+ print "description is '"+subnet_group.description+"' but should be '"+description+"'"
+ return 0
+ if set(subnet_group.subnet_ids) != set(subnets):
+ subnets_are = ','.join(subnet_group.subnet_ids)
+ should_be = ','.join(subnets)
+ print "subnets are "+subnets_are+" but should be "+should_be
+ return 0
+ return 1
+
+
+class DbSubnetGroupTest(unittest.TestCase):
+ rds = True
+
+ def test_db_subnet_group(self):
+ vpc_api = VPCConnection()
+ rds_api = RDSConnection()
+ vpc = vpc_api.create_vpc('10.0.0.0/16')
+
+ az_list = vpc_api.get_all_zones(filters={'state':'available'})
+ subnet = list()
+ n = 0
+ for az in az_list:
+ try:
+ subnet.append(vpc_api.create_subnet(vpc.id, '10.0.'+str(n)+'.0/24',availability_zone=az.name))
+ n = n+1
+ except:
+ pass
+
+ grp_name = 'db_subnet_group'+str(int(time.time()))
+ subnet_group = rds_api.create_db_subnet_group(grp_name, grp_name, [subnet[0].id,subnet[1].id])
+ if not _is_ok(subnet_group, vpc.id, grp_name, [subnet[0].id,subnet[1].id]):
+ raise Exception("create_db_subnet_group returned bad values")
+
+ rds_api.modify_db_subnet_group(grp_name, description='new description')
+ subnet_grps = rds_api.get_all_db_subnet_groups(name=grp_name)
+ if not _is_ok(subnet_grps[0], vpc.id, 'new description', [subnet[0].id,subnet[1].id]):
+ raise Exception("modifying the subnet group desciption returned bad values")
+
+ rds_api.modify_db_subnet_group(grp_name, subnet_ids=[subnet[1].id,subnet[2].id])
+ subnet_grps = rds_api.get_all_db_subnet_groups(name=grp_name)
+ if not _is_ok(subnet_grps[0], vpc.id, 'new description', [subnet[1].id,subnet[2].id]):
+ raise Exception("modifying the subnet group subnets returned bad values")
+
+ rds_api.delete_db_subnet_group(subnet_group.name)
+ try:
+ rds_api.get_all_db_subnet_groups(name=grp_name)
+ except:
+ pass
+ else:
+ raise Exception(subnet_group.name+" still accessible after delete_db_subnet_group")
+
+ while n > 0:
+ n = n-1
+ vpc_api.delete_subnet(subnet[n].id)
+ vpc_api.delete_vpc(vpc.id)
+
diff --git a/tests/integration/redshift/test_cert_verification.py b/tests/integration/redshift/test_cert_verification.py
index 27fd16da..3081c577 100644
--- a/tests/integration/redshift/test_cert_verification.py
+++ b/tests/integration/redshift/test_cert_verification.py
@@ -19,17 +19,16 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
+import unittest
-from tests.unit import unittest
-import boto.redshift
+from tests.integration import ServiceCertVerificationTest
+import boto.redshift
-class CertVerificationTest(unittest.TestCase):
+class RedshiftCertVerificationTest(unittest.TestCase, ServiceCertVerificationTest):
redshift = True
- ssl = True
+ regions = boto.redshift.regions()
- def test_certs(self):
- for region in boto.redshift.regions():
- c = region.connect()
- c.describe_cluster_versions()
+ def sample_service_call(self, conn):
+ conn.describe_cluster_versions()
diff --git a/tests/integration/route53/test_cert_verification.py b/tests/integration/route53/test_cert_verification.py
index 18c43a9f..bc36a186 100644
--- a/tests/integration/route53/test_cert_verification.py
+++ b/tests/integration/route53/test_cert_verification.py
@@ -24,17 +24,16 @@
"""
Check that all of the certs on all service endpoints validate.
"""
-
import unittest
-import boto.route53
+from tests.integration import ServiceCertVerificationTest
+
+import boto.route53
-class CertVerificationTest(unittest.TestCase):
+class Route53CertVerificationTest(unittest.TestCase, ServiceCertVerificationTest):
route53 = True
- ssl = True
+ regions = boto.route53.regions()
- def test_certs(self):
- for region in boto.route53.regions():
- c = region.connect()
- c.get_all_hosted_zones()
+ def sample_service_call(self, conn):
+ conn.get_all_hosted_zones()
diff --git a/tests/integration/route53/test_resourcerecordsets.py b/tests/integration/route53/test_resourcerecordsets.py
index c6baadfc..9c8f3b22 100644
--- a/tests/integration/route53/test_resourcerecordsets.py
+++ b/tests/integration/route53/test_resourcerecordsets.py
@@ -23,7 +23,6 @@
import unittest
from boto.route53.connection import Route53Connection
from boto.route53.record import ResourceRecordSets
-from boto.exception import TooManyRecordsException
class TestRoute53ResourceRecordSets(unittest.TestCase):
@@ -48,6 +47,53 @@ class TestRoute53ResourceRecordSets(unittest.TestCase):
deleted.add_value('192.168.0.25')
rrs.commit()
+ def test_record_count(self):
+ rrs = ResourceRecordSets(self.conn, self.zone.id)
+ hosts = 101
+
+ for hostid in range(hosts):
+ rec = "test" + str(hostid) + ".example.com"
+ created = rrs.add_change("CREATE", rec, "A")
+ ip = '192.168.0.' + str(hostid)
+ created.add_value(ip)
+
+ # Max 100 changes per commit
+ if (hostid + 1) % 100 == 0:
+ rrs.commit()
+ rrs = ResourceRecordSets(self.conn, self.zone.id)
+
+ rrs.commit()
+
+ all_records = self.conn.get_all_rrsets(self.zone.id)
+
+ # A first pass over the result set always worked
+ i = 0
+ for rset in all_records:
+ i += 1
+
+ # A second pass over the same result set used to fail
+ i = 0
+ for rset in all_records:
+ i += 1
+
+ # Clean up individual records
+ rrs = ResourceRecordSets(self.conn, self.zone.id)
+ for hostid in range(hosts):
+ rec = "test" + str(hostid) + ".example.com"
+ deleted = rrs.add_change("DELETE", rec, "A")
+ ip = '192.168.0.' + str(hostid)
+ deleted.add_value(ip)
+
+ # Max 100 changes per commit
+ if (hostid + 1) % 100 == 0:
+ rrs.commit()
+ rrs = ResourceRecordSets(self.conn, self.zone.id)
+
+ rrs.commit()
+
+ # The second count should match the number of hosts plus the NS/SOA records
+ records = hosts + 2
+ self.assertEqual(i, records)
if __name__ == '__main__':
unittest.main()
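
test_record_count above works around Route53's limit of 100 changes per ChangeResourceRecordSets request by committing and starting a fresh ResourceRecordSets object every 100 changes. The same batching idea as a standalone helper (a sketch, not boto code; the names and batch size parameter are illustrative):

    def apply_in_batches(changes, apply_batch, batch_size=100):
        """Call apply_batch with at most batch_size changes at a time."""
        batch = []
        for change in changes:
            batch.append(change)
            if len(batch) == batch_size:
                apply_batch(batch)
                batch = []
        if batch:
            apply_batch(batch)

    # 101 changes, as in the test, result in two calls: 100 then 1.
    calls = []
    apply_in_batches(range(101), lambda b: calls.append(len(b)))
    assert calls == [100, 1]
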
diff --git a/tests/integration/s3/test_cert_verification.py b/tests/integration/s3/test_cert_verification.py
index 27116dee..7395d10a 100644
--- a/tests/integration/s3/test_cert_verification.py
+++ b/tests/integration/s3/test_cert_verification.py
@@ -24,17 +24,16 @@
"""
Check that all of the certs on SQS endpoints validate.
"""
-
import unittest
-import boto.s3
+from tests.integration import ServiceCertVerificationTest
+
+import boto.s3
-class CertVerificationTest(unittest.TestCase):
+class S3CertVerificationTest(unittest.TestCase, ServiceCertVerificationTest):
s3 = True
- ssl = True
+ regions = boto.s3.regions()
- def test_certs(self):
- for region in boto.s3.regions():
- c = region.connect()
- c.get_all_buckets()
+ def sample_service_call(self, conn):
+ conn.get_all_buckets()
diff --git a/tests/integration/s3/test_key.py b/tests/integration/s3/test_key.py
index f329e06b..ef5831a9 100644
--- a/tests/integration/s3/test_key.py
+++ b/tests/integration/s3/test_key.py
@@ -383,3 +383,14 @@ class S3KeyTest(unittest.TestCase):
check = self.bucket.get_key('test_date')
self.assertEqual(check.get_metadata('date'), u'20130524T155935Z')
self.assertTrue('x-amz-meta-date' in check._get_remote_metadata())
+
+ def test_header_casing(self):
+ key = self.bucket.new_key('test_header_case')
+ # Using anything but CamelCase on ``Content-Type`` or ``Content-MD5``
+ # used to cause a signature error (when using ``s3`` for signing).
+ key.set_metadata('Content-type', 'application/json')
+ key.set_metadata('Content-md5', 'XmUKnus7svY1frWsVskxXg==')
+ key.set_contents_from_string('{"abc": 123}')
+
+ check = self.bucket.get_key('test_header_case')
+ self.assertEqual(check.content_type, 'application/json')
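
The regression pinned down by test_header_casing is that metadata set with non-canonical casing ('Content-type', 'Content-md5') used to produce a string-to-sign that disagreed with the headers actually sent when the s3 signature scheme was in use. One way to picture the fix is canonicalizing the casing of the signed headers; this is only an illustrative sketch of the idea, not boto's implementation:

    # Map lower-cased names of headers that participate in the S3
    # string-to-sign back to their canonical casing.
    CANONICAL_CASING = {
        'content-type': 'Content-Type',
        'content-md5': 'Content-MD5',
    }

    def canonicalize_headers(headers):
        fixed = {}
        for name, value in headers.items():
            fixed[CANONICAL_CASING.get(name.lower(), name)] = value
        return fixed

    assert canonicalize_headers({'Content-type': 'application/json'}) == \
        {'Content-Type': 'application/json'}
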
diff --git a/tests/integration/s3/test_multipart.py b/tests/integration/s3/test_multipart.py
index 51d34a51..2ca42b5c 100644
--- a/tests/integration/s3/test_multipart.py
+++ b/tests/integration/s3/test_multipart.py
@@ -137,3 +137,23 @@ class S3MultiPartUploadTest(unittest.TestCase):
# be a min of 5MB, so we'll assume that is enough
# testing and abort the upload.
mpu.cancel_upload()
+
+ # mpu.upload_part_from_file() now returns the uploaded part
+ # which makes the etag available. Confirm the etag is
+ # available and equal to the etag returned by the parts list.
+ def test_etag_of_parts(self):
+ key_name = "etagtest"
+ mpu = self.bucket.initiate_multipart_upload(key_name)
+ fp = StringIO.StringIO("small file")
+ # upload 2 parts and save each part
+ uparts = []
+ uparts.append(mpu.upload_part_from_file(fp, part_num=1, size=5))
+ uparts.append(mpu.upload_part_from_file(fp, part_num=2))
+ fp.close()
+ # compare uploaded parts etag to listed parts
+ pn = 0
+ for lpart in mpu:
+ self.assertEqual(uparts[pn].etag, lpart.etag)
+ pn += 1
+ # Can't complete 2 small parts so just clean up.
+ mpu.cancel_upload()
diff --git a/tests/integration/sdb/test_cert_verification.py b/tests/integration/sdb/test_cert_verification.py
index 1e0cf4f0..ebb8949f 100644
--- a/tests/integration/sdb/test_cert_verification.py
+++ b/tests/integration/sdb/test_cert_verification.py
@@ -24,17 +24,16 @@
"""
Check that all of the certs on all service endpoints validate.
"""
-
import unittest
-import boto.sdb
+from tests.integration import ServiceCertVerificationTest
+
+import boto.sdb
-class CertVerificationTest(unittest.TestCase):
+class SDBCertVerificationTest(unittest.TestCase, ServiceCertVerificationTest):
sdb = True
- ssl = True
+ regions = boto.sdb.regions()
- def test_certs(self):
- for region in boto.sdb.regions():
- c = region.connect()
- c.get_all_domains()
+ def sample_service_call(self, conn):
+ conn.get_all_domains()
diff --git a/tests/integration/ses/test_cert_verification.py b/tests/integration/ses/test_cert_verification.py
index 8954ec83..7add339d 100644
--- a/tests/integration/ses/test_cert_verification.py
+++ b/tests/integration/ses/test_cert_verification.py
@@ -24,17 +24,16 @@
"""
Check that all of the certs on all service endpoints validate.
"""
-
import unittest
-import boto.ses
+from tests.integration import ServiceCertVerificationTest
+
+import boto.ses
-class CertVerificationTest(unittest.TestCase):
+class SESCertVerificationTest(unittest.TestCase, ServiceCertVerificationTest):
ses = True
- ssl = True
+ regions = boto.ses.regions()
- def test_certs(self):
- for region in boto.ses.regions():
- c = region.connect()
- c.list_verified_email_addresses()
+ def sample_service_call(self, conn):
+ conn.list_verified_email_addresses()
diff --git a/tests/integration/ses/test_connection.py b/tests/integration/ses/test_connection.py
index 83b99944..f1d66e8c 100644
--- a/tests/integration/ses/test_connection.py
+++ b/tests/integration/ses/test_connection.py
@@ -1,3 +1,5 @@
+from __future__ import with_statement
+
from tests.unit import unittest
from boto.ses.connection import SESConnection
diff --git a/tests/integration/sns/test_cert_verification.py b/tests/integration/sns/test_cert_verification.py
index a67e1aa7..53b6b9f2 100644
--- a/tests/integration/sns/test_cert_verification.py
+++ b/tests/integration/sns/test_cert_verification.py
@@ -24,17 +24,16 @@
"""
Check that all of the certs on SQS endpoints validate.
"""
-
import unittest
-import boto.sns
+from tests.integration import ServiceCertVerificationTest
+
+import boto.sns
-class CertVerificationTest(unittest.TestCase):
+class SNSCertVerificationTest(unittest.TestCase, ServiceCertVerificationTest):
sns = True
- ssl = True
+ regions = boto.sns.regions()
- def test_certs(self):
- for region in boto.sns.regions():
- c = region.connect()
- c.get_all_topics()
+ def sample_service_call(self, conn):
+ conn.get_all_topics()
diff --git a/tests/integration/sns/test_connection.py b/tests/integration/sns/test_connection.py
new file mode 100644
index 00000000..889b8577
--- /dev/null
+++ b/tests/integration/sns/test_connection.py
@@ -0,0 +1,68 @@
+# Copyright (c) 2013 Amazon.com, Inc. or its affiliates.
+# All rights reserved.
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish, dis-
+# tribute, sublicense, and/or sell copies of the Software, and to permit
+# persons to whom the Software is furnished to do so, subject to the fol-
+# lowing conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
+# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+from __future__ import with_statement
+import mock
+import httplib
+
+from tests.unit import unittest
+from boto.sns import connect_to_region
+
+
+class StubResponse(object):
+ status = 403
+ reason = 'nopenopenope'
+
+ def getheader(self, val):
+ return ''
+
+ def read(self):
+ return ''
+
+
+class TestSNSConnection(unittest.TestCase):
+
+ sns = True
+
+ def setUp(self):
+ self.connection = connect_to_region('us-west-2')
+
+ def test_list_platform_applications(self):
+ response = self.connection.list_platform_applications()
+
+ def test_forced_host(self):
+ # This test asserts that the ``Host`` header is correctly set.
+ # On Python 2.5(.6), not having this in place would cause any SigV4
+ # calls to fail, due to a signature mismatch (the port would be present
+ # when it shouldn't be).
+ https = httplib.HTTPSConnection
+ mpo = mock.patch.object
+
+ with mpo(https, 'request') as mock_request:
+ with mpo(https, 'getresponse', return_value=StubResponse()):
+ with self.assertRaises(self.connection.ResponseError):
+ self.connection.list_platform_applications()
+
+ # Now, assert that the ``Host`` was there & correct.
+ call = mock_request.call_args_list[0]
+ headers = call[0][3]
+ self.assertTrue('Host' in headers)
+ self.assertEqual(headers['Host'], 'sns.us-west-2.amazonaws.com')
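
test_forced_host above leans on mock.patch.object recording every call made to httplib.HTTPSConnection.request so the headers can be inspected after the fact. The inspection pattern on its own, away from boto (a generic sketch; the Client class here is made up for illustration):

    import mock

    class Client(object):
        def request(self, method, url, body, headers):
            raise RuntimeError("would hit the network")

    with mock.patch.object(Client, 'request') as mock_request:
        # The patched method records its arguments instead of running.
        Client().request('GET', '/', '', {'Host': 'example.com'})

    # call_args_list[0] is an (args, kwargs) pair for the first call;
    # positional argument 3 is the headers dict, as in the SNS test.
    args, kwargs = mock_request.call_args_list[0]
    assert args[3] == {'Host': 'example.com'}
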
diff --git a/tests/integration/sns/test_sns_sqs_subscription.py b/tests/integration/sns/test_sns_sqs_subscription.py
index 0cb8b360..88ea2bf4 100644
--- a/tests/integration/sns/test_sns_sqs_subscription.py
+++ b/tests/integration/sns/test_sns_sqs_subscription.py
@@ -27,15 +27,15 @@ Unit tests for subscribing SQS queues to SNS topics.
import hashlib
import time
-import json
from tests.unit import unittest
+from boto.compat import json
from boto.sqs.connection import SQSConnection
from boto.sns.connection import SNSConnection
class SNSSubcribeSQSTest(unittest.TestCase):
-
+
sqs = True
sns = True
@@ -64,7 +64,7 @@ class SNSSubcribeSQSTest(unittest.TestCase):
expected_sid = hashlib.md5(topic_arn + queue_arn).hexdigest()
resp = self.snsc.subscribe_sqs_queue(topic_arn, queue)
-
+
found_expected_sid = False
statements = self.get_policy_statements(queue)
for statement in statements:
diff --git a/tests/integration/sqs/test_cert_verification.py b/tests/integration/sqs/test_cert_verification.py
index 1b18fe84..f815e4c3 100644
--- a/tests/integration/sqs/test_cert_verification.py
+++ b/tests/integration/sqs/test_cert_verification.py
@@ -24,17 +24,16 @@
"""
Check that all of the certs on SQS endpoints validate.
"""
-
import unittest
-import boto.sqs
+from tests.integration import ServiceCertVerificationTest
+
+import boto.sqs
-class SQSCertVerificationTest(unittest.TestCase):
+class SQSCertVerificationTest(unittest.TestCase, ServiceCertVerificationTest):
sqs = True
- ssl = True
+ regions = boto.sqs.regions()
- def test_certs(self):
- for region in boto.sqs.regions():
- c = region.connect()
- c.get_all_queues()
+ def sample_service_call(self, conn):
+ conn.get_all_queues()
diff --git a/tests/integration/sqs/test_connection.py b/tests/integration/sqs/test_connection.py
index 9b2ab59a..b655aa11 100644
--- a/tests/integration/sqs/test_connection.py
+++ b/tests/integration/sqs/test_connection.py
@@ -24,6 +24,7 @@
"""
Some unit tests for the SQSConnection
"""
+from __future__ import with_statement
import time
from threading import Timer
@@ -239,3 +240,44 @@ class SQSConnectionTest(unittest.TestCase):
# Wait long enough for SQS to finally remove the queues.
time.sleep(90)
self.assertEqual(len(conn.get_all_queues()), initial_count)
+
+ def test_get_messages_attributes(self):
+ conn = SQSConnection()
+ current_timestamp = int(time.time())
+ queue_name = 'test%d' % int(time.time())
+ test = conn.create_queue(queue_name)
+ self.addCleanup(conn.delete_queue, test)
+ time.sleep(65)
+
+ # Put a message in the queue.
+ m1 = Message()
+ m1.set_body('This is a test message.')
+ test.write(m1)
+ self.assertEqual(test.count(), 1)
+
+ # Check all attributes.
+ msgs = test.get_messages(
+ num_messages=1,
+ attributes='All'
+ )
+ for msg in msgs:
+ self.assertEqual(msg.attributes['ApproximateReceiveCount'], '1')
+ first_rec = msg.attributes['ApproximateFirstReceiveTimestamp']
+ first_rec = int(first_rec) / 1000
+ self.assertTrue(first_rec >= current_timestamp)
+
+ # Put another message in the queue.
+ m2 = Message()
+ m2.set_body('This is another test message.')
+ test.write(m2)
+ self.assertEqual(test.count(), 1)
+
+ # Check a specific attribute.
+ msgs = test.get_messages(
+ num_messages=1,
+ attributes='ApproximateReceiveCount'
+ )
+ for msg in msgs:
+ self.assertEqual(msg.attributes['ApproximateReceiveCount'], '1')
+ with self.assertRaises(KeyError):
+ msg.attributes['ApproximateFirstReceiveTimestamp']
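
For reference, the attribute plumbing exercised by test_get_messages_attributes looks like this in ordinary application code; it uses the same boto 2.x calls as the test, with the region and queue name as placeholders:

    import boto.sqs
    from boto.sqs.message import Message

    conn = boto.sqs.connect_to_region('us-west-2')         # placeholder region
    queue = conn.create_queue('example-attribute-queue')   # placeholder name

    msg = Message()
    msg.set_body('hello')
    queue.write(msg)

    # attributes='All' returns every per-message attribute; passing a single
    # attribute name (e.g. 'ApproximateReceiveCount') restricts the response.
    for received in queue.get_messages(num_messages=1, attributes='All'):
        print(received.attributes['ApproximateReceiveCount'])
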
diff --git a/tests/integration/sts/test_cert_verification.py b/tests/integration/sts/test_cert_verification.py
index 0696ed9c..31ae2a6c 100644
--- a/tests/integration/sts/test_cert_verification.py
+++ b/tests/integration/sts/test_cert_verification.py
@@ -24,17 +24,16 @@
"""
Check that all of the certs on all service endpoints validate.
"""
-
import unittest
-import boto.sts
+from tests.integration import ServiceCertVerificationTest
+
+import boto.sts
-class CertVerificationTest(unittest.TestCase):
+class STSCertVerificationTest(unittest.TestCase, ServiceCertVerificationTest):
sts = True
- ssl = True
+ regions = boto.sts.regions()
- def test_certs(self):
- for region in boto.sts.regions():
- c = region.connect()
- c.get_session_token()
+ def sample_service_call(self, conn):
+ conn.get_session_token()
diff --git a/tests/integration/sts/test_session_token.py b/tests/integration/sts/test_session_token.py
index 3d548b9f..d47071d9 100644
--- a/tests/integration/sts/test_session_token.py
+++ b/tests/integration/sts/test_session_token.py
@@ -67,13 +67,15 @@ class SessionTokenTest (unittest.TestCase):
print '--- tests completed ---'
def test_assume_role_with_web_identity(self):
- c = STSConnection()
+ c = STSConnection(anon=True)
+ arn = 'arn:aws:iam::000240903217:role/FederatedWebIdentityRole'
+ wit = 'b94d27b9934d3e08a52e52d7da7dabfac484efe37a5380ee9088f7ace2efcde9'
try:
creds = c.assume_role_with_web_identity(
- 'arn:aws:s3:::my_corporate_bucket/*',
- 'guestuser',
- 'b94d27b9934d3e08a52e52d7da7dabfac484efe37a5380ee9088f7ace2efcde9',
+ role_arn=arn,
+ role_session_name='guestuser',
+ web_identity_token=wit,
provider_id='www.amazon.com',
)
except BotoServerError as err:
diff --git a/tests/integration/support/test_cert_verification.py b/tests/integration/support/test_cert_verification.py
index 586cc71d..1127c0d0 100644
--- a/tests/integration/support/test_cert_verification.py
+++ b/tests/integration/support/test_cert_verification.py
@@ -19,17 +19,16 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
+import unittest
-from tests.unit import unittest
-import boto.support
+from tests.integration import ServiceCertVerificationTest
+import boto.support
-class CertVerificationTest(unittest.TestCase):
+class SupportCertVerificationTest(unittest.TestCase, ServiceCertVerificationTest):
support = True
- ssl = True
+ regions = boto.support.regions()
- def test_certs(self):
- for region in boto.support.regions():
- c = region.connect()
- c.describe_services()
+ def sample_service_call(self, conn):
+ conn.describe_services()
diff --git a/tests/integration/swf/test_cert_verification.py b/tests/integration/swf/test_cert_verification.py
index 1328b825..108f218b 100644
--- a/tests/integration/swf/test_cert_verification.py
+++ b/tests/integration/swf/test_cert_verification.py
@@ -24,17 +24,16 @@
"""
Check that all of the certs on all service endpoints validate.
"""
-
import unittest
-import boto.swf
+from tests.integration import ServiceCertVerificationTest
+
+import boto.swf
-class CertVerificationTest(unittest.TestCase):
+class SWFCertVerificationTest(unittest.TestCase, ServiceCertVerificationTest):
swf = True
- ssl = True
+ regions = boto.swf.regions()
- def test_certs(self):
- for region in boto.swf.regions():
- c = region.connect()
- c.list_domains('REGISTERED')
+ def sample_service_call(self, conn):
+ conn.list_domains('REGISTERED')
diff --git a/tests/unit/auth/test_query.py b/tests/unit/auth/test_query.py
new file mode 100644
index 00000000..fa5882c9
--- /dev/null
+++ b/tests/unit/auth/test_query.py
@@ -0,0 +1,76 @@
+# Copyright (c) 2013 Amazon.com, Inc. or its affiliates. All Rights Reserved
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish, dis-
+# tribute, sublicense, and/or sell copies of the Software, and to permit
+# persons to whom the Software is furnished to do so, subject to the fol-
+# lowing conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
+# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+#
+import copy
+from mock import Mock
+from tests.unit import unittest
+
+from boto.auth import QueryAuthHandler
+from boto.connection import HTTPRequest
+
+
+class TestQueryAuthHandler(unittest.TestCase):
+ def setUp(self):
+ self.provider = Mock()
+ self.provider.access_key = 'access_key'
+ self.provider.secret_key = 'secret_key'
+ self.request = HTTPRequest(
+ method='GET',
+ protocol='https',
+ host='sts.amazonaws.com',
+ port=443,
+ path='/',
+ auth_path=None,
+ params={
+ 'Action': 'AssumeRoleWithWebIdentity',
+ 'Version': '2011-06-15',
+ 'RoleSessionName': 'web-identity-federation',
+ 'ProviderId': '2012-06-01',
+ 'WebIdentityToken': 'Atza|IQEBLjAsAhRkcxQ',
+ },
+ headers={},
+ body=''
+ )
+
+ def test_escape_value(self):
+ auth = QueryAuthHandler('sts.amazonaws.com',
+ Mock(), self.provider)
+ # This should **NOT** get escaped.
+ value = auth._escape_value('Atza|IQEBLjAsAhRkcxQ')
+ self.assertEqual(value, 'Atza|IQEBLjAsAhRkcxQ')
+
+ def test_build_query_string(self):
+ auth = QueryAuthHandler('sts.amazonaws.com',
+ Mock(), self.provider)
+ query_string = auth._build_query_string(self.request.params)
+ self.assertEqual(query_string, 'Action=AssumeRoleWithWebIdentity' + \
+ '&ProviderId=2012-06-01&RoleSessionName=web-identity-federation' + \
+ '&Version=2011-06-15&WebIdentityToken=Atza|IQEBLjAsAhRkcxQ')
+
+ def test_add_auth(self):
+ auth = QueryAuthHandler('sts.amazonaws.com',
+ Mock(), self.provider)
+ req = copy.copy(self.request)
+ auth.add_auth(req)
+ self.assertEqual(req.path,
+ '/?Action=AssumeRoleWithWebIdentity' + \
+ '&ProviderId=2012-06-01&RoleSessionName=web-identity-federation' + \
+ '&Version=2011-06-15&WebIdentityToken=Atza|IQEBLjAsAhRkcxQ')
diff --git a/tests/unit/auth/test_sigv4.py b/tests/unit/auth/test_sigv4.py
index 2de6d724..670ce664 100644
--- a/tests/unit/auth/test_sigv4.py
+++ b/tests/unit/auth/test_sigv4.py
@@ -91,6 +91,87 @@ class TestSigV4Handler(unittest.TestCase):
# There should not be two-slashes.
self.assertEqual(canonical_uri, '/')
+ # Make sure Windows-style slashes are converted properly
+ request = HTTPRequest(
+ 'GET', 'https', 'glacier.us-east-1.amazonaws.com', 443,
+ '\\x\\x.html', None, {},
+ {'x-amz-glacier-version': '2012-06-01'}, '')
+ canonical_uri = auth.canonical_uri(request)
+ self.assertEqual(canonical_uri, '/x/x.html')
+
+ def test_credential_scope(self):
+ # test the AWS standard regions IAM endpoint
+ auth = HmacAuthV4Handler('iam.amazonaws.com',
+ Mock(), self.provider)
+ request = HTTPRequest(
+ 'POST', 'https', 'iam.amazonaws.com', 443,
+ '/', '/',
+ {'Action': 'ListAccountAliases', 'Version': '2010-05-08'},
+ {
+ 'Content-Length': '44',
+ 'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
+ 'X-Amz-Date': '20130808T013210Z'
+ },
+ 'Action=ListAccountAliases&Version=2010-05-08')
+ credential_scope = auth.credential_scope(request)
+ region_name = credential_scope.split('/')[1]
+ self.assertEqual(region_name, 'us-east-1')
+
+ # test the AWS GovCloud region IAM endpoint
+ auth = HmacAuthV4Handler('iam.us-gov.amazonaws.com',
+ Mock(), self.provider)
+ request = HTTPRequest(
+ 'POST', 'https', 'iam.us-gov.amazonaws.com', 443,
+ '/', '/',
+ {'Action': 'ListAccountAliases', 'Version': '2010-05-08'},
+ {
+ 'Content-Length': '44',
+ 'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
+ 'X-Amz-Date': '20130808T013210Z'
+ },
+ 'Action=ListAccountAliases&Version=2010-05-08')
+ credential_scope = auth.credential_scope(request)
+ region_name = credential_scope.split('/')[1]
+ self.assertEqual(region_name, 'us-gov-west-1')
+
+ # iam.us-west-1.amazonaws.com does not exist, but this covers
+ # the remaining region_name control structure for a
+ # different region name
+ auth = HmacAuthV4Handler('iam.us-west-1.amazonaws.com',
+ Mock(), self.provider)
+ request = HTTPRequest(
+ 'POST', 'https', 'iam.us-west-1.amazonaws.com', 443,
+ '/', '/',
+ {'Action': 'ListAccountAliases', 'Version': '2010-05-08'},
+ {
+ 'Content-Length': '44',
+ 'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
+ 'X-Amz-Date': '20130808T013210Z'
+ },
+ 'Action=ListAccountAliases&Version=2010-05-08')
+ credential_scope = auth.credential_scope(request)
+ region_name = credential_scope.split('/')[1]
+ self.assertEqual(region_name, 'us-west-1')
+
+ # Test connections to custom locations, e.g. localhost:8080
+ auth = HmacAuthV4Handler('localhost', Mock(), self.provider,
+ service_name='iam')
+
+ request = HTTPRequest(
+ 'POST', 'http', 'localhost', 8080,
+ '/', '/',
+ {'Action': 'ListAccountAliases', 'Version': '2010-05-08'},
+ {
+ 'Content-Length': '44',
+ 'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
+ 'X-Amz-Date': '20130808T013210Z'
+ },
+ 'Action=ListAccountAliases&Version=2010-05-08')
+ credential_scope = auth.credential_scope(request)
+ timestamp, region, service, v = credential_scope.split('/')
+ self.assertEqual(region, 'localhost')
+ self.assertEqual(service, 'iam')
+
def test_headers_to_sign(self):
auth = HmacAuthV4Handler('glacier.us-east-1.amazonaws.com',
Mock(), self.provider)
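
The new test_credential_scope cases all pull the region out of the SigV4 credential scope, which has the documented shape <YYYYMMDD>/<region>/<service>/aws4_request. Assembling that string from already-resolved parts is the easy half; the part the tests actually cover is resolving the region from hostnames like iam.us-gov.amazonaws.com or localhost. A small sketch of the scope itself, with values taken from the tests above:

    def credential_scope(amz_date, region_name, service_name):
        """Assemble <YYYYMMDD>/<region>/<service>/aws4_request."""
        return '/'.join([amz_date[:8], region_name, service_name,
                         'aws4_request'])

    scope = credential_scope('20130808T013210Z', 'us-east-1', 'iam')
    assert scope == '20130808/us-east-1/iam/aws4_request'
    assert scope.split('/')[1] == 'us-east-1'  # what the tests inspect
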
diff --git a/tests/unit/cloudformation/test_connection.py b/tests/unit/cloudformation/test_connection.py
index 6890f152..766e6f1c 100644..100755
--- a/tests/unit/cloudformation/test_connection.py
+++ b/tests/unit/cloudformation/test_connection.py
@@ -421,7 +421,7 @@ class TestCloudFormationDescribeStacks(CloudFormationConnectionBase):
self.assertEqual(stack.creation_time,
datetime(2012, 5, 16, 22, 55, 31))
self.assertEqual(stack.description, 'My Description')
- self.assertEqual(stack.disable_rollback, True)
+ self.assertEqual(stack.disable_rollback, False)
self.assertEqual(stack.stack_id, 'arn:aws:cfn:us-east-1:1:stack')
self.assertEqual(stack.stack_status, 'CREATE_COMPLETE')
self.assertEqual(stack.stack_name, 'MyStack')
@@ -462,14 +462,14 @@ class TestCloudFormationListStackResources(CloudFormationConnectionBase):
<member>
<ResourceStatus>CREATE_COMPLETE</ResourceStatus>
<LogicalResourceId>SampleDB</LogicalResourceId>
- <LastUpdatedTimestamp>2011-06-21T20:25:57Z</LastUpdatedTimestamp>
+ <LastUpdatedTime>2011-06-21T20:25:57Z</LastUpdatedTime>
<PhysicalResourceId>My-db-ycx</PhysicalResourceId>
<ResourceType>AWS::RDS::DBInstance</ResourceType>
</member>
<member>
<ResourceStatus>CREATE_COMPLETE</ResourceStatus>
<LogicalResourceId>CPUAlarmHigh</LogicalResourceId>
- <LastUpdatedTimestamp>2011-06-21T20:29:23Z</LastUpdatedTimestamp>
+ <LastUpdatedTime>2011-06-21T20:29:23Z</LastUpdatedTime>
<PhysicalResourceId>MyStack-CPUH-PF</PhysicalResourceId>
<ResourceType>AWS::CloudWatch::Alarm</ResourceType>
</member>
@@ -486,7 +486,7 @@ class TestCloudFormationListStackResources(CloudFormationConnectionBase):
resources = self.service_connection.list_stack_resources('MyStack',
next_token='next_token')
self.assertEqual(len(resources), 2)
- self.assertEqual(resources[0].last_updated_timestamp,
+ self.assertEqual(resources[0].last_updated_time,
datetime(2011, 6, 21, 20, 25, 57))
self.assertEqual(resources[0].logical_resource_id, 'SampleDB')
self.assertEqual(resources[0].physical_resource_id, 'My-db-ycx')
@@ -494,7 +494,7 @@ class TestCloudFormationListStackResources(CloudFormationConnectionBase):
self.assertEqual(resources[0].resource_status_reason, None)
self.assertEqual(resources[0].resource_type, 'AWS::RDS::DBInstance')
- self.assertEqual(resources[1].last_updated_timestamp,
+ self.assertEqual(resources[1].last_updated_time,
datetime(2011, 6, 21, 20, 29, 23))
self.assertEqual(resources[1].logical_resource_id, 'CPUAlarmHigh')
self.assertEqual(resources[1].physical_resource_id, 'MyStack-CPUH-PF')
diff --git a/tests/unit/cloudformation/test_stack.py b/tests/unit/cloudformation/test_stack.py
index 0f39184e..c3bc9438 100644..100755
--- a/tests/unit/cloudformation/test_stack.py
+++ b/tests/unit/cloudformation/test_stack.py
@@ -116,14 +116,14 @@ LIST_STACK_RESOURCES_XML = r"""
<member>
<ResourceStatus>CREATE_COMPLETE</ResourceStatus>
<LogicalResourceId>DBSecurityGroup</LogicalResourceId>
- <LastUpdatedTimestamp>2011-06-21T20:15:58Z</LastUpdatedTimestamp>
+ <LastUpdatedTime>2011-06-21T20:15:58Z</LastUpdatedTime>
<PhysicalResourceId>gmarcteststack-dbsecuritygroup-1s5m0ez5lkk6w</PhysicalResourceId>
<ResourceType>AWS::RDS::DBSecurityGroup</ResourceType>
</member>
<member>
<ResourceStatus>CREATE_COMPLETE</ResourceStatus>
<LogicalResourceId>SampleDB</LogicalResourceId>
- <LastUpdatedTimestamp>2011-06-21T20:25:57.875643Z</LastUpdatedTimestamp>
+ <LastUpdatedTime>2011-06-21T20:25:57.875643Z</LastUpdatedTime>
<PhysicalResourceId>MyStack-sampledb-ycwhk1v830lx</PhysicalResourceId>
<ResourceType>AWS::RDS::DBInstance</ResourceType>
</member>
@@ -207,16 +207,50 @@ class TestStackParse(unittest.TestCase):
])
h = boto.handler.XmlHandler(rs, None)
xml.sax.parseString(LIST_STACK_RESOURCES_XML, h)
- timestamp_1 = rs[0].last_updated_timestamp
+ timestamp_1 = rs[0].last_updated_time
self.assertEqual(
timestamp_1,
datetime.datetime(2011, 6, 21, 20, 15, 58)
)
- timestamp_2 = rs[1].last_updated_timestamp
+ timestamp_2 = rs[1].last_updated_time
self.assertEqual(
timestamp_2,
datetime.datetime(2011, 6, 21, 20, 25, 57, 875643)
)
+ def test_disable_rollback_false(self):
+ # SAMPLE_XML defines DisableRollback=="false"
+ rs = boto.resultset.ResultSet([('member', boto.cloudformation.stack.Stack)])
+ h = boto.handler.XmlHandler(rs, None)
+ xml.sax.parseString(SAMPLE_XML, h)
+ disable_rollback = rs[0].disable_rollback
+ self.assertFalse(disable_rollback)
+
+ def test_disable_rollback_false_upper(self):
+ # Should also handle "False"
+ rs = boto.resultset.ResultSet([('member', boto.cloudformation.stack.Stack)])
+ h = boto.handler.XmlHandler(rs, None)
+ sample_xml_upper = SAMPLE_XML.replace('false', 'False')
+ xml.sax.parseString(sample_xml_upper, h)
+ disable_rollback = rs[0].disable_rollback
+ self.assertFalse(disable_rollback)
+
+ def test_disable_rollback_true(self):
+ rs = boto.resultset.ResultSet([('member', boto.cloudformation.stack.Stack)])
+ h = boto.handler.XmlHandler(rs, None)
+ sample_xml_true = SAMPLE_XML.replace('false', 'true')
+ xml.sax.parseString(sample_xml_true, h)
+ disable_rollback = rs[0].disable_rollback
+ self.assertTrue(disable_rollback)
+
+ def test_disable_rollback_true_upper(self):
+ rs = boto.resultset.ResultSet([('member', boto.cloudformation.stack.Stack)])
+ h = boto.handler.XmlHandler(rs, None)
+ sample_xml_upper = SAMPLE_XML.replace('false', 'True')
+ xml.sax.parseString(sample_xml_upper, h)
+ disable_rollback = rs[0].disable_rollback
+ self.assertTrue(disable_rollback)
+
+
if __name__ == '__main__':
unittest.main()
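
The four disable_rollback tests added above pin down case-insensitive parsing of the DisableRollback XML booleans. The rule they imply fits in a few lines (a sketch of the expected behaviour, not the Stack parsing code itself):

    def parse_xml_bool(value):
        """Accept 'true'/'True' and 'false'/'False', reject anything else."""
        lowered = value.strip().lower()
        if lowered == 'true':
            return True
        if lowered == 'false':
            return False
        raise ValueError('not an XML boolean: %r' % (value,))

    assert parse_xml_bool('False') is False
    assert parse_xml_bool('true') is True
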
diff --git a/tests/unit/cloudfront/test_distribution.py b/tests/unit/cloudfront/test_distribution.py
new file mode 100644
index 00000000..38e27106
--- /dev/null
+++ b/tests/unit/cloudfront/test_distribution.py
@@ -0,0 +1,21 @@
+import unittest
+
+from boto.cloudfront.distribution import DistributionConfig
+from boto.cloudfront.logging import LoggingInfo
+
+
+class CloudfrontDistributionTest(unittest.TestCase):
+ cloudfront = True
+
+ def setUp(self):
+ self.dist = DistributionConfig()
+
+ def test_logging(self):
+ # Default.
+ self.assertEqual(self.dist.logging, None)
+
+ # Override.
+ lo = LoggingInfo(bucket='whatever', prefix='override_')
+ dist = DistributionConfig(logging=lo)
+ self.assertEqual(dist.logging.bucket, 'whatever')
+ self.assertEqual(dist.logging.prefix, 'override_')
diff --git a/tests/unit/cloudfront/test_signed_urls.py b/tests/unit/cloudfront/test_signed_urls.py
index e1a19f06..30d9035e 100644
--- a/tests/unit/cloudfront/test_signed_urls.py
+++ b/tests/unit/cloudfront/test_signed_urls.py
@@ -4,6 +4,7 @@ try:
import simplejson as json
except ImportError:
import json
+from cStringIO import StringIO
from textwrap import dedent
from boto.cloudfront.distribution import Distribution
@@ -132,6 +133,22 @@ class CloudfrontSignedUrlsTest(unittest.TestCase):
encoded_sig = self.dist._url_base64_encode(sig)
self.assertEqual(expected, encoded_sig)
+ def test_sign_canned_policy_pk_file_like(self):
+ """
+ Test signing the canned policy from amazon's cloudfront documentation
+ with a file-like object (not a subclass of 'file' type)
+ """
+ expected = ("Nql641NHEUkUaXQHZINK1FZ~SYeUSoBJMxjdgqrzIdzV2gyEXPDN"
+ "v0pYdWJkflDKJ3xIu7lbwRpSkG98NBlgPi4ZJpRRnVX4kXAJK6td"
+ "Nx6FucDB7OVqzcxkxHsGFd8VCG1BkC-Afh9~lOCMIYHIaiOB6~5j"
+ "t9w2EOwi6sIIqrg_")
+ pk_file = StringIO()
+ pk_file.write(self.pk_str)
+ pk_file.seek(0)
+ sig = self.dist._sign_string(self.canned_policy, private_key_file=pk_file)
+ encoded_sig = self.dist._url_base64_encode(sig)
+ self.assertEqual(expected, encoded_sig)
+
def test_sign_canned_policy_unicode(self):
"""
Test signing the canned policy from amazon's cloudfront documentation.
diff --git a/tests/unit/cloudsearch/test_connection.py b/tests/unit/cloudsearch/test_connection.py
index d2f67526..d77c1a73 100644
--- a/tests/unit/cloudsearch/test_connection.py
+++ b/tests/unit/cloudsearch/test_connection.py
@@ -47,11 +47,8 @@ class TestCloudSearchCreateDomain(AWSMockServiceTestCase):
self.assert_request_parameters({
'Action': 'CreateDomain',
'DomainName': 'demo',
- 'AWSAccessKeyId': 'aws_access_key_id',
- 'SignatureMethod': 'HmacSHA256',
- 'SignatureVersion': 2,
'Version': '2011-02-01',
- }, ignore_params_values=['Timestamp'])
+ })
def test_cloudsearch_connect_result_endpoints(self):
"""Check that endpoints & ARNs are correctly returned from AWS"""
@@ -158,11 +155,8 @@ class CloudSearchConnectionDeletionTest(AWSMockServiceTestCase):
self.assert_request_parameters({
'Action': 'DeleteDomain',
'DomainName': 'demo',
- 'AWSAccessKeyId': 'aws_access_key_id',
- 'SignatureMethod': 'HmacSHA256',
- 'SignatureVersion': 2,
'Version': '2011-02-01',
- }, ignore_params_values=['Timestamp'])
+ })
class CloudSearchConnectionIndexDocumentTest(AWSMockServiceTestCase):
@@ -215,11 +209,8 @@ class CloudSearchConnectionIndexDocumentTest(AWSMockServiceTestCase):
self.assert_request_parameters({
'Action': 'IndexDocuments',
'DomainName': 'demo',
- 'AWSAccessKeyId': 'aws_access_key_id',
- 'SignatureMethod': 'HmacSHA256',
- 'SignatureVersion': 2,
'Version': '2011-02-01',
- }, ignore_params_values=['Timestamp'])
+ })
def test_cloudsearch_index_documents_resp(self):
"""
diff --git a/tests/unit/cloudsearch/test_exceptions.py b/tests/unit/cloudsearch/test_exceptions.py
new file mode 100644
index 00000000..5840537e
--- /dev/null
+++ b/tests/unit/cloudsearch/test_exceptions.py
@@ -0,0 +1,37 @@
+import mock
+from boto.compat import json
+from tests.unit import unittest
+
+from .test_search import HOSTNAME, CloudSearchSearchBaseTest
+from boto.cloudsearch.search import SearchConnection, SearchServiceException
+
+
+def fake_loads_value_error(content, *args, **kwargs):
+ """Callable to generate a fake ValueError"""
+ raise ValueError("HAHAHA! Totally not simplejson & you gave me bad JSON.")
+
+
+def fake_loads_json_error(content, *args, **kwargs):
+ """Callable to generate a fake JSONDecodeError"""
+ raise json.JSONDecodeError('Using simplejson & you gave me bad JSON.',
+ '', 0)
+
+
+class CloudSearchJSONExceptionTest(CloudSearchSearchBaseTest):
+ response = '{}'
+
+ def test_no_simplejson_value_error(self):
+ with mock.patch.object(json, 'loads', fake_loads_value_error):
+ search = SearchConnection(endpoint=HOSTNAME)
+
+ with self.assertRaisesRegexp(SearchServiceException, 'non-json'):
+ search.search(q='test')
+
+ @unittest.skipUnless(hasattr(json, 'JSONDecodeError'),
+ 'requires simplejson')
+ def test_simplejson_jsondecodeerror(self):
+ with mock.patch.object(json, 'loads', fake_loads_json_error):
+ search = SearchConnection(endpoint=HOSTNAME)
+
+ with self.assertRaisesRegexp(SearchServiceException, 'non-json'):
+ search.search(q='test')
diff --git a/tests/unit/cloudsearch/test_search.py b/tests/unit/cloudsearch/test_search.py
index 7cadf659..990e4637 100644
--- a/tests/unit/cloudsearch/test_search.py
+++ b/tests/unit/cloudsearch/test_search.py
@@ -6,7 +6,7 @@ from httpretty import HTTPretty
import urlparse
import json
-from boto.cloudsearch.search import SearchConnection
+from boto.cloudsearch.search import SearchConnection, SearchServiceException
HOSTNAME = "search-demo-userdomain.us-east-1.cloudsearch.amazonaws.com"
FULL_URL = 'http://%s/2011-02-01/search' % HOSTNAME
@@ -45,6 +45,8 @@ class CloudSearchSearchBaseTest(unittest.TestCase):
},
]
+ content_type = "text/xml"
+ response_status = 200
def get_args(self, requestline):
(_, request, _) = requestline.split(" ")
@@ -54,9 +56,15 @@ class CloudSearchSearchBaseTest(unittest.TestCase):
def setUp(self):
HTTPretty.enable()
+ body = self.response
+
+ if not isinstance(body, basestring):
+ body = json.dumps(body)
+
HTTPretty.register_uri(HTTPretty.GET, FULL_URL,
- body=json.dumps(self.response),
- content_type="text/xml")
+ body=body,
+ content_type=self.content_type,
+ status=self.response_status)
def tearDown(self):
HTTPretty.disable()
@@ -355,3 +363,27 @@ class CloudSearchSearchFacetTest(CloudSearchSearchBaseTest):
self.assertTrue('tags' not in results.facets)
self.assertEqual(results.facets['animals'], {u'lions': u'1', u'fish': u'2'})
+
+
+class CloudSearchNonJsonTest(CloudSearchSearchBaseTest):
+ response = '<html><body><h1>500 Internal Server Error</h1></body></html>'
+ response_status = 500
+ content_type = 'text/xml'
+
+ def test_response(self):
+ search = SearchConnection(endpoint=HOSTNAME)
+
+ with self.assertRaises(SearchServiceException):
+ search.search(q='Test')
+
+
+class CloudSearchUnauthorizedTest(CloudSearchSearchBaseTest):
+ response = '<html><body><h1>403 Forbidden</h1>foo bar baz</body></html>'
+ response_status = 403
+ content_type = 'text/html'
+
+ def test_response(self):
+ search = SearchConnection(endpoint=HOSTNAME)
+
+ with self.assertRaisesRegexp(SearchServiceException, 'foo bar baz'):
+ search.search(q='Test')
diff --git a/tests/unit/dynamodb2/test_layer1.py b/tests/unit/dynamodb2/test_layer1.py
index 5778a72d..405f1052 100644
--- a/tests/unit/dynamodb2/test_layer1.py
+++ b/tests/unit/dynamodb2/test_layer1.py
@@ -43,3 +43,11 @@ class DynamoDBv2Layer1UnitTest(unittest.TestCase):
aws_secret_access_key='aws_secret_access_key',
)
self.assertEqual(dynamodb.region.name, 'us-west-2')
+
+ def test_init_host_override(self):
+ dynamodb = DynamoDBConnection(
+ aws_access_key_id='aws_access_key_id',
+ aws_secret_access_key='aws_secret_access_key',
+ host='localhost', port=8000)
+ self.assertEqual(dynamodb.host, 'localhost')
+ self.assertEqual(dynamodb.port, 8000)
diff --git a/tests/unit/dynamodb2/test_table.py b/tests/unit/dynamodb2/test_table.py
index 791fdf59..6eab6339 100644
--- a/tests/unit/dynamodb2/test_table.py
+++ b/tests/unit/dynamodb2/test_table.py
@@ -269,6 +269,21 @@ class ItemTestCase(unittest.TestCase):
self.johndoe['last_name'] = 'Doe'
self.assertTrue(self.johndoe.needs_save())
+ def test_needs_save_set_changed(self):
+ # First, ensure we're clean.
+ self.johndoe.mark_clean()
+ self.assertFalse(self.johndoe.needs_save())
+ # Add a friends collection.
+ self.johndoe['friends'] = set(['jane', 'alice'])
+ self.assertTrue(self.johndoe.needs_save())
+ # Now mark it clean, then change the collection.
+ # This does NOT call ``__setitem__``, so the item used to
+ # incorrectly appear clean even though it had in fact been changed.
+ self.johndoe.mark_clean()
+ self.assertFalse(self.johndoe.needs_save())
+ self.johndoe['friends'].add('bob')
+ self.assertTrue(self.johndoe.needs_save())
+
def test_mark_clean(self):
self.johndoe['last_name'] = 'Doe'
self.assertTrue(self.johndoe.needs_save())
@@ -416,6 +431,51 @@ class ItemTestCase(unittest.TestCase):
'date_joined': {'N': '12345'}
})
+ self.johndoe['friends'] = set(['jane', 'alice'])
+ self.assertEqual(self.johndoe.prepare_full(), {
+ 'username': {'S': 'johndoe'},
+ 'first_name': {'S': 'John'},
+ 'date_joined': {'N': '12345'},
+ 'friends': {'SS': ['jane', 'alice']},
+ })
+
+ def test_prepare_full_empty_set(self):
+ self.johndoe['friends'] = set()
+ self.assertEqual(self.johndoe.prepare_full(), {
+ 'username': {'S': 'johndoe'},
+ 'first_name': {'S': 'John'},
+ 'date_joined': {'N': '12345'}
+ })
+
+ def test_prepare_partial(self):
+ self.johndoe.mark_clean()
+ # Change some data.
+ self.johndoe['first_name'] = 'Johann'
+ # Add some data.
+ self.johndoe['last_name'] = 'Doe'
+ # Delete some data.
+ del self.johndoe['date_joined']
+
+ final_data, fields = self.johndoe.prepare_partial()
+ self.assertEqual(final_data, {
+ 'date_joined': {
+ 'Action': 'DELETE',
+ },
+ 'first_name': {
+ 'Action': 'PUT',
+ 'Value': {'S': 'Johann'},
+ },
+ 'last_name': {
+ 'Action': 'PUT',
+ 'Value': {'S': 'Doe'},
+ },
+ })
+ self.assertEqual(fields, set([
+ 'first_name',
+ 'last_name',
+ 'date_joined'
+ ]))
+
def test_prepare_partial(self):
self.johndoe.mark_clean()
# Change some data.
@@ -424,8 +484,11 @@ class ItemTestCase(unittest.TestCase):
self.johndoe['last_name'] = 'Doe'
# Delete some data.
del self.johndoe['date_joined']
+ # Put an empty set on the ``Item``.
+ self.johndoe['friends'] = set()
- self.assertEqual(self.johndoe.prepare_partial(), {
+ final_data, fields = self.johndoe.prepare_partial()
+ self.assertEqual(final_data, {
'date_joined': {
'Action': 'DELETE',
},
@@ -438,6 +501,11 @@ class ItemTestCase(unittest.TestCase):
'Value': {'S': 'Doe'},
},
})
+ self.assertEqual(fields, set([
+ 'first_name',
+ 'last_name',
+ 'date_joined'
+ ]))
def test_save_no_changes(self):
# Unchanged, no save.
@@ -694,7 +762,7 @@ class ResultSetTestCase(unittest.TestCase):
self.assertEqual(results.next(), 'Hello john #0')
self.assertEqual(results.next(), 'Hello john #1')
self.assertRaises(StopIteration, results.next)
-
+
def test_limit_equals_page(self):
results = ResultSet()
results.to_call(fake_results, 'john', greeting='Hello', limit=5)
@@ -729,6 +797,60 @@ class ResultSetTestCase(unittest.TestCase):
results.to_call(none, limit=20)
self.assertRaises(StopIteration, results.next)
+ def test_iteration_sporadic_pages(self):
+ # Some pages have no/incomplete results but have a ``LastEvaluatedKey``
+ # (for instance, scans with filters), so we need to accommodate that.
+ def sporadic():
+ # A dict, because Python closures have read-only access to the
+ # reference itself.
+ count = {'value': -1}
+
+ def _wrapper(limit=10, exclusive_start_key=None):
+ count['value'] = count['value'] + 1
+
+ if count['value'] == 0:
+ # Full page.
+ return {
+ 'results': [
+ 'Result #0',
+ 'Result #1',
+ 'Result #2',
+ 'Result #3',
+ ],
+ 'last_key': 'page-1'
+ }
+ elif count['value'] == 1:
+ # Empty page but continue.
+ return {
+ 'results': [],
+ 'last_key': 'page-2'
+ }
+ elif count['value'] == 2:
+ # Final page.
+ return {
+ 'results': [
+ 'Result #4',
+ 'Result #5',
+ 'Result #6',
+ ],
+ }
+
+ return _wrapper
+
+ results = ResultSet()
+ results.to_call(sporadic(), limit=20)
+ # First page
+ self.assertEqual(results.next(), 'Result #0')
+ self.assertEqual(results.next(), 'Result #1')
+ self.assertEqual(results.next(), 'Result #2')
+ self.assertEqual(results.next(), 'Result #3')
+ # Second page (misses!)
+ # Moves on to the third page
+ self.assertEqual(results.next(), 'Result #4')
+ self.assertEqual(results.next(), 'Result #5')
+ self.assertEqual(results.next(), 'Result #6')
+ self.assertRaises(StopIteration, results.next)
+
def test_list(self):
self.assertEqual(list(self.results), [
'Hello john #0',
@@ -1408,6 +1530,89 @@ class TableTestCase(unittest.TestCase):
self.assertEqual(mock_batch.call_count, 2)
+ def test_batch_write_unprocessed_items(self):
+ unprocessed = {
+ 'UnprocessedItems': {
+ 'users': [
+ {
+ 'PutRequest': {
+ 'username': {
+ 'S': 'jane',
+ },
+ 'date_joined': {
+ 'N': 12342547
+ }
+ },
+ },
+ ],
+ },
+ }
+
+ # Test enqueuing the unprocessed bits.
+ with mock.patch.object(
+ self.users.connection,
+ 'batch_write_item',
+ return_value=unprocessed) as mock_batch:
+ with self.users.batch_write() as batch:
+ self.assertEqual(len(batch._unprocessed), 0)
+
+ # Stub out ``resend_unprocessed`` so that we don't
+ # loop forever here.
+ batch.resend_unprocessed = lambda: True
+
+ batch.put_item(data={
+ 'username': 'jane',
+ 'date_joined': 12342547
+ })
+ batch.delete_item(username='johndoe')
+ batch.put_item(data={
+ 'username': 'alice',
+ 'date_joined': 12342888
+ })
+
+ self.assertEqual(len(batch._unprocessed), 1)
+
+ # Now test resending those unprocessed items.
+ with mock.patch.object(
+ self.users.connection,
+ 'batch_write_item',
+ return_value={}) as mock_batch:
+ with self.users.batch_write() as batch:
+ self.assertEqual(len(batch._unprocessed), 0)
+
+ # Toss in faked unprocessed items, as though a previous batch
+ # had failed.
+ batch._unprocessed = [
+ {
+ 'PutRequest': {
+ 'username': {
+ 'S': 'jane',
+ },
+ 'date_joined': {
+ 'N': 12342547
+ }
+ },
+ },
+ ]
+
+ batch.put_item(data={
+ 'username': 'jane',
+ 'date_joined': 12342547
+ })
+ batch.delete_item(username='johndoe')
+ batch.put_item(data={
+ 'username': 'alice',
+ 'date_joined': 12342888
+ })
+
+ # Flush, to make sure everything has been processed.
+ # Unprocessed items should still be hanging around.
+ batch.flush()
+ self.assertEqual(len(batch._unprocessed), 1)
+
+ # Post-exit, this should be emptied.
+ self.assertEqual(len(batch._unprocessed), 0)
+
def test__build_filters(self):
filters = self.users._build_filters({
'username__eq': 'johndoe',
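
A note on the sporadic() helper in the ResultSet test above: it keeps its page counter in a one-key dict because a Python 2 closure can read, but not rebind, names from the enclosing scope. The trick in isolation:

    def make_counter():
        # A mutable container lets the inner function update shared state
        # without rebinding a name in the enclosing scope.
        state = {'value': -1}

        def bump():
            state['value'] += 1
            return state['value']

        return bump

    counter = make_counter()
    assert counter() == 0
    assert counter() == 1
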
diff --git a/tests/unit/ec2/autoscale/test_group.py b/tests/unit/ec2/autoscale/test_group.py
index 28941545..b3a5594e 100644
--- a/tests/unit/ec2/autoscale/test_group.py
+++ b/tests/unit/ec2/autoscale/test_group.py
@@ -28,7 +28,12 @@ from tests.unit import AWSMockServiceTestCase
from boto.ec2.autoscale import AutoScaleConnection
from boto.ec2.autoscale.group import AutoScalingGroup
+from boto.ec2.autoscale.policy import ScalingPolicy
+from boto.ec2.autoscale.tag import Tag
+from boto.ec2.blockdevicemapping import EBSBlockDeviceType, BlockDeviceMapping
+
+from boto.ec2.autoscale import launchconfig
class TestAutoScaleGroup(AWSMockServiceTestCase):
connection_class = AutoScaleConnection
@@ -195,6 +200,257 @@ class TestDescribeTerminationPolicies(AWSMockServiceTestCase):
['ClosestToNextInstanceHour', 'Default',
'NewestInstance', 'OldestInstance', 'OldestLaunchConfiguration'])
+class TestLaunchConfiguration(AWSMockServiceTestCase):
+ connection_class = AutoScaleConnection
+
+ def default_body(self):
+ # This is a dummy response
+ return """
+ <DescribeLaunchConfigurationsResponse>
+ </DescribeLaunchConfigurationsResponse>
+ """
+
+ def test_launch_config(self):
+ # This unit test is based on #753 and #1343
+ self.set_http_response(status_code=200)
+ dev_sdf = EBSBlockDeviceType(snapshot_id='snap-12345')
+ dev_sdg = EBSBlockDeviceType(snapshot_id='snap-12346')
+
+ bdm = BlockDeviceMapping()
+ bdm['/dev/sdf'] = dev_sdf
+ bdm['/dev/sdg'] = dev_sdg
+
+ lc = launchconfig.LaunchConfiguration(
+ connection=self.service_connection,
+ name='launch_config',
+ image_id='123456',
+ instance_type='m1.large',
+ security_groups=['group1', 'group2'],
+ spot_price='price',
+ block_device_mappings=[bdm]
+ )
+
+ response = self.service_connection.create_launch_configuration(lc)
+
+ self.assert_request_parameters({
+ 'Action': 'CreateLaunchConfiguration',
+ 'BlockDeviceMappings.member.1.DeviceName': '/dev/sdf',
+ 'BlockDeviceMappings.member.1.Ebs.DeleteOnTermination': 'false',
+ 'BlockDeviceMappings.member.1.Ebs.SnapshotId': 'snap-12345',
+ 'BlockDeviceMappings.member.2.DeviceName': '/dev/sdg',
+ 'BlockDeviceMappings.member.2.Ebs.DeleteOnTermination': 'false',
+ 'BlockDeviceMappings.member.2.Ebs.SnapshotId': 'snap-12346',
+ 'EbsOptimized': 'false',
+ 'LaunchConfigurationName': 'launch_config',
+ 'ImageId': '123456',
+ 'InstanceMonitoring.Enabled': 'false',
+ 'InstanceType': 'm1.large',
+ 'SecurityGroups.member.1': 'group1',
+ 'SecurityGroups.member.2': 'group2',
+ 'SpotPrice': 'price',
+ }, ignore_params_values=['Version'])
+
+
+class TestCreateAutoScalePolicy(AWSMockServiceTestCase):
+ connection_class = AutoScaleConnection
+
+ def setUp(self):
+ super(TestCreateAutoScalePolicy, self).setUp()
+
+ def default_body(self):
+ return """
+ <PutScalingPolicyResponse xmlns="http://autoscaling.amazonaws.com\
+ /doc/2011-01-01/">
+ <PutScalingPolicyResult>
+ <PolicyARN>arn:aws:autoscaling:us-east-1:803981987763:scaling\
+ Policy:b0dcf5e8
+ -02e6-4e31-9719-0675d0dc31ae:autoScalingGroupName/my-test-asg:\
+ policyName/my-scal
+ eout-policy</PolicyARN>
+ </PutScalingPolicyResult>
+ <ResponseMetadata>
+ <RequestId>3cfc6fef-c08b-11e2-a697-2922EXAMPLE</RequestId>
+ </ResponseMetadata>
+ </PutScalingPolicyResponse>
+ """
+
+ def test_scaling_policy_with_min_adjustment_step(self):
+ self.set_http_response(status_code=200)
+
+ policy = ScalingPolicy(
+ name='foo', as_name='bar',
+ adjustment_type='PercentChangeInCapacity', scaling_adjustment=50,
+ min_adjustment_step=30)
+ self.service_connection.create_scaling_policy(policy)
+
+ self.assert_request_parameters({
+ 'Action': 'PutScalingPolicy',
+ 'PolicyName': 'foo',
+ 'AutoScalingGroupName': 'bar',
+ 'AdjustmentType': 'PercentChangeInCapacity',
+ 'ScalingAdjustment': 50,
+ 'MinAdjustmentStep': 30
+ }, ignore_params_values=['Version'])
+
+ def test_scaling_policy_with_wrong_adjustment_type(self):
+ self.set_http_response(status_code=200)
+
+ policy = ScalingPolicy(
+ name='foo', as_name='bar',
+ adjustment_type='ChangeInCapacity', scaling_adjustment=50,
+ min_adjustment_step=30)
+ self.service_connection.create_scaling_policy(policy)
+
+ self.assert_request_parameters({
+ 'Action': 'PutScalingPolicy',
+ 'PolicyName': 'foo',
+ 'AutoScalingGroupName': 'bar',
+ 'AdjustmentType': 'ChangeInCapacity',
+ 'ScalingAdjustment': 50
+ }, ignore_params_values=['Version'])
+
+ def test_scaling_policy_without_min_adjustment_step(self):
+ self.set_http_response(status_code=200)
+
+ policy = ScalingPolicy(
+ name='foo', as_name='bar',
+ adjustment_type='PercentChangeInCapacity', scaling_adjustment=50)
+ self.service_connection.create_scaling_policy(policy)
+
+ self.assert_request_parameters({
+ 'Action': 'PutScalingPolicy',
+ 'PolicyName': 'foo',
+ 'AutoScalingGroupName': 'bar',
+ 'AdjustmentType': 'PercentChangeInCapacity',
+ 'ScalingAdjustment': 50
+ }, ignore_params_values=['Version'])
+
+
+class TestPutNotificationConfiguration(AWSMockServiceTestCase):
+ connection_class = AutoScaleConnection
+
+ def setUp(self):
+ super(TestPutNotificationConfiguration, self).setUp()
+
+ def default_body(self):
+ return """
+ <PutNotificationConfigurationResponse>
+ <ResponseMetadata>
+ <RequestId>requestid</RequestId>
+ </ResponseMetadata>
+ </PutNotificationConfigurationResponse>
+ """
+
+ def test_autoscaling_group_put_notification_configuration(self):
+ self.set_http_response(status_code=200)
+ autoscale = AutoScalingGroup(
+ name='ana', launch_config='launch_config',
+ min_size=1, max_size=2,
+ termination_policies=['OldestInstance', 'OldestLaunchConfiguration'])
+ self.service_connection.put_notification_configuration(autoscale, 'arn:aws:sns:us-east-1:19890506:AutoScaling-Up', ['autoscaling:EC2_INSTANCE_LAUNCH'])
+ self.assert_request_parameters({
+ 'Action': 'PutNotificationConfiguration',
+ 'AutoScalingGroupName': 'ana',
+ 'NotificationTypes.member.1': 'autoscaling:EC2_INSTANCE_LAUNCH',
+ 'TopicARN': 'arn:aws:sns:us-east-1:19890506:AutoScaling-Up',
+ }, ignore_params_values=['Version'])
+
+
+class TestDeleteNotificationConfiguration(AWSMockServiceTestCase):
+ connection_class = AutoScaleConnection
+
+ def setUp(self):
+ super(TestDeleteNotificationConfiguration, self).setUp()
+
+ def default_body(self):
+ return """
+ <DeleteNotificationConfigurationResponse>
+ <ResponseMetadata>
+ <RequestId>requestid</RequestId>
+ </ResponseMetadata>
+ </DeleteNotificationConfigurationResponse>
+ """
+
+ def test_autoscaling_group_delete_notification_configuration(self):
+ self.set_http_response(status_code=200)
+ autoscale = AutoScalingGroup(
+ name='ana', launch_config='launch_config',
+ min_size=1, max_size=2,
+ termination_policies=['OldestInstance', 'OldestLaunchConfiguration'])
+ self.service_connection.delete_notification_configuration(autoscale, 'arn:aws:sns:us-east-1:19890506:AutoScaling-Up')
+ self.assert_request_parameters({
+ 'Action': 'DeleteNotificationConfiguration',
+ 'AutoScalingGroupName': 'ana',
+ 'TopicARN': 'arn:aws:sns:us-east-1:19890506:AutoScaling-Up',
+ }, ignore_params_values=['Version'])
+
+class TestAutoScalingTag(AWSMockServiceTestCase):
+ connection_class = AutoScaleConnection
+
+ def default_body(self):
+ return """
+ <CreateOrUpdateTagsResponse>
+ <ResponseMetadata>
+ <RequestId>requestId</RequestId>
+ </ResponseMetadata>
+ </CreateOrUpdateTagsResponse>
+ """
+
+ def test_create_or_update_tags(self):
+ self.set_http_response(status_code=200)
+
+ tags = [
+ Tag(
+ connection=self.service_connection,
+ key='alpha',
+ value='tango',
+ resource_id='sg-00000000',
+ resource_type='auto-scaling-group',
+ propagate_at_launch=True
+ ),
+ Tag(
+ connection=self.service_connection,
+ key='bravo',
+ value='sierra',
+ resource_id='sg-00000000',
+ resource_type='auto-scaling-group',
+ propagate_at_launch=False
+ )]
+
+
+ response = self.service_connection.create_or_update_tags(tags)
+
+ self.assert_request_parameters({
+ 'Action': 'CreateOrUpdateTags',
+ 'Tags.member.1.ResourceType': 'auto-scaling-group',
+ 'Tags.member.1.ResourceId': 'sg-00000000',
+ 'Tags.member.1.Key': 'alpha',
+ 'Tags.member.1.Value': 'tango',
+ 'Tags.member.1.PropagateAtLaunch': 'true',
+ 'Tags.member.2.ResourceType': 'auto-scaling-group',
+ 'Tags.member.2.ResourceId': 'sg-00000000',
+ 'Tags.member.2.Key': 'bravo',
+ 'Tags.member.2.Value': 'sierra',
+ 'Tags.member.2.PropagateAtLaunch': 'false'
+ }, ignore_params_values=['Version'])
+
+ def test_endElement(self):
+ for i in [
+ ('Key', 'mykey', 'key'),
+ ('Value', 'myvalue', 'value'),
+ ('ResourceType', 'auto-scaling-group', 'resource_type'),
+ ('ResourceId', 'sg-01234567', 'resource_id'),
+ ('PropagateAtLaunch', 'true', 'propagate_at_launch')]:
+ self.check_tag_attributes_set(i[0], i[1], i[2])
+
+ def check_tag_attributes_set(self, name, value, attr):
+ tag = Tag()
+ tag.endElement(name, value, None)
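+        # Tag.endElement is expected to store the literal string 'true' as
+        # boolean True (used for PropagateAtLaunch); other values are kept
+        # verbatim, which is exactly what the branches below assert.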
+ if value == 'true':
+ self.assertEqual(getattr(tag, attr), True)
+ else:
+ self.assertEqual(getattr(tag, attr), value)
if __name__ == '__main__':
unittest.main()
diff --git a/tests/unit/ec2/elb/test_loadbalancer.py b/tests/unit/ec2/elb/test_loadbalancer.py
index d5e126c2..3577d7f8 100644
--- a/tests/unit/ec2/elb/test_loadbalancer.py
+++ b/tests/unit/ec2/elb/test_loadbalancer.py
@@ -6,6 +6,7 @@ from tests.unit import AWSMockServiceTestCase
import mock
from boto.ec2.elb import ELBConnection
+from boto.ec2.elb import LoadBalancer
DISABLE_RESPONSE = r"""<?xml version="1.0" encoding="UTF-8"?>
<DisableAvailabilityZonesForLoadBalancerResult xmlns="http://ec2.amazonaws.com/doc/2013-02-01/">
@@ -29,5 +30,101 @@ class TestInstanceStatusResponseParsing(unittest.TestCase):
self.assertEqual(disabled, ['sample-zone'])
+DESCRIBE_RESPONSE = r"""<?xml version="1.0" encoding="UTF-8"?>
+<DescribeLoadBalancersResponse xmlns="http://elasticloadbalancing.amazonaws.com/doc/2012-06-01/">
+ <DescribeLoadBalancersResult>
+ <LoadBalancerDescriptions>
+ <member>
+ <SecurityGroups/>
+ <CreatedTime>2013-07-09T19:18:00.520Z</CreatedTime>
+ <LoadBalancerName>elb-boto-unit-test</LoadBalancerName>
+ <HealthCheck/>
+ <ListenerDescriptions>
+ <member>
+ <PolicyNames/>
+ <Listener/>
+ </member>
+ </ListenerDescriptions>
+ <Instances/>
+ <Policies>
+ <AppCookieStickinessPolicies/>
+ <OtherPolicies>
+ <member>AWSConsole-SSLNegotiationPolicy-my-test-loadbalancer</member>
+ <member>EnableProxyProtocol</member>
+ </OtherPolicies>
+ <LBCookieStickinessPolicies/>
+ </Policies>
+ <AvailabilityZones>
+ <member>us-east-1a</member>
+ </AvailabilityZones>
+ <CanonicalHostedZoneName>elb-boto-unit-test-408121642.us-east-1.elb.amazonaws.com</CanonicalHostedZoneName>
+ <CanonicalHostedZoneNameID>Z3DZXE0Q79N41H</CanonicalHostedZoneNameID>
+ <Scheme>internet-facing</Scheme>
+ <SourceSecurityGroup>
+ <OwnerAlias>amazon-elb</OwnerAlias>
+ <GroupName>amazon-elb-sg</GroupName>
+ </SourceSecurityGroup>
+ <DNSName>elb-boto-unit-test-408121642.us-east-1.elb.amazonaws.com</DNSName>
+ <BackendServerDescriptions>
+ <member>
+ <PolicyNames>
+ <member>EnableProxyProtocol</member>
+ </PolicyNames>
+ <InstancePort>80</InstancePort>
+ </member>
+ </BackendServerDescriptions>
+ <Subnets/>
+ </member>
+ </LoadBalancerDescriptions>
+ </DescribeLoadBalancersResult>
+ <ResponseMetadata>
+ <RequestId>5763d932-e8cc-11e2-a940-11136cceffb8</RequestId>
+ </ResponseMetadata>
+</DescribeLoadBalancersResponse>
+"""
+
+class TestDescribeLoadBalancers(unittest.TestCase):
+ def test_other_policy(self):
+ elb = ELBConnection(aws_access_key_id='aws_access_key_id',
+ aws_secret_access_key='aws_secret_access_key')
+ mock_response = mock.Mock()
+ mock_response.read.return_value = DESCRIBE_RESPONSE
+ mock_response.status = 200
+ elb.make_request = mock.Mock(return_value=mock_response)
+ load_balancers = elb.get_all_load_balancers()
+ self.assertEqual(len(load_balancers), 1)
+
+ lb = load_balancers[0]
+ self.assertEqual(len(lb.policies.other_policies), 2)
+ self.assertEqual(lb.policies.other_policies[0].policy_name,
+ 'AWSConsole-SSLNegotiationPolicy-my-test-loadbalancer')
+ self.assertEqual(lb.policies.other_policies[1].policy_name,
+ 'EnableProxyProtocol')
+
+ self.assertEqual(len(lb.backends), 1)
+ self.assertEqual(len(lb.backends[0].policies), 1)
+ self.assertEqual(lb.backends[0].policies[0].policy_name,
+ 'EnableProxyProtocol')
+ self.assertEqual(lb.backends[0].instance_port, 80)
+
+
+DETACH_RESPONSE = r"""<?xml version="1.0" encoding="UTF-8"?>
+<DetachLoadBalancerFromSubnets xmlns="http://ec2.amazonaws.com/doc/2013-02-01/">
+ <requestId>3be1508e-c444-4fef-89cc-0b1223c4f02fEXAMPLE</requestId>
+</DetachLoadBalancerFromSubnets>
+"""
+
+class TestDetachSubnets(unittest.TestCase):
+ def test_detach_subnets(self):
+ elb = ELBConnection(aws_access_key_id='aws_access_key_id',
+ aws_secret_access_key='aws_secret_access_key')
+ lb = LoadBalancer(elb, "mylb")
+
+ mock_response = mock.Mock()
+ mock_response.read.return_value = DETACH_RESPONSE
+ mock_response.status = 200
+ elb.make_request = mock.Mock(return_value=mock_response)
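+        # No explicit assertions follow; the test passes as long as the
+        # request is built and the canned DETACH_RESPONSE parses without
+        # raising.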
+ lb.detach_subnets("s-xxx")
+
if __name__ == '__main__':
unittest.main()
diff --git a/tests/unit/ec2/test_address.py b/tests/unit/ec2/test_address.py
index f2661979..765ce422 100644
--- a/tests/unit/ec2/test_address.py
+++ b/tests/unit/ec2/test_address.py
@@ -25,15 +25,25 @@ class AddressTest(unittest.TestCase):
def test_release_calls_connection_release_address_with_correct_args(self):
self.address.release()
- self.address.connection.release_address.assert_called_with("192.168.1.1")
+ self.address.connection.release_address.assert_called_with(
+ "192.168.1.1",
+ dry_run=False
+ )
def test_associate_calls_connection_associate_address_with_correct_args(self):
self.address.associate(1)
- self.address.connection.associate_address.assert_called_with(1, "192.168.1.1")
+ self.address.connection.associate_address.assert_called_with(
+ 1,
+ "192.168.1.1",
+ dry_run=False
+ )
def test_disassociate_calls_connection_disassociate_address_with_correct_args(self):
self.address.disassociate()
- self.address.connection.disassociate_address.assert_called_with("192.168.1.1")
+ self.address.connection.disassociate_address.assert_called_with(
+ "192.168.1.1",
+ dry_run=False
+ )
if __name__ == "__main__":
unittest.main()
diff --git a/tests/unit/ec2/test_blockdevicemapping.py b/tests/unit/ec2/test_blockdevicemapping.py
index 02ecf582..78539744 100644
--- a/tests/unit/ec2/test_blockdevicemapping.py
+++ b/tests/unit/ec2/test_blockdevicemapping.py
@@ -1,8 +1,12 @@
import mock
import unittest
+from boto.ec2.connection import EC2Connection
from boto.ec2.blockdevicemapping import BlockDeviceType, BlockDeviceMapping
+from tests.unit import AWSMockServiceTestCase
+
+
class BlockDeviceTypeTests(unittest.TestCase):
def setUp(self):
self.block_device_type = BlockDeviceType()
@@ -75,5 +79,55 @@ class BlockDeviceMappingTests(unittest.TestCase):
self.block_device_mapping.endElement("item", "some item", None)
self.assertEqual(self.block_device_mapping["some name"], "some value")
+
+class TestLaunchConfiguration(AWSMockServiceTestCase):
+ connection_class = EC2Connection
+
+ def default_body(self):
+ # This is a dummy response
+ return """
+ <DescribeLaunchConfigurationsResponse>
+ </DescribeLaunchConfigurationsResponse>
+ """
+
+ def test_run_instances_block_device_mapping(self):
+ # Same as the test in ``unit/ec2/autoscale/test_group.py:TestLaunchConfiguration``,
+ # but with modified request parameters (due to a mismatch between EC2 &
+ # Autoscaling).
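+        # EC2 serializes the mapping as 'BlockDeviceMapping.N.Ebs.*'
+        # parameters, as asserted below, whereas the Auto Scaling
+        # launch-configuration test asserts a different parameter layout.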
+ self.set_http_response(status_code=200)
+ dev_sdf = BlockDeviceType(snapshot_id='snap-12345')
+ dev_sdg = BlockDeviceType(snapshot_id='snap-12346')
+
+ bdm = BlockDeviceMapping()
+ bdm['/dev/sdf'] = dev_sdf
+ bdm['/dev/sdg'] = dev_sdg
+
+ response = self.service_connection.run_instances(
+ image_id='123456',
+ instance_type='m1.large',
+ security_groups=['group1', 'group2'],
+ block_device_map=bdm
+ )
+
+ self.assert_request_parameters({
+ 'Action': 'RunInstances',
+ 'BlockDeviceMapping.1.DeviceName': '/dev/sdf',
+ 'BlockDeviceMapping.1.Ebs.DeleteOnTermination': 'false',
+ 'BlockDeviceMapping.1.Ebs.SnapshotId': 'snap-12345',
+ 'BlockDeviceMapping.2.DeviceName': '/dev/sdg',
+ 'BlockDeviceMapping.2.Ebs.DeleteOnTermination': 'false',
+ 'BlockDeviceMapping.2.Ebs.SnapshotId': 'snap-12346',
+ 'ImageId': '123456',
+ 'InstanceType': 'm1.large',
+ 'MaxCount': 1,
+ 'MinCount': 1,
+ 'SecurityGroup.1': 'group1',
+ 'SecurityGroup.2': 'group2',
+ }, ignore_params_values=[
+ 'Version', 'AWSAccessKeyId', 'SignatureMethod', 'SignatureVersion',
+ 'Timestamp'
+ ])
+
+
if __name__ == "__main__":
unittest.main()
diff --git a/tests/unit/ec2/test_connection.py b/tests/unit/ec2/test_connection.py
index d06288dc..deb6759c 100644
--- a/tests/unit/ec2/test_connection.py
+++ b/tests/unit/ec2/test_connection.py
@@ -1,8 +1,17 @@
#!/usr/bin/env python
+import httplib
+
+from datetime import datetime, timedelta
+from mock import MagicMock, Mock, patch
from tests.unit import unittest
from tests.unit import AWSMockServiceTestCase
+import boto.ec2
+
+from boto.regioninfo import RegionInfo
from boto.ec2.connection import EC2Connection
+from boto.ec2.snapshot import Snapshot
+from boto.ec2.reservedinstance import ReservedInstancesConfiguration
class TestEC2ConnectionBase(AWSMockServiceTestCase):
@@ -475,6 +484,47 @@ class TestCopySnapshot(TestEC2ConnectionBase):
'SignatureVersion', 'Timestamp',
'Version'])
+class TestCopyImage(TestEC2ConnectionBase):
+ def default_body(self):
+ return """
+ <CopyImageResponse xmlns="http://ec2.amazonaws.com/doc/2013-07-15/">
+ <requestId>request_id</requestId>
+ <imageId>ami-copied-id</imageId>
+ </CopyImageResponse>
+ """
+
+ def test_copy_image(self):
+ self.set_http_response(status_code=200)
+ copied_ami = self.ec2.copy_image('us-west-2', 'ami-id',
+ 'name', 'description', 'client-token')
+ self.assertEqual(copied_ami.image_id, 'ami-copied-id')
+
+ self.assert_request_parameters({
+ 'Action': 'CopyImage',
+ 'Description': 'description',
+ 'Name': 'name',
+ 'SourceRegion': 'us-west-2',
+ 'SourceImageId': 'ami-id',
+ 'ClientToken': 'client-token'},
+ ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
+ 'SignatureVersion', 'Timestamp',
+ 'Version'])
+
+    def test_copy_image_without_name(self):
+ self.set_http_response(status_code=200)
+ copied_ami = self.ec2.copy_image('us-west-2', 'ami-id',
+ description='description',
+ client_token='client-token')
+ self.assertEqual(copied_ami.image_id, 'ami-copied-id')
+
+ self.assert_request_parameters({
+ 'Action': 'CopyImage',
+ 'Description': 'description',
+ 'SourceRegion': 'us-west-2',
+ 'SourceImageId': 'ami-id',
+ 'ClientToken': 'client-token'},
+ ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
+ 'SignatureVersion', 'Timestamp',
+ 'Version'])
class TestAccountAttributes(TestEC2ConnectionBase):
def default_body(self):
@@ -562,5 +612,612 @@ class TestDescribeVPCAttribute(TestEC2ConnectionBase):
'Version'])
+class TestGetAllNetworkInterfaces(TestEC2ConnectionBase):
+ def default_body(self):
+ return """
+<DescribeNetworkInterfacesResponse xmlns="http://ec2.amazonaws.com/\
+ doc/2013-06-15/">
+ <requestId>fc45294c-006b-457b-bab9-012f5b3b0e40</requestId>
+ <networkInterfaceSet>
+ <item>
+ <networkInterfaceId>eni-0f62d866</networkInterfaceId>
+ <subnetId>subnet-c53c87ac</subnetId>
+ <vpcId>vpc-cc3c87a5</vpcId>
+ <availabilityZone>ap-southeast-1b</availabilityZone>
+ <description/>
+ <ownerId>053230519467</ownerId>
+ <requesterManaged>false</requesterManaged>
+ <status>in-use</status>
+ <macAddress>02:81:60:cb:27:37</macAddress>
+ <privateIpAddress>10.0.0.146</privateIpAddress>
+ <sourceDestCheck>true</sourceDestCheck>
+ <groupSet>
+ <item>
+ <groupId>sg-3f4b5653</groupId>
+ <groupName>default</groupName>
+ </item>
+ </groupSet>
+ <attachment>
+ <attachmentId>eni-attach-6537fc0c</attachmentId>
+ <instanceId>i-22197876</instanceId>
+ <instanceOwnerId>053230519467</instanceOwnerId>
+ <deviceIndex>5</deviceIndex>
+ <status>attached</status>
+ <attachTime>2012-07-01T21:45:27.000Z</attachTime>
+ <deleteOnTermination>true</deleteOnTermination>
+ </attachment>
+ <tagSet/>
+ <privateIpAddressesSet>
+ <item>
+ <privateIpAddress>10.0.0.146</privateIpAddress>
+ <primary>true</primary>
+ </item>
+ <item>
+ <privateIpAddress>10.0.0.148</privateIpAddress>
+ <primary>false</primary>
+ </item>
+ <item>
+ <privateIpAddress>10.0.0.150</privateIpAddress>
+ <primary>false</primary>
+ </item>
+ </privateIpAddressesSet>
+ </item>
+ </networkInterfaceSet>
+</DescribeNetworkInterfacesResponse>"""
+
+ def test_attachment_has_device_index(self):
+ self.set_http_response(status_code=200)
+ parsed = self.ec2.get_all_network_interfaces()
+
+ self.assertEqual(5, parsed[0].attachment.device_index)
+
+class TestGetAllImages(TestEC2ConnectionBase):
+ def default_body(self):
+ return """
+<DescribeImagesResponse xmlns="http://ec2.amazonaws.com/doc/2013-02-01/">
+ <requestId>e32375e8-4ac3-4099-a8bf-3ec902b9023e</requestId>
+ <imagesSet>
+ <item>
+ <imageId>ami-abcd1234</imageId>
+ <imageLocation>111111111111/windows2008r2-hvm-i386-20130702</imageLocation>
+ <imageState>available</imageState>
+ <imageOwnerId>111111111111</imageOwnerId>
+ <isPublic>false</isPublic>
+ <architecture>i386</architecture>
+ <imageType>machine</imageType>
+ <platform>windows</platform>
+ <viridianEnabled>true</viridianEnabled>
+ <name>Windows Test</name>
+ <description>Windows Test Description</description>
+ <billingProducts>
+ <item>
+ <billingProduct>bp-6ba54002</billingProduct>
+ </item>
+ </billingProducts>
+ <rootDeviceType>ebs</rootDeviceType>
+ <rootDeviceName>/dev/sda1</rootDeviceName>
+ <blockDeviceMapping>
+ <item>
+ <deviceName>/dev/sda1</deviceName>
+ <ebs>
+ <snapshotId>snap-abcd1234</snapshotId>
+ <volumeSize>30</volumeSize>
+ <deleteOnTermination>true</deleteOnTermination>
+ <volumeType>standard</volumeType>
+ </ebs>
+ </item>
+ <item>
+ <deviceName>xvdb</deviceName>
+ <virtualName>ephemeral0</virtualName>
+ </item>
+ <item>
+ <deviceName>xvdc</deviceName>
+ <virtualName>ephemeral1</virtualName>
+ </item>
+ <item>
+ <deviceName>xvdd</deviceName>
+ <virtualName>ephemeral2</virtualName>
+ </item>
+ <item>
+ <deviceName>xvde</deviceName>
+ <virtualName>ephemeral3</virtualName>
+ </item>
+ </blockDeviceMapping>
+ <virtualizationType>hvm</virtualizationType>
+ <hypervisor>xen</hypervisor>
+ </item>
+ </imagesSet>
+</DescribeImagesResponse>"""
+
+ def test_get_all_images(self):
+ self.set_http_response(status_code=200)
+ parsed = self.ec2.get_all_images()
+ self.assertEquals(1, len(parsed))
+ self.assertEquals("ami-abcd1234", parsed[0].id)
+ self.assertEquals("111111111111/windows2008r2-hvm-i386-20130702", parsed[0].location)
+ self.assertEquals("available", parsed[0].state)
+ self.assertEquals("111111111111", parsed[0].ownerId)
+ self.assertEquals("111111111111", parsed[0].owner_id)
+ self.assertEquals(False, parsed[0].is_public)
+ self.assertEquals("i386", parsed[0].architecture)
+ self.assertEquals("machine", parsed[0].type)
+ self.assertEquals(None, parsed[0].kernel_id)
+ self.assertEquals(None, parsed[0].ramdisk_id)
+ self.assertEquals(None, parsed[0].owner_alias)
+ self.assertEquals("windows", parsed[0].platform)
+ self.assertEquals("Windows Test", parsed[0].name)
+ self.assertEquals("Windows Test Description", parsed[0].description)
+ self.assertEquals("ebs", parsed[0].root_device_type)
+ self.assertEquals("/dev/sda1", parsed[0].root_device_name)
+ self.assertEquals("hvm", parsed[0].virtualization_type)
+ self.assertEquals("xen", parsed[0].hypervisor)
+ self.assertEquals(None, parsed[0].instance_lifecycle)
+
+ # 1 billing product parsed into a list
+ self.assertEquals(1, len(parsed[0].billing_products))
+ self.assertEquals("bp-6ba54002", parsed[0].billing_products[0])
+
+ # Just verify length, there is already a block_device_mapping test
+ self.assertEquals(5, len(parsed[0].block_device_mapping))
+
+ # TODO: No tests for product codes?
+
+
+class TestModifyInterfaceAttribute(TestEC2ConnectionBase):
+ def default_body(self):
+ return """
+<ModifyNetworkInterfaceAttributeResponse \
+ xmlns="http://ec2.amazonaws.com/doc/2013-06-15/">
+ <requestId>657a4623-5620-4232-b03b-427e852d71cf</requestId>
+ <return>true</return>
+</ModifyNetworkInterfaceAttributeResponse>
+"""
+
+ def test_modify_description(self):
+ self.set_http_response(status_code=200)
+ self.ec2.modify_network_interface_attribute('id', 'description', 'foo')
+
+ self.assert_request_parameters({
+ 'Action': 'ModifyNetworkInterfaceAttribute',
+ 'NetworkInterfaceId': 'id',
+ 'Description.Value': 'foo'},
+ ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
+ 'SignatureVersion', 'Timestamp',
+ 'Version'])
+
+ def test_modify_source_dest_check_bool(self):
+ self.set_http_response(status_code=200)
+ self.ec2.modify_network_interface_attribute('id', 'sourceDestCheck',
+ True)
+
+ self.assert_request_parameters({
+ 'Action': 'ModifyNetworkInterfaceAttribute',
+ 'NetworkInterfaceId': 'id',
+ 'SourceDestCheck.Value': 'true'},
+ ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
+ 'SignatureVersion', 'Timestamp',
+ 'Version'])
+
+ def test_modify_source_dest_check_str(self):
+ self.set_http_response(status_code=200)
+ self.ec2.modify_network_interface_attribute('id', 'sourceDestCheck',
+ 'true')
+
+ self.assert_request_parameters({
+ 'Action': 'ModifyNetworkInterfaceAttribute',
+ 'NetworkInterfaceId': 'id',
+ 'SourceDestCheck.Value': 'true'},
+ ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
+ 'SignatureVersion', 'Timestamp',
+ 'Version'])
+
+ def test_modify_source_dest_check_invalid(self):
+ self.set_http_response(status_code=200)
+
+ with self.assertRaises(ValueError):
+ self.ec2.modify_network_interface_attribute('id',
+ 'sourceDestCheck',
+ 123)
+
+ def test_modify_delete_on_termination_str(self):
+ self.set_http_response(status_code=200)
+ self.ec2.modify_network_interface_attribute('id',
+ 'deleteOnTermination',
+ True, attachment_id='bar')
+
+ self.assert_request_parameters({
+ 'Action': 'ModifyNetworkInterfaceAttribute',
+ 'NetworkInterfaceId': 'id',
+ 'Attachment.AttachmentId': 'bar',
+ 'Attachment.DeleteOnTermination': 'true'},
+ ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
+ 'SignatureVersion', 'Timestamp',
+ 'Version'])
+
+ def test_modify_delete_on_termination_bool(self):
+ self.set_http_response(status_code=200)
+ self.ec2.modify_network_interface_attribute('id',
+ 'deleteOnTermination',
+ 'false',
+ attachment_id='bar')
+
+ self.assert_request_parameters({
+ 'Action': 'ModifyNetworkInterfaceAttribute',
+ 'NetworkInterfaceId': 'id',
+ 'Attachment.AttachmentId': 'bar',
+ 'Attachment.DeleteOnTermination': 'false'},
+ ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
+ 'SignatureVersion', 'Timestamp',
+ 'Version'])
+
+ def test_modify_delete_on_termination_invalid(self):
+ self.set_http_response(status_code=200)
+
+ with self.assertRaises(ValueError):
+ self.ec2.modify_network_interface_attribute('id',
+ 'deleteOnTermination',
+ 123,
+ attachment_id='bar')
+
+ def test_modify_group_set_list(self):
+ self.set_http_response(status_code=200)
+ self.ec2.modify_network_interface_attribute('id', 'groupSet',
+ ['sg-1', 'sg-2'])
+
+ self.assert_request_parameters({
+ 'Action': 'ModifyNetworkInterfaceAttribute',
+ 'NetworkInterfaceId': 'id',
+ 'SecurityGroupId.1': 'sg-1',
+ 'SecurityGroupId.2': 'sg-2'},
+ ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
+ 'SignatureVersion', 'Timestamp',
+ 'Version'])
+
+ def test_modify_group_set_invalid(self):
+ self.set_http_response(status_code=200)
+
+ with self.assertRaisesRegexp(TypeError, 'iterable'):
+ self.ec2.modify_network_interface_attribute('id', 'groupSet',
+ False)
+
+ def test_modify_attr_invalid(self):
+ self.set_http_response(status_code=200)
+
+ with self.assertRaisesRegexp(ValueError, 'Unknown attribute'):
+ self.ec2.modify_network_interface_attribute('id', 'invalid', 0)
+
+
+class TestConnectToRegion(unittest.TestCase):
+ def setUp(self):
+ self.https_connection = Mock(spec=httplib.HTTPSConnection)
+ self.https_connection_factory = (
+ Mock(return_value=self.https_connection), ())
+
+ def test_aws_region(self):
+ region = boto.ec2.RegionData.keys()[0]
+ self.ec2 = boto.ec2.connect_to_region(region,
+ https_connection_factory=self.https_connection_factory,
+ aws_access_key_id='aws_access_key_id',
+ aws_secret_access_key='aws_secret_access_key'
+ )
+ self.assertEqual(boto.ec2.RegionData[region], self.ec2.host)
+
+ def test_non_aws_region(self):
+ self.ec2 = boto.ec2.connect_to_region('foo',
+ https_connection_factory=self.https_connection_factory,
+ aws_access_key_id='aws_access_key_id',
+ aws_secret_access_key='aws_secret_access_key',
+            region=RegionInfo(name='foo', endpoint='https://foo.com/bar')
+ )
+ self.assertEqual('https://foo.com/bar', self.ec2.host)
+
+ def test_missing_region(self):
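+        # Without an explicit region= override, connect_to_region should
+        # return None for a name that is not present in RegionData.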
+ self.ec2 = boto.ec2.connect_to_region('foo',
+ https_connection_factory=self.https_connection_factory,
+ aws_access_key_id='aws_access_key_id',
+ aws_secret_access_key='aws_secret_access_key'
+ )
+ self.assertEqual(None, self.ec2)
+
+
+class TestTrimSnapshots(TestEC2ConnectionBase):
+ """
+ Test snapshot trimming functionality by ensuring that expected calls
+ are made when given a known set of volume snapshots.
+ """
+ def _get_snapshots(self):
+ """
+ Generate a list of fake snapshots with names and dates.
+ """
+ snaps = []
+
+ # Generate some dates offset by days, weeks, months
+ now = datetime.now()
+ dates = [
+ now,
+ now - timedelta(days=1),
+ now - timedelta(days=2),
+ now - timedelta(days=7),
+ now - timedelta(days=14),
+ datetime(now.year, now.month, 1) - timedelta(days=30),
+ datetime(now.year, now.month, 1) - timedelta(days=60),
+ datetime(now.year, now.month, 1) - timedelta(days=90)
+ ]
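+        # The first five timestamps fall inside the recent daily/weekly
+        # windows; the three month-anchored dates exercise the monthly
+        # retention path (test_trim_months below expects two deletions when
+        # monthly_backups=1).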
+
+ for date in dates:
+ # Create a fake snapshot for each date
+ snap = Snapshot(self.ec2)
+ snap.tags['Name'] = 'foo'
+ # Times are expected to be ISO8601 strings
+ snap.start_time = date.strftime('%Y-%m-%dT%H:%M:%S.000Z')
+ snaps.append(snap)
+
+ return snaps
+
+ def test_trim_defaults(self):
+ """
+ Test trimming snapshots with the default arguments, which should
+ keep all monthly backups forever. The result of this test should
+ be that nothing is deleted.
+ """
+ # Setup mocks
+ orig = {
+ 'get_all_snapshots': self.ec2.get_all_snapshots,
+ 'delete_snapshot': self.ec2.delete_snapshot
+ }
+
+ snaps = self._get_snapshots()
+
+ self.ec2.get_all_snapshots = MagicMock(return_value=snaps)
+ self.ec2.delete_snapshot = MagicMock()
+
+ # Call the tested method
+ self.ec2.trim_snapshots()
+
+ # Assertions
+ self.assertEqual(True, self.ec2.get_all_snapshots.called)
+ self.assertEqual(False, self.ec2.delete_snapshot.called)
+
+ # Restore
+ self.ec2.get_all_snapshots = orig['get_all_snapshots']
+ self.ec2.delete_snapshot = orig['delete_snapshot']
+
+ def test_trim_months(self):
+ """
+ Test trimming monthly snapshots and ensure that older months
+ get deleted properly. The result of this test should be that
+ the two oldest snapshots get deleted.
+ """
+ # Setup mocks
+ orig = {
+ 'get_all_snapshots': self.ec2.get_all_snapshots,
+ 'delete_snapshot': self.ec2.delete_snapshot
+ }
+
+ snaps = self._get_snapshots()
+
+ self.ec2.get_all_snapshots = MagicMock(return_value=snaps)
+ self.ec2.delete_snapshot = MagicMock()
+
+ # Call the tested method
+ self.ec2.trim_snapshots(monthly_backups=1)
+
+ # Assertions
+ self.assertEqual(True, self.ec2.get_all_snapshots.called)
+ self.assertEqual(2, self.ec2.delete_snapshot.call_count)
+
+ # Restore
+ self.ec2.get_all_snapshots = orig['get_all_snapshots']
+ self.ec2.delete_snapshot = orig['delete_snapshot']
+
+
+class TestModifyReservedInstances(TestEC2ConnectionBase):
+ def default_body(self):
+ return """<ModifyReservedInstancesResponse xmlns='http://ec2.amazonaws.com/doc/2013-08-15/'>
+ <requestId>bef729b6-0731-4489-8881-2258746ae163</requestId>
+ <reservedInstancesModificationId>rimod-3aae219d-3d63-47a9-a7e9-e764example</reservedInstancesModificationId>
+</ModifyReservedInstancesResponse>"""
+
+ def test_serialized_api_args(self):
+ self.set_http_response(status_code=200)
+ response = self.ec2.modify_reserved_instances(
+ 'a-token-goes-here',
+ reserved_instance_ids=[
+ '2567o137-8a55-48d6-82fb-7258506bb497',
+ ],
+ target_configurations=[
+ ReservedInstancesConfiguration(
+ availability_zone='us-west-2c',
+ platform='EC2-VPC',
+ instance_count=3
+ ),
+ ]
+ )
+ self.assert_request_parameters({
+ 'Action': 'ModifyReservedInstances',
+ 'ClientToken': 'a-token-goes-here',
+ 'ReservedInstancesConfigurationSetItemType.0.AvailabilityZone': 'us-west-2c',
+ 'ReservedInstancesConfigurationSetItemType.0.InstanceCount': 3,
+ 'ReservedInstancesConfigurationSetItemType.0.Platform': 'EC2-VPC',
+ 'ReservedInstancesId.1': '2567o137-8a55-48d6-82fb-7258506bb497'
+ }, ignore_params_values=[
+ 'AWSAccessKeyId', 'SignatureMethod',
+ 'SignatureVersion', 'Timestamp',
+ 'Version'
+ ])
+
+ self.assertEqual(response, 'rimod-3aae219d-3d63-47a9-a7e9-e764example')
+
+
+class TestDescribeReservedInstancesModifications(TestEC2ConnectionBase):
+ def default_body(self):
+ return """<DescribeReservedInstancesModificationsResponse xmlns='http://ec2.amazonaws.com/doc/2013-08-15/'>
+ <requestId>eb4a6e3c-3689-445c-b536-19e38df35898</requestId>
+ <reservedInstancesModificationsSet>
+ <item>
+ <reservedInstancesModificationId>rimod-49b9433e-fdc7-464a-a6e5-9dabcexample</reservedInstancesModificationId>
+ <reservedInstancesSet>
+ <item>
+ <reservedInstancesId>2567o137-8a55-48d6-82fb-7258506bb497</reservedInstancesId>
+ </item>
+ </reservedInstancesSet>
+ <modificationResultSet>
+ <item>
+ <reservedInstancesId>9d5cb137-5d65-4479-b4ac-8c337example</reservedInstancesId>
+ <targetConfiguration>
+ <availabilityZone>us-east-1b</availabilityZone>
+ <platform>EC2-VPC</platform>
+ <instanceCount>1</instanceCount>
+ </targetConfiguration>
+ </item>
+ </modificationResultSet>
+ <createDate>2013-09-02T21:20:19.637Z</createDate>
+ <updateDate>2013-09-02T21:38:24.143Z</updateDate>
+ <effectiveDate>2013-09-02T21:00:00.000Z</effectiveDate>
+ <status>fulfilled</status>
+ <clientToken>token-f5b56c05-09b0-4d17-8d8c-c75d8a67b806</clientToken>
+ </item>
+ </reservedInstancesModificationsSet>
+</DescribeReservedInstancesModificationsResponse>"""
+
+ def test_serialized_api_args(self):
+ self.set_http_response(status_code=200)
+ response = self.ec2.describe_reserved_instances_modifications(
+ reserved_instances_modification_ids=[
+ '2567o137-8a55-48d6-82fb-7258506bb497'
+ ],
+ filters={
+ 'status': 'processing',
+ }
+ )
+ self.assert_request_parameters({
+ 'Action': 'DescribeReservedInstancesModifications',
+ 'Filter.1.Name': 'status',
+ 'Filter.1.Value.1': 'processing',
+ 'ReservedInstancesModificationId.1': '2567o137-8a55-48d6-82fb-7258506bb497'
+ }, ignore_params_values=[
+ 'AWSAccessKeyId', 'SignatureMethod',
+ 'SignatureVersion', 'Timestamp',
+ 'Version'
+ ])
+
+ # Make sure the response was parsed correctly.
+ self.assertEqual(
+ response[0].modification_id,
+ 'rimod-49b9433e-fdc7-464a-a6e5-9dabcexample'
+ )
+ self.assertEqual(
+ response[0].create_date,
+ datetime(2013, 9, 2, 21, 20, 19, 637000)
+ )
+ self.assertEqual(
+ response[0].update_date,
+ datetime(2013, 9, 2, 21, 38, 24, 143000)
+ )
+ self.assertEqual(
+ response[0].effective_date,
+ datetime(2013, 9, 2, 21, 0, 0, 0)
+ )
+ self.assertEqual(
+ response[0].status,
+ 'fulfilled'
+ )
+ self.assertEqual(
+ response[0].status_message,
+ None
+ )
+ self.assertEqual(
+ response[0].client_token,
+ 'token-f5b56c05-09b0-4d17-8d8c-c75d8a67b806'
+ )
+ self.assertEqual(
+ response[0].reserved_instances[0].id,
+ '2567o137-8a55-48d6-82fb-7258506bb497'
+ )
+ self.assertEqual(
+ response[0].modification_results[0].availability_zone,
+ 'us-east-1b'
+ )
+ self.assertEqual(
+ response[0].modification_results[0].platform,
+ 'EC2-VPC'
+ )
+ self.assertEqual(
+ response[0].modification_results[0].instance_count,
+ 1
+ )
+ self.assertEqual(len(response), 1)
+
+
+class TestRegisterImage(TestEC2ConnectionBase):
+ def default_body(self):
+ return """
+ <RegisterImageResponse xmlns="http://ec2.amazonaws.com/doc/2013-08-15/">
+ <requestId>59dbff89-35bd-4eac-99ed-be587EXAMPLE</requestId>
+ <imageId>ami-1a2b3c4d</imageId>
+ </RegisterImageResponse>
+ """
+
+ def test_vm_type_default(self):
+ self.set_http_response(status_code=200)
+ self.ec2.register_image('name', 'description',
+ image_location='s3://foo')
+
+ self.assert_request_parameters({
+ 'Action': 'RegisterImage',
+ 'ImageLocation': 's3://foo',
+ 'Name': 'name',
+ 'Description': 'description',
+ }, ignore_params_values=[
+ 'AWSAccessKeyId', 'SignatureMethod',
+ 'SignatureVersion', 'Timestamp',
+ 'Version'
+ ])
+
+ def test_vm_type_hvm(self):
+ self.set_http_response(status_code=200)
+ self.ec2.register_image('name', 'description',
+ image_location='s3://foo',
+ virtualization_type='hvm')
+
+ self.assert_request_parameters({
+ 'Action': 'RegisterImage',
+ 'ImageLocation': 's3://foo',
+ 'Name': 'name',
+ 'Description': 'description',
+ 'VirtualizationType': 'hvm'
+ }, ignore_params_values=[
+ 'AWSAccessKeyId', 'SignatureMethod',
+ 'SignatureVersion', 'Timestamp',
+ 'Version'
+ ])
+
+
+class TestTerminateInstances(TestEC2ConnectionBase):
+ def default_body(self):
+ return """<?xml version="1.0" ?>
+ <TerminateInstancesResponse xmlns="http://ec2.amazonaws.com/doc/2013-07-15/">
+ <requestId>req-59a9ad52-0434-470c-ad48-4f89ded3a03e</requestId>
+ <instancesSet>
+ <item>
+ <instanceId>i-000043a2</instanceId>
+ <shutdownState>
+ <code>16</code>
+ <name>running</name>
+ </shutdownState>
+ <previousState>
+ <code>16</code>
+ <name>running</name>
+ </previousState>
+ </item>
+ </instancesSet>
+ </TerminateInstancesResponse>
+ """
+
+ def test_terminate_bad_response(self):
+ self.set_http_response(status_code=200)
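+        # The canned body reports <shutdownState> rather than the usual
+        # <currentState>; the call should still complete without raising.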
+ self.ec2.terminate_instances('foo')
+
+
if __name__ == '__main__':
unittest.main()
diff --git a/tests/unit/ec2/test_instance.py b/tests/unit/ec2/test_instance.py
index c48ef114..6ee0f2f2 100644
--- a/tests/unit/ec2/test_instance.py
+++ b/tests/unit/ec2/test_instance.py
@@ -216,7 +216,7 @@ class TestDescribeInstances(AWSMockServiceTestCase):
def test_multiple_private_ip_addresses(self):
self.set_http_response(status_code=200)
- api_response = self.service_connection.get_all_instances()
+ api_response = self.service_connection.get_all_reservations()
self.assertEqual(len(api_response), 1)
instances = api_response[0].instances
diff --git a/tests/unit/ec2/test_networkinterface.py b/tests/unit/ec2/test_networkinterface.py
index b23f6c36..81fa4aef 100644
--- a/tests/unit/ec2/test_networkinterface.py
+++ b/tests/unit/ec2/test_networkinterface.py
@@ -23,7 +23,7 @@
from tests.unit import unittest
-
+from boto.exception import BotoClientError
from boto.ec2.networkinterface import NetworkInterfaceCollection
from boto.ec2.networkinterface import NetworkInterfaceSpecification
from boto.ec2.networkinterface import PrivateIPAddress
@@ -42,7 +42,8 @@ class TestNetworkInterfaceCollection(unittest.TestCase):
description='description1',
private_ip_address='10.0.0.54', delete_on_termination=False,
private_ip_addresses=[self.private_ip_address1,
- self.private_ip_address2])
+ self.private_ip_address2]
+ )
self.private_ip_address3 = PrivateIPAddress(
private_ip_address='10.0.1.10', primary=False)
@@ -54,7 +55,18 @@ class TestNetworkInterfaceCollection(unittest.TestCase):
groups=['group_id1', 'group_id2'],
private_ip_address='10.0.1.54', delete_on_termination=False,
private_ip_addresses=[self.private_ip_address3,
- self.private_ip_address4])
+ self.private_ip_address4]
+ )
+
+ self.network_interfaces_spec3 = NetworkInterfaceSpecification(
+ device_index=0, subnet_id='subnet_id2',
+ description='description2',
+ groups=['group_id1', 'group_id2'],
+ private_ip_address='10.0.1.54', delete_on_termination=False,
+ private_ip_addresses=[self.private_ip_address3,
+ self.private_ip_address4],
+ associate_public_ip_address=True
+ )
def test_param_serialization(self):
collection = NetworkInterfaceCollection(self.network_interfaces_spec1,
@@ -62,34 +74,33 @@ class TestNetworkInterfaceCollection(unittest.TestCase):
params = {}
collection.build_list_params(params)
self.assertDictEqual(params, {
- 'NetworkInterface.1.DeviceIndex': '1',
- 'NetworkInterface.1.DeleteOnTermination': 'false',
- 'NetworkInterface.1.Description': 'description1',
- 'NetworkInterface.1.PrivateIpAddress': '10.0.0.54',
- 'NetworkInterface.1.SubnetId': 'subnet_id',
- 'NetworkInterface.1.PrivateIpAddresses.1.Primary': 'false',
- 'NetworkInterface.1.PrivateIpAddresses.1.PrivateIpAddress':
+ 'NetworkInterface.0.DeviceIndex': '1',
+ 'NetworkInterface.0.DeleteOnTermination': 'false',
+ 'NetworkInterface.0.Description': 'description1',
+ 'NetworkInterface.0.PrivateIpAddress': '10.0.0.54',
+ 'NetworkInterface.0.SubnetId': 'subnet_id',
+ 'NetworkInterface.0.PrivateIpAddresses.0.Primary': 'false',
+ 'NetworkInterface.0.PrivateIpAddresses.0.PrivateIpAddress':
'10.0.0.10',
- 'NetworkInterface.1.PrivateIpAddresses.2.Primary': 'false',
- 'NetworkInterface.1.PrivateIpAddresses.2.PrivateIpAddress':
+ 'NetworkInterface.0.PrivateIpAddresses.1.Primary': 'false',
+ 'NetworkInterface.0.PrivateIpAddresses.1.PrivateIpAddress':
'10.0.0.11',
- 'NetworkInterface.2.DeviceIndex': '2',
- 'NetworkInterface.2.Description': 'description2',
- 'NetworkInterface.2.DeleteOnTermination': 'false',
- 'NetworkInterface.2.PrivateIpAddress': '10.0.1.54',
- 'NetworkInterface.2.SubnetId': 'subnet_id2',
- 'NetworkInterface.2.SecurityGroupId.1': 'group_id1',
- 'NetworkInterface.2.SecurityGroupId.2': 'group_id2',
- 'NetworkInterface.2.PrivateIpAddresses.1.Primary': 'false',
- 'NetworkInterface.2.PrivateIpAddresses.1.PrivateIpAddress':
+ 'NetworkInterface.1.DeviceIndex': '2',
+ 'NetworkInterface.1.Description': 'description2',
+ 'NetworkInterface.1.DeleteOnTermination': 'false',
+ 'NetworkInterface.1.PrivateIpAddress': '10.0.1.54',
+ 'NetworkInterface.1.SubnetId': 'subnet_id2',
+ 'NetworkInterface.1.SecurityGroupId.0': 'group_id1',
+ 'NetworkInterface.1.SecurityGroupId.1': 'group_id2',
+ 'NetworkInterface.1.PrivateIpAddresses.0.Primary': 'false',
+ 'NetworkInterface.1.PrivateIpAddresses.0.PrivateIpAddress':
'10.0.1.10',
- 'NetworkInterface.2.PrivateIpAddresses.2.Primary': 'false',
- 'NetworkInterface.2.PrivateIpAddresses.2.PrivateIpAddress':
+ 'NetworkInterface.1.PrivateIpAddresses.1.Primary': 'false',
+ 'NetworkInterface.1.PrivateIpAddresses.1.PrivateIpAddress':
'10.0.1.11',
})
def test_add_prefix_to_serialization(self):
- return
collection = NetworkInterfaceCollection(self.network_interfaces_spec1,
self.network_interfaces_spec2)
params = {}
@@ -98,43 +109,92 @@ class TestNetworkInterfaceCollection(unittest.TestCase):
# we're just checking a few keys to make sure we get the proper
# prefix.
self.assertDictEqual(params, {
- 'LaunchSpecification.NetworkInterface.1.DeviceIndex': '1',
- 'LaunchSpecification.NetworkInterface.1.DeleteOnTermination':
+ 'LaunchSpecification.NetworkInterface.0.DeviceIndex': '1',
+ 'LaunchSpecification.NetworkInterface.0.DeleteOnTermination':
'false',
- 'LaunchSpecification.NetworkInterface.1.Description':
+ 'LaunchSpecification.NetworkInterface.0.Description':
'description1',
- 'LaunchSpecification.NetworkInterface.1.PrivateIpAddress':
+ 'LaunchSpecification.NetworkInterface.0.PrivateIpAddress':
'10.0.0.54',
- 'LaunchSpecification.NetworkInterface.1.SubnetId': 'subnet_id',
- 'LaunchSpecification.NetworkInterface.1.PrivateIpAddresses.1.Primary':
+ 'LaunchSpecification.NetworkInterface.0.SubnetId': 'subnet_id',
+ 'LaunchSpecification.NetworkInterface.0.PrivateIpAddresses.0.Primary':
'false',
- 'LaunchSpecification.NetworkInterface.1.PrivateIpAddresses.1.PrivateIpAddress':
+ 'LaunchSpecification.NetworkInterface.0.PrivateIpAddresses.0.PrivateIpAddress':
'10.0.0.10',
- 'LaunchSpecification.NetworkInterface.1.PrivateIpAddresses.2.Primary': 'false',
- 'LaunchSpecification.NetworkInterface.1.PrivateIpAddresses.2.PrivateIpAddress':
+ 'LaunchSpecification.NetworkInterface.0.PrivateIpAddresses.1.Primary': 'false',
+ 'LaunchSpecification.NetworkInterface.0.PrivateIpAddresses.1.PrivateIpAddress':
'10.0.0.11',
- 'LaunchSpecification.NetworkInterface.2.DeviceIndex': '2',
- 'LaunchSpecification.NetworkInterface.2.Description':
+ 'LaunchSpecification.NetworkInterface.1.DeviceIndex': '2',
+ 'LaunchSpecification.NetworkInterface.1.Description':
'description2',
- 'LaunchSpecification.NetworkInterface.2.DeleteOnTermination':
+ 'LaunchSpecification.NetworkInterface.1.DeleteOnTermination':
'false',
- 'LaunchSpecification.NetworkInterface.2.PrivateIpAddress':
+ 'LaunchSpecification.NetworkInterface.1.PrivateIpAddress':
'10.0.1.54',
- 'LaunchSpecification.NetworkInterface.2.SubnetId': 'subnet_id2',
- 'LaunchSpecification.NetworkInterface.2.SecurityGroupId.1':
+ 'LaunchSpecification.NetworkInterface.1.SubnetId': 'subnet_id2',
+ 'LaunchSpecification.NetworkInterface.1.SecurityGroupId.0':
'group_id1',
- 'LaunchSpecification.NetworkInterface.2.SecurityGroupId.2':
+ 'LaunchSpecification.NetworkInterface.1.SecurityGroupId.1':
'group_id2',
- 'LaunchSpecification.NetworkInterface.2.PrivateIpAddresses.1.Primary':
+ 'LaunchSpecification.NetworkInterface.1.PrivateIpAddresses.0.Primary':
'false',
- 'LaunchSpecification.NetworkInterface.2.PrivateIpAddresses.1.PrivateIpAddress':
+ 'LaunchSpecification.NetworkInterface.1.PrivateIpAddresses.0.PrivateIpAddress':
'10.0.1.10',
- 'LaunchSpecification.NetworkInterface.2.PrivateIpAddresses.2.Primary':
+ 'LaunchSpecification.NetworkInterface.1.PrivateIpAddresses.1.Primary':
'false',
- 'LaunchSpecification.NetworkInterface.2.PrivateIpAddresses.2.PrivateIpAddress':
+ 'LaunchSpecification.NetworkInterface.1.PrivateIpAddresses.1.PrivateIpAddress':
'10.0.1.11',
})
+ def test_cant_use_public_ip(self):
+ collection = NetworkInterfaceCollection(self.network_interfaces_spec3,
+ self.network_interfaces_spec1)
+ params = {}
+
+        # First, verify we can't create multiple network interfaces when one
+        # of them requests a public IP address.
+ with self.assertRaises(BotoClientError):
+ collection.build_list_params(params, prefix='LaunchSpecification.')
+
+ # Next, ensure it can't be on device index 1.
+ self.network_interfaces_spec3.device_index = 1
+ collection = NetworkInterfaceCollection(self.network_interfaces_spec3)
+ params = {}
+
+ with self.assertRaises(BotoClientError):
+ collection.build_list_params(params, prefix='LaunchSpecification.')
+
+ def test_public_ip(self):
+ # With public IP.
+ collection = NetworkInterfaceCollection(self.network_interfaces_spec3)
+ params = {}
+ collection.build_list_params(params, prefix='LaunchSpecification.')
+
+ self.assertDictEqual(params, {
+ 'LaunchSpecification.NetworkInterface.0.AssociatePublicIpAddress':
+ 'true',
+ 'LaunchSpecification.NetworkInterface.0.DeviceIndex': '0',
+ 'LaunchSpecification.NetworkInterface.0.DeleteOnTermination':
+ 'false',
+ 'LaunchSpecification.NetworkInterface.0.Description':
+ 'description2',
+ 'LaunchSpecification.NetworkInterface.0.PrivateIpAddress':
+ '10.0.1.54',
+ 'LaunchSpecification.NetworkInterface.0.SubnetId': 'subnet_id2',
+ 'LaunchSpecification.NetworkInterface.0.PrivateIpAddresses.0.Primary':
+ 'false',
+ 'LaunchSpecification.NetworkInterface.0.PrivateIpAddresses.0.PrivateIpAddress':
+ '10.0.1.10',
+ 'LaunchSpecification.NetworkInterface.0.PrivateIpAddresses.1.Primary':
+ 'false',
+ 'LaunchSpecification.NetworkInterface.0.PrivateIpAddresses.1.PrivateIpAddress':
+ '10.0.1.11',
+ 'LaunchSpecification.NetworkInterface.0.SecurityGroupId.0':
+ 'group_id1',
+ 'LaunchSpecification.NetworkInterface.0.SecurityGroupId.1':
+ 'group_id2',
+ })
+
if __name__ == '__main__':
unittest.main()
diff --git a/tests/unit/ec2/test_securitygroup.py b/tests/unit/ec2/test_securitygroup.py
new file mode 100644
index 00000000..361dc256
--- /dev/null
+++ b/tests/unit/ec2/test_securitygroup.py
@@ -0,0 +1,212 @@
+#!/usr/bin/env python
+
+from tests.unit import unittest
+from tests.unit import AWSMockServiceTestCase
+
+import mock
+
+from boto.ec2.connection import EC2Connection
+from boto.ec2.securitygroup import SecurityGroup
+
+
+DESCRIBE_SECURITY_GROUP = r"""<?xml version="1.0" encoding="UTF-8"?>
+<DescribeSecurityGroupsResponse xmlns="http://ec2.amazonaws.com/doc/2013-06-15/">
+ <requestId>59dbff89-35bd-4eac-99ed-be587EXAMPLE</requestId>
+ <securityGroupInfo>
+ <item>
+ <ownerId>111122223333</ownerId>
+ <groupId>sg-1a2b3c4d</groupId>
+ <groupName>WebServers</groupName>
+ <groupDescription>Web Servers</groupDescription>
+ <vpcId/>
+ <ipPermissions>
+ <item>
+ <ipProtocol>tcp</ipProtocol>
+ <fromPort>80</fromPort>
+ <toPort>80</toPort>
+ <groups/>
+ <ipRanges>
+ <item>
+ <cidrIp>0.0.0.0/0</cidrIp>
+ </item>
+ </ipRanges>
+ </item>
+ </ipPermissions>
+ <ipPermissionsEgress/>
+ </item>
+ <item>
+ <ownerId>111122223333</ownerId>
+ <groupId>sg-2a2b3c4d</groupId>
+ <groupName>RangedPortsBySource</groupName>
+ <groupDescription>Group A</groupDescription>
+ <ipPermissions>
+ <item>
+ <ipProtocol>tcp</ipProtocol>
+ <fromPort>6000</fromPort>
+ <toPort>7000</toPort>
+ <groups>
+ <item>
+ <userId>111122223333</userId>
+ <groupId>sg-3a2b3c4d</groupId>
+ <groupName>Group B</groupName>
+ </item>
+ </groups>
+ <ipRanges/>
+ </item>
+ </ipPermissions>
+ <ipPermissionsEgress/>
+ </item>
+ </securityGroupInfo>
+</DescribeSecurityGroupsResponse>"""
+
+DESCRIBE_INSTANCES = r"""<?xml version="1.0" encoding="UTF-8"?>
+<DescribeInstancesResponse xmlns="http://ec2.amazonaws.com/doc/2012-10-01/">
+ <requestId>c6132c74-b524-4884-87f5-0f4bde4a9760</requestId>
+ <reservationSet>
+ <item>
+ <reservationId>r-72ef4a0a</reservationId>
+ <ownerId>184906166255</ownerId>
+ <groupSet/>
+ <instancesSet>
+ <item>
+ <instanceId>i-instance</instanceId>
+ <imageId>ami-1624987f</imageId>
+ <instanceState>
+ <code>16</code>
+ <name>running</name>
+ </instanceState>
+ <privateDnsName/>
+ <dnsName/>
+ <reason/>
+ <keyName>mykeypair</keyName>
+ <amiLaunchIndex>0</amiLaunchIndex>
+ <productCodes/>
+ <instanceType>m1.small</instanceType>
+ <launchTime>2012-12-14T23:48:37.000Z</launchTime>
+ <placement>
+ <availabilityZone>us-east-1d</availabilityZone>
+ <groupName/>
+ <tenancy>default</tenancy>
+ </placement>
+ <kernelId>aki-88aa75e1</kernelId>
+ <monitoring>
+ <state>disabled</state>
+ </monitoring>
+ <subnetId>subnet-0dc60667</subnetId>
+ <vpcId>vpc-id</vpcId>
+ <privateIpAddress>10.0.0.67</privateIpAddress>
+ <sourceDestCheck>true</sourceDestCheck>
+ <groupSet>
+ <item>
+ <groupId>sg-1a2b3c4d</groupId>
+ <groupName>WebServerSG</groupName>
+ </item>
+ </groupSet>
+ <architecture>x86_64</architecture>
+ <rootDeviceType>ebs</rootDeviceType>
+ <rootDeviceName>/dev/sda1</rootDeviceName>
+ <blockDeviceMapping>
+ <item>
+ <deviceName>/dev/sda1</deviceName>
+ <ebs>
+ <volumeId>vol-id</volumeId>
+ <status>attached</status>
+ <attachTime>2012-12-14T23:48:43.000Z</attachTime>
+ <deleteOnTermination>true</deleteOnTermination>
+ </ebs>
+ </item>
+ </blockDeviceMapping>
+ <virtualizationType>paravirtual</virtualizationType>
+ <clientToken>foo</clientToken>
+ <tagSet>
+ <item>
+ <key>Name</key>
+ <value/>
+ </item>
+ </tagSet>
+ <hypervisor>xen</hypervisor>
+ <networkInterfaceSet>
+ <item>
+ <networkInterfaceId>eni-id</networkInterfaceId>
+ <subnetId>subnet-id</subnetId>
+ <vpcId>vpc-id</vpcId>
+ <description>Primary network interface</description>
+ <ownerId>ownerid</ownerId>
+ <status>in-use</status>
+ <privateIpAddress>10.0.0.67</privateIpAddress>
+ <sourceDestCheck>true</sourceDestCheck>
+ <groupSet>
+ <item>
+ <groupId>sg-id</groupId>
+ <groupName>WebServerSG</groupName>
+ </item>
+ </groupSet>
+ <attachment>
+ <attachmentId>eni-attach-id</attachmentId>
+ <deviceIndex>0</deviceIndex>
+ <status>attached</status>
+ <attachTime>2012-12-14T23:48:37.000Z</attachTime>
+ <deleteOnTermination>true</deleteOnTermination>
+ </attachment>
+ <privateIpAddressesSet>
+ <item>
+ <privateIpAddress>10.0.0.67</privateIpAddress>
+ <primary>true</primary>
+ </item>
+ <item>
+ <privateIpAddress>10.0.0.54</privateIpAddress>
+ <primary>false</primary>
+ </item>
+ <item>
+ <privateIpAddress>10.0.0.55</privateIpAddress>
+ <primary>false</primary>
+ </item>
+ </privateIpAddressesSet>
+ </item>
+ </networkInterfaceSet>
+ <ebsOptimized>false</ebsOptimized>
+ </item>
+ </instancesSet>
+ </item>
+ </reservationSet>
+</DescribeInstancesResponse>
+"""
+
+class TestDescribeSecurityGroups(AWSMockServiceTestCase):
+ connection_class = EC2Connection
+
+ def test_get_instances(self):
+ self.set_http_response(status_code=200, body=DESCRIBE_SECURITY_GROUP)
+ groups = self.service_connection.get_all_security_groups()
+
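+        # Swap the canned response so that groups[0].instances() issues its
+        # DescribeInstances call against the second body.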
+ self.set_http_response(status_code=200, body=DESCRIBE_INSTANCES)
+ instances = groups[0].instances()
+
+ self.assertEqual(1, len(instances))
+ self.assertEqual(groups[0].id, instances[0].groups[0].id)
+
+
+class SecurityGroupTest(unittest.TestCase):
+ def test_add_rule(self):
+ sg = SecurityGroup()
+ self.assertEqual(len(sg.rules), 0)
+
+ # Regression: ``dry_run`` was being passed (but unhandled) before.
+ sg.add_rule(
+ ip_protocol='http',
+ from_port='80',
+ to_port='8080',
+ src_group_name='groupy',
+ src_group_owner_id='12345',
+ cidr_ip='10.0.0.1',
+ src_group_group_id='54321',
+ dry_run=False
+ )
+ self.assertEqual(len(sg.rules), 1)
+
+ def test_remove_rule_on_empty_group(self):
+ # Remove a rule from a group with no rules
+ sg = SecurityGroup()
+
+ with self.assertRaises(ValueError):
+ sg.remove_rule('ip', 80, 80, None, None, None, None)
diff --git a/tests/unit/ec2/test_volume.py b/tests/unit/ec2/test_volume.py
index fd2a4553..14f0bcb6 100644
--- a/tests/unit/ec2/test_volume.py
+++ b/tests/unit/ec2/test_volume.py
@@ -38,7 +38,12 @@ class VolumeTests(unittest.TestCase):
def test_startElement_calls_TaggedEC2Object_startElement_with_correct_args(self, startElement):
volume = Volume()
volume.startElement("some name", "some attrs", None)
- startElement.assert_called_with(volume, "some name", "some attrs", None)
+ startElement.assert_called_with(
+ volume,
+ "some name",
+ "some attrs",
+ None
+ )
@mock.patch("boto.ec2.volume.TaggedEC2Object.startElement")
def test_startElement_retval_not_None_returns_correct_thing(self, startElement):
@@ -120,43 +125,57 @@ class VolumeTests(unittest.TestCase):
def test_delete_calls_delete_volume(self):
self.volume_one.connection = mock.Mock()
self.volume_one.delete()
- self.volume_one.connection.delete_volume.assert_called_with(1)
+ self.volume_one.connection.delete_volume.assert_called_with(
+ 1,
+ dry_run=False
+ )
def test_attach_calls_attach_volume(self):
self.volume_one.connection = mock.Mock()
self.volume_one.attach("instance_id", "/dev/null")
- self.volume_one.connection.attach_volume.assert_called_with(1, "instance_id", "/dev/null")
+ self.volume_one.connection.attach_volume.assert_called_with(
+ 1,
+ "instance_id",
+ "/dev/null",
+ dry_run=False
+ )
def test_detach_calls_detach_volume(self):
self.volume_one.connection = mock.Mock()
self.volume_one.detach()
self.volume_one.connection.detach_volume.assert_called_with(
- 1, 2, "/dev/null", False)
+ 1, 2, "/dev/null", False, dry_run=False)
def test_detach_with_no_attach_data(self):
self.volume_two.connection = mock.Mock()
self.volume_two.detach()
self.volume_two.connection.detach_volume.assert_called_with(
- 1, None, None, False)
+ 1, None, None, False, dry_run=False)
def test_detach_with_force_calls_detach_volume_with_force(self):
self.volume_one.connection = mock.Mock()
self.volume_one.detach(True)
self.volume_one.connection.detach_volume.assert_called_with(
- 1, 2, "/dev/null", True)
+ 1, 2, "/dev/null", True, dry_run=False)
def test_create_snapshot_calls_connection_create_snapshot(self):
self.volume_one.connection = mock.Mock()
self.volume_one.create_snapshot()
self.volume_one.connection.create_snapshot.assert_called_with(
- 1, None)
+ 1,
+ None,
+ dry_run=False
+ )
def test_create_snapshot_with_description(self):
self.volume_one.connection = mock.Mock()
self.volume_one.create_snapshot("some description")
self.volume_one.connection.create_snapshot.assert_called_with(
- 1, "some description")
+ 1,
+ "some description",
+ dry_run=False
+ )
def test_volume_state_returns_status(self):
retval = self.volume_one.volume_state()
@@ -186,7 +205,7 @@ class VolumeTests(unittest.TestCase):
self.volume_one.connection.get_all_snapshots.return_value = []
self.volume_one.snapshots("owner", "restorable_by")
self.volume_one.connection.get_all_snapshots.assert_called_with(
- owner="owner", restorable_by="restorable_by")
+ owner="owner", restorable_by="restorable_by", dry_run=False)
class AttachmentSetTests(unittest.TestCase):
def check_that_attribute_has_been_set(self, name, value, attribute):
diff --git a/tests/unit/emr/test_instance_group_args.py b/tests/unit/emr/test_instance_group_args.py
new file mode 100644
index 00000000..d5aab790
--- /dev/null
+++ b/tests/unit/emr/test_instance_group_args.py
@@ -0,0 +1,57 @@
+#!/usr/bin/env python
+# Author: Charlie Schluting <charlie@schluting.com>
+#
+# Test to ensure initialization of the InstanceGroup object emits appropriate
+# errors if bidprice is not specified, but allows float, int, and Decimal.
+
+import unittest
+from decimal import Decimal
+
+from boto.emr.instance_group import InstanceGroup
+
+
+class TestInstanceGroupArgs(unittest.TestCase):
+
+ def test_bidprice_missing_spot(self):
+ """
+ Test InstanceGroup init raises ValueError when market==spot and
+ bidprice is not specified.
+ """
+ with self.assertRaisesRegexp(ValueError, 'bidprice must be specified'):
+ InstanceGroup(1, 'MASTER', 'm1.small',
+ 'SPOT', 'master')
+
+ def test_bidprice_missing_ondemand(self):
+ """
+ Test InstanceGroup init accepts a missing bidprice arg, when market is
+ ON_DEMAND.
+ """
+ instance_group = InstanceGroup(1, 'MASTER', 'm1.small',
+ 'ON_DEMAND', 'master')
+
+ def test_bidprice_Decimal(self):
+ """
+ Test InstanceGroup init works with bidprice type = Decimal.
+ """
+ instance_group = InstanceGroup(1, 'MASTER', 'm1.small',
+ 'SPOT', 'master', bidprice=Decimal(1.10))
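+        # Decimal(1.10) is constructed from a float literal and carries
+        # binary imprecision, so only the leading characters are compared.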
+ self.assertEquals('1.10', instance_group.bidprice[:4])
+
+ def test_bidprice_float(self):
+ """
+ Test InstanceGroup init works with bidprice type = float.
+ """
+ instance_group = InstanceGroup(1, 'MASTER', 'm1.small',
+ 'SPOT', 'master', bidprice=1.1)
+ self.assertEquals('1.1', instance_group.bidprice)
+
+ def test_bidprice_string(self):
+ """
+ Test InstanceGroup init works with bidprice type = string.
+ """
+ instance_group = InstanceGroup(1, 'MASTER', 'm1.small',
+ 'SPOT', 'master', bidprice='1.1')
+ self.assertEquals('1.1', instance_group.bidprice)
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/tests/unit/glacier/test_layer2.py b/tests/unit/glacier/test_layer2.py
index 3a54924a..854904e0 100644
--- a/tests/unit/glacier/test_layer2.py
+++ b/tests/unit/glacier/test_layer2.py
@@ -43,6 +43,40 @@ FIXTURE_VAULT = {
"VaultName" : "examplevault"
}
+FIXTURE_VAULTS = {
+ 'RequestId': 'vuXO7SHTw-luynJ0Zu31AYjR3TcCn7X25r7ykpuulxY2lv8',
+ 'VaultList': [{'SizeInBytes': 0, 'LastInventoryDate': None,
+ 'VaultARN': 'arn:aws:glacier:us-east-1:686406519478:vaults/vault0',
+ 'VaultName': 'vault0', 'NumberOfArchives': 0,
+ 'CreationDate': '2013-05-17T02:38:39.049Z'},
+ {'SizeInBytes': 0, 'LastInventoryDate': None,
+ 'VaultARN': 'arn:aws:glacier:us-east-1:686406519478:vaults/vault3',
+ 'VaultName': 'vault3', 'NumberOfArchives': 0,
+ 'CreationDate': '2013-05-17T02:31:18.659Z'}]}
+
+FIXTURE_PAGINATED_VAULTS = {
+ 'Marker': 'arn:aws:glacier:us-east-1:686406519478:vaults/vault2',
+ 'RequestId': 'vuXO7SHTw-luynJ0Zu31AYjR3TcCn7X25r7ykpuulxY2lv8',
+ 'VaultList': [{'SizeInBytes': 0, 'LastInventoryDate': None,
+ 'VaultARN': 'arn:aws:glacier:us-east-1:686406519478:vaults/vault0',
+ 'VaultName': 'vault0', 'NumberOfArchives': 0,
+ 'CreationDate': '2013-05-17T02:38:39.049Z'},
+ {'SizeInBytes': 0, 'LastInventoryDate': None,
+ 'VaultARN': 'arn:aws:glacier:us-east-1:686406519478:vaults/vault1',
+ 'VaultName': 'vault1', 'NumberOfArchives': 0,
+ 'CreationDate': '2013-05-17T02:31:18.659Z'}]}
+FIXTURE_PAGINATED_VAULTS_CONT = {
+ 'Marker': None,
+ 'RequestId': 'vuXO7SHTw-luynJ0Zu31AYjR3TcCn7X25r7ykpuulxY2lv8',
+ 'VaultList': [{'SizeInBytes': 0, 'LastInventoryDate': None,
+ 'VaultARN': 'arn:aws:glacier:us-east-1:686406519478:vaults/vault2',
+ 'VaultName': 'vault2', 'NumberOfArchives': 0,
+ 'CreationDate': '2013-05-17T02:38:39.049Z'},
+ {'SizeInBytes': 0, 'LastInventoryDate': None,
+ 'VaultARN': 'arn:aws:glacier:us-east-1:686406519478:vaults/vault3',
+ 'VaultName': 'vault3', 'NumberOfArchives': 0,
+ 'CreationDate': '2013-05-17T02:31:18.659Z'}]}
+
FIXTURE_ARCHIVE_JOB = {
"Action": "ArchiveRetrieval",
"ArchiveId": ("NkbByEejwEggmBz2fTHgJrg0XBoDfjP4q6iu87-TjhqG6eGoOY9Z8i1_AUyUs"
@@ -135,10 +169,22 @@ class TestGlacierLayer2Connection(GlacierLayer2Base):
self.assertEqual(vault.size, 78088912)
self.assertEqual(vault.number_of_archives, 192)
- def list_vaults(self):
- self.mock_layer1.list_vaults.return_value = [FIXTURE_VAULT]
+ def test_list_vaults(self):
+ self.mock_layer1.list_vaults.return_value = FIXTURE_VAULTS
+ vaults = self.layer2.list_vaults()
+ self.assertEqual(vaults[0].name, "vault0")
+ self.assertEqual(len(vaults), 2)
+
+ def test_list_vaults_paginated(self):
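+        # Two canned pages: the first carries a Marker, the second has
+        # Marker=None, so list_vaults() is expected to follow the marker and
+        # aggregate all four vaults.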
+ resps = [FIXTURE_PAGINATED_VAULTS, FIXTURE_PAGINATED_VAULTS_CONT]
+ def return_paginated_vaults_resp(marker=None, limit=None):
+ return resps.pop(0)
+
+        self.mock_layer1.list_vaults = Mock(side_effect=return_paginated_vaults_resp)
vaults = self.layer2.list_vaults()
- self.assertEqual(vaults[0].name, "examplevault")
+ self.assertEqual(vaults[0].name, "vault0")
+ self.assertEqual(vaults[3].name, "vault3")
+ self.assertEqual(len(vaults), 4)
class TestVault(GlacierLayer2Base):
diff --git a/tests/unit/rds/test_connection.py b/tests/unit/rds/test_connection.py
index 7eef7415..b8d012d1 100644
--- a/tests/unit/rds/test_connection.py
+++ b/tests/unit/rds/test_connection.py
@@ -24,7 +24,10 @@
from tests.unit import unittest
from tests.unit import AWSMockServiceTestCase
+from boto.ec2.securitygroup import SecurityGroup
from boto.rds import RDSConnection
+from boto.rds.vpcsecuritygroupmembership import VPCSecurityGroupMembership
+from boto.rds.parametergroup import ParameterGroup
class TestRDSConnection(AWSMockServiceTestCase):
@@ -72,6 +75,12 @@ class TestRDSConnection(AWSMockServiceTestCase):
<DBSecurityGroupName>default</DBSecurityGroupName>
</DBSecurityGroup>
</DBSecurityGroups>
+ <VpcSecurityGroups>
+ <VpcSecurityGroupMembership>
+ <VpcSecurityGroupId>sg-1</VpcSecurityGroupId>
+ <Status>active</Status>
+ </VpcSecurityGroupMembership>
+ </VpcSecurityGroups>
<DBName>mydb2</DBName>
<AutoMinorVersionUpgrade>true</AutoMinorVersionUpgrade>
<InstanceCreateTime>2012-10-03T22:01:51.047Z</InstanceCreateTime>
@@ -136,6 +145,325 @@ class TestRDSConnection(AWSMockServiceTestCase):
self.assertEqual(db.status_infos[0].normal, True)
self.assertEqual(db.status_infos[0].status, 'replicating')
self.assertEqual(db.status_infos[0].status_type, 'read replication')
+ self.assertEqual(db.vpc_security_groups[0].status, 'active')
+ self.assertEqual(db.vpc_security_groups[0].vpc_group, 'sg-1')
+
+
+class TestRDSCCreateDBInstance(AWSMockServiceTestCase):
+ connection_class = RDSConnection
+
+ def setUp(self):
+ super(TestRDSCCreateDBInstance, self).setUp()
+
+ def default_body(self):
+ return """
+ <CreateDBInstanceResponse xmlns="http://rds.amazonaws.com/doc/2013-05-15/">
+ <CreateDBInstanceResult>
+ <DBInstance>
+ <ReadReplicaDBInstanceIdentifiers/>
+ <Engine>mysql</Engine>
+ <PendingModifiedValues>
+ <MasterUserPassword>****</MasterUserPassword>
+ </PendingModifiedValues>
+ <BackupRetentionPeriod>1</BackupRetentionPeriod>
+ <MultiAZ>false</MultiAZ>
+ <LicenseModel>general-public-license</LicenseModel>
+ <DBSubnetGroup>
+ <VpcId>990524496922</VpcId>
+ <SubnetGroupStatus>Complete</SubnetGroupStatus>
+ <DBSubnetGroupDescription>description</DBSubnetGroupDescription>
+ <DBSubnetGroupName>subnet_grp1</DBSubnetGroupName>
+ <Subnets>
+ <Subnet>
+ <SubnetStatus>Active</SubnetStatus>
+ <SubnetIdentifier>subnet-7c5b4115</SubnetIdentifier>
+ <SubnetAvailabilityZone>
+ <Name>us-east-1c</Name>
+ </SubnetAvailabilityZone>
+ </Subnet>
+ <Subnet>
+ <SubnetStatus>Active</SubnetStatus>
+ <SubnetIdentifier>subnet-7b5b4112</SubnetIdentifier>
+ <SubnetAvailabilityZone>
+ <Name>us-east-1b</Name>
+ </SubnetAvailabilityZone>
+ </Subnet>
+ <Subnet>
+ <SubnetStatus>Active</SubnetStatus>
+ <SubnetIdentifier>subnet-3ea6bd57</SubnetIdentifier>
+ <SubnetAvailabilityZone>
+ <Name>us-east-1d</Name>
+ </SubnetAvailabilityZone>
+ </Subnet>
+ </Subnets>
+ </DBSubnetGroup>
+ <DBInstanceStatus>creating</DBInstanceStatus>
+ <EngineVersion>5.1.50</EngineVersion>
+ <DBInstanceIdentifier>simcoprod01</DBInstanceIdentifier>
+ <DBParameterGroups>
+ <DBParameterGroup>
+ <ParameterApplyStatus>in-sync</ParameterApplyStatus>
+ <DBParameterGroupName>default.mysql5.1</DBParameterGroupName>
+ </DBParameterGroup>
+ </DBParameterGroups>
+ <DBSecurityGroups>
+ <DBSecurityGroup>
+ <Status>active</Status>
+ <DBSecurityGroupName>default</DBSecurityGroupName>
+ </DBSecurityGroup>
+ </DBSecurityGroups>
+ <PreferredBackupWindow>00:00-00:30</PreferredBackupWindow>
+ <AutoMinorVersionUpgrade>true</AutoMinorVersionUpgrade>
+ <PreferredMaintenanceWindow>sat:07:30-sat:08:00</PreferredMaintenanceWindow>
+ <AllocatedStorage>10</AllocatedStorage>
+ <DBInstanceClass>db.m1.large</DBInstanceClass>
+ <MasterUsername>master</MasterUsername>
+ </DBInstance>
+ </CreateDBInstanceResult>
+ <ResponseMetadata>
+ <RequestId>2e5d4270-8501-11e0-bd9b-a7b1ece36d51</RequestId>
+ </ResponseMetadata>
+ </CreateDBInstanceResponse>
+ """
+
+ def test_create_db_instance_param_group_name(self):
+ self.set_http_response(status_code=200)
+ db = self.service_connection.create_dbinstance(
+ 'SimCoProd01',
+ 10,
+ 'db.m1.large',
+ 'master',
+ 'Password01',
+ param_group='default.mysql5.1',
+ db_subnet_group_name='dbSubnetgroup01')
+
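+ # Engine and Port are not passed above, so the connection defaults ('MySQL5.1', 3306) are expected in the request.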
+ self.assert_request_parameters({
+ 'Action': 'CreateDBInstance',
+ 'AllocatedStorage': 10,
+ 'AutoMinorVersionUpgrade': 'true',
+ 'DBInstanceClass': 'db.m1.large',
+ 'DBInstanceIdentifier': 'SimCoProd01',
+ 'DBParameterGroupName': 'default.mysql5.1',
+ 'DBSubnetGroupName': 'dbSubnetgroup01',
+ 'Engine': 'MySQL5.1',
+ 'MasterUsername': 'master',
+ 'MasterUserPassword': 'Password01',
+ 'Port': 3306,
+ }, ignore_params_values=['Version'])
+
+ self.assertEqual(db.id, 'simcoprod01')
+ self.assertEqual(db.engine, 'mysql')
+ self.assertEqual(db.status, 'creating')
+ self.assertEqual(db.allocated_storage, 10)
+ self.assertEqual(db.instance_class, 'db.m1.large')
+ self.assertEqual(db.master_username, 'master')
+ self.assertEqual(db.multi_az, False)
+ self.assertEqual(db.pending_modified_values,
+ {'MasterUserPassword': '****'})
+
+ self.assertEqual(db.parameter_group.name,
+ 'default.mysql5.1')
+ self.assertEqual(db.parameter_group.description, None)
+ self.assertEqual(db.parameter_group.engine, None)
+
+ def test_create_db_instance_param_group_instance(self):
+ self.set_http_response(status_code=200)
+ param_group = ParameterGroup()
+ param_group.name = 'default.mysql5.1'
+ db = self.service_connection.create_dbinstance(
+ 'SimCoProd01',
+ 10,
+ 'db.m1.large',
+ 'master',
+ 'Password01',
+ param_group=param_group,
+ db_subnet_group_name='dbSubnetgroup01')
+
+ self.assert_request_parameters({
+ 'Action': 'CreateDBInstance',
+ 'AllocatedStorage': 10,
+ 'AutoMinorVersionUpgrade': 'true',
+ 'DBInstanceClass': 'db.m1.large',
+ 'DBInstanceIdentifier': 'SimCoProd01',
+ 'DBParameterGroupName': 'default.mysql5.1',
+ 'DBSubnetGroupName': 'dbSubnetgroup01',
+ 'Engine': 'MySQL5.1',
+ 'MasterUsername': 'master',
+ 'MasterUserPassword': 'Password01',
+ 'Port': 3306,
+ }, ignore_params_values=['Version'])
+
+ self.assertEqual(db.id, 'simcoprod01')
+ self.assertEqual(db.engine, 'mysql')
+ self.assertEqual(db.status, 'creating')
+ self.assertEqual(db.allocated_storage, 10)
+ self.assertEqual(db.instance_class, 'db.m1.large')
+ self.assertEqual(db.master_username, 'master')
+ self.assertEqual(db.multi_az, False)
+ self.assertEqual(db.pending_modified_values,
+ {'MasterUserPassword': '****'})
+ self.assertEqual(db.parameter_group.name,
+ 'default.mysql5.1')
+ self.assertEqual(db.parameter_group.description, None)
+ self.assertEqual(db.parameter_group.engine, None)
+
+
+class TestRDSConnectionRestoreDBInstanceFromPointInTime(AWSMockServiceTestCase):
+ connection_class = RDSConnection
+
+ def setUp(self):
+ super(TestRDSConnectionRestoreDBInstanceFromPointInTime, self).setUp()
+
+ def default_body(self):
+ return """
+ <RestoreDBInstanceToPointInTimeResponse xmlns="http://rds.amazonaws.com/doc/2013-05-15/">
+ <RestoreDBInstanceToPointInTimeResult>
+ <DBInstance>
+ <ReadReplicaDBInstanceIdentifiers/>
+ <Engine>mysql</Engine>
+ <PendingModifiedValues/>
+ <BackupRetentionPeriod>1</BackupRetentionPeriod>
+ <MultiAZ>false</MultiAZ>
+ <LicenseModel>general-public-license</LicenseModel>
+ <DBInstanceStatus>creating</DBInstanceStatus>
+ <EngineVersion>5.1.50</EngineVersion>
+ <DBInstanceIdentifier>restored-db</DBInstanceIdentifier>
+ <DBParameterGroups>
+ <DBParameterGroup>
+ <ParameterApplyStatus>in-sync</ParameterApplyStatus>
+ <DBParameterGroupName>default.mysql5.1</DBParameterGroupName>
+ </DBParameterGroup>
+ </DBParameterGroups>
+ <DBSecurityGroups>
+ <DBSecurityGroup>
+ <Status>active</Status>
+ <DBSecurityGroupName>default</DBSecurityGroupName>
+ </DBSecurityGroup>
+ </DBSecurityGroups>
+ <PreferredBackupWindow>00:00-00:30</PreferredBackupWindow>
+ <AutoMinorVersionUpgrade>true</AutoMinorVersionUpgrade>
+ <PreferredMaintenanceWindow>sat:07:30-sat:08:00</PreferredMaintenanceWindow>
+ <AllocatedStorage>10</AllocatedStorage>
+ <DBInstanceClass>db.m1.large</DBInstanceClass>
+ <MasterUsername>master</MasterUsername>
+ </DBInstance>
+ </RestoreDBInstanceToPointInTimeResult>
+ <ResponseMetadata>
+ <RequestId>1ef546bc-850b-11e0-90aa-eb648410240d</RequestId>
+ </ResponseMetadata>
+ </RestoreDBInstanceToPointInTimeResponse>
+ """
+
+ def test_restore_dbinstance_from_point_in_time(self):
+ self.set_http_response(status_code=200)
+ db = self.service_connection.restore_dbinstance_from_point_in_time(
+ 'simcoprod01',
+ 'restored-db',
+ True)
+
+ self.assert_request_parameters({
+ 'Action': 'RestoreDBInstanceToPointInTime',
+ 'SourceDBInstanceIdentifier': 'simcoprod01',
+ 'TargetDBInstanceIdentifier': 'restored-db',
+ 'UseLatestRestorableTime': 'true',
+ }, ignore_params_values=['Version'])
+
+ self.assertEqual(db.id, 'restored-db')
+ self.assertEqual(db.engine, 'mysql')
+ self.assertEqual(db.status, 'creating')
+ self.assertEqual(db.allocated_storage, 10)
+ self.assertEqual(db.instance_class, 'db.m1.large')
+ self.assertEqual(db.master_username, 'master')
+ self.assertEqual(db.multi_az, False)
+
+ self.assertEqual(db.parameter_group.name,
+ 'default.mysql5.1')
+ self.assertEqual(db.parameter_group.description, None)
+ self.assertEqual(db.parameter_group.engine, None)
+
+ def test_restore_dbinstance_from_point_in_time__db_subnet_group_name(self):
+ self.set_http_response(status_code=200)
+ db = self.service_connection.restore_dbinstance_from_point_in_time(
+ 'simcoprod01',
+ 'restored-db',
+ True,
+ db_subnet_group_name='dbsubnetgroup')
+
+ self.assert_request_parameters({
+ 'Action': 'RestoreDBInstanceToPointInTime',
+ 'SourceDBInstanceIdentifier': 'simcoprod01',
+ 'TargetDBInstanceIdentifier': 'restored-db',
+ 'UseLatestRestorableTime': 'true',
+ 'DBSubnetGroupName': 'dbsubnetgroup',
+ }, ignore_params_values=['Version'])
+
+ def test_create_db_instance_vpc_sg_str(self):
+ self.set_http_response(status_code=200)
+ vpc_security_groups = [
+ VPCSecurityGroupMembership(self.service_connection, 'active', 'sg-1'),
+ VPCSecurityGroupMembership(self.service_connection, None, 'sg-2')]
+
+ db = self.service_connection.create_dbinstance(
+ 'SimCoProd01',
+ 10,
+ 'db.m1.large',
+ 'master',
+ 'Password01',
+ param_group='default.mysql5.1',
+ db_subnet_group_name='dbSubnetgroup01',
+ vpc_security_groups=vpc_security_groups)
+
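+ # Only the security group ids should be serialized, as VpcSecurityGroupIds.member.N.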
+ self.assert_request_parameters({
+ 'Action': 'CreateDBInstance',
+ 'AllocatedStorage': 10,
+ 'AutoMinorVersionUpgrade': 'true',
+ 'DBInstanceClass': 'db.m1.large',
+ 'DBInstanceIdentifier': 'SimCoProd01',
+ 'DBParameterGroupName': 'default.mysql5.1',
+ 'DBSubnetGroupName': 'dbSubnetgroup01',
+ 'Engine': 'MySQL5.1',
+ 'MasterUsername': 'master',
+ 'MasterUserPassword': 'Password01',
+ 'Port': 3306,
+ 'VpcSecurityGroupIds.member.1': 'sg-1',
+ 'VpcSecurityGroupIds.member.2': 'sg-2'
+ }, ignore_params_values=['Version'])
+
+ def test_create_db_instance_vpc_sg_obj(self):
+ self.set_http_response(status_code=200)
+
+ sg1 = SecurityGroup(name='sg-1')
+ sg2 = SecurityGroup(name='sg-2')
+
+ vpc_security_groups = [
+ VPCSecurityGroupMembership(self.service_connection, 'active', sg1.name),
+ VPCSecurityGroupMembership(self.service_connection, None, sg2.name)]
+
+ db = self.service_connection.create_dbinstance(
+ 'SimCoProd01',
+ 10,
+ 'db.m1.large',
+ 'master',
+ 'Password01',
+ param_group='default.mysql5.1',
+ db_subnet_group_name='dbSubnetgroup01',
+ vpc_security_groups=vpc_security_groups)
+
+ self.assert_request_parameters({
+ 'Action': 'CreateDBInstance',
+ 'AllocatedStorage': 10,
+ 'AutoMinorVersionUpgrade': 'true',
+ 'DBInstanceClass': 'db.m1.large',
+ 'DBInstanceIdentifier': 'SimCoProd01',
+ 'DBParameterGroupName': 'default.mysql5.1',
+ 'DBSubnetGroupName': 'dbSubnetgroup01',
+ 'Engine': 'MySQL5.1',
+ 'MasterUsername': 'master',
+ 'MasterUserPassword': 'Password01',
+ 'Port': 3306,
+ 'VpcSecurityGroupIds.member.1': 'sg-1',
+ 'VpcSecurityGroupIds.member.2': 'sg-2'
+ }, ignore_params_values=['Version'])
class TestRDSOptionGroups(AWSMockServiceTestCase):
diff --git a/tests/unit/s3/test_key.py b/tests/unit/s3/test_key.py
index 5e249c17..889976e5 100644
--- a/tests/unit/s3/test_key.py
+++ b/tests/unit/s3/test_key.py
@@ -25,6 +25,7 @@ try:
except ImportError:
from StringIO import StringIO
+import mock
from tests.unit import unittest
from tests.unit import AWSMockServiceTestCase
@@ -87,37 +88,29 @@ def counter(fn):
class TestS3KeyRetries(AWSMockServiceTestCase):
connection_class = S3Connection
- def setUp(self):
- super(TestS3KeyRetries, self).setUp()
-
- def test_500_retry(self):
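+ # time.sleep is patched out so the retry back-off does not actually pause the test.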
+ @mock.patch('time.sleep')
+ def test_500_retry(self, sleep_mock):
self.set_http_response(status_code=500)
b = Bucket(self.service_connection, 'mybucket')
k = b.new_key('test_failure')
fail_file = StringIO('This will attempt to retry.')
- try:
+ with self.assertRaises(BotoServerError):
k.send_file(fail_file)
- self.fail("This shouldn't ever succeed.")
- except BotoServerError:
- pass
- def test_400_timeout(self):
+ @mock.patch('time.sleep')
+ def test_400_timeout(self, sleep_mock):
weird_timeout_body = "<Error><Code>RequestTimeout</Code></Error>"
self.set_http_response(status_code=400, body=weird_timeout_body)
b = Bucket(self.service_connection, 'mybucket')
k = b.new_key('test_failure')
fail_file = StringIO('This will pretend to be chunk-able.')
- # Decorate.
k.should_retry = counter(k.should_retry)
self.assertEqual(k.should_retry.count, 0)
- try:
+ with self.assertRaises(BotoServerError):
k.send_file(fail_file)
- self.fail("This shouldn't ever succeed.")
- except BotoServerError:
- pass
self.assertTrue(k.should_retry.count, 1)
diff --git a/tests/unit/ses/__init__.py b/tests/unit/ses/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/tests/unit/ses/__init__.py
diff --git a/tests/unit/ses/test_identity.py b/tests/unit/ses/test_identity.py
new file mode 100644
index 00000000..6735e4a8
--- /dev/null
+++ b/tests/unit/ses/test_identity.py
@@ -0,0 +1,82 @@
+#!/usr/bin/env python
+# Copyright (c) 2013 Amazon.com, Inc. or its affiliates. All Rights Reserved
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish, dis-
+# tribute, sublicense, and/or sell copies of the Software, and to permit
+# persons to whom the Software is furnished to do so, subject to the fol-
+# lowing conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
+# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+#
+from tests.unit import unittest
+from tests.unit import AWSMockServiceTestCase
+
+from boto.jsonresponse import ListElement
+from boto.ses.connection import SESConnection
+
+
+class TestSESIdentity(AWSMockServiceTestCase):
+ connection_class = SESConnection
+
+ def setUp(self):
+ super(TestSESIdentity, self).setUp()
+
+ def default_body(self):
+ return """<GetIdentityDkimAttributesResponse \
+xmlns="http://ses.amazonaws.com/doc/2010-12-01/">
+ <GetIdentityDkimAttributesResult>
+ <DkimAttributes>
+ <entry>
+ <key>amazon.com</key>
+ <value>
+ <DkimEnabled>true</DkimEnabled>
+ <DkimVerificationStatus>Success</DkimVerificationStatus>
+ <DkimTokens>
+ <member>vvjuipp74whm76gqoni7qmwwn4w4qusjiainivf6f</member>
+ <member>3frqe7jn4obpuxjpwpolz6ipb3k5nvt2nhjpik2oy</member>
+ <member>wrqplteh7oodxnad7hsl4mixg2uavzneazxv5sxi2</member>
+ </DkimTokens>
+ </value>
+ </entry>
+ </DkimAttributes>
+ </GetIdentityDkimAttributesResult>
+ <ResponseMetadata>
+ <RequestId>bb5a105d-c468-11e1-82eb-dff885ccc06a</RequestId>
+ </ResponseMetadata>
+</GetIdentityDkimAttributesResponse>"""
+
+ def test_ses_get_identity_dkim_list(self):
+ self.set_http_response(status_code=200)
+
+ response = self.service_connection\
+ .get_identity_dkim_attributes(['test@amazon.com'])
+
+ response = response['GetIdentityDkimAttributesResponse']
+ result = response['GetIdentityDkimAttributesResult']
+ attributes = result['DkimAttributes']['entry']['value']
+ tokens = attributes['DkimTokens']
+
+ self.assertEqual(ListElement, type(tokens))
+ self.assertEqual(3, len(tokens))
+ self.assertEqual('vvjuipp74whm76gqoni7qmwwn4w4qusjiainivf6f',
+ tokens[0])
+ self.assertEqual('3frqe7jn4obpuxjpwpolz6ipb3k5nvt2nhjpik2oy',
+ tokens[1])
+ self.assertEqual('wrqplteh7oodxnad7hsl4mixg2uavzneazxv5sxi2',
+ tokens[2])
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/tests/unit/sns/test_connection.py b/tests/unit/sns/test_connection.py
index 8cc064e5..3a474c3c 100644
--- a/tests/unit/sns/test_connection.py
+++ b/tests/unit/sns/test_connection.py
@@ -90,6 +90,141 @@ class TestSNSConnection(AWSMockServiceTestCase):
# Only a single statement should be part of the policy.
self.assertEqual(len(actual_policy['Statement']), 1)
+ def test_publish_with_positional_args(self):
+ self.set_http_response(status_code=200)
+
+ self.service_connection.publish('topic', 'message', 'subject')
+ self.assert_request_parameters({
+ 'Action': 'Publish',
+ 'TopicArn': 'topic',
+ 'Subject': 'subject',
+ 'Message': 'message',
+ }, ignore_params_values=['Version', 'ContentType'])
+
+ def test_publish_with_kwargs(self):
+ self.set_http_response(status_code=200)
+
+ self.service_connection.publish(topic='topic',
+ message='message',
+ subject='subject')
+ self.assert_request_parameters({
+ 'Action': 'Publish',
+ 'TopicArn': 'topic',
+ 'Subject': 'subject',
+ 'Message': 'message',
+ }, ignore_params_values=['Version', 'ContentType'])
+
+ def test_publish_with_target_arn(self):
+ self.set_http_response(status_code=200)
+
+ self.service_connection.publish(target_arn='target_arn',
+ message='message',
+ subject='subject')
+ self.assert_request_parameters({
+ 'Action': 'Publish',
+ 'TargetArn': 'target_arn',
+ 'Subject': 'subject',
+ 'Message': 'message',
+ }, ignore_params_values=['Version', 'ContentType'])
+
+ def test_create_platform_application(self):
+ self.set_http_response(status_code=200)
+
+ self.service_connection.create_platform_application(
+ name='MyApp',
+ platform='APNS',
+ attributes={
+ 'PlatformPrincipal': 'a ssl certificate',
+ 'PlatformCredential': 'a private key'
+ }
+ )
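+ # The attributes dict should be flattened into indexed Attributes.entry.N.key/value pairs, ordered by key.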
+ self.assert_request_parameters({
+ 'Action': 'CreatePlatformApplication',
+ 'Name': 'MyApp',
+ 'Platform': 'APNS',
+ 'Attributes.entry.1.key': 'PlatformCredential',
+ 'Attributes.entry.1.value': 'a private key',
+ 'Attributes.entry.2.key': 'PlatformPrincipal',
+ 'Attributes.entry.2.value': 'a ssl certificate',
+ }, ignore_params_values=['Version', 'ContentType'])
+
+ def test_set_platform_application_attributes(self):
+ self.set_http_response(status_code=200)
+
+ self.service_connection.set_platform_application_attributes(
+ platform_application_arn='arn:myapp',
+ attributes={'PlatformPrincipal': 'a ssl certificate',
+ 'PlatformCredential': 'a private key'})
+ self.assert_request_parameters({
+ 'Action': 'SetPlatformApplicationAttributes',
+ 'PlatformApplicationArn': 'arn:myapp',
+ 'Attributes.entry.1.key': 'PlatformCredential',
+ 'Attributes.entry.1.value': 'a private key',
+ 'Attributes.entry.2.key': 'PlatformPrincipal',
+ 'Attributes.entry.2.value': 'a ssl certificate',
+ }, ignore_params_values=['Version', 'ContentType'])
+
+ def test_create_platform_endpoint(self):
+ self.set_http_response(status_code=200)
+
+ self.service_connection.create_platform_endpoint(
+ platform_application_arn='arn:myapp',
+ token='abcde12345',
+ custom_user_data='john',
+ attributes={'Enabled': False})
+ self.assert_request_parameters({
+ 'Action': 'CreatePlatformEndpoint',
+ 'PlatformApplicationArn': 'arn:myapp',
+ 'Token': 'abcde12345',
+ 'CustomUserData': 'john',
+ 'Attributes.entry.1.key': 'Enabled',
+ 'Attributes.entry.1.value': False,
+ }, ignore_params_values=['Version', 'ContentType'])
+
+ def test_set_endpoint_attributes(self):
+ self.set_http_response(status_code=200)
+
+ self.service_connection.set_endpoint_attributes(
+ endpoint_arn='arn:myendpoint',
+ attributes={'CustomUserData': 'john',
+ 'Enabled': False})
+ self.assert_request_parameters({
+ 'Action': 'SetEndpointAttributes',
+ 'EndpointArn': 'arn:myendpoint',
+ 'Attributes.entry.1.key': 'CustomUserData',
+ 'Attributes.entry.1.value': 'john',
+ 'Attributes.entry.2.key': 'Enabled',
+ 'Attributes.entry.2.value': False,
+ }, ignore_params_values=['Version', 'ContentType'])
+
+ def test_message_is_required(self):
+ self.set_http_response(status_code=200)
+
+ with self.assertRaises(TypeError):
+ self.service_connection.publish(topic='topic', subject='subject')
+
+ def test_publish_with_json(self):
+ self.set_http_response(status_code=200)
+
+ self.service_connection.publish(
+ message=json.dumps({
+ 'default': 'Ignored.',
+ 'GCM': {
+ 'data': 'goes here',
+ }
+ }),
+ message_structure='json',
+ subject='subject',
+ target_arn='target_arn'
+ )
+ self.assert_request_parameters({
+ 'Action': 'Publish',
+ 'TargetArn': 'target_arn',
+ 'Subject': 'subject',
+ 'Message': '{"default": "Ignored.", "GCM": {"data": "goes here"}}',
+ 'MessageStructure': 'json',
+ }, ignore_params_values=['Version', 'ContentType'])
+
if __name__ == '__main__':
unittest.main()
diff --git a/tests/unit/sqs/test_message.py b/tests/unit/sqs/test_message.py
new file mode 100644
index 00000000..44cca9c1
--- /dev/null
+++ b/tests/unit/sqs/test_message.py
@@ -0,0 +1,35 @@
+# Copyright (c) 2013 Amazon.com, Inc. or its affiliates. All Rights Reserved
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish, dis-
+# tribute, sublicense, and/or sell copies of the Software, and to permit
+# persons to whom the Software is furnished to do so, subject to the fol-
+# lowing conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
+# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+#
+from tests.unit import unittest
+
+from boto.sqs.message import MHMessage
+
+
+class TestMHMessage(unittest.TestCase):
+ def test_contains(self):
+ msg = MHMessage()
+ msg.update({'hello': 'world'})
+ self.assertTrue('hello' in msg)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/tests/unit/sts/__init__.py b/tests/unit/sts/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/tests/unit/sts/__init__.py
diff --git a/tests/unit/sts/test_connection.py b/tests/unit/sts/test_connection.py
index f874cafd..de0ab261 100644
--- a/tests/unit/sts/test_connection.py
+++ b/tests/unit/sts/test_connection.py
@@ -70,5 +70,93 @@ class TestSTSConnection(AWSMockServiceTestCase):
self.assertEqual(response.user.assume_role_id, 'roleid:myrolesession')
+class TestSTSWebIdentityConnection(AWSMockServiceTestCase):
+ connection_class = STSConnection
+
+ def setUp(self):
+ super(TestSTSWebIdentityConnection, self).setUp()
+
+ def default_body(self):
+ return """
+<AssumeRoleWithWebIdentityResponse xmlns="https://sts.amazonaws.com/doc/2011-06-15/">
+ <AssumeRoleWithWebIdentityResult>
+ <SubjectFromWebIdentityToken>
+ amzn1.account.AF6RHO7KZU5XRVQJGXK6HB56KR2A
+ </SubjectFromWebIdentityToken>
+ <AssumedRoleUser>
+ <Arn>
+ arn:aws:sts::000240903217:assumed-role/FederatedWebIdentityRole/app1
+ </Arn>
+ <AssumedRoleId>
+ AROACLKWSDQRAOFQC3IDI:app1
+ </AssumedRoleId>
+ </AssumedRoleUser>
+ <Credentials>
+ <SessionToken>
+ AQoDYXdzEE0a8ANXXXXXXXXNO1ewxE5TijQyp+IPfnyowF
+ </SessionToken>
+ <SecretAccessKey>
+ secretkey
+ </SecretAccessKey>
+ <Expiration>
+ 2013-05-14T23:00:23Z
+ </Expiration>
+ <AccessKeyId>
+ accesskey
+ </AccessKeyId>
+ </Credentials>
+ </AssumeRoleWithWebIdentityResult>
+ <ResponseMetadata>
+ <RequestId>ad4156e9-bce1-11e2-82e6-6b6ef249e618</RequestId>
+ </ResponseMetadata>
+</AssumeRoleWithWebIdentityResponse>
+ """
+
+ def test_assume_role_with_web_identity(self):
+ arn = 'arn:aws:iam::000240903217:role/FederatedWebIdentityRole'
+ wit = 'b94d27b9934d3e08a52e52d7da7dabfac484efe37a5380ee9088f7ace2efcde9'
+
+ self.set_http_response(status_code=200)
+ response = self.service_connection.assume_role_with_web_identity(
+ role_arn=arn,
+ role_session_name='guestuser',
+ web_identity_token=wit,
+ provider_id='www.amazon.com',
+ )
+ self.assert_request_parameters({
+ 'RoleSessionName': 'guestuser',
+ 'AWSAccessKeyId': 'aws_access_key_id',
+ 'RoleArn': arn,
+ 'WebIdentityToken': wit,
+ 'ProviderId': 'www.amazon.com',
+ 'Action': 'AssumeRoleWithWebIdentity'
+ }, ignore_params_values=[
+ 'SignatureMethod',
+ 'Timestamp',
+ 'SignatureVersion',
+ 'Version',
+ ])
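+ # The mocked XML wraps its values in whitespace, hence the .strip() calls below.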
+ self.assertEqual(
+ response.credentials.access_key.strip(),
+ 'accesskey'
+ )
+ self.assertEqual(
+ response.credentials.secret_key.strip(),
+ 'secretkey'
+ )
+ self.assertEqual(
+ response.credentials.session_token.strip(),
+ 'AQoDYXdzEE0a8ANXXXXXXXXNO1ewxE5TijQyp+IPfnyowF'
+ )
+ self.assertEqual(
+ response.user.arn.strip(),
+ 'arn:aws:sts::000240903217:assumed-role/FederatedWebIdentityRole/app1'
+ )
+ self.assertEqual(
+ response.user.assume_role_id.strip(),
+ 'AROACLKWSDQRAOFQC3IDI:app1'
+ )
+
+
if __name__ == '__main__':
unittest.main()
diff --git a/tests/unit/sts/test_credentials.py b/tests/unit/sts/test_credentials.py
new file mode 100644
index 00000000..27a16ca7
--- /dev/null
+++ b/tests/unit/sts/test_credentials.py
@@ -0,0 +1,38 @@
+import unittest
+
+from boto.sts.credentials import Credentials
+
+
+class STSCredentialsTest(unittest.TestCase):
+ sts = True
+
+ def setUp(self):
+ super(STSCredentialsTest, self).setUp()
+ self.creds = Credentials()
+
+ def test_to_dict(self):
+ # This would fail miserably if ``Credentials.request_id`` hadn't been
+ # explicitly set (no default).
+ # Default.
+ self.assertEqual(self.creds.to_dict(), {
+ 'access_key': None,
+ 'expiration': None,
+ 'request_id': None,
+ 'secret_key': None,
+ 'session_token': None
+ })
+
+ # Override.
+ creds = Credentials()
+ creds.access_key = 'something'
+ creds.secret_key = 'crypto'
+ creds.session_token = 'this'
+ creds.expiration = 'way'
+ creds.request_id = 'comes'
+ self.assertEqual(creds.to_dict(), {
+ 'access_key': 'something',
+ 'expiration': 'way',
+ 'request_id': 'comes',
+ 'secret_key': 'crypto',
+ 'session_token': 'this'
+ })
diff --git a/tests/unit/swf/__init__.py b/tests/unit/swf/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/tests/unit/swf/__init__.py
diff --git a/tests/unit/swf/test_layer2_actors.py b/tests/unit/swf/test_layer2_actors.py
new file mode 100644
index 00000000..cedf895b
--- /dev/null
+++ b/tests/unit/swf/test_layer2_actors.py
@@ -0,0 +1,87 @@
+import boto.swf.layer2
+from boto.swf.layer2 import Decider, ActivityWorker
+from tests.unit import unittest
+from mock import Mock
+
+
+class TestActors(unittest.TestCase):
+
+ def setUp(self):
+ boto.swf.layer2.Layer1 = Mock()
+ self.worker = ActivityWorker(name='test-worker', domain='test', task_list='test_list')
+ self.decider = Decider(name='test-worker', domain='test', task_list='test_list')
+ self.worker._swf = Mock()
+ self.decider._swf = Mock()
+
+ def test_decider_pass_tasktoken(self):
+ self.decider._swf.poll_for_decision_task.return_value = {
+ 'events': [{'eventId': 1,
+ 'eventTimestamp': 1379019427.953,
+ 'eventType': 'WorkflowExecutionStarted',
+ 'workflowExecutionStartedEventAttributes': {
+ 'childPolicy': 'TERMINATE',
+ 'executionStartToCloseTimeout': '3600',
+ 'parentInitiatedEventId': 0,
+ 'taskList': {'name': 'test_list'},
+ 'taskStartToCloseTimeout': '123',
+ 'workflowType': {'name': 'test_workflow_name',
+ 'version': 'v1'}}},
+ {'decisionTaskScheduledEventAttributes':
+ {'startToCloseTimeout': '123',
+ 'taskList': {'name': 'test_list'}},
+ 'eventId': 2,
+ 'eventTimestamp': 1379019427.953,
+ 'eventType': 'DecisionTaskScheduled'},
+ {'decisionTaskStartedEventAttributes': {'scheduledEventId': 2},
+ 'eventId': 3, 'eventTimestamp': 1379019495.585,
+ 'eventType': 'DecisionTaskStarted'}],
+ 'previousStartedEventId': 0, 'startedEventId': 3,
+ 'taskToken': 'my_specific_task_token',
+ 'workflowExecution': {'runId': 'fwr243dsa324132jmflkfu0943tr09=',
+ 'workflowId': 'test_workflow_name-v1-1379019427'},
+ 'workflowType': {'name': 'test_workflow_name', 'version': 'v1'}}
+
+ self.decider.poll()
+ self.decider.complete()
+
+ self.decider._swf.respond_decision_task_completed.assert_called_with('my_specific_task_token', None)
+ self.assertEqual('my_specific_task_token', self.decider.last_tasktoken)
+
+ def test_worker_pass_tasktoken(self):
+ task_token = 'worker_task_token'
+ self.worker._swf.poll_for_activity_task.return_value = {
+ 'activityId': 'SomeActivity-1379020713',
+ 'activityType': {'name': 'SomeActivity', 'version': '1.0'},
+ 'startedEventId': 6,
+ 'taskToken': task_token,
+ 'workflowExecution': {'runId': '12T026NzGK5c4eMti06N9O3GHFuTDaNyA+8LFtoDkAwfE=',
+ 'workflowId': 'MyWorkflow-1.0-1379020705'}}
+
+ self.worker.poll()
+
+ self.worker.cancel(details='Cancelling!')
+ self.worker.complete(result='Done!')
+ self.worker.fail(reason='Failure!')
+ self.worker.heartbeat()
+
+ self.worker._swf.respond_activity_task_canceled.assert_called_with(task_token, 'Cancelling!')
+ self.worker._swf.respond_activity_task_completed.assert_called_with(task_token, 'Done!')
+ self.worker._swf.respond_activity_task_failed.assert_called_with(task_token, None, 'Failure!')
+ self.worker._swf.record_activity_task_heartbeat.assert_called_with(task_token, None)
+
+ def test_actor_poll_without_tasklist_override(self):
+ self.worker.poll()
+ self.decider.poll()
+ self.worker._swf.poll_for_activity_task.assert_called_with('test', 'test_list')
+ self.decider._swf.poll_for_decision_task.assert_called_with('test', 'test_list')
+
+ def test_worker_override_tasklist(self):
+ self.worker.poll(task_list='some_other_tasklist')
+ self.worker._swf.poll_for_activity_task.assert_called_with('test', 'some_other_tasklist')
+
+ def test_decider_override_tasklist(self):
+ self.decider.poll(task_list='some_other_tasklist')
+ self.decider._swf.poll_for_decision_task.assert_called_with('test', 'some_other_tasklist')
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/tests/unit/swf/test_layer2_domain.py b/tests/unit/swf/test_layer2_domain.py
new file mode 100644
index 00000000..b56cb4b1
--- /dev/null
+++ b/tests/unit/swf/test_layer2_domain.py
@@ -0,0 +1,112 @@
+import boto.swf.layer2
+from boto.swf.layer2 import Domain, ActivityType, WorkflowType, WorkflowExecution
+from tests.unit import unittest
+from mock import Mock
+
+
+class TestDomain(unittest.TestCase):
+
+ def setUp(self):
+ boto.swf.layer2.Layer1 = Mock()
+ self.domain = Domain(name='test-domain', description='My test domain')
+ self.domain.aws_access_key_id = 'inheritable access key'
+ self.domain.aws_secret_access_key = 'inheritable secret key'
+
+ def test_domain_instantiation(self):
+ self.assertEquals('test-domain', self.domain.name)
+ self.assertEquals('My test domain', self.domain.description)
+
+ def test_domain_list_activities(self):
+ self.domain._swf.list_activity_types.return_value = {
+ 'typeInfos': [{'activityType': {'name': 'DeleteLocalFile',
+ 'version': '1.0'},
+ 'creationDate': 1332853651.235,
+ 'status': 'REGISTERED'},
+ {'activityType': {'name': 'DoUpdate', 'version': 'test'},
+ 'creationDate': 1333463734.528,
+ 'status': 'REGISTERED'},
+ {'activityType': {'name': 'GrayscaleTransform',
+ 'version': '1.0'},
+ 'creationDate': 1332853651.18,
+ 'status': 'REGISTERED'},
+ {'activityType': {'name': 'S3Download', 'version': '1.0'},
+ 'creationDate': 1332853651.264,
+ 'status': 'REGISTERED'},
+ {'activityType': {'name': 'S3Upload', 'version': '1.0'},
+ 'creationDate': 1332853651.314,
+ 'status': 'REGISTERED'},
+ {'activityType': {'name': 'SepiaTransform', 'version': '1.1'},
+ 'creationDate': 1333373797.734,
+ 'status': 'REGISTERED'}]}
+
+ expected_names = ('DeleteLocalFile', 'GrayscaleTransform', 'S3Download',
+ 'S3Upload', 'SepiaTransform', 'DoUpdate')
+
+ activity_types = self.domain.activities()
+ self.assertEquals(6, len(activity_types))
+ for activity_type in activity_types:
+ self.assertIsInstance(activity_type, ActivityType)
+ self.assertTrue(activity_type.name in expected_names)
+
+ def test_domain_list_workflows(self):
+ self.domain._swf.list_workflow_types.return_value = {
+ 'typeInfos': [{'creationDate': 1332853651.136,
+ 'description': 'Image processing sample workflow type',
+ 'status': 'REGISTERED',
+ 'workflowType': {'name': 'ProcessFile', 'version': '1.0'}},
+ {'creationDate': 1333551719.89,
+ 'status': 'REGISTERED',
+ 'workflowType': {'name': 'test_workflow_name',
+ 'version': 'v1'}}]}
+ expected_names = ('ProcessFile', 'test_workflow_name')
+
+ workflow_types = self.domain.workflows()
+ self.assertEquals(2, len(workflow_types))
+ for workflow_type in workflow_types:
+ self.assertIsInstance(workflow_type, WorkflowType)
+ self.assertTrue(workflow_type.name in expected_names)
+ self.assertEquals(self.domain.aws_access_key_id, workflow_type.aws_access_key_id)
+ self.assertEquals(self.domain.aws_secret_access_key, workflow_type.aws_secret_access_key)
+ self.assertEquals(self.domain.name, workflow_type.domain)
+
+ def test_domain_list_executions(self):
+ self.domain._swf.list_open_workflow_executions.return_value = {
+ 'executionInfos': [{'cancelRequested': False,
+ 'execution': {'runId': '12OeDTyoD27TDaafViz/QIlCHrYzspZmDgj0coIfjm868=',
+ 'workflowId': 'ProcessFile-1.0-1378933928'},
+ 'executionStatus': 'OPEN',
+ 'startTimestamp': 1378933928.676,
+ 'workflowType': {'name': 'ProcessFile',
+ 'version': '1.0'}},
+ {'cancelRequested': False,
+ 'execution': {'runId': '12GwBkx4hH6t2yaIh8LYxy5HyCM6HcyhDKePJCg0/ciJk=',
+ 'workflowId': 'ProcessFile-1.0-1378933927'},
+ 'executionStatus': 'OPEN',
+ 'startTimestamp': 1378933927.919,
+ 'workflowType': {'name': 'ProcessFile',
+ 'version': '1.0'}},
+ {'cancelRequested': False,
+ 'execution': {'runId': '12oRG3vEWrQ7oYBV+Bqi33Fht+ZRCYTt+tOdn5kLVcwKI=',
+ 'workflowId': 'ProcessFile-1.0-1378933926'},
+ 'executionStatus': 'OPEN',
+ 'startTimestamp': 1378933927.04,
+ 'workflowType': {'name': 'ProcessFile',
+ 'version': '1.0'}},
+ {'cancelRequested': False,
+ 'execution': {'runId': '12qrdcpYmad2cjnqJcM4Njm3qrCGvmRFR1wwQEt+a2ako=',
+ 'workflowId': 'ProcessFile-1.0-1378933874'},
+ 'executionStatus': 'OPEN',
+ 'startTimestamp': 1378933874.956,
+ 'workflowType': {'name': 'ProcessFile',
+ 'version': '1.0'}}]}
+
+ executions = self.domain.executions()
+ self.assertEquals(4, len(executions))
+ for wf_execution in executions:
+ self.assertIsInstance(wf_execution, WorkflowExecution)
+ self.assertEquals(self.domain.aws_access_key_id, wf_execution.aws_access_key_id)
+ self.assertEquals(self.domain.aws_secret_access_key, wf_execution.aws_secret_access_key)
+ self.assertEquals(self.domain.name, wf_execution.domain)
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/tests/unit/swf/test_layer2_types.py b/tests/unit/swf/test_layer2_types.py
new file mode 100644
index 00000000..d9b7db0d
--- /dev/null
+++ b/tests/unit/swf/test_layer2_types.py
@@ -0,0 +1,46 @@
+import boto.swf.layer2
+from boto.swf.layer2 import ActivityType, WorkflowType, WorkflowExecution
+from tests.unit import unittest
+from mock import Mock, ANY
+
+
+class TestTypes(unittest.TestCase):
+
+ def setUp(self):
+ boto.swf.layer2.Layer1 = Mock()
+
+ def test_workflow_type_register_defaults(self):
+ wf_type = WorkflowType(name='name', domain='test', version='1')
+ wf_type.register()
+
+ wf_type._swf.register_workflow_type.assert_called_with('test', 'name', '1',
+ default_execution_start_to_close_timeout=ANY,
+ default_task_start_to_close_timeout=ANY,
+ default_child_policy=ANY
+ )
+
+ def test_activity_type_register_defaults(self):
+ act_type = ActivityType(name='name', domain='test', version='1')
+ act_type.register()
+
+ act_type._swf.register_activity_type.assert_called_with('test', 'name', '1',
+ default_task_heartbeat_timeout=ANY,
+ default_task_schedule_to_close_timeout=ANY,
+ default_task_schedule_to_start_timeout=ANY,
+ default_task_start_to_close_timeout=ANY
+ )
+
+ def test_workflow_type_start_execution(self):
+ wf_type = WorkflowType(name='name', domain='test', version='1')
+ run_id = '122aJcg6ic7MRAkjDRzLBsqU/R49qt5D0LPHycT/6ArN4='
+ wf_type._swf.start_workflow_execution.return_value = {'runId': run_id}
+
+ execution = wf_type.start(task_list='hello_world')
+
+ self.assertIsInstance(execution, WorkflowExecution)
+ self.assertEquals(wf_type.name, execution.name)
+ self.assertEquals(wf_type.version, execution.version)
+ self.assertEquals(run_id, execution.runId)
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/tests/unit/test_connection.py b/tests/unit/test_connection.py
index d71587fc..75df1c63 100644
--- a/tests/unit/test_connection.py
+++ b/tests/unit/test_connection.py
@@ -19,6 +19,9 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
+from __future__ import with_statement
+
+import os
import urlparse
from tests.unit import unittest
from httpretty import HTTPretty
@@ -81,11 +84,13 @@ class MockAWSService(AWSQueryConnection):
api_version=None, security_token=None,
validate_certs=True):
self.region = region
+ if host is None:
+ host = self.region.endpoint
AWSQueryConnection.__init__(self, aws_access_key_id,
aws_secret_access_key,
is_secure, port, proxy, proxy_port,
proxy_user, proxy_pass,
- self.region.endpoint, debug,
+ host, debug,
https_connection_factory, path,
security_token,
validate_certs=validate_certs)
@@ -134,6 +139,49 @@ class TestAWSQueryConnectionSimple(TestAWSQueryConnection):
self.assertEqual(conn.host, 'mockservice.cc-zone-1.amazonaws.com')
+ def test_query_connection_noproxy(self):
+ HTTPretty.register_uri(HTTPretty.POST,
+ 'https://%s/' % self.region.endpoint,
+ json.dumps({'test': 'secure'}),
+ content_type='application/json')
+
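+ # With the endpoint listed in no_proxy, the bogus proxy settings below must be bypassed for the request to succeed.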
+ os.environ['no_proxy'] = self.region.endpoint
+
+ conn = self.region.connect(aws_access_key_id='access_key',
+ aws_secret_access_key='secret',
+ proxy="NON_EXISTENT_HOSTNAME",
+ proxy_port="3128")
+
+ resp = conn.make_request('myCmd',
+ {'par1': 'foo', 'par2': 'baz'},
+ "/",
+ "POST")
+ del os.environ['no_proxy']
+ args = urlparse.parse_qs(HTTPretty.last_request.body)
+ self.assertEqual(args['AWSAccessKeyId'], ['access_key'])
+
+ def test_query_connection_noproxy_nosecure(self):
+ HTTPretty.register_uri(HTTPretty.POST,
+ 'https://%s/' % self.region.endpoint,
+ json.dumps({'test': 'insecure'}),
+ content_type='application/json')
+
+ os.environ['no_proxy'] = self.region.endpoint
+
+ conn = self.region.connect(aws_access_key_id='access_key',
+ aws_secret_access_key='secret',
+ proxy="NON_EXISTENT_HOSTNAME",
+ proxy_port="3128",
+ is_secure=False)
+
+ resp = conn.make_request('myCmd',
+ {'par1': 'foo', 'par2': 'baz'},
+ "/",
+ "POST")
+ del os.environ['no_proxy']
+ args = urlparse.parse_qs(HTTPretty.last_request.body)
+ self.assertEqual(args['AWSAccessKeyId'], ['access_key'])
+
def test_single_command(self):
HTTPretty.register_uri(HTTPretty.POST,
'https://%s/' % self.region.endpoint,
@@ -240,6 +288,57 @@ class TestAWSQueryConnectionSimple(TestAWSQueryConnection):
'POST')
self.assertEqual(resp.read(), "{'test': 'success'}")
+ def test_connection_close(self):
+ """Check connection re-use after close header is received"""
+ HTTPretty.register_uri(HTTPretty.POST,
+ 'https://%s/' % self.region.endpoint,
+ json.dumps({'test': 'secure'}),
+ content_type='application/json',
+ connection='close')
+
+ conn = self.region.connect(aws_access_key_id='access_key',
+ aws_secret_access_key='secret')
+
+ def mock_put_conn(*args, **kwargs):
+ raise Exception('put_http_connection should not be called!')
+
+ conn.put_http_connection = mock_put_conn
+
+ resp1 = conn.make_request('myCmd1',
+ {'par1': 'foo', 'par2': 'baz'},
+ "/",
+ "POST")
+
+ # If we've gotten this far then no exception was raised
+ # by attempting to put the connection back into the pool
+ # Now let's just confirm the close header was actually
+ # set or we have another problem.
+ self.assertEqual(resp1.getheader('connection'), 'close')
+
+ def test_port_pooling(self):
+ conn = self.region.connect(aws_access_key_id='access_key',
+ aws_secret_access_key='secret',
+ port=8080)
+
+ # Pick a connection, then put it back
+ con1 = conn.get_http_connection(conn.host, conn.port, conn.is_secure)
+ conn.put_http_connection(conn.host, conn.port, conn.is_secure, con1)
+
+ # Pick another connection, which hopefully is the same yet again
+ con2 = conn.get_http_connection(conn.host, conn.port, conn.is_secure)
+ conn.put_http_connection(conn.host, conn.port, conn.is_secure, con2)
+
+ self.assertEqual(con1, con2)
+
+ # Change the port and make sure a new connection is made
+ conn.port = 8081
+
+ con3 = conn.get_http_connection(conn.host, conn.port, conn.is_secure)
+ conn.put_http_connection(conn.host, conn.port, conn.is_secure, con3)
+
+ self.assertNotEqual(con1, con3)
+
+
class TestAWSQueryStatus(TestAWSQueryConnection):
def test_get_status(self):
diff --git a/tests/unit/test_exception.py b/tests/unit/test_exception.py
index 684ca0ce..a14f0dca 100644
--- a/tests/unit/test_exception.py
+++ b/tests/unit/test_exception.py
@@ -1,6 +1,6 @@
from tests.unit import unittest
-from boto.exception import BotoServerError
+from boto.exception import BotoServerError, S3CreateError, JSONResponseError
from httpretty import HTTPretty, httprettified
@@ -23,8 +23,9 @@ class TestBotoServerError(unittest.TestCase):
<RequestId>093f80d0-4473-11e1-9234-edce8ec08e2d</RequestId>
</ErrorResponse>"""
bse = BotoServerError('400', 'Bad Request', body=xml)
-
+
self.assertEqual(bse.error_message, 'Cannot find Load Balancer webapp-balancer2')
+ self.assertEqual(bse.error_message, bse.message)
self.assertEqual(bse.request_id, '093f80d0-4473-11e1-9234-edce8ec08e2d')
self.assertEqual(bse.error_code, 'LoadBalancerNotFound')
self.assertEqual(bse.status, '400')
@@ -44,10 +45,11 @@ class TestBotoServerError(unittest.TestCase):
<RequestID>e73bb2bb-63e3-9cdc-f220-6332de66dbbe</RequestID>
</Response>"""
bse = BotoServerError('403', 'Forbidden', body=xml)
- self.assertEqual(bse.error_message,
+ self.assertEqual(bse.error_message,
'Session does not have permission to perform (sdb:CreateDomain) on '
'resource (arn:aws:sdb:us-east-1:xxxxxxx:domain/test_domain). '
'Contact account owner.')
+ self.assertEqual(bse.error_message, bse.message)
self.assertEqual(bse.box_usage, '0.0055590278')
self.assertEqual(bse.error_code, 'AuthorizationFailure')
self.assertEqual(bse.status, '403')
@@ -65,6 +67,45 @@ class TestBotoServerError(unittest.TestCase):
bse = BotoServerError('403', 'Forbidden', body=xml)
self.assertEqual([], HTTPretty.latest_requests)
+ def test_message_storage_create_error(self):
+ # This test value comes from https://answers.launchpad.net/duplicity/+question/150801
+ xml = """<?xml version="1.0" encoding="UTF-8"?>
+<Error>
+ <Code>BucketAlreadyOwnedByYou</Code>
+ <Message>Your previous request to create the named bucket succeeded and you already own it.</Message>
+ <BucketName>cmsbk</BucketName>
+ <RequestId>FF8B86A32CC3FE4F</RequestId>
+ <HostId>6ENGL3DT9f0n7Tkv4qdKIs/uBNCMMA6QUFapw265WmodFDluP57esOOkecp55qhh</HostId>
+</Error>
+"""
+ s3ce = S3CreateError('409', 'Conflict', body=xml)
+
+ self.assertEqual(s3ce.bucket, 'cmsbk')
+ self.assertEqual(s3ce.error_code, 'BucketAlreadyOwnedByYou')
+ self.assertEqual(s3ce.status, '409')
+ self.assertEqual(s3ce.reason, 'Conflict')
+ self.assertEqual(s3ce.error_message,
+ 'Your previous request to create the named bucket succeeded '
+ 'and you already own it.')
+ self.assertEqual(s3ce.error_message, s3ce.message)
+ self.assertEqual(s3ce.request_id, 'FF8B86A32CC3FE4F')
+
+ def test_message_json_response_error(self):
+ # This test comes from https://forums.aws.amazon.com/thread.jspa?messageID=374936
+ body = {
+ '__type': 'com.amazon.coral.validate#ValidationException',
+ 'message': 'The attempted filter operation is not supported '
+ 'for the provided filter argument count'}
+
+ jre = JSONResponseError('400', 'Bad Request', body=body)
+
+ self.assertEqual(jre.status, '400')
+ self.assertEqual(jre.reason, 'Bad Request')
+ self.assertEqual(jre.error_message, body['message'])
+ self.assertEqual(jre.error_message, jre.message)
+ self.assertEqual(jre.code, 'ValidationException')
+ self.assertEqual(jre.code, jre.error_code)
+
def test_message_not_xml(self):
body = 'This is not XML'
@@ -76,3 +117,4 @@ class TestBotoServerError(unittest.TestCase):
bse = BotoServerError('400', 'Bad Request', body=body)
self.assertEqual(bse.code, bse.error_code)
+ self.assertEqual(bse.message, bse.error_message)
diff --git a/tests/unit/vpc/__init__.py b/tests/unit/vpc/__init__.py
index e69de29b..c7856c5b 100644
--- a/tests/unit/vpc/__init__.py
+++ b/tests/unit/vpc/__init__.py
@@ -0,0 +1,3 @@
+"""
+Test package for VPC
+"""
diff --git a/tests/unit/vpc/test_vpc.py b/tests/unit/vpc/test_vpc.py
index 499d1582..1f21f3aa 100644
--- a/tests/unit/vpc/test_vpc.py
+++ b/tests/unit/vpc/test_vpc.py
@@ -4,6 +4,7 @@ from tests.unit import AWSMockServiceTestCase
from boto.vpc import VPCConnection
+
DESCRIBE_VPCS = r'''<?xml version="1.0" encoding="UTF-8"?>
<DescribeVpcsResponse xmlns="http://ec2.amazonaws.com/doc/2013-02-01/">
<requestId>623040d1-b51c-40bc-8080-93486f38d03d</requestId>
@@ -22,10 +23,10 @@ DESCRIBE_VPCS = r'''<?xml version="1.0" encoding="UTF-8"?>
class TestDescriveVPCs(AWSMockServiceTestCase):
connection_class = VPCConnection
-
+
def default_body(self):
return DESCRIBE_VPCS
-
+
def test_get_vpcs(self):
self.set_http_response(status_code=200)
@@ -36,5 +37,61 @@ class TestDescriveVPCs(AWSMockServiceTestCase):
self.assertFalse(vpc.is_default)
self.assertEqual(vpc.instance_tenancy,'default')
+
+class TestVPCConnection(unittest.TestCase):
+ """
+ Test class for `boto.vpc.VPCConnection`
+ """
+
+ def setUp(self):
+ """
+ Setup method to initialize the vpc_connection object
+ """
+ super(TestVPCConnection, self).setUp()
+ self.vpc_connection = VPCConnection(
+ aws_access_key_id='aws_access_key_id',
+ aws_secret_access_key='aws_secret_access_key')
+
+ def test_detach_internet_gateway(self):
+ """
+ Tests detach_internet_gateway with all valid parameters
+ """
+ internet_gateway_id = 'mock_gateway_id'
+ vpc_id = 'mock_vpc_id'
+
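+ # Stub out get_status so the request parameters are checked locally instead of hitting the API.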
+ def get_status(status, params):
+ if status == "DetachInternetGateway" and \
+ params["InternetGatewayId"] == internet_gateway_id and \
+ params["VpcId"] == vpc_id:
+ return True
+ else:
+ return False
+
+ self.vpc_connection.get_status = get_status
+ status = self.vpc_connection.detach_internet_gateway(
+ internet_gateway_id, vpc_id)
+ self.assertEquals(True, status)
+
+ def test_replace_route_table_association(self):
+ """
+ Tests replace_route_table_assocation with all valid parameters
+ """
+ association_id = 'mock_association_id'
+ route_table_id = 'mock_route_table_id'
+
+ def get_status(status, params):
+ if status == "ReplaceRouteTableAssociation" and \
+ params["AssociationId"] == association_id and \
+ params["RouteTableId"] == route_table_id:
+ return True
+ else:
+ return False
+
+ self.vpc_connection.get_status = get_status
+ status = self.vpc_connection.replace_route_table_assocation(
+ association_id, route_table_id)
+ self.assertEquals(True, status)
+
+
if __name__ == '__main__':
- unittest.main() \ No newline at end of file
+ unittest.main()