summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorMitch Garnaat <mitch@garnaat.com>2012-05-15 18:59:46 -0700
committerMitch Garnaat <mitch@garnaat.com>2012-05-15 18:59:46 -0700
commit1aa1133e8502ea6c95e49ac34681569df5ace46b (patch)
treea25397b20d7d409131ccfbfce5d4dca23d7e52a4
parent911f42b97fdccbc55e160ec323df0cad6fe64c6b (diff)
parent6588ea270bfc9e0bb4d17263b72ee8b5255545c5 (diff)
downloadboto-1aa1133e8502ea6c95e49ac34681569df5ace46b.tar.gz
Merge branch 'release-2.4.0'2.4.0
-rw-r--r--MANIFEST.in2
-rw-r--r--README.markdown73
-rw-r--r--README.rst100
-rwxr-xr-xbin/s3multiput4
-rw-r--r--boto/__init__.py25
-rw-r--r--boto/auth.py27
-rw-r--r--boto/cloudformation/connection.py7
-rw-r--r--boto/cloudformation/stack.py18
-rw-r--r--boto/cloudfront/invalidation.py2
-rw-r--r--boto/cloudsearch/__init__.py45
-rw-r--r--boto/cloudsearch/document.py150
-rw-r--r--boto/cloudsearch/domain.py397
-rw-r--r--boto/cloudsearch/layer1.py732
-rw-r--r--boto/cloudsearch/layer2.py52
-rw-r--r--boto/cloudsearch/optionstatus.py249
-rw-r--r--boto/cloudsearch/search.py298
-rw-r--r--boto/cloudsearch/sourceattribute.py75
-rw-r--r--boto/connection.py27
-rw-r--r--boto/dynamodb/__init__.py16
-rw-r--r--boto/dynamodb/batch.py142
-rw-r--r--boto/dynamodb/condition.py89
-rw-r--r--boto/dynamodb/exceptions.py19
-rw-r--r--boto/dynamodb/item.py55
-rw-r--r--boto/dynamodb/layer1.py89
-rw-r--r--boto/dynamodb/layer2.py234
-rw-r--r--boto/dynamodb/schema.py12
-rw-r--r--boto/dynamodb/table.py56
-rw-r--r--boto/dynamodb/types.py24
-rw-r--r--boto/ec2/autoscale/launchconfig.py5
-rw-r--r--boto/ec2/cloudwatch/__init__.py21
-rw-r--r--boto/ec2/connection.py81
-rw-r--r--boto/ec2/elb/__init__.py30
-rw-r--r--boto/ec2/image.py15
-rw-r--r--boto/ec2/networkinterface.py3
-rw-r--r--boto/ec2/securitygroup.py87
-rw-r--r--boto/ec2/snapshot.py3
-rw-r--r--boto/ec2/spotinstancerequest.py20
-rw-r--r--boto/emr/connection.py5
-rw-r--r--boto/emr/step.py39
-rw-r--r--boto/exception.py36
-rw-r--r--boto/fps/__init__.py4
-rw-r--r--boto/fps/connection.py670
-rw-r--r--boto/fps/exception.py344
-rw-r--r--boto/fps/response.py173
-rwxr-xr-xboto/gs/acl.py62
-rw-r--r--boto/gs/bucket.py42
-rwxr-xr-xboto/gs/cors.py169
-rw-r--r--boto/gs/key.py54
-rw-r--r--boto/gs/resumable_upload_handler.py41
-rw-r--r--boto/mws/__init__.py21
-rw-r--r--boto/mws/connection.py725
-rw-r--r--boto/mws/exception.py75
-rw-r--r--boto/mws/response.py633
-rw-r--r--boto/rds/parametergroup.py2
-rw-r--r--boto/resultset.py9
-rw-r--r--boto/route53/connection.py21
-rw-r--r--boto/route53/record.py32
-rw-r--r--boto/s3/bucket.py47
-rw-r--r--boto/s3/connection.py42
-rw-r--r--boto/s3/key.py105
-rw-r--r--boto/s3/multipart.py4
-rw-r--r--boto/sqs/batchresults.py4
-rw-r--r--boto/sqs/connection.py155
-rw-r--r--boto/sqs/message.py4
-rw-r--r--boto/sqs/queue.py90
-rwxr-xr-xboto/storage_uri.py16
-rw-r--r--boto/swf/exceptions.py37
-rw-r--r--boto/swf/layer1.py135
-rw-r--r--boto/swf/layer1_decisions.py316
-rw-r--r--boto/utils.py2
-rw-r--r--boto/vpc/__init__.py68
-rw-r--r--docs/source/cloudsearch_tut.rst264
-rw-r--r--docs/source/ref/cloudformation.rst7
-rw-r--r--docs/source/ref/cloudsearch.rst59
-rw-r--r--docs/source/ref/index.rst1
-rw-r--r--docs/source/ref/s3.rst7
-rw-r--r--setup.py4
-rw-r--r--tests/dynamodb/test_layer1.py20
-rw-r--r--tests/dynamodb/test_layer2.py56
-rw-r--r--tests/ec2/cloudwatch/test_connection.py56
-rwxr-xr-xtests/fps/test.py101
-rw-r--r--tests/fps/test_install_caller_instruction.py4
-rw-r--r--tests/mws/__init__.py0
-rwxr-xr-xtests/mws/test.py98
-rw-r--r--tests/s3/test_connection.py17
-rw-r--r--tests/s3/test_gsconnection.py64
-rw-r--r--tests/s3/test_key.py30
-rw-r--r--tests/s3/test_multipart.py13
-rwxr-xr-xtests/s3/test_resumable_downloads.py147
-rwxr-xr-xtests/s3/test_resumable_uploads.py181
-rw-r--r--tests/sqs/test_connection.py14
-rw-r--r--tests/swf/__init__.py0
-rw-r--r--tests/swf/test_layer1.py245
-rw-r--r--tests/swf/test_layer1_workflow_execution.py171
-rwxr-xr-xtests/test.py12
95 files changed, 7715 insertions, 1322 deletions
diff --git a/MANIFEST.in b/MANIFEST.in
index 530ca333..d5e4f618 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -1,5 +1,5 @@
include boto/cacerts/cacerts.txt
-include README.markdown
+include README.rst
include Changelog.rst
include boto/file/README
include .gitignore
diff --git a/README.markdown b/README.markdown
deleted file mode 100644
index e2a53f4c..00000000
--- a/README.markdown
+++ /dev/null
@@ -1,73 +0,0 @@
-# boto
-boto 2.3.0
-18-Mar-2012
-
-## Introduction
-
-Boto is a Python package that provides interfaces to Amazon Web Services.
-At the moment, boto supports:
-
- * Simple Storage Service (S3)
- * SimpleQueue Service (SQS)
- * Elastic Compute Cloud (EC2)
- * Mechanical Turk
- * SimpleDB
- * CloudFront
- * CloudWatch
- * AutoScale
- * Elastic Load Balancer (ELB)
- * Virtual Private Cloud (VPC)
- * Elastic Map Reduce (EMR)
- * Relational Data Service (RDS)
- * Simple Notification Server (SNS)
- * Google Storage
- * Identity and Access Management (IAM)
- * Route53 DNS Service (route53)
- * Simple Email Service (SES)
- * Flexible Payment Service (FPS)
- * CloudFormation
- * Amazon DynamoDB
- * Amazon SimpleWorkflow
-
-The goal of boto is to support the full breadth and depth of Amazon
-Web Services. In addition, boto provides support for other public
-services such as Google Storage in addition to private cloud systems
-like Eucalyptus, OpenStack and Open Nebula.
-
-Boto is developed mainly using Python 2.6.6 and Python 2.7.1 on Mac OSX
-and Ubuntu Maverick. It is known to work on other Linux distributions
-and on Windows. Boto requires no additional libraries or packages
-other than those that are distributed with Python. Efforts are made
-to keep boto compatible with Python 2.5.x but no guarantees are made.
-
-## Finding Out More About Boto
-
-The main source code repository for boto can be found on
-[github.com](http://github.com/boto/boto)
-
-[Online documentation](http://docs.pythonboto.org/) is also
-available. The online documentation includes full API documentation
-as well as Getting Started Guides for many of the boto modules.
-
-Boto releases can be found on the [Python Cheese Shop](http://pypi.python.org/).
-
-Join our `IRC channel`_ (#boto on FreeNode).
- IRC channel: http://webchat.freenode.net/?channels=boto
-
-## Getting Started with Boto
-
-Your credentials can be passed into the methods that create
-connections. Alternatively, boto will check for the existance of the
-following environment variables to ascertain your credentials:
-
-AWS_ACCESS_KEY_ID - Your AWS Access Key ID
-AWS_SECRET_ACCESS_KEY - Your AWS Secret Access Key
-
-Credentials and other boto-related settings can also be stored in a
-boto config file. See
-[this](http://code.google.com/p/boto/wiki/BotoConfig) for details.
-
-Copyright (c) 2006-2012 Mitch Garnaat <mitch@garnaat.com>
-Copyright (c) 2010-2011, Eucalyptus Systems, Inc.
-Copyright (c) 2012 Amazon.com, Inc. or its affiliates.
-All rights reserved.
diff --git a/README.rst b/README.rst
new file mode 100644
index 00000000..55df13d0
--- /dev/null
+++ b/README.rst
@@ -0,0 +1,100 @@
+####
+boto
+####
+boto 2.4.0
+15-May-2012
+
+************
+Introduction
+************
+
+Boto is a Python package that provides interfaces to Amazon Web Services.
+At the moment, boto supports:
+
+* Simple Storage Service (S3)
+* SimpleQueue Service (SQS)
+* Elastic Compute Cloud (EC2)
+* Mechanical Turk
+* SimpleDB
+* CloudFront
+* CloudWatch
+* AutoScale
+* Elastic Load Balancer (ELB)
+* Virtual Private Cloud (VPC)
+* Elastic Map Reduce (EMR)
+* Relational Data Service (RDS)
+* Simple Notification Server (SNS)
+* Google Storage
+* Identity and Access Management (IAM)
+* Route53 DNS Service (route53)
+* Simple Email Service (SES)
+* Flexible Payment Service (FPS)
+* CloudFormation
+* Amazon DynamoDB
+* Amazon SimpleWorkflow
+* CloudSearch
+* Marketplace Web Services
+
+The goal of boto is to support the full breadth and depth of Amazon
+Web Services. In addition, boto provides support for other public
+services such as Google Storage in addition to private cloud systems
+like Eucalyptus, OpenStack and Open Nebula.
+
+Boto is developed mainly using Python 2.6.6 and Python 2.7.1 on Mac OSX
+and Ubuntu Maverick. It is known to work on other Linux distributions
+and on Windows. Boto requires no additional libraries or packages
+other than those that are distributed with Python. Efforts are made
+to keep boto compatible with Python 2.5.x but no guarantees are made.
+
+*********************************
+Special Note for Python 3.x Users
+*********************************
+
+If you are interested in trying out boto with Python 3.x, check out the
+`neo`_ branch. This is under active development and the goal is a version
+of boto that works in Python 2.6, 2.7, and 3.x. Not everything is working
+just yet but many things are and it's worth a look if you are an active
+Python 3.x user.
+
+***************************
+Finding Out More About Boto
+***************************
+
+The main source code repository for boto can be found on `github.com`_.
+The boto project uses the `gitflow`_ model for branching.
+
+`Online documentation`_ is also available. The online documentation includes
+full API documentation as well as Getting Started Guides for many of the boto
+modules.
+
+Boto releases can be found on the `Python Cheese Shop`_.
+
+Join our IRC channel `#boto` on FreeNode.
+Webchat IRC channel: http://webchat.freenode.net/?channels=boto
+
+*************************
+Getting Started with Boto
+*************************
+
+Your credentials can be passed into the methods that create
+connections. Alternatively, boto will check for the existence of the
+following environment variables to ascertain your credentials:
+
+**AWS_ACCESS_KEY_ID** - Your AWS Access Key ID
+
+**AWS_SECRET_ACCESS_KEY** - Your AWS Secret Access Key
+
+Credentials and other boto-related settings can also be stored in a
+boto config file. See `this`_ for details.
+
+Copyright (c) 2006-2012 Mitch Garnaat <mitch@garnaat.com>
+Copyright (c) 2010-2011, Eucalyptus Systems, Inc.
+Copyright (c) 2012 Amazon.com, Inc. or its affiliates.
+All rights reserved.
+
+.. _github.com: http://github.com/boto/boto
+.. _Online documentation: http://docs.pythonboto.org
+.. _Python Cheese Shop: http://pypi.python.org/pypi/boto
+.. _this: http://code.google.com/p/boto/wiki/BotoConfig
+.. _gitflow: http://nvie.com/posts/a-successful-git-branching-model/
+.. _neo: https://github.com/boto/boto/tree/neo
diff --git a/bin/s3multiput b/bin/s3multiput
index 644ddc91..bdce861a 100755
--- a/bin/s3multiput
+++ b/bin/s3multiput
@@ -285,7 +285,7 @@ def main():
else:
upload(bucket_name, aws_access_key_id,
aws_secret_access_key, fullpath, key_name,
- reduced, debug, cb, num_cb)
+ reduced, debug, cb, num_cb, grant or 'private')
total += 1
# upload a single file
@@ -311,7 +311,7 @@ def main():
else:
upload(bucket_name, aws_access_key_id,
aws_secret_access_key, path, key_name,
- reduced, debug, cb, num_cb)
+ reduced, debug, cb, num_cb, grant or 'private')
if __name__ == "__main__":
main()
diff --git a/boto/__init__.py b/boto/__init__.py
index 15697547..d16e024f 100644
--- a/boto/__init__.py
+++ b/boto/__init__.py
@@ -32,7 +32,7 @@ import logging.config
import urlparse
from boto.exception import InvalidUriError
-__version__ = '2.3.0'
+__version__ = '2.4.0'
Version = __version__ # for backware compatibility
UserAgent = 'Boto/%s (%s)' % (__version__, sys.platform)
@@ -371,7 +371,7 @@ def connect_euca(host=None, aws_access_key_id=None, aws_secret_access_key=None,
region=reg, port=port, path=path,
is_secure=is_secure, **kwargs)
-def connect_ec2_endpoint(url, aws_access_key_id=None, aws_secret_access_key=None,
+def connect_ec2_endpoint(url, aws_access_key_id=None, aws_secret_access_key=None,
**kwargs):
"""
Connect to an EC2 Api endpoint. Additional arguments are passed
@@ -437,7 +437,7 @@ def connect_walrus(host=None, aws_access_key_id=None, aws_secret_access_key=None
None)
if not host:
host = config.get('Boto', 'walrus_host', None)
-
+
return S3Connection(aws_access_key_id, aws_secret_access_key,
host=host, port=port, path=path,
calling_format=OrdinaryCallingFormat(),
@@ -535,6 +535,25 @@ def connect_swf(aws_access_key_id=None,
from boto.swf.layer1 import Layer1
return Layer1(aws_access_key_id, aws_secret_access_key, **kwargs)
+
+def connect_cloudsearch(aws_access_key_id=None,
+ aws_secret_access_key=None,
+ **kwargs):
+ """
+ :type aws_access_key_id: string
+ :param aws_access_key_id: Your AWS Access Key ID
+
+ :type aws_secret_access_key: string
+ :param aws_secret_access_key: Your AWS Secret Access Key
+
+ :rtype: :class:`boto.ec2.autoscale.CloudSearchConnection`
+ :return: A connection to Amazon's CloudSearch service
+ """
+ from boto.cloudsearch.layer2 import Layer2
+ return Layer2(aws_access_key_id, aws_secret_access_key,
+ **kwargs)
+
+
def check_extensions(module_name, module_path):
"""
This function checks for extensions to boto modules. It should be called in the
diff --git a/boto/auth.py b/boto/auth.py
index 14bd0fa2..530a4728 100644
--- a/boto/auth.py
+++ b/boto/auth.py
@@ -366,6 +366,33 @@ class QuerySignatureV2AuthHandler(QuerySignatureHelper, AuthHandler):
boto.log.debug('base64 encoded digest: %s' % b64)
return (qs, b64)
+class POSTPathQSV2AuthHandler(QuerySignatureV2AuthHandler, AuthHandler):
+ """
+ Query Signature V2 Authentication relocating signed query
+ into the path and allowing POST requests with Content-Types.
+ """
+
+ capability = ['mws']
+
+ def add_auth(self, req, **kwargs):
+ req.params['AWSAccessKeyId'] = self._provider.access_key
+ req.params['SignatureVersion'] = self.SignatureVersion
+ req.params['Timestamp'] = boto.utils.get_ts()
+ qs, signature = self._calc_signature(req.params, req.method,
+ req.auth_path, req.host)
+ boto.log.debug('query_string: %s Signature: %s' % (qs, signature))
+ if req.method == 'POST':
+ req.headers['Content-Length'] = str(len(req.body))
+ req.headers['Content-Type'] = req.headers.get('Content-Type',
+ 'text/plain')
+ else:
+ req.body = ''
+ # if this is a retried req, the qs from the previous try will
+ # already be there, we need to get rid of that and rebuild it
+ req.path = req.path.split('?')[0]
+ req.path = (req.path + '?' + qs +
+ '&Signature=' + urllib.quote_plus(signature))
+
def get_auth_handler(host, config, provider, requested_capability=None):
"""Finds an AuthHandler that is ready to authenticate.
diff --git a/boto/cloudformation/connection.py b/boto/cloudformation/connection.py
index 6243dc6c..578b84ec 100644
--- a/boto/cloudformation/connection.py
+++ b/boto/cloudformation/connection.py
@@ -36,9 +36,10 @@ class CloudFormationConnection(AWSQueryConnection):
"""
A Connection to the CloudFormation Service.
"""
- DefaultRegionName = 'us-east-1'
- DefaultRegionEndpoint = 'cloudformation.us-east-1.amazonaws.com'
- APIVersion = '2010-05-15'
+ APIVersion = boto.config.get('Boto', 'cfn_version', '2010-05-15')
+ DefaultRegionName = boto.config.get('Boto', 'cfn_region_name', 'us-east-1')
+ DefaultRegionEndpoint = boto.config.get('Boto', 'cfn_region_endpoint',
+ 'cloudformation.us-east-1.amazonaws.com')
valid_states = ("CREATE_IN_PROGRESS", "CREATE_FAILED", "CREATE_COMPLETE",
"ROLLBACK_IN_PROGRESS", "ROLLBACK_FAILED", "ROLLBACK_COMPLETE",
diff --git a/boto/cloudformation/stack.py b/boto/cloudformation/stack.py
index 8b9e1157..f65dd5a7 100644
--- a/boto/cloudformation/stack.py
+++ b/boto/cloudformation/stack.py
@@ -11,6 +11,7 @@ class Stack:
self.notification_arns = []
self.outputs = []
self.parameters = []
+ self.capabilities = []
self.stack_id = None
self.stack_status = None
self.stack_name = None
@@ -24,6 +25,9 @@ class Stack:
elif name == "Outputs":
self.outputs = ResultSet([('member', Output)])
return self.outputs
+ elif name == "Capabilities":
+ self.capabilities = ResultSet([('member', Capability)])
+ return self.capabilities
else:
return None
@@ -165,6 +169,20 @@ class Output:
def __repr__(self):
return "Output:\"%s\"=\"%s\"" % (self.key, self.value)
+class Capability:
+ def __init__(self, connection=None):
+ self.connection = None
+ self.value = None
+
+ def startElement(self, name, attrs, connection):
+ return None
+
+ def endElement(self, name, value, connection):
+ self.value = value
+
+ def __repr__(self):
+ return "Capability:\"%s\"" % (self.value)
+
class StackResource:
def __init__(self, connection=None):
self.connection = connection
diff --git a/boto/cloudfront/invalidation.py b/boto/cloudfront/invalidation.py
index b213e65c..b066e309 100644
--- a/boto/cloudfront/invalidation.py
+++ b/boto/cloudfront/invalidation.py
@@ -40,7 +40,7 @@ class InvalidationBatch(object):
# If we passed in a distribution,
# then we use that as the connection object
if distribution:
- self.connection = connection
+ self.connection = distribution
else:
self.connection = connection
diff --git a/boto/cloudsearch/__init__.py b/boto/cloudsearch/__init__.py
new file mode 100644
index 00000000..9c8157a1
--- /dev/null
+++ b/boto/cloudsearch/__init__.py
@@ -0,0 +1,45 @@
+# Copyright (c) 2012 Mitch Garnaat http://garnaat.org/
+# Copyright (c) 2012 Amazon.com, Inc. or its affiliates.
+# All Rights Reserved
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish, dis-
+# tribute, sublicense, and/or sell copies of the Software, and to permit
+# persons to whom the Software is furnished to do so, subject to the fol-
+# lowing conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
+# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+#
+from boto.ec2.regioninfo import RegionInfo
+
+
+def regions():
+ """
+ Get all available regions for the Amazon CloudSearch service.
+
+ :rtype: list
+ :return: A list of :class:`boto.regioninfo.RegionInfo`
+ """
+ import boto.cloudsearch.layer1
+ return [RegionInfo(name='us-east-1',
+ endpoint='cloudsearch.us-east-1.amazonaws.com',
+ connection_cls=boto.cloudsearch.layer1.Layer1),
+ ]
+
+
+def connect_to_region(region_name, **kw_params):
+ for region in regions():
+ if region.name == region_name:
+ return region.connect(**kw_params)
+ return None
diff --git a/boto/cloudsearch/document.py b/boto/cloudsearch/document.py
new file mode 100644
index 00000000..64a11e07
--- /dev/null
+++ b/boto/cloudsearch/document.py
@@ -0,0 +1,150 @@
+# Copyright (c) 2012 Mitch Garnaat http://garnaat.org/
+# Copyright (c) 2012 Amazon.com, Inc. or its affiliates.
+# All Rights Reserved
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish, dis-
+# tribute, sublicense, and/or sell copies of the Software, and to permit
+# persons to whom the Software is furnished to do so, subject to the fol-
+# lowing conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
+# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+#
+try:
+ import simplejson as json
+except ImportError:
+ import json
+
+import boto.exception
+import requests
+import boto
+
+class SearchServiceException(Exception):
+ pass
+
+
+class CommitMismatchError(Exception):
+ pass
+
+
+class DocumentServiceConnection(object):
+
+ def __init__(self, domain=None, endpoint=None):
+ self.domain = domain
+ self.endpoint = endpoint
+ if not self.endpoint:
+ self.endpoint = domain.doc_service_endpoint
+ self.documents_batch = []
+ self._sdf = None
+
+ def add(self, _id, version, fields, lang='en'):
+ d = {'type': 'add', 'id': _id, 'version': version, 'lang': lang,
+ 'fields': fields}
+ self.documents_batch.append(d)
+
+ def delete(self, _id, version):
+ d = {'type': 'delete', 'id': _id, 'version': version}
+ self.documents_batch.append(d)
+
+ def get_sdf(self):
+ return self._sdf if self._sdf else json.dumps(self.documents_batch)
+
+ def clear_sdf(self):
+ self._sdf = None
+ self.documents_batch = []
+
+ def add_sdf_from_s3(self, key_obj):
+ """@todo (lucas) would be nice if this could just take an s3://uri..."""
+ self._sdf = key_obj.get_contents_as_string()
+
+ def commit(self):
+ sdf = self.get_sdf()
+
+ if ': null' in sdf:
+ boto.log.error('null value in sdf detected. This will probably raise '
+ '500 error.')
+ index = sdf.index(': null')
+ boto.log.error(sdf[index - 100:index + 100])
+
+ url = "http://%s/2011-02-01/documents/batch" % (self.endpoint)
+
+ request_config = {
+ 'pool_connections': 20,
+ 'keep_alive': True,
+ 'max_retries': 5,
+ 'pool_maxsize': 50
+ }
+
+ r = requests.post(url, data=sdf, config=request_config,
+ headers={'Content-Type': 'application/json'})
+
+ return CommitResponse(r, self, sdf)
+
+
+class CommitResponse(object):
+ """Wrapper for response to Cloudsearch document batch commit.
+
+ :type response: :class:`requests.models.Response`
+ :param response: Response from Cloudsearch /documents/batch API
+
+ :type doc_service: :class:`exfm.cloudsearch.DocumentServiceConnection`
+ :param doc_service: Object containing the documents posted and methods to
+ retry
+
+ :raises: :class:`boto.exception.BotoServerError`
+ :raises: :class:`exfm.cloudsearch.SearchServiceException`
+ """
+ def __init__(self, response, doc_service, sdf):
+ self.response = response
+ self.doc_service = doc_service
+ self.sdf = sdf
+
+ try:
+ self.content = json.loads(response.content)
+ except:
+ boto.log.error('Error indexing documents.\nResponse Content:\n{}\n\n'
+ 'SDF:\n{}'.format(response.content, self.sdf))
+ raise boto.exception.BotoServerError(self.response.status_code, '',
+ body=response.content)
+
+ self.status = self.content['status']
+ if self.status == 'error':
+ self.errors = [e.get('message') for e in self.content.get('errors',
+ [])]
+ else:
+ self.errors = []
+
+ self.adds = self.content['adds']
+ self.deletes = self.content['deletes']
+ self._check_num_ops('add', self.adds)
+ self._check_num_ops('delete', self.deletes)
+
+ def _check_num_ops(self, type_, response_num):
+ """Raise exception if number of ops in response doesn't match commit
+
+ :type type_: str
+ :param type_: Type of commit operation: 'add' or 'delete'
+
+ :type response_num: int
+ :param response_num: Number of adds or deletes in the response.
+
+ :raises: :class:`exfm.cloudsearch.SearchServiceException`
+ """
+ commit_num = len([d for d in self.doc_service.documents_batch
+ if d['type'] == type_])
+
+ if response_num != commit_num:
+ raise CommitMismatchError(
+ 'Incorrect number of {}s returned. Commit: {} Respose: {}'\
+ .format(type_, commit_num, response_num))
diff --git a/boto/cloudsearch/domain.py b/boto/cloudsearch/domain.py
new file mode 100644
index 00000000..43fcac8b
--- /dev/null
+++ b/boto/cloudsearch/domain.py
@@ -0,0 +1,397 @@
+# Copyright (c) 2012 Mitch Garnaat http://garnaat.org/
+# Copyright (c) 2012 Amazon.com, Inc. or its affiliates.
+# All Rights Reserved
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish, dis-
+# tribute, sublicense, and/or sell copies of the Software, and to permit
+# persons to whom the Software is furnished to do so, subject to the fol-
+# lowing conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
+# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+#
+
+import boto
+try:
+ import simplejson as json
+except ImportError:
+ import json
+from .optionstatus import OptionStatus
+from .optionstatus import IndexFieldStatus
+from .optionstatus import ServicePoliciesStatus
+from .optionstatus import RankExpressionStatus
+from .document import DocumentServiceConnection
+from .search import SearchConnection
+
+def handle_bool(value):
+ if value in [True, 'true', 'True', 'TRUE', 1]:
+ return True
+ return False
+
+
+class Domain(object):
+ """
+ A Cloudsearch domain.
+
+ :ivar name: The name of the domain.
+
+ :ivar id: The internally generated unique identifier for the domain.
+
+ :ivar created: A boolean which is True if the domain is
+ created. It can take several minutes to initialize a domain
+ when CreateDomain is called. Newly created search domains are
+ returned with a False value for Created until domain creation
+ is complete
+
+ :ivar deleted: A boolean which is True if the search domain has
+ been deleted. The system must clean up resources dedicated to
+ the search domain when delete is called. Newly deleted
+ search domains are returned from list_domains with a True
+ value for deleted for several minutes until resource cleanup
+ is complete.
+
+ :ivar processing: True if processing is being done to activate the
+ current domain configuration.
+
+ :ivar num_searchable_docs: The number of documents that have been
+        submittted to the domain and indexed.
+
+ :ivar requires_index_document: True if index_documents needs to be
+ called to activate the current domain configuration.
+
+ :ivar search_instance_count: The number of search instances that are
+ available to process search requests.
+
+ :ivar search_instance_type: The instance type that is being used to
+ process search requests.
+
+ :ivar search_partition_count: The number of partitions across which
+ the search index is spread.
+ """
+
+ def __init__(self, layer1, data):
+ self.layer1 = layer1
+ self.update_from_data(data)
+
+ def update_from_data(self, data):
+ self.created = data['created']
+ self.deleted = data['deleted']
+ self.processing = data['processing']
+ self.requires_index_documents = data['requires_index_documents']
+ self.domain_id = data['domain_id']
+ self.domain_name = data['domain_name']
+ self.num_searchable_docs = data['num_searchable_docs']
+ self.search_instance_count = data['search_instance_count']
+ self.search_instance_type = data.get('search_instance_type', None)
+ self.search_partition_count = data['search_partition_count']
+ self._doc_service = data['doc_service']
+ self._search_service = data['search_service']
+
+ @property
+ def doc_service_arn(self):
+ return self._doc_service['arn']
+
+ @property
+ def doc_service_endpoint(self):
+ return self._doc_service['endpoint']
+
+ @property
+ def search_service_arn(self):
+ return self._search_service['arn']
+
+ @property
+ def search_service_endpoint(self):
+ return self._search_service['endpoint']
+
+ @property
+ def created(self):
+ return self._created
+
+ @created.setter
+ def created(self, value):
+ self._created = handle_bool(value)
+
+ @property
+ def deleted(self):
+ return self._deleted
+
+ @deleted.setter
+ def deleted(self, value):
+ self._deleted = handle_bool(value)
+
+ @property
+ def processing(self):
+ return self._processing
+
+ @processing.setter
+ def processing(self, value):
+ self._processing = handle_bool(value)
+
+ @property
+ def requires_index_documents(self):
+ return self._requires_index_documents
+
+ @requires_index_documents.setter
+ def requires_index_documents(self, value):
+ self._requires_index_documents = handle_bool(value)
+
+ @property
+ def search_partition_count(self):
+ return self._search_partition_count
+
+ @search_partition_count.setter
+ def search_partition_count(self, value):
+ self._search_partition_count = int(value)
+
+ @property
+ def search_instance_count(self):
+ return self._search_instance_count
+
+ @search_instance_count.setter
+ def search_instance_count(self, value):
+ self._search_instance_count = int(value)
+
+ @property
+ def num_searchable_docs(self):
+ return self._num_searchable_docs
+
+ @num_searchable_docs.setter
+ def num_searchable_docs(self, value):
+ self._num_searchable_docs = int(value)
+
+ @property
+ def name(self):
+ return self.domain_name
+
+ @property
+ def id(self):
+ return self.domain_id
+
+ def delete(self):
+ """
+ Delete this domain and all index data associated with it.
+ """
+ return self.layer1.delete_domain(self.name)
+
+ def get_stemming(self):
+ """
+ Return a :class:`boto.cloudsearch.option.OptionStatus` object
+ representing the currently defined stemming options for
+ the domain.
+ """
+ return OptionStatus(self, None,
+ self.layer1.describe_stemming_options,
+ self.layer1.update_stemming_options)
+
+ def get_stopwords(self):
+ """
+ Return a :class:`boto.cloudsearch.option.OptionStatus` object
+ representing the currently defined stopword options for
+ the domain.
+ """
+ return OptionStatus(self, None,
+ self.layer1.describe_stopword_options,
+ self.layer1.update_stopword_options)
+
+ def get_synonyms(self):
+ """
+ Return a :class:`boto.cloudsearch.option.OptionStatus` object
+ representing the currently defined synonym options for
+ the domain.
+ """
+ return OptionStatus(self, None,
+ self.layer1.describe_synonym_options,
+ self.layer1.update_synonym_options)
+
+ def get_access_policies(self):
+ """
+ Return a :class:`boto.cloudsearch.option.OptionStatus` object
+ representing the currently defined access policies for
+ the domain.
+ """
+ return ServicePoliciesStatus(self, None,
+ self.layer1.describe_service_access_policies,
+ self.layer1.update_service_access_policies)
+
+ def index_documents(self):
+ """
+ Tells the search domain to start indexing its documents using
+ the latest text processing options and IndexFields. This
+ operation must be invoked to make options whose OptionStatus
+        has OptionState of RequiresIndexDocuments visible in search
+ results.
+ """
+ self.layer1.index_documents(self.name)
+
+ def get_index_fields(self, field_names=None):
+ """
+ Return a list of index fields defined for this domain.
+ """
+ data = self.layer1.describe_index_fields(self.name, field_names)
+ return [IndexFieldStatus(self, d) for d in data]
+
+ def create_index_field(self, field_name, field_type,
+ default='', facet=False, result=False, searchable=False,
+ source_attributes=[]):
+ """
+ Defines an ``IndexField``, either replacing an existing
+ definition or creating a new one.
+
+ :type field_name: string
+ :param field_name: The name of a field in the search index.
+
+ :type field_type: string
+ :param field_type: The type of field. Valid values are
+ uint | literal | text
+
+ :type default: string or int
+ :param default: The default value for the field. If the
+ field is of type ``uint`` this should be an integer value.
+ Otherwise, it's a string.
+
+ :type facet: bool
+ :param facet: A boolean to indicate whether facets
+ are enabled for this field or not. Does not apply to
+ fields of type ``uint``.
+
+ :type results: bool
+ :param results: A boolean to indicate whether values
+ of this field can be returned in search results or
+ used in ranking. Does not apply to fields of type ``uint``.
+
+ :type searchable: bool
+ :param searchable: A boolean to indicate whether search
+ is enabled for this field or not. Applies only to fields
+ of type ``literal``.
+
+ :type source_attributes: list of dicts
+ :param source_attributes: An optional list of dicts that
+ provide information about attributes for this index field.
+ A maximum of 20 source attributes can be configured for
+ each index field.
+
+ Each item in the list is a dict with the following keys:
+
+ * data_copy - The value is a dict with the following keys:
+ * default - Optional default value if the source attribute
+ is not specified in a document.
+ * name - The name of the document source field to add
+ to this ``IndexField``.
+ * data_function - Identifies the transformation to apply
+ when copying data from a source attribute.
+ * data_map - The value is a dict with the following keys:
+ * cases - A dict that translates source field values
+ to custom values.
+ * default - An optional default value to use if the
+ source attribute is not specified in a document.
+ * name - the name of the document source field to add
+ to this ``IndexField``
+ * data_trim_title - Trims common title words from a source
+ document attribute when populating an ``IndexField``.
+ This can be used to create an ``IndexField`` you can
+ use for sorting. The value is a dict with the following
+ fields:
+ * default - An optional default value.
+ * language - an IETF RFC 4646 language code.
+ * separator - The separator that follows the text to trim.
+ * name - The name of the document source field to add.
+
+ :raises: BaseException, InternalException, LimitExceededException,
+ InvalidTypeException, ResourceNotFoundException
+ """
+ data = self.layer1.define_index_field(self.name, field_name,
+ field_type, default=default,
+ facet=facet, result=result,
+ searchable=searchable,
+ source_attributes=source_attributes)
+ return IndexFieldStatus(self, data,
+ self.layer1.describe_index_fields)
+
+ def get_rank_expressions(self, rank_names=None):
+ """
+ Return a list of rank expressions defined for this domain.
+ """
+ fn = self.layer1.describe_rank_expressions
+ data = fn(self.name, rank_names)
+ return [RankExpressionStatus(self, d, fn) for d in data]
+
+ def create_rank_expression(self, name, expression):
+ """
+ Create a new rank expression.
+
+ :type name: string
+ :param name: The name of an expression computed for ranking
+ while processing a search request.
+
+ :type expression: string
+ :param expression: The expression to evaluate for ranking
+ or thresholding while processing a search request. The
+ RankExpression syntax is based on JavaScript expressions
+ and supports:
+
+ * Integer, floating point, hex and octal literals
+ * Shortcut evaluation of logical operators such that an
+ expression a || b evaluates to the value a if a is
+ true without evaluating b at all
+ * JavaScript order of precedence for operators
+ * Arithmetic operators: + - * / %
+ * Boolean operators (including the ternary operator)
+ * Bitwise operators
+ * Comparison operators
+ * Common mathematic functions: abs ceil erf exp floor
+ lgamma ln log2 log10 max min sqrt pow
+ * Trigonometric library functions: acosh acos asinh asin
+ atanh atan cosh cos sinh sin tanh tan
+ * Random generation of a number between 0 and 1: rand
+ * Current time in epoch: time
+ * The min max functions that operate on a variable argument list
+
+ Intermediate results are calculated as double precision
+ floating point values. The final return value of a
+ RankExpression is automatically converted from floating
+ point to a 32-bit unsigned integer by rounding to the
+ nearest integer, with a natural floor of 0 and a ceiling
+ of max(uint32_t), 4294967295. Mathematical errors such as
+ dividing by 0 will fail during evaluation and return a
+ value of 0.
+
+ The source data for a RankExpression can be the name of an
+ IndexField of type uint, another RankExpression or the
+ reserved name text_relevance. The text_relevance source is
+ defined to return an integer from 0 to 1000 (inclusive) to
+ indicate how relevant a document is to the search request,
+ taking into account repetition of search terms in the
+ document and proximity of search terms to each other in
+ each matching IndexField in the document.
+
+ For more information about using rank expressions to
+ customize ranking, see the Amazon CloudSearch Developer
+ Guide.
+
+ :raises: BaseException, InternalException, LimitExceededException,
+ InvalidTypeException, ResourceNotFoundException
+ """
+ data = self.layer1.define_rank_expression(self.name, name, expression)
+ return RankExpressionStatus(self, data,
+ self.layer1.describe_rank_expressions)
+
+ def get_document_service(self):
+ return DocumentServiceConnection(domain=self)
+
+ def get_search_service(self):
+ return SearchConnection(domain=self)
+
+ def __repr__(self):
+ return '<Domain: %s>' % self.domain_name
+
diff --git a/boto/cloudsearch/layer1.py b/boto/cloudsearch/layer1.py
new file mode 100644
index 00000000..e5b4ed5d
--- /dev/null
+++ b/boto/cloudsearch/layer1.py
@@ -0,0 +1,732 @@
+# Copyright (c) 2012 Mitch Garnaat http://garnaat.org/
+# Copyright (c) 2012 Amazon.com, Inc. or its affiliates.
+# All Rights Reserved
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish, dis-
+# tribute, sublicense, and/or sell copies of the Software, and to permit
+# persons to whom the Software is furnished to do so, subject to the fol-
+# lowing conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
+# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+
+import boto
+import boto.jsonresponse
+from boto.connection import AWSQueryConnection
+from boto.regioninfo import RegionInfo
+
+#boto.set_stream_logger('cloudsearch')
+
+def do_bool(val):
+ return 'true' if val in [True, 1, '1', 'true'] else 'false'
+
+
+class Layer1(AWSQueryConnection):
+
+ APIVersion = '2011-02-01'
+ DefaultRegionName = boto.config.get('Boto', 'cs_region_name', 'us-east-1')
+ DefaultRegionEndpoint = boto.config.get('Boto', 'cs_region_endpoint',
+ 'cloudsearch.us-east-1.amazonaws.com')
+
+ def __init__(self, aws_access_key_id=None, aws_secret_access_key=None,
+ is_secure=True, host=None, port=None,
+ proxy=None, proxy_port=None,
+ proxy_user=None, proxy_pass=None, debug=0,
+ https_connection_factory=None, region=None, path='/',
+ api_version=None, security_token=None):
+ if not region:
+ region = RegionInfo(self, self.DefaultRegionName,
+ self.DefaultRegionEndpoint)
+ self.region = region
+ AWSQueryConnection.__init__(self, aws_access_key_id,
+ aws_secret_access_key,
+ is_secure, port, proxy, proxy_port,
+ proxy_user, proxy_pass,
+ self.region.endpoint, debug,
+ https_connection_factory, path,
+ security_token)
+
+ def _required_auth_capability(self):
+ return ['sign-v2']
+
+ def get_response(self, doc_path, action, params, path='/',
+ parent=None, verb='GET', list_marker=None):
+ if not parent:
+ parent = self
+ response = self.make_request(action, params, path, verb)
+ body = response.read()
+ boto.log.debug(body)
+ if response.status == 200:
+ e = boto.jsonresponse.Element(
+ list_marker=list_marker if list_marker else 'Set',
+ pythonize_name=True)
+ h = boto.jsonresponse.XmlHandler(e, parent)
+ h.parse(body)
+ inner = e
+ for p in doc_path:
+ inner = inner.get(p)
+ if not inner:
+ return None if list_marker == None else []
+ if isinstance(inner, list):
+ return [dict(**i) for i in inner]
+ else:
+ return dict(**inner)
+ else:
+ raise self.ResponseError(response.status, response.reason, body)
+
+ def create_domain(self, domain_name):
+ """
+ Create a new search domain.
+
+ :type domain_name: string
+ :param domain_name: A string that represents the name of a
+ domain. Domain names must be unique across the domains
+ owned by an account within an AWS region. Domain names
+ must start with a letter or number and can contain the
+ following characters: a-z (lowercase), 0-9, and -
+ (hyphen). Uppercase letters and underscores are not
+ allowed.
+
+ :raises: BaseException, InternalException, LimitExceededException
+ """
+ doc_path = ('create_domain_response',
+ 'create_domain_result',
+ 'domain_status')
+ params = {'DomainName': domain_name}
+ return self.get_response(doc_path, 'CreateDomain',
+ params, verb='POST')
+
+ def define_index_field(self, domain_name, field_name, field_type,
+ default='', facet=False, result=False,
+ searchable=False, source_attributes=None):
+ """
+ Defines an ``IndexField``, either replacing an existing
+ definition or creating a new one.
+
+ :type domain_name: string
+ :param domain_name: A string that represents the name of a
+ domain. Domain names must be unique across the domains
+ owned by an account within an AWS region. Domain names
+ must start with a letter or number and can contain the
+ following characters: a-z (lowercase), 0-9, and -
+ (hyphen). Uppercase letters and underscores are not
+ allowed.
+
+ :type field_name: string
+ :param field_name: The name of a field in the search index.
+
+ :type field_type: string
+ :param field_type: The type of field. Valid values are
+ uint | literal | text
+
+ :type default: string or int
+ :param default: The default value for the field. If the
+ field is of type ``uint`` this should be an integer value.
+ Otherwise, it's a string.
+
+ :type facet: bool
+ :param facet: A boolean to indicate whether facets
+ are enabled for this field or not. Does not apply to
+ fields of type ``uint``.
+
+ :type result: bool
+ :param result: A boolean to indicate whether values
+ of this field can be returned in search results or
+ used in ranking. Does not apply to fields of type ``uint``.
+
+ :type searchable: bool
+ :param searchable: A boolean to indicate whether search
+ is enabled for this field or not. Applies only to fields
+ of type ``literal``.
+
+ :type source_attributes: list of dicts
+ :param source_attributes: An optional list of dicts that
+ provide information about attributes for this index field.
+ A maximum of 20 source attributes can be configured for
+ each index field.
+
+ Each item in the list is a dict with the following keys:
+
+ * data_copy - The value is a dict with the following keys:
+ * default - Optional default value if the source attribute
+ is not specified in a document.
+ * name - The name of the document source field to add
+ to this ``IndexField``.
+ * data_function - Identifies the transformation to apply
+ when copying data from a source attribute.
+ * data_map - The value is a dict with the following keys:
+ * cases - A dict that translates source field values
+ to custom values.
+ * default - An optional default value to use if the
+ source attribute is not specified in a document.
+ * name - the name of the document source field to add
+ to this ``IndexField``
+ * data_trim_title - Trims common title words from a source
+ document attribute when populating an ``IndexField``.
+ This can be used to create an ``IndexField`` you can
+ use for sorting. The value is a dict with the following
+ fields:
+ * default - An optional default value.
+ * language - an IETF RFC 4646 language code.
+ * separator - The separator that follows the text to trim.
+ * name - The name of the document source field to add.
+
+ :raises: BaseException, InternalException, LimitExceededException,
+ InvalidTypeException, ResourceNotFoundException
+ """
+ doc_path = ('define_index_field_response',
+ 'define_index_field_result',
+ 'index_field')
+ params = {'DomainName': domain_name,
+ 'IndexField.IndexFieldName': field_name,
+ 'IndexField.IndexFieldType': field_type}
+ if field_type == 'literal':
+ params['IndexField.LiteralOptions.DefaultValue'] = default
+ params['IndexField.LiteralOptions.FacetEnabled'] = do_bool(facet)
+ params['IndexField.LiteralOptions.ResultEnabled'] = do_bool(result)
+ params['IndexField.LiteralOptions.SearchEnabled'] = do_bool(searchable)
+ elif field_type == 'uint':
+ params['IndexField.UIntOptions.DefaultValue'] = default
+ elif field_type == 'text':
+ params['IndexField.TextOptions.DefaultValue'] = default
+ params['IndexField.TextOptions.FacetEnabled'] = do_bool(facet)
+ params['IndexField.TextOptions.ResultEnabled'] = do_bool(result)
+
+ return self.get_response(doc_path, 'DefineIndexField',
+ params, verb='POST')
+
+ def define_rank_expression(self, domain_name, rank_name, rank_expression):
+ """
+ Defines a RankExpression, either replacing an existing
+ definition or creating a new one.
+
+ :type domain_name: string
+ :param domain_name: A string that represents the name of a
+ domain. Domain names must be unique across the domains
+ owned by an account within an AWS region. Domain names
+ must start with a letter or number and can contain the
+ following characters: a-z (lowercase), 0-9, and -
+ (hyphen). Uppercase letters and underscores are not
+ allowed.
+
+ :type rank_name: string
+ :param rank_name: The name of an expression computed for ranking
+ while processing a search request.
+
+ :type rank_expression: string
+ :param rank_expression: The expression to evaluate for ranking
+ or thresholding while processing a search request. The
+ RankExpression syntax is based on JavaScript expressions
+ and supports:
+
+ * Integer, floating point, hex and octal literals
+ * Shortcut evaluation of logical operators such that an
+ expression a || b evaluates to the value a if a is
+ true without evaluating b at all
+ * JavaScript order of precedence for operators
+ * Arithmetic operators: + - * / %
+ * Boolean operators (including the ternary operator)
+ * Bitwise operators
+ * Comparison operators
+ * Common mathematic functions: abs ceil erf exp floor
+ lgamma ln log2 log10 max min sqrt pow
+ * Trigonometric library functions: acosh acos asinh asin
+ atanh atan cosh cos sinh sin tanh tan
+ * Random generation of a number between 0 and 1: rand
+ * Current time in epoch: time
+ * The min max functions that operate on a variable argument list
+
+ Intermediate results are calculated as double precision
+ floating point values. The final return value of a
+ RankExpression is automatically converted from floating
+ point to a 32-bit unsigned integer by rounding to the
+ nearest integer, with a natural floor of 0 and a ceiling
+ of max(uint32_t), 4294967295. Mathematical errors such as
+ dividing by 0 will fail during evaluation and return a
+ value of 0.
+
+ The source data for a RankExpression can be the name of an
+ IndexField of type uint, another RankExpression or the
+ reserved name text_relevance. The text_relevance source is
+ defined to return an integer from 0 to 1000 (inclusive) to
+ indicate how relevant a document is to the search request,
+ taking into account repetition of search terms in the
+ document and proximity of search terms to each other in
+ each matching IndexField in the document.
+
+ For more information about using rank expressions to
+ customize ranking, see the Amazon CloudSearch Developer
+ Guide.
+
+ :raises: BaseException, InternalException, LimitExceededException,
+ InvalidTypeException, ResourceNotFoundException
+ """
+ doc_path = ('define_rank_expression_response',
+ 'define_rank_expression_result',
+ 'rank_expression')
+ params = {'DomainName': domain_name,
+ 'RankExpression.RankExpression': rank_expression,
+ 'RankExpression.RankName': rank_name}
+ return self.get_response(doc_path, 'DefineRankExpression',
+ params, verb='POST')
+
+ def delete_domain(self, domain_name):
+ """
+ Delete a search domain.
+
+ :type domain_name: string
+ :param domain_name: A string that represents the name of a
+ domain. Domain names must be unique across the domains
+ owned by an account within an AWS region. Domain names
+ must start with a letter or number and can contain the
+ following characters: a-z (lowercase), 0-9, and -
+ (hyphen). Uppercase letters and underscores are not
+ allowed.
+
+ :raises: BaseException, InternalException
+ """
+ doc_path = ('delete_domain_response',
+ 'delete_domain_result',
+ 'domain_status')
+ params = {'DomainName': domain_name}
+ return self.get_response(doc_path, 'DeleteDomain',
+ params, verb='POST')
+
+ def delete_index_field(self, domain_name, field_name):
+ """
+ Deletes an existing ``IndexField`` from the search domain.
+
+ :type domain_name: string
+ :param domain_name: A string that represents the name of a
+ domain. Domain names must be unique across the domains
+ owned by an account within an AWS region. Domain names
+ must start with a letter or number and can contain the
+ following characters: a-z (lowercase), 0-9, and -
+ (hyphen). Uppercase letters and underscores are not
+ allowed.
+
+ :type field_name: string
+ :param field_name: A string that represents the name of
+ an index field. Field names must begin with a letter and
+ can contain the following characters: a-z (lowercase),
+ 0-9, and _ (underscore). Uppercase letters and hyphens are
+ not allowed. The names "body", "docid", and
+ "text_relevance" are reserved and cannot be specified as
+ field or rank expression names.
+
+ :raises: BaseException, InternalException, ResourceNotFoundException
+ """
+ doc_path = ('delete_index_field_response',
+ 'delete_index_field_result',
+ 'index_field')
+ params = {'DomainName': domain_name,
+ 'IndexFieldName': field_name}
+ return self.get_response(doc_path, 'DeleteIndexField',
+ params, verb='POST')
+
+ def delete_rank_expression(self, domain_name, rank_name):
+ """
+ Deletes an existing ``RankExpression`` from the search domain.
+
+ :type domain_name: string
+ :param domain_name: A string that represents the name of a
+ domain. Domain names must be unique across the domains
+ owned by an account within an AWS region. Domain names
+ must start with a letter or number and can contain the
+ following characters: a-z (lowercase), 0-9, and -
+ (hyphen). Uppercase letters and underscores are not
+ allowed.
+
+ :type rank_name: string
+ :param rank_name: Name of the ``RankExpression`` to delete.
+
+ :raises: BaseException, InternalException, ResourceNotFoundException
+ """
+ doc_path = ('delete_rank_expression_response',
+ 'delete_rank_expression_result',
+ 'rank_expression')
+ params = {'DomainName': domain_name, 'RankName': rank_name}
+ return self.get_response(doc_path, 'DeleteRankExpression',
+ params, verb='POST')
+
+ def describe_default_search_field(self, domain_name):
+ """
+ Describes options defining the default search field used by
+ indexing for the search domain.
+
+ :type domain_name: string
+ :param domain_name: A string that represents the name of a
+ domain. Domain names must be unique across the domains
+ owned by an account within an AWS region. Domain names
+ must start with a letter or number and can contain the
+ following characters: a-z (lowercase), 0-9, and -
+ (hyphen). Uppercase letters and underscores are not
+ allowed.
+
+ :raises: BaseException, InternalException, ResourceNotFoundException
+ """
+ doc_path = ('describe_default_search_field_response',
+ 'describe_default_search_field_result',
+ 'default_search_field')
+ params = {'DomainName': domain_name}
+ return self.get_response(doc_path, 'DescribeDefaultSearchField',
+ params, verb='POST')
+
+ def describe_domains(self, domain_names=None):
+ """
+ Describes the domains (optionally limited to one or more
+ domains by name) owned by this account.
+
+ :type domain_names: list
+ :param domain_names: Limits the response to the specified domains.
+
+ :raises: BaseException, InternalException
+ """
+ doc_path = ('describe_domains_response',
+ 'describe_domains_result',
+ 'domain_status_list')
+ params = {}
+ if domain_names:
+ params['DomainNames'] = domain_names
+ return self.get_response(doc_path, 'DescribeDomains',
+ params, verb='POST',
+ list_marker='DomainStatusList')
+
+ def describe_index_fields(self, domain_name, field_names=None):
+ """
+ Describes index fields in the search domain, optionally
+ limited to a single ``IndexField``.
+
+ :type domain_name: string
+ :param domain_name: A string that represents the name of a
+ domain. Domain names must be unique across the domains
+ owned by an account within an AWS region. Domain names
+ must start with a letter or number and can contain the
+ following characters: a-z (lowercase), 0-9, and -
+ (hyphen). Uppercase letters and underscores are not
+ allowed.
+
+ :type field_names: list
+ :param field_names: Limits the response to the specified fields.
+
+ :raises: BaseException, InternalException, ResourceNotFoundException
+ """
+ doc_path = ('describe_index_fields_response',
+ 'describe_index_fields_result',
+ 'index_fields')
+ params = {'DomainName': domain_name}
+ if field_names:
+ params['FieldNames'] = field_names
+ return self.get_response(doc_path, 'DescribeIndexFields',
+ params, verb='POST',
+ list_marker='IndexFields')
+
+ def describe_rank_expressions(self, domain_name, rank_names=None):
+ """
+ Describes RankExpressions in the search domain, optionally
+ limited to a single expression.
+
+ :type domain_name: string
+ :param domain_name: A string that represents the name of a
+ domain. Domain names must be unique across the domains
+ owned by an account within an AWS region. Domain names
+ must start with a letter or number and can contain the
+ following characters: a-z (lowercase), 0-9, and -
+ (hyphen). Uppercase letters and underscores are not
+ allowed.
+
+ :type rank_names: list
+ :param rank_names: Limit response to the specified rank names.
+
+ :raises: BaseException, InternalException, ResourceNotFoundException
+ """
+ doc_path = ('describe_rank_expressions_response',
+ 'describe_rank_expressions_result',
+ 'rank_expressions')
+ params = {'DomainName': domain_name}
+ if rank_names:
+ params['RankNames'] = rank_names
+ return self.get_response(doc_path, 'DescribeRankExpressions',
+ params, verb='POST',
+ list_marker='RankExpressions')
+
+ def describe_service_access_policies(self, domain_name):
+ """
+ Describes the resource-based policies controlling access to
+ the services in this search domain.
+
+ :type domain_name: string
+ :param domain_name: A string that represents the name of a
+ domain. Domain names must be unique across the domains
+ owned by an account within an AWS region. Domain names
+ must start with a letter or number and can contain the
+ following characters: a-z (lowercase), 0-9, and -
+ (hyphen). Uppercase letters and underscores are not
+ allowed.
+
+ :raises: BaseException, InternalException, ResourceNotFoundException
+ """
+ doc_path = ('describe_service_access_policies_response',
+ 'describe_service_access_policies_result',
+ 'access_policies')
+ params = {'DomainName': domain_name}
+ return self.get_response(doc_path, 'DescribeServiceAccessPolicies',
+ params, verb='POST')
+
+ def describe_stemming_options(self, domain_name):
+ """
+ Describes stemming options used by indexing for the search domain.
+
+ :type domain_name: string
+ :param domain_name: A string that represents the name of a
+ domain. Domain names must be unique across the domains
+ owned by an account within an AWS region. Domain names
+ must start with a letter or number and can contain the
+ following characters: a-z (lowercase), 0-9, and -
+ (hyphen). Uppercase letters and underscores are not
+ allowed.
+
+ :raises: BaseException, InternalException, ResourceNotFoundException
+ """
+ doc_path = ('describe_stemming_options_response',
+ 'describe_stemming_options_result',
+ 'stems')
+ params = {'DomainName': domain_name}
+ return self.get_response(doc_path, 'DescribeStemmingOptions',
+ params, verb='POST')
+
+ def describe_stopword_options(self, domain_name):
+ """
+ Describes stopword options used by indexing for the search domain.
+
+ :type domain_name: string
+ :param domain_name: A string that represents the name of a
+ domain. Domain names must be unique across the domains
+ owned by an account within an AWS region. Domain names
+ must start with a letter or number and can contain the
+ following characters: a-z (lowercase), 0-9, and -
+ (hyphen). Uppercase letters and underscores are not
+ allowed.
+
+ :raises: BaseException, InternalException, ResourceNotFoundException
+ """
+ doc_path = ('describe_stopword_options_response',
+ 'describe_stopword_options_result',
+ 'stopwords')
+ params = {'DomainName': domain_name}
+ return self.get_response(doc_path, 'DescribeStopwordOptions',
+ params, verb='POST')
+
+ def describe_synonym_options(self, domain_name):
+ """
+ Describes synonym options used by indexing for the search domain.
+
+ :type domain_name: string
+ :param domain_name: A string that represents the name of a
+ domain. Domain names must be unique across the domains
+ owned by an account within an AWS region. Domain names
+ must start with a letter or number and can contain the
+ following characters: a-z (lowercase), 0-9, and -
+ (hyphen). Uppercase letters and underscores are not
+ allowed.
+
+ :raises: BaseException, InternalException, ResourceNotFoundException
+ """
+ doc_path = ('describe_synonym_options_response',
+ 'describe_synonym_options_result',
+ 'synonyms')
+ params = {'DomainName': domain_name}
+ return self.get_response(doc_path, 'DescribeSynonymOptions',
+ params, verb='POST')
+
+ def index_documents(self, domain_name):
+ """
+ Tells the search domain to start scanning its documents using
+ the latest text processing options and ``IndexFields``. This
+ operation must be invoked to make visible in searches any
+ options whose ``OptionStatus`` has ``OptionState`` of
+ ``RequiresIndexDocuments``.
+
+ :type domain_name: string
+ :param domain_name: A string that represents the name of a
+ domain. Domain names must be unique across the domains
+ owned by an account within an AWS region. Domain names
+ must start with a letter or number and can contain the
+ following characters: a-z (lowercase), 0-9, and -
+ (hyphen). Uppercase letters and underscores are not
+ allowed.
+
+ :raises: BaseException, InternalException, ResourceNotFoundException
+ """
+ doc_path = ('index_documents_response',
+ 'index_documents_result',
+ 'field_names')
+ params = {'DomainName': domain_name}
+ return self.get_response(doc_path, 'IndexDocuments', params,
+ verb='POST', list_marker='FieldNames')
+
+ def update_default_search_field(self, domain_name, default_search_field):
+ """
+ Updates options defining the default search field used by
+ indexing for the search domain.
+
+ :type domain_name: string
+ :param domain_name: A string that represents the name of a
+ domain. Domain names must be unique across the domains
+ owned by an account within an AWS region. Domain names
+ must start with a letter or number and can contain the
+ following characters: a-z (lowercase), 0-9, and -
+ (hyphen). Uppercase letters and underscores are not
+ allowed.
+
+ :type default_search_field: string
+ :param default_search_field: The IndexField to use for search
+ requests issued with the q parameter. The default is an
+ empty string, which automatically searches all text
+ fields.
+
+ :raises: BaseException, InternalException, InvalidTypeException,
+ ResourceNotFoundException
+ """
+ doc_path = ('update_default_search_field_response',
+ 'update_default_search_field_result',
+ 'default_search_field')
+ params = {'DomainName': domain_name,
+ 'DefaultSearchField': default_search_field}
+ return self.get_response(doc_path, 'UpdateDefaultSearchField',
+ params, verb='POST')
+
+ def update_service_access_policies(self, domain_name, access_policies):
+ """
+ Updates the policies controlling access to the services in
+ this search domain.
+
+ :type domain_name: string
+ :param domain_name: A string that represents the name of a
+ domain. Domain names must be unique across the domains
+ owned by an account within an AWS region. Domain names
+ must start with a letter or number and can contain the
+ following characters: a-z (lowercase), 0-9, and -
+ (hyphen). Uppercase letters and underscores are not
+ allowed.
+
+ :type access_policies: string
+ :param access_policies: An IAM access policy as described in
+ The Access Policy Language in Using AWS Identity and
+ Access Management. The maximum size of an access policy
+ document is 100KB.
+
+ :raises: BaseException, InternalException, LimitExceededException,
+ ResourceNotFoundException, InvalidTypeException
+ """
+ doc_path = ('update_service_access_policies_response',
+ 'update_service_access_policies_result',
+ 'access_policies')
+ params = {'AccessPolicies': access_policies,
+ 'DomainName': domain_name}
+ return self.get_response(doc_path, 'UpdateServiceAccessPolicies',
+ params, verb='POST')
+
+ def update_stemming_options(self, domain_name, stems):
+ """
+ Updates stemming options used by indexing for the search domain.
+
+ :type domain_name: string
+ :param domain_name: A string that represents the name of a
+ domain. Domain names must be unique across the domains
+ owned by an account within an AWS region. Domain names
+ must start with a letter or number and can contain the
+ following characters: a-z (lowercase), 0-9, and -
+ (hyphen). Uppercase letters and underscores are not
+ allowed.
+
+ :type stems: string
+ :param stems: Maps terms to their stems. The JSON object
+ has a single key called "stems" whose value is a
+ dict mapping terms to their stems. The maximum size
+ of a stemming document is 500KB.
+ Example: {"stems":{"people": "person", "walking":"walk"}}
+
+ :raises: BaseException, InternalException, InvalidTypeException,
+ LimitExceededException, ResourceNotFoundException
+ """
+ doc_path = ('update_stemming_options_response',
+ 'update_stemming_options_result',
+ 'stems')
+ params = {'DomainName': domain_name,
+ 'Stems': stems}
+ return self.get_response(doc_path, 'UpdateStemmingOptions',
+ params, verb='POST')
+
+ def update_stopword_options(self, domain_name, stopwords):
+ """
+ Updates stopword options used by indexing for the search domain.
+
+ :type domain_name: string
+ :param domain_name: A string that represents the name of a
+ domain. Domain names must be unique across the domains
+ owned by an account within an AWS region. Domain names
+ must start with a letter or number and can contain the
+ following characters: a-z (lowercase), 0-9, and -
+ (hyphen). Uppercase letters and underscores are not
+ allowed.
+
+ :type stopwords: string
+ :param stopwords: Lists stopwords in a JSON object. The object has a
+ single key called "stopwords" whose value is an array of strings.
+ The maximum size of a stopwords document is 10KB. Example:
+ {"stopwords": ["a", "an", "the", "of"]}
+
+ :raises: BaseException, InternalException, InvalidTypeException,
+ LimitExceededException, ResourceNotFoundException
+ """
+ doc_path = ('update_stopword_options_response',
+ 'update_stopword_options_result',
+ 'stopwords')
+ params = {'DomainName': domain_name,
+ 'Stopwords': stopwords}
+ return self.get_response(doc_path, 'UpdateStopwordOptions',
+ params, verb='POST')
+
+ def update_synonym_options(self, domain_name, synonyms):
+ """
+ Updates synonym options used by indexing for the search domain.
+
+ :type domain_name: string
+ :param domain_name: A string that represents the name of a
+ domain. Domain names must be unique across the domains
+ owned by an account within an AWS region. Domain names
+ must start with a letter or number and can contain the
+ following characters: a-z (lowercase), 0-9, and -
+ (hyphen). Uppercase letters and underscores are not
+ allowed.
+
+ :type synonyms: string
+ :param synonyms: Maps terms to their synonyms. The JSON object
+ has a single key "synonyms" whose value is a dict mapping terms
+ to their synonyms. Each synonym is a simple string or an
+ array of strings. The maximum size of a synonyms document
+ is 100KB. Example:
+ {"synonyms": {"cat": ["feline", "kitten"], "puppy": "dog"}}
+
+ :raises: BaseException, InternalException, InvalidTypeException,
+ LimitExceededException, ResourceNotFoundException
+ """
+ doc_path = ('update_synonym_options_response',
+ 'update_synonym_options_result',
+ 'synonyms')
+ params = {'DomainName': domain_name,
+ 'Synonyms': synonyms}
+ return self.get_response(doc_path, 'UpdateSynonymOptions',
+ params, verb='POST')
diff --git a/boto/cloudsearch/layer2.py b/boto/cloudsearch/layer2.py
new file mode 100644
index 00000000..755d093a
--- /dev/null
+++ b/boto/cloudsearch/layer2.py
@@ -0,0 +1,52 @@
+# Copyright (c) 2012 Mitch Garnaat http://garnaat.org/
+# Copyright (c) 2012 Amazon.com, Inc. or its affiliates.
+# All Rights Reserved
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish, dis-
+# tribute, sublicense, and/or sell copies of the Software, and to permit
+# persons to whom the Software is furnished to do so, subject to the fol-
+# lowing conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
+# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+#
+
+from .layer1 import Layer1
+from .domain import Domain
+
+
+class Layer2(object):
+
+ def __init__(self, aws_access_key_id=None, aws_secret_access_key=None,
+ is_secure=True, port=None, proxy=None, proxy_port=None,
+ host=None, debug=0, session_token=None, region=None):
+ self.layer1 = Layer1(aws_access_key_id, aws_secret_access_key,
+ is_secure, port, proxy, proxy_port,
+ host, debug, session_token, region)
+
+ def list_domains(self, domain_names=None):
+ """
+ Return a list of :class:`boto.cloudsearch.domain.Domain`
+ objects for each domain defined in the current account.
+ """
+ domain_data = self.layer1.describe_domains(domain_names)
+ return [Domain(self.layer1, data) for data in domain_data]
+
+ def create_domain(self, domain_name):
+ """
+ Create a new CloudSearch domain and return the corresponding
+ :class:`boto.cloudsearch.domain.Domain` object.
+ """
+ data = self.layer1.create_domain(domain_name)
+ return Domain(self.layer1, data)
diff --git a/boto/cloudsearch/optionstatus.py b/boto/cloudsearch/optionstatus.py
new file mode 100644
index 00000000..869d82fa
--- /dev/null
+++ b/boto/cloudsearch/optionstatus.py
@@ -0,0 +1,249 @@
+# Copyright (c) 2012 Mitch Garnaat http://garnaat.org/
+# Copyright (c) 2012 Amazon.com, Inc. or its affiliates.
+# All Rights Reserved
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish, dis-
+# tribute, sublicense, and/or sell copies of the Software, and to permit
+# persons to whom the Software is furnished to do so, subject to the fol-
+# lowing conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
+# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+#
+
+try:
+ import simplejson as json
+except ImportError:
+ import json
+
+class OptionStatus(dict):
+ """
+ Presents a combination of status field (defined below) which are
+ accessed as attributes and option values which are stored in the
+ native Python dictionary. In this class, the option values are
+ merged from a JSON object that is stored as the Option part of
+ the object.
+
+ :ivar domain_name: The name of the domain this option is associated with.
+ :ivar create_date: A timestamp for when this option was created.
+ :ivar state: The state of processing a change to an option.
+ Possible values:
+
+ * RequiresIndexDocuments: the option's latest value will not
+ be visible in searches until IndexDocuments has been called
+ and indexing is complete.
+ * Processing: the option's latest value is not yet visible in
+ all searches but is in the process of being activated.
+ * Active: the option's latest value is completely visible.
+
+ :ivar update_date: A timestamp for when this option was updated.
+ :ivar update_version: A unique integer that indicates when this
+ option was last updated.
+ """
+
+ def __init__(self, domain, data=None, refresh_fn=None, save_fn=None):
+ self.domain = domain
+ self.refresh_fn = refresh_fn
+ self.save_fn = save_fn
+ self.refresh(data)
+
+    def _update_status(self, status):
+        """Copy the status fields of a CloudSearch response onto this object.
+
+        Bug fix: the state is stored under both ``self.status`` and
+        ``self.state`` because ``wait_for_state()`` and ``endElement()``
+        read ``self.state``, which the refresh() path never set.
+        """
+        self.creation_date = status['creation_date']
+        self.status = status['state']
+        # Keep self.state in sync so wait_for_state() can poll it.
+        self.state = status['state']
+        self.update_date = status['update_date']
+        self.update_version = int(status['update_version'])
+
+ def _update_options(self, options):
+ if options:
+ self.update(json.loads(options))
+
+ def refresh(self, data=None):
+ """
+ Refresh the local state of the object. You can either pass
+ new state data in as the parameter ``data`` or, if that parameter
+ is omitted, the state data will be retrieved from CloudSearch.
+ """
+ if not data:
+ if self.refresh_fn:
+ data = self.refresh_fn(self.domain.name)
+ if data:
+ self._update_status(data['status'])
+ self._update_options(data['options'])
+
+ def to_json(self):
+ """
+ Return the JSON representation of the options as a string.
+ """
+ return json.dumps(self)
+
+    def startElement(self, name, attrs, connection):
+        # SAX parsing hook; this object builds no child elements.
+        return None
+
+    def endElement(self, name, value, connection):
+        # SAX parsing hook: map XML element names onto attributes.
+        if name == 'CreationDate':
+            self.created = value
+        elif name == 'State':
+            self.state = value
+        elif name == 'UpdateDate':
+            self.updated = value
+        elif name == 'UpdateVersion':
+            self.update_version = int(value)
+        elif name == 'Options':
+            # Bug fix: update_from_json_doc() does not exist on this
+            # class; _update_options() is the method that merges the
+            # JSON options document into the underlying dict.
+            self._update_options(value)
+        else:
+            setattr(self, name, value)
+
+ def save(self):
+ """
+ Write the current state of the local object back to the
+ CloudSearch service.
+ """
+ if self.save_fn:
+ data = self.save_fn(self.domain.name, self.to_json())
+ self.refresh(data)
+
+    def wait_for_state(self, state):
+        """
+        Performs polling of CloudSearch to wait for the ``state``
+        of this object to change to the provided state.
+
+        :type state: string
+        :param state: The state value to wait for, e.g. 'Active'.
+        """
+        # Bug fix: this module only imports json at the top of the
+        # file, so the original time.sleep() call raised NameError.
+        import time
+        while self.state != state:
+            time.sleep(5)
+            self.refresh()
+
+
+class IndexFieldStatus(OptionStatus):
+
+ def _update_options(self, options):
+ self.update(options)
+
+ def save(self):
+ pass
+
+
+class RankExpressionStatus(IndexFieldStatus):
+
+ pass
+
+class ServicePoliciesStatus(OptionStatus):
+
+ def new_statement(self, arn, ip):
+ """
+ Returns a new policy statement that will allow
+ access to the service described by ``arn`` by the
+ ip specified in ``ip``.
+
+ :type arn: string
+ :param arn: The Amazon Resource Notation identifier for the
+ service you wish to provide access to. This would be
+ either the search service or the document service.
+
+ :type ip: string
+ :param ip: An IP address or CIDR block you wish to grant access
+ to.
+ """
+ return {
+ "Effect":"Allow",
+ "Action":"*", # Docs say use GET, but denies unless *
+ "Resource": arn,
+ "Condition": {
+ "IpAddress": {
+ "aws:SourceIp": [ip]
+ }
+ }
+ }
+
+ def _allow_ip(self, arn, ip):
+ if 'Statement' not in self:
+ s = self.new_statement(arn, ip)
+ self['Statement'] = [s]
+ self.save()
+ else:
+ add_statement = True
+ for statement in self['Statement']:
+ if statement['Resource'] == arn:
+ for condition_name in statement['Condition']:
+ if condition_name == 'IpAddress':
+ add_statement = False
+ condition = statement['Condition'][condition_name]
+ if ip not in condition['aws:SourceIp']:
+ condition['aws:SourceIp'].append(ip)
+
+ if add_statement:
+ s = self.new_statement(arn, ip)
+ self['Statement'].append(s)
+ self.save()
+
+ def allow_search_ip(self, ip):
+ """
+ Add the provided ip address or CIDR block to the list of
+ allowable address for the search service.
+
+ :type ip: string
+ :param ip: An IP address or CIDR block you wish to grant access
+ to.
+ """
+ arn = self.domain.search_service_arn
+ self._allow_ip(arn, ip)
+
+ def allow_doc_ip(self, ip):
+ """
+ Add the provided ip address or CIDR block to the list of
+ allowable address for the document service.
+
+ :type ip: string
+ :param ip: An IP address or CIDR block you wish to grant access
+ to.
+ """
+ arn = self.domain.doc_service_arn
+ self._allow_ip(arn, ip)
+
+ def _disallow_ip(self, arn, ip):
+ if 'Statement' not in self:
+ return
+ need_update = False
+ for statement in self['Statement']:
+ if statement['Resource'] == arn:
+ for condition_name in statement['Condition']:
+ if condition_name == 'IpAddress':
+ condition = statement['Condition'][condition_name]
+ if ip in condition['aws:SourceIp']:
+ condition['aws:SourceIp'].remove(ip)
+ need_update = True
+ if need_update:
+ self.save()
+
+ def disallow_search_ip(self, ip):
+ """
+ Remove the provided ip address or CIDR block from the list of
+ allowable address for the search service.
+
+ :type ip: string
+ :param ip: An IP address or CIDR block you wish to grant access
+ to.
+ """
+ arn = self.domain.search_service_arn
+ self._disallow_ip(arn, ip)
+
+ def disallow_doc_ip(self, ip):
+ """
+ Remove the provided ip address or CIDR block from the list of
+ allowable address for the document service.
+
+ :type ip: string
+ :param ip: An IP address or CIDR block you wish to grant access
+ to.
+ """
+ arn = self.domain.doc_service_arn
+ self._disallow_ip(arn, ip)
diff --git a/boto/cloudsearch/search.py b/boto/cloudsearch/search.py
new file mode 100644
index 00000000..f1b16e47
--- /dev/null
+++ b/boto/cloudsearch/search.py
@@ -0,0 +1,298 @@
+# Copyright (c) 2012 Mitch Garnaat http://garnaat.org/
+# Copyright (c) 2012 Amazon.com, Inc. or its affiliates.
+# All Rights Reserved
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish, dis-
+# tribute, sublicense, and/or sell copies of the Software, and to permit
+# persons to whom the Software is furnished to do so, subject to the fol-
+# lowing conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
+# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+#
+from math import ceil
+import time
+import json
+import boto
+import requests
+
+
+class SearchServiceException(Exception):
+ pass
+
+
+class CommitMismatchError(Exception):
+ pass
+
+
+class SearchResults(object):
+
+    def __init__(self, **attrs):
+        """Wrap the decoded JSON search response returned by CloudSearch.
+
+        ``attrs`` is the response dict augmented with the originating
+        ``query`` and ``search_service`` (see SearchConnection.__call__).
+        """
+        self.rid = attrs['info']['rid']
+        # self.doc_coverage_pct = attrs['info']['doc-coverage-pct']
+        self.cpu_time_ms = attrs['info']['cpu-time-ms']
+        self.time_ms = attrs['info']['time-ms']
+        self.hits = attrs['hits']['found']
+        self.docs = attrs['hits']['hit']
+        self.start = attrs['hits']['start']
+        self.rank = attrs['rank']
+        self.match_expression = attrs['match-expr']
+        self.query = attrs['query']
+        self.search_service = attrs['search_service']
+
+        # Bug fix: under Python 2, hits / real_size is integer (floor)
+        # division, so ceil() was a no-op and the final partial page of
+        # results was never counted. Force float division before ceil.
+        self.num_pages_needed = ceil(self.hits / float(self.query.real_size))
+
+ def __len__(self):
+ return len(self.docs)
+
+ def __iter__(self):
+ return iter(self.docs)
+
+ def next_page(self):
+ """Call Cloudsearch to get the next page of search results
+
+ :rtype: :class:`boto.cloudsearch.search.SearchResults`
+ :return: A cloudsearch SearchResults object
+ """
+ if self.query.page <= self.num_pages_needed:
+ self.query.start += self.query.real_size
+ self.query.page += 1
+ return self.search_service(self.query)
+ else:
+ raise StopIteration
+
+
+class Query(object):
+
+ RESULTS_PER_PAGE = 500
+
+ def __init__(self, q=None, bq=None, rank=None,
+ return_fields=None, size=10,
+ start=0, facet=None, facet_constraints=None,
+ facet_sort=None, facet_top_n=None, t=None):
+
+ self.q = q
+ self.bq = bq
+ self.rank = rank or []
+ self.return_fields = return_fields or []
+ self.start = start
+ self.facet = facet or []
+ self.facet_constraints = facet_constraints or {}
+ self.facet_sort = facet_sort or {}
+ self.facet_top_n = facet_top_n or {}
+ self.t = t or {}
+ self.page = 0
+ self.update_size(size)
+
+ def update_size(self, new_size):
+ self.size = new_size
+ self.real_size = Query.RESULTS_PER_PAGE if (self.size >
+ Query.RESULTS_PER_PAGE or self.size == 0) else self.size
+
+ def to_params(self):
+ """Transform search parameters from instance properties to a dictionary
+
+ :rtype: dict
+ :return: search parameters
+ """
+ params = {'start': self.start, 'size': self.real_size}
+
+ if self.q:
+ params['q'] = self.q
+
+ if self.bq:
+ params['bq'] = self.bq
+
+ if self.rank:
+ params['rank'] = ','.join(self.rank)
+
+ if self.return_fields:
+ params['return-fields'] = ','.join(self.return_fields)
+
+ if self.facet:
+ params['facet'] = ','.join(self.facet)
+
+ if self.facet_constraints:
+ for k, v in self.facet_constraints.iteritems():
+ params['facet-%s-constraints' % k] = v
+
+ if self.facet_sort:
+ for k, v in self.facet_sort.iteritems():
+ params['facet-%s-sort' % k] = v
+
+ if self.facet_top_n:
+ for k, v in self.facet_top_n.iteritems():
+ params['facet-%s-top-n' % k] = v
+
+ if self.t:
+ for k, v in self.t.iteritems():
+ params['t-%s' % k] = v
+ return params
+
+
+class SearchConnection(object):
+
+ def __init__(self, domain=None, endpoint=None):
+ self.domain = domain
+ self.endpoint = endpoint
+ if not endpoint:
+ self.endpoint = domain.search_service_endpoint
+
+ def build_query(self, q=None, bq=None, rank=None, return_fields=None,
+ size=10, start=0, facet=None, facet_constraints=None,
+ facet_sort=None, facet_top_n=None, t=None):
+ return Query(q=q, bq=bq, rank=rank, return_fields=return_fields,
+ size=size, start=start, facet=facet,
+ facet_constraints=facet_constraints,
+ facet_sort=facet_sort, facet_top_n=facet_top_n, t=t)
+
+ def search(self, q=None, bq=None, rank=None, return_fields=None,
+ size=10, start=0, facet=None, facet_constraints=None,
+ facet_sort=None, facet_top_n=None, t=None):
+ """
+ Query Cloudsearch
+
+ :type q:
+ :param q:
+
+ :type bq:
+ :param bq:
+
+ :type rank:
+ :param rank:
+
+ :type return_fields:
+ :param return_fields:
+
+ :type size:
+ :param size:
+
+ :type start:
+ :param start:
+
+ :type facet:
+ :param facet:
+
+ :type facet_constraints:
+ :param facet_constraints:
+
+ :type facet_sort:
+ :param facet_sort:
+
+ :type facet_top_n:
+ :param facet_top_n:
+
+ :type t:
+ :param t:
+
+ :rtype: :class:`boto.cloudsearch.search.SearchResults`
+ :return: A cloudsearch SearchResults object
+ """
+
+ query = self.build_query(q=q, bq=bq, rank=rank,
+ return_fields=return_fields,
+ size=size, start=start, facet=facet,
+ facet_constraints=facet_constraints,
+ facet_sort=facet_sort,
+ facet_top_n=facet_top_n, t=t)
+ return self(query)
+
+ def __call__(self, query):
+ """Make a call to CloudSearch
+
+ :type query: :class:`boto.cloudsearch.search.Query`
+ :param query: A fully specified Query instance
+
+ :rtype: :class:`boto.cloudsearch.search.SearchResults`
+ :return: A cloudsearch SearchResults object
+ """
+ url = "http://%s/2011-02-01/search" % (self.endpoint)
+ params = query.to_params()
+
+ r = requests.get(url, params=params)
+ data = json.loads(r.content)
+ data['query'] = query
+ data['search_service'] = self
+
+ if 'messages' in data and 'error' in data:
+ for m in data['messages']:
+ if m['severity'] == 'fatal':
+ raise SearchServiceException("Error processing search %s "
+ "=> %s" % (params, m['message']), query)
+ elif 'error' in data:
+ raise SearchServiceException("Unknown error processing search %s"
+ % (params), query)
+
+ return SearchResults(**data)
+
+ def get_all_paged(self, query, per_page):
+ """Get a generator to iterate over all pages of search results
+
+ :type query: :class:`exfm.cloudsearch.Query`
+ :param query: A fully specified Query instance
+
+ :type per_page: int
+ :param per_page: Number of docs in each SearchResults object.
+
+ :rtype: generator
+ :return: Generator containing :class:`exfm.cloudsearch.SearchResults`
+ """
+ query.update_size(per_page)
+ page = 0
+ num_pages_needed = 0
+ while page <= num_pages_needed:
+ results = self(query)
+ num_pages_needed = results.num_pages_needed
+ yield results
+ query.start += query.real_size
+ page += 1
+
+ def get_all_hits(self, query):
+ """Get a generator to iterate over all search results
+
+ Transparently handles the results paging from Cloudsearch
+ search results so even if you have many thousands of results
+ you can iterate over all results in a reasonably efficient
+ manner.
+
+ :type query: :class:`exfm.cloudsearch.Query`
+ :param query: A fully specified Query instance
+
+ :rtype: generator
+ :return: All docs matching query
+ """
+ page = 0
+ num_pages_needed = 0
+ while page <= num_pages_needed:
+ results = self(query)
+ num_pages_needed = results.num_pages_needed
+ for doc in results:
+ yield doc
+ query.start += query.real_size
+ page += 1
+
+ def get_num_hits(self, query):
+ """Return the total number of hits for query
+
+ :type query: :class:`exfm.cloudsearch.Query`
+ :param query: A fully specified Query instance
+
+ :rtype: int
+ :return: Total number of hits for query
+ """
+ query.update_size(1)
+ return self(query).hits
+
+
+
diff --git a/boto/cloudsearch/sourceattribute.py b/boto/cloudsearch/sourceattribute.py
new file mode 100644
index 00000000..c3435079
--- /dev/null
+++ b/boto/cloudsearch/sourceattribute.py
@@ -0,0 +1,75 @@
+# Copyright (c) 2012 Mitch Garnaat http://garnaat.org/
+# Copyright (c) 2012 Amazon.com, Inc. or its affiliates.
+# All Rights Reserved
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish, dis-
+# tribute, sublicense, and/or sell copies of the Software, and to permit
+# persons to whom the Software is furnished to do so, subject to the fol-
+# lowing conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
+# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+
+class SourceAttribute(object):
+ """
+ Provide information about attributes for an index field.
+ A maximum of 20 source attributes can be configured for
+ each index field.
+
+ :ivar default: Optional default value if the source attribute
+ is not specified in a document.
+
+ :ivar name: The name of the document source field to add
+ to this ``IndexField``.
+
+ :ivar data_function: Identifies the transformation to apply
+ when copying data from a source attribute.
+
+ :ivar data_map: The value is a dict with the following keys:
+ * cases - A dict that translates source field values
+ to custom values.
+ * default - An optional default value to use if the
+ source attribute is not specified in a document.
+ * name - the name of the document source field to add
+ to this ``IndexField``
+ :ivar data_trim_title: Trims common title words from a source
+ document attribute when populating an ``IndexField``.
+ This can be used to create an ``IndexField`` you can
+ use for sorting. The value is a dict with the following
+ fields:
+ * default - An optional default value.
+ * language - an IETF RFC 4646 language code.
+ * separator - The separator that follows the text to trim.
+ * name - The name of the document source field to add.
+ """
+
+ ValidDataFunctions = ('Copy', 'TrimTitle', 'Map')
+
+ def __init__(self):
+ self.data_copy = {}
+ self._data_function = self.ValidDataFunctions[0]
+ self.data_map = {}
+ self.data_trim_title = {}
+
+ @property
+ def data_function(self):
+ return self._data_function
+
+ @data_function.setter
+ def data_function(self, value):
+ if value not in self.ValidDataFunctions:
+ valid = '|'.join(self.ValidDataFunctions)
+ raise ValueError('data_function must be one of: %s' % valid)
+ self._data_function = value
+
diff --git a/boto/connection.py b/boto/connection.py
index e170d0b3..56d39684 100644
--- a/boto/connection.py
+++ b/boto/connection.py
@@ -450,10 +450,10 @@ class AWSAuthConnection(object):
self.protocol = 'http'
self.host = host
self.path = path
- if isinstance(debug, (int, long)):
- self.debug = debug
- else:
- self.debug = config.getint('Boto', 'debug', 0)
+ # if the value passed in for debug
+ if not isinstance(debug, (int, long)):
+ debug = 0
+ self.debug = config.getint('Boto', 'debug', debug)
if port:
self.port = port
else:
@@ -470,10 +470,14 @@ class AWSAuthConnection(object):
timeout = config.getint('Boto', 'http_socket_timeout')
self.http_connection_kwargs['timeout'] = timeout
- self.provider = Provider(provider,
- aws_access_key_id,
- aws_secret_access_key,
- security_token)
+ if isinstance(provider, Provider):
+ # Allow overriding Provider
+ self.provider = provider
+ else:
+ self.provider = Provider(provider,
+ aws_access_key_id,
+ aws_secret_access_key,
+ security_token)
# allow config file to override default host
if self.provider.host:
@@ -645,7 +649,12 @@ class AWSAuthConnection(object):
if self.proxy_user and self.proxy_pass:
for k, v in self.get_proxy_auth_header().items():
sock.sendall("%s: %s\r\n" % (k, v))
- sock.sendall("\r\n")
+ # See discussion about this config option at
+ # https://groups.google.com/forum/?fromgroups#!topic/boto-dev/teenFvOq2Cc
+ if config.getbool('Boto', 'send_crlf_after_proxy_auth_headers', False):
+ sock.sendall("\r\n")
+ else:
+ sock.sendall("\r\n")
resp = httplib.HTTPResponse(sock, strict=True, debuglevel=self.debug)
resp.begin()
diff --git a/boto/dynamodb/__init__.py b/boto/dynamodb/__init__.py
index ce552627..10556c89 100644
--- a/boto/dynamodb/__init__.py
+++ b/boto/dynamodb/__init__.py
@@ -15,7 +15,7 @@
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
-# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
@@ -23,10 +23,11 @@
from boto.ec2.regioninfo import RegionInfo
+
def regions():
"""
Get all available regions for the Amazon DynamoDB service.
-
+
:rtype: list
:return: A list of :class:`boto.regioninfo.RegionInfo`
"""
@@ -34,17 +35,26 @@ def regions():
return [RegionInfo(name='us-east-1',
endpoint='dynamodb.us-east-1.amazonaws.com',
connection_cls=boto.dynamodb.layer2.Layer2),
+ RegionInfo(name='us-west-1',
+ endpoint='dynamodb.us-west-1.amazonaws.com',
+ connection_cls=boto.dynamodb.layer2.Layer2),
+ RegionInfo(name='us-west-2',
+ endpoint='dynamodb.us-west-2.amazonaws.com',
+ connection_cls=boto.dynamodb.layer2.Layer2),
RegionInfo(name='ap-northeast-1',
endpoint='dynamodb.ap-northeast-1.amazonaws.com',
connection_cls=boto.dynamodb.layer2.Layer2),
+ RegionInfo(name='ap-southeast-1',
+ endpoint='dynamodb.ap-southeast-1.amazonaws.com',
+ connection_cls=boto.dynamodb.layer2.Layer2),
RegionInfo(name='eu-west-1',
endpoint='dynamodb.eu-west-1.amazonaws.com',
connection_cls=boto.dynamodb.layer2.Layer2),
]
+
def connect_to_region(region_name, **kw_params):
for region in regions():
if region.name == region_name:
return region.connect(**kw_params)
return None
-
diff --git a/boto/dynamodb/batch.py b/boto/dynamodb/batch.py
index cce32b62..2e76b92a 100644
--- a/boto/dynamodb/batch.py
+++ b/boto/dynamodb/batch.py
@@ -15,14 +15,17 @@
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
-# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
+
class Batch(object):
"""
+ Used to construct a BatchGet request.
+
:ivar table: The Table object from which the item is retrieved.
:ivar keys: A list of scalar or tuple values. Each element in the
@@ -31,8 +34,10 @@ class Batch(object):
list should be a tuple consisting of (hash_key, range_key). If
the schema for the table contains only a HashKey, each element
in the list should be a scalar value of the appropriate type
- for the table schema.
-
+ for the table schema. NOTE: The maximum number of items that
+ can be retrieved for a single operation is 100. Also, the
+ number of items retrieved is constrained by a 1 MB size limit.
+
:ivar attributes_to_get: A list of attribute names.
If supplied, only the specified attribute names will
be returned. Otherwise, all attributes will be returned.
@@ -42,7 +47,74 @@ class Batch(object):
self.table = table
self.keys = keys
self.attributes_to_get = attributes_to_get
-
+
+ def to_dict(self):
+ """
+ Convert the Batch object into the format required for Layer1.
+ """
+ batch_dict = {}
+ key_list = []
+ for key in self.keys:
+ if isinstance(key, tuple):
+ hash_key, range_key = key
+ else:
+ hash_key = key
+ range_key = None
+ k = self.table.layer2.build_key_from_values(self.table.schema,
+ hash_key, range_key)
+ key_list.append(k)
+ batch_dict['Keys'] = key_list
+ if self.attributes_to_get:
+ batch_dict['AttributesToGet'] = self.attributes_to_get
+ return batch_dict
+
+class BatchWrite(object):
+ """
+ Used to construct a BatchWrite request. Each BatchWrite object
+ represents a collection of PutItem and DeleteItem requests for
+ a single Table.
+
+ :ivar table: The Table object from which the item is retrieved.
+
+ :ivar puts: A list of :class:`boto.dynamodb.item.Item` objects
+ that you want to write to DynamoDB.
+
+ :ivar deletes: A list of scalar or tuple values. Each element in the
+ list represents one Item to delete. If the schema for the
+ table has both a HashKey and a RangeKey, each element in the
+ list should be a tuple consisting of (hash_key, range_key). If
+ the schema for the table contains only a HashKey, each element
+ in the list should be a scalar value of the appropriate type
+ for the table schema.
+ """
+
+ def __init__(self, table, puts=None, deletes=None):
+ self.table = table
+ self.puts = puts or []
+ self.deletes = deletes or []
+
+ def to_dict(self):
+ """
+ Convert the Batch object into the format required for Layer1.
+ """
+ op_list = []
+ for item in self.puts:
+ d = {'Item': self.table.layer2.dynamize_item(item)}
+ d = {'PutRequest': d}
+ op_list.append(d)
+ for key in self.deletes:
+ if isinstance(key, tuple):
+ hash_key, range_key = key
+ else:
+ hash_key = key
+ range_key = None
+ k = self.table.layer2.build_key_from_values(self.table.schema,
+ hash_key, range_key)
+ d = {'Key': k}
+ op_list.append({'DeleteRequest': d})
+ return (self.table.name, op_list)
+
+
class BatchList(list):
"""
A subclass of a list object that contains a collection of
@@ -56,7 +128,7 @@ class BatchList(list):
def add_batch(self, table, keys, attributes_to_get=None):
"""
Add a Batch to this BatchList.
-
+
:type table: :class:`boto.dynamodb.table.Table`
:param table: The Table object in which the items are contained.
@@ -67,8 +139,10 @@ class BatchList(list):
list should be a tuple consisting of (hash_key, range_key). If
the schema for the table contains only a HashKey, each element
in the list should be a scalar value of the appropriate type
- for the table schema.
-
+ for the table schema. NOTE: The maximum number of items that
+ can be retrieved for a single operation is 100. Also, the
+ number of items retrieved is constrained by a 1 MB size limit.
+
:type attributes_to_get: list
:param attributes_to_get: A list of attribute names.
If supplied, only the specified attribute names will
@@ -79,5 +153,57 @@ class BatchList(list):
def submit(self):
return self.layer2.batch_get_item(self)
-
+ def to_dict(self):
+ """
+ Convert a BatchList object into format required for Layer1.
+ """
+ d = {}
+ for batch in self:
+ d[batch.table.name] = batch.to_dict()
+ return d
+
+class BatchWriteList(list):
+ """
+ A subclass of a list object that contains a collection of
+ :class:`boto.dynamodb.batch.BatchWrite` objects.
+ """
+
+ def __init__(self, layer2):
+ list.__init__(self)
+ self.layer2 = layer2
+
+ def add_batch(self, table, puts=None, deletes=None):
+ """
+ Add a BatchWrite to this BatchWriteList.
+
+ :type table: :class:`boto.dynamodb.table.Table`
+ :param table: The Table object in which the items are contained.
+
+ :type puts: list of :class:`boto.dynamodb.item.Item` objects
+ :param puts: A list of items that you want to write to DynamoDB.
+
+ :type deletes: A list
+ :param deletes: A list of scalar or tuple values. Each element
+ in the list represents one Item to delete. If the schema
+ for the table has both a HashKey and a RangeKey, each
+ element in the list should be a tuple consisting of
+ (hash_key, range_key). If the schema for the table
+ contains only a HashKey, each element in the list should
+ be a scalar value of the appropriate type for the table
+ schema.
+ """
+ self.append(BatchWrite(table, puts, deletes))
+
+ def submit(self):
+ return self.layer2.batch_write_item(self)
+
+ def to_dict(self):
+ """
+ Convert a BatchWriteList object into format required for Layer1.
+ """
+ d = {}
+ for batch in self:
+ table_name, batch_dict = batch.to_dict()
+ d[table_name] = batch_dict
+ return d
diff --git a/boto/dynamodb/condition.py b/boto/dynamodb/condition.py
index 6c4f24ec..43ed6de1 100644
--- a/boto/dynamodb/condition.py
+++ b/boto/dynamodb/condition.py
@@ -15,13 +15,14 @@
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
-# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
-from boto.dynamodb.types import get_dynamodb_type, dynamize_value, convert_num
+from boto.dynamodb.types import dynamize_value
+
class Condition(object):
"""
@@ -31,27 +32,29 @@ class Condition(object):
pass
+
class ConditionNoArgs(Condition):
"""
Abstract class for Conditions that require no arguments, such
as NULL or NOT_NULL.
"""
-
+
def __repr__(self):
return '%s' % self.__class__.__name__
def to_dict(self):
return {'ComparisonOperator': self.__class__.__name__}
+
class ConditionOneArg(Condition):
"""
Abstract class for Conditions that require a single argument
such as EQ or NE.
"""
-
+
def __init__(self, v1):
self.v1 = v1
-
+
def __repr__(self):
return '%s:%s' % (self.__class__.__name__, self.v1)
@@ -59,12 +62,13 @@ class ConditionOneArg(Condition):
return {'AttributeValueList': [dynamize_value(self.v1)],
'ComparisonOperator': self.__class__.__name__}
+
class ConditionTwoArgs(Condition):
"""
Abstract class for Conditions that require two arguments.
The only example of this currently is BETWEEN.
"""
-
+
def __init__(self, v1, v2):
self.v1 = v1
self.v2 = v2
@@ -76,64 +80,73 @@ class ConditionTwoArgs(Condition):
values = (self.v1, self.v2)
return {'AttributeValueList': [dynamize_value(v) for v in values],
'ComparisonOperator': self.__class__.__name__}
-
+
+
class EQ(ConditionOneArg):
-
+
pass
-
+
+
class NE(ConditionOneArg):
-
+
pass
-
+
+
class LE(ConditionOneArg):
-
+
pass
-
+
+
class LT(ConditionOneArg):
-
+
pass
-
+
+
class GE(ConditionOneArg):
-
+
pass
-
+
+
class GT(ConditionOneArg):
-
+
pass
-
+
+
class NULL(ConditionNoArgs):
-
+
pass
-
+
+
class NOT_NULL(ConditionNoArgs):
-
+
pass
-
+
+
class CONTAINS(ConditionOneArg):
-
+
pass
-
+
+
class NOT_CONTAINS(ConditionOneArg):
-
+
pass
-
+
+
class BEGINS_WITH(ConditionOneArg):
-
+
pass
-
+
+
class IN(ConditionOneArg):
-
+
pass
-
+
+
class BEGINS_WITH(ConditionOneArg):
-
- pass
-
-class BETWEEN(ConditionTwoArgs):
pass
-
-
-
+class BETWEEN(ConditionTwoArgs):
+
+ pass
diff --git a/boto/dynamodb/exceptions.py b/boto/dynamodb/exceptions.py
index ef485042..b60d5aa0 100644
--- a/boto/dynamodb/exceptions.py
+++ b/boto/dynamodb/exceptions.py
@@ -2,6 +2,7 @@
Exceptions that are specific to the dynamodb module.
"""
from boto.exception import BotoServerError, BotoClientError
+from boto.exception import DynamoDBResponseError
class DynamoDBExpiredTokenError(BotoServerError):
"""
@@ -10,6 +11,7 @@ class DynamoDBExpiredTokenError(BotoServerError):
"""
pass
+
class DynamoDBKeyNotFoundError(BotoClientError):
"""
Raised when attempting to retrieve or interact with an item whose key
@@ -17,6 +19,7 @@ class DynamoDBKeyNotFoundError(BotoClientError):
"""
pass
+
class DynamoDBItemError(BotoClientError):
"""
Raised when invalid parameters are passed when creating a
@@ -24,3 +27,19 @@ class DynamoDBItemError(BotoClientError):
"""
pass
+
+class DynamoDBConditionalCheckFailedError(DynamoDBResponseError):
+ """
+ Raised when a ConditionalCheckFailedException response is received.
+ This happens when a conditional check, expressed via the expected_value
+    parameter, fails.
+ """
+ pass
+
+class DynamoDBValidationError(DynamoDBResponseError):
+ """
+ Raised when a ValidationException response is received. This happens
+ when one or more required parameter values are missing, or if the item
+ has exceeded the 64Kb size limit.
+ """
+ pass
diff --git a/boto/dynamodb/item.py b/boto/dynamodb/item.py
index 3a2122ef..4d4abda3 100644
--- a/boto/dynamodb/item.py
+++ b/boto/dynamodb/item.py
@@ -15,7 +15,7 @@
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
-# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
@@ -23,6 +23,7 @@
from boto.dynamodb.exceptions import DynamoDBItemError
+
class Item(dict):
"""
An item in Amazon DynamoDB.
@@ -34,41 +35,39 @@ class Item(dict):
:ivar range_key_name: The name of the RangeKey associated with this item.
:ivar table: The Table this item belongs to.
"""
-
+
def __init__(self, table, hash_key=None, range_key=None, attrs=None):
self.table = table
self._updates = None
self._hash_key_name = self.table.schema.hash_key_name
self._range_key_name = self.table.schema.range_key_name
- hash_key = hash_key or attrs.get(self._hash_key_name, None)
- if hash_key is None:
- raise DynamoDBItemError('You must supply a hash_key')
- if self._range_key_name:
- range_key = range_key or attrs.get(self._range_key_name, None)
- if range_key is None:
- raise DynamoDBItemError('You must supply a range_key')
+ if attrs == None:
+ attrs = {}
+ if hash_key == None:
+ hash_key = attrs.get(self._hash_key_name, None)
self[self._hash_key_name] = hash_key
if self._range_key_name:
+ if range_key == None:
+ range_key = attrs.get(self._range_key_name, None)
self[self._range_key_name] = range_key
- if attrs:
- for key, value in attrs.items():
- if key != self._hash_key_name and key != self._range_key_name:
- self[key] = value
+ for key, value in attrs.items():
+ if key != self._hash_key_name and key != self._range_key_name:
+ self[key] = value
self.consumed_units = 0
self._updates = {}
@property
def hash_key(self):
return self[self._hash_key_name]
-
+
@property
def range_key(self):
return self.get(self._range_key_name)
-
+
@property
def hash_key_name(self):
return self._hash_key_name
-
+
@property
def range_key_name(self):
return self._range_key_name
@@ -140,18 +139,18 @@ class Item(dict):
"""
return self.table.layer2.update_item(self, expected_value,
return_values)
-
+
def delete(self, expected_value=None, return_values=None):
"""
Delete the item from DynamoDB.
:type expected_value: dict
- :param expected_value: A dictionary of name/value pairs that you expect.
- This dictionary should have name/value pairs where the name
- is the name of the attribute and the value is either the value
- you are expecting or False if you expect the attribute not to
- exist.
-
+ :param expected_value: A dictionary of name/value pairs that
+ you expect. This dictionary should have name/value pairs
+ where the name is the name of the attribute and the value
+ is either the value you are expecting or False if you expect
+ the attribute not to exist.
+
:type return_values: str
:param return_values: Controls the return of attribute
name-value pairs before then were changed. Possible
@@ -168,11 +167,11 @@ class Item(dict):
in Amazon DynamoDB.
:type expected_value: dict
- :param expected_value: A dictionary of name/value pairs that you expect.
- This dictionary should have name/value pairs where the name
- is the name of the attribute and the value is either the value
- you are expecting or False if you expect the attribute not to
- exist.
+ :param expected_value: A dictionary of name/value pairs that
+ you expect. This dictionary should have name/value pairs
+ where the name is the name of the attribute and the value
+ is either the value you are expecting or False if you expect
+ the attribute not to exist.
:type return_values: str
:param return_values: Controls the return of attribute
diff --git a/boto/dynamodb/layer1.py b/boto/dynamodb/layer1.py
index 4df20f36..5c187a58 100644
--- a/boto/dynamodb/layer1.py
+++ b/boto/dynamodb/layer1.py
@@ -15,7 +15,7 @@
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
-# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
@@ -26,7 +26,6 @@ from boto.connection import AWSAuthConnection
from boto.exception import DynamoDBResponseError
from boto.provider import Provider
from boto.dynamodb import exceptions as dynamodb_exceptions
-from boto.dynamodb.table import Table
import time
try:
@@ -39,7 +38,8 @@ except ImportError:
# value of Debug to be 2
#
#boto.set_stream_logger('dynamodb')
-Debug=0
+Debug = 0
+
class Layer1(AWSAuthConnection):
"""
@@ -54,13 +54,13 @@ class Layer1(AWSAuthConnection):
keeps a running total of the number of ThroughputExceeded
responses this connection has received from Amazon DynamoDB.
"""
-
+
DefaultRegionName = 'us-east-1'
"""The default region name for DynamoDB API."""
ServiceName = 'DynamoDB'
"""The name of the Service"""
-
+
Version = '20111205'
"""DynamoDB API version."""
@@ -69,12 +69,18 @@ class Layer1(AWSAuthConnection):
SessionExpiredError = 'com.amazon.coral.service#ExpiredTokenException'
"""The error response returned when session token has expired"""
-
+
+ ConditionalCheckFailedError = 'ConditionalCheckFailedException'
+ """The error response returned when a conditional check fails"""
+
+ ValidationError = 'ValidationException'
+ """The error response returned when an item is invalid in some way"""
+
ResponseError = DynamoDBResponseError
def __init__(self, aws_access_key_id=None, aws_secret_access_key=None,
is_secure=True, port=None, proxy=None, proxy_port=None,
- host=None, debug=0, session_token=None, region=None):
+ debug=0, session_token=None, region=None):
if not region:
region_name = boto.config.get('DynamoDB', 'region',
self.DefaultRegionName)
@@ -106,7 +112,7 @@ class Layer1(AWSAuthConnection):
self.creds.secret_key,
self.creds.session_token)
self._auth_handler.update_provider(self.provider)
-
+
def _get_session_token(self):
boto.log.debug('Creating new Session Token')
sts = boto.connect_sts(self._passed_access_key,
@@ -120,10 +126,11 @@ class Layer1(AWSAuthConnection):
"""
:raises: ``DynamoDBExpiredTokenError`` if the security token expires.
"""
- headers = {'X-Amz-Target' : '%s_%s.%s' % (self.ServiceName,
- self.Version, action),
- 'Content-Type' : 'application/x-amz-json-1.0',
- 'Content-Length' : str(len(body))}
+ headers = {'X-Amz-Target': '%s_%s.%s' % (self.ServiceName,
+ self.Version, action),
+ 'Host': self.region.endpoint,
+ 'Content-Type': 'application/x-amz-json-1.0',
+ 'Content-Length': str(len(body))}
http_request = self.build_base_http_request('POST', '/', '/',
{}, headers, body, None)
if self.do_instrumentation:
@@ -132,6 +139,7 @@ class Layer1(AWSAuthConnection):
override_num_retries=10,
retry_handler=self._retry_handler)
self.request_id = response.getheader('x-amzn-RequestId')
+ boto.log.debug('RequestId: %s' % self.request_id)
if self.do_instrumentation:
self.instrumentation['times'].append(time.time() - start)
self.instrumentation['ids'].append(self.request_id)
@@ -151,14 +159,20 @@ class Layer1(AWSAuthConnection):
if i == 0:
next_sleep = 0
else:
- next_sleep = 0.05 * (2**i)
+ next_sleep = 0.05 * (2 ** i)
i += 1
status = (msg, i, next_sleep)
elif self.SessionExpiredError in data.get('__type'):
msg = 'Renewing Session Token'
self.creds = self._get_session_token()
self._update_provider()
- status = (msg, i+self.num_retries-1, next_sleep)
+ status = (msg, i + self.num_retries - 1, 0)
+ elif self.ConditionalCheckFailedError in data.get('__type'):
+ raise dynamodb_exceptions.DynamoDBConditionalCheckFailedError(
+ response.status, response.reason, data)
+ elif self.ValidationError in data.get('__type'):
+ raise dynamodb_exceptions.DynamoDBValidationError(
+ response.status, response.reason, data)
else:
raise self.ResponseError(response.status, response.reason,
data)
@@ -201,7 +215,7 @@ class Layer1(AWSAuthConnection):
:type table_name: str
:param table_name: The name of the table to describe.
"""
- data = {'TableName' : table_name}
+ data = {'TableName': table_name}
json_input = json.dumps(data)
return self.make_request('DescribeTable', json_input)
@@ -215,7 +229,7 @@ class Layer1(AWSAuthConnection):
:type table_name: str
:param table_name: The name of the table to create.
-
+
:type schema: dict
:param schema: A Python version of the KeySchema data structure
as defined by DynamoDB
@@ -224,10 +238,9 @@ class Layer1(AWSAuthConnection):
:param provisioned_throughput: A Python version of the
ProvisionedThroughput data structure defined by
DynamoDB.
-
"""
- data = {'TableName' : table_name,
- 'KeySchema' : schema,
+ data = {'TableName': table_name,
+ 'KeySchema': schema,
'ProvisionedThroughput': provisioned_throughput}
json_input = json.dumps(data)
response_dict = self.make_request('CreateTable', json_input)
@@ -236,10 +249,10 @@ class Layer1(AWSAuthConnection):
def update_table(self, table_name, provisioned_throughput):
"""
Updates the provisioned throughput for a given table.
-
+
:type table_name: str
:param table_name: The name of the table to update.
-
+
:type provisioned_throughput: dict
:param provisioned_throughput: A Python version of the
ProvisionedThroughput data structure defined by
@@ -295,12 +308,12 @@ class Layer1(AWSAuthConnection):
json_input = json.dumps(data)
response = self.make_request('GetItem', json_input,
object_hook=object_hook)
- if not response.has_key('Item'):
+ if 'Item' not in response:
raise dynamodb_exceptions.DynamoDBKeyNotFoundError(
"Key does not exist."
)
return response
-
+
def batch_get_item(self, request_items, object_hook=None):
"""
Return a set of attributes for a multiple items in
@@ -310,11 +323,25 @@ class Layer1(AWSAuthConnection):
:param request_items: A Python version of the RequestItems
data structure defined by DynamoDB.
"""
- data = {'RequestItems' : request_items}
+ data = {'RequestItems': request_items}
json_input = json.dumps(data)
return self.make_request('BatchGetItem', json_input,
object_hook=object_hook)
+ def batch_write_item(self, request_items, object_hook=None):
+ """
+ This operation enables you to put or delete several items
+ across multiple tables in a single API call.
+
+ :type request_items: dict
+ :param request_items: A Python version of the RequestItems
+ data structure defined by DynamoDB.
+ """
+ data = {'RequestItems': request_items}
+ json_input = json.dumps(data)
+ return self.make_request('BatchWriteItem', json_input,
+ object_hook=object_hook)
+
def put_item(self, table_name, item,
expected=None, return_values=None,
object_hook=None):
@@ -344,8 +371,8 @@ class Layer1(AWSAuthConnection):
specified and the item is overwritten, the content
of the old item is returned.
"""
- data = {'TableName' : table_name,
- 'Item' : item}
+ data = {'TableName': table_name,
+ 'Item': item}
if expected:
data['Expected'] = expected
if return_values:
@@ -385,8 +412,8 @@ class Layer1(AWSAuthConnection):
specified and the item is overwritten, the content
of the old item is returned.
"""
- data = {'TableName' : table_name,
- 'Key' : key,
+ data = {'TableName': table_name,
+ 'Key': key,
'AttributeUpdates': attribute_updates}
if expected:
data['Expected'] = expected
@@ -422,8 +449,8 @@ class Layer1(AWSAuthConnection):
specified and the item is overwritten, the content
of the old item is returned.
"""
- data = {'TableName' : table_name,
- 'Key' : key}
+ data = {'TableName': table_name,
+ 'Key': key}
if expected:
data['Expected'] = expected
if return_values:
@@ -540,5 +567,3 @@ class Layer1(AWSAuthConnection):
data['ExclusiveStartKey'] = exclusive_start_key
json_input = json.dumps(data)
return self.make_request('Scan', json_input, object_hook=object_hook)
-
-
diff --git a/boto/dynamodb/layer2.py b/boto/dynamodb/layer2.py
index 2f8568af..0d8d5bba 100644
--- a/boto/dynamodb/layer2.py
+++ b/boto/dynamodb/layer2.py
@@ -15,7 +15,7 @@
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
-# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
@@ -25,9 +25,10 @@ from boto.dynamodb.layer1 import Layer1
from boto.dynamodb.table import Table
from boto.dynamodb.schema import Schema
from boto.dynamodb.item import Item
-from boto.dynamodb.batch import BatchList
+from boto.dynamodb.batch import BatchList, BatchWriteList
from boto.dynamodb.types import get_dynamodb_type, dynamize_value, convert_num
+
def item_object_hook(dct):
"""
A custom object hook for use when decoding JSON item bodys.
@@ -46,6 +47,60 @@ def item_object_hook(dct):
return set(map(convert_num, dct['NS']))
return dct
+def table_generator(tgen):
+ """
+ A low-level generator used to page through results from
+ query and scan operations. This is used by
+ :class:`boto.dynamodb.layer2.TableGenerator` and is not intended
+ to be used outside of that context.
+ """
+ response = True
+ n = 0
+ while response:
+ if tgen.max_results and n == tgen.max_results:
+ break
+ if response is True:
+ pass
+ elif 'LastEvaluatedKey' in response:
+ lek = response['LastEvaluatedKey']
+ esk = tgen.table.layer2.dynamize_last_evaluated_key(lek)
+ tgen.kwargs['exclusive_start_key'] = esk
+ else:
+ break
+ response = tgen.callable(**tgen.kwargs)
+ if 'ConsumedCapacityUnits' in response:
+ tgen.consumed_units += response['ConsumedCapacityUnits']
+ for item in response['Items']:
+ if tgen.max_results and n == tgen.max_results:
+ break
+ yield tgen.item_class(tgen.table, attrs=item)
+ n += 1
+
+
+class TableGenerator:
+ """
+ This is an object that wraps up the table_generator function.
+ The only real reason to have this is that we want to be able
+ to accumulate and return the ConsumedCapacityUnits element that
+ is part of each response.
+
+ :ivar consumed_units: An integer that holds the number of
+ ConsumedCapacityUnits accumulated thus far for this
+ generator.
+ """
+
+ def __init__(self, table, callable, max_results, item_class, kwargs):
+ self.table = table
+ self.callable = callable
+ self.max_results = max_results
+ self.item_class = item_class
+ self.kwargs = kwargs
+ self.consumed_units = 0
+
+ def __iter__(self):
+ return table_generator(self)
+
+
class Layer2(object):
def __init__(self, aws_access_key_id=None, aws_secret_access_key=None,
@@ -53,7 +108,7 @@ class Layer2(object):
host=None, debug=0, session_token=None, region=None):
self.layer1 = Layer1(aws_access_key_id, aws_secret_access_key,
is_secure, port, proxy, proxy_port,
- host, debug, session_token, region)
+ debug, session_token, region)
def dynamize_attribute_updates(self, pending_updates):
"""
@@ -131,32 +186,6 @@ class Layer2(object):
d['RangeKeyElement'] = dynamize_value(range_key)
return d
- def dynamize_request_items(self, batch_list):
- """
- Convert a request_items parameter into the data structure
- required for Layer1.
- """
- d = None
- if batch_list:
- d = {}
- for batch in batch_list:
- batch_dict = {}
- key_list = []
- for key in batch.keys:
- if isinstance(key, tuple):
- hash_key, range_key = key
- else:
- hash_key = key
- range_key = None
- k = self.build_key_from_values(batch.table.schema,
- hash_key, range_key)
- key_list.append(k)
- batch_dict['Keys'] = key_list
- if batch.attributes_to_get:
- batch_dict['AttributesToGet'] = batch.attributes_to_get
- d[batch.table.name] = batch_dict
- return d
-
def build_key_from_values(self, schema, hash_key, range_key=None):
"""
Build a Key structure to be used for accessing items
@@ -198,6 +227,13 @@ class Layer2(object):
"""
return BatchList(self)
+ def new_batch_write_list(self):
+ """
+ Return a new, empty :class:`boto.dynamodb.batch.BatchWriteList`
+ object.
+ """
+ return BatchWriteList(self)
+
def list_tables(self, limit=None):
"""
Return a list of the names of all tables associated with the
@@ -239,23 +275,24 @@ class Layer2(object):
return Table(self, response)
lookup = get_table
+
def create_table(self, name, schema, read_units, write_units):
"""
Create a new Amazon DynamoDB table.
-
+
:type name: str
:param name: The name of the desired table.
:type schema: :class:`boto.dynamodb.schema.Schema`
:param schema: The Schema object that defines the schema used
by this table.
-
+
:type read_units: int
:param read_units: The value for ReadCapacityUnits.
-
+
:type write_units: int
:param write_units: The value for WriteCapacityUnits.
-
+
:rtype: :class:`boto.dynamodb.table.Table`
:return: A Table object representing the new Amazon DynamoDB table.
"""
@@ -270,10 +307,10 @@ class Layer2(object):
:type table: :class:`boto.dynamodb.table.Table`
:param table: The Table object whose throughput is being updated.
-
+
:type read_units: int
:param read_units: The new value for ReadCapacityUnits.
-
+
:type write_units: int
:param write_units: The new value for WriteCapacityUnits.
"""
@@ -281,7 +318,7 @@ class Layer2(object):
{'ReadCapacityUnits': read_units,
'WriteCapacityUnits': write_units})
table.update_from_response(response)
-
+
def delete_table(self, table):
"""
Delete this table and all items in it. After calling this
@@ -303,16 +340,18 @@ class Layer2(object):
:type hash_key_proto_value: int|long|float|str|unicode
:param hash_key_proto_value: A sample or prototype of the type
- of value you want to use for the HashKey.
-
+ of value you want to use for the HashKey. Alternatively,
+ you can also just pass in the Python type (e.g. int, float, etc.).
+
:type range_key_name: str
:param range_key_name: The name of the RangeKey for the schema.
This parameter is optional.
:type range_key_proto_value: int|long|float|str|unicode
:param range_key_proto_value: A sample or prototype of the type
- of value you want to use for the RangeKey. This parameter
- is optional.
+ of value you want to use for the RangeKey. Alternatively,
+ you can also pass in the Python type (e.g. int, float, etc.)
+ This parameter is optional.
"""
schema = {}
hash_key = {}
@@ -336,17 +375,17 @@ class Layer2(object):
:type table: :class:`boto.dynamodb.table.Table`
:param table: The Table object from which the item is retrieved.
-
+
:type hash_key: int|long|float|str|unicode
:param hash_key: The HashKey of the requested item. The
type of the value must match the type defined in the
schema for the table.
-
+
:type range_key: int|long|float|str|unicode
:param range_key: The optional RangeKey of the requested item.
The type of the value must match the type defined in the
schema for the table.
-
+
:type attributes_to_get: list
:param attributes_to_get: A list of attribute names.
If supplied, only the specified attribute names will
@@ -383,10 +422,24 @@ class Layer2(object):
batch of objects that you wish to retrieve in this
request.
"""
- request_items = self.dynamize_request_items(batch_list)
+ request_items = batch_list.to_dict()
return self.layer1.batch_get_item(request_items,
object_hook=item_object_hook)
+ def batch_write_item(self, batch_list):
+ """
+ Performs multiple Puts and Deletes in one batch.
+
+ :type batch_list: :class:`boto.dynamodb.batch.BatchWriteList`
+ :param batch_list: A BatchWriteList object which consists of a
+            list of :class:`boto.dynamodb.batch.BatchWrite` objects.
+ Each Batch object contains the information about one
+ batch of objects that you wish to put or delete.
+ """
+ request_items = batch_list.to_dict()
+ return self.layer1.batch_write_item(request_items,
+ object_hook=item_object_hook)
+
def put_item(self, item, expected_value=None, return_values=None):
"""
Store a new item or completely replace an existing item
@@ -394,7 +447,7 @@ class Layer2(object):
:type item: :class:`boto.dynamodb.item.Item`
:param item: The Item to write to Amazon DynamoDB.
-
+
:type expected_value: dict
:param expected_value: A dictionary of name/value pairs that you expect.
This dictionary should have name/value pairs where the name
@@ -408,7 +461,6 @@ class Layer2(object):
values are: None or 'ALL_OLD'. If 'ALL_OLD' is
specified and the item is overwritten, the content
of the old item is returned.
-
"""
expected_value = self.dynamize_expected_value(expected_value)
response = self.layer1.put_item(item.table.name,
@@ -418,7 +470,7 @@ class Layer2(object):
if 'ConsumedCapacityUnits' in response:
item.consumed_units = response['ConsumedCapacityUnits']
return response
-
+
def update_item(self, item, expected_value=None, return_values=None):
"""
Commit pending item updates to Amazon DynamoDB.
@@ -460,21 +512,21 @@ class Layer2(object):
if 'ConsumedCapacityUnits' in response:
item.consumed_units = response['ConsumedCapacityUnits']
return response
-
+
def delete_item(self, item, expected_value=None, return_values=None):
"""
Delete the item from Amazon DynamoDB.
:type item: :class:`boto.dynamodb.item.Item`
:param item: The Item to delete from Amazon DynamoDB.
-
+
:type expected_value: dict
:param expected_value: A dictionary of name/value pairs that you expect.
This dictionary should have name/value pairs where the name
is the name of the attribute and the value is either the value
you are expecting or False if you expect the attribute not to
exist.
-
+
:type return_values: str
:param return_values: Controls the return of attribute
name-value pairs before then were changed. Possible
@@ -497,10 +549,10 @@ class Layer2(object):
item_class=Item):
"""
Perform a query on the table.
-
+
:type table: :class:`boto.dynamodb.table.Table`
:param table: The Table object that is being queried.
-
+
:type hash_key: int|long|float|str|unicode
:param hash_key: The HashKey of the requested item. The
type of the value must match the type defined in the
@@ -515,7 +567,7 @@ class Layer2(object):
The only condition which expects or will accept two
values is 'BETWEEN', otherwise a single value should
be passed to the Condition constructor.
-
+
:type attributes_to_get: list
:param attributes_to_get: A list of attribute names.
If supplied, only the specified attribute names will
@@ -555,39 +607,29 @@ class Layer2(object):
to generate the items. This should be a subclass of
:class:`boto.dynamodb.item.Item`
- :rtype: generator
+ :rtype: :class:`boto.dynamodb.layer2.TableGenerator`
"""
if range_key_condition:
rkc = self.dynamize_range_key_condition(range_key_condition)
else:
rkc = None
if exclusive_start_key:
- esk = self.build_key_from_values(table.schema, *exclusive_start_key)
+ esk = self.build_key_from_values(table.schema,
+ *exclusive_start_key)
else:
esk = None
- response = True
- n = 0
- while response:
- if max_results and n == max_results:
- break
- if response is True:
- pass
- elif response.has_key("LastEvaluatedKey"):
- lek = response['LastEvaluatedKey']
- esk = self.dynamize_last_evaluated_key(lek)
- else:
- break
- response = self.layer1.query(table.name,
- dynamize_value(hash_key),
- rkc, attributes_to_get, request_limit,
- consistent_read, scan_index_forward,
- esk, object_hook=item_object_hook)
- for item in response['Items']:
- if max_results and n == max_results:
- break
- yield item_class(table, attrs=item)
- n += 1
-
+ kwargs = {'table_name': table.name,
+ 'hash_key_value': dynamize_value(hash_key),
+ 'range_key_conditions': rkc,
+ 'attributes_to_get': attributes_to_get,
+ 'limit': request_limit,
+ 'consistent_read': consistent_read,
+ 'scan_index_forward': scan_index_forward,
+ 'exclusive_start_key': esk,
+ 'object_hook': item_object_hook}
+ return TableGenerator(table, self.layer1.query,
+ max_results, item_class, kwargs)
+
def scan(self, table, scan_filter=None,
attributes_to_get=None, request_limit=None, max_results=None,
count=False, exclusive_start_key=None, item_class=Item):
@@ -652,31 +694,19 @@ class Layer2(object):
to generate the items. This should be a subclass of
:class:`boto.dynamodb.item.Item`
- :rtype: generator
+ :rtype: :class:`boto.dynamodb.layer2.TableGenerator`
"""
if exclusive_start_key:
- esk = self.build_key_from_values(table.schema, *exclusive_start_key)
+ esk = self.build_key_from_values(table.schema,
+ *exclusive_start_key)
else:
esk = None
- sf = self.dynamize_scan_filter(scan_filter)
- response = True
- n = 0
- while response:
- if response is True:
- pass
- elif response.has_key("LastEvaluatedKey"):
- last_evaluated_key = response['LastEvaluatedKey']
- esk = self.dynamize_item(last_evaluated_key)
- else:
- break
- response = self.layer1.scan(table.name, sf,
- attributes_to_get, request_limit,
- count, esk,
- object_hook=item_object_hook)
- if response:
- for item in response['Items']:
- if max_results and n == max_results:
- break
- yield item_class(table, attrs=item)
- n += 1
-
+ kwargs = {'table_name': table.name,
+ 'scan_filter': self.dynamize_scan_filter(scan_filter),
+ 'attributes_to_get': attributes_to_get,
+ 'limit': request_limit,
+ 'count': count,
+ 'exclusive_start_key': esk,
+ 'object_hook': item_object_hook}
+ return TableGenerator(table, self.layer1.scan,
+ max_results, item_class, kwargs)
diff --git a/boto/dynamodb/schema.py b/boto/dynamodb/schema.py
index 108d3e9c..34ff212a 100644
--- a/boto/dynamodb/schema.py
+++ b/boto/dynamodb/schema.py
@@ -15,12 +15,13 @@
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
-# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
+
class Schema(object):
"""
Represents a DynamoDB schema.
@@ -49,26 +50,25 @@ class Schema(object):
@property
def dict(self):
return self._dict
-
+
@property
def hash_key_name(self):
return self._dict['HashKeyElement']['AttributeName']
-
+
@property
def hash_key_type(self):
return self._dict['HashKeyElement']['AttributeType']
-
+
@property
def range_key_name(self):
name = None
if 'RangeKeyElement' in self._dict:
name = self._dict['RangeKeyElement']['AttributeName']
return name
-
+
@property
def range_key_type(self):
type = None
if 'RangeKeyElement' in self._dict:
type = self._dict['RangeKeyElement']['AttributeType']
return type
-
diff --git a/boto/dynamodb/table.py b/boto/dynamodb/table.py
index c03421bc..85ce9686 100644
--- a/boto/dynamodb/table.py
+++ b/boto/dynamodb/table.py
@@ -15,7 +15,7 @@
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
-# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
@@ -26,6 +26,7 @@ from boto.dynamodb.item import Item
from boto.dynamodb import exceptions as dynamodb_exceptions
import time
+
class Table(object):
"""
An Amazon DynamoDB table.
@@ -60,23 +61,23 @@ class Table(object):
@property
def name(self):
return self._dict['TableName']
-
+
@property
def create_time(self):
return self._dict['CreationDateTime']
-
+
@property
def status(self):
return self._dict['TableStatus']
-
+
@property
def item_count(self):
return self._dict.get('ItemCount', 0)
-
+
@property
def size_bytes(self):
return self._dict.get('TableSizeBytes', 0)
-
+
@property
def schema(self):
return self._schema
@@ -84,11 +85,11 @@ class Table(object):
@property
def read_units(self):
return self._dict['ProvisionedThroughput']['ReadCapacityUnits']
-
+
@property
def write_units(self):
return self._dict['ProvisionedThroughput']['WriteCapacityUnits']
-
+
def update_from_response(self, response):
"""
Update the state of the Table object based on the response
@@ -134,12 +135,12 @@ class Table(object):
:type read_units: int
:param read_units: The new value for ReadCapacityUnits.
-
+
:type write_units: int
:param write_units: The new value for WriteCapacityUnits.
"""
self.layer2.update_throughput(self, read_units, write_units)
-
+
def delete(self):
"""
Delete this table and all items in it. After calling this
@@ -157,12 +158,12 @@ class Table(object):
:param hash_key: The HashKey of the requested item. The
type of the value must match the type defined in the
schema for the table.
-
+
:type range_key: int|long|float|str|unicode
:param range_key: The optional RangeKey of the requested item.
The type of the value must match the type defined in the
schema for the table.
-
+
:type attributes_to_get: list
:param attributes_to_get: A list of attribute names.
If supplied, only the specified attribute names will
@@ -182,7 +183,7 @@ class Table(object):
attributes_to_get, consistent_read,
item_class)
lookup = get_item
-
+
def has_item(self, hash_key, range_key=None, consistent_read=False):
"""
Checks the table to see if the Item with the specified ``hash_key``
@@ -231,7 +232,8 @@ class Table(object):
the hash_key and range_key values of the item. You can use
these explicit parameters when calling the method, such as::
- >>> my_item = my_table.new_item(hash_key='a', range_key=1, attrs={'key1': 'val1', 'key2': 'val2'})
+ >>> my_item = my_table.new_item(hash_key='a', range_key=1,
+ attrs={'key1': 'val1', 'key2': 'val2'})
>>> my_item
{u'bar': 1, u'foo': 'a', 'key1': 'val1', 'key2': 'val2'}
@@ -256,7 +258,7 @@ class Table(object):
:param hash_key: The HashKey of the new item. The
type of the value must match the type defined in the
schema for the table.
-
+
:type range_key: int|long|float|str|unicode
:param range_key: The optional RangeKey of the new item.
The type of the value must match the type defined in the
@@ -265,12 +267,11 @@ class Table(object):
:type attrs: dict
:param attrs: A dictionary of key value pairs used to
populate the new item.
-
+
:type item_class: Class
:param item_class: Allows you to override the class used
to generate the items. This should be a subclass of
:class:`boto.dynamodb.item.Item`
-
"""
return item_class(self, hash_key, range_key, attrs)
@@ -281,25 +282,22 @@ class Table(object):
item_class=Item):
"""
Perform a query on the table.
-
+
:type hash_key: int|long|float|str|unicode
:param hash_key: The HashKey of the requested item. The
type of the value must match the type defined in the
schema for the table.
- :type range_key_condition: dict
- :param range_key_condition: A dict where the key is either
- a scalar value appropriate for the RangeKey in the schema
- of the database or a tuple of such values. The value
- associated with this key in the dict will be one of the
- following conditions:
+ :type range_key_condition: :class:`boto.dynamodb.condition.Condition`
+ :param range_key_condition: A Condition object.
+ Condition object can be one of the following types:
+
+ EQ|LE|LT|GE|GT|BEGINS_WITH|BETWEEN
- 'EQ'|'LE'|'LT'|'GE'|'GT'|'BEGINS_WITH'|'BETWEEN'
+ The only condition which expects or will accept two
+ values is 'BETWEEN', otherwise a single value should
+ be passed to the Condition constructor.
- The only condition which expects or will accept a tuple
- of values is 'BETWEEN', otherwise a scalar value should
- be used as the key in the dict.
-
:type attributes_to_get: list
:param attributes_to_get: A list of attribute names.
If supplied, only the specified attribute names will
diff --git a/boto/dynamodb/types.py b/boto/dynamodb/types.py
index 723d33d8..d3896d02 100644
--- a/boto/dynamodb/types.py
+++ b/boto/dynamodb/types.py
@@ -15,7 +15,7 @@
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
-# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
@@ -25,11 +25,15 @@ Some utility functions to deal with mapping Amazon DynamoDB types to
Python types and vice-versa.
"""
+
def is_num(n):
- return isinstance(n, (int, long, float, bool))
+ types = (int, long, float, bool)
+ return isinstance(n, types) or n in types
+
def is_str(n):
- return isinstance(n, basestring)
+ return isinstance(n, basestring) or (isinstance(n, type) and issubclass(n, basestring))
+
def convert_num(s):
if '.' in s:
@@ -38,6 +42,7 @@ def convert_num(s):
n = int(s)
return n
+
def get_dynamodb_type(val):
"""
Take a scalar Python value and return a string representing
@@ -55,9 +60,11 @@ def get_dynamodb_type(val):
elif False not in map(is_str, val):
dynamodb_type = 'SS'
if dynamodb_type is None:
- raise TypeError('Unsupported type "%s" for value "%s"' % (type(val), val))
+ msg = 'Unsupported type "%s" for value "%s"' % (type(val), val)
+ raise TypeError(msg)
return dynamodb_type
+
def dynamize_value(val):
"""
Take a scalar Python value and return a dict consisting
@@ -77,12 +84,11 @@ def dynamize_value(val):
dynamodb_type = get_dynamodb_type(val)
if dynamodb_type == 'N':
- val = {dynamodb_type : _str(val)}
+ val = {dynamodb_type: _str(val)}
elif dynamodb_type == 'S':
- val = {dynamodb_type : val}
+ val = {dynamodb_type: val}
elif dynamodb_type == 'NS':
- val = {dynamodb_type : [ str(n) for n in val]}
+ val = {dynamodb_type: [str(n) for n in val]}
elif dynamodb_type == 'SS':
- val = {dynamodb_type : [ n for n in val]}
+ val = {dynamodb_type: [n for n in val]}
return val
-
diff --git a/boto/ec2/autoscale/launchconfig.py b/boto/ec2/autoscale/launchconfig.py
index 8811fb6b..526f4686 100644
--- a/boto/ec2/autoscale/launchconfig.py
+++ b/boto/ec2/autoscale/launchconfig.py
@@ -173,7 +173,10 @@ class LaunchConfiguration(object):
elif name == 'RamdiskId':
self.ramdisk_id = value
elif name == 'UserData':
- self.user_data = base64.b64decode(value)
+ try:
+ self.user_data = base64.b64decode(value)
+ except TypeError:
+ self.user_data = value
elif name == 'LaunchConfigurationARN':
self.launch_configuration_arn = value
elif name == 'InstanceMonitoring':
diff --git a/boto/ec2/cloudwatch/__init__.py b/boto/ec2/cloudwatch/__init__.py
index bef02a55..1e69bae0 100644
--- a/boto/ec2/cloudwatch/__init__.py
+++ b/boto/ec2/cloudwatch/__init__.py
@@ -113,17 +113,20 @@ class CloudWatchConnection(AWSQueryConnection):
def build_dimension_param(self, dimension, params):
prefix = 'Dimensions.member'
- for i, dim_name in enumerate(dimension):
+ i=0
+ for dim_name in dimension:
dim_value = dimension[dim_name]
if dim_value:
if isinstance(dim_value, basestring):
dim_value = [dim_value]
- for j, value in enumerate(dim_value):
- params['%s.%d.Name.%d' % (prefix, i+1, j+1)] = dim_name
- params['%s.%d.Value.%d' % (prefix, i+1, j+1)] = value
+ for value in dim_value:
+ params['%s.%d.Name' % (prefix, i+1)] = dim_name
+ params['%s.%d.Value' % (prefix, i+1)] = value
+ i += 1
else:
params['%s.%d.Name' % (prefix, i+1)] = dim_name
-
+ i += 1
+
def build_list_params(self, params, items, label):
if isinstance(items, basestring):
items = [items]
@@ -139,7 +142,7 @@ class CloudWatchConnection(AWSQueryConnection):
def build_put_params(self, params, name, value=None, timestamp=None,
unit=None, dimensions=None, statistics=None):
- args = (name, value, unit, dimensions, statistics)
+ args = (name, value, unit, dimensions, statistics, timestamp)
length = max(map(lambda a: len(a) if isinstance(a, list) else 1, args))
def aslist(a):
@@ -149,11 +152,11 @@ class CloudWatchConnection(AWSQueryConnection):
return a
return [a] * length
- for index, (n, v, u, d, s) in enumerate(zip(*map(aslist, args))):
+ for index, (n, v, u, d, s, t) in enumerate(zip(*map(aslist, args))):
metric_data = {'MetricName': n}
if timestamp:
- metric_data['Timestamp'] = timestamp.isoformat()
+ metric_data['Timestamp'] = t.isoformat()
if unit:
metric_data['Unit'] = u
@@ -320,7 +323,7 @@ class CloudWatchConnection(AWSQueryConnection):
self.build_put_params(params, name, value=value, timestamp=timestamp,
unit=unit, dimensions=dimensions, statistics=statistics)
- return self.get_status('PutMetricData', params)
+ return self.get_status('PutMetricData', params, verb="POST")
def describe_alarms(self, action_prefix=None, alarm_name_prefix=None,
diff --git a/boto/ec2/connection.py b/boto/ec2/connection.py
index 52f2f370..4924410d 100644
--- a/boto/ec2/connection.py
+++ b/boto/ec2/connection.py
@@ -519,7 +519,8 @@ class EC2Connection(AWSQueryConnection):
instance_initiated_shutdown_behavior=None,
private_ip_address=None,
placement_group=None, client_token=None,
- security_group_ids=None):
+ security_group_ids=None,
+ additional_info=None):
"""
Runs an image on EC2.
@@ -610,6 +611,10 @@ class EC2Connection(AWSQueryConnection):
to ensure idempotency of the request.
Maximum 64 ASCII characters
+ :type additional_info: string
+ :param additional_info: Specifies additional information to make
+ available to the instance(s)
+
:rtype: Reservation
:return: The :class:`boto.ec2.instance.Reservation` associated with
the request for machines
@@ -668,6 +673,8 @@ class EC2Connection(AWSQueryConnection):
params['InstanceInitiatedShutdownBehavior'] = val
if client_token:
params['ClientToken'] = client_token
+ if additional_info:
+ params['AdditionalInfo'] = additional_info
return self.get_object('RunInstances', params, Reservation, verb='POST')
def terminate_instances(self, instance_ids=None):
@@ -1212,7 +1219,8 @@ class EC2Connection(AWSQueryConnection):
return self.get_object('AllocateAddress', params, Address, verb='POST')
- def associate_address(self, instance_id, public_ip=None, allocation_id=None):
+ def associate_address(self, instance_id=None, public_ip=None,
+ allocation_id=None, network_interface_id=None):
"""
Associate an Elastic IP address with a currently running instance.
This requires one of ``public_ip`` or ``allocation_id`` depending
@@ -1227,10 +1235,18 @@ class EC2Connection(AWSQueryConnection):
:type allocation_id: string
:param allocation_id: The allocation ID for a VPC-based elastic IP.
+ :type network_interface_id: string
+ :param network_interface_id: The network interface ID to which
+ elastic IP is to be assigned to
+
:rtype: bool
:return: True if successful
"""
- params = { 'InstanceId' : instance_id }
+ params = {}
+ if instance_id is not None:
+ params['InstanceId'] = instance_id
+ elif network_interface_id is not None:
+ params['NetworkInterfaceId'] = network_interface_id
if public_ip is not None:
params['PublicIp'] = public_ip
@@ -1993,6 +2009,8 @@ class EC2Connection(AWSQueryConnection):
SecurityGroup, verb='POST')
group.name = name
group.description = description
+ if vpc_id is not None:
+ group.vpc_id = vpc_id
return group
def delete_security_group(self, name=None, group_id=None):
@@ -2031,15 +2049,15 @@ class EC2Connection(AWSQueryConnection):
:type group_name: string
:param group_name: The name of the security group you are adding
- the rule to.
+ the rule to.
:type src_security_group_name: string
:param src_security_group_name: The name of the security group you are
- granting access to.
+ granting access to.
:type src_security_group_owner_id: string
:param src_security_group_owner_id: The ID of the owner of the security
- group you are granting access to.
+ group you are granting access to.
:type ip_protocol: string
:param ip_protocol: Either tcp | udp | icmp
@@ -2052,7 +2070,7 @@ class EC2Connection(AWSQueryConnection):
:type to_port: string
:param to_port: The CIDR block you are providing access to.
- See http://goo.gl/Yj5QC
+ See http://goo.gl/Yj5QC
:rtype: bool
:return: True if successful.
@@ -2087,15 +2105,15 @@ class EC2Connection(AWSQueryConnection):
:type group_name: string
:param group_name: The name of the security group you are adding
- the rule to.
+ the rule to.
:type src_security_group_name: string
:param src_security_group_name: The name of the security group you are
- granting access to.
+ granting access to.
:type src_security_group_owner_id: string
:param src_security_group_owner_id: The ID of the owner of the security
- group you are granting access to.
+ group you are granting access to.
:type ip_protocol: string
:param ip_protocol: Either tcp | udp | icmp
@@ -2108,19 +2126,17 @@ class EC2Connection(AWSQueryConnection):
:type cidr_ip: string or list of strings
:param cidr_ip: The CIDR block you are providing access to.
- See http://goo.gl/Yj5QC
+ See http://goo.gl/Yj5QC
:type group_id: string
- :param group_id: ID of the EC2 or VPC security group to modify.
- This is required for VPC security groups and
- can be used instead of group_name for EC2
- security groups.
+ :param group_id: ID of the EC2 or VPC security group to
+ modify. This is required for VPC security groups and can
+ be used instead of group_name for EC2 security groups.
- :type group_id: string
- :param group_id: ID of the EC2 or VPC source security group.
- This is required for VPC security groups and
- can be used instead of group_name for EC2
- security groups.
+ :type src_security_group_group_id: string
+ :param src_security_group_group_id: The ID of the security
+ group you are granting access to. Can be used instead of
+ src_security_group_name
:rtype: bool
:return: True if successful.
@@ -2235,18 +2251,6 @@ class EC2Connection(AWSQueryConnection):
:param to_port: The CIDR block you are revoking access to.
http://goo.gl/Yj5QC
- :type group_id: string
- :param group_id: ID of the EC2 or VPC security group to modify.
- This is required for VPC security groups and
- can be used instead of group_name for EC2
- security groups.
-
- :type group_id: string
- :param group_id: ID of the EC2 or VPC source security group.
- This is required for VPC security groups and
- can be used instead of group_name for EC2
- security groups.
-
:rtype: bool
:return: True if successful.
"""
@@ -2302,6 +2306,17 @@ class EC2Connection(AWSQueryConnection):
:param cidr_ip: The CIDR block you are revoking access to.
See http://goo.gl/Yj5QC
+ :type group_id: string
+ :param group_id: ID of the EC2 or VPC security group to modify.
+ This is required for VPC security groups and
+ can be used instead of group_name for EC2
+ security groups.
+
+ :type src_security_group_group_id: string
+ :param src_security_group_group_id: The ID of the security group
+ for which you are revoking access.
+ Can be used instead of src_security_group_name
+
:rtype: bool
:return: True if successful.
"""
@@ -2918,7 +2933,7 @@ class EC2Connection(AWSQueryConnection):
"""
params = {'NetworkInterfaceId' : network_interface_id,
'InstanceId' : instance_id,
- 'Deviceindex' : device_index}
+ 'DeviceIndex' : device_index}
return self.get_status('AttachNetworkInterface', params, verb='POST')
def detach_network_interface(self, network_interface_id, force=False):
diff --git a/boto/ec2/elb/__init__.py b/boto/ec2/elb/__init__.py
index fc1ae8ed..899a0ebc 100644
--- a/boto/ec2/elb/__init__.py
+++ b/boto/ec2/elb/__init__.py
@@ -157,10 +157,11 @@ class ELBConnection(AWSQueryConnection):
params = {'LoadBalancerName' : name}
for index, listener in enumerate(listeners):
i = index + 1
+ protocol = listener[2].upper()
params['Listeners.member.%d.LoadBalancerPort' % i] = listener[0]
params['Listeners.member.%d.InstancePort' % i] = listener[1]
params['Listeners.member.%d.Protocol' % i] = listener[2]
- if listener[2]=='HTTPS':
+ if protocol == 'HTTPS' or protocol == 'SSL':
params['Listeners.member.%d.SSLCertificateId' % i] = listener[3]
if zones:
self.build_list_params(params, zones, 'AvailabilityZones.member.%d')
@@ -169,7 +170,7 @@ class ELBConnection(AWSQueryConnection):
self.build_list_params(params, subnets, 'Subnets.member.%d')
if security_groups:
- self.build_list_params(params, security_groups,
+ self.build_list_params(params, security_groups,
'SecurityGroups.member.%d')
load_balancer = self.get_object('CreateLoadBalancer',
@@ -194,19 +195,20 @@ class ELBConnection(AWSQueryConnection):
[SSLCertificateId])
where LoadBalancerPortNumber and InstancePortNumber are
integer values between 1 and 65535, Protocol is a
- string containing either 'TCP', 'HTTP' or 'HTTPS';
+ string containing either 'TCP', 'HTTP', 'HTTPS', or 'SSL';
SSLCertificateID is the ARN of a AWS AIM certificate,
- and must be specified when doing HTTPS.
+ and must be specified when doing HTTPS or SSL.
:return: The status of the request
"""
params = {'LoadBalancerName' : name}
for index, listener in enumerate(listeners):
i = index + 1
+ protocol = listener[2].upper()
params['Listeners.member.%d.LoadBalancerPort' % i] = listener[0]
params['Listeners.member.%d.InstancePort' % i] = listener[1]
params['Listeners.member.%d.Protocol' % i] = listener[2]
- if listener[2]=='HTTPS':
+ if protocol == 'HTTPS' or protocol == 'SSL':
params['Listeners.member.%d.SSLCertificateId' % i] = listener[3]
return self.get_status('CreateLoadBalancerListeners', params)
@@ -461,7 +463,7 @@ class ELBConnection(AWSQueryConnection):
def apply_security_groups_to_lb(self, name, security_groups):
"""
Applies security groups to the load balancer.
- Applying security groups that are already registered with the
+ Applying security groups that are already registered with the
Load Balancer has no effect.
:type name: string
@@ -475,16 +477,16 @@ class ELBConnection(AWSQueryConnection):
"""
params = {'LoadBalancerName' : name}
- self.build_list_params(params, security_groups,
+ self.build_list_params(params, security_groups,
'SecurityGroups.member.%d')
- return self.get_list('ApplySecurityGroupsToLoadBalancer',
+ return self.get_list('ApplySecurityGroupsToLoadBalancer',
params,
None)
def attach_lb_to_subnets(self, name, subnets):
"""
Attaches load balancer to one or more subnets.
- Attaching subnets that are already registered with the
+ Attaching subnets that are already registered with the
Load Balancer has no effect.
:type name: string
@@ -498,9 +500,9 @@ class ELBConnection(AWSQueryConnection):
"""
params = {'LoadBalancerName' : name}
- self.build_list_params(params, subnets,
+ self.build_list_params(params, subnets,
'Subnets.member.%d')
- return self.get_list('AttachLoadBalancerToSubnets',
+ return self.get_list('AttachLoadBalancerToSubnets',
params,
None)
@@ -519,10 +521,8 @@ class ELBConnection(AWSQueryConnection):
"""
params = {'LoadBalancerName' : name}
- self.build_list_params(params, subnets,
+ self.build_list_params(params, subnets,
'Subnets.member.%d')
- return self.get_list('DettachLoadBalancerFromSubnets',
+ return self.get_list('DettachLoadBalancerFromSubnets',
params,
None)
-
-
diff --git a/boto/ec2/image.py b/boto/ec2/image.py
index de1b5d26..d184702d 100644
--- a/boto/ec2/image.py
+++ b/boto/ec2/image.py
@@ -160,7 +160,8 @@ class Image(TaggedEC2Object):
disable_api_termination=False,
instance_initiated_shutdown_behavior=None,
private_ip_address=None,
- placement_group=None, security_group_ids=None):
+ placement_group=None, security_group_ids=None,
+ additional_info=None):
"""
Runs this instance.
@@ -229,11 +230,16 @@ class Image(TaggedEC2Object):
:param placement_group: If specified, this is the name of the placement
group in which the instance(s) will be launched.
- :rtype: Reservation
- :return: The :class:`boto.ec2.instance.Reservation` associated with the request for machines
+ :type additional_info: string
+ :param additional_info: Specifies additional information to make
+ available to the instance(s)
:type security_group_ids:
:param security_group_ids:
+
+ :rtype: Reservation
+ :return: The :class:`boto.ec2.instance.Reservation` associated with the request for machines
+
"""
return self.connection.run_instances(self.id, min_count, max_count,
@@ -245,7 +251,8 @@ class Image(TaggedEC2Object):
block_device_map, disable_api_termination,
instance_initiated_shutdown_behavior,
private_ip_address, placement_group,
- security_group_ids=security_group_ids)
+ security_group_ids=security_group_ids,
+ additional_info=additional_info)
def deregister(self, delete_snapshot=False):
return self.connection.deregister_image(self.id, delete_snapshot)
diff --git a/boto/ec2/networkinterface.py b/boto/ec2/networkinterface.py
index a9ec1d28..2658e3fc 100644
--- a/boto/ec2/networkinterface.py
+++ b/boto/ec2/networkinterface.py
@@ -111,6 +111,9 @@ class NetworkInterface(TaggedEC2Object):
return 'NetworkInterface:%s' % self.id
def startElement(self, name, attrs, connection):
+ retval = TaggedEC2Object.startElement(self, name, attrs, connection)
+ if retval is not None:
+ return retval
if name == 'groupSet':
self.groups = ResultSet([('item', Group)])
return self.groups
diff --git a/boto/ec2/securitygroup.py b/boto/ec2/securitygroup.py
index 87d4b25e..83292fea 100644
--- a/boto/ec2/securitygroup.py
+++ b/boto/ec2/securitygroup.py
@@ -82,10 +82,13 @@ class SecurityGroup(TaggedEC2Object):
setattr(self, name, value)
def delete(self):
- return self.connection.delete_security_group(self.name)
+ if self.vpc_id:
+ return self.connection.delete_security_group(group_id=self.id)
+ else:
+ return self.connection.delete_security_group(self.name)
def add_rule(self, ip_protocol, from_port, to_port,
- src_group_name, src_group_owner_id, cidr_ip):
+ src_group_name, src_group_owner_id, cidr_ip, src_group_group_id):
"""
Add a rule to the SecurityGroup object. Note that this method
only changes the local version of the object. No information
@@ -96,10 +99,10 @@ class SecurityGroup(TaggedEC2Object):
rule.from_port = from_port
rule.to_port = to_port
self.rules.append(rule)
- rule.add_grant(src_group_name, src_group_owner_id, cidr_ip)
+ rule.add_grant(src_group_name, src_group_owner_id, cidr_ip, src_group_group_id)
def remove_rule(self, ip_protocol, from_port, to_port,
- src_group_name, src_group_owner_id, cidr_ip):
+ src_group_name, src_group_owner_id, cidr_ip, src_group_group_id):
"""
Remove a rule to the SecurityGroup object. Note that this method
only changes the local version of the object. No information
@@ -113,7 +116,7 @@ class SecurityGroup(TaggedEC2Object):
target_rule = rule
target_grant = None
for grant in rule.grants:
- if grant.name == src_group_name:
+ if grant.name == src_group_name or grant.group_id == src_group_group_id:
if grant.owner_id == src_group_owner_id:
if grant.cidr_ip == cidr_ip:
target_grant = grant
@@ -151,48 +154,75 @@ class SecurityGroup(TaggedEC2Object):
:rtype: bool
:return: True if successful.
"""
+ group_name = None
+ if not self.vpc_id:
+ group_name = self.name
+ group_id = None
+ if self.vpc_id:
+ group_id = self.id
+ src_group_name = None
+ src_group_owner_id = None
+ src_group_group_id = None
if src_group:
cidr_ip = None
- src_group_name = src_group.name
src_group_owner_id = src_group.owner_id
- else:
- src_group_name = None
- src_group_owner_id = None
- status = self.connection.authorize_security_group(self.name,
+ if not self.vpc_id:
+ src_group_name = src_group.name
+ else:
+ if hasattr(src_group, 'group_id'):
+ src_group_group_id = src_group.group_id
+ else:
+ src_group_group_id = src_group.id
+ status = self.connection.authorize_security_group(group_name,
src_group_name,
src_group_owner_id,
ip_protocol,
from_port,
to_port,
- cidr_ip)
+ cidr_ip,
+ group_id,
+ src_group_group_id)
if status:
if type(cidr_ip) != list:
cidr_ip = [cidr_ip]
for single_cidr_ip in cidr_ip:
self.add_rule(ip_protocol, from_port, to_port, src_group_name,
- src_group_owner_id, single_cidr_ip)
-
+ src_group_owner_id, single_cidr_ip, src_group_group_id)
return status
def revoke(self, ip_protocol=None, from_port=None, to_port=None,
cidr_ip=None, src_group=None):
+ group_name = None
+ if not self.vpc_id:
+ group_name = self.name
+ group_id = None
+ if self.vpc_id:
+ group_id = self.id
+ src_group_name = None
+ src_group_owner_id = None
+ src_group_group_id = None
if src_group:
- cidr_ip=None
- src_group_name = src_group.name
+ cidr_ip = None
src_group_owner_id = src_group.owner_id
- else:
- src_group_name = None
- src_group_owner_id = None
- status = self.connection.revoke_security_group(self.name,
+ if not self.vpc_id:
+ src_group_name = src_group.name
+ else:
+ if hasattr(src_group, 'group_id'):
+ src_group_group_id = src_group.group_id
+ else:
+ src_group_group_id = src_group.id
+ status = self.connection.revoke_security_group(group_name,
src_group_name,
src_group_owner_id,
ip_protocol,
from_port,
to_port,
- cidr_ip)
+ cidr_ip,
+ group_id,
+ src_group_group_id)
if status:
self.remove_rule(ip_protocol, from_port, to_port, src_group_name,
- src_group_owner_id, cidr_ip)
+ src_group_owner_id, cidr_ip, src_group_group_id)
return status
def copy_to_region(self, region, name=None):
@@ -220,9 +250,10 @@ class SecurityGroup(TaggedEC2Object):
source_groups = []
for rule in self.rules:
for grant in rule.grants:
- if grant.name:
- if grant.name not in source_groups:
- source_groups.append(grant.name)
+ grant_nom = grant.name or grant.group_id
+ if grant_nom:
+ if grant_nom not in source_groups:
+ source_groups.append(grant_nom)
sg.authorize(None, None, None, None, grant)
else:
sg.authorize(rule.ip_protocol, rule.from_port, rule.to_port,
@@ -287,9 +318,10 @@ class IPPermissions(object):
else:
setattr(self, name, value)
- def add_grant(self, name=None, owner_id=None, cidr_ip=None):
+ def add_grant(self, name=None, owner_id=None, cidr_ip=None, group_id=None):
grant = GroupOrCIDR(self)
grant.owner_id = owner_id
+ grant.group_id = group_id
grant.name = name
grant.cidr_ip = cidr_ip
self.grants.append(grant)
@@ -299,6 +331,7 @@ class GroupOrCIDR(object):
def __init__(self, parent=None):
self.owner_id = None
+ self.group_id = None
self.name = None
self.cidr_ip = None
@@ -306,7 +339,7 @@ class GroupOrCIDR(object):
if self.cidr_ip:
return '%s' % self.cidr_ip
else:
- return '%s-%s' % (self.name, self.owner_id)
+ return '%s-%s' % (self.name or self.group_id, self.owner_id)
def startElement(self, name, attrs, connection):
return None
@@ -314,6 +347,8 @@ class GroupOrCIDR(object):
def endElement(self, name, value, connection):
if name == 'userId':
self.owner_id = value
+ elif name == 'groupId':
+ self.group_id = value
elif name == 'groupName':
self.name = value
if name == 'cidrIp':
diff --git a/boto/ec2/snapshot.py b/boto/ec2/snapshot.py
index d52abe44..f01fd254 100644
--- a/boto/ec2/snapshot.py
+++ b/boto/ec2/snapshot.py
@@ -37,6 +37,7 @@ class Snapshot(TaggedEC2Object):
self.progress = None
self.start_time = None
self.owner_id = None
+ self.owner_alias = None
self.volume_size = None
self.description = None
@@ -54,6 +55,8 @@ class Snapshot(TaggedEC2Object):
self.start_time = value
elif name == 'ownerId':
self.owner_id = value
+ elif name == 'ownerAlias':
+ self.owner_alias = value
elif name == 'volumeSize':
try:
self.volume_size = int(value)
diff --git a/boto/ec2/spotinstancerequest.py b/boto/ec2/spotinstancerequest.py
index 06acb0f5..a3562ac3 100644
--- a/boto/ec2/spotinstancerequest.py
+++ b/boto/ec2/spotinstancerequest.py
@@ -15,7 +15,7 @@
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
-# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
@@ -27,6 +27,7 @@ Represents an EC2 Spot Instance Request
from boto.ec2.ec2object import TaggedEC2Object
from boto.ec2.launchspecification import LaunchSpecification
+
class SpotInstanceStateFault(object):
def __init__(self, code=None, message=None):
@@ -46,8 +47,9 @@ class SpotInstanceStateFault(object):
self.message = value
setattr(self, name, value)
+
class SpotInstanceRequest(TaggedEC2Object):
-
+
def __init__(self, connection=None):
TaggedEC2Object.__init__(self, connection)
self.id = None
@@ -58,6 +60,7 @@ class SpotInstanceRequest(TaggedEC2Object):
self.valid_from = None
self.valid_until = None
self.launch_group = None
+ self.launched_availability_zone = None
self.product_description = None
self.availability_zone_group = None
self.create_time = None
@@ -89,8 +92,6 @@ class SpotInstanceRequest(TaggedEC2Object):
self.type = value
elif name == 'state':
self.state = value
- elif name == 'productDescription':
- self.product_description = value
elif name == 'validFrom':
self.valid_from = value
elif name == 'validUntil':
@@ -99,15 +100,16 @@ class SpotInstanceRequest(TaggedEC2Object):
self.launch_group = value
elif name == 'availabilityZoneGroup':
self.availability_zone_group = value
- elif name == 'createTime':
- self.create_time = value
+ elif name == 'launchedAvailabilityZone':
+ self.launched_availability_zone = value
elif name == 'instanceId':
self.instance_id = value
+ elif name == 'createTime':
+ self.create_time = value
+ elif name == 'productDescription':
+ self.product_description = value
else:
setattr(self, name, value)
def cancel(self):
self.connection.cancel_spot_instance_requests([self.id])
-
-
-
diff --git a/boto/emr/connection.py b/boto/emr/connection.py
index bd264d20..7e39af9c 100644
--- a/boto/emr/connection.py
+++ b/boto/emr/connection.py
@@ -197,7 +197,7 @@ class EmrConnection(AWSQueryConnection):
return self.get_object('ModifyInstanceGroups', params,
ModifyInstanceGroupsResponse, verb='POST')
- def run_jobflow(self, name, log_uri, ec2_keyname=None,
+ def run_jobflow(self, name, log_uri=None, ec2_keyname=None,
availability_zone=None,
master_instance_type='m1.small',
slave_instance_type='m1.small', num_instances=1,
@@ -282,8 +282,9 @@ class EmrConnection(AWSQueryConnection):
params = {}
if action_on_failure:
params['ActionOnFailure'] = action_on_failure
+ if log_uri:
+ params['LogUri'] = log_uri
params['Name'] = name
- params['LogUri'] = log_uri
# Common instance args
common_params = self._build_instance_common_args(ec2_keyname,
diff --git a/boto/emr/step.py b/boto/emr/step.py
index 15dfe889..1fb0043d 100644
--- a/boto/emr/step.py
+++ b/boto/emr/step.py
@@ -191,3 +191,42 @@ class StreamingStep(Step):
self.name, self.mapper, self.reducer, self.action_on_failure,
self.cache_files, self.cache_archives, self.step_args,
self.input, self.output, self._jar)
+
+class ScriptRunnerStep(JarStep):
+
+ ScriptRunnerJar = 's3n://us-east-1.elasticmapreduce/libs/script-runner/script-runner.jar'
+
+ def __init__(self, name, **kw):
+ JarStep.__init__(self, name, self.ScriptRunnerJar, **kw)
+
+class PigBase(ScriptRunnerStep):
+
+ BaseArgs = ['s3n://us-east-1.elasticmapreduce/libs/pig/pig-script',
+ '--base-path', 's3n://us-east-1.elasticmapreduce/libs/pig/']
+
+class InstallPigStep(PigBase):
+ """
+ Install pig on emr step
+ """
+
+ InstallPigName = 'Install Pig'
+
+ def __init__(self, pig_versions='latest'):
+ step_args = []
+ step_args.extend(self.BaseArgs)
+ step_args.extend(['--install-pig'])
+ step_args.extend(['--pig-versions', pig_versions])
+ ScriptRunnerStep.__init__(self, self.InstallPigName, step_args=step_args)
+
+class PigStep(PigBase):
+ """
+ Pig script step
+ """
+
+ def __init__(self, name, pig_file, pig_versions='latest', pig_args=[]):
+ step_args = []
+ step_args.extend(self.BaseArgs)
+ step_args.extend(['--pig-versions', pig_versions])
+ step_args.extend(['--run-pig-script', '--args', '-f', pig_file])
+ step_args.extend(pig_args)
+ ScriptRunnerStep.__init__(self, name, step_args=step_args)
diff --git a/boto/exception.py b/boto/exception.py
index c9eefc4f..cc3526e1 100644
--- a/boto/exception.py
+++ b/boto/exception.py
@@ -322,6 +322,32 @@ class DynamoDBResponseError(BotoServerError):
if self.error_code:
self.error_code = self.error_code.split('#')[-1]
+
+class SWFResponseError(BotoServerError):
+ """
+ This exception expects the fully parsed and decoded JSON response
+ body to be passed as the body parameter.
+
+ :ivar status: The HTTP status code.
+ :ivar reason: The HTTP reason message.
+ :ivar body: The Python dict that represents the decoded JSON
+ response body.
+ :ivar error_message: The full description of the AWS error encountered.
+ :ivar error_code: A short string that identifies the AWS error
+ (e.g. ConditionalCheckFailedException)
+ """
+
+ def __init__(self, status, reason, body=None, *args):
+ self.status = status
+ self.reason = reason
+ self.body = body
+ if self.body:
+ self.error_message = self.body.get('message', None)
+ self.error_code = self.body.get('__type', None)
+ if self.error_code:
+ self.error_code = self.error_code.split('#')[-1]
+
+
class EmrResponseError(BotoServerError):
"""
Error in response from EMR
@@ -376,9 +402,6 @@ class GSDataError(StorageDataError):
"""
pass
-class FPSResponseError(BotoServerError):
- pass
-
class InvalidUriError(Exception):
"""Exception raised when URI is invalid."""
@@ -393,6 +416,13 @@ class InvalidAclError(Exception):
Exception.__init__(self, message)
self.message = message
class InvalidCorsError(Exception):
    """Raised when a CORS XML document is invalid."""

    def __init__(self, message):
        # Store the message on the base Exception and also expose it
        # as an attribute, mirroring InvalidAclError.
        super(InvalidCorsError, self).__init__(message)
        self.message = message
+
class NoAuthHandlerFound(Exception):
    """Raised when no auth handlers were found ready to authenticate."""
diff --git a/boto/fps/__init__.py b/boto/fps/__init__.py
index 2f44483d..d69b7f08 100644
--- a/boto/fps/__init__.py
+++ b/boto/fps/__init__.py
@@ -14,10 +14,8 @@
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
-# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
-
-
diff --git a/boto/fps/connection.py b/boto/fps/connection.py
index 1bb95dfd..d923df2d 100644
--- a/boto/fps/connection.py
+++ b/boto/fps/connection.py
@@ -1,5 +1,6 @@
+# Copyright (c) 2012 Andy Davidoff http://www.disruptek.com/
+# Copyright (c) 2010 Jason R. Coombs http://www.jaraco.com/
# Copyright (c) 2008 Chris Moyer http://coredumped.org/
-# Copyringt (c) 2010 Jason R. Coombs http://www.jaraco.com/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
@@ -15,403 +16,364 @@
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
-# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import urllib
-import xml.sax
import uuid
-import boto
-import boto.utils
-from boto import handler
from boto.connection import AWSQueryConnection
-from boto.resultset import ResultSet
-from boto.exception import FPSResponseError
-from boto.fps.response import FPSResponse
+from boto.fps.exception import ResponseErrorFactory
+from boto.fps.response import ResponseFactory
+import boto.fps.response
+
+__all__ = ['FPSConnection']
+
+decorated_attrs = ('action', 'response')
+
+
+def add_attrs_from(func, to):
+ for attr in decorated_attrs:
+ setattr(to, attr, getattr(func, attr, None))
+ return to
+
+
+def complex_amounts(*fields):
+ def decorator(func):
+ def wrapper(self, *args, **kw):
+ for field in filter(kw.has_key, fields):
+ amount = kw.pop(field)
+ kw[field + '.Value'] = getattr(amount, 'Value', str(amount))
+ kw[field + '.CurrencyCode'] = getattr(amount, 'CurrencyCode',
+ self.currencycode)
+ return func(self, *args, **kw)
+ wrapper.__doc__ = "{}\nComplex Amounts: {}".format(func.__doc__,
+ ', '.join(fields))
+ return add_attrs_from(func, to=wrapper)
+ return decorator
+
+
+def requires(*groups):
+
+ def decorator(func):
+
+ def wrapper(*args, **kw):
+ hasgroup = lambda x: len(x) == len(filter(kw.has_key, x))
+ if 1 != len(filter(hasgroup, groups)):
+ message = ' OR '.join(['+'.join(g) for g in groups])
+ message = "{} requires {} argument(s)" \
+ "".format(func.action, message)
+ raise KeyError(message)
+ return func(*args, **kw)
+ message = ' OR '.join(['+'.join(g) for g in groups])
+ wrapper.__doc__ = "{}\nRequired: {}".format(func.__doc__,
+ message)
+ return add_attrs_from(func, to=wrapper)
+ return decorator
+
+
def needs_caller_reference(func):
    """Decorator: default the CallerReference argument to a fresh UUID."""

    def wrapper(*args, **kw):
        if 'CallerReference' not in kw:
            kw['CallerReference'] = uuid.uuid4()
        return func(*args, **kw)
    wrapper.__doc__ = "{}\nUses CallerReference, defaults " \
                      "to uuid.uuid4()".format(func.__doc__)
    return add_attrs_from(func, to=wrapper)
+
+
def needs_caller_key(func):
    """Decorator: default the callerKey argument to the connection's
    AWS Access Key ID.
    """

    def wrapper(self, *args, **kw):
        if 'callerKey' not in kw:
            kw['callerKey'] = self.aws_access_key_id
        return func(self, *args, **kw)
    wrapper.__doc__ = "{}\nUses callerKey, defaults to your " \
                      "AWS Access Key ID".format(func.__doc__)
    return add_attrs_from(func, to=wrapper)
+
+
def api_action(*api):
    """Decorator factory: bind an FPS API action name and response class
    to a connection method, injecting both as the method's first two
    arguments.

    The action name defaults to the CamelCased method name; the response
    class is the matching <Action>Response class from boto.fps.response
    when one exists, otherwise a generated one from ResponseFactory.
    """

    def decorator(func):
        # func.func_name is Python-2-only; __name__ exists on both 2.x
        # and 3.x function objects.
        action = ''.join(api or [word.capitalize()
                                 for word in func.__name__.split('_')])
        response = ResponseFactory(action)
        if hasattr(boto.fps.response, action + 'Response'):
            response = getattr(boto.fps.response, action + 'Response')

        def wrapper(self, *args, **kw):
            return func(self, action, response, *args, **kw)
        wrapper.action, wrapper.response = action, response
        wrapper.__doc__ = "FPS {} API call\n{}".format(action,
                                                       func.__doc__)
        return wrapper
    return decorator
+
class FPSConnection(AWSQueryConnection):
APIVersion = '2010-08-28'
+ ResponseError = ResponseErrorFactory
+ currencycode = 'USD'
+
+ def __init__(self, *args, **kw):
+ self.currencycode = kw.pop('CurrencyCode', self.currencycode)
+ kw.setdefault('host', 'fps.sandbox.amazonaws.com')
+ AWSQueryConnection.__init__(self, *args, **kw)
- def __init__(self, aws_access_key_id=None, aws_secret_access_key=None,
- is_secure=True, port=None, proxy=None, proxy_port=None,
- proxy_user=None, proxy_pass=None,
- host='fps.sandbox.amazonaws.com', debug=0,
- https_connection_factory=None, path="/"):
- AWSQueryConnection.__init__(self, aws_access_key_id,
- aws_secret_access_key,
- is_secure, port, proxy, proxy_port,
- proxy_user, proxy_pass, host, debug,
- https_connection_factory, path)
-
def _required_auth_capability(self):
return ['fps']
- def install_payment_instruction(self, instruction,
- token_type="Unrestricted",
- transaction_id=None):
    @needs_caller_reference
    @complex_amounts('SettlementAmount')
    @requires(['CreditInstrumentId', 'SettlementAmount.Value',
               'SenderTokenId', 'SettlementAmount.CurrencyCode'])
    @api_action()
    def settle_debt(self, action, response, **kw):
        """Allows a caller to initiate a transaction that atomically
        transfers money from a sender's payment instrument to the
        recipient, while decreasing corresponding debt balance.
        """
        # action/response are injected by @api_action; kw carries the
        # request parameters already validated/expanded by the
        # decorators above.
        return self.get_object(action, kw, response)

    @requires(['TransactionId'])
    @api_action()
    def get_transaction_status(self, action, response, **kw):
        """Gets the latest status of a transaction.
        """
        return self.get_object(action, kw, response)
+
+ @requires(['StartDate'])
+ @api_action()
+ def get_account_activity(self, action, response, **kw):
+ """Returns transactions for a given date range.
"""
- InstallPaymentInstruction
- instruction: The PaymentInstruction to send, for example:
-
- MyRole=='Caller' orSay 'Roles do not match';
-
- token_type: Defaults to "Unrestricted"
- transaction_id: Defaults to a new ID
+ return self.get_object(action, kw, response)
+
+ @requires(['TransactionId'])
+ @api_action()
+ def get_transaction(self, action, response, **kw):
+ """Returns all details of a transaction.
"""
+ return self.get_object(action, kw, response)
- if(transaction_id == None):
- transaction_id = uuid.uuid4()
- params = {}
- params['PaymentInstruction'] = instruction
- params['TokenType'] = token_type
- params['CallerReference'] = transaction_id
- response = self.make_request("InstallPaymentInstruction", params)
- return response
-
- def install_caller_instruction(self, token_type="Unrestricted",
- transaction_id=None):
+ @api_action()
+ def get_outstanding_debt_balance(self, action, response):
+ """Returns the total outstanding balance for all the credit
+ instruments for the given creditor account.
"""
- Set us up as a caller
- This will install a new caller_token into the FPS section.
- This should really only be called to regenerate the caller token.
+ return self.get_object(action, {}, response)
+
+ @requires(['PrepaidInstrumentId'])
+ @api_action()
+ def get_prepaid_balance(self, action, response, **kw):
+ """Returns the balance available on the given prepaid instrument.
"""
- response = self.install_payment_instruction("MyRole=='Caller';",
- token_type=token_type,
- transaction_id=transaction_id)
- body = response.read()
- if(response.status == 200):
- rs = ResultSet()
- h = handler.XmlHandler(rs, self)
- xml.sax.parseString(body, h)
- caller_token = rs.TokenId
- try:
- boto.config.save_system_option("FPS", "caller_token",
- caller_token)
- except(IOError):
- boto.config.save_user_option("FPS", "caller_token",
- caller_token)
- return caller_token
- else:
- raise FPSResponseError(response.status, response.reason, body)
-
- def install_recipient_instruction(self, token_type="Unrestricted",
- transaction_id=None):
+ return self.get_object(action, kw, response)
+
+ @api_action()
+ def get_total_prepaid_liability(self, action, response):
+ """Returns the total liability held by the given account
+ corresponding to all the prepaid instruments owned by the
+ account.
"""
- Set us up as a Recipient
- This will install a new caller_token into the FPS section.
- This should really only be called to regenerate the recipient token.
+ return self.get_object(action, {}, response)
+
+ @api_action()
+ def get_account_balance(self, action, response):
+ """Returns the account balance for an account in real time.
"""
- response = self.install_payment_instruction("MyRole=='Recipient';",
- token_type=token_type,
- transaction_id=transaction_id)
- body = response.read()
- if(response.status == 200):
- rs = ResultSet()
- h = handler.XmlHandler(rs, self)
- xml.sax.parseString(body, h)
- recipient_token = rs.TokenId
- try:
- boto.config.save_system_option("FPS", "recipient_token",
- recipient_token)
- except(IOError):
- boto.config.save_user_option("FPS", "recipient_token",
- recipient_token)
-
- return recipient_token
- else:
- raise FPSResponseError(response.status, response.reason, body)
-
- def make_marketplace_registration_url(self, returnURL, pipelineName,
- maxFixedFee=0.0, maxVariableFee=0.0,
- recipientPaysFee=True, **params):
+ return self.get_object(action, {}, response)
+
+ @needs_caller_reference
+ @requires(['PaymentInstruction', 'TokenType'])
+ @api_action()
+ def install_payment_instruction(self, action, response, **kw):
+ """Installs a payment instruction for caller.
"""
- Generate the URL with the signature required for signing up a recipient
+ return self.get_object(action, kw, response)
+
+ @needs_caller_key
+ @requires(['returnURL', 'pipelineName'])
+ def cbui_url(self, **kw):
+ """Generate a signed URL for the Co-Branded service API given
+ arguments as payload.
"""
- # use the sandbox authorization endpoint if we're using the
- # sandbox for API calls.
- endpoint_host = 'authorize.payments.amazon.com'
- if 'sandbox' in self.host:
- endpoint_host = 'authorize.payments-sandbox.amazon.com'
- base = "/cobranded-ui/actions/start"
-
- params['callerKey'] = str(self.aws_access_key_id)
- params['returnURL'] = str(returnURL)
- params['pipelineName'] = str(pipelineName)
- params['maxFixedFee'] = str(maxFixedFee)
- params['maxVariableFee'] = str(maxVariableFee)
- params['recipientPaysFee'] = str(recipientPaysFee)
- params["signatureMethod"] = 'HmacSHA256'
- params["signatureVersion"] = '2'
-
- if(not params.has_key('callerReference')):
- params['callerReference'] = str(uuid.uuid4())
-
- parts = ''
- for k in sorted(params.keys()):
- parts += "&%s=%s" % (k, urllib.quote(params[k], '~'))
-
- canonical = '\n'.join(['GET',
- str(endpoint_host).lower(),
- base,
- parts[1:]])
+ sandbox = 'sandbox' in self.host and 'payments-sandbox' or 'payments'
+ endpoint = 'authorize.{}.amazon.com'.format(sandbox)
+ base = '/cobranded-ui/actions/start'
- signature = self._auth_handler.sign_string(canonical)
- params["signature"] = signature
+ validpipelines = ('SingleUse', 'MultiUse', 'Recurring', 'Recipient',
+ 'SetupPrepaid', 'SetupPostpaid', 'EditToken')
+ assert kw['pipelineName'] in validpipelines, "Invalid pipelineName"
+ kw.update({
+ 'signatureMethod': 'HmacSHA256',
+ 'signatureVersion': '2',
+ })
- urlsuffix = ''
- for k in sorted(params.keys()):
- urlsuffix += "&%s=%s" % (k, urllib.quote(params[k], '~'))
- urlsuffix = urlsuffix[1:] # strip the first &
-
- fmt = "https://%(endpoint_host)s%(base)s?%(urlsuffix)s"
- final = fmt % vars()
- return final
+ safestr = lambda x: x is not None and str(x) or ''
+ safequote = lambda x: urllib.quote(safestr(x), safe='~')
+ payload = [(k, safequote(v)) for k, v in kw.items()]
+ payload.sort()
+ encoded = lambda p: '&'.join([k + '=' + v for k, v in p])
+ canonical = '\n'.join(['GET', endpoint, base, encoded(payload)])
+ signature = self._auth_handler.sign_string(canonical)
+ payload += [('signature', safequote(signature))]
+ payload.sort()
+
+ return 'https://{}{}?{}'.format(endpoint, base, encoded(payload))
- def make_url(self, returnURL, paymentReason, pipelineName,
- transactionAmount, **params):
+ @needs_caller_reference
+ @complex_amounts('TransactionAmount')
+ @requires(['SenderTokenId', 'TransactionAmount.Value',
+ 'TransactionAmount.CurrencyCode'])
+ @api_action()
+ def reserve(self, action, response, **kw):
+ """Reserve API is part of the Reserve and Settle API conjunction
+ that serve the purpose of a pay where the authorization and
+ settlement have a timing difference.
"""
- Generate the URL with the signature required for a transaction
+ return self.get_object(action, kw, response)
+
+ @needs_caller_reference
+ @complex_amounts('TransactionAmount')
+ @requires(['SenderTokenId', 'TransactionAmount.Value',
+ 'TransactionAmount.CurrencyCode'])
+ @api_action()
+ def pay(self, action, response, **kw):
+ """Allows calling applications to move money from a sender to
+ a recipient.
"""
- # use the sandbox authorization endpoint if we're using the
- # sandbox for API calls.
- endpoint_host = 'authorize.payments.amazon.com'
- if 'sandbox' in self.host:
- endpoint_host = 'authorize.payments-sandbox.amazon.com'
- base = "/cobranded-ui/actions/start"
-
- params['callerKey'] = str(self.aws_access_key_id)
- params['returnURL'] = str(returnURL)
- params['paymentReason'] = str(paymentReason)
- params['pipelineName'] = pipelineName
- params['transactionAmount'] = transactionAmount
- params["signatureMethod"] = 'HmacSHA256'
- params["signatureVersion"] = '2'
-
- if(not params.has_key('callerReference')):
- params['callerReference'] = str(uuid.uuid4())
-
- parts = ''
- for k in sorted(params.keys()):
- parts += "&%s=%s" % (k, urllib.quote(params[k], '~'))
-
- canonical = '\n'.join(['GET',
- str(endpoint_host).lower(),
- base,
- parts[1:]])
+ return self.get_object(action, kw, response)
- signature = self._auth_handler.sign_string(canonical)
- params["signature"] = signature
-
- urlsuffix = ''
- for k in sorted(params.keys()):
- urlsuffix += "&%s=%s" % (k, urllib.quote(params[k], '~'))
- urlsuffix = urlsuffix[1:] # strip the first &
-
- fmt = "https://%(endpoint_host)s%(base)s?%(urlsuffix)s"
- final = fmt % vars()
- return final
-
- def pay(self, transactionAmount, senderTokenId,
- recipientTokenId=None,
- chargeFeeTo="Recipient",
- callerReference=None, senderReference=None, recipientReference=None,
- senderDescription=None, recipientDescription=None,
- callerDescription=None, metadata=None,
- transactionDate=None, reserve=False):
+ @requires(['TransactionId'])
+ @api_action()
+ def cancel(self, action, response, **kw):
+ """Cancels an ongoing transaction and puts it in cancelled state.
"""
- Make a payment transaction. You must specify the amount.
- This can also perform a Reserve request if 'reserve' is set to True.
+ return self.get_object(action, kw, response)
+
+ @complex_amounts('TransactionAmount')
+ @requires(['ReserveTransactionId', 'TransactionAmount.Value',
+ 'TransactionAmount.CurrencyCode'])
+ @api_action()
+ def settle(self, action, response, **kw):
+ """The Settle API is used in conjunction with the Reserve API and
+ is used to settle previously reserved transaction.
"""
- params = {}
- params['SenderTokenId'] = senderTokenId
- # this is for 2010-08-28 specification
- params['TransactionAmount.Value'] = str(transactionAmount)
- params['TransactionAmount.CurrencyCode'] = "USD"
- params['ChargeFeeTo'] = chargeFeeTo
-
- if recipientTokenId:
- params['RecipientTokenId'] = (
- recipientTokenId if recipientTokenId is not None
- else boto.config.get("FPS", "recipient_token")
- )
- if(transactionDate != None):
- params['TransactionDate'] = transactionDate
- if(senderReference != None):
- params['SenderReference'] = senderReference
- if(recipientReference != None):
- params['RecipientReference'] = recipientReference
- if(senderDescription != None):
- params['SenderDescription'] = senderDescription
- if(recipientDescription != None):
- params['RecipientDescription'] = recipientDescription
- if(callerDescription != None):
- params['CallerDescription'] = callerDescription
- if(metadata != None):
- params['MetaData'] = metadata
- if(callerReference == None):
- callerReference = uuid.uuid4()
- params['CallerReference'] = callerReference
-
- if reserve:
- action = "Reserve"
- else:
- action = "Pay"
- response = self.make_request(action, params)
- body = response.read()
- if(response.status == 200):
- rs = ResultSet([("%sResponse" %action, FPSResponse)])
- h = handler.XmlHandler(rs, self)
- xml.sax.parseString(body, h)
- return rs
- else:
- raise FPSResponseError(response.status, response.reason, body)
-
- def get_transaction_status(self, transactionId):
+ return self.get_object(action, kw, response)
+
+ @complex_amounts('RefundAmount')
+ @requires(['TransactionId', 'RefundAmount.Value',
+ 'CallerReference', 'RefundAmount.CurrencyCode'])
+ @api_action()
+ def refund(self, action, response, **kw):
+ """Refunds a previously completed transaction.
"""
- Returns the status of a given transaction.
+ return self.get_object(action, kw, response)
+
+ @requires(['RecipientTokenId'])
+ @api_action()
+ def get_recipient_verification_status(self, action, response, **kw):
+ """Returns the recipient status.
"""
- params = {}
- params['TransactionId'] = transactionId
-
- response = self.make_request("GetTransactionStatus", params)
- body = response.read()
- if(response.status == 200):
- rs = ResultSet([("GetTransactionStatusResponse", FPSResponse)])
- h = handler.XmlHandler(rs, self)
- xml.sax.parseString(body, h)
- return rs
- else:
- raise FPSResponseError(response.status, response.reason, body)
-
- def cancel(self, transactionId, description=None):
+ return self.get_object(action, kw, response)
+
+ @requires(['CallerReference'], ['TokenId'])
+ @api_action()
+ def get_token_by_caller(self, action, response, **kw):
+ """Returns the details of a particular token installed by this
+ calling application using the subway co-branded UI.
"""
- Cancels a reserved or pending transaction.
+ return self.get_object(action, kw, response)
+
+ @requires(['UrlEndPoint', 'HttpParameters'])
+ @api_action()
+ def verify_signature(self, action, response, **kw):
+ """Verify the signature that FPS sent in IPN or callback urls.
"""
- params = {}
- params['TransactionId'] = transactionId
- if(description != None):
- params['description'] = description
-
- response = self.make_request("Cancel", params)
- body = response.read()
- if(response.status == 200):
- rs = ResultSet([("CancelResponse", FPSResponse)])
- h = handler.XmlHandler(rs, self)
- xml.sax.parseString(body, h)
- return rs
- else:
- raise FPSResponseError(response.status, response.reason, body)
-
- def settle(self, reserveTransactionId, transactionAmount=None):
+ return self.get_object(action, kw, response)
+
+ @api_action()
+ def get_tokens(self, action, response, **kw):
+ """Returns a list of tokens installed on the given account.
"""
- Charges for a reserved payment.
+ return self.get_object(action, kw, response)
+
+ @requires(['TokenId'])
+ @api_action()
+ def get_token_usage(self, action, response, **kw):
+ """Returns the usage of a token.
"""
- params = {}
- params['ReserveTransactionId'] = reserveTransactionId
- if(transactionAmount != None):
- params['TransactionAmount.Value'] = transactionAmount
- params['TransactionAmount.CurrencyCode'] = "USD"
-
- response = self.make_request("Settle", params)
- body = response.read()
- if(response.status == 200):
- rs = ResultSet([("SettleResponse", FPSResponse)])
- h = handler.XmlHandler(rs, self)
- xml.sax.parseString(body, h)
- return rs
- else:
- raise FPSResponseError(response.status, response.reason, body)
-
- def refund(self, callerReference, transactionId, refundAmount=None,
- callerDescription=None):
+ return self.get_object(action, kw, response)
+
+ @requires(['TokenId'])
+ @api_action()
+ def cancel_token(self, action, response, **kw):
+ """Cancels any token installed by the calling application on
+ its own account.
"""
- Refund a transaction. This refunds the full amount by default
- unless 'refundAmount' is specified.
+ return self.get_object(action, kw, response)
+
+ @needs_caller_reference
+ @complex_amounts('FundingAmount')
+ @requires(['PrepaidInstrumentId', 'FundingAmount.Value',
+ 'SenderTokenId', 'FundingAmount.CurrencyCode'])
+ @api_action()
+ def fund_prepaid(self, action, response, **kw):
+ """Funds the prepaid balance on the given prepaid instrument.
"""
- params = {}
- params['CallerReference'] = callerReference
- params['TransactionId'] = transactionId
- if(refundAmount != None):
- params['RefundAmount.Value'] = refundAmount
- params['RefundAmount.CurrencyCode'] = "USD"
- if(callerDescription != None):
- params['CallerDescription'] = callerDescription
-
- response = self.make_request("Refund", params)
- body = response.read()
- if(response.status == 200):
- rs = ResultSet([("RefundResponse", FPSResponse)])
- h = handler.XmlHandler(rs, self)
- xml.sax.parseString(body, h)
- return rs
- else:
- raise FPSResponseError(response.status, response.reason, body)
-
- def get_recipient_verification_status(self, recipientTokenId):
+ return self.get_object(action, kw, response)
+
+ @requires(['CreditInstrumentId'])
+ @api_action()
+ def get_debt_balance(self, action, response, **kw):
+ """Returns the balance corresponding to the given credit instrument.
"""
- Test that the intended recipient has a verified Amazon Payments account.
+ return self.get_object(action, kw, response)
+
+ @needs_caller_reference
+ @complex_amounts('AdjustmentAmount')
+ @requires(['CreditInstrumentId', 'AdjustmentAmount.Value',
+ 'AdjustmentAmount.CurrencyCode'])
+ @api_action()
+ def write_off_debt(self, action, response, **kw):
+ """Allows a creditor to write off the debt balance accumulated
+ partially or fully at any time.
"""
- params ={}
- params['RecipientTokenId'] = recipientTokenId
-
- response = self.make_request("GetRecipientVerificationStatus", params)
- body = response.read()
- if(response.status == 200):
- rs = ResultSet()
- h = handler.XmlHandler(rs, self)
- xml.sax.parseString(body, h)
- return rs
- else:
- raise FPSResponseError(response.status, response.reason, body)
-
- def get_token_by_caller_reference(self, callerReference):
+ return self.get_object(action, kw, response)
+
+ @requires(['SubscriptionId'])
+ @api_action()
+ def get_transactions_for_subscription(self, action, response, **kw):
+ """Returns the transactions for a given subscriptionID.
"""
- Returns details about the token specified by 'CallerReference'.
+ return self.get_object(action, kw, response)
+
+ @requires(['SubscriptionId'])
+ @api_action()
+ def get_subscription_details(self, action, response, **kw):
+ """Returns the details of Subscription for a given subscriptionID.
"""
- params ={}
- params['CallerReference'] = callerReference
-
- response = self.make_request("GetTokenByCaller", params)
- body = response.read()
- if(response.status == 200):
- rs = ResultSet()
- h = handler.XmlHandler(rs, self)
- xml.sax.parseString(body, h)
- return rs
- else:
- raise FPSResponseError(response.status, response.reason, body)
-
- def get_token_by_caller_token(self, tokenId):
+ return self.get_object(action, kw, response)
+
+ @needs_caller_reference
+ @complex_amounts('RefundAmount')
+ @requires(['SubscriptionId'])
+ @api_action()
+ def cancel_subscription_and_refund(self, action, response, **kw):
+ """Cancels a subscription.
"""
- Returns details about the token specified by 'TokenId'.
+ message = "If you specify a RefundAmount, " \
+ "you must specify CallerReference."
+ assert not 'RefundAmount.Value' in kw \
+ or 'CallerReference' in kw, message
+ return self.get_object(action, kw, response)
+
+ @requires(['TokenId'])
+ @api_action()
+ def get_payment_instruction(self, action, response, **kw):
+ """Gets the payment instruction of a token.
"""
- params ={}
- params['TokenId'] = tokenId
-
- response = self.make_request("GetTokenByCaller", params)
- body = response.read()
- if(response.status == 200):
- rs = ResultSet()
- h = handler.XmlHandler(rs, self)
- xml.sax.parseString(body, h)
- return rs
- else:
- raise FPSResponseError(response.status, response.reason, body)
-
- def verify_signature(self, end_point_url, http_parameters):
- params = dict(
- UrlEndPoint = end_point_url,
- HttpParameters = http_parameters,
- )
- response = self.make_request("VerifySignature", params)
- body = response.read()
- if(response.status != 200):
- raise FPSResponseError(response.status, response.reason, body)
- rs = ResultSet([("VerifySignatureResponse", FPSResponse)])
- h = handler.XmlHandler(rs, self)
- xml.sax.parseString(body, h)
- return rs
+ return self.get_object(action, kw, response)
diff --git a/boto/fps/exception.py b/boto/fps/exception.py
new file mode 100644
index 00000000..fd551afe
--- /dev/null
+++ b/boto/fps/exception.py
@@ -0,0 +1,344 @@
+from boto.exception import BotoServerError
+
+
class ResponseErrorFactory(BotoServerError):
    # Not a plain exception class: __new__ first parses the error via
    # BotoServerError, then dispatches to the class in this module whose
    # name matches the parsed FPS error_code (falling back to
    # ResponseError), so callers can catch specific FPS errors by type.

    def __new__(cls, *args, **kw):
        error = BotoServerError(*args, **kw)
        newclass = globals().get(error.error_code, ResponseError)
        obj = newclass.__new__(newclass, *args, **kw)
        # Carry over the already-parsed state (status, reason, body,
        # error_code, error_message, ...) onto the specific instance.
        obj.__dict__.update(error.__dict__)
        return obj
+
+
class ResponseError(BotoServerError):
    """Undefined response error.
    """
    # Subclasses that may be safely retried derive from
    # RetriableResponseError, which flips this to True.
    retry = False

    def __repr__(self):
        return '{}({}, {},\n\t{})'.format(self.__class__.__name__,
                                          self.status, self.reason,
                                          self.error_message)

    def __str__(self):
        # The class docstring doubles as the human-readable description
        # of the error code in the rendered message.
        return 'FPS Response Error: {0.status} {0.__class__.__name__} {1}\n' \
               '{2}\n' \
               '{0.error_message}'.format(self,
                                          self.retry and '(Retriable)' or '',
                                          self.__doc__.strip())
+
+
class RetriableResponseError(ResponseError):
    """Base class for FPS errors where the request may be retried."""
    retry = True
+
+
# Concrete FPS error codes.  Each class name matches an error_code
# string returned by the service; ResponseErrorFactory dispatches to
# these classes via a globals() lookup on that code.  Classes deriving
# from RetriableResponseError mark requests that may be retried.

class AccessFailure(RetriableResponseError):
    """Account cannot be accessed.
    """


class AccountClosed(RetriableResponseError):
    """Account is not active.
    """


class AccountLimitsExceeded(RetriableResponseError):
    """The spending or receiving limit on the account is exceeded.
    """


class AmountOutOfRange(ResponseError):
    """The transaction amount is more than the allowed range.
    """


class AuthFailure(RetriableResponseError):
    """AWS was not able to validate the provided access credentials.
    """


class ConcurrentModification(RetriableResponseError):
    """A retriable error can happen when two processes try to modify the
    same data at the same time.
    """


class DuplicateRequest(ResponseError):
    """A different request associated with this caller reference already
    exists.
    """


class InactiveInstrument(ResponseError):
    """Payment instrument is inactive.
    """


class IncompatibleTokens(ResponseError):
    """The transaction could not be completed because the tokens have
    incompatible payment instructions.
    """


class InstrumentAccessDenied(ResponseError):
    """The external calling application is not the recipient for this
    postpaid or prepaid instrument.
    """


class InstrumentExpired(ResponseError):
    """The prepaid or the postpaid instrument has expired.
    """


class InsufficientBalance(RetriableResponseError):
    """The sender, caller, or recipient's account balance has
    insufficient funds to complete the transaction.
    """


class InternalError(RetriableResponseError):
    """A retriable error that happens due to some transient problem in
    the system.
    """


class InvalidAccountState(RetriableResponseError):
    """The account is either suspended or closed.
    """


class InvalidAccountState_Caller(RetriableResponseError):
    """The developer account cannot participate in the transaction.
    """


class InvalidAccountState_Recipient(RetriableResponseError):
    """Recipient account cannot participate in the transaction.
    """


class InvalidAccountState_Sender(RetriableResponseError):
    """Sender account cannot participate in the transaction.
    """


class InvalidCallerReference(ResponseError):
    """The Caller Reference does not have a token associated with it.
    """


class InvalidClientTokenId(ResponseError):
    """The AWS Access Key Id you provided does not exist in our records.
    """


class InvalidDateRange(ResponseError):
    """The end date specified is before the start date or the start date
    is in the future.
    """


class InvalidParams(ResponseError):
    """One or more parameters in the request is invalid.
    """


class InvalidPaymentInstrument(ResponseError):
    """The payment method used in the transaction is invalid.
    """


class InvalidPaymentMethod(ResponseError):
    """Specify correct payment method.
    """


class InvalidRecipientForCCTransaction(ResponseError):
    """This account cannot receive credit card payments.
    """


class InvalidSenderRoleForAccountType(ResponseError):
    """This token cannot be used for this operation.
    """


class InvalidTokenId(ResponseError):
    """You did not install the token that you are trying to cancel.
    """


class InvalidTokenId_Recipient(ResponseError):
    """The recipient token specified is either invalid or canceled.
    """


class InvalidTokenId_Sender(ResponseError):
    """The sender token specified is either invalid or canceled or the
    token is not active.
    """


class InvalidTokenType(ResponseError):
    """An invalid operation was performed on the token, for example,
    getting the token usage information on a single use token.
    """


class InvalidTransactionId(ResponseError):
    """The specified transaction could not be found or the caller did not
    execute the transaction or this is not a Pay or Reserve call.
    """


class InvalidTransactionState(ResponseError):
    """The transaction is not complete, or it has temporarily failed.
    """


class NotMarketplaceApp(RetriableResponseError):
    """This is not an marketplace application or the caller does not
    match either the sender or the recipient.
    """


class OriginalTransactionFailed(ResponseError):
    """The original transaction has failed.
    """


class OriginalTransactionIncomplete(RetriableResponseError):
    """The original transaction is still in progress.
    """


class PaymentInstrumentNotCC(ResponseError):
    """The payment method specified in the transaction is not a credit
    card. You can only use a credit card for this transaction.
    """


class PaymentMethodNotDefined(ResponseError):
    """Payment method is not defined in the transaction.
    """


class PrepaidFundingLimitExceeded(RetriableResponseError):
    """An attempt has been made to fund the prepaid instrument
    at a level greater than its recharge limit.
    """


class RefundAmountExceeded(ResponseError):
    """The refund amount is more than the refundable amount.
    """


class SameSenderAndRecipient(ResponseError):
    """The sender and receiver are identical, which is not allowed.
    """


class SameTokenIdUsedMultipleTimes(ResponseError):
    """This token is already used in earlier transactions.
    """


class SenderNotOriginalRecipient(ResponseError):
    """The sender in the refund transaction is not
    the recipient of the original transaction.
    """


class SettleAmountGreaterThanDebt(ResponseError):
    """The amount being settled or written off is
    greater than the current debt.
    """


class SettleAmountGreaterThanReserveAmount(ResponseError):
    """The amount being settled is greater than the reserved amount.
    """


class SignatureDoesNotMatch(ResponseError):
    """The request signature calculated by Amazon does not match the
    signature you provided.
    """


class TokenAccessDenied(ResponseError):
    """Permission to cancel the token is denied.
    """


class TokenNotActive(ResponseError):
    """The token is canceled.
    """


class TokenNotActive_Recipient(ResponseError):
    """The recipient token is canceled.
    """


class TokenNotActive_Sender(ResponseError):
    """The sender token is canceled.
    """


class TokenUsageError(ResponseError):
    """The token usage limit is exceeded.
    """


class TransactionDenied(ResponseError):
    """The transaction is not allowed.
    """


class TransactionFullyRefundedAlready(ResponseError):
    """The transaction has already been completely refunded.
    """


class TransactionTypeNotRefundable(ResponseError):
    """You cannot refund this transaction.
    """


class UnverifiedAccount_Recipient(ResponseError):
    """The recipient's account must have a verified bank account or a
    credit card before this transaction can be initiated.
    """


class UnverifiedAccount_Sender(ResponseError):
    """The sender's account must have a verified U.S. credit card or
    a verified U.S bank account before this transaction can be
    initiated.
    """


class UnverifiedBankAccount(ResponseError):
    """A verified bank account should be used for this transaction.
    """


class UnverifiedEmailAddress_Caller(ResponseError):
    """The caller account must have a verified email address.
    """


class UnverifiedEmailAddress_Recipient(ResponseError):
    """The recipient account must have a verified
    email address for receiving payments.
    """


class UnverifiedEmailAddress_Sender(ResponseError):
    """The sender account must have a verified
    email address for this payment.
    """
diff --git a/boto/fps/response.py b/boto/fps/response.py
index 748b0358..5f7964d3 100644
--- a/boto/fps/response.py
+++ b/boto/fps/response.py
@@ -1,10 +1,175 @@
-class FPSResponse(object):
- def __init__(self, connection=None):
- self.connection = connection
+from decimal import Decimal
+
+
+def ResponseFactory(action):
+ class FPSResponse(Response):
+ _action = action
+ _Result = globals().get(action + 'Result', ResponseElement)
+
+ # due to nodes receiving their closing tags
+ def endElement(self, name, value, connection):
+ if name != action + 'Response':
+ Response.endElement(self, name, value, connection)
+ return FPSResponse
+
+
+class ResponseElement(object):
+ def __init__(self, connection=None, name=None):
+ if connection is not None:
+ self._connection = connection
+ self._name = name or self.__class__.__name__
+
+ @property
+ def connection(self):
+ return self._connection
+
+ def __repr__(self):
+ render = lambda pair: '{!s}: {!r}'.format(*pair)
+ do_show = lambda pair: not pair[0].startswith('_')
+ attrs = filter(do_show, self.__dict__.items())
+ return '{}({})'.format(self.__class__.__name__,
+ ', '.join(map(render, attrs)))
def startElement(self, name, attrs, connection):
return None
+ # due to nodes receiving their closing tags
def endElement(self, name, value, connection):
- if not name == "ResponseMetadata":
+ if name != self._name:
setattr(self, name, value)
+
+
+class Response(ResponseElement):
+ _action = 'Undefined'
+
+ def startElement(self, name, attrs, connection):
+ if name == 'ResponseMetadata':
+ setattr(self, name, ResponseElement(name=name))
+ elif name == self._action + 'Result':
+ setattr(self, name, self._Result(name=name))
+ else:
+ return ResponseElement.startElement(self, name, attrs, connection)
+ return getattr(self, name)
+
+
+class ComplexAmount(ResponseElement):
+ def __repr__(self):
+ return '{} {}'.format(self.CurrencyCode, self.Value)
+
+ def __float__(self):
+ return float(self.Value)
+
+ def __str__(self):
+ return str(self.Value)
+
+ def startElement(self, name, attrs, connection):
+ if name not in ('CurrencyCode', 'Value'):
+ message = 'Unrecognized tag {} in ComplexAmount'.format(name)
+ raise AssertionError(message)
+ return ResponseElement.startElement(self, name, attrs, connection)
+
+ def endElement(self, name, value, connection):
+ if name == 'Value':
+ value = Decimal(value)
+ ResponseElement.endElement(self, name, value, connection)
+
+
+class AmountCollection(ResponseElement):
+ def startElement(self, name, attrs, connection):
+ setattr(self, name, ComplexAmount(name=name))
+ return getattr(self, name)
+
+
+class AccountBalance(AmountCollection):
+ def startElement(self, name, attrs, connection):
+ if name == 'AvailableBalances':
+ setattr(self, name, AmountCollection(name=name))
+ return getattr(self, name)
+ return AmountCollection.startElement(self, name, attrs, connection)
+
+
+class GetAccountBalanceResult(ResponseElement):
+ def startElement(self, name, attrs, connection):
+ if name == 'AccountBalance':
+ setattr(self, name, AccountBalance(name=name))
+ return getattr(self, name)
+ return Response.startElement(self, name, attrs, connection)
+
+
+class GetTotalPrepaidLiabilityResult(ResponseElement):
+ def startElement(self, name, attrs, connection):
+ if name == 'OutstandingPrepaidLiability':
+ setattr(self, name, AmountCollection(name=name))
+ return getattr(self, name)
+ return Response.startElement(self, name, attrs, connection)
+
+
+class GetPrepaidBalanceResult(ResponseElement):
+ def startElement(self, name, attrs, connection):
+ if name == 'PrepaidBalance':
+ setattr(self, name, AmountCollection(name=name))
+ return getattr(self, name)
+ return Response.startElement(self, name, attrs, connection)
+
+
+class GetOutstandingDebtBalanceResult(ResponseElement):
+ def startElement(self, name, attrs, connection):
+ if name == 'OutstandingDebt':
+ setattr(self, name, AmountCollection(name=name))
+ return getattr(self, name)
+ return Response.startElement(self, name, attrs, connection)
+
+
+class TransactionPart(ResponseElement):
+ def startElement(self, name, attrs, connection):
+ if name == 'FeesPaid':
+ setattr(self, name, ComplexAmount(name=name))
+ return getattr(self, name)
+ return ResponseElement.startElement(self, name, attrs, connection)
+
+
+class Transaction(ResponseElement):
+ def __init__(self, *args, **kw):
+ self.TransactionPart = []
+ ResponseElement.__init__(self, *args, **kw)
+
+ def startElement(self, name, attrs, connection):
+ if name == 'TransactionPart':
+ getattr(self, name).append(TransactionPart(name=name))
+ return getattr(self, name)[-1]
+ if name in ('TransactionAmount', 'FPSFees', 'Balance'):
+ setattr(self, name, ComplexAmount(name=name))
+ return getattr(self, name)
+ return ResponseElement.startElement(self, name, attrs, connection)
+
+
+class GetAccountActivityResult(ResponseElement):
+ def __init__(self, *args, **kw):
+ self.Transaction = []
+ ResponseElement.__init__(self, *args, **kw)
+
+ def startElement(self, name, attrs, connection):
+ if name == 'Transaction':
+ getattr(self, name).append(Transaction(name=name))
+ return getattr(self, name)[-1]
+ return ResponseElement.startElement(self, name, attrs, connection)
+
+
+class GetTransactionResult(ResponseElement):
+ def startElement(self, name, attrs, connection):
+ if name == 'Transaction':
+ setattr(self, name, Transaction(name=name))
+ return getattr(self, name)
+ return ResponseElement.startElement(self, name, attrs, connection)
+
+
+class GetTokensResult(ResponseElement):
+ def __init__(self, *args, **kw):
+ self.Token = []
+ ResponseElement.__init__(self, *args, **kw)
+
+ def startElement(self, name, attrs, connection):
+ if name == 'Token':
+ getattr(self, name).append(ResponseElement(name=name))
+ return getattr(self, name)[-1]
+ return ResponseElement.startElement(self, name, attrs, connection)
diff --git a/boto/gs/acl.py b/boto/gs/acl.py
index 93bb4a9c..d726b804 100755
--- a/boto/gs/acl.py
+++ b/boto/gs/acl.py
@@ -94,19 +94,19 @@ class ACL:
self.entries.entry_list.append(entry)
def startElement(self, name, attrs, connection):
- if name == OWNER:
+ if name.lower() == OWNER.lower():
self.owner = User(self)
return self.owner
- elif name == ENTRIES:
+ elif name.lower() == ENTRIES.lower():
self.entries = Entries(self)
return self.entries
else:
return None
def endElement(self, name, value, connection):
- if name == OWNER:
+ if name.lower() == OWNER.lower():
pass
- elif name == ENTRIES:
+ elif name.lower() == ENTRIES.lower():
pass
else:
setattr(self, name, value)
@@ -138,7 +138,7 @@ class Entries:
return '<Entries: %s>' % ', '.join(entries_repr)
def startElement(self, name, attrs, connection):
- if name == ENTRY:
+ if name.lower() == ENTRY.lower():
entry = Entry(self)
self.entry_list.append(entry)
return entry
@@ -146,7 +146,7 @@ class Entries:
return None
def endElement(self, name, value, connection):
- if name == ENTRY:
+ if name.lower() == ENTRY.lower():
pass
else:
setattr(self, name, value)
@@ -173,7 +173,7 @@ class Entry:
return '<%s: %s>' % (self.scope.__repr__(), self.permission.__repr__())
def startElement(self, name, attrs, connection):
- if name == SCOPE:
+ if name.lower() == SCOPE.lower():
# The following if statement used to look like this:
# if not TYPE in attrs:
# which caused problems because older versions of the
@@ -194,15 +194,15 @@ class Entry:
(TYPE, SCOPE))
self.scope = Scope(self, attrs[TYPE])
return self.scope
- elif name == PERMISSION:
+ elif name.lower() == PERMISSION.lower():
pass
else:
return None
def endElement(self, name, value, connection):
- if name == SCOPE:
+ if name.lower() == SCOPE.lower():
pass
- elif name == PERMISSION:
+ elif name.lower() == PERMISSION.lower():
value = value.strip()
if not value in SupportedPermissions:
raise InvalidAclError('Invalid Permission "%s"' % value)
@@ -219,15 +219,17 @@ class Entry:
class Scope:
- # Map from Scope type to list of allowed sub-elems.
+ # Map from Scope type.lower() to lower-cased list of allowed sub-elems.
ALLOWED_SCOPE_TYPE_SUB_ELEMS = {
- ALL_AUTHENTICATED_USERS : [],
- ALL_USERS : [],
- GROUP_BY_DOMAIN : [DOMAIN],
- GROUP_BY_EMAIL : [DISPLAY_NAME, EMAIL_ADDRESS, NAME],
- GROUP_BY_ID : [DISPLAY_NAME, ID, NAME],
- USER_BY_EMAIL : [DISPLAY_NAME, EMAIL_ADDRESS, NAME],
- USER_BY_ID : [DISPLAY_NAME, ID, NAME]
+ ALL_AUTHENTICATED_USERS.lower() : [],
+ ALL_USERS.lower() : [],
+ GROUP_BY_DOMAIN.lower() : [DOMAIN.lower()],
+ GROUP_BY_EMAIL.lower() : [
+ DISPLAY_NAME.lower(), EMAIL_ADDRESS.lower(), NAME.lower()],
+ GROUP_BY_ID.lower() : [DISPLAY_NAME.lower(), ID.lower(), NAME.lower()],
+ USER_BY_EMAIL.lower() : [
+ DISPLAY_NAME.lower(), EMAIL_ADDRESS.lower(), NAME.lower()],
+ USER_BY_ID.lower() : [DISPLAY_NAME.lower(), ID.lower(), NAME.lower()]
}
def __init__(self, parent, type=None, id=None, name=None,
@@ -238,7 +240,7 @@ class Scope:
self.id = id
self.domain = domain
self.email_address = email_address
- if not self.ALLOWED_SCOPE_TYPE_SUB_ELEMS.has_key(self.type):
+ if not self.ALLOWED_SCOPE_TYPE_SUB_ELEMS.has_key(self.type.lower()):
raise InvalidAclError('Invalid %s %s "%s" ' %
(SCOPE, TYPE, self.type))
@@ -256,36 +258,40 @@ class Scope:
return '<%s>' % self.type
def startElement(self, name, attrs, connection):
- if not name in self.ALLOWED_SCOPE_TYPE_SUB_ELEMS[self.type]:
+ if (not name.lower() in
+ self.ALLOWED_SCOPE_TYPE_SUB_ELEMS[self.type.lower()]):
raise InvalidAclError('Element "%s" not allowed in %s %s "%s" ' %
(name, SCOPE, TYPE, self.type))
return None
def endElement(self, name, value, connection):
value = value.strip()
- if name == DOMAIN:
+ if name.lower() == DOMAIN.lower():
self.domain = value
- elif name == EMAIL_ADDRESS:
+ elif name.lower() == EMAIL_ADDRESS.lower():
self.email_address = value
- elif name == ID:
+ elif name.lower() == ID.lower():
self.id = value
- elif name == NAME:
+ elif name.lower() == NAME.lower():
self.name = value
else:
setattr(self, name, value)
def to_xml(self):
s = '<%s type="%s">' % (SCOPE, self.type)
- if self.type == ALL_AUTHENTICATED_USERS or self.type == ALL_USERS:
+ if (self.type.lower() == ALL_AUTHENTICATED_USERS.lower()
+ or self.type.lower() == ALL_USERS.lower()):
pass
- elif self.type == GROUP_BY_DOMAIN:
+ elif self.type.lower() == GROUP_BY_DOMAIN.lower():
s += '<%s>%s</%s>' % (DOMAIN, self.domain, DOMAIN)
- elif self.type == GROUP_BY_EMAIL or self.type == USER_BY_EMAIL:
+ elif (self.type.lower() == GROUP_BY_EMAIL.lower()
+ or self.type.lower() == USER_BY_EMAIL.lower()):
s += '<%s>%s</%s>' % (EMAIL_ADDRESS, self.email_address,
EMAIL_ADDRESS)
if self.name:
s += '<%s>%s</%s>' % (NAME, self.name, NAME)
- elif self.type == GROUP_BY_ID or self.type == USER_BY_ID:
+ elif (self.type.lower() == GROUP_BY_ID.lower()
+ or self.type.lower() == USER_BY_ID.lower()):
s += '<%s>%s</%s>' % (ID, self.id, ID)
if self.name:
s += '<%s>%s</%s>' % (NAME, self.name, NAME)
diff --git a/boto/gs/bucket.py b/boto/gs/bucket.py
index aa2d4c23..14c97b8f 100644
--- a/boto/gs/bucket.py
+++ b/boto/gs/bucket.py
@@ -24,14 +24,16 @@ from boto import handler
from boto.exception import InvalidAclError
from boto.gs.acl import ACL, CannedACLStrings
from boto.gs.acl import SupportedPermissions as GSPermissions
+from boto.gs.cors import Cors
from boto.gs.key import Key as GSKey
from boto.s3.acl import Policy
from boto.s3.bucket import Bucket as S3Bucket
import xml.sax
-# constants for default object ACL and standard acl in http query args
+# constants for http query args
DEF_OBJ_ACL = 'defaultObjectAcl'
STANDARD_ACL = 'acl'
+CORS_ARG = 'cors'
class Bucket(S3Bucket):
@@ -39,10 +41,10 @@ class Bucket(S3Bucket):
super(Bucket, self).__init__(connection, name, key_class)
def set_acl(self, acl_or_str, key_name='', headers=None, version_id=None):
- """sets or changes a bucket's acl. We include a version_id argument
- to support a polymorphic interface for callers, however,
- version_id is not relevant for Google Cloud Storage buckets
- and is therefore ignored here."""
+ """sets or changes a bucket's or key's acl (depending on whether a
+ key_name was passed). We include a version_id argument to support a
+ polymorphic interface for callers, however, version_id is not relevant
+ for Google Cloud Storage buckets and is therefore ignored here."""
if isinstance(acl_or_str, Policy):
raise InvalidAclError('Attempt to set S3 Policy on GS ACL')
elif isinstance(acl_or_str, ACL):
@@ -118,10 +120,38 @@ class Bucket(S3Bucket):
query_args=DEF_OBJ_ACL)
def set_def_xml_acl(self, acl_str, key_name='', headers=None):
- """sets or changes a bucket's default object"""
+ """sets or changes a bucket's default object ACL"""
return self.set_xml_acl(acl_str, key_name, headers,
query_args=DEF_OBJ_ACL)
+ def get_cors(self, headers=None):
+ """returns a bucket's CORS XML"""
+ response = self.connection.make_request('GET', self.name,
+ query_args=CORS_ARG,
+ headers=headers)
+ body = response.read()
+ if response.status == 200:
+ # Success - parse XML and return Cors object.
+ cors = Cors()
+ h = handler.XmlHandler(cors, self)
+ xml.sax.parseString(body, h)
+ return cors
+ else:
+ raise self.connection.provider.storage_response_error(
+ response.status, response.reason, body)
+
+ def set_cors(self, cors, headers=None):
+ """sets or changes a bucket's CORS XML."""
+ cors_xml = cors.encode('ISO-8859-1')
+ response = self.connection.make_request('PUT', self.name,
+ data=cors_xml,
+ query_args=CORS_ARG,
+ headers=headers)
+ body = response.read()
+ if response.status != 200:
+ raise self.connection.provider.storage_response_error(
+ response.status, response.reason, body)
+
# Method with same signature as boto.s3.bucket.Bucket.add_email_grant(),
# to allow polymorphic treatment at application layer.
def add_email_grant(self, permission, email_address,
diff --git a/boto/gs/cors.py b/boto/gs/cors.py
new file mode 100755
index 00000000..e5dd9184
--- /dev/null
+++ b/boto/gs/cors.py
@@ -0,0 +1,169 @@
+# Copyright 2012 Google Inc.
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish, dis-
+# tribute, sublicense, and/or sell copies of the Software, and to permit
+# persons to whom the Software is furnished to do so, subject to the fol-
+# lowing conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
+# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+
+import types
+from boto.gs.user import User
+from boto.exception import InvalidCorsError
+from xml.sax import handler
+
+# Relevant tags for the CORS XML document.
+CORS_CONFIG = 'CorsConfig'
+CORS = 'Cors'
+ORIGINS = 'Origins'
+ORIGIN = 'Origin'
+METHODS = 'Methods'
+METHOD = 'Method'
+HEADERS = 'ResponseHeaders'
+HEADER = 'ResponseHeader'
+MAXAGESEC = 'MaxAgeSec'
+
+class Cors(handler.ContentHandler):
+ """Encapsulates the CORS configuration XML document"""
+ def __init__(self):
+ # List of CORS elements found within a CorsConfig element.
+ self.cors = []
+ # List of collections (e.g. Methods, ResponseHeaders, Origins)
+ # found within a CORS element. We use a list of lists here
+ # instead of a dictionary because the collections need to be
+ # preserved in the order in which they appear in the input XML
+ # document (and Python dictionary keys are inherently unordered).
+ # The elements on this list are two element tuples of the form
+ # (collection name, [list of collection contents]).
+ self.collections = []
+ # Lists of elements within a collection. Again a list is needed to
+ # preserve ordering but also because the same element may appear
+ # multiple times within a collection.
+ self.elements = []
+ # Dictionary mapping supported collection names to element types
+ # which may be contained within each.
+ self.legal_collections = {
+ ORIGINS : [ORIGIN],
+ METHODS : [METHOD],
+ HEADERS : [HEADER],
+ MAXAGESEC: []
+ }
+ # List of supported element types within any collection, used for
+        # checking validity of a parsed element name.
+ self.legal_elements = [ORIGIN, METHOD, HEADER]
+
+ self.parse_level = 0
+ self.collection = None
+ self.element = None
+
+ def validateParseLevel(self, tag, level):
+ """Verify parse level for a given tag."""
+ if self.parse_level != level:
+ raise InvalidCorsError('Invalid tag %s at parse level %d: ' %
+ (tag, self.parse_level))
+
+ def startElement(self, name, attrs, connection):
+ """SAX XML logic for parsing new element found."""
+ if name == CORS_CONFIG:
+ self.validateParseLevel(name, 0)
+ self.parse_level += 1;
+ elif name == CORS:
+ self.validateParseLevel(name, 1)
+ self.parse_level += 1;
+ elif name in self.legal_collections:
+ self.validateParseLevel(name, 2)
+ self.parse_level += 1;
+ self.collection = name
+ elif name in self.legal_elements:
+ self.validateParseLevel(name, 3)
+ # Make sure this tag is found inside a collection tag.
+ if self.collection is None:
+ raise InvalidCorsError('Tag %s found outside collection' % name)
+ # Make sure this tag is allowed for the current collection tag.
+ if name not in self.legal_collections[self.collection]:
+ raise InvalidCorsError('Tag %s not allowed in %s collection' %
+ (name, self.collection))
+ self.element = name
+ else:
+ raise InvalidCorsError('Unsupported tag ' + name)
+
+ def endElement(self, name, value, connection):
+ """SAX XML logic for parsing new element found."""
+ if name == CORS_CONFIG:
+ self.validateParseLevel(name, 1)
+ self.parse_level -= 1;
+ elif name == CORS:
+ self.validateParseLevel(name, 2)
+ self.parse_level -= 1;
+ # Terminating a CORS element, save any collections we found
+ # and re-initialize collections list.
+ self.cors.append(self.collections)
+ self.collections = []
+ elif name in self.legal_collections:
+ self.validateParseLevel(name, 3)
+ if name != self.collection:
+ raise InvalidCorsError('Mismatched start and end tags (%s/%s)' %
+ (self.collection, name))
+ self.parse_level -= 1;
+ if not self.legal_collections[name]:
+ # If this collection doesn't contain any sub-elements, store
+ # a tuple of name and this tag's element value.
+ self.collections.append((name, value.strip()))
+ else:
+ # Otherwise, we're terminating a collection of sub-elements,
+ # so store a tuple of name and list of contained elements.
+ self.collections.append((name, self.elements))
+ self.elements = []
+ self.collection = None
+ elif name in self.legal_elements:
+ self.validateParseLevel(name, 3)
+ # Make sure this tag is found inside a collection tag.
+ if self.collection is None:
+ raise InvalidCorsError('Tag %s found outside collection' % name)
+ # Make sure this end tag is allowed for the current collection tag.
+ if name not in self.legal_collections[self.collection]:
+ raise InvalidCorsError('Tag %s not allowed in %s collection' %
+ (name, self.collection))
+ if name != self.element:
+ raise InvalidCorsError('Mismatched start and end tags (%s/%s)' %
+ (self.element, name))
+ # Terminating an element tag, add it to the list of elements
+ # for the current collection.
+ self.elements.append((name, value.strip()))
+ self.element = None
+ else:
+ raise InvalidCorsError('Unsupported end tag ' + name)
+
+ def to_xml(self):
+ """Convert CORS object into XML string representation."""
+ s = '<' + CORS_CONFIG + '>'
+ for collections in self.cors:
+ s += '<' + CORS + '>'
+ for (collection, elements_or_value) in collections:
+ assert collection is not None
+ s += '<' + collection + '>'
+ # If collection elements has type string, append atomic value,
+ # otherwise, append sequence of values in named tags.
+ if isinstance(elements_or_value, types.StringTypes):
+ s += elements_or_value
+ else:
+ for (name, value) in elements_or_value:
+ assert name is not None
+ assert value is not None
+ s += '<' + name + '>' + value + '</' + name + '>'
+ s += '</' + collection + '>'
+ s += '</' + CORS + '>'
+ s += '</' + CORS_CONFIG + '>'
+ return s
diff --git a/boto/gs/key.py b/boto/gs/key.py
index 21532d38..3c76cc56 100644
--- a/boto/gs/key.py
+++ b/boto/gs/key.py
@@ -19,6 +19,7 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
+import os
import StringIO
from boto.exception import BotoClientError
from boto.s3.key import Key as S3Key
@@ -111,7 +112,7 @@ class Key(S3Key):
def set_contents_from_file(self, fp, headers=None, replace=True,
cb=None, num_cb=10, policy=None, md5=None,
- res_upload_handler=None, size=None):
+ res_upload_handler=None, size=None, rewind=False):
"""
Store an object in GS using the name of the Key object as the
key in GS and the contents of the file pointed to by 'fp' as the
@@ -175,6 +176,15 @@ class Key(S3Key):
2. At present Google Cloud Storage does not support
multipart uploads.
+ :type rewind: bool
+ :param rewind: (optional) If True, the file pointer (fp) will be
+ rewound to the start before any bytes are read from
+ it. The default behaviour is False which reads from
+ the current position of the file pointer (fp).
+
+ :rtype: int
+ :return: The number of bytes written to the key.
+
TODO: At some point we should refactor the Bucket and Key classes,
to move functionality common to all providers into a parent class,
and provider-specific functionality into subclasses (rather than
@@ -187,16 +197,28 @@ class Key(S3Key):
headers = headers or {}
if policy:
headers[provider.acl_header] = policy
+
+ if rewind:
+ # caller requests reading from beginning of fp.
+ fp.seek(0, os.SEEK_SET)
+ else:
+ spos = fp.tell()
+ fp.seek(0, os.SEEK_END)
+ if fp.tell() == spos:
+ fp.seek(0, os.SEEK_SET)
+ if fp.tell() != spos:
+ # Raise an exception as this is likely a programming error
+ # whereby there is data before the fp but nothing after it.
+ fp.seek(spos)
+ raise AttributeError(
+ 'fp is at EOF. Use rewind option or seek() to data start.')
+ # seek back to the correct position.
+ fp.seek(spos)
+
if hasattr(fp, 'name'):
self.path = fp.name
if self.bucket != None:
- if not md5:
- # compute_md5() and also set self.size to actual
- # size of the bytes read computing the md5.
- md5 = self.compute_md5(fp, size)
- # adjust size if required
- size = self.size
- elif size:
+ if size:
self.size = size
else:
# If md5 is provided, still need to size so
@@ -206,10 +228,13 @@ class Key(S3Key):
self.size = fp.tell() - spos
fp.seek(spos)
size = self.size
- self.md5 = md5[0]
- self.base64md5 = md5[1]
if self.name == None:
+ if md5 == None:
+ md5 = self.compute_md5(fp, size)
+ self.md5 = md5[0]
+ self.base64md5 = md5[1]
+
self.name = self.md5
if not replace:
if self.bucket.lookup(self.name):
@@ -270,6 +295,10 @@ class Key(S3Key):
:param res_upload_handler: If provided, this handler will perform the
upload.
"""
+ # Clear out any previously computed md5 hashes, since we are setting the content.
+ self.md5 = None
+ self.base64md5 = None
+
fp = open(filename, 'rb')
self.set_contents_from_file(fp, headers, replace, cb, num_cb,
policy, md5, res_upload_handler)
@@ -320,6 +349,11 @@ class Key(S3Key):
param, if present, will be used as the MD5 values
of the file. Otherwise, the checksum will be computed.
"""
+
+ # Clear out any previously computed md5 hashes, since we are setting the content.
+ self.md5 = None
+ self.base64md5 = None
+
if isinstance(s, unicode):
s = s.encode("utf-8")
fp = StringIO.StringIO(s)
diff --git a/boto/gs/resumable_upload_handler.py b/boto/gs/resumable_upload_handler.py
index d4176c94..e81a0fc6 100644
--- a/boto/gs/resumable_upload_handler.py
+++ b/boto/gs/resumable_upload_handler.py
@@ -34,6 +34,10 @@ from boto.connection import AWSAuthConnection
from boto.exception import InvalidUriError
from boto.exception import ResumableTransferDisposition
from boto.exception import ResumableUploadException
+try:
+ from hashlib import md5
+except ImportError:
+ from md5 import md5
"""
Handler for Google Cloud Storage resumable uploads. See
@@ -295,7 +299,7 @@ class ResumableUploadHandler(object):
self._save_tracker_uri_to_file()
def _upload_file_bytes(self, conn, http_conn, fp, file_length,
- total_bytes_uploaded, cb, num_cb):
+ total_bytes_uploaded, cb, num_cb, md5sum):
"""
Makes one attempt to upload file bytes, using an existing resumable
upload connection.
@@ -340,6 +344,7 @@ class ResumableUploadHandler(object):
http_conn.set_debuglevel(0)
while buf:
http_conn.send(buf)
+ md5sum.update(buf)
total_bytes_uploaded += len(buf)
if cb:
i += 1
@@ -375,7 +380,7 @@ class ResumableUploadHandler(object):
(resp.status, resp.reason), disposition)
def _attempt_resumable_upload(self, key, fp, file_length, headers, cb,
- num_cb):
+ num_cb, md5sum):
"""
Attempts a resumable upload.
@@ -391,7 +396,19 @@ class ResumableUploadHandler(object):
(server_start, server_end) = (
self._query_server_pos(conn, file_length))
self.server_has_bytes = server_start
- key=key
+
+ if server_end:
+ # If the server already has some of the content, we need to update the md5 with
+ # the bytes that have already been uploaded to ensure we get a complete hash in
+ # the end.
+ print 'Catching up md5 for resumed upload'
+ fp.seek(0)
+ bytes_to_go = server_end + 1
+ while bytes_to_go:
+ chunk = fp.read(min(key.BufferSize, bytes_to_go))
+ md5sum.update(chunk)
+ bytes_to_go -= len(chunk)
+
if conn.debug >= 1:
print 'Resuming transfer.'
except ResumableUploadException, e:
@@ -434,7 +451,7 @@ class ResumableUploadHandler(object):
# and can report that progress on next attempt.
try:
return self._upload_file_bytes(conn, http_conn, fp, file_length,
- total_bytes_uploaded, cb, num_cb)
+ total_bytes_uploaded, cb, num_cb, md5sum)
except (ResumableUploadException, socket.error):
resp = self._query_server_state(conn, file_length)
if resp.status == 400:
@@ -517,6 +534,9 @@ class ResumableUploadHandler(object):
fp.seek(0)
debug = key.bucket.connection.debug
+ # Compute the MD5 checksum on the fly.
+ md5sum = md5()
+
# Use num-retries from constructor if one was provided; else check
# for a value specified in the boto config file; else default to 5.
if self.num_retries is None:
@@ -525,9 +545,15 @@ class ResumableUploadHandler(object):
while True: # Retry as long as we're making progress.
server_had_bytes_before_attempt = self.server_has_bytes
+ md5sum_before_attempt = md5sum.copy()
try:
etag = self._attempt_resumable_upload(key, fp, file_length,
- headers, cb, num_cb)
+ headers, cb, num_cb, md5sum)
+
+ # Get the final md5 for the uploaded content.
+ hd = md5sum.hexdigest()
+ key.md5, key.base64md5 = key.get_md5_from_hexdigest(hd)
+
+                # Upload succeeded, so remove the tracker file (if we have one).
self._remove_tracker_file()
self._check_final_md5(key, etag)
@@ -564,11 +590,14 @@ class ResumableUploadHandler(object):
if debug >= 1:
print('Caught ResumableUploadException (%s) - will '
'retry' % e.message)
-
# At this point we had a re-tryable failure; see if made progress.
if self.server_has_bytes > server_had_bytes_before_attempt:
progress_less_iterations = 0
else:
+ # Rollback any potential md5sum updates, as we did not
+ # make any progress in this iteration.
+ md5sum = md5sum_before_attempt
+
progress_less_iterations += 1
if progress_less_iterations > self.num_retries:
diff --git a/boto/mws/__init__.py b/boto/mws/__init__.py
new file mode 100644
index 00000000..d69b7f08
--- /dev/null
+++ b/boto/mws/__init__.py
@@ -0,0 +1,21 @@
+# Copyright (c) 2008, Chris Moyer http://coredumped.org
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish, dis-
+# tribute, sublicense, and/or sell copies of the Software, and to permit
+# persons to whom the Software is furnished to do so, subject to the fol-
+# lowing conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
+# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+#
diff --git a/boto/mws/connection.py b/boto/mws/connection.py
new file mode 100644
index 00000000..bfb0d29a
--- /dev/null
+++ b/boto/mws/connection.py
@@ -0,0 +1,725 @@
+# Copyright (c) 2012 Andy Davidoff http://www.disruptek.com/
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish, dis-
+# tribute, sublicense, and/or sell copies of the Software, and to permit
+# persons to whom the Software is furnished to do so, subject to the fol-
+# lowing conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
+# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+import xml.sax
+import hashlib
+import base64
+from boto.connection import AWSQueryConnection
+from boto.mws.exception import ResponseErrorFactory
+from boto.mws.response import ResponseFactory, ResponseElement
+from boto.handler import XmlHandler
+import boto.mws.response
+
+__all__ = ['MWSConnection']
+
#: Map of MWS API section name to (API version, merchant-id keyword,
#: request path), used by api_action() to build each request.
api_version_path = {
    'Feeds': ('2009-01-01', 'Merchant', '/'),
    'Reports': ('2009-01-01', 'Merchant', '/'),
    'Orders': ('2011-01-01', 'SellerId', '/Orders/2011-01-01'),
    'Products': ('2011-10-01', 'SellerId', '/Products/2011-10-01'),
    'Sellers': ('2011-07-01', 'SellerId', '/Sellers/2011-07-01'),
    'Inbound': ('2010-10-01', 'SellerId',
                '/FulfillmentInboundShipment/2010-10-01'),
    'Outbound': ('2010-10-01', 'SellerId',
                 '/FulfillmentOutboundShipment/2010-10-01'),
    'Inventory': ('2010-10-01', 'SellerId',
                  '/FulfillmentInventory/2010-10-01'),
}


def content_md5(c):
    """Return the base64-encoded MD5 digest of *c*, suitable as an
    HTTP Content-MD5 header value.
    """
    # base64.b64encode never inserts line breaks, unlike the deprecated
    # base64.encodestring (removed in Python 3.9); strip() is kept for
    # parity with the previous implementation's trailing-newline removal.
    return base64.b64encode(hashlib.md5(c).digest()).strip()


#: Attributes copied from a decorated API method onto each wrapper so the
#: metadata assigned by api_action() survives decorator stacking.
decorated_attrs = ('action', 'response', 'section',
                   'quota', 'restore', 'version')


def add_attrs_from(func, to):
    """Copy each attribute named in ``decorated_attrs`` from *func*
    onto *to* (missing attributes default to None) and return *to*.
    """
    for attr in decorated_attrs:
        setattr(to, attr, getattr(func, attr, None))
    return to
+
+
def structured_lists(*fields):
    """Decorator factory: expand list-valued keyword arguments into the
    numbered MWS request parameters named by *fields*.

    Each entry of *fields* has the form 'Key.Accessor'; a keyword
    argument 'Key' holding a sequence becomes Key.Accessor.1,
    Key.Accessor.2, ... (or Key.1, Key.2, ... if the accessor part is
    empty).
    """

    def decorator(func):

        def wrapper(self, *args, **kw):
            for field in fields:
                key, accessor = field.split('.')
                if key not in kw:
                    continue
                stem = key + '.' + accessor + ('.' if accessor else '')
                values = kw.pop(key)
                for position in range(len(values)):
                    kw[stem + str(position + 1)] = values[position]
            return func(self, *args, **kw)
        wrapper.__doc__ = "{}\nLists: {}".format(func.__doc__,
                                                 ', '.join(fields))
        return add_attrs_from(func, to=wrapper)
    return decorator
+
+
def http_body(field):
    """Decorator factory: pop the keyword argument named *field* (plus
    the mandatory 'content_type' keyword) and pass it to the wrapped
    API method as an HTTP request body with Content-Type and
    Content-MD5 headers.

    :raises KeyError: if either *field* or 'content_type' is missing.
    """

    def decorator(func):

        def wrapper(*args, **kw):
            # Use explicit membership tests: a lazy filter() object (as
            # returned on Python 3) is truthy even when nothing is
            # missing, which would raise KeyError unconditionally.
            if any(arg not in kw for arg in (field, 'content_type')):
                message = "{} requires {} and content_type arguments for " \
                          "building HTTP body".format(func.action, field)
                raise KeyError(message)
            body = kw.pop(field)
            return func(*args, body=body, headers={
                'Content-Type': kw.pop('content_type'),
                'Content-MD5': content_md5(body),
            }, **kw)
        wrapper.__doc__ = "{}\nRequired HTTP Body: " \
                          "{}".format(func.__doc__, field)
        return add_attrs_from(func, to=wrapper)
    return decorator
+
+
def destructure_object(value, into=None, prefix=''):
    """Flatten *value* into the dict *into* as dotted MWS request
    parameters rooted at *prefix*.

    ResponseElement instances contribute one parameter per public
    attribute; lists, sets and tuples contribute numbered parameters
    (prefix.1, prefix.2, ...); booleans are lowered to 'true'/'false';
    anything else is stored as-is.

    :returns: None; results accumulate in *into*.
    """
    if into is None:
        # Create a fresh dict per call; the previous mutable default
        # argument was shared (and silently polluted) across calls.
        into = {}
    if isinstance(value, ResponseElement):
        for name, attr in value.__dict__.items():
            if name.startswith('_'):
                continue
            destructure_object(attr, into=into, prefix=prefix + '.' + name)
    elif isinstance(value, (list, set, tuple)):
        # enumerate() also handles sets, which do not support indexing
        # (the previous value[i] raised TypeError for set inputs).
        for offset, element in enumerate(value, 1):
            destructure_object(element, into=into,
                               prefix=prefix + '.' + str(offset))
    elif isinstance(value, bool):
        into[prefix] = str(value).lower()
    else:
        into[prefix] = value
+
+
def structured_objects(*fields):
    """Decorator factory: destructure each keyword argument named in
    *fields* (typically a ResponseElement or a sequence) into flat,
    dotted MWS request parameters via destructure_object().
    """

    def decorator(func):

        def wrapper(*args, **kw):
            # 'in' replaces dict.has_key, which was removed in Python 3.
            for field in [f for f in fields if f in kw]:
                destructure_object(kw.pop(field), into=kw, prefix=field)
            return func(*args, **kw)
        wrapper.__doc__ = "{}\nObjects: {}".format(func.__doc__,
                                                   ', '.join(fields))
        return add_attrs_from(func, to=wrapper)
    return decorator
+
+
def requires(*groups):
    """Decorator factory: require that the call's keyword arguments
    satisfy exactly one of *groups*, where each group is a sequence of
    argument names that must all be supplied together.

    :raises KeyError: if zero groups, or more than one group, are
        fully satisfied.
    """

    def decorator(func):

        def wrapper(*args, **kw):
            # all()/sum() replace has_key/len(filter(...)), which are
            # Python-2-only constructs (filter() is lazy on Python 3).
            hasgroup = lambda group: all(name in kw for name in group)
            if sum(1 for group in groups if hasgroup(group)) != 1:
                message = ' OR '.join(['+'.join(g) for g in groups])
                message = "{} requires {} argument(s)" \
                          "".format(func.action, message)
                raise KeyError(message)
            return func(*args, **kw)
        message = ' OR '.join(['+'.join(g) for g in groups])
        wrapper.__doc__ = "{}\nRequired: {}".format(func.__doc__,
                                                    message)
        return add_attrs_from(func, to=wrapper)
    return decorator
+
+
def requires_some_of(*fields):
    """Decorator factory: require that at least one keyword argument
    named in *fields* is supplied.

    :raises KeyError: if none of *fields* was supplied.
    """

    def decorator(func):

        def wrapper(*args, **kw):
            # any() replaces filter(kw.has_key, ...): dict.has_key is
            # Python-2-only, and a lazy filter object is always truthy
            # on Python 3, which would disable this check entirely.
            if not any(name in kw for name in fields):
                message = "{} requires at least one of {} argument(s)" \
                          "".format(func.action, ', '.join(fields))
                raise KeyError(message)
            return func(*args, **kw)
        wrapper.__doc__ = "{}\nSome Required: {}".format(func.__doc__,
                                                         ', '.join(fields))
        return add_attrs_from(func, to=wrapper)
    return decorator
+
+
def boolean_arguments(*fields):
    """Decorator factory: convert boolean keyword arguments named in
    *fields* to the lowercase strings 'true'/'false' expected by MWS.
    """

    def decorator(func):

        def wrapper(*args, **kw):
            # bool cannot be subclassed, so isinstance() is exactly
            # equivalent to the identity test on type() here.
            for name in fields:
                if isinstance(kw.get(name), bool):
                    kw[name] = str(kw[name]).lower()
            return func(*args, **kw)
        wrapper.__doc__ = "{}\nBooleans: {}".format(func.__doc__,
                                                    ', '.join(fields))
        return add_attrs_from(func, to=wrapper)
    return decorator
+
+
def api_action(section, quota, restore, *api):
    """Decorator factory binding an MWS API action to a connection
    method.

    *section* keys into ``api_version_path`` for the API version, the
    merchant/seller keyword and the request path.  *quota* and
    *restore* record the MWS throttling allowance for the action.  An
    explicit action name may be passed in *api*; otherwise the name is
    derived by CamelCasing the decorated function's name.
    """

    def decorator(func, quota=int(quota), restore=float(restore)):
        version, accesskey, path = api_version_path[section]
        # func.__name__ works on both Python 2 and 3; func.func_name
        # is a Python-2-only alias.
        action = ''.join(api or map(str.capitalize, func.__name__.split('_')))
        # Prefer a hand-written response class when one exists in
        # boto.mws.response; otherwise synthesize one for this action.
        if hasattr(boto.mws.response, action + 'Response'):
            response = getattr(boto.mws.response, action + 'Response')
        else:
            response = ResponseFactory(action)

        def wrapper(self, *args, **kw):
            # Fill in the Merchant/SellerId from the connection when
            # the caller did not supply one explicitly.
            kw.setdefault(accesskey, getattr(self, accesskey, None))
            if kw[accesskey] is None:
                message = "{} requires {} argument. Set the " \
                          "MWSConnection.{} attribute?" \
                          "".format(action, accesskey, accesskey)
                raise KeyError(message)
            kw['Action'] = action
            kw['Version'] = version
            return func(self, path, response, *args, **kw)
        for attr in decorated_attrs:
            setattr(wrapper, attr, locals().get(attr))
        wrapper.__doc__ = "MWS {}/{} API call; quota={} restore={:.2f}\n" \
                          "{}".format(action, version, quota, restore,
                                      func.__doc__)
        return wrapper
    return decorator
+
+
class MWSConnection(AWSQueryConnection):
    """Connection to Amazon Marketplace Web Services (MWS).

    Each API call is declared with the api_action decorator, which
    binds the method to an MWS section/version/path and wraps the XML
    response in a class from boto.mws.response.  Fixes relative to the
    original: requires() is always given groups (lists) of argument
    names -- passing a bare string made the decorator iterate the
    string's characters and reject every call -- and list_orders no
    longer uses the Python-2-only dict.has_key.
    """

    ResponseError = ResponseErrorFactory

    def __init__(self, *args, **kw):
        kw.setdefault('host', 'mws.amazonservices.com')
        # Merchant (Feeds/Reports) and SellerId (all newer sections) are
        # two names for the same account id; accept either and mirror
        # it onto the other.
        self.Merchant = kw.pop('Merchant', None) or kw.get('SellerId')
        self.SellerId = kw.pop('SellerId', None) or self.Merchant
        AWSQueryConnection.__init__(self, *args, **kw)

    def _required_auth_capability(self):
        # Selects the MWS request-signing implementation in boto.auth.
        return ['mws']

    def post_request(self, path, params, cls, body='', headers={}, isXML=True):
        """Make a POST request, optionally with a content body,
        and return the response, optionally as raw text.
        Modelled off of the inherited get_object/make_request flow.

        :param cls: response class instantiated to parse an XML body.
        :param isXML: when False, return the raw body after verifying
            its Content-MD5 header instead of parsing it.
        """
        # NOTE: the shared default headers dict is never mutated here;
        # http_body() always supplies a fresh dict when a body is sent.
        request = self.build_base_http_request('POST', path, None, data=body,
                                               params=params, headers=headers,
                                               host=self.server_name())
        response = self._mexe(request, override_num_retries=None)
        body = response.read()
        boto.log.debug(body)
        if not body:
            boto.log.error('Null body %s' % body)
            raise self.ResponseError(response.status, response.reason, body)
        if response.status != 200:
            boto.log.error('%s %s' % (response.status, response.reason))
            boto.log.error('%s' % body)
            raise self.ResponseError(response.status, response.reason, body)
        if not isXML:
            # Verify integrity of raw payloads against the server's
            # Content-MD5 header before returning the text.
            digest = response.getheader('Content-MD5')
            assert content_md5(body) == digest
            return body
        obj = cls(self)
        h = XmlHandler(obj, self)
        xml.sax.parseString(body, h)
        return obj

    @boolean_arguments('PurgeAndReplace')
    @http_body('FeedContent')
    @structured_lists('MarketplaceIdList.Id')
    @requires(['FeedType'])
    @api_action('Feeds', 15, 120)
    def submit_feed(self, path, response, headers={}, body='', **kw):
        """Uploads a feed for processing by Amazon MWS.
        """
        return self.post_request(path, kw, response, body=body,
                                 headers=headers)

    @structured_lists('FeedSubmissionIdList.Id', 'FeedTypeList.Type',
                      'FeedProcessingStatusList.Status')
    @api_action('Feeds', 10, 45)
    def get_feed_submission_list(self, path, response, **kw):
        """Returns a list of all feed submissions submitted in the
        previous 90 days.
        """
        return self.post_request(path, kw, response)

    @requires(['NextToken'])
    @api_action('Feeds', 0, 0)
    def get_feed_submission_list_by_next_token(self, path, response, **kw):
        """Returns a list of feed submissions using the NextToken parameter.
        """
        return self.post_request(path, kw, response)

    @structured_lists('FeedTypeList.Type', 'FeedProcessingStatusList.Status')
    @api_action('Feeds', 10, 45)
    def get_feed_submission_count(self, path, response, **kw):
        """Returns a count of the feeds submitted in the previous 90 days.
        """
        return self.post_request(path, kw, response)

    @structured_lists('FeedSubmissionIdList.Id', 'FeedTypeList.Type')
    @api_action('Feeds', 10, 45)
    def cancel_feed_submissions(self, path, response, **kw):
        """Cancels one or more feed submissions and returns a
        count of the feed submissions that were canceled.
        """
        return self.post_request(path, kw, response)

    @requires(['FeedSubmissionId'])
    @api_action('Feeds', 15, 60)
    def get_feed_submission_result(self, path, response, **kw):
        """Returns the feed processing report.
        """
        return self.post_request(path, kw, response, isXML=False)

    def get_service_status(self, **kw):
        """Instruct the user on how to get service status.
        """
        message = "Use {}.get_(section)_service_status(), " \
                  "where (section) is one of the following: " \
                  "{}".format(self.__class__.__name__,
                              ', '.join(map(str.lower, api_version_path.keys())))
        raise AttributeError(message)

    @structured_lists('MarketplaceIdList.Id')
    @boolean_arguments('ReportOptions=ShowSalesChannel')
    @requires(['ReportType'])
    @api_action('Reports', 15, 60)
    def request_report(self, path, response, **kw):
        """Creates a report request and submits the request to Amazon MWS.
        """
        return self.post_request(path, kw, response)

    @structured_lists('ReportRequestIdList.Id', 'ReportTypeList.Type',
                      'ReportProcessingStatusList.Status')
    @api_action('Reports', 10, 45)
    def get_report_request_list(self, path, response, **kw):
        """Returns a list of report requests that you can use to get the
        ReportRequestId for a report.
        """
        return self.post_request(path, kw, response)

    @requires(['NextToken'])
    @api_action('Reports', 0, 0)
    def get_report_request_list_by_next_token(self, path, response, **kw):
        """Returns a list of report requests using the NextToken,
        which was supplied by a previous request to either
        GetReportRequestListByNextToken or GetReportRequestList, where
        the value of HasNext was true in that previous request.
        """
        return self.post_request(path, kw, response)

    @structured_lists('ReportTypeList.Type',
                      'ReportProcessingStatusList.Status')
    @api_action('Reports', 10, 45)
    def get_report_request_count(self, path, response, **kw):
        """Returns a count of report requests that have been submitted
        to Amazon MWS for processing.
        """
        return self.post_request(path, kw, response)

    @api_action('Reports', 10, 45)
    def cancel_report_requests(self, path, response, **kw):
        """Cancel one or more report requests, returning the count of the
        canceled report requests and the report request information.
        """
        return self.post_request(path, kw, response)

    @boolean_arguments('Acknowledged')
    @structured_lists('ReportRequestIdList.Id', 'ReportTypeList.Type')
    @api_action('Reports', 10, 60)
    def get_report_list(self, path, response, **kw):
        """Returns a list of reports that were created in the previous
        90 days that match the query parameters.
        """
        return self.post_request(path, kw, response)

    @requires(['NextToken'])
    @api_action('Reports', 0, 0)
    def get_report_list_by_next_token(self, path, response, **kw):
        """Returns a list of reports using the NextToken, which
        was supplied by a previous request to either
        GetReportListByNextToken or GetReportList, where the
        value of HasNext was true in the previous call.
        """
        return self.post_request(path, kw, response)

    @boolean_arguments('Acknowledged')
    @structured_lists('ReportTypeList.Type')
    @api_action('Reports', 10, 45)
    def get_report_count(self, path, response, **kw):
        """Returns a count of the reports, created in the previous 90 days,
        with a status of _DONE_ and that are available for download.
        """
        return self.post_request(path, kw, response)

    # requires() takes groups (lists) of names; the bare string used
    # previously made the check iterate characters and always fail.
    @requires(['ReportId'])
    @api_action('Reports', 15, 60)
    def get_report(self, path, response, **kw):
        """Returns the contents of a report.
        """
        return self.post_request(path, kw, response, isXML=False)

    @requires(['ReportType', 'Schedule'])
    @api_action('Reports', 10, 45)
    def manage_report_schedule(self, path, response, **kw):
        """Creates, updates, or deletes a report request schedule for
        a specified report type.
        """
        return self.post_request(path, kw, response)

    @structured_lists('ReportTypeList.Type')
    @api_action('Reports', 10, 45)
    def get_report_schedule_list(self, path, response, **kw):
        """Returns a list of order report requests that are scheduled
        to be submitted to Amazon MWS for processing.
        """
        return self.post_request(path, kw, response)

    @requires(['NextToken'])
    @api_action('Reports', 0, 0)
    def get_report_schedule_list_by_next_token(self, path, response, **kw):
        """Returns a list of report requests using the NextToken,
        which was supplied by a previous request to either
        GetReportScheduleListByNextToken or GetReportScheduleList,
        where the value of HasNext was true in that previous request.
        """
        return self.post_request(path, kw, response)

    @structured_lists('ReportTypeList.Type')
    @api_action('Reports', 10, 45)
    def get_report_schedule_count(self, path, response, **kw):
        """Returns a count of order report requests that are scheduled
        to be submitted to Amazon MWS.
        """
        return self.post_request(path, kw, response)

    @boolean_arguments('Acknowledged')
    @requires(['ReportIdList'])
    @structured_lists('ReportIdList.Id')
    @api_action('Reports', 10, 45)
    def update_report_acknowledgements(self, path, response, **kw):
        """Updates the acknowledged status of one or more reports.
        """
        return self.post_request(path, kw, response)

    @requires(['ShipFromAddress', 'InboundShipmentPlanRequestItems'])
    @structured_objects('ShipFromAddress', 'InboundShipmentPlanRequestItems')
    @api_action('Inbound', 30, 0.5)
    def create_inbound_shipment_plan(self, path, response, **kw):
        """Returns the information required to create an inbound shipment.
        """
        return self.post_request(path, kw, response)

    @requires(['ShipmentId', 'InboundShipmentHeader', 'InboundShipmentItems'])
    @structured_objects('InboundShipmentHeader', 'InboundShipmentItems')
    @api_action('Inbound', 30, 0.5)
    def create_inbound_shipment(self, path, response, **kw):
        """Creates an inbound shipment.
        """
        return self.post_request(path, kw, response)

    @requires(['ShipmentId'])
    @structured_objects('InboundShipmentHeader', 'InboundShipmentItems')
    @api_action('Inbound', 30, 0.5)
    def update_inbound_shipment(self, path, response, **kw):
        """Updates an existing inbound shipment. Amazon documentation
        is ambiguous as to whether the InboundShipmentHeader and
        InboundShipmentItems arguments are required.
        """
        return self.post_request(path, kw, response)

    @requires_some_of('ShipmentIdList', 'ShipmentStatusList')
    @structured_lists('ShipmentIdList.Id', 'ShipmentStatusList.Status')
    @api_action('Inbound', 30, 0.5)
    def list_inbound_shipments(self, path, response, **kw):
        """Returns a list of inbound shipments based on criteria that
        you specify.
        """
        return self.post_request(path, kw, response)

    @requires(['NextToken'])
    @api_action('Inbound', 30, 0.5)
    def list_inbound_shipments_by_next_token(self, path, response, **kw):
        """Returns the next page of inbound shipments using the NextToken
        parameter.
        """
        return self.post_request(path, kw, response)

    @requires(['ShipmentId'], ['LastUpdatedAfter', 'LastUpdatedBefore'])
    @api_action('Inbound', 30, 0.5)
    def list_inbound_shipment_items(self, path, response, **kw):
        """Returns a list of items in a specified inbound shipment, or a
        list of items that were updated within a specified time frame.
        """
        return self.post_request(path, kw, response)

    @requires(['NextToken'])
    @api_action('Inbound', 30, 0.5)
    def list_inbound_shipment_items_by_next_token(self, path, response, **kw):
        """Returns the next page of inbound shipment items using the
        NextToken parameter.
        """
        return self.post_request(path, kw, response)

    @api_action('Inbound', 2, 300, 'GetServiceStatus')
    def get_inbound_service_status(self, path, response, **kw):
        """Returns the operational status of the Fulfillment Inbound
        Shipment API section.
        """
        return self.post_request(path, kw, response)

    @requires(['SellerSkus'], ['QueryStartDateTime'])
    @structured_lists('SellerSkus.member')
    @api_action('Inventory', 30, 0.5)
    def list_inventory_supply(self, path, response, **kw):
        """Returns information about the availability of a seller's
        inventory.
        """
        return self.post_request(path, kw, response)

    @requires(['NextToken'])
    @api_action('Inventory', 30, 0.5)
    def list_inventory_supply_by_next_token(self, path, response, **kw):
        """Returns the next page of information about the availability
        of a seller's inventory using the NextToken parameter.
        """
        return self.post_request(path, kw, response)

    @api_action('Inventory', 2, 300, 'GetServiceStatus')
    def get_inventory_service_status(self, path, response, **kw):
        """Returns the operational status of the Fulfillment Inventory
        API section.
        """
        return self.post_request(path, kw, response)

    @structured_objects('Address', 'Items')
    @requires(['Address', 'Items'])
    @api_action('Outbound', 30, 0.5)
    def get_fulfillment_preview(self, path, response, **kw):
        """Returns a list of fulfillment order previews based on items
        and shipping speed categories that you specify.
        """
        return self.post_request(path, kw, response)

    @structured_objects('Address', 'Items')
    @requires(['SellerFulfillmentOrderId', 'DisplayableOrderId',
               'ShippingSpeedCategory', 'DisplayableOrderDateTime',
               'DestinationAddress', 'DisplayableOrderComment'])
    @api_action('Outbound', 30, 0.5)
    def create_fulfillment_order(self, path, response, **kw):
        """Requests that Amazon ship items from the seller's inventory
        to a destination address.
        """
        return self.post_request(path, kw, response)

    @requires(['SellerFulfillmentOrderId'])
    @api_action('Outbound', 30, 0.5)
    def get_fulfillment_order(self, path, response, **kw):
        """Returns a fulfillment order based on a specified
        SellerFulfillmentOrderId.
        """
        return self.post_request(path, kw, response)

    @api_action('Outbound', 30, 0.5)
    def list_all_fulfillment_orders(self, path, response, **kw):
        """Returns a list of fulfillment orders fulfilled after (or
        at) a specified date or by fulfillment method.
        """
        return self.post_request(path, kw, response)

    @requires(['NextToken'])
    @api_action('Outbound', 30, 0.5)
    def list_all_fulfillment_orders_by_next_token(self, path, response, **kw):
        """Returns the next page of inbound shipment items using the
        NextToken parameter.
        """
        return self.post_request(path, kw, response)

    @requires(['SellerFulfillmentOrderId'])
    @api_action('Outbound', 30, 0.5)
    def cancel_fulfillment_order(self, path, response, **kw):
        """Requests that Amazon stop attempting to fulfill an existing
        fulfillment order.
        """
        return self.post_request(path, kw, response)

    @api_action('Outbound', 2, 300, 'GetServiceStatus')
    def get_outbound_service_status(self, path, response, **kw):
        """Returns the operational status of the Fulfillment Outbound
        API section.
        """
        return self.post_request(path, kw, response)

    @requires(['CreatedBefore', 'CreatedAfter'],
              ['LastUpdatedBefore', 'LastUpdatedAfter'])
    @requires(['MarketplaceId'])
    @structured_objects('OrderTotal', 'ShippingAddress',
                        'PaymentExecutionDetail')
    @structured_lists('MarketplaceId.Id', 'OrderStatus.Status',
                      'FulfillmentChannel.Channel', 'PaymentMethod.')
    @api_action('Orders', 6, 60)
    def list_orders(self, path, response, **kw):
        """Returns a list of orders created or updated during a time
        frame that you specify.
        """
        toggle = set(('FulfillmentChannel.Channel.1',
                      'OrderStatus.Status.1', 'PaymentMethod.1',
                      'LastUpdatedAfter', 'LastUpdatedBefore'))
        for do, dont in {
            'BuyerEmail': toggle.union(['SellerOrderId']),
            'SellerOrderId': toggle.union(['BuyerEmail']),
        }.items():
            # any() replaces filter(kw.has_key, ...), which is
            # Python-2-only and always truthy on Python 3.
            if do in kw and any(key in kw for key in dont):
                message = "Don't include {} when specifying " \
                          "{}".format(' or '.join(dont), do)
                raise AssertionError(message)
        return self.post_request(path, kw, response)

    @requires(['NextToken'])
    @api_action('Orders', 6, 60)
    def list_orders_by_next_token(self, path, response, **kw):
        """Returns the next page of orders using the NextToken value
        that was returned by your previous request to either
        ListOrders or ListOrdersByNextToken.
        """
        return self.post_request(path, kw, response)

    @requires(['AmazonOrderId'])
    @structured_lists('AmazonOrderId.Id')
    @api_action('Orders', 6, 60)
    def get_order(self, path, response, **kw):
        """Returns an order for each AmazonOrderId that you specify.
        """
        return self.post_request(path, kw, response)

    @requires(['AmazonOrderId'])
    @structured_lists('AmazonOrderId.Id')
    @api_action('Orders', 30, 2)
    def list_order_items(self, path, response, **kw):
        """Returns order item information for an AmazonOrderId that
        you specify.
        """
        return self.post_request(path, kw, response)

    @requires(['NextToken'])
    @api_action('Orders', 30, 2)
    def list_order_items_by_next_token(self, path, response, **kw):
        """Returns the next page of order items using the NextToken
        value that was returned by your previous request to either
        ListOrderItems or ListOrderItemsByNextToken.
        """
        return self.post_request(path, kw, response)

    @api_action('Orders', 2, 300, 'GetServiceStatus')
    def get_orders_service_status(self, path, response, **kw):
        """Returns the operational status of the Orders API section.
        """
        return self.post_request(path, kw, response)

    @requires(['MarketplaceId', 'Query'])
    @api_action('Products', 20, 20)
    def list_matching_products(self, path, response, **kw):
        """Returns a list of products and their attributes, ordered
        by relevancy, based on a search query that you specify.
        """
        return self.post_request(path, kw, response)

    @requires(['MarketplaceId', 'ASINList'])
    @structured_lists('ASINList.ASIN')
    @api_action('Products', 20, 20)
    def get_matching_product(self, path, response, **kw):
        """Returns a list of products and their attributes, based on
        a list of ASIN values that you specify.
        """
        return self.post_request(path, kw, response)

    @requires(['MarketplaceId', 'SellerSKUList'])
    @structured_lists('SellerSKUList.SellerSKU')
    @api_action('Products', 20, 10, 'GetCompetitivePricingForSKU')
    def get_competitive_pricing_for_sku(self, path, response, **kw):
        """Returns the current competitive pricing of a product,
        based on the SellerSKUs and MarketplaceId that you specify.
        """
        return self.post_request(path, kw, response)

    @requires(['MarketplaceId', 'ASINList'])
    @structured_lists('ASINList.ASIN')
    @api_action('Products', 20, 10, 'GetCompetitivePricingForASIN')
    def get_competitive_pricing_for_asin(self, path, response, **kw):
        """Returns the current competitive pricing of a product,
        based on the ASINs and MarketplaceId that you specify.
        """
        return self.post_request(path, kw, response)

    @requires(['MarketplaceId', 'SellerSKUList'])
    @structured_lists('SellerSKUList.SellerSKU')
    @api_action('Products', 20, 5, 'GetLowestOfferListingsForSKU')
    def get_lowest_offer_listings_for_sku(self, path, response, **kw):
        """Returns the lowest price offer listings for a specific
        product by item condition and SellerSKUs.
        """
        return self.post_request(path, kw, response)

    @requires(['MarketplaceId', 'ASINList'])
    @structured_lists('ASINList.ASIN')
    @api_action('Products', 20, 5, 'GetLowestOfferListingsForASIN')
    def get_lowest_offer_listings_for_asin(self, path, response, **kw):
        """Returns the lowest price offer listings for a specific
        product by item condition and ASINs.
        """
        return self.post_request(path, kw, response)

    @requires(['MarketplaceId', 'SellerSKU'])
    @api_action('Products', 20, 20, 'GetProductCategoriesForSKU')
    def get_product_categories_for_sku(self, path, response, **kw):
        """Returns the product categories that a SellerSKU belongs to.
        """
        return self.post_request(path, kw, response)

    @requires(['MarketplaceId', 'ASIN'])
    @api_action('Products', 20, 20, 'GetProductCategoriesForASIN')
    def get_product_categories_for_asin(self, path, response, **kw):
        """Returns the product categories that an ASIN belongs to.
        """
        return self.post_request(path, kw, response)

    @api_action('Products', 2, 300, 'GetServiceStatus')
    def get_products_service_status(self, path, response, **kw):
        """Returns the operational status of the Products API section.
        """
        return self.post_request(path, kw, response)

    @api_action('Sellers', 15, 60)
    def list_marketplace_participations(self, path, response, **kw):
        """Returns a list of marketplaces that the seller submitting
        the request can sell in, and a list of participations that
        include seller-specific information in that marketplace.
        """
        return self.post_request(path, kw, response)

    @requires(['NextToken'])
    @api_action('Sellers', 15, 60)
    def list_marketplace_participations_by_next_token(self, path, response,
                                                      **kw):
        """Returns the next page of marketplaces and participations
        using the NextToken value that was returned by your
        previous request to either ListMarketplaceParticipations
        or ListMarketplaceParticipationsByNextToken.
        """
        return self.post_request(path, kw, response)
diff --git a/boto/mws/exception.py b/boto/mws/exception.py
new file mode 100644
index 00000000..d2146bb7
--- /dev/null
+++ b/boto/mws/exception.py
@@ -0,0 +1,75 @@
+# Copyright (c) 2012 Andy Davidoff http://www.disruptek.com/
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish, dis-
+# tribute, sublicense, and/or sell copies of the Software, and to permit
+# persons to whom the Software is furnished to do so, subject to the fol-
+# lowing conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
+# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+from boto.exception import BotoServerError
+
+
class ResponseErrorFactory(BotoServerError):
    # Parse the server error once via BotoServerError, then re-dispatch
    # to the module-level ResponseError subclass named by the error
    # code (falling back to the generic ResponseError).

    def __new__(cls, *args, **kw):
        parsed = BotoServerError(*args, **kw)
        subclass = globals().get(parsed.error_code, ResponseError)
        instance = subclass.__new__(subclass, *args, **kw)
        instance.__dict__.update(parsed.__dict__)
        return instance
+
+
class ResponseError(BotoServerError):
    """
    Undefined response error.
    """
    # Whether the caller may usefully retry the request; subclasses
    # representing transient failures flip this to True.
    retry = False

    def __repr__(self):
        return '{}({}, {},\n\t{})'.format(self.__class__.__name__,
                                          self.status, self.reason,
                                          self.error_message)

    def __str__(self):
        # NOTE: self.__doc__ is rendered into the message, so the class
        # docstring (and each subclass's) is user-visible output, not
        # mere documentation -- do not reword it casually.
        return 'MWS Response Error: {0.status} {0.__class__.__name__} {1}\n' \
               '{2}\n' \
               '{0.error_message}'.format(self,
                                          self.retry and '(Retriable)' or '',
                                          self.__doc__.strip())
+
+
class RetriableResponseError(ResponseError):
    # Transient failure: callers may retry the request.  No docstring
    # on purpose -- ResponseError.__str__ renders self.__doc__, so
    # adding one here would change the rendered error message.
    retry = True
+
+
class InvalidParameterValue(ResponseError):
    """
    One or more parameter values in the request is invalid.
    """
    # NOTE: this docstring is rendered by ResponseError.__str__ and is
    # therefore part of the user-visible error text.


class InvalidParameter(ResponseError):
    """
    One or more parameters in the request is invalid.
    """
    # NOTE: docstring is user-visible via ResponseError.__str__.


class InvalidAddress(ResponseError):
    """
    Invalid address.
    """
    # NOTE: docstring is user-visible via ResponseError.__str__.
diff --git a/boto/mws/response.py b/boto/mws/response.py
new file mode 100644
index 00000000..606d7b61
--- /dev/null
+++ b/boto/mws/response.py
@@ -0,0 +1,633 @@
+# Copyright (c) 2012 Andy Davidoff http://www.disruptek.com/
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish, dis-
+# tribute, sublicense, and/or sell copies of the Software, and to permit
+# persons to whom the Software is furnished to do so, subject to the fol-
+# lowing conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
+# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+from decimal import Decimal
+
+
+class ComplexType(dict):
+ _value = 'Value'
+
+ def __repr__(self):
+ return '{}{}'.format(getattr(self, self._value, None), self.copy())
+
+ def __str__(self):
+ return str(getattr(self, self._value, ''))
+
+
+class DeclarativeType(object):
+ def __init__(self, _hint=None, **kw):
+ if _hint is not None:
+ self._hint = _hint
+ else:
+ class JITResponse(ResponseElement):
+ pass
+ self._hint = JITResponse
+ for name, value in kw.items():
+ setattr(self._hint, name, value)
+ self._value = None
+
+ def setup(self, parent, name, *args, **kw):
+ self._parent = parent
+ self._name = name
+ setattr(self._parent, self._name, self)
+
+ def start(self, *args, **kw):
+        raise NotImplementedError
+
+ def end(self, *args, **kw):
+        raise NotImplementedError
+
+ def teardown(self, *args, **kw):
+ if self._value is None or self._value == []:
+ delattr(self._parent, self._name)
+ else:
+ setattr(self._parent, self._name, self._value)
+
+
+class Element(DeclarativeType):
+ def start(self, *args, **kw):
+ self._value = self._hint(parent=self._parent, **kw)
+ return self._value
+
+ def end(self, *args, **kw):
+ pass
+
+
+class SimpleList(DeclarativeType):
+ def setup(self, *args, **kw):
+ DeclarativeType.setup(self, *args, **kw)
+ self._value = []
+
+ def start(self, *args, **kw):
+ return None
+
+ def end(self, name, value, *args, **kw):
+ self._value.append(value)
+
+
+class ElementList(SimpleList):
+ def start(self, *args, **kw):
+ value = self._hint(parent=self._parent, **kw)
+ self._value += [value]
+ return self._value[-1]
+
+ def end(self, *args, **kw):
+ pass
+
+
+class MemberList(ElementList):
+ def __init__(self, *args, **kw):
+ self._this = kw.get('this')
+ ElementList.__init__(self, *args, **kw)
+
+ def start(self, attrs={}, **kw):
+ Class = self._this or self._parent._type_for(self._name, attrs)
+ if issubclass(self._hint, ResponseElement):
+ ListClass = ElementList
+ else:
+ ListClass = SimpleList
+ setattr(Class, Class._member, ListClass(self._hint))
+ self._value = Class(attrs=attrs, parent=self._parent, **kw)
+ return self._value
+
+ def end(self, *args, **kw):
+ self._value = getattr(self._value, self._value._member)
+ ElementList.end(self, *args, **kw)
+
+
+def ResponseFactory(action):
+ result = globals().get(action + 'Result', ResponseElement)
+
+ class MWSResponse(Response):
+ _name = action + 'Response'
+
+ setattr(MWSResponse, action + 'Result', Element(result))
+ return MWSResponse
+
+
+def strip_namespace(func):
+ def wrapper(self, name, *args, **kw):
+ if self._namespace is not None:
+ if name.startswith(self._namespace + ':'):
+ name = name[len(self._namespace + ':'):]
+ return func(self, name, *args, **kw)
+ return wrapper
+
+
+class ResponseElement(dict):
+ _override = {}
+ _member = 'member'
+ _name = None
+ _namespace = None
+
+ def __init__(self, connection=None, name=None, parent=None, attrs={}):
+ if parent is not None and self._namespace is None:
+ self._namespace = parent._namespace
+ if connection is not None:
+ self._connection = connection
+ self._name = name or self._name or self.__class__.__name__
+ self._declared('setup', attrs=attrs)
+ dict.__init__(self, attrs.copy())
+
+ def _declared(self, op, **kw):
+ def inherit(obj):
+ result = {}
+ for cls in getattr(obj, '__bases__', ()):
+ result.update(inherit(cls))
+ result.update(obj.__dict__)
+ return result
+
+ scope = inherit(self.__class__)
+ scope.update(self.__dict__)
+ declared = lambda attr: isinstance(attr[1], DeclarativeType)
+ for name, node in filter(declared, scope.items()):
+ getattr(node, op)(self, name, parentname=self._name, **kw)
+
+ @property
+ def connection(self):
+ return self._connection
+
+ def __repr__(self):
+ render = lambda pair: '{!s}: {!r}'.format(*pair)
+ do_show = lambda pair: not pair[0].startswith('_')
+ attrs = filter(do_show, self.__dict__.items())
+ name = self.__class__.__name__
+ if name == 'JITResponse':
+ name = '^{}^'.format(self._name or '')
+ return '{}{!r}({})'.format(name,
+ self.copy(),
+ ', '.join(map(render, attrs)))
+
+ def _type_for(self, name, attrs):
+ return self._override.get(name, globals().get(name, ResponseElement))
+
+ @strip_namespace
+ def startElement(self, name, attrs, connection):
+ attribute = getattr(self, name, None)
+ if isinstance(attribute, DeclarativeType):
+ return attribute.start(name=name, attrs=attrs,
+ connection=connection)
+ elif attrs.getLength():
+ setattr(self, name, ComplexType(attrs.copy()))
+ else:
+ return None
+
+ @strip_namespace
+ def endElement(self, name, value, connection):
+ attribute = getattr(self, name, None)
+ if name == self._name:
+ self._declared('teardown')
+ elif isinstance(attribute, DeclarativeType):
+ attribute.end(name=name, value=value, connection=connection)
+ elif isinstance(attribute, ComplexType):
+ setattr(attribute, attribute._value, value)
+ else:
+ setattr(self, name, value)
+
+
+class Response(ResponseElement):
+ ResponseMetadata = Element()
+
+ @property
+ def _result(self):
+ return getattr(self, self._action + 'Result', None)
+
+ @property
+ def _action(self):
+ return (self._name or self.__class__.__name__)[:-len('Response')]
+
+
+class ResponseResultList(Response):
+ _ResultClass = ResponseElement
+
+ def __init__(self, *args, **kw):
+ setattr(self, self._action + 'Result', ElementList(self._ResultClass))
+ Response.__init__(self, *args, **kw)
+
+
+class FeedSubmissionInfo(ResponseElement):
+ pass
+
+
+class SubmitFeedResult(ResponseElement):
+ FeedSubmissionInfo = Element(FeedSubmissionInfo)
+
+
+class GetFeedSubmissionListResult(ResponseElement):
+ FeedSubmissionInfo = ElementList(FeedSubmissionInfo)
+
+
+class GetFeedSubmissionListByNextTokenResult(GetFeedSubmissionListResult):
+ pass
+
+
+class GetFeedSubmissionCountResult(ResponseElement):
+ pass
+
+
+class CancelFeedSubmissionsResult(GetFeedSubmissionListResult):
+ pass
+
+
+class GetServiceStatusResult(ResponseElement):
+ Messages = Element(Messages=ElementList())
+
+
+class ReportRequestInfo(ResponseElement):
+ pass
+
+
+class RequestReportResult(ResponseElement):
+ ReportRequestInfo = Element()
+
+
+class GetReportRequestListResult(RequestReportResult):
+ ReportRequestInfo = Element()
+
+
+class GetReportRequestListByNextTokenResult(GetReportRequestListResult):
+ pass
+
+
+class CancelReportRequestsResult(RequestReportResult):
+ pass
+
+
+class GetReportListResult(ResponseElement):
+ ReportInfo = Element()
+
+
+class GetReportListByNextTokenResult(GetReportListResult):
+ pass
+
+
+class ManageReportScheduleResult(ResponseElement):
+ ReportSchedule = Element()
+
+
+class GetReportScheduleListResult(ManageReportScheduleResult):
+ pass
+
+
+class GetReportScheduleListByNextTokenResult(GetReportScheduleListResult):
+ pass
+
+
+class UpdateReportAcknowledgementsResult(GetReportListResult):
+ pass
+
+
+class CreateInboundShipmentPlanResult(ResponseElement):
+ InboundShipmentPlans = MemberList(ShipToAddress=Element(),
+ Items=MemberList())
+
+
+class ListInboundShipmentsResult(ResponseElement):
+ ShipmentData = MemberList(Element(ShipFromAddress=Element()))
+
+
+class ListInboundShipmentsByNextTokenResult(ListInboundShipmentsResult):
+ pass
+
+
+class ListInboundShipmentItemsResult(ResponseElement):
+ ItemData = MemberList()
+
+
+class ListInboundShipmentItemsByNextTokenResult(ListInboundShipmentItemsResult):
+ pass
+
+
+class ListInventorySupplyResult(ResponseElement):
+ InventorySupplyList = MemberList(
+ EarliestAvailability=Element(),
+ SupplyDetail=MemberList(\
+            EarliestAvailableToPick=Element(),
+ LatestAvailableToPick=Element(),
+ )
+ )
+
+
+class ListInventorySupplyByNextTokenResult(ListInventorySupplyResult):
+ pass
+
+
+class ComplexAmount(ResponseElement):
+ _amount = 'Value'
+
+ def __repr__(self):
+ return '{} {}'.format(self.CurrencyCode, getattr(self, self._amount))
+
+ def __float__(self):
+ return float(getattr(self, self._amount))
+
+ def __str__(self):
+ return str(getattr(self, self._amount))
+
+ @strip_namespace
+ def startElement(self, name, attrs, connection):
+ if name not in ('CurrencyCode', self._amount):
+ message = 'Unrecognized tag {} in ComplexAmount'.format(name)
+ raise AssertionError(message)
+ return ResponseElement.startElement(self, name, attrs, connection)
+
+ @strip_namespace
+ def endElement(self, name, value, connection):
+ if name == self._amount:
+ value = Decimal(value)
+ ResponseElement.endElement(self, name, value, connection)
+
+
+class ComplexMoney(ComplexAmount):
+ _amount = 'Amount'
+
+
+class ComplexWeight(ResponseElement):
+ def __repr__(self):
+ return '{} {}'.format(self.Value, self.Unit)
+
+ def __float__(self):
+ return float(self.Value)
+
+ def __str__(self):
+ return str(self.Value)
+
+ @strip_namespace
+ def startElement(self, name, attrs, connection):
+ if name not in ('Unit', 'Value'):
+ message = 'Unrecognized tag {} in ComplexWeight'.format(name)
+ raise AssertionError(message)
+ return ResponseElement.startElement(self, name, attrs, connection)
+
+ @strip_namespace
+ def endElement(self, name, value, connection):
+ if name == 'Value':
+ value = Decimal(value)
+ ResponseElement.endElement(self, name, value, connection)
+
+
+class Dimension(ComplexType):
+ _value = 'Value'
+
+
+class ComplexDimensions(ResponseElement):
+ _dimensions = ('Height', 'Length', 'Width', 'Weight')
+
+ def __repr__(self):
+ values = [getattr(self, key, None) for key in self._dimensions]
+ values = filter(None, values)
+ return 'x'.join(map('{0.Value:0.2f}{0[Units]}'.format, values))
+
+ @strip_namespace
+ def startElement(self, name, attrs, connection):
+ if name not in self._dimensions:
+ message = 'Unrecognized tag {} in ComplexDimensions'.format(name)
+ raise AssertionError(message)
+ setattr(self, name, Dimension(attrs.copy()))
+
+ @strip_namespace
+ def endElement(self, name, value, connection):
+ if name in self._dimensions:
+ value = Decimal(value or '0')
+ ResponseElement.endElement(self, name, value, connection)
+
+
+class FulfillmentPreviewItem(ResponseElement):
+ EstimatedShippingWeight = Element(ComplexWeight)
+
+
+class FulfillmentPreview(ResponseElement):
+ EstimatedShippingWeight = Element(ComplexWeight)
+ EstimatedFees = MemberList(\
+ Element(\
+ Amount=Element(ComplexAmount),
+ ),
+ )
+ UnfulfillablePreviewItems = MemberList(FulfillmentPreviewItem)
+ FulfillmentPreviewShipments = MemberList(\
+ FulfillmentPreviewItems=MemberList(FulfillmentPreviewItem),
+ )
+
+
+class GetFulfillmentPreviewResult(ResponseElement):
+ FulfillmentPreviews = MemberList(FulfillmentPreview)
+
+
+class FulfillmentOrder(ResponseElement):
+ DestinationAddress = Element()
+ NotificationEmailList = MemberList(str)
+
+
+class GetFulfillmentOrderResult(ResponseElement):
+ FulfillmentOrder = Element(FulfillmentOrder)
+ FulfillmentShipment = MemberList(Element(\
+ FulfillmentShipmentItem=MemberList(),
+ FulfillmentShipmentPackage=MemberList(),
+ )
+ )
+ FulfillmentOrderItem = MemberList()
+
+
+class ListAllFulfillmentOrdersResult(ResponseElement):
+ FulfillmentOrders = MemberList(FulfillmentOrder)
+
+
+class ListAllFulfillmentOrdersByNextTokenResult(ListAllFulfillmentOrdersResult):
+ pass
+
+
+class Image(ResponseElement):
+ pass
+
+
+class AttributeSet(ResponseElement):
+ ItemDimensions = Element(ComplexDimensions)
+ ListPrice = Element(ComplexMoney)
+ PackageDimensions = Element(ComplexDimensions)
+ SmallImage = Element(Image)
+
+
+class ItemAttributes(AttributeSet):
+ Languages = Element(Language=ElementList())
+
+ def __init__(self, *args, **kw):
+ names = ('Actor', 'Artist', 'Author', 'Creator', 'Director',
+ 'Feature', 'Format', 'GemType', 'MaterialType',
+ 'MediaType', 'OperatingSystem', 'Platform')
+ for name in names:
+ setattr(self, name, SimpleList())
+ AttributeSet.__init__(self, *args, **kw)
+
+
+class VariationRelationship(ResponseElement):
+ Identifiers = Element(MarketplaceASIN=Element(),
+ SKUIdentifier=Element())
+ GemType = SimpleList()
+ MaterialType = SimpleList()
+ OperatingSystem = SimpleList()
+
+
+class Price(ResponseElement):
+ LandedPrice = Element(ComplexMoney)
+ ListingPrice = Element(ComplexMoney)
+ Shipping = Element(ComplexMoney)
+
+
+class CompetitivePrice(ResponseElement):
+ Price = Element(Price)
+
+
+class CompetitivePriceList(ResponseElement):
+ CompetitivePrice = ElementList(CompetitivePrice)
+
+
+class CompetitivePricing(ResponseElement):
+ CompetitivePrices = Element(CompetitivePriceList)
+ NumberOfOfferListings = SimpleList()
+ TradeInValue = Element(ComplexMoney)
+
+
+class SalesRank(ResponseElement):
+ pass
+
+
+class LowestOfferListing(ResponseElement):
+ Qualifiers = Element(ShippingTime=Element())
+ Price = Element(Price)
+
+
+class Product(ResponseElement):
+ _namespace = 'ns2'
+ Identifiers = Element(MarketplaceASIN=Element(),
+ SKUIdentifier=Element())
+ AttributeSets = Element(\
+ ItemAttributes=ElementList(ItemAttributes),
+ )
+ Relationships = Element(\
+ VariationParent=ElementList(VariationRelationship),
+ )
+ CompetitivePricing = ElementList(CompetitivePricing)
+ SalesRankings = ElementList(SalesRank)
+ LowestOfferListings = Element(\
+ LowestOfferListing=ElementList(LowestOfferListing),
+ )
+
+
+class ListMatchingProductsResult(ResponseElement):
+ Products = Element(Product=ElementList(Product))
+
+
+class ProductsBulkOperationResult(ResponseElement):
+ Product = Element(Product)
+ Error = Element()
+
+
+class ProductsBulkOperationResponse(ResponseResultList):
+ _ResultClass = ProductsBulkOperationResult
+
+
+class GetMatchingProductResponse(ProductsBulkOperationResponse):
+ pass
+
+
+class GetCompetitivePricingForSKUResponse(ProductsBulkOperationResponse):
+ pass
+
+
+class GetCompetitivePricingForASINResponse(ProductsBulkOperationResponse):
+ pass
+
+
+class GetLowestOfferListingsForSKUResponse(ProductsBulkOperationResponse):
+ pass
+
+
+class GetLowestOfferListingsForASINResponse(ProductsBulkOperationResponse):
+ pass
+
+
+class ProductCategory(ResponseElement):
+
+ def __init__(self, *args, **kw):
+ setattr(self, 'Parent', Element(ProductCategory))
+ ResponseElement.__init__(self, *args, **kw)
+
+
+class GetProductCategoriesResult(ResponseElement):
+ Self = Element(ProductCategory)
+
+
+class GetProductCategoriesForSKUResult(GetProductCategoriesResult):
+ pass
+
+
+class GetProductCategoriesForASINResult(GetProductCategoriesResult):
+ pass
+
+
+class Order(ResponseElement):
+ OrderTotal = Element(ComplexAmount)
+ ShippingAddress = Element()
+ PaymentExecutionDetail = Element(\
+ PaymentExecutionDetailItem=ElementList(\
+ PaymentExecutionDetailItem=Element(\
+ Payment=Element(ComplexAmount)
+ )
+ )
+ )
+
+
+class ListOrdersResult(ResponseElement):
+ Orders = Element(Order=ElementList(Order))
+
+
+class ListOrdersByNextTokenResult(ListOrdersResult):
+ pass
+
+
+class GetOrderResult(ListOrdersResult):
+ pass
+
+
+class OrderItem(ResponseElement):
+ ItemPrice = Element(ComplexAmount)
+ ShippingPrice = Element(ComplexAmount)
+ GiftWrapPrice = Element(ComplexAmount)
+ ItemTax = Element(ComplexAmount)
+ ShippingTax = Element(ComplexAmount)
+ GiftWrapTax = Element(ComplexAmount)
+ ShippingDiscount = Element(ComplexAmount)
+ PromotionDiscount = Element(ComplexAmount)
+ PromotionIds = SimpleList()
+ CODFee = Element(ComplexAmount)
+ CODFeeDiscount = Element(ComplexAmount)
+
+
+class ListOrderItemsResult(ResponseElement):
+ OrderItems = Element(OrderItem=ElementList(OrderItem))
+
+
+class ListMarketplaceParticipationsResult(ResponseElement):
+ ListParticipations = Element(Participation=ElementList())
+ ListMarketplaces = Element(Marketplace=ElementList())
+
+
+class ListMarketplaceParticipationsByNextTokenResult(ListMarketplaceParticipationsResult):
+ pass
diff --git a/boto/rds/parametergroup.py b/boto/rds/parametergroup.py
index 44d00e22..2c3bf27a 100644
--- a/boto/rds/parametergroup.py
+++ b/boto/rds/parametergroup.py
@@ -127,7 +127,7 @@ class Parameter(object):
prefix = 'Parameters.member.%d.' % i
if self.name:
d[prefix+'ParameterName'] = self.name
- if self._value:
+ if self._value is not None:
d[prefix+'ParameterValue'] = self._value
if self.apply_type:
d[prefix+'ApplyMethod'] = self.apply_method
diff --git a/boto/resultset.py b/boto/resultset.py
index 954160c5..6f95d791 100644
--- a/boto/resultset.py
+++ b/boto/resultset.py
@@ -19,6 +19,8 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
+from boto.s3.user import User
+
class ResultSet(list):
"""
The ResultSet is used to pass results back from the Amazon services
@@ -50,6 +52,7 @@ class ResultSet(list):
self.key_marker = None
self.next_marker = None # avail when delimiter used
self.next_key_marker = None
+ self.next_upload_id_marker = None
self.next_version_id_marker = None
self.version_id_marker = None
self.is_truncated = False
@@ -62,6 +65,12 @@ class ResultSet(list):
obj = t[1](connection)
self.append(obj)
return obj
+ if name == 'Owner':
+ # Makes owner available for get_service and
+ # perhaps other lists where not handled by
+ # another element.
+ self.owner = User()
+ return self.owner
return None
def to_boolean(self, value, true_value='true'):
diff --git a/boto/route53/connection.py b/boto/route53/connection.py
index 7c3f1b88..a0054d2b 100644
--- a/boto/route53/connection.py
+++ b/boto/route53/connection.py
@@ -48,10 +48,10 @@ class Route53Connection(AWSAuthConnection):
DefaultHost = 'route53.amazonaws.com'
"""The default Route53 API endpoint to connect to."""
- Version = '2011-05-05'
+ Version = '2012-02-29'
"""Route53 API version."""
- XMLNameSpace = 'https://route53.amazonaws.com/doc/2011-05-05/'
+ XMLNameSpace = 'https://route53.amazonaws.com/doc/2012-02-29/'
"""XML schema for this Route53 API version."""
def __init__(self, aws_access_key_id=None, aws_secret_access_key=None,
@@ -129,6 +129,23 @@ class Route53Connection(AWSAuthConnection):
h.parse(body)
return e
+ def get_hosted_zone_by_name(self, hosted_zone_name):
+ """
+ Get detailed information about a particular Hosted Zone.
+
+ :type hosted_zone_name: str
+ :param hosted_zone_name: The fully qualified domain name for the Hosted
+ Zone
+
+ """
+ if hosted_zone_name[-1] != '.':
+ hosted_zone_name += '.'
+ all_hosted_zones = self.get_all_hosted_zones()
+ for zone in all_hosted_zones['ListHostedZonesResponse']['HostedZones']:
+ #check that they gave us the FQDN for their zone
+ if zone['Name'] == hosted_zone_name:
+ return self.get_hosted_zone(zone['Id'].split('/')[-1])
+
def create_hosted_zone(self, domain_name, caller_ref=None, comment=''):
"""
Create a new Hosted Zone. Returns a Python data structure with
diff --git a/boto/route53/record.py b/boto/route53/record.py
index 629258a4..76bbfab5 100644
--- a/boto/route53/record.py
+++ b/boto/route53/record.py
@@ -1,4 +1,6 @@
# Copyright (c) 2010 Chris Moyer http://coredumped.org/
+# Copyright (c) 2012 Mitch Garnaat http://garnaat.org/
+# Copyright (c) 2012 Amazon.com, Inc. or its affiliates.
# All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
@@ -33,7 +35,7 @@ class ResourceRecordSets(ResultSet):
"""
ChangeResourceRecordSetsBody = """<?xml version="1.0" encoding="UTF-8"?>
- <ChangeResourceRecordSetsRequest xmlns="https://route53.amazonaws.com/doc/2011-05-05/">
+ <ChangeResourceRecordSetsRequest xmlns="https://route53.amazonaws.com/doc/2012-02-29/">
<ChangeBatch>
<Comment>%(comment)s</Comment>
<Changes>%(changes)s</Changes>
@@ -59,7 +61,7 @@ class ResourceRecordSets(ResultSet):
def add_change(self, action, name, type, ttl=600,
alias_hosted_zone_id=None, alias_dns_name=None, identifier=None,
- weight=None):
+ weight=None, region=None):
"""
Add a change request to the set.
@@ -96,8 +98,8 @@ class ResourceRecordSets(ResultSet):
Information about the domain to which you are redirecting traffic.
:type identifier: str
- :param identifier: *Weighted resource record sets only* An
- identifier that differentiates among multiple resource
+ :param identifier: *Weighted and latency-based resource record sets
+ only* An identifier that differentiates among multiple resource
record sets that have the same combination of DNS name and type.
:type weight: int
@@ -105,11 +107,17 @@ class ResourceRecordSets(ResultSet):
record sets that have the same combination of DNS name and type,
a value that determines what portion of traffic for the current
resource record set is routed to the associated location
+
+ :type region: str
+ :param region: *Latency-based resource record sets only* Among resource
+ record sets that have the same combination of DNS name and type,
+ a value that determines which region this should be associated with
+ for the latency-based routing
"""
change = Record(name, type, ttl,
alias_hosted_zone_id=alias_hosted_zone_id,
alias_dns_name=alias_dns_name, identifier=identifier,
- weight=weight)
+                        weight=weight, region=region)
self.changes.append([action, change])
return change
@@ -169,6 +177,11 @@ class Record(object):
<Weight>%(weight)s</Weight>
"""
+ RRRBody = """
+ <SetIdentifier>%(identifier)s</SetIdentifier>
+ <Region>%(region)s</Region>
+ """
+
ResourceRecordsBody = """
<TTL>%(ttl)s</TTL>
<ResourceRecords>
@@ -188,7 +201,7 @@ class Record(object):
def __init__(self, name=None, type=None, ttl=600, resource_records=None,
alias_hosted_zone_id=None, alias_dns_name=None, identifier=None,
- weight=None):
+ weight=None, region=None):
self.name = name
self.type = type
self.ttl = ttl
@@ -199,6 +212,7 @@ class Record(object):
self.alias_dns_name = alias_dns_name
self.identifier = identifier
self.weight = weight
+ self.region = region
def add_value(self, value):
"""Add a resource record value"""
@@ -227,6 +241,10 @@ class Record(object):
if self.identifier != None and self.weight != None:
weight = self.WRRBody % {"identifier": self.identifier, "weight":
self.weight}
+ elif self.identifier != None and self.region != None:
+ weight = self.RRRBody % {"identifier": self.identifier, "region":
+ self.region}
+
params = {
"name": self.name,
"type": self.type,
@@ -266,6 +284,8 @@ class Record(object):
self.identifier = value
elif name == 'Weight':
self.weight = value
+ elif name == 'Region':
+ self.region = value
def startElement(self, name, attrs, connection):
return None
diff --git a/boto/s3/bucket.py b/boto/s3/bucket.py
index 3f7f7178..dae32231 100644
--- a/boto/s3/bucket.py
+++ b/boto/s3/bucket.py
@@ -142,7 +142,7 @@ class Bucket(object):
"""
return self.get_key(key_name, headers=headers)
- def get_key(self, key_name, headers=None, version_id=None):
+ def get_key(self, key_name, headers=None, version_id=None, response_headers=None):
"""
Check to see if a particular key exists within the bucket. This
method uses a HEAD request to check for the existance of the key.
@@ -150,12 +150,24 @@ class Bucket(object):
:type key_name: string
:param key_name: The name of the key to retrieve
+
+ :type response_headers: dict
+ :param response_headers: A dictionary containing HTTP headers/values
+ that will override any headers associated with
+ the stored object in the response.
+ See http://goo.gl/EWOPb for details.
:rtype: :class:`boto.s3.key.Key`
:returns: A Key object from this bucket.
"""
+ query_args = []
if version_id:
- query_args = 'versionId=%s' % version_id
+ query_args.append('versionId=%s' % version_id)
+ if response_headers:
+ for rk,rv in response_headers.iteritems():
+ query_args.append('%s=%s' % (rk, urllib.quote(rv)))
+ if query_args:
+ query_args = '&'.join(query_args)
else:
query_args = None
response = self.connection.make_request('HEAD', self.name, key_name,
@@ -566,6 +578,11 @@ class Bucket(object):
This value is required anytime you are
deleting versioned objects from a bucket
that has the MFADelete option on the bucket.
+
+ :rtype: :class:`boto.s3.key.Key` or subclass
+ :returns: A key object holding information on what was deleted.
+ The Caller can see if a delete_marker was created or
+ removed and what version_id the delete created or removed.
"""
provider = self.connection.provider
if version_id:
@@ -583,6 +600,12 @@ class Bucket(object):
if response.status != 204:
raise provider.storage_response_error(response.status,
response.reason, body)
+ else:
+ # return a key object with information on what was deleted.
+ k = self.key_class(self)
+ k.name = key_name
+ k.handle_version_headers(response)
+ return k
def copy_key(self, new_key_name, src_bucket_name,
src_key_name, metadata=None, src_version_id=None,
@@ -1324,7 +1347,8 @@ class Bucket(object):
def initiate_multipart_upload(self, key_name, headers=None,
reduced_redundancy=False,
- metadata=None, encrypt_key=False):
+ metadata=None, encrypt_key=False,
+ policy=None):
"""
Start a multipart upload operation.
@@ -1355,11 +1379,16 @@ class Bucket(object):
be encrypted on the server-side by S3 and
will be stored in an encrypted form while
at rest in S3.
+
+ :type policy: :class:`boto.s3.acl.CannedACLStrings`
+ :param policy: A canned ACL policy that will be applied to the
+ new key (once completed) in S3.
"""
query_args = 'uploads'
provider = self.connection.provider
- if headers is None:
- headers = {}
+ headers = headers or {}
+ if policy:
+ headers[provider.acl_header] = policy
if reduced_redundancy:
storage_class_header = provider.storage_class_header
if storage_class_header:
@@ -1411,6 +1440,14 @@ class Bucket(object):
resp = CompleteMultiPartUpload(self)
h = handler.XmlHandler(resp, self)
xml.sax.parseString(body, h)
+ # Use a dummy key to parse various response headers
+ # for versioning, encryption info and then explicitly
+ # set the completed MPU object values from key.
+ k = self.key_class(self)
+ k.handle_version_headers(response)
+ k.handle_encryption_headers(response)
+ resp.version_id = k.version_id
+ resp.encrypted = k.encrypted
return resp
else:
raise self.connection.provider.storage_response_error(
diff --git a/boto/s3/connection.py b/boto/s3/connection.py
index 211f344e..36edaeda 100644
--- a/boto/s3/connection.py
+++ b/boto/s3/connection.py
@@ -293,25 +293,25 @@ class S3Connection(AWSAuthConnection):
def generate_url(self, expires_in, method, bucket='', key='', headers=None,
query_auth=True, force_http=False, response_headers=None,
- expires_in_absolute=False):
- if not headers:
- headers = {}
+ expires_in_absolute=False, version_id=None):
+ headers = headers or {}
if expires_in_absolute:
expires = int(expires_in)
else:
expires = int(time.time() + expires_in)
auth_path = self.calling_format.build_auth_path(bucket, key)
auth_path = self.get_path(auth_path)
- # Arguments to override response headers become part of the canonical
- # string to be signed.
+ # optional version_id and response_headers need to be added to
+ # the query param list.
+ extra_qp = []
+ if version_id is not None:
+ extra_qp.append("versionId=%s" % version_id)
if response_headers:
- response_hdrs = ["%s=%s" % (k, v) for k, v in
- response_headers.items()]
+ for k, v in response_headers.items():
+ extra_qp.append("%s=%s" % (k, urllib.quote(v)))
+ if extra_qp:
delimiter = '?' if '?' not in auth_path else '&'
- auth_path = "%s%s" % (auth_path, delimiter)
- auth_path += '&'.join(response_hdrs)
- else:
- response_headers = {}
+ auth_path += delimiter + '&'.join(extra_qp)
c_string = boto.utils.canonical_string(method, auth_path, headers,
expires, self.provider)
b64_hmac = self._auth_handler.sign_string(c_string)
@@ -320,14 +320,18 @@ class S3Connection(AWSAuthConnection):
if query_auth:
query_part = '?' + self.QueryString % (encoded_canonical, expires,
self.aws_access_key_id)
- # The response headers must also be GET parameters in the URL.
- headers.update(response_headers)
- hdrs = ['%s=%s'%(n, urllib.quote(v)) for n, v in headers.items()]
- q_str = '&'.join(hdrs)
- if q_str:
- query_part += '&' + q_str
else:
query_part = ''
+ if headers:
+ hdr_prefix = self.provider.header_prefix
+ for k, v in headers.items():
+ if k.startswith(hdr_prefix):
+ # headers used for sig generation must be
+ # included in the url also.
+ extra_qp.append("%s=%s" % (k, urllib.quote(v)))
+ if extra_qp:
+ delimiter = '?' if not query_part else '&'
+ query_part += delimiter + '&'.join(extra_qp)
if force_http:
protocol = 'http'
port = 80
@@ -408,8 +412,8 @@ class S3Connection(AWSAuthConnection):
if location == Location.DEFAULT:
data = ''
else:
- data = '<CreateBucketConstraint><LocationConstraint>' + \
- location + '</LocationConstraint></CreateBucketConstraint>'
+ data = '<CreateBucketConfiguration><LocationConstraint>' + \
+ location + '</LocationConstraint></CreateBucketConfiguration>'
response = self.make_request('PUT', bucket_name, headers=headers,
data=data)
body = response.read()
diff --git a/boto/s3/key.py b/boto/s3/key.py
index 75a18564..71c3b91f 100644
--- a/boto/s3/key.py
+++ b/boto/s3/key.py
@@ -430,7 +430,8 @@ class Key(object):
def generate_url(self, expires_in, method='GET', headers=None,
query_auth=True, force_http=False, response_headers=None,
- expires_in_absolute=False):
+ expires_in_absolute=False, version_id=None,
+ policy=None, reduced_redundancy=False, encrypt_key=False):
"""
Generate a URL to access this key.
@@ -447,15 +448,67 @@ class Key(object):
:type query_auth: bool
:param query_auth:
+ :type force_http: bool
+ :param force_http: If True, http will be used instead of https.
+
+ :type response_headers: dict
+ :param response_headers: A dictionary containing HTTP headers/values
+ that will override any headers associated with
+ the stored object in the response.
+ See http://goo.gl/EWOPb for details.
+
+ :type expires_in_absolute: bool
+ :param expires_in_absolute:
+
+ :type version_id: string
+ :param version_id: The version_id of the object to GET. If specified
+ this overrides any value in the key.
+
+ :type policy: :class:`boto.s3.acl.CannedACLStrings`
+ :param policy: A canned ACL policy that will be applied to the
+ new key in S3.
+
+ :type reduced_redundancy: bool
+ :param reduced_redundancy: If True, this will set the storage
+ class of the new Key to be
+ REDUCED_REDUNDANCY. The Reduced Redundancy
+ Storage (RRS) feature of S3, provides lower
+ redundancy at lower storage cost.
+
+ :type encrypt_key: bool
+ :param encrypt_key: If True, the new copy of the object will
+ be encrypted on the server-side by S3 and
+ will be stored in an encrypted form while
+ at rest in S3.
+
:rtype: string
:return: The URL to access the key
"""
+ provider = self.bucket.connection.provider
+ version_id = version_id or self.version_id
+ if headers is None:
+ headers = {}
+ else:
+ headers = headers.copy()
+
+ # add headers accordingly (usually PUT case)
+ if policy:
+ headers[provider.acl_header] = policy
+ if reduced_redundancy:
+ self.storage_class = 'REDUCED_REDUNDANCY'
+ if provider.storage_class_header:
+ headers[provider.storage_class_header] = self.storage_class
+ if encrypt_key:
+ headers[provider.server_side_encryption_header] = 'AES256'
+ headers = boto.utils.merge_meta(headers, self.metadata, provider)
+
return self.bucket.connection.generate_url(expires_in, method,
self.bucket.name, self.name,
headers, query_auth,
force_http,
response_headers,
- expires_in_absolute)
+ expires_in_absolute,
+ version_id)
def send_file(self, fp, headers=None, cb=None, num_cb=10,
query_args=None, chunked_transfer=False, size=None):
@@ -521,8 +574,9 @@ class Key(object):
for key in headers:
http_conn.putheader(key, headers[key])
http_conn.endheaders()
- if chunked_transfer and not self.base64md5:
- # MD5 for the stream has to be calculated on the fly.
+
+ # Calculate all MD5 checksums on the fly, if not already computed
+ if not self.base64md5:
m = md5()
else:
m = None
@@ -591,12 +645,14 @@ class Key(object):
chunk = fp.read(self.BufferSize)
self.size = data_len
+
+ if m:
+ # Use the chunked trailer for the digest
+ hd = m.hexdigest()
+ self.md5, self.base64md5 = self.get_md5_from_hexdigest(hd)
+
if chunked_transfer:
http_conn.send('0\r\n')
- if m:
- # Use the chunked trailer for the digest
- hd = m.hexdigest()
- self.md5, self.base64md5 = self.get_md5_from_hexdigest(hd)
# http_conn.send("Content-MD5: %s\r\n" % self.base64md5)
http_conn.send('\r\n')
@@ -781,11 +837,12 @@ class Key(object):
def set_contents_from_file(self, fp, headers=None, replace=True,
cb=None, num_cb=10, policy=None, md5=None,
reduced_redundancy=False, query_args=None,
- encrypt_key=False, size=None):
+ encrypt_key=False, size=None, rewind=False):
"""
Store an object in S3 using the name of the Key object as the
key in S3 and the contents of the file pointed to by 'fp' as the
- contents.
+ contents. The data is read from 'fp' from its current position until
+ 'size' bytes have been read or EOF.
:type fp: file
:param fp: the file whose contents to upload
@@ -850,6 +907,15 @@ class Key(object):
file up into different ranges to be uploaded. If not
specified, the default behaviour is to read all bytes
from the file pointer. Less bytes may be available.
+
+ :type rewind: bool
+ :param rewind: (optional) If True, the file pointer (fp) will be
+ rewound to the start before any bytes are read from
+ it. The default behaviour is False which reads from
+ the current position of the file pointer (fp).
+
+ :rtype: int
+ :return: The number of bytes written to the key.
"""
provider = self.bucket.connection.provider
headers = headers or {}
@@ -858,6 +924,23 @@ class Key(object):
if encrypt_key:
headers[provider.server_side_encryption_header] = 'AES256'
+ if rewind:
+ # caller requests reading from beginning of fp.
+ fp.seek(0, os.SEEK_SET)
+ else:
+ spos = fp.tell()
+ fp.seek(0, os.SEEK_END)
+ if fp.tell() == spos:
+ fp.seek(0, os.SEEK_SET)
+ if fp.tell() != spos:
+ # Raise an exception as this is likely a programming error
+ # whereby there is data before the fp but nothing after it.
+ fp.seek(spos)
+ raise AttributeError(
+ 'fp is at EOF. Use rewind option or seek() to data start.')
+ # seek back to the correct position.
+ fp.seek(spos)
+
if reduced_redundancy:
self.storage_class = 'REDUCED_REDUNDANCY'
if provider.storage_class_header:
@@ -903,6 +986,8 @@ class Key(object):
self.send_file(fp, headers=headers, cb=cb, num_cb=num_cb,
query_args=query_args, chunked_transfer=chunked_transfer,
size=size)
+ # return number of bytes written.
+ return self.size
def set_contents_from_filename(self, filename, headers=None, replace=True,
cb=None, num_cb=10, policy=None, md5=None,
diff --git a/boto/s3/multipart.py b/boto/s3/multipart.py
index a2930333..9b624302 100644
--- a/boto/s3/multipart.py
+++ b/boto/s3/multipart.py
@@ -35,6 +35,8 @@ class CompleteMultiPartUpload(object):
is contained
* key_name - The name of the new, completed key
* etag - The MD5 hash of the completed, combined upload
+ * version_id - The version_id of the completed upload
+ * encrypted - The value of the encryption header
"""
def __init__(self, bucket=None):
@@ -43,6 +45,8 @@ class CompleteMultiPartUpload(object):
self.bucket_name = None
self.key_name = None
self.etag = None
+ self.version_id = None
+ self.encrypted = None
def __repr__(self):
return '<CompleteMultiPartUpload: %s.%s>' % (self.bucket_name,
diff --git a/boto/sqs/batchresults.py b/boto/sqs/batchresults.py
index 29ef6d66..aa5f86b8 100644
--- a/boto/sqs/batchresults.py
+++ b/boto/sqs/batchresults.py
@@ -79,13 +79,13 @@ class BatchResults(object):
self.errors = []
def startElement(self, name, attrs, connection):
- if name == 'SendMessageBatchResultEntry':
+ if name.endswith('MessageBatchResultEntry'):
entry = ResultEntry()
self.results.append(entry)
return entry
if name == 'BatchResultErrorEntry':
entry = ResultEntry()
- self.results.errors(entry)
+ self.errors.append(entry)
return entry
return None
diff --git a/boto/sqs/connection.py b/boto/sqs/connection.py
index 7e055dd5..773b2e60 100644
--- a/boto/sqs/connection.py
+++ b/boto/sqs/connection.py
@@ -64,21 +64,20 @@ class SQSConnection(AWSQueryConnection):
Create an SQS Queue.
:type queue_name: str or unicode
- :param queue_name: The name of the new queue. Names are scoped to
- an account and need to be unique within that
- account. Calling this method on an existing
- queue name will not return an error from SQS
- unless the value for visibility_timeout is
- different than the value of the existing queue
- of that name. This is still an expensive operation,
- though, and not the preferred way to check for
- the existence of a queue. See the
- :func:`boto.sqs.connection.SQSConnection.lookup` method.
+ :param queue_name: The name of the new queue. Names are
+ scoped to an account and need to be unique within that
+ account. Calling this method on an existing queue name
+ will not return an error from SQS unless the value for
+ visibility_timeout is different than the value of the
+ existing queue of that name. This is still an expensive
+ operation, though, and not the preferred way to check for
+ the existence of a queue. See the
+ :func:`boto.sqs.connection.SQSConnection.lookup` method.
:type visibility_timeout: int
- :param visibility_timeout: The default visibility timeout for all
- messages written in the queue. This can
- be overridden on a per-message.
+ :param visibility_timeout: The default visibility timeout for
+ all messages written in the queue. This can be overridden
+            on a per-message basis.
:rtype: :class:`boto.sqs.queue.Queue`
:return: The newly created queue.
@@ -98,13 +97,12 @@ class SQSConnection(AWSQueryConnection):
:param queue: The SQS queue to be deleted
:type force_deletion: Boolean
- :param force_deletion: Normally, SQS will not delete a queue that
- contains messages. However, if the
- force_deletion argument is True, the
- queue will be deleted regardless of whether
- there are messages in the queue or not.
- USE WITH CAUTION. This will delete all
- messages in the queue as well.
+ :param force_deletion: Normally, SQS will not delete a queue
+ that contains messages. However, if the force_deletion
+ argument is True, the queue will be deleted regardless of
+ whether there are messages in the queue or not. USE WITH
+ CAUTION. This will delete all messages in the queue as
+ well.
:rtype: bool
:return: True if the command succeeded, False otherwise
@@ -119,16 +117,16 @@ class SQSConnection(AWSQueryConnection):
:param queue: The SQS queue to be deleted
:type attribute: str
- :type attribute: The specific attribute requested. If not supplied,
- the default is to return all attributes.
- Valid attributes are:
-
- ApproximateNumberOfMessages|
- ApproximateNumberOfMessagesNotVisible|
- VisibilityTimeout|
- CreatedTimestamp|
- LastModifiedTimestamp|
- Policy
+        :param attribute: The specific attribute requested. If not
+ supplied, the default is to return all attributes. Valid
+ attributes are:
+
+ * ApproximateNumberOfMessages|
+ * ApproximateNumberOfMessagesNotVisible|
+ * VisibilityTimeout|
+ * CreatedTimestamp|
+ * LastModifiedTimestamp|
+ * Policy
:rtype: :class:`boto.sqs.attributes.Attributes`
:return: An Attributes object containing request value(s).
@@ -155,18 +153,19 @@ class SQSConnection(AWSQueryConnection):
:type visibility_timeout: int
:param visibility_timeout: The number of seconds the message should
- remain invisible to other queue readers
- (default=None which uses the Queues default)
+ remain invisible to other queue readers
+ (default=None which uses the Queues default)
:type attributes: str
:param attributes: The name of additional attribute to return
- with response or All if you want all attributes.
- The default is to return no additional attributes.
- Valid values:
-
- All|SenderId|SentTimestamp|
- ApproximateReceiveCount|
- ApproximateFirstReceiveTimestamp
+ with response or All if you want all attributes. The
+ default is to return no additional attributes. Valid
+ values:
+ * All
+ * SenderId
+ * SentTimestamp
+ * ApproximateReceiveCount
+ * ApproximateFirstReceiveTimestamp
:rtype: list
:return: A list of :class:`boto.sqs.message.Message` objects.
@@ -196,6 +195,26 @@ class SQSConnection(AWSQueryConnection):
params = {'ReceiptHandle' : message.receipt_handle}
return self.get_status('DeleteMessage', params, queue.id)
+ def delete_message_batch(self, queue, messages):
+ """
+ Deletes a list of messages from a queue in a single request.
+
+ :type queue: A :class:`boto.sqs.queue.Queue` object.
+        :param queue: The Queue from which the messages will be deleted.
+
+ :type messages: List of :class:`boto.sqs.message.Message` objects.
+ :param messages: A list of message objects.
+ """
+ params = {}
+ for i, msg in enumerate(messages):
+ prefix = 'DeleteMessageBatchRequestEntry'
+ p_name = '%s.%i.Id' % (prefix, (i+1))
+ params[p_name] = msg.id
+ p_name = '%s.%i.ReceiptHandle' % (prefix, (i+1))
+ params[p_name] = msg.receipt_handle
+ return self.get_object('DeleteMessageBatch', params, BatchResults,
+ queue.id, verb='POST')
+
def delete_message_from_handle(self, queue, receipt_handle):
"""
Delete a message from a queue, given a receipt handle.
@@ -268,6 +287,32 @@ class SQSConnection(AWSQueryConnection):
'VisibilityTimeout' : visibility_timeout}
return self.get_status('ChangeMessageVisibility', params, queue.id)
+ def change_message_visibility_batch(self, queue, messages):
+ """
+ A batch version of change_message_visibility that can act
+ on up to 10 messages at a time.
+
+ :type queue: A :class:`boto.sqs.queue.Queue` object.
+        :param queue: The Queue containing the messages.
+
+ :type messages: List of tuples.
+ :param messages: A list of tuples where each tuple consists
+ of a :class:`boto.sqs.message.Message` object and an integer
+ that represents the new visibility timeout for that message.
+ """
+ params = {}
+ for i, t in enumerate(messages):
+ prefix = 'ChangeMessageVisibilityBatchRequestEntry'
+ p_name = '%s.%i.Id' % (prefix, (i+1))
+ params[p_name] = t[0].id
+ p_name = '%s.%i.ReceiptHandle' % (prefix, (i+1))
+ params[p_name] = t[0].receipt_handle
+ p_name = '%s.%i.VisibilityTimeout' % (prefix, (i+1))
+ params[p_name] = t[1]
+ return self.get_object('ChangeMessageVisibilityBatch',
+ params, BatchResults,
+ queue.id, verb='POST')
+
def get_all_queues(self, prefix=''):
"""
Retrieves all queues.
@@ -291,11 +336,11 @@ class SQSConnection(AWSQueryConnection):
:rtype: :py:class:`boto.sqs.queue.Queue` or ``None``
:returns: The requested queue, or ``None`` if no match was found.
"""
- rs = self.get_all_queues(queue_name)
- for q in rs:
- if q.url.endswith(queue_name):
- return q
- return None
+ params = {'QueueName': queue_name}
+ try:
+ return self.get_object('GetQueueUrl', params, Queue)
+ except SQSError:
+ return None
lookup = get_queue
@@ -312,20 +357,24 @@ class SQSConnection(AWSQueryConnection):
:type label: str or unicode
:param label: A unique identification of the permission you are setting.
- Maximum of 80 characters ``[0-9a-zA-Z_-]``
- Example, AliceSendMessage
+ Maximum of 80 characters ``[0-9a-zA-Z_-]``
+ Example, AliceSendMessage
:type aws_account_id: str or unicode
- :param principal_id: The AWS account number of the principal who will
- be given permission. The principal must have
- an AWS account, but does not need to be signed
- up for Amazon SQS. For information
- about locating the AWS account identification.
+        :param aws_account_id: The AWS account number of the principal
+ who will be given permission. The principal must have an
+ AWS account, but does not need to be signed up for Amazon
+ SQS. For information about locating the AWS account
+ identification.
:type action_name: str or unicode
:param action_name: The action. Valid choices are:
- \*|SendMessage|ReceiveMessage|DeleteMessage|
- ChangeMessageVisibility|GetQueueAttributes
+ * *
+ * SendMessage
+ * ReceiveMessage
+ * DeleteMessage
+ * ChangeMessageVisibility
+ * GetQueueAttributes
:rtype: bool
:return: True if successful, False otherwise.
diff --git a/boto/sqs/message.py b/boto/sqs/message.py
index 8fabd478..e50a3e3a 100644
--- a/boto/sqs/message.py
+++ b/boto/sqs/message.py
@@ -67,6 +67,7 @@ import base64
import StringIO
from boto.sqs.attributes import Attributes
from boto.exception import SQSDecodeError
+import boto
class RawMessage:
"""
@@ -156,7 +157,8 @@ class Message(RawMessage):
try:
value = base64.b64decode(value)
except:
- raise SQSDecodeError('Unable to decode message', self)
+ boto.log.warning('Unable to decode message')
+ return value
return value
class MHMessage(Message):
diff --git a/boto/sqs/queue.py b/boto/sqs/queue.py
index 0ecb1f2c..ca5593c1 100644
--- a/boto/sqs/queue.py
+++ b/boto/sqs/queue.py
@@ -67,9 +67,10 @@ class Queue:
def set_message_class(self, message_class):
"""
- Set the message class that should be used when instantiating messages read
- from the queue. By default, the class boto.sqs.message.Message is used but
- this can be overriden with any class that behaves like a message.
+ Set the message class that should be used when instantiating
+ messages read from the queue. By default, the class
+        :class:`boto.sqs.message.Message` is used but this can be overridden
+ with any class that behaves like a message.
:type message_class: Message-like class
:param message_class: The new Message class
@@ -104,8 +105,8 @@ class Queue:
only valid value at this time is: VisibilityTimeout
:type value: int
:param value: The new value for the attribute.
- For VisibilityTimeout the value must be an
- integer number of seconds from 0 to 86400.
+ For VisibilityTimeout the value must be an
+ integer number of seconds from 0 to 86400.
:rtype: bool
:return: True if successful, otherwise False.
@@ -140,32 +141,34 @@ class Queue:
:type label: str or unicode
:param label: A unique identification of the permission you are setting.
- Maximum of 80 characters ``[0-9a-zA-Z_-]``
- Example, AliceSendMessage
+ Maximum of 80 characters ``[0-9a-zA-Z_-]``
+ Example, AliceSendMessage
:type aws_account_id: str or unicode
- :param principal_id: The AWS account number of the principal who will be given
- permission. The principal must have an AWS account, but
- does not need to be signed up for Amazon SQS. For information
- about locating the AWS account identification.
+        :param aws_account_id: The AWS account number of the principal who
+ will be given permission. The principal must have an AWS account,
+ but does not need to be signed up for Amazon SQS. For information
+ about locating the AWS account identification.
:type action_name: str or unicode
:param action_name: The action. Valid choices are:
- \*|SendMessage|ReceiveMessage|DeleteMessage|
- ChangeMessageVisibility|GetQueueAttributes
+ *|SendMessage|ReceiveMessage|DeleteMessage|
+ ChangeMessageVisibility|GetQueueAttributes
:rtype: bool
:return: True if successful, False otherwise.
"""
- return self.connection.add_permission(self, label, aws_account_id, action_name)
+ return self.connection.add_permission(self, label, aws_account_id,
+ action_name)
def remove_permission(self, label):
"""
Remove a permission from a queue.
:type label: str or unicode
- :param label: The unique label associated with the permission being removed.
+ :param label: The unique label associated with the permission
+ being removed.
:rtype: bool
:return: True if successful, False otherwise.
@@ -198,11 +201,29 @@ class Queue:
:rtype: :class:`boto.sqs.message.Message`
:return: The :class:`boto.sqs.message.Message` object that was written.
"""
- new_msg = self.connection.send_message(self, message.get_body_encoded(), delay_seconds)
+ new_msg = self.connection.send_message(self,
+ message.get_body_encoded(),
+ delay_seconds)
message.id = new_msg.id
message.md5 = new_msg.md5
return message
+ def write_batch(self, messages):
+ """
+ Delivers up to 10 messages in a single request.
+
+ :type messages: List of lists.
+ :param messages: A list of lists or tuples. Each inner
+ tuple represents a single message to be written
+ and consists of and ID (string) that must be unique
+ within the list of messages, the message body itself
+ which can be a maximum of 64K in length, and an
+ integer which represents the delay time (in seconds)
+ for the message (0-900) before the message will
+ be delivered to the queue.
+ """
+ return self.connection.send_message_batch(self, messages)
+
def new_message(self, body=''):
"""
Create new message of appropriate class.
@@ -224,20 +245,18 @@ class Queue:
Get a variable number of messages.
:type num_messages: int
- :param num_messages: The maximum number of messages to read from the queue.
+ :param num_messages: The maximum number of messages to read from
+ the queue.
:type visibility_timeout: int
:param visibility_timeout: The VisibilityTimeout for the messages read.
:type attributes: str
- :param attributes: The name of additional attribute to return with response
- or All if you want all attributes. The default is to
- return no additional attributes. Valid values:
- All
- SenderId
- SentTimestamp
- ApproximateReceiveCount
- ApproximateFirstReceiveTimestamp
+ :param attributes: The name of additional attribute to return
+ with response or All if you want all attributes. The
+ default is to return no additional attributes. Valid
+ values: All SenderId SentTimestamp ApproximateReceiveCount
+ ApproximateFirstReceiveTimestamp
:rtype: list
:return: A list of :class:`boto.sqs.message.Message` objects.
@@ -258,6 +277,27 @@ class Queue:
"""
return self.connection.delete_message(self, message)
+ def delete_message_batch(self, messages):
+ """
+ Deletes a list of messages in a single request.
+
+ :type messages: List of :class:`boto.sqs.message.Message` objects.
+ :param messages: A list of message objects.
+ """
+ return self.connection.delete_message_batch(self, messages)
+
+ def change_message_visibility_batch(self, messages):
+ """
+ A batch version of change_message_visibility that can act
+ on up to 10 messages at a time.
+
+ :type messages: List of tuples.
+ :param messages: A list of tuples where each tuple consists
+ of a :class:`boto.sqs.message.Message` object and an integer
+ that represents the new visibility timeout for that message.
+ """
+ return self.connection.change_message_visibility_batch(self, messages)
+
def delete(self):
"""
Delete the queue.
diff --git a/boto/storage_uri.py b/boto/storage_uri.py
index 5f14500d..9661c9f7 100755
--- a/boto/storage_uri.py
+++ b/boto/storage_uri.py
@@ -268,6 +268,22 @@ class BucketStorageUri(StorageUri):
self.check_response(acl, 'acl', self.uri)
return acl
+ def get_cors(self, validate=True, headers=None):
+ """returns a bucket's CORS XML"""
+ if not self.bucket_name:
+ raise InvalidUriError('get_cors on bucket-less URI (%s)' % self.uri)
+ bucket = self.get_bucket(validate, headers)
+ cors = bucket.get_cors(headers)
+ self.check_response(cors, 'cors', self.uri)
+ return cors
+
+ def set_cors(self, cors, validate=True, headers=None):
+ """sets or updates a bucket's CORS XML"""
+ if not self.bucket_name:
+ raise InvalidUriError('set_cors on bucket-less URI (%s)' % self.uri)
+ bucket = self.get_bucket(validate, headers)
+ bucket.set_cors(cors.to_xml(), headers)
+
def get_location(self, validate=True, headers=None):
if not self.bucket_name:
raise InvalidUriError('get_location on bucket-less URI (%s)' %
diff --git a/boto/swf/exceptions.py b/boto/swf/exceptions.py
new file mode 100644
index 00000000..eacfda9d
--- /dev/null
+++ b/boto/swf/exceptions.py
@@ -0,0 +1,37 @@
+"""
+Exceptions that are specific to the swf module.
+
+This module subclasses the base SWF response exception,
+boto.exceptions.SWFResponseError, for some of the SWF specific faults.
+"""
+from boto.exception import SWFResponseError
+
+
+class SWFDomainAlreadyExistsError(SWFResponseError):
+ """
+    Raised when the domain already exists.
+ """
+ pass
+
+
+class SWFLimitExceededError(SWFResponseError):
+ """
+    Raised when a system-imposed limitation has been reached.
+ """
+ pass
+
+
+class SWFOperationNotPermittedError(SWFResponseError):
+ """
+    Raised when an operation is not permitted (reserved for future use).
+ """
+
+
+class SWFTypeAlreadyExistsError(SWFResponseError):
+ """
+    Raised when the workflow type or activity type already exists.
+ """
+ pass
+
+
+
diff --git a/boto/swf/layer1.py b/boto/swf/layer1.py
index d39c3d13..73e67ec0 100644
--- a/boto/swf/layer1.py
+++ b/boto/swf/layer1.py
@@ -25,7 +25,8 @@
import boto
from boto.connection import AWSAuthConnection
from boto.provider import Provider
-from boto.exception import DynamoDBResponseError
+from boto.exception import SWFResponseError
+from boto.swf import exceptions as swf_exceptions
import time
try:
@@ -37,7 +38,7 @@ except ImportError:
# To get full debug output, uncomment the following line and set the
# value of Debug to be 2
#
-#boto.set_stream_logger('dynamodb')
+#boto.set_stream_logger('swf')
Debug=0
class Layer1(AWSAuthConnection):
@@ -46,16 +47,29 @@ class Layer1(AWSAuthConnection):
"""
DefaultRegionName = 'us-east-1'
- """The default region name for DynamoDB API."""
+ """The default region name for Simple Workflow."""
ServiceName = 'com.amazonaws.swf.service.model.SimpleWorkflowService'
"""The name of the Service"""
-
- ResponseError = DynamoDBResponseError
+
+ # In some cases, the fault response __type value is mapped to
+ # an exception class more specific than SWFResponseError.
+ _fault_excp = {
+ 'com.amazonaws.swf.base.model#DomainAlreadyExistsFault':
+ swf_exceptions.SWFDomainAlreadyExistsError,
+ 'com.amazonaws.swf.base.model#LimitExceededFault':
+ swf_exceptions.SWFLimitExceededError,
+ 'com.amazonaws.swf.base.model#OperationNotPermittedFault':
+ swf_exceptions.SWFOperationNotPermittedError,
+ 'com.amazonaws.swf.base.model#TypeAlreadyExistsFault':
+ swf_exceptions.SWFTypeAlreadyExistsError ,
+ }
+
+ ResponseError = SWFResponseError
def __init__(self, aws_access_key_id=None, aws_secret_access_key=None,
is_secure=True, port=None, proxy=None, proxy_port=None,
- host=None, debug=0, session_token=None, region=None):
+ debug=0, session_token=None, region=None):
if not region:
region_name = boto.config.get('SWF', 'region',
self.DefaultRegionName)
@@ -75,9 +89,10 @@ class Layer1(AWSAuthConnection):
def make_request(self, action, body='', object_hook=None):
"""
- :raises: ``DynamoDBExpiredTokenError`` if the security token expires.
+ :raises: ``SWFResponseError`` if response status is not 200.
"""
headers = {'X-Amz-Target': '%s.%s' % (self.ServiceName, action),
+ 'Host': self.region.endpoint,
'Content-Type': 'application/json; charset=UTF-8',
'Content-Encoding': 'amz-1.0',
'Content-Length': str(len(body))}
@@ -87,10 +102,19 @@ class Layer1(AWSAuthConnection):
override_num_retries=10)
response_body = response.read()
boto.log.debug(response_body)
- if response_body:
- return json.loads(response_body, object_hook=object_hook)
+ if response.status == 200:
+ if response_body:
+ return json.loads(response_body, object_hook=object_hook)
+ else:
+ return None
else:
- return None
+ json_body = json.loads(response_body)
+ fault_name = json_body.get('__type', None)
+ # Certain faults get mapped to more specific exception classes.
+ excp_cls = self._fault_excp.get(fault_name, self.ResponseError)
+ raise excp_cls(response.status, response.reason, body=json_body)
+
+
# Actions related to Activities
@@ -655,7 +679,8 @@ class Layer1(AWSAuthConnection):
## Workflow Management
- def register_workflow_type(self, domain, name, task_list=None,
+ def register_workflow_type(self, domain, name, version,
+ task_list=None,
default_child_policy=None,
default_execution_start_to_close_timeout=None,
default_task_start_to_close_timeout=None,
@@ -671,6 +696,9 @@ class Layer1(AWSAuthConnection):
:type name: string
:param name: The name of the workflow type.
+ :type version: string
+ :param version: The version of the workflow type.
+
:type task_list: list of name, version of tasks
:param name: If set, specifies the default task list to use
for scheduling decision tasks for executions of this workflow
@@ -721,7 +749,7 @@ class Layer1(AWSAuthConnection):
:raises: TypeAlreadyExistsFault, LimitExceededFault,
UnknownResourceFault, OperationNotPermittedFault
"""
- data = {'domain': domain, 'name': name}
+ data = {'domain': domain, 'name': name, 'version': version}
if task_list:
data['defaultTaskList'] = {'name': task_list}
if default_child_policy:
@@ -1119,53 +1147,88 @@ class Layer1(AWSAuthConnection):
json_input = json.dumps(data)
return self.make_request('CountOpenWorkflowExecutions', json_input)
- def list_open_workflow_executions(self, domain, name, oldest_date, tag, workflow_id, latest_date=None, maximum_page_size=None, next_page_token=None, reverse_order=None, version=None):
- """
- No documentation supplied.
+ def list_open_workflow_executions(self, domain,
+ latest_date=None,
+ oldest_date=None,
+ tag=None, workflow_id=None,
+ workflow_name=None,
+ workflow_version=None,
+ maximum_page_size=None,
+ next_page_token=None,
+ reverse_order=None):
+ """
+ Returns the list of open workflow executions within the
+ given domain that meet the specified filtering criteria.
+
+ .. note:
+ workflow_id, workflow_name/workflow_version
+ and tag are mutually exclusive. You can specify at most
+ one of these in a request.
:type domain: string
- :param domain: no docs
+ :param domain: The name of the domain containing the
+ workflow executions to count.
- :type name: string
- :param name: no docs
+ :type latest_date: timestamp
+ :param latest_date: Specifies the latest start or close date
+ and time to return.
:type oldest_date: timestamp
- :param oldest_date: no docs
+ :param oldest_date: Specifies the oldest start or close date
+ and time to return.
:type tag: string
- :param tag: no docs
+ :param tag: If specified, only executions that have a tag
+ that matches the filter are counted.
:type workflow_id: string
- :param workflow_id: no docs
+ :param workflow_id: If specified, only workflow executions
+ matching the workflow_id are counted.
- :type latest_date: timestamp
- :param latest_date: no docs
+ :type workflow_name: string
+ :param workflow_name: Name of the workflow type to filter on.
+
+ :type workflow_version: string
+ :param workflow_version: Version of the workflow type to filter on.
:type maximum_page_size: integer
- :param maximum_page_size: no docs
+ :param maximum_page_size: The maximum number of results
+ returned in each page. The default is 100, but the caller can
+ override this value to a page size smaller than the
+ default. You cannot specify a page size greater than 100.
:type next_page_token: string
- :param next_page_token: no docs
+ :param next_page_token: If on a previous call to this method a
+ NextPageToken was returned, the results are being
+ paginated. To get the next page of results, repeat the call
+ with the returned token and all other arguments unchanged.
:type reverse_order: boolean
- :param reverse_order: no docs
+ :param reverse_order: When set to true, returns the results in
+ reverse order. By default the results are returned in
+ descending order of the start or the close time of the
+ executions.
- :type version: string
- :param version: no docs
+ :raises: UnknownResourceFault, OperationNotPermittedFault
- :raises: #UnknownResourceFault, #OperationNotPermittedFault
"""
- data = {'domain': domain, 'name': name, 'oldestDate': oldest_date, 'tag': tag, 'workflowId': workflow_id}
- if latest_date:
- data['latestDate'] = latest_date
+ data = {'domain': domain}
+ data['startTimeFilter'] = {'oldestDate': oldest_date,
+ 'latestDate': latest_date}
+ if tag:
+ data['tagFilter'] = {'tag': tag}
+ if workflow_name and workflow_version:
+ data['typeFilter'] = {'name': workflow_name,
+ 'version': workflow_version}
+ if workflow_id:
+ data['executionFilter'] = {'workflowId': workflow_id}
+
if maximum_page_size:
data['maximumPageSize'] = maximum_page_size
if next_page_token:
data['nextPageToken'] = next_page_token
if reverse_order:
data['reverseOrder'] = 'true'
- if version:
- data['version'] = version
json_input = json.dumps(data)
return self.make_request('ListOpenWorkflowExecutions', json_input)
@@ -1356,6 +1419,10 @@ class Layer1(AWSAuthConnection):
if close_latest_date and close_oldest_date:
data['closeTimeFilter'] = {'oldestDate': close_oldest_date,
'latestDate': close_latest_date}
+
+ if workflow_id:
+ data['executionFilter'] = {'workflowId': workflow_id}
+
if close_status:
data['closeStatusFilter'] = {'status': close_status}
if tag:
diff --git a/boto/swf/layer1_decisions.py b/boto/swf/layer1_decisions.py
new file mode 100644
index 00000000..f34bc82f
--- /dev/null
+++ b/boto/swf/layer1_decisions.py
@@ -0,0 +1,316 @@
+"""
+helper class for creating decision responses
+"""
+
+class Layer1Decisions:
+ """
+ Use this object to build a list of decisions for a decision response.
+ Each method call will append a new decision. Retrieve the list
+ of decisions from the _data attribute.
+
+ """
+ def __init__(self):
+ self._data = []
+
+ def schedule_activity_task(self,
+ activity_id,
+ activity_type_name,
+ activity_type_version,
+ task_list=None,
+ control=None,
+ heartbeat_timeout=None,
+ schedule_to_close_timeout=None,
+ schedule_to_start_timeout=None,
+ start_to_close_timeout=None,
+ input=None):
+ """
+ schedules an activity task
+
+ :type activity_id: string
+ :param activity_id: The activityId of the type of the activity
+ being scheduled.
+
+ :type activity_type_name: string
+ :param activity_type_name: The name of the type of the activity
+ being scheduled.
+
+ :type activity_type_version: string
+ :param activity_type_version: The version of the type of the
+ activity being scheduled.
+
+ :type task_list: string
+ :param task_list: If set, specifies the name of the task list in
+ which to schedule the activity task. If not specified, the
+ defaultTaskList registered with the activity type will be used.
+ Note: a task list for this activity task must be specified either
+ as a default for the activity type or through this field. If
+ neither this field is set nor a default task list was specified
+ at registration time then a fault will be returned.
+
+ FINISH DOCS
+ """
+ o = {}
+ o['decisionType'] = 'ScheduleActivityTask'
+ attrs = o['scheduleActivityTaskDecisionAttributes'] = {}
+ attrs['activityId'] = activity_id
+ attrs['activityType'] = {
+ 'name': activity_type_name,
+ 'version': activity_type_version,
+ }
+ if task_list is not None:
+ attrs['taskList'] = {'name': task_list}
+ if control is not None:
+ attrs['control'] = control
+ if heartbeat_timeout is not None:
+ attrs['heartbeatTimeout'] = heartbeat_timeout
+ if schedule_to_close_timeout is not None:
+ attrs['scheduleToCloseTimeout'] = schedule_to_close_timeout
+ if schedule_to_start_timeout is not None:
+ attrs['scheduleToStartTimeout'] = schedule_to_start_timeout
+ if start_to_close_timeout is not None:
+ attrs['startToCloseTimeout'] = start_to_close_timeout
+ if input is not None:
+ attrs['input'] = input
+ self._data.append(o)
+
+ def request_cancel_activity_task(self,
+ activity_id):
+ """
+ attempts to cancel a previously scheduled activity task. If the activity
+ task was scheduled but has not been assigned to a worker, then it will
+ be canceled. If the activity task was already assigned to a worker, then
+ the worker will be informed that cancellation has been requested in the
+ response to RecordActivityTaskHeartbeat.
+
+ FINISH DOCS
+ """
+ o = {}
+ o['decisionType'] = 'RequestCancelActivityTask'
+ attrs = o['requestCancelActivityTaskDecisionAttributes'] = {}
+ attrs['activityId'] = activity_id
+ self._data.append(o)
+
+ def record_marker(self,
+ marker_name,
+ details=None):
+ """
+ records a MarkerRecorded event in the history. Markers can be used for
+ adding custom information in the history for instance to let deciders know
+ that they do not need to look at the history beyond the marker event.
+
+ FINISH DOCS
+ """
+ o = {}
+ o['decisionType'] = 'RecordMarker'
+ attrs = o['recordMarkerDecisionAttributes'] = {}
+ attrs['markerName'] = marker_name
+ if details is not None:
+ attrs['details'] = details
+ self._data.append(o)
+
+ def complete_workflow_execution(self,
+ result=None):
+ """
+ closes the workflow execution and records a WorkflowExecutionCompleted
+ event in the history
+
+ FINISH DOCS
+ """
+ o = {}
+ o['decisionType'] = 'CompleteWorkflowExecution'
+ attrs = o['completeWorkflowExecutionDecisionAttributes'] = {}
+ if result is not None:
+ attrs['result'] = result
+ self._data.append(o)
+
+ def fail_workflow_execution(self,
+ reason=None,
+ details=None):
+ """
+ closes the workflow execution and records a WorkflowExecutionFailed event
+ in the history.
+
+ FINISH DOCS
+ """
+ o = {}
+ o['decisionType'] = 'FailWorkflowExecution'
+ attrs = o['failWorkflowExecutionDecisionAttributes'] = {}
+ if reason is not None:
+ attrs['reason'] = reason
+ if details is not None:
+ attrs['details'] = details
+ self._data.append(o)
+
+ def cancel_workflow_executions(self,
+ details=None):
+ """
+ closes the workflow execution and records a WorkflowExecutionCanceled
+ event in the history.
+
+ FINISH DOCS
+ """
+ o = {}
+ o['decisionType'] = 'CancelWorkflowExecution'
+ attrs = o['cancelWorkflowExecutionDecisionAttributes'] = {}
+ if details is not None:
+ attrs['details'] = details
+ self._data.append(o)
+
+ def continue_as_new_workflow_execution(self,
+ child_policy=None,
+ execution_start_to_close_timeout=None,
+ input=None,
+ tag_list=None,
+ task_list=None,
+ start_to_close_timeout=None,
+ workflow_type_version=None):
+ """
+ closes the workflow execution and starts a new workflow execution of
+ the same type using the same workflow id and a unique run Id. A
+ WorkflowExecutionContinuedAsNew event is recorded in the history.
+
+ FINISH DOCS
+ """
+ o = {}
+ o['decisionType'] = 'ContinueAsNewWorkflowExecution'
+ attrs = o['continueAsNewWorkflowExecutionDecisionAttributes'] = {}
+ if child_policy is not None:
+ attrs['childPolicy'] = child_policy
+ if execution_start_to_close_timeout is not None:
+ attrs['executionStartToCloseTimeout'] = execution_start_to_close_timeout
+ if input is not None:
+ attrs['input'] = input
+ if tag_list is not None:
+ attrs['tagList'] = tag_list
+ if task_list is not None:
+ attrs['taskList'] = {'name': task_list}
+ if start_to_close_timeout is not None:
+ attrs['startToCloseTimeout'] = start_to_close_timeout
+ if workflow_type_version is not None:
+ attrs['workflowTypeVersion'] = workflow_type_version
+ self._data.append(o)
+
+ def start_timer(self,
+ start_to_fire_timeout,
+ timer_id,
+ control=None):
+ """
+ starts a timer for this workflow execution and records a TimerStarted
+ event in the history. This timer will fire after the specified delay
+ and record a TimerFired event.
+
+ FINISH DOCS
+ """
+ o = {}
+ o['decisionType'] = 'StartTimer'
+ attrs = o['startTimerDecisionAttributes'] = {}
+ attrs['startToFireTimeout'] = start_to_fire_timeout
+ attrs['timerId'] = timer_id
+ if control is not None:
+ attrs['control'] = control
+ self._data.append(o)
+
+ def cancel_timer(self,
+ timer_id):
+ """
+ cancels a previously started timer and records a TimerCanceled event in the
+ history.
+
+ FINISH DOCS
+ """
+ o = {}
+ o['decisionType'] = 'CancelTimer'
+ attrs = o['cancelTimerDecisionAttributes'] = {}
+ attrs['timerId'] = timer_id
+ self._data.append(o)
+
+ def signal_external_workflow_execution(self,
+ workflow_id,
+ signal_name,
+ run_id=None,
+ control=None,
+ input=None):
+ """
+ requests a signal to be delivered to the specified external workflow
+ execution and records a SignalExternalWorkflowExecutionInitiated
+ event in the history.
+
+ FINISH DOCS
+ """
+ o = {}
+ o['decisionType'] = 'SignalExternalWorkflowExecution'
+ attrs = o['signalExternalWorkflowExecutionDecisionAttributes'] = {}
+ attrs['workflowId'] = workflow_id
+ attrs['signalName'] = signal_name
+ if run_id is not None:
+ attrs['runId'] = run_id
+ if control is not None:
+ attrs['control'] = control
+ if input is not None:
+ attrs['input'] = input
+ self._data.append(o)
+
+ def request_cancel_external_workflow_execution(self,
+ workflow_id,
+ control=None,
+ run_id=None):
+ """
+ requests that a request be made to cancel the specified external workflow
+ execution and records a
+ RequestCancelExternalWorkflowExecutionInitiated event in the history.
+
+ FINISH DOCS
+ """
+ o = {}
+ o['decisionType'] = 'RequestCancelExternalWorkflowExecution'
+ attrs = o['requestCancelExternalWorkflowExecutionDecisionAttributes'] = {}
+ attrs['workflowId'] = workflow_id
+ if control is not None:
+ attrs['control'] = control
+ if run_id is not None:
+ attrs['runId'] = run_id
+ self._data.append(o)
+
+ def start_child_workflow_execution(self,
+ workflow_type_name,
+ workflow_type_version,
+ child_policy=None,
+ control=None,
+ execution_start_to_close_timeout=None,
+ input=None,
+ tag_list=None,
+ task_list=None,
+ task_start_to_close_timeout=None):
+ """
+ requests that a child workflow execution be started and records a
+ StartChildWorkflowExecutionInitiated event in the history. The child
+ workflow execution is a separate workflow execution with its own history.
+
+ FINISH DOCS
+ """
+ o = {}
+ o['decisionType'] = 'StartChildWorkflowExecution'
+ attrs = o['startChildWorkflowExecutionDecisionAttributes'] = {}
+ attrs['workflowType'] = {
+ 'name': workflow_type_name,
+ 'version': workflow_type_version,
+ }
+ if child_policy is not None:
+ attrs['childPolicy'] = child_policy
+ if control is not None:
+ attrs['control'] = control
+ if execution_start_to_close_timeout is not None:
+ attrs['executionStartToCloseTimeout'] = execution_start_to_close_timeout
+ if input is not None:
+ attrs['input'] = input
+ if tag_list is not None:
+ attrs['tagList'] = tag_list
+ if task_list is not None:
+ attrs['taskList'] = {'name': task_list}
+ if task_start_to_close_timeout is not None:
+ attrs['taskStartToCloseTimeout'] = task_start_to_close_timeout
+ self._data.append(o)
+
+
+
+
diff --git a/boto/utils.py b/boto/utils.py
index 960da180..a2bf386d 100644
--- a/boto/utils.py
+++ b/boto/utils.py
@@ -71,7 +71,7 @@ except ImportError:
_hashfn = md5.md5
# List of Query String Arguments of Interest
-qsa_of_interest = ['acl', 'defaultObjectAcl', 'location', 'logging',
+qsa_of_interest = ['acl', 'cors', 'defaultObjectAcl', 'location', 'logging',
'partNumber', 'policy', 'requestPayment', 'torrent',
'versioning', 'versionId', 'versions', 'website',
'uploads', 'uploadId', 'response-content-type',
diff --git a/boto/vpc/__init__.py b/boto/vpc/__init__.py
index ae55a26e..e5c0eefe 100644
--- a/boto/vpc/__init__.py
+++ b/boto/vpc/__init__.py
@@ -50,12 +50,12 @@ class VPCConnection(EC2Connection):
:type filters: list of tuples
:param filters: A list of tuples containing filters. Each tuple
- consists of a filter key and a filter value.
- Possible filter keys are:
+ consists of a filter key and a filter value.
+ Possible filter keys are:
- - *state*, the state of the VPC (pending or available)
- - *cidrBlock*, CIDR block of the VPC
- - *dhcpOptionsId*, the ID of a set of DHCP options
+ * *state* - a list of states of the VPC (pending or available)
+ * *cidrBlock* - a list of CIDR blocks of the VPC
+ * *dhcpOptionsId* - a list of IDs of a set of DHCP options
:rtype: list
:return: A list of :class:`boto.vpc.vpc.VPC`
@@ -64,11 +64,7 @@ class VPCConnection(EC2Connection):
if vpc_ids:
self.build_list_params(params, vpc_ids, 'VpcId')
if filters:
- i = 1
- for filter in filters:
- params[('Filter.%d.Name' % i)] = filter[0]
- params[('Filter.%d.Value.1' % i)] = filter[1]
- i += 1
+ self.build_filter_params(params, dict(filters))
return self.get_list('DescribeVpcs', params, [('item', VPC)])
def create_vpc(self, cidr_block):
@@ -261,7 +257,6 @@ class VPCConnection(EC2Connection):
self.build_list_params(params, internet_gateway_ids, 'InternetGatewayId')
if filters:
self.build_filter_params(params, dict(filters))
-
return self.get_list('DescribeInternetGateways', params, [('item', InternetGateway)])
def create_internet_gateway(self):
@@ -311,7 +306,7 @@ class VPCConnection(EC2Connection):
Detach an internet gateway from a specific VPC.
:type internet_gateway_id: str
- :param internet_gateway_id: The ID of the internet gateway to delete.
+ :param internet_gateway_id: The ID of the internet gateway to detach.
:type vpc_id: str
:param vpc_id: The ID of the VPC to attach to.
@@ -356,11 +351,8 @@ class VPCConnection(EC2Connection):
if customer_gateway_ids:
self.build_list_params(params, customer_gateway_ids, 'CustomerGatewayId')
if filters:
- i = 1
- for filter in filters:
- params[('Filter.%d.Name' % i)] = filter[0]
- params[('Filter.%d.Value.1')] = filter[1]
- i += 1
+ self.build_filter_params(params, dict(filters))
+
return self.get_list('DescribeCustomerGateways', params, [('item', CustomerGateway)])
def create_customer_gateway(self, type, ip_address, bgp_asn):
@@ -416,10 +408,10 @@ class VPCConnection(EC2Connection):
consists of a filter key and a filter value.
Possible filter keys are:
- - *state*, the state of the VpnGateway
+ - *state*, a list of states of the VpnGateway
(pending,available,deleting,deleted)
- - *type*, the type of customer gateway (ipsec.1)
- - *availabilityZone*, the Availability zone the
+ - *type*, a list of types of customer gateway (ipsec.1)
+ - *availabilityZone*, a list of Availability zones the
VPN gateway is in.
:rtype: list
@@ -429,11 +421,7 @@ class VPCConnection(EC2Connection):
if vpn_gateway_ids:
self.build_list_params(params, vpn_gateway_ids, 'VpnGatewayId')
if filters:
- i = 1
- for filter in filters:
- params[('Filter.%d.Name' % i)] = filter[0]
- params[('Filter.%d.Value.1')] = filter[1]
- i += 1
+ self.build_filter_params(params, dict(filters))
return self.get_list('DescribeVpnGateways', params, [('item', VpnGateway)])
def create_vpn_gateway(self, type, availability_zone=None):
@@ -501,11 +489,11 @@ class VPCConnection(EC2Connection):
consists of a filter key and a filter value.
Possible filter keys are:
- - *state*, the state of the Subnet
+ - *state*, a list of states of the Subnet
(pending,available)
- - *vpdId*, the ID of teh VPC the subnet is in.
- - *cidrBlock*, CIDR block of the subnet
- - *availabilityZone*, the Availability Zone
+ - *vpcId*, a list of IDs of the VPC the subnet is in.
+ - *cidrBlock*, a list of CIDR blocks of the subnet
+ - *availabilityZone*, list of the Availability Zones
the subnet is in.
@@ -516,11 +504,7 @@ class VPCConnection(EC2Connection):
if subnet_ids:
self.build_list_params(params, subnet_ids, 'SubnetId')
if filters:
- i = 1
- for filter in filters:
- params[('Filter.%d.Name' % i)] = filter[0]
- params[('Filter.%d.Value.1' % i)] = filter[1]
- i += 1
+ self.build_filter_params(params, dict(filters))
return self.get_list('DescribeSubnets', params, [('item', Subnet)])
def create_subnet(self, vpc_id, cidr_block, availability_zone=None):
@@ -645,12 +629,12 @@ class VPCConnection(EC2Connection):
consists of a filter key and a filter value.
Possible filter keys are:
- - *state*, the state of the VPN_CONNECTION
+ - *state*, a list of states of the VPN_CONNECTION
pending,available,deleting,deleted
- - *type*, the type of connection, currently 'ipsec.1'
- - *customerGatewayId*, the ID of the customer gateway
+ - *type*, a list of types of connection, currently 'ipsec.1'
+ - *customerGatewayId*, a list of IDs of the customer gateway
associated with the VPN
- - *vpnGatewayId*, the ID of the VPN gateway associated
+ - *vpnGatewayId*, a list of IDs of the VPN gateway associated
with the VPN connection
:rtype: list
@@ -660,11 +644,7 @@ class VPCConnection(EC2Connection):
if vpn_connection_ids:
self.build_list_params(params, vpn_connection_ids, 'Vpn_ConnectionId')
if filters:
- i = 1
- for filter in filters:
- params[('Filter.%d.Name' % i)] = filter[0]
- params[('Filter.%d.Value.1')] = filter[1]
- i += 1
+ self.build_filter_params(params, dict(filters))
return self.get_list('DescribeVpnConnections', params, [('item', VpnConnection)])
def create_vpn_connection(self, type, customer_gateway_id, vpn_gateway_id):
@@ -701,5 +681,3 @@ class VPCConnection(EC2Connection):
"""
params = {'VpnConnectionId': vpn_connection_id}
return self.get_status('DeleteVpnConnection', params)
-
-
diff --git a/docs/source/cloudsearch_tut.rst b/docs/source/cloudsearch_tut.rst
new file mode 100644
index 00000000..4bf12d13
--- /dev/null
+++ b/docs/source/cloudsearch_tut.rst
@@ -0,0 +1,264 @@
+.. cloudsearch_tut:
+
+===============================================
+An Introduction to boto's Cloudsearch interface
+===============================================
+
+This tutorial focuses on the boto interface to AWS' Cloudsearch_. This tutorial
+assumes that you have boto already downloaded and installed.
+
+.. _Cloudsearch: http://aws.amazon.com/cloudsearch/
+
+Creating a Domain
+-----------------
+
+ import boto
+
+ our_ip = '192.168.1.0'
+
+ conn = boto.connect_cloudsearch()
+ domain = conn.create_domain('demo')
+
+ # Allow our IP address to access the document and search services
+ policy = domain.get_access_policies()
+ policy.allow_search_ip(our_ip)
+ policy.allow_doc_ip(our_ip)
+
+ # Create an 'text' index field called 'username'
+ uname_field = domain.create_index_field('username', 'text')
+
+ # But it would be neat to drill down into different countries
+ loc_field = domain.create_index_field('location', 'text', facet=True)
+
+ # Epoch time of when the user last did something
+ time_field = domain.create_index_field('last_activity', 'uint', default=0)
+
+ follower_field = domain.create_index_field('follower_count', 'uint', default=0)
+
+ domain.create_rank_expression('recently_active', 'last_activity') # We'll want to be able to just show the most recently active users
+
+ domain.create_rank_expression('activish', 'text_relevance + ((follower_count/(time() - last_activity))*1000)') # Let's get trickier and combine text relevance with a really dynamic expression
+
+Viewing and Adjusting Stemming for a Domain
+--------------------------------------------
+
+A stemming dictionary maps related words to a common stem. A stem is
+typically the root or base word from which variants are derived. For
+example, run is the stem of running and ran. During indexing, Amazon
+CloudSearch uses the stemming dictionary when it performs
+text-processing on text fields. At search time, the stemming
+dictionary is used to perform text-processing on the search
+request. This enables matching on variants of a word. For example, if
+you map the term running to the stem run and then search for running,
+the request matches documents that contain run as well as running.
+
+To get the current stemming dictionary defined for a domain, use the
+``get_stemming`` method of the Domain object.
+
+ >>> stems = domain.get_stemming()
+ >>> stems
+ {u'stems': {}}
+ >>>
+
+This returns a dictionary object that can be manipulated directly to
+add additional stems for your search domain by adding pairs of term:stem
+to the stems dictionary.
+
+ >>> stems['stems']['running'] = 'run'
+ >>> stems['stems']['ran'] = 'run'
+ >>> stems
+ {u'stems': {u'ran': u'run', u'running': u'run'}}
+ >>>
+
+This has changed the value locally. To update the information in
+Amazon CloudSearch, you need to save the data.
+
+ >>> stems.save()
+
+You can also access certain CloudSearch-specific attributes related to
+the stemming dictionary defined for your domain.
+
+ >>> stems.status
+ u'RequiresIndexDocuments'
+ >>> stems.creation_date
+ u'2012-05-01T12:12:32Z'
+ >>> stems.update_date
+ u'2012-05-01T12:12:32Z'
+ >>> stems.update_version
+ 19
+ >>>
+
+The status indicates that, because you have changed the stems associated
+with the domain, you will need to re-index the documents in the domain
+before the new stems are used.
+
+Viewing and Adjusting Stopwords for a Domain
+--------------------------------------------
+
+Stopwords are words that should typically be ignored both during
+indexing and at search time because they are either insignificant or
+so common that including them would result in a massive number of
+matches.
+
+To view the stopwords currently defined for your domain, use the
+``get_stopwords`` method of the Domain object.
+
+ >>> stopwords = domain.get_stopwords()
+ >>> stopwords
+ {u'stopwords': [u'a',
+ u'an',
+ u'and',
+ u'are',
+ u'as',
+ u'at',
+ u'be',
+ u'but',
+ u'by',
+ u'for',
+ u'in',
+ u'is',
+ u'it',
+ u'of',
+ u'on',
+ u'or',
+ u'the',
+ u'to',
+ u'was']}
+ >>>
+
+You can add additional stopwords by simply appending the values to the
+list.
+
+ >>> stopwords['stopwords'].append('foo')
+ >>> stopwords['stopwords'].append('bar')
+ >>> stopwords
+
+Similarly, you could remove currently defined stopwords from the list.
+To save the changes, use the ``save`` method.
+
+ >>> stopwords.save()
+
+The stopwords object has similar attributes defined above for stemming
+that provide additional information about the stopwords in your domain.
+
+
+Viewing and Adjusting Synonyms for a Domain
+-------------------------------------------
+
+You can configure synonyms for terms that appear in the data you are
+searching. That way, if a user searches for the synonym rather than
+the indexed term, the results will include documents that contain the
+indexed term.
+
+If you want two terms to match the same documents, you must define
+them as synonyms of each other. For example:
+
+ cat, feline
+ feline, cat
+
+To view the synonyms currently defined for your domain, use the
+``get_synonyms`` method of the Domain object.
+
 >>> synonyms = domain.get_synonyms()
+ >>> synonyms
+ {u'synonyms': {}}
+ >>>
+
+You can define new synonyms by adding new term:synonyms entries to the
+synonyms dictionary object.
+
+ >>> synonyms['synonyms']['cat'] = ['feline', 'kitten']
+ >>> synonyms['synonyms']['dog'] = ['canine', 'puppy']
+
+To save the changes, use the ``save`` method.
+
+ >>> synonyms.save()
+
+The synonyms object has similar attributes defined above for stemming
+that provide additional information about the synonyms in your domain.
+
+Adding Documents to the Index
+-----------------------------
+
+Now, we can add some documents to our new search domain.
+
+ doc_service = domain.get_document_service()
+
+ # Presumably get some users from your db of choice.
+ users = [
+ {
+ 'id': 1,
+ 'username': 'dan',
+ 'last_activity': 1334252740,
+ 'follower_count': 20,
+ 'location': 'USA'
+ },
+ {
+ 'id': 2,
+ 'username': 'dankosaur',
+ 'last_activity': 1334252904,
+ 'follower_count': 1,
+ 'location': 'UK'
+ },
+ {
+ 'id': 3,
+ 'username': 'danielle',
+ 'last_activity': 1334252969,
+ 'follower_count': 100,
+ 'location': 'DE'
+ },
+ {
+ 'id': 4,
+ 'username': 'daniella',
+ 'last_activity': 1334253279,
+ 'follower_count': 7,
+ 'location': 'USA'
+ }
+ ]
+
+ for user in users:
+ doc_service.add(user['id'], user['last_activity'], user)
+
+ result = doc_service.commit() # Actually post the SDF to the document service
+
+The result is an instance of `cloudsearch.CommitResponse` which will
+makes the plain dictionary response a nice object (ie result.adds,
+result.deletes) and raise an exception for us if all of our documents
+weren't actually committed.
+
+
+Searching Documents
+-------------------
+
+Now, let's try performing a search.
+
+ # Get an instance of cloudsearch.SearchServiceConnection
+ search_service = domain.get_search_service()
+
+ # Hooray wildcard search
+ query = "username:'dan*'"
+
+
+ results = search_service.search(bq=query, rank=['-recently_active'], start=0, size=10)
+
+ # Results will give us back a nice cloudsearch.SearchResults object that looks as
+ # close as possible to pysolr.Results
+
+ print "Got %s results back." % results.hits
+ print "User ids are:"
+ for result in results:
+ print result['id']
+
+
+Deleting Documents
+------------------
+
+ import time
+ from datetime import datetime
+
+ doc_service = domain.get_document_service()
+
+ # Again we'll cheat and use the current epoch time as our version number
+
+ doc_service.delete(4, int(time.mktime(datetime.utcnow().timetuple())))
+ doc_service.commit()
diff --git a/docs/source/ref/cloudformation.rst b/docs/source/ref/cloudformation.rst
index 3ec10c77..3e0ab41f 100644
--- a/docs/source/ref/cloudformation.rst
+++ b/docs/source/ref/cloudformation.rst
@@ -11,6 +11,13 @@ boto.cloudformation
:members:
:undoc-members:
+boto.cloudformation.connection
+------------------------------
+
+.. automodule:: boto.cloudformation.connection
+ :members:
+ :undoc-members:
+
boto.cloudformation.stack
-------------------------
diff --git a/docs/source/ref/cloudsearch.rst b/docs/source/ref/cloudsearch.rst
new file mode 100644
index 00000000..4cd2baaa
--- /dev/null
+++ b/docs/source/ref/cloudsearch.rst
@@ -0,0 +1,59 @@
+.. ref-cloudsearch
+
+===========
+Cloudsearch
+===========
+
+boto.cloudsearch
+----------------
+
+.. automodule:: boto.cloudsearch
+ :members:
+ :undoc-members:
+
+boto.cloudsearch.domain
+-----------------------
+
+.. automodule:: boto.cloudsearch.domain
+ :members:
+ :undoc-members:
+
+boto.cloudsearch.layer1
+-----------------------
+
+.. automodule:: boto.cloudsearch.layer1
+ :members:
+ :undoc-members:
+
+boto.cloudsearch.layer2
+-----------------------
+
+.. automodule:: boto.cloudsearch.layer2
+ :members:
+ :undoc-members:
+
+boto.cloudsearch.optionstatus
+-----------------------------
+
+.. automodule:: boto.cloudsearch.optionstatus
+ :members:
+ :undoc-members:
+
+boto.cloudsearch.search
+-----------------------
+
+.. automodule:: boto.cloudsearch.search
+ :members:
+ :undoc-members:
+
+boto.cloudsearch.document
+-------------------------
+
+.. automodule:: boto.cloudsearch.document
+ :members:
+ :undoc-members:
+
+
+
+
+
diff --git a/docs/source/ref/index.rst b/docs/source/ref/index.rst
index 70aa0a6d..4f36adf6 100644
--- a/docs/source/ref/index.rst
+++ b/docs/source/ref/index.rst
@@ -10,6 +10,7 @@ API Reference
boto
cloudformation
cloudfront
+ cloudsearch
contrib
dynamodb
ec2
diff --git a/docs/source/ref/s3.rst b/docs/source/ref/s3.rst
index 86b411a4..8520aa96 100644
--- a/docs/source/ref/s3.rst
+++ b/docs/source/ref/s3.rst
@@ -74,3 +74,10 @@ boto.s3.deletemarker
:members:
:undoc-members:
+boto.s3.lifecycle
+--------------------
+
+.. automodule:: boto.s3.lifecycle
+ :members:
+ :undoc-members:
+
diff --git a/setup.py b/setup.py
index a65a7479..cfa2a953 100644
--- a/setup.py
+++ b/setup.py
@@ -42,7 +42,7 @@ if sys.version_info <= (2, 4):
setup(name = "boto",
version = __version__,
description = "Amazon Web Services Library",
- long_description = "Python interface to Amazon's Web Services.",
+ long_description = open("README.rst").read(),
author = "Mitch Garnaat",
author_email = "mitch@garnaat.com",
scripts = ["bin/sdbadmin", "bin/elbadmin", "bin/cfadmin",
@@ -64,7 +64,7 @@ setup(name = "boto",
"boto.fps", "boto.emr", "boto.emr", "boto.sns",
"boto.ecs", "boto.iam", "boto.route53", "boto.ses",
"boto.cloudformation", "boto.sts", "boto.dynamodb",
- "boto.swf"],
+ "boto.swf", "boto.mws"],
package_data = {"boto.cacerts": ["cacerts.txt"]},
license = "MIT",
platforms = "Posix; MacOS X; Windows",
diff --git a/tests/dynamodb/test_layer1.py b/tests/dynamodb/test_layer1.py
index 5964118d..b7227fc9 100644
--- a/tests/dynamodb/test_layer1.py
+++ b/tests/dynamodb/test_layer1.py
@@ -27,6 +27,8 @@ Tests for Layer1 of DynamoDB
import unittest
import time
from boto.dynamodb.exceptions import DynamoDBKeyNotFoundError
+from boto.dynamodb.exceptions import DynamoDBConditionalCheckFailedError
+from boto.dynamodb.exceptions import DynamoDBValidationError
from boto.dynamodb.layer1 import Layer1
from boto.sts.credentials import Credentials
@@ -134,10 +136,9 @@ class DynamoDBLayer1Test (unittest.TestCase):
# Try to delete the item with the wrong Expected value
expected = {'Views': {'Value': {'N': '1'}}}
- try:
- result = c.delete_item(table_name, key=key1, expected=expected)
- except c.ResponseError, e:
- assert e.error_code == 'ConditionalCheckFailedException'
+ self.assertRaises(DynamoDBConditionalCheckFailedError,
+ c.delete_item, table_name, key=key1,
+ expected=expected)
# Now update the existing object
attribute_updates = {'Views': {'Value': {'N': '5'},
@@ -147,6 +148,17 @@ class DynamoDBLayer1Test (unittest.TestCase):
result = c.update_item(table_name, key=key1,
attribute_updates=attribute_updates)
+ # Try and update an item, in a fashion which makes it too large.
+ # The new message text is the item size limit minus 32 bytes and
+ # the current object is larger than 32 bytes.
+ item_size_overflow_text = 'Text to be padded'.zfill(64*1024-32)
+ attribute_updates = {'Message': {'Value': {'S': item_size_overflow_text},
+ 'Action': 'PUT'}}
+ self.assertRaises(DynamoDBValidationError,
+ c.update_item, table_name, key=key1,
+ attribute_updates=attribute_updates)
+
+
# Put a few more items into the table
item2_key = 'Amazon DynamoDB'
item2_range = 'DynamoDB Thread 2'
diff --git a/tests/dynamodb/test_layer2.py b/tests/dynamodb/test_layer2.py
index 63708a1a..c60b497b 100644
--- a/tests/dynamodb/test_layer2.py
+++ b/tests/dynamodb/test_layer2.py
@@ -28,6 +28,7 @@ import unittest
import time
import uuid
from boto.dynamodb.exceptions import DynamoDBKeyNotFoundError, DynamoDBItemError
+from boto.dynamodb.exceptions import DynamoDBConditionalCheckFailedError
from boto.dynamodb.layer2 import Layer2
from boto.dynamodb.types import get_dynamodb_type
from boto.dynamodb.condition import *
@@ -102,13 +103,6 @@ class DynamoDBLayer2Test (unittest.TestCase):
'LastPostDateTime': '12/9/2011 11:36:03 PM'}
# Test a few corner cases with new_item
- # First, try not supplying a hash_key
- self.assertRaises(DynamoDBItemError,
- table.new_item, None, item1_range, item1_attrs)
-
- # Try supplying a hash but no range
- self.assertRaises(DynamoDBItemError,
- table.new_item, item1_key, None, item1_attrs)
# Try supplying a hash_key as an arg and as an item in attrs
item1_attrs[hash_key_name] = 'foo'
@@ -166,12 +160,8 @@ class DynamoDBLayer2Test (unittest.TestCase):
# Try to delete the item with the wrong Expected value
expected = {'Views': 1}
- try:
- item1.delete(expected_value=expected)
- except c.layer1.ResponseError, e:
- assert e.error_code == 'ConditionalCheckFailedException'
- else:
- raise Exception("Expected Value condition failed")
+ self.assertRaises(DynamoDBConditionalCheckFailedError,
+ item1.delete, expected_value=expected)
# Try to delete a value while expecting a non-existant attribute
expected = {'FooBar': True}
@@ -248,6 +238,7 @@ class DynamoDBLayer2Test (unittest.TestCase):
for item in items:
n += 1
assert n == 2
+ assert items.consumed_units > 0
items = table.query('Amazon DynamoDB', BEGINS_WITH('DynamoDB'),
request_limit=1, max_results=1)
@@ -255,6 +246,7 @@ class DynamoDBLayer2Test (unittest.TestCase):
for item in items:
n += 1
assert n == 1
+ assert items.consumed_units > 0
# Try a few scans
items = table.scan()
@@ -262,12 +254,14 @@ class DynamoDBLayer2Test (unittest.TestCase):
for item in items:
n += 1
assert n == 3
+ assert items.consumed_units > 0
items = table.scan({'Replies': GT(0)})
n = 0
for item in items:
n += 1
assert n == 1
+ assert items.consumed_units > 0
# Test some integer and float attributes
integer_value = 42
@@ -314,6 +308,42 @@ class DynamoDBLayer2Test (unittest.TestCase):
response = batch_list.submit()
assert len(response['Responses'][table.name]['Items']) == 2
+ # Try a few batch write operations
+ item4_key = 'Amazon S3'
+ item4_range = 'S3 Thread 2'
+ item4_attrs = {
+ 'Message': 'S3 Thread 2 message text',
+ 'LastPostedBy': 'User A',
+ 'Views': 0,
+ 'Replies': 0,
+ 'Answered': 0,
+ 'Tags': set(['largeobject', 'multipart upload']),
+ 'LastPostDateTime': '12/9/2011 11:36:03 PM'
+ }
+ item5_key = 'Amazon S3'
+ item5_range = 'S3 Thread 3'
+ item5_attrs = {
+ 'Message': 'S3 Thread 3 message text',
+ 'LastPostedBy': 'User A',
+ 'Views': 0,
+ 'Replies': 0,
+ 'Answered': 0,
+ 'Tags': set(['largeobject', 'multipart upload']),
+ 'LastPostDateTime': '12/9/2011 11:36:03 PM'
+ }
+ item4 = table.new_item(item4_key, item4_range, item4_attrs)
+ item5 = table.new_item(item5_key, item5_range, item5_attrs)
+ batch_list = c.new_batch_write_list()
+ batch_list.add_batch(table, puts=[item4, item5])
+ response = batch_list.submit()
+ # should really check for unprocessed items
+
+ batch_list = c.new_batch_write_list()
+ batch_list.add_batch(table, deletes=[(item4_key, item4_range),
+ (item5_key, item5_range)])
+ response = batch_list.submit()
+
+
# Try queries
results = table.query('Amazon DynamoDB', BEGINS_WITH('DynamoDB'))
n = 0
diff --git a/tests/ec2/cloudwatch/test_connection.py b/tests/ec2/cloudwatch/test_connection.py
index 0479d650..c6883da1 100644
--- a/tests/ec2/cloudwatch/test_connection.py
+++ b/tests/ec2/cloudwatch/test_connection.py
@@ -120,8 +120,8 @@ class CloudWatchConnectionTest(unittest.TestCase):
expected_params = {
'MetricData.member.1.MetricName': 'N',
'MetricData.member.1.Value': 1,
- 'MetricData.member.1.Dimensions.member.1.Name.1': 'D',
- 'MetricData.member.1.Dimensions.member.1.Value.1': 'V',
+ 'MetricData.member.1.Dimensions.member.1.Name': 'D',
+ 'MetricData.member.1.Dimensions.member.1.Value': 'V',
}
self.assertEqual(params, expected_params)
@@ -132,12 +132,12 @@ class CloudWatchConnectionTest(unittest.TestCase):
expected_params = {
'MetricData.member.1.MetricName': 'N',
'MetricData.member.1.Value': 1,
- 'MetricData.member.1.Dimensions.member.1.Name.1': 'D',
- 'MetricData.member.1.Dimensions.member.1.Value.1': 'V',
+ 'MetricData.member.1.Dimensions.member.1.Name': 'D',
+ 'MetricData.member.1.Dimensions.member.1.Value': 'V',
'MetricData.member.2.MetricName': 'M',
'MetricData.member.2.Value': 2,
- 'MetricData.member.2.Dimensions.member.1.Name.1': 'D',
- 'MetricData.member.2.Dimensions.member.1.Value.1': 'V',
+ 'MetricData.member.2.Dimensions.member.1.Name': 'D',
+ 'MetricData.member.2.Dimensions.member.1.Value': 'V',
}
self.assertEqual(params, expected_params)
@@ -148,12 +148,12 @@ class CloudWatchConnectionTest(unittest.TestCase):
expected_params = {
'MetricData.member.1.MetricName': 'N',
'MetricData.member.1.Value': 1,
- 'MetricData.member.1.Dimensions.member.1.Name.1': 'D',
- 'MetricData.member.1.Dimensions.member.1.Value.1': 'V',
+ 'MetricData.member.1.Dimensions.member.1.Name': 'D',
+ 'MetricData.member.1.Dimensions.member.1.Value': 'V',
'MetricData.member.2.MetricName': 'N',
'MetricData.member.2.Value': 2,
- 'MetricData.member.2.Dimensions.member.1.Name.1': 'D',
- 'MetricData.member.2.Dimensions.member.1.Value.1': 'W',
+ 'MetricData.member.2.Dimensions.member.1.Name': 'D',
+ 'MetricData.member.2.Dimensions.member.1.Value': 'W',
}
self.assertEqual(params, expected_params)
@@ -170,14 +170,14 @@ class CloudWatchConnectionTest(unittest.TestCase):
expected_params = {
'MetricData.member.1.MetricName': 'N',
'MetricData.member.1.Value': 1,
- 'MetricData.member.1.Dimensions.member.1.Name.1': 'D1',
- 'MetricData.member.1.Dimensions.member.1.Value.1': 'V',
- 'MetricData.member.1.Dimensions.member.2.Name.1': 'D2',
- 'MetricData.member.1.Dimensions.member.2.Value.1': 'W',
+ 'MetricData.member.1.Dimensions.member.1.Name': 'D1',
+ 'MetricData.member.1.Dimensions.member.1.Value': 'V',
+ 'MetricData.member.1.Dimensions.member.2.Name': 'D2',
+ 'MetricData.member.1.Dimensions.member.2.Value': 'W',
}
self.assertEqual(params, expected_params)
- def test_build_get_params_multiple_parameter_dimension(self):
+ def test_build_get_params_multiple_parameter_dimension1(self):
from collections import OrderedDict
self.maxDiff = None
c = CloudWatchConnection()
@@ -185,10 +185,28 @@ class CloudWatchConnectionTest(unittest.TestCase):
dimensions = OrderedDict((("D1", "V"), ("D2", "W")))
c.build_dimension_param(dimensions, params)
expected_params = {
- 'Dimensions.member.1.Name.1': 'D1',
- 'Dimensions.member.1.Value.1': 'V',
- 'Dimensions.member.2.Name.1': 'D2',
- 'Dimensions.member.2.Value.1': 'W',
+ 'Dimensions.member.1.Name': 'D1',
+ 'Dimensions.member.1.Value': 'V',
+ 'Dimensions.member.2.Name': 'D2',
+ 'Dimensions.member.2.Value': 'W',
+ }
+ self.assertEqual(params, expected_params)
+
+ def test_build_get_params_multiple_parameter_dimension2(self):
+ from collections import OrderedDict
+ self.maxDiff = None
+ c = CloudWatchConnection()
+ params = {}
+ dimensions = OrderedDict((("D1", ["V1", "V2"]), ("D2", "W"), ("D3", None)))
+ c.build_dimension_param(dimensions, params)
+ expected_params = {
+ 'Dimensions.member.1.Name': 'D1',
+ 'Dimensions.member.1.Value': 'V1',
+ 'Dimensions.member.2.Name': 'D1',
+ 'Dimensions.member.2.Value': 'V2',
+ 'Dimensions.member.3.Name': 'D2',
+ 'Dimensions.member.3.Value': 'W',
+ 'Dimensions.member.4.Name': 'D3',
}
self.assertEqual(params, expected_params)
diff --git a/tests/fps/test.py b/tests/fps/test.py
new file mode 100755
index 00000000..4eba9070
--- /dev/null
+++ b/tests/fps/test.py
@@ -0,0 +1,101 @@
+#!/usr/bin/env python
+from unittest import main, skip, skipUnless, TestCase
+import uuid
+import sys
+import os
+import os.path
+
+simple = True
+advanced = False
+if __name__ == "__main__":
+ devpath = os.path.relpath(os.path.join('..', '..'),
+ start=os.path.dirname(__file__))
+ sys.path = [devpath] + sys.path
+ print '>>> advanced FPS tests; using local boto sources'
+ advanced = True
+
+from boto.fps.connection import FPSConnection
+from boto.fps.response import ComplexAmount
+
+
+class FPSTestCase(TestCase):
+
+ def __init__(self, *args, **kw):
+ TestCase.__init__(self, *args, **kw)
+ self.fps = FPSConnection(host='fps.sandbox.amazonaws.com')
+ if advanced:
+ self.activity = self.fps.get_account_activity(\
+ StartDate='2012-01-01')
+ result = self.activity.GetAccountActivityResult
+ self.transactions = result.Transaction
+
+ @skipUnless(simple, "skipping simple test")
+ def test_get_account_balance(self):
+ response = self.fps.get_account_balance()
+ self.assertTrue(hasattr(response, 'GetAccountBalanceResult'))
+ self.assertTrue(hasattr(response.GetAccountBalanceResult,
+ 'AccountBalance'))
+ accountbalance = response.GetAccountBalanceResult.AccountBalance
+ self.assertTrue(hasattr(accountbalance, 'TotalBalance'))
+ self.assertIsInstance(accountbalance.TotalBalance, ComplexAmount)
+ self.assertTrue(hasattr(accountbalance, 'AvailableBalances'))
+ availablebalances = accountbalance.AvailableBalances
+ self.assertTrue(hasattr(availablebalances, 'RefundBalance'))
+
+ @skipUnless(simple, "skipping simple test")
+ def test_complex_amount(self):
+ response = self.fps.get_account_balance()
+ accountbalance = response.GetAccountBalanceResult.AccountBalance
+ asfloat = float(accountbalance.TotalBalance.Value)
+ self.assertIn('.', str(asfloat))
+
+ @skipUnless(simple, "skipping simple test")
+ def test_required_arguments(self):
+ with self.assertRaises(KeyError):
+ self.fps.write_off_debt(AdjustmentAmount=123.45)
+
+ @skipUnless(simple, "skipping simple test")
+ def test_cbui_url(self):
+ inputs = {
+ 'transactionAmount': 123.45,
+ 'pipelineName': 'SingleUse',
+ 'returnURL': 'https://localhost/',
+ 'paymentReason': 'a reason for payment',
+ 'callerReference': 'foo',
+ }
+ result = self.fps.cbui_url(**inputs)
+ print "cbui_url() yields {}".format(result)
+
+ @skipUnless(simple, "skipping simple test")
+ def test_get_account_activity(self):
+ response = self.fps.get_account_activity(StartDate='2012-01-01')
+ self.assertTrue(hasattr(response, 'GetAccountActivityResult'))
+ result = response.GetAccountActivityResult
+ self.assertTrue(hasattr(result, 'BatchSize'))
+ try:
+ int(result.BatchSize)
+ except:
+ self.assertTrue(False)
+
+ @skipUnless(advanced, "skipping advanced test")
+ def test_get_transaction(self):
+ assert len(self.transactions)
+ transactionid = self.transactions[0].TransactionId
+ result = self.fps.get_transaction(TransactionId=transactionid)
+ self.assertTrue(hasattr(result.GetTransactionResult, 'Transaction'))
+
+ @skip('cosmetic')
+ def test_bad_request(self):
+ try:
+ self.fps.write_off_debt(CreditInstrumentId='foo',
+ AdjustmentAmount=123.45)
+ except Exception, e:
+ print e
+
+ @skip('cosmetic')
+ def test_repr(self):
+ print self.fps.get_account_balance()
+
+
+if __name__ == "__main__":
+ main()
diff --git a/tests/fps/test_install_caller_instruction.py b/tests/fps/test_install_caller_instruction.py
deleted file mode 100644
index 80959148..00000000
--- a/tests/fps/test_install_caller_instruction.py
+++ /dev/null
@@ -1,4 +0,0 @@
-from boto.fps.connection import FPSConnection
-conn = FPSConnection()
-conn.install_caller_instruction()
-conn.install_recipient_instruction()
diff --git a/tests/mws/__init__.py b/tests/mws/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/tests/mws/__init__.py
diff --git a/tests/mws/test.py b/tests/mws/test.py
new file mode 100755
index 00000000..e1448fe4
--- /dev/null
+++ b/tests/mws/test.py
@@ -0,0 +1,98 @@
+#!/usr/bin/env python
+from unittest import main, skip, skipUnless, TestCase
+import sys
+import os
+import os.path
+
+
+simple = os.environ.get('MWS_MERCHANT', None)
+if not simple:
+ print """
+ Please set the MWS_MERCHANT environmental variable
+ to your Merchant or SellerId to enable MWS tests.
+ """
+
+
+advanced = False
+isolator = True
+if __name__ == "__main__":
+ devpath = os.path.relpath(os.path.join('..', '..'),
+ start=os.path.dirname(__file__))
+ sys.path = [devpath] + sys.path
+ advanced = simple and True or False
+ if advanced:
+ print '>>> advanced MWS tests; using local boto sources'
+
+from boto.mws.connection import MWSConnection
+
+
+class MWSTestCase(TestCase):
+
+ def __init__(self, *args, **kw):
+ TestCase.__init__(self, *args, **kw)
+ self.mws = MWSConnection(Merchant=simple, debug=0)
+
+ @skipUnless(simple and isolator, "skipping simple test")
+ def test_feedlist(self):
+ self.mws.get_feed_submission_list()
+
+ @skipUnless(simple and isolator, "skipping simple test")
+ def test_inbound_status(self):
+ response = self.mws.get_inbound_service_status()
+ status = response.GetServiceStatusResult.Status
+ self.assertIn(status, ('GREEN', 'GREEN_I', 'YELLOW', 'RED'))
+
+ @property
+ def marketplace(self):
+ response = self.mws.list_marketplace_participations()
+ result = response.ListMarketplaceParticipationsResult
+ return result.ListMarketplaces.Marketplace[0]
+
+ @property
+ def marketplace_id(self):
+ return self.marketplace.MarketplaceId
+
+ @skipUnless(simple and isolator, "skipping simple test")
+ def test_marketplace_participations(self):
+ response = self.mws.list_marketplace_participations()
+ result = response.ListMarketplaceParticipationsResult
+ self.assertTrue(result.ListMarketplaces.Marketplace[0].MarketplaceId)
+
+ @skipUnless(simple and isolator, "skipping simple test")
+ def test_get_product_categories_for_asin(self):
+ asin = '144930544X'
+ response = self.mws.get_product_categories_for_asin(\
+ MarketplaceId=self.marketplace_id,
+ ASIN=asin)
+ result = response._result
+ self.assertTrue(int(result.Self.ProductCategoryId) == 21)
+
+ @skipUnless(simple and isolator, "skipping simple test")
+ def test_list_matching_products(self):
+ response = self.mws.list_matching_products(\
+ MarketplaceId=self.marketplace_id,
+ Query='boto')
+ products = response._result.Products
+ self.assertTrue(len(products))
+
+ @skipUnless(simple and isolator, "skipping simple test")
+ def test_get_matching_product(self):
+ asin = 'B001UDRNHO'
+ response = self.mws.get_matching_product(\
+ MarketplaceId=self.marketplace_id,
+ ASINList=[asin,])
+ product = response._result[0].Product
+
+
+ @skipUnless(simple and isolator, "skipping simple test")
+ def test_get_lowest_offer_listings_for_asin(self):
+ asin = '144930544X'
+ response = self.mws.get_lowest_offer_listings_for_asin(\
+ MarketplaceId=self.marketplace_id,
+ ItemCondition='New',
+ ASINList=[asin,])
+ product = response._result[0].Product
+ self.assertTrue(product.LowestOfferListings)
+
+if __name__ == "__main__":
+ main()
diff --git a/tests/s3/test_connection.py b/tests/s3/test_connection.py
index e9d372e5..ce1884b0 100644
--- a/tests/s3/test_connection.py
+++ b/tests/s3/test_connection.py
@@ -28,6 +28,8 @@ import unittest
import time
import os
import urllib
+import urlparse
+import httplib
from boto.s3.connection import S3Connection
from boto.s3.bucket import Bucket
from boto.exception import S3PermissionsError, S3ResponseError
@@ -70,9 +72,24 @@ class S3ConnectionTest (unittest.TestCase):
assert s1 == file.read(), 'invalid URL %s' % url
url = k.generate_url(3600, force_http=True, headers={'x-amz-x-token' : 'XYZ'})
file = urllib.urlopen(url)
+ assert s1 == file.read(), 'invalid URL %s' % url
rh = {'response-content-disposition': 'attachment; filename="foo.txt"'}
url = k.generate_url(60, response_headers=rh)
+ file = urllib.urlopen(url)
+ assert s1 == file.read(), 'invalid URL %s' % url
+ #test whether ampersands and to-be-escaped characters work in header filename
+ rh = {'response-content-disposition': 'attachment; filename="foo&z%20ar&ar&zar&bar.txt"'}
+ url = k.generate_url(60, response_headers=rh, force_http=True)
+ file = urllib.urlopen(url)
assert s1 == file.read(), 'invalid URL %s' % url
+ # overwrite foobar contents with a PUT
+ url = k.generate_url(3600, 'PUT', force_http=True, policy='private', reduced_redundancy=True)
+ up = urlparse.urlsplit(url)
+ con = httplib.HTTPConnection(up.hostname, up.port)
+ con.request("PUT", up.path + '?' + up.query, body="hello there")
+ resp = con.getresponse()
+ assert 200 == resp.status
+ assert "hello there" == k.get_contents_as_string()
bucket.delete_key(k)
# test a few variations on get_all_keys - first load some data
# for the first one, let's override the content type
diff --git a/tests/s3/test_gsconnection.py b/tests/s3/test_gsconnection.py
index 0146c39e..cffb40b1 100644
--- a/tests/s3/test_gsconnection.py
+++ b/tests/s3/test_gsconnection.py
@@ -2,6 +2,7 @@
# Copyright (c) 2006-2011 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2010, Eucalyptus Systems, Inc.
# Copyright (c) 2011, Nexenta Systems, Inc.
+# Copyright (c) 2012, Google, Inc.
# All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
@@ -27,11 +28,15 @@
Some unit tests for the GSConnection
"""
+import boto
import unittest
import time
import os
import re
+import xml
from boto.gs.connection import GSConnection
+from boto.gs.cors import Cors
+from boto import handler
from boto import storage_uri
class GSConnectionTest (unittest.TestCase):
@@ -157,6 +162,18 @@ class GSConnectionTest (unittest.TestCase):
k.set_acl('private')
acl = k.get_acl()
assert len(acl.entries.entry_list) == 1
+ #
+ # Test case-insensitivity of XML ACL parsing.
+ acl_xml = (
+ '<ACCESSControlList><EntrIes><Entry>' +
+ '<Scope type="AllUsers"></Scope><Permission>READ</Permission>' +
+ '</Entry></EntrIes></ACCESSControlList>')
+ acl = boto.gs.acl.ACL()
+ h = handler.XmlHandler(acl, bucket)
+ xml.sax.parseString(acl_xml, h)
+ bucket.set_acl(acl)
+ assert len(acl.entries.entry_list) == 1
+ #
# try set/get raw logging subresource
empty_logging_str="<?xml version='1.0' encoding='UTF-8'?><Logging/>"
logging_str = (
@@ -214,6 +231,8 @@ class GSConnectionTest (unittest.TestCase):
# delete test buckets
c.delete_bucket(bucket1)
c.delete_bucket(bucket2)
+ # delete temp file
+ os.unlink('foobar')
def test_3_default_object_acls(self):
"""test default object acls"""
@@ -291,5 +310,50 @@ class GSConnectionTest (unittest.TestCase):
assert acl.to_xml() == '<AccessControlList></AccessControlList>'
# delete bucket
uri.delete_bucket()
+
+ def test_4_cors_xml(self):
+ """test setting and getting of CORS XML documents"""
+ # regexp for matching project-private default object ACL
+ cors_empty = '<CorsConfig></CorsConfig>'
+ cors_doc = ('<CorsConfig><Cors><Origins><Origin>origin1.example.com'
+ '</Origin><Origin>origin2.example.com</Origin></Origins>'
+ '<Methods><Method>GET</Method><Method>PUT</Method>'
+ '<Method>POST</Method></Methods><ResponseHeaders>'
+ '<ResponseHeader>foo</ResponseHeader>'
+ '<ResponseHeader>bar</ResponseHeader></ResponseHeaders>'
+ '</Cors></CorsConfig>')
+ c = GSConnection()
+ # create a new bucket
+ bucket_name = 'test-%d' % int(time.time())
+ bucket = c.create_bucket(bucket_name)
+ # now call get_bucket to see if it's really there
+ bucket = c.get_bucket(bucket_name)
+ # get new bucket cors and make sure it's empty
+ cors = re.sub(r'\s', '', bucket.get_cors().to_xml())
+ assert cors == cors_empty
+ # set cors document on new bucket
+ bucket.set_cors(cors_doc)
+ cors = re.sub(r'\s', '', bucket.get_cors().to_xml())
+ assert cors == cors_doc
+ # delete bucket
+ c.delete_bucket(bucket)
+
+ # repeat cors tests using boto's storage_uri interface
+ # create a new bucket
+ bucket_name = 'test-%d' % int(time.time())
+ uri = storage_uri('gs://' + bucket_name)
+ uri.create_bucket()
+ # get new bucket cors and make sure it's empty
+ cors = re.sub(r'\s', '', uri.get_cors().to_xml())
+ assert cors == cors_empty
+ # set cors document on new bucket
+ cors_obj = Cors()
+ h = handler.XmlHandler(cors_obj, None)
+ xml.sax.parseString(cors_doc, h)
+ uri.set_cors(cors_obj)
+ cors = re.sub(r'\s', '', uri.get_cors().to_xml())
+ assert cors == cors_doc
+ # delete bucket
+ uri.delete_bucket()
print '--- tests completed ---'
diff --git a/tests/s3/test_key.py b/tests/s3/test_key.py
index c961b317..2e823182 100644
--- a/tests/s3/test_key.py
+++ b/tests/s3/test_key.py
@@ -45,6 +45,36 @@ class S3KeyTest (unittest.TestCase):
key.delete()
self.bucket.delete()
+ def test_set_contents_from_file_dataloss(self):
+ # Create an empty stringio and write to it.
+ content = "abcde"
+ sfp = StringIO.StringIO()
+ sfp.write(content)
+ # Try set_contents_from_file() without rewinding sfp
+ k = self.bucket.new_key("k")
+ try:
+ k.set_contents_from_file(sfp)
+ self.fail("forgot to rewind so should fail.")
+ except AttributeError:
+ pass
+ # call with rewind and check if we wrote 5 bytes
+ k.set_contents_from_file(sfp, rewind=True)
+ self.assertEqual(k.size, 5)
+ # check actual contents by getting it.
+ kn = self.bucket.new_key("k")
+ ks = kn.get_contents_as_string()
+ self.assertEqual(ks, content)
+
+ # finally, try with a 0 length string
+ sfp = StringIO.StringIO()
+ k = self.bucket.new_key("k")
+ k.set_contents_from_file(sfp)
+ self.assertEqual(k.size, 0)
+ # check actual contents by getting it.
+ kn = self.bucket.new_key("k")
+ ks = kn.get_contents_as_string()
+ self.assertEqual(ks, "")
+
def test_set_contents_as_file(self):
content="01234567890123456789"
sfp = StringIO.StringIO(content)
diff --git a/tests/s3/test_multipart.py b/tests/s3/test_multipart.py
index 5c64ba73..8e93a6d8 100644
--- a/tests/s3/test_multipart.py
+++ b/tests/s3/test_multipart.py
@@ -91,6 +91,19 @@ class S3MultiPartUploadTest (unittest.TestCase):
# Abort using the one returned in the list
lmpu.cancel_upload()
+ def test_list_multipart_uploads(self):
+ key_name = u"ใƒ†ใ‚นใƒˆ"
+ mpus = []
+ mpus.append(self.bucket.initiate_multipart_upload(key_name))
+ mpus.append(self.bucket.initiate_multipart_upload(key_name))
+ rs = self.bucket.list_multipart_uploads()
+ # uploads (for a key) are returned in time initiated asc order
+ for lmpu in rs:
+ ompu = mpus.pop(0)
+ self.assertEqual(lmpu.key_name, ompu.key_name)
+ self.assertEqual(lmpu.id, ompu.id)
+ self.assertEqual(0, len(mpus))
+
def test_four_part_file(self):
key_name = "k"
contents = "01234567890123456789"
diff --git a/tests/s3/test_resumable_downloads.py b/tests/s3/test_resumable_downloads.py
index 38be659b..b813d1cb 100755
--- a/tests/s3/test_resumable_downloads.py
+++ b/tests/s3/test_resumable_downloads.py
@@ -72,116 +72,88 @@ class ResumableDownloadTests(unittest.TestCase):
except StorageResponseError, e:
pass
- @classmethod
- def setUp(cls):
- """
- Creates file-like object for detination of each download test.
-
- This method's namingCase is required by the unittest framework.
- """
- cls.dst_fp = open(cls.dst_file_name, 'w')
-
- @classmethod
- def tearDown(cls):
- """
- Deletes any objects or files created by last test run, and closes
- any keys in case they were read incompletely (which would leave
- partial buffers of data for subsequent tests to trip over).
-
- This method's namingCase is required by the unittest framework.
- """
- # Recursively delete dst dir and then re-create it, so in effect we
- # remove all dirs and files under that directory.
- shutil.rmtree(cls.tmp_dir)
- os.mkdir(cls.tmp_dir)
-
- # Close test objects.
- cls.resilient_close(cls.empty_src_key)
- cls.resilient_close(cls.small_src_key)
- cls.resilient_close(cls.larger_src_key)
-
- @classmethod
- def build_test_input_object(cls, obj_name, size, debug):
+ def build_test_input_object(self, obj_name, size):
buf = []
for i in range(size):
buf.append(str(random.randint(0, 9)))
string_data = ''.join(buf)
- uri = cls.src_bucket_uri.clone_replace_name(obj_name)
+ uri = self.src_bucket_uri.clone_replace_name(obj_name)
key = uri.new_key(validate=False)
key.set_contents_from_file(StringIO.StringIO(string_data))
- # Set debug on key's connection after creating data, so only the test
- # runs will show HTTP output (if called passed debug>0).
- key.bucket.connection.debug = debug
return (string_data, key)
- @classmethod
- def set_up_class(cls, debug):
+ def setUp(self):
"""
- Initializes test suite.
+ Initializes for each test.
"""
-
# Create the test bucket.
hostname = socket.gethostname().split('.')[0]
uri_base_str = 'gs://res-download-test-%s-%s-%s' % (
hostname, os.getpid(), int(time.time()))
- cls.src_bucket_uri = storage_uri('%s-dst' % uri_base_str)
- cls.src_bucket_uri.create_bucket()
+ self.src_bucket_uri = storage_uri('%s-dst' % uri_base_str)
+ self.src_bucket_uri.create_bucket()
# Create test source objects.
- cls.empty_src_key_size = 0
- (cls.empty_src_key_as_string, cls.empty_src_key) = (
- cls.build_test_input_object('empty', cls.empty_src_key_size,
- debug=debug))
- cls.small_src_key_size = 2 * 1024 # 2 KB.
- (cls.small_src_key_as_string, cls.small_src_key) = (
- cls.build_test_input_object('small', cls.small_src_key_size,
- debug=debug))
- cls.larger_src_key_size = 500 * 1024 # 500 KB.
- (cls.larger_src_key_as_string, cls.larger_src_key) = (
- cls.build_test_input_object('larger', cls.larger_src_key_size,
- debug=debug))
+ self.empty_src_key_size = 0
+ (self.empty_src_key_as_string, self.empty_src_key) = (
+ self.build_test_input_object('empty', self.empty_src_key_size))
+ self.small_src_key_size = 2 * 1024 # 2 KB.
+ (self.small_src_key_as_string, self.small_src_key) = (
+ self.build_test_input_object('small', self.small_src_key_size))
+ self.larger_src_key_size = 500 * 1024 # 500 KB.
+ (self.larger_src_key_as_string, self.larger_src_key) = (
+ self.build_test_input_object('larger', self.larger_src_key_size))
# Use a designated tmpdir prefix to make it easy to find the end of
# the tmp path.
- cls.tmpdir_prefix = 'tmp_resumable_download_test'
+ self.tmpdir_prefix = 'tmp_resumable_download_test'
# Create temp dir and name for download file.
- cls.tmp_dir = tempfile.mkdtemp(prefix=cls.tmpdir_prefix)
- cls.dst_file_name = '%s%sdst_file' % (cls.tmp_dir, os.sep)
+ self.tmp_dir = tempfile.mkdtemp(prefix=self.tmpdir_prefix)
+ self.dst_file_name = '%s%sdst_file' % (self.tmp_dir, os.sep)
- cls.tracker_file_name = '%s%stracker' % (cls.tmp_dir, os.sep)
+ self.tracker_file_name = '%s%stracker' % (self.tmp_dir, os.sep)
- cls.created_test_data = True
+ # Create file-like object for destination of each download test.
+ self.dst_fp = open(self.dst_file_name, 'w')
+ self.created_test_data = True
- @classmethod
- def tear_down_class(cls):
+ def tearDown(self):
"""
- Deletes test objects and bucket and tmp dir created by set_up_class.
+ Deletes test objects and bucket and tmp dir created by set_up_class,
+ and closes any keys in case they were read incompletely (which would
+ leave partial buffers of data for subsequent tests to trip over).
"""
- if not hasattr(cls, 'created_test_data'):
+ if not hasattr(self, 'created_test_data'):
return
- # Call cls.tearDown() in case the tests got interrupted, to ensure
- # dst objects get deleted.
- cls.tearDown()
+ # Recursively delete dst dir and then re-create it, so in effect we
+ # remove all dirs and files under that directory.
+ shutil.rmtree(self.tmp_dir)
+ os.mkdir(self.tmp_dir)
+
+ # Close test objects.
+ self.resilient_close(self.empty_src_key)
+ self.resilient_close(self.small_src_key)
+ self.resilient_close(self.larger_src_key)
# Delete test objects.
- cls.empty_src_key.delete()
- cls.small_src_key.delete()
- cls.larger_src_key.delete()
+ self.empty_src_key.delete()
+ self.small_src_key.delete()
+ self.larger_src_key.delete()
# Retry (for up to 2 minutes) the bucket gets deleted (it may not
# the first time round, due to eventual consistency of bucket delete
# operations).
for i in range(60):
try:
- cls.src_bucket_uri.delete_bucket()
+ self.src_bucket_uri.delete_bucket()
break
except StorageResponseError:
print 'Test bucket (%s) not yet deleted, still trying' % (
- cls.src_bucket_uri.uri)
+ self.src_bucket_uri.uri)
time.sleep(2)
- shutil.rmtree(cls.tmp_dir)
- cls.tmp_dir = tempfile.mkdtemp(prefix=cls.tmpdir_prefix)
+ shutil.rmtree(self.tmp_dir)
+ self.tmp_dir = tempfile.mkdtemp(prefix=self.tmpdir_prefix)
def test_non_resumable_download(self):
"""
@@ -440,36 +412,3 @@ class ResumableDownloadTests(unittest.TestCase):
finally:
# Restore original protection of dir where tracker_file lives.
os.chmod(self.tmp_dir, save_mod)
-
-if __name__ == '__main__':
- if sys.version_info[:3] < (2, 5, 1):
- sys.exit('These tests must be run on at least Python 2.5.1\n')
-
- # Use -d to see more HTTP protocol detail during tests. Note that
- # unlike the upload test case, you won't see much for the downloads
- # because there's no HTTP server state protocol for in the download case
- # (and the actual Range GET HTTP protocol detail is suppressed by the
- # normal boto.s3.Key.get_file() processing).
- debug = 0
- opts, args = getopt.getopt(sys.argv[1:], 'd', ['debug'])
- for o, a in opts:
- if o in ('-d', '--debug'):
- debug = 2
-
- test_loader = unittest.TestLoader()
- test_loader.testMethodPrefix = 'test_'
- suite = test_loader.loadTestsFromTestCase(ResumableDownloadTests)
- # Seems like there should be a cleaner way to find the test_class.
- test_class = suite.__getattribute__('_tests')[0]
- # We call set_up_class() and tear_down_class() ourselves because we
- # don't assume the user has Python 2.7 (which supports classmethods
- # that do it, with camelCase versions of these names).
- try:
- print 'Setting up %s...' % test_class.get_suite_description()
- test_class.set_up_class(debug)
- print 'Running %s...' % test_class.get_suite_description()
- unittest.TextTestRunner(verbosity=2).run(suite)
- finally:
- print 'Cleaning up after %s...' % test_class.get_suite_description()
- test_class.tear_down_class()
- print ''
diff --git a/tests/s3/test_resumable_uploads.py b/tests/s3/test_resumable_uploads.py
index 8a4a51f3..714dda32 100755
--- a/tests/s3/test_resumable_uploads.py
+++ b/tests/s3/test_resumable_uploads.py
@@ -65,32 +65,7 @@ class ResumableUploadTests(unittest.TestCase):
def get_suite_description(self):
return 'Resumable upload test suite'
- def setUp(self):
- """
- Creates dst_key needed by all tests.
-
- This method's namingCase is required by the unittest framework.
- """
- self.dst_key = self.dst_key_uri.new_key(validate=False)
-
- def tearDown(self):
- """
- Deletes any objects or files created by last test run.
-
- This method's namingCase is required by the unittest framework.
- """
- try:
- self.dst_key_uri.delete_key()
- except GSResponseError:
- # Ignore possible not-found error.
- pass
- # Recursively delete dst dir and then re-create it, so in effect we
- # remove all dirs and files under that directory.
- shutil.rmtree(self.tmp_dir)
- os.mkdir(self.tmp_dir)
-
- @staticmethod
- def build_test_input_file(size):
+ def build_test_input_file(self, size):
buf = []
# I manually construct the random data here instead of calling
# os.urandom() because I want to constrain the range of data (in
@@ -102,108 +77,119 @@ class ResumableUploadTests(unittest.TestCase):
file_as_string = ''.join(buf)
return (file_as_string, StringIO.StringIO(file_as_string))
- @classmethod
- def get_dst_bucket_uri(cls, debug):
+ def get_dst_bucket_uri(self):
"""A unique bucket to test."""
hostname = socket.gethostname().split('.')[0]
uri_base_str = 'gs://res-upload-test-%s-%s-%s' % (
hostname, os.getpid(), int(time.time()))
- return boto.storage_uri('%s-dst' % uri_base_str, debug=debug)
+ return boto.storage_uri('%s-dst' % uri_base_str)
- @classmethod
- def get_dst_key_uri(cls):
+ def get_dst_key_uri(self):
"""A key to test."""
- return cls.dst_bucket_uri.clone_replace_name('obj')
+ return self.dst_bucket_uri.clone_replace_name('obj')
- @classmethod
- def get_staged_host(cls):
+ def get_staged_host(self):
"""URL of an existing bucket."""
return 'pub.commondatastorage.googleapis.com'
- @classmethod
- def get_invalid_upload_id(cls):
+ def get_invalid_upload_id(self):
return (
'http://%s/?upload_id='
'AyzB2Uo74W4EYxyi5dp_-r68jz8rtbvshsv4TX7srJVkJ57CxTY5Dw2' % (
- cls.get_staged_host()))
+ self.get_staged_host()))
- @classmethod
- def set_up_class(cls, debug):
+ def setUp(self):
"""
- Initializes test suite.
+ Creates dst bucket and data needed by each test.
"""
-
# Use a designated tmpdir prefix to make it easy to find the end of
# the tmp path.
- cls.tmpdir_prefix = 'tmp_resumable_upload_test'
+ self.tmpdir_prefix = 'tmp_resumable_upload_test'
# Create test source file data.
- cls.empty_src_file_size = 0
- (cls.empty_src_file_as_string, cls.empty_src_file) = (
- cls.build_test_input_file(cls.empty_src_file_size))
- cls.small_src_file_size = 2 * 1024 # 2 KB.
- (cls.small_src_file_as_string, cls.small_src_file) = (
- cls.build_test_input_file(cls.small_src_file_size))
- cls.larger_src_file_size = 500 * 1024 # 500 KB.
- (cls.larger_src_file_as_string, cls.larger_src_file) = (
- cls.build_test_input_file(cls.larger_src_file_size))
- cls.largest_src_file_size = 1024 * 1024 # 1 MB.
- (cls.largest_src_file_as_string, cls.largest_src_file) = (
- cls.build_test_input_file(cls.largest_src_file_size))
+ self.empty_src_file_size = 0
+ (self.empty_src_file_as_string, self.empty_src_file) = (
+ self.build_test_input_file(self.empty_src_file_size))
+ self.small_src_file_size = 2 * 1024 # 2 KB.
+ (self.small_src_file_as_string, self.small_src_file) = (
+ self.build_test_input_file(self.small_src_file_size))
+ self.larger_src_file_size = 500 * 1024 # 500 KB.
+ (self.larger_src_file_as_string, self.larger_src_file) = (
+ self.build_test_input_file(self.larger_src_file_size))
+ self.largest_src_file_size = 1024 * 1024 # 1 MB.
+ (self.largest_src_file_as_string, self.largest_src_file) = (
+ self.build_test_input_file(self.largest_src_file_size))
# Create temp dir.
- cls.tmp_dir = tempfile.mkdtemp(prefix=cls.tmpdir_prefix)
+ self.tmp_dir = tempfile.mkdtemp(prefix=self.tmpdir_prefix)
# Create the test bucket.
- cls.dst_bucket_uri = cls.get_dst_bucket_uri(debug)
- cls.dst_bucket_uri.create_bucket()
- cls.dst_key_uri = cls.get_dst_key_uri()
+ self.dst_bucket_uri = self.get_dst_bucket_uri()
+ self.dst_bucket_uri.create_bucket()
+ self.dst_key_uri = self.get_dst_key_uri()
- cls.tracker_file_name = '%s%suri_tracker' % (cls.tmp_dir, os.sep)
+ self.tracker_file_name = '%s%suri_tracker' % (self.tmp_dir, os.sep)
- cls.syntactically_invalid_tracker_file_name = (
- '%s%ssynt_invalid_uri_tracker' % (cls.tmp_dir, os.sep))
- f = open(cls.syntactically_invalid_tracker_file_name, 'w')
+ self.syntactically_invalid_tracker_file_name = (
+ '%s%ssynt_invalid_uri_tracker' % (self.tmp_dir, os.sep))
+ f = open(self.syntactically_invalid_tracker_file_name, 'w')
f.write('ftp://example.com')
f.close()
- cls.invalid_upload_id = cls.get_invalid_upload_id()
- cls.invalid_upload_id_tracker_file_name = (
- '%s%sinvalid_upload_id_tracker' % (cls.tmp_dir, os.sep))
- f = open(cls.invalid_upload_id_tracker_file_name, 'w')
- f.write(cls.invalid_upload_id)
+ self.invalid_upload_id = self.get_invalid_upload_id()
+ self.invalid_upload_id_tracker_file_name = (
+ '%s%sinvalid_upload_id_tracker' % (self.tmp_dir, os.sep))
+ f = open(self.invalid_upload_id_tracker_file_name, 'w')
+ f.write(self.invalid_upload_id)
f.close()
- cls.created_test_data = True
+ self.dst_key = self.dst_key_uri.new_key(validate=False)
+ self.created_test_data = True
- @classmethod
- def tear_down_class(cls):
+ def tearDown(self):
"""
- Deletes bucket and tmp dir created by set_up_class.
+ Deletes any objects, files, and bucket from each test run.
"""
- if not hasattr(cls, 'created_test_data'):
+ if not hasattr(self, 'created_test_data'):
return
+ shutil.rmtree(self.tmp_dir)
+
# Retry (for up to 2 minutes) the bucket gets deleted (it may not
# the first time round, due to eventual consistency of bucket delete
- # operations).
+ # operations). We also retry key deletions because if the key fails
+ # to be deleted on the first attempt, it will stop us from deleting
+ # the bucket.
for i in range(60):
try:
- cls.dst_bucket_uri.delete_bucket()
+ self.dst_key_uri.delete_key()
+ except GSResponseError, e:
+ # Ignore errors attempting to delete the key, because not all
+ # tests will write to the dst key.
+ pass
+ try:
+ self.dst_bucket_uri.delete_bucket()
break
except StorageResponseError:
print 'Test bucket (%s) not yet deleted, still trying' % (
- cls.dst_bucket_uri.uri)
+ self.dst_bucket_uri.uri)
time.sleep(2)
- shutil.rmtree(cls.tmp_dir)
- cls.tmp_dir = tempfile.mkdtemp(prefix=cls.tmpdir_prefix)
+ shutil.rmtree(self.tmp_dir, ignore_errors=True)
+ self.tmp_dir = tempfile.mkdtemp(prefix=self.tmpdir_prefix)
def test_non_resumable_upload(self):
"""
Tests that non-resumable uploads work
"""
- self.small_src_file.seek(0)
- self.dst_key.set_contents_from_file(self.small_src_file)
+        # Seek to end in case it's the first test.
+ self.small_src_file.seek(0, os.SEEK_END)
+ try:
+ self.dst_key.set_contents_from_file(self.small_src_file)
+ self.fail("should fail as need to rewind the filepointer")
+ except AttributeError:
+ pass
+ # Now try calling with a proper rewind.
+ self.dst_key.set_contents_from_file(self.small_src_file, rewind=True)
self.assertEqual(self.small_src_file_size, self.dst_key.size)
self.assertEqual(self.small_src_file_as_string,
self.dst_key.get_contents_as_string())
@@ -494,10 +480,10 @@ class ResumableUploadTests(unittest.TestCase):
test_file = self.build_test_input_file(test_file_size)[1]
harnass = CallbackTestHarnass(fail_after_n_bytes=test_file_size/2,
fp_to_change=test_file,
- # Writing at file_size-5 won't change file
- # size because CallbackTestHarnass only
- # writes 3 bytes.
- fp_change_pos=test_file_size-5)
+ # Write to byte 1, as the CallbackTestHarnass writes
+ # 3 bytes. This will result in the data on the server
+ # being different than the local file.
+ fp_change_pos=1)
res_upload_handler = ResumableUploadHandler(num_retries=1)
try:
self.dst_key.set_contents_from_file(
@@ -585,32 +571,3 @@ class ResumableUploadTests(unittest.TestCase):
finally:
# Restore original protection of dir where tracker_file lives.
os.chmod(self.tmp_dir, save_mod)
-
-if __name__ == '__main__':
- if sys.version_info[:3] < (2, 5, 1):
- sys.exit('These tests must be run on at least Python 2.5.1\n')
-
- # Use -d to see more HTTP protocol detail during tests.
- debug = 0
- opts, args = getopt.getopt(sys.argv[1:], 'd', ['debug'])
- for o, a in opts:
- if o in ('-d', '--debug'):
- debug = 2
-
- test_loader = unittest.TestLoader()
- test_loader.testMethodPrefix = 'test_'
- suite = test_loader.loadTestsFromTestCase(ResumableUploadTests)
- # Seems like there should be a cleaner way to find the test_class.
- test_class = suite.__getattribute__('_tests')[0]
- # We call set_up_class() and tear_down_class() ourselves because we
- # don't assume the user has Python 2.7 (which supports classmethods
- # that do it, with camelCase versions of these names).
- try:
- print 'Setting up %s...' % test_class.get_suite_description()
- test_class.set_up_class(debug)
- print 'Running %s...' % test_class.get_suite_description()
- unittest.TextTestRunner(verbosity=2).run(suite)
- finally:
- print 'Cleaning up after %s...' % test_class.get_suite_description()
- test_class.tear_down_class()
- print ''
diff --git a/tests/sqs/test_connection.py b/tests/sqs/test_connection.py
index 6996a54a..be1a16f7 100644
--- a/tests/sqs/test_connection.py
+++ b/tests/sqs/test_connection.py
@@ -109,6 +109,20 @@ class SQSConnectionTest (unittest.TestCase):
time.sleep(30)
assert queue.count_slow() == 0
+ # try a batch write
+ num_msgs = 10
+ msgs = [(i, 'This is message %d' % i, 0) for i in range(num_msgs)]
+ queue.write_batch(msgs)
+
+ # try to delete all of the messages using batch delete
+ deleted = 0
+ while deleted < num_msgs:
+ time.sleep(5)
+ msgs = queue.get_messages(num_msgs)
+ if msgs:
+ br = queue.delete_message_batch(msgs)
+ deleted += len(br.results)
+
# create another queue so we can test force deletion
# we will also test MHMessage with this queue
queue_name = 'test%d' % int(time.time())
diff --git a/tests/swf/__init__.py b/tests/swf/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/tests/swf/__init__.py
diff --git a/tests/swf/test_layer1.py b/tests/swf/test_layer1.py
new file mode 100644
index 00000000..49f31856
--- /dev/null
+++ b/tests/swf/test_layer1.py
@@ -0,0 +1,245 @@
+"""
+Tests for Layer1 of Simple Workflow
+
+"""
+import os
+import unittest
+import time
+
+from boto.swf.layer1 import Layer1
+from boto.swf import exceptions as swf_exceptions
+
+
+
+# A standard AWS account is permitted a maximum of 100 SWF domains,
+# registered or deprecated. Deleting deprecated domains on demand does
+# not appear possible. Therefore, these tests reuse a default or
+# user-named testing domain. This is named by the user via the environment
+# variable BOTO_SWF_UNITTEST_DOMAIN, if available. Otherwise the default
+# testing domain is literally "boto-swf-unittest-domain". Do not use
+# the testing domain for other purposes.
+BOTO_SWF_UNITTEST_DOMAIN = os.environ.get("BOTO_SWF_UNITTEST_DOMAIN",
+ "boto-swf-unittest-domain")
+
+# A standard domain can have a maximum of 10,000 workflow types and
+# activity types, registered or deprecated. Therefore, eventually any
+# tests which register new workflow types or activity types would begin
+# to fail with LimitExceeded. Instead of generating new workflow types
+# and activity types, these tests reuse the existing types.
+
+# The consequence of the limits and inability to delete deprecated
+# domains, workflow types, and activity types is that the tests in
+# this module will not test for the three register actions:
+# * register_domain
+# * register_workflow_type
+# * register_activity_type
+# Instead, the setUp of the TestCase creates a domain, workflow type,
+# and activity type, expecting that they may already exist, and the
+# tests themselves test other things.
+
+# If you really want to re-test the register_* functions in their
+# ability to create things (rather than just reporting that they
+# already exist), you'll need to use a new BOTO_SWF_UNITTEST_DOMAIN.
+# But, beware that once you hit 100 domains, you cannot create any
+# more, delete existing ones, or rename existing ones.
+
+# Some API calls establish resources, but these resources are not instantly
+# available to the next API call. For testing purposes, it is necessary to
+# have a short pause to avoid having tests fail for invalid reasons.
+PAUSE_SECONDS = 4
+
+
+
+class SimpleWorkflowLayer1TestBase(unittest.TestCase):
+ """
+ There are at least two test cases which share this setUp/tearDown
+ and the class-based parameter definitions:
+ * SimpleWorkflowLayer1Test
+ * tests.swf.test_layer1_workflow_execution.SwfL1WorkflowExecutionTest
+ """
+ # Some params used throughout the tests...
+ # Domain registration params...
+ _domain = BOTO_SWF_UNITTEST_DOMAIN
+ _workflow_execution_retention_period_in_days = 'NONE'
+ _domain_description = 'test workflow domain'
+ # Type registration params used for workflow type and activity type...
+ _task_list = 'tasklist1'
+ # Workflow type registration params...
+ _workflow_type_name = 'wft1'
+ _workflow_type_version = '1'
+ _workflow_type_description = 'wft1 description'
+ _default_child_policy = 'REQUEST_CANCEL'
+ _default_execution_start_to_close_timeout = '600'
+ _default_task_start_to_close_timeout = '60'
+ # Activity type registration params...
+ _activity_type_name = 'at1'
+ _activity_type_version = '1'
+ _activity_type_description = 'at1 description'
+ _default_task_heartbeat_timeout = '30'
+ _default_task_schedule_to_close_timeout = '90'
+ _default_task_schedule_to_start_timeout = '10'
+ _default_task_start_to_close_timeout = '30'
+
+
+ def setUp(self):
+ # Create a Layer1 connection for testing.
+ # Tester needs boto config or keys in environment variables.
+ self.conn = Layer1()
+
+ # Register a domain. Expect None (success) or
+ # SWFDomainAlreadyExistsError.
+ try:
+ r = self.conn.register_domain(self._domain,
+ self._workflow_execution_retention_period_in_days,
+ description=self._domain_description)
+ assert r is None
+ time.sleep(PAUSE_SECONDS)
+ except swf_exceptions.SWFDomainAlreadyExistsError:
+ pass
+
+ # Register a workflow type. Expect None (success) or
+ # SWFTypeAlreadyExistsError.
+ try:
+ r = self.conn.register_workflow_type(self._domain,
+ self._workflow_type_name, self._workflow_type_version,
+ task_list=self._task_list,
+ default_child_policy=self._default_child_policy,
+ default_execution_start_to_close_timeout=
+ self._default_execution_start_to_close_timeout,
+ default_task_start_to_close_timeout=
+ self._default_task_start_to_close_timeout,
+ description=self._workflow_type_description)
+ assert r is None
+ time.sleep(PAUSE_SECONDS)
+ except swf_exceptions.SWFTypeAlreadyExistsError:
+ pass
+
+ # Register an activity type. Expect None (success) or
+ # SWFTypeAlreadyExistsError.
+ try:
+ r = self.conn.register_activity_type(self._domain,
+ self._activity_type_name, self._activity_type_version,
+ task_list=self._task_list,
+ default_task_heartbeat_timeout=
+ self._default_task_heartbeat_timeout,
+ default_task_schedule_to_close_timeout=
+ self._default_task_schedule_to_close_timeout,
+ default_task_schedule_to_start_timeout=
+ self._default_task_schedule_to_start_timeout,
+ default_task_start_to_close_timeout=
+ self._default_task_start_to_close_timeout,
+ description=self._activity_type_description)
+ assert r is None
+ time.sleep(PAUSE_SECONDS)
+ except swf_exceptions.SWFTypeAlreadyExistsError:
+ pass
+
+ def tearDown(self):
+ # Delete what we can...
+ pass
+
+
+
+
+class SimpleWorkflowLayer1Test(SimpleWorkflowLayer1TestBase):
+
+ def test_list_domains(self):
+ # Find the domain.
+ r = self.conn.list_domains('REGISTERED')
+ found = None
+ for info in r['domainInfos']:
+ if info['name'] == self._domain:
+ found = info
+ break
+ self.assertNotEqual(found, None, 'list_domains; test domain not found')
+ # Validate some properties.
+ self.assertEqual(found['description'], self._domain_description,
+ 'list_domains; description does not match')
+ self.assertEqual(found['status'], 'REGISTERED',
+ 'list_domains; status does not match')
+
+ def test_list_workflow_types(self):
+ # Find the workflow type.
+ r = self.conn.list_workflow_types(self._domain, 'REGISTERED')
+ found = None
+ for info in r['typeInfos']:
+ if ( info['workflowType']['name'] == self._workflow_type_name and
+ info['workflowType']['version'] == self._workflow_type_version ):
+ found = info
+ break
+ self.assertNotEqual(found, None, 'list_workflow_types; test type not found')
+ # Validate some properties.
+ self.assertEqual(found['description'], self._workflow_type_description,
+ 'list_workflow_types; description does not match')
+ self.assertEqual(found['status'], 'REGISTERED',
+ 'list_workflow_types; status does not match')
+
+ def test_list_activity_types(self):
+ # Find the activity type.
+ r = self.conn.list_activity_types(self._domain, 'REGISTERED')
+ found = None
+ for info in r['typeInfos']:
+ if info['activityType']['name'] == self._activity_type_name:
+ found = info
+ break
+ self.assertNotEqual(found, None, 'list_activity_types; test type not found')
+ # Validate some properties.
+ self.assertEqual(found['description'], self._activity_type_description,
+ 'list_activity_types; description does not match')
+ self.assertEqual(found['status'], 'REGISTERED',
+ 'list_activity_types; status does not match')
+
+
+ def test_list_closed_workflow_executions(self):
+ # Test various legal ways to call function.
+ latest_date = time.time()
+ oldest_date = time.time() - 3600
+ # With startTimeFilter...
+ self.conn.list_closed_workflow_executions(self._domain,
+ start_latest_date=latest_date, start_oldest_date=oldest_date)
+ # With closeTimeFilter...
+ self.conn.list_closed_workflow_executions(self._domain,
+ close_latest_date=latest_date, close_oldest_date=oldest_date)
+ # With closeStatusFilter...
+ self.conn.list_closed_workflow_executions(self._domain,
+ close_latest_date=latest_date, close_oldest_date=oldest_date,
+ close_status='COMPLETED')
+ # With tagFilter...
+ self.conn.list_closed_workflow_executions(self._domain,
+ close_latest_date=latest_date, close_oldest_date=oldest_date,
+ tag='ig')
+ # With executionFilter...
+ self.conn.list_closed_workflow_executions(self._domain,
+ close_latest_date=latest_date, close_oldest_date=oldest_date,
+ workflow_id='ig')
+ # With typeFilter...
+ self.conn.list_closed_workflow_executions(self._domain,
+ close_latest_date=latest_date, close_oldest_date=oldest_date,
+ workflow_name='ig', workflow_version='ig')
+ # With reverseOrder...
+ self.conn.list_closed_workflow_executions(self._domain,
+ close_latest_date=latest_date, close_oldest_date=oldest_date,
+ reverse_order=True)
+
+
+ def test_list_open_workflow_executions(self):
+ # Test various legal ways to call function.
+ latest_date = time.time()
+ oldest_date = time.time() - 3600
+ # With required params only...
+ self.conn.list_closed_workflow_executions(self._domain,
+ latest_date, oldest_date)
+ # With tagFilter...
+ self.conn.list_closed_workflow_executions(self._domain,
+ latest_date, oldest_date, tag='ig')
+ # With executionFilter...
+ self.conn.list_closed_workflow_executions(self._domain,
+ latest_date, oldest_date, workflow_id='ig')
+ # With typeFilter...
+ self.conn.list_closed_workflow_executions(self._domain,
+ latest_date, oldest_date,
+ workflow_name='ig', workflow_version='ig')
+ # With reverseOrder...
+ self.conn.list_closed_workflow_executions(self._domain,
+ latest_date, oldest_date, reverse_order=True)
+
diff --git a/tests/swf/test_layer1_workflow_execution.py b/tests/swf/test_layer1_workflow_execution.py
new file mode 100644
index 00000000..2f47130f
--- /dev/null
+++ b/tests/swf/test_layer1_workflow_execution.py
@@ -0,0 +1,171 @@
+"""
+Tests for Layer1 of Simple Workflow
+
+"""
+import time
+import uuid
+import json
+import traceback
+
+from boto.swf.layer1_decisions import Layer1Decisions
+
+from test_layer1 import SimpleWorkflowLayer1TestBase
+
+
+
+class SwfL1WorkflowExecutionTest(SimpleWorkflowLayer1TestBase):
+ """
+ test a simple workflow execution
+ """
+ def run_decider(self):
+ """
+ run one iteration of a simple decision engine
+ """
+ # Poll for a decision task.
+ tries = 0
+ while 1:
+ dtask = self.conn.poll_for_decision_task(self._domain,
+ self._task_list, reverse_order=True)
+ if dtask.get('taskToken') is not None:
+ # This means a real decision task has arrived.
+ break
+ time.sleep(2)
+ tries += 1
+ if tries > 10:
+ # Give up if it's taking too long. Probably
+ # means something is broken somewhere else.
+ assert False, 'no decision task occurred'
+
+ # Get the most recent interesting event.
+ ignorable = (
+ 'DecisionTaskScheduled',
+ 'DecisionTaskStarted',
+ 'DecisionTaskTimedOut',
+ )
+ event = None
+ for tevent in dtask['events']:
+ if tevent['eventType'] not in ignorable:
+ event = tevent
+ break
+
+ # Construct the decision response.
+ decisions = Layer1Decisions()
+ if event['eventType'] == 'WorkflowExecutionStarted':
+ activity_id = str(uuid.uuid1())
+ decisions.schedule_activity_task(activity_id,
+ self._activity_type_name, self._activity_type_version,
+ task_list=self._task_list,
+ input=event['workflowExecutionStartedEventAttributes']['input'])
+ elif event['eventType'] == 'ActivityTaskCompleted':
+ decisions.complete_workflow_execution(
+ result=event['activityTaskCompletedEventAttributes']['result'])
+ elif event['eventType'] == 'ActivityTaskFailed':
+ decisions.fail_workflow_execution(
+ reason=event['activityTaskFailedEventAttributes']['reason'],
+ details=event['activityTaskFailedEventAttributes']['details'])
+ else:
+ decisions.fail_workflow_execution(
+ reason='unhandled decision task type; %r' % (event['eventType'],))
+
+ # Send the decision response.
+ r = self.conn.respond_decision_task_completed(dtask['taskToken'],
+ decisions=decisions._data,
+ execution_context=None)
+ assert r is None
+
+
+ def run_worker(self):
+ """
+ run one iteration of a simple worker engine
+ """
+ # Poll for an activity task.
+ tries = 0
+ while 1:
+ atask = self.conn.poll_for_activity_task(self._domain,
+ self._task_list, identity='test worker')
+ if atask.get('activityId') is not None:
+ # This means a real activity task has arrived.
+ break
+ time.sleep(2)
+ tries += 1
+ if tries > 10:
+ # Give up if it's taking too long. Probably
+ # means something is broken somewhere else.
+ assert False, 'no activity task occurred'
+ # Do the work or catch a "work exception."
+ reason = None
+ try:
+ result = json.dumps(sum(json.loads(atask['input'])))
+ except:
+ reason = 'an exception was raised'
+ details = traceback.format_exc()
+ if reason is None:
+ r = self.conn.respond_activity_task_completed(
+ atask['taskToken'], result)
+ else:
+ r = self.conn.respond_activity_task_failed(
+ atask['taskToken'], reason=reason, details=details)
+ assert r is None
+
+
+ def test_workflow_execution(self):
+ # Start a workflow execution whose activity task will succeed.
+ workflow_id = 'wfid-%.2f' % (time.time(),)
+ r = self.conn.start_workflow_execution(self._domain,
+ workflow_id,
+ self._workflow_type_name,
+ self._workflow_type_version,
+ execution_start_to_close_timeout='20',
+ input='[600, 15]')
+ # Need the run_id to lookup the execution history later.
+ run_id = r['runId']
+
+ # Move the workflow execution forward by having the
+ # decider schedule an activity task.
+ self.run_decider()
+
+ # Run the worker to handle the scheduled activity task.
+ self.run_worker()
+
+ # Complete the workflow execution by having the
+ # decider close it down.
+ self.run_decider()
+
+ # Check that the result was stored in the execution history.
+ r = self.conn.get_workflow_execution_history(self._domain,
+ run_id, workflow_id,
+ reverse_order=True)['events'][0]
+ result = r['workflowExecutionCompletedEventAttributes']['result']
+ assert json.loads(result) == 615
+
+
+ def test_failed_workflow_execution(self):
+ # Start a workflow execution whose activity task will fail.
+ workflow_id = 'wfid-%.2f' % (time.time(),)
+ r = self.conn.start_workflow_execution(self._domain,
+ workflow_id,
+ self._workflow_type_name,
+ self._workflow_type_version,
+ execution_start_to_close_timeout='20',
+ input='[600, "s"]')
+ # Need the run_id to lookup the execution history later.
+ run_id = r['runId']
+
+ # Move the workflow execution forward by having the
+ # decider schedule an activity task.
+ self.run_decider()
+
+ # Run the worker to handle the scheduled activity task.
+ self.run_worker()
+
+ # Complete the workflow execution by having the
+ # decider close it down.
+ self.run_decider()
+
+ # Check that the failure was stored in the execution history.
+ r = self.conn.get_workflow_execution_history(self._domain,
+ run_id, workflow_id,
+ reverse_order=True)['events'][0]
+ reason = r['workflowExecutionFailedEventAttributes']['reason']
+ assert reason == 'an exception was raised'
+
diff --git a/tests/test.py b/tests/test.py
index e9af4404..43c58456 100755
--- a/tests/test.py
+++ b/tests/test.py
@@ -40,6 +40,8 @@ from s3.test_multidelete import S3MultiDeleteTest
from s3.test_multipart import S3MultiPartUploadTest
from s3.test_gsconnection import GSConnectionTest
from s3.test_https_cert_validation import CertValidationTest
+from s3.test_resumable_downloads import ResumableDownloadTests
+from s3.test_resumable_uploads import ResumableUploadTests
from ec2.test_connection import EC2ConnectionTest
from ec2.elb.test_connection import ELBConnectionTest
from ec2.cloudwatch.test_connection import CloudWatchConnectionTest
@@ -49,6 +51,8 @@ from cloudfront.test_signed_urls import CloudfrontSignedUrlsTest
from dynamodb.test_layer1 import DynamoDBLayer1Test
from dynamodb.test_layer2 import DynamoDBLayer2Test
from sts.test_session_token import SessionTokenTest
+from swf.test_layer1 import SimpleWorkflowLayer1Test
+from swf.test_layer1_workflow_execution import SwfL1WorkflowExecutionTest
def usage():
print "test.py [-t testsuite] [-v verbosity]"
@@ -109,6 +113,8 @@ def suite(testsuite="all"):
tests.addTest(unittest.makeSuite(S3MFATest))
elif testsuite == "gs":
tests.addTest(unittest.makeSuite(GSConnectionTest))
+ tests.addTest(unittest.makeSuite(ResumableDownloadTests))
+ tests.addTest(unittest.makeSuite(ResumableUploadTests))
elif testsuite == "sqs":
tests.addTest(unittest.makeSuite(SQSConnectionTest))
elif testsuite == "ec2":
@@ -131,6 +137,12 @@ def suite(testsuite="all"):
tests.addTest(unittest.makeSuite(DynamoDBLayer2Test))
elif testsuite == "sts":
tests.addTest(unittest.makeSuite(SessionTokenTest))
+ elif testsuite == "swf":
+ tests.addTest(unittest.makeSuite(SimpleWorkflowLayer1Test))
+ tests.addTest(unittest.makeSuite(SwfL1WorkflowExecutionTest))
+ elif testsuite == "swfL1":
+ tests.addTest(unittest.makeSuite(SimpleWorkflowLayer1Test))
+ tests.addTest(unittest.makeSuite(SwfL1WorkflowExecutionTest))
else:
raise ValueError("Invalid choice.")
return tests