-rw-r--r--  README.rst                                       |   4
-rwxr-xr-x  bin/s3put                                        |   2
-rw-r--r--  boto/__init__.py                                 |   2
-rw-r--r--  boto/cloudsearch/layer1.py                       |   2
-rw-r--r--  boto/connection.py                               |   8
-rw-r--r--  boto/dynamodb2/results.py                        |  22
-rw-r--r--  boto/ec2/autoscale/__init__.py                   |   2
-rw-r--r--  boto/ec2/blockdevicemapping.py                   |  12
-rw-r--r--  boto/ec2/connection.py                           |  11
-rw-r--r--  boto/ec2/image.py                                |  12
-rw-r--r--  boto/ec2/securitygroup.py                        |   3
-rw-r--r--  boto/gs/key.py                                   |   5
-rw-r--r--  boto/provider.py                                 |   4
-rw-r--r--  boto/s3/key.py                                   |  18
-rw-r--r--  docs/source/apps_built_on_boto.rst               |  44
-rw-r--r--  docs/source/index.rst                            |   2
-rw-r--r--  docs/source/releasenotes/dev.rst                 |  31
-rw-r--r--  tests/integration/gs/test_resumable_uploads.py   |   4
-rw-r--r--  tests/unit/dynamodb2/test_table.py               |  54
-rw-r--r--  tests/unit/ec2/test_address.py                   |  16
-rw-r--r--  tests/unit/ec2/test_blockdevicemapping.py        |  54
-rw-r--r--  tests/unit/ec2/test_connection.py                | 132
-rw-r--r--  tests/unit/ec2/test_securitygroup.py             |  22
-rw-r--r--  tests/unit/ec2/test_volume.py                    |  37
-rw-r--r--  tests/unit/swf/__init__.py                       |   0
-rw-r--r--  tests/unit/swf/test_layer2_actors.py             |  73
-rw-r--r--  tests/unit/swf/test_layer2_domain.py             | 112
-rw-r--r--  tests/unit/swf/test_layer2_types.py              |  46
28 files changed, 689 insertions, 45 deletions
diff --git a/README.rst b/README.rst
index a853566a..fe1fde21 100644
--- a/README.rst
+++ b/README.rst
@@ -1,9 +1,9 @@
####
boto
####
-boto 2.13.0
+boto 2.13.1
-Released: 12-September-2013
+Released: 16-September-2013
.. image:: https://travis-ci.org/boto/boto.png?branch=develop
:target: https://travis-ci.org/boto/boto
diff --git a/bin/s3put b/bin/s3put
index faf10ffd..5af2d5a6 100755
--- a/bin/s3put
+++ b/bin/s3put
@@ -290,7 +290,7 @@ def main():
if o in ('-r', '--reduced'):
reduced = True
if o in ('--header'):
- (k, v) = a.split("=")
+ (k, v) = a.split("=", 1)
headers[k] = v
if o in ('--host'):
host = a
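
A quick offline sketch of the failure this one-character change avoids (not part of the patch; the header value below is made up). Metadata values such as base64 strings can legitimately contain '=', so only the first '=' should separate the name from the value:

    arg = 'x-amz-meta-token=abc=='

    # New behaviour: split once, keep the remainder of the value intact.
    k, v = arg.split('=', 1)
    print('%s -> %s' % (k, v))   # x-amz-meta-token -> abc==

    # Old behaviour: arg.split('=') yields three fields, so the tuple
    # unpacking above raises "ValueError: too many values to unpack".
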
diff --git a/boto/__init__.py b/boto/__init__.py
index a498bb32..99c59af2 100644
--- a/boto/__init__.py
+++ b/boto/__init__.py
@@ -36,7 +36,7 @@ import logging.config
import urlparse
from boto.exception import InvalidUriError
-__version__ = '2.13.0'
+__version__ = '2.13.1'
Version = __version__ # for backware compatibility
UserAgent = 'Boto/%s Python/%s %s/%s' % (
diff --git a/boto/cloudsearch/layer1.py b/boto/cloudsearch/layer1.py
index 7d6372c1..4ca763e9 100644
--- a/boto/cloudsearch/layer1.py
+++ b/boto/cloudsearch/layer1.py
@@ -69,7 +69,7 @@ class Layer1(AWSQueryConnection):
validate_certs=validate_certs)
def _required_auth_capability(self):
- return ['sign-v2']
+ return ['hmac-v4']
def get_response(self, doc_path, action, params, path='/',
parent=None, verb='GET', list_marker=None):
diff --git a/boto/connection.py b/boto/connection.py
index a66dba8b..950a8fe7 100644
--- a/boto/connection.py
+++ b/boto/connection.py
@@ -517,6 +517,7 @@ class AWSAuthConnection(object):
self.port = port
else:
self.port = PORTS_BY_SECURITY[is_secure]
+ self.host_header = None
# Timeout used to tell httplib how long to wait for socket timeouts.
# Default is to leave timeout unchanged, which will in turn result in
@@ -541,11 +542,13 @@ class AWSAuthConnection(object):
aws_secret_access_key,
security_token)
- # Allow config file to override default host and port.
+ # Allow config file to override default host, port, and host header.
if self.provider.host:
self.host = self.provider.host
if self.provider.port:
self.port = self.provider.port
+ if self.provider.host_header:
+ self.host_header = self.provider.host_header
self._pool = ConnectionPool()
self._connection = (self.server_name(), self.is_secure)
@@ -942,6 +945,9 @@ class AWSAuthConnection(object):
headers = {}
else:
headers = headers.copy()
+ if (self.host_header and
+ not boto.utils.find_matching_headers('host', headers)):
+ headers['host'] = self.host_header
host = host or self.host
if self.use_proxy:
if not auth_path:
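
The new host_header plumbing only fills in a Host header when the caller has not supplied one; the check is the case-insensitive helper already used elsewhere in this patch. A small offline sketch (header values are made up, not part of the patch):

    import boto.utils

    headers = {'Host': 'bucket.s3.amazonaws.com', 'x-amz-meta-color': 'blue'}

    # Any casing of "host" matches, so make_request leaves an explicit
    # caller-supplied Host header alone and only falls back to host_header
    # when nothing matches.
    print(boto.utils.find_matching_headers('host', headers))             # ['Host']
    print(boto.utils.find_matching_headers('accept-encoding', headers))  # []
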
diff --git a/boto/dynamodb2/results.py b/boto/dynamodb2/results.py
index 23f64046..8ddf3022 100644
--- a/boto/dynamodb2/results.py
+++ b/boto/dynamodb2/results.py
@@ -58,6 +58,12 @@ class ResultSet(object):
self.fetch_more()
+ # It's possible that previous call to ``fetch_more`` may not return
+ # anything useful but there may be more results. Loop until we get
+ # something back, making sure we guard for no results left.
+ while not len(self._results) and self._results_left:
+ self.fetch_more()
+
if self._offset < len(self._results):
return self._results[self._offset]
else:
@@ -106,16 +112,11 @@ class ResultSet(object):
kwargs[self.first_key] = self._last_key_seen
results = self.the_callable(*args, **kwargs)
-
- if not len(results.get('results', [])):
- self._results_left = False
- return
-
- self._results.extend(results['results'])
+ new_results = results.get('results', [])
self._last_key_seen = results.get('last_key', None)
- if self._last_key_seen is None:
- self._results_left = False
+ if len(new_results):
+ self._results.extend(results['results'])
# Decrease the limit, if it's present.
if self.call_kwargs.get('limit'):
@@ -124,7 +125,10 @@ class ResultSet(object):
# results to look for
if 0 == self.call_kwargs['limit']:
self._results_left = False
-
+
+ if self._last_key_seen is None:
+ self._results_left = False
+
class BatchGetResultSet(ResultSet):
def __init__(self, *args, **kwargs):
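
With this rework, a page that is empty but still carries a last_key no longer ends iteration; fetch_more is retried until a non-empty page arrives or the keys run out. A minimal offline sketch (the pager and its data are invented; the new unit test further down exercises the same path with a mid-stream empty page):

    from boto.dynamodb2.results import ResultSet

    pages = [
        {'results': [], 'last_key': 'page-1'},   # empty page, but more to come
        {'results': ['alpha', 'beta']},          # final page, no last_key
    ]

    def pager(limit=10, exclusive_start_key=None):
        return pages.pop(0)

    rs = ResultSet()
    rs.to_call(pager, limit=10)
    print(rs.next())   # 'alpha' -- previously the empty first page ended iteration
    print(rs.next())   # 'beta'
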
diff --git a/boto/ec2/autoscale/__init__.py b/boto/ec2/autoscale/__init__.py
index f82ce9ec..9a8270db 100644
--- a/boto/ec2/autoscale/__init__.py
+++ b/boto/ec2/autoscale/__init__.py
@@ -225,7 +225,7 @@ class AutoScaleConnection(AWSQueryConnection):
if launch_config.ramdisk_id:
params['RamdiskId'] = launch_config.ramdisk_id
if launch_config.block_device_mappings:
- [x.build_list_params(params) for x in launch_config.block_device_mappings]
+ [x.autoscale_build_list_params(params) for x in launch_config.block_device_mappings]
if launch_config.security_groups:
self.build_list_params(params, launch_config.security_groups,
'SecurityGroups')
diff --git a/boto/ec2/blockdevicemapping.py b/boto/ec2/blockdevicemapping.py
index 1fb67b53..78f7e61d 100644
--- a/boto/ec2/blockdevicemapping.py
+++ b/boto/ec2/blockdevicemapping.py
@@ -115,10 +115,18 @@ class BlockDeviceMapping(dict):
elif name == 'item':
self[self.current_name] = self.current_value
- def build_list_params(self, params, prefix=''):
+ def ec2_build_list_params(self, params, prefix=''):
+ pre = '%sBlockDeviceMapping' % prefix
+ return self._build_list_params(params, prefix=pre)
+
+ def autoscale_build_list_params(self, params, prefix=''):
+ pre = '%sBlockDeviceMappings.member' % prefix
+ return self._build_list_params(params, prefix=pre)
+
+ def _build_list_params(self, params, prefix=''):
i = 1
for dev_name in self:
- pre = '%sBlockDeviceMappings.member.%d' % (prefix, i)
+ pre = '%s.%d' % (prefix, i)
params['%s.DeviceName' % pre] = dev_name
block_dev = self[dev_name]
if block_dev.ephemeral_name:
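
The split makes the Autoscaling-style prefix explicit and restores the plain EC2 prefix that run_instances and register_image need. A small offline sketch of the two parameter shapes (device name and snapshot id are made up):

    from boto.ec2.blockdevicemapping import BlockDeviceMapping, BlockDeviceType

    bdm = BlockDeviceMapping()
    bdm['/dev/sdf'] = BlockDeviceType(snapshot_id='snap-12345')

    ec2_params = {}
    bdm.ec2_build_list_params(ec2_params)
    # BlockDeviceMapping.1.DeviceName, BlockDeviceMapping.1.Ebs.SnapshotId, ...

    asg_params = {}
    bdm.autoscale_build_list_params(asg_params)
    # BlockDeviceMappings.member.1.DeviceName, BlockDeviceMappings.member.1.Ebs.SnapshotId, ...

    print(sorted(ec2_params))
    print(sorted(asg_params))
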
diff --git a/boto/ec2/connection.py b/boto/ec2/connection.py
index e3c86826..71da5f0b 100644
--- a/boto/ec2/connection.py
+++ b/boto/ec2/connection.py
@@ -312,7 +312,7 @@ class EC2Connection(AWSQueryConnection):
if root_device_name:
params['RootDeviceName'] = root_device_name
if block_device_map:
- block_device_map.build_list_params(params)
+ block_device_map.ec2_build_list_params(params)
if dry_run:
params['DryRun'] = 'true'
rs = self.get_object('RegisterImage', params, ResultSet, verb='POST')
@@ -843,7 +843,7 @@ class EC2Connection(AWSQueryConnection):
if private_ip_address:
params['PrivateIpAddress'] = private_ip_address
if block_device_map:
- block_device_map.build_list_params(params)
+ block_device_map.ec2_build_list_params(params)
if disable_api_termination:
params['DisableApiTermination'] = 'true'
if instance_initiated_shutdown_behavior:
@@ -1466,7 +1466,7 @@ class EC2Connection(AWSQueryConnection):
if placement_group:
params['%s.Placement.GroupName' % ls] = placement_group
if block_device_map:
- block_device_map.build_list_params(params, '%s.' % ls)
+ block_device_map.ec2_build_list_params(params, '%s.' % ls)
if instance_profile_name:
params['%s.IamInstanceProfile.Name' % ls] = instance_profile_name
if instance_profile_arn:
@@ -4147,7 +4147,7 @@ class EC2Connection(AWSQueryConnection):
params = {}
return self.get_list('DescribeVmTypes', params, [('euca:item', VmType)], verb='POST')
- def copy_image(self, source_region, source_image_id, name,
+ def copy_image(self, source_region, source_image_id, name=None,
description=None, client_token=None, dry_run=False):
"""
:type dry_run: bool
@@ -4157,8 +4157,9 @@ class EC2Connection(AWSQueryConnection):
params = {
'SourceRegion': source_region,
'SourceImageId': source_image_id,
- 'Name': name
}
+ if name is not None:
+ params['Name'] = name
if description is not None:
params['Description'] = description
if client_token is not None:
diff --git a/boto/ec2/image.py b/boto/ec2/image.py
index 6b6d9ce9..3e4e3171 100644
--- a/boto/ec2/image.py
+++ b/boto/ec2/image.py
@@ -32,6 +32,15 @@ class ProductCodes(list):
if name == 'productCode':
self.append(value)
+class BillingProducts(list):
+
+ def startElement(self, name, attrs, connection):
+ pass
+
+ def endElement(self, name, value, connection):
+ if name == 'billingProduct':
+ self.append(value)
+
class Image(TaggedEC2Object):
"""
Represents an EC2 Image
@@ -54,6 +63,7 @@ class Image(TaggedEC2Object):
self.name = None
self.description = None
self.product_codes = ProductCodes()
+ self.billing_products = BillingProducts()
self.block_device_mapping = None
self.root_device_type = None
self.root_device_name = None
@@ -73,6 +83,8 @@ class Image(TaggedEC2Object):
return self.block_device_mapping
elif name == 'productCodes':
return self.product_codes
+ elif name == 'billingProducts':
+ return self.billing_products
else:
return None
diff --git a/boto/ec2/securitygroup.py b/boto/ec2/securitygroup.py
index 3d93faa2..f1e81cd9 100644
--- a/boto/ec2/securitygroup.py
+++ b/boto/ec2/securitygroup.py
@@ -348,7 +348,8 @@ class IPPermissions(object):
else:
setattr(self, name, value)
- def add_grant(self, name=None, owner_id=None, cidr_ip=None, group_id=None):
+ def add_grant(self, name=None, owner_id=None, cidr_ip=None, group_id=None,
+ dry_run=False):
grant = GroupOrCIDR(self)
grant.owner_id = owner_id
grant.group_id = group_id
diff --git a/boto/gs/key.py b/boto/gs/key.py
index 7da1b3dc..4417973b 100644
--- a/boto/gs/key.py
+++ b/boto/gs/key.py
@@ -308,9 +308,10 @@ class Key(S3Key):
chunked_transfer=chunked_transfer, size=size,
hash_algs=hash_algs)
- def delete(self):
+ def delete(self, headers=None):
return self.bucket.delete_key(self.name, version_id=self.version_id,
- generation=self.generation)
+ generation=self.generation,
+ headers=headers)
def add_email_grant(self, permission, email_address):
"""
diff --git a/boto/provider.py b/boto/provider.py
index 457a87e7..e27247cd 100644
--- a/boto/provider.py
+++ b/boto/provider.py
@@ -168,6 +168,7 @@ class Provider(object):
security_token=None):
self.host = None
self.port = None
+ self.host_header = None
self.access_key = access_key
self.secret_key = secret_key
self.security_token = security_token
@@ -185,6 +186,9 @@ class Provider(object):
port_opt_name = '%s_port' % self.HostKeyMap[self.name]
if config.has_option('Credentials', port_opt_name):
self.port = config.getint('Credentials', port_opt_name)
+ host_header_opt_name = '%s_host_header' % self.HostKeyMap[self.name]
+ if config.has_option('Credentials', host_header_opt_name):
+ self.host_header = config.get('Credentials', host_header_opt_name)
def get_access_key(self):
if self._credentials_need_refresh():
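
The new option follows the existing host/port naming pattern, so for the AWS provider it should be read as s3_host_header in the [Credentials] section (the s3 prefix comes from HostKeyMap and is an inference, not spelled out in this hunk). A minimal offline sketch with made-up endpoint values and dummy keys:

    import boto
    from boto.provider import Provider

    if not boto.config.has_section('Credentials'):
        boto.config.add_section('Credentials')
    boto.config.set('Credentials', 's3_host', 'storage.internal.example.com')
    boto.config.set('Credentials', 's3_host_header', 'bucket.s3.amazonaws.com')

    provider = Provider('aws', access_key='dummy-key', secret_key='dummy-secret')
    print(provider.host)         # storage.internal.example.com
    print(provider.host_header)  # bucket.s3.amazonaws.com
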
diff --git a/boto/s3/key.py b/boto/s3/key.py
index 80ba3e93..2b7ae73a 100644
--- a/boto/s3/key.py
+++ b/boto/s3/key.py
@@ -503,20 +503,21 @@ class Key(object):
else:
setattr(self, name, value)
- def exists(self):
+ def exists(self, headers=None):
"""
Returns True if the key exists
:rtype: bool
:return: Whether the key exists on S3
"""
- return bool(self.bucket.lookup(self.name))
+ return bool(self.bucket.lookup(self.name, headers=headers))
- def delete(self):
+ def delete(self, headers=None):
"""
Delete this key from S3
"""
- return self.bucket.delete_key(self.name, version_id=self.version_id)
+ return self.bucket.delete_key(self.name, version_id=self.version_id,
+ headers=headers)
def get_metadata(self, name):
return self.metadata.get(name)
@@ -749,7 +750,14 @@ class Key(object):
raise provider.storage_data_error(
'Cannot retry failed request. fp does not support seeking.')
- http_conn.putrequest(method, path)
+ # If the caller explicitly specified host header, tell putrequest
+ # not to add a second host header. Similarly for accept-encoding.
+ skips = {}
+ if boto.utils.find_matching_headers('host', headers):
+ skips['skip_host'] = 1
+ if boto.utils.find_matching_headers('accept-encoding', headers):
+ skips['skip_accept_encoding'] = 1
+ http_conn.putrequest(method, path, **skips)
for key in headers:
http_conn.putheader(key, headers[key])
http_conn.endheaders()
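
The skips matter because httplib's putrequest adds its own Host and Accept-Encoding headers unless told otherwise, so forwarding a caller-supplied Host header without them would send the header twice. A Python 2 sketch of the mechanism (not part of the patch; it only inspects httplib's internal buffer and never opens a connection):

    import httplib  # Python 2, matching the boto 2.x codebase

    conn = httplib.HTTPConnection('bucket.s3.amazonaws.com')
    conn.putrequest('GET', '/key', skip_host=1)         # suppress the automatic Host header
    conn.putheader('Host', 'bucket.s3.amazonaws.com')   # the caller-supplied one
    print(conn._buffer)  # exactly one Host line in the buffered request
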
diff --git a/docs/source/apps_built_on_boto.rst b/docs/source/apps_built_on_boto.rst
new file mode 100644
index 00000000..0fc40666
--- /dev/null
+++ b/docs/source/apps_built_on_boto.rst
@@ -0,0 +1,44 @@
+.. _apps_built_on_boto:
+
+==========================
+Applications Built On Boto
+==========================
+
+Many people have taken Boto and layered on additional functionality, then shared
+it with the community. This is a (partial) list of applications that use Boto.
+
+If you have an application or utility you've open-sourced that uses Boto &
+you'd like it listed here, please submit a `pull request`_ adding it!
+
+.. _`pull request`: https://github.com/boto/boto/pulls
+
+**botornado**
+ https://pypi.python.org/pypi/botornado
+ An asynchronous AWS client on Tornado. This is a dirty work to move boto
+ onto Tornado ioloop. Currently works with SQS and S3.
+
+**boto_rsync**
+ https://pypi.python.org/pypi/boto_rsync
+ boto-rsync is a rough adaptation of boto's s3put script which has been
+ reengineered to more closely mimic rsync. Its goal is to provide a familiar
+ rsync-like wrapper for boto's S3 and Google Storage interfaces.
+
+**boto_utils**
+ https://pypi.python.org/pypi/boto_utils
+ Command-line tools for interacting with Amazon Web Services, based on Boto.
+ Includes utils for S3, SES & Cloudwatch.
+
+**django-storages**
+ https://pypi.python.org/pypi/django-storages
+ A collection of storage backends for Django. Features the ``S3BotoStorage``
+ backend for storing media on S3.
+
+**mr.awsome**
+ https://pypi.python.org/pypi/mr.awsome
+ mr.awsome is a commandline-tool (aws) to manage and control Amazon
+ Webservice's EC2 instances. Once configured with your AWS key, you can
+ create, delete, monitor and ssh into instances, as well as perform scripted
+ tasks on them (via fabfiles). Examples are adding additional,
+ pre-configured webservers to a cluster (including updating the load
+ balancer), performing automated software deployments and creating backups -
+ each with just one call from the commandline.
diff --git a/docs/source/index.rst b/docs/source/index.rst
index c6c20e2d..2922b40c 100644
--- a/docs/source/index.rst
+++ b/docs/source/index.rst
@@ -91,6 +91,7 @@ Currently Supported Services
Additional Resources
--------------------
+* :doc:`Applications Built On Boto <apps_built_on_boto>`
* :doc:`Command Line Utilities <commandline>`
* :doc:`Boto Config Tutorial <boto_config_tut>`
* :doc:`Contributing to Boto <contributing>`
@@ -170,6 +171,7 @@ Release Notes
support_tut
dynamodb2_tut
migrations/dynamodb_v1_to_v2
+ apps_built_on_boto
ref/*
releasenotes/*
diff --git a/docs/source/releasenotes/dev.rst b/docs/source/releasenotes/dev.rst
new file mode 100644
index 00000000..1036ef36
--- /dev/null
+++ b/docs/source/releasenotes/dev.rst
@@ -0,0 +1,31 @@
+boto v2.xx.x
+============
+
+:date: 2013/xx/xx
+
+This release adds ____.
+
+
+Features
+--------
+
+* . (:issue:``, :sha:``)
+
+
+Bugfixes
+--------
+
+* Fixed test fallout from the EC2 dry-run change. (:sha:`2159456`)
+* Added tests for more of SWF's ``layer2``. (:issue:`1718`, :sha:`35fb741`,
+ :sha:`a84d401`, :sha:`1cf1641`, :sha:`a36429c`)
+* Changed EC2 to allow ``name`` to be optional in calls to ``copy_image``.
+ (:issue:`1672`, :sha:`26285aa`)
+* Added ``billingProducts`` support to EC2 ``Image``. (:issue:`1703`,
+ :sha:`cccadaf`, :sha:`3914e91`)
+* Fixed a place in EC2 where ``dry_run`` was passed but not handled. (:issue:`1722`,
+ :sha:`0a52c82`)
+* Fixed ``run_instances`` with a block device mapping. (:issue:`1723`,
+ :sha:`974743f`, :sha:`9049f05`, :sha:`d7edafc`)
+* Several documentation improvements/fixes:
+
+ * Added the "Apps Built On Boto" doc. (:sha:`3bd628c`)
diff --git a/tests/integration/gs/test_resumable_uploads.py b/tests/integration/gs/test_resumable_uploads.py
index 7c601451..68626eb2 100644
--- a/tests/integration/gs/test_resumable_uploads.py
+++ b/tests/integration/gs/test_resumable_uploads.py
@@ -308,7 +308,7 @@ class ResumableUploadTests(GSTestCase):
Tests that resumable upload correctly sets passed metadata
"""
res_upload_handler = ResumableUploadHandler()
- headers = {'Content-Type' : 'text/plain', 'Content-Encoding' : 'gzip',
+ headers = {'Content-Type' : 'text/plain', 'Content-Encoding' : 'utf8',
'x-goog-meta-abc' : 'my meta', 'x-goog-acl' : 'public-read'}
small_src_file_as_string, small_src_file = self.make_small_file()
small_src_file.seek(0)
@@ -321,7 +321,7 @@ class ResumableUploadTests(GSTestCase):
dst_key.get_contents_as_string())
dst_key.open_read()
self.assertEqual('text/plain', dst_key.content_type)
- self.assertEqual('gzip', dst_key.content_encoding)
+ self.assertEqual('utf8', dst_key.content_encoding)
self.assertTrue('abc' in dst_key.metadata)
self.assertEqual('my meta', str(dst_key.metadata['abc']))
acl = dst_key.get_acl()
diff --git a/tests/unit/dynamodb2/test_table.py b/tests/unit/dynamodb2/test_table.py
index 9b247d64..6eab6339 100644
--- a/tests/unit/dynamodb2/test_table.py
+++ b/tests/unit/dynamodb2/test_table.py
@@ -797,6 +797,60 @@ class ResultSetTestCase(unittest.TestCase):
results.to_call(none, limit=20)
self.assertRaises(StopIteration, results.next)
+ def test_iteration_sporadic_pages(self):
+ # Some pages have no/incomplete results but have a ``LastEvaluatedKey``
+ # (for instance, scans with filters), so we need to accommodate that.
+ def sporadic():
+ # A dict, because Python closures have read-only access to the
+ # reference itself.
+ count = {'value': -1}
+
+ def _wrapper(limit=10, exclusive_start_key=None):
+ count['value'] = count['value'] + 1
+
+ if count['value'] == 0:
+ # Full page.
+ return {
+ 'results': [
+ 'Result #0',
+ 'Result #1',
+ 'Result #2',
+ 'Result #3',
+ ],
+ 'last_key': 'page-1'
+ }
+ elif count['value'] == 1:
+ # Empty page but continue.
+ return {
+ 'results': [],
+ 'last_key': 'page-2'
+ }
+ elif count['value'] == 2:
+ # Final page.
+ return {
+ 'results': [
+ 'Result #4',
+ 'Result #5',
+ 'Result #6',
+ ],
+ }
+
+ return _wrapper
+
+ results = ResultSet()
+ results.to_call(sporadic(), limit=20)
+ # First page
+ self.assertEqual(results.next(), 'Result #0')
+ self.assertEqual(results.next(), 'Result #1')
+ self.assertEqual(results.next(), 'Result #2')
+ self.assertEqual(results.next(), 'Result #3')
+ # Second page (misses!)
+ # Moves on to the third page
+ self.assertEqual(results.next(), 'Result #4')
+ self.assertEqual(results.next(), 'Result #5')
+ self.assertEqual(results.next(), 'Result #6')
+ self.assertRaises(StopIteration, results.next)
+
def test_list(self):
self.assertEqual(list(self.results), [
'Hello john #0',
diff --git a/tests/unit/ec2/test_address.py b/tests/unit/ec2/test_address.py
index f2661979..765ce422 100644
--- a/tests/unit/ec2/test_address.py
+++ b/tests/unit/ec2/test_address.py
@@ -25,15 +25,25 @@ class AddressTest(unittest.TestCase):
def test_release_calls_connection_release_address_with_correct_args(self):
self.address.release()
- self.address.connection.release_address.assert_called_with("192.168.1.1")
+ self.address.connection.release_address.assert_called_with(
+ "192.168.1.1",
+ dry_run=False
+ )
def test_associate_calls_connection_associate_address_with_correct_args(self):
self.address.associate(1)
- self.address.connection.associate_address.assert_called_with(1, "192.168.1.1")
+ self.address.connection.associate_address.assert_called_with(
+ 1,
+ "192.168.1.1",
+ dry_run=False
+ )
def test_disassociate_calls_connection_disassociate_address_with_correct_args(self):
self.address.disassociate()
- self.address.connection.disassociate_address.assert_called_with("192.168.1.1")
+ self.address.connection.disassociate_address.assert_called_with(
+ "192.168.1.1",
+ dry_run=False
+ )
if __name__ == "__main__":
unittest.main()
diff --git a/tests/unit/ec2/test_blockdevicemapping.py b/tests/unit/ec2/test_blockdevicemapping.py
index 02ecf582..78539744 100644
--- a/tests/unit/ec2/test_blockdevicemapping.py
+++ b/tests/unit/ec2/test_blockdevicemapping.py
@@ -1,8 +1,12 @@
import mock
import unittest
+from boto.ec2.connection import EC2Connection
from boto.ec2.blockdevicemapping import BlockDeviceType, BlockDeviceMapping
+from tests.unit import AWSMockServiceTestCase
+
+
class BlockDeviceTypeTests(unittest.TestCase):
def setUp(self):
self.block_device_type = BlockDeviceType()
@@ -75,5 +79,55 @@ class BlockDeviceMappingTests(unittest.TestCase):
self.block_device_mapping.endElement("item", "some item", None)
self.assertEqual(self.block_device_mapping["some name"], "some value")
+
+class TestLaunchConfiguration(AWSMockServiceTestCase):
+ connection_class = EC2Connection
+
+ def default_body(self):
+ # This is a dummy response
+ return """
+ <DescribeLaunchConfigurationsResponse>
+ </DescribeLaunchConfigurationsResponse>
+ """
+
+ def test_run_instances_block_device_mapping(self):
+ # Same as the test in ``unit/ec2/autoscale/test_group.py:TestLaunchConfiguration``,
+ # but with modified request parameters (due to a mismatch between EC2 &
+ # Autoscaling).
+ self.set_http_response(status_code=200)
+ dev_sdf = BlockDeviceType(snapshot_id='snap-12345')
+ dev_sdg = BlockDeviceType(snapshot_id='snap-12346')
+
+ bdm = BlockDeviceMapping()
+ bdm['/dev/sdf'] = dev_sdf
+ bdm['/dev/sdg'] = dev_sdg
+
+ response = self.service_connection.run_instances(
+ image_id='123456',
+ instance_type='m1.large',
+ security_groups=['group1', 'group2'],
+ block_device_map=bdm
+ )
+
+ self.assert_request_parameters({
+ 'Action': 'RunInstances',
+ 'BlockDeviceMapping.1.DeviceName': '/dev/sdf',
+ 'BlockDeviceMapping.1.Ebs.DeleteOnTermination': 'false',
+ 'BlockDeviceMapping.1.Ebs.SnapshotId': 'snap-12345',
+ 'BlockDeviceMapping.2.DeviceName': '/dev/sdg',
+ 'BlockDeviceMapping.2.Ebs.DeleteOnTermination': 'false',
+ 'BlockDeviceMapping.2.Ebs.SnapshotId': 'snap-12346',
+ 'ImageId': '123456',
+ 'InstanceType': 'm1.large',
+ 'MaxCount': 1,
+ 'MinCount': 1,
+ 'SecurityGroup.1': 'group1',
+ 'SecurityGroup.2': 'group2',
+ }, ignore_params_values=[
+ 'Version', 'AWSAccessKeyId', 'SignatureMethod', 'SignatureVersion',
+ 'Timestamp'
+ ])
+
+
if __name__ == "__main__":
unittest.main()
diff --git a/tests/unit/ec2/test_connection.py b/tests/unit/ec2/test_connection.py
index eab36160..05c22ce9 100644
--- a/tests/unit/ec2/test_connection.py
+++ b/tests/unit/ec2/test_connection.py
@@ -484,6 +484,47 @@ class TestCopySnapshot(TestEC2ConnectionBase):
'SignatureVersion', 'Timestamp',
'Version'])
+class TestCopyImage(TestEC2ConnectionBase):
+ def default_body(self):
+ return """
+ <CopyImageResponse xmlns="http://ec2.amazonaws.com/doc/2013-07-15/">
+ <requestId>request_id</requestId>
+ <imageId>ami-copied-id</imageId>
+ </CopyImageResponse>
+ """
+
+ def test_copy_image(self):
+ self.set_http_response(status_code=200)
+ copied_ami = self.ec2.copy_image('us-west-2', 'ami-id',
+ 'name', 'description', 'client-token')
+ self.assertEqual(copied_ami.image_id, 'ami-copied-id')
+
+ self.assert_request_parameters({
+ 'Action': 'CopyImage',
+ 'Description': 'description',
+ 'Name': 'name',
+ 'SourceRegion': 'us-west-2',
+ 'SourceImageId': 'ami-id',
+ 'ClientToken': 'client-token'},
+ ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
+ 'SignatureVersion', 'Timestamp',
+ 'Version'])
+ def test_copy_image_without_name(self):
+ self.set_http_response(status_code=200)
+ copied_ami = self.ec2.copy_image('us-west-2', 'ami-id',
+ description='description',
+ client_token='client-token')
+ self.assertEqual(copied_ami.image_id, 'ami-copied-id')
+
+ self.assert_request_parameters({
+ 'Action': 'CopyImage',
+ 'Description': 'description',
+ 'SourceRegion': 'us-west-2',
+ 'SourceImageId': 'ami-id',
+ 'ClientToken': 'client-token'},
+ ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
+ 'SignatureVersion', 'Timestamp',
+ 'Version'])
class TestAccountAttributes(TestEC2ConnectionBase):
def default_body(self):
@@ -630,6 +671,97 @@ class TestGetAllNetworkInterfaces(TestEC2ConnectionBase):
self.assertEqual(5, parsed[0].attachment.device_index)
+class TestGetAllImages(TestEC2ConnectionBase):
+ def default_body(self):
+ return """
+<DescribeImagesResponse xmlns="http://ec2.amazonaws.com/doc/2013-02-01/">
+ <requestId>e32375e8-4ac3-4099-a8bf-3ec902b9023e</requestId>
+ <imagesSet>
+ <item>
+ <imageId>ami-abcd1234</imageId>
+ <imageLocation>111111111111/windows2008r2-hvm-i386-20130702</imageLocation>
+ <imageState>available</imageState>
+ <imageOwnerId>111111111111</imageOwnerId>
+ <isPublic>false</isPublic>
+ <architecture>i386</architecture>
+ <imageType>machine</imageType>
+ <platform>windows</platform>
+ <viridianEnabled>true</viridianEnabled>
+ <name>Windows Test</name>
+ <description>Windows Test Description</description>
+ <billingProducts>
+ <item>
+ <billingProduct>bp-6ba54002</billingProduct>
+ </item>
+ </billingProducts>
+ <rootDeviceType>ebs</rootDeviceType>
+ <rootDeviceName>/dev/sda1</rootDeviceName>
+ <blockDeviceMapping>
+ <item>
+ <deviceName>/dev/sda1</deviceName>
+ <ebs>
+ <snapshotId>snap-abcd1234</snapshotId>
+ <volumeSize>30</volumeSize>
+ <deleteOnTermination>true</deleteOnTermination>
+ <volumeType>standard</volumeType>
+ </ebs>
+ </item>
+ <item>
+ <deviceName>xvdb</deviceName>
+ <virtualName>ephemeral0</virtualName>
+ </item>
+ <item>
+ <deviceName>xvdc</deviceName>
+ <virtualName>ephemeral1</virtualName>
+ </item>
+ <item>
+ <deviceName>xvdd</deviceName>
+ <virtualName>ephemeral2</virtualName>
+ </item>
+ <item>
+ <deviceName>xvde</deviceName>
+ <virtualName>ephemeral3</virtualName>
+ </item>
+ </blockDeviceMapping>
+ <virtualizationType>hvm</virtualizationType>
+ <hypervisor>xen</hypervisor>
+ </item>
+ </imagesSet>
+</DescribeImagesResponse>"""
+
+ def test_get_all_images(self):
+ self.set_http_response(status_code=200)
+ parsed = self.ec2.get_all_images()
+ self.assertEquals(1, len(parsed))
+ self.assertEquals("ami-abcd1234", parsed[0].id)
+ self.assertEquals("111111111111/windows2008r2-hvm-i386-20130702", parsed[0].location)
+ self.assertEquals("available", parsed[0].state)
+ self.assertEquals("111111111111", parsed[0].ownerId)
+ self.assertEquals("111111111111", parsed[0].owner_id)
+ self.assertEquals(False, parsed[0].is_public)
+ self.assertEquals("i386", parsed[0].architecture)
+ self.assertEquals("machine", parsed[0].type)
+ self.assertEquals(None, parsed[0].kernel_id)
+ self.assertEquals(None, parsed[0].ramdisk_id)
+ self.assertEquals(None, parsed[0].owner_alias)
+ self.assertEquals("windows", parsed[0].platform)
+ self.assertEquals("Windows Test", parsed[0].name)
+ self.assertEquals("Windows Test Description", parsed[0].description)
+ self.assertEquals("ebs", parsed[0].root_device_type)
+ self.assertEquals("/dev/sda1", parsed[0].root_device_name)
+ self.assertEquals("hvm", parsed[0].virtualization_type)
+ self.assertEquals("xen", parsed[0].hypervisor)
+ self.assertEquals(None, parsed[0].instance_lifecycle)
+
+ # 1 billing product parsed into a list
+ self.assertEquals(1, len(parsed[0].billing_products))
+ self.assertEquals("bp-6ba54002", parsed[0].billing_products[0])
+
+ # Just verify length, there is already a block_device_mapping test
+ self.assertEquals(5, len(parsed[0].block_device_mapping))
+
+ # TODO: No tests for product codes?
+
class TestModifyInterfaceAttribute(TestEC2ConnectionBase):
def default_body(self):
diff --git a/tests/unit/ec2/test_securitygroup.py b/tests/unit/ec2/test_securitygroup.py
index 2876ffff..c2cd5bca 100644
--- a/tests/unit/ec2/test_securitygroup.py
+++ b/tests/unit/ec2/test_securitygroup.py
@@ -6,6 +6,8 @@ from tests.unit import AWSMockServiceTestCase
import mock
from boto.ec2.connection import EC2Connection
+from boto.ec2.securitygroup import SecurityGroup
+
DESCRIBE_SECURITY_GROUP = r"""<?xml version="1.0" encoding="UTF-8"?>
<DescribeSecurityGroupsResponse xmlns="http://ec2.amazonaws.com/doc/2013-06-15/">
@@ -182,3 +184,23 @@ class TestDescribeSecurityGroups(AWSMockServiceTestCase):
self.assertEqual(1, len(instances))
self.assertEqual(groups[0].id, instances[0].groups[0].id)
+
+
+class SecurityGroupTest(unittest.TestCase):
+ def test_add_rule(self):
+ sg = SecurityGroup()
+ self.assertEqual(len(sg.rules), 0)
+
+ # Regression: ``dry_run`` was being passed (but unhandled) before.
+ sg.add_rule(
+ ip_protocol='http',
+ from_port='80',
+ to_port='8080',
+ src_group_name='groupy',
+ src_group_owner_id='12345',
+ cidr_ip='10.0.0.1',
+ src_group_group_id='54321',
+ dry_run=False
+ )
+ self.assertEqual(len(sg.rules), 1)
+
diff --git a/tests/unit/ec2/test_volume.py b/tests/unit/ec2/test_volume.py
index fd2a4553..14f0bcb6 100644
--- a/tests/unit/ec2/test_volume.py
+++ b/tests/unit/ec2/test_volume.py
@@ -38,7 +38,12 @@ class VolumeTests(unittest.TestCase):
def test_startElement_calls_TaggedEC2Object_startElement_with_correct_args(self, startElement):
volume = Volume()
volume.startElement("some name", "some attrs", None)
- startElement.assert_called_with(volume, "some name", "some attrs", None)
+ startElement.assert_called_with(
+ volume,
+ "some name",
+ "some attrs",
+ None
+ )
@mock.patch("boto.ec2.volume.TaggedEC2Object.startElement")
def test_startElement_retval_not_None_returns_correct_thing(self, startElement):
@@ -120,43 +125,57 @@ class VolumeTests(unittest.TestCase):
def test_delete_calls_delete_volume(self):
self.volume_one.connection = mock.Mock()
self.volume_one.delete()
- self.volume_one.connection.delete_volume.assert_called_with(1)
+ self.volume_one.connection.delete_volume.assert_called_with(
+ 1,
+ dry_run=False
+ )
def test_attach_calls_attach_volume(self):
self.volume_one.connection = mock.Mock()
self.volume_one.attach("instance_id", "/dev/null")
- self.volume_one.connection.attach_volume.assert_called_with(1, "instance_id", "/dev/null")
+ self.volume_one.connection.attach_volume.assert_called_with(
+ 1,
+ "instance_id",
+ "/dev/null",
+ dry_run=False
+ )
def test_detach_calls_detach_volume(self):
self.volume_one.connection = mock.Mock()
self.volume_one.detach()
self.volume_one.connection.detach_volume.assert_called_with(
- 1, 2, "/dev/null", False)
+ 1, 2, "/dev/null", False, dry_run=False)
def test_detach_with_no_attach_data(self):
self.volume_two.connection = mock.Mock()
self.volume_two.detach()
self.volume_two.connection.detach_volume.assert_called_with(
- 1, None, None, False)
+ 1, None, None, False, dry_run=False)
def test_detach_with_force_calls_detach_volume_with_force(self):
self.volume_one.connection = mock.Mock()
self.volume_one.detach(True)
self.volume_one.connection.detach_volume.assert_called_with(
- 1, 2, "/dev/null", True)
+ 1, 2, "/dev/null", True, dry_run=False)
def test_create_snapshot_calls_connection_create_snapshot(self):
self.volume_one.connection = mock.Mock()
self.volume_one.create_snapshot()
self.volume_one.connection.create_snapshot.assert_called_with(
- 1, None)
+ 1,
+ None,
+ dry_run=False
+ )
def test_create_snapshot_with_description(self):
self.volume_one.connection = mock.Mock()
self.volume_one.create_snapshot("some description")
self.volume_one.connection.create_snapshot.assert_called_with(
- 1, "some description")
+ 1,
+ "some description",
+ dry_run=False
+ )
def test_volume_state_returns_status(self):
retval = self.volume_one.volume_state()
@@ -186,7 +205,7 @@ class VolumeTests(unittest.TestCase):
self.volume_one.connection.get_all_snapshots.return_value = []
self.volume_one.snapshots("owner", "restorable_by")
self.volume_one.connection.get_all_snapshots.assert_called_with(
- owner="owner", restorable_by="restorable_by")
+ owner="owner", restorable_by="restorable_by", dry_run=False)
class AttachmentSetTests(unittest.TestCase):
def check_that_attribute_has_been_set(self, name, value, attribute):
diff --git a/tests/unit/swf/__init__.py b/tests/unit/swf/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/tests/unit/swf/__init__.py
diff --git a/tests/unit/swf/test_layer2_actors.py b/tests/unit/swf/test_layer2_actors.py
new file mode 100644
index 00000000..c783f62d
--- /dev/null
+++ b/tests/unit/swf/test_layer2_actors.py
@@ -0,0 +1,73 @@
+import boto.swf.layer2
+from boto.swf.layer2 import Decider, ActivityWorker
+from tests.unit import unittest
+from mock import Mock
+
+
+class TestActors(unittest.TestCase):
+
+ def setUp(self):
+ boto.swf.layer2.Layer1 = Mock()
+ self.worker = ActivityWorker(name='test-worker', domain='test', task_list='test_list')
+ self.decider = Decider(name='test-worker', domain='test', task_list='test_list')
+ self.worker._swf = Mock()
+ self.decider._swf = Mock()
+
+ def test_decider_pass_tasktoken(self):
+ self.decider._swf.poll_for_decision_task.return_value = {
+ 'events': [{'eventId': 1,
+ 'eventTimestamp': 1379019427.953,
+ 'eventType': 'WorkflowExecutionStarted',
+ 'workflowExecutionStartedEventAttributes': {
+ 'childPolicy': 'TERMINATE',
+ 'executionStartToCloseTimeout': '3600',
+ 'parentInitiatedEventId': 0,
+ 'taskList': {'name': 'test_list'},
+ 'taskStartToCloseTimeout': '123',
+ 'workflowType': {'name': 'test_workflow_name',
+ 'version': 'v1'}}},
+ {'decisionTaskScheduledEventAttributes':
+ {'startToCloseTimeout': '123',
+ 'taskList': {'name': 'test_list'}},
+ 'eventId': 2,
+ 'eventTimestamp': 1379019427.953,
+ 'eventType': 'DecisionTaskScheduled'},
+ {'decisionTaskStartedEventAttributes': {'scheduledEventId': 2},
+ 'eventId': 3, 'eventTimestamp': 1379019495.585,
+ 'eventType': 'DecisionTaskStarted'}],
+ 'previousStartedEventId': 0, 'startedEventId': 3,
+ 'taskToken': 'my_specific_task_token',
+ 'workflowExecution': {'runId': 'fwr243dsa324132jmflkfu0943tr09=',
+ 'workflowId': 'test_workflow_name-v1-1379019427'},
+ 'workflowType': {'name': 'test_workflow_name', 'version': 'v1'}}
+
+ self.decider.poll()
+ self.decider.complete()
+
+ self.decider._swf.respond_decision_task_completed.assert_called_with('my_specific_task_token', None)
+ self.assertEqual('my_specific_task_token', self.decider.last_tasktoken)
+
+ def test_worker_pass_tasktoken(self):
+ task_token = 'worker_task_token'
+ self.worker._swf.poll_for_activity_task.return_value = {
+ 'activityId': 'SomeActivity-1379020713',
+ 'activityType': {'name': 'SomeActivity', 'version': '1.0'},
+ 'startedEventId': 6,
+ 'taskToken': task_token,
+ 'workflowExecution': {'runId': '12T026NzGK5c4eMti06N9O3GHFuTDaNyA+8LFtoDkAwfE=',
+ 'workflowId': 'MyWorkflow-1.0-1379020705'}}
+
+ self.worker.poll()
+
+ self.worker.cancel(details='Cancelling!')
+ self.worker.complete(result='Done!')
+ self.worker.fail(reason='Failure!')
+ self.worker.heartbeat()
+
+ self.worker._swf.respond_activity_task_canceled.assert_called_with(task_token, 'Cancelling!')
+ self.worker._swf.respond_activity_task_completed.assert_called_with(task_token, 'Done!')
+ self.worker._swf.respond_activity_task_failed.assert_called_with(task_token, None, 'Failure!')
+ self.worker._swf.record_activity_task_heartbeat.assert_called_with(task_token, None)
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/tests/unit/swf/test_layer2_domain.py b/tests/unit/swf/test_layer2_domain.py
new file mode 100644
index 00000000..b56cb4b1
--- /dev/null
+++ b/tests/unit/swf/test_layer2_domain.py
@@ -0,0 +1,112 @@
+import boto.swf.layer2
+from boto.swf.layer2 import Domain, ActivityType, WorkflowType, WorkflowExecution
+from tests.unit import unittest
+from mock import Mock
+
+
+class TestDomain(unittest.TestCase):
+
+ def setUp(self):
+ boto.swf.layer2.Layer1 = Mock()
+ self.domain = Domain(name='test-domain', description='My test domain')
+ self.domain.aws_access_key_id = 'inheritable access key'
+ self.domain.aws_secret_access_key = 'inheritable secret key'
+
+ def test_domain_instantiation(self):
+ self.assertEquals('test-domain', self.domain.name)
+ self.assertEquals('My test domain', self.domain.description)
+
+ def test_domain_list_activities(self):
+ self.domain._swf.list_activity_types.return_value = {
+ 'typeInfos': [{'activityType': {'name': 'DeleteLocalFile',
+ 'version': '1.0'},
+ 'creationDate': 1332853651.235,
+ 'status': 'REGISTERED'},
+ {'activityType': {'name': 'DoUpdate', 'version': 'test'},
+ 'creationDate': 1333463734.528,
+ 'status': 'REGISTERED'},
+ {'activityType': {'name': 'GrayscaleTransform',
+ 'version': '1.0'},
+ 'creationDate': 1332853651.18,
+ 'status': 'REGISTERED'},
+ {'activityType': {'name': 'S3Download', 'version': '1.0'},
+ 'creationDate': 1332853651.264,
+ 'status': 'REGISTERED'},
+ {'activityType': {'name': 'S3Upload', 'version': '1.0'},
+ 'creationDate': 1332853651.314,
+ 'status': 'REGISTERED'},
+ {'activityType': {'name': 'SepiaTransform', 'version': '1.1'},
+ 'creationDate': 1333373797.734,
+ 'status': 'REGISTERED'}]}
+
+ expected_names = ('DeleteLocalFile', 'GrayscaleTransform', 'S3Download',
+ 'S3Upload', 'SepiaTransform', 'DoUpdate')
+
+ activity_types = self.domain.activities()
+ self.assertEquals(6, len(activity_types))
+ for activity_type in activity_types:
+ self.assertIsInstance(activity_type, ActivityType)
+ self.assertTrue(activity_type.name in expected_names)
+
+ def test_domain_list_workflows(self):
+ self.domain._swf.list_workflow_types.return_value = {
+ 'typeInfos': [{'creationDate': 1332853651.136,
+ 'description': 'Image processing sample workflow type',
+ 'status': 'REGISTERED',
+ 'workflowType': {'name': 'ProcessFile', 'version': '1.0'}},
+ {'creationDate': 1333551719.89,
+ 'status': 'REGISTERED',
+ 'workflowType': {'name': 'test_workflow_name',
+ 'version': 'v1'}}]}
+ expected_names = ('ProcessFile', 'test_workflow_name')
+
+ workflow_types = self.domain.workflows()
+ self.assertEquals(2, len(workflow_types))
+ for workflow_type in workflow_types:
+ self.assertIsInstance(workflow_type, WorkflowType)
+ self.assertTrue(workflow_type.name in expected_names)
+ self.assertEquals(self.domain.aws_access_key_id, workflow_type.aws_access_key_id)
+ self.assertEquals(self.domain.aws_secret_access_key, workflow_type.aws_secret_access_key)
+ self.assertEquals(self.domain.name, workflow_type.domain)
+
+ def test_domain_list_executions(self):
+ self.domain._swf.list_open_workflow_executions.return_value = {
+ 'executionInfos': [{'cancelRequested': False,
+ 'execution': {'runId': '12OeDTyoD27TDaafViz/QIlCHrYzspZmDgj0coIfjm868=',
+ 'workflowId': 'ProcessFile-1.0-1378933928'},
+ 'executionStatus': 'OPEN',
+ 'startTimestamp': 1378933928.676,
+ 'workflowType': {'name': 'ProcessFile',
+ 'version': '1.0'}},
+ {'cancelRequested': False,
+ 'execution': {'runId': '12GwBkx4hH6t2yaIh8LYxy5HyCM6HcyhDKePJCg0/ciJk=',
+ 'workflowId': 'ProcessFile-1.0-1378933927'},
+ 'executionStatus': 'OPEN',
+ 'startTimestamp': 1378933927.919,
+ 'workflowType': {'name': 'ProcessFile',
+ 'version': '1.0'}},
+ {'cancelRequested': False,
+ 'execution': {'runId': '12oRG3vEWrQ7oYBV+Bqi33Fht+ZRCYTt+tOdn5kLVcwKI=',
+ 'workflowId': 'ProcessFile-1.0-1378933926'},
+ 'executionStatus': 'OPEN',
+ 'startTimestamp': 1378933927.04,
+ 'workflowType': {'name': 'ProcessFile',
+ 'version': '1.0'}},
+ {'cancelRequested': False,
+ 'execution': {'runId': '12qrdcpYmad2cjnqJcM4Njm3qrCGvmRFR1wwQEt+a2ako=',
+ 'workflowId': 'ProcessFile-1.0-1378933874'},
+ 'executionStatus': 'OPEN',
+ 'startTimestamp': 1378933874.956,
+ 'workflowType': {'name': 'ProcessFile',
+ 'version': '1.0'}}]}
+
+ executions = self.domain.executions()
+ self.assertEquals(4, len(executions))
+ for wf_execution in executions:
+ self.assertIsInstance(wf_execution, WorkflowExecution)
+ self.assertEquals(self.domain.aws_access_key_id, wf_execution.aws_access_key_id)
+ self.assertEquals(self.domain.aws_secret_access_key, wf_execution.aws_secret_access_key)
+ self.assertEquals(self.domain.name, wf_execution.domain)
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/tests/unit/swf/test_layer2_types.py b/tests/unit/swf/test_layer2_types.py
new file mode 100644
index 00000000..d9b7db0d
--- /dev/null
+++ b/tests/unit/swf/test_layer2_types.py
@@ -0,0 +1,46 @@
+import boto.swf.layer2
+from boto.swf.layer2 import ActivityType, WorkflowType, WorkflowExecution
+from tests.unit import unittest
+from mock import Mock, ANY
+
+
+class TestTypes(unittest.TestCase):
+
+ def setUp(self):
+ boto.swf.layer2.Layer1 = Mock()
+
+ def test_workflow_type_register_defaults(self):
+ wf_type = WorkflowType(name='name', domain='test', version='1')
+ wf_type.register()
+
+ wf_type._swf.register_workflow_type.assert_called_with('test', 'name', '1',
+ default_execution_start_to_close_timeout=ANY,
+ default_task_start_to_close_timeout=ANY,
+ default_child_policy=ANY
+ )
+
+ def test_activity_type_register_defaults(self):
+ act_type = ActivityType(name='name', domain='test', version='1')
+ act_type.register()
+
+ act_type._swf.register_activity_type.assert_called_with('test', 'name', '1',
+ default_task_heartbeat_timeout=ANY,
+ default_task_schedule_to_close_timeout=ANY,
+ default_task_schedule_to_start_timeout=ANY,
+ default_task_start_to_close_timeout=ANY
+ )
+
+ def test_workflow_type_start_execution(self):
+ wf_type = WorkflowType(name='name', domain='test', version='1')
+ run_id = '122aJcg6ic7MRAkjDRzLBsqU/R49qt5D0LPHycT/6ArN4='
+ wf_type._swf.start_workflow_execution.return_value = {'runId': run_id}
+
+ execution = wf_type.start(task_list='hello_world')
+
+ self.assertIsInstance(execution, WorkflowExecution)
+ self.assertEquals(wf_type.name, execution.name)
+ self.assertEquals(wf_type.version, execution.version)
+ self.assertEquals(run_id, execution.runId)
+
+if __name__ == '__main__':
+ unittest.main()