author     James Saryerwinnie <jls.npi@gmail.com>    2012-06-15 10:05:20 -0700
committer  James Saryerwinnie <jls.npi@gmail.com>    2012-06-15 11:19:52 -0700
commit     036bc2b399a85fd98f0745dd1f7d50da45287ef4 (patch)
tree       4663c9df4f199a13d61447a6d3081fbea12a416e
parent     87f69f9bde3d44952e171abb6234ab4930db5d6b (diff)
download   boto-036bc2b399a85fd98f0745dd1f7d50da45287ef4.tar.gz
Tag the existing integration tests with service names
I've added the following service name tags that should be compatible with the previous '-t' arg:

* s3
* ssl
* s3mfa
* gs
* sqs
* ec2
* autoscale
* sdb
* dynamodb
* sts
* swf
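For context, the tagging convention this commit applies is a truthy class attribute named after the service, which nose's attrib plugin (already imported in the diff below) can select via its -a option, e.g. nosetests -a s3 or nosetests -a 'ec2,!notdefault'. The sketch below illustrates the pattern; the class name S3ExampleTest is hypothetical and not part of this commit.

import unittest

from nose.plugins.attrib import attr


class S3ExampleTest(unittest.TestCase):
    # Class-level tag: nose's attrib plugin treats any truthy class
    # attribute as a selectable tag, so ``nosetests -a s3`` picks this up.
    s3 = True

    # Method-level tag via the ``attr`` decorator, as used for the
    # non-default tests in this commit.
    @attr('notdefault')
    def test_tagged_method(self):
        self.assertTrue(True)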
-rw-r--r--                tests/integration/autoscale/test_connection.py             11
-rw-r--r--                tests/integration/dynamodb/test_layer1.py                    1
-rw-r--r--                tests/integration/dynamodb/test_layer2.py                    1
-rw-r--r--                tests/integration/ec2/cloudwatch/test_connection.py          1
-rw-r--r--                tests/integration/ec2/elb/test_connection.py                 1
-rw-r--r--                tests/integration/ec2/test_connection.py                    10
-rw-r--r--                tests/integration/s3/test_bucket.py                          1
-rw-r--r--                tests/integration/s3/test_connection.py                      2
-rw-r--r--                tests/integration/s3/test_encryption.py                      1
-rw-r--r--                tests/integration/s3/test_gsconnection.py                    3
-rw-r--r--                tests/integration/s3/test_https_cert_validation.py           5
-rw-r--r--                tests/integration/s3/test_key.py                             1
-rw-r--r--                tests/integration/s3/test_multidelete.py                     3
-rw-r--r--                tests/integration/s3/test_multipart.py                       4
-rw-r--r-- [-rwxr-xr-x]   tests/integration/s3/test_resumable_downloads.py             1
-rw-r--r-- [-rwxr-xr-x]   tests/integration/s3/test_resumable_uploads.py               3
-rw-r--r--                tests/integration/sdb/test_connection.py                     1
-rw-r--r--                tests/integration/sts/test_session_token.py                  6
-rw-r--r--                tests/integration/swf/test_layer1.py                         1
-rw-r--r--                tests/integration/swf/test_layer1_workflow_execution.py     22
20 files changed, 52 insertions, 27 deletions
diff --git a/tests/integration/autoscale/test_connection.py b/tests/integration/autoscale/test_connection.py
index a650dcef..cf8d99ac 100644
--- a/tests/integration/autoscale/test_connection.py
+++ b/tests/integration/autoscale/test_connection.py
@@ -35,7 +35,10 @@ from boto.ec2.autoscale.scheduled import ScheduledUpdateGroupAction
from boto.ec2.autoscale.instance import Instance
from boto.ec2.autoscale.tag import Tag
+
class AutoscaleConnectionTest(unittest.TestCase):
+ ec2 = True
+ autoscale = True
def test_basic(self):
# NB: as it says on the tin these are really basic tests that only
@@ -146,10 +149,10 @@ class AutoscaleConnectionTest(unittest.TestCase):
if group.name == group_name:
if not group.instances:
instances = False
-
+
group.delete()
lc.delete()
-
+
found = True
while found:
found = False
@@ -158,9 +161,7 @@ class AutoscaleConnectionTest(unittest.TestCase):
for tag in tags:
if tag.resource_id == group_name and tag.key == 'foo':
found = True
-
+
assert not found
print '--- tests completed ---'
-
-
diff --git a/tests/integration/dynamodb/test_layer1.py b/tests/integration/dynamodb/test_layer1.py
index b7227fc9..8149ef55 100644
--- a/tests/integration/dynamodb/test_layer1.py
+++ b/tests/integration/dynamodb/test_layer1.py
@@ -35,6 +35,7 @@ from boto.sts.credentials import Credentials
json_doc = """{"access_key": "ASIAIV7R2NUUJ6SB7GKQ", "secret_key": "eIfijGxJlejHDSQiaGr6b7U805U0GKWmllCTt2ZM", "request_id": "28c17897-4555-11e1-8bb1-2529f165f2f0", "expiration": "2012-01-23T00:59:45.617Z", "session_token": "AQoDYXdzEPn//////////wEasAGDXeGY8bx36NLRSA1v3dy2x00k3FNA2KVsMEXkQuKY08gPTtYs2tefZTBsTjgjC+O6j8ieoB1on2bPyCq872+Yq3cipls8jna+PNSEcsXtC8CJBKai/FfYNg1XUHam6EUCtRiUHvqztOVgaGqUUS1UbrBKB7kKSXzgKrJ9AT0bvqi4hZS0ayaU8969f2HIbN9psXhRBKpJyB9FUPuVYpYYZsz9NY3y2kGtK+dgfrKvxyDxxfL4BA=="}"""
class DynamoDBLayer1Test (unittest.TestCase):
+ dynamodb = True
def test_layer1_basic(self):
print '--- running DynamoDB Layer1 tests ---'
diff --git a/tests/integration/dynamodb/test_layer2.py b/tests/integration/dynamodb/test_layer2.py
index 2e9c8fa0..afaf97c2 100644
--- a/tests/integration/dynamodb/test_layer2.py
+++ b/tests/integration/dynamodb/test_layer2.py
@@ -34,6 +34,7 @@ from boto.dynamodb.types import get_dynamodb_type
from boto.dynamodb.condition import *
class DynamoDBLayer2Test (unittest.TestCase):
+ dynamodb = True
def test_layer2_basic(self):
print '--- running Amazon DynamoDB Layer2 tests ---'
diff --git a/tests/integration/ec2/cloudwatch/test_connection.py b/tests/integration/ec2/cloudwatch/test_connection.py
index c6883da1..922c17b1 100644
--- a/tests/integration/ec2/cloudwatch/test_connection.py
+++ b/tests/integration/ec2/cloudwatch/test_connection.py
@@ -100,6 +100,7 @@ DESCRIBE_ALARMS_BODY = """<DescribeAlarmsResponse xmlns="http://monitoring.amazo
class CloudWatchConnectionTest(unittest.TestCase):
+ ec2 = True
def test_build_list_params(self):
c = CloudWatchConnection()
diff --git a/tests/integration/ec2/elb/test_connection.py b/tests/integration/ec2/elb/test_connection.py
index 492766fb..2d574d98 100644
--- a/tests/integration/ec2/elb/test_connection.py
+++ b/tests/integration/ec2/elb/test_connection.py
@@ -28,6 +28,7 @@ import unittest
from boto.ec2.elb import ELBConnection
class ELBConnectionTest(unittest.TestCase):
+ ec2 = True
def tearDown(self):
""" Deletes all load balancers after every test. """
diff --git a/tests/integration/ec2/test_connection.py b/tests/integration/ec2/test_connection.py
index bb1c5c89..e80c63d3 100644
--- a/tests/integration/ec2/test_connection.py
+++ b/tests/integration/ec2/test_connection.py
@@ -27,13 +27,17 @@ Some unit tests for the EC2Connection
import unittest
import time
-from boto.ec2.connection import EC2Connection
import telnetlib
import socket
+from nose.plugins.attrib import attr
+from boto.ec2.connection import EC2Connection
+
class EC2ConnectionTest (unittest.TestCase):
+ ec2 = True
- def test_1_basic(self):
+ @attr('notdefault')
+ def test_launch_permissions(self):
# this is my user_id, if you want to run these tests you should
# replace this with yours or they won't work
user_id = '963068290131'
@@ -57,7 +61,9 @@ class EC2ConnectionTest (unittest.TestCase):
d = image.get_launch_permissions()
assert 'groups' not in d
+ def test_1_basic(self):
# create 2 new security groups
+ c = EC2Connection()
group1_name = 'test-%d' % int(time.time())
group_desc = 'This is a security group created during unit testing'
group1 = c.create_security_group(group1_name, group_desc)
diff --git a/tests/integration/s3/test_bucket.py b/tests/integration/s3/test_bucket.py
index 58045efa..de3ba198 100644
--- a/tests/integration/s3/test_bucket.py
+++ b/tests/integration/s3/test_bucket.py
@@ -34,6 +34,7 @@ from boto.s3.bucketlogging import BucketLogging
from boto.s3.acl import Grant
class S3BucketTest (unittest.TestCase):
+ s3 = True
def setUp(self):
self.conn = S3Connection()
diff --git a/tests/integration/s3/test_connection.py b/tests/integration/s3/test_connection.py
index c2d0a50b..104d6063 100644
--- a/tests/integration/s3/test_connection.py
+++ b/tests/integration/s3/test_connection.py
@@ -34,7 +34,9 @@ from boto.s3.connection import S3Connection
from boto.s3.bucket import Bucket
from boto.exception import S3PermissionsError, S3ResponseError
+
class S3ConnectionTest (unittest.TestCase):
+ s3 = True
def test_1_basic(self):
print '--- running S3Connection tests ---'
diff --git a/tests/integration/s3/test_encryption.py b/tests/integration/s3/test_encryption.py
index 91ef71c0..c5b1bc67 100644
--- a/tests/integration/s3/test_encryption.py
+++ b/tests/integration/s3/test_encryption.py
@@ -51,6 +51,7 @@ json_policy = """{
}"""
class S3EncryptionTest (unittest.TestCase):
+ s3 = True
def test_1_versions(self):
print '--- running S3Encryption tests ---'
diff --git a/tests/integration/s3/test_gsconnection.py b/tests/integration/s3/test_gsconnection.py
index f50d66e9..332864c4 100644
--- a/tests/integration/s3/test_gsconnection.py
+++ b/tests/integration/s3/test_gsconnection.py
@@ -60,7 +60,8 @@ def has_google_credentials():
"Google credentials are required to run the Google "
"Cloud Storage tests. Update your boto.cfg to run "
"these tests.")
-class GSConnectionTest (unittest.TestCase):
+class GSConnectionTest(unittest.TestCase):
+ gs = True
def test_1_basic(self):
"""basic regression test for Google Cloud Storage"""
diff --git a/tests/integration/s3/test_https_cert_validation.py b/tests/integration/s3/test_https_cert_validation.py
index c42a2ef3..9222a4a7 100644
--- a/tests/integration/s3/test_https_cert_validation.py
+++ b/tests/integration/s3/test_https_cert_validation.py
@@ -63,9 +63,8 @@ PROXY_PORT = os.environ.get('PROXY_PORT', '3128')
INVALID_HOSTNAME_HOST = os.environ.get('INVALID_HOSTNAME_HOST', 'www')
-@attr('notdefault')
-class CertValidationTest (unittest.TestCase):
-
+@attr('notdefault', 'ssl')
+class CertValidationTest(unittest.TestCase):
def setUp(self):
# Clear config
for section in boto.config.sections():
diff --git a/tests/integration/s3/test_key.py b/tests/integration/s3/test_key.py
index 02e43475..6aecb22e 100644
--- a/tests/integration/s3/test_key.py
+++ b/tests/integration/s3/test_key.py
@@ -34,6 +34,7 @@ from boto.s3.key import Key
from boto.exception import S3ResponseError
class S3KeyTest (unittest.TestCase):
+ s3 = True
def setUp(self):
self.conn = S3Connection()
diff --git a/tests/integration/s3/test_multidelete.py b/tests/integration/s3/test_multidelete.py
index 5f8f2cf3..b22581bb 100644
--- a/tests/integration/s3/test_multidelete.py
+++ b/tests/integration/s3/test_multidelete.py
@@ -34,7 +34,8 @@ from boto.s3.prefix import Prefix
from boto.s3.connection import S3Connection
from boto.exception import S3ResponseError
-class S3MultiDeleteTest (unittest.TestCase):
+class S3MultiDeleteTest(unittest.TestCase):
+ s3 = True
def setUp(self):
self.conn = S3Connection()
diff --git a/tests/integration/s3/test_multipart.py b/tests/integration/s3/test_multipart.py
index 8e93a6d8..51d34a51 100644
--- a/tests/integration/s3/test_multipart.py
+++ b/tests/integration/s3/test_multipart.py
@@ -37,7 +37,9 @@ import time
import StringIO
from boto.s3.connection import S3Connection
-class S3MultiPartUploadTest (unittest.TestCase):
+
+class S3MultiPartUploadTest(unittest.TestCase):
+ s3 = True
def setUp(self):
self.conn = S3Connection(is_secure=False)
diff --git a/tests/integration/s3/test_resumable_downloads.py b/tests/integration/s3/test_resumable_downloads.py
index b813d1cb..414acf9f 100755..100644
--- a/tests/integration/s3/test_resumable_downloads.py
+++ b/tests/integration/s3/test_resumable_downloads.py
@@ -61,6 +61,7 @@ class ResumableDownloadTests(unittest.TestCase):
"""
Resumable download test suite.
"""
+ gs = True
def get_suite_description(self):
return 'Resumable download test suite'
diff --git a/tests/integration/s3/test_resumable_uploads.py b/tests/integration/s3/test_resumable_uploads.py
index 714dda32..c9aa5716 100755..100644
--- a/tests/integration/s3/test_resumable_uploads.py
+++ b/tests/integration/s3/test_resumable_uploads.py
@@ -61,6 +61,7 @@ class ResumableUploadTests(unittest.TestCase):
"""
Resumable upload test suite.
"""
+ gs = True
def get_suite_description(self):
return 'Resumable upload test suite'
@@ -448,7 +449,7 @@ class ResumableUploadTests(unittest.TestCase):
# This abort should be a hard abort (file size changing during
# transfer).
self.assertEqual(e.disposition, ResumableTransferDisposition.ABORT)
- self.assertNotEqual(e.message.find('file size changed'), -1, e.message)
+ self.assertNotEqual(e.message.find('file size changed'), -1, e.message)
def test_upload_with_file_size_change_during_upload(self):
"""
diff --git a/tests/integration/sdb/test_connection.py b/tests/integration/sdb/test_connection.py
index a834a9df..72a26cf8 100644
--- a/tests/integration/sdb/test_connection.py
+++ b/tests/integration/sdb/test_connection.py
@@ -31,6 +31,7 @@ from boto.sdb.connection import SDBConnection
from boto.exception import SDBResponseError
class SDBConnectionTest (unittest.TestCase):
+ sdb = True
def test_1_basic(self):
print '--- running SDBConnection tests ---'
diff --git a/tests/integration/sts/test_session_token.py b/tests/integration/sts/test_session_token.py
index 630ebf1c..fa33d5f0 100644
--- a/tests/integration/sts/test_session_token.py
+++ b/tests/integration/sts/test_session_token.py
@@ -15,7 +15,7 @@
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
-# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
@@ -32,6 +32,7 @@ from boto.sts.credentials import Credentials
from boto.s3.connection import S3Connection
class SessionTokenTest (unittest.TestCase):
+ sts = True
def test_session_token(self):
print '--- running Session Token tests ---'
@@ -54,7 +55,7 @@ class SessionTokenTest (unittest.TestCase):
os.unlink('token.json')
assert not token.is_expired()
-
+
# Try using the session token with S3
s3 = S3Connection(aws_access_key_id=token.access_key,
aws_secret_access_key=token.secret_key,
@@ -62,4 +63,3 @@ class SessionTokenTest (unittest.TestCase):
buckets = s3.get_all_buckets()
print '--- tests completed ---'
-
diff --git a/tests/integration/swf/test_layer1.py b/tests/integration/swf/test_layer1.py
index 49f31856..02ad0519 100644
--- a/tests/integration/swf/test_layer1.py
+++ b/tests/integration/swf/test_layer1.py
@@ -57,6 +57,7 @@ class SimpleWorkflowLayer1TestBase(unittest.TestCase):
* SimpleWorkflowLayer1Test
* tests.swf.test_layer1_workflow_execution.SwfL1WorkflowExecutionTest
"""
+ swf = True
# Some params used throughout the tests...
# Domain registration params...
_domain = BOTO_SWF_UNITTEST_DOMAIN
diff --git a/tests/integration/swf/test_layer1_workflow_execution.py b/tests/integration/swf/test_layer1_workflow_execution.py
index 38f95887..6f59a7ad 100644
--- a/tests/integration/swf/test_layer1_workflow_execution.py
+++ b/tests/integration/swf/test_layer1_workflow_execution.py
@@ -17,14 +17,16 @@ class SwfL1WorkflowExecutionTest(SimpleWorkflowLayer1TestBase):
"""
test a simple workflow execution
"""
+ swf = True
+
def run_decider(self):
"""
run one iteration of a simple decision engine
"""
# Poll for a decision task.
- tries = 0
+ tries = 0
while True:
- dtask = self.conn.poll_for_decision_task(self._domain,
+ dtask = self.conn.poll_for_decision_task(self._domain,
self._task_list, reverse_order=True)
if dtask.get('taskToken') is not None:
# This means a real decision task has arrived.
@@ -52,8 +54,8 @@ class SwfL1WorkflowExecutionTest(SimpleWorkflowLayer1TestBase):
decisions = Layer1Decisions()
if event['eventType'] == 'WorkflowExecutionStarted':
activity_id = str(uuid.uuid1())
- decisions.schedule_activity_task(activity_id,
- self._activity_type_name, self._activity_type_version,
+ decisions.schedule_activity_task(activity_id,
+ self._activity_type_name, self._activity_type_version,
task_list=self._task_list,
input=event['workflowExecutionStartedEventAttributes']['input'])
elif event['eventType'] == 'ActivityTaskCompleted':
@@ -79,9 +81,9 @@ class SwfL1WorkflowExecutionTest(SimpleWorkflowLayer1TestBase):
run one iteration of a simple worker engine
"""
# Poll for an activity task.
- tries = 0
+ tries = 0
while True:
- atask = self.conn.poll_for_activity_task(self._domain,
+ atask = self.conn.poll_for_activity_task(self._domain,
self._task_list, identity='test worker')
if atask.get('activityId') is not None:
# This means a real activity task has arrived.
@@ -102,12 +104,12 @@ class SwfL1WorkflowExecutionTest(SimpleWorkflowLayer1TestBase):
if reason is None:
r = self.conn.respond_activity_task_completed(
atask['taskToken'], result)
- else:
+ else:
r = self.conn.respond_activity_task_failed(
atask['taskToken'], reason=reason, details=details)
assert r is None
-
+
def test_workflow_execution(self):
# Start a workflow execution whose activity task will succeed.
workflow_id = 'wfid-%.2f' % (time.time(),)
@@ -132,7 +134,7 @@ class SwfL1WorkflowExecutionTest(SimpleWorkflowLayer1TestBase):
self.run_decider()
# Check that the result was stored in the execution history.
- r = self.conn.get_workflow_execution_history(self._domain,
+ r = self.conn.get_workflow_execution_history(self._domain,
run_id, workflow_id,
reverse_order=True)['events'][0]
result = r['workflowExecutionCompletedEventAttributes']['result']
@@ -163,7 +165,7 @@ class SwfL1WorkflowExecutionTest(SimpleWorkflowLayer1TestBase):
self.run_decider()
# Check that the failure was stored in the execution history.
- r = self.conn.get_workflow_execution_history(self._domain,
+ r = self.conn.get_workflow_execution_history(self._domain,
run_id, workflow_id,
reverse_order=True)['events'][0]
reason = r['workflowExecutionFailedEventAttributes']['reason']