author     Daniel G. Taylor <danielgtaylor@gmail.com>   2014-08-04 15:42:30 -0700
committer  Daniel G. Taylor <danielgtaylor@gmail.com>   2014-08-04 15:42:30 -0700
commit     ff3d8159af3c816303785e023a4182aacb6aabf5 (patch)
tree       3f9ccb173904eecef829d07f9acbddc6e4b444e5 /tests
parent     60603b6514268a76d9f84a8f381fd645271492a8 (diff)
parent     8aea0d3381d22d7dc0a58c488db6f5d429083c9c (diff)
download   boto-ff3d8159af3c816303785e023a4182aacb6aabf5.tar.gz
Merge branch 'release-2.32.1' (tag: 2.32.1)
Conflicts: docs/source/index.rst
Diffstat (limited to 'tests')
-rw-r--r--  tests/integration/dynamodb2/test_highlevel.py    |   2
-rw-r--r--  tests/integration/glacier/test_layer1.py         |   2
-rw-r--r--  tests/integration/s3/mock_storage_service.py     |   6
-rw-r--r--  tests/integration/ses/test_connection.py         |   2
-rw-r--r--  tests/integration/sns/test_connection.py         |   2
-rw-r--r--  tests/integration/sqs/test_bigmessage.py         |   8
-rw-r--r--  tests/unit/auth/test_sigv4.py                    |  14
-rw-r--r--  tests/unit/cloudsearch/test_document.py          |  16
-rw-r--r--  tests/unit/dynamodb/test_types.py                |   9
-rw-r--r--  tests/unit/emr/test_connection.py                |  95
-rw-r--r--  tests/unit/glacier/test_job.py                   |   2
-rw-r--r--  tests/unit/glacier/test_layer1.py                |   2
-rw-r--r--  tests/unit/glacier/test_layer2.py                | 148
-rw-r--r--  tests/unit/glacier/test_utils.py                 |  53
-rw-r--r--  tests/unit/glacier/test_vault.py                 |   6
-rw-r--r--  tests/unit/glacier/test_writer.py                |   3
-rw-r--r--  tests/unit/s3/test_key.py                        |   2
-rw-r--r--  tests/unit/test_connection.py                    |   8
-rw-r--r--  tests/unit/utils/test_utils.py                   |  16
-rw-r--r--  tests/unit/vpc/test_vpc_peering_connection.py    |  54
20 files changed, 337 insertions, 113 deletions
diff --git a/tests/integration/dynamodb2/test_highlevel.py b/tests/integration/dynamodb2/test_highlevel.py
index 90771422..0f893b14 100644
--- a/tests/integration/dynamodb2/test_highlevel.py
+++ b/tests/integration/dynamodb2/test_highlevel.py
@@ -23,8 +23,6 @@
"""
Tests for DynamoDB v2 high-level abstractions.
"""
-from __future__ import with_statement
-
import os
import time
diff --git a/tests/integration/glacier/test_layer1.py b/tests/integration/glacier/test_layer1.py
index effb5628..0d38da27 100644
--- a/tests/integration/glacier/test_layer1.py
+++ b/tests/integration/glacier/test_layer1.py
@@ -36,7 +36,7 @@ class TestGlacierLayer1(unittest.TestCase):
glacier = Layer1()
glacier.create_vault('l1testvault')
self.addCleanup(glacier.delete_vault, 'l1testvault')
- upload_id = glacier.initiate_multipart_upload('l1testvault', 4*1024*1024,
+ upload_id = glacier.initiate_multipart_upload('l1testvault', 4 * 1024 * 1024,
'double spaces here')['UploadId']
self.addCleanup(glacier.abort_multipart_upload, 'l1testvault', upload_id)
response = glacier.list_multipart_uploads('l1testvault')['UploadsList']
diff --git a/tests/integration/s3/mock_storage_service.py b/tests/integration/s3/mock_storage_service.py
index d7c59930..8b5ff28d 100644
--- a/tests/integration/s3/mock_storage_service.py
+++ b/tests/integration/s3/mock_storage_service.py
@@ -30,6 +30,7 @@ import copy
import boto
import base64
import re
+from hashlib import md5
from boto.utils import compute_md5
from boto.utils import find_matching_headers
@@ -37,11 +38,6 @@ from boto.utils import merge_headers_by_name
from boto.s3.prefix import Prefix
from boto.compat import six
-try:
- from hashlib import md5
-except ImportError:
- from md5 import md5
-
NOT_IMPL = None
diff --git a/tests/integration/ses/test_connection.py b/tests/integration/ses/test_connection.py
index f1d66e8c..83b99944 100644
--- a/tests/integration/ses/test_connection.py
+++ b/tests/integration/ses/test_connection.py
@@ -1,5 +1,3 @@
-from __future__ import with_statement
-
from tests.unit import unittest
from boto.ses.connection import SESConnection
diff --git a/tests/integration/sns/test_connection.py b/tests/integration/sns/test_connection.py
index e5af487e..6a359b1b 100644
--- a/tests/integration/sns/test_connection.py
+++ b/tests/integration/sns/test_connection.py
@@ -19,8 +19,6 @@
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
-from __future__ import with_statement
-
from tests.compat import mock, unittest
from boto.compat import http_client
from boto.sns import connect_to_region
diff --git a/tests/integration/sqs/test_bigmessage.py b/tests/integration/sqs/test_bigmessage.py
index ce1e983d..bb52dde1 100644
--- a/tests/integration/sqs/test_bigmessage.py
+++ b/tests/integration/sqs/test_bigmessage.py
@@ -24,8 +24,6 @@
"""
Some unit tests for the SQSConnection
"""
-from __future__ import with_statement
-
import time
from threading import Timer
from tests.unit import unittest
@@ -42,7 +40,7 @@ class TestBigMessage(unittest.TestCase):
def test_1_basic(self):
c = boto.connect_sqs()
-
+
# create a queue so we can test BigMessage
queue_name = 'test%d' % int(time.time())
timeout = 60
@@ -61,7 +59,7 @@ class TestBigMessage(unittest.TestCase):
fp = StringIO(msg_body)
s3_url = 's3://%s' % queue_name
message = queue.new_message(fp, s3_url=s3_url)
-
+
queue.write(message)
time.sleep(30)
@@ -69,7 +67,7 @@ class TestBigMessage(unittest.TestCase):
# Make sure msg body is in bucket
self.assertTrue(bucket.lookup(s3_object_name))
-
+
m = queue.read()
self.assertEqual(m.get_body().decode('utf-8'), msg_body)
diff --git a/tests/unit/auth/test_sigv4.py b/tests/unit/auth/test_sigv4.py
index 8c16ebd0..674ec0a7 100644
--- a/tests/unit/auth/test_sigv4.py
+++ b/tests/unit/auth/test_sigv4.py
@@ -20,6 +20,7 @@
# IN THE SOFTWARE.
#
import copy
+import pickle
import os
from tests.compat import unittest, mock
from tests.unit import MockServiceWithConfigTestCase
@@ -29,6 +30,7 @@ from boto.auth import S3HmacAuthV4Handler
from boto.auth import detect_potential_s3sigv4
from boto.auth import detect_potential_sigv4
from boto.connection import HTTPRequest
+from boto.provider import Provider
from boto.regioninfo import RegionInfo
@@ -237,6 +239,18 @@ class TestSigV4Handler(unittest.TestCase):
scope = auth.credential_scope(self.request)
self.assertEqual(scope, '20121121/us-west-2/sqs/aws4_request')
+ def test_pickle_works(self):
+ provider = Provider('aws', access_key='access_key',
+ secret_key='secret_key')
+ auth = HmacAuthV4Handler('queue.amazonaws.com', None, provider)
+
+ # Pickle it!
+ pickled = pickle.dumps(auth)
+
+ # Now restore it
+ auth2 = pickle.loads(pickled)
+ self.assertEqual(auth.host, auth2.host)
+
class TestS3HmacAuthV4Handler(unittest.TestCase):
def setUp(self):
diff --git a/tests/unit/cloudsearch/test_document.py b/tests/unit/cloudsearch/test_document.py
index 34c3cad2..929b62be 100644
--- a/tests/unit/cloudsearch/test_document.py
+++ b/tests/unit/cloudsearch/test_document.py
@@ -8,7 +8,7 @@ import json
from boto.cloudsearch.document import DocumentServiceConnection
from boto.cloudsearch.document import CommitMismatchError, EncodingError, \
- ContentTooLongError, DocumentServiceConnection
+ ContentTooLongError, DocumentServiceConnection, SearchServiceException
import boto
@@ -321,3 +321,17 @@ class CloudSearchDocumentErrorMismatch(CloudSearchDocumentTest):
"category": ["cat_a", "cat_b", "cat_c"]})
self.assertRaises(CommitMismatchError, document.commit)
+
+class CloudSearchDocumentsErrorMissingAdds(CloudSearchDocumentTest):
+ response = {
+ 'status': 'error',
+ 'deletes': 0,
+ 'errors': [{'message': 'Unknown error message'}]
+ }
+
+ def test_fake_failure(self):
+ document = DocumentServiceConnection(
+ endpoint="doc-demo-userdomain.us-east-1.cloudsearch.amazonaws.com")
+ document.add("1234", 10, {"id": "1234", "title": "Title 1",
+ "category": ["cat_a", "cat_b", "cat_c"]})
+ self.assertRaises(SearchServiceException, document.commit)
diff --git a/tests/unit/dynamodb/test_types.py b/tests/unit/dynamodb/test_types.py
index f4f7fb54..25b3f78f 100644
--- a/tests/unit/dynamodb/test_types.py
+++ b/tests/unit/dynamodb/test_types.py
@@ -86,6 +86,12 @@ class TestBinary(unittest.TestCase):
self.assertEqual(b'\x01', data)
self.assertEqual(b'\x01', bytes(data))
+ def test_non_ascii_good_input(self):
+ # Binary data that is out of ASCII range
+ data = types.Binary(b'\x88')
+ self.assertEqual(b'\x88', data)
+ self.assertEqual(b'\x88', bytes(data))
+
@unittest.skipUnless(six.PY2, "Python 2 only")
def test_bad_input(self):
with self.assertRaises(TypeError):
@@ -108,6 +114,9 @@ class TestBinary(unittest.TestCase):
# In Python 2.x these are considered equal
self.assertEqual(data, u'\x01')
+ # Check that the value field is of type bytes
+ self.assertEqual(type(data.value), bytes)
+
@unittest.skipUnless(six.PY3, "Python 3 only")
def test_unicode_py3(self):
with self.assertRaises(TypeError):
diff --git a/tests/unit/emr/test_connection.py b/tests/unit/emr/test_connection.py
index 510f0c1f..c60f04a4 100644
--- a/tests/unit/emr/test_connection.py
+++ b/tests/unit/emr/test_connection.py
@@ -19,8 +19,6 @@
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
-from __future__ import with_statement
-
import boto.utils
from datetime import datetime
@@ -374,7 +372,74 @@ class TestListSteps(AWSMockServiceTestCase):
connection_class = EmrConnection
def default_body(self):
- return b"""<ListStepsOutput><Steps><member><Name>Step 1</Name></member></Steps></ListStepsOutput>"""
+ return b"""<ListStepsResponse xmlns="http://elasticmapreduce.amazonaws.com/doc/2009-03-31">
+ <ListStepsResult>
+ <Steps>
+ <member>
+ <Id>abc123</Id>
+ <Status>
+ <StateChangeReason/>
+ <Timeline>
+ <CreationDateTime>2014-07-01T00:00:00.000Z</CreationDateTime>
+ </Timeline>
+ <State>PENDING</State>
+ </Status>
+ <Name>Step 1</Name>
+ <Config>
+ <Jar>/home/hadoop/lib/emr-s3distcp-1.0.jar</Jar>
+ <Args>
+ <member>--src</member>
+ <member>hdfs:///data/test/</member>
+ <member>--dest</member>
+ <member>s3n://test/data</member>
+ </Args>
+ <Properties/>
+ </Config>
+ <ActionOnFailure>CONTINUE</ActionOnFailure>
+ </member>
+ <member>
+ <Id>def456</Id>
+ <Status>
+ <StateChangeReason/>
+ <Timeline>
+ <CreationDateTime>2014-07-01T00:00:00.000Z</CreationDateTime>
+ </Timeline>
+ <State>COMPLETED</State>
+ </Status>
+ <Name>Step 2</Name>
+ <Config>
+ <MainClass>my.main.SomeClass</MainClass>
+ <Jar>s3n://test/jars/foo.jar</Jar>
+ </Config>
+ <ActionOnFailure>CONTINUE</ActionOnFailure>
+ </member>
+ <member>
+ <Id>ghi789</Id>
+ <Status>
+ <StateChangeReason/>
+ <Timeline>
+ <CreationDateTime>2014-07-01T00:00:00.000Z</CreationDateTime>
+ </Timeline>
+ <State>FAILED</State>
+ </Status>
+ <Name>Step 3</Name>
+ <Config>
+ <Jar>s3n://test/jars/bar.jar</Jar>
+ <Args>
+ <member>-arg</member>
+ <member>value</member>
+ </Args>
+ <Properties/>
+ </Config>
+ <ActionOnFailure>TERMINATE_CLUSTER</ActionOnFailure>
+ </member>
+ </Steps>
+ </ListStepsResult>
+ <ResponseMetadata>
+ <RequestId>eff31ee5-0342-11e4-b3c7-9de5a93f6fcb</RequestId>
+ </ResponseMetadata>
+</ListStepsResponse>
+"""
def test_list_steps(self):
self.set_http_response(200)
@@ -392,6 +457,30 @@ class TestListSteps(AWSMockServiceTestCase):
self.assertTrue(isinstance(response, StepSummaryList))
self.assertEqual(response.steps[0].name, 'Step 1')
+ valid_states = [
+ 'PENDING',
+ 'RUNNING',
+ 'COMPLETED',
+ 'CANCELLED',
+ 'FAILED',
+ 'INTERRUPTED'
+ ]
+
+ # Check for step states
+ for step in response.steps:
+ self.assertIn(step.status.state, valid_states)
+
+ # Check for step config
+ step = response.steps[0]
+ self.assertEqual(step.config.jar,
+ '/home/hadoop/lib/emr-s3distcp-1.0.jar')
+ self.assertEqual(len(step.config.args), 4)
+ self.assertEqual(step.config.args[0].value, '--src')
+ self.assertEqual(step.config.args[1].value, 'hdfs:///data/test/')
+
+ step = response.steps[1]
+ self.assertEqual(step.config.mainclass, 'my.main.SomeClass')
+
def test_list_steps_with_states(self):
self.set_http_response(200)
response = self.service_connection.list_steps(
diff --git a/tests/unit/glacier/test_job.py b/tests/unit/glacier/test_job.py
index ac47ad8d..c7b7b1fb 100644
--- a/tests/unit/glacier/test_job.py
+++ b/tests/unit/glacier/test_job.py
@@ -56,7 +56,7 @@ class TestJob(unittest.TestCase):
self.job.get_output(byte_range=(1, 1024), validate_checksum=False)
def test_download_to_fileobj(self):
- http_response=mock.Mock(read=mock.Mock(return_value='xyz'))
+ http_response = mock.Mock(read=mock.Mock(return_value='xyz'))
response = GlacierResponse(http_response, None)
response['TreeHash'] = 'tree_hash'
self.api.get_job_output.return_value = response
diff --git a/tests/unit/glacier/test_layer1.py b/tests/unit/glacier/test_layer1.py
index 1e6490bf..4c8f0cf7 100644
--- a/tests/unit/glacier/test_layer1.py
+++ b/tests/unit/glacier/test_layer1.py
@@ -76,7 +76,7 @@ class GlacierJobOperations(GlacierLayer1ConnectionBase):
self.set_http_response(status_code=200, header=header,
body=self.job_content)
response = self.service_connection.get_job_output(self.vault_name,
- 'example-job-id')
+ 'example-job-id')
self.assertEqual(self.job_content, response.read())
diff --git a/tests/unit/glacier/test_layer2.py b/tests/unit/glacier/test_layer2.py
index eec175d3..84b53aac 100644
--- a/tests/unit/glacier/test_layer2.py
+++ b/tests/unit/glacier/test_layer2.py
@@ -32,73 +32,71 @@ import boto.glacier.vault
from boto.glacier.vault import Vault
from boto.glacier.vault import Job
-from boto.compat import StringIO
-
from datetime import datetime, tzinfo, timedelta
# Some fixture data from the Glacier docs
FIXTURE_VAULT = {
- "CreationDate" : "2012-02-20T17:01:45.198Z",
- "LastInventoryDate" : "2012-03-20T17:03:43.221Z",
- "NumberOfArchives" : 192,
- "SizeInBytes" : 78088912,
- "VaultARN" : "arn:aws:glacier:us-east-1:012345678901:vaults/examplevault",
- "VaultName" : "examplevault"
+ "CreationDate": "2012-02-20T17:01:45.198Z",
+ "LastInventoryDate": "2012-03-20T17:03:43.221Z",
+ "NumberOfArchives": 192,
+ "SizeInBytes": 78088912,
+ "VaultARN": "arn:aws:glacier:us-east-1:012345678901:vaults/examplevault",
+ "VaultName": "examplevault"
}
FIXTURE_VAULTS = {
- 'RequestId': 'vuXO7SHTw-luynJ0Zu31AYjR3TcCn7X25r7ykpuulxY2lv8',
- 'VaultList': [{'SizeInBytes': 0, 'LastInventoryDate': None,
- 'VaultARN': 'arn:aws:glacier:us-east-1:686406519478:vaults/vault0',
- 'VaultName': 'vault0', 'NumberOfArchives': 0,
- 'CreationDate': '2013-05-17T02:38:39.049Z'},
- {'SizeInBytes': 0, 'LastInventoryDate': None,
- 'VaultARN': 'arn:aws:glacier:us-east-1:686406519478:vaults/vault3',
- 'VaultName': 'vault3', 'NumberOfArchives': 0,
- 'CreationDate': '2013-05-17T02:31:18.659Z'}]}
+ 'RequestId': 'vuXO7SHTw-luynJ0Zu31AYjR3TcCn7X25r7ykpuulxY2lv8',
+ 'VaultList': [{'SizeInBytes': 0, 'LastInventoryDate': None,
+ 'VaultARN': 'arn:aws:glacier:us-east-1:686406519478:vaults/vault0',
+ 'VaultName': 'vault0', 'NumberOfArchives': 0,
+ 'CreationDate': '2013-05-17T02:38:39.049Z'},
+ {'SizeInBytes': 0, 'LastInventoryDate': None,
+ 'VaultARN': 'arn:aws:glacier:us-east-1:686406519478:vaults/vault3',
+ 'VaultName': 'vault3', 'NumberOfArchives': 0,
+ 'CreationDate': '2013-05-17T02:31:18.659Z'}]}
FIXTURE_PAGINATED_VAULTS = {
- 'Marker': 'arn:aws:glacier:us-east-1:686406519478:vaults/vault2',
- 'RequestId': 'vuXO7SHTw-luynJ0Zu31AYjR3TcCn7X25r7ykpuulxY2lv8',
- 'VaultList': [{'SizeInBytes': 0, 'LastInventoryDate': None,
- 'VaultARN': 'arn:aws:glacier:us-east-1:686406519478:vaults/vault0',
- 'VaultName': 'vault0', 'NumberOfArchives': 0,
- 'CreationDate': '2013-05-17T02:38:39.049Z'},
- {'SizeInBytes': 0, 'LastInventoryDate': None,
- 'VaultARN': 'arn:aws:glacier:us-east-1:686406519478:vaults/vault1',
- 'VaultName': 'vault1', 'NumberOfArchives': 0,
- 'CreationDate': '2013-05-17T02:31:18.659Z'}]}
+ 'Marker': 'arn:aws:glacier:us-east-1:686406519478:vaults/vault2',
+ 'RequestId': 'vuXO7SHTw-luynJ0Zu31AYjR3TcCn7X25r7ykpuulxY2lv8',
+ 'VaultList': [{'SizeInBytes': 0, 'LastInventoryDate': None,
+ 'VaultARN': 'arn:aws:glacier:us-east-1:686406519478:vaults/vault0',
+ 'VaultName': 'vault0', 'NumberOfArchives': 0,
+ 'CreationDate': '2013-05-17T02:38:39.049Z'},
+ {'SizeInBytes': 0, 'LastInventoryDate': None,
+ 'VaultARN': 'arn:aws:glacier:us-east-1:686406519478:vaults/vault1',
+ 'VaultName': 'vault1', 'NumberOfArchives': 0,
+ 'CreationDate': '2013-05-17T02:31:18.659Z'}]}
FIXTURE_PAGINATED_VAULTS_CONT = {
- 'Marker': None,
- 'RequestId': 'vuXO7SHTw-luynJ0Zu31AYjR3TcCn7X25r7ykpuulxY2lv8',
- 'VaultList': [{'SizeInBytes': 0, 'LastInventoryDate': None,
- 'VaultARN': 'arn:aws:glacier:us-east-1:686406519478:vaults/vault2',
- 'VaultName': 'vault2', 'NumberOfArchives': 0,
- 'CreationDate': '2013-05-17T02:38:39.049Z'},
- {'SizeInBytes': 0, 'LastInventoryDate': None,
- 'VaultARN': 'arn:aws:glacier:us-east-1:686406519478:vaults/vault3',
- 'VaultName': 'vault3', 'NumberOfArchives': 0,
- 'CreationDate': '2013-05-17T02:31:18.659Z'}]}
+ 'Marker': None,
+ 'RequestId': 'vuXO7SHTw-luynJ0Zu31AYjR3TcCn7X25r7ykpuulxY2lv8',
+ 'VaultList': [{'SizeInBytes': 0, 'LastInventoryDate': None,
+ 'VaultARN': 'arn:aws:glacier:us-east-1:686406519478:vaults/vault2',
+ 'VaultName': 'vault2', 'NumberOfArchives': 0,
+ 'CreationDate': '2013-05-17T02:38:39.049Z'},
+ {'SizeInBytes': 0, 'LastInventoryDate': None,
+ 'VaultARN': 'arn:aws:glacier:us-east-1:686406519478:vaults/vault3',
+ 'VaultName': 'vault3', 'NumberOfArchives': 0,
+ 'CreationDate': '2013-05-17T02:31:18.659Z'}]}
FIXTURE_ARCHIVE_JOB = {
- "Action": "ArchiveRetrieval",
- "ArchiveId": ("NkbByEejwEggmBz2fTHgJrg0XBoDfjP4q6iu87-TjhqG6eGoOY9Z8i1_AUyUs"
- "uhPAdTqLHy8pTl5nfCFJmDl2yEZONi5L26Omw12vcs01MNGntHEQL8MBfGlqr"
- "EXAMPLEArchiveId"),
- "ArchiveSizeInBytes": 16777216,
- "Completed": False,
- "CreationDate": "2012-05-15T17:21:39.339Z",
- "CompletionDate": "2012-05-15T17:21:43.561Z",
- "InventorySizeInBytes": None,
- "JobDescription": "My ArchiveRetrieval Job",
- "JobId": ("HkF9p6o7yjhFx-K3CGl6fuSm6VzW9T7esGQfco8nUXVYwS0jlb5gq1JZ55yHgt5v"
- "P54ZShjoQzQVVh7vEXAMPLEjobID"),
- "SHA256TreeHash": ("beb0fe31a1c7ca8c6c04d574ea906e3f97b31fdca7571defb5b44dc"
- "a89b5af60"),
- "SNSTopic": "arn:aws:sns:us-east-1:012345678901:mytopic",
- "StatusCode": "InProgress",
- "StatusMessage": "Operation in progress.",
- "VaultARN": "arn:aws:glacier:us-east-1:012345678901:vaults/examplevault"
+ "Action": "ArchiveRetrieval",
+ "ArchiveId": ("NkbByEejwEggmBz2fTHgJrg0XBoDfjP4q6iu87-TjhqG6eGoOY9Z8i1_AUyUs"
+ "uhPAdTqLHy8pTl5nfCFJmDl2yEZONi5L26Omw12vcs01MNGntHEQL8MBfGlqr"
+ "EXAMPLEArchiveId"),
+ "ArchiveSizeInBytes": 16777216,
+ "Completed": False,
+ "CreationDate": "2012-05-15T17:21:39.339Z",
+ "CompletionDate": "2012-05-15T17:21:43.561Z",
+ "InventorySizeInBytes": None,
+ "JobDescription": "My ArchiveRetrieval Job",
+ "JobId": ("HkF9p6o7yjhFx-K3CGl6fuSm6VzW9T7esGQfco8nUXVYwS0jlb5gq1JZ55yHgt5v"
+ "P54ZShjoQzQVVh7vEXAMPLEjobID"),
+ "SHA256TreeHash": ("beb0fe31a1c7ca8c6c04d574ea906e3f97b31fdca7571defb5b44dc"
+ "a89b5af60"),
+ "SNSTopic": "arn:aws:sns:us-east-1:012345678901:mytopic",
+ "StatusCode": "InProgress",
+ "StatusMessage": "Operation in progress.",
+ "VaultARN": "arn:aws:glacier:us-east-1:012345678901:vaults/examplevault"
}
EXAMPLE_PART_LIST_RESULT_PAGE_1 = {
@@ -107,11 +105,10 @@ EXAMPLE_PART_LIST_RESULT_PAGE_1 = {
"Marker": "MfgsKHVjbQ6EldVl72bn3_n5h2TaGZQUO-Qb3B9j3TITf7WajQ",
"MultipartUploadId": "OW2fM5iVylEpFEMM9_HpKowRapC3vn5sSL39_396UW9zLFUWVrnRHaPjUJddQ5OxSHVXjYtrN47NBZ-khxOjyEXAMPLE",
"PartSizeInBytes": 4194304,
- "Parts":
- [ {
- "RangeInBytes": "4194304-8388607",
- "SHA256TreeHash": "01d34dabf7be316472c93b1ef80721f5d4"
- }],
+ "Parts": [{
+ "RangeInBytes": "4194304-8388607",
+ "SHA256TreeHash": "01d34dabf7be316472c93b1ef80721f5d4"
+ }],
"VaultARN": "arn:aws:glacier:us-east-1:012345678901:vaults/demo1-vault"
}
@@ -123,11 +120,10 @@ EXAMPLE_PART_LIST_RESULT_PAGE_2 = {
"Marker": None,
"MultipartUploadId": None,
"PartSizeInBytes": None,
- "Parts":
- [ {
- "RangeInBytes": "0-4194303",
- "SHA256TreeHash": "01d34dabf7be316472c93b1ef80721f5d4"
- }],
+ "Parts": [{
+ "RangeInBytes": "0-4194303",
+ "SHA256TreeHash": "01d34dabf7be316472c93b1ef80721f5d4"
+ }],
"VaultARN": None
}
@@ -137,14 +133,13 @@ EXAMPLE_PART_LIST_COMPLETE = {
"Marker": None,
"MultipartUploadId": "OW2fM5iVylEpFEMM9_HpKowRapC3vn5sSL39_396UW9zLFUWVrnRHaPjUJddQ5OxSHVXjYtrN47NBZ-khxOjyEXAMPLE",
"PartSizeInBytes": 4194304,
- "Parts":
- [ {
- "RangeInBytes": "4194304-8388607",
- "SHA256TreeHash": "01d34dabf7be316472c93b1ef80721f5d4"
+ "Parts": [{
+ "RangeInBytes": "4194304-8388607",
+ "SHA256TreeHash": "01d34dabf7be316472c93b1ef80721f5d4"
}, {
- "RangeInBytes": "0-4194303",
- "SHA256TreeHash": "01d34dabf7be316472c93b1ef80721f5d4"
- }],
+ "RangeInBytes": "0-4194303",
+ "SHA256TreeHash": "01d34dabf7be316472c93b1ef80721f5d4"
+ }],
"VaultARN": "arn:aws:glacier:us-east-1:012345678901:vaults/demo1-vault"
}
@@ -183,7 +178,7 @@ class TestGlacierLayer2Connection(GlacierLayer2Base):
def return_paginated_vaults_resp(marker=None, limit=None):
return resps.pop(0)
- self.mock_layer1.list_vaults = Mock(side_effect = return_paginated_vaults_resp)
+ self.mock_layer1.list_vaults = Mock(side_effect=return_paginated_vaults_resp)
vaults = self.layer2.list_vaults()
self.assertEqual(vaults[0].name, "vault0")
self.assertEqual(vaults[3].name, "vault3")
@@ -287,11 +282,11 @@ class TestVault(GlacierLayer2Base):
'Parts': [{
'RangeInBytes': '0-3',
'SHA256TreeHash': '12',
- }, {
+ }, {
'RangeInBytes': '4-6',
'SHA256TreeHash': '34',
- },
- ]}
+ }],
+ }
self.vault.list_all_parts = mock_list_parts
self.vault.resume_archive_from_file(
@@ -315,6 +310,7 @@ class TestJob(GlacierLayer2Base):
"HkF9p6o7yjhFx-K3CGl6fuSm6VzW9T7esGQfco8nUXVYwS0jlb5gq1JZ55yHgt5vP"
"54ZShjoQzQVVh7vEXAMPLEjobID", (0, 100))
+
class TestRangeStringParsing(unittest.TestCase):
def test_simple_range(self):
self.assertEquals(
diff --git a/tests/unit/glacier/test_utils.py b/tests/unit/glacier/test_utils.py
index a051b59e..bace2a38 100644
--- a/tests/unit/glacier/test_utils.py
+++ b/tests/unit/glacier/test_utils.py
@@ -19,13 +19,16 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
-import time
import logging
+import os
+import tempfile
+import time
from hashlib import sha256
from tests.unit import unittest
+from boto.compat import BytesIO, six, StringIO
from boto.glacier.utils import minimum_part_size, chunk_hashes, tree_hash, \
- bytes_to_hex
+ bytes_to_hex, compute_hashes_from_fileobj
class TestPartSizeCalculations(unittest.TestCase):
@@ -114,3 +117,49 @@ class TestTreeHash(unittest.TestCase):
self.assertEqual(
self.calculate_tree_hash(''),
b'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855')
+
+
+class TestFileHash(unittest.TestCase):
+ def _gen_data(self):
+ # Generate some pseudo-random bytes of data. We include the
+ # hard-coded blob as an example that fails to decode via UTF-8.
+ return os.urandom(5000) + b'\xc2\x00'
+
+ def test_compute_hash_tempfile(self):
+ # Compute a hash from a file object. On Python 2 this uses a non-
+ # binary mode. On Python 3, however, binary mode is required for
+ # binary files. If not used, you will get UTF-8 code errors.
+ if six.PY2:
+ mode = "w+"
+ else:
+ mode = "wb+"
+
+ with tempfile.TemporaryFile(mode=mode) as f:
+ f.write(self._gen_data())
+ f.seek(0)
+
+ compute_hashes_from_fileobj(f, chunk_size=512)
+
+ @unittest.skipUnless(six.PY3, 'Python 3 requires reading binary!')
+ def test_compute_hash_tempfile_py3(self):
+ # Note the missing 'b' in the mode!
+ with tempfile.TemporaryFile(mode='w+') as f:
+ with self.assertRaises(ValueError):
+ compute_hashes_from_fileobj(f, chunk_size=512)
+
+ # What about file-like objects without a mode? If it has an
+ # encoding we use it, otherwise attempt UTF-8 encoding to
+ # bytes for hashing.
+ f = StringIO('test data' * 500)
+ compute_hashes_from_fileobj(f, chunk_size=512)
+
+ @unittest.skipUnless(six.PY2, 'Python 3 requires reading binary!')
+ def test_compute_hash_stringio(self):
+ # Python 2 binary data in StringIO example
+ f = StringIO(self._gen_data())
+ compute_hashes_from_fileobj(f, chunk_size=512)
+
+ def test_compute_hash_bytesio(self):
+ # Compute a hash from a file-like BytesIO object.
+ f = BytesIO(self._gen_data())
+ compute_hashes_from_fileobj(f, chunk_size=512)
diff --git a/tests/unit/glacier/test_vault.py b/tests/unit/glacier/test_vault.py
index 68d9d784..f532e3b9 100644
--- a/tests/unit/glacier/test_vault.py
+++ b/tests/unit/glacier/test_vault.py
@@ -44,7 +44,9 @@ class TestVault(unittest.TestCase):
def tearDown(self):
self.size_patch.stop()
- def test_upload_archive_small_file(self):
+ @mock.patch('boto.glacier.vault.compute_hashes_from_fileobj',
+ return_value=[b'abc', b'123'])
+ def test_upload_archive_small_file(self, compute_hashes):
self.getsize.return_value = 1
self.api.upload_archive.return_value = {'ArchiveId': 'archive_id'}
@@ -69,7 +71,7 @@ class TestVault(unittest.TestCase):
# The write should be created with the default part size of the
# instance (2 MB).
self.vault.create_archive_writer.assert_called_with(
- description=mock.ANY, part_size=self.vault.DefaultPartSize)
+ description=mock.ANY, part_size=self.vault.DefaultPartSize)
def test_large_part_size_is_obeyed(self):
self.vault.DefaultPartSize = 8 * 1024 * 1024
diff --git a/tests/unit/glacier/test_writer.py b/tests/unit/glacier/test_writer.py
index c7066b97..b2875f3c 100644
--- a/tests/unit/glacier/test_writer.py
+++ b/tests/unit/glacier/test_writer.py
@@ -27,7 +27,6 @@ from tests.unit import unittest
from mock import (
call,
Mock,
- patch,
sentinel,
)
from nose.tools import assert_equal
@@ -50,7 +49,7 @@ def create_mock_vault():
def partify(data, part_size):
for i in itertools.count(0):
start = i * part_size
- part = data[start:start+part_size]
+ part = data[start:start + part_size]
if part:
yield part
else:
diff --git a/tests/unit/s3/test_key.py b/tests/unit/s3/test_key.py
index ee8b686a..7752d9cd 100644
--- a/tests/unit/s3/test_key.py
+++ b/tests/unit/s3/test_key.py
@@ -20,8 +20,6 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
-from __future__ import with_statement
-
from tests.compat import mock, unittest
from tests.unit import AWSMockServiceTestCase
diff --git a/tests/unit/test_connection.py b/tests/unit/test_connection.py
index 5d726e06..3d4a57b3 100644
--- a/tests/unit/test_connection.py
+++ b/tests/unit/test_connection.py
@@ -19,8 +19,6 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
-from __future__ import with_statement
-
import os
import socket
@@ -116,7 +114,7 @@ class TestAWSAuthConnection(unittest.TestCase):
self.assertEqual(conn.get_path('/folder//image.jpg'), '/folder//image.jpg')
self.assertEqual(conn.get_path('/folder////image.jpg'), '/folder////image.jpg')
self.assertEqual(conn.get_path('///folder////image.jpg'), '///folder////image.jpg')
-
+
def test_connection_behind_proxy(self):
os.environ['http_proxy'] = "http://john.doe:p4ssw0rd@127.0.0.1:8180"
conn = AWSAuthConnection(
@@ -130,7 +128,7 @@ class TestAWSAuthConnection(unittest.TestCase):
self.assertEqual(conn.proxy_pass, 'p4ssw0rd')
self.assertEqual(conn.proxy_port, '8180')
del os.environ['http_proxy']
-
+
def test_connection_behind_proxy_without_explicit_port(self):
os.environ['http_proxy'] = "http://127.0.0.1"
conn = AWSAuthConnection(
@@ -139,7 +137,7 @@ class TestAWSAuthConnection(unittest.TestCase):
aws_secret_access_key='secret',
suppress_consec_slashes=False,
port=8180
- )
+ )
self.assertEqual(conn.proxy, '127.0.0.1')
self.assertEqual(conn.proxy_port, 8180)
del os.environ['http_proxy']
diff --git a/tests/unit/utils/test_utils.py b/tests/unit/utils/test_utils.py
index 2e7ddbd3..57a04a66 100644
--- a/tests/unit/utils/test_utils.py
+++ b/tests/unit/utils/test_utils.py
@@ -207,6 +207,22 @@ class TestRetryURL(unittest.TestCase):
response = retry_url('http://10.10.10.10/foo', num_retries=1)
self.assertEqual(response, 'no proxy response')
+ def test_retry_url_using_bytes_and_string_response(self):
+ test_value = 'normal response'
+ fake_response = mock.Mock()
+
+ # test using unicode
+ fake_response.read.return_value = test_value
+ self.opener.return_value.open.return_value = fake_response
+ response = retry_url('http://10.10.10.10/foo', num_retries=1)
+ self.assertEqual(response, test_value)
+
+ # test using bytes
+ fake_response.read.return_value = test_value.encode('utf-8')
+ self.opener.return_value.open.return_value = fake_response
+ response = retry_url('http://10.10.10.10/foo', num_retries=1)
+ self.assertEqual(response, test_value)
+
class TestLazyLoadMetadata(unittest.TestCase):
def setUp(self):
diff --git a/tests/unit/vpc/test_vpc_peering_connection.py b/tests/unit/vpc/test_vpc_peering_connection.py
index 42e68d44..503e5606 100644
--- a/tests/unit/vpc/test_vpc_peering_connection.py
+++ b/tests/unit/vpc/test_vpc_peering_connection.py
@@ -19,7 +19,7 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
-from tests.unit import unittest
+from tests.unit import mock, unittest
from tests.unit import AWSMockServiceTestCase
from boto.vpc import VpcPeeringConnection, VPCConnection, Subnet
@@ -159,6 +159,58 @@ class TestDeleteVpcPeeringConnection(AWSMockServiceTestCase):
self.set_http_response(status_code=200)
self.assertEquals(self.service_connection.delete_vpc_peering_connection('pcx-12345678'), True)
+class TestDeleteVpcPeeringConnectionShortForm(unittest.TestCase):
+ DESCRIBE_VPC_PEERING_CONNECTIONS= b"""<?xml version="1.0" encoding="UTF-8"?>
+<DescribeVpcPeeringConnectionsResponse xmlns="http://ec2.amazonaws.com/doc/2014-05-01/">
+ <requestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</requestId>
+ <vpcPeeringConnectionSet>
+ <item>
+ <vpcPeeringConnectionId>pcx-111aaa22</vpcPeeringConnectionId>
+ <requesterVpcInfo>
+ <ownerId>777788889999</ownerId>
+ <vpcId>vpc-1a2b3c4d</vpcId>
+ <cidrBlock>172.31.0.0/16</cidrBlock>
+ </requesterVpcInfo>
+ <accepterVpcInfo>
+ <ownerId>111122223333</ownerId>
+ <vpcId>vpc-aa22cc33</vpcId>
+ </accepterVpcInfo>
+ <status>
+ <code>pending-acceptance</code>
+ <message>Pending Acceptance by 111122223333</message>
+ </status>
+ <expirationTime>2014-02-17T16:00:50.000Z</expirationTime>
+ </item>
+ </vpcPeeringConnectionSet>
+</DescribeVpcPeeringConnectionsResponse>"""
+
+ DELETE_VPC_PEERING_CONNECTION= b"""<DeleteVpcPeeringConnectionResponse xmlns="http://ec2.amazonaws.com/doc/2014-05-01/">
+ <requestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</requestId>
+ <return>true</return>
+</DeleteVpcPeeringConnectionResponse>"""
+
+ def test_delete_vpc_peering_connection(self):
+ vpc_conn = VPCConnection(aws_access_key_id='aws_access_key_id',
+ aws_secret_access_key='aws_secret_access_key')
+
+ mock_response = mock.Mock()
+ mock_response.read.return_value = self.DESCRIBE_VPC_PEERING_CONNECTIONS
+ mock_response.status = 200
+ vpc_conn.make_request = mock.Mock(return_value=mock_response)
+ vpc_peering_connections = vpc_conn.get_all_vpc_peering_connections()
+
+ self.assertEquals(1, len(vpc_peering_connections))
+ vpc_peering_connection = vpc_peering_connections[0]
+
+ mock_response = mock.Mock()
+ mock_response.read.return_value = self.DELETE_VPC_PEERING_CONNECTION
+ mock_response.status = 200
+ vpc_conn.make_request = mock.Mock(return_value=mock_response)
+ self.assertEquals(True, vpc_peering_connection.delete())
+
+ self.assertIn('DeleteVpcPeeringConnection', vpc_conn.make_request.call_args_list[0][0])
+ self.assertNotIn('DeleteVpc', vpc_conn.make_request.call_args_list[0][0])
+
class TestRejectVpcPeeringConnection(AWSMockServiceTestCase):
REJECT_VPC_PEERING_CONNECTION= b"""<RejectVpcPeeringConnectionResponse xmlns="http://ec2.amazonaws.com/doc/2014-05-01/">
<requestId>7a62c49f-347e-4fc4-9331-6e8eEXAMPLE</requestId>