summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorVictor Stinner <vstinner@redhat.com>2015-05-20 12:19:58 -0700
committerVictor Stinner <vstinner@redhat.com>2015-07-02 13:18:39 +0200
commit92b3ba64fc66368ef096e7631a99a5b4ea13db97 (patch)
tree21282d97131bf9374b43bef1bc65d98fba2cd6e2
parent0ae8f4102e7aeebb75fe5f65c02ed9bfa76cebb5 (diff)
downloadglance_store-92b3ba64fc66368ef096e7631a99a5b4ea13db97.tar.gz
Port S3 driver to Python 3
* On Python 3, don't encode the access and secret keys to UTF-8: keep unicode
* Replace StringIO.StringIO with six.BytesIO for image content
* Replace StringIO.len with BytesIO.tell(); the len attribute has been removed in Python 3
* Use byte strings to handle image content

Change-Id: I7cd07a68c9da2f1450a804b198c3840a9662ba69
-rw-r--r--glance_store/_drivers/s3.py32
-rw-r--r--glance_store/tests/unit/test_s3_store.py36
2 files changed, 36 insertions, 32 deletions
diff --git a/glance_store/_drivers/s3.py b/glance_store/_drivers/s3.py
index 0f0e3d0..8933e2a 100644
--- a/glance_store/_drivers/s3.py
+++ b/glance_store/_drivers/s3.py
@@ -221,12 +221,13 @@ class StoreLocation(glance_store.location.StoreLocation):
try:
access_key = cred_parts[0]
secret_key = cred_parts[1]
- # NOTE(jaypipes): Need to encode to UTF-8 here because of a
- # bug in the HMAC library that boto uses.
- # See: http://bugs.python.org/issue5285
- # See: http://trac.edgewall.org/ticket/8083
- access_key = access_key.encode('utf-8')
- secret_key = secret_key.encode('utf-8')
+ if six.PY2:
+ # NOTE(jaypipes): Need to encode to UTF-8 here because of a
+ # bug in the HMAC library that boto uses.
+ # See: http://bugs.python.org/issue5285
+ # See: http://trac.edgewall.org/ticket/8083
+ access_key = access_key.encode('utf-8')
+ secret_key = secret_key.encode('utf-8')
self.accesskey = access_key
self.secretkey = secret_key
except IndexError:
@@ -277,7 +278,7 @@ class ChunkedFile(object):
def getvalue(self):
"""Return entire string value... used in testing."""
- data = ""
+ data = b""
self.len = 0
for chunk in self:
read_bytes = len(chunk)
@@ -315,12 +316,15 @@ class Store(glance_store.driver.Store):
self.s3_host = self._option_get('s3_store_host')
access_key = self._option_get('s3_store_access_key')
secret_key = self._option_get('s3_store_secret_key')
- # NOTE(jaypipes): Need to encode to UTF-8 here because of a
- # bug in the HMAC library that boto uses.
- # See: http://bugs.python.org/issue5285
- # See: http://trac.edgewall.org/ticket/8083
- self.access_key = access_key.encode('utf-8')
- self.secret_key = secret_key.encode('utf-8')
+ if six.PY2:
+ # NOTE(jaypipes): Need to encode to UTF-8 here because of a
+ # bug in the HMAC library that boto uses.
+ # See: http://bugs.python.org/issue5285
+ # See: http://trac.edgewall.org/ticket/8083
+ access_key = access_key.encode('utf-8')
+ secret_key = secret_key.encode('utf-8')
+ self.access_key = access_key
+ self.secret_key = secret_key
self.bucket = self._option_get('s3_store_bucket')
self.scheme = 's3'
@@ -583,7 +587,7 @@ class Store(glance_store.driver.Store):
write_chunk_size = max(self.s3_store_large_object_chunk_size,
chunk_size)
it = utils.chunkreadable(image_file, self.WRITE_CHUNKSIZE)
- buffered_chunk = ''
+ buffered_chunk = b''
while True:
try:
buffered_clen = len(buffered_chunk)
diff --git a/glance_store/tests/unit/test_s3_store.py b/glance_store/tests/unit/test_s3_store.py
index a15fcf0..5ca4c14 100644
--- a/glance_store/tests/unit/test_s3_store.py
+++ b/glance_store/tests/unit/test_s3_store.py
@@ -78,7 +78,7 @@ class FakeKey(object):
def set_contents_from_file(self, fp, replace=False, **kwargs):
max_read = kwargs.get('size')
- self.data = six.StringIO()
+ self.data = six.BytesIO()
checksum = hashlib.md5()
while True:
if max_read is None or max_read > self.BufferSize:
@@ -94,7 +94,7 @@ class FakeKey(object):
self.data.write(chunk)
if max_read is not None:
max_read -= len(chunk)
- self.size = self.data.len
+ self.size = self.data.tell()
# Reset the buffer to start
self.data.seek(0)
self.etag = checksum.hexdigest()
@@ -149,7 +149,7 @@ class FakeMPU(object):
Complete the parts into one big FakeKey
"""
key = FakeKey(self.bucket, self.key_name)
- key.data = six.StringIO()
+ key.data = six.BytesIO()
checksum = hashlib.md5()
cnt = 0
for pnum in sorted(self.parts.keys()):
@@ -160,7 +160,7 @@ class FakeMPU(object):
checksum.update(chunk)
key.data.write(chunk)
chunk = part.data.read(key.BufferSize)
- key.size = key.data.len
+ key.size = key.data.tell()
key.data.seek(0)
key.etag = checksum.hexdigest() + '-%d' % cnt
key.read = key.data.read
@@ -230,7 +230,7 @@ def fakers():
fixture_buckets = {'glance': FakeBucket('glance')}
b = fixture_buckets['glance']
k = b.new_key(FAKE_UUID)
- k.set_contents_from_file(six.StringIO("*" * FIVE_KB))
+ k.set_contents_from_file(six.BytesIO(b"*" * FIVE_KB))
def fake_connection_constructor(self, *args, **kwargs):
host = kwargs.get('host')
@@ -294,8 +294,8 @@ class TestStore(base.StoreBaseTest,
self.assertEqual(image_size, FIVE_KB)
- expected_data = "*" * FIVE_KB
- data = ""
+ expected_data = b"*" * FIVE_KB
+ data = b""
for chunk in image_s3:
data += chunk
@@ -359,7 +359,7 @@ class TestStore(base.StoreBaseTest,
"""Test that we can add an image via the s3 backend."""
expected_image_id = str(uuid.uuid4())
expected_s3_size = FIVE_KB
- expected_s3_contents = "*" * expected_s3_size
+ expected_s3_contents = b"*" * expected_s3_size
expected_checksum = hashlib.md5(expected_s3_contents).hexdigest()
expected_location = format_s3_location(
S3_CONF['s3_store_access_key'],
@@ -367,7 +367,7 @@ class TestStore(base.StoreBaseTest,
S3_CONF['s3_store_host'],
S3_CONF['s3_store_bucket'],
expected_image_id)
- image_s3 = six.StringIO(expected_s3_contents)
+ image_s3 = six.BytesIO(expected_s3_contents)
loc, size, checksum, _ = self.store.add(expected_image_id,
image_s3,
@@ -380,10 +380,10 @@ class TestStore(base.StoreBaseTest,
loc = location.get_location_from_uri(expected_location,
conf=self.conf)
(new_image_s3, new_image_size) = self.store.get(loc)
- new_image_contents = six.StringIO()
+ new_image_contents = six.BytesIO()
for chunk in new_image_s3:
new_image_contents.write(chunk)
- new_image_s3_size = new_image_contents.len
+ new_image_s3_size = new_image_contents.tell()
self.assertEqual(expected_s3_contents, new_image_contents.getvalue())
self.assertEqual(expected_s3_size, new_image_s3_size)
@@ -401,7 +401,7 @@ class TestStore(base.StoreBaseTest,
for (vsize, vcnt) in variations:
expected_image_id = str(uuid.uuid4())
expected_s3_size = vsize
- expected_s3_contents = "12345678" * (expected_s3_size / 8)
+ expected_s3_contents = b"12345678" * (expected_s3_size // 8)
expected_chksum = hashlib.md5(expected_s3_contents).hexdigest()
expected_location = format_s3_location(
S3_CONF['s3_store_access_key'],
@@ -409,7 +409,7 @@ class TestStore(base.StoreBaseTest,
S3_CONF['s3_store_host'],
S3_CONF['s3_store_bucket'],
expected_image_id)
- image_s3 = six.StringIO(expected_s3_contents)
+ image_s3 = six.BytesIO(expected_s3_contents)
# add image
loc, size, chksum, _ = self.store.add(expected_image_id,
@@ -424,10 +424,10 @@ class TestStore(base.StoreBaseTest,
loc = location.get_location_from_uri(expected_location,
conf=self.conf)
(new_image_s3, new_image_s3_size) = self.store.get(loc)
- new_image_contents = six.StringIO()
+ new_image_contents = six.BytesIO()
for chunk in new_image_s3:
new_image_contents.write(chunk)
- new_image_size = new_image_contents.len
+ new_image_size = new_image_contents.tell()
self.assertEqual(expected_s3_size, new_image_s3_size)
self.assertEqual(expected_s3_size, new_image_size)
self.assertEqual(expected_s3_contents,
@@ -447,7 +447,7 @@ class TestStore(base.StoreBaseTest,
for variation in variations:
expected_image_id = str(uuid.uuid4())
expected_s3_size = FIVE_KB
- expected_s3_contents = "*" * expected_s3_size
+ expected_s3_contents = b"*" * expected_s3_size
expected_checksum = hashlib.md5(expected_s3_contents).hexdigest()
new_conf = S3_CONF.copy()
new_conf['s3_store_host'] = variation
@@ -457,7 +457,7 @@ class TestStore(base.StoreBaseTest,
new_conf['s3_store_host'],
new_conf['s3_store_bucket'],
expected_image_id)
- image_s3 = six.StringIO(expected_s3_contents)
+ image_s3 = six.BytesIO(expected_s3_contents)
self.config(**new_conf)
self.store = s3.Store(self.conf)
@@ -484,7 +484,7 @@ class TestStore(base.StoreBaseTest,
Tests that adding an image with an existing identifier
raises an appropriate exception
"""
- image_s3 = six.StringIO("nevergonnamakeit")
+ image_s3 = six.BytesIO(b"nevergonnamakeit")
self.assertRaises(exceptions.Duplicate,
self.store.add,
FAKE_UUID, image_s3, 0)