author    Brianna Poulos <Brianna.Poulos@jhuapl.edu>  2015-12-09 16:23:31 -0500
committer Brianna Poulos <Brianna.Poulos@jhuapl.edu>  2016-01-21 09:49:17 -0500
commit    819c7f9a927393d4549461c9b0183a23ce174f6f (patch)
tree      4f7cb78975c2ac93d9444d83203e95beef1b04ed
parent    8a7729110a5ab203c363fdf5cc0a2f1dc069bf66 (diff)
download  glance_store-819c7f9a927393d4549461c9b0183a23ce174f6f.tar.gz
Add signature verifier to backend drivers
In order to use the 'sign-the-data' approach instead of the
'sign-the-hash' approach for signature verification, the verifier must
be updated with the image data at the same time that the data is read
to update the checksum. This patch adds the ability for glance to pass
a verifier object to the backend drivers when an image is being
uploaded; the drivers feed the image data to the verifier, and glance
then uses it to verify the signature.

The following drivers are supported:
* file
* rbd
* sheepdog
* s3
* vsphere

The swift driver is supported only for images larger than
'large_object_size', because for smaller images glance_store does not
read the data itself (swiftclient does the reading), so the verifier
cannot be updated with the data. A child patch addresses this issue.

The cinder and http drivers are not supported because they do not have
the 'write' capability.

Partial-Bug: #1516031
Change-Id: I43799e6a4a6643a23769af8d839a2beb4e0ff9bf
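For context, a minimal caller-side sketch of how the new 'verifier'
keyword can be threaded through glance_store. Assumptions: the
'cryptography' package's pre-2.0 public_key.verifier() API, PSS/SHA-256
padding, and a hypothetical upload_with_verification() helper that
already has the RSA public key and signature loaded; glance's own
signature handling code is not part of this change.

    from cryptography.hazmat.primitives import hashes
    from cryptography.hazmat.primitives.asymmetric import padding

    from glance_store import backend


    def upload_with_verification(conf, image_id, data, size,
                                 public_key, signature):
        # Streaming verification context: the backend driver calls
        # update() with every chunk it writes; verify() then checks the
        # signature against the accumulated data ('sign-the-data', with
        # no separate hash step).
        verifier = public_key.verifier(
            signature,
            padding.PSS(mgf=padding.MGF1(hashes.SHA256()),
                        salt_length=padding.PSS.MAX_LENGTH),
            hashes.SHA256())

        location, bytes_written, checksum, metadata = backend.add_to_backend(
            conf, image_id, data, size, verifier=verifier)

        # Raises cryptography.exceptions.InvalidSignature if the stored
        # data does not match the signature.
        verifier.verify()

        return location, bytes_written, checksum, metadata

A caller would presumably treat an InvalidSignature as grounds to delete
the just-stored image rather than activate it.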
-rw-r--r--  glance_store/_drivers/filesystem.py                  6
-rw-r--r--  glance_store/_drivers/rbd.py                         6
-rw-r--r--  glance_store/_drivers/s3.py                         22
-rw-r--r--  glance_store/_drivers/sheepdog.py                    6
-rw-r--r--  glance_store/_drivers/swift/store.py                10
-rw-r--r--  glance_store/_drivers/vmware_datastore.py           19
-rw-r--r--  glance_store/backend.py                             12
-rw-r--r--  glance_store/driver.py                               3
-rw-r--r--  glance_store/tests/unit/test_filesystem_store.py    13
-rw-r--r--  glance_store/tests/unit/test_rbd_store.py           14
-rw-r--r--  glance_store/tests/unit/test_s3_store.py            41
-rw-r--r--  glance_store/tests/unit/test_sheepdog_store.py      19
-rw-r--r--  glance_store/tests/unit/test_swift_store.py         47
-rw-r--r--  glance_store/tests/unit/test_vmware_store.py        46
14 files changed, 242 insertions(+), 22 deletions(-)
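The per-driver change in the diff below follows one pattern: wherever a
driver already updates its running md5 checksum with a chunk, it now
also feeds that same chunk to the verifier when one is supplied. A
condensed, self-contained sketch of that pattern follows; ExampleStore
is hypothetical and writes to an in-memory buffer, it is not one of the
real drivers touched by this patch.

    import hashlib
    import io


    class ExampleStore(object):
        """Hypothetical driver standing in for any glance_store backend."""

        WRITE_CHUNKSIZE = 64 * 1024

        def __init__(self):
            self._objects = {}  # image_id -> bytes, in place of a real backend

        def add(self, image_id, image_file, image_size, context=None,
                verifier=None):
            checksum = hashlib.md5()
            bytes_written = 0
            target = io.BytesIO()
            while True:
                buf = image_file.read(self.WRITE_CHUNKSIZE)
                if not buf:
                    break
                bytes_written += len(buf)
                checksum.update(buf)
                # The only new step in this change: feed the same chunk to
                # the verifier (when one is supplied) so the signature is
                # checked against the raw image data as it is written.
                if verifier:
                    verifier.update(buf)
                target.write(buf)
            self._objects[image_id] = target.getvalue()
            return ('example://%s' % image_id, bytes_written,
                    checksum.hexdigest(), {})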
diff --git a/glance_store/_drivers/filesystem.py b/glance_store/_drivers/filesystem.py
index 9b2ed50..b9e3e29 100644
--- a/glance_store/_drivers/filesystem.py
+++ b/glance_store/_drivers/filesystem.py
@@ -561,7 +561,8 @@ class Store(glance_store.driver.Store):
return best_datadir
@capabilities.check
- def add(self, image_id, image_file, image_size, context=None):
+ def add(self, image_id, image_file, image_size, context=None,
+ verifier=None):
"""
Stores an image file with supplied identifier to the backend
storage system and returns a tuple containing information
@@ -570,6 +571,7 @@ class Store(glance_store.driver.Store):
:param image_id: The opaque image identifier
:param image_file: The image data to write, as a file-like object
:param image_size: The size of the image data to write, in bytes
+ :param verifier: An object used to verify signatures for images
:retval tuple of URL in backing store, bytes written, checksum
and a dictionary with storage system specific information
@@ -596,6 +598,8 @@ class Store(glance_store.driver.Store):
self.WRITE_CHUNKSIZE):
bytes_written += len(buf)
checksum.update(buf)
+ if verifier:
+ verifier.update(buf)
f.write(buf)
except IOError as e:
if e.errno != errno.EACCES:
diff --git a/glance_store/_drivers/rbd.py b/glance_store/_drivers/rbd.py
index 11e5b27..2626d2e 100644
--- a/glance_store/_drivers/rbd.py
+++ b/glance_store/_drivers/rbd.py
@@ -352,7 +352,8 @@ class Store(driver.Store):
raise exceptions.NotFound(message=msg)
@capabilities.check
- def add(self, image_id, image_file, image_size, context=None):
+ def add(self, image_id, image_file, image_size, context=None,
+ verifier=None):
"""
Stores an image file with supplied identifier to the backend
storage system and returns a tuple containing information
@@ -361,6 +362,7 @@ class Store(driver.Store):
:param image_id: The opaque image identifier
:param image_file: The image data to write, as a file-like object
:param image_size: The size of the image data to write, in bytes
+ :param verifier: An object used to verify signatures for images
:retval tuple of URL in backing store, bytes written, checksum
and a dictionary with storage system specific information
@@ -412,6 +414,8 @@ class Store(driver.Store):
(offset))
offset += image.write(chunk, offset)
checksum.update(chunk)
+ if verifier:
+ verifier.update(chunk)
if loc.snapshot:
image.create_snap(loc.snapshot)
image.protect_snap(loc.snapshot)
diff --git a/glance_store/_drivers/s3.py b/glance_store/_drivers/s3.py
index 12e0f56..fb18538 100644
--- a/glance_store/_drivers/s3.py
+++ b/glance_store/_drivers/s3.py
@@ -469,7 +469,8 @@ class Store(glance_store.driver.Store):
return key
@capabilities.check
- def add(self, image_id, image_file, image_size, context=None):
+ def add(self, image_id, image_file, image_size, context=None,
+ verifier=None):
"""
Stores an image file with supplied identifier to the backend
storage system and returns a tuple containing information
@@ -478,6 +479,7 @@ class Store(glance_store.driver.Store):
:param image_id: The opaque image identifier
:param image_file: The image data to write, as a file-like object
:param image_size: The size of the image data to write, in bytes
+ :param verifier: An object used to verify signatures for images
:retval tuple of URL in backing store, bytes written, checksum
and a dictionary with storage system specific information
@@ -523,23 +525,25 @@ class Store(glance_store.driver.Store):
self._sanitize(loc.get_uri()))
if image_size < self.s3_store_large_object_size:
- return self.add_singlepart(image_file, bucket_obj, obj_name, loc)
+ return self.add_singlepart(image_file, bucket_obj, obj_name, loc,
+ verifier)
else:
return self.add_multipart(image_file, image_size, bucket_obj,
- obj_name, loc)
+ obj_name, loc, verifier)
def _sanitize(self, uri):
return re.sub('//.*:.*@',
'//s3_store_secret_key:s3_store_access_key@',
uri)
- def add_singlepart(self, image_file, bucket_obj, obj_name, loc):
+ def add_singlepart(self, image_file, bucket_obj, obj_name, loc, verifier):
"""
Stores an image file with a single part upload to S3 backend
:param image_file: The image data to write, as a file-like object
:param bucket_obj: S3 bucket object
:param obj_name: The object name to be stored(image identifier)
+ :param verifier: An object used to verify signatures for images
:loc: The Store Location Info
"""
@@ -566,6 +570,8 @@ class Store(glance_store.driver.Store):
checksum = hashlib.md5()
for chunk in utils.chunkreadable(image_file, self.WRITE_CHUNKSIZE):
checksum.update(chunk)
+ if verifier:
+ verifier.update(chunk)
temp_file.write(chunk)
temp_file.flush()
@@ -587,13 +593,15 @@ class Store(glance_store.driver.Store):
return (loc.get_uri(), size, checksum_hex, {})
- def add_multipart(self, image_file, image_size, bucket_obj, obj_name, loc):
+ def add_multipart(self, image_file, image_size, bucket_obj, obj_name, loc,
+ verifier):
"""
Stores an image file with a multi part upload to S3 backend
:param image_file: The image data to write, as a file-like object
:param bucket_obj: S3 bucket object
:param obj_name: The object name to be stored(image identifier)
+ :param verifier: An object used to verify signatures for images
:loc: The Store Location Info
"""
@@ -625,6 +633,8 @@ class Store(glance_store.driver.Store):
write_chunk = buffered_chunk[:write_chunk_size]
remained_data = buffered_chunk[write_chunk_size:]
checksum.update(write_chunk)
+ if verifier:
+ verifier.update(write_chunk)
fp = six.BytesIO(write_chunk)
fp.seek(0)
part = UploadPart(mpu, fp, cstart + 1, len(write_chunk))
@@ -637,6 +647,8 @@ class Store(glance_store.driver.Store):
# Write the last chunk data
write_chunk = buffered_chunk
checksum.update(write_chunk)
+ if verifier:
+ verifier.update(write_chunk)
fp = six.BytesIO(write_chunk)
fp.seek(0)
part = UploadPart(mpu, fp, cstart + 1, len(write_chunk))
diff --git a/glance_store/_drivers/sheepdog.py b/glance_store/_drivers/sheepdog.py
index f5031ad..8a40765 100644
--- a/glance_store/_drivers/sheepdog.py
+++ b/glance_store/_drivers/sheepdog.py
@@ -264,7 +264,8 @@ class Store(glance_store.driver.Store):
return image.get_size()
@capabilities.check
- def add(self, image_id, image_file, image_size, context=None):
+ def add(self, image_id, image_file, image_size, context=None,
+ verifier=None):
"""
Stores an image file with supplied identifier to the backend
storage system and returns a tuple containing information
@@ -273,6 +274,7 @@ class Store(glance_store.driver.Store):
:param image_id: The opaque image identifier
:param image_file: The image data to write, as a file-like object
:param image_size: The size of the image data to write, in bytes
+ :param verifier: An object used to verify signatures for images
:retval tuple of URL in backing store, bytes written, and checksum
:raises `glance_store.exceptions.Duplicate` if the image already
@@ -302,6 +304,8 @@ class Store(glance_store.driver.Store):
image.write(data, total - left, length)
left -= length
checksum.update(data)
+ if verifier:
+ verifier.update(data)
except Exception:
# Note(zhiyan): clean up already received data when
# error occurs such as ImageSizeLimitExceeded exceptions.
diff --git a/glance_store/_drivers/swift/store.py b/glance_store/_drivers/swift/store.py
index 603a435..9e8a607 100644
--- a/glance_store/_drivers/swift/store.py
+++ b/glance_store/_drivers/swift/store.py
@@ -497,7 +497,7 @@ class BaseStore(driver.Store):
@capabilities.check
def add(self, image_id, image_file, image_size,
- connection=None, context=None):
+ connection=None, context=None, verifier=None):
location = self.create_location(image_id, context=context)
if not connection:
connection = self.get_connection(location, context=context)
@@ -544,7 +544,8 @@ class BaseStore(driver.Store):
content_length = chunk_size
chunk_name = "%s-%05d" % (location.obj, chunk_id)
- reader = ChunkReader(image_file, checksum, chunk_size)
+ reader = ChunkReader(image_file, checksum, chunk_size,
+ verifier)
try:
chunk_etag = connection.put_object(
location.container, chunk_name, reader,
@@ -944,10 +945,11 @@ class MultiTenantStore(BaseStore):
class ChunkReader(object):
- def __init__(self, fd, checksum, total):
+ def __init__(self, fd, checksum, total, verifier=None):
self.fd = fd
self.checksum = checksum
self.total = total
+ self.verifier = verifier
self.bytes_read = 0
def read(self, i):
@@ -960,4 +962,6 @@ class ChunkReader(object):
raise exceptions.ZeroSizeChunk()
self.bytes_read += len(result)
self.checksum.update(result)
+ if self.verifier:
+ self.verifier.update(result)
return result
diff --git a/glance_store/_drivers/vmware_datastore.py b/glance_store/_drivers/vmware_datastore.py
index 982267f..69693a4 100644
--- a/glance_store/_drivers/vmware_datastore.py
+++ b/glance_store/_drivers/vmware_datastore.py
@@ -135,15 +135,18 @@ def http_response_iterator(conn, response, size):
class _Reader(object):
- def __init__(self, data):
+ def __init__(self, data, verifier=None):
self._size = 0
self.data = data
self.checksum = hashlib.md5()
+ self.verifier = verifier
def read(self, size=None):
result = self.data.read(size)
self._size += len(result)
self.checksum.update(result)
+ if self.verifier:
+ self.verifier.update(result)
return result
@property
@@ -153,11 +156,11 @@ class _Reader(object):
class _ChunkReader(_Reader):
- def __init__(self, data, blocksize=8192):
+ def __init__(self, data, verifier=None, blocksize=8192):
self.blocksize = blocksize
self.current_chunk = b""
self.closed = False
- super(_ChunkReader, self).__init__(data)
+ super(_ChunkReader, self).__init__(data, verifier)
def read(self, size=None):
ret = b""
@@ -180,6 +183,8 @@ class _ChunkReader(_Reader):
chunk_len = len(chunk)
self._size += chunk_len
self.checksum.update(chunk)
+ if self.verifier:
+ self.verifier.update(chunk)
if chunk:
if six.PY3:
size_header = ('%x\r\n' % chunk_len).encode('ascii')
@@ -461,7 +466,8 @@ class Store(glance_store.Store):
return cookie.name + '=' + cookie.value
@capabilities.check
- def add(self, image_id, image_file, image_size, context=None):
+ def add(self, image_id, image_file, image_size, context=None,
+ verifier=None):
"""Stores an image file with supplied identifier to the backend
storage system and returns a tuple containing information
about the stored image.
@@ -469,6 +475,7 @@ class Store(glance_store.Store):
:param image_id: The opaque image identifier
:param image_file: The image data to write, as a file-like object
:param image_size: The size of the image data to write, in bytes
+ :param verifier: An object used to verify signatures for images
:retval tuple of URL in backing store, bytes written, checksum
and a dictionary with storage system specific information
:raises `glance.common.exceptions.Duplicate` if the image already
@@ -480,13 +487,13 @@ class Store(glance_store.Store):
ds = self.select_datastore(image_size)
if image_size > 0:
headers = {'Content-Length': image_size}
- image_file = _Reader(image_file)
+ image_file = _Reader(image_file, verifier)
else:
# NOTE (arnaud): use chunk encoding when the image is still being
# generated by the server (ex: stream optimized disks generated by
# Nova).
headers = {'Transfer-Encoding': 'chunked'}
- image_file = _ChunkReader(image_file)
+ image_file = _ChunkReader(image_file, verifier)
loc = StoreLocation({'scheme': self.scheme,
'server_host': self.server_host,
'image_dir': self.store_image_dir,
diff --git a/glance_store/backend.py b/glance_store/backend.py
index 205df5e..d4fe0ae 100644
--- a/glance_store/backend.py
+++ b/glance_store/backend.py
@@ -322,7 +322,8 @@ def check_location_metadata(val, key=''):
% dict(key=key, type=type(val)))
-def store_add_to_backend(image_id, data, size, store, context=None):
+def store_add_to_backend(image_id, data, size, store, context=None,
+ verifier=None):
"""
A wrapper around a call to each stores add() method. This gives glance
a common place to check the output
@@ -339,7 +340,8 @@ def store_add_to_backend(image_id, data, size, store, context=None):
(location, size, checksum, metadata) = store.add(image_id,
data,
size,
- context=context)
+ context=context,
+ verifier=verifier)
if metadata is not None:
if not isinstance(metadata, dict):
msg = (_("The storage driver %(driver)s returned invalid "
@@ -360,11 +362,13 @@ def store_add_to_backend(image_id, data, size, store, context=None):
return (location, size, checksum, metadata)
-def add_to_backend(conf, image_id, data, size, scheme=None, context=None):
+def add_to_backend(conf, image_id, data, size, scheme=None, context=None,
+ verifier=None):
if scheme is None:
scheme = conf['glance_store']['default_store']
store = get_store_from_scheme(scheme)
- return store_add_to_backend(image_id, data, size, store, context)
+ return store_add_to_backend(image_id, data, size, store, context,
+ verifier)
def set_acls(location_uri, public=False, read_tenants=[],
diff --git a/glance_store/driver.py b/glance_store/driver.py
index bab94d8..ac29ef3 100644
--- a/glance_store/driver.py
+++ b/glance_store/driver.py
@@ -125,7 +125,8 @@ class Store(capabilities.StoreCapability):
raise NotImplementedError
@capabilities.check
- def add(self, image_id, image_file, image_size, context=None):
+ def add(self, image_id, image_file, image_size, context=None,
+ verifier=None):
"""
Stores an image file with supplied identifier to the backend
storage system and returns a tuple containing information
diff --git a/glance_store/tests/unit/test_filesystem_store.py b/glance_store/tests/unit/test_filesystem_store.py
index f5ef3db..b8d09c2 100644
--- a/glance_store/tests/unit/test_filesystem_store.py
+++ b/glance_store/tests/unit/test_filesystem_store.py
@@ -183,6 +183,19 @@ class TestStore(base.StoreBaseTest,
self.assertEqual(expected_file_contents, new_image_contents)
self.assertEqual(expected_file_size, new_image_file_size)
+ def test_add_with_verifier(self):
+ """Test that 'verifier.update' is called when verifier is provided."""
+ verifier = mock.MagicMock(name='mock_verifier')
+ self.store.chunk_size = units.Ki
+ image_id = str(uuid.uuid4())
+ file_size = units.Ki # 1K
+ file_contents = b"*" * file_size
+ image_file = six.BytesIO(file_contents)
+
+ self.store.add(image_id, image_file, file_size, verifier=verifier)
+
+ verifier.update.assert_called_with(file_contents)
+
def test_add_check_metadata_with_invalid_mountpoint_location(self):
in_metadata = [{'id': 'abcdefg',
'mountpoint': '/xyz/images'}]
diff --git a/glance_store/tests/unit/test_rbd_store.py b/glance_store/tests/unit/test_rbd_store.py
index 025af94..2793837 100644
--- a/glance_store/tests/unit/test_rbd_store.py
+++ b/glance_store/tests/unit/test_rbd_store.py
@@ -227,6 +227,20 @@ class TestStore(base.StoreBaseTest,
'fake_image_id', self.data_iter, self.data_len)
self.called_commands_expected = ['create']
+ def test_add_with_verifier(self):
+ """Assert 'verifier.update' is called when verifier is provided."""
+ self.store.chunk_size = units.Ki
+ verifier = mock.MagicMock(name='mock_verifier')
+ image_id = 'fake_image_id'
+ file_size = 5 * units.Ki # 5K
+ file_contents = b"*" * file_size
+ image_file = six.BytesIO(file_contents)
+
+ with mock.patch.object(rbd_store.rbd.Image, 'write'):
+ self.store.add(image_id, image_file, file_size, verifier=verifier)
+
+ verifier.update.assert_called_with(file_contents)
+
def test_delete(self):
def _fake_remove(*args, **kwargs):
self.called_commands_actual.append('remove')
diff --git a/glance_store/tests/unit/test_s3_store.py b/glance_store/tests/unit/test_s3_store.py
index a2bac3a..b5db4f6 100644
--- a/glance_store/tests/unit/test_s3_store.py
+++ b/glance_store/tests/unit/test_s3_store.py
@@ -438,6 +438,47 @@ class TestStore(base.StoreBaseTest,
self.assertEqual(expected_s3_contents,
new_image_contents.getvalue())
+ def test_add_with_verifier(self):
+ """
+ Assert 'verifier.update' is called when verifier is provided, both
+ for multipart and for single uploads.
+ """
+ one_part_max = 6 * units.Mi
+ variations = [(FIVE_KB, 1), # simple put (5KB < 5MB)
+ (5 * units.Mi, 1), # 1 part (5MB <= 5MB < 6MB)
+ (one_part_max, 1), # 1 part exact (5MB <= 6MB <= 6MB)
+ (one_part_max + one_part_max // 2, 2), # 1.5 parts
+ (one_part_max * 2, 2)] # 2 parts exact
+
+ for (s3_size, update_calls) in variations:
+ image_id = str(uuid.uuid4())
+ base_byte = b"12345678"
+ s3_contents = base_byte * (s3_size // 8)
+ image_s3 = six.BytesIO(s3_contents)
+ verifier = mock.MagicMock(name='mock_verifier')
+
+ # add image
+ self.store.add(image_id, image_s3, s3_size, verifier=verifier)
+
+ # confirm update called expected number of times
+ self.assertEqual(verifier.update.call_count, update_calls)
+
+ if (update_calls <= 1):
+ # the contents weren't broken into pieces
+ verifier.update.assert_called_with(s3_contents)
+ else:
+ # most calls to update should be with the max one part size
+ s3_contents_max_part = base_byte * (one_part_max // 8)
+
+ # the last call to verify.update should be with what's left
+ s3_contents_last_part = base_byte * ((s3_size - one_part_max)
+ // 8)
+
+ # confirm all expected calls to update have occurred
+ calls = [mock.call(s3_contents_max_part),
+ mock.call(s3_contents_last_part)]
+ verifier.update.assert_has_calls(calls)
+
def test_add_host_variations(self):
"""
Test that having http(s):// in the s3serviceurl in config
diff --git a/glance_store/tests/unit/test_sheepdog_store.py b/glance_store/tests/unit/test_sheepdog_store.py
index 213b090..4758349 100644
--- a/glance_store/tests/unit/test_sheepdog_store.py
+++ b/glance_store/tests/unit/test_sheepdog_store.py
@@ -15,6 +15,7 @@
import mock
from oslo_concurrency import processutils
+from oslo_utils import units
import six
from glance_store._drivers import sheepdog
@@ -133,3 +134,21 @@ class TestSheepdogStore(base.StoreBaseTest,
self.conf, store_specs=self.store_specs)
self.store.delete(loc)
self.assertEqual(called_commands, ['list -r', 'delete'])
+
+ def test_add_with_verifier(self):
+ """Test that 'verifier.update' is called when verifier is provided."""
+ verifier = mock.MagicMock(name='mock_verifier')
+ self.store.chunk_size = units.Ki
+ image_id = 'fake_image_id'
+ file_size = units.Ki # 1K
+ file_contents = b"*" * file_size
+ image_file = six.BytesIO(file_contents)
+
+ def _fake_run_command(command, data, *params):
+ pass
+
+ with mock.patch.object(sheepdog.SheepdogImage, '_run_command') as cmd:
+ cmd.side_effect = _fake_run_command
+ self.store.add(image_id, image_file, file_size, verifier=verifier)
+
+ verifier.update.assert_called_with(file_contents)
diff --git a/glance_store/tests/unit/test_swift_store.py b/glance_store/tests/unit/test_swift_store.py
index f1f8769..a33abd2 100644
--- a/glance_store/tests/unit/test_swift_store.py
+++ b/glance_store/tests/unit/test_swift_store.py
@@ -607,6 +607,53 @@ class SwiftTests(object):
@mock.patch('glance_store._drivers.swift.utils'
'.is_multiple_swift_store_accounts_enabled',
+ mock.Mock(return_value=True))
+ def test_add_with_verifier(self):
+ """Test that the verifier is updated when verifier is provided."""
+ swift_size = FIVE_KB
+ base_byte = b"12345678"
+ swift_contents = base_byte * (swift_size // 8)
+ image_id = str(uuid.uuid4())
+ image_swift = six.BytesIO(swift_contents)
+
+ self.store = Store(self.conf)
+ self.store.configure()
+ orig_max_size = self.store.large_object_size
+ orig_temp_size = self.store.large_object_chunk_size
+ custom_size = units.Ki
+ verifier = mock.MagicMock(name='mock_verifier')
+
+ try:
+ self.store.large_object_size = custom_size
+ self.store.large_object_chunk_size = custom_size
+ self.store.add(image_id, image_swift, swift_size,
+ verifier=verifier)
+ finally:
+ self.store.large_object_chunk_size = orig_temp_size
+ self.store.large_object_size = orig_max_size
+
+ # Confirm verifier update called expected number of times
+ self.assertEqual(verifier.update.call_count,
+ 2 * swift_size / custom_size)
+
+ # define one chunk of the contents
+ swift_contents_piece = base_byte * (custom_size // 8)
+
+ # confirm all expected calls to update have occurred
+ calls = [mock.call(swift_contents_piece),
+ mock.call(b''),
+ mock.call(swift_contents_piece),
+ mock.call(b''),
+ mock.call(swift_contents_piece),
+ mock.call(b''),
+ mock.call(swift_contents_piece),
+ mock.call(b''),
+ mock.call(swift_contents_piece),
+ mock.call(b'')]
+ verifier.update.assert_has_calls(calls)
+
+ @mock.patch('glance_store._drivers.swift.utils'
+ '.is_multiple_swift_store_accounts_enabled',
mock.Mock(return_value=False))
def test_multi_container_doesnt_impact_multi_tenant_add(self):
expected_swift_size = FIVE_KB
diff --git a/glance_store/tests/unit/test_vmware_store.py b/glance_store/tests/unit/test_vmware_store.py
index 9594037..8376220 100644
--- a/glance_store/tests/unit/test_vmware_store.py
+++ b/glance_store/tests/unit/test_vmware_store.py
@@ -217,6 +217,36 @@ class TestStore(base.StoreBaseTest,
self.assertEqual(expected_size, size)
self.assertEqual(expected_checksum, checksum)
+ @mock.patch.object(vm_store.Store, 'select_datastore')
+ @mock.patch('glance_store._drivers.vmware_datastore._Reader')
+ def test_add_with_verifier(self, fake_reader, fake_select_datastore):
+ """Test that the verifier is passed to the _Reader during add."""
+ verifier = mock.MagicMock(name='mock_verifier')
+ image_id = str(uuid.uuid4())
+ size = FIVE_KB
+ contents = b"*" * size
+ image = six.BytesIO(contents)
+ with self._mock_http_connection() as HttpConn:
+ HttpConn.return_value = FakeHTTPConnection()
+ self.store.add(image_id, image, size, verifier=verifier)
+
+ fake_reader.assert_called_with(image, verifier)
+
+ @mock.patch.object(vm_store.Store, 'select_datastore')
+ @mock.patch('glance_store._drivers.vmware_datastore._ChunkReader')
+ def test_add_with_verifier_size_zero(self, fake_reader, fake_select_ds):
+ """Test that the verifier is passed to the _ChunkReader during add."""
+ verifier = mock.MagicMock(name='mock_verifier')
+ image_id = str(uuid.uuid4())
+ size = FIVE_KB
+ contents = b"*" * size
+ image = six.BytesIO(contents)
+ with self._mock_http_connection() as HttpConn:
+ HttpConn.return_value = FakeHTTPConnection()
+ self.store.add(image_id, image, 0, verifier=verifier)
+
+ fake_reader.assert_called_with(image, verifier)
+
@mock.patch('oslo_vmware.api.VMwareAPISession')
def test_delete(self, mock_api_session):
"""Test we can delete an existing image in the VMware store."""
@@ -290,6 +320,14 @@ class TestStore(base.StoreBaseTest,
self.assertEqual(expected_checksum, reader.checksum.hexdigest())
self.assertEqual(1, reader.size)
+ def test_reader_with_verifier(self):
+ content = b'XXX'
+ image = six.BytesIO(content)
+ verifier = mock.MagicMock(name='mock_verifier')
+ reader = vm_store._Reader(image, verifier)
+ reader.read()
+ verifier.update.assert_called_with(content)
+
def test_chunkreader_image_fits_in_blocksize(self):
"""
Test that the image file reader returns the expected chunk of data
@@ -352,6 +390,14 @@ class TestStore(base.StoreBaseTest,
self.assertEqual(len(content), reader.size)
self.assertTrue(reader.closed)
+ def test_chunkreader_with_verifier(self):
+ content = b'XXX'
+ image = six.BytesIO(content)
+ verifier = mock.MagicMock(name='mock_verifier')
+ reader = vm_store._ChunkReader(image, verifier)
+ reader.read(size=3)
+ verifier.update.assert_called_with(content)
+
def test_sanity_check_api_retry_count(self):
"""Test that sanity check raises if api_retry_count is <= 0."""
self.store.conf.glance_store.vmware_api_retry_count = -1