Diffstat (limited to 'boto/gs/key.py')
-rw-r--r--  boto/gs/key.py  47
1 file changed, 38 insertions, 9 deletions
diff --git a/boto/gs/key.py b/boto/gs/key.py
index de6e6f44..21532d38 100644
--- a/boto/gs/key.py
+++ b/boto/gs/key.py
@@ -20,6 +20,7 @@
# IN THE SOFTWARE.
import StringIO
+from boto.exception import BotoClientError
from boto.s3.key import Key as S3Key
class Key(S3Key):
@@ -110,7 +111,7 @@ class Key(S3Key):
def set_contents_from_file(self, fp, headers=None, replace=True,
cb=None, num_cb=10, policy=None, md5=None,
- res_upload_handler=None):
+ res_upload_handler=None, size=None):
"""
Store an object in GS using the name of the Key object as the
key in GS and the contents of the file pointed to by 'fp' as the
@@ -158,12 +159,31 @@ class Key(S3Key):
:param res_upload_handler: If provided, this handler will perform the
upload.
+ :type size: int
+ :param size: (optional) The maximum number of bytes to read from
+ the file pointer (fp). This is useful when uploading
+ a file in multiple parts where you are splitting the
+ file up into different ranges to be uploaded. If not
+ specified, the default behaviour is to read all bytes
+ from the file pointer. Fewer bytes may be available.
+ Notes:
+
+ 1. The "size" parameter currently cannot be used when
+ a resumable upload handler is given, but it is still
+ useful for uploading part of a file as implemented
+ by the parent class.
+ 2. At present Google Cloud Storage does not support
+ multipart uploads.
+
TODO: At some point we should refactor the Bucket and Key classes,
to move functionality common to all providers into a parent class,
and provider-specific functionality into subclasses (rather than
just overriding/sharing code the way it currently works).
"""
provider = self.bucket.connection.provider
+ if res_upload_handler and size:
+ # could use size instead of file_length if provided but...
+ raise BotoClientError('"size" param not supported for resumable uploads.')
headers = headers or {}
if policy:
headers[provider.acl_header] = policy
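A minimal usage sketch of the new "size" parameter, assuming configured Google Storage credentials; the bucket name, object name, file path, and 5 MB range below are hypothetical. The caller positions fp at the start of the range and passes the range length as size; combining size with a resumable upload handler is rejected with the BotoClientError raised above.

import boto

# Hypothetical names; illustrates only the size parameter added in this hunk.
conn = boto.connect_gs()
bucket = conn.get_bucket('example-bucket')
key = bucket.new_key('example-object-part-1')

chunk = 5 * 1024 * 1024                # upload only the first 5 MB of the file
with open('/tmp/example.dat', 'rb') as fp:
    fp.seek(0)                         # start of the range to upload
    key.set_contents_from_file(fp, size=chunk)

# Passing size together with a resumable upload handler raises
# BotoClientError('"size" param not supported for resumable uploads.'):
#   key.set_contents_from_file(fp, res_upload_handler=handler, size=chunk)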
@@ -171,25 +191,34 @@ class Key(S3Key):
self.path = fp.name
if self.bucket != None:
if not md5:
- md5 = self.compute_md5(fp)
+ # compute_md5() also sets self.size to the actual
+ # number of bytes read while computing the md5.
+ md5 = self.compute_md5(fp, size)
+ # adjust size if required
+ size = self.size
+ elif size:
+ self.size = size
else:
- # Even if md5 is provided, still need to set size of content.
- fp.seek(0, 2)
- self.size = fp.tell()
- fp.seek(0)
+ # If md5 is provided, we still need the size, so
+ # calculate it from the bytes remaining to the end of the content.
+ spos = fp.tell()
+ fp.seek(0, os.SEEK_END)
+ self.size = fp.tell() - spos
+ fp.seek(spos)
+ size = self.size
self.md5 = md5[0]
self.base64md5 = md5[1]
+
if self.name == None:
self.name = self.md5
if not replace:
- k = self.bucket.lookup(self.name)
- if k:
+ if self.bucket.lookup(self.name):
return
if res_upload_handler:
res_upload_handler.send_file(self, fp, headers, cb, num_cb)
else:
# Not a resumable transfer so use basic send_file mechanism.
- self.send_file(fp, headers, cb, num_cb)
+ self.send_file(fp, headers, cb, num_cb, size=size)
def set_contents_from_filename(self, filename, headers=None, replace=True,
cb=None, num_cb=10, policy=None, md5=None,
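For reference, a standalone sketch of the size calculation the hunk above performs when an md5 is already supplied: the remaining length is measured from fp's current position to end of file, then the position is restored. os.SEEK_END comes from the standard os module, which the surrounding file is assumed to import; the file path and offset below are hypothetical.

import os

def remaining_bytes(fp):
    # Mirror of the spos/seek/tell logic above: measure the bytes from the
    # current position to EOF without disturbing the position.
    spos = fp.tell()
    fp.seek(0, os.SEEK_END)
    size = fp.tell() - spos
    fp.seek(spos)
    return size

with open('/tmp/example.dat', 'rb') as fp:
    fp.seek(1024)                      # pretend the first 1 KB was already uploaded
    bytes_left = remaining_bytes(fp)   # size that would be sent from this position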