author      Thomas O'Dowd <tpodowd@geminimobile.com>    2012-02-11 12:56:17 +0900
committer   Thomas O'Dowd <tpodowd@geminimobile.com>    2012-02-11 12:56:17 +0900
commit      846b2e91548bd0cc579e3b7bd8aaffb740ce18ef (patch)
tree        a839d5c30a46bd27f79c9c24f9f4584296e4b788
parent      42efefad7c7b6cd333ad75e2df69096a26d042aa (diff)
download    boto-846b2e91548bd0cc579e3b7bd8aaffb740ce18ef.tar.gz
fix missing BotoClientError import and add more docs
-rw-r--r--  boto/gs/key.py  10
1 file changed, 9 insertions, 1 deletion
diff --git a/boto/gs/key.py b/boto/gs/key.py
index 27edabab..a0c50a79 100644
--- a/boto/gs/key.py
+++ b/boto/gs/key.py
@@ -20,6 +20,7 @@
 # IN THE SOFTWARE.
 
 import StringIO
+from boto.exception import BotoClientError
 from boto.s3.key import Key as S3Key
 
 class Key(S3Key):
@@ -165,6 +166,13 @@ class Key(S3Key):
             file up into different ranges to be uploaded. If not
             specified, the default behaviour is to read all bytes
             from the file pointer. Less bytes may be available.
+            Notes:
+                1. The "size" parameter currently cannot be used when
+                   a resumable upload handler is given but is still
+                   useful for uploading part of a file as implemented
+                   by the parent class.
+                2. At present Google Cloud Storage does not support
+                   multipart uploads.
 
         TODO: At some point we should refactor the Bucket and Key classes,
         to move functionality common to all providers into a parent class,
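
The docstring note above says the "size" parameter remains useful on its own
for uploading part of a file, with the byte handling inherited from
boto.s3.key.Key. A minimal usage sketch, not part of this patch, assuming a
hypothetical bucket name, key name and local file path:

    # Upload only the first 1024 bytes of a local file to Google Cloud Storage.
    # Bucket name, key name and file path below are placeholders.
    import boto

    conn = boto.connect_gs()
    bucket = conn.get_bucket('example-bucket')
    key = bucket.new_key('partial-object')

    fp = open('/tmp/example.dat', 'rb')
    try:
        # Without a resumable upload handler, "size" simply limits how many
        # bytes are read from the file pointer (parent S3 Key behaviour).
        key.set_contents_from_file(fp, size=1024)
    finally:
        fp.close()
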
@@ -174,7 +182,7 @@ class Key(S3Key):
         provider = self.bucket.connection.provider
         if res_upload_handler and size:
             # could use size instead of file_length if provided but...
-            raise BotoClientError('Resumable Uploads with size not supported.')
+            raise BotoClientError('"size" param not supported for resumable uploads.')
         headers = headers or {}
         if policy:
             headers[provider.acl_header] = policy
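
The hunk above adds a guard in set_contents_from_file: passing both a
resumable upload handler and a "size" now raises BotoClientError (hence the
new import) instead of failing with a NameError on the undefined name. A
sketch of how a caller would hit that guard, not part of this patch, assuming
hypothetical bucket, key, tracker-file and data-file names:

    # Combining a resumable upload handler with "size" is rejected.
    import boto
    from boto.exception import BotoClientError
    from boto.gs.resumable_upload_handler import ResumableUploadHandler

    conn = boto.connect_gs()
    bucket = conn.get_bucket('example-bucket')
    key = bucket.new_key('large-object')

    handler = ResumableUploadHandler(tracker_file_name='/tmp/upload.tracker')
    fp = open('/tmp/large.dat', 'rb')
    try:
        key.set_contents_from_file(fp, res_upload_handler=handler, size=1024)
    except BotoClientError as e:
        print e.reason  # '"size" param not supported for resumable uploads.'
    finally:
        fp.close()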