author    Mitch Garnaat <mitch@garnaat.com>  2012-03-27 21:00:28 -0700
committer Mitch Garnaat <mitch@garnaat.com>  2012-03-27 21:00:28 -0700
commit    28aff58ff881ed25205cf01072559c8820ec3e07 (patch)
tree      fae75fb9a77620c2e8305d467efcd4bd95e00814
parent    7f4fe40eda8b0ad1c58b647f95bbdaecf2506ffd (diff)
download  boto-28aff58ff881ed25205cf01072559c8820ec3e07.tar.gz
PEP8 and pyflakes cleanup.
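
The diff below is almost entirely mechanical style cleanup: operator spacing, dict-literal formatting, line wrapping, trailing-whitespace removal, and unused imports dropped. A minimal sketch of the recurring pep8 fix, using parameter names taken from the connection.py hunks below (the surrounding function and sample data are hypothetical scaffolding, added only to make the snippet runnable):

    def build_instance_group_params(instance_groups):
        """Build EMR query params, formatted the way this commit leaves them."""
        params = {}
        for k, ig in enumerate(instance_groups):
            # pep8 flags the old form "(k+1) ]": no spaces around the
            # binary '+' and a stray space before ']'. The cleaned form
            # below is what this commit converges on, along with
            # "'key': value" (no space before ':') in dict literals.
            params['InstanceGroups.member.%d.InstanceGroupId' % (k + 1)] = ig[0]
            params['InstanceGroups.member.%d.InstanceCount' % (k + 1)] = ig[1]
        return params

    print(build_instance_group_params([('ig-1', 4), ('ig-2', 2)]))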
-rw-r--r--  boto/emr/__init__.py                     5
-rw-r--r--  boto/emr/bootstrap_action.py             1
-rw-r--r--  boto/emr/connection.py                  79
-rw-r--r--  boto/emr/emrobject.py                    6
-rw-r--r--  boto/emr/step.py                        25
-rwxr-xr-x  boto/file/__init__.py                    2
-rw-r--r--  boto/file/bucket.py                      4
-rwxr-xr-x  boto/file/connection.py                  5
-rwxr-xr-x  boto/file/key.py                         9
-rwxr-xr-x  boto/file/simpleresultset.py             1
-rwxr-xr-x  boto/gs/__init__.py                      3
-rwxr-xr-x  boto/gs/acl.py                          48
-rw-r--r--  boto/gs/bucket.py                      148
-rwxr-xr-x  boto/gs/connection.py                   20
-rwxr-xr-x  boto/gs/cors.py                         91
-rw-r--r--  boto/gs/key.py                          46
-rw-r--r--  boto/gs/resumable_upload_handler.py     74
17 files changed, 313 insertions(+), 254 deletions(-)
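
Beyond whitespace, one substantive pattern in the diff is the switch to explicit relative imports for sibling modules inside the boto.emr package (see boto/emr/connection.py below). A sketch of the before/after; this is an excerpt rather than a standalone script, since relative imports only resolve when the module is imported as part of its package:

    # Import style in boto/emr/connection.py after this commit.

    # Before (absolute, repeating the package's own dotted name):
    #   from boto.emr.emrobject import JobFlow, RunJobFlowResponse
    #   from boto.emr.step import JarStep

    # After (explicit relative; the module no longer hard-codes 'boto.emr',
    # so the package can be relocated or vendored without editing imports):
    from .emrobject import JobFlow, RunJobFlowResponse
    from .step import JarStep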
diff --git a/boto/emr/__init__.py b/boto/emr/__init__.py
index adb5dd03..985e1b38 100644
--- a/boto/emr/__init__.py
+++ b/boto/emr/__init__.py
@@ -23,8 +23,3 @@
This module provides an interface to the Elastic MapReduce (EMR)
service from AWS.
"""
-from .connection import EmrConnection
-from .step import Step, StreamingStep, JarStep
-from .bootstrap_action import BootstrapAction
-
-
diff --git a/boto/emr/bootstrap_action.py b/boto/emr/bootstrap_action.py
index 7db0b3da..18119fb5 100644
--- a/boto/emr/bootstrap_action.py
+++ b/boto/emr/bootstrap_action.py
@@ -20,6 +20,7 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
+
class BootstrapAction(object):
def __init__(self, name, path, bootstrap_action_args):
self.name = name
diff --git a/boto/emr/connection.py b/boto/emr/connection.py
index 868037f0..5f5005a5 100644
--- a/boto/emr/connection.py
+++ b/boto/emr/connection.py
@@ -27,12 +27,13 @@ Represents a connection to the EMR service
import boto
import boto.utils
from boto.ec2.regioninfo import RegionInfo
-from boto.emr.emrobject import JobFlow, RunJobFlowResponse
-from boto.emr.emrobject import AddInstanceGroupsResponse, ModifyInstanceGroupsResponse
-from boto.emr.step import JarStep
+from .emrobject import JobFlow, RunJobFlowResponse
+from .emrobject import AddInstanceGroupsResponse, ModifyInstanceGroupsResponse
+from .step import JarStep
from boto.connection import AWSQueryConnection
from boto.exception import EmrResponseError
+
class EmrConnection(AWSQueryConnection):
APIVersion = boto.config.get('Boto', 'emr_version', '2009-03-31')
@@ -153,7 +154,7 @@ class EmrConnection(AWSQueryConnection):
:type jobflow_id: str
:param jobflow_id: The id of the jobflow which will take the
new instance groups
-
+
:type instance_groups: list(boto.emr.InstanceGroup)
:param instance_groups: A list of instance groups to add to the job
"""
@@ -174,7 +175,7 @@ class EmrConnection(AWSQueryConnection):
:type instance_group_ids: list(str)
:param instance_group_ids: A list of the ID's of the instance
groups to be modified
-
+
:type new_sizes: list(int)
:param new_sizes: A list of the new sizes for each instance group
"""
@@ -190,8 +191,8 @@ class EmrConnection(AWSQueryConnection):
# could be wrong - the example amazon gives uses
# InstanceRequestCount, while the api documentation
# says InstanceCount
- params['InstanceGroups.member.%d.InstanceGroupId' % (k+1) ] = ig[0]
- params['InstanceGroups.member.%d.InstanceCount' % (k+1) ] = ig[1]
+ params['InstanceGroups.member.%d.InstanceGroupId' % (k + 1)] = ig[0]
+ params['InstanceGroups.member.%d.InstanceCount' % (k + 1)] = ig[1]
return self.get_object('ModifyInstanceGroups', params,
ModifyInstanceGroupsResponse, verb='POST')
@@ -213,59 +214,59 @@ class EmrConnection(AWSQueryConnection):
Runs a job flow
:type name: str
:param name: Name of the job flow
-
+
:type log_uri: str
:param log_uri: URI of the S3 bucket to place logs
-
+
:type ec2_keyname: str
:param ec2_keyname: EC2 key used for the instances
-
+
:type availability_zone: str
:param availability_zone: EC2 availability zone of the cluster
-
+
:type master_instance_type: str
:param master_instance_type: EC2 instance type of the master
-
+
:type slave_instance_type: str
:param slave_instance_type: EC2 instance type of the slave nodes
-
+
:type num_instances: int
:param num_instances: Number of instances in the Hadoop cluster
-
+
:type action_on_failure: str
:param action_on_failure: Action to take if a step terminates
-
+
:type keep_alive: bool
:param keep_alive: Denotes whether the cluster should stay
alive upon completion
-
+
:type enable_debugging: bool
:param enable_debugging: Denotes whether AWS console debugging
should be enabled.
:type hadoop_version: str
:param hadoop_version: Version of Hadoop to use. This no longer
- defaults to '0.20' and now uses the AMI default.
+ defaults to '0.20' and now uses the AMI default.
:type steps: list(boto.emr.Step)
:param steps: List of steps to add with the job
-
+
:type bootstrap_actions: list(boto.emr.BootstrapAction)
:param bootstrap_actions: List of bootstrap actions that run
before Hadoop starts.
-
+
:type instance_groups: list(boto.emr.InstanceGroup)
:param instance_groups: Optional list of instance groups to
use when creating this job.
NB: When provided, this argument supersedes num_instances
and master/slave_instance_type.
-
+
:type ami_version: str
:param ami_version: Amazon Machine Image (AMI) version to use
for instances. Values accepted by EMR are '1.0', '2.0', and
'latest'; EMR currently defaults to '1.0' if you don't set
'ami_version'.
-
+
:type additional_info: JSON str
:param additional_info: A JSON string for selecting additional features
@@ -304,7 +305,7 @@ class EmrConnection(AWSQueryConnection):
num_instances)
params.update(instance_params)
else:
- # Instance group args (for spot instances or a heterogeneous cluster)
+ # Instance group args for spot instances or a heterogeneous cluster
list_args = self._build_instance_group_list_args(instance_groups)
instance_params = dict(
('Instances.%s' % k, v) for k, v in list_args.items()
@@ -353,7 +354,7 @@ class EmrConnection(AWSQueryConnection):
:type jobflow_ids: list or str
:param jobflow_ids: A list of job flow IDs
-
+
:type termination_protection_status: bool
:param termination_protection_status: Termination protection status
"""
@@ -365,7 +366,6 @@ class EmrConnection(AWSQueryConnection):
return self.get_status('SetTerminationProtection', params, verb='POST')
-
def _build_bootstrap_action_args(self, bootstrap_action):
bootstrap_action_params = {}
bootstrap_action_params['ScriptBootstrapAction.Path'] = bootstrap_action.path
@@ -377,7 +377,8 @@ class EmrConnection(AWSQueryConnection):
args = bootstrap_action.args()
if args:
- self.build_list_params(bootstrap_action_params, args, 'ScriptBootstrapAction.Args.member')
+ self.build_list_params(bootstrap_action_params, args,
+ 'ScriptBootstrapAction.Args.member')
return bootstrap_action_params
@@ -392,7 +393,8 @@ class EmrConnection(AWSQueryConnection):
args = step.args()
if args:
- self.build_list_params(step_params, args, 'HadoopJarStep.Args.member')
+ self.build_list_params(step_params, args,
+ 'HadoopJarStep.Args.member')
step_params['Name'] = step.name
return step_params
@@ -414,7 +416,7 @@ class EmrConnection(AWSQueryConnection):
params = {}
for i, step in enumerate(steps):
for key, value in step.items():
- params['Steps.member.%s.%s' % (i+1, key)] = value
+ params['Steps.member.%s.%s' % (i + 1, key)] = value
return params
def _build_instance_common_args(self, ec2_keyname, availability_zone,
@@ -425,7 +427,7 @@ class EmrConnection(AWSQueryConnection):
use in making a RunJobFlow request.
"""
params = {
- 'Instances.KeepJobFlowAliveWhenNoSteps' : str(keep_alive).lower(),
+ 'Instances.KeepJobFlowAliveWhenNoSteps': str(keep_alive).lower(),
}
if hadoop_version:
@@ -438,16 +440,17 @@ class EmrConnection(AWSQueryConnection):
return params
def _build_instance_count_and_type_args(self, master_instance_type,
- slave_instance_type, num_instances):
+ slave_instance_type,
+ num_instances):
"""
Takes a master instance type (string), a slave instance type
(string), and a number of instances. Returns a comparable dict
for use in making a RunJobFlow request.
"""
params = {
- 'Instances.MasterInstanceType' : master_instance_type,
- 'Instances.SlaveInstanceType' : slave_instance_type,
- 'Instances.InstanceCount' : num_instances,
+ 'Instances.MasterInstanceType': master_instance_type,
+ 'Instances.SlaveInstanceType': slave_instance_type,
+ 'Instances.InstanceCount': num_instances,
}
return params
@@ -458,11 +461,11 @@ class EmrConnection(AWSQueryConnection):
RunJobFlow or AddInstanceGroups requests.
"""
params = {
- 'InstanceCount' : instance_group.num_instances,
- 'InstanceRole' : instance_group.role,
- 'InstanceType' : instance_group.type,
- 'Name' : instance_group.name,
- 'Market' : instance_group.market
+ 'InstanceCount': instance_group.num_instances,
+ 'InstanceRole': instance_group.role,
+ 'InstanceType': instance_group.type,
+ 'Name': instance_group.name,
+ 'Market': instance_group.market
}
if instance_group.market == 'SPOT':
params['BidPrice'] = instance_group.bidprice
@@ -481,5 +484,5 @@ class EmrConnection(AWSQueryConnection):
for i, instance_group in enumerate(instance_groups):
ig_dict = self._build_instance_group_args(instance_group)
for key, value in ig_dict.items():
- params['InstanceGroups.member.%d.%s' % (i+1, key)] = value
+ params['InstanceGroups.member.%d.%s' % (i + 1, key)] = value
return params
diff --git a/boto/emr/emrobject.py b/boto/emr/emrobject.py
index c0888127..ef7b0154 100644
--- a/boto/emr/emrobject.py
+++ b/boto/emr/emrobject.py
@@ -45,12 +45,14 @@ class EmrObject(object):
class RunJobFlowResponse(EmrObject):
Fields = set(['JobFlowId'])
+
class AddInstanceGroupsResponse(EmrObject):
Fields = set(['InstanceGroupIds', 'JobFlowId'])
-
+
+
class ModifyInstanceGroupsResponse(EmrObject):
Fields = set(['RequestId'])
-
+
class Arg(EmrObject):
def __init__(self, connection=None):
diff --git a/boto/emr/step.py b/boto/emr/step.py
index 15dfe889..75e132aa 100644
--- a/boto/emr/step.py
+++ b/boto/emr/step.py
@@ -20,6 +20,7 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
+
class Step(object):
"""
Jobflow Step base class
@@ -105,26 +106,40 @@ class StreamingStep(Step):
:type name: str
:param name: The name of the step
+
:type mapper: str
:param mapper: The mapper URI
+
:type reducer: str
:param reducer: The reducer URI
+
:type combiner: str
- :param combiner: The combiner URI. Only works for Hadoop 0.20 and later!
+ :param combiner: The combiner URI. Only works for Hadoop 0.20
+ and later!
+
:type action_on_failure: str
- :param action_on_failure: An action, defined in the EMR docs to take on failure.
+ :param action_on_failure: An action, defined in the EMR docs
+ to take on failure.
+
:type cache_files: list(str)
:param cache_files: A list of cache files to be bundled with the job
+
:type cache_archives: list(str)
- :param cache_archives: A list of jar archives to be bundled with the job
+ :param cache_archives: A list of jar archives to be bundled with
+ the job.
+
:type step_args: list(str)
:param step_args: A list of arguments to pass to the step
+
:type input: str or a list of str
:param input: The input uri
+
:type output: str
:param output: The output uri
+
:type jar: str
- :param jar: The hadoop streaming jar. This can be either a local path on the master node, or an s3:// URI.
+ :param jar: The hadoop streaming jar. This can be either a
+ local path on the master node, or an s3:// URI.
"""
self.name = name
self.mapper = mapper
@@ -180,7 +195,7 @@ class StreamingStep(Step):
args.extend(('-cacheFile', cache_file))
if self.cache_archives:
- for cache_archive in self.cache_archives:
+ for cache_archive in self.cache_archives:
args.extend(('-cacheArchive', cache_archive))
return args
diff --git a/boto/file/__init__.py b/boto/file/__init__.py
index 60ecae4c..5d006008 100755
--- a/boto/file/__init__.py
+++ b/boto/file/__init__.py
@@ -14,7 +14,7 @@
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
-# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
diff --git a/boto/file/bucket.py b/boto/file/bucket.py
index 7640e2b8..5927ff86 100644
--- a/boto/file/bucket.py
+++ b/boto/file/bucket.py
@@ -27,6 +27,7 @@ from .key import Key
from boto.file.simpleresultset import SimpleResultSet
from boto.s3.bucketlistresultset import BucketListResultSet
+
class Bucket(object):
def __init__(self, name, contained_key):
"""Instantiate an anonymous file-based Bucket around a single key.
@@ -81,7 +82,8 @@ class Bucket(object):
:param version_id: Unused in this subclass.
:type stream_type: integer
- :param stream_type: Type of the Key - Regular File or input/output Stream
+ :param stream_type: Type of the Key - Regular File or input/output
+ Stream
:rtype: :class:`boto.file.key.Key`
:returns: A Key object from this bucket.
diff --git a/boto/file/connection.py b/boto/file/connection.py
index 22ce4fca..a4501da0 100755
--- a/boto/file/connection.py
+++ b/boto/file/connection.py
@@ -19,10 +19,13 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
-# File representation of connection, for use with "file://" URIs.
+"""
+File representation of connection, for use with "file://" URIs.
+"""
from .bucket import Bucket
+
class FileConnection(object):
def __init__(self, file_storage_uri):
diff --git a/boto/file/key.py b/boto/file/key.py
index f15e99d1..5e32f336 100755
--- a/boto/file/key.py
+++ b/boto/file/key.py
@@ -22,16 +22,19 @@
# File representation of key, for use with "file://" URIs.
-import os, shutil
+import os
+import shutil
import sys
+from boto.exception import BotoClientError
import boto.compat as compat
+
class Key(object):
KEY_STREAM_READABLE = 0x01
KEY_STREAM_WRITABLE = 0x02
- KEY_STREAM = (KEY_STREAM_READABLE | KEY_STREAM_WRITABLE)
- KEY_REGULAR_FILE = 0x00
+ KEY_STREAM = (KEY_STREAM_READABLE | KEY_STREAM_WRITABLE)
+ KEY_REGULAR_FILE = 0x00
def __init__(self, bucket, name, fp=None, key_type=KEY_REGULAR_FILE):
self.bucket = bucket
diff --git a/boto/file/simpleresultset.py b/boto/file/simpleresultset.py
index 5f94dc11..917546bd 100755
--- a/boto/file/simpleresultset.py
+++ b/boto/file/simpleresultset.py
@@ -19,6 +19,7 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
+
class SimpleResultSet(list):
"""
ResultSet facade built from a simple list, rather than via XML parsing.
diff --git a/boto/gs/__init__.py b/boto/gs/__init__.py
index bf4c0b94..8fc21d36 100755
--- a/boto/gs/__init__.py
+++ b/boto/gs/__init__.py
@@ -14,9 +14,8 @@
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
-# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
-
diff --git a/boto/gs/acl.py b/boto/gs/acl.py
index 02a5c9ba..874f8402 100755
--- a/boto/gs/acl.py
+++ b/boto/gs/acl.py
@@ -14,7 +14,7 @@
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
-# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
@@ -49,6 +49,7 @@ CannedACLStrings = ['private', 'public-read', 'project-private',
SupportedPermissions = ['READ', 'WRITE', 'FULL_CONTROL']
+
class ACL:
def __init__(self, parent=None):
@@ -127,8 +128,8 @@ class Entries:
def __init__(self, parent=None):
self.parent = parent
- # Entries is the class that represents the same-named XML
- # element. entry_list is the list within this class that holds the data.
+ # Entries is the class that represents the same-named XML element.
+ # entry_list is the list within this class that holds the data.
self.entry_list = []
def __repr__(self):
@@ -157,10 +158,12 @@ class Entries:
s += entry.to_xml()
s += '</%s>' % ENTRIES
return s
-
-# Class that represents a single (Scope, Permission) entry in an ACL.
+
class Entry:
+ """
+ Class that represents a single (Scope, Permission) entry in an ACL.
+ """
def __init__(self, scope=None, type=None, id=None, name=None,
email_address=None, domain=None, permission=None):
@@ -174,20 +177,20 @@ class Entry:
def startElement(self, name, attrs, connection):
if name == SCOPE:
- # The following if statement used to look like this:
+ # The following if statement used to look like this:
# if not TYPE in attrs:
- # which caused problems because older versions of the
- # AttributesImpl class in the xml.sax library neglected to include
- # a __contains__() method (which Python calls to implement the
+ # which caused problems because older versions of the
+ # AttributesImpl class in the xml.sax library neglected to include
+ # a __contains__() method (which Python calls to implement the
# 'in' operator). So when you use the in operator, like the if
# statement above, Python invokes the __getiter__() method with
- # index 0, which raises an exception. More recent versions of
- # xml.sax include the __contains__() method, rendering the in
+ # index 0, which raises an exception. More recent versions of
+ # xml.sax include the __contains__() method, rendering the in
# operator functional. The work-around here is to formulate the
- # if statement as below, which is the legal way to query
+ # if statement as below, which is the legal way to query
# AttributesImpl for containment (and is also how the added
# __contains__() method works). At one time gsutil disallowed
- # xmlplus-based parsers, until this more specific problem was
+ # xmlplus-based parsers, until this more specific problem was
# determined.
if TYPE not in attrs:
raise InvalidAclError('Missing "%s" in "%s" part of ACL' %
@@ -217,17 +220,18 @@ class Entry:
s += '</%s>' % ENTRY
return s
+
class Scope:
# Map from Scope type to list of allowed sub-elems.
ALLOWED_SCOPE_TYPE_SUB_ELEMS = {
- ALL_AUTHENTICATED_USERS : [],
- ALL_USERS : [],
- GROUP_BY_DOMAIN : [DOMAIN],
- GROUP_BY_EMAIL : [DISPLAY_NAME, EMAIL_ADDRESS, NAME],
- GROUP_BY_ID : [DISPLAY_NAME, ID, NAME],
- USER_BY_EMAIL : [DISPLAY_NAME, EMAIL_ADDRESS, NAME],
- USER_BY_ID : [DISPLAY_NAME, ID, NAME]
+ ALL_AUTHENTICATED_USERS: [],
+ ALL_USERS: [],
+ GROUP_BY_DOMAIN: [DOMAIN],
+ GROUP_BY_EMAIL: [DISPLAY_NAME, EMAIL_ADDRESS, NAME],
+ GROUP_BY_ID: [DISPLAY_NAME, ID, NAME],
+ USER_BY_EMAIL: [DISPLAY_NAME, EMAIL_ADDRESS, NAME],
+ USER_BY_ID: [DISPLAY_NAME, ID, NAME]
}
def __init__(self, parent, type=None, id=None, name=None,
@@ -284,11 +288,11 @@ class Scope:
s += '<%s>%s</%s>' % (EMAIL_ADDRESS, self.email_address,
EMAIL_ADDRESS)
if self.name:
- s += '<%s>%s</%s>' % (NAME, self.name, NAME)
+ s += '<%s>%s</%s>' % (NAME, self.name, NAME)
elif self.type == GROUP_BY_ID or self.type == USER_BY_ID:
s += '<%s>%s</%s>' % (ID, self.id, ID)
if self.name:
- s += '<%s>%s</%s>' % (NAME, self.name, NAME)
+ s += '<%s>%s</%s>' % (NAME, self.name, NAME)
else:
raise InvalidAclError('Invalid scope type "%s" ', self.type)
diff --git a/boto/gs/bucket.py b/boto/gs/bucket.py
index d9c764f1..d3a6dd1b 100644
--- a/boto/gs/bucket.py
+++ b/boto/gs/bucket.py
@@ -14,12 +14,11 @@
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
-# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
-import boto
from boto import handler
from boto.exception import InvalidAclError
from boto.gs.acl import ACL, CannedACLStrings
@@ -30,21 +29,24 @@ from boto.s3.acl import Policy
from boto.s3.bucket import Bucket as S3Bucket
import xml.sax
-# constants for http query args
+# constants for http query args
DEF_OBJ_ACL = 'defaultObjectAcl'
STANDARD_ACL = 'acl'
CORS_ARG = 'cors'
+
class Bucket(S3Bucket):
def __init__(self, connection=None, name=None, key_class=GSKey):
super(Bucket, self).__init__(connection, name, key_class)
def set_acl(self, acl_or_str, key_name='', headers=None, version_id=None):
- """sets or changes a bucket's acl. We include a version_id argument
- to support a polymorphic interface for callers, however,
- version_id is not relevant for Google Cloud Storage buckets
- and is therefore ignored here."""
+ """
+ Sets or changes a bucket's acl. We include a version_id argument
+ to support a polymorphic interface for callers, however,
+ version_id is not relevant for Google Cloud Storage buckets
+ and is therefore ignored here.
+ """
if isinstance(acl_or_str, Policy):
raise InvalidAclError('Attempt to set S3 Policy on GS ACL')
elif isinstance(acl_or_str, ACL):
@@ -53,18 +55,21 @@ class Bucket(S3Bucket):
self.set_canned_acl(acl_or_str, key_name, headers=headers)
def set_def_acl(self, acl_or_str, key_name='', headers=None):
- """sets or changes a bucket's default object acl"""
+ """
+ Sets or changes a bucket's default object acl.
+ """
if isinstance(acl_or_str, Policy):
raise InvalidAclError('Attempt to set S3 Policy on GS ACL')
elif isinstance(acl_or_str, ACL):
- self.set_def_xml_acl(acl_or_str.to_xml(), key_name, headers=headers)
+ self.set_def_xml_acl(acl_or_str.to_xml(), key_name,
+ headers=headers)
else:
self.set_def_canned_acl(acl_or_str, key_name, headers=headers)
def get_acl_helper(self, key_name, headers, query_args):
"""provides common functionality for get_acl() and get_def_acl()"""
response = self.connection.make_request('GET', self.name, key_name,
- query_args=query_args,
+ query_args=query_args,
headers=headers)
body = response.read()
if response.status == 200:
@@ -77,25 +82,31 @@ class Bucket(S3Bucket):
response.status, response.reason, body)
def get_acl(self, key_name='', headers=None, version_id=None):
- """returns a bucket's acl. We include a version_id argument
- to support a polymorphic interface for callers, however,
- version_id is not relevant for Google Cloud Storage buckets
- and is therefore ignored here."""
+ """
+ Returns a bucket's acl. We include a version_id argument
+ to support a polymorphic interface for callers, however,
+ version_id is not relevant for Google Cloud Storage buckets
+ and is therefore ignored here.
+ """
return self.get_acl_helper(key_name, headers, STANDARD_ACL)
def get_def_acl(self, key_name='', headers=None):
- """returns a bucket's default object acl"""
+ """
+ Returns a bucket's default object acl.
+ """
return self.get_acl_helper(key_name, headers, DEF_OBJ_ACL)
def set_canned_acl_helper(self, acl_str, key_name, headers, query_args):
- """provides common functionality for set_canned_acl() and
- set_def_canned_acl()"""
+ """
+ Provides common functionality for set_canned_acl() and
+ set_def_canned_acl().
+ """
assert acl_str in CannedACLStrings
if headers:
headers[self.connection.provider.acl_header] = acl_str
else:
- headers={self.connection.provider.acl_header: acl_str}
+ headers = {self.connection.provider.acl_header: acl_str}
response = self.connection.make_request('PUT', self.name, key_name,
headers=headers, query_args=query_args)
@@ -104,30 +115,38 @@ class Bucket(S3Bucket):
raise self.connection.provider.storage_response_error(
response.status, response.reason, body)
- def set_canned_acl(self, acl_str, key_name='', headers=None,
+ def set_canned_acl(self, acl_str, key_name='', headers=None,
version_id=None):
- """sets or changes a bucket's acl to a predefined (canned) value.
- We include a version_id argument to support a polymorphic
- interface for callers, however, version_id is not relevant for
- Google Cloud Storage buckets and is therefore ignored here."""
- return self.set_canned_acl_helper(acl_str, key_name, headers,
+ """
+ Sets or changes a bucket's acl to a predefined (canned) value.
+ We include a version_id argument to support a polymorphic
+ interface for callers, however, version_id is not relevant for
+ Google Cloud Storage buckets and is therefore ignored here.
+ """
+ return self.set_canned_acl_helper(acl_str, key_name, headers,
STANDARD_ACL)
def set_def_canned_acl(self, acl_str, key_name='', headers=None):
- """sets or changes a bucket's default object acl to a predefined
- (canned) value"""
- return self.set_canned_acl_helper(acl_str, key_name, headers,
+ """
+ Sets or changes a bucket's default object acl to a predefined
+ (canned) value.
+ """
+ return self.set_canned_acl_helper(acl_str, key_name, headers,
query_args=DEF_OBJ_ACL)
def set_def_xml_acl(self, acl_str, key_name='', headers=None):
- """sets or changes a bucket's default object"""
- return self.set_xml_acl(acl_str, key_name, headers,
+ """
+ Sets or changes a bucket's default object.
+ """
+ return self.set_xml_acl(acl_str, key_name, headers,
query_args=DEF_OBJ_ACL)
def get_cors(self, headers=None):
- """returns a bucket's CORS XML"""
+ """
+ Returns a bucket's CORS XML.
+ """
response = self.connection.make_request('GET', self.name,
- query_args=CORS_ARG,
+ query_args=CORS_ARG,
headers=headers)
body = response.read()
if response.status == 200:
@@ -141,7 +160,9 @@ class Bucket(S3Bucket):
response.status, response.reason, body)
def set_cors(self, cors, headers=None):
- """sets or changes a bucket's CORS XML."""
+ """
+ Sets or changes a bucket's CORS XML.
+ """
cors_xml = cors.encode('ISO-8859-1')
response = self.connection.make_request('PUT', self.name,
data=cors_xml,
@@ -161,15 +182,15 @@ class Bucket(S3Bucket):
to a bucket. This method retrieves the current ACL, creates a new
grant based on the parameters passed in, adds that grant to the ACL
and then PUT's the new ACL back to GS.
-
+
:type permission: string
:param permission: The permission being granted. Should be one of:
(READ, WRITE, FULL_CONTROL).
-
+
:type email_address: string
:param email_address: The email address associated with the GS
account you are granting the permission to.
-
+
:type recursive: boolean
:param recursive: A boolean value that controls whether the call
will apply the grant to all keys within the bucket
@@ -191,28 +212,30 @@ class Bucket(S3Bucket):
# Method with same signature as boto.s3.bucket.Bucket.add_user_grant(),
# to allow polymorphic treatment at application layer.
- def add_user_grant(self, permission, user_id, recursive=False, headers=None):
+ def add_user_grant(self, permission, user_id, recursive=False,
+ headers=None):
"""
- Convenience method that provides a quick way to add a canonical user grant to a bucket.
- This method retrieves the current ACL, creates a new grant based on the parameters
- passed in, adds that grant to the ACL and then PUTs the new ACL back to GS.
-
+ Convenience method that provides a quick way to add a canonical
+ user grant to a bucket. This method retrieves the current ACL,
+ creates a new grant based on the parameters passed in, adds that
+ grant to the ACL and then PUTs the new ACL back to GS.
+
:type permission: string
:param permission: The permission being granted. Should be one of:
(READ|WRITE|FULL_CONTROL)
-
+
:type user_id: string
- :param user_id: The canonical user id associated with the GS account you are granting
- the permission to.
-
+ :param user_id: The canonical user id associated with the GS
+ account you are granting the permission to.
+
:type recursive: bool
:param recursive: A boolean value that controls whether the call
- will apply the grant to all keys within the bucket
- or not. The default value is False. By passing a
- True value, the call will iterate through all keys
- in the bucket and apply the same grant to each key.
- CAUTION: If you have a lot of keys, this could take
- a long time!
+ will apply the grant to all keys within the bucket
+ or not. The default value is False. By passing a
+ True value, the call will iterate through all keys
+ in the bucket and apply the same grant to each key.
+ CAUTION: If you have a lot of keys, this could take
+ a long time!
"""
if permission not in GSPermissions:
raise self.connection.provider.storage_permissions_error(
@@ -224,19 +247,18 @@ class Bucket(S3Bucket):
for key in self:
key.add_user_grant(permission, user_id, headers=headers)
- def add_group_email_grant(self, permission, email_address, recursive=False,
- headers=None):
+ def add_group_email_grant(self, permission, email_address,
+ recursive=False, headers=None):
"""
Convenience method that provides a quick way to add an email group
- grant to a bucket. This method retrieves the current ACL, creates a new
- grant based on the parameters passed in, adds that grant to the ACL and
- then PUT's the new ACL back to GS.
+ grant to a bucket. This method retrieves the current ACL, creates
+ a new grant based on the parameters passed in, adds that grant to
+ the ACL and then PUT's the new ACL back to GS.
:type permission: string
:param permission: The permission being granted. Should be one of:
READ|WRITE|FULL_CONTROL
- See http://code.google.com/apis/storage/docs/developer-guide.html#authorization
- for more details on permissions.
+ See http://goo.gl/cPwh2 for more details on permissions.
:type email_address: string
:param email_address: The email address associated with the Google
@@ -244,12 +266,12 @@ class Bucket(S3Bucket):
:type recursive: bool
:param recursive: A boolean value that controls whether the call
- will apply the grant to all keys within the bucket
- or not. The default value is False. By passing a
- True value, the call will iterate through all keys
- in the bucket and apply the same grant to each key.
- CAUTION: If you have a lot of keys, this could take
- a long time!
+ will apply the grant to all keys within the bucket
+ or not. The default value is False. By passing a
+ True value, the call will iterate through all keys
+ in the bucket and apply the same grant to each key.
+ CAUTION: If you have a lot of keys, this could take
+ a long time!
"""
if permission not in GSPermissions:
raise self.connection.provider.storage_permissions_error(
diff --git a/boto/gs/connection.py b/boto/gs/connection.py
index 20b02203..bf3ad18f 100755
--- a/boto/gs/connection.py
+++ b/boto/gs/connection.py
@@ -14,20 +14,22 @@
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
-# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
-from boto.gs.bucket import Bucket
+from boto.gs.bucket import Bucket
from boto.s3.connection import S3Connection
from boto.s3.connection import SubdomainCallingFormat
from boto.s3.connection import check_lowercase_bucketname
+
class Location:
- DEFAULT = '' # US
+ DEFAULT = '' # US
EU = 'EU'
+
class GSConnection(S3Connection):
DefaultHost = 'commondatastorage.googleapis.com'
@@ -56,16 +58,17 @@ class GSConnection(S3Connection):
:type bucket_name: string
:param bucket_name: The name of the new bucket
-
+
:type headers: dict
- :param headers: Additional headers to pass along with the request to AWS.
+ :param headers: Additional headers to pass along with the
+ request to AWS.
:type location: :class:`boto.gs.connection.Location`
:param location: The location of the new bucket
:type policy: :class:`boto.s3.acl.CannedACLStrings`
- :param policy: A canned ACL policy that will be applied to the new key in S3.
-
+ :param policy: A canned ACL policy that will be applied to the
+ new key in S3.
"""
check_lowercase_bucketname(bucket_name)
@@ -73,7 +76,7 @@ class GSConnection(S3Connection):
if headers:
headers[self.provider.acl_header] = policy
else:
- headers = {self.provider.acl_header : policy}
+ headers = {self.provider.acl_header: policy}
if not location:
data = ''
else:
@@ -91,4 +94,3 @@ class GSConnection(S3Connection):
else:
raise self.provider.storage_response_error(
response.status, response.reason, body)
-
diff --git a/boto/gs/cors.py b/boto/gs/cors.py
index e5dd9184..c2326a86 100755
--- a/boto/gs/cors.py
+++ b/boto/gs/cors.py
@@ -20,20 +20,20 @@
# IN THE SOFTWARE.
import types
-from boto.gs.user import User
from boto.exception import InvalidCorsError
from xml.sax import handler
# Relevant tags for the CORS XML document.
CORS_CONFIG = 'CorsConfig'
-CORS = 'Cors'
-ORIGINS = 'Origins'
-ORIGIN = 'Origin'
-METHODS = 'Methods'
-METHOD = 'Method'
-HEADERS = 'ResponseHeaders'
-HEADER = 'ResponseHeader'
-MAXAGESEC = 'MaxAgeSec'
+CORS = 'Cors'
+ORIGINS = 'Origins'
+ORIGIN = 'Origin'
+METHODS = 'Methods'
+METHOD = 'Method'
+HEADERS = 'ResponseHeaders'
+HEADER = 'ResponseHeader'
+MAXAGESEC = 'MaxAgeSec'
+
class Cors(handler.ContentHandler):
"""Encapsulates the CORS configuration XML document"""
@@ -55,9 +55,9 @@ class Cors(handler.ContentHandler):
# Dictionary mapping supported collection names to element types
# which may be contained within each.
self.legal_collections = {
- ORIGINS : [ORIGIN],
- METHODS : [METHOD],
- HEADERS : [HEADER],
+ ORIGINS: [ORIGIN],
+ METHODS: [METHOD],
+ HEADERS: [HEADER],
MAXAGESEC: []
}
# List of supported element types within any collection, used for
@@ -78,19 +78,20 @@ class Cors(handler.ContentHandler):
"""SAX XML logic for parsing new element found."""
if name == CORS_CONFIG:
self.validateParseLevel(name, 0)
- self.parse_level += 1;
+ self.parse_level += 1
elif name == CORS:
self.validateParseLevel(name, 1)
- self.parse_level += 1;
+ self.parse_level += 1
elif name in self.legal_collections:
self.validateParseLevel(name, 2)
- self.parse_level += 1;
+ self.parse_level += 1
self.collection = name
elif name in self.legal_elements:
self.validateParseLevel(name, 3)
# Make sure this tag is found inside a collection tag.
if self.collection is None:
- raise InvalidCorsError('Tag %s found outside collection' % name)
+ raise InvalidCorsError('Tag %s found outside collection' %
+ name)
# Make sure this tag is allowed for the current collection tag.
if name not in self.legal_collections[self.collection]:
raise InvalidCorsError('Tag %s not allowed in %s collection' %
@@ -103,10 +104,10 @@ class Cors(handler.ContentHandler):
"""SAX XML logic for parsing new element found."""
if name == CORS_CONFIG:
self.validateParseLevel(name, 1)
- self.parse_level -= 1;
+ self.parse_level -= 1
elif name == CORS:
self.validateParseLevel(name, 2)
- self.parse_level -= 1;
+ self.parse_level -= 1
# Terminating a CORS element, save any collections we found
# and re-initialize collections list.
self.cors.append(self.collections)
@@ -114,17 +115,17 @@ class Cors(handler.ContentHandler):
elif name in self.legal_collections:
self.validateParseLevel(name, 3)
if name != self.collection:
- raise InvalidCorsError('Mismatched start and end tags (%s/%s)' %
- (self.collection, name))
- self.parse_level -= 1;
+ raise InvalidCorsError('Mismatched start and end tags (%s/%s)' %
+ (self.collection, name))
+ self.parse_level -= 1
if not self.legal_collections[name]:
- # If this collection doesn't contain any sub-elements, store
- # a tuple of name and this tag's element value.
- self.collections.append((name, value.strip()))
+ # If this collection doesn't contain any sub-elements, store
+ # a tuple of name and this tag's element value.
+ self.collections.append((name, value.strip()))
else:
- # Otherwise, we're terminating a collection of sub-elements,
- # so store a tuple of name and list of contained elements.
- self.collections.append((name, self.elements))
+ # Otherwise, we're terminating a collection of sub-elements,
+ # so store a tuple of name and list of contained elements.
+ self.collections.append((name, self.elements))
self.elements = []
self.collection = None
elif name in self.legal_elements:
@@ -137,8 +138,8 @@ class Cors(handler.ContentHandler):
raise InvalidCorsError('Tag %s not allowed in %s collection' %
(name, self.collection))
if name != self.element:
- raise InvalidCorsError('Mismatched start and end tags (%s/%s)' %
- (self.element, name))
+ raise InvalidCorsError('Mismatched start and end tags (%s/%s)' %
+ (self.element, name))
# Terminating an element tag, add it to the list of elements
# for the current collection.
self.elements.append((name, value.strip()))
@@ -150,20 +151,20 @@ class Cors(handler.ContentHandler):
"""Convert CORS object into XML string representation."""
s = '<' + CORS_CONFIG + '>'
for collections in self.cors:
- s += '<' + CORS + '>'
- for (collection, elements_or_value) in collections:
- assert collection is not None
- s += '<' + collection + '>'
- # If collection elements has type string, append atomic value,
- # otherwise, append sequence of values in named tags.
- if isinstance(elements_or_value, types.StringTypes):
- s += elements_or_value
- else:
- for (name, value) in elements_or_value:
- assert name is not None
- assert value is not None
- s += '<' + name + '>' + value + '</' + name + '>'
- s += '</' + collection + '>'
- s += '</' + CORS + '>'
- s += '</' + CORS_CONFIG + '>'
+ s += '<' + CORS + '>'
+ for (collection, elements_or_value) in collections:
+ assert collection is not None
+ s += '<' + collection + '>'
+ # If collection elements has type string, append atomic value,
+ # otherwise, append sequence of values in named tags.
+ if isinstance(elements_or_value, types.StringTypes):
+ s += elements_or_value
+ else:
+ for (name, value) in elements_or_value:
+ assert name is not None
+ assert value is not None
+ s += '<' + name + '>' + value + '</' + name + '>'
+ s += '</' + collection + '>'
+ s += '</' + CORS + '>'
+ s += '</' + CORS_CONFIG + '>'
return s
diff --git a/boto/gs/key.py b/boto/gs/key.py
index 44e4c08c..e433397e 100644
--- a/boto/gs/key.py
+++ b/boto/gs/key.py
@@ -24,19 +24,20 @@ from boto.exception import BotoClientError
from boto.s3.key import Key as S3Key
import boto.compat as compat
+
class Key(S3Key):
def add_email_grant(self, permission, email_address):
"""
- Convenience method that provides a quick way to add an email grant to a
- key. This method retrieves the current ACL, creates a new grant based on
- the parameters passed in, adds that grant to the ACL and then PUT's the
- new ACL back to GS.
+ Convenience method that provides a quick way to add an email
+ grant to a key. This method retrieves the current ACL, creates
+ a new grant based on the parameters passed in, adds that grant
+ to the ACL and then PUT's the new ACL back to GS.
:type permission: string
:param permission: The permission being granted. Should be one of:
READ|FULL_CONTROL
- See http://code.google.com/apis/storage/docs/developer-guide.html#authorization
+ See http://goo.gl/8TMF8
for more details on permissions.
:type email_address: string
@@ -57,7 +58,7 @@ class Key(S3Key):
:type permission: string
:param permission: The permission being granted. Should be one of:
READ|FULL_CONTROL
- See http://code.google.com/apis/storage/docs/developer-guide.html#authorization
+ See http://goo.gl/pKHtA
for more details on permissions.
:type user_id: string
@@ -78,7 +79,7 @@ class Key(S3Key):
:type permission: string
:param permission: The permission being granted. Should be one of:
READ|FULL_CONTROL
- See http://code.google.com/apis/storage/docs/developer-guide.html#authorization
+ See http://goo.gl/nfAYD
for more details on permissions.
:type email_address: string
@@ -99,7 +100,7 @@ class Key(S3Key):
:type permission: string
:param permission: The permission being granted. Should be one of:
READ|FULL_CONTROL
- See http://code.google.com/apis/storage/docs/developer-guide.html#authorization
+ See http://goo.gl/3bQ89
for more details on permissions.
:type group_id: string
@@ -112,7 +113,8 @@ class Key(S3Key):
def set_contents_from_file(self, fp, headers=None, replace=True,
cb=None, num_cb=10, policy=None, md5=None,
- res_upload_handler=None, size=None, rewind=False):
+ res_upload_handler=None, size=None,
+ rewind=False):
"""
Store an object in GS using the name of the Key object as the
key in GS and the contents of the file pointed to by 'fp' as the
@@ -122,7 +124,8 @@ class Key(S3Key):
:param fp: the file whose contents are to be uploaded
:type headers: dict
- :param headers: additional HTTP headers to be sent with the PUT request.
+ :param headers: additional HTTP headers to be sent with the
+ PUT request.
:type replace: bool
:param replace: If this parameter is False, the method will first check
@@ -139,9 +142,9 @@ class Key(S3Key):
:type num_cb: int
:param num_cb: (optional) If a callback is specified with the cb
- parameter, this parameter determines the granularity of the callback
- by defining the maximum number of times the callback will be called
- during the file transfer.
+ parameter, this parameter determines the granularity of the
+ callback by defining the maximum number of times the callback
+ will be called during the file transfer.
:type policy: :class:`boto.gs.acl.CannedACLStrings`
:param policy: A canned ACL policy that will be applied to the new key
@@ -152,9 +155,9 @@ class Key(S3Key):
the plain checksum as the second element. This is the same format
returned by the compute_md5 method.
:param md5: If you need to compute the MD5 for any reason prior to
- upload, it's silly to have to do it twice so this param, if present,
- will be used as the MD5 values of the file. Otherwise, the checksum
- will be computed.
+ upload, it's silly to have to do it twice so this param,
+ if present, will be used as the MD5 values of the file.
+ Otherwise, the checksum will be computed.
:type res_upload_handler: ResumableUploadHandler
:param res_upload_handler: If provided, this handler will perform the
@@ -177,7 +180,7 @@ class Key(S3Key):
multipart uploads.
:type rewind: bool
- :param rewind: (optional) If True, the file pointer (fp) will be
+ :param rewind: (optional) If True, the file pointer (fp) will be
rewound to the start before any bytes are read from
it. The default behaviour is False which reads from
the current position of the file pointer (fp).
@@ -262,7 +265,8 @@ class Key(S3Key):
:param filename: The name of the file that you want to put onto GS
:type headers: dict
- :param headers: Additional headers to pass along with the request to GS.
+ :param headers: Additional headers to pass along with the
+ request to GS.
:type replace: bool
:param replace: If True, replaces the contents of the file if it
@@ -290,9 +294,9 @@ class Key(S3Key):
the plain checksum as the second element. This is the same format
returned by the compute_md5 method.
:param md5: If you need to compute the MD5 for any reason prior to
- upload, it's silly to have to do it twice so this param, if present,
- will be used as the MD5 values of the file. Otherwise, the checksum
- will be computed.
+ upload, it's silly to have to do it twice so this param,
+ if present, will be used as the MD5 values of the file.
+ Otherwise, the checksum will be computed.
:type res_upload_handler: ResumableUploadHandler
:param res_upload_handler: If provided, this handler will perform the
diff --git a/boto/gs/resumable_upload_handler.py b/boto/gs/resumable_upload_handler.py
index 2e4d65a8..5daa2bfe 100644
--- a/boto/gs/resumable_upload_handler.py
+++ b/boto/gs/resumable_upload_handler.py
@@ -19,14 +19,12 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
-import cgi
import errno
import os
import random
import re
import socket
import time
-import boto
from boto import config
from boto.connection import AWSAuthConnection
from boto.exception import InvalidUriError
@@ -54,8 +52,8 @@ save the state needed to allow retrying later, in a separate process
class ResumableUploadHandler(object):
BUFFER_SIZE = 8192
- RETRYABLE_EXCEPTIONS = (compat.httplib.HTTPException, IOError, socket.error,
- socket.gaierror)
+ RETRYABLE_EXCEPTIONS = (compat.httplib.HTTPException, IOError,
+ socket.error, socket.gaierror)
# (start, end) response indicating server has nothing (upload protocol uses
# inclusive numbering).
@@ -68,10 +66,10 @@ class ResumableUploadHandler(object):
:type tracker_file_name: string
:param tracker_file_name: optional file name to save tracker URI.
If supplied and the current process fails the upload, it can be
- retried in a new process. If called with an existing file containing
- a valid tracker URI, we'll resume the upload from this URI; else
- we'll start a new resumable upload (and write the URI to this
- tracker file).
+ retried in a new process. If called with an existing file
+ containing a valid tracker URI, we'll resume the upload from
+ this URI; else we'll start a new resumable upload (and write
+ the URI to this tracker file).
:type num_retries: int
:param num_retries: the number of times we'll re-try a resumable upload
@@ -230,7 +228,8 @@ class ResumableUploadHandler(object):
if not got_valid_response:
raise ResumableUploadException(
'Couldn\'t parse upload server state query response (%s)' %
- str(resp.getheaders()), ResumableTransferDisposition.START_OVER)
+ str(resp.getheaders()),
+ ResumableTransferDisposition.START_OVER)
if conn.debug >= 1:
print('Server has: Range: %d - %d.' % (server_start, server_end))
return (server_start, server_end)
@@ -306,7 +305,7 @@ class ResumableUploadHandler(object):
buf = fp.read(self.BUFFER_SIZE)
if cb:
if num_cb > 2:
- cb_count = file_length / self.BUFFER_SIZE / (num_cb-2)
+ cb_count = file_length / self.BUFFER_SIZE / (num_cb - 2)
elif num_cb < 0:
cb_count = -1
else:
@@ -390,7 +389,7 @@ class ResumableUploadHandler(object):
(server_start, server_end) = (
self._query_server_pos(conn, file_length))
self.server_has_bytes = server_start
- key=key
+ key = key
if conn.debug >= 1:
print('Resuming transfer.')
except ResumableUploadException as e:
@@ -420,10 +419,10 @@ class ResumableUploadHandler(object):
fp.seek(total_bytes_uploaded)
conn = key.bucket.connection
- # Get a new HTTP connection (vs conn.get_http_connection(), which reuses
- # pool connections) because httplib requires a new HTTP connection per
- # transaction. (Without this, calling http_conn.getresponse() would get
- # "ResponseNotReady".)
+ # Get a new HTTP connection (vs conn.get_http_connection(),
+ # which reuses pool connections) because httplib requires a
+ # new HTTP connection per transaction. (Without this, calling
+ # http_conn.getresponse() would get "ResponseNotReady".)
http_conn = conn.new_http_connection(self.tracker_uri_host,
conn.is_secure)
http_conn.set_debuglevel(conn.debug)
@@ -467,39 +466,42 @@ class ResumableUploadHandler(object):
key.close()
key.delete()
raise ResumableUploadException(
- 'File changed during upload: md5 signature doesn\'t match etag '
- '(incorrect uploaded object deleted)',
+ 'File changed during upload: md5 signature doesn\'t match '
+ 'etag (incorrect uploaded object deleted)',
ResumableTransferDisposition.ABORT)
def send_file(self, key, fp, headers, cb=None, num_cb=10):
"""
Upload a file to a key into a bucket on GS, using GS resumable upload
protocol.
-
+
:type key: :class:`boto.s3.key.Key` or subclass
:param key: The Key object to which data is to be uploaded
-
+
:type fp: file-like object
:param fp: The file pointer to upload
-
+
:type headers: dict
:param headers: The headers to pass along with the PUT request
-
+
:type cb: function
- :param cb: a callback function that will be called to report progress on
- the upload. The callback should accept two integer parameters, the
- first representing the number of bytes that have been successfully
- transmitted to GS, and the second representing the total number of
- bytes that need to be transmitted.
-
+ :param cb: a callback function that will be
+ called to report progress on the upload. The callback should
+ accept two integer parameters, the first representing the
+ number of bytes that have been successfully transmitted to GS,
+ and the second representing the total number of bytes that
+ need to be transmitted.
+
:type num_cb: int
- :param num_cb: (optional) If a callback is specified with the cb
- parameter, this parameter determines the granularity of the callback
- by defining the maximum number of times the callback will be called
- during the file transfer. Providing a negative integer will cause
- your callback to be called with each buffer read.
-
- Raises ResumableUploadException if a problem occurs during the transfer.
+ :param num_cb: (optional) If a callback is specified with the
+ cb parameter, this parameter determines the granularity of
+ the callback by defining the maximum number of times the
+ callback will be called during the file
+ transfer. Providing a negative integer will cause your
+ callback to be called with each buffer read.
+
+ Raises ResumableUploadException if a problem occurs during
+ the transfer.
"""
if not headers:
@@ -509,7 +511,7 @@ class ResumableUploadHandler(object):
# that header.
CT = 'Content-Type'
if CT in headers and headers[CT] is None:
- del headers[CT]
+ del headers[CT]
fp.seek(0, os.SEEK_END)
file_length = fp.tell()
@@ -578,7 +580,7 @@ class ResumableUploadHandler(object):
ResumableTransferDisposition.ABORT_CUR_PROCESS)
# Use binary exponential backoff to desynchronize client requests
- sleep_time_secs = random.random() * (2**progress_less_iterations)
+ sleep_time_secs = random.random() * (2 ** progress_less_iterations)
if debug >= 1:
print('Got retryable failure (%d progress-less in a row).\n'
'Sleeping %3.1f seconds before re-trying' %