summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorMitch Garnaat <mitch@garnaat.com>2012-03-27 11:03:03 -0700
committerMitch Garnaat <mitch@garnaat.com>2012-03-27 11:03:03 -0700
commit5aa93b79df6721f4c35b577f4e32b3ae21314bfd (patch)
tree33a49eb614c9ab5a0f2a3d05635aeec55664404d
parent99e06a5b435a0d9f9fb91d42391b0afdb5aed33e (diff)
parentc6e06d693dde8edfbb6b0dc0401a6e66501a4382 (diff)
downloadboto-5aa93b79df6721f4c35b577f4e32b3ae21314bfd.tar.gz
Merge branch 'gholms-futureproof' into neo
-rw-r--r--boto/__init__.py4
-rw-r--r--boto/auth.py80
-rw-r--r--boto/auth_handler.py2
-rw-r--r--boto/cloudformation/__init__.py2
-rw-r--r--boto/cloudformation/connection.py17
-rw-r--r--boto/cloudfront/origin.py2
-rw-r--r--boto/compat.py156
-rw-r--r--boto/connection.py54
-rw-r--r--boto/dynamodb/layer1.py34
-rw-r--r--boto/dynamodb/types.py5
-rw-r--r--boto/ec2/autoscale/__init__.py4
-rw-r--r--boto/ec2/buyreservation.py8
-rw-r--r--boto/ec2/cloudwatch/__init__.py8
-rw-r--r--boto/ec2/cloudwatch/alarm.py10
-rw-r--r--boto/ec2/connection.py5
-rw-r--r--boto/ec2/elb/loadbalancer.py16
-rw-r--r--boto/ec2/image.py6
-rw-r--r--boto/ec2/keypair.py2
-rw-r--r--boto/ec2/reservedinstance.py9
-rw-r--r--boto/ec2/snapshot.py4
-rw-r--r--boto/ecs/item.py4
-rw-r--r--boto/emr/__init__.py6
-rw-r--r--boto/emr/connection.py25
-rw-r--r--boto/exception.py12
-rwxr-xr-xboto/file/__init__.py6
-rw-r--r--boto/file/bucket.py2
-rwxr-xr-xboto/file/connection.py2
-rwxr-xr-xboto/file/key.py5
-rw-r--r--boto/fps/connection.py13
-rwxr-xr-xboto/gs/acl.py4
-rw-r--r--boto/gs/key.py6
-rw-r--r--boto/gs/resumable_upload_handler.py37
-rw-r--r--boto/https_connection.py12
-rw-r--r--boto/iam/__init__.py2
-rw-r--r--boto/jsonresponse.py2
-rw-r--r--boto/manage/cmdshell.py19
-rw-r--r--boto/manage/propget.py10
-rw-r--r--boto/manage/server.py41
-rw-r--r--boto/manage/task.py5
-rw-r--r--boto/manage/test_manage.py28
-rw-r--r--boto/manage/volume.py16
-rw-r--r--boto/mashups/server.py1
-rw-r--r--boto/mturk/connection.py7
-rw-r--r--boto/provider.py9
-rw-r--r--boto/pyami/bootstrap.py2
-rw-r--r--boto/pyami/config.py38
-rw-r--r--boto/pyami/installers/ubuntu/ebs.py2
-rwxr-xr-xboto/pyami/launch_ami.py22
-rw-r--r--boto/pyami/startup.py2
-rw-r--r--boto/rds/__init__.py1
-rw-r--r--boto/rds/parametergroup.py31
-rw-r--r--boto/roboto/awsqueryrequest.py30
-rw-r--r--boto/roboto/awsqueryservice.py9
-rw-r--r--boto/route53/__init__.py2
-rw-r--r--boto/route53/connection.py12
-rw-r--r--boto/s3/bucket.py22
-rw-r--r--boto/s3/bucketlogging.py16
-rw-r--r--boto/s3/connection.py13
-rw-r--r--boto/s3/key.py33
-rw-r--r--boto/s3/multipart.py4
-rw-r--r--boto/s3/resumable_download_handler.py21
-rw-r--r--boto/sdb/__init__.py2
-rw-r--r--boto/sdb/connection.py10
-rw-r--r--boto/sdb/db/blob.py4
-rw-r--r--boto/sdb/db/key.py8
-rw-r--r--boto/sdb/db/manager/__init__.py8
-rw-r--r--boto/sdb/db/manager/pgmanager.py10
-rw-r--r--boto/sdb/db/manager/sdbmanager.py24
-rw-r--r--boto/sdb/db/manager/xmlmanager.py8
-rw-r--r--boto/sdb/db/model.py8
-rw-r--r--boto/sdb/db/property.py52
-rw-r--r--boto/sdb/db/sequence.py12
-rw-r--r--boto/sdb/domain.py76
-rw-r--r--boto/sdb/item.py2
-rw-r--r--boto/services/result.py7
-rw-r--r--boto/services/service.py4
-rw-r--r--boto/ses/__init__.py2
-rw-r--r--boto/ses/connection.py3
-rw-r--r--boto/sns/__init__.py2
-rw-r--r--boto/sns/connection.py34
-rw-r--r--boto/sqs/__init__.py2
-rw-r--r--boto/sqs/jsonmessage.py9
-rw-r--r--boto/sqs/message.py10
-rw-r--r--boto/sqs/queue.py8
-rw-r--r--boto/sts/__init__.py2
-rw-r--r--boto/sts/connection.py2
-rw-r--r--boto/sts/credentials.py12
-rw-r--r--boto/swf/layer1.py70
-rw-r--r--boto/utils.py76
-rw-r--r--boto/vpc/dhcpoptions.py2
-rw-r--r--docs/source/conf.py2
-rw-r--r--tests/autoscale/test_connection.py4
-rw-r--r--tests/cloudfront/test_signed_urls.py43
-rw-r--r--tests/db/test_lists.py4
-rw-r--r--tests/db/test_query.py2
-rw-r--r--tests/db/test_sequence.py14
-rw-r--r--tests/devpay/test_s3.py10
-rw-r--r--tests/dynamodb/test_layer1.py6
-rw-r--r--tests/dynamodb/test_layer2.py18
-rw-r--r--tests/ec2/cloudwatch/test_connection.py4
-rw-r--r--tests/ec2/test_connection.py10
-rw-r--r--tests/emr/test_emr_responses.py2
-rw-r--r--tests/mturk/_init_environment.py48
-rw-r--r--tests/mturk/all_tests.py10
-rw-r--r--tests/mturk/cleanup_tests.py18
-rw-r--r--tests/mturk/common.py88
-rw-r--r--tests/mturk/create_hit_external.py2
-rw-r--r--tests/mturk/create_hit_test.py42
-rw-r--r--tests/mturk/create_hit_with_qualifications.py2
-rw-r--r--tests/mturk/hit_persistence.py54
-rw-r--r--tests/mturk/selenium_support.py6
-rw-r--r--tests/mturk/test_disable_hit.py22
-rw-r--r--tests/s3/mock_storage_service.py6
-rw-r--r--tests/s3/test_bucket.py2
-rw-r--r--tests/s3/test_connection.py18
-rw-r--r--tests/s3/test_encryption.py4
-rw-r--r--tests/s3/test_gsconnection.py6
-rw-r--r--tests/s3/test_key.py16
-rw-r--r--tests/s3/test_mfa.py9
-rw-r--r--tests/s3/test_multidelete.py4
-rw-r--r--tests/s3/test_multipart.py18
-rw-r--r--tests/s3/test_pool.py16
-rw-r--r--[-rwxr-xr-x]tests/s3/test_resumable_downloads.py28
-rw-r--r--[-rwxr-xr-x]tests/s3/test_resumable_uploads.py38
-rw-r--r--tests/s3/test_versioning.py14
-rw-r--r--tests/sdb/test_connection.py6
-rw-r--r--tests/sqs/test_connection.py16
-rw-r--r--tests/sts/test_session_token.py4
-rw-r--r--[-rwxr-xr-x]tests/test.py6
129 files changed, 1057 insertions, 996 deletions
diff --git a/boto/__init__.py b/boto/__init__.py
index 15697547..6818635d 100644
--- a/boto/__init__.py
+++ b/boto/__init__.py
@@ -29,8 +29,8 @@ import boto.plugin
import os, re, sys
import logging
import logging.config
-import urlparse
from boto.exception import InvalidUriError
+import boto.compat as compat
__version__ = '2.3.0'
Version = __version__ # for backware compatibility
@@ -391,7 +391,7 @@ def connect_ec2_endpoint(url, aws_access_key_id=None, aws_secret_access_key=None
"""
from boto.ec2.regioninfo import RegionInfo
- purl = urlparse.urlparse(url)
+ purl = compat.urlparse(url)
kwargs['port'] = purl.port
kwargs['host'] = purl.hostname
kwargs['path'] = purl.path
diff --git a/boto/auth.py b/boto/auth.py
index 14bd0fa2..87988991 100644
--- a/boto/auth.py
+++ b/boto/auth.py
@@ -1,5 +1,6 @@
# Copyright 2010 Google Inc.
-# Copyright (c) 2011 Mitch Garnaat http://garnaat.org/
+# Copyright (c) 2012 Mitch Garnaat http://garnaat.org/
+# Copyright (c) 2012 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# Copyright (c) 2011, Eucalyptus Systems, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a
@@ -34,40 +35,11 @@ import boto.plugin
import boto.utils
import hmac
import sys
-import urllib
+import boto.compat as compat
from email.utils import formatdate
from boto.auth_handler import AuthHandler
from boto.exception import BotoClientError
-#
-# the following is necessary because of the incompatibilities
-# between Python 2.4, 2.5, and 2.6 as well as the fact that some
-# people running 2.4 have installed hashlib as a separate module
-# this fix was provided by boto user mccormix.
-# see: http://code.google.com/p/boto/issues/detail?id=172
-# for more details.
-#
-try:
- from hashlib import sha1 as sha
- from hashlib import sha256 as sha256
-
- if sys.version[:3] == "2.4":
- # we are using an hmac that expects a .new() method.
- class Faker:
- def __init__(self, which):
- self.which = which
- self.digest_size = self.which().digest_size
-
- def new(self, *args, **kwargs):
- return self.which(*args, **kwargs)
-
- sha = Faker(sha)
- sha256 = Faker(sha256)
-
-except ImportError:
- import sha
- sha256 = None
-
class HmacKeys(object):
"""Key based Auth handler helper."""
@@ -79,10 +51,10 @@ class HmacKeys(object):
def update_provider(self, provider):
self._provider = provider
- self._hmac = hmac.new(self._provider.secret_key, digestmod=sha)
- if sha256:
- self._hmac_256 = hmac.new(self._provider.secret_key,
- digestmod=sha256)
+ sk = self._provider.secret_key.encode('utf-8')
+ self._hmac = hmac.new(sk, digestmod=compat.sha)
+ if compat.sha256:
+ self._hmac_256 = hmac.new(sk, digestmod=compat.sha256)
else:
self._hmac_256 = None
@@ -97,8 +69,10 @@ class HmacKeys(object):
hmac = self._hmac_256.copy()
else:
hmac = self._hmac.copy()
+ if not isinstance(string_to_sign, compat.binary_type):
+ string_to_sign = string_to_sign.encode('utf-8')
hmac.update(string_to_sign)
- return base64.encodestring(hmac.digest()).strip()
+ return base64.b64encode(hmac.digest()).strip().decode('utf-8')
class AnonAuthHandler(AuthHandler, HmacKeys):
"""
@@ -127,7 +101,7 @@ class HmacAuthV1Handler(AuthHandler, HmacKeys):
headers = http_request.headers
method = http_request.method
auth_path = http_request.auth_path
- if not headers.has_key('Date'):
+ if 'Date' not in headers:
headers['Date'] = formatdate(usegmt=True)
if self._provider.security_token:
@@ -156,7 +130,7 @@ class HmacAuthV2Handler(AuthHandler, HmacKeys):
def add_auth(self, http_request, **kwargs):
headers = http_request.headers
- if not headers.has_key('Date'):
+ if 'Date' not in headers:
headers['Date'] = formatdate(usegmt=True)
b64_hmac = self.sign_string(headers['Date'])
@@ -176,7 +150,7 @@ class HmacAuthV3Handler(AuthHandler, HmacKeys):
def add_auth(self, http_request, **kwargs):
headers = http_request.headers
- if not headers.has_key('Date'):
+ if 'Date' not in headers:
headers['Date'] = formatdate(usegmt=True)
b64_hmac = self.sign_string(headers['Date'])
@@ -252,7 +226,8 @@ class HmacAuthV3HTTPHandler(AuthHandler, HmacKeys):
req.headers['X-Amz-Security-Token'] = self._provider.security_token
string_to_sign, headers_to_sign = self.string_to_sign(req)
boto.log.debug('StringToSign:\n%s' % string_to_sign)
- hash_value = sha256(string_to_sign).digest()
+ string_to_sign = string_to_sign.encode('utf-8')
+ hash_value = compat.sha256(string_to_sign).digest()
b64_hmac = self.sign_string(hash_value)
s = "AWS3 AWSAccessKeyId=%s," % self._provider.access_key
s += "Algorithm=%s," % self.algorithm()
@@ -279,7 +254,7 @@ class QuerySignatureHelper(HmacKeys):
boto.log.debug('query_string: %s Signature: %s' % (qs, signature))
if http_request.method == 'POST':
headers['Content-Type'] = 'application/x-www-form-urlencoded; charset=UTF-8'
- http_request.body = qs + '&Signature=' + urllib.quote_plus(signature)
+ http_request.body = qs + '&Signature=' + compat.quote_plus(signature)
http_request.headers['Content-Length'] = str(len(http_request.body))
else:
http_request.body = ''
@@ -287,7 +262,7 @@ class QuerySignatureHelper(HmacKeys):
# already be there, we need to get rid of that and rebuild it
http_request.path = http_request.path.split('?')[0]
http_request.path = (http_request.path + '?' + qs +
- '&Signature=' + urllib.quote_plus(signature))
+ '&Signature=' + compat.quote_plus(signature))
class QuerySignatureV0AuthHandler(QuerySignatureHelper, AuthHandler):
"""Provides Signature V0 Signing"""
@@ -299,13 +274,13 @@ class QuerySignatureV0AuthHandler(QuerySignatureHelper, AuthHandler):
boto.log.debug('using _calc_signature_0')
hmac = self._hmac.copy()
s = params['Action'] + params['Timestamp']
+ s = s.encode('utf-8')
hmac.update(s)
- keys = params.keys()
- keys.sort(cmp = lambda x, y: cmp(x.lower(), y.lower()))
+ keys = sorted(params, key = str.lower)
pairs = []
for key in keys:
val = boto.utils.get_utf8_value(params[key])
- pairs.append(key + '=' + urllib.quote(val))
+ pairs.append(key + '=' + compat.quote(val))
qs = '&'.join(pairs)
return (qs, base64.b64encode(hmac.digest()))
@@ -320,14 +295,15 @@ class QuerySignatureV1AuthHandler(QuerySignatureHelper, AuthHandler):
def _calc_signature(self, params, *args):
boto.log.debug('using _calc_signature_1')
hmac = self._hmac.copy()
- keys = params.keys()
- keys.sort(cmp = lambda x, y: cmp(x.lower(), y.lower()))
+ keys = sorted(params, key = str.lower)
pairs = []
for key in keys:
+ key = key.encode('utf-8')
hmac.update(key)
val = boto.utils.get_utf8_value(params[key])
+ val = val.encode('utf-8')
hmac.update(val)
- pairs.append(key + '=' + urllib.quote(val))
+ pairs.append(key + '=' + compat.quote(val))
qs = '&'.join(pairs)
return (qs, base64.b64encode(hmac.digest()))
@@ -349,17 +325,17 @@ class QuerySignatureV2AuthHandler(QuerySignatureHelper, AuthHandler):
params['SignatureMethod'] = 'HmacSHA1'
if self._provider.security_token:
params['SecurityToken'] = self._provider.security_token
- keys = params.keys()
- keys.sort()
+ keys = sorted(params)
pairs = []
for key in keys:
val = boto.utils.get_utf8_value(params[key])
- pairs.append(urllib.quote(key, safe='') + '=' +
- urllib.quote(val, safe='-_~'))
+ pairs.append(compat.quote(key, safe='') + '=' +
+ compat.quote(val, safe='-_~'))
qs = '&'.join(pairs)
boto.log.debug('query string: %s' % qs)
string_to_sign += qs
boto.log.debug('string_to_sign: %s' % string_to_sign)
+ string_to_sign = string_to_sign.encode('utf-8')
hmac.update(string_to_sign)
b64 = base64.b64encode(hmac.digest())
boto.log.debug('len(b64)=%d' % len(b64))
diff --git a/boto/auth_handler.py b/boto/auth_handler.py
index ab2d3170..e3aaa275 100644
--- a/boto/auth_handler.py
+++ b/boto/auth_handler.py
@@ -23,7 +23,7 @@
Defines an interface which all Auth handlers need to implement.
"""
-from plugin import Plugin
+from .plugin import Plugin
class NotReadyToAuthenticate(Exception):
pass
diff --git a/boto/cloudformation/__init__.py b/boto/cloudformation/__init__.py
index 4f8e090b..bc32bbab 100644
--- a/boto/cloudformation/__init__.py
+++ b/boto/cloudformation/__init__.py
@@ -22,4 +22,4 @@
# this is here for backward compatibility
# originally, the SNSConnection class was defined here
-from connection import CloudFormationConnection
+from .connection import CloudFormationConnection
diff --git a/boto/cloudformation/connection.py b/boto/cloudformation/connection.py
index 6243dc6c..b0221bef 100644
--- a/boto/cloudformation/connection.py
+++ b/boto/cloudformation/connection.py
@@ -19,17 +19,14 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
-try:
- import simplejson as json
-except:
- import json
-
import boto
from boto.cloudformation.stack import Stack, StackSummary, StackEvent
from boto.cloudformation.stack import StackResource, StackResourceSummary
from boto.cloudformation.template import Template
from boto.connection import AWSQueryConnection
from boto.regioninfo import RegionInfo
+import boto.compat as compat
+
class CloudFormationConnection(AWSQueryConnection):
@@ -135,7 +132,7 @@ class CloudFormationConnection(AWSQueryConnection):
response = self.make_request('CreateStack', params, '/', 'POST')
body = response.read()
if response.status == 200:
- body = json.loads(body)
+ body = compat.json.loads(body)
return body['CreateStackResponse']['CreateStackResult']['StackId']
else:
boto.log.error('%s %s' % (response.status, response.reason))
@@ -202,7 +199,7 @@ class CloudFormationConnection(AWSQueryConnection):
response = self.make_request('UpdateStack', params, '/', 'POST')
body = response.read()
if response.status == 200:
- body = json.loads(body)
+ body = compat.json.loads(body)
return body['UpdateStackResponse']['UpdateStackResult']['StackId']
else:
boto.log.error('%s %s' % (response.status, response.reason))
@@ -215,7 +212,7 @@ class CloudFormationConnection(AWSQueryConnection):
response = self.make_request('DeleteStack', params, '/', 'GET')
body = response.read()
if response.status == 200:
- return json.loads(body)
+ return compat.json.loads(body)
else:
boto.log.error('%s %s' % (response.status, response.reason))
boto.log.error('%s' % body)
@@ -237,7 +234,7 @@ class CloudFormationConnection(AWSQueryConnection):
'/', 'GET')
body = response.read()
if response.status == 200:
- return json.loads(body)
+ return compat.json.loads(body)
else:
boto.log.error('%s %s' % (response.status, response.reason))
boto.log.error('%s' % body)
@@ -267,7 +264,7 @@ class CloudFormationConnection(AWSQueryConnection):
response = self.make_request('GetTemplate', params, '/', 'GET')
body = response.read()
if response.status == 200:
- return json.loads(body)
+ return compat.json.loads(body)
else:
boto.log.error('%s %s' % (response.status, response.reason))
boto.log.error('%s' % body)
diff --git a/boto/cloudfront/origin.py b/boto/cloudfront/origin.py
index 57af846e..95ec0d8a 100644
--- a/boto/cloudfront/origin.py
+++ b/boto/cloudfront/origin.py
@@ -20,7 +20,7 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
-from identity import OriginAccessIdentity
+from .identity import OriginAccessIdentity
def get_oai_value(origin_access_identity):
if isinstance(origin_access_identity, OriginAccessIdentity):
diff --git a/boto/compat.py b/boto/compat.py
new file mode 100644
index 00000000..901fb505
--- /dev/null
+++ b/boto/compat.py
@@ -0,0 +1,156 @@
+# Copyright (c) 2012 Mitch Garnaat http://garnaat.org/
+# Copyright (c) 2012 Amazon.com, Inc. or its affiliates.
+# All Rights Reserved
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish, dis-
+# tribute, sublicense, and/or sell copies of the Software, and to permit
+# persons to whom the Software is furnished to do so, subject to the fol-
+# lowing conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
+# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+#
+import sys
+import os
+import types
+
+# True if we are running on Python 3.
+PY3 = sys.version_info[0] == 3
+
+if PY3:
+ string_types = str,
+ integer_types = int,
+ class_types = type,
+ text_type = str
+ binary_type = bytes
+else:
+ string_types = basestring,
+ integer_types = (int, long)
+ class_types = (type, types.ClassType)
+ text_type = unicode
+ binary_type = str
+
+try:
+ import urllib.parse as urlparse
+except ImportError:
+ import urlparse
+
+try:
+ from urllib.parse import quote, quote_plus, unquote
+except ImportError:
+ from urllib import quote, quote_plus, unquote
+
+try:
+ from urllib.request import urlopen, Request, build_opener, install_opener
+except ImportError:
+ from urllib2 import urlopen, Request
+
+try:
+ from urllib.request import HTTPPasswordMgrWithDefaultRealm
+except ImportError:
+ from urllib2 import HTTPPasswordMgrWithDefaultRealm
+
+try:
+ from urllib.request import HTTPBasicAuthHandler, HTTPError
+except ImportError:
+ from urllib2 import HTTPBasicAuthHandler
+
+try:
+ from hashlib import md5
+except ImportError:
+ from md5 import md5
+
+try:
+ import http.client as httplib
+except ImportError:
+ import httplib
+
+try:
+ import configparser
+except ImportError:
+ import ConfigParser as configparser
+
+if PY3:
+ import io
+ StringIO = io.StringIO
+else:
+ import StringIO
+ StringIO = StringIO.StringIO
+
+if PY3:
+ raw_input = input
+else:
+ raw_input = raw_input
+
+try:
+ # Python 3.x
+ from email.mime.multipart import MIMEMultipart
+ from email.mime.base import MIMEBase
+ from email.mime.text import MIMEText
+ from email.utils import formatdate
+ import email.encoders as Encoders
+ unicode = str
+except ImportError:
+ # Python 2.x
+ from email.MIMEMultipart import MIMEMultipart
+ from email.MIMEBase import MIMEBase
+ from email.MIMEText import MIMEText
+ from email.utils import formatdate
+ from email import Encoders
+
+#
+# the following is necessary because of the incompatibilities
+# between Python 2.4, 2.5, and 2.6 as well as the fact that some
+# people running 2.4 have installed hashlib as a separate module
+# this fix was provided by boto user mccormix.
+# see: http://code.google.com/p/boto/issues/detail?id=172
+# for more details.
+#
+try:
+ from hashlib import sha1 as sha
+ from hashlib import sha256 as sha256
+
+ if sys.version[:3] == "2.4":
+ # we are using an hmac that expects a .new() method.
+ class Faker:
+ def __init__(self, which):
+ self.which = which
+ self.digest_size = self.which().digest_size
+
+ def new(self, *args, **kwargs):
+ return self.which(*args, **kwargs)
+
+ sha = Faker(sha)
+ sha256 = Faker(sha256)
+
+except ImportError:
+ import sha
+ sha256 = None
+
+try:
+ import simplejson as json
+except:
+ import json
+
+
+def on_appengine():
+ return all(key in os.environ for key in ('USER_IS_ADMIN',
+ 'CURRENT_VERSION_ID',
+ 'APPLICATION_ID'))
+
+
+def httplib_ssl_hack(port):
+ return ((on_appengine and sys.version[:3] == '2.5') or
+ sys.version.startswith('3') or
+ sys.version[:3] in ('2.6', '2.7')) and port == 443
diff --git a/boto/connection.py b/boto/connection.py
index e170d0b3..ce754251 100644
--- a/boto/connection.py
+++ b/boto/connection.py
@@ -45,19 +45,17 @@ Handles basic connections to AWS
from __future__ import with_statement
import base64
import errno
-import httplib
import os
-import Queue
import random
import re
import socket
import sys
import time
-import urllib, urlparse
import xml.sax
-import auth
-import auth_handler
+from . import auth
+from . import auth_handler
+from . import compat
import boto
import boto.utils
import boto.handler
@@ -83,9 +81,6 @@ try:
except ImportError:
import dummy_threading as threading
-ON_APP_ENGINE = all(key in os.environ for key in (
- 'USER_IS_ADMIN', 'CURRENT_VERSION_ID', 'APPLICATION_ID'))
-
PORTS_BY_SECURITY = { True: 443, False: 80 }
DEFAULT_CA_CERTS_FILE = os.path.join(
@@ -168,7 +163,7 @@ class HostConnectionPool(object):
This is ugly, reading a private instance variable, but the
state we care about isn't available in any public methods.
"""
- if ON_APP_ENGINE:
+ if compat.on_appengine():
# Google App Engine implementation of HTTPConnection doesn't contain
# _HTTPConnection__response attribute. Moreover, it's not possible
# to determine if given connection is ready. Reusing connections
@@ -350,16 +345,17 @@ class HTTPRequest(object):
def authorize(self, connection, **kwargs):
for key in self.headers:
val = self.headers[key]
- if isinstance(val, unicode):
- self.headers[key] = urllib.quote_plus(val.encode('utf-8'))
+ if isinstance(val, compat.text_type):
+ self.headers[key] = compat.quote_plus(val.encode('utf-8'),
+ safe='/')
connection._auth_handler.add_auth(self, **kwargs)
self.headers['User-Agent'] = UserAgent
# I'm not sure if this is still needed, now that add_auth is
# setting the content-length for POST requests.
- if not self.headers.has_key('Content-Length'):
- if not self.headers.has_key('Transfer-Encoding') or \
+ if 'Content-Length' not in self.headers:
+ if 'Transfer-Encoding' not in self.headers or \
self.headers['Transfer-Encoding'] != 'chunked':
self.headers['Content-Length'] = str(len(self.body))
@@ -429,7 +425,7 @@ class AWSAuthConnection(object):
'Boto', 'ca_certificates_file', DEFAULT_CA_CERTS_FILE)
self.handle_proxy(proxy, proxy_port, proxy_user, proxy_pass)
# define exceptions from httplib that we want to catch and retry
- self.http_exceptions = (httplib.HTTPException, socket.error,
+ self.http_exceptions = (compat.httplib.HTTPException, socket.error,
socket.gaierror)
# define subclasses of the above that are not retryable.
self.http_unretryable_exceptions = []
@@ -450,7 +446,7 @@ class AWSAuthConnection(object):
self.protocol = 'http'
self.host = host
self.path = path
- if isinstance(debug, (int, long)):
+ if isinstance(debug, compat.integer_types):
self.debug = debug
else:
self.debug = config.getint('Boto', 'debug', 0)
@@ -547,8 +543,7 @@ class AWSAuthConnection(object):
# did the same when calculating the V2 signature. In 2.6
# (and higher!)
# it no longer does that. Hence, this kludge.
- if ((ON_APP_ENGINE and sys.version[:3] == '2.5') or
- sys.version[:3] in ('2.6', '2.7')) and port == 443:
+ if compat.httplib_ssl_hack(port):
signature_host = self.host
else:
signature_host = '%s:%d' % (self.host, port)
@@ -559,7 +554,7 @@ class AWSAuthConnection(object):
self.proxy_port = proxy_port
self.proxy_user = proxy_user
self.proxy_pass = proxy_pass
- if os.environ.has_key('http_proxy') and not self.proxy:
+ if 'http_proxy' in os.environ and not self.proxy:
pattern = re.compile(
'(?:http://)?' \
'(?:(?P<user>\w+):(?P<pass>.*)@)?' \
@@ -583,8 +578,7 @@ class AWSAuthConnection(object):
self.proxy_pass = config.get_value('Boto', 'proxy_pass', None)
if not self.proxy_port and self.proxy:
- print "http_proxy environment variable does not specify " \
- "a port, using default"
+ boto.log.warning('http_proxy env variable does not specify a port')
self.proxy_port = self.port
self.use_proxy = (self.proxy != None)
@@ -613,12 +607,12 @@ class AWSAuthConnection(object):
host, ca_certs=self.ca_certificates_file,
**self.http_connection_kwargs)
else:
- connection = httplib.HTTPSConnection(host,
+ connection = compat.httplib.HTTPSConnection(host,
**self.http_connection_kwargs)
else:
boto.log.debug('establishing HTTP connection: kwargs=%s' %
self.http_connection_kwargs)
- connection = httplib.HTTPConnection(host,
+ connection = compat.httplib.HTTPConnection(host,
**self.http_connection_kwargs)
if self.debug > 1:
connection.set_debuglevel(self.debug)
@@ -646,7 +640,7 @@ class AWSAuthConnection(object):
for k, v in self.get_proxy_auth_header().items():
sock.sendall("%s: %s\r\n" % (k, v))
sock.sendall("\r\n")
- resp = httplib.HTTPResponse(sock, strict=True, debuglevel=self.debug)
+ resp = compat.httplib.HTTPResponse(sock, strict=True, debuglevel=self.debug)
resp.begin()
if resp.status != 200:
@@ -659,7 +653,7 @@ class AWSAuthConnection(object):
# We can safely close the response, it duped the original socket
resp.close()
- h = httplib.HTTPConnection(host)
+ h = compat.httplib.HTTPConnection(host)
if self.https_validate_certificates and HAVE_HTTPS_CONNECTION:
boto.log.debug("wrapping ssl socket for proxied connection; "
@@ -678,11 +672,11 @@ class AWSAuthConnection(object):
hostname, cert, 'hostname mismatch')
else:
# Fallback for old Python without ssl.wrap_socket
- if hasattr(httplib, 'ssl'):
- sslSock = httplib.ssl.SSLSocket(sock)
+ if hasattr(compat.httplib, 'ssl'):
+ sslSock = compat.httplib.ssl.SSLSocket(sock)
else:
sslSock = socket.ssl(sock, None, None)
- sslSock = httplib.FakeSocket(sock, sslSock)
+ sslSock = compat.httplib.FakeSocket(sock, sslSock)
# This is a bit unclean
h.sock = sslSock
@@ -693,7 +687,7 @@ class AWSAuthConnection(object):
return path
def get_proxy_auth_header(self):
- auth = base64.encodestring(self.proxy_user + ':' + self.proxy_pass)
+ auth = base64.encodebytes(self.proxy_user + ':' + self.proxy_pass)
return {'Proxy-Authorization': 'Basic %s' % auth}
def _mexe(self, request, sender=None, override_num_retries=None,
@@ -762,7 +756,7 @@ class AWSAuthConnection(object):
return response
else:
scheme, request.host, request.path, \
- params, query, fragment = urlparse.urlparse(location)
+ params, query, fragment = compat.urlparse.urlparse(location)
if query:
request.path += '?' + query
msg = 'Redirecting: %s' % scheme + '://'
@@ -771,7 +765,7 @@ class AWSAuthConnection(object):
connection = self.get_http_connection(request.host,
scheme == 'https')
continue
- except self.http_exceptions, e:
+ except self.http_exceptions as e:
for unretryable in self.http_unretryable_exceptions:
if isinstance(e, unretryable):
boto.log.debug(
diff --git a/boto/dynamodb/layer1.py b/boto/dynamodb/layer1.py
index 412ec0c7..161dc846 100644
--- a/boto/dynamodb/layer1.py
+++ b/boto/dynamodb/layer1.py
@@ -26,12 +26,9 @@ from boto.connection import AWSAuthConnection
from boto.exception import DynamoDBResponseError
from boto.provider import Provider
from boto.dynamodb import exceptions as dynamodb_exceptions
+import boto.compat as compat
import time
-try:
- import simplejson as json
-except ImportError:
- import json
#
# To get full debug output, uncomment the following line and set the
@@ -137,15 +134,16 @@ class Layer1(AWSAuthConnection):
self.instrumentation['times'].append(time.time() - start)
self.instrumentation['ids'].append(self.request_id)
response_body = response.read()
+ response_body = response_body.decode('utf-8')
boto.log.debug(response_body)
- return json.loads(response_body, object_hook=object_hook)
+ return compat.json.loads(response_body, object_hook=object_hook)
def _retry_handler(self, response, i, next_sleep):
status = None
if response.status == 400:
response_body = response.read()
boto.log.debug(response_body)
- data = json.loads(response_body)
+ data = compat.json.loads(response_body)
if self.ThruputError in data.get('__type'):
self.throughput_exceeded_events += 1
msg = "%s, retry attempt %s" % (self.ThruputError, i)
@@ -190,7 +188,7 @@ class Layer1(AWSAuthConnection):
data['Limit'] = limit
if start_table:
data['ExclusiveStartTableName'] = start_table
- json_input = json.dumps(data)
+ json_input = compat.json.dumps(data)
return self.make_request('ListTables', json_input)
def describe_table(self, table_name):
@@ -203,7 +201,7 @@ class Layer1(AWSAuthConnection):
:param table_name: The name of the table to describe.
"""
data = {'TableName': table_name}
- json_input = json.dumps(data)
+ json_input = compat.json.dumps(data)
return self.make_request('DescribeTable', json_input)
def create_table(self, table_name, schema, provisioned_throughput):
@@ -229,7 +227,7 @@ class Layer1(AWSAuthConnection):
data = {'TableName': table_name,
'KeySchema': schema,
'ProvisionedThroughput': provisioned_throughput}
- json_input = json.dumps(data)
+ json_input = compat.json.dumps(data)
response_dict = self.make_request('CreateTable', json_input)
return response_dict
@@ -247,7 +245,7 @@ class Layer1(AWSAuthConnection):
"""
data = {'TableName': table_name,
'ProvisionedThroughput': provisioned_throughput}
- json_input = json.dumps(data)
+ json_input = compat.json.dumps(data)
return self.make_request('UpdateTable', json_input)
def delete_table(self, table_name):
@@ -260,7 +258,7 @@ class Layer1(AWSAuthConnection):
:param table_name: The name of the table to delete.
"""
data = {'TableName': table_name}
- json_input = json.dumps(data)
+ json_input = compat.json.dumps(data)
return self.make_request('DeleteTable', json_input)
def get_item(self, table_name, key, attributes_to_get=None,
@@ -292,7 +290,7 @@ class Layer1(AWSAuthConnection):
data['AttributesToGet'] = attributes_to_get
if consistent_read:
data['ConsistentRead'] = True
- json_input = json.dumps(data)
+ json_input = compat.json.dumps(data)
response = self.make_request('GetItem', json_input,
object_hook=object_hook)
if 'Item' not in response:
@@ -311,7 +309,7 @@ class Layer1(AWSAuthConnection):
data structure defined by DynamoDB.
"""
data = {'RequestItems': request_items}
- json_input = json.dumps(data)
+ json_input = compat.json.dumps(data)
return self.make_request('BatchGetItem', json_input,
object_hook=object_hook)
@@ -350,7 +348,7 @@ class Layer1(AWSAuthConnection):
data['Expected'] = expected
if return_values:
data['ReturnValues'] = return_values
- json_input = json.dumps(data)
+ json_input = compat.json.dumps(data)
return self.make_request('PutItem', json_input,
object_hook=object_hook)
@@ -392,7 +390,7 @@ class Layer1(AWSAuthConnection):
data['Expected'] = expected
if return_values:
data['ReturnValues'] = return_values
- json_input = json.dumps(data)
+ json_input = compat.json.dumps(data)
return self.make_request('UpdateItem', json_input,
object_hook=object_hook)
@@ -428,7 +426,7 @@ class Layer1(AWSAuthConnection):
data['Expected'] = expected
if return_values:
data['ReturnValues'] = return_values
- json_input = json.dumps(data)
+ json_input = compat.json.dumps(data)
return self.make_request('DeleteItem', json_input,
object_hook=object_hook)
@@ -489,7 +487,7 @@ class Layer1(AWSAuthConnection):
data['ScanIndexForward'] = False
if exclusive_start_key:
data['ExclusiveStartKey'] = exclusive_start_key
- json_input = json.dumps(data)
+ json_input = compat.json.dumps(data)
return self.make_request('Query', json_input,
object_hook=object_hook)
@@ -538,5 +536,5 @@ class Layer1(AWSAuthConnection):
data['Count'] = True
if exclusive_start_key:
data['ExclusiveStartKey'] = exclusive_start_key
- json_input = json.dumps(data)
+ json_input = compat.json.dumps(data)
return self.make_request('Scan', json_input, object_hook=object_hook)
diff --git a/boto/dynamodb/types.py b/boto/dynamodb/types.py
index 3eed2c91..51eb25a1 100644
--- a/boto/dynamodb/types.py
+++ b/boto/dynamodb/types.py
@@ -24,14 +24,15 @@
Some utility functions to deal with mapping Amazon DynamoDB types to
Python types and vice-versa.
"""
+import boto.compat as compat
def is_num(n):
- return isinstance(n, (int, long, float, bool))
+ return isinstance(n, (compat.integer_types, float, bool))
def is_str(n):
- return isinstance(n, basestring)
+ return isinstance(n, compat.string_types)
def convert_num(s):
diff --git a/boto/ec2/autoscale/__init__.py b/boto/ec2/autoscale/__init__.py
index 4d404846..dd31e285 100644
--- a/boto/ec2/autoscale/__init__.py
+++ b/boto/ec2/autoscale/__init__.py
@@ -131,9 +131,9 @@ class AutoScaleConnection(AWSQueryConnection):
# different from EC2 list params
for i in xrange(1, len(items)+1):
if isinstance(items[i-1], dict):
- for k, v in items[i-1].iteritems():
+ for k, v in items[i-1].items():
if isinstance(v, dict):
- for kk, vv in v.iteritems():
+ for kk, vv in v.items():
params['%s.member.%d.%s.%s' % (label, i, k, kk)] = vv
else:
params['%s.member.%d.%s' % (label, i, k)] = v
diff --git a/boto/ec2/buyreservation.py b/boto/ec2/buyreservation.py
index fcd8a77c..b84efbc2 100644
--- a/boto/ec2/buyreservation.py
+++ b/boto/ec2/buyreservation.py
@@ -66,19 +66,19 @@ if __name__ == "__main__":
obj.get(params)
offerings = obj.ec2.get_all_reserved_instances_offerings(instance_type=params['instance_type'],
availability_zone=params['zone'].name)
- print '\nThe following Reserved Instances Offerings are available:\n'
+ print('\nThe following Reserved Instances Offerings are available:\n')
for offering in offerings:
offering.describe()
prop = StringProperty(name='offering', verbose_name='Offering',
choices=offerings)
offering = propget.get(prop)
- print '\nYou have chosen this offering:'
+ print('\nYou have chosen this offering:')
offering.describe()
unit_price = float(offering.fixed_price)
total_price = unit_price * params['quantity']
- print '!!! You are about to purchase %d of these offerings for a total of $%.2f !!!' % (params['quantity'], total_price)
+ print('!!! You are about to purchase %d of these offerings for a total of $%.2f !!!' % (params['quantity'], total_price))
answer = raw_input('Are you sure you want to do this? If so, enter YES: ')
if answer.strip().lower() == 'yes':
offering.purchase(params['quantity'])
else:
- print 'Purchase cancelled'
+ print('Purchase cancelled')
diff --git a/boto/ec2/cloudwatch/__init__.py b/boto/ec2/cloudwatch/__init__.py
index bef02a55..3e1988f0 100644
--- a/boto/ec2/cloudwatch/__init__.py
+++ b/boto/ec2/cloudwatch/__init__.py
@@ -23,10 +23,6 @@
This module provides an interface to the Elastic Compute Cloud (EC2)
CloudWatch service from AWS.
"""
-try:
- import simplejson as json
-except ImportError:
- import json
from boto.connection import AWSQueryConnection
from boto.ec2.cloudwatch.metric import Metric
@@ -34,6 +30,8 @@ from boto.ec2.cloudwatch.alarm import MetricAlarm, MetricAlarms, AlarmHistoryIte
from boto.ec2.cloudwatch.datapoint import Datapoint
from boto.regioninfo import RegionInfo
import boto
+import boto.compat as compat
+
RegionData = {
'us-east-1' : 'monitoring.us-east-1.amazonaws.com',
@@ -553,7 +551,7 @@ class CloudWatchConnection(AWSQueryConnection):
'StateReason' : state_reason,
'StateValue' : state_value}
if state_reason_data:
- params['StateReasonData'] = json.dumps(state_reason_data)
+ params['StateReasonData'] = compat.json.dumps(state_reason_data)
return self.get_status('SetAlarmState', params)
diff --git a/boto/ec2/cloudwatch/alarm.py b/boto/ec2/cloudwatch/alarm.py
index 539ad950..9bf50818 100644
--- a/boto/ec2/cloudwatch/alarm.py
+++ b/boto/ec2/cloudwatch/alarm.py
@@ -24,11 +24,7 @@ from datetime import datetime
from boto.resultset import ResultSet
from boto.ec2.cloudwatch.listelement import ListElement
from boto.ec2.cloudwatch.dimension import Dimension
-
-try:
- import simplejson as json
-except ImportError:
- import json
+import boto.compat as compat
class MetricAlarms(list):
@@ -61,7 +57,7 @@ class MetricAlarm(object):
'<' : 'LessThanThreshold',
'<=' : 'LessThanOrEqualToThreshold',
}
- _rev_cmp_map = dict((v, k) for (k, v) in _cmp_map.iteritems())
+ _rev_cmp_map = dict((v, k) for (k, v) in _cmp_map.items())
def __init__(self, connection=None, name=None, metric=None,
namespace=None, statistic=None, comparison=None,
@@ -306,7 +302,7 @@ class AlarmHistoryItem(object):
if name == 'AlarmName':
self.name = value
elif name == 'HistoryData':
- self.data = json.loads(value)
+ self.data = compat.json.loads(value)
elif name == 'HistoryItemType':
self.tem_type = value
elif name == 'HistorySummary':
diff --git a/boto/ec2/connection.py b/boto/ec2/connection.py
index 52f2f370..de245c8f 100644
--- a/boto/ec2/connection.py
+++ b/boto/ec2/connection.py
@@ -1852,7 +1852,7 @@ class EC2Connection(AWSQueryConnection):
"""
try:
return self.get_all_key_pairs(keynames=[keyname])[0]
- except self.ResponseError, e:
+ except self.ResponseError as e:
if e.code == 'InvalidKeyPair.NotFound':
return None
else:
@@ -2757,8 +2757,7 @@ class EC2Connection(AWSQueryConnection):
# Tag methods
def build_tag_param_list(self, params, tags):
- keys = tags.keys()
- keys.sort()
+ keys = sorted(tags)
i = 1
for key in keys:
value = tags[key]
diff --git a/boto/ec2/elb/loadbalancer.py b/boto/ec2/elb/loadbalancer.py
index fec25230..e504976a 100644
--- a/boto/ec2/elb/loadbalancer.py
+++ b/boto/ec2/elb/loadbalancer.py
@@ -26,6 +26,7 @@ from boto.ec2.elb.policies import Policies
from boto.ec2.elb.securitygroup import SecurityGroup
from boto.ec2.instanceinfo import InstanceInfo
from boto.resultset import ResultSet
+import boto.compat as compat
class LoadBalancer(object):
"""
@@ -137,7 +138,7 @@ class LoadBalancer(object):
:param zones: The name of the zone(s) to add.
"""
- if isinstance(zones, str) or isinstance(zones, unicode):
+ if isinstance(zones, compat.string_types):
zones = [zones]
new_zones = self.connection.enable_availability_zones(self.name, zones)
self.availability_zones = new_zones
@@ -150,7 +151,7 @@ class LoadBalancer(object):
:param zones: The name of the zone(s) to add.
"""
- if isinstance(zones, str) or isinstance(zones, unicode):
+ if isinstance(zones, compat.string_types):
zones = [zones]
new_zones = self.connection.disable_availability_zones(self.name, zones)
self.availability_zones = new_zones
@@ -165,7 +166,7 @@ class LoadBalancer(object):
to add to this load balancer.
"""
- if isinstance(instances, str) or isinstance(instances, unicode):
+ if isinstance(instances, compat.string_types):
instances = [instances]
new_instances = self.connection.register_instances(self.name, instances)
self.instances = new_instances
@@ -179,7 +180,7 @@ class LoadBalancer(object):
to remove from this load balancer.
"""
- if isinstance(instances, str) or isinstance(instances, unicode):
+ if isinstance(instances, compat.string_types):
instances = [instances]
new_instances = self.connection.deregister_instances(self.name, instances)
self.instances = new_instances
@@ -259,7 +260,7 @@ class LoadBalancer(object):
:param subnets: The name of the subnet(s) to add.
"""
- if isinstance(subnets, str) or isinstance(subnets, unicode):
+ if isinstance(subnets, compat.string_types):
subnets = [subnets]
new_subnets = self.connection.attach_lb_to_subnets(self.name, subnets)
self.subnets = new_subnets
@@ -272,7 +273,7 @@ class LoadBalancer(object):
:param subnets: The name of the subnet(s) to detach.
"""
- if isinstance(subnets, str) or isinstance(subnets, unicode):
+ if isinstance(subnets, compat.string_types):
subnets = [subnets]
new_subnets = self.connection.detach_lb_to_subnets(self.name, subnets)
self.subnets = new_subnets
@@ -287,8 +288,7 @@ class LoadBalancer(object):
:param security_groups: The name of the security group(s) to add.
"""
- if isinstance(security_groups, str) or \
- isinstance(security_groups, unicode):
+ if isinstance(security_groups, compat.string_types):
security_groups = [security_groups]
new_sgs = self.connection.apply_security_groups_to_lb(
self.name, security_groups)
diff --git a/boto/ec2/image.py b/boto/ec2/image.py
index de1b5d26..0baf8889 100644
--- a/boto/ec2/image.py
+++ b/boto/ec2/image.py
@@ -300,17 +300,17 @@ class ImageAttribute:
if name == 'launchPermission':
self.name = 'launch_permission'
elif name == 'group':
- if self.attrs.has_key('groups'):
+ if 'groups' in self.attrs:
self.attrs['groups'].append(value)
else:
self.attrs['groups'] = [value]
elif name == 'userId':
- if self.attrs.has_key('user_ids'):
+ if 'user_ids' in self.attrs:
self.attrs['user_ids'].append(value)
else:
self.attrs['user_ids'] = [value]
elif name == 'productCode':
- if self.attrs.has_key('product_codes'):
+ if 'product_codes' in self.attrs:
self.attrs['product_codes'].append(value)
else:
self.attrs['product_codes'] = [value]
diff --git a/boto/ec2/keypair.py b/boto/ec2/keypair.py
index 65c95908..6e3911d3 100644
--- a/boto/ec2/keypair.py
+++ b/boto/ec2/keypair.py
@@ -83,7 +83,7 @@ class KeyPair(EC2Object):
fp = open(file_path, 'wb')
fp.write(self.material)
fp.close()
- os.chmod(file_path, 0600)
+ os.chmod(file_path, 0o600)
return True
else:
raise BotoClientError('KeyPair contains no material')
diff --git a/boto/ec2/reservedinstance.py b/boto/ec2/reservedinstance.py
index 1d35c1df..d31164e8 100644
--- a/boto/ec2/reservedinstance.py
+++ b/boto/ec2/reservedinstance.py
@@ -59,15 +59,6 @@ class ReservedInstancesOffering(EC2Object):
else:
setattr(self, name, value)
- def describe(self):
- print 'ID=%s' % self.id
- print '\tInstance Type=%s' % self.instance_type
- print '\tZone=%s' % self.availability_zone
- print '\tDuration=%s' % self.duration
- print '\tFixed Price=%s' % self.fixed_price
- print '\tUsage Price=%s' % self.usage_price
- print '\tDescription=%s' % self.description
-
def purchase(self, instance_count=1):
return self.connection.purchase_reserved_instance_offering(self.id, instance_count)
diff --git a/boto/ec2/snapshot.py b/boto/ec2/snapshot.py
index d52abe44..241cef26 100644
--- a/boto/ec2/snapshot.py
+++ b/boto/ec2/snapshot.py
@@ -124,12 +124,12 @@ class SnapshotAttribute:
if name == 'createVolumePermission':
self.name = 'create_volume_permission'
elif name == 'group':
- if self.attrs.has_key('groups'):
+ if 'groups' in self.attrs:
self.attrs['groups'].append(value)
else:
self.attrs['groups'] = [value]
elif name == 'userId':
- if self.attrs.has_key('user_ids'):
+ if 'user_ids' in self.attrs:
self.attrs['user_ids'].append(value)
else:
self.attrs['user_ids'] = [value]
diff --git a/boto/ecs/item.py b/boto/ecs/item.py
index 29588b86..ebaa653f 100644
--- a/boto/ecs/item.py
+++ b/boto/ecs/item.py
@@ -22,7 +22,7 @@
import xml.sax
import cgi
-from StringIO import StringIO
+import boto.compat as compat
class ResponseGroup(xml.sax.ContentHandler):
"""A Generic "Response Group", which can
@@ -35,7 +35,7 @@ class ResponseGroup(xml.sax.ContentHandler):
self._nodename = nodename
self._nodepath = []
self._curobj = None
- self._xml = StringIO()
+ self._xml = compat.StringIO()
def __repr__(self):
return '<%s: %s>' % (self.__class__.__name__, self.__dict__)
diff --git a/boto/emr/__init__.py b/boto/emr/__init__.py
index 3c33f9a0..adb5dd03 100644
--- a/boto/emr/__init__.py
+++ b/boto/emr/__init__.py
@@ -23,8 +23,8 @@
This module provies an interface to the Elastic MapReduce (EMR)
service from AWS.
"""
-from connection import EmrConnection
-from step import Step, StreamingStep, JarStep
-from bootstrap_action import BootstrapAction
+from .connection import EmrConnection
+from .step import Step, StreamingStep, JarStep
+from .bootstrap_action import BootstrapAction
diff --git a/boto/emr/connection.py b/boto/emr/connection.py
index bd264d20..868037f0 100644
--- a/boto/emr/connection.py
+++ b/boto/emr/connection.py
@@ -23,7 +23,6 @@
"""
Represents a connection to the EMR service
"""
-import types
import boto
import boto.utils
@@ -135,7 +134,7 @@ class EmrConnection(AWSQueryConnection):
:type steps: list(boto.emr.Step)
:param steps: A list of steps to add to the job
"""
- if type(steps) != types.ListType:
+ if type(steps) != list:
steps = [steps]
params = {}
params['JobFlowId'] = jobflow_id
@@ -158,7 +157,7 @@ class EmrConnection(AWSQueryConnection):
:type instance_groups: list(boto.emr.InstanceGroup)
:param instance_groups: A list of instance groups to add to the job
"""
- if type(instance_groups) != types.ListType:
+ if type(instance_groups) != list:
instance_groups = [instance_groups]
params = {}
params['JobFlowId'] = jobflow_id
@@ -179,9 +178,9 @@ class EmrConnection(AWSQueryConnection):
:type new_sizes: list(int)
:param new_sizes: A list of the new sizes for each instance group
"""
- if type(instance_group_ids) != types.ListType:
+ if type(instance_group_ids) != list:
instance_group_ids = [instance_group_ids]
- if type(new_sizes) != types.ListType:
+ if type(new_sizes) != list:
new_sizes = [new_sizes]
instance_groups = zip(instance_group_ids, new_sizes)
@@ -308,7 +307,7 @@ class EmrConnection(AWSQueryConnection):
# Instance group args (for spot instances or a heterogenous cluster)
list_args = self._build_instance_group_list_args(instance_groups)
instance_params = dict(
- ('Instances.%s' % k, v) for k, v in list_args.iteritems()
+ ('Instances.%s' % k, v) for k, v in list_args.items()
)
params.update(instance_params)
@@ -337,7 +336,7 @@ class EmrConnection(AWSQueryConnection):
params['AdditionalInfo'] = additional_info
if api_params:
- for key, value in api_params.iteritems():
+ for key, value in api_params.items():
if value is None:
params.pop(key, None)
else:
@@ -399,22 +398,22 @@ class EmrConnection(AWSQueryConnection):
return step_params
def _build_bootstrap_action_list(self, bootstrap_actions):
- if type(bootstrap_actions) != types.ListType:
+ if type(bootstrap_actions) != list:
bootstrap_actions = [bootstrap_actions]
params = {}
for i, bootstrap_action in enumerate(bootstrap_actions):
- for key, value in bootstrap_action.iteritems():
+ for key, value in bootstrap_action.items():
params['BootstrapActions.member.%s.%s' % (i + 1, key)] = value
return params
def _build_step_list(self, steps):
- if type(steps) != types.ListType:
+ if type(steps) != list:
steps = [steps]
params = {}
for i, step in enumerate(steps):
- for key, value in step.iteritems():
+ for key, value in step.items():
params['Steps.member.%s.%s' % (i+1, key)] = value
return params
@@ -475,12 +474,12 @@ class EmrConnection(AWSQueryConnection):
a comparable dict for use in making a RunJobFlow or AddInstanceGroups
request.
"""
- if type(instance_groups) != types.ListType:
+ if type(instance_groups) != list:
instance_groups = [instance_groups]
params = {}
for i, instance_group in enumerate(instance_groups):
ig_dict = self._build_instance_group_args(instance_group)
- for key, value in ig_dict.iteritems():
+ for key, value in ig_dict.items():
params['InstanceGroups.member.%d.%s' % (i+1, key)] = value
return params
diff --git a/boto/exception.py b/boto/exception.py
index d28dc950..f1f4c134 100644
--- a/boto/exception.py
+++ b/boto/exception.py
@@ -30,13 +30,13 @@ from boto import handler
from boto.resultset import ResultSet
-class BotoClientError(StandardError):
+class BotoClientError(Exception):
"""
General Boto Client error (error accessing AWS)
"""
def __init__(self, reason, *args):
- StandardError.__init__(self, reason, *args)
+ Exception.__init__(self, reason, *args)
self.reason = reason
def __repr__(self):
@@ -45,7 +45,7 @@ class BotoClientError(StandardError):
def __str__(self):
return 'BotoClientError: %s' % self.reason
-class SDBPersistenceError(StandardError):
+class SDBPersistenceError(Exception):
pass
@@ -67,10 +67,10 @@ class GSPermissionsError(StoragePermissionsError):
"""
pass
-class BotoServerError(StandardError):
+class BotoServerError(Exception):
def __init__(self, status, reason, body=None, *args):
- StandardError.__init__(self, status, reason, body, *args)
+ Exception.__init__(self, status, reason, body, *args)
self.status = status
self.reason = reason
self.body = body or ''
@@ -85,7 +85,7 @@ class BotoServerError(StandardError):
try:
h = handler.XmlHandler(self, self)
xml.sax.parseString(self.body, h)
- except (TypeError, xml.sax.SAXParseException), pe:
+ except (TypeError, xml.sax.SAXParseException) as pe:
# Remove unparsable message body so we don't include garbage
# in exception. But first, save self.body in self.error_message
# because occasionally we get error messages from Eucalyptus
diff --git a/boto/file/__init__.py b/boto/file/__init__.py
index 0210b47c..60ecae4c 100755
--- a/boto/file/__init__.py
+++ b/boto/file/__init__.py
@@ -21,8 +21,8 @@
import boto
-from connection import FileConnection as Connection
-from key import Key
-from bucket import Bucket
+from .connection import FileConnection as Connection
+from .key import Key
+from .bucket import Bucket
__all__ = ['Connection', 'Key', 'Bucket']
diff --git a/boto/file/bucket.py b/boto/file/bucket.py
index 8aec6773..7640e2b8 100644
--- a/boto/file/bucket.py
+++ b/boto/file/bucket.py
@@ -23,7 +23,7 @@
# File representation of bucket, for use with "file://" URIs.
import os
-from key import Key
+from .key import Key
from boto.file.simpleresultset import SimpleResultSet
from boto.s3.bucketlistresultset import BucketListResultSet
diff --git a/boto/file/connection.py b/boto/file/connection.py
index f453f71e..22ce4fca 100755
--- a/boto/file/connection.py
+++ b/boto/file/connection.py
@@ -21,7 +21,7 @@
# File representation of connection, for use with "file://" URIs.
-from bucket import Bucket
+from .bucket import Bucket
class FileConnection(object):
diff --git a/boto/file/key.py b/boto/file/key.py
index d39c8c65..f15e99d1 100755
--- a/boto/file/key.py
+++ b/boto/file/key.py
@@ -22,8 +22,9 @@
# File representation of key, for use with "file://" URIs.
-import os, shutil, StringIO
+import os, shutil
import sys
+import boto.compat as compat
class Key(object):
@@ -151,7 +152,7 @@ class Key(object):
:returns: The contents of the file as a string
"""
- fp = StringIO.StringIO()
+ fp = compat.StringIO()
self.get_contents_to_file(fp)
return fp.getvalue()
diff --git a/boto/fps/connection.py b/boto/fps/connection.py
index 27eef8f9..33597a1d 100644
--- a/boto/fps/connection.py
+++ b/boto/fps/connection.py
@@ -24,7 +24,6 @@ import urllib
import xml.sax
import uuid
import boto
-import boto.utils
from boto import handler
from boto.connection import AWSQueryConnection
from boto.resultset import ResultSet
@@ -146,11 +145,11 @@ class FPSConnection(AWSQueryConnection):
params["signatureMethod"] = 'HmacSHA256'
params["signatureVersion"] = '2'
- if(not params.has_key('callerReference')):
+ if('callerReference' not in params):
params['callerReference'] = str(uuid.uuid4())
parts = ''
- for k in sorted(params.keys()):
+ for k in sorted(params):
parts += "&%s=%s" % (k, urllib.quote(params[k], '~'))
canonical = '\n'.join(['GET',
@@ -162,7 +161,7 @@ class FPSConnection(AWSQueryConnection):
params["signature"] = signature
urlsuffix = ''
- for k in sorted(params.keys()):
+ for k in sorted(params):
urlsuffix += "&%s=%s" % (k, urllib.quote(params[k], '~'))
urlsuffix = urlsuffix[1:] # strip the first &
@@ -191,11 +190,11 @@ class FPSConnection(AWSQueryConnection):
params["signatureMethod"] = 'HmacSHA256'
params["signatureVersion"] = '2'
- if(not params.has_key('callerReference')):
+ if('callerReference' not in params):
params['callerReference'] = str(uuid.uuid4())
parts = ''
- for k in sorted(params.keys()):
+ for k in sorted(params):
parts += "&%s=%s" % (k, urllib.quote(params[k], '~'))
canonical = '\n'.join(['GET',
@@ -207,7 +206,7 @@ class FPSConnection(AWSQueryConnection):
params["signature"] = signature
urlsuffix = ''
- for k in sorted(params.keys()):
+ for k in sorted(params):
urlsuffix += "&%s=%s" % (k, urllib.quote(params[k], '~'))
urlsuffix = urlsuffix[1:] # strip the first &
diff --git a/boto/gs/acl.py b/boto/gs/acl.py
index 93bb4a9c..02a5c9ba 100755
--- a/boto/gs/acl.py
+++ b/boto/gs/acl.py
@@ -189,7 +189,7 @@ class Entry:
# __contains__() method works). At one time gsutil disallowed
# xmlplus-based parsers, until this more specific problem was
# determined.
- if not attrs.has_key(TYPE):
+ if TYPE not in attrs:
raise InvalidAclError('Missing "%s" in "%s" part of ACL' %
(TYPE, SCOPE))
self.scope = Scope(self, attrs[TYPE])
@@ -238,7 +238,7 @@ class Scope:
self.id = id
self.domain = domain
self.email_address = email_address
- if not self.ALLOWED_SCOPE_TYPE_SUB_ELEMS.has_key(self.type):
+ if self.type not in self.ALLOWED_SCOPE_TYPE_SUB_ELEMS:
raise InvalidAclError('Invalid %s %s "%s" ' %
(SCOPE, TYPE, self.type))
diff --git a/boto/gs/key.py b/boto/gs/key.py
index c2442514..44e4c08c 100644
--- a/boto/gs/key.py
+++ b/boto/gs/key.py
@@ -20,9 +20,9 @@
# IN THE SOFTWARE.
import os
-import StringIO
from boto.exception import BotoClientError
from boto.s3.key import Key as S3Key
+import boto.compat as compat
class Key(S3Key):
@@ -348,9 +348,9 @@ class Key(S3Key):
param, if present, will be used as the MD5 values
of the file. Otherwise, the checksum will be computed.
"""
- if isinstance(s, unicode):
+ if isinstance(s, compat.text_type):
s = s.encode("utf-8")
- fp = StringIO.StringIO(s)
+ fp = compat.StringIO(s)
r = self.set_contents_from_file(fp, headers, replace, cb, num_cb,
policy, md5)
fp.close()
diff --git a/boto/gs/resumable_upload_handler.py b/boto/gs/resumable_upload_handler.py
index d4176c94..2e4d65a8 100644
--- a/boto/gs/resumable_upload_handler.py
+++ b/boto/gs/resumable_upload_handler.py
@@ -21,19 +21,18 @@
import cgi
import errno
-import httplib
import os
import random
import re
import socket
import time
-import urlparse
import boto
from boto import config
from boto.connection import AWSAuthConnection
from boto.exception import InvalidUriError
from boto.exception import ResumableTransferDisposition
from boto.exception import ResumableUploadException
+import boto.compat as compat
"""
Handler for Google Cloud Storage resumable uploads. See
@@ -55,7 +54,7 @@ save the state needed to allow retrying later, in a separate process
class ResumableUploadHandler(object):
BUFFER_SIZE = 8192
- RETRYABLE_EXCEPTIONS = (httplib.HTTPException, IOError, socket.error,
+ RETRYABLE_EXCEPTIONS = (compat.httplib.HTTPException, IOError, socket.error,
socket.gaierror)
# (start, end) response indicating server has nothing (upload protocol uses
@@ -95,7 +94,7 @@ class ResumableUploadHandler(object):
f = open(self.tracker_file_name, 'r')
uri = f.readline().strip()
self._set_tracker_uri(uri)
- except IOError, e:
+ except IOError as e:
# Ignore non-existent file (happens first time an upload
# is attempted on a file), but warn user for other errors.
if e.errno != errno.ENOENT:
@@ -103,7 +102,7 @@ class ResumableUploadHandler(object):
print('Couldn\'t read URI tracker file (%s): %s. Restarting '
'upload from scratch.' %
(self.tracker_file_name, e.strerror))
- except InvalidUriError, e:
+ except InvalidUriError as e:
# Warn user, but proceed (will restart because
# self.tracker_uri == None).
print('Invalid tracker URI (%s) found in URI tracker file '
@@ -123,7 +122,7 @@ class ResumableUploadHandler(object):
try:
f = open(self.tracker_file_name, 'w')
f.write(self.tracker_uri)
- except IOError, e:
+ except IOError as e:
raise ResumableUploadException(
'Couldn\'t write URI tracker file (%s): %s.\nThis can happen'
'if you\'re using an incorrectly configured upload tool\n'
@@ -142,7 +141,7 @@ class ResumableUploadHandler(object):
Raises InvalidUriError if URI is syntactically invalid.
"""
- parse_result = urlparse.urlparse(uri)
+ parse_result = compat.urlparse.urlparse(uri)
if (parse_result.scheme.lower() not in ['http', 'https'] or
not parse_result.netloc):
raise InvalidUriError('Invalid tracker URI (%s)' % uri)
@@ -233,7 +232,7 @@ class ResumableUploadHandler(object):
'Couldn\'t parse upload server state query response (%s)' %
str(resp.getheaders()), ResumableTransferDisposition.START_OVER)
if conn.debug >= 1:
- print 'Server has: Range: %d - %d.' % (server_start, server_end)
+ print('Server has: Range: %d - %d.' % (server_start, server_end))
return (server_start, server_end)
def _start_new_resumable_upload(self, key, headers=None):
@@ -244,7 +243,7 @@ class ResumableUploadHandler(object):
"""
conn = key.bucket.connection
if conn.debug >= 1:
- print 'Starting new resumable upload.'
+ print('Starting new resumable upload.')
self.server_has_bytes = 0
# Start a new resumable upload by sending a POST request with an
@@ -393,10 +392,10 @@ class ResumableUploadHandler(object):
self.server_has_bytes = server_start
key=key
if conn.debug >= 1:
- print 'Resuming transfer.'
- except ResumableUploadException, e:
+ print('Resuming transfer.')
+ except ResumableUploadException as e:
if conn.debug >= 1:
- print 'Unable to resume transfer (%s).' % e.message
+ print('Unable to resume transfer (%s).' % e.message)
self._start_new_resumable_upload(key, headers)
else:
self._start_new_resumable_upload(key, headers)
@@ -457,7 +456,7 @@ class ResumableUploadHandler(object):
change some of the file and not realize they have inconsistent data.
"""
if key.bucket.connection.debug >= 1:
- print 'Checking md5 against etag.'
+ print('Checking md5 against etag.')
if key.md5 != etag.strip('"\''):
# Call key.open_read() before attempting to delete the
# (incorrect-content) key, so we perform that request on a
@@ -532,9 +531,9 @@ class ResumableUploadHandler(object):
self._remove_tracker_file()
self._check_final_md5(key, etag)
if debug >= 1:
- print 'Resumable upload complete.'
+ print('Resumable upload complete.')
return
- except self.RETRYABLE_EXCEPTIONS, e:
+ except self.RETRYABLE_EXCEPTIONS as e:
if debug >= 1:
print('Caught exception (%s)' % e.__repr__())
if isinstance(e, IOError) and e.errno == errno.EPIPE:
@@ -544,7 +543,7 @@ class ResumableUploadHandler(object):
# the upload (which will cause a new connection to be
# opened the next time an HTTP request is sent).
key.bucket.connection.connection.close()
- except ResumableUploadException, e:
+ except ResumableUploadException as e:
if (e.disposition ==
ResumableTransferDisposition.ABORT_CUR_PROCESS):
if debug >= 1:
@@ -581,7 +580,7 @@ class ResumableUploadHandler(object):
# Use binary exponential backoff to desynchronize client requests
sleep_time_secs = random.random() * (2**progress_less_iterations)
if debug >= 1:
- print ('Got retryable failure (%d progress-less in a row).\n'
- 'Sleeping %3.1f seconds before re-trying' %
- (progress_less_iterations, sleep_time_secs))
+ print('Got retryable failure (%d progress-less in a row).\n'
+ 'Sleeping %3.1f seconds before re-trying' %
+ (progress_less_iterations, sleep_time_secs))
time.sleep(sleep_time_secs)
diff --git a/boto/https_connection.py b/boto/https_connection.py
index d7a3f3ac..a45c82ea 100644
--- a/boto/https_connection.py
+++ b/boto/https_connection.py
@@ -19,14 +19,14 @@
"""Extensions to allow HTTPS requests with SSL certificate validation."""
-import httplib
+import boto.compat as compat
import re
import socket
import ssl
import boto
-class InvalidCertificateException(httplib.HTTPException):
+class InvalidCertificateException(compat.httplib.HTTPException):
"""Raised when a certificate is provided with an invalid hostname."""
def __init__(self, host, cert, reason):
@@ -36,7 +36,7 @@ class InvalidCertificateException(httplib.HTTPException):
host: The hostname the connection was made to.
cert: The SSL certificate (as a dictionary) the host returned.
"""
- httplib.HTTPException.__init__(self)
+ compat.httplib.HTTPException.__init__(self)
self.host = host
self.cert = cert
self.reason = reason
@@ -79,10 +79,10 @@ def ValidateCertificateHostname(cert, hostname):
return False
-class CertValidatingHTTPSConnection(httplib.HTTPConnection):
+class CertValidatingHTTPSConnection(compat.httplib.HTTPConnection):
"""An HTTPConnection that connects over SSL and validates certificates."""
- default_port = httplib.HTTPS_PORT
+ default_port = compat.httplib.HTTPS_PORT
def __init__(self, host, port=None, key_file=None, cert_file=None,
ca_certs=None, strict=None, **kwargs):
@@ -98,7 +98,7 @@ class CertValidatingHTTPSConnection(httplib.HTTPConnection):
strict: When true, causes BadStatusLine to be raised if the status line
can't be parsed as a valid HTTP/1.0 or 1.1 status line.
"""
- httplib.HTTPConnection.__init__(self, host, port, strict, **kwargs)
+ compat.httplib.HTTPConnection.__init__(self, host, port, strict, **kwargs)
self.key_file = key_file
self.cert_file = cert_file
self.ca_certs = ca_certs
diff --git a/boto/iam/__init__.py b/boto/iam/__init__.py
index 498d736b..72fb10a8 100644
--- a/boto/iam/__init__.py
+++ b/boto/iam/__init__.py
@@ -22,6 +22,6 @@
# this is here for backward compatibility
# originally, the IAMConnection class was defined here
-from connection import IAMConnection
+from .connection import IAMConnection
diff --git a/boto/jsonresponse.py b/boto/jsonresponse.py
index 01e1f54f..4f907bc2 100644
--- a/boto/jsonresponse.py
+++ b/boto/jsonresponse.py
@@ -21,7 +21,7 @@
# IN THE SOFTWARE.
import xml.sax
-import utils
+from . import utils
class XmlHandler(xml.sax.ContentHandler):
diff --git a/boto/manage/cmdshell.py b/boto/manage/cmdshell.py
index b21898c0..6ae7911a 100644
--- a/boto/manage/cmdshell.py
+++ b/boto/manage/cmdshell.py
@@ -24,10 +24,10 @@ import boto
import os
import time
import shutil
-import StringIO
import paramiko
import socket
import subprocess
+import boto.compat as compat
class SSHClient(object):
@@ -54,23 +54,24 @@ class SSHClient(object):
username=self.uname,
pkey=self._pkey)
return
- except socket.error, (value,message):
+ except socket.error as err:
+ (value, message) = err.args
if value == 61 or value == 111:
- print 'SSH Connection refused, will retry in 5 seconds'
+ print('SSH Connection refused, will retry in 5 seconds')
time.sleep(5)
retry += 1
else:
raise
except paramiko.BadHostKeyException:
- print "%s has an entry in ~/.ssh/known_hosts and it doesn't match" % self.server.hostname
- print 'Edit that file to remove the entry and then hit return to try again'
+ print("%s has an entry in ~/.ssh/known_hosts and it doesn't match" % self.server.hostname)
+ print('Edit that file to remove the entry and then hit return to try again')
raw_input('Hit Enter when ready')
retry += 1
except EOFError:
- print 'Unexpected Error from SSH Connection, retry in 5 seconds'
+ print('Unexpected Error from SSH Connection, retry in 5 seconds')
time.sleep(5)
retry += 1
- print 'Could not establish SSH connection'
+ print('Could not establish SSH connection')
def open_sftp(self):
return self._ssh_client.open_sftp()
@@ -173,11 +174,11 @@ class LocalClient(object):
return os.path.exists(path)
def shell(self):
- raise NotImplementedError, 'shell not supported with LocalClient'
+ raise NotImplementedError('shell not supported with LocalClient')
def run(self):
boto.log.info('running:%s' % self.command)
- log_fp = StringIO.StringIO()
+ log_fp = compat.StringIO()
process = subprocess.Popen(self.command, shell=True, stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
while process.poll() == None:
diff --git a/boto/manage/propget.py b/boto/manage/propget.py
index 45b2ff22..57032029 100644
--- a/boto/manage/propget.py
+++ b/boto/manage/propget.py
@@ -38,7 +38,7 @@ def get(prop, choices=None):
value = choices[i-1]
if isinstance(value, tuple):
value = value[0]
- print '[%d] %s' % (i, value)
+ print('[%d] %s' % (i, value))
value = raw_input('%s [%d-%d]: ' % (prompt, min, max))
try:
int_value = int(value)
@@ -47,18 +47,18 @@ def get(prop, choices=None):
value = value[1]
valid = True
except ValueError:
- print '%s is not a valid choice' % value
+ print('%s is not a valid choice' % value)
except IndexError:
- print '%s is not within the range[%d-%d]' % (min, max)
+ print('%s is not within the range[%d-%d]' % (min, max))
else:
value = raw_input('%s: ' % prompt)
try:
value = prop.validate(value)
if prop.empty(value) and prop.required:
- print 'A value is required'
+ print('A value is required')
else:
valid = True
except:
- print 'Invalid value: %s' % value
+ print('Invalid value: %s' % value)
return value
diff --git a/boto/manage/server.py b/boto/manage/server.py
index 3c7a3032..67260c65 100644
--- a/boto/manage/server.py
+++ b/boto/manage/server.py
@@ -32,9 +32,10 @@ from boto.sdb.db.property import StringProperty, IntegerProperty, BooleanPropert
from boto.manage import propget
from boto.ec2.zone import Zone
from boto.ec2.keypair import KeyPair
-import os, time, StringIO
+import os, time
from contextlib import closing
from boto.exception import EC2ResponseError
+import boto.compat as compat
InstanceTypes = ['m1.small', 'm1.large', 'm1.xlarge',
'c1.medium', 'c1.xlarge',
@@ -49,7 +50,7 @@ class Bundler(object):
self.ssh_client = SSHClient(server, uname=uname)
def copy_x509(self, key_file, cert_file):
- print '\tcopying cert and pk over to /mnt directory on server'
+ print('\tcopying cert and pk over to /mnt directory on server')
self.ssh_client.open_sftp()
path, name = os.path.split(key_file)
self.remote_key_file = '/mnt/%s' % name
@@ -57,7 +58,7 @@ class Bundler(object):
path, name = os.path.split(cert_file)
self.remote_cert_file = '/mnt/%s' % name
self.ssh_client.put_file(cert_file, self.remote_cert_file)
- print '...complete!'
+ print('...complete!')
def bundle_image(self, prefix, size, ssh_key):
command = ""
@@ -103,7 +104,7 @@ class Bundler(object):
ssh_key = self.server.get_ssh_key_file()
self.copy_x509(key_file, cert_file)
if not fp:
- fp = StringIO.StringIO()
+ fp = compat.StringIO()
fp.write('sudo mv %s /mnt/boto.cfg; ' % BotoConfigPath)
fp.write('mv ~/.ssh/authorized_keys /mnt/authorized_keys; ')
if clear_history:
@@ -115,13 +116,13 @@ class Bundler(object):
fp.write('sudo mv /mnt/boto.cfg %s; ' % BotoConfigPath)
fp.write('mv /mnt/authorized_keys ~/.ssh/authorized_keys')
command = fp.getvalue()
- print 'running the following command on the remote server:'
- print command
+ print('running the following command on the remote server:')
+ print(command)
t = self.ssh_client.run(command)
- print '\t%s' % t[0]
- print '\t%s' % t[1]
- print '...complete!'
- print 'registering image...'
+ print('\t%s' % t[0])
+ print('\t%s' % t[1])
+ print('...complete!')
+ print('registering image...')
self.image_id = self.server.ec2.register_image(name=prefix, image_location='%s/%s.manifest.xml' % (bucket, prefix))
return self.image_id
@@ -137,7 +138,7 @@ class CommandLineGetter(object):
def get_region(self, params):
region = params.get('region', None)
- if isinstance(region, str) or isinstance(region, unicode):
+ if isinstance(region, compat.string_types):
region = boto.ec2.get_region(region)
params['region'] = region
if not region:
@@ -189,7 +190,7 @@ class CommandLineGetter(object):
def get_group(self, params):
group = params.get('group', None)
- if isinstance(group, str) or isinstance(group, unicode):
+ if isinstance(group, compat.string_types):
group_list = self.ec2.get_all_security_groups()
for g in group_list:
if g.name == group:
@@ -202,7 +203,7 @@ class CommandLineGetter(object):
def get_key(self, params):
keypair = params.get('keypair', None)
- if isinstance(keypair, str) or isinstance(keypair, unicode):
+ if isinstance(keypair, compat.string_types):
key_list = self.ec2.get_all_key_pairs()
for k in key_list:
if k.name == keypair:
@@ -305,7 +306,7 @@ class Server(Model):
# deal with possibly passed in logical volume:
if logical_volume != None:
cfg.set('EBS', 'logical_volume_name', logical_volume.name)
- cfg_fp = StringIO.StringIO()
+ cfg_fp = compat.StringIO()
cfg.write(cfg_fp)
# deal with the possibility that zone and/or keypair are strings read from the config file:
if isinstance(zone, Zone):
@@ -325,14 +326,14 @@ class Server(Model):
instances = reservation.instances
if elastic_ip != None and instances.__len__() > 0:
instance = instances[0]
- print 'Waiting for instance to start so we can set its elastic IP address...'
+ print('Waiting for instance to start so we can set its elastic IP address...')
# Sometimes we get a message from ec2 that says that the instance does not exist.
# Hopefully the following delay will giv eec2 enough time to get to a stable state:
time.sleep(5)
while instance.update() != 'running':
time.sleep(1)
instance.use_ip(elastic_ip)
- print 'set the elastic IP of the first instance to %s' % elastic_ip
+ print('set the elastic IP of the first instance to %s' % elastic_ip)
for instance in instances:
s = cls()
s.ec2 = ec2
@@ -489,19 +490,19 @@ class Server(Model):
def delete(self):
if self.production:
- raise ValueError, "Can't delete a production server"
+ raise ValueError("Can't delete a production server")
#self.stop()
Model.delete(self)
def stop(self):
if self.production:
- raise ValueError, "Can't delete a production server"
+ raise ValueError("Can't delete a production server")
if self._instance:
self._instance.stop()
def terminate(self):
if self.production:
- raise ValueError, "Can't delete a production server"
+ raise ValueError("Can't delete a production server")
if self._instance:
self._instance.terminate()
@@ -527,7 +528,7 @@ class Server(Model):
def get_cmdshell(self):
if not self._cmdshell:
- import cmdshell
+ from . import cmdshell
self.get_ssh_key_file()
self._cmdshell = cmdshell.start(self)
return self._cmdshell
diff --git a/boto/manage/task.py b/boto/manage/task.py
index 2f9d7d00..39b21f48 100644
--- a/boto/manage/task.py
+++ b/boto/manage/task.py
@@ -23,7 +23,8 @@
import boto
from boto.sdb.db.property import StringProperty, DateTimeProperty, IntegerProperty
from boto.sdb.db.model import Model
-import datetime, subprocess, StringIO, time
+import boto.compat as compat
+import datetime, subprocess, time
def check_hour(val):
if val == '*':
@@ -100,7 +101,7 @@ class Task(Model):
def _run(self, msg, vtimeout):
boto.log.info('Task[%s] - running:%s' % (self.name, self.command))
- log_fp = StringIO.StringIO()
+ log_fp = compat.StringIO()
process = subprocess.Popen(self.command, shell=True, stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
nsecs = 5
diff --git a/boto/manage/test_manage.py b/boto/manage/test_manage.py
index e0b032a9..a8c188c3 100644
--- a/boto/manage/test_manage.py
+++ b/boto/manage/test_manage.py
@@ -2,33 +2,33 @@ from boto.manage.server import Server
from boto.manage.volume import Volume
import time
-print '--> Creating New Volume'
+print('--> Creating New Volume')
volume = Volume.create()
-print volume
+print(volume)
-print '--> Creating New Server'
+print('--> Creating New Server')
server_list = Server.create()
server = server_list[0]
-print server
+print(server)
-print '----> Waiting for Server to start up'
+print('----> Waiting for Server to start up')
while server.status != 'running':
- print '*'
+ print('*')
time.sleep(10)
-print '----> Server is running'
+print('----> Server is running')
-print '--> Run "df -k" on Server'
+print('--> Run "df -k" on Server')
status = server.run('df -k')
-print status[1]
+print(status[1])
-print '--> Now run volume.make_ready to make the volume ready to use on server'
+print('--> Now run volume.make_ready to make the volume ready to use on server')
volume.make_ready(server)
-print '--> Run "df -k" on Server'
+print('--> Run "df -k" on Server')
status = server.run('df -k')
-print status[1]
+print(status[1])
-print '--> Do an "ls -al" on the new filesystem'
+print('--> Do an "ls -al" on the new filesystem')
status = server.run('ls -al %s' % volume.mount_point)
-print status[1]
+print(status[1])
diff --git a/boto/manage/volume.py b/boto/manage/volume.py
index 52c344fe..baabbbb4 100644
--- a/boto/manage/volume.py
+++ b/boto/manage/volume.py
@@ -199,7 +199,7 @@ class Volume(Model):
def attach(self, server=None):
if self.attachment_state == 'attached':
- print 'already attached'
+ print('already attached')
return None
if server:
self.server = server
@@ -210,7 +210,7 @@ class Volume(Model):
def detach(self, force=False):
state = self.attachment_state
if state == 'available' or state == None or state == 'detaching':
- print 'already detached'
+ print('already detached')
return None
ec2 = self.get_ec2_connection()
ec2.detach_volume(self.volume_id, self.server.instance_id, self.device, force)
@@ -219,7 +219,7 @@ class Volume(Model):
def checkfs(self, use_cmd=None):
if self.server == None:
- raise ValueError, 'server attribute must be set to run this command'
+ raise ValueError('server attribute must be set to run this command')
# detemine state of file system on volume, only works if attached
if use_cmd:
cmd = use_cmd
@@ -234,7 +234,7 @@ class Volume(Model):
def wait(self):
if self.server == None:
- raise ValueError, 'server attribute must be set to run this command'
+ raise ValueError('server attribute must be set to run this command')
with closing(self.server.get_cmdshell()) as cmd:
# wait for the volume device to appear
cmd = self.server.get_cmdshell()
@@ -244,7 +244,7 @@ class Volume(Model):
def format(self):
if self.server == None:
- raise ValueError, 'server attribute must be set to run this command'
+ raise ValueError('server attribute must be set to run this command')
status = None
with closing(self.server.get_cmdshell()) as cmd:
if not self.checkfs(cmd):
@@ -254,7 +254,7 @@ class Volume(Model):
def mount(self):
if self.server == None:
- raise ValueError, 'server attribute must be set to run this command'
+ raise ValueError('server attribute must be set to run this command')
boto.log.info('handle_mount_point')
with closing(self.server.get_cmdshell()) as cmd:
cmd = self.server.get_cmdshell()
@@ -353,9 +353,9 @@ class Volume(Model):
day=now.day, tzinfo=now.tzinfo)
# Keep the first snapshot from each day of the previous week
one_week = datetime.timedelta(days=7, seconds=60*60)
- print midnight-one_week, midnight
+ print(midnight-one_week, midnight)
previous_week = self.get_snapshot_range(snaps, midnight-one_week, midnight)
- print previous_week
+ print(previous_week)
if not previous_week:
return snaps
current_day = None
diff --git a/boto/mashups/server.py b/boto/mashups/server.py
index 6cea106c..d6de4238 100644
--- a/boto/mashups/server.py
+++ b/boto/mashups/server.py
@@ -23,7 +23,6 @@
High-level abstraction of an EC2 server
"""
import boto
-import boto.utils
from boto.mashups.iobject import IObject
from boto.pyami.config import Config, BotoConfigPath
from boto.mashups.interactive import interactive_shell
diff --git a/boto/mturk/connection.py b/boto/mturk/connection.py
index 375da5a1..5c357c50 100644
--- a/boto/mturk/connection.py
+++ b/boto/mturk/connection.py
@@ -31,6 +31,7 @@ from boto.connection import AWSQueryConnection
from boto.exception import EC2ResponseError
from boto.resultset import ResultSet
from boto.mturk.question import QuestionForm, ExternalQuestion
+import boto.compat as compat
class MTurkRequestError(EC2ResponseError):
"Error for MTurk Requests"
@@ -265,7 +266,7 @@ class MTurkConnection(AWSQueryConnection):
records, return the page numbers to be retrieved.
"""
pages = total_records/page_size+bool(total_records%page_size)
- return range(1, pages+1)
+ return list(range(1, pages+1))
def get_all_hits(self):
@@ -280,7 +281,7 @@ class MTurkConnection(AWSQueryConnection):
page_size = 100
search_rs = self.search_hits(page_size=page_size)
total_records = int(search_rs.TotalNumResults)
- get_page_hits = lambda(page): self.search_hits(page_size=page_size, page_number=page)
+ get_page_hits = lambda page: self.search_hits(page_size=page_size, page_number=page)
page_nums = self._get_pages(page_size, total_records)
hit_sets = itertools.imap(get_page_hits, page_nums)
return itertools.chain.from_iterable(hit_sets)
@@ -756,7 +757,7 @@ class MTurkConnection(AWSQueryConnection):
keywords = ', '.join(keywords)
if type(keywords) is str:
final_keywords = keywords
- elif type(keywords) is unicode:
+ elif type(keywords) is compat.text_type:
final_keywords = keywords.encode('utf-8')
elif keywords is None:
final_keywords = ""
diff --git a/boto/provider.py b/boto/provider.py
index 37ceae72..4a231c7d 100644
--- a/boto/provider.py
+++ b/boto/provider.py
@@ -33,6 +33,7 @@ from boto.gs.acl import ACL
from boto.gs.acl import CannedACLStrings as CannedGSACLStrings
from boto.s3.acl import CannedACLStrings as CannedS3ACLStrings
from boto.s3.acl import Policy
+import boto.compat as compat
HEADER_PREFIX_KEY = 'header_prefix'
METADATA_PREFIX_KEY = 'metadata_prefix'
@@ -176,21 +177,17 @@ class Provider(object):
access_key_name, secret_key_name = self.CredentialMap[self.name]
if access_key is not None:
self.access_key = access_key
- elif os.environ.has_key(access_key_name.upper()):
+ elif access_key_name.upper() in os.environ:
self.access_key = os.environ[access_key_name.upper()]
elif config.has_option('Credentials', access_key_name):
self.access_key = config.get('Credentials', access_key_name)
if secret_key is not None:
self.secret_key = secret_key
- elif os.environ.has_key(secret_key_name.upper()):
+ elif secret_key_name.upper() in os.environ:
self.secret_key = os.environ[secret_key_name.upper()]
elif config.has_option('Credentials', secret_key_name):
self.secret_key = config.get('Credentials', secret_key_name)
- if isinstance(self.secret_key, unicode):
- # the secret key must be bytes and not unicode to work
- # properly with hmac.new (see http://bugs.python.org/issue5285)
- self.secret_key = str(self.secret_key)
def configure_headers(self):
header_info_map = self.HeaderInfoMap[self.name]
diff --git a/boto/pyami/bootstrap.py b/boto/pyami/bootstrap.py
index cd44682f..c253c927 100644
--- a/boto/pyami/bootstrap.py
+++ b/boto/pyami/bootstrap.py
@@ -82,7 +82,7 @@ class Bootstrap(ScriptBase):
try:
self.run('git pull', cwd=location)
num_remaining_attempts = 0
- except Exception, e:
+ except Exception as e:
boto.log.info('git pull attempt failed with the following exception. Trying again in a bit. %s', e)
time.sleep(2)
if update.find(':') >= 0:
diff --git a/boto/pyami/config.py b/boto/pyami/config.py
index d75e7910..72383afc 100644
--- a/boto/pyami/config.py
+++ b/boto/pyami/config.py
@@ -20,10 +20,10 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
-import StringIO, os, re
+import os, re
import warnings
-import ConfigParser
import boto
+import boto.compat as compat
# If running in Google App Engine there is no "user" and
# os.path.expanduser() will fail. Attempt to detect this case and use a
@@ -55,10 +55,10 @@ elif 'BOTO_PATH' in os.environ:
BotoConfigLocations.append(expanduser(path))
-class Config(ConfigParser.SafeConfigParser):
+class Config(compat.configparser.SafeConfigParser):
def __init__(self, path=None, fp=None, do_load=True):
- ConfigParser.SafeConfigParser.__init__(self, {'working_dir' : '/mnt/pyami',
+ compat.configparser.SafeConfigParser.__init__(self, {'working_dir' : '/mnt/pyami',
'debug' : '0'})
if do_load:
if path:
@@ -76,7 +76,7 @@ class Config(ConfigParser.SafeConfigParser):
def load_credential_file(self, path):
"""Load a credential file as is setup like the Java utilities"""
- c_data = StringIO.StringIO()
+ c_data = compat.StringIO()
c_data.write("[Credentials]\n")
for line in open(path, "r").readlines():
c_data.write(line.replace("AWSAccessKeyId", "aws_access_key_id").replace("AWSSecretKey", "aws_secret_access_key"))
@@ -99,7 +99,7 @@ class Config(ConfigParser.SafeConfigParser):
Replace any previous value. If the path doesn't exist, create it.
Also add the option the the in-memory config.
"""
- config = ConfigParser.SafeConfigParser()
+ config = compat.configparser.SafeConfigParser()
config.read(path)
if not config.has_section(section):
config.add_section(section)
@@ -143,21 +143,21 @@ class Config(ConfigParser.SafeConfigParser):
def get(self, section, name, default=None):
try:
- val = ConfigParser.SafeConfigParser.get(self, section, name)
+ val = compat.configparser.SafeConfigParser.get(self, section, name)
except:
val = default
return val
def getint(self, section, name, default=0):
try:
- val = ConfigParser.SafeConfigParser.getint(self, section, name)
+ val = compat.configparser.SafeConfigParser.getint(self, section, name)
except:
val = int(default)
return val
def getfloat(self, section, name, default=0.0):
try:
- val = ConfigParser.SafeConfigParser.getfloat(self, section, name)
+ val = compat.configparser.SafeConfigParser.getfloat(self, section, name)
except:
val = float(default)
return val
@@ -180,13 +180,13 @@ class Config(ConfigParser.SafeConfigParser):
self.set(section, name, 'false')
def dump(self):
- s = StringIO.StringIO()
+ s = compat.StringIO()
self.write(s)
- print s.getvalue()
+ print(s.getvalue())
def dump_safe(self, fp=None):
if not fp:
- fp = StringIO.StringIO()
+ fp = compat.StringIO()
for section in self.sections():
fp.write('[%s]\n' % section)
for option in self.options(section):
@@ -196,11 +196,6 @@ class Config(ConfigParser.SafeConfigParser):
fp.write('%s = %s\n' % (option, self.get(section, option)))
def dump_to_sdb(self, domain_name, item_name):
- try:
- import simplejson as json
- except ImportError:
- import json
-
sdb = boto.connect_sdb()
domain = sdb.lookup(domain_name)
if not domain:
@@ -211,22 +206,17 @@ class Config(ConfigParser.SafeConfigParser):
d = {}
for option in self.options(section):
d[option] = self.get(section, option)
- item[section] = json.dumps(d)
+ item[section] = compat.json.dumps(d)
item.save()
def load_from_sdb(self, domain_name, item_name):
- try:
- import json
- except ImportError:
- import simplejson as json
-
sdb = boto.connect_sdb()
domain = sdb.lookup(domain_name)
item = domain.get_item(item_name)
for section in item.keys():
if not self.has_section(section):
self.add_section(section)
- d = json.loads(item[section])
+ d = compat.json.loads(item[section])
for attr_name in d.keys():
attr_value = d[attr_name]
if attr_value == None:
diff --git a/boto/pyami/installers/ubuntu/ebs.py b/boto/pyami/installers/ubuntu/ebs.py
index a52549b0..4737ba93 100644
--- a/boto/pyami/installers/ubuntu/ebs.py
+++ b/boto/pyami/installers/ubuntu/ebs.py
@@ -128,7 +128,7 @@ class EBSInstaller(Installer):
try:
ec2.attach_volume(self.volume_id, self.instance_id, self.device)
attempt_attach = False
- except EC2ResponseError, e:
+ except EC2ResponseError as e:
if e.error_code != 'IncorrectState':
# if there's an EC2ResonseError with the code set to IncorrectState, delay a bit for ec2
# to realize the instance is running, then try again. Otherwise, raise the error:
diff --git a/boto/pyami/launch_ami.py b/boto/pyami/launch_ami.py
index 243d56d2..4b8d962f 100755
--- a/boto/pyami/launch_ami.py
+++ b/boto/pyami/launch_ami.py
@@ -68,7 +68,7 @@ SYNOPSIS
"""
def usage():
- print usage_string
+ print(usage_string)
sys.exit()
def main():
@@ -124,14 +124,14 @@ def main():
required = ['ami']
for pname in required:
if not params.get(pname, None):
- print '%s is required' % pname
+ print('%s is required' % pname)
usage()
if params['script_name']:
# first copy the desired module file to S3 bucket
if reload:
- print 'Reloading module %s to S3' % params['script_name']
+ print('Reloading module %s to S3' % params['script_name'])
else:
- print 'Copying module %s to S3' % params['script_name']
+ print('Copying module %s to S3' % params['script_name'])
l = imp.find_module(params['script_name'])
c = boto.connect_s3()
bucket = c.get_bucket(params['script_bucket'])
@@ -155,23 +155,23 @@ def main():
r = img.run(user_data=s, key_name=params['keypair'],
security_groups=[params['group']],
max_count=params.get('num_instances', 1))
- print 'AMI: %s - %s (Started)' % (params['ami'], img.location)
- print 'Reservation %s contains the following instances:' % r.id
+ print('AMI: %s - %s (Started)' % (params['ami'], img.location))
+ print('Reservation %s contains the following instances:' % r.id)
for i in r.instances:
- print '\t%s' % i.id
+ print('\t%s' % i.id)
if wait:
running = False
while not running:
time.sleep(30)
[i.update() for i in r.instances]
status = [i.state for i in r.instances]
- print status
+ print(status)
if status.count('running') == len(r.instances):
running = True
for i in r.instances:
- print 'Instance: %s' % i.ami_launch_index
- print 'Public DNS Name: %s' % i.public_dns_name
- print 'Private DNS Name: %s' % i.private_dns_name
+ print('Instance: %s' % i.ami_launch_index)
+ print('Public DNS Name: %s' % i.public_dns_name)
+ print('Private DNS Name: %s' % i.private_dns_name)
if __name__ == "__main__":
main()
diff --git a/boto/pyami/startup.py b/boto/pyami/startup.py
index 2093151a..1c45fb3f 100644
--- a/boto/pyami/startup.py
+++ b/boto/pyami/startup.py
@@ -44,7 +44,7 @@ class Startup(ScriptBase):
s.main()
else:
boto.log.warning('Trouble parsing script: %s' % script)
- except Exception, e:
+ except Exception as e:
boto.log.exception('Problem Running Script: %s. Startup process halting.' % script)
raise e
diff --git a/boto/rds/__init__.py b/boto/rds/__init__.py
index a400ffd3..c739879d 100644
--- a/boto/rds/__init__.py
+++ b/boto/rds/__init__.py
@@ -20,7 +20,6 @@
# IN THE SOFTWARE.
#
-import boto.utils
import urllib
from boto.connection import AWSQueryConnection
from boto.rds.dbinstance import DBInstance
diff --git a/boto/rds/parametergroup.py b/boto/rds/parametergroup.py
index 44d00e22..e973467c 100644
--- a/boto/rds/parametergroup.py
+++ b/boto/rds/parametergroup.py
@@ -19,6 +19,9 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
+import boto.compat as compat
+
+
class ParameterGroup(dict):
def __init__(self, connection=None):
@@ -133,36 +136,22 @@ class Parameter(object):
d[prefix+'ApplyMethod'] = self.apply_method
def _set_string_value(self, value):
- if not isinstance(value, str) or isinstance(value, unicode):
- raise ValueError, 'value must be of type str'
+ if not isinstance(value, compat.string_types):
+ raise ValueError('value must be of type str')
if self.allowed_values:
choices = self.allowed_values.split(',')
if value not in choices:
- raise ValueError, 'value must be in %s' % self.allowed_values
+ raise ValueError('value must be in %s' % self.allowed_values)
self._value = value
def _set_integer_value(self, value):
- if isinstance(value, str) or isinstance(value, unicode):
- value = int(value)
- if isinstance(value, int) or isinstance(value, long):
- if self.allowed_values:
- min, max = self.allowed_values.split('-')
- if value < int(min) or value > int(max):
- raise ValueError, 'range is %s' % self.allowed_values
- self._value = value
- else:
- raise ValueError, 'value must be integer'
-
- def _set_boolean_value(self, value):
- if isinstance(value, bool):
- self._value = value
- elif isinstance(value, str) or isinstance(value, unicode):
+ if isinstance(value, compat.string_types):
if value.lower() == 'true':
self._value = True
else:
self._value = False
else:
- raise ValueError, 'value must be boolean'
+ raise ValueError('value must be boolean')
def set_value(self, value):
if self.type == 'string':
@@ -172,7 +161,7 @@ class Parameter(object):
elif self.type == 'boolean':
self._set_boolean_value(value)
else:
- raise TypeError, 'unknown type (%s)' % self.type
+ raise TypeError('unknown type (%s)' % self.type)
def get_value(self):
if self._value == None:
@@ -188,7 +177,7 @@ class Parameter(object):
self._set_boolean_value(self._value)
return self._value
else:
- raise TypeError, 'unknown type (%s)' % self.type
+ raise TypeError('unknown type (%s)' % self.type)
value = property(get_value, set_value, 'The value of the parameter')
diff --git a/boto/roboto/awsqueryrequest.py b/boto/roboto/awsqueryrequest.py
index 9e05ac63..3fdaf07b 100644
--- a/boto/roboto/awsqueryrequest.py
+++ b/boto/roboto/awsqueryrequest.py
@@ -47,10 +47,10 @@ def boto_except_hook(debugger_flag, debug_flag):
else:
debugger.post_mortem(tb)
elif debug_flag:
- print traceback.print_tb(tb)
+ print(traceback.print_tb(tb))
sys.exit(1)
else:
- print value
+ print(value)
sys.exit(1)
return excepthook
@@ -69,7 +69,7 @@ class Line(object):
def print_it(self):
if not self.printed:
- print self.line
+ print(self.line)
self.printed = True
class RequiredParamError(boto.exception.BotoClientError):
@@ -223,7 +223,7 @@ class AWSQueryRequest(object):
filter_names = [f['name'] for f in self.Filters]
unknown_filters = [f for f in filters if f not in filter_names]
if unknown_filters:
- raise FilterError, 'Unknown filters: %s' % unknown_filters
+ raise FilterError('Unknown filters: %s' % unknown_filters)
for i, filter in enumerate(self.Filters):
name = filter['name']
if name in filters:
@@ -342,9 +342,9 @@ class AWSQueryRequest(object):
def process_standard_options(self, options, args, d):
if hasattr(options, 'help_filters') and options.help_filters:
- print 'Available filters:'
+ print('Available filters:')
for filter in self.Filters:
- print '%s\t%s' % (filter.name, filter.doc)
+ print('%s\t%s' % (filter.name, filter.doc))
sys.exit(0)
if options.debug:
self.args['debug'] = 2
@@ -358,7 +358,7 @@ class AWSQueryRequest(object):
self.args['aws_secret_access_key'] = options.secret_key
if options.version:
# TODO - Where should the version # come from?
- print 'version x.xx'
+ print('version x.xx')
exit(0)
sys.excepthook = boto_except_hook(options.debugger,
options.debug)
@@ -452,17 +452,17 @@ class AWSQueryRequest(object):
try:
response = self.main()
self.cli_formatter(response)
- except RequiredParamError, e:
- print e
+ except RequiredParamError as e:
+ print(e)
sys.exit(1)
- except self.ServiceClass.ResponseError, err:
- print 'Error(%s): %s' % (err.error_code, err.error_message)
+ except self.ServiceClass.ResponseError as err:
+ print('Error(%s): %s' % (err.error_code, err.error_message))
sys.exit(1)
- except boto.roboto.awsqueryservice.NoCredentialsError, err:
- print 'Unable to find credentials.'
+ except boto.roboto.awsqueryservice.NoCredentialsError as err:
+ print('Unable to find credentials.')
sys.exit(1)
- except Exception, e:
- print e
+ except Exception as e:
+ print(e)
sys.exit(1)
def _generic_cli_formatter(self, fmt, data, label=''):
diff --git a/boto/roboto/awsqueryservice.py b/boto/roboto/awsqueryservice.py
index 0ca78c2d..0b3db140 100644
--- a/boto/roboto/awsqueryservice.py
+++ b/boto/roboto/awsqueryservice.py
@@ -1,10 +1,11 @@
import os
-import urlparse
import boto
import boto.connection
import boto.jsonresponse
import boto.exception
-import awsqueryrequest
+import boto.compat as compat
+from . import awsqueryrequest
+
class NoCredentialsError(boto.exception.BotoClientError):
@@ -77,7 +78,7 @@ class AWSQueryService(boto.connection.AWSQueryConnection):
value = value.strip()
self.args['aws_secret_access_key'] = value
else:
- print 'Warning: unable to read AWS_CREDENTIAL_FILE'
+ print('Warning: unable to read AWS_CREDENTIAL_FILE')
def check_for_env_url(self):
"""
@@ -95,7 +96,7 @@ class AWSQueryService(boto.connection.AWSQueryConnection):
if not url and self.EnvURL in os.environ:
url = os.environ[self.EnvURL]
if url:
- rslt = urlparse.urlparse(url)
+ rslt = compat.urlparse.urlparse(url)
if 'is_secure' not in self.args:
if rslt.scheme == 'https':
self.args['is_secure'] = True
diff --git a/boto/route53/__init__.py b/boto/route53/__init__.py
index d404bc73..7f753e19 100644
--- a/boto/route53/__init__.py
+++ b/boto/route53/__init__.py
@@ -23,4 +23,4 @@
# this is here for backward compatibility
# originally, the Route53Connection class was defined here
-from connection import Route53Connection
+from .connection import Route53Connection
diff --git a/boto/route53/connection.py b/boto/route53/connection.py
index 0ae63c14..b0bb0775 100644
--- a/boto/route53/connection.py
+++ b/boto/route53/connection.py
@@ -24,14 +24,14 @@
import xml.sax
import time
import uuid
-import urllib
import boto
from boto.connection import AWSAuthConnection
from boto import handler
from boto.resultset import ResultSet
import boto.jsonresponse
-import exception
-import hostedzone
+import boto.compat as compat
+from . import exception
+from . import hostedzone
HZXML = """<?xml version="1.0" encoding="UTF-8"?>
<CreateHostedZoneRequest xmlns="%(xmlns)s">
@@ -67,9 +67,9 @@ class Route53Connection(AWSAuthConnection):
def make_request(self, action, path, headers=None, data='', params=None):
if params:
pairs = []
- for key, val in params.iteritems():
+ for key, val in params.items():
if val is None: continue
- pairs.append(key + '=' + urllib.quote(str(val)))
+ pairs.append(key + '=' + compat.quote(str(val)))
path += '?' + '&'.join(pairs)
return AWSAuthConnection.make_request(self, action, path, headers, data)
@@ -101,7 +101,7 @@ class Route53Connection(AWSAuthConnection):
h.parse(body)
if zone_list:
e['ListHostedZonesResponse']['HostedZones'].extend(zone_list)
- while e['ListHostedZonesResponse'].has_key('NextMarker'):
+ while 'NextMarker' in e['ListHostedZonesResponse']:
next_marker = e['ListHostedZonesResponse']['NextMarker']
zone_list = e['ListHostedZonesResponse']['HostedZones']
e = self.get_all_hosted_zones(next_marker, zone_list)
diff --git a/boto/s3/bucket.py b/boto/s3/bucket.py
index 44acb4b8..685b0b3c 100644
--- a/boto/s3/bucket.py
+++ b/boto/s3/bucket.py
@@ -38,11 +38,11 @@ from boto.s3.bucketlistresultset import VersionedBucketListResultSet
from boto.s3.bucketlistresultset import MultiPartUploadListResultSet
from boto.s3.lifecycle import Lifecycle
from boto.s3.bucketlogging import BucketLogging
+import boto.compat as compat
import boto.jsonresponse
import boto.utils
import xml.sax
import xml.sax.saxutils
-import StringIO
import urllib
import re
import base64
@@ -291,7 +291,7 @@ class Bucket(object):
k = k.replace('_', '-')
if k == 'maxkeys':
k = 'max-keys'
- if isinstance(v, unicode):
+ if isinstance(v, compat.text_type):
v = v.encode('utf-8')
if v is not None and v != '':
l.append('%s=%s' % (urllib.quote(k), urllib.quote(str(v))))
@@ -484,10 +484,10 @@ class Bucket(object):
query_args = 'delete'
def delete_keys2(hdrs):
hdrs = hdrs or {}
- data = u"""<?xml version="1.0" encoding="UTF-8"?>"""
- data += u"<Delete>"
+ data = """<?xml version="1.0" encoding="UTF-8"?>"""
+ data += "<Delete>"
if quiet:
- data += u"<Quiet>true</Quiet>"
+ data += "<Quiet>true</Quiet>"
count = 0
while count < 1000:
try:
@@ -515,15 +515,15 @@ class Bucket(object):
continue
count += 1
#key_name = key_name.decode('utf-8')
- data += u"<Object><Key>%s</Key>" % xml.sax.saxutils.escape(key_name)
+ data += "<Object><Key>%s</Key>" % xml.sax.saxutils.escape(key_name)
if version_id:
- data += u"<VersionId>%s</VersionId>" % version_id
- data += u"</Object>"
- data += u"</Delete>"
+ data += "<VersionId>%s</VersionId>" % version_id
+ data += "</Object>"
+ data += "</Delete>"
if count <= 0:
return False # no more
data = data.encode('utf-8')
- fp = StringIO.StringIO(data)
+ fp = compat.StringIO(data)
md5 = boto.utils.compute_md5(fp)
hdrs['Content-MD5'] = md5[1]
hdrs['Content-Type'] = 'text/xml'
@@ -1134,7 +1134,7 @@ class Bucket(object):
:param lifecycle_config: The lifecycle configuration you want
to configure for this bucket.
"""
- fp = StringIO.StringIO(lifecycle_config.to_xml())
+ fp = compat.StringIO(lifecycle_config.to_xml())
md5 = boto.utils.compute_md5(fp)
if headers is None:
headers = {}
diff --git a/boto/s3/bucketlogging.py b/boto/s3/bucketlogging.py
index 9e3c050d..33a39228 100644
--- a/boto/s3/bucketlogging.py
+++ b/boto/s3/bucketlogging.py
@@ -20,7 +20,7 @@
# IN THE SOFTWARE.
import xml.sax.saxutils
-from acl import Grant
+from .acl import Grant
class BucketLogging:
@@ -66,18 +66,18 @@ class BucketLogging:
def to_xml(self):
# caller is responsible to encode to utf-8
- s = u'<?xml version="1.0" encoding="UTF-8"?>'
- s += u'<BucketLoggingStatus xmlns="http://doc.s3.amazonaws.com/2006-03-01">'
+ s = '<?xml version="1.0" encoding="UTF-8"?>'
+ s += '<BucketLoggingStatus xmlns="http://doc.s3.amazonaws.com/2006-03-01">'
if self.target is not None:
- s += u'<LoggingEnabled>'
- s += u'<TargetBucket>%s</TargetBucket>' % self.target
+ s += '<LoggingEnabled>'
+ s += '<TargetBucket>%s</TargetBucket>' % self.target
prefix = self.prefix or ''
- s += u'<TargetPrefix>%s</TargetPrefix>' % xml.sax.saxutils.escape(prefix)
+ s += '<TargetPrefix>%s</TargetPrefix>' % xml.sax.saxutils.escape(prefix)
if self.grants:
s += '<TargetGrants>'
for grant in self.grants:
s += grant.to_xml()
s += '</TargetGrants>'
- s += u'</LoggingEnabled>'
- s += u'</BucketLoggingStatus>'
+ s += '</LoggingEnabled>'
+ s += '</BucketLoggingStatus>'
return s
diff --git a/boto/s3/connection.py b/boto/s3/connection.py
index afdc280f..dfe053b0 100644
--- a/boto/s3/connection.py
+++ b/boto/s3/connection.py
@@ -22,7 +22,7 @@
# IN THE SOFTWARE.
import xml.sax
-import urllib, base64
+import base64
import time
import boto.utils
from boto.connection import AWSAuthConnection
@@ -31,6 +31,7 @@ from boto.s3.bucket import Bucket
from boto.s3.key import Key
from boto.resultset import ResultSet
from boto.exception import BotoClientError
+import boto.compat as compat
def check_lowercase_bucketname(n):
"""
@@ -85,11 +86,11 @@ class _CallingFormat(object):
path = ''
if bucket != '':
path = '/' + bucket
- return path + '/%s' % urllib.quote(key)
+ return path + '/%s' % compat.quote(key)
def build_path_base(self, bucket, key=''):
key = boto.utils.get_utf8_value(key)
- return '/%s' % urllib.quote(key)
+ return '/%s' % compat.quote(key)
class SubdomainCallingFormat(_CallingFormat):
@@ -113,7 +114,7 @@ class OrdinaryCallingFormat(_CallingFormat):
path_base = '/'
if bucket:
path_base += "%s/" % bucket
- return path_base + urllib.quote(key)
+ return path_base + compat.quote(key)
class ProtocolIndependentOrdinaryCallingFormat(OrdinaryCallingFormat):
@@ -315,14 +316,14 @@ class S3Connection(AWSAuthConnection):
c_string = boto.utils.canonical_string(method, auth_path, headers,
expires, self.provider)
b64_hmac = self._auth_handler.sign_string(c_string)
- encoded_canonical = urllib.quote_plus(b64_hmac)
+ encoded_canonical = compat.quote_plus(b64_hmac)
self.calling_format.build_path_base(bucket, key)
if query_auth:
query_part = '?' + self.QueryString % (encoded_canonical, expires,
self.aws_access_key_id)
# The response headers must also be GET parameters in the URL.
headers.update(response_headers)
- hdrs = ['%s=%s'%(n, urllib.quote(v)) for n, v in headers.items()]
+ hdrs = ['%s=%s'%(n, compat.quote(v)) for n, v in headers.items()]
q_str = '&'.join(hdrs)
if q_str:
query_part += '&' + q_str
diff --git a/boto/s3/key.py b/boto/s3/key.py
index e8da98f3..3b247d77 100644
--- a/boto/s3/key.py
+++ b/boto/s3/key.py
@@ -23,8 +23,7 @@
import mimetypes
import os
import re
-import rfc822
-import StringIO
+import email
import base64
import math
import urllib
@@ -33,11 +32,7 @@ from boto.exception import BotoClientError
from boto.provider import Provider
from boto.s3.user import User
from boto import UserAgent
-from boto.utils import compute_md5
-try:
- from hashlib import md5
-except ImportError:
- from md5 import md5
+import boto.compat as compat
class Key(object):
@@ -107,7 +102,7 @@ class Key(object):
"""
import binascii
digest = binascii.unhexlify(md5_hexdigest)
- base64md5 = base64.encodestring(digest)
+ base64md5 = base64.b64encode(digest)
if base64md5[-1] == '\n':
base64md5 = base64md5[0:-1]
return (md5_hexdigest, base64md5)
@@ -523,7 +518,7 @@ class Key(object):
http_conn.endheaders()
if chunked_transfer and not self.base64md5:
# MD5 for the stream has to be calculated on the fly.
- m = md5()
+ m = compat.md5()
else:
m = None
@@ -628,9 +623,9 @@ class Key(object):
headers['User-Agent'] = UserAgent
if self.storage_class != 'STANDARD':
headers[provider.storage_class_header] = self.storage_class
- if headers.has_key('Content-Encoding'):
+ if 'Content-Encoding' in headers:
self.content_encoding = headers['Content-Encoding']
- if headers.has_key('Content-Type'):
+ if 'Content-Type' in headers:
# Some use cases need to suppress sending of the Content-Type
# header and depend on the receiving server to set the content
# type. This can be achieved by setting headers['Content-Type']
@@ -682,7 +677,7 @@ class Key(object):
as the first element and the base64 encoded version of the
plain digest as the second element.
"""
- tup = compute_md5(fp, size=size)
+ tup = boto.utils.compute_md5(fp, size=size)
# Returned values are MD5 hash, base64 encoded MD5 hash, and data size.
# The internal implementation of compute_md5() needs to return the
# data size but we don't want to return that value to the external
@@ -1060,9 +1055,9 @@ class Key(object):
will be stored in an encrypted form while
at rest in S3.
"""
- if isinstance(s, unicode):
+ if isinstance(s, compat.text_type):
s = s.encode("utf-8")
- fp = StringIO.StringIO(s)
+ fp = compat.StringIO(s)
r = self.set_contents_from_file(fp, headers, replace, cb, num_cb,
policy, md5, reduced_redundancy,
encrypt_key=encrypt_key)
@@ -1118,7 +1113,7 @@ class Key(object):
query_args.append('torrent')
m = None
else:
- m = md5()
+ m = compat.md5()
# If a version_id is passed in, use that. If not, check to see
# if the Key object has an explicit version_id and, if so, use that.
# Otherwise, don't pass a version_id query param.
@@ -1167,7 +1162,7 @@ class Key(object):
cb(data_len, cb_size)
if m:
self.md5 = m.hexdigest()
- if self.size is None and not torrent and not headers.has_key("Range"):
+ if self.size is None and not torrent and "Range" not in headers:
self.size = data_len
self.close()
self.bucket.connection.debug = save_debug
@@ -1313,8 +1308,8 @@ class Key(object):
# if last_modified date was sent from s3, try to set file's timestamp
if self.last_modified != None:
try:
- modified_tuple = rfc822.parsedate_tz(self.last_modified)
- modified_stamp = int(rfc822.mktime_tz(modified_tuple))
+ modified_tuple = email.utils.parsedate_tz(self.last_modified)
+ modified_stamp = int(email.utils.mktime_tz(modified_tuple))
os.utime(fp.name, (modified_stamp, modified_stamp))
except Exception: pass
@@ -1360,7 +1355,7 @@ class Key(object):
:rtype: string
:returns: The contents of the file as a string
"""
- fp = StringIO.StringIO()
+ fp = compat.StringIO()
self.get_contents_to_file(fp, headers, cb, num_cb, torrent=torrent,
version_id=version_id,
response_headers=response_headers)
diff --git a/boto/s3/multipart.py b/boto/s3/multipart.py
index a2930333..d0fb3b54 100644
--- a/boto/s3/multipart.py
+++ b/boto/s3/multipart.py
@@ -20,8 +20,8 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
-import user
-import key
+from . import user
+from . import key
from boto import handler
import xml.sax
diff --git a/boto/s3/resumable_download_handler.py b/boto/s3/resumable_download_handler.py
index ffa20957..d653cb8c 100644
--- a/boto/s3/resumable_download_handler.py
+++ b/boto/s3/resumable_download_handler.py
@@ -20,7 +20,6 @@
# IN THE SOFTWARE.
import errno
-import httplib
import os
import re
import socket
@@ -88,7 +87,7 @@ class ResumableDownloadHandler(object):
ETAG_REGEX = '([a-z0-9]{32})\n'
- RETRYABLE_EXCEPTIONS = (httplib.HTTPException, IOError, socket.error,
+ RETRYABLE_EXCEPTIONS = (compat.httplib.HTTPException, IOError, socket.error,
socket.gaierror)
def __init__(self, tracker_file_name=None, num_retries=None):
@@ -130,7 +129,7 @@ class ResumableDownloadHandler(object):
else:
print('Couldn\'t read etag in tracker file (%s). Restarting '
'download from scratch.' % self.tracker_file_name)
- except IOError, e:
+ except IOError as e:
# Ignore non-existent file (happens first time a download
# is attempted on an object), but warn user for other errors.
if e.errno != errno.ENOENT:
@@ -151,7 +150,7 @@ class ResumableDownloadHandler(object):
try:
f = open(self.tracker_file_name, 'w')
f.write('%s\n' % self.etag_value_for_current_download)
- except IOError, e:
+ except IOError as e:
raise ResumableDownloadException(
'Couldn\'t write tracker file (%s): %s.\nThis can happen'
'if you\'re using an incorrectly configured download tool\n'
@@ -189,17 +188,17 @@ class ResumableDownloadHandler(object):
key.size), ResumableTransferDisposition.ABORT)
elif cur_file_size == key.size:
if key.bucket.connection.debug >= 1:
- print 'Download complete.'
+ print('Download complete.')
return
if key.bucket.connection.debug >= 1:
- print 'Resuming download.'
+ print('Resuming download.')
headers = headers.copy()
headers['Range'] = 'bytes=%d-%d' % (cur_file_size, key.size - 1)
cb = ByteTranslatingCallbackHandler(cb, cur_file_size).call
self.download_start_point = cur_file_size
else:
if key.bucket.connection.debug >= 1:
- print 'Starting new resumable download.'
+ print('Starting new resumable download.')
self._save_tracker_info(key)
self.download_start_point = 0
# Truncate the file, in case a new resumable download is being
@@ -271,9 +270,9 @@ class ResumableDownloadHandler(object):
# non-resumable downloads, this call was removed. Checksum
# validation of file contents should be done by the caller.
if debug >= 1:
- print 'Resumable download complete.'
+ print('Resumable download complete.')
return
- except self.RETRYABLE_EXCEPTIONS, e:
+ except self.RETRYABLE_EXCEPTIONS as e:
if debug >= 1:
print('Caught exception (%s)' % e.__repr__())
if isinstance(e, IOError) and e.errno == errno.EPIPE:
@@ -283,7 +282,7 @@ class ResumableDownloadHandler(object):
# the download.
key.get_file(fp, headers, cb, num_cb, torrent, version_id,
override_num_retries=0)
- except ResumableDownloadException, e:
+ except ResumableDownloadException as e:
if (e.disposition ==
ResumableTransferDisposition.ABORT_CUR_PROCESS):
if debug >= 1:
@@ -324,7 +323,7 @@ class ResumableDownloadHandler(object):
# which we can safely ignore.
try:
key.close()
- except httplib.IncompleteRead:
+ except compat.httplib.IncompleteRead:
pass
sleep_time_secs = 2**progress_less_iterations
diff --git a/boto/sdb/__init__.py b/boto/sdb/__init__.py
index 15e763dd..a6326b3d 100644
--- a/boto/sdb/__init__.py
+++ b/boto/sdb/__init__.py
@@ -20,7 +20,7 @@
# IN THE SOFTWARE.
#
-from regioninfo import SDBRegionInfo
+from .regioninfo import SDBRegionInfo
def regions():
"""
diff --git a/boto/sdb/connection.py b/boto/sdb/connection.py
index f0431932..367e901c 100644
--- a/boto/sdb/connection.py
+++ b/boto/sdb/connection.py
@@ -49,7 +49,6 @@ class ItemThread(threading.Thread):
:ivar list items: A list of items retrieved. Starts as empty list.
"""
threading.Thread.__init__(self, name=name)
- #print 'starting %s with %d items' % (name, len(item_names))
self.domain_name = domain_name
self.conn = SDBConnection()
self.item_names = item_names
@@ -139,8 +138,7 @@ class SDBConnection(AWSQueryConnection):
def _build_name_value_list(self, params, attributes, replace=False,
label='Attribute'):
- keys = attributes.keys()
- keys.sort()
+ keys = sorted(attributes)
i = 1
for key in keys:
value = attributes[key]
@@ -233,9 +231,9 @@ class SDBConnection(AWSQueryConnection):
requests made on this specific connection instance. It is by
no means an account-wide estimate.
"""
- print 'Total Usage: %f compute seconds' % self.box_usage
+ print('Total Usage: %f compute seconds' % self.box_usage)
cost = self.box_usage * 0.14
- print 'Approximate Cost: $%f' % cost
+ print('Approximate Cost: $%f' % cost)
def get_domain(self, domain_name, validate=True):
"""
@@ -612,6 +610,6 @@ class SDBConnection(AWSQueryConnection):
try:
return self.get_list('Select', params, [('Item', self.item_cls)],
parent=domain)
- except SDBResponseError, e:
+ except SDBResponseError as e:
e.body = "Query: %s\n%s" % (query, e.body)
raise e
diff --git a/boto/sdb/db/blob.py b/boto/sdb/db/blob.py
index b50794c9..54954def 100644
--- a/boto/sdb/db/blob.py
+++ b/boto/sdb/db/blob.py
@@ -19,6 +19,8 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
+import boto.compat as compat
+
class Blob(object):
"""Blob object"""
@@ -29,7 +31,7 @@ class Blob(object):
@property
def file(self):
- from StringIO import StringIO
+ from boto.compat import StringIO
if self._file:
f = self._file
else:
diff --git a/boto/sdb/db/key.py b/boto/sdb/db/key.py
index 42a9d8da..f630d398 100644
--- a/boto/sdb/db/key.py
+++ b/boto/sdb/db/key.py
@@ -23,7 +23,7 @@ class Key(object):
@classmethod
def from_path(cls, *args, **kwds):
- raise NotImplementedError, "Paths are not currently supported"
+ raise NotImplementedError("Paths are not currently supported")
def __init__(self, encoded=None, obj=None):
self.name = None
@@ -35,7 +35,7 @@ class Key(object):
self.kind = None
def app(self):
- raise NotImplementedError, "Applications are not currently supported"
+ raise NotImplementedError("Applications are not currently supported")
def kind(self):
return self.kind
@@ -44,7 +44,7 @@ class Key(object):
return self.id
def name(self):
- raise NotImplementedError, "Key Names are not currently supported"
+ raise NotImplementedError("Key Names are not currently supported")
def id_or_name(self):
return self.id
@@ -53,7 +53,7 @@ class Key(object):
return self.id != None
def parent(self):
- raise NotImplementedError, "Key parents are not currently supported"
+ raise NotImplementedError("Key parents are not currently supported")
def __str__(self):
return self.id_or_name()
diff --git a/boto/sdb/db/manager/__init__.py b/boto/sdb/db/manager/__init__.py
index 55b32a4d..254d43e9 100644
--- a/boto/sdb/db/manager/__init__.py
+++ b/boto/sdb/db/manager/__init__.py
@@ -72,20 +72,20 @@ def get_manager(cls):
elif hasattr(cls.__bases__[0], "_manager"):
return cls.__bases__[0]._manager
if db_type == 'SimpleDB':
- from sdbmanager import SDBManager
+ from .sdbmanager import SDBManager
return SDBManager(cls, db_name, db_user, db_passwd,
db_host, db_port, db_table, sql_dir, enable_ssl)
elif db_type == 'PostgreSQL':
- from pgmanager import PGManager
+ from .pgmanager import PGManager
if db_table:
return PGManager(cls, db_name, db_user, db_passwd,
db_host, db_port, db_table, sql_dir, enable_ssl)
else:
return None
elif db_type == 'XML':
- from xmlmanager import XMLManager
+ from .xmlmanager import XMLManager
return XMLManager(cls, db_name, db_user, db_passwd,
db_host, db_port, db_table, sql_dir, enable_ssl)
else:
- raise ValueError, 'Unknown db_type: %s' % db_type
+ raise ValueError('Unknown db_type: %s' % db_type)
diff --git a/boto/sdb/db/manager/pgmanager.py b/boto/sdb/db/manager/pgmanager.py
index 31f27caf..bfff2c1f 100644
--- a/boto/sdb/db/manager/pgmanager.py
+++ b/boto/sdb/db/manager/pgmanager.py
@@ -105,7 +105,7 @@ class PGConverter:
try:
return self.manager.get_object_from_id(value)
except:
- raise ValueError, 'Unable to convert %s to Object' % value
+ raise ValueError('Unable to convert %s to Object' % value)
class PGManager(object):
@@ -246,7 +246,7 @@ class PGManager(object):
try:
self.connection.commit()
- except psycopg2.ProgrammingError, err:
+ except psycopg2.ProgrammingError as err:
self.connection.rollback()
raise err
@@ -296,9 +296,9 @@ class PGManager(object):
row = self.cursor.fetchone()
return self._object_from_row(row, self.cursor.description)
elif self.cursor.rowcount == 0:
- raise KeyError, 'Object not found'
+ raise KeyError('Object not found')
else:
- raise LookupError, 'Multiple Objects Found'
+ raise LookupError('Multiple Objects Found')
def query(self, cls, filters, limit=None, order_by=None):
parts = []
@@ -333,7 +333,7 @@ class PGManager(object):
if prop.name == name:
v = self.decode_value(prop, rs[0])
return v
- raise AttributeError, '%s not found' % name
+ raise AttributeError('%s not found' % name)
def set_property(self, prop, obj, name, value):
pass
diff --git a/boto/sdb/db/manager/sdbmanager.py b/boto/sdb/db/manager/sdbmanager.py
index 7f20350e..cc5d78c3 100644
--- a/boto/sdb/db/manager/sdbmanager.py
+++ b/boto/sdb/db/manager/sdbmanager.py
@@ -101,7 +101,7 @@ class SDBConverter(object):
if value == None:
return None
if not isinstance(value, dict):
- raise ValueError, 'Expected a dict value, got %s' % type(value)
+ raise ValueError('Expected a dict value, got %s' % type(value))
new_value = []
for key in value:
item_type = getattr(prop, "item_type")
@@ -276,7 +276,7 @@ class SDBConverter(object):
else:
value = value.split("-")
return date(int(value[0]), int(value[1]), int(value[2]))
- except Exception, e:
+ except Exception as e:
return None
def encode_date(self, value):
@@ -356,7 +356,7 @@ class SDBConverter(object):
bucket = s3.get_bucket(match.group(1), validate=False)
try:
key = bucket.get_key(match.group(2))
- except S3ResponseError, e:
+ except S3ResponseError as e:
if e.reason != "Forbidden":
raise
return None
@@ -471,14 +471,14 @@ class SDBManager(object):
def load_object(self, obj):
if not obj._loaded:
a = self.domain.get_attributes(obj.id,consistent_read=self.consistent)
- if a.has_key('__type__'):
+ if '__type__' in a:
for prop in obj.properties(hidden=False):
- if a.has_key(prop.name):
+ if prop.name in a:
value = self.decode_value(prop, a[prop.name])
value = prop.make_value_from_datastore(value)
try:
setattr(obj, prop.name, value)
- except Exception, e:
+ except Exception as e:
boto.log.exception(e)
obj._loaded = True
@@ -486,13 +486,13 @@ class SDBManager(object):
obj = None
if not a:
a = self.domain.get_attributes(id,consistent_read=self.consistent)
- if a.has_key('__type__'):
+ if '__type__' in a:
if not cls or a['__type__'] != cls.__name__:
cls = find_class(a['__module__'], a['__type__'])
if cls:
params = {}
for prop in cls.properties(hidden=False):
- if a.has_key(prop.name):
+ if prop.name in a:
value = self.decode_value(prop, a[prop.name])
value = prop.make_value_from_datastore(value)
params[prop.name] = value
@@ -589,7 +589,7 @@ class SDBManager(object):
property = cls.find_property(name)
if name == order_by:
order_by_filtered = True
- if types.TypeType(value) == types.ListType:
+ if isinstance(value, list):
filter_parts_sub = []
for val in value:
val = self.encode_value(property, val)
@@ -640,7 +640,7 @@ class SDBManager(object):
return decendents
def query_gql(self, query_string, *args, **kwds):
- raise NotImplementedError, "GQL queries not supported in SimpleDB"
+ raise NotImplementedError("GQL queries not supported in SimpleDB")
def save_object(self, obj, expected_value=None):
if not obj.id:
@@ -706,7 +706,7 @@ class SDBManager(object):
value = prop.make_value_from_datastore(value)
setattr(obj, prop.name, value)
return value
- raise AttributeError, '%s not found' % name
+ raise AttributeError('%s not found' % name)
def set_key_value(self, obj, name, value):
self.domain.put_attributes(obj.id, {name : value}, replace=True)
@@ -716,7 +716,7 @@ class SDBManager(object):
def get_key_value(self, obj, name):
a = self.domain.get_attributes(obj.id, name,consistent_read=self.consistent)
- if a.has_key(name):
+ if name in a:
return a[name]
else:
return None
diff --git a/boto/sdb/db/manager/xmlmanager.py b/boto/sdb/db/manager/xmlmanager.py
index 3608b2c2..5240cdd2 100644
--- a/boto/sdb/db/manager/xmlmanager.py
+++ b/boto/sdb/db/manager/xmlmanager.py
@@ -202,7 +202,7 @@ class XMLManager(object):
self.auth_header = None
if self.db_user:
import base64
- base64string = base64.encodestring('%s:%s' % (self.db_user, self.db_passwd))[:-1]
+ base64string = base64.b64encode(('%s:%s' % (self.db_user, self.db_passwd)).encode('utf-8')).decode('ascii')
authheader = "Basic %s" % base64string
self.auth_header = authheader
@@ -373,7 +373,7 @@ class XMLManager(object):
for property in properties:
if property.name == name:
found = True
- if types.TypeType(value) == types.ListType:
+ if isinstance(value, list):
filter_parts = []
for val in value:
val = self.encode_value(property, val)
@@ -395,7 +395,7 @@ class XMLManager(object):
return ' intersection '.join(parts)
def query_gql(self, query_string, *args, **kwds):
- raise NotImplementedError, "GQL queries not supported in XML"
+ raise NotImplementedError("GQL queries not supported in XML")
def save_list(self, doc, items, prop_node):
items_node = doc.createElement('items')
@@ -495,7 +495,7 @@ class XMLManager(object):
def get_key_value(self, obj, name):
a = self.domain.get_attributes(obj.id, name)
- if a.has_key(name):
+ if name in a:
return a[name]
else:
return None
diff --git a/boto/sdb/db/model.py b/boto/sdb/db/model.py
index eab82763..54ad9316 100644
--- a/boto/sdb/db/model.py
+++ b/boto/sdb/db/model.py
@@ -86,7 +86,7 @@ class Model(object):
@classmethod
def get_by_key_name(cls, key_names, parent=None):
- raise NotImplementedError, "Key Names are not currently supported"
+ raise NotImplementedError("Key Names are not currently supported")
@classmethod
def find(cls, limit=None, next_token=None, **params):
@@ -101,7 +101,7 @@ class Model(object):
@classmethod
def get_or_insert(key_name, **kw):
- raise NotImplementedError, "get_or_insert not currently supported"
+ raise NotImplementedError("get_or_insert not currently supported")
@classmethod
def properties(cls, hidden=True):
@@ -154,7 +154,7 @@ class Model(object):
setattr(self, prop.name, prop.default_value())
except ValueError:
pass
- if kw.has_key('manager'):
+ if 'manager' in kw:
self._manager = kw['manager']
self.id = id
for key in kw:
@@ -163,7 +163,7 @@ class Model(object):
# so if it fails we just revert to it's default value
try:
setattr(self, key, kw[key])
- except Exception, e:
+ except Exception as e:
boto.log.exception(e)
def __repr__(self):
diff --git a/boto/sdb/db/property.py b/boto/sdb/db/property.py
index 1387be9d..37abf133 100644
--- a/boto/sdb/db/property.py
+++ b/boto/sdb/db/property.py
@@ -20,7 +20,7 @@
# IN THE SOFTWARE.
import datetime
-from key import Key
+from .key import Key
from boto.utils import Password
from boto.sdb.db.query import Query
import re
@@ -78,16 +78,16 @@ class Property(object):
if isinstance(value, basestring) or value == self.default_value():
return
if not isinstance(value, self.data_type):
- raise TypeError, 'Validation Error, %s.%s expecting %s, got %s' % (self.model_class.__name__, self.name, self.data_type, type(value))
+ raise TypeError('Validation Error, %s.%s expecting %s, got %s' % (self.model_class.__name__, self.name, self.data_type, type(value)))
def default_value(self):
return self.default
def validate(self, value):
if self.required and value==None:
- raise ValueError, '%s is a required property' % self.name
+ raise ValueError('%s is a required property' % self.name)
if self.choices and value and not value in self.choices:
- raise ValueError, '%s not a valid choice for %s.%s' % (value, self.model_class.__name__, self.name)
+ raise ValueError('%s not a valid choice for %s.%s' % (value, self.model_class.__name__, self.name))
if self.validator:
self.validator(value)
else:
@@ -113,9 +113,9 @@ def validate_string(value):
return
elif isinstance(value, str) or isinstance(value, unicode):
if len(value) > 1024:
- raise ValueError, 'Length of value greater than maxlength'
+ raise ValueError('Length of value greater than maxlength')
else:
- raise TypeError, 'Expecting String, got %s' % type(value)
+ raise TypeError('Expecting String, got %s' % type(value))
class StringProperty(Property):
@@ -137,9 +137,9 @@ class TextProperty(Property):
def validate(self, value):
value = super(TextProperty, self).validate(value)
if not isinstance(value, str) and not isinstance(value, unicode):
- raise TypeError, 'Expecting Text, got %s' % type(value)
+ raise TypeError('Expecting Text, got %s' % type(value))
if self.max_length and len(value) > self.max_length:
- raise ValueError, 'Length of value greater than maxlength %s' % self.max_length
+ raise ValueError('Length of value greater than maxlength %s' % self.max_length)
class PasswordProperty(StringProperty):
"""
@@ -227,9 +227,9 @@ class PasswordProperty(StringProperty):
value = Property.validate(self, value)
if isinstance(value, self.data_type):
if len(value) > 1024:
- raise ValueError, 'Length of value greater than maxlength'
+ raise ValueError('Length of value greater than maxlength')
else:
- raise TypeError, 'Expecting %s, got %s' % (type(self.data_type), type(value))
+ raise TypeError('Expecting %s, got %s' % (type(self.data_type), type(value)))
class BlobProperty(Property):
data_type = Blob
@@ -266,7 +266,7 @@ class S3KeyProperty(Property):
match = re.match(self.validate_regex, value)
if match:
return
- raise TypeError, 'Validation Error, expecting %s, got %s' % (self.data_type, type(value))
+ raise TypeError('Validation Error, expecting %s, got %s' % (self.data_type, type(value)))
def __get__(self, obj, objtype):
value = Property.__get__(self, obj, objtype)
@@ -307,9 +307,9 @@ class IntegerProperty(Property):
value = int(value)
value = Property.validate(self, value)
if value > self.max:
- raise ValueError, 'Maximum value is %d' % self.max
+ raise ValueError('Maximum value is %d' % self.max)
if value < self.min:
- raise ValueError, 'Minimum value is %d' % self.min
+ raise ValueError('Minimum value is %d' % self.min)
return value
def empty(self, value):
@@ -337,9 +337,9 @@ class LongProperty(Property):
min = -9223372036854775808
max = 9223372036854775807
if value > max:
- raise ValueError, 'Maximum value is %d' % max
+ raise ValueError('Maximum value is %d' % max)
if value < min:
- raise ValueError, 'Minimum value is %d' % min
+ raise ValueError('Minimum value is %d' % min)
return value
def empty(self, value):
@@ -429,7 +429,7 @@ class DateProperty(Property):
if value == None:
return
if not isinstance(value, self.data_type):
- raise TypeError, 'Validation Error, expecting %s, got %s' % (self.data_type, type(value))
+ raise TypeError('Validation Error, expecting %s, got %s' % (self.data_type, type(value)))
def get_value_for_datastore(self, model_instance):
if self.auto_now:
@@ -456,7 +456,7 @@ class TimeProperty(Property):
if value is None:
return
if not isinstance(value, self.data_type):
- raise TypeError, 'Validation Error, expecting %s, got %s' % (self.data_type, type(value))
+ raise TypeError('Validation Error, expecting %s, got %s' % (self.data_type, type(value)))
class ReferenceProperty(Property):
@@ -487,7 +487,7 @@ class ReferenceProperty(Property):
"""Don't allow this object to be associated to itself
This causes bad things to happen"""
if value != None and (obj.id == value or (hasattr(value, "id") and obj.id == value.id)):
- raise ValueError, "Can not associate an object with itself!"
+ raise ValueError("Can not associate an object with itself!")
return super(ReferenceProperty, self).__set__(obj,value)
def __property_config__(self, model_class, property_name):
@@ -495,7 +495,7 @@ class ReferenceProperty(Property):
if self.collection_name is None:
self.collection_name = '%s_%s_set' % (model_class.__name__.lower(), self.name)
if hasattr(self.reference_class, self.collection_name):
- raise ValueError, 'duplicate property: %s' % self.collection_name
+ raise ValueError('duplicate property: %s' % self.collection_name)
setattr(self.reference_class, self.collection_name,
_ReverseReferenceProperty(model_class, property_name, self.collection_name))
@@ -511,15 +511,15 @@ class ReferenceProperty(Property):
cls_lineage = self.reference_class.get_lineage()
if obj_lineage.startswith(cls_lineage):
return
- raise TypeError, '%s not instance of %s' % (obj_lineage, cls_lineage)
+ raise TypeError('%s not instance of %s' % (obj_lineage, cls_lineage))
except:
- raise ValueError, '%s is not a Model' % value
+ raise ValueError('%s is not a Model' % value)
def validate(self, value):
if self.validator:
self.validator(value)
if self.required and value==None:
- raise ValueError, '%s is a required property' % self.name
+ raise ValueError('%s is a required property' % self.name)
if value == self.default_value():
return
if not isinstance(value, str) and not isinstance(value, unicode):
@@ -552,7 +552,7 @@ class _ReverseReferenceProperty(Property):
def __set__(self, model_instance, value):
"""Not possible to set a new collection."""
- raise ValueError, 'Virtual property is read-only'
+ raise ValueError('Virtual property is read-only')
class CalculatedProperty(Property):
@@ -619,7 +619,7 @@ class ListProperty(Property):
for item in value:
if not isinstance(item, item_type):
if item_type == (int, long):
- raise ValueError, 'Items in the %s list must all be integers.' % self.name
+ raise ValueError('Items in the %s list must all be integers.' % self.name)
else:
raise ValueError('Items in the %s list must all be %s instances' %
(self.name, self.item_type.__name__))
@@ -661,7 +661,7 @@ class MapProperty(Property):
value = super(MapProperty, self).validate(value)
if value is not None:
if not isinstance(value, dict):
- raise ValueError, 'Value must of type dict'
+ raise ValueError('Value must of type dict')
if self.item_type in (int, long):
item_type = (int, long)
@@ -673,7 +673,7 @@ class MapProperty(Property):
for key in value:
if not isinstance(value[key], item_type):
if item_type == (int, long):
- raise ValueError, 'Values in the %s Map must all be integers.' % self.name
+ raise ValueError('Values in the %s Map must all be integers.' % self.name)
else:
raise ValueError('Values in the %s Map must all be %s instances' %
(self.name, self.item_type.__name__))
diff --git a/boto/sdb/db/sequence.py b/boto/sdb/db/sequence.py
index a01330e8..a0fbdc6b 100644
--- a/boto/sdb/db/sequence.py
+++ b/boto/sdb/db/sequence.py
@@ -169,9 +169,9 @@ class Sequence(object):
try:
self.db.put_attributes(self.id, new_val, expected_value=expected_value)
self.timestamp = new_val['timestamp']
- except SDBResponseError, e:
+ except SDBResponseError as e:
if e.status == 409:
- raise ValueError, "Sequence out of sync"
+ raise ValueError("Sequence out of sync")
else:
raise
@@ -180,11 +180,11 @@ class Sequence(object):
"""Get the value"""
val = self.db.get_attributes(self.id, consistent_read=True)
if val:
- if val.has_key('timestamp'):
+ if 'timestamp' in val:
self.timestamp = val['timestamp']
- if val.has_key('current_value'):
+ if 'current_value' in val:
self._value = self.item_type(val['current_value'])
- if val.has_key("last_value") and val['last_value'] != None:
+ if val.get('last_value') is not None:
self.last_value = self.item_type(val['last_value'])
return self._value
@@ -208,7 +208,7 @@ class Sequence(object):
self.domain_name = boto.config.get("DB", "sequence_db", boto.config.get("DB", "db_name", "default"))
try:
self._db = sdb.get_domain(self.domain_name)
- except SDBResponseError, e:
+ except SDBResponseError as e:
if e.status == 400:
self._db = sdb.create_domain(self.domain_name)
else:
diff --git a/boto/sdb/domain.py b/boto/sdb/domain.py
index f348c8aa..c8764ff9 100644
--- a/boto/sdb/domain.py
+++ b/boto/sdb/domain.py
@@ -23,6 +23,8 @@
Represents an SDB Domain
"""
from boto.sdb.queryresultset import SelectResultSet
+import boto.compat as compat
+
class Domain:
@@ -229,40 +231,40 @@ class Domain:
def delete_item(self, item):
self.delete_attributes(item.name)
- def to_xml(self, f=None):
- """Get this domain as an XML DOM Document
- :param f: Optional File to dump directly to
- :type f: File or Stream
-
- :return: File object where the XML has been dumped to
- :rtype: file
- """
- if not f:
- from tempfile import TemporaryFile
- f = TemporaryFile()
- print >> f, '<?xml version="1.0" encoding="UTF-8"?>'
- print >> f, '<Domain id="%s">' % self.name
- for item in self:
- print >> f, '\t<Item id="%s">' % item.name
- for k in item:
- print >> f, '\t\t<attribute id="%s">' % k
- values = item[k]
- if not isinstance(values, list):
- values = [values]
- for value in values:
- print >> f, '\t\t\t<value><![CDATA[',
- if isinstance(value, unicode):
- value = value.encode('utf-8', 'replace')
- else:
- value = unicode(value, errors='replace').encode('utf-8', 'replace')
- f.write(value)
- print >> f, ']]></value>'
- print >> f, '\t\t</attribute>'
- print >> f, '\t</Item>'
- print >> f, '</Domain>'
- f.flush()
- f.seek(0)
- return f
+ # def to_xml(self, f=None):
+ # """Get this domain as an XML DOM Document
+ # :param f: Optional File to dump directly to
+ # :type f: File or Stream
+
+ # :return: File object where the XML has been dumped to
+ # :rtype: file
+ # """
+ # if not f:
+ # from tempfile import TemporaryFile
+ # f = TemporaryFile()
+ # print >> f, '<?xml version="1.0" encoding="UTF-8"?>'
+ # print >> f, '<Domain id="%s">' % self.name
+ # for item in self:
+ # print >> f, '\t<Item id="%s">' % item.name
+ # for k in item:
+ # print >> f, '\t\t<attribute id="%s">' % k
+ # values = item[k]
+ # if not isinstance(values, list):
+ # values = [values]
+ # for value in values:
+ # print >> f, '\t\t\t<value><![CDATA[',
+ # if isinstance(value, compat.text_type):
+ # value = value.encode('utf-8', 'replace')
+ # else:
+ # value = unicode(value, errors='replace').encode('utf-8', 'replace')
+ # f.write(value)
+ # print >> f, ']]></value>'
+ # print >> f, '\t\t</attribute>'
+ # print >> f, '\t</Item>'
+ # print >> f, '</Domain>'
+ # f.flush()
+ # f.seek(0)
+ # return f
def from_xml(self, doc):
@@ -343,7 +345,7 @@ class DomainDumpParser(ContentHandler):
if self.value and self.attribute:
value = self.value.strip()
attr_name = self.attribute.strip()
- if self.attrs.has_key(attr_name):
+ if attr_name in self.attrs:
self.attrs[attr_name].append(value)
else:
self.attrs[attr_name] = [value]
@@ -370,8 +372,8 @@ class UploaderThread(Thread):
try:
self.db.batch_put_attributes(self.items)
except:
- print "Exception using batch put, trying regular put instead"
+ print("Exception using batch put, trying regular put instead")
for item_name in self.items:
self.db.put_attributes(item_name, self.items[item_name])
- print ".",
+ print(".",)
sys.stdout.flush()
diff --git a/boto/sdb/item.py b/boto/sdb/item.py
index 86bc70ca..999c7f0b 100644
--- a/boto/sdb/item.py
+++ b/boto/sdb/item.py
@@ -75,7 +75,7 @@ class Item(dict):
else:
self.name = self.decode_value(value)
elif name == 'Value':
- if self.has_key(self.last_key):
+ if self.last_key in self:
if not isinstance(self[self.last_key], list):
self[self.last_key] = [self[self.last_key]]
value = self.decode_value(value)
diff --git a/boto/services/result.py b/boto/services/result.py
index 32a6d6a6..98c67dcf 100644
--- a/boto/services/result.py
+++ b/boto/services/result.py
@@ -57,8 +57,7 @@ class ResultProcessor:
self.latest_time = end_time
def log_message(self, msg, path):
- keys = msg.keys()
- keys.sort()
+ keys = sorted(msg)
if not self.log_fp:
self.log_fp = open(os.path.join(path, self.LogFileName), 'a')
line = ','.join(keys)
@@ -76,7 +75,7 @@ class ResultProcessor:
self.log_message(record, path)
self.calculate_stats(record)
outputs = record['OutputKey'].split(',')
- if record.has_key('OutputBucket'):
+ if 'OutputBucket' in record:
bucket = boto.lookup('s3', record['OutputBucket'])
else:
bucket = boto.lookup('s3', record['Bucket'])
@@ -92,7 +91,7 @@ class ResultProcessor:
def get_results_from_queue(self, path, get_file=True, delete_msg=True):
m = self.queue.read()
while m:
- if m.has_key('Batch') and m['Batch'] == self.batch:
+ if 'Batch' in m and m['Batch'] == self.batch:
self.process_record(m, path, get_file)
if delete_msg:
self.queue.delete_message(m)
diff --git a/boto/services/service.py b/boto/services/service.py
index 8ee1a8be..e0e987ce 100644
--- a/boto/services/service.py
+++ b/boto/services/service.py
@@ -92,7 +92,7 @@ class Service(ScriptBase):
def save_results(self, results, input_message, output_message):
output_keys = []
for file, type in results:
- if input_message.has_key('OutputBucket'):
+ if 'OutputBucket' in input_message:
output_bucket = input_message['OutputBucket']
else:
output_bucket = input_message['Bucket']
@@ -105,7 +105,7 @@ class Service(ScriptBase):
def write_message(self, message):
message['Service-Write'] = get_ts()
message['Server'] = self.name
- if os.environ.has_key('HOSTNAME'):
+ if 'HOSTNAME' in os.environ:
message['Host'] = os.environ['HOSTNAME']
else:
message['Host'] = 'unknown'
diff --git a/boto/ses/__init__.py b/boto/ses/__init__.py
index f893423d..01cf8dec 100644
--- a/boto/ses/__init__.py
+++ b/boto/ses/__init__.py
@@ -20,7 +20,7 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
-from connection import SESConnection
+from .connection import SESConnection
from boto.regioninfo import RegionInfo
def regions():
diff --git a/boto/ses/connection.py b/boto/ses/connection.py
index c9ae41d4..ba58a62f 100644
--- a/boto/ses/connection.py
+++ b/boto/ses/connection.py
@@ -29,6 +29,7 @@ import boto.jsonresponse
import urllib
import base64
from boto.ses import exceptions as ses_exceptions
+import boto.compat as compat
class SESConnection(AWSAuthConnection):
@@ -88,7 +89,7 @@ class SESConnection(AWSAuthConnection):
params['Action'] = action
for k, v in params.items():
- if isinstance(v, unicode): # UTF-8 encode only if it's Unicode
+ if isinstance(v, compat.text_types): # UTF-8 encode only if it's Unicode
params[k] = v.encode('utf-8')
response = super(SESConnection, self).make_request(
diff --git a/boto/sns/__init__.py b/boto/sns/__init__.py
index 8d2682f7..1ae5d197 100644
--- a/boto/sns/__init__.py
+++ b/boto/sns/__init__.py
@@ -22,7 +22,7 @@
# this is here for backward compatibility
# originally, the SNSConnection class was defined here
-from connection import SNSConnection
+from .connection import SNSConnection
from boto.regioninfo import RegionInfo
def regions():
diff --git a/boto/sns/connection.py b/boto/sns/connection.py
index 6ce4ff19..0f24bc8e 100644
--- a/boto/sns/connection.py
+++ b/boto/sns/connection.py
@@ -23,10 +23,8 @@ from boto.connection import AWSQueryConnection
from boto.regioninfo import RegionInfo
import boto
import uuid
-try:
- import simplejson as json
-except ImportError:
- import json
+import boto.compat as compat
+
class SNSConnection(AWSQueryConnection):
@@ -68,7 +66,7 @@ class SNSConnection(AWSQueryConnection):
response = self.make_request('ListTopics', params, '/', 'GET')
body = response.read()
if response.status == 200:
- return json.loads(body)
+ return compat.json.loads(body)
else:
boto.log.error('%s %s' % (response.status, response.reason))
boto.log.error('%s' % body)
@@ -87,7 +85,7 @@ class SNSConnection(AWSQueryConnection):
response = self.make_request('GetTopicAttributes', params, '/', 'GET')
body = response.read()
if response.status == 200:
- return json.loads(body)
+ return compat.json.loads(body)
else:
boto.log.error('%s %s' % (response.status, response.reason))
boto.log.error('%s' % body)
@@ -116,7 +114,7 @@ class SNSConnection(AWSQueryConnection):
response = self.make_request('SetTopicAttributes', params, '/', 'GET')
body = response.read()
if response.status == 200:
- return json.loads(body)
+ return compat.json.loads(body)
else:
boto.log.error('%s %s' % (response.status, response.reason))
boto.log.error('%s' % body)
@@ -150,7 +148,7 @@ class SNSConnection(AWSQueryConnection):
response = self.make_request('AddPermission', params, '/', 'GET')
body = response.read()
if response.status == 200:
- return json.loads(body)
+ return compat.json.loads(body)
else:
boto.log.error('%s %s' % (response.status, response.reason))
boto.log.error('%s' % body)
@@ -174,7 +172,7 @@ class SNSConnection(AWSQueryConnection):
response = self.make_request('RemovePermission', params, '/', 'GET')
body = response.read()
if response.status == 200:
- return json.loads(body)
+ return compat.json.loads(body)
else:
boto.log.error('%s %s' % (response.status, response.reason))
boto.log.error('%s' % body)
@@ -193,7 +191,7 @@ class SNSConnection(AWSQueryConnection):
response = self.make_request('CreateTopic', params, '/', 'GET')
body = response.read()
if response.status == 200:
- return json.loads(body)
+ return compat.json.loads(body)
else:
boto.log.error('%s %s' % (response.status, response.reason))
boto.log.error('%s' % body)
@@ -212,7 +210,7 @@ class SNSConnection(AWSQueryConnection):
response = self.make_request('DeleteTopic', params, '/', 'GET')
body = response.read()
if response.status == 200:
- return json.loads(body)
+ return compat.json.loads(body)
else:
boto.log.error('%s %s' % (response.status, response.reason))
boto.log.error('%s' % body)
@@ -245,7 +243,7 @@ class SNSConnection(AWSQueryConnection):
response = self.make_request('Publish', params, '/', 'GET')
body = response.read()
if response.status == 200:
- return json.loads(body)
+ return compat.json.loads(body)
else:
boto.log.error('%s %s' % (response.status, response.reason))
boto.log.error('%s' % body)
@@ -280,7 +278,7 @@ class SNSConnection(AWSQueryConnection):
response = self.make_request('Subscribe', params, '/', 'GET')
body = response.read()
if response.status == 200:
- return json.loads(body)
+ return compat.json.loads(body)
else:
boto.log.error('%s %s' % (response.status, response.reason))
boto.log.error('%s' % body)
@@ -325,7 +323,7 @@ class SNSConnection(AWSQueryConnection):
'Sid' : str(uuid.uuid4()),
'Condition' : {'StringLike' : {'aws:SourceArn' : topic}}}
policy['Statement'].append(statement)
- queue.set_attribute('Policy', json.dumps(policy))
+ queue.set_attribute('Policy', compat.json.dumps(policy))
return resp
def confirm_subscription(self, topic, token,
@@ -355,7 +353,7 @@ class SNSConnection(AWSQueryConnection):
response = self.make_request('ConfirmSubscription', params, '/', 'GET')
body = response.read()
if response.status == 200:
- return json.loads(body)
+ return compat.json.loads(body)
else:
boto.log.error('%s %s' % (response.status, response.reason))
boto.log.error('%s' % body)
@@ -375,7 +373,7 @@ class SNSConnection(AWSQueryConnection):
response = self.make_request('Unsubscribe', params, '/', 'GET')
body = response.read()
if response.status == 200:
- return json.loads(body)
+ return compat.json.loads(body)
else:
boto.log.error('%s %s' % (response.status, response.reason))
boto.log.error('%s' % body)
@@ -396,7 +394,7 @@ class SNSConnection(AWSQueryConnection):
response = self.make_request('ListSubscriptions', params, '/', 'GET')
body = response.read()
if response.status == 200:
- return json.loads(body)
+ return compat.json.loads(body)
else:
boto.log.error('%s %s' % (response.status, response.reason))
boto.log.error('%s' % body)
@@ -423,7 +421,7 @@ class SNSConnection(AWSQueryConnection):
'/', 'GET')
body = response.read()
if response.status == 200:
- return json.loads(body)
+ return compat.json.loads(body)
else:
boto.log.error('%s %s' % (response.status, response.reason))
boto.log.error('%s' % body)
diff --git a/boto/sqs/__init__.py b/boto/sqs/__init__.py
index 744dd401..170ca1ea 100644
--- a/boto/sqs/__init__.py
+++ b/boto/sqs/__init__.py
@@ -20,7 +20,7 @@
# IN THE SOFTWARE.
#
-from regioninfo import SQSRegionInfo
+from .regioninfo import SQSRegionInfo
def regions():
"""
diff --git a/boto/sqs/jsonmessage.py b/boto/sqs/jsonmessage.py
index fb0a4c30..c517874a 100644
--- a/boto/sqs/jsonmessage.py
+++ b/boto/sqs/jsonmessage.py
@@ -22,10 +22,7 @@
from boto.sqs.message import MHMessage
from boto.exception import SQSDecodeError
import base64
-try:
- import simplejson as json
-except ImportError:
- import json
+import boto.compat as compat
class JSONMessage(MHMessage):
"""
@@ -35,11 +32,11 @@ class JSONMessage(MHMessage):
def decode(self, value):
try:
value = base64.b64decode(value)
- value = json.loads(value)
+ value = compat.json.loads(value)
except:
raise SQSDecodeError('Unable to decode message', self)
return value
def encode(self, value):
- value = json.dumps(value)
+ value = compat.json.dumps(value)
return base64.b64encode(value)
diff --git a/boto/sqs/message.py b/boto/sqs/message.py
index 8fabd478..2b6b3b86 100644
--- a/boto/sqs/message.py
+++ b/boto/sqs/message.py
@@ -64,9 +64,9 @@ in the format in which it would be stored in SQS.
"""
import base64
-import StringIO
from boto.sqs.attributes import Attributes
from boto.exception import SQSDecodeError
+import boto.compat as compat
class RawMessage:
"""
@@ -150,6 +150,8 @@ class Message(RawMessage):
"""
def encode(self, value):
+ if isinstance(value, compat.text_type):
+ value = value.encode('utf-8')
return base64.b64encode(value)
def decode(self, value):
@@ -179,7 +181,7 @@ class MHMessage(Message):
def decode(self, value):
try:
msg = {}
- fp = StringIO.StringIO(value)
+ fp = compat.StringIO(value)
line = fp.readline()
while line:
delim = line.find(':')
@@ -198,7 +200,7 @@ class MHMessage(Message):
return s
def __getitem__(self, key):
- if self._body.has_key(key):
+ if key in self._body:
return self._body[key]
else:
raise KeyError(key)
@@ -217,7 +219,7 @@ class MHMessage(Message):
return self._body.items()
def has_key(self, key):
- return self._body.has_key(key)
+ return key in self._body
def update(self, d):
self._body.update(d)
diff --git a/boto/sqs/queue.py b/boto/sqs/queue.py
index 0ecb1f2c..eee88f31 100644
--- a/boto/sqs/queue.py
+++ b/boto/sqs/queue.py
@@ -23,8 +23,8 @@
Represents an SQS Queue
"""
-import urlparse
from boto.sqs.message import Message
+import boto.compat as compat
class Queue:
@@ -40,7 +40,7 @@ class Queue:
def _id(self):
if self.url:
- val = urlparse.urlparse(self.url)[2]
+ val = compat.urlparse.urlparse(self.url)[2]
else:
val = self.url
return val
@@ -48,7 +48,7 @@ class Queue:
def _name(self):
if self.url:
- val = urlparse.urlparse(self.url)[2].split('/')[2]
+ val = compat.urlparse.urlparse(self.url)[2].split('/')[2]
else:
val = self.url
return val
@@ -398,7 +398,7 @@ class Queue:
m = Message(self, body)
self.write(m)
n += 1
- print 'writing message %d' % n
+ print('writing message %d' % n)
body = ''
else:
body = body + l
diff --git a/boto/sts/__init__.py b/boto/sts/__init__.py
index 7ee10b42..0d02c4ac 100644
--- a/boto/sts/__init__.py
+++ b/boto/sts/__init__.py
@@ -20,7 +20,7 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
-from connection import STSConnection
+from .connection import STSConnection
from boto.regioninfo import RegionInfo
def regions():
diff --git a/boto/sts/connection.py b/boto/sts/connection.py
index 7b860309..c39fef81 100644
--- a/boto/sts/connection.py
+++ b/boto/sts/connection.py
@@ -22,7 +22,7 @@
from boto.connection import AWSQueryConnection
from boto.regioninfo import RegionInfo
-from credentials import Credentials, FederationToken
+from .credentials import Credentials, FederationToken
import boto
import boto.utils
import datetime
diff --git a/boto/sts/credentials.py b/boto/sts/credentials.py
index f6d5174d..d6ecb43e 100644
--- a/boto/sts/credentials.py
+++ b/boto/sts/credentials.py
@@ -23,10 +23,8 @@
import boto.utils
import os
import datetime
-try:
- import simplejson as json
-except ImportError:
- import json
+import boto.compat as compat
+
class Credentials(object):
"""
@@ -54,7 +52,7 @@ class Credentials(object):
:param json_doc: A string containing a JSON document with a
previously saved Credentials object.
"""
- d = json.loads(json_doc)
+ d = compat.json.loads(json_doc)
token = cls()
token.__dict__.update(d)
return token
@@ -114,9 +112,9 @@ class Credentials(object):
of the file will be set to readable/writable by owner only.
"""
fp = open(file_path, 'wb')
- json.dump(self.to_dict(), fp)
+ compat.json.dump(self.to_dict(), fp)
fp.close()
- os.chmod(file_path, 0600)
+ os.chmod(file_path, 0o600)
def is_expired(self, time_offset_seconds=0):
"""
diff --git a/boto/swf/layer1.py b/boto/swf/layer1.py
index d39c3d13..fc422f0d 100644
--- a/boto/swf/layer1.py
+++ b/boto/swf/layer1.py
@@ -26,12 +26,8 @@ import boto
from boto.connection import AWSAuthConnection
from boto.provider import Provider
from boto.exception import DynamoDBResponseError
-
+import boto.compat as compat
import time
-try:
- import simplejson as json
-except ImportError:
- import json
#
# To get full debug output, uncomment the following line and set the
@@ -88,7 +84,7 @@ class Layer1(AWSAuthConnection):
response_body = response.read()
boto.log.debug(response_body)
if response_body:
- return json.loads(response_body, object_hook=object_hook)
+ return compat.json.loads(response_body, object_hook=object_hook)
else:
return None
@@ -125,7 +121,7 @@ class Layer1(AWSAuthConnection):
data = {'domain': domain, 'taskList': {'name': task_list}}
if identity:
data['identity'] = identity
- json_input = json.dumps(data)
+ json_input = compat.json.dumps(data)
return self.make_request('PollForActivityTask', json_input)
def respond_activity_task_completed(self, task_token, result=None):
@@ -146,7 +142,7 @@ class Layer1(AWSAuthConnection):
data = {'taskToken': task_token}
if result:
data['result'] = result
- json_input = json.dumps(data)
+ json_input = compat.json.dumps(data)
return self.make_request('RespondActivityTaskCompleted', json_input)
def respond_activity_task_failed(self, task_token,
@@ -172,7 +168,7 @@ class Layer1(AWSAuthConnection):
data['details'] = details
if reason:
data['reason'] = reason
- json_input = json.dumps(data)
+ json_input = compat.json.dumps(data)
return self.make_request('RespondActivityTaskFailed', json_input)
def respond_activity_task_canceled(self, task_token, details=None):
@@ -193,7 +189,7 @@ class Layer1(AWSAuthConnection):
data = {'taskToken': task_token}
if details:
data['details'] = details
- json_input = json.dumps(data)
+ json_input = compat.json.dumps(data)
return self.make_request('RespondActivityTaskCanceled', json_input)
def record_activity_task_heartbeat(self, task_token, details=None):
@@ -220,7 +216,7 @@ class Layer1(AWSAuthConnection):
data = {'taskToken': task_token}
if details:
data['details'] = details
- json_input = json.dumps(data)
+ json_input = compat.json.dumps(data)
return self.make_request('RecordActivityTaskHeartbeat', json_input)
# Actions related to Deciders
@@ -278,7 +274,7 @@ class Layer1(AWSAuthConnection):
data['nextPageToken'] = next_page_token
if reverse_order:
data['reverseOrder'] = 'true'
- json_input = json.dumps(data)
+ json_input = compat.json.dumps(data)
return self.make_request('PollForDecisionTask', json_input)
def respond_decision_task_completed(self, task_token,
@@ -309,7 +305,7 @@ class Layer1(AWSAuthConnection):
data['decisions'] = decisions
if execution_context:
data['executionContext'] = execution_context
- json_input = json.dumps(data)
+ json_input = compat.json.dumps(data)
return self.make_request('RespondDecisionTaskCompleted', json_input)
def request_cancel_workflow_execution(self, domain, workflow_id,
@@ -338,7 +334,7 @@ class Layer1(AWSAuthConnection):
data = {'domain': domain, 'workflowId': workflow_id}
if run_id:
data['runId'] = run_id
- json_input = json.dumps(data)
+ json_input = compat.json.dumps(data)
return self.make_request('RequestCancelWorkflowExecution', json_input)
def start_workflow_execution(self, domain, workflow_id,
@@ -437,7 +433,7 @@ class Layer1(AWSAuthConnection):
data['tagList'] = tag_list
if task_start_to_close_timeout:
data['taskStartToCloseTimeout'] = task_start_to_close_timeout
- json_input = json.dumps(data)
+ json_input = compat.json.dumps(data)
return self.make_request('StartWorkflowExecution', json_input)
def signal_workflow_execution(self, domain, signal_name, workflow_id,
@@ -476,7 +472,7 @@ class Layer1(AWSAuthConnection):
data['input'] = input
if run_id:
data['runId'] = run_id
- json_input = json.dumps(data)
+ json_input = compat.json.dumps(data)
return self.make_request('SignalWorkflowExecution', json_input)
def terminate_workflow_execution(self, domain, workflow_id,
@@ -538,7 +534,7 @@ class Layer1(AWSAuthConnection):
data['reason'] = reason
if run_id:
data['runId'] = run_id
- json_input = json.dumps(data)
+ json_input = compat.json.dumps(data)
return self.make_request('TerminateWorkflowExecution', json_input)
# Actions related to Administration
@@ -625,7 +621,7 @@ class Layer1(AWSAuthConnection):
data['defaultTaskStartToCloseTimeout'] = default_task_start_to_close_timeout
if description:
data['description'] = description
- json_input = json.dumps(data)
+ json_input = compat.json.dumps(data)
return self.make_request('RegisterActivityType', json_input)
def deprecate_activity_type(self, domain, activity_name, activity_version):
@@ -650,7 +646,7 @@ class Layer1(AWSAuthConnection):
data = {'domain': domain}
data['activityType'] = {'name': activity_name,
'version': activity_version}
- json_input = json.dumps(data)
+ json_input = compat.json.dumps(data)
return self.make_request('DeprecateActivityType', json_input)
## Workflow Management
@@ -732,7 +728,7 @@ class Layer1(AWSAuthConnection):
data['defaultTaskStartToCloseTimeout'] = default_task_start_to_close_timeout
if description:
data['description'] = description
- json_input = json.dumps(data)
+ json_input = compat.json.dumps(data)
return self.make_request('RegisterWorkflowType', json_input)
def deprecate_workflow_type(self, domain, workflow_name, workflow_version):
@@ -759,7 +755,7 @@ class Layer1(AWSAuthConnection):
data = {'domain': domain}
data['workflowType'] = {'name': workflow_name,
'version': workflow_version}
- json_input = json.dumps(data)
+ json_input = compat.json.dumps(data)
return self.make_request('DeprecateWorkflowType', json_input)
## Domain Management
@@ -793,7 +789,7 @@ class Layer1(AWSAuthConnection):
'workflowExecutionRetentionPeriodInDays': workflow_execution_retention_period_in_days}
if description:
data['description'] = description
- json_input = json.dumps(data)
+ json_input = compat.json.dumps(data)
return self.make_request('RegisterDomain', json_input)
def deprecate_domain(self, name):
@@ -813,7 +809,7 @@ class Layer1(AWSAuthConnection):
OperationNotPermittedFault
"""
data = {'name': name}
- json_input = json.dumps(data)
+ json_input = compat.json.dumps(data)
return self.make_request('DeprecateDomain', json_input)
# Visibility Actions
@@ -878,7 +874,7 @@ class Layer1(AWSAuthConnection):
data['nextPageToken'] = next_page_token
if reverse_order:
data['reverseOrder'] = 'true'
- json_input = json.dumps(data)
+ json_input = compat.json.dumps(data)
return self.make_request('ListActivityTypes', json_input)
def describe_activity_type(self, domain, activity_name, activity_version):
@@ -902,7 +898,7 @@ class Layer1(AWSAuthConnection):
data = {'domain': domain}
data['activityType'] = {'name': activity_name,
'version': activity_version}
- json_input = json.dumps(data)
+ json_input = compat.json.dumps(data)
return self.make_request('DescribeActivityType', json_input)
## Workflow Visibility
@@ -958,7 +954,7 @@ class Layer1(AWSAuthConnection):
data['nextPageToken'] = next_page_token
if reverse_order:
data['reverseOrder'] = 'true'
- json_input = json.dumps(data)
+ json_input = compat.json.dumps(data)
return self.make_request('ListWorkflowTypes', json_input)
def describe_workflow_type(self, domain, workflow_name, workflow_version):
@@ -983,7 +979,7 @@ class Layer1(AWSAuthConnection):
data = {'domain': domain}
data['workflowType'] = {'name': workflow_name,
'version': workflow_version}
- json_input = json.dumps(data)
+ json_input = compat.json.dumps(data)
return self.make_request('DescribeWorkflowType', json_input)
## Workflow Execution Visibility
@@ -1009,7 +1005,7 @@ class Layer1(AWSAuthConnection):
"""
data = {'domain': domain}
data['execution'] = {'runId': run_id, 'workflowId': workflow_id}
- json_input = json.dumps(data)
+ json_input = compat.json.dumps(data)
return self.make_request('DescribeWorkflowExecution', json_input)
def get_workflow_execution_history(self, domain, run_id, workflow_id,
@@ -1063,7 +1059,7 @@ class Layer1(AWSAuthConnection):
data['nextPageToken'] = next_page_token
if reverse_order:
data['reverseOrder'] = 'true'
- json_input = json.dumps(data)
+ json_input = compat.json.dumps(data)
return self.make_request('GetWorkflowExecutionHistory', json_input)
def count_open_workflow_executions(self, domain, latest_date, oldest_date,
@@ -1116,7 +1112,7 @@ class Layer1(AWSAuthConnection):
data['executionFilter'] = {'workflowId': workflow_id}
if tag:
data['tagFilter'] = {'tag': tag}
- json_input = json.dumps(data)
+ json_input = compat.json.dumps(data)
return self.make_request('CountOpenWorkflowExecutions', json_input)
def list_open_workflow_executions(self, domain, name, oldest_date, tag, workflow_id, latest_date=None, maximum_page_size=None, next_page_token=None, reverse_order=None, version=None):
@@ -1166,7 +1162,7 @@ class Layer1(AWSAuthConnection):
data['reverseOrder'] = 'true'
if version:
data['version'] = version
- json_input = json.dumps(data)
+ json_input = compat.json.dumps(data)
return self.make_request('ListOpenWorkflowExecutions', json_input)
def count_closed_workflow_executions(self, domain,
@@ -1254,7 +1250,7 @@ class Layer1(AWSAuthConnection):
if workflow_name and workflow_version:
data['typeFilter'] = {'name': workflow_name,
'version': workflow_version}
- json_input = json.dumps(data)
+ json_input = compat.json.dumps(data)
return self.make_request('CountClosedWorkflowExecutions', json_input)
def list_closed_workflow_executions(self, domain,
@@ -1369,7 +1365,7 @@ class Layer1(AWSAuthConnection):
data['nextPageToken'] = next_page_token
if reverse_order:
data['reverseOrder'] = 'true'
- json_input = json.dumps(data)
+ json_input = compat.json.dumps(data)
return self.make_request('ListClosedWorkflowExecutions', json_input)
## Domain Visibility
@@ -1416,7 +1412,7 @@ class Layer1(AWSAuthConnection):
data['nextPageToken'] = next_page_token
if reverse_order:
data['reverseOrder'] = 'true'
- json_input = json.dumps(data)
+ json_input = compat.json.dumps(data)
return self.make_request('ListDomains', json_input)
def describe_domain(self, name):
@@ -1430,7 +1426,7 @@ class Layer1(AWSAuthConnection):
:raises: UnknownResourceFault, OperationNotPermittedFault
"""
data = {'name': name}
- json_input = json.dumps(data)
+ json_input = compat.json.dumps(data)
return self.make_request('DescribeDomain', json_input)
## Task List Visibility
@@ -1452,7 +1448,7 @@ class Layer1(AWSAuthConnection):
:raises: #UnknownResourceFault, #OperationNotPermittedFault
"""
data = {'domain': domain, 'taskList': {'name': task_list}}
- json_input = json.dumps(data)
+ json_input = compat.json.dumps(data)
return self.make_request('CountPendingDecisionTasks', json_input)
def count_pending_activity_tasks(self, domain, task_list):
@@ -1472,7 +1468,7 @@ class Layer1(AWSAuthConnection):
:raises: UnknownResourceFault, OperationNotPermittedFault
"""
data = {'domain': domain, 'taskList': {'name': task_list}}
- json_input = json.dumps(data)
+ json_input = compat.json.dumps(data)
return self.make_request('CountPendingActivityTasks', json_input)
diff --git a/boto/utils.py b/boto/utils.py
index a2bf386d..c1545623 100644
--- a/boto/utils.py
+++ b/boto/utils.py
@@ -38,11 +38,8 @@
Some handy utility functions used by several classes.
"""
-import urllib
-import urllib2
import imp
import subprocess
-import StringIO
import time
import logging.handlers
import boto
@@ -50,17 +47,9 @@ import boto.provider
import tempfile
import smtplib
import datetime
-from email.MIMEMultipart import MIMEMultipart
-from email.MIMEBase import MIMEBase
-from email.MIMEText import MIMEText
-from email.Utils import formatdate
-from email import Encoders
import gzip
import base64
-try:
- from hashlib import md5
-except ImportError:
- from md5 import md5
+from . import compat
try:
@@ -83,7 +72,7 @@ def unquote_v(nv):
if len(nv) == 1:
return nv
else:
- return (nv[0], urllib.unquote(nv[1]))
+ return (nv[0], compat.unquote(nv[1]))
# generates the aws canonical string for the given parameters
def canonical_string(method, path, headers, expires=None,
@@ -98,13 +87,13 @@ def canonical_string(method, path, headers, expires=None,
interesting_headers[lk] = headers[key].strip()
# these keys get empty strings if they don't exist
- if not interesting_headers.has_key('content-type'):
+ if 'content-type' not in interesting_headers:
interesting_headers['content-type'] = ''
- if not interesting_headers.has_key('content-md5'):
+ if 'content-md5' not in interesting_headers:
interesting_headers['content-md5'] = ''
# just in case someone used this. it's not necessary in this lib.
- if interesting_headers.has_key(provider.date_header):
+ if provider.date_header in interesting_headers:
interesting_headers['date'] = ''
# if you're using expires for query string auth, then it trumps date
@@ -112,8 +101,7 @@ def canonical_string(method, path, headers, expires=None,
if expires:
interesting_headers['date'] = str(expires)
- sorted_header_keys = interesting_headers.keys()
- sorted_header_keys.sort()
+ sorted_header_keys = sorted(interesting_headers)
buf = "%s\n" % method
for key in sorted_header_keys:
@@ -162,7 +150,7 @@ def get_aws_metadata(headers, provider=None):
metadata = {}
for hkey in headers.keys():
if hkey.lower().startswith(metadata_prefix):
- val = urllib.unquote_plus(headers[hkey])
+ val = compat.unquote_plus(headers[hkey])
try:
metadata[hkey[len(metadata_prefix):]] = unicode(val, 'utf-8')
except UnicodeDecodeError:
@@ -173,10 +161,10 @@ def get_aws_metadata(headers, provider=None):
def retry_url(url, retry_on_404=True, num_retries=10):
for i in range(0, num_retries):
try:
- req = urllib2.Request(url)
- resp = urllib2.urlopen(req)
+ req = compat.Request(url)
+ resp = compat.urlopen(req)
return resp.read()
- except urllib2.HTTPError, e:
+ except compat.HTTPError as e:
# in 2.6 you use getcode(), in 2.5 and earlier you use code
if hasattr(e, 'getcode'):
code = e.getcode()
@@ -275,7 +263,7 @@ def update_dme(username, password, dme_id, ip_address):
"""
dme_url = 'https://www.dnsmadeeasy.com/servlet/updateip'
dme_url += '?username=%s&password=%s&id=%s&ip=%s'
- s = urllib2.urlopen(dme_url % (username, password, dme_id, ip_address))
+ s = compat.urlopen(dme_url % (username, password, dme_id, ip_address))
return s.read()
def fetch_file(uri, file=None, username=None, password=None):
@@ -297,12 +285,12 @@ def fetch_file(uri, file=None, username=None, password=None):
key.get_contents_to_file(file)
else:
if username and password:
- passman = urllib2.HTTPPasswordMgrWithDefaultRealm()
+ passman = compat.HTTPPasswordMgrWithDefaultRealm()
passman.add_password(None, uri, username, password)
- authhandler = urllib2.HTTPBasicAuthHandler(passman)
- opener = urllib2.build_opener(authhandler)
- urllib2.install_opener(opener)
- s = urllib2.urlopen(uri)
+ authhandler = compat.HTTPBasicAuthHandler(passman)
+ opener = compat.build_opener(authhandler)
+ compat.install_opener(opener)
+ s = compat.urlopen(uri)
file.write(s.read())
file.seek(0)
except:
@@ -316,7 +304,7 @@ class ShellCommand(object):
def __init__(self, command, wait=True, fail_fast=False, cwd = None):
self.exit_code = 0
self.command = command
- self.log_fp = StringIO.StringIO()
+ self.log_fp = compat.StringIO()
self.wait = wait
self.fail_fast = fail_fast
self.run(cwd = cwd)
@@ -397,7 +385,7 @@ class AuthSMTPHandler(logging.handlers.SMTPHandler):
self.fromaddr,
','.join(self.toaddrs),
self.getSubject(record),
- formatdate(), msg)
+ compat.formatdate(), msg)
smtp.sendmail(self.fromaddr, self.toaddrs, msg)
smtp.quit()
except (KeyboardInterrupt, SystemExit):
@@ -564,20 +552,20 @@ def notify(subject, body=None, html_body=None, to_string=None, attachments=None,
if to_string:
try:
from_string = boto.config.get_value('Notification', 'smtp_from', 'boto')
- msg = MIMEMultipart()
+ msg = compat.MIMEMultipart()
msg['From'] = from_string
msg['Reply-To'] = from_string
msg['To'] = to_string
- msg['Date'] = formatdate(localtime=True)
+ msg['Date'] = compat.formatdate(localtime=True)
msg['Subject'] = subject
if body:
- msg.attach(MIMEText(body))
+ msg.attach(compat.MIMEText(body))
if html_body:
- part = MIMEBase('text', 'html')
+ part = compat.MIMEBase('text', 'html')
part.set_payload(html_body)
- Encoders.encode_base64(part)
+ compat.Encoders.encode_base64(part)
msg.attach(part)
for part in attachments:
@@ -606,9 +594,9 @@ def notify(subject, body=None, html_body=None, to_string=None, attachments=None,
boto.log.exception('notify failed')
def get_utf8_value(value):
- if not isinstance(value, str) and not isinstance(value, unicode):
+ if not isinstance(value, compat.string_types) and not isinstance(value, compat.text_type):
value = str(value)
- if isinstance(value, unicode):
+ if isinstance(value, compat.text_type):
return value.encode('utf-8')
else:
return value
@@ -650,23 +638,23 @@ def write_mime_multipart(content, compress=False, deftype='text/plain', delimite
:return: Final mime multipart
:rtype: str:
"""
- wrapper = MIMEMultipart()
+ wrapper = compat.MIMEMultipart()
for name,con in content:
definite_type = guess_mime_type(con, deftype)
maintype, subtype = definite_type.split('/', 1)
if maintype == 'text':
- mime_con = MIMEText(con, _subtype=subtype)
+ mime_con = compat.MIMEText(con, _subtype=subtype)
else:
- mime_con = MIMEBase(maintype, subtype)
+ mime_con = compat.MIMEBase(maintype, subtype)
mime_con.set_payload(con)
# Encode the payload using Base64
- Encoders.encode_base64(mime_con)
+ compat.Encoders.encode_base64(mime_con)
mime_con.add_header('Content-Disposition', 'attachment', filename=name)
wrapper.attach(mime_con)
rcontent = wrapper.as_string()
if compress:
- buf = StringIO.StringIO()
+ buf = compat.StringIO()
gz = gzip.GzipFile(mode='wb', fileobj=buf)
try:
gz.write(rcontent)
@@ -728,7 +716,7 @@ def compute_md5(fp, buf_size=8192, size=None):
plain digest as the second element and the data size as
the third element.
"""
- m = md5()
+ m = compat.md5()
spos = fp.tell()
if size and size < buf_size:
s = fp.read(size)
@@ -745,7 +733,7 @@ def compute_md5(fp, buf_size=8192, size=None):
else:
s = fp.read(buf_size)
hex_md5 = m.hexdigest()
- base64md5 = base64.encodestring(m.digest())
+ base64md5 = base64.b64encode(m.digest())
if base64md5[-1] == '\n':
base64md5 = base64md5[0:-1]
# data_size based on bytes read.
diff --git a/boto/vpc/dhcpoptions.py b/boto/vpc/dhcpoptions.py
index 810d9cf8..74846838 100644
--- a/boto/vpc/dhcpoptions.py
+++ b/boto/vpc/dhcpoptions.py
@@ -38,7 +38,7 @@ class DhcpConfigSet(dict):
def startElement(self, name, attrs, connection):
if name == 'valueSet':
- if not self.has_key(self._name):
+ if self._name not in self:
self[self._name] = DhcpValueSet()
return self[self._name]
diff --git a/docs/source/conf.py b/docs/source/conf.py
index fa1d0c24..deeda987 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -26,7 +26,7 @@ intersphinx_mapping = {'http://docs.python.org/': None}
try:
release = os.environ.get('SVN_REVISION', 'HEAD')
print release
-except Exception, e:
+except Exception as e:
print e
html_title = "boto v%s" % version
diff --git a/tests/autoscale/test_connection.py b/tests/autoscale/test_connection.py
index a650dcef..e65cbc84 100644
--- a/tests/autoscale/test_connection.py
+++ b/tests/autoscale/test_connection.py
@@ -43,7 +43,7 @@ class AutoscaleConnectionTest(unittest.TestCase):
# have any autoscale groups to introspect. It's useful, however, to
# catch simple errors
- print '--- running %s tests ---' % self.__class__.__name__
+ print('--- running %s tests ---' % self.__class__.__name__)
c = AutoScaleConnection()
self.assertTrue(repr(c).startswith('AutoScaleConnection'))
@@ -161,6 +161,6 @@ class AutoscaleConnectionTest(unittest.TestCase):
assert not found
- print '--- tests completed ---'
+ print('--- tests completed ---')
diff --git a/tests/cloudfront/test_signed_urls.py b/tests/cloudfront/test_signed_urls.py
index 671ed7f3..79ac3f90 100644
--- a/tests/cloudfront/test_signed_urls.py
+++ b/tests/cloudfront/test_signed_urls.py
@@ -6,6 +6,7 @@ except ImportError:
import json
from textwrap import dedent
from boto.cloudfront.distribution import Distribution
+import boto.compat as compat
class CloudfrontSignedUrlsTest(unittest.TestCase):
def setUp(self):
@@ -103,7 +104,7 @@ class CloudfrontSignedUrlsTest(unittest.TestCase):
"v0pYdWJkflDKJ3xIu7lbwRpSkG98NBlgPi4ZJpRRnVX4kXAJK6td"
"Nx6FucDB7OVqzcxkxHsGFd8VCG1BkC-Afh9~lOCMIYHIaiOB6~5j"
"t9w2EOwi6sIIqrg_")
- unicode_policy = unicode(self.canned_policy)
+ unicode_policy = compat.unicode(self.canned_policy)
sig = self.dist._sign_string(unicode_policy, private_key_string=self.pk_str)
encoded_sig = self.dist._url_base64_encode(sig)
self.assertEqual(expected, encoded_sig)
@@ -141,16 +142,16 @@ class CloudfrontSignedUrlsTest(unittest.TestCase):
policy = self.dist._canned_policy(url, expires)
policy = json.loads(policy)
- self.assertEqual(1, len(policy.keys()))
+ self.assertEqual(1, len(list(policy.keys())))
statements = policy["Statement"]
self.assertEqual(1, len(statements))
statement = statements[0]
resource = statement["Resource"]
self.assertEqual(url, resource)
condition = statement["Condition"]
- self.assertEqual(1, len(condition.keys()))
+ self.assertEqual(1, len(list(condition.keys())))
date_less_than = condition["DateLessThan"]
- self.assertEqual(1, len(date_less_than.keys()))
+ self.assertEqual(1, len(list(date_less_than.keys())))
aws_epoch_time = date_less_than["AWS:EpochTime"]
self.assertEqual(expires, aws_epoch_time)
@@ -164,16 +165,16 @@ class CloudfrontSignedUrlsTest(unittest.TestCase):
policy = self.dist._custom_policy(url, expires=expires)
policy = json.loads(policy)
- self.assertEqual(1, len(policy.keys()))
+ self.assertEqual(1, len(list(policy.keys())))
statements = policy["Statement"]
self.assertEqual(1, len(statements))
statement = statements[0]
resource = statement["Resource"]
self.assertEqual(url, resource)
condition = statement["Condition"]
- self.assertEqual(1, len(condition.keys()))
+ self.assertEqual(1, len(list(condition.keys())))
date_less_than = condition["DateLessThan"]
- self.assertEqual(1, len(date_less_than.keys()))
+ self.assertEqual(1, len(list(date_less_than.keys())))
aws_epoch_time = date_less_than["AWS:EpochTime"]
self.assertEqual(expires, aws_epoch_time)
@@ -187,16 +188,16 @@ class CloudfrontSignedUrlsTest(unittest.TestCase):
policy = self.dist._custom_policy(url, valid_after=valid_after)
policy = json.loads(policy)
- self.assertEqual(1, len(policy.keys()))
+ self.assertEqual(1, len(list(policy.keys())))
statements = policy["Statement"]
self.assertEqual(1, len(statements))
statement = statements[0]
resource = statement["Resource"]
self.assertEqual(url, resource)
condition = statement["Condition"]
- self.assertEqual(1, len(condition.keys()))
+ self.assertEqual(1, len(list(condition.keys())))
date_greater_than = condition["DateGreaterThan"]
- self.assertEqual(1, len(date_greater_than.keys()))
+ self.assertEqual(1, len(list(date_greater_than.keys())))
aws_epoch_time = date_greater_than["AWS:EpochTime"]
self.assertEqual(valid_after, aws_epoch_time)
@@ -210,16 +211,16 @@ class CloudfrontSignedUrlsTest(unittest.TestCase):
policy = self.dist._custom_policy(url, ip_address=ip_range)
policy = json.loads(policy)
- self.assertEqual(1, len(policy.keys()))
+ self.assertEqual(1, len(list(policy.keys())))
statements = policy["Statement"]
self.assertEqual(1, len(statements))
statement = statements[0]
resource = statement["Resource"]
self.assertEqual(url, resource)
condition = statement["Condition"]
- self.assertEqual(1, len(condition.keys()))
+ self.assertEqual(1, len(list(condition.keys())))
ip_address = condition["IpAddress"]
- self.assertEqual(1, len(ip_address.keys()))
+ self.assertEqual(1, len(list(ip_address.keys())))
source_ip = ip_address["AWS:SourceIp"]
self.assertEqual("%s/32" % ip_range, source_ip)
@@ -233,16 +234,16 @@ class CloudfrontSignedUrlsTest(unittest.TestCase):
policy = self.dist._custom_policy(url, ip_address=ip_range)
policy = json.loads(policy)
- self.assertEqual(1, len(policy.keys()))
+ self.assertEqual(1, len(list(policy.keys())))
statements = policy["Statement"]
self.assertEqual(1, len(statements))
statement = statements[0]
resource = statement["Resource"]
self.assertEqual(url, resource)
condition = statement["Condition"]
- self.assertEqual(1, len(condition.keys()))
+ self.assertEqual(1, len(list(condition.keys())))
ip_address = condition["IpAddress"]
- self.assertEqual(1, len(ip_address.keys()))
+ self.assertEqual(1, len(list(ip_address.keys())))
source_ip = ip_address["AWS:SourceIp"]
self.assertEqual(ip_range, source_ip)
@@ -260,27 +261,27 @@ class CloudfrontSignedUrlsTest(unittest.TestCase):
ip_address=ip_range)
policy = json.loads(policy)
- self.assertEqual(1, len(policy.keys()))
+ self.assertEqual(1, len(list(policy.keys())))
statements = policy["Statement"]
self.assertEqual(1, len(statements))
statement = statements[0]
resource = statement["Resource"]
self.assertEqual(url, resource)
condition = statement["Condition"]
- self.assertEqual(3, len(condition.keys()))
+ self.assertEqual(3, len(list(condition.keys())))
#check expires condition
date_less_than = condition["DateLessThan"]
- self.assertEqual(1, len(date_less_than.keys()))
+ self.assertEqual(1, len(list(date_less_than.keys())))
aws_epoch_time = date_less_than["AWS:EpochTime"]
self.assertEqual(expires, aws_epoch_time)
#check valid_after condition
date_greater_than = condition["DateGreaterThan"]
- self.assertEqual(1, len(date_greater_than.keys()))
+ self.assertEqual(1, len(list(date_greater_than.keys())))
aws_epoch_time = date_greater_than["AWS:EpochTime"]
self.assertEqual(valid_after, aws_epoch_time)
#check source ip address condition
ip_address = condition["IpAddress"]
- self.assertEqual(1, len(ip_address.keys()))
+ self.assertEqual(1, len(list(ip_address.keys())))
source_ip = ip_address["AWS:SourceIp"]
self.assertEqual(ip_range, source_ip)
diff --git a/tests/db/test_lists.py b/tests/db/test_lists.py
index d9c76392..5b70810c 100644
--- a/tests/db/test_lists.py
+++ b/tests/db/test_lists.py
@@ -91,7 +91,7 @@ class TestLists(object):
t.put()
self.objs.append(t)
time.sleep(3)
- print SimpleListModel.all().filter("strs !=", "Fizzle").get_query()
+ print(SimpleListModel.all().filter("strs !=", "Fizzle").get_query())
for tt in SimpleListModel.all().filter("strs !=", "Fizzle"):
- print tt.strs
+ print(tt.strs)
assert("Fizzle" not in tt.strs)
diff --git a/tests/db/test_query.py b/tests/db/test_query.py
index 047bf873..7e9ce5f7 100644
--- a/tests/db/test_query.py
+++ b/tests/db/test_query.py
@@ -148,5 +148,5 @@ class TestQuerying(object):
"""Test with a "like" expression"""
query = SimpleModel.all()
query.filter("strs like", "%oo%")
- print query.get_query()
+ print(query.get_query())
assert(query.count() == 1)
diff --git a/tests/db/test_sequence.py b/tests/db/test_sequence.py
index 35f4b352..df1ab3a8 100644
--- a/tests/db/test_sequence.py
+++ b/tests/db/test_sequence.py
@@ -60,11 +60,11 @@ class TestDBHandler(object):
s = Sequence()
self.sequences.append(s)
assert(s.val == 0)
- assert(s.next() == 1)
- assert(s.next() == 2)
+ assert(next(s) == 1)
+ assert(next(s) == 2)
s2 = Sequence(s.id)
assert(s2.val == 2)
- assert(s.next() == 3)
+ assert(next(s) == 3)
assert(s.val == 3)
assert(s2.val == 3)
@@ -73,7 +73,7 @@ class TestDBHandler(object):
s = Sequence(fnc=increment_string)
self.sequences.append(s)
assert(s.val == "A")
- assert(s.next() == "B")
+ assert(next(s) == "B")
def test_fib(self):
"""Test the fibonacci sequence generator"""
@@ -93,7 +93,7 @@ class TestDBHandler(object):
assert(s.val == 1)
# Just check the first few numbers in the sequence
for v in [1,2,3,5,8,13,21,34,55,89,144]:
- assert(s.next() == v)
+ assert(next(s) == v)
assert(s.val == v)
assert(s2.val == v) # it shouldn't matter which reference we use since it's garunteed to be consistent
@@ -103,7 +103,7 @@ class TestDBHandler(object):
s = Sequence(fnc=increment_string)
self.sequences.append(s)
assert(s.val == "A")
- assert(s.next() == "B")
+ assert(next(s) == "B")
s.val = "Z"
assert(s.val == "Z")
- assert(s.next() == "AA")
+ assert(next(s) == "AA")
diff --git a/tests/devpay/test_s3.py b/tests/devpay/test_s3.py
index bb91125b..c0e79a04 100644
--- a/tests/devpay/test_s3.py
+++ b/tests/devpay/test_s3.py
@@ -27,7 +27,7 @@ Some unit tests for the S3Connection
import time
import os
-import urllib
+import boto.compat as compat
from boto.s3.connection import S3Connection
from boto.exception import S3PermissionsError
@@ -37,7 +37,7 @@ from boto.exception import S3PermissionsError
AMAZON_USER_TOKEN = '{UserToken}...your token here...'
DEVPAY_HEADERS = { 'x-amz-security-token': AMAZON_USER_TOKEN }
-print '--- running S3Connection tests (DevPay) ---'
+print('--- running S3Connection tests (DevPay) ---')
c = S3Connection()
# create a new, empty bucket
bucket_name = 'test-%d' % int(time.time())
@@ -66,10 +66,10 @@ assert s1 == fp.read(), 'corrupted file'
fp.close()
# test generated URLs
url = k.generate_url(3600, headers=DEVPAY_HEADERS)
-file = urllib.urlopen(url)
+file = compat.urlopen(url)
assert s1 == file.read(), 'invalid URL %s' % url
url = k.generate_url(3600, force_http=True, headers=DEVPAY_HEADERS)
-file = urllib.urlopen(url)
+file = compat.urlopen(url)
assert s1 == file.read(), 'invalid URL %s' % url
bucket.delete_key(k, headers=DEVPAY_HEADERS)
# test a few variations on get_all_keys - first load some data
@@ -174,4 +174,4 @@ for k in all:
c.delete_bucket(bucket, headers=DEVPAY_HEADERS)
-print '--- tests completed ---'
+print('--- tests completed ---')
diff --git a/tests/dynamodb/test_layer1.py b/tests/dynamodb/test_layer1.py
index 5964118d..97173ced 100644
--- a/tests/dynamodb/test_layer1.py
+++ b/tests/dynamodb/test_layer1.py
@@ -35,7 +35,7 @@ json_doc = """{"access_key": "ASIAIV7R2NUUJ6SB7GKQ", "secret_key": "eIfijGxJlejH
class DynamoDBLayer1Test (unittest.TestCase):
def test_layer1_basic(self):
- print '--- running DynamoDB Layer1 tests ---'
+ print('--- running DynamoDB Layer1 tests ---')
# Create a Layer1 connection with an expired set of
# credentials to test the automatic renewal of tokens
@@ -136,7 +136,7 @@ class DynamoDBLayer1Test (unittest.TestCase):
expected = {'Views': {'Value': {'N': '1'}}}
try:
result = c.delete_item(table_name, key=key1, expected=expected)
- except c.ResponseError, e:
+ except c.ResponseError as e:
assert e.error_code == 'ConditionalCheckFailedException'
# Now update the existing object
@@ -205,5 +205,5 @@ class DynamoDBLayer1Test (unittest.TestCase):
result = c.delete_table(table_name)
assert result['TableDescription']['TableStatus'] == 'DELETING'
- print '--- tests completed ---'
+ print('--- tests completed ---')
diff --git a/tests/dynamodb/test_layer2.py b/tests/dynamodb/test_layer2.py
index 63708a1a..bf897434 100644
--- a/tests/dynamodb/test_layer2.py
+++ b/tests/dynamodb/test_layer2.py
@@ -31,11 +31,12 @@ from boto.dynamodb.exceptions import DynamoDBKeyNotFoundError, DynamoDBItemError
from boto.dynamodb.layer2 import Layer2
from boto.dynamodb.types import get_dynamodb_type
from boto.dynamodb.condition import *
+import boto.compat as compat
class DynamoDBLayer2Test (unittest.TestCase):
def test_layer2_basic(self):
- print '--- running Amazon DynamoDB Layer2 tests ---'
+ print('--- running Amazon DynamoDB Layer2 tests ---')
c = Layer2()
# First create a schema for the table
@@ -132,7 +133,7 @@ class DynamoDBLayer2Test (unittest.TestCase):
# make sure the put() succeeds
try:
item1.put()
- except c.layer1.ResponseError, e:
+ except c.layer1.ResponseError as e:
raise Exception("Item put failed: %s" % e)
# Try to get an item that does not exist.
@@ -146,7 +147,8 @@ class DynamoDBLayer2Test (unittest.TestCase):
assert item1_copy.range_key == item1.range_key
for attr_name in item1_copy:
val = item1_copy[attr_name]
- if isinstance(val, (int, long, float, basestring)):
+ if isinstance(val, (compat.integer_types, float,
+ compat.string_types)):
assert val == item1[attr_name]
# Try retrieving only select attributes
@@ -168,7 +170,7 @@ class DynamoDBLayer2Test (unittest.TestCase):
expected = {'Views': 1}
try:
item1.delete(expected_value=expected)
- except c.layer1.ResponseError, e:
+ except c.layer1.ResponseError as e:
assert e.error_code == 'ConditionalCheckFailedException'
else:
raise Exception("Expected Value condition failed")
@@ -177,7 +179,7 @@ class DynamoDBLayer2Test (unittest.TestCase):
expected = {'FooBar': True}
try:
item1.delete(expected_value=expected)
- except c.layer1.ResponseError, e:
+ except c.layer1.ResponseError as e:
pass
# Now update the existing object
@@ -199,9 +201,9 @@ class DynamoDBLayer2Test (unittest.TestCase):
item1_updated = table.get_item(item1_key, item1_range,
consistent_read=True)
assert item1_updated['Replies'] == item1_attrs['Replies'] + 2
- self.assertFalse(item1_updated.has_key(removed_attr))
+ self.assertFalse(removed_attr in item1_updated)
self.assertTrue(removed_tag not in item1_updated['Tags'])
- self.assertTrue(item1_updated.has_key('RepliesBy'))
+ self.assertTrue('RepliesBy' in item1_updated)
self.assertTrue(item1_updated['RepliesBy'] == replies_by_set)
# Put a few more items into the table
@@ -349,4 +351,4 @@ class DynamoDBLayer2Test (unittest.TestCase):
assert table.status == 'DELETING'
assert table2.status == 'DELETING'
- print '--- tests completed ---'
+ print('--- tests completed ---')
diff --git a/tests/ec2/cloudwatch/test_connection.py b/tests/ec2/cloudwatch/test_connection.py
index 0479d650..75d6a6ce 100644
--- a/tests/ec2/cloudwatch/test_connection.py
+++ b/tests/ec2/cloudwatch/test_connection.py
@@ -247,10 +247,10 @@ class CloudWatchConnectionTest(unittest.TestCase):
alarms = c.describe_alarms()
self.assertEquals(alarms[0].name, 'FancyAlarm')
self.assertEquals(alarms[0].comparison, '<')
- self.assertEquals(alarms[0].dimensions, {u'Job': [u'ANiceCronJob']})
+ self.assertEquals(alarms[0].dimensions, {'Job': ['ANiceCronJob']})
self.assertEquals(alarms[1].name, 'SuperFancyAlarm')
self.assertEquals(alarms[1].comparison, '>')
- self.assertEquals(alarms[1].dimensions, {u'Job': [u'ABadCronJob']})
+ self.assertEquals(alarms[1].dimensions, {'Job': ['ABadCronJob']})
if __name__ == '__main__':
unittest.main()
diff --git a/tests/ec2/test_connection.py b/tests/ec2/test_connection.py
index 6b7ece1f..4d6bdf5d 100644
--- a/tests/ec2/test_connection.py
+++ b/tests/ec2/test_connection.py
@@ -37,7 +37,7 @@ class EC2ConnectionTest (unittest.TestCase):
# this is my user_id, if you want to run these tests you should
# replace this with yours or they won't work
user_id = '963068290131'
- print '--- running EC2Connection tests ---'
+ print('--- running EC2Connection tests ---')
c = EC2Connection()
# get list of private AMI's
rs = c.get_all_images(owners=[user_id])
@@ -48,14 +48,14 @@ class EC2ConnectionTest (unittest.TestCase):
status = image.set_launch_permissions(group_names=['all'])
assert status
d = image.get_launch_permissions()
- assert d.has_key('groups')
+ assert 'groups' in d
assert len(d['groups']) > 0
# now remove that permission
status = image.remove_launch_permissions(group_names=['all'])
assert status
time.sleep(10)
d = image.get_launch_permissions()
- assert not d.has_key('groups')
+ assert 'groups' not in d
# create 2 new security groups
group1_name = 'test-%d' % int(time.time())
@@ -110,7 +110,7 @@ class EC2ConnectionTest (unittest.TestCase):
reservation = image.run(security_groups=[group.name])
instance = reservation.instances[0]
while instance.state != 'running':
- print '\tinstance is %s' % instance.state
+ print('\tinstance is %s' % instance.state)
time.sleep(30)
instance.update()
# instance in now running, try to telnet to port 80
@@ -167,4 +167,4 @@ class EC2ConnectionTest (unittest.TestCase):
assert len(l[0].product_codes) == 1
assert l[0].product_codes[0] == demo_paid_ami_product_code
- print '--- tests completed ---'
+ print('--- tests completed ---')
diff --git a/tests/emr/test_emr_responses.py b/tests/emr/test_emr_responses.py
index 77ec494b..cf6284e4 100644
--- a/tests/emr/test_emr_responses.py
+++ b/tests/emr/test_emr_responses.py
@@ -329,7 +329,7 @@ class TestEMRResponses(unittest.TestCase):
return rs
def _assert_fields(self, response, **fields):
- for field, expected in fields.items():
+ for field, expected in list(fields.items()):
actual = getattr(response, field)
self.assertEquals(expected, actual,
"Field %s: %r != %r" % (field, expected, actual))
diff --git a/tests/mturk/_init_environment.py b/tests/mturk/_init_environment.py
index e709785a..b2d649ea 100644
--- a/tests/mturk/_init_environment.py
+++ b/tests/mturk/_init_environment.py
@@ -1,24 +1,24 @@
-import os
-import functools
-
-live_connection = False
-mturk_host = 'mechanicalturk.sandbox.amazonaws.com'
-external_url = 'http://www.example.com/'
-
-try:
- local = os.path.join(os.path.dirname(__file__), 'local.py')
- execfile(local)
-except:
- pass
-
-if live_connection:
- #TODO: you must set the auth credentials to something valid
- from boto.mturk.connection import MTurkConnection
-else:
- # Here the credentials must be set, but it doesn't matter what
- # they're set to.
- os.environ.setdefault('AWS_ACCESS_KEY_ID', 'foo')
- os.environ.setdefault('AWS_SECRET_ACCESS_KEY', 'bar')
- from mocks import MTurkConnection
-
-SetHostMTurkConnection = functools.partial(MTurkConnection, host=mturk_host)
+import os
+import functools
+
+live_connection = False
+mturk_host = 'mechanicalturk.sandbox.amazonaws.com'
+external_url = 'http://www.example.com/'
+
+try:
+ local = os.path.join(os.path.dirname(__file__), 'local.py')
+ exec(compile(open(local).read(), local, 'exec'))
+except:
+ pass
+
+if live_connection:
+ #TODO: you must set the auth credentials to something valid
+ from boto.mturk.connection import MTurkConnection
+else:
+ # Here the credentials must be set, but it doesn't matter what
+ # they're set to.
+ os.environ.setdefault('AWS_ACCESS_KEY_ID', 'foo')
+ os.environ.setdefault('AWS_SECRET_ACCESS_KEY', 'bar')
+ from .mocks import MTurkConnection
+
+SetHostMTurkConnection = functools.partial(MTurkConnection, host=mturk_host)
diff --git a/tests/mturk/all_tests.py b/tests/mturk/all_tests.py
index ba2e1228..6a148e2d 100644
--- a/tests/mturk/all_tests.py
+++ b/tests/mturk/all_tests.py
@@ -3,11 +3,11 @@ import unittest
import doctest
from glob import glob
-from create_hit_test import *
-from create_hit_with_qualifications import *
-from create_hit_external import *
-from create_hit_with_qualifications import *
-from hit_persistence import *
+from .create_hit_test import *
+from .create_hit_with_qualifications import *
+from .create_hit_external import *
+from .create_hit_with_qualifications import *
+from .hit_persistence import *
doctest_suite = doctest.DocFileSuite(
*glob('*.doctest'),
diff --git a/tests/mturk/cleanup_tests.py b/tests/mturk/cleanup_tests.py
index 2381dd9b..787706f2 100644
--- a/tests/mturk/cleanup_tests.py
+++ b/tests/mturk/cleanup_tests.py
@@ -1,6 +1,6 @@
import itertools
-from _init_environment import SetHostMTurkConnection
+from ._init_environment import SetHostMTurkConnection
def description_filter(substring):
return lambda hit: substring in hit.Title
@@ -24,22 +24,22 @@ def cleanup():
is_boto = description_filter('Boto')
- print 'getting hits...'
+ print('getting hits...')
all_hits = list(conn.get_all_hits())
is_reviewable = lambda hit: hit.HITStatus == 'Reviewable'
is_not_reviewable = lambda hit: not is_reviewable(hit)
- hits_to_process = filter(is_boto, all_hits)
- hits_to_disable = filter(is_not_reviewable, hits_to_process)
- hits_to_dispose = filter(is_reviewable, hits_to_process)
- print 'disabling/disposing %d/%d hits' % (len(hits_to_disable), len(hits_to_dispose))
- map(disable_hit, hits_to_disable)
- map(dispose_hit, hits_to_dispose)
+ hits_to_process = list(filter(is_boto, all_hits))
+ hits_to_disable = list(filter(is_not_reviewable, hits_to_process))
+ hits_to_dispose = list(filter(is_reviewable, hits_to_process))
+ print('disabling/disposing %d/%d hits' % (len(hits_to_disable), len(hits_to_dispose)))
+ list(map(disable_hit, hits_to_disable))
+ list(map(dispose_hit, hits_to_dispose))
total_hits = len(all_hits)
hits_processed = len(hits_to_process)
skipped = total_hits - hits_processed
fmt = 'Processed: %(total_hits)d HITs, disabled/disposed: %(hits_processed)d, skipped: %(skipped)d'
- print fmt % vars()
+ print(fmt % vars())
if __name__ == '__main__':
cleanup()
diff --git a/tests/mturk/common.py b/tests/mturk/common.py
index 40e2726c..94cc15cb 100644
--- a/tests/mturk/common.py
+++ b/tests/mturk/common.py
@@ -1,44 +1,44 @@
-import unittest
-import uuid
-import datetime
-
-from boto.mturk.question import (
- Question, QuestionContent, AnswerSpecification, FreeTextAnswer,
-)
-from _init_environment import SetHostMTurkConnection
-
-class MTurkCommon(unittest.TestCase):
- def setUp(self):
- self.conn = SetHostMTurkConnection()
-
- @staticmethod
- def get_question():
- # create content for a question
- qn_content = QuestionContent()
- qn_content.append_field('Title', 'Boto no hit type question content')
- qn_content.append_field('Text', 'What is a boto no hit type?')
-
- # create the question specification
- qn = Question(identifier=str(uuid.uuid4()),
- content=qn_content,
- answer_spec=AnswerSpecification(FreeTextAnswer()))
- return qn
-
- @staticmethod
- def get_hit_params():
- return dict(
- lifetime=datetime.timedelta(minutes=65),
- max_assignments=2,
- title='Boto create_hit title',
- description='Boto create_hit description',
- keywords=['boto', 'test'],
- reward=0.23,
- duration=datetime.timedelta(minutes=6),
- approval_delay=60*60,
- annotation='An annotation from boto create_hit test',
- response_groups=['Minimal',
- 'HITDetail',
- 'HITQuestion',
- 'HITAssignmentSummary',],
- )
-
+import unittest
+import uuid
+import datetime
+
+from boto.mturk.question import (
+ Question, QuestionContent, AnswerSpecification, FreeTextAnswer,
+)
+from ._init_environment import SetHostMTurkConnection
+
+class MTurkCommon(unittest.TestCase):
+ def setUp(self):
+ self.conn = SetHostMTurkConnection()
+
+ @staticmethod
+ def get_question():
+ # create content for a question
+ qn_content = QuestionContent()
+ qn_content.append_field('Title', 'Boto no hit type question content')
+ qn_content.append_field('Text', 'What is a boto no hit type?')
+
+ # create the question specification
+ qn = Question(identifier=str(uuid.uuid4()),
+ content=qn_content,
+ answer_spec=AnswerSpecification(FreeTextAnswer()))
+ return qn
+
+ @staticmethod
+ def get_hit_params():
+ return dict(
+ lifetime=datetime.timedelta(minutes=65),
+ max_assignments=2,
+ title='Boto create_hit title',
+ description='Boto create_hit description',
+ keywords=['boto', 'test'],
+ reward=0.23,
+ duration=datetime.timedelta(minutes=6),
+ approval_delay=60*60,
+ annotation='An annotation from boto create_hit test',
+ response_groups=['Minimal',
+ 'HITDetail',
+ 'HITQuestion',
+ 'HITAssignmentSummary',],
+ )
+
diff --git a/tests/mturk/create_hit_external.py b/tests/mturk/create_hit_external.py
index 9e955a65..86c3cf2a 100644
--- a/tests/mturk/create_hit_external.py
+++ b/tests/mturk/create_hit_external.py
@@ -3,7 +3,7 @@ import uuid
import datetime
from boto.mturk.question import ExternalQuestion
-from _init_environment import SetHostMTurkConnection, external_url
+from ._init_environment import SetHostMTurkConnection, external_url
class Test(unittest.TestCase):
def test_create_hit_external(self):
diff --git a/tests/mturk/create_hit_test.py b/tests/mturk/create_hit_test.py
index ea134b4c..320515d7 100644
--- a/tests/mturk/create_hit_test.py
+++ b/tests/mturk/create_hit_test.py
@@ -1,21 +1,21 @@
-import unittest
-import os
-from boto.mturk.question import QuestionForm
-
-from common import MTurkCommon
-
-class TestHITCreation(MTurkCommon):
- def testCallCreateHitWithOneQuestion(self):
- create_hit_rs = self.conn.create_hit(
- question=self.get_question(),
- **self.get_hit_params()
- )
-
- def testCallCreateHitWithQuestionForm(self):
- create_hit_rs = self.conn.create_hit(
- questions=QuestionForm([self.get_question()]),
- **self.get_hit_params()
- )
-
-if __name__ == '__main__':
- unittest.main()
+import unittest
+import os
+from boto.mturk.question import QuestionForm
+
+from .common import MTurkCommon
+
+class TestHITCreation(MTurkCommon):
+ def testCallCreateHitWithOneQuestion(self):
+ create_hit_rs = self.conn.create_hit(
+ question=self.get_question(),
+ **self.get_hit_params()
+ )
+
+ def testCallCreateHitWithQuestionForm(self):
+ create_hit_rs = self.conn.create_hit(
+ questions=QuestionForm([self.get_question()]),
+ **self.get_hit_params()
+ )
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/tests/mturk/create_hit_with_qualifications.py b/tests/mturk/create_hit_with_qualifications.py
index 9ef2bc5c..f8c67ec0 100644
--- a/tests/mturk/create_hit_with_qualifications.py
+++ b/tests/mturk/create_hit_with_qualifications.py
@@ -10,7 +10,7 @@ def test():
qualifications.add(PercentAssignmentsApprovedRequirement(comparator="GreaterThan", integer_value="95"))
create_hit_rs = conn.create_hit(question=q, lifetime=60*65,max_assignments=2,title="Boto External Question Test", keywords=keywords,reward = 0.05, duration=60*6,approval_delay=60*60, annotation='An annotation from boto external question test', qualifications=qualifications)
assert(create_hit_rs.status == True)
- print create_hit_rs.HITTypeId
+ print(create_hit_rs.HITTypeId)
if __name__ == "__main__":
test()
diff --git a/tests/mturk/hit_persistence.py b/tests/mturk/hit_persistence.py
index 04ebd0c2..812f10f2 100644
--- a/tests/mturk/hit_persistence.py
+++ b/tests/mturk/hit_persistence.py
@@ -1,27 +1,27 @@
-import unittest
-import pickle
-
-from common import MTurkCommon
-
-class TestHITPersistence(MTurkCommon):
- def create_hit_result(self):
- return self.conn.create_hit(
- question=self.get_question(), **self.get_hit_params()
- )
-
- def test_pickle_hit_result(self):
- result = self.create_hit_result()
- new_result = pickle.loads(pickle.dumps(result))
-
- def test_pickle_deserialized_version(self):
- """
- It seems the technique used to store and reload the object must
- result in an equivalent object, or subsequent pickles may fail.
- This tests a double-pickle to elicit that error.
- """
- result = self.create_hit_result()
- new_result = pickle.loads(pickle.dumps(result))
- pickle.dumps(new_result)
-
-if __name__ == '__main__':
- unittest.main()
+import unittest
+import pickle
+
+from .common import MTurkCommon
+
+class TestHITPersistence(MTurkCommon):
+ def create_hit_result(self):
+ return self.conn.create_hit(
+ question=self.get_question(), **self.get_hit_params()
+ )
+
+ def test_pickle_hit_result(self):
+ result = self.create_hit_result()
+ new_result = pickle.loads(pickle.dumps(result))
+
+ def test_pickle_deserialized_version(self):
+ """
+ It seems the technique used to store and reload the object must
+ result in an equivalent object, or subsequent pickles may fail.
+ This tests a double-pickle to elicit that error.
+ """
+ result = self.create_hit_result()
+ new_result = pickle.loads(pickle.dumps(result))
+ pickle.dumps(new_result)
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/tests/mturk/selenium_support.py b/tests/mturk/selenium_support.py
index f1552cb2..7e51e434 100644
--- a/tests/mturk/selenium_support.py
+++ b/tests/mturk/selenium_support.py
@@ -1,4 +1,4 @@
-from __future__ import absolute_import
+
from boto.mturk.test.support import unittest
sel_args = ('localhost', 4444, '*chrome', 'https://workersandbox.mturk.com')
@@ -6,7 +6,7 @@ sel_args = ('localhost', 4444, '*chrome', 'https://workersandbox.mturk.com')
class SeleniumFailed(object):
def __init__(self, message):
self.message = message
- def __nonzero__(self):
+ def __bool__(self):
return False
def has_selenium():
@@ -17,7 +17,7 @@ def has_selenium():
# a little trick to see if the server is responding
try:
sel.do_command('shutdown', '')
- except Exception, e:
+ except Exception as e:
if not 'Server Exception' in str(e):
raise
result = True
diff --git a/tests/mturk/test_disable_hit.py b/tests/mturk/test_disable_hit.py
index 09134433..2368d890 100644
--- a/tests/mturk/test_disable_hit.py
+++ b/tests/mturk/test_disable_hit.py
@@ -1,11 +1,11 @@
-from boto.mturk.test.support import unittest
-
-from common import MTurkCommon
-from boto.mturk.connection import MTurkRequestError
-
-class TestDisableHITs(MTurkCommon):
- def test_disable_invalid_hit(self):
- self.assertRaises(MTurkRequestError, self.conn.disable_hit, 'foo')
-
-if __name__ == '__main__':
- unittest.main()
+from boto.mturk.test.support import unittest
+
+from .common import MTurkCommon
+from boto.mturk.connection import MTurkRequestError
+
+class TestDisableHITs(MTurkCommon):
+ def test_disable_invalid_hit(self):
+ self.assertRaises(MTurkRequestError, self.conn.disable_hit, 'foo')
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/tests/s3/mock_storage_service.py b/tests/s3/mock_storage_service.py
index 2bd77439..ce267fc5 100644
--- a/tests/s3/mock_storage_service.py
+++ b/tests/s3/mock_storage_service.py
@@ -235,7 +235,7 @@ class MockBucket(object):
del self.keys[key_name]
def get_all_keys(self, headers=NOT_IMPL):
- return self.keys.itervalues()
+ return iter(self.keys.values())
def get_key(self, key_name, headers=NOT_IMPL, version_id=NOT_IMPL):
# Emulate behavior of boto when get_key called with non-existent key.
@@ -251,7 +251,7 @@ class MockBucket(object):
# deletions while iterating (e.g., during test cleanup).
result = []
key_name_set = set()
- for k in self.keys.itervalues():
+ for k in self.keys.values():
if k.name.startswith(prefix):
k_name_past_prefix = k.name[len(prefix):]
if delimiter:
@@ -328,7 +328,7 @@ class MockConnection(object):
return self.buckets[bucket_name]
def get_all_buckets(self, headers=NOT_IMPL):
- return self.buckets.itervalues()
+ return iter(self.buckets.values())
# We only mock a single provider/connection.
diff --git a/tests/s3/test_bucket.py b/tests/s3/test_bucket.py
index 58045efa..c9d386d2 100644
--- a/tests/s3/test_bucket.py
+++ b/tests/s3/test_bucket.py
@@ -87,7 +87,7 @@ class S3BucketTest (unittest.TestCase):
# grant log write perms to target bucket using canned-acl
self.bucket.set_acl("log-delivery-write")
target_bucket = self.bucket_name
- target_prefix = u"jp/ログ/"
+ target_prefix = "jp/ログ/"
# Check existing status is disabled
bls = sb.get_logging_status()
self.assertEqual(bls.target, None)
diff --git a/tests/s3/test_connection.py b/tests/s3/test_connection.py
index e9d372e5..020e174e 100644
--- a/tests/s3/test_connection.py
+++ b/tests/s3/test_connection.py
@@ -27,15 +27,15 @@ Some unit tests for the S3Connection
import unittest
import time
import os
-import urllib
from boto.s3.connection import S3Connection
from boto.s3.bucket import Bucket
from boto.exception import S3PermissionsError, S3ResponseError
+import boto.compat as compat
class S3ConnectionTest (unittest.TestCase):
def test_1_basic(self):
- print '--- running S3Connection tests ---'
+ print('--- running S3Connection tests ---')
c = S3Connection()
# create a new, empty bucket
bucket_name = 'test-%d' % int(time.time())
@@ -63,13 +63,13 @@ class S3ConnectionTest (unittest.TestCase):
fp.close()
# test generated URLs
url = k.generate_url(3600)
- file = urllib.urlopen(url)
+ file = compat.urlopen(url)
assert s1 == file.read(), 'invalid URL %s' % url
url = k.generate_url(3600, force_http=True)
- file = urllib.urlopen(url)
+ file = compat.urlopen(url)
assert s1 == file.read(), 'invalid URL %s' % url
url = k.generate_url(3600, force_http=True, headers={'x-amz-x-token' : 'XYZ'})
- file = urllib.urlopen(url)
+ file = compat.urlopen(url)
rh = {'response-content-disposition': 'attachment; filename="foo.txt"'}
url = k.generate_url(60, response_headers=rh)
assert s1 == file.read(), 'invalid URL %s' % url
@@ -119,7 +119,7 @@ class S3ConnectionTest (unittest.TestCase):
mdval2 = 'This is the second metadata value'
k.set_metadata(mdkey2, mdval2)
# try a unicode metadata value
- mdval3 = u'föö'
+ mdval3 = 'föö'
mdkey3 = 'meta3'
k.set_metadata(mdkey3, mdval3)
k.set_contents_from_string(s1)
@@ -189,7 +189,7 @@ class S3ConnectionTest (unittest.TestCase):
# now delete bucket
time.sleep(5)
c.delete_bucket(bucket)
- print '--- tests completed ---'
+ print('--- tests completed ---')
def test_basic_anon(self):
auth_con = S3Connection()
@@ -201,7 +201,7 @@ class S3ConnectionTest (unittest.TestCase):
anon_con = S3Connection(anon=True)
anon_bucket = Bucket(anon_con, bucket_name)
try:
- iter(anon_bucket.list()).next()
+ next(iter(anon_bucket.list()))
self.fail("anon bucket list should fail")
except S3ResponseError:
pass
@@ -209,7 +209,7 @@ class S3ConnectionTest (unittest.TestCase):
# give bucket anon user access and anon read again
auth_bucket.set_acl('public-read')
try:
- iter(anon_bucket.list()).next()
+ next(iter(anon_bucket.list()))
self.fail("not expecting contents")
except S3ResponseError:
self.fail("we should have public-read access.")
diff --git a/tests/s3/test_encryption.py b/tests/s3/test_encryption.py
index 91ef71c0..d271d8d2 100644
--- a/tests/s3/test_encryption.py
+++ b/tests/s3/test_encryption.py
@@ -53,7 +53,7 @@ json_policy = """{
class S3EncryptionTest (unittest.TestCase):
def test_1_versions(self):
- print '--- running S3Encryption tests ---'
+ print('--- running S3Encryption tests ---')
c = S3Connection()
# create a new, empty bucket
bucket_name = 'encryption-%d' % int(time.time())
@@ -111,4 +111,4 @@ class S3EncryptionTest (unittest.TestCase):
# now delete bucket
bucket.delete()
- print '--- tests completed ---'
+ print('--- tests completed ---')
diff --git a/tests/s3/test_gsconnection.py b/tests/s3/test_gsconnection.py
index d4b30723..7d53b555 100644
--- a/tests/s3/test_gsconnection.py
+++ b/tests/s3/test_gsconnection.py
@@ -41,7 +41,7 @@ class GSConnectionTest (unittest.TestCase):
def test_1_basic(self):
"""basic regression test for Google Cloud Storage"""
- print '--- running GSConnection tests ---'
+ print('--- running GSConnection tests ---')
c = GSConnection()
# create a new, empty bucket
bucket_name = 'test-%d' % int(time.time())
@@ -122,7 +122,7 @@ class GSConnectionTest (unittest.TestCase):
k.set_metadata(mdkey2, mdval2)
# try a unicode metadata value
- mdval3 = u'föö'
+ mdval3 = 'föö'
mdkey3 = 'meta3'
k.set_metadata(mdkey3, mdval3)
k.set_contents_from_string(s1)
@@ -340,4 +340,4 @@ class GSConnectionTest (unittest.TestCase):
# delete bucket
uri.delete_bucket()
- print '--- tests completed ---'
+ print('--- tests completed ---')
diff --git a/tests/s3/test_key.py b/tests/s3/test_key.py
index 2e823182..7c7e1008 100644
--- a/tests/s3/test_key.py
+++ b/tests/s3/test_key.py
@@ -28,10 +28,10 @@ Some unit tests for S3 Key
import unittest
import time
-import StringIO
from boto.s3.connection import S3Connection
from boto.s3.key import Key
from boto.exception import S3ResponseError
+import boto.compat as compat
class S3KeyTest (unittest.TestCase):
@@ -48,7 +48,7 @@ class S3KeyTest (unittest.TestCase):
def test_set_contents_from_file_dataloss(self):
# Create an empty stringio and write to it.
content = "abcde"
- sfp = StringIO.StringIO()
+ sfp = compat.StringIO()
sfp.write(content)
# Try set_contents_from_file() without rewinding sfp
k = self.bucket.new_key("k")
@@ -66,7 +66,7 @@ class S3KeyTest (unittest.TestCase):
self.assertEqual(ks, content)
# finally, try with a 0 length string
- sfp = StringIO.StringIO()
+ sfp = compat.StringIO()
k = self.bucket.new_key("k")
k.set_contents_from_file(sfp)
self.assertEqual(k.size, 0)
@@ -77,7 +77,7 @@ class S3KeyTest (unittest.TestCase):
def test_set_contents_as_file(self):
content="01234567890123456789"
- sfp = StringIO.StringIO(content)
+ sfp = compat.StringIO(content)
# fp is set at 0 for just opened (for read) files.
# set_contents should write full content to key.
@@ -111,7 +111,7 @@ class S3KeyTest (unittest.TestCase):
def test_set_contents_with_md5(self):
content="01234567890123456789"
- sfp = StringIO.StringIO(content)
+ sfp = compat.StringIO(content)
# fp is set at 0 for just opened (for read) files.
# set_contents should write full content to key.
@@ -146,7 +146,7 @@ class S3KeyTest (unittest.TestCase):
def test_get_contents_with_md5(self):
content="01234567890123456789"
- sfp = StringIO.StringIO(content)
+ sfp = compat.StringIO(content)
k = self.bucket.new_key("k")
k.set_contents_from_file(sfp)
@@ -166,7 +166,7 @@ class S3KeyTest (unittest.TestCase):
self.my_cb_last = None
k = self.bucket.new_key("k")
k.BufferSize = 2
- sfp = StringIO.StringIO("")
+ sfp = compat.StringIO("")
k.set_contents_from_file(sfp, cb=callback, num_cb=10)
self.assertEqual(self.my_cb_cnt, 1)
self.assertEqual(self.my_cb_last, 0)
@@ -180,7 +180,7 @@ class S3KeyTest (unittest.TestCase):
self.assertEqual(self.my_cb_last, 0)
content="01234567890123456789"
- sfp = StringIO.StringIO(content)
+ sfp = compat.StringIO(content)
# expect 2 calls due start/finish
self.my_cb_cnt = 0
diff --git a/tests/s3/test_mfa.py b/tests/s3/test_mfa.py
index 3f47e94c..d8d2594f 100644
--- a/tests/s3/test_mfa.py
+++ b/tests/s3/test_mfa.py
@@ -30,6 +30,7 @@ import time
from boto.s3.connection import S3Connection
from boto.exception import S3ResponseError
from boto.s3.deletemarker import DeleteMarker
+import boto.compat as compat
class S3MFATest (unittest.TestCase):
@@ -45,8 +46,8 @@ class S3MFATest (unittest.TestCase):
def test_mfadel(self):
# Enable Versioning with MfaDelete
- mfa_sn = raw_input('MFA S/N: ')
- mfa_code = raw_input('MFA Code: ')
+ mfa_sn = compat.raw_input('MFA S/N: ')
+ mfa_code = compat.raw_input('MFA Code: ')
self.bucket.configure_versioning(True, mfa_delete=True, mfa_token=(mfa_sn, mfa_code))
# Check enabling mfa worked.
@@ -73,11 +74,11 @@ class S3MFATest (unittest.TestCase):
pass
# Now try delete again with the MFA token
- mfa_code = raw_input('MFA Code: ')
+ mfa_code = compat.raw_input('MFA Code: ')
self.bucket.delete_key('foobar', version_id=v1, mfa_token=(mfa_sn, mfa_code))
# Next suspend versioning and disable MfaDelete on the bucket
- mfa_code = raw_input('MFA Code: ')
+ mfa_code = compat.raw_input('MFA Code: ')
self.bucket.configure_versioning(False, mfa_delete=False, mfa_token=(mfa_sn, mfa_code))
# Lastly, check disabling mfa worked.
diff --git a/tests/s3/test_multidelete.py b/tests/s3/test_multidelete.py
index f5f922c1..c8472bec 100644
--- a/tests/s3/test_multidelete.py
+++ b/tests/s3/test_multidelete.py
@@ -85,7 +85,7 @@ class S3MultiDeleteTest (unittest.TestCase):
self.assertEqual(len(result.errors), 1)
def test_delete_kanji(self):
- result = self.bucket.delete_keys([u"漢字", Key(name=u"日本語")])
+ result = self.bucket.delete_keys(["漢字", Key(name="日本語")])
self.assertEqual(len(result.deleted), 2)
self.assertEqual(len(result.errors), 0)
@@ -95,7 +95,7 @@ class S3MultiDeleteTest (unittest.TestCase):
self.assertEqual(len(result.errors), 0)
def test_delete_kanji_by_list(self):
- for key_name in [u"漢字", u"日本語", u"テスト"]:
+ for key_name in ["漢字", "日本語", "テスト"]:
key = self.bucket.new_key(key_name)
key.set_contents_from_string('this is a test')
result = self.bucket.delete_keys(self.bucket.list())
diff --git a/tests/s3/test_multipart.py b/tests/s3/test_multipart.py
index 8e93a6d8..6e6683f4 100644
--- a/tests/s3/test_multipart.py
+++ b/tests/s3/test_multipart.py
@@ -34,8 +34,8 @@ Some unit tests for the S3 MultiPartUpload
import unittest
import time
-import StringIO
from boto.s3.connection import S3Connection
+import boto.compat as compat
class S3MultiPartUploadTest (unittest.TestCase):
@@ -50,14 +50,14 @@ class S3MultiPartUploadTest (unittest.TestCase):
self.bucket.delete()
def test_abort(self):
- key_name = u"テスト"
+ key_name = "テスト"
mpu = self.bucket.initiate_multipart_upload(key_name)
mpu.cancel_upload()
def test_complete_ascii(self):
key_name = "test"
mpu = self.bucket.initiate_multipart_upload(key_name)
- fp = StringIO.StringIO("small file")
+ fp = compat.StringIO("small file")
mpu.upload_part_from_file(fp, part_num=1)
fp.close()
cmpu = mpu.complete_upload()
@@ -65,9 +65,9 @@ class S3MultiPartUploadTest (unittest.TestCase):
self.assertNotEqual(cmpu.etag, None)
def test_complete_japanese(self):
- key_name = u"テスト"
+ key_name = "テスト"
mpu = self.bucket.initiate_multipart_upload(key_name)
- fp = StringIO.StringIO("small file")
+ fp = compat.StringIO("small file")
mpu.upload_part_from_file(fp, part_num=1)
fp.close()
cmpu = mpu.complete_upload()
@@ -81,18 +81,18 @@ class S3MultiPartUploadTest (unittest.TestCase):
self.assertNotEqual(cmpu.etag, None)
def test_list_japanese(self):
- key_name = u"テスト"
+ key_name = "テスト"
mpu = self.bucket.initiate_multipart_upload(key_name)
rs = self.bucket.list_multipart_uploads()
# New bucket, so only one upload expected
- lmpu = iter(rs).next()
+ lmpu = next(iter(rs))
self.assertEqual(lmpu.id, mpu.id)
self.assertEqual(lmpu.key_name, key_name)
# Abort using the one returned in the list
lmpu.cancel_upload()
def test_list_multipart_uploads(self):
- key_name = u"テスト"
+ key_name = "テスト"
mpus = []
mpus.append(self.bucket.initiate_multipart_upload(key_name))
mpus.append(self.bucket.initiate_multipart_upload(key_name))
@@ -107,7 +107,7 @@ class S3MultiPartUploadTest (unittest.TestCase):
def test_four_part_file(self):
key_name = "k"
contents = "01234567890123456789"
- sfp = StringIO.StringIO(contents)
+ sfp = compat.StringIO(contents)
# upload 20 bytes in 4 parts of 5 bytes each
mpu = self.bucket.initiate_multipart_upload(key_name)
diff --git a/tests/s3/test_pool.py b/tests/s3/test_pool.py
index ebb68c85..2e0a77fe 100644
--- a/tests/s3/test_pool.py
+++ b/tests/s3/test_pool.py
@@ -28,8 +28,6 @@ import boto
import time
import uuid
-from StringIO import StringIO
-
from threading import Thread
def spawn(function, *args, **kwargs):
@@ -57,7 +55,7 @@ def test_close_connections():
dependencies are added to the test suite.
"""
- print "Running test_close_connections"
+ print("Running test_close_connections")
# Connect to S3
s3 = boto.connect_s3()
@@ -117,9 +115,9 @@ def read_big_object(s3, bucket, name, count):
out = WriteAndCount()
key.get_contents_to_file(out)
if out.size != BIG_SIZE:
- print out.size, BIG_SIZE
+ print(out.size, BIG_SIZE)
assert out.size == BIG_SIZE
- print " pool size:", s3._pool.size()
+ print(" pool size:", s3._pool.size())
class LittleQuerier(object):
@@ -147,7 +145,7 @@ class LittleQuerier(object):
rh = { 'response-content-type' : 'small/' + str(i) }
actual = key.get_contents_as_string(response_headers = rh)
if expected != actual:
- print "AHA:", repr(expected), repr(actual)
+ print("AHA:", repr(expected), repr(actual))
assert expected == actual
count += 1
@@ -193,7 +191,7 @@ def test_reuse_connections():
you can see that it's happening.
"""
- print "Running test_reuse_connections"
+ print("Running test_reuse_connections")
# Connect to S3
s3 = boto.connect_s3()
@@ -207,11 +205,11 @@ def test_reuse_connections():
bucket.new_key(name).set_contents_from_string(str(i))
# Wait, clean the connection pool, and make sure it's empty.
- print " waiting for all connections to become stale"
+ print(" waiting for all connections to become stale")
time.sleep(s3._pool.STALE_DURATION + 1)
s3._pool.clean()
assert s3._pool.size() == 0
- print " pool is empty"
+ print(" pool is empty")
# Create a big object in S3.
big_name = str(uuid.uuid4())
diff --git a/tests/s3/test_resumable_downloads.py b/tests/s3/test_resumable_downloads.py
index 38be659b..9c1da501 100755..100644
--- a/tests/s3/test_resumable_downloads.py
+++ b/tests/s3/test_resumable_downloads.py
@@ -32,7 +32,6 @@ import random
import re
import shutil
import socket
-import StringIO
import sys
import tempfile
import time
@@ -45,7 +44,8 @@ from boto.s3.resumable_download_handler import ResumableDownloadHandler
from boto.exception import ResumableTransferDisposition
from boto.exception import ResumableDownloadException
from boto.exception import StorageResponseError
-from cb_test_harnass import CallbackTestHarnass
+import boto.compat as compat
+from .cb_test_harnass import CallbackTestHarnass
# We don't use the OAuth2 authentication plugin directly; importing it here
# ensures that it's loaded and available by default.
@@ -69,7 +69,7 @@ class ResumableDownloadTests(unittest.TestCase):
def resilient_close(key):
try:
key.close()
- except StorageResponseError, e:
+ except StorageResponseError as e:
pass
@classmethod
@@ -108,7 +108,7 @@ class ResumableDownloadTests(unittest.TestCase):
string_data = ''.join(buf)
uri = cls.src_bucket_uri.clone_replace_name(obj_name)
key = uri.new_key(validate=False)
- key.set_contents_from_file(StringIO.StringIO(string_data))
+ key.set_contents_from_file(compat.StringIO(string_data))
# Set debug on key's connection after creating data, so only the test
# runs will show HTTP output (if called passed debug>0).
key.bucket.connection.debug = debug
@@ -177,8 +177,8 @@ class ResumableDownloadTests(unittest.TestCase):
cls.src_bucket_uri.delete_bucket()
break
except StorageResponseError:
- print 'Test bucket (%s) not yet deleted, still trying' % (
- cls.src_bucket_uri.uri)
+ print('Test bucket (%s) not yet deleted, still trying' % (
+ cls.src_bucket_uri.uri))
time.sleep(2)
shutil.rmtree(cls.tmp_dir)
cls.tmp_dir = tempfile.mkdtemp(prefix=cls.tmpdir_prefix)
@@ -217,7 +217,7 @@ class ResumableDownloadTests(unittest.TestCase):
self.dst_fp, cb=harnass.call,
res_download_handler=res_download_handler)
self.fail('Did not get expected ResumableDownloadException')
- except ResumableDownloadException, e:
+ except ResumableDownloadException as e:
# We'll get a ResumableDownloadException at this point because
# of CallbackTestHarnass (above). Check that the tracker file was
# created correctly.
@@ -275,7 +275,7 @@ class ResumableDownloadTests(unittest.TestCase):
self.dst_fp, cb=harnass.call,
res_download_handler=res_download_handler)
self.fail('Did not get expected OSError')
- except OSError, e:
+ except OSError as e:
# Ensure the error was re-raised.
self.assertEqual(e.errno, 13)
@@ -328,7 +328,7 @@ class ResumableDownloadTests(unittest.TestCase):
self.dst_fp, cb=harnass.call,
res_download_handler=res_download_handler)
self.fail('Did not get expected ResumableDownloadException')
- except ResumableDownloadException, e:
+ except ResumableDownloadException as e:
self.assertEqual(e.disposition,
ResumableTransferDisposition.ABORT_CUR_PROCESS)
# Ensure a tracker file survived.
@@ -433,7 +433,7 @@ class ResumableDownloadTests(unittest.TestCase):
os.chmod(self.tmp_dir, 0)
res_download_handler = ResumableDownloadHandler(
tracker_file_name=self.tracker_file_name)
- except ResumableDownloadException, e:
+ except ResumableDownloadException as e:
self.assertEqual(e.disposition, ResumableTransferDisposition.ABORT)
self.assertNotEqual(
e.message.find('Couldn\'t write URI tracker file'), -1)
@@ -465,11 +465,11 @@ if __name__ == '__main__':
# don't assume the user has Python 2.7 (which supports classmethods
# that do it, with camelCase versions of these names).
try:
- print 'Setting up %s...' % test_class.get_suite_description()
+ print('Setting up %s...' % test_class.get_suite_description())
test_class.set_up_class(debug)
- print 'Running %s...' % test_class.get_suite_description()
+ print('Running %s...' % test_class.get_suite_description())
unittest.TextTestRunner(verbosity=2).run(suite)
finally:
- print 'Cleaning up after %s...' % test_class.get_suite_description()
+ print('Cleaning up after %s...' % test_class.get_suite_description())
test_class.tear_down_class()
- print ''
+ print('')
diff --git a/tests/s3/test_resumable_uploads.py b/tests/s3/test_resumable_uploads.py
index 7dabfe08..7cd9c879 100755..100644
--- a/tests/s3/test_resumable_uploads.py
+++ b/tests/s3/test_resumable_uploads.py
@@ -32,7 +32,6 @@ import random
import re
import shutil
import socket
-import StringIO
import sys
import tempfile
import time
@@ -45,7 +44,8 @@ from boto.exception import InvalidUriError
from boto.exception import ResumableTransferDisposition
from boto.exception import ResumableUploadException
from boto.exception import StorageResponseError
-from cb_test_harnass import CallbackTestHarnass
+import boto.compat as compat
+from .cb_test_harnass import CallbackTestHarnass
# We don't use the OAuth2 authentication plugin directly; importing it here
# ensures that it's loaded and available by default.
@@ -100,7 +100,7 @@ class ResumableUploadTests(unittest.TestCase):
for i in range(size):
buf.append(str(random.randint(0, 9)))
file_as_string = ''.join(buf)
- return (file_as_string, StringIO.StringIO(file_as_string))
+ return (file_as_string, compat.StringIO(file_as_string))
@classmethod
def get_dst_bucket_uri(cls, debug):
@@ -192,8 +192,8 @@ class ResumableUploadTests(unittest.TestCase):
cls.dst_bucket_uri.delete_bucket()
break
except StorageResponseError:
- print 'Test bucket (%s) not yet deleted, still trying' % (
- cls.dst_bucket_uri.uri)
+ print('Test bucket (%s) not yet deleted, still trying' % (
+ cls.dst_bucket_uri.uri))
time.sleep(2)
shutil.rmtree(cls.tmp_dir)
cls.tmp_dir = tempfile.mkdtemp(prefix=cls.tmpdir_prefix)
@@ -240,7 +240,7 @@ class ResumableUploadTests(unittest.TestCase):
self.small_src_file, cb=harnass.call,
res_upload_handler=res_upload_handler)
self.fail('Did not get expected ResumableUploadException')
- except ResumableUploadException, e:
+ except ResumableUploadException as e:
# We'll get a ResumableUploadException at this point because
# of CallbackTestHarnass (above). Check that the tracker file was
# created correctly.
@@ -299,7 +299,7 @@ class ResumableUploadTests(unittest.TestCase):
self.small_src_file, cb=harnass.call,
res_upload_handler=res_upload_handler)
self.fail('Did not get expected OSError')
- except OSError, e:
+ except OSError as e:
# Ensure the error was re-raised.
self.assertEqual(e.errno, 13)
@@ -353,7 +353,7 @@ class ResumableUploadTests(unittest.TestCase):
self.larger_src_file, cb=harnass.call,
res_upload_handler=res_upload_handler)
self.fail('Did not get expected ResumableUploadException')
- except ResumableUploadException, e:
+ except ResumableUploadException as e:
self.assertEqual(e.disposition,
ResumableTransferDisposition.ABORT_CUR_PROCESS)
# Ensure a tracker file survived.
@@ -449,7 +449,7 @@ class ResumableUploadTests(unittest.TestCase):
self.larger_src_file, cb=harnass.call,
res_upload_handler=res_upload_handler)
self.fail('Did not get expected ResumableUploadException')
- except ResumableUploadException, e:
+ except ResumableUploadException as e:
# First abort (from harnass-forced failure) should be
# ABORT_CUR_PROCESS.
self.assertEqual(e.disposition, ResumableTransferDisposition.ABORT_CUR_PROCESS)
@@ -465,7 +465,7 @@ class ResumableUploadTests(unittest.TestCase):
self.dst_key.set_contents_from_file(
self.largest_src_file, res_upload_handler=res_upload_handler)
self.fail('Did not get expected ResumableUploadException')
- except ResumableUploadException, e:
+ except ResumableUploadException as e:
# This abort should be a hard abort (file size changing during
# transfer).
self.assertEqual(e.disposition, ResumableTransferDisposition.ABORT)
@@ -487,7 +487,7 @@ class ResumableUploadTests(unittest.TestCase):
test_file, cb=harnass.call,
res_upload_handler=res_upload_handler)
self.fail('Did not get expected ResumableUploadException')
- except ResumableUploadException, e:
+ except ResumableUploadException as e:
self.assertEqual(e.disposition, ResumableTransferDisposition.ABORT)
self.assertNotEqual(
e.message.find('File changed during upload'), -1)
@@ -511,7 +511,7 @@ class ResumableUploadTests(unittest.TestCase):
test_file, cb=harnass.call,
res_upload_handler=res_upload_handler)
self.fail('Did not get expected ResumableUploadException')
- except ResumableUploadException, e:
+ except ResumableUploadException as e:
self.assertEqual(e.disposition, ResumableTransferDisposition.ABORT)
# Ensure the file size didn't change.
test_file.seek(0, os.SEEK_END)
@@ -522,7 +522,7 @@ class ResumableUploadTests(unittest.TestCase):
try:
self.dst_key_uri.get_key()
self.fail('Did not get expected InvalidUriError')
- except InvalidUriError, e:
+ except InvalidUriError as e:
pass
def test_upload_with_content_length_header_set(self):
@@ -538,7 +538,7 @@ class ResumableUploadTests(unittest.TestCase):
self.small_src_file, res_upload_handler=res_upload_handler,
headers={'Content-Length' : self.small_src_file_size})
self.fail('Did not get expected ResumableUploadException')
- except ResumableUploadException, e:
+ except ResumableUploadException as e:
self.assertEqual(e.disposition, ResumableTransferDisposition.ABORT)
self.assertNotEqual(
e.message.find('Attempt to specify Content-Length header'), -1)
@@ -585,7 +585,7 @@ class ResumableUploadTests(unittest.TestCase):
os.chmod(self.tmp_dir, 0)
res_upload_handler = ResumableUploadHandler(
tracker_file_name=self.tracker_file_name)
- except ResumableUploadException, e:
+ except ResumableUploadException as e:
self.assertEqual(e.disposition, ResumableTransferDisposition.ABORT)
self.assertNotEqual(
e.message.find('Couldn\'t write URI tracker file'), -1)
@@ -613,11 +613,11 @@ if __name__ == '__main__':
# don't assume the user has Python 2.7 (which supports classmethods
# that do it, with camelCase versions of these names).
try:
- print 'Setting up %s...' % test_class.get_suite_description()
+ print('Setting up %s...' % test_class.get_suite_description())
test_class.set_up_class(debug)
- print 'Running %s...' % test_class.get_suite_description()
+ print('Running %s...' % test_class.get_suite_description())
unittest.TextTestRunner(verbosity=2).run(suite)
finally:
- print 'Cleaning up after %s...' % test_class.get_suite_description()
+ print('Cleaning up after %s...' % test_class.get_suite_description())
test_class.tear_down_class()
- print ''
+ print('')
diff --git a/tests/s3/test_versioning.py b/tests/s3/test_versioning.py
index 2d569af9..8fb63c04 100644
--- a/tests/s3/test_versioning.py
+++ b/tests/s3/test_versioning.py
@@ -46,7 +46,7 @@ class S3VersionTest (unittest.TestCase):
def test_1_versions(self):
# check versioning off
d = self.bucket.get_versioning_status()
- self.assertFalse(d.has_key('Versioning'))
+ self.assertFalse('Versioning' in d)
# enable versioning
self.bucket.configure_versioning(versioning=True)
@@ -124,7 +124,7 @@ class S3VersionTest (unittest.TestCase):
kv1.set_contents_from_string("v1")
# read list which should contain latest v1
- listed_kv1 = iter(self.bucket.get_all_versions()).next()
+ listed_kv1 = next(iter(self.bucket.get_all_versions()))
self.assertEqual(listed_kv1.name, key_name)
self.assertEqual(listed_kv1.version_id, kv1.version_id)
self.assertEqual(listed_kv1.is_latest, True)
@@ -135,8 +135,8 @@ class S3VersionTest (unittest.TestCase):
# read 2 versions, confirm v2 is latest
i = iter(self.bucket.get_all_versions())
- listed_kv2 = i.next()
- listed_kv1 = i.next()
+ listed_kv2 = next(i)
+ listed_kv1 = next(i)
self.assertEqual(listed_kv2.version_id, kv2.version_id)
self.assertEqual(listed_kv1.version_id, kv1.version_id)
self.assertEqual(listed_kv2.is_latest, True)
@@ -145,9 +145,9 @@ class S3VersionTest (unittest.TestCase):
# delete key, which creates a delete marker as latest
self.bucket.delete_key(key_name)
i = iter(self.bucket.get_all_versions())
- listed_kv3 = i.next()
- listed_kv2 = i.next()
- listed_kv1 = i.next()
+ listed_kv3 = next(i)
+ listed_kv2 = next(i)
+ listed_kv1 = next(i)
self.assertNotEqual(listed_kv3.version_id, None)
self.assertEqual(listed_kv2.version_id, kv2.version_id)
self.assertEqual(listed_kv1.version_id, kv1.version_id)
diff --git a/tests/sdb/test_connection.py b/tests/sdb/test_connection.py
index a834a9df..76ea773a 100644
--- a/tests/sdb/test_connection.py
+++ b/tests/sdb/test_connection.py
@@ -33,7 +33,7 @@ from boto.exception import SDBResponseError
class SDBConnectionTest (unittest.TestCase):
def test_1_basic(self):
- print '--- running SDBConnection tests ---'
+ print('--- running SDBConnection tests ---')
c = SDBConnection()
rs = c.get_all_domains()
num_domains = len(rs)
@@ -61,7 +61,7 @@ class SDBConnectionTest (unittest.TestCase):
# try to get the attributes and see if they match
item = domain.get_attributes(item_1, consistent_read=True)
- assert len(item.keys()) == len(attrs_1.keys())
+ assert len(list(item.keys())) == len(list(attrs_1.keys()))
assert item['name1'] == attrs_1['name1']
assert item['name2'] == attrs_1['name2']
@@ -114,5 +114,5 @@ class SDBConnectionTest (unittest.TestCase):
stat = c.delete_domain(domain)
assert stat
- print '--- tests completed ---'
+ print('--- tests completed ---')
diff --git a/tests/sqs/test_connection.py b/tests/sqs/test_connection.py
index 6996a54a..e1f31d79 100644
--- a/tests/sqs/test_connection.py
+++ b/tests/sqs/test_connection.py
@@ -34,7 +34,7 @@ from boto.exception import SQSError
class SQSConnectionTest (unittest.TestCase):
def test_1_basic(self):
- print '--- running SQSConnection tests ---'
+ print('--- running SQSConnection tests ---')
c = SQSConnection()
rs = c.get_all_queues()
num_queues = 0
@@ -66,14 +66,14 @@ class SQSConnectionTest (unittest.TestCase):
# now try to get queue attributes
a = q.get_attributes()
- assert a.has_key('ApproximateNumberOfMessages')
- assert a.has_key('VisibilityTimeout')
+ assert 'ApproximateNumberOfMessages' in a
+ assert 'VisibilityTimeout' in a
a = q.get_attributes('ApproximateNumberOfMessages')
- assert a.has_key('ApproximateNumberOfMessages')
- assert not a.has_key('VisibilityTimeout')
+ assert 'ApproximateNumberOfMessages' in a
+ assert 'VisibilityTimeout' not in a
a = q.get_attributes('VisibilityTimeout')
- assert not a.has_key('ApproximateNumberOfMessages')
- assert a.has_key('VisibilityTimeout')
+ assert 'ApproximateNumberOfMessages' not in a
+ assert 'VisibilityTimeout' in a
# now change the visibility timeout
timeout = 45
@@ -132,5 +132,5 @@ class SQSConnectionTest (unittest.TestCase):
# now delete that queue and messages
c.delete_queue(queue, True)
- print '--- tests completed ---'
+ print('--- tests completed ---')
diff --git a/tests/sts/test_session_token.py b/tests/sts/test_session_token.py
index 630ebf1c..8628f4e4 100644
--- a/tests/sts/test_session_token.py
+++ b/tests/sts/test_session_token.py
@@ -34,7 +34,7 @@ from boto.s3.connection import S3Connection
class SessionTokenTest (unittest.TestCase):
def test_session_token(self):
- print '--- running Session Token tests ---'
+ print('--- running Session Token tests ---')
c = STSConnection()
# Create a session token
@@ -61,5 +61,5 @@ class SessionTokenTest (unittest.TestCase):
security_token=token.session_token)
buckets = s3.get_all_buckets()
- print '--- tests completed ---'
+ print('--- tests completed ---')
diff --git a/tests/test.py b/tests/test.py
index e9af4404..619c7bf6 100755..100644
--- a/tests/test.py
+++ b/tests/test.py
@@ -51,9 +51,9 @@ from dynamodb.test_layer2 import DynamoDBLayer2Test
from sts.test_session_token import SessionTokenTest
def usage():
- print "test.py [-t testsuite] [-v verbosity]"
- print " -t run specific testsuite (s3|ssl|s3mfa|gs|sqs|ec2|sdb|dynamodb|dynamodbL1|dynamodbL2|sts|all)"
- print " -v verbosity (0|1|2)"
+ print("test.py [-t testsuite] [-v verbosity]")
+ print(" -t run specific testsuite (s3|ssl|s3mfa|gs|sqs|ec2|sdb|dynamodb|dynamodbL1|dynamodbL2|sts|all)")
+ print(" -v verbosity (0|1|2)")
def main():
try: