diff options
author | Garrett Holmstrom <gholms@fedoraproject.org> | 2012-03-11 08:30:50 -0700 |
---|---|---|
committer | Mitch Garnaat <mitch@garnaat.com> | 2012-03-27 10:26:27 -0700 |
commit | d9b1e1e3601a1a589a972b948457d48d2c063ce4 (patch) | |
tree | e0a745a993d4d3627d44feff79b6eb3459b27f64 | |
parent | 63530ee3c20ac94e8664be420c39bc1d1929ca7f (diff) | |
download | boto-d9b1e1e3601a1a589a972b948457d48d2c063ce4.tar.gz |
Fix test.py imports
Result of first pass of manual merge from branch_master_python3k.
Actually passes SDB tests on both 2.x and 3.x.
Basic S3 operations are now working.
Simple dynamodb requests now working in both versions.
80 files changed, 608 insertions, 521 deletions
diff --git a/boto/__init__.py b/boto/__init__.py index 15697547..6818635d 100644 --- a/boto/__init__.py +++ b/boto/__init__.py @@ -29,8 +29,8 @@ import boto.plugin import os, re, sys import logging import logging.config -import urlparse from boto.exception import InvalidUriError +import boto.compat as compat __version__ = '2.3.0' Version = __version__ # for backware compatibility @@ -391,7 +391,7 @@ def connect_ec2_endpoint(url, aws_access_key_id=None, aws_secret_access_key=None """ from boto.ec2.regioninfo import RegionInfo - purl = urlparse.urlparse(url) + purl = compat.urlparse(url) kwargs['port'] = purl.port kwargs['host'] = purl.hostname kwargs['path'] = purl.path diff --git a/boto/auth.py b/boto/auth.py index b8e00ef0..e5b3b40c 100644 --- a/boto/auth.py +++ b/boto/auth.py @@ -1,5 +1,6 @@ # Copyright 2010 Google Inc. -# Copyright (c) 2011 Mitch Garnaat http://garnaat.org/ +# Copyright (c) 2012 Mitch Garnaat http://garnaat.org/ +# Copyright (c) 2012 Amazon.com, Inc. or its affiliates. All Rights Reserved. # Copyright (c) 2011, Eucalyptus Systems, Inc. 
# # Permission is hereby granted, free of charge, to any person obtaining a @@ -34,7 +35,7 @@ import boto.plugin import boto.utils import hmac import sys -import urllib +import boto.compat as compat from email.utils import formatdate from boto.auth_handler import AuthHandler @@ -79,10 +80,10 @@ class HmacKeys(object): def update_provider(self, provider): self._provider = provider - self._hmac = hmac.new(self._provider.secret_key, digestmod=sha) + sk = self._provider.secret_key.encode('utf-8') + self._hmac = hmac.new(sk, digestmod=sha) if sha256: - self._hmac_256 = hmac.new(self._provider.secret_key, - digestmod=sha256) + self._hmac_256 = hmac.new(sk, digestmod=sha256) else: self._hmac_256 = None @@ -97,8 +98,10 @@ class HmacKeys(object): hmac = self._hmac_256.copy() else: hmac = self._hmac.copy() + if not isinstance(string_to_sign, compat.binary_type): + string_to_sign = string_to_sign.encode('utf-8') hmac.update(string_to_sign) - return base64.encodestring(hmac.digest()).strip() + return base64.b64encode(hmac.digest()).strip().decode('utf-8') class AnonAuthHandler(AuthHandler, HmacKeys): """ @@ -252,6 +255,7 @@ class HmacAuthV3HTTPHandler(AuthHandler, HmacKeys): req.headers['X-Amz-Security-Token'] = self._provider.security_token string_to_sign, headers_to_sign = self.string_to_sign(req) boto.log.debug('StringToSign:\n%s' % string_to_sign) + string_to_sign = string_to_sign.encode('utf-8') hash_value = sha256(string_to_sign).digest() b64_hmac = self.sign_string(hash_value) s = "AWS3 AWSAccessKeyId=%s," % self._provider.access_key @@ -279,7 +283,7 @@ class QuerySignatureHelper(HmacKeys): boto.log.debug('query_string: %s Signature: %s' % (qs, signature)) if http_request.method == 'POST': headers['Content-Type'] = 'application/x-www-form-urlencoded; charset=UTF-8' - http_request.body = qs + '&Signature=' + urllib.quote_plus(signature) + http_request.body = qs + '&Signature=' + compat.quote_plus(signature) http_request.headers['Content-Length'] = 
str(len(http_request.body)) else: http_request.body = '' @@ -287,7 +291,7 @@ class QuerySignatureHelper(HmacKeys): # already be there, we need to get rid of that and rebuild it http_request.path = http_request.path.split('?')[0] http_request.path = (http_request.path + '?' + qs + - '&Signature=' + urllib.quote_plus(signature)) + '&Signature=' + compat.quote_plus(signature)) class QuerySignatureV0AuthHandler(QuerySignatureHelper, AuthHandler): """Provides Signature V0 Signing""" @@ -299,13 +303,13 @@ class QuerySignatureV0AuthHandler(QuerySignatureHelper, AuthHandler): boto.log.debug('using _calc_signature_0') hmac = self._hmac.copy() s = params['Action'] + params['Timestamp'] + s = s.encode('utf-8') hmac.update(s) - keys = params.keys() - keys.sort(cmp = lambda x, y: cmp(x.lower(), y.lower())) + keys = sorted(params, key = str.lower) pairs = [] for key in keys: val = boto.utils.get_utf8_value(params[key]) - pairs.append(key + '=' + urllib.quote(val)) + pairs.append(key + '=' + compat.quote(val)) qs = '&'.join(pairs) return (qs, base64.b64encode(hmac.digest())) @@ -320,14 +324,15 @@ class QuerySignatureV1AuthHandler(QuerySignatureHelper, AuthHandler): def _calc_signature(self, params, *args): boto.log.debug('using _calc_signature_1') hmac = self._hmac.copy() - keys = params.keys() - keys.sort(cmp = lambda x, y: cmp(x.lower(), y.lower())) + keys = sorted(params, key = str.lower) pairs = [] for key in keys: + key = key.encode('utf-8') hmac.update(key) val = boto.utils.get_utf8_value(params[key]) + val = val.encode('utf-8') hmac.update(val) - pairs.append(key + '=' + urllib.quote(val)) + pairs.append(key + '=' + compat.quote(val)) qs = '&'.join(pairs) return (qs, base64.b64encode(hmac.digest())) @@ -349,17 +354,17 @@ class QuerySignatureV2AuthHandler(QuerySignatureHelper, AuthHandler): params['SignatureMethod'] = 'HmacSHA1' if self._provider.security_token: params['SecurityToken'] = self._provider.security_token - keys = params.keys() - keys.sort() + keys = 
sorted(params) pairs = [] for key in keys: val = boto.utils.get_utf8_value(params[key]) - pairs.append(urllib.quote(key, safe='') + '=' + - urllib.quote(val, safe='-_~')) + pairs.append(compat.quote(key, safe='') + '=' + + compat.quote(val, safe='-_~')) qs = '&'.join(pairs) boto.log.debug('query string: %s' % qs) string_to_sign += qs boto.log.debug('string_to_sign: %s' % string_to_sign) + string_to_sign = string_to_sign.encode('utf-8') hmac.update(string_to_sign) b64 = base64.b64encode(hmac.digest()) boto.log.debug('len(b64)=%d' % len(b64)) diff --git a/boto/compat.py b/boto/compat.py new file mode 100644 index 00000000..06ade3b4 --- /dev/null +++ b/boto/compat.py @@ -0,0 +1,122 @@ +# Copyright (c) 2012 Mitch Garnaat http://garnaat.org/ +# Copyright (c) 2012 Amazon.com, Inc. or its affiliates. +# All Rights Reserved +# +# Permission is hereby granted, free of charge, to any person obtaining a +# copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, dis- +# tribute, sublicense, and/or sell copies of the Software, and to permit +# persons to whom the Software is furnished to do so, subject to the fol- +# lowing conditions: +# +# The above copyright notice and this permission notice shall be included +# in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL- +# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT +# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +import sys +import os +import types + +# True if we are running on Python 3. 
+PY3 = sys.version_info[0] == 3 + +if PY3: + string_types = str, + integer_types = int, + class_types = type, + text_type = str + binary_type = bytes +else: + string_types = basestring, + integer_types = (int, long) + class_types = (type, types.ClassType) + text_type = unicode + binary_type = str + +try: + import urllib.parse as urlparse +except ImportError: + import urlparse + +try: + from urllib.parse import quote, quote_plus, unquote +except ImportError: + from urllib import quote, quote_plus, unquote + +try: + from urllib.request import urlopen, Request, build_opener, install_opener +except ImportError: + from urllib2 import urlopen, Request + +try: + from urllib.request import HTTPPasswordMgrWithDefaultRealm +except ImportError: + from urllib2 import HTTPPasswordMgrWithDefaultRealm + +try: + from urllib.request import HTTPBasicAuthHandler, HTTPError +except ImportError: + from urllib2 import HTTPBasicAuthHandler + +try: + from hashlib import md5 +except ImportError: + from md5 import md5 + +try: + import http.client as httplib +except ImportError: + import httplib + +try: + import configparser +except ImportError: + import ConfigParser as configparser + +if PY3: + import io + StringIO = io.StringIO +else: + import StringIO + StringIO = StringIO.StringIO + +if PY3: + raw_input = input +else: + raw_input = raw_input + +try: + # Python 3.x + from email.mime.multipart import MIMEMultipart + from email.mime.base import MIMEBase + from email.mime.text import MIMEText + from email.utils import formatdate + import email.encoders as Encoders + unicode = str +except ImportError: + # Python 2.x + from email.MIMEMultipart import MIMEMultipart + from email.MIMEBase import MIMEBase + from email.MIMEText import MIMEText + from email.utils import formatdate + from email import Encoders + + +def on_appengine(): + return all(key in os.environ for key in ('USER_IS_ADMIN', + 'CURRENT_VERSION_ID', + 'APPLICATION_ID')) + + +def httplib_ssl_hack(port): + return ((on_appengine and 
sys.version[:3] == '2.5') or + sys.version.startswith('3') or + sys.version[:3] in ('2.6', '2.7')) and port == 443 diff --git a/boto/connection.py b/boto/connection.py index 860c9bbb..ce754251 100644 --- a/boto/connection.py +++ b/boto/connection.py @@ -45,19 +45,17 @@ Handles basic connections to AWS from __future__ import with_statement import base64 import errno -import httplib import os -import Queue import random import re import socket import sys import time -import urllib, urlparse import xml.sax from . import auth from . import auth_handler +from . import compat import boto import boto.utils import boto.handler @@ -83,9 +81,6 @@ try: except ImportError: import dummy_threading as threading -ON_APP_ENGINE = all(key in os.environ for key in ( - 'USER_IS_ADMIN', 'CURRENT_VERSION_ID', 'APPLICATION_ID')) - PORTS_BY_SECURITY = { True: 443, False: 80 } DEFAULT_CA_CERTS_FILE = os.path.join( @@ -168,7 +163,7 @@ class HostConnectionPool(object): This is ugly, reading a private instance variable, but the state we care about isn't available in any public methods. """ - if ON_APP_ENGINE: + if compat.on_appengine(): # Google App Engine implementation of HTTPConnection doesn't contain # _HTTPConnection__response attribute. Moreover, it's not possible # to determine if given connection is ready. 
Reusing connections @@ -350,8 +345,9 @@ class HTTPRequest(object): def authorize(self, connection, **kwargs): for key in self.headers: val = self.headers[key] - if isinstance(val, unicode): - self.headers[key] = urllib.quote_plus(val.encode('utf-8')) + if isinstance(val, compat.text_type): + self.headers[key] = compat.quote_plus(val.encode('utf-8'), + safe='/') connection._auth_handler.add_auth(self, **kwargs) @@ -429,7 +425,7 @@ class AWSAuthConnection(object): 'Boto', 'ca_certificates_file', DEFAULT_CA_CERTS_FILE) self.handle_proxy(proxy, proxy_port, proxy_user, proxy_pass) # define exceptions from httplib that we want to catch and retry - self.http_exceptions = (httplib.HTTPException, socket.error, + self.http_exceptions = (compat.httplib.HTTPException, socket.error, socket.gaierror) # define subclasses of the above that are not retryable. self.http_unretryable_exceptions = [] @@ -450,7 +446,7 @@ class AWSAuthConnection(object): self.protocol = 'http' self.host = host self.path = path - if isinstance(debug, (int, long)): + if isinstance(debug, compat.integer_types): self.debug = debug else: self.debug = config.getint('Boto', 'debug', 0) @@ -547,8 +543,7 @@ class AWSAuthConnection(object): # did the same when calculating the V2 signature. In 2.6 # (and higher!) # it no longer does that. Hence, this kludge. 
- if ((ON_APP_ENGINE and sys.version[:3] == '2.5') or - sys.version[:3] in ('2.6', '2.7')) and port == 443: + if compat.httplib_ssl_hack(port): signature_host = self.host else: signature_host = '%s:%d' % (self.host, port) @@ -583,8 +578,7 @@ class AWSAuthConnection(object): self.proxy_pass = config.get_value('Boto', 'proxy_pass', None) if not self.proxy_port and self.proxy: - print "http_proxy environment variable does not specify " \ - "a port, using default" + boto.log.warning('http_proxy env variable does not specify a port') self.proxy_port = self.port self.use_proxy = (self.proxy != None) @@ -613,12 +607,12 @@ class AWSAuthConnection(object): host, ca_certs=self.ca_certificates_file, **self.http_connection_kwargs) else: - connection = httplib.HTTPSConnection(host, + connection = compat.httplib.HTTPSConnection(host, **self.http_connection_kwargs) else: boto.log.debug('establishing HTTP connection: kwargs=%s' % self.http_connection_kwargs) - connection = httplib.HTTPConnection(host, + connection = compat.httplib.HTTPConnection(host, **self.http_connection_kwargs) if self.debug > 1: connection.set_debuglevel(self.debug) @@ -646,7 +640,7 @@ class AWSAuthConnection(object): for k, v in self.get_proxy_auth_header().items(): sock.sendall("%s: %s\r\n" % (k, v)) sock.sendall("\r\n") - resp = httplib.HTTPResponse(sock, strict=True, debuglevel=self.debug) + resp = compat.httplib.HTTPResponse(sock, strict=True, debuglevel=self.debug) resp.begin() if resp.status != 200: @@ -659,7 +653,7 @@ class AWSAuthConnection(object): # We can safely close the response, it duped the original socket resp.close() - h = httplib.HTTPConnection(host) + h = compat.httplib.HTTPConnection(host) if self.https_validate_certificates and HAVE_HTTPS_CONNECTION: boto.log.debug("wrapping ssl socket for proxied connection; " @@ -678,11 +672,11 @@ class AWSAuthConnection(object): hostname, cert, 'hostname mismatch') else: # Fallback for old Python without ssl.wrap_socket - if hasattr(httplib, 'ssl'): - 
sslSock = httplib.ssl.SSLSocket(sock) + if hasattr(compat.httplib, 'ssl'): + sslSock = compat.httplib.ssl.SSLSocket(sock) else: sslSock = socket.ssl(sock, None, None) - sslSock = httplib.FakeSocket(sock, sslSock) + sslSock = compat.httplib.FakeSocket(sock, sslSock) # This is a bit unclean h.sock = sslSock @@ -693,7 +687,7 @@ class AWSAuthConnection(object): return path def get_proxy_auth_header(self): - auth = base64.encodestring(self.proxy_user + ':' + self.proxy_pass) + auth = base64.encodebytes(self.proxy_user + ':' + self.proxy_pass) return {'Proxy-Authorization': 'Basic %s' % auth} def _mexe(self, request, sender=None, override_num_retries=None, @@ -762,7 +756,7 @@ class AWSAuthConnection(object): return response else: scheme, request.host, request.path, \ - params, query, fragment = urlparse.urlparse(location) + params, query, fragment = compat.urlparse.urlparse(location) if query: request.path += '?' + query msg = 'Redirecting: %s' % scheme + '://' diff --git a/boto/dynamodb/layer1.py b/boto/dynamodb/layer1.py index 412ec0c7..b3da6e52 100644 --- a/boto/dynamodb/layer1.py +++ b/boto/dynamodb/layer1.py @@ -137,6 +137,7 @@ class Layer1(AWSAuthConnection): self.instrumentation['times'].append(time.time() - start) self.instrumentation['ids'].append(self.request_id) response_body = response.read() + response_body = response_body.decode('utf-8') boto.log.debug(response_body) return json.loads(response_body, object_hook=object_hook) diff --git a/boto/dynamodb/types.py b/boto/dynamodb/types.py index 3eed2c91..51eb25a1 100644 --- a/boto/dynamodb/types.py +++ b/boto/dynamodb/types.py @@ -24,14 +24,15 @@ Some utility functions to deal with mapping Amazon DynamoDB types to Python types and vice-versa. 
""" +import boto.compat as compat def is_num(n): - return isinstance(n, (int, long, float, bool)) + return isinstance(n, (compat.integer_types, float, bool)) def is_str(n): - return isinstance(n, basestring) + return isinstance(n, compat.string_types) def convert_num(s): diff --git a/boto/ec2/autoscale/__init__.py b/boto/ec2/autoscale/__init__.py index 4d404846..dd31e285 100644 --- a/boto/ec2/autoscale/__init__.py +++ b/boto/ec2/autoscale/__init__.py @@ -131,9 +131,9 @@ class AutoScaleConnection(AWSQueryConnection): # different from EC2 list params for i in xrange(1, len(items)+1): if isinstance(items[i-1], dict): - for k, v in items[i-1].iteritems(): + for k, v in items[i-1].items(): if isinstance(v, dict): - for kk, vv in v.iteritems(): + for kk, vv in v.items(): params['%s.member.%d.%s.%s' % (label, i, k, kk)] = vv else: params['%s.member.%d.%s' % (label, i, k)] = v diff --git a/boto/ec2/buyreservation.py b/boto/ec2/buyreservation.py index fcd8a77c..b84efbc2 100644 --- a/boto/ec2/buyreservation.py +++ b/boto/ec2/buyreservation.py @@ -66,19 +66,19 @@ if __name__ == "__main__": obj.get(params) offerings = obj.ec2.get_all_reserved_instances_offerings(instance_type=params['instance_type'], availability_zone=params['zone'].name) - print '\nThe following Reserved Instances Offerings are available:\n' + print('\nThe following Reserved Instances Offerings are available:\n') for offering in offerings: offering.describe() prop = StringProperty(name='offering', verbose_name='Offering', choices=offerings) offering = propget.get(prop) - print '\nYou have chosen this offering:' + print('\nYou have chosen this offering:') offering.describe() unit_price = float(offering.fixed_price) total_price = unit_price * params['quantity'] - print '!!! You are about to purchase %d of these offerings for a total of $%.2f !!!' % (params['quantity'], total_price) + print('!!! You are about to purchase %d of these offerings for a total of $%.2f !!!' 
% (params['quantity'], total_price)) answer = raw_input('Are you sure you want to do this? If so, enter YES: ') if answer.strip().lower() == 'yes': offering.purchase(params['quantity']) else: - print 'Purchase cancelled' + print('Purchase cancelled') diff --git a/boto/ec2/cloudwatch/alarm.py b/boto/ec2/cloudwatch/alarm.py index 539ad950..5c1b8196 100644 --- a/boto/ec2/cloudwatch/alarm.py +++ b/boto/ec2/cloudwatch/alarm.py @@ -61,7 +61,7 @@ class MetricAlarm(object): '<' : 'LessThanThreshold', '<=' : 'LessThanOrEqualToThreshold', } - _rev_cmp_map = dict((v, k) for (k, v) in _cmp_map.iteritems()) + _rev_cmp_map = dict((v, k) for (k, v) in _cmp_map.items()) def __init__(self, connection=None, name=None, metric=None, namespace=None, statistic=None, comparison=None, diff --git a/boto/ec2/connection.py b/boto/ec2/connection.py index 12a26114..de245c8f 100644 --- a/boto/ec2/connection.py +++ b/boto/ec2/connection.py @@ -2757,8 +2757,7 @@ class EC2Connection(AWSQueryConnection): # Tag methods def build_tag_param_list(self, params, tags): - keys = tags.keys() - keys.sort() + keys = sorted(tags) i = 1 for key in keys: value = tags[key] diff --git a/boto/ec2/elb/loadbalancer.py b/boto/ec2/elb/loadbalancer.py index fec25230..e504976a 100644 --- a/boto/ec2/elb/loadbalancer.py +++ b/boto/ec2/elb/loadbalancer.py @@ -26,6 +26,7 @@ from boto.ec2.elb.policies import Policies from boto.ec2.elb.securitygroup import SecurityGroup from boto.ec2.instanceinfo import InstanceInfo from boto.resultset import ResultSet +import boto.compat as compat class LoadBalancer(object): """ @@ -137,7 +138,7 @@ class LoadBalancer(object): :param zones: The name of the zone(s) to add. """ - if isinstance(zones, str) or isinstance(zones, unicode): + if isinstance(zones, compat.string_types): zones = [zones] new_zones = self.connection.enable_availability_zones(self.name, zones) self.availability_zones = new_zones @@ -150,7 +151,7 @@ class LoadBalancer(object): :param zones: The name of the zone(s) to add. 
""" - if isinstance(zones, str) or isinstance(zones, unicode): + if isinstance(zones, compat.string_types): zones = [zones] new_zones = self.connection.disable_availability_zones(self.name, zones) self.availability_zones = new_zones @@ -165,7 +166,7 @@ class LoadBalancer(object): to add to this load balancer. """ - if isinstance(instances, str) or isinstance(instances, unicode): + if isinstance(instances, compat.string_types): instances = [instances] new_instances = self.connection.register_instances(self.name, instances) self.instances = new_instances @@ -179,7 +180,7 @@ class LoadBalancer(object): to remove from this load balancer. """ - if isinstance(instances, str) or isinstance(instances, unicode): + if isinstance(instances, string_types): instances = [instances] new_instances = self.connection.deregister_instances(self.name, instances) self.instances = new_instances @@ -259,7 +260,7 @@ class LoadBalancer(object): :param subnets: The name of the subnet(s) to add. """ - if isinstance(subnets, str) or isinstance(subnets, unicode): + if isinstance(subnets, compat.string_types): subnets = [subnets] new_subnets = self.connection.attach_lb_to_subnets(self.name, subnets) self.subnets = new_subnets @@ -272,7 +273,7 @@ class LoadBalancer(object): :param subnets: The name of the subnet(s) to detach. """ - if isinstance(subnets, str) or isinstance(subnets, unicode): + if isinstance(subnets, compat.string_types): subnets = [subnets] new_subnets = self.connection.detach_lb_to_subnets(self.name, subnets) self.subnets = new_subnets @@ -287,8 +288,7 @@ class LoadBalancer(object): :param security_groups: The name of the security group(s) to add. 
""" - if isinstance(security_groups, str) or \ - isinstance(security_groups, unicode): + if isinstance(security_groups, compat.string_types): security_groups = [security_groups] new_sgs = self.connection.apply_security_groups_to_lb( self.name, security_groups) diff --git a/boto/ec2/reservedinstance.py b/boto/ec2/reservedinstance.py index 1d35c1df..d31164e8 100644 --- a/boto/ec2/reservedinstance.py +++ b/boto/ec2/reservedinstance.py @@ -59,15 +59,6 @@ class ReservedInstancesOffering(EC2Object): else: setattr(self, name, value) - def describe(self): - print 'ID=%s' % self.id - print '\tInstance Type=%s' % self.instance_type - print '\tZone=%s' % self.availability_zone - print '\tDuration=%s' % self.duration - print '\tFixed Price=%s' % self.fixed_price - print '\tUsage Price=%s' % self.usage_price - print '\tDescription=%s' % self.description - def purchase(self, instance_count=1): return self.connection.purchase_reserved_instance_offering(self.id, instance_count) diff --git a/boto/ecs/item.py b/boto/ecs/item.py index 29588b86..ebaa653f 100644 --- a/boto/ecs/item.py +++ b/boto/ecs/item.py @@ -22,7 +22,7 @@ import xml.sax import cgi -from StringIO import StringIO +import boto.compat as compat class ResponseGroup(xml.sax.ContentHandler): """A Generic "Response Group", which can @@ -35,7 +35,7 @@ class ResponseGroup(xml.sax.ContentHandler): self._nodename = nodename self._nodepath = [] self._curobj = None - self._xml = StringIO() + self._xml = compat.StringIO() def __repr__(self): return '<%s: %s>' % (self.__class__.__name__, self.__dict__) diff --git a/boto/emr/connection.py b/boto/emr/connection.py index 7e6a437b..868037f0 100644 --- a/boto/emr/connection.py +++ b/boto/emr/connection.py @@ -307,7 +307,7 @@ class EmrConnection(AWSQueryConnection): # Instance group args (for spot instances or a heterogenous cluster) list_args = self._build_instance_group_list_args(instance_groups) instance_params = dict( - ('Instances.%s' % k, v) for k, v in list_args.iteritems() + 
('Instances.%s' % k, v) for k, v in list_args.items() ) params.update(instance_params) @@ -336,7 +336,7 @@ class EmrConnection(AWSQueryConnection): params['AdditionalInfo'] = additional_info if api_params: - for key, value in api_params.iteritems(): + for key, value in api_params.items(): if value is None: params.pop(key, None) else: @@ -403,7 +403,7 @@ class EmrConnection(AWSQueryConnection): params = {} for i, bootstrap_action in enumerate(bootstrap_actions): - for key, value in bootstrap_action.iteritems(): + for key, value in bootstrap_action.items(): params['BootstrapActions.member.%s.%s' % (i + 1, key)] = value return params @@ -413,7 +413,7 @@ class EmrConnection(AWSQueryConnection): params = {} for i, step in enumerate(steps): - for key, value in step.iteritems(): + for key, value in step.items(): params['Steps.member.%s.%s' % (i+1, key)] = value return params @@ -480,6 +480,6 @@ class EmrConnection(AWSQueryConnection): params = {} for i, instance_group in enumerate(instance_groups): ig_dict = self._build_instance_group_args(instance_group) - for key, value in ig_dict.iteritems(): + for key, value in ig_dict.items(): params['InstanceGroups.member.%d.%s' % (i+1, key)] = value return params diff --git a/boto/file/key.py b/boto/file/key.py index d39c8c65..f15e99d1 100755 --- a/boto/file/key.py +++ b/boto/file/key.py @@ -22,8 +22,9 @@ # File representation of key, for use with "file://" URIs. 
-import os, shutil, StringIO +import os, shutil import sys +import boto.compat as compat class Key(object): @@ -151,7 +152,7 @@ class Key(object): :returns: The contents of the file as a string """ - fp = StringIO.StringIO() + fp = compat.StringIO() self.get_contents_to_file(fp) return fp.getvalue() diff --git a/boto/fps/connection.py b/boto/fps/connection.py index d2fcb58c..33597a1d 100644 --- a/boto/fps/connection.py +++ b/boto/fps/connection.py @@ -24,7 +24,6 @@ import urllib import xml.sax import uuid import boto -import boto.utils from boto import handler from boto.connection import AWSQueryConnection from boto.resultset import ResultSet @@ -150,7 +149,7 @@ class FPSConnection(AWSQueryConnection): params['callerReference'] = str(uuid.uuid4()) parts = '' - for k in sorted(params.keys()): + for k in sorted(params): parts += "&%s=%s" % (k, urllib.quote(params[k], '~')) canonical = '\n'.join(['GET', @@ -162,7 +161,7 @@ class FPSConnection(AWSQueryConnection): params["signature"] = signature urlsuffix = '' - for k in sorted(params.keys()): + for k in sorted(params): urlsuffix += "&%s=%s" % (k, urllib.quote(params[k], '~')) urlsuffix = urlsuffix[1:] # strip the first & @@ -195,7 +194,7 @@ class FPSConnection(AWSQueryConnection): params['callerReference'] = str(uuid.uuid4()) parts = '' - for k in sorted(params.keys()): + for k in sorted(params): parts += "&%s=%s" % (k, urllib.quote(params[k], '~')) canonical = '\n'.join(['GET', @@ -207,7 +206,7 @@ class FPSConnection(AWSQueryConnection): params["signature"] = signature urlsuffix = '' - for k in sorted(params.keys()): + for k in sorted(params): urlsuffix += "&%s=%s" % (k, urllib.quote(params[k], '~')) urlsuffix = urlsuffix[1:] # strip the first & diff --git a/boto/gs/key.py b/boto/gs/key.py index c2442514..44e4c08c 100644 --- a/boto/gs/key.py +++ b/boto/gs/key.py @@ -20,9 +20,9 @@ # IN THE SOFTWARE. 
import os -import StringIO from boto.exception import BotoClientError from boto.s3.key import Key as S3Key +import boto.compat as compat class Key(S3Key): @@ -348,9 +348,9 @@ class Key(S3Key): param, if present, will be used as the MD5 values of the file. Otherwise, the checksum will be computed. """ - if isinstance(s, unicode): + if isinstance(s, compat.text_type): s = s.encode("utf-8") - fp = StringIO.StringIO(s) + fp = compat.StringIO(s) r = self.set_contents_from_file(fp, headers, replace, cb, num_cb, policy, md5) fp.close() diff --git a/boto/gs/resumable_upload_handler.py b/boto/gs/resumable_upload_handler.py index 81ae9204..2e4d65a8 100644 --- a/boto/gs/resumable_upload_handler.py +++ b/boto/gs/resumable_upload_handler.py @@ -21,19 +21,18 @@ import cgi import errno -import httplib import os import random import re import socket import time -import urlparse import boto from boto import config from boto.connection import AWSAuthConnection from boto.exception import InvalidUriError from boto.exception import ResumableTransferDisposition from boto.exception import ResumableUploadException +import boto.compat as compat """ Handler for Google Cloud Storage resumable uploads. See @@ -55,7 +54,7 @@ save the state needed to allow retrying later, in a separate process class ResumableUploadHandler(object): BUFFER_SIZE = 8192 - RETRYABLE_EXCEPTIONS = (httplib.HTTPException, IOError, socket.error, + RETRYABLE_EXCEPTIONS = (compat.httplib.HTTPException, IOError, socket.error, socket.gaierror) # (start, end) response indicating server has nothing (upload protocol uses @@ -142,7 +141,7 @@ class ResumableUploadHandler(object): Raises InvalidUriError if URI is syntactically invalid. 
""" - parse_result = urlparse.urlparse(uri) + parse_result = compat.urlparse.urlparse(uri) if (parse_result.scheme.lower() not in ['http', 'https'] or not parse_result.netloc): raise InvalidUriError('Invalid tracker URI (%s)' % uri) @@ -233,7 +232,7 @@ class ResumableUploadHandler(object): 'Couldn\'t parse upload server state query response (%s)' % str(resp.getheaders()), ResumableTransferDisposition.START_OVER) if conn.debug >= 1: - print 'Server has: Range: %d - %d.' % (server_start, server_end) + print('Server has: Range: %d - %d.' % (server_start, server_end)) return (server_start, server_end) def _start_new_resumable_upload(self, key, headers=None): @@ -244,7 +243,7 @@ class ResumableUploadHandler(object): """ conn = key.bucket.connection if conn.debug >= 1: - print 'Starting new resumable upload.' + print('Starting new resumable upload.') self.server_has_bytes = 0 # Start a new resumable upload by sending a POST request with an @@ -393,10 +392,10 @@ class ResumableUploadHandler(object): self.server_has_bytes = server_start key=key if conn.debug >= 1: - print 'Resuming transfer.' + print('Resuming transfer.') except ResumableUploadException as e: if conn.debug >= 1: - print 'Unable to resume transfer (%s).' % e.message + print('Unable to resume transfer (%s).' % e.message) self._start_new_resumable_upload(key, headers) else: self._start_new_resumable_upload(key, headers) @@ -457,7 +456,7 @@ class ResumableUploadHandler(object): change some of the file and not realize they have inconsistent data. """ if key.bucket.connection.debug >= 1: - print 'Checking md5 against etag.' + print('Checking md5 against etag.') if key.md5 != etag.strip('"\''): # Call key.open_read() before attempting to delete the # (incorrect-content) key, so we perform that request on a @@ -532,7 +531,7 @@ class ResumableUploadHandler(object): self._remove_tracker_file() self._check_final_md5(key, etag) if debug >= 1: - print 'Resumable upload complete.' 
+ print('Resumable upload complete.') return except self.RETRYABLE_EXCEPTIONS as e: if debug >= 1: @@ -581,7 +580,7 @@ class ResumableUploadHandler(object): # Use binary exponential backoff to desynchronize client requests sleep_time_secs = random.random() * (2**progress_less_iterations) if debug >= 1: - print ('Got retryable failure (%d progress-less in a row).\n' - 'Sleeping %3.1f seconds before re-trying' % - (progress_less_iterations, sleep_time_secs)) + print('Got retryable failure (%d progress-less in a row).\n' + 'Sleeping %3.1f seconds before re-trying' % + (progress_less_iterations, sleep_time_secs)) time.sleep(sleep_time_secs) diff --git a/boto/https_connection.py b/boto/https_connection.py index d7a3f3ac..a45c82ea 100644 --- a/boto/https_connection.py +++ b/boto/https_connection.py @@ -19,14 +19,14 @@ """Extensions to allow HTTPS requests with SSL certificate validation.""" -import httplib +import boto.compat as compat import re import socket import ssl import boto -class InvalidCertificateException(httplib.HTTPException): +class InvalidCertificateException(compat.httplib.HTTPException): """Raised when a certificate is provided with an invalid hostname.""" def __init__(self, host, cert, reason): @@ -36,7 +36,7 @@ class InvalidCertificateException(httplib.HTTPException): host: The hostname the connection was made to. cert: The SSL certificate (as a dictionary) the host returned. 
""" - httplib.HTTPException.__init__(self) + compat.httplib.HTTPException.__init__(self) self.host = host self.cert = cert self.reason = reason @@ -79,10 +79,10 @@ def ValidateCertificateHostname(cert, hostname): return False -class CertValidatingHTTPSConnection(httplib.HTTPConnection): +class CertValidatingHTTPSConnection(compat.httplib.HTTPConnection): """An HTTPConnection that connects over SSL and validates certificates.""" - default_port = httplib.HTTPS_PORT + default_port = compat.httplib.HTTPS_PORT def __init__(self, host, port=None, key_file=None, cert_file=None, ca_certs=None, strict=None, **kwargs): @@ -98,7 +98,7 @@ class CertValidatingHTTPSConnection(httplib.HTTPConnection): strict: When true, causes BadStatusLine to be raised if the status line can't be parsed as a valid HTTP/1.0 or 1.1 status line. """ - httplib.HTTPConnection.__init__(self, host, port, strict, **kwargs) + compat.httplib.HTTPConnection.__init__(self, host, port, strict, **kwargs) self.key_file = key_file self.cert_file = cert_file self.ca_certs = ca_certs diff --git a/boto/manage/cmdshell.py b/boto/manage/cmdshell.py index dfff36d2..6ae7911a 100644 --- a/boto/manage/cmdshell.py +++ b/boto/manage/cmdshell.py @@ -24,10 +24,10 @@ import boto import os import time import shutil -import StringIO import paramiko import socket import subprocess +import boto.compat as compat class SSHClient(object): @@ -57,21 +57,21 @@ class SSHClient(object): except socket.error as err: (value, message) = err.args if value == 61 or value == 111: - print 'SSH Connection refused, will retry in 5 seconds' + print('SSH Connection refused, will retry in 5 seconds') time.sleep(5) retry += 1 else: raise except paramiko.BadHostKeyException: - print "%s has an entry in ~/.ssh/known_hosts and it doesn't match" % self.server.hostname - print 'Edit that file to remove the entry and then hit return to try again' + print("%s has an entry in ~/.ssh/known_hosts and it doesn't match" % self.server.hostname) + print('Edit 
that file to remove the entry and then hit return to try again') raw_input('Hit Enter when ready') retry += 1 except EOFError: - print 'Unexpected Error from SSH Connection, retry in 5 seconds' + print('Unexpected Error from SSH Connection, retry in 5 seconds') time.sleep(5) retry += 1 - print 'Could not establish SSH connection' + print('Could not establish SSH connection') def open_sftp(self): return self._ssh_client.open_sftp() @@ -178,7 +178,7 @@ class LocalClient(object): def run(self): boto.log.info('running:%s' % self.command) - log_fp = StringIO.StringIO() + log_fp = compat.StringIO() process = subprocess.Popen(self.command, shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) while process.poll() == None: diff --git a/boto/manage/propget.py b/boto/manage/propget.py index 45b2ff22..57032029 100644 --- a/boto/manage/propget.py +++ b/boto/manage/propget.py @@ -38,7 +38,7 @@ def get(prop, choices=None): value = choices[i-1] if isinstance(value, tuple): value = value[0] - print '[%d] %s' % (i, value) + print('[%d] %s' % (i, value)) value = raw_input('%s [%d-%d]: ' % (prompt, min, max)) try: int_value = int(value) @@ -47,18 +47,18 @@ def get(prop, choices=None): value = value[1] valid = True except ValueError: - print '%s is not a valid choice' % value + print('%s is not a valid choice' % value) except IndexError: - print '%s is not within the range[%d-%d]' % (min, max) + print('%s is not within the range[%d-%d]' % (min, max)) else: value = raw_input('%s: ' % prompt) try: value = prop.validate(value) if prop.empty(value) and prop.required: - print 'A value is required' + print('A value is required') else: valid = True except: - print 'Invalid value: %s' % value + print('Invalid value: %s' % value) return value diff --git a/boto/manage/server.py b/boto/manage/server.py index 1dcff42d..67260c65 100644 --- a/boto/manage/server.py +++ b/boto/manage/server.py @@ -32,9 +32,10 @@ from boto.sdb.db.property import StringProperty, 
IntegerProperty, BooleanPropert from boto.manage import propget from boto.ec2.zone import Zone from boto.ec2.keypair import KeyPair -import os, time, StringIO +import os, time from contextlib import closing from boto.exception import EC2ResponseError +import boto.compat as compat InstanceTypes = ['m1.small', 'm1.large', 'm1.xlarge', 'c1.medium', 'c1.xlarge', @@ -49,7 +50,7 @@ class Bundler(object): self.ssh_client = SSHClient(server, uname=uname) def copy_x509(self, key_file, cert_file): - print '\tcopying cert and pk over to /mnt directory on server' + print('\tcopying cert and pk over to /mnt directory on server') self.ssh_client.open_sftp() path, name = os.path.split(key_file) self.remote_key_file = '/mnt/%s' % name @@ -57,7 +58,7 @@ class Bundler(object): path, name = os.path.split(cert_file) self.remote_cert_file = '/mnt/%s' % name self.ssh_client.put_file(cert_file, self.remote_cert_file) - print '...complete!' + print('...complete!') def bundle_image(self, prefix, size, ssh_key): command = "" @@ -103,7 +104,7 @@ class Bundler(object): ssh_key = self.server.get_ssh_key_file() self.copy_x509(key_file, cert_file) if not fp: - fp = StringIO.StringIO() + fp = compat.StringIO() fp.write('sudo mv %s /mnt/boto.cfg; ' % BotoConfigPath) fp.write('mv ~/.ssh/authorized_keys /mnt/authorized_keys; ') if clear_history: @@ -115,13 +116,13 @@ class Bundler(object): fp.write('sudo mv /mnt/boto.cfg %s; ' % BotoConfigPath) fp.write('mv /mnt/authorized_keys ~/.ssh/authorized_keys') command = fp.getvalue() - print 'running the following command on the remote server:' - print command + print('running the following command on the remote server:') + print(command) t = self.ssh_client.run(command) - print '\t%s' % t[0] - print '\t%s' % t[1] - print '...complete!' - print 'registering image...' 
+ print('\t%s' % t[0]) + print('\t%s' % t[1]) + print('...complete!') + print('registering image...') self.image_id = self.server.ec2.register_image(name=prefix, image_location='%s/%s.manifest.xml' % (bucket, prefix)) return self.image_id @@ -137,7 +138,7 @@ class CommandLineGetter(object): def get_region(self, params): region = params.get('region', None) - if isinstance(region, str) or isinstance(region, unicode): + if isinstance(region, compat.string_types): region = boto.ec2.get_region(region) params['region'] = region if not region: @@ -189,7 +190,7 @@ class CommandLineGetter(object): def get_group(self, params): group = params.get('group', None) - if isinstance(group, str) or isinstance(group, unicode): + if isinstance(group, compat.string_types): group_list = self.ec2.get_all_security_groups() for g in group_list: if g.name == group: @@ -202,7 +203,7 @@ class CommandLineGetter(object): def get_key(self, params): keypair = params.get('keypair', None) - if isinstance(keypair, str) or isinstance(keypair, unicode): + if isinstance(keypair, compat.string_types): key_list = self.ec2.get_all_key_pairs() for k in key_list: if k.name == keypair: @@ -305,7 +306,7 @@ class Server(Model): # deal with possibly passed in logical volume: if logical_volume != None: cfg.set('EBS', 'logical_volume_name', logical_volume.name) - cfg_fp = StringIO.StringIO() + cfg_fp = compat.StringIO() cfg.write(cfg_fp) # deal with the possibility that zone and/or keypair are strings read from the config file: if isinstance(zone, Zone): @@ -325,14 +326,14 @@ class Server(Model): instances = reservation.instances if elastic_ip != None and instances.__len__() > 0: instance = instances[0] - print 'Waiting for instance to start so we can set its elastic IP address...' + print('Waiting for instance to start so we can set its elastic IP address...') # Sometimes we get a message from ec2 that says that the instance does not exist. 
# Hopefully the following delay will giv eec2 enough time to get to a stable state: time.sleep(5) while instance.update() != 'running': time.sleep(1) instance.use_ip(elastic_ip) - print 'set the elastic IP of the first instance to %s' % elastic_ip + print('set the elastic IP of the first instance to %s' % elastic_ip) for instance in instances: s = cls() s.ec2 = ec2 diff --git a/boto/manage/task.py b/boto/manage/task.py index 2f9d7d00..39b21f48 100644 --- a/boto/manage/task.py +++ b/boto/manage/task.py @@ -23,7 +23,8 @@ import boto from boto.sdb.db.property import StringProperty, DateTimeProperty, IntegerProperty from boto.sdb.db.model import Model -import datetime, subprocess, StringIO, time +import boto.compat as compat +import datetime, subprocess, time def check_hour(val): if val == '*': @@ -100,7 +101,7 @@ class Task(Model): def _run(self, msg, vtimeout): boto.log.info('Task[%s] - running:%s' % (self.name, self.command)) - log_fp = StringIO.StringIO() + log_fp = compat.StringIO() process = subprocess.Popen(self.command, shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) nsecs = 5 diff --git a/boto/manage/test_manage.py b/boto/manage/test_manage.py index e0b032a9..a8c188c3 100644 --- a/boto/manage/test_manage.py +++ b/boto/manage/test_manage.py @@ -2,33 +2,33 @@ from boto.manage.server import Server from boto.manage.volume import Volume import time -print '--> Creating New Volume' +print('--> Creating New Volume') volume = Volume.create() -print volume +print(volume) -print '--> Creating New Server' +print('--> Creating New Server') server_list = Server.create() server = server_list[0] -print server +print(server) -print '----> Waiting for Server to start up' +print('----> Waiting for Server to start up') while server.status != 'running': - print '*' + print('*') time.sleep(10) -print '----> Server is running' +print('----> Server is running') -print '--> Run "df -k" on Server' +print('--> Run "df -k" on Server') status = 
server.run('df -k') -print status[1] +print(status[1]) -print '--> Now run volume.make_ready to make the volume ready to use on server' +print('--> Now run volume.make_ready to make the volume ready to use on server') volume.make_ready(server) -print '--> Run "df -k" on Server' +print('--> Run "df -k" on Server') status = server.run('df -k') -print status[1] +print(status[1]) -print '--> Do an "ls -al" on the new filesystem' +print('--> Do an "ls -al" on the new filesystem') status = server.run('ls -al %s' % volume.mount_point) -print status[1] +print(status[1]) diff --git a/boto/manage/volume.py b/boto/manage/volume.py index cbbdb606..baabbbb4 100644 --- a/boto/manage/volume.py +++ b/boto/manage/volume.py @@ -199,7 +199,7 @@ class Volume(Model): def attach(self, server=None): if self.attachment_state == 'attached': - print 'already attached' + print('already attached') return None if server: self.server = server @@ -210,7 +210,7 @@ class Volume(Model): def detach(self, force=False): state = self.attachment_state if state == 'available' or state == None or state == 'detaching': - print 'already detached' + print('already detached') return None ec2 = self.get_ec2_connection() ec2.detach_volume(self.volume_id, self.server.instance_id, self.device, force) @@ -353,9 +353,9 @@ class Volume(Model): day=now.day, tzinfo=now.tzinfo) # Keep the first snapshot from each day of the previous week one_week = datetime.timedelta(days=7, seconds=60*60) - print midnight-one_week, midnight + print(midnight-one_week, midnight) previous_week = self.get_snapshot_range(snaps, midnight-one_week, midnight) - print previous_week + print(previous_week) if not previous_week: return snaps current_day = None diff --git a/boto/mashups/server.py b/boto/mashups/server.py index 6cea106c..d6de4238 100644 --- a/boto/mashups/server.py +++ b/boto/mashups/server.py @@ -23,7 +23,6 @@ High-level abstraction of an EC2 server """ import boto -import boto.utils from boto.mashups.iobject import IObject from 
boto.pyami.config import Config, BotoConfigPath from boto.mashups.interactive import interactive_shell diff --git a/boto/mturk/connection.py b/boto/mturk/connection.py index e422275b..5c357c50 100644 --- a/boto/mturk/connection.py +++ b/boto/mturk/connection.py @@ -31,6 +31,7 @@ from boto.connection import AWSQueryConnection from boto.exception import EC2ResponseError from boto.resultset import ResultSet from boto.mturk.question import QuestionForm, ExternalQuestion +import boto.compat as compat class MTurkRequestError(EC2ResponseError): "Error for MTurk Requests" @@ -756,7 +757,7 @@ class MTurkConnection(AWSQueryConnection): keywords = ', '.join(keywords) if type(keywords) is str: final_keywords = keywords - elif type(keywords) is unicode: + elif type(keywords) is compat.text_type: final_keywords = keywords.encode('utf-8') elif keywords is None: final_keywords = "" diff --git a/boto/provider.py b/boto/provider.py index e9a73975..4a231c7d 100644 --- a/boto/provider.py +++ b/boto/provider.py @@ -33,6 +33,7 @@ from boto.gs.acl import ACL from boto.gs.acl import CannedACLStrings as CannedGSACLStrings from boto.s3.acl import CannedACLStrings as CannedS3ACLStrings from boto.s3.acl import Policy +import boto.compat as compat HEADER_PREFIX_KEY = 'header_prefix' METADATA_PREFIX_KEY = 'metadata_prefix' @@ -187,10 +188,6 @@ class Provider(object): self.secret_key = os.environ[secret_key_name.upper()] elif config.has_option('Credentials', secret_key_name): self.secret_key = config.get('Credentials', secret_key_name) - if isinstance(self.secret_key, unicode): - # the secret key must be bytes and not unicode to work - # properly with hmac.new (see http://bugs.python.org/issue5285) - self.secret_key = str(self.secret_key) def configure_headers(self): header_info_map = self.HeaderInfoMap[self.name] diff --git a/boto/pyami/config.py b/boto/pyami/config.py index d75e7910..130700f7 100644 --- a/boto/pyami/config.py +++ b/boto/pyami/config.py @@ -20,10 +20,10 @@ # OUT OF OR IN 
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. # -import StringIO, os, re +import os, re import warnings -import ConfigParser import boto +import boto.compat as compat # If running in Google App Engine there is no "user" and # os.path.expanduser() will fail. Attempt to detect this case and use a @@ -55,10 +55,10 @@ elif 'BOTO_PATH' in os.environ: BotoConfigLocations.append(expanduser(path)) -class Config(ConfigParser.SafeConfigParser): +class Config(compat.configparser.SafeConfigParser): def __init__(self, path=None, fp=None, do_load=True): - ConfigParser.SafeConfigParser.__init__(self, {'working_dir' : '/mnt/pyami', + compat.configparser.SafeConfigParser.__init__(self, {'working_dir' : '/mnt/pyami', 'debug' : '0'}) if do_load: if path: @@ -76,7 +76,7 @@ class Config(ConfigParser.SafeConfigParser): def load_credential_file(self, path): """Load a credential file as is setup like the Java utilities""" - c_data = StringIO.StringIO() + c_data = compat.StringIO() c_data.write("[Credentials]\n") for line in open(path, "r").readlines(): c_data.write(line.replace("AWSAccessKeyId", "aws_access_key_id").replace("AWSSecretKey", "aws_secret_access_key")) @@ -99,7 +99,7 @@ class Config(ConfigParser.SafeConfigParser): Replace any previous value. If the path doesn't exist, create it. Also add the option the the in-memory config. 
""" - config = ConfigParser.SafeConfigParser() + config = compat.configparser.SafeConfigParser() config.read(path) if not config.has_section(section): config.add_section(section) @@ -143,21 +143,21 @@ class Config(ConfigParser.SafeConfigParser): def get(self, section, name, default=None): try: - val = ConfigParser.SafeConfigParser.get(self, section, name) + val = compat.configparser.SafeConfigParser.get(self, section, name) except: val = default return val def getint(self, section, name, default=0): try: - val = ConfigParser.SafeConfigParser.getint(self, section, name) + val = compat.configparser.SafeConfigParser.getint(self, section, name) except: val = int(default) return val def getfloat(self, section, name, default=0.0): try: - val = ConfigParser.SafeConfigParser.getfloat(self, section, name) + val = compat.configparser.SafeConfigParser.getfloat(self, section, name) except: val = float(default) return val @@ -180,13 +180,13 @@ class Config(ConfigParser.SafeConfigParser): self.set(section, name, 'false') def dump(self): - s = StringIO.StringIO() + s = compat.StringIO() self.write(s) - print s.getvalue() + print(s.getvalue()) def dump_safe(self, fp=None): if not fp: - fp = StringIO.StringIO() + fp = compat.StringIO() for section in self.sections(): fp.write('[%s]\n' % section) for option in self.options(section): diff --git a/boto/pyami/launch_ami.py b/boto/pyami/launch_ami.py index 243d56d2..4b8d962f 100755 --- a/boto/pyami/launch_ami.py +++ b/boto/pyami/launch_ami.py @@ -68,7 +68,7 @@ SYNOPSIS """ def usage(): - print usage_string + print(usage_string) sys.exit() def main(): @@ -124,14 +124,14 @@ def main(): required = ['ami'] for pname in required: if not params.get(pname, None): - print '%s is required' % pname + print('%s is required' % pname) usage() if params['script_name']: # first copy the desired module file to S3 bucket if reload: - print 'Reloading module %s to S3' % params['script_name'] + print('Reloading module %s to S3' % params['script_name']) 
else: - print 'Copying module %s to S3' % params['script_name'] + print('Copying module %s to S3' % params['script_name']) l = imp.find_module(params['script_name']) c = boto.connect_s3() bucket = c.get_bucket(params['script_bucket']) @@ -155,23 +155,23 @@ def main(): r = img.run(user_data=s, key_name=params['keypair'], security_groups=[params['group']], max_count=params.get('num_instances', 1)) - print 'AMI: %s - %s (Started)' % (params['ami'], img.location) - print 'Reservation %s contains the following instances:' % r.id + print('AMI: %s - %s (Started)' % (params['ami'], img.location)) + print('Reservation %s contains the following instances:' % r.id) for i in r.instances: - print '\t%s' % i.id + print('\t%s' % i.id) if wait: running = False while not running: time.sleep(30) [i.update() for i in r.instances] status = [i.state for i in r.instances] - print status + print(status) if status.count('running') == len(r.instances): running = True for i in r.instances: - print 'Instance: %s' % i.ami_launch_index - print 'Public DNS Name: %s' % i.public_dns_name - print 'Private DNS Name: %s' % i.private_dns_name + print('Instance: %s' % i.ami_launch_index) + print('Public DNS Name: %s' % i.public_dns_name) + print('Private DNS Name: %s' % i.private_dns_name) if __name__ == "__main__": main() diff --git a/boto/rds/__init__.py b/boto/rds/__init__.py index a400ffd3..c739879d 100644 --- a/boto/rds/__init__.py +++ b/boto/rds/__init__.py @@ -20,7 +20,6 @@ # IN THE SOFTWARE. # -import boto.utils import urllib from boto.connection import AWSQueryConnection from boto.rds.dbinstance import DBInstance diff --git a/boto/rds/parametergroup.py b/boto/rds/parametergroup.py index 108f943f..e973467c 100644 --- a/boto/rds/parametergroup.py +++ b/boto/rds/parametergroup.py @@ -19,6 +19,9 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. 
+import boto.compat as compat + + class ParameterGroup(dict): def __init__(self, connection=None): @@ -133,7 +136,7 @@ class Parameter(object): d[prefix+'ApplyMethod'] = self.apply_method def _set_string_value(self, value): - if not isinstance(value, str) or isinstance(value, unicode): + if not isinstance(value, compat.string_types): raise ValueError('value must be of type str') if self.allowed_values: choices = self.allowed_values.split(',') @@ -142,21 +145,7 @@ class Parameter(object): self._value = value def _set_integer_value(self, value): - if isinstance(value, str) or isinstance(value, unicode): - value = int(value) - if isinstance(value, int) or isinstance(value, long): - if self.allowed_values: - min, max = self.allowed_values.split('-') - if value < int(min) or value > int(max): - raise ValueError('range is %s' % self.allowed_values) - self._value = value - else: - raise ValueError('value must be integer') - - def _set_boolean_value(self, value): - if isinstance(value, bool): - self._value = value - elif isinstance(value, str) or isinstance(value, unicode): + if isinstance(value, compat.string_types): if value.lower() == 'true': self._value = True else: diff --git a/boto/roboto/awsqueryrequest.py b/boto/roboto/awsqueryrequest.py index 882a866b..3fdaf07b 100644 --- a/boto/roboto/awsqueryrequest.py +++ b/boto/roboto/awsqueryrequest.py @@ -47,10 +47,10 @@ def boto_except_hook(debugger_flag, debug_flag): else: debugger.post_mortem(tb) elif debug_flag: - print traceback.print_tb(tb) + print(traceback.print_tb(tb)) sys.exit(1) else: - print value + print(value) sys.exit(1) return excepthook @@ -69,7 +69,7 @@ class Line(object): def print_it(self): if not self.printed: - print self.line + print(self.line) self.printed = True class RequiredParamError(boto.exception.BotoClientError): @@ -342,9 +342,9 @@ class AWSQueryRequest(object): def process_standard_options(self, options, args, d): if hasattr(options, 'help_filters') and options.help_filters: - print 
'Available filters:' + print('Available filters:') for filter in self.Filters: - print '%s\t%s' % (filter.name, filter.doc) + print('%s\t%s' % (filter.name, filter.doc)) sys.exit(0) if options.debug: self.args['debug'] = 2 @@ -358,7 +358,7 @@ class AWSQueryRequest(object): self.args['aws_secret_access_key'] = options.secret_key if options.version: # TODO - Where should the version # come from? - print 'version x.xx' + print('version x.xx') exit(0) sys.excepthook = boto_except_hook(options.debugger, options.debug) @@ -453,16 +453,16 @@ class AWSQueryRequest(object): response = self.main() self.cli_formatter(response) except RequiredParamError as e: - print e + print(e) sys.exit(1) except self.ServiceClass.ResponseError as err: - print 'Error(%s): %s' % (err.error_code, err.error_message) + print('Error(%s): %s' % (err.error_code, err.error_message)) sys.exit(1) except boto.roboto.awsqueryservice.NoCredentialsError as err: - print 'Unable to find credentials.' + print('Unable to find credentials.') sys.exit(1) except Exception as e: - print e + print(e) sys.exit(1) def _generic_cli_formatter(self, fmt, data, label=''): diff --git a/boto/roboto/awsqueryservice.py b/boto/roboto/awsqueryservice.py index 0bcdff30..0b3db140 100644 --- a/boto/roboto/awsqueryservice.py +++ b/boto/roboto/awsqueryservice.py @@ -1,11 +1,12 @@ import os -import urlparse import boto import boto.connection import boto.jsonresponse import boto.exception +import boto.compat as compat from . 
import awsqueryrequest + class NoCredentialsError(boto.exception.BotoClientError): def __init__(self): @@ -77,7 +78,7 @@ class AWSQueryService(boto.connection.AWSQueryConnection): value = value.strip() self.args['aws_secret_access_key'] = value else: - print 'Warning: unable to read AWS_CREDENTIAL_FILE' + print('Warning: unable to read AWS_CREDENTIAL_FILE') def check_for_env_url(self): """ @@ -95,7 +96,7 @@ class AWSQueryService(boto.connection.AWSQueryConnection): if not url and self.EnvURL in os.environ: url = os.environ[self.EnvURL] if url: - rslt = urlparse.urlparse(url) + rslt = compat.urlparse.urlparse(url) if 'is_secure' not in self.args: if rslt.scheme == 'https': self.args['is_secure'] = True diff --git a/boto/route53/connection.py b/boto/route53/connection.py index d366efa8..b0bb0775 100644 --- a/boto/route53/connection.py +++ b/boto/route53/connection.py @@ -24,12 +24,12 @@ import xml.sax import time import uuid -import urllib import boto from boto.connection import AWSAuthConnection from boto import handler from boto.resultset import ResultSet import boto.jsonresponse +import boto.compat as compat from . import exception from . import hostedzone @@ -67,9 +67,9 @@ class Route53Connection(AWSAuthConnection): def make_request(self, action, path, headers=None, data='', params=None): if params: pairs = [] - for key, val in params.iteritems(): + for key, val in params.items(): if val is None: continue - pairs.append(key + '=' + urllib.quote(str(val))) + pairs.append(key + '=' + compat.quote(str(val))) path += '?' 
+ '&'.join(pairs) return AWSAuthConnection.make_request(self, action, path, headers, data) diff --git a/boto/s3/bucket.py b/boto/s3/bucket.py index 44acb4b8..685b0b3c 100644 --- a/boto/s3/bucket.py +++ b/boto/s3/bucket.py @@ -38,11 +38,11 @@ from boto.s3.bucketlistresultset import VersionedBucketListResultSet from boto.s3.bucketlistresultset import MultiPartUploadListResultSet from boto.s3.lifecycle import Lifecycle from boto.s3.bucketlogging import BucketLogging +import boto.compat as compat import boto.jsonresponse import boto.utils import xml.sax import xml.sax.saxutils -import StringIO import urllib import re import base64 @@ -291,7 +291,7 @@ class Bucket(object): k = k.replace('_', '-') if k == 'maxkeys': k = 'max-keys' - if isinstance(v, unicode): + if isinstance(v, compat.text_type): v = v.encode('utf-8') if v is not None and v != '': l.append('%s=%s' % (urllib.quote(k), urllib.quote(str(v)))) @@ -484,10 +484,10 @@ class Bucket(object): query_args = 'delete' def delete_keys2(hdrs): hdrs = hdrs or {} - data = u"""<?xml version="1.0" encoding="UTF-8"?>""" - data += u"<Delete>" + data = """<?xml version="1.0" encoding="UTF-8"?>""" + data += "<Delete>" if quiet: - data += u"<Quiet>true</Quiet>" + data += "<Quiet>true</Quiet>" count = 0 while count < 1000: try: @@ -515,15 +515,15 @@ class Bucket(object): continue count += 1 #key_name = key_name.decode('utf-8') - data += u"<Object><Key>%s</Key>" % xml.sax.saxutils.escape(key_name) + data += "<Object><Key>%s</Key>" % xml.sax.saxutils.escape(key_name) if version_id: - data += u"<VersionId>%s</VersionId>" % version_id - data += u"</Object>" - data += u"</Delete>" + data += "<VersionId>%s</VersionId>" % version_id + data += "</Object>" + data += "</Delete>" if count <= 0: return False # no more data = data.encode('utf-8') - fp = StringIO.StringIO(data) + fp = compat.StringIO(data) md5 = boto.utils.compute_md5(fp) hdrs['Content-MD5'] = md5[1] hdrs['Content-Type'] = 'text/xml' @@ -1134,7 +1134,7 @@ class Bucket(object): 
:param lifecycle_config: The lifecycle configuration you want to configure for this bucket. """ - fp = StringIO.StringIO(lifecycle_config.to_xml()) + fp = compat.StringIO(lifecycle_config.to_xml()) md5 = boto.utils.compute_md5(fp) if headers is None: headers = {} diff --git a/boto/s3/bucketlogging.py b/boto/s3/bucketlogging.py index 9e3c050d..33a39228 100644 --- a/boto/s3/bucketlogging.py +++ b/boto/s3/bucketlogging.py @@ -20,7 +20,7 @@ # IN THE SOFTWARE. import xml.sax.saxutils -from acl import Grant +from .acl import Grant class BucketLogging: @@ -66,18 +66,18 @@ class BucketLogging: def to_xml(self): # caller is responsible to encode to utf-8 - s = u'<?xml version="1.0" encoding="UTF-8"?>' - s += u'<BucketLoggingStatus xmlns="http://doc.s3.amazonaws.com/2006-03-01">' + s = '<?xml version="1.0" encoding="UTF-8"?>' + s += '<BucketLoggingStatus xmlns="http://doc.s3.amazonaws.com/2006-03-01">' if self.target is not None: - s += u'<LoggingEnabled>' - s += u'<TargetBucket>%s</TargetBucket>' % self.target + s += '<LoggingEnabled>' + s += '<TargetBucket>%s</TargetBucket>' % self.target prefix = self.prefix or '' - s += u'<TargetPrefix>%s</TargetPrefix>' % xml.sax.saxutils.escape(prefix) + s += '<TargetPrefix>%s</TargetPrefix>' % xml.sax.saxutils.escape(prefix) if self.grants: s += '<TargetGrants>' for grant in self.grants: s += grant.to_xml() s += '</TargetGrants>' - s += u'</LoggingEnabled>' - s += u'</BucketLoggingStatus>' + s += '</LoggingEnabled>' + s += '</BucketLoggingStatus>' return s diff --git a/boto/s3/connection.py b/boto/s3/connection.py index afdc280f..dfe053b0 100644 --- a/boto/s3/connection.py +++ b/boto/s3/connection.py @@ -22,7 +22,7 @@ # IN THE SOFTWARE. 
import xml.sax -import urllib, base64 +import base64 import time import boto.utils from boto.connection import AWSAuthConnection @@ -31,6 +31,7 @@ from boto.s3.bucket import Bucket from boto.s3.key import Key from boto.resultset import ResultSet from boto.exception import BotoClientError +import boto.compat as compat def check_lowercase_bucketname(n): """ @@ -85,11 +86,11 @@ class _CallingFormat(object): path = '' if bucket != '': path = '/' + bucket - return path + '/%s' % urllib.quote(key) + return path + '/%s' % compat.quote(key) def build_path_base(self, bucket, key=''): key = boto.utils.get_utf8_value(key) - return '/%s' % urllib.quote(key) + return '/%s' % compat.quote(key) class SubdomainCallingFormat(_CallingFormat): @@ -113,7 +114,7 @@ class OrdinaryCallingFormat(_CallingFormat): path_base = '/' if bucket: path_base += "%s/" % bucket - return path_base + urllib.quote(key) + return path_base + compat.quote(key) class ProtocolIndependentOrdinaryCallingFormat(OrdinaryCallingFormat): @@ -315,14 +316,14 @@ class S3Connection(AWSAuthConnection): c_string = boto.utils.canonical_string(method, auth_path, headers, expires, self.provider) b64_hmac = self._auth_handler.sign_string(c_string) - encoded_canonical = urllib.quote_plus(b64_hmac) + encoded_canonical = compat.quote_plus(b64_hmac) self.calling_format.build_path_base(bucket, key) if query_auth: query_part = '?' + self.QueryString % (encoded_canonical, expires, self.aws_access_key_id) # The response headers must also be GET parameters in the URL. 
headers.update(response_headers) - hdrs = ['%s=%s'%(n, urllib.quote(v)) for n, v in headers.items()] + hdrs = ['%s=%s'%(n, compat.quote(v)) for n, v in headers.items()] q_str = '&'.join(hdrs) if q_str: query_part += '&' + q_str diff --git a/boto/s3/key.py b/boto/s3/key.py index 4a9a8296..3b247d77 100644 --- a/boto/s3/key.py +++ b/boto/s3/key.py @@ -23,8 +23,7 @@ import mimetypes import os import re -import rfc822 -import StringIO +import email import base64 import math import urllib @@ -33,11 +32,7 @@ from boto.exception import BotoClientError from boto.provider import Provider from boto.s3.user import User from boto import UserAgent -from boto.utils import compute_md5 -try: - from hashlib import md5 -except ImportError: - from md5 import md5 +import boto.compat as compat class Key(object): @@ -107,7 +102,7 @@ class Key(object): """ import binascii digest = binascii.unhexlify(md5_hexdigest) - base64md5 = base64.encodestring(digest) + base64md5 = base64.b64encode(digest) if base64md5[-1] == '\n': base64md5 = base64md5[0:-1] return (md5_hexdigest, base64md5) @@ -523,7 +518,7 @@ class Key(object): http_conn.endheaders() if chunked_transfer and not self.base64md5: # MD5 for the stream has to be calculated on the fly. - m = md5() + m = compat.md5() else: m = None @@ -682,7 +677,7 @@ class Key(object): as the first element and the base64 encoded version of the plain digest as the second element. """ - tup = compute_md5(fp, size=size) + tup = boto.utils.compute_md5(fp, size=size) # Returned values are MD5 hash, base64 encoded MD5 hash, and data size. # The internal implementation of compute_md5() needs to return the # data size but we don't want to return that value to the external @@ -1060,9 +1055,9 @@ class Key(object): will be stored in an encrypted form while at rest in S3. 
""" - if isinstance(s, unicode): + if isinstance(s, compat.text_type): s = s.encode("utf-8") - fp = StringIO.StringIO(s) + fp = compat.StringIO(s) r = self.set_contents_from_file(fp, headers, replace, cb, num_cb, policy, md5, reduced_redundancy, encrypt_key=encrypt_key) @@ -1118,7 +1113,7 @@ class Key(object): query_args.append('torrent') m = None else: - m = md5() + m = compat.md5() # If a version_id is passed in, use that. If not, check to see # if the Key object has an explicit version_id and, if so, use that. # Otherwise, don't pass a version_id query param. @@ -1313,8 +1308,8 @@ class Key(object): # if last_modified date was sent from s3, try to set file's timestamp if self.last_modified != None: try: - modified_tuple = rfc822.parsedate_tz(self.last_modified) - modified_stamp = int(rfc822.mktime_tz(modified_tuple)) + modified_tuple = email.utils.parsedate_tz(self.last_modified) + modified_stamp = int(email.utils.mktime_tz(modified_tuple)) os.utime(fp.name, (modified_stamp, modified_stamp)) except Exception: pass @@ -1360,7 +1355,7 @@ class Key(object): :rtype: string :returns: The contents of the file as a string """ - fp = StringIO.StringIO() + fp = compat.StringIO() self.get_contents_to_file(fp, headers, cb, num_cb, torrent=torrent, version_id=version_id, response_headers=response_headers) diff --git a/boto/s3/resumable_download_handler.py b/boto/s3/resumable_download_handler.py index b5a54ba9..d653cb8c 100644 --- a/boto/s3/resumable_download_handler.py +++ b/boto/s3/resumable_download_handler.py @@ -20,7 +20,6 @@ # IN THE SOFTWARE. 
import errno -import httplib import os import re import socket @@ -88,7 +87,7 @@ class ResumableDownloadHandler(object): ETAG_REGEX = '([a-z0-9]{32})\n' - RETRYABLE_EXCEPTIONS = (httplib.HTTPException, IOError, socket.error, + RETRYABLE_EXCEPTIONS = (compat.httplib.HTTPException, IOError, socket.error, socket.gaierror) def __init__(self, tracker_file_name=None, num_retries=None): @@ -189,17 +188,17 @@ class ResumableDownloadHandler(object): key.size), ResumableTransferDisposition.ABORT) elif cur_file_size == key.size: if key.bucket.connection.debug >= 1: - print 'Download complete.' + print('Download complete.') return if key.bucket.connection.debug >= 1: - print 'Resuming download.' + print('Resuming download.') headers = headers.copy() headers['Range'] = 'bytes=%d-%d' % (cur_file_size, key.size - 1) cb = ByteTranslatingCallbackHandler(cb, cur_file_size).call self.download_start_point = cur_file_size else: if key.bucket.connection.debug >= 1: - print 'Starting new resumable download.' + print('Starting new resumable download.') self._save_tracker_info(key) self.download_start_point = 0 # Truncate the file, in case a new resumable download is being @@ -271,7 +270,7 @@ class ResumableDownloadHandler(object): # non-resumable downloads, this call was removed. Checksum # validation of file contents should be done by the caller. if debug >= 1: - print 'Resumable download complete.' + print('Resumable download complete.') return except self.RETRYABLE_EXCEPTIONS as e: if debug >= 1: @@ -324,7 +323,7 @@ class ResumableDownloadHandler(object): # which we can safely ignore. try: key.close() - except httplib.IncompleteRead: + except compat.httplib.IncompleteRead: pass sleep_time_secs = 2**progress_less_iterations diff --git a/boto/sdb/connection.py b/boto/sdb/connection.py index dedc3a83..367e901c 100644 --- a/boto/sdb/connection.py +++ b/boto/sdb/connection.py @@ -49,7 +49,6 @@ class ItemThread(threading.Thread): :ivar list items: A list of items retrieved. 
Starts as empty list. """ threading.Thread.__init__(self, name=name) - #print 'starting %s with %d items' % (name, len(item_names)) self.domain_name = domain_name self.conn = SDBConnection() self.item_names = item_names @@ -139,8 +138,7 @@ class SDBConnection(AWSQueryConnection): def _build_name_value_list(self, params, attributes, replace=False, label='Attribute'): - keys = attributes.keys() - keys.sort() + keys = sorted(attributes) i = 1 for key in keys: value = attributes[key] @@ -233,9 +231,9 @@ class SDBConnection(AWSQueryConnection): requests made on this specific connection instance. It is by no means an account-wide estimate. """ - print 'Total Usage: %f compute seconds' % self.box_usage + print('Total Usage: %f compute seconds' % self.box_usage) cost = self.box_usage * 0.14 - print 'Approximate Cost: $%f' % cost + print('Approximate Cost: $%f' % cost) def get_domain(self, domain_name, validate=True): """ diff --git a/boto/sdb/db/blob.py b/boto/sdb/db/blob.py index b50794c9..54954def 100644 --- a/boto/sdb/db/blob.py +++ b/boto/sdb/db/blob.py @@ -19,6 +19,8 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. 
+import boto.compat as compat + class Blob(object): """Blob object""" @@ -29,7 +31,7 @@ class Blob(object): @property def file(self): - from StringIO import StringIO + from boto.compat import StringIO if self._file: f = self._file else: diff --git a/boto/sdb/db/manager/xmlmanager.py b/boto/sdb/db/manager/xmlmanager.py index 0f1a881b..5240cdd2 100644 --- a/boto/sdb/db/manager/xmlmanager.py +++ b/boto/sdb/db/manager/xmlmanager.py @@ -202,7 +202,7 @@ class XMLManager(object): self.auth_header = None if self.db_user: import base64 - base64string = base64.encodestring('%s:%s' % (self.db_user, self.db_passwd))[:-1] + base64string = base64.b64encode(('%s:%s' % (self.db_user, self.db_passwd)).encode('utf-8')).decode('ascii') authheader = "Basic %s" % base64string self.auth_header = authheader diff --git a/boto/sdb/domain.py b/boto/sdb/domain.py index d4faf046..c8764ff9 100644 --- a/boto/sdb/domain.py +++ b/boto/sdb/domain.py @@ -23,6 +23,8 @@ Represents an SDB Domain """ from boto.sdb.queryresultset import SelectResultSet +import boto.compat as compat + class Domain: @@ -229,40 +231,40 @@ class Domain: def delete_item(self, item): self.delete_attributes(item.name) - def to_xml(self, f=None): - """Get this domain as an XML DOM Document - :param f: Optional File to dump directly to - :type f: File or Stream - - :return: File object where the XML has been dumped to - :rtype: file - """ - if not f: - from tempfile import TemporaryFile - f = TemporaryFile() - print >> f, '<?xml version="1.0" encoding="UTF-8"?>' - print >> f, '<Domain id="%s">' % self.name - for item in self: - print >> f, '\t<Item id="%s">' % item.name - for k in item: - print >> f, '\t\t<attribute id="%s">' % k - values = item[k] - if not isinstance(values, list): - values = [values] - for value in values: - print >> f, '\t\t\t<value><![CDATA[', - if isinstance(value, unicode): - value = value.encode('utf-8', 'replace') - else: - value = unicode(value, errors='replace').encode('utf-8', 'replace') - f.write(value) - print >> f, 
']]></value>' - print >> f, '\t\t</attribute>' - print >> f, '\t</Item>' - print >> f, '</Domain>' - f.flush() - f.seek(0) - return f + # def to_xml(self, f=None): + # """Get this domain as an XML DOM Document + # :param f: Optional File to dump directly to + # :type f: File or Stream + + # :return: File object where the XML has been dumped to + # :rtype: file + # """ + # if not f: + # from tempfile import TemporaryFile + # f = TemporaryFile() + # print >> f, '<?xml version="1.0" encoding="UTF-8"?>' + # print >> f, '<Domain id="%s">' % self.name + # for item in self: + # print >> f, '\t<Item id="%s">' % item.name + # for k in item: + # print >> f, '\t\t<attribute id="%s">' % k + # values = item[k] + # if not isinstance(values, list): + # values = [values] + # for value in values: + # print >> f, '\t\t\t<value><![CDATA[', + # if isinstance(value, compat.text_types): + # value = value.encode('utf-8', 'replace') + # else: + # value = unicode(value, errors='replace').encode('utf-8', 'replace') + # f.write(value) + # print >> f, ']]></value>' + # print >> f, '\t\t</attribute>' + # print >> f, '\t</Item>' + # print >> f, '</Domain>' + # f.flush() + # f.seek(0) + # return f def from_xml(self, doc): @@ -370,8 +372,8 @@ class UploaderThread(Thread): try: self.db.batch_put_attributes(self.items) except: - print "Exception using batch put, trying regular put instead" + print("Exception using batch put, trying regular put instead") for item_name in self.items: self.db.put_attributes(item_name, self.items[item_name]) - print ".", + print(".",) sys.stdout.flush() diff --git a/boto/services/result.py b/boto/services/result.py index 8117673f..98c67dcf 100644 --- a/boto/services/result.py +++ b/boto/services/result.py @@ -57,8 +57,7 @@ class ResultProcessor: self.latest_time = end_time def log_message(self, msg, path): - keys = msg.keys() - keys.sort() + keys = sorted(msg) if not self.log_fp: self.log_fp = open(os.path.join(path, self.LogFileName), 'a') line = ','.join(keys) diff 
--git a/boto/ses/connection.py b/boto/ses/connection.py index c9ae41d4..ba58a62f 100644 --- a/boto/ses/connection.py +++ b/boto/ses/connection.py @@ -29,6 +29,7 @@ import boto.jsonresponse import urllib import base64 from boto.ses import exceptions as ses_exceptions +import boto.compat as compat class SESConnection(AWSAuthConnection): @@ -88,7 +89,7 @@ class SESConnection(AWSAuthConnection): params['Action'] = action for k, v in params.items(): - if isinstance(v, unicode): # UTF-8 encode only if it's Unicode + if isinstance(v, compat.text_type): # UTF-8 encode only if it's Unicode params[k] = v.encode('utf-8') response = super(SESConnection, self).make_request( diff --git a/boto/sqs/message.py b/boto/sqs/message.py index cdd21795..76d89c83 100644 --- a/boto/sqs/message.py +++ b/boto/sqs/message.py @@ -64,9 +64,9 @@ in the format in which it would be stored in SQS. """ import base64 -import StringIO from boto.sqs.attributes import Attributes from boto.exception import SQSDecodeError +import boto.compat as compat class RawMessage: """ @@ -179,7 +179,7 @@ class MHMessage(Message): def decode(self, value): try: msg = {} - fp = StringIO.StringIO(value) + fp = compat.StringIO(value) line = fp.readline() while line: delim = line.find(':') diff --git a/boto/sqs/queue.py b/boto/sqs/queue.py index 0ecb1f2c..eee88f31 100644 --- a/boto/sqs/queue.py +++ b/boto/sqs/queue.py @@ -23,8 +23,8 @@ Represents an SQS Queue """ -import urlparse from boto.sqs.message import Message +import boto.compat as compat class Queue: @@ -40,7 +40,7 @@ class Queue: def _id(self): if self.url: - val = urlparse.urlparse(self.url)[2] + val = compat.urlparse(self.url)[2] else: val = self.url return val @@ -48,7 +48,7 @@ class Queue: def _name(self): if self.url: - val = urlparse.urlparse(self.url)[2].split('/')[2] + val = compat.urlparse(self.url)[2].split('/')[2] else: val = self.url return val @@ -398,7 +398,7 @@ class Queue: m = Message(self, body) self.write(m) n += 1 - print
'writing message %d' % n + print('writing message %d' % n) body = '' else: body = body + l diff --git a/boto/utils.py b/boto/utils.py index aeaa5d89..c1545623 100644 --- a/boto/utils.py +++ b/boto/utils.py @@ -38,11 +38,8 @@ Some handy utility functions used by several classes. """ -import urllib -import urllib2 import imp import subprocess -import StringIO import time import logging.handlers import boto @@ -50,17 +47,9 @@ import boto.provider import tempfile import smtplib import datetime -from email.MIMEMultipart import MIMEMultipart -from email.MIMEBase import MIMEBase -from email.MIMEText import MIMEText -from email.Utils import formatdate -from email import Encoders import gzip import base64 -try: - from hashlib import md5 -except ImportError: - from md5 import md5 +from . import compat try: @@ -83,7 +72,7 @@ def unquote_v(nv): if len(nv) == 1: return nv else: - return (nv[0], urllib.unquote(nv[1])) + return (nv[0], compat.unquote(nv[1])) # generates the aws canonical string for the given parameters def canonical_string(method, path, headers, expires=None, @@ -112,8 +101,7 @@ def canonical_string(method, path, headers, expires=None, if expires: interesting_headers['date'] = str(expires) - sorted_header_keys = interesting_headers.keys() - sorted_header_keys.sort() + sorted_header_keys = sorted(interesting_headers) buf = "%s\n" % method for key in sorted_header_keys: @@ -162,7 +150,7 @@ def get_aws_metadata(headers, provider=None): metadata = {} for hkey in headers.keys(): if hkey.lower().startswith(metadata_prefix): - val = urllib.unquote_plus(headers[hkey]) + val = compat.unquote_plus(headers[hkey]) try: metadata[hkey[len(metadata_prefix):]] = unicode(val, 'utf-8') except UnicodeDecodeError: @@ -173,10 +161,10 @@ def get_aws_metadata(headers, provider=None): def retry_url(url, retry_on_404=True, num_retries=10): for i in range(0, num_retries): try: - req = urllib2.Request(url) - resp = urllib2.urlopen(req) + req = compat.Request(url) + resp = 
compat.urlopen(req) return resp.read() - except urllib2.HTTPError as e: + except compat.HTTPError as e: # in 2.6 you use getcode(), in 2.5 and earlier you use code if hasattr(e, 'getcode'): code = e.getcode() @@ -275,7 +263,7 @@ def update_dme(username, password, dme_id, ip_address): """ dme_url = 'https://www.dnsmadeeasy.com/servlet/updateip' dme_url += '?username=%s&password=%s&id=%s&ip=%s' - s = urllib2.urlopen(dme_url % (username, password, dme_id, ip_address)) + s = compat.urlopen(dme_url % (username, password, dme_id, ip_address)) return s.read() def fetch_file(uri, file=None, username=None, password=None): @@ -297,12 +285,12 @@ def fetch_file(uri, file=None, username=None, password=None): key.get_contents_to_file(file) else: if username and password: - passman = urllib2.HTTPPasswordMgrWithDefaultRealm() + passman = compat.HTTPPasswordMgrWithDefaultRealm() passman.add_password(None, uri, username, password) - authhandler = urllib2.HTTPBasicAuthHandler(passman) - opener = urllib2.build_opener(authhandler) - urllib2.install_opener(opener) - s = urllib2.urlopen(uri) + authhandler = compat.HTTPBasicAuthHandler(passman) + opener = compat.build_opener(authhandler) + compat.install_opener(opener) + s = compat.urlopen(uri) file.write(s.read()) file.seek(0) except: @@ -316,7 +304,7 @@ class ShellCommand(object): def __init__(self, command, wait=True, fail_fast=False, cwd = None): self.exit_code = 0 self.command = command - self.log_fp = StringIO.StringIO() + self.log_fp = compat.StringIO() self.wait = wait self.fail_fast = fail_fast self.run(cwd = cwd) @@ -397,7 +385,7 @@ class AuthSMTPHandler(logging.handlers.SMTPHandler): self.fromaddr, ','.join(self.toaddrs), self.getSubject(record), - formatdate(), msg) + compat.formatdate(), msg) smtp.sendmail(self.fromaddr, self.toaddrs, msg) smtp.quit() except (KeyboardInterrupt, SystemExit): @@ -564,20 +552,20 @@ def notify(subject, body=None, html_body=None, to_string=None, attachments=None, if to_string: try: from_string = 
boto.config.get_value('Notification', 'smtp_from', 'boto') - msg = MIMEMultipart() + msg = compat.MIMEMultipart() msg['From'] = from_string msg['Reply-To'] = from_string msg['To'] = to_string - msg['Date'] = formatdate(localtime=True) + msg['Date'] = compat.formatdate(localtime=True) msg['Subject'] = subject if body: - msg.attach(MIMEText(body)) + msg.attach(compat.MIMEText(body)) if html_body: - part = MIMEBase('text', 'html') + part = compat.MIMEBase('text', 'html') part.set_payload(html_body) - Encoders.encode_base64(part) + compat.Encoders.encode_base64(part) msg.attach(part) for part in attachments: @@ -606,9 +594,9 @@ def notify(subject, body=None, html_body=None, to_string=None, attachments=None, boto.log.exception('notify failed') def get_utf8_value(value): - if not isinstance(value, str) and not isinstance(value, unicode): + if not isinstance(value, compat.string_types) and not isinstance(value, compat.text_type): value = str(value) - if isinstance(value, unicode): + if isinstance(value, compat.text_type): return value.encode('utf-8') else: return value @@ -650,23 +638,23 @@ def write_mime_multipart(content, compress=False, deftype='text/plain', delimite :return: Final mime multipart :rtype: str: """ - wrapper = MIMEMultipart() + wrapper = compat.MIMEMultipart() for name,con in content: definite_type = guess_mime_type(con, deftype) maintype, subtype = definite_type.split('/', 1) if maintype == 'text': - mime_con = MIMEText(con, _subtype=subtype) + mime_con = compat.MIMEText(con, _subtype=subtype) else: - mime_con = MIMEBase(maintype, subtype) + mime_con = compat.MIMEBase(maintype, subtype) mime_con.set_payload(con) # Encode the payload using Base64 - Encoders.encode_base64(mime_con) + compat.Encoders.encode_base64(mime_con) mime_con.add_header('Content-Disposition', 'attachment', filename=name) wrapper.attach(mime_con) rcontent = wrapper.as_string() if compress: - buf = StringIO.StringIO() + buf = compat.StringIO() gz = gzip.GzipFile(mode='wb', 
fileobj=buf) try: gz.write(rcontent) @@ -728,7 +716,7 @@ def compute_md5(fp, buf_size=8192, size=None): plain digest as the second element and the data size as the third element. """ - m = md5() + m = compat.md5() spos = fp.tell() if size and size < buf_size: s = fp.read(size) @@ -745,7 +733,7 @@ def compute_md5(fp, buf_size=8192, size=None): else: s = fp.read(buf_size) hex_md5 = m.hexdigest() - base64md5 = base64.encodestring(m.digest()) + base64md5 = base64.b64encode(m.digest()) if base64md5[-1] == '\n': base64md5 = base64md5[0:-1] # data_size based on bytes read. diff --git a/tests/autoscale/test_connection.py b/tests/autoscale/test_connection.py index a650dcef..e65cbc84 100644 --- a/tests/autoscale/test_connection.py +++ b/tests/autoscale/test_connection.py @@ -43,7 +43,7 @@ class AutoscaleConnectionTest(unittest.TestCase): # have any autoscale groups to introspect. It's useful, however, to # catch simple errors - print '--- running %s tests ---' % self.__class__.__name__ + print('--- running %s tests ---' % self.__class__.__name__) c = AutoScaleConnection() self.assertTrue(repr(c).startswith('AutoScaleConnection')) @@ -161,6 +161,6 @@ class AutoscaleConnectionTest(unittest.TestCase): assert not found - print '--- tests completed ---' + print('--- tests completed ---') diff --git a/tests/cloudfront/test_signed_urls.py b/tests/cloudfront/test_signed_urls.py index 671ed7f3..79ac3f90 100644 --- a/tests/cloudfront/test_signed_urls.py +++ b/tests/cloudfront/test_signed_urls.py @@ -6,6 +6,7 @@ except ImportError: import json from textwrap import dedent from boto.cloudfront.distribution import Distribution +import boto.compat as compat class CloudfrontSignedUrlsTest(unittest.TestCase): def setUp(self): @@ -103,7 +104,7 @@ class CloudfrontSignedUrlsTest(unittest.TestCase): "v0pYdWJkflDKJ3xIu7lbwRpSkG98NBlgPi4ZJpRRnVX4kXAJK6td" "Nx6FucDB7OVqzcxkxHsGFd8VCG1BkC-Afh9~lOCMIYHIaiOB6~5j" "t9w2EOwi6sIIqrg_") - unicode_policy = unicode(self.canned_policy) + unicode_policy = 
compat.unicode(self.canned_policy) sig = self.dist._sign_string(unicode_policy, private_key_string=self.pk_str) encoded_sig = self.dist._url_base64_encode(sig) self.assertEqual(expected, encoded_sig) @@ -141,16 +142,16 @@ class CloudfrontSignedUrlsTest(unittest.TestCase): policy = self.dist._canned_policy(url, expires) policy = json.loads(policy) - self.assertEqual(1, len(policy.keys())) + self.assertEqual(1, len(list(policy.keys()))) statements = policy["Statement"] self.assertEqual(1, len(statements)) statement = statements[0] resource = statement["Resource"] self.assertEqual(url, resource) condition = statement["Condition"] - self.assertEqual(1, len(condition.keys())) + self.assertEqual(1, len(list(condition.keys()))) date_less_than = condition["DateLessThan"] - self.assertEqual(1, len(date_less_than.keys())) + self.assertEqual(1, len(list(date_less_than.keys()))) aws_epoch_time = date_less_than["AWS:EpochTime"] self.assertEqual(expires, aws_epoch_time) @@ -164,16 +165,16 @@ class CloudfrontSignedUrlsTest(unittest.TestCase): policy = self.dist._custom_policy(url, expires=expires) policy = json.loads(policy) - self.assertEqual(1, len(policy.keys())) + self.assertEqual(1, len(list(policy.keys()))) statements = policy["Statement"] self.assertEqual(1, len(statements)) statement = statements[0] resource = statement["Resource"] self.assertEqual(url, resource) condition = statement["Condition"] - self.assertEqual(1, len(condition.keys())) + self.assertEqual(1, len(list(condition.keys()))) date_less_than = condition["DateLessThan"] - self.assertEqual(1, len(date_less_than.keys())) + self.assertEqual(1, len(list(date_less_than.keys()))) aws_epoch_time = date_less_than["AWS:EpochTime"] self.assertEqual(expires, aws_epoch_time) @@ -187,16 +188,16 @@ class CloudfrontSignedUrlsTest(unittest.TestCase): policy = self.dist._custom_policy(url, valid_after=valid_after) policy = json.loads(policy) - self.assertEqual(1, len(policy.keys())) + self.assertEqual(1, 
len(list(policy.keys()))) statements = policy["Statement"] self.assertEqual(1, len(statements)) statement = statements[0] resource = statement["Resource"] self.assertEqual(url, resource) condition = statement["Condition"] - self.assertEqual(1, len(condition.keys())) + self.assertEqual(1, len(list(condition.keys()))) date_greater_than = condition["DateGreaterThan"] - self.assertEqual(1, len(date_greater_than.keys())) + self.assertEqual(1, len(list(date_greater_than.keys()))) aws_epoch_time = date_greater_than["AWS:EpochTime"] self.assertEqual(valid_after, aws_epoch_time) @@ -210,16 +211,16 @@ class CloudfrontSignedUrlsTest(unittest.TestCase): policy = self.dist._custom_policy(url, ip_address=ip_range) policy = json.loads(policy) - self.assertEqual(1, len(policy.keys())) + self.assertEqual(1, len(list(policy.keys()))) statements = policy["Statement"] self.assertEqual(1, len(statements)) statement = statements[0] resource = statement["Resource"] self.assertEqual(url, resource) condition = statement["Condition"] - self.assertEqual(1, len(condition.keys())) + self.assertEqual(1, len(list(condition.keys()))) ip_address = condition["IpAddress"] - self.assertEqual(1, len(ip_address.keys())) + self.assertEqual(1, len(list(ip_address.keys()))) source_ip = ip_address["AWS:SourceIp"] self.assertEqual("%s/32" % ip_range, source_ip) @@ -233,16 +234,16 @@ class CloudfrontSignedUrlsTest(unittest.TestCase): policy = self.dist._custom_policy(url, ip_address=ip_range) policy = json.loads(policy) - self.assertEqual(1, len(policy.keys())) + self.assertEqual(1, len(list(policy.keys()))) statements = policy["Statement"] self.assertEqual(1, len(statements)) statement = statements[0] resource = statement["Resource"] self.assertEqual(url, resource) condition = statement["Condition"] - self.assertEqual(1, len(condition.keys())) + self.assertEqual(1, len(list(condition.keys()))) ip_address = condition["IpAddress"] - self.assertEqual(1, len(ip_address.keys())) + self.assertEqual(1, 
len(list(ip_address.keys()))) source_ip = ip_address["AWS:SourceIp"] self.assertEqual(ip_range, source_ip) @@ -260,27 +261,27 @@ class CloudfrontSignedUrlsTest(unittest.TestCase): ip_address=ip_range) policy = json.loads(policy) - self.assertEqual(1, len(policy.keys())) + self.assertEqual(1, len(list(policy.keys()))) statements = policy["Statement"] self.assertEqual(1, len(statements)) statement = statements[0] resource = statement["Resource"] self.assertEqual(url, resource) condition = statement["Condition"] - self.assertEqual(3, len(condition.keys())) + self.assertEqual(3, len(list(condition.keys()))) #check expires condition date_less_than = condition["DateLessThan"] - self.assertEqual(1, len(date_less_than.keys())) + self.assertEqual(1, len(list(date_less_than.keys()))) aws_epoch_time = date_less_than["AWS:EpochTime"] self.assertEqual(expires, aws_epoch_time) #check valid_after condition date_greater_than = condition["DateGreaterThan"] - self.assertEqual(1, len(date_greater_than.keys())) + self.assertEqual(1, len(list(date_greater_than.keys()))) aws_epoch_time = date_greater_than["AWS:EpochTime"] self.assertEqual(valid_after, aws_epoch_time) #check source ip address condition ip_address = condition["IpAddress"] - self.assertEqual(1, len(ip_address.keys())) + self.assertEqual(1, len(list(ip_address.keys()))) source_ip = ip_address["AWS:SourceIp"] self.assertEqual(ip_range, source_ip) diff --git a/tests/db/test_lists.py b/tests/db/test_lists.py index d9c76392..5b70810c 100644 --- a/tests/db/test_lists.py +++ b/tests/db/test_lists.py @@ -91,7 +91,7 @@ class TestLists(object): t.put() self.objs.append(t) time.sleep(3) - print SimpleListModel.all().filter("strs !=", "Fizzle").get_query() + print(SimpleListModel.all().filter("strs !=", "Fizzle").get_query()) for tt in SimpleListModel.all().filter("strs !=", "Fizzle"): - print tt.strs + print(tt.strs) assert("Fizzle" not in tt.strs) diff --git a/tests/db/test_query.py b/tests/db/test_query.py index 047bf873..7e9ce5f7 
100644 --- a/tests/db/test_query.py +++ b/tests/db/test_query.py @@ -148,5 +148,5 @@ class TestQuerying(object): """Test with a "like" expression""" query = SimpleModel.all() query.filter("strs like", "%oo%") - print query.get_query() + print(query.get_query()) assert(query.count() == 1) diff --git a/tests/db/test_sequence.py b/tests/db/test_sequence.py index 35f4b352..df1ab3a8 100644 --- a/tests/db/test_sequence.py +++ b/tests/db/test_sequence.py @@ -60,11 +60,11 @@ class TestDBHandler(object): s = Sequence() self.sequences.append(s) assert(s.val == 0) - assert(s.next() == 1) - assert(s.next() == 2) + assert(next(s) == 1) + assert(next(s) == 2) s2 = Sequence(s.id) assert(s2.val == 2) - assert(s.next() == 3) + assert(next(s) == 3) assert(s.val == 3) assert(s2.val == 3) @@ -73,7 +73,7 @@ class TestDBHandler(object): s = Sequence(fnc=increment_string) self.sequences.append(s) assert(s.val == "A") - assert(s.next() == "B") + assert(next(s) == "B") def test_fib(self): """Test the fibonacci sequence generator""" @@ -93,7 +93,7 @@ class TestDBHandler(object): assert(s.val == 1) # Just check the first few numbers in the sequence for v in [1,2,3,5,8,13,21,34,55,89,144]: - assert(s.next() == v) + assert(next(s) == v) assert(s.val == v) assert(s2.val == v) # it shouldn't matter which reference we use since it's garunteed to be consistent @@ -103,7 +103,7 @@ class TestDBHandler(object): s = Sequence(fnc=increment_string) self.sequences.append(s) assert(s.val == "A") - assert(s.next() == "B") + assert(next(s) == "B") s.val = "Z" assert(s.val == "Z") - assert(s.next() == "AA") + assert(next(s) == "AA") diff --git a/tests/devpay/test_s3.py b/tests/devpay/test_s3.py index bb91125b..c0e79a04 100644 --- a/tests/devpay/test_s3.py +++ b/tests/devpay/test_s3.py @@ -27,7 +27,7 @@ Some unit tests for the S3Connection import time import os -import urllib +import boto.compat as compat from boto.s3.connection import S3Connection from boto.exception import S3PermissionsError @@ -37,7 +37,7 
@@ from boto.exception import S3PermissionsError AMAZON_USER_TOKEN = '{UserToken}...your token here...' DEVPAY_HEADERS = { 'x-amz-security-token': AMAZON_USER_TOKEN } -print '--- running S3Connection tests (DevPay) ---' +print('--- running S3Connection tests (DevPay) ---') c = S3Connection() # create a new, empty bucket bucket_name = 'test-%d' % int(time.time()) @@ -66,10 +66,10 @@ assert s1 == fp.read(), 'corrupted file' fp.close() # test generated URLs url = k.generate_url(3600, headers=DEVPAY_HEADERS) -file = urllib.urlopen(url) +file = compat.urlopen(url) assert s1 == file.read(), 'invalid URL %s' % url url = k.generate_url(3600, force_http=True, headers=DEVPAY_HEADERS) -file = urllib.urlopen(url) +file = compat.urlopen(url) assert s1 == file.read(), 'invalid URL %s' % url bucket.delete_key(k, headers=DEVPAY_HEADERS) # test a few variations on get_all_keys - first load some data @@ -174,4 +174,4 @@ for k in all: c.delete_bucket(bucket, headers=DEVPAY_HEADERS) -print '--- tests completed ---' +print('--- tests completed ---') diff --git a/tests/dynamodb/test_layer1.py b/tests/dynamodb/test_layer1.py index 00845987..97173ced 100644 --- a/tests/dynamodb/test_layer1.py +++ b/tests/dynamodb/test_layer1.py @@ -35,7 +35,7 @@ json_doc = """{"access_key": "ASIAIV7R2NUUJ6SB7GKQ", "secret_key": "eIfijGxJlejH class DynamoDBLayer1Test (unittest.TestCase): def test_layer1_basic(self): - print '--- running DynamoDB Layer1 tests ---' + print('--- running DynamoDB Layer1 tests ---') # Create a Layer1 connection with an expired set of # credentials to test the automatic renewal of tokens @@ -205,5 +205,5 @@ class DynamoDBLayer1Test (unittest.TestCase): result = c.delete_table(table_name) assert result['TableDescription']['TableStatus'] == 'DELETING' - print '--- tests completed ---' + print('--- tests completed ---') diff --git a/tests/dynamodb/test_layer2.py b/tests/dynamodb/test_layer2.py index dde78f1f..bf897434 100644 --- a/tests/dynamodb/test_layer2.py +++ 
b/tests/dynamodb/test_layer2.py @@ -31,11 +31,12 @@ from boto.dynamodb.exceptions import DynamoDBKeyNotFoundError, DynamoDBItemError from boto.dynamodb.layer2 import Layer2 from boto.dynamodb.types import get_dynamodb_type from boto.dynamodb.condition import * +import boto.compat as compat class DynamoDBLayer2Test (unittest.TestCase): def test_layer2_basic(self): - print '--- running Amazon DynamoDB Layer2 tests ---' + print('--- running Amazon DynamoDB Layer2 tests ---') c = Layer2() # First create a schema for the table @@ -146,7 +147,8 @@ class DynamoDBLayer2Test (unittest.TestCase): assert item1_copy.range_key == item1.range_key for attr_name in item1_copy: val = item1_copy[attr_name] - if isinstance(val, (int, long, float, basestring)): + if isinstance(val, (compat.integer_types, float, + compat.string_types)): assert val == item1[attr_name] # Try retrieving only select attributes @@ -349,4 +351,4 @@ class DynamoDBLayer2Test (unittest.TestCase): assert table.status == 'DELETING' assert table2.status == 'DELETING' - print '--- tests completed ---' + print('--- tests completed ---') diff --git a/tests/ec2/cloudwatch/test_connection.py b/tests/ec2/cloudwatch/test_connection.py index 0479d650..75d6a6ce 100644 --- a/tests/ec2/cloudwatch/test_connection.py +++ b/tests/ec2/cloudwatch/test_connection.py @@ -247,10 +247,10 @@ class CloudWatchConnectionTest(unittest.TestCase): alarms = c.describe_alarms() self.assertEquals(alarms[0].name, 'FancyAlarm') self.assertEquals(alarms[0].comparison, '<') - self.assertEquals(alarms[0].dimensions, {u'Job': [u'ANiceCronJob']}) + self.assertEquals(alarms[0].dimensions, {'Job': ['ANiceCronJob']}) self.assertEquals(alarms[1].name, 'SuperFancyAlarm') self.assertEquals(alarms[1].comparison, '>') - self.assertEquals(alarms[1].dimensions, {u'Job': [u'ABadCronJob']}) + self.assertEquals(alarms[1].dimensions, {'Job': ['ABadCronJob']}) if __name__ == '__main__': unittest.main() diff --git a/tests/ec2/test_connection.py 
b/tests/ec2/test_connection.py index bb1c5c89..4d6bdf5d 100644 --- a/tests/ec2/test_connection.py +++ b/tests/ec2/test_connection.py @@ -37,7 +37,7 @@ class EC2ConnectionTest (unittest.TestCase): # this is my user_id, if you want to run these tests you should # replace this with yours or they won't work user_id = '963068290131' - print '--- running EC2Connection tests ---' + print('--- running EC2Connection tests ---') c = EC2Connection() # get list of private AMI's rs = c.get_all_images(owners=[user_id]) @@ -110,7 +110,7 @@ class EC2ConnectionTest (unittest.TestCase): reservation = image.run(security_groups=[group.name]) instance = reservation.instances[0] while instance.state != 'running': - print '\tinstance is %s' % instance.state + print('\tinstance is %s' % instance.state) time.sleep(30) instance.update() # instance in now running, try to telnet to port 80 @@ -167,4 +167,4 @@ class EC2ConnectionTest (unittest.TestCase): assert len(l[0].product_codes) == 1 assert l[0].product_codes[0] == demo_paid_ami_product_code - print '--- tests completed ---' + print('--- tests completed ---') diff --git a/tests/emr/test_emr_responses.py b/tests/emr/test_emr_responses.py index 77ec494b..cf6284e4 100644 --- a/tests/emr/test_emr_responses.py +++ b/tests/emr/test_emr_responses.py @@ -329,7 +329,7 @@ class TestEMRResponses(unittest.TestCase): return rs def _assert_fields(self, response, **fields): - for field, expected in fields.items(): + for field, expected in list(fields.items()): actual = getattr(response, field) self.assertEquals(expected, actual, "Field %s: %r != %r" % (field, expected, actual)) diff --git a/tests/mturk/cleanup_tests.py b/tests/mturk/cleanup_tests.py index cee8c28c..787706f2 100644 --- a/tests/mturk/cleanup_tests.py +++ b/tests/mturk/cleanup_tests.py @@ -24,22 +24,22 @@ def cleanup(): is_boto = description_filter('Boto') - print 'getting hits...' 
+ print('getting hits...') all_hits = list(conn.get_all_hits()) is_reviewable = lambda hit: hit.HITStatus == 'Reviewable' is_not_reviewable = lambda hit: not is_reviewable(hit) - hits_to_process = filter(is_boto, all_hits) - hits_to_disable = filter(is_not_reviewable, hits_to_process) - hits_to_dispose = filter(is_reviewable, hits_to_process) - print 'disabling/disposing %d/%d hits' % (len(hits_to_disable), len(hits_to_dispose)) - map(disable_hit, hits_to_disable) - map(dispose_hit, hits_to_dispose) + hits_to_process = list(filter(is_boto, all_hits)) + hits_to_disable = list(filter(is_not_reviewable, hits_to_process)) + hits_to_dispose = list(filter(is_reviewable, hits_to_process)) + print('disabling/disposing %d/%d hits' % (len(hits_to_disable), len(hits_to_dispose))) + list(map(disable_hit, hits_to_disable)) + list(map(dispose_hit, hits_to_dispose)) total_hits = len(all_hits) hits_processed = len(hits_to_process) skipped = total_hits - hits_processed fmt = 'Processed: %(total_hits)d HITs, disabled/disposed: %(hits_processed)d, skipped: %(skipped)d' - print fmt % vars() + print(fmt % vars()) if __name__ == '__main__': cleanup() diff --git a/tests/mturk/create_hit_with_qualifications.py b/tests/mturk/create_hit_with_qualifications.py index 9ef2bc5c..f8c67ec0 100644 --- a/tests/mturk/create_hit_with_qualifications.py +++ b/tests/mturk/create_hit_with_qualifications.py @@ -10,7 +10,7 @@ def test(): qualifications.add(PercentAssignmentsApprovedRequirement(comparator="GreaterThan", integer_value="95")) create_hit_rs = conn.create_hit(question=q, lifetime=60*65,max_assignments=2,title="Boto External Question Test", keywords=keywords,reward = 0.05, duration=60*6,approval_delay=60*60, annotation='An annotation from boto external question test', qualifications=qualifications) assert(create_hit_rs.status == True) - print create_hit_rs.HITTypeId + print(create_hit_rs.HITTypeId) if __name__ == "__main__": test() diff --git a/tests/mturk/selenium_support.py 
b/tests/mturk/selenium_support.py index 1ed760cb..7e51e434 100644 --- a/tests/mturk/selenium_support.py +++ b/tests/mturk/selenium_support.py @@ -1,4 +1,4 @@ -from __future__ import absolute_import + from boto.mturk.test.support import unittest sel_args = ('localhost', 4444, '*chrome', 'https://workersandbox.mturk.com') @@ -6,7 +6,7 @@ sel_args = ('localhost', 4444, '*chrome', 'https://workersandbox.mturk.com') class SeleniumFailed(object): def __init__(self, message): self.message = message - def __nonzero__(self): + def __bool__(self): return False def has_selenium(): diff --git a/tests/s3/mock_storage_service.py b/tests/s3/mock_storage_service.py index 2bd77439..ce267fc5 100644 --- a/tests/s3/mock_storage_service.py +++ b/tests/s3/mock_storage_service.py @@ -235,7 +235,7 @@ class MockBucket(object): del self.keys[key_name] def get_all_keys(self, headers=NOT_IMPL): - return self.keys.itervalues() + return iter(self.keys.values()) def get_key(self, key_name, headers=NOT_IMPL, version_id=NOT_IMPL): # Emulate behavior of boto when get_key called with non-existent key. @@ -251,7 +251,7 @@ class MockBucket(object): # deletions while iterating (e.g., during test cleanup). result = [] key_name_set = set() - for k in self.keys.itervalues(): + for k in self.keys.values(): if k.name.startswith(prefix): k_name_past_prefix = k.name[len(prefix):] if delimiter: @@ -328,7 +328,7 @@ class MockConnection(object): return self.buckets[bucket_name] def get_all_buckets(self, headers=NOT_IMPL): - return self.buckets.itervalues() + return iter(self.buckets.values()) # We only mock a single provider/connection. 
diff --git a/tests/s3/test_bucket.py b/tests/s3/test_bucket.py index 58045efa..c9d386d2 100644 --- a/tests/s3/test_bucket.py +++ b/tests/s3/test_bucket.py @@ -87,7 +87,7 @@ class S3BucketTest (unittest.TestCase): # grant log write perms to target bucket using canned-acl self.bucket.set_acl("log-delivery-write") target_bucket = self.bucket_name - target_prefix = u"jp/ログ/" + target_prefix = "jp/ログ/" # Check existing status is disabled bls = sb.get_logging_status() self.assertEqual(bls.target, None) diff --git a/tests/s3/test_connection.py b/tests/s3/test_connection.py index e9d372e5..020e174e 100644 --- a/tests/s3/test_connection.py +++ b/tests/s3/test_connection.py @@ -27,15 +27,15 @@ Some unit tests for the S3Connection import unittest import time import os -import urllib from boto.s3.connection import S3Connection from boto.s3.bucket import Bucket from boto.exception import S3PermissionsError, S3ResponseError +import boto.compat as compat class S3ConnectionTest (unittest.TestCase): def test_1_basic(self): - print '--- running S3Connection tests ---' + print('--- running S3Connection tests ---') c = S3Connection() # create a new, empty bucket bucket_name = 'test-%d' % int(time.time()) @@ -63,13 +63,13 @@ class S3ConnectionTest (unittest.TestCase): fp.close() # test generated URLs url = k.generate_url(3600) - file = urllib.urlopen(url) + file = compat.urlopen(url) assert s1 == file.read(), 'invalid URL %s' % url url = k.generate_url(3600, force_http=True) - file = urllib.urlopen(url) + file = compat.urlopen(url) assert s1 == file.read(), 'invalid URL %s' % url url = k.generate_url(3600, force_http=True, headers={'x-amz-x-token' : 'XYZ'}) - file = urllib.urlopen(url) + file = compat.urlopen(url) rh = {'response-content-disposition': 'attachment; filename="foo.txt"'} url = k.generate_url(60, response_headers=rh) assert s1 == file.read(), 'invalid URL %s' % url @@ -119,7 +119,7 @@ class S3ConnectionTest (unittest.TestCase): mdval2 = 'This is the second metadata value' 
k.set_metadata(mdkey2, mdval2) # try a unicode metadata value - mdval3 = u'föö' + mdval3 = 'föö' mdkey3 = 'meta3' k.set_metadata(mdkey3, mdval3) k.set_contents_from_string(s1) @@ -189,7 +189,7 @@ class S3ConnectionTest (unittest.TestCase): # now delete bucket time.sleep(5) c.delete_bucket(bucket) - print '--- tests completed ---' + print('--- tests completed ---') def test_basic_anon(self): auth_con = S3Connection() @@ -201,7 +201,7 @@ class S3ConnectionTest (unittest.TestCase): anon_con = S3Connection(anon=True) anon_bucket = Bucket(anon_con, bucket_name) try: - iter(anon_bucket.list()).next() + next(iter(anon_bucket.list())) self.fail("anon bucket list should fail") except S3ResponseError: pass @@ -209,7 +209,7 @@ class S3ConnectionTest (unittest.TestCase): # give bucket anon user access and anon read again auth_bucket.set_acl('public-read') try: - iter(anon_bucket.list()).next() + next(iter(anon_bucket.list())) self.fail("not expecting contents") except S3ResponseError: self.fail("we should have public-read access.") diff --git a/tests/s3/test_encryption.py b/tests/s3/test_encryption.py index 91ef71c0..d271d8d2 100644 --- a/tests/s3/test_encryption.py +++ b/tests/s3/test_encryption.py @@ -53,7 +53,7 @@ json_policy = """{ class S3EncryptionTest (unittest.TestCase): def test_1_versions(self): - print '--- running S3Encryption tests ---' + print('--- running S3Encryption tests ---') c = S3Connection() # create a new, empty bucket bucket_name = 'encryption-%d' % int(time.time()) @@ -111,4 +111,4 @@ class S3EncryptionTest (unittest.TestCase): # now delete bucket bucket.delete() - print '--- tests completed ---' + print('--- tests completed ---') diff --git a/tests/s3/test_gsconnection.py b/tests/s3/test_gsconnection.py index d4b30723..7d53b555 100644 --- a/tests/s3/test_gsconnection.py +++ b/tests/s3/test_gsconnection.py @@ -41,7 +41,7 @@ class GSConnectionTest (unittest.TestCase): def test_1_basic(self): """basic regression test for Google Cloud Storage""" - print 
'--- running GSConnection tests ---' + print('--- running GSConnection tests ---') c = GSConnection() # create a new, empty bucket bucket_name = 'test-%d' % int(time.time()) @@ -122,7 +122,7 @@ class GSConnectionTest (unittest.TestCase): k.set_metadata(mdkey2, mdval2) # try a unicode metadata value - mdval3 = u'föö' + mdval3 = 'föö' mdkey3 = 'meta3' k.set_metadata(mdkey3, mdval3) k.set_contents_from_string(s1) @@ -340,4 +340,4 @@ class GSConnectionTest (unittest.TestCase): # delete bucket uri.delete_bucket() - print '--- tests completed ---' + print('--- tests completed ---') diff --git a/tests/s3/test_key.py b/tests/s3/test_key.py index 2e823182..7c7e1008 100644 --- a/tests/s3/test_key.py +++ b/tests/s3/test_key.py @@ -28,10 +28,10 @@ Some unit tests for S3 Key import unittest import time -import StringIO from boto.s3.connection import S3Connection from boto.s3.key import Key from boto.exception import S3ResponseError +import boto.compat as compat class S3KeyTest (unittest.TestCase): @@ -48,7 +48,7 @@ class S3KeyTest (unittest.TestCase): def test_set_contents_from_file_dataloss(self): # Create an empty stringio and write to it. content = "abcde" - sfp = StringIO.StringIO() + sfp = compat.StringIO() sfp.write(content) # Try set_contents_from_file() without rewinding sfp k = self.bucket.new_key("k") @@ -66,7 +66,7 @@ class S3KeyTest (unittest.TestCase): self.assertEqual(ks, content) # finally, try with a 0 length string - sfp = StringIO.StringIO() + sfp = compat.StringIO() k = self.bucket.new_key("k") k.set_contents_from_file(sfp) self.assertEqual(k.size, 0) @@ -77,7 +77,7 @@ class S3KeyTest (unittest.TestCase): def test_set_contents_as_file(self): content="01234567890123456789" - sfp = StringIO.StringIO(content) + sfp = compat.StringIO(content) # fp is set at 0 for just opened (for read) files. # set_contents should write full content to key. 
@@ -111,7 +111,7 @@ class S3KeyTest (unittest.TestCase): def test_set_contents_with_md5(self): content="01234567890123456789" - sfp = StringIO.StringIO(content) + sfp = compat.StringIO(content) # fp is set at 0 for just opened (for read) files. # set_contents should write full content to key. @@ -146,7 +146,7 @@ class S3KeyTest (unittest.TestCase): def test_get_contents_with_md5(self): content="01234567890123456789" - sfp = StringIO.StringIO(content) + sfp = compat.StringIO(content) k = self.bucket.new_key("k") k.set_contents_from_file(sfp) @@ -166,7 +166,7 @@ class S3KeyTest (unittest.TestCase): self.my_cb_last = None k = self.bucket.new_key("k") k.BufferSize = 2 - sfp = StringIO.StringIO("") + sfp = compat.StringIO("") k.set_contents_from_file(sfp, cb=callback, num_cb=10) self.assertEqual(self.my_cb_cnt, 1) self.assertEqual(self.my_cb_last, 0) @@ -180,7 +180,7 @@ class S3KeyTest (unittest.TestCase): self.assertEqual(self.my_cb_last, 0) content="01234567890123456789" - sfp = StringIO.StringIO(content) + sfp = compat.StringIO(content) # expect 2 calls due start/finish self.my_cb_cnt = 0 diff --git a/tests/s3/test_mfa.py b/tests/s3/test_mfa.py index 3f47e94c..d8d2594f 100644 --- a/tests/s3/test_mfa.py +++ b/tests/s3/test_mfa.py @@ -30,6 +30,7 @@ import time from boto.s3.connection import S3Connection from boto.exception import S3ResponseError from boto.s3.deletemarker import DeleteMarker +import boto.compat as compat class S3MFATest (unittest.TestCase): @@ -45,8 +46,8 @@ class S3MFATest (unittest.TestCase): def test_mfadel(self): # Enable Versioning with MfaDelete - mfa_sn = raw_input('MFA S/N: ') - mfa_code = raw_input('MFA Code: ') + mfa_sn = compat.raw_input('MFA S/N: ') + mfa_code = compat.raw_input('MFA Code: ') self.bucket.configure_versioning(True, mfa_delete=True, mfa_token=(mfa_sn, mfa_code)) # Check enabling mfa worked. 
@@ -73,11 +74,11 @@ class S3MFATest (unittest.TestCase): pass # Now try delete again with the MFA token - mfa_code = raw_input('MFA Code: ') + mfa_code = compat.raw_input('MFA Code: ') self.bucket.delete_key('foobar', version_id=v1, mfa_token=(mfa_sn, mfa_code)) # Next suspend versioning and disable MfaDelete on the bucket - mfa_code = raw_input('MFA Code: ') + mfa_code = compat.raw_input('MFA Code: ') self.bucket.configure_versioning(False, mfa_delete=False, mfa_token=(mfa_sn, mfa_code)) # Lastly, check disabling mfa worked. diff --git a/tests/s3/test_multidelete.py b/tests/s3/test_multidelete.py index f5f922c1..c8472bec 100644 --- a/tests/s3/test_multidelete.py +++ b/tests/s3/test_multidelete.py @@ -85,7 +85,7 @@ class S3MultiDeleteTest (unittest.TestCase): self.assertEqual(len(result.errors), 1) def test_delete_kanji(self): - result = self.bucket.delete_keys([u"漢字", Key(name=u"日本語")]) + result = self.bucket.delete_keys(["漢字", Key(name="日本語")]) self.assertEqual(len(result.deleted), 2) self.assertEqual(len(result.errors), 0) @@ -95,7 +95,7 @@ class S3MultiDeleteTest (unittest.TestCase): self.assertEqual(len(result.errors), 0) def test_delete_kanji_by_list(self): - for key_name in [u"漢字", u"日本語", u"テスト"]: + for key_name in ["漢字", "日本語", "テスト"]: key = self.bucket.new_key(key_name) key.set_contents_from_string('this is a test') result = self.bucket.delete_keys(self.bucket.list()) diff --git a/tests/s3/test_multipart.py b/tests/s3/test_multipart.py index 8e93a6d8..6e6683f4 100644 --- a/tests/s3/test_multipart.py +++ b/tests/s3/test_multipart.py @@ -34,8 +34,8 @@ Some unit tests for the S3 MultiPartUpload import unittest import time -import StringIO from boto.s3.connection import S3Connection +import boto.compat as compat class S3MultiPartUploadTest (unittest.TestCase): @@ -50,14 +50,14 @@ class S3MultiPartUploadTest (unittest.TestCase): self.bucket.delete() def test_abort(self): - key_name = u"テスト" + key_name = "テスト" mpu = 
self.bucket.initiate_multipart_upload(key_name) mpu.cancel_upload() def test_complete_ascii(self): key_name = "test" mpu = self.bucket.initiate_multipart_upload(key_name) - fp = StringIO.StringIO("small file") + fp = compat.StringIO("small file") mpu.upload_part_from_file(fp, part_num=1) fp.close() cmpu = mpu.complete_upload() @@ -65,9 +65,9 @@ class S3MultiPartUploadTest (unittest.TestCase): self.assertNotEqual(cmpu.etag, None) def test_complete_japanese(self): - key_name = u"テスト" + key_name = "テスト" mpu = self.bucket.initiate_multipart_upload(key_name) - fp = StringIO.StringIO("small file") + fp = compat.StringIO("small file") mpu.upload_part_from_file(fp, part_num=1) fp.close() cmpu = mpu.complete_upload() @@ -81,18 +81,18 @@ class S3MultiPartUploadTest (unittest.TestCase): self.assertNotEqual(cmpu.etag, None) def test_list_japanese(self): - key_name = u"テスト" + key_name = "テスト" mpu = self.bucket.initiate_multipart_upload(key_name) rs = self.bucket.list_multipart_uploads() # New bucket, so only one upload expected - lmpu = iter(rs).next() + lmpu = next(iter(rs)) self.assertEqual(lmpu.id, mpu.id) self.assertEqual(lmpu.key_name, key_name) # Abort using the one returned in the list lmpu.cancel_upload() def test_list_multipart_uploads(self): - key_name = u"テスト" + key_name = "テスト" mpus = [] mpus.append(self.bucket.initiate_multipart_upload(key_name)) mpus.append(self.bucket.initiate_multipart_upload(key_name)) @@ -107,7 +107,7 @@ class S3MultiPartUploadTest (unittest.TestCase): def test_four_part_file(self): key_name = "k" contents = "01234567890123456789" - sfp = StringIO.StringIO(contents) + sfp = compat.StringIO(contents) # upload 20 bytes in 4 parts of 5 bytes each mpu = self.bucket.initiate_multipart_upload(key_name) diff --git a/tests/s3/test_pool.py b/tests/s3/test_pool.py index ebb68c85..2e0a77fe 100644 --- a/tests/s3/test_pool.py +++ b/tests/s3/test_pool.py @@ -28,8 +28,6 @@ import boto import time import uuid -from StringIO import StringIO - from threading 
import Thread def spawn(function, *args, **kwargs): @@ -57,7 +55,7 @@ def test_close_connections(): dependencies are added to the test suite. """ - print "Running test_close_connections" + print("Running test_close_connections") # Connect to S3 s3 = boto.connect_s3() @@ -117,9 +115,9 @@ def read_big_object(s3, bucket, name, count): out = WriteAndCount() key.get_contents_to_file(out) if out.size != BIG_SIZE: - print out.size, BIG_SIZE + print(out.size, BIG_SIZE) assert out.size == BIG_SIZE - print " pool size:", s3._pool.size() + print(" pool size:", s3._pool.size()) class LittleQuerier(object): @@ -147,7 +145,7 @@ class LittleQuerier(object): rh = { 'response-content-type' : 'small/' + str(i) } actual = key.get_contents_as_string(response_headers = rh) if expected != actual: - print "AHA:", repr(expected), repr(actual) + print("AHA:", repr(expected), repr(actual)) assert expected == actual count += 1 @@ -193,7 +191,7 @@ def test_reuse_connections(): you can see that it's happening. """ - print "Running test_reuse_connections" + print("Running test_reuse_connections") # Connect to S3 s3 = boto.connect_s3() @@ -207,11 +205,11 @@ def test_reuse_connections(): bucket.new_key(name).set_contents_from_string(str(i)) # Wait, clean the connection pool, and make sure it's empty. - print " waiting for all connections to become stale" + print(" waiting for all connections to become stale") time.sleep(s3._pool.STALE_DURATION + 1) s3._pool.clean() assert s3._pool.size() == 0 - print " pool is empty" + print(" pool is empty") # Create a big object in S3. 
big_name = str(uuid.uuid4()) diff --git a/tests/s3/test_resumable_downloads.py b/tests/s3/test_resumable_downloads.py index c507fe08..9c1da501 100755..100644 --- a/tests/s3/test_resumable_downloads.py +++ b/tests/s3/test_resumable_downloads.py @@ -32,7 +32,6 @@ import random import re import shutil import socket -import StringIO import sys import tempfile import time @@ -45,6 +44,7 @@ from boto.s3.resumable_download_handler import ResumableDownloadHandler from boto.exception import ResumableTransferDisposition from boto.exception import ResumableDownloadException from boto.exception import StorageResponseError +import boto.compat as compat from .cb_test_harnass import CallbackTestHarnass # We don't use the OAuth2 authentication plugin directly; importing it here @@ -108,7 +108,7 @@ class ResumableDownloadTests(unittest.TestCase): string_data = ''.join(buf) uri = cls.src_bucket_uri.clone_replace_name(obj_name) key = uri.new_key(validate=False) - key.set_contents_from_file(StringIO.StringIO(string_data)) + key.set_contents_from_file(compat.StringIO(string_data)) # Set debug on key's connection after creating data, so only the test # runs will show HTTP output (if called passed debug>0). key.bucket.connection.debug = debug @@ -177,8 +177,8 @@ class ResumableDownloadTests(unittest.TestCase): cls.src_bucket_uri.delete_bucket() break except StorageResponseError: - print 'Test bucket (%s) not yet deleted, still trying' % ( - cls.src_bucket_uri.uri) + print('Test bucket (%s) not yet deleted, still trying' % ( + cls.src_bucket_uri.uri)) time.sleep(2) shutil.rmtree(cls.tmp_dir) cls.tmp_dir = tempfile.mkdtemp(prefix=cls.tmpdir_prefix) @@ -465,11 +465,11 @@ if __name__ == '__main__': # don't assume the user has Python 2.7 (which supports classmethods # that do it, with camelCase versions of these names). try: - print 'Setting up %s...' % test_class.get_suite_description() + print('Setting up %s...' 
% test_class.get_suite_description()) test_class.set_up_class(debug) - print 'Running %s...' % test_class.get_suite_description() + print('Running %s...' % test_class.get_suite_description()) unittest.TextTestRunner(verbosity=2).run(suite) finally: - print 'Cleaning up after %s...' % test_class.get_suite_description() + print('Cleaning up after %s...' % test_class.get_suite_description()) test_class.tear_down_class() - print '' + print('') diff --git a/tests/s3/test_resumable_uploads.py b/tests/s3/test_resumable_uploads.py index e9777aba..7cd9c879 100755..100644 --- a/tests/s3/test_resumable_uploads.py +++ b/tests/s3/test_resumable_uploads.py @@ -32,7 +32,6 @@ import random import re import shutil import socket -import StringIO import sys import tempfile import time @@ -45,6 +44,7 @@ from boto.exception import InvalidUriError from boto.exception import ResumableTransferDisposition from boto.exception import ResumableUploadException from boto.exception import StorageResponseError +import boto.compat as compat from .cb_test_harnass import CallbackTestHarnass # We don't use the OAuth2 authentication plugin directly; importing it here @@ -100,7 +100,7 @@ class ResumableUploadTests(unittest.TestCase): for i in range(size): buf.append(str(random.randint(0, 9))) file_as_string = ''.join(buf) - return (file_as_string, StringIO.StringIO(file_as_string)) + return (file_as_string, compat.StringIO(file_as_string)) @classmethod def get_dst_bucket_uri(cls, debug): @@ -192,8 +192,8 @@ class ResumableUploadTests(unittest.TestCase): cls.dst_bucket_uri.delete_bucket() break except StorageResponseError: - print 'Test bucket (%s) not yet deleted, still trying' % ( - cls.dst_bucket_uri.uri) + print('Test bucket (%s) not yet deleted, still trying' % ( + cls.dst_bucket_uri.uri)) time.sleep(2) shutil.rmtree(cls.tmp_dir) cls.tmp_dir = tempfile.mkdtemp(prefix=cls.tmpdir_prefix) @@ -613,11 +613,11 @@ if __name__ == '__main__': # don't assume the user has Python 2.7 (which supports 
classmethods # that do it, with camelCase versions of these names). try: - print 'Setting up %s...' % test_class.get_suite_description() + print('Setting up %s...' % test_class.get_suite_description()) test_class.set_up_class(debug) - print 'Running %s...' % test_class.get_suite_description() + print('Running %s...' % test_class.get_suite_description()) unittest.TextTestRunner(verbosity=2).run(suite) finally: - print 'Cleaning up after %s...' % test_class.get_suite_description() + print('Cleaning up after %s...' % test_class.get_suite_description()) test_class.tear_down_class() - print '' + print('') diff --git a/tests/s3/test_versioning.py b/tests/s3/test_versioning.py index b3e74a5c..8fb63c04 100644 --- a/tests/s3/test_versioning.py +++ b/tests/s3/test_versioning.py @@ -124,7 +124,7 @@ class S3VersionTest (unittest.TestCase): kv1.set_contents_from_string("v1") # read list which should contain latest v1 - listed_kv1 = iter(self.bucket.get_all_versions()).next() + listed_kv1 = next(iter(self.bucket.get_all_versions())) self.assertEqual(listed_kv1.name, key_name) self.assertEqual(listed_kv1.version_id, kv1.version_id) self.assertEqual(listed_kv1.is_latest, True) @@ -135,8 +135,8 @@ class S3VersionTest (unittest.TestCase): # read 2 versions, confirm v2 is latest i = iter(self.bucket.get_all_versions()) - listed_kv2 = i.next() - listed_kv1 = i.next() + listed_kv2 = next(i) + listed_kv1 = next(i) self.assertEqual(listed_kv2.version_id, kv2.version_id) self.assertEqual(listed_kv1.version_id, kv1.version_id) self.assertEqual(listed_kv2.is_latest, True) @@ -145,9 +145,9 @@ class S3VersionTest (unittest.TestCase): # delete key, which creates a delete marker as latest self.bucket.delete_key(key_name) i = iter(self.bucket.get_all_versions()) - listed_kv3 = i.next() - listed_kv2 = i.next() - listed_kv1 = i.next() + listed_kv3 = next(i) + listed_kv2 = next(i) + listed_kv1 = next(i) self.assertNotEqual(listed_kv3.version_id, None) self.assertEqual(listed_kv2.version_id, 
kv2.version_id) self.assertEqual(listed_kv1.version_id, kv1.version_id) diff --git a/tests/sdb/test_connection.py b/tests/sdb/test_connection.py index a834a9df..76ea773a 100644 --- a/tests/sdb/test_connection.py +++ b/tests/sdb/test_connection.py @@ -33,7 +33,7 @@ from boto.exception import SDBResponseError class SDBConnectionTest (unittest.TestCase): def test_1_basic(self): - print '--- running SDBConnection tests ---' + print('--- running SDBConnection tests ---') c = SDBConnection() rs = c.get_all_domains() num_domains = len(rs) @@ -61,7 +61,7 @@ class SDBConnectionTest (unittest.TestCase): # try to get the attributes and see if they match item = domain.get_attributes(item_1, consistent_read=True) - assert len(item.keys()) == len(attrs_1.keys()) + assert len(list(item.keys())) == len(list(attrs_1.keys())) assert item['name1'] == attrs_1['name1'] assert item['name2'] == attrs_1['name2'] @@ -114,5 +114,5 @@ class SDBConnectionTest (unittest.TestCase): stat = c.delete_domain(domain) assert stat - print '--- tests completed ---' + print('--- tests completed ---') diff --git a/tests/sqs/test_connection.py b/tests/sqs/test_connection.py index 83c00734..e1f31d79 100644 --- a/tests/sqs/test_connection.py +++ b/tests/sqs/test_connection.py @@ -34,7 +34,7 @@ from boto.exception import SQSError class SQSConnectionTest (unittest.TestCase): def test_1_basic(self): - print '--- running SQSConnection tests ---' + print('--- running SQSConnection tests ---') c = SQSConnection() rs = c.get_all_queues() num_queues = 0 @@ -132,5 +132,5 @@ class SQSConnectionTest (unittest.TestCase): # now delete that queue and messages c.delete_queue(queue, True) - print '--- tests completed ---' + print('--- tests completed ---') diff --git a/tests/sts/test_session_token.py b/tests/sts/test_session_token.py index 630ebf1c..8628f4e4 100644 --- a/tests/sts/test_session_token.py +++ b/tests/sts/test_session_token.py @@ -34,7 +34,7 @@ from boto.s3.connection import S3Connection class SessionTokenTest 
(unittest.TestCase): def test_session_token(self): - print '--- running Session Token tests ---' + print('--- running Session Token tests ---') c = STSConnection() # Create a session token @@ -61,5 +61,5 @@ class SessionTokenTest (unittest.TestCase): security_token=token.session_token) buckets = s3.get_all_buckets() - print '--- tests completed ---' + print('--- tests completed ---') diff --git a/tests/test.py b/tests/test.py index bb5ef6e3..619c7bf6 100755..100644 --- a/tests/test.py +++ b/tests/test.py @@ -29,31 +29,31 @@ import sys import unittest import getopt -from .sqs.test_connection import SQSConnectionTest -from .s3.test_connection import S3ConnectionTest -from .s3.test_versioning import S3VersionTest -from .s3.test_mfa import S3MFATest -from .s3.test_encryption import S3EncryptionTest -from .s3.test_bucket import S3BucketTest -from .s3.test_key import S3KeyTest -from .s3.test_multidelete import S3MultiDeleteTest -from .s3.test_multipart import S3MultiPartUploadTest -from .s3.test_gsconnection import GSConnectionTest -from .s3.test_https_cert_validation import CertValidationTest -from .ec2.test_connection import EC2ConnectionTest -from .ec2.elb.test_connection import ELBConnectionTest -from .ec2.cloudwatch.test_connection import CloudWatchConnectionTest -from .autoscale.test_connection import AutoscaleConnectionTest -from .sdb.test_connection import SDBConnectionTest -from .cloudfront.test_signed_urls import CloudfrontSignedUrlsTest -from .dynamodb.test_layer1 import DynamoDBLayer1Test -from .dynamodb.test_layer2 import DynamoDBLayer2Test -from .sts.test_session_token import SessionTokenTest +from sqs.test_connection import SQSConnectionTest +from s3.test_connection import S3ConnectionTest +from s3.test_versioning import S3VersionTest +from s3.test_mfa import S3MFATest +from s3.test_encryption import S3EncryptionTest +from s3.test_bucket import S3BucketTest +from s3.test_key import S3KeyTest +from s3.test_multidelete import S3MultiDeleteTest +from 
s3.test_multipart import S3MultiPartUploadTest +from s3.test_gsconnection import GSConnectionTest +from s3.test_https_cert_validation import CertValidationTest +from ec2.test_connection import EC2ConnectionTest +from ec2.elb.test_connection import ELBConnectionTest +from ec2.cloudwatch.test_connection import CloudWatchConnectionTest +from autoscale.test_connection import AutoscaleConnectionTest +from sdb.test_connection import SDBConnectionTest +from cloudfront.test_signed_urls import CloudfrontSignedUrlsTest +from dynamodb.test_layer1 import DynamoDBLayer1Test +from dynamodb.test_layer2 import DynamoDBLayer2Test +from sts.test_session_token import SessionTokenTest def usage(): - print "test.py [-t testsuite] [-v verbosity]" - print " -t run specific testsuite (s3|ssl|s3mfa|gs|sqs|ec2|sdb|dynamodb|dynamodbL1|dynamodbL2|sts|all)" - print " -v verbosity (0|1|2)" + print("test.py [-t testsuite] [-v verbosity]") + print(" -t run specific testsuite (s3|ssl|s3mfa|gs|sqs|ec2|sdb|dynamodb|dynamodbL1|dynamodbL2|sts|all)") + print(" -v verbosity (0|1|2)") def main(): try: |