summaryrefslogtreecommitdiff
path: root/boto
diff options
context:
space:
mode:
Diffstat (limited to 'boto')
-rw-r--r--boto/__init__.py24
-rw-r--r--boto/auth.py8
-rw-r--r--boto/beanstalk/exception.py15
-rw-r--r--boto/beanstalk/layer1.py2
-rw-r--r--boto/cloudformation/connection.py2
-rw-r--r--boto/cloudfront/distribution.py4
-rw-r--r--boto/cloudfront/invalidation.py4
-rw-r--r--boto/cloudsearch/document.py8
-rw-r--r--boto/cloudsearch/domain.py16
-rw-r--r--boto/cloudsearch/layer1.py2
-rw-r--r--boto/cloudsearch/search.py7
-rw-r--r--boto/cloudsearch/sourceattribute.py1
-rw-r--r--boto/cloudsearch2/document.py26
-rw-r--r--boto/cloudsearch2/layer1.py9
-rw-r--r--boto/cloudsearch2/layer2.py3
-rw-r--r--boto/cloudsearch2/optionstatus.py2
-rw-r--r--boto/cloudsearch2/search.py15
-rw-r--r--boto/cloudtrail/layer1.py6
-rw-r--r--boto/compat.py16
-rw-r--r--boto/connection.py19
-rw-r--r--boto/datapipeline/layer1.py2
-rw-r--r--boto/directconnect/layer1.py8
-rw-r--r--boto/dynamodb/batch.py2
-rw-r--r--boto/dynamodb/item.py2
-rw-r--r--boto/dynamodb/layer1.py6
-rw-r--r--boto/dynamodb/layer2.py4
-rw-r--r--boto/dynamodb/table.py6
-rw-r--r--boto/dynamodb/types.py93
-rw-r--r--boto/dynamodb2/items.py6
-rw-r--r--boto/dynamodb2/layer1.py4
-rw-r--r--boto/dynamodb2/results.py8
-rw-r--r--boto/dynamodb2/table.py13
-rw-r--r--boto/ec2/autoscale/__init__.py4
-rw-r--r--boto/ec2/cloudwatch/__init__.py4
-rw-r--r--boto/ec2/cloudwatch/alarm.py12
-rw-r--r--boto/ec2/cloudwatch/datapoint.py1
-rw-r--r--boto/ec2/cloudwatch/dimension.py1
-rw-r--r--boto/ec2/cloudwatch/listelement.py2
-rw-r--r--boto/ec2/connection.py66
-rw-r--r--boto/ec2/elb/__init__.py12
-rw-r--r--boto/ecs/__init__.py4
-rw-r--r--boto/ecs/item.py7
-rw-r--r--boto/elasticache/layer1.py2
-rw-r--r--boto/elastictranscoder/layer1.py2
-rw-r--r--boto/emr/bootstrap_action.py4
-rw-r--r--boto/emr/connection.py12
-rw-r--r--boto/emr/step.py6
-rw-r--r--boto/endpoints.json8
-rwxr-xr-xboto/file/key.py6
-rw-r--r--boto/glacier/concurrent.py2
-rw-r--r--boto/glacier/response.py2
-rw-r--r--boto/glacier/utils.py9
-rw-r--r--boto/glacier/vault.py6
-rw-r--r--boto/glacier/writer.py6
-rw-r--r--boto/gs/resumable_upload_handler.py5
-rw-r--r--boto/https_connection.py8
-rw-r--r--boto/iam/connection.py6
-rw-r--r--boto/kinesis/layer1.py6
-rw-r--r--boto/logs/__init__.py41
-rw-r--r--boto/logs/exceptions.py59
-rw-r--r--boto/logs/layer1.py577
-rw-r--r--boto/manage/cmdshell.py201
-rw-r--r--boto/manage/server.py10
-rw-r--r--boto/manage/task.py5
-rw-r--r--boto/mashups/order.py5
-rw-r--r--boto/mws/connection.py54
-rw-r--r--boto/opsworks/layer1.py8
-rw-r--r--boto/provider.py8
-rw-r--r--boto/pyami/config.py10
-rw-r--r--boto/pyami/copybot.py1
-rw-r--r--boto/pyami/helloworld.py1
-rw-r--r--boto/pyami/installers/__init__.py1
-rw-r--r--boto/pyami/installers/ubuntu/ebs.py7
-rw-r--r--boto/pyami/installers/ubuntu/installer.py6
-rw-r--r--boto/pyami/installers/ubuntu/mysql.py9
-rw-r--r--boto/pyami/installers/ubuntu/trac.py2
-rwxr-xr-xboto/pyami/launch_ami.py25
-rw-r--r--boto/pyami/scriptbase.py7
-rw-r--r--boto/pyami/startup.py2
-rw-r--r--boto/rds2/layer1.py6
-rw-r--r--boto/route53/connection.py5
-rw-r--r--boto/s3/key.py52
-rw-r--r--boto/sdb/connection.py24
-rw-r--r--boto/sdb/db/blob.py8
-rw-r--r--boto/sdb/db/key.py2
-rw-r--r--boto/sdb/db/manager/sdbmanager.py27
-rw-r--r--boto/sdb/db/manager/xmlmanager.py20
-rw-r--r--boto/sdb/db/model.py8
-rw-r--r--boto/sdb/db/property.py45
-rw-r--r--boto/sdb/db/query.py2
-rw-r--r--boto/sdb/db/sequence.py14
-rw-r--r--boto/sdb/db/test_db.py2
-rw-r--r--boto/sdb/domain.py5
-rw-r--r--boto/sdb/item.py32
-rwxr-xr-xboto/services/bs.py5
-rw-r--r--boto/ses/connection.py2
-rw-r--r--boto/sns/connection.py37
-rw-r--r--boto/sqs/queue.py2
-rw-r--r--boto/sts/credentials.py2
-rw-r--r--boto/support/layer1.py9
-rw-r--r--boto/swf/layer1.py4
-rw-r--r--boto/swf/layer1_decisions.py2
-rw-r--r--boto/utils.py25
-rw-r--r--boto/vpc/__init__.py210
-rw-r--r--boto/vpc/vpc_peering_connection.py163
105 files changed, 1785 insertions, 513 deletions
diff --git a/boto/__init__.py b/boto/__init__.py
index ea94863a..5b142e97 100644
--- a/boto/__init__.py
+++ b/boto/__init__.py
@@ -38,7 +38,7 @@ import logging.config
from boto.compat import urlparse
from boto.exception import InvalidUriError
-__version__ = '2.30.0'
+__version__ = '2.32.0'
Version = __version__ # for backware compatibility
# http://bugs.python.org/issue7980
@@ -836,6 +836,28 @@ def connect_kinesis(aws_access_key_id=None,
**kwargs
)
+def connect_logs(aws_access_key_id=None,
+ aws_secret_access_key=None,
+ **kwargs):
+ """
+ Connect to Amazon CloudWatch Logs
+
+ :type aws_access_key_id: string
+ :param aws_access_key_id: Your AWS Access Key ID
+
+ :type aws_secret_access_key: string
+ :param aws_secret_access_key: Your AWS Secret Access Key
+
+ :rtype: :class:`boto.logs.layer1.CloudWatchLogsConnection`
+ :return: A connection to the Amazon CloudWatch Logs service
+ """
+ from boto.logs.layer1 import CloudWatchLogsConnection
+ return CloudWatchLogsConnection(
+ aws_access_key_id=aws_access_key_id,
+ aws_secret_access_key=aws_secret_access_key,
+ **kwargs
+ )
+
def storage_uri(uri_str, default_scheme='file', debug=0, validate=True,
bucket_storage_uri_class=BucketStorageUri,
suppress_consec_slashes=True, is_latest=False):
diff --git a/boto/auth.py b/boto/auth.py
index 9e0934bf..6012962a 100644
--- a/boto/auth.py
+++ b/boto/auth.py
@@ -41,7 +41,7 @@ import sys
import time
import posixpath
-from boto.compat import urllib
+from boto.compat import urllib, encodebytes
from boto.auth_handler import AuthHandler
from boto.exception import BotoClientError
@@ -89,7 +89,7 @@ class HmacKeys(object):
def sign_string(self, string_to_sign):
new_hmac = self._get_hmac()
new_hmac.update(string_to_sign.encode('utf-8'))
- return base64.encodestring(new_hmac.digest()).decode('utf-8').strip()
+ return encodebytes(new_hmac.digest()).decode('utf-8').strip()
def __getstate__(self):
pickled_dict = copy.copy(self.__dict__)
@@ -99,7 +99,7 @@ class HmacKeys(object):
def __setstate__(self, dct):
self.__dict__ = dct
- self.update_provider(self._provider.encode('utf-8'))
+ self.update_provider(self._provider)
class AnonAuthHandler(AuthHandler, HmacKeys):
@@ -313,7 +313,6 @@ class HmacAuthV4Handler(AuthHandler, HmacKeys):
in the StringToSign.
"""
host_header_value = self.host_header(self.host, http_request)
- headers_to_sign = {}
headers_to_sign = {'Host': host_header_value}
for name, value in http_request.headers.items():
lname = name.lower()
@@ -563,7 +562,6 @@ class S3HmacAuthV4Handler(HmacAuthV4Handler, AuthHandler):
in the StringToSign.
"""
host_header_value = self.host_header(self.host, http_request)
- headers_to_sign = {}
headers_to_sign = {'Host': host_header_value}
for name, value in http_request.headers.items():
lname = name.lower()
diff --git a/boto/beanstalk/exception.py b/boto/beanstalk/exception.py
index f6f9ffad..0fbd4ab9 100644
--- a/boto/beanstalk/exception.py
+++ b/boto/beanstalk/exception.py
@@ -4,12 +4,14 @@ from boto.exception import BotoServerError
def simple(e):
- err = json.loads(e.error_message)
- code = err['Error']['Code']
+ code = e.code
+
+ if code.endswith('Exception'):
+ code = code.rstrip('Exception')
try:
# Dynamically get the error class.
- simple_e = getattr(sys.modules[__name__], code)(e, err)
+ simple_e = getattr(sys.modules[__name__], code)(e)
except AttributeError:
# Return original exception on failure.
return e
@@ -18,12 +20,9 @@ def simple(e):
class SimpleException(BotoServerError):
- def __init__(self, e, err):
+ def __init__(self, e):
super(SimpleException, self).__init__(e.status, e.reason, e.body)
- self.body = e.error_message
- self.request_id = err['RequestId']
- self.error_code = err['Error']['Code']
- self.error_message = err['Error']['Message']
+ self.error_message = self.message
def __repr__(self):
return self.__class__.__name__ + ': ' + self.error_message
diff --git a/boto/beanstalk/layer1.py b/boto/beanstalk/layer1.py
index adccd5bf..e72ee23e 100644
--- a/boto/beanstalk/layer1.py
+++ b/boto/beanstalk/layer1.py
@@ -63,7 +63,7 @@ class Layer1(AWSQueryConnection):
def _get_response(self, action, params, path='/', verb='GET'):
params['ContentType'] = 'JSON'
response = self.make_request(action, params, path, verb)
- body = response.read()
+ body = response.read().decode('utf-8')
boto.log.debug(body)
if response.status == 200:
return json.loads(body)
diff --git a/boto/cloudformation/connection.py b/boto/cloudformation/connection.py
index 40ff8b63..6c31ac53 100644
--- a/boto/cloudformation/connection.py
+++ b/boto/cloudformation/connection.py
@@ -271,7 +271,7 @@ class CloudFormationConnection(AWSQueryConnection):
:return: Parsed JSON response data
"""
response = self.make_request(call, params, path, method)
- body = response.read()
+ body = response.read().decode('utf-8')
if response.status == 200:
body = json.loads(body)
return body
diff --git a/boto/cloudfront/distribution.py b/boto/cloudfront/distribution.py
index 5566bdfe..7131c86c 100644
--- a/boto/cloudfront/distribution.py
+++ b/boto/cloudfront/distribution.py
@@ -22,7 +22,7 @@
import uuid
import base64
import time
-from boto.compat import json
+from boto.compat import six, json
from boto.cloudfront.identity import OriginAccessIdentity
from boto.cloudfront.object import Object, StreamingObject
from boto.cloudfront.signers import ActiveTrustedSigners, TrustedSigners
@@ -665,7 +665,7 @@ class Distribution(object):
raise ValueError("You must specify one of private_key_file or private_key_string")
# If private_key_file is a file name, open it and read it
if private_key_string is None:
- if isinstance(private_key_file, basestring):
+ if isinstance(private_key_file, six.string_types):
with open(private_key_file, 'r') as file_handle:
private_key_string = file_handle.read()
# Otherwise, treat it like a file
diff --git a/boto/cloudfront/invalidation.py b/boto/cloudfront/invalidation.py
index 91ba89d9..58adf81f 100644
--- a/boto/cloudfront/invalidation.py
+++ b/boto/cloudfront/invalidation.py
@@ -20,8 +20,8 @@
# IN THE SOFTWARE.
import uuid
-import urllib
+from boto.compat import urllib
from boto.resultset import ResultSet
@@ -71,7 +71,7 @@ class InvalidationBatch(object):
"""Escape a path, make sure it begins with a slash and contains no invalid characters"""
if not p[0] == "/":
p = "/%s" % p
- return urllib.quote(p)
+ return urllib.parse.quote(p)
def to_xml(self):
"""Get this batch as XML"""
diff --git a/boto/cloudsearch/document.py b/boto/cloudsearch/document.py
index 880096fb..0a1d9db2 100644
--- a/boto/cloudsearch/document.py
+++ b/boto/cloudsearch/document.py
@@ -221,13 +221,15 @@ class CommitResponse(object):
self.doc_service = doc_service
self.sdf = sdf
+ _body = response.content.decode('utf-8')
+
try:
- self.content = json.loads(response.content)
+ self.content = json.loads(_body)
except:
boto.log.error('Error indexing documents.\nResponse Content:\n{0}\n\n'
- 'SDF:\n{1}'.format(response.content, self.sdf))
+ 'SDF:\n{1}'.format(_body, self.sdf))
raise boto.exception.BotoServerError(self.response.status_code, '',
- body=response.content)
+ body=_body)
self.status = self.content['status']
if self.status == 'error':
diff --git a/boto/cloudsearch/domain.py b/boto/cloudsearch/domain.py
index 67ec98e1..9800b175 100644
--- a/boto/cloudsearch/domain.py
+++ b/boto/cloudsearch/domain.py
@@ -36,7 +36,7 @@ def handle_bool(value):
return True
return False
-
+
class Domain(object):
"""
A Cloudsearch domain.
@@ -118,7 +118,7 @@ class Domain(object):
@created.setter
def created(self, value):
self._created = handle_bool(value)
-
+
@property
def deleted(self):
return self._deleted
@@ -126,7 +126,7 @@ class Domain(object):
@deleted.setter
def deleted(self, value):
self._deleted = handle_bool(value)
-
+
@property
def processing(self):
return self._processing
@@ -134,7 +134,7 @@ class Domain(object):
@processing.setter
def processing(self, value):
self._processing = handle_bool(value)
-
+
@property
def requires_index_documents(self):
return self._requires_index_documents
@@ -142,7 +142,7 @@ class Domain(object):
@requires_index_documents.setter
def requires_index_documents(self, value):
self._requires_index_documents = handle_bool(value)
-
+
@property
def search_partition_count(self):
return self._search_partition_count
@@ -150,7 +150,7 @@ class Domain(object):
@search_partition_count.setter
def search_partition_count(self, value):
self._search_partition_count = int(value)
-
+
@property
def search_instance_count(self):
return self._search_instance_count
@@ -158,7 +158,7 @@ class Domain(object):
@search_instance_count.setter
def search_instance_count(self, value):
self._search_instance_count = int(value)
-
+
@property
def num_searchable_docs(self):
return self._num_searchable_docs
@@ -166,7 +166,7 @@ class Domain(object):
@num_searchable_docs.setter
def num_searchable_docs(self, value):
self._num_searchable_docs = int(value)
-
+
@property
def name(self):
return self.domain_name
diff --git a/boto/cloudsearch/layer1.py b/boto/cloudsearch/layer1.py
index 92ebe082..69132e39 100644
--- a/boto/cloudsearch/layer1.py
+++ b/boto/cloudsearch/layer1.py
@@ -680,7 +680,7 @@ class Layer1(AWSQueryConnection):
'update_stemming_options_result',
'stems')
params = {'DomainName': domain_name,
- 'Stems': stems}
+ 'Stems': stems}
return self.get_response(doc_path, 'UpdateStemmingOptions',
params, verb='POST')
diff --git a/boto/cloudsearch/search.py b/boto/cloudsearch/search.py
index 533f6066..70ea479b 100644
--- a/boto/cloudsearch/search.py
+++ b/boto/cloudsearch/search.py
@@ -286,19 +286,20 @@ class SearchConnection(object):
params = query.to_params()
r = requests.get(url, params=params)
+ body = r.content.decode('utf-8')
try:
- data = json.loads(r.content)
+ data = json.loads(body)
except ValueError as e:
if r.status_code == 403:
msg = ''
import re
- g = re.search('<html><body><h1>403 Forbidden</h1>([^<]+)<', r.content)
+ g = re.search('<html><body><h1>403 Forbidden</h1>([^<]+)<', body)
try:
msg = ': %s' % (g.groups()[0].strip())
except AttributeError:
pass
raise SearchServiceException('Authentication error from Amazon%s' % msg)
- raise SearchServiceException("Got non-json response from Amazon. %s" % r.content, query)
+ raise SearchServiceException("Got non-json response from Amazon. %s" % body, query)
if 'messages' in data and 'error' in data:
for m in data['messages']:
diff --git a/boto/cloudsearch/sourceattribute.py b/boto/cloudsearch/sourceattribute.py
index c3435079..28833147 100644
--- a/boto/cloudsearch/sourceattribute.py
+++ b/boto/cloudsearch/sourceattribute.py
@@ -72,4 +72,3 @@ class SourceAttribute(object):
valid = '|'.join(self.ValidDataFunctions)
raise ValueError('data_function must be one of: %s' % valid)
self._data_function = value
-
diff --git a/boto/cloudsearch2/document.py b/boto/cloudsearch2/document.py
index ed0f6c3f..3244a47a 100644
--- a/boto/cloudsearch2/document.py
+++ b/boto/cloudsearch2/document.py
@@ -32,7 +32,9 @@ class SearchServiceException(Exception):
class CommitMismatchError(Exception):
- pass
+ # Carry the per-document errors on the exception so callers can handle them.
+
+ errors = None
class EncodingError(Exception):
@@ -215,13 +217,15 @@ class CommitResponse(object):
self.doc_service = doc_service
self.sdf = sdf
+ _body = response.content.decode('utf-8')
+
try:
- self.content = json.loads(response.content)
+ self.content = json.loads(_body)
except:
boto.log.error('Error indexing documents.\nResponse Content:\n{0}'
- '\n\nSDF:\n{1}'.format(response.content, self.sdf))
+ '\n\nSDF:\n{1}'.format(_body, self.sdf))
raise boto.exception.BotoServerError(self.response.status_code, '',
- body=response.content)
+ body=_body)
self.status = self.content['status']
if self.status == 'error':
@@ -256,6 +260,16 @@ class CommitResponse(object):
if response_num != commit_num:
boto.log.debug(self.response.content)
- raise CommitMismatchError(
+ # There will always be a commit mismatch error if there are any
+ # errors on cloudsearch.
+ # CommitMismatchError is raised. Whoever is using boto has no idea
+ # why their commit failed. They can't even notify the user of the
+ # cause by parsing the error messages from amazon. So let's
+ # attach the self.errors to the exceptions if we already spent
+ # time and effort collecting them out of the response.
+ exc = CommitMismatchError(
'Incorrect number of {0}s returned. Commit: {1} Response: {2}'
- .format(type_, commit_num, response_num))
+ .format(type_, commit_num, response_num)
+ )
+ exc.errors = self.errors
+ raise exc
diff --git a/boto/cloudsearch2/layer1.py b/boto/cloudsearch2/layer1.py
index 6c9b203d..fdc9d4c6 100644
--- a/boto/cloudsearch2/layer1.py
+++ b/boto/cloudsearch2/layer1.py
@@ -20,16 +20,12 @@
# IN THE SOFTWARE.
#
-try:
- import json
-except ImportError:
- import simplejson as json
-
import boto
from boto.connection import AWSQueryConnection
from boto.regioninfo import RegionInfo
from boto.exception import JSONResponseError
from boto.cloudsearch2 import exceptions
+from boto.compat import json
class CloudSearchConnection(AWSQueryConnection):
@@ -60,7 +56,6 @@ class CloudSearchConnection(AWSQueryConnection):
"BaseException": exceptions.BaseException,
}
-
def __init__(self, **kwargs):
region = kwargs.pop('region', None)
if not region:
@@ -772,7 +767,7 @@ class CloudSearchConnection(AWSQueryConnection):
params['ContentType'] = 'JSON'
response = self.make_request(action=action, verb='POST',
path='/', params=params)
- body = response.read()
+ body = response.read().decode('utf-8')
boto.log.debug(body)
if response.status == 200:
return json.loads(body)
diff --git a/boto/cloudsearch2/layer2.py b/boto/cloudsearch2/layer2.py
index 517c0cb8..c4840482 100644
--- a/boto/cloudsearch2/layer2.py
+++ b/boto/cloudsearch2/layer2.py
@@ -24,6 +24,7 @@
from boto.cloudsearch2.layer1 import CloudSearchConnection
from boto.cloudsearch2.domain import Domain
+from boto.compat import six
class Layer2(object):
@@ -33,7 +34,7 @@ class Layer2(object):
host=None, debug=0, session_token=None, region=None,
validate_certs=True):
- if type(region) in [str, unicode]:
+ if isinstance(region, six.string_types):
import boto.cloudsearch2
for region_info in boto.cloudsearch2.regions():
if region_info.name == region:
diff --git a/boto/cloudsearch2/optionstatus.py b/boto/cloudsearch2/optionstatus.py
index eca1c9d5..0a45bea4 100644
--- a/boto/cloudsearch2/optionstatus.py
+++ b/boto/cloudsearch2/optionstatus.py
@@ -144,9 +144,9 @@ class ServicePoliciesStatus(OptionStatus):
"Condition": {
"IpAddress": {
"aws:SourceIp": [ip]
- }
}
}
+ }
def _allow_ip(self, arn, ip):
if 'Statement' not in self:
diff --git a/boto/cloudsearch2/search.py b/boto/cloudsearch2/search.py
index 76835373..78ffc168 100644
--- a/boto/cloudsearch2/search.py
+++ b/boto/cloudsearch2/search.py
@@ -34,10 +34,6 @@ class SearchServiceException(Exception):
pass
-class CommitMismatchError(Exception):
- pass
-
-
class SearchResults(object):
def __init__(self, **attrs):
self.rid = attrs['status']['rid']
@@ -126,7 +122,7 @@ class Query(object):
if self.facet:
for k, v in six.iteritems(self.facet):
- if type(v) not in [str, unicode]:
+ if not isinstance(v, six.string_types):
v = json.dumps(v)
params['facet.%s' % k] = v
@@ -279,19 +275,20 @@ class SearchConnection(object):
params = query.to_params()
r = self.session.get(url, params=params)
+ _body = r.content.decode('utf-8')
try:
- data = json.loads(r.content)
- except ValueError as e:
+ data = json.loads(_body)
+ except ValueError:
if r.status_code == 403:
msg = ''
import re
- g = re.search('<html><body><h1>403 Forbidden</h1>([^<]+)<', r.content)
+ g = re.search('<html><body><h1>403 Forbidden</h1>([^<]+)<', _body)
try:
msg = ': %s' % (g.groups()[0].strip())
except AttributeError:
pass
raise SearchServiceException('Authentication error from Amazon%s' % msg)
- raise SearchServiceException("Got non-json response from Amazon. %s" % r.content, query)
+ raise SearchServiceException("Got non-json response from Amazon. %s" % _body, query)
if 'messages' in data and 'error' in data:
for m in data['messages']:
diff --git a/boto/cloudtrail/layer1.py b/boto/cloudtrail/layer1.py
index 8a2d19d2..0c18fa90 100644
--- a/boto/cloudtrail/layer1.py
+++ b/boto/cloudtrail/layer1.py
@@ -20,16 +20,12 @@
# IN THE SOFTWARE.
#
-try:
- import json
-except ImportError:
- import simplejson as json
-
import boto
from boto.connection import AWSQueryConnection
from boto.regioninfo import RegionInfo
from boto.exception import JSONResponseError
from boto.cloudtrail import exceptions
+from boto.compat import json
class CloudTrailConnection(AWSQueryConnection):
diff --git a/boto/compat.py b/boto/compat.py
index c91e808d..c9194124 100644
--- a/boto/compat.py
+++ b/boto/compat.py
@@ -29,6 +29,13 @@ except ImportError:
import json
+# Switch to use encodebytes, which deprecates encodestring in Python 3
+try:
+ from base64 import encodebytes
+except ImportError:
+ from base64 import encodestring as encodebytes
+
+
# If running in Google App Engine there is no "user" and
# os.path.expanduser() will fail. Attempt to detect this case and use a
# no-op expanduser function in this case.
@@ -39,17 +46,12 @@ except (AttributeError, ImportError):
# This is probably running on App Engine.
expanduser = (lambda x: x)
-# Use unittest2 for older versions of Python
-try:
- import unittest2 as unittest
-except ImportError:
- import unittest
-
from boto.vendored import six
from boto.vendored.six import BytesIO, StringIO
from boto.vendored.six.moves import filter, http_client, map, _thread, \
urllib, zip
+from boto.vendored.six.moves.queue import Queue
from boto.vendored.six.moves.configparser import SafeConfigParser
from boto.vendored.six.moves.urllib.parse import parse_qs, quote, unquote, \
urlparse, urlsplit
@@ -58,5 +60,7 @@ from boto.vendored.six.moves.urllib.request import urlopen
if six.PY3:
# StandardError was removed, so use the base exception type instead
StandardError = Exception
+ long_type = int
else:
StandardError = StandardError
+ long_type = long
diff --git a/boto/connection.py b/boto/connection.py
index fc8c23b6..6d06ca18 100644
--- a/boto/connection.py
+++ b/boto/connection.py
@@ -44,7 +44,6 @@ Handles basic connections to AWS
"""
from __future__ import with_statement
-import base64
from datetime import datetime
import errno
import os
@@ -64,7 +63,7 @@ import boto.handler
import boto.cacerts
from boto import config, UserAgent
-from boto.compat import six, http_client, urlparse, quote
+from boto.compat import six, http_client, urlparse, quote, encodebytes
from boto.exception import AWSConnectionError
from boto.exception import BotoClientError
from boto.exception import BotoServerError
@@ -787,13 +786,13 @@ class AWSAuthConnection(object):
host = '%s:%d' % (host, port)
else:
host = '%s:%d' % (self.host, self.port)
- sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- try:
- sock.connect((self.proxy, int(self.proxy_port)))
- if "timeout" in self.http_connection_kwargs:
- sock.settimeout(self.http_connection_kwargs["timeout"])
- except:
- raise
+ # Apply the configured timeout to the proxy connect as well.
+ timeout = self.http_connection_kwargs.get("timeout")
+ if timeout is not None:
+ sock = socket.create_connection((self.proxy,
+ int(self.proxy_port)), timeout)
+ else:
+ sock = socket.create_connection((self.proxy, int(self.proxy_port)))
boto.log.debug("Proxy connection: CONNECT %s HTTP/1.0\r\n", host)
sock.sendall("CONNECT %s HTTP/1.0\r\n" % host)
sock.sendall("User-Agent: %s\r\n" % UserAgent)
@@ -857,7 +856,7 @@ class AWSAuthConnection(object):
return path
def get_proxy_auth_header(self):
- auth = base64.encodestring(self.proxy_user + ':' + self.proxy_pass)
+ auth = encodebytes(self.proxy_user + ':' + self.proxy_pass)
return {'Proxy-Authorization': 'Basic %s' % auth}
def set_host_header(self, request):
diff --git a/boto/datapipeline/layer1.py b/boto/datapipeline/layer1.py
index 6635f01c..6a3fb9b5 100644
--- a/boto/datapipeline/layer1.py
+++ b/boto/datapipeline/layer1.py
@@ -627,7 +627,7 @@ class DataPipelineConnection(AWSQueryConnection):
headers=headers, data=body)
response = self._mexe(http_request, sender=None,
override_num_retries=10)
- response_body = response.read()
+ response_body = response.read().decode('utf-8')
boto.log.debug(response_body)
if response.status == 200:
if response_body:
diff --git a/boto/directconnect/layer1.py b/boto/directconnect/layer1.py
index b225e18e..08197ddf 100644
--- a/boto/directconnect/layer1.py
+++ b/boto/directconnect/layer1.py
@@ -20,16 +20,12 @@
# IN THE SOFTWARE.
#
-try:
- import json
-except ImportError:
- import simplejson as json
-
import boto
from boto.connection import AWSQueryConnection
from boto.regioninfo import RegionInfo
from boto.exception import JSONResponseError
from boto.directconnect import exceptions
+from boto.compat import json
class DirectConnectConnection(AWSQueryConnection):
@@ -619,7 +615,7 @@ class DirectConnectConnection(AWSQueryConnection):
headers=headers, data=body)
response = self._mexe(http_request, sender=None,
override_num_retries=10)
- response_body = response.read()
+ response_body = response.read().decode('utf-8')
boto.log.debug(response_body)
if response.status == 200:
if response_body:
diff --git a/boto/dynamodb/batch.py b/boto/dynamodb/batch.py
index 4e2821e9..f30b8425 100644
--- a/boto/dynamodb/batch.py
+++ b/boto/dynamodb/batch.py
@@ -197,7 +197,6 @@ class BatchList(list):
return self.submit()
-
def submit(self):
res = self.layer2.batch_get_item(self)
if 'UnprocessedKeys' in res:
@@ -260,4 +259,3 @@ class BatchWriteList(list):
table_name, batch_dict = batch.to_dict()
d[table_name] = batch_dict
return d
-
diff --git a/boto/dynamodb/item.py b/boto/dynamodb/item.py
index 9dcbad06..a47f22bf 100644
--- a/boto/dynamodb/item.py
+++ b/boto/dynamodb/item.py
@@ -35,7 +35,7 @@ class Item(dict):
:ivar range_key_name: The name of the RangeKey associated with this item.
:ivar table: The Table this item belongs to.
"""
-
+
def __init__(self, table, hash_key=None, range_key=None, attrs=None):
self.table = table
self._updates = None
diff --git a/boto/dynamodb/layer1.py b/boto/dynamodb/layer1.py
index 01a49020..0984f71a 100644
--- a/boto/dynamodb/layer1.py
+++ b/boto/dynamodb/layer1.py
@@ -122,14 +122,14 @@ class Layer1(AWSAuthConnection):
boto.log.debug('RequestId: %s' % request_id)
boto.perflog.debug('%s: id=%s time=%sms',
headers['X-Amz-Target'], request_id, int(elapsed))
- response_body = response.read()
+ response_body = response.read().decode('utf-8')
boto.log.debug(response_body)
return json.loads(response_body, object_hook=object_hook)
def _retry_handler(self, response, i, next_sleep):
status = None
if response.status == 400:
- response_body = response.read()
+ response_body = response.read().decode('utf-8')
boto.log.debug(response_body)
data = json.loads(response_body)
if self.ThruputError in data.get('__type'):
@@ -160,7 +160,7 @@ class Layer1(AWSAuthConnection):
expected_crc32 = response.getheader('x-amz-crc32')
if self._validate_checksums and expected_crc32 is not None:
boto.log.debug('Validating crc32 checksum for body: %s',
- response.read())
+ response.read().decode('utf-8'))
actual_crc32 = crc32(response.read()) & 0xffffffff
expected_crc32 = int(expected_crc32)
if actual_crc32 != expected_crc32:
diff --git a/boto/dynamodb/layer2.py b/boto/dynamodb/layer2.py
index 743c7055..fa0e545f 100644
--- a/boto/dynamodb/layer2.py
+++ b/boto/dynamodb/layer2.py
@@ -264,13 +264,13 @@ class Layer2(object):
"""
dynamodb_key = {}
dynamodb_value = self.dynamizer.encode(hash_key)
- if dynamodb_value.keys()[0] != schema.hash_key_type:
+ if list(dynamodb_value.keys())[0] != schema.hash_key_type:
msg = 'Hashkey must be of type: %s' % schema.hash_key_type
raise TypeError(msg)
dynamodb_key['HashKeyElement'] = dynamodb_value
if range_key is not None:
dynamodb_value = self.dynamizer.encode(range_key)
- if dynamodb_value.keys()[0] != schema.range_key_type:
+ if list(dynamodb_value.keys())[0] != schema.range_key_type:
msg = 'RangeKey must be of type: %s' % schema.range_key_type
raise TypeError(msg)
dynamodb_key['RangeKeyElement'] = dynamodb_value
diff --git a/boto/dynamodb/table.py b/boto/dynamodb/table.py
index 129b0795..152b95d9 100644
--- a/boto/dynamodb/table.py
+++ b/boto/dynamodb/table.py
@@ -47,9 +47,9 @@ class TableBatchGenerator(object):
self.consistent_read = consistent_read
def _queue_unprocessed(self, res):
- if not u'UnprocessedKeys' in res:
+ if u'UnprocessedKeys' not in res:
return
- if not self.table.name in res[u'UnprocessedKeys']:
+ if self.table.name not in res[u'UnprocessedKeys']:
return
keys = res[u'UnprocessedKeys'][self.table.name][u'Keys']
@@ -68,7 +68,7 @@ class TableBatchGenerator(object):
res = batch.submit()
# parse the results
- if not self.table.name in res[u'Responses']:
+ if self.table.name not in res[u'Responses']:
continue
self.consumed_units += res[u'Responses'][self.table.name][u'ConsumedCapacityUnits']
for elem in res[u'Responses'][self.table.name][u'Items']:
diff --git a/boto/dynamodb/types.py b/boto/dynamodb/types.py
index 03715149..5e3bae69 100644
--- a/boto/dynamodb/types.py
+++ b/boto/dynamodb/types.py
@@ -28,7 +28,7 @@ import base64
from decimal import (Decimal, DecimalException, Context,
Clamped, Overflow, Inexact, Underflow, Rounded)
from boto.dynamodb.exceptions import DynamoDBNumberError
-from boto.compat import filter, map
+from boto.compat import filter, map, six, long_type
DYNAMODB_CONTEXT = Context(
@@ -52,17 +52,25 @@ def float_to_decimal(f):
def is_num(n):
- types = (int, long, float, bool, Decimal)
+ types = (int, long_type, float, bool, Decimal)
return isinstance(n, types) or n in types
-def is_str(n):
- return isinstance(n, basestring) or (isinstance(n, type) and
- issubclass(n, basestring))
+if six.PY2:
+ def is_str(n):
+ return (isinstance(n, basestring) or
+ isinstance(n, type) and issubclass(n, basestring))
+ def is_binary(n):
+ return isinstance(n, Binary)
-def is_binary(n):
- return isinstance(n, Binary)
+else: # PY3
+ def is_str(n):
+ return (isinstance(n, str) or
+ isinstance(n, type) and issubclass(n, str))
+
+ def is_binary(n):
+ return isinstance(n, bytes) # Binary is subclass of bytes.
def serialize_num(val):
@@ -104,7 +112,7 @@ def get_dynamodb_type(val):
dynamodb_type = 'SS'
elif False not in map(is_binary, val):
dynamodb_type = 'BS'
- elif isinstance(val, Binary):
+ elif is_binary(val):
dynamodb_type = 'B'
if dynamodb_type is None:
msg = 'Unsupported type "%s" for value "%s"' % (type(val), val)
@@ -129,39 +137,58 @@ def dynamize_value(val):
elif dynamodb_type == 'SS':
val = {dynamodb_type: [n for n in val]}
elif dynamodb_type == 'B':
+ if isinstance(val, bytes):
+ val = Binary(val)
val = {dynamodb_type: val.encode()}
elif dynamodb_type == 'BS':
val = {dynamodb_type: [n.encode() for n in val]}
return val
-class Binary(object):
- def __init__(self, value):
- if not isinstance(value, basestring):
- raise TypeError('Value must be a string of binary data!')
+if six.PY2:
+ class Binary(object):
+ def __init__(self, value):
+ if isinstance(value, six.text_type): # Support only PY2 for backward compatibility.
+ value = value.encode('utf-8')
+ elif not isinstance(value, bytes):
+ raise TypeError('Value must be a string of binary data!')
+
+ self.value = value
- self.value = value
+ def encode(self):
+ return base64.b64encode(self.value).decode('utf-8')
+
+ def __eq__(self, other):
+ if isinstance(other, Binary):
+ return self.value == other.value
+ else:
+ return self.value == other
- def encode(self):
- return base64.b64encode(self.value)
+ def __ne__(self, other):
+ return not self.__eq__(other)
- def __eq__(self, other):
- if isinstance(other, Binary):
- return self.value == other.value
- else:
- return self.value == other
+ def __repr__(self):
+ return 'Binary(%r)' % self.value
- def __ne__(self, other):
- return not self.__eq__(other)
+ def __str__(self):
+ return self.value.decode('utf-8')
- def __repr__(self):
- return 'Binary(%s)' % self.value
+ def __hash__(self):
+ return hash(self.value)
+else:
+ class Binary(bytes):
+ def encode(self):
+ return base64.b64encode(self).decode('utf-8')
- def __str__(self):
- return self.value
+ @property
+ def value(self):
+ # This matches the public API of the Python 2 version,
+ # but just returns itself since it is already a bytes
+ # instance.
+ return bytes(self)
- def __hash__(self):
- return hash(self.value)
+ def __repr__(self):
+ return 'Binary(%r)' % self.value
def item_object_hook(dct):
@@ -254,9 +281,9 @@ class Dynamizer(object):
raise DynamoDBNumberError(msg)
def _encode_s(self, attr):
- if isinstance(attr, unicode):
- attr = attr.encode('utf-8')
- elif not isinstance(attr, str):
+ if isinstance(attr, bytes):
+ attr = attr.decode('utf-8')
+ elif not isinstance(attr, six.text_type):
attr = str(attr)
return attr
@@ -267,6 +294,8 @@ class Dynamizer(object):
return [self._encode_s(n) for n in attr]
def _encode_b(self, attr):
+ if isinstance(attr, bytes):
+ attr = Binary(attr)
return attr.encode()
def _encode_bs(self, attr):
@@ -280,7 +309,7 @@ class Dynamizer(object):
"""
if len(attr) > 1 or not attr:
return attr
- dynamodb_type = attr.keys()[0]
+ dynamodb_type = list(attr.keys())[0]
if dynamodb_type.lower() == dynamodb_type:
# It's not an actual type, just a single character attr that
# overlaps with the DDB types. Return it.
diff --git a/boto/dynamodb2/items.py b/boto/dynamodb2/items.py
index 257a7459..463feee7 100644
--- a/boto/dynamodb2/items.py
+++ b/boto/dynamodb2/items.py
@@ -108,9 +108,11 @@ class Item(object):
def __contains__(self, key):
return key in self._data
- def __nonzero__(self):
+ def __bool__(self):
return bool(self._data)
+ __nonzero__ = __bool__
+
def _determine_alterations(self):
"""
Checks the ``-orig_data`` against the ``_data`` to determine what
@@ -256,7 +258,7 @@ class Item(object):
expects = {}
if fields is None:
- fields = self._data.keys() + self._orig_data.keys()
+ fields = list(self._data.keys()) + list(self._orig_data.keys())
# Only uniques.
fields = set(fields)
diff --git a/boto/dynamodb2/layer1.py b/boto/dynamodb2/layer1.py
index 360c7d08..aa319cfd 100644
--- a/boto/dynamodb2/layer1.py
+++ b/boto/dynamodb2/layer1.py
@@ -2098,7 +2098,7 @@ class DynamoDBConnection(AWSQueryConnection):
response = self._mexe(http_request, sender=None,
override_num_retries=self.NumberRetries,
retry_handler=self._retry_handler)
- response_body = response.read()
+ response_body = response.read().decode('utf-8')
boto.log.debug(response_body)
if response.status == 200:
if response_body:
@@ -2114,7 +2114,7 @@ class DynamoDBConnection(AWSQueryConnection):
status = None
boto.log.debug("Saw HTTP status: %s" % response.status)
if response.status == 400:
- response_body = response.read()
+ response_body = response.read().decode('utf-8')
boto.log.debug(response_body)
data = json.loads(response_body)
if 'ProvisionedThroughputExceededException' in data.get('__type'):
diff --git a/boto/dynamodb2/results.py b/boto/dynamodb2/results.py
index 98da8a6a..3d80ecf3 100644
--- a/boto/dynamodb2/results.py
+++ b/boto/dynamodb2/results.py
@@ -52,7 +52,7 @@ class ResultSet(object):
def __iter__(self):
return self
- def next(self):
+ def __next__(self):
self._offset += 1
if self._offset >= len(self._results):
@@ -78,6 +78,8 @@ class ResultSet(object):
else:
raise StopIteration()
+ next = __next__
+
def to_call(self, the_callable, *args, **kwargs):
"""
Sets up the callable & any arguments to run it with.
@@ -106,7 +108,7 @@ class ResultSet(object):
# DDB api calls use (which limit page size, not the overall result set).
self._limit = kwargs.pop('limit', None)
- if self._limit < 0:
+ if self._limit is not None and self._limit < 0:
self._limit = None
self.the_callable = the_callable
@@ -130,7 +132,7 @@ class ResultSet(object):
# If the page size is greater than limit set them
# to the same value
- if self._limit and self._max_page_size > self._limit:
+ if self._limit and self._max_page_size and self._max_page_size > self._limit:
self._max_page_size = self._limit
# Put in the max page size.
diff --git a/boto/dynamodb2/table.py b/boto/dynamodb2/table.py
index 66e6337d..79cf7975 100644
--- a/boto/dynamodb2/table.py
+++ b/boto/dynamodb2/table.py
@@ -1283,7 +1283,7 @@ class Table(object):
'last_key': last_key,
}
- def batch_get(self, keys, consistent=False):
+ def batch_get(self, keys, consistent=False, attributes=None):
"""
Fetches many specific items in batch from a table.
@@ -1294,6 +1294,10 @@ class Table(object):
boolean. If you provide ``True``, a strongly consistent read will be
used. (Default: False)
+        Optionally accepts an ``attributes`` parameter, which should be a
+        tuple. If you provide any attributes, only those attributes will be
+        fetched from DynamoDB.
+
Returns a ``ResultSet``, which transparently handles the pagination of
results you get back.
@@ -1320,10 +1324,10 @@ class Table(object):
# We pass the keys to the constructor instead, so it can maintain it's
# own internal state as to what keys have been processed.
results = BatchGetResultSet(keys=keys, max_batch_get=self.max_batch_get)
- results.to_call(self._batch_get, consistent=consistent)
+ results.to_call(self._batch_get, consistent=consistent, attributes=attributes)
return results
- def _batch_get(self, keys, consistent=False):
+ def _batch_get(self, keys, consistent=False, attributes=None):
"""
The internal method that performs the actual batch get. Used extensively
by ``BatchGetResultSet`` to perform each (paginated) request.
@@ -1337,6 +1341,9 @@ class Table(object):
if consistent:
items[self.table_name]['ConsistentRead'] = True
+ if attributes is not None:
+ items[self.table_name]['AttributesToGet'] = attributes
+
for key_data in keys:
raw_key = {}
diff --git a/boto/ec2/autoscale/__init__.py b/boto/ec2/autoscale/__init__.py
index c720a9c0..fc0534b4 100644
--- a/boto/ec2/autoscale/__init__.py
+++ b/boto/ec2/autoscale/__init__.py
@@ -143,7 +143,7 @@ class AutoScaleConnection(AWSQueryConnection):
params['%s.member.%d.%s.%s' % (label, i, k, kk)] = vv
else:
params['%s.member.%d.%s' % (label, i, k)] = v
- elif isinstance(items[i - 1], basestring):
+ elif isinstance(items[i - 1], six.string_types):
params['%s.member.%d' % (label, i)] = items[i - 1]
def _update_group(self, op, as_group):
@@ -222,7 +222,7 @@ class AutoScaleConnection(AWSQueryConnection):
if launch_config.key_name:
params['KeyName'] = launch_config.key_name
if launch_config.user_data:
- params['UserData'] = base64.b64encode(launch_config.user_data)
+ params['UserData'] = base64.b64encode(launch_config.user_data).decode('utf-8')
if launch_config.kernel_id:
params['KernelId'] = launch_config.kernel_id
if launch_config.ramdisk_id:
diff --git a/boto/ec2/cloudwatch/__init__.py b/boto/ec2/cloudwatch/__init__.py
index 0c32e2a6..0c9115e8 100644
--- a/boto/ec2/cloudwatch/__init__.py
+++ b/boto/ec2/cloudwatch/__init__.py
@@ -110,7 +110,7 @@ class CloudWatchConnection(AWSQueryConnection):
for dim_name in dimension:
dim_value = dimension[dim_name]
if dim_value:
- if isinstance(dim_value, basestring):
+ if isinstance(dim_value, six.string_types):
dim_value = [dim_value]
for value in dim_value:
params['%s.%d.Name' % (prefix, i+1)] = dim_name
@@ -121,7 +121,7 @@ class CloudWatchConnection(AWSQueryConnection):
i += 1
def build_list_params(self, params, items, label):
- if isinstance(items, basestring):
+ if isinstance(items, six.string_types):
items = [items]
for index, item in enumerate(items):
i = index + 1
diff --git a/boto/ec2/cloudwatch/alarm.py b/boto/ec2/cloudwatch/alarm.py
index 1ef8869c..4989fb34 100644
--- a/boto/ec2/cloudwatch/alarm.py
+++ b/boto/ec2/cloudwatch/alarm.py
@@ -21,7 +21,6 @@
#
from datetime import datetime
-from boto.resultset import ResultSet
from boto.ec2.cloudwatch.listelement import ListElement
from boto.ec2.cloudwatch.dimension import Dimension
from boto.compat import json
@@ -253,11 +252,11 @@ class MetricAlarm(object):
def add_alarm_action(self, action_arn=None):
"""
- Adds an alarm action, represented as an SNS topic, to this alarm.
+ Adds an alarm action, represented as an SNS topic, to this alarm.
        What to do when the alarm is triggered.
:type action_arn: str
- :param action_arn: SNS topics to which notification should be
+ :param action_arn: SNS topics to which notification should be
sent if the alarm goes to state ALARM.
"""
if not action_arn:
@@ -271,21 +270,21 @@ class MetricAlarm(object):
this alarm. What to do when the insufficient_data state is reached.
:type action_arn: str
- :param action_arn: SNS topics to which notification should be
+ :param action_arn: SNS topics to which notification should be
sent if the alarm goes to state INSUFFICIENT_DATA.
"""
if not action_arn:
return
self.actions_enabled = 'true'
self.insufficient_data_actions.append(action_arn)
-
+
def add_ok_action(self, action_arn=None):
"""
Adds an ok action, represented as an SNS topic, to this alarm. What
to do when the ok state is reached.
:type action_arn: str
- :param action_arn: SNS topics to which notification should be
+        :param action_arn: SNS topics to which notification should be
            sent if the alarm goes to state OK.
"""
if not action_arn:
@@ -321,4 +320,3 @@ class AlarmHistoryItem(object):
'%Y-%m-%dT%H:%M:%S.%fZ')
except ValueError:
self.timestamp = datetime.strptime(value, '%Y-%m-%dT%H:%M:%SZ')
-
diff --git a/boto/ec2/cloudwatch/datapoint.py b/boto/ec2/cloudwatch/datapoint.py
index d4350cef..a33771a1 100644
--- a/boto/ec2/cloudwatch/datapoint.py
+++ b/boto/ec2/cloudwatch/datapoint.py
@@ -37,4 +37,3 @@ class Datapoint(dict):
self[name] = datetime.strptime(value, '%Y-%m-%dT%H:%M:%SZ')
elif name != 'member':
self[name] = value
-
diff --git a/boto/ec2/cloudwatch/dimension.py b/boto/ec2/cloudwatch/dimension.py
index 42c8a880..86ebb9c3 100644
--- a/boto/ec2/cloudwatch/dimension.py
+++ b/boto/ec2/cloudwatch/dimension.py
@@ -35,4 +35,3 @@ class Dimension(dict):
self[self._name] = [value]
else:
setattr(self, name, value)
-
diff --git a/boto/ec2/cloudwatch/listelement.py b/boto/ec2/cloudwatch/listelement.py
index 5be45992..2dd9cef0 100644
--- a/boto/ec2/cloudwatch/listelement.py
+++ b/boto/ec2/cloudwatch/listelement.py
@@ -27,5 +27,3 @@ class ListElement(list):
def endElement(self, name, value, connection):
if name == 'member':
self.append(value)
-
-
diff --git a/boto/ec2/connection.py b/boto/ec2/connection.py
index 5cb15880..4fa205b6 100644
--- a/boto/ec2/connection.py
+++ b/boto/ec2/connection.py
@@ -65,6 +65,7 @@ from boto.ec2.networkinterface import NetworkInterface
from boto.ec2.attributes import AccountAttribute, VPCAttribute
from boto.ec2.blockdevicemapping import BlockDeviceMapping, BlockDeviceType
from boto.exception import EC2ResponseError
+from boto.compat import six
#boto.set_stream_logger('ec2')
@@ -122,6 +123,9 @@ class EC2Connection(AWSQueryConnection):
return params
def build_filter_params(self, params, filters):
+ if not isinstance(filters, dict):
+ filters = dict(filters)
+
i = 1
for name in filters:
aws_name = name
@@ -610,15 +614,24 @@ class EC2Connection(AWSQueryConnection):
:rtype: list
:return: A list of :class:`boto.ec2.instance.Instance`
"""
- reservations = self.get_all_reservations(instance_ids=instance_ids,
- filters=filters,
- dry_run=dry_run,
- max_results=max_results)
- return [instance for reservation in reservations
- for instance in reservation.instances]
+ next_token = None
+ retval = []
+ while True:
+ reservations = self.get_all_reservations(instance_ids=instance_ids,
+ filters=filters,
+ dry_run=dry_run,
+ max_results=max_results,
+ next_token=next_token)
+ retval.extend([instance for reservation in reservations for
+ instance in reservation.instances])
+ next_token = reservations.next_token
+ if not next_token:
+ break
+
+ return retval
def get_all_reservations(self, instance_ids=None, filters=None,
- dry_run=False, max_results=None):
+ dry_run=False, max_results=None, next_token=None):
"""
Retrieve all the instance reservations associated with your account.
@@ -640,6 +653,10 @@ class EC2Connection(AWSQueryConnection):
:param max_results: The maximum number of paginated instance
items per response.
+ :type next_token: str
+ :param next_token: A string specifying the next paginated set
+ of results to return.
+
:rtype: list
:return: A list of :class:`boto.ec2.instance.Reservation`
"""
@@ -660,12 +677,15 @@ class EC2Connection(AWSQueryConnection):
params['DryRun'] = 'true'
if max_results is not None:
params['MaxResults'] = max_results
+ if next_token:
+ params['NextToken'] = next_token
return self.get_list('DescribeInstances', params,
[('item', Reservation)], verb='POST')
def get_all_instance_status(self, instance_ids=None,
max_results=None, next_token=None,
- filters=None, dry_run=False):
+ filters=None, dry_run=False,
+ include_all_instances=False):
"""
Retrieve all the instances in your account scheduled for maintenance.
@@ -693,6 +713,11 @@ class EC2Connection(AWSQueryConnection):
:type dry_run: bool
:param dry_run: Set to True if the operation should not actually run.
+ :type include_all_instances: bool
+ :param include_all_instances: Set to True if all
+ instances should be returned. (Only running
+ instances are included by default.)
+
:rtype: list
:return: A list of instances that have maintenance scheduled.
"""
@@ -707,6 +732,8 @@ class EC2Connection(AWSQueryConnection):
self.build_filter_params(params, filters)
if dry_run:
params['DryRun'] = 'true'
+ if include_all_instances:
+ params['IncludeAllInstances'] = 'true'
return self.get_object('DescribeInstanceStatus', params,
InstanceStatusSet, verb='POST')
@@ -799,7 +826,7 @@ class EC2Connection(AWSQueryConnection):
instances.
:type monitoring_enabled: bool
- :param monitoring_enabled: Enable CloudWatch monitoring on
+ :param monitoring_enabled: Enable detailed CloudWatch monitoring on
the instance.
:type subnet_id: string
@@ -867,9 +894,9 @@ class EC2Connection(AWSQueryConnection):
provide optimal EBS I/O performance. This optimization
isn't available with all instance types.
- :type network_interfaces: list
- :param network_interfaces: A list of
- :class:`boto.ec2.networkinterface.NetworkInterfaceSpecification`
+ :type network_interfaces: :class:`boto.ec2.networkinterface.NetworkInterfaceCollection`
+ :param network_interfaces: A NetworkInterfaceCollection data
+ structure containing the ENI specifications for the instance.
:type dry_run: bool
:param dry_run: Set to True if the operation should not actually run.
@@ -900,7 +927,9 @@ class EC2Connection(AWSQueryConnection):
l.append(group)
self.build_list_params(params, l, 'SecurityGroup')
if user_data:
- params['UserData'] = base64.b64encode(user_data)
+ if isinstance(user_data, six.text_type):
+ user_data = user_data.encode('utf-8')
+ params['UserData'] = base64.b64encode(user_data).decode('utf-8')
if addressing_type:
params['AddressingType'] = addressing_type
if instance_type:
@@ -1498,7 +1527,7 @@ class EC2Connection(AWSQueryConnection):
instances
:type monitoring_enabled: bool
- :param monitoring_enabled: Enable CloudWatch monitoring on
+ :param monitoring_enabled: Enable detailed CloudWatch monitoring on
the instance.
:type subnet_id: string
@@ -3839,7 +3868,7 @@ class EC2Connection(AWSQueryConnection):
def monitor_instances(self, instance_ids, dry_run=False):
"""
- Enable CloudWatch monitoring for the supplied instances.
+ Enable detailed CloudWatch monitoring for the supplied instances.
:type instance_id: list of strings
:param instance_id: The instance ids
@@ -3860,7 +3889,7 @@ class EC2Connection(AWSQueryConnection):
def monitor_instance(self, instance_id, dry_run=False):
"""
Deprecated Version, maintained for backward compatibility.
- Enable CloudWatch monitoring for the supplied instance.
+ Enable detailed CloudWatch monitoring for the supplied instance.
:type instance_id: string
:param instance_id: The instance id
@@ -3896,7 +3925,7 @@ class EC2Connection(AWSQueryConnection):
def unmonitor_instance(self, instance_id, dry_run=False):
"""
Deprecated Version, maintained for backward compatibility.
- Disable CloudWatch monitoring for the supplied instance.
+ Disable detailed CloudWatch monitoring for the supplied instance.
:type instance_id: string
:param instance_id: The instance id
@@ -4364,7 +4393,8 @@ class EC2Connection(AWSQueryConnection):
"""
:type dry_run: bool
:param dry_run: Set to True if the operation should not actually run.
-
+ :rtype: :class:`boto.ec2.image.CopyImage`
+ :return: Object containing the image_id of the copied image.
"""
params = {
'SourceRegion': source_region,
diff --git a/boto/ec2/elb/__init__.py b/boto/ec2/elb/__init__.py
index bb1e2483..9971e060 100644
--- a/boto/ec2/elb/__init__.py
+++ b/boto/ec2/elb/__init__.py
@@ -106,13 +106,19 @@ class ELBConnection(AWSQueryConnection):
for index, item in enumerate(items):
params[label % (index + 1)] = item
- def get_all_load_balancers(self, load_balancer_names=None):
+ def get_all_load_balancers(self, load_balancer_names=None, marker=None):
"""
Retrieve all load balancers associated with your account.
:type load_balancer_names: list
:keyword load_balancer_names: An optional list of load balancer names.
+ :type marker: string
+ :param marker: Use this only when paginating results and only
+ in follow-up request after you've received a response
+ where the results are truncated. Set this to the value of
+ the Marker element in the response you just received.
+
:rtype: :py:class:`boto.resultset.ResultSet`
:return: A ResultSet containing instances of
:class:`boto.ec2.elb.loadbalancer.LoadBalancer`
@@ -121,6 +127,10 @@ class ELBConnection(AWSQueryConnection):
if load_balancer_names:
self.build_list_params(params, load_balancer_names,
'LoadBalancerNames.member.%d')
+
+ if marker:
+ params['Marker'] = marker
+
return self.get_list('DescribeLoadBalancers', params,
[('member', LoadBalancer)])
diff --git a/boto/ecs/__init__.py b/boto/ecs/__init__.py
index 97c0d8dd..46db5065 100644
--- a/boto/ecs/__init__.py
+++ b/boto/ecs/__init__.py
@@ -62,7 +62,7 @@ class ECSConnection(AWSQueryConnection):
if page:
params['ItemPage'] = page
response = self.make_request(None, params, "/onca/xml")
- body = response.read()
+ body = response.read().decode('utf-8')
boto.log.debug(body)
if response.status != 200:
@@ -75,7 +75,7 @@ class ECSConnection(AWSQueryConnection):
else:
rs = itemSet
h = handler.XmlHandler(rs, self)
- xml.sax.parseString(body, h)
+ xml.sax.parseString(body.encode('utf-8'), h)
if not rs.is_valid:
raise BotoServerError(response.status, '{Code}: {Message}'.format(**rs.errors[0]))
return rs
diff --git a/boto/ecs/item.py b/boto/ecs/item.py
index 2ae3405f..79177a31 100644
--- a/boto/ecs/item.py
+++ b/boto/ecs/item.py
@@ -22,8 +22,7 @@
import xml.sax
import cgi
-from StringIO import StringIO
-from boto.compat import six
+from boto.compat import six, StringIO
class ResponseGroup(xml.sax.ContentHandler):
"""A Generic "Response Group", which can
@@ -137,7 +136,7 @@ class ItemSet(ResponseGroup):
self.curItem.endElement(name, value, connection)
return None
- def next(self):
+ def __next__(self):
"""Special paging functionality"""
if self.iter is None:
self.iter = iter(self.objs)
@@ -153,6 +152,8 @@ class ItemSet(ResponseGroup):
else:
raise
+ next = __next__
+
def __iter__(self):
return self
diff --git a/boto/elasticache/layer1.py b/boto/elasticache/layer1.py
index be7080d0..59c43a3d 100644
--- a/boto/elasticache/layer1.py
+++ b/boto/elasticache/layer1.py
@@ -1657,7 +1657,7 @@ class ElastiCacheConnection(AWSQueryConnection):
params['ContentType'] = 'JSON'
response = self.make_request(action=action, verb='POST',
path='/', params=params)
- body = response.read()
+ body = response.read().decode('utf-8')
boto.log.debug(body)
if response.status == 200:
return json.loads(body)
diff --git a/boto/elastictranscoder/layer1.py b/boto/elastictranscoder/layer1.py
index e47c199c..3189f35d 100644
--- a/boto/elastictranscoder/layer1.py
+++ b/boto/elastictranscoder/layer1.py
@@ -923,7 +923,7 @@ class ElasticTranscoderConnection(AWSAuthConnection):
headers = {}
response = super(ElasticTranscoderConnection, self).make_request(
verb, resource, headers=headers, data=data)
- body = json.load(response)
+ body = json.loads(response.read().decode('utf-8'))
if response.status == expected_status:
return body
else:
diff --git a/boto/emr/bootstrap_action.py b/boto/emr/bootstrap_action.py
index 7db0b3da..5a01fd21 100644
--- a/boto/emr/bootstrap_action.py
+++ b/boto/emr/bootstrap_action.py
@@ -20,12 +20,14 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
+from boto.compat import six
+
class BootstrapAction(object):
def __init__(self, name, path, bootstrap_action_args):
self.name = name
self.path = path
- if isinstance(bootstrap_action_args, basestring):
+ if isinstance(bootstrap_action_args, six.string_types):
bootstrap_action_args = [bootstrap_action_args]
self.bootstrap_action_args = bootstrap_action_args
diff --git a/boto/emr/connection.py b/boto/emr/connection.py
index 52d4ebc5..d15852ea 100644
--- a/boto/emr/connection.py
+++ b/boto/emr/connection.py
@@ -282,7 +282,7 @@ class EmrConnection(AWSQueryConnection):
value for that tag should be the empty string
(e.g. '') or None.
"""
- assert isinstance(resource_id, basestring)
+ assert isinstance(resource_id, six.string_types)
params = {
'ResourceId': resource_id,
}
@@ -410,7 +410,8 @@ class EmrConnection(AWSQueryConnection):
ami_version=None,
api_params=None,
visible_to_all_users=None,
- job_flow_role=None):
+ job_flow_role=None,
+ service_role=None):
"""
Runs a job flow
:type name: str
@@ -492,6 +493,10 @@ class EmrConnection(AWSQueryConnection):
``EMRJobflowDefault``. In order to use the default role,
you must have already created it using the CLI.
+ :type service_role: str
+ :param service_role: The IAM role that will be assumed by the Amazon
+ EMR service to access AWS resources on your behalf.
+
:rtype: str
:return: The jobflow id
"""
@@ -569,6 +574,9 @@ class EmrConnection(AWSQueryConnection):
if job_flow_role is not None:
params['JobFlowRole'] = job_flow_role
+ if service_role is not None:
+ params['ServiceRole'] = service_role
+
response = self.get_object(
'RunJobFlow', params, RunJobFlowResponse, verb='POST')
return response.jobflowid
diff --git a/boto/emr/step.py b/boto/emr/step.py
index 4cb78898..de6835fb 100644
--- a/boto/emr/step.py
+++ b/boto/emr/step.py
@@ -20,6 +20,8 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
+from boto.compat import six
+
class Step(object):
"""
@@ -73,7 +75,7 @@ class JarStep(Step):
self._main_class = main_class
self.action_on_failure = action_on_failure
- if isinstance(step_args, basestring):
+ if isinstance(step_args, six.string_types):
step_args = [step_args]
self.step_args = step_args
@@ -143,7 +145,7 @@ class StreamingStep(Step):
self.output = output
self._jar = jar
- if isinstance(step_args, basestring):
+ if isinstance(step_args, six.string_types):
step_args = [step_args]
self.step_args = step_args
diff --git a/boto/endpoints.json b/boto/endpoints.json
index 27b50762..8823f668 100644
--- a/boto/endpoints.json
+++ b/boto/endpoints.json
@@ -190,7 +190,13 @@
"kinesis": {
"us-east-1": "kinesis.us-east-1.amazonaws.com",
"us-west-2": "kinesis.us-west-2.amazonaws.com",
- "eu-west-1": "kinesis.eu-west-1.amazonaws.com"
+ "eu-west-1": "kinesis.eu-west-1.amazonaws.com",
+ "ap-southeast-1": "kinesis.ap-southeast-1.amazonaws.com",
+ "ap-southeast-2": "kinesis.ap-southeast-2.amazonaws.com",
+ "ap-northeast-1": "kinesis.ap-northeast-1.amazonaws.com"
+ },
+ "logs": {
+ "us-east-1": "logs.us-east-1.amazonaws.com"
},
"opsworks": {
"us-east-1": "opsworks.us-east-1.amazonaws.com"
diff --git a/boto/file/key.py b/boto/file/key.py
index 2f20cae5..3ec345d4 100755
--- a/boto/file/key.py
+++ b/boto/file/key.py
@@ -22,9 +22,11 @@
# File representation of key, for use with "file://" URIs.
-import os, shutil, StringIO
+import os, shutil
import sys
+from boto.compat import StringIO
+
class Key(object):
KEY_STREAM_READABLE = 0x01
@@ -182,7 +184,7 @@ class Key(object):
:returns: The contents of the file as a string
"""
- fp = StringIO.StringIO()
+ fp = StringIO()
self.get_contents_to_file(fp)
return fp.getvalue()
diff --git a/boto/glacier/concurrent.py b/boto/glacier/concurrent.py
index 6c038eab..93a12d4f 100644
--- a/boto/glacier/concurrent.py
+++ b/boto/glacier/concurrent.py
@@ -27,7 +27,7 @@ import threading
import hashlib
import time
import logging
-from Queue import Queue, Empty
+from boto.compat import Queue
import binascii
from boto.glacier.utils import DEFAULT_PART_SIZE, minimum_part_size, \
diff --git a/boto/glacier/response.py b/boto/glacier/response.py
index 78d9f5f9..a67ec61d 100644
--- a/boto/glacier/response.py
+++ b/boto/glacier/response.py
@@ -37,7 +37,7 @@ class GlacierResponse(dict):
for header_name, item_name in response_headers:
self[item_name] = http_response.getheader(header_name)
if http_response.getheader('Content-Type') == 'application/json':
- body = json.loads(http_response.read())
+ body = json.loads(http_response.read().decode('utf-8'))
self.update(body)
size = http_response.getheader('Content-Length', None)
if size is not None:
diff --git a/boto/glacier/utils.py b/boto/glacier/utils.py
index fe53bb8f..21da67ba 100644
--- a/boto/glacier/utils.py
+++ b/boto/glacier/utils.py
@@ -21,6 +21,7 @@
#
import hashlib
import math
+import binascii
_MEGABYTE = 1024 * 1024
@@ -76,7 +77,7 @@ def chunk_hashes(bytestring, chunk_size=_MEGABYTE):
end = (i + 1) * chunk_size
hashes.append(hashlib.sha256(bytestring[start:end]).digest())
if not hashes:
- return [hashlib.sha256('').digest()]
+ return [hashlib.sha256(b'').digest()]
return hashes
@@ -123,18 +124,18 @@ def compute_hashes_from_fileobj(fileobj, chunk_size=1024 * 1024):
"""
linear_hash = hashlib.sha256()
chunks = []
- chunk = fileobj.read(chunk_size)
+ chunk = fileobj.read(chunk_size).encode('utf-8')
while chunk:
linear_hash.update(chunk)
chunks.append(hashlib.sha256(chunk).digest())
chunk = fileobj.read(chunk_size)
if not chunks:
- chunks = [hashlib.sha256('').digest()]
+ chunks = [hashlib.sha256(b'').digest()]
return linear_hash.hexdigest(), bytes_to_hex(tree_hash(chunks))
def bytes_to_hex(str_as_bytes):
- return ''.join(["%02x" % ord(x) for x in str_as_bytes]).strip()
+ return binascii.hexlify(str_as_bytes)
def tree_hash_from_str(str_as_bytes):
diff --git a/boto/glacier/vault.py b/boto/glacier/vault.py
index 6070ffb8..e447c188 100644
--- a/boto/glacier/vault.py
+++ b/boto/glacier/vault.py
@@ -22,6 +22,8 @@
# IN THE SOFTWARE.
#
from __future__ import with_statement
+import codecs
+from boto.compat import six
from boto.glacier.exceptions import UploadArchiveError
from boto.glacier.job import Job
from boto.glacier.writer import compute_hashes_from_fileobj, \
@@ -55,8 +57,6 @@ class Vault(object):
if response_data:
for response_name, attr_name, default in self.ResponseDataElements:
value = response_data[response_name]
- if isinstance(value, unicode):
- value = value.encode('utf8')
setattr(self, attr_name, value)
else:
for response_name, attr_name, default in self.ResponseDataElements:
@@ -228,7 +228,7 @@ class Vault(object):
for part_desc in part_list_response['Parts']:
part_index = self._range_string_to_part_index(
part_desc['RangeInBytes'], part_size)
- part_tree_hash = part_desc['SHA256TreeHash'].decode('hex')
+ part_tree_hash = codecs.decode(part_desc['SHA256TreeHash'], 'hex_codec')
part_hash_map[part_index] = part_tree_hash
if not file_obj:
diff --git a/boto/glacier/writer.py b/boto/glacier/writer.py
index ad0ab265..fa3161ab 100644
--- a/boto/glacier/writer.py
+++ b/boto/glacier/writer.py
@@ -53,7 +53,7 @@ class _Partitioner(object):
self._buffer_size = 0
def write(self, data):
- if data == '':
+ if data == b'':
return
self._buffer.append(data)
self._buffer_size += len(data)
@@ -61,7 +61,7 @@ class _Partitioner(object):
self._send_part()
def _send_part(self):
- data = ''.join(self._buffer)
+ data = b''.join(self._buffer)
# Put back any data remaining over the part size into the
# buffer
if len(data) > self.part_size:
@@ -164,7 +164,7 @@ class _Uploader(object):
def generate_parts_from_fobj(fobj, part_size):
data = fobj.read(part_size)
while data:
- yield data
+ yield data.encode('utf-8')
data = fobj.read(part_size)
diff --git a/boto/gs/resumable_upload_handler.py b/boto/gs/resumable_upload_handler.py
index 5b88b621..d7443469 100644
--- a/boto/gs/resumable_upload_handler.py
+++ b/boto/gs/resumable_upload_handler.py
@@ -26,16 +26,13 @@ import re
import socket
import time
import urlparse
+from hashlib import md5
from boto import config, UserAgent
from boto.connection import AWSAuthConnection
from boto.exception import InvalidUriError
from boto.exception import ResumableTransferDisposition
from boto.exception import ResumableUploadException
from boto.s3.keyfile import KeyFile
-try:
- from hashlib import md5
-except ImportError:
- from md5 import md5
"""
Handler for Google Cloud Storage resumable uploads. See
diff --git a/boto/https_connection.py b/boto/https_connection.py
index 5cc5029d..9222fbde 100644
--- a/boto/https_connection.py
+++ b/boto/https_connection.py
@@ -112,10 +112,10 @@ class CertValidatingHTTPSConnection(http_client.HTTPConnection):
def connect(self):
"Connect to a host on a given (SSL) port."
- sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- if hasattr(self, "timeout") and self.timeout is not socket._GLOBAL_DEFAULT_TIMEOUT:
- sock.settimeout(self.timeout)
- sock.connect((self.host, self.port))
+ if hasattr(self, "timeout"):
+ sock = socket.create_connection((self.host, self.port), self.timeout)
+ else:
+ sock = socket.create_connection((self.host, self.port))
msg = "wrapping ssl socket; "
if self.ca_certs:
msg += "CA certificate file=%s" %self.ca_certs
diff --git a/boto/iam/connection.py b/boto/iam/connection.py
index bbb0afd9..da242c2c 100644
--- a/boto/iam/connection.py
+++ b/boto/iam/connection.py
@@ -21,7 +21,7 @@
# IN THE SOFTWARE.
import boto
import boto.jsonresponse
-from boto.compat import json
+from boto.compat import json, six
from boto.resultset import ResultSet
from boto.iam.summarymap import SummaryMap
from boto.connection import AWSQueryConnection
@@ -1104,7 +1104,7 @@ class IAMConnection(AWSQueryConnection):
def _build_policy(self, assume_role_policy_document=None):
if assume_role_policy_document is not None:
- if isinstance(assume_role_policy_document, basestring):
+ if isinstance(assume_role_policy_document, six.string_types):
# Historically, they had to pass a string. If it's a string,
# assume the user has already handled it.
return assume_role_policy_document
@@ -1440,7 +1440,7 @@ class IAMConnection(AWSQueryConnection):
Lists the SAML providers in the account.
This operation requires `Signature Version 4`_.
"""
- return self.get_response('ListSAMLProviders', {})
+ return self.get_response('ListSAMLProviders', {}, list_marker='SAMLProviderList')
def get_saml_provider(self, saml_provider_arn):
"""
diff --git a/boto/kinesis/layer1.py b/boto/kinesis/layer1.py
index 11bf079e..d514b064 100644
--- a/boto/kinesis/layer1.py
+++ b/boto/kinesis/layer1.py
@@ -20,11 +20,6 @@
# IN THE SOFTWARE.
#
-try:
- import json
-except ImportError:
- import simplejson as json
-
import base64
import boto
@@ -32,6 +27,7 @@ from boto.connection import AWSQueryConnection
from boto.regioninfo import RegionInfo
from boto.exception import JSONResponseError
from boto.kinesis import exceptions
+from boto.compat import json
class KinesisConnection(AWSQueryConnection):
diff --git a/boto/logs/__init__.py b/boto/logs/__init__.py
new file mode 100644
index 00000000..2ea075d1
--- /dev/null
+++ b/boto/logs/__init__.py
@@ -0,0 +1,41 @@
+# Copyright (c) 2014 Amazon.com, Inc. or its affiliates.
+# All Rights Reserved
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish, dis-
+# tribute, sublicense, and/or sell copies of the Software, and to permit
+# persons to whom the Software is furnished to do so, subject to the fol-
+# lowing conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
+# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+#
+from boto.regioninfo import get_regions
+
+
+def regions():
+ """
+ Get all available regions for the CloudWatch Logs service.
+
+ :rtype: list
+ :return: A list of :class:`boto.regioninfo.RegionInfo`
+ """
+ from boto.logs.layer1 import CloudWatchLogsConnection
+ return get_regions('logs', connection_cls=CloudWatchLogsConnection)
+
+
+def connect_to_region(region_name, **kw_params):
+ for region in regions():
+ if region.name == region_name:
+ return region.connect(**kw_params)
+ return None
diff --git a/boto/logs/exceptions.py b/boto/logs/exceptions.py
new file mode 100644
index 00000000..49c01fa9
--- /dev/null
+++ b/boto/logs/exceptions.py
@@ -0,0 +1,59 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2012 Thomas Parslow http://almostobsolete.net/
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish, dis-
+# tribute, sublicense, and/or sell copies of the Software, and to permit
+# persons to whom the Software is furnished to do so, subject to the fol-
+# lowing conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
+# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+#
+from boto.exception import BotoServerError
+
+
+class LimitExceededException(BotoServerError):
+ pass
+
+
+class DataAlreadyAcceptedException(BotoServerError):
+ pass
+
+
+class ResourceInUseException(BotoServerError):
+ pass
+
+
+class ServiceUnavailableException(BotoServerError):
+ pass
+
+
+class InvalidParameterException(BotoServerError):
+ pass
+
+
+class ResourceNotFoundException(BotoServerError):
+ pass
+
+
+class ResourceAlreadyExistsException(BotoServerError):
+ pass
+
+
+class OperationAbortedException(BotoServerError):
+ pass
+
+
+class InvalidSequenceTokenException(BotoServerError):
+ pass
diff --git a/boto/logs/layer1.py b/boto/logs/layer1.py
new file mode 100644
index 00000000..254e2855
--- /dev/null
+++ b/boto/logs/layer1.py
@@ -0,0 +1,577 @@
+# Copyright (c) 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish, dis-
+# tribute, sublicense, and/or sell copies of the Software, and to permit
+# persons to whom the Software is furnished to do so, subject to the fol-
+# lowing conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
+# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+#
+
+import boto
+from boto.connection import AWSQueryConnection
+from boto.regioninfo import RegionInfo
+from boto.exception import JSONResponseError
+from boto.logs import exceptions
+from boto.compat import json
+
+
+class CloudWatchLogsConnection(AWSQueryConnection):
+ """
+ Amazon CloudWatch Logs Service API Reference
+ This is the Amazon CloudWatch Logs API Reference . Amazon
+ CloudWatch Logs is a managed service for real time monitoring and
+ archival of application logs. This guide provides detailed
+ information about Amazon CloudWatch Logs actions, data types,
+ parameters, and errors. For detailed information about Amazon
+ CloudWatch Logs features and their associated API calls, go to the
+ `Amazon CloudWatch Logs Developer Guide`_.
+
+ Use the following links to get started using the Amazon CloudWatch
+ API Reference :
+
+
+ + `Actions`_: An alphabetical list of all Amazon CloudWatch Logs
+ actions.
+ + `Data Types`_: An alphabetical list of all Amazon CloudWatch
+ Logs data types.
+ + `Common Parameters`_: Parameters that all Query actions can use.
+ + `Common Errors`_: Client and server errors that all actions can
+ return.
+ + `Regions and Endpoints`_: Itemized regions and endpoints for all
+ AWS products.
+
+
+ In addition to using the Amazon CloudWatch Logs API, you can also
+ use the following SDKs and third-party libraries to access Amazon
+ CloudWatch Logs programmatically.
+
+
+ + `AWS SDK for Java Documentation`_
+ + `AWS SDK for .NET Documentation`_
+ + `AWS SDK for PHP Documentation`_
+ + `AWS SDK for Ruby Documentation`_
+
+
+ Developers in the AWS developer community also provide their own
+ libraries, which you can find at the following AWS developer
+ centers:
+
+
+ + `AWS Java Developer Center`_
+ + `AWS PHP Developer Center`_
+ + `AWS Python Developer Center`_
+ + `AWS Ruby Developer Center`_
+ + `AWS Windows and .NET Developer Center`_
+ """
+ APIVersion = "2014-03-28"
+ DefaultRegionName = "us-east-1"
+ DefaultRegionEndpoint = "logs.us-east-1.amazonaws.com"
+ ServiceName = "CloudWatchLogs"
+ TargetPrefix = "Logs_20140328"
+ ResponseError = JSONResponseError
+
+ _faults = {
+ "LimitExceededException": exceptions.LimitExceededException,
+ "DataAlreadyAcceptedException": exceptions.DataAlreadyAcceptedException,
+ "ResourceInUseException": exceptions.ResourceInUseException,
+ "ServiceUnavailableException": exceptions.ServiceUnavailableException,
+ "InvalidParameterException": exceptions.InvalidParameterException,
+ "ResourceNotFoundException": exceptions.ResourceNotFoundException,
+ "ResourceAlreadyExistsException": exceptions.ResourceAlreadyExistsException,
+ "OperationAbortedException": exceptions.OperationAbortedException,
+ "InvalidSequenceTokenException": exceptions.InvalidSequenceTokenException,
+ }
+
+
+ def __init__(self, **kwargs):
+ region = kwargs.pop('region', None)
+ if not region:
+ region = RegionInfo(self, self.DefaultRegionName,
+ self.DefaultRegionEndpoint)
+
+ if 'host' not in kwargs or kwargs['host'] is None:
+ kwargs['host'] = region.endpoint
+
+ super(CloudWatchLogsConnection, self).__init__(**kwargs)
+ self.region = region
+
+ def _required_auth_capability(self):
+ return ['hmac-v4']
+
+ def create_log_group(self, log_group_name):
+ """
+ Creates a new log group with the specified name. The name of
+ the log group must be unique within a region for an AWS
+ account. You can create up to 100 log groups per account.
+
+ You must use the following guidelines when naming a log group:
+
+ + Log group names can be between 1 and 512 characters long.
+        + Allowed characters are a-z, A-Z, 0-9, '_' (underscore), '-'
+ (hyphen), '/' (forward slash), and '.' (period).
+
+
+
+ Log groups are created with a default retention of 14 days.
+        The retention attribute allows you to configure the number of
+ days you want to retain log events in the specified log group.
+ See the `SetRetention` operation on how to modify the
+ retention of your log groups.
+
+ :type log_group_name: string
+ :param log_group_name:
+
+ """
+ params = {'logGroupName': log_group_name, }
+ return self.make_request(action='CreateLogGroup',
+ body=json.dumps(params))
+
+ def create_log_stream(self, log_group_name, log_stream_name):
+ """
+ Creates a new log stream in the specified log group. The name
+ of the log stream must be unique within the log group. There
+ is no limit on the number of log streams that can exist in a
+ log group.
+
+ You must use the following guidelines when naming a log
+ stream:
+
+ + Log stream names can be between 1 and 512 characters long.
+ + The ':' colon character is not allowed.
+
+ :type log_group_name: string
+ :param log_group_name:
+
+ :type log_stream_name: string
+ :param log_stream_name:
+
+ """
+ params = {
+ 'logGroupName': log_group_name,
+ 'logStreamName': log_stream_name,
+ }
+ return self.make_request(action='CreateLogStream',
+ body=json.dumps(params))
+
+ def delete_log_group(self, log_group_name):
+ """
+ Deletes the log group with the specified name. Amazon
+ CloudWatch Logs will delete a log group only if there are no
+ log streams and no metric filters associated with the log
+ group. If this condition is not satisfied, the request will
+ fail and the log group will not be deleted.
+
+ :type log_group_name: string
+ :param log_group_name:
+
+ """
+ params = {'logGroupName': log_group_name, }
+ return self.make_request(action='DeleteLogGroup',
+ body=json.dumps(params))
+
+ def delete_log_stream(self, log_group_name, log_stream_name):
+ """
+ Deletes a log stream and permanently deletes all the archived
+ log events associated with it.
+
+ :type log_group_name: string
+ :param log_group_name:
+
+ :type log_stream_name: string
+ :param log_stream_name:
+
+ """
+ params = {
+ 'logGroupName': log_group_name,
+ 'logStreamName': log_stream_name,
+ }
+ return self.make_request(action='DeleteLogStream',
+ body=json.dumps(params))
+
+ def delete_metric_filter(self, log_group_name, filter_name):
+ """
+ Deletes a metric filter associated with the specified log
+ group.
+
+ :type log_group_name: string
+ :param log_group_name:
+
+ :type filter_name: string
+ :param filter_name: The name of the metric filter.
+
+ """
+ params = {
+ 'logGroupName': log_group_name,
+ 'filterName': filter_name,
+ }
+ return self.make_request(action='DeleteMetricFilter',
+ body=json.dumps(params))
+
+ def delete_retention_policy(self, log_group_name):
+ """
+
+
+ :type log_group_name: string
+ :param log_group_name:
+
+ """
+ params = {'logGroupName': log_group_name, }
+ return self.make_request(action='DeleteRetentionPolicy',
+ body=json.dumps(params))
+
+ def describe_log_groups(self, log_group_name_prefix=None,
+ next_token=None, limit=None):
+ """
+ Returns all the log groups that are associated with the AWS
+ account making the request. The list returned in the response
+ is ASCII-sorted by log group name.
+
+ By default, this operation returns up to 50 log groups. If
+ there are more log groups to list, the response would contain
+ a `nextToken` value in the response body. You can also limit
+ the number of log groups returned in the response by
+ specifying the `limit` parameter in the request.
+
+ :type log_group_name_prefix: string
+ :param log_group_name_prefix:
+
+ :type next_token: string
+ :param next_token: A string token used for pagination that points to
+ the next page of results. It must be a value obtained from the
+ response of the previous `DescribeLogGroups` request.
+
+ :type limit: integer
+ :param limit: The maximum number of items returned in the response. If
+ you don't specify a value, the request would return up to 50 items.
+
+ """
+ params = {}
+ if log_group_name_prefix is not None:
+ params['logGroupNamePrefix'] = log_group_name_prefix
+ if next_token is not None:
+ params['nextToken'] = next_token
+ if limit is not None:
+ params['limit'] = limit
+ return self.make_request(action='DescribeLogGroups',
+ body=json.dumps(params))
+
+ def describe_log_streams(self, log_group_name,
+ log_stream_name_prefix=None, next_token=None,
+ limit=None):
+ """
+ Returns all the log streams that are associated with the
+ specified log group. The list returned in the response is
+ ASCII-sorted by log stream name.
+
+ By default, this operation returns up to 50 log streams. If
+ there are more log streams to list, the response would contain
+ a `nextToken` value in the response body. You can also limit
+ the number of log streams returned in the response by
+ specifying the `limit` parameter in the request.
+
+ :type log_group_name: string
+ :param log_group_name:
+
+ :type log_stream_name_prefix: string
+ :param log_stream_name_prefix:
+
+ :type next_token: string
+ :param next_token: A string token used for pagination that points to
+ the next page of results. It must be a value obtained from the
+ response of the previous `DescribeLogStreams` request.
+
+ :type limit: integer
+ :param limit: The maximum number of items returned in the response. If
+ you don't specify a value, the request would return up to 50 items.
+
+ """
+ params = {'logGroupName': log_group_name, }
+ if log_stream_name_prefix is not None:
+ params['logStreamNamePrefix'] = log_stream_name_prefix
+ if next_token is not None:
+ params['nextToken'] = next_token
+ if limit is not None:
+ params['limit'] = limit
+ return self.make_request(action='DescribeLogStreams',
+ body=json.dumps(params))
+
+ def describe_metric_filters(self, log_group_name,
+ filter_name_prefix=None, next_token=None,
+ limit=None):
+ """
+ Returns all the metrics filters associated with the specified
+ log group. The list returned in the response is ASCII-sorted
+ by filter name.
+
+ By default, this operation returns up to 50 metric filters. If
+ there are more metric filters to list, the response would
+ contain a `nextToken` value in the response body. You can also
+ limit the number of metric filters returned in the response by
+ specifying the `limit` parameter in the request.
+
+ :type log_group_name: string
+ :param log_group_name:
+
+ :type filter_name_prefix: string
+ :param filter_name_prefix: The name of the metric filter.
+
+ :type next_token: string
+ :param next_token: A string token used for pagination that points to
+ the next page of results. It must be a value obtained from the
+ response of the previous `DescribeMetricFilters` request.
+
+ :type limit: integer
+ :param limit: The maximum number of items returned in the response. If
+ you don't specify a value, the request would return up to 50 items.
+
+ """
+ params = {'logGroupName': log_group_name, }
+ if filter_name_prefix is not None:
+ params['filterNamePrefix'] = filter_name_prefix
+ if next_token is not None:
+ params['nextToken'] = next_token
+ if limit is not None:
+ params['limit'] = limit
+ return self.make_request(action='DescribeMetricFilters',
+ body=json.dumps(params))
+
+ def get_log_events(self, log_group_name, log_stream_name,
+ start_time=None, end_time=None, next_token=None,
+ limit=None, start_from_head=None):
+ """
+ Retrieves log events from the specified log stream. You can
+ provide an optional time range to filter the results on the
+ event `timestamp`.
+
+        By default, this operation returns as many log events as can
+ fit in a response size of 1MB, up to 10,000 log events. The
+ response will always include a `nextForwardToken` and a
+ `nextBackwardToken` in the response body. You can use any of
+ these tokens in subsequent `GetLogEvents` requests to paginate
+ through events in either forward or backward direction. You
+ can also limit the number of log events returned in the
+ response by specifying the `limit` parameter in the request.
+
+ :type log_group_name: string
+ :param log_group_name:
+
+ :type log_stream_name: string
+ :param log_stream_name:
+
+ :type start_time: long
+        :param start_time: A point in time expressed as the number of milliseconds
+ since Jan 1, 1970 00:00:00 UTC.
+
+ :type end_time: long
+        :param end_time: A point in time expressed as the number of milliseconds
+ since Jan 1, 1970 00:00:00 UTC.
+
+ :type next_token: string
+ :param next_token: A string token used for pagination that points to
+ the next page of results. It must be a value obtained from the
+ `nextForwardToken` or `nextBackwardToken` fields in the response of
+ the previous `GetLogEvents` request.
+
+ :type limit: integer
+ :param limit: The maximum number of log events returned in the
+ response. If you don't specify a value, the request would return as
+            many log events as can fit in a response size of 1MB, up to 10,000
+ log events.
+
+ :type start_from_head: boolean
+ :param start_from_head:
+
+ """
+ params = {
+ 'logGroupName': log_group_name,
+ 'logStreamName': log_stream_name,
+ }
+ if start_time is not None:
+ params['startTime'] = start_time
+ if end_time is not None:
+ params['endTime'] = end_time
+ if next_token is not None:
+ params['nextToken'] = next_token
+ if limit is not None:
+ params['limit'] = limit
+ if start_from_head is not None:
+ params['startFromHead'] = start_from_head
+ return self.make_request(action='GetLogEvents',
+ body=json.dumps(params))
+
+ def put_log_events(self, log_group_name, log_stream_name, log_events,
+ sequence_token=None):
+ """
+ Uploads a batch of log events to the specified log stream.
+
+ Every PutLogEvents request must include the `sequenceToken`
+ obtained from the response of the previous request. An upload
+ in a newly created log stream does not require a
+ `sequenceToken`.
+
+ The batch of events must satisfy the following constraints:
+
+ + The maximum batch size is 32,768 bytes, and this size is
+ calculated as the sum of all event messages in UTF-8, plus 26
+ bytes for each log event.
+ + None of the log events in the batch can be more than 2 hours
+ in the future.
+ + None of the log events in the batch can be older than 14
+ days or the retention period of the log group.
+        + The log events in the batch must be in chronological order
+ by their `timestamp`.
+ + The maximum number of log events in a batch is 1,000.
+
+ :type log_group_name: string
+ :param log_group_name:
+
+ :type log_stream_name: string
+ :param log_stream_name:
+
+ :type log_events: list
+ :param log_events: A list of events belonging to a log stream.
+
+ :type sequence_token: string
+ :param sequence_token: A string token that must be obtained from the
+ response of the previous `PutLogEvents` request.
+
+ """
+ params = {
+ 'logGroupName': log_group_name,
+ 'logStreamName': log_stream_name,
+ 'logEvents': log_events,
+ }
+ if sequence_token is not None:
+ params['sequenceToken'] = sequence_token
+ return self.make_request(action='PutLogEvents',
+ body=json.dumps(params))
+
+ def put_metric_filter(self, log_group_name, filter_name, filter_pattern,
+ metric_transformations):
+ """
+ Creates or updates a metric filter and associates it with the
+ specified log group. Metric filters allow you to configure
+ rules to extract metric data from log events ingested through
+ `PutLogEvents` requests.
+
+ :type log_group_name: string
+ :param log_group_name:
+
+ :type filter_name: string
+ :param filter_name: The name of the metric filter.
+
+ :type filter_pattern: string
+ :param filter_pattern:
+
+ :type metric_transformations: list
+ :param metric_transformations:
+
+ """
+ params = {
+ 'logGroupName': log_group_name,
+ 'filterName': filter_name,
+ 'filterPattern': filter_pattern,
+ 'metricTransformations': metric_transformations,
+ }
+ return self.make_request(action='PutMetricFilter',
+ body=json.dumps(params))
+
+ def put_retention_policy(self, log_group_name, retention_in_days):
+ """
+
+
+ :type log_group_name: string
+ :param log_group_name:
+
+ :type retention_in_days: integer
+ :param retention_in_days: Specifies the number of days you want to
+ retain log events in the specified log group. Possible values are:
+ 1, 3, 5, 7, 14, 30, 60, 90, 120, 150, 180, 365, 400, 547, 730.
+
+ """
+ params = {
+ 'logGroupName': log_group_name,
+ 'retentionInDays': retention_in_days,
+ }
+ return self.make_request(action='PutRetentionPolicy',
+ body=json.dumps(params))
+
+ def set_retention(self, log_group_name, retention_in_days):
+ """
+ Sets the retention of the specified log group. Log groups are
+ created with a default retention of 14 days. The retention
+        attribute allows you to configure the number of days you want
+ to retain log events in the specified log group.
+
+ :type log_group_name: string
+ :param log_group_name:
+
+ :type retention_in_days: integer
+ :param retention_in_days: Specifies the number of days you want to
+ retain log events in the specified log group. Possible values are:
+ 1, 3, 5, 7, 14, 30, 60, 90, 120, 150, 180, 365, 400, 547, 730.
+
+ """
+ params = {
+ 'logGroupName': log_group_name,
+ 'retentionInDays': retention_in_days,
+ }
+ return self.make_request(action='SetRetention',
+ body=json.dumps(params))
+
+ def test_metric_filter(self, filter_pattern, log_event_messages):
+ """
+ Tests the filter pattern of a metric filter against a sample
+ of log event messages. You can use this operation to validate
+ the correctness of a metric filter pattern.
+
+ :type filter_pattern: string
+ :param filter_pattern:
+
+ :type log_event_messages: list
+ :param log_event_messages:
+
+ """
+ params = {
+ 'filterPattern': filter_pattern,
+ 'logEventMessages': log_event_messages,
+ }
+ return self.make_request(action='TestMetricFilter',
+ body=json.dumps(params))
+
+ def make_request(self, action, body):
+ headers = {
+ 'X-Amz-Target': '%s.%s' % (self.TargetPrefix, action),
+ 'Host': self.region.endpoint,
+ 'Content-Type': 'application/x-amz-json-1.1',
+ 'Content-Length': str(len(body)),
+ }
+ http_request = self.build_base_http_request(
+ method='POST', path='/', auth_path='/', params={},
+ headers=headers, data=body)
+ response = self._mexe(http_request, sender=None,
+ override_num_retries=10)
+ response_body = response.read().decode('utf-8')
+ boto.log.debug(response_body)
+ if response.status == 200:
+ if response_body:
+ return json.loads(response_body)
+ else:
+ json_body = json.loads(response_body)
+ fault_name = json_body.get('__type', None)
+ exception_class = self._faults.get(fault_name, self.ResponseError)
+ raise exception_class(response.status, response.reason,
+ body=json_body)
diff --git a/boto/manage/cmdshell.py b/boto/manage/cmdshell.py
index 86184c57..f5322776 100644
--- a/boto/manage/cmdshell.py
+++ b/boto/manage/cmdshell.py
@@ -18,6 +18,12 @@
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
+"""
+The cmdshell module uses the paramiko package to create SSH connections
+to the servers that are represented by instance objects. The module has
+functions for running commands, managing files, and opening interactive
+shell sessions over those connections.
+"""
from boto.mashups.interactive import interactive_shell
import boto
import os
@@ -29,9 +35,19 @@ import subprocess
from boto.compat import StringIO
-
class SSHClient(object):
-
+ """
+ This class creates a paramiko.SSHClient() object that represents
+ a session with an SSH server. You can use the SSHClient object to send
+ commands to the remote host and manipulate files on the remote host.
+
+ :ivar server: A Server object or FakeServer object.
+ :ivar host_key_file: The path to the user's .ssh key files.
+ :ivar uname: The username for the SSH connection. Default = 'root'.
+ :ivar timeout: The optional timeout variable for the TCP connection.
+ :ivar ssh_pwd: An optional password to use for authentication or for
+ unlocking the private key.
+ """
def __init__(self, server,
host_key_file='~/.ssh/known_hosts',
uname='root', timeout=None, ssh_pwd=None):
@@ -48,6 +64,12 @@ class SSHClient(object):
self.connect()
def connect(self, num_retries=5):
+ """
+ Connect to an SSH server and authenticate with it.
+
+ :type num_retries: int
+ :param num_retries: The maximum number of connection attempts.
+ """
retry = 0
while retry < num_retries:
try:
@@ -76,34 +98,112 @@ class SSHClient(object):
print('Could not establish SSH connection')
def open_sftp(self):
+ """
+ Open an SFTP session on the SSH server.
+
+ :rtype: :class:`paramiko.sftp_client.SFTPClient`
+ :return: An SFTP client object.
+ """
return self._ssh_client.open_sftp()
def get_file(self, src, dst):
+ """
+ Open an SFTP session on the remote host, and copy a file from
+ the remote host to the specified path on the local host.
+
+ :type src: string
+ :param src: The path to the target file on the remote host.
+
+ :type dst: string
+ :param dst: The path on your local host where you want to
+ store the file.
+ """
sftp_client = self.open_sftp()
sftp_client.get(src, dst)
def put_file(self, src, dst):
+ """
+ Open an SFTP session on the remote host, and copy a file from
+ the local host to the specified path on the remote host.
+
+ :type src: string
+ :param src: The path to the target file on your local host.
+
+ :type dst: string
+ :param dst: The path on the remote host where you want to store
+ the file.
+ """
sftp_client = self.open_sftp()
sftp_client.put(src, dst)
def open(self, filename, mode='r', bufsize=-1):
"""
- Open a file on the remote system and return a file-like object.
+ Open an SFTP session to the remote host, and open a file on
+ that host.
+
+ :type filename: string
+ :param filename: The path to the file on the remote host.
+
+ :type mode: string
+ :param mode: The file interaction mode.
+
+ :type bufsize: integer
+ :param bufsize: The file buffer size.
+
+ :rtype: :class:`paramiko.sftp_file.SFTPFile`
+ :return: A paramiko proxy object for a file on the remote server.
"""
sftp_client = self.open_sftp()
return sftp_client.open(filename, mode, bufsize)
def listdir(self, path):
+ """
+ List all of the files and subdirectories at the specified path
+ on the remote host.
+
+ :type path: string
+ :param path: The base path from which to obtain the list.
+
+ :rtype: list
+ :return: A list of files and subdirectories at the specified path.
+ """
sftp_client = self.open_sftp()
return sftp_client.listdir(path)
def isdir(self, path):
+ """
+ Check the specified path on the remote host to determine if
+ it is a directory.
+
+ :type path: string
+ :param path: The path to the directory that you want to check.
+
+ :rtype: integer
+ :return: If the path is a directory, the function returns 1.
+ If the path is a file or an invalid path, the function
+ returns 0.
+ """
status = self.run('[ -d %s ] || echo "FALSE"' % path)
if status[1].startswith('FALSE'):
return 0
return 1
def exists(self, path):
+ """
+ Check the remote host for the specified path, or a file
+ at the specified path. This function returns 1 if the
+ path or the file exist on the remote host, and returns 0 if
+ the path or the file does not exist on the remote host.
+
+ :type path: string
+ :param path: The path to the directory or file that you want to check.
+
+ :rtype: integer
+ :return: If the path or the file exist, the function returns 1.
+ If the path or the file do not exist on the remote host,
+ the function returns 0.
+ """
+
status = self.run('[ -a %s ] || echo "FALSE"' % path)
if status[1].startswith('FALSE'):
return 0
@@ -111,16 +211,22 @@ class SSHClient(object):
def shell(self):
"""
- Start an interactive shell session on the remote host.
+ Start an interactive shell session with the remote host.
"""
channel = self._ssh_client.invoke_shell()
interactive_shell(channel)
def run(self, command):
"""
- Execute a command on the remote host. Return a tuple containing
- an integer status and two strings, the first containing stdout
- and the second containing stderr from the command.
+ Run a command on the remote host.
+
+ :type command: string
+ :param command: The command that you want to send to the remote host.
+
+ :rtype: tuple
+ :return: This function returns a tuple that contains an integer status,
+ the stdout from the command, and the stderr from the command.
+
"""
boto.log.debug('running:%s on %s' % (command, self.server.instance_id))
status = 0
@@ -139,8 +245,14 @@ class SSHClient(object):
def run_pty(self, command):
"""
- Execute a command on the remote host with a pseudo-terminal.
- Returns a string containing the output of the command.
+ Request a pseudo-terminal from a server, and execute a command on that
+ server.
+
+ :type command: string
+ :param command: The command that you want to run on the remote host.
+
+ :rtype: :class:`paramiko.channel.Channel`
+ :return: An open channel object.
"""
boto.log.debug('running:%s on %s' % (command, self.server.instance_id))
channel = self._ssh_client.get_transport().open_session()
@@ -149,36 +261,75 @@ class SSHClient(object):
return channel
def close(self):
+ """
+ Close an SSH session and any open channels that are tied to it.
+ """
transport = self._ssh_client.get_transport()
transport.close()
self.server.reset_cmdshell()
class LocalClient(object):
-
+ """
+ :ivar server: A Server object or FakeServer object.
+ :ivar host_key_file: The path to the user's .ssh key files.
+ :ivar uname: The username for the SSH connection. Default = 'root'.
+ """
def __init__(self, server, host_key_file=None, uname='root'):
self.server = server
self.host_key_file = host_key_file
self.uname = uname
def get_file(self, src, dst):
+ """
+ Copy a file from one directory to another.
+ """
shutil.copyfile(src, dst)
def put_file(self, src, dst):
+ """
+ Copy a file from one directory to another.
+ """
shutil.copyfile(src, dst)
def listdir(self, path):
+ """
+ List all of the files and subdirectories at the specified path.
+
+ :rtype: list
+ :return: Return a list containing the names of the entries
+ in the directory given by path.
+ """
return os.listdir(path)
def isdir(self, path):
+ """
+ Check the specified path to determine if it is a directory.
+
+ :rtype: boolean
+ :return: Returns True if the path is an existing directory.
+ """
return os.path.isdir(path)
def exists(self, path):
+ """
+ Check for the specified path, or check a file at the specified path.
+
+ :rtype: boolean
+ :return: If the path or the file exist, the function returns True.
+ """
return os.path.exists(path)
def shell(self):
raise NotImplementedError('shell not supported with LocalClient')
def run(self):
+ """
+ Open a subprocess and run a command on the local host.
+
+ :rtype: tuple
+ :return: This function returns a tuple that contains an integer status
+ and a string with the combined stdout and stderr output.
+ """
boto.log.info('running:%s' % self.command)
log_fp = StringIO()
process = subprocess.Popen(self.command, shell=True, stdin=subprocess.PIPE,
@@ -197,9 +348,13 @@ class LocalClient(object):
class FakeServer(object):
"""
- A little class to fake out SSHClient (which is expecting a
- :class`boto.manage.server.Server` instance. This allows us
- to
+ This object has a subset of the variables that are normally in a
+ :class:`boto.manage.server.Server` object. You can use this FakeServer
+ object to create a :class:`boto.manage.SSHClient` object if you
+ don't have a real Server object.
+
+ :ivar instance: A boto Instance object.
+ :ivar ssh_key_file: The path to the SSH key file.
"""
def __init__(self, instance, ssh_key_file):
self.instance = instance
@@ -208,6 +363,14 @@ class FakeServer(object):
self.instance_id = self.instance.id
def start(server):
+ """
+ Connect to the specified server.
+
+ :return: If the server is local, the function returns a
+ :class:`boto.manage.cmdshell.LocalClient` object.
+ If the server is remote, the function returns a
+ :class:`boto.manage.cmdshell.SSHClient` object.
+ """
instance_id = boto.config.get('Instance', 'instance-id', None)
if instance_id == server.instance_id:
return LocalClient(server)
@@ -224,19 +387,19 @@ def sshclient_from_instance(instance, ssh_key_file,
:type instance: :class`boto.ec2.instance.Instance` object
:param instance: The instance object.
- :type ssh_key_file: str
- :param ssh_key_file: A path to the private key file used
- to log into instance.
+ :type ssh_key_file: string
+ :param ssh_key_file: A path to the private key file that is
+ used to log into the instance.
- :type host_key_file: str
+ :type host_key_file: string
:param host_key_file: A path to the known_hosts file used
by the SSH client.
Defaults to ~/.ssh/known_hosts
- :type user_name: str
+ :type user_name: string
:param user_name: The username to use when logging into
the instance. Defaults to root.
- :type ssh_pwd: str
+ :type ssh_pwd: string
:param ssh_pwd: The passphrase, if any, associated with
private key.
"""
diff --git a/boto/manage/server.py b/boto/manage/server.py
index f6accc73..d9224ab8 100644
--- a/boto/manage/server.py
+++ b/boto/manage/server.py
@@ -31,10 +31,10 @@ from boto.sdb.db.property import StringProperty, IntegerProperty, BooleanPropert
from boto.manage import propget
from boto.ec2.zone import Zone
from boto.ec2.keypair import KeyPair
-import os, time, StringIO
+import os, time
from contextlib import closing
from boto.exception import EC2ResponseError
-from boto.compat import six
+from boto.compat import six, StringIO
InstanceTypes = ['m1.small', 'm1.large', 'm1.xlarge',
'c1.medium', 'c1.xlarge',
@@ -103,7 +103,7 @@ class Bundler(object):
ssh_key = self.server.get_ssh_key_file()
self.copy_x509(key_file, cert_file)
if not fp:
- fp = StringIO.StringIO()
+ fp = StringIO()
fp.write('sudo mv %s /mnt/boto.cfg; ' % BotoConfigPath)
fp.write('mv ~/.ssh/authorized_keys /mnt/authorized_keys; ')
if clear_history:
@@ -250,7 +250,7 @@ class Server(Model):
instance_type = CalculatedProperty(verbose_name="Instance Type", calculated_type=str, use_method=True)
status = CalculatedProperty(verbose_name="Current Status", calculated_type=str, use_method=True)
launch_time = CalculatedProperty(verbose_name="Server Launch Time", calculated_type=str, use_method=True)
- console_output = CalculatedProperty(verbose_name="Console Output", calculated_type=file, use_method=True)
+ console_output = CalculatedProperty(verbose_name="Console Output", calculated_type=open, use_method=True)
packages = []
plugins = []
@@ -305,7 +305,7 @@ class Server(Model):
# deal with possibly passed in logical volume:
if logical_volume != None:
cfg.set('EBS', 'logical_volume_name', logical_volume.name)
- cfg_fp = StringIO.StringIO()
+ cfg_fp = StringIO()
cfg.write(cfg_fp)
# deal with the possibility that zone and/or keypair are strings read from the config file:
if isinstance(zone, Zone):
diff --git a/boto/manage/task.py b/boto/manage/task.py
index 5d273c31..c6663b9f 100644
--- a/boto/manage/task.py
+++ b/boto/manage/task.py
@@ -23,7 +23,8 @@
import boto
from boto.sdb.db.property import StringProperty, DateTimeProperty, IntegerProperty
from boto.sdb.db.model import Model
-import datetime, subprocess, StringIO, time
+import datetime, subprocess, time
+from boto.compat import StringIO
def check_hour(val):
if val == '*':
@@ -100,7 +101,7 @@ class Task(Model):
def _run(self, msg, vtimeout):
boto.log.info('Task[%s] - running:%s' % (self.name, self.command))
- log_fp = StringIO.StringIO()
+ log_fp = StringIO()
process = subprocess.Popen(self.command, shell=True, stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
nsecs = 5
diff --git a/boto/mashups/order.py b/boto/mashups/order.py
index c96974d5..4aaec307 100644
--- a/boto/mashups/order.py
+++ b/boto/mashups/order.py
@@ -28,7 +28,8 @@ from boto.mashups.server import Server, ServerSet
from boto.mashups.iobject import IObject
from boto.pyami.config import Config
from boto.sdb.persist import get_domain, set_domain
-import time, StringIO
+import time
+from boto.compat import StringIO
InstanceTypes = ['m1.small', 'm1.large', 'm1.xlarge', 'c1.medium', 'c1.xlarge']
@@ -123,7 +124,7 @@ class Item(IObject):
self.config = Config(path=config_path)
def get_userdata_string(self):
- s = StringIO.StringIO()
+ s = StringIO()
self.config.write(s)
return s.getvalue()
diff --git a/boto/mws/connection.py b/boto/mws/connection.py
index ea222062..01b0b30b 100644
--- a/boto/mws/connection.py
+++ b/boto/mws/connection.py
@@ -20,7 +20,6 @@
# IN THE SOFTWARE.
import xml.sax
import hashlib
-import base64
import string
import collections
from boto.connection import AWSQueryConnection
@@ -28,7 +27,7 @@ from boto.exception import BotoServerError
import boto.mws.exception
import boto.mws.response
from boto.handler import XmlHandler
-from boto.compat import filter, map
+from boto.compat import filter, map, six, encodebytes
__all__ = ['MWSConnection']
@@ -55,7 +54,7 @@ api_version_path = {
'OffAmazonPayments': ('2013-01-01', 'SellerId',
'/OffAmazonPayments/2013-01-01'),
}
-content_md5 = lambda c: base64.encodestring(hashlib.md5(c).digest()).strip()
+content_md5 = lambda c: encodebytes(hashlib.md5(c).digest()).strip()
decorated_attrs = ('action', 'response', 'section',
'quota', 'restore', 'version')
api_call_map = {}
@@ -64,6 +63,7 @@ api_call_map = {}
def add_attrs_from(func, to):
for attr in decorated_attrs:
setattr(to, attr, getattr(func, attr, None))
+ to.__wrapped__ = func
return to
@@ -115,7 +115,7 @@ def destructure_object(value, into, prefix, members=False):
continue
destructure_object(value[name], into, prefix + '.' + name,
members=members)
- elif isinstance(value, basestring):
+ elif isinstance(value, six.string_types):
into[prefix] = value
elif isinstance(value, collections.Iterable):
for index, element in enumerate(value):
@@ -134,7 +134,7 @@ def structured_objects(*fields, **kwargs):
def wrapper(*args, **kw):
members = kwargs.get('members', False)
- for field in filter(kw.has_key, fields):
+ for field in filter(lambda i: i in kw, fields):
destructure_object(kw.pop(field), kw, field, members=members)
return func(*args, **kw)
wrapper.__doc__ = "{0}\nElement|Iter|Map: {1}\n" \
@@ -148,18 +148,18 @@ def requires(*groups):
def decorator(func):
- def wrapper(*args, **kw):
- hasgroup = lambda x: len(x) == len(filter(kw.has_key, x))
- if 1 != len(filter(hasgroup, groups)):
+ def requires(*args, **kw):
+ hasgroup = lambda group: all(key in kw for key in group)
+ if 1 != len(list(filter(hasgroup, groups))):
message = ' OR '.join(['+'.join(g) for g in groups])
message = "{0} requires {1} argument(s)" \
"".format(func.action, message)
raise KeyError(message)
return func(*args, **kw)
message = ' OR '.join(['+'.join(g) for g in groups])
- wrapper.__doc__ = "{0}\nRequired: {1}".format(func.__doc__,
- message)
- return add_attrs_from(func, to=wrapper)
+ requires.__doc__ = "{0}\nRequired: {1}".format(func.__doc__,
+ message)
+ return add_attrs_from(func, to=requires)
return decorator
@@ -168,8 +168,8 @@ def exclusive(*groups):
def decorator(func):
def wrapper(*args, **kw):
- hasgroup = lambda x: len(x) == len(filter(kw.has_key, x))
- if len(filter(hasgroup, groups)) not in (0, 1):
+ hasgroup = lambda group: all(key in kw for key in group)
+ if len(list(filter(hasgroup, groups))) not in (0, 1):
message = ' OR '.join(['+'.join(g) for g in groups])
message = "{0} requires either {1}" \
"".format(func.action, message)
@@ -187,8 +187,8 @@ def dependent(field, *groups):
def decorator(func):
def wrapper(*args, **kw):
- hasgroup = lambda x: len(x) == len(filter(kw.has_key, x))
- if field in kw and 1 > len(filter(hasgroup, groups)):
+ hasgroup = lambda group: all(key in kw for key in group)
+ if field in kw and not any(hasgroup(g) for g in groups):
message = ' OR '.join(['+'.join(g) for g in groups])
message = "{0} argument {1} requires {2}" \
"".format(func.action, field, message)
@@ -206,15 +206,15 @@ def requires_some_of(*fields):
def decorator(func):
- def wrapper(*args, **kw):
- if not filter(kw.has_key, fields):
+ def requires(*args, **kw):
+ if not any(i in kw for i in fields):
message = "{0} requires at least one of {1} argument(s)" \
"".format(func.action, ', '.join(fields))
raise KeyError(message)
return func(*args, **kw)
- wrapper.__doc__ = "{0}\nSome Required: {1}".format(func.__doc__,
- ', '.join(fields))
- return add_attrs_from(func, to=wrapper)
+ requires.__doc__ = "{0}\nSome Required: {1}".format(func.__doc__,
+ ', '.join(fields))
+ return add_attrs_from(func, to=requires)
return decorator
@@ -364,10 +364,10 @@ class MWSConnection(AWSQueryConnection):
response = more(NextToken=response._result.NextToken)
yield response
+ @requires(['FeedType'])
@boolean_arguments('PurgeAndReplace')
@http_body('FeedContent')
@structured_lists('MarketplaceIdList.Id')
- @requires(['FeedType'])
@api_action('Feeds', 15, 120)
def submit_feed(self, request, response, headers=None, body='', **kw):
"""Uploads a feed for processing by Amazon MWS.
@@ -423,9 +423,9 @@ class MWSConnection(AWSQueryConnection):
"{1}".format(self.__class__.__name__, sections)
raise AttributeError(message)
+ @requires(['ReportType'])
@structured_lists('MarketplaceIdList.Id')
@boolean_arguments('ReportOptions=ShowSalesChannel')
- @requires(['ReportType'])
@api_action('Reports', 15, 60)
def request_report(self, request, response, **kw):
"""Creates a report request and submits the request to Amazon MWS.
@@ -536,8 +536,8 @@ class MWSConnection(AWSQueryConnection):
"""
return self._post_request(request, kw, response)
- @boolean_arguments('Acknowledged')
@requires(['ReportIdList'])
+ @boolean_arguments('Acknowledged')
@structured_lists('ReportIdList.Id')
@api_action('Reports', 10, 45)
def update_report_acknowledgements(self, request, response, **kw):
@@ -643,8 +643,8 @@ class MWSConnection(AWSQueryConnection):
"""
return self._post_request(request, kw, response)
- @structured_objects('Address', 'Items')
@requires(['Address', 'Items'])
+ @structured_objects('Address', 'Items')
@api_action('Outbound', 30, 0.5)
def get_fulfillment_preview(self, request, response, **kw):
"""Returns a list of fulfillment order previews based on items
@@ -652,11 +652,11 @@ class MWSConnection(AWSQueryConnection):
"""
return self._post_request(request, kw, response)
- @structured_objects('DestinationAddress', 'Items')
@requires(['SellerFulfillmentOrderId', 'DisplayableOrderId',
'ShippingSpeedCategory', 'DisplayableOrderDateTime',
'DestinationAddress', 'DisplayableOrderComment',
'Items'])
+ @structured_objects('DestinationAddress', 'Items')
@api_action('Outbound', 30, 0.5)
def create_fulfillment_order(self, request, response, **kw):
"""Requests that Amazon ship items from the seller's inventory
@@ -703,12 +703,12 @@ class MWSConnection(AWSQueryConnection):
return self._post_request(request, kw, response)
@requires(['CreatedAfter'], ['LastUpdatedAfter'])
+ @requires(['MarketplaceId'])
@exclusive(['CreatedAfter'], ['LastUpdatedAfter'])
@dependent('CreatedBefore', ['CreatedAfter'])
@exclusive(['LastUpdatedAfter'], ['BuyerEmail'], ['SellerOrderId'])
@dependent('LastUpdatedBefore', ['LastUpdatedAfter'])
@exclusive(['CreatedAfter'], ['LastUpdatedBefore'])
- @requires(['MarketplaceId'])
@structured_objects('OrderTotal', 'ShippingAddress',
'PaymentExecutionDetail')
@structured_lists('MarketplaceId.Id', 'OrderStatus.Status',
@@ -725,7 +725,7 @@ class MWSConnection(AWSQueryConnection):
'BuyerEmail': toggle.union(['SellerOrderId']),
'SellerOrderId': toggle.union(['BuyerEmail']),
}.items():
- if do in kw and filter(kw.has_key, dont):
+ if do in kw and any(i in dont for i in kw):
message = "Don't include {0} when specifying " \
"{1}".format(' or '.join(dont), do)
raise AssertionError(message)
diff --git a/boto/opsworks/layer1.py b/boto/opsworks/layer1.py
index e2e75e6f..3b703ee1 100644
--- a/boto/opsworks/layer1.py
+++ b/boto/opsworks/layer1.py
@@ -20,16 +20,13 @@
# IN THE SOFTWARE.
#
-try:
- import json
-except ImportError:
- import simplejson as json
import boto
from boto.connection import AWSQueryConnection
from boto.regioninfo import RegionInfo
from boto.exception import JSONResponseError
from boto.opsworks import exceptions
+from boto.compat import json
class OpsWorksConnection(AWSQueryConnection):
@@ -2580,7 +2577,7 @@ class OpsWorksConnection(AWSQueryConnection):
headers=headers, data=body)
response = self._mexe(http_request, sender=None,
override_num_retries=10)
- response_body = response.read()
+ response_body = response.read().decode('utf-8')
boto.log.debug(response_body)
if response.status == 200:
if response_body:
@@ -2591,4 +2588,3 @@ class OpsWorksConnection(AWSQueryConnection):
exception_class = self._faults.get(fault_name, self.ResponseError)
raise exception_class(response.status, response.reason,
body=json_body)
-
diff --git a/boto/provider.py b/boto/provider.py
index ab4de7c6..8e04bff3 100644
--- a/boto/provider.py
+++ b/boto/provider.py
@@ -356,6 +356,14 @@ class Provider(object):
security_token_name)
boto.log.debug("Using security token found in shared "
"credential file.")
+ elif profile_name is not None:
+ if config.has_option("profile %s" % profile_name,
+ security_token_name):
+ boto.log.debug("config has option")
+ self.security_token = config.get("profile %s" % profile_name,
+ security_token_name)
+ boto.log.debug("Using security token found in config file: "
+ "profile %s." % profile_name)
elif config.has_option('Credentials', security_token_name):
self.security_token = config.get('Credentials',
security_token_name)
diff --git a/boto/pyami/config.py b/boto/pyami/config.py
index 2ecabbce..3789113b 100644
--- a/boto/pyami/config.py
+++ b/boto/pyami/config.py
@@ -54,8 +54,8 @@ class Config(SafeConfigParser):
def __init__(self, path=None, fp=None, do_load=True):
# We don't use ``super`` here, because ``ConfigParser`` still uses
# old-style classes.
- SafeConfigParser.__init__(self, {'working_dir' : '/mnt/pyami',
- 'debug' : '0'})
+ SafeConfigParser.__init__(self, {'working_dir': '/mnt/pyami',
+ 'debug': '0'})
if do_load:
if path:
self.load_from_path(path)
@@ -72,7 +72,7 @@ class Config(SafeConfigParser):
def load_credential_file(self, path):
"""Load a credential file as is setup like the Java utilities"""
- c_data = StringIO.StringIO()
+ c_data = StringIO()
c_data.write("[Credentials]\n")
for line in open(path, "r").readlines():
c_data.write(line.replace("AWSAccessKeyId", "aws_access_key_id").replace("AWSSecretKey", "aws_secret_access_key"))
@@ -176,13 +176,13 @@ class Config(SafeConfigParser):
self.set(section, name, 'false')
def dump(self):
- s = StringIO.StringIO()
+ s = StringIO()
self.write(s)
print(s.getvalue())
def dump_safe(self, fp=None):
if not fp:
- fp = StringIO.StringIO()
+ fp = StringIO()
for section in self.sections():
fp.write('[%s]\n' % section)
for option in self.options(section):
diff --git a/boto/pyami/copybot.py b/boto/pyami/copybot.py
index 02d8bb22..09a6d444 100644
--- a/boto/pyami/copybot.py
+++ b/boto/pyami/copybot.py
@@ -94,4 +94,3 @@ class CopyBot(ScriptBase):
if boto.config.getbool(self.name, 'exit_on_completion', True):
ec2 = boto.connect_ec2()
ec2.terminate_instances([self.instance_id])
-
diff --git a/boto/pyami/helloworld.py b/boto/pyami/helloworld.py
index 680873ce..b9b53b60 100644
--- a/boto/pyami/helloworld.py
+++ b/boto/pyami/helloworld.py
@@ -25,4 +25,3 @@ class HelloWorld(ScriptBase):
def main(self):
self.log('Hello World!!!')
-
diff --git a/boto/pyami/installers/__init__.py b/boto/pyami/installers/__init__.py
index cc689264..44abd0d2 100644
--- a/boto/pyami/installers/__init__.py
+++ b/boto/pyami/installers/__init__.py
@@ -61,4 +61,3 @@ class Installer(ScriptBase):
Do whatever is necessary to "install" the package.
"""
raise NotImplementedError
-
diff --git a/boto/pyami/installers/ubuntu/ebs.py b/boto/pyami/installers/ubuntu/ebs.py
index ef7f3e30..54a47985 100644
--- a/boto/pyami/installers/ubuntu/ebs.py
+++ b/boto/pyami/installers/ubuntu/ebs.py
@@ -49,7 +49,6 @@ from boto.exception import EC2ResponseError
import os, time
from boto.pyami.installers.ubuntu.installer import Installer
from string import Template
-from boto.compat import six
BackupScriptTemplate = """#!/usr/bin/env python
# Backup EBS volume
@@ -115,7 +114,7 @@ class EBSInstaller(Installer):
if self.logical_volume_name:
# if a logical volume was specified, override the specified volume_id
# (if there was one) with the current AWS volume for the logical volume:
- logical_volume = next(Volume.find(name = self.logical_volume_name))
+ logical_volume = next(Volume.find(name=self.logical_volume_name))
self.volume_id = logical_volume._volume_id
volume = ec2.get_all_volumes([self.volume_id])[0]
# wait for the volume to be available. The volume may still be being created
@@ -158,7 +157,7 @@ class EBSInstaller(Installer):
fp.close()
self.run('chmod +x /usr/local/bin/ebs_backup')
- def create_backup_cleanup_script(self, use_tag_based_cleanup = False):
+ def create_backup_cleanup_script(self, use_tag_based_cleanup=False):
fp = open('/usr/local/bin/ebs_backup_cleanup', 'w')
if use_tag_based_cleanup:
fp.write(TagBasedBackupCleanupScript)
@@ -226,7 +225,7 @@ class EBSInstaller(Installer):
# volume. Check for the presence of the new configuration flag, and use the appropriate
# cleanup method / script:
use_tag_based_cleanup = boto.config.has_option('EBS', 'use_tag_based_snapshot_cleanup')
- self.create_backup_cleanup_script(use_tag_based_cleanup);
+ self.create_backup_cleanup_script(use_tag_based_cleanup)
self.add_cron("ebs_backup_cleanup", "/usr/local/bin/ebs_backup_cleanup", minute=minute, hour=hour)
# Set up the fstab
diff --git a/boto/pyami/installers/ubuntu/installer.py b/boto/pyami/installers/ubuntu/installer.py
index 370d63fd..5a2abd90 100644
--- a/boto/pyami/installers/ubuntu/installer.py
+++ b/boto/pyami/installers/ubuntu/installer.py
@@ -56,7 +56,7 @@ class Installer(boto.pyami.installers.Installer):
f = open(f_path, "w")
f.write(file)
f.close()
- os.chmod(f_path, stat.S_IREAD| stat.S_IWRITE | stat.S_IEXEC)
+ os.chmod(f_path, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC)
self.run("/usr/sbin/update-rc.d %s defaults" % name)
def add_env(self, key, value):
@@ -84,13 +84,11 @@ class Installer(boto.pyami.installers.Installer):
Create a user on the local system
"""
self.run("useradd -m %s" % user)
- usr = getpwnam(user)
+ usr = getpwnam(user)
return usr
-
def install(self):
"""
This is the only method you need to override
"""
raise NotImplementedError
-
diff --git a/boto/pyami/installers/ubuntu/mysql.py b/boto/pyami/installers/ubuntu/mysql.py
index 490e5dbb..8dd643bf 100644
--- a/boto/pyami/installers/ubuntu/mysql.py
+++ b/boto/pyami/installers/ubuntu/mysql.py
@@ -55,18 +55,18 @@ class MySQL(Installer):
def change_data_dir(self, password=None):
data_dir = boto.config.get('MySQL', 'data_dir', '/mnt')
- fresh_install = False;
+ fresh_install = False
is_mysql_running_command = ShellCommand('mysqladmin ping') # exit status 0 if mysql is running
is_mysql_running_command.run()
if is_mysql_running_command.getStatus() == 0:
- # mysql is running. This is the state apt-get will leave it in. If it isn't running,
+ # mysql is running. This is the state apt-get will leave it in. If it isn't running,
# that means mysql was already installed on the AMI and there's no need to stop it,
# saving 40 seconds on instance startup.
time.sleep(10) #trying to stop mysql immediately after installing it fails
# We need to wait until mysql creates the root account before we kill it
# or bad things will happen
i = 0
- while self.run("echo 'quit' | mysql -u root") != 0 and i<5:
+ while self.run("echo 'quit' | mysql -u root") != 0 and i < 5:
time.sleep(5)
i = i + 1
self.run('/etc/init.d/mysql stop')
@@ -75,7 +75,7 @@ class MySQL(Installer):
mysql_path = os.path.join(data_dir, 'mysql')
if not os.path.exists(mysql_path):
self.run('mkdir %s' % mysql_path)
- fresh_install = True;
+ fresh_install = True
self.run('chown -R mysql:mysql %s' % mysql_path)
fp = open('/etc/mysql/conf.d/use_mnt.cnf', 'w')
fp.write('# created by pyami\n')
@@ -106,4 +106,3 @@ class MySQL(Installer):
# and changing that is too ugly to be worth it:
#self.set_root_password()
self.change_data_dir()
-
diff --git a/boto/pyami/installers/ubuntu/trac.py b/boto/pyami/installers/ubuntu/trac.py
index ef83af7a..8c51c8f7 100644
--- a/boto/pyami/installers/ubuntu/trac.py
+++ b/boto/pyami/installers/ubuntu/trac.py
@@ -52,7 +52,7 @@ class Trac(Installer):
self.run("a2enmod mod_python")
self.run("a2enmod dav_svn")
self.run("a2enmod rewrite")
- # Make sure that boto.log is writable by everyone so that subversion post-commit hooks can
+ # Make sure that boto.log is writable by everyone so that subversion post-commit hooks can
# write to it.
self.run("touch /var/log/boto.log")
self.run("chmod a+w /var/log/boto.log")
diff --git a/boto/pyami/launch_ami.py b/boto/pyami/launch_ami.py
index 4b8d962f..9037217b 100755
--- a/boto/pyami/launch_ami.py
+++ b/boto/pyami/launch_ami.py
@@ -29,7 +29,7 @@ import boto
usage_string = """
SYNOPSIS
launch_ami.py -a ami_id [-b script_bucket] [-s script_name]
- [-m module] [-c class_name] [-r]
+ [-m module] [-c class_name] [-r]
[-g group] [-k key_name] [-n num_instances]
[-w] [extra_data]
Where:
@@ -80,16 +80,16 @@ def main():
'reload', 'script_name', 'wait'])
except:
usage()
- params = {'module_name' : None,
- 'script_name' : None,
- 'class_name' : None,
- 'script_bucket' : None,
- 'group' : 'default',
- 'keypair' : None,
- 'ami' : None,
- 'num_instances' : 1,
- 'input_queue_name' : None,
- 'output_queue_name' : None}
+ params = {'module_name': None,
+ 'script_name': None,
+ 'class_name': None,
+ 'script_bucket': None,
+ 'group': 'default',
+ 'keypair': None,
+ 'ami': None,
+ 'num_instances': 1,
+ 'input_queue_name': None,
+ 'output_queue_name': None}
reload = None
wait = None
for o, a in opts:
@@ -135,7 +135,7 @@ def main():
l = imp.find_module(params['script_name'])
c = boto.connect_s3()
bucket = c.get_bucket(params['script_bucket'])
- key = bucket.new_key(params['script_name']+'.py')
+ key = bucket.new_key(params['script_name'] + '.py')
key.set_contents_from_file(l[0])
params['script_md5'] = key.md5
# we have everything we need, now build userdata string
@@ -175,4 +175,3 @@ def main():
if __name__ == "__main__":
main()
-
diff --git a/boto/pyami/scriptbase.py b/boto/pyami/scriptbase.py
index 8e8cb0c0..d99a2b46 100644
--- a/boto/pyami/scriptbase.py
+++ b/boto/pyami/scriptbase.py
@@ -32,13 +32,12 @@ class ScriptBase(object):
if self.last_command.status != 0:
boto.log.error('Error running command: "%s". Output: "%s"' % (command, self.last_command.output))
if notify:
- self.notify('Error encountered', \
- 'Error running the following command:\n\t%s\n\nCommand output:\n\t%s' % \
- (command, self.last_command.output))
+ self.notify('Error encountered',
+ 'Error running the following command:\n\t%s\n\nCommand output:\n\t%s' % \
+ (command, self.last_command.output))
if exit_on_error:
sys.exit(-1)
return self.last_command.status
def main(self):
pass
-
diff --git a/boto/pyami/startup.py b/boto/pyami/startup.py
index 1c45fb3f..4bd9dadd 100644
--- a/boto/pyami/startup.py
+++ b/boto/pyami/startup.py
@@ -37,7 +37,7 @@ class Startup(ScriptBase):
pos = script.rfind('.')
if pos > 0:
mod_name = script[0:pos]
- cls_name = script[pos+1:]
+ cls_name = script[pos + 1:]
cls = find_class(mod_name, cls_name)
boto.log.info('Running Script: %s' % script)
s = cls()
diff --git a/boto/rds2/layer1.py b/boto/rds2/layer1.py
index 887708ba..5615f110 100644
--- a/boto/rds2/layer1.py
+++ b/boto/rds2/layer1.py
@@ -20,16 +20,12 @@
# IN THE SOFTWARE.
#
-try:
- import json
-except ImportError:
- import simplejson as json
-
import boto
from boto.connection import AWSQueryConnection
from boto.regioninfo import RegionInfo
from boto.exception import JSONResponseError
from boto.rds2 import exceptions
+from boto.compat import json
class RDSConnection(AWSQueryConnection):
diff --git a/boto/route53/connection.py b/boto/route53/connection.py
index f0b9f048..2cab2359 100644
--- a/boto/route53/connection.py
+++ b/boto/route53/connection.py
@@ -26,7 +26,6 @@
from boto.route53 import exception
import random
-import urllib
import uuid
import xml.sax
@@ -36,7 +35,7 @@ from boto import handler
import boto.jsonresponse
from boto.route53.record import ResourceRecordSets
from boto.route53.zone import Zone
-from boto.compat import six
+from boto.compat import six, urllib
HZXML = """<?xml version="1.0" encoding="UTF-8"?>
@@ -83,7 +82,7 @@ class Route53Connection(AWSAuthConnection):
for key, val in six.iteritems(params):
if val is None:
continue
- pairs.append(key + '=' + urllib.quote(str(val)))
+ pairs.append(key + '=' + urllib.parse.quote(str(val)))
path += '?' + '&'.join(pairs)
return super(Route53Connection, self).make_request(action, path,
headers, data,
diff --git a/boto/s3/key.py b/boto/s3/key.py
index 9b261641..9924d9d4 100644
--- a/boto/s3/key.py
+++ b/boto/s3/key.py
@@ -31,8 +31,9 @@ import re
import base64
import binascii
import math
+from hashlib import md5
import boto.utils
-from boto.compat import BytesIO, six, urllib
+from boto.compat import BytesIO, six, urllib, encodebytes
from boto.exception import BotoClientError
from boto.exception import StorageDataError
@@ -45,11 +46,6 @@ from boto.utils import compute_md5, compute_hash
from boto.utils import find_matching_headers
from boto.utils import merge_headers_by_name
-try:
- from hashlib import md5
-except ImportError:
- from md5 import md5
-
class Key(object):
"""
@@ -116,7 +112,7 @@ class Key(object):
self.is_latest = False
self.last_modified = None
self.owner = None
- self.storage_class = 'STANDARD'
+ self._storage_class = None
self.path = None
self.resp = None
self.mode = None
@@ -187,13 +183,31 @@ class Key(object):
base64md5 = property(_get_base64md5, _set_base64md5);
+ def _get_storage_class(self):
+ if self._storage_class is None and self.bucket:
+ # Attempt to fetch storage class
+ list_items = list(self.bucket.list(self.name.encode('utf-8')))
+ if len(list_items) and getattr(list_items[0], '_storage_class',
+ None):
+ self._storage_class = list_items[0]._storage_class
+ else:
+ # Key is not yet saved? Just use default...
+ self._storage_class = 'STANDARD'
+
+ return self._storage_class
+
+ def _set_storage_class(self, value):
+ self._storage_class = value
+
+ storage_class = property(_get_storage_class, _set_storage_class)
+
def get_md5_from_hexdigest(self, md5_hexdigest):
"""
A utility function to create the 2-tuple (md5hexdigest, base64md5)
from just having a precalculated md5_hexdigest.
"""
digest = binascii.unhexlify(md5_hexdigest)
- base64md5 = base64.encodestring(digest)
+ base64md5 = encodebytes(digest)
if base64md5[-1] == '\n':
base64md5 = base64md5[0:-1]
return (md5_hexdigest, base64md5)
@@ -877,7 +891,9 @@ class Key(object):
for header in find_matching_headers('User-Agent', headers):
del headers[header]
headers['User-Agent'] = UserAgent
- if self.storage_class != 'STANDARD':
+ # If storage_class is None, then a user has not explicitly requested
+ # a storage class, so we can assume STANDARD here
+ if self._storage_class not in [None, 'STANDARD']:
headers[provider.storage_class_header] = self.storage_class
if find_matching_headers('Content-Encoding', headers):
self.content_encoding = merge_headers_by_name(
@@ -950,10 +966,16 @@ class Key(object):
if isinstance(md5, bytes):
md5 = md5.decode('utf-8')
- if self.etag != '"%s"' % md5:
- raise provider.storage_data_error(
- 'ETag from S3 did not match computed MD5. '
- '%s vs. %s' % (self.etag, self.md5))
+ # If you use customer-provided encryption keys, the ETag value that
+ # Amazon S3 returns in the response will not be the MD5 of the
+ # object.
+ server_side_encryption_customer_algorithm = response.getheader(
+ 'x-amz-server-side-encryption-customer-algorithm', None)
+ if server_side_encryption_customer_algorithm is None:
+ if self.etag != '"%s"' % md5:
+ raise provider.storage_data_error(
+ 'ETag from S3 did not match computed MD5. '
+ '%s vs. %s' % (self.etag, self.md5))
return True
@@ -1438,7 +1460,7 @@ class Key(object):
headers/values that will override any headers associated
with the stored object in the response. See
http://goo.gl/EWOPb for details.
-
+
:type version_id: str
:param version_id: The ID of a particular version of the object.
If this parameter is not supplied but the Key object has
@@ -1667,7 +1689,7 @@ class Key(object):
headers/values that will override any headers associated
with the stored object in the response. See
http://goo.gl/EWOPb for details.
-
+
:type version_id: str
:param version_id: The ID of a particular version of the object.
If this parameter is not supplied but the Key object has
diff --git a/boto/sdb/connection.py b/boto/sdb/connection.py
index fa8f1788..fa7cb83e 100644
--- a/boto/sdb/connection.py
+++ b/boto/sdb/connection.py
@@ -317,7 +317,7 @@ class SDBConnection(AWSQueryConnection):
:rtype: :class:`boto.sdb.domain.Domain` object
:return: The newly created domain
"""
- params = {'DomainName':domain_name}
+ params = {'DomainName': domain_name}
d = self.get_object('CreateDomain', params, Domain)
d.name = domain_name
return d
@@ -360,7 +360,7 @@ class SDBConnection(AWSQueryConnection):
"""
domain, domain_name = self.get_domain_and_name(domain_or_name)
- params = {'DomainName':domain_name}
+ params = {'DomainName': domain_name}
return self.get_status('DeleteDomain', params)
def domain_metadata(self, domain_or_name):
@@ -374,7 +374,7 @@ class SDBConnection(AWSQueryConnection):
:return: The newly created domain metadata object
"""
domain, domain_name = self.get_domain_and_name(domain_or_name)
- params = {'DomainName':domain_name}
+ params = {'DomainName': domain_name}
d = self.get_object('DomainMetadata', params, DomainMetaData)
d.domain = domain
return d
@@ -420,8 +420,8 @@ class SDBConnection(AWSQueryConnection):
:return: True if successful
"""
domain, domain_name = self.get_domain_and_name(domain_or_name)
- params = {'DomainName' : domain_name,
- 'ItemName' : item_name}
+ params = {'DomainName': domain_name,
+ 'ItemName': item_name}
self._build_name_value_list(params, attributes, replace)
if expected_value:
self._build_expected_value(params, expected_value)
@@ -450,7 +450,7 @@ class SDBConnection(AWSQueryConnection):
:return: True if successful
"""
domain, domain_name = self.get_domain_and_name(domain_or_name)
- params = {'DomainName' : domain_name}
+ params = {'DomainName': domain_name}
self._build_batch_list(params, items, replace)
return self.get_status('BatchPutAttributes', params, verb='POST')
@@ -483,8 +483,8 @@ class SDBConnection(AWSQueryConnection):
:return: An Item with the requested attribute name/values set on it
"""
domain, domain_name = self.get_domain_and_name(domain_or_name)
- params = {'DomainName' : domain_name,
- 'ItemName' : item_name}
+ params = {'DomainName': domain_name,
+ 'ItemName': item_name}
if consistent_read:
params['ConsistentRead'] = 'true'
if attribute_names:
@@ -544,8 +544,8 @@ class SDBConnection(AWSQueryConnection):
:return: True if successful
"""
domain, domain_name = self.get_domain_and_name(domain_or_name)
- params = {'DomainName':domain_name,
- 'ItemName' : item_name}
+ params = {'DomainName': domain_name,
+ 'ItemName': item_name}
if attr_names:
if isinstance(attr_names, list):
self._build_name_list(params, attr_names)
@@ -577,7 +577,7 @@ class SDBConnection(AWSQueryConnection):
:return: True if successful
"""
domain, domain_name = self.get_domain_and_name(domain_or_name)
- params = {'DomainName' : domain_name}
+ params = {'DomainName': domain_name}
self._build_batch_list(params, items, False)
return self.get_status('BatchDeleteAttributes', params, verb='POST')
@@ -605,7 +605,7 @@ class SDBConnection(AWSQueryConnection):
:return: An iterator containing the results.
"""
domain, domain_name = self.get_domain_and_name(domain_or_name)
- params = {'SelectExpression' : query}
+ params = {'SelectExpression': query}
if consistent_read:
params['ConsistentRead'] = 'true'
if next_token:
diff --git a/boto/sdb/db/blob.py b/boto/sdb/db/blob.py
index 145b35d3..6c286ec3 100644
--- a/boto/sdb/db/blob.py
+++ b/boto/sdb/db/blob.py
@@ -1,4 +1,3 @@
-from boto.compat import six
# Copyright (c) 2006,2007,2008 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
@@ -20,6 +19,8 @@ from boto.compat import six
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
+from boto.compat import six
+
class Blob(object):
"""Blob object"""
@@ -38,19 +39,18 @@ class Blob(object):
return f
def __str__(self):
- return unicode(self).encode('utf-8')
+ return six.text_type(self).encode('utf-8')
def __unicode__(self):
if hasattr(self.file, "get_contents_as_string"):
value = self.file.get_contents_as_string()
else:
value = self.file.getvalue()
- if isinstance(value, unicode):
+ if isinstance(value, six.text_type):
return value
else:
return value.decode('utf-8')
-
def read(self):
if hasattr(self.file, "get_contents_as_string"):
return self.file.get_contents_as_string()
diff --git a/boto/sdb/db/key.py b/boto/sdb/db/key.py
index 6ac47a68..42f6bc9b 100644
--- a/boto/sdb/db/key.py
+++ b/boto/sdb/db/key.py
@@ -54,6 +54,6 @@ class Key(object):
def parent(self):
raise NotImplementedError("Key parents are not currently supported")
-
+
def __str__(self):
return self.id_or_name()
diff --git a/boto/sdb/db/manager/sdbmanager.py b/boto/sdb/db/manager/sdbmanager.py
index 97c4a1ee..d964d07a 100644
--- a/boto/sdb/db/manager/sdbmanager.py
+++ b/boto/sdb/db/manager/sdbmanager.py
@@ -28,7 +28,7 @@ from boto.sdb.db.blob import Blob
from boto.sdb.db.property import ListProperty, MapProperty
from datetime import datetime, date, time
from boto.exception import SDBPersistenceError, S3ResponseError
-from boto.compat import map, six
+from boto.compat import map, six, long_type
ISO8601 = '%Y-%m-%dT%H:%M:%SZ'
@@ -59,7 +59,6 @@ class SDBConverter(object):
self.manager = manager
self.type_map = {bool: (self.encode_bool, self.decode_bool),
int: (self.encode_int, self.decode_int),
- long: (self.encode_long, self.decode_long),
float: (self.encode_float, self.decode_float),
self.model_class: (
self.encode_reference, self.decode_reference
@@ -71,6 +70,8 @@ class SDBConverter(object):
Blob: (self.encode_blob, self.decode_blob),
str: (self.encode_string, self.decode_string),
}
+ if six.PY2:
+ self.type_map[long] = (self.encode_long, self.decode_long)
def encode(self, item_type, value):
try:
@@ -194,12 +195,12 @@ class SDBConverter(object):
return int(value)
def encode_long(self, value):
- value = long(value)
+ value = long_type(value)
value += 9223372036854775808
return '%020d' % value
def decode_long(self, value):
- value = long(value)
+ value = long_type(value)
value -= 9223372036854775808
return value
@@ -265,7 +266,7 @@ class SDBConverter(object):
return float(mantissa + 'e' + exponent)
def encode_datetime(self, value):
- if isinstance(value, basestring):
+ if isinstance(value, six.string_types):
return value
if isinstance(value, datetime):
return value.strftime(ISO8601)
@@ -286,11 +287,11 @@ class SDBConverter(object):
else:
value = value.split("-")
return date(int(value[0]), int(value[1]), int(value[2]))
- except Exception as e:
+ except Exception:
return None
def encode_date(self, value):
- if isinstance(value, basestring):
+ if isinstance(value, six.string_types):
return value
return value.isoformat()
@@ -323,7 +324,7 @@ class SDBConverter(object):
def encode_reference(self, value):
if value in (None, 'None', '', ' '):
return None
- if isinstance(value, basestring):
+ if isinstance(value, six.string_types):
return value
else:
return value.id
@@ -336,7 +337,7 @@ class SDBConverter(object):
def encode_blob(self, value):
if not value:
return None
- if isinstance(value, basestring):
+ if isinstance(value, six.string_types):
return value
if not value.id:
@@ -381,14 +382,14 @@ class SDBConverter(object):
if not isinstance(value, str):
return value
try:
- return unicode(value, 'utf-8')
+ return six.text_type(value, 'utf-8')
except:
# really, this should throw an exception.
# in the interest of not breaking current
# systems, however:
arr = []
for ch in value:
- arr.append(unichr(ord(ch)))
+ arr.append(six.unichr(ord(ch)))
return u"".join(arr)
def decode_string(self, value):
@@ -523,7 +524,7 @@ class SDBManager(object):
query_str = "select * from `%s` %s" % (self.domain.name, self._build_filter_part(query.model_class, query.filters, query.sort_by, query.select))
if query.limit:
query_str += " limit %s" % query.limit
- rs = self.domain.select(query_str, max_items=query.limit, next_token = query.next_token)
+ rs = self.domain.select(query_str, max_items=query.limit, next_token=query.next_token)
query.rs = rs
return self._object_lister(query.model_class, rs)
@@ -582,7 +583,7 @@ class SDBManager(object):
order_by_filtered = True
query_parts.append("(%s)" % select)
- if isinstance(filters, basestring):
+ if isinstance(filters, six.string_types):
query = "WHERE %s AND `__type__` = '%s'" % (filters, cls.__name__)
if order_by in ["__id__", "itemName()"]:
query += " ORDER BY itemName() %s" % order_by_method
diff --git a/boto/sdb/db/manager/xmlmanager.py b/boto/sdb/db/manager/xmlmanager.py
index 94ab72fc..f457347a 100644
--- a/boto/sdb/db/manager/xmlmanager.py
+++ b/boto/sdb/db/manager/xmlmanager.py
@@ -22,6 +22,7 @@ import boto
from boto.utils import find_class, Password
from boto.sdb.db.key import Key
from boto.sdb.db.model import Model
+from boto.compat import six, encodebytes
from datetime import datetime
from xml.dom.minidom import getDOMImplementation, parse, parseString, Node
@@ -43,11 +44,12 @@ class XMLConverter(object):
self.manager = manager
self.type_map = { bool : (self.encode_bool, self.decode_bool),
int : (self.encode_int, self.decode_int),
- long : (self.encode_long, self.decode_long),
Model : (self.encode_reference, self.decode_reference),
Key : (self.encode_reference, self.decode_reference),
Password : (self.encode_password, self.decode_password),
datetime : (self.encode_datetime, self.decode_datetime)}
+ if six.PY2:
+ self.type_map[long] = (self.encode_long, self.decode_long)
def get_text_value(self, parent_node):
value = ''
@@ -145,7 +147,7 @@ class XMLConverter(object):
return None
def encode_reference(self, value):
- if isinstance(value, basestring):
+ if isinstance(value, six.string_types):
return value
if value is None:
return ''
@@ -201,9 +203,8 @@ class XMLManager(object):
self.enable_ssl = enable_ssl
self.auth_header = None
if self.db_user:
- import base64
- base64string = base64.encodestring('%s:%s' % (self.db_user, self.db_passwd))[:-1]
- authheader = "Basic %s" % base64string
+ base64string = encodebytes('%s:%s' % (self.db_user, self.db_passwd))[:-1]
+ authheader = "Basic %s" % base64string
self.auth_header = authheader
def _connect(self):
@@ -459,14 +460,14 @@ class XMLManager(object):
elif isinstance(value, Node):
prop_node.appendChild(value)
else:
- text_node = doc.createTextNode(unicode(value).encode("ascii", "ignore"))
+ text_node = doc.createTextNode(six.text_type(value).encode("ascii", "ignore"))
prop_node.appendChild(text_node)
obj_node.appendChild(prop_node)
return doc
def unmarshal_object(self, fp, cls=None, id=None):
- if isinstance(fp, basestring):
+ if isinstance(fp, six.string_types):
doc = parseString(fp)
else:
doc = parse(fp)
@@ -477,7 +478,7 @@ class XMLManager(object):
Same as unmarshalling an object, except it returns
from "get_props_from_doc"
"""
- if isinstance(fp, basestring):
+ if isinstance(fp, six.string_types):
doc = parseString(fp)
else:
doc = parse(fp)
@@ -488,7 +489,7 @@ class XMLManager(object):
return self._make_request("DELETE", url)
def set_key_value(self, obj, name, value):
- self.domain.put_attributes(obj.id, {name : value}, replace=True)
+ self.domain.put_attributes(obj.id, {name: value}, replace=True)
def delete_key_value(self, obj, name):
self.domain.delete_attributes(obj.id, name)
@@ -514,4 +515,3 @@ class XMLManager(object):
obj = obj.get_by_id(obj.id)
obj._loaded = True
return obj
-
diff --git a/boto/sdb/db/model.py b/boto/sdb/db/model.py
index c403284e..741ad438 100644
--- a/boto/sdb/db/model.py
+++ b/boto/sdb/db/model.py
@@ -255,9 +255,9 @@ class Model(object):
props = {}
for prop in self.properties(hidden=False):
props[prop.name] = getattr(self, prop.name)
- obj = {'properties' : props,
- 'id' : self.id}
- return {self.__class__.__name__ : obj}
+ obj = {'properties': props,
+ 'id': self.id}
+ return {self.__class__.__name__: obj}
def to_xml(self, doc=None):
xmlmanager = self.get_xmlmanager()
@@ -294,5 +294,3 @@ class Expando(Model):
object.__setattr__(self, name, value)
return value
raise AttributeError
-
-
diff --git a/boto/sdb/db/property.py b/boto/sdb/db/property.py
index b5e60f46..575aa892 100644
--- a/boto/sdb/db/property.py
+++ b/boto/sdb/db/property.py
@@ -27,6 +27,7 @@ import re
import boto
import boto.s3.key
from boto.sdb.db.blob import Blob
+from boto.compat import six, long_type
class Property(object):
@@ -76,7 +77,7 @@ class Property(object):
self.slot_name = '_' + self.name
def default_validator(self, value):
- if isinstance(value, basestring) or value == self.default_value():
+ if isinstance(value, six.string_types) or value == self.default_value():
return
if not isinstance(value, self.data_type):
raise TypeError('Validation Error, %s.%s expecting %s, got %s' % (self.model_class.__name__, self.name, self.data_type, type(value)))
@@ -87,7 +88,7 @@ class Property(object):
def validate(self, value):
if self.required and value is None:
raise ValueError('%s is a required property' % self.name)
- if self.choices and value and not value in self.choices:
+ if self.choices and value and value not in self.choices:
raise ValueError('%s not a valid choice for %s.%s' % (value, self.model_class.__name__, self.name))
if self.validator:
self.validator(value)
@@ -113,7 +114,7 @@ class Property(object):
def validate_string(value):
if value is None:
return
- elif isinstance(value, basestring):
+ elif isinstance(value, six.string_types):
if len(value) > 1024:
raise ValueError('Length of value greater than maxlength')
else:
@@ -144,7 +145,7 @@ class TextProperty(Property):
def validate(self, value):
value = super(TextProperty, self).validate(value)
- if not isinstance(value, basestring):
+ if not isinstance(value, six.string_types):
raise TypeError('Expecting Text, got %s' % type(value))
if self.max_length and len(value) > self.max_length:
raise ValueError('Length of value greater than maxlength %s' % self.max_length)
@@ -335,7 +336,7 @@ class IntegerProperty(Property):
class LongProperty(Property):
- data_type = long
+ data_type = long_type
type_name = 'Long'
def __init__(self, verbose_name=None, name=None, default=0, required=False,
@@ -343,7 +344,7 @@ class LongProperty(Property):
super(LongProperty, self).__init__(verbose_name, name, default, required, validator, choices, unique)
def validate(self, value):
- value = long(value)
+ value = long_type(value)
value = super(LongProperty, self).validate(value)
min = -9223372036854775808
max = 9223372036854775807
@@ -493,7 +494,7 @@ class ReferenceProperty(Property):
# If the value is still the UUID for the referenced object, we need to create
# the object now that is the attribute has actually been accessed. This lazy
# instantiation saves unnecessary roundtrips to SimpleDB
- if isinstance(value, basestring):
+ if isinstance(value, six.string_types):
value = self.reference_class(value)
setattr(obj, self.name, value)
return value
@@ -537,7 +538,7 @@ class ReferenceProperty(Property):
raise ValueError('%s is a required property' % self.name)
if value == self.default_value():
return
- if not isinstance(value, basestring):
+ if not isinstance(value, six.string_types):
self.check_instance(value)
@@ -626,16 +627,16 @@ class ListProperty(Property):
if not isinstance(value, list):
value = [value]
- if self.item_type in (int, long):
- item_type = (int, long)
- elif self.item_type in (str, unicode):
- item_type = (str, unicode)
+ if self.item_type in six.integer_types:
+ item_type = six.integer_types
+ elif self.item_type in six.string_types:
+ item_type = six.string_types
else:
item_type = self.item_type
for item in value:
if not isinstance(item, item_type):
- if item_type == (int, long):
+ if item_type == six.integer_types:
raise ValueError('Items in the %s list must all be integers.' % self.name)
else:
raise ValueError('Items in the %s list must all be %s instances' %
@@ -650,10 +651,10 @@ class ListProperty(Property):
def __set__(self, obj, value):
"""Override the set method to allow them to set the property to an instance of the item_type instead of requiring a list to be passed in"""
- if self.item_type in (int, long):
- item_type = (int, long)
- elif self.item_type in (str, unicode):
- item_type = (str, unicode)
+ if self.item_type in six.integer_types:
+ item_type = six.integer_types
+ elif self.item_type in six.string_types:
+ item_type = six.string_types
else:
item_type = self.item_type
if isinstance(value, item_type):
@@ -680,16 +681,16 @@ class MapProperty(Property):
if not isinstance(value, dict):
raise ValueError('Value must of type dict')
- if self.item_type in (int, long):
- item_type = (int, long)
- elif self.item_type in (str, unicode):
- item_type = (str, unicode)
+ if self.item_type in six.integer_types:
+ item_type = six.integer_types
+ elif self.item_type in six.string_types:
+ item_type = six.string_types
else:
item_type = self.item_type
for key in value:
if not isinstance(value[key], item_type):
- if item_type == (int, long):
+ if item_type == six.integer_types:
raise ValueError('Values in the %s Map must all be integers.' % self.name)
else:
raise ValueError('Values in the %s Map must all be %s instances' %
diff --git a/boto/sdb/db/query.py b/boto/sdb/db/query.py
index 69a29970..8945d4c0 100644
--- a/boto/sdb/db/query.py
+++ b/boto/sdb/db/query.py
@@ -64,7 +64,7 @@ class Query(object):
def order(self, key):
self.sort_by = key
return self
-
+
def to_xml(self, doc=None):
if not doc:
xmlmanager = self.model_class.get_xmlmanager()
diff --git a/boto/sdb/db/sequence.py b/boto/sdb/db/sequence.py
index 6b1e23fa..a2879893 100644
--- a/boto/sdb/db/sequence.py
+++ b/boto/sdb/db/sequence.py
@@ -20,6 +20,7 @@
# IN THE SOFTWARE.
from boto.exception import SDBResponseError
+from boto.compat import six
class SequenceGenerator(object):
"""Generic Sequence Generator object, this takes a single
@@ -71,8 +72,7 @@ class SequenceGenerator(object):
def _inc(self, val):
"""Increment a single value"""
assert(len(val) == self.sequence_length)
- return self.sequence_string[(self.sequence_string.index(val)+1) % len(self.sequence_string)]
-
+ return self.sequence_string[(self.sequence_string.index(val) + 1) % len(self.sequence_string)]
#
@@ -100,19 +100,17 @@ def fib(cv=1, lv=0):
increment_string = SequenceGenerator("ABCDEFGHIJKLMNOPQRSTUVWXYZ")
-
class Sequence(object):
"""A simple Sequence using the new SDB "Consistent" features
Based largly off of the "Counter" example from mitch garnaat:
http://bitbucket.org/mitch/stupidbototricks/src/tip/counter.py"""
-
def __init__(self, id=None, domain_name=None, fnc=increment_by_one, init_val=None):
- """Create a new Sequence, using an optional function to
+ """Create a new Sequence, using an optional function to
increment to the next number, by default we just increment by one.
Every parameter here is optional, if you don't specify any options
then you'll get a new SequenceGenerator with a random ID stored in the
- default domain that increments by one and uses the default botoweb
+ default domain that increments by one and uses the default botoweb
environment
:param id: Optional ID (name) for this counter
@@ -127,7 +125,7 @@ class Sequence(object):
Your function must accept "None" to get the initial value
:type fnc: function, str
- :param init_val: Initial value, by default this is the first element in your sequence,
+ :param init_val: Initial value, by default this is the first element in your sequence,
but you can pass in any value, even a string if you pass in a function that uses
strings instead of ints to increment
"""
@@ -146,7 +144,7 @@ class Sequence(object):
self.item_type = type(fnc(None))
self.timestamp = None
# Allow us to pass in a full name to a function
- if isinstance(fnc, basestring):
+ if isinstance(fnc, six.string_types):
from boto.utils import find_class
fnc = find_class(fnc)
self.fnc = fnc
diff --git a/boto/sdb/db/test_db.py b/boto/sdb/db/test_db.py
index b582bcee..ba2fb3cd 100644
--- a/boto/sdb/db/test_db.py
+++ b/boto/sdb/db/test_db.py
@@ -109,7 +109,7 @@ def test_float():
assert tt.name == t.name
assert tt.value == t.value
return t
-
+
def test_required():
global _objects
t = TestRequired()
diff --git a/boto/sdb/domain.py b/boto/sdb/domain.py
index 36ab4c55..faed8133 100644
--- a/boto/sdb/domain.py
+++ b/boto/sdb/domain.py
@@ -25,6 +25,7 @@ Represents an SDB Domain
"""
from boto.sdb.queryresultset import SelectResultSet
+from boto.compat import six
class Domain(object):
@@ -253,10 +254,10 @@ class Domain(object):
values = [values]
for value in values:
print('\t\t\t<value><![CDATA[', end=' ', file=f)
- if isinstance(value, unicode):
+ if isinstance(value, six.text_type):
value = value.encode('utf-8', 'replace')
else:
- value = unicode(value, errors='replace').encode('utf-8', 'replace')
+ value = six.text_type(value, errors='replace').encode('utf-8', 'replace')
f.write(value)
print(']]></value>', file=f)
print('\t\t</attribute>', file=f)
diff --git a/boto/sdb/item.py b/boto/sdb/item.py
index a742d80c..e09a9d9a 100644
--- a/boto/sdb/item.py
+++ b/boto/sdb/item.py
@@ -27,10 +27,10 @@ class Item(dict):
SimpleDB item. An item in SDB is similar to a row in a relational
database. Items belong to a :py:class:`Domain <boto.sdb.domain.Domain>`,
which is similar to a table in a relational database.
-
+
The keys on instances of this object correspond to attributes that are
- stored on the SDB item.
-
+ stored on the SDB item.
+
.. tip:: While it is possible to instantiate this class directly, you may
want to use the convenience methods on :py:class:`boto.sdb.domain.Domain`
for that purpose. For example, :py:meth:`boto.sdb.domain.Domain.get_item`.
@@ -39,9 +39,9 @@ class Item(dict):
"""
:type domain: :py:class:`boto.sdb.domain.Domain`
:param domain: The domain that this item belongs to.
-
+
:param str name: The name of this item. This name will be used when
- querying for items using methods like
+ querying for items using methods like
:py:meth:`boto.sdb.domain.Domain.get_item`
"""
dict.__init__(self)
@@ -102,8 +102,8 @@ class Item(dict):
def load(self):
"""
Loads or re-loads this item's attributes from SDB.
-
- .. warning::
+
+ .. warning::
If you have changed attribute values on an Item instance,
this method will over-write the values if they are different in
SDB. For any local attributes that don't yet exist in SDB,
@@ -114,7 +114,7 @@ class Item(dict):
def save(self, replace=True):
"""
Saves this item to SDB.
-
+
:param bool replace: If ``True``, delete any attributes on the remote
SDB item that have a ``None`` value on this object.
"""
@@ -134,11 +134,11 @@ class Item(dict):
attribute that has yet to be set, it will simply create an attribute
named ``key`` with your given ``value`` as its value. If you are
adding a value to an existing attribute, this method will convert the
- attribute to a list (if it isn't already) and append your new value
+ attribute to a list (if it isn't already) and append your new value
to said list.
-
+
For clarification, consider the following interactive session:
-
+
.. code-block:: python
>>> item = some_domain.get_item('some_item')
@@ -150,9 +150,9 @@ class Item(dict):
>>> item.add_value('some_attr', 2)
>>> item['some_attr']
[1, 2]
-
+
:param str key: The attribute to add a value to.
- :param object value: The value to set or append to the attribute.
+ :param object value: The value to set or append to the attribute.
"""
if key in self:
# We already have this key on the item.
@@ -170,12 +170,8 @@ class Item(dict):
def delete(self):
"""
Deletes this item in SDB.
-
+
.. note:: This local Python object remains in its current state
after deletion, this only deletes the remote item in SDB.
"""
self.domain.delete_item(self)
-
-
-
-
diff --git a/boto/services/bs.py b/boto/services/bs.py
index e6cc21cc..396c4839 100755
--- a/boto/services/bs.py
+++ b/boto/services/bs.py
@@ -24,7 +24,8 @@ from boto.services.servicedef import ServiceDef
from boto.services.submit import Submitter
from boto.services.result import ResultProcessor
import boto
-import sys, os, StringIO
+import sys, os
+from boto.compat import StringIO
class BS(object):
@@ -111,7 +112,7 @@ class BS(object):
self.sd.add_section('Credentials')
self.sd.set('Credentials', 'aws_access_key_id', ec2.aws_access_key_id)
self.sd.set('Credentials', 'aws_secret_access_key', ec2.aws_secret_access_key)
- s = StringIO.StringIO()
+ s = StringIO()
self.sd.write(s)
rs = ec2.get_all_images([ami_id])
img = rs[0]
diff --git a/boto/ses/connection.py b/boto/ses/connection.py
index 4525a7f2..ed69ad29 100644
--- a/boto/ses/connection.py
+++ b/boto/ses/connection.py
@@ -533,7 +533,7 @@ class SESConnection(AWSAuthConnection):
:type notification_type: string
:param notification_type: The type of feedback notifications that will
be published to the specified topic.
- Valid Values: Bounce | Complaint
+ Valid Values: Bounce | Complaint | Delivery
:type sns_topic: string or None
:param sns_topic: The Amazon Resource Name (ARN) of the Amazon Simple
diff --git a/boto/sns/connection.py b/boto/sns/connection.py
index 986d5948..5a6da205 100644
--- a/boto/sns/connection.py
+++ b/boto/sns/connection.py
@@ -214,7 +214,7 @@ class SNSConnection(AWSQueryConnection):
return self._make_request('DeleteTopic', params, '/', 'GET')
def publish(self, topic=None, message=None, subject=None, target_arn=None,
- message_structure=None):
+ message_structure=None, message_attributes=None):
"""
Get properties of a Topic
@@ -233,6 +233,23 @@ class SNSConnection(AWSQueryConnection):
matches the structure described at
http://docs.aws.amazon.com/sns/latest/dg/PublishTopic.html#sns-message-formatting-by-protocol
+ :type message_attributes: dict
+ :param message_attributes: Message attributes to set. Should be
+ of the form:
+
+ .. code-block:: python
+
+ {
+ "name1": {
+ "data_type": "Number",
+ "string_value": "42"
+ },
+ "name2": {
+ "data_type": "String",
+ "string_value": "Bob"
+ }
+ }
+
:type subject: string
:param subject: Optional parameter to be used as the "Subject"
line of the email notifications.
@@ -256,6 +273,20 @@ class SNSConnection(AWSQueryConnection):
params['TargetArn'] = target_arn
if message_structure is not None:
params['MessageStructure'] = message_structure
+ if message_attributes is not None:
+ keys = sorted(message_attributes.keys())
+ for i, name in enumerate(keys, start=1):
+ attribute = message_attributes[name]
+ params['MessageAttributes.entry.{0}.Name'.format(i)] = name
+ if 'data_type' in attribute:
+ params['MessageAttributes.entry.{0}.Value.DataType'.format(i)] = \
+ attribute['data_type']
+ if 'string_value' in attribute:
+ params['MessageAttributes.entry.{0}.Value.StringValue'.format(i)] = \
+ attribute['string_value']
+ if 'binary_value' in attribute:
+ params['MessageAttributes.entry.{0}.Value.BinaryValue'.format(i)] = \
+ attribute['binary_value']
return self._make_request('Publish', params, '/', 'POST')
def subscribe(self, topic, protocol, endpoint):
@@ -313,7 +344,7 @@ class SNSConnection(AWSQueryConnection):
"""
t = queue.id.split('/')
q_arn = queue.arn
- sid = hashlib.md5(topic + q_arn).hexdigest()
+ sid = hashlib.md5((topic + q_arn).encode('utf-8')).hexdigest()
sid_exists = False
resp = self.subscribe(topic, 'sqs', q_arn)
attr = queue.get_attributes('Policy')
@@ -724,7 +755,7 @@ class SNSConnection(AWSQueryConnection):
params['ContentType'] = 'JSON'
response = self.make_request(action=action, verb=verb,
path=path, params=params)
- body = response.read()
+ body = response.read().decode('utf-8')
boto.log.debug(body)
if response.status == 200:
return json.loads(body)
diff --git a/boto/sqs/queue.py b/boto/sqs/queue.py
index 88d64c4f..6bcae489 100644
--- a/boto/sqs/queue.py
+++ b/boto/sqs/queue.py
@@ -47,7 +47,7 @@ class Queue(object):
def _name(self):
if self.url:
- val = urlparse.urlparse(self.url)[2].split('/')[2]
+ val = urllib.parse.urlparse(self.url)[2].split('/')[2]
else:
val = self.url
return val
diff --git a/boto/sts/credentials.py b/boto/sts/credentials.py
index 67f4b926..7ab63194 100644
--- a/boto/sts/credentials.py
+++ b/boto/sts/credentials.py
@@ -113,7 +113,7 @@ class Credentials(object):
the credentials contained in the file, the permissions
of the file will be set to readable/writable by owner only.
"""
- fp = open(file_path, 'wb')
+ fp = open(file_path, 'w')
json.dump(self.to_dict(), fp)
fp.close()
os.chmod(file_path, 0o600)
diff --git a/boto/support/layer1.py b/boto/support/layer1.py
index c4e18da0..b1c3ea01 100644
--- a/boto/support/layer1.py
+++ b/boto/support/layer1.py
@@ -20,16 +20,12 @@
# IN THE SOFTWARE.
#
-try:
- import json
-except ImportError:
- import simplejson as json
-
import boto
from boto.connection import AWSQueryConnection
from boto.regioninfo import RegionInfo
from boto.exception import JSONResponseError
from boto.support import exceptions
+from boto.compat import json
class SupportConnection(AWSQueryConnection):
@@ -577,7 +573,7 @@ class SupportConnection(AWSQueryConnection):
headers=headers, data=body)
response = self._mexe(http_request, sender=None,
override_num_retries=10)
- response_body = response.read()
+ response_body = response.read().decode('utf-8')
boto.log.debug(response_body)
if response.status == 200:
if response_body:
@@ -588,4 +584,3 @@ class SupportConnection(AWSQueryConnection):
exception_class = self._faults.get(fault_name, self.ResponseError)
raise exception_class(response.status, response.reason,
body=json_body)
-
diff --git a/boto/swf/layer1.py b/boto/swf/layer1.py
index 73a809b2..bba16ad2 100644
--- a/boto/swf/layer1.py
+++ b/boto/swf/layer1.py
@@ -96,7 +96,7 @@ class Layer1(AWSAuthConnection):
:type data: dict
:param data: Specifies request parameters with default values to be removed.
"""
- for item in data.keys():
+ for item in list(data.keys()):
if isinstance(data[item], dict):
cls._normalize_request_dict(data[item])
if data[item] in (None, {}):
@@ -130,7 +130,7 @@ class Layer1(AWSAuthConnection):
{}, headers, body, None)
response = self._mexe(http_request, sender=None,
override_num_retries=10)
- response_body = response.read()
+ response_body = response.read().decode('utf-8')
boto.log.debug(response_body)
if response.status == 200:
if response_body:
diff --git a/boto/swf/layer1_decisions.py b/boto/swf/layer1_decisions.py
index 7649da17..3f5f74af 100644
--- a/boto/swf/layer1_decisions.py
+++ b/boto/swf/layer1_decisions.py
@@ -167,7 +167,7 @@ class Layer1Decisions(object):
if task_list is not None:
attrs['taskList'] = {'name': task_list}
if start_to_close_timeout is not None:
- attrs['startToCloseTimeout'] = start_to_close_timeout
+ attrs['taskStartToCloseTimeout'] = start_to_close_timeout
if workflow_type_version is not None:
attrs['workflowTypeVersion'] = workflow_type_version
self._data.append(o)
diff --git a/boto/utils.py b/boto/utils.py
index 2e0603f4..4fbce49c 100644
--- a/boto/utils.py
+++ b/boto/utils.py
@@ -57,25 +57,14 @@ import email.mime.text
import email.utils
import email.encoders
import gzip
-import base64
import threading
import locale
-from boto.compat import six, StringIO, urllib
+from boto.compat import six, StringIO, urllib, encodebytes
from contextlib import contextmanager
-try:
- from hashlib import md5
-except ImportError:
- from md5 import md5
-
-
-try:
- import hashlib
- _hashfn = hashlib.sha512
-except ImportError:
- import md5
- _hashfn = md5.md5
+from hashlib import md5, sha512
+_hashfn = sha512
from boto.compat import json
@@ -226,11 +215,7 @@ def retry_url(url, retry_on_404=True, num_retries=10):
result = r.read()
return result
except urllib.error.HTTPError as e:
- # in 2.6 you use getcode(), in 2.5 and earlier you use code
- if hasattr(e, 'getcode'):
- code = e.getcode()
- else:
- code = e.code
+ code = e.getcode()
if code == 404 and not retry_on_404:
return ''
except Exception as e:
@@ -1033,7 +1018,7 @@ def compute_hash(fp, buf_size=8192, size=None, hash_algorithm=md5):
else:
s = fp.read(buf_size)
hex_digest = hash_obj.hexdigest()
- base64_digest = base64.encodestring(hash_obj.digest()).decode('utf-8')
+ base64_digest = encodebytes(hash_obj.digest()).decode('utf-8')
if base64_digest[-1] == '\n':
base64_digest = base64_digest[0:-1]
# data_size based on bytes read.
diff --git a/boto/vpc/__init__.py b/boto/vpc/__init__.py
index 4025d667..ee9558c3 100644
--- a/boto/vpc/__init__.py
+++ b/boto/vpc/__init__.py
@@ -34,6 +34,7 @@ from boto.vpc.vpngateway import VpnGateway, Attachment
from boto.vpc.dhcpoptions import DhcpOptions
from boto.vpc.subnet import Subnet
from boto.vpc.vpnconnection import VpnConnection
+from boto.vpc.vpc_peering_connection import VpcPeeringConnection
from boto.ec2 import RegionData
from boto.regioninfo import RegionInfo, get_regions
@@ -85,9 +86,9 @@ class VPCConnection(EC2Connection):
:type vpc_ids: list
:param vpc_ids: A list of strings with the desired VPC ID's
- :type filters: list of tuples
- :param filters: A list of tuples containing filters. Each tuple
- consists of a filter key and a filter value.
+ :type filters: list of tuples or dict
+ :param filters: A list of tuples or dict containing filters. Each tuple
+ or dict item consists of a filter key and a filter value.
Possible filter keys are:
* *state* - a list of states of the VPC (pending or available)
@@ -104,7 +105,7 @@ class VPCConnection(EC2Connection):
if vpc_ids:
self.build_list_params(params, vpc_ids, 'VpcId')
if filters:
- self.build_filter_params(params, dict(filters))
+ self.build_filter_params(params, filters)
if dry_run:
params['DryRun'] = 'true'
return self.get_list('DescribeVpcs', params, [('item', VPC)])
@@ -204,9 +205,9 @@ class VPCConnection(EC2Connection):
:param route_table_ids: A list of strings with the desired route table
IDs.
- :type filters: list of tuples
- :param filters: A list of tuples containing filters. Each tuple
- consists of a filter key and a filter value.
+ :type filters: list of tuples or dict
+ :param filters: A list of tuples or dict containing filters. Each tuple
+ or dict item consists of a filter key and a filter value.
:type dry_run: bool
:param dry_run: Set to True if the operation should not actually run.
@@ -218,7 +219,7 @@ class VPCConnection(EC2Connection):
if route_table_ids:
self.build_list_params(params, route_table_ids, "RouteTableId")
if filters:
- self.build_filter_params(params, dict(filters))
+ self.build_filter_params(params, filters)
if dry_run:
params['DryRun'] = 'true'
return self.get_list('DescribeRouteTables', params,
@@ -516,9 +517,9 @@ class VPCConnection(EC2Connection):
:param network_acl_ids: A list of strings with the desired network ACL
IDs.
- :type filters: list of tuples
- :param filters: A list of tuples containing filters. Each tuple
- consists of a filter key and a filter value.
+ :type filters: list of tuples or dict
+ :param filters: A list of tuples or dict containing filters. Each tuple
+ or dict item consists of a filter key and a filter value.
:rtype: list
:return: A list of :class:`boto.vpc.networkacl.NetworkAcl`
@@ -527,7 +528,7 @@ class VPCConnection(EC2Connection):
if network_acl_ids:
self.build_list_params(params, network_acl_ids, "NetworkAclId")
if filters:
- self.build_filter_params(params, dict(filters))
+ self.build_filter_params(params, filters)
return self.get_list('DescribeNetworkAcls', params,
[('item', NetworkAcl)])
@@ -779,9 +780,9 @@ class VPCConnection(EC2Connection):
:type internet_gateway_ids: list
:param internet_gateway_ids: A list of strings with the desired gateway IDs.
- :type filters: list of tuples
- :param filters: A list of tuples containing filters. Each tuple
- consists of a filter key and a filter value.
+ :type filters: list of tuples or dict
+ :param filters: A list of tuples or dict containing filters. Each tuple
+ or dict item consists of a filter key and a filter value.
:type dry_run: bool
:param dry_run: Set to True if the operation should not actually run.
@@ -793,7 +794,7 @@ class VPCConnection(EC2Connection):
self.build_list_params(params, internet_gateway_ids,
'InternetGatewayId')
if filters:
- self.build_filter_params(params, dict(filters))
+ self.build_filter_params(params, filters)
if dry_run:
params['DryRun'] = 'true'
return self.get_list('DescribeInternetGateways', params,
@@ -896,9 +897,9 @@ class VPCConnection(EC2Connection):
:param customer_gateway_ids: A list of strings with the desired
CustomerGateway ID's.
- :type filters: list of tuples
- :param filters: A list of tuples containing filters. Each tuple
- consists of a filter key and a filter value.
+ :type filters: list of tuples or dict
+ :param filters: A list of tuples or dict containing filters. Each tuple
+ or dict item consists of a filter key and a filter value.
Possible filter keys are:
- *state*, the state of the CustomerGateway
@@ -918,7 +919,7 @@ class VPCConnection(EC2Connection):
self.build_list_params(params, customer_gateway_ids,
'CustomerGatewayId')
if filters:
- self.build_filter_params(params, dict(filters))
+ self.build_filter_params(params, filters)
if dry_run:
params['DryRun'] = 'true'
@@ -985,9 +986,9 @@ class VPCConnection(EC2Connection):
:type vpn_gateway_ids: list
:param vpn_gateway_ids: A list of strings with the desired VpnGateway ID's
- :type filters: list of tuples
- :param filters: A list of tuples containing filters. Each tuple
- consists of a filter key and a filter value.
+ :type filters: list of tuples or dict
+ :param filters: A list of tuples or dict containing filters. Each tuple
+ or dict item consists of a filter key and a filter value.
Possible filter keys are:
- *state*, a list of states of the VpnGateway
@@ -1006,7 +1007,7 @@ class VPCConnection(EC2Connection):
if vpn_gateway_ids:
self.build_list_params(params, vpn_gateway_ids, 'VpnGatewayId')
if filters:
- self.build_filter_params(params, dict(filters))
+ self.build_filter_params(params, filters)
if dry_run:
params['DryRun'] = 'true'
return self.get_list('DescribeVpnGateways', params,
@@ -1109,9 +1110,9 @@ class VPCConnection(EC2Connection):
:type subnet_ids: list
:param subnet_ids: A list of strings with the desired Subnet ID's
- :type filters: list of tuples
- :param filters: A list of tuples containing filters. Each tuple
- consists of a filter key and a filter value.
+ :type filters: list of tuples or dict
+ :param filters: A list of tuples or dict containing filters. Each tuple
+ or dict item consists of a filter key and a filter value.
Possible filter keys are:
- *state*, a list of states of the Subnet
@@ -1132,7 +1133,7 @@ class VPCConnection(EC2Connection):
if subnet_ids:
self.build_list_params(params, subnet_ids, 'SubnetId')
if filters:
- self.build_filter_params(params, dict(filters))
+ self.build_filter_params(params, filters)
if dry_run:
params['DryRun'] = 'true'
return self.get_list('DescribeSubnets', params, [('item', Subnet)])
@@ -1192,9 +1193,9 @@ class VPCConnection(EC2Connection):
:type dhcp_options_ids: list
:param dhcp_options_ids: A list of strings with the desired DhcpOption ID's
- :type filters: list of tuples
- :param filters: A list of tuples containing filters. Each tuple
- consists of a filter key and a filter value.
+ :type filters: list of tuples or dict
+ :param filters: A list of tuples or dict containing filters. Each tuple
+ or dict item consists of a filter key and a filter value.
:type dry_run: bool
:param dry_run: Set to True if the operation should not actually run.
@@ -1206,7 +1207,7 @@ class VPCConnection(EC2Connection):
if dhcp_options_ids:
self.build_list_params(params, dhcp_options_ids, 'DhcpOptionsId')
if filters:
- self.build_filter_params(params, dict(filters))
+ self.build_filter_params(params, filters)
if dry_run:
params['DryRun'] = 'true'
return self.get_list('DescribeDhcpOptions', params,
@@ -1339,9 +1340,9 @@ class VPCConnection(EC2Connection):
:type vpn_connection_ids: list
:param vpn_connection_ids: A list of strings with the desired VPN_CONNECTION ID's
- :type filters: list of tuples
- :param filters: A list of tuples containing filters. Each tuple
- consists of a filter key and a filter value.
+ :type filters: list of tuples or dict
+ :param filters: A list of tuples or dict containing filters. Each tuple
+ or dict item consists of a filter key and a filter value.
Possible filter keys are:
- *state*, a list of states of the VPN_CONNECTION
@@ -1363,7 +1364,7 @@ class VPCConnection(EC2Connection):
self.build_list_params(params, vpn_connection_ids,
'VpnConnectionId')
if filters:
- self.build_filter_params(params, dict(filters))
+ self.build_filter_params(params, filters)
if dry_run:
params['DryRun'] = 'true'
return self.get_list('DescribeVpnConnections', params,
@@ -1533,3 +1534,140 @@ class VPCConnection(EC2Connection):
if dry_run:
params['DryRun'] = 'true'
return self.get_status('DeleteVpnConnectionRoute', params)
+
+ def get_all_vpc_peering_connections(self, vpc_peering_connection_ids=None,
+ filters=None, dry_run=False):
+ """
+ Retrieve information about your VPC peering connections. You
+ can filter results to return information only about those VPC
+ peering connections that match your search parameters.
+ Otherwise, all VPC peering connections associated with your
+ account are returned.
+
+ :type vpc_peering_connection_ids: list
+ :param vpc_peering_connection_ids: A list of strings with the desired VPC
+ peering connection ID's
+
+ :type filters: list of tuples
+ :param filters: A list of tuples containing filters. Each tuple
+ consists of a filter key and a filter value.
+ Possible filter keys are:
+
+ * *accepter-vpc-info.cidr-block* - The CIDR block of the peer VPC.
+ * *accepter-vpc-info.owner-id* - The AWS account ID of the owner
+ of the peer VPC.
+ * *accepter-vpc-info.vpc-id* - The ID of the peer VPC.
+ * *expiration-time* - The expiration date and time for the VPC
+ peering connection.
+ * *requester-vpc-info.cidr-block* - The CIDR block of the
+ requester's VPC.
+ * *requester-vpc-info.owner-id* - The AWS account ID of the
+ owner of the requester VPC.
+ * *requester-vpc-info.vpc-id* - The ID of the requester VPC.
+ * *status-code* - The status of the VPC peering connection.
+ * *status-message* - A message that provides more information
+ about the status of the VPC peering connection, if applicable.
+
+ :type dry_run: bool
+ :param dry_run: Set to True if the operation should not actually run.
+
+ :rtype: list
+ :return: A list of :class:`boto.vpc.vpc.VPC`
+ """
+ params = {}
+ if vpc_peering_connection_ids:
+ self.build_list_params(params, vpc_peering_connection_ids, 'VpcPeeringConnectionId')
+ if filters:
+ self.build_filter_params(params, dict(filters))
+ if dry_run:
+ params['DryRun'] = 'true'
+ return self.get_list('DescribeVpcPeeringConnections', params, [('item', VpcPeeringConnection)])
+
+    def create_vpc_peering_connection(self, vpc_id, peer_vpc_id,
+                                      peer_owner_id=None, dry_run=False):
+        """
+        Create a new VPC peering connection.
+
+        :type vpc_id: str
+        :param vpc_id: The ID of the requester VPC.
+
+        :type peer_vpc_id: str
+        :param peer_vpc_id: The ID of the VPC with which you are creating
+            the peering connection.
+
+        :type peer_owner_id: str
+        :param peer_owner_id: The AWS account ID of the owner of the peer VPC.
+
+        :type dry_run: bool
+        :param dry_run: Set to True if the operation should not actually run.
+
+        :rtype: The newly created VpcPeeringConnection
+        :return: A :class:`boto.vpc.vpc_peering_connection.VpcPeeringConnection` object
+        """
+        params = {'VpcId': vpc_id,
+                  'PeerVpcId': peer_vpc_id }
+        # PeerOwnerId defaults server-side to the caller's own account,
+        # so it is only sent when explicitly provided.
+        if peer_owner_id is not None:
+            params['PeerOwnerId'] = peer_owner_id
+        if dry_run:
+            params['DryRun'] = 'true'
+
+        return self.get_object('CreateVpcPeeringConnection', params,
+                               VpcPeeringConnection)
+
+ def delete_vpc_peering_connection(self, vpc_peering_connection_id, dry_run=False):
+ """
+ Deletes a VPC peering connection. Either the owner of the requester
+ VPC or the owner of the peer VPC can delete the VPC peering connection
+ if it's in the active state. The owner of the requester VPC can delete
+ a VPC peering connection in the pending-acceptance state.
+
+ :type vpc_peering_connection_id: str
+ :param vpc_peering_connection_id: The ID of the VPC peering connection.
+
+ :rtype: bool
+ :return: True if successful
+ """
+ params = {
+ 'VpcPeeringConnectionId': vpc_peering_connection_id
+ }
+
+ if dry_run:
+ params['DryRun'] = 'true'
+ return self.get_status('DeleteVpcPeeringConnection', params)
+
+ def reject_vpc_peering_connection(self, vpc_peering_connection_id, dry_run=False):
+ """
+ Rejects a VPC peering connection request. The VPC peering connection
+ must be in the pending-acceptance state.
+
+ :type vpc_peering_connection_id: str
+ :param vpc_peering_connection_id: The ID of the VPC peering connection.
+
+ :rtype: bool
+ :return: True if successful
+ """
+ params = {
+ 'VpcPeeringConnectionId': vpc_peering_connection_id
+ }
+
+ if dry_run:
+ params['DryRun'] = 'true'
+ return self.get_status('RejectVpcPeeringConnection', params)
+
+    def accept_vpc_peering_connection(self, vpc_peering_connection_id, dry_run=False):
+        """
+        Accepts a VPC peering connection request. The VPC peering connection
+        must be in the pending-acceptance state.
+
+        :type vpc_peering_connection_id: str
+        :param vpc_peering_connection_id: The ID of the VPC peering connection.
+
+        :type dry_run: bool
+        :param dry_run: Set to True if the operation should not actually run.
+
+        :rtype: Accepted VpcPeeringConnection
+        :return: A :class:`boto.vpc.vpc_peering_connection.VpcPeeringConnection` object
+        """
+        params = {
+            'VpcPeeringConnectionId': vpc_peering_connection_id
+        }
+
+        if dry_run:
+            params['DryRun'] = 'true'
+
+        return self.get_object('AcceptVpcPeeringConnection', params,
+                               VpcPeeringConnection)
+
diff --git a/boto/vpc/vpc_peering_connection.py b/boto/vpc/vpc_peering_connection.py
new file mode 100644
index 00000000..cdb9af8d
--- /dev/null
+++ b/boto/vpc/vpc_peering_connection.py
@@ -0,0 +1,163 @@
+# Copyright (c) 2014 Skytap http://skytap.com/
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish, dis-
+# tribute, sublicense, and/or sell copies of the Software, and to permit
+# persons to whom the Software is furnished to do so, subject to the fol-
+# lowing conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
+# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+
+"""
+Represents a VPC Peering Connection.
+"""
+
+from boto.ec2.ec2object import TaggedEC2Object
+
+class VpcInfo(object):
+    """
+    Information about one side (requester or accepter) of a VPC peering
+    connection, populated from the EC2 XML response.
+    """
+
+    def __init__(self):
+        """
+        Information on peer Vpc.
+
+        :ivar vpc_id: The unique ID of the peer Vpc.
+        :ivar owner_id: The AWS account ID of the owner of the peer Vpc.
+        :ivar cidr_block: The CIDR Block of the peer Vpc.
+        """
+
+        self.vpc_id = None
+        self.owner_id = None
+        self.cidr_block = None
+
+    def __repr__(self):
+        return 'VpcInfo:%s' % self.vpc_id
+
+    def startElement(self, name, attrs, connection):
+        # No nested XML elements to descend into.
+        pass
+
+    def endElement(self, name, value, connection):
+        if name == 'vpcId':
+            self.vpc_id = value
+        elif name == 'ownerId':
+            self.owner_id = value
+        elif name == 'cidrBlock':
+            self.cidr_block = value
+        else:
+            # Keep any unrecognized response fields as raw attributes.
+            setattr(self, name, value)
+
+class VpcPeeringConnectionStatus(object):
+ """
+ The status of VPC peering connection.
+
+ :ivar code: The status of the VPC peering connection. Valid values are:
+
+ * pending-acceptance
+ * failed
+ * expired
+ * provisioning
+ * active
+ * deleted
+ * rejected
+
+ :ivar message: A message that provides more information about the status of the VPC peering connection, if applicable.
+ """
+ def __init__(self, code=0, message=None):
+ self.code = code
+ self.message = message
+
+ def __repr__(self):
+ return '%s(%d)' % (self.code, self.message)
+
+ def startElement(self, name, attrs, connection):
+ pass
+
+ def endElement(self, name, value, connection):
+ if name == 'code':
+ self.code = value
+ elif name == 'message':
+ self.message = value
+ else:
+ setattr(self, name, value)
+
+
+
+class VpcPeeringConnection(TaggedEC2Object):
+
+    def __init__(self, connection=None):
+        """
+        Represents a VPC peering connection.
+
+        :ivar id: The unique ID of the VPC peering connection.
+        :ivar accepter_vpc_info: Information on peer Vpc.
+        :ivar requester_vpc_info: Information on requester Vpc.
+        :ivar expiration_time: The expiration date and time for the VPC peering connection.
+        :ivar status_code: The status of the VPC peering connection.
+        :ivar status_message: A message that provides more information about the status of the VPC peering connection, if applicable.
+        """
+        super(VpcPeeringConnection, self).__init__(connection)
+        self.id = None
+        # Pre-built child objects: the XML parser hands nested elements to
+        # these via startElement below.
+        self.accepter_vpc_info = VpcInfo()
+        self.requester_vpc_info = VpcInfo()
+        self.expiration_time = None
+        self._status = VpcPeeringConnectionStatus()
+
+    @property
+    def status_code(self):
+        # Read-only view of the nested status object's code.
+        return self._status.code
+
+    @property
+    def status_message(self):
+        # Read-only view of the nested status object's message.
+        return self._status.message
+
+    def __repr__(self):
+        return 'VpcPeeringConnection:%s' % self.id
+
+    def startElement(self, name, attrs, connection):
+        # Let TaggedEC2Object claim tag-related elements first.
+        retval = super(VpcPeeringConnection, self).startElement(name, attrs, connection)
+        if retval is not None:
+            return retval
+
+        # Delegate nested elements to the matching child object so the
+        # parser fills it in directly.
+        if name == 'requesterVpcInfo':
+            return self.requester_vpc_info
+        elif name == 'accepterVpcInfo':
+            return self.accepter_vpc_info
+        elif name == 'status':
+            return self._status
+
+        return None
+
+    def endElement(self, name, value, connection):
+        if name == 'vpcPeeringConnectionId':
+            self.id = value
+        elif name == 'expirationTime':
+            self.expiration_time = value
+        else:
+            # Keep any unrecognized response fields as raw attributes.
+            setattr(self, name, value)
+
+    def delete(self):
+        # Convenience wrapper around the connection-level delete call.
+        return self.connection.delete_vpc_peering_connection(self.id)
+
+    def _update(self, updated):
+        # Copy all state from a freshly fetched instance onto self.
+        self.__dict__.update(updated.__dict__)
+
+    def update(self, validate=False, dry_run=False):
+        """
+        Re-fetch this peering connection's state from EC2.
+
+        :param validate: If True, raise ValueError when the ID no longer
+            resolves to a peering connection; otherwise missing IDs are
+            silently ignored and the stale state is kept.
+        :param dry_run: Set to True if the operation should not actually run.
+        :return: The (possibly refreshed) status code.
+        """
+        vpc_peering_connection_list = self.connection.get_all_vpc_peering_connections(
+            [self.id],
+            dry_run=dry_run
+        )
+        if len(vpc_peering_connection_list):
+            updated_vpc_peering_connection = vpc_peering_connection_list[0]
+            self._update(updated_vpc_peering_connection)
+        elif validate:
+            raise ValueError('%s is not a valid VpcPeeringConnection ID' % (self.id,))
+        return self.status_code