-rw-r--r--  README.rst                                         |   5
-rw-r--r--  boto/__init__.py                                   |  24
-rw-r--r--  boto/endpoints.json                                |   3
-rw-r--r--  boto/logs/__init__.py                              |  41
-rw-r--r--  boto/logs/exceptions.py                            |  59
-rw-r--r--  boto/logs/layer1.py                                | 582
-rw-r--r--  docs/source/index.rst                              |   1
-rw-r--r--  docs/source/ref/logs.rst                           |  26
-rw-r--r--  docs/source/releasenotes/v2.31.0.rst               |  11
-rw-r--r--  setup.py                                           |   2
-rw-r--r--  tests/integration/logs/__init__.py                 |   0
-rw-r--r--  tests/integration/logs/test_cert_verification.py   |  37
-rw-r--r--  tests/integration/logs/test_layer1.py              |  43
-rw-r--r--  tests/unit/logs/__init__.py                        |   0
-rw-r--r--  tests/unit/logs/test_layer1.py                     |  22
15 files changed, 852 insertions, 4 deletions
diff --git a/README.rst b/README.rst
index d8fbc045..41e16361 100644
--- a/README.rst
+++ b/README.rst
@@ -1,9 +1,9 @@
####
boto
####
-boto 2.30.0
+boto 2.31.0
-Released: 1-Jul-2014
+Released: 10-Jul-2014
.. image:: https://travis-ci.org/boto/boto.png?branch=develop
:target: https://travis-ci.org/boto/boto
@@ -61,6 +61,7 @@ At the moment, boto supports:
* Monitoring
* Amazon CloudWatch (EC2 Only)
+ * Amazon CloudWatch Logs
* Networking
diff --git a/boto/__init__.py b/boto/__init__.py
index 1505bc41..bd16911e 100644
--- a/boto/__init__.py
+++ b/boto/__init__.py
@@ -37,7 +37,7 @@ import logging.config
import urlparse
from boto.exception import InvalidUriError
-__version__ = '2.30.0'
+__version__ = '2.31.0'
Version = __version__ # for backware compatibility
# http://bugs.python.org/issue7980
@@ -835,6 +835,28 @@ def connect_kinesis(aws_access_key_id=None,
**kwargs
)
+def connect_logs(aws_access_key_id=None,
+ aws_secret_access_key=None,
+ **kwargs):
+ """
+ Connect to Amazon CloudWatch Logs
+
+ :type aws_access_key_id: string
+ :param aws_access_key_id: Your AWS Access Key ID
+
+ :type aws_secret_access_key: string
+ :param aws_secret_access_key: Your AWS Secret Access Key
+
+    :rtype: :class:`boto.logs.layer1.CloudWatchLogsConnection`
+ :return: A connection to the Amazon CloudWatch Logs service
+ """
+ from boto.logs.layer1 import CloudWatchLogsConnection
+ return CloudWatchLogsConnection(
+ aws_access_key_id=aws_access_key_id,
+ aws_secret_access_key=aws_secret_access_key,
+ **kwargs
+ )
+
def storage_uri(uri_str, default_scheme='file', debug=0, validate=True,
bucket_storage_uri_class=BucketStorageUri,
suppress_consec_slashes=True, is_latest=False):
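
Example usage of the new connect_logs helper (a minimal sketch; the credential strings are placeholders, and boto's normal credential resolution applies when they are omitted):

    import boto

    # Placeholder credentials for illustration; omit them to let boto pick
    # up credentials from the environment or its config files.
    conn = boto.connect_logs(
        aws_access_key_id='<access key id>',
        aws_secret_access_key='<secret access key>')

    # Quick smoke test: list the log groups visible to the account.
    print(conn.describe_log_groups())
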
diff --git a/boto/endpoints.json b/boto/endpoints.json
index 27b50762..f139ff35 100644
--- a/boto/endpoints.json
+++ b/boto/endpoints.json
@@ -192,6 +192,9 @@
"us-west-2": "kinesis.us-west-2.amazonaws.com",
"eu-west-1": "kinesis.eu-west-1.amazonaws.com"
},
+ "logs": {
+ "us-east-1": "logs.us-east-1.amazonaws.com"
+ },
"opsworks": {
"us-east-1": "opsworks.us-east-1.amazonaws.com"
},
diff --git a/boto/logs/__init__.py b/boto/logs/__init__.py
new file mode 100644
index 00000000..dab84b56
--- /dev/null
+++ b/boto/logs/__init__.py
@@ -0,0 +1,41 @@
+# Copyright (c) 2014 Amazon.com, Inc. or its affiliates.
+# All Rights Reserved
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish, dis-
+# tribute, sublicense, and/or sell copies of the Software, and to permit
+# persons to whom the Software is furnished to do so, subject to the fol-
+# lowing conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
+# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+#
+from boto.regioninfo import get_regions
+
+
+def regions():
+ """
+    Get all available regions for the Amazon CloudWatch Logs service.
+
+ :rtype: list
+ :return: A list of :class:`boto.regioninfo.RegionInfo`
+ """
+ from boto.logs.layer1 import CloudWatchLogsConnection
+ return get_regions('logs', connection_cls=CloudWatchLogsConnection)
+
+
+def connect_to_region(region_name, **kw_params):
+ for region in regions():
+ if region.name == region_name:
+ return region.connect(**kw_params)
+ return None
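
Example of the region helpers above (sketch only; us-east-1 is the sole endpoint listed in endpoints.json in this release, and connect_to_region returns None for any other region name):

    import boto.logs

    conn = boto.logs.connect_to_region('us-east-1')
    if conn is None:
        raise ValueError('CloudWatch Logs is not available in that region')
    print(conn.describe_log_groups())
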
diff --git a/boto/logs/exceptions.py b/boto/logs/exceptions.py
new file mode 100644
index 00000000..49c01fa9
--- /dev/null
+++ b/boto/logs/exceptions.py
@@ -0,0 +1,59 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2012 Thomas Parslow http://almostobsolete.net/
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish, dis-
+# tribute, sublicense, and/or sell copies of the Software, and to permit
+# persons to whom the Software is furnished to do so, subject to the fol-
+# lowing conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
+# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+#
+from boto.exception import BotoServerError
+
+
+class LimitExceededException(BotoServerError):
+ pass
+
+
+class DataAlreadyAcceptedException(BotoServerError):
+ pass
+
+
+class ResourceInUseException(BotoServerError):
+ pass
+
+
+class ServiceUnavailableException(BotoServerError):
+ pass
+
+
+class InvalidParameterException(BotoServerError):
+ pass
+
+
+class ResourceNotFoundException(BotoServerError):
+ pass
+
+
+class ResourceAlreadyExistsException(BotoServerError):
+ pass
+
+
+class OperationAbortedException(BotoServerError):
+ pass
+
+
+class InvalidSequenceTokenException(BotoServerError):
+ pass
diff --git a/boto/logs/layer1.py b/boto/logs/layer1.py
new file mode 100644
index 00000000..52d7791e
--- /dev/null
+++ b/boto/logs/layer1.py
@@ -0,0 +1,582 @@
+# Copyright (c) 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish, dis-
+# tribute, sublicense, and/or sell copies of the Software, and to permit
+# persons to whom the Software is furnished to do so, subject to the fol-
+# lowing conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
+# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+#
+
+try:
+ import json
+except ImportError:
+ import simplejson as json
+
+import boto
+from boto.connection import AWSQueryConnection
+from boto.regioninfo import RegionInfo
+from boto.exception import JSONResponseError
+from boto.logs import exceptions
+
+
+class CloudWatchLogsConnection(AWSQueryConnection):
+ """
+ Amazon CloudWatch Logs Service API Reference
+ This is the Amazon CloudWatch Logs API Reference . Amazon
+ CloudWatch Logs is a managed service for real time monitoring and
+ archival of application logs. This guide provides detailed
+ information about Amazon CloudWatch Logs actions, data types,
+ parameters, and errors. For detailed information about Amazon
+ CloudWatch Logs features and their associated API calls, go to the
+ `Amazon CloudWatch Logs Developer Guide`_.
+
+ Use the following links to get started using the Amazon CloudWatch
+ API Reference :
+
+
+ + `Actions`_: An alphabetical list of all Amazon CloudWatch Logs
+ actions.
+ + `Data Types`_: An alphabetical list of all Amazon CloudWatch
+ Logs data types.
+ + `Common Parameters`_: Parameters that all Query actions can use.
+ + `Common Errors`_: Client and server errors that all actions can
+ return.
+ + `Regions and Endpoints`_: Itemized regions and endpoints for all
+ AWS products.
+
+
+ In addition to using the Amazon CloudWatch Logs API, you can also
+ use the following SDKs and third-party libraries to access Amazon
+ CloudWatch Logs programmatically.
+
+
+ + `AWS SDK for Java Documentation`_
+ + `AWS SDK for .NET Documentation`_
+ + `AWS SDK for PHP Documentation`_
+ + `AWS SDK for Ruby Documentation`_
+
+
+ Developers in the AWS developer community also provide their own
+ libraries, which you can find at the following AWS developer
+ centers:
+
+
+ + `AWS Java Developer Center`_
+ + `AWS PHP Developer Center`_
+ + `AWS Python Developer Center`_
+ + `AWS Ruby Developer Center`_
+ + `AWS Windows and .NET Developer Center`_
+ """
+ APIVersion = "2014-03-28"
+ DefaultRegionName = "us-east-1"
+ DefaultRegionEndpoint = "logs.us-east-1.amazonaws.com"
+ ServiceName = "CloudWatchLogs"
+ TargetPrefix = "Logs_20140328"
+ ResponseError = JSONResponseError
+
+ _faults = {
+ "LimitExceededException": exceptions.LimitExceededException,
+ "DataAlreadyAcceptedException": exceptions.DataAlreadyAcceptedException,
+ "ResourceInUseException": exceptions.ResourceInUseException,
+ "ServiceUnavailableException": exceptions.ServiceUnavailableException,
+ "InvalidParameterException": exceptions.InvalidParameterException,
+ "ResourceNotFoundException": exceptions.ResourceNotFoundException,
+ "ResourceAlreadyExistsException": exceptions.ResourceAlreadyExistsException,
+ "OperationAbortedException": exceptions.OperationAbortedException,
+ "InvalidSequenceTokenException": exceptions.InvalidSequenceTokenException,
+ }
+
+ def __init__(self, **kwargs):
+ region = kwargs.pop('region', None)
+ if not region:
+ region = RegionInfo(self, self.DefaultRegionName,
+ self.DefaultRegionEndpoint)
+
+ if 'host' not in kwargs or kwargs['host'] is None:
+ kwargs['host'] = region.endpoint
+
+ super(CloudWatchLogsConnection, self).__init__(**kwargs)
+ self.region = region
+
+ def _required_auth_capability(self):
+ return ['hmac-v4']
+
+ def create_log_group(self, log_group_name):
+ """
+ Creates a new log group with the specified name. The name of
+ the log group must be unique within a region for an AWS
+ account. You can create up to 100 log groups per account.
+
+ You must use the following guidelines when naming a log group:
+
+ + Log group names can be between 1 and 512 characters long.
+        + Allowed characters are a-z, A-Z, 0-9, '_' (underscore), '-'
+ (hyphen), '/' (forward slash), and '.' (period).
+
+
+
+ Log groups are created with a default retention of 14 days.
+        The retention attribute allows you to configure the number of
+        days you want to retain log events in the specified log group.
+        See the `SetRetention` operation for details on how to modify
+        the retention of your log groups.
+
+ :type log_group_name: string
+ :param log_group_name:
+
+ """
+ params = {'logGroupName': log_group_name, }
+ return self.make_request(action='CreateLogGroup',
+ body=json.dumps(params))
+
+ def create_log_stream(self, log_group_name, log_stream_name):
+ """
+ Creates a new log stream in the specified log group. The name
+ of the log stream must be unique within the log group. There
+ is no limit on the number of log streams that can exist in a
+ log group.
+
+ You must use the following guidelines when naming a log
+ stream:
+
+ + Log stream names can be between 1 and 512 characters long.
+ + The ':' colon character is not allowed.
+
+ :type log_group_name: string
+ :param log_group_name:
+
+ :type log_stream_name: string
+ :param log_stream_name:
+
+ """
+ params = {
+ 'logGroupName': log_group_name,
+ 'logStreamName': log_stream_name,
+ }
+ return self.make_request(action='CreateLogStream',
+ body=json.dumps(params))
+
+ def delete_log_group(self, log_group_name):
+ """
+ Deletes the log group with the specified name. Amazon
+ CloudWatch Logs will delete a log group only if there are no
+ log streams and no metric filters associated with the log
+ group. If this condition is not satisfied, the request will
+ fail and the log group will not be deleted.
+
+ :type log_group_name: string
+ :param log_group_name:
+
+ """
+ params = {'logGroupName': log_group_name, }
+ return self.make_request(action='DeleteLogGroup',
+ body=json.dumps(params))
+
+ def delete_log_stream(self, log_group_name, log_stream_name):
+ """
+ Deletes a log stream and permanently deletes all the archived
+ log events associated with it.
+
+ :type log_group_name: string
+ :param log_group_name:
+
+ :type log_stream_name: string
+ :param log_stream_name:
+
+ """
+ params = {
+ 'logGroupName': log_group_name,
+ 'logStreamName': log_stream_name,
+ }
+ return self.make_request(action='DeleteLogStream',
+ body=json.dumps(params))
+
+ def delete_metric_filter(self, log_group_name, filter_name):
+ """
+ Deletes a metric filter associated with the specified log
+ group.
+
+ :type log_group_name: string
+ :param log_group_name:
+
+ :type filter_name: string
+ :param filter_name: The name of the metric filter.
+
+ """
+ params = {
+ 'logGroupName': log_group_name,
+ 'filterName': filter_name,
+ }
+ return self.make_request(action='DeleteMetricFilter',
+ body=json.dumps(params))
+
+ def delete_retention_policy(self, log_group_name):
+ """
+        Deletes the retention policy of the specified log group.
+
+ :type log_group_name: string
+ :param log_group_name:
+
+ """
+ params = {'logGroupName': log_group_name, }
+ return self.make_request(action='DeleteRetentionPolicy',
+ body=json.dumps(params))
+
+ def describe_log_groups(self, log_group_name_prefix=None,
+ next_token=None, limit=None):
+ """
+ Returns all the log groups that are associated with the AWS
+ account making the request. The list returned in the response
+ is ASCII-sorted by log group name.
+
+ By default, this operation returns up to 50 log groups. If
+ there are more log groups to list, the response would contain
+ a `nextToken` value in the response body. You can also limit
+ the number of log groups returned in the response by
+ specifying the `limit` parameter in the request.
+
+ :type log_group_name_prefix: string
+ :param log_group_name_prefix:
+
+ :type next_token: string
+ :param next_token: A string token used for pagination that points to
+ the next page of results. It must be a value obtained from the
+ response of the previous `DescribeLogGroups` request.
+
+ :type limit: integer
+ :param limit: The maximum number of items returned in the response. If
+ you don't specify a value, the request would return up to 50 items.
+
+ """
+ params = {}
+ if log_group_name_prefix is not None:
+ params['logGroupNamePrefix'] = log_group_name_prefix
+ if next_token is not None:
+ params['nextToken'] = next_token
+ if limit is not None:
+ params['limit'] = limit
+ return self.make_request(action='DescribeLogGroups',
+ body=json.dumps(params))
+
+ def describe_log_streams(self, log_group_name,
+ log_stream_name_prefix=None, next_token=None,
+ limit=None):
+ """
+ Returns all the log streams that are associated with the
+ specified log group. The list returned in the response is
+ ASCII-sorted by log stream name.
+
+ By default, this operation returns up to 50 log streams. If
+ there are more log streams to list, the response would contain
+ a `nextToken` value in the response body. You can also limit
+ the number of log streams returned in the response by
+ specifying the `limit` parameter in the request.
+
+ :type log_group_name: string
+ :param log_group_name:
+
+ :type log_stream_name_prefix: string
+ :param log_stream_name_prefix:
+
+ :type next_token: string
+ :param next_token: A string token used for pagination that points to
+ the next page of results. It must be a value obtained from the
+ response of the previous `DescribeLogStreams` request.
+
+ :type limit: integer
+ :param limit: The maximum number of items returned in the response. If
+ you don't specify a value, the request would return up to 50 items.
+
+ """
+ params = {'logGroupName': log_group_name, }
+ if log_stream_name_prefix is not None:
+ params['logStreamNamePrefix'] = log_stream_name_prefix
+ if next_token is not None:
+ params['nextToken'] = next_token
+ if limit is not None:
+ params['limit'] = limit
+ return self.make_request(action='DescribeLogStreams',
+ body=json.dumps(params))
+
+ def describe_metric_filters(self, log_group_name,
+ filter_name_prefix=None, next_token=None,
+ limit=None):
+ """
+        Returns all the metric filters associated with the specified
+ log group. The list returned in the response is ASCII-sorted
+ by filter name.
+
+ By default, this operation returns up to 50 metric filters. If
+ there are more metric filters to list, the response would
+ contain a `nextToken` value in the response body. You can also
+ limit the number of metric filters returned in the response by
+ specifying the `limit` parameter in the request.
+
+ :type log_group_name: string
+ :param log_group_name:
+
+ :type filter_name_prefix: string
+        :param filter_name_prefix: The prefix to match against the names
+            of the metric filters in the specified log group.
+
+ :type next_token: string
+ :param next_token: A string token used for pagination that points to
+ the next page of results. It must be a value obtained from the
+ response of the previous `DescribeMetricFilters` request.
+
+ :type limit: integer
+ :param limit: The maximum number of items returned in the response. If
+ you don't specify a value, the request would return up to 50 items.
+
+ """
+ params = {'logGroupName': log_group_name, }
+ if filter_name_prefix is not None:
+ params['filterNamePrefix'] = filter_name_prefix
+ if next_token is not None:
+ params['nextToken'] = next_token
+ if limit is not None:
+ params['limit'] = limit
+ return self.make_request(action='DescribeMetricFilters',
+ body=json.dumps(params))
+
+ def get_log_events(self, log_group_name, log_stream_name,
+ start_time=None, end_time=None, next_token=None,
+ limit=None, start_from_head=None):
+ """
+ Retrieves log events from the specified log stream. You can
+ provide an optional time range to filter the results on the
+ event `timestamp`.
+
+        By default, this operation returns as many log events as can
+ fit in a response size of 1MB, up to 10,000 log events. The
+ response will always include a `nextForwardToken` and a
+ `nextBackwardToken` in the response body. You can use any of
+ these tokens in subsequent `GetLogEvents` requests to paginate
+ through events in either forward or backward direction. You
+ can also limit the number of log events returned in the
+ response by specifying the `limit` parameter in the request.
+
+ :type log_group_name: string
+ :param log_group_name:
+
+ :type log_stream_name: string
+ :param log_stream_name:
+
+ :type start_time: long
+        :param start_time: A point in time expressed as the number of
+            milliseconds since Jan 1, 1970 00:00:00 UTC.
+
+ :type end_time: long
+        :param end_time: A point in time expressed as the number of
+            milliseconds since Jan 1, 1970 00:00:00 UTC.
+
+ :type next_token: string
+ :param next_token: A string token used for pagination that points to
+ the next page of results. It must be a value obtained from the
+ `nextForwardToken` or `nextBackwardToken` fields in the response of
+ the previous `GetLogEvents` request.
+
+ :type limit: integer
+ :param limit: The maximum number of log events returned in the
+            response. If you don't specify a value, the request would return as
+            many log events as can fit in a response size of 1MB, up to 10,000
+ log events.
+
+ :type start_from_head: boolean
+ :param start_from_head:
+
+ """
+ params = {
+ 'logGroupName': log_group_name,
+ 'logStreamName': log_stream_name,
+ }
+ if start_time is not None:
+ params['startTime'] = start_time
+ if end_time is not None:
+ params['endTime'] = end_time
+ if next_token is not None:
+ params['nextToken'] = next_token
+ if limit is not None:
+ params['limit'] = limit
+ if start_from_head is not None:
+ params['startFromHead'] = start_from_head
+ return self.make_request(action='GetLogEvents',
+ body=json.dumps(params))
+
+ def put_log_events(self, log_group_name, log_stream_name, log_events,
+ sequence_token=None):
+ """
+ Uploads a batch of log events to the specified log stream.
+
+ Every PutLogEvents request must include the `sequenceToken`
+ obtained from the response of the previous request. An upload
+ in a newly created log stream does not require a
+ `sequenceToken`.
+
+ The batch of events must satisfy the following constraints:
+
+ + The maximum batch size is 32,768 bytes, and this size is
+ calculated as the sum of all event messages in UTF-8, plus 26
+ bytes for each log event.
+ + None of the log events in the batch can be more than 2 hours
+ in the future.
+ + None of the log events in the batch can be older than 14
+ days or the retention period of the log group.
+        + The log events in the batch must be in chronological order
+ by their `timestamp`.
+ + The maximum number of log events in a batch is 1,000.
+
+ :type log_group_name: string
+ :param log_group_name:
+
+ :type log_stream_name: string
+ :param log_stream_name:
+
+ :type log_events: list
+ :param log_events: A list of events belonging to a log stream.
+
+ :type sequence_token: string
+ :param sequence_token: A string token that must be obtained from the
+ response of the previous `PutLogEvents` request.
+
+ """
+ params = {
+ 'logGroupName': log_group_name,
+ 'logStreamName': log_stream_name,
+ 'logEvents': log_events,
+ }
+ if sequence_token is not None:
+ params['sequenceToken'] = sequence_token
+ return self.make_request(action='PutLogEvents',
+ body=json.dumps(params))
+
+ def put_metric_filter(self, log_group_name, filter_name, filter_pattern,
+ metric_transformations):
+ """
+ Creates or updates a metric filter and associates it with the
+ specified log group. Metric filters allow you to configure
+ rules to extract metric data from log events ingested through
+ `PutLogEvents` requests.
+
+ :type log_group_name: string
+ :param log_group_name:
+
+ :type filter_name: string
+ :param filter_name: The name of the metric filter.
+
+ :type filter_pattern: string
+ :param filter_pattern:
+
+ :type metric_transformations: list
+ :param metric_transformations:
+
+ """
+ params = {
+ 'logGroupName': log_group_name,
+ 'filterName': filter_name,
+ 'filterPattern': filter_pattern,
+ 'metricTransformations': metric_transformations,
+ }
+ return self.make_request(action='PutMetricFilter',
+ body=json.dumps(params))
+
+ def put_retention_policy(self, log_group_name, retention_in_days):
+ """
+        Sets the retention policy of the specified log group.
+
+ :type log_group_name: string
+ :param log_group_name:
+
+ :type retention_in_days: integer
+ :param retention_in_days: Specifies the number of days you want to
+ retain log events in the specified log group. Possible values are:
+ 1, 3, 5, 7, 14, 30, 60, 90, 120, 150, 180, 365, 400, 547, 730.
+
+ """
+ params = {
+ 'logGroupName': log_group_name,
+ 'retentionInDays': retention_in_days,
+ }
+ return self.make_request(action='PutRetentionPolicy',
+ body=json.dumps(params))
+
+ def set_retention(self, log_group_name, retention_in_days):
+ """
+ Sets the retention of the specified log group. Log groups are
+ created with a default retention of 14 days. The retention
+        attribute allows you to configure the number of days you want
+ to retain log events in the specified log group.
+
+ :type log_group_name: string
+ :param log_group_name:
+
+ :type retention_in_days: integer
+ :param retention_in_days: Specifies the number of days you want to
+ retain log events in the specified log group. Possible values are:
+ 1, 3, 5, 7, 14, 30, 60, 90, 120, 150, 180, 365, 400, 547, 730.
+
+ """
+ params = {
+ 'logGroupName': log_group_name,
+ 'retentionInDays': retention_in_days,
+ }
+ return self.make_request(action='SetRetention',
+ body=json.dumps(params))
+
+ def test_metric_filter(self, filter_pattern, log_event_messages):
+ """
+ Tests the filter pattern of a metric filter against a sample
+ of log event messages. You can use this operation to validate
+ the correctness of a metric filter pattern.
+
+ :type filter_pattern: string
+ :param filter_pattern:
+
+ :type log_event_messages: list
+ :param log_event_messages:
+
+ """
+ params = {
+ 'filterPattern': filter_pattern,
+ 'logEventMessages': log_event_messages,
+ }
+ return self.make_request(action='TestMetricFilter',
+ body=json.dumps(params))
+
+ def make_request(self, action, body):
+ headers = {
+ 'X-Amz-Target': '%s.%s' % (self.TargetPrefix, action),
+ 'Host': self.region.endpoint,
+ 'Content-Type': 'application/x-amz-json-1.1',
+ 'Content-Length': str(len(body)),
+ }
+ http_request = self.build_base_http_request(
+ method='POST', path='/', auth_path='/', params={},
+ headers=headers, data=body)
+ response = self._mexe(http_request, sender=None,
+ override_num_retries=10)
+ response_body = response.read().decode('utf-8')
+ boto.log.debug(response_body)
+ if response.status == 200:
+ if response_body:
+ return json.loads(response_body)
+ else:
+ json_body = json.loads(response_body)
+ fault_name = json_body.get('__type', None)
+ exception_class = self._faults.get(fault_name, self.ResponseError)
+ raise exception_class(response.status, response.reason,
+ body=json_body)
+
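
Example flow for the layer1 client above (a rough sketch: the group and stream names are hypothetical, and the event dict shape, a 'timestamp' in milliseconds plus a 'message' string, along with the 'nextSequenceToken' response field, is assumed from the CloudWatch Logs PutLogEvents API rather than declared in this module):

    import time

    import boto

    conn = boto.connect_logs()

    # Hypothetical names, used only for illustration.
    group, stream = 'my-app', 'web-01'
    conn.create_log_group(group)
    conn.create_log_stream(group, stream)

    # The first upload to a newly created stream needs no sequence token.
    event = {'timestamp': int(time.time() * 1000), 'message': 'hello world'}
    result = conn.put_log_events(group, stream, [event])

    # Later uploads must pass the token returned by the previous call.
    token = result.get('nextSequenceToken')
    event = {'timestamp': int(time.time() * 1000), 'message': 'second event'}
    conn.put_log_events(group, stream, [event], sequence_token=token)

    # Read the events back; pagination tokens are also in the response.
    print(conn.get_log_events(group, stream))
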
diff --git a/docs/source/index.rst b/docs/source/index.rst
index c260822a..87c66904 100644
--- a/docs/source/index.rst
+++ b/docs/source/index.rst
@@ -66,6 +66,7 @@ Currently Supported Services
* **Monitoring**
* :doc:`CloudWatch <cloudwatch_tut>` -- (:doc:`API Reference <ref/cloudwatch>`)
+ * CloudWatch Logs -- (:doc:`API Reference <ref/logs>`)
* **Networking**
diff --git a/docs/source/ref/logs.rst b/docs/source/ref/logs.rst
new file mode 100644
index 00000000..3a0fc34c
--- /dev/null
+++ b/docs/source/ref/logs.rst
@@ -0,0 +1,26 @@
+.. _ref-logs:
+
+===============
+CloudWatch Logs
+===============
+
+boto.logs
+---------------
+
+.. automodule:: boto.logs
+ :members:
+ :undoc-members:
+
+boto.logs.layer1
+----------------------
+
+.. automodule:: boto.logs.layer1
+ :members:
+ :undoc-members:
+
+boto.logs.exceptions
+--------------------------
+
+.. automodule:: boto.logs.exceptions
+ :members:
+ :undoc-members:
diff --git a/docs/source/releasenotes/v2.31.0.rst b/docs/source/releasenotes/v2.31.0.rst
new file mode 100644
index 00000000..af140f03
--- /dev/null
+++ b/docs/source/releasenotes/v2.31.0.rst
@@ -0,0 +1,11 @@
+boto v2.31.0
+============
+
+:date: 2014/07/10
+
+This release adds support for Amazon CloudWatch Logs.
+
+
+Changes
+-------
+* Add support for Amazon CloudWatch Logs. (:sha:`125c94d`)
diff --git a/setup.py b/setup.py
index c14b88bb..729c887b 100644
--- a/setup.py
+++ b/setup.py
@@ -76,7 +76,7 @@ setup(name = "boto",
"boto.elastictranscoder", "boto.opsworks", "boto.redshift",
"boto.dynamodb2", "boto.support", "boto.cloudtrail",
"boto.directconnect", "boto.kinesis", "boto.rds2",
- "boto.cloudsearch2"],
+ "boto.cloudsearch2", "boto.logs"],
package_data = {
"boto.cacerts": ["cacerts.txt"],
"boto": ["endpoints.json"],
diff --git a/tests/integration/logs/__init__.py b/tests/integration/logs/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/tests/integration/logs/__init__.py
diff --git a/tests/integration/logs/test_cert_verification.py b/tests/integration/logs/test_cert_verification.py
new file mode 100644
index 00000000..8ce072c8
--- /dev/null
+++ b/tests/integration/logs/test_cert_verification.py
@@ -0,0 +1,37 @@
+# Copyright (c) 2014 Amazon.com, Inc. or its affiliates.
+# All rights reserved.
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish, dis-
+# tribute, sublicense, and/or sell copies of the Software, and to permit
+# persons to whom the Software is furnished to do so, subject to the fol-
+# lowing conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
+# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+
+"""
+Check that all of the certs on all service endpoints validate.
+"""
+
+from boto.compat import unittest
+from tests.integration import ServiceCertVerificationTest
+
+import boto.logs
+
+
+class CloudWatchLogsCertVerificationTest(unittest.TestCase, ServiceCertVerificationTest):
+ regions = boto.logs.regions()
+
+ def sample_service_call(self, conn):
+ conn.describe_log_groups()
diff --git a/tests/integration/logs/test_layer1.py b/tests/integration/logs/test_layer1.py
new file mode 100644
index 00000000..b1766ffa
--- /dev/null
+++ b/tests/integration/logs/test_layer1.py
@@ -0,0 +1,43 @@
+# Copyright (c) 2014 Amazon.com, Inc. or its affiliates.
+# All rights reserved.
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish, dis-
+# tribute, sublicense, and/or sell copies of the Software, and to permit
+# persons to whom the Software is furnished to do so, subject to the fol-
+# lowing conditions:
+#
+# The above copyright notice and this permission notice shall be included
+# in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
+# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+# IN THE SOFTWARE.
+
+import boto
+from boto.compat import unittest
+
+
+class TestCloudWatchLogs(unittest.TestCase):
+ def setUp(self):
+ self.logs = boto.connect_logs()
+
+ def test_logs(self):
+ logs = self.logs
+
+ response = logs.describe_log_groups(log_group_name_prefix='test')
+ self.assertIsInstance(response['logGroups'], list)
+
+ mfilter = '[ip, id, user, ..., status_code=500, size]'
+ sample = [
+ '127.0.0.1 - frank "GET /apache_pb.gif HTTP/1.0" 200 1534',
+ '127.0.0.1 - frank "GET /apache_pb.gif HTTP/1.0" 500 5324',
+ ]
+ response = logs.test_metric_filter(mfilter, sample)
+ self.assertEqual(len(response['matches']), 1)
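
The filter pattern exercised above can also be attached to a log group with put_metric_filter. A sketch follows; the transformation keys (metricName, metricNamespace, metricValue) are assumed from the CloudWatch Logs PutMetricFilter API, and the group and filter names are hypothetical:

    import boto

    conn = boto.connect_logs()
    transformations = [{
        'metricName': 'ServerErrorCount',   # hypothetical metric name
        'metricNamespace': 'MyApp',         # hypothetical namespace
        'metricValue': '1',                 # count one per matching event
    }]
    conn.put_metric_filter(
        'my-log-group',                     # hypothetical log group
        '5xx-errors',                       # hypothetical filter name
        '[ip, id, user, ..., status_code=500, size]',
        transformations)
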
diff --git a/tests/unit/logs/__init__.py b/tests/unit/logs/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/tests/unit/logs/__init__.py
diff --git a/tests/unit/logs/test_layer1.py b/tests/unit/logs/test_layer1.py
new file mode 100644
index 00000000..9b8dc2ef
--- /dev/null
+++ b/tests/unit/logs/test_layer1.py
@@ -0,0 +1,22 @@
+#!/usr/bin/env python
+
+from boto.logs.layer1 import CloudWatchLogsConnection
+from tests.unit import AWSMockServiceTestCase
+
+
+class TestDescribeLogs(AWSMockServiceTestCase):
+ connection_class = CloudWatchLogsConnection
+
+ def default_body(self):
+ return b'{"logGroups": []}'
+
+ def test_describe(self):
+ self.set_http_response(status_code=200)
+ api_response = self.service_connection.describe_log_groups()
+
+ self.assertEqual(0, len(api_response['logGroups']))
+
+ self.assert_request_parameters({})
+
+ target = self.actual_request.headers['X-Amz-Target']
+ self.assertTrue('DescribeLogGroups' in target)
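
The same mock pattern extends to the other actions; a sketch of an analogous CreateLogGroup test (the empty JSON body and the group name are arbitrary choices for illustration):

    class TestCreateLogGroup(AWSMockServiceTestCase):
        connection_class = CloudWatchLogsConnection

        def default_body(self):
            # CreateLogGroup returns an empty body on success.
            return b'{}'

        def test_create(self):
            self.set_http_response(status_code=200)
            self.service_connection.create_log_group('example-group')

            target = self.actual_request.headers['X-Amz-Target']
            self.assertTrue('CreateLogGroup' in target)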