author     Davanum Srinivas <dims@linux.vnet.ibm.com>    2014-06-18 23:10:41 -0400
committer  Davanum Srinivas <dims@linux.vnet.ibm.com>    2014-06-18 23:12:10 -0400
commit     01bca57862633ca43c8f1c05cef761acd85a7011 (patch)
tree       e76937a606c6de550cb45d372438c7578bda46b4
parent     7b9410e41c0a40f25443bbfe847cfd3585098834 (diff)
download   oslo-utils-01bca57862633ca43c8f1c05cef761acd85a7011.tar.gz
Get the tox tests working
-rw-r--r--  .testr.conf  4
-rw-r--r--  openstack-common.conf  11
-rw-r--r--  oslo/__init__.py  6
-rw-r--r--  oslo/utils/excutils.py  2
-rw-r--r--  oslo/utils/network_utils.py  6
-rw-r--r--  oslo/utils/openstack/__init__.py  0
-rw-r--r--  oslo/utils/openstack/common/__init__.py  17
-rw-r--r--  oslo/utils/openstack/common/gettextutils.py  498
-rw-r--r--  oslo/utils/openstack/common/importutils.py  73
-rw-r--r--  oslo/utils/openstack/common/jsonutils.py  187
-rw-r--r--  oslo/utils/openstack/common/local.py  45
-rw-r--r--  oslo/utils/openstack/common/log.py  728
-rw-r--r--  oslo/utils/openstack/common/strutils.py  239
-rw-r--r--  oslo/utils/openstack/common/timeutils.py  210
-rw-r--r--  oslo/utils/openstack/common/units.py  38
-rw-r--r--  oslo/utils/strutils.py  2
-rw-r--r--  requirements.txt  5
-rw-r--r--  setup.cfg  5
-rwxr-xr-x  setup.py  2
-rw-r--r--  test-requirements.txt  22
-rw-r--r--  tests/__init__.py  2
-rw-r--r--  tests/base.py  3
-rw-r--r--  tests/fake/__init__.py  23
-rw-r--r--  tests/test_excutils.py (renamed from tests/unit/test_excutils.py)  2
-rw-r--r--  tests/test_importutils.py (renamed from tests/unit/test_importutils.py)  32
-rw-r--r--  tests/test_network_utils.py (renamed from tests/unit/test_network_utils.py)  23
-rw-r--r--  tests/test_strutils.py (renamed from tests/unit/test_strutils.py)  4
-rw-r--r--  tests/test_timeutils.py (renamed from tests/unit/test_timeutils.py)  2
-rw-r--r--  tests/test_utils.py  2
-rwxr-xr-x  tools/run_cross_tests.sh  91
-rw-r--r--  tox.ini  4
31 files changed, 2241 insertions, 47 deletions
diff --git a/.testr.conf b/.testr.conf
index 19721fc..6d83b3c 100644
--- a/.testr.conf
+++ b/.testr.conf
@@ -2,6 +2,6 @@
test_command=OS_STDOUT_CAPTURE=${OS_STDOUT_CAPTURE:-1} \
OS_STDERR_CAPTURE=${OS_STDERR_CAPTURE:-1} \
OS_TEST_TIMEOUT=${OS_TEST_TIMEOUT:-60} \
- ${PYTHON:-python} -m subunit.run discover -t ./ ./tests $LISTOPT $IDOPTION
+ ${PYTHON:-python} -m subunit.run discover -t ./ . $LISTOPT $IDOPTION
test_id_option=--load-list $IDFILE
-test_list_option=--list
\ No newline at end of file
+test_list_option=--list
diff --git a/openstack-common.conf b/openstack-common.conf
new file mode 100644
index 0000000..76490fe
--- /dev/null
+++ b/openstack-common.conf
@@ -0,0 +1,11 @@
+[DEFAULT]
+
+# The list of modules to copy from oslo-incubator.git
+script = tools/run_cross_tests.sh
+
+# The base module to hold the copy of openstack.common
+base=oslo.utils
+
+module=gettextutils
+module=log
+module=units
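
With base set to oslo.utils, the synced copies land under the oslo.utils.openstack.common package. A minimal sketch of the resulting import paths, using two of the modules listed above (the logger call is illustrative only):

    # Sketch: import paths produced by this sync layout.
    from oslo.utils.openstack.common import gettextutils
    from oslo.utils.openstack.common import log as logging

    _LW = gettextutils._LW
    LOG = logging.getLogger(__name__)
    LOG.warning(_LW('example warning routed through the synced modules'))
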
diff --git a/oslo/__init__.py b/oslo/__init__.py
index c659cac..594bc16 100644
--- a/oslo/__init__.py
+++ b/oslo/__init__.py
@@ -1,5 +1,5 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
+# Copyright 2012 Red Hat, Inc.
+#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
@@ -12,4 +12,4 @@
# License for the specific language governing permissions and limitations
# under the License.
-__import__('pkg_resources').declare_namespace(__name__)
\ No newline at end of file
+__import__('pkg_resources').declare_namespace(__name__)
diff --git a/oslo/utils/excutils.py b/oslo/utils/excutils.py
index 07a02be..5108d8c 100644
--- a/oslo/utils/excutils.py
+++ b/oslo/utils/excutils.py
@@ -24,7 +24,7 @@ import traceback
import six
-from openstack.common.gettextutils import _LE
+from oslo.utils.openstack.common.gettextutils import _LE
class save_and_reraise_exception(object):
diff --git a/oslo/utils/network_utils.py b/oslo/utils/network_utils.py
index b0f9a02..f71f275 100644
--- a/oslo/utils/network_utils.py
+++ b/oslo/utils/network_utils.py
@@ -21,8 +21,8 @@ import socket
from six.moves.urllib import parse
-from openstack.common.gettextutils import _LW
-from openstack.common import log as logging
+from oslo.utils.openstack.common.gettextutils import _LW
+from oslo.utils.openstack.common import log as logging
LOG = logging.getLogger(__name__)
@@ -160,4 +160,4 @@ def set_tcp_keepalive(sock, tcp_keepalive=True,
socket.TCP_KEEPCNT,
tcp_keepalive_count)
else:
- LOG.warning(_LW('tcp_keepknt not available on your system'))
+ LOG.warning(_LW('tcp_keepcnt not available on your system'))
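
For reference, a small usage sketch of the function touched in the hunk above; only the parameters visible here are passed, and the corrected 'tcp_keepcnt' warning is what gets logged on platforms without TCP_KEEPCNT:

    # Usage sketch for set_tcp_keepalive (only the parameters shown in
    # the hunk above are used).
    import socket

    from oslo.utils import network_utils

    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    network_utils.set_tcp_keepalive(sock, tcp_keepalive=True)
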
diff --git a/oslo/utils/openstack/__init__.py b/oslo/utils/openstack/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/oslo/utils/openstack/__init__.py
diff --git a/oslo/utils/openstack/common/__init__.py b/oslo/utils/openstack/common/__init__.py
new file mode 100644
index 0000000..d1223ea
--- /dev/null
+++ b/oslo/utils/openstack/common/__init__.py
@@ -0,0 +1,17 @@
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import six
+
+
+six.add_move(six.MovedModule('mox', 'mox', 'mox3.mox'))
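
The add_move call above registers mox under six.moves, so test code can use one import on Python 2 (mox) and Python 3 (mox3.mox). A sketch of what that enables, assuming one of those packages is installed in the test environment:

    # Importing the package runs the __init__.py above and registers the
    # six.MovedModule; after that, six.moves.mox resolves on both Pythons.
    import oslo.utils.openstack.common  # noqa
    from six.moves import mox

    mocker = mox.Mox()
    mocker.VerifyAll()
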
diff --git a/oslo/utils/openstack/common/gettextutils.py b/oslo/utils/openstack/common/gettextutils.py
new file mode 100644
index 0000000..99e8642
--- /dev/null
+++ b/oslo/utils/openstack/common/gettextutils.py
@@ -0,0 +1,498 @@
+# Copyright 2012 Red Hat, Inc.
+# Copyright 2013 IBM Corp.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""
+gettext for openstack-common modules.
+
+Usual usage in an openstack.common module:
+
+ from oslo.utils.openstack.common.gettextutils import _
+"""
+
+import copy
+import functools
+import gettext
+import locale
+from logging import handlers
+import os
+
+from babel import localedata
+import six
+
+_AVAILABLE_LANGUAGES = {}
+
+# FIXME(dhellmann): Remove this when moving to oslo.i18n.
+USE_LAZY = False
+
+
+class TranslatorFactory(object):
+ """Create translator functions
+ """
+
+ def __init__(self, domain, lazy=False, localedir=None):
+ """Establish a set of translation functions for the domain.
+
+ :param domain: Name of translation domain,
+ specifying a message catalog.
+ :type domain: str
+ :param lazy: Delays translation until a message is emitted.
+ Defaults to False.
+ :type lazy: Boolean
+ :param localedir: Directory with translation catalogs.
+ :type localedir: str
+ """
+ self.domain = domain
+ self.lazy = lazy
+ if localedir is None:
+ localedir = os.environ.get(domain.upper() + '_LOCALEDIR')
+ self.localedir = localedir
+
+ def _make_translation_func(self, domain=None):
+ """Return a new translation function ready for use.
+
+ Takes into account whether or not lazy translation is being
+ done.
+
+ The domain can be specified to override the default from the
+ factory, but the localedir from the factory is always used
+ because we assume the log-level translation catalogs are
+ installed in the same directory as the main application
+ catalog.
+
+ """
+ if domain is None:
+ domain = self.domain
+ if self.lazy:
+ return functools.partial(Message, domain=domain)
+ t = gettext.translation(
+ domain,
+ localedir=self.localedir,
+ fallback=True,
+ )
+ if six.PY3:
+ return t.gettext
+ return t.ugettext
+
+ @property
+ def primary(self):
+ "The default translation function."
+ return self._make_translation_func()
+
+ def _make_log_translation_func(self, level):
+ return self._make_translation_func(self.domain + '-log-' + level)
+
+ @property
+ def log_info(self):
+ "Translate info-level log messages."
+ return self._make_log_translation_func('info')
+
+ @property
+ def log_warning(self):
+ "Translate warning-level log messages."
+ return self._make_log_translation_func('warning')
+
+ @property
+ def log_error(self):
+ "Translate error-level log messages."
+ return self._make_log_translation_func('error')
+
+ @property
+ def log_critical(self):
+ "Translate critical-level log messages."
+ return self._make_log_translation_func('critical')
+
+
+# NOTE(dhellmann): When this module moves out of the incubator into
+# oslo.i18n, these global variables can be moved to an integration
+# module within each application.
+
+# Create the global translation functions.
+_translators = TranslatorFactory('oslo.utils')
+
+# The primary translation function using the well-known name "_"
+_ = _translators.primary
+
+# Translators for log levels.
+#
+# The abbreviated names are meant to reflect the usual use of a short
+# name like '_'. The "L" is for "log" and the other letter comes from
+# the level.
+_LI = _translators.log_info
+_LW = _translators.log_warning
+_LE = _translators.log_error
+_LC = _translators.log_critical
+
+# NOTE(dhellmann): End of globals that will move to the application's
+# integration module.
+
+
+def enable_lazy():
+ """Convenience function for configuring _() to use lazy gettext
+
+ Call this at the start of execution to enable the gettextutils._
+ function to use lazy gettext functionality. This is useful if
+ your project is importing _ directly instead of using the
+ gettextutils.install() way of importing the _ function.
+ """
+ # FIXME(dhellmann): This function will be removed in oslo.i18n,
+ # because the TranslatorFactory makes it superfluous.
+ global _, _LI, _LW, _LE, _LC, USE_LAZY
+ tf = TranslatorFactory('oslo.utils', lazy=True)
+ _ = tf.primary
+ _LI = tf.log_info
+ _LW = tf.log_warning
+ _LE = tf.log_error
+ _LC = tf.log_critical
+ USE_LAZY = True
+
+
+def install(domain, lazy=False):
+ """Install a _() function using the given translation domain.
+
+ Given a translation domain, install a _() function using gettext's
+ install() function.
+
+ The main difference from gettext.install() is that we allow
+ overriding the default localedir (e.g. /usr/share/locale) using
+ a translation-domain-specific environment variable (e.g.
+ NOVA_LOCALEDIR).
+
+ :param domain: the translation domain
+ :param lazy: indicates whether or not to install the lazy _() function.
+ The lazy _() introduces a way to do deferred translation
+ of messages by installing a _ that builds Message objects,
+ instead of strings, which can then be lazily translated into
+ any available locale.
+ """
+ if lazy:
+ from six import moves
+ tf = TranslatorFactory(domain, lazy=True)
+ moves.builtins.__dict__['_'] = tf.primary
+ else:
+ localedir = '%s_LOCALEDIR' % domain.upper()
+ if six.PY3:
+ gettext.install(domain,
+ localedir=os.environ.get(localedir))
+ else:
+ gettext.install(domain,
+ localedir=os.environ.get(localedir),
+ unicode=True)
+
+
+class Message(six.text_type):
+ """A Message object is a unicode object that can be translated.
+
+ Translation of Message is done explicitly using the translate() method.
+ For all non-translation intents and purposes, a Message is simply unicode,
+ and can be treated as such.
+ """
+
+ def __new__(cls, msgid, msgtext=None, params=None,
+ domain='oslo.utils', *args):
+ """Create a new Message object.
+
+ In order for translation to work gettext requires a message ID, this
+ msgid will be used as the base unicode text. It is also possible
+ for the msgid and the base unicode text to be different by passing
+ the msgtext parameter.
+ """
+ # If the base msgtext is not given, we use the default translation
+ # of the msgid (which is in English) just in case the system locale is
+ # not English, so that the base text will be in that locale by default.
+ if not msgtext:
+ msgtext = Message._translate_msgid(msgid, domain)
+ # We want to initialize the parent unicode with the actual object that
+ # would have been plain unicode if 'Message' was not enabled.
+ msg = super(Message, cls).__new__(cls, msgtext)
+ msg.msgid = msgid
+ msg.domain = domain
+ msg.params = params
+ return msg
+
+ def translate(self, desired_locale=None):
+ """Translate this message to the desired locale.
+
+ :param desired_locale: The desired locale to translate the message to,
+ if no locale is provided the message will be
+ translated to the system's default locale.
+
+ :returns: the translated message in unicode
+ """
+
+ translated_message = Message._translate_msgid(self.msgid,
+ self.domain,
+ desired_locale)
+ if self.params is None:
+ # No need for more translation
+ return translated_message
+
+ # This Message object may have been formatted with one or more
+ # Message objects as substitution arguments, given either as a single
+ # argument, part of a tuple, or as one or more values in a dictionary.
+ # When translating this Message we need to translate those Messages too
+ translated_params = _translate_args(self.params, desired_locale)
+
+ translated_message = translated_message % translated_params
+
+ return translated_message
+
+ @staticmethod
+ def _translate_msgid(msgid, domain, desired_locale=None):
+ if not desired_locale:
+ system_locale = locale.getdefaultlocale()
+ # If the system locale is not available to the runtime use English
+ if not system_locale[0]:
+ desired_locale = 'en_US'
+ else:
+ desired_locale = system_locale[0]
+
+ locale_dir = os.environ.get(domain.upper() + '_LOCALEDIR')
+ lang = gettext.translation(domain,
+ localedir=locale_dir,
+ languages=[desired_locale],
+ fallback=True)
+ if six.PY3:
+ translator = lang.gettext
+ else:
+ translator = lang.ugettext
+
+ translated_message = translator(msgid)
+ return translated_message
+
+ def __mod__(self, other):
+ # When we mod a Message we want the actual operation to be performed
+ # by the parent class (i.e. unicode()), the only thing we do here is
+ # save the original msgid and the parameters in case of a translation
+ params = self._sanitize_mod_params(other)
+ unicode_mod = super(Message, self).__mod__(params)
+ modded = Message(self.msgid,
+ msgtext=unicode_mod,
+ params=params,
+ domain=self.domain)
+ return modded
+
+ def _sanitize_mod_params(self, other):
+ """Sanitize the object being modded with this Message.
+
+ - Add support for modding 'None' so translation supports it
+ - Trim the modded object, which can be a large dictionary, to only
+ those keys that would actually be used in a translation
+ - Snapshot the object being modded, in case the message is
+ translated, it will be used as it was when the Message was created
+ """
+ if other is None:
+ params = (other,)
+ elif isinstance(other, dict):
+ # Merge the dictionaries
+ # Copy each item in case one does not support deep copy.
+ params = {}
+ if isinstance(self.params, dict):
+ for key, val in self.params.items():
+ params[key] = self._copy_param(val)
+ for key, val in other.items():
+ params[key] = self._copy_param(val)
+ else:
+ params = self._copy_param(other)
+ return params
+
+ def _copy_param(self, param):
+ try:
+ return copy.deepcopy(param)
+ except Exception:
+ # Fallback to casting to unicode this will handle the
+ # python code-like objects that can't be deep-copied
+ return six.text_type(param)
+
+ def __add__(self, other):
+ msg = _('Message objects do not support addition.')
+ raise TypeError(msg)
+
+ def __radd__(self, other):
+ return self.__add__(other)
+
+ if six.PY2:
+ def __str__(self):
+ # NOTE(luisg): Logging in python 2.6 tries to str() log records,
+ # and it expects specifically a UnicodeError in order to proceed.
+ msg = _('Message objects do not support str() because they may '
+ 'contain non-ascii characters. '
+ 'Please use unicode() or translate() instead.')
+ raise UnicodeError(msg)
+
+
+def get_available_languages(domain):
+ """Lists the available languages for the given translation domain.
+
+ :param domain: the domain to get languages for
+ """
+ if domain in _AVAILABLE_LANGUAGES:
+ return copy.copy(_AVAILABLE_LANGUAGES[domain])
+
+ localedir = '%s_LOCALEDIR' % domain.upper()
+ find = lambda x: gettext.find(domain,
+ localedir=os.environ.get(localedir),
+ languages=[x])
+
+ # NOTE(mrodden): en_US should always be available (and first in case
+ # order matters) since our in-line message strings are en_US
+ language_list = ['en_US']
+ # NOTE(luisg): Babel <1.0 used a function called list(), which was
+ # renamed to locale_identifiers() in >=1.0, the requirements master list
+ # requires >=0.9.6, uncapped, so defensively work with both. We can remove
+ # this check when the master list updates to >=1.0, and update all projects
+ list_identifiers = (getattr(localedata, 'list', None) or
+ getattr(localedata, 'locale_identifiers'))
+ locale_identifiers = list_identifiers()
+
+ for i in locale_identifiers:
+ if find(i) is not None:
+ language_list.append(i)
+
+ # NOTE(luisg): Babel>=1.0,<1.3 has a bug where some OpenStack supported
+ # locales (e.g. 'zh_CN', and 'zh_TW') aren't supported even though they
+ # are perfectly legitimate locales:
+ # https://github.com/mitsuhiko/babel/issues/37
+ # In Babel 1.3 they fixed the bug and they support these locales, but
+ # they are still not explicitly "listed" by locale_identifiers().
+ # That is why we add the locales here explicitly if necessary so that
+ # they are listed as supported.
+ aliases = {'zh': 'zh_CN',
+ 'zh_Hant_HK': 'zh_HK',
+ 'zh_Hant': 'zh_TW',
+ 'fil': 'tl_PH'}
+ for (locale, alias) in six.iteritems(aliases):
+ if locale in language_list and alias not in language_list:
+ language_list.append(alias)
+
+ _AVAILABLE_LANGUAGES[domain] = language_list
+ return copy.copy(language_list)
+
+
+def translate(obj, desired_locale=None):
+ """Gets the translated unicode representation of the given object.
+
+ If the object is not translatable it is returned as-is.
+ If the locale is None the object is translated to the system locale.
+
+ :param obj: the object to translate
+ :param desired_locale: the locale to translate the message to, if None the
+ default system locale will be used
+ :returns: the translated object in unicode, or the original object if
+ it could not be translated
+ """
+ message = obj
+ if not isinstance(message, Message):
+ # If the object to translate is not already translatable,
+ # let's first get its unicode representation
+ message = six.text_type(obj)
+ if isinstance(message, Message):
+ # Even after unicoding() we still need to check if we are
+ # running with translatable unicode before translating
+ return message.translate(desired_locale)
+ return obj
+
+
+def _translate_args(args, desired_locale=None):
+ """Translates all the translatable elements of the given arguments object.
+
+ This method is used for translating the translatable values in method
+ arguments which include values of tuples or dictionaries.
+ If the object is not a tuple or a dictionary the object itself is
+ translated if it is translatable.
+
+ If the locale is None the object is translated to the system locale.
+
+ :param args: the args to translate
+ :param desired_locale: the locale to translate the args to, if None the
+ default system locale will be used
+ :returns: a new args object with the translated contents of the original
+ """
+ if isinstance(args, tuple):
+ return tuple(translate(v, desired_locale) for v in args)
+ if isinstance(args, dict):
+ translated_dict = {}
+ for (k, v) in six.iteritems(args):
+ translated_v = translate(v, desired_locale)
+ translated_dict[k] = translated_v
+ return translated_dict
+ return translate(args, desired_locale)
+
+
+class TranslationHandler(handlers.MemoryHandler):
+ """Handler that translates records before logging them.
+
+ The TranslationHandler takes a locale and a target logging.Handler object
+ to forward LogRecord objects to after translating them. This handler
+ depends on Message objects being logged, instead of regular strings.
+
+ The handler can be configured declaratively in the logging.conf as follows:
+
+ [handlers]
+ keys = translatedlog, translator
+
+ [handler_translatedlog]
+ class = handlers.WatchedFileHandler
+ args = ('/var/log/api-localized.log',)
+ formatter = context
+
+ [handler_translator]
+ class = openstack.common.log.TranslationHandler
+ target = translatedlog
+ args = ('zh_CN',)
+
+ If the specified locale is not available in the system, the handler will
+ log in the default locale.
+ """
+
+ def __init__(self, locale=None, target=None):
+ """Initialize a TranslationHandler
+
+ :param locale: locale to use for translating messages
+ :param target: logging.Handler object to forward
+ LogRecord objects to after translation
+ """
+ # NOTE(luisg): In order to allow this handler to be a wrapper for
+ # other handlers, such as a FileHandler, and still be able to
+ # configure it using logging.conf, this handler has to extend
+ # MemoryHandler because only the MemoryHandlers' logging.conf
+ # parsing is implemented such that it accepts a target handler.
+ handlers.MemoryHandler.__init__(self, capacity=0, target=target)
+ self.locale = locale
+
+ def setFormatter(self, fmt):
+ self.target.setFormatter(fmt)
+
+ def emit(self, record):
+ # We save the message from the original record to restore it
+ # after translation, so other handlers are not affected by this
+ original_msg = record.msg
+ original_args = record.args
+
+ try:
+ self._translate_and_log_record(record)
+ finally:
+ record.msg = original_msg
+ record.args = original_args
+
+ def _translate_and_log_record(self, record):
+ record.msg = translate(record.msg, self.locale)
+
+ # In addition to translating the message, we also need to translate
+ # arguments that were passed to the log method that were not part
+ # of the main message e.g., log.info(_('Some message %s'), this_one))
+ record.args = _translate_args(record.args, self.locale)
+
+ self.target.emit(record)
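
This copied gettextutils module is what the adjusted imports in excutils.py and network_utils.py above now resolve to. A short sketch of the marker functions it exports, mirroring the usage shown in its docstring:

    # Sketch of the translation markers exported above.
    from oslo.utils.openstack.common.gettextutils import _, _LE, _LW

    user_facing = _('Could not parse the address')
    log_warning = _LW('retrying after a transient failure')
    log_error = _LE('giving up after repeated failures')
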
diff --git a/oslo/utils/openstack/common/importutils.py b/oslo/utils/openstack/common/importutils.py
new file mode 100644
index 0000000..5b6d08f
--- /dev/null
+++ b/oslo/utils/openstack/common/importutils.py
@@ -0,0 +1,73 @@
+# Copyright 2011 OpenStack Foundation.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""
+Import related utilities and helper functions.
+"""
+
+import sys
+import traceback
+
+
+def import_class(import_str):
+ """Returns a class from a string including module and class."""
+ mod_str, _sep, class_str = import_str.rpartition('.')
+ __import__(mod_str)
+ try:
+ return getattr(sys.modules[mod_str], class_str)
+ except AttributeError:
+ raise ImportError('Class %s cannot be found (%s)' %
+ (class_str,
+ traceback.format_exception(*sys.exc_info())))
+
+
+def import_object(import_str, *args, **kwargs):
+ """Import a class and return an instance of it."""
+ return import_class(import_str)(*args, **kwargs)
+
+
+def import_object_ns(name_space, import_str, *args, **kwargs):
+ """Tries to import object from default namespace.
+
+ Imports a class and return an instance of it, first by trying
+ to find the class in a default namespace, then failing back to
+ a full path if not found in the default namespace.
+ """
+ import_value = "%s.%s" % (name_space, import_str)
+ try:
+ return import_class(import_value)(*args, **kwargs)
+ except ImportError:
+ return import_class(import_str)(*args, **kwargs)
+
+
+def import_module(import_str):
+ """Import a module."""
+ __import__(import_str)
+ return sys.modules[import_str]
+
+
+def import_versioned_module(version, submodule=None):
+ module = 'oslo.utils.v%s' % version
+ if submodule:
+ module = '.'.join((module, submodule))
+ return import_module(module)
+
+
+def try_import(import_str, default=None):
+ """Try to import a module and if it fails return default."""
+ try:
+ return import_module(import_str)
+ except ImportError:
+ return default
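
A quick usage sketch of the helpers defined above, using standard-library names so it runs as-is:

    # Usage sketch for the importutils helpers.
    from oslo.utils.openstack.common import importutils

    # Import a class from a dotted path and instantiate it.
    od_cls = importutils.import_class('collections.OrderedDict')
    ordered = od_cls(a=1)

    # Import a module, or fall back to a default when it is missing.
    json_mod = importutils.import_module('json')
    missing = importutils.try_import('no_such_module', default=None)
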
diff --git a/oslo/utils/openstack/common/jsonutils.py b/oslo/utils/openstack/common/jsonutils.py
new file mode 100644
index 0000000..cbd08b3
--- /dev/null
+++ b/oslo/utils/openstack/common/jsonutils.py
@@ -0,0 +1,187 @@
+# Copyright 2010 United States Government as represented by the
+# Administrator of the National Aeronautics and Space Administration.
+# Copyright 2011 Justin Santa Barbara
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+'''
+JSON related utilities.
+
+This module provides a few things:
+
+ 1) A handy function for getting an object down to something that can be
+ JSON serialized. See to_primitive().
+
+ 2) Wrappers around loads() and dumps(). The dumps() wrapper will
+ automatically use to_primitive() for you if needed.
+
+ 3) This sets up anyjson to use the loads() and dumps() wrappers if anyjson
+ is available.
+'''
+
+
+import codecs
+import datetime
+import functools
+import inspect
+import itertools
+import sys
+
+if sys.version_info < (2, 7):
+ # On Python <= 2.6, json module is not C boosted, so try to use
+ # simplejson module if available
+ try:
+ import simplejson as json
+ except ImportError:
+ import json
+else:
+ import json
+
+import six
+import six.moves.xmlrpc_client as xmlrpclib
+
+from oslo.utils.openstack.common import gettextutils
+
+from oslo.utils import importutils
+from oslo.utils import strutils
+from oslo.utils import timeutils
+
+netaddr = importutils.try_import("netaddr")
+
+_nasty_type_tests = [inspect.ismodule, inspect.isclass, inspect.ismethod,
+ inspect.isfunction, inspect.isgeneratorfunction,
+ inspect.isgenerator, inspect.istraceback, inspect.isframe,
+ inspect.iscode, inspect.isbuiltin, inspect.isroutine,
+ inspect.isabstract]
+
+_simple_types = (six.string_types + six.integer_types
+ + (type(None), bool, float))
+
+
+def to_primitive(value, convert_instances=False, convert_datetime=True,
+ level=0, max_depth=3):
+ """Convert a complex object into primitives.
+
+ Handy for JSON serialization. We can optionally handle instances,
+ but since this is a recursive function, we could have cyclical
+ data structures.
+
+ To handle cyclical data structures we could track the actual objects
+ visited in a set, but not all objects are hashable. Instead we just
+ track the depth of the object inspections and don't go too deep.
+
+ Therefore, convert_instances=True is lossy ... be aware.
+
+ """
+ # handle obvious types first - order of basic types determined by running
+ # full tests on nova project, resulting in the following counts:
+ # 572754 <type 'NoneType'>
+ # 460353 <type 'int'>
+ # 379632 <type 'unicode'>
+ # 274610 <type 'str'>
+ # 199918 <type 'dict'>
+ # 114200 <type 'datetime.datetime'>
+ # 51817 <type 'bool'>
+ # 26164 <type 'list'>
+ # 6491 <type 'float'>
+ # 283 <type 'tuple'>
+ # 19 <type 'long'>
+ if isinstance(value, _simple_types):
+ return value
+
+ if isinstance(value, datetime.datetime):
+ if convert_datetime:
+ return timeutils.strtime(value)
+ else:
+ return value
+
+ # value of itertools.count doesn't get caught by nasty_type_tests
+ # and results in infinite loop when list(value) is called.
+ if type(value) == itertools.count:
+ return six.text_type(value)
+
+ # FIXME(vish): Workaround for LP bug 852095. Without this workaround,
+ # tests that raise an exception in a mocked method that
+ # has a @wrap_exception with a notifier will fail. If
+ # we up the dependency to 0.5.4 (when it is released) we
+ # can remove this workaround.
+ if getattr(value, '__module__', None) == 'mox':
+ return 'mock'
+
+ if level > max_depth:
+ return '?'
+
+ # The try block may not be necessary after the class check above,
+ # but just in case ...
+ try:
+ recursive = functools.partial(to_primitive,
+ convert_instances=convert_instances,
+ convert_datetime=convert_datetime,
+ level=level,
+ max_depth=max_depth)
+ if isinstance(value, dict):
+ return dict((k, recursive(v)) for k, v in six.iteritems(value))
+ elif isinstance(value, (list, tuple)):
+ return [recursive(lv) for lv in value]
+
+ # It's not clear why xmlrpclib created their own DateTime type, but
+ # for our purposes, make it a datetime type which is explicitly
+ # handled
+ if isinstance(value, xmlrpclib.DateTime):
+ value = datetime.datetime(*tuple(value.timetuple())[:6])
+
+ if convert_datetime and isinstance(value, datetime.datetime):
+ return timeutils.strtime(value)
+ elif isinstance(value, gettextutils.Message):
+ return value.data
+ elif hasattr(value, 'iteritems'):
+ return recursive(dict(value.iteritems()), level=level + 1)
+ elif hasattr(value, '__iter__'):
+ return recursive(list(value))
+ elif convert_instances and hasattr(value, '__dict__'):
+ # Likely an instance of something. Watch for cycles.
+ # Ignore class member vars.
+ return recursive(value.__dict__, level=level + 1)
+ elif netaddr and isinstance(value, netaddr.IPAddress):
+ return six.text_type(value)
+ else:
+ if any(test(value) for test in _nasty_type_tests):
+ return six.text_type(value)
+ return value
+ except TypeError:
+ # Class objects are tricky since they may define something like
+ # __iter__ defined but it isn't callable as list().
+ return six.text_type(value)
+
+
+def dumps(value, default=to_primitive, **kwargs):
+ return json.dumps(value, default=default, **kwargs)
+
+
+def loads(s, encoding='utf-8', **kwargs):
+ return json.loads(strutils.safe_decode(s, encoding), **kwargs)
+
+
+def load(fp, encoding='utf-8', **kwargs):
+ return json.load(codecs.getreader(encoding)(fp), **kwargs)
+
+
+try:
+ import anyjson
+except ImportError:
+ pass
+else:
+ anyjson._modules.append((__name__, 'dumps', TypeError,
+ 'loads', ValueError, 'load'))
+ anyjson.force_implementation(__name__)
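
A minimal sketch of the wrappers above; dumps() falls back to to_primitive(), so non-JSON types such as datetime objects serialize cleanly:

    # Usage sketch for the jsonutils wrappers.
    import datetime

    from oslo.utils.openstack.common import jsonutils

    payload = {'when': datetime.datetime(2014, 6, 18, 23, 10, 41),
               'attempts': 3}
    text = jsonutils.dumps(payload)    # datetime handled via to_primitive()
    restored = jsonutils.loads(text)   # plain dict with a string timestamp
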
diff --git a/oslo/utils/openstack/common/local.py b/oslo/utils/openstack/common/local.py
new file mode 100644
index 0000000..0819d5b
--- /dev/null
+++ b/oslo/utils/openstack/common/local.py
@@ -0,0 +1,45 @@
+# Copyright 2011 OpenStack Foundation.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Local storage of variables using weak references"""
+
+import threading
+import weakref
+
+
+class WeakLocal(threading.local):
+ def __getattribute__(self, attr):
+ rval = super(WeakLocal, self).__getattribute__(attr)
+ if rval:
+ # NOTE(mikal): this bit is confusing. What is stored is a weak
+ # reference, not the value itself. We therefore need to lookup
+ # the weak reference and return the inner value here.
+ rval = rval()
+ return rval
+
+ def __setattr__(self, attr, value):
+ value = weakref.ref(value)
+ return super(WeakLocal, self).__setattr__(attr, value)
+
+
+# NOTE(mikal): the name "store" should be deprecated in the future
+store = WeakLocal()
+
+# A "weak" store uses weak references and allows an object to fall out of scope
+# when it falls out of scope in the code that uses the thread local storage. A
+# "strong" store will hold a reference to the object so that it never falls out
+# of scope.
+weak_store = WeakLocal()
+strong_store = threading.local()
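
A sketch of the difference between the stores defined above: strong_store keeps objects alive, while weak_store only holds weak references. The RequestContext class below is a stand-in for illustration:

    # Sketch of the thread-local stores.
    from oslo.utils.openstack.common import local

    class RequestContext(object):
        """Stand-in context object for illustration."""

    ctx = RequestContext()
    local.strong_store.context = ctx   # plain attribute, keeps ctx alive
    local.weak_store.context = ctx     # stored as a weakref.ref
    # Once the last strong reference elsewhere is gone, reading
    # local.weak_store.context returns None.
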
diff --git a/oslo/utils/openstack/common/log.py b/oslo/utils/openstack/common/log.py
new file mode 100644
index 0000000..6c6af3c
--- /dev/null
+++ b/oslo/utils/openstack/common/log.py
@@ -0,0 +1,728 @@
+# Copyright 2011 OpenStack Foundation.
+# Copyright 2010 United States Government as represented by the
+# Administrator of the National Aeronautics and Space Administration.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""OpenStack logging handler.
+
+This module adds to logging functionality by adding the option to specify
+a context object when calling the various log methods. If the context object
+is not specified, default formatting is used. Additionally, an instance uuid
+may be passed as part of the log message, which is intended to make it easier
+for admins to find messages related to a specific instance.
+
+It also allows setting of formatting information through conf.
+
+"""
+
+import inspect
+import itertools
+import logging
+import logging.config
+import logging.handlers
+import os
+import re
+import sys
+import traceback
+
+from oslo.config import cfg
+import six
+from six import moves
+
+from oslo.utils.openstack.common.gettextutils import _
+from oslo.utils.openstack.common import jsonutils
+from oslo.utils.openstack.common import local
+
+from oslo.utils import importutils
+
+_DEFAULT_LOG_DATE_FORMAT = "%Y-%m-%d %H:%M:%S"
+
+_SANITIZE_KEYS = ['adminPass', 'admin_pass', 'password', 'admin_password']
+
+# NOTE(ldbragst): Let's build a list of regex objects using the list of
+# _SANITIZE_KEYS we already have. This way, we only have to add the new key
+# to the list of _SANITIZE_KEYS and we can generate regular expressions
+# for XML and JSON automatically.
+_SANITIZE_PATTERNS = []
+_FORMAT_PATTERNS = [r'(%(key)s\s*[=]\s*[\"\']).*?([\"\'])',
+ r'(<%(key)s>).*?(</%(key)s>)',
+ r'([\"\']%(key)s[\"\']\s*:\s*[\"\']).*?([\"\'])',
+ r'([\'"].*?%(key)s[\'"]\s*:\s*u?[\'"]).*?([\'"])',
+ r'([\'"].*?%(key)s[\'"]\s*,\s*\'--?[A-z]+\'\s*,\s*u?[\'"])'
+ '.*?([\'"])',
+ r'(%(key)s\s*--?[A-z]+\s*).*?([\s])']
+
+for key in _SANITIZE_KEYS:
+ for pattern in _FORMAT_PATTERNS:
+ reg_ex = re.compile(pattern % {'key': key}, re.DOTALL)
+ _SANITIZE_PATTERNS.append(reg_ex)
+
+
+common_cli_opts = [
+ cfg.BoolOpt('debug',
+ short='d',
+ default=False,
+ help='Print debugging output (set logging level to '
+ 'DEBUG instead of default WARNING level).'),
+ cfg.BoolOpt('verbose',
+ short='v',
+ default=False,
+ help='Print more verbose output (set logging level to '
+ 'INFO instead of default WARNING level).'),
+]
+
+logging_cli_opts = [
+ cfg.StrOpt('log-config-append',
+ metavar='PATH',
+ deprecated_name='log-config',
+ help='The name of a logging configuration file. This file '
+ 'is appended to any existing logging configuration '
+ 'files. For details about logging configuration files, '
+ 'see the Python logging module documentation.'),
+ cfg.StrOpt('log-format',
+ metavar='FORMAT',
+ help='DEPRECATED. '
+ 'A logging.Formatter log message format string which may '
+ 'use any of the available logging.LogRecord attributes. '
+ 'This option is deprecated. Please use '
+ 'logging_context_format_string and '
+ 'logging_default_format_string instead.'),
+ cfg.StrOpt('log-date-format',
+ default=_DEFAULT_LOG_DATE_FORMAT,
+ metavar='DATE_FORMAT',
+ help='Format string for %%(asctime)s in log records. '
+ 'Default: %(default)s .'),
+ cfg.StrOpt('log-file',
+ metavar='PATH',
+ deprecated_name='logfile',
+ help='(Optional) Name of log file to output to. '
+ 'If no default is set, logging will go to stdout.'),
+ cfg.StrOpt('log-dir',
+ deprecated_name='logdir',
+ help='(Optional) The base directory used for relative '
+ '--log-file paths.'),
+ cfg.BoolOpt('use-syslog',
+ default=False,
+ help='Use syslog for logging. '
+ 'Existing syslog format is DEPRECATED during I, '
+ 'and will change in J to honor RFC5424.'),
+ cfg.BoolOpt('use-syslog-rfc-format',
+ # TODO(bogdando) remove or use True after existing
+ # syslog format deprecation in J
+ default=False,
+ help='(Optional) Enables or disables syslog rfc5424 format '
+ 'for logging. If enabled, prefixes the MSG part of the '
+ 'syslog message with APP-NAME (RFC5424). The '
+ 'format without the APP-NAME is deprecated in I, '
+ 'and will be removed in J.'),
+ cfg.StrOpt('syslog-log-facility',
+ default='LOG_USER',
+ help='Syslog facility to receive log lines.')
+]
+
+generic_log_opts = [
+ cfg.BoolOpt('use_stderr',
+ default=True,
+ help='Log output to standard error.')
+]
+
+log_opts = [
+ cfg.StrOpt('logging_context_format_string',
+ default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s '
+ '%(name)s [%(request_id)s %(user_identity)s] '
+ '%(instance)s%(message)s',
+ help='Format string to use for log messages with context.'),
+ cfg.StrOpt('logging_default_format_string',
+ default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s '
+ '%(name)s [-] %(instance)s%(message)s',
+ help='Format string to use for log messages without context.'),
+ cfg.StrOpt('logging_debug_format_suffix',
+ default='%(funcName)s %(pathname)s:%(lineno)d',
+ help='Data to append to log format when level is DEBUG.'),
+ cfg.StrOpt('logging_exception_prefix',
+ default='%(asctime)s.%(msecs)03d %(process)d TRACE %(name)s '
+ '%(instance)s',
+ help='Prefix each line of exception output with this format.'),
+ cfg.ListOpt('default_log_levels',
+ default=[
+ 'amqp=WARN',
+ 'amqplib=WARN',
+ 'boto=WARN',
+ 'qpid=WARN',
+ 'sqlalchemy=WARN',
+ 'suds=INFO',
+ 'oslo.messaging=INFO',
+ 'iso8601=WARN',
+ 'requests.packages.urllib3.connectionpool=WARN'
+ ],
+ help='List of logger=LEVEL pairs.'),
+ cfg.BoolOpt('publish_errors',
+ default=False,
+ help='Enables or disables publication of error events.'),
+ cfg.BoolOpt('fatal_deprecations',
+ default=False,
+ help='Enables or disables fatal status of deprecations.'),
+
+ # NOTE(mikal): there are two options here because sometimes we are handed
+ # a full instance (and could include more information), and other times we
+ # are just handed a UUID for the instance.
+ cfg.StrOpt('instance_format',
+ default='[instance: %(uuid)s] ',
+ help='The format for an instance that is passed with the log '
+ 'message. '),
+ cfg.StrOpt('instance_uuid_format',
+ default='[instance: %(uuid)s] ',
+ help='The format for an instance UUID that is passed with the '
+ 'log message. '),
+]
+
+CONF = cfg.CONF
+CONF.register_cli_opts(common_cli_opts)
+CONF.register_cli_opts(logging_cli_opts)
+CONF.register_opts(generic_log_opts)
+CONF.register_opts(log_opts)
+
+# our new audit level
+# NOTE(jkoelker) Since we synthesized an audit level, make the logging
+# module aware of it so it acts like other levels.
+logging.AUDIT = logging.INFO + 1
+logging.addLevelName(logging.AUDIT, 'AUDIT')
+
+
+try:
+ NullHandler = logging.NullHandler
+except AttributeError: # NOTE(jkoelker) NullHandler added in Python 2.7
+ class NullHandler(logging.Handler):
+ def handle(self, record):
+ pass
+
+ def emit(self, record):
+ pass
+
+ def createLock(self):
+ self.lock = None
+
+
+def _dictify_context(context):
+ if context is None:
+ return None
+ if not isinstance(context, dict) and getattr(context, 'to_dict', None):
+ context = context.to_dict()
+ return context
+
+
+def _get_binary_name():
+ return os.path.basename(inspect.stack()[-1][1])
+
+
+def _get_log_file_path(binary=None):
+ logfile = CONF.log_file
+ logdir = CONF.log_dir
+
+ if logfile and not logdir:
+ return logfile
+
+ if logfile and logdir:
+ return os.path.join(logdir, logfile)
+
+ if logdir:
+ binary = binary or _get_binary_name()
+ return '%s.log' % (os.path.join(logdir, binary),)
+
+ return None
+
+
+def mask_password(message, secret="***"):
+ """Replace password with 'secret' in message.
+
+ :param message: The string which includes security information.
+ :param secret: value with which to replace passwords.
+ :returns: The unicode value of message with the password fields masked.
+
+ For example:
+
+ >>> mask_password("'adminPass' : 'aaaaa'")
+ "'adminPass' : '***'"
+ >>> mask_password("'admin_pass' : 'aaaaa'")
+ "'admin_pass' : '***'"
+ >>> mask_password('"password" : "aaaaa"')
+ '"password" : "***"'
+ >>> mask_password("'original_password' : 'aaaaa'")
+ "'original_password' : '***'"
+ >>> mask_password("u'original_password' : u'aaaaa'")
+ "u'original_password' : u'***'"
+ """
+ message = six.text_type(message)
+
+ # NOTE(ldbragst): Check to see if anything in message contains any key
+ # specified in _SANITIZE_KEYS, if not then just return the message since
+ # we don't have to mask any passwords.
+ if not any(key in message for key in _SANITIZE_KEYS):
+ return message
+
+ secret = r'\g<1>' + secret + r'\g<2>'
+ for pattern in _SANITIZE_PATTERNS:
+ message = re.sub(pattern, secret, message)
+ return message
+
+
+class BaseLoggerAdapter(logging.LoggerAdapter):
+
+ def audit(self, msg, *args, **kwargs):
+ self.log(logging.AUDIT, msg, *args, **kwargs)
+
+
+class LazyAdapter(BaseLoggerAdapter):
+ def __init__(self, name='unknown', version='unknown'):
+ self._logger = None
+ self.extra = {}
+ self.name = name
+ self.version = version
+
+ @property
+ def logger(self):
+ if not self._logger:
+ self._logger = getLogger(self.name, self.version)
+ return self._logger
+
+
+class ContextAdapter(BaseLoggerAdapter):
+ warn = logging.LoggerAdapter.warning
+
+ def __init__(self, logger, project_name, version_string):
+ self.logger = logger
+ self.project = project_name
+ self.version = version_string
+ self._deprecated_messages_sent = dict()
+
+ @property
+ def handlers(self):
+ return self.logger.handlers
+
+ def deprecated(self, msg, *args, **kwargs):
+ """Call this method when a deprecated feature is used.
+
+ If the system is configured for fatal deprecations then the message
+ is logged at the 'critical' level and :class:`DeprecatedConfig` will
+ be raised.
+
+ Otherwise, the message will be logged (once) at the 'warn' level.
+
+ :raises: :class:`DeprecatedConfig` if the system is configured for
+ fatal deprecations.
+
+ """
+ stdmsg = _("Deprecated: %s") % msg
+ if CONF.fatal_deprecations:
+ self.critical(stdmsg, *args, **kwargs)
+ raise DeprecatedConfig(msg=stdmsg)
+
+ # Using a list because a tuple with dict can't be stored in a set.
+ sent_args = self._deprecated_messages_sent.setdefault(msg, list())
+
+ if args in sent_args:
+ # Already logged this message, so don't log it again.
+ return
+
+ sent_args.append(args)
+ self.warn(stdmsg, *args, **kwargs)
+
+ def process(self, msg, kwargs):
+ # NOTE(mrodden): catch any Message/other object and
+ # coerce to unicode before they can get
+ # to the python logging and possibly
+ # cause string encoding trouble
+ if not isinstance(msg, six.string_types):
+ msg = six.text_type(msg)
+
+ if 'extra' not in kwargs:
+ kwargs['extra'] = {}
+ extra = kwargs['extra']
+
+ context = kwargs.pop('context', None)
+ if not context:
+ context = getattr(local.store, 'context', None)
+ if context:
+ extra.update(_dictify_context(context))
+
+ instance = kwargs.pop('instance', None)
+ instance_uuid = (extra.get('instance_uuid') or
+ kwargs.pop('instance_uuid', None))
+ instance_extra = ''
+ if instance:
+ instance_extra = CONF.instance_format % instance
+ elif instance_uuid:
+ instance_extra = (CONF.instance_uuid_format
+ % {'uuid': instance_uuid})
+ extra['instance'] = instance_extra
+
+ extra.setdefault('user_identity', kwargs.pop('user_identity', None))
+
+ extra['project'] = self.project
+ extra['version'] = self.version
+ extra['extra'] = extra.copy()
+ return msg, kwargs
+
+
+class JSONFormatter(logging.Formatter):
+ def __init__(self, fmt=None, datefmt=None):
+ # NOTE(jkoelker) we ignore the fmt argument, but its still there
+ # since logging.config.fileConfig passes it.
+ self.datefmt = datefmt
+
+ def formatException(self, ei, strip_newlines=True):
+ lines = traceback.format_exception(*ei)
+ if strip_newlines:
+ lines = [moves.filter(
+ lambda x: x,
+ line.rstrip().splitlines()) for line in lines]
+ lines = list(itertools.chain(*lines))
+ return lines
+
+ def format(self, record):
+ message = {'message': record.getMessage(),
+ 'asctime': self.formatTime(record, self.datefmt),
+ 'name': record.name,
+ 'msg': record.msg,
+ 'args': record.args,
+ 'levelname': record.levelname,
+ 'levelno': record.levelno,
+ 'pathname': record.pathname,
+ 'filename': record.filename,
+ 'module': record.module,
+ 'lineno': record.lineno,
+ 'funcname': record.funcName,
+ 'created': record.created,
+ 'msecs': record.msecs,
+ 'relative_created': record.relativeCreated,
+ 'thread': record.thread,
+ 'thread_name': record.threadName,
+ 'process_name': record.processName,
+ 'process': record.process,
+ 'traceback': None}
+
+ if hasattr(record, 'extra'):
+ message['extra'] = record.extra
+
+ if record.exc_info:
+ message['traceback'] = self.formatException(record.exc_info)
+
+ return jsonutils.dumps(message)
+
+
+def _create_logging_excepthook(product_name):
+ def logging_excepthook(exc_type, value, tb):
+ extra = {'exc_info': (exc_type, value, tb)}
+ getLogger(product_name).critical(
+ "".join(traceback.format_exception_only(exc_type, value)),
+ **extra)
+ return logging_excepthook
+
+
+class LogConfigError(Exception):
+
+ message = _('Error loading logging config %(log_config)s: %(err_msg)s')
+
+ def __init__(self, log_config, err_msg):
+ self.log_config = log_config
+ self.err_msg = err_msg
+
+ def __str__(self):
+ return self.message % dict(log_config=self.log_config,
+ err_msg=self.err_msg)
+
+
+def _load_log_config(log_config_append):
+ try:
+ logging.config.fileConfig(log_config_append,
+ disable_existing_loggers=False)
+ except moves.configparser.Error as exc:
+ raise LogConfigError(log_config_append, six.text_type(exc))
+
+
+def setup(product_name, version='unknown'):
+ """Setup logging."""
+ if CONF.log_config_append:
+ _load_log_config(CONF.log_config_append)
+ else:
+ _setup_logging_from_conf(product_name, version)
+ sys.excepthook = _create_logging_excepthook(product_name)
+
+
+def set_defaults(logging_context_format_string):
+ cfg.set_defaults(log_opts,
+ logging_context_format_string=
+ logging_context_format_string)
+
+
+def _find_facility_from_conf():
+ facility_names = logging.handlers.SysLogHandler.facility_names
+ facility = getattr(logging.handlers.SysLogHandler,
+ CONF.syslog_log_facility,
+ None)
+
+ if facility is None and CONF.syslog_log_facility in facility_names:
+ facility = facility_names.get(CONF.syslog_log_facility)
+
+ if facility is None:
+ valid_facilities = facility_names.keys()
+ consts = ['LOG_AUTH', 'LOG_AUTHPRIV', 'LOG_CRON', 'LOG_DAEMON',
+ 'LOG_FTP', 'LOG_KERN', 'LOG_LPR', 'LOG_MAIL', 'LOG_NEWS',
+ 'LOG_AUTH', 'LOG_SYSLOG', 'LOG_USER', 'LOG_UUCP',
+ 'LOG_LOCAL0', 'LOG_LOCAL1', 'LOG_LOCAL2', 'LOG_LOCAL3',
+ 'LOG_LOCAL4', 'LOG_LOCAL5', 'LOG_LOCAL6', 'LOG_LOCAL7']
+ valid_facilities.extend(consts)
+ raise TypeError(_('syslog facility must be one of: %s') %
+ ', '.join("'%s'" % fac
+ for fac in valid_facilities))
+
+ return facility
+
+
+class RFCSysLogHandler(logging.handlers.SysLogHandler):
+ def __init__(self, *args, **kwargs):
+ self.binary_name = _get_binary_name()
+ # Do not use super() unless type(logging.handlers.SysLogHandler)
+ # is 'type' (Python 2.7).
+ # Use old style calls, if the type is 'classobj' (Python 2.6)
+ logging.handlers.SysLogHandler.__init__(self, *args, **kwargs)
+
+ def format(self, record):
+ # Do not use super() unless type(logging.handlers.SysLogHandler)
+ # is 'type' (Python 2.7).
+ # Use old style calls, if the type is 'classobj' (Python 2.6)
+ msg = logging.handlers.SysLogHandler.format(self, record)
+ msg = self.binary_name + ' ' + msg
+ return msg
+
+
+def _setup_logging_from_conf(project, version):
+ log_root = getLogger(None).logger
+ for handler in log_root.handlers:
+ log_root.removeHandler(handler)
+
+ if CONF.use_syslog:
+ facility = _find_facility_from_conf()
+ # TODO(bogdando) use the format provided by RFCSysLogHandler
+ # after existing syslog format deprecation in J
+ if CONF.use_syslog_rfc_format:
+ syslog = RFCSysLogHandler(address='/dev/log',
+ facility=facility)
+ else:
+ syslog = logging.handlers.SysLogHandler(address='/dev/log',
+ facility=facility)
+ log_root.addHandler(syslog)
+
+ logpath = _get_log_file_path()
+ if logpath:
+ filelog = logging.handlers.WatchedFileHandler(logpath)
+ log_root.addHandler(filelog)
+
+ if CONF.use_stderr:
+ streamlog = ColorHandler()
+ log_root.addHandler(streamlog)
+
+ elif not logpath:
+ # pass sys.stdout as a positional argument
+ # python2.6 calls the argument strm, in 2.7 it's stream
+ streamlog = logging.StreamHandler(sys.stdout)
+ log_root.addHandler(streamlog)
+
+ if CONF.publish_errors:
+ handler = importutils.import_object(
+ "oslo.utils.openstack.common.log_handler.PublishErrorsHandler",
+ logging.ERROR)
+ log_root.addHandler(handler)
+
+ datefmt = CONF.log_date_format
+ for handler in log_root.handlers:
+ # NOTE(alaski): CONF.log_format overrides everything currently. This
+ # should be deprecated in favor of context aware formatting.
+ if CONF.log_format:
+ handler.setFormatter(logging.Formatter(fmt=CONF.log_format,
+ datefmt=datefmt))
+ log_root.info('Deprecated: log_format is now deprecated and will '
+ 'be removed in the next release')
+ else:
+ handler.setFormatter(ContextFormatter(project=project,
+ version=version,
+ datefmt=datefmt))
+
+ if CONF.debug:
+ log_root.setLevel(logging.DEBUG)
+ elif CONF.verbose:
+ log_root.setLevel(logging.INFO)
+ else:
+ log_root.setLevel(logging.WARNING)
+
+ for pair in CONF.default_log_levels:
+ mod, _sep, level_name = pair.partition('=')
+ logger = logging.getLogger(mod)
+ # NOTE(AAzza) in python2.6 Logger.setLevel doesn't convert string name
+ # to integer code.
+ if sys.version_info < (2, 7):
+ level = logging.getLevelName(level_name)
+ logger.setLevel(level)
+ else:
+ logger.setLevel(level_name)
+
+
+_loggers = {}
+
+
+def getLogger(name='unknown', version='unknown'):
+ if name not in _loggers:
+ _loggers[name] = ContextAdapter(logging.getLogger(name),
+ name,
+ version)
+ return _loggers[name]
+
+
+def getLazyLogger(name='unknown', version='unknown'):
+ """Returns lazy logger.
+
+ Creates a pass-through logger that does not create the real logger
+ until it is really needed and delegates all calls to the real logger
+ once it is created.
+ """
+ return LazyAdapter(name, version)
+
+
+class WritableLogger(object):
+ """A thin wrapper that responds to `write` and logs."""
+
+ def __init__(self, logger, level=logging.INFO):
+ self.logger = logger
+ self.level = level
+
+ def write(self, msg):
+ self.logger.log(self.level, msg.rstrip())
+
+
+class ContextFormatter(logging.Formatter):
+ """A context.RequestContext aware formatter configured through flags.
+
+ The flags used to set format strings are: logging_context_format_string
+ and logging_default_format_string. You can also specify
+ logging_debug_format_suffix to append extra formatting if the log level is
+ debug.
+
+ For information about what variables are available for the formatter see:
+ http://docs.python.org/library/logging.html#formatter
+
+ If available, uses the context value stored in TLS - local.store.context
+
+ """
+
+ def __init__(self, *args, **kwargs):
+ """Initialize ContextFormatter instance
+
+ Takes additional keyword arguments which can be used in the message
+ format string.
+
+ :keyword project: project name
+ :type project: string
+ :keyword version: project version
+ :type version: string
+
+ """
+
+ self.project = kwargs.pop('project', 'unknown')
+ self.version = kwargs.pop('version', 'unknown')
+
+ logging.Formatter.__init__(self, *args, **kwargs)
+
+ def format(self, record):
+ """Uses contextstring if request_id is set, otherwise default."""
+
+ # store project info
+ record.project = self.project
+ record.version = self.version
+
+ # store request info
+ context = getattr(local.store, 'context', None)
+ if context:
+ d = _dictify_context(context)
+ for k, v in d.items():
+ setattr(record, k, v)
+
+ # NOTE(sdague): default the fancier formatting params
+ # to an empty string so we don't throw an exception if
+ # they get used
+ for key in ('instance', 'color', 'user_identity'):
+ if key not in record.__dict__:
+ record.__dict__[key] = ''
+
+ if record.__dict__.get('request_id'):
+ fmt = CONF.logging_context_format_string
+ else:
+ fmt = CONF.logging_default_format_string
+
+ if (record.levelno == logging.DEBUG and
+ CONF.logging_debug_format_suffix):
+ fmt += " " + CONF.logging_debug_format_suffix
+
+ if sys.version_info < (3, 2):
+ self._fmt = fmt
+ else:
+ self._style = logging.PercentStyle(fmt)
+ self._fmt = self._style._fmt
+ # Cache this on the record, Logger will respect our formatted copy
+ if record.exc_info:
+ record.exc_text = self.formatException(record.exc_info, record)
+ return logging.Formatter.format(self, record)
+
+ def formatException(self, exc_info, record=None):
+ """Format exception output with CONF.logging_exception_prefix."""
+ if not record:
+ return logging.Formatter.formatException(self, exc_info)
+
+ stringbuffer = moves.StringIO()
+ traceback.print_exception(exc_info[0], exc_info[1], exc_info[2],
+ None, stringbuffer)
+ lines = stringbuffer.getvalue().split('\n')
+ stringbuffer.close()
+
+ if CONF.logging_exception_prefix.find('%(asctime)') != -1:
+ record.asctime = self.formatTime(record, self.datefmt)
+
+ formatted_lines = []
+ for line in lines:
+ pl = CONF.logging_exception_prefix % record.__dict__
+ fl = '%s%s' % (pl, line)
+ formatted_lines.append(fl)
+ return '\n'.join(formatted_lines)
+
+
+class ColorHandler(logging.StreamHandler):
+ LEVEL_COLORS = {
+ logging.DEBUG: '\033[00;32m', # GREEN
+ logging.INFO: '\033[00;36m', # CYAN
+ logging.AUDIT: '\033[01;36m', # BOLD CYAN
+ logging.WARN: '\033[01;33m', # BOLD YELLOW
+ logging.ERROR: '\033[01;31m', # BOLD RED
+ logging.CRITICAL: '\033[01;31m', # BOLD RED
+ }
+
+ def format(self, record):
+ record.color = self.LEVEL_COLORS[record.levelno]
+ return logging.StreamHandler.format(self, record)
+
+
+class DeprecatedConfig(Exception):
+ message = _("Fatal call to deprecated config: %(msg)s")
+
+ def __init__(self, msg):
+ super(Exception, self).__init__(self.message % dict(msg=msg))
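
A minimal wiring sketch for the copied logging module; 'example' is a placeholder project name, and the empty CONF call only applies the option defaults registered above:

    # Minimal logging setup sketch ('example' is a placeholder name).
    from oslo.config import cfg

    from oslo.utils.openstack.common import log as logging

    cfg.CONF([], project='example')   # apply option defaults, no CLI args
    logging.setup('example')
    LOG = logging.getLogger(__name__)
    LOG.warning('default level is WARNING, so this line is emitted')
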
diff --git a/oslo/utils/openstack/common/strutils.py b/oslo/utils/openstack/common/strutils.py
new file mode 100644
index 0000000..bcb9626
--- /dev/null
+++ b/oslo/utils/openstack/common/strutils.py
@@ -0,0 +1,239 @@
+# Copyright 2011 OpenStack Foundation.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""
+System-level utilities and helper functions.
+"""
+
+import math
+import re
+import sys
+import unicodedata
+
+import six
+
+from oslo.utils.openstack.common.gettextutils import _
+
+
+UNIT_PREFIX_EXPONENT = {
+ 'k': 1,
+ 'K': 1,
+ 'Ki': 1,
+ 'M': 2,
+ 'Mi': 2,
+ 'G': 3,
+ 'Gi': 3,
+ 'T': 4,
+ 'Ti': 4,
+}
+UNIT_SYSTEM_INFO = {
+ 'IEC': (1024, re.compile(r'(^[-+]?\d*\.?\d+)([KMGT]i?)?(b|bit|B)$')),
+ 'SI': (1000, re.compile(r'(^[-+]?\d*\.?\d+)([kMGT])?(b|bit|B)$')),
+}
+
+TRUE_STRINGS = ('1', 't', 'true', 'on', 'y', 'yes')
+FALSE_STRINGS = ('0', 'f', 'false', 'off', 'n', 'no')
+
+SLUGIFY_STRIP_RE = re.compile(r"[^\w\s-]")
+SLUGIFY_HYPHENATE_RE = re.compile(r"[-\s]+")
+
+
+def int_from_bool_as_string(subject):
+ """Interpret a string as a boolean and return either 1 or 0.
+
+ Any string value in:
+
+ ('True', 'true', 'On', 'on', '1')
+
+ is interpreted as a boolean True.
+
+    Useful for JSON-decoded stuff and config file parsing.
+ """
+ return bool_from_string(subject) and 1 or 0
+
+
+def bool_from_string(subject, strict=False, default=False):
+ """Interpret a string as a boolean.
+
+ A case-insensitive match is performed such that strings matching 't',
+ 'true', 'on', 'y', 'yes', or '1' are considered True and, when
+ `strict=False`, anything else returns the value specified by 'default'.
+
+ Useful for JSON-decoded stuff and config file parsing.
+
+ If `strict=True`, unrecognized values, including None, will raise a
+ ValueError which is useful when parsing values passed in from an API call.
+ Strings yielding False are 'f', 'false', 'off', 'n', 'no', or '0'.
+ """
+ if not isinstance(subject, six.string_types):
+ subject = six.text_type(subject)
+
+ lowered = subject.strip().lower()
+
+ if lowered in TRUE_STRINGS:
+ return True
+ elif lowered in FALSE_STRINGS:
+ return False
+ elif strict:
+ acceptable = ', '.join(
+ "'%s'" % s for s in sorted(TRUE_STRINGS + FALSE_STRINGS))
+ msg = _("Unrecognized value '%(val)s', acceptable values are:"
+ " %(acceptable)s") % {'val': subject,
+ 'acceptable': acceptable}
+ raise ValueError(msg)
+ else:
+ return default
+
+
+def safe_decode(text, incoming=None, errors='strict'):
+ """Decodes incoming text/bytes string using `incoming` if they're not
+ already unicode.
+
+ :param incoming: Text's current encoding
+ :param errors: Errors handling policy. See here for valid
+ values http://docs.python.org/2/library/codecs.html
+    :returns: text or a unicode representation of it, decoded with
+                `incoming`.
+ :raises TypeError: If text is not an instance of str
+ """
+ if not isinstance(text, (six.string_types, six.binary_type)):
+ raise TypeError("%s can't be decoded" % type(text))
+
+ if isinstance(text, six.text_type):
+ return text
+
+ if not incoming:
+ incoming = (sys.stdin.encoding or
+ sys.getdefaultencoding())
+
+ try:
+ return text.decode(incoming, errors)
+ except UnicodeDecodeError:
+ # Note(flaper87) If we get here, it means that
+ # sys.stdin.encoding / sys.getdefaultencoding
+ # didn't return a suitable encoding to decode
+ # text. This happens mostly when global LANG
+ # var is not set correctly and there's no
+ # default encoding. In this case, most likely
+ # python will use ASCII or ANSI encoders as
+ # default encodings but they won't be capable
+ # of decoding non-ASCII characters.
+ #
+ # Also, UTF-8 is being used since it's an ASCII
+ # extension.
+ return text.decode('utf-8', errors)
+
+
+def safe_encode(text, incoming=None,
+ encoding='utf-8', errors='strict'):
+ """Encodes incoming text/bytes string using `encoding`.
+
+ If incoming is not specified, text is expected to be encoded with
+    Python's current default encoding (`sys.getdefaultencoding`).
+
+ :param incoming: Text's current encoding
+ :param encoding: Expected encoding for text (Default UTF-8)
+ :param errors: Errors handling policy. See here for valid
+ values http://docs.python.org/2/library/codecs.html
+    :returns: text or a bytestring representation of it, encoded with
+                `encoding`.
+ :raises TypeError: If text is not an instance of str
+ """
+ if not isinstance(text, (six.string_types, six.binary_type)):
+ raise TypeError("%s can't be encoded" % type(text))
+
+ if not incoming:
+ incoming = (sys.stdin.encoding or
+ sys.getdefaultencoding())
+
+ if isinstance(text, six.text_type):
+ return text.encode(encoding, errors)
+ elif text and encoding != incoming:
+ # Decode text before encoding it with `encoding`
+ text = safe_decode(text, incoming, errors)
+ return text.encode(encoding, errors)
+ else:
+ return text
+
+
+def string_to_bytes(text, unit_system='IEC', return_int=False):
+ """Converts a string into an float representation of bytes.
+
+ The units supported for IEC ::
+
+ Kb(it), Kib(it), Mb(it), Mib(it), Gb(it), Gib(it), Tb(it), Tib(it)
+ KB, KiB, MB, MiB, GB, GiB, TB, TiB
+
+ The units supported for SI ::
+
+ kb(it), Mb(it), Gb(it), Tb(it)
+ kB, MB, GB, TB
+
+    Note that the SI unit system does not support capital letter 'K'.
+
+ :param text: String input for bytes size conversion.
+ :param unit_system: Unit system for byte size conversion.
+ :param return_int: If True, returns integer representation of text
+ in bytes. (default: decimal)
+ :returns: Numerical representation of text in bytes.
+ :raises ValueError: If text has an invalid value.
+
+ """
+ try:
+ base, reg_ex = UNIT_SYSTEM_INFO[unit_system]
+ except KeyError:
+ msg = _('Invalid unit system: "%s"') % unit_system
+ raise ValueError(msg)
+ match = reg_ex.match(text)
+ if match:
+ magnitude = float(match.group(1))
+ unit_prefix = match.group(2)
+ if match.group(3) in ['b', 'bit']:
+ magnitude /= 8
+ else:
+ msg = _('Invalid string format: %s') % text
+ raise ValueError(msg)
+ if not unit_prefix:
+ res = magnitude
+ else:
+ res = magnitude * pow(base, UNIT_PREFIX_EXPONENT[unit_prefix])
+ if return_int:
+ return int(math.ceil(res))
+ return res
+
+
+def to_slug(value, incoming=None, errors="strict"):
+ """Normalize string.
+
+ Convert to lowercase, remove non-word characters, and convert spaces
+ to hyphens.
+
+ Inspired by Django's `slugify` filter.
+
+ :param value: Text to slugify
+ :param incoming: Text's current encoding
+ :param errors: Errors handling policy. See here for valid
+ values http://docs.python.org/2/library/codecs.html
+ :returns: slugified unicode representation of `value`
+ :raises TypeError: If text is not an instance of str
+ """
+ value = safe_decode(value, incoming, errors)
+ # NOTE(aababilov): no need to use safe_(encode|decode) here:
+ # encodings are always "ascii", error handling is always "ignore"
+ # and types are always known (first: unicode; second: str)
+ value = unicodedata.normalize("NFKD", value).encode(
+ "ascii", "ignore").decode("ascii")
+ value = SLUGIFY_STRIP_RE.sub("", value).strip().lower()
+ return SLUGIFY_HYPHENATE_RE.sub("-", value)
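A few usage examples for the helpers added above. The import path assumes the oslo namespace package from this tree is installed; the expected values follow directly from the regexes and tables defined in the module:

    # Illustrative only; import path assumes this tree's oslo namespace
    # package is installed.
    from oslo.utils.openstack.common import strutils

    print(strutils.bool_from_string('YES'))                   # True
    print(strutils.bool_from_string('maybe'))                 # False (default)
    print(strutils.int_from_bool_as_string('true'))           # 1

    print(strutils.string_to_bytes('1KB'))                    # 1024.0 (IEC)
    print(strutils.string_to_bytes('1kB', unit_system='SI'))  # 1000.0 (SI)
    print(strutils.string_to_bytes('8bit'))                   # 1.0 (bits / 8)

    print(strutils.to_slug('Hello, World!'))                  # hello-world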
diff --git a/oslo/utils/openstack/common/timeutils.py b/oslo/utils/openstack/common/timeutils.py
new file mode 100644
index 0000000..c48da95
--- /dev/null
+++ b/oslo/utils/openstack/common/timeutils.py
@@ -0,0 +1,210 @@
+# Copyright 2011 OpenStack Foundation.
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""
+Time related utilities and helper functions.
+"""
+
+import calendar
+import datetime
+import time
+
+import iso8601
+import six
+
+
+# ISO 8601 extended time format with microseconds
+_ISO8601_TIME_FORMAT_SUBSECOND = '%Y-%m-%dT%H:%M:%S.%f'
+_ISO8601_TIME_FORMAT = '%Y-%m-%dT%H:%M:%S'
+PERFECT_TIME_FORMAT = _ISO8601_TIME_FORMAT_SUBSECOND
+
+
+def isotime(at=None, subsecond=False):
+ """Stringify time in ISO 8601 format."""
+ if not at:
+ at = utcnow()
+ st = at.strftime(_ISO8601_TIME_FORMAT
+ if not subsecond
+ else _ISO8601_TIME_FORMAT_SUBSECOND)
+ tz = at.tzinfo.tzname(None) if at.tzinfo else 'UTC'
+ st += ('Z' if tz == 'UTC' else tz)
+ return st
+
+
+def parse_isotime(timestr):
+ """Parse time from ISO 8601 format."""
+ try:
+ return iso8601.parse_date(timestr)
+ except iso8601.ParseError as e:
+ raise ValueError(six.text_type(e))
+ except TypeError as e:
+ raise ValueError(six.text_type(e))
+
+
+def strtime(at=None, fmt=PERFECT_TIME_FORMAT):
+ """Returns formatted utcnow."""
+ if not at:
+ at = utcnow()
+ return at.strftime(fmt)
+
+
+def parse_strtime(timestr, fmt=PERFECT_TIME_FORMAT):
+ """Turn a formatted time back into a datetime."""
+ return datetime.datetime.strptime(timestr, fmt)
+
+
+def normalize_time(timestamp):
+ """Normalize time in arbitrary timezone to UTC naive object."""
+ offset = timestamp.utcoffset()
+ if offset is None:
+ return timestamp
+ return timestamp.replace(tzinfo=None) - offset
+
+
+def is_older_than(before, seconds):
+ """Return True if before is older than seconds."""
+ if isinstance(before, six.string_types):
+ before = parse_strtime(before).replace(tzinfo=None)
+ else:
+ before = before.replace(tzinfo=None)
+
+ return utcnow() - before > datetime.timedelta(seconds=seconds)
+
+
+def is_newer_than(after, seconds):
+ """Return True if after is newer than seconds."""
+ if isinstance(after, six.string_types):
+ after = parse_strtime(after).replace(tzinfo=None)
+ else:
+ after = after.replace(tzinfo=None)
+
+ return after - utcnow() > datetime.timedelta(seconds=seconds)
+
+
+def utcnow_ts():
+ """Timestamp version of our utcnow function."""
+ if utcnow.override_time is None:
+ # NOTE(kgriffs): This is several times faster
+ # than going through calendar.timegm(...)
+ return int(time.time())
+
+ return calendar.timegm(utcnow().timetuple())
+
+
+def utcnow():
+ """Overridable version of utils.utcnow."""
+ if utcnow.override_time:
+ try:
+ return utcnow.override_time.pop(0)
+ except AttributeError:
+ return utcnow.override_time
+ return datetime.datetime.utcnow()
+
+
+def iso8601_from_timestamp(timestamp):
+ """Returns an iso8601 formatted date from timestamp."""
+ return isotime(datetime.datetime.utcfromtimestamp(timestamp))
+
+
+utcnow.override_time = None
+
+
+def set_time_override(override_time=None):
+ """Overrides utils.utcnow.
+
+ Make it return a constant time or a list thereof, one at a time.
+
+ :param override_time: datetime instance or list thereof. If not
+ given, defaults to the current UTC time.
+ """
+ utcnow.override_time = override_time or datetime.datetime.utcnow()
+
+
+def advance_time_delta(timedelta):
+ """Advance overridden time using a datetime.timedelta."""
+ assert utcnow.override_time is not None
+ try:
+ for dt in utcnow.override_time:
+ dt += timedelta
+ except TypeError:
+ utcnow.override_time += timedelta
+
+
+def advance_time_seconds(seconds):
+ """Advance overridden time by seconds."""
+ advance_time_delta(datetime.timedelta(0, seconds))
+
+
+def clear_time_override():
+ """Remove the overridden time."""
+ utcnow.override_time = None
+
+
+def marshall_now(now=None):
+ """Make an rpc-safe datetime with microseconds.
+
+ Note: tzinfo is stripped, but not required for relative times.
+ """
+ if not now:
+ now = utcnow()
+ return dict(day=now.day, month=now.month, year=now.year, hour=now.hour,
+ minute=now.minute, second=now.second,
+ microsecond=now.microsecond)
+
+
+def unmarshall_time(tyme):
+ """Unmarshall a datetime dict."""
+ return datetime.datetime(day=tyme['day'],
+ month=tyme['month'],
+ year=tyme['year'],
+ hour=tyme['hour'],
+ minute=tyme['minute'],
+ second=tyme['second'],
+ microsecond=tyme['microsecond'])
+
+
+def delta_seconds(before, after):
+ """Return the difference between two timing objects.
+
+ Compute the difference in seconds between two date, time, or
+ datetime objects (as a float, to microsecond resolution).
+ """
+ delta = after - before
+ return total_seconds(delta)
+
+
+def total_seconds(delta):
+ """Return the total seconds of datetime.timedelta object.
+
+    Compute the total seconds of a datetime.timedelta. datetime.timedelta
+    has no total_seconds() method in Python 2.6, so calculate it manually.
+ """
+ try:
+ return delta.total_seconds()
+ except AttributeError:
+ return ((delta.days * 24 * 3600) + delta.seconds +
+ float(delta.microseconds) / (10 ** 6))
+
+
+def is_soon(dt, window):
+ """Determines if time is going to happen in the next window seconds.
+
+ :param dt: the time
+    :param window: minimum number of seconds that must remain for the
+                   time to be considered not soon
+
+ :return: True if expiration is within the given duration
+ """
+ soon = (utcnow() + datetime.timedelta(seconds=window))
+ return normalize_time(dt) <= soon
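The override helpers above exist so tests can pin and advance "now" without touching the clock. A short sketch of how they combine, again assuming the module is importable from this tree:

    import datetime

    # Illustrative only; import path assumes this tree's oslo namespace
    # package is installed.
    from oslo.utils.openstack.common import timeutils

    fixed = datetime.datetime(2014, 6, 18, 23, 10, 41)
    timeutils.set_time_override(fixed)
    try:
        print(timeutils.utcnow())     # 2014-06-18 23:10:41
        print(timeutils.isotime())    # 2014-06-18T23:10:41Z

        timeutils.advance_time_seconds(90)
        print(timeutils.utcnow())     # 2014-06-18 23:12:11
        print(timeutils.delta_seconds(fixed, timeutils.utcnow()))  # 90.0

        # marshall_now/unmarshall_time round-trip an rpc-safe dict.
        stamp = timeutils.marshall_now(fixed)
        print(timeutils.unmarshall_time(stamp) == fixed)  # True
    finally:
        timeutils.clear_time_override()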
diff --git a/oslo/utils/openstack/common/units.py b/oslo/utils/openstack/common/units.py
new file mode 100644
index 0000000..84b518c
--- /dev/null
+++ b/oslo/utils/openstack/common/units.py
@@ -0,0 +1,38 @@
+# Copyright 2013 IBM Corp
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""
+Unit constants
+"""
+
+# Binary unit constants.
+Ki = 1024
+Mi = 1024 ** 2
+Gi = 1024 ** 3
+Ti = 1024 ** 4
+Pi = 1024 ** 5
+Ei = 1024 ** 6
+Zi = 1024 ** 7
+Yi = 1024 ** 8
+
+# Decimal unit constants.
+k = 1000
+M = 1000 ** 2
+G = 1000 ** 3
+T = 1000 ** 4
+P = 1000 ** 5
+E = 1000 ** 6
+Z = 1000 ** 7
+Y = 1000 ** 8
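Because the constants above are plain integers, size arithmetic stays readable; for example (same import-path assumption as above):

    from oslo.utils.openstack.common import units

    disk_bytes = 20 * units.Gi      # 21474836480 (binary gigabytes, GiB)
    quota_bytes = 5 * units.T       # 5000000000000 (decimal terabytes, TB)
    print(disk_bytes // units.Mi)   # 20480 MiB in a 20 GiB disk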
diff --git a/oslo/utils/strutils.py b/oslo/utils/strutils.py
index b49184e..bcb9626 100644
--- a/oslo/utils/strutils.py
+++ b/oslo/utils/strutils.py
@@ -24,7 +24,7 @@ import unicodedata
import six
-from openstack.common.gettextutils import _
+from oslo.utils.openstack.common.gettextutils import _
UNIT_PREFIX_EXPONENT = {
diff --git a/requirements.txt b/requirements.txt
index dbb4dd1..f85b737 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1 +1,4 @@
-Babel>=0.9.6 \ No newline at end of file
+Babel>=0.9.6
+six>=1.6.0
+iso8601>=0.1.9
+oslo.config>=1.2.0
diff --git a/setup.cfg b/setup.cfg
index 2ac4f87..fb1e954 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,6 +1,6 @@
[metadata]
name = oslo.utils
-summary = oslo.utils library
+summary = Oslo Utility library
description-file =
README.rst
author = OpenStack
@@ -22,7 +22,6 @@ classifier =
[files]
packages =
oslo
- oslo.utils
namespace_packages =
oslo
@@ -46,4 +45,4 @@ input_file = oslo.utils/locale/oslo.utils.pot
[extract_messages]
keywords = _ gettext ngettext l_ lazy_gettext
mapping_file = babel.cfg
-output_file = oslo.utils/locale/oslo.utils.pot \ No newline at end of file
+output_file = oslo.utils/locale/oslo.utils.pot
diff --git a/setup.py b/setup.py
index 7eeb36b..70c2b3f 100755
--- a/setup.py
+++ b/setup.py
@@ -19,4 +19,4 @@ import setuptools
setuptools.setup(
setup_requires=['pbr'],
- pbr=True) \ No newline at end of file
+ pbr=True)
diff --git a/test-requirements.txt b/test-requirements.txt
index 41eb968..7b64565 100644
--- a/test-requirements.txt
+++ b/test-requirements.txt
@@ -1,3 +1,21 @@
-hacking>=0.5.6,<0.8
+hacking>=0.9.1,<0.10
+
+discover
+fixtures>=0.3.14
+python-subunit>=0.0.18
+testrepository>=0.0.18
+testscenarios>=0.4
+testtools>=0.9.34
+oslotest
+
+# when we can require tox>= 1.4, this can go into tox.ini:
+# [testenv:cover]
+# deps = {[testenv]deps} coverage
+coverage>=3.6
+
+# this is required for the docs build jobs
+sphinx>=1.2.1,<1.3
oslosphinx
-oslotest \ No newline at end of file
+
+# mocking framework
+mock>=1.0
diff --git a/tests/__init__.py b/tests/__init__.py
index f88664e..19f5e72 100644
--- a/tests/__init__.py
+++ b/tests/__init__.py
@@ -10,4 +10,4 @@
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
-# under the License. \ No newline at end of file
+# under the License.
diff --git a/tests/base.py b/tests/base.py
index f9a09a8..a3069ed 100644
--- a/tests/base.py
+++ b/tests/base.py
@@ -24,6 +24,7 @@ _TRUE_VALUES = ('true', '1', 'yes')
# FIXME(dhellmann) Update this to use oslo.test library
+
class TestCase(testtools.TestCase):
"""Test case base class for all unit tests."""
@@ -51,4 +52,4 @@ class TestCase(testtools.TestCase):
stderr = self.useFixture(fixtures.StringStream('stderr')).stream
self.useFixture(fixtures.MonkeyPatch('sys.stderr', stderr))
- self.log_fixture = self.useFixture(fixtures.FakeLogger()) \ No newline at end of file
+ self.log_fixture = self.useFixture(fixtures.FakeLogger())
diff --git a/tests/fake/__init__.py b/tests/fake/__init__.py
new file mode 100644
index 0000000..06cc944
--- /dev/null
+++ b/tests/fake/__init__.py
@@ -0,0 +1,23 @@
+# Copyright 2012 IBM Corp.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+
+class FakeDriver():
+ def __init__(self, first_arg=True):
+ self.first_arg = first_arg
+
+
+class FakeDriver2():
+ def __init__(self, first_arg):
+ self.first_arg = first_arg
diff --git a/tests/unit/test_excutils.py b/tests/test_excutils.py
index 53e7daf..b38e7ea 100644
--- a/tests/unit/test_excutils.py
+++ b/tests/test_excutils.py
@@ -19,7 +19,7 @@ import mock
from oslotest import base as test_base
from oslotest import moxstubout
-from openstack.common import excutils
+from oslo.utils import excutils
mox = moxstubout.mox
diff --git a/tests/unit/test_importutils.py b/tests/test_importutils.py
index 88cc735..579bf76 100644
--- a/tests/unit/test_importutils.py
+++ b/tests/test_importutils.py
@@ -18,7 +18,7 @@ import sys
from oslotest import base as test_base
-from openstack.common import importutils
+from oslo.utils import importutils
class ImportUtilsTest(test_base.BaseTestCase):
@@ -39,64 +39,64 @@ class ImportUtilsTest(test_base.BaseTestCase):
self.assertEqual(sys.modules['datetime'], dt)
def test_import_object_optional_arg_not_present(self):
- obj = importutils.import_object('tests.unit.fake.FakeDriver')
+ obj = importutils.import_object('tests.fake.FakeDriver')
self.assertEqual(obj.__class__.__name__, 'FakeDriver')
def test_import_object_optional_arg_present(self):
- obj = importutils.import_object('tests.unit.fake.FakeDriver',
+ obj = importutils.import_object('tests.fake.FakeDriver',
first_arg=False)
self.assertEqual(obj.__class__.__name__, 'FakeDriver')
def test_import_object_required_arg_not_present(self):
# arg 1 isn't optional here
self.assertRaises(TypeError, importutils.import_object,
- 'tests.unit.fake.FakeDriver2')
+ 'tests.fake.FakeDriver2')
def test_import_object_required_arg_present(self):
- obj = importutils.import_object('tests.unit.fake.FakeDriver2',
+ obj = importutils.import_object('tests.fake.FakeDriver2',
first_arg=False)
self.assertEqual(obj.__class__.__name__, 'FakeDriver2')
# namespace tests
def test_import_object_ns_optional_arg_not_present(self):
- obj = importutils.import_object_ns('tests.unit', 'fake.FakeDriver')
+ obj = importutils.import_object_ns('tests', 'fake.FakeDriver')
self.assertEqual(obj.__class__.__name__, 'FakeDriver')
def test_import_object_ns_optional_arg_present(self):
- obj = importutils.import_object_ns('tests.unit', 'fake.FakeDriver',
+ obj = importutils.import_object_ns('tests', 'fake.FakeDriver',
first_arg=False)
self.assertEqual(obj.__class__.__name__, 'FakeDriver')
def test_import_object_ns_required_arg_not_present(self):
# arg 1 isn't optional here
self.assertRaises(TypeError, importutils.import_object_ns,
- 'tests.unit', 'fake.FakeDriver2')
+ 'tests', 'fake.FakeDriver2')
def test_import_object_ns_required_arg_present(self):
- obj = importutils.import_object_ns('tests.unit', 'fake.FakeDriver2',
+ obj = importutils.import_object_ns('tests', 'fake.FakeDriver2',
first_arg=False)
self.assertEqual(obj.__class__.__name__, 'FakeDriver2')
# namespace tests
def test_import_object_ns_full_optional_arg_not_present(self):
- obj = importutils.import_object_ns('tests.unit2',
- 'tests.unit.fake.FakeDriver')
+ obj = importutils.import_object_ns('tests2',
+ 'tests.fake.FakeDriver')
self.assertEqual(obj.__class__.__name__, 'FakeDriver')
def test_import_object_ns_full_optional_arg_present(self):
- obj = importutils.import_object_ns('tests.unit2',
- 'tests.unit.fake.FakeDriver',
+ obj = importutils.import_object_ns('tests2',
+ 'tests.fake.FakeDriver',
first_arg=False)
self.assertEqual(obj.__class__.__name__, 'FakeDriver')
def test_import_object_ns_full_required_arg_not_present(self):
# arg 1 isn't optional here
self.assertRaises(TypeError, importutils.import_object_ns,
- 'tests.unit2', 'tests.unit.fake.FakeDriver2')
+ 'tests2', 'tests.fake.FakeDriver2')
def test_import_object_ns_full_required_arg_present(self):
- obj = importutils.import_object_ns('tests.unit2',
- 'tests.unit.fake.FakeDriver2',
+ obj = importutils.import_object_ns('tests2',
+ 'tests.fake.FakeDriver2',
first_arg=False)
self.assertEqual(obj.__class__.__name__, 'FakeDriver2')
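The relocated tests above exercise the import helpers against the new tests.fake package; stripped of assertions, the calls they rely on amount to the sketch below. It assumes the repository root is on sys.path, and the note on import_object_ns is inferred from the ns_full test cases rather than from the implementation:

    from oslo.utils import importutils

    # Dotted path -> instance, keyword arguments forwarded to the constructor.
    drv = importutils.import_object('tests.fake.FakeDriver', first_arg=False)
    print(drv.__class__.__name__, drv.first_arg)    # FakeDriver False

    # Namespace form: the ns_full tests suggest '<namespace>.<name>' is tried
    # first, with a fallback to the plain name.
    drv2 = importutils.import_object_ns('tests', 'fake.FakeDriver2',
                                        first_arg=True)
    print(drv2.__class__.__name__, drv2.first_arg)  # FakeDriver2 True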
diff --git a/tests/unit/test_network_utils.py b/tests/test_network_utils.py
index 5865396..5e9d862 100644
--- a/tests/unit/test_network_utils.py
+++ b/tests/test_network_utils.py
@@ -18,7 +18,7 @@ import socket
import mock
from oslotest import base as test_base
-from openstack.common import network_utils
+from oslo.utils import network_utils
class NetworkUtilsTest(test_base.BaseTestCase):
@@ -107,11 +107,24 @@ class NetworkUtilsTest(test_base.BaseTestCase):
mock_sock = mock.Mock()
network_utils.set_tcp_keepalive(mock_sock, True, 100, 10, 5)
calls = [
- mock.call.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, True),
- mock.call.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPIDLE, 100),
- mock.call.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPINTVL, 10),
- mock.call.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPCNT, 5)
+ mock.call.setsockopt(socket.SOL_SOCKET,
+ socket.SO_KEEPALIVE, True),
]
+ if hasattr(socket, 'TCP_KEEPIDLE'):
+ calls += [
+ mock.call.setsockopt(socket.IPPROTO_TCP,
+ socket.TCP_KEEPIDLE, 100)
+ ]
+ if hasattr(socket, 'TCP_KEEPINTVL'):
+ calls += [
+ mock.call.setsockopt(socket.IPPROTO_TCP,
+ socket.TCP_KEEPINTVL, 10),
+ ]
+ if hasattr(socket, 'TCP_KEEPCNT'):
+ calls += [
+ mock.call.setsockopt(socket.IPPROTO_TCP,
+ socket.TCP_KEEPCNT, 5)
+ ]
mock_sock.assert_has_calls(calls)
mock_sock.reset_mock()
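The rewritten assertions only expect the TCP_KEEP* options on platforms whose socket module defines them, which implies set_tcp_keepalive guards each option the same way. A standalone sketch of that portable pattern (not the oslo.utils implementation itself):

    import socket

    # SO_KEEPALIVE is always set; the TCP_KEEP* knobs only where the platform
    # exposes them, mirroring what the updated test expects.
    def set_keepalive(sock, idle=100, interval=10, count=5):
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, True)
        if hasattr(socket, 'TCP_KEEPIDLE'):
            sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPIDLE, idle)
        if hasattr(socket, 'TCP_KEEPINTVL'):
            sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPINTVL, interval)
        if hasattr(socket, 'TCP_KEEPCNT'):
            sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPCNT, count)

    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    set_keepalive(sock)
    sock.close()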
diff --git a/tests/unit/test_strutils.py b/tests/test_strutils.py
index a97103c..0ee6bc6 100644
--- a/tests/unit/test_strutils.py
+++ b/tests/test_strutils.py
@@ -22,8 +22,8 @@ from oslotest import base as test_base
import six
import testscenarios
-from openstack.common import strutils
-from openstack.common import units
+from oslo.utils.openstack.common import units
+from oslo.utils import strutils
load_tests = testscenarios.load_tests_apply_scenarios
diff --git a/tests/unit/test_timeutils.py b/tests/test_timeutils.py
index d5aa5b1..fef980f 100644
--- a/tests/unit/test_timeutils.py
+++ b/tests/test_timeutils.py
@@ -22,7 +22,7 @@ import mock
from oslotest import base as test_base
from testtools import matchers
-from openstack.common import timeutils
+from oslo.utils import timeutils
class TimeUtilsTest(test_base.BaseTestCase):
diff --git a/tests/test_utils.py b/tests/test_utils.py
index 0f845bc..19c7d9c 100644
--- a/tests/test_utils.py
+++ b/tests/test_utils.py
@@ -25,4 +25,4 @@ from . import base
class TestUtils(base.TestCase):
def test_something(self):
- pass \ No newline at end of file
+ pass
diff --git a/tools/run_cross_tests.sh b/tools/run_cross_tests.sh
new file mode 100755
index 0000000..5e7bc11
--- /dev/null
+++ b/tools/run_cross_tests.sh
@@ -0,0 +1,91 @@
+#!/bin/bash
+#
+# Run cross-project tests
+#
+# Usage:
+#
+# run_cross_tests.sh project_dir venv
+
+# Fail the build if any command fails
+set -e
+
+project_dir="$1"
+venv="$2"
+
+if [ -z "$project_dir" -o -z "$venv" ]
+then
+ cat - <<EOF
+ERROR: Missing argument(s)
+
+Usage:
+
+ $0 PROJECT_DIR VIRTUAL_ENV
+
+For example, to run the python 2.7 tests for python-neutronclient:
+
+ $0 /opt/stack/python-neutronclient py27
+
+EOF
+ exit 1
+fi
+
+# Set up the virtualenv without running the tests
+(cd $project_dir && tox --notest -e $venv)
+
+tox_envbin=$project_dir/.tox/$venv/bin
+
+our_name=$(python setup.py --name)
+
+# Replace the pip-installed package with the version in our source
+# tree. Look to see if we are already installed before trying to
+# uninstall ourselves, to avoid failures from packages that do not use us
+# yet.
+if $tox_envbin/pip freeze | grep -q $our_name
+then
+ $tox_envbin/pip uninstall -y $our_name
+fi
+$tox_envbin/pip install -U .
+
+# Run the tests
+(cd $project_dir && tox -e $venv)
+result=$?
+
+
+# The below checks are modified from
+# openstack-infra/config/modules/jenkins/files/slave_scripts/run-unittests.sh.
+
+# They expect to be run in the project being tested.
+cd $project_dir
+
+echo "Begin pip freeze output from test virtualenv:"
+echo "======================================================================"
+.tox/$venv/bin/pip freeze
+echo "======================================================================"
+
+# We only want to run the next check if the tool is installed, so look
+# for it before continuing.
+if [ -f /usr/local/jenkins/slave_scripts/subunit2html.py -a -d ".testrepository" ] ; then
+ if [ -f ".testrepository/0.2" ] ; then
+ cp .testrepository/0.2 ./subunit_log.txt
+ elif [ -f ".testrepository/0" ] ; then
+ .tox/$venv/bin/subunit-1to2 < .testrepository/0 > ./subunit_log.txt
+ fi
+ .tox/$venv/bin/python /usr/local/jenkins/slave_scripts/subunit2html.py ./subunit_log.txt testr_results.html
+ gzip -9 ./subunit_log.txt
+ gzip -9 ./testr_results.html
+
+ export PYTHON=.tox/$venv/bin/python
+ set -e
+ rancount=$(.tox/$venv/bin/testr last | sed -ne 's/Ran \([0-9]\+\).*tests in.*/\1/p')
+ if [ "$rancount" -eq "0" ] ; then
+ echo
+ echo "Zero tests were run. At least one test should have been run."
+ echo "Failing this test as a result"
+ echo
+ exit 1
+ fi
+fi
+
+# If we make it this far, report status based on the tests that were
+# run.
+exit $result
diff --git a/tox.ini b/tox.ini
index 83f4dac..fb30fb4 100644
--- a/tox.ini
+++ b/tox.ini
@@ -30,6 +30,6 @@ commands = python setup.py testr --coverage --testr-args='{posargs}'
# E123, E125 skipped as they are invalid PEP-8.
show-source = True
-ignore = E123,E125,H803
+ignore = E123,E125,H304,H405,H803
builtins = _
-exclude=.venv,.git,.tox,dist,doc,*openstack/common*,*lib/python*,*egg,build \ No newline at end of file
+exclude=.venv,.git,.tox,dist,doc,*openstack/common*,*lib/python*,*egg,build,__init__.py