author     Jenkins <jenkins@review.openstack.org>    2014-09-26 09:41:35 +0000
committer  Gerrit Code Review <review@openstack.org>    2014-09-26 09:41:36 +0000
commit     02884174d55aa49fbf221b120fc324153529b20f (patch)
tree       7b66c1c8f268133e03558619167f9f087990d7a4
parent     dbf117aaf5eaf59a8b5d3c0516add2b708ce3e73 (diff)
parent     aaa51fd689db73f40088eba7a07ac8a131aad528 (diff)
Merge "Switch to using oslo.utils and oslo.serialization"
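
The pattern applied throughout this merge: each utility module previously copied in from oslo-incubator is replaced by its graduated library equivalent. A rough sketch of the import swap, with module names taken from the hunks below (no new helpers are introduced):

    # Before: utility modules copied from oslo-incubator into the tree.
    from taskflow.openstack.common import excutils
    from taskflow.openstack.common import jsonutils
    from taskflow.openstack.common import timeutils

    # After: the same utilities consumed from the released libraries.
    from oslo.serialization import jsonutils
    from oslo.utils import excutils
    from oslo.utils import timeutils
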
-rw-r--r--  openstack-common.conf                              |    8
-rw-r--r--  requirements-py2.txt                               |    7
-rw-r--r--  requirements-py3.txt                               |    7
-rw-r--r--  taskflow/engines/action_engine/engine.py           |    3
-rw-r--r--  taskflow/engines/helpers.py                        |    2
-rw-r--r--  taskflow/engines/worker_based/executor.py          |    3
-rw-r--r--  taskflow/engines/worker_based/protocol.py          |    2
-rw-r--r--  taskflow/jobs/backends/impl_zookeeper.py           |    4
-rw-r--r--  taskflow/listeners/base.py                         |    2
-rw-r--r--  taskflow/openstack/common/excutils.py              |  113
-rw-r--r--  taskflow/openstack/common/gettextutils.py          |  479
-rw-r--r--  taskflow/openstack/common/importutils.py           |   73
-rw-r--r--  taskflow/openstack/common/jsonutils.py             |  202
-rw-r--r--  taskflow/openstack/common/network_utils.py         |  163
-rw-r--r--  taskflow/openstack/common/strutils.py              |  311
-rw-r--r--  taskflow/openstack/common/timeutils.py             |  210
-rw-r--r--  taskflow/persistence/backends/impl_dir.py          |    2
-rw-r--r--  taskflow/persistence/backends/impl_sqlalchemy.py   |    2
-rw-r--r--  taskflow/persistence/backends/impl_zookeeper.py    |    2
-rw-r--r--  taskflow/persistence/backends/sqlalchemy/models.py |    4
-rw-r--r--  taskflow/persistence/logbook.py                    |    2
-rw-r--r--  taskflow/tests/unit/jobs/test_zk_job.py            |    2
-rw-r--r--  taskflow/tests/unit/test_engine_helpers.py         |    4
-rw-r--r--  taskflow/tests/unit/worker_based/test_executor.py  |    2
-rw-r--r--  taskflow/utils/misc.py                             |    6
-rw-r--r--  taskflow/utils/persistence_utils.py                |    3
-rw-r--r--  taskflow/utils/reflection.py                       |    3
27 files changed, 33 insertions, 1588 deletions
diff --git a/openstack-common.conf b/openstack-common.conf
index 8940a04..9db6be0 100644
--- a/openstack-common.conf
+++ b/openstack-common.conf
@@ -1,16 +1,8 @@
[DEFAULT]
# The list of modules to copy from oslo-incubator.git
-module=excutils
-module=importutils
-module=jsonutils
-module=strutils
-module=timeutils
module=uuidutils
-module=network_utils
-
script=tools/run_cross_tests.sh
# The base module to hold the copy of openstack.common
base=taskflow
-
diff --git a/requirements-py2.txt b/requirements-py2.txt
index fae98b8..d4f85ae 100644
--- a/requirements-py2.txt
+++ b/requirements-py2.txt
@@ -3,18 +3,19 @@
# process, which may cause wedges in the gate later.
# Packages needed for using this library.
-anyjson>=0.3.3
-iso8601>=0.1.9
+
# Only needed on python 2.6
ordereddict
# Python 2->3 compatibility library.
six>=1.7.0
# Very nice graph library
networkx>=1.8
-Babel>=1.3
# Used for backend storage engine loading.
stevedore>=1.0.0 # Apache-2.0
# Backport for concurrent.futures which exists in 3.2+
futures>=2.1.6
# Used for structured input validation
jsonschema>=2.0.0,<3.0.0
+# For common utilities
+oslo.utils>=0.3.0
+oslo.serialization>=0.1.0
diff --git a/requirements-py3.txt b/requirements-py3.txt
index 12f78d4..59ee1d3 100644
--- a/requirements-py3.txt
+++ b/requirements-py3.txt
@@ -3,14 +3,15 @@
# process, which may cause wedges in the gate later.
# Packages needed for using this library.
-anyjson>=0.3.3
-iso8601>=0.1.9
+
# Python 2->3 compatibility library.
six>=1.7.0
# Very nice graph library
networkx>=1.8
-Babel>=1.3
# Used for backend storage engine loading.
stevedore>=1.0.0 # Apache-2.0
# Used for structured input validation
jsonschema>=2.0.0,<3.0.0
+# For common utilities
+oslo.utils>=0.3.0
+oslo.serialization>=0.1.0
diff --git a/taskflow/engines/action_engine/engine.py b/taskflow/engines/action_engine/engine.py
index a5f587f..9bf6242 100644
--- a/taskflow/engines/action_engine/engine.py
+++ b/taskflow/engines/action_engine/engine.py
@@ -17,12 +17,13 @@
import contextlib
import threading
+from oslo.utils import excutils
+
from taskflow.engines.action_engine import compiler
from taskflow.engines.action_engine import executor
from taskflow.engines.action_engine import runtime
from taskflow.engines import base
from taskflow import exceptions as exc
-from taskflow.openstack.common import excutils
from taskflow import retry
from taskflow import states
from taskflow import storage as atom_storage
diff --git a/taskflow/engines/helpers.py b/taskflow/engines/helpers.py
index c200df8..bfbaaa5 100644
--- a/taskflow/engines/helpers.py
+++ b/taskflow/engines/helpers.py
@@ -16,11 +16,11 @@
import contextlib
+from oslo.utils import importutils
import six
import stevedore.driver
from taskflow import exceptions as exc
-from taskflow.openstack.common import importutils
from taskflow.persistence import backends as p_backends
from taskflow.utils import misc
from taskflow.utils import persistence_utils as p_utils
diff --git a/taskflow/engines/worker_based/executor.py b/taskflow/engines/worker_based/executor.py
index 813612c..827db0e 100644
--- a/taskflow/engines/worker_based/executor.py
+++ b/taskflow/engines/worker_based/executor.py
@@ -18,12 +18,13 @@ import functools
import logging
import threading
+from oslo.utils import timeutils
+
from taskflow.engines.action_engine import executor
from taskflow.engines.worker_based import cache
from taskflow.engines.worker_based import protocol as pr
from taskflow.engines.worker_based import proxy
from taskflow import exceptions as exc
-from taskflow.openstack.common import timeutils
from taskflow.types import timing as tt
from taskflow.utils import async_utils
from taskflow.utils import misc
diff --git a/taskflow/engines/worker_based/protocol.py b/taskflow/engines/worker_based/protocol.py
index 6e54f9f..a97240a 100644
--- a/taskflow/engines/worker_based/protocol.py
+++ b/taskflow/engines/worker_based/protocol.py
@@ -21,11 +21,11 @@ import threading
from concurrent import futures
import jsonschema
from jsonschema import exceptions as schema_exc
+from oslo.utils import timeutils
import six
from taskflow.engines.action_engine import executor
from taskflow import exceptions as excp
-from taskflow.openstack.common import timeutils
from taskflow.types import timing as tt
from taskflow.utils import lock_utils
from taskflow.utils import misc
diff --git a/taskflow/jobs/backends/impl_zookeeper.py b/taskflow/jobs/backends/impl_zookeeper.py
index 4fc7b6e..cc6101c 100644
--- a/taskflow/jobs/backends/impl_zookeeper.py
+++ b/taskflow/jobs/backends/impl_zookeeper.py
@@ -24,13 +24,13 @@ from concurrent import futures
from kazoo import exceptions as k_exceptions
from kazoo.protocol import paths as k_paths
from kazoo.recipe import watchers
+from oslo.serialization import jsonutils
+from oslo.utils import excutils
import six
from taskflow import exceptions as excp
from taskflow.jobs import job as base_job
from taskflow.jobs import jobboard
-from taskflow.openstack.common import excutils
-from taskflow.openstack.common import jsonutils
from taskflow.openstack.common import uuidutils
from taskflow import states
from taskflow.types import timing as tt
diff --git a/taskflow/listeners/base.py b/taskflow/listeners/base.py
index 352b652..0b15cce 100644
--- a/taskflow/listeners/base.py
+++ b/taskflow/listeners/base.py
@@ -19,9 +19,9 @@ from __future__ import absolute_import
import abc
import logging
+from oslo.utils import excutils
import six
-from taskflow.openstack.common import excutils
from taskflow import states
from taskflow.utils import misc
diff --git a/taskflow/openstack/common/excutils.py b/taskflow/openstack/common/excutils.py
deleted file mode 100644
index 790fc0b..0000000
--- a/taskflow/openstack/common/excutils.py
+++ /dev/null
@@ -1,113 +0,0 @@
-# Copyright 2011 OpenStack Foundation.
-# Copyright 2012, Red Hat, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-"""
-Exception related utilities.
-"""
-
-import logging
-import sys
-import time
-import traceback
-
-import six
-
-from taskflow.openstack.common.gettextutils import _LE
-
-
-class save_and_reraise_exception(object):
- """Save current exception, run some code and then re-raise.
-
- In some cases the exception context can be cleared, resulting in None
- being attempted to be re-raised after an exception handler is run. This
- can happen when eventlet switches greenthreads or when running an
- exception handler, code raises and catches an exception. In both
- cases the exception context will be cleared.
-
- To work around this, we save the exception state, run handler code, and
- then re-raise the original exception. If another exception occurs, the
- saved exception is logged and the new exception is re-raised.
-
- In some cases the caller may not want to re-raise the exception, and
- for those circumstances this context provides a reraise flag that
- can be used to suppress the exception. For example::
-
- except Exception:
- with save_and_reraise_exception() as ctxt:
- decide_if_need_reraise()
- if not should_be_reraised:
- ctxt.reraise = False
-
- If another exception occurs and reraise flag is False,
- the saved exception will not be logged.
-
- If the caller wants to raise new exception during exception handling
- he/she sets reraise to False initially with an ability to set it back to
- True if needed::
-
- except Exception:
- with save_and_reraise_exception(reraise=False) as ctxt:
- [if statements to determine whether to raise a new exception]
- # Not raising a new exception, so reraise
- ctxt.reraise = True
- """
- def __init__(self, reraise=True):
- self.reraise = reraise
-
- def __enter__(self):
- self.type_, self.value, self.tb, = sys.exc_info()
- return self
-
- def __exit__(self, exc_type, exc_val, exc_tb):
- if exc_type is not None:
- if self.reraise:
- logging.error(_LE('Original exception being dropped: %s'),
- traceback.format_exception(self.type_,
- self.value,
- self.tb))
- return False
- if self.reraise:
- six.reraise(self.type_, self.value, self.tb)
-
-
-def forever_retry_uncaught_exceptions(infunc):
- def inner_func(*args, **kwargs):
- last_log_time = 0
- last_exc_message = None
- exc_count = 0
- while True:
- try:
- return infunc(*args, **kwargs)
- except Exception as exc:
- this_exc_message = six.u(str(exc))
- if this_exc_message == last_exc_message:
- exc_count += 1
- else:
- exc_count = 1
- # Do not log any more frequently than once a minute unless
- # the exception message changes
- cur_time = int(time.time())
- if (cur_time - last_log_time > 60 or
- this_exc_message != last_exc_message):
- logging.exception(
- _LE('Unexpected exception occurred %d time(s)... '
- 'retrying.') % exc_count)
- last_log_time = cur_time
- last_exc_message = this_exc_message
- exc_count = 0
- # This should be a very rare event. In case it isn't, do
- # a sleep.
- time.sleep(1)
- return inner_func
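
The save_and_reraise_exception context manager deleted here is provided under the same name by oslo.utils, which the hunks above now import. A minimal usage sketch, assuming oslo.utils>=0.3.0 from the requirements files; the resource object and its commit()/rollback() methods are hypothetical stand-ins:

    from oslo.utils import excutils

    def commit_or_rollback(resource):
        try:
            resource.commit()
        except Exception:
            # Preserve the original exception while the handler runs; only
            # suppress the re-raise if the rollback fully recovered.
            with excutils.save_and_reraise_exception() as ctxt:
                if resource.rollback():
                    ctxt.reraise = False
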
diff --git a/taskflow/openstack/common/gettextutils.py b/taskflow/openstack/common/gettextutils.py
deleted file mode 100644
index 20fc254..0000000
--- a/taskflow/openstack/common/gettextutils.py
+++ /dev/null
@@ -1,479 +0,0 @@
-# Copyright 2012 Red Hat, Inc.
-# Copyright 2013 IBM Corp.
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-"""
-gettext for openstack-common modules.
-
-Usual usage in an openstack.common module:
-
- from taskflow.openstack.common.gettextutils import _
-"""
-
-import copy
-import gettext
-import locale
-from logging import handlers
-import os
-
-from babel import localedata
-import six
-
-_AVAILABLE_LANGUAGES = {}
-
-# FIXME(dhellmann): Remove this when moving to oslo.i18n.
-USE_LAZY = False
-
-
-class TranslatorFactory(object):
- """Create translator functions
- """
-
- def __init__(self, domain, localedir=None):
- """Establish a set of translation functions for the domain.
-
- :param domain: Name of translation domain,
- specifying a message catalog.
- :type domain: str
- :param lazy: Delays translation until a message is emitted.
- Defaults to False.
- :type lazy: Boolean
- :param localedir: Directory with translation catalogs.
- :type localedir: str
- """
- self.domain = domain
- if localedir is None:
- localedir = os.environ.get(domain.upper() + '_LOCALEDIR')
- self.localedir = localedir
-
- def _make_translation_func(self, domain=None):
- """Return a new translation function ready for use.
-
- Takes into account whether or not lazy translation is being
- done.
-
- The domain can be specified to override the default from the
- factory, but the localedir from the factory is always used
- because we assume the log-level translation catalogs are
- installed in the same directory as the main application
- catalog.
-
- """
- if domain is None:
- domain = self.domain
- t = gettext.translation(domain,
- localedir=self.localedir,
- fallback=True)
- # Use the appropriate method of the translation object based
- # on the python version.
- m = t.gettext if six.PY3 else t.ugettext
-
- def f(msg):
- """oslo.i18n.gettextutils translation function."""
- if USE_LAZY:
- return Message(msg, domain=domain)
- return m(msg)
- return f
-
- @property
- def primary(self):
- "The default translation function."
- return self._make_translation_func()
-
- def _make_log_translation_func(self, level):
- return self._make_translation_func(self.domain + '-log-' + level)
-
- @property
- def log_info(self):
- "Translate info-level log messages."
- return self._make_log_translation_func('info')
-
- @property
- def log_warning(self):
- "Translate warning-level log messages."
- return self._make_log_translation_func('warning')
-
- @property
- def log_error(self):
- "Translate error-level log messages."
- return self._make_log_translation_func('error')
-
- @property
- def log_critical(self):
- "Translate critical-level log messages."
- return self._make_log_translation_func('critical')
-
-
-# NOTE(dhellmann): When this module moves out of the incubator into
-# oslo.i18n, these global variables can be moved to an integration
-# module within each application.
-
-# Create the global translation functions.
-_translators = TranslatorFactory('taskflow')
-
-# The primary translation function using the well-known name "_"
-_ = _translators.primary
-
-# Translators for log levels.
-#
-# The abbreviated names are meant to reflect the usual use of a short
-# name like '_'. The "L" is for "log" and the other letter comes from
-# the level.
-_LI = _translators.log_info
-_LW = _translators.log_warning
-_LE = _translators.log_error
-_LC = _translators.log_critical
-
-# NOTE(dhellmann): End of globals that will move to the application's
-# integration module.
-
-
-def enable_lazy():
- """Convenience function for configuring _() to use lazy gettext
-
- Call this at the start of execution to enable the gettextutils._
- function to use lazy gettext functionality. This is useful if
- your project is importing _ directly instead of using the
- gettextutils.install() way of importing the _ function.
- """
- global USE_LAZY
- USE_LAZY = True
-
-
-def install(domain):
- """Install a _() function using the given translation domain.
-
- Given a translation domain, install a _() function using gettext's
- install() function.
-
- The main difference from gettext.install() is that we allow
- overriding the default localedir (e.g. /usr/share/locale) using
- a translation-domain-specific environment variable (e.g.
- NOVA_LOCALEDIR).
-
- Note that to enable lazy translation, enable_lazy must be
- called.
-
- :param domain: the translation domain
- """
- from six import moves
- tf = TranslatorFactory(domain)
- moves.builtins.__dict__['_'] = tf.primary
-
-
-class Message(six.text_type):
- """A Message object is a unicode object that can be translated.
-
- Translation of Message is done explicitly using the translate() method.
- For all non-translation intents and purposes, a Message is simply unicode,
- and can be treated as such.
- """
-
- def __new__(cls, msgid, msgtext=None, params=None,
- domain='taskflow', *args):
- """Create a new Message object.
-
- In order for translation to work gettext requires a message ID, this
- msgid will be used as the base unicode text. It is also possible
- for the msgid and the base unicode text to be different by passing
- the msgtext parameter.
- """
- # If the base msgtext is not given, we use the default translation
- # of the msgid (which is in English) just in case the system locale is
- # not English, so that the base text will be in that locale by default.
- if not msgtext:
- msgtext = Message._translate_msgid(msgid, domain)
- # We want to initialize the parent unicode with the actual object that
- # would have been plain unicode if 'Message' was not enabled.
- msg = super(Message, cls).__new__(cls, msgtext)
- msg.msgid = msgid
- msg.domain = domain
- msg.params = params
- return msg
-
- def translate(self, desired_locale=None):
- """Translate this message to the desired locale.
-
- :param desired_locale: The desired locale to translate the message to,
- if no locale is provided the message will be
- translated to the system's default locale.
-
- :returns: the translated message in unicode
- """
-
- translated_message = Message._translate_msgid(self.msgid,
- self.domain,
- desired_locale)
- if self.params is None:
- # No need for more translation
- return translated_message
-
- # This Message object may have been formatted with one or more
- # Message objects as substitution arguments, given either as a single
- # argument, part of a tuple, or as one or more values in a dictionary.
- # When translating this Message we need to translate those Messages too
- translated_params = _translate_args(self.params, desired_locale)
-
- translated_message = translated_message % translated_params
-
- return translated_message
-
- @staticmethod
- def _translate_msgid(msgid, domain, desired_locale=None):
- if not desired_locale:
- system_locale = locale.getdefaultlocale()
- # If the system locale is not available to the runtime use English
- if not system_locale[0]:
- desired_locale = 'en_US'
- else:
- desired_locale = system_locale[0]
-
- locale_dir = os.environ.get(domain.upper() + '_LOCALEDIR')
- lang = gettext.translation(domain,
- localedir=locale_dir,
- languages=[desired_locale],
- fallback=True)
- if six.PY3:
- translator = lang.gettext
- else:
- translator = lang.ugettext
-
- translated_message = translator(msgid)
- return translated_message
-
- def __mod__(self, other):
- # When we mod a Message we want the actual operation to be performed
- # by the parent class (i.e. unicode()), the only thing we do here is
- # save the original msgid and the parameters in case of a translation
- params = self._sanitize_mod_params(other)
- unicode_mod = super(Message, self).__mod__(params)
- modded = Message(self.msgid,
- msgtext=unicode_mod,
- params=params,
- domain=self.domain)
- return modded
-
- def _sanitize_mod_params(self, other):
- """Sanitize the object being modded with this Message.
-
- - Add support for modding 'None' so translation supports it
- - Trim the modded object, which can be a large dictionary, to only
- those keys that would actually be used in a translation
- - Snapshot the object being modded, in case the message is
- translated, it will be used as it was when the Message was created
- """
- if other is None:
- params = (other,)
- elif isinstance(other, dict):
- # Merge the dictionaries
- # Copy each item in case one does not support deep copy.
- params = {}
- if isinstance(self.params, dict):
- for key, val in self.params.items():
- params[key] = self._copy_param(val)
- for key, val in other.items():
- params[key] = self._copy_param(val)
- else:
- params = self._copy_param(other)
- return params
-
- def _copy_param(self, param):
- try:
- return copy.deepcopy(param)
- except Exception:
- # Fallback to casting to unicode this will handle the
- # python code-like objects that can't be deep-copied
- return six.text_type(param)
-
- def __add__(self, other):
- msg = _('Message objects do not support addition.')
- raise TypeError(msg)
-
- def __radd__(self, other):
- return self.__add__(other)
-
- if six.PY2:
- def __str__(self):
- # NOTE(luisg): Logging in python 2.6 tries to str() log records,
- # and it expects specifically a UnicodeError in order to proceed.
- msg = _('Message objects do not support str() because they may '
- 'contain non-ascii characters. '
- 'Please use unicode() or translate() instead.')
- raise UnicodeError(msg)
-
-
-def get_available_languages(domain):
- """Lists the available languages for the given translation domain.
-
- :param domain: the domain to get languages for
- """
- if domain in _AVAILABLE_LANGUAGES:
- return copy.copy(_AVAILABLE_LANGUAGES[domain])
-
- localedir = '%s_LOCALEDIR' % domain.upper()
- find = lambda x: gettext.find(domain,
- localedir=os.environ.get(localedir),
- languages=[x])
-
- # NOTE(mrodden): en_US should always be available (and first in case
- # order matters) since our in-line message strings are en_US
- language_list = ['en_US']
- # NOTE(luisg): Babel <1.0 used a function called list(), which was
- # renamed to locale_identifiers() in >=1.0, the requirements master list
- # requires >=0.9.6, uncapped, so defensively work with both. We can remove
- # this check when the master list updates to >=1.0, and update all projects
- list_identifiers = (getattr(localedata, 'list', None) or
- getattr(localedata, 'locale_identifiers'))
- locale_identifiers = list_identifiers()
-
- for i in locale_identifiers:
- if find(i) is not None:
- language_list.append(i)
-
- # NOTE(luisg): Babel>=1.0,<1.3 has a bug where some OpenStack supported
- # locales (e.g. 'zh_CN', and 'zh_TW') aren't supported even though they
- # are perfectly legitimate locales:
- # https://github.com/mitsuhiko/babel/issues/37
- # In Babel 1.3 they fixed the bug and they support these locales, but
- # they are still not explicitly "listed" by locale_identifiers().
- # That is why we add the locales here explicitly if necessary so that
- # they are listed as supported.
- aliases = {'zh': 'zh_CN',
- 'zh_Hant_HK': 'zh_HK',
- 'zh_Hant': 'zh_TW',
- 'fil': 'tl_PH'}
- for (locale_, alias) in six.iteritems(aliases):
- if locale_ in language_list and alias not in language_list:
- language_list.append(alias)
-
- _AVAILABLE_LANGUAGES[domain] = language_list
- return copy.copy(language_list)
-
-
-def translate(obj, desired_locale=None):
- """Gets the translated unicode representation of the given object.
-
- If the object is not translatable it is returned as-is.
- If the locale is None the object is translated to the system locale.
-
- :param obj: the object to translate
- :param desired_locale: the locale to translate the message to, if None the
- default system locale will be used
- :returns: the translated object in unicode, or the original object if
- it could not be translated
- """
- message = obj
- if not isinstance(message, Message):
- # If the object to translate is not already translatable,
- # let's first get its unicode representation
- message = six.text_type(obj)
- if isinstance(message, Message):
- # Even after unicoding() we still need to check if we are
- # running with translatable unicode before translating
- return message.translate(desired_locale)
- return obj
-
-
-def _translate_args(args, desired_locale=None):
- """Translates all the translatable elements of the given arguments object.
-
- This method is used for translating the translatable values in method
- arguments which include values of tuples or dictionaries.
- If the object is not a tuple or a dictionary the object itself is
- translated if it is translatable.
-
- If the locale is None the object is translated to the system locale.
-
- :param args: the args to translate
- :param desired_locale: the locale to translate the args to, if None the
- default system locale will be used
- :returns: a new args object with the translated contents of the original
- """
- if isinstance(args, tuple):
- return tuple(translate(v, desired_locale) for v in args)
- if isinstance(args, dict):
- translated_dict = {}
- for (k, v) in six.iteritems(args):
- translated_v = translate(v, desired_locale)
- translated_dict[k] = translated_v
- return translated_dict
- return translate(args, desired_locale)
-
-
-class TranslationHandler(handlers.MemoryHandler):
- """Handler that translates records before logging them.
-
- The TranslationHandler takes a locale and a target logging.Handler object
- to forward LogRecord objects to after translating them. This handler
- depends on Message objects being logged, instead of regular strings.
-
- The handler can be configured declaratively in the logging.conf as follows:
-
- [handlers]
- keys = translatedlog, translator
-
- [handler_translatedlog]
- class = handlers.WatchedFileHandler
- args = ('/var/log/api-localized.log',)
- formatter = context
-
- [handler_translator]
- class = openstack.common.log.TranslationHandler
- target = translatedlog
- args = ('zh_CN',)
-
- If the specified locale is not available in the system, the handler will
- log in the default locale.
- """
-
- def __init__(self, locale=None, target=None):
- """Initialize a TranslationHandler
-
- :param locale: locale to use for translating messages
- :param target: logging.Handler object to forward
- LogRecord objects to after translation
- """
- # NOTE(luisg): In order to allow this handler to be a wrapper for
- # other handlers, such as a FileHandler, and still be able to
- # configure it using logging.conf, this handler has to extend
- # MemoryHandler because only the MemoryHandlers' logging.conf
- # parsing is implemented such that it accepts a target handler.
- handlers.MemoryHandler.__init__(self, capacity=0, target=target)
- self.locale = locale
-
- def setFormatter(self, fmt):
- self.target.setFormatter(fmt)
-
- def emit(self, record):
- # We save the message from the original record to restore it
- # after translation, so other handlers are not affected by this
- original_msg = record.msg
- original_args = record.args
-
- try:
- self._translate_and_log_record(record)
- finally:
- record.msg = original_msg
- record.args = original_args
-
- def _translate_and_log_record(self, record):
- record.msg = translate(record.msg, self.locale)
-
- # In addition to translating the message, we also need to translate
- # arguments that were passed to the log method that were not part
- # of the main message e.g., log.info(_('Some message %s'), this_one))
- record.args = _translate_args(record.args, self.locale)
-
- self.target.emit(record)
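
The translation helpers deleted here had no remaining users once the other incubator copies went away, so this merge adds no replacement import for them. For reference, the FIXME notes above point at oslo.i18n, whose TranslatorFactory mirrors this copy; a hedged sketch of that integration pattern (oslo.i18n is not added to the requirements by this change):

    from oslo import i18n

    # Same factory/properties as the deleted TranslatorFactory copy.
    _translators = i18n.TranslatorFactory(domain='taskflow')
    _ = _translators.primary        # user-facing messages
    _LE = _translators.log_error    # error-level log messages
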
diff --git a/taskflow/openstack/common/importutils.py b/taskflow/openstack/common/importutils.py
deleted file mode 100644
index 1e0e703..0000000
--- a/taskflow/openstack/common/importutils.py
+++ /dev/null
@@ -1,73 +0,0 @@
-# Copyright 2011 OpenStack Foundation.
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-"""
-Import related utilities and helper functions.
-"""
-
-import sys
-import traceback
-
-
-def import_class(import_str):
- """Returns a class from a string including module and class."""
- mod_str, _sep, class_str = import_str.rpartition('.')
- __import__(mod_str)
- try:
- return getattr(sys.modules[mod_str], class_str)
- except AttributeError:
- raise ImportError('Class %s cannot be found (%s)' %
- (class_str,
- traceback.format_exception(*sys.exc_info())))
-
-
-def import_object(import_str, *args, **kwargs):
- """Import a class and return an instance of it."""
- return import_class(import_str)(*args, **kwargs)
-
-
-def import_object_ns(name_space, import_str, *args, **kwargs):
- """Tries to import object from default namespace.
-
- Imports a class and return an instance of it, first by trying
- to find the class in a default namespace, then failing back to
- a full path if not found in the default namespace.
- """
- import_value = "%s.%s" % (name_space, import_str)
- try:
- return import_class(import_value)(*args, **kwargs)
- except ImportError:
- return import_class(import_str)(*args, **kwargs)
-
-
-def import_module(import_str):
- """Import a module."""
- __import__(import_str)
- return sys.modules[import_str]
-
-
-def import_versioned_module(version, submodule=None):
- module = 'taskflow.v%s' % version
- if submodule:
- module = '.'.join((module, submodule))
- return import_module(module)
-
-
-def try_import(import_str, default=None):
- """Try to import a module and if it fails return default."""
- try:
- return import_module(import_str)
- except ImportError:
- return default
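
The loader helpers removed here (import_class(), import_module(), try_import(), ...) are available with the same names from oslo.utils, which taskflow/engines/helpers.py now imports (hunk above). A small sketch; logging.StreamHandler is just an arbitrary importable class used for illustration:

    from oslo.utils import importutils

    # Load a class by dotted path and instantiate it.
    handler_cls = importutils.import_class('logging.StreamHandler')
    handler = handler_cls()

    # try_import() returns a default (None) instead of raising ImportError.
    netaddr = importutils.try_import('netaddr')
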
diff --git a/taskflow/openstack/common/jsonutils.py b/taskflow/openstack/common/jsonutils.py
deleted file mode 100644
index 8231688..0000000
--- a/taskflow/openstack/common/jsonutils.py
+++ /dev/null
@@ -1,202 +0,0 @@
-# Copyright 2010 United States Government as represented by the
-# Administrator of the National Aeronautics and Space Administration.
-# Copyright 2011 Justin Santa Barbara
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-'''
-JSON related utilities.
-
-This module provides a few things:
-
- 1) A handy function for getting an object down to something that can be
- JSON serialized. See to_primitive().
-
- 2) Wrappers around loads() and dumps(). The dumps() wrapper will
- automatically use to_primitive() for you if needed.
-
- 3) This sets up anyjson to use the loads() and dumps() wrappers if anyjson
- is available.
-'''
-
-
-import codecs
-import datetime
-import functools
-import inspect
-import itertools
-import sys
-
-is_simplejson = False
-if sys.version_info < (2, 7):
- # On Python <= 2.6, json module is not C boosted, so try to use
- # simplejson module if available
- try:
- import simplejson as json
- # NOTE(mriedem): Make sure we have a new enough version of simplejson
- # to support the namedobject_as_tuple argument. This can be removed
- # in the Kilo release when python 2.6 support is dropped.
- if 'namedtuple_as_object' in inspect.getargspec(json.dumps).args:
- is_simplejson = True
- else:
- import json
- except ImportError:
- import json
-else:
- import json
-
-import six
-import six.moves.xmlrpc_client as xmlrpclib
-
-from taskflow.openstack.common import gettextutils
-from taskflow.openstack.common import importutils
-from taskflow.openstack.common import strutils
-from taskflow.openstack.common import timeutils
-
-netaddr = importutils.try_import("netaddr")
-
-_nasty_type_tests = [inspect.ismodule, inspect.isclass, inspect.ismethod,
- inspect.isfunction, inspect.isgeneratorfunction,
- inspect.isgenerator, inspect.istraceback, inspect.isframe,
- inspect.iscode, inspect.isbuiltin, inspect.isroutine,
- inspect.isabstract]
-
-_simple_types = (six.string_types + six.integer_types
- + (type(None), bool, float))
-
-
-def to_primitive(value, convert_instances=False, convert_datetime=True,
- level=0, max_depth=3):
- """Convert a complex object into primitives.
-
- Handy for JSON serialization. We can optionally handle instances,
- but since this is a recursive function, we could have cyclical
- data structures.
-
- To handle cyclical data structures we could track the actual objects
- visited in a set, but not all objects are hashable. Instead we just
- track the depth of the object inspections and don't go too deep.
-
- Therefore, convert_instances=True is lossy ... be aware.
-
- """
- # handle obvious types first - order of basic types determined by running
- # full tests on nova project, resulting in the following counts:
- # 572754 <type 'NoneType'>
- # 460353 <type 'int'>
- # 379632 <type 'unicode'>
- # 274610 <type 'str'>
- # 199918 <type 'dict'>
- # 114200 <type 'datetime.datetime'>
- # 51817 <type 'bool'>
- # 26164 <type 'list'>
- # 6491 <type 'float'>
- # 283 <type 'tuple'>
- # 19 <type 'long'>
- if isinstance(value, _simple_types):
- return value
-
- if isinstance(value, datetime.datetime):
- if convert_datetime:
- return timeutils.strtime(value)
- else:
- return value
-
- # value of itertools.count doesn't get caught by nasty_type_tests
- # and results in infinite loop when list(value) is called.
- if type(value) == itertools.count:
- return six.text_type(value)
-
- # FIXME(vish): Workaround for LP bug 852095. Without this workaround,
- # tests that raise an exception in a mocked method that
- # has a @wrap_exception with a notifier will fail. If
- # we up the dependency to 0.5.4 (when it is released) we
- # can remove this workaround.
- if getattr(value, '__module__', None) == 'mox':
- return 'mock'
-
- if level > max_depth:
- return '?'
-
- # The try block may not be necessary after the class check above,
- # but just in case ...
- try:
- recursive = functools.partial(to_primitive,
- convert_instances=convert_instances,
- convert_datetime=convert_datetime,
- level=level,
- max_depth=max_depth)
- if isinstance(value, dict):
- return dict((k, recursive(v)) for k, v in six.iteritems(value))
- elif isinstance(value, (list, tuple)):
- return [recursive(lv) for lv in value]
-
- # It's not clear why xmlrpclib created their own DateTime type, but
- # for our purposes, make it a datetime type which is explicitly
- # handled
- if isinstance(value, xmlrpclib.DateTime):
- value = datetime.datetime(*tuple(value.timetuple())[:6])
-
- if convert_datetime and isinstance(value, datetime.datetime):
- return timeutils.strtime(value)
- elif isinstance(value, gettextutils.Message):
- return value.data
- elif hasattr(value, 'iteritems'):
- return recursive(dict(value.iteritems()), level=level + 1)
- elif hasattr(value, '__iter__'):
- return recursive(list(value))
- elif convert_instances and hasattr(value, '__dict__'):
- # Likely an instance of something. Watch for cycles.
- # Ignore class member vars.
- return recursive(value.__dict__, level=level + 1)
- elif netaddr and isinstance(value, netaddr.IPAddress):
- return six.text_type(value)
- else:
- if any(test(value) for test in _nasty_type_tests):
- return six.text_type(value)
- return value
- except TypeError:
- # Class objects are tricky since they may define something like
- # __iter__ defined but it isn't callable as list().
- return six.text_type(value)
-
-
-def dumps(value, default=to_primitive, **kwargs):
- if is_simplejson:
- kwargs['namedtuple_as_object'] = False
- return json.dumps(value, default=default, **kwargs)
-
-
-def dump(obj, fp, *args, **kwargs):
- if is_simplejson:
- kwargs['namedtuple_as_object'] = False
- return json.dump(obj, fp, *args, **kwargs)
-
-
-def loads(s, encoding='utf-8', **kwargs):
- return json.loads(strutils.safe_decode(s, encoding), **kwargs)
-
-
-def load(fp, encoding='utf-8', **kwargs):
- return json.load(codecs.getreader(encoding)(fp), **kwargs)
-
-
-try:
- import anyjson
-except ImportError:
- pass
-else:
- anyjson._modules.append((__name__, 'dumps', TypeError,
- 'loads', ValueError, 'load'))
- anyjson.force_implementation(__name__)
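
The dumps()/loads() wrappers and to_primitive() removed here now come from oslo.serialization, which the jobs and persistence backends import instead (see the impl_zookeeper.py hunk above and the impl_dir.py and models.py hunks below). A short sketch of the behaviour the wrappers add over the stdlib json module, assuming oslo.serialization>=0.1.0 from the requirements files:

    import datetime

    from oslo.serialization import jsonutils

    # dumps() uses to_primitive() as its default encoder, so values the
    # stdlib json module rejects (datetimes, iterables, ...) still serialize.
    record = {'created_at': datetime.datetime(2014, 9, 26, 9, 41, 35),
              'name': u'flow-1'}
    blob = jsonutils.dumps(record)
    assert jsonutils.loads(blob)['name'] == u'flow-1'
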
diff --git a/taskflow/openstack/common/network_utils.py b/taskflow/openstack/common/network_utils.py
deleted file mode 100644
index 2729c3f..0000000
--- a/taskflow/openstack/common/network_utils.py
+++ /dev/null
@@ -1,163 +0,0 @@
-# Copyright 2012 OpenStack Foundation.
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-"""
-Network-related utilities and helper functions.
-"""
-
-import logging
-import socket
-
-from six.moves.urllib import parse
-
-from taskflow.openstack.common.gettextutils import _LW
-
-LOG = logging.getLogger(__name__)
-
-
-def parse_host_port(address, default_port=None):
- """Interpret a string as a host:port pair.
-
- An IPv6 address MUST be escaped if accompanied by a port,
- because otherwise ambiguity ensues: 2001:db8:85a3::8a2e:370:7334
- means both [2001:db8:85a3::8a2e:370:7334] and
- [2001:db8:85a3::8a2e:370]:7334.
-
- >>> parse_host_port('server01:80')
- ('server01', 80)
- >>> parse_host_port('server01')
- ('server01', None)
- >>> parse_host_port('server01', default_port=1234)
- ('server01', 1234)
- >>> parse_host_port('[::1]:80')
- ('::1', 80)
- >>> parse_host_port('[::1]')
- ('::1', None)
- >>> parse_host_port('[::1]', default_port=1234)
- ('::1', 1234)
- >>> parse_host_port('2001:db8:85a3::8a2e:370:7334', default_port=1234)
- ('2001:db8:85a3::8a2e:370:7334', 1234)
- >>> parse_host_port(None)
- (None, None)
- """
- if not address:
- return (None, None)
-
- if address[0] == '[':
- # Escaped ipv6
- _host, _port = address[1:].split(']')
- host = _host
- if ':' in _port:
- port = _port.split(':')[1]
- else:
- port = default_port
- else:
- if address.count(':') == 1:
- host, port = address.split(':')
- else:
- # 0 means ipv4, >1 means ipv6.
- # We prohibit unescaped ipv6 addresses with port.
- host = address
- port = default_port
-
- return (host, None if port is None else int(port))
-
-
-class ModifiedSplitResult(parse.SplitResult):
- """Split results class for urlsplit."""
-
- # NOTE(dims): The functions below are needed for Python 2.6.x.
- # We can remove these when we drop support for 2.6.x.
- @property
- def hostname(self):
- netloc = self.netloc.split('@', 1)[-1]
- host, port = parse_host_port(netloc)
- return host
-
- @property
- def port(self):
- netloc = self.netloc.split('@', 1)[-1]
- host, port = parse_host_port(netloc)
- return port
-
-
-def urlsplit(url, scheme='', allow_fragments=True):
- """Parse a URL using urlparse.urlsplit(), splitting query and fragments.
- This function papers over Python issue9374 when needed.
-
- The parameters are the same as urlparse.urlsplit.
- """
- scheme, netloc, path, query, fragment = parse.urlsplit(
- url, scheme, allow_fragments)
- if allow_fragments and '#' in path:
- path, fragment = path.split('#', 1)
- if '?' in path:
- path, query = path.split('?', 1)
- return ModifiedSplitResult(scheme, netloc,
- path, query, fragment)
-
-
-def set_tcp_keepalive(sock, tcp_keepalive=True,
- tcp_keepidle=None,
- tcp_keepalive_interval=None,
- tcp_keepalive_count=None):
- """Set values for tcp keepalive parameters
-
- This function configures tcp keepalive parameters if users wish to do
- so.
-
- :param tcp_keepalive: Boolean, turn on or off tcp_keepalive. If users are
- not sure, this should be True, and default values will be used.
-
- :param tcp_keepidle: time to wait before starting to send keepalive probes
- :param tcp_keepalive_interval: time between successive probes, once the
- initial wait time is over
- :param tcp_keepalive_count: number of probes to send before the connection
- is killed
- """
-
- # NOTE(praneshp): Despite keepalive being a tcp concept, the level is
- # still SOL_SOCKET. This is a quirk.
- if isinstance(tcp_keepalive, bool):
- sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, tcp_keepalive)
- else:
- raise TypeError("tcp_keepalive must be a boolean")
-
- if not tcp_keepalive:
- return
-
- # These options aren't available in the OS X version of eventlet,
- # Idle + Count * Interval effectively gives you the total timeout.
- if tcp_keepidle is not None:
- if hasattr(socket, 'TCP_KEEPIDLE'):
- sock.setsockopt(socket.IPPROTO_TCP,
- socket.TCP_KEEPIDLE,
- tcp_keepidle)
- else:
- LOG.warning(_LW('tcp_keepidle not available on your system'))
- if tcp_keepalive_interval is not None:
- if hasattr(socket, 'TCP_KEEPINTVL'):
- sock.setsockopt(socket.IPPROTO_TCP,
- socket.TCP_KEEPINTVL,
- tcp_keepalive_interval)
- else:
- LOG.warning(_LW('tcp_keepintvl not available on your system'))
- if tcp_keepalive_count is not None:
- if hasattr(socket, 'TCP_KEEPCNT'):
- sock.setsockopt(socket.IPPROTO_TCP,
- socket.TCP_KEEPCNT,
- tcp_keepalive_count)
- else:
- LOG.warning(_LW('tcp_keepknt not available on your system'))
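
This copy had no users inside taskflow, so the merge simply drops it from the openstack-common.conf sync list; the equivalent helpers live on in oslo.utils as netutils. For reference only, a hedged sketch (netutils is not imported anywhere by this change):

    from oslo.utils import netutils

    # Host/port parsing with the bracketed-IPv6 rules documented above.
    assert netutils.parse_host_port('[::1]:80') == ('::1', 80)
    assert netutils.parse_host_port('server01', default_port=1234) == ('server01', 1234)
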
diff --git a/taskflow/openstack/common/strutils.py b/taskflow/openstack/common/strutils.py
deleted file mode 100644
index 2f0fd65..0000000
--- a/taskflow/openstack/common/strutils.py
+++ /dev/null
@@ -1,311 +0,0 @@
-# Copyright 2011 OpenStack Foundation.
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-"""
-System-level utilities and helper functions.
-"""
-
-import math
-import re
-import sys
-import unicodedata
-
-import six
-
-from taskflow.openstack.common.gettextutils import _
-
-
-UNIT_PREFIX_EXPONENT = {
- 'k': 1,
- 'K': 1,
- 'Ki': 1,
- 'M': 2,
- 'Mi': 2,
- 'G': 3,
- 'Gi': 3,
- 'T': 4,
- 'Ti': 4,
-}
-UNIT_SYSTEM_INFO = {
- 'IEC': (1024, re.compile(r'(^[-+]?\d*\.?\d+)([KMGT]i?)?(b|bit|B)$')),
- 'SI': (1000, re.compile(r'(^[-+]?\d*\.?\d+)([kMGT])?(b|bit|B)$')),
-}
-
-TRUE_STRINGS = ('1', 't', 'true', 'on', 'y', 'yes')
-FALSE_STRINGS = ('0', 'f', 'false', 'off', 'n', 'no')
-
-SLUGIFY_STRIP_RE = re.compile(r"[^\w\s-]")
-SLUGIFY_HYPHENATE_RE = re.compile(r"[-\s]+")
-
-
-# NOTE(flaper87): The following globals are used by `mask_password`
-_SANITIZE_KEYS = ['adminPass', 'admin_pass', 'password', 'admin_password']
-
-# NOTE(ldbragst): Let's build a list of regex objects using the list of
-# _SANITIZE_KEYS we already have. This way, we only have to add the new key
-# to the list of _SANITIZE_KEYS and we can generate regular expressions
-# for XML and JSON automatically.
-_SANITIZE_PATTERNS_2 = []
-_SANITIZE_PATTERNS_1 = []
-
-# NOTE(amrith): Some regular expressions have only one parameter, some
-# have two parameters. Use different lists of patterns here.
-_FORMAT_PATTERNS_1 = [r'(%(key)s\s*[=]\s*)[^\s^\'^\"]+']
-_FORMAT_PATTERNS_2 = [r'(%(key)s\s*[=]\s*[\"\']).*?([\"\'])',
- r'(%(key)s\s+[\"\']).*?([\"\'])',
- r'([-]{2}%(key)s\s+)[^\'^\"^=^\s]+([\s]*)',
- r'(<%(key)s>).*?(</%(key)s>)',
- r'([\"\']%(key)s[\"\']\s*:\s*[\"\']).*?([\"\'])',
- r'([\'"].*?%(key)s[\'"]\s*:\s*u?[\'"]).*?([\'"])',
- r'([\'"].*?%(key)s[\'"]\s*,\s*\'--?[A-z]+\'\s*,\s*u?'
- '[\'"]).*?([\'"])',
- r'(%(key)s\s*--?[A-z]+\s*)\S+(\s*)']
-
-for key in _SANITIZE_KEYS:
- for pattern in _FORMAT_PATTERNS_2:
- reg_ex = re.compile(pattern % {'key': key}, re.DOTALL)
- _SANITIZE_PATTERNS_2.append(reg_ex)
-
- for pattern in _FORMAT_PATTERNS_1:
- reg_ex = re.compile(pattern % {'key': key}, re.DOTALL)
- _SANITIZE_PATTERNS_1.append(reg_ex)
-
-
-def int_from_bool_as_string(subject):
- """Interpret a string as a boolean and return either 1 or 0.
-
- Any string value in:
-
- ('True', 'true', 'On', 'on', '1')
-
- is interpreted as a boolean True.
-
- Useful for JSON-decoded stuff and config file parsing
- """
- return bool_from_string(subject) and 1 or 0
-
-
-def bool_from_string(subject, strict=False, default=False):
- """Interpret a string as a boolean.
-
- A case-insensitive match is performed such that strings matching 't',
- 'true', 'on', 'y', 'yes', or '1' are considered True and, when
- `strict=False`, anything else returns the value specified by 'default'.
-
- Useful for JSON-decoded stuff and config file parsing.
-
- If `strict=True`, unrecognized values, including None, will raise a
- ValueError which is useful when parsing values passed in from an API call.
- Strings yielding False are 'f', 'false', 'off', 'n', 'no', or '0'.
- """
- if not isinstance(subject, six.string_types):
- subject = six.text_type(subject)
-
- lowered = subject.strip().lower()
-
- if lowered in TRUE_STRINGS:
- return True
- elif lowered in FALSE_STRINGS:
- return False
- elif strict:
- acceptable = ', '.join(
- "'%s'" % s for s in sorted(TRUE_STRINGS + FALSE_STRINGS))
- msg = _("Unrecognized value '%(val)s', acceptable values are:"
- " %(acceptable)s") % {'val': subject,
- 'acceptable': acceptable}
- raise ValueError(msg)
- else:
- return default
-
-
-def safe_decode(text, incoming=None, errors='strict'):
- """Decodes incoming text/bytes string using `incoming` if they're not
- already unicode.
-
- :param incoming: Text's current encoding
- :param errors: Errors handling policy. See here for valid
- values http://docs.python.org/2/library/codecs.html
- :returns: text or a unicode `incoming` encoded
- representation of it.
- :raises TypeError: If text is not an instance of str
- """
- if not isinstance(text, (six.string_types, six.binary_type)):
- raise TypeError("%s can't be decoded" % type(text))
-
- if isinstance(text, six.text_type):
- return text
-
- if not incoming:
- incoming = (sys.stdin.encoding or
- sys.getdefaultencoding())
-
- try:
- return text.decode(incoming, errors)
- except UnicodeDecodeError:
- # Note(flaper87) If we get here, it means that
- # sys.stdin.encoding / sys.getdefaultencoding
- # didn't return a suitable encoding to decode
- # text. This happens mostly when global LANG
- # var is not set correctly and there's no
- # default encoding. In this case, most likely
- # python will use ASCII or ANSI encoders as
- # default encodings but they won't be capable
- # of decoding non-ASCII characters.
- #
- # Also, UTF-8 is being used since it's an ASCII
- # extension.
- return text.decode('utf-8', errors)
-
-
-def safe_encode(text, incoming=None,
- encoding='utf-8', errors='strict'):
- """Encodes incoming text/bytes string using `encoding`.
-
- If incoming is not specified, text is expected to be encoded with
- current python's default encoding. (`sys.getdefaultencoding`)
-
- :param incoming: Text's current encoding
- :param encoding: Expected encoding for text (Default UTF-8)
- :param errors: Errors handling policy. See here for valid
- values http://docs.python.org/2/library/codecs.html
- :returns: text or a bytestring `encoding` encoded
- representation of it.
- :raises TypeError: If text is not an instance of str
- """
- if not isinstance(text, (six.string_types, six.binary_type)):
- raise TypeError("%s can't be encoded" % type(text))
-
- if not incoming:
- incoming = (sys.stdin.encoding or
- sys.getdefaultencoding())
-
- if isinstance(text, six.text_type):
- return text.encode(encoding, errors)
- elif text and encoding != incoming:
- # Decode text before encoding it with `encoding`
- text = safe_decode(text, incoming, errors)
- return text.encode(encoding, errors)
- else:
- return text
-
-
-def string_to_bytes(text, unit_system='IEC', return_int=False):
- """Converts a string into an float representation of bytes.
-
- The units supported for IEC ::
-
- Kb(it), Kib(it), Mb(it), Mib(it), Gb(it), Gib(it), Tb(it), Tib(it)
- KB, KiB, MB, MiB, GB, GiB, TB, TiB
-
- The units supported for SI ::
-
- kb(it), Mb(it), Gb(it), Tb(it)
- kB, MB, GB, TB
-
- Note that the SI unit system does not support capital letter 'K'
-
- :param text: String input for bytes size conversion.
- :param unit_system: Unit system for byte size conversion.
- :param return_int: If True, returns integer representation of text
- in bytes. (default: decimal)
- :returns: Numerical representation of text in bytes.
- :raises ValueError: If text has an invalid value.
-
- """
- try:
- base, reg_ex = UNIT_SYSTEM_INFO[unit_system]
- except KeyError:
- msg = _('Invalid unit system: "%s"') % unit_system
- raise ValueError(msg)
- match = reg_ex.match(text)
- if match:
- magnitude = float(match.group(1))
- unit_prefix = match.group(2)
- if match.group(3) in ['b', 'bit']:
- magnitude /= 8
- else:
- msg = _('Invalid string format: %s') % text
- raise ValueError(msg)
- if not unit_prefix:
- res = magnitude
- else:
- res = magnitude * pow(base, UNIT_PREFIX_EXPONENT[unit_prefix])
- if return_int:
- return int(math.ceil(res))
- return res
-
-
-def to_slug(value, incoming=None, errors="strict"):
- """Normalize string.
-
- Convert to lowercase, remove non-word characters, and convert spaces
- to hyphens.
-
- Inspired by Django's `slugify` filter.
-
- :param value: Text to slugify
- :param incoming: Text's current encoding
- :param errors: Errors handling policy. See here for valid
- values http://docs.python.org/2/library/codecs.html
- :returns: slugified unicode representation of `value`
- :raises TypeError: If text is not an instance of str
- """
- value = safe_decode(value, incoming, errors)
- # NOTE(aababilov): no need to use safe_(encode|decode) here:
- # encodings are always "ascii", error handling is always "ignore"
- # and types are always known (first: unicode; second: str)
- value = unicodedata.normalize("NFKD", value).encode(
- "ascii", "ignore").decode("ascii")
- value = SLUGIFY_STRIP_RE.sub("", value).strip().lower()
- return SLUGIFY_HYPHENATE_RE.sub("-", value)
-
-
-def mask_password(message, secret="***"):
- """Replace password with 'secret' in message.
-
- :param message: The string which includes security information.
- :param secret: value with which to replace passwords.
- :returns: The unicode value of message with the password fields masked.
-
- For example:
-
- >>> mask_password("'adminPass' : 'aaaaa'")
- "'adminPass' : '***'"
- >>> mask_password("'admin_pass' : 'aaaaa'")
- "'admin_pass' : '***'"
- >>> mask_password('"password" : "aaaaa"')
- '"password" : "***"'
- >>> mask_password("'original_password' : 'aaaaa'")
- "'original_password' : '***'"
- >>> mask_password("u'original_password' : u'aaaaa'")
- "u'original_password' : u'***'"
- """
- message = six.text_type(message)
-
- # NOTE(ldbragst): Check to see if anything in message contains any key
- # specified in _SANITIZE_KEYS, if not then just return the message since
- # we don't have to mask any passwords.
- if not any(key in message for key in _SANITIZE_KEYS):
- return message
-
- substitute = r'\g<1>' + secret + r'\g<2>'
- for pattern in _SANITIZE_PATTERNS_2:
- message = re.sub(pattern, substitute, message)
-
- substitute = r'\g<1>' + secret
- for pattern in _SANITIZE_PATTERNS_1:
- message = re.sub(pattern, substitute, message)
-
- return message
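
The string helpers removed here are imported from oslo.utils instead (see the impl_sqlalchemy.py hunk below). A brief sketch of the unchanged API, assuming oslo.utils>=0.3.0:

    from oslo.utils import strutils

    assert strutils.bool_from_string('yes') is True
    assert strutils.string_to_bytes('1MiB') == 1048576.0
    # Password-bearing strings are masked before logging.
    print(strutils.mask_password("'password' : 'aaaaa'"))
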
diff --git a/taskflow/openstack/common/timeutils.py b/taskflow/openstack/common/timeutils.py
deleted file mode 100644
index c48da95..0000000
--- a/taskflow/openstack/common/timeutils.py
+++ /dev/null
@@ -1,210 +0,0 @@
-# Copyright 2011 OpenStack Foundation.
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-"""
-Time related utilities and helper functions.
-"""
-
-import calendar
-import datetime
-import time
-
-import iso8601
-import six
-
-
-# ISO 8601 extended time format with microseconds
-_ISO8601_TIME_FORMAT_SUBSECOND = '%Y-%m-%dT%H:%M:%S.%f'
-_ISO8601_TIME_FORMAT = '%Y-%m-%dT%H:%M:%S'
-PERFECT_TIME_FORMAT = _ISO8601_TIME_FORMAT_SUBSECOND
-
-
-def isotime(at=None, subsecond=False):
- """Stringify time in ISO 8601 format."""
- if not at:
- at = utcnow()
- st = at.strftime(_ISO8601_TIME_FORMAT
- if not subsecond
- else _ISO8601_TIME_FORMAT_SUBSECOND)
- tz = at.tzinfo.tzname(None) if at.tzinfo else 'UTC'
- st += ('Z' if tz == 'UTC' else tz)
- return st
-
-
-def parse_isotime(timestr):
- """Parse time from ISO 8601 format."""
- try:
- return iso8601.parse_date(timestr)
- except iso8601.ParseError as e:
- raise ValueError(six.text_type(e))
- except TypeError as e:
- raise ValueError(six.text_type(e))
-
-
-def strtime(at=None, fmt=PERFECT_TIME_FORMAT):
- """Returns formatted utcnow."""
- if not at:
- at = utcnow()
- return at.strftime(fmt)
-
-
-def parse_strtime(timestr, fmt=PERFECT_TIME_FORMAT):
- """Turn a formatted time back into a datetime."""
- return datetime.datetime.strptime(timestr, fmt)
-
-
-def normalize_time(timestamp):
- """Normalize time in arbitrary timezone to UTC naive object."""
- offset = timestamp.utcoffset()
- if offset is None:
- return timestamp
- return timestamp.replace(tzinfo=None) - offset
-
-
-def is_older_than(before, seconds):
- """Return True if before is older than seconds."""
- if isinstance(before, six.string_types):
- before = parse_strtime(before).replace(tzinfo=None)
- else:
- before = before.replace(tzinfo=None)
-
- return utcnow() - before > datetime.timedelta(seconds=seconds)
-
-
-def is_newer_than(after, seconds):
- """Return True if after is newer than seconds."""
- if isinstance(after, six.string_types):
- after = parse_strtime(after).replace(tzinfo=None)
- else:
- after = after.replace(tzinfo=None)
-
- return after - utcnow() > datetime.timedelta(seconds=seconds)
-
-
-def utcnow_ts():
- """Timestamp version of our utcnow function."""
- if utcnow.override_time is None:
- # NOTE(kgriffs): This is several times faster
- # than going through calendar.timegm(...)
- return int(time.time())
-
- return calendar.timegm(utcnow().timetuple())
-
-
-def utcnow():
- """Overridable version of utils.utcnow."""
- if utcnow.override_time:
- try:
- return utcnow.override_time.pop(0)
- except AttributeError:
- return utcnow.override_time
- return datetime.datetime.utcnow()
-
-
-def iso8601_from_timestamp(timestamp):
- """Returns an iso8601 formatted date from timestamp."""
- return isotime(datetime.datetime.utcfromtimestamp(timestamp))
-
-
-utcnow.override_time = None
-
-
-def set_time_override(override_time=None):
- """Overrides utils.utcnow.
-
- Make it return a constant time or a list thereof, one at a time.
-
- :param override_time: datetime instance or list thereof. If not
- given, defaults to the current UTC time.
- """
- utcnow.override_time = override_time or datetime.datetime.utcnow()
-
-
-def advance_time_delta(timedelta):
- """Advance overridden time using a datetime.timedelta."""
- assert utcnow.override_time is not None
- try:
- for dt in utcnow.override_time:
- dt += timedelta
- except TypeError:
- utcnow.override_time += timedelta
-
-
-def advance_time_seconds(seconds):
- """Advance overridden time by seconds."""
- advance_time_delta(datetime.timedelta(0, seconds))
-
-
-def clear_time_override():
- """Remove the overridden time."""
- utcnow.override_time = None
-
-
-def marshall_now(now=None):
- """Make an rpc-safe datetime with microseconds.
-
- Note: tzinfo is stripped, but not required for relative times.
- """
- if not now:
- now = utcnow()
- return dict(day=now.day, month=now.month, year=now.year, hour=now.hour,
- minute=now.minute, second=now.second,
- microsecond=now.microsecond)
-
-
-def unmarshall_time(tyme):
- """Unmarshall a datetime dict."""
- return datetime.datetime(day=tyme['day'],
- month=tyme['month'],
- year=tyme['year'],
- hour=tyme['hour'],
- minute=tyme['minute'],
- second=tyme['second'],
- microsecond=tyme['microsecond'])
-
-
-def delta_seconds(before, after):
- """Return the difference between two timing objects.
-
- Compute the difference in seconds between two date, time, or
- datetime objects (as a float, to microsecond resolution).
- """
- delta = after - before
- return total_seconds(delta)
-
-
-def total_seconds(delta):
- """Return the total seconds of datetime.timedelta object.
-
- Compute total seconds of datetime.timedelta, datetime.timedelta
- doesn't have method total_seconds in Python2.6, calculate it manually.
- """
- try:
- return delta.total_seconds()
- except AttributeError:
- return ((delta.days * 24 * 3600) + delta.seconds +
- float(delta.microseconds) / (10 ** 6))
-
-
-def is_soon(dt, window):
- """Determines if time is going to happen in the next window seconds.
-
- :param dt: the time
- :param window: minimum seconds to remain to consider the time not soon
-
- :return: True if expiration is within the given duration
- """
- soon = (utcnow() + datetime.timedelta(seconds=window))
- return normalize_time(dt) <= soon
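
The helpers deleted above are assumed to be available unchanged from oslo.utils (>= 0.3.0, per the new requirements files), so callers only need to swap the import. A minimal usage sketch, not part of this commit, covering the ISO 8601 helpers, delta_seconds() and the test-time override functions:

import datetime

from oslo.utils import timeutils

# ISO 8601 formatting and parsing, same semantics as the removed copy.
now = timeutils.utcnow()
stamp = timeutils.isotime(now)
assert timeutils.parse_isotime(stamp) is not None

# Difference between two datetimes as a float number of seconds.
later = now + datetime.timedelta(seconds=90)
assert timeutils.delta_seconds(now, later) == 90.0

# Test-only time override, matching the set_time_override() /
# advance_time_seconds() / clear_time_override() trio removed above.
timeutils.set_time_override(now)
timeutils.advance_time_seconds(10)
assert timeutils.utcnow() == now + datetime.timedelta(seconds=10)
timeutils.clear_time_override()
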
diff --git a/taskflow/persistence/backends/impl_dir.py b/taskflow/persistence/backends/impl_dir.py
index 9ce4a32..f469a1d 100644
--- a/taskflow/persistence/backends/impl_dir.py
+++ b/taskflow/persistence/backends/impl_dir.py
@@ -20,10 +20,10 @@ import logging
import os
import shutil
+from oslo.serialization import jsonutils
import six
from taskflow import exceptions as exc
-from taskflow.openstack.common import jsonutils
from taskflow.persistence.backends import base
from taskflow.persistence import logbook
from taskflow.utils import lock_utils
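
oslo.serialization's jsonutils is treated here as a drop-in for the removed incubator module. A small round-trip sketch (the dictionary below is made up for illustration):

from oslo.serialization import jsonutils

details = {'name': 'my-flow', 'meta': {'retries': 2}}

# dumps()/loads() mirror the stdlib json API, with extra handling for
# types such as datetimes that the persistence layer may store.
blob = jsonutils.dumps(details)
assert jsonutils.loads(blob) == details
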
diff --git a/taskflow/persistence/backends/impl_sqlalchemy.py b/taskflow/persistence/backends/impl_sqlalchemy.py
index 1dc008e..587d4d2 100644
--- a/taskflow/persistence/backends/impl_sqlalchemy.py
+++ b/taskflow/persistence/backends/impl_sqlalchemy.py
@@ -25,6 +25,7 @@ import functools
import logging
import time
+from oslo.utils import strutils
import six
import sqlalchemy as sa
from sqlalchemy import exc as sa_exc
@@ -32,7 +33,6 @@ from sqlalchemy import orm as sa_orm
from sqlalchemy import pool as sa_pool
from taskflow import exceptions as exc
-from taskflow.openstack.common import strutils
from taskflow.persistence.backends import base
from taskflow.persistence.backends.sqlalchemy import migration
from taskflow.persistence.backends.sqlalchemy import models
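
The strutils import above is typically used for coercing string-valued configuration options; a sketch of the oslo.utils version (the configuration keys are hypothetical, not taken from this backend):

from oslo.utils import strutils

conf = {'echo': 'true', 'pool_timeout': '30'}

# bool_from_string() accepts the usual truthy spellings ('1', 't', 'yes',
# 'true', ...) and returns a plain bool.
assert strutils.bool_from_string(conf['echo']) is True

# With strict=True an unrecognized value raises ValueError instead of
# silently falling back to the default.
try:
    strutils.bool_from_string('not-a-bool', strict=True)
except ValueError:
    pass
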
diff --git a/taskflow/persistence/backends/impl_zookeeper.py b/taskflow/persistence/backends/impl_zookeeper.py
index e60bad8..948c54d 100644
--- a/taskflow/persistence/backends/impl_zookeeper.py
+++ b/taskflow/persistence/backends/impl_zookeeper.py
@@ -19,9 +19,9 @@ import logging
from kazoo import exceptions as k_exc
from kazoo.protocol import paths
+from oslo.serialization import jsonutils
from taskflow import exceptions as exc
-from taskflow.openstack.common import jsonutils
from taskflow.persistence.backends import base
from taskflow.persistence import logbook
from taskflow.utils import kazoo_utils as k_utils
diff --git a/taskflow/persistence/backends/sqlalchemy/models.py b/taskflow/persistence/backends/sqlalchemy/models.py
index 4a78c5c..47b8c83 100644
--- a/taskflow/persistence/backends/sqlalchemy/models.py
+++ b/taskflow/persistence/backends/sqlalchemy/models.py
@@ -15,6 +15,8 @@
# License for the specific language governing permissions and limitations
# under the License.
+from oslo.serialization import jsonutils
+from oslo.utils import timeutils
from sqlalchemy import Column, String, DateTime, Enum
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import ForeignKey
@@ -22,8 +24,6 @@ from sqlalchemy.orm import backref
from sqlalchemy.orm import relationship
from sqlalchemy import types as types
-from taskflow.openstack.common import jsonutils
-from taskflow.openstack.common import timeutils
from taskflow.openstack.common import uuidutils
from taskflow.persistence import logbook
from taskflow import states
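
With both new imports in place, a declarative model can serialize dict columns through jsonutils and default its timestamps with timeutils.utcnow. A simplified sketch (class and column names are illustrative, not the ones defined in models.py):

from oslo.serialization import jsonutils
from oslo.utils import timeutils
from sqlalchemy import Column, DateTime, String, Text
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import types

BASE = declarative_base()


class Json(types.TypeDecorator):
    """Stores a dict or list as a JSON-encoded text column."""
    impl = Text

    def process_bind_param(self, value, dialect):
        return None if value is None else jsonutils.dumps(value)

    def process_result_value(self, value, dialect):
        return None if value is None else jsonutils.loads(value)


class ExampleDetail(BASE):
    __tablename__ = 'example_details'
    name = Column(String(255), primary_key=True)
    meta = Column(Json)
    created_at = Column(DateTime, default=timeutils.utcnow)
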
diff --git a/taskflow/persistence/logbook.py b/taskflow/persistence/logbook.py
index 12c6c99..974d846 100644
--- a/taskflow/persistence/logbook.py
+++ b/taskflow/persistence/logbook.py
@@ -19,10 +19,10 @@ import abc
import copy
import logging
+from oslo.utils import timeutils
import six
from taskflow import exceptions as exc
-from taskflow.openstack.common import timeutils
from taskflow.openstack.common import uuidutils
from taskflow import states
from taskflow.utils import misc
diff --git a/taskflow/tests/unit/jobs/test_zk_job.py b/taskflow/tests/unit/jobs/test_zk_job.py
index 7268a1a..5a536f9 100644
--- a/taskflow/tests/unit/jobs/test_zk_job.py
+++ b/taskflow/tests/unit/jobs/test_zk_job.py
@@ -14,13 +14,13 @@
# License for the specific language governing permissions and limitations
# under the License.
+from oslo.serialization import jsonutils
import six
import testtools
from zake import fake_client
from zake import utils as zake_utils
from taskflow.jobs.backends import impl_zookeeper
-from taskflow.openstack.common import jsonutils
from taskflow.openstack.common import uuidutils
from taskflow import states
from taskflow import test
diff --git a/taskflow/tests/unit/test_engine_helpers.py b/taskflow/tests/unit/test_engine_helpers.py
index 30ff51c..fbf1756 100644
--- a/taskflow/tests/unit/test_engine_helpers.py
+++ b/taskflow/tests/unit/test_engine_helpers.py
@@ -69,7 +69,7 @@ class FlowFromDetailTestCase(test.TestCase):
_lb, flow_detail = p_utils.temporary_flow_detail()
flow_detail.meta = dict(factory=dict(name=name))
- with mock.patch('taskflow.openstack.common.importutils.import_class',
+ with mock.patch('oslo.utils.importutils.import_class',
return_value=lambda: 'RESULT') as mock_import:
result = taskflow.engines.flow_from_detail(flow_detail)
mock_import.assert_called_once_with(name)
@@ -80,7 +80,7 @@ class FlowFromDetailTestCase(test.TestCase):
_lb, flow_detail = p_utils.temporary_flow_detail()
flow_detail.meta = dict(factory=dict(name=name, args=['foo']))
- with mock.patch('taskflow.openstack.common.importutils.import_class',
+ with mock.patch('oslo.utils.importutils.import_class',
return_value=lambda x: 'RESULT %s' % x) as mock_import:
result = taskflow.engines.flow_from_detail(flow_detail)
mock_import.assert_called_once_with(name)
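
Because taskflow now resolves the factory through oslo.utils, the mock target has to name that module; patching the old incubator path would leave the real import_class in place. A simplified sketch of the pattern (load_factory and the dotted name are hypothetical stand-ins for the production code path):

import mock

from oslo.utils import importutils


def load_factory(name):
    # Stand-in for the code under test, which resolves a factory by name
    # and then calls it.
    return importutils.import_class(name)()


with mock.patch('oslo.utils.importutils.import_class',
                return_value=lambda: 'RESULT') as mock_import:
    assert load_factory('some.module.factory') == 'RESULT'
    mock_import.assert_called_once_with('some.module.factory')
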
diff --git a/taskflow/tests/unit/worker_based/test_executor.py b/taskflow/tests/unit/worker_based/test_executor.py
index e6c97e1..cc184df 100644
--- a/taskflow/tests/unit/worker_based/test_executor.py
+++ b/taskflow/tests/unit/worker_based/test_executor.py
@@ -19,10 +19,10 @@ import time
from concurrent import futures
import mock
+from oslo.utils import timeutils
from taskflow.engines.worker_based import executor
from taskflow.engines.worker_based import protocol as pr
-from taskflow.openstack.common import timeutils
from taskflow import test
from taskflow.tests import utils
from taskflow.utils import misc
diff --git a/taskflow/utils/misc.py b/taskflow/utils/misc.py
index 3984621..f3cb44f 100644
--- a/taskflow/utils/misc.py
+++ b/taskflow/utils/misc.py
@@ -30,12 +30,12 @@ import sys
import time
import traceback
+from oslo.serialization import jsonutils
+from oslo.utils import netutils
import six
from six.moves.urllib import parse as urlparse
from taskflow import exceptions as exc
-from taskflow.openstack.common import jsonutils
-from taskflow.openstack.common import network_utils
from taskflow.utils import reflection
@@ -82,7 +82,7 @@ def parse_uri(uri, query_duplicates=False):
if not match:
raise ValueError("Uri %r does not start with a RFC 3986 compliant"
" scheme" % (uri))
- parsed = network_utils.urlsplit(uri)
+ parsed = netutils.urlsplit(uri)
if parsed.query:
query_params = urlparse.parse_qsl(parsed.query)
if not query_duplicates:
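
netutils.urlsplit() is assumed to behave like the removed network_utils.urlsplit(): a urlsplit-style result with reliable hostname/port handling. A short sketch (the URI is made up for illustration):

from oslo.utils import netutils

parsed = netutils.urlsplit('mysql://user:secret@localhost:3306/taskflow?echo=true')
assert parsed.scheme == 'mysql'
assert parsed.hostname == 'localhost'
assert parsed.port == 3306
assert parsed.query == 'echo=true'
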
diff --git a/taskflow/utils/persistence_utils.py b/taskflow/utils/persistence_utils.py
index e3c4ba3..dbcdac2 100644
--- a/taskflow/utils/persistence_utils.py
+++ b/taskflow/utils/persistence_utils.py
@@ -17,7 +17,8 @@
import contextlib
import logging
-from taskflow.openstack.common import timeutils
+from oslo.utils import timeutils
+
from taskflow.openstack.common import uuidutils
from taskflow.persistence import logbook
from taskflow.utils import misc
diff --git a/taskflow/utils/reflection.py b/taskflow/utils/reflection.py
index bc5a322..34793e0 100644
--- a/taskflow/utils/reflection.py
+++ b/taskflow/utils/reflection.py
@@ -17,10 +17,9 @@
import inspect
import types
+from oslo.utils import importutils
import six
-from taskflow.openstack.common import importutils
-
try:
_TYPE_TYPE = types.TypeType
except AttributeError: