summaryrefslogtreecommitdiff
path: root/saharaclient
diff options
context:
space:
mode:
Diffstat (limited to 'saharaclient')
-rw-r--r--saharaclient/_i18n.py2
-rw-r--r--saharaclient/api/base.py4
-rw-r--r--saharaclient/api/node_group_templates.py4
-rw-r--r--saharaclient/osc/v1/cluster_templates.py84
-rw-r--r--saharaclient/osc/v1/clusters.py7
-rw-r--r--saharaclient/osc/v1/node_group_templates.py10
-rw-r--r--saharaclient/osc/v1/plugins.py3
-rw-r--r--saharaclient/osc/v1/utils.py2
-rw-r--r--saharaclient/tests/hacking/__init__.py0
-rw-r--r--saharaclient/tests/hacking/checks.py138
-rw-r--r--saharaclient/tests/hacking/commit_message.py95
-rw-r--r--saharaclient/tests/hacking/import_checks.py450
-rw-r--r--saharaclient/tests/hacking/logging_checks.py64
-rw-r--r--saharaclient/tests/unit/base.py5
-rw-r--r--saharaclient/tests/unit/osc/v1/test_clusters.py1
-rw-r--r--saharaclient/tests/unit/osc/v1/test_data_sources.py1
-rw-r--r--saharaclient/tests/unit/osc/v1/test_images.py1
-rw-r--r--saharaclient/tests/unit/osc/v1/test_job_binaries.py2
-rw-r--r--saharaclient/tests/unit/osc/v1/test_job_templates.py1
-rw-r--r--saharaclient/tests/unit/osc/v1/test_job_types.py1
-rw-r--r--saharaclient/tests/unit/osc/v1/test_jobs.py1
-rw-r--r--saharaclient/tests/unit/osc/v1/test_node_group_templates.py8
-rw-r--r--saharaclient/tests/unit/osc/v1/test_plugins.py2
-rw-r--r--saharaclient/tests/unit/test_cluster_templates.py2
-rw-r--r--saharaclient/tests/unit/test_clusters.py2
-rw-r--r--saharaclient/tests/unit/test_data_sources.py2
-rw-r--r--saharaclient/tests/unit/test_hacking.py71
-rw-r--r--saharaclient/tests/unit/test_images.py2
-rw-r--r--saharaclient/tests/unit/test_job_binaries.py2
-rw-r--r--saharaclient/tests/unit/test_job_binary_internals.py2
-rw-r--r--saharaclient/tests/unit/test_job_executions.py2
-rw-r--r--saharaclient/tests/unit/test_jobs.py2
-rw-r--r--saharaclient/tests/unit/test_node_group_templates.py11
33 files changed, 949 insertions, 35 deletions
diff --git a/saharaclient/_i18n.py b/saharaclient/_i18n.py
index ac80de5..635d806 100644
--- a/saharaclient/_i18n.py
+++ b/saharaclient/_i18n.py
@@ -11,7 +11,7 @@
# under the License.
"""oslo.i18n integration module.
-See http://docs.openstack.org/developer/oslo.i18n/usage.html
+See https://docs.openstack.org/oslo.i18n/latest/user/usage.html
"""
import oslo_i18n
diff --git a/saharaclient/api/base.py b/saharaclient/api/base.py
index 38a2848..2d04716 100644
--- a/saharaclient/api/base.py
+++ b/saharaclient/api/base.py
@@ -14,8 +14,8 @@
# limitations under the License.
import copy
-import json
+from oslo_serialization import jsonutils
from six.moves.urllib import parse
from saharaclient._i18n import _
@@ -236,7 +236,7 @@ def get_json(response):
if callable(json_field_or_function):
return response.json()
else:
- return json.loads(response.content)
+ return jsonutils.loads(response.content)
class APIException(Exception):
diff --git a/saharaclient/api/node_group_templates.py b/saharaclient/api/node_group_templates.py
index 90c1b4d..0eaf80b 100644
--- a/saharaclient/api/node_group_templates.py
+++ b/saharaclient/api/node_group_templates.py
@@ -127,3 +127,7 @@ class NodeGroupTemplateManager(base.ResourceManager):
def delete(self, ng_template_id):
"""Delete a Node Group Template."""
self._delete('/node-group-templates/%s' % ng_template_id)
+
+ def export(self, ng_template_id):
+ """Export a Node Group Template."""
+ return self._get('/node-group-templates/%s/export' % ng_template_id)
diff --git a/saharaclient/osc/v1/cluster_templates.py b/saharaclient/osc/v1/cluster_templates.py
index 6d9491e..e3154df 100644
--- a/saharaclient/osc/v1/cluster_templates.py
+++ b/saharaclient/osc/v1/cluster_templates.py
@@ -13,13 +13,13 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import json
import sys
from osc_lib.command import command
from osc_lib import exceptions
from osc_lib import utils as osc_utils
from oslo_log import log as logging
+from oslo_serialization import jsonutils as json
from saharaclient.osc.v1 import utils
@@ -506,3 +506,85 @@ class UpdateClusterTemplate(command.ShowOne):
data = utils.prepare_data(data, CT_FIELDS)
return self.dict2columns(data)
+
+
+class ImportClusterTemplate(command.ShowOne):
+    """Imports cluster template"""
+
+ log = logging.getLogger(__name__ + ".ImportClusterTemplate")
+
+ def get_parser(self, prog_name):
+ parser = super(ImportClusterTemplate, self).get_parser(prog_name)
+
+ parser.add_argument(
+ 'json',
+ metavar="<json>",
+ help="JSON containing cluster template",
+ )
+ parser.add_argument(
+ '--name',
+ metavar="<name>",
+ help="Name of the cluster template",
+ )
+ parser.add_argument(
+ '--default-image-id',
+ metavar="<default_image_id>",
+ help="Default image ID to be used",
+ )
+ parser.add_argument(
+ '--node-groups',
+ metavar="<node-group:instances_count>",
+ nargs="+",
+ required=True,
+        help="List of the node groups (names or IDs) and numbers of "
+ "instances for each one of them"
+ )
+ return parser
+
+ def take_action(self, parsed_args):
+ self.log.debug("take_action(%s)", parsed_args)
+ client = self.app.client_manager.data_processing
+
+ if (not parsed_args.node_groups):
+            raise exceptions.CommandError('--node-groups should be specified')
+
+ blob = osc_utils.read_blob_file_contents(parsed_args.json)
+ try:
+ template = json.loads(blob)
+ except ValueError as e:
+ raise exceptions.CommandError(
+ 'An error occurred when reading '
+ 'template from file %s: %s' % (parsed_args.json, e))
+
+ if parsed_args.default_image_id:
+ template['cluster_template']['default_image_id'] = (
+ parsed_args.default_image_id)
+ else:
+ template['cluster_template']['default_image_id'] = None
+
+ if parsed_args.name:
+ template['cluster_template']['name'] = parsed_args.name
+
+ if 'neutron_management_network' in template['cluster_template']:
+ template['cluster_template']['net_id'] = (
+ template['cluster_template'].pop('neutron_management_network'))
+
+ plugin, plugin_version, node_groups = _configure_node_groups(
+ parsed_args.node_groups, client)
+ if (('plugin_version' in template['cluster_template'] and
+ template['cluster_template']['plugin_version'] !=
+ plugin_version) or
+ ('plugin' in template['cluster_template'] and
+ template['cluster_template']['plugin'] != plugin)):
+ raise exceptions.CommandError(
+                'Plugin or plugin version do not match between template '
+ 'and given node group templates')
+ template['cluster_template']['node_groups'] = node_groups
+
+ data = client.cluster_templates.create(
+ **template['cluster_template']).to_dict()
+
+ _format_ct_output(data)
+ data = utils.prepare_data(data, CT_FIELDS)
+
+ return self.dict2columns(data)
diff --git a/saharaclient/osc/v1/clusters.py b/saharaclient/osc/v1/clusters.py
index b156a3d..72355a2 100644
--- a/saharaclient/osc/v1/clusters.py
+++ b/saharaclient/osc/v1/clusters.py
@@ -13,7 +13,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import json
import sys
from osc_lib.command import command
@@ -149,7 +148,7 @@ class CreateCluster(command.ShowOne):
if parsed_args.json:
blob = osc_utils.read_blob_file_contents(parsed_args.json)
try:
- template = json.loads(blob)
+ template = jsonutils.loads(blob)
except ValueError as e:
raise exceptions.CommandError(
'An error occurred when reading '
@@ -489,7 +488,7 @@ class UpdateCluster(command.ShowOne):
if parsed_args.shares:
blob = osc_utils.read_blob_file_contents(parsed_args.shares)
try:
- shares = json.loads(blob)
+ shares = jsonutils.loads(blob)
except ValueError as e:
raise exceptions.CommandError(
'An error occurred when reading '
@@ -556,7 +555,7 @@ class ScaleCluster(command.ShowOne):
if parsed_args.json:
blob = osc_utils.read_blob_file_contents(parsed_args.json)
try:
- template = json.loads(blob)
+ template = jsonutils.loads(blob)
except ValueError as e:
raise exceptions.CommandError(
'An error occurred when reading '
diff --git a/saharaclient/osc/v1/node_group_templates.py b/saharaclient/osc/v1/node_group_templates.py
index f65f993..3b0c7dd 100644
--- a/saharaclient/osc/v1/node_group_templates.py
+++ b/saharaclient/osc/v1/node_group_templates.py
@@ -13,13 +13,13 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import json
import sys
from osc_lib.command import command
from osc_lib import exceptions
from osc_lib import utils as osc_utils
from oslo_log import log as logging
+from oslo_serialization import jsonutils as json
from saharaclient.osc.v1 import utils
@@ -265,7 +265,9 @@ class CreateNodeGroupTemplate(command.ShowOne):
is_protected=parsed_args.protected,
node_configs=configs,
shares=shares,
- volumes_availability_zone=parsed_args.volumes_availability_zone
+ volumes_availability_zone=(
+ parsed_args.volumes_availability_zone),
+ volume_mount_prefix=parsed_args.volumes_mount_prefix
).to_dict()
_format_ngt_output(data)
@@ -679,7 +681,9 @@ class UpdateNodeGroupTemplate(command.ShowOne):
is_protected=parsed_args.is_protected,
node_configs=configs,
shares=shares,
- volumes_availability_zone=parsed_args.volumes_availability_zone
+ volumes_availability_zone=(
+ parsed_args.volumes_availability_zone),
+ volume_mount_prefix=parsed_args.volumes_mount_prefix
)
data = client.node_group_templates.update(
diff --git a/saharaclient/osc/v1/plugins.py b/saharaclient/osc/v1/plugins.py
index 0ff3fa3..8b10420 100644
--- a/saharaclient/osc/v1/plugins.py
+++ b/saharaclient/osc/v1/plugins.py
@@ -13,7 +13,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import json
from os import path
import sys
@@ -206,7 +205,7 @@ class UpdatePlugin(command.ShowOne):
client = self.app.client_manager.data_processing
blob = osc_utils.read_blob_file_contents(parsed_args.json)
try:
- update_dict = json.loads(blob)
+ update_dict = jsonutils.loads(blob)
except ValueError as e:
raise exceptions.CommandError(
'An error occurred when reading '
diff --git a/saharaclient/osc/v1/utils.py b/saharaclient/osc/v1/utils.py
index 714d96a..973f385 100644
--- a/saharaclient/osc/v1/utils.py
+++ b/saharaclient/osc/v1/utils.py
@@ -52,7 +52,7 @@ def get_resource_id(manager, name_or_id):
def create_dict_from_kwargs(**kwargs):
- return dict((k, v) for (k, v) in kwargs.items() if v is not None)
+ return {k: v for (k, v) in kwargs.items() if v is not None}
def prepare_data(data, fields):
diff --git a/saharaclient/tests/hacking/__init__.py b/saharaclient/tests/hacking/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/saharaclient/tests/hacking/__init__.py
diff --git a/saharaclient/tests/hacking/checks.py b/saharaclient/tests/hacking/checks.py
new file mode 100644
index 0000000..23d0019
--- /dev/null
+++ b/saharaclient/tests/hacking/checks.py
@@ -0,0 +1,138 @@
+# Copyright (c) 2013 Mirantis Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import re
+import tokenize
+
+import pep8
+
+from saharaclient.tests.hacking import commit_message
+from saharaclient.tests.hacking import import_checks
+from saharaclient.tests.hacking import logging_checks
+
+RE_OSLO_IMPORTS = (re.compile(r"(((from)|(import))\s+oslo\.)"),
+ re.compile(r"(from\s+oslo\s+import)"))
+RE_DICT_CONSTRUCTOR_WITH_LIST_COPY = re.compile(r".*\bdict\((\[)?(\(|\[)")
+RE_USE_JSONUTILS_INVALID_LINE = re.compile(r"(import\s+json)")
+RE_USE_JSONUTILS_VALID_LINE = re.compile(r"(import\s+jsonschema)")
+RE_MUTABLE_DEFAULT_ARGS = re.compile(r"^\s*def .+\((.+=\{\}|.+=\[\])")
+
+
+def _starts_with_any(line, *prefixes):
+ for prefix in prefixes:
+ if line.startswith(prefix):
+ return True
+ return False
+
+
+def _any_in(line, *sublines):
+ for subline in sublines:
+ if subline in line:
+ return True
+ return False
+
+
+def import_db_only_in_conductor(logical_line, filename):
+ """Check that db calls are only in conductor module and in tests.
+
+ S361
+ """
+ if _any_in(filename,
+ "sahara/conductor",
+ "sahara/tests",
+ "sahara/db"):
+ return
+
+ if _starts_with_any(logical_line,
+ "from sahara import db",
+ "from sahara.db",
+ "import sahara.db"):
+ yield (0, "S361: sahara.db import only allowed in "
+ "sahara/conductor/*")
+
+
+def hacking_no_author_attr(logical_line, tokens):
+ """__author__ should not be used.
+
+ S362: __author__ = slukjanov
+ """
+ for token_type, text, start_index, _, _ in tokens:
+ if token_type == tokenize.NAME and text == "__author__":
+ yield (start_index[1],
+ "S362: __author__ should not be used")
+
+
+def check_oslo_namespace_imports(logical_line):
+ """Check to prevent old oslo namespace usage.
+
+ S363
+ """
+ if re.match(RE_OSLO_IMPORTS[0], logical_line):
+ yield(0, "S363: '%s' must be used instead of '%s'." % (
+ logical_line.replace('oslo.', 'oslo_'),
+ logical_line))
+
+ if re.match(RE_OSLO_IMPORTS[1], logical_line):
+ yield(0, "S363: '%s' must be used instead of '%s'" % (
+ 'import oslo_%s' % logical_line.split()[-1],
+ logical_line))
+
+
+def dict_constructor_with_list_copy(logical_line):
+ """Check to prevent dict constructor with a sequence of key-value pairs.
+
+ S368
+ """
+ if RE_DICT_CONSTRUCTOR_WITH_LIST_COPY.match(logical_line):
+ yield (0, 'S368: Must use a dict comprehension instead of a dict '
+ 'constructor with a sequence of key-value pairs.')
+
+
+def use_jsonutils(logical_line, filename):
+ """Check to prevent importing json in sahara code.
+
+ S375
+ """
+ if pep8.noqa(logical_line):
+ return
+ if (RE_USE_JSONUTILS_INVALID_LINE.match(logical_line) and
+ not RE_USE_JSONUTILS_VALID_LINE.match(logical_line)):
+ yield(0, "S375: Use jsonutils from oslo_serialization instead"
+ " of json")
+
+
+def no_mutable_default_args(logical_line):
+ """Check to prevent mutable default argument in sahara code.
+
+ S360
+ """
+ msg = "S360: Method's default argument shouldn't be mutable!"
+ if RE_MUTABLE_DEFAULT_ARGS.match(logical_line):
+ yield (0, msg)
+
+
+def factory(register):
+ register(import_db_only_in_conductor)
+ register(hacking_no_author_attr)
+ register(check_oslo_namespace_imports)
+ register(commit_message.OnceGitCheckCommitTitleBug)
+ register(commit_message.OnceGitCheckCommitTitleLength)
+ register(import_checks.hacking_import_groups)
+ register(import_checks.hacking_import_groups_together)
+ register(dict_constructor_with_list_copy)
+ register(logging_checks.no_translate_logs)
+ register(logging_checks.accepted_log_levels)
+ register(use_jsonutils)
+ register(no_mutable_default_args)
diff --git a/saharaclient/tests/hacking/commit_message.py b/saharaclient/tests/hacking/commit_message.py
new file mode 100644
index 0000000..173c173
--- /dev/null
+++ b/saharaclient/tests/hacking/commit_message.py
@@ -0,0 +1,95 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import os
+import re
+import subprocess # nosec
+
+from hacking import core
+
+
+class GitCheck(core.GlobalCheck):
+ """Base-class for Git related checks."""
+
+ def _get_commit_title(self):
+ # Check if we're inside a git checkout
+ try:
+ subp = subprocess.Popen( # nosec
+ ['git', 'rev-parse', '--show-toplevel'],
+ stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ gitdir = subp.communicate()[0].rstrip()
+ except OSError:
+ # "git" was not found
+ return None
+
+ if not os.path.exists(gitdir):
+ return None
+
+ # Get title of most recent commit
+ subp = subprocess.Popen( # nosec
+ ['git', 'log', '--no-merges', '--pretty=%s', '-1'],
+ stdout=subprocess.PIPE)
+ title = subp.communicate()[0]
+
+ if subp.returncode:
+ raise Exception("git log failed with code %s" % subp.returncode)
+ return title.decode('utf-8')
+
+
+class OnceGitCheckCommitTitleBug(GitCheck):
+ """Check git commit messages for bugs.
+
+ OpenStack HACKING recommends not referencing a bug or blueprint in first
+ line. It should provide an accurate description of the change
+ S364
+ """
+ name = "GitCheckCommitTitleBug"
+
+ # From https://github.com/openstack/openstack-ci-puppet
+ # /blob/master/modules/gerrit/manifests/init.pp#L74
+ # Changeid|bug|blueprint
+ GIT_REGEX = re.compile(
+ r'(I[0-9a-f]{8,40})|'
+        r'([Bb]ug|[Ll][Pp])[\s\#:]*(\d+)|'
+        r'([Bb]lue[Pp]rint|[Bb][Pp])[\s\#:]*([A-Za-z0-9\-]+)')
+
+ def run_once(self):
+ title = self._get_commit_title()
+
+ # NOTE(jogo) if match regex but over 3 words, acceptable title
+ if (title and self.GIT_REGEX.search(title) is not None
+ and len(title.split()) <= 3):
+ return (1, 0,
+ "S364: git commit title ('%s') should provide an accurate "
+ "description of the change, not just a reference to a bug "
+ "or blueprint" % title.strip(), self.name)
+
+
+class OnceGitCheckCommitTitleLength(GitCheck):
+ """Check git commit message length.
+
+ HACKING recommends commit titles 50 chars or less, but enforces
+ a 72 character limit
+
+ S365 Title limited to 72 chars
+ """
+ name = "GitCheckCommitTitleLength"
+
+ def run_once(self):
+ title = self._get_commit_title()
+
+ if title and len(title) > 72:
+ return (
+ 1, 0,
+ "S365: git commit title ('%s') should be under 50 chars"
+ % title.strip(),
+ self.name)
diff --git a/saharaclient/tests/hacking/import_checks.py b/saharaclient/tests/hacking/import_checks.py
new file mode 100644
index 0000000..6de3834
--- /dev/null
+++ b/saharaclient/tests/hacking/import_checks.py
@@ -0,0 +1,450 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import imp
+
+from hacking import core
+
+
+# NOTE(Kezar): This checks a good enough if we have only py2.7 supported.
+# As soon as we'll get py3.x we need to drop it or rewrite. You can read more
+# about it in dev-list archive, topic: "[hacking]proposed rules drop for 1.0"
+def _find_module(module, path=None):
+ mod_base = module
+ parent_path = None
+ while '.' in mod_base:
+ first, _, mod_base = mod_base.partition('.')
+ parent_path = path
+ _, path, _ = imp.find_module(first, path)
+ path = [path]
+ try:
+ _, path, _ = imp.find_module(mod_base, path)
+ except ImportError:
+ # NOTE(bnemec): There are two reasons we might get here: 1) A
+ # non-module import and 2) an import of a namespace module that is
+ # in the same namespace as the current project, which caused us to
+ # recurse into the project namespace but fail to find the third-party
+ # module. For 1), we won't be able to import it as a module, so we
+ # return the parent module's path, but for 2) the import below should
+ # succeed, so we re-raise the ImportError because the module was
+ # legitimately not found in this path.
+ try:
+ __import__(module)
+ except ImportError:
+ # Non-module import, return the parent path if we have it
+ if parent_path:
+ return parent_path
+ raise
+ raise
+ return path
+
+module_cache = dict()
+
+# List of all Python 2 stdlib modules - anything not in this list will be
+# allowed in either the stdlib or third-party groups to allow for Python 3
+# stdlib additions.
+# The list was generated via the following script, which is a variation on
+# the one found here:
+# http://stackoverflow.com/questions/6463918/how-can-i-get-a-list-of-all-the-python-standard-library-modules
+"""
+from distutils import sysconfig
+import os
+import sys
+
+std_lib = sysconfig.get_python_lib(standard_lib=True)
+prefix_len = len(std_lib) + 1
+modules = ''
+line = '['
+mod_list = []
+for top, dirs, files in os.walk(std_lib):
+ for name in files:
+ if 'site-packages' not in top:
+ if name == '__init__.py':
+ full_name = top[prefix_len:].replace('/', '.')
+ mod_list.append(full_name)
+ elif name.endswith('.py'):
+ full_name = top.replace('/', '.') + '.'
+ full_name += name[:-3]
+ full_name = full_name[prefix_len:]
+ mod_list.append(full_name)
+ elif name.endswith('.so') and top.endswith('lib-dynload'):
+ full_name = name[:-3]
+ if full_name.endswith('module'):
+ full_name = full_name[:-6]
+ mod_list.append(full_name)
+for name in sys.builtin_module_names:
+ mod_list.append(name)
+mod_list.sort()
+for mod in mod_list:
+ if len(line + mod) + 8 > 79:
+ modules += '\n' + line
+ line = ' '
+ line += "'%s', " % mod
+print modules + ']'
+"""
+py2_stdlib = [
+ 'BaseHTTPServer', 'Bastion', 'CGIHTTPServer', 'ConfigParser', 'Cookie',
+ 'DocXMLRPCServer', 'HTMLParser', 'MimeWriter', 'Queue',
+ 'SimpleHTTPServer', 'SimpleXMLRPCServer', 'SocketServer', 'StringIO',
+ 'UserDict', 'UserList', 'UserString', '_LWPCookieJar',
+ '_MozillaCookieJar', '__builtin__', '__future__', '__main__',
+ '__phello__.foo', '_abcoll', '_ast', '_bisect', '_bsddb', '_codecs',
+ '_codecs_cn', '_codecs_hk', '_codecs_iso2022', '_codecs_jp',
+ '_codecs_kr', '_codecs_tw', '_collections', '_crypt', '_csv',
+ '_ctypes', '_curses', '_curses_panel', '_elementtree', '_functools',
+ '_hashlib', '_heapq', '_hotshot', '_io', '_json', '_locale',
+ '_lsprof', '_multibytecodec', '_multiprocessing', '_osx_support',
+ '_pyio', '_random', '_socket', '_sqlite3', '_sre', '_ssl',
+ '_strptime', '_struct', '_symtable', '_sysconfigdata',
+ '_threading_local', '_warnings', '_weakref', '_weakrefset', 'abc',
+ 'aifc', 'antigravity', 'anydbm', 'argparse', 'array', 'ast',
+ 'asynchat', 'asyncore', 'atexit', 'audiodev', 'audioop', 'base64',
+ 'bdb', 'binascii', 'binhex', 'bisect', 'bsddb', 'bsddb.db',
+ 'bsddb.dbobj', 'bsddb.dbrecio', 'bsddb.dbshelve', 'bsddb.dbtables',
+ 'bsddb.dbutils', 'bz2', 'cPickle', 'cProfile', 'cStringIO',
+ 'calendar', 'cgi', 'cgitb', 'chunk', 'cmath', 'cmd', 'code', 'codecs',
+ 'codeop', 'collections', 'colorsys', 'commands', 'compileall',
+ 'compiler', 'compiler.ast', 'compiler.consts', 'compiler.future',
+ 'compiler.misc', 'compiler.pyassem', 'compiler.pycodegen',
+ 'compiler.symbols', 'compiler.syntax', 'compiler.transformer',
+ 'compiler.visitor', 'contextlib', 'cookielib', 'copy', 'copy_reg',
+ 'crypt', 'csv', 'ctypes', 'ctypes._endian', 'ctypes.macholib',
+ 'ctypes.macholib.dyld', 'ctypes.macholib.dylib',
+ 'ctypes.macholib.framework', 'ctypes.util', 'ctypes.wintypes',
+ 'curses', 'curses.ascii', 'curses.has_key', 'curses.panel',
+ 'curses.textpad', 'curses.wrapper', 'datetime', 'dbhash', 'dbm',
+ 'decimal', 'difflib', 'dircache', 'dis', 'distutils',
+ 'distutils.archive_util', 'distutils.bcppcompiler',
+ 'distutils.ccompiler', 'distutils.cmd', 'distutils.command',
+ 'distutils.command.bdist', 'distutils.command.bdist_dumb',
+ 'distutils.command.bdist_msi', 'distutils.command.bdist_rpm',
+ 'distutils.command.bdist_wininst', 'distutils.command.build',
+ 'distutils.command.build_clib', 'distutils.command.build_ext',
+ 'distutils.command.build_py', 'distutils.command.build_scripts',
+ 'distutils.command.check', 'distutils.command.clean',
+ 'distutils.command.config', 'distutils.command.install',
+ 'distutils.command.install_data',
+ 'distutils.command.install_egg_info',
+ 'distutils.command.install_headers', 'distutils.command.install_lib',
+ 'distutils.command.install_scripts', 'distutils.command.register',
+ 'distutils.command.sdist', 'distutils.command.upload',
+ 'distutils.config', 'distutils.core', 'distutils.cygwinccompiler',
+ 'distutils.debug', 'distutils.dep_util', 'distutils.dir_util',
+ 'distutils.dist', 'distutils.emxccompiler', 'distutils.errors',
+ 'distutils.extension', 'distutils.fancy_getopt',
+ 'distutils.file_util', 'distutils.filelist', 'distutils.log',
+ 'distutils.msvc9compiler', 'distutils.msvccompiler',
+ 'distutils.spawn', 'distutils.sysconfig', 'distutils.text_file',
+ 'distutils.unixccompiler', 'distutils.util', 'distutils.version',
+ 'distutils.versionpredicate', 'dl', 'doctest', 'dumbdbm',
+ 'dummy_thread', 'dummy_threading', 'email', 'email._parseaddr',
+ 'email.base64mime', 'email.charset', 'email.encoders', 'email.errors',
+ 'email.feedparser', 'email.generator', 'email.header',
+ 'email.iterators', 'email.message', 'email.mime',
+ 'email.mime.application', 'email.mime.audio', 'email.mime.base',
+ 'email.mime.image', 'email.mime.message', 'email.mime.multipart',
+ 'email.mime.nonmultipart', 'email.mime.text', 'email.parser',
+ 'email.quoprimime', 'email.utils', 'encodings', 'encodings.aliases',
+ 'encodings.ascii', 'encodings.base64_codec', 'encodings.big5',
+ 'encodings.big5hkscs', 'encodings.bz2_codec', 'encodings.charmap',
+ 'encodings.cp037', 'encodings.cp1006', 'encodings.cp1026',
+ 'encodings.cp1140', 'encodings.cp1250', 'encodings.cp1251',
+ 'encodings.cp1252', 'encodings.cp1253', 'encodings.cp1254',
+ 'encodings.cp1255', 'encodings.cp1256', 'encodings.cp1257',
+ 'encodings.cp1258', 'encodings.cp424', 'encodings.cp437',
+ 'encodings.cp500', 'encodings.cp720', 'encodings.cp737',
+ 'encodings.cp775', 'encodings.cp850', 'encodings.cp852',
+ 'encodings.cp855', 'encodings.cp856', 'encodings.cp857',
+ 'encodings.cp858', 'encodings.cp860', 'encodings.cp861',
+ 'encodings.cp862', 'encodings.cp863', 'encodings.cp864',
+ 'encodings.cp865', 'encodings.cp866', 'encodings.cp869',
+ 'encodings.cp874', 'encodings.cp875', 'encodings.cp932',
+ 'encodings.cp949', 'encodings.cp950', 'encodings.euc_jis_2004',
+ 'encodings.euc_jisx0213', 'encodings.euc_jp', 'encodings.euc_kr',
+ 'encodings.gb18030', 'encodings.gb2312', 'encodings.gbk',
+ 'encodings.hex_codec', 'encodings.hp_roman8', 'encodings.hz',
+ 'encodings.idna', 'encodings.iso2022_jp', 'encodings.iso2022_jp_1',
+ 'encodings.iso2022_jp_2', 'encodings.iso2022_jp_2004',
+ 'encodings.iso2022_jp_3', 'encodings.iso2022_jp_ext',
+ 'encodings.iso2022_kr', 'encodings.iso8859_1', 'encodings.iso8859_10',
+ 'encodings.iso8859_11', 'encodings.iso8859_13',
+ 'encodings.iso8859_14', 'encodings.iso8859_15',
+ 'encodings.iso8859_16', 'encodings.iso8859_2', 'encodings.iso8859_3',
+ 'encodings.iso8859_4', 'encodings.iso8859_5', 'encodings.iso8859_6',
+ 'encodings.iso8859_7', 'encodings.iso8859_8', 'encodings.iso8859_9',
+ 'encodings.johab', 'encodings.koi8_r', 'encodings.koi8_u',
+ 'encodings.latin_1', 'encodings.mac_arabic', 'encodings.mac_centeuro',
+ 'encodings.mac_croatian', 'encodings.mac_cyrillic',
+ 'encodings.mac_farsi', 'encodings.mac_greek', 'encodings.mac_iceland',
+ 'encodings.mac_latin2', 'encodings.mac_roman',
+ 'encodings.mac_romanian', 'encodings.mac_turkish', 'encodings.mbcs',
+ 'encodings.palmos', 'encodings.ptcp154', 'encodings.punycode',
+ 'encodings.quopri_codec', 'encodings.raw_unicode_escape',
+ 'encodings.rot_13', 'encodings.shift_jis', 'encodings.shift_jis_2004',
+ 'encodings.shift_jisx0213', 'encodings.string_escape',
+ 'encodings.tis_620', 'encodings.undefined',
+ 'encodings.unicode_escape', 'encodings.unicode_internal',
+ 'encodings.utf_16', 'encodings.utf_16_be', 'encodings.utf_16_le',
+ 'encodings.utf_32', 'encodings.utf_32_be', 'encodings.utf_32_le',
+ 'encodings.utf_7', 'encodings.utf_8', 'encodings.utf_8_sig',
+ 'encodings.uu_codec', 'encodings.zlib_codec', 'errno', 'exceptions',
+ 'fcntl', 'filecmp', 'fileinput', 'fnmatch', 'formatter', 'fpformat',
+ 'fractions', 'ftplib', 'functools', 'future_builtins', 'gc', 'gdbm',
+ 'genericpath', 'getopt', 'getpass', 'gettext', 'glob', 'grp', 'gzip',
+ 'hashlib', 'heapq', 'hmac', 'hotshot', 'hotshot.log', 'hotshot.stats',
+ 'hotshot.stones', 'htmlentitydefs', 'htmllib', 'httplib', 'idlelib',
+ 'idlelib.AutoComplete', 'idlelib.AutoCompleteWindow',
+ 'idlelib.AutoExpand', 'idlelib.Bindings', 'idlelib.CallTipWindow',
+ 'idlelib.CallTips', 'idlelib.ClassBrowser', 'idlelib.CodeContext',
+ 'idlelib.ColorDelegator', 'idlelib.Debugger', 'idlelib.Delegator',
+ 'idlelib.EditorWindow', 'idlelib.FileList', 'idlelib.FormatParagraph',
+ 'idlelib.GrepDialog', 'idlelib.HyperParser', 'idlelib.IOBinding',
+ 'idlelib.IdleHistory', 'idlelib.MultiCall', 'idlelib.MultiStatusBar',
+ 'idlelib.ObjectBrowser', 'idlelib.OutputWindow', 'idlelib.ParenMatch',
+ 'idlelib.PathBrowser', 'idlelib.Percolator', 'idlelib.PyParse',
+ 'idlelib.PyShell', 'idlelib.RemoteDebugger',
+ 'idlelib.RemoteObjectBrowser', 'idlelib.ReplaceDialog',
+ 'idlelib.RstripExtension', 'idlelib.ScriptBinding',
+ 'idlelib.ScrolledList', 'idlelib.SearchDialog',
+ 'idlelib.SearchDialogBase', 'idlelib.SearchEngine',
+ 'idlelib.StackViewer', 'idlelib.ToolTip', 'idlelib.TreeWidget',
+ 'idlelib.UndoDelegator', 'idlelib.WidgetRedirector',
+ 'idlelib.WindowList', 'idlelib.ZoomHeight', 'idlelib.aboutDialog',
+ 'idlelib.configDialog', 'idlelib.configHandler',
+ 'idlelib.configHelpSourceEdit', 'idlelib.configSectionNameDialog',
+ 'idlelib.dynOptionMenuWidget', 'idlelib.idle', 'idlelib.idlever',
+ 'idlelib.keybindingDialog', 'idlelib.macosxSupport', 'idlelib.rpc',
+ 'idlelib.run', 'idlelib.tabbedpages', 'idlelib.textView', 'ihooks',
+ 'imageop', 'imaplib', 'imghdr', 'imp', 'importlib', 'imputil',
+ 'inspect', 'io', 'itertools', 'json', 'json.decoder', 'json.encoder',
+ 'json.scanner', 'json.tool', 'keyword', 'lib2to3', 'lib2to3.__main__',
+ 'lib2to3.btm_matcher', 'lib2to3.btm_utils', 'lib2to3.fixer_base',
+ 'lib2to3.fixer_util', 'lib2to3.fixes', 'lib2to3.fixes.fix_apply',
+ 'lib2to3.fixes.fix_basestring', 'lib2to3.fixes.fix_buffer',
+ 'lib2to3.fixes.fix_callable', 'lib2to3.fixes.fix_dict',
+ 'lib2to3.fixes.fix_except', 'lib2to3.fixes.fix_exec',
+ 'lib2to3.fixes.fix_execfile', 'lib2to3.fixes.fix_exitfunc',
+ 'lib2to3.fixes.fix_filter', 'lib2to3.fixes.fix_funcattrs',
+ 'lib2to3.fixes.fix_future', 'lib2to3.fixes.fix_getcwdu',
+ 'lib2to3.fixes.fix_has_key', 'lib2to3.fixes.fix_idioms',
+ 'lib2to3.fixes.fix_import', 'lib2to3.fixes.fix_imports',
+ 'lib2to3.fixes.fix_imports2', 'lib2to3.fixes.fix_input',
+ 'lib2to3.fixes.fix_intern', 'lib2to3.fixes.fix_isinstance',
+ 'lib2to3.fixes.fix_itertools', 'lib2to3.fixes.fix_itertools_imports',
+ 'lib2to3.fixes.fix_long', 'lib2to3.fixes.fix_map',
+ 'lib2to3.fixes.fix_metaclass', 'lib2to3.fixes.fix_methodattrs',
+ 'lib2to3.fixes.fix_ne', 'lib2to3.fixes.fix_next',
+ 'lib2to3.fixes.fix_nonzero', 'lib2to3.fixes.fix_numliterals',
+ 'lib2to3.fixes.fix_operator', 'lib2to3.fixes.fix_paren',
+ 'lib2to3.fixes.fix_print', 'lib2to3.fixes.fix_raise',
+ 'lib2to3.fixes.fix_raw_input', 'lib2to3.fixes.fix_reduce',
+ 'lib2to3.fixes.fix_renames', 'lib2to3.fixes.fix_repr',
+ 'lib2to3.fixes.fix_set_literal', 'lib2to3.fixes.fix_standarderror',
+ 'lib2to3.fixes.fix_sys_exc', 'lib2to3.fixes.fix_throw',
+ 'lib2to3.fixes.fix_tuple_params', 'lib2to3.fixes.fix_types',
+ 'lib2to3.fixes.fix_unicode', 'lib2to3.fixes.fix_urllib',
+ 'lib2to3.fixes.fix_ws_comma', 'lib2to3.fixes.fix_xrange',
+ 'lib2to3.fixes.fix_xreadlines', 'lib2to3.fixes.fix_zip',
+ 'lib2to3.main', 'lib2to3.patcomp', 'lib2to3.pgen2',
+ 'lib2to3.pgen2.conv', 'lib2to3.pgen2.driver', 'lib2to3.pgen2.grammar',
+ 'lib2to3.pgen2.literals', 'lib2to3.pgen2.parse', 'lib2to3.pgen2.pgen',
+ 'lib2to3.pgen2.token', 'lib2to3.pgen2.tokenize', 'lib2to3.pygram',
+ 'lib2to3.pytree', 'lib2to3.refactor', 'linecache', 'linuxaudiodev',
+ 'locale', 'logging', 'logging.config', 'logging.handlers', 'macpath',
+ 'macurl2path', 'mailbox', 'mailcap', 'markupbase', 'marshal', 'math',
+ 'md5', 'mhlib', 'mimetools', 'mimetypes', 'mimify', 'mmap',
+ 'modulefinder', 'multifile', 'multiprocessing',
+ 'multiprocessing.connection', 'multiprocessing.dummy',
+ 'multiprocessing.dummy.connection', 'multiprocessing.forking',
+ 'multiprocessing.heap', 'multiprocessing.managers',
+ 'multiprocessing.pool', 'multiprocessing.process',
+ 'multiprocessing.queues', 'multiprocessing.reduction',
+ 'multiprocessing.sharedctypes', 'multiprocessing.synchronize',
+ 'multiprocessing.util', 'mutex', 'netrc', 'new', 'nis', 'nntplib',
+ 'ntpath', 'nturl2path', 'numbers', 'opcode', 'operator', 'optparse',
+ 'os', 'os2emxpath', 'ossaudiodev', 'parser', 'pdb', 'pickle',
+ 'pickletools', 'pipes', 'pkgutil', 'plat-linux2.CDROM',
+ 'plat-linux2.DLFCN', 'plat-linux2.IN', 'plat-linux2.TYPES',
+ 'platform', 'plistlib', 'popen2', 'poplib', 'posix', 'posixfile',
+ 'posixpath', 'pprint', 'profile', 'pstats', 'pty', 'pwd',
+ 'py_compile', 'pyclbr', 'pydoc', 'pydoc_data', 'pydoc_data.topics',
+ 'pyexpat', 'quopri', 'random', 're', 'readline', 'repr', 'resource',
+ 'rexec', 'rfc822', 'rlcompleter', 'robotparser', 'runpy', 'sched',
+ 'select', 'sets', 'sgmllib', 'sha', 'shelve', 'shlex', 'shutil',
+ 'signal', 'site', 'smtpd', 'smtplib', 'sndhdr', 'socket', 'spwd',
+ 'sqlite3', 'sqlite3.dbapi2', 'sqlite3.dump', 'sre', 'sre_compile',
+ 'sre_constants', 'sre_parse', 'ssl', 'stat', 'statvfs', 'string',
+ 'stringold', 'stringprep', 'strop', 'struct', 'subprocess', 'sunau',
+ 'sunaudio', 'symbol', 'symtable', 'sys', 'sysconfig', 'syslog',
+ 'tabnanny', 'tarfile', 'telnetlib', 'tempfile', 'termios', 'test',
+ 'test.test_support', 'textwrap', 'this', 'thread', 'threading',
+ 'time', 'timeit', 'timing', 'toaiff', 'token', 'tokenize', 'trace',
+ 'traceback', 'tty', 'types', 'unicodedata', 'unittest',
+ 'unittest.__main__', 'unittest.case', 'unittest.loader',
+ 'unittest.main', 'unittest.result', 'unittest.runner',
+ 'unittest.signals', 'unittest.suite', 'unittest.test',
+ 'unittest.test.dummy', 'unittest.test.support',
+ 'unittest.test.test_assertions', 'unittest.test.test_break',
+ 'unittest.test.test_case', 'unittest.test.test_discovery',
+ 'unittest.test.test_functiontestcase', 'unittest.test.test_loader',
+ 'unittest.test.test_program', 'unittest.test.test_result',
+ 'unittest.test.test_runner', 'unittest.test.test_setups',
+ 'unittest.test.test_skipping', 'unittest.test.test_suite',
+ 'unittest.util', 'urllib', 'urllib2', 'urlparse', 'user', 'uu',
+ 'uuid', 'warnings', 'wave', 'weakref', 'webbrowser', 'whichdb',
+ 'wsgiref', 'wsgiref.handlers', 'wsgiref.headers',
+ 'wsgiref.simple_server', 'wsgiref.util', 'wsgiref.validate', 'xdrlib',
+ 'xml', 'xml.dom', 'xml.dom.NodeFilter', 'xml.dom.domreg',
+ 'xml.dom.expatbuilder', 'xml.dom.minicompat', 'xml.dom.minidom',
+ 'xml.dom.pulldom', 'xml.dom.xmlbuilder', 'xml.etree',
+ 'xml.etree.ElementInclude', 'xml.etree.ElementPath',
+ 'xml.etree.ElementTree', 'xml.etree.cElementTree', 'xml.parsers',
+ 'xml.parsers.expat', 'xml.sax', 'xml.sax._exceptions',
+ 'xml.sax.expatreader', 'xml.sax.handler', 'xml.sax.saxutils',
+ 'xml.sax.xmlreader', 'xmllib', 'xmlrpclib', 'xxsubtype', 'zipfile', ]
+# Dynamic modules that can't be auto-discovered by the script above
+manual_stdlib = ['os.path', ]
+py2_stdlib.extend(manual_stdlib)
+
+
+def _get_import_type(module):
+    """Classify *module* as 'stdlib', 'project' or 'third-party'.
+
+    Results are memoized in module_cache so repeated imports of the same
+    module do not re-probe the filesystem via _find_module.
+    """
+    if module in module_cache:
+        return module_cache[module]
+
+    def cache_type(module_type):
+        # Record the classification before returning it.
+        module_cache[module] = module_type
+        return module_type
+
+    # Check static stdlib list
+    if module in py2_stdlib:
+        return cache_type('stdlib')
+
+    # Check if the module is local
+    try:
+        _find_module(module, ['.'])
+        # If the previous line succeeded then it must be a project module
+        return cache_type('project')
+    except ImportError:
+        pass
+
+    # Otherwise treat it as third-party - this means we may treat some stdlib
+    # modules as third-party, but that's okay because we are allowing
+    # third-party libs in the stdlib section.
+    return cache_type('third-party')
+
+
+@core.flake8ext
+def hacking_import_groups(logical_line, blank_before, previous_logical,
+                          indent_level, previous_indent_level, physical_line,
+                          noqa):
+    r"""Check that imports are grouped correctly.
+
+    OpenStack HACKING guide recommendation for imports:
+    imports grouped such that Python standard library imports are together,
+    third party library imports are together, and project imports are
+    together
+
+    Okay: import os\nimport sys\n\nimport six\n\nimport hacking
+    Okay: import six\nimport znon_existent_package
+    Okay: import os\nimport threading
+    S366: import mock\nimport os
+    S366: import hacking\nimport os
+    S366: import hacking\nimport nonexistent
+    S366: import hacking\nimport mock
+    """
+    # Only adjacent imports at the same indent level can break grouping;
+    # a preceding blank line legitimately starts a new group.
+    if (noqa or blank_before > 0 or
+            indent_level != previous_indent_level):
+        return
+
+    normalized_line = core.import_normalize(logical_line.strip()).split()
+    normalized_previous = core.import_normalize(previous_logical.
+                                                strip()).split()
+
+    def compatible(previous, current):
+        # NOTE: implicitly returns None (falsy) when the two import
+        # types differ, which triggers the S366 yield below.
+        if previous == current:
+            return True
+
+    if normalized_line and normalized_line[0] == 'import':
+        current_type = _get_import_type(normalized_line[1])
+        if normalized_previous and normalized_previous[0] == 'import':
+            previous_type = _get_import_type(normalized_previous[1])
+            if not compatible(previous_type, current_type):
+                yield(0, 'S366: imports not grouped correctly '
+                      '(%s: %s, %s: %s)' %
+                      (normalized_previous[1], previous_type,
+                       normalized_line[1], current_type))
+
+
+class ImportGroupData(object):
+    """A class to hold persistent state data for import group checks.
+
+    To verify import grouping, it is necessary to know the current group
+    for the current file. This can not always be known solely from the
+    current and previous line, so this class can be used to keep track.
+    """
+
+    # NOTE(bnemec): *args is needed because the test code tries to run this
+    # as a flake8 check and passes an argument to it.
+    def __init__(self, *args):
+        # current_group: import type ('stdlib'/'project'/'third-party') of
+        # the run in progress; current_filename: file the state belongs to;
+        # current_import: last module name seen on an import line.
+        self.current_group = None
+        self.current_filename = None
+        self.current_import = None
+
+
+# Module-level singleton: flake8 checks are plain functions, so the state
+# shared between successive lines of one file has to live here.
+together_data = ImportGroupData()
+
+
+@core.flake8ext
+def hacking_import_groups_together(logical_line, blank_lines, indent_level,
+                                   previous_indent_level, line_number,
+                                   physical_line, filename, noqa):
+    r"""Check that like imports are grouped together.
+
+    OpenStack HACKING guide recommendation for imports:
+    Imports should be grouped together by type.
+
+    Okay: import os\nimport sys
+    Okay: try:\n    import foo\nexcept ImportError:\n    pass\n\nimport six
+    Okay: import abc\nimport mock\n\nimport six
+    Okay: import eventlet\neventlet.monkey_patch()\n\nimport copy
+    S367: import mock\n\nimport six
+    S367: import os\n\nimport sys
+    S367: import mock\nimport os\n\nimport sys
+    """
+    # Reset the persistent state when a new file (or the first line) starts.
+    if line_number == 1 or filename != together_data.current_filename:
+        together_data.current_group = None
+        together_data.current_filename = filename
+
+    if noqa:
+        return
+
+    def update_current_group(current):
+        together_data.current_group = current
+
+    normalized_line = core.import_normalize(logical_line.strip()).split()
+    if normalized_line:
+        if normalized_line[0] == 'import':
+            current_type = _get_import_type(normalized_line[1])
+            previous_import = together_data.current_import
+            together_data.current_import = normalized_line[1]
+            matched = current_type == together_data.current_group
+            update_current_group(current_type)
+            # Two imports of the same type, at the same indent, separated
+            # by one or more blank lines should be in a single group.
+            if (matched and indent_level == previous_indent_level and
+                    blank_lines >= 1):
+                yield(0, 'S367: like imports should be grouped together (%s '
+                      'and %s from %s are separated by whitespace)' %
+                      (previous_import,
+                       together_data.current_import,
+                       current_type))
+        else:
+            # Reset on non-import code
+            together_data.current_group = None
diff --git a/saharaclient/tests/hacking/logging_checks.py b/saharaclient/tests/hacking/logging_checks.py
new file mode 100644
index 0000000..24a678b
--- /dev/null
+++ b/saharaclient/tests/hacking/logging_checks.py
@@ -0,0 +1,64 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import re
+
+
+# Pipe-separated alternation of the accepted LOG.<level>() method names,
+# interpolated into the regexes below.
+ALL_LOG_LEVELS = "info|exception|warning|critical|error|debug"
+
+# Matches a line containing 'LOG.<level>(' for any accepted level.
+RE_ACCEPTED_LOG_LEVELS = re.compile(
+    r"(.)*LOG\.(%(levels)s)\(" % {'levels': ALL_LOG_LEVELS})
+
+# Since _Lx() have been removed, we just need to check _()
+RE_TRANSLATED_LOG = re.compile(
+    r"(.)*LOG\.(%(levels)s)\(\s*_\(" % {'levels': ALL_LOG_LEVELS})
+
+
+def no_translate_logs(logical_line, filename):
+    """Check for 'LOG.*(_('
+
+    Translators don't provide translations for log messages, and operators
+    asked not to translate them.
+
+    * This check assumes that 'LOG' is a logger.
+    * Use filename so we can start enforcing this in specific folders instead
+      of needing to do so all at once.
+    S373
+    """
+
+    msg = "S373 Don't translate logs"
+    # RE_TRANSLATED_LOG matches a LOG.<level>( call whose first argument
+    # is wrapped in the _() translation marker.
+    if RE_TRANSLATED_LOG.match(logical_line):
+        yield (0, msg)
+
+
+def accepted_log_levels(logical_line, filename):
+    """Check that only the log levels in ALL_LOG_LEVELS are used.
+
+    This check is needed because we don't want new contributors to
+    use deprecated log levels.
+    S374
+    """
+
+    # NOTE(Kezar): sahara/tests included because we don't require translations
+    # in tests. sahara/db/templates provide separate cli interface so we don't
+    # want to translate it.
+
+    ignore_dirs = ["sahara/db/templates",
+                   "sahara/tests"]
+    for directory in ignore_dirs:
+        if directory in filename:
+            return
+    msg = ("S374 You used deprecated log level. Accepted log levels are "
+           "%(levels)s" % {'levels': ALL_LOG_LEVELS})
+    # Only lines that start with 'LOG.' are logger calls we can classify.
+    if logical_line.startswith("LOG."):
+        if not RE_ACCEPTED_LOG_LEVELS.search(logical_line):
+            yield(0, msg)
diff --git a/saharaclient/tests/unit/base.py b/saharaclient/tests/unit/base.py
index c2cd255..6d95f6a 100644
--- a/saharaclient/tests/unit/base.py
+++ b/saharaclient/tests/unit/base.py
@@ -35,6 +35,11 @@ class BaseTestCase(testtools.TestCase):
for key, value in body.items():
self.assertEqual(value, getattr(obj, key))
+    def assertDictsEqual(self, dict1, dict2):
+        # Equal sizes plus per-key equality on dict1's keys implies the two
+        # dicts are equal; asserting key by key gives a more precise failure
+        # message than a single assertEqual(dict1, dict2) would.
+        self.assertEqual(len(dict1), len(dict2))
+        for key in dict1:
+            self.assertEqual(dict1[key], dict2[key])
+
class TestResource(base.Resource):
resource_name = 'Test Resource'
diff --git a/saharaclient/tests/unit/osc/v1/test_clusters.py b/saharaclient/tests/unit/osc/v1/test_clusters.py
index 9564e3f..25197dc 100644
--- a/saharaclient/tests/unit/osc/v1/test_clusters.py
+++ b/saharaclient/tests/unit/osc/v1/test_clusters.py
@@ -14,7 +14,6 @@
# limitations under the License.
import mock
-
from osc_lib.tests import utils as osc_utils
from saharaclient.api import cluster_templates as api_ct
diff --git a/saharaclient/tests/unit/osc/v1/test_data_sources.py b/saharaclient/tests/unit/osc/v1/test_data_sources.py
index cae2e3c..bc0473f 100644
--- a/saharaclient/tests/unit/osc/v1/test_data_sources.py
+++ b/saharaclient/tests/unit/osc/v1/test_data_sources.py
@@ -14,7 +14,6 @@
# limitations under the License.
import mock
-
from osc_lib.tests import utils as osc_utils
from saharaclient.api import data_sources as api_ds
diff --git a/saharaclient/tests/unit/osc/v1/test_images.py b/saharaclient/tests/unit/osc/v1/test_images.py
index 9c59bdf..d116c04 100644
--- a/saharaclient/tests/unit/osc/v1/test_images.py
+++ b/saharaclient/tests/unit/osc/v1/test_images.py
@@ -14,7 +14,6 @@
# limitations under the License.
import mock
-
from osc_lib.tests import utils as osc_utils
from saharaclient.api import images as api_images
diff --git a/saharaclient/tests/unit/osc/v1/test_job_binaries.py b/saharaclient/tests/unit/osc/v1/test_job_binaries.py
index afb83de..d01a03e 100644
--- a/saharaclient/tests/unit/osc/v1/test_job_binaries.py
+++ b/saharaclient/tests/unit/osc/v1/test_job_binaries.py
@@ -17,9 +17,9 @@ import mock
from saharaclient.api import job_binaries as api_jb
from saharaclient.osc.v1 import job_binaries as osc_jb
-
from saharaclient.tests.unit.osc.v1 import fakes
+
JOB_BINARY_INFO = {
"name": 'job-binary',
"description": 'descr',
diff --git a/saharaclient/tests/unit/osc/v1/test_job_templates.py b/saharaclient/tests/unit/osc/v1/test_job_templates.py
index 10b65b3..6f88260 100644
--- a/saharaclient/tests/unit/osc/v1/test_job_templates.py
+++ b/saharaclient/tests/unit/osc/v1/test_job_templates.py
@@ -14,7 +14,6 @@
# limitations under the License.
import mock
-
from osc_lib.tests import utils as osc_utils
from saharaclient.api import jobs as api_j
diff --git a/saharaclient/tests/unit/osc/v1/test_job_types.py b/saharaclient/tests/unit/osc/v1/test_job_types.py
index d087b91..8a1a471 100644
--- a/saharaclient/tests/unit/osc/v1/test_job_types.py
+++ b/saharaclient/tests/unit/osc/v1/test_job_types.py
@@ -18,7 +18,6 @@ import mock
from saharaclient.api import job_types as api_jt
from saharaclient.api import jobs as api_j
from saharaclient.osc.v1 import job_types as osc_jt
-
from saharaclient.tests.unit.osc.v1 import fakes
JOB_TYPE_INFO = {
diff --git a/saharaclient/tests/unit/osc/v1/test_jobs.py b/saharaclient/tests/unit/osc/v1/test_jobs.py
index 6beaa15..82ce3d8 100644
--- a/saharaclient/tests/unit/osc/v1/test_jobs.py
+++ b/saharaclient/tests/unit/osc/v1/test_jobs.py
@@ -14,7 +14,6 @@
# limitations under the License.
import mock
-
from osc_lib.tests import utils as osc_utils
from saharaclient.api import job_executions as api_je
diff --git a/saharaclient/tests/unit/osc/v1/test_node_group_templates.py b/saharaclient/tests/unit/osc/v1/test_node_group_templates.py
index 8e23f42..b23a4e4 100644
--- a/saharaclient/tests/unit/osc/v1/test_node_group_templates.py
+++ b/saharaclient/tests/unit/osc/v1/test_node_group_templates.py
@@ -14,7 +14,6 @@
# limitations under the License.
import mock
-
from osc_lib.tests import utils as osc_utils
from saharaclient.api import node_group_templates as api_ngt
@@ -99,7 +98,7 @@ class TestCreateNodeGroupTemplate(TestNodeGroupTemplates):
volume_local_to_instance=False,
volume_type=None, volumes_availability_zone=None,
volumes_per_node=None, volumes_size=None, shares=None,
- node_configs=None)
+ node_configs=None, volume_mount_prefix=None)
def test_ngt_create_all_options(self):
arglist = ['--name', 'template', '--plugin', 'fake',
@@ -145,7 +144,8 @@ class TestCreateNodeGroupTemplate(TestNodeGroupTemplates):
plugin_name='fake', security_groups=['secgr'], use_autoconfig=True,
volume_local_to_instance=True, volume_type='type',
volumes_availability_zone='vavzone', volumes_per_node=2,
- volumes_size=2, shares=None, node_configs=None)
+ volumes_size=2, shares=None, node_configs=None,
+ volume_mount_prefix='/volume/asd')
# Check that columns are correct
expected_columns = (
@@ -371,7 +371,7 @@ class TestUpdateNodeGroupTemplate(TestNodeGroupTemplates):
plugin_name='fake', security_groups=['secgr'], use_autoconfig=True,
volume_local_to_instance=True, volume_type='type',
volumes_availability_zone='vavzone', volumes_per_node=2,
- volumes_size=2)
+ volumes_size=2, volume_mount_prefix='/volume/asd')
# Check that columns are correct
expected_columns = (
diff --git a/saharaclient/tests/unit/osc/v1/test_plugins.py b/saharaclient/tests/unit/osc/v1/test_plugins.py
index 0a07ff2..2c7c3f6 100644
--- a/saharaclient/tests/unit/osc/v1/test_plugins.py
+++ b/saharaclient/tests/unit/osc/v1/test_plugins.py
@@ -13,8 +13,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import json
import mock
+from oslo_serialization import jsonutils as json
from saharaclient.api import plugins as api_plugins
from saharaclient.osc.v1 import plugins as osc_plugins
diff --git a/saharaclient/tests/unit/test_cluster_templates.py b/saharaclient/tests/unit/test_cluster_templates.py
index d674b6d..67b8b9c 100644
--- a/saharaclient/tests/unit/test_cluster_templates.py
+++ b/saharaclient/tests/unit/test_cluster_templates.py
@@ -15,7 +15,7 @@
from saharaclient.api import cluster_templates as ct
from saharaclient.tests.unit import base
-import json
+from oslo_serialization import jsonutils as json
class ClusterTemplateTest(base.BaseTestCase):
diff --git a/saharaclient/tests/unit/test_clusters.py b/saharaclient/tests/unit/test_clusters.py
index 70b240d..0c3f4c6 100644
--- a/saharaclient/tests/unit/test_clusters.py
+++ b/saharaclient/tests/unit/test_clusters.py
@@ -15,7 +15,7 @@
from saharaclient.api import clusters as cl
from saharaclient.tests.unit import base
-import json
+from oslo_serialization import jsonutils as json
class ClusterTest(base.BaseTestCase):
diff --git a/saharaclient/tests/unit/test_data_sources.py b/saharaclient/tests/unit/test_data_sources.py
index 217f1af..13476d1 100644
--- a/saharaclient/tests/unit/test_data_sources.py
+++ b/saharaclient/tests/unit/test_data_sources.py
@@ -15,7 +15,7 @@
from saharaclient.api import data_sources as ds
from saharaclient.tests.unit import base
-import json
+from oslo_serialization import jsonutils as json
class DataSourceTest(base.BaseTestCase):
diff --git a/saharaclient/tests/unit/test_hacking.py b/saharaclient/tests/unit/test_hacking.py
new file mode 100644
index 0000000..53390a5
--- /dev/null
+++ b/saharaclient/tests/unit/test_hacking.py
@@ -0,0 +1,71 @@
+# Copyright 2015 EasyStack Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import testtools
+
+from saharaclient.tests.hacking import checks
+
+
+class HackingTestCase(testtools.TestCase):
+    """Unit tests for the custom hacking checks.
+
+    Each check is a generator that yields one tuple per violation, so
+    len(list(check(line))) counts the violations reported for a line.
+    """
+
+    def test_dict_constructor_with_list_copy(self):
+        # Following checks for code-lines with pep8 error
+        self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy(
+            "    dict([(i, connect_info[i])"))))
+
+        self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy(
+            "    attrs = dict([(k, _from_json(v))"))))
+
+        self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy(
+            "    type_names = dict((value, key) for key, value in"))))
+
+        self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy(
+            "    dict((value, key) for key, value in"))))
+
+        self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy(
+            "foo(param=dict((k, v) for k, v in bar.items()))"))))
+
+        self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy(
+            "    dict([[i,i] for i in range(3)])"))))
+
+        self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy(
+            "    dd = dict([i,i] for i in range(3))"))))
+        # Following checks for ok code-lines
+        self.assertEqual(0, len(list(checks.dict_constructor_with_list_copy(
+            "    dict()"))))
+
+        self.assertEqual(0, len(list(checks.dict_constructor_with_list_copy(
+            "    create_kwargs = dict(snapshot=snapshot,"))))
+
+        self.assertEqual(0, len(list(checks.dict_constructor_with_list_copy(
+            "    self._render_dict(xml, data_el, data.__dict__)"))))
+
+    def test_use_jsonutils(self):
+        # A '# noqa' marker suppresses the check.
+        self.assertEqual(0, len(list(checks.use_jsonutils(
+            "import json # noqa", "path"))))
+        self.assertEqual(0, len(list(checks.use_jsonutils(
+            "from oslo_serialization import jsonutils as json", "path"))))
+        self.assertEqual(0, len(list(checks.use_jsonutils(
+            "import jsonschema", "path"))))
+        self.assertEqual(1, len(list(checks.use_jsonutils(
+            "import json", "path"))))
+        self.assertEqual(1, len(list(checks.use_jsonutils(
+            "import json as jsonutils", "path"))))
+
+    def test_no_mutable_default_args(self):
+        self.assertEqual(0, len(list(checks.no_mutable_default_args(
+            "def foo (bar):"))))
+        self.assertEqual(1, len(list(checks.no_mutable_default_args(
+            "def foo (bar=[]):"))))
+        self.assertEqual(1, len(list(checks.no_mutable_default_args(
+            "def foo (bar={}):"))))
diff --git a/saharaclient/tests/unit/test_images.py b/saharaclient/tests/unit/test_images.py
index 51732c5..de13aa4 100644
--- a/saharaclient/tests/unit/test_images.py
+++ b/saharaclient/tests/unit/test_images.py
@@ -15,7 +15,7 @@
from saharaclient.api import images
from saharaclient.tests.unit import base
-import json
+from oslo_serialization import jsonutils as json
class ImageTest(base.BaseTestCase):
diff --git a/saharaclient/tests/unit/test_job_binaries.py b/saharaclient/tests/unit/test_job_binaries.py
index 4d3b988..546c4c3 100644
--- a/saharaclient/tests/unit/test_job_binaries.py
+++ b/saharaclient/tests/unit/test_job_binaries.py
@@ -15,7 +15,7 @@
from saharaclient.api import job_binaries as jb
from saharaclient.tests.unit import base
-import json
+from oslo_serialization import jsonutils as json
class JobBinaryTest(base.BaseTestCase):
diff --git a/saharaclient/tests/unit/test_job_binary_internals.py b/saharaclient/tests/unit/test_job_binary_internals.py
index 806b886..2d184e2 100644
--- a/saharaclient/tests/unit/test_job_binary_internals.py
+++ b/saharaclient/tests/unit/test_job_binary_internals.py
@@ -12,7 +12,7 @@
# License for the specific language governing permissions and limitations
# under the License.
-import json
+from oslo_serialization import jsonutils as json
from saharaclient.api import job_binary_internals as jbi
from saharaclient.tests.unit import base
diff --git a/saharaclient/tests/unit/test_job_executions.py b/saharaclient/tests/unit/test_job_executions.py
index 2b7a4c1..1806e35 100644
--- a/saharaclient/tests/unit/test_job_executions.py
+++ b/saharaclient/tests/unit/test_job_executions.py
@@ -15,7 +15,7 @@
from saharaclient.api import job_executions as je
from saharaclient.tests.unit import base
-import json
+from oslo_serialization import jsonutils as json
class JobExecutionTest(base.BaseTestCase):
diff --git a/saharaclient/tests/unit/test_jobs.py b/saharaclient/tests/unit/test_jobs.py
index b5e2ec2..b5ff2bd 100644
--- a/saharaclient/tests/unit/test_jobs.py
+++ b/saharaclient/tests/unit/test_jobs.py
@@ -15,7 +15,7 @@
from saharaclient.api import jobs
from saharaclient.tests.unit import base
-import json
+from oslo_serialization import jsonutils as json
class JobTest(base.BaseTestCase):
diff --git a/saharaclient/tests/unit/test_node_group_templates.py b/saharaclient/tests/unit/test_node_group_templates.py
index 99ef5a0..f37416a 100644
--- a/saharaclient/tests/unit/test_node_group_templates.py
+++ b/saharaclient/tests/unit/test_node_group_templates.py
@@ -15,7 +15,7 @@
from saharaclient.api import node_group_templates as ng
from saharaclient.tests.unit import base
-import json
+from oslo_serialization import jsonutils as json
class NodeGroupTemplateTest(base.BaseTestCase):
@@ -145,3 +145,12 @@ class NodeGroupTemplateTest(base.BaseTestCase):
self.assertEqual(update_url, self.responses.last_request.url)
self.assertEqual(unset_json,
json.loads(self.responses.last_request.body))
+
+
+    def test_node_group_template_export(self):
+        # The export endpoint should GET the template and wrap it under a
+        # 'node_group_template' key in the returned resource.
+        url = self.URL + '/node-group-templates/id/export'
+        self.responses.get(url, json={'node_group_template': self.body})
+        resp = self.client.node_group_templates.export('id')
+
+        self.assertEqual(url, self.responses.last_request.url)
+        self.assertIsInstance(resp, ng.NodeGroupTemplate)
+        self.assertDictsEqual(self.body, resp.__dict__[u'node_group_template'])