summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--lower-constraints.txt6
-rw-r--r--releasenotes/notes/job-job-template-apiv2-change-93ffbf2b1360cddc.yaml4
-rw-r--r--releasenotes/notes/rework-auth-c3e13a68a935671e.yaml6
-rw-r--r--requirements.txt2
-rw-r--r--saharaclient/api/client.py84
-rw-r--r--saharaclient/api/data_sources.py4
-rw-r--r--saharaclient/api/node_group_templates.py16
-rw-r--r--saharaclient/api/v2/job_templates.py4
-rw-r--r--saharaclient/api/v2/jobs.py6
-rw-r--r--saharaclient/osc/plugin.py5
-rw-r--r--saharaclient/osc/utils.py373
-rw-r--r--saharaclient/osc/v1/cluster_templates.py2
-rw-r--r--saharaclient/osc/v1/clusters.py2
-rw-r--r--saharaclient/osc/v1/data_sources.py2
-rw-r--r--saharaclient/osc/v1/images.py2
-rw-r--r--saharaclient/osc/v1/job_binaries.py2
-rw-r--r--saharaclient/osc/v1/job_templates.py2
-rw-r--r--saharaclient/osc/v1/job_types.py2
-rw-r--r--saharaclient/osc/v1/jobs.py2
-rw-r--r--saharaclient/osc/v1/node_group_templates.py223
-rw-r--r--saharaclient/osc/v1/plugins.py2
-rw-r--r--saharaclient/osc/v1/utils.py101
-rw-r--r--saharaclient/osc/v2/__init__.py0
-rw-r--r--saharaclient/osc/v2/node_group_templates.py176
-rw-r--r--saharaclient/tests/unit/base.py8
-rw-r--r--saharaclient/tests/unit/osc/test_plugin.py20
-rw-r--r--saharaclient/tests/unit/osc/v1/test_node_group_templates.py1
-rw-r--r--saharaclient/tests/unit/osc/v1/test_utils.py2
-rw-r--r--saharaclient/tests/unit/osc/v2/__init__.py0
-rw-r--r--saharaclient/tests/unit/osc/v2/test_node_group_templates.py412
-rw-r--r--saharaclient/tests/unit/test_data_sources.py25
-rw-r--r--saharaclient/tests/unit/test_node_group_templates.py108
-rw-r--r--setup.cfg9
-rw-r--r--test-requirements.txt3
-rw-r--r--tox.ini14
35 files changed, 1203 insertions, 427 deletions
diff --git a/lower-constraints.txt b/lower-constraints.txt
index eb1c9e8..ce64b90 100644
--- a/lower-constraints.txt
+++ b/lower-constraints.txt
@@ -34,8 +34,7 @@ netifaces==0.10.4
openstacksdk==0.11.2
os-client-config==1.28.0
os-service-types==1.2.0
-os-testr==1.0.0
-osc-lib==1.8.0
+osc-lib==1.11.0
oslo.config==5.2.0
oslo.context==2.19.2
oslo.i18n==3.15.3
@@ -64,14 +63,13 @@ python-subunit==1.0.0
pytz==2013.6
PyYAML==3.12
requests==2.14.2
-requests-mock==1.1.0
+requests-mock==1.2.0
requestsexceptions==1.2.0
rfc3986==0.3.1
simplejson==3.5.1
six==1.10.0
stestr==1.0.0
stevedore==1.20.0
-testrepository==0.0.18
testtools==2.2.0
traceback2==1.4.0
unittest2==1.1.0
diff --git a/releasenotes/notes/job-job-template-apiv2-change-93ffbf2b1360cddc.yaml b/releasenotes/notes/job-job-template-apiv2-change-93ffbf2b1360cddc.yaml
new file mode 100644
index 0000000..76d89ca
--- /dev/null
+++ b/releasenotes/notes/job-job-template-apiv2-change-93ffbf2b1360cddc.yaml
@@ -0,0 +1,4 @@
+other:
+ - When using APIv2, the viewing (GET) of specific job templates and jobs and
+ the creation (POST) of job templates and jobs now only support the API
+ behavior of Sahara 9.0.0.0b3 or later.
diff --git a/releasenotes/notes/rework-auth-c3e13a68a935671e.yaml b/releasenotes/notes/rework-auth-c3e13a68a935671e.yaml
new file mode 100644
index 0000000..a85282e
--- /dev/null
+++ b/releasenotes/notes/rework-auth-c3e13a68a935671e.yaml
@@ -0,0 +1,6 @@
+---
+upgrade:
+ - |
+ The Sahara client library now only supports authentication with a Keystone
+ session object. Consequently the arguments which `saharaclient.api.Client`
+ accepts, and the order of those arguments, have changed.
diff --git a/requirements.txt b/requirements.txt
index 3701928..087b802 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -6,7 +6,7 @@ pbr!=2.1.0,>=2.0.0 # Apache-2.0
Babel!=2.4.0,>=2.3.4 # BSD
keystoneauth1>=3.4.0 # Apache-2.0
-osc-lib>=1.8.0 # Apache-2.0
+osc-lib>=1.11.0 # Apache-2.0
oslo.log>=3.36.0 # Apache-2.0
oslo.serialization!=2.19.1,>=2.18.0 # Apache-2.0
oslo.i18n>=3.15.3 # Apache-2.0
diff --git a/saharaclient/api/client.py b/saharaclient/api/client.py
index adfaeed..697bc8d 100644
--- a/saharaclient/api/client.py
+++ b/saharaclient/api/client.py
@@ -13,13 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import warnings
-
from keystoneauth1 import adapter
-from keystoneauth1.identity import v2
-from keystoneauth1.identity import v3
-from keystoneauth1 import session as keystone_session
-from keystoneauth1 import token_endpoint
from saharaclient.api import cluster_templates
from saharaclient.api import clusters
@@ -52,59 +46,21 @@ class Client(object):
_api_version = '1.1'
"""Client for the OpenStack Data Processing API.
-
- :param str username: Username for Keystone authentication.
- :param str api_key: Password for Keystone authentication.
- :param str project_id: Keystone Tenant id.
- :param str project_name: Keystone Tenant name.
- :param str auth_url: Keystone URL that will be used for authentication.
- :param str sahara_url: Sahara REST API URL to communicate with.
- :param str endpoint_type: Desired Sahara endpoint type.
- :param str service_type: Sahara service name in Keystone catalog.
- :param str input_auth_token: Keystone authorization token.
- :param session: Keystone Session object.
- :param auth: Keystone Authentication Plugin object.
- :param boolean insecure: Allow insecure.
- :param string cacert: Path to the Privacy Enhanced Mail (PEM) file
- which contains certificates needed to establish
- SSL connection with the identity service.
+ :param session: Keystone session object. Required.
+ :param string sahara_url: Endpoint override.
+ :param string endpoint_type: Desired Sahara endpoint type.
+ :param string service_type: Sahara service name in Keystone catalog.
:param string region_name: Name of a region to select when choosing an
endpoint from the service catalog.
"""
- def __init__(self, username=None, api_key=None, project_id=None,
- project_name=None, auth_url=None, sahara_url=None,
+ def __init__(self, session=None, sahara_url=None,
endpoint_type='publicURL', service_type='data-processing',
- input_auth_token=None, session=None, auth=None,
- insecure=False, cacert=None, region_name=None, **kwargs):
+ region_name=None, **kwargs):
if not session:
- warnings.simplefilter('once', category=DeprecationWarning)
- warnings.warn('Passing authentication parameters to saharaclient '
- 'is deprecated. Please construct and pass an '
- 'authenticated session object directly.',
- DeprecationWarning)
- warnings.resetwarnings()
-
- if input_auth_token:
- auth = token_endpoint.Token(sahara_url, input_auth_token)
-
- else:
- auth = self._get_keystone_auth(auth_url=auth_url,
- username=username,
- api_key=api_key,
- project_id=project_id,
- project_name=project_name)
-
- verify = True
- if insecure:
- verify = False
- elif cacert:
- verify = cacert
-
- session = keystone_session.Session(verify=verify)
-
- if not auth:
- auth = session.auth
+ raise RuntimeError("Must provide session")
+
+ auth = session.auth
kwargs['user_agent'] = USER_AGENT
kwargs.setdefault('interface', endpoint_type)
@@ -138,28 +94,6 @@ class Client(object):
)
self.job_types = job_types.JobTypesManager(client)
- def _get_keystone_auth(self, username=None, api_key=None, auth_url=None,
- project_id=None, project_name=None):
- if not auth_url:
- raise RuntimeError("No auth url specified")
-
- if 'v2.0' in auth_url:
- return v2.Password(auth_url=auth_url,
- username=username,
- password=api_key,
- tenant_id=project_id,
- tenant_name=project_name)
- else:
- # NOTE(jamielennox): Setting these to default is what
- # keystoneclient does in the event they are not passed.
- return v3.Password(auth_url=auth_url,
- username=username,
- password=api_key,
- user_domain_id='default',
- project_id=project_id,
- project_name=project_name,
- project_domain_id='default')
-
class ClientV2(Client):
diff --git a/saharaclient/api/data_sources.py b/saharaclient/api/data_sources.py
index 93a1efb..ccfd39b 100644
--- a/saharaclient/api/data_sources.py
+++ b/saharaclient/api/data_sources.py
@@ -76,7 +76,9 @@ class DataSourceManagerV1(base.ResourceManager):
* url
* is_public
* is_protected
- * credentials - dict with `user` and `password` keyword arguments
+ * credentials - dict with the keys `user` and `password` for data
+ source in Swift, or with the keys `accesskey`, `secretkey`,
+ `endpoint`, `ssl`, and `bucket_in_path` for data source in S3
"""
if self.version >= 2:
diff --git a/saharaclient/api/node_group_templates.py b/saharaclient/api/node_group_templates.py
index db45798..9486e8b 100644
--- a/saharaclient/api/node_group_templates.py
+++ b/saharaclient/api/node_group_templates.py
@@ -57,7 +57,8 @@ class NodeGroupTemplateManagerV1(base.ResourceManager):
auto_security_group, availability_zone,
volumes_availability_zone, volume_type, image_id,
is_proxy_gateway, volume_local_to_instance, use_autoconfig,
- shares, is_public, is_protected, volume_mount_prefix):
+ shares, is_public, is_protected, volume_mount_prefix,
+ boot_from_volume=None):
self._copy_if_defined(data,
description=description,
@@ -71,7 +72,8 @@ class NodeGroupTemplateManagerV1(base.ResourceManager):
use_autoconfig=use_autoconfig,
shares=shares,
is_public=is_public,
- is_protected=is_protected
+ is_protected=is_protected,
+ boot_from_volume=boot_from_volume
)
if volumes_per_node:
@@ -160,7 +162,7 @@ class NodeGroupTemplateManagerV2(NodeGroupTemplateManagerV1):
volume_type=None, image_id=None, is_proxy_gateway=None,
volume_local_to_instance=None, use_autoconfig=None,
shares=None, is_public=None, is_protected=None,
- volume_mount_prefix=None):
+ volume_mount_prefix=None, boot_from_volume=None):
"""Create a Node Group Template."""
data = {
@@ -178,7 +180,7 @@ class NodeGroupTemplateManagerV2(NodeGroupTemplateManagerV1):
volume_type, image_id, is_proxy_gateway,
volume_local_to_instance, use_autoconfig,
shares, is_public, is_protected,
- volume_mount_prefix)
+ volume_mount_prefix, boot_from_volume)
def update(self, ng_template_id, name=NotUpdated, plugin_name=NotUpdated,
plugin_version=NotUpdated, flavor_id=NotUpdated,
@@ -191,7 +193,8 @@ class NodeGroupTemplateManagerV2(NodeGroupTemplateManagerV1):
image_id=NotUpdated, is_proxy_gateway=NotUpdated,
volume_local_to_instance=NotUpdated, use_autoconfig=NotUpdated,
shares=NotUpdated, is_public=NotUpdated,
- is_protected=NotUpdated, volume_mount_prefix=NotUpdated):
+ is_protected=NotUpdated, volume_mount_prefix=NotUpdated,
+ boot_from_volume=NotUpdated):
"""Update a Node Group Template."""
data = {}
@@ -210,7 +213,8 @@ class NodeGroupTemplateManagerV2(NodeGroupTemplateManagerV1):
volume_local_to_instance=volume_local_to_instance,
use_autoconfig=use_autoconfig, shares=shares,
is_public=is_public, is_protected=is_protected,
- volume_mount_prefix=volume_mount_prefix
+ volume_mount_prefix=volume_mount_prefix,
+ boot_from_volume=boot_from_volume
)
return self._patch('/node-group-templates/%s' % ng_template_id, data,
diff --git a/saharaclient/api/v2/job_templates.py b/saharaclient/api/v2/job_templates.py
index 0236829..3fb2be0 100644
--- a/saharaclient/api/v2/job_templates.py
+++ b/saharaclient/api/v2/job_templates.py
@@ -36,7 +36,7 @@ class JobTemplatesManagerV2(base.ResourceManager):
libs=libs, interface=interface,
is_public=is_public, is_protected=is_protected)
- return self._create('/%s' % 'job-templates', data, 'job')
+ return self._create('/%s' % 'job-templates', data, 'job_template')
def list(self, search_opts=None, limit=None,
marker=None, sort_by=None, reverse=None):
@@ -49,7 +49,7 @@ class JobTemplatesManagerV2(base.ResourceManager):
def get(self, job_id):
"""Get information about a Job Template."""
- return self._get('/%s/%s' % ('job-templates', job_id), 'job')
+ return self._get('/%s/%s' % ('job-templates', job_id), 'job_template')
def get_configs(self, job_type):
"""Get config hints for a specified Job Template type."""
diff --git a/saharaclient/api/v2/jobs.py b/saharaclient/api/v2/jobs.py
index fa50894..15387a9 100644
--- a/saharaclient/api/v2/jobs.py
+++ b/saharaclient/api/v2/jobs.py
@@ -34,7 +34,7 @@ class JobsManagerV2(base.ResourceManager):
def get(self, obj_id):
"""Get information about a Job."""
- return self._get('/jobs/%s' % obj_id, 'job_execution')
+ return self._get('/jobs/%s' % obj_id, 'job')
def delete(self, obj_id):
"""Delete a Job."""
@@ -54,13 +54,13 @@ class JobsManagerV2(base.ResourceManager):
job_configs=configs, interface=interface,
is_public=is_public, is_protected=is_protected)
- return self._create('/jobs', data, 'job_execution')
+ return self._create('/jobs', data, 'job')
def refresh_status(self, obj_id):
"""Refresh Job Status."""
return self._get(
'/jobs/%s?refresh_status=True' % obj_id,
- 'job_execution'
+ 'job'
)
def update(self, obj_id, is_public=NotUpdated, is_protected=NotUpdated):
diff --git a/saharaclient/osc/plugin.py b/saharaclient/osc/plugin.py
index c5b7024..14f2b48 100644
--- a/saharaclient/osc/plugin.py
+++ b/saharaclient/osc/plugin.py
@@ -24,7 +24,8 @@ DEFAULT_DATA_PROCESSING_API_VERSION = "1.1"
API_VERSION_OPTION = "os_data_processing_api_version"
API_NAME = "data_processing"
API_VERSIONS = {
- "1.1": "saharaclient.api.client.Client"
+ "1.1": "saharaclient.api.client.Client",
+ "2": "saharaclient.api.client.ClientV2"
}
@@ -41,8 +42,6 @@ def make_client(instance):
client = data_processing_client(
session=instance.session,
region_name=instance._region_name,
- cacert=instance._cacert,
- insecure=instance._insecure,
sahara_url=instance._cli_options.data_processing_url,
**kwargs
)
diff --git a/saharaclient/osc/utils.py b/saharaclient/osc/utils.py
new file mode 100644
index 0000000..aebb628
--- /dev/null
+++ b/saharaclient/osc/utils.py
@@ -0,0 +1,373 @@
+# Copyright (c) 2015 Mirantis Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import time
+
+from osc_lib import exceptions
+from osc_lib import utils as osc_utils
+from oslo_serialization import jsonutils as json
+from oslo_utils import timeutils
+from oslo_utils import uuidutils
+
+from saharaclient.api import base
+
+
+def get_resource(manager, name_or_id, **kwargs):
+ if uuidutils.is_uuid_like(name_or_id):
+ return manager.get(name_or_id, **kwargs)
+ else:
+ resource = manager.find_unique(name=name_or_id)
+ if kwargs:
+ # we really need additional call to apply kwargs
+ resource = manager.get(resource.id, **kwargs)
+ return resource
+
+
+def created_at_sorted(objs, reverse=False):
+ return sorted(objs, key=created_at_key, reverse=reverse)
+
+
+def random_name(prefix=None):
+ return "%s-%s" % (prefix, uuidutils.generate_uuid()[:8])
+
+
+def created_at_key(obj):
+ return timeutils.parse_isotime(obj["created_at"])
+
+
+def get_resource_id(manager, name_or_id):
+ if uuidutils.is_uuid_like(name_or_id):
+ return name_or_id
+ else:
+ return manager.find_unique(name=name_or_id).id
+
+
+def create_dict_from_kwargs(**kwargs):
+ return {k: v for (k, v) in kwargs.items() if v is not None}
+
+
+def prepare_data(data, fields):
+ new_data = {}
+ for f in fields:
+ if f in data:
+ new_data[f.replace('_', ' ').capitalize()] = data[f]
+
+ return new_data
+
+
+def unzip(data):
+ return zip(*data)
+
+
+def extend_columns(columns, items):
+ return unzip(list(unzip(columns)) + [('', '')] + items)
+
+
+def prepare_column_headers(columns, remap=None):
+ remap = remap if remap else {}
+ new_columns = []
+ for c in columns:
+ for old, new in remap.items():
+ c = c.replace(old, new)
+ new_columns.append(c.replace('_', ' ').capitalize())
+
+ return new_columns
+
+
+def get_by_name_substring(data, name):
+ return [obj for obj in data if name in obj.name]
+
+
+def wait_for_delete(manager, obj_id, sleep_time=5, timeout=3000):
+ s_time = timeutils.utcnow()
+ while timeutils.delta_seconds(s_time, timeutils.utcnow()) < timeout:
+ try:
+ manager.get(obj_id)
+ except base.APIException as ex:
+ if ex.error_code == 404:
+ return True
+ raise
+ time.sleep(sleep_time)
+
+ return False
+
+
+def create_node_group_templates(client, app, parsed_args, flavor_id, configs,
+ shares):
+ if app.api_version['data_processing'] == '2':
+ data = client.node_group_templates.create(
+ name=parsed_args.name,
+ plugin_name=parsed_args.plugin,
+ plugin_version=parsed_args.plugin_version,
+ flavor_id=flavor_id,
+ description=parsed_args.description,
+ volumes_per_node=parsed_args.volumes_per_node,
+ volumes_size=parsed_args.volumes_size,
+ node_processes=parsed_args.processes,
+ floating_ip_pool=parsed_args.floating_ip_pool,
+ security_groups=parsed_args.security_groups,
+ auto_security_group=parsed_args.auto_security_group,
+ availability_zone=parsed_args.availability_zone,
+ volume_type=parsed_args.volumes_type,
+ is_proxy_gateway=parsed_args.proxy_gateway,
+ volume_local_to_instance=parsed_args.volumes_locality,
+ use_autoconfig=parsed_args.autoconfig,
+ is_public=parsed_args.public,
+ is_protected=parsed_args.protected,
+ node_configs=configs,
+ shares=shares,
+ volumes_availability_zone=(
+ parsed_args.volumes_availability_zone),
+ volume_mount_prefix=parsed_args.volumes_mount_prefix,
+ boot_from_volume=parsed_args.boot_from_volume).to_dict()
+ else:
+ data = client.node_group_templates.create(
+ name=parsed_args.name,
+ plugin_name=parsed_args.plugin,
+ hadoop_version=parsed_args.plugin_version,
+ flavor_id=flavor_id,
+ description=parsed_args.description,
+ volumes_per_node=parsed_args.volumes_per_node,
+ volumes_size=parsed_args.volumes_size,
+ node_processes=parsed_args.processes,
+ floating_ip_pool=parsed_args.floating_ip_pool,
+ security_groups=parsed_args.security_groups,
+ auto_security_group=parsed_args.auto_security_group,
+ availability_zone=parsed_args.availability_zone,
+ volume_type=parsed_args.volumes_type,
+ is_proxy_gateway=parsed_args.proxy_gateway,
+ volume_local_to_instance=parsed_args.volumes_locality,
+ use_autoconfig=parsed_args.autoconfig,
+ is_public=parsed_args.public,
+ is_protected=parsed_args.protected,
+ node_configs=configs,
+ shares=shares,
+ volumes_availability_zone=(
+ parsed_args.volumes_availability_zone),
+ volume_mount_prefix=parsed_args.volumes_mount_prefix).to_dict()
+ return data
+
+
+class NodeGroupTemplatesUtils(object):
+
+ def _create_take_action(self, client, app, parsed_args):
+ if parsed_args.json:
+ blob = osc_utils.read_blob_file_contents(parsed_args.json)
+ try:
+ template = json.loads(blob)
+ except ValueError as e:
+ raise exceptions.CommandError(
+ 'An error occurred when reading '
+ 'template from file %s: %s' % (parsed_args.json, e))
+ data = client.node_group_templates.create(**template).to_dict()
+ else:
+ if (not parsed_args.name or not parsed_args.plugin or
+ not parsed_args.plugin_version or not parsed_args.flavor or
+ not parsed_args.processes):
+ raise exceptions.CommandError(
+ 'At least --name, --plugin, --plugin-version, --processes,'
+ ' --flavor arguments should be specified or json template '
+ 'should be provided with --json argument')
+
+ configs = None
+ if parsed_args.configs:
+ blob = osc_utils.read_blob_file_contents(parsed_args.configs)
+ try:
+ configs = json.loads(blob)
+ except ValueError as e:
+ raise exceptions.CommandError(
+ 'An error occurred when reading '
+ 'configs from file %s: %s' % (parsed_args.configs, e))
+
+ shares = None
+ if parsed_args.shares:
+ blob = osc_utils.read_blob_file_contents(parsed_args.shares)
+ try:
+ shares = json.loads(blob)
+ except ValueError as e:
+ raise exceptions.CommandError(
+ 'An error occurred when reading '
+ 'shares from file %s: %s' % (parsed_args.shares, e))
+
+ compute_client = app.client_manager.compute
+ flavor_id = osc_utils.find_resource(
+ compute_client.flavors, parsed_args.flavor).id
+
+ data = create_node_group_templates(client, app, parsed_args,
+ flavor_id, configs, shares)
+
+ return data
+
+ def _list_take_action(self, client, app, parsed_args):
+ search_opts = {}
+ if parsed_args.plugin:
+ search_opts['plugin_name'] = parsed_args.plugin
+ if parsed_args.plugin_version:
+ search_opts['hadoop_version'] = parsed_args.plugin_version
+
+ data = client.node_group_templates.list(search_opts=search_opts)
+
+ if parsed_args.name:
+ data = get_by_name_substring(data, parsed_args.name)
+
+ if app.api_version['data_processing'] == '2':
+ if parsed_args.long:
+ columns = ('name', 'id', 'plugin_name', 'plugin_version',
+ 'node_processes', 'description')
+ column_headers = prepare_column_headers(columns)
+
+ else:
+ columns = ('name', 'id', 'plugin_name', 'plugin_version')
+ column_headers = prepare_column_headers(columns)
+ else:
+ if parsed_args.long:
+ columns = ('name', 'id', 'plugin_name', 'hadoop_version',
+ 'node_processes', 'description')
+ column_headers = prepare_column_headers(
+ columns, {'hadoop_version': 'plugin_version'})
+
+ else:
+ columns = ('name', 'id', 'plugin_name', 'hadoop_version')
+ column_headers = prepare_column_headers(
+ columns, {'hadoop_version': 'plugin_version'})
+
+ return (
+ column_headers,
+ (osc_utils.get_item_properties(
+ s,
+ columns,
+ formatters={
+ 'node_processes': osc_utils.format_list
+ }
+ ) for s in data)
+ )
+
+ def _update_take_action(self, client, app, parsed_args):
+ ngt_id = get_resource_id(
+ client.node_group_templates, parsed_args.node_group_template)
+
+ if parsed_args.json:
+ blob = osc_utils.read_blob_file_contents(parsed_args.json)
+ try:
+ template = json.loads(blob)
+ except ValueError as e:
+ raise exceptions.CommandError(
+ 'An error occurred when reading '
+ 'template from file %s: %s' % (parsed_args.json, e))
+ data = client.node_group_templates.update(
+ ngt_id, **template).to_dict()
+ else:
+ configs = None
+ if parsed_args.configs:
+ blob = osc_utils.read_blob_file_contents(parsed_args.configs)
+ try:
+ configs = json.loads(blob)
+ except ValueError as e:
+ raise exceptions.CommandError(
+ 'An error occurred when reading '
+ 'configs from file %s: %s' % (parsed_args.configs, e))
+
+ shares = None
+ if parsed_args.shares:
+ blob = osc_utils.read_blob_file_contents(parsed_args.shares)
+ try:
+ shares = json.loads(blob)
+ except ValueError as e:
+ raise exceptions.CommandError(
+ 'An error occurred when reading '
+ 'shares from file %s: %s' % (parsed_args.shares, e))
+
+ flavor_id = None
+ if parsed_args.flavor:
+ compute_client = app.client_manager.compute
+ flavor_id = osc_utils.find_resource(
+ compute_client.flavors, parsed_args.flavor).id
+
+ update_dict = create_dict_from_kwargs(
+ name=parsed_args.name,
+ plugin_name=parsed_args.plugin,
+ hadoop_version=parsed_args.plugin_version,
+ flavor_id=flavor_id,
+ description=parsed_args.description,
+ volumes_per_node=parsed_args.volumes_per_node,
+ volumes_size=parsed_args.volumes_size,
+ node_processes=parsed_args.processes,
+ floating_ip_pool=parsed_args.floating_ip_pool,
+ security_groups=parsed_args.security_groups,
+ auto_security_group=parsed_args.use_auto_security_group,
+ availability_zone=parsed_args.availability_zone,
+ volume_type=parsed_args.volumes_type,
+ is_proxy_gateway=parsed_args.is_proxy_gateway,
+ volume_local_to_instance=parsed_args.volume_locality,
+ use_autoconfig=parsed_args.use_autoconfig,
+ is_public=parsed_args.is_public,
+ is_protected=parsed_args.is_protected,
+ node_configs=configs,
+ shares=shares,
+ volumes_availability_zone=(
+ parsed_args.volumes_availability_zone),
+ volume_mount_prefix=parsed_args.volumes_mount_prefix
+ )
+
+ if app.api_version['data_processing'] == '2':
+ if 'hadoop_version' in update_dict:
+ update_dict.pop('hadoop_version')
+ update_dict['plugin_version'] = parsed_args.plugin_version
+ if parsed_args.boot_from_volume is not None:
+ update_dict['boot_from_volume'] = (
+ parsed_args.boot_from_volume)
+ data = client.node_group_templates.update(
+ ngt_id, **update_dict).to_dict()
+
+ return data
+
+ def _import_take_action(self, client, parsed_args):
+ if (not parsed_args.image_id or
+ not parsed_args.flavor_id):
+ raise exceptions.CommandError(
+ 'At least --image_id and --flavor_id should be specified')
+ blob = osc_utils.read_blob_file_contents(parsed_args.json)
+ try:
+ template = json.loads(blob)
+ except ValueError as e:
+ raise exceptions.CommandError(
+ 'An error occurred when reading '
+ 'template from file %s: %s' % (parsed_args.json, e))
+ template['node_group_template']['floating_ip_pool'] = (
+ parsed_args.floating_ip_pool)
+ template['node_group_template']['image_id'] = (
+ parsed_args.image_id)
+ template['node_group_template']['flavor_id'] = (
+ parsed_args.flavor_id)
+ template['node_group_template']['security_groups'] = (
+ parsed_args.security_groups)
+ if parsed_args.name:
+ template['node_group_template']['name'] = parsed_args.name
+ data = client.node_group_templates.create(
+ **template['node_group_template']).to_dict()
+
+ return data
+
+ def _export_take_action(self, client, parsed_args):
+ ngt_id = get_resource_id(
+ client.node_group_templates, parsed_args.node_group_template)
+ response = client.node_group_templates.export(ngt_id)
+ result = json.dumps(response._info, indent=4)+"\n"
+ if parsed_args.file:
+ with open(parsed_args.file, "w+") as file:
+ file.write(result)
+ else:
+ sys.stdout.write(result)
diff --git a/saharaclient/osc/v1/cluster_templates.py b/saharaclient/osc/v1/cluster_templates.py
index 6441c17..1442415 100644
--- a/saharaclient/osc/v1/cluster_templates.py
+++ b/saharaclient/osc/v1/cluster_templates.py
@@ -21,7 +21,7 @@ from osc_lib import utils as osc_utils
from oslo_log import log as logging
from oslo_serialization import jsonutils as json
-from saharaclient.osc.v1 import utils
+from saharaclient.osc import utils
CT_FIELDS = ['id', 'name', 'plugin_name', 'plugin_version', 'description',
'node_groups', 'anti_affinity', 'use_autoconfig', 'is_default',
diff --git a/saharaclient/osc/v1/clusters.py b/saharaclient/osc/v1/clusters.py
index b197c1d..6930bf3 100644
--- a/saharaclient/osc/v1/clusters.py
+++ b/saharaclient/osc/v1/clusters.py
@@ -21,7 +21,7 @@ from osc_lib import utils as osc_utils
from oslo_log import log as logging
from oslo_serialization import jsonutils
-from saharaclient.osc.v1 import utils
+from saharaclient.osc import utils
CLUSTER_FIELDS = ["cluster_template_id", "use_autoconfig", "user_keypair_id",
"status", "image", "node_groups", "id", "info",
diff --git a/saharaclient/osc/v1/data_sources.py b/saharaclient/osc/v1/data_sources.py
index 9360308..f25c6da 100644
--- a/saharaclient/osc/v1/data_sources.py
+++ b/saharaclient/osc/v1/data_sources.py
@@ -19,7 +19,7 @@ from osc_lib.command import command
from osc_lib import utils as osc_utils
from oslo_log import log as logging
-from saharaclient.osc.v1 import utils
+from saharaclient.osc import utils
DATA_SOURCE_FIELDS = ['name', 'id', 'type', 'url', 'description', 'is_public',
'is_protected']
diff --git a/saharaclient/osc/v1/images.py b/saharaclient/osc/v1/images.py
index 4cab041..36ce753 100644
--- a/saharaclient/osc/v1/images.py
+++ b/saharaclient/osc/v1/images.py
@@ -19,7 +19,7 @@ from osc_lib.command import command
from osc_lib import utils as osc_utils
from oslo_log import log as logging
-from saharaclient.osc.v1 import utils
+from saharaclient.osc import utils
IMAGE_FIELDS = ['name', 'id', 'username', 'tags', 'status', 'description']
diff --git a/saharaclient/osc/v1/job_binaries.py b/saharaclient/osc/v1/job_binaries.py
index 530d9c9..8333c27 100644
--- a/saharaclient/osc/v1/job_binaries.py
+++ b/saharaclient/osc/v1/job_binaries.py
@@ -23,7 +23,7 @@ from oslo_log import log as logging
from oslo_serialization import jsonutils
from saharaclient.api import base
-from saharaclient.osc.v1 import utils
+from saharaclient.osc import utils
JOB_BINARY_FIELDS = ['name', 'id', 'url', 'description', 'is_public',
'is_protected']
diff --git a/saharaclient/osc/v1/job_templates.py b/saharaclient/osc/v1/job_templates.py
index 476a49e..9013563 100644
--- a/saharaclient/osc/v1/job_templates.py
+++ b/saharaclient/osc/v1/job_templates.py
@@ -21,7 +21,7 @@ from osc_lib import utils as osc_utils
from oslo_log import log as logging
from oslo_serialization import jsonutils
-from saharaclient.osc.v1 import utils
+from saharaclient.osc import utils
JOB_TEMPLATE_FIELDS = ['name', 'id', 'type', 'mains', 'libs', 'description',
'is_public', 'is_protected']
diff --git a/saharaclient/osc/v1/job_types.py b/saharaclient/osc/v1/job_types.py
index 43fc0f6..2b778f3 100644
--- a/saharaclient/osc/v1/job_types.py
+++ b/saharaclient/osc/v1/job_types.py
@@ -22,8 +22,8 @@ from osc_lib import utils as osc_utils
from oslo_log import log as logging
from oslo_serialization import jsonutils
+from saharaclient.osc import utils
from saharaclient.osc.v1.job_templates import JOB_TYPES_CHOICES
-from saharaclient.osc.v1 import utils
class ListJobTypes(command.Lister):
diff --git a/saharaclient/osc/v1/jobs.py b/saharaclient/osc/v1/jobs.py
index ec9743d..27f1f88 100644
--- a/saharaclient/osc/v1/jobs.py
+++ b/saharaclient/osc/v1/jobs.py
@@ -21,7 +21,7 @@ from osc_lib import utils as osc_utils
from oslo_log import log as logging
from oslo_serialization import jsonutils
-from saharaclient.osc.v1 import utils
+from saharaclient.osc import utils
JOB_FIELDS = ['id', 'job_template_id', 'cluster_id', 'input_id', 'output_id',
'start_time', 'end_time', 'status', 'is_public', 'is_protected',
diff --git a/saharaclient/osc/v1/node_group_templates.py b/saharaclient/osc/v1/node_group_templates.py
index 9cfe282..590547e 100644
--- a/saharaclient/osc/v1/node_group_templates.py
+++ b/saharaclient/osc/v1/node_group_templates.py
@@ -16,12 +16,10 @@
import sys
from osc_lib.command import command
-from osc_lib import exceptions
from osc_lib import utils as osc_utils
from oslo_log import log as logging
-from oslo_serialization import jsonutils as json
-from saharaclient.osc.v1 import utils
+from saharaclient.osc import utils
NGT_FIELDS = ['id', 'name', 'plugin_name', 'plugin_version', 'node_processes',
'description', 'auto_security_group', 'security_groups',
@@ -43,7 +41,7 @@ def _format_ngt_output(data):
del data['volumes_size']
-class CreateNodeGroupTemplate(command.ShowOne):
+class CreateNodeGroupTemplate(command.ShowOne, utils.NodeGroupTemplatesUtils):
"""Creates node group template"""
log = logging.getLogger(__name__ + ".CreateNodeGroupTemplate")
@@ -202,73 +200,7 @@ class CreateNodeGroupTemplate(command.ShowOne):
self.log.debug("take_action(%s)", parsed_args)
client = self.app.client_manager.data_processing
- if parsed_args.json:
- blob = osc_utils.read_blob_file_contents(parsed_args.json)
- try:
- template = json.loads(blob)
- except ValueError as e:
- raise exceptions.CommandError(
- 'An error occurred when reading '
- 'template from file %s: %s' % (parsed_args.json, e))
- data = client.node_group_templates.create(**template).to_dict()
- else:
- if (not parsed_args.name or not parsed_args.plugin or
- not parsed_args.plugin_version or not parsed_args.flavor or
- not parsed_args.processes):
- raise exceptions.CommandError(
- 'At least --name, --plugin, --plugin-version, --processes,'
- ' --flavor arguments should be specified or json template '
- 'should be provided with --json argument')
-
- configs = None
- if parsed_args.configs:
- blob = osc_utils.read_blob_file_contents(parsed_args.configs)
- try:
- configs = json.loads(blob)
- except ValueError as e:
- raise exceptions.CommandError(
- 'An error occurred when reading '
- 'configs from file %s: %s' % (parsed_args.configs, e))
-
- shares = None
- if parsed_args.shares:
- blob = osc_utils.read_blob_file_contents(parsed_args.shares)
- try:
- shares = json.loads(blob)
- except ValueError as e:
- raise exceptions.CommandError(
- 'An error occurred when reading '
- 'shares from file %s: %s' % (parsed_args.shares, e))
-
- compute_client = self.app.client_manager.compute
- flavor_id = osc_utils.find_resource(
- compute_client.flavors, parsed_args.flavor).id
-
- data = client.node_group_templates.create(
- name=parsed_args.name,
- plugin_name=parsed_args.plugin,
- hadoop_version=parsed_args.plugin_version,
- flavor_id=flavor_id,
- description=parsed_args.description,
- volumes_per_node=parsed_args.volumes_per_node,
- volumes_size=parsed_args.volumes_size,
- node_processes=parsed_args.processes,
- floating_ip_pool=parsed_args.floating_ip_pool,
- security_groups=parsed_args.security_groups,
- auto_security_group=parsed_args.auto_security_group,
- availability_zone=parsed_args.availability_zone,
- volume_type=parsed_args.volumes_type,
- is_proxy_gateway=parsed_args.proxy_gateway,
- volume_local_to_instance=parsed_args.volumes_locality,
- use_autoconfig=parsed_args.autoconfig,
- is_public=parsed_args.public,
- is_protected=parsed_args.protected,
- node_configs=configs,
- shares=shares,
- volumes_availability_zone=(
- parsed_args.volumes_availability_zone),
- volume_mount_prefix=parsed_args.volumes_mount_prefix
- ).to_dict()
+ data = self._create_take_action(client, self.app, parsed_args)
_format_ngt_output(data)
data = utils.prepare_data(data, NGT_FIELDS)
@@ -276,7 +208,7 @@ class CreateNodeGroupTemplate(command.ShowOne):
return self.dict2columns(data)
-class ListNodeGroupTemplates(command.Lister):
+class ListNodeGroupTemplates(command.Lister, utils.NodeGroupTemplatesUtils):
"""Lists node group templates"""
log = logging.getLogger(__name__ + ".ListNodeGroupTemplates")
@@ -314,41 +246,10 @@ class ListNodeGroupTemplates(command.Lister):
def take_action(self, parsed_args):
self.log.debug("take_action(%s)", parsed_args)
client = self.app.client_manager.data_processing
- search_opts = {}
- if parsed_args.plugin:
- search_opts['plugin_name'] = parsed_args.plugin
- if parsed_args.plugin_version:
- search_opts['hadoop_version'] = parsed_args.plugin_version
+ return self._list_take_action(client, self.app, parsed_args)
- data = client.node_group_templates.list(search_opts=search_opts)
- if parsed_args.name:
- data = utils.get_by_name_substring(data, parsed_args.name)
-
- if parsed_args.long:
- columns = ('name', 'id', 'plugin_name', 'hadoop_version',
- 'node_processes', 'description')
- column_headers = utils.prepare_column_headers(
- columns, {'hadoop_version': 'plugin_version'})
-
- else:
- columns = ('name', 'id', 'plugin_name', 'hadoop_version')
- column_headers = utils.prepare_column_headers(
- columns, {'hadoop_version': 'plugin_version'})
-
- return (
- column_headers,
- (osc_utils.get_item_properties(
- s,
- columns,
- formatters={
- 'node_processes': osc_utils.format_list
- }
- ) for s in data)
- )
-
-
-class ShowNodeGroupTemplate(command.ShowOne):
+class ShowNodeGroupTemplate(command.ShowOne, utils.NodeGroupTemplatesUtils):
"""Display node group template details"""
log = logging.getLogger(__name__ + ".ShowNodeGroupTemplate")
@@ -378,7 +279,7 @@ class ShowNodeGroupTemplate(command.ShowOne):
return self.dict2columns(data)
-class DeleteNodeGroupTemplate(command.Command):
+class DeleteNodeGroupTemplate(command.Command, utils.NodeGroupTemplatesUtils):
"""Deletes node group template"""
log = logging.getLogger(__name__ + ".DeleteNodeGroupTemplate")
@@ -406,7 +307,7 @@ class DeleteNodeGroupTemplate(command.Command):
'successfully.\n'.format(ngt=ngt))
-class UpdateNodeGroupTemplate(command.ShowOne):
+class UpdateNodeGroupTemplate(command.ShowOne, utils.NodeGroupTemplatesUtils):
"""Updates node group template"""
log = logging.getLogger(__name__ + ".UpdateNodeGroupTemplate")
@@ -620,74 +521,7 @@ class UpdateNodeGroupTemplate(command.ShowOne):
self.log.debug("take_action(%s)", parsed_args)
client = self.app.client_manager.data_processing
- ngt_id = utils.get_resource_id(
- client.node_group_templates, parsed_args.node_group_template)
-
- if parsed_args.json:
- blob = osc_utils.read_blob_file_contents(parsed_args.json)
- try:
- template = json.loads(blob)
- except ValueError as e:
- raise exceptions.CommandError(
- 'An error occurred when reading '
- 'template from file %s: %s' % (parsed_args.json, e))
- data = client.node_group_templates.update(
- ngt_id, **template).to_dict()
- else:
- configs = None
- if parsed_args.configs:
- blob = osc_utils.read_blob_file_contents(parsed_args.configs)
- try:
- configs = json.loads(blob)
- except ValueError as e:
- raise exceptions.CommandError(
- 'An error occurred when reading '
- 'configs from file %s: %s' % (parsed_args.configs, e))
-
- shares = None
- if parsed_args.shares:
- blob = osc_utils.read_blob_file_contents(parsed_args.shares)
- try:
- shares = json.loads(blob)
- except ValueError as e:
- raise exceptions.CommandError(
- 'An error occurred when reading '
- 'shares from file %s: %s' % (parsed_args.shares, e))
-
- flavor_id = None
- if parsed_args.flavor:
- compute_client = self.app.client_manager.compute
- flavor_id = osc_utils.find_resource(
- compute_client.flavors, parsed_args.flavor).id
-
- update_dict = utils.create_dict_from_kwargs(
- name=parsed_args.name,
- plugin_name=parsed_args.plugin,
- hadoop_version=parsed_args.plugin_version,
- flavor_id=flavor_id,
- description=parsed_args.description,
- volumes_per_node=parsed_args.volumes_per_node,
- volumes_size=parsed_args.volumes_size,
- node_processes=parsed_args.processes,
- floating_ip_pool=parsed_args.floating_ip_pool,
- security_groups=parsed_args.security_groups,
- auto_security_group=parsed_args.use_auto_security_group,
- availability_zone=parsed_args.availability_zone,
- volume_type=parsed_args.volumes_type,
- is_proxy_gateway=parsed_args.is_proxy_gateway,
- volume_local_to_instance=parsed_args.volume_locality,
- use_autoconfig=parsed_args.use_autoconfig,
- is_public=parsed_args.is_public,
- is_protected=parsed_args.is_protected,
- node_configs=configs,
- shares=shares,
- volumes_availability_zone=(
- parsed_args.volumes_availability_zone),
- volume_mount_prefix=parsed_args.volumes_mount_prefix
- )
-
- data = client.node_group_templates.update(
- ngt_id, **update_dict).to_dict()
+ data = self._update_take_action(client, self.app, parsed_args)
_format_ngt_output(data)
data = utils.prepare_data(data, NGT_FIELDS)
@@ -695,7 +529,7 @@ class UpdateNodeGroupTemplate(command.ShowOne):
return self.dict2columns(data)
-class ImportNodeGroupTemplate(command.ShowOne):
+class ImportNodeGroupTemplate(command.ShowOne, utils.NodeGroupTemplatesUtils):
"""Imports node group template"""
log = logging.getLogger(__name__ + ".ImportNodeGroupTemplate")
@@ -740,29 +574,8 @@ class ImportNodeGroupTemplate(command.ShowOne):
def take_action(self, parsed_args):
self.log.debug("take_action(%s)", parsed_args)
client = self.app.client_manager.data_processing
- if (not parsed_args.image_id or
- not parsed_args.flavor_id):
- raise exceptions.CommandError(
- 'At least --image_id and --flavor_id should be specified')
- blob = osc_utils.read_blob_file_contents(parsed_args.json)
- try:
- template = json.loads(blob)
- except ValueError as e:
- raise exceptions.CommandError(
- 'An error occurred when reading '
- 'template from file %s: %s' % (parsed_args.json, e))
- template['node_group_template']['floating_ip_pool'] = (
- parsed_args.floating_ip_pool)
- template['node_group_template']['image_id'] = (
- parsed_args.image_id)
- template['node_group_template']['flavor_id'] = (
- parsed_args.flavor_id)
- template['node_group_template']['security_groups'] = (
- parsed_args.security_groups)
- if parsed_args.name:
- template['node_group_template']['name'] = parsed_args.name
- data = client.node_group_templates.create(
- **template['node_group_template']).to_dict()
+
+ data = self._import_take_action(client, parsed_args)
_format_ngt_output(data)
data = utils.prepare_data(data, NGT_FIELDS)
@@ -770,7 +583,7 @@ class ImportNodeGroupTemplate(command.ShowOne):
return self.dict2columns(data)
-class ExportNodeGroupTemplate(command.Command):
+class ExportNodeGroupTemplate(command.Command, utils.NodeGroupTemplatesUtils):
"""Export node group template to JSON"""
log = logging.getLogger(__name__ + ".ExportNodeGroupTemplate")
@@ -794,12 +607,4 @@ class ExportNodeGroupTemplate(command.Command):
def take_action(self, parsed_args):
self.log.debug("take_action(%s)", parsed_args)
client = self.app.client_manager.data_processing
- ngt_id = utils.get_resource_id(
- client.node_group_templates, parsed_args.node_group_template)
- response = client.node_group_templates.export(ngt_id)
- result = json.dumps(response._info, indent=4)+"\n"
- if parsed_args.file:
- with open(parsed_args.file, "w+") as file:
- file.write(result)
- else:
- sys.stdout.write(result)
+ self._export_take_action(client, parsed_args)
diff --git a/saharaclient/osc/v1/plugins.py b/saharaclient/osc/v1/plugins.py
index 8b10420..ac2d25a 100644
--- a/saharaclient/osc/v1/plugins.py
+++ b/saharaclient/osc/v1/plugins.py
@@ -22,7 +22,7 @@ from osc_lib import utils as osc_utils
from oslo_log import log as logging
from oslo_serialization import jsonutils
-from saharaclient.osc.v1 import utils
+from saharaclient.osc import utils
def _serialize_label_items(plugin):
diff --git a/saharaclient/osc/v1/utils.py b/saharaclient/osc/v1/utils.py
deleted file mode 100644
index 973f385..0000000
--- a/saharaclient/osc/v1/utils.py
+++ /dev/null
@@ -1,101 +0,0 @@
-# Copyright (c) 2015 Mirantis Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-# implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import time
-
-from oslo_utils import timeutils
-from oslo_utils import uuidutils
-
-from saharaclient.api import base
-
-
-def get_resource(manager, name_or_id, **kwargs):
- if uuidutils.is_uuid_like(name_or_id):
- return manager.get(name_or_id, **kwargs)
- else:
- resource = manager.find_unique(name=name_or_id)
- if kwargs:
- # we really need additional call to apply kwargs
- resource = manager.get(resource.id, **kwargs)
- return resource
-
-
-def created_at_sorted(objs, reverse=False):
- return sorted(objs, key=created_at_key, reverse=reverse)
-
-
-def random_name(prefix=None):
- return "%s-%s" % (prefix, uuidutils.generate_uuid()[:8])
-
-
-def created_at_key(obj):
- return timeutils.parse_isotime(obj["created_at"])
-
-
-def get_resource_id(manager, name_or_id):
- if uuidutils.is_uuid_like(name_or_id):
- return name_or_id
- else:
- return manager.find_unique(name=name_or_id).id
-
-
-def create_dict_from_kwargs(**kwargs):
- return {k: v for (k, v) in kwargs.items() if v is not None}
-
-
-def prepare_data(data, fields):
- new_data = {}
- for f in fields:
- if f in data:
- new_data[f.replace('_', ' ').capitalize()] = data[f]
-
- return new_data
-
-
-def unzip(data):
- return zip(*data)
-
-
-def extend_columns(columns, items):
- return unzip(list(unzip(columns)) + [('', '')] + items)
-
-
-def prepare_column_headers(columns, remap=None):
- remap = remap if remap else {}
- new_columns = []
- for c in columns:
- for old, new in remap.items():
- c = c.replace(old, new)
- new_columns.append(c.replace('_', ' ').capitalize())
-
- return new_columns
-
-
-def get_by_name_substring(data, name):
- return [obj for obj in data if name in obj.name]
-
-
-def wait_for_delete(manager, obj_id, sleep_time=5, timeout=3000):
- s_time = timeutils.utcnow()
- while timeutils.delta_seconds(s_time, timeutils.utcnow()) < timeout:
- try:
- manager.get(obj_id)
- except base.APIException as ex:
- if ex.error_code == 404:
- return True
- raise
- time.sleep(sleep_time)
-
- return False
diff --git a/saharaclient/osc/v2/__init__.py b/saharaclient/osc/v2/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/saharaclient/osc/v2/__init__.py
diff --git a/saharaclient/osc/v2/node_group_templates.py b/saharaclient/osc/v2/node_group_templates.py
new file mode 100644
index 0000000..c2aaf94
--- /dev/null
+++ b/saharaclient/osc/v2/node_group_templates.py
@@ -0,0 +1,176 @@
+# Copyright (c) 2018 Red Hat Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+
+from osc_lib import utils as osc_utils
+
+from saharaclient.osc import utils
+from saharaclient.osc.v1 import node_group_templates as ngt_v1
+
+NGT_FIELDS = ['id', 'name', 'plugin_name', 'plugin_version', 'node_processes',
+ 'description', 'auto_security_group', 'security_groups',
+ 'availability_zone', 'flavor_id', 'floating_ip_pool',
+ 'volumes_per_node', 'volumes_size',
+ 'volume_type', 'volume_local_to_instance', 'volume_mount_prefix',
+ 'volumes_availability_zone', 'use_autoconfig',
+ 'is_proxy_gateway', 'is_default', 'is_protected', 'is_public',
+ 'boot_from_volume']
+
+
+def _format_ngt_output(data):
+ data['node_processes'] = osc_utils.format_list(data['node_processes'])
+ if data['volumes_per_node'] == 0:
+ del data['volume_local_to_instance']
+ del data['volume_mount_prefix']
+ del data['volume_type'],
+ del data['volumes_availability_zone']
+ del data['volumes_size']
+
+
+class CreateNodeGroupTemplate(ngt_v1.CreateNodeGroupTemplate,
+ utils.NodeGroupTemplatesUtils):
+ """Creates node group template"""
+
+ def get_parser(self, prog_name):
+ parser = super(CreateNodeGroupTemplate, self).get_parser(prog_name)
+
+ parser.add_argument(
+ '--boot-from-volume',
+ action='store_true',
+ default=False,
+ help="Make the node group bootable from volume",
+ )
+ return parser
+
+ def take_action(self, parsed_args):
+ self.log.debug("take_action(%s)", parsed_args)
+ client = self.app.client_manager.data_processing
+
+ data = self._create_take_action(client, self.app, parsed_args)
+
+ _format_ngt_output(data)
+ data = utils.prepare_data(data, NGT_FIELDS)
+
+ return self.dict2columns(data)
+
+
+class ListNodeGroupTemplates(ngt_v1.ListNodeGroupTemplates,
+ utils.NodeGroupTemplatesUtils):
+ """Lists node group templates"""
+
+ def take_action(self, parsed_args):
+ self.log.debug("take_action(%s)", parsed_args)
+ client = self.app.client_manager.data_processing
+ return self._list_take_action(client, self.app, parsed_args)
+
+
+class ShowNodeGroupTemplate(ngt_v1.ShowNodeGroupTemplate,
+ utils.NodeGroupTemplatesUtils):
+ """Display node group template details"""
+
+ def take_action(self, parsed_args):
+ self.log.debug("take_action(%s)", parsed_args)
+ client = self.app.client_manager.data_processing
+
+ data = utils.get_resource(
+ client.node_group_templates,
+ parsed_args.node_group_template).to_dict()
+
+ _format_ngt_output(data)
+
+ data = utils.prepare_data(data, NGT_FIELDS)
+
+ return self.dict2columns(data)
+
+
+class DeleteNodeGroupTemplate(ngt_v1.DeleteNodeGroupTemplate,
+ utils.NodeGroupTemplatesUtils):
+ """Deletes node group template"""
+
+ def take_action(self, parsed_args):
+ self.log.debug("take_action(%s)", parsed_args)
+ client = self.app.client_manager.data_processing
+ for ngt in parsed_args.node_group_template:
+ ngt_id = utils.get_resource_id(
+ client.node_group_templates, ngt)
+ client.node_group_templates.delete(ngt_id)
+ sys.stdout.write(
+ 'Node group template "{ngt}" has been removed '
+ 'successfully.\n'.format(ngt=ngt))
+
+
+class UpdateNodeGroupTemplate(ngt_v1.UpdateNodeGroupTemplate,
+ utils.NodeGroupTemplatesUtils):
+ """Updates node group template"""
+
+ def get_parser(self, prog_name):
+ parser = super(UpdateNodeGroupTemplate, self).get_parser(prog_name)
+
+ bootfromvolume = parser.add_mutually_exclusive_group()
+ bootfromvolume.add_argument(
+ '--boot-from-volume-enable',
+ action='store_true',
+ help='Makes node group bootable from volume.',
+ dest='boot_from_volume'
+ )
+ bootfromvolume.add_argument(
+ '--boot-from-volume-disable',
+ action='store_false',
+ help='Makes node group not bootable from volume.',
+ dest='boot_from_volume'
+ )
+ parser.set_defaults(is_public=None, is_protected=None,
+ is_proxy_gateway=None, volume_locality=None,
+ use_auto_security_group=None, use_autoconfig=None,
+ boot_from_volume=None)
+ return parser
+
+ def take_action(self, parsed_args):
+ self.log.debug("take_action(%s)", parsed_args)
+ client = self.app.client_manager.data_processing
+
+ data = self._update_take_action(client, self.app, parsed_args)
+
+ _format_ngt_output(data)
+ data = utils.prepare_data(data, NGT_FIELDS)
+
+ return self.dict2columns(data)
+
+
+class ImportNodeGroupTemplate(ngt_v1.ImportNodeGroupTemplate,
+ utils.NodeGroupTemplatesUtils):
+ """Imports node group template"""
+
+ def take_action(self, parsed_args):
+ self.log.debug("take_action(%s)", parsed_args)
+ client = self.app.client_manager.data_processing
+
+ data = self._import_take_action(client, parsed_args)
+
+ _format_ngt_output(data)
+ data = utils.prepare_data(data, NGT_FIELDS)
+
+ return self.dict2columns(data)
+
+
+class ExportNodeGroupTemplate(ngt_v1.ExportNodeGroupTemplate,
+ utils.NodeGroupTemplatesUtils):
+ """Export node group template to JSON"""
+
+ def take_action(self, parsed_args):
+ self.log.debug("take_action(%s)", parsed_args)
+ client = self.app.client_manager.data_processing
+ self._export_take_action(client, parsed_args)
diff --git a/saharaclient/tests/unit/base.py b/saharaclient/tests/unit/base.py
index 6d95f6a..35f648b 100644
--- a/saharaclient/tests/unit/base.py
+++ b/saharaclient/tests/unit/base.py
@@ -17,19 +17,21 @@ import testtools
from saharaclient.api import base
from saharaclient.api import client
+from keystoneauth1 import session
from requests_mock.contrib import fixture
class BaseTestCase(testtools.TestCase):
URL = 'http://localhost:8386'
- TOKEN = 'token'
+ SESSION = session.Session()
def setUp(self):
super(BaseTestCase, self).setUp()
self.responses = self.useFixture(fixture.Fixture())
- self.client = client.Client(sahara_url=self.URL,
- input_auth_token=self.TOKEN)
+ self.client = client.Client(session=self.SESSION, sahara_url=self.URL)
+ self.client_v2 = client.ClientV2(session=self.SESSION,
+ sahara_url=self.URL)
def assertFields(self, body, obj):
for key, value in body.items():
diff --git a/saharaclient/tests/unit/osc/test_plugin.py b/saharaclient/tests/unit/osc/test_plugin.py
index 6e288b9..cf5ac7d 100644
--- a/saharaclient/tests/unit/osc/test_plugin.py
+++ b/saharaclient/tests/unit/osc/test_plugin.py
@@ -28,15 +28,27 @@ class TestDataProcessingPlugin(base.BaseTestCase):
instance._api_version = {"data_processing": '1.1'}
instance.session = 'session'
instance._region_name = 'region_name'
- instance._cacert = 'cacert'
- instance._insecure = 'insecure'
instance._cli_options.data_processing_url = 'url'
instance._interface = 'public'
plugin.make_client(instance)
p_client.assert_called_with(session='session',
region_name='region_name',
- cacert='cacert',
- insecure='insecure',
+ sahara_url='url',
+ endpoint_type='public')
+
+ @mock.patch("saharaclient.api.client.ClientV2")
+ def test_make_client_v2(self, p_client):
+
+ instance = mock.Mock()
+ instance._api_version = {"data_processing": '2'}
+ instance.session = 'session'
+ instance._region_name = 'region_name'
+ instance._cli_options.data_processing_url = 'url'
+ instance._interface = 'public'
+
+ plugin.make_client(instance)
+ p_client.assert_called_with(session='session',
+ region_name='region_name',
sahara_url='url',
endpoint_type='public')
diff --git a/saharaclient/tests/unit/osc/v1/test_node_group_templates.py b/saharaclient/tests/unit/osc/v1/test_node_group_templates.py
index b23a4e4..10dc5aa 100644
--- a/saharaclient/tests/unit/osc/v1/test_node_group_templates.py
+++ b/saharaclient/tests/unit/osc/v1/test_node_group_templates.py
@@ -59,6 +59,7 @@ class TestNodeGroupTemplates(fakes.TestDataProcessing):
self.ngt_mock = (
self.app.client_manager.data_processing.node_group_templates)
self.ngt_mock.reset_mock()
+ self.app.api_version['data_processing'] = '1'
class TestCreateNodeGroupTemplate(TestNodeGroupTemplates):
diff --git a/saharaclient/tests/unit/osc/v1/test_utils.py b/saharaclient/tests/unit/osc/v1/test_utils.py
index 7a9ccf1..59ddbdc 100644
--- a/saharaclient/tests/unit/osc/v1/test_utils.py
+++ b/saharaclient/tests/unit/osc/v1/test_utils.py
@@ -15,7 +15,7 @@
import mock
-from saharaclient.osc.v1 import utils
+from saharaclient.osc import utils
from saharaclient.tests.unit import base
diff --git a/saharaclient/tests/unit/osc/v2/__init__.py b/saharaclient/tests/unit/osc/v2/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/saharaclient/tests/unit/osc/v2/__init__.py
diff --git a/saharaclient/tests/unit/osc/v2/test_node_group_templates.py b/saharaclient/tests/unit/osc/v2/test_node_group_templates.py
new file mode 100644
index 0000000..df174dd
--- /dev/null
+++ b/saharaclient/tests/unit/osc/v2/test_node_group_templates.py
@@ -0,0 +1,412 @@
+# Copyright (c) 2015 Mirantis Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import mock
+from osc_lib.tests import utils as osc_utils
+
+from saharaclient.api import node_group_templates as api_ngt
+from saharaclient.osc.v2 import node_group_templates as osc_ngt
+from saharaclient.tests.unit.osc.v1 import fakes
+
+
+NGT_INFO = {
+ "node_processes": [
+ "namenode",
+ "tasktracker"
+ ],
+ "name": "template",
+ "tenant_id": "tenant_id",
+ "availability_zone": 'av_zone',
+ "use_autoconfig": True,
+ "plugin_version": "0.1",
+ "shares": None,
+ "is_default": False,
+ "description": 'description',
+ "node_configs": {},
+ "is_proxy_gateway": False,
+ "auto_security_group": True,
+ "volume_type": None,
+ "volumes_size": 2,
+ "volume_mount_prefix": "/volumes/disk",
+ "plugin_name": "fake",
+ "is_protected": False,
+ "security_groups": None,
+ "floating_ip_pool": "floating_pool",
+ "is_public": True,
+ "id": "ng_id",
+ "flavor_id": "flavor_id",
+ "volumes_availability_zone": None,
+ "volumes_per_node": 2,
+ "volume_local_to_instance": False,
+ "boot_from_volume": False
+}
+
+
+class TestNodeGroupTemplates(fakes.TestDataProcessing):
+ def setUp(self):
+ super(TestNodeGroupTemplates, self).setUp()
+ self.ngt_mock = (
+ self.app.client_manager.data_processing.node_group_templates)
+ self.ngt_mock.reset_mock()
+ self.app.api_version['data_processing'] = '2'
+
+
+class TestCreateNodeGroupTemplate(TestNodeGroupTemplates):
+ # TODO(apavlov): check for creation with --json
+ def setUp(self):
+ super(TestCreateNodeGroupTemplate, self).setUp()
+ self.ngt_mock.create.return_value = api_ngt.NodeGroupTemplate(
+ None, NGT_INFO)
+
+ self.fl_mock = self.app.client_manager.compute.flavors
+ self.fl_mock.get.return_value = mock.Mock(id='flavor_id')
+ self.fl_mock.reset_mock()
+
+ # Command to test
+ self.cmd = osc_ngt.CreateNodeGroupTemplate(self.app, None)
+
+ def test_ngt_create_minimum_options(self):
+ arglist = ['--name', 'template', '--plugin', 'fake',
+ '--plugin-version', '0.1', '--processes', 'namenode',
+ 'tasktracker', '--flavor', 'flavor_id']
+ verifylist = [('name', 'template'), ('plugin', 'fake'),
+ ('plugin_version', '0.1'), ('flavor', 'flavor_id'),
+ ('processes', ['namenode', 'tasktracker'])]
+
+ parsed_args = self.check_parser(self.cmd, arglist, verifylist)
+
+ self.cmd.take_action(parsed_args)
+
+ # Check that correct arguments were passed
+ self.ngt_mock.create.assert_called_once_with(
+ auto_security_group=False, availability_zone=None,
+ description=None, flavor_id='flavor_id', floating_ip_pool=None,
+ plugin_version='0.1', is_protected=False, is_proxy_gateway=False,
+ is_public=False, name='template',
+ node_processes=['namenode', 'tasktracker'], plugin_name='fake',
+ security_groups=None, use_autoconfig=False,
+ volume_local_to_instance=False,
+ volume_type=None, volumes_availability_zone=None,
+ volumes_per_node=None, volumes_size=None, shares=None,
+ node_configs=None, volume_mount_prefix=None,
+ boot_from_volume=False)
+
+ def test_ngt_create_all_options(self):
+ arglist = ['--name', 'template', '--plugin', 'fake',
+ '--plugin-version', '0.1', '--processes', 'namenode',
+ 'tasktracker', '--security-groups', 'secgr',
+ '--auto-security-group', '--availability-zone', 'av_zone',
+ '--flavor', 'flavor_id', '--floating-ip-pool',
+ 'floating_pool', '--volumes-per-node',
+ '2', '--volumes-size', '2', '--volumes-type', 'type',
+ '--volumes-availability-zone', 'vavzone',
+ '--volumes-mount-prefix', '/volume/asd',
+ '--volumes-locality', '--description', 'descr',
+ '--autoconfig', '--proxy-gateway', '--public',
+ '--protected', '--boot-from-volume']
+
+ verifylist = [('name', 'template'), ('plugin', 'fake'),
+ ('plugin_version', '0.1'),
+ ('processes', ['namenode', 'tasktracker']),
+ ('security_groups', ['secgr']),
+ ('auto_security_group', True),
+ ('availability_zone', 'av_zone'),
+ ('flavor', 'flavor_id'),
+ ('floating_ip_pool', 'floating_pool'),
+ ('volumes_per_node', 2), ('volumes_size', 2),
+ ('volumes_type', 'type'),
+ ('volumes_availability_zone', 'vavzone'),
+ ('volumes_mount_prefix', '/volume/asd'),
+ ('volumes_locality', True), ('description', 'descr'),
+ ('autoconfig', True), ('proxy_gateway', True),
+ ('public', True), ('protected', True),
+ ('boot_from_volume', True)]
+
+ parsed_args = self.check_parser(self.cmd, arglist, verifylist)
+
+ columns, data = self.cmd.take_action(parsed_args)
+
+ # Check that correct arguments were passed
+ self.ngt_mock.create.assert_called_once_with(
+ auto_security_group=True, availability_zone='av_zone',
+ description='descr', flavor_id='flavor_id',
+ floating_ip_pool='floating_pool', plugin_version='0.1',
+ is_protected=True, is_proxy_gateway=True, is_public=True,
+ name='template', node_processes=['namenode', 'tasktracker'],
+ plugin_name='fake', security_groups=['secgr'], use_autoconfig=True,
+ volume_local_to_instance=True, volume_type='type',
+ volumes_availability_zone='vavzone', volumes_per_node=2,
+ volumes_size=2, shares=None, node_configs=None,
+ volume_mount_prefix='/volume/asd', boot_from_volume=True)
+
+ # Check that columns are correct
+ expected_columns = (
+ 'Auto security group', 'Availability zone', 'Boot from volume',
+ 'Description', 'Flavor id', 'Floating ip pool', 'Id',
+ 'Is default', 'Is protected', 'Is proxy gateway', 'Is public',
+ 'Name', 'Node processes', 'Plugin name', 'Plugin version',
+ 'Security groups', 'Use autoconfig', 'Volume local to instance',
+ 'Volume mount prefix', 'Volume type', 'Volumes availability zone',
+ 'Volumes per node', 'Volumes size')
+ self.assertEqual(expected_columns, columns)
+
+ # Check that data is correct
+ expected_data = (
+ True, 'av_zone', False, 'description', 'flavor_id',
+ 'floating_pool', 'ng_id', False, False, False, True,
+ 'template', 'namenode, tasktracker', 'fake', '0.1', None, True,
+ False, '/volumes/disk', None, None, 2, 2)
+ self.assertEqual(expected_data, data)
+
+
+class TestListNodeGroupTemplates(TestNodeGroupTemplates):
+ def setUp(self):
+ super(TestListNodeGroupTemplates, self).setUp()
+ self.ngt_mock.list.return_value = [api_ngt.NodeGroupTemplate(
+ None, NGT_INFO)]
+
+ # Command to test
+ self.cmd = osc_ngt.ListNodeGroupTemplates(self.app, None)
+
+ def test_ngt_list_no_options(self):
+ arglist = []
+ verifylist = []
+
+ parsed_args = self.check_parser(self.cmd, arglist, verifylist)
+
+ columns, data = self.cmd.take_action(parsed_args)
+
+ # Check that columns are correct
+ expected_columns = ['Name', 'Id', 'Plugin name', 'Plugin version']
+ self.assertEqual(expected_columns, columns)
+
+ # Check that data is correct
+ expected_data = [('template', 'ng_id', 'fake', '0.1')]
+ self.assertEqual(expected_data, list(data))
+
+ def test_ngt_list_long(self):
+ arglist = ['--long']
+ verifylist = [('long', True)]
+
+ parsed_args = self.check_parser(self.cmd, arglist, verifylist)
+
+ columns, data = self.cmd.take_action(parsed_args)
+
+ # Check that columns are correct
+ expected_columns = ['Name', 'Id', 'Plugin name', 'Plugin version',
+ 'Node processes', 'Description']
+ self.assertEqual(expected_columns, columns)
+
+ # Check that data is correct
+ expected_data = [('template', 'ng_id', 'fake', '0.1',
+ 'namenode, tasktracker', 'description')]
+ self.assertEqual(expected_data, list(data))
+
+ def test_ngt_list_extra_search_opts(self):
+ arglist = ['--plugin', 'fake', '--plugin-version', '0.1', '--name',
+ 'templ']
+ verifylist = [('plugin', 'fake'), ('plugin_version', '0.1'),
+ ('name', 'templ')]
+
+ parsed_args = self.check_parser(self.cmd, arglist, verifylist)
+
+ columns, data = self.cmd.take_action(parsed_args)
+
+ # Check that columns are correct
+ expected_columns = ['Name', 'Id', 'Plugin name', 'Plugin version']
+ self.assertEqual(expected_columns, columns)
+
+ # Check that data is correct
+ expected_data = [('template', 'ng_id', 'fake', '0.1')]
+ self.assertEqual(expected_data, list(data))
+
+
+class TestShowNodeGroupTemplate(TestNodeGroupTemplates):
+ def setUp(self):
+ super(TestShowNodeGroupTemplate, self).setUp()
+ self.ngt_mock.find_unique.return_value = api_ngt.NodeGroupTemplate(
+ None, NGT_INFO)
+
+ # Command to test
+ self.cmd = osc_ngt.ShowNodeGroupTemplate(self.app, None)
+
+ def test_ngt_show(self):
+ arglist = ['template']
+ verifylist = [('node_group_template', 'template')]
+
+ parsed_args = self.check_parser(self.cmd, arglist, verifylist)
+
+ columns, data = self.cmd.take_action(parsed_args)
+
+ # Check that correct arguments were passed
+ self.ngt_mock.find_unique.assert_called_once_with(name='template')
+
+ # Check that columns are correct
+ expected_columns = (
+ 'Auto security group', 'Availability zone', 'Boot from volume',
+ 'Description', 'Flavor id', 'Floating ip pool', 'Id',
+ 'Is default', 'Is protected', 'Is proxy gateway', 'Is public',
+ 'Name', 'Node processes', 'Plugin name', 'Plugin version',
+ 'Security groups', 'Use autoconfig', 'Volume local to instance',
+ 'Volume mount prefix', 'Volume type', 'Volumes availability zone',
+ 'Volumes per node', 'Volumes size')
+ self.assertEqual(expected_columns, columns)
+
+ # Check that data is correct
+ expected_data = (
+ True, 'av_zone', False, 'description', 'flavor_id',
+ 'floating_pool', 'ng_id', False, False, False, True,
+ 'template', 'namenode, tasktracker', 'fake', '0.1', None, True,
+ False, '/volumes/disk', None, None, 2, 2)
+ self.assertEqual(expected_data, data)
+
+
+class TestDeleteNodeGroupTemplate(TestNodeGroupTemplates):
+ def setUp(self):
+ super(TestDeleteNodeGroupTemplate, self).setUp()
+ self.ngt_mock.find_unique.return_value = api_ngt.NodeGroupTemplate(
+ None, NGT_INFO)
+
+ # Command to test
+ self.cmd = osc_ngt.DeleteNodeGroupTemplate(self.app, None)
+
+ def test_ngt_delete(self):
+ arglist = ['template']
+ verifylist = [('node_group_template', ['template'])]
+
+ parsed_args = self.check_parser(self.cmd, arglist, verifylist)
+
+ self.cmd.take_action(parsed_args)
+
+ # Check that correct arguments were passed
+ self.ngt_mock.delete.assert_called_once_with('ng_id')
+
+
+class TestUpdateNodeGroupTemplate(TestNodeGroupTemplates):
+ # TODO(apavlov): check for update with --json
+ def setUp(self):
+ super(TestUpdateNodeGroupTemplate, self).setUp()
+ self.ngt_mock.find_unique.return_value = api_ngt.NodeGroupTemplate(
+ None, NGT_INFO)
+ self.ngt_mock.update.return_value = api_ngt.NodeGroupTemplate(
+ None, NGT_INFO)
+
+ self.fl_mock = self.app.client_manager.compute.flavors
+ self.fl_mock.get.return_value = mock.Mock(id='flavor_id')
+ self.fl_mock.reset_mock()
+
+ # Command to test
+ self.cmd = osc_ngt.UpdateNodeGroupTemplate(self.app, None)
+
+ def test_ngt_update_no_options(self):
+ arglist = []
+ verifylist = []
+
+ self.assertRaises(osc_utils.ParserException, self.check_parser,
+ self.cmd, arglist, verifylist)
+
+ def test_ngt_update_nothing_updated(self):
+ arglist = ['template']
+ verifylist = [('node_group_template', 'template')]
+
+ parsed_args = self.check_parser(self.cmd, arglist, verifylist)
+
+ self.cmd.take_action(parsed_args)
+
+ # Check that correct arguments were passed
+ self.ngt_mock.update.assert_called_once_with('ng_id')
+
+ def test_ngt_update_all_options(self):
+ arglist = ['template', '--name', 'template', '--plugin', 'fake',
+ '--plugin-version', '0.1', '--processes', 'namenode',
+ 'tasktracker', '--security-groups', 'secgr',
+ '--auto-security-group-enable',
+ '--availability-zone', 'av_zone', '--flavor', 'flavor_id',
+ '--floating-ip-pool', 'floating_pool', '--volumes-per-node',
+ '2', '--volumes-size', '2', '--volumes-type', 'type',
+ '--volumes-availability-zone', 'vavzone',
+ '--volumes-mount-prefix', '/volume/asd',
+ '--volumes-locality-enable', '--description', 'descr',
+ '--autoconfig-enable', '--proxy-gateway-enable', '--public',
+ '--protected', '--boot-from-volume-enable']
+
+ verifylist = [('node_group_template', 'template'),
+ ('name', 'template'), ('plugin', 'fake'),
+ ('plugin_version', '0.1'),
+ ('processes', ['namenode', 'tasktracker']),
+ ('security_groups', ['secgr']),
+ ('use_auto_security_group', True),
+ ('availability_zone', 'av_zone'),
+ ('flavor', 'flavor_id'),
+ ('floating_ip_pool', 'floating_pool'),
+ ('volumes_per_node', 2), ('volumes_size', 2),
+ ('volumes_type', 'type'),
+ ('volumes_availability_zone', 'vavzone'),
+ ('volumes_mount_prefix', '/volume/asd'),
+ ('volume_locality', True),
+ ('description', 'descr'), ('use_autoconfig', True),
+ ('is_proxy_gateway', True),
+ ('is_public', True), ('is_protected', True),
+ ('boot_from_volume', True)]
+
+ parsed_args = self.check_parser(self.cmd, arglist, verifylist)
+
+ columns, data = self.cmd.take_action(parsed_args)
+
+ # Check that correct arguments were passed
+ self.ngt_mock.update.assert_called_once_with(
+ 'ng_id',
+ auto_security_group=True, availability_zone='av_zone',
+ description='descr', flavor_id='flavor_id',
+ floating_ip_pool='floating_pool', plugin_version='0.1',
+ is_protected=True, is_proxy_gateway=True, is_public=True,
+ name='template', node_processes=['namenode', 'tasktracker'],
+ plugin_name='fake', security_groups=['secgr'], use_autoconfig=True,
+ volume_local_to_instance=True, volume_type='type',
+ volumes_availability_zone='vavzone', volumes_per_node=2,
+ volumes_size=2, volume_mount_prefix='/volume/asd',
+ boot_from_volume=True)
+
+ # Check that columns are correct
+ expected_columns = (
+ 'Auto security group', 'Availability zone', 'Boot from volume',
+ 'Description', 'Flavor id', 'Floating ip pool', 'Id',
+ 'Is default', 'Is protected', 'Is proxy gateway', 'Is public',
+ 'Name', 'Node processes', 'Plugin name', 'Plugin version',
+ 'Security groups', 'Use autoconfig', 'Volume local to instance',
+ 'Volume mount prefix', 'Volume type', 'Volumes availability zone',
+ 'Volumes per node', 'Volumes size')
+ self.assertEqual(expected_columns, columns)
+
+ # Check that data is correct
+ expected_data = (
+ True, 'av_zone', False, 'description', 'flavor_id',
+ 'floating_pool', 'ng_id', False, False, False, True,
+ 'template', 'namenode, tasktracker', 'fake', '0.1', None, True,
+ False, '/volumes/disk', None, None, 2, 2)
+ self.assertEqual(expected_data, data)
+
+ def test_ngt_update_private_unprotected(self):
+ arglist = ['template', '--private', '--unprotected']
+ verifylist = [('node_group_template', 'template'),
+ ('is_public', False), ('is_protected', False)]
+
+ parsed_args = self.check_parser(self.cmd, arglist, verifylist)
+
+ self.cmd.take_action(parsed_args)
+
+ # Check that correct arguments were passed
+ self.ngt_mock.update.assert_called_once_with(
+ 'ng_id', is_protected=False, is_public=False)
diff --git a/saharaclient/tests/unit/test_data_sources.py b/saharaclient/tests/unit/test_data_sources.py
index 13476d1..594d654 100644
--- a/saharaclient/tests/unit/test_data_sources.py
+++ b/saharaclient/tests/unit/test_data_sources.py
@@ -15,6 +15,7 @@
from saharaclient.api import data_sources as ds
from saharaclient.tests.unit import base
+import mock
from oslo_serialization import jsonutils as json
@@ -92,3 +93,27 @@ class DataSourceTest(base.BaseTestCase):
updated = self.client.data_sources.update("id", self.update_json)
self.assertEqual(self.update_json["name"], updated.name)
self.assertEqual(self.update_json["url"], updated.url)
+
+ @mock.patch('saharaclient.api.base.ResourceManager._create')
+ def test_create_data_source_s3_or_swift_credentials(self, create):
+ # Data source without any credential arguments
+ self.client.data_sources.create('ds', '', 'swift', 'swift://path')
+ self.assertNotIn('credentials', create.call_args[0][1])
+
+ # Data source with Swift credential arguments
+ self.client.data_sources.create('ds', '', 'swift', 'swift://path',
+ credential_user='user')
+ self.assertIn('credentials', create.call_args[0][1])
+
+ # Data source with S3 credential arguments
+ self.client.data_sources.create('ds', '', 'swift', 'swift://path',
+ s3_credentials={'accesskey': 'a'})
+ self.assertIn('credentials', create.call_args[0][1])
+ self.assertIn('accesskey', create.call_args[0][1]['credentials'])
+
+        # Data source with both S3 and Swift credential arguments
+ self.client.data_sources.create('ds', '', 's3', 's3://path',
+ credential_user='swift_user',
+ s3_credentials={'accesskey': 's3_a'})
+ self.assertIn('user', create.call_args[0][1]['credentials'])
+ self.assertNotIn('accesskey', create.call_args[0][1]['credentials'])
diff --git a/saharaclient/tests/unit/test_node_group_templates.py b/saharaclient/tests/unit/test_node_group_templates.py
index f37416a..959bbee 100644
--- a/saharaclient/tests/unit/test_node_group_templates.py
+++ b/saharaclient/tests/unit/test_node_group_templates.py
@@ -154,3 +154,111 @@ class NodeGroupTemplateTest(base.BaseTestCase):
self.assertEqual(url, self.responses.last_request.url)
self.assertIsInstance(resp, ng.NodeGroupTemplate)
self.assertDictsEqual(self.body, resp.__dict__[u'node_group_template'])
+
+
+class NodeGroupTemplateTestV2(base.BaseTestCase):
+ body = {
+ "name": "name",
+ "plugin_name": "plugin",
+ "plugin_version": "1",
+ "flavor_id": "2",
+ "description": "description",
+ "volumes_per_node": "3",
+ "volumes_size": "4",
+ "node_processes": ["datanode"],
+ "use_autoconfig": True,
+ "volume_mount_prefix": '/volumes/disk',
+ "boot_from_volume": False
+ }
+
+ update_json = {
+ "node_group_template": {
+ "name": "UpdatedName",
+ "plugin_name": "new_plugin",
+ "plugin_version": "2",
+ "flavor_id": "7",
+ "description": "description",
+ "volumes_per_node": "3",
+ "volumes_size": "4",
+ "node_processes": ["datanode", "namenode"],
+ "use_autoconfig": False,
+ "volume_mount_prefix": '/volumes/newdisk',
+ "boot_from_volume": True
+ }
+ }
+
+ def test_create_node_group_template_v2(self):
+ url = self.URL + '/node-group-templates'
+ self.responses.post(url, status_code=202,
+ json={'node_group_template': self.body})
+
+ resp = self.client_v2.node_group_templates.create(**self.body)
+
+ self.assertEqual(url, self.responses.last_request.url)
+ self.assertEqual(self.body,
+ json.loads(self.responses.last_request.body))
+ self.assertIsInstance(resp, ng.NodeGroupTemplate)
+ self.assertFields(self.body, resp)
+
+ def test_update_node_group_template_v2(self):
+ url = self.URL + '/node-group-templates'
+ self.responses.post(url, status_code=202,
+ json={'node_group_template': self.body})
+ resp = self.client_v2.node_group_templates.create(**self.body)
+
+ update_url = self.URL + '/node-group-templates/id'
+ self.responses.patch(update_url, status_code=202,
+ json=self.update_json)
+
+ # check that all parameters will be updated
+ updated = self.client_v2.node_group_templates.update(
+ "id",
+ resp.name,
+ resp.plugin_name,
+ resp.plugin_version,
+ resp.flavor_id,
+ description=getattr(resp, "description", None),
+ volumes_per_node=getattr(resp, "volumes_per_node", None),
+ node_configs=getattr(resp, "node_configs", None),
+ floating_ip_pool=getattr(resp, "floating_ip_pool", None),
+ security_groups=getattr(resp, "security_groups", None),
+ auto_security_group=getattr(resp, "auto_security_group", None),
+ availability_zone=getattr(resp, "availability_zone", None),
+ volumes_availability_zone=getattr(resp,
+ "volumes_availability_zone",
+ None),
+ volume_type=getattr(resp, "volume_type", None),
+ image_id=getattr(resp, "image_id", None),
+ is_proxy_gateway=getattr(resp, "is_proxy_gateway", None),
+ volume_local_to_instance=getattr(resp,
+ "volume_local_to_instance",
+ None),
+ use_autoconfig=False,
+ boot_from_volume=getattr(resp, "boot_from_volume", None)
+ )
+ self.assertIsInstance(updated, ng.NodeGroupTemplate)
+ self.assertFields(self.update_json["node_group_template"], updated)
+
+ # check that parameters will not be updated
+ self.client_v2.node_group_templates.update("id")
+ self.assertEqual(update_url, self.responses.last_request.url)
+ self.assertEqual({},
+ json.loads(self.responses.last_request.body))
+
+ # check that all parameters will be unset
+ unset_json = {
+ 'auto_security_group': None, 'availability_zone': None,
+ 'description': None, 'flavor_id': None, 'floating_ip_pool': None,
+ 'plugin_version': None, 'image_id': None, 'is_protected': None,
+ 'is_proxy_gateway': None, 'is_public': None, 'name': None,
+ 'node_configs': None, 'node_processes': None, 'plugin_name': None,
+ 'security_groups': None, 'shares': None, 'use_autoconfig': None,
+ 'volume_local_to_instance': None, 'volume_mount_prefix': None,
+ 'volume_type': None, 'volumes_availability_zone': None,
+ 'volumes_per_node': None, 'volumes_size': None,
+ 'boot_from_volume': None}
+
+ self.client_v2.node_group_templates.update("id", **unset_json)
+ self.assertEqual(update_url, self.responses.last_request.url)
+ self.assertEqual(unset_json,
+ json.loads(self.responses.last_request.body))
diff --git a/setup.cfg b/setup.cfg
index 5e5542d..afe081f 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -96,5 +96,14 @@ openstack.data_processing.v1 =
dataprocessing_job_binary_delete = saharaclient.osc.v1.job_binaries:DeleteJobBinary
dataprocessing_job_binary_download = saharaclient.osc.v1.job_binaries:DownloadJobBinary
+openstack.data_processing.v2 =
+ dataprocessing_node_group_template_create = saharaclient.osc.v2.node_group_templates:CreateNodeGroupTemplate
+ dataprocessing_node_group_template_list = saharaclient.osc.v2.node_group_templates:ListNodeGroupTemplates
+ dataprocessing_node_group_template_show = saharaclient.osc.v2.node_group_templates:ShowNodeGroupTemplate
+ dataprocessing_node_group_template_update = saharaclient.osc.v2.node_group_templates:UpdateNodeGroupTemplate
+ dataprocessing_node_group_template_delete = saharaclient.osc.v2.node_group_templates:DeleteNodeGroupTemplate
+ dataprocessing_node_group_template_import = saharaclient.osc.v2.node_group_templates:ImportNodeGroupTemplate
+ dataprocessing_node_group_template_export = saharaclient.osc.v2.node_group_templates:ExportNodeGroupTemplate
+
[wheel]
universal = 1
diff --git a/test-requirements.txt b/test-requirements.txt
index 6dbca11..886ab37 100644
--- a/test-requirements.txt
+++ b/test-requirements.txt
@@ -7,6 +7,5 @@ hacking!=0.13.0,<0.14,>=0.12.0 # Apache-2.0
coverage!=4.4,>=4.0 # Apache-2.0
mock>=2.0.0 # BSD
oslotest>=3.2.0 # Apache-2.0
-os-testr>=1.0.0 # Apache-2.0
+stestr>=1.0.0 # Apache-2.0
requests-mock>=1.2.0 # Apache-2.0
-testrepository>=0.0.18 # Apache-2.0/BSD
diff --git a/tox.ini b/tox.ini
index c99f39a..2470364 100644
--- a/tox.ini
+++ b/tox.ini
@@ -14,7 +14,7 @@ deps =
-r{toxinidir}/requirements.txt
-r{toxinidir}/test-requirements.txt
commands = find . -type f -name "*.pyc" -delete
- ostestr {posargs}
+ stestr run {posargs}
whitelist_externals = find
rm
passenv = http_proxy HTTP_PROXY https_proxy HTTPS_PROXY no_proxy NO_PROXY
@@ -33,9 +33,17 @@ commands = oslo_debug_helper -t saharaclient/tests/unit {posargs}
[testenv:cover]
basepython = python3
+setenv =
+ {[testenv]setenv}
+ PYTHON=coverage run --source saharaclient --parallel-mode
commands =
- python setup.py test --coverage --testr-args='{posargs}'
- coverage report
+ coverage erase
+ find . -type f -name "*.pyc" -delete
+ stestr run {posargs}
+ coverage combine
+ coverage html -d cover
+ coverage xml -o cover/coverage.xml
+ coverage report
[tox:jenkins]
sitepackages = False