author    | Denis Egorenko <degorenko@mirantis.com> | 2016-05-31 16:21:31 +0300
committer | Denis Egorenko <degorenko@mirantis.com> | 2016-06-01 19:15:01 +0000
commit    | f02ea8999c79ea9a3e1f515220775bc54838f9d2 (patch)
tree      | 2984b8321d89db826186bb60af8d4ef9b5dddd7f /saharaclient/osc
parent    | 4944304d3461e130a4d04096cc2590c31689e496 (diff)
download  | python-saharaclient-f02ea8999c79ea9a3e1f515220775bc54838f9d2.tar.gz
Change plugin version 'version' parameter to 'plugin-version'
Currently we have a 'version' parameter for specifying the plugin version
when creating node group templates and when listing available plugins. The
problem is that 'version' is also the global parameter used to get the
version of the client itself. This patch fixes that confusion by renaming
the command-level parameter to 'plugin-version'.
Change-Id: I89a670188c20e3b20b98b596a3efb8d5997c3214
Closes-bug: #1565775
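
The rename matters because '--version' was already taken at the client level: the same flag name meant "show the client's version" globally and "filter by plugin version" inside individual commands. Below is a minimal sketch of that clash and its resolution using plain argparse rather than the actual cliff/osc-lib plumbing; the program name, command name, and version string are illustrative only.

```python
import argparse

# Illustrative only: not the real openstackclient/cliff parser setup.
parser = argparse.ArgumentParser(prog='openstack')
# The top-level client already owns --version for reporting its own version.
parser.add_argument('--version', action='version',
                    version='client 0.0.0 (illustrative)')

subparsers = parser.add_subparsers(dest='command')
plugin_list = subparsers.add_parser('plugin-list')
# After this change the command-level option no longer reuses that name:
plugin_list.add_argument('--plugin-version', metavar='<plugin_version>',
                         help='List plugins with a specific plugin version')

args = parser.parse_args(['plugin-list', '--plugin-version', '2.7.1'])
print(args.plugin_version)  # -> 2.7.1
```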
Diffstat (limited to 'saharaclient/osc')
-rw-r--r-- | saharaclient/osc/v1/cluster_templates.py    | 30
-rw-r--r-- | saharaclient/osc/v1/clusters.py             | 20
-rw-r--r-- | saharaclient/osc/v1/job_types.py            | 12
-rw-r--r-- | saharaclient/osc/v1/node_group_templates.py | 34
-rw-r--r-- | saharaclient/osc/v1/plugins.py              | 13
5 files changed, 55 insertions, 54 deletions
diff --git a/saharaclient/osc/v1/cluster_templates.py b/saharaclient/osc/v1/cluster_templates.py
index 440098f..2396610 100644
--- a/saharaclient/osc/v1/cluster_templates.py
+++ b/saharaclient/osc/v1/cluster_templates.py
@@ -25,7 +25,7 @@ from oslo_log import log as logging
 
 from saharaclient.osc.v1 import utils
 
 
-CT_FIELDS = ['id', 'name', 'plugin_name', 'version', 'description',
+CT_FIELDS = ['id', 'name', 'plugin_name', 'plugin_version', 'description',
              'node_groups', 'anti_affinity', 'use_autoconfig', 'is_default',
              'is_protected', 'is_public']
@@ -36,7 +36,7 @@ def _format_node_groups_list(node_groups):
 
 
 def _format_ct_output(data):
-    data['version'] = data.pop('hadoop_version')
+    data['plugin_version'] = data.pop('hadoop_version')
     data['node_groups'] = _format_node_groups_list(data['node_groups'])
     data['anti_affinity'] = osc_utils.format_list(data['anti_affinity'])
@@ -59,8 +59,8 @@ def _configure_node_groups(node_groups, client):
         raise exceptions.CommandError('Node groups with the same plugins '
                                       'and versions must be specified')
 
-    plugin, version = plugins_versions.pop()
-    return plugin, version, node_groups
+    plugin, plugin_version = plugins_versions.pop()
+    return plugin, plugin_version, node_groups
 
 
 class CreateClusterTemplate(show.ShowOne):
@@ -180,13 +180,13 @@ class CreateClusterTemplate(show.ShowOne):
                     'An error occurred when reading '
                     'shares from file %s: %s' % (parsed_args.shares, e))
 
-            plugin, version, node_groups = _configure_node_groups(
+            plugin, plugin_version, node_groups = _configure_node_groups(
                 parsed_args.node_groups, client)
 
            data = client.cluster_templates.create(
                name=parsed_args.name,
                plugin_name=plugin,
-               hadoop_version=version,
+               hadoop_version=plugin_version,
                description=parsed_args.description,
                node_groups=node_groups,
                use_autoconfig=parsed_args.autoconfig,
@@ -222,8 +222,8 @@ class ListClusterTemplates(lister.Lister):
         )
         parser.add_argument(
-            '--version',
-            metavar="<version>",
+            '--plugin-version',
+            metavar="<plugin_version>",
             help="List cluster templates with specific version of the "
                  "plugin"
         )
@@ -243,8 +243,8 @@ class ListClusterTemplates(lister.Lister):
         search_opts = {}
         if parsed_args.plugin:
             search_opts['plugin_name'] = parsed_args.plugin
-        if parsed_args.version:
-            search_opts['hadoop_version'] = parsed_args.version
+        if parsed_args.plugin_version:
+            search_opts['hadoop_version'] = parsed_args.plugin_version
 
         data = client.cluster_templates.list(search_opts=search_opts)
@@ -255,12 +255,12 @@ class ListClusterTemplates(lister.Lister):
             columns = ('name', 'id', 'plugin_name', 'hadoop_version',
                        'node_groups', 'description')
             column_headers = utils.prepare_column_headers(
-                columns, {'hadoop_version': 'version'})
+                columns, {'hadoop_version': 'plugin_version'})
 
         else:
             columns = ('name', 'id', 'plugin_name', 'hadoop_version')
             column_headers = utils.prepare_column_headers(
-                columns, {'hadoop_version': 'version'})
+                columns, {'hadoop_version': 'plugin_version'})
 
         return (
             column_headers,
@@ -448,9 +448,9 @@ class UpdateClusterTemplate(show.ShowOne):
             data = client.cluster_templates.update(
                 ct_id, **template).to_dict()
         else:
-            plugin, version, node_groups = None, None, None
+            plugin, plugin_version, node_groups = None, None, None
             if parsed_args.node_groups:
-                plugin, version, node_groups = _configure_node_groups(
+                plugin, plugin_version, node_groups = _configure_node_groups(
                     parsed_args.node_groups, client)
 
             configs = None
@@ -476,7 +476,7 @@ class UpdateClusterTemplate(show.ShowOne):
            update_dict = utils.create_dict_from_kwargs(
                name=parsed_args.name,
                plugin_name=plugin,
-               hadoop_version=version,
+               hadoop_version=plugin_version,
                description=parsed_args.description,
                node_groups=node_groups,
                use_autoconfig=parsed_args.use_autoconfig,
diff --git a/saharaclient/osc/v1/clusters.py b/saharaclient/osc/v1/clusters.py
index 0f1d8f1..05ec51f 100644
--- a/saharaclient/osc/v1/clusters.py
+++ b/saharaclient/osc/v1/clusters.py
@@ -27,7 +27,7 @@ from saharaclient.osc.v1 import utils
 
 
 CLUSTER_FIELDS = ["cluster_template_id", "use_autoconfig", "user_keypair_id",
                   "status", "image", "node_groups", "id",
-                  "anti_affinity", "version", "name", "is_transient",
+                  "anti_affinity", "plugin_version", "name", "is_transient",
                   "is_protected", "description", "is_public",
                   "neutron_management_network", "plugin_name"]
@@ -38,7 +38,7 @@ def _format_node_groups_list(node_groups):
 
 
 def _format_cluster_output(data):
-    data['version'] = data.pop('hadoop_version')
+    data['plugin_version'] = data.pop('hadoop_version')
     data['image'] = data.pop('default_image_id')
     data['node_groups'] = _format_node_groups_list(data['node_groups'])
     data['anti_affinity'] = osc_utils.format_list(data['anti_affinity'])
@@ -171,7 +171,7 @@ class CreateCluster(show.ShowOne):
                'should be specified or json template should be provided '
                'with --json argument')
 
-        plugin, version, template_id = _get_plugin_version(
+        plugin, plugin_version, template_id = _get_plugin_version(
            parsed_args.cluster_template, client)
 
        image_id = utils.get_resource_id(client.images, parsed_args.image)
@@ -183,7 +183,7 @@ class CreateCluster(show.ShowOne):
        data = client.clusters.create(
            name=parsed_args.name,
            plugin_name=plugin,
-           hadoop_version=version,
+           hadoop_version=plugin_version,
            cluster_template_id=template_id,
            default_image_id=image_id,
            description=parsed_args.description,
@@ -245,8 +245,8 @@ class ListClusters(lister.Lister):
         )
         parser.add_argument(
-            '--version',
-            metavar="<version>",
+            '--plugin-version',
+            metavar="<plugin_version>",
             help="List clusters with specific version of the "
                  "plugin"
         )
@@ -265,8 +265,8 @@ class ListClusters(lister.Lister):
         search_opts = {}
         if parsed_args.plugin:
             search_opts['plugin_name'] = parsed_args.plugin
-        if parsed_args.version:
-            search_opts['hadoop_version'] = parsed_args.version
+        if parsed_args.plugin_version:
+            search_opts['hadoop_version'] = parsed_args.plugin_version
 
         data = client.clusters.list(search_opts=search_opts)
@@ -277,13 +277,13 @@ class ListClusters(lister.Lister):
             columns = ('name', 'id', 'plugin_name', 'hadoop_version',
                        'status', 'description', 'default_image_id')
             column_headers = utils.prepare_column_headers(
-                columns, {'hadoop_version': 'version',
+                columns, {'hadoop_version': 'plugin_version',
                           'default_image_id': 'image'})
 
         else:
             columns = ('name', 'id', 'plugin_name', 'hadoop_version', 'status')
             column_headers = utils.prepare_column_headers(
-                columns, {'hadoop_version': 'version',
+                columns, {'hadoop_version': 'plugin_version',
                           'default_image_id': 'image'})
         return (
             column_headers,
diff --git a/saharaclient/osc/v1/job_types.py b/saharaclient/osc/v1/job_types.py
index 4f98791..08f4013 100644
--- a/saharaclient/osc/v1/job_types.py
+++ b/saharaclient/osc/v1/job_types.py
@@ -46,8 +46,8 @@ class ListJobTypes(lister.Lister):
             help="Get only job types supported by this plugin"
         )
         parser.add_argument(
-            '--version',
-            metavar="<version>",
+            '--plugin-version',
+            metavar="<plugin_version>",
             help="Get only job types supported by specific version of the "
                  "plugin. This parameter will be taken into account only if "
                  "plugin is provided"
@@ -64,11 +64,11 @@ class ListJobTypes(lister.Lister):
             search_opts['type'] = parsed_args.type
         if parsed_args.plugin:
             search_opts['plugin'] = parsed_args.plugin
-            if parsed_args.version:
-                search_opts['version'] = parsed_args.version
-        elif parsed_args.version:
+            if parsed_args.plugin_version:
+                search_opts['plugin_version'] = parsed_args.plugin_version
+        elif parsed_args.plugin_version:
             raise exceptions.CommandError(
-                '--version argument should be specified with --plugin '
+                '--plugin-version argument should be specified with --plugin '
                 'argument')
 
         data = client.job_types.list(search_opts=search_opts)
diff --git a/saharaclient/osc/v1/node_group_templates.py b/saharaclient/osc/v1/node_group_templates.py
index a24253d..1d08097 100644
--- a/saharaclient/osc/v1/node_group_templates.py
+++ b/saharaclient/osc/v1/node_group_templates.py
@@ -25,7 +25,7 @@ from oslo_log import log as logging
 from saharaclient.osc.v1 import utils
 
 
-NGT_FIELDS = ['id', 'name', 'plugin_name', 'version', 'node_processes',
+NGT_FIELDS = ['id', 'name', 'plugin_name', 'plugin_version', 'node_processes',
              'description', 'auto_security_group', 'security_groups',
              'availability_zone', 'flavor_id', 'floating_ip_pool',
              'volumes_per_node', 'volumes_size',
@@ -36,7 +36,7 @@ NGT_FIELDS = ['id', 'name', 'plugin_name', 'version', 'node_processes',
 
 def _format_ngt_output(data):
     data['node_processes'] = osc_utils.format_list(data['node_processes'])
-    data['version'] = data.pop('hadoop_version')
+    data['plugin_version'] = data.pop('hadoop_version')
     if data['volumes_per_node'] == 0:
         del data['volume_local_to_instance']
         del data['volume_mount_prefix']
@@ -65,8 +65,8 @@ class CreateNodeGroupTemplate(show.ShowOne):
             help="Name of the plugin [REQUIRED if JSON is not provided]"
         )
         parser.add_argument(
-            '--version',
-            metavar="<version>",
+            '--plugin-version',
+            metavar="<plugin_version>",
             help="Version of the plugin [REQUIRED if JSON is not provided]"
         )
         parser.add_argument(
@@ -215,11 +215,11 @@ class CreateNodeGroupTemplate(show.ShowOne):
             data = client.node_group_templates.create(**template).to_dict()
         else:
             if (not parsed_args.name or not parsed_args.plugin or
-                    not parsed_args.version or not parsed_args.flavor or
+                    not parsed_args.plugin_version or not parsed_args.flavor or
                     not parsed_args.processes):
                 raise exceptions.CommandError(
-                    'At least --name, --plugin, --version, --processes, '
-                    '--flavor arguments should be specified or json template '
+                    'At least --name, --plugin, --plugin-version, --processes,'
+                    ' --flavor arguments should be specified or json template '
                     'should be provided with --json argument')
 
             configs = None
@@ -249,7 +249,7 @@ class CreateNodeGroupTemplate(show.ShowOne):
             data = client.node_group_templates.create(
                 name=parsed_args.name,
                 plugin_name=parsed_args.plugin,
-                hadoop_version=parsed_args.version,
+                hadoop_version=parsed_args.plugin_version,
                 flavor_id=flavor_id,
                 description=parsed_args.description,
                 volumes_per_node=parsed_args.volumes_per_node,
@@ -296,8 +296,8 @@ class ListNodeGroupTemplates(lister.Lister):
         )
         parser.add_argument(
-            '--version',
-            metavar="<version>",
+            '--plugin-version',
+            metavar="<plugin_version>",
             help="List node group templates with specific version of the "
                  "plugin"
         )
@@ -317,8 +317,8 @@ class ListNodeGroupTemplates(lister.Lister):
         search_opts = {}
         if parsed_args.plugin:
             search_opts['plugin_name'] = parsed_args.plugin
-        if parsed_args.version:
-            search_opts['hadoop_version'] = parsed_args.version
+        if parsed_args.plugin_version:
+            search_opts['hadoop_version'] = parsed_args.plugin_version
 
         data = client.node_group_templates.list(search_opts=search_opts)
@@ -329,12 +329,12 @@ class ListNodeGroupTemplates(lister.Lister):
             columns = ('name', 'id', 'plugin_name', 'hadoop_version',
                        'node_processes', 'description')
             column_headers = utils.prepare_column_headers(
-                columns, {'hadoop_version': 'version'})
+                columns, {'hadoop_version': 'plugin_version'})
 
         else:
             columns = ('name', 'id', 'plugin_name', 'hadoop_version')
             column_headers = utils.prepare_column_headers(
-                columns, {'hadoop_version': 'version'})
+                columns, {'hadoop_version': 'plugin_version'})
 
         return (
             column_headers,
@@ -430,8 +430,8 @@ class UpdateNodeGroupTemplate(show.ShowOne):
             help="Name of the plugin"
         )
         parser.add_argument(
-            '--version',
-            metavar="<version>",
+            '--plugin-version',
+            metavar="<plugin_version>",
             help="Version of the plugin"
         )
         parser.add_argument(
@@ -663,7 +663,7 @@ class UpdateNodeGroupTemplate(show.ShowOne):
            update_dict = utils.create_dict_from_kwargs(
                name=parsed_args.name,
                plugin_name=parsed_args.plugin,
-               hadoop_version=parsed_args.version,
+               hadoop_version=parsed_args.plugin_version,
                flavor_id=flavor_id,
                description=parsed_args.description,
                volumes_per_node=parsed_args.volumes_per_node,
diff --git a/saharaclient/osc/v1/plugins.py b/saharaclient/osc/v1/plugins.py
index e616a35..21f5726 100644
--- a/saharaclient/osc/v1/plugins.py
+++ b/saharaclient/osc/v1/plugins.py
@@ -80,7 +80,8 @@ class ShowPlugin(show.ShowOne):
             help="Name of the plugin to display",
         )
         parser.add_argument(
-            "--version",
+            "--plugin-version",
+            metavar="<plugin_version>",
             help='Version of the plugin to display'
         )
 
@@ -90,9 +91,9 @@ class ShowPlugin(show.ShowOne):
         self.log.debug("take_action(%s)" % parsed_args)
         client = self.app.client_manager.data_processing
 
-        if parsed_args.version:
+        if parsed_args.plugin_version:
             data = client.plugins.get_version_details(
-                parsed_args.plugin, parsed_args.version).to_dict()
+                parsed_args.plugin, parsed_args.plugin_version).to_dict()
 
             processes = data.pop('node_processes')
             for k, v in processes.items():
@@ -129,8 +130,8 @@ class GetPluginConfigs(command.Command):
             help="Name of the plugin to provide config information about",
         )
         parser.add_argument(
-            "version",
-            metavar="<version>",
+            "plugin_version",
+            metavar="<plugin_version>",
             help="Version of the plugin to provide config information about",
         )
         parser.add_argument(
@@ -148,7 +149,7 @@ class GetPluginConfigs(command.Command):
             parsed_args.file = parsed_args.plugin
 
         data = client.plugins.get_version_details(
-            parsed_args.plugin, parsed_args.version).to_dict()
+            parsed_args.plugin, parsed_args.plugin_version).to_dict()
 
         if path.exists(parsed_args.file):
             self.log.error('File "%s" already exists. Chose another one with '
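
Note that the change is confined to the CLI surface: the commands now read 'parsed_args.plugin_version', but the field sent to the Sahara API is still 'hadoop_version'. A small runnable sketch of that mapping as it appears in the list commands above; the helper function, its name, and the sample plugin values are illustrative and not part of the patch.

```python
from types import SimpleNamespace


def build_search_opts(parsed_args):
    # Mirrors the list-command plumbing in the diff: only the user-facing
    # option is renamed; the request field stays 'hadoop_version'.
    search_opts = {}
    if parsed_args.plugin:
        search_opts['plugin_name'] = parsed_args.plugin
    if parsed_args.plugin_version:
        search_opts['hadoop_version'] = parsed_args.plugin_version
    return search_opts


# Stand-in for the parsed arguments a real command would receive.
args = SimpleNamespace(plugin='vanilla', plugin_version='2.7.1')
print(build_search_opts(args))
# {'plugin_name': 'vanilla', 'hadoop_version': '2.7.1'}
```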