author      Zuul <zuul@review.opendev.org>  2022-03-17 15:35:42 +0000
committer   Gerrit Code Review <review@openstack.org>  2022-03-17 15:35:42 +0000
commit      e5a1997df840080d53e3bc2a12ac9169c3f96990
tree        c3c1bbe5ffa92e3c5a6b815ac8d362006e4aa759
parent      687694c83ccd61371cf89816ef7602ae9fed0dd0
parent      3e631a5931c9a3d53d78438339adfeeae5fa5a70
Merge "Create API documentation from docstrings"
-rw-r--r--   api-ref/source/baremetal-api-v1-nodes.inc      5
-rw-r--r--   api-ref/source/parameters.yaml                 15
-rw-r--r--   doc/source/_exts/web_api_docstring.py         346
-rw-r--r--   doc/source/conf.py                              3
-rw-r--r--   ironic/api/controllers/v1/allocation.py        53
-rw-r--r--   ironic/api/controllers/v1/driver.py            27
-rw-r--r--   ironic/api/controllers/v1/node.py               6
-rw-r--r--   tox.ini                                         2
8 files changed, 418 insertions, 39 deletions
diff --git a/api-ref/source/baremetal-api-v1-nodes.inc b/api-ref/source/baremetal-api-v1-nodes.inc
index 44bae0014..2ebbd2c5d 100644
--- a/api-ref/source/baremetal-api-v1-nodes.inc
+++ b/api-ref/source/baremetal-api-v1-nodes.inc
@@ -153,11 +153,6 @@ Request
.. literalinclude:: samples/node-create-request-dynamic.json
:language: javascript
-**Example Node creation request with a classic driver:**
-
-.. literalinclude:: samples/node-create-request-classic.json
- :language: javascript
-
Response
--------
diff --git a/api-ref/source/parameters.yaml b/api-ref/source/parameters.yaml
index b3eb28f82..d0da64ec2 100644
--- a/api-ref/source/parameters.yaml
+++ b/api-ref/source/parameters.yaml
@@ -324,6 +324,13 @@ r_node_uuid:
in: query
required: false
type: string
+r_owner:
+ description: |
+ Filter the list of returned allocations, and only return those with
+ the specified owner.
+ in: query
+ required: false
+ type: string
r_port_address:
description: |
Filter the list of returned Ports, and only return the ones with the
@@ -412,7 +419,7 @@ sort_dir:
type: string
sort_key:
description: |
- Sorts the response by the this attribute value.
+ Sorts the response by this attribute value.
Default is ``id``. You can specify multiple pairs of sort key and
sort direction query parameters. If you omit the sort direction in
a pair, the API uses the natural sorting direction of the server
@@ -466,6 +473,12 @@ allocation_node:
in: body
required: true
type: string
+allocation_patch:
+ description: |
+ A JSON patch document to apply to the allocation.
+ in: body
+ required: true
+ type: JSON
allocation_resource_class:
description: |
The resource class requested for the allocation. Can be ``null`` if
diff --git a/doc/source/_exts/web_api_docstring.py b/doc/source/_exts/web_api_docstring.py
new file mode 100644
index 000000000..4e560a9b7
--- /dev/null
+++ b/doc/source/_exts/web_api_docstring.py
@@ -0,0 +1,346 @@
+# -*- coding: utf-8 -*-
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from http import HTTPStatus
+import os
+import re # Stdlib
+
+from docutils import nodes
+from docutils.parsers.rst import Directive # 3rd Party
+from sphinx.util.docfields import GroupedField # 3rd Party
+import yaml # 3rd party
+
+from ironic.common import exception # Application
+
+
+def read_from_file(fpath):
+    """Read the data in file given by fpath."""
+
+    with open(fpath, 'r') as stream:
+        yaml_data = yaml.load(stream, Loader=yaml.SafeLoader)
+    return yaml_data
+
+
+def split_str_to_field(input_str):
+    """Split the input_str into 2 parts, the field name and field body.
+
+    The split is based on this regex format: :field_name: field_body.
+    """
+
+    regex_pattern = "((^:{1}.*:{1})(.*))"
+    field_name = None
+    field_body = None
+
+    if input_str is None:
+        return field_name, field_body
+
+    regex_output = re.match(regex_pattern, input_str)
+
+    if regex_output is None and len(input_str) > 0:
+        field_body = input_str.lstrip(' ')
+
+    if regex_output is not None:
+        field = regex_output.groups()
+        field_name = field[1].strip(':')
+        field_body = field[2].strip()
+
+    return field_name, field_body
+
+
+def parse_field_list(content):
+    """Convert a list of fields as strings to a dictionary.
+
+    This function takes a list of strings as input, each item being
+    a :field_name: field_body combination, and converts it into a dictionary
+    with the field names as keys, and field bodies as values.
+    """
+
+    field_list = {}  # dictionary to hold parsed input field list
+
+    for c in content:
+        if c is None:
+            continue
+        field_name, field_body = split_str_to_field(c)
+        field_list[field_name] = field_body
+
+    return field_list
+
+
+def create_bullet_list(input_dict, input_build_env):
+    """Convert input_dict into a Sphinx representation of a bullet list."""
+
+    grp_field = GroupedField('grp_field', label='title')
+    bullet_list = nodes.paragraph()
+
+    for field_name in input_dict:
+        fbody_txt_node = nodes.Text(data=input_dict[field_name])
+        tmp_field_node = grp_field.make_field(domain='py',
+                                              types=nodes.field,
+                                              items=[(field_name,
+                                                      fbody_txt_node)],
+                                              env=input_build_env)
+
+        for c in tmp_field_node.children:
+            if c.tagname == 'field_body':
+                for ch in c.children:
+                    bullet_list += ch
+
+    return bullet_list
+
+
+def create_table(table_title, table_contents):
+    """Construct a docutils-based table (single row and column)."""
+
+    table = nodes.table()
+    tgroup = nodes.tgroup(cols=1)
+    colspec = nodes.colspec(colwidth=1)
+    tgroup.append(colspec)
+    table += tgroup
+
+    thead = nodes.thead()
+    tgroup += thead
+
+    row = nodes.row()
+    entry = nodes.entry()
+    entry += nodes.paragraph(text=table_title)
+    row += entry
+
+    thead.append(row)
+
+    rows = []
+
+    row = nodes.row()
+    rows.append(row)
+
+    entry = nodes.entry()
+    entry += table_contents
+    row += entry
+
+    tbody = nodes.tbody()
+    tbody.extend(rows)
+    tgroup += tbody
+
+    return table
+
+
+def split_list(input_list):
+    """Split input_list into three sub-lists.
+
+    This function splits the input_list into three sub-lists: one containing
+    the initial non-empty items, one containing the items that appear after
+    the string 'Success:' in input_list, and one containing the items that
+    appear after the string 'Failure:' in input_list.
+    """
+    initial_flag = 1
+    success_flag = 0
+    failure_flag = 0
+
+    initial_list = []
+    success_list = []
+    failure_list = []
+
+    for c in input_list:
+        if c == 'Success:':
+            success_flag = 1
+            failure_flag = 0
+        elif c == 'Failure:':
+            failure_flag = 1
+            success_flag = 0
+        elif c != '' and success_flag:
+            success_list.append(c)
+        elif c != '' and failure_flag:
+            failure_list.append(c)
+        elif c != '' and initial_flag:
+            initial_list.append(c)
+
+    return initial_list, success_list, failure_list
+
+
+def process_list(input_list):
+    """Combine fields split over multiple list items into one.
+
+    This function expects to receive a field list as input,
+    with each item in the list representing a line
+    read from the document, as-is.
+
+    It combines the field bodies split over multiple lines into
+    one list item, making each field (name and body) one list item.
+    It also removes the extra whitespace that was used for indentation
+    in the input.
+    """
+
+    out_list = []
+
+    # Convert list to string
+    str1 = "".join(input_list)
+
+    # Replace multiple spaces with one space
+    str2 = re.sub(r'\s+', ' ', str1)
+
+    regex_pattern = r'(:\S*.:)'
+
+    # Split the string, based on field names
+    list3 = re.split(regex_pattern, str2)
+
+    # Remove empty items from the list
+    list4 = list(filter(None, list3))
+
+    # Append the field name and field body strings together
+    for i in range(0, len(list4), 2):
+        out_list.append(list4[i] + list4[i + 1])
+
+    return out_list
+
+
+def add_exception_info(failure_list):
+    """Add exception information to fields.
+
+    This function takes a list of fields (field name and field body)
+    as an argument. If the field name is the name of an exception, it adds
+    the exception code into the field name, and the exception message into
+    the field body.
+    """
+
+    failure_dict = {}
+
+    # Add the exception code and message string
+    for f in failure_list:
+        field_name, field_body = split_str_to_field(f)
+        exc_code = ""
+        exc_msg = ""
+
+        if (field_name is not None) and hasattr(exception, field_name):
+            # Get the exception code and message string
+            exc_class = getattr(exception, field_name)
+            try:
+                exc_code = exc_class.code
+                exc_msg = exc_class._msg_fmt
+            except AttributeError:
+                pass
+
+            # Add the exception's HTTP code and HTTP phrase
+            # to the field name
+            if isinstance(exc_code, HTTPStatus):
+                field_name = (field_name
+                              + " (HTTP "
+                              + str(exc_code.value)
+                              + " "
+                              + exc_code.phrase
+                              + ")")
+            else:
+                field_name = field_name + " (HTTP " + str(exc_code) + ")"
+
+            # Add the exception's message string to the field body
+            field_body = exc_msg + " \n" + field_body
+
+        # Add to dictionary if field name and field body exist
+        if field_name is not None and field_body is not None:
+            failure_dict[field_name] = field_body
+
+    return failure_dict
+
+
+class Parameters(Directive):
+    """This class implements the Parameters Directive."""
+
+    required_arguments = 1
+    has_content = True
+
+    def run(self):
+        # Parse the input field list from the docstring, as a dictionary
+        input_dict = {}
+        input_dict = parse_field_list(self.content)
+
+        # Read from yaml file
+        param_file = self.arguments[0]
+        cur_path = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+        param_file_path = cur_path + '/' + param_file
+        yaml_data = read_from_file(param_file_path)
+
+        # Substitute the parameter descriptions with the yaml file descriptions
+        for field_name in input_dict:
+            old_field_body = input_dict[field_name]
+            if old_field_body in yaml_data.keys():
+                input_dict[field_name] = yaml_data[old_field_body]["description"]
+
+        # Convert dictionary to bullet list format
+        params_build_env = self.state.document.settings.env
+        params_bullet_list = create_bullet_list(input_dict, params_build_env)
+
+        # Create a table to display the final Parameters directive output
+        params_table = create_table('Parameters', params_bullet_list)
+        return [params_table]
+
+
+class Return(Directive):
+    """This class implements the Return Directive."""
+
+    has_content = True
+
+    def run(self):
+        initial_list, success_list, failure_list = split_list(self.content)
+
+        # Concatenate the field bodies split over multiple lines
+        proc_fail_list = process_list(failure_list)
+
+        # Add the exception code(s) and corresponding message string(s)
+        failure_dict = {}
+        failure_dict = add_exception_info(proc_fail_list)
+
+        ret_table_contents = nodes.paragraph()
+        if len(initial_list) > 0:
+            for i in initial_list:
+                initial_cont = nodes.Text(data=i)
+                ret_table_contents += initial_cont
+
+        if len(success_list) > 0:
+            # Add heading 'Success:' to output
+            success_heading = nodes.strong()
+            success_heading += nodes.Text(data='Success:')
+            ret_table_contents += success_heading
+
+            # Add Success details to output
+            success_detail = nodes.paragraph()
+            for s in success_list:
+                success_detail += nodes.Text(data=s)
+            ret_table_contents += success_detail
+
+        if len(proc_fail_list) > 0:
+            # Add heading 'Failure:' to output
+            failure_heading = nodes.strong()
+            failure_heading += nodes.Text(data='Failure:')
+            ret_table_contents += failure_heading
+
+            # Add failure details to output
+            ret_build_env = self.state.document.settings.env
+            failure_detail = create_bullet_list(failure_dict, ret_build_env)
+            ret_table_contents += failure_detail
+
+        if len(initial_list) > 0 or len(success_list) > 0 or len(proc_fail_list) > 0:
+            # Create a table to display the final Returns directive output
+            ret_table = create_table('Returns', ret_table_contents)
+            return [ret_table]
+        else:
+            return None
+
+
+def setup(app):
+    app.add_directive("parameters", Parameters)
+    app.add_directive("return", Return)
+
+    return {
+        'version': '0.1',
+        'parallel_read_safe': True,
+        'parallel_write_safe': True,
+    }
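
As a rough illustration of what the parsing helpers above do, the following standalone sketch (not part of the change) reimplements the split_str_to_field() regex and runs it over two field lines of the kind used in the converted docstrings later in this patch:

import re


def split_str_to_field(input_str):
    # Same regex as above: group 2 is ":name:", group 3 is the field body.
    regex_output = re.match("((^:{1}.*:{1})(.*))", input_str)
    if regex_output is None:
        return None, input_str.lstrip(' ')
    field = regex_output.groups()
    return field[1].strip(':'), field[2].strip()


print(split_str_to_field(":node: r_allocation_node"))   # ('node', 'r_allocation_node')
print(split_str_to_field(":fields: fields"))             # ('fields', 'fields')
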
diff --git a/doc/source/conf.py b/doc/source/conf.py
index 1f667a4b4..2746ac934 100644
--- a/doc/source/conf.py
+++ b/doc/source/conf.py
@@ -42,7 +42,8 @@ extensions = ['sphinx.ext.viewcode',
'oslo_policy.sphinxext',
'oslo_policy.sphinxpolicygen',
'automated_steps',
- 'openstackdocstheme'
+ 'openstackdocstheme',
+ 'web_api_docstring'
]
# sphinxcontrib.apidoc options
diff --git a/ironic/api/controllers/v1/allocation.py b/ironic/api/controllers/v1/allocation.py
index be7e41c64..7884df1fa 100644
--- a/ironic/api/controllers/v1/allocation.py
+++ b/ironic/api/controllers/v1/allocation.py
@@ -259,20 +259,17 @@ class AllocationsController(pecan.rest.RestController):
owner=None):
"""Retrieve a list of allocations.
- :param node: UUID or name of a node, to get only allocations for that
- node.
- :param resource_class: Filter by requested resource class.
- :param state: Filter by allocation state.
- :param marker: pagination marker for large data sets.
- :param limit: maximum number of resources to return in a single result.
- This value cannot be larger than the value of max_limit
- in the [api] section of the ironic configuration, or only
- max_limit resources will be returned.
- :param sort_key: column to sort results by. Default: id.
- :param sort_dir: direction to sort. "asc" or "desc". Default: asc.
- :param fields: Optional, a list with a specified set of fields
- of the resource to be returned.
- :param owner: Filter by owner.
+ .. parameters:: ../../api-ref/source/parameters.yaml
+
+ :node: r_allocation_node
+ :resource_class: req_allocation_resource_class
+ :state: r_allocation_state
+ :marker: marker
+ :limit: limit
+ :sort_key: sort_key
+ :sort_dir: sort_dir
+ :fields: fields
+ :owner: r_owner
"""
owner = api_utils.check_list_policy('allocation', owner)
@@ -291,9 +288,10 @@ class AllocationsController(pecan.rest.RestController):
def get_one(self, allocation_ident, fields=None):
"""Retrieve information about the given allocation.
- :param allocation_ident: UUID or logical name of an allocation.
- :param fields: Optional, a list with a specified set of fields
- of the resource to be returned.
+ .. parameters:: ../../api-ref/source/parameters.yaml
+
+ :allocation_ident: allocation_ident
+ :fields: fields
"""
rpc_allocation = api_utils.check_allocation_policy_and_retrieve(
'baremetal:allocation:get', allocation_ident)
@@ -341,7 +339,9 @@ class AllocationsController(pecan.rest.RestController):
def post(self, allocation):
"""Create a new allocation.
- :param allocation: an allocation within the request body.
+ .. parameters:: ../../api-ref/source/parameters.yaml
+
+ :allocation: req_allocation_name
"""
context = api.request.context
cdict = context.to_policy_values()
@@ -472,8 +472,10 @@ class AllocationsController(pecan.rest.RestController):
def patch(self, allocation_ident, patch):
"""Update an existing allocation.
- :param allocation_ident: UUID or logical name of an allocation.
- :param patch: a json PATCH document to apply to this allocation.
+ .. parameters:: ../../api-ref/source/parameters.yaml
+
+ :allocation_ident: allocation_ident
+ :patch: allocation_patch
"""
if not api_utils.allow_allocation_update():
raise webob_exc.HTTPMethodNotAllowed(_(
@@ -513,7 +515,9 @@ class AllocationsController(pecan.rest.RestController):
def delete(self, allocation_ident):
"""Delete an allocation.
- :param allocation_ident: UUID or logical name of an allocation.
+ .. parameters:: ../../api-ref/source/parameters.yaml
+
+ :allocation_ident: allocation_ident
"""
context = api.request.context
rpc_allocation = api_utils.check_allocation_policy_and_retrieve(
@@ -556,6 +560,12 @@ class NodeAllocationController(pecan.rest.RestController):
@method.expose()
@args.validate(fields=args.string_list)
def get_all(self, fields=None):
+ """Get all allocations.
+
+ .. parameters:: ../../api-ref/source/parameters.yaml
+
+ :fields: fields
+ """
parent_node = self.parent_node_ident
result = self.inner._get_allocations_collection(
parent_node,
@@ -572,6 +582,7 @@ class NodeAllocationController(pecan.rest.RestController):
@METRICS.timer('NodeAllocationController.delete')
@method.expose(status_code=http_client.NO_CONTENT)
def delete(self):
+ """Delete an allocation."""
context = api.request.context
rpc_node = api_utils.get_rpc_node_with_suffix(self.parent_node_ident)
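
The field bodies in the converted docstrings above (``r_allocation_node``, ``fields``, and so on) are keys into ``api-ref/source/parameters.yaml``; the Parameters directive swaps each key for that entry's ``description``. A minimal sketch of that substitution step, using placeholder YAML data rather than the real file:

import yaml

# Placeholder entry; the real description lives in api-ref/source/parameters.yaml.
yaml_data = yaml.safe_load("""
r_allocation_node:
  description: |
    Sample description text for the r_allocation_node parameter.
  in: query
  required: false
  type: string
""")

input_dict = {'node': 'r_allocation_node'}
for field_name in input_dict:
    old_field_body = input_dict[field_name]
    if old_field_body in yaml_data:
        input_dict[field_name] = yaml_data[old_field_body]['description']

print(input_dict['node'])  # prints the description pulled from the YAML entry
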
diff --git a/ironic/api/controllers/v1/driver.py b/ironic/api/controllers/v1/driver.py
index 2775ed284..f117c327f 100644
--- a/ironic/api/controllers/v1/driver.py
+++ b/ironic/api/controllers/v1/driver.py
@@ -225,7 +225,7 @@ class DriverPassthruController(rest.RestController):
:param driver_name: name of the driver.
:returns: dictionary with <vendor method name>:<method metadata>
entries.
- :raises: DriverNotFound if the driver name is invalid or the
+ :raises DriverNotFound: if the driver name is invalid or the
driver cannot be loaded.
"""
api_utils.check_policy('baremetal:driver:vendor_passthru')
@@ -272,15 +272,20 @@ class DriverRaidController(rest.RestController):
def logical_disk_properties(self, driver_name):
"""Returns the logical disk properties for the driver.
- :param driver_name: Name of the driver.
- :returns: A dictionary containing the properties that can be mentioned
- for logical disks and a textual description for them.
- :raises: UnsupportedDriverExtension if the driver doesn't
- support RAID configuration.
- :raises: NotAcceptable, if requested version of the API is less than
- 1.12.
- :raises: DriverNotFound, if driver is not loaded on any of the
- conductors.
+ .. parameters:: ../../api-ref/source/parameters.yaml
+
+ :driver_name: Name of the driver.
+
+ .. return::
+
+ Success:
+ A dictionary containing the properties that can be mentioned
+
+ Failure:
+ :UnsupportedDriverExtension: If the driver doesn't support RAID
+ configuration.
+ :NotAcceptable: If requested version of the API is less than 1.12.
+ :DriverNotFound: If driver is not loaded on any of the conductors.
"""
api_utils.check_policy(
'baremetal:driver:get_raid_logical_disk_properties')
@@ -377,7 +382,7 @@ class DriversController(rest.RestController):
:param driver_name: name of the driver.
:returns: dictionary with <property name>:<property description>
entries.
- :raises: DriverNotFound (HTTP 404) if the driver name is invalid or
+ :raises DriverNotFound (HTTP 404): if the driver name is invalid or
the driver cannot be loaded.
"""
api_utils.check_policy('baremetal:driver:get_properties')
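
For the ``Failure:`` entries in the ``.. return::`` block above, the new ``add_exception_info()`` helper looks each name up in ``ironic.common.exception`` and folds that exception's HTTP code into the rendered field name. A rough sketch of the labelling step, driving it with ``HTTPStatus`` directly since the code attribute of each ironic exception is not shown in this change:

from http import HTTPStatus


def label_field(field_name, exc_code):
    # Mirrors the field-name rewriting in add_exception_info()
    if isinstance(exc_code, HTTPStatus):
        return "%s (HTTP %d %s)" % (field_name, exc_code.value, exc_code.phrase)
    return field_name + " (HTTP " + str(exc_code) + ")"


# Assuming NotAcceptable carries HTTPStatus.NOT_ACCEPTABLE (406):
print(label_field("NotAcceptable", HTTPStatus.NOT_ACCEPTABLE))
# NotAcceptable (HTTP 406 Not Acceptable)
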
diff --git a/ironic/api/controllers/v1/node.py b/ironic/api/controllers/v1/node.py
index bfba203d1..f182e2fd2 100644
--- a/ironic/api/controllers/v1/node.py
+++ b/ironic/api/controllers/v1/node.py
@@ -2442,6 +2442,12 @@ class NodesController(rest.RestController):
"""Create a new node.
:param node: a node within the request body.
+
+ **Example Node creation request:**
+
+ .. literalinclude::
+ ../../../../api-ref/source/samples/node-create-request-dynamic.json
+ :language: javascript
"""
if self.from_chassis:
raise exception.OperationNotPermitted()
diff --git a/tox.ini b/tox.ini
index a5fd56477..f0e04357c 100644
--- a/tox.ini
+++ b/tox.ini
@@ -92,9 +92,11 @@ commands =
[testenv:api-ref]
+# NOTE(Mahnoor): the documentation build process requires importing ironic API modules
usedevelop = False
deps =
-c{env:TOX_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/upper/master}
+ -r{toxinidir}/requirements.txt
-r{toxinidir}/doc/requirements.txt
allowlist_externals = bash
commands =
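
Since the new extension imports ``ironic.common.exception``, the api-ref environment now pulls in ironic's runtime requirements as well; the reference itself is still built the usual way with ``tox -e api-ref``.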