author     Zuul <zuul@review.opendev.org>              2022-11-22 09:52:47 +0000
committer  Gerrit Code Review <review@openstack.org>   2022-11-22 09:52:47 +0000
commit     aa379e7c49510ba6bf5da5a0dc8f8a6719837cea (patch)
tree       f15e0f402d5cffc36d70333efa0ff84e4f742859
parent     6762fd6812ac914295896ee56abc47a31a23a8f2 (diff)
parent     d691ee05e5362f7b7c9f135f70654f4db514a856 (diff)
Merge "Add ports statistics to tools/benchmark scripts"
-rw-r--r--  tools/benchmark/do_not_run_create_benchmark_data.py    63
-rw-r--r--  tools/benchmark/generate-statistics.py                 112
2 files changed, 155 insertions, 20 deletions
diff --git a/tools/benchmark/do_not_run_create_benchmark_data.py b/tools/benchmark/do_not_run_create_benchmark_data.py
index afdb3c7f2..d738e1285 100644
--- a/tools/benchmark/do_not_run_create_benchmark_data.py
+++ b/tools/benchmark/do_not_run_create_benchmark_data.py
@@ -10,7 +10,7 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
-
+import random
import sys
import time
@@ -20,31 +20,54 @@ from sqlalchemy import sql
from ironic.common import service
from ironic.conf import CONF # noqa To Load Configuration
from ironic.objects import node
+from ironic.objects import port
+
+
+NODE_COUNT = 10000
+PORTS_PER_NODE = 2
+
+
+# NOTE(hjensas): Mostly copy-paste from Nova
+def generate_mac_address():
+ """Generate an Ethernet MAC address."""
+ mac = [random.randint(0x00, 0xff),
+ random.randint(0x00, 0xff),
+ random.randint(0x00, 0xff),
+ random.randint(0x00, 0xff),
+ random.randint(0x00, 0xff),
+ random.randint(0x00, 0xff)]
+ return ':'.join(map(lambda x: "%02x" % x, mac))
+
+
+def _create_test_node_ports(new_node):
+ for i in range(0, PORTS_PER_NODE):
+ new_port = port.Port()
+ new_port.node_id = new_node.id
+ new_port.address = generate_mac_address()
+ new_port.pxe_enabled = True
+ new_port.create()
def _create_test_nodes():
print("Starting creation of fake nodes.")
start = time.time()
- node_count = 10000
checkin = time.time()
- for i in range(0, node_count):
-
- new_node = node.Node({
- 'power_state': 'power off',
- 'driver': 'ipmi',
- 'driver_internal_info': {'test-meow': i},
- 'name': 'BenchmarkTestNode-%s' % i,
- 'driver_info': {
- 'ipmi_username': 'admin',
- 'ipmi_password': 'admin',
- 'ipmi_address': 'testhost%s.env.top.level.domain' % i},
- 'resource_class': 'CUSTOM_BAREMETAL',
- 'properties': {
- 'cpu': 4,
- 'memory': 32,
- 'cats': i,
- 'meowing': True}})
+ for i in range(0, NODE_COUNT):
+ new_node = node.Node()
+ new_node.power_state = 'power off'
+ new_node.driver = 'ipmi'
+ new_node.driver_internal_info = {'test-meow': i}
+ new_node.name = 'BenchmarkTestNode-%s' % i
+ new_node.driver_info = {
+ 'ipmi_username': 'admin', 'ipmi_password': 'admin',
+ 'ipmi_address': 'testhost%s.env.top.level.domain' % i}
+ new_node.resource_class = 'CUSTOM_BAREMETAL'
+ new_node.properties = {'cpu': 4,
+ 'memory': 32,
+ 'cats': i,
+ 'meowing': True}
new_node.create()
+ _create_test_node_ports(new_node)
delta = time.time() - checkin
if delta > 10:
checkin = time.time()
@@ -52,7 +75,7 @@ def _create_test_nodes():
% (i, delta, time.time() - start))
created = time.time()
elapse = created - start
- print('Created %s nodes in %s seconds.\n' % (node_count, elapse))
+ print('Created %s nodes in %s seconds.\n' % (NODE_COUNT, elapse))
def _mix_up_nodes_data():
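Note on the MAC helper added above: because all six octets are random, the generated addresses are not guaranteed unique across the 20,000 ports and may set the multicast or locally administered bits. A minimal, stdlib-only sketch of a deterministic alternative (illustrative only, not part of this change):

    import itertools

    _mac_counter = itertools.count()


    def deterministic_mac_address():
        """Return a unique MAC of the form 52:54:00:xx:xx:xx."""
        # 52:54:00 is the well-known locally administered QEMU/KVM prefix;
        # the low 24 bits come from a running counter, so no two calls can
        # collide on address (Ironic's ports table enforces unique MACs).
        n = next(_mac_counter)
        return '52:54:00:%02x:%02x:%02x' % (
            (n >> 16) & 0xff, (n >> 8) & 0xff, n & 0xff)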
diff --git a/tools/benchmark/generate-statistics.py b/tools/benchmark/generate-statistics.py
index 740c3be08..e8327f3ac 100644
--- a/tools/benchmark/generate-statistics.py
+++ b/tools/benchmark/generate-statistics.py
@@ -21,6 +21,7 @@ import oslo_policy
from oslo_utils import timeutils
from ironic.api.controllers.v1 import node as node_api
+from ironic.api.controllers.v1 import port as port_api
from ironic.api.controllers.v1 import utils as api_utils
from ironic.common import context
from ironic.common import service
@@ -28,6 +29,7 @@ from ironic.conf import CONF # noqa To Load Configuration
from ironic.db import api as db_api
from ironic.objects import conductor
from ironic.objects import node
+from ironic.objects import port
def _calculate_delta(start, finish):
@@ -56,6 +58,24 @@ def _assess_db_performance():
return node_count
+def _assess_db_performance_ports():
+ start = time.time()
+ dbapi = db_api.get_instance()
+ print('Phase - Assess DB performance - Ports')
+ _add_a_line()
+ got_connection = time.time()
+ ports = dbapi.get_port_list()
+ port_count = len(ports)
+ query_complete = time.time()
+ delta = _calculate_delta(start, got_connection)
+ print('Obtained DB client in %s seconds.' % delta)
+ delta = _calculate_delta(got_connection, query_complete)
+ print('Returned %s ports in python %s seconds from the DB.\n' %
+ (port_count, delta))
+ # return port count for future use.
+ return port_count
+
+
def _assess_db_and_object_performance():
print('Phase - Assess DB & Object conversion Performance')
_add_a_line()
@@ -88,6 +108,33 @@ def _assess_db_and_object_performance():
observed_vendors.append(vendor)
+def _assess_db_and_object_performance_ports():
+ print('Phase - Assess DB & Object conversion Performance - Ports')
+ _add_a_line()
+ start = time.time()
+ port_list = port.Port().list(context.get_admin_context())
+ got_list = time.time()
+ delta = _calculate_delta(start, got_list)
+ print('Obtained list of port objects in %s seconds.' % delta)
+ count = 0
+ tbl_size = 0
+ # In a sense, this helps provide a relative understanding of whether
+ # the database is the bottleneck or the objects post conversion.
+ # Converting completely to JSON and then measuring the size helps
+ # ensure that everything is "assessed" while not revealing too
+ # much detail.
+ for port_obj in port_list:
+ # Just looping through the entire set to count should be
+ # enough to ensure that the entry is loaded from the db
+ # and then converted to an object.
+ tbl_size = tbl_size + sys.getsizeof(port_obj.as_dict())
+ count = count + 1
+ delta = _calculate_delta(got_list, time.time())
+ print('Took %s seconds to iterate through %s port objects.' %
+ (delta, count))
+ print('Ports table is roughly %s bytes of JSON.\n' % tbl_size)
+
+
@mock.patch('ironic.api.request') # noqa patch needed for the object model
@mock.patch.object(metrics_utils, 'get_metrics_logger', lambda *_: mock.Mock)
@mock.patch.object(api_utils, 'check_list_policy', lambda *_: None)
@@ -155,6 +202,68 @@ def _assess_db_object_and_api_performance(mock_log, mock_request):
'nodes API call pattern.\n' % (delta, total_nodes))
+
+@mock.patch('ironic.api.request') # noqa patch needed for the object model
+@mock.patch.object(metrics_utils, 'get_metrics_logger', lambda *_: mock.Mock)
+@mock.patch.object(api_utils, 'check_list_policy', lambda *_: None)
+@mock.patch.object(api_utils, 'check_allow_specify_fields', lambda *_: None)
+@mock.patch.object(api_utils, 'check_allowed_fields', lambda *_: None)
+@mock.patch.object(oslo_policy.policy, 'LOG', autospec=True)
+def _assess_db_object_and_api_performance_ports(mock_log, mock_request):
+ print('Phase - Assess DB, Object, and API Performance - Ports')
+ _add_a_line()
+ # Just mock the logger to silence it, since updating its config
+ # once things have started does not appear to be supported. :\
+ mock_log.debug = mock.Mock()
+ # Internal logic requires major/minor versions and a context to
+ # proceed. This is just to make the PortsController respond properly.
+ mock_request.context = context.get_admin_context()
+ mock_request.version.major = 1
+ mock_request.version.minor = 71
+
+ start = time.time()
+ port_api_controller = port_api.PortsController()
+ port_api_controller.context = context.get_admin_context()
+ fields = ("uuid,node_uuid,address,extra,local_link_connection,"
+ "pxe_enabled,internal_info,physical_network,"
+ "is_smartnic")
+
+ total_ports = 0
+
+ res = port_api_controller._get_ports_collection(
+ resource_url='ports',
+ node_ident=None,
+ address=None,
+ portgroup_ident=None,
+ marker=None,
+ limit=None,
+ sort_key="id",
+ sort_dir="asc",
+ fields=fields.split(','))
+ total_ports = len(res['ports'])
+ while len(res['ports']) != 1:
+ print(" ** Getting ports ** %s Elapsed: %s seconds." %
+ (total_ports, _calculate_delta(start, time.time())))
+ res = port_api_controller._get_ports_collection(
+ resource_url='ports',
+ node_ident=None,
+ address=None,
+ portgroup_ident=None,
+ marker=res['ports'][-1]['uuid'],
+ limit=None,
+ sort_key="id",
+ sort_dir="asc",
+ fields=fields.split(','))
+ new_ports = len(res['ports'])
+ if new_ports == 0:
+ break
+ total_ports = total_ports + new_ports
+
+ delta = _calculate_delta(start, time.time())
+ print('Took %s seconds to return all %s ports via '
+ 'ports API call pattern.\n' % (delta, total_ports))
+
+
def _report_conductors():
print('Phase - identifying conductors/drivers')
_add_a_line()
@@ -190,6 +299,9 @@ def main():
_assess_db_performance()
_assess_db_and_object_performance()
_assess_db_object_and_api_performance()
+ _assess_db_performance_ports()
+ _assess_db_and_object_performance_ports()
+ _assess_db_object_and_api_performance_ports()
_report_conductors()
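With the three new port phases appended to main(), one run of generate-statistics.py now reports timings for both the nodes and the ports tables. As a quick sanity check before a full run, the row counts can be confirmed with the same DB API calls the script exercises; a minimal, hedged sketch (assumes the same configured Ironic environment as the scripts above; the helper name is illustrative):

    import time

    from ironic.conf import CONF  # noqa To Load Configuration
    from ironic.db import api as db_api


    def count_benchmark_rows():
        """Count the benchmark nodes and ports before profiling them."""
        start = time.time()
        dbapi = db_api.get_instance()
        node_count = len(dbapi.get_node_list())
        port_count = len(dbapi.get_port_list())
        print('%s nodes and %s ports present after %s seconds.'
              % (node_count, port_count, time.time() - start))
        return node_count, port_count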