author    Matt Clay <mclay@redhat.com>    2020-02-05 22:16:15 -0800
committer GitHub <noreply@github.com>    2020-02-05 22:16:15 -0800
commit    5e68bb3d93c4782e266420ee1f57a4502fadea6e (patch)
tree      5597943fdc2ead9a782f739414a388ac54d5d6ba /test/lib/ansible_test/_internal/coverage/combine.py
parent    68b981ae21f85e96d951aefac6acd1b0d169cefe (diff)
Add code coverage target analysis to ansible-test. (#67141)
* Refactor coverage file enumeration.
* Relocate sanitize_filename function.
* Support sets when writing JSON files.
* Generalize setting of info_stderr mode.
* Split out coverage path checking.
* Split out collection regex logic.
* Improve sanitize_filename type hints and docs.
* Clean up coverage erase command.
* Fix docs and type hints for initialize_coverage.
* Update type hints on CoverageConfig.
* Split out logic for finding modules.
* Split out arc enumeration.
* Split out powershell coverage enumeration.
* Raise verbosity level of empty coverage warnings.
* Add code coverage target analysis to ansible-test.
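Several helpers referenced in the diff below (PathChecker, enumerate_python_arcs, get_collection_path_regexes, and related functions) are defined in the coverage package's __init__.py, which is not part of this file's diff. As a reading aid, here is a minimal sketch of the PathChecker introduced by "Split out coverage path checking", reconstructed from the inline bookkeeping that the diff removes; the attribute names and the `from ..util import display` import are assumptions, not the verbatim implementation.

# Sketch only: reconstructed from the inline path checks removed in this diff;
# the real implementation lives in test/lib/ansible_test/_internal/coverage/__init__.py.
import os

from ..util import (  # assumed import, as used elsewhere in ansible-test
    display,
)


class PathChecker:
    """Check coverage paths and report on any that are invalid."""
    def __init__(self, args, collection_search_re=None):
        self.args = args
        self.collection_search_re = collection_search_re
        self.invalid_path_count = 0
        self.invalid_path_chars = 0

    def check_path(self, path):
        """Return True if the coverage path refers to a file on disk, otherwise record it as invalid and return False."""
        if os.path.isfile(path):
            return True

        if self.collection_search_re and self.collection_search_re.search(path) and os.path.basename(path) == '__init__.py':
            # the collection loader uses implicit namespace packages, so __init__.py does not need to exist on disk
            return False

        self.invalid_path_count += 1
        self.invalid_path_chars += len(path)

        if self.args.verbosity > 1:
            display.warning('Invalid coverage path: %s' % path)

        return False

    def report(self):
        """Display a summary warning if any invalid paths were found."""
        if self.invalid_path_count > 0:
            display.warning('Ignored %d characters from %d invalid coverage path(s).' % (self.invalid_path_chars, self.invalid_path_count))

With a helper of this shape, the per-file loop in _command_coverage_combine_python reduces to `if not path_checker.check_path(filename): continue`, and the duplicated summary warning in both the Python and PowerShell code paths collapses into a single `path_checker.report()` call, as seen in the diff.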
Diffstat (limited to 'test/lib/ansible_test/_internal/coverage/combine.py')
-rw-r--r-- test/lib/ansible_test/_internal/coverage/combine.py | 187
1 file changed, 23 insertions(+), 164 deletions(-)
diff --git a/test/lib/ansible_test/_internal/coverage/combine.py b/test/lib/ansible_test/_internal/coverage/combine.py
index a07a4dd6de..e4a6f61415 100644
--- a/test/lib/ansible_test/_internal/coverage/combine.py
+++ b/test/lib/ansible_test/_internal/coverage/combine.py
@@ -3,16 +3,13 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
-import re
from ..target import (
- walk_module_targets,
walk_compile_targets,
walk_powershell_targets,
)
from ..io import (
- read_json_file,
read_text_file,
)
@@ -25,15 +22,18 @@ from ..util_common import (
write_json_test_results,
)
-from ..data import (
- data_context,
-)
-
from . import (
+ enumerate_python_arcs,
+ enumerate_powershell_lines,
+ get_collection_path_regexes,
+ get_python_coverage_files,
+ get_python_modules,
+ get_powershell_coverage_files,
initialize_coverage,
COVERAGE_OUTPUT_FILE_NAME,
COVERAGE_GROUPS,
CoverageConfig,
+ PathChecker,
)
@@ -57,58 +57,27 @@ def _command_coverage_combine_python(args):
"""
coverage = initialize_coverage(args)
- modules = dict((target.module, target.path) for target in list(walk_module_targets()) if target.path.endswith('.py'))
+ modules = get_python_modules()
- coverage_dir = ResultType.COVERAGE.path
- coverage_files = [os.path.join(coverage_dir, f) for f in os.listdir(coverage_dir)
- if '=coverage.' in f and '=python' in f]
+ coverage_files = get_python_coverage_files()
counter = 0
sources = _get_coverage_targets(args, walk_compile_targets)
groups = _build_stub_groups(args, sources, lambda line_count: set())
- if data_context().content.collection:
- collection_search_re = re.compile(r'/%s/' % data_context().content.collection.directory)
- collection_sub_re = re.compile(r'^.*?/%s/' % data_context().content.collection.directory)
- else:
- collection_search_re = None
- collection_sub_re = None
+ collection_search_re, collection_sub_re = get_collection_path_regexes()
for coverage_file in coverage_files:
counter += 1
display.info('[%4d/%4d] %s' % (counter, len(coverage_files), coverage_file), verbosity=2)
- original = coverage.CoverageData()
-
group = get_coverage_group(args, coverage_file)
if group is None:
display.warning('Unexpected name for coverage file: %s' % coverage_file)
continue
- if os.path.getsize(coverage_file) == 0:
- display.warning('Empty coverage file: %s' % coverage_file)
- continue
-
- try:
- original.read_file(coverage_file)
- except Exception as ex: # pylint: disable=locally-disabled, broad-except
- display.error(u'%s' % ex)
- continue
-
- for filename in original.measured_files():
- arcs = set(original.arcs(filename) or [])
-
- if not arcs:
- # This is most likely due to using an unsupported version of coverage.
- display.warning('No arcs found for "%s" in coverage file: %s' % (filename, coverage_file))
- continue
-
- filename = _sanitize_filename(filename, modules=modules, collection_search_re=collection_search_re,
- collection_sub_re=collection_sub_re)
- if not filename:
- continue
-
+ for filename, arcs in enumerate_python_arcs(coverage_file, coverage, modules, collection_search_re, collection_sub_re):
if group not in groups:
groups[group] = {}
@@ -120,28 +89,18 @@ def _command_coverage_combine_python(args):
arc_data[filename].update(arcs)
output_files = []
- invalid_path_count = 0
- invalid_path_chars = 0
coverage_file = os.path.join(ResultType.COVERAGE.path, COVERAGE_OUTPUT_FILE_NAME)
+ path_checker = PathChecker(args, collection_search_re)
+
for group in sorted(groups):
arc_data = groups[group]
updated = coverage.CoverageData()
for filename in arc_data:
- if not os.path.isfile(filename):
- if collection_search_re and collection_search_re.search(filename) and os.path.basename(filename) == '__init__.py':
- # the collection loader uses implicit namespace packages, so __init__.py does not need to exist on disk
- continue
-
- invalid_path_count += 1
- invalid_path_chars += len(filename)
-
- if args.verbosity > 1:
- display.warning('Invalid coverage path: %s' % filename)
-
+ if not path_checker.check_path(filename):
continue
updated.add_arcs({filename: list(arc_data[filename])})
@@ -154,8 +113,7 @@ def _command_coverage_combine_python(args):
updated.write_file(output_file)
output_files.append(output_file)
- if invalid_path_count > 0:
- display.warning('Ignored %d characters from %d invalid coverage path(s).' % (invalid_path_chars, invalid_path_count))
+ path_checker.report()
return sorted(output_files)
@@ -165,9 +123,7 @@ def _command_coverage_combine_powershell(args):
:type args: CoverageConfig
:rtype: list[str]
"""
- coverage_dir = ResultType.COVERAGE.path
- coverage_files = [os.path.join(coverage_dir, f) for f in os.listdir(coverage_dir)
- if '=coverage.' in f and '=powershell' in f]
+ coverage_files = get_powershell_coverage_files()
def _default_stub_value(lines):
val = {}
@@ -189,57 +145,26 @@ def _command_coverage_combine_powershell(args):
display.warning('Unexpected name for coverage file: %s' % coverage_file)
continue
- if os.path.getsize(coverage_file) == 0:
- display.warning('Empty coverage file: %s' % coverage_file)
- continue
-
- try:
- coverage_run = read_json_file(coverage_file)
- except Exception as ex: # pylint: disable=locally-disabled, broad-except
- display.error(u'%s' % ex)
- continue
-
- for filename, hit_info in coverage_run.items():
+ for filename, hits in enumerate_powershell_lines(coverage_file):
if group not in groups:
groups[group] = {}
coverage_data = groups[group]
- filename = _sanitize_filename(filename)
- if not filename:
- continue
-
if filename not in coverage_data:
coverage_data[filename] = {}
file_coverage = coverage_data[filename]
- if not isinstance(hit_info, list):
- hit_info = [hit_info]
-
- for hit_entry in hit_info:
- if not hit_entry:
- continue
-
- line_count = file_coverage.get(hit_entry['Line'], 0) + hit_entry['HitCount']
- file_coverage[hit_entry['Line']] = line_count
+ for line_no, hit_count in hits.items():
+ file_coverage[line_no] = file_coverage.get(line_no, 0) + hit_count
output_files = []
- invalid_path_count = 0
- invalid_path_chars = 0
- for group in sorted(groups):
- coverage_data = groups[group]
+ path_checker = PathChecker(args)
- for filename in coverage_data:
- if not os.path.isfile(filename):
- invalid_path_count += 1
- invalid_path_chars += len(filename)
-
- if args.verbosity > 1:
- display.warning('Invalid coverage path: %s' % filename)
-
- continue
+ for group in sorted(groups):
+ coverage_data = dict((filename, data) for filename, data in groups[group].items() if path_checker.check_path(filename))
if args.all:
# Add 0 line entries for files not in coverage_data
@@ -256,9 +181,7 @@ def _command_coverage_combine_powershell(args):
output_files.append(os.path.join(ResultType.COVERAGE.path, output_file))
- if invalid_path_count > 0:
- display.warning(
- 'Ignored %d characters from %d invalid coverage path(s).' % (invalid_path_chars, invalid_path_count))
+ path_checker.report()
return sorted(output_files)
@@ -346,67 +269,3 @@ def get_coverage_group(args, coverage_file):
group += '=%s' % names[part]
return group
-
-
-def _sanitize_filename(filename, modules=None, collection_search_re=None, collection_sub_re=None):
- """
- :type filename: str
- :type modules: dict | None
- :type collection_search_re: Pattern | None
- :type collection_sub_re: Pattern | None
- :rtype: str | None
- """
- ansible_path = os.path.abspath('lib/ansible/') + '/'
- root_path = data_context().content.root + '/'
- integration_temp_path = os.path.sep + os.path.join(ResultType.TMP.relative_path, 'integration') + os.path.sep
-
- if modules is None:
- modules = {}
-
- if '/ansible_modlib.zip/ansible/' in filename:
- # Rewrite the module_utils path from the remote host to match the controller. Ansible 2.6 and earlier.
- new_name = re.sub('^.*/ansible_modlib.zip/ansible/', ansible_path, filename)
- display.info('%s -> %s' % (filename, new_name), verbosity=3)
- filename = new_name
- elif collection_search_re and collection_search_re.search(filename):
- new_name = os.path.abspath(collection_sub_re.sub('', filename))
- display.info('%s -> %s' % (filename, new_name), verbosity=3)
- filename = new_name
- elif re.search(r'/ansible_[^/]+_payload\.zip/ansible/', filename):
- # Rewrite the module_utils path from the remote host to match the controller. Ansible 2.7 and later.
- new_name = re.sub(r'^.*/ansible_[^/]+_payload\.zip/ansible/', ansible_path, filename)
- display.info('%s -> %s' % (filename, new_name), verbosity=3)
- filename = new_name
- elif '/ansible_module_' in filename:
- # Rewrite the module path from the remote host to match the controller. Ansible 2.6 and earlier.
- module_name = re.sub('^.*/ansible_module_(?P<module>.*).py$', '\\g<module>', filename)
- if module_name not in modules:
- display.warning('Skipping coverage of unknown module: %s' % module_name)
- return None
- new_name = os.path.abspath(modules[module_name])
- display.info('%s -> %s' % (filename, new_name), verbosity=3)
- filename = new_name
- elif re.search(r'/ansible_[^/]+_payload(_[^/]+|\.zip)/__main__\.py$', filename):
- # Rewrite the module path from the remote host to match the controller. Ansible 2.7 and later.
- # AnsiballZ versions using zipimporter will match the `.zip` portion of the regex.
- # AnsiballZ versions not using zipimporter will match the `_[^/]+` portion of the regex.
- module_name = re.sub(r'^.*/ansible_(?P<module>[^/]+)_payload(_[^/]+|\.zip)/__main__\.py$',
- '\\g<module>', filename).rstrip('_')
- if module_name not in modules:
- display.warning('Skipping coverage of unknown module: %s' % module_name)
- return None
- new_name = os.path.abspath(modules[module_name])
- display.info('%s -> %s' % (filename, new_name), verbosity=3)
- filename = new_name
- elif re.search('^(/.*?)?/root/ansible/', filename):
- # Rewrite the path of code running on a remote host or in a docker container as root.
- new_name = re.sub('^(/.*?)?/root/ansible/', root_path, filename)
- display.info('%s -> %s' % (filename, new_name), verbosity=3)
- filename = new_name
- elif integration_temp_path in filename:
- # Rewrite the path of code running from an integration test temporary directory.
- new_name = re.sub(r'^.*' + re.escape(integration_temp_path) + '[^/]+/', root_path, filename)
- display.info('%s -> %s' % (filename, new_name), verbosity=3)
- filename = new_name
-
- return filename
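The arc enumeration split out of _command_coverage_combine_python is likewise only visible here through its call site, `for filename, arcs in enumerate_python_arcs(...)`. Below is a hedged sketch of that helper, reconstructed from the deleted loop above and from the commit note about raising the verbosity of empty-coverage warnings; the relocated sanitize_filename name, the verbosity levels, and the `from ..util import display` import are assumptions rather than the exact implementation.

# Sketch only: reconstructed from the inline enumeration removed in this diff;
# the real helper lives in test/lib/ansible_test/_internal/coverage/__init__.py.
import os

from ..util import (  # assumed import
    display,
)


def enumerate_python_arcs(path, coverage, modules, collection_search_re, collection_sub_re):
    """Yield (filename, arcs) tuples from the given Python coverage file, with filenames already sanitized."""
    if os.path.getsize(path) == 0:
        display.warning('Empty coverage file: %s' % path, verbosity=2)  # verbosity raised per the commit message
        return

    original = coverage.CoverageData()

    try:
        original.read_file(path)
    except Exception as ex:  # pylint: disable=locally-disabled, broad-except
        display.error(u'%s' % ex)
        return

    for filename in original.measured_files():
        arcs = original.arcs(filename)

        if not arcs:
            # This is most likely due to using an unsupported version of coverage.
            display.warning('No arcs found for "%s" in coverage file: %s' % (filename, path), verbosity=2)
            continue

        # sanitize_filename is assumed to be the relocated, module-level form of the
        # _sanitize_filename function deleted above, defined in the same package.
        filename = sanitize_filename(filename, modules=modules, collection_search_re=collection_search_re,
                                     collection_sub_re=collection_sub_re)

        if not filename:
            continue

        yield filename, set(arcs)

The PowerShell path follows the same pattern: enumerate_powershell_lines presumably performs the empty-file check, JSON read, and filename sanitization that were removed from _command_coverage_combine_powershell, yielding per-file dictionaries of line numbers to hit counts, which the rewritten loop merges with `file_coverage[line_no] = file_coverage.get(line_no, 0) + hit_count`.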