From e0a8af19e0a4434943212594b07de3f4056e5e86 Mon Sep 17 00:00:00 2001 From: Ben Finney Date: Thu, 15 Oct 2015 13:25:57 +1100 Subject: Derive command name from command-line arguments. The command name is determined by the command-line used to invoke the program, and should not be hard-coded in messages. Derive it from the command-line arguments. --- coverage/cmdline.py | 18 ++++++++++++------ 1 file changed, 12 insertions(+), 6 deletions(-) (limited to 'coverage') diff --git a/coverage/cmdline.py b/coverage/cmdline.py index 46a87ca..94bb75f 100644 --- a/coverage/cmdline.py +++ b/coverage/cmdline.py @@ -142,6 +142,8 @@ class CoverageOptionParser(optparse.OptionParser, object): """ + program_name = os.path.basename(sys.argv[0]) + def __init__(self, *args, **kwargs): super(CoverageOptionParser, self).__init__( add_help_option=False, *args, **kwargs @@ -228,7 +230,7 @@ class CmdOptionParser(CoverageOptionParser): if usage: usage = "%prog " + usage super(CmdOptionParser, self).__init__( - prog="coverage %s" % action, + prog="%(program_name)s %s" % action, usage=usage, description=description, ) @@ -523,13 +525,17 @@ class CoverageScript(object): assert error or topic or parser if error: print(error) - print("Use 'coverage help' for help.") + print("Use '%(program_name)s help' for help." % { + 'program_name': parser.program_name}) elif parser: print(parser.format_help().strip()) else: + help_params = self.covpkg.__dict__ + help_params.update({ + 'program_name': CoverageOptionParser.program_name}) help_msg = textwrap.dedent(HELP_TOPICS.get(topic, '')).strip() if help_msg: - print(help_msg % self.covpkg.__dict__) + print(help_msg % help_params) else: print("Don't know topic %r" % topic) @@ -682,7 +688,7 @@ HELP_TOPICS = { Coverage.py, version %(__version__)s Measure, collect, and report on code coverage in Python programs. - usage: coverage [options] [args] + usage: %(program_name)s [options] [args] Commands: annotate Annotate source files with execution information. 
@@ -694,12 +700,12 @@ HELP_TOPICS = { run Run a Python program and measure code execution. xml Create an XML report of coverage results. - Use "coverage help " for detailed help on any command. + Use "%(program_name)s help " for detailed help on any command. For full documentation, see %(__url__)s """, 'minimum_help': """\ - Code coverage for Python. Use 'coverage help' for help. + Code coverage for Python. Use '%(program_name)s help' for help. """, 'version': """\ -- cgit v1.2.1 From 192aae2ee24eeacce209b208916528086ab3a229 Mon Sep 17 00:00:00 2001 From: Ben Finney Date: Sun, 18 Oct 2015 12:58:20 +1100 Subject: Override program name only for ?CmdOptionParser? instances. We only need to append the sub-command, so we shouldn't override the actual command. Get the command from the superclass. --- coverage/cmdline.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) (limited to 'coverage') diff --git a/coverage/cmdline.py b/coverage/cmdline.py index 94bb75f..308dbac 100644 --- a/coverage/cmdline.py +++ b/coverage/cmdline.py @@ -230,7 +230,6 @@ class CmdOptionParser(CoverageOptionParser): if usage: usage = "%prog " + usage super(CmdOptionParser, self).__init__( - prog="%(program_name)s %s" % action, usage=usage, description=description, ) @@ -244,6 +243,15 @@ class CmdOptionParser(CoverageOptionParser): # results, and they will compare equal to objects. return (other == "" % self.cmd) + def get_prog_name(self): + program_name = super(CmdOptionParser, self).get_prog_name() + + # Include the sub-command for this parser as part of the command. + result = "%(command)s %(subcommand)s" % { + 'command': program_name, 'subcommand': self.cmd} + return result + + GLOBAL_ARGS = [ Opts.debug, Opts.help, -- cgit v1.2.1 From 72a789f5a7692b322947431d22fd71294e0fef15 Mon Sep 17 00:00:00 2001 From: Ben Finney Date: Sun, 18 Oct 2015 12:59:28 +1100 Subject: Move program name attribute to the application class. 
--- coverage/cmdline.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) (limited to 'coverage') diff --git a/coverage/cmdline.py b/coverage/cmdline.py index 308dbac..97ea596 100644 --- a/coverage/cmdline.py +++ b/coverage/cmdline.py @@ -142,8 +142,6 @@ class CoverageOptionParser(optparse.OptionParser, object): """ - program_name = os.path.basename(sys.argv[0]) - def __init__(self, *args, **kwargs): super(CoverageOptionParser, self).__init__( add_help_option=False, *args, **kwargs @@ -382,6 +380,8 @@ OK, ERR, FAIL_UNDER = 0, 1, 2 class CoverageScript(object): """The command-line interface to coverage.py.""" + program_name = os.path.basename(sys.argv[0]) + def __init__(self, _covpkg=None, _run_python_file=None, _run_python_module=None, _help_fn=None, _path_exists=None): # _covpkg is for dependency injection, so we can test this code. @@ -534,13 +534,13 @@ class CoverageScript(object): if error: print(error) print("Use '%(program_name)s help' for help." % { - 'program_name': parser.program_name}) + 'program_name': self.program_name}) elif parser: print(parser.format_help().strip()) else: help_params = self.covpkg.__dict__ help_params.update({ - 'program_name': CoverageOptionParser.program_name}) + 'program_name': self.program_name}) help_msg = textwrap.dedent(HELP_TOPICS.get(topic, '')).strip() if help_msg: print(help_msg % help_params) -- cgit v1.2.1 From de250a5c71ca08d1b6d90f4bf395aede1eb0985e Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Sat, 14 Nov 2015 06:18:31 -0500 Subject: Bump to 4.0.3 --- coverage/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'coverage') diff --git a/coverage/version.py b/coverage/version.py index 8f44be0..9897319 100644 --- a/coverage/version.py +++ b/coverage/version.py @@ -5,7 +5,7 @@ # This file is exec'ed in setup.py, don't import anything! # Same semantics as sys.version_info. 
-version_info = (4, 0, 2, 'final', 0) +version_info = (4, 0, 3, 'final', 0) def _make_version(major, minor, micro, releaselevel, serial): -- cgit v1.2.1 From 54ee1a0a528c027f800fce57dae44899c4f6283b Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Sat, 14 Nov 2015 09:30:58 -0500 Subject: Another edge case of encoding detection. #443 --- coverage/phystokens.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'coverage') diff --git a/coverage/phystokens.py b/coverage/phystokens.py index f5bd0bc..b34b1c3 100644 --- a/coverage/phystokens.py +++ b/coverage/phystokens.py @@ -153,7 +153,7 @@ class CachedTokenizer(object): generate_tokens = CachedTokenizer().generate_tokens -COOKIE_RE = re.compile(r"^\s*#.*coding[:=]\s*([-\w.]+)", flags=re.MULTILINE) +COOKIE_RE = re.compile(r"^[ \t]*#.*coding[:=][ \t]*([-\w.]+)", flags=re.MULTILINE) @contract(source='bytes') def _source_encoding_py2(source): -- cgit v1.2.1 From 91adfce1c89926a91a1bbd304973280f320a6841 Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Sat, 14 Nov 2015 17:04:14 -0500 Subject: Cleanups for style, docs, etc from #438 --- coverage/backward.py | 9 +++++++++ coverage/cmdline.py | 13 +++++-------- 2 files changed, 14 insertions(+), 8 deletions(-) (limited to 'coverage') diff --git a/coverage/backward.py b/coverage/backward.py index 7f571c0..81ca342 100644 --- a/coverage/backward.py +++ b/coverage/backward.py @@ -49,6 +49,15 @@ try: except NameError: range = range +# shlex.quote is new, but there's an undocumented implementation in "pipes", +# who knew!? +try: + from shlex import quote as shlex_quote +except ImportError: + # Useful function, available under a different (undocumented) name + # in Python versions earlier than 3.3. + from pipes import quote as shlex_quote + # A function to iterate listlessly over a dict's items. 
try: {}.iteritems diff --git a/coverage/cmdline.py b/coverage/cmdline.py index 97ea596..5742e6a 100644 --- a/coverage/cmdline.py +++ b/coverage/cmdline.py @@ -242,12 +242,11 @@ class CmdOptionParser(CoverageOptionParser): return (other == "" % self.cmd) def get_prog_name(self): + """Override of an undocumented function in optparse.OptionParser.""" program_name = super(CmdOptionParser, self).get_prog_name() # Include the sub-command for this parser as part of the command. - result = "%(command)s %(subcommand)s" % { - 'command': program_name, 'subcommand': self.cmd} - return result + return "%(command)s %(subcommand)s" % {'command': program_name, 'subcommand': self.cmd} GLOBAL_ARGS = [ @@ -533,14 +532,12 @@ class CoverageScript(object): assert error or topic or parser if error: print(error) - print("Use '%(program_name)s help' for help." % { - 'program_name': self.program_name}) + print("Use '%s help' for help." % (self.program_name,)) elif parser: print(parser.format_help().strip()) else: - help_params = self.covpkg.__dict__ - help_params.update({ - 'program_name': self.program_name}) + help_params = dict(self.covpkg.__dict__) + help_params['program_name'] = self.program_name help_msg = textwrap.dedent(HELP_TOPICS.get(topic, '')).strip() if help_msg: print(help_msg % help_params) -- cgit v1.2.1 From df4719503e28ebffd3e6120bd4457b7964647518 Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Sat, 14 Nov 2015 21:54:42 -0500 Subject: Need to account for Windows when figuring the program name. 
--- coverage/cmdline.py | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) (limited to 'coverage') diff --git a/coverage/cmdline.py b/coverage/cmdline.py index 5742e6a..221c18d 100644 --- a/coverage/cmdline.py +++ b/coverage/cmdline.py @@ -379,8 +379,6 @@ OK, ERR, FAIL_UNDER = 0, 1, 2 class CoverageScript(object): """The command-line interface to coverage.py.""" - program_name = os.path.basename(sys.argv[0]) - def __init__(self, _covpkg=None, _run_python_file=None, _run_python_module=None, _help_fn=None, _path_exists=None): # _covpkg is for dependency injection, so we can test this code. @@ -399,6 +397,17 @@ class CoverageScript(object): self.coverage = None + self.program_name = os.path.basename(sys.argv[0]) + if env.WINDOWS: + # entry_points={'console_scripts':...} on Windows makes files + # called coverage.exe, coverage3.exe, and coverage-3.5.exe. These + # invoke coverage-script.py, coverage3-script.py, and + # coverage-3.5-script.py. argv[0] is the .py file, but we want to + # get back to the original form. + auto_suffix = "-script.py" + if self.program_name.endswith(auto_suffix): + self.program_name = self.program_name[:-len(auto_suffix)] + def command_line(self, argv): """The bulk of the command line interface to coverage.py. -- cgit v1.2.1 From 95d3ada3cd3164bb086ce1b7cc14ea6a2985d637 Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Sat, 21 Nov 2015 14:14:03 -0500 Subject: Possible fix for #445 and #420 The line that seems to break #445 is the import of weakref, even if we never use it. Delaying the import until we need it seems to fix #445. 
--- coverage/ctracer/tracer.c | 29 ++++++++++++++++------------- 1 file changed, 16 insertions(+), 13 deletions(-) (limited to 'coverage') diff --git a/coverage/ctracer/tracer.c b/coverage/ctracer/tracer.c index dba8a11..25036f9 100644 --- a/coverage/ctracer/tracer.c +++ b/coverage/ctracer/tracer.c @@ -64,24 +64,11 @@ static int CTracer_init(CTracer *self, PyObject *args_unused, PyObject *kwds_unused) { int ret = RET_ERROR; - PyObject * weakref = NULL; if (DataStack_init(&self->stats, &self->data_stack) < 0) { goto error; } - weakref = PyImport_ImportModule("weakref"); - if (weakref == NULL) { - goto error; - } - STATS( self->stats.pycalls++; ) - self->data_stack_index = PyObject_CallMethod(weakref, "WeakKeyDictionary", NULL); - Py_XDECREF(weakref); - - if (self->data_stack_index == NULL) { - goto error; - } - self->pdata_stack = &self->data_stack; self->cur_entry.last_line = -1; @@ -212,6 +199,22 @@ CTracer_set_pdata_stack(CTracer *self) if (self->concur_id_func != Py_None) { int the_index = 0; + if (self->data_stack_index == NULL) { + PyObject * weakref = NULL; + + weakref = PyImport_ImportModule("weakref"); + if (weakref == NULL) { + goto error; + } + STATS( self->stats.pycalls++; ) + self->data_stack_index = PyObject_CallMethod(weakref, "WeakKeyDictionary", NULL); + Py_XDECREF(weakref); + + if (self->data_stack_index == NULL) { + goto error; + } + } + STATS( self->stats.pycalls++; ) co_obj = PyObject_CallObject(self->concur_id_func, NULL); if (co_obj == NULL) { -- cgit v1.2.1 From 5a38a3e5ff72fcd6fdb2e8b62345974eb96097da Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Sun, 22 Nov 2015 10:03:13 -0500 Subject: Extend import_local_file so I can use a file in another directory. 
--- coverage/backward.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) (limited to 'coverage') diff --git a/coverage/backward.py b/coverage/backward.py index 81ca342..4fc7221 100644 --- a/coverage/backward.py +++ b/coverage/backward.py @@ -151,11 +151,12 @@ except AttributeError: PYC_MAGIC_NUMBER = imp.get_magic() -def import_local_file(modname): +def import_local_file(modname, modfile=None): """Import a local file as a module. Opens a file in the current directory named `modname`.py, imports it - as `modname`, and returns the module object. + as `modname`, and returns the module object. `modfile` is the file to + import if it isn't in the current directory. """ try: @@ -163,7 +164,8 @@ def import_local_file(modname): except ImportError: SourceFileLoader = None - modfile = modname + '.py' + if modfile is None: + modfile = modname + '.py' if SourceFileLoader: mod = SourceFileLoader(modname, modfile).load_module() else: -- cgit v1.2.1 From 7889f390058da471c253390a20bde71d88ba0956 Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Sun, 22 Nov 2015 10:58:58 -0500 Subject: XML element properly reflects --source= option. 
#439 --- coverage/xmlreport.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) (limited to 'coverage') diff --git a/coverage/xmlreport.py b/coverage/xmlreport.py index d7c2f44..3023f4b 100644 --- a/coverage/xmlreport.py +++ b/coverage/xmlreport.py @@ -4,6 +4,7 @@ """XML reporting for coverage.py""" import os +import os.path import sys import time import xml.dom.minidom @@ -38,6 +39,10 @@ class XmlReporter(Reporter): super(XmlReporter, self).__init__(coverage, config) self.source_paths = set() + if config.source: + for src in config.source: + if os.path.exists(src): + self.source_paths.add(files.canonical_filename(src)) self.packages = {} self.xml_out = None self.has_arcs = coverage.data.has_arcs() @@ -141,9 +146,10 @@ class XmlReporter(Reporter): parts = dirname.split("/") dirname = "/".join(parts[:self.config.xml_package_depth]) package_name = dirname.replace("/", ".") - className = fr.relative_filename() + rel_name = fr.relative_filename() - self.source_paths.add(files.relative_directory().rstrip('/')) + if rel_name != fr.filename: + self.source_paths.add(fr.filename[:-len(rel_name)].rstrip('/')) package = self.packages.setdefault(package_name, [{}, 0, 0, 0, 0]) xclass = self.xml_out.createElement("class") @@ -201,7 +207,7 @@ class XmlReporter(Reporter): branch_rate = "0" xclass.setAttribute("branch-rate", branch_rate) - package[0][className] = xclass + package[0][rel_name] = xclass package[1] += class_hits package[2] += class_lines package[3] += class_br_hits -- cgit v1.2.1 From 00603e012d01088f1d648106c6fbffaa9ada1ec7 Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Sun, 22 Nov 2015 12:50:58 -0500 Subject: Windows windows --- coverage/xmlreport.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'coverage') diff --git a/coverage/xmlreport.py b/coverage/xmlreport.py index 3023f4b..50a4684 100644 --- a/coverage/xmlreport.py +++ b/coverage/xmlreport.py @@ -149,7 +149,7 @@ class XmlReporter(Reporter): rel_name = 
fr.relative_filename() if rel_name != fr.filename: - self.source_paths.add(fr.filename[:-len(rel_name)].rstrip('/')) + self.source_paths.add(fr.filename[:-len(rel_name)].rstrip(r"\/")) package = self.packages.setdefault(package_name, [{}, 0, 0, 0, 0]) xclass = self.xml_out.createElement("class") -- cgit v1.2.1 From b2c0c9dd53e1afcfa06376dddb052775caae31c5 Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Tue, 24 Nov 2015 06:08:51 -0500 Subject: If STATS is enabled, all STATS() have to be after declarations --- coverage/ctracer/datastack.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'coverage') diff --git a/coverage/ctracer/datastack.c b/coverage/ctracer/datastack.c index 1d925ad..5a384e6 100644 --- a/coverage/ctracer/datastack.c +++ b/coverage/ctracer/datastack.c @@ -26,10 +26,10 @@ DataStack_grow(Stats *pstats, DataStack *pdata_stack) { pdata_stack->depth++; if (pdata_stack->depth >= pdata_stack->alloc) { - STATS( pstats->stack_reallocs++; ) /* We've outgrown our data_stack array: make it bigger. */ int bigger = pdata_stack->alloc + STACK_DELTA; DataStackEntry * bigger_data_stack = PyMem_Realloc(pdata_stack->stack, bigger * sizeof(DataStackEntry)); + STATS( pstats->stack_reallocs++; ) if (bigger_data_stack == NULL) { PyErr_NoMemory(); pdata_stack->depth--; -- cgit v1.2.1 From dac9d17cda49681154aba71dd4983a3b598171de Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Fri, 27 Nov 2015 06:19:44 -0500 Subject: Scooch the version --- coverage/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'coverage') diff --git a/coverage/version.py b/coverage/version.py index 9897319..34f5293 100644 --- a/coverage/version.py +++ b/coverage/version.py @@ -5,7 +5,7 @@ # This file is exec'ed in setup.py, don't import anything! # Same semantics as sys.version_info. 
-version_info = (4, 0, 3, 'final', 0) +version_info = (4, 0, 4, 'final', 0) def _make_version(major, minor, micro, releaselevel, serial): -- cgit v1.2.1 From 7c69bbef24928d18cf6f9fd767d9cffece614b1a Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Fri, 27 Nov 2015 06:50:42 -0500 Subject: Actually, 4.1 --- coverage/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'coverage') diff --git a/coverage/version.py b/coverage/version.py index 34f5293..dc4c57c 100644 --- a/coverage/version.py +++ b/coverage/version.py @@ -5,7 +5,7 @@ # This file is exec'ed in setup.py, don't import anything! # Same semantics as sys.version_info. -version_info = (4, 0, 4, 'final', 0) +version_info = (4, 1, 0, 'alpha', 0) def _make_version(major, minor, micro, releaselevel, serial): -- cgit v1.2.1 From 5a3f3c5525a97e5da2220f7ba30275c7e464111b Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Sat, 28 Nov 2015 07:01:29 -0500 Subject: Clean up PythonParser a bit. --- coverage/parser.py | 86 ++++++++++++++++++++++++++++-------------------------- coverage/python.py | 11 ++----- 2 files changed, 47 insertions(+), 50 deletions(-) (limited to 'coverage') diff --git a/coverage/parser.py b/coverage/parser.py index a5e9623..111826d 100644 --- a/coverage/parser.py +++ b/coverage/parser.py @@ -12,7 +12,7 @@ import tokenize from coverage.backward import range # pylint: disable=redefined-builtin from coverage.backward import bytes_to_ints from coverage.bytecode import ByteCodes, CodeObjects -from coverage.misc import contract, nice_pair, expensive, join_regex +from coverage.misc import contract, nice_pair, join_regex from coverage.misc import CoverageException, NoSource, NotPython from coverage.phystokens import compile_unicode, generate_tokens @@ -42,25 +42,39 @@ class PythonParser(object): self.exclude = exclude - self.show_tokens = False - # The text lines of the parsed code. self.lines = self.text.split('\n') - # The line numbers of excluded lines of code. 
+ # The normalized line numbers of the statements in the code. Exclusions + # are taken into account, and statements are adjusted to their first + # lines. + self.statements = set() + + # The normalized line numbers of the excluded lines in the code, + # adjusted to their first lines. self.excluded = set() - # The line numbers of docstring lines. - self.docstrings = set() + # The raw_* attributes are only used in this class, and in + # lab/parser.py to show how this class is working. + + # The line numbers that start statements, as reported by the line + # number table in the bytecode. + self.raw_statements = set() + + # The raw line numbers of excluded lines of code, as marked by pragmas. + self.raw_excluded = set() # The line numbers of class definitions. - self.classdefs = set() + self.raw_classdefs = set() - # A dict mapping line numbers to (lo,hi) for multi-line statements. - self.multiline = {} + # The line numbers of docstring lines. + self.raw_docstrings = set() + + # Internal detail, used by lab/parser.py. + self.show_tokens = False - # The line numbers that start statements. - self.statement_starts = set() + # A dict mapping line numbers to (lo,hi) for multi-line statements. + self._multiline = {} # Lazily-created ByteParser and arc data. self._byte_parser = None @@ -91,12 +105,12 @@ class PythonParser(object): def _raw_parse(self): """Parse the source to find the interesting facts about its lines. - A handful of member fields are updated. + A handful of attributes are updated. """ # Find lines which match an exclusion pattern. if self.exclude: - self.excluded = self.lines_matching(self.exclude) + self.raw_excluded = self.lines_matching(self.exclude) # Tokenize, to find excluded suites, to find docstrings, and to find # multi-line statements. @@ -122,9 +136,9 @@ class PythonParser(object): # Class definitions look like branches in the byte code, so # we need to exclude them. The simplest way is to note the # lines with the 'class' keyword. 
- self.classdefs.add(slineno) + self.raw_classdefs.add(slineno) elif toktype == token.OP and ttext == ':': - if not excluding and elineno in self.excluded: + if not excluding and elineno in self.raw_excluded: # Start excluding a suite. We trigger off of the colon # token so that the #pragma comment will be recognized on # the same line as the colon. @@ -135,14 +149,14 @@ class PythonParser(object): # (a trick from trace.py in the stdlib.) This works for # 99.9999% of cases. For the rest (!) see: # http://stackoverflow.com/questions/1769332/x/1769794#1769794 - self.docstrings.update(range(slineno, elineno+1)) + self.raw_docstrings.update(range(slineno, elineno+1)) elif toktype == token.NEWLINE: if first_line is not None and elineno != first_line: # We're at the end of a line, and we've ended on a # different line than the first line of the statement, # so record a multi-line range. for l in range(first_line, elineno+1): - self.multiline[l] = first_line + self._multiline[l] = first_line first_line = None if ttext.strip() and toktype != tokenize.COMMENT: @@ -156,17 +170,17 @@ class PythonParser(object): if excluding and indent <= exclude_indent: excluding = False if excluding: - self.excluded.add(elineno) + self.raw_excluded.add(elineno) prev_toktype = toktype # Find the starts of the executable statements. 
if not empty: - self.statement_starts.update(self.byte_parser._find_statements()) + self.raw_statements.update(self.byte_parser._find_statements()) def first_line(self, line): """Return the first line number of the statement including `line`.""" - first_line = self.multiline.get(line) + first_line = self._multiline.get(line) if first_line: return first_line else: @@ -187,20 +201,13 @@ class PythonParser(object): def translate_arcs(self, arcs): """Implement `FileReporter.translate_arcs`.""" - return [ - (self.first_line(a), self.first_line(b)) - for (a, b) in arcs - ] + return [(self.first_line(a), self.first_line(b)) for (a, b) in arcs] - @expensive def parse_source(self): """Parse source text to find executable lines, excluded lines, etc. - Return values are 1) a set of executable line numbers, and 2) a set of - excluded line numbers. - - Reported line numbers are normalized to the first line of multi-line - statements. + Sets the .excluded and .statements attributes, normalized to the first + line of multi-line statements. """ try: @@ -216,15 +223,11 @@ class PythonParser(object): ) ) - excluded_lines = self.first_lines(self.excluded) - ignore = set() - ignore.update(excluded_lines) - ignore.update(self.docstrings) - starts = self.statement_starts - ignore - lines = self.first_lines(starts) - lines -= ignore + self.excluded = self.first_lines(self.raw_excluded) - return lines, excluded_lines + ignore = self.excluded | self.raw_docstrings + starts = self.raw_statements - ignore + self.statements = self.first_lines(starts) - ignore def arcs(self): """Get information about the arcs available in the code. @@ -248,22 +251,21 @@ class PythonParser(object): Excluded lines are excluded. """ - excluded_lines = self.first_lines(self.excluded) exit_counts = collections.defaultdict(int) for l1, l2 in self.arcs(): if l1 < 0: # Don't ever report -1 as a line number continue - if l1 in excluded_lines: + if l1 in self.excluded: # Don't report excluded lines as line numbers. 
continue - if l2 in excluded_lines: + if l2 in self.excluded: # Arcs to excluded lines shouldn't count. continue exit_counts[l1] += 1 # Class definitions have one extra exit, so remove one for each: - for l in self.classdefs: + for l in self.raw_classdefs: # Ensure key is there: class definitions can include excluded lines. if l in exit_counts: exit_counts[l] -= 1 diff --git a/coverage/python.py b/coverage/python.py index 4f58973..5e56382 100644 --- a/coverage/python.py +++ b/coverage/python.py @@ -130,21 +130,16 @@ class PythonFileReporter(FileReporter): filename=self.filename, exclude=self.coverage._exclude_regex('exclude'), ) + self._parser.parse_source() return self._parser - @expensive def lines(self): """Return the line numbers of statements in the file.""" - if self._statements is None: - self._statements, self._excluded = self.parser.parse_source() - return self._statements + return self.parser.statements - @expensive def excluded_lines(self): """Return the line numbers of statements in the file.""" - if self._excluded is None: - self._statements, self._excluded = self.parser.parse_source() - return self._excluded + return self.parser.excluded def translate_lines(self, lines): return self.parser.translate_lines(lines) -- cgit v1.2.1 From ed73d02203e0096a7fcec28506a23cc860c506f5 Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Sat, 28 Nov 2015 14:45:27 -0500 Subject: Pragmas on decorators apply to the entire function or class. 
#131 --- coverage/parser.py | 39 +++++++++++++++++++++++++++------------ 1 file changed, 27 insertions(+), 12 deletions(-) (limited to 'coverage') diff --git a/coverage/parser.py b/coverage/parser.py index 111826d..7b8a60f 100644 --- a/coverage/parser.py +++ b/coverage/parser.py @@ -117,9 +117,11 @@ class PythonParser(object): indent = 0 exclude_indent = 0 excluding = False + excluding_decorators = False prev_toktype = token.INDENT first_line = None empty = True + first_on_line = True tokgen = generate_tokens(self.text) for toktype, ttext, (slineno, _), (elineno, _), ltext in tokgen: @@ -132,18 +134,29 @@ class PythonParser(object): indent += 1 elif toktype == token.DEDENT: indent -= 1 - elif toktype == token.NAME and ttext == 'class': - # Class definitions look like branches in the byte code, so - # we need to exclude them. The simplest way is to note the - # lines with the 'class' keyword. - self.raw_classdefs.add(slineno) - elif toktype == token.OP and ttext == ':': - if not excluding and elineno in self.raw_excluded: - # Start excluding a suite. We trigger off of the colon - # token so that the #pragma comment will be recognized on - # the same line as the colon. - exclude_indent = indent - excluding = True + elif toktype == token.NAME: + if ttext == 'class': + # Class definitions look like branches in the byte code, so + # we need to exclude them. The simplest way is to note the + # lines with the 'class' keyword. + self.raw_classdefs.add(slineno) + elif toktype == token.OP: + if ttext == ':': + should_exclude = (elineno in self.raw_excluded) or excluding_decorators + if not excluding and should_exclude: + # Start excluding a suite. We trigger off of the colon + # token so that the #pragma comment will be recognized on + # the same line as the colon. + self.raw_excluded.add(elineno) + exclude_indent = indent + excluding = True + excluding_decorators = False + elif ttext == '@' and first_on_line: + # A decorator. 
+ if elineno in self.raw_excluded: + excluding_decorators = True + if excluding_decorators: + self.raw_excluded.add(elineno) elif toktype == token.STRING and prev_toktype == token.INDENT: # Strings that are first on an indented line are docstrings. # (a trick from trace.py in the stdlib.) This works for @@ -158,6 +171,7 @@ class PythonParser(object): for l in range(first_line, elineno+1): self._multiline[l] = first_line first_line = None + first_on_line = True if ttext.strip() and toktype != tokenize.COMMENT: # A non-whitespace token. @@ -171,6 +185,7 @@ class PythonParser(object): excluding = False if excluding: self.raw_excluded.add(elineno) + first_on_line = False prev_toktype = toktype -- cgit v1.2.1 From edf6154e98b6b59a7f30898bc99e19b1bb2ce87d Mon Sep 17 00:00:00 2001 From: Rodrigue Cloutier Date: Thu, 10 Dec 2015 10:22:06 -0500 Subject: Fix Windows support for multiprocessing monkey patch --- coverage/monkey.py | 42 ++++++++++++++++++++++-------------------- 1 file changed, 22 insertions(+), 20 deletions(-) (limited to 'coverage') diff --git a/coverage/monkey.py b/coverage/monkey.py index c4ec68c..b896dbf 100644 --- a/coverage/monkey.py +++ b/coverage/monkey.py @@ -11,6 +11,28 @@ import sys # monkey-patched. PATCHED_MARKER = "_coverage$patched" +if sys.version_info >= (3, 4): + + klass = multiprocessing.process.BaseProcess +else: + klass = multiprocessing.Process + +original_bootstrap = klass._bootstrap + + +class ProcessWithCoverage(klass): + """A replacement for multiprocess.Process that starts coverage.""" + def _bootstrap(self): + """Wrapper around _bootstrap to start coverage.""" + from coverage import Coverage + cov = Coverage(data_suffix=True) + cov.start() + try: + return original_bootstrap(self) + finally: + cov.stop() + cov.save() + def patch_multiprocessing(): """Monkey-patch the multiprocessing module. 
@@ -22,26 +44,6 @@ def patch_multiprocessing(): if hasattr(multiprocessing, PATCHED_MARKER): return - if sys.version_info >= (3, 4): - klass = multiprocessing.process.BaseProcess - else: - klass = multiprocessing.Process - - original_bootstrap = klass._bootstrap - - class ProcessWithCoverage(klass): - """A replacement for multiprocess.Process that starts coverage.""" - def _bootstrap(self): - """Wrapper around _bootstrap to start coverage.""" - from coverage import Coverage - cov = Coverage(data_suffix=True) - cov.start() - try: - return original_bootstrap(self) - finally: - cov.stop() - cov.save() - if sys.version_info >= (3, 4): klass._bootstrap = ProcessWithCoverage._bootstrap else: -- cgit v1.2.1 From f7ca62f80d91f4f207a0b9f5675aa251be3dfff1 Mon Sep 17 00:00:00 2001 From: Max Linke Date: Mon, 14 Dec 2015 11:27:12 +0100 Subject: Fix error with double occurrence of encoding declaration If a file sets the encoding using both vim and emacs style we can't compile the source-code. This commit ensures that both occurrences are always removed before we compile the source-code.
--- coverage/phystokens.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'coverage') diff --git a/coverage/phystokens.py b/coverage/phystokens.py index b34b1c3..5aa3402 100644 --- a/coverage/phystokens.py +++ b/coverage/phystokens.py @@ -291,5 +291,5 @@ def compile_unicode(source, filename, mode): @contract(source='unicode', returns='unicode') def neuter_encoding_declaration(source): """Return `source`, with any encoding declaration neutered.""" - source = COOKIE_RE.sub("# (deleted declaration)", source, count=1) + source = COOKIE_RE.sub("# (deleted declaration)", source, count=2) return source -- cgit v1.2.1 From fbe278c8bc9f3355a23dd68ad53a8a0201004f0b Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Thu, 24 Dec 2015 08:49:50 -0500 Subject: WIP: measure branches with ast instead of bytecode --- coverage/parser.py | 230 +++++++++++++++++++++++++++++++++++++++++++++++++++- coverage/python.py | 4 + coverage/results.py | 5 ++ 3 files changed, 237 insertions(+), 2 deletions(-) (limited to 'coverage') diff --git a/coverage/parser.py b/coverage/parser.py index 7b8a60f..fb2cf95 100644 --- a/coverage/parser.py +++ b/coverage/parser.py @@ -3,6 +3,7 @@ """Code parsing for coverage.py.""" +import ast import collections import dis import re @@ -260,6 +261,18 @@ class PythonParser(object): self._all_arcs.add((fl1, fl2)) return self._all_arcs + def ast_arcs(self): + aaa = AstArcAnalyzer(self.text) + arcs = aaa.collect_arcs() + + arcs_ = set() + for l1, l2 in arcs: + fl1 = self.first_line(l1) + fl2 = self.first_line(l2) + if fl1 != fl2: + arcs_.add((fl1, fl2)) + return arcs_ + def exit_counts(self): """Get a count of exits from that each line. @@ -288,6 +301,168 @@ class PythonParser(object): return exit_counts +class AstArcAnalyzer(object): + def __init__(self, text): + self.root_node = ast.parse(text) + ast_dump(self.root_node) + + self.arcs = None + # References to the nearest enclosing thing of its kind. 
+ self.function_start = None + self.loop_start = None + + # Break-exits from a loop + self.break_exits = None + + def line_for_node(self, node): + """What is the right line number to use for this node?""" + node_name = node.__class__.__name__ + if node_name == "Assign": + return node.value.lineno + elif node_name == "comprehension": + # TODO: is this how to get the line number for a comprehension? + return node.target.lineno + else: + return node.lineno + + def collect_arcs(self): + self.arcs = set() + self.add_arcs_for_code_objects(self.root_node) + return self.arcs + + def add_arcs(self, node): + """add the arcs for `node`. + + Return a set of line numbers, exits from this node to the next. + """ + node_name = node.__class__.__name__ + #print("Adding arcs for {}".format(node_name)) + + handler = getattr(self, "handle_" + node_name, self.handle_default) + return handler(node) + + def add_body_arcs(self, body, from_line): + prev_lines = set([from_line]) + for body_node in body: + lineno = self.line_for_node(body_node) + for prev_lineno in prev_lines: + self.arcs.add((prev_lineno, lineno)) + prev_lines = self.add_arcs(body_node) + return prev_lines + + def is_constant_expr(self, node): + """Is this a compile-time constant?""" + node_name = node.__class__.__name__ + return node_name in ["NameConstant", "Num"] + + # tests to write: + # TODO: while EXPR: + # TODO: while False: + # TODO: multi-target assignment with computed targets + # TODO: listcomps hidden deep in other expressions + # TODO: listcomps hidden in lists: x = [[i for i in range(10)]] + # TODO: multi-line listcomps + # TODO: nested function definitions + + def handle_Break(self, node): + here = self.line_for_node(node) + # TODO: what if self.break_exits is None? + self.break_exits.add(here) + return set([]) + + def handle_Continue(self, node): + here = self.line_for_node(node) + # TODO: what if self.loop_start is None? 
+ self.arcs.add((here, self.loop_start)) + return set([]) + + def handle_For(self, node): + start = self.line_for_node(node.iter) + loop_state = self.loop_start, self.break_exits + self.loop_start = start + self.break_exits = set() + exits = self.add_body_arcs(node.body, from_line=start) + for exit in exits: + self.arcs.add((exit, start)) + exits = self.break_exits + self.loop_start, self.break_exits = loop_state + if node.orelse: + else_start = self.line_for_node(node.orelse[0]) + self.arcs.add((start, else_start)) + else_exits = self.add_body_arcs(node.orelse, from_line=start) + exits |= else_exits + else: + # no else clause: exit from the for line. + exits.add(start) + return exits + + def handle_FunctionDef(self, node): + start = self.line_for_node(node) + # the body is handled in add_arcs_for_code_objects. + exits = set([start]) + return exits + + def handle_If(self, node): + start = self.line_for_node(node.test) + exits = self.add_body_arcs(node.body, from_line=start) + exits |= self.add_body_arcs(node.orelse, from_line=start) + return exits + + def handle_Module(self, node): + raise Exception("TODO: this shouldn't happen") + + def handle_Return(self, node): + here = self.line_for_node(node) + # TODO: what if self.function_start is None? 
+ self.arcs.add((here, -self.function_start)) + return set([]) + + def handle_While(self, node): + constant_test = self.is_constant_expr(node.test) + start = to_top = self.line_for_node(node.test) + if constant_test: + to_top = self.line_for_node(node.body[0]) + loop_state = self.loop_start, self.break_exits + self.loop_start = start + self.break_exits = set() + exits = self.add_body_arcs(node.body, from_line=start) + for exit in exits: + self.arcs.add((exit, to_top)) + exits = self.break_exits + self.loop_start, self.break_exits = loop_state + # TODO: orelse + return exits + + def handle_default(self, node): + node_name = node.__class__.__name__ + if node_name not in ["Assign", "Assert", "AugAssign", "Expr"]: + print("*** Unhandled: {}".format(node)) + return set([self.line_for_node(node)]) + + def add_arcs_for_code_objects(self, root_node): + for node in ast.walk(root_node): + node_name = node.__class__.__name__ + if node_name == "Module": + start = self.line_for_node(node.body[0]) + exits = self.add_body_arcs(node.body, from_line=-1) + for exit in exits: + self.arcs.add((exit, -start)) + elif node_name == "FunctionDef": + start = self.line_for_node(node) + self.function_start = start + func_exits = self.add_body_arcs(node.body, from_line=-1) + for exit in func_exits: + self.arcs.add((exit, -start)) + self.function_start = None + elif node_name == "comprehension": + start = self.line_for_node(node) + self.arcs.add((-1, start)) + self.arcs.add((start, -start)) + # TODO: guaranteed this won't work for multi-line comps. + + + + ## Opcodes that guide the ByteParser. def _opcode(name): @@ -321,7 +496,7 @@ OPS_CHUNK_BEGIN = _opcode_set('JUMP_ABSOLUTE', 'JUMP_FORWARD') # Opcodes that push a block on the block stack. OPS_PUSH_BLOCK = _opcode_set( - 'SETUP_LOOP', 'SETUP_EXCEPT', 'SETUP_FINALLY', 'SETUP_WITH' + 'SETUP_LOOP', 'SETUP_EXCEPT', 'SETUP_FINALLY', 'SETUP_WITH', 'SETUP_ASYNC_WITH', ) # Block types for exception handling. 
@@ -330,6 +505,8 @@ OPS_EXCEPT_BLOCKS = _opcode_set('SETUP_EXCEPT', 'SETUP_FINALLY') # Opcodes that pop a block from the block stack. OPS_POP_BLOCK = _opcode_set('POP_BLOCK') +OPS_GET_AITER = _opcode_set('GET_AITER') + # Opcodes that have a jump destination, but aren't really a jump. OPS_NO_JUMP = OPS_PUSH_BLOCK @@ -449,6 +626,8 @@ class ByteParser(object): # is a count of how many ignores are left. ignore_branch = 0 + ignore_pop_block = 0 + # We have to handle the last two bytecodes specially. ult = penult = None @@ -507,7 +686,10 @@ class ByteParser(object): block_stack.append((bc.op, bc.jump_to)) if bc.op in OPS_POP_BLOCK: # The opcode pops a block from the block stack. - block_stack.pop() + if ignore_pop_block: + ignore_pop_block -= 1 + else: + block_stack.pop() if bc.op in OPS_CHUNK_END: # This opcode forces the end of the chunk. if bc.op == OP_BREAK_LOOP: @@ -527,6 +709,15 @@ class ByteParser(object): # branch, so that except's don't count as branches. ignore_branch += 1 + if bc.op in OPS_GET_AITER: + # GET_AITER is weird: First, it seems to generate one more + # POP_BLOCK than SETUP_*, so we have to prepare to ignore one + # of the POP_BLOCKS. Second, we don't have a clear branch to + # the exit of the loop, so we peek into the block stack to find + # it. 
+ ignore_pop_block += 1 + chunk.exits.add(block_stack[-1][1]) + penult = ult ult = bc @@ -686,3 +877,38 @@ class Chunk(object): "v" if self.entrance else "", list(self.exits), ) + + +SKIP_FIELDS = ["ctx"] + +def ast_dump(node, depth=0): + indent = " " * depth + lineno = getattr(node, "lineno", None) + if lineno is not None: + linemark = " @ {0}".format(lineno) + else: + linemark = "" + print("{0}<{1}{2}".format(indent, node.__class__.__name__, linemark)) + + indent += " " + for field_name, value in ast.iter_fields(node): + if field_name in SKIP_FIELDS: + continue + prefix = "{0}{1}:".format(indent, field_name) + if value is None: + print("{0} None".format(prefix)) + elif isinstance(value, (str, int)): + print("{0} {1!r}".format(prefix, value)) + elif isinstance(value, list): + if value == []: + print("{0} []".format(prefix)) + else: + print("{0} [".format(prefix)) + for n in value: + ast_dump(n, depth + 8) + print("{0}]".format(indent)) + else: + print(prefix) + ast_dump(value, depth + 8) + + print("{0}>".format(" " * depth)) diff --git a/coverage/python.py b/coverage/python.py index 5e56382..bf19cb2 100644 --- a/coverage/python.py +++ b/coverage/python.py @@ -159,6 +159,10 @@ class PythonFileReporter(FileReporter): def arcs(self): return self.parser.arcs() + @expensive + def ast_arcs(self): + return self.parser.ast_arcs() + @expensive def exit_counts(self): return self.parser.exit_counts() diff --git a/coverage/results.py b/coverage/results.py index 9627373..b80d504 100644 --- a/coverage/results.py +++ b/coverage/results.py @@ -26,6 +26,7 @@ class Analysis(object): if self.data.has_arcs(): self._arc_possibilities = sorted(self.file_reporter.arcs()) + self._ast_arc_possibilities = sorted(self.file_reporter.ast_arcs()) self.exit_counts = self.file_reporter.exit_counts() self.no_branch = self.file_reporter.no_branch_lines() n_branches = self.total_branches() @@ -36,6 +37,7 @@ class Analysis(object): n_missing_branches = sum(len(v) for k,v in iitems(mba)) else: 
self._arc_possibilities = [] + self._ast_arc_possibilities = [] self.exit_counts = {} self.no_branch = set() n_branches = n_partial_branches = n_missing_branches = 0 @@ -66,6 +68,9 @@ class Analysis(object): """Returns a sorted list of the arcs in the code.""" return self._arc_possibilities + def ast_arc_possibilities(self): + return self._ast_arc_possibilities + def arcs_executed(self): """Returns a sorted list of the arcs actually executed in the code.""" executed = self.data.arcs(self.filename) or [] -- cgit v1.2.1 From 112f0747f59452f59c282dfab2aec41d185ec7a9 Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Thu, 24 Dec 2015 11:11:37 -0500 Subject: Non-ascii characters work again in config regexes. Fixes #455. --- coverage/parser.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) (limited to 'coverage') diff --git a/coverage/parser.py b/coverage/parser.py index 7b8a60f..884d40c 100644 --- a/coverage/parser.py +++ b/coverage/parser.py @@ -9,6 +9,7 @@ import re import token import tokenize +from coverage import env from coverage.backward import range # pylint: disable=redefined-builtin from coverage.backward import bytes_to_ints from coverage.bytecode import ByteCodes, CodeObjects @@ -95,7 +96,10 @@ class PythonParser(object): part of it. 
""" - regex_c = re.compile(join_regex(regexes)) + combined = join_regex(regexes) + if env.PY2: + combined = combined.decode("utf8") + regex_c = re.compile(combined) matches = set() for i, ltext in enumerate(self.lines, start=1): if regex_c.search(ltext): -- cgit v1.2.1 From aa6dd62bb2c67f5e1d536133503f164895abee03 Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Thu, 24 Dec 2015 19:46:00 -0500 Subject: A start on try/except/finally --- coverage/parser.py | 24 ++++++++++++++++++++++-- 1 file changed, 22 insertions(+), 2 deletions(-) (limited to 'coverage') diff --git a/coverage/parser.py b/coverage/parser.py index fb2cf95..4b920f1 100644 --- a/coverage/parser.py +++ b/coverage/parser.py @@ -341,8 +341,9 @@ class AstArcAnalyzer(object): handler = getattr(self, "handle_" + node_name, self.handle_default) return handler(node) - def add_body_arcs(self, body, from_line): - prev_lines = set([from_line]) + def add_body_arcs(self, body, from_line=None, prev_lines=None): + if prev_lines is None: + prev_lines = set([from_line]) for body_node in body: lineno = self.line_for_node(body_node) for prev_lineno in prev_lines: @@ -363,6 +364,7 @@ class AstArcAnalyzer(object): # TODO: listcomps hidden in lists: x = [[i for i in range(10)]] # TODO: multi-line listcomps # TODO: nested function definitions + # TODO: multiple `except` clauses def handle_Break(self, node): here = self.line_for_node(node) @@ -411,12 +413,30 @@ class AstArcAnalyzer(object): def handle_Module(self, node): raise Exception("TODO: this shouldn't happen") + def handle_Raise(self, node): + # `raise` statement jumps away, no exits from here. + return set([]) + def handle_Return(self, node): here = self.line_for_node(node) # TODO: what if self.function_start is None? 
self.arcs.add((here, -self.function_start)) return set([]) + def handle_Try(self, node): + start = self.line_for_node(node) + exits = self.add_body_arcs(node.body, from_line=start) + handler_exits = set() + for handler_node in node.handlers: + handler_start = self.line_for_node(handler_node) + # TODO: handler_node.name and handler_node.type + handler_exits |= self.add_body_arcs(handler_node.body, from_line=handler_start) + # TODO: node.orelse + # TODO: node.finalbody + if node.finalbody: + exits = self.add_body_arcs(node.finalbody, prev_lines=exits|handler_exits) + return exits + def handle_While(self, node): constant_test = self.is_constant_expr(node.test) start = to_top = self.line_for_node(node.test) -- cgit v1.2.1 From a3af46810ab5297910870ccd4313420420c2c7d6 Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Mon, 28 Dec 2015 16:48:05 -0500 Subject: Execution flows from the end of exception handlers to the finally --- coverage/parser.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) (limited to 'coverage') diff --git a/coverage/parser.py b/coverage/parser.py index 4b920f1..65b1f0f 100644 --- a/coverage/parser.py +++ b/coverage/parser.py @@ -432,9 +432,9 @@ class AstArcAnalyzer(object): # TODO: handler_node.name and handler_node.type handler_exits |= self.add_body_arcs(handler_node.body, from_line=handler_start) # TODO: node.orelse - # TODO: node.finalbody + exits |= handler_exits if node.finalbody: - exits = self.add_body_arcs(node.finalbody, prev_lines=exits|handler_exits) + exits = self.add_body_arcs(node.finalbody, prev_lines=exits) return exits def handle_While(self, node): -- cgit v1.2.1 From ccd54266dfd8e0b5f39212d9ae77674e7b5bea6b Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Thu, 31 Dec 2015 15:39:30 -0500 Subject: Exception tests pass on py3 --- coverage/parser.py | 143 +++++++++++++++++++++++++++++++++++++++++----------- coverage/results.py | 5 -- 2 files changed, 113 insertions(+), 35 deletions(-) (limited to 'coverage') diff --git 
a/coverage/parser.py b/coverage/parser.py index 65b1f0f..ff2d2be 100644 --- a/coverage/parser.py +++ b/coverage/parser.py @@ -11,7 +11,7 @@ import token import tokenize from coverage.backward import range # pylint: disable=redefined-builtin -from coverage.backward import bytes_to_ints +from coverage.backward import bytes_to_ints, string_class from coverage.bytecode import ByteCodes, CodeObjects from coverage.misc import contract, nice_pair, join_regex from coverage.misc import CoverageException, NoSource, NotPython @@ -245,7 +245,7 @@ class PythonParser(object): starts = self.raw_statements - ignore self.statements = self.first_lines(starts) - ignore - def arcs(self): + def old_arcs(self): """Get information about the arcs available in the code. Returns a set of line number pairs. Line numbers have been normalized @@ -261,7 +261,7 @@ class PythonParser(object): self._all_arcs.add((fl1, fl2)) return self._all_arcs - def ast_arcs(self): + def arcs(self): aaa = AstArcAnalyzer(self.text) arcs = aaa.collect_arcs() @@ -301,18 +301,36 @@ class PythonParser(object): return exit_counts +class LoopBlock(object): + def __init__(self, start): + self.start = start + self.break_exits = set() + +class FunctionBlock(object): + def __init__(self, start): + self.start = start + +class TryBlock(object): + def __init__(self, handler_start=None, final_start=None): + self.handler_start = handler_start # TODO: is this used? + self.final_start = final_start # TODO: is this used? + self.break_from = set([]) + self.continue_from = set([]) + self.return_from = set([]) + self.raise_from = set([]) + + class AstArcAnalyzer(object): def __init__(self, text): self.root_node = ast.parse(text) - ast_dump(self.root_node) + #ast_dump(self.root_node) self.arcs = None - # References to the nearest enclosing thing of its kind. 
- self.function_start = None - self.loop_start = None + self.block_stack = [] - # Break-exits from a loop - self.break_exits = None + def blocks(self): + """Yield the blocks in nearest-to-farthest order.""" + return reversed(self.block_stack) def line_for_node(self, node): """What is the right line number to use for this node?""" @@ -366,28 +384,70 @@ class AstArcAnalyzer(object): # TODO: nested function definitions # TODO: multiple `except` clauses + def process_break_exits(self, exits): + for block in self.blocks(): + if isinstance(block, LoopBlock): + # TODO: what if there is no loop? + block.break_exits.update(exits) + break + elif isinstance(block, TryBlock) and block.final_start: + block.break_from.update(exits) + break + + def process_continue_exits(self, exits): + for block in self.blocks(): + if isinstance(block, LoopBlock): + # TODO: what if there is no loop? + for exit in exits: + self.arcs.add((exit, block.start)) + break + elif isinstance(block, TryBlock) and block.final_start: + block.continue_from.update(exits) + break + + def process_raise_exits(self, exits): + for block in self.blocks(): + if isinstance(block, TryBlock): + if block.handler_start: + for exit in exits: + self.arcs.add((exit, block.handler_start)) + break + elif block.final_start: + block.raise_from.update(exits) + break + elif isinstance(block, FunctionBlock): + for exit in exits: + self.arcs.add((exit, -block.start)) + break + + def process_return_exits(self, exits): + for block in self.blocks(): + if isinstance(block, FunctionBlock): + # TODO: what if there is no enclosing function? + for exit in exits: + self.arcs.add((exit, -block.start)) + break + + ## Handlers + def handle_Break(self, node): here = self.line_for_node(node) - # TODO: what if self.break_exits is None? - self.break_exits.add(here) + self.process_break_exits([here]) return set([]) def handle_Continue(self, node): here = self.line_for_node(node) - # TODO: what if self.loop_start is None? 
- self.arcs.add((here, self.loop_start)) + self.process_continue_exits([here]) return set([]) def handle_For(self, node): start = self.line_for_node(node.iter) - loop_state = self.loop_start, self.break_exits - self.loop_start = start - self.break_exits = set() + self.block_stack.append(LoopBlock(start=start)) exits = self.add_body_arcs(node.body, from_line=start) for exit in exits: self.arcs.add((exit, start)) - exits = self.break_exits - self.loop_start, self.break_exits = loop_state + my_block = self.block_stack.pop() + exits = my_block.break_exits if node.orelse: else_start = self.line_for_node(node.orelse[0]) self.arcs.add((start, else_start)) @@ -415,15 +475,29 @@ class AstArcAnalyzer(object): def handle_Raise(self, node): # `raise` statement jumps away, no exits from here. + here = self.line_for_node(node) + self.process_raise_exits([here]) return set([]) def handle_Return(self, node): + # TODO: deal with returning through a finally. here = self.line_for_node(node) - # TODO: what if self.function_start is None? - self.arcs.add((here, -self.function_start)) + self.process_return_exits([here]) return set([]) def handle_Try(self, node): + # try/finally is tricky. If there's a finally clause, then we need a + # FinallyBlock to track what flows might go through the finally instead + # of their normal flow. 
+ if node.handlers: + handler_start = self.line_for_node(node.handlers[0]) + else: + handler_start = None + if node.finalbody: + final_start = self.line_for_node(node.finalbody[0]) + else: + final_start = None + self.block_stack.append(TryBlock(handler_start=handler_start, final_start=final_start)) start = self.line_for_node(node) exits = self.add_body_arcs(node.body, from_line=start) handler_exits = set() @@ -434,7 +508,17 @@ class AstArcAnalyzer(object): # TODO: node.orelse exits |= handler_exits if node.finalbody: - exits = self.add_body_arcs(node.finalbody, prev_lines=exits) + final_block = self.block_stack.pop() + final_from = exits | final_block.break_from | final_block.continue_from | final_block.raise_from | final_block.return_from + exits = self.add_body_arcs(node.finalbody, prev_lines=final_from) + if final_block.break_from: + self.process_break_exits(exits) + if final_block.continue_from: + self.process_continue_exits(exits) + if final_block.raise_from: + self.process_raise_exits(exits) + if final_block.return_from: + self.process_return_exits(exits) return exits def handle_While(self, node): @@ -442,20 +526,19 @@ class AstArcAnalyzer(object): start = to_top = self.line_for_node(node.test) if constant_test: to_top = self.line_for_node(node.body[0]) - loop_state = self.loop_start, self.break_exits - self.loop_start = start - self.break_exits = set() + self.block_stack.append(LoopBlock(start=start)) exits = self.add_body_arcs(node.body, from_line=start) for exit in exits: self.arcs.add((exit, to_top)) - exits = self.break_exits - self.loop_start, self.break_exits = loop_state + # TODO: while loop that finishes? 
+ my_block = self.block_stack.pop() + exits = my_block.break_exits # TODO: orelse return exits def handle_default(self, node): node_name = node.__class__.__name__ - if node_name not in ["Assign", "Assert", "AugAssign", "Expr"]: + if node_name not in ["Assign", "Assert", "AugAssign", "Expr", "Pass"]: print("*** Unhandled: {}".format(node)) return set([self.line_for_node(node)]) @@ -469,11 +552,11 @@ class AstArcAnalyzer(object): self.arcs.add((exit, -start)) elif node_name == "FunctionDef": start = self.line_for_node(node) - self.function_start = start + self.block_stack.append(FunctionBlock(start=start)) func_exits = self.add_body_arcs(node.body, from_line=-1) + self.block_stack.pop() for exit in func_exits: self.arcs.add((exit, -start)) - self.function_start = None elif node_name == "comprehension": start = self.line_for_node(node) self.arcs.add((-1, start)) @@ -917,7 +1000,7 @@ def ast_dump(node, depth=0): prefix = "{0}{1}:".format(indent, field_name) if value is None: print("{0} None".format(prefix)) - elif isinstance(value, (str, int)): + elif isinstance(value, (string_class, int, float)): print("{0} {1!r}".format(prefix, value)) elif isinstance(value, list): if value == []: diff --git a/coverage/results.py b/coverage/results.py index b80d504..9627373 100644 --- a/coverage/results.py +++ b/coverage/results.py @@ -26,7 +26,6 @@ class Analysis(object): if self.data.has_arcs(): self._arc_possibilities = sorted(self.file_reporter.arcs()) - self._ast_arc_possibilities = sorted(self.file_reporter.ast_arcs()) self.exit_counts = self.file_reporter.exit_counts() self.no_branch = self.file_reporter.no_branch_lines() n_branches = self.total_branches() @@ -37,7 +36,6 @@ class Analysis(object): n_missing_branches = sum(len(v) for k,v in iitems(mba)) else: self._arc_possibilities = [] - self._ast_arc_possibilities = [] self.exit_counts = {} self.no_branch = set() n_branches = n_partial_branches = n_missing_branches = 0 @@ -68,9 +66,6 @@ class Analysis(object): """Returns a 
sorted list of the arcs in the code.""" return self._arc_possibilities - def ast_arc_possibilities(self): - return self._ast_arc_possibilities - def arcs_executed(self): """Returns a sorted list of the arcs actually executed in the code.""" executed = self.data.arcs(self.filename) or [] -- cgit v1.2.1 From 3d35e3d6c2939774a50fb8d8afddf03c4235af70 Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Thu, 31 Dec 2015 16:20:09 -0500 Subject: Make other comprehensions work on py2 and py3 --- coverage/parser.py | 20 ++++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) (limited to 'coverage') diff --git a/coverage/parser.py b/coverage/parser.py index ff2d2be..36fa729 100644 --- a/coverage/parser.py +++ b/coverage/parser.py @@ -10,6 +10,7 @@ import re import token import tokenize +from coverage import env from coverage.backward import range # pylint: disable=redefined-builtin from coverage.backward import bytes_to_ints, string_class from coverage.bytecode import ByteCodes, CodeObjects @@ -323,7 +324,7 @@ class TryBlock(object): class AstArcAnalyzer(object): def __init__(self, text): self.root_node = ast.parse(text) - #ast_dump(self.root_node) + ast_dump(self.root_node) self.arcs = None self.block_stack = [] @@ -542,6 +543,10 @@ class AstArcAnalyzer(object): print("*** Unhandled: {}".format(node)) return set([self.line_for_node(node)]) + CODE_COMPREHENSIONS = set(["GeneratorExp", "DictComp", "SetComp"]) + if env.PY3: + CODE_COMPREHENSIONS.add("ListComp") + def add_arcs_for_code_objects(self, root_node): for node in ast.walk(root_node): node_name = node.__class__.__name__ @@ -557,13 +562,12 @@ class AstArcAnalyzer(object): self.block_stack.pop() for exit in func_exits: self.arcs.add((exit, -start)) - elif node_name == "comprehension": - start = self.line_for_node(node) - self.arcs.add((-1, start)) - self.arcs.add((start, -start)) - # TODO: guaranteed this won't work for multi-line comps. 
- - + elif node_name in self.CODE_COMPREHENSIONS: + for gen in node.generators: + start = self.line_for_node(gen) + self.arcs.add((-1, start)) + self.arcs.add((start, -start)) + # TODO: guaranteed this won't work for multi-line comps. ## Opcodes that guide the ByteParser. -- cgit v1.2.1 From ab092fe7ef5ba28f26901e37e78a1b4c111a296c Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Thu, 31 Dec 2015 16:39:17 -0500 Subject: Support exception arcs on py2, where the ast still has separate TryExcept and TryFinally nodes --- coverage/parser.py | 44 +++++++++++++++++++++++++++----------------- 1 file changed, 27 insertions(+), 17 deletions(-) (limited to 'coverage') diff --git a/coverage/parser.py b/coverage/parser.py index 36fa729..d8b0bee 100644 --- a/coverage/parser.py +++ b/coverage/parser.py @@ -487,38 +487,48 @@ class AstArcAnalyzer(object): return set([]) def handle_Try(self, node): + return self.try_work(node, node.body, node.handlers, node.orelse, node.finalbody) + + def handle_TryExcept(self, node): + return self.try_work(node, node.body, node.handlers, node.orelse, None) + + def handle_TryFinally(self, node): + return self.try_work(node, node.body, None, None, node.finalbody) + + def try_work(self, node, body, handlers, orelse, finalbody): # try/finally is tricky. If there's a finally clause, then we need a # FinallyBlock to track what flows might go through the finally instead # of their normal flow. 
- if node.handlers: - handler_start = self.line_for_node(node.handlers[0]) + if handlers: + handler_start = self.line_for_node(handlers[0]) else: handler_start = None - if node.finalbody: - final_start = self.line_for_node(node.finalbody[0]) + if finalbody: + final_start = self.line_for_node(finalbody[0]) else: final_start = None self.block_stack.append(TryBlock(handler_start=handler_start, final_start=final_start)) start = self.line_for_node(node) - exits = self.add_body_arcs(node.body, from_line=start) + exits = self.add_body_arcs(body, from_line=start) + try_block = self.block_stack.pop() handler_exits = set() - for handler_node in node.handlers: - handler_start = self.line_for_node(handler_node) - # TODO: handler_node.name and handler_node.type - handler_exits |= self.add_body_arcs(handler_node.body, from_line=handler_start) + if handlers: + for handler_node in handlers: + handler_start = self.line_for_node(handler_node) + # TODO: handler_node.name and handler_node.type + handler_exits |= self.add_body_arcs(handler_node.body, from_line=handler_start) # TODO: node.orelse exits |= handler_exits - if node.finalbody: - final_block = self.block_stack.pop() - final_from = exits | final_block.break_from | final_block.continue_from | final_block.raise_from | final_block.return_from - exits = self.add_body_arcs(node.finalbody, prev_lines=final_from) - if final_block.break_from: + if finalbody: + final_from = exits | try_block.break_from | try_block.continue_from | try_block.raise_from | try_block.return_from + exits = self.add_body_arcs(finalbody, prev_lines=final_from) + if try_block.break_from: self.process_break_exits(exits) - if final_block.continue_from: + if try_block.continue_from: self.process_continue_exits(exits) - if final_block.raise_from: + if try_block.raise_from: self.process_raise_exits(exits) - if final_block.return_from: + if try_block.return_from: self.process_return_exits(exits) return exits -- cgit v1.2.1 From 
6946a970d0ba787a5d90ec7e62197d1eea120edd Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Thu, 31 Dec 2015 18:24:36 -0500 Subject: Support classdef and some async keywords --- coverage/parser.py | 58 ++++++++++++++++++++++++++++++++++++++---------------- 1 file changed, 41 insertions(+), 17 deletions(-) (limited to 'coverage') diff --git a/coverage/parser.py b/coverage/parser.py index d8b0bee..d599bef 100644 --- a/coverage/parser.py +++ b/coverage/parser.py @@ -6,6 +6,7 @@ import ast import collections import dis +import os import re import token import tokenize @@ -315,16 +316,17 @@ class TryBlock(object): def __init__(self, handler_start=None, final_start=None): self.handler_start = handler_start # TODO: is this used? self.final_start = final_start # TODO: is this used? - self.break_from = set([]) - self.continue_from = set([]) - self.return_from = set([]) - self.raise_from = set([]) + self.break_from = set() + self.continue_from = set() + self.return_from = set() + self.raise_from = set() class AstArcAnalyzer(object): def __init__(self, text): self.root_node = ast.parse(text) - ast_dump(self.root_node) + if int(os.environ.get("COVERAGE_ASTDUMP", 0)): + ast_dump(self.root_node) self.arcs = None self.block_stack = [] @@ -434,12 +436,17 @@ class AstArcAnalyzer(object): def handle_Break(self, node): here = self.line_for_node(node) self.process_break_exits([here]) - return set([]) + return set() + + def handle_ClassDef(self, node): + start = self.line_for_node(node) + # the body is handled in add_arcs_for_code_objects. 
+ return set([start]) def handle_Continue(self, node): here = self.line_for_node(node) self.process_continue_exits([here]) - return set([]) + return set() def handle_For(self, node): start = self.line_for_node(node.iter) @@ -459,11 +466,14 @@ class AstArcAnalyzer(object): exits.add(start) return exits + handle_AsyncFor = handle_For + def handle_FunctionDef(self, node): start = self.line_for_node(node) # the body is handled in add_arcs_for_code_objects. - exits = set([start]) - return exits + return set([start]) + + handle_AsyncFunctionDef = handle_FunctionDef def handle_If(self, node): start = self.line_for_node(node.test) @@ -478,13 +488,13 @@ class AstArcAnalyzer(object): # `raise` statement jumps away, no exits from here. here = self.line_for_node(node) self.process_raise_exits([here]) - return set([]) + return set() def handle_Return(self, node): # TODO: deal with returning through a finally. here = self.line_for_node(node) self.process_return_exits([here]) - return set([]) + return set() def handle_Try(self, node): return self.try_work(node, node.body, node.handlers, node.orelse, node.finalbody) @@ -541,15 +551,17 @@ class AstArcAnalyzer(object): exits = self.add_body_arcs(node.body, from_line=start) for exit in exits: self.arcs.add((exit, to_top)) - # TODO: while loop that finishes? 
+ exits = set() + if not constant_test: + exits.add(start) my_block = self.block_stack.pop() - exits = my_block.break_exits + exits.update(my_block.break_exits) # TODO: orelse return exits def handle_default(self, node): node_name = node.__class__.__name__ - if node_name not in ["Assign", "Assert", "AugAssign", "Expr", "Pass"]: + if node_name not in ["Assign", "Assert", "AugAssign", "Expr", "Import", "Pass", "Print"]: print("*** Unhandled: {}".format(node)) return set([self.line_for_node(node)]) @@ -565,19 +577,31 @@ class AstArcAnalyzer(object): exits = self.add_body_arcs(node.body, from_line=-1) for exit in exits: self.arcs.add((exit, -start)) - elif node_name == "FunctionDef": + elif node_name in ["FunctionDef", "AsyncFunctionDef"]: start = self.line_for_node(node) self.block_stack.append(FunctionBlock(start=start)) - func_exits = self.add_body_arcs(node.body, from_line=-1) + exits = self.add_body_arcs(node.body, from_line=-1) self.block_stack.pop() - for exit in func_exits: + for exit in exits: + self.arcs.add((exit, -start)) + elif node_name == "ClassDef": + start = self.line_for_node(node) + self.arcs.add((-1, start)) + exits = self.add_body_arcs(node.body, from_line=start) + for exit in exits: self.arcs.add((exit, -start)) elif node_name in self.CODE_COMPREHENSIONS: + # TODO: tests for when generators is more than one? for gen in node.generators: start = self.line_for_node(gen) self.arcs.add((-1, start)) self.arcs.add((start, -start)) # TODO: guaranteed this won't work for multi-line comps. + elif node_name == "Lambda": + start = self.line_for_node(node) + self.arcs.add((-1, start)) + self.arcs.add((start, -start)) + # TODO: test multi-line lambdas ## Opcodes that guide the ByteParser. 
-- cgit v1.2.1 From fc795063a718138dfc3d6900adb3db0957f130a7 Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Fri, 1 Jan 2016 10:53:45 -0500 Subject: Support 'with' --- coverage/parser.py | 26 +++++++++++++++++--------- 1 file changed, 17 insertions(+), 9 deletions(-) (limited to 'coverage') diff --git a/coverage/parser.py b/coverage/parser.py index d599bef..a5e12d3 100644 --- a/coverage/parser.py +++ b/coverage/parser.py @@ -264,16 +264,17 @@ class PythonParser(object): return self._all_arcs def arcs(self): - aaa = AstArcAnalyzer(self.text) - arcs = aaa.collect_arcs() + if self._all_arcs is None: + aaa = AstArcAnalyzer(self.text) + arcs = aaa.collect_arcs() - arcs_ = set() - for l1, l2 in arcs: - fl1 = self.first_line(l1) - fl2 = self.first_line(l2) - if fl1 != fl2: - arcs_.add((fl1, fl2)) - return arcs_ + self._all_arcs = set() + for l1, l2 in arcs: + fl1 = self.first_line(l1) + fl2 = self.first_line(l2) + if fl1 != fl2: + self._all_arcs.add((fl1, fl2)) + return self._all_arcs def exit_counts(self): """Get a count of exits from that each line. 
@@ -559,6 +560,13 @@ class AstArcAnalyzer(object): # TODO: orelse return exits + def handle_With(self, node): + start = self.line_for_node(node) + exits = self.add_body_arcs(node.body, from_line=start) + return exits + + handle_AsyncWith = handle_With + def handle_default(self, node): node_name = node.__class__.__name__ if node_name not in ["Assign", "Assert", "AugAssign", "Expr", "Import", "Pass", "Print"]: -- cgit v1.2.1 From 4b51265510020834db47ac6cf9ad5314ff16419f Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Fri, 1 Jan 2016 12:18:57 -0500 Subject: All test_arcs.py tests pass on py27 and py35 --- coverage/parser.py | 30 +++++++++++++++++++++--------- 1 file changed, 21 insertions(+), 9 deletions(-) (limited to 'coverage') diff --git a/coverage/parser.py b/coverage/parser.py index a5e12d3..b261892 100644 --- a/coverage/parser.py +++ b/coverage/parser.py @@ -339,13 +339,27 @@ class AstArcAnalyzer(object): def line_for_node(self, node): """What is the right line number to use for this node?""" node_name = node.__class__.__name__ - if node_name == "Assign": - return node.value.lineno - elif node_name == "comprehension": - # TODO: is this how to get the line number for a comprehension? - return node.target.lineno - else: - return node.lineno + handler = getattr(self, "line_" + node_name, self.line_default) + return handler(node) + + def line_Assign(self, node): + return self.line_for_node(node.value) + + def line_Dict(self, node): + # Python 3.5 changed how dict literals are made. + if env.PYVERSION >= (3, 5): + return node.keys[0].lineno + return node.lineno + + def line_List(self, node): + return self.line_for_node(node.elts[0]) + + def line_comprehension(self, node): + # TODO: is this how to get the line number for a comprehension? 
+ return node.target.lineno + + def line_default(self, node): + return node.lineno def collect_arcs(self): self.arcs = set() @@ -358,8 +372,6 @@ class AstArcAnalyzer(object): Return a set of line numbers, exits from this node to the next. """ node_name = node.__class__.__name__ - #print("Adding arcs for {}".format(node_name)) - handler = getattr(self, "handle_" + node_name, self.handle_default) return handler(node) -- cgit v1.2.1 From 062fbe6d70dd451ae6ada9fde7b596e4aa8b5c26 Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Fri, 1 Jan 2016 13:38:06 -0500 Subject: test_arcs now passes for all Python versions --- coverage/parser.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) (limited to 'coverage') diff --git a/coverage/parser.py b/coverage/parser.py index b261892..3348092 100644 --- a/coverage/parser.py +++ b/coverage/parser.py @@ -388,7 +388,12 @@ class AstArcAnalyzer(object): def is_constant_expr(self, node): """Is this a compile-time constant?""" node_name = node.__class__.__name__ - return node_name in ["NameConstant", "Num"] + if node_name in ["NameConstant", "Num"]: + return True + elif node_name == "Name": + if env.PY3 and node.id in ["True", "False", "None"]: + return True + return False # tests to write: # TODO: while EXPR: -- cgit v1.2.1 From 529ef9857c37ab12ad10b35aa67e8fccb3d707d4 Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Fri, 1 Jan 2016 16:10:50 -0500 Subject: check_coverage now assumes empty missing and unpredicted, and uses branch always --- coverage/parser.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) (limited to 'coverage') diff --git a/coverage/parser.py b/coverage/parser.py index 3348092..2396fb8 100644 --- a/coverage/parser.py +++ b/coverage/parser.py @@ -347,7 +347,7 @@ class AstArcAnalyzer(object): def line_Dict(self, node): # Python 3.5 changed how dict literals are made. 
- if env.PYVERSION >= (3, 5): + if env.PYVERSION >= (3, 5) and node.keys: return node.keys[0].lineno return node.lineno @@ -587,7 +587,7 @@ class AstArcAnalyzer(object): def handle_default(self, node): node_name = node.__class__.__name__ if node_name not in ["Assign", "Assert", "AugAssign", "Expr", "Import", "Pass", "Print"]: - print("*** Unhandled: {}".format(node)) + print("*** Unhandled: {0}".format(node)) return set([self.line_for_node(node)]) CODE_COMPREHENSIONS = set(["GeneratorExp", "DictComp", "SetComp"]) @@ -1049,6 +1049,10 @@ SKIP_FIELDS = ["ctx"] def ast_dump(node, depth=0): indent = " " * depth + if not isinstance(node, ast.AST): + print("{0}<{1} {2!r}>".format(indent, node.__class__.__name__, node)) + return + lineno = getattr(node, "lineno", None) if lineno is not None: linemark = " @ {0}".format(lineno) -- cgit v1.2.1 From baf18bed45cbd943f379f9ca4e7747fb607552c8 Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Sat, 2 Jan 2016 10:18:04 -0500 Subject: Handle yield-from and await. All tests pass --- coverage/parser.py | 88 +++++++++++++++++++++++++++++++++--------------- coverage/test_helpers.py | 12 +++---- 2 files changed, 66 insertions(+), 34 deletions(-) (limited to 'coverage') diff --git a/coverage/parser.py b/coverage/parser.py index 2396fb8..0462802 100644 --- a/coverage/parser.py +++ b/coverage/parser.py @@ -327,11 +327,17 @@ class AstArcAnalyzer(object): def __init__(self, text): self.root_node = ast.parse(text) if int(os.environ.get("COVERAGE_ASTDUMP", 0)): + # Dump the AST so that failing tests have helpful output. 
ast_dump(self.root_node) self.arcs = None self.block_stack = [] + def collect_arcs(self): + self.arcs = set() + self.add_arcs_for_code_objects(self.root_node) + return self.arcs + def blocks(self): """Yield the blocks in nearest-to-farthest order.""" return reversed(self.block_stack) @@ -361,16 +367,19 @@ class AstArcAnalyzer(object): def line_default(self, node): return node.lineno - def collect_arcs(self): - self.arcs = set() - self.add_arcs_for_code_objects(self.root_node) - return self.arcs - def add_arcs(self, node): - """add the arcs for `node`. + """Add the arcs for `node`. Return a set of line numbers, exits from this node to the next. """ + # Yield-froms and awaits can appear anywhere. + # TODO: this is probably over-doing it, and too expensive. Can we + # instrument the ast walking to see how many nodes we are revisiting? + if isinstance(node, ast.stmt): + for name, value in ast.iter_fields(node): + if isinstance(value, ast.expr) and self.contains_return_expression(value): + self.process_return_exits([self.line_for_node(node)]) + break node_name = node.__class__.__name__ handler = getattr(self, "handle_" + node_name, self.handle_default) return handler(node) @@ -404,6 +413,7 @@ class AstArcAnalyzer(object): # TODO: multi-line listcomps # TODO: nested function definitions # TODO: multiple `except` clauses + # TODO: return->finally def process_break_exits(self, exits): for block in self.blocks(): @@ -443,6 +453,7 @@ class AstArcAnalyzer(object): def process_return_exits(self, exits): for block in self.blocks(): + # TODO: need a check here for TryBlock if isinstance(block, FunctionBlock): # TODO: what if there is no enclosing function? for exit in exits: @@ -587,6 +598,7 @@ class AstArcAnalyzer(object): def handle_default(self, node): node_name = node.__class__.__name__ if node_name not in ["Assign", "Assert", "AugAssign", "Expr", "Import", "Pass", "Print"]: + # TODO: put 1/0 here to find unhandled nodes. 
print("*** Unhandled: {0}".format(node)) return set([self.line_for_node(node)]) @@ -628,6 +640,14 @@ class AstArcAnalyzer(object): self.arcs.add((start, -start)) # TODO: test multi-line lambdas + def contains_return_expression(self, node): + """Is there a yield-from or await in `node` someplace?""" + for child in ast.walk(node): + if child.__class__.__name__ in ["YieldFrom", "Await"]: + return True + + return False + ## Opcodes that guide the ByteParser. @@ -1045,7 +1065,13 @@ class Chunk(object): ) -SKIP_FIELDS = ["ctx"] +SKIP_DUMP_FIELDS = ["ctx"] + +def is_simple_value(value): + return ( + value in [None, [], (), {}, set()] or + isinstance(value, (string_class, int, float)) + ) def ast_dump(node, depth=0): indent = " " * depth @@ -1055,30 +1081,36 @@ def ast_dump(node, depth=0): lineno = getattr(node, "lineno", None) if lineno is not None: - linemark = " @ {0}".format(lineno) + linemark = " @ {0}".format(node.lineno) else: linemark = "" - print("{0}<{1}{2}".format(indent, node.__class__.__name__, linemark)) - - indent += " " - for field_name, value in ast.iter_fields(node): - if field_name in SKIP_FIELDS: - continue - prefix = "{0}{1}:".format(indent, field_name) - if value is None: - print("{0} None".format(prefix)) - elif isinstance(value, (string_class, int, float)): - print("{0} {1!r}".format(prefix, value)) - elif isinstance(value, list): - if value == []: - print("{0} []".format(prefix)) - else: + head = "{0}<{1}{2}".format(indent, node.__class__.__name__, linemark) + + named_fields = [ + (name, value) + for name, value in ast.iter_fields(node) + if name not in SKIP_DUMP_FIELDS + ] + if not named_fields: + print("{0}>".format(head)) + elif len(named_fields) == 1 and is_simple_value(named_fields[0][1]): + field_name, value = named_fields[0] + print("{0} {1}: {2!r}>".format(head, field_name, value)) + else: + print(head) + print("{0}# mro: {1}".format(indent, ", ".join(c.__name__ for c in node.__class__.__mro__[1:]))) + next_indent = indent + " " + for 
field_name, value in named_fields: + prefix = "{0}{1}:".format(next_indent, field_name) + if is_simple_value(value): + print("{0} {1!r}".format(prefix, value)) + elif isinstance(value, list): print("{0} [".format(prefix)) for n in value: ast_dump(n, depth + 8) - print("{0}]".format(indent)) - else: - print(prefix) - ast_dump(value, depth + 8) + print("{0}]".format(next_indent)) + else: + print(prefix) + ast_dump(value, depth + 8) - print("{0}>".format(" " * depth)) + print("{0}>".format(indent)) diff --git a/coverage/test_helpers.py b/coverage/test_helpers.py index 50cc329..092daa0 100644 --- a/coverage/test_helpers.py +++ b/coverage/test_helpers.py @@ -162,20 +162,20 @@ class StdStreamCapturingMixin(TestCase): # nose keeps stdout from littering the screen, so we can safely Tee it, # but it doesn't capture stderr, so we don't want to Tee stderr to the # real stderr, since it will interfere with our nice field of dots. - self.old_stdout = sys.stdout + old_stdout = sys.stdout self.captured_stdout = StringIO() sys.stdout = Tee(sys.stdout, self.captured_stdout) - self.old_stderr = sys.stderr + old_stderr = sys.stderr self.captured_stderr = StringIO() sys.stderr = self.captured_stderr - self.addCleanup(self.cleanup_std_streams) + self.addCleanup(self.cleanup_std_streams, old_stdout, old_stderr) - def cleanup_std_streams(self): + def cleanup_std_streams(self, old_stdout, old_stderr): """Restore stdout and stderr.""" - sys.stdout = self.old_stdout - sys.stderr = self.old_stderr + sys.stdout = old_stdout + sys.stderr = old_stderr def stdout(self): """Return the data written to stdout during the test.""" -- cgit v1.2.1 From 8a23aafc03000fd9e500b1898b7c211e605e2176 Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Sat, 2 Jan 2016 11:09:10 -0500 Subject: Deal with a few more cases the test suite didn't turn up --- coverage/parser.py | 26 ++++++++++++++++++++++---- 1 file changed, 22 insertions(+), 4 deletions(-) (limited to 'coverage') diff --git a/coverage/parser.py 
b/coverage/parser.py index 0462802..fc631fc 100644 --- a/coverage/parser.py +++ b/coverage/parser.py @@ -355,10 +355,23 @@ class AstArcAnalyzer(object): # Python 3.5 changed how dict literals are made. if env.PYVERSION >= (3, 5) and node.keys: return node.keys[0].lineno - return node.lineno + else: + return node.lineno def line_List(self, node): - return self.line_for_node(node.elts[0]) + if node.elts: + return self.line_for_node(node.elts[0]) + else: + # TODO: test case for this branch: x = [] + return node.lineno + + def line_Module(self, node): + if node.body: + return self.line_for_node(node.body[0]) + else: + # Modules have no line number, they always start at 1. + # TODO: test case for empty module. + return 1 def line_comprehension(self, node): # TODO: is this how to get the line number for a comprehension? @@ -595,9 +608,14 @@ class AstArcAnalyzer(object): handle_AsyncWith = handle_With + OK_TO_DEFAULT = set([ + "Assign", "Assert", "AugAssign", "Delete", "Exec", "Expr", "Global", + "Import", "ImportFrom", "Pass", "Print", + ]) + def handle_default(self, node): node_name = node.__class__.__name__ - if node_name not in ["Assign", "Assert", "AugAssign", "Expr", "Import", "Pass", "Print"]: + if node_name not in self.OK_TO_DEFAULT: # TODO: put 1/0 here to find unhandled nodes. 
print("*** Unhandled: {0}".format(node)) return set([self.line_for_node(node)]) @@ -610,7 +628,7 @@ class AstArcAnalyzer(object): for node in ast.walk(root_node): node_name = node.__class__.__name__ if node_name == "Module": - start = self.line_for_node(node.body[0]) + start = self.line_for_node(node) exits = self.add_body_arcs(node.body, from_line=-1) for exit in exits: self.arcs.add((exit, -start)) -- cgit v1.2.1 From f50c3af027401c326f3d107288705e9743692f11 Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Sat, 2 Jan 2016 11:19:45 -0500 Subject: Coding declarations are a pain in the ass --- coverage/parser.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) (limited to 'coverage') diff --git a/coverage/parser.py b/coverage/parser.py index fc631fc..262a78e 100644 --- a/coverage/parser.py +++ b/coverage/parser.py @@ -17,7 +17,7 @@ from coverage.backward import bytes_to_ints, string_class from coverage.bytecode import ByteCodes, CodeObjects from coverage.misc import contract, nice_pair, join_regex from coverage.misc import CoverageException, NoSource, NotPython -from coverage.phystokens import compile_unicode, generate_tokens +from coverage.phystokens import compile_unicode, generate_tokens, neuter_encoding_declaration class PythonParser(object): @@ -324,8 +324,9 @@ class TryBlock(object): class AstArcAnalyzer(object): + @contract(text='unicode') def __init__(self, text): - self.root_node = ast.parse(text) + self.root_node = ast.parse(neuter_encoding_declaration(text)) if int(os.environ.get("COVERAGE_ASTDUMP", 0)): # Dump the AST so that failing tests have helpful output. 
ast_dump(self.root_node) -- cgit v1.2.1 From cb6157226f382622598dd29ab99bd61a176fa666 Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Sat, 2 Jan 2016 14:30:28 -0500 Subject: Better exception support, include except-except arcs, and except-else --- coverage/parser.py | 81 ++++++++++++++++++++++++++++++++++++++++-------------- 1 file changed, 60 insertions(+), 21 deletions(-) (limited to 'coverage') diff --git a/coverage/parser.py b/coverage/parser.py index 262a78e..44cb155 100644 --- a/coverage/parser.py +++ b/coverage/parser.py @@ -540,41 +540,56 @@ class AstArcAnalyzer(object): return set() def handle_Try(self, node): - return self.try_work(node, node.body, node.handlers, node.orelse, node.finalbody) - - def handle_TryExcept(self, node): - return self.try_work(node, node.body, node.handlers, node.orelse, None) - - def handle_TryFinally(self, node): - return self.try_work(node, node.body, None, None, node.finalbody) - - def try_work(self, node, body, handlers, orelse, finalbody): # try/finally is tricky. If there's a finally clause, then we need a # FinallyBlock to track what flows might go through the finally instead # of their normal flow. 
- if handlers: - handler_start = self.line_for_node(handlers[0]) + if node.handlers: + handler_start = self.line_for_node(node.handlers[0]) else: handler_start = None - if finalbody: - final_start = self.line_for_node(finalbody[0]) + + if node.finalbody: + final_start = self.line_for_node(node.finalbody[0]) else: final_start = None + self.block_stack.append(TryBlock(handler_start=handler_start, final_start=final_start)) + start = self.line_for_node(node) - exits = self.add_body_arcs(body, from_line=start) + exits = self.add_body_arcs(node.body, from_line=start) + try_block = self.block_stack.pop() handler_exits = set() - if handlers: - for handler_node in handlers: + last_handler_start = None + if node.handlers: + for handler_node in node.handlers: handler_start = self.line_for_node(handler_node) - # TODO: handler_node.name and handler_node.type + if last_handler_start is not None: + self.arcs.add((last_handler_start, handler_start)) + last_handler_start = handler_start handler_exits |= self.add_body_arcs(handler_node.body, from_line=handler_start) - # TODO: node.orelse + if handler_node.type is None: + # "except:" doesn't jump to subsequent handlers, or + # "finally:". + last_handler_start = None + # TODO: should we break here? Handlers after "except:" + # won't be run. Should coverage know that code can't be + # run, or should it flag it as not run? 
+ + if node.orelse: + exits = self.add_body_arcs(node.orelse, prev_lines=exits) + exits |= handler_exits - if finalbody: - final_from = exits | try_block.break_from | try_block.continue_from | try_block.raise_from | try_block.return_from - exits = self.add_body_arcs(finalbody, prev_lines=final_from) + if node.finalbody: + final_from = exits | try_block.break_from | try_block.continue_from | try_block.return_from + if node.handlers and last_handler_start is not None: + # If there was an "except X:" clause, then a "raise" in the + # body goes to the "except X:" before the "finally", but the + # "except" go to the finally. + final_from.add(last_handler_start) + else: + final_from |= try_block.raise_from + exits = self.add_body_arcs(node.finalbody, prev_lines=final_from) if try_block.break_from: self.process_break_exits(exits) if try_block.continue_from: @@ -585,6 +600,30 @@ class AstArcAnalyzer(object): self.process_return_exits(exits) return exits + def handle_TryExcept(self, node): + # Python 2.7 uses separate TryExcept and TryFinally nodes. If we get + # TryExcept, it means there was no finally, so fake it, and treat as + # a general Try node. + node.finalbody = [] + return self.handle_Try(node) + + def handle_TryFinally(self, node): + # Python 2.7 uses separate TryExcept and TryFinally nodes. If we get + # TryFinally, see if there's a TryExcept nested inside. If so, merge + # them. Otherwise, fake fields to complete a Try node. 
+ node.handlers = [] + node.orelse = [] + + if node.body: + first = node.body[0] + if first.__class__.__name__ == "TryExcept" and node.lineno == first.lineno: + assert len(node.body) == 1 + node.body = first.body + node.handlers = first.handlers + node.orelse = first.orelse + + return self.handle_Try(node) + def handle_While(self, node): constant_test = self.is_constant_expr(node.test) start = to_top = self.line_for_node(node.test) -- cgit v1.2.1 From 255afeb3314da3ad388ac7a3330dd3f94eae0d99 Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Sat, 2 Jan 2016 16:14:35 -0500 Subject: Support returning through a finally --- coverage/parser.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) (limited to 'coverage') diff --git a/coverage/parser.py b/coverage/parser.py index 44cb155..d85f0b5 100644 --- a/coverage/parser.py +++ b/coverage/parser.py @@ -426,8 +426,6 @@ class AstArcAnalyzer(object): # TODO: listcomps hidden in lists: x = [[i for i in range(10)]] # TODO: multi-line listcomps # TODO: nested function definitions - # TODO: multiple `except` clauses - # TODO: return->finally def process_break_exits(self, exits): for block in self.blocks(): @@ -467,8 +465,10 @@ class AstArcAnalyzer(object): def process_return_exits(self, exits): for block in self.blocks(): - # TODO: need a check here for TryBlock - if isinstance(block, FunctionBlock): + if isinstance(block, TryBlock) and block.final_start: + block.return_from.update(exits) + break + elif isinstance(block, FunctionBlock): # TODO: what if there is no enclosing function? 
for exit in exits: self.arcs.add((exit, -block.start)) -- cgit v1.2.1 From 58578ed08fc32ae2b9c9f287ad017b64862b7915 Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Sun, 3 Jan 2016 11:08:06 -0500 Subject: Fix arcs for function and class decorators --- coverage/parser.py | 43 +++++++++++++++++++++++++++++++++---------- 1 file changed, 33 insertions(+), 10 deletions(-) (limited to 'coverage') diff --git a/coverage/parser.py b/coverage/parser.py index c11bc22..d3fbad8 100644 --- a/coverage/parser.py +++ b/coverage/parser.py @@ -67,8 +67,9 @@ class PythonParser(object): # The raw line numbers of excluded lines of code, as marked by pragmas. self.raw_excluded = set() - # The line numbers of class definitions. + # The line numbers of class and function definitions. self.raw_classdefs = set() + self.raw_funcdefs = set() # The line numbers of docstring lines. self.raw_docstrings = set() @@ -146,6 +147,8 @@ class PythonParser(object): # we need to exclude them. The simplest way is to note the # lines with the 'class' keyword. self.raw_classdefs.add(slineno) + elif ttext == 'def': + self.raw_funcdefs.add(slineno) elif toktype == token.OP: if ttext == ':': should_exclude = (elineno in self.raw_excluded) or excluding_decorators @@ -268,7 +271,7 @@ class PythonParser(object): def arcs(self): if self._all_arcs is None: - aaa = AstArcAnalyzer(self.text) + aaa = AstArcAnalyzer(self.text, self.raw_funcdefs, self.raw_classdefs) arcs = aaa.collect_arcs() self._all_arcs = set() @@ -327,9 +330,12 @@ class TryBlock(object): class AstArcAnalyzer(object): - @contract(text='unicode') - def __init__(self, text): + @contract(text='unicode', funcdefs=set, classdefs=set) + def __init__(self, text, funcdefs, classdefs): self.root_node = ast.parse(neuter_encoding_declaration(text)) + self.funcdefs = funcdefs + self.classdefs = classdefs + if int(os.environ.get("COVERAGE_ASTDUMP", 0)): # Dump the AST so that failing tests have helpful output. 
ast_dump(self.root_node) @@ -485,9 +491,25 @@ class AstArcAnalyzer(object): return set() def handle_ClassDef(self, node): - start = self.line_for_node(node) + return self.do_decorated(node, self.classdefs) + + def do_decorated(self, node, defs): + first = last = self.line_for_node(node) + if node.decorator_list: + for dec_node in node.decorator_list: + dec_start = self.line_for_node(dec_node) + if dec_start != last: + self.arcs.add((last, dec_start)) + last = dec_start + # The definition line may have been missed, but we should have it in + # `defs`. + body_start = self.line_for_node(node.body[0]) + for lineno in range(last+1, body_start): + if lineno in defs: + self.arcs.add((last, lineno)) + last = lineno # the body is handled in add_arcs_for_code_objects. - return set([start]) + return set([last]) def handle_Continue(self, node): here = self.line_for_node(node) @@ -515,9 +537,7 @@ class AstArcAnalyzer(object): handle_AsyncFor = handle_For def handle_FunctionDef(self, node): - start = self.line_for_node(node) - # the body is handled in add_arcs_for_code_objects. 
- return set([start]) + return self.do_decorated(node, self.funcdefs) handle_AsyncFunctionDef = handle_FunctionDef @@ -1159,7 +1179,10 @@ def ast_dump(node, depth=0): print("{0} {1}: {2!r}>".format(head, field_name, value)) else: print(head) - print("{0}# mro: {1}".format(indent, ", ".join(c.__name__ for c in node.__class__.__mro__[1:]))) + if 0: + print("{0}# mro: {1}".format( + indent, ", ".join(c.__name__ for c in node.__class__.__mro__[1:]), + )) next_indent = indent + " " for field_name, value in named_fields: prefix = "{0}{1}:".format(next_indent, field_name) -- cgit v1.2.1 From b15c34008760903049f673ce87addd5f6a27022d Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Sun, 3 Jan 2016 12:31:58 -0500 Subject: Clean up some lint --- coverage/parser.py | 56 ++++++++++++++++++++++++++++-------------------------- 1 file changed, 29 insertions(+), 27 deletions(-) (limited to 'coverage') diff --git a/coverage/parser.py b/coverage/parser.py index d3fbad8..39e23d2 100644 --- a/coverage/parser.py +++ b/coverage/parser.py @@ -399,7 +399,7 @@ class AstArcAnalyzer(object): # TODO: this is probably over-doing it, and too expensive. Can we # instrument the ast walking to see how many nodes we are revisiting? if isinstance(node, ast.stmt): - for name, value in ast.iter_fields(node): + for _, value in ast.iter_fields(node): if isinstance(value, ast.expr) and self.contains_return_expression(value): self.process_return_exits([self.line_for_node(node)]) break @@ -450,8 +450,8 @@ class AstArcAnalyzer(object): for block in self.blocks(): if isinstance(block, LoopBlock): # TODO: what if there is no loop? 
- for exit in exits: - self.arcs.add((exit, block.start)) + for xit in exits: + self.arcs.add((xit, block.start)) break elif isinstance(block, TryBlock) and block.final_start: block.continue_from.update(exits) @@ -461,15 +461,15 @@ class AstArcAnalyzer(object): for block in self.blocks(): if isinstance(block, TryBlock): if block.handler_start: - for exit in exits: - self.arcs.add((exit, block.handler_start)) + for xit in exits: + self.arcs.add((xit, block.handler_start)) break elif block.final_start: block.raise_from.update(exits) break elif isinstance(block, FunctionBlock): - for exit in exits: - self.arcs.add((exit, -block.start)) + for xit in exits: + self.arcs.add((xit, -block.start)) break def process_return_exits(self, exits): @@ -479,8 +479,8 @@ class AstArcAnalyzer(object): break elif isinstance(block, FunctionBlock): # TODO: what if there is no enclosing function? - for exit in exits: - self.arcs.add((exit, -block.start)) + for xit in exits: + self.arcs.add((xit, -block.start)) break ## Handlers @@ -491,10 +491,10 @@ class AstArcAnalyzer(object): return set() def handle_ClassDef(self, node): - return self.do_decorated(node, self.classdefs) + return self.process_decorated(node, self.classdefs) - def do_decorated(self, node, defs): - first = last = self.line_for_node(node) + def process_decorated(self, node, defs): + last = self.line_for_node(node) if node.decorator_list: for dec_node in node.decorator_list: dec_start = self.line_for_node(dec_node) @@ -520,8 +520,8 @@ class AstArcAnalyzer(object): start = self.line_for_node(node.iter) self.block_stack.append(LoopBlock(start=start)) exits = self.add_body_arcs(node.body, from_line=start) - for exit in exits: - self.arcs.add((exit, start)) + for xit in exits: + self.arcs.add((xit, start)) my_block = self.block_stack.pop() exits = my_block.break_exits if node.orelse: @@ -537,7 +537,7 @@ class AstArcAnalyzer(object): handle_AsyncFor = handle_For def handle_FunctionDef(self, node): - return self.do_decorated(node, 
self.funcdefs) + return self.process_decorated(node, self.funcdefs) handle_AsyncFunctionDef = handle_FunctionDef @@ -547,9 +547,6 @@ class AstArcAnalyzer(object): exits |= self.add_body_arcs(node.orelse, from_line=start) return exits - def handle_Module(self, node): - raise Exception("TODO: this shouldn't happen") - def handle_Raise(self, node): # `raise` statement jumps away, no exits from here. here = self.line_for_node(node) @@ -604,7 +601,12 @@ class AstArcAnalyzer(object): exits |= handler_exits if node.finalbody: - final_from = exits | try_block.break_from | try_block.continue_from | try_block.return_from + final_from = ( # You can get to the `finally` clause from: + exits | # the exits of the body or `else` clause, + try_block.break_from | # or a `break` in the body, + try_block.continue_from | # or a `continue` in the body, + try_block.return_from # or a `return` in the body. + ) if node.handlers and last_handler_start is not None: # If there was an "except X:" clause, then a "raise" in the # body goes to the "except X:" before the "finally", but the @@ -654,8 +656,8 @@ class AstArcAnalyzer(object): to_top = self.line_for_node(node.body[0]) self.block_stack.append(LoopBlock(start=start)) exits = self.add_body_arcs(node.body, from_line=start) - for exit in exits: - self.arcs.add((exit, to_top)) + for xit in exits: + self.arcs.add((xit, to_top)) exits = set() if not constant_test: exits.add(start) @@ -693,21 +695,21 @@ class AstArcAnalyzer(object): if node_name == "Module": start = self.line_for_node(node) exits = self.add_body_arcs(node.body, from_line=-1) - for exit in exits: - self.arcs.add((exit, -start)) + for xit in exits: + self.arcs.add((xit, -start)) elif node_name in ["FunctionDef", "AsyncFunctionDef"]: start = self.line_for_node(node) self.block_stack.append(FunctionBlock(start=start)) exits = self.add_body_arcs(node.body, from_line=-1) self.block_stack.pop() - for exit in exits: - self.arcs.add((exit, -start)) + for xit in exits: + 
self.arcs.add((xit, -start)) elif node_name == "ClassDef": start = self.line_for_node(node) self.arcs.add((-1, start)) exits = self.add_body_arcs(node.body, from_line=start) - for exit in exits: - self.arcs.add((exit, -start)) + for xit in exits: + self.arcs.add((xit, -start)) elif node_name in self.CODE_COMPREHENSIONS: # TODO: tests for when generators is more than one? for gen in node.generators: -- cgit v1.2.1 From 5c131d28792a41113539f9fe1397738b7958bbfd Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Mon, 4 Jan 2016 07:18:08 -0500 Subject: Add a delayed_assertions context manager --- coverage/test_helpers.py | 49 ++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 49 insertions(+) (limited to 'coverage') diff --git a/coverage/test_helpers.py b/coverage/test_helpers.py index 092daa0..84e2f1c 100644 --- a/coverage/test_helpers.py +++ b/coverage/test_helpers.py @@ -186,6 +186,55 @@ class StdStreamCapturingMixin(TestCase): return self.captured_stderr.getvalue() +class DelayedAssertionMixin(TestCase): + """A test case mixin that provides a `delayed_assertions` context manager. + + Use it like this:: + + with self.delayed_assertions(): + self.assertEqual(x, y) + self.assertEqual(z, w) + + All of the assertions will run. The failures will be displayed at the end + of the with-statement. + + NOTE: only works with some assert methods, I'm not sure which! + + """ + def __init__(self, *args, **kwargs): + super(DelayedAssertionMixin, self).__init__(*args, **kwargs) + # This mixin only works with assert methods that call `self.fail`. In + # Python 2.7, `assertEqual` didn't, but we can do what Python 3 does, + # and use `assertMultiLineEqual` for comparing strings. 
+ self.addTypeEqualityFunc(str, 'assertMultiLineEqual') + self._delayed_assertions = None + + @contextlib.contextmanager + def delayed_assertions(self): + """The context manager: assert that we didn't collect any assertions.""" + self._delayed_assertions = [] + old_fail = self.fail + self.fail = self._delayed_fail + try: + yield + finally: + self.fail = old_fail + if self._delayed_assertions: + if len(self._delayed_assertions) == 1: + self.fail(self._delayed_assertions[0]) + else: + self.fail( + "{} failed assertions:\n{}".format( + len(self._delayed_assertions), + "\n".join(self._delayed_assertions), + ) + ) + + def _delayed_fail(self, msg=None): + """The stand-in for TestCase.fail during delayed_assertions.""" + self._delayed_assertions.append(msg) + + class TempDirMixin(SysPathAwareMixin, ModuleAwareMixin, TestCase): """A test case mixin that creates a temp directory and files in it. -- cgit v1.2.1 From 97563bd21e4a93246d26ad18c5085dd00abb1076 Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Mon, 4 Jan 2016 07:21:03 -0500 Subject: Fix 2.6, as usual --- coverage/test_helpers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'coverage') diff --git a/coverage/test_helpers.py b/coverage/test_helpers.py index 84e2f1c..1d606aa 100644 --- a/coverage/test_helpers.py +++ b/coverage/test_helpers.py @@ -224,7 +224,7 @@ class DelayedAssertionMixin(TestCase): self.fail(self._delayed_assertions[0]) else: self.fail( - "{} failed assertions:\n{}".format( + "{0} failed assertions:\n{1}".format( len(self._delayed_assertions), "\n".join(self._delayed_assertions), ) -- cgit v1.2.1 From 66fb1397390db71b62a58aef77911cf354ee597d Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Mon, 4 Jan 2016 09:31:55 -0500 Subject: Clarify when delayed_assertions is known to work. 
--- coverage/test_helpers.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) (limited to 'coverage') diff --git a/coverage/test_helpers.py b/coverage/test_helpers.py index 1d606aa..a76bed3 100644 --- a/coverage/test_helpers.py +++ b/coverage/test_helpers.py @@ -198,7 +198,11 @@ class DelayedAssertionMixin(TestCase): All of the assertions will run. The failures will be displayed at the end of the with-statement. - NOTE: only works with some assert methods, I'm not sure which! + NOTE: this only works with some assertions. These are known to work: + + - `assertEqual(str, str)` + + - `assertMultilineEqual(str, str)` """ def __init__(self, *args, **kwargs): -- cgit v1.2.1 From 688488a7574cf872574c24d699acf2f869b1c786 Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Mon, 4 Jan 2016 20:12:55 -0500 Subject: Support comprehensions better --- coverage/parser.py | 15 +++------------ 1 file changed, 3 insertions(+), 12 deletions(-) (limited to 'coverage') diff --git a/coverage/parser.py b/coverage/parser.py index 39e23d2..c680f63 100644 --- a/coverage/parser.py +++ b/coverage/parser.py @@ -383,10 +383,6 @@ class AstArcAnalyzer(object): # TODO: test case for empty module. return 1 - def line_comprehension(self, node): - # TODO: is this how to get the line number for a comprehension? - return node.target.lineno - def line_default(self, node): return node.lineno @@ -433,7 +429,6 @@ class AstArcAnalyzer(object): # TODO: multi-target assignment with computed targets # TODO: listcomps hidden deep in other expressions # TODO: listcomps hidden in lists: x = [[i for i in range(10)]] - # TODO: multi-line listcomps # TODO: nested function definitions def process_break_exits(self, exits): @@ -554,7 +549,6 @@ class AstArcAnalyzer(object): return set() def handle_Return(self, node): - # TODO: deal with returning through a finally. 
here = self.line_for_node(node) self.process_return_exits([here]) return set() @@ -711,12 +705,9 @@ class AstArcAnalyzer(object): for xit in exits: self.arcs.add((xit, -start)) elif node_name in self.CODE_COMPREHENSIONS: - # TODO: tests for when generators is more than one? - for gen in node.generators: - start = self.line_for_node(gen) - self.arcs.add((-1, start)) - self.arcs.add((start, -start)) - # TODO: guaranteed this won't work for multi-line comps. + start = self.line_for_node(node) + self.arcs.add((-1, start)) + self.arcs.add((start, -start)) elif node_name == "Lambda": start = self.line_for_node(node) self.arcs.add((-1, start)) -- cgit v1.2.1 From 452f6f60c89119f1de1254ab9c5f4e25ac2f073d Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Tue, 5 Jan 2016 06:54:07 -0500 Subject: Support while-else --- coverage/parser.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) (limited to 'coverage') diff --git a/coverage/parser.py b/coverage/parser.py index c680f63..b0e7371 100644 --- a/coverage/parser.py +++ b/coverage/parser.py @@ -520,8 +520,6 @@ class AstArcAnalyzer(object): my_block = self.block_stack.pop() exits = my_block.break_exits if node.orelse: - else_start = self.line_for_node(node.orelse[0]) - self.arcs.add((start, else_start)) else_exits = self.add_body_arcs(node.orelse, from_line=start) exits |= else_exits else: @@ -653,11 +651,15 @@ class AstArcAnalyzer(object): for xit in exits: self.arcs.add((xit, to_top)) exits = set() - if not constant_test: - exits.add(start) my_block = self.block_stack.pop() exits.update(my_block.break_exits) - # TODO: orelse + if node.orelse: + else_exits = self.add_body_arcs(node.orelse, from_line=start) + exits |= else_exits + else: + # No `else` clause: you can exit from the start. 
+ if not constant_test: + exits.add(start) return exits def handle_With(self, node): -- cgit v1.2.1 From 3e2625dd17674339073c6afc4da6ba3b99932e4c Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Tue, 5 Jan 2016 14:48:41 -0500 Subject: Fix another form-feed problem, #461 --- coverage/phystokens.py | 3 +-- coverage/python.py | 2 ++ 2 files changed, 3 insertions(+), 2 deletions(-) (limited to 'coverage') diff --git a/coverage/phystokens.py b/coverage/phystokens.py index 5aa3402..5e80ed5 100644 --- a/coverage/phystokens.py +++ b/coverage/phystokens.py @@ -92,8 +92,7 @@ def source_token_lines(source): line = [] col = 0 - # The \f is because of http://bugs.python.org/issue19035 - source = source.expandtabs(8).replace('\r\n', '\n').replace('\f', ' ') + source = source.expandtabs(8).replace('\r\n', '\n') tokgen = generate_tokens(source) for ttype, ttext, (_, scol), (_, ecol), _ in phys_tokens(tokgen): diff --git a/coverage/python.py b/coverage/python.py index 5e56382..07d2347 100644 --- a/coverage/python.py +++ b/coverage/python.py @@ -50,6 +50,8 @@ def get_python_source(filename): # Couldn't find source. raise NoSource("No source for code: '%s'." % filename) + # Replace \f because of http://bugs.python.org/issue19035 + source = source.replace(b'\f', b' ') source = source.decode(source_encoding(source), "replace") # Python code should always end with a line with a newline. 
-- cgit v1.2.1 From 72aa922a23e7ee0b44710fbbe226abbbecdd8f09 Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Wed, 6 Jan 2016 07:11:32 -0500 Subject: Add some tests for uncovered cases --- coverage/parser.py | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) (limited to 'coverage') diff --git a/coverage/parser.py b/coverage/parser.py index b0e7371..a6a8ad6 100644 --- a/coverage/parser.py +++ b/coverage/parser.py @@ -336,7 +336,7 @@ class AstArcAnalyzer(object): self.funcdefs = funcdefs self.classdefs = classdefs - if int(os.environ.get("COVERAGE_ASTDUMP", 0)): + if int(os.environ.get("COVERAGE_ASTDUMP", 0)): # pragma: debugging # Dump the AST so that failing tests have helpful output. ast_dump(self.root_node) @@ -372,7 +372,6 @@ class AstArcAnalyzer(object): if node.elts: return self.line_for_node(node.elts[0]) else: - # TODO: test case for this branch: x = [] return node.lineno def line_Module(self, node): @@ -380,7 +379,6 @@ class AstArcAnalyzer(object): return self.line_for_node(node.body[0]) else: # Modules have no line number, they always start at 1. - # TODO: test case for empty module. return 1 def line_default(self, node): @@ -426,7 +424,6 @@ class AstArcAnalyzer(object): # tests to write: # TODO: while EXPR: # TODO: while False: - # TODO: multi-target assignment with computed targets # TODO: listcomps hidden deep in other expressions # TODO: listcomps hidden in lists: x = [[i for i in range(10)]] # TODO: nested function definitions @@ -688,11 +685,17 @@ class AstArcAnalyzer(object): def add_arcs_for_code_objects(self, root_node): for node in ast.walk(root_node): node_name = node.__class__.__name__ + # TODO: should this be broken into separate methods? 
if node_name == "Module": start = self.line_for_node(node) - exits = self.add_body_arcs(node.body, from_line=-1) - for xit in exits: - self.arcs.add((xit, -start)) + if node.body: + exits = self.add_body_arcs(node.body, from_line=-1) + for xit in exits: + self.arcs.add((xit, -start)) + else: + # Empty module. + self.arcs.add((-1, start)) + self.arcs.add((start, -1)) elif node_name in ["FunctionDef", "AsyncFunctionDef"]: start = self.line_for_node(node) self.block_stack.append(FunctionBlock(start=start)) -- cgit v1.2.1 From 4f5c742ad734de781445a749f672ff5fa509b201 Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Wed, 6 Jan 2016 07:56:05 -0500 Subject: Clean up some TODO's and code paths --- coverage/parser.py | 24 ++++++++++-------------- 1 file changed, 10 insertions(+), 14 deletions(-) (limited to 'coverage') diff --git a/coverage/parser.py b/coverage/parser.py index a6a8ad6..348eb7c 100644 --- a/coverage/parser.py +++ b/coverage/parser.py @@ -431,7 +431,6 @@ class AstArcAnalyzer(object): def process_break_exits(self, exits): for block in self.blocks(): if isinstance(block, LoopBlock): - # TODO: what if there is no loop? block.break_exits.update(exits) break elif isinstance(block, TryBlock) and block.final_start: @@ -441,7 +440,6 @@ class AstArcAnalyzer(object): def process_continue_exits(self, exits): for block in self.blocks(): if isinstance(block, LoopBlock): - # TODO: what if there is no loop? for xit in exits: self.arcs.add((xit, block.start)) break @@ -470,7 +468,6 @@ class AstArcAnalyzer(object): block.return_from.update(exits) break elif isinstance(block, FunctionBlock): - # TODO: what if there is no enclosing function? 
for xit in exits: self.arcs.add((xit, -block.start)) break @@ -628,13 +625,12 @@ class AstArcAnalyzer(object): node.handlers = [] node.orelse = [] - if node.body: - first = node.body[0] - if first.__class__.__name__ == "TryExcept" and node.lineno == first.lineno: - assert len(node.body) == 1 - node.body = first.body - node.handlers = first.handlers - node.orelse = first.orelse + first = node.body[0] + if first.__class__.__name__ == "TryExcept" and node.lineno == first.lineno: + assert len(node.body) == 1 + node.body = first.body + node.handlers = first.handlers + node.orelse = first.orelse return self.handle_Try(node) @@ -672,10 +668,10 @@ class AstArcAnalyzer(object): ]) def handle_default(self, node): - node_name = node.__class__.__name__ - if node_name not in self.OK_TO_DEFAULT: - # TODO: put 1/0 here to find unhandled nodes. - print("*** Unhandled: {0}".format(node)) + if 0: + node_name = node.__class__.__name__ + if node_name not in self.OK_TO_DEFAULT: + print("*** Unhandled: {0}".format(node)) return set([self.line_for_node(node)]) CODE_COMPREHENSIONS = set(["GeneratorExp", "DictComp", "SetComp"]) -- cgit v1.2.1 From ad5c3c02327b0cb67e560a3193f6effc6b891a8b Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Wed, 6 Jan 2016 08:11:58 -0500 Subject: More uniform dispatch: use methods for everything, and handle defaults in the dispatch instead of calling another method. 
--- coverage/parser.py | 120 +++++++++++++++++++++++++++++------------------------ 1 file changed, 66 insertions(+), 54 deletions(-) (limited to 'coverage') diff --git a/coverage/parser.py b/coverage/parser.py index 348eb7c..c5d7c61 100644 --- a/coverage/parser.py +++ b/coverage/parser.py @@ -355,8 +355,11 @@ class AstArcAnalyzer(object): def line_for_node(self, node): """What is the right line number to use for this node?""" node_name = node.__class__.__name__ - handler = getattr(self, "line_" + node_name, self.line_default) - return handler(node) + handler = getattr(self, "line_" + node_name, None) + if handler is not None: + return handler(node) + else: + return node.lineno def line_Assign(self, node): return self.line_for_node(node.value) @@ -381,8 +384,10 @@ class AstArcAnalyzer(object): # Modules have no line number, they always start at 1. return 1 - def line_default(self, node): - return node.lineno + OK_TO_DEFAULT = set([ + "Assign", "Assert", "AugAssign", "Delete", "Exec", "Expr", "Global", + "Import", "ImportFrom", "Pass", "Print", + ]) def add_arcs(self, node): """Add the arcs for `node`. 
@@ -397,9 +402,17 @@ class AstArcAnalyzer(object): if isinstance(value, ast.expr) and self.contains_return_expression(value): self.process_return_exits([self.line_for_node(node)]) break + node_name = node.__class__.__name__ - handler = getattr(self, "handle_" + node_name, self.handle_default) - return handler(node) + handler = getattr(self, "handle_" + node_name, None) + if handler is not None: + return handler(node) + + if 0: + node_name = node.__class__.__name__ + if node_name not in self.OK_TO_DEFAULT: + print("*** Unhandled: {0}".format(node)) + return set([self.line_for_node(node)]) def add_body_arcs(self, body, from_line=None, prev_lines=None): if prev_lines is None: @@ -662,58 +675,57 @@ class AstArcAnalyzer(object): handle_AsyncWith = handle_With - OK_TO_DEFAULT = set([ - "Assign", "Assert", "AugAssign", "Delete", "Exec", "Expr", "Global", - "Import", "ImportFrom", "Pass", "Print", - ]) - - def handle_default(self, node): - if 0: + def add_arcs_for_code_objects(self, root_node): + for node in ast.walk(root_node): node_name = node.__class__.__name__ - if node_name not in self.OK_TO_DEFAULT: - print("*** Unhandled: {0}".format(node)) - return set([self.line_for_node(node)]) + code_object_handler = getattr(self, "code_object_" + node_name, None) + if code_object_handler is not None: + code_object_handler(node) + + def code_object_Module(self, node): + start = self.line_for_node(node) + if node.body: + exits = self.add_body_arcs(node.body, from_line=-1) + for xit in exits: + self.arcs.add((xit, -start)) + else: + # Empty module. 
+ self.arcs.add((-1, start)) + self.arcs.add((start, -1)) + + def code_object_FunctionDef(self, node): + start = self.line_for_node(node) + self.block_stack.append(FunctionBlock(start=start)) + exits = self.add_body_arcs(node.body, from_line=-1) + self.block_stack.pop() + for xit in exits: + self.arcs.add((xit, -start)) + + code_object_AsyncFunctionDef = code_object_FunctionDef - CODE_COMPREHENSIONS = set(["GeneratorExp", "DictComp", "SetComp"]) + def code_object_ClassDef(self, node): + start = self.line_for_node(node) + self.arcs.add((-1, start)) + exits = self.add_body_arcs(node.body, from_line=start) + for xit in exits: + self.arcs.add((xit, -start)) + + def do_code_object_comprehension(self, node): + start = self.line_for_node(node) + self.arcs.add((-1, start)) + self.arcs.add((start, -start)) + + code_object_GeneratorExp = do_code_object_comprehension + code_object_DictComp = do_code_object_comprehension + code_object_SetComp = do_code_object_comprehension if env.PY3: - CODE_COMPREHENSIONS.add("ListComp") + code_object_ListComp = do_code_object_comprehension - def add_arcs_for_code_objects(self, root_node): - for node in ast.walk(root_node): - node_name = node.__class__.__name__ - # TODO: should this be broken into separate methods? - if node_name == "Module": - start = self.line_for_node(node) - if node.body: - exits = self.add_body_arcs(node.body, from_line=-1) - for xit in exits: - self.arcs.add((xit, -start)) - else: - # Empty module. 
- self.arcs.add((-1, start)) - self.arcs.add((start, -1)) - elif node_name in ["FunctionDef", "AsyncFunctionDef"]: - start = self.line_for_node(node) - self.block_stack.append(FunctionBlock(start=start)) - exits = self.add_body_arcs(node.body, from_line=-1) - self.block_stack.pop() - for xit in exits: - self.arcs.add((xit, -start)) - elif node_name == "ClassDef": - start = self.line_for_node(node) - self.arcs.add((-1, start)) - exits = self.add_body_arcs(node.body, from_line=start) - for xit in exits: - self.arcs.add((xit, -start)) - elif node_name in self.CODE_COMPREHENSIONS: - start = self.line_for_node(node) - self.arcs.add((-1, start)) - self.arcs.add((start, -start)) - elif node_name == "Lambda": - start = self.line_for_node(node) - self.arcs.add((-1, start)) - self.arcs.add((start, -start)) - # TODO: test multi-line lambdas + def code_object_Lambda(self, node): + start = self.line_for_node(node) + self.arcs.add((-1, start)) + self.arcs.add((start, -start)) + # TODO: test multi-line lambdas def contains_return_expression(self, node): """Is there a yield-from or await in `node` someplace?""" -- cgit v1.2.1 From 074613f8237b8b0d9324c3a4cd7d0aed8be1309c Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Wed, 6 Jan 2016 08:46:59 -0500 Subject: Name the dispatched-to methods more unusually --- coverage/parser.py | 68 +++++++++++++++++++++++++++--------------------------- 1 file changed, 34 insertions(+), 34 deletions(-) (limited to 'coverage') diff --git a/coverage/parser.py b/coverage/parser.py index c5d7c61..647dbd0 100644 --- a/coverage/parser.py +++ b/coverage/parser.py @@ -355,29 +355,29 @@ class AstArcAnalyzer(object): def line_for_node(self, node): """What is the right line number to use for this node?""" node_name = node.__class__.__name__ - handler = getattr(self, "line_" + node_name, None) + handler = getattr(self, "_line__" + node_name, None) if handler is not None: return handler(node) else: return node.lineno - def line_Assign(self, node): + def 
_line__Assign(self, node): return self.line_for_node(node.value) - def line_Dict(self, node): + def _line__Dict(self, node): # Python 3.5 changed how dict literals are made. if env.PYVERSION >= (3, 5) and node.keys: return node.keys[0].lineno else: return node.lineno - def line_List(self, node): + def _line__List(self, node): if node.elts: return self.line_for_node(node.elts[0]) else: return node.lineno - def line_Module(self, node): + def _line__Module(self, node): if node.body: return self.line_for_node(node.body[0]) else: @@ -404,7 +404,7 @@ class AstArcAnalyzer(object): break node_name = node.__class__.__name__ - handler = getattr(self, "handle_" + node_name, None) + handler = getattr(self, "_handle__" + node_name, None) if handler is not None: return handler(node) @@ -487,12 +487,12 @@ class AstArcAnalyzer(object): ## Handlers - def handle_Break(self, node): + def _handle__Break(self, node): here = self.line_for_node(node) self.process_break_exits([here]) return set() - def handle_ClassDef(self, node): + def _handle__ClassDef(self, node): return self.process_decorated(node, self.classdefs) def process_decorated(self, node, defs): @@ -513,12 +513,12 @@ class AstArcAnalyzer(object): # the body is handled in add_arcs_for_code_objects. 
return set([last]) - def handle_Continue(self, node): + def _handle__Continue(self, node): here = self.line_for_node(node) self.process_continue_exits([here]) return set() - def handle_For(self, node): + def _handle__For(self, node): start = self.line_for_node(node.iter) self.block_stack.append(LoopBlock(start=start)) exits = self.add_body_arcs(node.body, from_line=start) @@ -534,31 +534,31 @@ class AstArcAnalyzer(object): exits.add(start) return exits - handle_AsyncFor = handle_For + _handle__AsyncFor = _handle__For - def handle_FunctionDef(self, node): + def _handle__FunctionDef(self, node): return self.process_decorated(node, self.funcdefs) - handle_AsyncFunctionDef = handle_FunctionDef + _handle__AsyncFunctionDef = _handle__FunctionDef - def handle_If(self, node): + def _handle__If(self, node): start = self.line_for_node(node.test) exits = self.add_body_arcs(node.body, from_line=start) exits |= self.add_body_arcs(node.orelse, from_line=start) return exits - def handle_Raise(self, node): + def _handle__Raise(self, node): # `raise` statement jumps away, no exits from here. here = self.line_for_node(node) self.process_raise_exits([here]) return set() - def handle_Return(self, node): + def _handle__Return(self, node): here = self.line_for_node(node) self.process_return_exits([here]) return set() - def handle_Try(self, node): + def _handle__Try(self, node): # try/finally is tricky. If there's a finally clause, then we need a # FinallyBlock to track what flows might go through the finally instead # of their normal flow. @@ -624,14 +624,14 @@ class AstArcAnalyzer(object): self.process_return_exits(exits) return exits - def handle_TryExcept(self, node): + def _handle__TryExcept(self, node): # Python 2.7 uses separate TryExcept and TryFinally nodes. If we get # TryExcept, it means there was no finally, so fake it, and treat as # a general Try node. 
node.finalbody = [] - return self.handle_Try(node) + return self._handle__Try(node) - def handle_TryFinally(self, node): + def _handle__TryFinally(self, node): # Python 2.7 uses separate TryExcept and TryFinally nodes. If we get # TryFinally, see if there's a TryExcept nested inside. If so, merge # them. Otherwise, fake fields to complete a Try node. @@ -645,9 +645,9 @@ class AstArcAnalyzer(object): node.handlers = first.handlers node.orelse = first.orelse - return self.handle_Try(node) + return self._handle__Try(node) - def handle_While(self, node): + def _handle__While(self, node): constant_test = self.is_constant_expr(node.test) start = to_top = self.line_for_node(node.test) if constant_test: @@ -668,21 +668,21 @@ class AstArcAnalyzer(object): exits.add(start) return exits - def handle_With(self, node): + def _handle__With(self, node): start = self.line_for_node(node) exits = self.add_body_arcs(node.body, from_line=start) return exits - handle_AsyncWith = handle_With + _handle__AsyncWith = _handle__With def add_arcs_for_code_objects(self, root_node): for node in ast.walk(root_node): node_name = node.__class__.__name__ - code_object_handler = getattr(self, "code_object_" + node_name, None) + code_object_handler = getattr(self, "_code_object__" + node_name, None) if code_object_handler is not None: code_object_handler(node) - def code_object_Module(self, node): + def _code_object__Module(self, node): start = self.line_for_node(node) if node.body: exits = self.add_body_arcs(node.body, from_line=-1) @@ -693,7 +693,7 @@ class AstArcAnalyzer(object): self.arcs.add((-1, start)) self.arcs.add((start, -1)) - def code_object_FunctionDef(self, node): + def _code_object__FunctionDef(self, node): start = self.line_for_node(node) self.block_stack.append(FunctionBlock(start=start)) exits = self.add_body_arcs(node.body, from_line=-1) @@ -701,9 +701,9 @@ class AstArcAnalyzer(object): for xit in exits: self.arcs.add((xit, -start)) - code_object_AsyncFunctionDef = 
code_object_FunctionDef + _code_object__AsyncFunctionDef = _code_object__FunctionDef - def code_object_ClassDef(self, node): + def _code_object__ClassDef(self, node): start = self.line_for_node(node) self.arcs.add((-1, start)) exits = self.add_body_arcs(node.body, from_line=start) @@ -715,13 +715,13 @@ class AstArcAnalyzer(object): self.arcs.add((-1, start)) self.arcs.add((start, -start)) - code_object_GeneratorExp = do_code_object_comprehension - code_object_DictComp = do_code_object_comprehension - code_object_SetComp = do_code_object_comprehension + _code_object__GeneratorExp = do_code_object_comprehension + _code_object__DictComp = do_code_object_comprehension + _code_object__SetComp = do_code_object_comprehension if env.PY3: - code_object_ListComp = do_code_object_comprehension + _code_object__ListComp = do_code_object_comprehension - def code_object_Lambda(self, node): + def _code_object__Lambda(self, node): start = self.line_for_node(node) self.arcs.add((-1, start)) self.arcs.add((start, -start)) -- cgit v1.2.1 From 4a08806f814fc350187537d10081f2dfb4195396 Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Wed, 6 Jan 2016 16:27:24 -0500 Subject: Remove the old bytecode-based branch analyzer --- coverage/backward.py | 8 -- coverage/bytecode.py | 65 ---------- coverage/parser.py | 361 +-------------------------------------------------- 3 files changed, 2 insertions(+), 432 deletions(-) (limited to 'coverage') diff --git a/coverage/backward.py b/coverage/backward.py index 4fc7221..50d49a0 100644 --- a/coverage/backward.py +++ b/coverage/backward.py @@ -93,10 +93,6 @@ if env.PY3: """Produce a byte string with the ints from `byte_values`.""" return bytes(byte_values) - def byte_to_int(byte_value): - """Turn an element of a bytes object into an int.""" - return byte_value - def bytes_to_ints(bytes_value): """Turn a bytes object into a sequence of ints.""" # In Python 3, iterating bytes gives ints. 
@@ -111,10 +107,6 @@ else: """Produce a byte string with the ints from `byte_values`.""" return "".join(chr(b) for b in byte_values) - def byte_to_int(byte_value): - """Turn an element of a bytes object into an int.""" - return ord(byte_value) - def bytes_to_ints(bytes_value): """Turn a bytes object into a sequence of ints.""" for byte in bytes_value: diff --git a/coverage/bytecode.py b/coverage/bytecode.py index 82929ce..d823c67 100644 --- a/coverage/bytecode.py +++ b/coverage/bytecode.py @@ -3,73 +3,8 @@ """Bytecode manipulation for coverage.py""" -import opcode import types -from coverage.backward import byte_to_int - - -class ByteCode(object): - """A single bytecode.""" - def __init__(self): - # The offset of this bytecode in the code object. - self.offset = -1 - - # The opcode, defined in the `opcode` module. - self.op = -1 - - # The argument, a small integer, whose meaning depends on the opcode. - self.arg = -1 - - # The offset in the code object of the next bytecode. - self.next_offset = -1 - - # The offset to jump to. - self.jump_to = -1 - - -class ByteCodes(object): - """Iterator over byte codes in `code`. - - This handles the logic of EXTENDED_ARG byte codes internally. Those byte - codes are not returned by this iterator. - - Returns `ByteCode` objects. 
- - """ - def __init__(self, code): - self.code = code - - def __getitem__(self, i): - return byte_to_int(self.code[i]) - - def __iter__(self): - offset = 0 - ext_arg = 0 - while offset < len(self.code): - bc = ByteCode() - bc.op = self[offset] - bc.offset = offset - - next_offset = offset+1 - if bc.op >= opcode.HAVE_ARGUMENT: - bc.arg = ext_arg + self[offset+1] + 256*self[offset+2] - next_offset += 2 - - label = -1 - if bc.op in opcode.hasjrel: - label = next_offset + bc.arg - elif bc.op in opcode.hasjabs: - label = bc.arg - bc.jump_to = label - - bc.next_offset = offset = next_offset - if bc.op == opcode.EXTENDED_ARG: - ext_arg = bc.arg * 256*256 - else: - ext_arg = 0 - yield bc - class CodeObjects(object): """Iterate over all the code objects in `code`.""" diff --git a/coverage/parser.py b/coverage/parser.py index 647dbd0..32a7590 100644 --- a/coverage/parser.py +++ b/coverage/parser.py @@ -5,7 +5,6 @@ import ast import collections -import dis import os import re import token @@ -14,7 +13,7 @@ import tokenize from coverage import env from coverage.backward import range # pylint: disable=redefined-builtin from coverage.backward import bytes_to_ints, string_class -from coverage.bytecode import ByteCodes, CodeObjects +from coverage.bytecode import CodeObjects from coverage.misc import contract, nice_pair, join_regex from coverage.misc import CoverageException, NoSource, NotPython from coverage.phystokens import compile_unicode, generate_tokens, neuter_encoding_declaration @@ -253,22 +252,6 @@ class PythonParser(object): starts = self.raw_statements - ignore self.statements = self.first_lines(starts) - ignore - def old_arcs(self): - """Get information about the arcs available in the code. - - Returns a set of line number pairs. Line numbers have been normalized - to the first line of multi-line statements. 
- - """ - if self._all_arcs is None: - self._all_arcs = set() - for l1, l2 in self.byte_parser._all_arcs(): - fl1 = self.first_line(l1) - fl2 = self.first_line(l2) - if fl1 != fl2: - self._all_arcs.add((fl1, fl2)) - return self._all_arcs - def arcs(self): if self._all_arcs is None: aaa = AstArcAnalyzer(self.text, self.raw_funcdefs, self.raw_classdefs) @@ -736,62 +719,6 @@ class AstArcAnalyzer(object): return False -## Opcodes that guide the ByteParser. - -def _opcode(name): - """Return the opcode by name from the dis module.""" - return dis.opmap[name] - - -def _opcode_set(*names): - """Return a set of opcodes by the names in `names`.""" - s = set() - for name in names: - try: - s.add(_opcode(name)) - except KeyError: - pass - return s - -# Opcodes that leave the code object. -OPS_CODE_END = _opcode_set('RETURN_VALUE') - -# Opcodes that unconditionally end the code chunk. -OPS_CHUNK_END = _opcode_set( - 'JUMP_ABSOLUTE', 'JUMP_FORWARD', 'RETURN_VALUE', 'RAISE_VARARGS', - 'BREAK_LOOP', 'CONTINUE_LOOP', -) - -# Opcodes that unconditionally begin a new code chunk. By starting new chunks -# with unconditional jump instructions, we neatly deal with jumps to jumps -# properly. -OPS_CHUNK_BEGIN = _opcode_set('JUMP_ABSOLUTE', 'JUMP_FORWARD') - -# Opcodes that push a block on the block stack. -OPS_PUSH_BLOCK = _opcode_set( - 'SETUP_LOOP', 'SETUP_EXCEPT', 'SETUP_FINALLY', 'SETUP_WITH', 'SETUP_ASYNC_WITH', -) - -# Block types for exception handling. -OPS_EXCEPT_BLOCKS = _opcode_set('SETUP_EXCEPT', 'SETUP_FINALLY') - -# Opcodes that pop a block from the block stack. -OPS_POP_BLOCK = _opcode_set('POP_BLOCK') - -OPS_GET_AITER = _opcode_set('GET_AITER') - -# Opcodes that have a jump destination, but aren't really a jump. -OPS_NO_JUMP = OPS_PUSH_BLOCK - -# Individual opcodes we need below. -OP_BREAK_LOOP = _opcode('BREAK_LOOP') -OP_END_FINALLY = _opcode('END_FINALLY') -OP_COMPARE_OP = _opcode('COMPARE_OP') -COMPARE_EXCEPTION = 10 # just have to get this constant from the code. 
-OP_LOAD_CONST = _opcode('LOAD_CONST') -OP_RETURN_VALUE = _opcode('RETURN_VALUE') - - class ByteParser(object): """Parse byte codes to understand the structure of code.""" @@ -812,7 +739,7 @@ class ByteParser(object): # Alternative Python implementations don't always provide all the # attributes on code objects that we need to do the analysis. - for attr in ['co_lnotab', 'co_firstlineno', 'co_consts', 'co_code']: + for attr in ['co_lnotab', 'co_firstlineno', 'co_consts']: if not hasattr(self.code, attr): raise CoverageException( "This implementation of Python doesn't support code analysis.\n" @@ -867,290 +794,6 @@ class ByteParser(object): for _, l in bp._bytes_lines(): yield l - def _block_stack_repr(self, block_stack): # pragma: debugging - """Get a string version of `block_stack`, for debugging.""" - blocks = ", ".join( - "(%s, %r)" % (dis.opname[b[0]], b[1]) for b in block_stack - ) - return "[" + blocks + "]" - - def _split_into_chunks(self): - """Split the code object into a list of `Chunk` objects. - - Each chunk is only entered at its first instruction, though there can - be many exits from a chunk. - - Returns a list of `Chunk` objects. - - """ - # The list of chunks so far, and the one we're working on. - chunks = [] - chunk = None - - # A dict mapping byte offsets of line starts to the line numbers. - bytes_lines_map = dict(self._bytes_lines()) - - # The block stack: loops and try blocks get pushed here for the - # implicit jumps that can occur. - # Each entry is a tuple: (block type, destination) - block_stack = [] - - # Some op codes are followed by branches that should be ignored. This - # is a count of how many ignores are left. - ignore_branch = 0 - - ignore_pop_block = 0 - - # We have to handle the last two bytecodes specially. - ult = penult = None - - # Get a set of all of the jump-to points. 
- jump_to = set() - bytecodes = list(ByteCodes(self.code.co_code)) - for bc in bytecodes: - if bc.jump_to >= 0: - jump_to.add(bc.jump_to) - - chunk_lineno = 0 - - # Walk the byte codes building chunks. - for bc in bytecodes: - # Maybe have to start a new chunk. - start_new_chunk = False - first_chunk = False - if bc.offset in bytes_lines_map: - # Start a new chunk for each source line number. - start_new_chunk = True - chunk_lineno = bytes_lines_map[bc.offset] - first_chunk = True - elif bc.offset in jump_to: - # To make chunks have a single entrance, we have to make a new - # chunk when we get to a place some bytecode jumps to. - start_new_chunk = True - elif bc.op in OPS_CHUNK_BEGIN: - # Jumps deserve their own unnumbered chunk. This fixes - # problems with jumps to jumps getting confused. - start_new_chunk = True - - if not chunk or start_new_chunk: - if chunk: - chunk.exits.add(bc.offset) - chunk = Chunk(bc.offset, chunk_lineno, first_chunk) - if not chunks: - # The very first chunk of a code object is always an - # entrance. - chunk.entrance = True - chunks.append(chunk) - - # Look at the opcode. - if bc.jump_to >= 0 and bc.op not in OPS_NO_JUMP: - if ignore_branch: - # Someone earlier wanted us to ignore this branch. - ignore_branch -= 1 - else: - # The opcode has a jump, it's an exit for this chunk. - chunk.exits.add(bc.jump_to) - - if bc.op in OPS_CODE_END: - # The opcode can exit the code object. - chunk.exits.add(-self.code.co_firstlineno) - if bc.op in OPS_PUSH_BLOCK: - # The opcode adds a block to the block_stack. - block_stack.append((bc.op, bc.jump_to)) - if bc.op in OPS_POP_BLOCK: - # The opcode pops a block from the block stack. - if ignore_pop_block: - ignore_pop_block -= 1 - else: - block_stack.pop() - if bc.op in OPS_CHUNK_END: - # This opcode forces the end of the chunk. - if bc.op == OP_BREAK_LOOP: - # A break is implicit: jump where the top of the - # block_stack points. 
- chunk.exits.add(block_stack[-1][1]) - chunk = None - if bc.op == OP_END_FINALLY: - # For the finally clause we need to find the closest exception - # block, and use its jump target as an exit. - for block in reversed(block_stack): - if block[0] in OPS_EXCEPT_BLOCKS: - chunk.exits.add(block[1]) - break - if bc.op == OP_COMPARE_OP and bc.arg == COMPARE_EXCEPTION: - # This is an except clause. We want to overlook the next - # branch, so that except's don't count as branches. - ignore_branch += 1 - - if bc.op in OPS_GET_AITER: - # GET_AITER is weird: First, it seems to generate one more - # POP_BLOCK than SETUP_*, so we have to prepare to ignore one - # of the POP_BLOCKS. Second, we don't have a clear branch to - # the exit of the loop, so we peek into the block stack to find - # it. - ignore_pop_block += 1 - chunk.exits.add(block_stack[-1][1]) - - penult = ult - ult = bc - - if chunks: - # The last two bytecodes could be a dummy "return None" that - # shouldn't be counted as real code. Every Python code object seems - # to end with a return, and a "return None" is inserted if there - # isn't an explicit return in the source. - if ult and penult: - if penult.op == OP_LOAD_CONST and ult.op == OP_RETURN_VALUE: - if self.code.co_consts[penult.arg] is None: - # This is "return None", but is it dummy? A real line - # would be a last chunk all by itself. - if chunks[-1].byte != penult.offset: - ex = -self.code.co_firstlineno - # Split the last chunk - last_chunk = chunks[-1] - last_chunk.exits.remove(ex) - last_chunk.exits.add(penult.offset) - chunk = Chunk( - penult.offset, last_chunk.line, False - ) - chunk.exits.add(ex) - chunks.append(chunk) - - # Give all the chunks a length. 
- chunks[-1].length = bc.next_offset - chunks[-1].byte - for i in range(len(chunks)-1): - chunks[i].length = chunks[i+1].byte - chunks[i].byte - - #self.validate_chunks(chunks) - return chunks - - def validate_chunks(self, chunks): # pragma: debugging - """Validate the rule that chunks have a single entrance.""" - # starts is the entrances to the chunks - starts = set(ch.byte for ch in chunks) - for ch in chunks: - assert all((ex in starts or ex < 0) for ex in ch.exits) - - def _arcs(self): - """Find the executable arcs in the code. - - Yields pairs: (from,to). From and to are integer line numbers. If - from is < 0, then the arc is an entrance into the code object. If to - is < 0, the arc is an exit from the code object. - - """ - chunks = self._split_into_chunks() - - # A map from byte offsets to the chunk starting at that offset. - byte_chunks = dict((c.byte, c) for c in chunks) - - # Traverse from the first chunk in each line, and yield arcs where - # the trace function will be invoked. - for chunk in chunks: - if chunk.entrance: - yield (-1, chunk.line) - - if not chunk.first: - continue - - chunks_considered = set() - chunks_to_consider = [chunk] - while chunks_to_consider: - # Get the chunk we're considering, and make sure we don't - # consider it again. - this_chunk = chunks_to_consider.pop() - chunks_considered.add(this_chunk) - - # For each exit, add the line number if the trace function - # would be triggered, or add the chunk to those being - # considered if not. - for ex in this_chunk.exits: - if ex < 0: - yield (chunk.line, ex) - else: - next_chunk = byte_chunks[ex] - if next_chunk in chunks_considered: - continue - - # The trace function is invoked if visiting the first - # bytecode in a line, or if the transition is a - # backward jump. 
- backward_jump = next_chunk.byte < this_chunk.byte - if next_chunk.first or backward_jump: - if next_chunk.line != chunk.line: - yield (chunk.line, next_chunk.line) - else: - chunks_to_consider.append(next_chunk) - - def _all_chunks(self): - """Returns a list of `Chunk` objects for this code and its children. - - See `_split_into_chunks` for details. - - """ - chunks = [] - for bp in self.child_parsers(): - chunks.extend(bp._split_into_chunks()) - - return chunks - - def _all_arcs(self): - """Get the set of all arcs in this code object and its children. - - See `_arcs` for details. - - """ - arcs = set() - for bp in self.child_parsers(): - arcs.update(bp._arcs()) - - return arcs - - -class Chunk(object): - """A sequence of byte codes with a single entrance. - - To analyze byte code, we have to divide it into chunks, sequences of byte - codes such that each chunk has only one entrance, the first instruction in - the block. - - This is almost the CS concept of `basic block`_, except that we're willing - to have many exits from a chunk, and "basic block" is a more cumbersome - term. - - .. _basic block: http://en.wikipedia.org/wiki/Basic_block - - `byte` is the offset to the bytecode starting this chunk. - - `line` is the source line number containing this chunk. - - `first` is true if this is the first chunk in the source line. - - An exit < 0 means the chunk can leave the code (return). The exit is - the negative of the starting line number of the code block. - - The `entrance` attribute is a boolean indicating whether the code object - can be entered at this chunk. - - """ - def __init__(self, byte, line, first): - self.byte = byte - self.line = line - self.first = first - self.length = 0 - self.entrance = False - self.exits = set() - - def __repr__(self): - return "<%d+%d @%d%s%s %r>" % ( - self.byte, - self.length, - self.line, - "!" 
if self.first else "", - "v" if self.entrance else "", - list(self.exits), - ) - SKIP_DUMP_FIELDS = ["ctx"] -- cgit v1.2.1 From fd524de906c72855360f05f5a80cc1b540895829 Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Thu, 7 Jan 2016 08:46:35 -0500 Subject: Bytecode not byte code --- coverage/parser.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) (limited to 'coverage') diff --git a/coverage/parser.py b/coverage/parser.py index 32a7590..16419ca 100644 --- a/coverage/parser.py +++ b/coverage/parser.py @@ -142,7 +142,7 @@ class PythonParser(object): indent -= 1 elif toktype == token.NAME: if ttext == 'class': - # Class definitions look like branches in the byte code, so + # Class definitions look like branches in the bytecode, so # we need to exclude them. The simplest way is to note the # lines with the 'class' keyword. self.raw_classdefs.add(slineno) @@ -720,7 +720,7 @@ class AstArcAnalyzer(object): class ByteParser(object): - """Parse byte codes to understand the structure of code.""" + """Parse bytecode to understand the structure of code.""" @contract(text='unicode') def __init__(self, text, code=None, filename=None): -- cgit v1.2.1 From ca465b6eb54960f4a8be4481a5b2501614c6aa00 Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Thu, 7 Jan 2016 12:06:11 -0500 Subject: Clean up small stuff --- coverage/parser.py | 9 +++++++++ coverage/python.py | 4 ---- 2 files changed, 9 insertions(+), 4 deletions(-) (limited to 'coverage') diff --git a/coverage/parser.py b/coverage/parser.py index 16419ca..c03a308 100644 --- a/coverage/parser.py +++ b/coverage/parser.py @@ -253,6 +253,12 @@ class PythonParser(object): self.statements = self.first_lines(starts) - ignore def arcs(self): + """Get information about the arcs available in the code. + + Returns a set of line number pairs. Line numbers have been normalized + to the first line of multi-line statements. 
+ + """ if self._all_arcs is None: aaa = AstArcAnalyzer(self.text, self.raw_funcdefs, self.raw_classdefs) arcs = aaa.collect_arcs() @@ -298,10 +304,12 @@ class LoopBlock(object): self.start = start self.break_exits = set() + class FunctionBlock(object): def __init__(self, start): self.start = start + class TryBlock(object): def __init__(self, handler_start=None, final_start=None): self.handler_start = handler_start # TODO: is this used? @@ -803,6 +811,7 @@ def is_simple_value(value): isinstance(value, (string_class, int, float)) ) +# TODO: a test of ast_dump? def ast_dump(node, depth=0): indent = " " * depth if not isinstance(node, ast.AST): diff --git a/coverage/python.py b/coverage/python.py index bf19cb2..5e56382 100644 --- a/coverage/python.py +++ b/coverage/python.py @@ -159,10 +159,6 @@ class PythonFileReporter(FileReporter): def arcs(self): return self.parser.arcs() - @expensive - def ast_arcs(self): - return self.parser.ast_arcs() - @expensive def exit_counts(self): return self.parser.exit_counts() -- cgit v1.2.1 From 858c44bb1007511a69e0ed9ea60ccd3f9fe3eb56 Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Thu, 7 Jan 2016 19:42:42 -0500 Subject: Add tests of multiline lambdas, though i don't quite understand the line numbers involved --- coverage/parser.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) (limited to 'coverage') diff --git a/coverage/parser.py b/coverage/parser.py index c03a308..9f7400e 100644 --- a/coverage/parser.py +++ b/coverage/parser.py @@ -312,8 +312,8 @@ class FunctionBlock(object): class TryBlock(object): def __init__(self, handler_start=None, final_start=None): - self.handler_start = handler_start # TODO: is this used? - self.final_start = final_start # TODO: is this used? 
+ self.handler_start = handler_start + self.final_start = final_start self.break_from = set() self.continue_from = set() self.return_from = set() @@ -716,7 +716,6 @@ class AstArcAnalyzer(object): start = self.line_for_node(node) self.arcs.add((-1, start)) self.arcs.add((start, -start)) - # TODO: test multi-line lambdas def contains_return_expression(self, node): """Is there a yield-from or await in `node` someplace?""" -- cgit v1.2.1 From f617b38b25de067d7eacc7a0290434f216cb9490 Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Fri, 8 Jan 2016 07:18:37 -0500 Subject: Put the lambda tests together --- coverage/parser.py | 1 + 1 file changed, 1 insertion(+) (limited to 'coverage') diff --git a/coverage/parser.py b/coverage/parser.py index 9f7400e..babf4d7 100644 --- a/coverage/parser.py +++ b/coverage/parser.py @@ -260,6 +260,7 @@ class PythonParser(object): """ if self._all_arcs is None: + import pudb,sys as __sys;__sys.stdout=__sys.__stdout__;pudb.set_trace() # -={XX}=-={XX}=-={XX}=- aaa = AstArcAnalyzer(self.text, self.raw_funcdefs, self.raw_classdefs) arcs = aaa.collect_arcs() -- cgit v1.2.1 From ff44f3f0c888e2ca5077bc5bb921d25485e753c9 Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Sat, 9 Jan 2016 08:28:25 -0500 Subject: Add a test of partial lambdas --- coverage/parser.py | 1 - 1 file changed, 1 deletion(-) (limited to 'coverage') diff --git a/coverage/parser.py b/coverage/parser.py index babf4d7..9f7400e 100644 --- a/coverage/parser.py +++ b/coverage/parser.py @@ -260,7 +260,6 @@ class PythonParser(object): """ if self._all_arcs is None: - import pudb,sys as __sys;__sys.stdout=__sys.__stdout__;pudb.set_trace() # -={XX}=-={XX}=-={XX}=- aaa = AstArcAnalyzer(self.text, self.raw_funcdefs, self.raw_classdefs) arcs = aaa.collect_arcs() -- cgit v1.2.1 From 68d997423de82bdf0406e24d691efe79cf083f62 Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Sat, 9 Jan 2016 09:00:48 -0500 Subject: Add some docstrings, etc --- coverage/parser.py | 47 
+++++++++++++++++++++++++++++++++++------------ 1 file changed, 35 insertions(+), 12 deletions(-) (limited to 'coverage') diff --git a/coverage/parser.py b/coverage/parser.py index 9f7400e..6e641c4 100644 --- a/coverage/parser.py +++ b/coverage/parser.py @@ -299,18 +299,25 @@ class PythonParser(object): return exit_counts +# +# AST analysis +# + class LoopBlock(object): + """A block on the block stack representing a `for` or `while` loop.""" def __init__(self, start): self.start = start self.break_exits = set() class FunctionBlock(object): + """A block on the block stack representing a function definition.""" def __init__(self, start): self.start = start class TryBlock(object): + """A block on the block stack representing a `try` block.""" def __init__(self, handler_start=None, final_start=None): self.handler_start = handler_start self.final_start = final_start @@ -321,6 +328,8 @@ class TryBlock(object): class AstArcAnalyzer(object): + """Analyze source text with an AST to find executable code paths.""" + @contract(text='unicode', funcdefs=set, classdefs=set) def __init__(self, text, funcdefs, classdefs): self.root_node = ast.parse(neuter_encoding_declaration(text)) @@ -339,12 +348,16 @@ class AstArcAnalyzer(object): self.add_arcs_for_code_objects(self.root_node) return self.arcs - def blocks(self): + def nearest_blocks(self): """Yield the blocks in nearest-to-farthest order.""" return reversed(self.block_stack) def line_for_node(self, node): - """What is the right line number to use for this node?""" + """What is the right line number to use for this node? + + This dispatches to _line__Node functions where needed. 
+ + """ node_name = node.__class__.__name__ handler = getattr(self, "_line__" + node_name, None) if handler is not None: @@ -433,7 +446,8 @@ class AstArcAnalyzer(object): # TODO: nested function definitions def process_break_exits(self, exits): - for block in self.blocks(): + """Add arcs due to jumps from `exits` being breaks.""" + for block in self.nearest_blocks(): if isinstance(block, LoopBlock): block.break_exits.update(exits) break @@ -442,7 +456,8 @@ class AstArcAnalyzer(object): break def process_continue_exits(self, exits): - for block in self.blocks(): + """Add arcs due to jumps from `exits` being continues.""" + for block in self.nearest_blocks(): if isinstance(block, LoopBlock): for xit in exits: self.arcs.add((xit, block.start)) @@ -452,7 +467,8 @@ class AstArcAnalyzer(object): break def process_raise_exits(self, exits): - for block in self.blocks(): + """Add arcs due to jumps from `exits` being raises.""" + for block in self.nearest_blocks(): if isinstance(block, TryBlock): if block.handler_start: for xit in exits: @@ -467,7 +483,8 @@ class AstArcAnalyzer(object): break def process_return_exits(self, exits): - for block in self.blocks(): + """Add arcs due to jumps from `exits` being returns.""" + for block in self.nearest_blocks(): if isinstance(block, TryBlock) and block.final_start: block.return_from.update(exits) break @@ -582,9 +599,9 @@ class AstArcAnalyzer(object): # "except:" doesn't jump to subsequent handlers, or # "finally:". last_handler_start = None - # TODO: should we break here? Handlers after "except:" - # won't be run. Should coverage know that code can't be - # run, or should it flag it as not run? + # Note that once we have `except:`, no further handlers + # will ever be run. We'll keep collecting them, and the + # code will get marked as not run. 
if node.orelse: exits = self.add_body_arcs(node.orelse, prev_lines=exits) @@ -804,7 +821,8 @@ class ByteParser(object): SKIP_DUMP_FIELDS = ["ctx"] -def is_simple_value(value): +def _is_simple_value(value): + """Is `value` simple enough to be displayed on a single line?""" return ( value in [None, [], (), {}, set()] or isinstance(value, (string_class, int, float)) @@ -812,6 +830,11 @@ def is_simple_value(value): # TODO: a test of ast_dump? def ast_dump(node, depth=0): + """Dump the AST for `node`. + + This recursively walks the AST, printing a readable version. + + """ indent = " " * depth if not isinstance(node, ast.AST): print("{0}<{1} {2!r}>".format(indent, node.__class__.__name__, node)) @@ -831,7 +854,7 @@ def ast_dump(node, depth=0): ] if not named_fields: print("{0}>".format(head)) - elif len(named_fields) == 1 and is_simple_value(named_fields[0][1]): + elif len(named_fields) == 1 and _is_simple_value(named_fields[0][1]): field_name, value = named_fields[0] print("{0} {1}: {2!r}>".format(head, field_name, value)) else: @@ -843,7 +866,7 @@ def ast_dump(node, depth=0): next_indent = indent + " " for field_name, value in named_fields: prefix = "{0}{1}:".format(next_indent, field_name) - if is_simple_value(value): + if _is_simple_value(value): print("{0} {1!r}".format(prefix, value)) elif isinstance(value, list): print("{0} [".format(prefix)) -- cgit v1.2.1 From da188e40a5ec0270e5626387f7a635cdbd5c498b Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Sat, 9 Jan 2016 09:50:33 -0500 Subject: Use all statements instead of specialized class and funcs sets in ast analysis --- coverage/parser.py | 30 ++++++++++++------------------ 1 file changed, 12 insertions(+), 18 deletions(-) (limited to 'coverage') diff --git a/coverage/parser.py b/coverage/parser.py index 6e641c4..4f15743 100644 --- a/coverage/parser.py +++ b/coverage/parser.py @@ -68,7 +68,6 @@ class PythonParser(object): # The line numbers of class and function definitions. 
self.raw_classdefs = set() - self.raw_funcdefs = set() # The line numbers of docstring lines. self.raw_docstrings = set() @@ -146,8 +145,6 @@ class PythonParser(object): # we need to exclude them. The simplest way is to note the # lines with the 'class' keyword. self.raw_classdefs.add(slineno) - elif ttext == 'def': - self.raw_funcdefs.add(slineno) elif toktype == token.OP: if ttext == ':': should_exclude = (elineno in self.raw_excluded) or excluding_decorators @@ -260,7 +257,7 @@ class PythonParser(object): """ if self._all_arcs is None: - aaa = AstArcAnalyzer(self.text, self.raw_funcdefs, self.raw_classdefs) + aaa = AstArcAnalyzer(self.text, self.raw_statements) arcs = aaa.collect_arcs() self._all_arcs = set() @@ -330,11 +327,10 @@ class TryBlock(object): class AstArcAnalyzer(object): """Analyze source text with an AST to find executable code paths.""" - @contract(text='unicode', funcdefs=set, classdefs=set) - def __init__(self, text, funcdefs, classdefs): + @contract(text='unicode', statements=set) + def __init__(self, text, statements): self.root_node = ast.parse(neuter_encoding_declaration(text)) - self.funcdefs = funcdefs - self.classdefs = classdefs + self.statements = statements if int(os.environ.get("COVERAGE_ASTDUMP", 0)): # pragma: debugging # Dump the AST so that failing tests have helpful output. @@ -500,10 +496,8 @@ class AstArcAnalyzer(object): self.process_break_exits([here]) return set() - def _handle__ClassDef(self, node): - return self.process_decorated(node, self.classdefs) - - def process_decorated(self, node, defs): + def _handle_decorated(self, node): + """Add arcs for things that can be decorated (classes and functions).""" last = self.line_for_node(node) if node.decorator_list: for dec_node in node.decorator_list: @@ -512,15 +506,17 @@ class AstArcAnalyzer(object): self.arcs.add((last, dec_start)) last = dec_start # The definition line may have been missed, but we should have it in - # `defs`. + # `self.statements`. 
body_start = self.line_for_node(node.body[0]) for lineno in range(last+1, body_start): - if lineno in defs: + if lineno in self.statements: self.arcs.add((last, lineno)) last = lineno # the body is handled in add_arcs_for_code_objects. return set([last]) + _handle__ClassDef = _handle_decorated + def _handle__Continue(self, node): here = self.line_for_node(node) self.process_continue_exits([here]) @@ -544,10 +540,8 @@ class AstArcAnalyzer(object): _handle__AsyncFor = _handle__For - def _handle__FunctionDef(self, node): - return self.process_decorated(node, self.funcdefs) - - _handle__AsyncFunctionDef = _handle__FunctionDef + _handle__FunctionDef = _handle_decorated + _handle__AsyncFunctionDef = _handle_decorated def _handle__If(self, node): start = self.line_for_node(node.test) -- cgit v1.2.1 From 12f4082afee3900e0518088997b9bff5f93db53c Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Sat, 9 Jan 2016 17:34:32 -0500 Subject: Properly skip lines that are optimized away --- coverage/parser.py | 21 ++++++++++++--------- 1 file changed, 12 insertions(+), 9 deletions(-) (limited to 'coverage') diff --git a/coverage/parser.py b/coverage/parser.py index 4f15743..501b76c 100644 --- a/coverage/parser.py +++ b/coverage/parser.py @@ -75,7 +75,8 @@ class PythonParser(object): # Internal detail, used by lab/parser.py. self.show_tokens = False - # A dict mapping line numbers to (lo,hi) for multi-line statements. + # A dict mapping line numbers to lexical statement starts for + # multi-line statements. self._multiline = {} # Lazily-created ByteParser and arc data. 
@@ -200,11 +201,7 @@ class PythonParser(object): def first_line(self, line): """Return the first line number of the statement including `line`.""" - first_line = self._multiline.get(line) - if first_line: - return first_line - else: - return line + return self._multiline.get(line, line) def first_lines(self, lines): """Map the line numbers in `lines` to the correct first line of the @@ -257,7 +254,7 @@ class PythonParser(object): """ if self._all_arcs is None: - aaa = AstArcAnalyzer(self.text, self.raw_statements) + aaa = AstArcAnalyzer(self.text, self.raw_statements, self._multiline) arcs = aaa.collect_arcs() self._all_arcs = set() @@ -328,12 +325,15 @@ class AstArcAnalyzer(object): """Analyze source text with an AST to find executable code paths.""" @contract(text='unicode', statements=set) - def __init__(self, text, statements): + def __init__(self, text, statements, multiline): self.root_node = ast.parse(neuter_encoding_declaration(text)) - self.statements = statements + # TODO: I think this is happening in too many places. + self.statements = set(multiline.get(l, l) for l in statements) + self.multiline = multiline if int(os.environ.get("COVERAGE_ASTDUMP", 0)): # pragma: debugging # Dump the AST so that failing tests have helpful output. + print(self.statements) ast_dump(self.root_node) self.arcs = None @@ -419,6 +419,9 @@ class AstArcAnalyzer(object): prev_lines = set([from_line]) for body_node in body: lineno = self.line_for_node(body_node) + first_line = self.multiline.get(lineno, lineno) + if first_line not in self.statements: + continue for prev_lineno in prev_lines: self.arcs.add((prev_lineno, lineno)) prev_lines = self.add_arcs(body_node) -- cgit v1.2.1 From 54779fbe4d54a13808f8fee846e6eb626cf40ea2 Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Sun, 10 Jan 2016 12:37:36 -0500 Subject: Make multiprocessing support work with spawned processes, which is what Windows uses. 
--- coverage/monkey.py | 30 +++++++++++++++++++++++++++++- 1 file changed, 29 insertions(+), 1 deletion(-) (limited to 'coverage') diff --git a/coverage/monkey.py b/coverage/monkey.py index b896dbf..3f78d7d 100644 --- a/coverage/monkey.py +++ b/coverage/monkey.py @@ -12,7 +12,6 @@ import sys PATCHED_MARKER = "_coverage$patched" if sys.version_info >= (3, 4): - klass = multiprocessing.process.BaseProcess else: klass = multiprocessing.Process @@ -49,4 +48,33 @@ def patch_multiprocessing(): else: multiprocessing.Process = ProcessWithCoverage + # When spawning processes rather than forking them, we have no state in the + # new process. We sneak in there with a Stowaway: we stuff one of our own + # objects into the data that gets pickled and sent to the sub-process. When + # the Stowaway is unpickled, it's __setstate__ method is called, which + # re-applies the monkey-patch. + # Windows only spawns, so this is needed to keep Windows working. + try: + from multiprocessing import spawn + original_get_preparation_data = spawn.get_preparation_data + except (ImportError, AttributeError): + pass + else: + def get_preparation_data_with_stowaway(name): + """Get the original preparation data, and also insert our stowaway.""" + d = original_get_preparation_data(name) + d['stowaway'] = Stowaway() + return d + + spawn.get_preparation_data = get_preparation_data_with_stowaway + setattr(multiprocessing, PATCHED_MARKER, True) + + +class Stowaway(object): + """An object to pickle, so when it is unpickled, it can apply the monkey-patch.""" + def __getstate__(self): + return {} + + def __setstate__(self, state): + patch_multiprocessing() -- cgit v1.2.1 From 553925a3412b70da85b68cd50729411da5dba73f Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Sun, 10 Jan 2016 12:52:25 -0500 Subject: A little clean up of the monkey-patch --- coverage/monkey.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) (limited to 'coverage') diff --git a/coverage/monkey.py 
b/coverage/monkey.py index 3f78d7d..e7517bf 100644 --- a/coverage/monkey.py +++ b/coverage/monkey.py @@ -33,6 +33,15 @@ class ProcessWithCoverage(klass): cov.save() +class Stowaway(object): + """An object to pickle, so when it is unpickled, it can apply the monkey-patch.""" + def __getstate__(self): + return {} + + def __setstate__(self, state_unused): + patch_multiprocessing() + + def patch_multiprocessing(): """Monkey-patch the multiprocessing module. @@ -55,7 +64,7 @@ def patch_multiprocessing(): # re-applies the monkey-patch. # Windows only spawns, so this is needed to keep Windows working. try: - from multiprocessing import spawn + from multiprocessing import spawn # pylint: disable=no-name-in-module original_get_preparation_data = spawn.get_preparation_data except (ImportError, AttributeError): pass @@ -69,12 +78,3 @@ def patch_multiprocessing(): spawn.get_preparation_data = get_preparation_data_with_stowaway setattr(multiprocessing, PATCHED_MARKER, True) - - -class Stowaway(object): - """An object to pickle, so when it is unpickled, it can apply the monkey-patch.""" - def __getstate__(self): - return {} - - def __setstate__(self, state): - patch_multiprocessing() -- cgit v1.2.1 From 990a0cc965a20cc0c424f0b61dcf9c082c88a35b Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Sun, 10 Jan 2016 13:01:57 -0500 Subject: Prep for 4.1b1 --- coverage/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'coverage') diff --git a/coverage/version.py b/coverage/version.py index dc4c57c..1907e1b 100644 --- a/coverage/version.py +++ b/coverage/version.py @@ -5,7 +5,7 @@ # This file is exec'ed in setup.py, don't import anything! # Same semantics as sys.version_info. 
-version_info = (4, 1, 0, 'alpha', 0) +version_info = (4, 1, 0, 'beta', 1) def _make_version(major, minor, micro, releaselevel, serial): -- cgit v1.2.1 From 55ab897f5d822920b3086be4677bd989a3f8b051 Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Sun, 10 Jan 2016 15:33:01 -0500 Subject: Class docstrings are executable. --- coverage/parser.py | 20 ++++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) (limited to 'coverage') diff --git a/coverage/parser.py b/coverage/parser.py index 501b76c..307b83e 100644 --- a/coverage/parser.py +++ b/coverage/parser.py @@ -125,6 +125,7 @@ class PythonParser(object): excluding = False excluding_decorators = False prev_toktype = token.INDENT + last_name = None first_line = None empty = True first_on_line = True @@ -146,6 +147,7 @@ class PythonParser(object): # we need to exclude them. The simplest way is to note the # lines with the 'class' keyword. self.raw_classdefs.add(slineno) + last_name = ttext elif toktype == token.OP: if ttext == ':': should_exclude = (elineno in self.raw_excluded) or excluding_decorators @@ -168,7 +170,8 @@ class PythonParser(object): # (a trick from trace.py in the stdlib.) This works for # 99.9999% of cases. For the rest (!) see: # http://stackoverflow.com/questions/1769332/x/1769794#1769794 - self.raw_docstrings.update(range(slineno, elineno+1)) + if last_name == 'def': + self.raw_docstrings.update(range(slineno, elineno+1)) elif toktype == token.NEWLINE: if first_line is not None and elineno != first_line: # We're at the end of a line, and we've ended on a @@ -334,6 +337,7 @@ class AstArcAnalyzer(object): if int(os.environ.get("COVERAGE_ASTDUMP", 0)): # pragma: debugging # Dump the AST so that failing tests have helpful output. 
print(self.statements) + print(self.multiline) ast_dump(self.root_node) self.arcs = None @@ -508,13 +512,13 @@ class AstArcAnalyzer(object): if dec_start != last: self.arcs.add((last, dec_start)) last = dec_start - # The definition line may have been missed, but we should have it in - # `self.statements`. - body_start = self.line_for_node(node.body[0]) - for lineno in range(last+1, body_start): - if lineno in self.statements: - self.arcs.add((last, lineno)) - last = lineno + # The definition line may have been missed, but we should have it in + # `self.statements`. + body_start = self.line_for_node(node.body[0]) + for lineno in range(last+1, body_start): + if lineno in self.statements: + self.arcs.add((last, lineno)) + last = lineno # the body is handled in add_arcs_for_code_objects. return set([last]) -- cgit v1.2.1 From c96db22f97e36a3ad99895a6e68c19752c203e9d Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Sun, 10 Jan 2016 17:41:13 -0500 Subject: Properly handle break/continue/raise/return from except/else clauses --- coverage/parser.py | 55 ++++++++++++++++++++++++++++++++++-------------------- 1 file changed, 35 insertions(+), 20 deletions(-) (limited to 'coverage') diff --git a/coverage/parser.py b/coverage/parser.py index 307b83e..756ec68 100644 --- a/coverage/parser.py +++ b/coverage/parser.py @@ -454,7 +454,7 @@ class AstArcAnalyzer(object): if isinstance(block, LoopBlock): block.break_exits.update(exits) break - elif isinstance(block, TryBlock) and block.final_start: + elif isinstance(block, TryBlock) and block.final_start is not None: block.break_from.update(exits) break @@ -465,7 +465,7 @@ class AstArcAnalyzer(object): for xit in exits: self.arcs.add((xit, block.start)) break - elif isinstance(block, TryBlock) and block.final_start: + elif isinstance(block, TryBlock) and block.final_start is not None: block.continue_from.update(exits) break @@ -473,11 +473,11 @@ class AstArcAnalyzer(object): """Add arcs due to jumps from `exits` being raises.""" for 
block in self.nearest_blocks(): if isinstance(block, TryBlock): - if block.handler_start: + if block.handler_start is not None: for xit in exits: self.arcs.add((xit, block.handler_start)) break - elif block.final_start: + elif block.final_start is not None: block.raise_from.update(exits) break elif isinstance(block, FunctionBlock): @@ -488,7 +488,7 @@ class AstArcAnalyzer(object): def process_return_exits(self, exits): """Add arcs due to jumps from `exits` being returns.""" for block in self.nearest_blocks(): - if isinstance(block, TryBlock) and block.final_start: + if isinstance(block, TryBlock) and block.final_start is not None: block.return_from.update(exits) break elif isinstance(block, FunctionBlock): @@ -568,9 +568,6 @@ class AstArcAnalyzer(object): return set() def _handle__Try(self, node): - # try/finally is tricky. If there's a finally clause, then we need a - # FinallyBlock to track what flows might go through the finally instead - # of their normal flow. if node.handlers: handler_start = self.line_for_node(node.handlers[0]) else: @@ -581,13 +578,27 @@ class AstArcAnalyzer(object): else: final_start = None - self.block_stack.append(TryBlock(handler_start=handler_start, final_start=final_start)) + try_block = TryBlock(handler_start=handler_start, final_start=final_start) + self.block_stack.append(try_block) start = self.line_for_node(node) exits = self.add_body_arcs(node.body, from_line=start) - try_block = self.block_stack.pop() + # We're done with the `try` body, so this block no longer handles + # exceptions. We keep the block so the `finally` clause can pick up + # flows from the handlers and `else` clause. + if node.finalbody: + try_block.handler_start = None + if node.handlers: + # If there are `except` clauses, then raises in the try body + # will already jump to them. Start this set over for raises in + # `except` and `else`. 
+ try_block.raise_from = set([]) + else: + self.block_stack.pop() + handler_exits = set() + last_handler_start = None if node.handlers: for handler_node in node.handlers: @@ -608,20 +619,23 @@ class AstArcAnalyzer(object): exits = self.add_body_arcs(node.orelse, prev_lines=exits) exits |= handler_exits + if node.finalbody: + self.block_stack.pop() final_from = ( # You can get to the `finally` clause from: exits | # the exits of the body or `else` clause, - try_block.break_from | # or a `break` in the body, - try_block.continue_from | # or a `continue` in the body, - try_block.return_from # or a `return` in the body. + try_block.break_from | # or a `break`, + try_block.continue_from | # or a `continue`, + try_block.raise_from | # or a `raise`, + try_block.return_from # or a `return`. ) - if node.handlers and last_handler_start is not None: - # If there was an "except X:" clause, then a "raise" in the - # body goes to the "except X:" before the "finally", but the - # "except" go to the finally. - final_from.add(last_handler_start) - else: - final_from |= try_block.raise_from + if node.handlers: + if last_handler_start is not None: + # If we had handlers, and we didn't have a bare `except:` + # handler, then the last handler jumps to the `finally` for the + # unhandled exceptions. 
+ final_from.add(last_handler_start) + exits = self.add_body_arcs(node.finalbody, prev_lines=final_from) if try_block.break_from: self.process_break_exits(exits) @@ -631,6 +645,7 @@ class AstArcAnalyzer(object): self.process_raise_exits(exits) if try_block.return_from: self.process_return_exits(exits) + return exits def _handle__TryExcept(self, node): -- cgit v1.2.1 From 036897dab970ce281160da3493cbf6a4f7eeb2d3 Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Sun, 10 Jan 2016 17:49:00 -0500 Subject: Clean up the ast analyzer a little --- coverage/parser.py | 34 +++++++++++++++++++++++----------- 1 file changed, 23 insertions(+), 11 deletions(-) (limited to 'coverage') diff --git a/coverage/parser.py b/coverage/parser.py index 756ec68..4a7bab5 100644 --- a/coverage/parser.py +++ b/coverage/parser.py @@ -340,12 +340,21 @@ class AstArcAnalyzer(object): print(self.multiline) ast_dump(self.root_node) - self.arcs = None + self.arcs = set() self.block_stack = [] def collect_arcs(self): - self.arcs = set() - self.add_arcs_for_code_objects(self.root_node) + """Examine the AST tree from `root_node` to determine possible arcs. + + Returns a set of (from, to) line number pairs. + + """ + for node in ast.walk(self.root_node): + node_name = node.__class__.__name__ + code_object_handler = getattr(self, "_code_object__" + node_name, None) + if code_object_handler is not None: + code_object_handler(node) + return self.arcs def nearest_blocks(self): @@ -419,6 +428,15 @@ class AstArcAnalyzer(object): return set([self.line_for_node(node)]) def add_body_arcs(self, body, from_line=None, prev_lines=None): + """Add arcs for the body of a compound statement. + + `body` is the body node. `from_line` is a single line that can be the + previous line in flow before this body. `prev_lines` is a set of lines + that can be the previous line. Only one of them should be given. + + Returns a set of lines, the exits from this body. 
+ + """ if prev_lines is None: prev_lines = set([from_line]) for body_node in body: @@ -519,7 +537,7 @@ class AstArcAnalyzer(object): if lineno in self.statements: self.arcs.add((last, lineno)) last = lineno - # the body is handled in add_arcs_for_code_objects. + # The body is handled in collect_arcs. return set([last]) _handle__ClassDef = _handle_decorated @@ -699,13 +717,6 @@ class AstArcAnalyzer(object): _handle__AsyncWith = _handle__With - def add_arcs_for_code_objects(self, root_node): - for node in ast.walk(root_node): - node_name = node.__class__.__name__ - code_object_handler = getattr(self, "_code_object__" + node_name, None) - if code_object_handler is not None: - code_object_handler(node) - def _code_object__Module(self, node): start = self.line_for_node(node) if node.body: @@ -735,6 +746,7 @@ class AstArcAnalyzer(object): self.arcs.add((xit, -start)) def do_code_object_comprehension(self, node): + """The common code for all comprehension nodes.""" start = self.line_for_node(node) self.arcs.add((-1, start)) self.arcs.add((start, -start)) -- cgit v1.2.1 From 81ac8aa0734621e2abc9ff605c56c2095d7ae905 Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Mon, 11 Jan 2016 05:50:32 -0500 Subject: Bump to 4.1b2 --- coverage/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'coverage') diff --git a/coverage/version.py b/coverage/version.py index 1907e1b..2ab668a 100644 --- a/coverage/version.py +++ b/coverage/version.py @@ -5,7 +5,7 @@ # This file is exec'ed in setup.py, don't import anything! # Same semantics as sys.version_info. -version_info = (4, 1, 0, 'beta', 1) +version_info = (4, 1, 0, 'beta', 2) def _make_version(major, minor, micro, releaselevel, serial): -- cgit v1.2.1 From 1a5d0393d3b5b6a9a9f5ecebd96325540f145a4b Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Mon, 11 Jan 2016 06:23:37 -0500 Subject: Class docstrings shouldn't be considered executable. Not sure what happened there... 
--- coverage/parser.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) (limited to 'coverage') diff --git a/coverage/parser.py b/coverage/parser.py index 4a7bab5..f0bfe61 100644 --- a/coverage/parser.py +++ b/coverage/parser.py @@ -125,7 +125,6 @@ class PythonParser(object): excluding = False excluding_decorators = False prev_toktype = token.INDENT - last_name = None first_line = None empty = True first_on_line = True @@ -147,7 +146,6 @@ class PythonParser(object): # we need to exclude them. The simplest way is to note the # lines with the 'class' keyword. self.raw_classdefs.add(slineno) - last_name = ttext elif toktype == token.OP: if ttext == ':': should_exclude = (elineno in self.raw_excluded) or excluding_decorators @@ -170,8 +168,7 @@ class PythonParser(object): # (a trick from trace.py in the stdlib.) This works for # 99.9999% of cases. For the rest (!) see: # http://stackoverflow.com/questions/1769332/x/1769794#1769794 - if last_name == 'def': - self.raw_docstrings.update(range(slineno, elineno+1)) + self.raw_docstrings.update(range(slineno, elineno+1)) elif toktype == token.NEWLINE: if first_line is not None and elineno != first_line: # We're at the end of a line, and we've ended on a -- cgit v1.2.1 From 233b6368556bd318396b534a7d98e781f12cf0a2 Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Sat, 16 Jan 2016 11:23:57 -0500 Subject: Stop treating yield-from and await as function returns --- coverage/parser.py | 17 ----------------- 1 file changed, 17 deletions(-) (limited to 'coverage') diff --git a/coverage/parser.py b/coverage/parser.py index f0bfe61..07cb75d 100644 --- a/coverage/parser.py +++ b/coverage/parser.py @@ -404,15 +404,6 @@ class AstArcAnalyzer(object): Return a set of line numbers, exits from this node to the next. """ - # Yield-froms and awaits can appear anywhere. - # TODO: this is probably over-doing it, and too expensive. Can we - # instrument the ast walking to see how many nodes we are revisiting? 
- if isinstance(node, ast.stmt): - for _, value in ast.iter_fields(node): - if isinstance(value, ast.expr) and self.contains_return_expression(value): - self.process_return_exits([self.line_for_node(node)]) - break - node_name = node.__class__.__name__ handler = getattr(self, "_handle__" + node_name, None) if handler is not None: @@ -759,14 +750,6 @@ class AstArcAnalyzer(object): self.arcs.add((-1, start)) self.arcs.add((start, -start)) - def contains_return_expression(self, node): - """Is there a yield-from or await in `node` someplace?""" - for child in ast.walk(node): - if child.__class__.__name__ in ["YieldFrom", "Await"]: - return True - - return False - class ByteParser(object): """Parse bytecode to understand the structure of code.""" -- cgit v1.2.1 From e27ab798b8efb4067ed0bafd75627ae724dcfad5 Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Sun, 17 Jan 2016 12:51:09 -0500 Subject: Plain-text output no longer includes trailing spaces --- coverage/summary.py | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) (limited to 'coverage') diff --git a/coverage/summary.py b/coverage/summary.py index 5ddbb38..94c4458 100644 --- a/coverage/summary.py +++ b/coverage/summary.py @@ -30,8 +30,8 @@ class SummaryReporter(Reporter): # Prepare the formatting strings max_name = max([len(fr.relative_filename()) for fr in self.file_reporters] + [5]) fmt_name = u"%%- %ds " % max_name - fmt_err = u"%s %s: %s\n" - fmt_skip_covered = u"\n%s file%s skipped due to complete coverage.\n" + fmt_err = u"%s %s: %s" + fmt_skip_covered = u"\n%s file%s skipped due to complete coverage." 
header = (fmt_name % "Name") + u" Stmts Miss" fmt_coverage = fmt_name + u"%6d %6d" @@ -44,17 +44,16 @@ class SummaryReporter(Reporter): if self.config.show_missing: header += u" Missing" fmt_coverage += u" %s" - rule = u"-" * len(header) + u"\n" - header += u"\n" - fmt_coverage += u"\n" + rule = u"-" * len(header) if outfile is None: outfile = sys.stdout - if env.PY2: - writeout = lambda u: outfile.write(u.encode(output_encoding())) - else: - writeout = outfile.write + def writeout(line): + if env.PY2: + line = line.encode(output_encoding()) + outfile.write(line.rstrip()) + outfile.write("\n") # Write the header writeout(header) -- cgit v1.2.1 From b0bf6360206d0789ebb4f8110ac5c7133bfdc22b Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Mon, 18 Jan 2016 18:54:41 -0500 Subject: Add some control over how many stack frames are shown --- coverage/debug.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) (limited to 'coverage') diff --git a/coverage/debug.py b/coverage/debug.py index 4076b9b..719e97b 100644 --- a/coverage/debug.py +++ b/coverage/debug.py @@ -42,7 +42,7 @@ class DebugControl(object): msg = "pid %5d: %s" % (os.getpid(), msg) self.output.write(msg+"\n") if self.should('callers'): - dump_stack_frames(self.output) + dump_stack_frames(out=self.output) self.output.flush() def write_formatted_info(self, header, info): @@ -80,7 +80,7 @@ def info_formatter(info): yield "%*s: %s" % (label_len, label, data) -def short_stack(): # pragma: debugging +def short_stack(limit=None): # pragma: debugging """Return a string summarizing the call stack. The string is multi-line, with one line per stack frame. Each line shows @@ -92,13 +92,15 @@ def short_stack(): # pragma: debugging import_local_file : /Users/ned/coverage/trunk/coverage/backward.py @159 ... + `limit` is the number of frames to include, defaulting to all of them. 
+ """ - stack = inspect.stack()[:0:-1] + stack = inspect.stack()[limit:0:-1] return "\n".join("%30s : %s @%d" % (t[3], t[1], t[2]) for t in stack) -def dump_stack_frames(out=None): # pragma: debugging +def dump_stack_frames(limit=None, out=None): # pragma: debugging """Print a summary of the stack to stdout, or some place else.""" out = out or sys.stdout - out.write(short_stack()) + out.write(short_stack(limit=limit)) out.write("\n") -- cgit v1.2.1 From c67e5b03f367654f3cb9c130a87dbe1df3e618af Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Mon, 18 Jan 2016 18:59:03 -0500 Subject: Add a missing docstring. --- coverage/summary.py | 1 + 1 file changed, 1 insertion(+) (limited to 'coverage') diff --git a/coverage/summary.py b/coverage/summary.py index 94c4458..9227055 100644 --- a/coverage/summary.py +++ b/coverage/summary.py @@ -50,6 +50,7 @@ class SummaryReporter(Reporter): outfile = sys.stdout def writeout(line): + """Write a line to the output, adding a newline.""" if env.PY2: line = line.encode(output_encoding()) outfile.write(line.rstrip()) -- cgit v1.2.1 From 95c9930f5c936571f32b873c6a9110204a2404ec Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Mon, 18 Jan 2016 19:24:50 -0500 Subject: Fix #466: multi-line statements first in decorated functions Also, leave in the SetSpy tracer we've used before to find things like this. 
--- coverage/parser.py | 32 +++++++++++++++++++++++++------- 1 file changed, 25 insertions(+), 7 deletions(-) (limited to 'coverage') diff --git a/coverage/parser.py b/coverage/parser.py index 07cb75d..2d0bceb 100644 --- a/coverage/parser.py +++ b/coverage/parser.py @@ -321,6 +321,19 @@ class TryBlock(object): self.raise_from = set() +class SetSpy(object): # pragma: debugging + """A set proxy that shows who is adding things to it.""" + def __init__(self, the_set): + self.the_set = the_set + + def add(self, arc): + """set.add, but with a stack trace.""" + from coverage.debug import short_stack + print("\nAdding arc: {}".format(arc)) + print(short_stack(limit=6)) + self.the_set.add(arc) + + class AstArcAnalyzer(object): """Analyze source text with an AST to find executable code paths.""" @@ -333,11 +346,13 @@ class AstArcAnalyzer(object): if int(os.environ.get("COVERAGE_ASTDUMP", 0)): # pragma: debugging # Dump the AST so that failing tests have helpful output. - print(self.statements) - print(self.multiline) + print("Statements: {}".format(self.statements)) + print("Multiline map: {}".format(self.multiline)) ast_dump(self.root_node) - self.arcs = set() + self.arcs = self.arcs_to_return = set() + if int(os.environ.get("COVERAGE_TRACK_ARCS", 0)): # pragma: debugging + self.arcs = SetSpy(self.arcs) self.block_stack = [] def collect_arcs(self): @@ -352,7 +367,7 @@ class AstArcAnalyzer(object): if code_object_handler is not None: code_object_handler(node) - return self.arcs + return self.arcs_to_return def nearest_blocks(self): """Yield the blocks in nearest-to-farthest order.""" @@ -517,10 +532,13 @@ class AstArcAnalyzer(object): dec_start = self.line_for_node(dec_node) if dec_start != last: self.arcs.add((last, dec_start)) - last = dec_start - # The definition line may have been missed, but we should have it in - # `self.statements`. + last = dec_start + # The definition line may have been missed, but we should have it + # in `self.statements`. 
For some constructs, `line_for_node` is + # not what we'd think of as the first line in the statement, so map + # it to the first one. body_start = self.line_for_node(node.body[0]) + body_start = self.multiline.get(body_start, body_start) for lineno in range(last+1, body_start): if lineno in self.statements: self.arcs.add((last, lineno)) -- cgit v1.2.1 From d1ae46f93594f91cba299644c25852a3230d6e26 Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Sat, 23 Jan 2016 14:25:17 -0500 Subject: Do a better job with sources in the XML report --- coverage/xmlreport.py | 28 ++++++++++++++++------------ 1 file changed, 16 insertions(+), 12 deletions(-) (limited to 'coverage') diff --git a/coverage/xmlreport.py b/coverage/xmlreport.py index 50a4684..1408bec 100644 --- a/coverage/xmlreport.py +++ b/coverage/xmlreport.py @@ -11,6 +11,7 @@ import xml.dom.minidom from coverage import env from coverage import __url__, __version__, files +from coverage.backward import iitems from coverage.misc import isolate_module from coverage.report import Reporter @@ -91,15 +92,14 @@ class XmlReporter(Reporter): xcoverage.appendChild(xpackages) # Populate the XML DOM with the package info. - for pkg_name in sorted(self.packages.keys()): - pkg_data = self.packages[pkg_name] + for pkg_name, pkg_data in sorted(iitems(self.packages)): class_elts, lhits, lnum, bhits, bnum = pkg_data xpackage = self.xml_out.createElement("package") xpackages.appendChild(xpackage) xclasses = self.xml_out.createElement("classes") xpackage.appendChild(xclasses) - for class_name in sorted(class_elts.keys()): - xclasses.appendChild(class_elts[class_name]) + for class_name, class_elt in sorted(iitems(class_elts)): + xclasses.appendChild(class_elt) xpackage.setAttribute("name", pkg_name.replace(os.sep, '.')) xpackage.setAttribute("line-rate", rate(lhits, lnum)) if self.has_arcs: @@ -140,13 +140,17 @@ class XmlReporter(Reporter): # Create the 'lines' and 'package' XML elements, which # are populated later. 
Note that a package == a directory. - filename = fr.relative_filename() - filename = filename.replace("\\", "/") - dirname = os.path.dirname(filename) or "." - parts = dirname.split("/") - dirname = "/".join(parts[:self.config.xml_package_depth]) + filename = fr.filename.replace("\\", "/") + for source_path in self.source_paths: + if filename.startswith(source_path+"/"): + rel_name = fr.filename[len(source_path)+1:] + break + else: + rel_name = fr.relative_filename() + + dirname = os.path.dirname(rel_name) or "." + dirname = "/".join(dirname.split("/")[:self.config.xml_package_depth]) package_name = dirname.replace("/", ".") - rel_name = fr.relative_filename() if rel_name != fr.filename: self.source_paths.add(fr.filename[:-len(rel_name)].rstrip(r"\/")) @@ -159,8 +163,8 @@ class XmlReporter(Reporter): xlines = self.xml_out.createElement("lines") xclass.appendChild(xlines) - xclass.setAttribute("name", os.path.relpath(filename, dirname)) - xclass.setAttribute("filename", filename) + xclass.setAttribute("name", os.path.relpath(rel_name, dirname)) + xclass.setAttribute("filename", fr.relative_filename()) xclass.setAttribute("complexity", "0") branch_stats = analysis.branch_stats() -- cgit v1.2.1 From 493d333685c552bc5e67ebb870195298fec4a29e Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Sat, 23 Jan 2016 15:15:53 -0500 Subject: Windows fixes --- coverage/xmlreport.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) (limited to 'coverage') diff --git a/coverage/xmlreport.py b/coverage/xmlreport.py index 1408bec..d27c07b 100644 --- a/coverage/xmlreport.py +++ b/coverage/xmlreport.py @@ -142,8 +142,8 @@ class XmlReporter(Reporter): # are populated later. Note that a package == a directory. 
filename = fr.filename.replace("\\", "/") for source_path in self.source_paths: - if filename.startswith(source_path+"/"): - rel_name = fr.filename[len(source_path)+1:] + if filename.startswith(source_path.replace("\\", "/") + "/"): + rel_name = filename[len(source_path)+1:] break else: rel_name = fr.relative_filename() @@ -164,7 +164,7 @@ class XmlReporter(Reporter): xclass.appendChild(xlines) xclass.setAttribute("name", os.path.relpath(rel_name, dirname)) - xclass.setAttribute("filename", fr.relative_filename()) + xclass.setAttribute("filename", fr.relative_filename().replace("\\", "/")) xclass.setAttribute("complexity", "0") branch_stats = analysis.branch_stats() -- cgit v1.2.1 From 92e5ff9298cebaae6637455810b4d97b652e8448 Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Sat, 23 Jan 2016 15:30:15 -0500 Subject: Remove an unused variable --- coverage/xmlreport.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'coverage') diff --git a/coverage/xmlreport.py b/coverage/xmlreport.py index 1408bec..460c531 100644 --- a/coverage/xmlreport.py +++ b/coverage/xmlreport.py @@ -98,7 +98,7 @@ class XmlReporter(Reporter): xpackages.appendChild(xpackage) xclasses = self.xml_out.createElement("classes") xpackage.appendChild(xclasses) - for class_name, class_elt in sorted(iitems(class_elts)): + for _, class_elt in sorted(iitems(class_elts)): xclasses.appendChild(class_elt) xpackage.setAttribute("name", pkg_name.replace(os.sep, '.')) xpackage.setAttribute("line-rate", rate(lhits, lnum)) -- cgit v1.2.1 From 2c7f6b2141c6d4eaa83bdb87b500ffd982bdea40 Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Sat, 23 Jan 2016 19:31:21 -0500 Subject: Bump to 4.1c1 --- coverage/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'coverage') diff --git a/coverage/version.py b/coverage/version.py index 2ab668a..fd75e16 100644 --- a/coverage/version.py +++ b/coverage/version.py @@ -5,7 +5,7 @@ # This file is exec'ed in setup.py, don't import anything! 
# Same semantics as sys.version_info. -version_info = (4, 1, 0, 'beta', 2) +version_info = (4, 1, 0, 'candidate', 1) def _make_version(major, minor, micro, releaselevel, serial): -- cgit v1.2.1 From 13fa2276af4619c51aa9c538b2859612ec197eb4 Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Sun, 24 Jan 2016 10:04:58 -0500 Subject: Let's call it beta 3 until we learn more --- coverage/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'coverage') diff --git a/coverage/version.py b/coverage/version.py index fd75e16..e0302dc 100644 --- a/coverage/version.py +++ b/coverage/version.py @@ -5,7 +5,7 @@ # This file is exec'ed in setup.py, don't import anything! # Same semantics as sys.version_info. -version_info = (4, 1, 0, 'candidate', 1) +version_info = (4, 1, 0, 'beta', 3) def _make_version(major, minor, micro, releaselevel, serial): -- cgit v1.2.1 From 09184a99a25fa8fa267a2ac1320abb71fcad5079 Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Wed, 27 Jan 2016 06:09:20 -0500 Subject: Simplify a condition --- coverage/parser.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) (limited to 'coverage') diff --git a/coverage/parser.py b/coverage/parser.py index 2d0bceb..1a2ff5f 100644 --- a/coverage/parser.py +++ b/coverage/parser.py @@ -653,12 +653,11 @@ class AstArcAnalyzer(object): try_block.raise_from | # or a `raise`, try_block.return_from # or a `return`. ) - if node.handlers: - if last_handler_start is not None: - # If we had handlers, and we didn't have a bare `except:` - # handler, then the last handler jumps to the `finally` for the - # unhandled exceptions. - final_from.add(last_handler_start) + if last_handler_start is not None: + # If we had handlers, and we didn't have a bare `except:` + # handler, then the last handler jumps to the `finally` for the + # unhandled exceptions. 
+ final_from.add(last_handler_start) exits = self.add_body_arcs(node.finalbody, prev_lines=final_from) if try_block.break_from: -- cgit v1.2.1 From 87453c9c87650976092f3172c370b5aba589159c Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Fri, 29 Jan 2016 10:41:37 -0500 Subject: How to flag uncaught exception branches --- coverage/parser.py | 5 +++++ 1 file changed, 5 insertions(+) (limited to 'coverage') diff --git a/coverage/parser.py b/coverage/parser.py index 1a2ff5f..27c1743 100644 --- a/coverage/parser.py +++ b/coverage/parser.py @@ -668,6 +668,11 @@ class AstArcAnalyzer(object): self.process_raise_exits(exits) if try_block.return_from: self.process_return_exits(exits) + else: + # No final body: if there is an `except` handler without a + # catch-all, then exceptions can raise from there. + if last_handler_start is not None: + self.process_raise_exits([last_handler_start]) return exits -- cgit v1.2.1 From 52e2fceb439de9867a4391065558adb27727f5da Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Fri, 29 Jan 2016 11:26:18 -0500 Subject: Don't track unhandled exception branches --- coverage/parser.py | 17 ----------------- 1 file changed, 17 deletions(-) (limited to 'coverage') diff --git a/coverage/parser.py b/coverage/parser.py index 27c1743..17f1f0d 100644 --- a/coverage/parser.py +++ b/coverage/parser.py @@ -631,13 +631,6 @@ class AstArcAnalyzer(object): self.arcs.add((last_handler_start, handler_start)) last_handler_start = handler_start handler_exits |= self.add_body_arcs(handler_node.body, from_line=handler_start) - if handler_node.type is None: - # "except:" doesn't jump to subsequent handlers, or - # "finally:". - last_handler_start = None - # Note that once we have `except:`, no further handlers - # will ever be run. We'll keep collecting them, and the - # code will get marked as not run. 
if node.orelse: exits = self.add_body_arcs(node.orelse, prev_lines=exits) @@ -653,11 +646,6 @@ class AstArcAnalyzer(object): try_block.raise_from | # or a `raise`, try_block.return_from # or a `return`. ) - if last_handler_start is not None: - # If we had handlers, and we didn't have a bare `except:` - # handler, then the last handler jumps to the `finally` for the - # unhandled exceptions. - final_from.add(last_handler_start) exits = self.add_body_arcs(node.finalbody, prev_lines=final_from) if try_block.break_from: @@ -668,11 +656,6 @@ class AstArcAnalyzer(object): self.process_raise_exits(exits) if try_block.return_from: self.process_return_exits(exits) - else: - # No final body: if there is an `except` handler without a - # catch-all, then exceptions can raise from there. - if last_handler_start is not None: - self.process_raise_exits([last_handler_start]) return exits -- cgit v1.2.1 From 903d797950b0d9c8a96bab36e246121c6143b675 Mon Sep 17 00:00:00 2001 From: Ned Batchelder Date: Sat, 30 Jan 2016 10:33:41 -0500 Subject: Clean up in the C code --- coverage/ctracer/tracer.c | 3 +-- coverage/ctracer/util.h | 46 ++++++++++++++++++++++------------------------ 2 files changed, 23 insertions(+), 26 deletions(-) (limited to 'coverage') diff --git a/coverage/ctracer/tracer.c b/coverage/ctracer/tracer.c index 25036f9..681c9a9 100644 --- a/coverage/ctracer/tracer.c +++ b/coverage/ctracer/tracer.c @@ -842,7 +842,7 @@ cleanup: * PyEval_SetTrace. So sys.gettrace() will return our self parameter, which * means it must be callable to be used in sys.settrace(). * - * So we make our self callable, equivalent to invoking our trace function. + * So we make ourself callable, equivalent to invoking our trace function. 
* * To help with the process of replaying stored frames, this function has an * optional keyword argument: @@ -946,7 +946,6 @@ CTracer_start(CTracer *self, PyObject *args_unused) PyEval_SetTrace((Py_tracefunc)CTracer_trace, (PyObject*)self); self->started = 1; self->tracing_arcs = self->trace_arcs && PyObject_IsTrue(self->trace_arcs); - self->cur_entry.last_line = -1; /* start() returns a trace function usable with sys.settrace() */ Py_INCREF(self); diff --git a/coverage/ctracer/util.h b/coverage/ctracer/util.h index bb3ad5a..1f53619 100644 --- a/coverage/ctracer/util.h +++ b/coverage/ctracer/util.h @@ -15,33 +15,31 @@ #if PY_MAJOR_VERSION >= 3 -#define MyText_Type PyUnicode_Type -#define MyText_AS_BYTES(o) PyUnicode_AsASCIIString(o) -#define MyBytes_GET_SIZE(o) PyBytes_GET_SIZE(o) -#define MyBytes_AS_STRING(o) PyBytes_AS_STRING(o) -#define MyText_AsString(o) PyUnicode_AsUTF8(o) -#define MyText_FromFormat PyUnicode_FromFormat -#define MyInt_FromInt(i) PyLong_FromLong((long)i) -#define MyInt_AsInt(o) (int)PyLong_AsLong(o) -#define MyText_InternFromString(s) \ - PyUnicode_InternFromString(s) - -#define MyType_HEAD_INIT PyVarObject_HEAD_INIT(NULL, 0) +#define MyText_Type PyUnicode_Type +#define MyText_AS_BYTES(o) PyUnicode_AsASCIIString(o) +#define MyBytes_GET_SIZE(o) PyBytes_GET_SIZE(o) +#define MyBytes_AS_STRING(o) PyBytes_AS_STRING(o) +#define MyText_AsString(o) PyUnicode_AsUTF8(o) +#define MyText_FromFormat PyUnicode_FromFormat +#define MyInt_FromInt(i) PyLong_FromLong((long)i) +#define MyInt_AsInt(o) (int)PyLong_AsLong(o) +#define MyText_InternFromString(s) PyUnicode_InternFromString(s) + +#define MyType_HEAD_INIT PyVarObject_HEAD_INIT(NULL, 0) #else -#define MyText_Type PyString_Type -#define MyText_AS_BYTES(o) (Py_INCREF(o), o) -#define MyBytes_GET_SIZE(o) PyString_GET_SIZE(o) -#define MyBytes_AS_STRING(o) PyString_AS_STRING(o) -#define MyText_AsString(o) PyString_AsString(o) -#define MyText_FromFormat PyUnicode_FromFormat -#define MyInt_FromInt(i) 
PyInt_FromLong((long)i) -#define MyInt_AsInt(o) (int)PyInt_AsLong(o) -#define MyText_InternFromString(s) \ - PyString_InternFromString(s) - -#define MyType_HEAD_INIT PyObject_HEAD_INIT(NULL) 0, +#define MyText_Type PyString_Type +#define MyText_AS_BYTES(o) (Py_INCREF(o), o) +#define MyBytes_GET_SIZE(o) PyString_GET_SIZE(o) +#define MyBytes_AS_STRING(o) PyString_AS_STRING(o) +#define MyText_AsString(o) PyString_AsString(o) +#define MyText_FromFormat PyUnicode_FromFormat +#define MyInt_FromInt(i) PyInt_FromLong((long)i) +#define MyInt_AsInt(o) (int)PyInt_AsLong(o) +#define MyText_InternFromString(s) PyString_InternFromString(s) + +#define MyType_HEAD_INIT PyObject_HEAD_INIT(NULL) 0, #endif /* Py3k */ -- cgit v1.2.1