author     David A. Riggs <david.a.riggs@gmail.com>  2017-01-28 07:03:33 -0800
committer  Eli Bendersky <eliben@users.noreply.github.com>  2017-01-28 07:03:33 -0800
commit     8e7d9555e7206b99f3802f89972cded9a0e254bb (patch)
tree       e93d130907b5c04054d73078c506b806c81a65e5
parent     44137334bac69df72c6378fa84931006179d8bdf (diff)
Merge upstream PLY 3.9 into pycparser tree. (#161)
* Merge upstream PLY 3.9 into pycparser tree.
-rw-r--r--  pycparser/ply/LICENSE       4
-rw-r--r--  pycparser/ply/__init__.py   2
-rw-r--r--  pycparser/ply/cpp.py       13
-rw-r--r--  pycparser/ply/lex.py       11
-rw-r--r--  pycparser/ply/yacc.py      73
5 files changed, 68 insertions(+), 35 deletions(-)
diff --git a/pycparser/ply/LICENSE b/pycparser/ply/LICENSE
index 4efebae..21d5c35 100644
--- a/pycparser/ply/LICENSE
+++ b/pycparser/ply/LICENSE
@@ -1,6 +1,6 @@
-PLY (Python Lex-Yacc) Version 3.8
+PLY (Python Lex-Yacc) Version 3.9
-Copyright (C) 2001-2015,
+Copyright (C) 2001-2016,
David M. Beazley (Dabeaz LLC)
All rights reserved.
diff --git a/pycparser/ply/__init__.py b/pycparser/ply/__init__.py
index 2461a44..6e53cdd 100644
--- a/pycparser/ply/__init__.py
+++ b/pycparser/ply/__init__.py
@@ -1,5 +1,5 @@
# PLY package
# Author: David Beazley (dave@dabeaz.com)
-__version__ = '3.7'
+__version__ = '3.9'
__all__ = ['lex','yacc']
diff --git a/pycparser/ply/cpp.py b/pycparser/ply/cpp.py
index 2f6a030..ade2987 100644
--- a/pycparser/ply/cpp.py
+++ b/pycparser/ply/cpp.py
@@ -9,6 +9,15 @@
# -----------------------------------------------------------------------------
from __future__ import generators
+import sys
+
+# Some Python 3 compatibility shims
+if sys.version_info.major < 3:
+ STRING_TYPES = (str, unicode)
+else:
+ STRING_TYPES = str
+ xrange = range
+
# -----------------------------------------------------------------------------
# Default preprocessor lexer definitions. These tokens are enough to get
# a basic preprocessor working. Other modules may import these if they want
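
The added block is the standard version-probing shim: check sys.version_info once at import time and bind names that paper over the Python 2/3 split, so the rest of cpp.py can stay version-agnostic. A minimal sketch of the pattern in isolation (is_text is a hypothetical helper, not part of ply.cpp):

    import sys

    if sys.version_info.major < 3:
        STRING_TYPES = (str, unicode)  # Py2: byte strings and unicode are distinct
    else:
        STRING_TYPES = str             # Py3: str is the only text type
        xrange = range                 # Py3 dropped xrange; alias it away

    def is_text(obj):
        # Hypothetical helper: one isinstance call works on both interpreters
        return isinstance(obj, STRING_TYPES)
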
@@ -590,7 +599,7 @@ class Preprocessor(object):
expr = expr.replace("!"," not ")
try:
result = eval(expr)
- except StandardError:
+ except Exception:
self.error(self.source,tokens[0].lineno,"Couldn't evaluate expression")
result = 0
return result
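
StandardError only exists on Python 2, where it sits between Exception and most concrete built-in errors; on Python 3 the bare name raises NameError the moment the except clause is evaluated. Exception is the portable replacement and on Python 2 is only marginally broader:

    try:
        result = eval(expr)   # expr is the rewritten #if expression
    except Exception:         # 'except StandardError' would NameError on Py3
        result = 0
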
@@ -781,7 +790,7 @@ class Preprocessor(object):
# ----------------------------------------------------------------------
def define(self,tokens):
- if isinstance(tokens,(str,unicode)):
+ if isinstance(tokens,STRING_TYPES):
tokens = self.tokenize(tokens)
linetok = tokens
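
With the isinstance check routed through STRING_TYPES, define() keeps accepting either a raw definition string or an already-tokenized list on both interpreters. A usage sketch, assuming the module-level token rules in cpp.py suffice to build a standalone lexer (they are what cpp.py's own __main__ block uses):

    from pycparser.ply import lex
    from pycparser.ply import cpp

    lexer = lex.lex(module=cpp)        # lexer built from cpp.py's t_CPP_* rules
    p = cpp.Preprocessor(lexer)
    p.define("SQUARE(x) ((x)*(x))")    # string form is tokenized internally
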
diff --git a/pycparser/ply/lex.py b/pycparser/ply/lex.py
index 3a40f62..0f3e464 100644
--- a/pycparser/ply/lex.py
+++ b/pycparser/ply/lex.py
@@ -1,7 +1,7 @@
# -----------------------------------------------------------------------------
# ply: lex.py
#
-# Copyright (C) 2001-2015,
+# Copyright (C) 2001-2016
# David M. Beazley (Dabeaz LLC)
# All rights reserved.
#
@@ -31,7 +31,7 @@
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# -----------------------------------------------------------------------------
-__version__ = '3.8'
+__version__ = '3.9'
__tabversion__ = '3.8'
import re
@@ -179,7 +179,7 @@ class Lexer:
with open(filename, 'w') as tf:
tf.write('# %s.py. This file automatically created by PLY (version %s). Don\'t edit!\n' % (basetabmodule, __version__))
tf.write('_tabversion = %s\n' % repr(__tabversion__))
- tf.write('_lextokens = %s\n' % repr(self.lextokens))
+ tf.write('_lextokens = set(%s)\n' % repr(tuple(self.lextokens)))
tf.write('_lexreflags = %s\n' % repr(self.lexreflags))
tf.write('_lexliterals = %s\n' % repr(self.lexliterals))
tf.write('_lexstateinfo = %s\n' % repr(self.lexstateinfo))
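
repr() of a set is not stable across interpreters: Python 2 prints set(['ID', 'NUMBER']) where Python 3 prints {'ID', 'NUMBER'}. Serializing through a tuple literal makes the generated lextab read back the same way everywhere. An illustrative comparison (token names are made up):

    toks = {'ID', 'NUMBER'}
    repr(toks)                       # Py2: "set(['ID', 'NUMBER'])"; Py3: "{'ID', 'NUMBER'}"
    'set(%s)' % repr(tuple(toks))    # both: "set(('ID', 'NUMBER'))", element order aside
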
@@ -830,7 +830,10 @@ class LexerReflect(object):
# -----------------------------------------------------------------------------
def validate_module(self, module):
- lines, linen = inspect.getsourcelines(module)
+ try:
+ lines, linen = inspect.getsourcelines(module)
+ except IOError:
+ return
fre = re.compile(r'\s*def\s+(t_[a-zA-Z_0-9]*)\(')
sre = re.compile(r'\s*(t_[a-zA-Z_0-9]*)\s*=')
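
inspect.getsourcelines() raises IOError (merged into OSError on Python 3) whenever a module's source is not on disk, e.g. in a frozen executable or a .pyc-only install; PLY previously crashed there and now simply skips source-level validation. The guard pattern in isolation:

    import inspect

    def safe_sourcelines(module):
        # Return (lines, start_lineno), or None when source is unavailable
        try:
            return inspect.getsourcelines(module)
        except IOError:   # alias of OSError on Py3, so this catches both
            return None
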
diff --git a/pycparser/ply/yacc.py b/pycparser/ply/yacc.py
index e7f36aa..6842832 100644
--- a/pycparser/ply/yacc.py
+++ b/pycparser/ply/yacc.py
@@ -1,7 +1,7 @@
# -----------------------------------------------------------------------------
# ply: yacc.py
#
-# Copyright (C) 2001-2015,
+# Copyright (C) 2001-2016
# David M. Beazley (Dabeaz LLC)
# All rights reserved.
#
@@ -67,7 +67,7 @@ import inspect
import base64
import warnings
-__version__ = '3.8'
+__version__ = '3.9'
__tabversion__ = '3.8'
#-----------------------------------------------------------------------------
@@ -497,8 +497,9 @@ class LRParser:
try:
# Call the grammar rule with our special slice object
del symstack[-plen:]
- del statestack[-plen:]
+ self.state = state
p.callable(pslice)
+ del statestack[-plen:]
#--! DEBUG
debug.info('Result : %s', format_result(pslice[0]))
#--! DEBUG
@@ -507,14 +508,16 @@ class LRParser:
statestack.append(state)
except SyntaxError:
# If an error was set. Enter error recovery state
- lookaheadstack.append(lookahead)
- symstack.pop()
- statestack.pop()
+ lookaheadstack.append(lookahead) # Save the current lookahead token
+ symstack.extend(targ[1:-1]) # Put the production slice back on the stack
+ statestack.pop() # Pop back one state (before the reduce)
state = statestack[-1]
sym.type = 'error'
+ sym.value = 'error'
lookahead = sym
errorcount = error_count
self.errorok = False
+
continue
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
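
Two behavioral fixes land in each reduce path of this method (and are mirrored in the parseopt and parseopt_notrack hunks below). First, self.state is published before the rule body runs, so actions can see the current LR state. Second, when a rule raises SyntaxError, the right-hand-side symbols (targ[1:-1]) go back onto symstack instead of a single blind pop, leaving the stacks consistent for error recovery. A rule can opt into that recovery like so (validate is a hypothetical semantic check):

    def p_declaration(p):
        'declaration : type_spec ID SEMI'
        if not validate(p[1], p[2]):   # hypothetical semantic check
            raise SyntaxError          # hands control to PLY's error recovery
        p[0] = (p[1], p[2])
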
@@ -537,6 +540,7 @@ class LRParser:
try:
# Call the grammar rule with our special slice object
+ self.state = state
p.callable(pslice)
#--! DEBUG
debug.info('Result : %s', format_result(pslice[0]))
@@ -546,14 +550,15 @@ class LRParser:
statestack.append(state)
except SyntaxError:
# If an error was set. Enter error recovery state
- lookaheadstack.append(lookahead)
- symstack.pop()
- statestack.pop()
+ lookaheadstack.append(lookahead) # Save the current lookahead token
+ statestack.pop() # Pop back one state (before the reduce)
state = statestack[-1]
sym.type = 'error'
+ sym.value = 'error'
lookahead = sym
errorcount = error_count
self.errorok = False
+
continue
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
@@ -592,6 +597,7 @@ class LRParser:
if self.errorfunc:
if errtoken and not hasattr(errtoken, 'lexer'):
errtoken.lexer = lexer
+ self.state = state
tok = call_errorfunc(self.errorfunc, errtoken, self)
if self.errorok:
# User must have done some kind of panic
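
The same self.state bookkeeping now happens just before the user's error hook is invoked, so p_error can report, or branch on, the LR state that rejected the token. A sketch, assuming the usual module-level parser returned by yacc.yacc():

    def p_error(tok):
        if tok:
            print("syntax error at %r (parser state %d)" % (tok.value, parser.state))
        else:
            print("syntax error at EOF (parser state %d)" % parser.state)

    parser = yacc.yacc()
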
@@ -811,21 +817,24 @@ class LRParser:
try:
# Call the grammar rule with our special slice object
del symstack[-plen:]
- del statestack[-plen:]
+ self.state = state
p.callable(pslice)
+ del statestack[-plen:]
symstack.append(sym)
state = goto[statestack[-1]][pname]
statestack.append(state)
except SyntaxError:
# If an error was set. Enter error recovery state
- lookaheadstack.append(lookahead)
- symstack.pop()
- statestack.pop()
+ lookaheadstack.append(lookahead) # Save the current lookahead token
+ symstack.extend(targ[1:-1]) # Put the production slice back on the stack
+ statestack.pop() # Pop back one state (before the reduce)
state = statestack[-1]
sym.type = 'error'
+ sym.value = 'error'
lookahead = sym
errorcount = error_count
self.errorok = False
+
continue
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
@@ -848,20 +857,22 @@ class LRParser:
try:
# Call the grammar rule with our special slice object
+ self.state = state
p.callable(pslice)
symstack.append(sym)
state = goto[statestack[-1]][pname]
statestack.append(state)
except SyntaxError:
# If an error was set. Enter error recovery state
- lookaheadstack.append(lookahead)
- symstack.pop()
- statestack.pop()
+ lookaheadstack.append(lookahead) # Save the current lookahead token
+ statestack.pop() # Pop back one state (before the reduce)
state = statestack[-1]
sym.type = 'error'
+ sym.value = 'error'
lookahead = sym
errorcount = error_count
self.errorok = False
+
continue
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
@@ -892,6 +903,7 @@ class LRParser:
if self.errorfunc:
if errtoken and not hasattr(errtoken, 'lexer'):
errtoken.lexer = lexer
+ self.state = state
tok = call_errorfunc(self.errorfunc, errtoken, self)
if self.errorok:
# User must have done some kind of panic
@@ -1102,21 +1114,24 @@ class LRParser:
try:
# Call the grammar rule with our special slice object
del symstack[-plen:]
- del statestack[-plen:]
+ self.state = state
p.callable(pslice)
+ del statestack[-plen:]
symstack.append(sym)
state = goto[statestack[-1]][pname]
statestack.append(state)
except SyntaxError:
# If an error was set. Enter error recovery state
- lookaheadstack.append(lookahead)
- symstack.pop()
- statestack.pop()
+ lookaheadstack.append(lookahead) # Save the current lookahead token
+ symstack.extend(targ[1:-1]) # Put the production slice back on the stack
+ statestack.pop() # Pop back one state (before the reduce)
state = statestack[-1]
sym.type = 'error'
+ sym.value = 'error'
lookahead = sym
errorcount = error_count
self.errorok = False
+
continue
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
@@ -1134,20 +1149,22 @@ class LRParser:
try:
# Call the grammar rule with our special slice object
+ self.state = state
p.callable(pslice)
symstack.append(sym)
state = goto[statestack[-1]][pname]
statestack.append(state)
except SyntaxError:
# If an error was set. Enter error recovery state
- lookaheadstack.append(lookahead)
- symstack.pop()
- statestack.pop()
+ lookaheadstack.append(lookahead) # Save the current lookahead token
+ statestack.pop() # Pop back one state (before the reduce)
state = statestack[-1]
sym.type = 'error'
+ sym.value = 'error'
lookahead = sym
errorcount = error_count
self.errorok = False
+
continue
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
@@ -1178,6 +1195,7 @@ class LRParser:
if self.errorfunc:
if errtoken and not hasattr(errtoken, 'lexer'):
errtoken.lexer = lexer
+ self.state = state
tok = call_errorfunc(self.errorfunc, errtoken, self)
if self.errorok:
# User must have done some kind of panic
@@ -1982,7 +2000,7 @@ class LRTable(object):
import cPickle as pickle
except ImportError:
import pickle
-
+
if not os.path.exists(filename):
raise ImportError
@@ -2979,7 +2997,10 @@ class ParserReflect(object):
fre = re.compile(r'\s*def\s+(p_[a-zA-Z_0-9]*)\(')
for module in self.modules:
- lines, linen = inspect.getsourcelines(module)
+ try:
+ lines, linen = inspect.getsourcelines(module)
+ except IOError:
+ continue
counthash = {}
for linen, line in enumerate(lines):
@@ -3107,7 +3128,7 @@ class ParserReflect(object):
if not name.startswith('p_') or name == 'p_error':
continue
if isinstance(item, (types.FunctionType, types.MethodType)):
- line = item.__code__.co_firstlineno
+ line = getattr(item, 'co_firstlineno', item.__code__.co_firstlineno)
module = inspect.getmodule(item)
p_functions.append((line, module, name, item.__doc__))
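
get_pfunctions now prefers a co_firstlineno attribute set on the function object itself, falling back to the code object's real first line. That gives wrappers around rule functions a way to report the original definition site, keeping PLY's line-based duplicate and ordering checks meaningful. A hypothetical decorator using the hook (not part of PLY):

    import functools

    def traced(rule):
        @functools.wraps(rule)            # keeps __doc__, i.e. the grammar string
        def wrapper(p):
            print("reducing via %s" % rule.__name__)
            return rule(p)
        # Hypothetical: point PLY at the wrapped rule's definition line
        wrapper.co_firstlineno = rule.__code__.co_firstlineno
        return wrapper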