author     Pablo Galindo <Pablogsal@gmail.com>   2019-03-01 15:34:44 -0800
committer  GitHub <noreply@github.com>           2019-03-01 15:34:44 -0800
commit     1f24a719e7be5e49b876a5dc7daf21d01ee69faa (patch)
tree       8f8f56cab78ef671a8cb7f54b8ec2495d9a435e6
parent     7eebbbd5b3907447eddadf5cb7cb1cc9230d15b2 (diff)
download   cpython-git-1f24a719e7be5e49b876a5dc7daf21d01ee69faa.tar.gz
bpo-35808: Retire pgen and use pgen2 to generate the parser (GH-11814)
Pgen is the oldest piece of technology in the CPython repository: building it requires various #if[n]def PGEN hacks in other parts of the code, and it depends more and more on CPython internals. This commit removes the old pgen C code and replaces it with a new version implemented in pure Python: a modified and adapted version of lib2to3/pgen2 that can generate grammar files compatible with the current parser. This commit also eliminates all the #ifdef PGEN code branches, simplifying the code and making it more maintainable. The regen-grammar step now uses $(PYTHON_FOR_REGEN), which can be any version of the interpreter, so the new pgen code stays compatible with older interpreters (this also allows regenerating the grammar with the current CI setup, which uses Python 3.5). The new pgen Python module also makes use of the Grammar/Tokens file that holds the token specification, so it is always kept in sync and duplicate token definitions no longer have to be maintained.
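For illustration, this is roughly what the new regen-grammar step drives (see the Makefile.pre.in and Parser/pgen/__main__.py hunks below). A minimal sketch, assuming it runs from the root of a CPython checkout with this commit applied:

    import sys

    from Parser.pgen.pgen import ParserGenerator

    # Mirrors `$(PYTHON_FOR_REGEN) -m Parser.pgen ...` from the Makefile rule:
    # parse the EBNF grammar plus the token specification, then emit the
    # generated C parsing tables.
    p = ParserGenerator("Grammar/Grammar", "Grammar/Tokens")
    grammar = p.make_grammar()
    with open("Include/graminit.h.new", "w") as h:
        grammar.produce_graminit_h(h.write)   # non-terminal numbers as #defines
    with open("Python/graminit.c.new", "w") as c:
        grammar.produce_graminit_c(c.write)   # DFAs and labels as C static data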
-rw-r--r--  .gitignore                                                                     2
-rw-r--r--  Include/metagrammar.h                                                         18
-rw-r--r--  Include/parsetok.h                                                             3
-rw-r--r--  Include/pgen.h                                                                18
-rw-r--r--  Makefile.pre.in                                                               42
-rw-r--r--  Misc/NEWS.d/next/Core and Builtins/2019-02-11-00-50-03.bpo-11814.M12CMH.rst    2
-rw-r--r--  Misc/coverity_model.c                                                          8
-rw-r--r--  PCbuild/pythoncore.vcxproj                                                     6
-rw-r--r--  PCbuild/pythoncore.vcxproj.filters                                            18
-rw-r--r--  Parser/bitset.c                                                               66
-rw-r--r--  Parser/firstsets.c                                                           113
-rw-r--r--  Parser/grammar.c                                                             273
-rw-r--r--  Parser/metagrammar.c                                                         159
-rw-r--r--  Parser/parsetok.c                                                             16
-rw-r--r--  Parser/parsetok_pgen.c                                                         2
-rw-r--r--  Parser/pgen.c                                                                724
-rw-r--r--  Parser/pgen/__init__.py                                                        0
-rw-r--r--  Parser/pgen/__main__.py                                                       34
-rw-r--r--  Parser/pgen/grammar.py                                                       160
-rw-r--r--  Parser/pgen/pgen.py                                                          408
-rw-r--r--  Parser/pgen/token.py                                                          40
-rw-r--r--  Parser/pgenmain.c                                                            188
-rw-r--r--  Parser/printgrammar.c                                                        120
-rw-r--r--  Parser/tokenizer.c                                                            56
-rw-r--r--  Parser/tokenizer.h                                                             8
-rw-r--r--  Parser/tokenizer_pgen.c                                                        2
-rw-r--r--  Python/graminit.c                                                           1678
27 files changed, 1480 insertions, 2684 deletions
diff --git a/.gitignore b/.gitignore
index 7ee9e6d875..9e60f5a206 100644
--- a/.gitignore
+++ b/.gitignore
@@ -73,8 +73,6 @@ PCbuild/arm32/
PCbuild/obj/
PCbuild/win32/
.purify
-Parser/pgen
-Parser/pgen.exe
__pycache__
autom4te.cache
build/
diff --git a/Include/metagrammar.h b/Include/metagrammar.h
deleted file mode 100644
index 15c8ef8f3f..0000000000
--- a/Include/metagrammar.h
+++ /dev/null
@@ -1,18 +0,0 @@
-#ifndef Py_METAGRAMMAR_H
-#define Py_METAGRAMMAR_H
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-
-#define MSTART 256
-#define RULE 257
-#define RHS 258
-#define ALT 259
-#define ITEM 260
-#define ATOM 261
-
-#ifdef __cplusplus
-}
-#endif
-#endif /* !Py_METAGRAMMAR_H */
diff --git a/Include/parsetok.h b/Include/parsetok.h
index e95dd31fb7..be49f0005a 100644
--- a/Include/parsetok.h
+++ b/Include/parsetok.h
@@ -12,10 +12,7 @@ extern "C" {
typedef struct {
int error;
-#ifndef PGEN
- /* The filename is useless for pgen, see comment in tok_state structure */
PyObject *filename;
-#endif
int lineno;
int offset;
char *text; /* UTF-8-encoded string */
diff --git a/Include/pgen.h b/Include/pgen.h
deleted file mode 100644
index 8a325ed074..0000000000
--- a/Include/pgen.h
+++ /dev/null
@@ -1,18 +0,0 @@
-#ifndef Py_PGEN_H
-#define Py_PGEN_H
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-
-/* Parser generator interface */
-
-extern grammar *meta_grammar(void);
-
-struct _node;
-extern grammar *pgen(struct _node *);
-
-#ifdef __cplusplus
-}
-#endif
-#endif /* !Py_PGEN_H */
diff --git a/Makefile.pre.in b/Makefile.pre.in
index 5ae69480bf..135c323029 100644
--- a/Makefile.pre.in
+++ b/Makefile.pre.in
@@ -290,40 +290,21 @@ LIBFFI_INCLUDEDIR= @LIBFFI_INCLUDEDIR@
##########################################################################
# Parser
-PGEN= Parser/pgen$(EXE)
-
POBJS= \
Parser/acceler.o \
Parser/grammar1.o \
Parser/listnode.o \
Parser/node.o \
Parser/parser.o \
- Parser/bitset.o \
- Parser/metagrammar.o \
- Parser/firstsets.o \
- Parser/grammar.o \
Parser/token.o \
- Parser/pgen.o
PARSER_OBJS= $(POBJS) Parser/myreadline.o Parser/parsetok.o Parser/tokenizer.o
-PGOBJS= \
- Objects/obmalloc.o \
- Python/dynamic_annotations.o \
- Python/mysnprintf.o \
- Python/pyctype.o \
- Parser/tokenizer_pgen.o \
- Parser/printgrammar.o \
- Parser/parsetok_pgen.o \
- Parser/pgenmain.o
-
PARSER_HEADERS= \
$(srcdir)/Parser/parser.h \
$(srcdir)/Include/parsetok.h \
$(srcdir)/Parser/tokenizer.h
-PGENOBJS= $(POBJS) $(PGOBJS)
-
##########################################################################
# Python
@@ -802,31 +783,18 @@ Python/sysmodule.o: $(srcdir)/Python/sysmodule.c Makefile
$(IO_OBJS): $(IO_H)
-$(PGEN): $(PGENOBJS)
- $(CC) $(OPT) $(PY_CORE_LDFLAGS) $(PGENOBJS) $(LIBS) -o $(PGEN)
-
.PHONY: regen-grammar
-regen-grammar: $(PGEN)
+regen-grammar: regen-token
# Regenerate Include/graminit.h and Python/graminit.c
# from Grammar/Grammar using pgen
@$(MKDIR_P) Include
- $(PGEN) $(srcdir)/Grammar/Grammar \
+ $(PYTHON_FOR_REGEN) -m Parser.pgen $(srcdir)/Grammar/Grammar \
+ $(srcdir)/Grammar/Tokens \
$(srcdir)/Include/graminit.h.new \
$(srcdir)/Python/graminit.c.new
$(UPDATE_FILE) $(srcdir)/Include/graminit.h $(srcdir)/Include/graminit.h.new
$(UPDATE_FILE) $(srcdir)/Python/graminit.c $(srcdir)/Python/graminit.c.new
-Parser/grammar.o: $(srcdir)/Parser/grammar.c \
- $(srcdir)/Include/token.h \
- $(srcdir)/Include/grammar.h
-Parser/metagrammar.o: $(srcdir)/Parser/metagrammar.c
-
-Parser/tokenizer_pgen.o: $(srcdir)/Parser/tokenizer.c
-Parser/parsetok_pgen.o: $(srcdir)/Parser/parsetok.c
-Parser/printgrammar.o: $(srcdir)/Parser/printgrammar.c
-
-Parser/pgenmain.o: $(srcdir)/Include/parsetok.h
-
.PHONY=regen-ast
regen-ast:
# Regenerate Include/Python-ast.h using Parser/asdl_c.py -h
@@ -1016,7 +984,6 @@ PYTHON_HEADERS= \
$(srcdir)/Include/longobject.h \
$(srcdir)/Include/marshal.h \
$(srcdir)/Include/memoryobject.h \
- $(srcdir)/Include/metagrammar.h \
$(srcdir)/Include/methodobject.h \
$(srcdir)/Include/modsupport.h \
$(srcdir)/Include/moduleobject.h \
@@ -1028,7 +995,6 @@ PYTHON_HEADERS= \
$(srcdir)/Include/osdefs.h \
$(srcdir)/Include/osmodule.h \
$(srcdir)/Include/patchlevel.h \
- $(srcdir)/Include/pgen.h \
$(srcdir)/Include/pgenheaders.h \
$(srcdir)/Include/pyarena.h \
$(srcdir)/Include/pycapsule.h \
@@ -1771,7 +1737,7 @@ profile-removal:
rm -f profile-run-stamp
clobber: clean profile-removal
- -rm -f $(BUILDPYTHON) $(PGEN) $(LIBRARY) $(LDLIBRARY) $(DLLLIBRARY) \
+ -rm -f $(BUILDPYTHON) $(LIBRARY) $(LDLIBRARY) $(DLLLIBRARY) \
tags TAGS \
config.cache config.log pyconfig.h Modules/config.c
-rm -rf build platform
diff --git a/Misc/NEWS.d/next/Core and Builtins/2019-02-11-00-50-03.bpo-11814.M12CMH.rst b/Misc/NEWS.d/next/Core and Builtins/2019-02-11-00-50-03.bpo-11814.M12CMH.rst
new file mode 100644
index 0000000000..b3bec728d9
--- /dev/null
+++ b/Misc/NEWS.d/next/Core and Builtins/2019-02-11-00-50-03.bpo-11814.M12CMH.rst
@@ -0,0 +1,2 @@
+Retire pgen and use a modified version of pgen2 to generate the parser.
+Patch by Pablo Galindo.
diff --git a/Misc/coverity_model.c b/Misc/coverity_model.c
index 1ae617911c..8960362a6d 100644
--- a/Misc/coverity_model.c
+++ b/Misc/coverity_model.c
@@ -92,14 +92,6 @@ wchar_t *Py_DecodeLocale(const char* arg, size_t *size)
return w;
}
-/* Parser/pgenmain.c */
-grammar *getgrammar(const char *filename)
-{
- grammar *g;
- __coverity_tainted_data_sink__(filename);
- return g;
-}
-
/* Python/marshal.c */
static Py_ssize_t r_string(char *s, Py_ssize_t n, RFILE *p)
diff --git a/PCbuild/pythoncore.vcxproj b/PCbuild/pythoncore.vcxproj
index 992c84f8cc..bff7b95a84 100644
--- a/PCbuild/pythoncore.vcxproj
+++ b/PCbuild/pythoncore.vcxproj
@@ -161,7 +161,6 @@
<ClInclude Include="..\Include\longobject.h" />
<ClInclude Include="..\Include\marshal.h" />
<ClInclude Include="..\Include\memoryobject.h" />
- <ClInclude Include="..\Include\metagrammar.h" />
<ClInclude Include="..\Include\methodobject.h" />
<ClInclude Include="..\Include\modsupport.h" />
<ClInclude Include="..\Include\moduleobject.h" />
@@ -175,7 +174,6 @@
<ClInclude Include="..\Include\osmodule.h" />
<ClInclude Include="..\Include\parsetok.h" />
<ClInclude Include="..\Include\patchlevel.h" />
- <ClInclude Include="..\Include\pgen.h" />
<ClInclude Include="..\Include\pgenheaders.h" />
<ClInclude Include="..\Include\pyhash.h" />
<ClInclude Include="..\Include\py_curses.h" />
@@ -372,12 +370,8 @@
<ClCompile Include="..\Objects\unicodeobject.c" />
<ClCompile Include="..\Objects\weakrefobject.c" />
<ClCompile Include="..\Parser\acceler.c" />
- <ClCompile Include="..\Parser\bitset.c" />
- <ClCompile Include="..\Parser\firstsets.c" />
- <ClCompile Include="..\Parser\grammar.c" />
<ClCompile Include="..\Parser\grammar1.c" />
<ClCompile Include="..\Parser\listnode.c" />
- <ClCompile Include="..\Parser\metagrammar.c" />
<ClCompile Include="..\Parser\myreadline.c" />
<ClCompile Include="..\Parser\node.c" />
<ClCompile Include="..\Parser\parser.c" />
diff --git a/PCbuild/pythoncore.vcxproj.filters b/PCbuild/pythoncore.vcxproj.filters
index 293bded66c..03030744b1 100644
--- a/PCbuild/pythoncore.vcxproj.filters
+++ b/PCbuild/pythoncore.vcxproj.filters
@@ -234,9 +234,6 @@
<ClInclude Include="..\Include\memoryobject.h">
<Filter>Include</Filter>
</ClInclude>
- <ClInclude Include="..\Include\metagrammar.h">
- <Filter>Include</Filter>
- </ClInclude>
<ClInclude Include="..\Include\methodobject.h">
<Filter>Include</Filter>
</ClInclude>
@@ -270,9 +267,6 @@
<ClInclude Include="..\Include\patchlevel.h">
<Filter>Include</Filter>
</ClInclude>
- <ClInclude Include="..\Include\pgen.h">
- <Filter>Include</Filter>
- </ClInclude>
<ClInclude Include="..\Include\pgenheaders.h">
<Filter>Include</Filter>
</ClInclude>
@@ -836,24 +830,12 @@
<ClCompile Include="..\Parser\acceler.c">
<Filter>Parser</Filter>
</ClCompile>
- <ClCompile Include="..\Parser\bitset.c">
- <Filter>Parser</Filter>
- </ClCompile>
- <ClCompile Include="..\Parser\firstsets.c">
- <Filter>Parser</Filter>
- </ClCompile>
- <ClCompile Include="..\Parser\grammar.c">
- <Filter>Parser</Filter>
- </ClCompile>
<ClCompile Include="..\Parser\grammar1.c">
<Filter>Parser</Filter>
</ClCompile>
<ClCompile Include="..\Parser\listnode.c">
<Filter>Parser</Filter>
</ClCompile>
- <ClCompile Include="..\Parser\metagrammar.c">
- <Filter>Parser</Filter>
- </ClCompile>
<ClCompile Include="..\Parser\myreadline.c">
<Filter>Parser</Filter>
</ClCompile>
diff --git a/Parser/bitset.c b/Parser/bitset.c
deleted file mode 100644
index f5bfd41bd6..0000000000
--- a/Parser/bitset.c
+++ /dev/null
@@ -1,66 +0,0 @@
-
-/* Bitset primitives used by the parser generator */
-
-#include "pgenheaders.h"
-#include "bitset.h"
-
-bitset
-newbitset(int nbits)
-{
- int nbytes = NBYTES(nbits);
- bitset ss = (char *)PyObject_MALLOC(sizeof(BYTE) * nbytes);
-
- if (ss == NULL)
- Py_FatalError("no mem for bitset");
-
- ss += nbytes;
- while (--nbytes >= 0)
- *--ss = 0;
- return ss;
-}
-
-void
-delbitset(bitset ss)
-{
- PyObject_FREE(ss);
-}
-
-int
-addbit(bitset ss, int ibit)
-{
- int ibyte = BIT2BYTE(ibit);
- BYTE mask = BIT2MASK(ibit);
-
- if (ss[ibyte] & mask)
- return 0; /* Bit already set */
- ss[ibyte] |= mask;
- return 1;
-}
-
-#if 0 /* Now a macro */
-int
-testbit(bitset ss, int ibit)
-{
- return (ss[BIT2BYTE(ibit)] & BIT2MASK(ibit)) != 0;
-}
-#endif
-
-int
-samebitset(bitset ss1, bitset ss2, int nbits)
-{
- int i;
-
- for (i = NBYTES(nbits); --i >= 0; )
- if (*ss1++ != *ss2++)
- return 0;
- return 1;
-}
-
-void
-mergebitset(bitset ss1, bitset ss2, int nbits)
-{
- int i;
-
- for (i = NBYTES(nbits); --i >= 0; )
- *ss1++ |= *ss2++;
-}
diff --git a/Parser/firstsets.c b/Parser/firstsets.c
deleted file mode 100644
index ee75d1bfe0..0000000000
--- a/Parser/firstsets.c
+++ /dev/null
@@ -1,113 +0,0 @@
-
-/* Computation of FIRST stets */
-
-#include "pgenheaders.h"
-#include "grammar.h"
-#include "token.h"
-
-extern int Py_DebugFlag;
-
-/* Forward */
-static void calcfirstset(grammar *, dfa *);
-
-void
-addfirstsets(grammar *g)
-{
- int i;
- dfa *d;
-
- if (Py_DebugFlag)
- printf("Adding FIRST sets ...\n");
- for (i = 0; i < g->g_ndfas; i++) {
- d = &g->g_dfa[i];
- if (d->d_first == NULL)
- calcfirstset(g, d);
- }
-}
-
-static void
-calcfirstset(grammar *g, dfa *d)
-{
- int i, j;
- state *s;
- arc *a;
- int nsyms;
- int *sym;
- int nbits;
- static bitset dummy;
- bitset result;
- int type;
- dfa *d1;
- label *l0;
-
- if (Py_DebugFlag)
- printf("Calculate FIRST set for '%s'\n", d->d_name);
-
- if (dummy == NULL)
- dummy = newbitset(1);
- if (d->d_first == dummy) {
- fprintf(stderr, "Left-recursion for '%s'\n", d->d_name);
- return;
- }
- if (d->d_first != NULL) {
- fprintf(stderr, "Re-calculating FIRST set for '%s' ???\n",
- d->d_name);
- }
- d->d_first = dummy;
-
- l0 = g->g_ll.ll_label;
- nbits = g->g_ll.ll_nlabels;
- result = newbitset(nbits);
-
- sym = (int *)PyObject_MALLOC(sizeof(int));
- if (sym == NULL)
- Py_FatalError("no mem for new sym in calcfirstset");
- nsyms = 1;
- sym[0] = findlabel(&g->g_ll, d->d_type, (char *)NULL);
-
- s = &d->d_state[d->d_initial];
- for (i = 0; i < s->s_narcs; i++) {
- a = &s->s_arc[i];
- for (j = 0; j < nsyms; j++) {
- if (sym[j] == a->a_lbl)
- break;
- }
- if (j >= nsyms) { /* New label */
- sym = (int *)PyObject_REALLOC(sym,
- sizeof(int) * (nsyms + 1));
- if (sym == NULL)
- Py_FatalError(
- "no mem to resize sym in calcfirstset");
- sym[nsyms++] = a->a_lbl;
- type = l0[a->a_lbl].lb_type;
- if (ISNONTERMINAL(type)) {
- d1 = PyGrammar_FindDFA(g, type);
- if (d1->d_first == dummy) {
- fprintf(stderr,
- "Left-recursion below '%s'\n",
- d->d_name);
- }
- else {
- if (d1->d_first == NULL)
- calcfirstset(g, d1);
- mergebitset(result,
- d1->d_first, nbits);
- }
- }
- else if (ISTERMINAL(type)) {
- addbit(result, a->a_lbl);
- }
- }
- }
- d->d_first = result;
- if (Py_DebugFlag) {
- printf("FIRST set for '%s': {", d->d_name);
- for (i = 0; i < nbits; i++) {
- if (testbit(result, i))
- printf(" %s", PyGrammar_LabelRepr(&l0[i]));
- }
- printf(" }\n");
- }
-
- PyObject_FREE(sym);
-}
diff --git a/Parser/grammar.c b/Parser/grammar.c
deleted file mode 100644
index 75fd5b9cde..0000000000
--- a/Parser/grammar.c
+++ /dev/null
@@ -1,273 +0,0 @@
-
-/* Grammar implementation */
-
-#include "Python.h"
-#include "pgenheaders.h"
-
-#include <ctype.h>
-
-#include "token.h"
-#include "grammar.h"
-
-extern int Py_DebugFlag;
-
-grammar *
-newgrammar(int start)
-{
- grammar *g;
-
- g = (grammar *)PyObject_MALLOC(sizeof(grammar));
- if (g == NULL)
- Py_FatalError("no mem for new grammar");
- g->g_ndfas = 0;
- g->g_dfa = NULL;
- g->g_start = start;
- g->g_ll.ll_nlabels = 0;
- g->g_ll.ll_label = NULL;
- g->g_accel = 0;
- return g;
-}
-
-void
-freegrammar(grammar *g)
-{
- int i;
- for (i = 0; i < g->g_ndfas; i++) {
- free(g->g_dfa[i].d_name);
- for (int j = 0; j < g->g_dfa[i].d_nstates; j++)
- PyObject_FREE(g->g_dfa[i].d_state[j].s_arc);
- PyObject_FREE(g->g_dfa[i].d_state);
- }
- PyObject_FREE(g->g_dfa);
- for (i = 0; i < g->g_ll.ll_nlabels; i++)
- free(g->g_ll.ll_label[i].lb_str);
- PyObject_FREE(g->g_ll.ll_label);
- PyObject_FREE(g);
-}
-
-dfa *
-adddfa(grammar *g, int type, const char *name)
-{
- dfa *d;
-
- g->g_dfa = (dfa *)PyObject_REALLOC(g->g_dfa,
- sizeof(dfa) * (g->g_ndfas + 1));
- if (g->g_dfa == NULL)
- Py_FatalError("no mem to resize dfa in adddfa");
- d = &g->g_dfa[g->g_ndfas++];
- d->d_type = type;
- d->d_name = strdup(name);
- d->d_nstates = 0;
- d->d_state = NULL;
- d->d_initial = -1;
- d->d_first = NULL;
- return d; /* Only use while fresh! */
-}
-
-int
-addstate(dfa *d)
-{
- state *s;
-
- d->d_state = (state *)PyObject_REALLOC(d->d_state,
- sizeof(state) * (d->d_nstates + 1));
- if (d->d_state == NULL)
- Py_FatalError("no mem to resize state in addstate");
- s = &d->d_state[d->d_nstates++];
- s->s_narcs = 0;
- s->s_arc = NULL;
- s->s_lower = 0;
- s->s_upper = 0;
- s->s_accel = NULL;
- s->s_accept = 0;
- return Py_SAFE_DOWNCAST(s - d->d_state, intptr_t, int);
-}
-
-void
-addarc(dfa *d, int from, int to, int lbl)
-{
- state *s;
- arc *a;
-
- assert(0 <= from && from < d->d_nstates);
- assert(0 <= to && to < d->d_nstates);
-
- s = &d->d_state[from];
- s->s_arc = (arc *)PyObject_REALLOC(s->s_arc, sizeof(arc) * (s->s_narcs + 1));
- if (s->s_arc == NULL)
- Py_FatalError("no mem to resize arc list in addarc");
- a = &s->s_arc[s->s_narcs++];
- a->a_lbl = lbl;
- a->a_arrow = to;
-}
-
-int
-addlabel(labellist *ll, int type, const char *str)
-{
- int i;
- label *lb;
-
- for (i = 0; i < ll->ll_nlabels; i++) {
- if (ll->ll_label[i].lb_type == type &&
- strcmp(ll->ll_label[i].lb_str, str) == 0)
- return i;
- }
- ll->ll_label = (label *)PyObject_REALLOC(ll->ll_label,
- sizeof(label) * (ll->ll_nlabels + 1));
- if (ll->ll_label == NULL)
- Py_FatalError("no mem to resize labellist in addlabel");
- lb = &ll->ll_label[ll->ll_nlabels++];
- lb->lb_type = type;
- lb->lb_str = strdup(str);
- if (Py_DebugFlag)
- printf("Label @ %8p, %d: %s\n", ll, ll->ll_nlabels,
- PyGrammar_LabelRepr(lb));
- return Py_SAFE_DOWNCAST(lb - ll->ll_label, intptr_t, int);
-}
-
-/* Same, but rather dies than adds */
-
-int
-findlabel(labellist *ll, int type, const char *str)
-{
- int i;
-
- for (i = 0; i < ll->ll_nlabels; i++) {
- if (ll->ll_label[i].lb_type == type /*&&
- strcmp(ll->ll_label[i].lb_str, str) == 0*/)
- return i;
- }
- fprintf(stderr, "Label %d/'%s' not found\n", type, str);
- Py_FatalError("grammar.c:findlabel()");
-
- /* Py_FatalError() is declared with __attribute__((__noreturn__)).
- GCC emits a warning without "return 0;" (compiler bug!), but Clang is
- smarter and emits a warning on the return... */
-#ifndef __clang__
- return 0; /* Make gcc -Wall happy */
-#endif
-}
-
-/* Forward */
-static void translabel(grammar *, label *);
-
-void
-translatelabels(grammar *g)
-{
- int i;
-
-#ifdef Py_DEBUG
- printf("Translating labels ...\n");
-#endif
- /* Don't translate EMPTY */
- for (i = EMPTY+1; i < g->g_ll.ll_nlabels; i++)
- translabel(g, &g->g_ll.ll_label[i]);
-}
-
-static void
-translabel(grammar *g, label *lb)
-{
- int i;
-
- if (Py_DebugFlag)
- printf("Translating label %s ...\n", PyGrammar_LabelRepr(lb));
-
- if (lb->lb_type == NAME) {
- for (i = 0; i < g->g_ndfas; i++) {
- if (strcmp(lb->lb_str, g->g_dfa[i].d_name) == 0) {
- if (Py_DebugFlag)
- printf(
- "Label %s is non-terminal %d.\n",
- lb->lb_str,
- g->g_dfa[i].d_type);
- lb->lb_type = g->g_dfa[i].d_type;
- free(lb->lb_str);
- lb->lb_str = NULL;
- return;
- }
- }
- for (i = 0; i < (int)N_TOKENS; i++) {
- if (strcmp(lb->lb_str, _PyParser_TokenNames[i]) == 0) {
- if (Py_DebugFlag)
- printf("Label %s is terminal %d.\n",
- lb->lb_str, i);
- lb->lb_type = i;
- free(lb->lb_str);
- lb->lb_str = NULL;
- return;
- }
- }
- printf("Can't translate NAME label '%s'\n", lb->lb_str);
- return;
- }
-
- if (lb->lb_type == STRING) {
- if (isalpha(Py_CHARMASK(lb->lb_str[1])) ||
- lb->lb_str[1] == '_') {
- char *p;
- char *src;
- char *dest;
- size_t name_len;
- if (Py_DebugFlag)
- printf("Label %s is a keyword\n", lb->lb_str);
- lb->lb_type = NAME;
- src = lb->lb_str + 1;
- p = strchr(src, '\'');
- if (p)
- name_len = p - src;
- else
- name_len = strlen(src);
- dest = (char *)malloc(name_len + 1);
- if (!dest) {
- printf("Can't alloc dest '%s'\n", src);
- return;
- }
- strncpy(dest, src, name_len);
- dest[name_len] = '\0';
- free(lb->lb_str);
- lb->lb_str = dest;
- }
- else if (lb->lb_str[2] == lb->lb_str[0]) {
- int type = (int) PyToken_OneChar(lb->lb_str[1]);
- if (type != OP) {
- lb->lb_type = type;
- free(lb->lb_str);
- lb->lb_str = NULL;
- }
- else
- printf("Unknown OP label %s\n",
- lb->lb_str);
- }
- else if (lb->lb_str[2] && lb->lb_str[3] == lb->lb_str[0]) {
- int type = (int) PyToken_TwoChars(lb->lb_str[1],
- lb->lb_str[2]);
- if (type != OP) {
- lb->lb_type = type;
- free(lb->lb_str);
- lb->lb_str = NULL;
- }
- else
- printf("Unknown OP label %s\n",
- lb->lb_str);
- }
- else if (lb->lb_str[2] && lb->lb_str[3] && lb->lb_str[4] == lb->lb_str[0]) {
- int type = (int) PyToken_ThreeChars(lb->lb_str[1],
- lb->lb_str[2],
- lb->lb_str[3]);
- if (type != OP) {
- lb->lb_type = type;
- free(lb->lb_str);
- lb->lb_str = NULL;
- }
- else
- printf("Unknown OP label %s\n",
- lb->lb_str);
- }
- else
- printf("Can't translate STRING label %s\n",
- lb->lb_str);
- }
- else
- printf("Can't translate label '%s'\n",
- PyGrammar_LabelRepr(lb));
-}
diff --git a/Parser/metagrammar.c b/Parser/metagrammar.c
deleted file mode 100644
index 53810b8125..0000000000
--- a/Parser/metagrammar.c
+++ /dev/null
@@ -1,159 +0,0 @@
-
-#include "pgenheaders.h"
-#include "metagrammar.h"
-#include "grammar.h"
-#include "pgen.h"
-static arc arcs_0_0[3] = {
- {2, 0},
- {3, 0},
- {4, 1},
-};
-static arc arcs_0_1[1] = {
- {0, 1},
-};
-static state states_0[2] = {
- {3, arcs_0_0},
- {1, arcs_0_1},
-};
-static arc arcs_1_0[1] = {
- {5, 1},
-};
-static arc arcs_1_1[1] = {
- {6, 2},
-};
-static arc arcs_1_2[1] = {
- {7, 3},
-};
-static arc arcs_1_3[1] = {
- {3, 4},
-};
-static arc arcs_1_4[1] = {
- {0, 4},
-};
-static state states_1[5] = {
- {1, arcs_1_0},
- {1, arcs_1_1},
- {1, arcs_1_2},
- {1, arcs_1_3},
- {1, arcs_1_4},
-};
-static arc arcs_2_0[1] = {
- {8, 1},
-};
-static arc arcs_2_1[2] = {
- {9, 0},
- {0, 1},
-};
-static state states_2[2] = {
- {1, arcs_2_0},
- {2, arcs_2_1},
-};
-static arc arcs_3_0[1] = {
- {10, 1},
-};
-static arc arcs_3_1[2] = {
- {10, 1},
- {0, 1},
-};
-static state states_3[2] = {
- {1, arcs_3_0},
- {2, arcs_3_1},
-};
-static arc arcs_4_0[2] = {
- {11, 1},
- {13, 2},
-};
-static arc arcs_4_1[1] = {
- {7, 3},
-};
-static arc arcs_4_2[3] = {
- {14, 4},
- {15, 4},
- {0, 2},
-};
-static arc arcs_4_3[1] = {
- {12, 4},
-};
-static arc arcs_4_4[1] = {
- {0, 4},
-};
-static state states_4[5] = {
- {2, arcs_4_0},
- {1, arcs_4_1},
- {3, arcs_4_2},
- {1, arcs_4_3},
- {1, arcs_4_4},
-};
-static arc arcs_5_0[3] = {
- {5, 1},
- {16, 1},
- {17, 2},
-};
-static arc arcs_5_1[1] = {
- {0, 1},
-};
-static arc arcs_5_2[1] = {
- {7, 3},
-};
-static arc arcs_5_3[1] = {
- {18, 1},
-};
-static state states_5[4] = {
- {3, arcs_5_0},
- {1, arcs_5_1},
- {1, arcs_5_2},
- {1, arcs_5_3},
-};
-static dfa dfas[6] = {
- {256, "MSTART", 0, 2, states_0,
- "\070\000\000"},
- {257, "RULE", 0, 5, states_1,
- "\040\000\000"},
- {258, "RHS", 0, 2, states_2,
- "\040\010\003"},
- {259, "ALT", 0, 2, states_3,
- "\040\010\003"},
- {260, "ITEM", 0, 5, states_4,
- "\040\010\003"},
- {261, "ATOM", 0, 4, states_5,
- "\040\000\003"},
-};
-static label labels[19] = {
- {0, "EMPTY"},
- {256, 0},
- {257, 0},
- {4, 0},
- {0, 0},
- {1, 0},
- {11, 0},
- {258, 0},
- {259, 0},
- {18, 0},
- {260, 0},
- {9, 0},
- {10, 0},
- {261, 0},
- {16, 0},
- {14, 0},
- {3, 0},
- {7, 0},
- {8, 0},
-};
-static grammar _PyParser_Grammar = {
- 6,
- dfas,
- {19, labels},
- 256
-};
-
-grammar *
-meta_grammar(void)
-{
- return &_PyParser_Grammar;
-}
-
-grammar *
-Py_meta_grammar(void)
-{
- return meta_grammar();
-}
diff --git a/Parser/parsetok.c b/Parser/parsetok.c
index 6a96f6bc5a..7a6c886519 100644
--- a/Parser/parsetok.c
+++ b/Parser/parsetok.c
@@ -99,10 +99,8 @@ PyParser_ParseStringObject(const char *s, PyObject *filename,
tok->type_comments = 1;
}
-#ifndef PGEN
Py_INCREF(err_ret->filename);
tok->filename = err_ret->filename;
-#endif
return parsetok(tok, g, start, err_ret, flags);
}
@@ -113,7 +111,6 @@ PyParser_ParseStringFlagsFilenameEx(const char *s, const char *filename_str,
{
node *n;
PyObject *filename = NULL;
-#ifndef PGEN
if (filename_str != NULL) {
filename = PyUnicode_DecodeFSDefault(filename_str);
if (filename == NULL) {
@@ -121,11 +118,8 @@ PyParser_ParseStringFlagsFilenameEx(const char *s, const char *filename_str,
return NULL;
}
}
-#endif
n = PyParser_ParseStringObject(s, filename, g, start, err_ret, flags);
-#ifndef PGEN
Py_XDECREF(filename);
-#endif
return n;
}
@@ -169,10 +163,8 @@ PyParser_ParseFileObject(FILE *fp, PyObject *filename,
if (*flags & PyPARSE_TYPE_COMMENTS) {
tok->type_comments = 1;
}
-#ifndef PGEN
Py_INCREF(err_ret->filename);
tok->filename = err_ret->filename;
-#endif
return parsetok(tok, g, start, err_ret, flags);
}
@@ -184,7 +176,6 @@ PyParser_ParseFileFlagsEx(FILE *fp, const char *filename,
{
node *n;
PyObject *fileobj = NULL;
-#ifndef PGEN
if (filename != NULL) {
fileobj = PyUnicode_DecodeFSDefault(filename);
if (fileobj == NULL) {
@@ -192,12 +183,9 @@ PyParser_ParseFileFlagsEx(FILE *fp, const char *filename,
return NULL;
}
}
-#endif
n = PyParser_ParseFileObject(fp, fileobj, enc, g,
start, ps1, ps2, err_ret, flags);
-#ifndef PGEN
Py_XDECREF(fileobj);
-#endif
return n;
}
@@ -371,7 +359,6 @@ parsetok(struct tok_state *tok, grammar *g, int start, perrdetail *err_ret,
}
}
-#ifndef PGEN
/* Check that the source for a single input statement really
is a single statement by looking at what is left in the
buffer after parsing. Trailing whitespace and comments
@@ -399,7 +386,6 @@ parsetok(struct tok_state *tok, grammar *g, int start, perrdetail *err_ret,
c = *++cur;
}
}
-#endif
}
else
n = NULL;
@@ -470,7 +456,6 @@ initerr(perrdetail *err_ret, PyObject *filename)
err_ret->text = NULL;
err_ret->token = -1;
err_ret->expected = -1;
-#ifndef PGEN
if (filename) {
Py_INCREF(filename);
err_ret->filename = filename;
@@ -482,6 +467,5 @@ initerr(perrdetail *err_ret, PyObject *filename)
return -1;
}
}
-#endif
return 0;
}
diff --git a/Parser/parsetok_pgen.c b/Parser/parsetok_pgen.c
deleted file mode 100644
index 97b92883f3..0000000000
--- a/Parser/parsetok_pgen.c
+++ /dev/null
@@ -1,2 +0,0 @@
-#define PGEN
-#include "parsetok.c"
diff --git a/Parser/pgen.c b/Parser/pgen.c
deleted file mode 100644
index 6451a1d998..0000000000
--- a/Parser/pgen.c
+++ /dev/null
@@ -1,724 +0,0 @@
-/* Parser generator */
-
-/* For a description, see the comments at end of this file */
-
-#include "Python.h"
-#include "pgenheaders.h"
-#include "token.h"
-#include "node.h"
-#include "grammar.h"
-#include "metagrammar.h"
-#include "pgen.h"
-
-extern int Py_DebugFlag;
-extern int Py_IgnoreEnvironmentFlag; /* needed by Py_GETENV */
-
-
-/* PART ONE -- CONSTRUCT NFA -- Cf. Algorithm 3.2 from [Aho&Ullman 77] */
-
-typedef struct _nfaarc {
- int ar_label;
- int ar_arrow;
-} nfaarc;
-
-typedef struct _nfastate {
- int st_narcs;
- nfaarc *st_arc;
-} nfastate;
-
-typedef struct _nfa {
- int nf_type;
- char *nf_name;
- int nf_nstates;
- nfastate *nf_state;
- int nf_start, nf_finish;
-} nfa;
-
-/* Forward */
-static void compile_rhs(labellist *ll,
- nfa *nf, node *n, int *pa, int *pb);
-static void compile_alt(labellist *ll,
- nfa *nf, node *n, int *pa, int *pb);
-static void compile_item(labellist *ll,
- nfa *nf, node *n, int *pa, int *pb);
-static void compile_atom(labellist *ll,
- nfa *nf, node *n, int *pa, int *pb);
-
-static int
-addnfastate(nfa *nf)
-{
- nfastate *st;
-
- nf->nf_state = (nfastate *)PyObject_REALLOC(nf->nf_state,
- sizeof(nfastate) * (nf->nf_nstates + 1));
- if (nf->nf_state == NULL)
- Py_FatalError("out of mem");
- st = &nf->nf_state[nf->nf_nstates++];
- st->st_narcs = 0;
- st->st_arc = NULL;
- return st - nf->nf_state;
-}
-
-static void
-addnfaarc(nfa *nf, int from, int to, int lbl)
-{
- nfastate *st;
- nfaarc *ar;
-
- st = &nf->nf_state[from];
- st->st_arc = (nfaarc *)PyObject_REALLOC(st->st_arc,
- sizeof(nfaarc) * (st->st_narcs + 1));
- if (st->st_arc == NULL)
- Py_FatalError("out of mem");
- ar = &st->st_arc[st->st_narcs++];
- ar->ar_label = lbl;
- ar->ar_arrow = to;
-}
-
-static nfa *
-newnfa(char *name)
-{
- nfa *nf;
- static int type = NT_OFFSET; /* All types will be disjunct */
-
- nf = (nfa *)PyObject_MALLOC(sizeof(nfa));
- if (nf == NULL)
- Py_FatalError("no mem for new nfa");
- nf->nf_type = type++;
- nf->nf_name = name; /* XXX strdup(name) ??? */
- nf->nf_nstates = 0;
- nf->nf_state = NULL;
- nf->nf_start = nf->nf_finish = -1;
- return nf;
-}
-
-typedef struct _nfagrammar {
- int gr_nnfas;
- nfa **gr_nfa;
- labellist gr_ll;
-} nfagrammar;
-
-/* Forward */
-static void compile_rule(nfagrammar *gr, node *n);
-
-static nfagrammar *
-newnfagrammar(void)
-{
- nfagrammar *gr;
-
- gr = (nfagrammar *)PyObject_MALLOC(sizeof(nfagrammar));
- if (gr == NULL)
- Py_FatalError("no mem for new nfa grammar");
- gr->gr_nnfas = 0;
- gr->gr_nfa = NULL;
- gr->gr_ll.ll_nlabels = 0;
- gr->gr_ll.ll_label = NULL;
- addlabel(&gr->gr_ll, ENDMARKER, "EMPTY");
- return gr;
-}
-
-static void
-freenfagrammar(nfagrammar *gr)
-{
- for (int i = 0; i < gr->gr_nnfas; i++) {
- PyObject_FREE(gr->gr_nfa[i]->nf_state);
- }
- PyObject_FREE(gr->gr_nfa);
- PyObject_FREE(gr);
-}
-
-static nfa *
-addnfa(nfagrammar *gr, char *name)
-{
- nfa *nf;
-
- nf = newnfa(name);
- gr->gr_nfa = (nfa **)PyObject_REALLOC(gr->gr_nfa,
- sizeof(nfa*) * (gr->gr_nnfas + 1));
- if (gr->gr_nfa == NULL)
- Py_FatalError("out of mem");
- gr->gr_nfa[gr->gr_nnfas++] = nf;
- addlabel(&gr->gr_ll, NAME, nf->nf_name);
- return nf;
-}
-
-#ifdef Py_DEBUG
-
-static const char REQNFMT[] = "metacompile: less than %d children\n";
-
-#define REQN(i, count) do { \
- if (i < count) { \
- fprintf(stderr, REQNFMT, count); \
- Py_FatalError("REQN"); \
- } \
-} while (0)
-
-#else
-#define REQN(i, count) /* empty */
-#endif
-
-static nfagrammar *
-metacompile(node *n)
-{
- nfagrammar *gr;
- int i;
-
- if (Py_DebugFlag)
- printf("Compiling (meta-) parse tree into NFA grammar\n");
- gr = newnfagrammar();
- REQ(n, MSTART);
- i = n->n_nchildren - 1; /* Last child is ENDMARKER */
- n = n->n_child;
- for (; --i >= 0; n++) {
- if (n->n_type != NEWLINE)
- compile_rule(gr, n);
- }
- return gr;
-}
-
-static void
-compile_rule(nfagrammar *gr, node *n)
-{
- nfa *nf;
-
- REQ(n, RULE);
- REQN(n->n_nchildren, 4);
- n = n->n_child;
- REQ(n, NAME);
- nf = addnfa(gr, n->n_str);
- n++;
- REQ(n, COLON);
- n++;
- REQ(n, RHS);
- compile_rhs(&gr->gr_ll, nf, n, &nf->nf_start, &nf->nf_finish);
- n++;
- REQ(n, NEWLINE);
-}
-
-static void
-compile_rhs(labellist *ll, nfa *nf, node *n, int *pa, int *pb)
-{
- int i;
- int a, b;
-
- REQ(n, RHS);
- i = n->n_nchildren;
- REQN(i, 1);
- n = n->n_child;
- REQ(n, ALT);
- compile_alt(ll, nf, n, pa, pb);
- if (--i <= 0)
- return;
- n++;
- a = *pa;
- b = *pb;
- *pa = addnfastate(nf);
- *pb = addnfastate(nf);
- addnfaarc(nf, *pa, a, EMPTY);
- addnfaarc(nf, b, *pb, EMPTY);
- for (; --i >= 0; n++) {
- REQ(n, VBAR);
- REQN(i, 1);
- --i;
- n++;
- REQ(n, ALT);
- compile_alt(ll, nf, n, &a, &b);
- addnfaarc(nf, *pa, a, EMPTY);
- addnfaarc(nf, b, *pb, EMPTY);
- }
-}
-
-static void
-compile_alt(labellist *ll, nfa *nf, node *n, int *pa, int *pb)
-{
- int i;
- int a, b;
-
- REQ(n, ALT);
- i = n->n_nchildren;
- REQN(i, 1);
- n = n->n_child;
- REQ(n, ITEM);
- compile_item(ll, nf, n, pa, pb);
- --i;
- n++;
- for (; --i >= 0; n++) {
- REQ(n, ITEM);
- compile_item(ll, nf, n, &a, &b);
- addnfaarc(nf, *pb, a, EMPTY);
- *pb = b;
- }
-}
-
-static void
-compile_item(labellist *ll, nfa *nf, node *n, int *pa, int *pb)
-{
- int i;
- int a, b;
-
- REQ(n, ITEM);
- i = n->n_nchildren;
- REQN(i, 1);
- n = n->n_child;
- if (n->n_type == LSQB) {
- REQN(i, 3);
- n++;
- REQ(n, RHS);
- *pa = addnfastate(nf);
- *pb = addnfastate(nf);
- addnfaarc(nf, *pa, *pb, EMPTY);
- compile_rhs(ll, nf, n, &a, &b);
- addnfaarc(nf, *pa, a, EMPTY);
- addnfaarc(nf, b, *pb, EMPTY);
- REQN(i, 1);
- n++;
- REQ(n, RSQB);
- }
- else {
- compile_atom(ll, nf, n, pa, pb);
- if (--i <= 0)
- return;
- n++;
- addnfaarc(nf, *pb, *pa, EMPTY);
- if (n->n_type == STAR)
- *pb = *pa;
- else
- REQ(n, PLUS);
- }
-}
-
-static void
-compile_atom(labellist *ll, nfa *nf, node *n, int *pa, int *pb)
-{
- int i;
-
- REQ(n, ATOM);
- i = n->n_nchildren;
- (void)i; /* Don't warn about set but unused */
- REQN(i, 1);
- n = n->n_child;
- if (n->n_type == LPAR) {
- REQN(i, 3);
- n++;
- REQ(n, RHS);
- compile_rhs(ll, nf, n, pa, pb);
- n++;
- REQ(n, RPAR);
- }
- else if (n->n_type == NAME || n->n_type == STRING) {
- *pa = addnfastate(nf);
- *pb = addnfastate(nf);
- addnfaarc(nf, *pa, *pb, addlabel(ll, n->n_type, n->n_str));
- }
- else
- REQ(n, NAME);
-}
-
-static void
-dumpstate(labellist *ll, nfa *nf, int istate)
-{
- nfastate *st;
- int i;
- nfaarc *ar;
-
- printf("%c%2d%c",
- istate == nf->nf_start ? '*' : ' ',
- istate,
- istate == nf->nf_finish ? '.' : ' ');
- st = &nf->nf_state[istate];
- ar = st->st_arc;
- for (i = 0; i < st->st_narcs; i++) {
- if (i > 0)
- printf("\n ");
- printf("-> %2d %s", ar->ar_arrow,
- PyGrammar_LabelRepr(&ll->ll_label[ar->ar_label]));
- ar++;
- }
- printf("\n");
-}
-
-static void
-dumpnfa(labellist *ll, nfa *nf)
-{
- int i;
-
- printf("NFA '%s' has %d states; start %d, finish %d\n",
- nf->nf_name, nf->nf_nstates, nf->nf_start, nf->nf_finish);
- for (i = 0; i < nf->nf_nstates; i++)
- dumpstate(ll, nf, i);
-}
-
-
-/* PART TWO -- CONSTRUCT DFA -- Algorithm 3.1 from [Aho&Ullman 77] */
-
-static void
-addclosure(bitset ss, nfa *nf, int istate)
-{
- if (addbit(ss, istate)) {
- nfastate *st = &nf->nf_state[istate];
- nfaarc *ar = st->st_arc;
- int i;
-
- for (i = st->st_narcs; --i >= 0; ) {
- if (ar->ar_label == EMPTY)
- addclosure(ss, nf, ar->ar_arrow);
- ar++;
- }
- }
-}
-
-typedef struct _ss_arc {
- bitset sa_bitset;
- int sa_arrow;
- int sa_label;
-} ss_arc;
-
-typedef struct _ss_state {
- bitset ss_ss;
- int ss_narcs;
- struct _ss_arc *ss_arc;
- int ss_deleted;
- int ss_finish;
- int ss_rename;
-} ss_state;
-
-typedef struct _ss_dfa {
- int sd_nstates;
- ss_state *sd_state;
-} ss_dfa;
-
-/* Forward */
-static void printssdfa(int xx_nstates, ss_state *xx_state, int nbits,
- labellist *ll, const char *msg);
-static void simplify(int xx_nstates, ss_state *xx_state);
-static void convert(dfa *d, int xx_nstates, ss_state *xx_state);
-
-static void
-makedfa(nfagrammar *gr, nfa *nf, dfa *d)
-{
- int nbits = nf->nf_nstates;
- bitset ss;
- int xx_nstates;
- ss_state *xx_state, *yy;
- ss_arc *zz;
- int istate, jstate, iarc, jarc, ibit;
- nfastate *st;
- nfaarc *ar;
-
- ss = newbitset(nbits);
- addclosure(ss, nf, nf->nf_start);
- xx_state = (ss_state *)PyObject_MALLOC(sizeof(ss_state));
- if (xx_state == NULL)
- Py_FatalError("no mem for xx_state in makedfa");
- xx_nstates = 1;
- yy = &xx_state[0];
- yy->ss_ss = ss;
- yy->ss_narcs = 0;
- yy->ss_arc = NULL;
- yy->ss_deleted = 0;
- yy->ss_finish = testbit(ss, nf->nf_finish);
- if (yy->ss_finish)
- printf("Error: nonterminal '%s' may produce empty.\n",
- nf->nf_name);
-
- /* This algorithm is from a book written before
- the invention of structured programming... */
-
- /* For each unmarked state... */
- for (istate = 0; istate < xx_nstates; ++istate) {
- size_t size;
- yy = &xx_state[istate];
- ss = yy->ss_ss;
- /* For all its states... */
- for (ibit = 0; ibit < nf->nf_nstates; ++ibit) {
- if (!testbit(ss, ibit))
- continue;
- st = &nf->nf_state[ibit];
- /* For all non-empty arcs from this state... */
- for (iarc = 0; iarc < st->st_narcs; iarc++) {
- ar = &st->st_arc[iarc];
- if (ar->ar_label == EMPTY)
- continue;
- /* Look up in list of arcs from this state */
- for (jarc = 0; jarc < yy->ss_narcs; ++jarc) {
- zz = &yy->ss_arc[jarc];
- if (ar->ar_label == zz->sa_label)
- goto found;
- }
- /* Add new arc for this state */
- size = sizeof(ss_arc) * (yy->ss_narcs + 1);
- yy->ss_arc = (ss_arc *)PyObject_REALLOC(
- yy->ss_arc, size);
- if (yy->ss_arc == NULL)
- Py_FatalError("out of mem");
- zz = &yy->ss_arc[yy->ss_narcs++];
- zz->sa_label = ar->ar_label;
- zz->sa_bitset = newbitset(nbits);
- zz->sa_arrow = -1;
- found: ;
- /* Add destination */
- addclosure(zz->sa_bitset, nf, ar->ar_arrow);
- }
- }
- /* Now look up all the arrow states */
- for (jarc = 0; jarc < xx_state[istate].ss_narcs; jarc++) {
- zz = &xx_state[istate].ss_arc[jarc];
- for (jstate = 0; jstate < xx_nstates; jstate++) {
- if (samebitset(zz->sa_bitset,
- xx_state[jstate].ss_ss, nbits)) {
- zz->sa_arrow = jstate;
- goto done;
- }
- }
- size = sizeof(ss_state) * (xx_nstates + 1);
- xx_state = (ss_state *)PyObject_REALLOC(xx_state,
- size);
- if (xx_state == NULL)
- Py_FatalError("out of mem");
- zz->sa_arrow = xx_nstates;
- yy = &xx_state[xx_nstates++];
- yy->ss_ss = zz->sa_bitset;
- yy->ss_narcs = 0;
- yy->ss_arc = NULL;
- yy->ss_deleted = 0;
- yy->ss_finish = testbit(yy->ss_ss, nf->nf_finish);
- done: ;
- }
- }
-
- if (Py_DebugFlag)
- printssdfa(xx_nstates, xx_state, nbits, &gr->gr_ll,
- "before minimizing");
-
- simplify(xx_nstates, xx_state);
-
- if (Py_DebugFlag)
- printssdfa(xx_nstates, xx_state, nbits, &gr->gr_ll,
- "after minimizing");
-
- convert(d, xx_nstates, xx_state);
-
- for (int i = 0; i < xx_nstates; i++) {
- for (int j = 0; j < xx_state[i].ss_narcs; j++)
- delbitset(xx_state[i].ss_arc[j].sa_bitset);
- PyObject_FREE(xx_state[i].ss_arc);
- }
- PyObject_FREE(xx_state);
-}
-
-static void
-printssdfa(int xx_nstates, ss_state *xx_state, int nbits,
- labellist *ll, const char *msg)
-{
- int i, ibit, iarc;
- ss_state *yy;
- ss_arc *zz;
-
- printf("Subset DFA %s\n", msg);
- for (i = 0; i < xx_nstates; i++) {
- yy = &xx_state[i];
- if (yy->ss_deleted)
- continue;
- printf(" Subset %d", i);
- if (yy->ss_finish)
- printf(" (finish)");
- printf(" { ");
- for (ibit = 0; ibit < nbits; ibit++) {
- if (testbit(yy->ss_ss, ibit))
- printf("%d ", ibit);
- }
- printf("}\n");
- for (iarc = 0; iarc < yy->ss_narcs; iarc++) {
- zz = &yy->ss_arc[iarc];
- printf(" Arc to state %d, label %s\n",
- zz->sa_arrow,
- PyGrammar_LabelRepr(
- &ll->ll_label[zz->sa_label]));
- }
- }
-}
-
-
-/* PART THREE -- SIMPLIFY DFA */
-
-/* Simplify the DFA by repeatedly eliminating states that are
- equivalent to another oner. This is NOT Algorithm 3.3 from
- [Aho&Ullman 77]. It does not always finds the minimal DFA,
- but it does usually make a much smaller one... (For an example
- of sub-optimal behavior, try S: x a b+ | y a b+.)
-*/
-
-static int
-samestate(ss_state *s1, ss_state *s2)
-{
- int i;
-
- if (s1->ss_narcs != s2->ss_narcs || s1->ss_finish != s2->ss_finish)
- return 0;
- for (i = 0; i < s1->ss_narcs; i++) {
- if (s1->ss_arc[i].sa_arrow != s2->ss_arc[i].sa_arrow ||
- s1->ss_arc[i].sa_label != s2->ss_arc[i].sa_label)
- return 0;
- }
- return 1;
-}
-
-static void
-renamestates(int xx_nstates, ss_state *xx_state, int from, int to)
-{
- int i, j;
-
- if (Py_DebugFlag)
- printf("Rename state %d to %d.\n", from, to);
- for (i = 0; i < xx_nstates; i++) {
- if (xx_state[i].ss_deleted)
- continue;
- for (j = 0; j < xx_state[i].ss_narcs; j++) {
- if (xx_state[i].ss_arc[j].sa_arrow == from)
- xx_state[i].ss_arc[j].sa_arrow = to;
- }
- }
-}
-
-static void
-simplify(int xx_nstates, ss_state *xx_state)
-{
- int changes;
- int i, j;
-
- do {
- changes = 0;
- for (i = 1; i < xx_nstates; i++) {
- if (xx_state[i].ss_deleted)
- continue;
- for (j = 0; j < i; j++) {
- if (xx_state[j].ss_deleted)
- continue;
- if (samestate(&xx_state[i], &xx_state[j])) {
- xx_state[i].ss_deleted++;
- renamestates(xx_nstates, xx_state,
- i, j);
- changes++;
- break;
- }
- }
- }
- } while (changes);
-}
-
-
-/* PART FOUR -- GENERATE PARSING TABLES */
-
-/* Convert the DFA into a grammar that can be used by our parser */
-
-static void
-convert(dfa *d, int xx_nstates, ss_state *xx_state)
-{
- int i, j;
- ss_state *yy;
- ss_arc *zz;
-
- for (i = 0; i < xx_nstates; i++) {
- yy = &xx_state[i];
- if (yy->ss_deleted)
- continue;
- yy->ss_rename = addstate(d);
- }
-
- for (i = 0; i < xx_nstates; i++) {
- yy = &xx_state[i];
- if (yy->ss_deleted)
- continue;
- for (j = 0; j < yy->ss_narcs; j++) {
- zz = &yy->ss_arc[j];
- addarc(d, yy->ss_rename,
- xx_state[zz->sa_arrow].ss_rename,
- zz->sa_label);
- }
- if (yy->ss_finish)
- addarc(d, yy->ss_rename, yy->ss_rename, 0);
- }
-
- d->d_initial = 0;
-}
-
-
-/* PART FIVE -- GLUE IT ALL TOGETHER */
-
-static grammar *
-maketables(nfagrammar *gr)
-{
- int i;
- nfa *nf;
- dfa *d;
- grammar *g;
-
- if (gr->gr_nnfas == 0)
- return NULL;
- g = newgrammar(gr->gr_nfa[0]->nf_type);
- /* XXX first rule must be start rule */
- g->g_ll = gr->gr_ll;
-
- for (i = 0; i < gr->gr_nnfas; i++) {
- nf = gr->gr_nfa[i];
- if (Py_DebugFlag) {
- printf("Dump of NFA for '%s' ...\n", nf->nf_name);
- dumpnfa(&gr->gr_ll, nf);
- printf("Making DFA for '%s' ...\n", nf->nf_name);
- }
- d = adddfa(g, nf->nf_type, nf->nf_name);
- makedfa(gr, gr->gr_nfa[i], d);
- }
-
- return g;
-}
-
-grammar *
-pgen(node *n)
-{
- nfagrammar *gr;
- grammar *g;
-
- gr = metacompile(n);
- g = maketables(gr);
- translatelabels(g);
- addfirstsets(g);
- freenfagrammar(gr);
- return g;
-}
-
-grammar *
-Py_pgen(node *n)
-{
- return pgen(n);
-}
-
-/*
-
-Description
------------
-
-Input is a grammar in extended BNF (using * for repetition, + for
-at-least-once repetition, [] for optional parts, | for alternatives and
-() for grouping). This has already been parsed and turned into a parse
-tree.
-
-Each rule is considered as a regular expression in its own right.
-It is turned into a Non-deterministic Finite Automaton (NFA), which
-is then turned into a Deterministic Finite Automaton (DFA), which is then
-optimized to reduce the number of states. See [Aho&Ullman 77] chapter 3,
-or similar compiler books (this technique is more often used for lexical
-analyzers).
-
-The DFA's are used by the parser as parsing tables in a special way
-that's probably unique. Before they are usable, the FIRST sets of all
-non-terminals are computed.
-
-Reference
----------
-
-[Aho&Ullman 77]
- Aho&Ullman, Principles of Compiler Design, Addison-Wesley 1977
- (first edition)
-
-*/
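The description deleted above survives almost verbatim in the new implementation: Parser/pgen/pgen.py (later in this diff) performs the same NFA construction, subset-construction DFA conversion, and equivalent-state simplification in Python. As a minimal standalone sketch of the subset construction, with illustrative names taken from neither implementation:

    # Epsilon arcs are labelled None, as in the new pgen.py.
    EPS = None

    def closure(states, nfa_arcs):
        """Epsilon-closure of a set of NFA states."""
        todo, seen = list(states), set(states)
        while todo:
            s = todo.pop()
            for label, dst in nfa_arcs.get(s, []):
                if label is EPS and dst not in seen:
                    seen.add(dst)
                    todo.append(dst)
        return frozenset(seen)

    def nfa_to_dfa(start, nfa_arcs):
        """Subset construction: each DFA state is a set of NFA states."""
        start_set = closure({start}, nfa_arcs)
        dfa = {start_set: {}}
        work = [start_set]
        while work:
            state = work.pop()
            moves = {}
            for s in state:
                for label, dst in nfa_arcs.get(s, []):
                    if label is not EPS:
                        moves.setdefault(label, set()).add(dst)
            for label, dsts in moves.items():
                target = closure(dsts, nfa_arcs)
                if target not in dfa:
                    dfa[target] = {}
                    work.append(target)
                dfa[state][label] = target
        return dfa

    # NFA for a rule like  a: 'x' | 'y'  (epsilon-branched alternatives).
    arcs = {0: [(EPS, 1), (EPS, 3)], 1: [("x", 2)], 3: [("y", 4)]}
    print(nfa_to_dfa(0, arcs))  # the start state gets direct 'x' and 'y' arcs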
diff --git a/Parser/pgen/__init__.py b/Parser/pgen/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/Parser/pgen/__init__.py
diff --git a/Parser/pgen/__main__.py b/Parser/pgen/__main__.py
new file mode 100644
index 0000000000..965b08fd23
--- /dev/null
+++ b/Parser/pgen/__main__.py
@@ -0,0 +1,34 @@
+import argparse
+
+from .pgen import ParserGenerator
+
+def main():
+ parser = argparse.ArgumentParser(description="Parser generator main program.")
+ parser.add_argument(
+ "grammar", type=str, help="The file with the grammar definition in EBNF format"
+ )
+ parser.add_argument(
+ "tokens", type=str, help="The file with the token definitions"
+ )
+ parser.add_argument(
+ "graminit_h",
+ type=argparse.FileType('w'),
+ help="The path to write the grammar's non-terminals as #defines",
+ )
+ parser.add_argument(
+ "graminit_c",
+ type=argparse.FileType('w'),
+ help="The path to write the grammar as initialized data",
+ )
+
+ parser.add_argument("--verbose", "-v", action="count")
+ args = parser.parse_args()
+
+ p = ParserGenerator(args.grammar, args.tokens, verbose=args.verbose)
+ grammar = p.make_grammar()
+ grammar.produce_graminit_h(args.graminit_h.write)
+ grammar.produce_graminit_c(args.graminit_c.write)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/Parser/pgen/grammar.py b/Parser/pgen/grammar.py
new file mode 100644
index 0000000000..bd405e674e
--- /dev/null
+++ b/Parser/pgen/grammar.py
@@ -0,0 +1,160 @@
+import collections
+
+class Grammar:
+ """Pgen parsing tables conversion class.
+
+ Once initialized, this class supplies the grammar tables for the
+ parsing engine implemented by parse.py. The parsing engine
+ accesses the instance variables directly. The class here does not
+ provide initialization of the tables; several subclasses exist to
+ do this (see the conv and pgen modules).
+
+ The load() method reads the tables from a pickle file, which is
+ much faster than the other ways offered by subclasses. The pickle
+ file is written by calling dump() (after loading the grammar
+ tables using a subclass). The report() method prints a readable
+ representation of the tables to stdout, for debugging.
+
+ The instance variables are as follows:
+
+ symbol2number -- a dict mapping symbol names to numbers. Symbol
+ numbers are always 256 or higher, to distinguish
+ them from token numbers, which are between 0 and
+ 255 (inclusive).
+
+ number2symbol -- a dict mapping numbers to symbol names;
+ these two are each other's inverse.
+
+ states -- a list of DFAs, where each DFA is a list of
+ states, each state is a list of arcs, and each
+ arc is a (i, j) pair where i is a label and j is
+ a state number. The DFA number is the index into
+ this list. (This name is slightly confusing.)
+ Final states are represented by a special arc of
+ the form (0, j) where j is its own state number.
+
+ dfas -- a dict mapping symbol numbers to (DFA, first)
+ pairs, where DFA is an item from the states list
+ above, and first is a set of tokens that can
+ begin this grammar rule (represented by a dict
+ whose values are always 1).
+
+ labels -- a list of (x, y) pairs where x is either a token
+ number or a symbol number, and y is either None
+ or a string; the strings are keywords. The label
+ number is the index in this list; label numbers
+ are used to mark state transitions (arcs) in the
+ DFAs.
+
+ start -- the number of the grammar's start symbol.
+
+ keywords -- a dict mapping keyword strings to arc labels.
+
+ tokens -- a dict mapping token numbers to arc labels.
+
+ """
+
+ def __init__(self):
+ self.symbol2number = collections.OrderedDict()
+ self.number2symbol = collections.OrderedDict()
+ self.states = []
+ self.dfas = collections.OrderedDict()
+ self.labels = [(0, "EMPTY")]
+ self.keywords = collections.OrderedDict()
+ self.tokens = collections.OrderedDict()
+ self.symbol2label = collections.OrderedDict()
+ self.start = 256
+
+ def produce_graminit_h(self, writer):
+ writer("/* Generated by Parser/pgen */\n\n")
+ for number, symbol in self.number2symbol.items():
+ writer("#define {} {}\n".format(symbol, number))
+
+ def produce_graminit_c(self, writer):
+ writer("/* Generated by Parser/pgen */\n\n")
+
+ writer('#include "pgenheaders.h"\n')
+ writer('#include "grammar.h"\n')
+ writer("grammar _PyParser_Grammar;\n")
+
+ self.print_dfas(writer)
+ self.print_labels(writer)
+
+ writer("grammar _PyParser_Grammar = {\n")
+ writer(" {n_dfas},\n".format(n_dfas=len(self.dfas)))
+ writer(" dfas,\n")
+ writer(" {{{n_labels}, labels}},\n".format(n_labels=len(self.labels)))
+ writer(" {start_number}\n".format(start_number=self.start))
+ writer("};\n")
+
+ def print_labels(self, writer):
+ writer(
+ "static label labels[{n_labels}] = {{\n".format(n_labels=len(self.labels))
+ )
+ for label, name in self.labels:
+ if name is None:
+ writer(" {{{label}, 0}},\n".format(label=label))
+ else:
+ writer(
+ ' {{{label}, "{label_name}"}},\n'.format(
+ label=label, label_name=name
+ )
+ )
+ writer("};\n")
+
+ def print_dfas(self, writer):
+ self.print_states(writer)
+ writer("static dfa dfas[{}] = {{\n".format(len(self.dfas)))
+ for dfaindex, dfa_elem in enumerate(self.dfas.items()):
+ symbol, (dfa, first_sets) = dfa_elem
+ writer(
+ ' {{{dfa_symbol}, "{symbol_name}", '.format(
+ dfa_symbol=symbol, symbol_name=self.number2symbol[symbol]
+ )
+ + "0, {n_states}, states_{dfa_index},\n".format(
+ n_states=len(dfa), dfa_index=dfaindex
+ )
+ )
+ writer(' "')
+
+ k = [name for label, name in self.labels if label in first_sets]
+ bitset = bytearray((len(self.labels) >> 3) + 1)
+ for token in first_sets:
+ bitset[token >> 3] |= 1 << (token & 7)
+ for byte in bitset:
+ writer("\\%03o" % (byte & 0xFF))
+ writer('"},\n')
+ writer("};\n")
+
+ def print_states(self, write):
+ for dfaindex, dfa in enumerate(self.states):
+ self.print_arcs(write, dfaindex, dfa)
+ write(
+ "static state states_{dfa_index}[{n_states}] = {{\n".format(
+ dfa_index=dfaindex, n_states=len(dfa)
+ )
+ )
+ for stateindex, state in enumerate(dfa):
+ narcs = len(state)
+ write(
+ " {{{n_arcs}, arcs_{dfa_index}_{state_index}}},\n".format(
+ n_arcs=narcs, dfa_index=dfaindex, state_index=stateindex
+ )
+ )
+ write("};\n")
+
+ def print_arcs(self, write, dfaindex, states):
+ for stateindex, state in enumerate(states):
+ narcs = len(state)
+ write(
+ "static arc arcs_{dfa_index}_{state_index}[{n_arcs}] = {{\n".format(
+ dfa_index=dfaindex, state_index=stateindex, n_arcs=narcs
+ )
+ )
+ for a, b in state:
+ write(
+ " {{{from_label}, {to_state}}},\n".format(
+ from_label=a, to_state=b
+ )
+ )
+ write("};\n")
diff --git a/Parser/pgen/pgen.py b/Parser/pgen/pgen.py
new file mode 100644
index 0000000000..c878919f2a
--- /dev/null
+++ b/Parser/pgen/pgen.py
@@ -0,0 +1,408 @@
+import collections
+import tokenize # from stdlib
+
+from . import grammar, token
+
+class ParserGenerator(object):
+
+ def __init__(self, grammar_file, token_file, stream=None, verbose=False):
+ close_stream = None
+ if stream is None:
+ stream = open(grammar_file)
+ close_stream = stream.close
+ with open(token_file) as tok_file:
+ token_lines = tok_file.readlines()
+ self.tokens = dict(token.generate_tokens(token_lines))
+ self.opmap = dict(token.generate_opmap(token_lines))
+ # Manually add <> so it does not collide with !=
+ self.opmap['<>'] = "NOTEQUAL"
+ self.verbose = verbose
+ self.filename = grammar_file
+ self.stream = stream
+ self.generator = tokenize.generate_tokens(stream.readline)
+ self.gettoken() # Initialize lookahead
+ self.dfas, self.startsymbol = self.parse()
+ if close_stream is not None:
+ close_stream()
+ self.first = {} # map from symbol name to set of tokens
+ self.addfirstsets()
+
+ def make_grammar(self):
+ c = grammar.Grammar()
+ names = list(self.dfas.keys())
+ names.remove(self.startsymbol)
+ names.insert(0, self.startsymbol)
+ for name in names:
+ i = 256 + len(c.symbol2number)
+ c.symbol2number[name] = i
+ c.number2symbol[i] = name
+ for name in names:
+ self.make_label(c, name)
+ dfa = self.dfas[name]
+ states = []
+ for state in dfa:
+ arcs = []
+ for label, next in sorted(state.arcs.items()):
+ arcs.append((self.make_label(c, label), dfa.index(next)))
+ if state.isfinal:
+ arcs.append((0, dfa.index(state)))
+ states.append(arcs)
+ c.states.append(states)
+ c.dfas[c.symbol2number[name]] = (states, self.make_first(c, name))
+ c.start = c.symbol2number[self.startsymbol]
+
+ if self.verbose:
+ print("")
+ print("Grammar summary")
+ print("===============")
+
+ print("- {n_labels} labels".format(n_labels=len(c.labels)))
+ print("- {n_dfas} dfas".format(n_dfas=len(c.dfas)))
+ print("- {n_tokens} tokens".format(n_tokens=len(c.tokens)))
+ print("- {n_keywords} keywords".format(n_keywords=len(c.keywords)))
+ print(
+ "- Start symbol: {start_symbol}".format(
+ start_symbol=c.number2symbol[c.start]
+ )
+ )
+ return c
+
+ def make_first(self, c, name):
+ rawfirst = self.first[name]
+ first = set()
+ for label in sorted(rawfirst):
+ ilabel = self.make_label(c, label)
+ ##assert ilabel not in first # XXX failed on <> ... !=
+ first.add(ilabel)
+ return first
+
+ def make_label(self, c, label):
+ # XXX Maybe this should be a method on a subclass of converter?
+ ilabel = len(c.labels)
+ if label[0].isalpha():
+ # Either a symbol name or a named token
+ if label in c.symbol2number:
+ # A symbol name (a non-terminal)
+ if label in c.symbol2label:
+ return c.symbol2label[label]
+ else:
+ c.labels.append((c.symbol2number[label], None))
+ c.symbol2label[label] = ilabel
+ return ilabel
+ else:
+ # A named token (NAME, NUMBER, STRING)
+ itoken = self.tokens.get(label, None)
+ assert isinstance(itoken, int), label
+ assert itoken in self.tokens.values(), label
+ if itoken in c.tokens:
+ return c.tokens[itoken]
+ else:
+ c.labels.append((itoken, None))
+ c.tokens[itoken] = ilabel
+ return ilabel
+ else:
+ # Either a keyword or an operator
+ assert label[0] in ('"', "'"), label
+ value = eval(label)
+ if value[0].isalpha():
+ # A keyword
+ if value in c.keywords:
+ return c.keywords[value]
+ else:
+ c.labels.append((self.tokens["NAME"], value))
+ c.keywords[value] = ilabel
+ return ilabel
+ else:
+ # An operator (any non-numeric token)
+ tok_name = self.opmap[value] # Fails if unknown token
+ itoken = self.tokens[tok_name]
+ if itoken in c.tokens:
+ return c.tokens[itoken]
+ else:
+ c.labels.append((itoken, None))
+ c.tokens[itoken] = ilabel
+ return ilabel
+
+ def addfirstsets(self):
+ names = list(self.dfas.keys())
+ for name in names:
+ if name not in self.first:
+ self.calcfirst(name)
+
+ if self.verbose:
+ print("First set for {dfa_name}".format(dfa_name=name))
+ for item in self.first[name]:
+ print(" - {terminal}".format(terminal=item))
+
+ def calcfirst(self, name):
+ dfa = self.dfas[name]
+ self.first[name] = None # dummy to detect left recursion
+ state = dfa[0]
+ totalset = set()
+ overlapcheck = {}
+ for label, next in state.arcs.items():
+ if label in self.dfas:
+ if label in self.first:
+ fset = self.first[label]
+ if fset is None:
+ raise ValueError("recursion for rule %r" % name)
+ else:
+ self.calcfirst(label)
+ fset = self.first[label]
+ totalset.update(fset)
+ overlapcheck[label] = fset
+ else:
+ totalset.add(label)
+ overlapcheck[label] = {label}
+ inverse = {}
+ for label, itsfirst in overlapcheck.items():
+ for symbol in itsfirst:
+ if symbol in inverse:
+ raise ValueError("rule %s is ambiguous; %s is in the"
+ " first sets of %s as well as %s" %
+ (name, symbol, label, inverse[symbol]))
+ inverse[symbol] = label
+ self.first[name] = totalset
+
+ def parse(self):
+ dfas = collections.OrderedDict()
+ startsymbol = None
+ # MSTART: (NEWLINE | RULE)* ENDMARKER
+ while self.type != tokenize.ENDMARKER:
+ while self.type == tokenize.NEWLINE:
+ self.gettoken()
+ # RULE: NAME ':' RHS NEWLINE
+ name = self.expect(tokenize.NAME)
+ if self.verbose:
+ print("Processing rule {dfa_name}".format(dfa_name=name))
+ self.expect(tokenize.OP, ":")
+ a, z = self.parse_rhs()
+ self.expect(tokenize.NEWLINE)
+ if self.verbose:
+ self.dump_nfa(name, a, z)
+ dfa = self.make_dfa(a, z)
+ if self.verbose:
+ self.dump_dfa(name, dfa)
+ oldlen = len(dfa)
+ self.simplify_dfa(dfa)
+ newlen = len(dfa)
+ dfas[name] = dfa
+ #print name, oldlen, newlen
+ if startsymbol is None:
+ startsymbol = name
+ return dfas, startsymbol
+
+ def make_dfa(self, start, finish):
+ # To turn an NFA into a DFA, we define the states of the DFA
+ # to correspond to *sets* of states of the NFA. Then do some
+ # state reduction. Let's represent sets as dicts with 1 for
+ # values.
+ assert isinstance(start, NFAState)
+ assert isinstance(finish, NFAState)
+ def closure(state):
+ base = set()
+ addclosure(state, base)
+ return base
+ def addclosure(state, base):
+ assert isinstance(state, NFAState)
+ if state in base:
+ return
+ base.add(state)
+ for label, next in state.arcs:
+ if label is None:
+ addclosure(next, base)
+ states = [DFAState(closure(start), finish)]
+ for state in states: # NB states grows while we're iterating
+ arcs = {}
+ for nfastate in state.nfaset:
+ for label, next in nfastate.arcs:
+ if label is not None:
+ addclosure(next, arcs.setdefault(label, set()))
+ for label, nfaset in sorted(arcs.items()):
+ for st in states:
+ if st.nfaset == nfaset:
+ break
+ else:
+ st = DFAState(nfaset, finish)
+ states.append(st)
+ state.addarc(st, label)
+ return states # List of DFAState instances; first one is start
+
+ def dump_nfa(self, name, start, finish):
+ print("Dump of NFA for", name)
+ todo = [start]
+ for i, state in enumerate(todo):
+ print(" State", i, state is finish and "(final)" or "")
+ for label, next in state.arcs:
+ if next in todo:
+ j = todo.index(next)
+ else:
+ j = len(todo)
+ todo.append(next)
+ if label is None:
+ print(" -> %d" % j)
+ else:
+ print(" %s -> %d" % (label, j))
+
+ def dump_dfa(self, name, dfa):
+ print("Dump of DFA for", name)
+ for i, state in enumerate(dfa):
+ print(" State", i, state.isfinal and "(final)" or "")
+ for label, next in sorted(state.arcs.items()):
+ print(" %s -> %d" % (label, dfa.index(next)))
+
+ def simplify_dfa(self, dfa):
+ # This is not theoretically optimal, but works well enough.
+ # Algorithm: repeatedly look for two states that have the same
+ # set of arcs (same labels pointing to the same nodes) and
+ # unify them, until things stop changing.
+
+ # dfa is a list of DFAState instances
+ changes = True
+ while changes:
+ changes = False
+ for i, state_i in enumerate(dfa):
+ for j in range(i+1, len(dfa)):
+ state_j = dfa[j]
+ if state_i == state_j:
+ #print " unify", i, j
+ del dfa[j]
+ for state in dfa:
+ state.unifystate(state_j, state_i)
+ changes = True
+ break
+
+ def parse_rhs(self):
+ # RHS: ALT ('|' ALT)*
+ a, z = self.parse_alt()
+ if self.value != "|":
+ return a, z
+ else:
+ aa = NFAState()
+ zz = NFAState()
+ aa.addarc(a)
+ z.addarc(zz)
+ while self.value == "|":
+ self.gettoken()
+ a, z = self.parse_alt()
+ aa.addarc(a)
+ z.addarc(zz)
+ return aa, zz
+
+ def parse_alt(self):
+ # ALT: ITEM+
+ a, b = self.parse_item()
+ while (self.value in ("(", "[") or
+ self.type in (tokenize.NAME, tokenize.STRING)):
+ c, d = self.parse_item()
+ b.addarc(c)
+ b = d
+ return a, b
+
+ def parse_item(self):
+ # ITEM: '[' RHS ']' | ATOM ['+' | '*']
+ if self.value == "[":
+ self.gettoken()
+ a, z = self.parse_rhs()
+ self.expect(tokenize.OP, "]")
+ a.addarc(z)
+ return a, z
+ else:
+ a, z = self.parse_atom()
+ value = self.value
+ if value not in ("+", "*"):
+ return a, z
+ self.gettoken()
+ z.addarc(a)
+ if value == "+":
+ return a, z
+ else:
+ return a, a
+
+ def parse_atom(self):
+ # ATOM: '(' RHS ')' | NAME | STRING
+ if self.value == "(":
+ self.gettoken()
+ a, z = self.parse_rhs()
+ self.expect(tokenize.OP, ")")
+ return a, z
+ elif self.type in (tokenize.NAME, tokenize.STRING):
+ a = NFAState()
+ z = NFAState()
+ a.addarc(z, self.value)
+ self.gettoken()
+ return a, z
+ else:
+ self.raise_error("expected (...) or NAME or STRING, got %s/%s",
+ self.type, self.value)
+
+ def expect(self, type, value=None):
+ if self.type != type or (value is not None and self.value != value):
+ self.raise_error("expected %s/%s, got %s/%s",
+ type, value, self.type, self.value)
+ value = self.value
+ self.gettoken()
+ return value
+
+ def gettoken(self):
+ tup = next(self.generator)
+ while tup[0] in (tokenize.COMMENT, tokenize.NL):
+ tup = next(self.generator)
+ self.type, self.value, self.begin, self.end, self.line = tup
+ # print(getattr(tokenize, 'tok_name')[self.type], repr(self.value))
+
+ def raise_error(self, msg, *args):
+ if args:
+ try:
+ msg = msg % args
+ except:
+ msg = " ".join([msg] + list(map(str, args)))
+ raise SyntaxError(msg, (self.filename, self.end[0],
+ self.end[1], self.line))
+
+class NFAState(object):
+
+ def __init__(self):
+ self.arcs = [] # list of (label, NFAState) pairs
+
+ def addarc(self, next, label=None):
+ assert label is None or isinstance(label, str)
+ assert isinstance(next, NFAState)
+ self.arcs.append((label, next))
+
+class DFAState(object):
+
+ def __init__(self, nfaset, final):
+ assert isinstance(nfaset, set)
+ assert isinstance(next(iter(nfaset)), NFAState)
+ assert isinstance(final, NFAState)
+ self.nfaset = nfaset
+ self.isfinal = final in nfaset
+ self.arcs = {} # map from label to DFAState
+
+ def addarc(self, next, label):
+ assert isinstance(label, str)
+ assert label not in self.arcs
+ assert isinstance(next, DFAState)
+ self.arcs[label] = next
+
+ def unifystate(self, old, new):
+ for label, next in self.arcs.items():
+ if next is old:
+ self.arcs[label] = new
+
+ def __eq__(self, other):
+ # Equality test -- ignore the nfaset instance variable
+ assert isinstance(other, DFAState)
+ if self.isfinal != other.isfinal:
+ return False
+ # Can't just return self.arcs == other.arcs, because that
+ # would invoke this method recursively, with cycles...
+ if len(self.arcs) != len(other.arcs):
+ return False
+ for label, next in self.arcs.items():
+ if next is not other.arcs.get(label):
+ return False
+ return True
+
+ __hash__ = None # For Py3 compatibility.
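[Editorial note] The methods above are the heart of the port: parse_rhs/parse_alt/parse_item/parse_atom build an NFA per grammar rule by recursive descent, and make_dfa() then runs the classic subset construction over it. A minimal standalone sketch of that construction, applied to the toy rule r: 'a' ['b'] (all names here are illustrative, not part of the module):

    class NFA:                        # stripped-down stand-in for NFAState
        def __init__(self):
            self.arcs = []            # list of (label or None, NFA) pairs

    a, mid, z = NFA(), NFA(), NFA()
    a.arcs.append(('a', mid))
    mid.arcs.append(('b', z))         # the optional 'b' ...
    mid.arcs.append((None, z))        # ... or an epsilon arc straight to z

    def closure(state, seen=None):
        # Epsilon closure: every NFA state reachable via None-labeled arcs.
        seen = set() if seen is None else seen
        if state not in seen:
            seen.add(state)
            for label, nxt in state.arcs:
                if label is None:
                    closure(nxt, seen)
        return seen

    # Subset construction: each DFA state is a frozenset of NFA states.
    start = frozenset(closure(a))
    dfa, todo = {start: {}}, [start]
    while todo:
        cur = todo.pop()
        arcs = {}
        for nfastate in cur:
            for label, nxt in nfastate.arcs:
                if label is not None:
                    arcs.setdefault(label, set()).update(closure(nxt))
        for label, nfaset in arcs.items():
            target = frozenset(nfaset)
            dfa[cur][label] = target
            if target not in dfa:
                dfa[target] = {}
                todo.append(target)

    print(len(dfa))                   # 3 states: {a}, {mid, z}, {z}

simplify_dfa() would then merge any two states with identical finality and identical outgoing arcs; in this toy example no pair qualifies, so the three states stand.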
diff --git a/Parser/pgen/token.py b/Parser/pgen/token.py
new file mode 100644
index 0000000000..f9d45c450c
--- /dev/null
+++ b/Parser/pgen/token.py
@@ -0,0 +1,40 @@
+import itertools
+
+def generate_tokens(tokens):
+ numbers = itertools.count(0)
+ for line in tokens:
+ line = line.strip()
+
+ if not line:
+ continue
+ if line.strip().startswith('#'):
+ continue
+
+ name = line.split()[0]
+ yield (name, next(numbers))
+
+ yield ('N_TOKENS', next(numbers))
+ yield ('NT_OFFSET', 256)
+
+def generate_opmap(tokens):
+ for line in tokens:
+ line = line.strip()
+
+ if not line:
+ continue
+ if line.strip().startswith('#'):
+ continue
+
+ pieces = line.split()
+
+ if len(pieces) != 2:
+ continue
+
+ name, op = pieces
+ yield (op.strip("'"), name)
+
+    # Yield '<>' separately.  It cannot be listed in Grammar/Tokens
+    # because it shares its token name (NOTEQUAL) with '!='; including
+    # it there would make the two entries collide during the token
+    # generation in "generate_tokens" above.
+ yield ('<>', 'NOTEQUAL')
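[Editorial note] Both generators consume the raw lines of Grammar/Tokens; the format they expect — a token name optionally followed by a quoted literal — follows from the parsing logic above. A quick sanity check with a handful of sample lines, with generate_tokens() and generate_opmap() in scope:

    sample = [
        "ENDMARKER",
        "NAME",
        "NOTEQUAL '!='",
        "# comment lines and blanks are skipped",
        "",
    ]
    print(list(generate_tokens(sample)))
    # [('ENDMARKER', 0), ('NAME', 1), ('NOTEQUAL', 2),
    #  ('N_TOKENS', 3), ('NT_OFFSET', 256)]
    print(list(generate_opmap(sample)))
    # [('!=', 'NOTEQUAL'), ('<>', 'NOTEQUAL')]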
diff --git a/Parser/pgenmain.c b/Parser/pgenmain.c
deleted file mode 100644
index fa6c88253c..0000000000
--- a/Parser/pgenmain.c
+++ /dev/null
@@ -1,188 +0,0 @@
-
-/* Parser generator main program */
-
-/* This expects a filename containing the grammar as argv[1] (UNIX)
- or asks the console for such a file name (THINK C).
- It writes its output on two files in the current directory:
- - "graminit.c" gets the grammar as a bunch of initialized data
- - "graminit.h" gets the grammar's non-terminals as #defines.
- Error messages and status info during the generation process are
- written to stdout, or sometimes to stderr. */
-
-/* XXX TO DO:
- - check for duplicate definitions of names (instead of fatal err)
-*/
-
-#define PGEN
-
-#include "Python.h"
-#include "pycore_pymem.h"
-#include "pycore_pystate.h"
-#include "pgenheaders.h"
-#include "grammar.h"
-#include "node.h"
-#include "parsetok.h"
-#include "pgen.h"
-
-int Py_DebugFlag = 0;
-int Py_VerboseFlag = 0;
-int Py_IgnoreEnvironmentFlag = 0;
-_PyRuntimeState _PyRuntime = _PyRuntimeState_INIT;
-
-/* Forward */
-grammar *getgrammar(const char *filename);
-
-void
-Py_Exit(int sts)
-{
- exit(sts);
-}
-
-/* Needed by obmalloc.c */
-int PyGILState_Check(void)
-{ return 1; }
-
-void _PyMem_DumpTraceback(int fd, const void *ptr)
-{}
-
-int
-main(int argc, char **argv)
-{
- grammar *g;
- FILE *fp;
- char *filename, *graminit_h, *graminit_c;
-
- if (argc != 4) {
- fprintf(stderr,
- "usage: %s grammar graminit.h graminit.c\n", argv[0]);
- Py_Exit(2);
- }
- filename = argv[1];
- graminit_h = argv[2];
- graminit_c = argv[3];
- g = getgrammar(filename);
- fp = fopen(graminit_c, "w");
- if (fp == NULL) {
- perror(graminit_c);
- Py_Exit(1);
- }
- if (Py_DebugFlag)
- printf("Writing %s ...\n", graminit_c);
- printgrammar(g, fp);
- fclose(fp);
- fp = fopen(graminit_h, "w");
- if (fp == NULL) {
- perror(graminit_h);
- Py_Exit(1);
- }
- if (Py_DebugFlag)
- printf("Writing %s ...\n", graminit_h);
- printnonterminals(g, fp);
- fclose(fp);
- freegrammar(g);
- Py_Exit(0);
- return 0; /* Make gcc -Wall happy */
-}
-
-grammar *
-getgrammar(const char *filename)
-{
- FILE *fp;
- node *n;
- grammar *g0, *g;
- perrdetail err;
-
- fp = fopen(filename, "r");
- if (fp == NULL) {
- perror(filename);
- Py_Exit(1);
- }
- g0 = meta_grammar();
- n = PyParser_ParseFile(fp, filename, g0, g0->g_start,
- (char *)NULL, (char *)NULL, &err);
- fclose(fp);
- if (n == NULL) {
- fprintf(stderr, "Parsing error %d, line %d.\n",
- err.error, err.lineno);
- if (err.text != NULL) {
- size_t len;
- int i;
- fprintf(stderr, "%s", err.text);
- len = strlen(err.text);
- if (len == 0 || err.text[len-1] != '\n')
- fprintf(stderr, "\n");
- for (i = 0; i < err.offset; i++) {
- if (err.text[i] == '\t')
- putc('\t', stderr);
- else
- putc(' ', stderr);
- }
- fprintf(stderr, "^\n");
- PyObject_FREE(err.text);
- }
- Py_Exit(1);
- }
- g = pgen(n);
- PyNode_Free(n);
- if (g == NULL) {
- printf("Bad grammar.\n");
- Py_Exit(1);
- }
- return g;
-}
-
-/* Can't happen in pgen */
-PyObject*
-PyErr_Occurred()
-{
- return 0;
-}
-
-void
-Py_FatalError(const char *msg)
-{
- fprintf(stderr, "pgen: FATAL ERROR: %s\n", msg);
- Py_Exit(1);
-}
-
-/* No-nonsense my_readline() for tokenizer.c */
-
-char *
-PyOS_Readline(FILE *sys_stdin, FILE *sys_stdout, const char *prompt)
-{
- size_t n = 1000;
- char *p = (char *)PyMem_MALLOC(n);
- char *q;
- if (p == NULL)
- return NULL;
- fprintf(stderr, "%s", prompt);
- q = fgets(p, n, sys_stdin);
- if (q == NULL) {
- *p = '\0';
- return p;
- }
- n = strlen(p);
- if (n > 0 && p[n-1] != '\n')
- p[n-1] = '\n';
- return (char *)PyMem_REALLOC(p, n+1);
-}
-
-/* No-nonsense fgets */
-char *
-Py_UniversalNewlineFgets(char *buf, int n, FILE *stream, PyObject *fobj)
-{
- return fgets(buf, n, stream);
-}
-
-
-#include <stdarg.h>
-
-void
-PySys_WriteStderr(const char *format, ...)
-{
- va_list va;
-
- va_start(va, format);
- vfprintf(stderr, format, va);
- va_end(va);
-}
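[Editorial note] Everything from PyOS_Readline() down in this deleted file existed only so pgen could link as a standalone binary without the rest of the interpreter; running the generator under an ordinary Python via $(PYTHON_FOR_REGEN) makes all of these stubs unnecessary. For reference, the deleted driver's CLI was "pgen grammar graminit.h graminit.c" (see the usage string above). A hypothetical Python rendering of just that argument handling — the real replacement, Parser/pgen/__main__.py, also takes the new Grammar/Tokens file:

    import sys

    def main(argv):
        # Mirrors the argc check in the C main() above.
        if len(argv) != 4:
            sys.exit("usage: %s grammar graminit.h graminit.c" % argv[0])
        filename, graminit_h, graminit_c = argv[1:]
        print("would generate", graminit_h, "and", graminit_c, "from", filename)

    if __name__ == "__main__":
        main(sys.argv)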
diff --git a/Parser/printgrammar.c b/Parser/printgrammar.c
deleted file mode 100644
index 1a8b0e176f..0000000000
--- a/Parser/printgrammar.c
+++ /dev/null
@@ -1,120 +0,0 @@
-
-/* Print a bunch of C initializers that represent a grammar */
-
-#define PGEN
-
-#include "pgenheaders.h"
-#include "grammar.h"
-
-/* Forward */
-static void printarcs(int, dfa *, FILE *);
-static void printstates(grammar *, FILE *);
-static void printdfas(grammar *, FILE *);
-static void printlabels(grammar *, FILE *);
-
-void
-printgrammar(grammar *g, FILE *fp)
-{
- fprintf(fp, "/* Generated by Parser/pgen */\n\n");
- fprintf(fp, "#include \"pgenheaders.h\"\n");
- fprintf(fp, "#include \"grammar.h\"\n");
- fprintf(fp, "grammar _PyParser_Grammar;\n");
- printdfas(g, fp);
- printlabels(g, fp);
- fprintf(fp, "grammar _PyParser_Grammar = {\n");
- fprintf(fp, " %d,\n", g->g_ndfas);
- fprintf(fp, " dfas,\n");
- fprintf(fp, " {%d, labels},\n", g->g_ll.ll_nlabels);
- fprintf(fp, " %d\n", g->g_start);
- fprintf(fp, "};\n");
-}
-
-void
-printnonterminals(grammar *g, FILE *fp)
-{
- dfa *d;
- int i;
-
- fprintf(fp, "/* Generated by Parser/pgen */\n\n");
-
- d = g->g_dfa;
- for (i = g->g_ndfas; --i >= 0; d++)
- fprintf(fp, "#define %s %d\n", d->d_name, d->d_type);
-}
-
-static void
-printarcs(int i, dfa *d, FILE *fp)
-{
- arc *a;
- state *s;
- int j, k;
-
- s = d->d_state;
- for (j = 0; j < d->d_nstates; j++, s++) {
- fprintf(fp, "static arc arcs_%d_%d[%d] = {\n",
- i, j, s->s_narcs);
- a = s->s_arc;
- for (k = 0; k < s->s_narcs; k++, a++)
- fprintf(fp, " {%d, %d},\n", a->a_lbl, a->a_arrow);
- fprintf(fp, "};\n");
- }
-}
-
-static void
-printstates(grammar *g, FILE *fp)
-{
- state *s;
- dfa *d;
- int i, j;
-
- d = g->g_dfa;
- for (i = 0; i < g->g_ndfas; i++, d++) {
- printarcs(i, d, fp);
- fprintf(fp, "static state states_%d[%d] = {\n",
- i, d->d_nstates);
- s = d->d_state;
- for (j = 0; j < d->d_nstates; j++, s++)
- fprintf(fp, " {%d, arcs_%d_%d},\n",
- s->s_narcs, i, j);
- fprintf(fp, "};\n");
- }
-}
-
-static void
-printdfas(grammar *g, FILE *fp)
-{
- dfa *d;
- int i, j, n;
-
- printstates(g, fp);
- fprintf(fp, "static dfa dfas[%d] = {\n", g->g_ndfas);
- d = g->g_dfa;
- for (i = 0; i < g->g_ndfas; i++, d++) {
- fprintf(fp, " {%d, \"%s\", %d, %d, states_%d,\n",
- d->d_type, d->d_name, d->d_initial, d->d_nstates, i);
- fprintf(fp, " \"");
- n = NBYTES(g->g_ll.ll_nlabels);
- for (j = 0; j < n; j++)
- fprintf(fp, "\\%03o", d->d_first[j] & 0xff);
- fprintf(fp, "\"},\n");
- }
- fprintf(fp, "};\n");
-}
-
-static void
-printlabels(grammar *g, FILE *fp)
-{
- label *l;
- int i;
-
- fprintf(fp, "static label labels[%d] = {\n", g->g_ll.ll_nlabels);
- l = g->g_ll.ll_label;
- for (i = g->g_ll.ll_nlabels; --i >= 0; l++) {
- if (l->lb_str == NULL)
- fprintf(fp, " {%d, 0},\n", l->lb_type);
- else
- fprintf(fp, " {%d, \"%s\"},\n",
- l->lb_type, l->lb_str);
- }
- fprintf(fp, "};\n");
-}
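[Editorial note] The subtlest part of this deleted file to replicate in Python is printdfas(): the FIRST set of each DFA is serialized as a bitmask over label numbers, one byte per eight labels, emitted as octal escapes. A standalone sketch of the same encoding (the function name is illustrative; the byte count assumes NBYTES() is ceil(nlabels / 8)):

    def first_set_bytes(first_labels, nlabels):
        nbytes = -(-nlabels // 8)            # ceil(nlabels / 8)
        bits = bytearray(nbytes)
        for lbl in first_labels:
            bits[lbl >> 3] |= 1 << (lbl & 7)
        return "".join("\\%03o" % b for b in bits)

    print(first_set_bytes({2, 5, 6, 7}, 32))   # \344\000\000\000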
diff --git a/Parser/tokenizer.c b/Parser/tokenizer.c
index 1ded9ade37..44ec41512c 100644
--- a/Parser/tokenizer.c
+++ b/Parser/tokenizer.c
@@ -10,13 +10,11 @@
#include "tokenizer.h"
#include "errcode.h"
-#ifndef PGEN
#include "unicodeobject.h"
#include "bytesobject.h"
#include "fileobject.h"
#include "codecs.h"
#include "abstract.h"
-#endif /* PGEN */
/* Alternate tab spacing */
#define ALTTABSIZE 1
@@ -81,11 +79,9 @@ tok_new(void)
tok->enc = NULL;
tok->encoding = NULL;
tok->cont_line = 0;
-#ifndef PGEN
tok->filename = NULL;
tok->decoding_readline = NULL;
tok->decoding_buffer = NULL;
-#endif
tok->type_comments = 0;
return tok;
@@ -104,28 +100,6 @@ new_string(const char *s, Py_ssize_t len, struct tok_state *tok)
return result;
}
-#ifdef PGEN
-
-static char *
-decoding_fgets(char *s, int size, struct tok_state *tok)
-{
- return fgets(s, size, tok->fp);
-}
-
-static int
-decoding_feof(struct tok_state *tok)
-{
- return feof(tok->fp);
-}
-
-static char *
-decode_str(const char *str, int exec_input, struct tok_state *tok)
-{
- return new_string(str, strlen(str), tok);
-}
-
-#else /* PGEN */
-
static char *
error_ret(struct tok_state *tok) /* XXX */
{
@@ -551,7 +525,6 @@ decoding_fgets(char *s, int size, struct tok_state *tok)
return error_ret(tok);
}
}
-#ifndef PGEN
/* The default encoding is UTF-8, so make sure we don't have any
non-UTF-8 sequences in it. */
if (line && !tok->encoding) {
@@ -574,7 +547,6 @@ decoding_fgets(char *s, int size, struct tok_state *tok)
badchar, tok->filename, tok->lineno + 1);
return error_ret(tok);
}
-#endif
return line;
}
@@ -738,8 +710,6 @@ decode_str(const char *input, int single, struct tok_state *tok)
return str;
}
-#endif /* PGEN */
-
/* Set up tokenizer for string */
struct tok_state *
@@ -765,9 +735,7 @@ PyTokenizer_FromUTF8(const char *str, int exec_input)
struct tok_state *tok = tok_new();
if (tok == NULL)
return NULL;
-#ifndef PGEN
tok->input = str = translate_newlines(str, exec_input, tok);
-#endif
if (str == NULL) {
PyTokenizer_Free(tok);
return NULL;
@@ -828,11 +796,9 @@ PyTokenizer_Free(struct tok_state *tok)
{
if (tok->encoding != NULL)
PyMem_FREE(tok->encoding);
-#ifndef PGEN
Py_XDECREF(tok->decoding_readline);
Py_XDECREF(tok->decoding_buffer);
Py_XDECREF(tok->filename);
-#endif
if (tok->fp != NULL && tok->buf != NULL)
PyMem_FREE(tok->buf);
if (tok->input)
@@ -871,7 +837,6 @@ tok_nextc(struct tok_state *tok)
}
if (tok->prompt != NULL) {
char *newtok = PyOS_Readline(stdin, stdout, tok->prompt);
-#ifndef PGEN
if (newtok != NULL) {
char *translated = translate_newlines(newtok, 0, tok);
PyMem_FREE(newtok);
@@ -900,7 +865,6 @@ tok_nextc(struct tok_state *tok)
strcpy(newtok, buf);
Py_DECREF(u);
}
-#endif
if (tok->nextprompt != NULL)
tok->prompt = tok->nextprompt;
if (newtok == NULL)
@@ -1056,7 +1020,6 @@ tok_backup(struct tok_state *tok, int c)
static int
syntaxerror(struct tok_state *tok, const char *format, ...)
{
-#ifndef PGEN
va_list vargs;
#ifdef HAVE_STDARG_PROTOTYPES
va_start(vargs, format);
@@ -1069,9 +1032,6 @@ syntaxerror(struct tok_state *tok, const char *format, ...)
tok->lineno,
(int)(tok->cur - tok->line_start));
tok->done = E_ERROR;
-#else
- tok->done = E_TOKEN;
-#endif
return ERRORTOKEN;
}
@@ -1083,9 +1043,6 @@ indenterror(struct tok_state *tok)
return ERRORTOKEN;
}
-#ifdef PGEN
-#define verify_identifier(tok) 1
-#else
/* Verify that the identifier follows PEP 3131.
All identifier strings are guaranteed to be "ready" unicode objects.
*/
@@ -1112,7 +1069,6 @@ verify_identifier(struct tok_state *tok)
tok->done = E_IDENTIFIER;
return result;
}
-#endif
static int
tok_decimal_tail(struct tok_state *tok)
@@ -1667,25 +1623,20 @@ tok_get(struct tok_state *tok, char **p_start, char **p_end)
case '(':
case '[':
case '{':
-#ifndef PGEN
if (tok->level >= MAXLEVEL) {
return syntaxerror(tok, "too many nested parentheses");
}
tok->parenstack[tok->level] = c;
tok->parenlinenostack[tok->level] = tok->lineno;
-#endif
tok->level++;
break;
case ')':
case ']':
case '}':
-#ifndef PGEN
if (!tok->level) {
return syntaxerror(tok, "unmatched '%c'", c);
}
-#endif
tok->level--;
-#ifndef PGEN
int opening = tok->parenstack[tok->level];
if (!((opening == '(' && c == ')') ||
(opening == '[' && c == ']') ||
@@ -1704,7 +1655,6 @@ tok_get(struct tok_state *tok, char **p_start, char **p_end)
c, opening);
}
}
-#endif
break;
}
@@ -1742,11 +1692,7 @@ PyTokenizer_FindEncodingFilename(int fd, PyObject *filename)
FILE *fp;
char *p_start =NULL , *p_end =NULL , *encoding = NULL;
-#ifndef PGEN
fd = _Py_dup(fd);
-#else
- fd = dup(fd);
-#endif
if (fd < 0) {
return NULL;
}
@@ -1760,7 +1706,6 @@ PyTokenizer_FindEncodingFilename(int fd, PyObject *filename)
fclose(fp);
return NULL;
}
-#ifndef PGEN
if (filename != NULL) {
Py_INCREF(filename);
tok->filename = filename;
@@ -1773,7 +1718,6 @@ PyTokenizer_FindEncodingFilename(int fd, PyObject *filename)
return encoding;
}
}
-#endif
while (tok->lineno < 2 && tok->done == E_OK) {
PyTokenizer_Get(tok, &p_start, &p_end);
}
diff --git a/Parser/tokenizer.h b/Parser/tokenizer.h
index 9639c658b1..22e91f7dca 100644
--- a/Parser/tokenizer.h
+++ b/Parser/tokenizer.h
@@ -42,15 +42,9 @@ struct tok_state {
expression (cf. issue 16806) */
int level; /* () [] {} Parentheses nesting level */
/* Used to allow free continuations inside them */
-#ifndef PGEN
char parenstack[MAXLEVEL];
int parenlinenostack[MAXLEVEL];
- /* pgen doesn't have access to Python codecs, it cannot decode the input
- filename. The bytes filename might be kept, but it is only used by
- indenterror() and it is not really needed: pgen only compiles one file
- (Grammar/Grammar). */
PyObject *filename;
-#endif
/* Stuff for checking on different tab sizes */
int altindstack[MAXINDENT]; /* Stack of alternate indents */
/* Stuff for PEP 0263 */
@@ -63,10 +57,8 @@ struct tok_state {
const char* multi_line_start; /* pointer to start of first line of
a single line or multi line string
expression (cf. issue 16806) */
-#ifndef PGEN
PyObject *decoding_readline; /* open(...).readline */
PyObject *decoding_buffer;
-#endif
const char* enc; /* Encoding for the current str. */
const char* str;
const char* input; /* Tokenizer's newline translated copy of the string. */
diff --git a/Parser/tokenizer_pgen.c b/Parser/tokenizer_pgen.c
deleted file mode 100644
index 9cb8492d6a..0000000000
--- a/Parser/tokenizer_pgen.c
+++ /dev/null
@@ -1,2 +0,0 @@
-#define PGEN
-#include "tokenizer.c"
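[Editorial note] The rest of the patch is the regenerated Python/graminit.c. The wholesale renumbering below is expected: label numbers are now derived from Grammar/Tokens and assigned in the new generator's order, so nearly every arc changes even where the grammar itself did not. The table shape is unchanged — each arc is a {label, target-state} pair, and an arc with label 0 looping on a state marks it as accepting. A toy walker over the same shape (the data is made up for illustration):

    toy_dfa = [                # pass_stmt: 'pass'
        [(1, 1)],              # state 0: on label 1 ('pass'), go to state 1
        [(0, 1)],              # state 1: label-0 arc => accepting state
    ]

    def accepts(dfa, labels):
        state = 0
        for lbl in labels:
            for arc_label, target in dfa[state]:
                if arc_label == lbl:
                    state = target
                    break
            else:
                return False
        return any(arc_label == 0 for arc_label, _ in dfa[state])

    print(accepts(toy_dfa, [1]))    # True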
diff --git a/Python/graminit.c b/Python/graminit.c
index 5cdde2789c..6cb7bbc9f7 100644
--- a/Python/graminit.c
+++ b/Python/graminit.c
@@ -5,8 +5,8 @@
grammar _PyParser_Grammar;
static arc arcs_0_0[3] = {
{2, 1},
- {3, 1},
- {4, 2},
+ {3, 2},
+ {4, 1},
};
static arc arcs_0_1[1] = {
{0, 1},
@@ -20,9 +20,9 @@ static state states_0[3] = {
{1, arcs_0_2},
};
static arc arcs_1_0[3] = {
+ {44, 1},
{2, 0},
- {6, 0},
- {7, 1},
+ {45, 0},
};
static arc arcs_1_1[1] = {
{0, 1},
@@ -32,11 +32,11 @@ static state states_1[2] = {
{1, arcs_1_1},
};
static arc arcs_2_0[1] = {
- {9, 1},
+ {47, 1},
};
static arc arcs_2_1[2] = {
+ {44, 2},
{2, 1},
- {7, 2},
};
static arc arcs_2_2[1] = {
{0, 2},
@@ -47,27 +47,27 @@ static state states_2[3] = {
{1, arcs_2_2},
};
static arc arcs_3_0[1] = {
- {11, 1},
+ {10, 1},
};
static arc arcs_3_1[1] = {
- {12, 2},
+ {49, 2},
};
static arc arcs_3_2[2] = {
- {13, 3},
+ {5, 3},
{2, 4},
};
static arc arcs_3_3[2] = {
- {14, 5},
- {15, 6},
+ {50, 5},
+ {51, 6},
};
static arc arcs_3_4[1] = {
{0, 4},
};
static arc arcs_3_5[1] = {
- {15, 6},
+ {2, 4},
};
static arc arcs_3_6[1] = {
- {2, 4},
+ {50, 5},
};
static state states_3[7] = {
{1, arcs_3_0},
@@ -79,10 +79,10 @@ static state states_3[7] = {
{1, arcs_3_6},
};
static arc arcs_4_0[1] = {
- {10, 1},
+ {48, 1},
};
static arc arcs_4_1[2] = {
- {10, 1},
+ {48, 1},
{0, 1},
};
static state states_4[2] = {
@@ -90,12 +90,12 @@ static state states_4[2] = {
{2, arcs_4_1},
};
static arc arcs_5_0[1] = {
- {16, 1},
+ {52, 1},
};
static arc arcs_5_1[3] = {
- {18, 2},
- {19, 2},
- {20, 2},
+ {54, 2},
+ {55, 2},
+ {56, 2},
};
static arc arcs_5_2[1] = {
{0, 2},
@@ -106,10 +106,10 @@ static state states_5[3] = {
{1, arcs_5_2},
};
static arc arcs_6_0[1] = {
- {21, 1},
+ {16, 1},
};
static arc arcs_6_1[1] = {
- {19, 2},
+ {56, 2},
};
static arc arcs_6_2[1] = {
{0, 2},
@@ -120,30 +120,30 @@ static state states_6[3] = {
{1, arcs_6_2},
};
static arc arcs_7_0[1] = {
- {22, 1},
+ {21, 1},
};
static arc arcs_7_1[1] = {
- {23, 2},
+ {40, 2},
};
static arc arcs_7_2[1] = {
- {24, 3},
+ {57, 3},
};
static arc arcs_7_3[2] = {
- {25, 4},
- {27, 5},
+ {58, 4},
+ {59, 5},
};
static arc arcs_7_4[1] = {
- {26, 6},
+ {60, 6},
};
static arc arcs_7_5[2] = {
- {28, 7},
- {29, 8},
+ {61, 7},
+ {62, 8},
};
static arc arcs_7_6[1] = {
- {27, 5},
+ {59, 5},
};
static arc arcs_7_7[1] = {
- {29, 8},
+ {62, 8},
};
static arc arcs_7_8[1] = {
{0, 8},
@@ -160,17 +160,17 @@ static state states_7[9] = {
{1, arcs_7_8},
};
static arc arcs_8_0[1] = {
- {13, 1},
+ {5, 1},
};
static arc arcs_8_1[2] = {
- {30, 2},
- {15, 3},
+ {50, 2},
+ {63, 3},
};
static arc arcs_8_2[1] = {
- {15, 3},
+ {0, 2},
};
static arc arcs_8_3[1] = {
- {0, 3},
+ {50, 2},
};
static state states_8[4] = {
{1, arcs_8_0},
@@ -179,154 +179,147 @@ static state states_8[4] = {
{1, arcs_8_3},
};
static arc arcs_9_0[3] = {
- {31, 1},
- {34, 2},
- {35, 3},
+ {6, 1},
+ {64, 2},
+ {65, 3},
};
static arc arcs_9_1[4] = {
- {32, 4},
- {33, 5},
- {28, 6},
+ {66, 4},
+ {61, 5},
+ {65, 6},
{0, 1},
};
-static arc arcs_9_2[4] = {
- {31, 7},
- {33, 8},
- {28, 6},
- {0, 2},
+static arc arcs_9_2[1] = {
+ {65, 7},
};
-static arc arcs_9_3[1] = {
- {31, 9},
+static arc arcs_9_3[4] = {
+ {66, 8},
+ {67, 9},
+ {61, 5},
+ {0, 3},
};
-static arc arcs_9_4[1] = {
- {26, 10},
+static arc arcs_9_4[4] = {
+ {64, 2},
+ {61, 10},
+ {65, 11},
+ {0, 4},
};
-static arc arcs_9_5[5] = {
- {28, 11},
- {31, 12},
- {34, 13},
- {35, 3},
+static arc arcs_9_5[1] = {
{0, 5},
};
-static arc arcs_9_6[1] = {
+static arc arcs_9_6[3] = {
+ {66, 4},
+ {61, 5},
{0, 6},
};
static arc arcs_9_7[3] = {
- {33, 8},
- {28, 6},
+ {66, 12},
+ {61, 5},
{0, 7},
};
-static arc arcs_9_8[4] = {
- {28, 14},
- {31, 15},
- {35, 3},
+static arc arcs_9_8[5] = {
+ {6, 13},
+ {64, 2},
+ {61, 14},
+ {65, 3},
{0, 8},
};
-static arc arcs_9_9[3] = {
- {33, 16},
- {28, 6},
- {0, 9},
+static arc arcs_9_9[1] = {
+ {60, 15},
};
static arc arcs_9_10[3] = {
- {33, 5},
- {28, 6},
+ {64, 2},
+ {65, 11},
{0, 10},
};
static arc arcs_9_11[4] = {
- {31, 12},
- {34, 13},
- {35, 3},
+ {66, 4},
+ {67, 16},
+ {61, 5},
{0, 11},
};
-static arc arcs_9_12[4] = {
- {33, 5},
- {32, 4},
- {28, 6},
+static arc arcs_9_12[2] = {
+ {61, 5},
{0, 12},
};
static arc arcs_9_13[4] = {
- {31, 17},
- {33, 18},
- {28, 6},
+ {66, 17},
+ {61, 5},
+ {65, 18},
{0, 13},
};
-static arc arcs_9_14[3] = {
- {31, 15},
- {35, 3},
+static arc arcs_9_14[4] = {
+ {6, 13},
+ {64, 2},
+ {65, 3},
{0, 14},
};
-static arc arcs_9_15[4] = {
- {33, 8},
- {32, 19},
- {28, 6},
+static arc arcs_9_15[3] = {
+ {66, 8},
+ {61, 5},
{0, 15},
};
-static arc arcs_9_16[2] = {
- {28, 6},
- {0, 16},
+static arc arcs_9_16[1] = {
+ {60, 6},
};
-static arc arcs_9_17[3] = {
- {33, 18},
- {28, 6},
+static arc arcs_9_17[4] = {
+ {64, 2},
+ {61, 19},
+ {65, 20},
{0, 17},
};
-static arc arcs_9_18[4] = {
- {28, 20},
- {31, 21},
- {35, 3},
+static arc arcs_9_18[3] = {
+ {66, 17},
+ {61, 5},
{0, 18},
};
-static arc arcs_9_19[1] = {
- {26, 7},
+static arc arcs_9_19[3] = {
+ {64, 2},
+ {65, 20},
+ {0, 19},
};
-static arc arcs_9_20[3] = {
- {31, 21},
- {35, 3},
+static arc arcs_9_20[4] = {
+ {66, 17},
+ {67, 21},
+ {61, 5},
{0, 20},
};
-static arc arcs_9_21[4] = {
- {33, 18},
- {32, 22},
- {28, 6},
- {0, 21},
+static arc arcs_9_21[1] = {
+ {60, 18},
};
-static arc arcs_9_22[1] = {
- {26, 17},
-};
-static state states_9[23] = {
+static state states_9[22] = {
{3, arcs_9_0},
{4, arcs_9_1},
- {4, arcs_9_2},
- {1, arcs_9_3},
- {1, arcs_9_4},
- {5, arcs_9_5},
- {1, arcs_9_6},
+ {1, arcs_9_2},
+ {4, arcs_9_3},
+ {4, arcs_9_4},
+ {1, arcs_9_5},
+ {3, arcs_9_6},
{3, arcs_9_7},
- {4, arcs_9_8},
- {3, arcs_9_9},
+ {5, arcs_9_8},
+ {1, arcs_9_9},
{3, arcs_9_10},
{4, arcs_9_11},
- {4, arcs_9_12},
+ {2, arcs_9_12},
{4, arcs_9_13},
- {3, arcs_9_14},
- {4, arcs_9_15},
- {2, arcs_9_16},
- {3, arcs_9_17},
- {4, arcs_9_18},
- {1, arcs_9_19},
- {3, arcs_9_20},
- {4, arcs_9_21},
- {1, arcs_9_22},
+ {4, arcs_9_14},
+ {3, arcs_9_15},
+ {1, arcs_9_16},
+ {4, arcs_9_17},
+ {3, arcs_9_18},
+ {3, arcs_9_19},
+ {4, arcs_9_20},
+ {1, arcs_9_21},
};
static arc arcs_10_0[1] = {
- {23, 1},
+ {40, 1},
};
static arc arcs_10_1[2] = {
- {27, 2},
+ {59, 2},
{0, 1},
};
static arc arcs_10_2[1] = {
- {26, 3},
+ {60, 3},
};
static arc arcs_10_3[1] = {
{0, 3},
@@ -338,110 +331,104 @@ static state states_10[4] = {
{1, arcs_10_3},
};
static arc arcs_11_0[3] = {
- {37, 1},
- {34, 2},
- {35, 3},
+ {6, 1},
+ {64, 2},
+ {69, 3},
};
static arc arcs_11_1[3] = {
- {32, 4},
- {33, 5},
+ {66, 4},
+ {69, 5},
{0, 1},
};
-static arc arcs_11_2[3] = {
- {37, 6},
- {33, 7},
- {0, 2},
+static arc arcs_11_2[1] = {
+ {69, 6},
};
-static arc arcs_11_3[1] = {
- {37, 8},
+static arc arcs_11_3[3] = {
+ {66, 7},
+ {67, 8},
+ {0, 3},
};
-static arc arcs_11_4[1] = {
- {26, 9},
+static arc arcs_11_4[3] = {
+ {64, 2},
+ {69, 9},
+ {0, 4},
};
-static arc arcs_11_5[4] = {
- {37, 10},
- {34, 11},
- {35, 3},
+static arc arcs_11_5[2] = {
+ {66, 4},
{0, 5},
};
static arc arcs_11_6[2] = {
- {33, 7},
+ {66, 10},
{0, 6},
};
-static arc arcs_11_7[3] = {
- {37, 12},
- {35, 3},
+static arc arcs_11_7[4] = {
+ {6, 11},
+ {64, 2},
+ {69, 3},
{0, 7},
};
-static arc arcs_11_8[2] = {
- {33, 13},
- {0, 8},
+static arc arcs_11_8[1] = {
+ {60, 12},
};
-static arc arcs_11_9[2] = {
- {33, 5},
+static arc arcs_11_9[3] = {
+ {66, 4},
+ {67, 13},
{0, 9},
};
-static arc arcs_11_10[3] = {
- {33, 5},
- {32, 4},
+static arc arcs_11_10[1] = {
{0, 10},
};
static arc arcs_11_11[3] = {
- {37, 14},
- {33, 15},
+ {66, 14},
+ {69, 15},
{0, 11},
};
-static arc arcs_11_12[3] = {
- {33, 7},
- {32, 16},
+static arc arcs_11_12[2] = {
+ {66, 7},
{0, 12},
};
static arc arcs_11_13[1] = {
- {0, 13},
+ {60, 5},
};
-static arc arcs_11_14[2] = {
- {33, 15},
+static arc arcs_11_14[3] = {
+ {64, 2},
+ {69, 16},
{0, 14},
};
-static arc arcs_11_15[3] = {
- {37, 17},
- {35, 3},
+static arc arcs_11_15[2] = {
+ {66, 14},
{0, 15},
};
-static arc arcs_11_16[1] = {
- {26, 6},
-};
-static arc arcs_11_17[3] = {
- {33, 15},
- {32, 18},
- {0, 17},
+static arc arcs_11_16[3] = {
+ {66, 14},
+ {67, 17},
+ {0, 16},
};
-static arc arcs_11_18[1] = {
- {26, 14},
+static arc arcs_11_17[1] = {
+ {60, 15},
};
-static state states_11[19] = {
+static state states_11[18] = {
{3, arcs_11_0},
{3, arcs_11_1},
- {3, arcs_11_2},
- {1, arcs_11_3},
- {1, arcs_11_4},
- {4, arcs_11_5},
+ {1, arcs_11_2},
+ {3, arcs_11_3},
+ {3, arcs_11_4},
+ {2, arcs_11_5},
{2, arcs_11_6},
- {3, arcs_11_7},
- {2, arcs_11_8},
- {2, arcs_11_9},
- {3, arcs_11_10},
+ {4, arcs_11_7},
+ {1, arcs_11_8},
+ {3, arcs_11_9},
+ {1, arcs_11_10},
{3, arcs_11_11},
- {3, arcs_11_12},
+ {2, arcs_11_12},
{1, arcs_11_13},
- {2, arcs_11_14},
- {3, arcs_11_15},
- {1, arcs_11_16},
- {3, arcs_11_17},
- {1, arcs_11_18},
+ {3, arcs_11_14},
+ {2, arcs_11_15},
+ {3, arcs_11_16},
+ {1, arcs_11_17},
};
static arc arcs_12_0[1] = {
- {23, 1},
+ {40, 1},
};
static arc arcs_12_1[1] = {
{0, 1},
@@ -462,15 +449,15 @@ static state states_13[2] = {
{1, arcs_13_1},
};
static arc arcs_14_0[1] = {
- {38, 1},
+ {70, 1},
};
static arc arcs_14_1[2] = {
- {39, 2},
+ {71, 2},
{2, 3},
};
static arc arcs_14_2[2] = {
- {38, 1},
{2, 3},
+ {70, 1},
};
static arc arcs_14_3[1] = {
{0, 3},
@@ -482,14 +469,14 @@ static state states_14[4] = {
{1, arcs_14_3},
};
static arc arcs_15_0[8] = {
- {40, 1},
- {41, 1},
- {42, 1},
- {43, 1},
- {44, 1},
- {45, 1},
- {46, 1},
- {47, 1},
+ {72, 1},
+ {73, 1},
+ {74, 1},
+ {75, 1},
+ {76, 1},
+ {77, 1},
+ {78, 1},
+ {79, 1},
};
static arc arcs_15_1[1] = {
{0, 1},
@@ -499,51 +486,51 @@ static state states_15[2] = {
{1, arcs_15_1},
};
static arc arcs_16_0[1] = {
- {48, 1},
+ {80, 1},
};
static arc arcs_16_1[4] = {
- {49, 2},
- {50, 3},
- {32, 4},
+ {67, 2},
+ {81, 3},
+ {82, 4},
{0, 1},
};
-static arc arcs_16_2[1] = {
- {0, 2},
+static arc arcs_16_2[2] = {
+ {80, 5},
+ {83, 5},
};
-static arc arcs_16_3[2] = {
- {51, 2},
- {9, 2},
+static arc arcs_16_3[1] = {
+ {0, 3},
};
static arc arcs_16_4[2] = {
- {51, 5},
- {48, 5},
+ {47, 3},
+ {83, 3},
};
static arc arcs_16_5[3] = {
- {32, 4},
- {28, 2},
+ {67, 2},
+ {61, 3},
{0, 5},
};
static state states_16[6] = {
{1, arcs_16_0},
{4, arcs_16_1},
- {1, arcs_16_2},
- {2, arcs_16_3},
+ {2, arcs_16_2},
+ {1, arcs_16_3},
{2, arcs_16_4},
{3, arcs_16_5},
};
static arc arcs_17_0[1] = {
- {27, 1},
+ {59, 1},
};
static arc arcs_17_1[1] = {
- {26, 2},
+ {60, 2},
};
static arc arcs_17_2[2] = {
- {32, 3},
+ {67, 3},
{0, 2},
};
static arc arcs_17_3[2] = {
- {51, 4},
- {9, 4},
+ {47, 4},
+ {83, 4},
};
static arc arcs_17_4[1] = {
{0, 4},
@@ -556,16 +543,16 @@ static state states_17[5] = {
{1, arcs_17_4},
};
static arc arcs_18_0[2] = {
- {26, 1},
- {52, 1},
+ {84, 1},
+ {60, 1},
};
static arc arcs_18_1[2] = {
- {33, 2},
+ {66, 2},
{0, 1},
};
static arc arcs_18_2[3] = {
- {26, 1},
- {52, 1},
+ {84, 1},
+ {60, 1},
{0, 2},
};
static state states_18[3] = {
@@ -574,19 +561,19 @@ static state states_18[3] = {
{3, arcs_18_2},
};
static arc arcs_19_0[13] = {
- {53, 1},
- {54, 1},
- {55, 1},
- {56, 1},
- {57, 1},
- {58, 1},
- {59, 1},
- {60, 1},
- {61, 1},
- {62, 1},
- {63, 1},
- {64, 1},
- {65, 1},
+ {85, 1},
+ {86, 1},
+ {87, 1},
+ {88, 1},
+ {89, 1},
+ {90, 1},
+ {91, 1},
+ {92, 1},
+ {93, 1},
+ {94, 1},
+ {95, 1},
+ {96, 1},
+ {97, 1},
};
static arc arcs_19_1[1] = {
{0, 1},
@@ -596,10 +583,10 @@ static state states_19[2] = {
{1, arcs_19_1},
};
static arc arcs_20_0[1] = {
- {66, 1},
+ {22, 1},
};
static arc arcs_20_1[1] = {
- {67, 2},
+ {98, 2},
};
static arc arcs_20_2[1] = {
{0, 2},
@@ -610,7 +597,7 @@ static state states_20[3] = {
{1, arcs_20_2},
};
static arc arcs_21_0[1] = {
- {68, 1},
+ {31, 1},
};
static arc arcs_21_1[1] = {
{0, 1},
@@ -620,11 +607,11 @@ static state states_21[2] = {
{1, arcs_21_1},
};
static arc arcs_22_0[5] = {
- {69, 1},
- {70, 1},
- {71, 1},
- {72, 1},
- {73, 1},
+ {99, 1},
+ {100, 1},
+ {101, 1},
+ {102, 1},
+ {103, 1},
};
static arc arcs_22_1[1] = {
{0, 1},
@@ -634,7 +621,7 @@ static state states_22[2] = {
{1, arcs_22_1},
};
static arc arcs_23_0[1] = {
- {74, 1},
+ {18, 1},
};
static arc arcs_23_1[1] = {
{0, 1},
@@ -644,7 +631,7 @@ static state states_23[2] = {
{1, arcs_23_1},
};
static arc arcs_24_0[1] = {
- {75, 1},
+ {20, 1},
};
static arc arcs_24_1[1] = {
{0, 1},
@@ -654,10 +641,10 @@ static state states_24[2] = {
{1, arcs_24_1},
};
static arc arcs_25_0[1] = {
- {76, 1},
+ {33, 1},
};
static arc arcs_25_1[2] = {
- {48, 2},
+ {80, 2},
{0, 1},
};
static arc arcs_25_2[1] = {
@@ -669,7 +656,7 @@ static state states_25[3] = {
{1, arcs_25_2},
};
static arc arcs_26_0[1] = {
- {51, 1},
+ {83, 1},
};
static arc arcs_26_1[1] = {
{0, 1},
@@ -679,18 +666,18 @@ static state states_26[2] = {
{1, arcs_26_1},
};
static arc arcs_27_0[1] = {
- {77, 1},
+ {32, 1},
};
static arc arcs_27_1[2] = {
- {26, 2},
+ {60, 2},
{0, 1},
};
static arc arcs_27_2[2] = {
- {78, 3},
+ {24, 3},
{0, 2},
};
static arc arcs_27_3[1] = {
- {26, 4},
+ {60, 4},
};
static arc arcs_27_4[1] = {
{0, 4},
@@ -703,8 +690,8 @@ static state states_27[5] = {
{1, arcs_27_4},
};
static arc arcs_28_0[2] = {
- {79, 1},
- {80, 1},
+ {104, 1},
+ {105, 1},
};
static arc arcs_28_1[1] = {
{0, 1},
@@ -714,10 +701,10 @@ static state states_28[2] = {
{1, arcs_28_1},
};
static arc arcs_29_0[1] = {
- {81, 1},
+ {27, 1},
};
static arc arcs_29_1[1] = {
- {82, 2},
+ {106, 2},
};
static arc arcs_29_2[1] = {
{0, 2},
@@ -728,35 +715,35 @@ static state states_29[3] = {
{1, arcs_29_2},
};
static arc arcs_30_0[1] = {
- {78, 1},
+ {24, 1},
};
static arc arcs_30_1[3] = {
- {83, 2},
- {84, 2},
- {12, 3},
+ {107, 2},
+ {9, 2},
+ {49, 3},
};
static arc arcs_30_2[4] = {
- {83, 2},
- {84, 2},
- {12, 3},
- {81, 4},
+ {107, 2},
+ {9, 2},
+ {27, 4},
+ {49, 3},
};
static arc arcs_30_3[1] = {
- {81, 4},
+ {27, 4},
};
static arc arcs_30_4[3] = {
- {34, 5},
- {13, 6},
- {85, 5},
+ {5, 5},
+ {6, 6},
+ {108, 6},
};
static arc arcs_30_5[1] = {
- {0, 5},
+ {108, 7},
};
static arc arcs_30_6[1] = {
- {85, 7},
+ {0, 6},
};
static arc arcs_30_7[1] = {
- {15, 5},
+ {50, 6},
};
static state states_30[8] = {
{1, arcs_30_0},
@@ -769,14 +756,14 @@ static state states_30[8] = {
{1, arcs_30_7},
};
static arc arcs_31_0[1] = {
- {23, 1},
+ {40, 1},
};
static arc arcs_31_1[2] = {
- {87, 2},
+ {110, 2},
{0, 1},
};
static arc arcs_31_2[1] = {
- {23, 3},
+ {40, 3},
};
static arc arcs_31_3[1] = {
{0, 3},
@@ -788,14 +775,14 @@ static state states_31[4] = {
{1, arcs_31_3},
};
static arc arcs_32_0[1] = {
- {12, 1},
+ {49, 1},
};
static arc arcs_32_1[2] = {
- {87, 2},
+ {110, 2},
{0, 1},
};
static arc arcs_32_2[1] = {
- {23, 3},
+ {40, 3},
};
static arc arcs_32_3[1] = {
{0, 3},
@@ -807,14 +794,14 @@ static state states_32[4] = {
{1, arcs_32_3},
};
static arc arcs_33_0[1] = {
- {86, 1},
+ {109, 1},
};
static arc arcs_33_1[2] = {
- {33, 2},
+ {66, 2},
{0, 1},
};
static arc arcs_33_2[2] = {
- {86, 1},
+ {109, 1},
{0, 2},
};
static state states_33[3] = {
@@ -823,10 +810,10 @@ static state states_33[3] = {
{2, arcs_33_2},
};
static arc arcs_34_0[1] = {
- {88, 1},
+ {111, 1},
};
static arc arcs_34_1[2] = {
- {33, 0},
+ {66, 0},
{0, 1},
};
static state states_34[2] = {
@@ -834,10 +821,10 @@ static state states_34[2] = {
{2, arcs_34_1},
};
static arc arcs_35_0[1] = {
- {23, 1},
+ {40, 1},
};
static arc arcs_35_1[2] = {
- {83, 0},
+ {107, 0},
{0, 1},
};
static state states_35[2] = {
@@ -845,13 +832,13 @@ static state states_35[2] = {
{2, arcs_35_1},
};
static arc arcs_36_0[1] = {
- {89, 1},
+ {25, 1},
};
static arc arcs_36_1[1] = {
- {23, 2},
+ {40, 2},
};
static arc arcs_36_2[2] = {
- {33, 1},
+ {66, 1},
{0, 2},
};
static state states_36[3] = {
@@ -860,13 +847,13 @@ static state states_36[3] = {
{2, arcs_36_2},
};
static arc arcs_37_0[1] = {
- {90, 1},
+ {29, 1},
};
static arc arcs_37_1[1] = {
- {23, 2},
+ {40, 2},
};
static arc arcs_37_2[2] = {
- {33, 1},
+ {66, 1},
{0, 2},
};
static state states_37[3] = {
@@ -875,17 +862,17 @@ static state states_37[3] = {
{2, arcs_37_2},
};
static arc arcs_38_0[1] = {
- {91, 1},
+ {15, 1},
};
static arc arcs_38_1[1] = {
- {26, 2},
+ {60, 2},
};
static arc arcs_38_2[2] = {
- {33, 3},
+ {66, 3},
{0, 2},
};
static arc arcs_38_3[1] = {
- {26, 4},
+ {60, 4},
};
static arc arcs_38_4[1] = {
{0, 4},
@@ -898,15 +885,15 @@ static state states_38[5] = {
{1, arcs_38_4},
};
static arc arcs_39_0[9] = {
- {92, 1},
- {93, 1},
- {94, 1},
- {95, 1},
- {96, 1},
- {19, 1},
- {18, 1},
- {17, 1},
- {97, 1},
+ {112, 1},
+ {55, 1},
+ {53, 1},
+ {113, 1},
+ {56, 1},
+ {114, 1},
+ {115, 1},
+ {116, 1},
+ {117, 1},
};
static arc arcs_39_1[1] = {
{0, 1},
@@ -916,12 +903,12 @@ static state states_39[2] = {
{1, arcs_39_1},
};
static arc arcs_40_0[1] = {
- {21, 1},
+ {16, 1},
};
static arc arcs_40_1[3] = {
- {19, 2},
- {96, 2},
- {94, 2},
+ {113, 2},
+ {56, 2},
+ {117, 2},
};
static arc arcs_40_2[1] = {
{0, 2},
@@ -932,27 +919,27 @@ static state states_40[3] = {
{1, arcs_40_2},
};
static arc arcs_41_0[1] = {
- {98, 1},
+ {26, 1},
};
static arc arcs_41_1[1] = {
- {99, 2},
+ {118, 2},
};
static arc arcs_41_2[1] = {
- {27, 3},
+ {59, 3},
};
static arc arcs_41_3[1] = {
- {100, 4},
+ {119, 4},
};
static arc arcs_41_4[3] = {
- {101, 1},
- {102, 5},
+ {120, 1},
+ {121, 5},
{0, 4},
};
static arc arcs_41_5[1] = {
- {27, 6},
+ {59, 6},
};
static arc arcs_41_6[1] = {
- {100, 7},
+ {119, 7},
};
static arc arcs_41_7[1] = {
{0, 7},
@@ -968,26 +955,26 @@ static state states_41[8] = {
{1, arcs_41_7},
};
static arc arcs_42_0[1] = {
- {103, 1},
+ {35, 1},
};
static arc arcs_42_1[1] = {
- {99, 2},
+ {118, 2},
};
static arc arcs_42_2[1] = {
- {27, 3},
+ {59, 3},
};
static arc arcs_42_3[1] = {
- {100, 4},
+ {119, 4},
};
static arc arcs_42_4[2] = {
- {102, 5},
+ {121, 5},
{0, 4},
};
static arc arcs_42_5[1] = {
- {27, 6},
+ {59, 6},
};
static arc arcs_42_6[1] = {
- {100, 7},
+ {119, 7},
};
static arc arcs_42_7[1] = {
{0, 7},
@@ -1003,36 +990,36 @@ static state states_42[8] = {
{1, arcs_42_7},
};
static arc arcs_43_0[1] = {
- {104, 1},
+ {23, 1},
};
static arc arcs_43_1[1] = {
- {67, 2},
+ {98, 2},
};
static arc arcs_43_2[1] = {
- {105, 3},
+ {122, 3},
};
static arc arcs_43_3[1] = {
- {9, 4},
+ {47, 4},
};
static arc arcs_43_4[1] = {
- {27, 5},
+ {59, 5},
};
static arc arcs_43_5[2] = {
- {28, 6},
- {100, 7},
+ {61, 6},
+ {119, 7},
};
static arc arcs_43_6[1] = {
- {100, 7},
+ {119, 7},
};
static arc arcs_43_7[2] = {
- {102, 8},
+ {121, 8},
{0, 7},
};
static arc arcs_43_8[1] = {
- {27, 9},
+ {59, 9},
};
static arc arcs_43_9[1] = {
- {100, 10},
+ {119, 10},
};
static arc arcs_43_10[1] = {
{0, 10},
@@ -1051,47 +1038,47 @@ static state states_43[11] = {
{1, arcs_43_10},
};
static arc arcs_44_0[1] = {
- {106, 1},
+ {34, 1},
};
static arc arcs_44_1[1] = {
- {27, 2},
+ {59, 2},
};
static arc arcs_44_2[1] = {
- {100, 3},
+ {119, 3},
};
static arc arcs_44_3[2] = {
- {107, 4},
- {108, 5},
+ {123, 4},
+ {124, 5},
};
static arc arcs_44_4[1] = {
- {27, 6},
+ {59, 6},
};
static arc arcs_44_5[1] = {
- {27, 7},
+ {59, 7},
};
static arc arcs_44_6[1] = {
- {100, 8},
+ {119, 8},
};
static arc arcs_44_7[1] = {
- {100, 9},
+ {119, 9},
};
-static arc arcs_44_8[4] = {
- {107, 4},
- {102, 10},
- {108, 5},
+static arc arcs_44_8[1] = {
{0, 8},
};
-static arc arcs_44_9[1] = {
+static arc arcs_44_9[4] = {
+ {121, 10},
+ {123, 4},
+ {124, 5},
{0, 9},
};
static arc arcs_44_10[1] = {
- {27, 11},
+ {59, 11},
};
static arc arcs_44_11[1] = {
- {100, 12},
+ {119, 12},
};
static arc arcs_44_12[2] = {
- {108, 5},
+ {123, 4},
{0, 12},
};
static state states_44[13] = {
@@ -1103,28 +1090,28 @@ static state states_44[13] = {
{1, arcs_44_5},
{1, arcs_44_6},
{1, arcs_44_7},
- {4, arcs_44_8},
- {1, arcs_44_9},
+ {1, arcs_44_8},
+ {4, arcs_44_9},
{1, arcs_44_10},
{1, arcs_44_11},
{2, arcs_44_12},
};
static arc arcs_45_0[1] = {
- {109, 1},
+ {36, 1},
};
static arc arcs_45_1[1] = {
- {110, 2},
+ {125, 2},
};
static arc arcs_45_2[2] = {
- {33, 1},
- {27, 3},
+ {66, 1},
+ {59, 3},
};
static arc arcs_45_3[2] = {
- {28, 4},
- {100, 5},
+ {61, 4},
+ {119, 5},
};
static arc arcs_45_4[1] = {
- {100, 5},
+ {119, 5},
};
static arc arcs_45_5[1] = {
{0, 5},
@@ -1138,14 +1125,14 @@ static state states_45[6] = {
{1, arcs_45_5},
};
static arc arcs_46_0[1] = {
- {26, 1},
+ {60, 1},
};
static arc arcs_46_1[2] = {
- {87, 2},
+ {110, 2},
{0, 1},
};
static arc arcs_46_2[1] = {
- {111, 3},
+ {126, 3},
};
static arc arcs_46_3[1] = {
{0, 3},
@@ -1157,18 +1144,18 @@ static state states_46[4] = {
{1, arcs_46_3},
};
static arc arcs_47_0[1] = {
- {112, 1},
+ {127, 1},
};
static arc arcs_47_1[2] = {
- {26, 2},
+ {60, 2},
{0, 1},
};
static arc arcs_47_2[2] = {
- {87, 3},
+ {110, 3},
{0, 2},
};
static arc arcs_47_3[1] = {
- {23, 4},
+ {40, 4},
};
static arc arcs_47_4[1] = {
{0, 4},
@@ -1181,21 +1168,21 @@ static state states_47[5] = {
{1, arcs_47_4},
};
static arc arcs_48_0[2] = {
- {3, 1},
- {2, 2},
+ {2, 1},
+ {4, 2},
};
static arc arcs_48_1[1] = {
- {0, 1},
+ {128, 3},
};
static arc arcs_48_2[1] = {
- {113, 3},
+ {0, 2},
};
static arc arcs_48_3[1] = {
- {6, 4},
+ {45, 4},
};
static arc arcs_48_4[2] = {
- {6, 4},
- {114, 1},
+ {129, 2},
+ {45, 4},
};
static state states_48[5] = {
{2, arcs_48_0},
@@ -1205,14 +1192,14 @@ static state states_48[5] = {
{2, arcs_48_4},
};
static arc arcs_49_0[1] = {
- {26, 1},
+ {60, 1},
};
static arc arcs_49_1[2] = {
- {115, 2},
+ {130, 2},
{0, 1},
};
static arc arcs_49_2[1] = {
- {26, 3},
+ {60, 3},
};
static arc arcs_49_3[1] = {
{0, 3},
@@ -1224,36 +1211,36 @@ static state states_49[4] = {
{1, arcs_49_3},
};
static arc arcs_50_0[2] = {
- {116, 1},
- {117, 2},
+ {131, 1},
+ {132, 2},
};
-static arc arcs_50_1[2] = {
- {98, 3},
+static arc arcs_50_1[1] = {
{0, 1},
};
-static arc arcs_50_2[1] = {
+static arc arcs_50_2[2] = {
+ {26, 3},
{0, 2},
};
static arc arcs_50_3[1] = {
- {116, 4},
+ {132, 4},
};
static arc arcs_50_4[1] = {
- {102, 5},
+ {121, 5},
};
static arc arcs_50_5[1] = {
- {26, 2},
+ {60, 1},
};
static state states_50[6] = {
{2, arcs_50_0},
- {2, arcs_50_1},
- {1, arcs_50_2},
+ {1, arcs_50_1},
+ {2, arcs_50_2},
{1, arcs_50_3},
{1, arcs_50_4},
{1, arcs_50_5},
};
static arc arcs_51_0[2] = {
- {116, 1},
- {119, 1},
+ {134, 1},
+ {132, 1},
};
static arc arcs_51_1[1] = {
{0, 1},
@@ -1263,17 +1250,17 @@ static state states_51[2] = {
{1, arcs_51_1},
};
static arc arcs_52_0[1] = {
- {120, 1},
+ {28, 1},
};
static arc arcs_52_1[2] = {
- {36, 2},
- {27, 3},
+ {59, 2},
+ {68, 3},
};
static arc arcs_52_2[1] = {
- {27, 3},
+ {60, 4},
};
static arc arcs_52_3[1] = {
- {26, 4},
+ {59, 2},
};
static arc arcs_52_4[1] = {
{0, 4},
@@ -1286,17 +1273,17 @@ static state states_52[5] = {
{1, arcs_52_4},
};
static arc arcs_53_0[1] = {
- {120, 1},
+ {28, 1},
};
static arc arcs_53_1[2] = {
- {36, 2},
- {27, 3},
+ {59, 2},
+ {68, 3},
};
static arc arcs_53_2[1] = {
- {27, 3},
+ {133, 4},
};
static arc arcs_53_3[1] = {
- {118, 4},
+ {59, 2},
};
static arc arcs_53_4[1] = {
{0, 4},
@@ -1309,10 +1296,10 @@ static state states_53[5] = {
{1, arcs_53_4},
};
static arc arcs_54_0[1] = {
- {121, 1},
+ {135, 1},
};
static arc arcs_54_1[2] = {
- {122, 0},
+ {136, 0},
{0, 1},
};
static state states_54[2] = {
@@ -1320,10 +1307,10 @@ static state states_54[2] = {
{2, arcs_54_1},
};
static arc arcs_55_0[1] = {
- {123, 1},
+ {137, 1},
};
static arc arcs_55_1[2] = {
- {124, 0},
+ {138, 0},
{0, 1},
};
static state states_55[2] = {
@@ -1331,11 +1318,11 @@ static state states_55[2] = {
{2, arcs_55_1},
};
static arc arcs_56_0[2] = {
- {125, 1},
- {126, 2},
+ {30, 1},
+ {139, 2},
};
static arc arcs_56_1[1] = {
- {123, 2},
+ {137, 2},
};
static arc arcs_56_2[1] = {
{0, 2},
@@ -1346,10 +1333,10 @@ static state states_56[3] = {
{1, arcs_56_2},
};
static arc arcs_57_0[1] = {
- {111, 1},
+ {126, 1},
};
static arc arcs_57_1[2] = {
- {127, 0},
+ {140, 0},
{0, 1},
};
static state states_57[2] = {
@@ -1357,38 +1344,38 @@ static state states_57[2] = {
{2, arcs_57_1},
};
static arc arcs_58_0[10] = {
- {128, 1},
- {129, 1},
- {130, 1},
- {131, 1},
- {132, 1},
- {133, 1},
- {134, 1},
- {105, 1},
- {125, 2},
- {135, 3},
+ {141, 1},
+ {142, 1},
+ {143, 1},
+ {141, 1},
+ {144, 1},
+ {145, 1},
+ {146, 1},
+ {122, 1},
+ {147, 2},
+ {30, 3},
};
static arc arcs_58_1[1] = {
{0, 1},
};
-static arc arcs_58_2[1] = {
- {105, 1},
+static arc arcs_58_2[2] = {
+ {30, 1},
+ {0, 2},
};
-static arc arcs_58_3[2] = {
- {125, 1},
- {0, 3},
+static arc arcs_58_3[1] = {
+ {122, 1},
};
static state states_58[4] = {
{10, arcs_58_0},
{1, arcs_58_1},
- {1, arcs_58_2},
- {2, arcs_58_3},
+ {2, arcs_58_2},
+ {1, arcs_58_3},
};
static arc arcs_59_0[1] = {
- {34, 1},
+ {6, 1},
};
static arc arcs_59_1[1] = {
- {111, 2},
+ {126, 2},
};
static arc arcs_59_2[1] = {
{0, 2},
@@ -1399,10 +1386,10 @@ static state states_59[3] = {
{1, arcs_59_2},
};
static arc arcs_60_0[1] = {
- {136, 1},
+ {148, 1},
};
static arc arcs_60_1[2] = {
- {137, 0},
+ {149, 0},
{0, 1},
};
static state states_60[2] = {
@@ -1410,10 +1397,10 @@ static state states_60[2] = {
{2, arcs_60_1},
};
static arc arcs_61_0[1] = {
- {138, 1},
+ {150, 1},
};
static arc arcs_61_1[2] = {
- {139, 0},
+ {151, 0},
{0, 1},
};
static state states_61[2] = {
@@ -1421,10 +1408,10 @@ static state states_61[2] = {
{2, arcs_61_1},
};
static arc arcs_62_0[1] = {
- {140, 1},
+ {152, 1},
};
static arc arcs_62_1[2] = {
- {141, 0},
+ {153, 0},
{0, 1},
};
static state states_62[2] = {
@@ -1432,11 +1419,11 @@ static state states_62[2] = {
{2, arcs_62_1},
};
static arc arcs_63_0[1] = {
- {142, 1},
+ {154, 1},
};
static arc arcs_63_1[3] = {
- {143, 0},
- {144, 0},
+ {155, 0},
+ {156, 0},
{0, 1},
};
static state states_63[2] = {
@@ -1444,11 +1431,11 @@ static state states_63[2] = {
{3, arcs_63_1},
};
static arc arcs_64_0[1] = {
- {145, 1},
+ {157, 1},
};
static arc arcs_64_1[3] = {
- {146, 0},
- {147, 0},
+ {7, 0},
+ {8, 0},
{0, 1},
};
static state states_64[2] = {
@@ -1456,14 +1443,14 @@ static state states_64[2] = {
{3, arcs_64_1},
};
static arc arcs_65_0[1] = {
- {148, 1},
+ {158, 1},
};
static arc arcs_65_1[6] = {
- {34, 0},
- {11, 0},
- {149, 0},
- {150, 0},
- {151, 0},
+ {159, 0},
+ {6, 0},
+ {160, 0},
+ {161, 0},
+ {10, 0},
{0, 1},
};
static state states_65[2] = {
@@ -1471,13 +1458,13 @@ static state states_65[2] = {
{6, arcs_65_1},
};
static arc arcs_66_0[4] = {
- {146, 1},
- {147, 1},
- {152, 1},
- {153, 2},
+ {7, 1},
+ {8, 1},
+ {39, 1},
+ {162, 2},
};
static arc arcs_66_1[1] = {
- {148, 2},
+ {158, 2},
};
static arc arcs_66_2[1] = {
{0, 2},
@@ -1488,14 +1475,14 @@ static state states_66[3] = {
{1, arcs_66_2},
};
static arc arcs_67_0[1] = {
- {154, 1},
+ {163, 1},
};
static arc arcs_67_1[2] = {
- {35, 2},
+ {64, 2},
{0, 1},
};
static arc arcs_67_2[1] = {
- {148, 3},
+ {158, 3},
};
static arc arcs_67_3[1] = {
{0, 3},
@@ -1507,14 +1494,14 @@ static state states_67[4] = {
{1, arcs_67_3},
};
static arc arcs_68_0[2] = {
- {155, 1},
- {156, 2},
+ {17, 1},
+ {164, 2},
};
static arc arcs_68_1[1] = {
- {156, 2},
+ {164, 2},
};
static arc arcs_68_2[2] = {
- {157, 2},
+ {165, 2},
{0, 2},
};
static state states_68[3] = {
@@ -1523,108 +1510,108 @@ static state states_68[3] = {
{2, arcs_68_2},
};
static arc arcs_69_0[10] = {
- {13, 1},
- {159, 2},
- {161, 3},
- {23, 4},
- {164, 4},
- {165, 5},
- {84, 4},
- {166, 4},
- {167, 4},
- {168, 4},
+ {5, 1},
+ {9, 2},
+ {11, 2},
+ {12, 2},
+ {13, 2},
+ {14, 3},
+ {38, 4},
+ {40, 2},
+ {41, 2},
+ {42, 5},
};
static arc arcs_69_1[3] = {
- {51, 6},
- {158, 6},
- {15, 4},
+ {50, 2},
+ {166, 6},
+ {83, 6},
};
-static arc arcs_69_2[2] = {
- {158, 7},
- {160, 4},
+static arc arcs_69_2[1] = {
+ {0, 2},
};
static arc arcs_69_3[2] = {
- {162, 8},
- {163, 4},
+ {167, 2},
+ {166, 7},
};
-static arc arcs_69_4[1] = {
- {0, 4},
+static arc arcs_69_4[2] = {
+ {168, 2},
+ {169, 8},
};
static arc arcs_69_5[2] = {
- {165, 5},
+ {42, 5},
{0, 5},
};
static arc arcs_69_6[1] = {
- {15, 4},
+ {50, 2},
};
static arc arcs_69_7[1] = {
- {160, 4},
+ {167, 2},
};
static arc arcs_69_8[1] = {
- {163, 4},
+ {168, 2},
};
static state states_69[9] = {
{10, arcs_69_0},
{3, arcs_69_1},
- {2, arcs_69_2},
+ {1, arcs_69_2},
{2, arcs_69_3},
- {1, arcs_69_4},
+ {2, arcs_69_4},
{2, arcs_69_5},
{1, arcs_69_6},
{1, arcs_69_7},
{1, arcs_69_8},
};
static arc arcs_70_0[2] = {
- {99, 1},
- {52, 1},
+ {118, 1},
+ {84, 1},
};
static arc arcs_70_1[3] = {
- {169, 2},
- {33, 3},
+ {66, 2},
+ {170, 3},
{0, 1},
};
-static arc arcs_70_2[1] = {
+static arc arcs_70_2[3] = {
+ {118, 4},
+ {84, 4},
{0, 2},
};
-static arc arcs_70_3[3] = {
- {99, 4},
- {52, 4},
+static arc arcs_70_3[1] = {
{0, 3},
};
static arc arcs_70_4[2] = {
- {33, 3},
+ {66, 2},
{0, 4},
};
static state states_70[5] = {
{2, arcs_70_0},
{3, arcs_70_1},
- {1, arcs_70_2},
- {3, arcs_70_3},
+ {3, arcs_70_2},
+ {1, arcs_70_3},
{2, arcs_70_4},
};
static arc arcs_71_0[3] = {
- {13, 1},
- {159, 2},
- {83, 3},
+ {5, 1},
+ {107, 2},
+ {14, 3},
};
static arc arcs_71_1[2] = {
- {14, 4},
- {15, 5},
+ {50, 4},
+ {51, 5},
};
static arc arcs_71_2[1] = {
- {170, 6},
+ {40, 4},
};
static arc arcs_71_3[1] = {
- {23, 5},
+ {171, 6},
};
static arc arcs_71_4[1] = {
- {15, 5},
+ {0, 4},
};
static arc arcs_71_5[1] = {
- {0, 5},
+ {50, 4},
};
static arc arcs_71_6[1] = {
- {160, 5},
+ {167, 4},
};
static state states_71[7] = {
{3, arcs_71_0},
@@ -1636,14 +1623,14 @@ static state states_71[7] = {
{1, arcs_71_6},
};
static arc arcs_72_0[1] = {
- {171, 1},
+ {172, 1},
};
static arc arcs_72_1[2] = {
- {33, 2},
+ {66, 2},
{0, 1},
};
static arc arcs_72_2[2] = {
- {171, 1},
+ {172, 1},
{0, 2},
};
static state states_72[3] = {
@@ -1652,37 +1639,37 @@ static state states_72[3] = {
{2, arcs_72_2},
};
static arc arcs_73_0[2] = {
- {26, 1},
- {27, 2},
+ {59, 1},
+ {60, 2},
};
-static arc arcs_73_1[2] = {
- {27, 2},
+static arc arcs_73_1[3] = {
+ {173, 3},
+ {60, 4},
{0, 1},
};
-static arc arcs_73_2[3] = {
- {26, 3},
- {172, 4},
+static arc arcs_73_2[2] = {
+ {59, 1},
{0, 2},
};
-static arc arcs_73_3[2] = {
- {172, 4},
+static arc arcs_73_3[1] = {
{0, 3},
};
-static arc arcs_73_4[1] = {
+static arc arcs_73_4[2] = {
+ {173, 3},
{0, 4},
};
static state states_73[5] = {
{2, arcs_73_0},
- {2, arcs_73_1},
- {3, arcs_73_2},
- {2, arcs_73_3},
- {1, arcs_73_4},
+ {3, arcs_73_1},
+ {2, arcs_73_2},
+ {1, arcs_73_3},
+ {2, arcs_73_4},
};
static arc arcs_74_0[1] = {
- {27, 1},
+ {59, 1},
};
static arc arcs_74_1[2] = {
- {26, 2},
+ {60, 2},
{0, 1},
};
static arc arcs_74_2[1] = {
@@ -1694,16 +1681,16 @@ static state states_74[3] = {
{1, arcs_74_2},
};
static arc arcs_75_0[2] = {
- {111, 1},
- {52, 1},
+ {126, 1},
+ {84, 1},
};
static arc arcs_75_1[2] = {
- {33, 2},
+ {66, 2},
{0, 1},
};
static arc arcs_75_2[3] = {
- {111, 1},
- {52, 1},
+ {126, 1},
+ {84, 1},
{0, 2},
};
static state states_75[3] = {
@@ -1712,14 +1699,14 @@ static state states_75[3] = {
{3, arcs_75_2},
};
static arc arcs_76_0[1] = {
- {26, 1},
+ {60, 1},
};
static arc arcs_76_1[2] = {
- {33, 2},
+ {66, 2},
{0, 1},
};
static arc arcs_76_2[2] = {
- {26, 1},
+ {60, 1},
{0, 2},
};
static state states_76[3] = {
@@ -1728,100 +1715,100 @@ static state states_76[3] = {
{2, arcs_76_2},
};
static arc arcs_77_0[3] = {
- {26, 1},
- {35, 2},
- {52, 3},
+ {64, 1},
+ {84, 2},
+ {60, 3},
};
-static arc arcs_77_1[4] = {
- {27, 4},
- {169, 5},
- {33, 6},
- {0, 1},
+static arc arcs_77_1[1] = {
+ {126, 4},
};
-static arc arcs_77_2[1] = {
- {111, 7},
+static arc arcs_77_2[3] = {
+ {66, 5},
+ {170, 6},
+ {0, 2},
};
-static arc arcs_77_3[3] = {
- {169, 5},
- {33, 6},
+static arc arcs_77_3[4] = {
+ {66, 5},
+ {59, 7},
+ {170, 6},
{0, 3},
};
-static arc arcs_77_4[1] = {
- {26, 7},
+static arc arcs_77_4[3] = {
+ {66, 8},
+ {170, 6},
+ {0, 4},
};
-static arc arcs_77_5[1] = {
+static arc arcs_77_5[3] = {
+ {84, 9},
+ {60, 9},
{0, 5},
};
-static arc arcs_77_6[3] = {
- {26, 8},
- {52, 8},
+static arc arcs_77_6[1] = {
{0, 6},
};
-static arc arcs_77_7[3] = {
- {169, 5},
- {33, 9},
- {0, 7},
+static arc arcs_77_7[1] = {
+ {60, 4},
};
-static arc arcs_77_8[2] = {
- {33, 6},
+static arc arcs_77_8[3] = {
+ {64, 10},
+ {60, 11},
{0, 8},
};
-static arc arcs_77_9[3] = {
- {26, 10},
- {35, 11},
+static arc arcs_77_9[2] = {
+ {66, 5},
{0, 9},
};
static arc arcs_77_10[1] = {
- {27, 12},
+ {126, 12},
};
static arc arcs_77_11[1] = {
- {111, 13},
+ {59, 13},
};
-static arc arcs_77_12[1] = {
- {26, 13},
+static arc arcs_77_12[2] = {
+ {66, 8},
+ {0, 12},
};
-static arc arcs_77_13[2] = {
- {33, 9},
- {0, 13},
+static arc arcs_77_13[1] = {
+ {60, 12},
};
static state states_77[14] = {
{3, arcs_77_0},
- {4, arcs_77_1},
- {1, arcs_77_2},
- {3, arcs_77_3},
- {1, arcs_77_4},
- {1, arcs_77_5},
- {3, arcs_77_6},
- {3, arcs_77_7},
- {2, arcs_77_8},
- {3, arcs_77_9},
+ {1, arcs_77_1},
+ {3, arcs_77_2},
+ {4, arcs_77_3},
+ {3, arcs_77_4},
+ {3, arcs_77_5},
+ {1, arcs_77_6},
+ {1, arcs_77_7},
+ {3, arcs_77_8},
+ {2, arcs_77_9},
{1, arcs_77_10},
{1, arcs_77_11},
- {1, arcs_77_12},
- {2, arcs_77_13},
+ {2, arcs_77_12},
+ {1, arcs_77_13},
};
static arc arcs_78_0[1] = {
- {173, 1},
+ {19, 1},
};
static arc arcs_78_1[1] = {
- {23, 2},
+ {40, 2},
};
static arc arcs_78_2[2] = {
- {13, 3},
- {27, 4},
+ {5, 3},
+ {59, 4},
};
static arc arcs_78_3[2] = {
- {14, 5},
- {15, 6},
+ {50, 5},
+ {51, 6},
};
static arc arcs_78_4[1] = {
- {100, 7},
+ {119, 7},
};
static arc arcs_78_5[1] = {
- {15, 6},
+ {59, 4},
};
static arc arcs_78_6[1] = {
- {27, 4},
+ {50, 5},
};
static arc arcs_78_7[1] = {
{0, 7},
@@ -1840,7 +1827,7 @@ static arc arcs_79_0[1] = {
{174, 1},
};
static arc arcs_79_1[2] = {
- {33, 2},
+ {66, 2},
{0, 1},
};
static arc arcs_79_2[2] = {
@@ -1853,30 +1840,30 @@ static state states_79[3] = {
{2, arcs_79_2},
};
static arc arcs_80_0[3] = {
- {26, 1},
- {35, 2},
- {34, 2},
+ {6, 1},
+ {64, 1},
+ {60, 2},
};
-static arc arcs_80_1[4] = {
- {169, 3},
- {115, 2},
- {32, 2},
- {0, 1},
+static arc arcs_80_1[1] = {
+ {60, 3},
};
-static arc arcs_80_2[1] = {
- {26, 3},
+static arc arcs_80_2[4] = {
+ {130, 1},
+ {67, 1},
+ {170, 3},
+ {0, 2},
};
static arc arcs_80_3[1] = {
{0, 3},
};
static state states_80[4] = {
{3, arcs_80_0},
- {4, arcs_80_1},
- {1, arcs_80_2},
+ {1, arcs_80_1},
+ {4, arcs_80_2},
{1, arcs_80_3},
};
static arc arcs_81_0[2] = {
- {169, 1},
+ {170, 1},
{176, 1},
};
static arc arcs_81_1[1] = {
@@ -1887,16 +1874,16 @@ static state states_81[2] = {
{1, arcs_81_1},
};
static arc arcs_82_0[1] = {
- {104, 1},
+ {23, 1},
};
static arc arcs_82_1[1] = {
- {67, 2},
+ {98, 2},
};
static arc arcs_82_2[1] = {
- {105, 3},
+ {122, 3},
};
static arc arcs_82_3[1] = {
- {116, 4},
+ {132, 4},
};
static arc arcs_82_4[2] = {
{175, 5},
@@ -1914,7 +1901,7 @@ static state states_82[6] = {
{1, arcs_82_5},
};
static arc arcs_83_0[2] = {
- {21, 1},
+ {16, 1},
{177, 2},
};
static arc arcs_83_1[1] = {
@@ -1929,10 +1916,10 @@ static state states_83[3] = {
{1, arcs_83_2},
};
static arc arcs_84_0[1] = {
- {98, 1},
+ {26, 1},
};
static arc arcs_84_1[1] = {
- {118, 2},
+ {133, 2},
};
static arc arcs_84_2[2] = {
{175, 3},
@@ -1948,7 +1935,7 @@ static state states_84[4] = {
{1, arcs_84_3},
};
static arc arcs_85_0[1] = {
- {23, 1},
+ {40, 1},
};
static arc arcs_85_1[1] = {
{0, 1},
@@ -1958,10 +1945,10 @@ static state states_85[2] = {
{1, arcs_85_1},
};
static arc arcs_86_0[1] = {
- {179, 1},
+ {37, 1},
};
static arc arcs_86_1[2] = {
- {180, 2},
+ {179, 2},
{0, 1},
};
static arc arcs_86_2[1] = {
@@ -1973,11 +1960,11 @@ static state states_86[3] = {
{1, arcs_86_2},
};
static arc arcs_87_0[2] = {
- {78, 1},
- {48, 2},
+ {24, 1},
+ {80, 2},
};
static arc arcs_87_1[1] = {
- {26, 2},
+ {60, 2},
};
static arc arcs_87_2[1] = {
{0, 2},
@@ -1988,44 +1975,44 @@ static state states_87[3] = {
{1, arcs_87_2},
};
static arc arcs_88_0[2] = {
- {3, 1},
- {2, 2},
+ {2, 1},
+ {4, 2},
};
-static arc arcs_88_1[1] = {
- {0, 1},
+static arc arcs_88_1[2] = {
+ {128, 3},
+ {61, 4},
};
-static arc arcs_88_2[2] = {
- {28, 3},
- {113, 4},
+static arc arcs_88_2[1] = {
+ {0, 2},
};
static arc arcs_88_3[1] = {
- {2, 5},
+ {45, 5},
};
static arc arcs_88_4[1] = {
- {6, 6},
+ {2, 6},
};
-static arc arcs_88_5[1] = {
- {113, 4},
+static arc arcs_88_5[2] = {
+ {129, 2},
+ {45, 5},
};
-static arc arcs_88_6[2] = {
- {6, 6},
- {114, 1},
+static arc arcs_88_6[1] = {
+ {128, 3},
};
static state states_88[7] = {
{2, arcs_88_0},
- {1, arcs_88_1},
- {2, arcs_88_2},
+ {2, arcs_88_1},
+ {1, arcs_88_2},
{1, arcs_88_3},
{1, arcs_88_4},
- {1, arcs_88_5},
- {2, arcs_88_6},
+ {2, arcs_88_5},
+ {1, arcs_88_6},
};
static arc arcs_89_0[1] = {
- {182, 1},
+ {181, 1},
};
static arc arcs_89_1[2] = {
+ {44, 2},
{2, 1},
- {7, 2},
};
static arc arcs_89_2[1] = {
{0, 2},
@@ -2036,20 +2023,20 @@ static state states_89[3] = {
{1, arcs_89_2},
};
static arc arcs_90_0[1] = {
- {13, 1},
+ {5, 1},
};
static arc arcs_90_1[2] = {
- {183, 2},
- {15, 3},
+ {50, 2},
+ {182, 3},
};
static arc arcs_90_2[1] = {
- {15, 3},
+ {58, 4},
};
static arc arcs_90_3[1] = {
- {25, 4},
+ {50, 2},
};
static arc arcs_90_4[1] = {
- {26, 5},
+ {60, 5},
};
static arc arcs_90_5[1] = {
{0, 5},
@@ -2063,387 +2050,399 @@ static state states_90[6] = {
{1, arcs_90_5},
};
static arc arcs_91_0[3] = {
- {26, 1},
- {34, 2},
- {35, 3},
+ {6, 1},
+ {64, 2},
+ {60, 3},
};
-static arc arcs_91_1[2] = {
- {33, 4},
+static arc arcs_91_1[3] = {
+ {66, 4},
+ {60, 5},
{0, 1},
};
-static arc arcs_91_2[3] = {
- {26, 5},
- {33, 6},
- {0, 2},
+static arc arcs_91_2[1] = {
+ {60, 6},
};
-static arc arcs_91_3[1] = {
- {26, 7},
+static arc arcs_91_3[2] = {
+ {66, 7},
+ {0, 3},
};
-static arc arcs_91_4[4] = {
- {26, 1},
- {34, 8},
- {35, 3},
- {0, 4},
+static arc arcs_91_4[2] = {
+ {64, 2},
+ {60, 5},
};
static arc arcs_91_5[2] = {
- {33, 6},
+ {66, 4},
{0, 5},
};
-static arc arcs_91_6[2] = {
- {26, 5},
- {35, 3},
+static arc arcs_91_6[1] = {
+ {0, 6},
};
-static arc arcs_91_7[1] = {
+static arc arcs_91_7[4] = {
+ {6, 8},
+ {64, 2},
+ {60, 3},
{0, 7},
};
static arc arcs_91_8[3] = {
- {26, 9},
- {33, 10},
+ {66, 9},
+ {60, 10},
{0, 8},
};
static arc arcs_91_9[2] = {
- {33, 10},
- {0, 9},
+ {64, 2},
+ {60, 10},
};
static arc arcs_91_10[2] = {
- {26, 9},
- {35, 3},
+ {66, 9},
+ {0, 10},
};
static state states_91[11] = {
{3, arcs_91_0},
- {2, arcs_91_1},
- {3, arcs_91_2},
- {1, arcs_91_3},
- {4, arcs_91_4},
+ {3, arcs_91_1},
+ {1, arcs_91_2},
+ {2, arcs_91_3},
+ {2, arcs_91_4},
{2, arcs_91_5},
- {2, arcs_91_6},
- {1, arcs_91_7},
+ {1, arcs_91_6},
+ {4, arcs_91_7},
{3, arcs_91_8},
{2, arcs_91_9},
{2, arcs_91_10},
};
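[Editorial note] The dfas[] table closes the regenerated file: each record carries the nonterminal number, its name, the initial state, the state array, and the FIRST set as an octal-escaped bitmask (compare the deleted printdfas() earlier in this patch). The renumbering shows up here too: byte 0 of single_input's mask goes from \004 (only label 2 set) to \344 (labels 2, 5, and 7). Decoding a byte is a one-liner:

    mask = 0o344                                          # first byte of the new mask
    print([bit for bit in range(8) if mask >> bit & 1])   # [2, 5, 6, 7]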
static dfa dfas[92] = {
{256, "single_input", 0, 3, states_0,
- "\004\050\340\000\004\000\000\000\024\174\022\016\204\045\000\041\000\000\014\211\362\041\010"},
+ "\344\377\377\377\377\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{257, "file_input", 0, 2, states_1,
- "\204\050\340\000\004\000\000\000\024\174\022\016\204\045\000\041\000\000\014\211\362\041\010"},
+ "\344\377\377\377\377\027\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{258, "eval_input", 0, 3, states_2,
- "\000\040\200\000\000\000\000\000\000\000\020\000\000\000\000\041\000\000\014\211\362\001\000"},
+ "\240\173\002\120\300\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{259, "decorator", 0, 7, states_3,
- "\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
+ "\000\004\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{260, "decorators", 0, 2, states_4,
- "\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
+ "\000\004\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{261, "decorated", 0, 3, states_5,
- "\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
+ "\000\004\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{262, "async_funcdef", 0, 3, states_6,
- "\000\000\040\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
+ "\000\000\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{263, "funcdef", 0, 9, states_7,
- "\000\000\100\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
+ "\000\000\040\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{264, "parameters", 0, 4, states_8,
- "\000\040\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
- {265, "typedargslist", 0, 23, states_9,
- "\000\000\200\000\014\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
+ "\040\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
+ {265, "typedargslist", 0, 22, states_9,
+ "\100\000\000\000\000\001\000\000\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{266, "tfpdef", 0, 4, states_10,
- "\000\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
- {267, "varargslist", 0, 19, states_11,
- "\000\000\200\000\014\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
+ "\000\000\000\000\000\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
+ {267, "varargslist", 0, 18, states_11,
+ "\100\000\000\000\000\001\000\000\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{268, "vfpdef", 0, 2, states_12,
- "\000\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
+ "\000\000\000\000\000\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{269, "stmt", 0, 2, states_13,
- "\000\050\340\000\004\000\000\000\024\174\022\016\204\045\000\041\000\000\014\211\362\041\010"},
+ "\340\377\377\377\377\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{270, "simple_stmt", 0, 4, states_14,
- "\000\040\200\000\004\000\000\000\024\174\022\016\000\000\000\041\000\000\014\211\362\001\010"},
+ "\340\373\126\373\343\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{271, "small_stmt", 0, 2, states_15,
- "\000\040\200\000\004\000\000\000\024\174\022\016\000\000\000\041\000\000\014\211\362\001\010"},
+ "\340\373\126\373\343\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{272, "expr_stmt", 0, 6, states_16,
- "\000\040\200\000\004\000\000\000\000\000\020\000\000\000\000\041\000\000\014\211\362\001\000"},
+ "\340\173\002\120\300\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{273, "annassign", 0, 5, states_17,
- "\000\000\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
+ "\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{274, "testlist_star_expr", 0, 3, states_18,
- "\000\040\200\000\004\000\000\000\000\000\020\000\000\000\000\041\000\000\014\211\362\001\000"},
+ "\340\173\002\120\300\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{275, "augassign", 0, 2, states_19,
- "\000\000\000\000\000\000\340\377\003\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
+ "\000\000\000\000\000\000\000\000\000\000\340\377\003\000\000\000\000\000\000\000\000\000\000"},
{276, "del_stmt", 0, 3, states_20,
- "\000\000\000\000\000\000\000\000\004\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
+ "\000\000\100\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{277, "pass_stmt", 0, 2, states_21,
- "\000\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
+ "\000\000\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{278, "flow_stmt", 0, 2, states_22,
- "\000\000\000\000\000\000\000\000\000\074\000\000\000\000\000\000\000\000\000\000\000\000\010"},
+ "\000\000\024\000\043\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{279, "break_stmt", 0, 2, states_23,
- "\000\000\000\000\000\000\000\000\000\004\000\000\000\000\000\000\000\000\000\000\000\000\000"},
+ "\000\000\004\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{280, "continue_stmt", 0, 2, states_24,
- "\000\000\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000"},
+ "\000\000\020\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{281, "return_stmt", 0, 3, states_25,
- "\000\000\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000\000\000\000\000\000\000"},
+ "\000\000\000\000\002\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{282, "yield_stmt", 0, 2, states_26,
- "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\010"},
+ "\000\000\000\000\040\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{283, "raise_stmt", 0, 5, states_27,
- "\000\000\000\000\000\000\000\000\000\040\000\000\000\000\000\000\000\000\000\000\000\000\000"},
+ "\000\000\000\000\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{284, "import_stmt", 0, 2, states_28,
- "\000\000\000\000\000\000\000\000\000\100\002\000\000\000\000\000\000\000\000\000\000\000\000"},
+ "\000\000\000\011\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{285, "import_name", 0, 3, states_29,
- "\000\000\000\000\000\000\000\000\000\000\002\000\000\000\000\000\000\000\000\000\000\000\000"},
+ "\000\000\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{286, "import_from", 0, 8, states_30,
- "\000\000\000\000\000\000\000\000\000\100\000\000\000\000\000\000\000\000\000\000\000\000\000"},
+ "\000\000\000\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{287, "import_as_name", 0, 4, states_31,
- "\000\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
+ "\000\000\000\000\000\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{288, "dotted_as_name", 0, 4, states_32,
- "\000\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
+ "\000\000\000\000\000\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{289, "import_as_names", 0, 3, states_33,
- "\000\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
+ "\000\000\000\000\000\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{290, "dotted_as_names", 0, 2, states_34,
- "\000\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
+ "\000\000\000\000\000\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{291, "dotted_name", 0, 2, states_35,
- "\000\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
+ "\000\000\000\000\000\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{292, "global_stmt", 0, 3, states_36,
- "\000\000\000\000\000\000\000\000\000\000\000\002\000\000\000\000\000\000\000\000\000\000\000"},
+ "\000\000\000\002\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{293, "nonlocal_stmt", 0, 3, states_37,
- "\000\000\000\000\000\000\000\000\000\000\000\004\000\000\000\000\000\000\000\000\000\000\000"},
+ "\000\000\000\040\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{294, "assert_stmt", 0, 5, states_38,
- "\000\000\000\000\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\000\000\000\000"},
+ "\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{295, "compound_stmt", 0, 2, states_39,
- "\000\010\140\000\000\000\000\000\000\000\000\000\204\045\000\000\000\000\000\000\000\040\000"},
+ "\000\004\251\004\034\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{296, "async_stmt", 0, 3, states_40,
- "\000\000\040\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
+ "\000\000\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{297, "if_stmt", 0, 8, states_41,
- "\000\000\000\000\000\000\000\000\000\000\000\000\004\000\000\000\000\000\000\000\000\000\000"},
+ "\000\000\000\004\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{298, "while_stmt", 0, 8, states_42,
- "\000\000\000\000\000\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\000\000\000"},
+ "\000\000\000\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{299, "for_stmt", 0, 11, states_43,
- "\000\000\000\000\000\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000\000\000\000"},
+ "\000\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{300, "try_stmt", 0, 13, states_44,
- "\000\000\000\000\000\000\000\000\000\000\000\000\000\004\000\000\000\000\000\000\000\000\000"},
+ "\000\000\000\000\004\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{301, "with_stmt", 0, 6, states_45,
- "\000\000\000\000\000\000\000\000\000\000\000\000\000\040\000\000\000\000\000\000\000\000\000"},
+ "\000\000\000\000\020\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{302, "with_item", 0, 4, states_46,
- "\000\040\200\000\000\000\000\000\000\000\020\000\000\000\000\041\000\000\014\211\362\001\000"},
+ "\240\173\002\120\300\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{303, "except_clause", 0, 5, states_47,
- "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000\000\000"},
+ "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000"},
{304, "suite", 0, 5, states_48,
- "\004\040\200\000\004\000\000\000\024\174\022\016\000\000\000\041\000\000\014\211\362\001\010"},
+ "\344\373\126\373\343\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{305, "namedexpr_test", 0, 4, states_49,
- "\000\040\200\000\000\000\000\000\000\000\020\000\000\000\000\041\000\000\014\211\362\001\000"},
+ "\240\173\002\120\300\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{306, "test", 0, 6, states_50,
- "\000\040\200\000\000\000\000\000\000\000\020\000\000\000\000\041\000\000\014\211\362\001\000"},
+ "\240\173\002\120\300\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{307, "test_nocond", 0, 2, states_51,
- "\000\040\200\000\000\000\000\000\000\000\020\000\000\000\000\041\000\000\014\211\362\001\000"},
+ "\240\173\002\120\300\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{308, "lambdef", 0, 5, states_52,
- "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000\000"},
+ "\000\000\000\020\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{309, "lambdef_nocond", 0, 5, states_53,
- "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000\000"},
+ "\000\000\000\020\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{310, "or_test", 0, 2, states_54,
- "\000\040\200\000\000\000\000\000\000\000\020\000\000\000\000\040\000\000\014\211\362\001\000"},
+ "\240\173\002\100\300\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{311, "and_test", 0, 2, states_55,
- "\000\040\200\000\000\000\000\000\000\000\020\000\000\000\000\040\000\000\014\211\362\001\000"},
+ "\240\173\002\100\300\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{312, "not_test", 0, 3, states_56,
- "\000\040\200\000\000\000\000\000\000\000\020\000\000\000\000\040\000\000\014\211\362\001\000"},
+ "\240\173\002\100\300\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{313, "comparison", 0, 2, states_57,
- "\000\040\200\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000\014\211\362\001\000"},
+ "\240\173\002\000\300\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{314, "comp_op", 0, 4, states_58,
- "\000\000\000\000\000\000\000\000\000\000\000\000\000\002\000\040\377\000\000\000\000\000\000"},
+ "\000\000\000\100\000\000\000\000\000\000\000\000\000\000\000\004\000\340\017\000\000\000\000"},
{315, "star_expr", 0, 3, states_59,
- "\000\000\000\000\004\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
+ "\100\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{316, "expr", 0, 2, states_60,
- "\000\040\200\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000\014\211\362\001\000"},
+ "\240\173\002\000\300\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{317, "xor_expr", 0, 2, states_61,
- "\000\040\200\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000\014\211\362\001\000"},
+ "\240\173\002\000\300\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{318, "and_expr", 0, 2, states_62,
- "\000\040\200\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000\014\211\362\001\000"},
+ "\240\173\002\000\300\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{319, "shift_expr", 0, 2, states_63,
- "\000\040\200\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000\014\211\362\001\000"},
+ "\240\173\002\000\300\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{320, "arith_expr", 0, 2, states_64,
- "\000\040\200\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000\014\211\362\001\000"},
+ "\240\173\002\000\300\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{321, "term", 0, 2, states_65,
- "\000\040\200\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000\014\211\362\001\000"},
+ "\240\173\002\000\300\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{322, "factor", 0, 3, states_66,
- "\000\040\200\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000\014\211\362\001\000"},
+ "\240\173\002\000\300\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{323, "power", 0, 4, states_67,
- "\000\040\200\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000\000\210\362\001\000"},
+ "\040\172\002\000\100\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{324, "atom_expr", 0, 3, states_68,
- "\000\040\200\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000\000\210\362\001\000"},
+ "\040\172\002\000\100\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{325, "atom", 0, 9, states_69,
- "\000\040\200\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000\000\200\362\001\000"},
+ "\040\172\000\000\100\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{326, "testlist_comp", 0, 5, states_70,
- "\000\040\200\000\004\000\000\000\000\000\020\000\000\000\000\041\000\000\014\211\362\001\000"},
+ "\340\173\002\120\300\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{327, "trailer", 0, 7, states_71,
- "\000\040\000\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\000\200\000\000\000"},
+ "\040\100\000\000\000\000\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\000\000"},
{328, "subscriptlist", 0, 3, states_72,
- "\000\040\200\010\000\000\000\000\000\000\020\000\000\000\000\041\000\000\014\211\362\001\000"},
+ "\240\173\002\120\300\007\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{329, "subscript", 0, 5, states_73,
- "\000\040\200\010\000\000\000\000\000\000\020\000\000\000\000\041\000\000\014\211\362\001\000"},
+ "\240\173\002\120\300\007\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{330, "sliceop", 0, 3, states_74,
- "\000\000\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
+ "\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{331, "exprlist", 0, 3, states_75,
- "\000\040\200\000\004\000\000\000\000\000\020\000\000\000\000\000\000\000\014\211\362\001\000"},
+ "\340\173\002\000\300\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{332, "testlist", 0, 3, states_76,
- "\000\040\200\000\000\000\000\000\000\000\020\000\000\000\000\041\000\000\014\211\362\001\000"},
+ "\240\173\002\120\300\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{333, "dictorsetmaker", 0, 14, states_77,
- "\000\040\200\000\014\000\000\000\000\000\020\000\000\000\000\041\000\000\014\211\362\001\000"},
+ "\340\173\002\120\300\007\000\000\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{334, "classdef", 0, 8, states_78,
- "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\040\000"},
+ "\000\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{335, "arglist", 0, 3, states_79,
- "\000\040\200\000\014\000\000\000\000\000\020\000\000\000\000\041\000\000\014\211\362\001\000"},
+ "\340\173\002\120\300\007\000\000\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{336, "argument", 0, 4, states_80,
- "\000\040\200\000\014\000\000\000\000\000\020\000\000\000\000\041\000\000\014\211\362\001\000"},
+ "\340\173\002\120\300\007\000\000\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{337, "comp_iter", 0, 2, states_81,
- "\000\000\040\000\000\000\000\000\000\000\000\000\004\001\000\000\000\000\000\000\000\000\000"},
+ "\000\000\201\004\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{338, "sync_comp_for", 0, 6, states_82,
- "\000\000\000\000\000\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000\000\000\000"},
+ "\000\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{339, "comp_for", 0, 3, states_83,
- "\000\000\040\000\000\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000\000\000\000"},
+ "\000\000\201\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{340, "comp_if", 0, 4, states_84,
- "\000\000\000\000\000\000\000\000\000\000\000\000\004\000\000\000\000\000\000\000\000\000\000"},
+ "\000\000\000\004\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{341, "encoding_decl", 0, 2, states_85,
- "\000\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
+ "\000\000\000\000\000\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{342, "yield_expr", 0, 3, states_86,
- "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\010"},
+ "\000\000\000\000\040\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{343, "yield_arg", 0, 3, states_87,
- "\000\040\200\000\004\000\000\000\000\100\020\000\000\000\000\041\000\000\014\211\362\001\000"},
+ "\340\173\002\121\300\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{344, "func_body_suite", 0, 7, states_88,
- "\004\040\200\000\004\000\000\000\024\174\022\016\000\000\000\041\000\000\014\211\362\001\010"},
+ "\344\373\126\373\343\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{345, "func_type_input", 0, 3, states_89,
- "\000\040\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
+ "\040\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{346, "func_type", 0, 6, states_90,
- "\000\040\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
+ "\040\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
{347, "typelist", 0, 11, states_91,
- "\000\040\200\000\014\000\000\000\000\000\020\000\000\000\000\041\000\000\014\211\362\001\000"},
+ "\340\173\002\120\300\007\000\000\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000"},
};
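Each dfas[] entry reads {type, name, initial state, nstates, states, first set}. The trailing octal string is a little-endian bitset over label indices: bit i is set when label i can begin the nonterminal. The new pgen assigns label indices in a different (sorted) order, which is why every bitmask changes even for rules whose grammar is untouched. A hedged sketch of the membership test, modelled on testbit() in Parser/bitset.c (the macro names used there differ):

    #include <limits.h>          /* CHAR_BIT */

    typedef const char *bitset;  /* a packed first-set string */

    /* Is bit ibit set in the packed first-set string? */
    static int
    testbit(bitset ss, int ibit)
    {
        return (ss[ibit / CHAR_BIT] & (1 << (ibit % CHAR_BIT))) != 0;
    }

    /* usage sketch: testbit(dfas[325 - 256].d_first, i) asks whether
       label i can start an atom */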
-static label labels[184] = {
+static label labels[183] = {
{0, "EMPTY"},
{256, 0},
{4, 0},
- {270, 0},
{295, 0},
+ {270, 0},
+ {7, 0},
+ {16, 0},
+ {14, 0},
+ {15, 0},
+ {52, 0},
+ {49, 0},
+ {1, "False"},
+ {1, "None"},
+ {1, "True"},
+ {9, 0},
+ {1, "assert"},
+ {1, "async"},
+ {1, "await"},
+ {1, "break"},
+ {1, "class"},
+ {1, "continue"},
+ {1, "def"},
+ {1, "del"},
+ {1, "for"},
+ {1, "from"},
+ {1, "global"},
+ {1, "if"},
+ {1, "import"},
+ {1, "lambda"},
+ {1, "nonlocal"},
+ {1, "not"},
+ {1, "pass"},
+ {1, "raise"},
+ {1, "return"},
+ {1, "try"},
+ {1, "while"},
+ {1, "with"},
+ {1, "yield"},
+ {25, 0},
+ {31, 0},
+ {1, 0},
+ {2, 0},
+ {3, 0},
{257, 0},
- {269, 0},
{0, 0},
+ {269, 0},
{258, 0},
{332, 0},
{259, 0},
- {49, 0},
{291, 0},
- {7, 0},
- {335, 0},
{8, 0},
+ {335, 0},
{260, 0},
{261, 0},
+ {262, 0},
{334, 0},
{263, 0},
- {262, 0},
- {1, "async"},
- {1, "def"},
- {1, 0},
{264, 0},
{51, 0},
- {306, 0},
{11, 0},
+ {306, 0},
{56, 0},
{344, 0},
{265, 0},
+ {35, 0},
{266, 0},
- {22, 0},
{12, 0},
- {16, 0},
- {35, 0},
+ {22, 0},
{267, 0},
{268, 0},
{271, 0},
{13, 0},
- {272, 0},
+ {294, 0},
{276, 0},
- {277, 0},
+ {272, 0},
{278, 0},
- {284, 0},
{292, 0},
+ {284, 0},
{293, 0},
- {294, 0},
+ {277, 0},
{274, 0},
{273, 0},
{275, 0},
{342, 0},
{315, 0},
+ {40, 0},
+ {41, 0},
+ {46, 0},
+ {38, 0},
{36, 0},
{37, 0},
- {38, 0},
- {50, 0},
+ {48, 0},
{39, 0},
- {40, 0},
- {41, 0},
- {42, 0},
- {43, 0},
{44, 0},
{45, 0},
- {46, 0},
- {48, 0},
- {1, "del"},
+ {50, 0},
+ {43, 0},
+ {42, 0},
{331, 0},
- {1, "pass"},
{279, 0},
{280, 0},
- {281, 0},
{283, 0},
+ {281, 0},
{282, 0},
- {1, "break"},
- {1, "continue"},
- {1, "return"},
- {1, "raise"},
- {1, "from"},
- {285, 0},
{286, 0},
- {1, "import"},
+ {285, 0},
{290, 0},
{23, 0},
- {52, 0},
{289, 0},
{287, 0},
{1, "as"},
{288, 0},
- {1, "global"},
- {1, "nonlocal"},
- {1, "assert"},
- {297, 0},
- {298, 0},
+ {296, 0},
{299, 0},
+ {297, 0},
{300, 0},
+ {298, 0},
{301, 0},
- {296, 0},
- {1, "if"},
{305, 0},
{304, 0},
{1, "elif"},
{1, "else"},
- {1, "while"},
- {1, "for"},
{1, "in"},
- {1, "try"},
- {303, 0},
{1, "finally"},
- {1, "with"},
+ {303, 0},
{302, 0},
{316, 0},
{1, "except"},
{5, 0},
{6, 0},
{53, 0},
- {310, 0},
{308, 0},
+ {310, 0},
{307, 0},
{309, 0},
- {1, "lambda"},
{311, 0},
{1, "or"},
{312, 0},
{1, "and"},
- {1, "not"},
{313, 0},
{314, 0},
+ {28, 0},
{20, 0},
- {21, 0},
+ {29, 0},
{27, 0},
+ {21, 0},
{30, 0},
- {29, 0},
- {28, 0},
- {28, 0},
{1, "is"},
{317, 0},
{18, 0},
@@ -2455,40 +2454,27 @@ static label labels[184] = {
{33, 0},
{34, 0},
{321, 0},
- {14, 0},
- {15, 0},
{322, 0},
- {17, 0},
{24, 0},
+ {17, 0},
{47, 0},
- {31, 0},
{323, 0},
{324, 0},
- {1, "await"},
{325, 0},
{327, 0},
{326, 0},
- {9, 0},
{10, 0},
- {25, 0},
- {333, 0},
{26, 0},
- {2, 0},
- {3, 0},
- {1, "None"},
- {1, "True"},
- {1, "False"},
+ {333, 0},
{339, 0},
{328, 0},
{329, 0},
{330, 0},
- {1, "class"},
{336, 0},
{337, 0},
{340, 0},
{338, 0},
{341, 0},
- {1, "yield"},
{343, 0},
{345, 0},
{346, 0},
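The labels[] table maps each label index to either a bare token type (second field 0/NULL) or a keyword or nonterminal. The regenerated table groups the keyword labels into one sorted block near the front ("False" through "yield" above) and, judging by the hunk that removes two {28, 0} lines while adding back only one, drops a duplicated entry; that accounts for the count falling from 184 to 183 while nearly every index moves. Token classification is a linear scan over this array; a simplified sketch (the real classify() in Parser/parser.c is equivalent in effect but structured differently):

    #include <string.h>

    typedef struct { int lb_type; const char *lb_str; } label;
    typedef struct { int ll_nlabels; label *ll_label; } labellist;

    /* Find the label index for a token; because keyword entries now
       precede the generic {1, 0} NAME entry, "if" matches its keyword
       label before falling through to plain NAME. */
    static int
    classify(const labellist *ll, int type, const char *str)
    {
        for (int i = 0; i < ll->ll_nlabels; i++) {
            const label *l = &ll->ll_label[i];
            if (l->lb_type == type &&
                (l->lb_str == NULL ||
                 (str != NULL && strcmp(l->lb_str, str) == 0)))
                return i;
        }
        return -1;   /* token cannot occur here: syntax error */
    }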
@@ -2497,6 +2483,6 @@ static label labels[184] = {
grammar _PyParser_Grammar = {
92,
dfas,
- {184, labels},
+ {183, labels},
256
};
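The closing initializer ties the file together: 92 rule DFAs, the 183-entry label list, and 256 as the default start symbol (callers may select a different start symbol per parsing mode). Since nonterminal numbers begin at 256, the DFA for nonterminal n lives at g_dfa[n - 256], so 256 selects single_input. A sketch of the top-level type, again assuming Include/grammar.h, with dfa and labellist as in the sketches above:

    typedef struct {
        int        g_ndfas;   /* 92 */
        dfa       *g_dfa;     /* dfa for nonterminal n is g_dfa[n - 256] */
        labellist  g_ll;      /* {183, labels} */
        int        g_start;   /* 256: single_input */
        int        g_accel;   /* left 0 here; set once accelerators
                                 have been added at runtime */
    } grammar;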