36 files changed, 171 insertions, 178 deletions
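The patch below drops the alternate '<>' spelling of the inequality operator from the grammar, tokenizer and parser tables, and updates the remaining uses throughout the standard library and its tests to '!=' (a few call sites are also modernized along the way, e.g. isinstance()/'in' checks instead of type()/find() comparisons). A minimal sketch of the user-visible effect, in illustrative Python 2-style code that is not part of the patch:

    if 1 != 2:          # the only accepted spelling of "not equal"
        print 'ok'
    # if 1 <> 2: pass   # no longer tokenized; now a SyntaxError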
diff --git a/Grammar/Grammar b/Grammar/Grammar index 7a7f6bc0b6..cab3b9b7eb 100644 --- a/Grammar/Grammar +++ b/Grammar/Grammar @@ -90,7 +90,7 @@ or_test: and_test ('or' and_test)* and_test: not_test ('and' not_test)* not_test: 'not' not_test | comparison comparison: expr (comp_op expr)* -comp_op: '<'|'>'|'=='|'>='|'<='|'<>'|'!='|'in'|'not' 'in'|'is'|'is' 'not' +comp_op: '<'|'>'|'=='|'>='|'<='|'!='|'in'|'not' 'in'|'is'|'is' 'not' expr: xor_expr ('|' xor_expr)* xor_expr: and_expr ('^' and_expr)* and_expr: shift_expr ('&' shift_expr)* diff --git a/Lib/bsddb/dbtables.py b/Lib/bsddb/dbtables.py index 655fb96120..7f862d7509 100644 --- a/Lib/bsddb/dbtables.py +++ b/Lib/bsddb/dbtables.py @@ -453,7 +453,7 @@ class bsdTableDB : # error dataitem = None dataitem = mappings[column](dataitem) - if dataitem <> None: + if dataitem != None: self.db.put( _data_key(table, column, rowid), dataitem, txn=txn) diff --git a/Lib/bsddb/test/test_compat.py b/Lib/bsddb/test/test_compat.py index 841e01c8a6..af9e128904 100644 --- a/Lib/bsddb/test/test_compat.py +++ b/Lib/bsddb/test/test_compat.py @@ -120,7 +120,7 @@ class CompatibilityTestCase(unittest.TestCase): try: rec = f.next() except KeyError: - assert rec == f.last(), 'Error, last <> last!' + assert rec == f.last(), 'Error, last != last!' f.previous() break if verbose: diff --git a/Lib/bsddb/test/test_recno.py b/Lib/bsddb/test/test_recno.py index 170448e600..e325aac89a 100644 --- a/Lib/bsddb/test/test_recno.py +++ b/Lib/bsddb/test/test_recno.py @@ -30,7 +30,7 @@ class SimpleRecnoTestCase(unittest.TestCase): try: os.remove(self.filename) except OSError, e: - if e.errno <> errno.EEXIST: raise + if e.errno != errno.EEXIST: raise def test01_basic(self): d = db.DB() diff --git a/Lib/bsddb/test/test_thread.py b/Lib/bsddb/test/test_thread.py index f18741323d..6942aa222c 100644 --- a/Lib/bsddb/test/test_thread.py +++ b/Lib/bsddb/test/test_thread.py @@ -58,7 +58,7 @@ class BaseThreadedTestCase(unittest.TestCase): try: os.mkdir(homeDir) except OSError, e: - if e.errno <> errno.EEXIST: raise + if e.errno != errno.EEXIST: raise self.env = db.DBEnv() self.setEnvOpts() self.env.open(homeDir, self.envflags | db.DB_CREATE) diff --git a/Lib/compiler/transformer.py b/Lib/compiler/transformer.py index 091d599878..b564300c01 100644 --- a/Lib/compiler/transformer.py +++ b/Lib/compiler/transformer.py @@ -618,7 +618,7 @@ class Transformer: for i in range(2, len(nodelist), 2): nl = nodelist[i-1] - # comp_op: '<' | '>' | '=' | '>=' | '<=' | '<>' | '!=' | '==' + # comp_op: '<' | '>' | '=' | '>=' | '<=' | '!=' | '==' # | 'in' | 'not' 'in' | 'is' | 'is' 'not' n = nl[1] if n[0] == token.NAME: @@ -1396,7 +1396,7 @@ _doc_nodes = [ symbol.power, ] -# comp_op: '<' | '>' | '=' | '>=' | '<=' | '<>' | '!=' | '==' +# comp_op: '<' | '>' | '=' | '>=' | '<=' | '!=' | '==' # | 'in' | 'not' 'in' | 'is' | 'is' 'not' _cmp_types = { token.LESS : '<', diff --git a/Lib/email/base64mime.py b/Lib/email/base64mime.py index 0129d9d4e6..0b29eb88f3 100644 --- a/Lib/email/base64mime.py +++ b/Lib/email/base64mime.py @@ -146,7 +146,7 @@ def encode(s, binary=True, maxlinelen=76, eol=NL): # BAW: should encode() inherit b2a_base64()'s dubious behavior in # adding a newline to the encoded string? 
enc = b2a_base64(s[i:i + max_unencoded]) - if enc.endswith(NL) and eol <> NL: + if enc.endswith(NL) and eol != NL: enc = enc[:-1] + eol encvec.append(enc) return EMPTYSTRING.join(encvec) diff --git a/Lib/email/charset.py b/Lib/email/charset.py index 8f218b2095..882aa420ce 100644 --- a/Lib/email/charset.py +++ b/Lib/email/charset.py @@ -250,7 +250,7 @@ class Charset: Returns "base64" if self.body_encoding is BASE64. Returns "7bit" otherwise. """ - assert self.body_encoding <> SHORTEST + assert self.body_encoding != SHORTEST if self.body_encoding == QP: return 'quoted-printable' elif self.body_encoding == BASE64: @@ -260,7 +260,7 @@ class Charset: def convert(self, s): """Convert a string from the input_codec to the output_codec.""" - if self.input_codec <> self.output_codec: + if self.input_codec != self.output_codec: return unicode(s, self.input_codec).encode(self.output_codec) else: return s diff --git a/Lib/email/generator.py b/Lib/email/generator.py index 6e7a515302..ed832a3e9a 100644 --- a/Lib/email/generator.py +++ b/Lib/email/generator.py @@ -211,7 +211,7 @@ class Generator: # doesn't preserve newlines/continuations in headers. This is no big # deal in practice, but turns out to be inconvenient for the unittest # suite. - if msg.get_boundary() <> boundary: + if msg.get_boundary() != boundary: msg.set_boundary(boundary) # If there's a preamble, write it out, with a trailing CRLF if msg.preamble is not None: diff --git a/Lib/email/header.py b/Lib/email/header.py index 183c337560..3de44f905b 100644 --- a/Lib/email/header.py +++ b/Lib/email/header.py @@ -248,7 +248,7 @@ class Header: elif not isinstance(charset, Charset): charset = Charset(charset) # If the charset is our faux 8bit charset, leave the string unchanged - if charset <> '8bit': + if charset != '8bit': # We need to test that the string can be converted to unicode and # back to a byte string, given the input and output codecs of the # charset. @@ -454,7 +454,7 @@ def _split_ascii(s, firstlen, restlen, continuation_ws, splitchars): # If this part is longer than maxlen and we aren't already # splitting on whitespace, try to recursively split this line # on whitespace. 
- if partlen > maxlen and ch <> ' ': + if partlen > maxlen and ch != ' ': subl = _split_ascii(part, maxlen, restlen, continuation_ws, ' ') lines.extend(subl[:-1]) diff --git a/Lib/email/message.py b/Lib/email/message.py index 61101317c7..9d25cb0c6d 100644 --- a/Lib/email/message.py +++ b/Lib/email/message.py @@ -252,7 +252,7 @@ class Message: charset=charset.get_output_charset()) else: self.set_param('charset', charset.get_output_charset()) - if str(charset) <> charset.get_output_charset(): + if str(charset) != charset.get_output_charset(): self._payload = charset.body_encode(self._payload) if 'Content-Transfer-Encoding' not in self: cte = charset.get_body_encoding() @@ -301,7 +301,7 @@ class Message: name = name.lower() newheaders = [] for k, v in self._headers: - if k.lower() <> name: + if k.lower() != name: newheaders.append((k, v)) self._headers = newheaders @@ -438,7 +438,7 @@ class Message: return self.get_default_type() ctype = paramre.split(value)[0].lower().strip() # RFC 2045, section 5.2 says if its invalid, use text/plain - if ctype.count('/') <> 1: + if ctype.count('/') != 1: return 'text/plain' return ctype @@ -601,7 +601,7 @@ class Message: ctype = append_param else: ctype = SEMISPACE.join([ctype, append_param]) - if ctype <> self.get(header): + if ctype != self.get(header): del self[header] self[header] = ctype @@ -617,13 +617,13 @@ class Message: return new_ctype = '' for p, v in self.get_params(header=header, unquote=requote): - if p.lower() <> param.lower(): + if p.lower() != param.lower(): if not new_ctype: new_ctype = _formatparam(p, v, requote) else: new_ctype = SEMISPACE.join([new_ctype, _formatparam(p, v, requote)]) - if new_ctype <> self.get(header): + if new_ctype != self.get(header): del self[header] self[header] = new_ctype diff --git a/Lib/email/quoprimime.py b/Lib/email/quoprimime.py index a5658dd3f7..389b276de6 100644 --- a/Lib/email/quoprimime.py +++ b/Lib/email/quoprimime.py @@ -287,7 +287,7 @@ def decode(encoded, eol=NL): n = len(line) while i < n: c = line[i] - if c <> '=': + if c != '=': decoded += c i += 1 # Otherwise, c == "=". Are we at the end of the line? 
If so, add diff --git a/Lib/email/test/test_email.py b/Lib/email/test/test_email.py index 13801dce12..8127ef003f 100644 --- a/Lib/email/test/test_email.py +++ b/Lib/email/test/test_email.py @@ -51,7 +51,7 @@ def openfile(filename, mode='r'): class TestEmailBase(unittest.TestCase): def ndiffAssertEqual(self, first, second): """Like failUnlessEqual except use ndiff for readable output.""" - if first <> second: + if first != second: sfirst = str(first) ssecond = str(second) diff = difflib.ndiff(sfirst.splitlines(), ssecond.splitlines()) @@ -2726,7 +2726,7 @@ class TestCharset(unittest.TestCase): # Try a charset with None body encoding c = Charset('us-ascii') eq('hello world', c.body_encode('hello world')) - # Try the convert argument, where input codec <> output codec + # Try the convert argument, where input codec != output codec c = Charset('euc-jp') # With apologies to Tokio Kikuchi ;) try: diff --git a/Lib/email/test/test_email_renamed.py b/Lib/email/test/test_email_renamed.py index 30f39b905d..ce685c5f2d 100644 --- a/Lib/email/test/test_email_renamed.py +++ b/Lib/email/test/test_email_renamed.py @@ -52,7 +52,7 @@ def openfile(filename, mode='r'): class TestEmailBase(unittest.TestCase): def ndiffAssertEqual(self, first, second): """Like failUnlessEqual except use ndiff for readable output.""" - if first <> second: + if first != second: sfirst = str(first) ssecond = str(second) diff = difflib.ndiff(sfirst.splitlines(), ssecond.splitlines()) @@ -2732,7 +2732,7 @@ class TestCharset(unittest.TestCase): # Try a charset with None body encoding c = Charset('us-ascii') eq('hello world', c.body_encode('hello world')) - # Try the convert argument, where input codec <> output codec + # Try the convert argument, where input codec != output codec c = Charset('euc-jp') # With apologies to Tokio Kikuchi ;) try: diff --git a/Lib/logging/__init__.py b/Lib/logging/__init__.py index d5681c8a63..953afe9ebe 100644 --- a/Lib/logging/__init__.py +++ b/Lib/logging/__init__.py @@ -912,7 +912,8 @@ class Manager: """ #for c in ph.loggers: for c in ph.loggerMap.keys(): - if string.find(c.parent.name, alogger.name) <> 0: + # XXX Is the following correct? Shouldn't it be >= 0? + if string.find(c.parent.name, alogger.name) != 0: alogger.parent = c.parent c.parent = alogger diff --git a/Lib/plat-mac/FrameWork.py b/Lib/plat-mac/FrameWork.py index cda38e43cc..0bd6feb7e3 100644 --- a/Lib/plat-mac/FrameWork.py +++ b/Lib/plat-mac/FrameWork.py @@ -602,7 +602,7 @@ class Menu: def dispatch(self, id, item, window, event): title, shortcut, callback, mtype = self.items[item-1] if callback: - if not self.bar.parent or type(callback) <> types.StringType: + if not self.bar.parent or not isinstance(callback, str): menuhandler = callback else: # callback is string @@ -748,7 +748,7 @@ class Window: self.parent = parent def open(self, bounds=(40, 40, 400, 400), resid=None): - if resid <> None: + if resid is not None: self.wid = GetNewWindow(resid, -1) else: self.wid = NewWindow(bounds, self.__class__.__name__, 1, @@ -826,7 +826,7 @@ class Window: # If we're not frontmost, select ourselves and wait for # the activate event. # - if MyFrontWindow() <> window: + if MyFrontWindow() != window: window.SelectWindow() return # We are. Handle the event. 
@@ -875,7 +875,7 @@ class ControlsWindow(Window): if DEBUG: print "control hit in", window, "on", control, "; pcode =", pcode def do_inContent(self, partcode, window, event): - if MyFrontWindow() <> window: + if MyFrontWindow() != window: window.SelectWindow() return (what, message, when, where, modifiers) = event diff --git a/Lib/plat-mac/buildtools.py b/Lib/plat-mac/buildtools.py index c83e218024..7c5d0f4402 100644 --- a/Lib/plat-mac/buildtools.py +++ b/Lib/plat-mac/buildtools.py @@ -192,7 +192,7 @@ def process_common(template, progress, code, rsrcname, destname, is_update, 'icl8', 'ics4', 'ics8', 'ICN#', 'ics#'] if not copy_codefragment: skiptypes.append('cfrg') -## skipowner = (ownertype <> None) +## skipowner = (ownertype != None) # Copy the resources from the template diff --git a/Lib/plat-mac/cfmfile.py b/Lib/plat-mac/cfmfile.py index fd1a3e86ca..df157fde8c 100644 --- a/Lib/plat-mac/cfmfile.py +++ b/Lib/plat-mac/cfmfile.py @@ -73,7 +73,7 @@ class CfrgResource: Res.CloseResFile(resref) Res.UseResFile(currentresref) self.parse(data) - if self.version <> 1: + if self.version != 1: raise error, "unknown 'cfrg' resource format" def parse(self, data): @@ -143,7 +143,7 @@ class FragmentDescriptor: return data def getfragment(self): - if self.where <> 1: + if self.where != 1: raise error, "can't read fragment, unsupported location" f = open(self.path, "rb") f.seek(self.offset) @@ -155,7 +155,7 @@ class FragmentDescriptor: return frag def copydata(self, outfile): - if self.where <> 1: + if self.where != 1: raise error, "can't read fragment, unsupported location" infile = open(self.path, "rb") if self.length == 0: diff --git a/Lib/plat-mac/gensuitemodule.py b/Lib/plat-mac/gensuitemodule.py index 983e0f971e..53c0a5270a 100644 --- a/Lib/plat-mac/gensuitemodule.py +++ b/Lib/plat-mac/gensuitemodule.py @@ -169,7 +169,7 @@ def processfile_fromresource(fullname, output=None, basepkgname=None, aete = decode(data, verbose) aetelist.append((aete, res.GetResInfo())) finally: - if rf <> cur: + if rf != cur: CloseResFile(rf) UseResFile(cur) # switch back (needed for dialogs in Python) @@ -332,7 +332,7 @@ def getpstr(f, *args): def getalign(f): if f.tell() & 1: c = f.read(1) - ##if c <> '\0': + ##if c != '\0': ## print align:', repr(c) def getlist(f, description, getitem): @@ -779,7 +779,7 @@ class SuiteCompiler: if is_enum(a[2]): kname = a[1] ename = a[2][0] - if ename <> '****': + if ename != '****': fp.write(" aetools.enumsubst(_arguments, %r, _Enum_%s)\n" % (kname, identify(ename))) self.enumsneeded[ename] = 1 @@ -810,7 +810,7 @@ class SuiteCompiler: for a in arguments: if is_enum(a[2]): ename = a[2][0] - if ename <> '****': + if ename != '****': self.enumsneeded[ename] = 1 # diff --git a/Lib/plat-mac/macerrors.py b/Lib/plat-mac/macerrors.py index ce2a118287..faa5244496 100644 --- a/Lib/plat-mac/macerrors.py +++ b/Lib/plat-mac/macerrors.py @@ -1574,7 +1574,7 @@ smFHBlkDispErr = -311 #Error occurred during _sDisposePtr (Dispose of FHea smFHBlockRdErr = -310 #Error occurred during _sGetFHeader. smBLFieldBad = -309 #ByteLanes field was bad. smUnExBusErr = -308 #Unexpected BusError -smResrvErr = -307 #Fatal reserved error. Resreved field <> 0. +smResrvErr = -307 #Fatal reserved error. Resreved field != 0. smNosInfoArray = -306 #No sInfoArray. Memory Mgr error. 
smDisabledSlot = -305 #This slot is disabled (-305 use to be smLWTstBad) smNoDir = -304 #Directory offset is Nil diff --git a/Lib/test/output/test_class b/Lib/test/output/test_class index 7d8ab5e465..f3dc490afb 100644 --- a/Lib/test/output/test_class +++ b/Lib/test/output/test_class @@ -55,12 +55,10 @@ __eq__: (1,) __lt__: (1,) __gt__: (1,) __ne__: (1,) -__ne__: (1,) __eq__: (1,) __gt__: (1,) __lt__: (1,) __ne__: (1,) -__ne__: (1,) __del__: () __getattr__: ('spam',) __setattr__: ('eggs', 'spam, spam, spam and ham') diff --git a/Lib/test/output/test_tokenize b/Lib/test/output/test_tokenize index b78a223475..edd39bfeba 100644 --- a/Lib/test/output/test_tokenize +++ b/Lib/test/output/test_tokenize @@ -108,11 +108,11 @@ test_tokenize 37,0-37,1: NL '\n' 38,0-38,20: COMMENT '# Ordinary integers\n' 39,0-39,4: NUMBER '0xff' -39,5-39,7: OP '<>' +39,5-39,7: OP '!=' 39,8-39,11: NUMBER '255' 39,11-39,12: NEWLINE '\n' 40,0-40,4: NUMBER '0377' -40,5-40,7: OP '<>' +40,5-40,7: OP '!=' 40,8-40,11: NUMBER '255' 40,11-40,12: NEWLINE '\n' 41,0-41,10: NUMBER '2147483647' @@ -484,7 +484,7 @@ test_tokenize 149,2-149,3: OP ',' 149,4-149,5: NAME 'y' 149,5-149,6: OP ')' -149,7-149,9: OP '<>' +149,7-149,9: OP '!=' 149,10-149,11: OP '(' 149,11-149,12: OP '{' 149,12-149,15: STRING "'a'" @@ -513,7 +513,7 @@ test_tokenize 152,21-152,22: NUMBER '1' 152,23-152,25: OP '<=' 152,26-152,27: NUMBER '1' -152,28-152,30: OP '<>' +152,28-152,30: OP '!=' 152,31-152,32: NUMBER '1' 152,33-152,35: OP '!=' 152,36-152,37: NUMBER '1' diff --git a/Lib/test/test_bsddb3.py b/Lib/test/test_bsddb3.py index 8b0c50c8c5..166ad03ab2 100644 --- a/Lib/test/test_bsddb3.py +++ b/Lib/test/test_bsddb3.py @@ -8,7 +8,7 @@ from test.test_support import requires, verbose, run_suite, unlink # When running as a script instead of within the regrtest framework, skip the # requires test, since it's obvious we want to run them. -if __name__ <> '__main__': +if __name__ != '__main__': requires('bsddb') verbose = False diff --git a/Lib/test/test_class.py b/Lib/test/test_class.py index 66f426515a..795acd911b 100644 --- a/Lib/test/test_class.py +++ b/Lib/test/test_class.py @@ -244,12 +244,10 @@ str(testme) testme == 1 testme < 1 testme > 1 -testme <> 1 testme != 1 1 == testme 1 < testme 1 > testme -1 <> testme 1 != testme # This test has to be last (duh.) diff --git a/Lib/test/test_funcattrs.py b/Lib/test/test_funcattrs.py index 7a083b70df..ab3352854c 100644 --- a/Lib/test/test_funcattrs.py +++ b/Lib/test/test_funcattrs.py @@ -19,16 +19,16 @@ try: except AttributeError: pass else: raise TestFailed, 'expected AttributeError' -if b.__dict__ <> {}: +if b.__dict__ != {}: raise TestFailed, 'expected unassigned func.__dict__ to be {}' b.publish = 1 -if b.publish <> 1: +if b.publish != 1: raise TestFailed, 'function attribute not set to expected value' docstring = 'its docstring' b.__doc__ = docstring -if b.__doc__ <> docstring: +if b.__doc__ != docstring: raise TestFailed, 'problem with setting __doc__ attribute' if 'publish' not in dir(b): @@ -49,7 +49,7 @@ d = {'hello': 'world'} b.__dict__ = d if b.func_dict is not d: raise TestFailed, 'func.__dict__ assignment to dictionary failed' -if b.hello <> 'world': +if b.hello != 'world': raise TestFailed, 'attribute after func.__dict__ assignment failed' f1 = F() @@ -75,13 +75,13 @@ else: raise TestFailed, 'expected AttributeError or TypeError' # But setting it explicitly on the underlying function object is okay. 
F.a.im_func.publish = 1 -if F.a.publish <> 1: +if F.a.publish != 1: raise TestFailed, 'unbound method attribute not set to expected value' -if f1.a.publish <> 1: +if f1.a.publish != 1: raise TestFailed, 'bound method attribute access did not work' -if f2.a.publish <> 1: +if f2.a.publish != 1: raise TestFailed, 'bound method attribute access did not work' if 'publish' not in dir(F.a): @@ -117,7 +117,7 @@ else: raise TestFailed, 'expected TypeError or AttributeError' F.a.im_func.__dict__ = {'one': 11, 'two': 22, 'three': 33} -if f1.a.two <> 22: +if f1.a.two != 22: raise TestFailed, 'setting __dict__' from UserDict import UserDict @@ -128,7 +128,7 @@ try: except (AttributeError, TypeError): pass else: raise TestFailed -if f2.a.one <> f1.a.one <> F.a.one <> 11: +if f2.a.one != f1.a.one != F.a.one != 11: raise TestFailed # im_func may not be a Python method! @@ -136,7 +136,7 @@ import new F.id = new.instancemethod(id, None, F) eff = F() -if eff.id() <> id(eff): +if eff.id() != id(eff): raise TestFailed try: diff --git a/Lib/test/test_grammar.py b/Lib/test/test_grammar.py index 458bfa2a16..0ce43dc6f0 100644 --- a/Lib/test/test_grammar.py +++ b/Lib/test/test_grammar.py @@ -412,7 +412,7 @@ def test_break_continue_loop(extra_burning_oil = 1, count=0): continue except: raise - if count > 2 or big_hippo <> 1: + if count > 2 or big_hippo != 1: print "continue then break in try/except in loop broken!" test_break_continue_loop() @@ -586,12 +586,11 @@ if 1 and 1 or 1 and 1 and 1 or not 1 and 1: pass print 'comparison' ### comparison: expr (comp_op expr)* -### comp_op: '<'|'>'|'=='|'>='|'<='|'<>'|'!='|'in'|'not' 'in'|'is'|'is' 'not' +### comp_op: '<'|'>'|'=='|'>='|'<='|'!='|'in'|'not' 'in'|'is'|'is' 'not' if 1: pass x = (1 == 1) if 1 == 1: pass if 1 != 1: pass -if 1 <> 1: pass if 1 < 1: pass if 1 > 1: pass if 1 <= 1: pass @@ -600,7 +599,7 @@ if 1 is 1: pass if 1 is not 1: pass if 1 in (): pass if 1 not in (): pass -if 1 < 1 > 1 == 1 >= 1 <= 1 <> 1 != 1 in 1 not in 1 is 1 is not 1: pass +if 1 < 1 > 1 == 1 >= 1 <= 1 != 1 in 1 not in 1 is 1 is not 1: pass print 'binary mask ops' x = 1 & 1 diff --git a/Lib/test/test_socket.py b/Lib/test/test_socket.py index 356b80199b..df37f73bda 100644 --- a/Lib/test/test_socket.py +++ b/Lib/test/test_socket.py @@ -285,7 +285,7 @@ class GeneralModuleTests(unittest.TestCase): orig = sys.getrefcount(__name__) socket.getnameinfo(__name__,0) except SystemError: - if sys.getrefcount(__name__) <> orig: + if sys.getrefcount(__name__) != orig: self.fail("socket.getnameinfo loses a reference") def testInterpreterCrash(self): diff --git a/Lib/test/test_wsgiref.py b/Lib/test/test_wsgiref.py index b42f437601..f33c30dda4 100755 --- a/Lib/test/test_wsgiref.py +++ b/Lib/test/test_wsgiref.py @@ -515,7 +515,7 @@ class HandlerTests(TestCase): "Content-Length: %d\r\n" "\r\n%s" % (h.error_status,len(h.error_body),h.error_body)) - self.failUnless(h.stderr.getvalue().find("AssertionError")<>-1) + self.failUnless("AssertionError" in h.stderr.getvalue()) def testErrorAfterOutput(self): MSG = "Some output has been sent" @@ -528,7 +528,7 @@ class HandlerTests(TestCase): self.assertEqual(h.stdout.getvalue(), "Status: 200 OK\r\n" "\r\n"+MSG) - self.failUnless(h.stderr.getvalue().find("AssertionError")<>-1) + self.failUnless("AssertionError" in h.stderr.getvalue()) def testHeaderFormats(self): diff --git a/Lib/test/tokenize_tests.txt b/Lib/test/tokenize_tests.txt index 4ef3bf134a..59e51d7c92 100644 --- a/Lib/test/tokenize_tests.txt +++ b/Lib/test/tokenize_tests.txt @@ -36,8 +36,8 @@ x = 1 \ x = 0 # 
Ordinary integers -0xff <> 255 -0377 <> 255 +0xff != 255 +0377 != 255 2147483647 != 017777777777 -2147483647-1 != 020000000000 037777777777 != -1 @@ -146,10 +146,10 @@ if 0: def d22(a, b, c=1, d=2): pass def d01v(a=1, *restt, **restd): pass -(x, y) <> ({'a':1}, {'b':2}) +(x, y) != ({'a':1}, {'b':2}) # comparison -if 1 < 1 > 1 == 1 >= 1 <= 1 <> 1 != 1 in 1 not in 1 is 1 is not 1: pass +if 1 < 1 > 1 == 1 >= 1 <= 1 != 1 != 1 in 1 not in 1 is 1 is not 1: pass # binary x = 1 & 1 diff --git a/Lib/tokenize.py b/Lib/tokenize.py index a9be4cfe03..27d566a099 100644 --- a/Lib/tokenize.py +++ b/Lib/tokenize.py @@ -77,7 +77,7 @@ String = group(r"[uU]?[rR]?'[^\n'\\]*(?:\\.[^\n'\\]*)*'", # Because of leftmost-then-longest match semantics, be sure to put the # longest operators first (e.g., if = came before ==, == would get # recognized as two instances of =). -Operator = group(r"\*\*=?", r">>=?", r"<<=?", r"<>", r"!=", +Operator = group(r"\*\*=?", r">>=?", r"<<=?", r"!=", r"//=?", r"[+\-*/%&|^=<>]=?", r"~") diff --git a/Lib/webbrowser.py b/Lib/webbrowser.py index b9fdd5d54b..9fd1615604 100644 --- a/Lib/webbrowser.py +++ b/Lib/webbrowser.py @@ -627,7 +627,7 @@ def main(): for o, a in opts: if o == '-n': new_win = 1 elif o == '-t': new_win = 2 - if len(args) <> 1: + if len(args) != 1: print >>sys.stderr, usage sys.exit(1) diff --git a/Lib/wsgiref/headers.py b/Lib/wsgiref/headers.py index 934a645f3f..0908f60b9b 100644 --- a/Lib/wsgiref/headers.py +++ b/Lib/wsgiref/headers.py @@ -63,7 +63,7 @@ class Headers: Does *not* raise an exception if the header is missing. """ name = name.lower() - self._headers[:] = [kv for kv in self._headers if kv[0].lower()<>name] + self._headers[:] = [kv for kv in self._headers if kv[0].lower() != name] def __getitem__(self,name): """Get the first header value for 'name' diff --git a/Lib/wsgiref/util.py b/Lib/wsgiref/util.py index 17fdff637f..450a32fbce 100644 --- a/Lib/wsgiref/util.py +++ b/Lib/wsgiref/util.py @@ -98,7 +98,7 @@ def shift_path_info(environ): return None path_parts = path_info.split('/') - path_parts[1:-1] = [p for p in path_parts[1:-1] if p and p<>'.'] + path_parts[1:-1] = [p for p in path_parts[1:-1] if p and p != '.'] name = path_parts[1] del path_parts[1] diff --git a/Parser/tokenizer.c b/Parser/tokenizer.c index 885b91622a..b734a1288c 100644 --- a/Parser/tokenizer.c +++ b/Parser/tokenizer.c @@ -982,7 +982,6 @@ PyToken_TwoChars(int c1, int c2) break; case '<': switch (c2) { - case '>': return NOTEQUAL; case '=': return LESSEQUAL; case '<': return LEFTSHIFT; } diff --git a/Python/ast.c b/Python/ast.c index b356192d89..f472d96c92 100644 --- a/Python/ast.c +++ b/Python/ast.c @@ -478,7 +478,7 @@ ast_for_augassign(const node *n) static cmpop_ty ast_for_comp_op(const node *n) { - /* comp_op: '<'|'>'|'=='|'>='|'<='|'<>'|'!='|'in'|'not' 'in'|'is' + /* comp_op: '<'|'>'|'=='|'>='|'<='|'!='|'in'|'not' 'in'|'is' |'is' 'not' */ REQ(n, comp_op); diff --git a/Python/graminit.c b/Python/graminit.c index 33ef64b8b7..8299563bef 100644 --- a/Python/graminit.c +++ b/Python/graminit.c @@ -1107,17 +1107,16 @@ static state states_51[2] = { {1, arcs_51_0}, {2, arcs_51_1}, }; -static arc arcs_52_0[10] = { +static arc arcs_52_0[9] = { {117, 1}, {118, 1}, {119, 1}, {120, 1}, {121, 1}, {122, 1}, - {123, 1}, {83, 1}, {114, 2}, - {124, 3}, + {123, 3}, }; static arc arcs_52_1[1] = { {0, 1}, @@ -1130,16 +1129,16 @@ static arc arcs_52_3[2] = { {0, 3}, }; static state states_52[4] = { - {10, arcs_52_0}, + {9, arcs_52_0}, {1, arcs_52_1}, {1, arcs_52_2}, {2, arcs_52_3}, }; static arc 
arcs_53_0[1] = { - {125, 1}, + {124, 1}, }; static arc arcs_53_1[2] = { - {126, 0}, + {125, 0}, {0, 1}, }; static state states_53[2] = { @@ -1147,10 +1146,10 @@ static state states_53[2] = { {2, arcs_53_1}, }; static arc arcs_54_0[1] = { - {127, 1}, + {126, 1}, }; static arc arcs_54_1[2] = { - {128, 0}, + {127, 0}, {0, 1}, }; static state states_54[2] = { @@ -1158,10 +1157,10 @@ static state states_54[2] = { {2, arcs_54_1}, }; static arc arcs_55_0[1] = { - {129, 1}, + {128, 1}, }; static arc arcs_55_1[2] = { - {130, 0}, + {129, 0}, {0, 1}, }; static state states_55[2] = { @@ -1169,10 +1168,10 @@ static state states_55[2] = { {2, arcs_55_1}, }; static arc arcs_56_0[1] = { - {131, 1}, + {130, 1}, }; static arc arcs_56_1[3] = { - {132, 0}, + {131, 0}, {57, 0}, {0, 1}, }; @@ -1181,11 +1180,11 @@ static state states_56[2] = { {3, arcs_56_1}, }; static arc arcs_57_0[1] = { - {133, 1}, + {132, 1}, }; static arc arcs_57_1[3] = { + {133, 0}, {134, 0}, - {135, 0}, {0, 1}, }; static state states_57[2] = { @@ -1193,13 +1192,13 @@ static state states_57[2] = { {3, arcs_57_1}, }; static arc arcs_58_0[1] = { - {136, 1}, + {135, 1}, }; static arc arcs_58_1[5] = { {28, 0}, + {136, 0}, {137, 0}, {138, 0}, - {139, 0}, {0, 1}, }; static state states_58[2] = { @@ -1207,13 +1206,13 @@ static state states_58[2] = { {5, arcs_58_1}, }; static arc arcs_59_0[4] = { + {133, 1}, {134, 1}, - {135, 1}, - {140, 1}, - {141, 2}, + {139, 1}, + {140, 2}, }; static arc arcs_59_1[1] = { - {136, 2}, + {135, 2}, }; static arc arcs_59_2[1] = { {0, 2}, @@ -1224,15 +1223,15 @@ static state states_59[3] = { {1, arcs_59_2}, }; static arc arcs_60_0[1] = { - {142, 1}, + {141, 1}, }; static arc arcs_60_1[3] = { - {143, 1}, + {142, 1}, {29, 2}, {0, 1}, }; static arc arcs_60_2[1] = { - {136, 3}, + {135, 3}, }; static arc arcs_60_3[1] = { {0, 3}, @@ -1245,47 +1244,47 @@ static state states_60[4] = { }; static arc arcs_61_0[7] = { {13, 1}, - {145, 2}, - {148, 3}, - {151, 4}, + {144, 2}, + {147, 3}, + {150, 4}, {19, 5}, - {153, 5}, - {154, 6}, + {152, 5}, + {153, 6}, }; static arc arcs_61_1[3] = { {43, 7}, - {144, 7}, + {143, 7}, {15, 5}, }; static arc arcs_61_2[2] = { - {146, 8}, - {147, 5}, + {145, 8}, + {146, 5}, }; static arc arcs_61_3[2] = { - {149, 9}, - {150, 5}, + {148, 9}, + {149, 5}, }; static arc arcs_61_4[1] = { - {152, 10}, + {151, 10}, }; static arc arcs_61_5[1] = { {0, 5}, }; static arc arcs_61_6[2] = { - {154, 6}, + {153, 6}, {0, 6}, }; static arc arcs_61_7[1] = { {15, 5}, }; static arc arcs_61_8[1] = { - {147, 5}, + {146, 5}, }; static arc arcs_61_9[1] = { - {150, 5}, + {149, 5}, }; static arc arcs_61_10[1] = { - {151, 5}, + {150, 5}, }; static state states_61[11] = { {7, arcs_61_0}, @@ -1304,7 +1303,7 @@ static arc arcs_62_0[1] = { {26, 1}, }; static arc arcs_62_1[3] = { - {155, 2}, + {154, 2}, {27, 3}, {0, 1}, }; @@ -1330,7 +1329,7 @@ static arc arcs_63_0[1] = { {26, 1}, }; static arc arcs_63_1[3] = { - {156, 2}, + {155, 2}, {27, 3}, {0, 1}, }; @@ -1377,7 +1376,7 @@ static state states_64[5] = { }; static arc arcs_65_0[3] = { {13, 1}, - {145, 2}, + {144, 2}, {75, 3}, }; static arc arcs_65_1[2] = { @@ -1385,7 +1384,7 @@ static arc arcs_65_1[2] = { {15, 5}, }; static arc arcs_65_2[1] = { - {157, 6}, + {156, 6}, }; static arc arcs_65_3[1] = { {19, 5}, @@ -1397,7 +1396,7 @@ static arc arcs_65_5[1] = { {0, 5}, }; static arc arcs_65_6[1] = { - {147, 5}, + {146, 5}, }; static state states_65[7] = { {3, arcs_65_0}, @@ -1409,14 +1408,14 @@ static state states_65[7] = { {1, arcs_65_6}, }; static arc arcs_66_0[1] = { - {158, 1}, + 
{157, 1}, }; static arc arcs_66_1[2] = { {27, 2}, {0, 1}, }; static arc arcs_66_2[2] = { - {158, 1}, + {157, 1}, {0, 2}, }; static state states_66[3] = { @@ -1438,14 +1437,14 @@ static arc arcs_67_2[2] = { }; static arc arcs_67_3[3] = { {26, 5}, - {159, 6}, + {158, 6}, {0, 3}, }; static arc arcs_67_4[1] = { {75, 6}, }; static arc arcs_67_5[2] = { - {159, 6}, + {158, 6}, {0, 5}, }; static arc arcs_67_6[1] = { @@ -1532,7 +1531,7 @@ static state states_71[5] = { {2, arcs_71_4}, }; static arc arcs_72_0[1] = { - {160, 1}, + {159, 1}, }; static arc arcs_72_1[1] = { {19, 2}, @@ -1568,7 +1567,7 @@ static state states_72[8] = { {1, arcs_72_7}, }; static arc arcs_73_0[3] = { - {161, 1}, + {160, 1}, {28, 2}, {29, 3}, }; @@ -1583,7 +1582,7 @@ static arc arcs_73_3[1] = { {26, 6}, }; static arc arcs_73_4[4] = { - {161, 1}, + {160, 1}, {28, 2}, {29, 3}, {0, 4}, @@ -1612,7 +1611,7 @@ static arc arcs_74_0[1] = { {26, 1}, }; static arc arcs_74_1[3] = { - {156, 2}, + {155, 2}, {25, 3}, {0, 1}, }; @@ -1629,8 +1628,8 @@ static state states_74[4] = { {1, arcs_74_3}, }; static arc arcs_75_0[2] = { - {155, 1}, - {163, 1}, + {154, 1}, + {162, 1}, }; static arc arcs_75_1[1] = { {0, 1}, @@ -1652,7 +1651,7 @@ static arc arcs_76_3[1] = { {104, 4}, }; static arc arcs_76_4[2] = { - {162, 5}, + {161, 5}, {0, 4}, }; static arc arcs_76_5[1] = { @@ -1673,7 +1672,7 @@ static arc arcs_77_1[1] = { {105, 2}, }; static arc arcs_77_2[2] = { - {162, 3}, + {161, 3}, {0, 2}, }; static arc arcs_77_3[1] = { @@ -1686,8 +1685,8 @@ static state states_77[4] = { {1, arcs_77_3}, }; static arc arcs_78_0[2] = { - {156, 1}, - {165, 1}, + {155, 1}, + {164, 1}, }; static arc arcs_78_1[1] = { {0, 1}, @@ -1709,7 +1708,7 @@ static arc arcs_79_3[1] = { {106, 4}, }; static arc arcs_79_4[2] = { - {164, 5}, + {163, 5}, {0, 4}, }; static arc arcs_79_5[1] = { @@ -1730,7 +1729,7 @@ static arc arcs_80_1[1] = { {105, 2}, }; static arc arcs_80_2[2] = { - {164, 3}, + {163, 3}, {0, 2}, }; static arc arcs_80_3[1] = { @@ -1764,7 +1763,7 @@ static state states_82[2] = { {1, arcs_82_1}, }; static arc arcs_83_0[1] = { - {167, 1}, + {166, 1}, }; static arc arcs_83_1[2] = { {9, 2}, @@ -1780,11 +1779,11 @@ static state states_83[3] = { }; static dfa dfas[84] = { {256, "single_input", 0, 3, states_0, - "\004\050\014\000\000\000\000\025\074\005\023\310\011\020\004\000\300\020\222\006\201"}, + "\004\050\014\000\000\000\000\025\074\005\023\310\011\020\004\000\140\010\111\203\100"}, {257, "file_input", 0, 2, states_1, - "\204\050\014\000\000\000\000\025\074\005\023\310\011\020\004\000\300\020\222\006\201"}, + "\204\050\014\000\000\000\000\025\074\005\023\310\011\020\004\000\140\010\111\203\100"}, {258, "eval_input", 0, 3, states_2, - "\000\040\010\000\000\000\000\000\000\000\000\000\000\020\004\000\300\020\222\006\000"}, + "\000\040\010\000\000\000\000\000\000\000\000\000\000\020\004\000\140\010\111\003\000"}, {259, "decorator", 0, 7, states_3, "\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, {260, "decorators", 0, 2, states_4, @@ -1800,13 +1799,13 @@ static dfa dfas[84] = { {265, "fplist", 0, 3, states_9, "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, {266, "stmt", 0, 2, states_10, - "\000\050\014\000\000\000\000\025\074\005\023\310\011\020\004\000\300\020\222\006\201"}, + "\000\050\014\000\000\000\000\025\074\005\023\310\011\020\004\000\140\010\111\203\100"}, {267, "simple_stmt", 0, 4, states_11, - "\000\040\010\000\000\000\000\025\074\005\023\000\000\020\004\000\300\020\222\006\200"}, + 
"\000\040\010\000\000\000\000\025\074\005\023\000\000\020\004\000\140\010\111\003\100"}, {268, "small_stmt", 0, 2, states_12, - "\000\040\010\000\000\000\000\025\074\005\023\000\000\020\004\000\300\020\222\006\200"}, + "\000\040\010\000\000\000\000\025\074\005\023\000\000\020\004\000\140\010\111\003\100"}, {269, "expr_stmt", 0, 6, states_13, - "\000\040\010\000\000\000\000\000\000\000\000\000\000\020\004\000\300\020\222\006\000"}, + "\000\040\010\000\000\000\000\000\000\000\000\000\000\020\004\000\140\010\111\003\000"}, {270, "augassign", 0, 2, states_14, "\000\000\000\000\000\360\377\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, {271, "print_stmt", 0, 9, states_15, @@ -1816,7 +1815,7 @@ static dfa dfas[84] = { {273, "pass_stmt", 0, 2, states_17, "\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000\000\000\000\000\000\000"}, {274, "flow_stmt", 0, 2, states_18, - "\000\000\000\000\000\000\000\000\074\000\000\000\000\000\000\000\000\000\000\000\200"}, + "\000\000\000\000\000\000\000\000\074\000\000\000\000\000\000\000\000\000\000\000\100"}, {275, "break_stmt", 0, 2, states_19, "\000\000\000\000\000\000\000\000\004\000\000\000\000\000\000\000\000\000\000\000\000"}, {276, "continue_stmt", 0, 2, states_20, @@ -1824,7 +1823,7 @@ static dfa dfas[84] = { {277, "return_stmt", 0, 3, states_21, "\000\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000\000\000\000\000\000"}, {278, "yield_stmt", 0, 2, states_22, - "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\200"}, + "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\100"}, {279, "raise_stmt", 0, 7, states_23, "\000\000\000\000\000\000\000\000\040\000\000\000\000\000\000\000\000\000\000\000\000"}, {280, "import_stmt", 0, 2, states_24, @@ -1850,7 +1849,7 @@ static dfa dfas[84] = { {290, "assert_stmt", 0, 5, states_34, "\000\000\000\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000\000\000\000"}, {291, "compound_stmt", 0, 2, states_35, - "\000\010\004\000\000\000\000\000\000\000\000\310\011\000\000\000\000\000\000\000\001"}, + "\000\010\004\000\000\000\000\000\000\000\000\310\011\000\000\000\000\000\000\200\000"}, {292, "if_stmt", 0, 8, states_36, "\000\000\000\000\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\000\000"}, {293, "while_stmt", 0, 8, states_37, @@ -1866,69 +1865,69 @@ static dfa dfas[84] = { {298, "except_clause", 0, 5, states_42, "\000\000\000\000\000\000\000\000\000\000\000\000\040\000\000\000\000\000\000\000\000"}, {299, "suite", 0, 5, states_43, - "\004\040\010\000\000\000\000\025\074\005\023\000\000\020\004\000\300\020\222\006\200"}, + "\004\040\010\000\000\000\000\025\074\005\023\000\000\020\004\000\140\010\111\003\100"}, {300, "testlist_safe", 0, 5, states_44, - "\000\040\010\000\000\000\000\000\000\000\000\000\000\020\004\000\300\020\222\006\000"}, + "\000\040\010\000\000\000\000\000\000\000\000\000\000\020\004\000\140\010\111\003\000"}, {301, "old_test", 0, 2, states_45, - "\000\040\010\000\000\000\000\000\000\000\000\000\000\020\004\000\300\020\222\006\000"}, + "\000\040\010\000\000\000\000\000\000\000\000\000\000\020\004\000\140\010\111\003\000"}, {302, "old_lambdef", 0, 5, states_46, "\000\000\000\000\000\000\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000"}, {303, "test", 0, 6, states_47, - "\000\040\010\000\000\000\000\000\000\000\000\000\000\020\004\000\300\020\222\006\000"}, + "\000\040\010\000\000\000\000\000\000\000\000\000\000\020\004\000\140\010\111\003\000"}, {304, "or_test", 0, 2, 
states_48, - "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\004\000\300\020\222\006\000"}, + "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\004\000\140\010\111\003\000"}, {305, "and_test", 0, 2, states_49, - "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\004\000\300\020\222\006\000"}, + "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\004\000\140\010\111\003\000"}, {306, "not_test", 0, 3, states_50, - "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\004\000\300\020\222\006\000"}, + "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\004\000\140\010\111\003\000"}, {307, "comparison", 0, 2, states_51, - "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\300\020\222\006\000"}, + "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\140\010\111\003\000"}, {308, "comp_op", 0, 4, states_52, - "\000\000\000\000\000\000\000\000\000\000\010\000\000\000\344\037\000\000\000\000\000"}, + "\000\000\000\000\000\000\000\000\000\000\010\000\000\000\344\017\000\000\000\000\000"}, {309, "expr", 0, 2, states_53, - "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\300\020\222\006\000"}, + "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\140\010\111\003\000"}, {310, "xor_expr", 0, 2, states_54, - "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\300\020\222\006\000"}, + "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\140\010\111\003\000"}, {311, "and_expr", 0, 2, states_55, - "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\300\020\222\006\000"}, + "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\140\010\111\003\000"}, {312, "shift_expr", 0, 2, states_56, - "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\300\020\222\006\000"}, + "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\140\010\111\003\000"}, {313, "arith_expr", 0, 2, states_57, - "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\300\020\222\006\000"}, + "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\140\010\111\003\000"}, {314, "term", 0, 2, states_58, - "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\300\020\222\006\000"}, + "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\140\010\111\003\000"}, {315, "factor", 0, 3, states_59, - "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\300\020\222\006\000"}, + "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\140\010\111\003\000"}, {316, "power", 0, 4, states_60, - "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\222\006\000"}, + "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\111\003\000"}, {317, "atom", 0, 11, states_61, - "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\222\006\000"}, + "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\111\003\000"}, {318, "listmaker", 0, 5, states_62, - "\000\040\010\000\000\000\000\000\000\000\000\000\000\020\004\000\300\020\222\006\000"}, + "\000\040\010\000\000\000\000\000\000\000\000\000\000\020\004\000\140\010\111\003\000"}, {319, "testlist_gexp", 0, 5, states_63, - "\000\040\010\000\000\000\000\000\000\000\000\000\000\020\004\000\300\020\222\006\000"}, + "\000\040\010\000\000\000\000\000\000\000\000\000\000\020\004\000\140\010\111\003\000"}, {320, "lambdef", 0, 5, states_64, 
"\000\000\000\000\000\000\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000"}, {321, "trailer", 0, 7, states_65, - "\000\040\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\000\002\000\000"}, + "\000\040\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\000\001\000\000"}, {322, "subscriptlist", 0, 3, states_66, - "\000\040\050\000\000\000\000\000\000\010\000\000\000\020\004\000\300\020\222\006\000"}, + "\000\040\050\000\000\000\000\000\000\010\000\000\000\020\004\000\140\010\111\003\000"}, {323, "subscript", 0, 7, states_67, - "\000\040\050\000\000\000\000\000\000\010\000\000\000\020\004\000\300\020\222\006\000"}, + "\000\040\050\000\000\000\000\000\000\010\000\000\000\020\004\000\140\010\111\003\000"}, {324, "sliceop", 0, 3, states_68, "\000\000\040\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, {325, "exprlist", 0, 3, states_69, - "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\300\020\222\006\000"}, + "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\140\010\111\003\000"}, {326, "testlist", 0, 3, states_70, - "\000\040\010\000\000\000\000\000\000\000\000\000\000\020\004\000\300\020\222\006\000"}, + "\000\040\010\000\000\000\000\000\000\000\000\000\000\020\004\000\140\010\111\003\000"}, {327, "dictmaker", 0, 5, states_71, - "\000\040\010\000\000\000\000\000\000\000\000\000\000\020\004\000\300\020\222\006\000"}, + "\000\040\010\000\000\000\000\000\000\000\000\000\000\020\004\000\140\010\111\003\000"}, {328, "classdef", 0, 8, states_72, - "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\001"}, + "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\200\000"}, {329, "arglist", 0, 8, states_73, - "\000\040\010\060\000\000\000\000\000\000\000\000\000\020\004\000\300\020\222\006\000"}, + "\000\040\010\060\000\000\000\000\000\000\000\000\000\020\004\000\140\010\111\003\000"}, {330, "argument", 0, 4, states_74, - "\000\040\010\000\000\000\000\000\000\000\000\000\000\020\004\000\300\020\222\006\000"}, + "\000\040\010\000\000\000\000\000\000\000\000\000\000\020\004\000\140\010\111\003\000"}, {331, "list_iter", 0, 2, states_75, "\000\000\000\000\000\000\000\000\000\000\000\210\000\000\000\000\000\000\000\000\000"}, {332, "list_for", 0, 6, states_76, @@ -1942,13 +1941,13 @@ static dfa dfas[84] = { {336, "gen_if", 0, 4, states_80, "\000\000\000\000\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\000\000"}, {337, "testlist1", 0, 2, states_81, - "\000\040\010\000\000\000\000\000\000\000\000\000\000\020\004\000\300\020\222\006\000"}, + "\000\040\010\000\000\000\000\000\000\000\000\000\000\020\004\000\140\010\111\003\000"}, {338, "encoding_decl", 0, 2, states_82, "\000\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, {339, "yield_expr", 0, 3, states_83, - "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\200"}, + "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\100"}, }; -static label labels[168] = { +static label labels[167] = { {0, "EMPTY"}, {256, 0}, {4, 0}, @@ -2072,7 +2071,6 @@ static label labels[168] = { {31, 0}, {30, 0}, {29, 0}, - {29, 0}, {1, "is"}, {310, 0}, {18, 0}, @@ -2121,6 +2119,6 @@ static label labels[168] = { grammar _PyParser_Grammar = { 84, dfas, - {168, labels}, + {167, labels}, 256 }; |