diff options
Diffstat (limited to 'Lib/tokenize.py')
-rw-r--r-- | Lib/tokenize.py | 4 |
1 file changed, 2 insertions, 2 deletions
diff --git a/Lib/tokenize.py b/Lib/tokenize.py index 8dc4c53e91..1a72d6fc59 100644 --- a/Lib/tokenize.py +++ b/Lib/tokenize.py @@ -228,7 +228,7 @@ def untokenize(iterable): # Output text will tokenize back to the input t1 = [tok[:2] for tok in generate_tokens(f.readline)] newcode = untokenize(t1) - readline = iter(newcode.splitlines(1)).next + readline = iter(newcode.splitlines(1)).__next__ t2 = [tok[:2] for tok in generate_tokens(readline)] assert t1 == t2 """ @@ -242,7 +242,7 @@ def generate_tokens(readline): readline() method of built-in file objects. Each call to the function should return one line of input as a string. Alternately, readline can be a callable function terminating with StopIteration: - readline = open(myfile).__next__ # Example of alternate readline + readline = open(myfile).__next__ # Example of alternate readline The generator produces 5-tuples with these members: the token type; the token string; a 2-tuple (srow, scol) of ints specifying the row and |