@@ -33,7 +33,6 @@ from token import *
 import token
 __all__ = [x for x in dir(token) if not x.startswith("_")]
 __all__ += ["COMMENT", "tokenize", "generate_tokens", "NL", "untokenize"]
-del x
 del token
 
 COMMENT = N_TOKENS
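
Note: `del x` is dropped because Python 3 no longer leaks the list comprehension
variable into the enclosing scope, so after the `__all__` comprehension runs the
name `x` does not exist at module level and deleting it would raise NameError.
A minimal standalone sketch of the scoping difference (illustrative, not part of
the patch):

    # Python 2: the comprehension variable leaked into module scope,
    # so tokenize.py cleaned it up afterwards.
    names = [x for x in dir(object) if not x.startswith("_")]
    # del x   # valid in Python 2; NameError in Python 3

    # Python 3: x is local to the comprehension, so there is nothing to delete.
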
@@ -150,8 +149,8 @@ class StopTokenizing(Exception): pass
 def printtoken(type, token, srow_scol, erow_ecol, line): # for testing
     srow, scol = srow_scol
     erow, ecol = erow_ecol
-    print "%d,%d-%d,%d:\t%s\t%s" % \
-        (srow, scol, erow, ecol, tok_name[type], repr(token))
+    print("%d,%d-%d,%d:\t%s\t%s" % \
+        (srow, scol, erow, ecol, tok_name[type], repr(token)))
 
 def tokenize(readline, tokeneater=printtoken):
     """
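
Note: this hunk applies PEP 3105: `print` is a built-in function in Python 3,
so the Python 2 print statement is wrapped in a call. The trailing backslash is
kept by the conversion; it is redundant inside the parentheses but harmless.
A standalone sketch of the change, with illustrative values (not part of the
patch):

    # Python 2 statement form (removed above):
    #     print "%d,%d" % (srow, scol)

    # Python 3 function form (added above):
    srow, scol = 1, 0
    print("%d,%d" % (srow, scol))   # prints: 1,0
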
@@ -316,7 +315,7 @@ def generate_tokens(readline):
 
         if contstr:                            # continued string
             if not line:
-                raise TokenError, ("EOF in multi-line string", strstart)
+                raise TokenError("EOF in multi-line string", strstart)
             endmatch = endprog.match(line)
             if endmatch:
                 pos = end = endmatch.end(0)
@@ -377,7 +376,7 @@ def generate_tokens(readline):
 
         else:                                  # continued statement
             if not line:
-                raise TokenError, ("EOF in multi-line statement", (lnum, 0))
+                raise TokenError("EOF in multi-line statement", (lnum, 0))
             continued = 0
 
         while pos < max:
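
Note: the two generate_tokens hunks apply PEP 3109: the Python 2 form
`raise TokenError, args` is a SyntaxError in Python 3, which requires the
exception to be instantiated explicitly. A standalone sketch using the real
tokenize.TokenError, with illustrative arguments (not part of the patch):

    from tokenize import TokenError

    try:
        # Python 2 equivalent (now a SyntaxError):
        #     raise TokenError, ("EOF in multi-line string", (1, 0))
        raise TokenError("EOF in multi-line string", (1, 0))
    except TokenError as err:
        print(err.args)   # ('EOF in multi-line string', (1, 0))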