error tokens are only generated at the end of input in the tokenizer

Dave Halter
2014-02-21 00:09:49 +01:00
parent 3232ae5b0c
commit a69eee5876
2 changed files with 22 additions and 8 deletions


@@ -155,7 +155,7 @@ def generate_tokens(readline, line_offset=0):
     lnum = line_offset
     continued = False
     numchars = '0123456789'
-    contstr, needcont = '', 0
+    contstr, needcont = '', False
     contline = None
     indents = [0]
@@ -163,6 +163,10 @@ def generate_tokens(readline, line_offset=0):
         try:
             line = readline()
         except StopIteration:
+            if contstr:
+                yield TokenInfo(ERRORTOKEN, contstr, strstart, (lnum, pos))
+                contstr, needcont = '', False
+                contline = None
             line = b''
         lnum += 1
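
The new `except StopIteration` branch is the point of the commit: a string that
is still open when the input runs out is flushed as a single ERRORTOKEN at the
very end, rather than error tokens appearing mid-stream. A toy sketch of that
end-of-input flush (only the TokenInfo/STRING/ERRORTOKEN names mirror the diff;
the simplified triple-quote matching is purely illustrative):

    from collections import namedtuple

    TokenInfo = namedtuple('TokenInfo', 'type string start end')
    STRING, ERRORTOKEN = 'STRING', 'ERRORTOKEN'

    def tokenize_strings(readline):
        """Toy tokenizer tracking only triple-quoted strings."""
        contstr, strstart, lnum = '', None, 0
        while True:
            try:
                line = readline()
            except StopIteration:
                if contstr:
                    # The behaviour this commit adds: flush the pending
                    # string as one ERRORTOKEN once input is exhausted.
                    yield TokenInfo(ERRORTOKEN, contstr, strstart, (lnum, 0))
                return
            lnum += 1
            if contstr:
                if '"""' in line:
                    end = line.index('"""') + 3
                    yield TokenInfo(STRING, contstr + line[:end],
                                    strstart, (lnum, end))
                    contstr = ''
                else:
                    contstr += line
            elif '"""' in line:  # naive: ignores a closer on the same line
                start = line.index('"""')
                contstr, strstart = line[start:], (lnum, start)

    lines = iter(['x = 1\n', '"""never closed\n', 'still open\n'])
    for tok in tokenize_strings(lambda: next(lines)):
        print(tok.type, repr(tok.string))
    # -> ERRORTOKEN '"""never closed\nstill open\n'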
@@ -176,14 +180,15 @@ def generate_tokens(readline, line_offset=0):
             if endmatch:
                 pos = end = endmatch.end(0)
                 yield TokenInfo(STRING, contstr + line[:end], strstart, (lnum, end))
-                contstr, needcont = '', 0
+                contstr, needcont = '', False
                 contline = None
             elif needcont and line[-2:] != '\\\n' and line[-3:] != '\\\r\n':
-                yield TokenInfo(ERRORTOKEN, contstr + line,
-                                strstart, (lnum, len(line)))
-                contstr = ''
-                contline = None
-                continue
+                #yield TokenInfo(ERRORTOKEN, contstr + line,
+                #                strstart, (lnum, len(line)))
+                #contstr = ''
+                #contline = None
+                #continue
+                pass
             else:
                 contstr = contstr + line
                 contline = contline + line
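
With the `elif` body reduced to `pass`, a broken backslash continuation inside
a single-quoted string no longer yields an immediate ERRORTOKEN; the old
behaviour is kept above as commented-out code. For reference, the guard in that
`elif` only fires when the previous line failed to keep the continuation alive,
as a standalone check of the same condition shows:

    # The continuation guard from the elif above, tested in isolation:
    for line in ('abc \\\n', 'abc \\\r\n', 'abc\n'):
        intact = line[-2:] == '\\\n' or line[-3:] == '\\\r\n'
        print(repr(line), '->', 'still continued' if intact else 'broken')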
@@ -264,7 +269,7 @@ def generate_tokens(readline, line_offset=0):
                     strstart = (lnum, start)
                     endprog = (endprogs[initial] or endprogs[token[1]] or
                                endprogs[token[2]])
-                    contstr, needcont = line[start:], 1
+                    contstr, needcont = line[start:], True
                     contline = line
                     break
                 else:                                     # ordinary string
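
This hunk is where continuation mode is armed: a single-quoted string left open
at the end of a physical line stores its partial text in `contstr` and sets
`needcont = True` (now a proper bool, matching the `False` initialisation
above). The stdlib tokenizer, used here purely for illustration rather than
jedi's fork, shows such a backslash-continued string coming back as one STRING
token:

    import io
    import tokenize

    source = "s = 'first \\\nsecond'\n"
    for tok in tokenize.generate_tokens(io.StringIO(source).readline):
        print(tokenize.tok_name[tok.type], repr(tok.string))
    # NAME 's', OP '=', STRING "'first \\\nsecond'", NEWLINE, ENDMARKER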


@@ -265,3 +265,12 @@ except ImportError, i_b:
 i_b
 #? ImportError()
 i_b
+
+# -----------------
+# continuations
+# -----------------
+
+foo = \
+    1
+#? int()
+foo
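
The new test exercises a statement-level backslash continuation: after the
change, `foo = \` followed by `1` must still resolve `foo` to `int` with no
error tokens. A quick cross-check with the stdlib tokenizer (again only for
illustration) confirms such input tokenizes cleanly:

    import io
    import tokenize

    source = "foo = \\\n1\n"
    for tok in tokenize.generate_tokens(io.StringIO(source).readline):
        print(tokenize.tok_name[tok.type], repr(tok.string))
    # NAME 'foo', OP '=', NUMBER '1', NEWLINE '\n', ENDMARKER ''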