removed some line checks from tokenizer

Dave Halter
2014-02-21 00:13:23 +01:00
parent a69eee5876
commit 184e90a3a0


@@ -165,17 +165,12 @@ def generate_tokens(readline, line_offset=0):
         except StopIteration:
             if contstr:
                 yield TokenInfo(ERRORTOKEN, contstr, strstart, (lnum, pos))
-            contstr, needcont = '', False
-            contline = None
-            line = b''
+            break
         lnum += 1
         pos, max = 0, len(line)
         if contstr:  # continued string
-            if not line:
-                # multiline string has not been finished
-                break
             endmatch = endprog.match(line)
             if endmatch:
                 pos = end = endmatch.end(0)
@@ -195,8 +190,6 @@ def generate_tokens(readline, line_offset=0):
             continue
         elif not continued:  # new statement
-            if not line:
-                break
             column = 0
             while pos < max:  # measure leading whitespace
                 if line[pos] == ' ':
@@ -230,9 +223,6 @@ def generate_tokens(readline, line_offset=0):
                 indents = indents[:-1]
         else:  # continued statement
-            if not line:
-                # basically a statement has not been finished here.
-                break
             continued = False
         while pos < max:
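
The change collapses the tokenizer's scattered end-of-input checks into a single break in the StopIteration handler: once readline() is exhausted, the loop stops immediately, so the later "if not line" guards in the continued-string, new-statement, and continued-statement branches can never fire and are dropped. A minimal sketch of that control flow (illustrative only; scan and lines_from are made-up names, not jedi's API):

def lines_from(source):
    # Hypothetical helper: an iterator of lines, standing in for readline.
    return iter(source.splitlines(True))

def scan(line_iter):
    lnum = 0
    contstr = ''  # stands in for the tokenizer's continued-string state
    while True:
        try:
            line = next(line_iter)
        except StopIteration:
            if contstr:
                # Flush an unterminated multiline string as an error token.
                yield ('ERRORTOKEN', contstr, lnum)
            break  # single exit point; no per-branch `if not line` checks
        lnum += 1
        yield ('LINE', line, lnum)

for token in scan(lines_from("a = 1\nb = 2\n")):
    print(token)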