Small tokenizer refactoring

Dave Halter
2020-04-04 13:13:00 +02:00
parent ae6af7849e
commit 1047204654

@@ -593,7 +593,7 @@ def tokenize_lines(lines, version_info, start_pos=(1, 0), indents=None):
                     token = line[start:pos]
                     yield PythonToken(STRING, token, spos, prefix)
                 else:
-                    contstr_start = (lnum, start)  # multiple lines
+                    contstr_start = spos  # multiple lines
                     contstr = line[start:]
                     contline = line
                     break
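
The change reuses spos instead of rebuilding the tuple (lnum, start) when a string continues onto following lines. Assuming spos was already bound to (lnum, start) for the current token earlier in the loop (which the adjacent yield PythonToken(STRING, token, spos, prefix) suggests), the refactoring is behavior-preserving. A minimal sketch under that assumption, with hypothetical values standing in for the tokenizer's loop state:

# Names mirror the diff above; the numbers are illustrative only.
lnum, start = 593, 4          # current line number and column of the token
spos = (lnum, start)          # start position recorded for the token

# Old branch body rebuilt the tuple; new branch body reuses spos.
contstr_start_old = (lnum, start)
contstr_start_new = spos
assert contstr_start_old == contstr_start_new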