Tokenizer: Add error dedents only if parens are not open

This commit is contained in:
Dave Halter
2020-03-28 14:41:10 +01:00
parent 8e49d8ab5f
commit cf880f43d4
2 changed files with 3 additions and 3 deletions

View File

@@ -498,7 +498,7 @@ def tokenize_lines(lines, version_info, start_pos=(1, 0)):
pseudomatch = pseudo_token.match(line, pos)
if not pseudomatch: # scan for tokens
match = whitespace.match(line, pos)
-if pos == 0:
+if pos == 0 and paren_level == 0:
for t in dedent_if_necessary(match.end()):
yield t
pos = match.end()