Fix tokenizer: Dedents should only happen after newlines

Dave Halter
2019-01-03 11:44:17 +01:00
parent fde64d0eae
commit 5da51720cd
3 changed files with 54 additions and 7 deletions


@@ -246,3 +246,25 @@ def test_error_string():
    assert t1.string == '"'
    assert endmarker.prefix == '\n'
    assert endmarker.string == ''


def test_indent_error_recovery():
    code = dedent("""\
            str(
        from x import a
        def
        """)
    lst = _get_token_list(code)
    expected = [
        # `str(`
        INDENT, NAME, OP,
        # `from x`
        NAME, NAME,
        # `import a` on the same line as the previous `from x`
        NAME, NAME, NEWLINE,
        # A dedent happens here, because there is an import now and the
        # import statement "breaks" out of the opening paren on the first
        # line.
        DEDENT,
        # `def`
        NAME, NEWLINE, ENDMARKER]
    assert [t.type for t in lst] == expected
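
For reference, the token sequence asserted above can be reproduced outside the test suite. The snippet below is a minimal sketch and is not part of this commit: it assumes parso's internal tokenizer entry point `parso.python.tokenize.tokenize(code, version_info)` (which is what the test helper `_get_token_list` wraps) and the `parso.utils.parse_version_string()` helper; both are internal APIs and their exact signatures are assumptions here.

# Minimal sketch (not part of this commit): print the error-recovery token
# sequence for the snippet used in test_indent_error_recovery.
# Assumes parso's internal tokenize(code, version_info) generator and
# parse_version_string(), which defaults to the running interpreter's version.
from textwrap import dedent

from parso.python.tokenize import tokenize
from parso.utils import parse_version_string

code = dedent("""\
        str(
    from x import a
    def
    """)

for token in tokenize(code, parse_version_string()):
    print(token.type, repr(token.string))

Run against a parso version that includes this fix, the output should end with a DEDENT emitted only after the NEWLINE that follows `import a`, matching the `expected` list in the test.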