Fix tokenizer: Form feeds and multiline docstrings didn't work together

This commit is contained in:
Dave Halter
2019-01-13 23:16:09 +01:00
parent 01dba7f8ce
commit 7ae1efe5c7
3 changed files with 25 additions and 16 deletions

View File

@@ -299,3 +299,11 @@ def test_brackets_no_indentation():
""")
lst = _get_token_list(code)
assert [t.type for t in lst] == [OP, NEWLINE, OP, OP, NEWLINE, ENDMARKER]
def test_form_feed():
    """A form feed in the prefix must not break tokenizing a string start.

    The ``\\f`` before the unterminated triple quote should end up in the
    error token's prefix, and the ``\"\"\"`` itself becomes the error
    token's string, leaving a clean endmarker.
    """
    code = dedent('''\
        \f"""''')
    tokens = _get_token_list(code)
    error_token, endmarker = tokens
    assert error_token.prefix == '\f'
    assert error_token.string == '"""'
    assert endmarker.prefix == ''