Fix tokenizer: Form feeds and multiline docstrings didn't work together

This commit is contained in:
Dave Halter
2019-01-13 23:16:09 +01:00
parent 01dba7f8ce
commit 7ae1efe5c7
3 changed files with 25 additions and 16 deletions

View File

@@ -1016,20 +1016,20 @@ def test_special_no_newline_ending(differ):
def test_random_character_insertion(differ):
    """Diff-parse code where a stray character corrupted a line mid-block.

    ``code2`` swaps a valid ``if`` condition for ``if 2:`` followed by the
    garbled statement line ``x    return``.  The incremental (diff) parser
    must recover: reuse one copied node, run three parsers, and emit error
    leaves rather than crashing.

    NOTE(review): this body was reconstructed from a diff view that stripped
    indentation and merged old/new hunk lines; the exact whitespace inside
    the literals (e.g. between ``x`` and ``return``) should be confirmed
    against the upstream repository.
    """
    code1 = dedent('''\
    def create(self):
        1
        if self.path is not None:
            return
    # 3
    # 4
    ''')
    code2 = dedent('''\
    def create(self):
        1
        if 2:
            x    return
    # 3
    # 4
    ''')
    differ.initialize(code1)
    # copies/parsers counts pin how much of the old tree the differ reuses;
    # expect_error_leaves asserts the broken line surfaces as error nodes.
    differ.parse(code2, copies=1, parsers=3, expect_error_leaves=True)

View File

@@ -299,3 +299,11 @@ def test_brackets_no_indentation():
""")
lst = _get_token_list(code)
assert [t.type for t in lst] == [OP, NEWLINE, OP, OP, NEWLINE, ENDMARKER]
def test_form_feed():
    """A form feed before a triple-quoted string must land in the prefix.

    Regression test for the tokenizer bug where a form feed (``\\f``)
    immediately followed by an unterminated multiline string was
    mishandled.  The ``\\f`` belongs to the error token's *prefix*, the
    triple-quote itself is the error token's *string*, and the trailing
    endmarker carries an empty prefix.
    """
    error_token, endmarker = _get_token_list(dedent('''\
        \f"""'''))
    assert error_token.prefix == '\f'
    assert error_token.string == '"""'
    assert endmarker.prefix == ''