Mirror of https://github.com/davidhalter/parso.git, synced 2025-12-08 21:54:54 +08:00
Fix tokenizer: Form feeds and multiline docstrings didn't work together
@@ -487,14 +487,15 @@ def tokenize_lines(lines, version_info, start_pos=(1, 0)):
                 new_line = False
                 if paren_level == 0 and not fstring_stack:
                     i = 0
+                    indent_start = start
                     while line[i] == '\f':
                         i += 1
                         # TODO don't we need to change spos as well?
-                        start -= 1
-                    if start > indents[-1]:
+                        indent_start -= 1
+                    if indent_start > indents[-1]:
                         yield PythonToken(INDENT, '', spos, '')
-                        indents.append(start)
-                    for t in dedent_if_necessary(start):
+                        indents.append(indent_start)
+                    for t in dedent_if_necessary(indent_start):
                         yield t
 
             if fstring_stack:
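The core of the fix is the new indent_start copy: leading form feeds now only shift a scratch column used for the INDENT/DEDENT bookkeeping, while start itself stays untouched for the position and string handling that follows (mutating start is what broke lines where a multiline docstring begins after a form feed). Below is a minimal standalone sketch of that idea; the names effective_indent and start_col are illustrative and not part of parso.

def effective_indent(line, start_col):
    """Column to compare against the indentation stack (sketch, not parso code)."""
    i = 0
    indent_start = start_col        # work on a copy; leave start_col alone
    while i < len(line) and line[i] == '\f':
        i += 1
        indent_start -= 1           # each leading form feed shifts the indent left
    return indent_start

# A form feed in front of a docstring no longer skews the indent column,
# and the caller's start_col is unchanged.
assert effective_indent('\f"""docstring"""', 1) == 0
assert effective_indent('    x = 1', 4) == 4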
@@ -1016,20 +1016,20 @@ def test_special_no_newline_ending(differ):
 
 def test_random_character_insertion(differ):
     code1 = dedent('''\
         def create(self):
             1
             if self.path is not None:
                 return
             # 3
             # 4
         ''')
     code2 = dedent('''\
         def create(self):
             1
             if 2:
                 x return
             # 3
             # 4
         ''')
     differ.initialize(code1)
     differ.parse(code2, copies=1, parsers=3, expect_error_leaves=True)
@@ -299,3 +299,11 @@ def test_brackets_no_indentation():
         """)
     lst = _get_token_list(code)
     assert [t.type for t in lst] == [OP, NEWLINE, OP, OP, NEWLINE, ENDMARKER]
+
+
+def test_form_feed():
+    error_token, endmarker = _get_token_list(dedent('''\
+        \f"""'''))
+    assert error_token.prefix == '\f'
+    assert error_token.string == '"""'
+    assert endmarker.prefix == ''
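The new test drives the tokenizer through the test-only _get_token_list helper. A rough cross-check through parso's public API is sketched below; it relies only on parso's round-trip guarantee (a parsed tree's get_code() reproduces the input exactly), so with the fix the form feed has to survive as a token prefix rather than throwing positions off. Nothing beyond the round-trip is asserted.

import parso

source = '\f"""'            # form feed directly followed by a docstring opener
tree = parso.parse(source)

# parso round-trips source text exactly; the leading form feed ends up in a
# leaf's prefix instead of corrupting the tokenizer's position bookkeeping.
assert tree.get_code() == source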