Mirror of https://github.com/davidhalter/parso.git, synced 2025-12-08 05:34:51 +08:00
Fix an f-string tokenizer issue
@@ -419,8 +419,6 @@ def tokenize_lines(lines, version_info, start_pos=(1, 0)):
                 tos = fstring_stack[-1]
                 if not tos.is_in_expr():
                     string, pos = _find_fstring_string(endpats, fstring_stack, line, lnum, pos)
-                    if pos == max:
-                        break
                     if string:
                         yield PythonToken(
                             FSTRING_STRING, string,
@@ -431,6 +429,8 @@ def tokenize_lines(lines, version_info, start_pos=(1, 0)):
                         )
                         tos.previous_lines = ''
                         continue
+                    if pos == max:
+                        break

                 rest = line[pos:]
                 fstring_end_token, additional_prefix, quote_length = _close_fstring_if_necessary(
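Read together, the two hunks appear to move the end-of-line guard `if pos == max: break` from before the `if string:` block to after it, so that when the literal part of an f-string runs to the end of a line, the pending FSTRING_STRING token is yielded (and `tos.previous_lines` reset) before the tokenizer leaves the line loop. A minimal sketch of the observable effect through parso's public API, reusing the multi-line snippet the new test parametrizes (the `parso.load_grammar()` call is mine, not part of the diff):

# Sketch only: unterminated multi-line f-string taken from the new test case.
from textwrap import dedent

import parso

code = dedent("""\
    f'''s{
    str.uppe
    '''
""")

grammar = parso.load_grammar()
module = grammar.parse(code)

# With the trailing FSTRING_STRING emitted before the tokenizer breaks at the
# end of the line, no characters are dropped and the tree round-trips to the
# exact source text.
assert module.get_code() == code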
@@ -79,11 +79,17 @@ def test_tokenize_start_pos(code, positions):
     assert positions == [p.start_pos for p in tokens]


-def test_roundtrip(grammar):
-    code = dedent("""\
-        f'''s{
-        str.uppe
-        '''
-    """)
+@pytest.mark.parametrize(
+    'code', [
+        dedent("""\
+            f'''s{
+            str.uppe
+            '''
+        """),
+        'f"foo',
+        'f"""foo',
+    ]
+)
+def test_roundtrip(grammar, code):
     tree = grammar.parse(code)
     assert tree.get_code() == code
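The two new single-line cases, 'f"foo' and 'f"""foo', cover unterminated f-strings with no expression part at all. A quick check of the same round-trip property through parso's documented top-level API (this snippet is mine, not part of the diff):

import parso

for code in ('f"foo', 'f"""foo'):
    tree = parso.parse(code)        # parses with error recovery
    assert tree.get_code() == code  # the unterminated f-string is preserved verbatim

Running something like pytest -k test_roundtrip from the repository root should select the updated test (the test file's path is not shown in this extract).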