diff --git a/parso/python/tokenize.py b/parso/python/tokenize.py
index 20b80be..11b9a85 100644
--- a/parso/python/tokenize.py
+++ b/parso/python/tokenize.py
@@ -419,8 +419,6 @@ def tokenize_lines(lines, version_info, start_pos=(1, 0)):
                 tos = fstring_stack[-1]
                 if not tos.is_in_expr():
                     string, pos = _find_fstring_string(endpats, fstring_stack, line, lnum, pos)
-                    if pos == max:
-                        break
                     if string:
                         yield PythonToken(
                             FSTRING_STRING, string,
@@ -431,6 +429,8 @@ def tokenize_lines(lines, version_info, start_pos=(1, 0)):
                         )
                         tos.previous_lines = ''
                         continue
+                    if pos == max:
+                        break

                 rest = line[pos:]
                 fstring_end_token, additional_prefix, quote_length = _close_fstring_if_necessary(
diff --git a/test/test_fstring.py b/test/test_fstring.py
index 4c85de8..a0d346d 100644
--- a/test/test_fstring.py
+++ b/test/test_fstring.py
@@ -79,11 +79,17 @@ def test_tokenize_start_pos(code, positions):
     assert positions == [p.start_pos for p in tokens]


-def test_roundtrip(grammar):
-    code = dedent("""\
-        f'''s{
-        str.uppe
-        '''
-    """)
+@pytest.mark.parametrize(
+    'code', [
+        dedent("""\
+            f'''s{
+            str.uppe
+            '''
+        """),
+        'f"foo',
+        'f"""foo',
+    ]
+)
+def test_roundtrip(grammar, code):
     tree = grammar.parse(code)
     assert tree.get_code() == code
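The reordering matters when `_find_fstring_string` consumes the rest of the
line: for an unterminated f-string such as f"foo, `pos` reaches `max` while
`string` is non-empty, and the old early `break` dropped the pending
FSTRING_STRING token, so `tree.get_code()` lost text. Checking `pos == max`
only after the `if string:` branch lets the token be yielded first. Below is
a minimal sketch of the roundtrip guarantee the new test cases cover,
assuming a parso checkout with this patch applied (the grammar version is an
illustrative choice):

    import parso

    # f-strings need a 3.6+ grammar; load one explicitly.
    grammar = parso.load_grammar(version='3.6')

    # Unterminated f-strings must survive parse -> get_code() unchanged.
    for code in ('f"foo', 'f"""foo'):
        tree = grammar.parse(code)
        assert tree.get_code() == code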