Fix an f-string tokenizer issue

This commit is contained in:
Dave Halter
2019-02-13 00:17:37 +01:00
parent f1ee7614c9
commit 3f6fc8a5ad
2 changed files with 14 additions and 8 deletions

View File

@@ -79,11 +79,17 @@ def test_tokenize_start_pos(code, positions):
assert positions == [p.start_pos for p in tokens]
def test_roundtrip(grammar):
code = dedent("""\
f'''s{
str.uppe
'''
""")
@pytest.mark.parametrize(
    'code', [
        # An f-string whose expression part is cut off mid-identifier and
        # whose closing quotes never arrive: the tokenizer must recover
        # instead of crashing, and the tree must round-trip losslessly.
        # NOTE(review): inner indentation of this dedent literal was
        # reconstructed from a whitespace-mangled diff rendering — verify
        # against the upstream file.
        dedent("""\
            f'''s{
            str.uppe
            '''
            """),
        # Unterminated single-quoted and triple-quoted f-strings.
        'f"foo',
        'f"""foo',
    ]
)
def test_roundtrip(grammar, code):
    """Parsing broken f-string code and re-emitting it must be lossless.

    ``grammar`` is the project-provided pytest fixture; ``get_code()`` on
    the parsed tree must reproduce the input byte-for-byte even for
    error-recovered (unterminated) f-strings.
    """
    tree = grammar.parse(code)
    assert tree.get_code() == code