From d3fa7e1cad2a5422cae1ed1f9371a8236c098b83 Mon Sep 17 00:00:00 2001
From: Dave Halter
Date: Sat, 7 Apr 2018 11:14:41 +0200
Subject: [PATCH] Fix a Python 2 related issue.

---
 parso/python/tokenize.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/parso/python/tokenize.py b/parso/python/tokenize.py
index 5b8bea7..8d9e7cc 100644
--- a/parso/python/tokenize.py
+++ b/parso/python/tokenize.py
@@ -477,7 +477,9 @@ def tokenize_lines(lines, version_info, start_pos=(1, 0)):
                 yield PythonToken(NUMBER, token, spos, prefix)
             elif initial in '\r\n':
                 if any(not f.allow_multiline() for f in fstring_stack):
-                    fstring_stack.clear()
+                    # Would use fstring_stack.clear, but that's not available
+                    # in Python 2.
+                    fstring_stack[:] = []
 
                 if not new_line and paren_level == 0 and not fstring_stack:
                     yield PythonToken(NEWLINE, token, spos, prefix)
@@ -516,7 +518,7 @@ def tokenize_lines(lines, version_info, start_pos=(1, 0)):
                 yield PythonToken(FSTRING_START, token, spos, prefix)
             elif is_identifier(initial):                      # ordinary name
                 if token in always_break_tokens:
-                    fstring_stack.clear()
+                    fstring_stack[:] = []
                     paren_level = 0
                     while True:
                         indent = indents.pop()
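
A minimal sketch (not part of the commit) of why the slice assignment is used:
list.clear() was only added in Python 3.3, whereas lst[:] = [] empties the list
in place on both Python 2 and 3 and keeps any other references pointing at the
same, now-empty object. The names alias and the placeholder stack contents are
purely hypothetical.

    # Illustration only; the stack contents and the name `alias` are made up.
    fstring_stack = ['node-a', 'node-b']   # stand-in for FSTRING bookkeeping
    alias = fstring_stack                  # second reference to the same list

    fstring_stack[:] = []                  # in-place clear; works on Python 2 and 3

    assert fstring_stack == []
    assert alias is fstring_stack          # the alias sees the cleared list

    # By contrast, fstring_stack.clear() raises AttributeError on Python 2,
    # and rebinding with fstring_stack = [] would leave `alias` holding the
    # old, still-populated list.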