Fix tokenizer fstring end positions

Dave Halter
2020-03-28 11:22:32 +01:00
parent 77b3ad5843
commit 8e49d8ab5f
2 changed files with 14 additions and 3 deletions


@@ -310,7 +310,7 @@ class FStringNode(object):
         return not self.is_in_expr() and self.format_spec_count


-def _close_fstring_if_necessary(fstring_stack, string, start_pos, additional_prefix):
+def _close_fstring_if_necessary(fstring_stack, string, line_nr, column, additional_prefix):
     for fstring_stack_index, node in enumerate(fstring_stack):
         lstripped_string = string.lstrip()
         len_lstrip = len(string) - len(lstripped_string)
@@ -318,7 +318,7 @@ def _close_fstring_if_necessary(fstring_stack, string, start_pos, additional_prefix):
             token = PythonToken(
                 FSTRING_END,
                 node.quote,
-                start_pos,
+                (line_nr, column + len_lstrip),
                 prefix=additional_prefix+string[:len_lstrip],
             )
             additional_prefix = ''
@@ -474,7 +474,8 @@ def tokenize_lines(lines, version_info, start_pos=(1, 0)):
                 fstring_end_token, additional_prefix, quote_length = _close_fstring_if_necessary(
                     fstring_stack,
                     rest,
-                    (lnum, pos),
+                    lnum,
+                    pos,
                     additional_prefix,
                 )
                 pos += quote_length
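
The change boils down to position arithmetic: by passing the line number and column separately, the tokenizer can add the length of the stripped leading whitespace to the column, so FSTRING_END starts at the closing quote rather than in front of it. A minimal standalone sketch of that calculation (illustrative names, not parso's actual code):

    def fstring_end_start_pos(rest, line_nr, column):
        # `rest` is the remainder of the current line, which begins at `column`.
        # Whitespace before the closing quote belongs to the token's prefix,
        # so the FSTRING_END token itself starts after it.
        len_lstrip = len(rest) - len(rest.lstrip())
        return (line_nr, column + len_lstrip)

    # For the test input f" { ", the remainder after the unclosed `{` is ' "'
    # starting at column 4, so the end token is now reported at (1, 5)
    # instead of (1, 4).
    assert fstring_end_start_pos(' "', line_nr=1, column=4) == (1, 5)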


@@ -414,3 +414,13 @@ def test_fstring(code, types, version_ge_py36):
 def test_fstring_assignment_expression(code, types, version_ge_py38):
     actual_types = [t.type for t in _get_token_list(code, version_ge_py38)]
     assert types + [ENDMARKER] == actual_types
+
+
+def test_fstring_end_error_pos(version_ge_py38):
+    f_start, f_string, bracket, f_end, endmarker = \
+        _get_token_list('f" { "', version_ge_py38)
+    assert f_start.start_pos == (1, 0)
+    assert f_string.start_pos == (1, 2)
+    assert bracket.start_pos == (1, 3)
+    assert f_end.start_pos == (1, 5)
+    assert endmarker.start_pos == (1, 6)
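
The corrected positions can also be checked by hand against the token stream. A rough reproduction of the new test case using parso's internal tokenizer (module path and signature as of this commit; the API is private and may differ in other releases):

    from parso.python import tokenize
    from parso.utils import parse_version_string

    # Tokenize the same error-recovery input as the new test.
    tokens = list(tokenize.tokenize('f" { "', parse_version_string('3.8')))
    for token in tokens:
        print(token.type, repr(token.string), token.start_pos)

    # With this fix, the FSTRING_END token starts at (1, 5), i.e. at the
    # closing quote itself rather than at the whitespace in front of it.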