diff --git a/parso/python/tokenize.py b/parso/python/tokenize.py
index eee3cf6..eda4c99 100644
--- a/parso/python/tokenize.py
+++ b/parso/python/tokenize.py
@@ -310,7 +310,7 @@ class FStringNode(object):
         return not self.is_in_expr() and self.format_spec_count
 
 
-def _close_fstring_if_necessary(fstring_stack, string, start_pos, additional_prefix):
+def _close_fstring_if_necessary(fstring_stack, string, line_nr, column, additional_prefix):
     for fstring_stack_index, node in enumerate(fstring_stack):
         lstripped_string = string.lstrip()
         len_lstrip = len(string) - len(lstripped_string)
@@ -318,7 +318,7 @@ def _close_fstring_if_necessary(fstring_stack, string, start_pos, additional_pre
             token = PythonToken(
                 FSTRING_END,
                 node.quote,
-                start_pos,
+                (line_nr, column + len_lstrip),
                 prefix=additional_prefix+string[:len_lstrip],
             )
             additional_prefix = ''
@@ -474,7 +474,8 @@ def tokenize_lines(lines, version_info, start_pos=(1, 0)):
                 fstring_end_token, additional_prefix, quote_length = _close_fstring_if_necessary(
                     fstring_stack,
                     rest,
-                    (lnum, pos),
+                    lnum,
+                    pos,
                     additional_prefix,
                 )
                 pos += quote_length
diff --git a/test/test_tokenize.py b/test/test_tokenize.py
index f0fc4c2..b73ce16 100644
--- a/test/test_tokenize.py
+++ b/test/test_tokenize.py
@@ -414,3 +414,13 @@ def test_fstring(code, types, version_ge_py36):
 def test_fstring_assignment_expression(code, types, version_ge_py38):
     actual_types = [t.type for t in _get_token_list(code, version_ge_py38)]
     assert types + [ENDMARKER] == actual_types
+
+
+def test_fstring_end_error_pos(version_ge_py38):
+    f_start, f_string, bracket, f_end, endmarker = \
+        _get_token_list('f" { "', version_ge_py38)
+    assert f_start.start_pos == (1, 0)
+    assert f_string.start_pos == (1, 2)
+    assert bracket.start_pos == (1, 3)
+    assert f_end.start_pos == (1, 5)
+    assert endmarker.start_pos == (1, 6)
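
A quick way to see what this patch changes: tokenize the regression-test input and print each token's start position. This is a minimal sketch, not part of the patch; it assumes parso's split_lines and parse_version_string helpers from parso.utils alongside the tokenize_lines function touched above.

    # Sketch only: print token positions for the input used in the new test.
    from parso.python.tokenize import tokenize_lines
    from parso.utils import parse_version_string, split_lines

    lines = split_lines('f" { "', keepends=True)
    for token in tokenize_lines(lines, parse_version_string('3.8')):
        print(token.type, token.start_pos)

With the fix applied, FSTRING_END is reported at (1, 5), the column of the closing quote itself; previously the whitespace stripped off by lstrip() was not counted, so the token was placed one column too early at (1, 4).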