Mirror of https://github.com/davidhalter/parso.git (synced 2025-12-06 12:54:29 +08:00)
Fix tokenizer fstring end positions
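The diff below advances the FSTRING_END start position past any whitespace stripped from the front of the remaining line, so the token points at the closing quote itself. As an illustration only (not part of the commit), the behaviour can be inspected with parso's tokenizer; this sketch assumes parso.python.tokenize.tokenize() and parso.utils.parse_version_string() are importable as in this revision:

    # Illustrative reproduction sketch, not part of the commit.
    from parso.python.tokenize import tokenize
    from parso.utils import parse_version_string

    code = 'f" { "'   # same input as the new test below
    for token in tokenize(code, version_info=parse_version_string('3.8')):
        print(token.type, repr(token.string), token.start_pos)
    # With this fix the FSTRING_END token is reported at (1, 5), the column
    # of the closing quote, and the ENDMARKER follows at (1, 6).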
@@ -310,7 +310,7 @@ class FStringNode(object):
         return not self.is_in_expr() and self.format_spec_count
 
 
-def _close_fstring_if_necessary(fstring_stack, string, start_pos, additional_prefix):
+def _close_fstring_if_necessary(fstring_stack, string, line_nr, column, additional_prefix):
     for fstring_stack_index, node in enumerate(fstring_stack):
         lstripped_string = string.lstrip()
         len_lstrip = len(string) - len(lstripped_string)
@@ -318,7 +318,7 @@ def _close_fstring_if_necessary(fstring_stack, string, start_pos, additional_prefix):
             token = PythonToken(
                 FSTRING_END,
                 node.quote,
-                start_pos,
+                (line_nr, column + len_lstrip),
                 prefix=additional_prefix+string[:len_lstrip],
             )
             additional_prefix = ''
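For illustration (assumed example values, not part of the diff), the arithmetic on the changed line amounts to the following:

    # Sketch of the changed position arithmetic, with assumed example values.
    string = '  " tail'              # remainder of the physical line
    line_nr, column = 1, 4           # where that remainder starts

    lstripped_string = string.lstrip()
    len_lstrip = len(string) - len(lstripped_string)

    # The stripped whitespace stays in the token prefix; the FSTRING_END
    # position is advanced past it so it lands on the closing quote.
    assert (line_nr, column + len_lstrip) == (1, 6)
    assert string[:len_lstrip] == '  '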
@@ -474,7 +474,8 @@ def tokenize_lines(lines, version_info, start_pos=(1, 0)):
             fstring_end_token, additional_prefix, quote_length = _close_fstring_if_necessary(
                 fstring_stack,
                 rest,
-                (lnum, pos),
+                lnum,
+                pos,
                 additional_prefix,
             )
             pos += quote_length
@@ -414,3 +414,13 @@ def test_fstring(code, types, version_ge_py36):
 def test_fstring_assignment_expression(code, types, version_ge_py38):
     actual_types = [t.type for t in _get_token_list(code, version_ge_py38)]
     assert types + [ENDMARKER] == actual_types
+
+
+def test_fstring_end_error_pos(version_ge_py38):
+    f_start, f_string, bracket, f_end, endmarker = \
+        _get_token_list('f" { "', version_ge_py38)
+    assert f_start.start_pos == (1, 0)
+    assert f_string.start_pos == (1, 2)
+    assert bracket.start_pos == (1, 3)
+    assert f_end.start_pos == (1, 5)
+    assert endmarker.start_pos == (1, 6)
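As a follow-up note on the test input: in 'f" { "' the columns are 0 for f, 1 for the opening quote, 2 for a space, 3 for the bracket, 4 for another space, and 5 for the closing quote, with the end marker at column 6. The assertion f_end.start_pos == (1, 5) therefore checks that the FSTRING_END token lands exactly on the closing quote rather than before the stripped whitespace.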