mirror of https://github.com/davidhalter/parso.git (synced 2025-12-08)
--- a/conftest.py
+++ b/conftest.py
@@ -58,7 +58,9 @@ def pytest_generate_tests(metafunc):
     elif 'each_py3_version' in metafunc.fixturenames:
         metafunc.parametrize('each_py3_version', VERSIONS_3)
     elif 'version_ge_py36' in metafunc.fixturenames:
-        metafunc.parametrize('version_ge_py36', ['3.6', '3.7'])
+        metafunc.parametrize('version_ge_py36', ['3.6', '3.7', '3.8'])
+    elif 'version_ge_py38' in metafunc.fixturenames:
+        metafunc.parametrize('version_ge_py38', ['3.8'])
 
 
 class NormalizerIssueCase(object):
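The new version_ge_py38 fixture behaves like the existing version fixtures: any test that names it in its signature is parametrized by pytest_generate_tests over the listed versions. A minimal sketch of a consumer (the test name is hypothetical, not part of this change):

def test_some_py38_feature(version_ge_py38):
    # Runs once per entry in ['3.8']; the fixture value is the version string.
    assert version_ge_py38 == '3.8'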
--- a/parso/python/tokenize.py
+++ b/parso/python/tokenize.py
@@ -314,17 +314,19 @@ class FStringNode(object):
 
 def _close_fstring_if_necessary(fstring_stack, string, start_pos, additional_prefix):
     for fstring_stack_index, node in enumerate(fstring_stack):
-        if string.startswith(node.quote):
+        lstriped_string = string.lstrip()
+        len_lstrip = len(string) - len(lstriped_string)
+        if lstriped_string.startswith(node.quote):
             token = PythonToken(
                 FSTRING_END,
                 node.quote,
                 start_pos,
-                prefix=additional_prefix,
+                prefix=additional_prefix+string[:len_lstrip],
             )
             additional_prefix = ''
             assert not node.previous_lines
             del fstring_stack[fstring_stack_index:]
-            return token, '', len(node.quote)
+            return token, '', len(node.quote) + len_lstrip
     return None, additional_prefix, 0
 
 
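This hunk fixes issue #86: a closing quote preceded by whitespace inside an f-string expression, as in f"{ ""}", now still closes the f-string. The skipped whitespace is folded into the FSTRING_END token's prefix, and the returned quote length is extended by len_lstrip so the caller advances past it. A quick spot-check, assuming parso's tokenize(code, version_info) entry point (not shown in this diff):

from parso.python.tokenize import tokenize
from parso.utils import parse_version_string

# The first inner quote should now close the f-string; the remainder '"}"'
# is an ordinary STRING token (see the issue #86 test cases further down).
tokens = list(tokenize('f"{ ""}"', parse_version_string('3.6')))
print([(t.type, t.string) for t in tokens])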
@@ -482,6 +484,18 @@ def tokenize_lines(lines, version_info, start_pos=(1, 0)):
                     yield fstring_end_token
                     continue
 
-            pseudomatch = pseudo_token.match(line, pos)
+            # in an f-string, match until the end of the string
+            if fstring_stack:
+                string_line = line
+                for fstring_stack_node in fstring_stack:
+                    quote = fstring_stack_node.quote
+                    end_match = endpats[quote].match(line, pos)
+                    if end_match is not None:
+                        end_match_string = end_match.group(0)
+                        if len(end_match_string) - len(quote) + pos < len(string_line):
+                            string_line = line[:pos] + end_match_string[:-len(quote)]
+                pseudomatch = pseudo_token.match(string_line, pos)
+            else:
+                pseudomatch = pseudo_token.match(line, pos)
             if not pseudomatch:  # scan for tokens
                 match = whitespace.match(line, pos)
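The added block keeps pseudo_token from matching past the end of an open f-string: for each node on the stack, it looks for the nearest closing quote and, if one occurs before the end of the line, truncates the candidate line just before that quote. A toy walkthrough of the slicing, with a simplified end pattern (the real endpats also handle escapes and triple quotes):

import re

endpat = re.compile(r'[^"\n]*"')  # text up to and including the next closing "

line = 'f"{ ""}"\n'
quote = '"'
pos = 4                                 # suppose matching resumes at the inner quotes
end_match = endpat.match(line, pos)
end_match_string = end_match.group(0)   # '"': empty body plus the closing quote
# len(end_match_string) - len(quote) + pos == 4 < len(string_line), so truncate:
string_line = line[:pos] + end_match_string[:-len(quote)]
assert string_line == 'f"{ '            # pseudo_token cannot match past the f-string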
@@ -560,6 +574,11 @@ def tokenize_lines(lines, version_info, start_pos=(1, 0)):
                 new_line = True
             elif initial == '#':  # Comments
                 assert not token.endswith("\n")
-                additional_prefix = prefix + token
+                if fstring_stack and fstring_stack[-1].is_in_expr():
+                    # `#` is not allowed in f-string expressions
+                    yield PythonToken(ERRORTOKEN, initial, spos, prefix)
+                    pos = start + 1
+                else:
+                    additional_prefix = prefix + token
             elif token in triple_quoted:
                 endprog = endpats[token]
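Since # is not legal inside an f-string expression, the tokenizer now emits the lone # as an ERRORTOKEN and resumes immediately after it, instead of discarding the rest of the line as a comment. A hedged demonstration, under the same assumed tokenize entry point as above:

from parso.python.tokenize import tokenize
from parso.utils import parse_version_string

# Expect an ERRORTOKEN for '#' instead of '}"' being swallowed as comment text.
for token in tokenize('f"{1 # }"', parse_version_string('3.6')):
    print(token.type, repr(token.string))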
@@ -616,10 +635,13 @@ def tokenize_lines(lines, version_info, start_pos=(1, 0)):
                 else:
                     if paren_level:
                         paren_level -= 1
-            elif token == ':' and fstring_stack \
+            elif token.startswith(':') and fstring_stack \
                     and fstring_stack[-1].parentheses_count \
                     - fstring_stack[-1].format_spec_count == 1:
+                # `:` and `:=` both count
                 fstring_stack[-1].format_spec_count += 1
+                token = ':'
+                pos = start + 1
 
             yield PythonToken(OP, token, spos, prefix)
 
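This is the issue #87 fix: pseudo_token may match := as a single operator, but at format-spec depth (one more open parenthesis than recorded format specs) only the colon is the format-spec marker. The branch therefore truncates the token to ':' and rewinds with pos = start + 1, so the '=' is re-scanned as part of the format spec. A toy illustration of the rewind, with made-up positions:

# pseudo_token matched ':=' as one OP starting at column 10
token, start = ':=', 10
if token.startswith(':'):
    token = ':'         # only ':' is emitted as the OP token
    pos = start + 1     # '=' re-scans as format-spec text
assert (token, pos) == (':', 11)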
--- a/test/test_tokenize.py
+++ b/test/test_tokenize.py
@@ -385,8 +385,32 @@ def test_backslash():
             NAME, OP, FSTRING_START, FSTRING_STRING, OP, NAME, OP,
             FSTRING_STRING, OP, FSTRING_STRING, OP, NAME, OP, FSTRING_END, OP
         ]),
+        # issue #86, a string-like in an f-string expression
+        ('f"{ ""}"', [
+            FSTRING_START, OP, FSTRING_END, STRING
+        ]),
+        ('f"{ f""}"', [
+            FSTRING_START, OP, NAME, FSTRING_END, STRING
+        ]),
     ]
 )
 def test_fstring(code, types, version_ge_py36):
     actual_types = [t.type for t in _get_token_list(code, version_ge_py36)]
     assert types + [ENDMARKER] == actual_types
+
+
+@pytest.mark.parametrize(
+    ('code', 'types'), [
+        # issue #87, `:=` in the outermost parentheses should be tokenized
+        # as a format-spec marker and part of the format spec
+        ('f"{x:=10}"', [
+            FSTRING_START, OP, NAME, OP, FSTRING_STRING, OP, FSTRING_END
+        ]),
+        ('f"{(x:=10)}"', [
+            FSTRING_START, OP, OP, NAME, OP, NUMBER, OP, OP, FSTRING_END
+        ]),
+    ]
+)
+def test_fstring_assignment_expression(code, types, version_ge_py38):
+    actual_types = [t.type for t in _get_token_list(code, version_ge_py38)]
+    assert types + [ENDMARKER] == actual_types
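The new tests rely on the version_ge_py38 fixture added in the conftest.py hunk at the top. For a standalone spot-check outside pytest, assuming the same tokenize entry point as in the notes above:

from parso.python.tokenize import tokenize
from parso.utils import parse_version_string

# Mirrors test_fstring_assignment_expression: at format-spec depth ':=' splits
# into OP(':') plus FSTRING_STRING('=10'); inside parentheses it stays a walrus.
for code in ('f"{x:=10}"', 'f"{(x:=10)}"'):
    print(code, [t.type for t in tokenize(code, parse_version_string('3.8'))])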