commit b90e5cd758
parent ee2995c110
Author: yangyang
Date: 2019-12-05 19:01:20 +08:00

3 changed files with 56 additions and 8 deletions

conftest.py

@@ -58,7 +58,9 @@ def pytest_generate_tests(metafunc):
     elif 'each_py3_version' in metafunc.fixturenames:
         metafunc.parametrize('each_py3_version', VERSIONS_3)
     elif 'version_ge_py36' in metafunc.fixturenames:
-        metafunc.parametrize('version_ge_py36', ['3.6', '3.7'])
+        metafunc.parametrize('version_ge_py36', ['3.6', '3.7', '3.8'])
+    elif 'version_ge_py38' in metafunc.fixturenames:
+        metafunc.parametrize('version_ge_py38', ['3.8'])
 
 
 class NormalizerIssueCase(object):
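
The new version_ge_py38 fixture lets tests opt in to the Python 3.8 grammar, which is what the assignment-expression cases at the bottom of this commit need. A sketch of how a test consumes it (illustration only, not part of the commit):

    # pytest_generate_tests above parametrizes any test that requests the
    # fixture; here it runs once, with version_ge_py38 == '3.8'.
    import parso

    def test_needs_walrus(version_ge_py38):
        module = parso.parse('(x := 10)', version=version_ge_py38)
        assert module is not None  # the walrus only parses under 3.8+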

parso/python/tokenize.py

@@ -314,17 +314,19 @@ class FStringNode(object):
 def _close_fstring_if_necessary(fstring_stack, string, start_pos, additional_prefix):
     for fstring_stack_index, node in enumerate(fstring_stack):
-        if string.startswith(node.quote):
+        lstripped_string = string.lstrip()
+        len_lstrip = len(string) - len(lstripped_string)
+        if lstripped_string.startswith(node.quote):
             token = PythonToken(
                 FSTRING_END,
                 node.quote,
                 start_pos,
-                prefix=additional_prefix,
+                prefix=additional_prefix + string[:len_lstrip],
             )
             additional_prefix = ''
             assert not node.previous_lines
             del fstring_stack[fstring_stack_index:]
-            return token, '', len(node.quote)
+            return token, '', len(node.quote) + len_lstrip
     return None, additional_prefix, 0
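
Previously the closing quote had to be the very first character of the remaining string, so inputs like f"{ ""}" never closed. Now leading whitespace is skipped and folded into the FSTRING_END token's prefix, and the caller advances past it via the enlarged quote length. A quick check of the new behavior (a sketch against parso's internal tokenizer API as it stands at this commit):

    from parso.python.tokenize import tokenize
    from parso.utils import parse_version_string

    # The space before the closing quote becomes the prefix of
    # FSTRING_END instead of derailing the tokenizer.
    for t in tokenize('f"{ ""}"', parse_version_string('3.6')):
        print(t.type, repr(t.string), repr(t.prefix))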
@@ -482,6 +484,18 @@ def tokenize_lines(lines, version_info, start_pos=(1, 0)):
                     yield fstring_end_token
                     continue
 
-            pseudomatch = pseudo_token.match(line, pos)
+            # in an f-string, match until the end of the string
+            if fstring_stack:
+                string_line = line
+                for fstring_stack_node in fstring_stack:
+                    quote = fstring_stack_node.quote
+                    end_match = endpats[quote].match(line, pos)
+                    if end_match is not None:
+                        end_match_string = end_match.group(0)
+                        if len(end_match_string) - len(quote) + pos < len(string_line):
+                            string_line = line[:pos] + end_match_string[:-len(quote)]
+                pseudomatch = pseudo_token.match(string_line, pos)
+            else:
+                pseudomatch = pseudo_token.match(line, pos)
             if not pseudomatch:  # scan for tokens
                 match = whitespace.match(line, pos)
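
The idea: before handing a line to the catch-all pseudo_token regex, truncate it just before the innermost f-string's closing quote, so a string-like such as f"" inside f"{ f""}" cannot greedily consume the outer string's terminator as the start of a new literal. Illustration (sketch, same internal API as above):

    from parso.python.tokenize import tokenize
    from parso.utils import parse_version_string

    # With the clamp, only `f` matches (as NAME); the quote after it is
    # left for _close_fstring_if_necessary and becomes FSTRING_END.
    for t in tokenize('f"{ f""}"', parse_version_string('3.6')):
        print(t.type, repr(t.string))
    # Per the new test below: FSTRING_START, OP, NAME, FSTRING_END,
    # STRING, ENDMARKER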
@@ -560,6 +574,11 @@ def tokenize_lines(lines, version_info, start_pos=(1, 0)):
                 new_line = True
             elif initial == '#':  # Comments
                 assert not token.endswith("\n")
-                additional_prefix = prefix + token
+                if fstring_stack and fstring_stack[-1].is_in_expr():
+                    # `#` is not allowed in f-string expressions
+                    yield PythonToken(ERRORTOKEN, initial, spos, prefix)
+                    pos = start + 1
+                else:
+                    additional_prefix = prefix + token
             elif token in triple_quoted:
                 endprog = endpats[token]
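
CPython rejects `#` inside an f-string expression, so swallowing the rest of the line as a comment would desynchronize the tokenizer; emitting ERRORTOKEN for the single character and resuming right after it keeps the error local. Sketch:

    from parso.python.tokenize import tokenize
    from parso.utils import parse_version_string

    # The '#' is surfaced as ERRORTOKEN; tokenizing continues with the
    # character after it rather than discarding the rest of the line.
    for t in tokenize('f"{1 # oops}"', parse_version_string('3.6')):
        print(t.type, repr(t.string))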
@@ -616,10 +635,13 @@ def tokenize_lines(lines, version_info, start_pos=(1, 0)):
                 else:
                     if paren_level:
                         paren_level -= 1
-            elif token == ':' and fstring_stack \
+            elif token.startswith(':') and fstring_stack \
                     and fstring_stack[-1].parentheses_count \
                     - fstring_stack[-1].format_spec_count == 1:
+                # `:` and `:=` both count
                 fstring_stack[-1].format_spec_count += 1
+                token = ':'
+                pos = start + 1
             yield PythonToken(OP, token, spos, prefix)
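
When the replacement field's own `{` is the only open bracket, a token beginning with `:` must start the format spec, so a matched `:=` is split: the colon is kept as the format-spec marker (hence token = ':' and rewinding pos to just past it) and `=10` is re-read as part of the spec. Inside extra parentheses, `:=` stays a single walrus operator. Worked comparison (sketch):

    from parso.python.tokenize import tokenize
    from parso.utils import parse_version_string

    # f"{x:=10}"   -> f" { x : =10 } "     (':' opens the format spec)
    # f"{(x:=10)}" -> f" { ( x := 10 ) } " (parenthesized walrus)
    for code in ('f"{x:=10}"', 'f"{(x:=10)}"'):
        print([t.type for t in tokenize(code, parse_version_string('3.8'))])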

test/test_tokenize.py

@@ -385,8 +385,32 @@ def test_backslash():
         NAME, OP, FSTRING_START, FSTRING_STRING, OP, NAME, OP,
         FSTRING_STRING, OP, FSTRING_STRING, OP, NAME, OP, FSTRING_END, OP
     ]),
+    # issue #86, a string-like in an f-string expression
+    ('f"{ ""}"', [
+        FSTRING_START, OP, FSTRING_END, STRING
+    ]),
+    ('f"{ f""}"', [
+        FSTRING_START, OP, NAME, FSTRING_END, STRING
+    ]),
     ]
 )
 def test_fstring(code, types, version_ge_py36):
     actual_types = [t.type for t in _get_token_list(code, version_ge_py36)]
     assert types + [ENDMARKER] == actual_types
+
+
+@pytest.mark.parametrize(
+    ('code', 'types'), [
+        # issue #87, `:=` in the outermost parentheses should be tokenized
+        # as a format-spec marker and as part of the format spec
+        ('f"{x:=10}"', [
+            FSTRING_START, OP, NAME, OP, FSTRING_STRING, OP, FSTRING_END
+        ]),
+        ('f"{(x:=10)}"', [
+            FSTRING_START, OP, OP, NAME, OP, NUMBER, OP, OP, FSTRING_END
+        ]),
+    ]
+)
+def test_fstring_assignment_expression(code, types, version_ge_py38):
+    actual_types = [t.type for t in _get_token_list(code, version_ge_py38)]
+    assert types + [ENDMARKER] == actual_types
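
_get_token_list is defined earlier in test/test_tokenize.py and is not part of this diff; a minimal stand-in consistent with how the tests call it (an assumption, not the committed helper):

    from parso.python.tokenize import tokenize
    from parso.utils import parse_version_string

    def _get_token_list(string, version=None):
        # Hypothetical reconstruction: tokenize under the given version
        # and return every token, including the trailing ENDMARKER.
        return list(tokenize(string, parse_version_string(version)))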