Fix tokenizer: Carriage returns after backslashes were not properly handled

Dave Halter
2019-01-14 01:49:09 +01:00
parent d3d28480ed
commit d7171ae927
2 changed files with 6 additions and 1 deletion
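The underlying bug is a Python string-escape mixup: '\\r' is a backslash followed by the letter r, while '\\\r' is a backslash followed by an actual carriage return. A minimal sketch of the difference in plain Python (independent of parso's code; the variable name ends is only for illustration):

# The three line endings a trailing backslash may be followed by, as listed
# in the corrected check: backslash + LF, backslash + CRLF, backslash + CR.
ends = ('\\\n', '\\\r\n', '\\\r')

# The old tuple contained '\\r' instead of '\\\r', i.e. a backslash followed
# by the letter "r" rather than by a real carriage return, so a lone CR after
# a backslash never matched and was not treated as a continued statement.
print('\\r' == '\\\r')             # False: two different two-character strings
print('\\\r' in ends)              # True with the corrected tuple
print(list('\\r'), list('\\\r'))   # ['\\', 'r'] vs ['\\', '\r']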


@@ -573,7 +573,7 @@ def tokenize_lines(lines, version_info, start_pos=(1, 0)):
indents.append(indent)
break
yield PythonToken(NAME, token, spos, prefix)
-elif initial == '\\' and line[start:] in ('\\\n', '\\\r\n', '\\r'): # continued stmt
+elif initial == '\\' and line[start:] in ('\\\n', '\\\r\n', '\\\r'): # continued stmt
additional_prefix += prefix + line[start:]
break
else:

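With the corrected tuple, a backslash followed by a bare carriage return is absorbed into additional_prefix as a line continuation instead of falling through to the error branch. A rough external check, assuming parso's documented parse(), load_grammar() and get_code() API; this snippet is an illustration, not part of the commit:

import parso

# Source that continues a line with backslash + CR (not CRLF or LF).
code = 'x = 1 + \\\r2\n'

tree = parso.parse(code)
grammar = parso.load_grammar()

# With the fix, the bare-CR continuation should not be reported as an error,
# and the continuation survives in the prefix so the code round-trips.
print(list(grammar.iter_errors(tree)))   # expected to be empty
print(tree.get_code() == code)           # True: prefixes are preserved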

@@ -307,3 +307,8 @@ def test_form_feed():
assert error_token.prefix == '\f'
assert error_token.string == '"""'
assert endmarker.prefix == ''
+
+
+def test_carriage_return():
+    lst = _get_token_list(' =\\\rclass')
+    assert [t.type for t in lst] == [INDENT, OP, DEDENT, NAME, ENDMARKER]
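For reference, what the test input actually contains, spelled out character by character; _get_token_list and the token-type names come from parso's test module, and this plain-Python snippet only illustrates the input:

code = ' =\\\rclass'
print(list(code))   # [' ', '=', '\\', '\r', 'c', 'l', 'a', 's', 's']
# The leading space opens an indent (hence the INDENT and the later DEDENT),
# '=' becomes an OP, the backslash + CR pair is consumed as a line
# continuation in the prefix, and 'class' arrives as a NAME before ENDMARKER.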