Fix tokenizer: Carriage returns after backslashes were not properly handled

This commit is contained in:
Dave Halter
2019-01-14 01:49:09 +01:00
parent d3d28480ed
commit d7171ae927
2 changed files with 6 additions and 1 deletion

View File

@@ -307,3 +307,8 @@ def test_form_feed():
assert error_token.prefix == '\f'
assert error_token.string == '"""'
assert endmarker.prefix == ''
def test_carriage_return():
    """A carriage return right after a backslash must not break tokenizing.

    The backslash/CR pair previously confused the tokenizer; verify the
    resulting token stream has the expected shape.
    """
    tokens = _get_token_list(' =\\\rclass')
    expected_types = [INDENT, OP, DEDENT, NAME, ENDMARKER]
    assert [token.type for token in tokens] == expected_types