diff --git a/parso/python/tokenize.py b/parso/python/tokenize.py
index 40ad694..628e804 100644
--- a/parso/python/tokenize.py
+++ b/parso/python/tokenize.py
@@ -573,7 +573,7 @@ def tokenize_lines(lines, version_info, start_pos=(1, 0)):
                             indents.append(indent)
                             break
                 yield PythonToken(NAME, token, spos, prefix)
-            elif initial == '\\' and line[start:] in ('\\\n', '\\\r\n', '\\r'):  # continued stmt
+            elif initial == '\\' and line[start:] in ('\\\n', '\\\r\n', '\\\r'):  # continued stmt
                 additional_prefix += prefix + line[start:]
                 break
             else:
diff --git a/test/test_tokenize.py b/test/test_tokenize.py
index a3e1a88..7bf3da2 100644
--- a/test/test_tokenize.py
+++ b/test/test_tokenize.py
@@ -307,3 +307,8 @@ def test_form_feed():
     assert error_token.prefix == '\f'
     assert error_token.string == '"""'
     assert endmarker.prefix == ''
+
+
+def test_carriage_return():
+    lst = _get_token_list(' =\\\rclass')
+    assert [t.type for t in lst] == [INDENT, OP, DEDENT, NAME, ENDMARKER]
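
For context (not part of the patch): a minimal sketch of why the old membership test missed CR-terminated line continuations. The buggy tuple entry '\\r' is a backslash followed by the letter r, whereas a continuation ending in a bare carriage return is '\\\r', a backslash followed by an actual CR character. The sketch assumes, as the new test implies, that the tokenizer sees ' =\\\r' as one physical line of the input ' =\\\rclass'; the offsets below are illustrative, not taken from the patch.

# Sketch only -- not part of the patch above.
old_candidates = ('\\\n', '\\\r\n', '\\r')   # buggy third entry: backslash + letter 'r'
new_candidates = ('\\\n', '\\\r\n', '\\\r')  # fixed third entry: backslash + real CR

line = ' =\\\r'      # first physical line of the test input ' =\\\rclass'
start = 2            # offset of the continuation backslash (illustrative)
tail = line[start:]  # '\\\r': backslash + CR

assert tail not in old_candidates  # old check: continuation not recognized
assert tail in new_candidates      # fixed check: continuation recognized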