diff --git a/parso/python/tokenize.py b/parso/python/tokenize.py
index dec4363..36a6ad4 100644
--- a/parso/python/tokenize.py
+++ b/parso/python/tokenize.py
@@ -532,7 +532,7 @@ def tokenize_lines(lines, version_info, start_pos=(1, 0), indents=None, is_first
 
             if not pseudomatch:  # scan for tokens
                 match = whitespace.match(line, pos)
-                if pos == 0 and paren_level == 0 and not fstring_stack:
+                if new_line and paren_level == 0 and not fstring_stack:
                     for t in dedent_if_necessary(match.end()):
                         yield t
                 pos = match.end()
diff --git a/test/test_diff_parser.py b/test/test_diff_parser.py
index 7552df1..96d12db 100644
--- a/test/test_diff_parser.py
+++ b/test/test_diff_parser.py
@@ -1636,3 +1636,28 @@ def test_fstring_with_error_leaf(differ):
 
     differ.initialize(code1)
     differ.parse(code2, parsers=1, copies=1, expect_error_leaves=True)
+
+
+def test_yet_another_backslash(differ):
+    code1 = dedent('''\
+        def f():
+            x
+            def g():
+                y
+                base = "" \\
+                       "" % to
+                return
+        ''')
+    code2 = dedent('''\
+        def f():
+            x
+            def g():
+                y
+                base = "" \\
+                       \x0f
+                return
+        ''')
+
+    differ.initialize(code1)
+    differ.parse(code2, parsers=ANY, copies=ANY, expect_error_leaves=True)
+    differ.parse(code1, parsers=ANY, copies=ANY)
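
Not part of the patch: a minimal sketch of the input shape the new test exercises, using only parso's public parso.parse() entry point. The snippet and its helper name (iter_error_nodes) are illustrative assumptions, not code from this diff; it parses a backslash continuation followed by a line containing a stray control character and lists whatever error nodes the recovering parser produced.

# Illustrative only -- not part of the patch. Assumes parso is installed and
# that parso.parse() performs error recovery, returning a tree even for
# broken input.
import parso


def iter_error_nodes(node):
    # Recursively yield nodes left behind by parso's error recovery.
    if node.type in ('error_leaf', 'error_node'):
        yield node
    for child in getattr(node, 'children', []):
        yield from iter_error_nodes(child)


# Same shape as code2 in the new test: a backslash continuation followed by
# a line that holds only a stray control character (\x0f).
code = (
    'def f():\n'
    '    x\n'
    '    def g():\n'
    '        y\n'
    '        base = "" \\\n'
    '               \x0f\n'
    '        return\n'
)

module = parso.parse(code)
print(module.get_code() == code)                 # parso aims to round-trip the input exactly
print([n.type for n in iter_error_nodes(module)])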