Mirror of https://github.com/davidhalter/parso.git, synced 2025-12-16 01:17:13 +08:00.
Commit: "Fix tokenizer error tokens".
This commit is contained in:
@@ -532,7 +532,7 @@ def tokenize_lines(lines, version_info, start_pos=(1, 0), indents=None, is_first
|
||||
|
||||
if not pseudomatch: # scan for tokens
|
||||
match = whitespace.match(line, pos)
|
||||
if pos == 0 and paren_level == 0 and not fstring_stack:
|
||||
if new_line and paren_level == 0 and not fstring_stack:
|
||||
for t in dedent_if_necessary(match.end()):
|
||||
yield t
|
||||
pos = match.end()
|
||||
|
||||
@@ -1636,3 +1636,28 @@ def test_fstring_with_error_leaf(differ):
|
||||
|
||||
differ.initialize(code1)
|
||||
differ.parse(code2, parsers=1, copies=1, expect_error_leaves=True)
|
||||
|
||||
|
||||
def test_yet_another_backslash(differ):
    """Diff-parse round trip across a broken backslash continuation.

    ``code1`` is valid-looking source where a string assignment spans two
    physical lines via a backslash continuation.  ``code2`` replaces the
    continuation's second line with a lone ``\x0f`` control character, which
    must produce error leaves; parsing ``code1`` again afterwards must
    succeed cleanly.  (``differ`` is the diff-parser test fixture; ``ANY``
    matches any parser/copy count.)
    """
    valid = dedent('''\
        def f():
            x
            def g():
                y
                base = "" \\
                       "" % to
                return
        ''')
    broken = dedent('''\
        def f():
            x
            def g():
                y
                base = "" \\
                       \x0f
                return
        ''')

    differ.initialize(valid)
    differ.parse(broken, parsers=ANY, copies=ANY, expect_error_leaves=True)
    differ.parse(valid, parsers=ANY, copies=ANY)
|
||||
|
||||
Reference in New Issue
Block a user