Tokenizer: Add error dedents only if parens are not open

This commit is contained in:
Dave Halter
2020-03-28 14:41:10 +01:00
parent 8e49d8ab5f
commit cf880f43d4
2 changed files with 3 additions and 3 deletions

View File

@@ -498,7 +498,7 @@ def tokenize_lines(lines, version_info, start_pos=(1, 0)):
         pseudomatch = pseudo_token.match(line, pos)
         if not pseudomatch:  # scan for tokens
             match = whitespace.match(line, pos)
-            if pos == 0:
+            if pos == 0 and paren_level == 0:
                 for t in dedent_if_necessary(match.end()):
                     yield t
             pos = match.end()

View File

@@ -1007,7 +1007,7 @@ def test_random_unicode_characters(differ):
     differ.parse(s, parsers=1, expect_error_leaves=True)
     differ.parse('')
     differ.parse(s + '\n', parsers=1, expect_error_leaves=True)
-    differ.parse(u' result = (\r\f\x17\t\x11res)', parsers=2, expect_error_leaves=True)
+    differ.parse(u' result = (\r\f\x17\t\x11res)', parsers=1, expect_error_leaves=True)
     differ.parse('')
     differ.parse(' a( # xx\ndef', parsers=2, expect_error_leaves=True)
@@ -1124,7 +1124,7 @@ def test_all_sorts_of_indentation(differ):
         end
         ''')
     differ.initialize(code1)
-    differ.parse(code2, copies=1, parsers=4, expect_error_leaves=True)
+    differ.parse(code2, copies=1, parsers=1, expect_error_leaves=True)
     differ.parse(code1, copies=1, parsers=3, expect_error_leaves=True)
     code3 = dedent('''\