1
0
forked from VimPlug/jedi

Fix a nasty issue in the tokenizer. Fixes #836.

At the same time, this fixes a related issue where trailing newlines were not being cleaned up properly.
This commit is contained in:
Dave Halter
2017-01-24 00:50:37 +01:00
parent 741993a738
commit 09779c88aa
4 changed files with 29 additions and 13 deletions

View File

@@ -232,14 +232,11 @@ class Parser(object):
# If there's a statement that fails to be parsed, there
# will be no previous leaf. So just ignore it.
break
elif newline.value != '\n':
# TODO REMOVE, error recovery was simplified.
# This may happen if error correction strikes and removes
# a whole statement including '\n'.
break
else:
newline.value = ''
assert newline.value.endswith('\n')
newline.value = newline.value[:-1]
endmarker.start_pos = newline.start_pos
break
class ParserWithRecovery(Parser):

View File

@@ -259,8 +259,7 @@ def generate_tokens(readline, use_exact_op_types=False):
# line is an error token.
txt = line[pos:]
yield TokenInfo(ERRORTOKEN, txt, (lnum, pos), prefix)
pos += 1
continue
break
prefix = additional_prefix + pseudomatch.group(1)
additional_prefix = ''