Mirror of https://github.com/davidhalter/jedi.git, synced 2025-12-07 06:24:27 +08:00
Move toward ParserWithRecovery for the completion context.
It was simply not possible to do it with the normal parser, because of dedents.
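For context on the dedent problem: when the source is cut off at the completion position inside an indented block, the token stream ends with DEDENT tokens right before the ENDMARKER, which is presumably what the plain parser could not cope with here. A rough stand-alone illustration using the stdlib tokenize module (jedi ships its own tokenizer, so this only approximates the situation):

import io
import tokenize

# Source truncated at a completion position inside an indented block.
code = "if x:\n    foo = bar.\n"
for tok in tokenize.generate_tokens(io.StringIO(code).readline):
    print(tokenize.tok_name[tok.type], repr(tok.string))
# The stream ends with ... NEWLINE '\n', DEDENT '', ENDMARKER ''.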
@@ -135,22 +135,18 @@ def get_stack_at_position(grammar, code_lines, module, pos):
         for token_ in tokens:
             if token_.string == safeword:
                 raise EndMarkerReached()
-            elif token_.type == token.DEDENT and False:
-                # Ignore those. Error statements should not contain them, if
-                # they do it's for cases where an indentation happens and
-                # before the endmarker we still see them.
-                pass
             else:
                 yield token_

     code = _get_code_for_stack(code_lines, module, pos)
     # We use a word to tell Jedi when we have reached the start of the
     # completion.
-    safeword = 'XXX_USER_WANTS_TO_COMPLETE_HERE_WITH_JEDI'
-    # Remove as many indents from **all** code lines as possible.
-    code = dedent(code + safeword)
+    # Use Z as a prefix because it's not part of a number suffix.
+    safeword = 'ZZZ_USER_WANTS_TO_COMPLETE_HERE_WITH_JEDI'
+    code = code + safeword

-    p = parser.Parser(grammar, code, start_parsing=False)
+    p = parser.ParserWithRecovery(grammar, code, start_parsing=False)
     try:
         p.parse(tokenizer=tokenize_without_endmarker(code))
     except EndMarkerReached:
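The safeword mechanism in the hunk above can be reproduced outside of jedi: append a marker that cannot occur in ordinary code, tokenize, and abort with an exception the moment the marker shows up, so the parser never sees the trailing NEWLINE/DEDENT/ENDMARKER tokens behind the cursor. A minimal sketch using the stdlib tokenizer; EndMarkerReached and tokenize_without_endmarker mirror the names in the diff, everything else is illustrative only:

import io
import tokenize

safeword = 'ZZZ_USER_WANTS_TO_COMPLETE_HERE_WITH_JEDI'


class EndMarkerReached(Exception):
    pass


def tokenize_without_endmarker(code):
    # Yield tokens until the safeword appears, then bail out.
    for token_ in tokenize.generate_tokens(io.StringIO(code).readline):
        if token_.string == safeword:
            raise EndMarkerReached()
        yield token_


code = "import os\nos." + safeword
try:
    for t in tokenize_without_endmarker(code):
        print(tokenize.tok_name[t.type], t.string)
except EndMarkerReached:
    print('cursor position reached; everything before it has been tokenized')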
@@ -289,7 +289,8 @@ class ParserWithRecovery(Parser):
     :param module_path: The path of the module in the file system, may be None.
     :type module_path: str
     """
-    def __init__(self, grammar, source, module_path=None, tokenizer=None):
+    def __init__(self, grammar, source, module_path=None, tokenizer=None,
+                 start_parsing=True):
         self.syntax_errors = []

         self._omit_dedent_list = []
@@ -304,12 +305,16 @@ class ParserWithRecovery(Parser):
         # if self.options["print_function"]:
         #     python_grammar = pygram.python_grammar_no_print_statement
         # else:
-        super(ParserWithRecovery, self).__init__(grammar, source, tokenizer=tokenizer)
-
-        self.module = self._parsed
-        self.module.used_names = self._used_names
-        self.module.path = module_path
-        self.module.global_names = self._global_names
+        super(ParserWithRecovery, self).__init__(
+            grammar, source,
+            tokenizer=tokenizer,
+            start_parsing=start_parsing
+        )
+        if start_parsing:
+            self.module = self._parsed
+            self.module.used_names = self._used_names
+            self.module.path = module_path
+            self.module.global_names = self._global_names

     def parse(self, tokenizer):
         return super(ParserWithRecovery, self).parse(self._tokenize(self._tokenize(tokenizer)))
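The last two hunks add a start_parsing flag so that constructing a ParserWithRecovery no longer triggers an immediate parse; get_stack_at_position can build the parser first and then drive it with the safeword tokenizer. A toy version of that deferral pattern (the classes below are stand-ins for illustration, not jedi's actual Parser hierarchy):

class Parser(object):
    def __init__(self, source, tokenizer=None, start_parsing=True):
        self._source = source
        self._parsed = None
        if start_parsing:
            self.parse(tokenizer)

    def parse(self, tokenizer=None):
        # Stand-in for the real pgen-driven parse loop.
        tokens = tokenizer if tokenizer is not None else iter(self._source.split())
        self._parsed = list(tokens)
        return self._parsed


class ParserWithRecovery(Parser):
    def __init__(self, source, tokenizer=None, start_parsing=True):
        super(ParserWithRecovery, self).__init__(
            source, tokenizer=tokenizer, start_parsing=start_parsing)
        if start_parsing:
            # Module attributes only exist once a parse has actually run.
            self.module = self._parsed


# Deferred parsing: construct first, feed a custom token stream later.
p = ParserWithRecovery('x = 1', start_parsing=False)
p.parse(iter(['x', '=', '1']))

The if start_parsing guard in the diff serves the same purpose as in this sketch: self._parsed is only populated by parse(), so the module attributes would otherwise be set from an unparsed state at construction time.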