
Starting to create a way for context-sensitive completions to be made.

This involves working heavily with the pgen2 parser. We use its parsing stack to check which tokens/keywords are possible at the current position.
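Roughly, the new helpers below are meant to be combined as in the following sketch. This is not part of the commit: the module path jedi/api/helpers.py, load_grammar() and Parser(...).module are assumed from the surrounding code base and may differ in this revision.

from jedi import parser
from jedi.api import helpers

source = "if foo:\n    import "   # cursor sits right after 'import '
pos = (2, 11)

grammar = parser.load_grammar()
# Error recovery is expected to record the incomplete statement on the module.
module = parser.Parser(grammar, source).module

# Re-parse just the broken statement up to the cursor, keeping the pgen2 stack,
# then read the allowed keywords and token types off the stack's current arcs.
stack = helpers.get_stack_at_position(grammar, module, pos)
keywords, grammar_labels = helpers.get_possible_completion_types(grammar, stack)
print(keywords, grammar_labels)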
Dave Halter
2016-05-23 18:11:44 +02:00
parent 36a135c347
commit d4a10929e2
8 changed files with 264 additions and 59 deletions


@@ -4,8 +4,11 @@ Helpers for the API
import re
from collections import namedtuple
from jedi import common
from jedi.parser import tree as pt
from jedi.evaluate import imports
from jedi import parser
from jedi.parser import tokenize, token
CompletionParts = namedtuple('CompletionParts', ['path', 'has_dot', 'name'])
@@ -46,6 +49,83 @@ def check_error_statements(module, pos):
    return None, 0, False, False


def get_code_until(code, start_pos, end_pos):
    lines = common.splitlines(code)
    line_difference = end_pos[0] - start_pos[0]
    if line_difference == 0:
        end_line_length = end_pos[1] - start_pos[1]
    else:
        end_line_length = end_pos[1]

    if line_difference > len(lines) or end_line_length > len(lines[-1]):
        raise ValueError("The end_pos seems to be after the code part.")
    new_lines = lines[:line_difference] + [lines[-1][:end_line_length]]
    return '\n'.join(new_lines)
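As a quick illustration of the position handling, a hypothetical call (assuming the hunk belongs to jedi/api/helpers.py; positions are 1-based (line, column) pairs):

from jedi.api.helpers import get_code_until

code = "import os\nimport sys"
# Keep everything between (1, 0) and line 2, column 6.
print(repr(get_code_until(code, (1, 0), (2, 6))))   # 'import os\nimport'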
def get_stack_at_position(grammar, module, pos):
    """
    Returns the possible node names (e.g. import_from, xor_test or yield_stmt).
    """
    for error_statement in module.error_statement_stacks:
        if error_statement.first_pos < pos <= error_statement.next_start_pos:
            code = error_statement.get_code()
            code = get_code_until(code, error_statement.first_pos, pos)
            break
    else:
        raise NotImplementedError

    class EndMarkerReached(Exception):
        pass

    def tokenize_without_endmarker(code):
        for token_ in tokenize.source_tokens(code):
            if token_[0] == token.ENDMARKER:
                raise EndMarkerReached()
            else:
                yield token_
    # Raising inside the tokenizer stops parsing right at the cursor, so the
    # pgen2 stack still describes the grammar state at that position.
    p = parser.Parser(grammar, code, tokenizer=tokenize_without_endmarker(code),
                      start_parsing=False)
    try:
        p.parse()
    except EndMarkerReached:
        return p.pgen_parser.stack
def get_possible_completion_types(grammar, stack):
    def add_results(label_index):
        try:
            grammar_labels.append(inversed_tokens[label_index])
        except KeyError:
            try:
                keywords.append(inversed_keywords[label_index])
            except KeyError:
                t, v = grammar.labels[label_index]
                assert t >= 256
                # See if it's a symbol and if we're in its first set
                itsdfa = grammar.dfas[t]
                itsstates, itsfirst = itsdfa
                for first_label_index in itsfirst.keys():
                    add_results(first_label_index)

    dfa, state, node = stack[-1]
    states, first = dfa
    arcs = states[state]

    inversed_keywords = dict((v, k) for k, v in grammar.keywords.items())
    inversed_tokens = dict((v, k) for k, v in grammar.tokens.items())

    keywords = []
    grammar_labels = []
    for label_index, new_state in arcs:
        add_results(label_index)

    return keywords, grammar_labels
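The label bookkeeping above can be exercised against the stdlib copy of pgen2 in lib2to3, whose grammar tables have the same shape as jedi's fork (lib2to3 and the import_stmt symbol are not part of this commit, just an illustration):

from lib2to3 import pygram
from lib2to3.pgen2 import token

grammar = pygram.python_grammar
inversed_keywords = dict((v, k) for k, v in grammar.keywords.items())
inversed_tokens = dict((v, k) for k, v in grammar.tokens.items())

# Resolve the first set of a non-terminal the same way add_results() does.
states, first = grammar.dfas[grammar.symbol2number['import_stmt']]
for label_index in first:
    if label_index in inversed_keywords:
        print('keyword:', inversed_keywords[label_index])
    elif label_index in inversed_tokens:
        print('token:', token.tok_name[inversed_tokens[label_index]])
# Prints the keywords 'import' and 'from', exactly what may start an import_stmt.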
def importer_from_error_statement(error_statement, pos):
    def check_dotted(children):
        for name in children[::2]: