
Get the first dict completions passing

Dave Halter
2019-09-22 11:13:56 +02:00
parent e86a2ec566
commit e8afb46cde
5 changed files with 69 additions and 31 deletions


@@ -10,6 +10,7 @@ from jedi import settings
 from jedi.api import classes
 from jedi.api import helpers
 from jedi.api import keywords
+from jedi.api.dicts import completions_for_dicts
 from jedi.api.file_name import file_name_completions
 from jedi.inference import imports
 from jedi.inference.helpers import infer_call_of_leaf, parse_dotted_names
@@ -177,19 +178,20 @@ class Completion:
         if not current_line or current_line[-1] in ' \t.;':
             completion_names += self._get_keyword_completion_names(allowed_transitions)
+        nodes = _gather_nodes(stack)
+        if nodes[-1] == '[' and stack[-1].nonterminal == 'trailer':
+            bracket = self._module_node.get_leaf_for_position(self._position, include_prefixes=True)
+            context = self._module_context.create_context(bracket)
+            values = infer_call_of_leaf(context, bracket.get_previous_leaf())
+            completion_names += completions_for_dicts(values)
         if any(t in allowed_transitions for t in (PythonTokenTypes.NAME,
                                                   PythonTokenTypes.INDENT)):
             # This means that we actually have to do type inference.
             nonterminals = [stack_node.nonterminal for stack_node in stack]
-            nodes = []
-            for stack_node in stack:
-                if stack_node.dfa.from_rule == 'small_stmt':
-                    nodes = []
-                else:
-                    nodes += stack_node.nodes
             if nodes and nodes[-1] in ('as', 'def', 'class'):
                 # No completions for ``with x as foo`` and ``import x as foo``.
                 # Also true for defining names as a class or function.
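The added block above only kicks in when the last gathered node is a ``[`` inside a ``trailer``, i.e. the cursor sits directly behind the opening bracket of a subscript; the value in front of the bracket is then inferred and its dict keys are offered as completions. A rough usage sketch with the same ``Script(...).completions()`` API the tests below use (assuming the pre-0.16 jedi API; results at this work-in-progress revision may differ, and global name completions are mixed in):

```python
import jedi

# Hypothetical usage sketch, not part of the commit: complete dict keys right
# after the "[" of a subscript.
source = "d = {'alpha': 1, 'beta': 2}\nd["
script = jedi.Script(source, line=2, column=2)  # cursor directly behind "["

for completion in script.completions():
    # Besides the two dict keys, global name completions show up as well,
    # which is what the Ellipsis marker in the tests below stands for.
    print(completion.complete)
```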
@@ -282,6 +284,16 @@ class Completion:
                     yield name
+def _gather_nodes(stack):
+    nodes = []
+    for stack_node in stack:
+        if stack_node.dfa.from_rule == 'small_stmt':
+            nodes = []
+        else:
+            nodes += stack_node.nodes
+    return nodes
 def _extract_string_while_in_string(leaf, position):
     if leaf.type == 'string':
         match = re.match(r'^\w*(\'{3}|"{3}|\'|")', leaf.value)
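``_gather_nodes`` is the old inline loop factored out so the bracket check above can reuse it: it collects the nodes of the statement currently being typed and starts over whenever it crosses a ``small_stmt`` on the parser stack. A stand-alone illustration with fake stack objects (parso's real stack nodes look different; these namedtuples only mimic the two attributes the loop touches):

```python
from collections import namedtuple

# Fake stand-ins for the parser stack nodes, just to show the
# reset-at-small_stmt behaviour of _gather_nodes.
FakeDFA = namedtuple('FakeDFA', 'from_rule')
FakeStackNode = namedtuple('FakeStackNode', 'dfa nodes')


def gather_nodes(stack):
    # Same logic as _gather_nodes above.
    nodes = []
    for stack_node in stack:
        if stack_node.dfa.from_rule == 'small_stmt':
            nodes = []
        else:
            nodes += stack_node.nodes
    return nodes


stack = [
    FakeStackNode(FakeDFA('file_input'), ['previous', 'statement']),
    FakeStackNode(FakeDFA('small_stmt'), []),   # everything before this is discarded
    FakeStackNode(FakeDFA('trailer'), ['d', '[']),
]
print(gather_nodes(stack))  # ['d', '[']
```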

jedi/api/dicts.py Normal file

@@ -0,0 +1,19 @@
+from jedi.inference.names import AbstractArbitraryName
+_sentinel = object()
+class F(AbstractArbitraryName):
+    api_type = u'path'
+    is_value_name = False
+def completions_for_dicts(dicts, literal_string):
+    for dct in dicts:
+        if dct.array_type == 'dict':
+            for key in dct.get_key_values():
+                dict_key = key.get_safe_value(default=_sentinel)
+                if dict_key is not _sentinel:
+                    dict_key_str = str(dict_key)
+                    if dict_key_str.startswith(literal_string):
+                        yield F(dct.inference_state, dict_key_str[len(literal_string):])
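``completions_for_dicts`` walks every inferred value, keeps the ones whose ``array_type`` is ``'dict'``, and for each literal key that starts with what has already been typed yields only the remaining suffix, wrapped in the name class ``F``. A simplified stand-in on plain dicts showing just that prefix logic (``complete_dict_keys`` is a made-up name, and real jedi keys are value objects, not plain Python values):

```python
def complete_dict_keys(plain_dicts, literal_string):
    """Simplified stand-in for completions_for_dicts, working on plain dicts."""
    for dct in plain_dicts:
        for key in dct:
            key_str = str(key)
            if key_str.startswith(literal_string):
                # Yield only the part that still has to be typed.
                yield key_str[len(literal_string):]


print(list(complete_dict_keys([{'alpha': 1, 'beta': 2}], 'al')))   # ['pha']
print(list(complete_dict_keys([{1: 'x', 50: 'y'}], '5')))          # ['0']
```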


@@ -193,6 +193,9 @@ class DictModification(_Modification):
             yield lazy_context
         yield self._contextualized_key
+    def get_key_values(self):
+        return self._wrapped_value.get_key_values() | self._contextualized_key.infer()
 class ListModification(_Modification):
     def py__iter__(self):
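``DictModification`` wraps a dict that had an extra key assigned to it somewhere else in the code, so its keys are the wrapped dict's keys plus the inferred assigned key; the new ``get_key_values`` expresses that as a ``ValueSet`` union. A plain-set analogy (the real method unions ``ValueSet`` objects, not Python sets):

```python
# Plain-set analogy for DictModification.get_key_values: the keys of a
# modified dict are the original keys plus the key assigned later.
wrapped_keys = {'alpha', 'beta'}    # stands in for self._wrapped_value.get_key_values()
assigned_key = {'gamma'}            # stands in for self._contextualized_key.infer()

print(sorted(wrapped_keys | assigned_key))  # ['alpha', 'beta', 'gamma']
```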


@@ -245,7 +245,17 @@ class GeneratorComprehension(_BaseComprehension, GeneratorBase):
     pass
-class DictComprehension(ComprehensionMixin, Sequence):
+class _DictKeyMixin(object):
+    # TODO merge with _DictMixin?
+    def get_mapping_item_values(self):
+        return self._dict_keys(), self._dict_values()
+    def get_key_values(self):
+        # TODO merge with _dict_keys?
+        return self._dict_keys()
+class DictComprehension(ComprehensionMixin, Sequence, _DictKeyMixin):
     array_type = u'dict'
     def __init__(self, inference_state, defining_context, sync_comp_for_node, key_node, value_node):
@@ -295,9 +305,6 @@ class DictComprehension(ComprehensionMixin, Sequence):
         return ValueSet([FakeList(self.inference_state, lazy_values)])
-    def get_mapping_item_values(self):
-        return self._dict_keys(), self._dict_values()
     def exact_key_items(self):
         # NOTE: A smarter thing can probably done here to achieve better
         # completions, but at least like this jedi doesn't crash
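``_DictKeyMixin`` centralizes the two key-related entry points that ``DictComprehension``, ``DictLiteralValue`` and ``FakeDict`` previously each defined on their own: ``get_mapping_item_values`` (keys and values) and the new ``get_key_values`` (keys only, which is what the dict completions above consume). A minimal mock with fake data showing how both derive from the same ``_dict_keys``/``_dict_values`` pair (the class names and data here are made up; the real methods return ``ValueSet`` objects):

```python
class DictKeyMixin(object):
    # Mirrors _DictKeyMixin from the diff.
    def get_mapping_item_values(self):
        return self._dict_keys(), self._dict_values()

    def get_key_values(self):
        return self._dict_keys()


class DemoDictValue(DictKeyMixin):
    # Fake value class just for illustration; real subclasses are
    # DictComprehension, DictLiteralValue and FakeDict.
    def _dict_keys(self):
        return {'alpha', 'beta'}

    def _dict_values(self):
        return {1, 2}


v = DemoDictValue()
print(sorted(v.get_key_values()))        # ['alpha', 'beta']
keys, values = v.get_mapping_item_values()
print(sorted(keys), sorted(values))      # ['alpha', 'beta'] [1, 2]
```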
@@ -408,7 +415,7 @@ class SequenceLiteralValue(Sequence):
         return "<%s of %s>" % (self.__class__.__name__, self.atom)
-class DictLiteralValue(_DictMixin, SequenceLiteralValue):
+class DictLiteralValue(_DictMixin, SequenceLiteralValue, _DictKeyMixin):
     array_type = u'dict'
     def __init__(self, inference_state, defining_context, atom):
@@ -473,9 +480,6 @@ class DictLiteralValue(_DictMixin, SequenceLiteralValue):
             for k, v in self.get_tree_entries()
         )
-    def get_mapping_item_values(self):
-        return self._dict_keys(), self._dict_values()
 class _FakeSequence(Sequence):
     def __init__(self, inference_state, lazy_value_list):
@@ -511,7 +515,7 @@ class FakeList(_FakeSequence):
     array_type = u'tuple'
-class FakeDict(_DictMixin, Sequence):
+class FakeDict(_DictMixin, Sequence, _DictKeyMixin):
     array_type = u'dict'
     def __init__(self, inference_state, dct):
@@ -555,9 +559,6 @@ class FakeDict(_DictMixin, Sequence):
     def _dict_keys(self):
         return ValueSet.from_sets(lazy_value.infer() for lazy_value in self.py__iter__())
-    def get_mapping_item_values(self):
-        return self._dict_keys(), self._dict_values()
     def exact_key_items(self):
         return self._dct.items()


@@ -269,22 +269,22 @@ def test_file_path_completions(Script, file, code, column, expected):
 @pytest.mark.parametrize(
     'added_code, column, expected', [
-        ('ints[', 5, ['1', '50']),
-        ('ints[]', 5, ['1', '50']),
-        ('ints[1]', 5, ['1']),
+        ('ints[', 5, ['1', '50', Ellipsis]),
+        ('ints[]', 5, ['1', '50', Ellipsis]),
+        ('ints[1]', 5, ['1', '50', Ellipsis]),
         ('ints[1]', 6, ['']),
-        ('ints[1', 5, ['1']),
+        ('ints[1', 5, ['1', Ellipsis]),
         ('ints[1', 6, ['']),
-        ('ints[5]', 5, ['1']),
+        ('ints[5]', 5, ['1', Ellipsis]),
         ('ints[5]', 6, ['0']),
-        ('ints[50', 5, ['50']),
+        ('ints[50', 5, ['50', Ellipsis]),
         ('ints[5', 6, ['0']),
         ('ints[50', 6, ['0']),
         ('ints[50', 7, ['']),
-        ('strs[', 5, ["'asdf'", "'foo'", "'fbar'"]),
-        ('strs[]', 5, ["'asdf'", "'foo'", "'fbar'"]),
+        ('strs[', 5, ["'asdf'", "'foo'", "'fbar'", Ellipsis]),
+        ('strs[]', 5, ["'asdf'", "'foo'", "'fbar'", Ellipsis]),
         ("strs[']", 6, ["asdf'", "foo'", "fbar'"]),
         ('strs["]', 6, ['asdf"', 'foo"', 'fbar"']),
         ('strs["""]', 6, ['asdf', 'foo', 'fbar']),
@@ -322,7 +322,10 @@ def test_dict_keys_completions(Script, added_code, column, expected, skip_pre_py
         raise NotImplementedError
     line, column = column
     comps = Script(code + added_code, line=line, column=column).completions()
-    if expected == "A LOT":
-        assert len(comps) > 100  # This is basically global completions.
-    else:
-        assert [c.complete for c in comps] == expected
+    if Ellipsis in expected:
+        # This means that global completions are part of this, so filter all of
+        # that out.
+        comps = [c for c in comps if not c._name.is_value_name]
+        expected = [e for e in expected if e is not Ellipsis]
+    assert [c.complete for c in comps] == expected
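In the reworked test, ``Ellipsis`` inside an expected list marks "plus global name completions on top of the listed dict keys"; before comparing, those global completions are filtered out via the ``is_value_name`` flag (which the dict-key name class ``F`` sets to ``False``) and the marker is dropped. A self-contained sketch of just that filtering step, with made-up ``(text, is_value_name)`` tuples standing in for real jedi ``Completion`` objects:

```python
# Stand-alone sketch of the test's Ellipsis handling; the tuples are fake data.
expected = ['1', '50', Ellipsis]
comps = [('1', False), ('50', False), ('abs', True), ('all', True)]

if Ellipsis in expected:
    # Global completions are part of the result, so filter all of that out.
    comps = [c for c in comps if not c[1]]
    expected = [e for e in expected if e is not Ellipsis]

assert [text for text, _ in comps] == expected
print([text for text, _ in comps])  # ['1', '50']
```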