
List comprehensions now at least don't cause errors anymore.
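For context, a minimal sketch (not part of the commit) of the kind of source this change is about: asking for completions on an element of a list comprehension, which previously made the evaluator raise. The script below assumes a jedi checkout at this commit and the 2016-era Script(source, line, column, path) API.

import jedi  # assumes a jedi checkout at this commit

source = '''\
foo = [1, 2, 3]
bar = [x for x in foo]
bar[0].'''

# Complete after "bar[0]." on line 3; this exercises
# ListComprehension.py__getitem__ and Comprehension._nested.
script = jedi.Script(source, 3, len('bar[0].'), 'example.py')
print([c.name for c in script.completions()])  # int attributes, if inference succeeds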

Dave Halter
2016-11-16 09:43:45 +01:00
parent f672b367da
commit af7c13d2e6
7 changed files with 65 additions and 53 deletions

View File

@@ -365,7 +365,7 @@ class Evaluator(object):
                 pass
             if comp_for.type == 'comp_for':
-                return set([iterable.Comprehension.from_atom(self, atom)])
+                return set([iterable.Comprehension.from_atom(self, context, atom)])
         return set([iterable.ArrayLiteralContext(self, context, atom)])

     def eval_trailer(self, context, types, trailer):

View File

@@ -43,11 +43,6 @@ class Context(object):
     def eval_stmt(self, stmt, seek_name=None):
         return self.evaluator.eval_statement(self, stmt, seek_name)

-    def py__getattribute__(self, name_str, position=None,
-                           search_global=False, is_goto=False):
-        return self.evaluator.find_types(self, name_str, position,
-                                         search_global, is_goto)
-

 class TreeContext(Context):
     pass

View File

@@ -114,7 +114,7 @@ class AbstractUsedNamesFilter(AbstractFilter):
         super(AbstractUsedNamesFilter, self).__init__(origin_scope)
         self._parser_scope = parser_scope
         self._used_names = self._parser_scope.get_root_node().used_names
-        self._context = context
+        self.context = context

     def get(self, name):
         try:
@@ -125,7 +125,7 @@ class AbstractUsedNamesFilter(AbstractFilter):
         return self._convert_names(self._filter(names))

     def _convert_names(self, names):
-        return [self.name_class(self._context, name) for name in names]
+        return [self.name_class(self.context, name) for name in names]

     def values(self):
         return self._convert_names(name for name_list in self._used_names.values()
@@ -140,14 +140,16 @@ class ParserTreeFilter(AbstractUsedNamesFilter):
     def _filter(self, names):
         names = super(ParserTreeFilter, self)._filter(names)
         names = [n for n in names if n.is_definition() and n.parent.type != 'trailer']
-        names = [n for n in names if n.parent.get_parent_scope() == self._parser_scope]
+        names = [n for n in names
+                 if ((n.parent if n.parent.type in ('classdef', 'funcdef') else n)
+                     .get_parent_scope() == self._parser_scope)]
         return list(self._check_flows(names))

     def _check_flows(self, names):
         for name in sorted(names, key=lambda name: name.start_pos, reverse=True):
             check = flow_analysis.reachability_check(
-                self._context, self._parser_scope, name, self._origin_scope
+                self.context, self._parser_scope, name, self._origin_scope
             )
             if check is not flow_analysis.UNREACHABLE:
                 yield name
@@ -174,9 +176,9 @@ class FunctionExecutionFilter(ParserTreeFilter):
         for name in names:
             param = search_ancestor(name, 'param')
             if param:
-                yield self.param_name(self._context, name)
+                yield self.param_name(self.context, name)
             else:
-                yield TreeNameDefinition(self._context, name)
+                yield TreeNameDefinition(self.context, name)


 class AnonymousInstanceFunctionExecutionFilter(FunctionExecutionFilter):

View File

@@ -130,7 +130,6 @@ class NameFinder(object):
                 analysis.add(self._evaluator, 'name-error', self.name_str,
                              message)

-        debug.dbg('finder._names_to_types: %s -> %s', names, types)
         return types

     def get_filters(self, search_global=False):
@@ -245,9 +244,23 @@ class NameFinder(object):
         """
         names = []
         for filter in filters:
-            names = filter.get(self.name_str)
-            if names:
-                break
+            if self._evaluator.predefined_if_name_dict_dict:
+                node = self.name_str
+                while node is not None and not isinstance(node, tree.IsScope):
+                    node = node.parent
+                    if node.type in ("if_stmt", "for_stmt", "comp_for"):
+                        try:
+                            name_dict = self._evaluator.predefined_if_name_dict_dict[node]
+                            types = set(name_dict[str(self.name_str)])
+                        except KeyError:
+                            continue
+                        else:
+                            self._found_predefined_if_name = types
+                            return []
+            else:
+                names = filter.get(self.name_str)
+                if names:
+                    break
         debug.dbg('finder.filter_name "%s" in (%s): %s@%s', self.name_str,
                   self.context, names, self.position)
         return list(self._clean_names(names))
@@ -313,6 +326,7 @@ class NameFinder(object):
             else:
                 types |= set(new_types)

+        debug.dbg('finder._names_to_types: %s -> %s', names, types)
         if not names and isinstance(self.context, AbstractInstanceContext):
             # handling __getattr__ / __getattribute__
             return self._check_getattr(self.context)
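The filter_name change above is the heart of the fix: while a comprehension is being iterated, the evaluator pins the loop variable's inferred types in predefined_if_name_dict_dict, and name lookup must consult that mapping before the usual scope filters. A rough, self-contained analogy of that lookup order (the Node class and resolve function are hypothetical stand-ins, not jedi's classes):

class Node:
    """Minimal stand-in for a parser-tree node (hypothetical)."""
    def __init__(self, type, parent=None, is_scope=False):
        self.type = type
        self.parent = parent
        self.is_scope = is_scope

predefined = {}  # if_stmt/for_stmt/comp_for node -> {name: inferred types}

def resolve(name, leaf, filters):
    # Walk from the name's leaf up to the enclosing scope; a binding
    # pinned on any if_stmt/for_stmt/comp_for on the way wins.
    node = leaf
    while node is not None and not node.is_scope:
        node = node.parent
        if node is not None and node.type in ("if_stmt", "for_stmt", "comp_for"):
            try:
                return predefined[node][name]
            except KeyError:
                continue
    # No pinned binding: fall back to the normal scope filters.
    for f in filters:
        names = f.get(name)
        if names:
            return names
    return set()

# A comp_for has pinned "x" to {int} while the comprehension is iterated:
module = Node('file_input', is_scope=True)
comp = Node('comp_for', parent=module)
leaf = Node('name', parent=comp)
predefined[comp] = {'x': {int}}
print(resolve('x', leaf, filters=[]))  # {<class 'int'>}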

View File

@@ -276,7 +276,7 @@ class InstanceClassFilter(filters.ParserTreeFilter):
     def _equals_origin_scope(self):
         node = self._origin_scope
         while node is not None:
-            if node == self._parser_scope or node == self._context:
+            if node == self._parser_scope or node == self.context:
                 return True
             node = node.get_parent_scope()
         return False
@@ -293,7 +293,7 @@ class InstanceClassFilter(filters.ParserTreeFilter):
         return names

     def _convert_names(self, names):
-        return [self.name_class(self._context, self._class_context, name) for name in names]
+        return [self.name_class(self.context, self._class_context, name) for name in names]


 class SelfNameFilter(InstanceClassFilter):
@@ -316,7 +316,7 @@ class SelfNameFilter(InstanceClassFilter):
                 if name.is_definition() and self._access_possible(name):
                     yield name
                     continue
-                init_execution = self._context.get_init_function()
+                init_execution = self.context.get_init_function()
                 # Hopefully we can somehow change this.
                 if init_execution is not None and \
                         init_execution.start_pos < name.start_pos < init_execution.end_pos:

View File

@@ -37,6 +37,9 @@ from jedi.evaluate import precedence


 class AbstractSequence(context.Context):
+    def __init__(self, evaluator):
+        super(AbstractSequence, self).__init__(evaluator, evaluator.BUILTINS)
+
     def get_filters(self, search_global, until_position=None, origin_scope=None):
         raise NotImplementedError

@@ -117,7 +120,7 @@ def register_builtin_method(method_name, python_version_match=None):

 @has_builtin_methods
 class GeneratorMixin(object):
-    type = None
+    array_type = None

     @register_builtin_method('send')
     @register_builtin_method('next', python_version_match=2)
@@ -145,7 +148,7 @@ class GeneratorMixin(object):
         return gen_obj.py__class__()


-class Generator(context.Context, GeneratorMixin):
+class Generator(GeneratorMixin, context.Context):
     """Handling of `yield` functions."""

     def __init__(self, evaluator, func_execution_context):
@@ -159,9 +162,9 @@ class Generator(context.Context, GeneratorMixin):
         return "<%s of %s>" % (type(self).__name__, self._func_execution_context)


-class Comprehension(object):
+class Comprehension(AbstractSequence):
     @staticmethod
-    def from_atom(evaluator, atom):
+    def from_atom(evaluator, context, atom):
         bracket = atom.children[0]
         if bracket == '{':
             if atom.children[1].children[1] == ':':
@@ -172,10 +175,11 @@ class Comprehension(object):
                 cls = GeneratorComprehension
         elif bracket == '[':
             cls = ListComprehension
-        return cls(evaluator, atom)
+        return cls(evaluator, context, atom)

-    def __init__(self, evaluator, atom):
-        self.evaluator = evaluator
+    def __init__(self, evaluator, defining_context, atom):
+        super(Comprehension, self).__init__(evaluator)
+        self._defining_context = defining_context
         self._atom = atom

     def _get_comprehension(self):
@@ -209,20 +213,21 @@ class Comprehension(object):
         evaluator = self.evaluator
         comp_for = comp_fors[0]
         input_node = comp_for.children[3]
-        input_types = evaluator.eval_element(input_node)
+        input_types = self._defining_context.eval_node(input_node)

         iterated = py__iter__(evaluator, input_types, input_node)
         exprlist = comp_for.children[1]
-        for i, types in enumerate(iterated):
+        for i, lazy_context in enumerate(iterated):
+            types = lazy_context.infer()
             evaluator.predefined_if_name_dict_dict[comp_for] = \
                 unpack_tuple_to_dict(evaluator, types, exprlist)
             try:
                 for result in self._nested(comp_fors[1:]):
                     yield result
             except IndexError:
-                iterated = evaluator.eval_element(self._eval_node())
+                iterated = self._defining_context.eval_node(self._eval_node())
                 if self.array_type == 'dict':
-                    yield iterated, evaluator.eval_element(self._eval_node(2))
+                    yield iterated, self._defining_context.eval_node(self._eval_node(2))
                 else:
                     yield iterated
             finally:
@@ -236,8 +241,8 @@ class Comprehension(object):
             yield result

     def py__iter__(self):
-        raise NotImplementedError
-        return self._iterate()
+        for set_ in self._iterate():
+            yield context.LazyKnownContexts(set_)

     def __repr__(self):
         return "<%s of %s>" % (type(self).__name__, self._atom)
@@ -292,28 +297,24 @@ class DICT(object):
         return create_evaluated_sequence_set(self.evaluator, *items, sequence_type='list')


-class ListComprehension(Comprehension, ArrayMixin):
-    type = 'list'
+class ListComprehension(ArrayMixin, Comprehension):
+    array_type = 'list'

     def py__getitem__(self, index):
-        all_types = list(self.py__iter__())
-        result = all_types[index]
         if isinstance(index, slice):
-            return create_evaluated_sequence_set(
-                self.evaluator,
-                unite(result),
-                sequence_type='list'
-            )
-        return result
+            return set([self])
+
+        all_types = list(self.py__iter__())
+        return all_types[index].infer()


-class SetComprehension(Comprehension, ArrayMixin):
-    type = 'set'
+class SetComprehension(ArrayMixin, Comprehension):
+    array_type = 'set'


 @has_builtin_methods
-class DictComprehension(Comprehension, ArrayMixin):
-    type = 'dict'
+class DictComprehension(ArrayMixin, Comprehension):
+    array_type = 'dict'

     def _get_comp_for(self):
         return self._get_comprehension().children[3]
@@ -343,7 +344,7 @@ class DictComprehension(Comprehension, ArrayMixin):
         return create_evaluated_sequence_set(self.evaluator, items, sequence_type='list')


-class GeneratorComprehension(Comprehension, GeneratorMixin):
+class GeneratorComprehension(GeneratorMixin, Comprehension):
     pass

@@ -353,7 +354,7 @@ class ArrayLiteralContext(ArrayMixin, AbstractSequence):
              '{': 'dict'}

     def __init__(self, evaluator, defining_context, atom):
-        super(ArrayLiteralContext, self).__init__(evaluator, evaluator.BUILTINS)
+        super(ArrayLiteralContext, self).__init__(evaluator)
         self.atom = atom
         self._defining_context = defining_context

@@ -461,7 +462,6 @@ class _FakeArray(ArrayLiteralContext):
         self.array_type = type
         self.evaluator = evaluator
         self.atom = container
-        self.parent_context = evaluator.BUILTINS


 class ImplicitTuple(_FakeArray):
@@ -585,7 +585,7 @@ def unpack_tuple_to_dict(evaluator, types, exprlist):
     dct = {}
     parts = iter(exprlist.children[::2])
     n = 0
-    for iter_types in py__iter__(evaluator, types, exprlist):
+    for lazy_context in py__iter__(evaluator, types, exprlist):
         n += 1
         try:
             part = next(parts)
@@ -593,7 +593,7 @@ def unpack_tuple_to_dict(evaluator, types, exprlist):
             analysis.add(evaluator, 'value-error-too-many-values', part,
                          message="ValueError: too many values to unpack (expected %s)" % n)
         else:
-            dct.update(unpack_tuple_to_dict(evaluator, iter_types, part))
+            dct.update(unpack_tuple_to_dict(evaluator, lazy_context.infer(), part))
     has_parts = next(parts, None)
     if types and has_parts is not None:
         analysis.add(evaluator, 'value-error-too-few-values', has_parts,
@@ -685,6 +685,7 @@ def py__getitem__(evaluator, context, types, trailer):
         try:
             result |= py__iter__types(evaluator, set([typ]))
         except KeyError:
             # Must be a dict. Lists don't raise KeyErrors.
+            raise
             result |= typ.dict_values()
     return result
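To make the reworked Comprehension._nested easier to follow: each comp_for clause evaluates its input node in the defining context, pins the unpacked loop names, and recurses into the remaining clauses; running out of clauses (the IndexError above) means the innermost level evaluates the result expression once per binding, and the finally block unpins the names again. A simplified stand-in with plain dicts instead of jedi contexts (CompFor, iterate and eval_result are hypothetical):

class CompFor:
    """Hypothetical stand-in for one comp_for clause: a name and its input."""
    def __init__(self, name, values):
        self.name, self.values = name, values

def iterate(comp_fors, bindings, eval_result):
    # Mirrors Comprehension._nested: evaluate the first clause's input,
    # pin the loop name, recurse into the remaining clauses; with no
    # clauses left (the IndexError case above), evaluate the expression.
    if not comp_fors:
        yield eval_result(bindings)
        return
    first, rest = comp_fors[0], comp_fors[1:]
    for value in first.values:
        bindings[first.name] = value        # predefined_if_name_dict_dict[comp_for] = ...
        try:
            for result in iterate(rest, bindings, eval_result):
                yield result
        finally:
            del bindings[first.name]        # the finally: cleanup above

# "[x + y for x in (1, 2) for y in (10, 20)]" unrolls to:
clauses = [CompFor('x', (1, 2)), CompFor('y', (10, 20))]
print(list(iterate(clauses, {}, lambda b: b['x'] + b['y'])))  # [11, 21, 12, 22]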

View File

@@ -687,7 +687,7 @@ class FunctionExecutionContext(Executed):
             parent = for_stmt.parent
             if parent.type == 'suite':
                 parent = parent.parent
-            if for_stmt.type == 'for_stmt' and parent == self.func_def \
+            if for_stmt.type == 'for_stmt' and parent == self.funcdef \
                     and for_stmt.defines_one_name():  # Simplicity for now.
                 if for_stmt == last_for_stmt:
                     yields_order[-1][1].append(yield_)
@@ -709,7 +709,7 @@
                     yield result
             else:
                 input_node = for_stmt.get_input_node()
-                for_types = evaluator.eval_element(input_node)
+                for_types = self.eval_node(input_node)
                 ordered = iterable.py__iter__(evaluator, for_types, input_node)
                 for index_types in ordered:
                     dct = {str(for_stmt.children[1]): index_types}
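The last two hunks touch how yields inside a plain for loop are inferred: the loop's input node is now evaluated through the execution context (self.eval_node) instead of the global evaluator. A small usage sketch of the behaviour this feeds into, again assuming the 2016-era Script API:

import jedi  # assumes a jedi checkout at this commit

source = '''\
def gen():
    for x in [1, 'a']:
        yield x

for value in gen():
    value.'''

# Inference unrolls the for loop, so "value." should offer both
# int and str attributes if this code path works.
script = jedi.Script(source, 6, len('    value.'), 'example.py')
print(sorted(set(c.name for c in script.completions()))[:5])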