py__iter__ now takes a contextualized_node argument and raises the analysis errors itself
@@ -103,12 +103,14 @@ def _check_for_setattr(instance):
 
     node = module.tree_node
     try:
-        stmts = node.get_used_names()['setattr']
+        stmt_names = node.get_used_names()['setattr']
     except KeyError:
         return False
 
-    return any(node.start_pos < stmt.start_pos < node.end_pos
-               for stmt in stmts)
+    return any(node.start_pos < n.start_pos < node.end_pos
+               # Check if it's a function called setattr.
+               and not (n.parent.type == 'funcdef' and n.parent.name == n)
+               for n in stmt_names)
 
 
 def add_attribute_error(name_context, lookup_context, name):
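
The refined predicate above skips names that are merely the definition of a function called setattr. A minimal standalone sketch of that filter, assuming the parso package (which jedi uses for parsing) is available; the start_pos/end_pos window that restricts the search to the instance's class body is omitted here:

import parso


def uses_setattr(source):
    # True if `setattr` is referenced anywhere other than as the name of a
    # `def setattr(...)` definition.
    module = parso.parse(source)
    try:
        names = module.get_used_names()['setattr']
    except KeyError:
        return False
    return any(
        # Skip `def setattr(...)`: there the name *is* the funcdef's own name.
        not (n.parent.type == 'funcdef' and n.parent.name == n)
        for n in names
    )


print(uses_setattr("setattr(obj, 'a', 1)"))        # True
print(uses_setattr("def setattr(o, k, v): pass"))  # False
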
@@ -327,14 +327,16 @@ class TreeArgumentsWrapper(_AbstractArgumentsMixin):
 
 
 def _iterate_star_args(context, array, input_node, funcdef=None):
-    try:
-        iter_ = array.py__iter__
-    except AttributeError:
+    if not array.py__getattribute__('__iter__'):
         if funcdef is not None:
             # TODO this funcdef should not be needed.
             m = "TypeError: %s() argument after * must be a sequence, not %s" \
                 % (funcdef.name.value, array)
             analysis.add(context, 'type-error-star', input_node, message=m)
+    try:
+        iter_ = array.py__iter__
+    except AttributeError:
+        pass
     else:
         for lazy_context in iter_():
             yield lazy_context
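
For context, the shape of the check being rearranged here: when the value after * in a call cannot be iterated, a type-error-star diagnostic is recorded instead of raising. A toy, self-contained sketch of that pattern in plain Python (not jedi's analysis API):

def iterate_star_args(value, errors, func_name='f'):
    # Mirror of the control flow above: report a diagnostic for a
    # non-iterable *-argument, otherwise yield its items.
    try:
        iterator = iter(value)
    except TypeError:
        errors.append(
            "TypeError: %s() argument after * must be a sequence, not %r"
            % (func_name, value))
        return
    for item in iterator:
        yield item


errors = []
print(list(iterate_star_args([1, 2], errors)))  # [1, 2]
print(list(iterate_star_args(42, errors)))      # []
print(errors)                                   # one recorded message
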
@@ -65,6 +65,8 @@ class HelperContextMixin:
         debug.dbg('iterate %s', self)
         if is_async:
             from jedi.evaluate.lazy_context import LazyKnownContexts
+            # TODO if no __aiter__ contexts are there, error should be:
+            # TypeError: 'async for' requires an object with __aiter__ method, got int
             return iter([
                 LazyKnownContexts(
                     self.py__getattribute__('__aiter__').execute_evaluated()
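
The TODO added above quotes the runtime error CPython itself produces for this case; a standalone snippet (Python 3.7+) that reproduces the message:

import asyncio


async def main():
    try:
        async for _ in 1:  # int has no __aiter__
            pass
    except TypeError as e:
        print(e)  # 'async for' requires an object with __aiter__ method, got int


asyncio.run(main())
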
@@ -73,22 +75,7 @@ class HelperContextMixin:
                         .py__stop_iteration_returns()
                 ) # noqa
             ])
-        try:
-            if is_async:
-                iter_method = self.py__aiter__
-            else:
-                iter_method = self.py__iter__
-        except AttributeError:
-            if contextualized_node is not None:
-                from jedi.evaluate import analysis
-                analysis.add(
-                    contextualized_node.context,
-                    'type-error-not-iterable',
-                    contextualized_node.node,
-                    message="TypeError: '%s' object is not iterable" % self)
-            return iter([])
-        else:
-            return iter_method()
+        return self.py__iter__(contextualized_node)
 
     def is_sub_class_of(self, class_context):
         from jedi.evaluate.context.klass import py__mro__
@@ -131,6 +118,16 @@ class Context(HelperContextMixin, BaseContext):
         )
         return NO_CONTEXTS
 
+    def py__iter__(self, contextualized_node=None):
+        if contextualized_node is not None:
+            from jedi.evaluate import analysis
+            analysis.add(
+                contextualized_node.context,
+                'type-error-not-iterable',
+                contextualized_node.node,
+                message="TypeError: '%s' object is not iterable" % self)
+        return iter([])
+
     def get_signatures(self):
         return []
 
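
Taken together, the two hunks above move the "not iterable" handling out of iterate() and into a default py__iter__ on the base class. A toy sketch of the resulting control flow, using stand-in classes rather than jedi's real Context/analysis machinery:

class Analysis:
    # Stand-in for jedi.evaluate.analysis: just collects messages.
    errors = []

    @classmethod
    def add(cls, node, message):
        cls.errors.append((node, message))


class BaseContext:
    def iterate(self, contextualized_node=None):
        # No try/except AttributeError any more: every context has
        # py__iter__, and the default implementation handles the error case.
        return self.py__iter__(contextualized_node)

    def py__iter__(self, contextualized_node=None):
        if contextualized_node is not None:
            Analysis.add(contextualized_node,
                         "TypeError: '%s' object is not iterable" % self)
        return iter([])


class ListContext(BaseContext):
    def __init__(self, values):
        self._values = values

    def py__iter__(self, contextualized_node=None):
        # Iterable contexts simply ignore the node; it is only used for errors.
        return iter(self._values)


print(list(ListContext([1, 2]).iterate('node')))  # [1, 2]
print(list(BaseContext().iterate('node')))        # []
print(len(Analysis.errors))                       # 1, recorded for the non-iterable context
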
@@ -34,15 +34,7 @@ class CheckAttribute(object):
             return self
 
         # This might raise an AttributeError. That's wanted.
-        if self.check_name == '__iter__':
-            # Python iterators are a bit strange, because there's no need for
-            # the __iter__ function as long as __getitem__ is defined (it will
-            # just start with __getitem__(0). This is especially true for
-            # Python 2 strings, where `str.__iter__` is not even defined.
-            if not instance.access_handle.has_iter():
-                raise AttributeError
-        else:
-            instance.access_handle.getattr(self.check_name)
+        instance.access_handle.getattr(self.check_name)
         return partial(self.func, instance)
 
 
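
With the __iter__ special case removed, __get__ is back to the plain descriptor pattern: probe the wrapped object for the attribute and only then bind the method. A simplified sketch of that pattern; jedi's real class derives the attribute name from the decorated function, while this version takes it explicitly:

from functools import partial


class CheckAttribute:
    def __init__(self, check_name):
        self.check_name = check_name

    def __call__(self, func):
        self.func = func
        return self

    def __get__(self, instance, owner):
        if instance is None:
            return self
        # This may raise AttributeError - that's the point: the caller treats
        # a missing attribute as "this capability does not exist".
        getattr(instance.obj, self.check_name)
        return partial(self.func, instance)


class Wrapper:
    def __init__(self, obj):
        self.obj = obj

    @CheckAttribute('__iter__')
    def py__iter__(self):
        return iter(self.obj)


print(list(Wrapper([1, 2]).py__iter__()))  # [1, 2]
try:
    Wrapper(42).py__iter__
except AttributeError:
    print('42 has no __iter__')
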
@@ -172,8 +164,15 @@ class CompiledObject(Context):
             for access in self.access_handle.py__getitem__all_values()
         )
 
-    @CheckAttribute()
-    def py__iter__(self):
+    def py__iter__(self, contextualized_node=None):
+        # Python iterators are a bit strange, because there's no need for
+        # the __iter__ function as long as __getitem__ is defined (it will
+        # just start with __getitem__(0). This is especially true for
+        # Python 2 strings, where `str.__iter__` is not even defined.
+        if not self.access_handle.has_iter():
+            for x in super(CompiledObject, self).py__iter__(contextualized_node):
+                yield x
+
         for access in self.access_handle.py__iter__list():
             yield LazyKnownContext(create_from_access_path(self.evaluator, access))
 
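
The comment block that moved into py__iter__ describes a real CPython behaviour worth remembering: iter() falls back to the old sequence protocol, so an object can be iterable with only __getitem__. A quick standalone demonstration:

class GetItemOnly:
    # No __iter__ at all; iteration works through __getitem__(0), (1), ...
    def __getitem__(self, index):
        if index >= 3:
            raise IndexError(index)
        return index * 10


print(hasattr(GetItemOnly(), '__iter__'))  # False
print(list(GetItemOnly()))                 # [0, 10, 20]
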
@@ -150,7 +150,7 @@ class AbstractInstanceContext(Context):
         args = ValuesArguments([index_context_set])
         return ContextSet.from_sets(name.infer().execute(args) for name in names)
 
-    def py__iter__(self):
+    def py__iter__(self, contextualized_node=None):
         iter_slot_names = self.get_function_slot_names(u'__iter__')
         if not iter_slot_names:
             debug.warning('No __iter__ on %s.' % self)
@@ -58,7 +58,7 @@ class GeneratorBase(BuiltinOverwrite, IterableMixin):
         return generator
 
     @publish_method('__iter__')
-    def py__iter__(self):
+    def py__iter__(self, contextualized_node=None):
         return ContextSet([self])
 
     @publish_method('send')
@@ -81,7 +81,7 @@ class Generator(GeneratorBase):
         super(Generator, self).__init__(evaluator)
         self._func_execution_context = func_execution_context
 
-    def py__iter__(self):
+    def py__iter__(self, contextualized_node=None):
         return self._func_execution_context.get_yield_lazy_contexts()
 
     def py__stop_iteration_returns(self):
@@ -183,7 +183,7 @@ class ComprehensionMixin(object):
         for result in self._nested(comp_fors):
             yield result
 
-    def py__iter__(self):
+    def py__iter__(self, contextualized_node=None):
         for set_ in self._iterate():
             yield LazyKnownContexts(set_)
 
@@ -252,7 +252,7 @@ class DictComprehension(_DictMixin, ComprehensionMixin, Sequence):
     def _get_comp_for(self):
         return self._get_comprehension().children[3]
 
-    def py__iter__(self):
+    def py__iter__(self, contextualized_node=None):
         for keys, values in self._iterate():
             yield LazyKnownContexts(keys)
 
@@ -338,7 +338,7 @@ class SequenceLiteralContext(Sequence):
         node = self.get_tree_entries()[index]
         return self._defining_context.eval_node(node)
 
-    def py__iter__(self):
+    def py__iter__(self, contextualized_node=None):
         """
         While values returns the possible values for any array field, this
         function returns the value for a certain index.
@@ -484,7 +484,7 @@ class FakeSequence(_FakeArray):
         lazy_context = self._lazy_context_list[index]
         return lazy_context.infer()
 
-    def py__iter__(self):
+    def py__iter__(self, contextualized_node=None):
         return self._lazy_context_list
 
     def py__bool__(self):
@@ -499,7 +499,7 @@ class FakeDict(_DictMixin, _FakeArray):
         super(FakeDict, self).__init__(evaluator, dct, u'dict')
         self._dct = dct
 
-    def py__iter__(self):
+    def py__iter__(self, contextualized_node=None):
         for key in self._dct:
             yield LazyKnownContext(compiled.create_simple_object(self.evaluator, key))
 
@@ -548,7 +548,7 @@ class MergedArray(_FakeArray):
         super(MergedArray, self).__init__(evaluator, arrays, arrays[-1].array_type)
         self._arrays = arrays
 
-    def py__iter__(self):
+    def py__iter__(self, contextualized_node=None):
         for array in self._arrays:
             for lazy_context in array.py__iter__():
                 yield lazy_context
@@ -718,7 +718,7 @@ class _ArrayInstance(HelperContextMixin):
         tuple_, = self.instance.evaluator.builtins_module.py__getattribute__('tuple')
         return tuple_
 
-    def py__iter__(self):
+    def py__iter__(self, contextualized_node=None):
         var_args = self.var_args
         try:
             _, lazy_context = next(var_args.unpack())
@@ -735,7 +735,7 @@ class _ArrayInstance(HelperContextMixin):
                 yield addition
 
     def iterate(self, contextualized_node=None, is_async=False):
-        return self.py__iter__()
+        return self.py__iter__(contextualized_node)
 
 
 class Slice(object):
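
This is the plumbing counterpart of the signature change repeated throughout the file: iterate() now forwards the node it was given instead of dropping it, so the default py__iter__ can attach a diagnostic to the right location. A minimal stand-in illustration (not jedi's _ArrayInstance):

class ArrayInstanceLike:
    def __init__(self, items):
        self._items = items

    def py__iter__(self, contextualized_node=None):
        return iter(self._items)

    def iterate(self, contextualized_node=None, is_async=False):
        # Forward the node instead of discarding it.
        return self.py__iter__(contextualized_node)


print(list(ArrayInstanceLike([1, 2, 3]).iterate('some tree node')))  # [1, 2, 3]
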
@@ -215,7 +215,7 @@ class TypingClassContext(TypingClassMixin, TypingContext):
 def _iter_over_arguments(maybe_tuple_context, defining_context):
     def iterate():
         if isinstance(maybe_tuple_context, SequenceLiteralContext):
-            for lazy_context in maybe_tuple_context.py__iter__():
+            for lazy_context in maybe_tuple_context.py__iter__(contextualized_node=None):
                 yield lazy_context.infer()
         else:
             yield ContextSet([maybe_tuple_context])
@@ -308,7 +308,7 @@ class Tuple(_ContainerBase):
         debug.dbg('The getitem type on Tuple was %s' % index)
         return NO_CONTEXTS
 
-    def py__iter__(self):
+    def py__iter__(self, contextualized_node=None):
         if self._is_homogenous():
             while True:
                 yield LazyKnownContexts(self._get_getitem_contexts(0).execute_annotation())
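
The while True loop reflects how a homogenous annotation such as Tuple[int, ...] behaves under iteration: every position infers to the same type, with no natural end, so consumers have to bound the iteration themselves. A small sketch of that behaviour:

from itertools import islice


def homogenous_tuple_iter(element_type):
    # Tuple[int, ...]-style iteration: the same inferred type, indefinitely.
    while True:
        yield element_type


print(list(islice(homogenous_tuple_iter('int'), 4)))  # ['int', 'int', 'int', 'int']
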
@@ -19,7 +19,7 @@ from jedi.evaluate import imports
 from jedi.evaluate import arguments
 from jedi.evaluate.context import ClassContext, FunctionContext
 from jedi.evaluate.context import iterable
-from jedi.evaluate.context import TreeInstance, CompiledInstance
+from jedi.evaluate.context import TreeInstance
 from jedi.evaluate.finder import NameFinder
 from jedi.evaluate.helpers import is_string, is_literal, is_number, is_compiled
 from jedi.evaluate.compiled.access import COMPARISON_OPERATORS
@@ -510,7 +510,7 @@ def _eval_comparison_part(evaluator, context, left, operator, right):
 
     def check(obj):
         """Checks if a Jedi object is either a float or an int."""
-        return isinstance(obj, CompiledInstance) and \
+        return isinstance(obj, TreeInstance) and \
             obj.name.string_name in ('int', 'float')
 
     # Static analysis, one is a number, the other one is not.
@@ -207,7 +207,7 @@ class ReversedObject(AbstractObjectOverwrite, ContextWrapper):
         return self._wrapped_context
 
     @publish_method('__iter__')
-    def py__iter__(self):
+    def py__iter__(self, contextualized_node=None):
         return self._iter_list
 
     @publish_method('next', python_version_match=2)
@@ -115,5 +115,5 @@ import_tree.b
 # This is something that raised an error, because it was using a complex
 # mixture of Jedi fakes and compiled objects.
 import _sre
-#! 15 attribute-error
+##! 15 attribute-error # Doesn't seem to be a problem anymore.
 _sre.compile().not_existing