forked from VimPlug/jedi
Fix quite a few more tests. Only about a fifth failing now
@@ -3,6 +3,7 @@ from jedi.inference.value.module import ModuleValue
 from jedi.inference.filters import ParserTreeFilter, \
     TreeNameDefinition
 from jedi.inference.gradual.typing import TypingModuleFilterWrapper
+from jedi.inference.context import ModuleContext
 
 
 class StubModuleValue(ModuleValue):
@@ -58,6 +59,17 @@ class TypingModuleWrapper(StubModuleValue):
         for f in filters:
             yield f
 
+    def as_context(self):
+        return TypingModuleContext(self)
+
+
+class TypingModuleContext(ModuleContext):
+    def get_filters(self, *args, **kwargs):
+        filters = super(TypingModuleContext, self).get_filters(*args, **kwargs)
+        yield TypingModuleFilterWrapper(next(filters))
+        for f in filters:
+            yield f
+
 
 # From here on down we make looking up the sys.version_info fast.
 class _StubName(TreeNameDefinition):
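
This hunk is the core of the value/context split: a module value grows an `as_context()` hook that hands out a dedicated context object, and the context subclass reorders name filters by wrapping only the first one. A minimal, self-contained sketch of the same shape (all names below are illustrative stand-ins, not jedi's actual classes):

    # Sketch only: illustrative names, not jedi's API.
    class ModuleValue:
        def get_filters(self):
            yield 'module-filter'
            yield 'star-filter'

        def as_context(self):
            # A value hands out the context that owns name lookup for it.
            return ModuleContext(self)

    class ModuleContext:
        def __init__(self, module_value):
            self._value = module_value

        def get_filters(self):
            return self._value.get_filters()

    class TypingModuleContext(ModuleContext):
        def get_filters(self):
            filters = super().get_filters()
            # Wrap the first filter and pass the rest through unchanged,
            # the same shape as TypingModuleContext.get_filters() above.
            yield ('wrapped', next(filters))
            for f in filters:
                yield f

Keeping the wrapping in the context means the value itself never has to know about filter ordering.
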
@@ -222,7 +222,7 @@ class TypingClassValue(_TypingClassMixin, TypingValue, ClassMixin):
     index_class = TypingClassValueWithIndex
 
 
-def _iter_over_arguments(maybe_tuple_value, defining_value):
+def _iter_over_arguments(maybe_tuple_value, defining_context):
     def iterate():
         if isinstance(maybe_tuple_value, SequenceLiteralValue):
             for lazy_value in maybe_tuple_value.py__iter__(valueualized_node=None):
@@ -234,9 +234,9 @@ def _iter_over_arguments(maybe_tuple_value, defining_value):
         for value in value_set:
             if is_string(value):
                 from jedi.inference.gradual.annotation import _get_forward_reference_node
-                node = _get_forward_reference_node(defining_value, value.get_safe_value())
+                node = _get_forward_reference_node(defining_context, value.get_safe_value())
                 if node is not None:
-                    for c in defining_value.infer_node(node):
+                    for c in defining_context.infer_node(node):
                         yield c
             else:
                 yield value
@@ -150,7 +150,7 @@ def comprehension_from_atom(inference_state, value, atom):
 
     return cls(
         inference_state,
-        defining_value=value,
+        defining_context=value,
         sync_comp_for_node=sync_comp_for,
         entry_node=test_list_comp.children[0],
     )
@@ -167,7 +167,7 @@ class ComprehensionMixin(object):
         is_async = comp_for.parent.type == 'comp_for'
 
         input_node = comp_for.children[3]
-        parent_context = parent_context or self._defining_value
+        parent_context = parent_context or self._defining_context
         input_types = parent_context.infer_node(input_node)
         # TODO: simulate await if self.is_async
 
@@ -245,10 +245,10 @@ class Sequence(LazyAttributeOverwrite, IterableMixin):
 
 
 class _BaseComprehension(ComprehensionMixin):
-    def __init__(self, inference_state, defining_value, sync_comp_for_node, entry_node):
+    def __init__(self, inference_state, defining_context, sync_comp_for_node, entry_node):
         assert sync_comp_for_node.type == 'sync_comp_for'
         super(_BaseComprehension, self).__init__(inference_state)
-        self._defining_value = defining_value
+        self._defining_context = defining_context
         self._sync_comp_for_node = sync_comp_for_node
         self._entry_node = entry_node
 
@@ -277,10 +277,10 @@ class GeneratorComprehension(_BaseComprehension, GeneratorBase):
 class DictComprehension(ComprehensionMixin, Sequence):
     array_type = u'dict'
 
-    def __init__(self, inference_state, defining_value, sync_comp_for_node, key_node, value_node):
+    def __init__(self, inference_state, defining_context, sync_comp_for_node, key_node, value_node):
         assert sync_comp_for_node.type == 'sync_comp_for'
         super(DictComprehension, self).__init__(inference_state)
-        self._defining_value = defining_value
+        self._defining_context = defining_context
         self._sync_comp_for_node = sync_comp_for_node
         self._entry_node = key_node
         self._value_node = value_node
@@ -341,10 +341,10 @@ class SequenceLiteralValue(Sequence):
                '[': u'list',
                '{': u'set'}
 
-    def __init__(self, inference_state, defining_value, atom):
+    def __init__(self, inference_state, defining_context, atom):
         super(SequenceLiteralValue, self).__init__(inference_state)
         self.atom = atom
-        self._defining_value = defining_value
+        self._defining_context = defining_context
 
         if self.atom.type in self._TUPLE_LIKE:
             self.array_type = u'tuple'
@@ -357,14 +357,14 @@ class SequenceLiteralValue(Sequence):
         if self.array_type == u'dict':
             compiled_obj_index = compiled.create_simple_object(self.inference_state, index)
             for key, value in self.get_tree_entries():
-                for k in self._defining_value.infer_node(key):
+                for k in self._defining_context.infer_node(key):
                     try:
                         method = k.execute_operation
                     except AttributeError:
                         pass
                     else:
                         if method(compiled_obj_index, u'==').get_safe_value():
-                            return self._defining_value.infer_node(value)
+                            return self._defining_context.infer_node(value)
             raise SimpleGetItemNotFound('No key found in dictionary %s.' % self)
 
         if isinstance(index, slice):
@@ -372,7 +372,7 @@ class SequenceLiteralValue(Sequence):
         else:
             with reraise_getitem_errors(TypeError, KeyError, IndexError):
                 node = self.get_tree_entries()[index]
-            return self._defining_value.infer_node(node)
+            return self._defining_context.infer_node(node)
 
     def py__iter__(self, valueualized_node=None):
         """
@@ -383,7 +383,7 @@ class SequenceLiteralValue(Sequence):
             # Get keys.
             types = NO_VALUES
             for k, _ in self.get_tree_entries():
-                types |= self._defining_value.infer_node(k)
+                types |= self._defining_context.infer_node(k)
             # We don't know which dict index comes first, therefore always
             # yield all the types.
             for _ in types:
@@ -393,10 +393,10 @@ class SequenceLiteralValue(Sequence):
                 if node == ':' or node.type == 'subscript':
                     # TODO this should probably use at least part of the code
                     # of infer_subscript_list.
-                    yield LazyKnownValue(Slice(self._defining_value, None, None, None))
+                    yield LazyKnownValue(Slice(self._defining_context, None, None, None))
                 else:
-                    yield LazyTreeValue(self._defining_value, node)
-            for addition in check_array_additions(self._defining_value, self):
+                    yield LazyTreeValue(self._defining_context, node)
+            for addition in check_array_additions(self._defining_context, self):
                 yield addition
 
     def py__len__(self):
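
The `Lazy*Value` objects threaded through these hunks defer inference: `LazyKnownValue` already holds its result, while `LazyTreeValue` keeps a (context, node) pair and only infers when asked. Roughly, the contract looks like this (a sketch of the idea, not jedi's exact code):

    class LazyKnownValue:
        """Already-inferred value; infer() just hands it back."""
        def __init__(self, value):
            self._value = value

        def infer(self):
            return {self._value}

    class LazyTreeValue:
        """Defers inference of a syntax-tree node until infer() is called."""
        def __init__(self, context, node):
            self._context = context
            self._node = node

        def infer(self):
            # The defining context does the actual work, on demand.
            return self._context.infer_node(self._node)

That deferral is why the rename matters here: whichever object ends up stored in the lazy value is the one that later performs `infer_node()`, so it has to be a context.
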
@@ -405,7 +405,7 @@ class SequenceLiteralValue(Sequence):
 
     def _dict_values(self):
         return ValueSet.from_sets(
-            self._defining_value.infer_node(v)
+            self._defining_context.infer_node(v)
             for k, v in self.get_tree_entries()
         )
 
@@ -460,9 +460,9 @@ class SequenceLiteralValue(Sequence):
         resolved (as a string) and the values are still lazy values.
         """
         for key_node, value in self.get_tree_entries():
-            for key in self._defining_value.infer_node(key_node):
+            for key in self._defining_context.infer_node(key_node):
                 if is_string(key):
-                    yield key.get_safe_value(), LazyTreeValue(self._defining_value, value)
+                    yield key.get_safe_value(), LazyTreeValue(self._defining_context, value)
 
     def __repr__(self):
         return "<%s of %s>" % (self.__class__.__name__, self.atom)
@@ -471,9 +471,9 @@ class SequenceLiteralValue(Sequence):
 class DictLiteralValue(_DictMixin, SequenceLiteralValue):
     array_type = u'dict'
 
-    def __init__(self, inference_state, defining_value, atom):
+    def __init__(self, inference_state, defining_context, atom):
         super(SequenceLiteralValue, self).__init__(inference_state)
-        self._defining_value = defining_value
+        self._defining_context = defining_context
         self.atom = atom
 
     @publish_method('values')
@@ -486,8 +486,8 @@ class DictLiteralValue(_DictMixin, SequenceLiteralValue):
         lazy_values = [
             LazyKnownValue(FakeSequence(
                 self.inference_state, u'tuple',
-                (LazyTreeValue(self._defining_value, key_node),
-                 LazyTreeValue(self._defining_value, value_node))
+                (LazyTreeValue(self._defining_context, key_node),
+                 LazyTreeValue(self._defining_context, value_node))
             )) for key_node, value_node in self.get_tree_entries()
         ]
 
@@ -495,7 +495,7 @@ class DictLiteralValue(_DictMixin, SequenceLiteralValue):
 
     def _dict_keys(self):
         return ValueSet.from_sets(
-            self._defining_value.infer_node(k)
+            self._defining_context.infer_node(k)
             for k, v in self.get_tree_entries()
         )
 
@@ -648,18 +648,18 @@ def unpack_tuple_to_dict(value, types, exprlist):
     raise NotImplementedError
 
 
-def check_array_additions(value, sequence):
+def check_array_additions(context, sequence):
     """ Just a mapper function for the internal _check_array_additions """
     if sequence.array_type not in ('list', 'set'):
         # TODO also check for dict updates
         return NO_VALUES
 
-    return _check_array_additions(value, sequence)
+    return _check_array_additions(context, sequence)
 
 
 @inference_state_method_cache(default=NO_VALUES)
 @debug.increase_indent
-def _check_array_additions(value, sequence):
+def _check_array_additions(context, sequence):
     """
     Checks if a `Array` has "add" (append, insert, extend) statements:
 
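
`check_array_additions()` is a thin guard in front of a cached worker: the cheap `array_type` check runs every time, while the expensive whole-module search behind it is memoized and has a `NO_VALUES` fallback. A self-contained sketch of that split, using `functools.lru_cache` as a stand-in for jedi's `inference_state_method_cache` (names and return values below are invented for illustration):

    from functools import lru_cache

    NO_VALUES = frozenset()

    def check_additions(array_type, sequence_key):
        """Cheap precondition check; only lists and sets can 'grow'."""
        if array_type not in ('list', 'set'):
            # Mirrors the TODO above: dict updates are not searched yet.
            return NO_VALUES
        return _check_additions(sequence_key)

    @lru_cache(maxsize=None)  # stand-in for @inference_state_method_cache
    def _check_additions(sequence_key):
        # Pretend this scans the module for .append()/.insert()/.extend()
        # calls on the sequence and infers the added values.
        return frozenset({'addition-for-%s' % sequence_key})

Splitting guard from worker keeps the cache from ever storing results for sequences that cannot grow in the first place.
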
@@ -669,7 +669,7 @@ def _check_array_additions(value, sequence):
     from jedi.inference import arguments
 
     debug.dbg('Dynamic array search for %s' % sequence, color='MAGENTA')
-    module_context = value.get_root_context()
+    module_context = context.get_root_context()
     if not settings.dynamic_array_additions or isinstance(module_context, compiled.CompiledObject):
         debug.dbg('Dynamic array search aborted.', color='MAGENTA')
         return NO_VALUES
@@ -701,7 +701,7 @@ def _check_array_additions(value, sequence):
                 continue
         else:
             for name in possible_names:
-                value_node = value.tree_node
+                value_node = context.tree_node
                 if not (value_node.start_pos < name.start_pos < value_node.end_pos):
                     continue
                 trailer = name.parent
@@ -718,9 +718,9 @@ def _check_array_additions(value, sequence):
                     continue
 
                 raise NotImplementedError
-                random_context = value.create_context(name)
+                random_context = context.create_context(name)
 
-                with recursion.execution_allowed(value.inference_state, power) as allowed:
+                with recursion.execution_allowed(context.inference_state, power) as allowed:
                     if allowed:
                         found = infer_call_of_leaf(
                             random_context,
@@ -774,7 +774,7 @@ class _ArrayInstance(HelperValueMixin):
 
         from jedi.inference import arguments
         if isinstance(var_args, arguments.TreeArguments):
-            additions = _check_array_additions(var_args.value, self.instance)
+            additions = _check_array_additions(var_args.context, self.instance)
             for addition in additions:
                 yield addition
 
@@ -162,12 +162,13 @@ class ModuleMixin(SubModuleDictMixin):
         from jedi.inference.imports import Importer
 
         modules = []
+        module_context = self.as_context()
         for i in self.tree_node.iter_imports():
             if i.is_star_import():
                 new = Importer(
                     self.inference_state,
                     import_path=i.get_paths()[-1],
-                    module_value=self,
+                    module_context=module_context,
                     level=i.level
                 ).follow()
 
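
Note the hoisting in this hunk: `self.as_context()` is called once before the loop, and the resulting context is reused for every star import rather than handing the raw module value to each `Importer`. The shape, as a sketch with hypothetical helpers (none of these names are jedi's):

    def follow_star_imports(module_value, star_import_nodes, make_importer):
        # Build the module context once, then reuse it for every star import
        # in the loop, instead of re-deriving it per iteration.
        module_context = module_value.as_context()
        modules = []
        for node in star_import_nodes:
            modules.extend(make_importer(module_context, node).follow())
        return modules
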
@@ -3,7 +3,7 @@ def import_module(callback):
     Handle "magic" Flask extension imports:
     ``flask.ext.foo`` is really ``flask_foo`` or ``flaskext.foo``.
     """
-    def wrapper(inference_state, import_names, module_value, *args, **kwargs):
+    def wrapper(inference_state, import_names, module_context, *args, **kwargs):
         if len(import_names) == 3 and import_names[:2] == ('flask', 'ext'):
             # New style.
             ipath = (u'flask_' + import_names[2]),
@@ -17,5 +17,5 @@ def import_module(callback):
                     next(iter(value_set)),
                     *args, **kwargs
                 )
-        return callback(inference_state, import_names, module_value, *args, **kwargs)
+        return callback(inference_state, import_names, module_context, *args, **kwargs)
     return wrapper
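
The flask plugin is a classic "intercept, else delegate" decorator: the wrapper rewrites `flask.ext.foo` import paths and falls through to the original callback otherwise. A runnable toy version of the same control flow (simplified; the real wrapper also tries the old-style `flaskext.foo` path):

    def import_module(callback):
        def wrapper(import_names, *args, **kwargs):
            if len(import_names) == 3 and import_names[:2] == ('flask', 'ext'):
                # New style: flask.ext.foo -> flask_foo
                import_names = ('flask_' + import_names[2],)
            return callback(import_names, *args, **kwargs)
        return wrapper

    @import_module
    def do_import(import_names):
        return 'importing %s' % '.'.join(import_names)

    print(do_import(('flask', 'ext', 'sqlalchemy')))  # importing flask_sqlalchemy
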
@@ -31,7 +31,6 @@ from jedi.inference.value import iterable
 from jedi.inference.lazy_value import LazyTreeValue, LazyKnownValue, \
     LazyKnownValues
 from jedi.inference.names import ValueName, BaseTreeParamName
-from jedi.inference.syntax_tree import is_string
 from jedi.inference.filters import AttributeOverwrite, publish_method, \
     ParserTreeFilter, DictFilter
 from jedi.inference.signature import AbstractSignature, SignatureWrapper
@@ -305,8 +304,7 @@ def builtins_reversed(sequences, obj, arguments):
     key, lazy_value = next(arguments.unpack())
     cn = None
     if isinstance(lazy_value, LazyTreeValue):
-        # TODO access private
-        cn = ValueualizedNode(lazy_value.value, lazy_value.data)
+        cn = ValueualizedNode(lazy_value.context, lazy_value.data)
     ordered = list(sequences.iterate(cn))
 
     # Repack iterator values and then run it the normal way. This is
@@ -351,7 +349,7 @@ def builtins_isinstance(objects, types, arguments, inference_state):
             message = 'TypeError: isinstance() arg 2 must be a ' \
                       'class, type, or tuple of classes and types, ' \
                       'not %s.' % cls_or_tup
-            analysis.add(lazy_value.value, 'type-error-isinstance', node, message)
+            analysis.add(lazy_value.context, 'type-error-isinstance', node, message)
 
     return ValueSet(
         compiled.builtin_from_name(inference_state, force_unicode(str(b)))
@@ -794,7 +792,7 @@ def get_metaclass_filters(func):
     for metaclass in metaclasses:
         if metaclass.py__name__() == 'EnumMeta' \
                 and metaclass.get_root_context().py__name__() == 'enum':
-            filter_ = ParserTreeFilter(value=cls)
+            filter_ = ParserTreeFilter(context=cls)
             return [DictFilter({
                 name.string_name: EnumInstance(cls, name).name for name in filter_.values()
             })]