From f61246bf1395459a2ac972cf164f9ddef3ff40a8 Mon Sep 17 00:00:00 2001
From: Dave Halter
Date: Sun, 18 Aug 2019 00:47:21 +0200
Subject: [PATCH] Fix quite a few more tests. Only about a fifth failing now

---
 jedi/inference/gradual/stub_value.py | 12 ++++++
 jedi/inference/gradual/typing.py     |  6 +--
 jedi/inference/value/iterable.py     | 62 ++++++++++++++--------------
 jedi/inference/value/module.py       |  3 +-
 jedi/plugins/flask.py                |  4 +-
 jedi/plugins/stdlib.py               |  8 ++--
 6 files changed, 53 insertions(+), 42 deletions(-)

diff --git a/jedi/inference/gradual/stub_value.py b/jedi/inference/gradual/stub_value.py
index 1b781dbd..54515848 100644
--- a/jedi/inference/gradual/stub_value.py
+++ b/jedi/inference/gradual/stub_value.py
@@ -3,6 +3,7 @@ from jedi.inference.value.module import ModuleValue
 from jedi.inference.filters import ParserTreeFilter, \
     TreeNameDefinition
 from jedi.inference.gradual.typing import TypingModuleFilterWrapper
+from jedi.inference.context import ModuleContext
 
 
 class StubModuleValue(ModuleValue):
@@ -58,6 +59,17 @@ class TypingModuleWrapper(StubModuleValue):
         for f in filters:
             yield f
 
+    def as_context(self):
+        return TypingModuleContext(self)
+
+
+class TypingModuleContext(ModuleContext):
+    def get_filters(self, *args, **kwargs):
+        filters = super(TypingModuleContext, self).get_filters(*args, **kwargs)
+        yield TypingModuleFilterWrapper(next(filters))
+        for f in filters:
+            yield f
+
 
 # From here on down we make looking up the sys.version_info fast.
 class _StubName(TreeNameDefinition):
diff --git a/jedi/inference/gradual/typing.py b/jedi/inference/gradual/typing.py
index 74035703..5e4887f1 100644
--- a/jedi/inference/gradual/typing.py
+++ b/jedi/inference/gradual/typing.py
@@ -222,7 +222,7 @@ class TypingClassValue(_TypingClassMixin, TypingValue, ClassMixin):
     index_class = TypingClassValueWithIndex
 
 
-def _iter_over_arguments(maybe_tuple_value, defining_value):
+def _iter_over_arguments(maybe_tuple_value, defining_context):
     def iterate():
         if isinstance(maybe_tuple_value, SequenceLiteralValue):
             for lazy_value in maybe_tuple_value.py__iter__(valueualized_node=None):
@@ -234,9 +234,9 @@ def _iter_over_arguments(maybe_tuple_value, defining_value):
         for value in value_set:
             if is_string(value):
                 from jedi.inference.gradual.annotation import _get_forward_reference_node
-                node = _get_forward_reference_node(defining_value, value.get_safe_value())
+                node = _get_forward_reference_node(defining_context, value.get_safe_value())
                 if node is not None:
-                    for c in defining_value.infer_node(node):
+                    for c in defining_context.infer_node(node):
                         yield c
             else:
                 yield value
diff --git a/jedi/inference/value/iterable.py b/jedi/inference/value/iterable.py
index 338b3d4a..625405c0 100644
--- a/jedi/inference/value/iterable.py
+++ b/jedi/inference/value/iterable.py
@@ -150,7 +150,7 @@ def comprehension_from_atom(inference_state, value, atom):
 
     return cls(
         inference_state,
-        defining_value=value,
+        defining_context=value,
         sync_comp_for_node=sync_comp_for,
         entry_node=test_list_comp.children[0],
     )
@@ -167,7 +167,7 @@ class ComprehensionMixin(object):
         is_async = comp_for.parent.type == 'comp_for'
 
         input_node = comp_for.children[3]
-        parent_context = parent_context or self._defining_value
+        parent_context = parent_context or self._defining_context
         input_types = parent_context.infer_node(input_node)
         # TODO: simulate await if self.is_async
 
@@ -245,10 +245,10 @@ class Sequence(LazyAttributeOverwrite, IterableMixin):
 
 
 class _BaseComprehension(ComprehensionMixin):
-    def __init__(self, inference_state, defining_value, sync_comp_for_node, entry_node):
+    def __init__(self, inference_state, defining_context, sync_comp_for_node, entry_node):
         assert sync_comp_for_node.type == 'sync_comp_for'
         super(_BaseComprehension, self).__init__(inference_state)
-        self._defining_value = defining_value
+        self._defining_context = defining_context
         self._sync_comp_for_node = sync_comp_for_node
         self._entry_node = entry_node
 
@@ -277,10 +277,10 @@ class GeneratorComprehension(_BaseComprehension, GeneratorBase):
 class DictComprehension(ComprehensionMixin, Sequence):
     array_type = u'dict'
 
-    def __init__(self, inference_state, defining_value, sync_comp_for_node, key_node, value_node):
+    def __init__(self, inference_state, defining_context, sync_comp_for_node, key_node, value_node):
         assert sync_comp_for_node.type == 'sync_comp_for'
         super(DictComprehension, self).__init__(inference_state)
-        self._defining_value = defining_value
+        self._defining_context = defining_context
         self._sync_comp_for_node = sync_comp_for_node
         self._entry_node = key_node
         self._value_node = value_node
@@ -341,10 +341,10 @@ class SequenceLiteralValue(Sequence):
                '[': u'list',
                '{': u'set'}
 
-    def __init__(self, inference_state, defining_value, atom):
+    def __init__(self, inference_state, defining_context, atom):
         super(SequenceLiteralValue, self).__init__(inference_state)
         self.atom = atom
-        self._defining_value = defining_value
+        self._defining_context = defining_context
 
         if self.atom.type in self._TUPLE_LIKE:
             self.array_type = u'tuple'
@@ -357,14 +357,14 @@ class SequenceLiteralValue(Sequence):
         if self.array_type == u'dict':
             compiled_obj_index = compiled.create_simple_object(self.inference_state, index)
             for key, value in self.get_tree_entries():
-                for k in self._defining_value.infer_node(key):
+                for k in self._defining_context.infer_node(key):
                     try:
                         method = k.execute_operation
                     except AttributeError:
                         pass
                     else:
                         if method(compiled_obj_index, u'==').get_safe_value():
-                            return self._defining_value.infer_node(value)
+                            return self._defining_context.infer_node(value)
             raise SimpleGetItemNotFound('No key found in dictionary %s.' % self)
 
         if isinstance(index, slice):
@@ -372,7 +372,7 @@ class SequenceLiteralValue(Sequence):
         else:
             with reraise_getitem_errors(TypeError, KeyError, IndexError):
                 node = self.get_tree_entries()[index]
-            return self._defining_value.infer_node(node)
+            return self._defining_context.infer_node(node)
 
     def py__iter__(self, valueualized_node=None):
         """
@@ -383,7 +383,7 @@ class SequenceLiteralValue(Sequence):
             # Get keys.
             types = NO_VALUES
             for k, _ in self.get_tree_entries():
-                types |= self._defining_value.infer_node(k)
+                types |= self._defining_context.infer_node(k)
             # We don't know which dict index comes first, therefore always
             # yield all the types.
             for _ in types:
@@ -393,10 +393,10 @@ class SequenceLiteralValue(Sequence):
                 if node == ':' or node.type == 'subscript':
                     # TODO this should probably use at least part of the code
                     # of infer_subscript_list.
-                    yield LazyKnownValue(Slice(self._defining_value, None, None, None))
+                    yield LazyKnownValue(Slice(self._defining_context, None, None, None))
                 else:
-                    yield LazyTreeValue(self._defining_value, node)
-        for addition in check_array_additions(self._defining_value, self):
+                    yield LazyTreeValue(self._defining_context, node)
+        for addition in check_array_additions(self._defining_context, self):
             yield addition
 
     def py__len__(self):
@@ -405,7 +405,7 @@ class SequenceLiteralValue(Sequence):
 
     def _dict_values(self):
         return ValueSet.from_sets(
-            self._defining_value.infer_node(v)
+            self._defining_context.infer_node(v)
             for k, v in self.get_tree_entries()
         )
 
@@ -460,9 +460,9 @@ class SequenceLiteralValue(Sequence):
         resolved (as a string) and the values are still lazy values.
         """
         for key_node, value in self.get_tree_entries():
-            for key in self._defining_value.infer_node(key_node):
+            for key in self._defining_context.infer_node(key_node):
                 if is_string(key):
-                    yield key.get_safe_value(), LazyTreeValue(self._defining_value, value)
+                    yield key.get_safe_value(), LazyTreeValue(self._defining_context, value)
 
     def __repr__(self):
         return "<%s of %s>" % (self.__class__.__name__, self.atom)
@@ -471,9 +471,9 @@ class DictLiteralValue(_DictMixin, SequenceLiteralValue):
     array_type = u'dict'
 
 
-    def __init__(self, inference_state, defining_value, atom):
+    def __init__(self, inference_state, defining_context, atom):
         super(SequenceLiteralValue, self).__init__(inference_state)
-        self._defining_value = defining_value
+        self._defining_context = defining_context
         self.atom = atom
 
     @publish_method('values')
@@ -486,8 +486,8 @@ class DictLiteralValue(_DictMixin, SequenceLiteralValue):
         lazy_values = [
             LazyKnownValue(FakeSequence(
                 self.inference_state, u'tuple',
-                (LazyTreeValue(self._defining_value, key_node),
-                 LazyTreeValue(self._defining_value, value_node))
+                (LazyTreeValue(self._defining_context, key_node),
+                 LazyTreeValue(self._defining_context, value_node))
             ))
             for key_node, value_node in self.get_tree_entries()
         ]
@@ -495,7 +495,7 @@ class DictLiteralValue(_DictMixin, SequenceLiteralValue):
 
     def _dict_keys(self):
         return ValueSet.from_sets(
-            self._defining_value.infer_node(k)
+            self._defining_context.infer_node(k)
             for k, v in self.get_tree_entries()
         )
 
@@ -648,18 +648,18 @@ def unpack_tuple_to_dict(value, types, exprlist):
     raise NotImplementedError
 
 
-def check_array_additions(value, sequence):
+def check_array_additions(context, sequence):
     """ Just a mapper function for the internal _check_array_additions """
     if sequence.array_type not in ('list', 'set'):
         # TODO also check for dict updates
         return NO_VALUES
 
-    return _check_array_additions(value, sequence)
+    return _check_array_additions(context, sequence)
 
 
 @inference_state_method_cache(default=NO_VALUES)
 @debug.increase_indent
-def _check_array_additions(value, sequence):
+def _check_array_additions(context, sequence):
     """
     Checks if a `Array` has "add" (append, insert, extend) statements:
 
@@ -669,7 +669,7 @@ def _check_array_additions(value, sequence):
     from jedi.inference import arguments
 
     debug.dbg('Dynamic array search for %s' % sequence, color='MAGENTA')
-    module_context = value.get_root_context()
+    module_context = context.get_root_context()
     if not settings.dynamic_array_additions or isinstance(module_context, compiled.CompiledObject):
         debug.dbg('Dynamic array search aborted.', color='MAGENTA')
         return NO_VALUES
@@ -701,7 +701,7 @@ def _check_array_additions(value, sequence):
             continue
         else:
             for name in possible_names:
-                value_node = value.tree_node
+                value_node = context.tree_node
                 if not (value_node.start_pos < name.start_pos < value_node.end_pos):
                     continue
                 trailer = name.parent
@@ -718,9 +718,9 @@ def _check_array_additions(value, sequence):
                         continue
                     raise NotImplementedError
 
-                random_context = value.create_context(name)
+                random_context = context.create_context(name)
 
-                with recursion.execution_allowed(value.inference_state, power) as allowed:
+                with recursion.execution_allowed(context.inference_state, power) as allowed:
                     if allowed:
                         found = infer_call_of_leaf(
                             random_context,
@@ -774,7 +774,7 @@ class _ArrayInstance(HelperValueMixin):
         from jedi.inference import arguments
 
         if isinstance(var_args, arguments.TreeArguments):
-            additions = _check_array_additions(var_args.value, self.instance)
+            additions = _check_array_additions(var_args.context, self.instance)
             for addition in additions:
                 yield addition
 
diff --git a/jedi/inference/value/module.py b/jedi/inference/value/module.py
index 79b784e4..d965293a 100644
--- a/jedi/inference/value/module.py
+++ b/jedi/inference/value/module.py
@@ -162,12 +162,13 @@ class ModuleMixin(SubModuleDictMixin):
         from jedi.inference.imports import Importer
 
         modules = []
+        module_context = self.as_context()
         for i in self.tree_node.iter_imports():
             if i.is_star_import():
                 new = Importer(
                     self.inference_state,
                     import_path=i.get_paths()[-1],
-                    module_value=self,
+                    module_context=module_context,
                     level=i.level
                 ).follow()
 
diff --git a/jedi/plugins/flask.py b/jedi/plugins/flask.py
index 6444a6e0..693c3ae7 100644
--- a/jedi/plugins/flask.py
+++ b/jedi/plugins/flask.py
@@ -3,7 +3,7 @@ def import_module(callback):
     Handle "magic" Flask extension imports:
     ``flask.ext.foo`` is really ``flask_foo`` or ``flaskext.foo``.
     """
-    def wrapper(inference_state, import_names, module_value, *args, **kwargs):
+    def wrapper(inference_state, import_names, module_context, *args, **kwargs):
         if len(import_names) == 3 and import_names[:2] == ('flask', 'ext'):
             # New style.
             ipath = (u'flask_' + import_names[2]),
@@ -17,5 +17,5 @@ def import_module(callback):
             next(iter(value_set)),
             *args, **kwargs
         )
-        return callback(inference_state, import_names, module_value, *args, **kwargs)
+        return callback(inference_state, import_names, module_context, *args, **kwargs)
     return wrapper
diff --git a/jedi/plugins/stdlib.py b/jedi/plugins/stdlib.py
index 5e3fc310..08c91fae 100644
--- a/jedi/plugins/stdlib.py
+++ b/jedi/plugins/stdlib.py
@@ -31,7 +31,6 @@ from jedi.inference.value import iterable
 from jedi.inference.lazy_value import LazyTreeValue, LazyKnownValue, \
     LazyKnownValues
 from jedi.inference.names import ValueName, BaseTreeParamName
-from jedi.inference.syntax_tree import is_string
 from jedi.inference.filters import AttributeOverwrite, publish_method, \
     ParserTreeFilter, DictFilter
 from jedi.inference.signature import AbstractSignature, SignatureWrapper
@@ -305,8 +304,7 @@ def builtins_reversed(sequences, obj, arguments):
     key, lazy_value = next(arguments.unpack())
     cn = None
     if isinstance(lazy_value, LazyTreeValue):
-        # TODO access private
-        cn = ValueualizedNode(lazy_value.value, lazy_value.data)
+        cn = ValueualizedNode(lazy_value.context, lazy_value.data)
     ordered = list(sequences.iterate(cn))
 
     # Repack iterator values and then run it the normal way. This is
@@ -351,7 +349,7 @@ def builtins_isinstance(objects, types, arguments, inference_state):
                 message = 'TypeError: isinstance() arg 2 must be a ' \
                           'class, type, or tuple of classes and types, ' \
                           'not %s.' % cls_or_tup
-                analysis.add(lazy_value.value, 'type-error-isinstance', node, message)
+                analysis.add(lazy_value.context, 'type-error-isinstance', node, message)
 
     return ValueSet(
         compiled.builtin_from_name(inference_state, force_unicode(str(b)))
@@ -794,7 +792,7 @@ def get_metaclass_filters(func):
         for metaclass in metaclasses:
             if metaclass.py__name__() == 'EnumMeta' \
                     and metaclass.get_root_context().py__name__() == 'enum':
-                filter_ = ParserTreeFilter(value=cls)
+                filter_ = ParserTreeFilter(context=cls)
                 return [DictFilter({
                     name.string_name: EnumInstance(cls, name).name for name in filter_.values()
                 })]