forked from VimPlug/jedi
Merge branch 'master' into dict
@@ -10,7 +10,7 @@ current module will be checked for appearances of ``arr.append``,
 content will be added

 This can be really cpu intensive, as you can imagine. Because |jedi| has to
-follow **every** ``append`` and check wheter it's the right array. However this
+follow **every** ``append`` and check whether it's the right array. However this
 works pretty good, because in *slow* cases, the recursion detector and other
 settings will stop this process.
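A hypothetical user snippet (not part of the diff) showing the pattern this docstring describes - to know what ``arr`` contains, every ``arr.append`` call in the module has to be followed, not just the ``[]`` literal:

arr = []


def fill(value):
    arr.append(value)  # each call site like this contributes to the inferred contents


fill(42)
fill("text")
# completion on an element of ``arr`` should now offer both int and str members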
@@ -188,8 +188,8 @@ class _Modification(ValueWrapper):


 class DictModification(_Modification):
-    def py__iter__(self):
-        for lazy_context in self._wrapped_value.py__iter__():
+    def py__iter__(self, contextualized_node=None):
+        for lazy_context in self._wrapped_value.py__iter__(contextualized_node):
             yield lazy_context
         yield self._contextualized_key
@@ -198,7 +198,7 @@ class DictModification(_Modification):


 class ListModification(_Modification):
-    def py__iter__(self):
-        for lazy_context in self._wrapped_value.py__iter__():
+    def py__iter__(self, contextualized_node=None):
+        for lazy_context in self._wrapped_value.py__iter__(contextualized_node):
             yield lazy_context
         yield LazyKnownValues(self._assigned_values)
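An illustrative sketch (plain Python, not jedi code, names made up) of the wrapping pattern both modification classes above implement - yield everything the wrapped value already yields, then yield the element contributed by the tracked modification:

class AppendedSequence(object):
    # mirrors ListModification.py__iter__: wrapped items first, then the added one
    def __init__(self, wrapped, appended_value):
        self._wrapped = wrapped
        self._appended_value = appended_value

    def __iter__(self):
        for item in self._wrapped:
            yield item
        yield self._appended_value


assert list(AppendedSequence([1, 2], 3)) == [1, 2, 3]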
@@ -21,6 +21,7 @@ from jedi.inference.value import iterable
 from jedi import parser_utils
 from jedi.inference.parser_cache import get_yield_exprs
 from jedi.inference.helpers import values_from_qualified_names
+from jedi.inference.gradual.generics import TupleGenericManager


 class LambdaName(AbstractNameDefinition):
@@ -96,9 +97,6 @@ class FunctionMixin(object):


 class FunctionValue(use_metaclass(CachedMetaClass, FunctionMixin, FunctionAndClassBase)):
-    def is_function(self):
-        return True
-
     @classmethod
     def from_context(cls, context, tree_node):
         def create(tree_node):
@@ -161,6 +159,9 @@ class MethodValue(FunctionValue):


 class BaseFunctionExecutionContext(ValueContext, TreeContextMixin):
+    def is_function_execution(self):
+        return True
+
     def _infer_annotations(self):
         raise NotImplementedError
@@ -276,17 +277,19 @@ class BaseFunctionExecutionContext(ValueContext, TreeContextMixin):
             for lazy_value in self.get_yield_lazy_values()
         )

+    def is_generator(self):
+        return bool(get_yield_exprs(self.inference_state, self.tree_node))
+
     def infer(self):
         """
         Created to be used by inheritance.
         """
         inference_state = self.inference_state
         is_coroutine = self.tree_node.parent.type in ('async_stmt', 'async_funcdef')
-        is_generator = bool(get_yield_exprs(inference_state, self.tree_node))
-        from jedi.inference.gradual.typing import GenericClass
+        from jedi.inference.gradual.base import GenericClass

         if is_coroutine:
-            if is_generator:
+            if self.is_generator():
                 if inference_state.environment.version_info < (3, 6):
                     return NO_VALUES
                 async_generator_classes = inference_state.typing_module \
@@ -297,7 +300,7 @@ class BaseFunctionExecutionContext(ValueContext, TreeContextMixin):
                 generics = (yield_values.py__class__(), NO_VALUES)
                 return ValueSet(
                     # In Python 3.6 AsyncGenerator is still a class.
-                    GenericClass(c, generics)
+                    GenericClass(c, TupleGenericManager(generics))
                     for c in async_generator_classes
                 ).execute_annotation()
             else:
@@ -308,10 +311,10 @@ class BaseFunctionExecutionContext(ValueContext, TreeContextMixin):
                 # Only the first generic is relevant.
                 generics = (return_values.py__class__(), NO_VALUES, NO_VALUES)
                 return ValueSet(
-                    GenericClass(c, generics) for c in async_classes
+                    GenericClass(c, TupleGenericManager(generics)) for c in async_classes
                 ).execute_annotation()
         else:
-            if is_generator:
+            if self.is_generator():
                 return ValueSet([iterable.Generator(inference_state, self)])
             else:
                 return self.get_return_values()
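Hypothetical user functions (not part of the diff) corresponding to the branches of ``infer()`` above - a call of each is inferred as an async generator, a coroutine, a generator, or the plain return values, respectively:

async def agen():    # ``async def`` with ``yield``  -> typing.AsyncGenerator[...]
    yield 1


async def coro():    # plain ``async def``           -> typing.Coroutine[...]
    return 1


def gen():           # ``yield`` without ``async``   -> a Generator instance
    yield 1


def plain():         # no ``yield``                  -> the inferred return values
    return 1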
@@ -17,8 +17,9 @@ from jedi.inference.arguments import ValuesArguments, TreeArgumentsWrapper
 from jedi.inference.value.function import \
     FunctionValue, FunctionMixin, OverloadedFunctionValue, \
     BaseFunctionExecutionContext, FunctionExecutionContext
-from jedi.inference.value.klass import apply_py__get__, ClassFilter
+from jedi.inference.value.klass import ClassFilter
 from jedi.inference.value.dynamic_arrays import get_dynamic_array_instance
+from jedi.parser_utils import function_is_staticmethod, function_is_classmethod


 class InstanceExecutedParamName(ParamName):
@@ -41,7 +42,18 @@ class AnonymousMethodExecutionFilter(AnonymousFunctionExecutionFilter):

     def _convert_param(self, param, name):
         if param.position_index == 0:
-            return InstanceExecutedParamName(self._instance, self._function_value, name)
+            if function_is_classmethod(self._function_value.tree_node):
+                return InstanceExecutedParamName(
+                    self._instance.py__class__(),
+                    self._function_value,
+                    name
+                )
+            elif not function_is_staticmethod(self._function_value.tree_node):
+                return InstanceExecutedParamName(
+                    self._instance,
+                    self._function_value,
+                    name
+                )
         return super(AnonymousMethodExecutionFilter, self)._convert_param(param, name)
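A plain-Python illustration (not jedi code) of the three cases the new ``_convert_param`` branches handle - what the first parameter of a method refers to when it is looked up on an instance:

class Example(object):
    def method(self):        # plain method: ``self`` is the instance
        return self

    @classmethod
    def cmethod(cls):        # classmethod: ``cls`` is the class, not the instance
        return cls

    @staticmethod
    def smethod(arg):        # staticmethod: no implicit first argument at all
        return arg


obj = Example()
assert obj.method() is obj
assert obj.cmethod() is Example
assert obj.smethod(1) == 1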
@@ -107,11 +119,23 @@ class AbstractInstanceValue(Value):
         call_funcs = self.py__getattribute__('__call__').py__get__(self, self.class_value)
         return [s.bind(self) for s in call_funcs.get_signatures()]

+    def get_function_slot_names(self, name):
+        # Searches for Python functions in classes.
+        return []
+
+    def execute_function_slots(self, names, *inferred_args):
+        return ValueSet.from_sets(
+            name.infer().execute_with_values(*inferred_args)
+            for name in names
+        )
+
     def __repr__(self):
         return "<%s of %s>" % (self.__class__.__name__, self.class_value)


 class CompiledInstance(AbstractInstanceValue):
     # This is not really a compiled class, it's just an instance from a
     # compiled class.
     def __init__(self, inference_state, parent_context, class_value, arguments):
         super(CompiledInstance, self).__init__(inference_state, parent_context,
                                                class_value)
@@ -130,9 +154,6 @@ class CompiledInstance(AbstractInstanceValue):
     def name(self):
         return compiled.CompiledValueName(self, self.class_value.name.string_name)

-    def is_compiled(self):
-        return True
-
     def is_stub(self):
         return False
@@ -215,8 +236,8 @@ class _BaseTreeInstance(AbstractInstanceValue):
         # We are inversing this, because a hand-crafted `__getattribute__`
         # could still call another hand-crafted `__getattr__`, but not the
         # other way around.
-        names = (self.get_function_slot_names(u'__getattr__') or
-                 self.get_function_slot_names(u'__getattribute__'))
+        names = (self.get_function_slot_names(u'__getattr__')
+                 or self.get_function_slot_names(u'__getattribute__'))
         return self.execute_function_slots(names, name)

     def py__getitem__(self, index_value_set, contextualized_node):
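A plain-Python illustration (not jedi code) of the comment above: ``__getattribute__`` runs for every attribute lookup and can still fall back to a hand-crafted ``__getattr__``, while ``__getattr__`` only ever runs after the normal lookup fails:

class Proxy(object):
    def __getattribute__(self, name):
        try:
            return object.__getattribute__(self, name)
        except AttributeError:
            return self.__getattr__(name)  # hand-crafted fallback to __getattr__

    def __getattr__(self, name):
        return 'fallback:' + name


assert Proxy().missing == 'fallback:missing'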
@@ -263,7 +284,7 @@ class _BaseTreeInstance(AbstractInstanceValue):

         return ValueSet.from_sets(name.infer().execute(arguments) for name in names)

-    def py__get__(self, obj, class_value):
+    def py__get__(self, instance, class_value):
         """
         obj may be None.
         """
@@ -271,9 +292,9 @@ class _BaseTreeInstance(AbstractInstanceValue):
         # `method` is the new parent of the array, don't know if that's good.
         names = self.get_function_slot_names(u'__get__')
         if names:
-            if obj is None:
-                obj = compiled.builtin_from_name(self.inference_state, u'None')
-            return self.execute_function_slots(names, obj, class_value)
+            if instance is None:
+                instance = compiled.builtin_from_name(self.inference_state, u'None')
+            return self.execute_function_slots(names, instance, class_value)
         else:
             return ValueSet([self])
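For reference (plain Python, not jedi code): in the descriptor protocol that ``py__get__`` models, ``__get__`` receives the instance, and that instance is None when the attribute is accessed on the class itself - the case the ``instance is None`` branch above substitutes with the builtin ``None`` value:

class Descriptor(object):
    def __get__(self, instance, owner):
        if instance is None:
            return self      # accessed as ``Owner.attr``
        return 42            # accessed as ``Owner().attr``


class Owner(object):
    attr = Descriptor()


assert isinstance(Owner.attr, Descriptor)
assert Owner().attr == 42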
@@ -287,12 +308,6 @@ class _BaseTreeInstance(AbstractInstanceValue):
             return names
         return []

-    def execute_function_slots(self, names, *inferred_args):
-        return ValueSet.from_sets(
-            name.infer().execute_with_values(*inferred_args)
-            for name in names
-        )
-

 class TreeInstance(_BaseTreeInstance):
     def __init__(self, inference_state, parent_context, class_value, arguments):
@@ -320,7 +335,8 @@ class TreeInstance(_BaseTreeInstance):
         for signature in self.class_value.py__getattribute__('__init__').get_signatures():
             # Just take the first result, it should always be one, because we
             # control the typeshed code.
-            if not signature.matches_signature(args):
+            if not signature.matches_signature(args) \
+                    or signature.value.tree_node is None:
                 # First check if the signature even matches, if not we don't
                 # need to infer anything.
                 continue
@@ -481,7 +497,7 @@ class LazyInstanceClassName(object):
     @iterator_to_value_set
     def infer(self):
         for result_value in self._class_member_name.infer():
-            for c in apply_py__get__(result_value, self._instance, self._instance.py__class__()):
+            for c in result_value.py__get__(self._instance, self._instance.py__class__()):
                 yield c

     def __getattr__(self, name):
@@ -514,7 +530,7 @@ class InstanceClassFilter(AbstractFilter):
         ]

     def __repr__(self):
-        return '<%s for %s>' % (self.__class__.__name__, self._class_filter.context)
+        return '<%s for %s>' % (self.__class__.__name__, self._class_filter)


 class SelfAttributeFilter(ClassFilter):
@@ -544,17 +560,18 @@ class SelfAttributeFilter(ClassFilter):
                 if name.is_definition() and self._access_possible(name, from_instance=True):
                     # TODO filter non-self assignments instead of this bad
                     # filter.
-                    if self._is_in_right_scope(name):
+                    if self._is_in_right_scope(trailer.parent.children[0], name):
                         yield name

-    def _is_in_right_scope(self, name):
-        base = name
-        hit_funcdef = False
-        while True:
-            base = search_ancestor(base, 'funcdef', 'classdef', 'lambdef')
-            if base is self._parser_scope:
-                return hit_funcdef
-            hit_funcdef = True
+    def _is_in_right_scope(self, self_name, name):
+        self_context = self._node_context.create_context(self_name)
+        names = self_context.goto(self_name, position=self_name.start_pos)
+        return any(
+            n.api_type == 'param'
+            and n.tree_name.get_definition().position_index == 0
+            and n.parent_context.tree_node is self._parser_scope
+            for n in names
+        )

     def _convert_names(self, names):
         return [SelfName(self._instance, self._node_context, name) for name in names]
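Hypothetical user code (not part of the diff) showing the kind of name this filter is meant to yield - attribute definitions made through a method's first parameter, whatever that parameter is called:

class Config(object):
    def __init__(self, path):
        self.path = path        # definition through the first parameter -> a self attribute

    def rename(this, new_path):
        this.path = new_path    # still the first parameter, so it still counts


assert Config('/tmp').path == '/tmp'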
@@ -194,17 +194,18 @@ class Sequence(LazyAttributeOverwrite, IterableMixin):
         return (self.merge_types_of_iterate().py__class__(),)

     def _get_wrapped_value(self):
-        from jedi.inference.gradual.typing import GenericClass
+        from jedi.inference.gradual.base import GenericClass
+        from jedi.inference.gradual.generics import TupleGenericManager
         klass = compiled.builtin_from_name(self.inference_state, self.array_type)
-        c, = GenericClass(klass, self._get_generics()).execute_annotation()
+        c, = GenericClass(
+            klass,
+            TupleGenericManager(self._get_generics())
+        ).execute_annotation()
         return c

     def py__bool__(self):
         return None  # We don't know the length, because of appends.

-    def py__class__(self):
-        return compiled.builtin_from_name(self.inference_state, self.array_type)
-
     @safe_property
     def parent(self):
         return self.inference_state.builtins_module
@@ -25,7 +25,7 @@ py__iter__() Returns a generator of a set of types.
 py__class__()                          Returns the class of an instance.
 py__simple_getitem__(index: int/str)   Returns a a set of types of the index.
                                        Can raise an IndexError/KeyError.
-py__getitem__(indexes: ValueSet)       Returns a a set of types of the index.
+py__getitem__(indexes: ValueSet)       Returns a a set of types of the index.
 py__file__()                           Only on modules. Returns None if does
                                        not exist.
 py__package__() -> List[str]           Only on modules. For the import system.
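Hypothetical user code (not jedi code) for the two subscription cases the table above distinguishes - a literal index that can be looked up directly versus an index that is itself only known as a set of inferred values:

import random

pair = (1, 'two')

a = pair[0]                # literal index: the simple lookup can yield exactly ``int``

idx = random.choice([0, 1])
b = pair[idx]              # inferred index: the result is the union of ``int`` and ``str``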
@@ -50,23 +50,13 @@ from jedi.inference.base_value import ValueSet, iterator_to_value_set, \
     NO_VALUES
 from jedi.inference.context import ClassContext
 from jedi.inference.value.function import FunctionAndClassBase
+from jedi.inference.gradual.generics import LazyGenericManager, TupleGenericManager
 from jedi.plugins import plugin_manager


-def apply_py__get__(value, instance, class_value):
-    try:
-        method = value.py__get__
-    except AttributeError:
-        yield value
-    else:
-        for descriptor_value in method(instance, class_value):
-            yield descriptor_value
-
-
 class ClassName(TreeNameDefinition):
     def __init__(self, class_value, tree_name, name_context, apply_decorators):
-        super(ClassName, self).__init__(class_value.as_context(), tree_name)
-        self._name_context = name_context
+        super(ClassName, self).__init__(name_context, tree_name)
         self._apply_decorators = apply_decorators
         self._class_value = class_value
@@ -75,13 +65,11 @@ class ClassName(TreeNameDefinition):
         # We're using a different value to infer, so we cannot call super().
         from jedi.inference.syntax_tree import tree_name_to_values
         inferred = tree_name_to_values(
-            self.parent_context.inference_state, self._name_context, self.tree_name)
+            self.parent_context.inference_state, self.parent_context, self.tree_name)

         for result_value in inferred:
             if self._apply_decorators:
-                for c in apply_py__get__(result_value,
-                                         instance=None,
-                                         class_value=self._class_value):
+                for c in result_value.py__get__(instance=None, class_value=self._class_value):
                     yield c
             else:
                 yield result_value
@@ -278,20 +266,22 @@ class ClassValue(use_metaclass(CachedMetaClass, ClassMixin, FunctionAndClassBase
         )]

     def py__getitem__(self, index_value_set, contextualized_node):
-        from jedi.inference.gradual.typing import LazyGenericClass
+        from jedi.inference.gradual.base import GenericClass
         if not index_value_set:
             return ValueSet([self])
         return ValueSet(
-            LazyGenericClass(
+            GenericClass(
                 self,
-                index_value,
-                value_of_index=contextualized_node.context,
+                LazyGenericManager(
+                    context_of_index=contextualized_node.context,
+                    index_value=index_value,
+                )
             )
             for index_value in index_value_set
         )

     def define_generics(self, type_var_dict):
-        from jedi.inference.gradual.typing import GenericClass
+        from jedi.inference.gradual.base import GenericClass

         def remap_type_vars():
             """
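Hypothetical user code (not part of the diff): subscripting a class is the situation ``py__getitem__`` above models - the index values become the generics of the resulting parameterized class:

from typing import Dict, List

names = List[str]          # one index value
mapping = Dict[str, int]   # several index values


def first(items: List[str]) -> str:
    return items[0]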
@@ -309,7 +299,7 @@ class ClassValue(use_metaclass(CachedMetaClass, ClassMixin, FunctionAndClassBase
         if type_var_dict:
             return ValueSet([GenericClass(
                 self,
-                generics=tuple(remap_type_vars())
+                TupleGenericManager(tuple(remap_type_vars()))
             )])
         return ValueSet({self})
@@ -3,7 +3,7 @@ import os

 from jedi import debug
 from jedi.inference.cache import inference_state_method_cache
-from jedi.inference.names import ValueNameMixin, AbstractNameDefinition
+from jedi.inference.names import AbstractNameDefinition, ModuleName
 from jedi.inference.filters import GlobalNameFilter, ParserTreeFilter, DictFilter, MergedFilter
 from jedi.inference import compiled
 from jedi.inference.base_value import TreeValue
@@ -37,18 +37,6 @@ class _ModuleAttributeName(AbstractNameDefinition):
         return compiled.get_string_value_set(self.parent_context.inference_state)


-class ModuleName(ValueNameMixin, AbstractNameDefinition):
-    start_pos = 1, 0
-
-    def __init__(self, value, name):
-        self._value = value
-        self._name = name
-
-    @property
-    def string_name(self):
-        return self._name
-
-
 def iter_module_names(inference_state, paths):
     # Python modules/packages
     for n in inference_state.compiled_subprocess.list_module_names(paths):
@@ -83,11 +71,11 @@ class SubModuleDictMixin(object):
         package).
         """
         names = {}
-        if self.is_package:
+        if self.is_package():
             mods = iter_module_names(self.inference_state, self.py__path__())
             for name in mods:
                 # It's obviously a relative import to the current module.
-                names[name] = SubModuleName(self, name)
+                names[name] = SubModuleName(self.as_context(), name)

         # In the case of an import like `from x.` we don't need to
         # add all the variables, this is only about submodules.
@@ -95,13 +83,15 @@ class SubModuleDictMixin(object):


 class ModuleMixin(SubModuleDictMixin):
+    _module_name_class = ModuleName
+
     def get_filters(self, origin_scope=None):
         yield MergedFilter(
             ParserTreeFilter(
                 parent_context=self.as_context(),
                 origin_scope=origin_scope
             ),
-            GlobalNameFilter(self, self.tree_node),
+            GlobalNameFilter(self.as_context(), self.tree_node),
         )
         yield DictFilter(self.sub_modules_dict())
         yield DictFilter(self._module_attributes_dict())
@@ -121,7 +111,7 @@ class ModuleMixin(SubModuleDictMixin):
     @property
     @inference_state_method_cache()
     def name(self):
-        return ModuleName(self, self._string_name)
+        return self._module_name_class(self, self._string_name)

     @property
     def _string_name(self):
@@ -200,7 +190,7 @@ class ModuleValue(ModuleMixin, TreeValue):
         self._path = file_io.path
         self.string_names = string_names  # Optional[Tuple[str, ...]]
         self.code_lines = code_lines
-        self.is_package = is_package
+        self._is_package = is_package

     def is_stub(self):
         if self._path is not None and self._path.endswith('.pyi'):
@@ -224,8 +214,11 @@ class ModuleValue(ModuleMixin, TreeValue):

         return os.path.abspath(self._path)

+    def is_package(self):
+        return self._is_package
+
     def py__package__(self):
-        if self.is_package:
+        if self._is_package:
             return self.string_names
         return self.string_names[:-1]
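A plain-Python note (not jedi code) on why the attribute is renamed to ``_is_package`` now that ``is_package`` is a method: a leftover ``if self.is_package:`` would test the bound method object, which is always truthy:

class Module(object):
    def __init__(self, is_package):
        self._is_package = is_package

    def is_package(self):
        return self._is_package


m = Module(False)
assert not m.is_package()   # correct: call the method
assert bool(m.is_package)   # pitfall: the bound method itself is always truthy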
@@ -235,7 +228,7 @@ class ModuleValue(ModuleMixin, TreeValue):
         is a list of paths (strings).
         Returns None if the module is not a package.
         """
-        if not self.is_package:
+        if not self._is_package:
             return None

         # A namespace package is typically auto generated and ~10 lines long.
@@ -61,7 +61,6 @@ class ImplicitNamespaceValue(Value, SubModuleDictMixin):
     def is_stub(self):
         return False

-    @property
     def is_package(self):
         return True