Use some solid caching for typing

This commit is contained in:
Dave Halter
2018-09-12 22:58:35 +02:00
parent 6f8385143f
commit a646d930c8
4 changed files with 61 additions and 32 deletions

View File

@@ -14,9 +14,32 @@ from jedi.parser_utils import clean_scope_docstring, get_doc_with_call_signature
from jedi.common import BaseContextSet, BaseContext from jedi.common import BaseContextSet, BaseContext
from jedi.evaluate.helpers import SimpleGetItemNotFound, execute_evaluated from jedi.evaluate.helpers import SimpleGetItemNotFound, execute_evaluated
from jedi.evaluate.utils import safe_property from jedi.evaluate.utils import safe_property
from jedi.evaluate.cache import evaluator_as_method_param_cache
def _is_same_class(class1, class2):
if class1 == class2:
return True
try:
comp_func = class1.is_same_class
except AttributeError:
try:
comp_func = class2.is_same_class
except AttributeError:
return False
else:
return comp_func(class1)
else:
return comp_func(class2)
class HelperContextMixin: class HelperContextMixin:
@classmethod
@evaluator_as_method_param_cache()
def create_cached(cls, *args, **kwargs):
    # Alternate constructor; the decorator presumably memoizes per
    # evaluator so equal construction arguments reuse one instance --
    # TODO confirm against jedi.evaluate.cache.evaluator_as_method_param_cache.
    return cls(*args, **kwargs)
def execute_evaluated(self, *value_list): def execute_evaluated(self, *value_list):
return execute_evaluated(self, *value_list) return execute_evaluated(self, *value_list)
@@ -37,6 +60,13 @@ class HelperContextMixin:
return f.filter_name(filters) return f.filter_name(filters)
return f.find(filters, attribute_lookup=not search_global) return f.find(filters, attribute_lookup=not search_global)
def is_sub_class_of(self, class_context):
    """Return True if any class in this context's MRO matches *class_context*."""
    # Local import to avoid a circular module dependency.
    from jedi.evaluate.context.klass import py__mro__
    return any(
        _is_same_class(mro_entry, class_context)
        for mro_entry in py__mro__(self)
    )
class Context(HelperContextMixin, BaseContext): class Context(HelperContextMixin, BaseContext):
""" """
@@ -274,6 +304,12 @@ class ContextSet(BaseContextSet):
def get_item(self, *args, **kwargs): def get_item(self, *args, **kwargs):
return ContextSet.from_sets(_getitem(c, *args, **kwargs) for c in self._set) return ContextSet.from_sets(_getitem(c, *args, **kwargs) for c in self._set)
def is_sub_class_of(self, class_context):
    """Return True if at least one contained context is a subclass of *class_context*."""
    return any(
        context.is_sub_class_of(class_context)
        for context in self._set
    )
NO_CONTEXTS = ContextSet() NO_CONTEXTS = ContextSet()

View File

@@ -333,11 +333,10 @@ def signature_matches(function_context, arguments):
function_context.parent_context, function_context.parent_context,
param_node.annotation param_node.annotation
) )
return has_same_class( return any(
argument.infer().py__class__(), argument.infer().py__class__().is_sub_class_of(c)
_type_vars_to_classes(annotation_result), for c in _type_vars_to_classes(annotation_result)
) )
return True return True

View File

@@ -41,9 +41,9 @@ class TypingName(AbstractTreeName):
class _BaseTypingContext(Context): class _BaseTypingContext(Context):
def __init__(self, name): def __init__(self, evaluator, name):
super(_BaseTypingContext, self).__init__( super(_BaseTypingContext, self).__init__(
name.parent_context.evaluator, evaluator,
parent_context=name.parent_context, parent_context=name.parent_context,
) )
self._name = name self._name = name
@@ -72,6 +72,9 @@ class _BaseTypingContext(Context):
class TypingModuleName(NameWrapper): class TypingModuleName(NameWrapper):
def __init__(self, *args, **kwargs):
    """Wrap a name; refuse to wrap an already-wrapped TypingModuleName."""
    # Double wrapping would remap typing names twice; fail loudly in debug runs.
    assert not isinstance(args[0], TypingModuleName)
    # Explicit super() arguments: the zero-argument form is Python-3-only,
    # while the rest of this file keeps Python 2 compatibility (see the
    # other super(Class, self).__init__ calls). Also drop the pointless
    # `return` -- __init__ must return None.
    super(TypingModuleName, self).__init__(*args, **kwargs)
def infer(self): def infer(self):
return ContextSet.from_iterable(self._remap()) return ContextSet.from_iterable(self._remap())
@@ -83,28 +86,28 @@ class TypingModuleName(NameWrapper):
except KeyError: except KeyError:
pass pass
else: else:
yield TypeAlias(evaluator, self.tree_name, actual) yield TypeAlias.create_cached(evaluator, self.tree_name, actual)
return return
if name in _PROXY_CLASS_TYPES: if name in _PROXY_CLASS_TYPES:
yield TypingClassContext(self) yield TypingClassContext(evaluator, self)
elif name in _PROXY_TYPES: elif name in _PROXY_TYPES:
yield TypingContext(self) yield TypingContext.create_cached(evaluator, self)
elif name == 'runtime': elif name == 'runtime':
# We don't want anything here, not sure what this function is # We don't want anything here, not sure what this function is
# supposed to do, since it just appears in the stubs and shouldn't # supposed to do, since it just appears in the stubs and shouldn't
# have any effects there (because it's never executed). # have any effects there (because it's never executed).
return return
elif name == 'TypeVar': elif name == 'TypeVar':
yield TypeVarClass(self) yield TypeVarClass.create_cached(evaluator, self)
elif name == 'Any': elif name == 'Any':
yield Any(self) yield Any.create_cached(evaluator, self)
elif name == 'TYPE_CHECKING': elif name == 'TYPE_CHECKING':
# This is needed for e.g. imports that are only available for type # This is needed for e.g. imports that are only available for type
# checking or are in cycles. The user can then check this variable. # checking or are in cycles. The user can then check this variable.
yield builtin_from_name(evaluator, u'True') yield builtin_from_name(evaluator, u'True')
elif name == 'overload': elif name == 'overload':
yield OverloadFunction(self) yield OverloadFunction.create_cached(evaluator, self)
elif name == 'cast': elif name == 'cast':
# TODO implement cast # TODO implement cast
for c in self._wrapped_name.infer(): # Fuck my life Python 2 for c in self._wrapped_name.infer(): # Fuck my life Python 2
@@ -128,8 +131,8 @@ class TypingModuleFilterWrapper(FilterWrapper):
class _WithIndexBase(_BaseTypingContext): class _WithIndexBase(_BaseTypingContext):
def __init__(self, name, index_context, context_of_index): def __init__(self, evaluator, name, index_context, context_of_index):
super(_WithIndexBase, self).__init__(name) super(_WithIndexBase, self).__init__(evaluator, name)
self._index_context = index_context self._index_context = index_context
self._context_of_index = context_of_index self._context_of_index = context_of_index
@@ -176,7 +179,8 @@ class TypingContext(_BaseTypingContext):
def py__getitem__(self, index_context_set, contextualized_node): def py__getitem__(self, index_context_set, contextualized_node):
return ContextSet.from_iterable( return ContextSet.from_iterable(
self.index_class( self.index_class.create_cached(
self.evaluator,
self._name, self._name,
index_context, index_context,
context_of_index=contextualized_node.context) context_of_index=contextualized_node.context)
@@ -322,17 +326,6 @@ class Any(_BaseTypingContext):
return NO_CONTEXTS return NO_CONTEXTS
class GenericClass(object):
    """Transparent proxy around a class context.

    All attribute access not found here is forwarded to the wrapped
    ``class_context``.
    """

    def __init__(self, class_context):
        self._class_context = class_context

    def __getattr__(self, name):
        # Only called for attributes missing on the proxy itself.
        return getattr(self._class_context, name)

    def __repr__(self):
        return '%s(%s)' % (type(self).__name__, self._class_context)
class TypeVarClass(_BaseTypingContext): class TypeVarClass(_BaseTypingContext):
def py__call__(self, arguments): def py__call__(self, arguments):
unpacked = arguments.unpack() unpacked = arguments.unpack()
@@ -344,7 +337,7 @@ class TypeVarClass(_BaseTypingContext):
debug.warning('Found a variable without a name %s', arguments) debug.warning('Found a variable without a name %s', arguments)
return NO_CONTEXTS return NO_CONTEXTS
return ContextSet(TypeVar(self._name, var_name, unpacked)) return ContextSet(TypeVar.create_cached(self.evaluator, self._name, var_name, unpacked))
def _find_string_name(self, lazy_context): def _find_string_name(self, lazy_context):
if lazy_context is None: if lazy_context is None:
@@ -363,8 +356,8 @@ class TypeVarClass(_BaseTypingContext):
class TypeVar(_BaseTypingContext): class TypeVar(_BaseTypingContext):
def __init__(self, class_name, var_name, unpacked_args): def __init__(self, evaluator, class_name, var_name, unpacked_args):
super(TypeVar, self).__init__(class_name) super(TypeVar, self).__init__(evaluator, class_name)
self.var_name = var_name self.var_name = var_name
self._constraints_lazy_contexts = [] self._constraints_lazy_contexts = []
@@ -541,6 +534,9 @@ class AnnotatedClass(_AbstractAnnotatedClass):
def get_given_types(self): def get_given_types(self):
return list(_iter_over_arguments(self._index_context, self._context_of_index)) return list(_iter_over_arguments(self._index_context, self._context_of_index))
def is_same_class(self, other):
return self == other
class AnnotatedSubClass(_AbstractAnnotatedClass): class AnnotatedSubClass(_AbstractAnnotatedClass):
def __init__(self, evaluator, parent_context, tree_node, given_types): def __init__(self, evaluator, parent_context, tree_node, given_types):
@@ -561,7 +557,7 @@ class LazyAnnotatedBaseClass(object):
for base in self._lazy_base_class.infer(): for base in self._lazy_base_class.infer():
if isinstance(base, _AbstractAnnotatedClass): if isinstance(base, _AbstractAnnotatedClass):
# Here we have to recalculate the given types. # Here we have to recalculate the given types.
yield AnnotatedSubClass( yield AnnotatedSubClass.create_cached(
base.evaluator, base.evaluator,
base.parent_context, base.parent_context,
base.tree_node, base.tree_node,

View File

@@ -287,8 +287,6 @@ class StubParserTreeFilter(ParserTreeFilter):
# for all API accesses. Otherwise the user will be directed to the # for all API accesses. Otherwise the user will be directed to the
# non-stub positions (see NameWithStub). # non-stub positions (see NameWithStub).
n = TreeNameDefinition(self.context, name) n = TreeNameDefinition(self.context, name)
if isinstance(self.context, TypingModuleWrapper):
n = TypingModuleName(n)
if len(non_stub_names): if len(non_stub_names):
for non_stub_name in non_stub_names: for non_stub_name in non_stub_names:
if isinstance(non_stub_name, CompiledName): if isinstance(non_stub_name, CompiledName):