
Make a lot of progress with typeshed/typing

Dave Halter
2018-08-25 02:35:31 +02:00
parent 05cf6af546
commit 3526def0a0
4 changed files with 136 additions and 82 deletions

View File

@@ -217,6 +217,7 @@ def _get_item(context, index_contexts, contextualized_node):
             ContextSet.from_set(unused_contexts),
             contextualized_node
         )
+    debug.dbg('py__getitem__ result: %s', result)
     return result

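For orientation, here is a rough standalone sketch of the kind of subscript dispatch that debug call is instrumenting. The py__simple_getitem__ = None assignments in the typing contexts further down suggest a fast path for literal indices with a generic py__getitem__ fallback; the names get_item and ToyList below are invented for illustration and are not jedi's actual code.

def get_item(context, index, log=print):
    # Try a cheap lookup for literal indices first; a context can opt out by
    # setting py__simple_getitem__ = None, which forces the generic path.
    simple = getattr(context, 'py__simple_getitem__', None)
    if simple is not None and isinstance(index, (int, str)):
        result = simple(index)
    else:
        result = context.py__getitem__(index)
    log('py__getitem__ result: %s' % (result,))  # same flavour of output the commit adds
    return result


class ToyList(object):
    def py__simple_getitem__(self, index):
        return 'str'  # pretend every element is a str

    def py__getitem__(self, index):
        return {'str'}


print(get_item(ToyList(), 0))  # prints the debug line, then: str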
View File

@@ -38,6 +38,7 @@ py__doc__(include_call_signature: Returns the docstring for a context.
 ====================================== ========================================
 """
+from jedi import debug
 from jedi._compatibility import use_metaclass
 from jedi.parser_utils import get_parent_scope
 from jedi.evaluate.cache import evaluator_method_cache, CachedMetaClass
@@ -150,7 +151,7 @@ class ClassContext(use_metaclass(CachedMetaClass, TreeContext)):
                       File "<stdin>", line 1, in <module>
                     TypeError: int() takes at most 2 arguments (3 given)
                     """
-                    pass
+                    debug.warning('Super class of %s is not a class: %s', self, cls)
                 else:
                     add(cls)
                     for cls_new in mro_method():
@@ -220,6 +221,7 @@ class ClassContext(use_metaclass(CachedMetaClass, TreeContext)):
         return ContextName(self, self.tree_node.name)
 
     def py__getitem__(self, index_context_set, contextualized_node):
+        print(self.parent_context.__class__.__name__)
         for cls in list(self.py__mro__()):
             pass
         print('ha', self, list(self.py__mro__()))

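The debug.warning added above fires while walking a class's bases: a base that provides no py__mro__ (for example class B(A, 1)) used to be skipped silently. A minimal standalone sketch of that walk, using made-up FakeClass objects rather than jedi's real class contexts:

class FakeClass(object):
    def __init__(self, name, bases=()):
        self.name = name
        self.bases = bases

    def py__mro__(self):
        mro = [self]
        for base in self.bases:
            try:
                mro_method = base.py__mro__
            except AttributeError:
                # Mirrors the change above: complain instead of passing silently.
                print('Super class of %s is not a class: %s' % (self, base))
            else:
                for cls in mro_method():
                    if cls not in mro:
                        mro.append(cls)
        return tuple(mro)

    def __repr__(self):
        return '<FakeClass %s>' % self.name


a = FakeClass('A')
b = FakeClass('B', bases=(a, 42))  # 42 stands in for a base that is not a class
print(b.py__mro__())               # warns about 42 and returns (<FakeClass B>, <FakeClass A>)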
View File

@@ -3,90 +3,103 @@ We need to somehow work with the typing objects. Since the typing objects are
 pretty bare we need to add all the Jedi customizations to make them work as
 contexts.
 """
-from parso.python import tree
-
 from jedi import debug
 from jedi.evaluate.compiled import builtin_from_name, CompiledObject
 from jedi.evaluate.base_context import ContextSet, NO_CONTEXTS, Context
 from jedi.evaluate.context.iterable import SequenceLiteralContext
-from jedi.evaluate.filters import FilterWrapper, NameWrapper
+from jedi.evaluate.filters import FilterWrapper, NameWrapper, \
+    AbstractTreeName
 
-_PROXY_TYPES = 'Optional Union Callable Type ClassVar Tuple Generic Protocol'.split()
+_PROXY_CLASS_TYPES = 'Tuple Generic Protocol'.split()
 _TYPE_ALIAS_TYPES = 'List Dict DefaultDict Set FrozenSet Counter Deque ChainMap'.split()
+_PROXY_TYPES = 'Optional Union Callable Type ClassVar'.split()
 
 
-class _TypingBase(object):
-    def __init__(self, name, typing_context):
+class TypingName(AbstractTreeName):
+    def __init__(self, context, other_name):
+        super(TypingName, self).__init__(context.parent_context, other_name.tree_name)
+        self._context = context
+
+    def infer(self):
+        return ContextSet(self._context)
+
+
+class _BaseTypingContext(Context):
+    def __init__(self, name):
+        super(_BaseTypingContext, self).__init__(
+            name.parent_context.evaluator,
+            parent_context=name.parent_context,
+        )
         self._name = name
-        self._context = typing_context
 
-    def __getattr__(self, name):
-        return getattr(self._context, name)
+    @property
+    def name(self):
+        return TypingName(self, self._name)
 
     def __repr__(self):
-        return '%s(%s)' % (self.__class__.__name__, self._context)
+        return '%s(%s)' % (self.__class__.__name__, self._name.string_name)
 
 
 class TypingModuleName(NameWrapper):
     def infer(self):
-        return ContextSet.from_iterable(
-            self._remap(context) for context in self._wrapped_name.infer()
-        )
+        return ContextSet.from_iterable(self._remap())
 
-    def _remap(self, context):
+    def _remap(self):
+        # TODO we don't want the SpecialForm bullshit
         name = self.string_name
-        print('name', name)
-        if name in (_PROXY_TYPES + _TYPE_ALIAS_TYPES):
-            print('NAME', name)
-            return TypingProxy(name, context)
+        evaluator = self.parent_context.evaluator
+        if name in (_PROXY_CLASS_TYPES + _TYPE_ALIAS_TYPES):
+            yield TypingClassContext(self)
+        elif name == _PROXY_TYPES:
+            yield TypingContext(self)
+        elif name == 'runtime':
+            # We don't want anything here, not sure what this function is
+            # supposed to do, since it just appears in the stubs and shouldn't
+            # have any effects there (because it's never executed).
+            return
         elif name == 'TypeVar':
-            return TypeVarClass(context.evaluator)
+            yield TypeVarClass(evaluator)
         elif name == 'Any':
-            return Any(context)
+            yield Any()
         elif name == 'TYPE_CHECKING':
             # This is needed for e.g. imports that are only available for type
             # checking or are in cycles. The user can then check this variable.
-            return builtin_from_name(context.evaluator, u'True')
+            yield builtin_from_name(evaluator, u'True')
         elif name == 'overload':
             # TODO implement overload
-            return context
+            pass
         elif name == 'cast':
             # TODO implement cast
-            return context
+            for c in self._wrapped_name.infer():  # Fuck my life Python 2
+                yield c
         elif name == 'TypedDict':
-            # TODO implement
-            # e.g. Movie = TypedDict('Movie', {'name': str, 'year': int})
-            return context
+            # TODO doesn't even exist in typeshed/typing.py, yet. But will be
+            # added soon.
+            pass
         elif name in ('no_type_check', 'no_type_check_decorator'):
             # This is not necessary, as long as we are not doing type checking.
-            return context
-        return context
+            for c in self._wrapped_name.infer():  # Fuck my life Python 2
+                yield c
+        else:
+            # Everything else shouldn't be relevant for type checking.
+            for c in self._wrapped_name.infer():  # Fuck my life Python 2
+                yield c
 
 
 class TypingModuleFilterWrapper(FilterWrapper):
     name_wrapper_class = TypingModuleName
 
 
-class TypingProxy(_TypingBase):
-    py__simple_getitem__ = None
-
-    def py__getitem__(self, index_context_set, contextualized_node):
-        return ContextSet.from_iterable(
-            TypingProxyWithIndex(self._name, self._context, index_context)
-            for index_context in index_context_set
-        )
-
-
-class _WithIndexBase(_TypingBase):
-    def __init__(self, name, class_context, index_context):
-        super(_WithIndexBase, self).__init__(name, class_context)
+class _WithIndexBase(_BaseTypingContext):
+    def __init__(self, name, index_context):
+        super(_WithIndexBase, self).__init__(name)
         self._index_context = index_context
 
     def __repr__(self):
-        return '%s(%s, %s)' % (
+        return '<%s: %s[%s]>' % (
             self.__class__.__name__,
-            self._context,
-            self._index_context
+            self._name.string_name,
+            self._index_context,
         )
 
     def _execute_annotations_for_all_indexes(self):
@@ -95,7 +108,7 @@ class _WithIndexBase(_TypingBase):
         ).execute_annotation()
 
 
-class TypingProxyWithIndex(_WithIndexBase):
+class TypingContextWithIndex(_WithIndexBase):
     def execute_annotation(self):
         name = self._name
         if name in _TYPE_ALIAS_TYPES:
@@ -122,6 +135,30 @@ class TypingProxyWithIndex(_WithIndexBase):
         return ContextSet(cls(name, self._context, self._index_context))
 
 
+class TypingContext(_BaseTypingContext):
+    index_class = TypingContextWithIndex
+    py__simple_getitem__ = None
+
+    def py__getitem__(self, index_context_set, contextualized_node):
+        return ContextSet.from_iterable(
+            self.index_class(self._name, index_context)
+            for index_context in index_context_set
+        )
+
+
+class TypingClassMixin(object):
+    def py__mro__(self):
+        return (self,)
+
+
+class TypingClassContextWithIndex(TypingClassMixin, TypingContextWithIndex):
+    pass
+
+
+class TypingClassContext(TypingClassMixin, TypingContext):
+    index_class = TypingClassContextWithIndex
+
+
 def _iter_over_arguments(maybe_tuple_context):
     if isinstance(maybe_tuple_context, SequenceLiteralContext):
         for lazy_context in maybe_tuple_context.py__iter__():
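The TypingContext/TypingContextWithIndex pair added above handles subscription of typing constructs: Optional[int] is not a real item lookup, it merely pairs the construct's name with each possible index so that execute_annotation() can act on it later, while TypingClassMixin additionally gives class-like constructs a trivial MRO. A plain-Python sketch of that shape, with invented Sketch* names standing in for the jedi classes:

class SketchWithIndex(object):
    def __init__(self, name, index):
        self._name = name
        self._index = index

    def __repr__(self):
        return '<SketchWithIndex: %s[%s]>' % (self._name, self._index)


class SketchTypingContext(object):
    def __init__(self, name):
        self._name = name

    def getitem(self, index_contexts):
        # One wrapper per possible index, mirroring ContextSet.from_iterable(...)
        return {SketchWithIndex(self._name, index) for index in index_contexts}


print(SketchTypingContext('Optional').getitem({'int'}))
# -> {<SketchWithIndex: Optional[int]>}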
@@ -175,16 +212,14 @@ class Tuple(_ContainerBase):
 class Generic(_ContainerBase):
-    # TODO implement typevars
     pass
 
 
-# For pure type inference these two classes are basically the same. It's much
-# more interesting once you do type checking.
-Protocol = Generic
+class Protocol(_ContainerBase):
+    pass
 
 
-class Any(_TypingBase):
+class Any(_BaseTypingContext):
     def __init__(self):
         # Any is basically object, when it comes to type inference/completions.
         # This is obviously not correct, but let's just use this for now.
@@ -240,20 +275,28 @@ class TypeVar(Context):
     def __init__(self, evaluator, name, unpacked_args):
         super(TypeVar, self).__init__(evaluator)
         self._name = name
-        self._unpacked_args = unpacked_args
 
-    def _unpack(self):
-        # TODO
-        constraints = ContextSet()
-        bound = None
-        covariant = False
-        contravariant = False
-        for key, lazy_context in unpacked:
+        self._constraints_lazy_contexts = []
+        self._bound_lazy_context = None
+        self._covariant_lazy_context = None
+        self._contravariant_lazy_context = None
+        for key, lazy_context in unpacked_args:
             if key is None:
-                constraints |= lazy_context.infer()
+                self._constraints_lazy_contexts.append(lazy_context)
             else:
-                if name == 'bound':
-                    bound = lazy_context.infer()
+                if key == 'bound':
+                    self._bound_lazy_context = lazy_context
+                elif key == 'covariant':
+                    self._covariant_lazy_context = lazy_context
+                elif key == 'contravariant':
+                    self._contra_variant_lazy_context = lazy_context
+                else:
+                    debug.warning('Invalid TypeVar param name %s', key)
+
+    def execute_annotation(self):
+        if self._bound_lazy_context is not None:
+            return self._bound_lazy_context.infer()
+        return NO_CONTEXTS
 
     def __repr__(self):
         return '<%s: %s>' % (self.__class__.__name__, self._name)

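The rewritten TypeVar.__init__ above sorts the call's arguments up front instead of re-unpacking them lazily. A rough standalone sketch of that sorting, assuming unpacked_args is an iterable of (keyword_or_None, value) pairs with the TypeVar's own name already split off (as the separate name parameter in the signature suggests):

def sort_type_var_args(unpacked_args):
    # Positional arguments are constraints; the recognised keywords get their
    # own slots, anything else is reported.
    constraints = []
    bound = covariant = contravariant = None
    for key, value in unpacked_args:
        if key is None:
            constraints.append(value)
        elif key == 'bound':
            bound = value
        elif key == 'covariant':
            covariant = value
        elif key == 'contravariant':
            contravariant = value
        else:
            print('Invalid TypeVar param name %s' % key)
    return constraints, bound, covariant, contravariant


# T = TypeVar('T', int, str) roughly unpacks to two positional constraints,
# U = TypeVar('U', bound=str) to a single bound keyword:
print(sort_type_var_args([(None, 'int'), (None, 'str')]))  # (['int', 'str'], None, None, None)
print(sort_type_var_args([('bound', 'str')]))              # ([], 'str', None, None)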
View File

@@ -10,9 +10,9 @@ from jedi.evaluate.base_context import ContextSet, iterator_to_context_set
 from jedi.evaluate.filters import AbstractTreeName, ParserTreeFilter, \
     TreeNameDefinition
 from jedi.evaluate.context import ModuleContext, FunctionContext, ClassContext
-from jedi.evaluate.context.typing import TypingModuleFilterWrapper
+from jedi.evaluate.context.typing import TypingModuleFilterWrapper, \
+    TypingModuleName
 from jedi.evaluate.compiled import CompiledObject
-from jedi.evaluate.syntax_tree import tree_name_to_contexts
 from jedi.evaluate.utils import to_list
@@ -164,27 +164,22 @@ class TypeshedPlugin(BasePlugin):
         return wrapper
 
 
-class StubName(TreeNameDefinition):
+class NameWithStub(TreeNameDefinition):
     """
     This name is only here to mix stub names with non-stub names. The idea is
    that the user can goto the actual name, but end up on the definition of the
     stub when inferring types.
     """
-    def __init__(self, parent_context, tree_name, stub_parent_context, stub_tree_name):
-        super(StubName, self).__init__(parent_context, tree_name)
-        self._stub_parent_context = stub_parent_context
-        self._stub_tree_name = stub_tree_name
+    def __init__(self, parent_context, tree_name, stub_name):
+        super(NameWithStub, self).__init__(parent_context, tree_name)
+        self._stub_name = stub_name
 
     @memoize_method
     @iterator_to_context_set
     def infer(self):
-        actual_contexts = super(StubName, self).infer()
-        stub_contexts = tree_name_to_contexts(
-            self.parent_context.evaluator,
-            self._stub_parent_context,
-            self._stub_tree_name
-        )
+        actual_contexts = super(NameWithStub, self).infer()
+        stub_contexts = self._stub_name.infer()
 
         if not actual_contexts:
             for c in stub_contexts:
@@ -217,13 +212,24 @@ class StubName(TreeNameDefinition):
 class StubParserTreeFilter(ParserTreeFilter):
-    name_class = StubName
+    name_class = NameWithStub
 
     def __init__(self, non_stub_filters, *args, **kwargs):
         self._search_global = kwargs.pop('search_global')  # Python 2 :/
         super(StubParserTreeFilter, self).__init__(*args, **kwargs)
         self._non_stub_filters = non_stub_filters
 
+    def get(self, name):
+        try:
+            names = self._used_names[name]
+        except KeyError:
+            return self._get_non_stub_names(name)
+        return self._convert_names(self._filter(names))
+
+    # TODO maybe implement values, because currently the names that don't exist
+    # in the stub file are not part of values.
+
     def _check_flows(self, names):
         return names
@@ -241,18 +247,20 @@ class StubParserTreeFilter(ParserTreeFilter):
             # Try to match the names of stubs with non-stubs. If there's no
             # match, just use the stub name. The user will be directed there
             # for all API accesses. Otherwise the user will be directed to the
-            # non-stub positions (see StubName).
-            if not len(non_stub_names):
-                yield TreeNameDefinition(self.context, name)
-            else:
+            # non-stub positions (see NameWithStub).
+            n = TreeNameDefinition(self.context, name)
+            if isinstance(self.context, TypingModuleWrapper):
+                n = TypingModuleName(n)
+            if len(non_stub_names):
                 for non_stub_name in non_stub_names:
                     assert isinstance(non_stub_name, AbstractTreeName), non_stub_name
                     yield self.name_class(
                         non_stub_name.parent_context,
                         non_stub_name.tree_name,
-                        self.context,
-                        name,
+                        stub_name=n,
                     )
+            else:
+                yield n
 
     def _is_name_reachable(self, name):
         if not super(StubParserTreeFilter, self)._is_name_reachable(name):
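
NameWithStub, as renamed above, now receives a ready-made stub name and defers to its infer() instead of re-resolving a stub tree name by hand. The visible part of infer() falls back to the stub's contexts when the actual source yields nothing; a tiny standalone sketch of that fallback, where MergedName and the callables are illustrative rather than jedi API:

class MergedName(object):
    def __init__(self, infer_actual, infer_stub):
        self._infer_actual = infer_actual  # callables returning sets of "contexts"
        self._infer_stub = infer_stub

    def infer(self):
        actual = self._infer_actual()
        if not actual:
            # Nothing inferable from the real source (e.g. the name only has a
            # meaningful definition in the .pyi stub), so use the stub's answer.
            return self._infer_stub()
        return actual


merged = MergedName(lambda: set(), lambda: {'int'})
print(merged.infer())  # -> {'int'}, taken from the stub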