Refactor the way builtins can be overwritten by jedi's own contexts

commit bf01b9d47c
parent 5c8300e62a
Author: Dave Halter
Date:   2018-02-21 00:09:41 +01:00

10 changed files with 133 additions and 163 deletions
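
In short: instead of decorating each context class with @has_builtin_methods and registering overrides one by one with register_builtin_method, contexts that mimic a builtin now subclass BuiltinOverwrite, name the compiled object they stand in for via special_object_identifier, and mark overriding methods with @publish_method; a metaclass collects the published methods into a per-class builtin_methods dict. A rough, self-contained sketch of that collection mechanism follows (Python 3 only, python_version_match and the jedi filter machinery are left out; GeneratorLike is a made-up example class, not jedi code):

def publish_method(method_name):
    # Stash the method on the function object; the metaclass collects it later.
    def decorator(func):
        dct = func.__dict__.setdefault('registered_builtin_methods', {})
        dct[method_name] = func
        return func
    return decorator


class _BuiltinOverwriteMeta(type):
    def __init__(cls, name, bases, dct):
        super().__init__(name, bases, dct)
        base_dct = {}
        # Walk the bases first so published methods are not lost just because
        # a subclass does not mention them itself.
        for base_cls in reversed(cls.__bases__):
            base_dct.update(getattr(base_cls, 'builtin_methods', {}))
        for func in cls.__dict__.values():
            base_dct.update(getattr(func, 'registered_builtin_methods', {}))
        cls.builtin_methods = base_dct


class BuiltinOverwrite(metaclass=_BuiltinOverwriteMeta):
    special_object_identifier = None


class GeneratorLike(BuiltinOverwrite):
    special_object_identifier = 'GENERATOR_OBJECT'

    @publish_method('send')
    @publish_method('__next__')
    def py__next__(self):
        return 'inferred value'


print(sorted(GeneratorLike.builtin_methods))  # ['__next__', 'send']

In the real BuiltinOverwrite code below, get_filters then yields a SpecialMethodFilter built from builtin_methods before the compiled builtin's own filters, so published methods shadow the builtin ones during name lookup.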

View File

@@ -164,7 +164,7 @@ def _check_for_exception_catch(node_context, jedi_name, exception, payload=None)
                 except_classes = node_context.eval_node(node)
                 for cls in except_classes:
                     from jedi.evaluate.context import iterable
-                    if isinstance(cls, iterable.AbstractIterable) and \
+                    if isinstance(cls, iterable.Sequence) and \
                             cls.array_type == 'tuple':
                         # multiple exceptions
                         for lazy_context in cls.py__iter__():

View File

@@ -240,7 +240,7 @@ def _star_star_dict(context, array, input_node, funcdef):
         # For now ignore this case. In the future add proper iterators and just
         # make one call without crazy isinstance checks.
         return {}
-    elif isinstance(array, iterable.AbstractIterable) and array.array_type == 'dict':
+    elif isinstance(array, iterable.Sequence) and array.array_type == 'dict':
         return array.exact_key_items()
     else:
         if funcdef is not None:

View File

@@ -84,7 +84,7 @@ class Context(BaseContext):
     def get_item(self, index_contexts, contextualized_node):
         from jedi.evaluate.compiled import CompiledObject
-        from jedi.evaluate.context.iterable import Slice, AbstractIterable
+        from jedi.evaluate.context.iterable import Slice, Sequence
         result = ContextSet()
         for index in index_contexts:
@@ -99,7 +99,7 @@ class Context(BaseContext):
             if type(index) not in (float, int, str, unicode, slice, bytes):
                 # If the index is not clearly defined, we have to get all the
                 # possiblities.
-                if isinstance(self, AbstractIterable) and self.array_type == 'dict':
+                if isinstance(self, Sequence) and self.array_type == 'dict':
                     result |= self.dict_values()
                 else:
                     result |= iterate_contexts(ContextSet(self))

View File

@@ -1,77 +1,34 @@
-from jedi.evaluate import compiled
-from jedi.evaluate.filters import has_builtin_methods, \
-    register_builtin_method, SpecialMethodFilter
-from jedi.evaluate.base_context import ContextSet, Context
+from jedi.evaluate.filters import publish_method, BuiltinOverwrite
+from jedi.evaluate.base_context import ContextSet
-@has_builtin_methods
-class CoroutineMixin(object):
-    array_type = None
-    def get_filters(self, search_global, until_position=None, origin_scope=None):
-        gen_obj = compiled.get_special_object(self.evaluator, 'COROUTINE_TYPE')
-        yield SpecialMethodFilter(self, self.builtin_methods, gen_obj)
-        for filter in gen_obj.get_filters(search_global):
-            yield filter
-    def py__bool__(self):
-        return True
-    def py__class__(self):
-        gen_obj = compiled.get_special_object(self.evaluator, 'COROUTINE_TYPE')
-        return gen_obj.py__class__()
+class AsyncBase(BuiltinOverwrite):
+    def __init__(self, evaluator, func_execution_context):
+        super(AsyncBase, self).__init__(evaluator)
+        self._func_execution_context = func_execution_context
     @property
     def name(self):
-        return compiled.CompiledContextName(self, 'coroutine')
+        return self.get_builtin_object().py__name__()
+    def __repr__(self):
+        return "<%s of %s>" % (type(self).__name__, self._func_execution_context)
-class Coroutine(CoroutineMixin, Context):
-    def __init__(self, evaluator, func_execution_context):
-        super(Coroutine, self).__init__(evaluator, parent_context=evaluator.builtins_module)
-        self._func_execution_context = func_execution_context
+class Coroutine(AsyncBase):
+    special_object_identifier = u'COROUTINE_TYPE'
     def execute_await(self):
         return self._func_execution_context.get_return_values()
-    def __repr__(self):
-        return "<%s of %s>" % (type(self).__name__, self._func_execution_context)
+class AsyncGenerator(AsyncBase):
+    """Handling of `yield` functions."""
+    special_object_identifier = u'ASYNC_GENERATOR_TYPE'
-@has_builtin_methods
-class AsyncGeneratorMixin(object):
-    array_type = None
-    @register_builtin_method('__anext__')
+    @publish_method('__anext__')
     def py__anext__(self):
         return ContextSet.from_sets(lazy_context.infer() for lazy_context in self.py__aiter__())
-    def get_filters(self, search_global, until_position=None, origin_scope=None):
-        gen_obj = compiled.get_special_object(self.evaluator, 'ASYNC_GENERATOR_TYPE')
-        yield SpecialMethodFilter(self, self.builtin_methods, gen_obj)
-        for filter in gen_obj.get_filters(search_global):
-            yield filter
-    def py__bool__(self):
-        return True
-    def py__class__(self):
-        gen_obj = compiled.get_special_object(self.evaluator, 'ASYNC_GENERATOR_TYPE')
-        return gen_obj.py__class__()
-    @property
-    def name(self):
-        return compiled.CompiledContextName(self, 'asyncgenerator')
-class AsyncGenerator(AsyncGeneratorMixin, Context):
-    """Handling of `yield` functions."""
-    def __init__(self, evaluator, func_execution_context):
-        super(AsyncGenerator, self).__init__(evaluator, parent_context=evaluator.builtins_module)
-        self._func_execution_context = func_execution_context
     def py__aiter__(self):
         return self._func_execution_context.get_yield_lazy_contexts(is_async=True)
-    def __repr__(self):
-        return "<%s of %s>" % (type(self).__name__, self._func_execution_context)

View File

@@ -23,6 +23,7 @@ It is important to note that:
 from jedi import debug
 from jedi import settings
 from jedi._compatibility import force_unicode, is_py3
+from jedi.cache import memoize_method
 from jedi.evaluate import compiled
 from jedi.evaluate import analysis
 from jedi.evaluate import recursion
@@ -33,61 +34,38 @@ from jedi.evaluate.helpers import get_int_or_none, is_string, \
 from jedi.evaluate.utils import safe_property
 from jedi.evaluate.utils import to_list
 from jedi.evaluate.cache import evaluator_method_cache
-from jedi.evaluate.filters import ParserTreeFilter, has_builtin_methods, \
-    register_builtin_method, SpecialMethodFilter
+from jedi.evaluate.filters import ParserTreeFilter, BuiltinOverwrite, \
+    publish_method
 from jedi.evaluate.base_context import ContextSet, NO_CONTEXTS, Context, \
     TreeContext, ContextualizedNode
 from jedi.parser_utils import get_comp_fors
-class AbstractIterable(Context):
-    builtin_methods = {}
-    api_type = u'instance'
-    def __init__(self, evaluator):
-        super(AbstractIterable, self).__init__(evaluator, evaluator.builtins_module)
-    def get_filters(self, search_global, until_position=None, origin_scope=None):
-        raise NotImplementedError
+class AbstractIterableMixin(object):
     @property
     def name(self):
         return compiled.CompiledContextName(self, self.array_type)
-@has_builtin_methods
-class GeneratorMixin(object):
+class GeneratorBase(BuiltinOverwrite):
     array_type = None
+    special_object_identifier = u'GENERATOR_OBJECT'
-    @register_builtin_method('send')
-    @register_builtin_method('next', python_version_match=2)
-    @register_builtin_method('__next__', python_version_match=3)
+    @publish_method('send')
+    @publish_method('next', python_version_match=2)
+    @publish_method('__next__', python_version_match=3)
     def py__next__(self):
-        # TODO add TypeError if params are given.
         return ContextSet.from_sets(lazy_context.infer() for lazy_context in self.py__iter__())
-    def get_filters(self, search_global, until_position=None, origin_scope=None):
-        gen_obj = compiled.get_special_object(self.evaluator, u'GENERATOR_OBJECT')
-        yield SpecialMethodFilter(self, self.builtin_methods, gen_obj)
-        for filter in gen_obj.get_filters(search_global):
-            yield filter
-    def py__bool__(self):
-        return True
-    def py__class__(self):
-        gen_obj = compiled.get_special_object(self.evaluator, u'GENERATOR_OBJECT')
-        return gen_obj.py__class__()
     @property
     def name(self):
         return compiled.CompiledContextName(self, 'generator')
-class Generator(GeneratorMixin, Context):
+class Generator(GeneratorBase):
     """Handling of `yield` functions."""
     def __init__(self, evaluator, func_execution_context):
-        super(Generator, self).__init__(evaluator, parent_context=evaluator.builtins_module)
+        super(Generator, self).__init__(evaluator)
         self._func_execution_context = func_execution_context
     def py__iter__(self):
@@ -113,23 +91,23 @@ class CompForContext(TreeContext):
         yield ParserTreeFilter(self.evaluator, self)
-class Comprehension(AbstractIterable):
-    @staticmethod
-    def from_atom(evaluator, context, atom):
-        bracket = atom.children[0]
-        if bracket == '{':
-            if atom.children[1].children[1] == ':':
-                cls = DictComprehension
-            else:
-                cls = SetComprehension
-        elif bracket == '(':
-            cls = GeneratorComprehension
-        elif bracket == '[':
-            cls = ListComprehension
-        return cls(evaluator, context, atom)
+def comprehension_from_atom(evaluator, context, atom):
+    bracket = atom.children[0]
+    if bracket == '{':
+        if atom.children[1].children[1] == ':':
+            cls = DictComprehension
+        else:
+            cls = SetComprehension
+    elif bracket == '(':
+        cls = GeneratorComprehension
+    elif bracket == '[':
+        cls = ListComprehension
+    return cls(evaluator, context, atom)
+class ComprehensionMixin(object):
     def __init__(self, evaluator, defining_context, atom, is_async=False):
-        super(Comprehension, self).__init__(evaluator)
+        super(ComprehensionMixin, self).__init__(evaluator)
         self._defining_context = defining_context
         self._atom = atom
@@ -201,14 +179,14 @@ class Comprehension(AbstractIterable):
         return "<%s of %s>" % (type(self).__name__, self._atom)
-class ArrayMixin(object):
-    def get_filters(self, search_global, until_position=None, origin_scope=None):
-        # `array.type` is a string with the type, e.g. 'list'.
+class Sequence(BuiltinOverwrite, AbstractIterableMixin):
+    api_type = u'instance'
+    @memoize_method
+    def get_builtin_object(self):
         compiled_obj = compiled.builtin_from_name(self.evaluator, self.array_type)
-        yield SpecialMethodFilter(self, self.builtin_methods, compiled_obj)
-        for typ in compiled_obj.execute_evaluated(self):
-            for filter in typ.get_filters():
-                yield filter
+        only_obj, = compiled_obj.execute_evaluated(self)
+        return only_obj
     def py__bool__(self):
         return None  # We don't know the length, because of appends.
@@ -227,7 +205,7 @@ class ArrayMixin(object):
         )
-class ListComprehension(ArrayMixin, Comprehension):
+class ListComprehension(ComprehensionMixin, Sequence):
     array_type = u'list'
     def py__getitem__(self, index):
@@ -238,12 +216,11 @@ class ListComprehension(ArrayMixin, Comprehension):
         return all_types[index].infer()
-class SetComprehension(ArrayMixin, Comprehension):
+class SetComprehension(ComprehensionMixin, Sequence):
     array_type = u'set'
-@has_builtin_methods
-class DictComprehension(ArrayMixin, Comprehension):
+class DictComprehension(ComprehensionMixin, Sequence):
     array_type = u'dict'
     def _get_comp_for(self):
@@ -264,12 +241,12 @@ class DictComprehension(ArrayMixin, Comprehension):
     def dict_values(self):
         return ContextSet.from_sets(values for keys, values in self._iterate())
-    @register_builtin_method('values')
+    @publish_method('values')
     def _imitate_values(self):
         lazy_context = LazyKnownContexts(self.dict_values())
         return ContextSet(FakeSequence(self.evaluator, u'list', [lazy_context]))
-    @register_builtin_method('items')
+    @publish_method('items')
     def _imitate_items(self):
         items = ContextSet.from_iterable(
             FakeSequence(
@@ -281,11 +258,11 @@ class DictComprehension(ArrayMixin, Comprehension):
         return create_evaluated_sequence_set(self.evaluator, items, sequence_type=u'list')
-class GeneratorComprehension(GeneratorMixin, Comprehension):
+class GeneratorComprehension(ComprehensionMixin, GeneratorBase):
     pass
-class SequenceLiteralContext(ArrayMixin, AbstractIterable):
+class SequenceLiteralContext(Sequence):
     mapping = {'(': u'tuple',
                '[': u'list',
                '{': u'set'}
@@ -387,7 +364,6 @@ class SequenceLiteralContext(ArrayMixin, AbstractIterable):
         return "<%s of %s>" % (self.__class__.__name__, self.atom)
-@has_builtin_methods
 class DictLiteralContext(SequenceLiteralContext):
     array_type = u'dict'
@@ -396,12 +372,12 @@ class DictLiteralContext(SequenceLiteralContext):
         self._defining_context = defining_context
         self.atom = atom
-    @register_builtin_method('values')
+    @publish_method('values')
     def _imitate_values(self):
         lazy_context = LazyKnownContexts(self.dict_values())
         return ContextSet(FakeSequence(self.evaluator, u'list', [lazy_context]))
-    @register_builtin_method('items')
+    @publish_method('items')
     def _imitate_items(self):
         lazy_contexts = [
             LazyKnownContext(FakeSequence(
@@ -443,7 +419,6 @@ class FakeSequence(_FakeArray):
         return "<%s of %s>" % (type(self).__name__, self._lazy_context_list)
-@has_builtin_methods
 class FakeDict(_FakeArray):
     def __init__(self, evaluator, dct):
         super(FakeDict, self).__init__(evaluator, dct, u'dict')
@@ -471,7 +446,7 @@ class FakeDict(_FakeArray):
         return self._dct[index].infer()
-    @register_builtin_method('values')
+    @publish_method('values')
     def _values(self):
         return ContextSet(FakeSequence(
             self.evaluator, u'tuple',

View File

@@ -6,6 +6,8 @@ from abc import abstractmethod
 from parso.tree import search_ancestor
+from jedi._compatibility import use_metaclass
+from jedi.cache import memoize_method
 from jedi.evaluate import flow_analysis
 from jedi.evaluate.base_context import ContextSet, Context
 from jedi.parser_utils import get_parent_scope
@@ -307,6 +309,7 @@ class _BuiltinMappedMethod(Context):
         self._builtin_func = builtin_func
     def py__call__(self, params):
+        # TODO add TypeError if params are given/or not correct.
         return self._method(self.parent_context)
     def __getattr__(self, name):
@@ -333,12 +336,19 @@ class SpecialMethodFilter(DictFilter):
             self._builtin_context = builtin_context
         def infer(self):
-            filter = next(self._builtin_context.get_filters())
-            # We can take the first index, because on builtin methods there's
-            # always only going to be one name. The same is true for the
-            # inferred values.
-            builtin_func = next(iter(filter.get(self.string_name)[0].infer()))
-            return ContextSet(_BuiltinMappedMethod(self.parent_context, self._callable, builtin_func))
+            for filter in self._builtin_context.get_filters():
+                # We can take the first index, because on builtin methods there's
+                # always only going to be one name. The same is true for the
+                # inferred values.
+                for name in filter.get(self.string_name):
+                    builtin_func = next(iter(name.infer()))
+                    break
+                else:
+                    continue
+                break
+            return ContextSet(
+                _BuiltinMappedMethod(self.parent_context, self._callable, builtin_func)
+            )
     def __init__(self, context, dct, builtin_context):
         super(SpecialMethodFilter, self).__init__(dct)
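
The infer() change above stops assuming that the published name can be found in the first filter of the compiled builtin: it now walks all filters and takes the first name that turns up. The nested for/else/break flow is easy to misread, so here is a tiny self-contained sketch of the same control flow (plain dicts stand in for jedi's filter objects; first_name and the sample data are made up, and the final raise is added so the sketch fails loudly where the real code simply assumes the name exists):

def first_name(filters, wanted):
    for flt in filters:
        for name in flt.get(wanted, []):
            found = name
            break        # found it, leave the inner loop
        else:
            continue     # the inner loop found nothing, try the next filter
        break            # propagate the inner break to the outer loop
    else:
        raise KeyError(wanted)
    return found


print(first_name([{}, {'values': ['values_name']}], 'values'))  # values_name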
@@ -355,26 +365,53 @@ class SpecialMethodFilter(DictFilter):
         return self.SpecialMethodName(self.context, name, value, self._builtin_context)
-def has_builtin_methods(cls):
-    base_dct = {}
-    # Need to care properly about inheritance. Builtin Methods should not get
-    # lost, just because they are not mentioned in a class.
-    for base_cls in reversed(cls.__bases__):
-        try:
-            base_dct.update(base_cls.builtin_methods)
-        except AttributeError:
-            pass
+class _BuiltinOverwriteMeta(type):
+    def __init__(cls, name, bases, dct):
+        super(_BuiltinOverwriteMeta, cls).__init__(name, bases, dct)
-    cls.builtin_methods = base_dct
-    for func in cls.__dict__.values():
-        try:
-            cls.builtin_methods.update(func.registered_builtin_methods)
-        except AttributeError:
-            pass
-    return cls
+        base_dct = {}
+        for base_cls in reversed(cls.__bases__):
+            try:
+                base_dct.update(base_cls.builtin_methods)
+            except AttributeError:
+                pass
+        for func in cls.__dict__.values():
+            try:
+                base_dct.update(func.registered_builtin_methods)
+            except AttributeError:
+                pass
+        cls.builtin_methods = base_dct
-def register_builtin_method(method_name, python_version_match=None):
+class BuiltinOverwrite(use_metaclass(_BuiltinOverwriteMeta, Context)):
+    special_object_identifier = None
+    def __init__(self, evaluator):
+        super(BuiltinOverwrite, self).__init__(evaluator, evaluator.builtins_module)
+    @memoize_method
+    def get_builtin_object(self):
+        from jedi.evaluate import compiled
+        assert self.special_object_identifier
+        return compiled.get_special_object(self.evaluator, self.special_object_identifier)
+    def _get_special_method_filter(self):
+        special_method_filter = SpecialMethodFilter(
+            self, self.builtin_methods, self.get_builtin_object())
+        return special_method_filter
+    def py__class__(self):
+        return self.get_builtin_object().py__class__()
+    def get_filters(self, search_global, *args, **kwargs):
+        yield self._get_special_method_filter()
+        for filter in self.get_builtin_object().get_filters(search_global):
+            yield filter
+def publish_method(method_name, python_version_match=None):
     def decorator(func):
         dct = func.__dict__.setdefault('registered_builtin_methods', {})
         dct[method_name] = func, python_version_match

View File

@@ -250,8 +250,7 @@ def _check_isinstance_type(context, element, search_name):
     context_set = ContextSet()
     for cls_or_tup in lazy_context_cls.infer():
-        if isinstance(cls_or_tup, iterable.AbstractIterable) and \
-                cls_or_tup.array_type == 'tuple':
+        if isinstance(cls_or_tup, iterable.Sequence) and cls_or_tup.array_type == 'tuple':
             for lazy_context in cls_or_tup.py__iter__():
                 for context in lazy_context.infer():
                     context_set |= context.execute_evaluated()

View File

@@ -278,7 +278,7 @@ def collections_namedtuple(evaluator, obj, arguments):
     _fields = list(_follow_param(evaluator, arguments, 1))[0]
     if isinstance(_fields, compiled.CompiledObject):
         fields = _fields.get_safe_value().replace(',', ' ').split()
-    elif isinstance(_fields, iterable.AbstractIterable):
+    elif isinstance(_fields, iterable.Sequence):
         fields = [
             v.get_safe_value()
             for lazy_context in _fields.py__iter__()

View File

@@ -221,7 +221,9 @@ def eval_atom(context, atom):
                     pass
             if comp_for.type == 'comp_for':
-                return ContextSet(iterable.Comprehension.from_atom(context.evaluator, context, atom))
+                return ContextSet(iterable.comprehension_from_atom(
+                    context.evaluator, context, atom
+                ))
         # It's a dict/list/tuple literal.
         array_node = c[1]
@@ -371,11 +373,11 @@ def _eval_comparison(evaluator, context, left_contexts, operator, right_contexts
 def _is_tuple(context):
-    return isinstance(context, iterable.AbstractIterable) and context.array_type == 'tuple'
+    return isinstance(context, iterable.Sequence) and context.array_type == 'tuple'
 def _is_list(context):
-    return isinstance(context, iterable.AbstractIterable) and context.array_type == 'list'
+    return isinstance(context, iterable.Sequence) and context.array_type == 'list'
 def _bool_to_context(evaluator, bool_):
@@ -392,9 +394,9 @@ def _eval_comparison_part(evaluator, context, left, operator, right):
     if str_operator == '*':
         # for iterables, ignore * operations
-        if isinstance(left, iterable.AbstractIterable) or is_string(left):
+        if isinstance(left, iterable.Sequence) or is_string(left):
             return ContextSet(left)
-        elif isinstance(right, iterable.AbstractIterable) or is_string(right):
+        elif isinstance(right, iterable.Sequence) or is_string(right):
             return ContextSet(right)
     elif str_operator == '+':
         if l_is_num and r_is_num or is_string(left) and is_string(right):

View File

@@ -34,7 +34,7 @@ async def wrapper():
     [x async for x in asgen()][0]
     async for y in asgen():
+        # TODO: make this an int()
+        #? int()
         y
     #? ['__anext__']