forked from VimPlug/jedi

Evaluator -> InferState

Author: Dave Halter
Date:   2019-08-15 00:33:49 +02:00
Commit: a5dff65142 (parent 8157d119a7)

62 changed files with 768 additions and 768 deletions
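The commit is a mechanical rename: the object formerly called `Evaluator` is now `InferState`, so parameters and attributes change from `evaluator` to `infer_state`, and the cache decorators change from `evaluator_method_cache` / `evaluator_method_generator_cache` to `infer_state_method_cache` / `infer_state_method_generator_cache`. The identical addition and deletion counts reflect one-for-one line substitutions. A hypothetical sketch of the kind of codemod that could produce such a diff (the actual commit may well have been made by hand or with other tooling):

import re

# Hypothetical rename table inferred from the hunks below. The compound
# decorator names need their own entries because `_` is a word character,
# so \bevaluator\b never matches inside `evaluator_method_cache`.
RENAMES = [
    (r'\bevaluator_method_cache\b', 'infer_state_method_cache'),
    (r'\bevaluator_method_generator_cache\b', 'infer_state_method_generator_cache'),
    (r'\b_evaluator\b', '_infer_state'),
    (r'\bevaluator\b', 'infer_state'),
    (r'\bEvaluator\b', 'InferState'),
]


def rename(source):
    for pattern, replacement in RENAMES:
        source = re.sub(pattern, replacement, source)
    return source


print(rename("ctx = compiled.builtin_from_name(self.evaluator, u'None')"))
# -> ctx = compiled.builtin_from_name(self.infer_state, u'None')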

View File

@@ -2,7 +2,7 @@ from parso.python import tree

 from jedi._compatibility import use_metaclass
 from jedi import debug
-from jedi.inference.cache import evaluator_method_cache, CachedMetaClass
+from jedi.inference.cache import infer_state_method_cache, CachedMetaClass
 from jedi.inference import compiled
 from jedi.inference import recursion
 from jedi.inference import docstrings
@@ -58,7 +58,7 @@ class FunctionMixin(object):
     def get_filters(self, search_global=False, until_position=None, origin_scope=None):
         if search_global:
             yield ParserTreeFilter(
-                self.evaluator,
+                self.infer_state,
                 context=self,
                 until_position=until_position,
                 origin_scope=origin_scope
@@ -98,7 +98,7 @@ class FunctionMixin(object):
         if arguments is None:
             arguments = AnonymousArguments()
-        return FunctionExecutionContext(self.evaluator, self.parent_context, self, arguments)
+        return FunctionExecutionContext(self.infer_state, self.parent_context, self, arguments)

     def get_signatures(self):
         return [TreeSignature(f) for f in self.get_signature_functions()]
@@ -113,14 +113,14 @@ class FunctionContext(use_metaclass(CachedMetaClass, FunctionMixin, FunctionAndC
         def create(tree_node):
             if context.is_class():
                 return MethodContext(
-                    context.evaluator,
+                    context.infer_state,
                     context,
                     parent_context=parent_context,
                     tree_node=tree_node
                 )
             else:
                 return cls(
-                    context.evaluator,
+                    context.infer_state,
                     parent_context=parent_context,
                     tree_node=tree_node
                 )
@@ -141,7 +141,7 @@ class FunctionContext(use_metaclass(CachedMetaClass, FunctionMixin, FunctionAndC
         return function

     def py__class__(self):
-        c, = contexts_from_qualified_names(self.evaluator, u'types', u'FunctionType')
+        c, = contexts_from_qualified_names(self.infer_state, u'types', u'FunctionType')
         return c

     def get_default_param_context(self):
@@ -152,8 +152,8 @@ class FunctionContext(use_metaclass(CachedMetaClass, FunctionMixin, FunctionAndC

 class MethodContext(FunctionContext):
-    def __init__(self, evaluator, class_context, *args, **kwargs):
-        super(MethodContext, self).__init__(evaluator, *args, **kwargs)
+    def __init__(self, infer_state, class_context, *args, **kwargs):
+        super(MethodContext, self).__init__(infer_state, *args, **kwargs)
         self.class_context = class_context

     def get_default_param_context(self):
@@ -171,16 +171,16 @@ class MethodContext(FunctionContext):

 class FunctionExecutionContext(TreeContext):
     function_execution_filter = FunctionExecutionFilter

-    def __init__(self, evaluator, parent_context, function_context, var_args):
+    def __init__(self, infer_state, parent_context, function_context, var_args):
         super(FunctionExecutionContext, self).__init__(
-            evaluator,
+            infer_state,
             parent_context,
             function_context.tree_node,
         )
         self.function_context = function_context
         self.var_args = var_args

-    @evaluator_method_cache(default=NO_CONTEXTS)
+    @infer_state_method_cache(default=NO_CONTEXTS)
     @recursion.execution_recursion_decorator()
     def get_return_values(self, check_yields=False):
         funcdef = self.tree_node
@@ -189,7 +189,7 @@ class FunctionExecutionContext(TreeContext):
         if check_yields:
             context_set = NO_CONTEXTS
-            returns = get_yield_exprs(self.evaluator, funcdef)
+            returns = get_yield_exprs(self.infer_state, funcdef)
         else:
             returns = funcdef.iter_return_stmts()
         from jedi.inference.gradual.annotation import infer_return_types
@@ -214,7 +214,7 @@ class FunctionExecutionContext(TreeContext):
                 try:
                     children = r.children
                 except AttributeError:
-                    ctx = compiled.builtin_from_name(self.evaluator, u'None')
+                    ctx = compiled.builtin_from_name(self.infer_state, u'None')
                     context_set |= ContextSet([ctx])
                 else:
                     context_set |= self.infer_node(children[1])
@@ -226,7 +226,7 @@ class FunctionExecutionContext(TreeContext):
     def _get_yield_lazy_context(self, yield_expr):
         if yield_expr.type == 'keyword':
             # `yield` just yields None.
-            ctx = compiled.builtin_from_name(self.evaluator, u'None')
+            ctx = compiled.builtin_from_name(self.infer_state, u'None')
             yield LazyKnownContext(ctx)
             return
@@ -243,7 +243,7 @@ class FunctionExecutionContext(TreeContext):
         # TODO: if is_async, wrap yield statements in Awaitable/async_generator_asend
         for_parents = [(y, tree.search_ancestor(y, 'for_stmt', 'funcdef',
                                                 'while_stmt', 'if_stmt'))
-                       for y in get_yield_exprs(self.evaluator, self.tree_node)]
+                       for y in get_yield_exprs(self.infer_state, self.tree_node)]

         # Calculate if the yields are placed within the same for loop.
         yields_order = []
@@ -294,11 +294,11 @@ class FunctionExecutionContext(TreeContext):
         )

     def get_filters(self, search_global=False, until_position=None, origin_scope=None):
-        yield self.function_execution_filter(self.evaluator, self,
+        yield self.function_execution_filter(self.infer_state, self,
                                              until_position=until_position,
                                              origin_scope=origin_scope)

-    @evaluator_method_cache()
+    @infer_state_method_cache()
     def get_executed_params_and_issues(self):
         return self.var_args.get_executed_params_and_issues(self)
@@ -323,16 +323,16 @@ class FunctionExecutionContext(TreeContext):
         """
         Created to be used by inheritance.
        """
-        evaluator = self.evaluator
+        infer_state = self.infer_state
         is_coroutine = self.tree_node.parent.type in ('async_stmt', 'async_funcdef')
-        is_generator = bool(get_yield_exprs(evaluator, self.tree_node))
+        is_generator = bool(get_yield_exprs(infer_state, self.tree_node))
         from jedi.inference.gradual.typing import GenericClass

         if is_coroutine:
             if is_generator:
-                if evaluator.environment.version_info < (3, 6):
+                if infer_state.environment.version_info < (3, 6):
                     return NO_CONTEXTS
-                async_generator_classes = evaluator.typing_module \
+                async_generator_classes = infer_state.typing_module \
                     .py__getattribute__('AsyncGenerator')

                 yield_contexts = self.merge_yield_contexts(is_async=True)
@@ -344,9 +344,9 @@ class FunctionExecutionContext(TreeContext):
                     for c in async_generator_classes
                 ).execute_annotation()
             else:
-                if evaluator.environment.version_info < (3, 5):
+                if infer_state.environment.version_info < (3, 5):
                     return NO_CONTEXTS
-                async_classes = evaluator.typing_module.py__getattribute__('Coroutine')
+                async_classes = infer_state.typing_module.py__getattribute__('Coroutine')
                 return_contexts = self.get_return_values()
                 # Only the first generic is relevant.
                 generics = (return_contexts.py__class__(), NO_CONTEXTS, NO_CONTEXTS)
@@ -355,7 +355,7 @@ class FunctionExecutionContext(TreeContext):
                 ).execute_annotation()
         else:
             if is_generator:
-                return ContextSet([iterable.Generator(evaluator, self)])
+                return ContextSet([iterable.Generator(infer_state, self)])
             else:
                 return self.get_return_values()
@@ -380,7 +380,7 @@ class OverloadedFunctionContext(FunctionMixin, ContextWrapper):
             if matched:
                 return context_set

-        if self.evaluator.is_analysis:
+        if self.infer_state.is_analysis:
             # In this case we want precision.
             return NO_CONTEXTS
         return ContextSet.from_sets(fe.infer() for fe in function_executions)
@@ -412,7 +412,7 @@ def _find_overload_functions(context, tree_node):
     while True:
         filter = ParserTreeFilter(
-            context.evaluator,
+            context.infer_state,
             context,
             until_position=tree_node.start_pos
         )
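Several hunks in this file swap `evaluator_method_cache` for `infer_state_method_cache`. The decorator itself lives in `jedi.inference.cache` and is not part of this diff; as a rough mental model only, a per-instance method memoizer with a recursion default (the `default=NO_CONTEXTS` seen above) might look like this sketch. The names and details here are assumptions, not jedi's actual implementation:

from functools import wraps

_UNFINISHED = object()


def infer_state_method_cache(default=_UNFINISHED):
    # Sketch only: memoize a method per instance and per call arguments.
    def decorator(func):
        cache = {}

        @wraps(func)
        def wrapper(self, *args, **kwargs):
            key = (id(self), args, frozenset(kwargs.items()))
            if key in cache:
                result = cache[key]
                if result is _UNFINISHED:
                    # The method re-entered itself; break the recursion by
                    # answering with the configured default value.
                    if default is _UNFINISHED:
                        raise RuntimeError('recursion in ' + func.__name__)
                    return default
                return result
            cache[key] = _UNFINISHED  # mark this call as in progress
            result = cache[key] = func(self, *args, **kwargs)
            return result

        return wrapper
    return decorator


class Example(object):
    @infer_state_method_cache(default=0)
    def depth(self):
        return self.depth() + 1  # the inner, recursive call returns the default


print(Example().depth())  # -> 1

The real decorator presumably keeps its cache on the InferState object rather than in a closure, which is exactly why this rename touches it.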

View File

@@ -10,7 +10,7 @@ from jedi.inference.names import ContextName, TreeNameDefinition
 from jedi.inference.base_context import Context, NO_CONTEXTS, ContextSet, \
     iterator_to_context_set, ContextWrapper
 from jedi.inference.lazy_context import LazyKnownContext, LazyKnownContexts
-from jedi.inference.cache import evaluator_method_cache
+from jedi.inference.cache import infer_state_method_cache
 from jedi.inference.arguments import AnonymousArguments, \
     ValuesArguments, TreeArgumentsWrapper
 from jedi.inference.context.function import \
@@ -50,7 +50,7 @@ class AnonymousInstanceArguments(AnonymousArguments):
             # executions of this function, we have all the params already.
             return [self_param], []
         executed_params = list(search_params(
-            execution_context.evaluator,
+            execution_context.infer_state,
             execution_context,
             execution_context.tree_node
         ))
@@ -61,8 +61,8 @@ class AnonymousInstanceArguments(AnonymousArguments):

 class AbstractInstanceContext(Context):
     api_type = u'instance'

-    def __init__(self, evaluator, parent_context, class_context, var_args):
-        super(AbstractInstanceContext, self).__init__(evaluator, parent_context)
+    def __init__(self, infer_state, parent_context, class_context, var_args):
+        super(AbstractInstanceContext, self).__init__(infer_state, parent_context)
         # Generated instances are classes that are just generated by self
         # (No var_args) used.
         self.class_context = class_context
@@ -117,7 +117,7 @@ class AbstractInstanceContext(Context):
         names = self.get_function_slot_names(u'__get__')
         if names:
             if obj is None:
-                obj = compiled.builtin_from_name(self.evaluator, u'None')
+                obj = compiled.builtin_from_name(self.infer_state, u'None')
             return self.execute_function_slots(names, obj, class_context)
         else:
             return ContextSet([self])
@@ -132,7 +132,7 @@ class AbstractInstanceContext(Context):
                 # In this case we're excluding compiled objects that are
                 # not fake objects. It doesn't make sense for normal
                 # compiled objects to search for self variables.
-                yield SelfAttributeFilter(self.evaluator, self, cls, origin_scope)
+                yield SelfAttributeFilter(self.infer_state, self, cls, origin_scope)

         class_filters = class_context.get_filters(
             search_global=False,
@@ -141,9 +141,9 @@ class AbstractInstanceContext(Context):
         )
         for f in class_filters:
             if isinstance(f, ClassFilter):
-                yield InstanceClassFilter(self.evaluator, self, f)
+                yield InstanceClassFilter(self.infer_state, self, f)
             elif isinstance(f, CompiledObjectFilter):
-                yield CompiledInstanceClassFilter(self.evaluator, self, f)
+                yield CompiledInstanceClassFilter(self.infer_state, self, f)
             else:
                 # Propably from the metaclass.
                 yield f
@@ -168,7 +168,7 @@ class AbstractInstanceContext(Context):
         for generator in self.execute_function_slots(iter_slot_names):
             if generator.is_instance() and not generator.is_compiled():
                 # `__next__` logic.
-                if self.evaluator.environment.version_info.major == 2:
+                if self.infer_state.environment.version_info.major == 2:
                     name = u'next'
                 else:
                     name = u'__next__'
@@ -199,7 +199,7 @@ class AbstractInstanceContext(Context):
             bound_method = BoundMethod(self, function)
             yield bound_method.get_function_execution(self.var_args)

-    @evaluator_method_cache()
+    @infer_state_method_cache()
     def create_instance_context(self, class_context, node):
         if node.parent.type in ('funcdef', 'classdef'):
             node = node.parent
@@ -219,7 +219,7 @@ class AbstractInstanceContext(Context):
             else:
                 return bound_method.get_function_execution()
         elif scope.type == 'classdef':
-            class_context = ClassContext(self.evaluator, parent_context, scope)
+            class_context = ClassContext(self.infer_state, parent_context, scope)
             return class_context
         elif scope.type in ('comp_for', 'sync_comp_for'):
             # Comprehensions currently don't have a special scope in Jedi.
@@ -238,9 +238,9 @@ class AbstractInstanceContext(Context):

 class CompiledInstance(AbstractInstanceContext):
-    def __init__(self, evaluator, parent_context, class_context, var_args):
+    def __init__(self, infer_state, parent_context, class_context, var_args):
         self._original_var_args = var_args
-        super(CompiledInstance, self).__init__(evaluator, parent_context, class_context, var_args)
+        super(CompiledInstance, self).__init__(infer_state, parent_context, class_context, var_args)

     @property
     def name(self):
@@ -258,16 +258,16 @@ class CompiledInstance(AbstractInstanceContext):

 class TreeInstance(AbstractInstanceContext):
-    def __init__(self, evaluator, parent_context, class_context, var_args):
+    def __init__(self, infer_state, parent_context, class_context, var_args):
         # I don't think that dynamic append lookups should happen here. That
         # sounds more like something that should go to py__iter__.
         if class_context.py__name__() in ['list', 'set'] \
-                and parent_context.get_root_context() == evaluator.builtins_module:
+                and parent_context.get_root_context() == infer_state.builtins_module:
             # compare the module path with the builtin name.
             if settings.dynamic_array_additions:
                 var_args = iterable.get_dynamic_array_instance(self, var_args)

-        super(TreeInstance, self).__init__(evaluator, parent_context,
+        super(TreeInstance, self).__init__(infer_state, parent_context,
                                            class_context, var_args)
         self.tree_node = class_context.tree_node
@@ -277,7 +277,7 @@ class TreeInstance(AbstractInstanceContext):
     # This can recurse, if the initialization of the class includes a reference
     # to itself.
-    @evaluator_method_cache(default=None)
+    @infer_state_method_cache(default=None)
     def _get_annotated_class_object(self):
         from jedi.inference.gradual.annotation import py__annotations__, \
             infer_type_vars_for_execution
@@ -313,9 +313,9 @@ class TreeInstance(AbstractInstanceContext):

 class AnonymousInstance(TreeInstance):
-    def __init__(self, evaluator, parent_context, class_context):
+    def __init__(self, infer_state, parent_context, class_context):
         super(AnonymousInstance, self).__init__(
-            evaluator,
+            infer_state,
             parent_context,
             class_context,
             var_args=AnonymousInstanceArguments(self),
@@ -327,9 +327,9 @@ class AnonymousInstance(TreeInstance):

 class CompiledInstanceName(compiled.CompiledName):
-    def __init__(self, evaluator, instance, klass, name):
+    def __init__(self, infer_state, instance, klass, name):
         super(CompiledInstanceName, self).__init__(
-            evaluator,
+            infer_state,
             klass.parent_context,
             name.string_name
         )
@@ -348,8 +348,8 @@ class CompiledInstanceName(compiled.CompiledName):

 class CompiledInstanceClassFilter(AbstractFilter):
     name_class = CompiledInstanceName

-    def __init__(self, evaluator, instance, f):
-        self._evaluator = evaluator
+    def __init__(self, infer_state, instance, f):
+        self._infer_state = infer_state
         self._instance = instance
         self._class_filter = f
@@ -362,7 +362,7 @@ class CompiledInstanceClassFilter(AbstractFilter):
     def _convert(self, names):
         klass = self._class_filter.compiled_object
         return [
-            CompiledInstanceName(self._evaluator, self._instance, klass, n)
+            CompiledInstanceName(self._infer_state, self._instance, klass, n)
             for n in names
         ]
@@ -376,7 +376,7 @@ class BoundMethod(FunctionMixin, ContextWrapper):
         return True

     def py__class__(self):
-        c, = contexts_from_qualified_names(self.evaluator, u'types', u'MethodType')
+        c, = contexts_from_qualified_names(self.infer_state, u'types', u'MethodType')
         return c

     def _get_arguments(self, arguments):
@@ -456,7 +456,7 @@ class InstanceClassFilter(AbstractFilter):
     resulting names in LazyINstanceClassName. The idea is that the class name
     filtering can be very flexible and always be reflected in instances.
     """
-    def __init__(self, evaluator, instance, class_filter):
+    def __init__(self, infer_state, instance, class_filter):
         self._instance = instance
         self._class_filter = class_filter
@@ -479,9 +479,9 @@ class SelfAttributeFilter(ClassFilter):
     """
     name_class = SelfName

-    def __init__(self, evaluator, context, class_context, origin_scope):
+    def __init__(self, infer_state, context, class_context, origin_scope):
         super(SelfAttributeFilter, self).__init__(
-            evaluator=evaluator,
+            infer_state=infer_state,
             context=context,
             node_context=class_context,
             origin_scope=origin_scope,

View File

@@ -34,7 +34,7 @@ from jedi.inference.helpers import get_int_or_none, is_string, \
     predefine_names, infer_call_of_leaf, reraise_getitem_errors, \
     SimpleGetItemNotFound
 from jedi.inference.utils import safe_property, to_list
-from jedi.inference.cache import evaluator_method_cache
+from jedi.inference.cache import infer_state_method_cache
 from jedi.inference.filters import ParserTreeFilter, LazyAttributeOverwrite, \
     publish_method
 from jedi.inference.base_context import ContextSet, Context, NO_CONTEXTS, \
@@ -44,7 +44,7 @@ from jedi.parser_utils import get_sync_comp_fors

 class IterableMixin(object):
     def py__stop_iteration_returns(self):
-        return ContextSet([compiled.builtin_from_name(self.evaluator, u'None')])
+        return ContextSet([compiled.builtin_from_name(self.infer_state, u'None')])

     # At the moment, safe values are simple values like "foo", 1 and not
     # lists/dicts. Therefore as a small speed optimization we can just do the
@@ -66,7 +66,7 @@ class GeneratorBase(LazyAttributeOverwrite, IterableMixin):
     array_type = None

     def _get_wrapped_context(self):
-        generator, = self.evaluator.typing_module \
+        generator, = self.infer_state.typing_module \
             .py__getattribute__('Generator') \
             .execute_annotation()
         return generator
@@ -88,7 +88,7 @@ class GeneratorBase(LazyAttributeOverwrite, IterableMixin):
         return ContextSet.from_sets(lazy_context.infer() for lazy_context in self.py__iter__())

     def py__stop_iteration_returns(self):
-        return ContextSet([compiled.builtin_from_name(self.evaluator, u'None')])
+        return ContextSet([compiled.builtin_from_name(self.infer_state, u'None')])

     @property
     def name(self):
@@ -97,8 +97,8 @@ class GeneratorBase(LazyAttributeOverwrite, IterableMixin):

 class Generator(GeneratorBase):
     """Handling of `yield` functions."""
-    def __init__(self, evaluator, func_execution_context):
-        super(Generator, self).__init__(evaluator)
+    def __init__(self, infer_state, func_execution_context):
+        super(Generator, self).__init__(infer_state)
         self._func_execution_context = func_execution_context

     def py__iter__(self, contextualized_node=None):
@@ -114,13 +114,13 @@ class Generator(GeneratorBase):
 class CompForContext(TreeContext):
     @classmethod
     def from_comp_for(cls, parent_context, comp_for):
-        return cls(parent_context.evaluator, parent_context, comp_for)
+        return cls(parent_context.infer_state, parent_context, comp_for)

     def get_filters(self, search_global=False, until_position=None, origin_scope=None):
-        yield ParserTreeFilter(self.evaluator, self)
+        yield ParserTreeFilter(self.infer_state, self)


-def comprehension_from_atom(evaluator, context, atom):
+def comprehension_from_atom(infer_state, context, atom):
     bracket = atom.children[0]
     test_list_comp = atom.children[1]
@@ -131,7 +131,7 @@ def comprehension_from_atom(evaluator, context, atom):
                 sync_comp_for = sync_comp_for.children[1]

             return DictComprehension(
-                evaluator,
+                infer_state,
                 context,
                 sync_comp_for_node=sync_comp_for,
                 key_node=test_list_comp.children[0],
@@ -149,7 +149,7 @@ def comprehension_from_atom(evaluator, context, atom):
         sync_comp_for = sync_comp_for.children[1]

     return cls(
-        evaluator,
+        infer_state,
         defining_context=context,
         sync_comp_for_node=sync_comp_for,
         entry_node=test_list_comp.children[0],
@@ -157,7 +157,7 @@ def comprehension_from_atom(evaluator, context, atom):

 class ComprehensionMixin(object):
-    @evaluator_method_cache()
+    @infer_state_method_cache()
     def _get_comp_for_context(self, parent_context, comp_for):
         return CompForContext.from_comp_for(parent_context, comp_for)
@@ -192,7 +192,7 @@ class ComprehensionMixin(object):
             else:
                 yield iterated

-    @evaluator_method_cache(default=[])
+    @infer_state_method_cache(default=[])
     @to_list
     def _iterate(self):
         comp_fors = tuple(get_sync_comp_fors(self._sync_comp_for_node))
@@ -224,7 +224,7 @@ class Sequence(LazyAttributeOverwrite, IterableMixin):
     def _get_wrapped_context(self):
         from jedi.inference.gradual.typing import GenericClass
-        klass = compiled.builtin_from_name(self.evaluator, self.array_type)
+        klass = compiled.builtin_from_name(self.infer_state, self.array_type)
         c, = GenericClass(klass, self._get_generics()).execute_annotation()
         return c
@@ -232,11 +232,11 @@ class Sequence(LazyAttributeOverwrite, IterableMixin):
         return None  # We don't know the length, because of appends.

     def py__class__(self):
-        return compiled.builtin_from_name(self.evaluator, self.array_type)
+        return compiled.builtin_from_name(self.infer_state, self.array_type)

     @safe_property
     def parent(self):
-        return self.evaluator.builtins_module
+        return self.infer_state.builtins_module

     def py__getitem__(self, index_context_set, contextualized_node):
         if self.array_type == 'dict':
@@ -245,9 +245,9 @@ class Sequence(LazyAttributeOverwrite, IterableMixin):

 class _BaseComprehension(ComprehensionMixin):
-    def __init__(self, evaluator, defining_context, sync_comp_for_node, entry_node):
+    def __init__(self, infer_state, defining_context, sync_comp_for_node, entry_node):
         assert sync_comp_for_node.type == 'sync_comp_for'
-        super(_BaseComprehension, self).__init__(evaluator)
+        super(_BaseComprehension, self).__init__(infer_state)
         self._defining_context = defining_context
         self._sync_comp_for_node = sync_comp_for_node
         self._entry_node = entry_node
@@ -277,9 +277,9 @@ class GeneratorComprehension(_BaseComprehension, GeneratorBase):

 class DictComprehension(ComprehensionMixin, Sequence):
     array_type = u'dict'

-    def __init__(self, evaluator, defining_context, sync_comp_for_node, key_node, value_node):
+    def __init__(self, infer_state, defining_context, sync_comp_for_node, key_node, value_node):
         assert sync_comp_for_node.type == 'sync_comp_for'
-        super(DictComprehension, self).__init__(evaluator)
+        super(DictComprehension, self).__init__(infer_state)
         self._defining_context = defining_context
         self._sync_comp_for_node = sync_comp_for_node
         self._entry_node = key_node
@@ -308,14 +308,14 @@ class DictComprehension(ComprehensionMixin, Sequence):
     @publish_method('values')
     def _imitate_values(self):
         lazy_context = LazyKnownContexts(self._dict_values())
-        return ContextSet([FakeSequence(self.evaluator, u'list', [lazy_context])])
+        return ContextSet([FakeSequence(self.infer_state, u'list', [lazy_context])])

     @publish_method('items')
     def _imitate_items(self):
         lazy_contexts = [
             LazyKnownContext(
                 FakeSequence(
-                    self.evaluator,
+                    self.infer_state,
                     u'tuple',
                     [LazyKnownContexts(key),
                      LazyKnownContexts(value)]
@@ -324,7 +324,7 @@ class DictComprehension(ComprehensionMixin, Sequence):
             for key, value in self._iterate()
         ]

-        return ContextSet([FakeSequence(self.evaluator, u'list', lazy_contexts)])
+        return ContextSet([FakeSequence(self.infer_state, u'list', lazy_contexts)])

     def get_mapping_item_contexts(self):
         return self._dict_keys(), self._dict_values()
@@ -341,8 +341,8 @@ class SequenceLiteralContext(Sequence):
                '[': u'list',
                '{': u'set'}

-    def __init__(self, evaluator, defining_context, atom):
-        super(SequenceLiteralContext, self).__init__(evaluator)
+    def __init__(self, infer_state, defining_context, atom):
+        super(SequenceLiteralContext, self).__init__(infer_state)
         self.atom = atom
         self._defining_context = defining_context
@@ -355,7 +355,7 @@ class SequenceLiteralContext(Sequence):
     def py__simple_getitem__(self, index):
         """Here the index is an int/str. Raises IndexError/KeyError."""
         if self.array_type == u'dict':
-            compiled_obj_index = compiled.create_simple_object(self.evaluator, index)
+            compiled_obj_index = compiled.create_simple_object(self.infer_state, index)
             for key, value in self.get_tree_entries():
                 for k in self._defining_context.infer_node(key):
                     try:
@@ -471,27 +471,27 @@ class SequenceLiteralContext(Sequence):

 class DictLiteralContext(_DictMixin, SequenceLiteralContext):
     array_type = u'dict'

-    def __init__(self, evaluator, defining_context, atom):
-        super(SequenceLiteralContext, self).__init__(evaluator)
+    def __init__(self, infer_state, defining_context, atom):
+        super(SequenceLiteralContext, self).__init__(infer_state)
         self._defining_context = defining_context
         self.atom = atom

     @publish_method('values')
     def _imitate_values(self):
         lazy_context = LazyKnownContexts(self._dict_values())
-        return ContextSet([FakeSequence(self.evaluator, u'list', [lazy_context])])
+        return ContextSet([FakeSequence(self.infer_state, u'list', [lazy_context])])

     @publish_method('items')
     def _imitate_items(self):
         lazy_contexts = [
             LazyKnownContext(FakeSequence(
-                self.evaluator, u'tuple',
+                self.infer_state, u'tuple',
                 (LazyTreeContext(self._defining_context, key_node),
                  LazyTreeContext(self._defining_context, value_node))
             )) for key_node, value_node in self.get_tree_entries()
         ]

-        return ContextSet([FakeSequence(self.evaluator, u'list', lazy_contexts)])
+        return ContextSet([FakeSequence(self.infer_state, u'list', lazy_contexts)])

     def _dict_keys(self):
         return ContextSet.from_sets(
@@ -504,19 +504,19 @@ class DictLiteralContext(_DictMixin, SequenceLiteralContext):

 class _FakeArray(SequenceLiteralContext):
-    def __init__(self, evaluator, container, type):
-        super(SequenceLiteralContext, self).__init__(evaluator)
+    def __init__(self, infer_state, container, type):
+        super(SequenceLiteralContext, self).__init__(infer_state)
         self.array_type = type
         self.atom = container
         # TODO is this class really needed?


 class FakeSequence(_FakeArray):
-    def __init__(self, evaluator, array_type, lazy_context_list):
+    def __init__(self, infer_state, array_type, lazy_context_list):
         """
         type should be one of "tuple", "list"
         """
-        super(FakeSequence, self).__init__(evaluator, None, array_type)
+        super(FakeSequence, self).__init__(infer_state, None, array_type)
         self._lazy_context_list = lazy_context_list

     def py__simple_getitem__(self, index):
@@ -538,16 +538,16 @@ class FakeSequence(_FakeArray):

 class FakeDict(_DictMixin, _FakeArray):
-    def __init__(self, evaluator, dct):
-        super(FakeDict, self).__init__(evaluator, dct, u'dict')
+    def __init__(self, infer_state, dct):
+        super(FakeDict, self).__init__(infer_state, dct, u'dict')
         self._dct = dct

     def py__iter__(self, contextualized_node=None):
         for key in self._dct:
-            yield LazyKnownContext(compiled.create_simple_object(self.evaluator, key))
+            yield LazyKnownContext(compiled.create_simple_object(self.infer_state, key))

     def py__simple_getitem__(self, index):
-        if is_py3 and self.evaluator.environment.version_info.major == 2:
+        if is_py3 and self.infer_state.environment.version_info.major == 2:
             # In Python 2 bytes and unicode compare.
             if isinstance(index, bytes):
                 index_unicode = force_unicode(index)
@@ -569,7 +569,7 @@ class FakeDict(_DictMixin, _FakeArray):
     @publish_method('values')
     def _values(self):
         return ContextSet([FakeSequence(
-            self.evaluator, u'tuple',
+            self.infer_state, u'tuple',
             [LazyKnownContexts(self._dict_values())]
         )])
@@ -587,8 +587,8 @@ class FakeDict(_DictMixin, _FakeArray):

 class MergedArray(_FakeArray):
-    def __init__(self, evaluator, arrays):
-        super(MergedArray, self).__init__(evaluator, arrays, arrays[-1].array_type)
+    def __init__(self, infer_state, arrays):
+        super(MergedArray, self).__init__(infer_state, arrays, arrays[-1].array_type)
         self._arrays = arrays

     def py__iter__(self, contextualized_node=None):
@@ -657,7 +657,7 @@ def check_array_additions(context, sequence):
     return _check_array_additions(context, sequence)


-@evaluator_method_cache(default=NO_CONTEXTS)
+@infer_state_method_cache(default=NO_CONTEXTS)
 @debug.increase_indent
 def _check_array_additions(context, sequence):
     """
@@ -675,7 +675,7 @@ def _check_array_additions(context, sequence):
         return NO_CONTEXTS

     def find_additions(context, arglist, add_name):
-        params = list(arguments.TreeArguments(context.evaluator, context, arglist).unpack())
+        params = list(arguments.TreeArguments(context.infer_state, context, arglist).unpack())
         result = set()
         if add_name in ['insert']:
             params = params[1:]
@@ -719,7 +719,7 @@ def _check_array_additions(context, sequence):
             random_context = context.create_context(name)

-            with recursion.execution_allowed(context.evaluator, power) as allowed:
+            with recursion.execution_allowed(context.infer_state, power) as allowed:
                 if allowed:
                     found = infer_call_of_leaf(
                         random_context,
@@ -758,7 +758,7 @@ class _ArrayInstance(HelperContextMixin):
         self.var_args = var_args

     def py__class__(self):
-        tuple_, = self.instance.evaluator.builtins_module.py__getattribute__('tuple')
+        tuple_, = self.instance.infer_state.builtins_module.py__getattribute__('tuple')
         return tuple_

     def py__iter__(self, contextualized_node=None):
@@ -792,7 +792,7 @@ class Slice(object):
     def __getattr__(self, name):
         if self._slice_object is None:
-            context = compiled.builtin_from_name(self._context.evaluator, 'slice')
+            context = compiled.builtin_from_name(self._context.infer_state, 'slice')
             self._slice_object, = context.execute_with_values()
         return getattr(self._slice_object, name)

View File

@@ -39,8 +39,8 @@ py__doc__() Returns the docstring for a context.
 from jedi import debug
 from jedi._compatibility import use_metaclass
 from jedi.parser_utils import get_cached_parent_scope
-from jedi.inference.cache import evaluator_method_cache, CachedMetaClass, \
-    evaluator_method_generator_cache
+from jedi.inference.cache import infer_state_method_cache, CachedMetaClass, \
+    infer_state_method_generator_cache
 from jedi.inference import compiled
 from jedi.inference.lazy_context import LazyKnownContexts
 from jedi.inference.filters import ParserTreeFilter
@@ -73,7 +73,7 @@ class ClassName(TreeNameDefinition):
         # We're using a different context to infer, so we cannot call super().
         from jedi.inference.syntax_tree import tree_name_to_contexts
         inferred = tree_name_to_contexts(
-            self.parent_context.evaluator, self._name_context, self.tree_name)
+            self.parent_context.infer_state, self._name_context, self.tree_name)

         for result_context in inferred:
             if self._apply_decorators:
@@ -141,10 +141,10 @@ class ClassMixin(object):
         from jedi.inference.context import TreeInstance
         if arguments is None:
             arguments = ValuesArguments([])
-        return ContextSet([TreeInstance(self.evaluator, self.parent_context, self, arguments)])
+        return ContextSet([TreeInstance(self.infer_state, self.parent_context, self, arguments)])

     def py__class__(self):
-        return compiled.builtin_from_name(self.evaluator, u'type')
+        return compiled.builtin_from_name(self.infer_state, u'type')

     @property
     def name(self):
@@ -159,7 +159,7 @@ class ClassMixin(object):
                 return list(context_.get_param_names())[1:]
         return []

-    @evaluator_method_generator_cache()
+    @infer_state_method_generator_cache()
     def py__mro__(self):
         mro = [self]
         yield self
@@ -208,13 +208,13 @@ class ClassMixin(object):
                     yield filter
             else:
                 yield ClassFilter(
-                    self.evaluator, self, node_context=cls,
+                    self.infer_state, self, node_context=cls,
                     origin_scope=origin_scope,
                     is_instance=is_instance
                 )

         if not is_instance:
             from jedi.inference.compiled import builtin_from_name
-            type_ = builtin_from_name(self.evaluator, u'type')
+            type_ = builtin_from_name(self.infer_state, u'type')
             assert isinstance(type_, ClassContext)
             if type_ != self:
                 for instance in type_.py__call__():
@@ -230,7 +230,7 @@ class ClassMixin(object):
     def get_global_filter(self, until_position=None, origin_scope=None):
         return ParserTreeFilter(
-            self.evaluator,
+            self.infer_state,
             context=self,
             until_position=until_position,
             origin_scope=origin_scope
@@ -240,7 +240,7 @@ class ClassMixin(object):

 class ClassContext(use_metaclass(CachedMetaClass, ClassMixin, FunctionAndClassBase)):
     api_type = u'class'

-    @evaluator_method_cache()
+    @infer_state_method_cache()
     def list_type_vars(self):
         found = []
         arglist = self.tree_node.get_super_arglist()
@@ -262,10 +262,10 @@ class ClassContext(use_metaclass(CachedMetaClass, ClassMixin, FunctionAndClassBa
         arglist = self.tree_node.get_super_arglist()
         if arglist:
             from jedi.inference import arguments
-            return arguments.TreeArguments(self.evaluator, self.parent_context, arglist)
+            return arguments.TreeArguments(self.infer_state, self.parent_context, arglist)
         return None

-    @evaluator_method_cache(default=())
+    @infer_state_method_cache(default=())
     def py__bases__(self):
         args = self._get_bases_arguments()
         if args is not None:
@@ -274,10 +274,10 @@ class ClassContext(use_metaclass(CachedMetaClass, ClassMixin, FunctionAndClassBa
             return lst

         if self.py__name__() == 'object' \
-                and self.parent_context == self.evaluator.builtins_module:
+                and self.parent_context == self.infer_state.builtins_module:
             return []
         return [LazyKnownContexts(
-            self.evaluator.builtins_module.py__getattribute__('object')
+            self.infer_state.builtins_module.py__getattribute__('object')
         )]

     def py__getitem__(self, index_context_set, contextualized_node):
@@ -321,7 +321,7 @@ class ClassContext(use_metaclass(CachedMetaClass, ClassMixin, FunctionAndClassBa
             debug.dbg('Unprocessed metaclass %s', metaclass)
         return []

-    @evaluator_method_cache(default=NO_CONTEXTS)
+    @infer_state_method_cache(default=NO_CONTEXTS)
     def get_metaclasses(self):
         args = self._get_bases_arguments()
         if args is not None:
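This file also renames the generator variant, `infer_state_method_generator_cache`, applied above to `py__mro__`, a method that yields base classes lazily. Caching a generator means recording what it yields and replaying it on later calls; a simplified, eager sketch (an assumption for illustration — jedi's actual decorator also has to cope with recursion and partially consumed generators):

from functools import wraps


def infer_state_method_generator_cache():
    # Sketch only: drain the generator once, record the items, and hand out
    # a fresh iterator over the recorded items on every call.
    def decorator(func):
        cache = {}

        @wraps(func)
        def wrapper(self, *args):
            key = (id(self), args)
            if key not in cache:
                cache[key] = list(func(self, *args))
            return iter(cache[key])

        return wrapper
    return decorator


class C(object):
    @infer_state_method_generator_cache()
    def py__mro__(self):
        yield 'C'
        yield 'object'


c = C()
assert list(c.py__mro__()) == ['C', 'object'] == list(c.py__mro__())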

View File

@@ -2,7 +2,7 @@ import re
 import os

 from jedi import debug
-from jedi.inference.cache import evaluator_method_cache
+from jedi.inference.cache import infer_state_method_cache
 from jedi.inference.names import ContextNameMixin, AbstractNameDefinition
 from jedi.inference.filters import GlobalNameFilter, ParserTreeFilter, DictFilter, MergedFilter
 from jedi.inference import compiled
@@ -27,13 +27,13 @@ class _ModuleAttributeName(AbstractNameDefinition):
     def infer(self):
         if self._string_value is not None:
             s = self._string_value
-            if self.parent_context.evaluator.environment.version_info.major == 2 \
+            if self.parent_context.infer_state.environment.version_info.major == 2 \
                     and not isinstance(s, bytes):
                 s = s.encode('utf-8')
             return ContextSet([
-                create_simple_object(self.parent_context.evaluator, s)
+                create_simple_object(self.parent_context.infer_state, s)
             ])
-        return compiled.get_string_context_set(self.parent_context.evaluator)
+        return compiled.get_string_context_set(self.parent_context.infer_state)


 class ModuleName(ContextNameMixin, AbstractNameDefinition):
@@ -48,9 +48,9 @@ class ModuleName(ContextNameMixin, AbstractNameDefinition):
         return self._name


-def iter_module_names(evaluator, paths):
+def iter_module_names(infer_state, paths):
     # Python modules/packages
-    for n in evaluator.compiled_subprocess.list_module_names(paths):
+    for n in infer_state.compiled_subprocess.list_module_names(paths):
         yield n

     for path in paths:
@@ -75,7 +75,7 @@ def iter_module_names(evaluator, paths):

 class SubModuleDictMixin(object):
-    @evaluator_method_cache()
+    @infer_state_method_cache()
     def sub_modules_dict(self):
         """
         Lists modules in the directory of this module (if this module is a
@@ -87,7 +87,7 @@ class SubModuleDictMixin(object):
         except AttributeError:
             pass
         else:
-            mods = iter_module_names(self.evaluator, method())
+            mods = iter_module_names(self.infer_state, method())
             for name in mods:
                 # It's obviously a relative import to the current module.
                 names[name] = SubModuleName(self, name)
@@ -101,7 +101,7 @@ class ModuleMixin(SubModuleDictMixin):
     def get_filters(self, search_global=False, until_position=None, origin_scope=None):
         yield MergedFilter(
             ParserTreeFilter(
-                self.evaluator,
+                self.infer_state,
                 context=self,
                 until_position=until_position,
                 origin_scope=origin_scope
@@ -114,7 +114,7 @@ class ModuleMixin(SubModuleDictMixin):
         yield star_filter

     def py__class__(self):
-        c, = contexts_from_qualified_names(self.evaluator, u'types', u'ModuleType')
+        c, = contexts_from_qualified_names(self.infer_state, u'types', u'ModuleType')
         return c

     def is_module(self):
@@ -124,7 +124,7 @@ class ModuleMixin(SubModuleDictMixin):
         return False

     @property
-    @evaluator_method_cache()
+    @infer_state_method_cache()
     def name(self):
         return ModuleName(self, self._string_name)
@@ -141,7 +141,7 @@ class ModuleMixin(SubModuleDictMixin):
             # Remove PEP 3149 names
             return re.sub(r'\.[a-z]+-\d{2}[mud]{0,3}$', '', r.group(1))

-    @evaluator_method_cache()
+    @infer_state_method_cache()
     def _module_attributes_dict(self):
         names = ['__package__', '__doc__', '__name__']
         # All the additional module attributes are strings.
@@ -157,8 +157,8 @@ class ModuleMixin(SubModuleDictMixin):
     # I'm not sure if the star import cache is really that effective anymore
     # with all the other really fast import caches. Recheck. Also we would need
-    # to push the star imports into Evaluator.module_cache, if we reenable this.
-    @evaluator_method_cache([])
+    # to push the star imports into InferState.module_cache, if we reenable this.
+    @infer_state_method_cache([])
     def star_imports(self):
         from jedi.inference.imports import Importer
@@ -166,7 +166,7 @@ class ModuleMixin(SubModuleDictMixin):
         for i in self.tree_node.iter_imports():
             if i.is_star_import():
                 new = Importer(
-                    self.evaluator,
+                    self.infer_state,
                     import_path=i.get_paths()[-1],
                     module_context=self,
                     level=i.level
@@ -191,9 +191,9 @@ class ModuleContext(ModuleMixin, TreeContext):
     api_type = u'module'
     parent_context = None

-    def __init__(self, evaluator, module_node, file_io, string_names, code_lines, is_package=False):
+    def __init__(self, infer_state, module_node, file_io, string_names, code_lines, is_package=False):
         super(ModuleContext, self).__init__(
-            evaluator,
+            infer_state,
             parent_context=None,
             tree_node=module_node
         )
@@ -243,7 +243,7 @@ class ModuleContext(ModuleMixin, TreeContext):
             # It is a namespace, now try to find the rest of the
            # modules on sys_path or whatever the search_path is.
             paths = set()
-            for s in self.evaluator.get_sys_path():
+            for s in self.infer_state.get_sys_path():
                 other = os.path.join(s, self.name.string_name)
                 if os.path.isdir(other):
                     paths.add(other)

View File

@@ -1,4 +1,4 @@
-from jedi.inference.cache import evaluator_method_cache
+from jedi.inference.cache import infer_state_method_cache
 from jedi.inference.filters import DictFilter
 from jedi.inference.names import ContextNameMixin, AbstractNameDefinition
 from jedi.inference.base_context import Context
@@ -25,9 +25,9 @@ class ImplicitNamespaceContext(Context, SubModuleDictMixin):
     api_type = u'module'
     parent_context = None

-    def __init__(self, evaluator, fullname, paths):
-        super(ImplicitNamespaceContext, self).__init__(evaluator, parent_context=None)
-        self.evaluator = evaluator
+    def __init__(self, infer_state, fullname, paths):
+        super(ImplicitNamespaceContext, self).__init__(infer_state, parent_context=None)
+        self.infer_state = infer_state
         self._fullname = fullname
         self._paths = paths
@@ -35,7 +35,7 @@ class ImplicitNamespaceContext(Context, SubModuleDictMixin):
         yield DictFilter(self.sub_modules_dict())

     @property
-    @evaluator_method_cache()
+    @infer_state_method_cache()
     def name(self):
         string_name = self.py__package__()[-1]
         return ImplicitNSName(self, string_name)