Mirror of https://github.com/davidhalter/jedi.git (synced 2026-01-30 20:45:22 +08:00)
Publicize the _evaluator in contexts.
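Illustrative sketch (condensed from the `Context` hunks below, not the full jedi source): the commit turns the private `_evaluator` attribute of contexts into a public `evaluator` attribute, so subclasses and callers stop reaching into a private member:

    class Context(object):
        def __init__(self, evaluator, parent_context=None):
            self.evaluator = evaluator           # was: self._evaluator = evaluator
            self.parent_context = parent_context

        def execute(self, arguments=None):
            # Delegate to the shared evaluator, now via the public attribute.
            return self.evaluator.execute(self, arguments)
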
@@ -24,7 +24,7 @@ def memoize_default(default=NO_DEFAULT, evaluator_is_first_arg=False, second_arg
             elif second_arg_is_evaluator: # needed for meta classes
                 cache = args[0].memoize_cache
             else:
-                cache = obj._evaluator.memoize_cache
+                cache = obj.evaluator.memoize_cache
 
             try:
                 memo = cache[function]
@@ -44,9 +44,8 @@ class CompiledObject(Context):
     used_names = {} # To be consistent with modules.
 
     def __init__(self, evaluator, obj, parent_context=None):
-        self._evaluator = evaluator
+        super(CompiledObject, self).__init__(evaluator, parent_context)
         self.obj = obj
-        self.parent_context = parent_context
 
     def get_root_node(self):
         # To make things a bit easier with filters we add this method here.
@@ -56,21 +55,21 @@ class CompiledObject(Context):
     def py__call__(self, params):
         if inspect.isclass(self.obj):
             from jedi.evaluate.instance import CompiledInstance
-            return set([CompiledInstance(self._evaluator, self.parent_context, self, params)])
+            return set([CompiledInstance(self.evaluator, self.parent_context, self, params)])
         else:
             return set(self._execute_function(params))
 
     @CheckAttribute
     def py__class__(self):
-        return create(self._evaluator, self.obj.__class__)
+        return create(self.evaluator, self.obj.__class__)
 
     @CheckAttribute
     def py__mro__(self):
-        return tuple(create(self._evaluator, cls) for cls in self.obj.__mro__)
+        return tuple(create(self.evaluator, cls) for cls in self.obj.__mro__)
 
     @CheckAttribute
     def py__bases__(self):
-        return tuple(create(self._evaluator, cls) for cls in self.obj.__bases__)
+        return tuple(create(self.evaluator, cls) for cls in self.obj.__bases__)
 
     def py__bool__(self):
         return bool(self.obj)
@@ -172,7 +171,7 @@ class CompiledObject(Context):
         search_global shouldn't change the fact that there's one dict, this way
         there's only one `object`.
         """
-        return [LazyNamesDict(self._evaluator, self, is_instance)]
+        return [LazyNamesDict(self.evaluator, self, is_instance)]
 
     @memoize_method
     def _ensure_one_filter(self, is_instance):
@@ -180,11 +179,11 @@ class CompiledObject(Context):
         search_global shouldn't change the fact that there's one dict, this way
         there's only one `object`.
         """
-        return CompiledObjectFilter(self._evaluator, self, is_instance)
+        return CompiledObjectFilter(self.evaluator, self, is_instance)
 
     def get_subscope_by_name(self, name):
         if name in dir(self.obj):
-            return CompiledName(self._evaluator, self, name).parent
+            return CompiledName(self.evaluator, self, name).parent
         else:
            raise KeyError("CompiledObject doesn't have an attribute '%s'." % name)
 
@@ -194,7 +193,7 @@ class CompiledObject(Context):
             # Get rid of side effects, we won't call custom `__getitem__`s.
             return set()
 
-        return set([create(self._evaluator, self.obj[index])])
+        return set([create(self.evaluator, self.obj[index])])
 
     @CheckAttribute
     def py__iter__(self):
@@ -203,7 +202,7 @@ class CompiledObject(Context):
             return
 
         for part in self.obj:
-            yield set([create(self._evaluator, part)])
+            yield set([create(self.evaluator, part)])
 
     @property
     def name(self):
@@ -227,8 +226,8 @@ class CompiledObject(Context):
                     # We want to evaluate everything except None.
                     # TODO do we?
                     continue
-                bltn_obj = create(self._evaluator, bltn_obj)
-                for result in self._evaluator.execute(bltn_obj, params):
+                bltn_obj = create(self.evaluator, bltn_obj)
+                for result in self.evaluator.execute(bltn_obj, params):
                     yield result
 
     @property
@@ -244,7 +243,7 @@ class CompiledObject(Context):
         for name in dir(self.obj):
             try:
                 faked_subscopes.append(
-                    fake.get_faked(self._evaluator, module, self.obj, parent=self, name=name)
+                    fake.get_faked(self.evaluator, module, self.obj, parent=self, name=name)
                 )
             except fake.FakeDoesNotExist:
                 pass
@@ -3,7 +3,7 @@ class Context(object):
     type = None # TODO remove
 
     def __init__(self, evaluator, parent_context=None):
-        self._evaluator = evaluator
+        self.evaluator = evaluator
         self.parent_context = parent_context
 
     def get_parent_flow_context(self):
@@ -17,7 +17,7 @@ class Context(object):
             context = context.parent_context
 
     def execute(self, arguments=None):
-        return self._evaluator.execute(self, arguments)
+        return self.evaluator.execute(self, arguments)
 
     def execute_evaluated(self, *value_list):
         """
@@ -27,7 +27,7 @@ class Context(object):
         return self.execute(ValuesArguments(value_list))
 
     def eval_node(self, node):
-        return self._evaluator.eval_element(self, node)
+        return self.evaluator.eval_element(self, node)
 
 
 class TreeContext(Context):
@@ -59,7 +59,7 @@ class TreeNameDefinition(ContextName):
     def infer(self):
         # Refactor this, should probably be here.
        from jedi.evaluate.finder import _name_to_types
-        return _name_to_types(self.parent_context._evaluator, self.parent_context, self.name, None)
+        return _name_to_types(self.parent_context.evaluator, self.parent_context, self.name, None)
 
 
 class ParamName(ContextName):
@@ -124,7 +124,6 @@ class ParserTreeFilter(AbstractUsedNamesFilter):
     def __init__(self, evaluator, context, parser_scope, until_position=None, origin_scope=None):
         super(ParserTreeFilter, self).__init__(context, parser_scope, origin_scope)
         self._until_position = until_position
-        self._evaluator = evaluator
 
     def _filter(self, names):
         names = super(ParserTreeFilter, self)._filter(names)
@@ -70,14 +70,14 @@ class AbstractInstanceContext(Context):
     def execute_function_slot(self, name, *args):
         raise NotImplementedError
         method = self.get_subscope_by_name(name)
-        return self._evaluator.execute_evaluated(method, *args)
+        return self.evaluator.execute_evaluated(method, *args)
 
     def get_descriptor_returns(self, obj):
         """ Throws a KeyError if there's no method. """
         raise NotImplementedError
         # Arguments in __get__ descriptors are obj, class.
         # `method` is the new parent of the array, don't know if that's good.
-        none_obj = compiled.create(self._evaluator, None)
+        none_obj = compiled.create(self.evaluator, None)
         args = [obj, obj.base] if isinstance(obj, Instance) else [none_obj, obj]
         try:
             return self.execute_subscope_by_name('__get__', *args)
@@ -89,15 +89,15 @@ class AbstractInstanceContext(Context):
         if include_self_names:
             for cls in self._class_context.py__mro__():
                 if isinstance(cls, compiled.CompiledObject):
-                    yield SelfNameFilter(self._evaluator, self, cls, origin_scope)
+                    yield SelfNameFilter(self.evaluator, self, cls, origin_scope)
                 else:
-                    yield SelfNameFilter(self._evaluator, self, cls.classdef, origin_scope)
+                    yield SelfNameFilter(self.evaluator, self, cls.classdef, origin_scope)
 
         for cls in self._class_context.py__mro__():
             if isinstance(cls, compiled.CompiledObject):
-                yield CompiledInstanceClassFilter(self._evaluator, self, cls)
+                yield CompiledInstanceClassFilter(self.evaluator, self, cls)
             else:
-                yield InstanceClassFilter(self._evaluator, self, cls.classdef, origin_scope)
+                yield InstanceClassFilter(self.evaluator, self, cls.classdef, origin_scope)
 
     def py__getitem__(self, index):
         try:
@@ -106,7 +106,7 @@ class AbstractInstanceContext(Context):
             debug.warning('No __getitem__, cannot access the array.')
             return set()
         else:
-            index_obj = compiled.create(self._evaluator, index)
+            index_obj = compiled.create(self.evaluator, index)
             return unite(name.execute_evaluated(index_obj) for name in names)
 
     def py__iter__(self):
@@ -116,7 +116,7 @@ class AbstractInstanceContext(Context):
             debug.warning('No __iter__ on %s.' % self)
             return
         else:
-            iters = self._evaluator.execute(method)
+            iters = self.evaluator.execute(method)
             for generator in iters:
                 if isinstance(generator, Instance):
                     # `__next__` logic.
@@ -121,18 +121,18 @@ class GeneratorMixin(object):
 
     @memoize_default()
     def names_dicts(self, search_global=False): # is always False
-        gen_obj = compiled.get_special_object(self._evaluator, 'GENERATOR_OBJECT')
+        gen_obj = compiled.get_special_object(self.evaluator, 'GENERATOR_OBJECT')
         yield self._get_names_dict(gen_obj.names_dict)
 
     def get_filters(self, search_global, until_position=None, origin_scope=None):
-        gen_obj = compiled.get_special_object(self._evaluator, 'GENERATOR_OBJECT')
+        gen_obj = compiled.get_special_object(self.evaluator, 'GENERATOR_OBJECT')
         yield DictFilter(self._get_names_dict(gen_obj.names_dict))
 
     def py__bool__(self):
         return True
 
     def py__class__(self):
-        gen_obj = compiled.get_special_object(self._evaluator, 'GENERATOR_OBJECT')
+        gen_obj = compiled.get_special_object(self.evaluator, 'GENERATOR_OBJECT')
         return gen_obj.py__class__()
 
 
@@ -166,7 +166,7 @@ class Comprehension(IterableWrapper):
         return cls(evaluator, atom)
 
     def __init__(self, evaluator, atom):
-        self._evaluator = evaluator
+        self.evaluator = evaluator
         self._atom = atom
 
     def _get_comprehension(self):
@@ -197,7 +197,7 @@ class Comprehension(IterableWrapper):
         return helpers.deep_ast_copy(node, parent=last_comp)
 
     def _nested(self, comp_fors):
-        evaluator = self._evaluator
+        evaluator = self.evaluator
         comp_for = comp_fors[0]
         input_node = comp_for.children[3]
         input_types = evaluator.eval_element(input_node)
@@ -239,15 +239,15 @@ class ArrayMixin(object):
     @memoize_default()
     def names_dicts(self, search_global=False): # Always False.
         # `array.type` is a string with the type, e.g. 'list'.
-        scope = compiled.builtin_from_name(self._evaluator, self.type)
+        scope = compiled.builtin_from_name(self.evaluator, self.type)
         # builtins only have one class -> [0]
-        scopes = self._evaluator.execute_evaluated(scope, self)
+        scopes = self.evaluator.execute_evaluated(scope, self)
         names_dicts = list(scopes)[0].names_dicts(search_global)
         yield self._get_names_dict(names_dicts[1])
 
     def get_filters(self, search_global, until_position=None, origin_scope=None):
         # `array.type` is a string with the type, e.g. 'list'.
-        compiled_obj = compiled.builtin_from_name(self._evaluator, self.array_type)
+        compiled_obj = compiled.builtin_from_name(self.evaluator, self.array_type)
         for typ in compiled_obj.execute_evaluated(self):
             for filter in typ.get_filters():
                 yield filter
@@ -258,26 +258,26 @@ class ArrayMixin(object):
         return None # We don't know the length, because of appends.
 
     def py__class__(self):
-        return compiled.builtin_from_name(self._evaluator, self.type)
+        return compiled.builtin_from_name(self.evaluator, self.type)
 
     @safe_property
     def parent(self):
-        return self._evaluator.BUILTINS
+        return self.evaluator.BUILTINS
 
     def dict_values(self):
-        return unite(self._evaluator.eval_element(v) for k, v in self._items())
+        return unite(self.evaluator.eval_element(v) for k, v in self._items())
 
     @register_builtin_method('values', type='dict')
     def _imitate_values(self):
         items = self.dict_values()
-        return create_evaluated_sequence_set(self._evaluator, items, sequence_type='list')
+        return create_evaluated_sequence_set(self.evaluator, items, sequence_type='list')
 
     @register_builtin_method('items', type='dict')
     def _imitate_items(self):
-        items = [set([FakeSequence(self._evaluator, (k, v), 'tuple')])
+        items = [set([FakeSequence(self.evaluator, (k, v), 'tuple')])
                  for k, v in self._items()]
 
-        return create_evaluated_sequence_set(self._evaluator, *items, sequence_type='list')
+        return create_evaluated_sequence_set(self.evaluator, *items, sequence_type='list')
 
 
 class ListComprehension(Comprehension, ArrayMixin):
@@ -288,7 +288,7 @@ class ListComprehension(Comprehension, ArrayMixin):
         result = all_types[index]
         if isinstance(index, slice):
             return create_evaluated_sequence_set(
-                self._evaluator,
+                self.evaluator,
                 unite(result),
                 sequence_type='list'
             )
@@ -324,11 +324,11 @@ class DictComprehension(Comprehension, ArrayMixin):
 
     @register_builtin_method('items', type='dict')
     def _imitate_items(self):
-        items = set(FakeSequence(self._evaluator,
+        items = set(FakeSequence(self.evaluator,
                     (AlreadyEvaluated(keys), AlreadyEvaluated(values)), 'tuple')
                     for keys, values in self._iterate())
 
-        return create_evaluated_sequence_set(self._evaluator, items, sequence_type='list')
+        return create_evaluated_sequence_set(self.evaluator, items, sequence_type='list')
 
 
 class GeneratorComprehension(Comprehension, GeneratorMixin):
@@ -391,7 +391,7 @@ class ArrayLiteralContext(ArrayMixin, AbstractSequence):
         for node in self._items():
             yield context.LazyTreeContext(self._defining_context, node)
 
-        additions = check_array_additions(self._evaluator, self)
+        additions = check_array_additions(self.evaluator, self)
         if additions:
             yield additions
 
@@ -447,7 +447,7 @@ class _FakeArray(ArrayLiteralContext):
     def __init__(self, evaluator, container, type):
         # TODO is this class really needed?
         self.array_type = type
-        self._evaluator = evaluator
+        self.evaluator = evaluator
        self.atom = container
        self.parent_context = evaluator.BUILTINS
 
@@ -519,7 +519,7 @@ class FakeDict(_FakeArray):
 
     def py__iter__(self):
         for key in self._dct:
-            yield context.LazyKnownContext(compiled.create(self._evaluator, key))
+            yield context.LazyKnownContext(compiled.create(self.evaluator, key))
 
     def py__getitem__(self, index):
         return self._dct[index].infer()
@@ -820,7 +820,7 @@ class _ArrayInstance(IterableWrapper):
     we don't use these operations in `builtins.py`.
     """
     def __init__(self, evaluator, instance):
-        self._evaluator = evaluator
+        self.evaluator = evaluator
         self.instance = instance
         self.var_args = instance.var_args
 
@@ -831,15 +831,15 @@ class _ArrayInstance(IterableWrapper):
        except StopIteration:
            types = set()
        else:
-            types = unite(self._evaluator.eval_element(node) for node in first_nodes)
-            for types in py__iter__(self._evaluator, types, first_nodes[0]):
+            types = unite(self.evaluator.eval_element(node) for node in first_nodes)
+            for types in py__iter__(self.evaluator, types, first_nodes[0]):
                yield types
 
        module = self.var_args.get_parent_until()
        if module is None:
            return
        is_list = str(self.instance.name) == 'list'
-        additions = _check_array_additions(self._evaluator, self.instance, module, is_list)
+        additions = _check_array_additions(self.evaluator, self.instance, module, is_list)
        if additions:
            yield additions
 
@@ -416,11 +416,11 @@ def _error_argument_count(func, actual_count):
 def create_default_param(parent_context, param):
     if param.stars == 1:
         result_arg = context.LazyKnownContext(
-            iterable.FakeSequence(parent_context._evaluator, 'tuple', [])
+            iterable.FakeSequence(parent_context.evaluator, 'tuple', [])
         )
     elif param.stars == 2:
         result_arg = context.LazyKnownContext(
-            iterable.FakeDict(parent_context._evaluator, {})
+            iterable.FakeDict(parent_context.evaluator, {})
         )
     elif param.default is None:
         result_arg = context.LazyUnknownContext()
@@ -92,7 +92,7 @@ class _RecursionNode(object):
 
 def execution_recursion_decorator(func):
     def run(execution, **kwargs):
-        detector = execution._evaluator.execution_recursion_detector
+        detector = execution.evaluator.execution_recursion_detector
         if detector.push_execution(execution):
             result = set()
         else:
@@ -107,7 +107,7 @@ class Instance(use_metaclass(CachedMetaClass, Executed)):
     @property
     def py__call__(self):
        def actual(params):
-            return self._evaluator.execute(method, params)
+            return self.evaluator.execute(method, params)
 
        try:
            method = self.get_subscope_by_name('__call__')
@@ -130,7 +130,7 @@ class Instance(use_metaclass(CachedMetaClass, Executed)):
            func = self.get_subscope_by_name('__init__')
        except KeyError:
            return None
-        return FunctionExecutionContext(self._evaluator, self, func, self.var_args)
+        return FunctionExecutionContext(self.evaluator, self, func, self.var_args)
 
    def _get_func_self_name(self, func):
        """
@@ -172,22 +172,22 @@ class Instance(use_metaclass(CachedMetaClass, Executed)):
                             name = trailer.children[1] # After dot.
                             if name.is_definition():
                                 arr = names.setdefault(name.value, [])
-                                arr.append(get_instance_el(self._evaluator, self, name))
+                                arr.append(get_instance_el(self.evaluator, self, name))
         return names
 
     def get_subscope_by_name(self, name):
         sub = self._class_context.get_subscope_by_name(name)
-        return get_instance_el(self._evaluator, self, sub, True)
+        return get_instance_el(self.evaluator, self, sub, True)
 
     def execute_subscope_by_name(self, name, *args):
         method = self.get_subscope_by_name(name)
-        return self._evaluator.execute_evaluated(method, *args)
+        return self.evaluator.execute_evaluated(method, *args)
 
     def get_descriptor_returns(self, obj):
         """ Throws a KeyError if there's no method. """
         # Arguments in __get__ descriptors are obj, class.
         # `method` is the new parent of the array, don't know if that's good.
-        none_obj = compiled.create(self._evaluator, None)
+        none_obj = compiled.create(self.evaluator, None)
         args = [obj, obj.base] if isinstance(obj, Instance) else [none_obj, obj]
         try:
             return self.execute_subscope_by_name('__get__', *args)
@@ -201,24 +201,24 @@ class Instance(use_metaclass(CachedMetaClass, Executed)):
         for s in self._class_context.py__mro__()[1:]:
             if not isinstance(s, compiled.CompiledObject):
                 # Compiled objects don't have `self.` names.
-                for inst in self._evaluator.execute(s):
+                for inst in self.evaluator.execute(s):
                     yield inst._self_names_dict(add_mro=False)
 
         for names_dict in self.base.names_dicts(search_global=False, is_instance=True):
-            yield LazyInstanceDict(self._evaluator, self, names_dict)
+            yield LazyInstanceDict(self.evaluator, self, names_dict)
 
     def get_filters(self, search_global, until_position=None, origin_scope=None):
         for cls in self._class_context.py__mro__():
             if isinstance(cls, compiled.CompiledObject):
-                yield SelfNameFilter(self._evaluator, self, cls, origin_scope)
+                yield SelfNameFilter(self.evaluator, self, cls, origin_scope)
             else:
-                yield SelfNameFilter(self._evaluator, self, cls.base, origin_scope)
+                yield SelfNameFilter(self.evaluator, self, cls.base, origin_scope)
 
         for cls in self._class_context.py__mro__():
             if isinstance(cls, compiled.CompiledObject):
-                yield CompiledInstanceClassFilter(self._evaluator, self, cls)
+                yield CompiledInstanceClassFilter(self.evaluator, self, cls)
             else:
-                yield InstanceClassFilter(self._evaluator, self, cls.base, origin_scope)
+                yield InstanceClassFilter(self.evaluator, self, cls.base, origin_scope)
 
     def py__getitem__(self, index):
         try:
@@ -227,8 +227,8 @@ class Instance(use_metaclass(CachedMetaClass, Executed)):
             debug.warning('No __getitem__, cannot access the array.')
             return set()
         else:
-            index_obj = compiled.create(self._evaluator, index)
-            return self._evaluator.execute_evaluated(method, index_obj)
+            index_obj = compiled.create(self.evaluator, index)
+            return self.evaluator.execute_evaluated(method, index_obj)
 
     def py__iter__(self):
         try:
@@ -237,7 +237,7 @@ class Instance(use_metaclass(CachedMetaClass, Executed)):
             debug.warning('No __iter__ on %s.' % self)
             return
         else:
-            iters = self._evaluator.execute(method)
+            iters = self.evaluator.execute(method)
             for generator in iters:
                 if isinstance(generator, Instance):
                     # `__next__` logic.
@@ -313,7 +313,7 @@ class InstanceElement(use_metaclass(CachedMetaClass, tree.Base)):
     variable (e.g. self.variable or class methods).
     """
     def __init__(self, evaluator, instance, var, is_class_var):
-        self._evaluator = evaluator
+        self.evaluator = evaluator
         self.instance = instance
         self.var = var
         self.is_class_var = is_class_var
@@ -328,7 +328,7 @@ class InstanceElement(use_metaclass(CachedMetaClass, tree.Base)):
                 and par == self.instance.base.base:
             par = self.instance
         else:
-            par = get_instance_el(self._evaluator, self.instance, par,
+            par = get_instance_el(self.evaluator, self.instance, par,
                                   self.is_class_var)
         return par
 
@@ -341,11 +341,11 @@ class InstanceElement(use_metaclass(CachedMetaClass, tree.Base)):
     def get_decorated_func(self):
         """ Needed because the InstanceElement should not be stripped """
         func = self.var.get_decorated_func()
-        func = get_instance_el(self._evaluator, self.instance, func)
+        func = get_instance_el(self.evaluator, self.instance, func)
         return func
 
     def get_rhs(self):
-        return get_instance_el(self._evaluator, self.instance,
+        return get_instance_el(self.evaluator, self.instance,
                                self.var.get_rhs(), self.is_class_var)
 
     def is_definition(self):
@@ -354,7 +354,7 @@ class InstanceElement(use_metaclass(CachedMetaClass, tree.Base)):
     @property
     def children(self):
         # Copy and modify the array.
-        return [get_instance_el(self._evaluator, self.instance, command, self.is_class_var)
+        return [get_instance_el(self.evaluator, self.instance, command, self.is_class_var)
                 for command in self.var.children]
 
     @property
@@ -364,11 +364,11 @@ class InstanceElement(use_metaclass(CachedMetaClass, tree.Base)):
 
     def __iter__(self):
         for el in self.var.__iter__():
-            yield get_instance_el(self._evaluator, self.instance, el,
+            yield get_instance_el(self.evaluator, self.instance, el,
                                   self.is_class_var)
 
     def __getitem__(self, index):
-        return get_instance_el(self._evaluator, self.instance, self.var[index],
+        return get_instance_el(self.evaluator, self.instance, self.var[index],
                                self.is_class_var)
 
     def isinstance(self, *cls):
@@ -462,16 +462,16 @@ class ClassContext(use_metaclass(CachedMetaClass, context.TreeContext, Wrapper))
     def py__bases__(self):
         arglist = self.classdef.get_super_arglist()
         if arglist:
-            args = param.TreeArguments(self._evaluator, self, arglist)
+            args = param.TreeArguments(self.evaluator, self, arglist)
             return [value for key, value in args.unpack() if key is None]
         else:
-            return [context.LazyKnownContext(compiled.create(self._evaluator, object))]
+            return [context.LazyKnownContext(compiled.create(self.evaluator, object))]
 
     def py__call__(self, params):
-        return set([TreeInstance(self._evaluator, self.parent_context, self, params)])
+        return set([TreeInstance(self.evaluator, self.parent_context, self, params)])
 
     def py__class__(self):
-        return compiled.create(self._evaluator, type)
+        return compiled.create(self.evaluator, type)
 
     @property
     def params(self):
@@ -492,14 +492,14 @@ class ClassContext(use_metaclass(CachedMetaClass, context.TreeContext, Wrapper))
 
     def get_filters(self, search_global, until_position=None, origin_scope=None, is_instance=False):
         if search_global:
-            yield ParserTreeFilter(self._evaluator, self, self.classdef, until_position, origin_scope=origin_scope)
+            yield ParserTreeFilter(self.evaluator, self, self.classdef, until_position, origin_scope=origin_scope)
         else:
             for scope in self.py__mro__():
                 if isinstance(scope, compiled.CompiledObject):
                     for filter in scope.get_filters(is_instance=is_instance):
                         yield filter
                 else:
-                    yield ParserTreeFilter(self._evaluator, self, scope.classdef, origin_scope=origin_scope)
+                    yield ParserTreeFilter(self.evaluator, self, scope.classdef, origin_scope=origin_scope)
 
     def is_class(self):
         return True
@@ -535,7 +535,7 @@ class FunctionContext(use_metaclass(CachedMetaClass, context.TreeContext, Wrappe
 
     def get_filters(self, search_global, until_position=None, origin_scope=None):
         if search_global:
-            yield ParserTreeFilter(self._evaluator, self, self.base, until_position, origin_scope=origin_scope)
+            yield ParserTreeFilter(self.evaluator, self, self.base, until_position, origin_scope=origin_scope)
         else:
             scope = self.py__class__()
             for filter in scope.get_filters(search_global=False, origin_scope=origin_scope):
@@ -544,13 +544,13 @@ class FunctionContext(use_metaclass(CachedMetaClass, context.TreeContext, Wrappe
     @Python3Method
     def py__call__(self, params):
         function_execution = FunctionExecutionContext(
-            self._evaluator,
+            self.evaluator,
             self.parent_context,
             self.base,
             params
         )
         if self.base.is_generator():
-            return set([iterable.Generator(self._evaluator, function_execution)])
+            return set([iterable.Generator(self.evaluator, function_execution)])
         else:
             return function_execution.get_return_values()
 
@@ -561,7 +561,7 @@ class FunctionContext(use_metaclass(CachedMetaClass, context.TreeContext, Wrappe
             name = 'METHOD_CLASS'
         else:
             name = 'FUNCTION_CLASS'
-        return compiled.get_special_object(self._evaluator, name)
+        return compiled.get_special_object(self.evaluator, name)
 
     def __repr__(self):
         return "<%s of %s>" % (type(self).__name__, self.base_func)
@@ -604,7 +604,7 @@ class FunctionExecutionContext(Executed):
     def get_return_values(self, check_yields=False):
         funcdef = self.funcdef
         if funcdef.type in ('lambdef', 'lambdef_nocond'):
-            return self._evaluator.eval_element(self.children[-1])
+            return self.evaluator.eval_element(self.children[-1])
 
         """
         if func.listeners:
@@ -622,8 +622,8 @@ class FunctionExecutionContext(Executed):
             returns = funcdef.yields
         else:
            returns = funcdef.returns
-        types = set(docstrings.find_return_types(self._evaluator, funcdef))
-        types |= set(pep0484.find_return_types(self._evaluator, funcdef))
+        types = set(docstrings.find_return_types(self.evaluator, funcdef))
+        types |= set(pep0484.find_return_types(self.evaluator, funcdef))
 
        for r in returns:
            check = flow_analysis.reachability_check(self, funcdef, r)
@@ -646,7 +646,7 @@ class FunctionExecutionContext(Executed):
         if node.type == 'yield_arg':
             # It must be a yield from.
             yield_from_types = self.eval_node(node)
-            for lazy_context in iterable.py__iter__(self._evaluator, yield_from_types, node):
+            for lazy_context in iterable.py__iter__(self.evaluator, yield_from_types, node):
                 yield lazy_context
         else:
             yield context.LazyTreeContext(self, node)
@@ -679,7 +679,7 @@ class FunctionExecutionContext(Executed):
                 return
             last_for_stmt = for_stmt
 
-        evaluator = self._evaluator
+        evaluator = self.evaluator
         for for_stmt, yields in yields_order:
             if for_stmt is None:
                 # No for_stmt, just normal yields.
@@ -699,13 +699,13 @@ class FunctionExecutionContext(Executed):
                             del evaluator.predefined_if_name_dict_dict[for_stmt]
 
     def get_filters(self, search_global, until_position=None, origin_scope=None):
-        yield FunctionExecutionFilter(self._evaluator, self, self.funcdef,
+        yield FunctionExecutionFilter(self.evaluator, self, self.funcdef,
                                       until_position,
                                       origin_scope=origin_scope)
 
     @memoize_default(default=NO_DEFAULT)
     def get_params(self):
-        return param.get_params(self._evaluator, self.parent_context, self.funcdef, self.var_args)
+        return param.get_params(self.evaluator, self.parent_context, self.funcdef, self.var_args)
 
     def __repr__(self):
         return "<%s of %s>" % (type(self).__name__, self.funcdef)
@@ -719,7 +719,7 @@ class AnonymousFunctionExecution(FunctionExecutionContext):
     @memoize_default(default=NO_DEFAULT)
     def get_params(self):
         # We need to do a dynamic search here.
-        return search_params(self._evaluator, self.parent_context, self.funcdef)
+        return search_params(self.evaluator, self.parent_context, self.funcdef)
 
 
 class GlobalName(helpers.FakeName):
@@ -753,7 +753,7 @@ class ModuleContext(use_metaclass(CachedMetaClass, context.TreeContext, Wrapper)
 
     def get_filters(self, search_global, until_position=None, origin_scope=None):
         yield ParserTreeFilter(
-            self._evaluator,
+            self.evaluator,
             self,
             self.module_node,
             until_position,
@@ -778,7 +778,7 @@ class ModuleContext(use_metaclass(CachedMetaClass, context.TreeContext, Wrapper)
         for i in self.module_node.imports:
             if i.is_star_import():
                 name = i.star_import_name()
-                new = imports.ImportWrapper(self._evaluator, name).follow()
+                new = imports.ImportWrapper(self.evaluator, name).follow()
                 for module in new:
                     if isinstance(module, tree.Module):
                         modules += module.star_imports()
@@ -789,7 +789,7 @@ class ModuleContext(use_metaclass(CachedMetaClass, context.TreeContext, Wrapper)
     def _module_attributes_dict(self):
         def parent_callback():
             # Create a string type object (without a defined string in it):
-            return list(self._evaluator.execute(compiled.create(self._evaluator, str)))[0]
+            return list(self.evaluator.execute(compiled.create(self.evaluator, str)))[0]
 
         names = ['__file__', '__package__', '__doc__', '__name__']
         # All the additional module attributes are strings.
@@ -815,7 +815,7 @@ class ModuleContext(use_metaclass(CachedMetaClass, context.TreeContext, Wrapper)
             return None
 
     def py__name__(self):
-        for name, module in self._evaluator.modules.items():
+        for name, module in self.evaluator.modules.items():
             if module == self:
                 return name
 
@@ -838,7 +838,7 @@ class ModuleContext(use_metaclass(CachedMetaClass, context.TreeContext, Wrapper)
 
     def _py__path__(self):
         if self._parent_module is None:
-            search_path = self._evaluator.sys_path
+            search_path = self.evaluator.sys_path
         else:
             search_path = self._parent_module.py__path__()
         init_path = self.py__file__()
@@ -907,7 +907,7 @@ class ModuleContext(use_metaclass(CachedMetaClass, context.TreeContext, Wrapper)
         return names
 
     def py__class__(self):
-        return compiled.get_special_object(self._evaluator, 'MODULE_CLASS')
+        return compiled.get_special_object(self.evaluator, 'MODULE_CLASS')
 
     def __repr__(self):
         return "<%s: %s>" % (type(self).__name__, self.module_node)