forked from VimPlug/jedi
Start implementing contexts with specialized TreeNameDefinitions to avoid gambling with the parser.
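The diff below threads an explicit `context` argument through the evaluator and introduces `Context`, `ClassContext`, `ModuleContext` and `TreeNameDefinition` wrappers, so that filters hand out name objects that know where they were found instead of bare parser nodes. As rough orientation only, here is a minimal, self-contained sketch of that pattern; it is not jedi code, and every name in it is simplified or invented for illustration:

    # Toy sketch of the name-definition pattern this commit moves toward
    # (illustrative only; not jedi's actual API).
    class Context(object):
        """A scope/value wrapper that knows its parent context."""
        def __init__(self, parent_context=None):
            self.parent_context = parent_context

    class TreeNameDefinition(object):
        """Couples a parser name with the context it was found in."""
        def __init__(self, parent_context, name_value, types):
            self.parent_context = parent_context
            self._name_value = name_value
            self._types = types  # stand-in for real, lazy inference

        @property
        def string_name(self):
            return self._name_value

        def infer(self):
            # Real code would evaluate the definition lazily via the context;
            # here we just return the precomputed stand-in types.
            return list(self._types)

    module = Context()
    name = TreeNameDefinition(module, 'foo', types=['int'])
    print(name.string_name, name.infer())  # foo ['int']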
@@ -29,6 +29,7 @@ def get_call_signature_param_names(call_signatures):
 def filter_names(evaluator, completion_names, stack, like_name):
     comp_dct = {}
     for name in set(completion_names):
+        print(name)
         if settings.case_insensitive_completion \
                 and str(name).lower().startswith(like_name.lower()) \
                 or str(name).startswith(like_name):
@@ -193,11 +194,11 @@ class Completion:

     def _trailer_completions(self, atom_expr):
         user_scope = get_user_scope(self._module, self._position)
-        scopes = self._evaluator.eval_element(atom_expr)
+        contexts = self._evaluator.eval_element(self._evaluator.create_context(atom_expr), atom_expr)
         completion_names = []
-        debug.dbg('trailer completion scopes: %s', scopes)
-        for s in scopes:
-            for filter in s.get_filters(search_global=False, origin_scope=user_scope):
+        debug.dbg('trailer completion contexts: %s', contexts)
+        for context in contexts:
+            for filter in context.get_filters(search_global=False, origin_scope=user_scope):
                 completion_names += filter.values()
         return completion_names

@@ -5,7 +5,7 @@ import copy

 from jedi.cache import underscore_memoization
 from jedi.evaluate import helpers
-from jedi.evaluate.representation import ModuleWrapper
+from jedi.evaluate.representation import ModuleContext
 from jedi.evaluate.compiled import mixed


@@ -18,7 +18,7 @@ class MixedModule(object):
         self._namespaces = namespaces

         self._namespace_objects = [type('jedi_namespace', (), n) for n in namespaces]
-        self._wrapped_module = ModuleWrapper(evaluator, parser_module)
+        self._wrapped_module = ModuleContext(evaluator, parser_module)
         # Usually we are dealing with very small code sizes when it comes to
         # interpreter modules. In this case we just copy the whole syntax tree
         # to be able to modify it.
@@ -109,20 +109,20 @@ class Evaluator(object):
         self.recursion_detector = recursion.RecursionDetector(self)
         self.execution_recursion_detector = recursion.ExecutionRecursionDetector(self)

-    def wrap(self, element):
+    def wrap(self, element, parent_context=None):
         if isinstance(element, (er.Wrapper, er.InstanceElement,
-                                er.ModuleWrapper, er.FunctionExecution, er.Instance, compiled.CompiledObject)) or element is None:
+                                er.ModuleContext, er.FunctionExecution, er.Instance, compiled.CompiledObject)) or element is None:
             # TODO this is so ugly, please refactor.
             return element

         if element.type == 'classdef':
-            return er.Class(self, element)
+            return er.ClassContext(self, element, parent_context)
         elif element.type == 'funcdef':
             return er.Function(self, element)
         elif element.type == 'lambda':
             return er.LambdaWrapper(self, element)
         elif element.type == 'file_input':
-            return er.ModuleWrapper(self, element)
+            return er.ModuleContext(self, element)
         else:
             return element

@@ -145,7 +145,7 @@ class Evaluator(object):
     #@memoize_default(default=[], evaluator_is_first_arg=True)
     #@recursion.recursion_decorator
     @debug.increase_indent
-    def eval_statement(self, stmt, seek_name=None):
+    def eval_statement(self, context, stmt, seek_name=None):
        """
        The starting point of the completion. A statement always owns a call
        list, which are the calls, that a statement does. In case multiple
@@ -156,7 +156,7 @@ class Evaluator(object):
         """
         debug.dbg('eval_statement %s (%s)', stmt, seek_name)
         rhs = stmt.get_rhs()
-        types = self.eval_element(rhs)
+        types = self.eval_element(context, rhs)

         if seek_name:
             types = finder.check_tuple_assignments(self, types, seek_name)
@@ -177,13 +177,13 @@ class Evaluator(object):
                 # only in for loops without clutter, because they are
                 # predictable. Also only do it, if the variable is not a tuple.
                 node = for_stmt.get_input_node()
-                for_iterables = self.eval_element(node)
+                for_iterables = self.eval_element(context, node)
                 ordered = list(iterable.py__iter__(self, for_iterables, node))

                 for index_types in ordered:
                     dct = {str(for_stmt.children[1]): index_types}
                     self.predefined_if_name_dict_dict[for_stmt] = dct
-                    t = self.eval_element(rhs)
+                    t = self.eval_element(context, rhs)
                     left = precedence.calculate(self, left, operator, t)
                 types = left
                 if ordered:
@@ -196,11 +196,11 @@ class Evaluator(object):
         debug.dbg('eval_statement result %s', types)
         return types

-    def eval_element(self, element):
+    def eval_element(self, context, element):
         if isinstance(element, iterable.AlreadyEvaluated):
             return set(element)
         elif isinstance(element, iterable.MergedNodes):
-            return iterable.unite(self.eval_element(e) for e in element)
+            return iterable.unite(self.eval_element(context, e) for e in element)

         if_stmt = element.get_parent_until((tree.IfStmt, tree.ForStmt, tree.IsScope))
         predefined_if_name_dict = self.predefined_if_name_dict_dict.get(if_stmt)
@@ -249,21 +249,21 @@ class Evaluator(object):
                 for name_dict in name_dicts:
                     self.predefined_if_name_dict_dict[if_stmt] = name_dict
                     try:
-                        result |= self._eval_element_not_cached(element)
+                        result |= self._eval_element_not_cached(context, element)
                     finally:
                         del self.predefined_if_name_dict_dict[if_stmt]
                 return result
             else:
-                return self._eval_element_if_evaluated(element)
-                return self._eval_element_cached(element)
+                return self._eval_element_if_evaluated(context, element)
+                return self._eval_element_cached(context, element)
         else:
             if predefined_if_name_dict:
-                return self._eval_element_not_cached(element)
+                return self._eval_element_not_cached(context, element)
             else:
-                return self._eval_element_if_evaluated(element)
-                return self._eval_element_cached(element)
+                return self._eval_element_if_evaluated(context, element)
+                return self._eval_element_cached(context, element)

-    def _eval_element_if_evaluated(self, element):
+    def _eval_element_if_evaluated(self, context, element):
         """
         TODO This function is temporary: Merge with eval_element.
         """
@@ -272,19 +272,19 @@ class Evaluator(object):
             parent = parent.parent
             predefined_if_name_dict = self.predefined_if_name_dict_dict.get(parent)
             if predefined_if_name_dict is not None:
-                return self._eval_element_not_cached(element)
-        return self._eval_element_cached(element)
+                return self._eval_element_not_cached(context, element)
+        return self._eval_element_cached(context, element)

     @memoize_default(default=set(), evaluator_is_first_arg=True)
-    def _eval_element_cached(self, element):
-        return self._eval_element_not_cached(element)
+    def _eval_element_cached(self, context, element):
+        return self._eval_element_not_cached(context, element)

     @debug.increase_indent
-    def _eval_element_not_cached(self, element):
+    def _eval_element_not_cached(self, context, element):
         debug.dbg('eval_element %s@%s', element, element.start_pos)
         types = set()
         if isinstance(element, (tree.Name, tree.Literal)) or tree.is_node(element, 'atom'):
-            types = self._eval_atom(element)
+            types = self._eval_atom(context, element)
         elif isinstance(element, tree.Keyword):
             # For False/True/None
             if element.value in ('False', 'True', 'None'):
@@ -295,12 +295,12 @@ class Evaluator(object):
         elif element.isinstance(er.LambdaWrapper):
             types = set([element])  # TODO this is no real evaluation.
         elif element.type == 'expr_stmt':
-            types = self.eval_statement(element)
+            types = self.eval_statement(context, element)
         elif element.type in ('power', 'atom_expr'):
-            types = self._eval_atom(element.children[0])
+            types = self._eval_atom(context, element.children[0])
             for trailer in element.children[1:]:
                 if trailer == '**':  # has a power operation.
-                    right = self.eval_element(element.children[2])
+                    right = self.eval_element(context, element.children[2])
                     types = set(precedence.calculate(self, types, trailer, right))
                     break
                 types = self.eval_trailer(types, trailer)
@@ -308,31 +308,31 @@ class Evaluator(object):
             # The implicit tuple in statements.
             types = set([iterable.ImplicitTuple(self, element)])
         elif element.type in ('not_test', 'factor'):
-            types = self.eval_element(element.children[-1])
+            types = self.eval_element(context, element.children[-1])
             for operator in element.children[:-1]:
                 types = set(precedence.factor_calculate(self, types, operator))
         elif element.type == 'test':
             # `x if foo else y` case.
-            types = (self.eval_element(element.children[0]) |
-                     self.eval_element(element.children[-1]))
+            types = (self.eval_element(context, element.children[0]) |
+                     self.eval_element(context, element.children[-1]))
         elif element.type == 'operator':
             # Must be an ellipsis, other operators are not evaluated.
             assert element.value == '...'
             types = set([compiled.create(self, Ellipsis)])
         elif element.type == 'dotted_name':
-            types = self._eval_atom(element.children[0])
+            types = self._eval_atom(context, element.children[0])
             for next_name in element.children[2::2]:
                 types = set(chain.from_iterable(self.find_types(typ, next_name)
                                                 for typ in types))
             types = types
         elif element.type == 'eval_input':
-            types = self._eval_element_not_cached(element.children[0])
+            types = self._eval_element_not_cached(context, element.children[0])
         else:
             types = precedence.calculate_children(self, element.children)
         debug.dbg('eval_element result %s', types)
         return types

-    def _eval_atom(self, atom):
+    def _eval_atom(self, context, atom):
         """
         Basically to process ``atom`` nodes. The parser sometimes doesn't
         generate the node (because it has just one child). In that case an atom
@@ -442,25 +442,25 @@ class Evaluator(object):
         debug.dbg('execute result: %s in %s', types, obj)
         return types

-    def goto_definitions(self, name):
+    def goto_definitions(self, context, name):
         def_ = name.get_definition()
         is_simple_name = name.parent.type not in ('power', 'trailer')
         if is_simple_name:
             if name.parent.type in ('file_input', 'classdef', 'funcdef'):
                 return [self.wrap(name.parent)]
             if def_.type == 'expr_stmt' and name in def_.get_defined_names():
-                return self.eval_statement(def_, name)
+                return self.eval_statement(context, def_, name)
             elif def_.type == 'for_stmt':
-                container_types = self.eval_element(def_.children[3])
+                container_types = self.eval_element(context, def_.children[3])
                 for_types = iterable.py__iter__types(self, container_types, def_.children[3])
                 return finder.check_tuple_assignments(self, for_types, name)
             elif def_.type in ('import_from', 'import_name'):
                 return imports.ImportWrapper(self, name).follow()

         call = helpers.call_of_leaf(name)
-        return self.eval_element(call)
+        return self.eval_element(context, call)

-    def goto(self, name):
+    def goto(self, context, name):
         def resolve_implicit_imports(names):
             for name in names:
                 if isinstance(name.parent, helpers.FakeImport):
@@ -480,13 +480,13 @@ class Evaluator(object):
                 trailer = trailer.parent
             if trailer.type != 'classdef':
                 if trailer.type == 'decorator':
-                    types = self.eval_element(trailer.children[1])
+                    types = self.eval_element(context, trailer.children[1])
                 else:
                     i = trailer.parent.children.index(trailer)
                     to_evaluate = trailer.parent.children[:i]
-                    types = self.eval_element(to_evaluate[0])
+                    types = self.eval_element(context, to_evaluate[0])
                     for trailer in to_evaluate[1:]:
-                        types = self.eval_trailer(types, trailer)
+                        types = self.eval_trailer(context, types, trailer)
                 param_names = []
                 for typ in types:
                     try:
@@ -511,7 +511,7 @@ class Evaluator(object):
             if index > 0:
                 new_dotted = helpers.deep_ast_copy(par)
                 new_dotted.children[index - 1:] = []
-                types = self.eval_element(new_dotted)
+                types = self.eval_element(context, new_dotted)
                 return resolve_implicit_imports(iterable.unite(
                     self.find_types(typ, name, is_goto=True) for typ in types
                 ))
@@ -519,7 +519,7 @@ class Evaluator(object):
         scope = name.get_parent_scope()
         if tree.is_node(par, 'trailer') and par.children[0] == '.':
             call = helpers.call_of_leaf(name, cut_own_trailer=True)
-            types = self.eval_element(call)
+            types = self.eval_element(context, call)
             return resolve_implicit_imports(iterable.unite(
                 self.find_types(typ, name, is_goto=True) for typ in types
             ))
@@ -530,3 +530,9 @@ class Evaluator(object):
             stmt = name
         return self.find_types(scope, name, stmt.start_pos,
                                search_global=True, is_goto=True)
+
+    def create_context(self, node):
+        scope = node.get_parent_scope()
+        if scope.get_parent_scope() is not None:
+            raise NotImplementedError
+        return self.wrap(scope)
@@ -271,15 +271,10 @@ class CompiledName(FakeName):
     def is_definition(self):
         return True

-    @property
     @underscore_memoization
-    def parent(self):
+    def infer(self):
         module = self._compiled_obj.get_parent_until()
-        return _create_from_name(self._evaluator, module, self._compiled_obj, self.name)
+        return [_create_from_name(self._evaluator, module, self._compiled_obj, self.name)]

-    @parent.setter
-    def parent(self, value):
-        pass  # Just ignore this, FakeName tries to overwrite the parent attribute.
-

 class LazyNamesDict(object):
@@ -9,6 +9,44 @@ from jedi.evaluate import flow_analysis
 from jedi.common import to_list


+class AbstractNameDefinition(object):
+    start_pos = None
+
+    @property
+    @abstractmethod
+    def string_name(self):
+        raise NotImplementedError
+
+    @abstractmethod
+    def infer(self):
+        raise NotImplementedError
+
+
+class TreeNameDefinition(AbstractNameDefinition):
+    def __init__(self, parent_context, name):
+        self.parent_context = parent_context
+        self._name = name
+
+    def get_parent_flow_context(self):
+        return self.parent_context
+
+    @property
+    def string_name(self):
+        return self._name.value
+
+    @property
+    def start_pos(self):
+        return self._name.start_pos
+
+    def infer(self):
+        # Refactor this, should probably be here.
+        from jedi.evaluate.finder import _name_to_types
+        return _name_to_types(self.parent_context._evaluator, self.parent_context, self._name, None)
+
+    def __repr__(self):
+        return '%s: %s@%s' % (type(self).__name__, self.string_name, self.start_pos)
+
+
 class AbstractFilter(object):
     _until_position = None

@@ -30,10 +68,11 @@ class AbstractFilter(object):


 class AbstractUsedNamesFilter(AbstractFilter):
-    def __init__(self, parser_scope, origin_scope=None):
+    def __init__(self, context, parser_scope, origin_scope=None):
         super(AbstractUsedNamesFilter, self).__init__(origin_scope)
         self._parser_scope = parser_scope
         self._used_names = self._parser_scope.get_root_node().used_names
+        self._context = context

     def get(self, name):
         try:
@@ -41,16 +80,19 @@ class AbstractUsedNamesFilter(AbstractFilter):
         except KeyError:
             return []

-        return list(self._filter(names))
+        return self._convert_to_names(self._filter(names))

+    def _convert_to_names(self, names):
+        return [TreeNameDefinition(self._context, name) for name in names]
+
     def values(self):
-        return [name for name_list in self._used_names.values()
-                for name in self._filter(name_list)]
+        return self._convert_to_names(name for name_list in self._used_names.values()
+                                      for name in self._filter(name_list))


 class ParserTreeFilter(AbstractUsedNamesFilter):
-    def __init__(self, evaluator, parser_scope, until_position=None, origin_scope=None):
-        super(ParserTreeFilter, self).__init__(parser_scope, origin_scope)
+    def __init__(self, evaluator, context, parser_scope, until_position=None, origin_scope=None):
+        super(ParserTreeFilter, self).__init__(context, parser_scope, origin_scope)
         self._until_position = until_position
         self._evaluator = evaluator

@@ -65,8 +107,9 @@ class ParserTreeFilter(AbstractUsedNamesFilter):
         for name in sorted(names, key=lambda name: name.start_pos, reverse=True):
             stmt = name.get_definition()
             name_scope = self._evaluator.wrap(stmt.get_parent_scope())
-            check = flow_analysis.break_check(self._evaluator, name_scope,
-                                              stmt, self._origin_scope)
+            check = flow_analysis.UNSURE
+            #check = flow_analysis.break_check(self._evaluator, name_scope,
+            #                                  stmt, self._origin_scope)
             if check is not flow_analysis.UNREACHABLE:
                 yield name

@@ -75,7 +118,7 @@ class ParserTreeFilter(AbstractUsedNamesFilter):


 class FunctionExecutionFilter(ParserTreeFilter):
-    def __init__(self, evaluator, parser_scope, executed_function, param_by_name,
+    def __init__(self, evaluator, context, parser_scope, executed_function, param_by_name,
                  until_position=None, origin_scope=None):
         super(FunctionExecutionFilter, self).__init__(
             evaluator,
@@ -96,8 +139,8 @@ class FunctionExecutionFilter(ParserTreeFilter):


 class GlobalNameFilter(AbstractUsedNamesFilter):
-    def __init__(self, parser_scope, origin_scope=None):
-        super(GlobalNameFilter, self).__init__(parser_scope)
+    def __init__(self, context, parser_scope, origin_scope=None):
+        super(GlobalNameFilter, self).__init__(context, parser_scope)

     @to_list
     def _filter(self, names):
@@ -141,7 +184,7 @@ def get_global_filters(evaluator, context, until_position, origin_scope):
             until_position = None
             in_func = True

-        node = context.get_parent_scope()
+        node = context.parent_context
         context = evaluator.wrap(node)

     # Add builtins to the global scope.
@@ -179,7 +179,7 @@ class NameFinder(object):
                 last_names.append(name)
                 continue

-            if isinstance(stmt, er.ModuleWrapper):
+            if isinstance(stmt, er.ModuleContext):
                 # In case of REPL completion, we can infer modules names that
                 # don't really have a definition (because they are really just
                 # namespaces). In this case we can just add it.
@@ -258,17 +258,19 @@ class NameFinder(object):
         wrapper parents. We don't want to see AST classes out in the
         evaluation, so remove them already here!
         """
-        for n in names:
-            definition = n.parent
-            if isinstance(definition, (compiled.CompiledObject,
-                                       iterable.BuiltinMethod)):
-                # TODO this if should really be removed by changing the type of
-                #      those classes.
-                yield n
-            elif definition.type in ('funcdef', 'classdef', 'file_input'):
-                yield self._evaluator.wrap(definition).name
-            else:
-                yield n
+
+        return names
+        #for n in names:
+        #    definition = n.parent
+        #    if isinstance(definition, (compiled.CompiledObject,
+        #                               iterable.BuiltinMethod)):
+        #        # TODO this if should really be removed by changing the type of
+        #        #      those classes.
+        #        yield n
+        #    elif definition.type in ('funcdef', 'classdef', 'file_input'):
+        #        yield self._evaluator.wrap(definition).name
+        #    else:
+        #        yield n

     def _check_getattr(self, inst):
         """Checks for both __getattr__ and __getattribute__ methods"""
@@ -308,8 +310,8 @@ class NameFinder(object):
                     return n

         for name in names:
-            new_types = _name_to_types(self._evaluator, name, self.scope)
-            if isinstance(self.scope, (er.Class, er.Instance)) and attribute_lookup:
+            new_types = name.infer()
+            if isinstance(self.scope, (er.ClassContext, er.Instance)) and attribute_lookup:
                 types |= set(self._resolve_descriptors(name, new_types))
             else:
                 types |= set(new_types)
@@ -347,7 +349,7 @@ def _get_global_stmt_scopes(evaluator, global_stmt, name):


 @memoize_default(set(), evaluator_is_first_arg=True)
-def _name_to_types(evaluator, name, scope):
+def _name_to_types(evaluator, context, name, scope):
     types = []
     typ = name.get_definition()
     if typ.isinstance(tree.ForStmt):
@@ -365,7 +367,7 @@ def _name_to_types(evaluator, name, scope):
     elif isinstance(typ, tree.Param):
         types = _eval_param(evaluator, typ, scope)
     elif typ.isinstance(tree.ExprStmt):
-        types = _remove_statements(evaluator, typ, name)
+        types = _remove_statements(evaluator, context, typ, name)
     elif typ.isinstance(tree.WithStmt):
         types = evaluator.eval_element(typ.node_from_name(name))
     elif isinstance(typ, tree.Import):
@@ -393,7 +395,7 @@ def _name_to_types(evaluator, name, scope):
     return types


-def _remove_statements(evaluator, stmt, name):
+def _remove_statements(evaluator, context, stmt, name):
    """
    This is the part where statements are being stripped.

@@ -415,7 +417,7 @@ def _remove_statements(evaluator, stmt, name):
        pep0484.find_type_from_comment_hint_assign(evaluator, stmt, name)
    if pep0484types:
        return pep0484types
-    types |= evaluator.eval_statement(stmt, seek_name=name)
+    types |= evaluator.eval_statement(context, stmt, seek_name=name)

    if check_instance is not None:
        # class renames
@@ -33,6 +33,7 @@ UNSURE = Status(None, 'unsure')


 def break_check(evaluator, base_scope, stmt, origin_scope=None):
+    raise NotImplementedError
     element_scope = evaluator.wrap(stmt.get_parent_scope(include_flows=True))
     # Direct parents get resolved, we filter scopes that are separate branches.
     # This makes sense for autocompletion and static analysis. For actual
@@ -30,7 +30,7 @@ def try_iter_content(types, depth=0):


 class Arguments(tree.Base):
-    def __init__(self, evaluator, argument_node, trailer=None):
+    def __init__(self, evaluator, context, argument_node, trailer=None):
        """
        The argument_node is either a parser node or a list of evaluated
        objects. Those evaluated objects may be lists of evaluated objects
@@ -39,6 +39,7 @@ class Arguments(tree.Base):
         :param argument_node: May be an argument_node or a list of nodes.
         """
         self.argument_node = argument_node
+        self._context = context
         self._evaluator = evaluator
         self.trailer = trailer  # Can be None, e.g. in a class definition.

@@ -73,7 +74,7 @@ class Arguments(tree.Base):
             element = self.argument_node[0]
             from jedi.evaluate.iterable import AlreadyEvaluated
             if isinstance(element, AlreadyEvaluated):
-                element = list(self._evaluator.eval_element(element))[0]
+                element = list(self._evaluator.eval_element(self._context, element))[0]
         except IndexError:
             return None
         else:
@@ -93,14 +94,14 @@ class Arguments(tree.Base):
         named_args = []
         for stars, el in self._split():
             if stars == 1:
-                arrays = self._evaluator.eval_element(el)
+                arrays = self._evaluator.eval_element(self._context, el)
                 iterators = [_iterate_star_args(self._evaluator, a, el, func)
                              for a in arrays]
                 iterators = list(iterators)
                 for values in list(zip_longest(*iterators)):
                     yield None, [v for v in values if v is not None]
             elif stars == 2:
-                arrays = self._evaluator.eval_element(el)
+                arrays = self._evaluator.eval_element(self._context, el)
                 dicts = [_star_star_dict(self._evaluator, a, el, func)
                          for a in arrays]
                 for dct in dicts:
@@ -155,7 +156,7 @@ class Arguments(tree.Base):
                 debug.warning('TypeError: %s expected at least %s arguments, got %s',
                               name, len(arguments), i)
                 raise ValueError
-            values = set(chain.from_iterable(self._evaluator.eval_element(el)
+            values = set(chain.from_iterable(self._evaluator.eval_element(self._context, el)
                                              for el in va_values))
             if not values and not optional:
                 # For the stdlib we always want values. If we don't get them,
@@ -172,7 +173,7 @@ class Arguments(tree.Base):
     def eval_args(self):
         # TODO this method doesn't work with named args and a lot of other
         # things. Use unpack.
-        return [self._evaluator.eval_element(el) for stars, el in self._split()]
+        return [self._evaluator.eval_element(self._context, el) for stars, el in self._split()]

     def __repr__(self):
         return '<%s: %s>' % (type(self).__name__, self.argument_node)
@@ -191,7 +192,7 @@ class Arguments(tree.Base):
         """
         for key, element_values in self.unpack():
             for element in element_values:
-                types = self._evaluator.eval_element(element)
+                types = self._evaluator.eval_element(self._context, element)
                 try_iter_content(types)


@@ -59,6 +59,21 @@ from jedi.evaluate.filters import ParserTreeFilter, FunctionExecutionFilter, \
     GlobalNameFilter, DictFilter


+class Context(object):
+    def __init__(self, evaluator, parent_context=None):
+        self._evaluator = evaluator
+        self.parent_context = parent_context
+
+    def get_parent_flow_context(self):
+        return self.parent_context
+
+
+class FlowContext(Context):
+    def get_parent_flow_context(self):
+        if 1:
+            return self.parent_context
+
+
 class Executed(tree.Base):
     """
     An instance is also an executable - because __init__ is called
@@ -152,7 +167,7 @@ class Instance(use_metaclass(CachedMetaClass, Executed)):
         # This loop adds the names of the self object, copies them and removes
         # the self.
         for sub in self.base.subscopes:
-            if isinstance(sub, tree.Class):
+            if isinstance(sub, tree.ClassContext):
                 continue
             # Get the self name, if there's one.
             self_name = self._get_func_self_name(sub)
@@ -292,18 +307,18 @@ class CompiledInstanceClassFilter(compiled.CompiledObjectFilter):


 class InstanceClassFilter(ParserTreeFilter):
-    def __init__(self, evaluator, instance, parser_scope, origin_scope):
+    def __init__(self, evaluator, context, parser_scope, origin_scope):
         super(InstanceClassFilter, self).__init__(
             evaluator=evaluator,
+            context=context,
             parser_scope=parser_scope,
             origin_scope=origin_scope
         )
-        self._instance = instance

     def _equals_origin_scope(self):
         node = self._origin_scope
         while node is not None:
-            if node == self._parser_scope or node == self._instance:
+            if node == self._parser_scope or node == self._context:
                 return True
             node = node.get_parent_scope()
         return False
@@ -314,7 +329,7 @@ class InstanceClassFilter(ParserTreeFilter):

     def _filter(self, names):
         names = super(InstanceClassFilter, self)._filter(names)
-        return [get_instance_el(self._evaluator, self._instance, name, True)
+        return [get_instance_el(self._evaluator, self._context, name, True)
                 for name in names if self._access_possible(name)]

     def _check_flows(self, names):
@@ -337,12 +352,12 @@ class SelfNameFilter(InstanceClassFilter):
                     and len(trailer.children) == 2 \
                     and trailer.children[0] == '.':
                 if name.is_definition() and self._access_possible(name):
-                    init_execution = self._instance._get_init_execution()
+                    init_execution = self._context._get_init_execution()
                     # Hopefully we can somehow change this.
                     if init_execution is not None and \
                             init_execution.start_pos < name.start_pos < init_execution.end_pos:
                         name = init_execution.name_for_position(name.start_pos)
-                    yield get_instance_el(self._evaluator, self._instance, name)
+                    yield get_instance_el(self._evaluator, self._context, name)


 class LazyInstanceDict(object):
@@ -406,8 +421,8 @@ class InstanceElement(use_metaclass(CachedMetaClass, tree.Base)):
     @memoize_default()
     def parent(self):
         par = self.var.parent
-        if isinstance(par, Class) and par == self.instance.base \
-                or not isinstance(self.instance.base, (tree.Class, Class)) \
+        if isinstance(par, ClassContext) and par == self.instance.base \
+                or not isinstance(self.instance.base, (tree.Class, ClassContext)) \
                 or isinstance(par, tree.Class) \
                 and par == self.instance.base.base:
             par = self.instance
@@ -501,13 +516,13 @@ class Wrapper(tree.Base):
         return helpers.FakeName(unicode(name), self, name.start_pos)


-class Class(use_metaclass(CachedMetaClass, Wrapper)):
+class ClassContext(use_metaclass(CachedMetaClass, Context, Wrapper)):
     """
     This class is not only important to extend `tree.Class`, it is also a
     important for descriptors (if the descriptor methods are evaluated or not).
     """
-    def __init__(self, evaluator, base):
-        self._evaluator = evaluator
+    def __init__(self, evaluator, base, parent_context):
+        super(ClassContext, self).__init__(evaluator, parent_context=parent_context)
         self.base = base

     @memoize_default(default=())
@@ -547,7 +562,7 @@ class Class(use_metaclass(CachedMetaClass, Wrapper)):
     def py__bases__(self):
         arglist = self.base.get_super_arglist()
         if arglist:
-            args = param.Arguments(self._evaluator, arglist)
+            args = param.Arguments(self._evaluator, self, arglist)
             return list(chain.from_iterable(args.eval_args()))
         else:
             return [compiled.create(self._evaluator, object)]
@@ -577,14 +592,14 @@ class Class(use_metaclass(CachedMetaClass, Wrapper)):

     def get_filters(self, search_global, until_position=None, origin_scope=None, is_instance=False):
         if search_global:
-            yield ParserTreeFilter(self._evaluator, self.base, until_position, origin_scope=origin_scope)
+            yield ParserTreeFilter(self._evaluator, self, self.base, until_position, origin_scope=origin_scope)
         else:
             for scope in self.py__mro__():
                 if isinstance(scope, compiled.CompiledObject):
                     for filter in scope.get_filters(is_instance=is_instance):
                         yield filter
                 else:
-                    yield ParserTreeFilter(self._evaluator, scope.base, origin_scope=origin_scope)
+                    yield ParserTreeFilter(self._evaluator, self, scope.base, origin_scope=origin_scope)

     def is_class(self):
         return True
@@ -684,7 +699,7 @@ class Function(use_metaclass(CachedMetaClass, Wrapper)):

     def get_filters(self, search_global, until_position=None, origin_scope=None):
         if search_global:
-            yield ParserTreeFilter(self._evaluator, self.base, until_position, origin_scope=origin_scope)
+            yield ParserTreeFilter(self._evaluator, self, self.base, until_position, origin_scope=origin_scope)
         else:
             scope = self.py__class__()
             for filter in scope.get_filters(search_global=False, origin_scope=origin_scope):
@@ -862,7 +877,7 @@ class FunctionExecution(Executed):
                     del evaluator.predefined_if_name_dict_dict[for_stmt]

     def get_filters(self, search_global, until_position=None, origin_scope=None):
-        yield FunctionExecutionFilter(self._evaluator, self._original_function,
+        yield FunctionExecutionFilter(self._evaluator, self, self._original_function,
                                       self._copied_funcdef,
                                       self.param_by_name,
                                       until_position,
@@ -923,7 +938,9 @@ class GlobalName(helpers.FakeName):
                                          name.start_pos, is_definition=True)


-class ModuleWrapper(use_metaclass(CachedMetaClass, tree.Module, Wrapper)):
+class ModuleContext(use_metaclass(CachedMetaClass, tree.Module, Wrapper)):
+    parent_context = None
+
     def __init__(self, evaluator, module, parent_module=None):
         self._evaluator = evaluator
         self.base = self._module = module
@@ -942,11 +959,12 @@ class ModuleWrapper(use_metaclass(CachedMetaClass, tree.Module, Wrapper)):
     def get_filters(self, search_global, until_position=None, origin_scope=None):
         yield ParserTreeFilter(
             self._evaluator,
+            self,
             self._module,
             until_position,
             origin_scope=origin_scope
         )
-        yield GlobalNameFilter(self._module)
+        yield GlobalNameFilter(self, self._module)
         yield DictFilter(self._sub_modules_dict())
         yield DictFilter(self._module_attributes_dict())
         # TODO
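Taken together, the hunks above replace implicit scope lookups with an explicit `context` parameter that is created once (`create_context`) and then passed as the first argument to `eval_element`, `eval_statement` and the filter constructors. A self-contained sketch of that calling convention, simplified and hypothetical rather than jedi's real evaluator:

    # Simplified illustration of threading one explicit context through a
    # recursive evaluator instead of re-deriving the scope at every node.
    class Context(object):
        def __init__(self, name, parent_context=None):
            self.name = name
            self.parent_context = parent_context

    class Evaluator(object):
        def create_context(self, scope_name):
            # The commit's create_context() likewise wraps the parent scope;
            # at this stage it only supports module-level scopes.
            return Context(scope_name)

        def eval_element(self, context, element):
            # Every recursive call forwards the same context explicitly.
            if isinstance(element, list):
                return [t for child in element
                        for t in self.eval_element(context, child)]
            return ['%s in %s' % (element, context.name)]

    evaluator = Evaluator()
    ctx = evaluator.create_context('module')
    print(evaluator.eval_element(ctx, ['a', ['b', 'c']]))
    # ['a in module', 'b in module', 'c in module']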