forked from VimPlug/jedi
Move the evaluate package to inference
6  jedi/inference/context/__init__.py  Normal file
@@ -0,0 +1,6 @@
from jedi.inference.context.module import ModuleContext
from jedi.inference.context.klass import ClassContext
from jedi.inference.context.function import FunctionContext, \
    MethodContext, FunctionExecutionContext
from jedi.inference.context.instance import AnonymousInstance, BoundMethod, \
    CompiledInstance, AbstractInstanceContext, TreeInstance
15  jedi/inference/context/decorator.py  Normal file
@@ -0,0 +1,15 @@
'''
Decorators are not really contexts, however we need some wrappers to improve
docstrings and other things around decorators.
'''

from jedi.inference.base_context import ContextWrapper


class Decoratee(ContextWrapper):
    def __init__(self, wrapped_context, original_context):
        self._wrapped_context = wrapped_context
        self._original_context = original_context

    def py__doc__(self):
        return self._original_context.py__doc__()
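
The Decoratee wrapper above only forwards py__doc__ to the original, undecorated context so that docstrings survive decoration. A minimal standalone sketch of the same idea in plain Python follows; all names in it (DocPreservingWrapper, noisy, greet) are illustrative and are not part of this commit or of Jedi's API.

# Illustrative only: delegate attribute access to the decorated result but
# keep the docstring of the object that was decorated, mirroring Decoratee.
class DocPreservingWrapper(object):
    def __init__(self, wrapped, original):
        self._wrapped = wrapped      # the decorated result
        self._original = original    # the plain function/class

    @property
    def __doc__(self):
        return self._original.__doc__

    def __getattr__(self, name):
        return getattr(self._wrapped, name)


def noisy(func):
    def inner(*args, **kwargs):
        print('calling', func.__name__)
        return func(*args, **kwargs)
    return inner


def greet(name):
    """Return a greeting for ``name``."""
    return 'hello ' + name


wrapped = DocPreservingWrapper(noisy(greet), greet)
assert wrapped.__doc__ == greet.__doc__
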
433  jedi/inference/context/function.py  Normal file
@@ -0,0 +1,433 @@
from parso.python import tree

from jedi._compatibility import use_metaclass
from jedi import debug
from jedi.inference.cache import evaluator_method_cache, CachedMetaClass
from jedi.inference import compiled
from jedi.inference import recursion
from jedi.inference import docstrings
from jedi.inference import flow_analysis
from jedi.inference import helpers
from jedi.inference.signature import TreeSignature
from jedi.inference.arguments import AnonymousArguments
from jedi.inference.filters import ParserTreeFilter, FunctionExecutionFilter
from jedi.inference.names import ContextName, AbstractNameDefinition, ParamName
from jedi.inference.base_context import ContextualizedNode, NO_CONTEXTS, \
    ContextSet, TreeContext, ContextWrapper
from jedi.inference.lazy_context import LazyKnownContexts, LazyKnownContext, \
    LazyTreeContext
from jedi.inference.context import iterable
from jedi import parser_utils
from jedi.inference.parser_cache import get_yield_exprs
from jedi.inference.helpers import contexts_from_qualified_names


class LambdaName(AbstractNameDefinition):
    string_name = '<lambda>'
    api_type = u'function'

    def __init__(self, lambda_context):
        self._lambda_context = lambda_context
        self.parent_context = lambda_context.parent_context

    @property
    def start_pos(self):
        return self._lambda_context.tree_node.start_pos

    def infer(self):
        return ContextSet([self._lambda_context])


class FunctionAndClassBase(TreeContext):
    def get_qualified_names(self):
        if self.parent_context.is_class():
            n = self.parent_context.get_qualified_names()
            if n is None:
                # This means that the parent class lives within a function.
                return None
            return n + (self.py__name__(),)
        elif self.parent_context.is_module():
            return (self.py__name__(),)
        else:
            return None


class FunctionMixin(object):
    api_type = u'function'

    def get_filters(self, search_global=False, until_position=None, origin_scope=None):
        if search_global:
            yield ParserTreeFilter(
                self.evaluator,
                context=self,
                until_position=until_position,
                origin_scope=origin_scope
            )
        else:
            cls = self.py__class__()
            for instance in cls.execute_with_values():
                for filter in instance.get_filters(search_global=False, origin_scope=origin_scope):
                    yield filter

    def py__get__(self, instance, class_context):
        from jedi.inference.context.instance import BoundMethod
        if instance is None:
            # Calling the Foo.bar results in the original bar function.
            return ContextSet([self])
        return ContextSet([BoundMethod(instance, self)])

    def get_param_names(self):
        function_execution = self.get_function_execution()
        return [ParamName(function_execution, param.name)
                for param in self.tree_node.get_params()]

    @property
    def name(self):
        if self.tree_node.type == 'lambdef':
            return LambdaName(self)
        return ContextName(self, self.tree_node.name)

    def py__name__(self):
        return self.name.string_name

    def py__call__(self, arguments):
        function_execution = self.get_function_execution(arguments)
        return function_execution.infer()

    def get_function_execution(self, arguments=None):
        if arguments is None:
            arguments = AnonymousArguments()

        return FunctionExecutionContext(self.evaluator, self.parent_context, self, arguments)

    def get_signatures(self):
        return [TreeSignature(f) for f in self.get_signature_functions()]


class FunctionContext(use_metaclass(CachedMetaClass, FunctionMixin, FunctionAndClassBase)):
    def is_function(self):
        return True

    @classmethod
    def from_context(cls, context, tree_node):
        def create(tree_node):
            if context.is_class():
                return MethodContext(
                    context.evaluator,
                    context,
                    parent_context=parent_context,
                    tree_node=tree_node
                )
            else:
                return cls(
                    context.evaluator,
                    parent_context=parent_context,
                    tree_node=tree_node
                )

        overloaded_funcs = list(_find_overload_functions(context, tree_node))

        parent_context = context
        while parent_context.is_class() or parent_context.is_instance():
            parent_context = parent_context.parent_context

        function = create(tree_node)

        if overloaded_funcs:
            return OverloadedFunctionContext(
                function,
                [create(f) for f in overloaded_funcs]
            )
        return function

    def py__class__(self):
        c, = contexts_from_qualified_names(self.evaluator, u'types', u'FunctionType')
        return c

    def get_default_param_context(self):
        return self.parent_context

    def get_signature_functions(self):
        return [self]


class MethodContext(FunctionContext):
    def __init__(self, evaluator, class_context, *args, **kwargs):
        super(MethodContext, self).__init__(evaluator, *args, **kwargs)
        self.class_context = class_context

    def get_default_param_context(self):
        return self.class_context

    def get_qualified_names(self):
        # Need to implement this, because the parent context of a method
        # context is not the class context but the module.
        names = self.class_context.get_qualified_names()
        if names is None:
            return None
        return names + (self.py__name__(),)


class FunctionExecutionContext(TreeContext):
    function_execution_filter = FunctionExecutionFilter

    def __init__(self, evaluator, parent_context, function_context, var_args):
        super(FunctionExecutionContext, self).__init__(
            evaluator,
            parent_context,
            function_context.tree_node,
        )
        self.function_context = function_context
        self.var_args = var_args

    @evaluator_method_cache(default=NO_CONTEXTS)
    @recursion.execution_recursion_decorator()
    def get_return_values(self, check_yields=False):
        funcdef = self.tree_node
        if funcdef.type == 'lambdef':
            return self.eval_node(funcdef.children[-1])

        if check_yields:
            context_set = NO_CONTEXTS
            returns = get_yield_exprs(self.evaluator, funcdef)
        else:
            returns = funcdef.iter_return_stmts()
            from jedi.inference.gradual.annotation import infer_return_types
            context_set = infer_return_types(self)
            if context_set:
                # If there are annotations, prefer them over anything else.
                # This will make it faster.
                return context_set
            context_set |= docstrings.infer_return_types(self.function_context)

        for r in returns:
            check = flow_analysis.reachability_check(self, funcdef, r)
            if check is flow_analysis.UNREACHABLE:
                debug.dbg('Return unreachable: %s', r)
            else:
                if check_yields:
                    context_set |= ContextSet.from_sets(
                        lazy_context.infer()
                        for lazy_context in self._get_yield_lazy_context(r)
                    )
                else:
                    try:
                        children = r.children
                    except AttributeError:
                        ctx = compiled.builtin_from_name(self.evaluator, u'None')
                        context_set |= ContextSet([ctx])
                    else:
                        context_set |= self.eval_node(children[1])
            if check is flow_analysis.REACHABLE:
                debug.dbg('Return reachable: %s', r)
                break
        return context_set

    def _get_yield_lazy_context(self, yield_expr):
        if yield_expr.type == 'keyword':
            # `yield` just yields None.
            ctx = compiled.builtin_from_name(self.evaluator, u'None')
            yield LazyKnownContext(ctx)
            return

        node = yield_expr.children[1]
        if node.type == 'yield_arg':  # It must be a yield from.
            cn = ContextualizedNode(self, node.children[1])
            for lazy_context in cn.infer().iterate(cn):
                yield lazy_context
        else:
            yield LazyTreeContext(self, node)

    @recursion.execution_recursion_decorator(default=iter([]))
    def get_yield_lazy_contexts(self, is_async=False):
        # TODO: if is_async, wrap yield statements in Awaitable/async_generator_asend
        for_parents = [(y, tree.search_ancestor(y, 'for_stmt', 'funcdef',
                                                'while_stmt', 'if_stmt'))
                       for y in get_yield_exprs(self.evaluator, self.tree_node)]

        # Calculate if the yields are placed within the same for loop.
        yields_order = []
        last_for_stmt = None
        for yield_, for_stmt in for_parents:
            # For really simple for loops we can predict the order. Otherwise
            # we just ignore it.
            parent = for_stmt.parent
            if parent.type == 'suite':
                parent = parent.parent
            if for_stmt.type == 'for_stmt' and parent == self.tree_node \
                    and parser_utils.for_stmt_defines_one_name(for_stmt):  # Simplicity for now.
                if for_stmt == last_for_stmt:
                    yields_order[-1][1].append(yield_)
                else:
                    yields_order.append((for_stmt, [yield_]))
            elif for_stmt == self.tree_node:
                yields_order.append((None, [yield_]))
            else:
                types = self.get_return_values(check_yields=True)
                if types:
                    yield LazyKnownContexts(types)
                return
            last_for_stmt = for_stmt

        for for_stmt, yields in yields_order:
            if for_stmt is None:
                # No for_stmt, just normal yields.
                for yield_ in yields:
                    for result in self._get_yield_lazy_context(yield_):
                        yield result
            else:
                input_node = for_stmt.get_testlist()
                cn = ContextualizedNode(self, input_node)
                ordered = cn.infer().iterate(cn)
                ordered = list(ordered)
                for lazy_context in ordered:
                    dct = {str(for_stmt.children[1].value): lazy_context.infer()}
                    with helpers.predefine_names(self, for_stmt, dct):
                        for yield_in_same_for_stmt in yields:
                            for result in self._get_yield_lazy_context(yield_in_same_for_stmt):
                                yield result

    def merge_yield_contexts(self, is_async=False):
        return ContextSet.from_sets(
            lazy_context.infer()
            for lazy_context in self.get_yield_lazy_contexts()
        )

    def get_filters(self, search_global=False, until_position=None, origin_scope=None):
        yield self.function_execution_filter(self.evaluator, self,
                                             until_position=until_position,
                                             origin_scope=origin_scope)

    @evaluator_method_cache()
    def get_executed_params_and_issues(self):
        return self.var_args.get_executed_params_and_issues(self)

    def matches_signature(self):
        executed_params, issues = self.get_executed_params_and_issues()
        if issues:
            return False

        matches = all(executed_param.matches_signature()
                      for executed_param in executed_params)
        if debug.enable_notice:
            signature = parser_utils.get_call_signature(self.tree_node)
            if matches:
                debug.dbg("Overloading match: %s@%s (%s)",
                          signature, self.tree_node.start_pos[0], self.var_args, color='BLUE')
            else:
                debug.dbg("Overloading no match: %s@%s (%s)",
                          signature, self.tree_node.start_pos[0], self.var_args, color='BLUE')
        return matches

    def infer(self):
        """
        Created to be used by inheritance.
        """
        evaluator = self.evaluator
        is_coroutine = self.tree_node.parent.type in ('async_stmt', 'async_funcdef')
        is_generator = bool(get_yield_exprs(evaluator, self.tree_node))
        from jedi.inference.gradual.typing import GenericClass

        if is_coroutine:
            if is_generator:
                if evaluator.environment.version_info < (3, 6):
                    return NO_CONTEXTS
                async_generator_classes = evaluator.typing_module \
                    .py__getattribute__('AsyncGenerator')

                yield_contexts = self.merge_yield_contexts(is_async=True)
                # The contravariant doesn't seem to be defined.
                generics = (yield_contexts.py__class__(), NO_CONTEXTS)
                return ContextSet(
                    # In Python 3.6 AsyncGenerator is still a class.
                    GenericClass(c, generics)
                    for c in async_generator_classes
                ).execute_annotation()
            else:
                if evaluator.environment.version_info < (3, 5):
                    return NO_CONTEXTS
                async_classes = evaluator.typing_module.py__getattribute__('Coroutine')
                return_contexts = self.get_return_values()
                # Only the first generic is relevant.
                generics = (return_contexts.py__class__(), NO_CONTEXTS, NO_CONTEXTS)
                return ContextSet(
                    GenericClass(c, generics) for c in async_classes
                ).execute_annotation()
        else:
            if is_generator:
                return ContextSet([iterable.Generator(evaluator, self)])
            else:
                return self.get_return_values()


class OverloadedFunctionContext(FunctionMixin, ContextWrapper):
    def __init__(self, function, overloaded_functions):
        super(OverloadedFunctionContext, self).__init__(function)
        self._overloaded_functions = overloaded_functions

    def py__call__(self, arguments):
        debug.dbg("Execute overloaded function %s", self._wrapped_context, color='BLUE')
        function_executions = []
        context_set = NO_CONTEXTS
        matched = False
        for f in self._overloaded_functions:
            function_execution = f.get_function_execution(arguments)
            function_executions.append(function_execution)
            if function_execution.matches_signature():
                matched = True
                return function_execution.infer()

        if matched:
            return context_set

        if self.evaluator.is_analysis:
            # In this case we want precision.
            return NO_CONTEXTS
        return ContextSet.from_sets(fe.infer() for fe in function_executions)

    def get_signature_functions(self):
        return self._overloaded_functions


def _find_overload_functions(context, tree_node):
    def _is_overload_decorated(funcdef):
        if funcdef.parent.type == 'decorated':
            decorators = funcdef.parent.children[0]
            if decorators.type == 'decorator':
                decorators = [decorators]
            else:
                decorators = decorators.children
            for decorator in decorators:
                dotted_name = decorator.children[1]
                if dotted_name.type == 'name' and dotted_name.value == 'overload':
                    # TODO check with contexts if it's the right overload
                    return True
        return False

    if tree_node.type == 'lambdef':
        return

    if _is_overload_decorated(tree_node):
        yield tree_node

    while True:
        filter = ParserTreeFilter(
            context.evaluator,
            context,
            until_position=tree_node.start_pos
        )
        names = filter.get(tree_node.name.value)
        assert isinstance(names, list)
        if not names:
            break

        found = False
        for name in names:
            funcdef = name.tree_name.parent
            if funcdef.type == 'funcdef' and _is_overload_decorated(funcdef):
                tree_node = funcdef
                found = True
                yield funcdef

        if not found:
            break
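
OverloadedFunctionContext.py__call__ above tries each @overload variant in order, returns the inference result of the first one whose signature matches the call arguments, and otherwise falls back to the union of all variants (or to nothing in analysis mode). A rough standalone sketch of that selection strategy follows; call_overloaded and its behaviour are illustrative assumptions, not Jedi's API.

# Illustrative only: pick the first overload whose parameters accept the
# call, otherwise merge the results of every overload that runs.
import inspect


def call_overloaded(overloads, *args, **kwargs):
    for func in overloads:
        try:
            inspect.signature(func).bind(*args, **kwargs)
        except TypeError:
            continue                      # signature does not match
        return func(*args, **kwargs)      # first match wins

    # No overload matched: be permissive and collect everything that works.
    results = []
    for func in overloads:
        try:
            results.append(func(*args, **kwargs))
        except TypeError:
            pass
    return results
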
528  jedi/inference/context/instance.py  Normal file
@@ -0,0 +1,528 @@
from abc import abstractproperty

from jedi import debug
from jedi import settings
from jedi.inference import compiled
from jedi.inference.compiled.context import CompiledObjectFilter
from jedi.inference.helpers import contexts_from_qualified_names
from jedi.inference.filters import AbstractFilter
from jedi.inference.names import ContextName, TreeNameDefinition
from jedi.inference.base_context import Context, NO_CONTEXTS, ContextSet, \
    iterator_to_context_set, ContextWrapper
from jedi.inference.lazy_context import LazyKnownContext, LazyKnownContexts
from jedi.inference.cache import evaluator_method_cache
from jedi.inference.arguments import AnonymousArguments, \
    ValuesArguments, TreeArgumentsWrapper
from jedi.inference.context.function import \
    FunctionContext, FunctionMixin, OverloadedFunctionContext
from jedi.inference.context.klass import ClassContext, apply_py__get__, \
    ClassFilter
from jedi.inference.context import iterable
from jedi.parser_utils import get_parent_scope


class InstanceExecutedParam(object):
    def __init__(self, instance, tree_param):
        self._instance = instance
        self._tree_param = tree_param
        self.string_name = self._tree_param.name.value

    def infer(self):
        return ContextSet([self._instance])

    def matches_signature(self):
        return True


class AnonymousInstanceArguments(AnonymousArguments):
    def __init__(self, instance):
        self._instance = instance

    def get_executed_params_and_issues(self, execution_context):
        from jedi.inference.dynamic import search_params
        tree_params = execution_context.tree_node.get_params()
        if not tree_params:
            return [], []

        self_param = InstanceExecutedParam(self._instance, tree_params[0])
        if len(tree_params) == 1:
            # If the only param is self, we don't need to try to find
            # executions of this function, we have all the params already.
            return [self_param], []
        executed_params = list(search_params(
            execution_context.evaluator,
            execution_context,
            execution_context.tree_node
        ))
        executed_params[0] = self_param
        return executed_params, []


class AbstractInstanceContext(Context):
    api_type = u'instance'

    def __init__(self, evaluator, parent_context, class_context, var_args):
        super(AbstractInstanceContext, self).__init__(evaluator, parent_context)
        # Generated instances are classes that are just generated by self
        # (No var_args) used.
        self.class_context = class_context
        self.var_args = var_args

    def is_instance(self):
        return True

    def get_qualified_names(self):
        return self.class_context.get_qualified_names()

    def get_annotated_class_object(self):
        return self.class_context  # This is the default.

    def py__call__(self, arguments):
        names = self.get_function_slot_names(u'__call__')
        if not names:
            # Means the Instance is not callable.
            return super(AbstractInstanceContext, self).py__call__(arguments)

        return ContextSet.from_sets(name.infer().execute(arguments) for name in names)

    def py__class__(self):
        return self.class_context

    def py__bool__(self):
        # Signalize that we don't know about the bool type.
        return None

    def get_function_slot_names(self, name):
        # Python classes don't look at the dictionary of the instance when
        # looking up `__call__`. This is something that has to do with Python's
        # internal slot system (note: not __slots__, but C slots).
        for filter in self.get_filters(include_self_names=False):
            names = filter.get(name)
            if names:
                return names
        return []

    def execute_function_slots(self, names, *inferred_args):
        return ContextSet.from_sets(
            name.infer().execute_with_values(*inferred_args)
            for name in names
        )

    def py__get__(self, obj, class_context):
        """
        obj may be None.
        """
        # Arguments in __get__ descriptors are obj, class.
        # `method` is the new parent of the array, don't know if that's good.
        names = self.get_function_slot_names(u'__get__')
        if names:
            if obj is None:
                obj = compiled.builtin_from_name(self.evaluator, u'None')
            return self.execute_function_slots(names, obj, class_context)
        else:
            return ContextSet([self])

    def get_filters(self, search_global=None, until_position=None,
                    origin_scope=None, include_self_names=True):
        class_context = self.get_annotated_class_object()
        if include_self_names:
            for cls in class_context.py__mro__():
                if not isinstance(cls, compiled.CompiledObject) \
                        or cls.tree_node is not None:
                    # In this case we're excluding compiled objects that are
                    # not fake objects. It doesn't make sense for normal
                    # compiled objects to search for self variables.
                    yield SelfAttributeFilter(self.evaluator, self, cls, origin_scope)

        class_filters = class_context.get_filters(
            search_global=False,
            origin_scope=origin_scope,
            is_instance=True,
        )
        for f in class_filters:
            if isinstance(f, ClassFilter):
                yield InstanceClassFilter(self.evaluator, self, f)
            elif isinstance(f, CompiledObjectFilter):
                yield CompiledInstanceClassFilter(self.evaluator, self, f)
            else:
                # Probably from the metaclass.
                yield f

    def py__getitem__(self, index_context_set, contextualized_node):
        names = self.get_function_slot_names(u'__getitem__')
        if not names:
            return super(AbstractInstanceContext, self).py__getitem__(
                index_context_set,
                contextualized_node,
            )

        args = ValuesArguments([index_context_set])
        return ContextSet.from_sets(name.infer().execute(args) for name in names)

    def py__iter__(self, contextualized_node=None):
        iter_slot_names = self.get_function_slot_names(u'__iter__')
        if not iter_slot_names:
            return super(AbstractInstanceContext, self).py__iter__(contextualized_node)

        def iterate():
            for generator in self.execute_function_slots(iter_slot_names):
                if generator.is_instance() and not generator.is_compiled():
                    # `__next__` logic.
                    if self.evaluator.environment.version_info.major == 2:
                        name = u'next'
                    else:
                        name = u'__next__'
                    next_slot_names = generator.get_function_slot_names(name)
                    if next_slot_names:
                        yield LazyKnownContexts(
                            generator.execute_function_slots(next_slot_names)
                        )
                    else:
                        debug.warning('Instance has no __next__ function in %s.', generator)
                else:
                    for lazy_context in generator.py__iter__():
                        yield lazy_context
        return iterate()

    @abstractproperty
    def name(self):
        pass

    def create_init_executions(self):
        for name in self.get_function_slot_names(u'__init__'):
            # TODO is this correct? I think we need to check for functions.
            if isinstance(name, LazyInstanceClassName):
                function = FunctionContext.from_context(
                    self.parent_context,
                    name.tree_name.parent
                )
                bound_method = BoundMethod(self, function)
                yield bound_method.get_function_execution(self.var_args)

    @evaluator_method_cache()
    def create_instance_context(self, class_context, node):
        if node.parent.type in ('funcdef', 'classdef'):
            node = node.parent
        scope = get_parent_scope(node)
        if scope == class_context.tree_node:
            return class_context
        else:
            parent_context = self.create_instance_context(class_context, scope)
            if scope.type == 'funcdef':
                func = FunctionContext.from_context(
                    parent_context,
                    scope,
                )
                bound_method = BoundMethod(self, func)
                if scope.name.value == '__init__' and parent_context == class_context:
                    return bound_method.get_function_execution(self.var_args)
                else:
                    return bound_method.get_function_execution()
            elif scope.type == 'classdef':
                class_context = ClassContext(self.evaluator, parent_context, scope)
                return class_context
            elif scope.type in ('comp_for', 'sync_comp_for'):
                # Comprehensions currently don't have a special scope in Jedi.
                return self.create_instance_context(class_context, scope)
            else:
                raise NotImplementedError
        return class_context

    def get_signatures(self):
        call_funcs = self.py__getattribute__('__call__').py__get__(self, self.class_context)
        return [s.bind(self) for s in call_funcs.get_signatures()]

    def __repr__(self):
        return "<%s of %s(%s)>" % (self.__class__.__name__, self.class_context,
                                   self.var_args)


class CompiledInstance(AbstractInstanceContext):
    def __init__(self, evaluator, parent_context, class_context, var_args):
        self._original_var_args = var_args
        super(CompiledInstance, self).__init__(evaluator, parent_context, class_context, var_args)

    @property
    def name(self):
        return compiled.CompiledContextName(self, self.class_context.name.string_name)

    def get_first_non_keyword_argument_contexts(self):
        key, lazy_context = next(self._original_var_args.unpack(), ('', None))
        if key is not None:
            return NO_CONTEXTS

        return lazy_context.infer()

    def is_stub(self):
        return False


class TreeInstance(AbstractInstanceContext):
    def __init__(self, evaluator, parent_context, class_context, var_args):
        # I don't think that dynamic append lookups should happen here. That
        # sounds more like something that should go to py__iter__.
        if class_context.py__name__() in ['list', 'set'] \
                and parent_context.get_root_context() == evaluator.builtins_module:
            # compare the module path with the builtin name.
            if settings.dynamic_array_additions:
                var_args = iterable.get_dynamic_array_instance(self, var_args)

        super(TreeInstance, self).__init__(evaluator, parent_context,
                                           class_context, var_args)
        self.tree_node = class_context.tree_node

    @property
    def name(self):
        return ContextName(self, self.class_context.name.tree_name)

    # This can recurse, if the initialization of the class includes a reference
    # to itself.
    @evaluator_method_cache(default=None)
    def _get_annotated_class_object(self):
        from jedi.inference.gradual.annotation import py__annotations__, \
            infer_type_vars_for_execution

        for func in self._get_annotation_init_functions():
            # Just take the first result, it should always be one, because we
            # control the typeshed code.
            bound = BoundMethod(self, func)
            execution = bound.get_function_execution(self.var_args)
            if not execution.matches_signature():
                # First check if the signature even matches, if not we don't
                # need to infer anything.
                continue

            all_annotations = py__annotations__(execution.tree_node)
            defined, = self.class_context.define_generics(
                infer_type_vars_for_execution(execution, all_annotations),
            )
            debug.dbg('Inferred instance context as %s', defined, color='BLUE')
            return defined
        return None

    def get_annotated_class_object(self):
        return self._get_annotated_class_object() or self.class_context

    def _get_annotation_init_functions(self):
        filter = next(self.class_context.get_filters())
        for init_name in filter.get('__init__'):
            for init in init_name.infer():
                if init.is_function():
                    for signature in init.get_signatures():
                        yield signature.context


class AnonymousInstance(TreeInstance):
    def __init__(self, evaluator, parent_context, class_context):
        super(AnonymousInstance, self).__init__(
            evaluator,
            parent_context,
            class_context,
            var_args=AnonymousInstanceArguments(self),
        )

    def get_annotated_class_object(self):
        return self.class_context  # This is the default.


class CompiledInstanceName(compiled.CompiledName):

    def __init__(self, evaluator, instance, klass, name):
        super(CompiledInstanceName, self).__init__(
            evaluator,
            klass.parent_context,
            name.string_name
        )
        self._instance = instance
        self._class_member_name = name

    @iterator_to_context_set
    def infer(self):
        for result_context in self._class_member_name.infer():
            if result_context.api_type == 'function':
                yield CompiledBoundMethod(result_context)
            else:
                yield result_context


class CompiledInstanceClassFilter(AbstractFilter):
    name_class = CompiledInstanceName

    def __init__(self, evaluator, instance, f):
        self._evaluator = evaluator
        self._instance = instance
        self._class_filter = f

    def get(self, name):
        return self._convert(self._class_filter.get(name))

    def values(self):
        return self._convert(self._class_filter.values())

    def _convert(self, names):
        klass = self._class_filter.compiled_object
        return [
            CompiledInstanceName(self._evaluator, self._instance, klass, n)
            for n in names
        ]


class BoundMethod(FunctionMixin, ContextWrapper):
    def __init__(self, instance, function):
        super(BoundMethod, self).__init__(function)
        self.instance = instance

    def is_bound_method(self):
        return True

    def py__class__(self):
        c, = contexts_from_qualified_names(self.evaluator, u'types', u'MethodType')
        return c

    def _get_arguments(self, arguments):
        if arguments is None:
            arguments = AnonymousInstanceArguments(self.instance)

        return InstanceArguments(self.instance, arguments)

    def get_function_execution(self, arguments=None):
        arguments = self._get_arguments(arguments)
        return super(BoundMethod, self).get_function_execution(arguments)

    def py__call__(self, arguments):
        if isinstance(self._wrapped_context, OverloadedFunctionContext):
            return self._wrapped_context.py__call__(self._get_arguments(arguments))

        function_execution = self.get_function_execution(arguments)
        return function_execution.infer()

    def get_signature_functions(self):
        return [
            BoundMethod(self.instance, f)
            for f in self._wrapped_context.get_signature_functions()
        ]

    def get_signatures(self):
        return [sig.bind(self) for sig in super(BoundMethod, self).get_signatures()]

    def __repr__(self):
        return '<%s: %s>' % (self.__class__.__name__, self._wrapped_context)


class CompiledBoundMethod(ContextWrapper):
    def is_bound_method(self):
        return True

    def get_signatures(self):
        return [sig.bind(self) for sig in self._wrapped_context.get_signatures()]


class SelfName(TreeNameDefinition):
    """
    This name calculates the parent_context lazily.
    """
    def __init__(self, instance, class_context, tree_name):
        self._instance = instance
        self.class_context = class_context
        self.tree_name = tree_name

    @property
    def parent_context(self):
        return self._instance.create_instance_context(self.class_context, self.tree_name)


class LazyInstanceClassName(object):
    def __init__(self, instance, class_context, class_member_name):
        self._instance = instance
        self.class_context = class_context
        self._class_member_name = class_member_name

    @iterator_to_context_set
    def infer(self):
        for result_context in self._class_member_name.infer():
            for c in apply_py__get__(result_context, self._instance, self.class_context):
                yield c

    def __getattr__(self, name):
        return getattr(self._class_member_name, name)

    def __repr__(self):
        return '<%s: %s>' % (self.__class__.__name__, self._class_member_name)


class InstanceClassFilter(AbstractFilter):
    """
    This filter is special in that it uses the class filter and wraps the
    resulting names in LazyInstanceClassName. The idea is that the class name
    filtering can be very flexible and always be reflected in instances.
    """
    def __init__(self, evaluator, instance, class_filter):
        self._instance = instance
        self._class_filter = class_filter

    def get(self, name):
        return self._convert(self._class_filter.get(name, from_instance=True))

    def values(self):
        return self._convert(self._class_filter.values(from_instance=True))

    def _convert(self, names):
        return [LazyInstanceClassName(self._instance, self._class_filter.context, n) for n in names]

    def __repr__(self):
        return '<%s for %s>' % (self.__class__.__name__, self._class_filter.context)


class SelfAttributeFilter(ClassFilter):
    """
    This class basically filters all the use cases where `self.*` was assigned.
    """
    name_class = SelfName

    def __init__(self, evaluator, context, class_context, origin_scope):
        super(SelfAttributeFilter, self).__init__(
            evaluator=evaluator,
            context=context,
            node_context=class_context,
            origin_scope=origin_scope,
            is_instance=True,
        )
        self._class_context = class_context

    def _filter(self, names):
        names = self._filter_self_names(names)
        start, end = self._parser_scope.start_pos, self._parser_scope.end_pos
        return [n for n in names if start < n.start_pos < end]

    def _filter_self_names(self, names):
        for name in names:
            trailer = name.parent
            if trailer.type == 'trailer' \
                    and len(trailer.parent.children) == 2 \
                    and trailer.children[0] == '.':
                if name.is_definition() and self._access_possible(name, from_instance=True):
                    # TODO filter non-self assignments.
                    yield name

    def _convert_names(self, names):
        return [self.name_class(self.context, self._class_context, name) for name in names]

    def _check_flows(self, names):
        return names


class InstanceArguments(TreeArgumentsWrapper):
    def __init__(self, instance, arguments):
        super(InstanceArguments, self).__init__(arguments)
        self.instance = instance

    def unpack(self, func=None):
        yield None, LazyKnownContext(self.instance)
        for values in self._wrapped_arguments.unpack(func):
            yield values

    def get_executed_params_and_issues(self, execution_context):
        if isinstance(self._wrapped_arguments, AnonymousInstanceArguments):
            return self._wrapped_arguments.get_executed_params_and_issues(execution_context)

        return super(InstanceArguments, self).get_executed_params_and_issues(execution_context)
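
The BoundMethod/InstanceArguments pair above models Python's binding of self: when a method pulled off an instance is executed, InstanceArguments.unpack() injects the instance as the first positional argument before the explicit call arguments. A small standalone sketch of that behaviour follows; ToyBoundMethod and Point are illustrative names only and are not part of this commit or of Jedi's API.

# Illustrative only: a bound-method wrapper that prepends the instance,
# mirroring what InstanceArguments.unpack() does during inference.
class ToyBoundMethod(object):
    def __init__(self, instance, function):
        self._instance = instance
        self._function = function

    def __call__(self, *args, **kwargs):
        # The instance becomes the first positional argument (i.e. `self`).
        return self._function(self._instance, *args, **kwargs)


class Point(object):
    def __init__(self, x, y):
        self.x, self.y = x, y

    def shifted(self, dx, dy):
        return Point(self.x + dx, self.y + dy)


p = Point(1, 2)
bound = ToyBoundMethod(p, Point.shifted)
q = bound(3, 4)          # equivalent to p.shifted(3, 4)
assert (q.x, q.y) == (4, 6)
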
821  jedi/inference/context/iterable.py  Normal file
@@ -0,0 +1,821 @@
|
||||
"""
|
||||
Contains all classes and functions to deal with lists, dicts, generators and
|
||||
iterators in general.
|
||||
|
||||
Array modifications
|
||||
*******************
|
||||
|
||||
If the content of an array (``set``/``list``) is requested somewhere, the
|
||||
current module will be checked for appearances of ``arr.append``,
|
||||
``arr.insert``, etc. If the ``arr`` name points to an actual array, the
|
||||
content will be added
|
||||
|
||||
This can be really cpu intensive, as you can imagine. Because |jedi| has to
|
||||
follow **every** ``append`` and check wheter it's the right array. However this
|
||||
works pretty good, because in *slow* cases, the recursion detector and other
|
||||
settings will stop this process.
|
||||
|
||||
It is important to note that:
|
||||
|
||||
1. Array modfications work only in the current module.
|
||||
2. Jedi only checks Array additions; ``list.pop``, etc are ignored.
|
||||
"""
|
||||
import sys
|
||||
|
||||
from jedi import debug
|
||||
from jedi import settings
|
||||
from jedi._compatibility import force_unicode, is_py3
|
||||
from jedi.inference import compiled
|
||||
from jedi.inference import analysis
|
||||
from jedi.inference import recursion
|
||||
from jedi.inference.lazy_context import LazyKnownContext, LazyKnownContexts, \
|
||||
LazyTreeContext
|
||||
from jedi.inference.helpers import get_int_or_none, is_string, \
|
||||
predefine_names, infer_call_of_leaf, reraise_getitem_errors, \
|
||||
SimpleGetItemNotFound
|
||||
from jedi.inference.utils import safe_property, to_list
|
||||
from jedi.inference.cache import evaluator_method_cache
|
||||
from jedi.inference.filters import ParserTreeFilter, LazyAttributeOverwrite, \
|
||||
publish_method
|
||||
from jedi.inference.base_context import ContextSet, Context, NO_CONTEXTS, \
|
||||
TreeContext, ContextualizedNode, iterate_contexts, HelperContextMixin, _sentinel
|
||||
from jedi.parser_utils import get_sync_comp_fors
|
||||
|
||||
|
||||
class IterableMixin(object):
|
||||
def py__stop_iteration_returns(self):
|
||||
return ContextSet([compiled.builtin_from_name(self.evaluator, u'None')])
|
||||
|
||||
# At the moment, safe values are simple values like "foo", 1 and not
|
||||
# lists/dicts. Therefore as a small speed optimization we can just do the
|
||||
# default instead of resolving the lazy wrapped contexts, that are just
|
||||
# doing this in the end as well.
|
||||
# This mostly speeds up patterns like `sys.version_info >= (3, 0)` in
|
||||
# typeshed.
|
||||
if sys.version_info[0] == 2:
|
||||
# Python 2...........
|
||||
def get_safe_value(self, default=_sentinel):
|
||||
if default is _sentinel:
|
||||
raise ValueError("There exists no safe value for context %s" % self)
|
||||
return default
|
||||
else:
|
||||
get_safe_value = Context.get_safe_value
|
||||
|
||||
|
||||
class GeneratorBase(LazyAttributeOverwrite, IterableMixin):
|
||||
array_type = None
|
||||
|
||||
def _get_wrapped_context(self):
|
||||
generator, = self.evaluator.typing_module \
|
||||
.py__getattribute__('Generator') \
|
||||
.execute_annotation()
|
||||
return generator
|
||||
|
||||
def is_instance(self):
|
||||
return False
|
||||
|
||||
def py__bool__(self):
|
||||
return True
|
||||
|
||||
@publish_method('__iter__')
|
||||
def py__iter__(self, contextualized_node=None):
|
||||
return ContextSet([self])
|
||||
|
||||
@publish_method('send')
|
||||
@publish_method('next', python_version_match=2)
|
||||
@publish_method('__next__', python_version_match=3)
|
||||
def py__next__(self):
|
||||
return ContextSet.from_sets(lazy_context.infer() for lazy_context in self.py__iter__())
|
||||
|
||||
def py__stop_iteration_returns(self):
|
||||
return ContextSet([compiled.builtin_from_name(self.evaluator, u'None')])
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
return compiled.CompiledContextName(self, 'Generator')
|
||||
|
||||
|
||||
class Generator(GeneratorBase):
|
||||
"""Handling of `yield` functions."""
|
||||
def __init__(self, evaluator, func_execution_context):
|
||||
super(Generator, self).__init__(evaluator)
|
||||
self._func_execution_context = func_execution_context
|
||||
|
||||
def py__iter__(self, contextualized_node=None):
|
||||
return self._func_execution_context.get_yield_lazy_contexts()
|
||||
|
||||
def py__stop_iteration_returns(self):
|
||||
return self._func_execution_context.get_return_values()
|
||||
|
||||
def __repr__(self):
|
||||
return "<%s of %s>" % (type(self).__name__, self._func_execution_context)
|
||||
|
||||
|
||||
class CompForContext(TreeContext):
|
||||
@classmethod
|
||||
def from_comp_for(cls, parent_context, comp_for):
|
||||
return cls(parent_context.evaluator, parent_context, comp_for)
|
||||
|
||||
def get_filters(self, search_global=False, until_position=None, origin_scope=None):
|
||||
yield ParserTreeFilter(self.evaluator, self)
|
||||
|
||||
|
||||
def comprehension_from_atom(evaluator, context, atom):
|
||||
bracket = atom.children[0]
|
||||
test_list_comp = atom.children[1]
|
||||
|
||||
if bracket == '{':
|
||||
if atom.children[1].children[1] == ':':
|
||||
sync_comp_for = test_list_comp.children[3]
|
||||
if sync_comp_for.type == 'comp_for':
|
||||
sync_comp_for = sync_comp_for.children[1]
|
||||
|
||||
return DictComprehension(
|
||||
evaluator,
|
||||
context,
|
||||
sync_comp_for_node=sync_comp_for,
|
||||
key_node=test_list_comp.children[0],
|
||||
value_node=test_list_comp.children[2],
|
||||
)
|
||||
else:
|
||||
cls = SetComprehension
|
||||
elif bracket == '(':
|
||||
cls = GeneratorComprehension
|
||||
elif bracket == '[':
|
||||
cls = ListComprehension
|
||||
|
||||
sync_comp_for = test_list_comp.children[1]
|
||||
if sync_comp_for.type == 'comp_for':
|
||||
sync_comp_for = sync_comp_for.children[1]
|
||||
|
||||
return cls(
|
||||
evaluator,
|
||||
defining_context=context,
|
||||
sync_comp_for_node=sync_comp_for,
|
||||
entry_node=test_list_comp.children[0],
|
||||
)
|
||||
|
||||
|
||||
class ComprehensionMixin(object):
|
||||
@evaluator_method_cache()
|
||||
def _get_comp_for_context(self, parent_context, comp_for):
|
||||
return CompForContext.from_comp_for(parent_context, comp_for)
|
||||
|
||||
def _nested(self, comp_fors, parent_context=None):
|
||||
comp_for = comp_fors[0]
|
||||
|
||||
is_async = comp_for.parent.type == 'comp_for'
|
||||
|
||||
input_node = comp_for.children[3]
|
||||
parent_context = parent_context or self._defining_context
|
||||
input_types = parent_context.eval_node(input_node)
|
||||
# TODO: simulate await if self.is_async
|
||||
|
||||
cn = ContextualizedNode(parent_context, input_node)
|
||||
iterated = input_types.iterate(cn, is_async=is_async)
|
||||
exprlist = comp_for.children[1]
|
||||
for i, lazy_context in enumerate(iterated):
|
||||
types = lazy_context.infer()
|
||||
dct = unpack_tuple_to_dict(parent_context, types, exprlist)
|
||||
context_ = self._get_comp_for_context(
|
||||
parent_context,
|
||||
comp_for,
|
||||
)
|
||||
with predefine_names(context_, comp_for, dct):
|
||||
try:
|
||||
for result in self._nested(comp_fors[1:], context_):
|
||||
yield result
|
||||
except IndexError:
|
||||
iterated = context_.eval_node(self._entry_node)
|
||||
if self.array_type == 'dict':
|
||||
yield iterated, context_.eval_node(self._value_node)
|
||||
else:
|
||||
yield iterated
|
||||
|
||||
@evaluator_method_cache(default=[])
|
||||
@to_list
|
||||
def _iterate(self):
|
||||
comp_fors = tuple(get_sync_comp_fors(self._sync_comp_for_node))
|
||||
for result in self._nested(comp_fors):
|
||||
yield result
|
||||
|
||||
def py__iter__(self, contextualized_node=None):
|
||||
for set_ in self._iterate():
|
||||
yield LazyKnownContexts(set_)
|
||||
|
||||
def __repr__(self):
|
||||
return "<%s of %s>" % (type(self).__name__, self._sync_comp_for_node)
|
||||
|
||||
|
||||
class _DictMixin(object):
|
||||
def _get_generics(self):
|
||||
return tuple(c_set.py__class__() for c_set in self.get_mapping_item_contexts())
|
||||
|
||||
|
||||
class Sequence(LazyAttributeOverwrite, IterableMixin):
|
||||
api_type = u'instance'
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
return compiled.CompiledContextName(self, self.array_type)
|
||||
|
||||
def _get_generics(self):
|
||||
return (self.merge_types_of_iterate().py__class__(),)
|
||||
|
||||
def _get_wrapped_context(self):
|
||||
from jedi.inference.gradual.typing import GenericClass
|
||||
klass = compiled.builtin_from_name(self.evaluator, self.array_type)
|
||||
c, = GenericClass(klass, self._get_generics()).execute_annotation()
|
||||
return c
|
||||
|
||||
def py__bool__(self):
|
||||
return None # We don't know the length, because of appends.
|
||||
|
||||
def py__class__(self):
|
||||
return compiled.builtin_from_name(self.evaluator, self.array_type)
|
||||
|
||||
@safe_property
|
||||
def parent(self):
|
||||
return self.evaluator.builtins_module
|
||||
|
||||
def py__getitem__(self, index_context_set, contextualized_node):
|
||||
if self.array_type == 'dict':
|
||||
return self._dict_values()
|
||||
return iterate_contexts(ContextSet([self]))
|
||||
|
||||
|
||||
class _BaseComprehension(ComprehensionMixin):
|
||||
def __init__(self, evaluator, defining_context, sync_comp_for_node, entry_node):
|
||||
assert sync_comp_for_node.type == 'sync_comp_for'
|
||||
super(_BaseComprehension, self).__init__(evaluator)
|
||||
self._defining_context = defining_context
|
||||
self._sync_comp_for_node = sync_comp_for_node
|
||||
self._entry_node = entry_node
|
||||
|
||||
|
||||
class ListComprehension(_BaseComprehension, Sequence):
|
||||
array_type = u'list'
|
||||
|
||||
def py__simple_getitem__(self, index):
|
||||
if isinstance(index, slice):
|
||||
return ContextSet([self])
|
||||
|
||||
all_types = list(self.py__iter__())
|
||||
with reraise_getitem_errors(IndexError, TypeError):
|
||||
lazy_context = all_types[index]
|
||||
return lazy_context.infer()
|
||||
|
||||
|
||||
class SetComprehension(_BaseComprehension, Sequence):
|
||||
array_type = u'set'
|
||||
|
||||
|
||||
class GeneratorComprehension(_BaseComprehension, GeneratorBase):
|
||||
pass
|
||||
|
||||
|
||||
class DictComprehension(ComprehensionMixin, Sequence):
|
||||
array_type = u'dict'
|
||||
|
||||
def __init__(self, evaluator, defining_context, sync_comp_for_node, key_node, value_node):
|
||||
assert sync_comp_for_node.type == 'sync_comp_for'
|
||||
super(DictComprehension, self).__init__(evaluator)
|
||||
self._defining_context = defining_context
|
||||
self._sync_comp_for_node = sync_comp_for_node
|
||||
self._entry_node = key_node
|
||||
self._value_node = value_node
|
||||
|
||||
def py__iter__(self, contextualized_node=None):
|
||||
for keys, values in self._iterate():
|
||||
yield LazyKnownContexts(keys)
|
||||
|
||||
def py__simple_getitem__(self, index):
|
||||
for keys, values in self._iterate():
|
||||
for k in keys:
|
||||
if isinstance(k, compiled.CompiledObject):
|
||||
# Be careful in the future if refactoring, index could be a
|
||||
# slice.
|
||||
if k.get_safe_value(default=object()) == index:
|
||||
return values
|
||||
raise SimpleGetItemNotFound()
|
||||
|
||||
def _dict_keys(self):
|
||||
return ContextSet.from_sets(keys for keys, values in self._iterate())
|
||||
|
||||
def _dict_values(self):
|
||||
return ContextSet.from_sets(values for keys, values in self._iterate())
|
||||
|
||||
@publish_method('values')
|
||||
def _imitate_values(self):
|
||||
lazy_context = LazyKnownContexts(self._dict_values())
|
||||
return ContextSet([FakeSequence(self.evaluator, u'list', [lazy_context])])
|
||||
|
||||
@publish_method('items')
|
||||
def _imitate_items(self):
|
||||
lazy_contexts = [
|
||||
LazyKnownContext(
|
||||
FakeSequence(
|
||||
self.evaluator,
|
||||
u'tuple',
|
||||
[LazyKnownContexts(key),
|
||||
LazyKnownContexts(value)]
|
||||
)
|
||||
)
|
||||
for key, value in self._iterate()
|
||||
]
|
||||
|
||||
return ContextSet([FakeSequence(self.evaluator, u'list', lazy_contexts)])
|
||||
|
||||
def get_mapping_item_contexts(self):
|
||||
return self._dict_keys(), self._dict_values()
|
||||
|
||||
def exact_key_items(self):
|
||||
# NOTE: A smarter thing can probably done here to achieve better
|
||||
# completions, but at least like this jedi doesn't crash
|
||||
return []
|
||||
|
||||
|
||||
class SequenceLiteralContext(Sequence):
|
||||
_TUPLE_LIKE = 'testlist_star_expr', 'testlist', 'subscriptlist'
|
||||
mapping = {'(': u'tuple',
|
||||
'[': u'list',
|
||||
'{': u'set'}
|
||||
|
||||
def __init__(self, evaluator, defining_context, atom):
|
||||
super(SequenceLiteralContext, self).__init__(evaluator)
|
||||
self.atom = atom
|
||||
self._defining_context = defining_context
|
||||
|
||||
if self.atom.type in self._TUPLE_LIKE:
|
||||
self.array_type = u'tuple'
|
||||
else:
|
||||
self.array_type = SequenceLiteralContext.mapping[atom.children[0]]
|
||||
"""The builtin name of the array (list, set, tuple or dict)."""
|
||||
|
||||
def py__simple_getitem__(self, index):
|
||||
"""Here the index is an int/str. Raises IndexError/KeyError."""
|
||||
if self.array_type == u'dict':
|
||||
compiled_obj_index = compiled.create_simple_object(self.evaluator, index)
|
||||
for key, value in self.get_tree_entries():
|
||||
for k in self._defining_context.eval_node(key):
|
||||
try:
|
||||
method = k.execute_operation
|
||||
except AttributeError:
|
||||
pass
|
||||
else:
|
||||
if method(compiled_obj_index, u'==').get_safe_value():
|
||||
return self._defining_context.eval_node(value)
|
||||
raise SimpleGetItemNotFound('No key found in dictionary %s.' % self)
|
||||
|
||||
if isinstance(index, slice):
|
||||
return ContextSet([self])
|
||||
else:
|
||||
with reraise_getitem_errors(TypeError, KeyError, IndexError):
|
||||
node = self.get_tree_entries()[index]
|
||||
return self._defining_context.eval_node(node)
|
||||
|
||||
def py__iter__(self, contextualized_node=None):
|
||||
"""
|
||||
While values returns the possible values for any array field, this
|
||||
function returns the value for a certain index.
|
||||
"""
|
||||
if self.array_type == u'dict':
|
||||
# Get keys.
|
||||
types = NO_CONTEXTS
|
||||
for k, _ in self.get_tree_entries():
|
||||
types |= self._defining_context.eval_node(k)
|
||||
# We don't know which dict index comes first, therefore always
|
||||
# yield all the types.
|
||||
for _ in types:
|
||||
yield LazyKnownContexts(types)
|
||||
else:
|
||||
for node in self.get_tree_entries():
|
||||
if node == ':' or node.type == 'subscript':
|
||||
# TODO this should probably use at least part of the code
|
||||
# of eval_subscript_list.
|
||||
yield LazyKnownContext(Slice(self._defining_context, None, None, None))
|
||||
else:
|
||||
yield LazyTreeContext(self._defining_context, node)
|
||||
for addition in check_array_additions(self._defining_context, self):
|
||||
yield addition
|
||||
|
||||
def py__len__(self):
|
||||
# This function is not really used often. It's more of a try.
|
||||
return len(self.get_tree_entries())
|
||||
|
||||
def _dict_values(self):
|
||||
return ContextSet.from_sets(
|
||||
self._defining_context.eval_node(v)
|
||||
for k, v in self.get_tree_entries()
|
||||
)
|
||||
|
||||
def get_tree_entries(self):
|
||||
c = self.atom.children
|
||||
|
||||
if self.atom.type in self._TUPLE_LIKE:
|
||||
return c[::2]
|
||||
|
||||
array_node = c[1]
|
||||
if array_node in (']', '}', ')'):
|
||||
return [] # Direct closing bracket, doesn't contain items.
|
||||
|
||||
if array_node.type == 'testlist_comp':
|
||||
# filter out (for now) pep 448 single-star unpacking
|
||||
return [value for value in array_node.children[::2]
|
||||
if value.type != "star_expr"]
|
||||
elif array_node.type == 'dictorsetmaker':
|
||||
kv = []
|
||||
iterator = iter(array_node.children)
|
||||
for key in iterator:
|
||||
if key == "**":
|
||||
# dict with pep 448 double-star unpacking
|
||||
# for now ignoring the values imported by **
|
||||
next(iterator)
|
||||
next(iterator, None) # Possible comma.
|
||||
else:
|
||||
op = next(iterator, None)
|
||||
if op is None or op == ',':
|
||||
if key.type == "star_expr":
|
||||
# pep 448 single-star unpacking
|
||||
# for now ignoring values imported by *
|
||||
pass
|
||||
else:
|
||||
kv.append(key) # A set.
|
||||
else:
|
||||
assert op == ':' # A dict.
|
||||
kv.append((key, next(iterator)))
|
||||
next(iterator, None) # Possible comma.
|
||||
return kv
|
||||
else:
|
||||
if array_node.type == "star_expr":
|
||||
# pep 448 single-star unpacking
|
||||
# for now ignoring values imported by *
|
||||
return []
|
||||
else:
|
||||
return [array_node]
|
||||
|
||||
def exact_key_items(self):
|
||||
"""
|
||||
Returns a generator of tuples like dict.items(), where the key is
|
||||
resolved (as a string) and the values are still lazy contexts.
|
||||
"""
|
||||
for key_node, value in self.get_tree_entries():
|
||||
for key in self._defining_context.eval_node(key_node):
|
||||
if is_string(key):
|
||||
yield key.get_safe_value(), LazyTreeContext(self._defining_context, value)
|
||||
|
||||
def __repr__(self):
|
||||
return "<%s of %s>" % (self.__class__.__name__, self.atom)
|
||||
|
||||
|
||||
class DictLiteralContext(_DictMixin, SequenceLiteralContext):
|
||||
array_type = u'dict'
|
||||
|
||||
def __init__(self, evaluator, defining_context, atom):
|
||||
super(SequenceLiteralContext, self).__init__(evaluator)
|
||||
self._defining_context = defining_context
|
||||
self.atom = atom
|
||||
|
||||
@publish_method('values')
|
||||
def _imitate_values(self):
|
||||
lazy_context = LazyKnownContexts(self._dict_values())
|
||||
return ContextSet([FakeSequence(self.evaluator, u'list', [lazy_context])])
|
||||
|
||||
@publish_method('items')
|
||||
def _imitate_items(self):
|
||||
lazy_contexts = [
|
||||
LazyKnownContext(FakeSequence(
|
||||
self.evaluator, u'tuple',
|
||||
(LazyTreeContext(self._defining_context, key_node),
|
||||
LazyTreeContext(self._defining_context, value_node))
|
||||
)) for key_node, value_node in self.get_tree_entries()
|
||||
]
|
||||
|
||||
return ContextSet([FakeSequence(self.evaluator, u'list', lazy_contexts)])
|
||||
|
||||
def _dict_keys(self):
|
||||
return ContextSet.from_sets(
|
||||
self._defining_context.eval_node(k)
|
||||
for k, v in self.get_tree_entries()
|
||||
)
|
||||
|
||||
def get_mapping_item_contexts(self):
|
||||
return self._dict_keys(), self._dict_values()
|
||||
|
||||
|
||||
class _FakeArray(SequenceLiteralContext):
|
||||
def __init__(self, evaluator, container, type):
|
||||
super(SequenceLiteralContext, self).__init__(evaluator)
|
||||
self.array_type = type
|
||||
self.atom = container
|
||||
# TODO is this class really needed?
|
||||
|
||||
|
||||
class FakeSequence(_FakeArray):
|
||||
def __init__(self, evaluator, array_type, lazy_context_list):
|
||||
"""
|
||||
type should be one of "tuple", "list"
|
||||
"""
|
||||
super(FakeSequence, self).__init__(evaluator, None, array_type)
|
||||
self._lazy_context_list = lazy_context_list
|
||||
|
||||
def py__simple_getitem__(self, index):
|
||||
if isinstance(index, slice):
|
||||
return ContextSet([self])
|
||||
|
||||
with reraise_getitem_errors(IndexError, TypeError):
|
||||
lazy_context = self._lazy_context_list[index]
|
||||
return lazy_context.infer()
|
||||
|
||||
def py__iter__(self, contextualized_node=None):
|
||||
return self._lazy_context_list
|
||||
|
||||
def py__bool__(self):
|
||||
return bool(len(self._lazy_context_list))
|
||||
|
||||
def __repr__(self):
|
||||
return "<%s of %s>" % (type(self).__name__, self._lazy_context_list)
|
||||
|
||||
|
||||
class FakeDict(_DictMixin, _FakeArray):
    def __init__(self, evaluator, dct):
        super(FakeDict, self).__init__(evaluator, dct, u'dict')
        self._dct = dct

    def py__iter__(self, contextualized_node=None):
        for key in self._dct:
            yield LazyKnownContext(compiled.create_simple_object(self.evaluator, key))

    def py__simple_getitem__(self, index):
        if is_py3 and self.evaluator.environment.version_info.major == 2:
            # In Python 2, bytes and unicode compare equal to each other.
            if isinstance(index, bytes):
                index_unicode = force_unicode(index)
                try:
                    return self._dct[index_unicode].infer()
                except KeyError:
                    pass
            elif isinstance(index, str):
                index_bytes = index.encode('utf-8')
                try:
                    return self._dct[index_bytes].infer()
                except KeyError:
                    pass

        with reraise_getitem_errors(KeyError, TypeError):
            lazy_context = self._dct[index]
        return lazy_context.infer()

    @publish_method('values')
    def _values(self):
        return ContextSet([FakeSequence(
            self.evaluator, u'tuple',
            [LazyKnownContexts(self._dict_values())]
        )])

    def _dict_values(self):
        return ContextSet.from_sets(lazy_context.infer() for lazy_context in self._dct.values())

    def _dict_keys(self):
        return ContextSet.from_sets(lazy_context.infer() for lazy_context in self.py__iter__())

    def get_mapping_item_contexts(self):
        return self._dict_keys(), self._dict_values()

    def exact_key_items(self):
        return self._dct.items()


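# Illustrative sketch (hypothetical values; only the helpers defined above are
# assumed to exist): a FakeDict wraps a plain dict of key -> lazy context, so
# constructing one might look roughly like this:
#
#     str_obj = compiled.create_simple_object(evaluator, u'default')
#     fake = FakeDict(evaluator, {u'key': LazyKnownContext(str_obj)})
#     fake.py__simple_getitem__(u'key')   # context set containing str_obj
#     list(fake.exact_key_items())        # [(u'key', <LazyKnownContext>)]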
class MergedArray(_FakeArray):
    def __init__(self, evaluator, arrays):
        super(MergedArray, self).__init__(evaluator, arrays, arrays[-1].array_type)
        self._arrays = arrays

    def py__iter__(self, contextualized_node=None):
        for array in self._arrays:
            for lazy_context in array.py__iter__():
                yield lazy_context

    def py__simple_getitem__(self, index):
        return ContextSet.from_sets(lazy_context.infer() for lazy_context in self.py__iter__())

    def get_tree_entries(self):
        for array in self._arrays:
            for a in array.get_tree_entries():
                yield a

    def __len__(self):
        return sum(len(a) for a in self._arrays)


def unpack_tuple_to_dict(context, types, exprlist):
    """
    Unpacking tuple assignments in for statements and expr_stmts.
    """
    if exprlist.type == 'name':
        return {exprlist.value: types}
    elif exprlist.type == 'atom' and exprlist.children[0] in ('(', '['):
        return unpack_tuple_to_dict(context, types, exprlist.children[1])
    elif exprlist.type in ('testlist', 'testlist_comp', 'exprlist',
                           'testlist_star_expr'):
        dct = {}
        parts = iter(exprlist.children[::2])
        n = 0
        for lazy_context in types.iterate(exprlist):
            n += 1
            try:
                part = next(parts)
            except StopIteration:
                # TODO this context is probably not right.
                analysis.add(context, 'value-error-too-many-values', part,
                             message="ValueError: too many values to unpack (expected %s)" % n)
            else:
                dct.update(unpack_tuple_to_dict(context, lazy_context.infer(), part))
        has_parts = next(parts, None)
        if types and has_parts is not None:
            # TODO this context is probably not right.
            analysis.add(context, 'value-error-too-few-values', has_parts,
                         message="ValueError: need more than %s values to unpack" % n)
        return dct
    elif exprlist.type == 'power' or exprlist.type == 'atom_expr':
        # Something like ``arr[x], var = ...``.
        # This is something that is not yet supported, would also be difficult
        # to write into a dict.
        return {}
    elif exprlist.type == 'star_expr':  # `a, *b, c = x` type unpackings
        # Currently we're not supporting them.
        return {}
    raise NotImplementedError


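# Illustrative sketch (the assignment below is hypothetical): for a statement
# like ``a, (b, c) = x`` the parser produces nested exprlist/atom nodes, and
# unpack_tuple_to_dict recursively maps every plain name to the context set it
# receives, roughly ``{'a': <ContextSet>, 'b': <ContextSet>, 'c': <ContextSet>}``.
# Star targets (``a, *b = x``) and subscripted targets (``arr[0], b = x``)
# currently fall through to the empty-dict branches above.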
def check_array_additions(context, sequence):
    """ Just a mapper function for the internal _check_array_additions """
    if sequence.array_type not in ('list', 'set'):
        # TODO also check for dict updates
        return NO_CONTEXTS

    return _check_array_additions(context, sequence)


@evaluator_method_cache(default=NO_CONTEXTS)
@debug.increase_indent
def _check_array_additions(context, sequence):
    """
    Checks if an `Array` has "add" (append, insert, extend) statements:

    >>> a = [""]
    >>> a.append(1)
    """
    from jedi.inference import arguments

    debug.dbg('Dynamic array search for %s' % sequence, color='MAGENTA')
    module_context = context.get_root_context()
    if not settings.dynamic_array_additions or isinstance(module_context, compiled.CompiledObject):
        debug.dbg('Dynamic array search aborted.', color='MAGENTA')
        return NO_CONTEXTS

    def find_additions(context, arglist, add_name):
        params = list(arguments.TreeArguments(context.evaluator, context, arglist).unpack())
        result = set()
        if add_name in ['insert']:
            params = params[1:]
        if add_name in ['append', 'add', 'insert']:
            for key, lazy_context in params:
                result.add(lazy_context)
        elif add_name in ['extend', 'update']:
            for key, lazy_context in params:
                result |= set(lazy_context.infer().iterate())
        return result

    temp_param_add, settings.dynamic_params_for_other_modules = \
        settings.dynamic_params_for_other_modules, False

    is_list = sequence.name.string_name == 'list'
    search_names = (['append', 'extend', 'insert'] if is_list else ['add', 'update'])

    added_types = set()
    for add_name in search_names:
        try:
            possible_names = module_context.tree_node.get_used_names()[add_name]
        except KeyError:
            continue
        else:
            for name in possible_names:
                context_node = context.tree_node
                if not (context_node.start_pos < name.start_pos < context_node.end_pos):
                    continue
                trailer = name.parent
                power = trailer.parent
                trailer_pos = power.children.index(trailer)
                try:
                    execution_trailer = power.children[trailer_pos + 1]
                except IndexError:
                    continue
                else:
                    if execution_trailer.type != 'trailer' \
                            or execution_trailer.children[0] != '(' \
                            or execution_trailer.children[1] == ')':
                        continue

                random_context = context.create_context(name)

                with recursion.execution_allowed(context.evaluator, power) as allowed:
                    if allowed:
                        found = infer_call_of_leaf(
                            random_context,
                            name,
                            cut_own_trailer=True
                        )
                        if sequence in found:
                            # The arrays match. Now add the results
                            added_types |= find_additions(
                                random_context,
                                execution_trailer.children[1],
                                add_name
                            )

    # reset settings
    settings.dynamic_params_for_other_modules = temp_param_add
    debug.dbg('Dynamic array result %s' % added_types, color='MAGENTA')
    return added_types


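# Illustrative sketch (hypothetical user code): the dynamic array search above
# is what lets completions work on code such as
#
#     lst = []
#     lst.append(some_object)
#     lst[0].   # completions of ``some_object`` are offered here
#
# because the contexts passed to ``append``/``extend``/``insert`` (or
# ``add``/``update`` for sets) are merged into the sequence's contents.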
def get_dynamic_array_instance(instance, arguments):
    """Used for set() and list() instances."""
    ai = _ArrayInstance(instance, arguments)
    from jedi.inference import arguments
    return arguments.ValuesArguments([ContextSet([ai])])


class _ArrayInstance(HelperContextMixin):
    """
    Used for the usage of set() and list().
    This is definitely a hack, but a good one :-)
    It makes it possible to use set/list conversions.
    """
    def __init__(self, instance, var_args):
        self.instance = instance
        self.var_args = var_args

    def py__class__(self):
        tuple_, = self.instance.evaluator.builtins_module.py__getattribute__('tuple')
        return tuple_

    def py__iter__(self, contextualized_node=None):
        var_args = self.var_args
        try:
            _, lazy_context = next(var_args.unpack())
        except StopIteration:
            pass
        else:
            for lazy in lazy_context.infer().iterate():
                yield lazy

        from jedi.inference import arguments
        if isinstance(var_args, arguments.TreeArguments):
            additions = _check_array_additions(var_args.context, self.instance)
            for addition in additions:
                yield addition

    def iterate(self, contextualized_node=None, is_async=False):
        return self.py__iter__(contextualized_node)


class Slice(object):
    def __init__(self, context, start, stop, step):
        self._context = context
        self._slice_object = None
        # All of them are either a Precedence or None.
        self._start = start
        self._stop = stop
        self._step = step

    def __getattr__(self, name):
        if self._slice_object is None:
            context = compiled.builtin_from_name(self._context.evaluator, 'slice')
            self._slice_object, = context.execute_with_values()
        return getattr(self._slice_object, name)

    @property
    def obj(self):
        """
        Imitate CompiledObject.obj behavior and return a ``builtin.slice()``
        object.
        """
        def get(element):
            if element is None:
                return None

            result = self._context.eval_node(element)
            if len(result) != 1:
                # For simplicity, we want slices to be clearly defined with
                # just one type. Otherwise we will return an empty slice object.
                raise IndexError

            context, = result
            return get_int_or_none(context)

        try:
            return slice(get(self._start), get(self._stop), get(self._step))
        except IndexError:
            return slice(None, None, None)

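# Illustrative sketch (hypothetical subscript): for ``x[1:10:2]`` the three
# slice nodes are evaluated individually, so Slice.obj returns roughly
# ``slice(1, 10, 2)``. If any component is ambiguous (more than one inferred
# type) the whole thing degrades to ``slice(None, None, None)``.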
340
jedi/inference/context/klass.py
Normal file
@@ -0,0 +1,340 @@
"""
|
||||
Like described in the :mod:`parso.python.tree` module,
|
||||
there's a need for an ast like module to represent the states of parsed
|
||||
modules.
|
||||
|
||||
But now there are also structures in Python that need a little bit more than
|
||||
that. An ``Instance`` for example is only a ``Class`` before it is
|
||||
instantiated. This class represents these cases.
|
||||
|
||||
So, why is there also a ``Class`` class here? Well, there are decorators and
|
||||
they change classes in Python 3.
|
||||
|
||||
Representation modules also define "magic methods". Those methods look like
|
||||
``py__foo__`` and are typically mappable to the Python equivalents ``__call__``
|
||||
and others. Here's a list:
|
||||
|
||||
====================================== ========================================
|
||||
**Method** **Description**
|
||||
-------------------------------------- ----------------------------------------
|
||||
py__call__(arguments: Array) On callable objects, returns types.
|
||||
py__bool__() Returns True/False/None; None means that
|
||||
there's no certainty.
|
||||
py__bases__() Returns a list of base classes.
|
||||
py__iter__() Returns a generator of a set of types.
|
||||
py__class__() Returns the class of an instance.
|
||||
py__simple_getitem__(index: int/str) Returns a a set of types of the index.
|
||||
Can raise an IndexError/KeyError.
|
||||
py__getitem__(indexes: ContextSet) Returns a a set of types of the index.
|
||||
py__file__() Only on modules. Returns None if does
|
||||
not exist.
|
||||
py__package__() -> List[str] Only on modules. For the import system.
|
||||
py__path__() Only on modules. For the import system.
|
||||
py__get__(call_object) Only on instances. Simulates
|
||||
descriptors.
|
||||
py__doc__() Returns the docstring for a context.
|
||||
====================================== ========================================
|
||||
|
||||
"""
|
||||
from jedi import debug
|
||||
from jedi._compatibility import use_metaclass
|
||||
from jedi.parser_utils import get_cached_parent_scope
|
||||
from jedi.inference.cache import evaluator_method_cache, CachedMetaClass, \
|
||||
evaluator_method_generator_cache
|
||||
from jedi.inference import compiled
|
||||
from jedi.inference.lazy_context import LazyKnownContexts
|
||||
from jedi.inference.filters import ParserTreeFilter
|
||||
from jedi.inference.names import TreeNameDefinition, ContextName
|
||||
from jedi.inference.arguments import unpack_arglist, ValuesArguments
|
||||
from jedi.inference.base_context import ContextSet, iterator_to_context_set, \
|
||||
NO_CONTEXTS
|
||||
from jedi.inference.context.function import FunctionAndClassBase
|
||||
from jedi.plugins import plugin_manager
|
||||
|
||||
|
||||
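# Illustrative sketch (hypothetical class, not from the source): for
#
#     class Foo(Base):
#         """docs"""
#
# the corresponding ClassContext roughly answers py__name__() -> 'Foo',
# py__bases__() -> [<lazy Base>], py__call__() -> a TreeInstance of Foo and
# py__doc__() -> 'docs'. The table in the module docstring above describes
# this protocol in general terms.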
def apply_py__get__(context, instance, class_context):
    try:
        method = context.py__get__
    except AttributeError:
        yield context
    else:
        for descriptor_context in method(instance, class_context):
            yield descriptor_context


class ClassName(TreeNameDefinition):
    def __init__(self, parent_context, tree_name, name_context, apply_decorators):
        super(ClassName, self).__init__(parent_context, tree_name)
        self._name_context = name_context
        self._apply_decorators = apply_decorators

    @iterator_to_context_set
    def infer(self):
        # We're using a different context to infer, so we cannot call super().
        from jedi.inference.syntax_tree import tree_name_to_contexts
        inferred = tree_name_to_contexts(
            self.parent_context.evaluator, self._name_context, self.tree_name)

        for result_context in inferred:
            if self._apply_decorators:
                for c in apply_py__get__(result_context,
                                         instance=None,
                                         class_context=self.parent_context):
                    yield c
            else:
                yield result_context


class ClassFilter(ParserTreeFilter):
    name_class = ClassName

    def __init__(self, *args, **kwargs):
        self._is_instance = kwargs.pop('is_instance')  # Python 2 :/
        super(ClassFilter, self).__init__(*args, **kwargs)

    def _convert_names(self, names):
        return [
            self.name_class(
                parent_context=self.context,
                tree_name=name,
                name_context=self._node_context,
                apply_decorators=not self._is_instance,
            ) for name in names
        ]

    def _equals_origin_scope(self):
        node = self._origin_scope
        while node is not None:
            if node == self._parser_scope or node == self.context:
                return True
            node = get_cached_parent_scope(self._used_names, node)
        return False

    def _access_possible(self, name, from_instance=False):
        # Filter for ClassVar variables
        # TODO this is not properly done, yet. It just checks for the string
        #      ClassVar in the annotation, which can be quite imprecise. If we
        #      wanted to do this correctly, we would have to resolve the
        #      ClassVar.
        if not from_instance:
            expr_stmt = name.get_definition()
            if expr_stmt is not None and expr_stmt.type == 'expr_stmt':
                annassign = expr_stmt.children[1]
                if annassign.type == 'annassign':
                    # TODO this is not proper matching
                    if 'ClassVar' not in annassign.children[1].get_code():
                        return False

        # Filter for name mangling of private variables like __foo
        return not name.value.startswith('__') or name.value.endswith('__') \
            or self._equals_origin_scope()

    def _filter(self, names, from_instance=False):
        names = super(ClassFilter, self)._filter(names)
        return [name for name in names if self._access_possible(name, from_instance)]


class ClassMixin(object):
    def is_class(self):
        return True

    def py__call__(self, arguments=None):
        from jedi.inference.context import TreeInstance
        if arguments is None:
            arguments = ValuesArguments([])
        return ContextSet([TreeInstance(self.evaluator, self.parent_context, self, arguments)])

    def py__class__(self):
        return compiled.builtin_from_name(self.evaluator, u'type')

    @property
    def name(self):
        return ContextName(self, self.tree_node.name)

    def py__name__(self):
        return self.name.string_name

    def get_param_names(self):
        for context_ in self.py__getattribute__(u'__init__'):
            if context_.is_function():
                return list(context_.get_param_names())[1:]
        return []

    @evaluator_method_generator_cache()
    def py__mro__(self):
        mro = [self]
        yield self
        # TODO Do a proper mro resolution. Currently we are just listing
        # classes. However, it's a complicated algorithm.
        for lazy_cls in self.py__bases__():
            # TODO there are multiple different mro paths possible if this
            # yields multiple possibilities. Could be changed to be more
            # correct.
            for cls in lazy_cls.infer():
                # TODO detect for TypeError: duplicate base class str,
                # e.g. `class X(str, str): pass`
                try:
                    mro_method = cls.py__mro__
                except AttributeError:
                    # TODO add a TypeError like:
                    """
                    >>> class Y(lambda: test): pass
                    Traceback (most recent call last):
                      File "<stdin>", line 1, in <module>
                    TypeError: function() argument 1 must be code, not str
                    >>> class Y(1): pass
                    Traceback (most recent call last):
                      File "<stdin>", line 1, in <module>
                    TypeError: int() takes at most 2 arguments (3 given)
                    """
                    debug.warning('Super class of %s is not a class: %s', self, cls)
                else:
                    for cls_new in mro_method():
                        if cls_new not in mro:
                            mro.append(cls_new)
                            yield cls_new

    def get_filters(self, search_global=False, until_position=None,
                    origin_scope=None, is_instance=False):
        metaclasses = self.get_metaclasses()
        if metaclasses:
            for f in self.get_metaclass_filters(metaclasses):
                yield f

        if search_global:
            yield self.get_global_filter(until_position, origin_scope)
        else:
            for cls in self.py__mro__():
                if isinstance(cls, compiled.CompiledObject):
                    for filter in cls.get_filters(is_instance=is_instance):
                        yield filter
                else:
                    yield ClassFilter(
                        self.evaluator, self, node_context=cls,
                        origin_scope=origin_scope,
                        is_instance=is_instance
                    )
        if not is_instance:
            from jedi.inference.compiled import builtin_from_name
            type_ = builtin_from_name(self.evaluator, u'type')
            assert isinstance(type_, ClassContext)
            if type_ != self:
                for instance in type_.py__call__():
                    instance_filters = instance.get_filters()
                    # Filter out self filters
                    next(instance_filters)
                    next(instance_filters)
                    yield next(instance_filters)

    def get_signatures(self):
        init_funcs = self.py__call__().py__getattribute__('__init__')
        return [sig.bind(self) for sig in init_funcs.get_signatures()]

    def get_global_filter(self, until_position=None, origin_scope=None):
        return ParserTreeFilter(
            self.evaluator,
            context=self,
            until_position=until_position,
            origin_scope=origin_scope
        )


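# Illustrative sketch (hypothetical hierarchy): py__mro__ above is a
# simplified linearization, not a full C3 resolution. For
#
#     class A: pass
#     class B(A): pass
#     class C(B, A): pass
#
# the ClassContext of ``C`` would yield roughly [C, B, A, object],
# deduplicating classes in the order the bases are discovered.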
class ClassContext(use_metaclass(CachedMetaClass, ClassMixin, FunctionAndClassBase)):
    api_type = u'class'

    @evaluator_method_cache()
    def list_type_vars(self):
        found = []
        arglist = self.tree_node.get_super_arglist()
        if arglist is None:
            return []

        for stars, node in unpack_arglist(arglist):
            if stars:
                continue  # These are not relevant for this search.

            from jedi.inference.gradual.annotation import find_unknown_type_vars
            for type_var in find_unknown_type_vars(self.parent_context, node):
                if type_var not in found:
                    # The order matters and it's therefore a list.
                    found.append(type_var)
        return found

    def _get_bases_arguments(self):
        arglist = self.tree_node.get_super_arglist()
        if arglist:
            from jedi.inference import arguments
            return arguments.TreeArguments(self.evaluator, self.parent_context, arglist)
        return None

    @evaluator_method_cache(default=())
    def py__bases__(self):
        args = self._get_bases_arguments()
        if args is not None:
            lst = [value for key, value in args.unpack() if key is None]
            if lst:
                return lst

        if self.py__name__() == 'object' \
                and self.parent_context == self.evaluator.builtins_module:
            return []
        return [LazyKnownContexts(
            self.evaluator.builtins_module.py__getattribute__('object')
        )]

    def py__getitem__(self, index_context_set, contextualized_node):
        from jedi.inference.gradual.typing import LazyGenericClass
        if not index_context_set:
            return ContextSet([self])
        return ContextSet(
            LazyGenericClass(
                self,
                index_context,
                context_of_index=contextualized_node.context,
            )
            for index_context in index_context_set
        )

    def define_generics(self, type_var_dict):
        from jedi.inference.gradual.typing import GenericClass

        def remap_type_vars():
            """
            The TypeVars in the resulting classes sometimes have different
            names and we need to check for that, e.g. a signature can be:

            def iter(iterable: Iterable[_T]) -> Iterator[_T]: ...

            However, the iterator is defined as Iterator[_T_co], which means it
            has a different type var name.
            """
            for type_var in self.list_type_vars():
                yield type_var_dict.get(type_var.py__name__(), NO_CONTEXTS)

        if type_var_dict:
            return ContextSet([GenericClass(
                self,
                generics=tuple(remap_type_vars())
            )])
        return ContextSet({self})

    @plugin_manager.decorate()
    def get_metaclass_filters(self, metaclass):
        debug.dbg('Unprocessed metaclass %s', metaclass)
        return []

    @evaluator_method_cache(default=NO_CONTEXTS)
    def get_metaclasses(self):
        args = self._get_bases_arguments()
        if args is not None:
            m = [value for key, value in args.unpack() if key == 'metaclass']
            metaclasses = ContextSet.from_sets(lazy_context.infer() for lazy_context in m)
            metaclasses = ContextSet(m for m in metaclasses if m.is_class())
            if metaclasses:
                return metaclasses

        for lazy_base in self.py__bases__():
            for context in lazy_base.infer():
                if context.is_class():
                    contexts = context.get_metaclasses()
                    if contexts:
                        return contexts
        return NO_CONTEXTS
283
jedi/inference/context/module.py
Normal file
@@ -0,0 +1,283 @@
import re
import os

from jedi import debug
from jedi.inference.cache import evaluator_method_cache
from jedi.inference.names import ContextNameMixin, AbstractNameDefinition
from jedi.inference.filters import GlobalNameFilter, ParserTreeFilter, DictFilter, MergedFilter
from jedi.inference import compiled
from jedi.inference.base_context import TreeContext
from jedi.inference.names import SubModuleName
from jedi.inference.helpers import contexts_from_qualified_names
from jedi.inference.compiled import create_simple_object
from jedi.inference.base_context import ContextSet


class _ModuleAttributeName(AbstractNameDefinition):
    """
    For module attributes like __file__, __str__ and so on.
    """
    api_type = u'instance'

    def __init__(self, parent_module, string_name, string_value=None):
        self.parent_context = parent_module
        self.string_name = string_name
        self._string_value = string_value

    def infer(self):
        if self._string_value is not None:
            s = self._string_value
            if self.parent_context.evaluator.environment.version_info.major == 2 \
                    and not isinstance(s, bytes):
                s = s.encode('utf-8')
            return ContextSet([
                create_simple_object(self.parent_context.evaluator, s)
            ])
        return compiled.get_string_context_set(self.parent_context.evaluator)


class ModuleName(ContextNameMixin, AbstractNameDefinition):
    start_pos = 1, 0

    def __init__(self, context, name):
        self._context = context
        self._name = name

    @property
    def string_name(self):
        return self._name


def iter_module_names(evaluator, paths):
    # Python modules/packages
    for n in evaluator.compiled_subprocess.list_module_names(paths):
        yield n

    for path in paths:
        try:
            dirs = os.listdir(path)
        except OSError:
            # The file might not exist or reading it might lead to an error.
            debug.warning("Not possible to list directory: %s", path)
            continue
        for name in dirs:
            # Namespaces
            if os.path.isdir(os.path.join(path, name)):
                # pycache is obviously not an interesting namespace. Also the
                # name must be a valid identifier.
                # TODO use str.isidentifier, once Python 2 is removed
                if name != '__pycache__' and not re.search(r'\W|^\d', name):
                    yield name
            # Stub files
            if name.endswith('.pyi'):
                if name != '__init__.pyi':
                    yield name[:-4]


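# Illustrative sketch (hypothetical directory contents): for a search path
# containing ``pkg/``, ``mod.py``, ``stub.pyi`` and ``__pycache__/``,
# iter_module_names would yield roughly ``pkg`` and ``mod`` (via the compiled
# subprocess) plus ``stub`` from the stub-file branch, while ``__pycache__``
# is skipped.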
class SubModuleDictMixin(object):
    @evaluator_method_cache()
    def sub_modules_dict(self):
        """
        Lists modules in the directory of this module (if this module is a
        package).
        """
        names = {}
        try:
            method = self.py__path__
        except AttributeError:
            pass
        else:
            mods = iter_module_names(self.evaluator, method())
            for name in mods:
                # It's obviously a relative import to the current module.
                names[name] = SubModuleName(self, name)

        # In the case of an import like `from x.` we don't need to
        # add all the variables, this is only about submodules.
        return names


class ModuleMixin(SubModuleDictMixin):
    def get_filters(self, search_global=False, until_position=None, origin_scope=None):
        yield MergedFilter(
            ParserTreeFilter(
                self.evaluator,
                context=self,
                until_position=until_position,
                origin_scope=origin_scope
            ),
            GlobalNameFilter(self, self.tree_node),
        )
        yield DictFilter(self.sub_modules_dict())
        yield DictFilter(self._module_attributes_dict())
        for star_filter in self.iter_star_filters():
            yield star_filter

    def py__class__(self):
        c, = contexts_from_qualified_names(self.evaluator, u'types', u'ModuleType')
        return c

    def is_module(self):
        return True

    def is_stub(self):
        return False

    @property
    @evaluator_method_cache()
    def name(self):
        return ModuleName(self, self._string_name)

    @property
    def _string_name(self):
        """ This is used for the goto functions. """
        # TODO It's ugly that we even use this; the name is usually well known
        # ahead of time, so just pass it when creating a ModuleContext.
        if self._path is None:
            return ''  # no path -> empty name
        else:
            sep = (re.escape(os.path.sep),) * 2
            r = re.search(r'([^%s]*?)(%s__init__)?(\.pyi?|\.so)?$' % sep, self._path)
            # Remove PEP 3149 names
            return re.sub(r'\.[a-z]+-\d{2}[mud]{0,3}$', '', r.group(1))

    @evaluator_method_cache()
    def _module_attributes_dict(self):
        names = ['__package__', '__doc__', '__name__']
        # All the additional module attributes are strings.
        dct = dict((n, _ModuleAttributeName(self, n)) for n in names)
        file = self.py__file__()
        if file is not None:
            dct['__file__'] = _ModuleAttributeName(self, '__file__', file)
        return dct

    def iter_star_filters(self, search_global=False):
        for star_module in self.star_imports():
            yield next(star_module.get_filters(search_global))

    # I'm not sure if the star import cache is really that effective anymore
    # with all the other really fast import caches. Recheck. Also we would need
    # to push the star imports into Evaluator.module_cache, if we reenable this.
    @evaluator_method_cache([])
    def star_imports(self):
        from jedi.inference.imports import Importer

        modules = []
        for i in self.tree_node.iter_imports():
            if i.is_star_import():
                new = Importer(
                    self.evaluator,
                    import_path=i.get_paths()[-1],
                    module_context=self,
                    level=i.level
                ).follow()

                for module in new:
                    if isinstance(module, ModuleContext):
                        modules += module.star_imports()
                modules += new
        return modules

    def get_qualified_names(self):
        """
        A module doesn't have a qualified name, but it's important to note that
        it's reachable and not `None`. With this information we can add
        qualified names on top for all context children.
        """
        return ()


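# Illustrative note (behavioral sketch): the filters yielded by
# ModuleMixin.get_filters are consumed in order, so a name lookup in a module
# first sees tree and global names, then submodules, then synthetic attributes
# like __file__/__name__, and finally names coming in through star imports.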
class ModuleContext(ModuleMixin, TreeContext):
    api_type = u'module'
    parent_context = None

    def __init__(self, evaluator, module_node, file_io, string_names, code_lines, is_package=False):
        super(ModuleContext, self).__init__(
            evaluator,
            parent_context=None,
            tree_node=module_node
        )
        self.file_io = file_io
        if file_io is None:
            self._path = None
        else:
            self._path = file_io.path
        self.string_names = string_names  # Optional[Tuple[str, ...]]
        self.code_lines = code_lines
        self.is_package = is_package

    def is_stub(self):
        if self._path is not None and self._path.endswith('.pyi'):
            # Currently this is how we identify stubs when e.g. goto is used in
            # them. This could be changed if stubs were identified sooner and
            # used as StubModuleContext.
            return True
        return super(ModuleContext, self).is_stub()

    def py__name__(self):
        if self.string_names is None:
            return None
        return '.'.join(self.string_names)

    def py__file__(self):
        """
        In contrast to Python's ``__file__``, this can be None.
        """
        if self._path is None:
            return None

        return os.path.abspath(self._path)

    def py__package__(self):
        if self.is_package:
            return self.string_names
        return self.string_names[:-1]

    def _py__path__(self):
        # A namespace package is typically auto generated and ~10 lines long.
        first_few_lines = ''.join(self.code_lines[:50])
        # These are strings that need to be used for namespace packages;
        # the first one is ``pkgutil``, the second ``pkg_resources``.
        options = ('declare_namespace(__name__)', 'extend_path(__path__')
        if options[0] in first_few_lines or options[1] in first_few_lines:
            # It is a namespace, now try to find the rest of the
            # modules on sys_path or whatever the search_path is.
            paths = set()
            for s in self.evaluator.get_sys_path():
                other = os.path.join(s, self.name.string_name)
                if os.path.isdir(other):
                    paths.add(other)
            if paths:
                return list(paths)
            # Nested namespace packages will not be supported. Nobody ever
            # asked for it and in Python 3 they are there without using all the
            # crap above.

        # Default to the directory of this file.
        file = self.py__file__()
        assert file is not None  # Shouldn't be a package in the first place.
        return [os.path.dirname(file)]

    @property
    def py__path__(self):
        """
        Not seen here, since it's a property. The callback actually uses a
        variable, so use it like::

            foo.py__path__(sys_path)

        In case of a package, this returns Python's __path__ attribute, which
        is a list of paths (strings).
        Raises an AttributeError if the module is not a package.
        """
        if self.is_package:
            return self._py__path__
        else:
            raise AttributeError('Only packages have __path__ attributes.')

    def __repr__(self):
        return "<%s: %s@%s-%s is_stub=%s>" % (
            self.__class__.__name__, self._string_name,
            self.tree_node.start_pos[0], self.tree_node.end_pos[0],
            self.is_stub()
        )
64
jedi/inference/context/namespace.py
Normal file
@@ -0,0 +1,64 @@
from jedi.inference.cache import evaluator_method_cache
from jedi.inference.filters import DictFilter
from jedi.inference.names import ContextNameMixin, AbstractNameDefinition
from jedi.inference.base_context import Context
from jedi.inference.context.module import SubModuleDictMixin


class ImplicitNSName(ContextNameMixin, AbstractNameDefinition):
    """
    Accessing names for implicit namespace packages should infer to nothing.
    This object will prevent Jedi from raising exceptions.
    """
    def __init__(self, implicit_ns_context, string_name):
        self._context = implicit_ns_context
        self.string_name = string_name


class ImplicitNamespaceContext(Context, SubModuleDictMixin):
    """
    Provides support for implicit namespace packages.
    """
    # Is a module like every other module, because if you import an empty
    # folder foobar it will be available as an object:
    # <module 'foobar' (namespace)>.
    api_type = u'module'
    parent_context = None

    def __init__(self, evaluator, fullname, paths):
        super(ImplicitNamespaceContext, self).__init__(evaluator, parent_context=None)
        self.evaluator = evaluator
        self._fullname = fullname
        self._paths = paths

    def get_filters(self, search_global=False, until_position=None, origin_scope=None):
        yield DictFilter(self.sub_modules_dict())

    @property
    @evaluator_method_cache()
    def name(self):
        string_name = self.py__package__()[-1]
        return ImplicitNSName(self, string_name)

    def py__file__(self):
        return None

    def py__package__(self):
        """Return the fullname."""
        return self._fullname.split('.')

    def py__path__(self):
        return self._paths

    def py__name__(self):
        return self._fullname

    def is_namespace(self):
        return True

    def is_stub(self):
        return False

    def __repr__(self):
        return '<%s: %s>' % (self.__class__.__name__, self._fullname)
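

# Illustrative sketch (hypothetical layout): an implicit namespace package is
# a directory without an __init__.py. For two directories ``a/ns/x`` and
# ``b/ns/y`` on the search path, an ImplicitNamespaceContext for ``ns`` would
# report roughly py__name__() == 'ns', py__path__() == [both directories] and
# expose ``x`` and ``y`` through sub_modules_dict().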