Merge branch 'master' of https://github.com/davidhalter/jedi
@@ -34,7 +34,6 @@ from jedi.inference import usages
from jedi.inference.arguments import try_iter_content
from jedi.inference.helpers import get_module_names, infer_call_of_leaf
from jedi.inference.sys_path import transform_path_to_dotted
from jedi.inference.names import TreeNameDefinition, ParamName
from jedi.inference.syntax_tree import tree_name_to_values
from jedi.inference.value import ModuleValue
from jedi.inference.base_value import ValueSet
@@ -302,8 +301,8 @@ class Script(object):
        # Without a name we really just want to jump to the result e.g.
        # executed by `foo()`, if the cursor is after `)`.
        return self.goto_definitions(only_stubs=only_stubs, prefer_stubs=prefer_stubs)
        context = self._get_module_context().create_context(tree_name)
        names = list(self._inference_state.goto(context, tree_name))
        name = self._get_module_context().create_name(tree_name)
        names = list(name.goto())

        if follow_imports:
            names = filter_follow_imports(names)
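The early return above matters for the public API: with the cursor just past a call's closing paren there is no name to go to, so goto falls back to definitions. A minimal sketch, assuming the ``Script(source, line, column)`` API of this jedi version:

    import jedi

    source = "def foo():\n    pass\nfoo()"
    # Cursor right after the `)` of `foo()` on line 3: no name under the
    # cursor, so goto_assignments() defers to goto_definitions().
    script = jedi.Script(source, line=3, column=5)
    print(script.goto_assignments())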
@@ -503,23 +502,13 @@ def names(source=None, path=None, encoding='utf-8', all_scopes=False,
        is_def = _def._name.tree_name.is_definition()
        return definitions and is_def or references and not is_def

    def create_name(name):
        if name.parent.type == 'param':
            cls = ParamName
        else:
            cls = TreeNameDefinition
        return cls(
            module_context.create_context(name),
            name
        )

    # Set line/column to a random position, because they don't matter.
    script = Script(source, line=1, column=0, path=path, encoding=encoding, environment=environment)
    module_context = script._get_module_context()
    defs = [
        classes.Definition(
            script._inference_state,
            create_name(name)
            module_context.create_name(name)
        ) for name in get_module_names(script._module_node, all_scopes)
    ]
    return sorted(filter(def_ref_filter, defs), key=lambda x: (x.line, x.column))
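For context, the function being rewritten here backs the module-level ``jedi.names()`` helper. A small usage sketch, assuming the keyword arguments shown in the signature above:

    import jedi

    source = "import os\nx = 1\ndef f(a):\n    return a"
    # Definitions only, sorted by (line, column) exactly as the code above does.
    for definition in jedi.names(source, all_scopes=True, definitions=True):
        print(definition.line, definition.column, definition.name)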
@@ -553,7 +553,7 @@ class Definition(BaseDefinition):
            typ = 'def'
        return typ + ' ' + self._name.get_public_name()

        definition = tree_name.get_definition() or tree_name
        definition = tree_name.get_definition(include_setitem=True) or tree_name
        # Remove the prefix, because that's not what we want for get_code
        # here.
        txt = definition.get_code(include_prefix=False)

@@ -33,7 +33,7 @@ return the ``date`` class.
To *visualize* this (simplified):

- ``InferenceState.infer_expr_stmt`` doesn't do much, because there's no assignment.
- ``Value.infer_node`` cares for resolving the dotted path
- ``Context.infer_node`` cares for resolving the dotted path
- ``InferenceState.find_types`` searches for global definitions of datetime, which
  it finds in the definition of an import, by scanning the syntax tree.
- Using the import logic, the datetime module is found.
@@ -62,25 +62,22 @@ I need to mention now that lazy type inference is really good because it
only *infers* what needs to be *inferred*. All the statements and modules
that are not used are just being ignored.
"""
from parso.python import tree
import parso
from parso import python_bytes_to_unicode
from jedi.file_io import FileIO

from jedi import debug
from jedi import parser_utils
from jedi.inference.utils import unite
from jedi.inference import imports
from jedi.inference import recursion
from jedi.inference.cache import inference_state_function_cache
from jedi.inference import helpers
from jedi.inference.names import TreeNameDefinition, ParamName
from jedi.inference.base_value import ContextualizedName, ContextualizedNode, \
    ValueSet, NO_VALUES, iterate_values
from jedi.inference.names import TreeNameDefinition
from jedi.inference.base_value import ContextualizedNode, \
    ValueSet, iterate_values
from jedi.inference.value import ClassValue, FunctionValue
from jedi.inference.context import CompForContext
from jedi.inference.syntax_tree import infer_trailer, infer_expr_stmt, \
    infer_node, check_tuple_assignments
from jedi.inference.syntax_tree import infer_expr_stmt, \
    check_tuple_assignments
from jedi.inference.imports import follow_error_node_imports_if_possible
from jedi.plugins import plugin_manager


@@ -149,93 +146,6 @@ class InferenceState(object):
        """Convenience function"""
        return self.project._get_sys_path(self, environment=self.environment, **kwargs)

    def infer_element(self, context, element):
        if isinstance(context, CompForContext):
            return infer_node(context, element)

        if_stmt = element
        while if_stmt is not None:
            if_stmt = if_stmt.parent
            if if_stmt.type in ('if_stmt', 'for_stmt'):
                break
            if parser_utils.is_scope(if_stmt):
                if_stmt = None
                break
        predefined_if_name_dict = context.predefined_names.get(if_stmt)
        # TODO there's a lot of issues with this one. We actually should do
        # this in a different way. Caching should only be active in certain
        # cases and this all sucks.
        if predefined_if_name_dict is None and if_stmt \
                and if_stmt.type == 'if_stmt' and self.is_analysis:
            if_stmt_test = if_stmt.children[1]
            name_dicts = [{}]
            # If we already did a check, we don't want to do it again -> If
            # value.predefined_names is filled, we stop.
            # We don't want to check the if stmt itself, it's just about
            # the content.
            if element.start_pos > if_stmt_test.end_pos:
                # Now we need to check if the names in the if_stmt match the
                # names in the suite.
                if_names = helpers.get_names_of_node(if_stmt_test)
                element_names = helpers.get_names_of_node(element)
                str_element_names = [e.value for e in element_names]
                if any(i.value in str_element_names for i in if_names):
                    for if_name in if_names:
                        definitions = self.goto_definitions(context, if_name)
                        # Every name that has multiple different definitions
                        # causes the complexity to rise. The complexity should
                        # never fall below 1.
                        if len(definitions) > 1:
                            if len(name_dicts) * len(definitions) > 16:
                                debug.dbg('Too many options for if branch inference %s.', if_stmt)
                                # There's only a certain amount of branches
                                # Jedi can infer, otherwise it will take too
                                # long.
                                name_dicts = [{}]
                                break

                            original_name_dicts = list(name_dicts)
                            name_dicts = []
                            for definition in definitions:
                                new_name_dicts = list(original_name_dicts)
                                for i, name_dict in enumerate(new_name_dicts):
                                    new_name_dicts[i] = name_dict.copy()
                                    new_name_dicts[i][if_name.value] = ValueSet([definition])

                                name_dicts += new_name_dicts
                        else:
                            for name_dict in name_dicts:
                                name_dict[if_name.value] = definitions
            if len(name_dicts) > 1:
                result = NO_VALUES
                for name_dict in name_dicts:
                    with context.predefine_names(if_stmt, name_dict):
                        result |= infer_node(context, element)
                return result
            else:
                return self._infer_element_if_inferred(context, element)
        else:
            if predefined_if_name_dict:
                return infer_node(context, element)
            else:
                return self._infer_element_if_inferred(context, element)

    def _infer_element_if_inferred(self, context, element):
        """
        TODO This function is temporary: Merge with infer_element.
        """
        parent = element
        while parent is not None:
            parent = parent.parent
            predefined_if_name_dict = context.predefined_names.get(parent)
            if predefined_if_name_dict is not None:
                return infer_node(context, element)
        return self._infer_element_cached(context, element)

    @inference_state_function_cache(default=NO_VALUES)
    def _infer_element_cached(self, context, element):
        return infer_node(context, element)

    def goto_definitions(self, context, name):
        def_ = name.get_definition(import_name_always=True)
        if def_ is not None:
@@ -256,113 +166,17 @@ class InferenceState(object):
                container_types = context.infer_node(def_.children[3])
                cn = ContextualizedNode(context, def_.children[3])
                for_types = iterate_values(container_types, cn)
                c_node = ContextualizedName(context, name)
                return check_tuple_assignments(c_node, for_types)
                n = TreeNameDefinition(context, name)
                return check_tuple_assignments(n, for_types)
            if type_ in ('import_from', 'import_name'):
                return imports.infer_import(context, name)
        else:
            result = self._follow_error_node_imports_if_possible(context, name)
            result = follow_error_node_imports_if_possible(context, name)
            if result is not None:
                return result

        return helpers.infer_call_of_leaf(context, name)

    def _follow_error_node_imports_if_possible(self, context, name):
        error_node = tree.search_ancestor(name, 'error_node')
        if error_node is not None:
            # Get the first command start of a started simple_stmt. The error
            # node is sometimes a small_stmt and sometimes a simple_stmt. Check
            # for ; leaves that start new statements.
            start_index = 0
            for index, n in enumerate(error_node.children):
                if n.start_pos > name.start_pos:
                    break
                if n == ';':
                    start_index = index + 1
            nodes = error_node.children[start_index:]
            first_name = nodes[0].get_first_leaf().value

            # Make it possible to infer stuff like `import foo.` or
            # `from foo.bar`.
            if first_name in ('from', 'import'):
                is_import_from = first_name == 'from'
                level, names = helpers.parse_dotted_names(
                    nodes,
                    is_import_from=is_import_from,
                    until_node=name,
                )
                return imports.Importer(self, names, context.get_root_context(), level).follow()
        return None

    def goto(self, context, name):
        definition = name.get_definition(import_name_always=True)
        if definition is not None:
            type_ = definition.type
            if type_ == 'expr_stmt':
                # Only take the parent, because if it's more complicated than just
                # a name it's something you can "goto" again.
                is_simple_name = name.parent.type not in ('power', 'trailer')
                if is_simple_name:
                    return [TreeNameDefinition(context, name)]
            elif type_ == 'param':
                return [ParamName(context, name)]
            elif type_ in ('import_from', 'import_name'):
                module_names = imports.goto_import(context, name)
                return module_names
            else:
                return [TreeNameDefinition(context, name)]
        else:
            values = self._follow_error_node_imports_if_possible(context, name)
            if values is not None:
                return [value.name for value in values]

        par = name.parent
        node_type = par.type
        if node_type == 'argument' and par.children[1] == '=' and par.children[0] == name:
            # Named param goto.
            trailer = par.parent
            if trailer.type == 'arglist':
                trailer = trailer.parent
            if trailer.type != 'classdef':
                if trailer.type == 'decorator':
                    value_set = context.infer_node(trailer.children[1])
                else:
                    i = trailer.parent.children.index(trailer)
                    to_infer = trailer.parent.children[:i]
                    if to_infer[0] == 'await':
                        to_infer.pop(0)
                    value_set = context.infer_node(to_infer[0])
                    for trailer in to_infer[1:]:
                        value_set = infer_trailer(context, value_set, trailer)
                param_names = []
                for value in value_set:
                    for signature in value.get_signatures():
                        for param_name in signature.get_param_names():
                            if param_name.string_name == name.value:
                                param_names.append(param_name)
                return param_names
        elif node_type == 'dotted_name':  # Is a decorator.
            index = par.children.index(name)
            if index > 0:
                new_dotted = helpers.deep_ast_copy(par)
                new_dotted.children[index - 1:] = []
                values = context.infer_node(new_dotted)
                return unite(
                    value.goto(name, name_context=value.as_context())
                    for value in values
                )

        if node_type == 'trailer' and par.children[0] == '.':
            values = helpers.infer_call_of_leaf(context, name, cut_own_trailer=True)
            return values.goto(name, name_context=context)
        else:
            stmt = tree.search_ancestor(
                name, 'expr_stmt', 'lambdef'
            ) or name
            if stmt.type == 'lambdef':
                stmt = name
            return context.goto(name, position=stmt.start_pos)
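The ``# Named param goto.`` branch above is what makes goto on a keyword argument jump to the matching parameter. A hedged sketch of the behavior from the user's side:

    import jedi

    # goto on `bar` inside `foo(bar=3)` should land on the `bar` parameter
    # of `def foo(bar)`, via the signature search in the branch above.
    source = "def foo(bar):\n    return bar\nfoo(bar=3)"
    print(jedi.Script(source, line=3, column=4).goto_assignments())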

    def parse_and_get_code(self, code=None, path=None, encoding='utf-8',
                           use_latest_grammar=False, file_io=None, **kwargs):
        if self.allow_different_encoding:

@@ -8,11 +8,10 @@ from jedi.inference.utils import PushBackIterator
from jedi.inference import analysis
from jedi.inference.lazy_value import LazyKnownValue, LazyKnownValues, \
    LazyTreeValue, get_merged_lazy_value
from jedi.inference.names import ParamName, TreeNameDefinition
from jedi.inference.names import ParamName, TreeNameDefinition, AnonymousParamName
from jedi.inference.base_value import NO_VALUES, ValueSet, ContextualizedNode
from jedi.inference.value import iterable
from jedi.inference.cache import inference_state_as_method_param_cache
from jedi.inference.param import get_executed_param_names_and_issues


def try_iter_content(types, depth=0):
@@ -84,7 +83,7 @@ def _iterate_argument_clinic(inference_state, arguments, parameters):
                break

            lazy_values.append(argument)
        yield ValueSet([iterable.FakeSequence(inference_state, u'tuple', lazy_values)])
        yield ValueSet([iterable.FakeTuple(inference_state, lazy_values)])
        lazy_values
        continue
    elif stars == 2:
@@ -144,9 +143,6 @@ class _AbstractArgumentsMixin(object):
    def unpack(self, funcdef=None):
        raise NotImplementedError

    def get_executed_param_names_and_issues(self, execution_context):
        return get_executed_param_names_and_issues(execution_context, self)

    def get_calling_nodes(self):
        return []

@@ -157,19 +153,6 @@ class AbstractArguments(_AbstractArgumentsMixin):
        trailer = None


class AnonymousArguments(AbstractArguments):
    def get_executed_param_names_and_issues(self, execution_context):
        from jedi.inference.dynamic import search_param_names
        return search_param_names(
            execution_context.inference_state,
            execution_context,
            execution_context.tree_node
        ), []

    def __repr__(self):
        return '%s()' % self.__class__.__name__


def unpack_arglist(arglist):
    if arglist is None:
        return
@@ -275,7 +258,6 @@ class TreeArguments(AbstractArguments):
        return '<%s: %s>' % (self.__class__.__name__, self.argument_node)

    def get_calling_nodes(self):
        from jedi.inference.dynamic import DynamicExecutedParamName
        old_arguments_list = []
        arguments = self

@@ -288,15 +270,14 @@ class TreeArguments(AbstractArguments):
            names = calling_name.goto()
            if len(names) != 1:
                break
            if isinstance(names[0], AnonymousParamName):
                # Dynamic parameters should not have calling nodes, because
                # they are dynamic and extremely random.
                return []
            if not isinstance(names[0], ParamName):
                break
            param = names[0].get_executed_param_name()
            if isinstance(param, DynamicExecutedParamName):
                # For dynamic searches we don't even want to see errors.
                return []
            if param.var_args is None:
                break
            arguments = param.var_args
            executed_param_name = names[0].get_executed_param_name()
            arguments = executed_param_name.arguments
            break

        if arguments.argument_node is not None:

@@ -140,9 +140,12 @@ class HelperValueMixin(object):

class Value(HelperValueMixin, BaseValue):
    """
    To be defined by subclasses.
    To be implemented by subclasses.
    """
    tree_node = None
    # Possible values: None, tuple, list, dict and set. Here to deal with these
    # very important containers.
    array_type = None

    @property
    def api_type(self):
@@ -161,6 +164,9 @@ class Value(HelperValueMixin, BaseValue):
        )
        return NO_VALUES

    def py__simple_getitem__(self, index):
        raise SimpleGetItemNotFound

    def py__iter__(self, contextualized_node=None):
        if contextualized_node is not None:
            from jedi.inference import analysis
@@ -323,67 +329,15 @@ class ContextualizedNode(object):
        return '<%s: %s in %s>' % (self.__class__.__name__, self.node, self.context)


class ContextualizedName(ContextualizedNode):
    # TODO merge with TreeNameDefinition?!
    @property
    def name(self):
        return self.node

    def assignment_indexes(self):
        """
        Returns an array of tuple(int, node) of the indexes that are used in
        tuple assignments.

        For example if the name is ``y`` in the following code::

            x, (y, z) = 2, ''

        would result in ``[(1, xyz_node), (0, yz_node)]``.

        When searching for b in the case ``a, *b, c = [...]`` it will return::

            [(slice(1, -1), abc_node)]
        """
        indexes = []
        is_star_expr = False
        node = self.node.parent
        compare = self.node
        while node is not None:
            if node.type in ('testlist', 'testlist_comp', 'testlist_star_expr', 'exprlist'):
                for i, child in enumerate(node.children):
                    if child == compare:
                        index = int(i / 2)
                        if is_star_expr:
                            from_end = int((len(node.children) - i) / 2)
                            index = slice(index, -from_end)
                        indexes.insert(0, (index, node))
                        break
                else:
                    raise LookupError("Couldn't find the assignment.")
                is_star_expr = False
            elif node.type == 'star_expr':
                is_star_expr = True
            elif isinstance(node, (ExprStmt, SyncCompFor)):
                break

            compare = node
            node = node.parent
        return indexes
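The ``int(i / 2)`` arithmetic works because in the parso tree the children of a testlist alternate between names and comma operators. A standalone illustration (hypothetical node list, not actual parso objects):

    # For `x, (y, z) = 2, ''` the testlist children are [x, ',', (y, z)]:
    # dividing the child index by 2 skips the commas, so (y, z) at child
    # index 2 gets assignment index 1, and y inside it gets index 0.
    children = ['x', ',', '(y, z)']
    assert [int(i / 2) for i, c in enumerate(children) if c != ','] == [0, 1]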


def _getitem(value, index_values, contextualized_node):
    # The actual getitem call.
    simple_getitem = getattr(value, 'py__simple_getitem__', None)

    result = NO_VALUES
    unused_values = set()
    for index_value in index_values:
        if simple_getitem is not None:

            index = index_value.get_safe_value(default=None)
            if type(index) in (float, int, str, unicode, slice, bytes):
                try:
                    result |= simple_getitem(index)
                    result |= value.py__simple_getitem__(index)
                    continue
                except SimpleGetItemNotFound:
                    pass

@@ -6,6 +6,7 @@ from parso.python.tree import Name

from jedi.inference.filters import ParserTreeFilter, MergedFilter, \
    GlobalNameFilter
from jedi.inference.names import AnonymousParamName, TreeNameDefinition
from jedi.inference.base_value import NO_VALUES, ValueSet
from jedi.parser_utils import get_parent_scope
from jedi import debug
@@ -151,9 +152,6 @@ class AbstractContext(object):
        finally:
            del predefined[flow_scope]

    def __repr__(self):
        return '%s(%s)' % (self.__class__.__name__, self._value)


class ValueContext(AbstractContext):
    """
@@ -208,7 +206,8 @@ class ValueContext(AbstractContext):

class TreeContextMixin(object):
    def infer_node(self, node):
        return self.inference_state.infer_element(self, node)
        from jedi.inference.syntax_tree import infer_node
        return infer_node(self, node)

    def create_value(self, node):
        from jedi.inference import value
@@ -274,6 +273,16 @@ class TreeContextMixin(object):
            scope_node = parent_scope(scope_node)
        return from_scope_node(scope_node, is_nested=True)

    def create_name(self, tree_name):
        definition = tree_name.get_definition()
        if definition and definition.type == 'param' and definition.name == tree_name:
            funcdef = search_ancestor(definition, 'funcdef', 'lambdef')
            func = self.create_value(funcdef)
            return AnonymousParamName(func, tree_name)
        else:
            context = self.create_context(tree_name)
            return TreeNameDefinition(context, tree_name)


class FunctionContext(TreeContextMixin, ValueContext):
    def get_filters(self, until_position=None, origin_scope=None):
@@ -358,6 +367,12 @@ class CompForContext(TreeContextMixin, AbstractContext):
    def get_filters(self, until_position=None, origin_scope=None):
        yield ParserTreeFilter(self)

    def py__name__(self):
        return '<comprehension context>'

    def __repr__(self):
        return '%s(%s)' % (self.__class__.__name__, self.tree_node)


class CompiledContext(ValueContext):
    def get_filters(self, until_position=None, origin_scope=None):
@@ -440,14 +455,14 @@ def get_global_filters(context, until_position, origin_scope):
    [...]
    """
    base_context = context
    from jedi.inference.value.function import FunctionExecutionContext
    from jedi.inference.value.function import BaseFunctionExecutionContext
    while context is not None:
        # Names in methods cannot be resolved within the class.
        for filter in context.get_filters(
                until_position=until_position,
                origin_scope=origin_scope):
            yield filter
        if isinstance(context, FunctionExecutionContext):
        if isinstance(context, BaseFunctionExecutionContext):
            # The position should be reset if the current scope is a function.
            until_position = None


@@ -253,8 +253,8 @@ def _execute_array_values(inference_state, array):
    Tuples indicate that there's not just one return value, but the listed
    ones. `(str, int)` means that it returns a tuple with both types.
    """
    from jedi.inference.value.iterable import SequenceLiteralValue, FakeSequence
    if isinstance(array, SequenceLiteralValue):
    from jedi.inference.value.iterable import SequenceLiteralValue, FakeTuple, FakeList
    if isinstance(array, SequenceLiteralValue) and array.array_type in ('tuple', 'list'):
        values = []
        for lazy_value in array.py__iter__():
            objects = ValueSet.from_sets(
@@ -262,33 +262,29 @@ def _execute_array_values(inference_state, array):
                for typ in lazy_value.infer()
            )
            values.append(LazyKnownValues(objects))
        return {FakeSequence(inference_state, array.array_type, values)}
        cls = FakeTuple if array.array_type == 'tuple' else FakeList
        return {cls(inference_state, values)}
    else:
        return array.execute_annotation()
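The ``(str, int)`` convention mentioned in the docstring above comes from reST-style return types in docstrings. An illustrative, made-up function that jedi would read this way:

    def split_pair(text):
        """
        :rtype: (str, int)
        """
        head, tail = text.split(':')
        return head, int(tail)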


@inference_state_method_cache()
def infer_param(execution_context, param):
    from jedi.inference.value.instance import InstanceArguments
    from jedi.inference.value import FunctionExecutionContext

def infer_param(function_value, param):
    def infer_docstring(docstring):
        return ValueSet(
            p
            for param_str in _search_param_in_docstr(docstring, param.name.value)
            for p in _infer_for_statement_string(module_context, param_str)
        )
    module_context = execution_context.get_root_context()
    module_context = function_value.get_root_context()
    func = param.get_parent_function()
    if func.type == 'lambdef':
        return NO_VALUES

    types = infer_docstring(execution_context.py__doc__())
    if isinstance(execution_context, FunctionExecutionContext) \
            and isinstance(execution_context.var_args, InstanceArguments) \
            and execution_context.function_value.py__name__() == '__init__':
        class_value = execution_context.var_args.instance.class_value
        types |= infer_docstring(class_value.py__doc__())
    types = infer_docstring(function_value.py__doc__())
    if function_value.is_bound_method() \
            and function_value.py__name__() == '__init__':
        types |= infer_docstring(function_value.class_context.py__doc__())

    debug.dbg('Found param types for docstring: %s', types, color='BLUE')
    return types

@@ -19,44 +19,42 @@ It works as follows:

from jedi import settings
from jedi import debug
from jedi.inference.cache import inference_state_function_cache
from jedi.parser_utils import get_parent_scope
from jedi.inference.cache import inference_state_method_cache
from jedi.inference import imports
from jedi.inference.arguments import TreeArguments
from jedi.inference.param import create_default_params
from jedi.inference.param import get_executed_param_names
from jedi.inference.helpers import is_stdlib_path
from jedi.inference.utils import to_list
from jedi.parser_utils import get_parent_scope
from jedi.inference.value import instance
from jedi.inference.base_value import ValueSet, NO_VALUES
from jedi.inference import recursion
from jedi.inference.names import ParamNameWrapper


MAX_PARAM_SEARCHES = 20


class DynamicExecutedParamName(ParamNameWrapper):
    """
    Simulates being a parameter while actually just being multiple params.
    """

    def __init__(self, executed_param_names):
        super(DynamicExecutedParamName, self).__init__(executed_param_names[0])
        self._executed_param_names = executed_param_names

    def infer(self):
        inf = self.parent_context.inference_state
        with recursion.execution_allowed(inf, self) as allowed:
def _avoid_recursions(func):
    def wrapper(function_value, param_index):
        inf = function_value.inference_state
        with recursion.execution_allowed(inf, function_value.tree_node) as allowed:
            # We need to catch recursions that may occur, because
            # anonymous functions can create an anonymous parameter that is
            # more or less self referencing.
            if allowed:
                return ValueSet.from_sets(p.infer() for p in self._executed_param_names)
                inf.dynamic_params_depth += 1
                try:
                    return func(function_value, param_index)
                finally:
                    inf.dynamic_params_depth -= 1
            return NO_VALUES
            return
    return wrapper


@debug.increase_indent
def search_param_names(inference_state, execution_context, funcdef):
@_avoid_recursions
def dynamic_param_lookup(function_value, param_index):
    """
    A dynamic search for param values. If you try to complete a type:

@@ -69,54 +67,42 @@ def search_param_names(inference_state, execution_context, funcdef):
    have to look for all calls to ``func`` to find out what ``foo`` possibly
    is.
    """
    if not settings.dynamic_params:
        return create_default_params(execution_context, funcdef)
    funcdef = function_value.tree_node

    inference_state.dynamic_params_depth += 1
    try:
        path = execution_context.get_root_context().py__file__()
        if not settings.dynamic_params:
            return NO_VALUES

        path = function_value.get_root_context().py__file__()
        if path is not None and is_stdlib_path(path):
            # We don't want to search for usages in the stdlib. Usually people
            # don't work with it (except if you are a core maintainer, sorry).
            # This makes everything slower. Just disable it and run the tests,
            # you will see the slowdown, especially in 3.6.
            return create_default_params(execution_context, funcdef)
            return NO_VALUES

        if funcdef.type == 'lambdef':
            string_name = _get_lambda_name(funcdef)
            if string_name is None:
                return create_default_params(execution_context, funcdef)
                return NO_VALUES
        else:
            string_name = funcdef.name.value
        debug.dbg('Dynamic param search in %s.', string_name, color='MAGENTA')

        try:
            module_context = execution_context.get_root_context()
            function_executions = _search_function_executions(
                inference_state,
                module_context,
                funcdef,
                string_name=string_name,
            module_context = function_value.get_root_context()
            arguments_list = _search_function_arguments(module_context, funcdef, string_name)
            values = ValueSet.from_sets(
                get_executed_param_names(
                    function_value, arguments
                )[param_index].infer()
                for arguments in arguments_list
            )
            if function_executions:
                zipped_param_names = zip(*list(
                    function_execution.get_executed_param_names_and_issues()[0]
                    for function_execution in function_executions
                ))
                params = [DynamicExecutedParamName(executed_param_names)
                          for executed_param_names in zipped_param_names]
            else:
                return create_default_params(execution_context, funcdef)
        finally:
            debug.dbg('Dynamic param result finished', color='MAGENTA')
        return params
    finally:
        inference_state.dynamic_params_depth -= 1
    return values


@inference_state_function_cache(default=None)
@inference_state_method_cache(default=None)
@to_list
def _search_function_executions(inference_state, module_context, funcdef, string_name):
def _search_function_arguments(module_context, funcdef, string_name):
    """
    Returns a list of param names.
    """
@@ -127,8 +113,9 @@ def _search_function_executions(inference_state, module_context, funcdef, string
        string_name = cls.name.value
        compare_node = cls

    found_executions = False
    found_arguments = False
    i = 0
    inference_state = module_context.inference_state
    for for_mod_context in imports.get_module_contexts_containing_name(
            inference_state, [module_context], string_name):
        for name, trailer in _get_potential_nodes(for_mod_context, string_name):
@@ -141,14 +128,14 @@ def _search_function_executions(inference_state, module_context, funcdef, string
            return

        random_context = for_mod_context.create_context(name)
        for function_execution in _check_name_for_execution(
        for arguments in _check_name_for_execution(
                inference_state, random_context, compare_node, name, trailer):
            found_executions = True
            yield function_execution
            found_arguments = True
            yield arguments

        # If there are results after processing a module, we're probably
        # good to process. This is a speed optimization.
        if found_executions:
        if found_arguments:
            return


@@ -178,13 +165,14 @@ def _get_potential_nodes(module_value, func_string_name):


def _check_name_for_execution(inference_state, context, compare_node, name, trailer):
    from jedi.inference.value.function import FunctionExecutionContext
    from jedi.inference.value.function import BaseFunctionExecutionContext

    def create_func_excs(value):
    def create_args(value):
        arglist = trailer.children[1]
        if arglist == ')':
            arglist = None
        args = TreeArguments(inference_state, context, arglist, trailer)
        from jedi.inference.value.instance import InstanceArguments
        if value.tree_node.type == 'classdef':
            created_instance = instance.TreeInstance(
                inference_state,
@@ -192,30 +180,29 @@ def _check_name_for_execution(inference_state, context, compare_node, name, trai
                value,
                args
            )
            for execution in created_instance.create_init_executions():
                yield execution
            return InstanceArguments(created_instance, args)
        else:
            yield value.as_context(args)
            if value.is_bound_method():
                args = InstanceArguments(value.instance, args)
            return args

    for value in inference_state.goto_definitions(context, name):
        value_node = value.tree_node
        if compare_node == value_node:
            for func_execution in create_func_excs(value):
                yield func_execution
        elif isinstance(value.parent_context, FunctionExecutionContext) and \
                compare_node.type == 'funcdef':
            yield create_args(value)
        elif isinstance(value.parent_context, BaseFunctionExecutionContext) \
                and compare_node.type == 'funcdef':
            # Here we're trying to find decorators by checking the first
            # parameter. It's not very generic though. Should find a better
            # solution that also applies to nested decorators.
            param_names, _ = value.parent_context.get_executed_param_names_and_issues()
            param_names = value.parent_context.get_param_names()
            if len(param_names) != 1:
                continue
            values = param_names[0].infer()
            nodes = [v.tree_node for v in values]
            if nodes == [compare_node]:
            if [v.tree_node for v in values] == [compare_node]:
                # Found a decorator.
                module_context = context.get_root_context()
                execution_context = next(create_func_excs(value))
                execution_context = value.as_context(create_args(value))
                potential_nodes = _get_potential_nodes(module_context, param_names[0].string_name)
                for name, trailer in potential_nodes:
                    if value_node.start_pos < name.start_pos < value_node.end_pos:
@@ -227,5 +214,5 @@ def _check_name_for_execution(inference_state, context, compare_node, name, trai
                            name,
                            trailer
                        )
                        for function_execution in iterator:
                            yield function_execution
                        for arguments in iterator:
                            yield arguments
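The decorator heuristic above targets patterns like this sketch: the decorated function arrives as the single parameter, so the call sites of the wrapper reveal the arguments that eventually reach it:

    def decorator(func):
        def wrapper(*args):
            return func(*args)
        return wrapper

    @decorator
    def inner(a):
        return a

    inner(1.5)  # makes `a` inferable as a float through the wrapper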
@@ -13,7 +13,8 @@ from jedi.inference.base_value import ValueSet, Value, ValueWrapper, \
    LazyValueWrapper
from jedi.parser_utils import get_cached_parent_scope
from jedi.inference.utils import to_list
from jedi.inference.names import TreeNameDefinition, ParamName, AbstractNameDefinition
from jedi.inference.names import TreeNameDefinition, ParamName, \
    AnonymousParamName, AbstractNameDefinition

_definition_name_cache = weakref.WeakKeyDictionary()

@@ -61,7 +62,9 @@ def _get_definition_names(used_names, name_key):
        return for_module[name_key]
    except KeyError:
        names = used_names.get(name_key, ())
        result = for_module[name_key] = tuple(name for name in names if name.is_definition())
        result = for_module[name_key] = tuple(
            name for name in names if name.is_definition(include_setitem=True)
        )
        return result


@@ -140,28 +143,45 @@ class ParserTreeFilter(AbstractUsedNamesFilter):
            break


class FunctionExecutionFilter(ParserTreeFilter):
    param_name = ParamName

    def __init__(self, parent_context, node_context=None,
                 until_position=None, origin_scope=None):
        super(FunctionExecutionFilter, self).__init__(
class _FunctionExecutionFilter(ParserTreeFilter):
    def __init__(self, parent_context, function_value, until_position, origin_scope):
        super(_FunctionExecutionFilter, self).__init__(
            parent_context,
            node_context,
            until_position,
            origin_scope
            until_position=until_position,
            origin_scope=origin_scope,
        )
        self._function_value = function_value

    def _convert_param(self, param, name):
        raise NotImplementedError

    @to_list
    def _convert_names(self, names):
        for name in names:
            param = search_ancestor(name, 'param')
            # Here we don't need to check if the param is a default/annotation,
            # because those are not definitions and never make it to this
            # point.
            if param:
                yield self.param_name(self.parent_context, name)
                yield self._convert_param(param, name)
            else:
                yield TreeNameDefinition(self.parent_context, name)


class FunctionExecutionFilter(_FunctionExecutionFilter):
    def __init__(self, *args, **kwargs):
        self._arguments = kwargs.pop('arguments')  # Python 2
        super(FunctionExecutionFilter, self).__init__(*args, **kwargs)

    def _convert_param(self, param, name):
        return ParamName(self._function_value, name, self._arguments)


class AnonymousFunctionExecutionFilter(_FunctionExecutionFilter):
    def _convert_param(self, param, name):
        return AnonymousParamName(self._function_value, name)


class GlobalNameFilter(AbstractUsedNamesFilter):
    def get(self, name):
        try:
@@ -17,6 +17,7 @@ from jedi.inference.gradual.typing import TypeVar, LazyGenericClass, \
from jedi.inference.gradual.typing import GenericClass
from jedi.inference.helpers import is_string
from jedi.inference.compiled import builtin_from_name
from jedi.inference.param import get_executed_param_names
from jedi import debug
from jedi import parser_utils

@@ -107,11 +108,11 @@ def _split_comment_param_declaration(decl_text):


@inference_state_method_cache()
def infer_param(execution_context, param, ignore_stars=False):
    values = _infer_param(execution_context, param)
def infer_param(function_value, param, ignore_stars=False):
    values = _infer_param(function_value, param)
    if ignore_stars:
        return values
    inference_state = execution_context.inference_state
    inference_state = function_value.inference_state
    if param.star_count == 1:
        tuple_ = builtin_from_name(inference_state, 'tuple')
        return ValueSet([GenericClass(
@@ -128,7 +129,7 @@ def infer_param(execution_context, param, ignore_stars=False):
    return values


def _infer_param(execution_context, param):
def _infer_param(function_value, param):
    """
    Infers the type of a function parameter, using type annotations.
    """
@@ -161,7 +162,7 @@ def _infer_param(execution_context, param):
            params_comments, all_params
        )
        from jedi.inference.value.instance import InstanceArguments
        if isinstance(execution_context.var_args, InstanceArguments):
        if function_value.is_bound_method():
            if index == 0:
                # Assume it's self, which is already handled
                return NO_VALUES
@@ -171,11 +172,11 @@ def _infer_param(execution_context, param):

        param_comment = params_comments[index]
        return _infer_annotation_string(
            execution_context.function_value.get_default_param_context(),
            function_value.get_default_param_context(),
            param_comment
        )
    # Annotations are like default params and resolve in the same way.
    context = execution_context.function_value.get_default_param_context()
    context = function_value.get_default_param_context()
    return infer_annotation(context, annotation)


@@ -193,16 +194,16 @@ def py__annotations__(funcdef):


@inference_state_method_cache()
def infer_return_types(function_execution_context):
def infer_return_types(function, arguments):
    """
    Infers the type of a function's return value,
    according to type annotations.
    """
    all_annotations = py__annotations__(function_execution_context.tree_node)
    all_annotations = py__annotations__(function.tree_node)
    annotation = all_annotations.get("return", None)
    if annotation is None:
        # If there is no Python 3-style annotation, look for a Python 2-style comment annotation
        node = function_execution_context.tree_node
        node = function.tree_node
        comment = parser_utils.get_following_comment_same_line(node)
        if comment is None:
            return NO_VALUES
@@ -212,19 +213,19 @@ def infer_return_types(function_execution_context):
            return NO_VALUES

        return _infer_annotation_string(
            function_execution_context.function_value.get_default_param_context(),
            function.get_default_param_context(),
            match.group(1).strip()
        ).execute_annotation()
    if annotation is None:
        return NO_VALUES

    context = function_execution_context.function_value.get_default_param_context()
    unknown_type_vars = list(find_unknown_type_vars(context, annotation))
    context = function.get_default_param_context()
    unknown_type_vars = find_unknown_type_vars(context, annotation)
    annotation_values = infer_annotation(context, annotation)
    if not unknown_type_vars:
        return annotation_values.execute_annotation()

    type_var_dict = infer_type_vars_for_execution(function_execution_context, all_annotations)
    type_var_dict = infer_type_vars_for_execution(function, arguments, all_annotations)

    return ValueSet.from_sets(
        ann.define_generics(type_var_dict)
@@ -233,7 +234,7 @@ def infer_return_types(function_execution_context):
    ).execute_annotation()


def infer_type_vars_for_execution(execution_context, annotation_dict):
def infer_type_vars_for_execution(function, arguments, annotation_dict):
    """
    Some functions use type vars that are not defined by the class, but rather
    only defined in the function. See for example `iter`. In those cases we
@@ -243,10 +244,10 @@ def infer_type_vars_for_execution(execution_context, annotation_dict):
    2. Infer type vars with the execution state we have.
    3. Return the union of all type vars that have been found.
    """
    context = execution_context.function_value.get_default_param_context()
    context = function.get_default_param_context()

    annotation_variable_results = {}
    executed_param_names, _ = execution_context.get_executed_param_names_and_issues()
    executed_param_names = get_executed_param_names(function, arguments)
    for executed_param_name in executed_param_names:
        try:
            annotation_node = annotation_dict[executed_param_name.string_name]
@@ -275,6 +276,7 @@ def infer_type_vars_for_execution(execution_context, annotation_dict):

def _merge_type_var_dicts(base_dict, new_dict):
    for type_var_name, values in new_dict.items():
        if values:
            try:
                base_dict[type_var_name] |= values
            except KeyError:
@@ -143,7 +143,7 @@ class _WithIndexBase(_BaseTypingValue):
    def __init__(self, inference_state, parent_context, name, index_value, value_of_index):
        super(_WithIndexBase, self).__init__(inference_state, parent_context, name)
        self._index_value = index_value
        self._value_of_index = value_of_index
        self._context_of_index = value_of_index

    def __repr__(self):
        return '<%s: %s[%s]>' % (
@@ -179,12 +179,12 @@ class TypingValueWithIndex(_WithIndexBase):
            self.parent_context,
            self._tree_name,
            self._index_value,
            self._value_of_index
            self._context_of_index
        )])

    def gather_annotation_classes(self):
        return ValueSet.from_sets(
            _iter_over_arguments(self._index_value, self._value_of_index)
            _iter_over_arguments(self._index_value, self._context_of_index)
        )


@@ -285,7 +285,7 @@ class TypeAlias(LazyValueWrapper):

class _ContainerBase(_WithIndexBase):
    def _get_getitem_values(self, index):
        args = _iter_over_arguments(self._index_value, self._value_of_index)
        args = _iter_over_arguments(self._index_value, self._context_of_index)
        for i, values in enumerate(args):
            if i == index:
                return values
@@ -333,7 +333,7 @@ class Tuple(_ContainerBase):
            return self._get_getitem_values(0).execute_annotation()

        return ValueSet.from_sets(
            _iter_over_arguments(self._index_value, self._value_of_index)
            _iter_over_arguments(self._index_value, self._context_of_index)
        ).execute_annotation()


@@ -649,11 +649,11 @@ class LazyGenericClass(AbstractAnnotatedClass):
    def __init__(self, class_value, index_value, value_of_index):
        super(LazyGenericClass, self).__init__(class_value)
        self._index_value = index_value
        self._value_of_index = value_of_index
        self._context_of_index = value_of_index

    @inference_state_method_cache()
    def get_generics(self):
        return list(_iter_over_arguments(self._index_value, self._value_of_index))
        return list(_iter_over_arguments(self._index_value, self._context_of_index))


class GenericClass(AbstractAnnotatedClass):
@@ -61,8 +61,7 @@ def infer_import(context, tree_name):
    module_context = context.get_root_context()
    from_import_name, import_path, level, values = \
        _prepare_infer_import(module_context, tree_name)
    if not values:
        return NO_VALUES
    if values:

        if from_import_name is not None:
            values = values.py__getattribute__(
@@ -578,3 +577,33 @@ def get_module_contexts_containing_name(inference_state, module_contexts, name):
            m = check_fs(file_io, base_names)
            if m is not None:
                yield m


def follow_error_node_imports_if_possible(context, name):
    error_node = tree.search_ancestor(name, 'error_node')
    if error_node is not None:
        # Get the first command start of a started simple_stmt. The error
        # node is sometimes a small_stmt and sometimes a simple_stmt. Check
        # for ; leaves that start new statements.
        start_index = 0
        for index, n in enumerate(error_node.children):
            if n.start_pos > name.start_pos:
                break
            if n == ';':
                start_index = index + 1
        nodes = error_node.children[start_index:]
        first_name = nodes[0].get_first_leaf().value

        # Make it possible to infer stuff like `import foo.` or
        # `from foo.bar`.
        if first_name in ('from', 'import'):
            is_import_from = first_name == 'from'
            level, names = helpers.parse_dotted_names(
                nodes,
                is_import_from=is_import_from,
                until_node=name,
            )
            return Importer(
                context.inference_state, names, context.get_root_context(), level).follow()
    return None
@@ -3,9 +3,11 @@ from abc import abstractmethod
from parso.tree import search_ancestor

from jedi._compatibility import Parameter
from jedi.inference.utils import unite
from jedi.inference.base_value import ValueSet, NO_VALUES
from jedi.inference import docstrings
from jedi.cache import memoize_method
from jedi.inference.helpers import deep_ast_copy, infer_call_of_leaf


class AbstractNameDefinition(object):
@@ -106,11 +108,78 @@ class AbstractTreeName(AbstractNameDefinition):
            return None
        return parent_names + (self.tree_name.value,)

    def goto(self, **kwargs):
        return self.parent_context.inference_state.goto(
            self.parent_context, self.tree_name, **kwargs
    def goto(self):
        context = self.parent_context
        name = self.tree_name
        definition = name.get_definition(import_name_always=True)
        if definition is not None:
            type_ = definition.type
            if type_ == 'expr_stmt':
                # Only take the parent, because if it's more complicated than just
                # a name it's something you can "goto" again.
                is_simple_name = name.parent.type not in ('power', 'trailer')
                if is_simple_name:
                    return [self]
            elif type_ in ('import_from', 'import_name'):
                from jedi.inference.imports import goto_import
                module_names = goto_import(context, name)
                return module_names
            else:
                return [self]
        else:
            from jedi.inference.imports import follow_error_node_imports_if_possible
            values = follow_error_node_imports_if_possible(context, name)
            if values is not None:
                return [value.name for value in values]

        par = name.parent
        node_type = par.type
        if node_type == 'argument' and par.children[1] == '=' and par.children[0] == name:
            # Named param goto.
            trailer = par.parent
            if trailer.type == 'arglist':
                trailer = trailer.parent
            if trailer.type != 'classdef':
                if trailer.type == 'decorator':
                    value_set = context.infer_node(trailer.children[1])
                else:
                    i = trailer.parent.children.index(trailer)
                    to_infer = trailer.parent.children[:i]
                    if to_infer[0] == 'await':
                        to_infer.pop(0)
                    value_set = context.infer_node(to_infer[0])
                    from jedi.inference.syntax_tree import infer_trailer
                    for trailer in to_infer[1:]:
                        value_set = infer_trailer(context, value_set, trailer)
                param_names = []
                for value in value_set:
                    for signature in value.get_signatures():
                        for param_name in signature.get_param_names():
                            if param_name.string_name == name.value:
                                param_names.append(param_name)
                return param_names
        elif node_type == 'dotted_name':  # Is a decorator.
            index = par.children.index(name)
            if index > 0:
                new_dotted = deep_ast_copy(par)
                new_dotted.children[index - 1:] = []
                values = context.infer_node(new_dotted)
                return unite(
                    value.goto(name, name_context=value.as_context())
                    for value in values
                )

        if node_type == 'trailer' and par.children[0] == '.':
            values = infer_call_of_leaf(context, name, cut_own_trailer=True)
            return values.goto(name, name_context=context)
        else:
            stmt = search_ancestor(
                name, 'expr_stmt', 'lambdef'
            ) or name
            if stmt.type == 'lambdef':
                stmt = name
            return context.goto(name, position=stmt.start_pos)

    def is_import(self):
        imp = search_ancestor(self.tree_name, 'import_from', 'import_name')
        return imp is not None
@@ -175,6 +244,47 @@ class TreeNameDefinition(AbstractTreeName):
            return 'statement'
        return self._API_TYPES.get(definition.type, 'statement')

    def assignment_indexes(self):
        """
        Returns an array of tuple(int, node) of the indexes that are used in
        tuple assignments.

        For example if the name is ``y`` in the following code::

            x, (y, z) = 2, ''

        would result in ``[(1, xyz_node), (0, yz_node)]``.

        When searching for b in the case ``a, *b, c = [...]`` it will return::

            [(slice(1, -1), abc_node)]
        """
        indexes = []
        is_star_expr = False
        node = self.tree_name.parent
        compare = self.tree_name
        while node is not None:
            if node.type in ('testlist', 'testlist_comp', 'testlist_star_expr', 'exprlist'):
                for i, child in enumerate(node.children):
                    if child == compare:
                        index = int(i / 2)
                        if is_star_expr:
                            from_end = int((len(node.children) - i) / 2)
                            index = slice(index, -from_end)
                        indexes.insert(0, (index, node))
                        break
                else:
                    raise LookupError("Couldn't find the assignment.")
                is_star_expr = False
            elif node.type == 'star_expr':
                is_star_expr = True
            elif node.type in ('expr_stmt', 'sync_comp_for'):
                break

            compare = node
            node = node.parent
        return indexes


class _ParamMixin(object):
    def maybe_positional_argument(self, include_star=True):
@@ -242,8 +352,24 @@ class BaseTreeParamName(ParamNameInterface, AbstractTreeName):
        output += '=' + default.get_code(include_prefix=False)
        return output

    def get_public_name(self):
        name = self.string_name
        if name.startswith('__'):
            # Params starting with __ are an equivalent to positional only
            # variables in typeshed.
            name = name[2:]
        return name
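A short illustration of the typeshed convention mentioned in the comment (illustrative only):

    # In typeshed stubs, a double-underscore prefix marks a positional-only
    # parameter; get_public_name() presents `__x` simply as `x`.
    def f(__x): ...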

    def goto(self, **kwargs):
        return [self]


class _ActualTreeParamName(BaseTreeParamName):
    def __init__(self, function_value, tree_name):
        super(_ActualTreeParamName, self).__init__(
            function_value.get_default_param_context(), tree_name)
        self.function_value = function_value


class ParamName(BaseTreeParamName):
    def _get_param_node(self):
        return search_ancestor(self.tree_name, 'param')

@@ -254,7 +380,7 @@ class ParamName(BaseTreeParamName):
    def infer_annotation(self, execute_annotation=True, ignore_stars=False):
        from jedi.inference.gradual.annotation import infer_param
        values = infer_param(
            self.parent_context, self._get_param_node(),
            self.function_value, self._get_param_node(),
            ignore_stars=ignore_stars)
        if execute_annotation:
            values = values.execute_annotation()
@@ -264,20 +390,12 @@ class ParamName(BaseTreeParamName):
        node = self.default_node
        if node is None:
            return NO_VALUES
        return self.parent_context.parent_context.infer_node(node)
        return self.parent_context.infer_node(node)

    @property
    def default_node(self):
        return self._get_param_node().default

    def get_public_name(self):
        name = self.string_name
        if name.startswith('__'):
            # Params starting with __ are an equivalent to positional only
            # variables in typeshed.
            name = name[2:]
        return name

    def get_kind(self):
        tree_param = self._get_param_node()
        if tree_param.star_count == 1:  # *args
@@ -311,14 +429,52 @@ class ParamName(BaseTreeParamName):
        if values:
            return values

        doc_params = docstrings.infer_param(self.parent_context, self._get_param_node())
        if doc_params:
        doc_params = docstrings.infer_param(self.function_value, self._get_param_node())
        return doc_params


class AnonymousParamName(_ActualTreeParamName):
    def __init__(self, function_value, tree_name):
        super(AnonymousParamName, self).__init__(function_value, tree_name)

    def infer(self):
        values = super(AnonymousParamName, self).infer()
        if values:
            return values
        from jedi.inference.dynamic_params import dynamic_param_lookup
        param = self._get_param_node()
        values = dynamic_param_lookup(self.function_value, param.position_index)
        if values:
            return values

        if param.star_count == 1:
            from jedi.inference.value.iterable import FakeTuple
            value = FakeTuple(self.function_value.inference_state, [])
        elif param.star_count == 2:
            from jedi.inference.value.iterable import FakeDict
            value = FakeDict(self.function_value.inference_state, {})
        elif param.default is None:
            return NO_VALUES
        else:
            return self.function_value.parent_context.infer_node(param.default)
        return ValueSet({value})
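Roughly, the fallback chain above means that for a function without annotations or resolvable call sites, the default value is the last source of type information. A sketch:

    def f(x=3.0):
        # With no callers and no annotation, completing `x.` falls back to
        # the default, so `x` is inferred as a float.
        x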
|
||||
|
||||
|
||||
class ParamName(_ActualTreeParamName):
|
||||
def __init__(self, function_value, tree_name, arguments):
|
||||
super(ParamName, self).__init__(function_value, tree_name)
|
||||
self.arguments = arguments
|
||||
|
||||
def infer(self):
|
||||
values = super(ParamName, self).infer()
|
||||
if values:
|
||||
return values
|
||||
|
||||
return self.get_executed_param_name().infer()
|
||||
|
||||
def get_executed_param_name(self):
|
||||
params_names, _ = self.parent_context.get_executed_param_names_and_issues()
|
||||
from jedi.inference.param import get_executed_param_names
|
||||
params_names = get_executed_param_names(self.function_value, self.arguments)
|
||||
return params_names[self._get_param_node().position_index]
|
||||
|
||||
|
||||
|
||||
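
An illustrative aside (not part of the diff): the split above means a parameter with no known call site is handled by AnonymousParamName, whose infer() falls back from annotations and docstrings to a dynamic search and finally to the default value. The snippet below uses the Script API of this jedi version; the source code being analyzed is made up for the example.

import jedi

source = '''\
def retry(times=3):
    times.'''
# No caller exists anywhere, so the default `3` is the last fallback and
# int attributes should be offered after `times.`.
script = jedi.Script(source, line=2, column=10)
print([c.name for c in script.completions()][:5])
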
@@ -19,9 +19,9 @@ def _add_argument_issue(error_name, lazy_value, message):


class ExecutedParamName(ParamName):
    """Fake a param and give it values."""
    def __init__(self, execution_context, param_node, lazy_value, is_default=False):
        super(ExecutedParamName, self).__init__(execution_context, param_node.name)
    def __init__(self, function_value, arguments, param_node, lazy_value, is_default=False):
        super(ExecutedParamName, self).__init__(
            function_value, param_node.name, arguments=arguments)
        self._lazy_value = lazy_value
        self._is_default = is_default

@@ -42,19 +42,15 @@ class ExecutedParamName(ParamName):
        matches = any(c1.is_sub_class_of(c2)
                      for c1 in argument_values
                      for c2 in annotations.gather_annotation_classes())
        debug.dbg("signature compare %s: %s <=> %s",
        debug.dbg("param compare %s: %s <=> %s",
                  matches, argument_values, annotations, color='BLUE')
        return matches

    @property
    def var_args(self):
        return self.parent_context.var_args

    def __repr__(self):
        return '<%s: %s>' % (self.__class__.__name__, self.string_name)


def get_executed_param_names_and_issues(execution_context, arguments):
def get_executed_param_names_and_issues(function_value, arguments):
    def too_many_args(argument):
        m = _error_argument_count(funcdef, len(unpacked_va))
        # Just report an error for the first param that is not needed (like
@@ -70,15 +66,16 @@ def get_executed_param_names_and_issues(execution_context, arguments):
            )
        else:
            issues.append(None)
            debug.warning('non-public warning: %s', m)

    issues = []  # List[Optional[analysis issue]]
    result_params = []
    param_dict = {}
    funcdef = execution_context.tree_node
    funcdef = function_value.tree_node
    # Default params are part of the value where the function was defined.
    # This means that they might have access on class variables that the
    # function itself doesn't have.
    default_param_context = execution_context.function_value.get_default_param_context()
    default_param_context = function_value.get_default_param_context()

    for param in funcdef.get_params():
        param_dict[param.name.value] = param
@@ -114,7 +111,8 @@ def get_executed_param_names_and_issues(execution_context, arguments):
                    contextualized_node.node, message=m)
                )
            else:
                keys_used[key] = ExecutedParamName(execution_context, key_param, argument)
                keys_used[key] = ExecutedParamName(
                    function_value, arguments, key_param, argument)
        key, argument = next(var_arg_iterator, (None, None))

    try:
@@ -134,13 +132,13 @@ def get_executed_param_names_and_issues(execution_context, arguments):
                        var_arg_iterator.push_back((key, argument))
                        break
                    lazy_value_list.append(argument)
            seq = iterable.FakeSequence(execution_context.inference_state, u'tuple', lazy_value_list)
            seq = iterable.FakeTuple(function_value.inference_state, lazy_value_list)
            result_arg = LazyKnownValue(seq)
        elif param.star_count == 2:
            if argument is not None:
                too_many_args(argument)
            # **kwargs param
            dct = iterable.FakeDict(execution_context.inference_state, dict(non_matching_keys))
            dct = iterable.FakeDict(function_value.inference_state, dict(non_matching_keys))
            result_arg = LazyKnownValue(dct)
            non_matching_keys = {}
        else:
@@ -167,8 +165,7 @@ def get_executed_param_names_and_issues(execution_context, arguments):
                result_arg = argument

        result_params.append(ExecutedParamName(
            execution_context, param, result_arg,
            is_default=is_default
            function_value, arguments, param, result_arg, is_default=is_default
        ))
        if not isinstance(result_arg, LazyUnknownValue):
            keys_used[param.name.value] = result_params[-1]
@@ -209,6 +206,10 @@ def get_executed_param_names_and_issues(execution_context, arguments):
    return result_params, issues


def get_executed_param_names(function_value, arguments):
    return get_executed_param_names_and_issues(function_value, arguments)[0]


def _error_argument_count(funcdef, actual_count):
    params = funcdef.get_params()
    default_arguments = sum(1 for p in params if p.default or p.star_count)
@@ -219,24 +220,3 @@ def _error_argument_count(funcdef, actual_count):
        before = 'from %s to ' % (len(params) - default_arguments)
    return ('TypeError: %s() takes %s%s arguments (%s given).'
            % (funcdef.name, before, len(params), actual_count))


def _create_default_param(execution_context, param):
    if param.star_count == 1:
        result_arg = LazyKnownValue(
            iterable.FakeSequence(execution_context.inference_state, u'tuple', [])
        )
    elif param.star_count == 2:
        result_arg = LazyKnownValue(
            iterable.FakeDict(execution_context.inference_state, {})
        )
    elif param.default is None:
        result_arg = LazyUnknownValue()
    else:
        result_arg = LazyTreeValue(execution_context.parent_context, param.default)
    return ExecutedParamName(execution_context, param, result_arg)


def create_default_params(execution_context, funcdef):
    return [_create_default_param(execution_context, p)
            for p in funcdef.get_params()]

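
A hedged analogy for get_executed_param_names_and_issues (this is not jedi's code): the matching it performs — positional and keyword arguments consumed in declaration order, leftovers packed into *args/**kwargs, and a TypeError-style issue on a wrong argument count — mirrors what inspect.Signature.bind does at runtime.

import inspect

def bind_like_jedi(func, *args, **kwargs):
    # Signature.bind raises TypeError on an arity mismatch, just as the
    # code above records an _error_argument_count issue instead of a result.
    return inspect.signature(func).bind(*args, **kwargs).arguments

def f(a, b=1, *rest, **options):
    pass

print(bind_like_jedi(f, 1, 2, 3, x=4))
# roughly: {'a': 1, 'b': 2, 'rest': (3,), 'options': {'x': 4}}
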
@@ -133,8 +133,7 @@ class ExecutionRecursionDetector(object):
        self._execution_count += 1

        if self._funcdef_execution_counts.setdefault(funcdef, 0) >= per_function_execution_limit:
            # TODO why check for builtins here again?
            if module_context.py__name__() in ('builtins', 'typing'):
            if module_context.py__name__() == 'typing':
                return False
            debug.warning(
                'Per function execution limit (%s) reached: %s',

@@ -1,5 +1,7 @@
from jedi._compatibility import Parameter
from jedi.cache import memoize_method
from jedi import debug
from jedi import parser_utils


class _SignatureMixin(object):
@@ -55,6 +57,8 @@ class AbstractSignature(_SignatureMixin):
        raise NotImplementedError

    def __repr__(self):
        if self.value is self._function_value:
            return '<%s: %s>' % (self.__class__.__name__, self.value)
        return '<%s: %s, %s>' % (self.__class__.__name__, self.value, self._function_value)


@@ -89,6 +93,26 @@ class TreeSignature(AbstractSignature):
        params = process_params(params)
        return params

    def matches_signature(self, arguments):
        from jedi.inference.param import get_executed_param_names_and_issues
        executed_param_names, issues = \
            get_executed_param_names_and_issues(self._function_value, arguments)
        if issues:
            return False

        matches = all(executed_param_name.matches_signature()
                      for executed_param_name in executed_param_names)
        if debug.enable_notice:
            tree_node = self._function_value.tree_node
            signature = parser_utils.get_call_signature(tree_node)
            if matches:
                debug.dbg("Overloading match: %s@%s (%s)",
                          signature, tree_node.start_pos[0], arguments, color='BLUE')
            else:
                debug.dbg("Overloading no match: %s@%s (%s)",
                          signature, tree_node.start_pos[0], arguments, color='BLUE')
        return matches


class BuiltinSignature(AbstractSignature):
    def __init__(self, value, return_string, is_bound=False):

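
To see matches_signature at work, an illustrative sketch (assuming a default typeshed-backed environment, which this jedi version uses): typeshed declares builtins.round with overloads, and the overload that matches the arguments decides the inferred type.

import jedi

# In typeshed, round(number) -> int and round(number, ndigits) -> float,
# so the second argument should flip the inferred type.
print([d.name for d in jedi.Script('x = round(1.5)\nx', line=2, column=1).goto_definitions()])
print([d.name for d in jedi.Script('x = round(1.5, 2)\nx', line=2, column=1).goto_definitions()])
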
@@ -47,8 +47,6 @@ def _iter_nodes_for_param(param_name):
            )
            for c in values:
                yield c, args
        else:
            assert False


def _goes_to_param_name(param_name, context, potential_name):

@@ -9,7 +9,7 @@ from jedi._compatibility import force_unicode, unicode
from jedi import debug
from jedi import parser_utils
from jedi.inference.base_value import ValueSet, NO_VALUES, ContextualizedNode, \
    ContextualizedName, iterator_to_value_set, iterate_values
    iterator_to_value_set, iterate_values
from jedi.inference.lazy_value import LazyTreeValue
from jedi.inference import compiled
from jedi.inference import recursion
@@ -18,12 +18,15 @@ from jedi.inference import imports
from jedi.inference import arguments
from jedi.inference.value import ClassValue, FunctionValue
from jedi.inference.value import iterable
from jedi.inference.value.dynamic_arrays import ListModification, DictModification
from jedi.inference.value import TreeInstance
from jedi.inference.helpers import is_string, is_literal, is_number
from jedi.inference.helpers import is_string, is_literal, is_number, get_names_of_node
from jedi.inference.compiled.access import COMPARISON_OPERATORS
from jedi.inference.cache import inference_state_method_cache
from jedi.inference.gradual.stub_value import VersionInfo
from jedi.inference.gradual import annotation
from jedi.inference.names import TreeNameDefinition
from jedi.inference.context import CompForContext
from jedi.inference.value.decorator import Decoratee
from jedi.plugins import plugin_manager

@@ -64,9 +67,99 @@ def _py__stop_iteration_returns(generators):
    return results


def infer_node(context, element):
    if isinstance(context, CompForContext):
        return _infer_node(context, element)

    if_stmt = element
    while if_stmt is not None:
        if_stmt = if_stmt.parent
        if if_stmt.type in ('if_stmt', 'for_stmt'):
            break
        if parser_utils.is_scope(if_stmt):
            if_stmt = None
            break
    predefined_if_name_dict = context.predefined_names.get(if_stmt)
    # TODO there's a lot of issues with this one. We actually should do
    # this in a different way. Caching should only be active in certain
    # cases and this all sucks.
    if predefined_if_name_dict is None and if_stmt \
            and if_stmt.type == 'if_stmt' and context.inference_state.is_analysis:
        if_stmt_test = if_stmt.children[1]
        name_dicts = [{}]
        # If we already did a check, we don't want to do it again -> If
        # value.predefined_names is filled, we stop.
        # We don't want to check the if stmt itself, it's just about
        # the content.
        if element.start_pos > if_stmt_test.end_pos:
            # Now we need to check if the names in the if_stmt match the
            # names in the suite.
            if_names = get_names_of_node(if_stmt_test)
            element_names = get_names_of_node(element)
            str_element_names = [e.value for e in element_names]
            if any(i.value in str_element_names for i in if_names):
                for if_name in if_names:
                    definitions = context.inference_state.goto_definitions(context, if_name)
                    # Every name that has multiple different definitions
                    # causes the complexity to rise. The complexity should
                    # never fall below 1.
                    if len(definitions) > 1:
                        if len(name_dicts) * len(definitions) > 16:
                            debug.dbg('Too many options for if branch inference %s.', if_stmt)
                            # There's only a certain amount of branches
                            # Jedi can infer, otherwise it will take too
                            # long.
                            name_dicts = [{}]
                            break

                        original_name_dicts = list(name_dicts)
                        name_dicts = []
                        for definition in definitions:
                            new_name_dicts = list(original_name_dicts)
                            for i, name_dict in enumerate(new_name_dicts):
                                new_name_dicts[i] = name_dict.copy()
                                new_name_dicts[i][if_name.value] = ValueSet([definition])

                            name_dicts += new_name_dicts
                    else:
                        for name_dict in name_dicts:
                            name_dict[if_name.value] = definitions
        if len(name_dicts) > 1:
            result = NO_VALUES
            for name_dict in name_dicts:
                with context.predefine_names(if_stmt, name_dict):
                    result |= _infer_node(context, element)
            return result
        else:
            return _infer_node_if_inferred(context, element)
    else:
        if predefined_if_name_dict:
            return _infer_node(context, element)
        else:
            return _infer_node_if_inferred(context, element)


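
What the branch handling above buys, sketched with this version's public API (an illustrative snippet, not from the commit): a name that takes different types in different branches is inferred as each of them, because infer_node re-runs the node once per flattened name_dict.

import jedi

source = '''\
if condition:
    x = 1
else:
    x = "s"
x'''
script = jedi.Script(source, line=5, column=1)
print(sorted(d.name for d in script.goto_definitions()))  # expected: ['int', 'str']
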
def _infer_node_if_inferred(context, element):
    """
    TODO This function is temporary: Merge with infer_node.
    """
    parent = element
    while parent is not None:
        parent = parent.parent
        predefined_if_name_dict = context.predefined_names.get(parent)
        if predefined_if_name_dict is not None:
            return _infer_node(context, element)
    return _infer_node_cached(context, element)


@inference_state_method_cache(default=NO_VALUES)
def _infer_node_cached(context, element):
    return _infer_node(context, element)


@debug.increase_indent
@_limit_value_infers
def infer_node(context, element):
def _infer_node(context, element):
    debug.dbg('infer_node %s@%s in %s', element, element.start_pos, context)
    inference_state = context.inference_state
    typ = element.type
@@ -126,7 +219,7 @@ def infer_node(context, element):
        value_set = value_set.py__getattribute__(next_name, name_context=context)
        return value_set
    elif typ == 'eval_input':
        return infer_node(context, element.children[0])
        return context.infer_node(element.children[0])
    elif typ == 'annassign':
        return annotation.infer_annotation(context, element.children[1]) \
            .execute_annotation()
@@ -141,7 +234,7 @@ def infer_node(context, element):
        # Generator.send() is not implemented.
        return NO_VALUES
    elif typ == 'namedexpr_test':
        return infer_node(context, element.children[2])
        return context.infer_node(element.children[2])
    else:
        return infer_or_test(context, element)

@@ -264,20 +357,6 @@ def infer_atom(context, atom):
@_limit_value_infers
def infer_expr_stmt(context, stmt, seek_name=None):
    with recursion.execution_allowed(context.inference_state, stmt) as allowed:
        # Here we allow list/set to recurse under certain conditions. To make
        # it possible to resolve stuff like list(set(list(x))), this is
        # necessary.
        if not allowed and context.get_root_context().is_builtins_module():
            try:
                instance = context.var_args.instance
            except AttributeError:
                pass
            else:
                if instance.name.string_name in ('list', 'set'):
                    c = instance.get_first_non_keyword_argument_values()
                    if instance not in c:
                        allowed = True

        if allowed:
            return _infer_expr_stmt(context, stmt, seek_name)
    return NO_VALUES
@@ -291,24 +370,53 @@ def _infer_expr_stmt(context, stmt, seek_name=None):
    names are defined in the statement, `seek_name` returns the result for
    this name.

    expr_stmt: testlist_star_expr (annassign | augassign (yield_expr|testlist) |
                                   ('=' (yield_expr|testlist_star_expr))*)
    annassign: ':' test ['=' test]
    augassign: ('+=' | '-=' | '*=' | '@=' | '/=' | '%=' | '&=' | '|=' | '^=' |
                '<<=' | '>>=' | '**=' | '//=')

    :param stmt: A `tree.ExprStmt`.
    """
    def check_setitem(stmt):
        atom_expr = stmt.children[0]
        if atom_expr.type not in ('atom_expr', 'power'):
            return False, None
        name = atom_expr.children[0]
        if name.type != 'name' or len(atom_expr.children) != 2:
            return False, None
        trailer = atom_expr.children[-1]
        return trailer.children[0] == '[', trailer.children[1]

    debug.dbg('infer_expr_stmt %s (%s)', stmt, seek_name)
    rhs = stmt.get_rhs()
    value_set = context.infer_node(rhs)

    if seek_name:
        c_node = ContextualizedName(context, seek_name)
        value_set = check_tuple_assignments(c_node, value_set)
        n = TreeNameDefinition(context, seek_name)
        value_set = check_tuple_assignments(n, value_set)

    first_operator = next(stmt.yield_operators(), None)
    if first_operator not in ('=', None) and first_operator.type == 'operator':
    is_setitem, subscriptlist = check_setitem(stmt)
    is_annassign = first_operator not in ('=', None) and first_operator.type == 'operator'
    if is_annassign or is_setitem:
        # `=` is always the last character in aug assignments -> -1
        name = stmt.get_defined_names(include_setitem=True)[0].value
        left_values = context.py__getattribute__(name, position=stmt.start_pos)

        if is_setitem:
            def to_mod(v):
                c = ContextualizedNode(context, subscriptlist)
                if v.array_type == 'dict':
                    return DictModification(v, value_set, c)
                elif v.array_type == 'list':
                    return ListModification(v, value_set, c)
                return v

            value_set = ValueSet(to_mod(v) for v in left_values)
        else:
            operator = copy.copy(first_operator)
            operator.value = operator.value[:-1]
            name = stmt.get_defined_names()[0].value
            left = context.py__getattribute__(name, position=stmt.start_pos)

            for_stmt = tree.search_ancestor(stmt, 'for_stmt')
            if for_stmt is not None and for_stmt.type == 'for_stmt' and value_set \
                    and parser_utils.for_stmt_defines_one_name(for_stmt):
@@ -323,10 +431,10 @@ def _infer_expr_stmt(context, stmt, seek_name=None):
                dct = {for_stmt.children[1].value: lazy_value.infer()}
                with context.predefine_names(for_stmt, dct):
                    t = context.infer_node(rhs)
                left = _infer_comparison(context, left, operator, t)
                value_set = left
                left_values = _infer_comparison(context, left_values, operator, t)
                value_set = left_values
            else:
                value_set = _infer_comparison(context, left, operator, value_set)
                value_set = _infer_comparison(context, left_values, operator, value_set)
    debug.dbg('infer_expr_stmt result %s', value_set)
    return value_set

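
A small sketch of what the new check_setitem/left_values path enables (illustrative; `include_setitem` is the parso flag this commit starts using): assigning through a subscript now feeds the container's inferred values.

import jedi

source = 'd = {}\nd["key"] = 42\nd["key"]'
script = jedi.Script(source, line=3, column=8)
print([d.name for d in script.goto_definitions()])  # expected: ['int']
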
@@ -564,7 +672,7 @@ def tree_name_to_values(inference_state, context, tree_name):
        return value_set

    types = []
    node = tree_name.get_definition(import_name_always=True)
    node = tree_name.get_definition(import_name_always=True, include_setitem=True)
    if node is None:
        node = tree_name.parent
        if node.type == 'global_stmt':
@@ -598,8 +706,8 @@ def tree_name_to_values(inference_state, context, tree_name):
            contextualized_node=cn,
            is_async=node.parent.type == 'async_stmt',
        )
        c_node = ContextualizedName(context, tree_name)
        types = check_tuple_assignments(c_node, for_types)
        n = TreeNameDefinition(context, tree_name)
        types = check_tuple_assignments(n, for_types)
    elif typ == 'expr_stmt':
        types = _remove_statements(context, node, tree_name)
    elif typ == 'with_stmt':
@@ -671,13 +779,13 @@ def _apply_decorators(context, node):
    return values


def check_tuple_assignments(contextualized_name, value_set):
def check_tuple_assignments(name, value_set):
    """
    Checks if tuples are assigned.
    """
    lazy_value = None
    for index, node in contextualized_name.assignment_indexes():
        cn = ContextualizedNode(contextualized_name.context, node)
    for index, node in name.assignment_indexes():
        cn = ContextualizedNode(name.parent_context, node)
        iterated = value_set.iterate(cn)
        if isinstance(index, slice):
            # For no star unpacking is not possible.

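
check_tuple_assignments is what makes unpacking targets come out right; a minimal illustration with the public API:

import jedi

source = 'a, b = 1, "s"\nb'
script = jedi.Script(source, line=2, column=1)
print([d.name for d in script.goto_definitions()])  # expected: ['str']
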
@@ -3,7 +3,7 @@ import os
from jedi._compatibility import unicode, force_unicode, all_suffixes
from jedi.inference.cache import inference_state_method_cache
from jedi.inference.base_value import ContextualizedNode
from jedi.inference.helpers import is_string
from jedi.inference.helpers import is_string, get_str_or_none
from jedi.common.utils import traverse_parents
from jedi.parser_utils import get_cached_code_lines
from jedi.file_io import FileIO
@@ -86,8 +86,10 @@ def _paths_from_list_modifications(module_context, trailer1, trailer2):
        arg = arg.children[2]

    for value in module_context.create_context(arg).infer_node(arg):
        if is_string(value):
            abs_path = _abs_path(module_context, value.get_safe_value())
        p = get_str_or_none(value)
        if p is None:
            continue
        abs_path = _abs_path(module_context, p)
        if abs_path is not None:
            yield abs_path


@@ -1,5 +1,4 @@
from jedi.inference import imports
from jedi.inference.names import TreeNameDefinition


def _resolve_names(definition_names, avoid_names=()):
@@ -27,8 +26,7 @@ def _dictionarize(names):


def _find_names(module_context, tree_name):
    context = module_context.create_context(tree_name)
    name = TreeNameDefinition(context, tree_name)
    name = module_context.create_name(tree_name)
    found_names = set(name.goto())
    found_names.add(name)
    return _dictionarize(_resolve_names(found_names))

@@ -1,6 +1,6 @@
from jedi.inference.value.module import ModuleValue
from jedi.inference.value.klass import ClassValue
from jedi.inference.value.function import FunctionValue, \
    MethodValue, FunctionExecutionContext
    MethodValue
from jedi.inference.value.instance import AnonymousInstance, BoundMethod, \
    CompiledInstance, AbstractInstanceValue, TreeInstance

201 jedi/inference/value/dynamic_arrays.py Normal file
@@ -0,0 +1,201 @@
"""
|
||||
A module to deal with stuff like `list.append` and `set.add`.
|
||||
|
||||
Array modifications
|
||||
*******************
|
||||
|
||||
If the content of an array (``set``/``list``) is requested somewhere, the
|
||||
current module will be checked for appearances of ``arr.append``,
|
||||
``arr.insert``, etc. If the ``arr`` name points to an actual array, the
|
||||
content will be added
|
||||
|
||||
This can be really cpu intensive, as you can imagine. Because |jedi| has to
|
||||
follow **every** ``append`` and check wheter it's the right array. However this
|
||||
works pretty good, because in *slow* cases, the recursion detector and other
|
||||
settings will stop this process.
|
||||
|
||||
It is important to note that:
|
||||
|
||||
1. Array modfications work only in the current module.
|
||||
2. Jedi only checks Array additions; ``list.pop``, etc are ignored.
|
||||
"""
|
||||
from jedi import debug
|
||||
from jedi import settings
|
||||
from jedi.inference import recursion
|
||||
from jedi.inference.base_value import ValueSet, NO_VALUES, HelperValueMixin, \
|
||||
ValueWrapper
|
||||
from jedi.inference.lazy_value import LazyKnownValues
|
||||
from jedi.inference.helpers import infer_call_of_leaf
|
||||
from jedi.inference.cache import inference_state_method_cache
|
||||
|
||||
_sentinel = object()
|
||||
|
||||
|
||||
def check_array_additions(context, sequence):
|
||||
""" Just a mapper function for the internal _internal_check_array_additions """
|
||||
if sequence.array_type not in ('list', 'set'):
|
||||
# TODO also check for dict updates
|
||||
return NO_VALUES
|
||||
|
||||
return _internal_check_array_additions(context, sequence)
|
||||
|
||||
|
||||
@inference_state_method_cache(default=NO_VALUES)
|
||||
@debug.increase_indent
|
||||
def _internal_check_array_additions(context, sequence):
|
||||
"""
|
||||
Checks if a `Array` has "add" (append, insert, extend) statements:
|
||||
|
||||
>>> a = [""]
|
||||
>>> a.append(1)
|
||||
"""
|
||||
from jedi.inference import arguments
|
||||
|
||||
debug.dbg('Dynamic array search for %s' % sequence, color='MAGENTA')
|
||||
module_context = context.get_root_context()
|
||||
if not settings.dynamic_array_additions or module_context.is_compiled():
|
||||
debug.dbg('Dynamic array search aborted.', color='MAGENTA')
|
||||
return NO_VALUES
|
||||
|
||||
def find_additions(context, arglist, add_name):
|
||||
params = list(arguments.TreeArguments(context.inference_state, context, arglist).unpack())
|
||||
result = set()
|
||||
if add_name in ['insert']:
|
||||
params = params[1:]
|
||||
if add_name in ['append', 'add', 'insert']:
|
||||
for key, lazy_value in params:
|
||||
result.add(lazy_value)
|
||||
elif add_name in ['extend', 'update']:
|
||||
for key, lazy_value in params:
|
||||
result |= set(lazy_value.infer().iterate())
|
||||
return result
|
||||
|
||||
temp_param_add, settings.dynamic_params_for_other_modules = \
|
||||
settings.dynamic_params_for_other_modules, False
|
||||
|
||||
is_list = sequence.name.string_name == 'list'
|
||||
search_names = (['append', 'extend', 'insert'] if is_list else ['add', 'update'])
|
||||
|
||||
added_types = set()
|
||||
for add_name in search_names:
|
||||
try:
|
||||
possible_names = module_context.tree_node.get_used_names()[add_name]
|
||||
except KeyError:
|
||||
continue
|
||||
else:
|
||||
for name in possible_names:
|
||||
value_node = context.tree_node
|
||||
if not (value_node.start_pos < name.start_pos < value_node.end_pos):
|
||||
continue
|
||||
trailer = name.parent
|
||||
power = trailer.parent
|
||||
trailer_pos = power.children.index(trailer)
|
||||
try:
|
||||
execution_trailer = power.children[trailer_pos + 1]
|
||||
except IndexError:
|
||||
continue
|
||||
else:
|
||||
if execution_trailer.type != 'trailer' \
|
||||
or execution_trailer.children[0] != '(' \
|
||||
or execution_trailer.children[1] == ')':
|
||||
continue
|
||||
|
||||
random_context = context.create_context(name)
|
||||
|
||||
with recursion.execution_allowed(context.inference_state, power) as allowed:
|
||||
if allowed:
|
||||
found = infer_call_of_leaf(
|
||||
random_context,
|
||||
name,
|
||||
cut_own_trailer=True
|
||||
)
|
||||
if sequence in found:
|
||||
# The arrays match. Now add the results
|
||||
added_types |= find_additions(
|
||||
random_context,
|
||||
execution_trailer.children[1],
|
||||
add_name
|
||||
)
|
||||
|
||||
# reset settings
|
||||
settings.dynamic_params_for_other_modules = temp_param_add
|
||||
debug.dbg('Dynamic array result %s' % added_types, color='MAGENTA')
|
||||
return added_types
|
||||
|
||||
|
||||
def get_dynamic_array_instance(instance, arguments):
|
||||
"""Used for set() and list() instances."""
|
||||
ai = _DynamicArrayAdditions(instance, arguments)
|
||||
from jedi.inference import arguments
|
||||
return arguments.ValuesArguments([ValueSet([ai])])
|
||||
|
||||
|
||||
class _DynamicArrayAdditions(HelperValueMixin):
|
||||
"""
|
||||
Used for the usage of set() and list().
|
||||
This is definitely a hack, but a good one :-)
|
||||
It makes it possible to use set/list conversions.
|
||||
|
||||
This is not a proper context, because it doesn't have to be. It's not used
|
||||
in the wild, it's just used within typeshed as an argument to `__init__`
|
||||
for set/list and never used in any other place.
|
||||
"""
|
||||
def __init__(self, instance, arguments):
|
||||
self._instance = instance
|
||||
self._arguments = arguments
|
||||
|
||||
def py__class__(self):
|
||||
tuple_, = self._instance.inference_state.builtins_module.py__getattribute__('tuple')
|
||||
return tuple_
|
||||
|
||||
def py__iter__(self, contextualized_node=None):
|
||||
arguments = self._arguments
|
||||
try:
|
||||
_, lazy_value = next(arguments.unpack())
|
||||
except StopIteration:
|
||||
pass
|
||||
else:
|
||||
for lazy in lazy_value.infer().iterate():
|
||||
yield lazy
|
||||
|
||||
from jedi.inference.arguments import TreeArguments
|
||||
if isinstance(arguments, TreeArguments):
|
||||
additions = _internal_check_array_additions(arguments.context, self._instance)
|
||||
for addition in additions:
|
||||
yield addition
|
||||
|
||||
def iterate(self, contextualized_node=None, is_async=False):
|
||||
return self.py__iter__(contextualized_node)
|
||||
|
||||
|
||||
class _Modification(ValueWrapper):
|
||||
def __init__(self, wrapped_value, assigned_values, contextualized_key):
|
||||
super(_Modification, self).__init__(wrapped_value)
|
||||
self._assigned_values = assigned_values
|
||||
self._contextualized_key = contextualized_key
|
||||
|
||||
def py__getitem__(self, *args, **kwargs):
|
||||
return self._wrapped_value.py__getitem__(*args, **kwargs) | self._assigned_values
|
||||
|
||||
def py__simple_getitem__(self, index):
|
||||
actual = [
|
||||
v.get_safe_value(_sentinel)
|
||||
for v in self._contextualized_key.infer()
|
||||
]
|
||||
if index in actual:
|
||||
return self._assigned_values
|
||||
return self._wrapped_value.py__simple_getitem__(index)
|
||||
|
||||
|
||||
class DictModification(_Modification):
|
||||
def py__iter__(self):
|
||||
for lazy_context in self._wrapped_value.py__iter__():
|
||||
yield lazy_context
|
||||
yield self._contextualized_key
|
||||
|
||||
|
||||
class ListModification(_Modification):
|
||||
def py__iter__(self):
|
||||
for lazy_context in self._wrapped_value.py__iter__():
|
||||
yield lazy_context
|
||||
yield LazyKnownValues(self._assigned_values)
|
||||
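
The module above in action (an illustrative sketch, assuming settings.dynamic_array_additions is left at its default of True): appending to a list feeds back into later subscript inference.

import jedi

source = 'a = []\na.append(1.5)\na[0]'
script = jedi.Script(source, line=3, column=4)
print([d.name for d in script.goto_definitions()])  # expected: ['float']
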
@@ -8,9 +8,10 @@ from jedi.inference import recursion
from jedi.inference import docstrings
from jedi.inference import flow_analysis
from jedi.inference.signature import TreeSignature
from jedi.inference.arguments import AnonymousArguments
from jedi.inference.filters import ParserTreeFilter, FunctionExecutionFilter
from jedi.inference.names import ValueName, AbstractNameDefinition, ParamName
from jedi.inference.filters import ParserTreeFilter, FunctionExecutionFilter, \
    AnonymousFunctionExecutionFilter
from jedi.inference.names import ValueName, AbstractNameDefinition, \
    AnonymousParamName, ParamName
from jedi.inference.base_value import ContextualizedNode, NO_VALUES, \
    ValueSet, TreeValue, ValueWrapper
from jedi.inference.lazy_value import LazyKnownValues, LazyKnownValue, \
@@ -69,8 +70,7 @@ class FunctionMixin(object):
        return ValueSet([BoundMethod(instance, self)])

    def get_param_names(self):
        function_execution = self.as_context()
        return [ParamName(function_execution, param.name)
        return [AnonymousParamName(self, param.name)
                for param in self.tree_node.get_params()]

    @property
@@ -88,7 +88,7 @@ class FunctionMixin(object):

    def _as_context(self, arguments=None):
        if arguments is None:
            arguments = AnonymousArguments()
            return AnonymousFunctionExecution(self)
        return FunctionExecutionContext(self, arguments)

    def get_signatures(self):
@@ -127,7 +127,8 @@ class FunctionValue(use_metaclass(CachedMetaClass, FunctionMixin, FunctionAndCla
        if overloaded_funcs:
            return OverloadedFunctionValue(
                function,
                [create(f) for f in overloaded_funcs]
                # Get them into the correct order: lower line first.
                list(reversed([create(f) for f in overloaded_funcs]))
            )
        return function

@@ -159,13 +160,9 @@ class MethodValue(FunctionValue):
        return names + (self.py__name__(),)


class FunctionExecutionContext(ValueContext, TreeContextMixin):
    function_execution_filter = FunctionExecutionFilter

    def __init__(self, function_value, var_args):
        super(FunctionExecutionContext, self).__init__(function_value)
        self.function_value = function_value
        self.var_args = var_args
class BaseFunctionExecutionContext(ValueContext, TreeContextMixin):
    def _infer_annotations(self):
        raise NotImplementedError

    @inference_state_method_cache(default=NO_VALUES)
    @recursion.execution_recursion_decorator()
@@ -178,14 +175,13 @@ class FunctionExecutionContext(ValueContext, TreeContextMixin):
            value_set = NO_VALUES
            returns = get_yield_exprs(self.inference_state, funcdef)
        else:
            returns = funcdef.iter_return_stmts()
            from jedi.inference.gradual.annotation import infer_return_types
            value_set = infer_return_types(self)
            value_set = self._infer_annotations()
            if value_set:
                # If there are annotations, prefer them over anything else.
                # This will make it faster.
                return value_set
            value_set |= docstrings.infer_return_types(self.function_value)
            value_set |= docstrings.infer_return_types(self._value)
            returns = funcdef.iter_return_stmts()

        for r in returns:
            check = flow_analysis.reachability_check(self, funcdef, r)
@@ -280,32 +276,6 @@ class FunctionExecutionContext(ValueContext, TreeContextMixin):
            for lazy_value in self.get_yield_lazy_values()
        )

    def get_filters(self, until_position=None, origin_scope=None):
        yield self.function_execution_filter(self,
                                             until_position=until_position,
                                             origin_scope=origin_scope)

    @inference_state_method_cache()
    def get_executed_param_names_and_issues(self):
        return self.var_args.get_executed_param_names_and_issues(self)

    def matches_signature(self):
        executed_param_names, issues = self.get_executed_param_names_and_issues()
        if issues:
            return False

        matches = all(executed_param_name.matches_signature()
                      for executed_param_name in executed_param_names)
        if debug.enable_notice:
            signature = parser_utils.get_call_signature(self.tree_node)
            if matches:
                debug.dbg("Overloading match: %s@%s (%s)",
                          signature, self.tree_node.start_pos[0], self.var_args, color='BLUE')
            else:
                debug.dbg("Overloading no match: %s@%s (%s)",
                          signature, self.tree_node.start_pos[0], self.var_args, color='BLUE')
        return matches

    def infer(self):
        """
        Created to be used by inheritance.
@@ -347,6 +317,47 @@ class FunctionExecutionContext(ValueContext, TreeContextMixin):
        return self.get_return_values()


class FunctionExecutionContext(BaseFunctionExecutionContext):
    def __init__(self, function_value, arguments):
        super(FunctionExecutionContext, self).__init__(function_value)
        self._arguments = arguments

    def get_filters(self, until_position=None, origin_scope=None):
        yield FunctionExecutionFilter(
            self, self._value,
            until_position=until_position,
            origin_scope=origin_scope,
            arguments=self._arguments
        )

    def _infer_annotations(self):
        from jedi.inference.gradual.annotation import infer_return_types
        return infer_return_types(self._value, self._arguments)

    def get_param_names(self):
        return [
            ParamName(self._value, param.name, self._arguments)
            for param in self._value.tree_node.get_params()
        ]


class AnonymousFunctionExecution(BaseFunctionExecutionContext):
    def _infer_annotations(self):
        # I don't think inferring anonymous executions is a big thing.
        # Anonymous contexts are mostly there for the user to work in. ~ dave
        return NO_VALUES

    def get_filters(self, until_position=None, origin_scope=None):
        yield AnonymousFunctionExecutionFilter(
            self, self._value,
            until_position=until_position,
            origin_scope=origin_scope,
        )

    def get_param_names(self):
        return self._value.get_param_names()


class OverloadedFunctionValue(FunctionMixin, ValueWrapper):
    def __init__(self, function, overloaded_functions):
        super(OverloadedFunctionValue, self).__init__(function)
@@ -355,18 +366,12 @@ class OverloadedFunctionValue(FunctionMixin, ValueWrapper):
    def py__call__(self, arguments):
        debug.dbg("Execute overloaded function %s", self._wrapped_value, color='BLUE')
        function_executions = []
        value_set = NO_VALUES
        matched = False
        for f in self._overloaded_functions:
            function_execution = f.as_context(arguments)
        for signature in self.get_signatures():
            function_execution = signature.value.as_context(arguments)
            function_executions.append(function_execution)
            if function_execution.matches_signature():
                matched = True
            if signature.matches_signature(arguments):
                return function_execution.infer()

        if matched:
            return value_set

        if self.inference_state.is_analysis:
            # In this case we want precision.
            return NO_VALUES

@@ -1,29 +1,30 @@
from abc import abstractproperty

from parso.python.tree import search_ancestor

from jedi import debug
from jedi import settings
from jedi.inference import compiled
from jedi.inference.compiled.value import CompiledObjectFilter
from jedi.inference.helpers import values_from_qualified_names
from jedi.inference.filters import AbstractFilter
from jedi.inference.filters import AbstractFilter, AnonymousFunctionExecutionFilter
from jedi.inference.names import ValueName, TreeNameDefinition, ParamName
from jedi.inference.base_value import Value, NO_VALUES, ValueSet, \
    iterator_to_value_set, ValueWrapper
from jedi.inference.lazy_value import LazyKnownValue, LazyKnownValues
from jedi.inference.cache import inference_state_method_cache
from jedi.inference.arguments import AnonymousArguments, \
    ValuesArguments, TreeArgumentsWrapper
from jedi.inference.arguments import ValuesArguments, TreeArgumentsWrapper
from jedi.inference.value.function import \
    FunctionValue, FunctionMixin, OverloadedFunctionValue
from jedi.inference.value.klass import ClassValue, apply_py__get__, \
    ClassFilter
from jedi.inference.value import iterable
from jedi.parser_utils import get_parent_scope
    FunctionValue, FunctionMixin, OverloadedFunctionValue, \
    BaseFunctionExecutionContext, FunctionExecutionContext
from jedi.inference.value.klass import apply_py__get__, ClassFilter
from jedi.inference.value.dynamic_arrays import get_dynamic_array_instance


class InstanceExecutedParamName(ParamName):
    def __init__(self, instance, execution_context, tree_param):
        super(InstanceExecutedParamName, self).__init__(execution_context, tree_param.name)
    def __init__(self, instance, function_value, tree_name):
        super(InstanceExecutedParamName, self).__init__(
            function_value, tree_name, arguments=None)
        self._instance = instance

    def infer(self):
@@ -33,40 +34,54 @@ class InstanceExecutedParamName(ParamName):
        return True


class AnonymousInstanceArguments(AnonymousArguments):
    def __init__(self, instance):
class AnonymousMethodExecutionFilter(AnonymousFunctionExecutionFilter):
    def __init__(self, instance, *args, **kwargs):
        super(AnonymousMethodExecutionFilter, self).__init__(*args, **kwargs)
        self._instance = instance

    def get_executed_param_names_and_issues(self, execution_context):
        from jedi.inference.dynamic import search_param_names
        tree_params = execution_context.tree_node.get_params()
        if not tree_params:
            return [], []
    def _convert_param(self, param, name):
        if param.position_index == 0:
            return InstanceExecutedParamName(self._instance, self._function_value, name)
        return super(AnonymousMethodExecutionFilter, self)._convert_param(param, name)

        self_param = InstanceExecutedParamName(
            self._instance, execution_context, tree_params[0])
        if len(tree_params) == 1:
            # If the only param is self, we don't need to try to find
            # executions of this function, we have all the params already.
            return [self_param], []
        executed_param_names = list(search_param_names(
            execution_context.inference_state,
            execution_context,
            execution_context.tree_node
        ))
        executed_param_names[0] = self_param
        return executed_param_names, []

class AnonymousMethodExecutionContext(BaseFunctionExecutionContext):
    def __init__(self, instance, value):
        super(AnonymousMethodExecutionContext, self).__init__(value)
        self.instance = instance

    def get_filters(self, until_position=None, origin_scope=None):
        yield AnonymousMethodExecutionFilter(
            self.instance, self, self._value,
            until_position=until_position,
            origin_scope=origin_scope,
        )

    def get_param_names(self):
        param_names = list(self._value.get_param_names())
        # set the self name
        param_names[0] = InstanceExecutedParamName(
            self.instance,
            self._function_value,
            param_names[0].tree_name
        )
        return param_names


class MethodExecutionContext(FunctionExecutionContext):
    def __init__(self, instance, *args, **kwargs):
        super(MethodExecutionContext, self).__init__(*args, **kwargs)
        self.instance = instance


class AbstractInstanceValue(Value):
    api_type = u'instance'

    def __init__(self, inference_state, parent_context, class_value, var_args):
    def __init__(self, inference_state, parent_context, class_value):
        super(AbstractInstanceValue, self).__init__(inference_state, parent_context)
        # Generated instances are classes that are just generated by self
        # (No var_args) used.
        # (No arguments) used.
        self.class_value = class_value
        self.var_args = var_args

    def is_instance(self):
        return True
@@ -77,14 +92,6 @@ class AbstractInstanceValue(Value):
    def get_annotated_class_object(self):
        return self.class_value  # This is the default.

    def py__call__(self, arguments):
        names = self.get_function_slot_names(u'__call__')
        if not names:
            # Means the Instance is not callable.
            return super(AbstractInstanceValue, self).py__call__(arguments)

        return ValueSet.from_sets(name.infer().execute(arguments) for name in names)

    def py__class__(self):
        return self.class_value

@@ -92,42 +99,62 @@ class AbstractInstanceValue(Value):
        # Signalize that we don't know about the bool type.
        return None

    def get_function_slot_names(self, name):
        # Python classes don't look at the dictionary of the instance when
        # looking up `__call__`. This is something that has to do with Python's
        # internal slot system (note: not __slots__, but C slots).
        for filter in self.get_filters(include_self_names=False):
            names = filter.get(name)
            if names:
                return names
        return []
    @abstractproperty
    def name(self):
        raise NotImplementedError

    def execute_function_slots(self, names, *inferred_args):
        return ValueSet.from_sets(
            name.infer().execute_with_values(*inferred_args)
            for name in names
    def get_signatures(self):
        call_funcs = self.py__getattribute__('__call__').py__get__(self, self.class_value)
        return [s.bind(self) for s in call_funcs.get_signatures()]

    def __repr__(self):
        return "<%s of %s>" % (self.__class__.__name__, self.class_value)


class CompiledInstance(AbstractInstanceValue):
    def __init__(self, inference_state, parent_context, class_value, arguments):
        super(CompiledInstance, self).__init__(inference_state, parent_context,
                                               class_value)
        self._arguments = arguments

    def get_filters(self, origin_scope=None, include_self_names=True):
        class_value = self.get_annotated_class_object()
        class_filters = class_value.get_filters(
            origin_scope=origin_scope,
            is_instance=True,
        )
        for f in class_filters:
            yield CompiledInstanceClassFilter(self, f)

    def py__get__(self, obj, class_value):
        """
        obj may be None.
        """
        # Arguments in __get__ descriptors are obj, class.
        # `method` is the new parent of the array, don't know if that's good.
        names = self.get_function_slot_names(u'__get__')
        if names:
            if obj is None:
                obj = compiled.builtin_from_name(self.inference_state, u'None')
            return self.execute_function_slots(names, obj, class_value)
        else:
            return ValueSet([self])
    @property
    def name(self):
        return compiled.CompiledValueName(self, self.class_value.name.string_name)

    def is_compiled(self):
        return True

    def is_stub(self):
        return False


class _BaseTreeInstance(AbstractInstanceValue):
    @property
    def array_type(self):
        name = self.class_value.py__name__()
        if name in ['list', 'set', 'dict'] \
                and self.parent_context.get_root_context().is_builtins_module():
            return name
        return None

    @property
    def name(self):
        return ValueName(self, self.class_value.name.tree_name)

    def get_filters(self, origin_scope=None, include_self_names=True):
        class_value = self.get_annotated_class_object()
        if include_self_names:
            for cls in class_value.py__mro__():
                if not isinstance(cls, compiled.CompiledObject) \
                        or cls.tree_node is not None:
                if not cls.is_compiled():
                    # In this case we're excluding compiled objects that are
                    # not fake objects. It doesn't make sense for normal
                    # compiled objects to search for self variables.
@@ -146,6 +173,52 @@ class AbstractInstanceValue(Value):
            # Probably from the metaclass.
            yield f

    def _get_annotation_init_functions(self):
        filter = next(self.class_value.get_filters())
        for init_name in filter.get('__init__'):
            for init in init_name.infer():
                if init.is_function():
                    for signature in init.get_signatures():
                        yield signature.value

    @inference_state_method_cache()
    def create_instance_context(self, class_context, node):
        new = node
        while True:
            func_node = new
            new = search_ancestor(new, 'funcdef', 'classdef')
            if class_context.tree_node is new:
                func = FunctionValue.from_context(class_context, func_node)
                bound_method = BoundMethod(self, func)
                if func_node.name.value == '__init__':
                    context = bound_method.as_context(self._arguments)
                else:
                    context = bound_method.as_context()
                break
        return context.create_context(node)

    def py__getattribute__alternatives(self, string_name):
        '''
        Since nothing was inferred, now check the __getattr__ and
        __getattribute__ methods. Stubs don't need to be checked, because
        they don't contain any logic.
        '''
        if self.is_stub():
            return NO_VALUES

        name = compiled.create_simple_object(self.inference_state, string_name)

        # This is a little bit special. `__getattribute__` is in Python
        # executed before `__getattr__`. But: I know no use case, where
        # this could be practical and where Jedi would return wrong types.
        # If you ever find something, let me know!
        # We are inversing this, because a hand-crafted `__getattribute__`
        # could still call another hand-crafted `__getattr__`, but not the
        # other way around.
        names = (self.get_function_slot_names(u'__getattr__') or
                 self.get_function_slot_names(u'__getattribute__'))
        return self.execute_function_slots(names, name)

    def py__getitem__(self, index_value_set, contextualized_node):
        names = self.get_function_slot_names(u'__getitem__')
        if not names:
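
py__getattribute__alternatives is the fallback that makes hand-written __getattr__ proxies resolvable; a hedged example (made-up class, era-appropriate API):

import jedi

source = '''\
class Proxy:
    def __getattr__(self, name):
        return 42
Proxy().missing'''
script = jedi.Script(source, line=4, column=15)
print([d.name for d in script.goto_definitions()])  # expected: ['int']
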
@@ -182,97 +255,60 @@ class AbstractInstanceValue(Value):
            yield lazy_value
        return iterate()

    @abstractproperty
    def name(self):
        pass
    def py__call__(self, arguments):
        names = self.get_function_slot_names(u'__call__')
        if not names:
            # Means the Instance is not callable.
            return super(AbstractInstanceValue, self).py__call__(arguments)

    def create_init_executions(self):
        for name in self.get_function_slot_names(u'__init__'):
            # TODO is this correct? I think we need to check for functions.
            if isinstance(name, LazyInstanceClassName):
                function = FunctionValue.from_context(
                    self.parent_context,
                    name.tree_name.parent
        return ValueSet.from_sets(name.infer().execute(arguments) for name in names)

    def py__get__(self, obj, class_value):
        """
        obj may be None.
        """
        # Arguments in __get__ descriptors are obj, class.
        # `method` is the new parent of the array, don't know if that's good.
        names = self.get_function_slot_names(u'__get__')
        if names:
            if obj is None:
                obj = compiled.builtin_from_name(self.inference_state, u'None')
            return self.execute_function_slots(names, obj, class_value)
        else:
            return ValueSet([self])

    def get_function_slot_names(self, name):
        # Python classes don't look at the dictionary of the instance when
        # looking up `__call__`. This is something that has to do with Python's
        # internal slot system (note: not __slots__, but C slots).
        for filter in self.get_filters(include_self_names=False):
            names = filter.get(name)
            if names:
                return names
        return []

    def execute_function_slots(self, names, *inferred_args):
        return ValueSet.from_sets(
            name.infer().execute_with_values(*inferred_args)
            for name in names
        )
                bound_method = BoundMethod(self, function)
                yield bound_method.as_context(self.var_args)

    @inference_state_method_cache()
    def create_instance_context(self, class_context, node):
        if node.parent.type in ('funcdef', 'classdef'):
            node = node.parent
        scope = get_parent_scope(node)
        if scope == class_context.tree_node:
            return class_context
        else:
            parent_context = self.create_instance_context(class_context, scope)
            if scope.type == 'funcdef':
                func = FunctionValue.from_context(
                    parent_context,
                    scope,
                )
                bound_method = BoundMethod(self, func)
                if scope.name.value == '__init__' and parent_context == class_context:
                    return bound_method.as_context(self.var_args)
                else:
                    return bound_method.as_context()
            elif scope.type == 'classdef':
                class_context = ClassValue(self.inference_state, parent_context, scope)
                return class_context.as_context()
            elif scope.type in ('comp_for', 'sync_comp_for'):
                # Comprehensions currently don't have a special scope in Jedi.
                return self.create_instance_context(class_context, scope)
            else:
                raise NotImplementedError
        return class_context

    def get_signatures(self):
        call_funcs = self.py__getattribute__('__call__').py__get__(self, self.class_value)
        return [s.bind(self) for s in call_funcs.get_signatures()]

    def __repr__(self):
        return "<%s of %s(%s)>" % (self.__class__.__name__, self.class_value,
                                   self.var_args)


class CompiledInstance(AbstractInstanceValue):
    def __init__(self, inference_state, parent_context, class_value, var_args):
        self._original_var_args = var_args
        super(CompiledInstance, self).__init__(inference_state, parent_context, class_value, var_args)

    @property
    def name(self):
        return compiled.CompiledValueName(self, self.class_value.name.string_name)

    def get_first_non_keyword_argument_values(self):
        key, lazy_value = next(self._original_var_args.unpack(), ('', None))
        if key is not None:
            return NO_VALUES

        return lazy_value.infer()

    def is_stub(self):
        return False


class TreeInstance(AbstractInstanceValue):
    def __init__(self, inference_state, parent_context, class_value, var_args):
class TreeInstance(_BaseTreeInstance):
    def __init__(self, inference_state, parent_context, class_value, arguments):
        # I don't think that dynamic append lookups should happen here. That
        # sounds more like something that should go to py__iter__.
        if class_value.py__name__() in ['list', 'set'] \
                and parent_context.get_root_context().is_builtins_module():
            # compare the module path with the builtin name.
            if settings.dynamic_array_additions:
                var_args = iterable.get_dynamic_array_instance(self, var_args)
                arguments = get_dynamic_array_instance(self, arguments)

        super(TreeInstance, self).__init__(inference_state, parent_context,
                                           class_value, var_args)
        super(_BaseTreeInstance, self).__init__(inference_state, parent_context,
                                                class_value)
        self._arguments = arguments
        self.tree_node = class_value.tree_node

    @property
    def name(self):
        return ValueName(self, self.class_value.name.tree_name)

    # This can recurse, if the initialization of the class includes a reference
    # to itself.
    @inference_state_method_cache(default=None)
@@ -280,19 +316,20 @@ class TreeInstance(AbstractInstanceValue):
        from jedi.inference.gradual.annotation import py__annotations__, \
            infer_type_vars_for_execution

        for func in self._get_annotation_init_functions():
        args = InstanceArguments(self, self._arguments)
        for signature in self.class_value.py__getattribute__('__init__').get_signatures():
            # Just take the first result, it should always be one, because we
            # control the typeshed code.
            bound = BoundMethod(self, func)
            execution = bound.as_context(self.var_args)
            if not execution.matches_signature():
            if not signature.matches_signature(args):
                # First check if the signature even matches, if not we don't
                # need to infer anything.
                continue

            all_annotations = py__annotations__(execution.tree_node)
            bound_method = BoundMethod(self, signature.value)
            all_annotations = py__annotations__(signature.value.tree_node)
            type_var_dict = infer_type_vars_for_execution(bound_method, args, all_annotations)
            if type_var_dict:
                defined, = self.class_value.define_generics(
                    infer_type_vars_for_execution(execution, all_annotations),
                    infer_type_vars_for_execution(signature.value, args, all_annotations),
                )
                debug.dbg('Inferred instance value as %s', defined, color='BLUE')
                return defined
@@ -301,52 +338,39 @@ class TreeInstance(AbstractInstanceValue):
    def get_annotated_class_object(self):
        return self._get_annotated_class_object() or self.class_value

    def _get_annotation_init_functions(self):
        filter = next(self.class_value.get_filters())
        for init_name in filter.get('__init__'):
            for init in init_name.infer():
                if init.is_function():
                    for signature in init.get_signatures():
                        yield signature.value

    def py__getattribute__alternatives(self, string_name):
        '''
        Since nothing was inferred, now check the __getattr__ and
        __getattribute__ methods. Stubs don't need to be checked, because
        they don't contain any logic.
        '''
        if self.is_stub():
            return NO_VALUES

        name = compiled.create_simple_object(self.inference_state, string_name)

        # This is a little bit special. `__getattribute__` is in Python
        # executed before `__getattr__`. But: I know no use case, where
        # this could be practical and where Jedi would return wrong types.
        # If you ever find something, let me know!
        # We are inversing this, because a hand-crafted `__getattribute__`
        # could still call another hand-crafted `__getattr__`, but not the
        # other way around.
        names = (self.get_function_slot_names(u'__getattr__') or
                 self.get_function_slot_names(u'__getattribute__'))
        return self.execute_function_slots(names, name)


class AnonymousInstance(TreeInstance):
    def __init__(self, inference_state, parent_context, class_value):
        super(AnonymousInstance, self).__init__(
            inference_state,
            parent_context,
            class_value,
            var_args=AnonymousInstanceArguments(self),
    def py__simple_getitem__(self, index):
        if self.array_type == 'dict':
            # Logic for dict({'foo': bar}) and dict(foo=bar)
            # reversed, because:
            # >>> dict({'a': 1}, a=3)
            # {'a': 3}
            # TODO tuple initializations
            # >>> dict([('a', 4)])
            # {'a': 4}
            for key, lazy_context in reversed(list(self._arguments.unpack())):
                if key is None:
                    values = ValueSet.from_sets(
                        dct_value.py__simple_getitem__(index)
                        for dct_value in lazy_context.infer()
                        if dct_value.array_type == 'dict'
                    )
                    if values:
                        return values
                else:
                    if key == index:
                        return lazy_context.infer()
        return super(TreeInstance, self).py__simple_getitem__(index)

    def get_annotated_class_object(self):
        return self.class_value  # This is the default.
    def __repr__(self):
        return "<%s of %s(%s)>" % (self.__class__.__name__, self.class_value,
                                   self._arguments)


class AnonymousInstance(_BaseTreeInstance):
    _arguments = None


class CompiledInstanceName(compiled.CompiledName):

    def __init__(self, inference_state, instance, klass, name):
        super(CompiledInstanceName, self).__init__(
            inference_state,
@@ -397,14 +421,15 @@ class BoundMethod(FunctionMixin, ValueWrapper):
|
||||
return c
|
||||
|
||||
def _get_arguments(self, arguments):
|
||||
if arguments is None:
|
||||
arguments = AnonymousInstanceArguments(self.instance)
|
||||
|
||||
assert arguments is not None
|
||||
return InstanceArguments(self.instance, arguments)
|
||||
|
||||
def as_context(self, arguments=None):
|
||||
def _as_context(self, arguments=None):
|
||||
if arguments is None:
|
||||
return AnonymousMethodExecutionContext(self.instance, self)
|
||||
|
||||
arguments = self._get_arguments(arguments)
|
||||
return super(BoundMethod, self).as_context(arguments)
|
||||
return MethodExecutionContext(self.instance, self, arguments)
|
||||
|
||||
def py__call__(self, arguments):
|
||||
if isinstance(self._wrapped_value, OverloadedFunctionValue):
|
||||
@@ -445,10 +470,7 @@ class SelfName(TreeNameDefinition):
|
||||
|
||||
@property
|
||||
def parent_context(self):
|
||||
return self._instance.create_instance_context(
|
||||
self.class_context,
|
||||
self.tree_name
|
||||
)
|
||||
return self._instance.create_instance_context(self.class_context, self.tree_name)
|
||||
|
||||
|
||||
class LazyInstanceClassName(object):
|
||||
@@ -509,9 +531,9 @@ class SelfAttributeFilter(ClassFilter):
|
||||
self._instance = instance
|
||||
|
||||
def _filter(self, names):
|
||||
names = self._filter_self_names(names)
|
||||
start, end = self._parser_scope.start_pos, self._parser_scope.end_pos
|
||||
return [n for n in names if start < n.start_pos < end]
|
||||
names = [n for n in names if start < n.start_pos < end]
|
||||
return self._filter_self_names(names)
|
||||
|
||||
def _filter_self_names(self, names):
|
||||
for name in names:
|
||||
@@ -520,9 +542,20 @@ class SelfAttributeFilter(ClassFilter):
|
||||
and len(trailer.parent.children) == 2 \
|
||||
and trailer.children[0] == '.':
|
||||
if name.is_definition() and self._access_possible(name, from_instance=True):
|
||||
# TODO filter non-self assignments.
|
||||
# TODO filter non-self assignments instead of this bad
|
||||
# filter.
|
||||
if self._is_in_right_scope(name):
|
||||
yield name
|
||||
|
||||
def _is_in_right_scope(self, name):
|
||||
base = name
|
||||
hit_funcdef = False
|
||||
while True:
|
||||
base = search_ancestor(base, 'funcdef', 'classdef', 'lambdef')
|
||||
if base is self._parser_scope:
|
||||
return hit_funcdef
|
||||
hit_funcdef = True
|
||||
|
||||
def _convert_names(self, names):
|
||||
return [SelfName(self._instance, self._node_context, name) for name in names]
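# For illustration (assumed behavior): this filter is what surfaces instance
# attributes that are only ever assigned inside methods, e.g.
#
#     class Foo(object):
#         def setup(self):
#             self.bar = 3
#
#     Foo().bar  # found through SelfAttributeFilter, inferred as int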

@@ -539,9 +572,3 @@ class InstanceArguments(TreeArgumentsWrapper):
        yield None, LazyKnownValue(self.instance)
        for values in self._wrapped_arguments.unpack(func):
            yield values

    def get_executed_param_names_and_issues(self, execution_context):
        if isinstance(self._wrapped_arguments, AnonymousInstanceArguments):
            return self._wrapped_arguments.get_executed_param_names_and_issues(execution_context)

        return super(InstanceArguments, self).get_executed_param_names_and_issues(execution_context)

@@ -1,45 +1,25 @@
"""
Contains all classes and functions to deal with lists, dicts, generators and
iterators in general.

Array modifications
*******************

If the content of an array (``set``/``list``) is requested somewhere, the
current module will be checked for appearances of ``arr.append``,
``arr.insert``, etc. If the ``arr`` name points to an actual array, the
content will be added.

This can be really CPU intensive, as you can imagine, because |jedi| has to
follow **every** ``append`` and check whether it's the right array. However,
this works pretty well, because in *slow* cases the recursion detector and
other settings will stop this process.

It is important to note that:

1. Array modifications work only in the current module.
2. Jedi only checks Array additions; ``list.pop``, etc. are ignored.
"""
import sys

from jedi import debug
from jedi import settings
from jedi._compatibility import force_unicode, is_py3
from jedi.inference import compiled
from jedi.inference import analysis
from jedi.inference import recursion
from jedi.inference.lazy_value import LazyKnownValue, LazyKnownValues, \
    LazyTreeValue
from jedi.inference.helpers import get_int_or_none, is_string, \
    infer_call_of_leaf, reraise_getitem_errors, SimpleGetItemNotFound
    reraise_getitem_errors, SimpleGetItemNotFound
from jedi.inference.utils import safe_property, to_list
from jedi.inference.cache import inference_state_method_cache
from jedi.inference.filters import LazyAttributeOverwrite, publish_method
from jedi.inference.base_value import ValueSet, Value, NO_VALUES, \
    ContextualizedNode, iterate_values, HelperValueMixin, sentinel, \
    ContextualizedNode, iterate_values, sentinel, \
    LazyValueWrapper
from jedi.parser_utils import get_sync_comp_fors
from jedi.inference.context import CompForContext
from jedi.inference.value.dynamic_arrays import check_array_additions


class IterableMixin(object):
@@ -298,15 +278,14 @@ class DictComprehension(ComprehensionMixin, Sequence):
    @publish_method('values')
    def _imitate_values(self):
        lazy_value = LazyKnownValues(self._dict_values())
        return ValueSet([FakeSequence(self.inference_state, u'list', [lazy_value])])
        return ValueSet([FakeList(self.inference_state, [lazy_value])])

    @publish_method('items')
    def _imitate_items(self):
        lazy_values = [
            LazyKnownValue(
                FakeSequence(
                FakeTuple(
                    self.inference_state,
                    u'tuple',
                    [LazyKnownValues(key),
                     LazyKnownValues(value)]
                )
@@ -314,7 +293,7 @@ class DictComprehension(ComprehensionMixin, Sequence):
            for key, value in self._iterate()
        ]

        return ValueSet([FakeSequence(self.inference_state, u'list', lazy_values)])
        return ValueSet([FakeList(self.inference_state, lazy_values)])

    def get_mapping_item_values(self):
        return self._dict_keys(), self._dict_values()
@@ -344,19 +323,6 @@ class SequenceLiteralValue(Sequence):

    def py__simple_getitem__(self, index):
        """Here the index is an int/str. Raises IndexError/KeyError."""
        if self.array_type == u'dict':
            compiled_obj_index = compiled.create_simple_object(self.inference_state, index)
            for key, value in self.get_tree_entries():
                for k in self._defining_context.infer_node(key):
                    try:
                        method = k.execute_operation
                    except AttributeError:
                        pass
                    else:
                        if method(compiled_obj_index, u'==').get_safe_value():
                            return self._defining_context.infer_node(value)
            raise SimpleGetItemNotFound('No key found in dictionary %s.' % self)

        if isinstance(index, slice):
            return ValueSet([self])
        else:
@@ -369,16 +335,6 @@ class SequenceLiteralValue(Sequence):
        While values returns the possible values for any array field, this
        function returns the value for a certain index.
        """
        if self.array_type == u'dict':
            # Get keys.
            types = NO_VALUES
            for k, _ in self.get_tree_entries():
                types |= self._defining_context.infer_node(k)
            # We don't know which dict index comes first, therefore always
            # yield all the types.
            for _ in types:
                yield LazyKnownValues(types)
        else:
            for node in self.get_tree_entries():
                if node == ':' or node.type == 'subscript':
                    # TODO this should probably use at least part of the code
@@ -393,12 +349,6 @@ class SequenceLiteralValue(Sequence):
        # This function is not really used often. It's more of a try.
        return len(self.get_tree_entries())

    def _dict_values(self):
        return ValueSet.from_sets(
            self._defining_context.infer_node(v)
            for k, v in self.get_tree_entries()
        )

    def get_tree_entries(self):
        c = self.atom.children

@@ -466,22 +416,56 @@ class DictLiteralValue(_DictMixin, SequenceLiteralValue):
        self._defining_context = defining_context
        self.atom = atom

    def py__simple_getitem__(self, index):
        """Here the index is an int/str. Raises IndexError/KeyError."""
        compiled_obj_index = compiled.create_simple_object(self.inference_state, index)
        for key, value in self.get_tree_entries():
            for k in self._defining_context.infer_node(key):
                try:
                    method = k.execute_operation
                except AttributeError:
                    pass
                else:
                    if method(compiled_obj_index, u'==').get_safe_value():
                        return self._defining_context.infer_node(value)
        raise SimpleGetItemNotFound('No key found in dictionary %s.' % self)

    def py__iter__(self, contextualized_node=None):
        """
        While values returns the possible values for any array field, this
        function returns the value for a certain index.
        """
        # Get keys.
        types = NO_VALUES
        for k, _ in self.get_tree_entries():
            types |= self._defining_context.infer_node(k)
        # We don't know which dict index comes first, therefore always
        # yield all the types.
        for _ in types:
            yield LazyKnownValues(types)

    @publish_method('values')
    def _imitate_values(self):
        lazy_value = LazyKnownValues(self._dict_values())
        return ValueSet([FakeSequence(self.inference_state, u'list', [lazy_value])])
        return ValueSet([FakeList(self.inference_state, [lazy_value])])

    @publish_method('items')
    def _imitate_items(self):
        lazy_values = [
            LazyKnownValue(FakeSequence(
                self.inference_state, u'tuple',
            LazyKnownValue(FakeTuple(
                self.inference_state,
                (LazyTreeValue(self._defining_context, key_node),
                 LazyTreeValue(self._defining_context, value_node))
            )) for key_node, value_node in self.get_tree_entries()
        ]

        return ValueSet([FakeSequence(self.inference_state, u'list', lazy_values)])
        return ValueSet([FakeList(self.inference_state, lazy_values)])

    def _dict_values(self):
        return ValueSet.from_sets(
            self._defining_context.infer_node(v)
            for k, v in self.get_tree_entries()
        )

    def _dict_keys(self):
        return ValueSet.from_sets(
@@ -493,20 +477,12 @@ class DictLiteralValue(_DictMixin, SequenceLiteralValue):
        return self._dict_keys(), self._dict_values()


class _FakeArray(SequenceLiteralValue):
    def __init__(self, inference_state, container, type):
        super(SequenceLiteralValue, self).__init__(inference_state)
        self.array_type = type
        self.atom = container
        # TODO is this class really needed?


class FakeSequence(_FakeArray):
    def __init__(self, inference_state, array_type, lazy_value_list):
class _FakeSequence(Sequence):
    def __init__(self, inference_state, lazy_value_list):
        """
        type should be one of "tuple", "list"
        """
        super(FakeSequence, self).__init__(inference_state, None, array_type)
        super(_FakeSequence, self).__init__(inference_state)
        self._lazy_value_list = lazy_value_list

    def py__simple_getitem__(self, index):
@@ -527,9 +503,19 @@ class FakeSequence(_FakeArray):
        return "<%s of %s>" % (type(self).__name__, self._lazy_value_list)


class FakeDict(_DictMixin, _FakeArray):
class FakeTuple(_FakeSequence):
    array_type = u'tuple'

class FakeList(_FakeSequence):
    array_type = u'list'


class FakeDict(_DictMixin, Sequence):
    array_type = u'dict'

    def __init__(self, inference_state, dct):
        super(FakeDict, self).__init__(inference_state, dct, u'dict')
        super(FakeDict, self).__init__(inference_state)
        self._dct = dct

    def py__iter__(self, contextualized_node=None):
@@ -558,8 +544,8 @@ class FakeDict(_DictMixin, _FakeArray):

    @publish_method('values')
    def _values(self):
        return ValueSet([FakeSequence(
            self.inference_state, u'tuple',
        return ValueSet([FakeTuple(
            self.inference_state,
            [LazyKnownValues(self._dict_values())]
        )])

@@ -575,10 +561,14 @@ class FakeDict(_DictMixin, _FakeArray):
    def exact_key_items(self):
        return self._dct.items()

    def __repr__(self):
        return '<%s: %s>' % (self.__class__.__name__, self._dct)


class MergedArray(_FakeArray):
class MergedArray(Sequence):
    def __init__(self, inference_state, arrays):
        super(MergedArray, self).__init__(inference_state, arrays, arrays[-1].array_type)
        super(MergedArray, self).__init__(inference_state)
        self.array_type = arrays[-1].array_type
        self._arrays = arrays

    def py__iter__(self, contextualized_node=None):
@@ -589,14 +579,6 @@ class MergedArray(_FakeArray):
    def py__simple_getitem__(self, index):
        return ValueSet.from_sets(lazy_value.infer() for lazy_value in self.py__iter__())

    def get_tree_entries(self):
        for array in self._arrays:
            for a in array.get_tree_entries():
                yield a

    def __len__(self):
        return sum(len(a) for a in self._arrays)


def unpack_tuple_to_dict(value, types, exprlist):
    """
@@ -638,139 +620,6 @@ def unpack_tuple_to_dict(value, types, exprlist):
    raise NotImplementedError


def check_array_additions(context, sequence):
    """ Just a mapper function for the internal _check_array_additions """
    if sequence.array_type not in ('list', 'set'):
        # TODO also check for dict updates
        return NO_VALUES

    return _check_array_additions(context, sequence)


@inference_state_method_cache(default=NO_VALUES)
@debug.increase_indent
def _check_array_additions(context, sequence):
    """
    Checks if an `Array` has "add" (append, insert, extend) statements:

    >>> a = [""]
    >>> a.append(1)
    """
    from jedi.inference import arguments

    debug.dbg('Dynamic array search for %s' % sequence, color='MAGENTA')
    module_context = context.get_root_context()
    if not settings.dynamic_array_additions or module_context.is_compiled():
        debug.dbg('Dynamic array search aborted.', color='MAGENTA')
        return NO_VALUES

    def find_additions(context, arglist, add_name):
        params = list(arguments.TreeArguments(context.inference_state, context, arglist).unpack())
        result = set()
        if add_name in ['insert']:
            params = params[1:]
        if add_name in ['append', 'add', 'insert']:
            for key, lazy_value in params:
                result.add(lazy_value)
        elif add_name in ['extend', 'update']:
            for key, lazy_value in params:
                result |= set(lazy_value.infer().iterate())
        return result

    temp_param_add, settings.dynamic_params_for_other_modules = \
        settings.dynamic_params_for_other_modules, False

    is_list = sequence.name.string_name == 'list'
    search_names = (['append', 'extend', 'insert'] if is_list else ['add', 'update'])

    added_types = set()
    for add_name in search_names:
        try:
            possible_names = module_context.tree_node.get_used_names()[add_name]
        except KeyError:
            continue
        else:
            for name in possible_names:
                value_node = context.tree_node
                if not (value_node.start_pos < name.start_pos < value_node.end_pos):
                    continue
                trailer = name.parent
                power = trailer.parent
                trailer_pos = power.children.index(trailer)
                try:
                    execution_trailer = power.children[trailer_pos + 1]
                except IndexError:
                    continue
                else:
                    if execution_trailer.type != 'trailer' \
                            or execution_trailer.children[0] != '(' \
                            or execution_trailer.children[1] == ')':
                        continue

                random_context = context.create_context(name)

                with recursion.execution_allowed(context.inference_state, power) as allowed:
                    if allowed:
                        found = infer_call_of_leaf(
                            random_context,
                            name,
                            cut_own_trailer=True
                        )
                        if sequence in found:
                            # The arrays match. Now add the results
                            added_types |= find_additions(
                                random_context,
                                execution_trailer.children[1],
                                add_name
                            )

    # reset settings
    settings.dynamic_params_for_other_modules = temp_param_add
    debug.dbg('Dynamic array result %s' % added_types, color='MAGENTA')
    return added_types


def get_dynamic_array_instance(instance, arguments):
    """Used for set() and list() instances."""
    ai = _ArrayInstance(instance, arguments)
    from jedi.inference import arguments
    return arguments.ValuesArguments([ValueSet([ai])])


class _ArrayInstance(HelperValueMixin):
    """
    Used for the usage of set() and list().
    This is definitely a hack, but a good one :-)
    It makes it possible to use set/list conversions.
    """
    def __init__(self, instance, var_args):
        self.instance = instance
        self.var_args = var_args

    def py__class__(self):
        tuple_, = self.instance.inference_state.builtins_module.py__getattribute__('tuple')
        return tuple_

    def py__iter__(self, contextualized_node=None):
        var_args = self.var_args
        try:
            _, lazy_value = next(var_args.unpack())
        except StopIteration:
            pass
        else:
            for lazy in lazy_value.infer().iterate():
                yield lazy

        from jedi.inference import arguments
        if isinstance(var_args, arguments.TreeArguments):
            additions = _check_array_additions(var_args.context, self.instance)
            for addition in additions:
                yield addition

    def iterate(self, contextualized_node=None, is_async=False):
        return self.py__iter__(contextualized_node)
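# For illustration (assumed behavior): _ArrayInstance is what lets inference
# survive a set()/list() round trip.
#
#     lst = [1]
#     s = set(lst)      # s iterates like lst, plus any s.add(...) additions
#     list(s)[0]        # still inferred as int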


class Slice(LazyValueWrapper):
    def __init__(self, python_context, start, stop, step):
        self.inference_state = python_context.inference_state

@@ -120,7 +120,7 @@ class ClassFilter(ParserTreeFilter):
        # Filter for ClassVar variables
        # TODO this is not properly done, yet. It just checks for the string
        # ClassVar in the annotation, which can be quite imprecise. If we
        # wanted to do this correctly, we would have to resolve the ClassVar.
        # wanted to do this correctly, we would have to infer the ClassVar.
        if not from_instance:
            expr_stmt = name.get_definition()
            if expr_stmt is not None and expr_stmt.type == 'expr_stmt':

@@ -16,15 +16,15 @@ from jedi._compatibility import force_unicode, Parameter
from jedi import debug
from jedi.inference.utils import safe_property
from jedi.inference.helpers import get_str_or_none
from jedi.inference.arguments import ValuesArguments, \
from jedi.inference.arguments import \
    repack_with_argument_clinic, AbstractArguments, TreeArgumentsWrapper
from jedi.inference import analysis
from jedi.inference import compiled
from jedi.inference.value.instance import BoundMethod, InstanceArguments
from jedi.inference.value.instance import \
    AnonymousMethodExecutionContext, MethodExecutionContext
from jedi.inference.base_value import ContextualizedNode, \
    NO_VALUES, ValueSet, ValueWrapper, LazyValueWrapper
from jedi.inference.value import ClassValue, ModuleValue, \
    FunctionExecutionContext
from jedi.inference.value import ClassValue, ModuleValue
from jedi.inference.value.klass import ClassMixin
from jedi.inference.value.function import FunctionMixin
from jedi.inference.value import iterable
@@ -121,19 +121,7 @@ def execute(callback):
        else:
            return call()

    if isinstance(value, BoundMethod):
        if module_name == 'builtins':
            if value.py__name__() == '__get__':
                if value.class_context.py__name__() == 'property':
                    return builtins_property(
                        value,
                        arguments=arguments,
                        callback=call,
                    )
            elif value.py__name__() in ('deleter', 'getter', 'setter'):
                if value.class_context.py__name__() == 'property':
                    return ValueSet([value.instance])

    if value.is_bound_method():
        return call()

    # for now we just support builtin functions.
@@ -157,7 +145,7 @@ def _follow_param(inference_state, arguments, index):
        return lazy_value.infer()


def argument_clinic(string, want_obj=False, want_context=False,
def argument_clinic(string, want_value=False, want_context=False,
                    want_arguments=False, want_inference_state=False,
                    want_callback=False):
    """
@@ -167,18 +155,18 @@ def argument_clinic(string, want_obj=False, want_context=False,
    def f(func):
        @repack_with_argument_clinic(string, keep_arguments_param=True,
                                     keep_callback_param=True)
        def wrapper(obj, *args, **kwargs):
        def wrapper(value, *args, **kwargs):
            arguments = kwargs.pop('arguments')
            callback = kwargs.pop('callback')
            assert not kwargs  # Python 2...
            debug.dbg('builtin start %s' % obj, color='MAGENTA')
            debug.dbg('builtin start %s' % value, color='MAGENTA')
            result = NO_VALUES
            if want_context:
                kwargs['context'] = arguments.context
            if want_obj:
                kwargs['obj'] = obj
            if want_value:
                kwargs['value'] = value
            if want_inference_state:
                kwargs['inference_state'] = obj.inference_state
                kwargs['inference_state'] = value.inference_state
            if want_arguments:
                kwargs['arguments'] = arguments
            if want_callback:
@@ -191,17 +179,6 @@ def argument_clinic(string, want_obj=False, want_context=False,
    return f
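# For illustration: how a clinic string maps to a handler below. Each name in
# the string becomes one ValueSet parameter, `[...]` marks optional
# parameters, and `/` ends the positional-only section, e.g.
#
#     @argument_clinic('object, name[, default], /')
#     def builtins_getattr(objects, names, defaults=None):
#         ...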


@argument_clinic('obj, type, /', want_obj=True, want_arguments=True)
def builtins_property(objects, types, obj, arguments):
    property_args = obj.instance.var_args.unpack()
    key, lazy_value = next(property_args, (None, None))
    if key is not None or lazy_value is None:
        debug.warning('property expected a first param, not %s', arguments)
        return NO_VALUES

    return lazy_value.infer().py__call__(arguments=ValuesArguments([objects]))


@argument_clinic('iterator[, default], /', want_inference_state=True)
def builtins_next(iterators, defaults, inference_state):
    if inference_state.environment.version_info.major == 2:
@@ -223,14 +200,14 @@ def builtins_iter(iterators_or_callables, defaults):
@argument_clinic('object, name[, default], /')
def builtins_getattr(objects, names, defaults=None):
    # follow the first param
    for obj in objects:
    for value in objects:
        for name in names:
            string = get_str_or_none(name)
            if string is None:
                debug.warning('getattr called without str')
                continue
            else:
                return obj.py__getattribute__(force_unicode(string))
                return value.py__getattribute__(force_unicode(string))
    return NO_VALUES
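# For illustration (assumed behavior): with a literal string argument the
# plugin can resolve the attribute statically.
#
#     class A:
#         x = 3
#
#     getattr(A(), 'x')  # inferred as int via py__getattribute__('x')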


@@ -262,21 +239,21 @@ class SuperInstance(LazyValueWrapper):

    def get_filters(self, origin_scope=None):
        for b in self._get_bases():
            for obj in b.infer().execute_with_values():
                for f in obj.get_filters():
            for value in b.infer().execute_with_values():
                for f in value.get_filters():
                    yield f


@argument_clinic('[type[, obj]], /', want_context=True)
@argument_clinic('[type[, value]], /', want_context=True)
def builtins_super(types, objects, context):
    if isinstance(context, FunctionExecutionContext):
        if isinstance(context.var_args, InstanceArguments):
            instance = context.var_args.instance
            # TODO if a class is given it doesn't have to be the direct super
            # class, it can be an ancestor from long ago.
            return ValueSet({SuperInstance(instance.inference_state, instance)})

    instance = None
    if isinstance(context, AnonymousMethodExecutionContext):
        instance = context.instance
    elif isinstance(context, MethodExecutionContext):
        instance = context.instance
    if instance is None:
        return NO_VALUES
    return ValueSet({SuperInstance(instance.inference_state, instance)})
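# For illustration (assumed behavior): inside a method execution context the
# instance is known, so super() can be resolved.
#
#     class Base:
#         attr = 3
#
#     class Child(Base):
#         def f(self):
#             return super().attr  # completes/infers through SuperInstance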


class ReversedObject(AttributeOverwrite):
@@ -296,8 +273,8 @@ class ReversedObject(AttributeOverwrite):
        )


@argument_clinic('sequence, /', want_obj=True, want_arguments=True)
def builtins_reversed(sequences, obj, arguments):
@argument_clinic('sequence, /', want_value=True, want_arguments=True)
def builtins_reversed(sequences, value, arguments):
    # While we could do without this variable (just by using sequences), we
    # want static analysis to work well. Therefore we need to generate the
    # values again.
@@ -311,11 +288,11 @@ def builtins_reversed(sequences, obj, arguments):
    # necessary, because `reversed` is a function and autocompletion
    # would fail in certain cases like `reversed(x).__iter__` if we
    # just returned the result directly.
    seq, = obj.inference_state.typing_module.py__getattribute__('Iterator').execute_with_values()
    seq, = value.inference_state.typing_module.py__getattribute__('Iterator').execute_with_values()
    return ValueSet([ReversedObject(seq, list(reversed(ordered)))])


@argument_clinic('obj, type, /', want_arguments=True, want_inference_state=True)
@argument_clinic('value, type, /', want_arguments=True, want_inference_state=True)
def builtins_isinstance(objects, types, arguments, inference_state):
    bool_results = set()
    for o in objects:
@@ -357,10 +334,7 @@ def builtins_isinstance(objects, types, arguments, inference_state):
    )


class StaticMethodObject(AttributeOverwrite, ValueWrapper):
    def get_object(self):
        return self._wrapped_value

class StaticMethodObject(ValueWrapper):
    def py__get__(self, instance, klass):
        return ValueSet([self._wrapped_value])

@@ -370,22 +344,19 @@ def builtins_staticmethod(functions):
    return ValueSet(StaticMethodObject(f) for f in functions)


class ClassMethodObject(AttributeOverwrite, ValueWrapper):
class ClassMethodObject(ValueWrapper):
    def __init__(self, class_method_obj, function):
        super(ClassMethodObject, self).__init__(class_method_obj)
        self._function = function

    def get_object(self):
        return self._wrapped_value

    def py__get__(self, obj, class_value):
    def py__get__(self, instance, class_value):
        return ValueSet([
            ClassMethodGet(__get__, class_value, self._function)
            for __get__ in self._wrapped_value.py__getattribute__('__get__')
        ])


class ClassMethodGet(AttributeOverwrite, ValueWrapper):
class ClassMethodGet(ValueWrapper):
    def __init__(self, get_method, klass, function):
        super(ClassMethodGet, self).__init__(get_method)
        self._class = klass
@@ -394,9 +365,6 @@ class ClassMethodGet(AttributeOverwrite, ValueWrapper):
    def get_signatures(self):
        return self._function.get_signatures()

    def get_object(self):
        return self._wrapped_value

    def py__call__(self, arguments):
        return self._function.execute(ClassMethodArguments(self._class, arguments))

@@ -412,16 +380,42 @@ class ClassMethodArguments(TreeArgumentsWrapper):
        yield values


@argument_clinic('sequence, /', want_obj=True, want_arguments=True)
def builtins_classmethod(functions, obj, arguments):
@argument_clinic('sequence, /', want_value=True, want_arguments=True)
def builtins_classmethod(functions, value, arguments):
    return ValueSet(
        ClassMethodObject(class_method_object, function)
        for class_method_object in obj.py__call__(arguments=arguments)
        for class_method_object in value.py__call__(arguments=arguments)
        for function in functions
    )


def collections_namedtuple(obj, arguments, callback):
class PropertyObject(AttributeOverwrite, ValueWrapper):
    def __init__(self, property_obj, function):
        super(PropertyObject, self).__init__(property_obj)
        self._function = function

    def py__get__(self, instance, class_value):
        if instance is None:
            return NO_VALUES
        return self._function.execute_with_values(instance)

    @publish_method('deleter')
    @publish_method('getter')
    @publish_method('setter')
    def _return_self(self):
        return ValueSet({self})
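# For illustration (assumed behavior): the descriptor logic above means the
# getter only runs for instance access.
#
#     class A(object):
#         @property
#         def x(self):
#             return 42
#
#     A().x  # py__get__ executes the getter -> inferred as int
#     A.x    # instance is None -> NO_VALUES from this plugin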


@argument_clinic('func, /', want_callback=True)
def builtins_property(functions, callback):
    return ValueSet(
        PropertyObject(property_value, function)
        for property_value in callback()
        for function in functions
    )


def collections_namedtuple(value, arguments, callback):
    """
    Implementation of the namedtuple function.

@@ -429,7 +423,7 @@ def collections_namedtuple(obj, arguments, callback):
    inferring the result.

    """
    inference_state = obj.inference_state
    inference_state = value.inference_state

    # Process arguments
    name = u'jedi_unknown_namedtuple'
@@ -548,10 +542,10 @@ class MergedPartialArguments(AbstractArguments):
        yield key_lazy_value


def functools_partial(obj, arguments, callback):
def functools_partial(value, arguments, callback):
    return ValueSet(
        PartialObject(instance, arguments)
        for instance in obj.py__call__(arguments)
        for instance in value.py__call__(arguments)
    )
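# For illustration (assumed behavior): PartialObject keeps the original
# callable and the already-bound arguments together.
#
#     from functools import partial
#     add_two = partial(lambda a, b: a + b, 2)
#     add_two(3)  # jedi can still follow the underlying call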


@@ -569,12 +563,12 @@ def _random_choice(sequences):
    )


def _dataclass(obj, arguments, callback):
    for c in _follow_param(obj.inference_state, arguments, 0):
def _dataclass(value, arguments, callback):
    for c in _follow_param(value.inference_state, arguments, 0):
        if c.is_class():
            return ValueSet([DataclassWrapper(c)])
        else:
            return ValueSet([obj])
            return ValueSet([value])
    return NO_VALUES


@@ -643,9 +637,8 @@ class ItemGetterCallable(ValueWrapper):
            # TODO we need to add the contextualized value.
            value_set |= item_value_set.get_item(lazy_values[0].infer(), None)
        else:
            value_set |= ValueSet([iterable.FakeSequence(
            value_set |= ValueSet([iterable.FakeList(
                self._wrapped_value.inference_state,
                'list',
                [
                    LazyKnownValues(item_value_set.get_item(lazy_value.infer(), None))
                    for lazy_value in lazy_values
@@ -681,17 +674,17 @@ class Wrapped(ValueWrapper, FunctionMixin):
        return [self]


@argument_clinic('*args, /', want_obj=True, want_arguments=True)
def _operator_itemgetter(args_value_set, obj, arguments):
@argument_clinic('*args, /', want_value=True, want_arguments=True)
def _operator_itemgetter(args_value_set, value, arguments):
    return ValueSet([
        ItemGetterCallable(instance, args_value_set)
        for instance in obj.py__call__(arguments)
        for instance in value.py__call__(arguments)
    ])


def _create_string_input_function(func):
    @argument_clinic('string, /', want_obj=True, want_arguments=True)
    def wrapper(strings, obj, arguments):
    @argument_clinic('string, /', want_value=True, want_arguments=True)
    def wrapper(strings, value, arguments):
        def iterate():
            for value in strings:
                s = get_str_or_none(value)
@@ -701,7 +694,7 @@ def _create_string_input_function(func):
        values = ValueSet(iterate())
        if values:
            return values
        return obj.py__call__(arguments)
        return value.py__call__(arguments)
    return wrapper


@@ -738,14 +731,15 @@ _implemented = {
        'iter': builtins_iter,
        'staticmethod': builtins_staticmethod,
        'classmethod': builtins_classmethod,
        'property': builtins_property,
    },
    'copy': {
        'copy': _return_first_param,
        'deepcopy': _return_first_param,
    },
    'json': {
        'load': lambda obj, arguments, callback: NO_VALUES,
        'loads': lambda obj, arguments, callback: NO_VALUES,
        'load': lambda value, arguments, callback: NO_VALUES,
        'loads': lambda value, arguments, callback: NO_VALUES,
    },
    'collections': {
        'namedtuple': collections_namedtuple,
@@ -772,7 +766,7 @@ _implemented = {
        # The _alias function just leads to some annoying type inference.
        # Therefore, just make it return nothing, which leads to the stubs
        # being used instead. This only matters for 3.7+.
        '_alias': lambda obj, arguments, callback: NO_VALUES,
        '_alias': lambda value, arguments, callback: NO_VALUES,
    },
    'dataclasses': {
        # For now this works at least better than Jedi trying to understand it.
@@ -902,8 +896,8 @@ class EnumInstance(LazyValueWrapper):
        return ValueName(self, self._name.tree_name)

    def _get_wrapped_value(self):
        obj, = self._cls.execute_with_values()
        return obj
        value, = self._cls.execute_with_values()
        return value

    def get_filters(self, origin_scope=None):
        yield DictFilter(dict(

@@ -261,11 +261,17 @@ def y(a):
#?
y(**d)

#? str()
d['a']

# problem with more complicated casts
dic = {str(key): ''}
#? str()
dic['']

# Just skip Python 2 tests from here. EoL soon, I'm too lazy for it.
# python > 2.7


for x in {1: 3.0, '': 1j}:
    #? int() str()
@@ -278,11 +284,19 @@ d = dict(a=3, b='')
x, = d.values()
#? int() str()
x
#? int() str()
#? int()
d['a']
#? int() str() None
d.get('a')

some_dct = dict({'a': 1, 'b': ''}, a=1.0)
#? float()
some_dct['a']
#? str()
some_dct['b']
#? int() float() str()
some_dct['c']

# -----------------
# with variable as index
# -----------------

@@ -31,6 +31,7 @@ second = 1
second = ""
class TestClass(object):
    var_class = TestClass(1)
    self.pseudo_var = 3

    def __init__(self2, first_param, second_param, third=1.0):
        self2.var_inst = first_param
@@ -85,6 +86,10 @@ TestClass.var
inst.var_local
#? []
TestClass.var_local.
#?
TestClass.pseudo_var
#?
TestClass().pseudo_var

#? int()
TestClass().ret(1)

@@ -118,15 +118,15 @@ class D(): pass
class E(): pass
lst = [1]
lst.append(1.0)
lst += [C]
lst += [C()]
s = set(lst)
s.add("")
s += [D]
s += [D()]
lst = list(s)
lst.append({})
lst += [E]
lst += [E()]

##? dict() int() float() str() C D E
#? dict() int() float() str() C() D() E()
lst[0]

# -----------------
@@ -307,3 +307,77 @@ lst.append('')
#? float() int() str()
lst[0]

# -----------------
# list setitem
# -----------------

some_lst = [int]
some_lst[3] = str
#? int
some_lst[0]
#? str
some_lst[3]
#? int str
some_lst[2]

some_lst[0] = tuple
#? tuple
some_lst[0]
#? int str tuple
some_lst[1]

some_lst2 = list([1])
some_lst2[3] = ''
#? int() str()
some_lst2[0]
#? str()
some_lst2[3]
#? int() str()
some_lst2[2]

# -----------------
# set setitem/other modifications (should not work)
# -----------------

some_set = {int}
some_set[3] = str
#? int
some_set[0]
#? int
some_set[3]

something = object()
something[3] = str
#?
something[0]
#?
something[3]

# -----------------
# dict setitem
# -----------------

some_dct = {'a': float, 1: int}
some_dct['x'] = list
some_dct['y'] = tuple
#? list
some_dct['x']
#? int float list tuple
some_dct['unknown']
#? float
some_dct['a']

some_dct = dict({'a': 1, 1: ''})
#? int() str()
some_dct['la']
#? int()
some_dct['a']

some_dct['x'] = list
some_dct['y'] = tuple
#? list
some_dct['x']
#? int() str() list tuple
some_dct['unknown']
#? int()
some_dct['a']

@@ -7,7 +7,7 @@ there should never be any errors.
"""

# wait until keywords are out of definitions (pydoc function).
##? 5
#? 5
's'()

#? []
@@ -32,7 +32,7 @@ def wrong_indents():
        asdf
    asdf(
    # TODO this seems to be wrong now?
    ##? int()
    #? int()
    asdf
def openbrace():
    asdf = 3

@@ -422,3 +422,15 @@ def cast_tests():

#? str()
cast_tests()


# -------------------------
# dynamic
# -------------------------

def dynamic_annotation(x: int):
    #? int()
    return x

#? int()
dynamic_annotation('')

@@ -240,8 +240,7 @@ cls().s

import zipfile
z = zipfile.ZipFile("foo")
# It's too slow. So we don't run it at the moment.
##? ['upper']
#? ['upper']
z.read('name').upper

# -----------------

@@ -1,10 +1,10 @@

import sys
import os
from os import dirname
from os.path import dirname

sys.path.insert(0, '../../jedi')
sys.path.append(dirname(os.path.abspath('thirdparty' + os.path.sep + 'asdf')))
sys.path.append(os.path.join(dirname(__file__), 'thirdparty'))

# modifications, that should fail:
# syntax err
@@ -17,8 +17,8 @@ import inference
inference.inference_state_fu

# Those don't work because dirname and abspath are not properly understood.
##? ['jedi_']
#? ['jedi_']
import jedi_

##? ['el']
#? ['el']
jedi_.el

@@ -150,8 +150,7 @@ class NestedClass():
        return self

# Shouldn't find a definition, because there's another `instance`.
# TODO reenable that test
##< (0, 14),
#< (0, 14),
NestedClass().instance

@@ -3,8 +3,10 @@ Test all things related to the ``jedi.api`` module.
"""

import os
import sys
from textwrap import dedent

import pytest
from pytest import raises
from parso import cache

@@ -12,6 +14,7 @@ from jedi import preload_module
from jedi.inference.gradual import typeshed


@pytest.mark.skipif(sys.version_info[0] == 2, reason="Ignore Python 2, EoL")
def test_preload_modules():
    def check_loaded(*modules):
        for grammar_cache in cache.parser_cache.values():
@@ -101,7 +104,7 @@ def test_completion_on_hex_literals(Script):
    _check_number(Script, '0xE7.', 'int')
    _check_number(Script, '0xEa.', 'int')
    # theoretically, but people can just check for syntax errors:
    #assert Script('0x.').completions() == []
    assert Script('0x.').completions() == []


def test_completion_on_complex_literals(Script):

@@ -266,11 +266,11 @@ def _params(Script, source, line=None, column=None):
def test_int_params(Script):
    sig1, sig2 = Script('int(').call_signatures()
    # int is defined as: `int(x[, base])`
    assert len(sig1.params) == 2
    assert len(sig1.params) == 1
    assert sig1.params[0].name == 'x'
    assert sig1.params[1].name == 'base'
    assert len(sig2.params) == 1
    assert len(sig2.params) == 2
    assert sig2.params[0].name == 'x'
    assert sig2.params[1].name == 'base'


def test_pow_params(Script):

@@ -266,3 +266,25 @@ def test_dataclass_signature(Script, skip_pre_python37, start, start_params):
    assert quantity.name == 'int'
    price, = sig.params[-2].infer()
    assert price.name == 'float'


@pytest.mark.parametrize(
    'stmt, expected', [
        ('args = 1', 'wrapped(*args, b, c)'),
        ('args = (1,)', 'wrapped(*args, c)'),
        ('kwargs = 1', 'wrapped(b, /, **kwargs)'),
        ('kwargs = dict(b=3)', 'wrapped(b, /, **kwargs)'),
    ]
)
def test_param_resolving_to_static(Script, stmt, expected, skip_pre_python35):
    code = dedent('''\
        def full_redirect(func):
            def wrapped(*args, **kwargs):
                {stmt}
                return func(1, *args, **kwargs)
            return wrapped
        def simple(a, b, *, c): ...
        full_redirect(simple)('''.format(stmt=stmt))

    sig, = Script(code).call_signatures()
    assert sig.to_string() == expected