Fix some array tests

Dave Halter
2019-08-17 15:42:13 +02:00
parent c6d2aa6da2
commit 2629ff55f3
30 changed files with 252 additions and 226 deletions
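The diff continues jedi's value/context split: get_filters() drops its search_global flag, goto() moves off py__getattribute__(), and FunctionExecutionValue becomes FunctionExecutionContext. A standalone miniature of the calling convention the commit moves toward (simplified stand-ins, not jedi's real classes):

    class MiniValue:
        """Stand-in for a jedi Value: owns attribute filters only."""
        def __init__(self, names):
            self._names = names

        def get_filters(self, origin_scope=None):  # no search_global flag anymore
            yield self._names

        def as_context(self):
            return MiniContext(self)

    class MiniContext:
        """Stand-in for the new Context: owns scope-level name resolution."""
        def __init__(self, value):
            self._value = value

        def py__getattribute__(self, name):  # attribute lookup, no is_goto flag
            for f in self._value.get_filters():
                if name in f:
                    return f[name]

        def goto(self, name):  # replaces py__getattribute__(..., is_goto=True)
            return [name] if any(name in f for f in self._value.get_filters()) else []

    ctx = MiniValue({'append': '<method append>'}).as_context()
    print(ctx.py__getattribute__('append'))  # -> <method append>
    print(ctx.goto('append'))                # -> ['append']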

View File

@@ -82,7 +82,7 @@ Inference Values (inference/base_value.py)
jedi.inference.value.instance.TreeInstance
jedi.inference.value.klass.ClassValue
jedi.inference.value.function.FunctionValue
- jedi.inference.value.function.FunctionExecutionValue
+ jedi.inference.value.function.FunctionExecutionContext
:parts: 1

View File

@@ -14,7 +14,7 @@ from jedi.cache import memoize_method
from jedi.inference import imports
from jedi.inference import compiled
from jedi.inference.imports import ImportName
- from jedi.inference.value import FunctionExecutionValue
+ from jedi.inference.value import FunctionExecutionContext
from jedi.inference.gradual.typeshed import StubModuleValue
from jedi.inference.gradual.conversion import convert_names, convert_values
from jedi.inference.base_value import ValueSet
@@ -32,7 +32,7 @@ def defined_names(inference_state, value):
:type scope: Scope
:rtype: list of Definition
"""
- filter = next(value.get_filters(search_global=True))
+ filter = next(value.get_filters())
names = [name for name in filter.values()]
return [Definition(inference_state, n) for n in _sort_names_by_start_pos(names)]
@@ -183,7 +183,7 @@ class BaseDefinition(object):
>>> print(d.module_name) # doctest: +ELLIPSIS
json
"""
- return self._get_module().name.string_name
+ return self._get_module().py__name__()
def in_builtin_module(self):
"""Whether this is a builtin module."""
@@ -364,7 +364,7 @@ class BaseDefinition(object):
if value is None:
return None
- if isinstance(value, FunctionExecutionValue):
+ if isinstance(value, FunctionExecutionContext):
value = value.function_value
return Definition(self._inference_state, value.name)

View File

@@ -261,17 +261,13 @@ class Completion:
completion_names = []
debug.dbg('trailer completion values: %s', values, color='MAGENTA')
for value in values:
- for filter in value.get_filters(
- search_global=False,
- origin_scope=user_value.tree_node):
+ for filter in value.get_filters(origin_scope=user_value.tree_node):
completion_names += filter.values()
python_values = convert_values(values)
for c in python_values:
if c not in values:
- for filter in c.get_filters(
- search_global=False,
- origin_scope=user_value.tree_node):
+ for filter in c.get_filters(origin_scope=user_value.tree_node):
completion_names += filter.values()
return completion_names
@@ -298,7 +294,7 @@ class Completion:
if cls.start_pos[1] >= leaf.start_pos[1]:
return
- filters = random_value.get_filters(search_global=False, is_instance=True)
+ filters = random_value.get_filters(is_instance=True)
# The first dict is the dictionary of class itself.
next(filters)
for filter in filters:

View File

@@ -385,7 +385,7 @@ def cache_call_signatures(inference_state, value, bracket_leaf, code_lines, user
whole = ''.join(other_lines + [before_cursor])
before_bracket = re.match(r'.*\(', whole, re.DOTALL)
- module_path = value.get_root_value().py__file__()
+ module_path = value.get_root_context().py__file__()
if module_path is None:
yield None # Don't cache!
else:

View File

@@ -3,7 +3,7 @@ class BaseValue(object):
self.inference_state = inference_state
self.parent_context = parent_context
- def get_root_value(self):
+ def get_root_context(self):
value = self
while True:
if value.parent_context is None:

View File

@@ -362,11 +362,7 @@ class InferenceState(object):
) or name
if stmt.type == 'lambdef':
stmt = name
- return context.py__getattribute__(
- name,
- position=stmt.start_pos,
- search_global=True, is_goto=True
- )
+ return context.goto(name, position=stmt.start_pos)
def create_context(self, base_context, node, node_is_value=False, node_is_object=False):
def parent_scope(node):

View File

@@ -149,7 +149,7 @@ def _check_for_exception_catch(node_context, jedi_name, exception, payload=None)
for python_cls in exception.mro():
if cls.py__name__() == python_cls.__name__ \
- and cls.parent_context == cls.inference_state.builtins_module:
+ and cls.parent_context.is_builtins_module():
return True
return False

View File

@@ -25,6 +25,9 @@ _sentinel = object()
class HelperValueMixin(object):
def get_root_context(self):
value = self
+ if value.parent_context is None:
+ return value.as_context()
while True:
if value.parent_context is None:
return value
@@ -55,24 +58,18 @@ class HelperValueMixin(object):
for lazy_value in self.iterate(valueualized_node, is_async)
)
- def py__getattribute__(self, name_or_str, name_value=None, position=None,
- search_global=False, is_goto=False,
+ def py__getattribute__(self, name_or_str, name_context=None, position=None,
analysis_errors=True):
"""
:param position: Position of the last statement -> tuple of line, column
"""
- if name_value is None:
- name_value = self
+ if name_context is None:
+ name_context = self
from jedi.inference import finder
- f = finder.NameFinder(self.inference_state, self, name_value, name_or_str,
+ f = finder.NameFinder(self.inference_state, self, name_context, name_or_str,
position, analysis_errors=analysis_errors)
- if search_global:
- filters = f.get_global_filters()
- else:
- filters = f.get_value_filters()
- if is_goto:
- return f.filter_name(filters)
- return f.find(filters, attribute_lookup=not search_global)
+ filters = f.get_value_filters()
+ return f.find(filters, attribute_lookup=True)
def py__await__(self):
await_value_set = self.py__getattribute__(u"__await__")
@@ -132,7 +129,7 @@ class Value(HelperValueMixin, BaseValue):
from jedi.inference import analysis
# TODO this value is probably not right.
analysis.add(
- valueualized_node.value,
+ valueualized_node.context,
'type-error-not-subscriptable',
valueualized_node.node,
message="TypeError: '%s' object is not subscriptable" % self
@@ -143,7 +140,7 @@ class Value(HelperValueMixin, BaseValue):
if valueualized_node is not None:
from jedi.inference import analysis
analysis.add(
- valueualized_node.value,
+ valueualized_node.context,
'type-error-not-iterable',
valueualized_node.node,
message="TypeError: '%s' object is not iterable" % self)
@@ -210,6 +207,9 @@ class Value(HelperValueMixin, BaseValue):
# The root value knows if it's a stub or not.
return self.parent_context.is_stub()
+ def as_context(self):
+ raise NotImplementedError('Not all values need to be converted to contexts')
def iterate_values(values, valueualized_node=None, is_async=False):
"""
@@ -419,6 +419,9 @@ class ValueSet(BaseValueSet):
value_set |= method()
return value_set
+ def as_context(self):
+ return [v.as_context() for v in self._set]
def gather_annotation_classes(self):
return ValueSet.from_sets([c.gather_annotation_classes() for c in self._set])
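The as_context() additions above read well in isolation: the base Value refuses the conversion, scope-like values opt in, and ValueSet maps the conversion over its members. A minimal runnable sketch (stand-in classes, assumed semantics):

    class Value:
        def as_context(self):
            raise NotImplementedError('Not all values need to be converted to contexts')

    class ModuleValue(Value):
        def as_context(self):
            return ('module-context', self)  # stand-in for a real Context wrapper

    class ValueSet:
        def __init__(self, values):
            self._set = set(values)

        def as_context(self):
            # mirrors the new ValueSet.as_context: one context per member value
            return [v.as_context() for v in self._set]

    print(ValueSet([ModuleValue()]).as_context())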

View File

@@ -17,6 +17,7 @@ from jedi.inference.compiled.access import _sentinel
from jedi.inference.cache import inference_state_function_cache
from jedi.inference.helpers import reraise_getitem_errors
from jedi.inference.signature import BuiltinSignature
+ from jedi.inference.context import AbstractContext
class CheckAttribute(object):
@@ -168,16 +169,11 @@ class CompiledObject(Value):
# Ensures that a CompiledObject is returned that is not an instance (like list)
return self
- def get_filters(self, search_global=False, is_instance=False,
- until_position=None, origin_scope=None):
+ def get_filters(self, is_instance=False, origin_scope=None):
yield self._ensure_one_filter(is_instance)
@memoize_method
def _ensure_one_filter(self, is_instance):
"""
search_global shouldn't change the fact that there's one dict, this way
there's only one `object`.
"""
return CompiledObjectFilter(self.inference_state, self, is_instance)
@CheckAttribute(u'__getitem__')
@@ -267,6 +263,9 @@ class CompiledObject(Value):
def get_metaclasses(self):
return NO_VALUES
+ def as_context(self):
+ return AbstractContext(self)
class CompiledName(AbstractNameDefinition):
def __init__(self, inference_state, parent_context, name):

View File

@@ -1,6 +1,7 @@
from abc import abstractmethod
- from jedi.inference.filters import ParserTreeFilter
+ from jedi.inference.filters import ParserTreeFilter, MergedFilter, \
+ GlobalNameFilter
class AbstractContext(object):
@@ -23,14 +24,55 @@ class AbstractContext(object):
def create_context(self, node, node_is_value=False, node_is_object=False):
return self.inference_state.create_context(self, node, node_is_value, node_is_object)
- @property
- def py__getattribute__(self):
- return self._value.py__getattribute__
+ def goto(self, name_or_str, position):
+ from jedi.inference import finder
+ f = finder.NameFinder(self.inference_state, self, self, name_or_str, position)
+ raise NotImplementedError('this does not seem to be correct')
+ filters = f.get_global_filters()
+ return f.filter_name(filters)
+ def py__getattribute__(self, name_or_str, name_value=None, position=None,
+ analysis_errors=True):
+ """
+ :param position: Position of the last statement -> tuple of line, column
+ """
+ if name_value is None:
+ name_value = self
+ from jedi.inference import finder
+ f = finder.NameFinder(self.inference_state, self, name_value, name_or_str,
+ position, analysis_errors=analysis_errors)
+ filters = f.get_global_filters()
+ return f.find(filters, attribute_lookup=False)
@property
def tree_node(self):
return self._value.tree_node
@property
def parent_context(self):
return self._value.parent_context
+ def is_module(self):
+ return self._value.is_module()
+ def is_builtins_module(self):
+ return self._value == self.inference_state.builtins_module
+ def is_class(self):
+ return self._value.is_class()
+ def is_stub(self):
+ return self._value.is_stub()
+ def is_instance(self):
+ return self._value.is_instance()
+ def is_compiled(self):
+ return self._value.is_compiled()
+ def py__name__(self):
+ return self._value.py__name__()
+ def infer_node(self, node):
+ return self.inference_state.infer_element(self, node)
@@ -56,14 +98,36 @@ class ModuleContext(AbstractContext):
def py__package__(self):
return self._value.py__package__
+ def get_filters(self, until_position=None, origin_scope=None):
+ filters = self._value.get_filters(origin_scope)
+ # Skip the first filter and replace it.
+ yield MergedFilter(
+ ParserTreeFilter(
+ context=self,
+ until_position=until_position,
+ origin_scope=origin_scope
+ ),
+ GlobalNameFilter(self, self.tree_node),
+ )
+ for f in filters: # Python 2...
+ yield f
+ def get_value(self):
+ """
+ This is the only function that converts a context back to a value.
+ This is necessary for stub -> python conversion and vice versa. However,
+ this method shouldn't be moved to AbstractContext.
+ """
+ return self._value
class ClassContext(AbstractContext):
def get_filters(self, until_position=None, origin_scope=None):
- yield self._value.get_global_filter(until_position, origin_scope)
+ yield self.get_global_filter(until_position, origin_scope)
def get_global_filter(self, until_position=None, origin_scope=None):
return ParserTreeFilter(
- value=self,
+ context=self,
until_position=until_position,
origin_scope=origin_scope
)
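AbstractContext now wraps a value, delegating identity questions (is_class, is_stub, py__name__, ...) to it while owning name resolution itself. A runnable miniature of the delegation half (invented stand-ins; the real class also drives NameFinder):

    BUILTINS = object()  # stand-in for inference_state.builtins_module

    class FakeValue:
        tree_node = '<module node>'

        def py__name__(self):
            return 'example'

    class MiniAbstractContext:
        def __init__(self, value):
            self._value = value

        @property
        def tree_node(self):  # structural questions go to the wrapped value
            return self._value.tree_node

        def py__name__(self):
            return self._value.py__name__()

        def is_builtins_module(self):  # the comparison now lives on the context
            return self._value is BUILTINS

    ctx = MiniAbstractContext(FakeValue())
    print(ctx.tree_node, ctx.py__name__(), ctx.is_builtins_module())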

View File

@@ -270,21 +270,21 @@ def _execute_array_values(inference_state, array):
@inference_state_method_cache()
def infer_param(execution_value, param):
from jedi.inference.value.instance import InstanceArguments
- from jedi.inference.value import FunctionExecutionValue
+ from jedi.inference.value import FunctionExecutionContext
def infer_docstring(docstring):
return ValueSet(
p
for param_str in _search_param_in_docstr(docstring, param.name.value)
- for p in _infer_for_statement_string(module_value, param_str)
+ for p in _infer_for_statement_string(module_context, param_str)
)
- module_value = execution_value.get_root_value()
+ module_context = execution_value.get_root_context()
func = param.get_parent_function()
if func.type == 'lambdef':
return NO_VALUES
types = infer_docstring(execution_value.py__doc__())
- if isinstance(execution_value, FunctionExecutionValue) \
+ if isinstance(execution_value, FunctionExecutionContext) \
and isinstance(execution_value.var_args, InstanceArguments) \
and execution_value.function_value.py__name__() == '__init__':
class_value = execution_value.var_args.instance.class_value
@@ -307,5 +307,5 @@ def infer_return_types(function_value):
yield type_
for type_str in search_return_in_docstr(function_value.py__doc__()):
for value in _infer_for_statement_string(function_value.get_root_value(), type_str):
for value in _infer_for_statement_string(function_value.get_root_context(), type_str):
yield value

View File

@@ -72,7 +72,7 @@ def search_params(inference_state, execution_value, funcdef):
inference_state.dynamic_params_depth += 1
try:
- path = execution_value.get_root_value().py__file__()
+ path = execution_value.get_root_context().py__file__()
if path is not None and is_stdlib_path(path):
# We don't want to search for usages in the stdlib. Usually people
# don't work with it (except if you are a core maintainer, sorry).
@@ -89,7 +89,7 @@ def search_params(inference_state, execution_value, funcdef):
debug.dbg('Dynamic param search in %s.', string_name, color='MAGENTA')
try:
- module_context = execution_value.get_root_value()
+ module_context = execution_value.get_root_context()
function_executions = _search_function_executions(
inference_state,
module_context,
@@ -132,7 +132,8 @@ def _search_function_executions(inference_state, module_context, funcdef, string
inference_state, [module_context], string_name):
if not isinstance(module_context, ModuleValue):
return
- for name, trailer in _get_possible_nodes(for_mod_value, string_name):
+ for_mod_context = for_mod_value.as_context()
+ for name, trailer in _get_possible_nodes(for_mod_context, string_name):
i += 1
# This is a simple way to stop Jedi's dynamic param recursion
@@ -142,7 +143,7 @@ def _search_function_executions(inference_state, module_context, funcdef, string
return
raise NotImplementedError
- random_value = inference_state.create_context(for_mod_value, name)
+ random_value = inference_state.create_context(for_mod_context, name)
for function_execution in _check_name_for_execution(
inference_state, random_value, compare_node, name, trailer):
found_executions = True
@@ -180,7 +181,7 @@ def _get_possible_nodes(module_value, func_string_name):
def _check_name_for_execution(inference_state, value, compare_node, name, trailer):
- from jedi.inference.value.function import FunctionExecutionValue
+ from jedi.inference.value.function import FunctionExecutionContext
def create_func_excs():
arglist = trailer.children[1]
@@ -204,7 +205,7 @@ def _check_name_for_execution(inference_state, value, compare_node, name, traile
if compare_node == value_node:
for func_execution in create_func_excs():
yield func_execution
- elif isinstance(v.parent_context, FunctionExecutionValue) and \
+ elif isinstance(v.parent_context, FunctionExecutionContext) and \
compare_node.type == 'funcdef':
# Here we're trying to find decorators by checking the first
# parameter. It's not very generic though. Should find a better
@@ -216,9 +217,9 @@ def _check_name_for_execution(inference_state, value, compare_node, name, traile
nodes = [v.tree_node for v in values]
if nodes == [compare_node]:
# Found a decorator.
- module_value = value.get_root_value()
+ module_context = value.get_root_context()
execution_value = next(create_func_excs())
- for name, trailer in _get_possible_nodes(module_value, params[0].string_name):
+ for name, trailer in _get_possible_nodes(module_context, params[0].string_name):
if value_node.start_pos < name.start_pos < value_node.end_pos:
raise NotImplementedError
random_value = inference_state.create_context(execution_value, name)

View File

@@ -68,11 +68,11 @@ def _get_definition_names(used_names, name_key):
class AbstractUsedNamesFilter(AbstractFilter):
name_class = TreeNameDefinition
- def __init__(self, value, parser_scope):
+ def __init__(self, context, parser_scope):
self._parser_scope = parser_scope
self._module_node = self._parser_scope.get_root_node()
self._used_names = self._module_node.get_used_names()
- self.value = value
+ self.context = context
def get(self, name, **filter_kwargs):
return self._convert_names(self._filter(
@@ -81,7 +81,7 @@ class AbstractUsedNamesFilter(AbstractFilter):
))
def _convert_names(self, names):
- return [self.name_class(self.value, name) for name in names]
+ return [self.name_class(self.context, name) for name in names]
def values(self, **filter_kwargs):
return self._convert_names(
@@ -94,22 +94,22 @@ class AbstractUsedNamesFilter(AbstractFilter):
)
def __repr__(self):
- return '<%s: %s>' % (self.__class__.__name__, self.value)
+ return '<%s: %s>' % (self.__class__.__name__, self.context)
class ParserTreeFilter(AbstractUsedNamesFilter):
- def __init__(self, value, node_value=None, until_position=None,
+ def __init__(self, context, node_context=None, until_position=None,
origin_scope=None):
"""
- node_value is an option to specify a second value for use cases
+ node_context is an option to specify a second value for use cases
like the class mro where the parent class of a new name would be the
value, but for some type inference it's important to have a local
value of the other classes.
"""
- if node_value is None:
- node_value = value
- super(ParserTreeFilter, self).__init__(value, node_value.tree_node)
- self._node_value = node_value
+ if node_context is None:
+ node_context = context
+ super(ParserTreeFilter, self).__init__(context, node_context.tree_node)
+ self._node_context = node_context
self._origin_scope = origin_scope
self._until_position = until_position
@@ -128,7 +128,7 @@ class ParserTreeFilter(AbstractUsedNamesFilter):
def _check_flows(self, names):
for name in sorted(names, key=lambda name: name.start_pos, reverse=True):
check = flow_analysis.reachability_check(
- context=self._node_value,
+ context=self._node_context,
value_scope=self._parser_scope,
node=name,
origin_scope=self._origin_scope
@@ -143,11 +143,11 @@ class ParserTreeFilter(AbstractUsedNamesFilter):
class FunctionExecutionFilter(ParserTreeFilter):
param_name = ParamName
- def __init__(self, value, node_value=None,
+ def __init__(self, context, node_context=None,
until_position=None, origin_scope=None):
super(FunctionExecutionFilter, self).__init__(
- value,
- node_value,
+ context,
+ node_context,
until_position,
origin_scope
)
@@ -157,15 +157,12 @@ class FunctionExecutionFilter(ParserTreeFilter):
for name in names:
param = search_ancestor(name, 'param')
if param:
- yield self.param_name(self.value, name)
+ yield self.param_name(self.context, name)
else:
yield TreeNameDefinition(self.context, name)
class GlobalNameFilter(AbstractUsedNamesFilter):
- def __init__(self, value, parser_scope):
- super(GlobalNameFilter, self).__init__(value, parser_scope)
def get(self, name):
try:
names = self._used_names[name]
@@ -318,10 +315,10 @@ class _OverwriteMeta(type):
class _AttributeOverwriteMixin(object):
- def get_filters(self, search_global=False, *args, **kwargs):
+ def get_filters(self, *args, **kwargs):
yield SpecialMethodFilter(self, self.overwritten_methods, self._wrapped_value)
- for filter in self._wrapped_value.get_filters(search_global):
+ for filter in self._wrapped_value.get_filters():
yield filter
@@ -344,7 +341,7 @@ def publish_method(method_name, python_version_match=None):
return decorator
- def get_global_filters(inference_state, value, until_position, origin_scope):
+ def get_global_filters(inference_state, context, until_position, origin_scope):
"""
Returns all filters in order of priority for name resolution.
@@ -392,19 +389,18 @@ def get_global_filters(inference_state, value, until_position, origin_scope):
>>> list(filters[3].values()) # doctest: +ELLIPSIS
[...]
"""
- from jedi.inference.value.function import FunctionExecutionValue
- while value is not None:
+ from jedi.inference.value.function import FunctionExecutionContext
+ while context is not None:
# Names in methods cannot be resolved within the class.
- for filter in value.get_filters(
- search_global=True,
+ for filter in context.get_filters(
until_position=until_position,
origin_scope=origin_scope):
yield filter
- if isinstance(value, FunctionExecutionValue):
+ if isinstance(context, FunctionExecutionContext):
# The position should be reset if the current scope is a function.
until_position = None
- value = value.parent_context
+ context = context.parent_context
# Add builtins to the global scope.
yield next(inference_state.builtins_module.get_filters())
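The reworked get_global_filters() walks a chain of contexts rather than values. A self-contained miniature of the loop (fake context objects; jedi's real filters carry much more state):

    BUILTINS_FILTER = {'print': '<builtin print>'}  # stand-in for the builtins filter

    class FakeContext:
        def __init__(self, names, parent=None, is_function=False):
            self._names = names
            self.parent_context = parent
            self.is_function = is_function

        def get_filters(self, until_position=None, origin_scope=None):
            yield self._names

    def get_global_filters(context, until_position=None, origin_scope=None):
        # innermost scope first, builtins last; the position limit stops
        # applying once a function scope has been crossed
        while context is not None:
            for f in context.get_filters(until_position=until_position,
                                         origin_scope=origin_scope):
                yield f
            if context.is_function:
                until_position = None
            context = context.parent_context
        yield BUILTINS_FILTER

    module = FakeContext({'x': 1})
    func = FakeContext({'y': 2}, parent=module, is_function=True)
    print(list(get_global_filters(func)))  # [{'y': 2}, {'x': 1}, {'print': ...}]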

View File

@@ -117,7 +117,7 @@ class NameFinder(object):
def get_value_filters(self):
origin_scope = self._get_origin_scope()
- for f in self._value.get_filters(False, origin_scope=origin_scope):
+ for f in self._value.get_filters(origin_scope=origin_scope):
yield f
# This covers the case where a stub files are incomplete.
if self._value.is_stub():

View File

@@ -6,8 +6,8 @@ from jedi.inference.gradual.stub_value import StubModuleValue
def _stub_to_python_value_set(stub_value, ignore_compiled=False):
- stub_module = stub_value.get_root_value()
- if not stub_module.is_stub():
+ stub_module_context = stub_value.get_root_context()
+ if not stub_module_context.is_stub():
return ValueSet([stub_value])
was_instance = stub_value.is_instance()
@@ -25,7 +25,7 @@ def _stub_to_python_value_set(stub_value, ignore_compiled=False):
qualified_names = qualified_names[:-1]
was_instance = True
- values = _infer_from_stub(stub_module, qualified_names, ignore_compiled)
+ values = _infer_from_stub(stub_module_context, qualified_names, ignore_compiled)
if was_instance:
values = ValueSet.from_sets(
c.execute_with_values()
@@ -39,9 +39,10 @@ def _stub_to_python_value_set(stub_value, ignore_compiled=False):
return values
- def _infer_from_stub(stub_module, qualified_names, ignore_compiled):
+ def _infer_from_stub(stub_module_context, qualified_names, ignore_compiled):
from jedi.inference.compiled.mixed import MixedObject
- assert isinstance(stub_module, (StubModuleValue, MixedObject)), stub_module
+ stub_module = stub_module_context._value # TODO private!
+ assert isinstance(stub_module, (StubModuleValue, MixedObject)), stub_module_context
non_stubs = stub_module.non_stub_value_set
if ignore_compiled:
non_stubs = non_stubs.filter(lambda c: not c.is_compiled())
@@ -53,8 +54,8 @@ def _infer_from_stub(stub_module, qualified_names, ignore_compiled):
@to_list
def _try_stub_to_python_names(names, prefer_stub_to_compiled=False):
for name in names:
- module = name.get_root_value()
- if not module.is_stub():
+ module_context = name.get_root_context()
+ if not module_context.is_stub():
yield name
continue
@@ -63,7 +64,7 @@ def _try_stub_to_python_names(names, prefer_stub_to_compiled=False):
values = NO_VALUES
else:
values = _infer_from_stub(
- module,
+ module_context,
name_list[:-1],
ignore_compiled=prefer_stub_to_compiled,
)
@@ -98,8 +99,8 @@ def _load_stub_module(module):
@to_list
def _python_to_stub_names(names, fallback_to_python=False):
for name in names:
- module = name.get_root_value()
- if module.is_stub():
+ module_context = name.get_root_context()
+ if module_context.is_stub():
yield name
continue
@@ -114,7 +115,7 @@ def _python_to_stub_names(names, fallback_to_python=False):
name_list = name.get_qualified_names()
stubs = NO_VALUES
if name_list is not None:
- stub_module = _load_stub_module(module)
+ stub_module = _load_stub_module(module_context.get_value())
if stub_module is not None:
stubs = ValueSet({stub_module})
for name in name_list[:-1]:
@@ -171,7 +172,7 @@ def to_stub(value):
value = value.py__class__()
qualified_names = value.get_qualified_names()
- stub_module = _load_stub_module(value.get_root_value())
+ stub_module = _load_stub_module(value.get_root_context().get_value())
if stub_module is None or qualified_names is None:
return NO_VALUES

View File

@@ -32,26 +32,18 @@ class StubModuleValue(ModuleValue):
def _get_first_non_stub_filters(self):
for value in self.non_stub_value_set:
- yield next(value.get_filters(search_global=False))
+ yield next(value.get_filters())
- def _get_stub_filters(self, search_global, **filter_kwargs):
+ def _get_stub_filters(self, origin_scope):
return [StubFilter(
- value=self,
- search_global=search_global,
- **filter_kwargs
- )] + list(self.iter_star_filters(search_global=search_global))
+ context=self.as_context(),
+ origin_scope=origin_scope
+ )] + list(self.iter_star_filters())
- def get_filters(self, search_global=False, until_position=None,
- origin_scope=None, **kwargs):
- filters = super(StubModuleValue, self).get_filters(
- search_global, until_position, origin_scope, **kwargs
- )
+ def get_filters(self, origin_scope=None):
+ filters = super(StubModuleValue, self).get_filters(origin_scope)
next(filters) # Ignore the first filter and replace it with our own
- stub_filters = self._get_stub_filters(
- search_global=search_global,
- until_position=until_position,
- origin_scope=origin_scope,
- )
+ stub_filters = self._get_stub_filters(origin_scope=origin_scope)
for f in stub_filters:
yield f
@@ -71,7 +63,7 @@ class TypingModuleWrapper(StubModuleValue):
class _StubName(TreeNameDefinition):
def infer(self):
inferred = super(_StubName, self).infer()
- if self.string_name == 'version_info' and self.get_root_value().py__name__() == 'sys':
+ if self.string_name == 'version_info' and self.get_root_context().py__name__() == 'sys':
return [VersionInfo(c) for c in inferred]
return inferred
@@ -79,24 +71,20 @@ class _StubName(TreeNameDefinition):
class StubFilter(ParserTreeFilter):
name_class = _StubName
- def __init__(self, *args, **kwargs):
- self._search_global = kwargs.pop('search_global') # Python 2 :/
- super(StubFilter, self).__init__(*args, **kwargs)
def _is_name_reachable(self, name):
if not super(StubFilter, self)._is_name_reachable(name):
return False
- if not self._search_global:
- # Imports in stub files are only public if they have an "as"
- # export.
- definition = name.get_definition()
- if definition.type in ('import_from', 'import_name'):
- if name.parent.type not in ('import_as_name', 'dotted_as_name'):
- return False
- n = name.value
- if n.startswith('_') and not (n.startswith('__') and n.endswith('__')):
- return False
+ # Imports in stub files are only public if they have an "as"
+ # export.
+ definition = name.get_definition()
+ if definition.type in ('import_from', 'import_name'):
+ if name.parent.type not in ('import_as_name', 'dotted_as_name'):
+ return False
+ n = name.value
+ # TODO rewrite direct return
+ if n.startswith('_') and not (n.startswith('__') and n.endswith('__')):
+ return False
return True
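With the search_global special case gone, StubFilter applies its visibility rules unconditionally. Restated as a plain function (an assumed simplification of the checks above, with node types passed in as strings):

    def is_public_stub_name(name, definition_type, parent_type):
        # plain imports in a stub are private unless re-exported with "as"
        if definition_type in ('import_from', 'import_name'):
            if parent_type not in ('import_as_name', 'dotted_as_name'):
                return False
        # single-underscore names are private; dunders stay public
        if name.startswith('_') and not (name.startswith('__') and name.endswith('__')):
            return False
        return True

    print(is_public_stub_name('version_info', 'expr_stmt', None))       # True
    print(is_public_stub_name('sys', 'import_name', 'dotted_name'))     # False
    print(is_public_stub_name('sys', 'import_name', 'dotted_as_name'))  # True
    print(is_public_stub_name('_private', 'expr_stmt', None))           # False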

View File

@@ -198,7 +198,7 @@ class TypingValue(_BaseTypingValue):
self.parent_context,
self._tree_name,
index_value,
- value_of_index=valueualized_node.value)
+ value_of_index=valueualized_node.context)
for index_value in index_value_set
)
@@ -213,7 +213,7 @@ class _TypingClassMixin(object):
return []
- class TypingClassValueWithIndex(_TypingClassMixin, TypingValueWithIndex, ClassMixin):
+ class TypingClassValueWithIndex(_TypingClassMixin, ClassMixin, TypingValueWithIndex):
pass
@@ -470,7 +470,7 @@ class NewTypeFunction(_BaseTypingValue):
return ValueSet(
NewType(
self.inference_state,
- valueualized_node.value,
+ valueualized_node.context,
valueualized_node.node,
second_arg.infer(),
) for valueualized_node in arguments.get_calling_nodes())
@@ -553,9 +553,8 @@ class AbstractAnnotatedClass(ClassMixin, ValueWrapper):
def get_type_var_filter(self):
return TypeVarFilter(self.get_generics(), self.list_type_vars())
- def get_filters(self, search_global=False, *args, **kwargs):
+ def get_filters(self, *args, **kwargs):
filters = super(AbstractAnnotatedClass, self).get_filters(
- search_global,
*args, **kwargs
)
for f in filters:

View File

@@ -87,17 +87,20 @@ def infer_import(context, tree_name, is_goto=False):
return NO_VALUES
if from_import_name is not None:
- types = unite(
- t.py__getattribute__(
+ if is_goto:
+ types = unite([
+ c.goto(
+ from_import_name,
+ name_context=context,
+ analysis_errors=False
+ ) for c in types.as_context()
+ ])
+ else:
+ types = types.py__getattribute__(
from_import_name,
name_context=context,
- is_goto=is_goto,
analysis_errors=False
)
- for t in types
- )
- if not is_goto:
- types = ValueSet(types)
if not types:
path = import_path + [from_import_name]
@@ -360,7 +363,7 @@ class Importer(object):
both_values = values | convert_values(values)
for c in both_values:
- for filter in c.get_filters(search_global=False):
+ for filter in c.get_filters():
names += filter.values()
else:
if self.level:

View File

@@ -120,7 +120,7 @@ def get_executed_params_and_issues(execution_value, arguments):
% (funcdef.name, key))
for valueualized_node in arguments.get_calling_nodes():
issues.append(
- analysis.add(valueualized_node.value,
+ analysis.add(valueualized_node.context,
'type-error-multiple-values',
valueualized_node.node, message=m)
)
@@ -165,7 +165,7 @@ def get_executed_params_and_issues(execution_value, arguments):
m = _error_argument_count(funcdef, len(unpacked_va))
issues.append(
analysis.add(
- valueualized_node.value,
+ valueualized_node.context,
'type-error-too-few-arguments',
valueualized_node.node,
message=m,
@@ -197,7 +197,7 @@ def get_executed_params_and_issues(execution_value, arguments):
for valueualized_node in arguments.get_calling_nodes():
m = _error_argument_count(funcdef, len(unpacked_va))
issues.append(
- analysis.add(valueualized_node.value,
+ analysis.add(valueualized_node.context,
'type-error-too-few-arguments',
valueualized_node.node, message=m)
)

View File

@@ -115,9 +115,9 @@ class ExecutionRecursionDetector(object):
self._recursion_level += 1
self._parent_execution_funcs.append(funcdef)
- module = execution.get_root_value()
+ module_context = execution.get_root_context()
- if module == self._inference_state.builtins_module:
+ if module_context.is_builtins_module():
# We have control over builtins so we know they are not recursing
# like crazy. Therefore we just let them execute always, because
# they usually just help a lot with getting good results.
@@ -133,7 +133,8 @@ class ExecutionRecursionDetector(object):
self._execution_count += 1
if self._funcdef_execution_counts.setdefault(funcdef, 0) >= per_function_execution_limit:
- if module.py__name__() in ('builtins', 'typing'):
+ # TODO why check for builtins here again?
+ if module_context.py__name__() in ('builtins', 'typing'):
return False
debug.warning(
'Per function execution limit (%s) reached: %s',
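The detector now asks the root context whether execution originates in builtins. The gist of the budget logic as a sketch (the limit constant is invented; jedi's real checks also track recursion depth and a global execution count):

    PER_FUNCTION_LIMIT = 6  # invented for the sketch

    def allow_execution(module_name, is_builtins, counts, funcdef):
        """Return True when one more execution of funcdef may proceed."""
        if is_builtins:
            return True  # builtins are trusted not to recurse wildly
        counts[funcdef] = counts.get(funcdef, 0) + 1
        if counts[funcdef] >= PER_FUNCTION_LIMIT:
            # 'builtins'/'typing' get a pass even past the per-function limit
            return module_name in ('builtins', 'typing')
        return True

    counts = {}
    print([allow_execution('mymod', False, counts, 'f') for _ in range(7)])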

View File

@@ -125,8 +125,7 @@ def infer_node(context, element):
elif typ == 'dotted_name':
value_set = infer_atom(context, element.children[0])
for next_name in element.children[2::2]:
- # TODO add search_global=True?
- value_set = value_set.py__getattribute__(next_name, name_value=context)
+ value_set = value_set.py__getattribute__(next_name, name_context=context)
return value_set
elif typ == 'eval_input':
return infer_node(context, element.children[0])
@@ -199,11 +198,7 @@ def infer_atom(context, atom):
# position to None, so the finder will not try to stop at a certain
# position in the module.
position = None
- return context.py__getattribute__(
- name_or_str=atom,
- position=position,
- search_global=True
- )
+ return context.py__getattribute__(atom, position=position)
elif atom.type == 'keyword':
# For False/True/None
if atom.value in ('False', 'True', 'None'):
@@ -225,7 +220,7 @@ def infer_atom(context, atom):
value_set = infer_atom(context, atom.children[0])
for string in atom.children[1:]:
right = infer_atom(context, string)
- value_set = _infer_comparison(state, value_set, u'+', right)
+ value_set = _infer_comparison(context, value_set, u'+', right)
return value_set
elif atom.type == 'fstring':
return compiled.get_string_value_set(state)
@@ -274,7 +269,7 @@ def infer_expr_stmt(context, stmt, seek_name=None):
# Here we allow list/set to recurse under certain conditions. To make
# it possible to resolve stuff like list(set(list(x))), this is
# necessary.
- if not allowed and context.get_root_value() == context.inference_state.builtins_module:
+ if not allowed and context.get_root_context().is_builtins_module():
try:
instance = context.var_args.instance
except AttributeError:
@@ -314,8 +309,7 @@ def _infer_expr_stmt(context, stmt, seek_name=None):
operator = copy.copy(first_operator)
operator.value = operator.value[:-1]
name = stmt.get_defined_names()[0].value
- left = context.py__getattribute__(
- name, position=stmt.start_pos, search_global=True)
+ left = context.py__getattribute__(name, position=stmt.start_pos)
for_stmt = tree.search_ancestor(stmt, 'for_stmt')
if for_stmt is not None and for_stmt.type == 'for_stmt' and value_set \
@@ -687,7 +681,7 @@ def check_tuple_assignments(valueualized_name, value_set):
"""
lazy_value = None
for index, node in valueualized_name.assignment_indexes():
- cn = ValueualizedNode(valueualized_name.value, node)
+ cn = ValueualizedNode(valueualized_name.context, node)
iterated = value_set.iterate(cn)
if isinstance(index, slice):
# For no star unpacking is not possible.

View File

@@ -37,7 +37,7 @@ def _find_names(module_context, tree_name):
def usages(module_context, tree_name):
search_name = tree_name.value
found_names = _find_names(module_context, tree_name)
- modules = set(d.get_root_value() for d in found_names.values())
+ modules = set(d.get_root_context() for d in found_names.values())
modules = set(m for m in modules if m.is_module() and not m.is_compiled())
non_matching_usage_maps = {}

View File

@@ -1,6 +1,6 @@
from jedi.inference.value.module import ModuleValue
from jedi.inference.value.klass import ClassValue
from jedi.inference.value.function import FunctionValue, \
- MethodValue, FunctionExecutionValue
+ MethodValue, FunctionExecutionContext
from jedi.inference.value.instance import AnonymousInstance, BoundMethod, \
CompiledInstance, AbstractInstanceValue, TreeInstance

View File

@@ -55,18 +55,11 @@ class FunctionAndClassBase(TreeValue):
class FunctionMixin(object):
api_type = u'function'
- def get_filters(self, search_global=False, until_position=None, origin_scope=None):
- if search_global:
- yield ParserTreeFilter(
- value=self,
- until_position=until_position,
- origin_scope=origin_scope
- )
- else:
- cls = self.py__class__()
- for instance in cls.execute_with_values():
- for filter in instance.get_filters(search_global=False, origin_scope=origin_scope):
- yield filter
+ def get_filters(self, origin_scope=None):
+ cls = self.py__class__()
+ for instance in cls.execute_with_values():
+ for filter in instance.get_filters(origin_scope=origin_scope):
+ yield filter
def py__get__(self, instance, class_value):
from jedi.inference.value.instance import BoundMethod
@@ -97,7 +90,7 @@ class FunctionMixin(object):
if arguments is None:
arguments = AnonymousArguments()
- return FunctionExecutionValue(self.inference_state, self.parent_context, self, arguments)
+ return FunctionExecutionContext(self.inference_state, self.parent_context, self, arguments)
def get_signatures(self):
return [TreeSignature(f) for f in self.get_signature_functions()]
@@ -167,11 +160,11 @@ class MethodValue(FunctionValue):
return names + (self.py__name__(),)
- class FunctionExecutionValue(TreeValue):
+ class FunctionExecutionContext(TreeValue):
function_execution_filter = FunctionExecutionFilter
def __init__(self, inference_state, parent_context, function_value, var_args):
- super(FunctionExecutionValue, self).__init__(
+ super(FunctionExecutionContext, self).__init__(
inference_state,
parent_context,
function_value.tree_node,
@@ -292,7 +285,7 @@ class FunctionExecutionValue(TreeValue):
for lazy_value in self.get_yield_lazy_values()
)
- def get_filters(self, search_global=False, until_position=None, origin_scope=None):
+ def get_filters(self, until_position=None, origin_scope=None):
yield self.function_execution_filter(self,
until_position=until_position,
origin_scope=origin_scope)

View File

@@ -122,8 +122,7 @@ class AbstractInstanceValue(Value):
else:
return ValueSet([self])
- def get_filters(self, search_global=None, until_position=None,
- origin_scope=None, include_self_names=True):
+ def get_filters(self, origin_scope=None, include_self_names=True):
class_value = self.get_annotated_class_object()
if include_self_names:
for cls in class_value.py__mro__():
@@ -135,7 +134,6 @@ class AbstractInstanceValue(Value):
yield SelfAttributeFilter(self, cls, origin_scope)
class_filters = class_value.get_filters(
- search_global=False,
origin_scope=origin_scope,
is_instance=True,
)
@@ -262,7 +260,7 @@ class TreeInstance(AbstractInstanceValue):
# I don't think that dynamic append lookups should happen here. That
# sounds more like something that should go to py__iter__.
if class_value.py__name__() in ['list', 'set'] \
- and parent_context.get_root_value() == inference_state.builtins_module:
+ and parent_context.get_root_context().is_builtins_module():
# compare the module path with the builtin name.
if settings.dynamic_array_additions:
var_args = iterable.get_dynamic_array_instance(self, var_args)
@@ -466,7 +464,10 @@ class InstanceClassFilter(AbstractFilter):
return self._convert(self._class_filter.values(from_instance=True))
def _convert(self, names):
- return [LazyInstanceClassName(self._instance, self._class_filter.value, n) for n in names]
+ return [
+ LazyInstanceClassName(self._instance, self._class_filter.context, n)
+ for n in names
+ ]
def __repr__(self):
return '<%s for %s>' % (self.__class__.__name__, self._class_filter.value)
@@ -480,8 +481,8 @@ class SelfAttributeFilter(ClassFilter):
def __init__(self, value, class_value, origin_scope):
super(SelfAttributeFilter, self).__init__(
- value=value,
- node_value=class_value,
+ context=value,
+ node_context=class_value.as_context(),
origin_scope=origin_scope,
is_instance=True,
)
@@ -503,7 +504,7 @@ class SelfAttributeFilter(ClassFilter):
yield name
def _convert_names(self, names):
- return [self.name_class(self.value, self._class_value, name) for name in names]
+ return [self.name_class(self.context, self._class_value, name) for name in names]
def _check_flows(self, names):
return names

View File

@@ -116,7 +116,7 @@ class CompForValue(TreeValue):
def from_comp_for(cls, parent_context, comp_for):
return cls(parent_context.inference_state, parent_context, comp_for)
- def get_filters(self, search_global=False, until_position=None, origin_scope=None):
+ def get_filters(self, until_position=None, origin_scope=None):
yield ParserTreeFilter(self)

View File

@@ -48,6 +48,7 @@ from jedi.inference.names import TreeNameDefinition, ValueName
from jedi.inference.arguments import unpack_arglist, ValuesArguments
from jedi.inference.base_value import ValueSet, iterator_to_value_set, \
NO_VALUES
+ from jedi.inference.context import ClassContext
from jedi.inference.value.function import FunctionAndClassBase
from jedi.plugins import plugin_manager
@@ -95,9 +96,9 @@ class ClassFilter(ParserTreeFilter):
def _convert_names(self, names):
return [
self.name_class(
- parent_context=self.value,
+ parent_context=self.context,
tree_name=name,
- name_value=self._node_value,
+ name_context=self._node_context,
apply_decorators=not self._is_instance,
) for name in names
]
@@ -192,26 +193,22 @@ class ClassMixin(object):
mro.append(cls_new)
yield cls_new
- def get_filters(self, search_global=False, until_position=None,
- origin_scope=None, is_instance=False):
+ def get_filters(self, origin_scope=None, is_instance=False):
metaclasses = self.get_metaclasses()
if metaclasses:
for f in self.get_metaclass_filters(metaclasses):
yield f
- if search_global:
- yield self.get_global_filter(until_position, origin_scope)
- else:
- for cls in self.py__mro__():
- if isinstance(cls, compiled.CompiledObject):
- for filter in cls.get_filters(is_instance=is_instance):
- yield filter
- else:
- yield ClassFilter(
- self, node_value=cls,
- origin_scope=origin_scope,
- is_instance=is_instance
- )
+ for cls in self.py__mro__():
+ if isinstance(cls, compiled.CompiledObject):
+ for filter in cls.get_filters(is_instance=is_instance):
+ yield filter
+ else:
+ yield ClassFilter(
+ self, node_context=cls.as_context(),
+ origin_scope=origin_scope,
+ is_instance=is_instance
+ )
if not is_instance:
from jedi.inference.compiled import builtin_from_name
type_ = builtin_from_name(self.inference_state, u'type')
@@ -228,12 +225,8 @@ class ClassMixin(object):
init_funcs = self.py__call__().py__getattribute__('__init__')
return [sig.bind(self) for sig in init_funcs.get_signatures()]
- def get_global_filter(self, until_position=None, origin_scope=None):
- return ParserTreeFilter(
- value=self,
- until_position=until_position,
- origin_scope=origin_scope
- )
+ def as_context(self):
+ return ClassContext(self)
class ClassValue(use_metaclass(CachedMetaClass, ClassMixin, FunctionAndClassBase)):
@@ -273,7 +266,7 @@ class ClassValue(use_metaclass(CachedMetaClass, ClassMixin, FunctionAndClassBase
return lst
if self.py__name__() == 'object' \
- and self.parent_context == self.inference_state.builtins_module:
+ and self.parent_context.is_builtins_module():
return []
return [LazyKnownValues(
self.inference_state.builtins_module.py__getattribute__('object')
@@ -287,7 +280,7 @@ class ClassValue(use_metaclass(CachedMetaClass, ClassMixin, FunctionAndClassBase
LazyGenericClass(
self,
index_value,
- value_of_index=valueualized_node.value,
+ value_of_index=valueualized_node.context,
)
for index_value in index_value_set
)
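ClassMixin.get_filters() is now flat: metaclass filters first, then one filter per class in the MRO, then, for the class itself rather than an instance, the attributes of type. Sketched with stand-ins:

    def class_filters(mro, metaclass_filters, type_filters, is_instance=False):
        for f in metaclass_filters:
            yield f
        for cls in mro:
            yield ('class-filter', cls)  # stand-in for ClassFilter(...)
        if not is_instance:
            for f in type_filters:       # names like __mro__, mro(), ...
                yield f

    print(list(class_filters(['C', 'object'], [], ['<type filter>'])))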

View File

@@ -99,11 +99,10 @@ class SubModuleDictMixin(object):
class ModuleMixin(SubModuleDictMixin):
- def get_filters(self, search_global=False, until_position=None, origin_scope=None):
+ def get_filters(self, origin_scope=None):
yield MergedFilter(
ParserTreeFilter(
- value=self,
- until_position=until_position,
+ context=self.as_context(),
origin_scope=origin_scope
),
GlobalNameFilter(self, self.tree_node),
@@ -151,9 +150,9 @@ class ModuleMixin(SubModuleDictMixin):
dct['__file__'] = _ModuleAttributeName(self, '__file__', file)
return dct
- def iter_star_filters(self, search_global=False):
+ def iter_star_filters(self):
for star_module in self.star_imports():
- yield next(star_module.get_filters(search_global))
+ yield next(star_module.get_filters())
# I'm not sure if the star import cache is really that effective anymore
# with all the other really fast import caches. Recheck. Also we would need
@@ -189,7 +188,6 @@ class ModuleMixin(SubModuleDictMixin):
class ModuleValue(ModuleMixin, TreeValue):
api_type = u'module'
- parent_context = None
def __init__(self, inference_state, module_node, file_io, string_names,
code_lines, is_package=False):

View File

@@ -31,7 +31,7 @@ class ImplicitNamespaceValue(Value, SubModuleDictMixin):
self._fullname = fullname
self._paths = paths
- def get_filters(self, search_global=False, until_position=None, origin_scope=None):
+ def get_filters(self, origin_scope=None):
yield DictFilter(self.sub_modules_dict())
@property

View File

@@ -24,7 +24,7 @@ from jedi.inference.value.instance import BoundMethod, InstanceArguments
from jedi.inference.base_value import ValueualizedNode, \
NO_VALUES, ValueSet, ValueWrapper, LazyValueWrapper
from jedi.inference.value import ClassValue, ModuleValue, \
- FunctionExecutionValue
+ FunctionExecutionContext
from jedi.inference.value.klass import ClassMixin
from jedi.inference.value.function import FunctionMixin
from jedi.inference.value import iterable
@@ -114,7 +114,7 @@ def execute(callback):
except AttributeError:
pass
else:
- if value.parent_context == value.inference_state.builtins_module:
+ if value.parent_context.is_builtins_module():
module_name = 'builtins'
elif value.parent_context is not None and value.parent_context.is_module():
module_name = value.parent_context.py__name__()
@@ -260,7 +260,7 @@ class SuperInstance(LazyValueWrapper):
return self._instance
return next(iter(objs))
- def get_filters(self, search_global=False, until_position=None, origin_scope=None):
+ def get_filters(self, origin_scope=None):
for b in self._get_bases():
for obj in b.infer().execute_with_values():
for f in obj.get_filters():
@@ -269,7 +269,7 @@ class SuperInstance(LazyValueWrapper):
@argument_clinic('[type[, obj]], /', want_value=True)
def builtins_super(types, objects, value):
- if isinstance(value, FunctionExecutionValue):
+ if isinstance(value, FunctionExecutionContext):
if isinstance(value.var_args, InstanceArguments):
instance = value.var_args.instance
# TODO if a class is given it doesn't have to be the direct super
@@ -336,7 +336,7 @@ def builtins_isinstance(objects, types, arguments, inference_state):
if cls_or_tup.is_class():
bool_results.add(cls_or_tup in mro)
elif cls_or_tup.name.string_name == 'tuple' \
- and cls_or_tup.get_root_value() == inference_state.builtins_module:
+ and cls_or_tup.get_root_context().is_builtins_module():
# Check for tuples.
classes = ValueSet.from_sets(
lazy_value.infer()
@@ -792,7 +792,7 @@ def get_metaclass_filters(func):
def wrapper(cls, metaclasses):
for metaclass in metaclasses:
if metaclass.py__name__() == 'EnumMeta' \
- and metaclass.get_root_value().py__name__() == 'enum':
+ and metaclass.get_root_context().py__name__() == 'enum':
filter_ = ParserTreeFilter(value=cls)
return [DictFilter({
name.string_name: EnumInstance(cls, name).name for name in filter_.values()
@@ -816,7 +816,7 @@ class EnumInstance(LazyValueWrapper):
obj, = self._cls.execute_with_values()
return obj
- def get_filters(self, search_global=False, position=None, origin_scope=None):
+ def get_filters(self, origin_scope=None):
yield DictFilter(dict(
name=compiled.create_simple_object(self.inference_state, self._name.string_name).name,
value=self._name,