Context -> Value

This commit is contained in:
Dave Halter
2019-08-15 01:25:43 +02:00
parent 49f996867d
commit 9986d8c9aa
41 changed files with 536 additions and 536 deletions

View File

@@ -81,8 +81,8 @@ Inference Values (inference/base_value.py)
.. inheritance-diagram:: .. inheritance-diagram::
jedi.inference.value.instance.TreeInstance jedi.inference.value.instance.TreeInstance
jedi.inference.value.klass.Classvalue jedi.inference.value.klass.Classvalue
jedi.inference.value.function.FunctionContext jedi.inference.value.function.FunctionValue
jedi.inference.value.function.FunctionExecutionContext jedi.inference.value.function.FunctionExecutionValue
:parts: 1 :parts: 1

View File

@@ -36,8 +36,8 @@ from jedi.inference.helpers import get_module_names, infer_call_of_leaf
from jedi.inference.sys_path import transform_path_to_dotted from jedi.inference.sys_path import transform_path_to_dotted
from jedi.inference.names import TreeNameDefinition, ParamName from jedi.inference.names import TreeNameDefinition, ParamName
from jedi.inference.syntax_tree import tree_name_to_values from jedi.inference.syntax_tree import tree_name_to_values
from jedi.inference.value import ModuleContext from jedi.inference.value import ModuleValue
from jedi.inference.base_value import ContextSet from jedi.inference.base_value import ValueSet
from jedi.inference.value.iterable import unpack_tuple_to_dict from jedi.inference.value.iterable import unpack_tuple_to_dict
from jedi.inference.gradual.conversion import convert_names, convert_values from jedi.inference.gradual.conversion import convert_names, convert_values
from jedi.inference.gradual.utils import load_proper_stub_module from jedi.inference.gradual.utils import load_proper_stub_module
@@ -181,7 +181,7 @@ class Script(object):
if names is None: if names is None:
names = ('__main__',) names = ('__main__',)
module = ModuleContext( module = ModuleValue(
self._infer_state, self._module_node, file_io, self._infer_state, self._module_node, file_io,
string_names=names, string_names=names,
code_lines=self._code_lines, code_lines=self._code_lines,
@@ -189,7 +189,7 @@ class Script(object):
) )
if names[0] not in ('builtins', '__builtin__', 'typing'): if names[0] not in ('builtins', '__builtin__', 'typing'):
# These modules are essential for Jedi, so don't overwrite them. # These modules are essential for Jedi, so don't overwrite them.
self._infer_state.module_cache.add(names, ContextSet([module])) self._infer_state.module_cache.add(names, ValueSet([module]))
return module return module
def __repr__(self): def __repr__(self):
@@ -470,7 +470,7 @@ class Interpreter(Script):
self._infer_state.allow_descriptor_getattr = self._allow_descriptor_getattr_default self._infer_state.allow_descriptor_getattr = self._allow_descriptor_getattr_default
def _get_module(self): def _get_module(self):
return interpreter.MixedModuleContext( return interpreter.MixedModuleValue(
self._infer_state, self._infer_state,
self._module_node, self._module_node,
self.namespaces, self.namespaces,

View File

@@ -14,10 +14,10 @@ from jedi.cache import memoize_method
from jedi.inference import imports from jedi.inference import imports
from jedi.inference import compiled from jedi.inference import compiled
from jedi.inference.imports import ImportName from jedi.inference.imports import ImportName
from jedi.inference.value import FunctionExecutionContext from jedi.inference.value import FunctionExecutionValue
from jedi.inference.gradual.typeshed import StubModuleContext from jedi.inference.gradual.typeshed import StubModuleValue
from jedi.inference.gradual.conversion import convert_names, convert_values from jedi.inference.gradual.conversion import convert_names, convert_values
from jedi.inference.base_value import ContextSet from jedi.inference.base_value import ValueSet
from jedi.api.keywords import KeywordName from jedi.api.keywords import KeywordName
@@ -187,7 +187,7 @@ class BaseDefinition(object):
def in_builtin_module(self): def in_builtin_module(self):
"""Whether this is a builtin module.""" """Whether this is a builtin module."""
if isinstance(self._get_module(), StubModuleContext): if isinstance(self._get_module(), StubModuleValue):
return any(isinstance(value, compiled.CompiledObject) return any(isinstance(value, compiled.CompiledObject)
for value in self._get_module().non_stub_value_set) for value in self._get_module().non_stub_value_set)
return isinstance(self._get_module(), compiled.CompiledObject) return isinstance(self._get_module(), compiled.CompiledObject)
@@ -324,7 +324,7 @@ class BaseDefinition(object):
# results of Python objects instead of stubs. # results of Python objects instead of stubs.
names = convert_names([self._name], prefer_stubs=True) names = convert_names([self._name], prefer_stubs=True)
values = convert_values( values = convert_values(
ContextSet.from_sets(n.infer() for n in names), ValueSet.from_sets(n.infer() for n in names),
only_stubs=only_stubs, only_stubs=only_stubs,
prefer_stubs=prefer_stubs, prefer_stubs=prefer_stubs,
) )
@@ -364,7 +364,7 @@ class BaseDefinition(object):
if value is None: if value is None:
return None return None
if isinstance(value, FunctionExecutionContext): if isinstance(value, FunctionExecutionValue):
value = value.function_value value = value.function_value
return Definition(self._infer_state, value.name) return Definition(self._infer_state, value.name)
@@ -755,7 +755,7 @@ class _Help(object):
if not raw: if not raw:
signature_text = _format_signatures(value) signature_text = _format_signatures(value)
if not doc and value.is_stub(): if not doc and value.is_stub():
for c in convert_values(ContextSet({value}), ignore_compiled=False): for c in convert_values(ValueSet({value}), ignore_compiled=False):
doc = c.py__doc__() doc = c.py__doc__()
if doc: if doc:
break break

View File

@@ -2,11 +2,11 @@
TODO Some parts of this module are still not well documented. TODO Some parts of this module are still not well documented.
""" """
from jedi.inference.value import ModuleContext from jedi.inference.value import ModuleValue
from jedi.inference import compiled from jedi.inference import compiled
from jedi.inference.compiled import mixed from jedi.inference.compiled import mixed
from jedi.inference.compiled.access import create_access_path from jedi.inference.compiled.access import create_access_path
from jedi.inference.base_value import ContextWrapper from jedi.inference.base_value import ValueWrapper
def _create(infer_state, obj): def _create(infer_state, obj):
@@ -20,17 +20,17 @@ class NamespaceObject(object):
self.__dict__ = dct self.__dict__ = dct
class MixedModuleContext(ContextWrapper): class MixedModuleValue(ValueWrapper):
type = 'mixed_module' type = 'mixed_module'
def __init__(self, infer_state, tree_module, namespaces, file_io, code_lines): def __init__(self, infer_state, tree_module, namespaces, file_io, code_lines):
module_value = ModuleContext( module_value = ModuleValue(
infer_state, tree_module, infer_state, tree_module,
file_io=file_io, file_io=file_io,
string_names=('__main__',), string_names=('__main__',),
code_lines=code_lines code_lines=code_lines
) )
super(MixedModuleContext, self).__init__(module_value) super(MixedModuleValue, self).__init__(module_value)
self._namespace_objects = [NamespaceObject(n) for n in namespaces] self._namespace_objects = [NamespaceObject(n) for n in namespaces]
def get_filters(self, *args, **kwargs): def get_filters(self, *args, **kwargs):

View File

@@ -44,7 +44,7 @@ class Keyword(object):
def get_signatures(self): def get_signatures(self):
# TODO this makes no sense, I think Keyword should somehow merge with # TODO this makes no sense, I think Keyword should somehow merge with
# Context to make it easier for the api/classes.py to deal with all # Value to make it easier for the api/classes.py to deal with all
# of it. # of it.
return [] return []

View File

@@ -1 +1 @@
from jedi.common.value import BaseContextSet, BaseContext from jedi.common.value import BaseValueSet, BaseValue

View File

@@ -1,4 +1,4 @@
class BaseContext(object): class BaseValue(object):
def __init__(self, infer_state, parent_value=None): def __init__(self, infer_state, parent_value=None):
self.infer_state = infer_state self.infer_state = infer_state
self.parent_value = parent_value self.parent_value = parent_value
@@ -11,11 +11,11 @@ class BaseContext(object):
value = value.parent_value value = value.parent_value
class BaseContextSet(object): class BaseValueSet(object):
def __init__(self, iterable): def __init__(self, iterable):
self._set = frozenset(iterable) self._set = frozenset(iterable)
for value in iterable: for value in iterable:
assert not isinstance(value, BaseContextSet) assert not isinstance(value, BaseValueSet)
@classmethod @classmethod
def _from_frozen_set(cls, frozenset_): def _from_frozen_set(cls, frozenset_):
@@ -30,7 +30,7 @@ class BaseContextSet(object):
""" """
aggregated = set() aggregated = set()
for set_ in sets: for set_ in sets:
if isinstance(set_, BaseContextSet): if isinstance(set_, BaseValueSet):
aggregated |= set_._set aggregated |= set_._set
else: else:
aggregated |= frozenset(set_) aggregated |= frozenset(set_)

View File

@@ -33,7 +33,7 @@ return the ``date`` class.
To *visualize* this (simplified): To *visualize* this (simplified):
- ``InferState.infer_expr_stmt`` doesn't do much, because there's no assignment. - ``InferState.infer_expr_stmt`` doesn't do much, because there's no assignment.
- ``Context.infer_node`` cares for resolving the dotted path - ``Value.infer_node`` cares for resolving the dotted path
- ``InferState.find_types`` searches for global definitions of datetime, which - ``InferState.find_types`` searches for global definitions of datetime, which
it finds in the definition of an import, by scanning the syntax tree. it finds in the definition of an import, by scanning the syntax tree.
- Using the import logic, the datetime module is found. - Using the import logic, the datetime module is found.
@@ -75,11 +75,11 @@ from jedi.inference import recursion
from jedi.inference.cache import infer_state_function_cache from jedi.inference.cache import infer_state_function_cache
from jedi.inference import helpers from jedi.inference import helpers
from jedi.inference.names import TreeNameDefinition, ParamName from jedi.inference.names import TreeNameDefinition, ParamName
from jedi.inference.base_value import ContextualizedName, ContextualizedNode, \ from jedi.inference.base_value import ValueualizedName, ValueualizedNode, \
ContextSet, NO_VALUES, iterate_values ValueSet, NO_VALUES, iterate_values
from jedi.inference.value import ClassContext, FunctionContext, \ from jedi.inference.value import ClassValue, FunctionValue, \
AnonymousInstance, BoundMethod AnonymousInstance, BoundMethod
from jedi.inference.value.iterable import CompForContext from jedi.inference.value.iterable import CompForValue
from jedi.inference.syntax_tree import infer_trailer, infer_expr_stmt, \ from jedi.inference.syntax_tree import infer_trailer, infer_expr_stmt, \
infer_node, check_tuple_assignments infer_node, check_tuple_assignments
from jedi.plugins import plugin_manager from jedi.plugins import plugin_manager
@@ -97,7 +97,7 @@ class InferState(object):
self.latest_grammar = parso.load_grammar(version='3.7') self.latest_grammar = parso.load_grammar(version='3.7')
self.memoize_cache = {} # for memoize decorators self.memoize_cache = {} # for memoize decorators
self.module_cache = imports.ModuleCache() # does the job of `sys.modules`. self.module_cache = imports.ModuleCache() # does the job of `sys.modules`.
self.stub_module_cache = {} # Dict[Tuple[str, ...], Optional[ModuleContext]] self.stub_module_cache = {} # Dict[Tuple[str, ...], Optional[ModuleValue]]
self.compiled_cache = {} # see `inference.compiled.create()` self.compiled_cache = {} # see `inference.compiled.create()`
self.inferred_element_counts = {} self.inferred_element_counts = {}
self.mixed_cache = {} # see `inference.compiled.mixed._create()` self.mixed_cache = {} # see `inference.compiled.mixed._create()`
@@ -151,7 +151,7 @@ class InferState(object):
return self.project._get_sys_path(self, environment=self.environment, **kwargs) return self.project._get_sys_path(self, environment=self.environment, **kwargs)
def infer_element(self, value, element): def infer_element(self, value, element):
if isinstance(value, CompForContext): if isinstance(value, CompForValue):
return infer_node(value, element) return infer_node(value, element)
if_stmt = element if_stmt = element
@@ -201,7 +201,7 @@ class InferState(object):
new_name_dicts = list(original_name_dicts) new_name_dicts = list(original_name_dicts)
for i, name_dict in enumerate(new_name_dicts): for i, name_dict in enumerate(new_name_dicts):
new_name_dicts[i] = name_dict.copy() new_name_dicts[i] = name_dict.copy()
new_name_dicts[i][if_name.value] = ContextSet([definition]) new_name_dicts[i][if_name.value] = ValueSet([definition])
name_dicts += new_name_dicts name_dicts += new_name_dicts
else: else:
@@ -244,10 +244,10 @@ class InferState(object):
is_classdef = type_ == 'classdef' is_classdef = type_ == 'classdef'
if is_classdef or type_ == 'funcdef': if is_classdef or type_ == 'funcdef':
if is_classdef: if is_classdef:
c = ClassContext(self, value, name.parent) c = ClassValue(self, value, name.parent)
else: else:
c = FunctionContext.from_value(value, name.parent) c = FunctionValue.from_value(value, name.parent)
return ContextSet([c]) return ValueSet([c])
if type_ == 'expr_stmt': if type_ == 'expr_stmt':
is_simple_name = name.parent.type not in ('power', 'trailer') is_simple_name = name.parent.type not in ('power', 'trailer')
@@ -255,9 +255,9 @@ class InferState(object):
return infer_expr_stmt(value, def_, name) return infer_expr_stmt(value, def_, name)
if type_ == 'for_stmt': if type_ == 'for_stmt':
container_types = value.infer_node(def_.children[3]) container_types = value.infer_node(def_.children[3])
cn = ContextualizedNode(value, def_.children[3]) cn = ValueualizedNode(value, def_.children[3])
for_types = iterate_values(container_types, cn) for_types = iterate_values(container_types, cn)
c_node = ContextualizedName(value, name) c_node = ValueualizedName(value, name)
return check_tuple_assignments(self, c_node, for_types) return check_tuple_assignments(self, c_node, for_types)
if type_ in ('import_from', 'import_name'): if type_ in ('import_from', 'import_name'):
return imports.infer_import(value, name) return imports.infer_import(value, name)
@@ -393,7 +393,7 @@ class InferState(object):
parent_value = from_scope_node(parent_scope) parent_value = from_scope_node(parent_scope)
if is_funcdef: if is_funcdef:
func = FunctionContext.from_value(parent_value, scope_node) func = FunctionValue.from_value(parent_value, scope_node)
if parent_value.is_class(): if parent_value.is_class():
instance = AnonymousInstance( instance = AnonymousInstance(
self, parent_value.parent_value, parent_value) self, parent_value.parent_value, parent_value)
@@ -406,11 +406,11 @@ class InferState(object):
return func.get_function_execution() return func.get_function_execution()
return func return func
elif scope_node.type == 'classdef': elif scope_node.type == 'classdef':
return ClassContext(self, parent_value, scope_node) return ClassValue(self, parent_value, scope_node)
elif scope_node.type in ('comp_for', 'sync_comp_for'): elif scope_node.type in ('comp_for', 'sync_comp_for'):
if node.start_pos >= scope_node.children[-1].start_pos: if node.start_pos >= scope_node.children[-1].start_pos:
return parent_value return parent_value
return CompForContext.from_comp_for(parent_value, scope_node) return CompForValue.from_comp_for(parent_value, scope_node)
raise Exception("There's a scope that was not managed.") raise Exception("There's a scope that was not managed.")
base_node = base_value.tree_node base_node = base_value.tree_node

View File

@@ -6,10 +6,10 @@ from jedi._compatibility import zip_longest
from jedi import debug from jedi import debug
from jedi.inference.utils import PushBackIterator from jedi.inference.utils import PushBackIterator
from jedi.inference import analysis from jedi.inference import analysis
from jedi.inference.lazy_value import LazyKnownContext, LazyKnownContexts, \ from jedi.inference.lazy_value import LazyKnownValue, LazyKnownValues, \
LazyTreeContext, get_merged_lazy_value LazyTreeValue, get_merged_lazy_value
from jedi.inference.names import ParamName, TreeNameDefinition from jedi.inference.names import ParamName, TreeNameDefinition
from jedi.inference.base_value import NO_VALUES, ContextSet, ContextualizedNode from jedi.inference.base_value import NO_VALUES, ValueSet, ValueualizedNode
from jedi.inference.value import iterable from jedi.inference.value import iterable
from jedi.inference.cache import infer_state_as_method_param_cache from jedi.inference.cache import infer_state_as_method_param_cache
from jedi.inference.param import get_executed_params_and_issues, ExecutedParam from jedi.inference.param import get_executed_params_and_issues, ExecutedParam
@@ -84,7 +84,7 @@ def _iterate_argument_clinic(infer_state, arguments, parameters):
break break
lazy_values.append(argument) lazy_values.append(argument)
yield ContextSet([iterable.FakeSequence(infer_state, u'tuple', lazy_values)]) yield ValueSet([iterable.FakeSequence(infer_state, u'tuple', lazy_values)])
lazy_values lazy_values
continue continue
elif stars == 2: elif stars == 2:
@@ -234,7 +234,7 @@ class TreeArguments(AbstractArguments):
if el.type == 'argument': if el.type == 'argument':
c = el.children c = el.children
if len(c) == 3: # Keyword argument. if len(c) == 3: # Keyword argument.
named_args.append((c[0].value, LazyTreeContext(self.value, c[2]),)) named_args.append((c[0].value, LazyTreeValue(self.value, c[2]),))
else: # Generator comprehension. else: # Generator comprehension.
# Include the brackets with the parent. # Include the brackets with the parent.
sync_comp_for = el.children[1] sync_comp_for = el.children[1]
@@ -246,9 +246,9 @@ class TreeArguments(AbstractArguments):
sync_comp_for_node=sync_comp_for, sync_comp_for_node=sync_comp_for,
entry_node=el.children[0], entry_node=el.children[0],
) )
yield None, LazyKnownContext(comp) yield None, LazyKnownValue(comp)
else: else:
yield None, LazyTreeContext(self.value, el) yield None, LazyTreeValue(self.value, el)
# Reordering arguments is necessary, because star args sometimes appear # Reordering arguments is necessary, because star args sometimes appear
# after named argument, but in the actual order it's prepended. # after named argument, but in the actual order it's prepended.
@@ -302,9 +302,9 @@ class TreeArguments(AbstractArguments):
break break
if arguments.argument_node is not None: if arguments.argument_node is not None:
return [ContextualizedNode(arguments.value, arguments.argument_node)] return [ValueualizedNode(arguments.value, arguments.argument_node)]
if arguments.trailer is not None: if arguments.trailer is not None:
return [ContextualizedNode(arguments.value, arguments.trailer)] return [ValueualizedNode(arguments.value, arguments.trailer)]
return [] return []
@@ -314,7 +314,7 @@ class ValuesArguments(AbstractArguments):
def unpack(self, funcdef=None): def unpack(self, funcdef=None):
for values in self._values_list: for values in self._values_list:
yield None, LazyKnownContexts(values) yield None, LazyKnownValues(values)
def __repr__(self): def __repr__(self):
return '<%s: %s>' % (self.__class__.__name__, self._values_list) return '<%s: %s>' % (self.__class__.__name__, self._values_list)

View File

@@ -1,8 +1,8 @@
""" """
Contexts are the "values" that Python would return. However Contexts are at the Values are the "values" that Python would return. However Values are at the
same time also the "values" that a user is currently sitting in. same time also the "values" that a user is currently sitting in.
A ContextSet is typically used to specify the return of a function or any other A ValueSet is typically used to specify the return of a function or any other
static analysis operation. In jedi there are always multiple returns and not static analysis operation. In jedi there are always multiple returns and not
just one. just one.
""" """
@@ -13,7 +13,7 @@ from parso.python.tree import ExprStmt, SyncCompFor
from jedi import debug from jedi import debug
from jedi._compatibility import zip_longest, unicode from jedi._compatibility import zip_longest, unicode
from jedi.parser_utils import clean_scope_docstring from jedi.parser_utils import clean_scope_docstring
from jedi.common import BaseContextSet, BaseContext from jedi.common import BaseValueSet, BaseValue
from jedi.inference.helpers import SimpleGetItemNotFound from jedi.inference.helpers import SimpleGetItemNotFound
from jedi.inference.utils import safe_property from jedi.inference.utils import safe_property
from jedi.inference.cache import infer_state_as_method_param_cache from jedi.inference.cache import infer_state_as_method_param_cache
@@ -22,7 +22,7 @@ from jedi.cache import memoize_method
_sentinel = object() _sentinel = object()
class HelperContextMixin(object): class HelperValueMixin(object):
def get_root_value(self): def get_root_value(self):
value = self value = self
while True: while True:
@@ -40,17 +40,17 @@ class HelperContextMixin(object):
def execute_with_values(self, *value_list): def execute_with_values(self, *value_list):
from jedi.inference.arguments import ValuesArguments from jedi.inference.arguments import ValuesArguments
arguments = ValuesArguments([ContextSet([value]) for value in value_list]) arguments = ValuesArguments([ValueSet([value]) for value in value_list])
return self.infer_state.execute(self, arguments) return self.infer_state.execute(self, arguments)
def execute_annotation(self): def execute_annotation(self):
return self.execute_with_values() return self.execute_with_values()
def gather_annotation_classes(self): def gather_annotation_classes(self):
return ContextSet([self]) return ValueSet([self])
def merge_types_of_iterate(self, valueualized_node=None, is_async=False): def merge_types_of_iterate(self, valueualized_node=None, is_async=False):
return ContextSet.from_sets( return ValueSet.from_sets(
lazy_value.infer() lazy_value.infer()
for lazy_value in self.iterate(valueualized_node, is_async) for lazy_value in self.iterate(valueualized_node, is_async)
) )
@@ -86,11 +86,11 @@ class HelperContextMixin(object):
def iterate(self, valueualized_node=None, is_async=False): def iterate(self, valueualized_node=None, is_async=False):
debug.dbg('iterate %s', self) debug.dbg('iterate %s', self)
if is_async: if is_async:
from jedi.inference.lazy_value import LazyKnownContexts from jedi.inference.lazy_value import LazyKnownValues
# TODO if no __aiter__ values are there, error should be: # TODO if no __aiter__ values are there, error should be:
# TypeError: 'async for' requires an object with __aiter__ method, got int # TypeError: 'async for' requires an object with __aiter__ method, got int
return iter([ return iter([
LazyKnownContexts( LazyKnownValues(
self.py__getattribute__('__aiter__').execute_with_values() self.py__getattribute__('__aiter__').execute_with_values()
.py__getattribute__('__anext__').execute_with_values() .py__getattribute__('__anext__').execute_with_values()
.py__getattribute__('__await__').execute_with_values() .py__getattribute__('__await__').execute_with_values()
@@ -107,12 +107,12 @@ class HelperContextMixin(object):
def is_same_class(self, class2): def is_same_class(self, class2):
# Class matching should prefer comparisons that are not this function. # Class matching should prefer comparisons that are not this function.
if type(class2).is_same_class != HelperContextMixin.is_same_class: if type(class2).is_same_class != HelperValueMixin.is_same_class:
return class2.is_same_class(self) return class2.is_same_class(self)
return self == class2 return self == class2
class Context(HelperContextMixin, BaseContext): class Value(HelperValueMixin, BaseValue):
""" """
Should be defined, otherwise the API returns empty types. Should be defined, otherwise the API returns empty types.
""" """
@@ -216,24 +216,24 @@ def iterate_values(values, valueualized_node=None, is_async=False):
Calls `iterate`, on all values but ignores the ordering and just returns Calls `iterate`, on all values but ignores the ordering and just returns
all values that the iterate functions yield. all values that the iterate functions yield.
""" """
return ContextSet.from_sets( return ValueSet.from_sets(
lazy_value.infer() lazy_value.infer()
for lazy_value in values.iterate(valueualized_node, is_async=is_async) for lazy_value in values.iterate(valueualized_node, is_async=is_async)
) )
class _ContextWrapperBase(HelperContextMixin): class _ValueWrapperBase(HelperValueMixin):
predefined_names = {} predefined_names = {}
@safe_property @safe_property
def name(self): def name(self):
from jedi.inference.names import ContextName from jedi.inference.names import ValueName
wrapped_name = self._wrapped_value.name wrapped_name = self._wrapped_value.name
if wrapped_name.tree_name is not None: if wrapped_name.tree_name is not None:
return ContextName(self, wrapped_name.tree_name) return ValueName(self, wrapped_name.tree_name)
else: else:
from jedi.inference.compiled import CompiledContextName from jedi.inference.compiled import CompiledValueName
return CompiledContextName(self, wrapped_name.string_name) return CompiledValueName(self, wrapped_name.string_name)
@classmethod @classmethod
@infer_state_as_method_param_cache() @infer_state_as_method_param_cache()
@@ -245,7 +245,7 @@ class _ContextWrapperBase(HelperContextMixin):
return getattr(self._wrapped_value, name) return getattr(self._wrapped_value, name)
class LazyContextWrapper(_ContextWrapperBase): class LazyValueWrapper(_ValueWrapperBase):
@safe_property @safe_property
@memoize_method @memoize_method
def _wrapped_value(self): def _wrapped_value(self):
@@ -259,7 +259,7 @@ class LazyContextWrapper(_ContextWrapperBase):
raise NotImplementedError raise NotImplementedError
class ContextWrapper(_ContextWrapperBase): class ValueWrapper(_ValueWrapperBase):
def __init__(self, wrapped_value): def __init__(self, wrapped_value):
self._wrapped_value = wrapped_value self._wrapped_value = wrapped_value
@@ -267,9 +267,9 @@ class ContextWrapper(_ContextWrapperBase):
return '%s(%s)' % (self.__class__.__name__, self._wrapped_value) return '%s(%s)' % (self.__class__.__name__, self._wrapped_value)
class TreeContext(Context): class TreeValue(Value):
def __init__(self, infer_state, parent_value, tree_node): def __init__(self, infer_state, parent_value, tree_node):
super(TreeContext, self).__init__(infer_state, parent_value) super(TreeValue, self).__init__(infer_state, parent_value)
self.predefined_names = {} self.predefined_names = {}
self.tree_node = tree_node self.tree_node = tree_node
@@ -277,7 +277,7 @@ class TreeContext(Context):
return '<%s: %s>' % (self.__class__.__name__, self.tree_node) return '<%s: %s>' % (self.__class__.__name__, self.tree_node)
class ContextualizedNode(object): class ValueualizedNode(object):
def __init__(self, value, node): def __init__(self, value, node):
self.value = value self.value = value
self.node = node self.node = node
@@ -292,7 +292,7 @@ class ContextualizedNode(object):
return '<%s: %s in %s>' % (self.__class__.__name__, self.node, self.value) return '<%s: %s in %s>' % (self.__class__.__name__, self.node, self.value)
class ContextualizedName(ContextualizedNode): class ValueualizedName(ValueualizedNode):
# TODO merge with TreeNameDefinition?! # TODO merge with TreeNameDefinition?!
@property @property
def name(self): def name(self):
@@ -375,16 +375,16 @@ def _getitem(value, index_values, valueualized_node):
# all results. # all results.
if unused_values or not index_values: if unused_values or not index_values:
result |= value.py__getitem__( result |= value.py__getitem__(
ContextSet(unused_values), ValueSet(unused_values),
valueualized_node valueualized_node
) )
debug.dbg('py__getitem__ result: %s', result) debug.dbg('py__getitem__ result: %s', result)
return result return result
class ContextSet(BaseContextSet): class ValueSet(BaseValueSet):
def py__class__(self): def py__class__(self):
return ContextSet(c.py__class__() for c in self._set) return ValueSet(c.py__class__() for c in self._set)
def iterate(self, valueualized_node=None, is_async=False): def iterate(self, valueualized_node=None, is_async=False):
from jedi.inference.lazy_value import get_merged_lazy_value from jedi.inference.lazy_value import get_merged_lazy_value
@@ -395,18 +395,18 @@ class ContextSet(BaseContextSet):
) )
def execute(self, arguments): def execute(self, arguments):
return ContextSet.from_sets(c.infer_state.execute(c, arguments) for c in self._set) return ValueSet.from_sets(c.infer_state.execute(c, arguments) for c in self._set)
def execute_with_values(self, *args, **kwargs): def execute_with_values(self, *args, **kwargs):
return ContextSet.from_sets(c.execute_with_values(*args, **kwargs) for c in self._set) return ValueSet.from_sets(c.execute_with_values(*args, **kwargs) for c in self._set)
def py__getattribute__(self, *args, **kwargs): def py__getattribute__(self, *args, **kwargs):
if kwargs.get('is_goto'): if kwargs.get('is_goto'):
return reduce(add, [c.py__getattribute__(*args, **kwargs) for c in self._set], []) return reduce(add, [c.py__getattribute__(*args, **kwargs) for c in self._set], [])
return ContextSet.from_sets(c.py__getattribute__(*args, **kwargs) for c in self._set) return ValueSet.from_sets(c.py__getattribute__(*args, **kwargs) for c in self._set)
def get_item(self, *args, **kwargs): def get_item(self, *args, **kwargs):
return ContextSet.from_sets(_getitem(c, *args, **kwargs) for c in self._set) return ValueSet.from_sets(_getitem(c, *args, **kwargs) for c in self._set)
def try_merge(self, function_name): def try_merge(self, function_name):
value_set = self.__class__([]) value_set = self.__class__([])
@@ -420,17 +420,17 @@ class ContextSet(BaseContextSet):
return value_set return value_set
def gather_annotation_classes(self): def gather_annotation_classes(self):
return ContextSet.from_sets([c.gather_annotation_classes() for c in self._set]) return ValueSet.from_sets([c.gather_annotation_classes() for c in self._set])
def get_signatures(self): def get_signatures(self):
return [sig for c in self._set for sig in c.get_signatures()] return [sig for c in self._set for sig in c.get_signatures()]
NO_VALUES = ContextSet([]) NO_VALUES = ValueSet([])
def iterator_to_value_set(func): def iterator_to_value_set(func):
def wrapper(*args, **kwargs): def wrapper(*args, **kwargs):
return ContextSet(func(*args, **kwargs)) return ValueSet(func(*args, **kwargs))
return wrapper return wrapper

View File

@@ -1,7 +1,7 @@
from jedi._compatibility import unicode from jedi._compatibility import unicode
from jedi.inference.compiled.value import CompiledObject, CompiledName, \ from jedi.inference.compiled.value import CompiledObject, CompiledName, \
CompiledObjectFilter, CompiledContextName, create_from_access_path CompiledObjectFilter, CompiledValueName, create_from_access_path
from jedi.inference.base_value import ContextWrapper, LazyContextWrapper from jedi.inference.base_value import ValueWrapper, LazyValueWrapper
def builtin_from_name(infer_state, string): def builtin_from_name(infer_state, string):
@@ -16,7 +16,7 @@ def builtin_from_name(infer_state, string):
return value return value
class CompiledValue(LazyContextWrapper): class CompiledValue(LazyValueWrapper):
def __init__(self, compiled_obj): def __init__(self, compiled_obj):
self.infer_state = compiled_obj.infer_state self.infer_state = compiled_obj.infer_state
self._compiled_obj = compiled_obj self._compiled_obj = compiled_obj

View File

@@ -12,9 +12,9 @@ from jedi import settings
from jedi.inference import compiled from jedi.inference import compiled
from jedi.cache import underscore_memoization from jedi.cache import underscore_memoization
from jedi.file_io import FileIO from jedi.file_io import FileIO
from jedi.inference.base_value import ContextSet, ContextWrapper from jedi.inference.base_value import ValueSet, ValueWrapper
from jedi.inference.helpers import SimpleGetItemNotFound from jedi.inference.helpers import SimpleGetItemNotFound
from jedi.inference.value import ModuleContext from jedi.inference.value import ModuleValue
from jedi.inference.cache import infer_state_function_cache from jedi.inference.cache import infer_state_function_cache
from jedi.inference.compiled.getattr_static import getattr_static from jedi.inference.compiled.getattr_static import getattr_static
from jedi.inference.compiled.access import compiled_objects_cache, \ from jedi.inference.compiled.access import compiled_objects_cache, \
@@ -25,7 +25,7 @@ from jedi.inference.gradual.conversion import to_stub
_sentinel = object() _sentinel = object()
class MixedObject(ContextWrapper): class MixedObject(ValueWrapper):
""" """
A ``MixedObject`` is used in two ways: A ``MixedObject`` is used in two ways:
@@ -104,10 +104,10 @@ class MixedName(compiled.CompiledName):
assert len(access_paths) assert len(access_paths)
values = [None] values = [None]
for access in access_paths: for access in access_paths:
values = ContextSet.from_sets( values = ValueSet.from_sets(
_create(self._infer_state, access, parent_value=c) _create(self._infer_state, access, parent_value=c)
if c is None or isinstance(c, MixedObject) if c is None or isinstance(c, MixedObject)
else ContextSet({create_cached_compiled_object(c.infer_state, access, c)}) else ValueSet({create_cached_compiled_object(c.infer_state, access, c)})
for c in values for c in values
) )
return values return values
@@ -244,11 +244,11 @@ def _create(infer_state, access_handle, parent_value, *args):
if result is None: if result is None:
# TODO Care about generics from stuff like `[1]` and don't return like this. # TODO Care about generics from stuff like `[1]` and don't return like this.
if type(python_object) in (dict, list, tuple): if type(python_object) in (dict, list, tuple):
return ContextSet({compiled_object}) return ValueSet({compiled_object})
tree_values = to_stub(compiled_object) tree_values = to_stub(compiled_object)
if not tree_values: if not tree_values:
return ContextSet({compiled_object}) return ValueSet({compiled_object})
else: else:
module_node, tree_node, file_io, code_lines = result module_node, tree_node, file_io, code_lines = result
@@ -256,7 +256,7 @@ def _create(infer_state, access_handle, parent_value, *args):
# TODO this __name__ is probably wrong. # TODO this __name__ is probably wrong.
name = compiled_object.get_root_value().py__name__() name = compiled_object.get_root_value().py__name__()
string_names = tuple(name.split('.')) string_names = tuple(name.split('.'))
module_value = ModuleContext( module_value = ModuleValue(
infer_state, module_node, infer_state, module_node,
file_io=file_io, file_io=file_io,
string_names=string_names, string_names=string_names,
@@ -264,16 +264,16 @@ def _create(infer_state, access_handle, parent_value, *args):
is_package=hasattr(compiled_object, 'py__path__'), is_package=hasattr(compiled_object, 'py__path__'),
) )
if name is not None: if name is not None:
infer_state.module_cache.add(string_names, ContextSet([module_value])) infer_state.module_cache.add(string_names, ValueSet([module_value]))
else: else:
if parent_value.tree_node.get_root_node() != module_node: if parent_value.tree_node.get_root_node() != module_node:
# This happens e.g. when __module__ is wrong, or when using # This happens e.g. when __module__ is wrong, or when using
# TypeVar('foo'), where Jedi uses 'foo' as the name and # TypeVar('foo'), where Jedi uses 'foo' as the name and
# Python's TypeVar('foo').__module__ will be typing. # Python's TypeVar('foo').__module__ will be typing.
return ContextSet({compiled_object}) return ValueSet({compiled_object})
module_value = parent_value.get_root_value() module_value = parent_value.get_root_value()
tree_values = ContextSet({ tree_values = ValueSet({
module_value.create_value( module_value.create_value(
tree_node, tree_node,
node_is_value=True, node_is_value=True,
@@ -285,7 +285,7 @@ def _create(infer_state, access_handle, parent_value, *args):
# Is an instance, not a class. # Is an instance, not a class.
tree_values = tree_values.execute_with_values() tree_values = tree_values.execute_with_values()
return ContextSet( return ValueSet(
MixedObject(compiled_object, tree_value=tree_value) MixedObject(compiled_object, tree_value=tree_value)
for tree_value in tree_values for tree_value in tree_values
) )

View File

@@ -9,10 +9,10 @@ from jedi.inference.utils import to_list
from jedi._compatibility import force_unicode, Parameter, cast_path from jedi._compatibility import force_unicode, Parameter, cast_path
from jedi.cache import underscore_memoization, memoize_method from jedi.cache import underscore_memoization, memoize_method
from jedi.inference.filters import AbstractFilter from jedi.inference.filters import AbstractFilter
from jedi.inference.names import AbstractNameDefinition, ContextNameMixin, \ from jedi.inference.names import AbstractNameDefinition, ValueNameMixin, \
ParamNameInterface ParamNameInterface
from jedi.inference.base_value import Context, ContextSet, NO_VALUES from jedi.inference.base_value import Value, ValueSet, NO_VALUES
from jedi.inference.lazy_value import LazyKnownContext from jedi.inference.lazy_value import LazyKnownValue
from jedi.inference.compiled.access import _sentinel from jedi.inference.compiled.access import _sentinel
from jedi.inference.cache import infer_state_function_cache from jedi.inference.cache import infer_state_function_cache
from jedi.inference.helpers import reraise_getitem_errors from jedi.inference.helpers import reraise_getitem_errors
@@ -40,7 +40,7 @@ class CheckAttribute(object):
return partial(self.func, instance) return partial(self.func, instance)
class CompiledObject(Context): class CompiledObject(Value):
def __init__(self, infer_state, access_handle, parent_value=None): def __init__(self, infer_state, access_handle, parent_value=None):
super(CompiledObject, self).__init__(infer_state, parent_value) super(CompiledObject, self).__init__(infer_state, parent_value)
self.access_handle = access_handle self.access_handle = access_handle
@@ -58,11 +58,11 @@ class CompiledObject(Context):
else: else:
if self.access_handle.is_class(): if self.access_handle.is_class():
from jedi.inference.value import CompiledInstance from jedi.inference.value import CompiledInstance
return ContextSet([ return ValueSet([
CompiledInstance(self.infer_state, self.parent_value, self, arguments) CompiledInstance(self.infer_state, self.parent_value, self, arguments)
]) ])
else: else:
return ContextSet(self._execute_function(arguments)) return ValueSet(self._execute_function(arguments))
@CheckAttribute() @CheckAttribute()
def py__class__(self): def py__class__(self):
@@ -187,7 +187,7 @@ class CompiledObject(Context):
if access is None: if access is None:
return NO_VALUES return NO_VALUES
return ContextSet([create_from_access_path(self.infer_state, access)]) return ValueSet([create_from_access_path(self.infer_state, access)])
def py__getitem__(self, index_value_set, valueualized_node): def py__getitem__(self, index_value_set, valueualized_node):
all_access_paths = self.access_handle.py__getitem__all_values() all_access_paths = self.access_handle.py__getitem__all_values()
@@ -195,7 +195,7 @@ class CompiledObject(Context):
# This means basically that no __getitem__ has been defined on this # This means basically that no __getitem__ has been defined on this
# object. # object.
return super(CompiledObject, self).py__getitem__(index_value_set, valueualized_node) return super(CompiledObject, self).py__getitem__(index_value_set, valueualized_node)
return ContextSet( return ValueSet(
create_from_access_path(self.infer_state, access) create_from_access_path(self.infer_state, access)
for access in all_access_paths for access in all_access_paths
) )
@@ -215,7 +215,7 @@ class CompiledObject(Context):
return return
for access in access_path_list: for access in access_path_list:
yield LazyKnownContext(create_from_access_path(self.infer_state, access)) yield LazyKnownValue(create_from_access_path(self.infer_state, access))
def py__name__(self): def py__name__(self):
return self.access_handle.py__name__() return self.access_handle.py__name__()
@@ -225,7 +225,7 @@ class CompiledObject(Context):
name = self.py__name__() name = self.py__name__()
if name is None: if name is None:
name = self.access_handle.get_repr() name = self.access_handle.get_repr()
return CompiledContextName(self, name) return CompiledValueName(self, name)
def _execute_function(self, params): def _execute_function(self, params):
from jedi.inference import docstrings from jedi.inference import docstrings
@@ -295,7 +295,7 @@ class CompiledName(AbstractNameDefinition):
@underscore_memoization @underscore_memoization
def infer(self): def infer(self):
return ContextSet([_create_from_name( return ValueSet([_create_from_name(
self._infer_state, self.parent_value, self.string_name self._infer_state, self.parent_value, self.string_name
)]) )])
@@ -325,7 +325,7 @@ class SignatureParamName(ParamNameInterface, AbstractNameDefinition):
infer_state = self.parent_value.infer_state infer_state = self.parent_value.infer_state
values = NO_VALUES values = NO_VALUES
if p.has_default: if p.has_default:
values = ContextSet([create_from_access_path(infer_state, p.default)]) values = ValueSet([create_from_access_path(infer_state, p.default)])
if p.has_annotation: if p.has_annotation:
annotation = create_from_access_path(infer_state, p.annotation) annotation = create_from_access_path(infer_state, p.annotation)
values |= annotation.execute_with_values() values |= annotation.execute_with_values()
@@ -351,7 +351,7 @@ class UnresolvableParamName(ParamNameInterface, AbstractNameDefinition):
return NO_VALUES return NO_VALUES
class CompiledContextName(ContextNameMixin, AbstractNameDefinition): class CompiledValueName(ValueNameMixin, AbstractNameDefinition):
def __init__(self, value, name): def __init__(self, value, name):
self.string_name = name self.string_name = name
self._value = value self._value = value

View File

@@ -25,9 +25,9 @@ from jedi._compatibility import u
from jedi import debug from jedi import debug
from jedi.inference.utils import indent_block from jedi.inference.utils import indent_block
from jedi.inference.cache import infer_state_method_cache from jedi.inference.cache import infer_state_method_cache
from jedi.inference.base_value import iterator_to_value_set, ContextSet, \ from jedi.inference.base_value import iterator_to_value_set, ValueSet, \
NO_VALUES NO_VALUES
from jedi.inference.lazy_value import LazyKnownContexts from jedi.inference.lazy_value import LazyKnownValues
DOCSTRING_PARAM_PATTERNS = [ DOCSTRING_PARAM_PATTERNS = [
@@ -221,8 +221,8 @@ def _infer_for_statement_string(module_value, string):
if stmt.type not in ('name', 'atom', 'atom_expr'): if stmt.type not in ('name', 'atom', 'atom_expr'):
return [] return []
from jedi.inference.value import FunctionContext from jedi.inference.value import FunctionValue
function_value = FunctionContext( function_value = FunctionValue(
module_value.infer_state, module_value.infer_state,
module_value, module_value,
funcdef funcdef
@@ -242,7 +242,7 @@ def _execute_types_in_stmt(module_value, stmt):
contain is executed. (Used as type information). contain is executed. (Used as type information).
""" """
definitions = module_value.infer_node(stmt) definitions = module_value.infer_node(stmt)
return ContextSet.from_sets( return ValueSet.from_sets(
_execute_array_values(module_value.infer_state, d) _execute_array_values(module_value.infer_state, d)
for d in definitions for d in definitions
) )
@@ -253,15 +253,15 @@ def _execute_array_values(infer_state, array):
Tuples indicate that there's not just one return value, but the listed Tuples indicate that there's not just one return value, but the listed
ones. `(str, int)` means that it returns a tuple with both types. ones. `(str, int)` means that it returns a tuple with both types.
""" """
from jedi.inference.value.iterable import SequenceLiteralContext, FakeSequence from jedi.inference.value.iterable import SequenceLiteralValue, FakeSequence
if isinstance(array, SequenceLiteralContext): if isinstance(array, SequenceLiteralValue):
values = [] values = []
for lazy_value in array.py__iter__(): for lazy_value in array.py__iter__():
objects = ContextSet.from_sets( objects = ValueSet.from_sets(
_execute_array_values(infer_state, typ) _execute_array_values(infer_state, typ)
for typ in lazy_value.infer() for typ in lazy_value.infer()
) )
values.append(LazyKnownContexts(objects)) values.append(LazyKnownValues(objects))
return {FakeSequence(infer_state, array.array_type, values)} return {FakeSequence(infer_state, array.array_type, values)}
else: else:
return array.execute_annotation() return array.execute_annotation()
@@ -270,10 +270,10 @@ def _execute_array_values(infer_state, array):
@infer_state_method_cache() @infer_state_method_cache()
def infer_param(execution_value, param): def infer_param(execution_value, param):
from jedi.inference.value.instance import InstanceArguments from jedi.inference.value.instance import InstanceArguments
from jedi.inference.value import FunctionExecutionContext from jedi.inference.value import FunctionExecutionValue
def infer_docstring(docstring): def infer_docstring(docstring):
return ContextSet( return ValueSet(
p p
for param_str in _search_param_in_docstr(docstring, param.name.value) for param_str in _search_param_in_docstr(docstring, param.name.value)
for p in _infer_for_statement_string(module_value, param_str) for p in _infer_for_statement_string(module_value, param_str)
@@ -284,7 +284,7 @@ def infer_param(execution_value, param):
return NO_VALUES return NO_VALUES
types = infer_docstring(execution_value.py__doc__()) types = infer_docstring(execution_value.py__doc__())
if isinstance(execution_value, FunctionExecutionContext) \ if isinstance(execution_value, FunctionExecutionValue) \
and isinstance(execution_value.var_args, InstanceArguments) \ and isinstance(execution_value.var_args, InstanceArguments) \
and execution_value.function_value.py__name__() == '__init__': and execution_value.function_value.py__name__() == '__init__':
class_value = execution_value.var_args.instance.class_value class_value = execution_value.var_args.instance.class_value

View File

@@ -26,8 +26,8 @@ from jedi.inference.param import create_default_params
from jedi.inference.helpers import is_stdlib_path from jedi.inference.helpers import is_stdlib_path
from jedi.inference.utils import to_list from jedi.inference.utils import to_list
from jedi.parser_utils import get_parent_scope from jedi.parser_utils import get_parent_scope
from jedi.inference.value import ModuleContext, instance from jedi.inference.value import ModuleValue, instance
from jedi.inference.base_value import ContextSet, NO_VALUES from jedi.inference.base_value import ValueSet, NO_VALUES
from jedi.inference import recursion from jedi.inference import recursion
@@ -49,7 +49,7 @@ class DynamicExecutedParams(object):
# anonymous functions can create an anonymous parameter that is # anonymous functions can create an anonymous parameter that is
# more or less self referencing. # more or less self referencing.
if allowed: if allowed:
return ContextSet.from_sets(p.infer() for p in self._executed_params) return ValueSet.from_sets(p.infer() for p in self._executed_params)
return NO_VALUES return NO_VALUES
@@ -130,7 +130,7 @@ def _search_function_executions(infer_state, module_value, funcdef, string_name)
i = 0 i = 0
for for_mod_value in imports.get_modules_containing_name( for for_mod_value in imports.get_modules_containing_name(
infer_state, [module_value], string_name): infer_state, [module_value], string_name):
if not isinstance(module_value, ModuleContext): if not isinstance(module_value, ModuleValue):
return return
for name, trailer in _get_possible_nodes(for_mod_value, string_name): for name, trailer in _get_possible_nodes(for_mod_value, string_name):
i += 1 i += 1
@@ -179,7 +179,7 @@ def _get_possible_nodes(module_value, func_string_name):
def _check_name_for_execution(infer_state, value, compare_node, name, trailer): def _check_name_for_execution(infer_state, value, compare_node, name, trailer):
from jedi.inference.value.function import FunctionExecutionContext from jedi.inference.value.function import FunctionExecutionValue
def create_func_excs(): def create_func_excs():
arglist = trailer.children[1] arglist = trailer.children[1]
@@ -203,7 +203,7 @@ def _check_name_for_execution(infer_state, value, compare_node, name, trailer):
if compare_node == value_node: if compare_node == value_node:
for func_execution in create_func_excs(): for func_execution in create_func_excs():
yield func_execution yield func_execution
elif isinstance(v.parent_value, FunctionExecutionContext) and \ elif isinstance(v.parent_value, FunctionExecutionValue) and \
compare_node.type == 'funcdef': compare_node.type == 'funcdef':
# Here we're trying to find decorators by checking the first # Here we're trying to find decorators by checking the first
# parameter. It's not very generic though. Should find a better # parameter. It's not very generic though. Should find a better

View File

@@ -9,8 +9,8 @@ from parso.tree import search_ancestor
from jedi._compatibility import use_metaclass from jedi._compatibility import use_metaclass
from jedi.inference import flow_analysis from jedi.inference import flow_analysis
from jedi.inference.base_value import ContextSet, Context, ContextWrapper, \ from jedi.inference.base_value import ValueSet, Value, ValueWrapper, \
LazyContextWrapper LazyValueWrapper
from jedi.parser_utils import get_cached_parent_scope from jedi.parser_utils import get_cached_parent_scope
from jedi.inference.utils import to_list from jedi.inference.utils import to_list
from jedi.inference.names import TreeNameDefinition, ParamName, AbstractNameDefinition from jedi.inference.names import TreeNameDefinition, ParamName, AbstractNameDefinition
@@ -231,7 +231,7 @@ class MergedFilter(object):
return '%s(%s)' % (self.__class__.__name__, ', '.join(str(f) for f in self._filters)) return '%s(%s)' % (self.__class__.__name__, ', '.join(str(f) for f in self._filters))
class _BuiltinMappedMethod(Context): class _BuiltinMappedMethod(Value):
"""``Generator.__next__`` ``dict.values`` methods and so on.""" """``Generator.__next__`` ``dict.values`` methods and so on."""
api_type = u'function' api_type = u'function'
@@ -281,7 +281,7 @@ class SpecialMethodFilter(DictFilter):
else: else:
continue continue
break break
return ContextSet([ return ValueSet([
_BuiltinMappedMethod(self.parent_value, self._callable, builtin_func) _BuiltinMappedMethod(self.parent_value, self._callable, builtin_func)
]) ])
@@ -328,13 +328,13 @@ class _AttributeOverwriteMixin(object):
class LazyAttributeOverwrite(use_metaclass(_OverwriteMeta, _AttributeOverwriteMixin, class LazyAttributeOverwrite(use_metaclass(_OverwriteMeta, _AttributeOverwriteMixin,
LazyContextWrapper)): LazyValueWrapper)):
def __init__(self, infer_state): def __init__(self, infer_state):
self.infer_state = infer_state self.infer_state = infer_state
class AttributeOverwrite(use_metaclass(_OverwriteMeta, _AttributeOverwriteMixin, class AttributeOverwrite(use_metaclass(_OverwriteMeta, _AttributeOverwriteMixin,
ContextWrapper)): ValueWrapper)):
pass pass
@@ -394,7 +394,7 @@ def get_global_filters(infer_state, value, until_position, origin_scope):
>>> list(filters[3].values()) # doctest: +ELLIPSIS >>> list(filters[3].values()) # doctest: +ELLIPSIS
[...] [...]
""" """
from jedi.inference.value.function import FunctionExecutionContext from jedi.inference.value.function import FunctionExecutionValue
while value is not None: while value is not None:
# Names in methods cannot be resolved within the class. # Names in methods cannot be resolved within the class.
for filter in value.get_filters( for filter in value.get_filters(
@@ -402,7 +402,7 @@ def get_global_filters(infer_state, value, until_position, origin_scope):
until_position=until_position, until_position=until_position,
origin_scope=origin_scope): origin_scope=origin_scope):
yield filter yield filter
if isinstance(value, FunctionExecutionContext): if isinstance(value, FunctionExecutionValue):
# The position should be reset if the current scope is a function. # The position should be reset if the current scope is a function.
until_position = None until_position = None

View File

@@ -27,7 +27,7 @@ from jedi.inference import helpers
from jedi.inference.value import iterable from jedi.inference.value import iterable
from jedi.inference.filters import get_global_filters from jedi.inference.filters import get_global_filters
from jedi.inference.names import TreeNameDefinition from jedi.inference.names import TreeNameDefinition
from jedi.inference.base_value import ContextSet, NO_VALUES from jedi.inference.base_value import ValueSet, NO_VALUES
from jedi.parser_utils import is_scope, get_parent_scope from jedi.parser_utils import is_scope, get_parent_scope
from jedi.inference.gradual.conversion import convert_values from jedi.inference.gradual.conversion import convert_values
@@ -123,7 +123,7 @@ class NameFinder(object):
yield f yield f
# This covers the case where a stub files are incomplete. # This covers the case where a stub files are incomplete.
if self._value.is_stub(): if self._value.is_stub():
for c in convert_values(ContextSet({self._value})): for c in convert_values(ValueSet({self._value})):
for f in c.get_filters(): for f in c.get_filters():
yield f yield f
@@ -187,7 +187,7 @@ class NameFinder(object):
return inst.execute_function_slots(names, name) return inst.execute_function_slots(names, name)
def _names_to_types(self, names, attribute_lookup): def _names_to_types(self, names, attribute_lookup):
values = ContextSet.from_sets(name.infer() for name in names) values = ValueSet.from_sets(name.infer() for name in names)
debug.dbg('finder._names_to_types: %s -> %s', names, values) debug.dbg('finder._names_to_types: %s -> %s', names, values)
if not names and self._value.is_instance() and not self._value.is_compiled(): if not names and self._value.is_instance() and not self._value.is_compiled():

View File

@@ -11,7 +11,7 @@ from parso import ParserSyntaxError, parse
from jedi._compatibility import force_unicode from jedi._compatibility import force_unicode
from jedi.inference.cache import infer_state_method_cache from jedi.inference.cache import infer_state_method_cache
from jedi.inference.base_value import ContextSet, NO_VALUES from jedi.inference.base_value import ValueSet, NO_VALUES
from jedi.inference.gradual.typing import TypeVar, LazyGenericClass, \ from jedi.inference.gradual.typing import TypeVar, LazyGenericClass, \
AbstractAnnotatedClass AbstractAnnotatedClass
from jedi.inference.gradual.typing import GenericClass from jedi.inference.gradual.typing import GenericClass
@@ -112,15 +112,15 @@ def infer_param(execution_value, param):
infer_state = execution_value.infer_state infer_state = execution_value.infer_state
if param.star_count == 1: if param.star_count == 1:
tuple_ = builtin_from_name(infer_state, 'tuple') tuple_ = builtin_from_name(infer_state, 'tuple')
return ContextSet([GenericClass( return ValueSet([GenericClass(
tuple_, tuple_,
generics=(values,), generics=(values,),
) for c in values]) ) for c in values])
elif param.star_count == 2: elif param.star_count == 2:
dct = builtin_from_name(infer_state, 'dict') dct = builtin_from_name(infer_state, 'dict')
return ContextSet([GenericClass( return ValueSet([GenericClass(
dct, dct,
generics=(ContextSet([builtin_from_name(infer_state, 'str')]), values), generics=(ValueSet([builtin_from_name(infer_state, 'str')]), values),
) for c in values]) ) for c in values])
pass pass
return values return values
@@ -224,9 +224,9 @@ def infer_return_types(function_execution_value):
type_var_dict = infer_type_vars_for_execution(function_execution_value, all_annotations) type_var_dict = infer_type_vars_for_execution(function_execution_value, all_annotations)
return ContextSet.from_sets( return ValueSet.from_sets(
ann.define_generics(type_var_dict) ann.define_generics(type_var_dict)
if isinstance(ann, (AbstractAnnotatedClass, TypeVar)) else ContextSet({ann}) if isinstance(ann, (AbstractAnnotatedClass, TypeVar)) else ValueSet({ann})
for ann in annotation_values for ann in annotation_values
).execute_annotation() ).execute_annotation()

View File

@@ -1,14 +1,14 @@
from jedi import debug from jedi import debug
from jedi.inference.base_value import ContextSet, \ from jedi.inference.base_value import ValueSet, \
NO_VALUES NO_VALUES
from jedi.inference.utils import to_list from jedi.inference.utils import to_list
from jedi.inference.gradual.stub_value import StubModuleContext from jedi.inference.gradual.stub_value import StubModuleValue
def _stub_to_python_value_set(stub_value, ignore_compiled=False): def _stub_to_python_value_set(stub_value, ignore_compiled=False):
stub_module = stub_value.get_root_value() stub_module = stub_value.get_root_value()
if not stub_module.is_stub(): if not stub_module.is_stub():
return ContextSet([stub_value]) return ValueSet([stub_value])
was_instance = stub_value.is_instance() was_instance = stub_value.is_instance()
if was_instance: if was_instance:
@@ -27,7 +27,7 @@ def _stub_to_python_value_set(stub_value, ignore_compiled=False):
values = _infer_from_stub(stub_module, qualified_names, ignore_compiled) values = _infer_from_stub(stub_module, qualified_names, ignore_compiled)
if was_instance: if was_instance:
values = ContextSet.from_sets( values = ValueSet.from_sets(
c.execute_with_values() c.execute_with_values()
for c in values for c in values
if c.is_class() if c.is_class()
@@ -41,7 +41,7 @@ def _stub_to_python_value_set(stub_value, ignore_compiled=False):
def _infer_from_stub(stub_module, qualified_names, ignore_compiled): def _infer_from_stub(stub_module, qualified_names, ignore_compiled):
from jedi.inference.compiled.mixed import MixedObject from jedi.inference.compiled.mixed import MixedObject
assert isinstance(stub_module, (StubModuleContext, MixedObject)), stub_module assert isinstance(stub_module, (StubModuleValue, MixedObject)), stub_module
non_stubs = stub_module.non_stub_value_set non_stubs = stub_module.non_stub_value_set
if ignore_compiled: if ignore_compiled:
non_stubs = non_stubs.filter(lambda c: not c.is_compiled()) non_stubs = non_stubs.filter(lambda c: not c.is_compiled())
@@ -89,7 +89,7 @@ def _load_stub_module(module):
return _try_to_load_stub_cached( return _try_to_load_stub_cached(
module.infer_state, module.infer_state,
import_names=module.string_names, import_names=module.string_names,
python_value_set=ContextSet([module]), python_value_set=ValueSet([module]),
parent_module_value=None, parent_module_value=None,
sys_path=module.infer_state.get_sys_path(), sys_path=module.infer_state.get_sys_path(),
) )
@@ -116,7 +116,7 @@ def _python_to_stub_names(names, fallback_to_python=False):
if name_list is not None: if name_list is not None:
stub_module = _load_stub_module(module) stub_module = _load_stub_module(module)
if stub_module is not None: if stub_module is not None:
stubs = ContextSet({stub_module}) stubs = ValueSet({stub_module})
for name in name_list[:-1]: for name in name_list[:-1]:
stubs = stubs.py__getattribute__(name) stubs = stubs.py__getattribute__(name)
if stubs and name_list: if stubs and name_list:
@@ -148,15 +148,15 @@ def convert_values(values, only_stubs=False, prefer_stubs=False, ignore_compiled
assert not (only_stubs and prefer_stubs) assert not (only_stubs and prefer_stubs)
with debug.increase_indent_cm('convert values'): with debug.increase_indent_cm('convert values'):
if only_stubs or prefer_stubs: if only_stubs or prefer_stubs:
return ContextSet.from_sets( return ValueSet.from_sets(
to_stub(value) to_stub(value)
or (ContextSet({value}) if prefer_stubs else NO_VALUES) or (ValueSet({value}) if prefer_stubs else NO_VALUES)
for value in values for value in values
) )
else: else:
return ContextSet.from_sets( return ValueSet.from_sets(
_stub_to_python_value_set(stub_value, ignore_compiled=ignore_compiled) _stub_to_python_value_set(stub_value, ignore_compiled=ignore_compiled)
or ContextSet({stub_value}) or ValueSet({stub_value})
for stub_value in values for stub_value in values
) )
@@ -164,7 +164,7 @@ def convert_values(values, only_stubs=False, prefer_stubs=False, ignore_compiled
# TODO merge with _python_to_stub_names? # TODO merge with _python_to_stub_names?
def to_stub(value): def to_stub(value):
if value.is_stub(): if value.is_stub():
return ContextSet([value]) return ValueSet([value])
was_instance = value.is_instance() was_instance = value.is_instance()
if was_instance: if was_instance:
@@ -182,12 +182,12 @@ def to_stub(value):
qualified_names = qualified_names[:-1] qualified_names = qualified_names[:-1]
was_instance = True was_instance = True
stub_values = ContextSet([stub_module]) stub_values = ValueSet([stub_module])
for name in qualified_names: for name in qualified_names:
stub_values = stub_values.py__getattribute__(name) stub_values = stub_values.py__getattribute__(name)
if was_instance: if was_instance:
stub_values = ContextSet.from_sets( stub_values = ValueSet.from_sets(
c.execute_with_values() c.execute_with_values()
for c in stub_values for c in stub_values
if c.is_class() if c.is_class()

View File

@@ -1,13 +1,13 @@
from jedi.inference.base_value import ContextWrapper from jedi.inference.base_value import ValueWrapper
from jedi.inference.value.module import ModuleContext from jedi.inference.value.module import ModuleValue
from jedi.inference.filters import ParserTreeFilter, \ from jedi.inference.filters import ParserTreeFilter, \
TreeNameDefinition TreeNameDefinition
from jedi.inference.gradual.typing import TypingModuleFilterWrapper from jedi.inference.gradual.typing import TypingModuleFilterWrapper
class StubModuleContext(ModuleContext): class StubModuleValue(ModuleValue):
def __init__(self, non_stub_value_set, *args, **kwargs): def __init__(self, non_stub_value_set, *args, **kwargs):
super(StubModuleContext, self).__init__(*args, **kwargs) super(StubModuleValue, self).__init__(*args, **kwargs)
self.non_stub_value_set = non_stub_value_set self.non_stub_value_set = non_stub_value_set
def is_stub(self): def is_stub(self):
@@ -27,7 +27,7 @@ class StubModuleContext(ModuleContext):
pass pass
else: else:
names.update(method()) names.update(method())
names.update(super(StubModuleContext, self).sub_modules_dict()) names.update(super(StubModuleValue, self).sub_modules_dict())
return names return names
def _get_first_non_stub_filters(self): def _get_first_non_stub_filters(self):
@@ -44,7 +44,7 @@ class StubModuleContext(ModuleContext):
def get_filters(self, search_global=False, until_position=None, def get_filters(self, search_global=False, until_position=None,
origin_scope=None, **kwargs): origin_scope=None, **kwargs):
filters = super(StubModuleContext, self).get_filters( filters = super(StubModuleValue, self).get_filters(
search_global, until_position, origin_scope, **kwargs search_global, until_position, origin_scope, **kwargs
) )
next(filters) # Ignore the first filter and replace it with our own next(filters) # Ignore the first filter and replace it with our own
@@ -60,7 +60,7 @@ class StubModuleContext(ModuleContext):
yield f yield f
class TypingModuleWrapper(StubModuleContext): class TypingModuleWrapper(StubModuleValue):
def get_filters(self, *args, **kwargs): def get_filters(self, *args, **kwargs):
filters = super(TypingModuleWrapper, self).get_filters(*args, **kwargs) filters = super(TypingModuleWrapper, self).get_filters(*args, **kwargs)
yield TypingModuleFilterWrapper(next(filters)) yield TypingModuleFilterWrapper(next(filters))
@@ -101,5 +101,5 @@ class StubFilter(ParserTreeFilter):
return True return True
class VersionInfo(ContextWrapper): class VersionInfo(ValueWrapper):
pass pass

View File

@@ -5,8 +5,8 @@ from functools import wraps
from jedi.file_io import FileIO from jedi.file_io import FileIO
from jedi._compatibility import FileNotFoundError, cast_path from jedi._compatibility import FileNotFoundError, cast_path
from jedi.parser_utils import get_cached_code_lines from jedi.parser_utils import get_cached_code_lines
from jedi.inference.base_value import ContextSet, NO_VALUES from jedi.inference.base_value import ValueSet, NO_VALUES
from jedi.inference.gradual.stub_value import TypingModuleWrapper, StubModuleContext from jedi.inference.gradual.stub_value import TypingModuleWrapper, StubModuleValue
_jedi_path = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) _jedi_path = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
TYPESHED_PATH = os.path.join(_jedi_path, 'third_party', 'typeshed') TYPESHED_PATH = os.path.join(_jedi_path, 'third_party', 'typeshed')
@@ -107,7 +107,7 @@ def import_module_decorator(func):
python_parent, = infer_state.import_module(('os',), prefer_stubs=False) python_parent, = infer_state.import_module(('os',), prefer_stubs=False)
python_value_set = python_parent.py__getattribute__('path') python_value_set = python_parent.py__getattribute__('path')
else: else:
python_value_set = ContextSet.from_sets( python_value_set = ValueSet.from_sets(
func(infer_state, import_names, p, sys_path,) func(infer_state, import_names, p, sys_path,)
for p in parent_module_values for p in parent_module_values
) )
@@ -119,7 +119,7 @@ def import_module_decorator(func):
stub = _try_to_load_stub_cached(infer_state, import_names, python_value_set, stub = _try_to_load_stub_cached(infer_state, import_names, python_value_set,
parent_module_value, sys_path) parent_module_value, sys_path)
if stub is not None: if stub is not None:
return ContextSet([stub]) return ValueSet([stub])
return python_value_set return python_value_set
return wrapper return wrapper
@@ -235,7 +235,7 @@ def _load_from_typeshed(infer_state, python_value_set, parent_module_value, impo
if len(import_names) == 1: if len(import_names) == 1:
map_ = _cache_stub_file_map(infer_state.grammar.version_info) map_ = _cache_stub_file_map(infer_state.grammar.version_info)
import_name = _IMPORT_MAP.get(import_name, import_name) import_name = _IMPORT_MAP.get(import_name, import_name)
elif isinstance(parent_module_value, StubModuleContext): elif isinstance(parent_module_value, StubModuleValue):
if not parent_module_value.is_package: if not parent_module_value.is_package:
# Only if it's a package (= a folder) something can be # Only if it's a package (= a folder) something can be
# imported. # imported.
@@ -275,7 +275,7 @@ def create_stub_module(infer_state, python_value_set, stub_module_node, file_io,
if import_names == ('typing',): if import_names == ('typing',):
module_cls = TypingModuleWrapper module_cls = TypingModuleWrapper
else: else:
module_cls = StubModuleContext module_cls = StubModuleValue
file_name = os.path.basename(file_io.path) file_name = os.path.basename(file_io.path)
stub_module_value = module_cls( stub_module_value = module_cls(
python_value_set, infer_state, stub_module_node, python_value_set, infer_state, stub_module_node,

View File

@@ -9,15 +9,15 @@ from jedi._compatibility import unicode, force_unicode
from jedi import debug from jedi import debug
from jedi.inference.cache import infer_state_method_cache from jedi.inference.cache import infer_state_method_cache
from jedi.inference.compiled import builtin_from_name from jedi.inference.compiled import builtin_from_name
from jedi.inference.base_value import ContextSet, NO_VALUES, Context, \ from jedi.inference.base_value import ValueSet, NO_VALUES, Value, \
iterator_to_value_set, ContextWrapper, LazyContextWrapper iterator_to_value_set, ValueWrapper, LazyValueWrapper
from jedi.inference.lazy_value import LazyKnownContexts from jedi.inference.lazy_value import LazyKnownValues
from jedi.inference.value.iterable import SequenceLiteralContext from jedi.inference.value.iterable import SequenceLiteralValue
from jedi.inference.arguments import repack_with_argument_clinic from jedi.inference.arguments import repack_with_argument_clinic
from jedi.inference.utils import to_list from jedi.inference.utils import to_list
from jedi.inference.filters import FilterWrapper from jedi.inference.filters import FilterWrapper
from jedi.inference.names import NameWrapper, AbstractTreeName, \ from jedi.inference.names import NameWrapper, AbstractTreeName, \
AbstractNameDefinition, ContextName AbstractNameDefinition, ValueName
from jedi.inference.helpers import is_string from jedi.inference.helpers import is_string
from jedi.inference.value.klass import ClassMixin, ClassFilter from jedi.inference.value.klass import ClassMixin, ClassFilter
@@ -41,12 +41,12 @@ class TypingName(AbstractTreeName):
self._value = value self._value = value
def infer(self): def infer(self):
return ContextSet([self._value]) return ValueSet([self._value])
class _BaseTypingContext(Context): class _BaseTypingValue(Value):
def __init__(self, infer_state, parent_value, tree_name): def __init__(self, infer_state, parent_value, tree_name):
super(_BaseTypingContext, self).__init__(infer_state, parent_value) super(_BaseTypingValue, self).__init__(infer_state, parent_value)
self._tree_name = tree_name self._tree_name = tree_name
@property @property
@@ -75,7 +75,7 @@ class _BaseTypingContext(Context):
@property @property
def name(self): def name(self):
return ContextName(self, self._tree_name) return ValueName(self, self._tree_name)
def __repr__(self): def __repr__(self):
return '%s(%s)' % (self.__class__.__name__, self._tree_name.value) return '%s(%s)' % (self.__class__.__name__, self._tree_name.value)
@@ -83,7 +83,7 @@ class _BaseTypingContext(Context):
class TypingModuleName(NameWrapper): class TypingModuleName(NameWrapper):
def infer(self): def infer(self):
return ContextSet(self._remap()) return ValueSet(self._remap())
def _remap(self): def _remap(self):
name = self.string_name name = self.string_name
@@ -97,9 +97,9 @@ class TypingModuleName(NameWrapper):
return return
if name in _PROXY_CLASS_TYPES: if name in _PROXY_CLASS_TYPES:
yield TypingClassContext.create_cached(infer_state, self.parent_value, self.tree_name) yield TypingClassValue.create_cached(infer_state, self.parent_value, self.tree_name)
elif name in _PROXY_TYPES: elif name in _PROXY_TYPES:
yield TypingContext.create_cached(infer_state, self.parent_value, self.tree_name) yield TypingValue.create_cached(infer_state, self.parent_value, self.tree_name)
elif name == 'runtime': elif name == 'runtime':
# We don't want anything here, not sure what this function is # We don't want anything here, not sure what this function is
# supposed to do, since it just appears in the stubs and shouldn't # supposed to do, since it just appears in the stubs and shouldn't
@@ -138,7 +138,7 @@ class TypingModuleFilterWrapper(FilterWrapper):
name_wrapper_class = TypingModuleName name_wrapper_class = TypingModuleName
class _WithIndexBase(_BaseTypingContext): class _WithIndexBase(_BaseTypingValue):
def __init__(self, infer_state, parent_value, name, index_value, value_of_index): def __init__(self, infer_state, parent_value, name, index_value, value_of_index):
super(_WithIndexBase, self).__init__(infer_state, parent_value, name) super(_WithIndexBase, self).__init__(infer_state, parent_value, name)
self._index_value = index_value self._index_value = index_value
@@ -152,28 +152,28 @@ class _WithIndexBase(_BaseTypingContext):
) )
class TypingContextWithIndex(_WithIndexBase): class TypingValueWithIndex(_WithIndexBase):
def execute_annotation(self): def execute_annotation(self):
string_name = self._tree_name.value string_name = self._tree_name.value
if string_name == 'Union': if string_name == 'Union':
# This is kind of a special case, because we have Unions (in Jedi # This is kind of a special case, because we have Unions (in Jedi
# ContextSets). # ValueSets).
return self.gather_annotation_classes().execute_annotation() return self.gather_annotation_classes().execute_annotation()
elif string_name == 'Optional': elif string_name == 'Optional':
# Optional is basically just saying it's either None or the actual # Optional is basically just saying it's either None or the actual
# type. # type.
return self.gather_annotation_classes().execute_annotation() \ return self.gather_annotation_classes().execute_annotation() \
| ContextSet([builtin_from_name(self.infer_state, u'None')]) | ValueSet([builtin_from_name(self.infer_state, u'None')])
elif string_name == 'Type': elif string_name == 'Type':
# The type is actually already given in the index_value # The type is actually already given in the index_value
return ContextSet([self._index_value]) return ValueSet([self._index_value])
elif string_name == 'ClassVar': elif string_name == 'ClassVar':
# For now don't do anything here, ClassVars are always used. # For now don't do anything here, ClassVars are always used.
return self._index_value.execute_annotation() return self._index_value.execute_annotation()
cls = globals()[string_name] cls = globals()[string_name]
return ContextSet([cls( return ValueSet([cls(
self.infer_state, self.infer_state,
self.parent_value, self.parent_value,
self._tree_name, self._tree_name,
@@ -182,17 +182,17 @@ class TypingContextWithIndex(_WithIndexBase):
)]) )])
def gather_annotation_classes(self): def gather_annotation_classes(self):
return ContextSet.from_sets( return ValueSet.from_sets(
_iter_over_arguments(self._index_value, self._value_of_index) _iter_over_arguments(self._index_value, self._value_of_index)
) )
class TypingContext(_BaseTypingContext): class TypingValue(_BaseTypingValue):
index_class = TypingContextWithIndex index_class = TypingValueWithIndex
py__simple_getitem__ = None py__simple_getitem__ = None
def py__getitem__(self, index_value_set, valueualized_node): def py__getitem__(self, index_value_set, valueualized_node):
return ContextSet( return ValueSet(
self.index_class.create_cached( self.index_class.create_cached(
self.infer_state, self.infer_state,
self.parent_value, self.parent_value,
@@ -205,7 +205,7 @@ class TypingContext(_BaseTypingContext):
class _TypingClassMixin(object): class _TypingClassMixin(object):
def py__bases__(self): def py__bases__(self):
return [LazyKnownContexts( return [LazyKnownValues(
self.infer_state.builtins_module.py__getattribute__('object') self.infer_state.builtins_module.py__getattribute__('object')
)] )]
@@ -213,21 +213,21 @@ class _TypingClassMixin(object):
return [] return []
class TypingClassContextWithIndex(_TypingClassMixin, TypingContextWithIndex, ClassMixin): class TypingClassValueWithIndex(_TypingClassMixin, TypingValueWithIndex, ClassMixin):
pass pass
class TypingClassContext(_TypingClassMixin, TypingContext, ClassMixin): class TypingClassValue(_TypingClassMixin, TypingValue, ClassMixin):
index_class = TypingClassContextWithIndex index_class = TypingClassValueWithIndex
def _iter_over_arguments(maybe_tuple_value, defining_value): def _iter_over_arguments(maybe_tuple_value, defining_value):
def iterate(): def iterate():
if isinstance(maybe_tuple_value, SequenceLiteralContext): if isinstance(maybe_tuple_value, SequenceLiteralValue):
for lazy_value in maybe_tuple_value.py__iter__(valueualized_node=None): for lazy_value in maybe_tuple_value.py__iter__(valueualized_node=None):
yield lazy_value.infer() yield lazy_value.infer()
else: else:
yield ContextSet([maybe_tuple_value]) yield ValueSet([maybe_tuple_value])
def resolve_forward_references(value_set): def resolve_forward_references(value_set):
for value in value_set: for value in value_set:
@@ -241,10 +241,10 @@ def _iter_over_arguments(maybe_tuple_value, defining_value):
yield value yield value
for value_set in iterate(): for value_set in iterate():
yield ContextSet(resolve_forward_references(value_set)) yield ValueSet(resolve_forward_references(value_set))
class TypeAlias(LazyContextWrapper): class TypeAlias(LazyValueWrapper):
def __init__(self, parent_value, origin_tree_name, actual): def __init__(self, parent_value, origin_tree_name, actual):
self.infer_state = parent_value.infer_state self.infer_state = parent_value.infer_state
self.parent_value = parent_value self.parent_value = parent_value
@@ -253,7 +253,7 @@ class TypeAlias(LazyContextWrapper):
@property @property
def name(self): def name(self):
return ContextName(self, self._origin_tree_name) return ValueName(self, self._origin_tree_name)
def py__name__(self): def py__name__(self):
return self.name.string_name return self.name.string_name
@@ -299,7 +299,7 @@ class Tuple(_ContainerBase):
def _is_homogenous(self): def _is_homogenous(self):
# To specify a variable-length tuple of homogeneous type, Tuple[T, ...] # To specify a variable-length tuple of homogeneous type, Tuple[T, ...]
# is used. # is used.
if isinstance(self._index_value, SequenceLiteralContext): if isinstance(self._index_value, SequenceLiteralValue):
entries = self._index_value.get_tree_entries() entries = self._index_value.get_tree_entries()
if len(entries) == 2 and entries[1] == '...': if len(entries) == 2 and entries[1] == '...':
return True return True
@@ -317,17 +317,17 @@ class Tuple(_ContainerBase):
def py__iter__(self, valueualized_node=None): def py__iter__(self, valueualized_node=None):
if self._is_homogenous(): if self._is_homogenous():
yield LazyKnownContexts(self._get_getitem_values(0).execute_annotation()) yield LazyKnownValues(self._get_getitem_values(0).execute_annotation())
else: else:
if isinstance(self._index_value, SequenceLiteralContext): if isinstance(self._index_value, SequenceLiteralValue):
for i in range(self._index_value.py__len__()): for i in range(self._index_value.py__len__()):
yield LazyKnownContexts(self._get_getitem_values(i).execute_annotation()) yield LazyKnownValues(self._get_getitem_values(i).execute_annotation())
def py__getitem__(self, index_value_set, valueualized_node): def py__getitem__(self, index_value_set, valueualized_node):
if self._is_homogenous(): if self._is_homogenous():
return self._get_getitem_values(0).execute_annotation() return self._get_getitem_values(0).execute_annotation()
return ContextSet.from_sets( return ValueSet.from_sets(
_iter_over_arguments(self._index_value, self._value_of_index) _iter_over_arguments(self._index_value, self._value_of_index)
).execute_annotation() ).execute_annotation()
@@ -340,13 +340,13 @@ class Protocol(_ContainerBase):
pass pass
class Any(_BaseTypingContext): class Any(_BaseTypingValue):
def execute_annotation(self): def execute_annotation(self):
debug.warning('Used Any - returned no results') debug.warning('Used Any - returned no results')
return NO_VALUES return NO_VALUES
class TypeVarClass(_BaseTypingContext): class TypeVarClass(_BaseTypingValue):
def py__call__(self, arguments): def py__call__(self, arguments):
unpacked = arguments.unpack() unpacked = arguments.unpack()
@@ -357,7 +357,7 @@ class TypeVarClass(_BaseTypingContext):
debug.warning('Found a variable without a name %s', arguments) debug.warning('Found a variable without a name %s', arguments)
return NO_VALUES return NO_VALUES
return ContextSet([TypeVar.create_cached( return ValueSet([TypeVar.create_cached(
self.infer_state, self.infer_state,
self.parent_value, self.parent_value,
self._tree_name, self._tree_name,
@@ -390,7 +390,7 @@ class TypeVarClass(_BaseTypingContext):
return None return None
class TypeVar(_BaseTypingContext): class TypeVar(_BaseTypingValue):
def __init__(self, infer_state, parent_value, tree_name, var_name, unpacked_args): def __init__(self, infer_state, parent_value, tree_name, var_name, unpacked_args):
super(TypeVar, self).__init__(infer_state, parent_value, tree_name) super(TypeVar, self).__init__(infer_state, parent_value, tree_name)
self._var_name = var_name self._var_name = var_name
@@ -432,7 +432,7 @@ class TypeVar(_BaseTypingContext):
@property @property
def constraints(self): def constraints(self):
return ContextSet.from_sets( return ValueSet.from_sets(
lazy.infer() for lazy in self._constraints_lazy_values lazy.infer() for lazy in self._constraints_lazy_values
) )
@@ -444,7 +444,7 @@ class TypeVar(_BaseTypingContext):
else: else:
if found: if found:
return found return found
return self._get_classes() or ContextSet({self}) return self._get_classes() or ValueSet({self})
def execute_annotation(self): def execute_annotation(self):
return self._get_classes().execute_annotation() return self._get_classes().execute_annotation()
@@ -453,21 +453,21 @@ class TypeVar(_BaseTypingContext):
return '<%s: %s>' % (self.__class__.__name__, self.py__name__()) return '<%s: %s>' % (self.__class__.__name__, self.py__name__())
class OverloadFunction(_BaseTypingContext): class OverloadFunction(_BaseTypingValue):
@repack_with_argument_clinic('func, /') @repack_with_argument_clinic('func, /')
def py__call__(self, func_value_set): def py__call__(self, func_value_set):
# Just pass arguments through. # Just pass arguments through.
return func_value_set return func_value_set
class NewTypeFunction(_BaseTypingContext): class NewTypeFunction(_BaseTypingValue):
def py__call__(self, arguments): def py__call__(self, arguments):
ordered_args = arguments.unpack() ordered_args = arguments.unpack()
next(ordered_args, (None, None)) next(ordered_args, (None, None))
_, second_arg = next(ordered_args, (None, None)) _, second_arg = next(ordered_args, (None, None))
if second_arg is None: if second_arg is None:
return NO_VALUES return NO_VALUES
return ContextSet( return ValueSet(
NewType( NewType(
self.infer_state, self.infer_state,
valueualized_node.value, valueualized_node.value,
@@ -476,7 +476,7 @@ class NewTypeFunction(_BaseTypingContext):
) for valueualized_node in arguments.get_calling_nodes()) ) for valueualized_node in arguments.get_calling_nodes())
class NewType(Context): class NewType(Value):
def __init__(self, infer_state, parent_value, tree_node, type_value_set): def __init__(self, infer_state, parent_value, tree_node, type_value_set):
super(NewType, self).__init__(infer_state, parent_value) super(NewType, self).__init__(infer_state, parent_value)
self._type_value_set = type_value_set self._type_value_set = type_value_set
@@ -486,7 +486,7 @@ class NewType(Context):
return self._type_value_set.execute_annotation() return self._type_value_set.execute_annotation()
class CastFunction(_BaseTypingContext): class CastFunction(_BaseTypingValue):
@repack_with_argument_clinic('type, object, /') @repack_with_argument_clinic('type, object, /')
def py__call__(self, type_value_set, object_value_set): def py__call__(self, type_value_set, object_value_set):
return type_value_set.execute_annotation() return type_value_set.execute_annotation()
@@ -510,7 +510,7 @@ class BoundTypeVarName(AbstractNameDefinition):
yield constraint yield constraint
else: else:
yield value yield value
return ContextSet(iter_()) return ValueSet(iter_())
def py__name__(self): def py__name__(self):
return self._type_var.py__name__() return self._type_var.py__name__()
@@ -549,7 +549,7 @@ class TypeVarFilter(object):
return [] return []
class AbstractAnnotatedClass(ClassMixin, ContextWrapper): class AbstractAnnotatedClass(ClassMixin, ValueWrapper):
def get_type_var_filter(self): def get_type_var_filter(self):
return TypeVarFilter(self.get_generics(), self.list_type_vars()) return TypeVarFilter(self.get_generics(), self.list_type_vars())
@@ -593,7 +593,7 @@ class AbstractAnnotatedClass(ClassMixin, ContextWrapper):
def py__call__(self, arguments): def py__call__(self, arguments):
instance, = super(AbstractAnnotatedClass, self).py__call__(arguments) instance, = super(AbstractAnnotatedClass, self).py__call__(arguments)
return ContextSet([InstanceWrapper(instance)]) return ValueSet([InstanceWrapper(instance)])
def get_generics(self): def get_generics(self):
raise NotImplementedError raise NotImplementedError
@@ -607,19 +607,19 @@ class AbstractAnnotatedClass(ClassMixin, ContextWrapper):
if isinstance(generic, (AbstractAnnotatedClass, TypeVar)): if isinstance(generic, (AbstractAnnotatedClass, TypeVar)):
result = generic.define_generics(type_var_dict) result = generic.define_generics(type_var_dict)
values |= result values |= result
if result != ContextSet({generic}): if result != ValueSet({generic}):
changed = True changed = True
else: else:
values |= ContextSet([generic]) values |= ValueSet([generic])
new_generics.append(values) new_generics.append(values)
if not changed: if not changed:
# There might not be any type vars that change. In that case just # There might not be any type vars that change. In that case just
# return itself, because it does not make sense to potentially lose # return itself, because it does not make sense to potentially lose
# cached results. # cached results.
return ContextSet([self]) return ValueSet([self])
return ContextSet([GenericClass( return ValueSet([GenericClass(
self._wrapped_value, self._wrapped_value,
generics=tuple(new_generics) generics=tuple(new_generics)
)]) )])
@@ -682,18 +682,18 @@ class LazyAnnotatedBaseClass(object):
for type_var in type_var_set: for type_var in type_var_set:
if isinstance(type_var, TypeVar): if isinstance(type_var, TypeVar):
names = filter.get(type_var.py__name__()) names = filter.get(type_var.py__name__())
new |= ContextSet.from_sets( new |= ValueSet.from_sets(
name.infer() for name in names name.infer() for name in names
) )
else: else:
# Mostly will be type vars, except if in some cases # Mostly will be type vars, except if in some cases
# a concrete type will already be there. In that # a concrete type will already be there. In that
# case just add it to the value set. # case just add it to the value set.
new |= ContextSet([type_var]) new |= ValueSet([type_var])
yield new yield new
class InstanceWrapper(ContextWrapper): class InstanceWrapper(ValueWrapper):
def py__stop_iteration_returns(self): def py__stop_iteration_returns(self):
for cls in self._wrapped_value.class_value.py__mro__(): for cls in self._wrapped_value.class_value.py__mro__():
if cls.py__name__() == 'Generator': if cls.py__name__() == 'Generator':
@@ -703,5 +703,5 @@ class InstanceWrapper(ContextWrapper):
except IndexError: except IndexError:
pass pass
elif cls.py__name__() == 'Iterator': elif cls.py__name__() == 'Iterator':
return ContextSet([builtin_from_name(self.infer_state, u'None')]) return ValueSet([builtin_from_name(self.infer_state, u'None')])
return self._wrapped_value.py__stop_iteration_returns() return self._wrapped_value.py__stop_iteration_returns()

View File

@@ -30,7 +30,7 @@ from jedi.inference import analysis
from jedi.inference.utils import unite from jedi.inference.utils import unite
from jedi.inference.cache import infer_state_method_cache from jedi.inference.cache import infer_state_method_cache
from jedi.inference.names import ImportName, SubModuleName from jedi.inference.names import ImportName, SubModuleName
from jedi.inference.base_value import ContextSet, NO_VALUES from jedi.inference.base_value import ValueSet, NO_VALUES
from jedi.inference.gradual.typeshed import import_module_decorator from jedi.inference.gradual.typeshed import import_module_decorator
from jedi.inference.value.module import iter_module_names from jedi.inference.value.module import iter_module_names
from jedi.plugins import plugin_manager from jedi.plugins import plugin_manager
@@ -97,7 +97,7 @@ def infer_import(value, tree_name, is_goto=False):
for t in types for t in types
) )
if not is_goto: if not is_goto:
types = ContextSet(types) types = ValueSet(types)
if not types: if not types:
path = import_path + [from_import_name] path = import_path + [from_import_name]
@@ -289,7 +289,7 @@ class Importer(object):
value_set = [None] value_set = [None]
for i, name in enumerate(self.import_path): for i, name in enumerate(self.import_path):
value_set = ContextSet.from_sets([ value_set = ValueSet.from_sets([
self._infer_state.import_module( self._infer_state.import_module(
import_names[:i+1], import_names[:i+1],
parent_module_value, parent_module_value,
@@ -382,7 +382,7 @@ def import_module(infer_state, import_names, parent_module_value, sys_path):
module = _load_builtin_module(infer_state, import_names, sys_path) module = _load_builtin_module(infer_state, import_names, sys_path)
if module is None: if module is None:
return NO_VALUES return NO_VALUES
return ContextSet([module]) return ValueSet([module])
module_name = '.'.join(import_names) module_name = '.'.join(import_names)
if parent_module_value is None: if parent_module_value is None:
@@ -421,8 +421,8 @@ def import_module(infer_state, import_names, parent_module_value, sys_path):
return NO_VALUES return NO_VALUES
if isinstance(file_io_or_ns, ImplicitNSInfo): if isinstance(file_io_or_ns, ImplicitNSInfo):
from jedi.inference.value.namespace import ImplicitNamespaceContext from jedi.inference.value.namespace import ImplicitNamespaceValue
module = ImplicitNamespaceContext( module = ImplicitNamespaceValue(
infer_state, infer_state,
fullname=file_io_or_ns.name, fullname=file_io_or_ns.name,
paths=file_io_or_ns.paths, paths=file_io_or_ns.paths,
@@ -442,7 +442,7 @@ def import_module(infer_state, import_names, parent_module_value, sys_path):
debug.dbg('global search_module %s: %s', import_names[-1], module) debug.dbg('global search_module %s: %s', import_names[-1], module)
else: else:
debug.dbg('search_module %s in paths %s: %s', module_name, paths, module) debug.dbg('search_module %s in paths %s: %s', module_name, paths, module)
return ContextSet([module]) return ValueSet([module])
def _load_python_module(infer_state, file_io, sys_path=None, def _load_python_module(infer_state, file_io, sys_path=None,
@@ -459,8 +459,8 @@ def _load_python_module(infer_state, file_io, sys_path=None,
cache_path=settings.cache_directory cache_path=settings.cache_directory
) )
from jedi.inference.value import ModuleContext from jedi.inference.value import ModuleValue
return ModuleContext( return ModuleValue(
infer_state, module_node, infer_state, module_node,
file_io=file_io, file_io=file_io,
string_names=import_names, string_names=import_names,
@@ -508,7 +508,7 @@ def _load_module_from_path(infer_state, file_io, base_names):
import_names=import_names, import_names=import_names,
is_package=is_package, is_package=is_package,
) )
infer_state.module_cache.add(import_names, ContextSet([module])) infer_state.module_cache.add(import_names, ValueSet([module]))
return module return module

View File

@@ -1,8 +1,8 @@
from jedi.inference.base_value import ContextSet, NO_VALUES from jedi.inference.base_value import ValueSet, NO_VALUES
from jedi.common.utils import monkeypatch from jedi.common.utils import monkeypatch
class AbstractLazyContext(object): class AbstractLazyValue(object):
def __init__(self, data): def __init__(self, data):
self.data = data self.data = data
@@ -13,29 +13,29 @@ class AbstractLazyContext(object):
raise NotImplementedError raise NotImplementedError
class LazyKnownContext(AbstractLazyContext): class LazyKnownValue(AbstractLazyValue):
"""data is a value.""" """data is a value."""
def infer(self): def infer(self):
return ContextSet([self.data]) return ValueSet([self.data])
class LazyKnownContexts(AbstractLazyContext): class LazyKnownValues(AbstractLazyValue):
"""data is a ContextSet.""" """data is a ValueSet."""
def infer(self): def infer(self):
return self.data return self.data
class LazyUnknownContext(AbstractLazyContext): class LazyUnknownValue(AbstractLazyValue):
def __init__(self): def __init__(self):
super(LazyUnknownContext, self).__init__(None) super(LazyUnknownValue, self).__init__(None)
def infer(self): def infer(self):
return NO_VALUES return NO_VALUES
class LazyTreeContext(AbstractLazyContext): class LazyTreeValue(AbstractLazyValue):
def __init__(self, value, node): def __init__(self, value, node):
super(LazyTreeContext, self).__init__(node) super(LazyTreeValue, self).__init__(node)
self.value = value self.value = value
# We need to save the predefined names. It's an unfortunate side effect # We need to save the predefined names. It's an unfortunate side effect
# that needs to be tracked otherwise results will be wrong. # that needs to be tracked otherwise results will be wrong.
@@ -48,12 +48,12 @@ class LazyTreeContext(AbstractLazyContext):
def get_merged_lazy_value(lazy_values): def get_merged_lazy_value(lazy_values):
if len(lazy_values) > 1: if len(lazy_values) > 1:
return MergedLazyContexts(lazy_values) return MergedLazyValues(lazy_values)
else: else:
return lazy_values[0] return lazy_values[0]
class MergedLazyContexts(AbstractLazyContext): class MergedLazyValues(AbstractLazyValue):
"""data is a list of lazy values.""" """data is a list of lazy values."""
def infer(self): def infer(self):
return ContextSet.from_sets(l.infer() for l in self.data) return ValueSet.from_sets(l.infer() for l in self.data)

View File

@@ -3,7 +3,7 @@ from abc import abstractmethod
from parso.tree import search_ancestor from parso.tree import search_ancestor
from jedi._compatibility import Parameter from jedi._compatibility import Parameter
from jedi.inference.base_value import ContextSet, NO_VALUES from jedi.inference.base_value import ValueSet, NO_VALUES
from jedi.cache import memoize_method from jedi.cache import memoize_method
@@ -118,9 +118,9 @@ class AbstractTreeName(AbstractNameDefinition):
return self.tree_name.start_pos return self.tree_name.start_pos
class ContextNameMixin(object): class ValueNameMixin(object):
def infer(self): def infer(self):
return ContextSet([self._value]) return ValueSet([self._value])
def _get_qualified_names(self): def _get_qualified_names(self):
return self._value.get_qualified_names() return self._value.get_qualified_names()
@@ -128,20 +128,20 @@ class ContextNameMixin(object):
def get_root_value(self): def get_root_value(self):
if self.parent_value is None: # A module if self.parent_value is None: # A module
return self._value return self._value
return super(ContextNameMixin, self).get_root_value() return super(ValueNameMixin, self).get_root_value()
@property @property
def api_type(self): def api_type(self):
return self._value.api_type return self._value.api_type
class ContextName(ContextNameMixin, AbstractTreeName): class ValueName(ValueNameMixin, AbstractTreeName):
def __init__(self, value, tree_name): def __init__(self, value, tree_name):
super(ContextName, self).__init__(value.parent_value, tree_name) super(ValueName, self).__init__(value.parent_value, tree_name)
self._value = value self._value = value
def goto(self): def goto(self):
return ContextSet([self._value.name]) return ValueSet([self._value.name])
class TreeNameDefinition(AbstractTreeName): class TreeNameDefinition(AbstractTreeName):

View File

@@ -3,14 +3,14 @@ from collections import defaultdict
from jedi import debug from jedi import debug
from jedi.inference.utils import PushBackIterator from jedi.inference.utils import PushBackIterator
from jedi.inference import analysis from jedi.inference import analysis
from jedi.inference.lazy_value import LazyKnownContext, \ from jedi.inference.lazy_value import LazyKnownValue, \
LazyTreeContext, LazyUnknownContext LazyTreeValue, LazyUnknownValue
from jedi.inference import docstrings from jedi.inference import docstrings
from jedi.inference.value import iterable from jedi.inference.value import iterable
def _add_argument_issue(error_name, lazy_value, message): def _add_argument_issue(error_name, lazy_value, message):
if isinstance(lazy_value, LazyTreeContext): if isinstance(lazy_value, LazyTreeValue):
node = lazy_value.data node = lazy_value.data
if node.parent.type == 'argument': if node.parent.type == 'argument':
node = node.parent node = node.parent
@@ -146,20 +146,20 @@ def get_executed_params_and_issues(execution_value, arguments):
break break
lazy_value_list.append(argument) lazy_value_list.append(argument)
seq = iterable.FakeSequence(execution_value.infer_state, u'tuple', lazy_value_list) seq = iterable.FakeSequence(execution_value.infer_state, u'tuple', lazy_value_list)
result_arg = LazyKnownContext(seq) result_arg = LazyKnownValue(seq)
elif param.star_count == 2: elif param.star_count == 2:
if argument is not None: if argument is not None:
too_many_args(argument) too_many_args(argument)
# **kwargs param # **kwargs param
dct = iterable.FakeDict(execution_value.infer_state, dict(non_matching_keys)) dct = iterable.FakeDict(execution_value.infer_state, dict(non_matching_keys))
result_arg = LazyKnownContext(dct) result_arg = LazyKnownValue(dct)
non_matching_keys = {} non_matching_keys = {}
else: else:
# normal param # normal param
if argument is None: if argument is None:
# No value: Return an empty container # No value: Return an empty container
if param.default is None: if param.default is None:
result_arg = LazyUnknownContext() result_arg = LazyUnknownValue()
if not keys_only: if not keys_only:
for valueualized_node in arguments.get_calling_nodes(): for valueualized_node in arguments.get_calling_nodes():
m = _error_argument_count(funcdef, len(unpacked_va)) m = _error_argument_count(funcdef, len(unpacked_va))
@@ -172,7 +172,7 @@ def get_executed_params_and_issues(execution_value, arguments):
) )
) )
else: else:
result_arg = LazyTreeContext(default_param_value, param.default) result_arg = LazyTreeValue(default_param_value, param.default)
is_default = True is_default = True
else: else:
result_arg = argument result_arg = argument
@@ -181,7 +181,7 @@ def get_executed_params_and_issues(execution_value, arguments):
execution_value, param, result_arg, execution_value, param, result_arg,
is_default=is_default is_default=is_default
)) ))
if not isinstance(result_arg, LazyUnknownContext): if not isinstance(result_arg, LazyUnknownValue):
keys_used[param.name.value] = result_params[-1] keys_used[param.name.value] = result_params[-1]
if keys_only: if keys_only:
@@ -234,17 +234,17 @@ def _error_argument_count(funcdef, actual_count):
def _create_default_param(execution_value, param): def _create_default_param(execution_value, param):
if param.star_count == 1: if param.star_count == 1:
result_arg = LazyKnownContext( result_arg = LazyKnownValue(
iterable.FakeSequence(execution_value.infer_state, u'tuple', []) iterable.FakeSequence(execution_value.infer_state, u'tuple', [])
) )
elif param.star_count == 2: elif param.star_count == 2:
result_arg = LazyKnownContext( result_arg = LazyKnownValue(
iterable.FakeDict(execution_value.infer_state, {}) iterable.FakeDict(execution_value.infer_state, {})
) )
elif param.default is None: elif param.default is None:
result_arg = LazyUnknownContext() result_arg = LazyUnknownValue()
else: else:
result_arg = LazyTreeContext(execution_value.parent_value, param.default) result_arg = LazyTreeValue(execution_value.parent_value, param.default)
return ExecutedParam(execution_value, param, result_arg) return ExecutedParam(execution_value, param, result_arg)

View File

@@ -8,16 +8,16 @@ from parso.python import tree
from jedi._compatibility import force_unicode, unicode from jedi._compatibility import force_unicode, unicode
from jedi import debug from jedi import debug
from jedi import parser_utils from jedi import parser_utils
from jedi.inference.base_value import ContextSet, NO_VALUES, ContextualizedNode, \ from jedi.inference.base_value import ValueSet, NO_VALUES, ValueualizedNode, \
ContextualizedName, iterator_to_value_set, iterate_values ValueualizedName, iterator_to_value_set, iterate_values
from jedi.inference.lazy_value import LazyTreeContext from jedi.inference.lazy_value import LazyTreeValue
from jedi.inference import compiled from jedi.inference import compiled
from jedi.inference import recursion from jedi.inference import recursion
from jedi.inference import helpers from jedi.inference import helpers
from jedi.inference import analysis from jedi.inference import analysis
from jedi.inference import imports from jedi.inference import imports
from jedi.inference import arguments from jedi.inference import arguments
from jedi.inference.value import ClassContext, FunctionContext from jedi.inference.value import ClassValue, FunctionValue
from jedi.inference.value import iterable from jedi.inference.value import iterable
from jedi.inference.value import TreeInstance from jedi.inference.value import TreeInstance
from jedi.inference.finder import NameFinder from jedi.inference.finder import NameFinder
@@ -75,7 +75,7 @@ def infer_node(value, element):
if typ in ('name', 'number', 'string', 'atom', 'strings', 'keyword', 'fstring'): if typ in ('name', 'number', 'string', 'atom', 'strings', 'keyword', 'fstring'):
return infer_atom(value, element) return infer_atom(value, element)
elif typ == 'lambdef': elif typ == 'lambdef':
return ContextSet([FunctionContext.from_value(value, element)]) return ValueSet([FunctionValue.from_value(value, element)])
elif typ == 'expr_stmt': elif typ == 'expr_stmt':
return infer_expr_stmt(value, element) return infer_expr_stmt(value, element)
elif typ in ('power', 'atom_expr'): elif typ in ('power', 'atom_expr'):
@@ -105,7 +105,7 @@ def infer_node(value, element):
return value_set return value_set
elif typ in ('testlist_star_expr', 'testlist',): elif typ in ('testlist_star_expr', 'testlist',):
# The implicit tuple in statements. # The implicit tuple in statements.
return ContextSet([iterable.SequenceLiteralContext(infer_state, value, element)]) return ValueSet([iterable.SequenceLiteralValue(infer_state, value, element)])
elif typ in ('not_test', 'factor'): elif typ in ('not_test', 'factor'):
value_set = value.infer_node(element.children[-1]) value_set = value.infer_node(element.children[-1])
for operator in element.children[:-1]: for operator in element.children[:-1]:
@@ -122,7 +122,7 @@ def infer_node(value, element):
if element.value not in ('.', '...'): if element.value not in ('.', '...'):
origin = element.parent origin = element.parent
raise AssertionError("unhandled operator %s in %s " % (repr(element.value), origin)) raise AssertionError("unhandled operator %s in %s " % (repr(element.value), origin))
return ContextSet([compiled.builtin_from_name(infer_state, u'Ellipsis')]) return ValueSet([compiled.builtin_from_name(infer_state, u'Ellipsis')])
elif typ == 'dotted_name': elif typ == 'dotted_name':
value_set = infer_atom(value, element.children[0]) value_set = infer_atom(value, element.children[0])
for next_name in element.children[2::2]: for next_name in element.children[2::2]:
@@ -159,7 +159,7 @@ def infer_trailer(value, atom_values, trailer):
trailer_op, node, _ = trailer.children trailer_op, node, _ = trailer.children
return atom_values.get_item( return atom_values.get_item(
infer_subscript_list(value.infer_state, value, node), infer_subscript_list(value.infer_state, value, node),
ContextualizedNode(value, trailer) ValueualizedNode(value, trailer)
) )
else: else:
debug.dbg('infer_trailer: %s in %s', trailer, atom_values) debug.dbg('infer_trailer: %s in %s', trailer, atom_values)
@@ -183,7 +183,7 @@ def infer_atom(value, atom):
if atom.type == 'name': if atom.type == 'name':
if atom.value in ('True', 'False', 'None'): if atom.value in ('True', 'False', 'None'):
# Python 2... # Python 2...
return ContextSet([compiled.builtin_from_name(value.infer_state, atom.value)]) return ValueSet([compiled.builtin_from_name(value.infer_state, atom.value)])
# This is the first global lookup. # This is the first global lookup.
stmt = tree.search_ancestor( stmt = tree.search_ancestor(
@@ -207,7 +207,7 @@ def infer_atom(value, atom):
elif atom.type == 'keyword': elif atom.type == 'keyword':
# For False/True/None # For False/True/None
if atom.value in ('False', 'True', 'None'): if atom.value in ('False', 'True', 'None'):
return ContextSet([compiled.builtin_from_name(value.infer_state, atom.value)]) return ValueSet([compiled.builtin_from_name(value.infer_state, atom.value)])
elif atom.value == 'print': elif atom.value == 'print':
# print e.g. could be inferred like this in Python 2.7 # print e.g. could be inferred like this in Python 2.7
return NO_VALUES return NO_VALUES
@@ -219,7 +219,7 @@ def infer_atom(value, atom):
elif isinstance(atom, tree.Literal): elif isinstance(atom, tree.Literal):
string = value.infer_state.compiled_subprocess.safe_literal_eval(atom.value) string = value.infer_state.compiled_subprocess.safe_literal_eval(atom.value)
return ContextSet([compiled.create_simple_object(value.infer_state, string)]) return ValueSet([compiled.create_simple_object(value.infer_state, string)])
elif atom.type == 'strings': elif atom.type == 'strings':
# Will be multiple string. # Will be multiple string.
value_set = infer_atom(value, atom.children[0]) value_set = infer_atom(value, atom.children[0])
@@ -250,7 +250,7 @@ def infer_atom(value, atom):
pass pass
if comp_for.type in ('comp_for', 'sync_comp_for'): if comp_for.type in ('comp_for', 'sync_comp_for'):
return ContextSet([iterable.comprehension_from_atom( return ValueSet([iterable.comprehension_from_atom(
value.infer_state, value, atom value.infer_state, value, atom
)]) )])
@@ -262,10 +262,10 @@ def infer_atom(value, atom):
array_node_c = [] array_node_c = []
if c[0] == '{' and (array_node == '}' or ':' in array_node_c or if c[0] == '{' and (array_node == '}' or ':' in array_node_c or
'**' in array_node_c): '**' in array_node_c):
new_value = iterable.DictLiteralContext(value.infer_state, value, atom) new_value = iterable.DictLiteralValue(value.infer_state, value, atom)
else: else:
new_value = iterable.SequenceLiteralContext(value.infer_state, value, atom) new_value = iterable.SequenceLiteralValue(value.infer_state, value, atom)
return ContextSet([new_value]) return ValueSet([new_value])
@_limit_value_infers @_limit_value_infers
@@ -305,7 +305,7 @@ def _infer_expr_stmt(value, stmt, seek_name=None):
value_set = value.infer_node(rhs) value_set = value.infer_node(rhs)
if seek_name: if seek_name:
c_node = ContextualizedName(value, seek_name) c_node = ValueualizedName(value, seek_name)
value_set = check_tuple_assignments(value.infer_state, c_node, value_set) value_set = check_tuple_assignments(value.infer_state, c_node, value_set)
first_operator = next(stmt.yield_operators(), None) first_operator = next(stmt.yield_operators(), None)
@@ -324,7 +324,7 @@ def _infer_expr_stmt(value, stmt, seek_name=None):
# only in for loops without clutter, because they are # only in for loops without clutter, because they are
# predictable. Also only do it, if the variable is not a tuple. # predictable. Also only do it, if the variable is not a tuple.
node = for_stmt.get_testlist() node = for_stmt.get_testlist()
cn = ContextualizedNode(value, node) cn = ValueualizedNode(value, node)
ordered = list(cn.infer().iterate(cn)) ordered = list(cn.infer().iterate(cn))
for lazy_value in ordered: for lazy_value in ordered:
@@ -393,7 +393,7 @@ def _literals_to_types(infer_state, result):
cls = compiled.builtin_from_name(infer_state, typ.name.string_name) cls = compiled.builtin_from_name(infer_state, typ.name.string_name)
new_result |= cls.execute_with_values() new_result |= cls.execute_with_values()
else: else:
new_result |= ContextSet([typ]) new_result |= ValueSet([typ])
return new_result return new_result
@@ -409,7 +409,7 @@ def _infer_comparison(infer_state, value, left_values, operator, right_values):
if len(left_values) * len(right_values) > 6: if len(left_values) * len(right_values) > 6:
return _literals_to_types(infer_state, left_values | right_values) return _literals_to_types(infer_state, left_values | right_values)
else: else:
return ContextSet.from_sets( return ValueSet.from_sets(
_infer_comparison_part(infer_state, value, left, operator, right) _infer_comparison_part(infer_state, value, left, operator, right)
for left in left_values for left in left_values
for right in right_values for right in right_values
@@ -445,11 +445,11 @@ def _bool_to_value(infer_state, bool_):
def _get_tuple_ints(value): def _get_tuple_ints(value):
if not isinstance(value, iterable.SequenceLiteralContext): if not isinstance(value, iterable.SequenceLiteralValue):
return None return None
numbers = [] numbers = []
for lazy_value in value.py__iter__(): for lazy_value in value.py__iter__():
if not isinstance(lazy_value, LazyTreeContext): if not isinstance(lazy_value, LazyTreeValue):
return None return None
node = lazy_value.data node = lazy_value.data
if node.type != 'number': if node.type != 'number':
@@ -472,26 +472,26 @@ def _infer_comparison_part(infer_state, value, left, operator, right):
if str_operator == '*': if str_operator == '*':
# for iterables, ignore * operations # for iterables, ignore * operations
if isinstance(left, iterable.Sequence) or is_string(left): if isinstance(left, iterable.Sequence) or is_string(left):
return ContextSet([left]) return ValueSet([left])
elif isinstance(right, iterable.Sequence) or is_string(right): elif isinstance(right, iterable.Sequence) or is_string(right):
return ContextSet([right]) return ValueSet([right])
elif str_operator == '+': elif str_operator == '+':
if l_is_num and r_is_num or is_string(left) and is_string(right): if l_is_num and r_is_num or is_string(left) and is_string(right):
return ContextSet([left.execute_operation(right, str_operator)]) return ValueSet([left.execute_operation(right, str_operator)])
elif _is_tuple(left) and _is_tuple(right) or _is_list(left) and _is_list(right): elif _is_tuple(left) and _is_tuple(right) or _is_list(left) and _is_list(right):
return ContextSet([iterable.MergedArray(infer_state, (left, right))]) return ValueSet([iterable.MergedArray(infer_state, (left, right))])
elif str_operator == '-': elif str_operator == '-':
if l_is_num and r_is_num: if l_is_num and r_is_num:
return ContextSet([left.execute_operation(right, str_operator)]) return ValueSet([left.execute_operation(right, str_operator)])
elif str_operator == '%': elif str_operator == '%':
# With strings and numbers the left type typically remains. Except for # With strings and numbers the left type typically remains. Except for
# `int() % float()`. # `int() % float()`.
return ContextSet([left]) return ValueSet([left])
elif str_operator in COMPARISON_OPERATORS: elif str_operator in COMPARISON_OPERATORS:
if left.is_compiled() and right.is_compiled(): if left.is_compiled() and right.is_compiled():
# Possible, because the return is not an option. Just compare. # Possible, because the return is not an option. Just compare.
try: try:
return ContextSet([left.execute_operation(right, str_operator)]) return ValueSet([left.execute_operation(right, str_operator)])
except TypeError: except TypeError:
# Could be True or False. # Could be True or False.
pass pass
@@ -499,7 +499,7 @@ def _infer_comparison_part(infer_state, value, left, operator, right):
if str_operator in ('is', '!=', '==', 'is not'): if str_operator in ('is', '!=', '==', 'is not'):
operation = COMPARISON_OPERATORS[str_operator] operation = COMPARISON_OPERATORS[str_operator]
bool_ = operation(left, right) bool_ = operation(left, right)
return ContextSet([_bool_to_value(infer_state, bool_)]) return ValueSet([_bool_to_value(infer_state, bool_)])
if isinstance(left, VersionInfo): if isinstance(left, VersionInfo):
version_info = _get_tuple_ints(right) version_info = _get_tuple_ints(right)
@@ -508,9 +508,9 @@ def _infer_comparison_part(infer_state, value, left, operator, right):
infer_state.environment.version_info, infer_state.environment.version_info,
tuple(version_info) tuple(version_info)
) )
return ContextSet([_bool_to_value(infer_state, bool_result)]) return ValueSet([_bool_to_value(infer_state, bool_result)])
return ContextSet([_bool_to_value(infer_state, True), _bool_to_value(infer_state, False)]) return ValueSet([_bool_to_value(infer_state, True), _bool_to_value(infer_state, False)])
elif str_operator == 'in': elif str_operator == 'in':
return NO_VALUES return NO_VALUES
@@ -526,7 +526,7 @@ def _infer_comparison_part(infer_state, value, left, operator, right):
analysis.add(value, 'type-error-operation', operator, analysis.add(value, 'type-error-operation', operator,
message % (left, right)) message % (left, right))
result = ContextSet([left, right]) result = ValueSet([left, right])
debug.dbg('Used operator %s resulting in %s', operator, result) debug.dbg('Used operator %s resulting in %s', operator, result)
return result return result
@@ -595,13 +595,13 @@ def tree_name_to_values(infer_state, value, tree_name):
try: try:
types = value.predefined_names[node][tree_name.value] types = value.predefined_names[node][tree_name.value]
except KeyError: except KeyError:
cn = ContextualizedNode(value, node.children[3]) cn = ValueualizedNode(value, node.children[3])
for_types = iterate_values( for_types = iterate_values(
cn.infer(), cn.infer(),
valueualized_node=cn, valueualized_node=cn,
is_async=node.parent.type == 'async_stmt', is_async=node.parent.type == 'async_stmt',
) )
c_node = ContextualizedName(value, tree_name) c_node = ValueualizedName(value, tree_name)
types = check_tuple_assignments(infer_state, c_node, for_types) types = check_tuple_assignments(infer_state, c_node, for_types)
elif typ == 'expr_stmt': elif typ == 'expr_stmt':
types = _remove_statements(infer_state, value, node, tree_name) types = _remove_statements(infer_state, value, node, tree_name)
@@ -635,14 +635,14 @@ def _apply_decorators(value, node):
This is also the places where the decorators are processed. This is also the places where the decorators are processed.
""" """
if node.type == 'classdef': if node.type == 'classdef':
decoratee_value = ClassContext( decoratee_value = ClassValue(
value.infer_state, value.infer_state,
parent_value=value, parent_value=value,
tree_node=node tree_node=node
) )
else: else:
decoratee_value = FunctionContext.from_value(value, node) decoratee_value = FunctionValue.from_value(value, node)
initial = values = ContextSet([decoratee_value]) initial = values = ValueSet([decoratee_value])
for dec in reversed(node.get_decorators()): for dec in reversed(node.get_decorators()):
debug.dbg('decorator: %s %s', dec, values, color="MAGENTA") debug.dbg('decorator: %s %s', dec, values, color="MAGENTA")
with debug.increase_indent_cm(): with debug.increase_indent_cm():
@@ -670,7 +670,7 @@ def _apply_decorators(value, node):
debug.dbg('decorator end %s', values, color="MAGENTA") debug.dbg('decorator end %s', values, color="MAGENTA")
if values != initial: if values != initial:
return ContextSet([Decoratee(c, decoratee_value) for c in values]) return ValueSet([Decoratee(c, decoratee_value) for c in values])
return values return values
@@ -680,7 +680,7 @@ def check_tuple_assignments(infer_state, valueualized_name, value_set):
""" """
lazy_value = None lazy_value = None
for index, node in valueualized_name.assignment_indexes(): for index, node in valueualized_name.assignment_indexes():
cn = ContextualizedNode(valueualized_name.value, node) cn = ValueualizedNode(valueualized_name.value, node)
iterated = value_set.iterate(cn) iterated = value_set.iterate(cn)
if isinstance(index, slice): if isinstance(index, slice):
# For no star unpacking is not possible. # For no star unpacking is not possible.
@@ -704,7 +704,7 @@ def infer_subscript_list(infer_state, value, index):
""" """
if index == ':': if index == ':':
# Like array[:] # Like array[:]
return ContextSet([iterable.Slice(value, None, None, None)]) return ValueSet([iterable.Slice(value, None, None, None)])
elif index.type == 'subscript' and not index.children[0] == '.': elif index.type == 'subscript' and not index.children[0] == '.':
# subscript basically implies a slice operation, except for Python 2's # subscript basically implies a slice operation, except for Python 2's
@@ -722,9 +722,9 @@ def infer_subscript_list(infer_state, value, index):
result.append(el) result.append(el)
result += [None] * (3 - len(result)) result += [None] * (3 - len(result))
return ContextSet([iterable.Slice(value, *result)]) return ValueSet([iterable.Slice(value, *result)])
elif index.type == 'subscriptlist': elif index.type == 'subscriptlist':
return ContextSet([iterable.SequenceLiteralContext(infer_state, value, index)]) return ValueSet([iterable.SequenceLiteralValue(infer_state, value, index)])
# No slices # No slices
return value.infer_node(index) return value.infer_node(index)

View File

@@ -2,7 +2,7 @@ import os
from jedi._compatibility import unicode, force_unicode, all_suffixes from jedi._compatibility import unicode, force_unicode, all_suffixes
from jedi.inference.cache import infer_state_method_cache from jedi.inference.cache import infer_state_method_cache
from jedi.inference.base_value import ContextualizedNode from jedi.inference.base_value import ValueualizedNode
from jedi.inference.helpers import is_string from jedi.inference.helpers import is_string
from jedi.common.utils import traverse_parents from jedi.common.utils import traverse_parents
from jedi.parser_utils import get_cached_code_lines from jedi.parser_utils import get_cached_code_lines
@@ -60,7 +60,7 @@ def _paths_from_assignment(module_value, expr_stmt):
except AssertionError: except AssertionError:
continue continue
cn = ContextualizedNode(module_value.create_value(expr_stmt), expr_stmt) cn = ValueualizedNode(module_value.create_value(expr_stmt), expr_stmt)
for lazy_value in cn.infer().iterate(cn): for lazy_value in cn.infer().iterate(cn):
for value in lazy_value.infer(): for value in lazy_value.infer():
if is_string(value): if is_string(value):
@@ -152,8 +152,8 @@ def _get_paths_from_buildout_script(infer_state, buildout_script_path):
debug.warning('Error trying to read buildout_script: %s', buildout_script_path) debug.warning('Error trying to read buildout_script: %s', buildout_script_path)
return return
from jedi.inference.value import ModuleContext from jedi.inference.value import ModuleValue
module = ModuleContext( module = ModuleValue(
infer_state, module_node, file_io, infer_state, module_node, file_io,
string_names=None, string_names=None,
code_lines=get_cached_code_lines(infer_state.grammar, buildout_script_path), code_lines=get_cached_code_lines(infer_state.grammar, buildout_script_path),

View File

@@ -100,7 +100,7 @@ class PushBackIterator(object):
@contextlib.contextmanager @contextlib.contextmanager
def ignored(*exceptions): def ignored(*exceptions):
""" """
Context manager that ignores all of the specified exceptions. This will Value manager that ignores all of the specified exceptions. This will
be in the standard library starting with Python 3.4. be in the standard library starting with Python 3.4.
""" """
try: try:

View File

@@ -1,6 +1,6 @@
from jedi.inference.value.module import ModuleContext from jedi.inference.value.module import ModuleValue
from jedi.inference.value.klass import ClassContext from jedi.inference.value.klass import ClassValue
from jedi.inference.value.function import FunctionContext, \ from jedi.inference.value.function import FunctionValue, \
MethodContext, FunctionExecutionContext MethodValue, FunctionExecutionValue
from jedi.inference.value.instance import AnonymousInstance, BoundMethod, \ from jedi.inference.value.instance import AnonymousInstance, BoundMethod, \
CompiledInstance, AbstractInstanceContext, TreeInstance CompiledInstance, AbstractInstanceValue, TreeInstance

View File

@@ -3,10 +3,10 @@ Decorators are not really values, however we need some wrappers to improve
docstrings and other things around decorators. docstrings and other things around decorators.
''' '''
from jedi.inference.base_value import ContextWrapper from jedi.inference.base_value import ValueWrapper
class Decoratee(ContextWrapper): class Decoratee(ValueWrapper):
def __init__(self, wrapped_value, original_value): def __init__(self, wrapped_value, original_value):
self._wrapped_value = wrapped_value self._wrapped_value = wrapped_value
self._original_value = original_value self._original_value = original_value

View File

@@ -11,11 +11,11 @@ from jedi.inference import helpers
from jedi.inference.signature import TreeSignature from jedi.inference.signature import TreeSignature
from jedi.inference.arguments import AnonymousArguments from jedi.inference.arguments import AnonymousArguments
from jedi.inference.filters import ParserTreeFilter, FunctionExecutionFilter from jedi.inference.filters import ParserTreeFilter, FunctionExecutionFilter
from jedi.inference.names import ContextName, AbstractNameDefinition, ParamName from jedi.inference.names import ValueName, AbstractNameDefinition, ParamName
from jedi.inference.base_value import ContextualizedNode, NO_VALUES, \ from jedi.inference.base_value import ValueualizedNode, NO_VALUES, \
ContextSet, TreeContext, ContextWrapper ValueSet, TreeValue, ValueWrapper
from jedi.inference.lazy_value import LazyKnownContexts, LazyKnownContext, \ from jedi.inference.lazy_value import LazyKnownValues, LazyKnownValue, \
LazyTreeContext LazyTreeValue
from jedi.inference.value import iterable from jedi.inference.value import iterable
from jedi import parser_utils from jedi import parser_utils
from jedi.inference.parser_cache import get_yield_exprs from jedi.inference.parser_cache import get_yield_exprs
@@ -35,10 +35,10 @@ class LambdaName(AbstractNameDefinition):
return self._lambda_value.tree_node.start_pos return self._lambda_value.tree_node.start_pos
def infer(self): def infer(self):
return ContextSet([self._lambda_value]) return ValueSet([self._lambda_value])
class FunctionAndClassBase(TreeContext): class FunctionAndClassBase(TreeValue):
def get_qualified_names(self): def get_qualified_names(self):
if self.parent_value.is_class(): if self.parent_value.is_class():
n = self.parent_value.get_qualified_names() n = self.parent_value.get_qualified_names()
@@ -73,8 +73,8 @@ class FunctionMixin(object):
from jedi.inference.value.instance import BoundMethod from jedi.inference.value.instance import BoundMethod
if instance is None: if instance is None:
# Calling the Foo.bar results in the original bar function. # Calling the Foo.bar results in the original bar function.
return ContextSet([self]) return ValueSet([self])
return ContextSet([BoundMethod(instance, self)]) return ValueSet([BoundMethod(instance, self)])
def get_param_names(self): def get_param_names(self):
function_execution = self.get_function_execution() function_execution = self.get_function_execution()
@@ -85,7 +85,7 @@ class FunctionMixin(object):
def name(self): def name(self):
if self.tree_node.type == 'lambdef': if self.tree_node.type == 'lambdef':
return LambdaName(self) return LambdaName(self)
return ContextName(self, self.tree_node.name) return ValueName(self, self.tree_node.name)
def py__name__(self): def py__name__(self):
return self.name.string_name return self.name.string_name
@@ -98,13 +98,13 @@ class FunctionMixin(object):
if arguments is None: if arguments is None:
arguments = AnonymousArguments() arguments = AnonymousArguments()
return FunctionExecutionContext(self.infer_state, self.parent_value, self, arguments) return FunctionExecutionValue(self.infer_state, self.parent_value, self, arguments)
def get_signatures(self): def get_signatures(self):
return [TreeSignature(f) for f in self.get_signature_functions()] return [TreeSignature(f) for f in self.get_signature_functions()]
class FunctionContext(use_metaclass(CachedMetaClass, FunctionMixin, FunctionAndClassBase)): class FunctionValue(use_metaclass(CachedMetaClass, FunctionMixin, FunctionAndClassBase)):
def is_function(self): def is_function(self):
return True return True
@@ -112,7 +112,7 @@ class FunctionContext(use_metaclass(CachedMetaClass, FunctionMixin, FunctionAndC
def from_value(cls, value, tree_node): def from_value(cls, value, tree_node):
def create(tree_node): def create(tree_node):
if value.is_class(): if value.is_class():
return MethodContext( return MethodValue(
value.infer_state, value.infer_state,
value, value,
parent_value=parent_value, parent_value=parent_value,
@@ -134,7 +134,7 @@ class FunctionContext(use_metaclass(CachedMetaClass, FunctionMixin, FunctionAndC
function = create(tree_node) function = create(tree_node)
if overloaded_funcs: if overloaded_funcs:
return OverloadedFunctionContext( return OverloadedFunctionValue(
function, function,
[create(f) for f in overloaded_funcs] [create(f) for f in overloaded_funcs]
) )
@@ -151,9 +151,9 @@ class FunctionContext(use_metaclass(CachedMetaClass, FunctionMixin, FunctionAndC
return [self] return [self]
class MethodContext(FunctionContext): class MethodValue(FunctionValue):
def __init__(self, infer_state, class_value, *args, **kwargs): def __init__(self, infer_state, class_value, *args, **kwargs):
super(MethodContext, self).__init__(infer_state, *args, **kwargs) super(MethodValue, self).__init__(infer_state, *args, **kwargs)
self.class_value = class_value self.class_value = class_value
def get_default_param_value(self): def get_default_param_value(self):
@@ -168,11 +168,11 @@ class MethodContext(FunctionContext):
return names + (self.py__name__(),) return names + (self.py__name__(),)
class FunctionExecutionContext(TreeContext): class FunctionExecutionValue(TreeValue):
function_execution_filter = FunctionExecutionFilter function_execution_filter = FunctionExecutionFilter
def __init__(self, infer_state, parent_value, function_value, var_args): def __init__(self, infer_state, parent_value, function_value, var_args):
super(FunctionExecutionContext, self).__init__( super(FunctionExecutionValue, self).__init__(
infer_state, infer_state,
parent_value, parent_value,
function_value.tree_node, function_value.tree_node,
@@ -206,7 +206,7 @@ class FunctionExecutionContext(TreeContext):
debug.dbg('Return unreachable: %s', r) debug.dbg('Return unreachable: %s', r)
else: else:
if check_yields: if check_yields:
value_set |= ContextSet.from_sets( value_set |= ValueSet.from_sets(
lazy_value.infer() lazy_value.infer()
for lazy_value in self._get_yield_lazy_value(r) for lazy_value in self._get_yield_lazy_value(r)
) )
@@ -215,7 +215,7 @@ class FunctionExecutionContext(TreeContext):
children = r.children children = r.children
except AttributeError: except AttributeError:
ctx = compiled.builtin_from_name(self.infer_state, u'None') ctx = compiled.builtin_from_name(self.infer_state, u'None')
value_set |= ContextSet([ctx]) value_set |= ValueSet([ctx])
else: else:
value_set |= self.infer_node(children[1]) value_set |= self.infer_node(children[1])
if check is flow_analysis.REACHABLE: if check is flow_analysis.REACHABLE:
@@ -227,16 +227,16 @@ class FunctionExecutionContext(TreeContext):
if yield_expr.type == 'keyword': if yield_expr.type == 'keyword':
# `yield` just yields None. # `yield` just yields None.
ctx = compiled.builtin_from_name(self.infer_state, u'None') ctx = compiled.builtin_from_name(self.infer_state, u'None')
yield LazyKnownContext(ctx) yield LazyKnownValue(ctx)
return return
node = yield_expr.children[1] node = yield_expr.children[1]
if node.type == 'yield_arg': # It must be a yield from. if node.type == 'yield_arg': # It must be a yield from.
cn = ContextualizedNode(self, node.children[1]) cn = ValueualizedNode(self, node.children[1])
for lazy_value in cn.infer().iterate(cn): for lazy_value in cn.infer().iterate(cn):
yield lazy_value yield lazy_value
else: else:
yield LazyTreeContext(self, node) yield LazyTreeValue(self, node)
@recursion.execution_recursion_decorator(default=iter([])) @recursion.execution_recursion_decorator(default=iter([]))
def get_yield_lazy_values(self, is_async=False): def get_yield_lazy_values(self, is_async=False):
@@ -265,7 +265,7 @@ class FunctionExecutionContext(TreeContext):
else: else:
types = self.get_return_values(check_yields=True) types = self.get_return_values(check_yields=True)
if types: if types:
yield LazyKnownContexts(types) yield LazyKnownValues(types)
return return
last_for_stmt = for_stmt last_for_stmt = for_stmt
@@ -277,7 +277,7 @@ class FunctionExecutionContext(TreeContext):
yield result yield result
else: else:
input_node = for_stmt.get_testlist() input_node = for_stmt.get_testlist()
cn = ContextualizedNode(self, input_node) cn = ValueualizedNode(self, input_node)
ordered = cn.infer().iterate(cn) ordered = cn.infer().iterate(cn)
ordered = list(ordered) ordered = list(ordered)
for lazy_value in ordered: for lazy_value in ordered:
@@ -288,7 +288,7 @@ class FunctionExecutionContext(TreeContext):
yield result yield result
def merge_yield_values(self, is_async=False): def merge_yield_values(self, is_async=False):
return ContextSet.from_sets( return ValueSet.from_sets(
lazy_value.infer() lazy_value.infer()
for lazy_value in self.get_yield_lazy_values() for lazy_value in self.get_yield_lazy_values()
) )
@@ -338,7 +338,7 @@ class FunctionExecutionContext(TreeContext):
yield_values = self.merge_yield_values(is_async=True) yield_values = self.merge_yield_values(is_async=True)
# The contravariant doesn't seem to be defined. # The contravariant doesn't seem to be defined.
generics = (yield_values.py__class__(), NO_VALUES) generics = (yield_values.py__class__(), NO_VALUES)
return ContextSet( return ValueSet(
# In Python 3.6 AsyncGenerator is still a class. # In Python 3.6 AsyncGenerator is still a class.
GenericClass(c, generics) GenericClass(c, generics)
for c in async_generator_classes for c in async_generator_classes
@@ -350,19 +350,19 @@ class FunctionExecutionContext(TreeContext):
return_values = self.get_return_values() return_values = self.get_return_values()
# Only the first generic is relevant. # Only the first generic is relevant.
generics = (return_values.py__class__(), NO_VALUES, NO_VALUES) generics = (return_values.py__class__(), NO_VALUES, NO_VALUES)
return ContextSet( return ValueSet(
GenericClass(c, generics) for c in async_classes GenericClass(c, generics) for c in async_classes
).execute_annotation() ).execute_annotation()
else: else:
if is_generator: if is_generator:
return ContextSet([iterable.Generator(infer_state, self)]) return ValueSet([iterable.Generator(infer_state, self)])
else: else:
return self.get_return_values() return self.get_return_values()
class OverloadedFunctionContext(FunctionMixin, ContextWrapper): class OverloadedFunctionValue(FunctionMixin, ValueWrapper):
def __init__(self, function, overloaded_functions): def __init__(self, function, overloaded_functions):
super(OverloadedFunctionContext, self).__init__(function) super(OverloadedFunctionValue, self).__init__(function)
self._overloaded_functions = overloaded_functions self._overloaded_functions = overloaded_functions
def py__call__(self, arguments): def py__call__(self, arguments):
@@ -383,7 +383,7 @@ class OverloadedFunctionContext(FunctionMixin, ContextWrapper):
if self.infer_state.is_analysis: if self.infer_state.is_analysis:
# In this case we want precision. # In this case we want precision.
return NO_VALUES return NO_VALUES
return ContextSet.from_sets(fe.infer() for fe in function_executions) return ValueSet.from_sets(fe.infer() for fe in function_executions)
def get_signature_functions(self): def get_signature_functions(self):
return self._overloaded_functions return self._overloaded_functions

View File

@@ -6,16 +6,16 @@ from jedi.inference import compiled
from jedi.inference.compiled.value import CompiledObjectFilter from jedi.inference.compiled.value import CompiledObjectFilter
from jedi.inference.helpers import values_from_qualified_names from jedi.inference.helpers import values_from_qualified_names
from jedi.inference.filters import AbstractFilter from jedi.inference.filters import AbstractFilter
from jedi.inference.names import ContextName, TreeNameDefinition from jedi.inference.names import ValueName, TreeNameDefinition
from jedi.inference.base_value import Context, NO_VALUES, ContextSet, \ from jedi.inference.base_value import Value, NO_VALUES, ValueSet, \
iterator_to_value_set, ContextWrapper iterator_to_value_set, ValueWrapper
from jedi.inference.lazy_value import LazyKnownContext, LazyKnownContexts from jedi.inference.lazy_value import LazyKnownValue, LazyKnownValues
from jedi.inference.cache import infer_state_method_cache from jedi.inference.cache import infer_state_method_cache
from jedi.inference.arguments import AnonymousArguments, \ from jedi.inference.arguments import AnonymousArguments, \
ValuesArguments, TreeArgumentsWrapper ValuesArguments, TreeArgumentsWrapper
from jedi.inference.value.function import \ from jedi.inference.value.function import \
FunctionContext, FunctionMixin, OverloadedFunctionContext FunctionValue, FunctionMixin, OverloadedFunctionValue
from jedi.inference.value.klass import ClassContext, apply_py__get__, \ from jedi.inference.value.klass import ClassValue, apply_py__get__, \
ClassFilter ClassFilter
from jedi.inference.value import iterable from jedi.inference.value import iterable
from jedi.parser_utils import get_parent_scope from jedi.parser_utils import get_parent_scope
@@ -28,7 +28,7 @@ class InstanceExecutedParam(object):
self.string_name = self._tree_param.name.value self.string_name = self._tree_param.name.value
def infer(self): def infer(self):
return ContextSet([self._instance]) return ValueSet([self._instance])
def matches_signature(self): def matches_signature(self):
return True return True
@@ -58,11 +58,11 @@ class AnonymousInstanceArguments(AnonymousArguments):
return executed_params, [] return executed_params, []
class AbstractInstanceContext(Context): class AbstractInstanceValue(Value):
api_type = u'instance' api_type = u'instance'
def __init__(self, infer_state, parent_value, class_value, var_args): def __init__(self, infer_state, parent_value, class_value, var_args):
super(AbstractInstanceContext, self).__init__(infer_state, parent_value) super(AbstractInstanceValue, self).__init__(infer_state, parent_value)
# Generated instances are classes that are just generated by self # Generated instances are classes that are just generated by self
# (No var_args) used. # (No var_args) used.
self.class_value = class_value self.class_value = class_value
@@ -81,9 +81,9 @@ class AbstractInstanceContext(Context):
names = self.get_function_slot_names(u'__call__') names = self.get_function_slot_names(u'__call__')
if not names: if not names:
# Means the Instance is not callable. # Means the Instance is not callable.
return super(AbstractInstanceContext, self).py__call__(arguments) return super(AbstractInstanceValue, self).py__call__(arguments)
return ContextSet.from_sets(name.infer().execute(arguments) for name in names) return ValueSet.from_sets(name.infer().execute(arguments) for name in names)
def py__class__(self): def py__class__(self):
return self.class_value return self.class_value
@@ -103,7 +103,7 @@ class AbstractInstanceContext(Context):
return [] return []
def execute_function_slots(self, names, *inferred_args): def execute_function_slots(self, names, *inferred_args):
return ContextSet.from_sets( return ValueSet.from_sets(
name.infer().execute_with_values(*inferred_args) name.infer().execute_with_values(*inferred_args)
for name in names for name in names
) )
@@ -120,7 +120,7 @@ class AbstractInstanceContext(Context):
obj = compiled.builtin_from_name(self.infer_state, u'None') obj = compiled.builtin_from_name(self.infer_state, u'None')
return self.execute_function_slots(names, obj, class_value) return self.execute_function_slots(names, obj, class_value)
else: else:
return ContextSet([self]) return ValueSet([self])
def get_filters(self, search_global=None, until_position=None, def get_filters(self, search_global=None, until_position=None,
origin_scope=None, include_self_names=True): origin_scope=None, include_self_names=True):
@@ -151,18 +151,18 @@ class AbstractInstanceContext(Context):
def py__getitem__(self, index_value_set, valueualized_node): def py__getitem__(self, index_value_set, valueualized_node):
names = self.get_function_slot_names(u'__getitem__') names = self.get_function_slot_names(u'__getitem__')
if not names: if not names:
return super(AbstractInstanceContext, self).py__getitem__( return super(AbstractInstanceValue, self).py__getitem__(
index_value_set, index_value_set,
valueualized_node, valueualized_node,
) )
args = ValuesArguments([index_value_set]) args = ValuesArguments([index_value_set])
return ContextSet.from_sets(name.infer().execute(args) for name in names) return ValueSet.from_sets(name.infer().execute(args) for name in names)
def py__iter__(self, valueualized_node=None): def py__iter__(self, valueualized_node=None):
iter_slot_names = self.get_function_slot_names(u'__iter__') iter_slot_names = self.get_function_slot_names(u'__iter__')
if not iter_slot_names: if not iter_slot_names:
return super(AbstractInstanceContext, self).py__iter__(valueualized_node) return super(AbstractInstanceValue, self).py__iter__(valueualized_node)
def iterate(): def iterate():
for generator in self.execute_function_slots(iter_slot_names): for generator in self.execute_function_slots(iter_slot_names):
@@ -174,7 +174,7 @@ class AbstractInstanceContext(Context):
name = u'__next__' name = u'__next__'
next_slot_names = generator.get_function_slot_names(name) next_slot_names = generator.get_function_slot_names(name)
if next_slot_names: if next_slot_names:
yield LazyKnownContexts( yield LazyKnownValues(
generator.execute_function_slots(next_slot_names) generator.execute_function_slots(next_slot_names)
) )
else: else:
@@ -192,7 +192,7 @@ class AbstractInstanceContext(Context):
for name in self.get_function_slot_names(u'__init__'): for name in self.get_function_slot_names(u'__init__'):
# TODO is this correct? I think we need to check for functions. # TODO is this correct? I think we need to check for functions.
if isinstance(name, LazyInstanceClassName): if isinstance(name, LazyInstanceClassName):
function = FunctionContext.from_value( function = FunctionValue.from_value(
self.parent_value, self.parent_value,
name.tree_name.parent name.tree_name.parent
) )
@@ -209,7 +209,7 @@ class AbstractInstanceContext(Context):
else: else:
parent_value = self.create_instance_value(class_value, scope) parent_value = self.create_instance_value(class_value, scope)
if scope.type == 'funcdef': if scope.type == 'funcdef':
func = FunctionContext.from_value( func = FunctionValue.from_value(
parent_value, parent_value,
scope, scope,
) )
@@ -219,7 +219,7 @@ class AbstractInstanceContext(Context):
else: else:
return bound_method.get_function_execution() return bound_method.get_function_execution()
elif scope.type == 'classdef': elif scope.type == 'classdef':
class_value = ClassContext(self.infer_state, parent_value, scope) class_value = ClassValue(self.infer_state, parent_value, scope)
return class_value return class_value
elif scope.type in ('comp_for', 'sync_comp_for'): elif scope.type in ('comp_for', 'sync_comp_for'):
# Comprehensions currently don't have a special scope in Jedi. # Comprehensions currently don't have a special scope in Jedi.
@@ -237,14 +237,14 @@ class AbstractInstanceContext(Context):
self.var_args) self.var_args)
class CompiledInstance(AbstractInstanceContext): class CompiledInstance(AbstractInstanceValue):
def __init__(self, infer_state, parent_value, class_value, var_args): def __init__(self, infer_state, parent_value, class_value, var_args):
self._original_var_args = var_args self._original_var_args = var_args
super(CompiledInstance, self).__init__(infer_state, parent_value, class_value, var_args) super(CompiledInstance, self).__init__(infer_state, parent_value, class_value, var_args)
@property @property
def name(self): def name(self):
return compiled.CompiledContextName(self, self.class_value.name.string_name) return compiled.CompiledValueName(self, self.class_value.name.string_name)
def get_first_non_keyword_argument_values(self): def get_first_non_keyword_argument_values(self):
key, lazy_value = next(self._original_var_args.unpack(), ('', None)) key, lazy_value = next(self._original_var_args.unpack(), ('', None))
@@ -257,7 +257,7 @@ class CompiledInstance(AbstractInstanceContext):
return False return False
class TreeInstance(AbstractInstanceContext): class TreeInstance(AbstractInstanceValue):
def __init__(self, infer_state, parent_value, class_value, var_args): def __init__(self, infer_state, parent_value, class_value, var_args):
# I don't think that dynamic append lookups should happen here. That # I don't think that dynamic append lookups should happen here. That
# sounds more like something that should go to py__iter__. # sounds more like something that should go to py__iter__.
@@ -273,7 +273,7 @@ class TreeInstance(AbstractInstanceContext):
@property @property
def name(self): def name(self):
return ContextName(self, self.class_value.name.tree_name) return ValueName(self, self.class_value.name.tree_name)
# This can recurse, if the initialization of the class includes a reference # This can recurse, if the initialization of the class includes a reference
# to itself. # to itself.
@@ -367,7 +367,7 @@ class CompiledInstanceClassFilter(AbstractFilter):
] ]
class BoundMethod(FunctionMixin, ContextWrapper): class BoundMethod(FunctionMixin, ValueWrapper):
def __init__(self, instance, function): def __init__(self, instance, function):
super(BoundMethod, self).__init__(function) super(BoundMethod, self).__init__(function)
self.instance = instance self.instance = instance
@@ -390,7 +390,7 @@ class BoundMethod(FunctionMixin, ContextWrapper):
return super(BoundMethod, self).get_function_execution(arguments) return super(BoundMethod, self).get_function_execution(arguments)
def py__call__(self, arguments): def py__call__(self, arguments):
if isinstance(self._wrapped_value, OverloadedFunctionContext): if isinstance(self._wrapped_value, OverloadedFunctionValue):
return self._wrapped_value.py__call__(self._get_arguments(arguments)) return self._wrapped_value.py__call__(self._get_arguments(arguments))
function_execution = self.get_function_execution(arguments) function_execution = self.get_function_execution(arguments)
@@ -409,7 +409,7 @@ class BoundMethod(FunctionMixin, ContextWrapper):
return '<%s: %s>' % (self.__class__.__name__, self._wrapped_value) return '<%s: %s>' % (self.__class__.__name__, self._wrapped_value)
class CompiledBoundMethod(ContextWrapper): class CompiledBoundMethod(ValueWrapper):
def is_bound_method(self): def is_bound_method(self):
return True return True
@@ -517,7 +517,7 @@ class InstanceArguments(TreeArgumentsWrapper):
self.instance = instance self.instance = instance
def unpack(self, func=None): def unpack(self, func=None):
yield None, LazyKnownContext(self.instance) yield None, LazyKnownValue(self.instance)
for values in self._wrapped_arguments.unpack(func): for values in self._wrapped_arguments.unpack(func):
yield values yield values

View File

@@ -28,8 +28,8 @@ from jedi._compatibility import force_unicode, is_py3
from jedi.inference import compiled from jedi.inference import compiled
from jedi.inference import analysis from jedi.inference import analysis
from jedi.inference import recursion from jedi.inference import recursion
from jedi.inference.lazy_value import LazyKnownContext, LazyKnownContexts, \ from jedi.inference.lazy_value import LazyKnownValue, LazyKnownValues, \
LazyTreeContext LazyTreeValue
from jedi.inference.helpers import get_int_or_none, is_string, \ from jedi.inference.helpers import get_int_or_none, is_string, \
predefine_names, infer_call_of_leaf, reraise_getitem_errors, \ predefine_names, infer_call_of_leaf, reraise_getitem_errors, \
SimpleGetItemNotFound SimpleGetItemNotFound
@@ -37,14 +37,14 @@ from jedi.inference.utils import safe_property, to_list
from jedi.inference.cache import infer_state_method_cache from jedi.inference.cache import infer_state_method_cache
from jedi.inference.filters import ParserTreeFilter, LazyAttributeOverwrite, \ from jedi.inference.filters import ParserTreeFilter, LazyAttributeOverwrite, \
publish_method publish_method
from jedi.inference.base_value import ContextSet, Context, NO_VALUES, \ from jedi.inference.base_value import ValueSet, Value, NO_VALUES, \
TreeContext, ContextualizedNode, iterate_values, HelperContextMixin, _sentinel TreeValue, ValueualizedNode, iterate_values, HelperValueMixin, _sentinel
from jedi.parser_utils import get_sync_comp_fors from jedi.parser_utils import get_sync_comp_fors
class IterableMixin(object): class IterableMixin(object):
def py__stop_iteration_returns(self): def py__stop_iteration_returns(self):
return ContextSet([compiled.builtin_from_name(self.infer_state, u'None')]) return ValueSet([compiled.builtin_from_name(self.infer_state, u'None')])
# At the moment, safe values are simple values like "foo", 1 and not # At the moment, safe values are simple values like "foo", 1 and not
# lists/dicts. Therefore as a small speed optimization we can just do the # lists/dicts. Therefore as a small speed optimization we can just do the
@@ -59,7 +59,7 @@ class IterableMixin(object):
raise ValueError("There exists no safe value for value %s" % self) raise ValueError("There exists no safe value for value %s" % self)
return default return default
else: else:
get_safe_value = Context.get_safe_value get_safe_value = Value.get_safe_value
class GeneratorBase(LazyAttributeOverwrite, IterableMixin): class GeneratorBase(LazyAttributeOverwrite, IterableMixin):
@@ -79,20 +79,20 @@ class GeneratorBase(LazyAttributeOverwrite, IterableMixin):
@publish_method('__iter__') @publish_method('__iter__')
def py__iter__(self, valueualized_node=None): def py__iter__(self, valueualized_node=None):
return ContextSet([self]) return ValueSet([self])
@publish_method('send') @publish_method('send')
@publish_method('next', python_version_match=2) @publish_method('next', python_version_match=2)
@publish_method('__next__', python_version_match=3) @publish_method('__next__', python_version_match=3)
def py__next__(self): def py__next__(self):
return ContextSet.from_sets(lazy_value.infer() for lazy_value in self.py__iter__()) return ValueSet.from_sets(lazy_value.infer() for lazy_value in self.py__iter__())
def py__stop_iteration_returns(self): def py__stop_iteration_returns(self):
return ContextSet([compiled.builtin_from_name(self.infer_state, u'None')]) return ValueSet([compiled.builtin_from_name(self.infer_state, u'None')])
@property @property
def name(self): def name(self):
return compiled.CompiledContextName(self, 'Generator') return compiled.CompiledValueName(self, 'Generator')
class Generator(GeneratorBase): class Generator(GeneratorBase):
@@ -111,7 +111,7 @@ class Generator(GeneratorBase):
return "<%s of %s>" % (type(self).__name__, self._func_execution_value) return "<%s of %s>" % (type(self).__name__, self._func_execution_value)
class CompForContext(TreeContext): class CompForValue(TreeValue):
@classmethod @classmethod
def from_comp_for(cls, parent_value, comp_for): def from_comp_for(cls, parent_value, comp_for):
return cls(parent_value.infer_state, parent_value, comp_for) return cls(parent_value.infer_state, parent_value, comp_for)
@@ -159,7 +159,7 @@ def comprehension_from_atom(infer_state, value, atom):
class ComprehensionMixin(object): class ComprehensionMixin(object):
@infer_state_method_cache() @infer_state_method_cache()
def _get_comp_for_value(self, parent_value, comp_for): def _get_comp_for_value(self, parent_value, comp_for):
return CompForContext.from_comp_for(parent_value, comp_for) return CompForValue.from_comp_for(parent_value, comp_for)
def _nested(self, comp_fors, parent_value=None): def _nested(self, comp_fors, parent_value=None):
comp_for = comp_fors[0] comp_for = comp_fors[0]
@@ -171,7 +171,7 @@ class ComprehensionMixin(object):
input_types = parent_value.infer_node(input_node) input_types = parent_value.infer_node(input_node)
# TODO: simulate await if self.is_async # TODO: simulate await if self.is_async
cn = ContextualizedNode(parent_value, input_node) cn = ValueualizedNode(parent_value, input_node)
iterated = input_types.iterate(cn, is_async=is_async) iterated = input_types.iterate(cn, is_async=is_async)
exprlist = comp_for.children[1] exprlist = comp_for.children[1]
for i, lazy_value in enumerate(iterated): for i, lazy_value in enumerate(iterated):
@@ -201,7 +201,7 @@ class ComprehensionMixin(object):
def py__iter__(self, valueualized_node=None): def py__iter__(self, valueualized_node=None):
for set_ in self._iterate(): for set_ in self._iterate():
yield LazyKnownContexts(set_) yield LazyKnownValues(set_)
def __repr__(self): def __repr__(self):
return "<%s of %s>" % (type(self).__name__, self._sync_comp_for_node) return "<%s of %s>" % (type(self).__name__, self._sync_comp_for_node)
@@ -217,7 +217,7 @@ class Sequence(LazyAttributeOverwrite, IterableMixin):
@property @property
def name(self): def name(self):
return compiled.CompiledContextName(self, self.array_type) return compiled.CompiledValueName(self, self.array_type)
def _get_generics(self): def _get_generics(self):
return (self.merge_types_of_iterate().py__class__(),) return (self.merge_types_of_iterate().py__class__(),)
@@ -241,7 +241,7 @@ class Sequence(LazyAttributeOverwrite, IterableMixin):
def py__getitem__(self, index_value_set, valueualized_node): def py__getitem__(self, index_value_set, valueualized_node):
if self.array_type == 'dict': if self.array_type == 'dict':
return self._dict_values() return self._dict_values()
return iterate_values(ContextSet([self])) return iterate_values(ValueSet([self]))
class _BaseComprehension(ComprehensionMixin): class _BaseComprehension(ComprehensionMixin):
@@ -258,7 +258,7 @@ class ListComprehension(_BaseComprehension, Sequence):
def py__simple_getitem__(self, index): def py__simple_getitem__(self, index):
if isinstance(index, slice): if isinstance(index, slice):
return ContextSet([self]) return ValueSet([self])
all_types = list(self.py__iter__()) all_types = list(self.py__iter__())
with reraise_getitem_errors(IndexError, TypeError): with reraise_getitem_errors(IndexError, TypeError):
@@ -287,7 +287,7 @@ class DictComprehension(ComprehensionMixin, Sequence):
def py__iter__(self, valueualized_node=None): def py__iter__(self, valueualized_node=None):
for keys, values in self._iterate(): for keys, values in self._iterate():
yield LazyKnownContexts(keys) yield LazyKnownValues(keys)
def py__simple_getitem__(self, index): def py__simple_getitem__(self, index):
for keys, values in self._iterate(): for keys, values in self._iterate():
@@ -300,31 +300,31 @@ class DictComprehension(ComprehensionMixin, Sequence):
raise SimpleGetItemNotFound() raise SimpleGetItemNotFound()
def _dict_keys(self): def _dict_keys(self):
return ContextSet.from_sets(keys for keys, values in self._iterate()) return ValueSet.from_sets(keys for keys, values in self._iterate())
def _dict_values(self): def _dict_values(self):
return ContextSet.from_sets(values for keys, values in self._iterate()) return ValueSet.from_sets(values for keys, values in self._iterate())
@publish_method('values') @publish_method('values')
def _imitate_values(self): def _imitate_values(self):
lazy_value = LazyKnownContexts(self._dict_values()) lazy_value = LazyKnownValues(self._dict_values())
return ContextSet([FakeSequence(self.infer_state, u'list', [lazy_value])]) return ValueSet([FakeSequence(self.infer_state, u'list', [lazy_value])])
@publish_method('items') @publish_method('items')
def _imitate_items(self): def _imitate_items(self):
lazy_values = [ lazy_values = [
LazyKnownContext( LazyKnownValue(
FakeSequence( FakeSequence(
self.infer_state, self.infer_state,
u'tuple', u'tuple',
[LazyKnownContexts(key), [LazyKnownValues(key),
LazyKnownContexts(value)] LazyKnownValues(value)]
) )
) )
for key, value in self._iterate() for key, value in self._iterate()
] ]
return ContextSet([FakeSequence(self.infer_state, u'list', lazy_values)]) return ValueSet([FakeSequence(self.infer_state, u'list', lazy_values)])
def get_mapping_item_values(self): def get_mapping_item_values(self):
return self._dict_keys(), self._dict_values() return self._dict_keys(), self._dict_values()
@@ -335,21 +335,21 @@ class DictComprehension(ComprehensionMixin, Sequence):
return [] return []
class SequenceLiteralContext(Sequence): class SequenceLiteralValue(Sequence):
_TUPLE_LIKE = 'testlist_star_expr', 'testlist', 'subscriptlist' _TUPLE_LIKE = 'testlist_star_expr', 'testlist', 'subscriptlist'
mapping = {'(': u'tuple', mapping = {'(': u'tuple',
'[': u'list', '[': u'list',
'{': u'set'} '{': u'set'}
def __init__(self, infer_state, defining_value, atom): def __init__(self, infer_state, defining_value, atom):
super(SequenceLiteralContext, self).__init__(infer_state) super(SequenceLiteralValue, self).__init__(infer_state)
self.atom = atom self.atom = atom
self._defining_value = defining_value self._defining_value = defining_value
if self.atom.type in self._TUPLE_LIKE: if self.atom.type in self._TUPLE_LIKE:
self.array_type = u'tuple' self.array_type = u'tuple'
else: else:
self.array_type = SequenceLiteralContext.mapping[atom.children[0]] self.array_type = SequenceLiteralValue.mapping[atom.children[0]]
"""The builtin name of the array (list, set, tuple or dict).""" """The builtin name of the array (list, set, tuple or dict)."""
def py__simple_getitem__(self, index): def py__simple_getitem__(self, index):
@@ -368,7 +368,7 @@ class SequenceLiteralContext(Sequence):
raise SimpleGetItemNotFound('No key found in dictionary %s.' % self) raise SimpleGetItemNotFound('No key found in dictionary %s.' % self)
if isinstance(index, slice): if isinstance(index, slice):
return ContextSet([self]) return ValueSet([self])
else: else:
with reraise_getitem_errors(TypeError, KeyError, IndexError): with reraise_getitem_errors(TypeError, KeyError, IndexError):
node = self.get_tree_entries()[index] node = self.get_tree_entries()[index]
@@ -387,15 +387,15 @@ class SequenceLiteralContext(Sequence):
# We don't know which dict index comes first, therefore always # We don't know which dict index comes first, therefore always
# yield all the types. # yield all the types.
for _ in types: for _ in types:
yield LazyKnownContexts(types) yield LazyKnownValues(types)
else: else:
for node in self.get_tree_entries(): for node in self.get_tree_entries():
if node == ':' or node.type == 'subscript': if node == ':' or node.type == 'subscript':
# TODO this should probably use at least part of the code # TODO this should probably use at least part of the code
# of infer_subscript_list. # of infer_subscript_list.
yield LazyKnownContext(Slice(self._defining_value, None, None, None)) yield LazyKnownValue(Slice(self._defining_value, None, None, None))
else: else:
yield LazyTreeContext(self._defining_value, node) yield LazyTreeValue(self._defining_value, node)
for addition in check_array_additions(self._defining_value, self): for addition in check_array_additions(self._defining_value, self):
yield addition yield addition
@@ -404,7 +404,7 @@ class SequenceLiteralContext(Sequence):
return len(self.get_tree_entries()) return len(self.get_tree_entries())
def _dict_values(self): def _dict_values(self):
return ContextSet.from_sets( return ValueSet.from_sets(
self._defining_value.infer_node(v) self._defining_value.infer_node(v)
for k, v in self.get_tree_entries() for k, v in self.get_tree_entries()
) )
@@ -462,39 +462,39 @@ class SequenceLiteralContext(Sequence):
for key_node, value in self.get_tree_entries(): for key_node, value in self.get_tree_entries():
for key in self._defining_value.infer_node(key_node): for key in self._defining_value.infer_node(key_node):
if is_string(key): if is_string(key):
yield key.get_safe_value(), LazyTreeContext(self._defining_value, value) yield key.get_safe_value(), LazyTreeValue(self._defining_value, value)
def __repr__(self): def __repr__(self):
return "<%s of %s>" % (self.__class__.__name__, self.atom) return "<%s of %s>" % (self.__class__.__name__, self.atom)
class DictLiteralContext(_DictMixin, SequenceLiteralContext): class DictLiteralValue(_DictMixin, SequenceLiteralValue):
array_type = u'dict' array_type = u'dict'
def __init__(self, infer_state, defining_value, atom): def __init__(self, infer_state, defining_value, atom):
super(SequenceLiteralContext, self).__init__(infer_state) super(SequenceLiteralValue, self).__init__(infer_state)
self._defining_value = defining_value self._defining_value = defining_value
self.atom = atom self.atom = atom
@publish_method('values') @publish_method('values')
def _imitate_values(self): def _imitate_values(self):
lazy_value = LazyKnownContexts(self._dict_values()) lazy_value = LazyKnownValues(self._dict_values())
return ContextSet([FakeSequence(self.infer_state, u'list', [lazy_value])]) return ValueSet([FakeSequence(self.infer_state, u'list', [lazy_value])])
@publish_method('items') @publish_method('items')
def _imitate_items(self): def _imitate_items(self):
lazy_values = [ lazy_values = [
LazyKnownContext(FakeSequence( LazyKnownValue(FakeSequence(
self.infer_state, u'tuple', self.infer_state, u'tuple',
(LazyTreeContext(self._defining_value, key_node), (LazyTreeValue(self._defining_value, key_node),
LazyTreeContext(self._defining_value, value_node)) LazyTreeValue(self._defining_value, value_node))
)) for key_node, value_node in self.get_tree_entries() )) for key_node, value_node in self.get_tree_entries()
] ]
return ContextSet([FakeSequence(self.infer_state, u'list', lazy_values)]) return ValueSet([FakeSequence(self.infer_state, u'list', lazy_values)])
def _dict_keys(self): def _dict_keys(self):
return ContextSet.from_sets( return ValueSet.from_sets(
self._defining_value.infer_node(k) self._defining_value.infer_node(k)
for k, v in self.get_tree_entries() for k, v in self.get_tree_entries()
) )
@@ -503,9 +503,9 @@ class DictLiteralContext(_DictMixin, SequenceLiteralContext):
return self._dict_keys(), self._dict_values() return self._dict_keys(), self._dict_values()
class _FakeArray(SequenceLiteralContext): class _FakeArray(SequenceLiteralValue):
def __init__(self, infer_state, container, type): def __init__(self, infer_state, container, type):
super(SequenceLiteralContext, self).__init__(infer_state) super(SequenceLiteralValue, self).__init__(infer_state)
self.array_type = type self.array_type = type
self.atom = container self.atom = container
# TODO is this class really needed? # TODO is this class really needed?
@@ -521,7 +521,7 @@ class FakeSequence(_FakeArray):
def py__simple_getitem__(self, index): def py__simple_getitem__(self, index):
if isinstance(index, slice): if isinstance(index, slice):
return ContextSet([self]) return ValueSet([self])
with reraise_getitem_errors(IndexError, TypeError): with reraise_getitem_errors(IndexError, TypeError):
lazy_value = self._lazy_value_list[index] lazy_value = self._lazy_value_list[index]
@@ -544,7 +544,7 @@ class FakeDict(_DictMixin, _FakeArray):
def py__iter__(self, valueualized_node=None): def py__iter__(self, valueualized_node=None):
for key in self._dct: for key in self._dct:
yield LazyKnownContext(compiled.create_simple_object(self.infer_state, key)) yield LazyKnownValue(compiled.create_simple_object(self.infer_state, key))
def py__simple_getitem__(self, index): def py__simple_getitem__(self, index):
if is_py3 and self.infer_state.environment.version_info.major == 2: if is_py3 and self.infer_state.environment.version_info.major == 2:
@@ -568,16 +568,16 @@ class FakeDict(_DictMixin, _FakeArray):
@publish_method('values') @publish_method('values')
def _values(self): def _values(self):
return ContextSet([FakeSequence( return ValueSet([FakeSequence(
self.infer_state, u'tuple', self.infer_state, u'tuple',
[LazyKnownContexts(self._dict_values())] [LazyKnownValues(self._dict_values())]
)]) )])
def _dict_values(self): def _dict_values(self):
return ContextSet.from_sets(lazy_value.infer() for lazy_value in self._dct.values()) return ValueSet.from_sets(lazy_value.infer() for lazy_value in self._dct.values())
def _dict_keys(self): def _dict_keys(self):
return ContextSet.from_sets(lazy_value.infer() for lazy_value in self.py__iter__()) return ValueSet.from_sets(lazy_value.infer() for lazy_value in self.py__iter__())
def get_mapping_item_values(self): def get_mapping_item_values(self):
return self._dict_keys(), self._dict_values() return self._dict_keys(), self._dict_values()
@@ -597,7 +597,7 @@ class MergedArray(_FakeArray):
yield lazy_value yield lazy_value
def py__simple_getitem__(self, index): def py__simple_getitem__(self, index):
return ContextSet.from_sets(lazy_value.infer() for lazy_value in self.py__iter__()) return ValueSet.from_sets(lazy_value.infer() for lazy_value in self.py__iter__())
def get_tree_entries(self): def get_tree_entries(self):
for array in self._arrays: for array in self._arrays:
@@ -744,10 +744,10 @@ def get_dynamic_array_instance(instance, arguments):
"""Used for set() and list() instances.""" """Used for set() and list() instances."""
ai = _ArrayInstance(instance, arguments) ai = _ArrayInstance(instance, arguments)
from jedi.inference import arguments from jedi.inference import arguments
return arguments.ValuesArguments([ContextSet([ai])]) return arguments.ValuesArguments([ValueSet([ai])])
class _ArrayInstance(HelperContextMixin): class _ArrayInstance(HelperValueMixin):
""" """
Used for the usage of set() and list(). Used for the usage of set() and list().
This is definitely a hack, but a good one :-) This is definitely a hack, but a good one :-)

View File

@@ -25,7 +25,7 @@ py__iter__() Returns a generator of a set of types.
py__class__() Returns the class of an instance. py__class__() Returns the class of an instance.
py__simple_getitem__(index: int/str) Returns a a set of types of the index. py__simple_getitem__(index: int/str) Returns a a set of types of the index.
Can raise an IndexError/KeyError. Can raise an IndexError/KeyError.
py__getitem__(indexes: ContextSet) Returns a a set of types of the index. py__getitem__(indexes: ValueSet) Returns a a set of types of the index.
py__file__() Only on modules. Returns None if does py__file__() Only on modules. Returns None if does
not exist. not exist.
py__package__() -> List[str] Only on modules. For the import system. py__package__() -> List[str] Only on modules. For the import system.
@@ -42,11 +42,11 @@ from jedi.parser_utils import get_cached_parent_scope
from jedi.inference.cache import infer_state_method_cache, CachedMetaClass, \ from jedi.inference.cache import infer_state_method_cache, CachedMetaClass, \
infer_state_method_generator_cache infer_state_method_generator_cache
from jedi.inference import compiled from jedi.inference import compiled
from jedi.inference.lazy_value import LazyKnownContexts from jedi.inference.lazy_value import LazyKnownValues
from jedi.inference.filters import ParserTreeFilter from jedi.inference.filters import ParserTreeFilter
from jedi.inference.names import TreeNameDefinition, ContextName from jedi.inference.names import TreeNameDefinition, ValueName
from jedi.inference.arguments import unpack_arglist, ValuesArguments from jedi.inference.arguments import unpack_arglist, ValuesArguments
from jedi.inference.base_value import ContextSet, iterator_to_value_set, \ from jedi.inference.base_value import ValueSet, iterator_to_value_set, \
NO_VALUES NO_VALUES
from jedi.inference.value.function import FunctionAndClassBase from jedi.inference.value.function import FunctionAndClassBase
from jedi.plugins import plugin_manager from jedi.plugins import plugin_manager
@@ -141,14 +141,14 @@ class ClassMixin(object):
from jedi.inference.value import TreeInstance from jedi.inference.value import TreeInstance
if arguments is None: if arguments is None:
arguments = ValuesArguments([]) arguments = ValuesArguments([])
return ContextSet([TreeInstance(self.infer_state, self.parent_value, self, arguments)]) return ValueSet([TreeInstance(self.infer_state, self.parent_value, self, arguments)])
def py__class__(self): def py__class__(self):
return compiled.builtin_from_name(self.infer_state, u'type') return compiled.builtin_from_name(self.infer_state, u'type')
@property @property
def name(self): def name(self):
return ContextName(self, self.tree_node.name) return ValueName(self, self.tree_node.name)
def py__name__(self): def py__name__(self):
return self.name.string_name return self.name.string_name
@@ -215,7 +215,7 @@ class ClassMixin(object):
if not is_instance: if not is_instance:
from jedi.inference.compiled import builtin_from_name from jedi.inference.compiled import builtin_from_name
type_ = builtin_from_name(self.infer_state, u'type') type_ = builtin_from_name(self.infer_state, u'type')
assert isinstance(type_, ClassContext) assert isinstance(type_, ClassValue)
if type_ != self: if type_ != self:
for instance in type_.py__call__(): for instance in type_.py__call__():
instance_filters = instance.get_filters() instance_filters = instance.get_filters()
@@ -237,7 +237,7 @@ class ClassMixin(object):
) )
class ClassContext(use_metaclass(CachedMetaClass, ClassMixin, FunctionAndClassBase)): class ClassValue(use_metaclass(CachedMetaClass, ClassMixin, FunctionAndClassBase)):
api_type = u'class' api_type = u'class'
@infer_state_method_cache() @infer_state_method_cache()
@@ -276,15 +276,15 @@ class ClassContext(use_metaclass(CachedMetaClass, ClassMixin, FunctionAndClassBa
if self.py__name__() == 'object' \ if self.py__name__() == 'object' \
and self.parent_value == self.infer_state.builtins_module: and self.parent_value == self.infer_state.builtins_module:
return [] return []
return [LazyKnownContexts( return [LazyKnownValues(
self.infer_state.builtins_module.py__getattribute__('object') self.infer_state.builtins_module.py__getattribute__('object')
)] )]
def py__getitem__(self, index_value_set, valueualized_node): def py__getitem__(self, index_value_set, valueualized_node):
from jedi.inference.gradual.typing import LazyGenericClass from jedi.inference.gradual.typing import LazyGenericClass
if not index_value_set: if not index_value_set:
return ContextSet([self]) return ValueSet([self])
return ContextSet( return ValueSet(
LazyGenericClass( LazyGenericClass(
self, self,
index_value, index_value,
@@ -310,11 +310,11 @@ class ClassContext(use_metaclass(CachedMetaClass, ClassMixin, FunctionAndClassBa
yield type_var_dict.get(type_var.py__name__(), NO_VALUES) yield type_var_dict.get(type_var.py__name__(), NO_VALUES)
if type_var_dict: if type_var_dict:
return ContextSet([GenericClass( return ValueSet([GenericClass(
self, self,
generics=tuple(remap_type_vars()) generics=tuple(remap_type_vars())
)]) )])
return ContextSet({self}) return ValueSet({self})
@plugin_manager.decorate() @plugin_manager.decorate()
def get_metaclass_filters(self, metaclass): def get_metaclass_filters(self, metaclass):
@@ -326,8 +326,8 @@ class ClassContext(use_metaclass(CachedMetaClass, ClassMixin, FunctionAndClassBa
args = self._get_bases_arguments() args = self._get_bases_arguments()
if args is not None: if args is not None:
m = [value for key, value in args.unpack() if key == 'metaclass'] m = [value for key, value in args.unpack() if key == 'metaclass']
metaclasses = ContextSet.from_sets(lazy_value.infer() for lazy_value in m) metaclasses = ValueSet.from_sets(lazy_value.infer() for lazy_value in m)
metaclasses = ContextSet(m for m in metaclasses if m.is_class()) metaclasses = ValueSet(m for m in metaclasses if m.is_class())
if metaclasses: if metaclasses:
return metaclasses return metaclasses

View File

@@ -3,14 +3,14 @@ import os
from jedi import debug from jedi import debug
from jedi.inference.cache import infer_state_method_cache from jedi.inference.cache import infer_state_method_cache
from jedi.inference.names import ContextNameMixin, AbstractNameDefinition from jedi.inference.names import ValueNameMixin, AbstractNameDefinition
from jedi.inference.filters import GlobalNameFilter, ParserTreeFilter, DictFilter, MergedFilter from jedi.inference.filters import GlobalNameFilter, ParserTreeFilter, DictFilter, MergedFilter
from jedi.inference import compiled from jedi.inference import compiled
from jedi.inference.base_value import TreeContext from jedi.inference.base_value import TreeValue
from jedi.inference.names import SubModuleName from jedi.inference.names import SubModuleName
from jedi.inference.helpers import values_from_qualified_names from jedi.inference.helpers import values_from_qualified_names
from jedi.inference.compiled import create_simple_object from jedi.inference.compiled import create_simple_object
from jedi.inference.base_value import ContextSet from jedi.inference.base_value import ValueSet
class _ModuleAttributeName(AbstractNameDefinition): class _ModuleAttributeName(AbstractNameDefinition):
@@ -30,13 +30,13 @@ class _ModuleAttributeName(AbstractNameDefinition):
if self.parent_value.infer_state.environment.version_info.major == 2 \ if self.parent_value.infer_state.environment.version_info.major == 2 \
and not isinstance(s, bytes): and not isinstance(s, bytes):
s = s.encode('utf-8') s = s.encode('utf-8')
return ContextSet([ return ValueSet([
create_simple_object(self.parent_value.infer_state, s) create_simple_object(self.parent_value.infer_state, s)
]) ])
return compiled.get_string_value_set(self.parent_value.infer_state) return compiled.get_string_value_set(self.parent_value.infer_state)
class ModuleName(ContextNameMixin, AbstractNameDefinition): class ModuleName(ValueNameMixin, AbstractNameDefinition):
start_pos = 1, 0 start_pos = 1, 0
def __init__(self, value, name): def __init__(self, value, name):
@@ -132,7 +132,7 @@ class ModuleMixin(SubModuleDictMixin):
def _string_name(self): def _string_name(self):
""" This is used for the goto functions. """ """ This is used for the goto functions. """
# TODO It's ugly that we even use this, the name is usually well known # TODO It's ugly that we even use this, the name is usually well known
# ahead so just pass it when create a ModuleContext. # ahead so just pass it when create a ModuleValue.
if self._path is None: if self._path is None:
return '' # no path -> empty name return '' # no path -> empty name
else: else:
@@ -173,7 +173,7 @@ class ModuleMixin(SubModuleDictMixin):
).follow() ).follow()
for module in new: for module in new:
if isinstance(module, ModuleContext): if isinstance(module, ModuleValue):
modules += module.star_imports() modules += module.star_imports()
modules += new modules += new
return modules return modules
@@ -187,12 +187,12 @@ class ModuleMixin(SubModuleDictMixin):
return () return ()
class ModuleContext(ModuleMixin, TreeContext): class ModuleValue(ModuleMixin, TreeValue):
api_type = u'module' api_type = u'module'
parent_value = None parent_value = None
def __init__(self, infer_state, module_node, file_io, string_names, code_lines, is_package=False): def __init__(self, infer_state, module_node, file_io, string_names, code_lines, is_package=False):
super(ModuleContext, self).__init__( super(ModuleValue, self).__init__(
infer_state, infer_state,
parent_value=None, parent_value=None,
tree_node=module_node tree_node=module_node
@@ -210,9 +210,9 @@ class ModuleContext(ModuleMixin, TreeContext):
if self._path is not None and self._path.endswith('.pyi'): if self._path is not None and self._path.endswith('.pyi'):
# Currently this is the way how we identify stubs when e.g. goto is # Currently this is the way how we identify stubs when e.g. goto is
# used in them. This could be changed if stubs would be identified # used in them. This could be changed if stubs would be identified
# sooner and used as StubModuleContext. # sooner and used as StubModuleValue.
return True return True
return super(ModuleContext, self).is_stub() return super(ModuleValue, self).is_stub()
def py__name__(self): def py__name__(self):
if self.string_names is None: if self.string_names is None:

View File

@@ -1,11 +1,11 @@
from jedi.inference.cache import infer_state_method_cache from jedi.inference.cache import infer_state_method_cache
from jedi.inference.filters import DictFilter from jedi.inference.filters import DictFilter
from jedi.inference.names import ContextNameMixin, AbstractNameDefinition from jedi.inference.names import ValueNameMixin, AbstractNameDefinition
from jedi.inference.base_value import Context from jedi.inference.base_value import Value
from jedi.inference.value.module import SubModuleDictMixin from jedi.inference.value.module import SubModuleDictMixin
class ImplicitNSName(ContextNameMixin, AbstractNameDefinition): class ImplicitNSName(ValueNameMixin, AbstractNameDefinition):
""" """
Accessing names for implicit namespace packages should infer to nothing. Accessing names for implicit namespace packages should infer to nothing.
This object will prevent Jedi from raising exceptions This object will prevent Jedi from raising exceptions
@@ -15,7 +15,7 @@ class ImplicitNSName(ContextNameMixin, AbstractNameDefinition):
self.string_name = string_name self.string_name = string_name
class ImplicitNamespaceContext(Context, SubModuleDictMixin): class ImplicitNamespaceValue(Value, SubModuleDictMixin):
""" """
Provides support for implicit namespace packages Provides support for implicit namespace packages
""" """
@@ -26,7 +26,7 @@ class ImplicitNamespaceContext(Context, SubModuleDictMixin):
parent_value = None parent_value = None
def __init__(self, infer_state, fullname, paths): def __init__(self, infer_state, fullname, paths):
super(ImplicitNamespaceContext, self).__init__(infer_state, parent_value=None) super(ImplicitNamespaceValue, self).__init__(infer_state, parent_value=None)
self.infer_state = infer_state self.infer_state = infer_state
self._fullname = fullname self._fullname = fullname
self._paths = paths self._paths = paths

View File

@@ -21,16 +21,16 @@ from jedi.inference.arguments import ValuesArguments, \
from jedi.inference import analysis from jedi.inference import analysis
from jedi.inference import compiled from jedi.inference import compiled
from jedi.inference.value.instance import BoundMethod, InstanceArguments from jedi.inference.value.instance import BoundMethod, InstanceArguments
from jedi.inference.base_value import ContextualizedNode, \ from jedi.inference.base_value import ValueualizedNode, \
NO_VALUES, ContextSet, ContextWrapper, LazyContextWrapper NO_VALUES, ValueSet, ValueWrapper, LazyValueWrapper
from jedi.inference.value import ClassContext, ModuleContext, \ from jedi.inference.value import ClassValue, ModuleValue, \
FunctionExecutionContext FunctionExecutionValue
from jedi.inference.value.klass import ClassMixin from jedi.inference.value.klass import ClassMixin
from jedi.inference.value.function import FunctionMixin from jedi.inference.value.function import FunctionMixin
from jedi.inference.value import iterable from jedi.inference.value import iterable
from jedi.inference.lazy_value import LazyTreeContext, LazyKnownContext, \ from jedi.inference.lazy_value import LazyTreeValue, LazyKnownValue, \
LazyKnownContexts LazyKnownValues
from jedi.inference.names import ContextName, BaseTreeParamName from jedi.inference.names import ValueName, BaseTreeParamName
from jedi.inference.syntax_tree import is_string from jedi.inference.syntax_tree import is_string
from jedi.inference.filters import AttributeOverwrite, publish_method, \ from jedi.inference.filters import AttributeOverwrite, publish_method, \
ParserTreeFilter, DictFilter ParserTreeFilter, DictFilter
@@ -132,7 +132,7 @@ def execute(callback):
) )
elif value.py__name__() in ('deleter', 'getter', 'setter'): elif value.py__name__() in ('deleter', 'getter', 'setter'):
if value.class_value.py__name__() == 'property': if value.class_value.py__name__() == 'property':
return ContextSet([value.instance]) return ValueSet([value.instance])
return call() return call()
@@ -243,7 +243,7 @@ def builtins_type(objects, bases, dicts):
return objects.py__class__() return objects.py__class__()
class SuperInstance(LazyContextWrapper): class SuperInstance(LazyValueWrapper):
"""To be used like the object ``super`` returns.""" """To be used like the object ``super`` returns."""
def __init__(self, infer_state, instance): def __init__(self, infer_state, instance):
self.infer_state = infer_state self.infer_state = infer_state
@@ -269,12 +269,12 @@ class SuperInstance(LazyContextWrapper):
@argument_clinic('[type[, obj]], /', want_value=True) @argument_clinic('[type[, obj]], /', want_value=True)
def builtins_super(types, objects, value): def builtins_super(types, objects, value):
if isinstance(value, FunctionExecutionContext): if isinstance(value, FunctionExecutionValue):
if isinstance(value.var_args, InstanceArguments): if isinstance(value.var_args, InstanceArguments):
instance = value.var_args.instance instance = value.var_args.instance
# TODO if a class is given it doesn't have to be the direct super # TODO if a class is given it doesn't have to be the direct super
# class, it can be an anecestor from long ago. # class, it can be an anecestor from long ago.
return ContextSet({SuperInstance(instance.infer_state, instance)}) return ValueSet({SuperInstance(instance.infer_state, instance)})
return NO_VALUES return NO_VALUES
@@ -291,7 +291,7 @@ class ReversedObject(AttributeOverwrite):
@publish_method('next', python_version_match=2) @publish_method('next', python_version_match=2)
@publish_method('__next__', python_version_match=3) @publish_method('__next__', python_version_match=3)
def py__next__(self): def py__next__(self):
return ContextSet.from_sets( return ValueSet.from_sets(
lazy_value.infer() for lazy_value in self._iter_list lazy_value.infer() for lazy_value in self._iter_list
) )
@@ -303,9 +303,9 @@ def builtins_reversed(sequences, obj, arguments):
# values again. # values again.
key, lazy_value = next(arguments.unpack()) key, lazy_value = next(arguments.unpack())
cn = None cn = None
if isinstance(lazy_value, LazyTreeContext): if isinstance(lazy_value, LazyTreeValue):
# TODO access private # TODO access private
cn = ContextualizedNode(lazy_value.value, lazy_value.data) cn = ValueualizedNode(lazy_value.value, lazy_value.data)
ordered = list(sequences.iterate(cn)) ordered = list(sequences.iterate(cn))
# Repack iterator values and then run it the normal way. This is # Repack iterator values and then run it the normal way. This is
@@ -313,7 +313,7 @@ def builtins_reversed(sequences, obj, arguments):
# would fail in certain cases like `reversed(x).__iter__` if we # would fail in certain cases like `reversed(x).__iter__` if we
# just returned the result directly. # just returned the result directly.
seq, = obj.infer_state.typing_module.py__getattribute__('Iterator').execute_with_values() seq, = obj.infer_state.typing_module.py__getattribute__('Iterator').execute_with_values()
return ContextSet([ReversedObject(seq, list(reversed(ordered)))]) return ValueSet([ReversedObject(seq, list(reversed(ordered)))])
@argument_clinic('obj, type, /', want_arguments=True, want_infer_state=True) @argument_clinic('obj, type, /', want_arguments=True, want_infer_state=True)
@@ -338,40 +338,40 @@ def builtins_isinstance(objects, types, arguments, infer_state):
elif cls_or_tup.name.string_name == 'tuple' \ elif cls_or_tup.name.string_name == 'tuple' \
and cls_or_tup.get_root_value() == infer_state.builtins_module: and cls_or_tup.get_root_value() == infer_state.builtins_module:
# Check for tuples. # Check for tuples.
classes = ContextSet.from_sets( classes = ValueSet.from_sets(
lazy_value.infer() lazy_value.infer()
for lazy_value in cls_or_tup.iterate() for lazy_value in cls_or_tup.iterate()
) )
bool_results.add(any(cls in mro for cls in classes)) bool_results.add(any(cls in mro for cls in classes))
else: else:
_, lazy_value = list(arguments.unpack())[1] _, lazy_value = list(arguments.unpack())[1]
if isinstance(lazy_value, LazyTreeContext): if isinstance(lazy_value, LazyTreeValue):
node = lazy_value.data node = lazy_value.data
message = 'TypeError: isinstance() arg 2 must be a ' \ message = 'TypeError: isinstance() arg 2 must be a ' \
'class, type, or tuple of classes and types, ' \ 'class, type, or tuple of classes and types, ' \
'not %s.' % cls_or_tup 'not %s.' % cls_or_tup
analysis.add(lazy_value.value, 'type-error-isinstance', node, message) analysis.add(lazy_value.value, 'type-error-isinstance', node, message)
return ContextSet( return ValueSet(
compiled.builtin_from_name(infer_state, force_unicode(str(b))) compiled.builtin_from_name(infer_state, force_unicode(str(b)))
for b in bool_results for b in bool_results
) )
class StaticMethodObject(AttributeOverwrite, ContextWrapper): class StaticMethodObject(AttributeOverwrite, ValueWrapper):
def get_object(self): def get_object(self):
return self._wrapped_value return self._wrapped_value
def py__get__(self, instance, klass): def py__get__(self, instance, klass):
return ContextSet([self._wrapped_value]) return ValueSet([self._wrapped_value])
@argument_clinic('sequence, /') @argument_clinic('sequence, /')
def builtins_staticmethod(functions): def builtins_staticmethod(functions):
return ContextSet(StaticMethodObject(f) for f in functions) return ValueSet(StaticMethodObject(f) for f in functions)
class ClassMethodObject(AttributeOverwrite, ContextWrapper): class ClassMethodObject(AttributeOverwrite, ValueWrapper):
def __init__(self, class_method_obj, function): def __init__(self, class_method_obj, function):
super(ClassMethodObject, self).__init__(class_method_obj) super(ClassMethodObject, self).__init__(class_method_obj)
self._function = function self._function = function
@@ -380,13 +380,13 @@ class ClassMethodObject(AttributeOverwrite, ContextWrapper):
return self._wrapped_value return self._wrapped_value
def py__get__(self, obj, class_value): def py__get__(self, obj, class_value):
return ContextSet([ return ValueSet([
ClassMethodGet(__get__, class_value, self._function) ClassMethodGet(__get__, class_value, self._function)
for __get__ in self._wrapped_value.py__getattribute__('__get__') for __get__ in self._wrapped_value.py__getattribute__('__get__')
]) ])
class ClassMethodGet(AttributeOverwrite, ContextWrapper): class ClassMethodGet(AttributeOverwrite, ValueWrapper):
def __init__(self, get_method, klass, function): def __init__(self, get_method, klass, function):
super(ClassMethodGet, self).__init__(get_method) super(ClassMethodGet, self).__init__(get_method)
self._class = klass self._class = klass
@@ -408,14 +408,14 @@ class ClassMethodArguments(TreeArgumentsWrapper):
self._class = klass self._class = klass
def unpack(self, func=None): def unpack(self, func=None):
yield None, LazyKnownContext(self._class) yield None, LazyKnownValue(self._class)
for values in self._wrapped_arguments.unpack(func): for values in self._wrapped_arguments.unpack(func):
yield values yield values
@argument_clinic('sequence, /', want_obj=True, want_arguments=True) @argument_clinic('sequence, /', want_obj=True, want_arguments=True)
def builtins_classmethod(functions, obj, arguments): def builtins_classmethod(functions, obj, arguments):
return ContextSet( return ValueSet(
ClassMethodObject(class_method_object, function) ClassMethodObject(class_method_object, function)
for class_method_object in obj.py__call__(arguments=arguments) for class_method_object in obj.py__call__(arguments=arguments)
for function in functions for function in functions
@@ -472,14 +472,14 @@ def collections_namedtuple(obj, arguments, callback):
# Parse source code # Parse source code
module = infer_state.grammar.parse(code) module = infer_state.grammar.parse(code)
generated_class = next(module.iter_classdefs()) generated_class = next(module.iter_classdefs())
parent_value = ModuleContext( parent_value = ModuleValue(
infer_state, module, infer_state, module,
file_io=None, file_io=None,
string_names=None, string_names=None,
code_lines=parso.split_lines(code, keepends=True), code_lines=parso.split_lines(code, keepends=True),
) )
return ContextSet([ClassContext(infer_state, parent_value, generated_class)]) return ValueSet([ClassValue(infer_state, parent_value, generated_class)])
class PartialObject(object): class PartialObject(object):
@@ -550,7 +550,7 @@ class MergedPartialArguments(AbstractArguments):
def functools_partial(obj, arguments, callback): def functools_partial(obj, arguments, callback):
return ContextSet( return ValueSet(
PartialObject(instance, arguments) PartialObject(instance, arguments)
for instance in obj.py__call__(arguments) for instance in obj.py__call__(arguments)
) )
@@ -563,7 +563,7 @@ def _return_first_param(firsts):
@argument_clinic('seq') @argument_clinic('seq')
def _random_choice(sequences): def _random_choice(sequences):
return ContextSet.from_sets( return ValueSet.from_sets(
lazy_value.infer() lazy_value.infer()
for sequence in sequences for sequence in sequences
for lazy_value in sequence.py__iter__() for lazy_value in sequence.py__iter__()
@@ -573,13 +573,13 @@ def _random_choice(sequences):
def _dataclass(obj, arguments, callback): def _dataclass(obj, arguments, callback):
for c in _follow_param(obj.infer_state, arguments, 0): for c in _follow_param(obj.infer_state, arguments, 0):
if c.is_class(): if c.is_class():
return ContextSet([DataclassWrapper(c)]) return ValueSet([DataclassWrapper(c)])
else: else:
return ContextSet([obj]) return ValueSet([obj])
return NO_VALUES return NO_VALUES
class DataclassWrapper(ContextWrapper, ClassMixin): class DataclassWrapper(ValueWrapper, ClassMixin):
def get_signatures(self): def get_signatures(self):
param_names = [] param_names = []
for cls in reversed(list(self.py__mro__())): for cls in reversed(list(self.py__mro__())):
@@ -630,7 +630,7 @@ class DataclassParamName(BaseTreeParamName):
return self.parent_value.infer_node(self.annotation_node) return self.parent_value.infer_node(self.annotation_node)
class ItemGetterCallable(ContextWrapper): class ItemGetterCallable(ValueWrapper):
def __init__(self, instance, args_value_set): def __init__(self, instance, args_value_set):
super(ItemGetterCallable, self).__init__(instance) super(ItemGetterCallable, self).__init__(instance)
self._args_value_set = args_value_set self._args_value_set = args_value_set
@@ -644,11 +644,11 @@ class ItemGetterCallable(ContextWrapper):
# TODO we need to add the valueualized value. # TODO we need to add the valueualized value.
value_set |= item_value_set.get_item(lazy_values[0].infer(), None) value_set |= item_value_set.get_item(lazy_values[0].infer(), None)
else: else:
value_set |= ContextSet([iterable.FakeSequence( value_set |= ValueSet([iterable.FakeSequence(
self._wrapped_value.infer_state, self._wrapped_value.infer_state,
'list', 'list',
[ [
LazyKnownContexts(item_value_set.get_item(lazy_value.infer(), None)) LazyKnownValues(item_value_set.get_item(lazy_value.infer(), None))
for lazy_value in lazy_values for lazy_value in lazy_values
], ],
)]) )])
@@ -657,19 +657,19 @@ class ItemGetterCallable(ContextWrapper):
@argument_clinic('func, /') @argument_clinic('func, /')
def _functools_wraps(funcs): def _functools_wraps(funcs):
return ContextSet(WrapsCallable(func) for func in funcs) return ValueSet(WrapsCallable(func) for func in funcs)
class WrapsCallable(ContextWrapper): class WrapsCallable(ValueWrapper):
# XXX this is not the correct wrapped value, it should be a weird # XXX this is not the correct wrapped value, it should be a weird
# partials object, but it doesn't matter, because it's always used as a # partials object, but it doesn't matter, because it's always used as a
# decorator anyway. # decorator anyway.
@repack_with_argument_clinic('func, /') @repack_with_argument_clinic('func, /')
def py__call__(self, funcs): def py__call__(self, funcs):
return ContextSet({Wrapped(func, self._wrapped_value) for func in funcs}) return ValueSet({Wrapped(func, self._wrapped_value) for func in funcs})
class Wrapped(ContextWrapper, FunctionMixin): class Wrapped(ValueWrapper, FunctionMixin):
def __init__(self, func, original_function): def __init__(self, func, original_function):
super(Wrapped, self).__init__(func) super(Wrapped, self).__init__(func)
self._original_function = original_function self._original_function = original_function
@@ -684,7 +684,7 @@ class Wrapped(ContextWrapper, FunctionMixin):
@argument_clinic('*args, /', want_obj=True, want_arguments=True) @argument_clinic('*args, /', want_obj=True, want_arguments=True)
def _operator_itemgetter(args_value_set, obj, arguments): def _operator_itemgetter(args_value_set, obj, arguments):
return ContextSet([ return ValueSet([
ItemGetterCallable(instance, args_value_set) ItemGetterCallable(instance, args_value_set)
for instance in obj.py__call__(arguments) for instance in obj.py__call__(arguments)
]) ])
@@ -699,7 +699,7 @@ def _create_string_input_function(func):
if s is not None: if s is not None:
s = func(s) s = func(s)
yield compiled.create_simple_object(value.infer_state, s) yield compiled.create_simple_object(value.infer_state, s)
values = ContextSet(iterate()) values = ValueSet(iterate())
if values: if values:
return values return values
return obj.py__call__(arguments) return obj.py__call__(arguments)
@@ -724,7 +724,7 @@ def _os_path_join(args_set, callback):
string += force_unicode(s) string += force_unicode(s)
is_first = False is_first = False
else: else:
return ContextSet([compiled.create_simple_object(sequence.infer_state, string)]) return ValueSet([compiled.create_simple_object(sequence.infer_state, string)])
return callback() return callback()
@@ -801,7 +801,7 @@ def get_metaclass_filters(func):
return wrapper return wrapper
class EnumInstance(LazyContextWrapper): class EnumInstance(LazyValueWrapper):
def __init__(self, cls, name): def __init__(self, cls, name):
self.infer_state = cls.infer_state self.infer_state = cls.infer_state
self._cls = cls # Corresponds to super().__self__ self._cls = cls # Corresponds to super().__self__
@@ -810,7 +810,7 @@ class EnumInstance(LazyContextWrapper):
@safe_property @safe_property
def name(self): def name(self):
return ContextName(self, self._name.tree_name) return ValueName(self, self._name.tree_name)
def _get_wrapped_value(self): def _get_wrapped_value(self):
obj, = self._cls.execute_with_values() obj, = self._cls.execute_with_values()
@@ -828,7 +828,7 @@ class EnumInstance(LazyContextWrapper):
def tree_name_to_values(func): def tree_name_to_values(func):
def wrapper(infer_state, value, tree_name): def wrapper(infer_state, value, tree_name):
if tree_name.value == 'sep' and value.is_module() and value.py__name__() == 'os.path': if tree_name.value == 'sep' and value.is_module() and value.py__name__() == 'os.path':
return ContextSet({ return ValueSet({
compiled.create_simple_object(infer_state, os.path.sep), compiled.create_simple_object(infer_state, os.path.sep),
}) })
return func(infer_state, value, tree_name) return func(infer_state, value, tree_name)

View File

@@ -8,7 +8,7 @@ import pytest
import jedi import jedi
from jedi import __doc__ as jedi_doc from jedi import __doc__ as jedi_doc
from jedi.inference.compiled import CompiledContextName from jedi.inference.compiled import CompiledValueName
def test_is_keyword(Script): def test_is_keyword(Script):
@@ -436,7 +436,7 @@ def test_builtin_module_with_path(Script):
confusing. confusing.
""" """
semlock, = Script('from _multiprocessing import SemLock').goto_definitions() semlock, = Script('from _multiprocessing import SemLock').goto_definitions()
assert isinstance(semlock._name, CompiledContextName) assert isinstance(semlock._name, CompiledValueName)
assert semlock.module_path is None assert semlock.module_path is None
assert semlock.in_builtin_module() is True assert semlock.in_builtin_module() is True
assert semlock.name == 'SemLock' assert semlock.name == 'SemLock'

View File

@@ -4,8 +4,8 @@ import pytest
from parso.utils import PythonVersionInfo from parso.utils import PythonVersionInfo
from jedi.inference.gradual import typeshed, stub_value from jedi.inference.gradual import typeshed, stub_value
from jedi.inference.value import TreeInstance, BoundMethod, FunctionContext, \ from jedi.inference.value import TreeInstance, BoundMethod, FunctionValue, \
MethodContext, ClassContext MethodValue, ClassValue
TYPESHED_PYTHON3 = os.path.join(typeshed.TYPESHED_PATH, 'stdlib', '3') TYPESHED_PYTHON3 = os.path.join(typeshed.TYPESHED_PATH, 'stdlib', '3')
@@ -48,14 +48,14 @@ def test_function(Script, environment):
     code = 'import threading; threading.current_thread'
     def_, = Script(code).goto_definitions()
     value = def_._name._value
-    assert isinstance(value, FunctionContext), value
+    assert isinstance(value, FunctionValue), value
     def_, = Script(code + '()').goto_definitions()
     value = def_._name._value
     assert isinstance(value, TreeInstance)
     def_, = Script('import threading; threading.Thread').goto_definitions()
-    assert isinstance(def_._name._value, ClassContext), def_
+    assert isinstance(def_._name._value, ClassValue), def_
 def test_keywords_variable(Script):
@@ -70,7 +70,7 @@ def test_keywords_variable(Script):
 def test_class(Script):
     def_, = Script('import threading; threading.Thread').goto_definitions()
     value = def_._name._value
-    assert isinstance(value, ClassContext), value
+    assert isinstance(value, ClassValue), value
 def test_instance(Script):
@@ -82,7 +82,7 @@ def test_instance(Script):
 def test_class_function(Script):
     def_, = Script('import threading; threading.Thread.getName').goto_definitions()
     value = def_._name._value
-    assert isinstance(value, MethodContext), value
+    assert isinstance(value, MethodValue), value
 def test_method(Script):
@@ -90,7 +90,7 @@ def test_method(Script):
     def_, = Script(code).goto_definitions()
     value = def_._name._value
     assert isinstance(value, BoundMethod), value
-    assert isinstance(value._wrapped_value, MethodContext), value
+    assert isinstance(value._wrapped_value, MethodValue), value
     def_, = Script(code + '()').goto_definitions()
     value = def_._name._value

View File

@@ -1,9 +1,9 @@
 import pytest
 from jedi import settings
-from jedi.inference.names import ContextName
-from jedi.inference.compiled import CompiledContextName
-from jedi.inference.gradual.typeshed import StubModuleContext
+from jedi.inference.names import ValueName
+from jedi.inference.compiled import CompiledValueName
+from jedi.inference.gradual.typeshed import StubModuleValue
 @pytest.fixture()
@@ -13,14 +13,14 @@ def auto_import_json(monkeypatch):
 def test_base_auto_import_modules(auto_import_json, Script):
     loads, = Script('import json; json.loads').goto_definitions()
-    assert isinstance(loads._name, ContextName)
+    assert isinstance(loads._name, ValueName)
     value, = loads._name.infer()
-    assert isinstance(value.parent_value, StubModuleContext)
+    assert isinstance(value.parent_value, StubModuleValue)
 def test_auto_import_modules_imports(auto_import_json, Script):
     main, = Script('from json import tool; tool.main').goto_definitions()
-    assert isinstance(main._name, CompiledContextName)
+    assert isinstance(main._name, CompiledValueName)
 def test_additional_dynamic_modules(monkeypatch, Script):