diff --git a/docs/docs/development.rst b/docs/docs/development.rst index 545893c9..035ae32c 100644 --- a/docs/docs/development.rst +++ b/docs/docs/development.rst @@ -81,8 +81,8 @@ Inference Values (inference/base_value.py) .. inheritance-diagram:: jedi.inference.value.instance.TreeInstance jedi.inference.value.klass.Classvalue - jedi.inference.value.function.FunctionContext - jedi.inference.value.function.FunctionExecutionContext + jedi.inference.value.function.FunctionValue + jedi.inference.value.function.FunctionExecutionValue :parts: 1 diff --git a/jedi/api/__init__.py b/jedi/api/__init__.py index 55bea48f..b74364e3 100644 --- a/jedi/api/__init__.py +++ b/jedi/api/__init__.py @@ -36,8 +36,8 @@ from jedi.inference.helpers import get_module_names, infer_call_of_leaf from jedi.inference.sys_path import transform_path_to_dotted from jedi.inference.names import TreeNameDefinition, ParamName from jedi.inference.syntax_tree import tree_name_to_values -from jedi.inference.value import ModuleContext -from jedi.inference.base_value import ContextSet +from jedi.inference.value import ModuleValue +from jedi.inference.base_value import ValueSet from jedi.inference.value.iterable import unpack_tuple_to_dict from jedi.inference.gradual.conversion import convert_names, convert_values from jedi.inference.gradual.utils import load_proper_stub_module @@ -181,7 +181,7 @@ class Script(object): if names is None: names = ('__main__',) - module = ModuleContext( + module = ModuleValue( self._infer_state, self._module_node, file_io, string_names=names, code_lines=self._code_lines, @@ -189,7 +189,7 @@ class Script(object): ) if names[0] not in ('builtins', '__builtin__', 'typing'): # These modules are essential for Jedi, so don't overwrite them. 
- self._infer_state.module_cache.add(names, ContextSet([module])) + self._infer_state.module_cache.add(names, ValueSet([module])) return module def __repr__(self): @@ -470,7 +470,7 @@ class Interpreter(Script): self._infer_state.allow_descriptor_getattr = self._allow_descriptor_getattr_default def _get_module(self): - return interpreter.MixedModuleContext( + return interpreter.MixedModuleValue( self._infer_state, self._module_node, self.namespaces, diff --git a/jedi/api/classes.py b/jedi/api/classes.py index ffc9850b..bb96a881 100644 --- a/jedi/api/classes.py +++ b/jedi/api/classes.py @@ -14,10 +14,10 @@ from jedi.cache import memoize_method from jedi.inference import imports from jedi.inference import compiled from jedi.inference.imports import ImportName -from jedi.inference.value import FunctionExecutionContext -from jedi.inference.gradual.typeshed import StubModuleContext +from jedi.inference.value import FunctionExecutionValue +from jedi.inference.gradual.typeshed import StubModuleValue from jedi.inference.gradual.conversion import convert_names, convert_values -from jedi.inference.base_value import ContextSet +from jedi.inference.base_value import ValueSet from jedi.api.keywords import KeywordName @@ -187,7 +187,7 @@ class BaseDefinition(object): def in_builtin_module(self): """Whether this is a builtin module.""" - if isinstance(self._get_module(), StubModuleContext): + if isinstance(self._get_module(), StubModuleValue): return any(isinstance(value, compiled.CompiledObject) for value in self._get_module().non_stub_value_set) return isinstance(self._get_module(), compiled.CompiledObject) @@ -324,7 +324,7 @@ class BaseDefinition(object): # results of Python objects instead of stubs. 
names = convert_names([self._name], prefer_stubs=True) values = convert_values( - ContextSet.from_sets(n.infer() for n in names), + ValueSet.from_sets(n.infer() for n in names), only_stubs=only_stubs, prefer_stubs=prefer_stubs, ) @@ -364,7 +364,7 @@ class BaseDefinition(object): if value is None: return None - if isinstance(value, FunctionExecutionContext): + if isinstance(value, FunctionExecutionValue): value = value.function_value return Definition(self._infer_state, value.name) @@ -755,7 +755,7 @@ class _Help(object): if not raw: signature_text = _format_signatures(value) if not doc and value.is_stub(): - for c in convert_values(ContextSet({value}), ignore_compiled=False): + for c in convert_values(ValueSet({value}), ignore_compiled=False): doc = c.py__doc__() if doc: break diff --git a/jedi/api/interpreter.py b/jedi/api/interpreter.py index 12e7b11f..f7b932bf 100644 --- a/jedi/api/interpreter.py +++ b/jedi/api/interpreter.py @@ -2,11 +2,11 @@ TODO Some parts of this module are still not well documented. 
""" -from jedi.inference.value import ModuleContext +from jedi.inference.value import ModuleValue from jedi.inference import compiled from jedi.inference.compiled import mixed from jedi.inference.compiled.access import create_access_path -from jedi.inference.base_value import ContextWrapper +from jedi.inference.base_value import ValueWrapper def _create(infer_state, obj): @@ -20,17 +20,17 @@ class NamespaceObject(object): self.__dict__ = dct -class MixedModuleContext(ContextWrapper): +class MixedModuleValue(ValueWrapper): type = 'mixed_module' def __init__(self, infer_state, tree_module, namespaces, file_io, code_lines): - module_value = ModuleContext( + module_value = ModuleValue( infer_state, tree_module, file_io=file_io, string_names=('__main__',), code_lines=code_lines ) - super(MixedModuleContext, self).__init__(module_value) + super(MixedModuleValue, self).__init__(module_value) self._namespace_objects = [NamespaceObject(n) for n in namespaces] def get_filters(self, *args, **kwargs): diff --git a/jedi/api/keywords.py b/jedi/api/keywords.py index 9377ae6a..e1ce9dc9 100644 --- a/jedi/api/keywords.py +++ b/jedi/api/keywords.py @@ -44,7 +44,7 @@ class Keyword(object): def get_signatures(self): # TODO this makes no sense, I think Keyword should somehow merge with - # Context to make it easier for the api/classes.py to deal with all + # Value to make it easier for the api/classes.py to deal with all # of it. 
return [] diff --git a/jedi/common/__init__.py b/jedi/common/__init__.py index 82985ee2..817cde41 100644 --- a/jedi/common/__init__.py +++ b/jedi/common/__init__.py @@ -1 +1 @@ -from jedi.common.value import BaseContextSet, BaseContext +from jedi.common.value import BaseValueSet, BaseValue diff --git a/jedi/common/value.py b/jedi/common/value.py index 66e7e0bb..a6274d17 100644 --- a/jedi/common/value.py +++ b/jedi/common/value.py @@ -1,4 +1,4 @@ -class BaseContext(object): +class BaseValue(object): def __init__(self, infer_state, parent_value=None): self.infer_state = infer_state self.parent_value = parent_value @@ -11,11 +11,11 @@ class BaseContext(object): value = value.parent_value -class BaseContextSet(object): +class BaseValueSet(object): def __init__(self, iterable): self._set = frozenset(iterable) for value in iterable: - assert not isinstance(value, BaseContextSet) + assert not isinstance(value, BaseValueSet) @classmethod def _from_frozen_set(cls, frozenset_): @@ -30,7 +30,7 @@ class BaseContextSet(object): """ aggregated = set() for set_ in sets: - if isinstance(set_, BaseContextSet): + if isinstance(set_, BaseValueSet): aggregated |= set_._set else: aggregated |= frozenset(set_) diff --git a/jedi/inference/__init__.py b/jedi/inference/__init__.py index b08ace07..ecdaab6a 100644 --- a/jedi/inference/__init__.py +++ b/jedi/inference/__init__.py @@ -33,7 +33,7 @@ return the ``date`` class. To *visualize* this (simplified): - ``InferState.infer_expr_stmt`` doesn't do much, because there's no assignment. -- ``Context.infer_node`` cares for resolving the dotted path +- ``Value.infer_node`` cares for resolving the dotted path - ``InferState.find_types`` searches for global definitions of datetime, which it finds in the definition of an import, by scanning the syntax tree. - Using the import logic, the datetime module is found. 
@@ -75,11 +75,11 @@ from jedi.inference import recursion from jedi.inference.cache import infer_state_function_cache from jedi.inference import helpers from jedi.inference.names import TreeNameDefinition, ParamName -from jedi.inference.base_value import ContextualizedName, ContextualizedNode, \ - ContextSet, NO_VALUES, iterate_values -from jedi.inference.value import ClassContext, FunctionContext, \ +from jedi.inference.base_value import ValueualizedName, ValueualizedNode, \ + ValueSet, NO_VALUES, iterate_values +from jedi.inference.value import ClassValue, FunctionValue, \ AnonymousInstance, BoundMethod -from jedi.inference.value.iterable import CompForContext +from jedi.inference.value.iterable import CompForValue from jedi.inference.syntax_tree import infer_trailer, infer_expr_stmt, \ infer_node, check_tuple_assignments from jedi.plugins import plugin_manager @@ -97,7 +97,7 @@ class InferState(object): self.latest_grammar = parso.load_grammar(version='3.7') self.memoize_cache = {} # for memoize decorators self.module_cache = imports.ModuleCache() # does the job of `sys.modules`. 
- self.stub_module_cache = {} # Dict[Tuple[str, ...], Optional[ModuleContext]] + self.stub_module_cache = {} # Dict[Tuple[str, ...], Optional[ModuleValue]] self.compiled_cache = {} # see `inference.compiled.create()` self.inferred_element_counts = {} self.mixed_cache = {} # see `inference.compiled.mixed._create()` @@ -151,7 +151,7 @@ class InferState(object): return self.project._get_sys_path(self, environment=self.environment, **kwargs) def infer_element(self, value, element): - if isinstance(value, CompForContext): + if isinstance(value, CompForValue): return infer_node(value, element) if_stmt = element @@ -201,7 +201,7 @@ class InferState(object): new_name_dicts = list(original_name_dicts) for i, name_dict in enumerate(new_name_dicts): new_name_dicts[i] = name_dict.copy() - new_name_dicts[i][if_name.value] = ContextSet([definition]) + new_name_dicts[i][if_name.value] = ValueSet([definition]) name_dicts += new_name_dicts else: @@ -244,10 +244,10 @@ class InferState(object): is_classdef = type_ == 'classdef' if is_classdef or type_ == 'funcdef': if is_classdef: - c = ClassContext(self, value, name.parent) + c = ClassValue(self, value, name.parent) else: - c = FunctionContext.from_value(value, name.parent) - return ContextSet([c]) + c = FunctionValue.from_value(value, name.parent) + return ValueSet([c]) if type_ == 'expr_stmt': is_simple_name = name.parent.type not in ('power', 'trailer') @@ -255,9 +255,9 @@ class InferState(object): return infer_expr_stmt(value, def_, name) if type_ == 'for_stmt': container_types = value.infer_node(def_.children[3]) - cn = ContextualizedNode(value, def_.children[3]) + cn = ValueualizedNode(value, def_.children[3]) for_types = iterate_values(container_types, cn) - c_node = ContextualizedName(value, name) + c_node = ValueualizedName(value, name) return check_tuple_assignments(self, c_node, for_types) if type_ in ('import_from', 'import_name'): return imports.infer_import(value, name) @@ -393,7 +393,7 @@ class InferState(object): 
parent_value = from_scope_node(parent_scope) if is_funcdef: - func = FunctionContext.from_value(parent_value, scope_node) + func = FunctionValue.from_value(parent_value, scope_node) if parent_value.is_class(): instance = AnonymousInstance( self, parent_value.parent_value, parent_value) @@ -406,11 +406,11 @@ class InferState(object): return func.get_function_execution() return func elif scope_node.type == 'classdef': - return ClassContext(self, parent_value, scope_node) + return ClassValue(self, parent_value, scope_node) elif scope_node.type in ('comp_for', 'sync_comp_for'): if node.start_pos >= scope_node.children[-1].start_pos: return parent_value - return CompForContext.from_comp_for(parent_value, scope_node) + return CompForValue.from_comp_for(parent_value, scope_node) raise Exception("There's a scope that was not managed.") base_node = base_value.tree_node diff --git a/jedi/inference/arguments.py b/jedi/inference/arguments.py index e25425d3..ec43178e 100644 --- a/jedi/inference/arguments.py +++ b/jedi/inference/arguments.py @@ -6,10 +6,10 @@ from jedi._compatibility import zip_longest from jedi import debug from jedi.inference.utils import PushBackIterator from jedi.inference import analysis -from jedi.inference.lazy_value import LazyKnownContext, LazyKnownContexts, \ - LazyTreeContext, get_merged_lazy_value +from jedi.inference.lazy_value import LazyKnownValue, LazyKnownValues, \ + LazyTreeValue, get_merged_lazy_value from jedi.inference.names import ParamName, TreeNameDefinition -from jedi.inference.base_value import NO_VALUES, ContextSet, ContextualizedNode +from jedi.inference.base_value import NO_VALUES, ValueSet, ValueualizedNode from jedi.inference.value import iterable from jedi.inference.cache import infer_state_as_method_param_cache from jedi.inference.param import get_executed_params_and_issues, ExecutedParam @@ -84,7 +84,7 @@ def _iterate_argument_clinic(infer_state, arguments, parameters): break lazy_values.append(argument) - yield 
ContextSet([iterable.FakeSequence(infer_state, u'tuple', lazy_values)]) + yield ValueSet([iterable.FakeSequence(infer_state, u'tuple', lazy_values)]) lazy_values continue elif stars == 2: @@ -234,7 +234,7 @@ class TreeArguments(AbstractArguments): if el.type == 'argument': c = el.children if len(c) == 3: # Keyword argument. - named_args.append((c[0].value, LazyTreeContext(self.value, c[2]),)) + named_args.append((c[0].value, LazyTreeValue(self.value, c[2]),)) else: # Generator comprehension. # Include the brackets with the parent. sync_comp_for = el.children[1] @@ -246,9 +246,9 @@ class TreeArguments(AbstractArguments): sync_comp_for_node=sync_comp_for, entry_node=el.children[0], ) - yield None, LazyKnownContext(comp) + yield None, LazyKnownValue(comp) else: - yield None, LazyTreeContext(self.value, el) + yield None, LazyTreeValue(self.value, el) # Reordering arguments is necessary, because star args sometimes appear # after named argument, but in the actual order it's prepended. @@ -302,9 +302,9 @@ class TreeArguments(AbstractArguments): break if arguments.argument_node is not None: - return [ContextualizedNode(arguments.value, arguments.argument_node)] + return [ValueualizedNode(arguments.value, arguments.argument_node)] if arguments.trailer is not None: - return [ContextualizedNode(arguments.value, arguments.trailer)] + return [ValueualizedNode(arguments.value, arguments.trailer)] return [] @@ -314,7 +314,7 @@ class ValuesArguments(AbstractArguments): def unpack(self, funcdef=None): for values in self._values_list: - yield None, LazyKnownContexts(values) + yield None, LazyKnownValues(values) def __repr__(self): return '<%s: %s>' % (self.__class__.__name__, self._values_list) diff --git a/jedi/inference/base_value.py b/jedi/inference/base_value.py index a66d982f..88fb0e4c 100644 --- a/jedi/inference/base_value.py +++ b/jedi/inference/base_value.py @@ -1,8 +1,8 @@ """ -Contexts are the "values" that Python would return. 
However Contexts are at the +Values are the "values" that Python would return. However Values are at the same time also the "values" that a user is currently sitting in. -A ContextSet is typically used to specify the return of a function or any other +A ValueSet is typically used to specify the return of a function or any other static analysis operation. In jedi there are always multiple returns and not just one. """ @@ -13,7 +13,7 @@ from parso.python.tree import ExprStmt, SyncCompFor from jedi import debug from jedi._compatibility import zip_longest, unicode from jedi.parser_utils import clean_scope_docstring -from jedi.common import BaseContextSet, BaseContext +from jedi.common import BaseValueSet, BaseValue from jedi.inference.helpers import SimpleGetItemNotFound from jedi.inference.utils import safe_property from jedi.inference.cache import infer_state_as_method_param_cache @@ -22,7 +22,7 @@ from jedi.cache import memoize_method _sentinel = object() -class HelperContextMixin(object): +class HelperValueMixin(object): def get_root_value(self): value = self while True: @@ -40,17 +40,17 @@ class HelperContextMixin(object): def execute_with_values(self, *value_list): from jedi.inference.arguments import ValuesArguments - arguments = ValuesArguments([ContextSet([value]) for value in value_list]) + arguments = ValuesArguments([ValueSet([value]) for value in value_list]) return self.infer_state.execute(self, arguments) def execute_annotation(self): return self.execute_with_values() def gather_annotation_classes(self): - return ContextSet([self]) + return ValueSet([self]) def merge_types_of_iterate(self, valueualized_node=None, is_async=False): - return ContextSet.from_sets( + return ValueSet.from_sets( lazy_value.infer() for lazy_value in self.iterate(valueualized_node, is_async) ) @@ -86,11 +86,11 @@ class HelperContextMixin(object): def iterate(self, valueualized_node=None, is_async=False): debug.dbg('iterate %s', self) if is_async: - from jedi.inference.lazy_value 
import LazyKnownContexts + from jedi.inference.lazy_value import LazyKnownValues # TODO if no __aiter__ values are there, error should be: # TypeError: 'async for' requires an object with __aiter__ method, got int return iter([ - LazyKnownContexts( + LazyKnownValues( self.py__getattribute__('__aiter__').execute_with_values() .py__getattribute__('__anext__').execute_with_values() .py__getattribute__('__await__').execute_with_values() @@ -107,12 +107,12 @@ class HelperContextMixin(object): def is_same_class(self, class2): # Class matching should prefer comparisons that are not this function. - if type(class2).is_same_class != HelperContextMixin.is_same_class: + if type(class2).is_same_class != HelperValueMixin.is_same_class: return class2.is_same_class(self) return self == class2 -class Context(HelperContextMixin, BaseContext): +class Value(HelperValueMixin, BaseValue): """ Should be defined, otherwise the API returns empty types. """ @@ -216,24 +216,24 @@ def iterate_values(values, valueualized_node=None, is_async=False): Calls `iterate`, on all values but ignores the ordering and just returns all values that the iterate functions yield. 
""" - return ContextSet.from_sets( + return ValueSet.from_sets( lazy_value.infer() for lazy_value in values.iterate(valueualized_node, is_async=is_async) ) -class _ContextWrapperBase(HelperContextMixin): +class _ValueWrapperBase(HelperValueMixin): predefined_names = {} @safe_property def name(self): - from jedi.inference.names import ContextName + from jedi.inference.names import ValueName wrapped_name = self._wrapped_value.name if wrapped_name.tree_name is not None: - return ContextName(self, wrapped_name.tree_name) + return ValueName(self, wrapped_name.tree_name) else: - from jedi.inference.compiled import CompiledContextName - return CompiledContextName(self, wrapped_name.string_name) + from jedi.inference.compiled import CompiledValueName + return CompiledValueName(self, wrapped_name.string_name) @classmethod @infer_state_as_method_param_cache() @@ -245,7 +245,7 @@ class _ContextWrapperBase(HelperContextMixin): return getattr(self._wrapped_value, name) -class LazyContextWrapper(_ContextWrapperBase): +class LazyValueWrapper(_ValueWrapperBase): @safe_property @memoize_method def _wrapped_value(self): @@ -259,7 +259,7 @@ class LazyContextWrapper(_ContextWrapperBase): raise NotImplementedError -class ContextWrapper(_ContextWrapperBase): +class ValueWrapper(_ValueWrapperBase): def __init__(self, wrapped_value): self._wrapped_value = wrapped_value @@ -267,9 +267,9 @@ class ContextWrapper(_ContextWrapperBase): return '%s(%s)' % (self.__class__.__name__, self._wrapped_value) -class TreeContext(Context): +class TreeValue(Value): def __init__(self, infer_state, parent_value, tree_node): - super(TreeContext, self).__init__(infer_state, parent_value) + super(TreeValue, self).__init__(infer_state, parent_value) self.predefined_names = {} self.tree_node = tree_node @@ -277,7 +277,7 @@ class TreeContext(Context): return '<%s: %s>' % (self.__class__.__name__, self.tree_node) -class ContextualizedNode(object): +class ValueualizedNode(object): def __init__(self, value, node): 
self.value = value self.node = node @@ -292,7 +292,7 @@ class ContextualizedNode(object): return '<%s: %s in %s>' % (self.__class__.__name__, self.node, self.value) -class ContextualizedName(ContextualizedNode): +class ValueualizedName(ValueualizedNode): # TODO merge with TreeNameDefinition?! @property def name(self): @@ -375,16 +375,16 @@ def _getitem(value, index_values, valueualized_node): # all results. if unused_values or not index_values: result |= value.py__getitem__( - ContextSet(unused_values), + ValueSet(unused_values), valueualized_node ) debug.dbg('py__getitem__ result: %s', result) return result -class ContextSet(BaseContextSet): +class ValueSet(BaseValueSet): def py__class__(self): - return ContextSet(c.py__class__() for c in self._set) + return ValueSet(c.py__class__() for c in self._set) def iterate(self, valueualized_node=None, is_async=False): from jedi.inference.lazy_value import get_merged_lazy_value @@ -395,18 +395,18 @@ class ContextSet(BaseContextSet): ) def execute(self, arguments): - return ContextSet.from_sets(c.infer_state.execute(c, arguments) for c in self._set) + return ValueSet.from_sets(c.infer_state.execute(c, arguments) for c in self._set) def execute_with_values(self, *args, **kwargs): - return ContextSet.from_sets(c.execute_with_values(*args, **kwargs) for c in self._set) + return ValueSet.from_sets(c.execute_with_values(*args, **kwargs) for c in self._set) def py__getattribute__(self, *args, **kwargs): if kwargs.get('is_goto'): return reduce(add, [c.py__getattribute__(*args, **kwargs) for c in self._set], []) - return ContextSet.from_sets(c.py__getattribute__(*args, **kwargs) for c in self._set) + return ValueSet.from_sets(c.py__getattribute__(*args, **kwargs) for c in self._set) def get_item(self, *args, **kwargs): - return ContextSet.from_sets(_getitem(c, *args, **kwargs) for c in self._set) + return ValueSet.from_sets(_getitem(c, *args, **kwargs) for c in self._set) def try_merge(self, function_name): value_set = 
self.__class__([]) @@ -420,17 +420,17 @@ class ContextSet(BaseContextSet): return value_set def gather_annotation_classes(self): - return ContextSet.from_sets([c.gather_annotation_classes() for c in self._set]) + return ValueSet.from_sets([c.gather_annotation_classes() for c in self._set]) def get_signatures(self): return [sig for c in self._set for sig in c.get_signatures()] -NO_VALUES = ContextSet([]) +NO_VALUES = ValueSet([]) def iterator_to_value_set(func): def wrapper(*args, **kwargs): - return ContextSet(func(*args, **kwargs)) + return ValueSet(func(*args, **kwargs)) return wrapper diff --git a/jedi/inference/compiled/__init__.py b/jedi/inference/compiled/__init__.py index e1e60ecb..5df74795 100644 --- a/jedi/inference/compiled/__init__.py +++ b/jedi/inference/compiled/__init__.py @@ -1,7 +1,7 @@ from jedi._compatibility import unicode from jedi.inference.compiled.value import CompiledObject, CompiledName, \ - CompiledObjectFilter, CompiledContextName, create_from_access_path -from jedi.inference.base_value import ContextWrapper, LazyContextWrapper + CompiledObjectFilter, CompiledValueName, create_from_access_path +from jedi.inference.base_value import ValueWrapper, LazyValueWrapper def builtin_from_name(infer_state, string): @@ -16,7 +16,7 @@ def builtin_from_name(infer_state, string): return value -class CompiledValue(LazyContextWrapper): +class CompiledValue(LazyValueWrapper): def __init__(self, compiled_obj): self.infer_state = compiled_obj.infer_state self._compiled_obj = compiled_obj diff --git a/jedi/inference/compiled/mixed.py b/jedi/inference/compiled/mixed.py index 29328304..9c660a84 100644 --- a/jedi/inference/compiled/mixed.py +++ b/jedi/inference/compiled/mixed.py @@ -12,9 +12,9 @@ from jedi import settings from jedi.inference import compiled from jedi.cache import underscore_memoization from jedi.file_io import FileIO -from jedi.inference.base_value import ContextSet, ContextWrapper +from jedi.inference.base_value import ValueSet, ValueWrapper 
from jedi.inference.helpers import SimpleGetItemNotFound -from jedi.inference.value import ModuleContext +from jedi.inference.value import ModuleValue from jedi.inference.cache import infer_state_function_cache from jedi.inference.compiled.getattr_static import getattr_static from jedi.inference.compiled.access import compiled_objects_cache, \ @@ -25,7 +25,7 @@ from jedi.inference.gradual.conversion import to_stub _sentinel = object() -class MixedObject(ContextWrapper): +class MixedObject(ValueWrapper): """ A ``MixedObject`` is used in two ways: @@ -104,10 +104,10 @@ class MixedName(compiled.CompiledName): assert len(access_paths) values = [None] for access in access_paths: - values = ContextSet.from_sets( + values = ValueSet.from_sets( _create(self._infer_state, access, parent_value=c) if c is None or isinstance(c, MixedObject) - else ContextSet({create_cached_compiled_object(c.infer_state, access, c)}) + else ValueSet({create_cached_compiled_object(c.infer_state, access, c)}) for c in values ) return values @@ -244,11 +244,11 @@ def _create(infer_state, access_handle, parent_value, *args): if result is None: # TODO Care about generics from stuff like `[1]` and don't return like this. if type(python_object) in (dict, list, tuple): - return ContextSet({compiled_object}) + return ValueSet({compiled_object}) tree_values = to_stub(compiled_object) if not tree_values: - return ContextSet({compiled_object}) + return ValueSet({compiled_object}) else: module_node, tree_node, file_io, code_lines = result @@ -256,7 +256,7 @@ def _create(infer_state, access_handle, parent_value, *args): # TODO this __name__ is probably wrong. 
name = compiled_object.get_root_value().py__name__() string_names = tuple(name.split('.')) - module_value = ModuleContext( + module_value = ModuleValue( infer_state, module_node, file_io=file_io, string_names=string_names, @@ -264,16 +264,16 @@ def _create(infer_state, access_handle, parent_value, *args): is_package=hasattr(compiled_object, 'py__path__'), ) if name is not None: - infer_state.module_cache.add(string_names, ContextSet([module_value])) + infer_state.module_cache.add(string_names, ValueSet([module_value])) else: if parent_value.tree_node.get_root_node() != module_node: # This happens e.g. when __module__ is wrong, or when using # TypeVar('foo'), where Jedi uses 'foo' as the name and # Python's TypeVar('foo').__module__ will be typing. - return ContextSet({compiled_object}) + return ValueSet({compiled_object}) module_value = parent_value.get_root_value() - tree_values = ContextSet({ + tree_values = ValueSet({ module_value.create_value( tree_node, node_is_value=True, @@ -285,7 +285,7 @@ def _create(infer_state, access_handle, parent_value, *args): # Is an instance, not a class. 
tree_values = tree_values.execute_with_values() - return ContextSet( + return ValueSet( MixedObject(compiled_object, tree_value=tree_value) for tree_value in tree_values ) diff --git a/jedi/inference/compiled/value.py b/jedi/inference/compiled/value.py index f061ee58..4f485278 100644 --- a/jedi/inference/compiled/value.py +++ b/jedi/inference/compiled/value.py @@ -9,10 +9,10 @@ from jedi.inference.utils import to_list from jedi._compatibility import force_unicode, Parameter, cast_path from jedi.cache import underscore_memoization, memoize_method from jedi.inference.filters import AbstractFilter -from jedi.inference.names import AbstractNameDefinition, ContextNameMixin, \ +from jedi.inference.names import AbstractNameDefinition, ValueNameMixin, \ ParamNameInterface -from jedi.inference.base_value import Context, ContextSet, NO_VALUES -from jedi.inference.lazy_value import LazyKnownContext +from jedi.inference.base_value import Value, ValueSet, NO_VALUES +from jedi.inference.lazy_value import LazyKnownValue from jedi.inference.compiled.access import _sentinel from jedi.inference.cache import infer_state_function_cache from jedi.inference.helpers import reraise_getitem_errors @@ -40,7 +40,7 @@ class CheckAttribute(object): return partial(self.func, instance) -class CompiledObject(Context): +class CompiledObject(Value): def __init__(self, infer_state, access_handle, parent_value=None): super(CompiledObject, self).__init__(infer_state, parent_value) self.access_handle = access_handle @@ -58,11 +58,11 @@ class CompiledObject(Context): else: if self.access_handle.is_class(): from jedi.inference.value import CompiledInstance - return ContextSet([ + return ValueSet([ CompiledInstance(self.infer_state, self.parent_value, self, arguments) ]) else: - return ContextSet(self._execute_function(arguments)) + return ValueSet(self._execute_function(arguments)) @CheckAttribute() def py__class__(self): @@ -187,7 +187,7 @@ class CompiledObject(Context): if access is None: return 
NO_VALUES - return ContextSet([create_from_access_path(self.infer_state, access)]) + return ValueSet([create_from_access_path(self.infer_state, access)]) def py__getitem__(self, index_value_set, valueualized_node): all_access_paths = self.access_handle.py__getitem__all_values() @@ -195,7 +195,7 @@ class CompiledObject(Context): # This means basically that no __getitem__ has been defined on this # object. return super(CompiledObject, self).py__getitem__(index_value_set, valueualized_node) - return ContextSet( + return ValueSet( create_from_access_path(self.infer_state, access) for access in all_access_paths ) @@ -215,7 +215,7 @@ class CompiledObject(Context): return for access in access_path_list: - yield LazyKnownContext(create_from_access_path(self.infer_state, access)) + yield LazyKnownValue(create_from_access_path(self.infer_state, access)) def py__name__(self): return self.access_handle.py__name__() @@ -225,7 +225,7 @@ class CompiledObject(Context): name = self.py__name__() if name is None: name = self.access_handle.get_repr() - return CompiledContextName(self, name) + return CompiledValueName(self, name) def _execute_function(self, params): from jedi.inference import docstrings @@ -295,7 +295,7 @@ class CompiledName(AbstractNameDefinition): @underscore_memoization def infer(self): - return ContextSet([_create_from_name( + return ValueSet([_create_from_name( self._infer_state, self.parent_value, self.string_name )]) @@ -325,7 +325,7 @@ class SignatureParamName(ParamNameInterface, AbstractNameDefinition): infer_state = self.parent_value.infer_state values = NO_VALUES if p.has_default: - values = ContextSet([create_from_access_path(infer_state, p.default)]) + values = ValueSet([create_from_access_path(infer_state, p.default)]) if p.has_annotation: annotation = create_from_access_path(infer_state, p.annotation) values |= annotation.execute_with_values() @@ -351,7 +351,7 @@ class UnresolvableParamName(ParamNameInterface, AbstractNameDefinition): return NO_VALUES 
-class CompiledContextName(ContextNameMixin, AbstractNameDefinition): +class CompiledValueName(ValueNameMixin, AbstractNameDefinition): def __init__(self, value, name): self.string_name = name self._value = value diff --git a/jedi/inference/docstrings.py b/jedi/inference/docstrings.py index d3cb722b..d52df6de 100644 --- a/jedi/inference/docstrings.py +++ b/jedi/inference/docstrings.py @@ -25,9 +25,9 @@ from jedi._compatibility import u from jedi import debug from jedi.inference.utils import indent_block from jedi.inference.cache import infer_state_method_cache -from jedi.inference.base_value import iterator_to_value_set, ContextSet, \ +from jedi.inference.base_value import iterator_to_value_set, ValueSet, \ NO_VALUES -from jedi.inference.lazy_value import LazyKnownContexts +from jedi.inference.lazy_value import LazyKnownValues DOCSTRING_PARAM_PATTERNS = [ @@ -221,8 +221,8 @@ def _infer_for_statement_string(module_value, string): if stmt.type not in ('name', 'atom', 'atom_expr'): return [] - from jedi.inference.value import FunctionContext - function_value = FunctionContext( + from jedi.inference.value import FunctionValue + function_value = FunctionValue( module_value.infer_state, module_value, funcdef @@ -242,7 +242,7 @@ def _execute_types_in_stmt(module_value, stmt): contain is executed. (Used as type information). """ definitions = module_value.infer_node(stmt) - return ContextSet.from_sets( + return ValueSet.from_sets( _execute_array_values(module_value.infer_state, d) for d in definitions ) @@ -253,15 +253,15 @@ def _execute_array_values(infer_state, array): Tuples indicate that there's not just one return value, but the listed ones. `(str, int)` means that it returns a tuple with both types. 
""" - from jedi.inference.value.iterable import SequenceLiteralContext, FakeSequence - if isinstance(array, SequenceLiteralContext): + from jedi.inference.value.iterable import SequenceLiteralValue, FakeSequence + if isinstance(array, SequenceLiteralValue): values = [] for lazy_value in array.py__iter__(): - objects = ContextSet.from_sets( + objects = ValueSet.from_sets( _execute_array_values(infer_state, typ) for typ in lazy_value.infer() ) - values.append(LazyKnownContexts(objects)) + values.append(LazyKnownValues(objects)) return {FakeSequence(infer_state, array.array_type, values)} else: return array.execute_annotation() @@ -270,10 +270,10 @@ def _execute_array_values(infer_state, array): @infer_state_method_cache() def infer_param(execution_value, param): from jedi.inference.value.instance import InstanceArguments - from jedi.inference.value import FunctionExecutionContext + from jedi.inference.value import FunctionExecutionValue def infer_docstring(docstring): - return ContextSet( + return ValueSet( p for param_str in _search_param_in_docstr(docstring, param.name.value) for p in _infer_for_statement_string(module_value, param_str) @@ -284,7 +284,7 @@ def infer_param(execution_value, param): return NO_VALUES types = infer_docstring(execution_value.py__doc__()) - if isinstance(execution_value, FunctionExecutionContext) \ + if isinstance(execution_value, FunctionExecutionValue) \ and isinstance(execution_value.var_args, InstanceArguments) \ and execution_value.function_value.py__name__() == '__init__': class_value = execution_value.var_args.instance.class_value diff --git a/jedi/inference/dynamic.py b/jedi/inference/dynamic.py index abddc0e4..b15c6264 100644 --- a/jedi/inference/dynamic.py +++ b/jedi/inference/dynamic.py @@ -26,8 +26,8 @@ from jedi.inference.param import create_default_params from jedi.inference.helpers import is_stdlib_path from jedi.inference.utils import to_list from jedi.parser_utils import get_parent_scope -from jedi.inference.value import 
ModuleContext, instance -from jedi.inference.base_value import ContextSet, NO_VALUES +from jedi.inference.value import ModuleValue, instance +from jedi.inference.base_value import ValueSet, NO_VALUES from jedi.inference import recursion @@ -49,7 +49,7 @@ class DynamicExecutedParams(object): # anonymous functions can create an anonymous parameter that is # more or less self referencing. if allowed: - return ContextSet.from_sets(p.infer() for p in self._executed_params) + return ValueSet.from_sets(p.infer() for p in self._executed_params) return NO_VALUES @@ -130,7 +130,7 @@ def _search_function_executions(infer_state, module_value, funcdef, string_name) i = 0 for for_mod_value in imports.get_modules_containing_name( infer_state, [module_value], string_name): - if not isinstance(module_value, ModuleContext): + if not isinstance(module_value, ModuleValue): return for name, trailer in _get_possible_nodes(for_mod_value, string_name): i += 1 @@ -179,7 +179,7 @@ def _get_possible_nodes(module_value, func_string_name): def _check_name_for_execution(infer_state, value, compare_node, name, trailer): - from jedi.inference.value.function import FunctionExecutionContext + from jedi.inference.value.function import FunctionExecutionValue def create_func_excs(): arglist = trailer.children[1] @@ -203,7 +203,7 @@ def _check_name_for_execution(infer_state, value, compare_node, name, trailer): if compare_node == value_node: for func_execution in create_func_excs(): yield func_execution - elif isinstance(v.parent_value, FunctionExecutionContext) and \ + elif isinstance(v.parent_value, FunctionExecutionValue) and \ compare_node.type == 'funcdef': # Here we're trying to find decorators by checking the first # parameter. It's not very generic though. 
Should find a better diff --git a/jedi/inference/filters.py b/jedi/inference/filters.py index f3742090..3547aee7 100644 --- a/jedi/inference/filters.py +++ b/jedi/inference/filters.py @@ -9,8 +9,8 @@ from parso.tree import search_ancestor from jedi._compatibility import use_metaclass from jedi.inference import flow_analysis -from jedi.inference.base_value import ContextSet, Context, ContextWrapper, \ - LazyContextWrapper +from jedi.inference.base_value import ValueSet, Value, ValueWrapper, \ + LazyValueWrapper from jedi.parser_utils import get_cached_parent_scope from jedi.inference.utils import to_list from jedi.inference.names import TreeNameDefinition, ParamName, AbstractNameDefinition @@ -231,7 +231,7 @@ class MergedFilter(object): return '%s(%s)' % (self.__class__.__name__, ', '.join(str(f) for f in self._filters)) -class _BuiltinMappedMethod(Context): +class _BuiltinMappedMethod(Value): """``Generator.__next__`` ``dict.values`` methods and so on.""" api_type = u'function' @@ -281,7 +281,7 @@ class SpecialMethodFilter(DictFilter): else: continue break - return ContextSet([ + return ValueSet([ _BuiltinMappedMethod(self.parent_value, self._callable, builtin_func) ]) @@ -328,13 +328,13 @@ class _AttributeOverwriteMixin(object): class LazyAttributeOverwrite(use_metaclass(_OverwriteMeta, _AttributeOverwriteMixin, - LazyContextWrapper)): + LazyValueWrapper)): def __init__(self, infer_state): self.infer_state = infer_state class AttributeOverwrite(use_metaclass(_OverwriteMeta, _AttributeOverwriteMixin, - ContextWrapper)): + ValueWrapper)): pass @@ -394,7 +394,7 @@ def get_global_filters(infer_state, value, until_position, origin_scope): >>> list(filters[3].values()) # doctest: +ELLIPSIS [...] """ - from jedi.inference.value.function import FunctionExecutionContext + from jedi.inference.value.function import FunctionExecutionValue while value is not None: # Names in methods cannot be resolved within the class. 
for filter in value.get_filters( @@ -402,7 +402,7 @@ def get_global_filters(infer_state, value, until_position, origin_scope): until_position=until_position, origin_scope=origin_scope): yield filter - if isinstance(value, FunctionExecutionContext): + if isinstance(value, FunctionExecutionValue): # The position should be reset if the current scope is a function. until_position = None diff --git a/jedi/inference/finder.py b/jedi/inference/finder.py index b20fcef4..f44f44e1 100644 --- a/jedi/inference/finder.py +++ b/jedi/inference/finder.py @@ -27,7 +27,7 @@ from jedi.inference import helpers from jedi.inference.value import iterable from jedi.inference.filters import get_global_filters from jedi.inference.names import TreeNameDefinition -from jedi.inference.base_value import ContextSet, NO_VALUES +from jedi.inference.base_value import ValueSet, NO_VALUES from jedi.parser_utils import is_scope, get_parent_scope from jedi.inference.gradual.conversion import convert_values @@ -123,7 +123,7 @@ class NameFinder(object): yield f # This covers the case where a stub files are incomplete. 
if self._value.is_stub(): - for c in convert_values(ContextSet({self._value})): + for c in convert_values(ValueSet({self._value})): for f in c.get_filters(): yield f @@ -187,7 +187,7 @@ class NameFinder(object): return inst.execute_function_slots(names, name) def _names_to_types(self, names, attribute_lookup): - values = ContextSet.from_sets(name.infer() for name in names) + values = ValueSet.from_sets(name.infer() for name in names) debug.dbg('finder._names_to_types: %s -> %s', names, values) if not names and self._value.is_instance() and not self._value.is_compiled(): diff --git a/jedi/inference/gradual/annotation.py b/jedi/inference/gradual/annotation.py index 693e6212..69da15a0 100644 --- a/jedi/inference/gradual/annotation.py +++ b/jedi/inference/gradual/annotation.py @@ -11,7 +11,7 @@ from parso import ParserSyntaxError, parse from jedi._compatibility import force_unicode from jedi.inference.cache import infer_state_method_cache -from jedi.inference.base_value import ContextSet, NO_VALUES +from jedi.inference.base_value import ValueSet, NO_VALUES from jedi.inference.gradual.typing import TypeVar, LazyGenericClass, \ AbstractAnnotatedClass from jedi.inference.gradual.typing import GenericClass @@ -112,15 +112,15 @@ def infer_param(execution_value, param): infer_state = execution_value.infer_state if param.star_count == 1: tuple_ = builtin_from_name(infer_state, 'tuple') - return ContextSet([GenericClass( + return ValueSet([GenericClass( tuple_, generics=(values,), ) for c in values]) elif param.star_count == 2: dct = builtin_from_name(infer_state, 'dict') - return ContextSet([GenericClass( + return ValueSet([GenericClass( dct, - generics=(ContextSet([builtin_from_name(infer_state, 'str')]), values), + generics=(ValueSet([builtin_from_name(infer_state, 'str')]), values), ) for c in values]) pass return values @@ -224,9 +224,9 @@ def infer_return_types(function_execution_value): type_var_dict = infer_type_vars_for_execution(function_execution_value, 
all_annotations) - return ContextSet.from_sets( + return ValueSet.from_sets( ann.define_generics(type_var_dict) - if isinstance(ann, (AbstractAnnotatedClass, TypeVar)) else ContextSet({ann}) + if isinstance(ann, (AbstractAnnotatedClass, TypeVar)) else ValueSet({ann}) for ann in annotation_values ).execute_annotation() diff --git a/jedi/inference/gradual/conversion.py b/jedi/inference/gradual/conversion.py index 95f4d015..291480b9 100644 --- a/jedi/inference/gradual/conversion.py +++ b/jedi/inference/gradual/conversion.py @@ -1,14 +1,14 @@ from jedi import debug -from jedi.inference.base_value import ContextSet, \ +from jedi.inference.base_value import ValueSet, \ NO_VALUES from jedi.inference.utils import to_list -from jedi.inference.gradual.stub_value import StubModuleContext +from jedi.inference.gradual.stub_value import StubModuleValue def _stub_to_python_value_set(stub_value, ignore_compiled=False): stub_module = stub_value.get_root_value() if not stub_module.is_stub(): - return ContextSet([stub_value]) + return ValueSet([stub_value]) was_instance = stub_value.is_instance() if was_instance: @@ -27,7 +27,7 @@ def _stub_to_python_value_set(stub_value, ignore_compiled=False): values = _infer_from_stub(stub_module, qualified_names, ignore_compiled) if was_instance: - values = ContextSet.from_sets( + values = ValueSet.from_sets( c.execute_with_values() for c in values if c.is_class() @@ -41,7 +41,7 @@ def _stub_to_python_value_set(stub_value, ignore_compiled=False): def _infer_from_stub(stub_module, qualified_names, ignore_compiled): from jedi.inference.compiled.mixed import MixedObject - assert isinstance(stub_module, (StubModuleContext, MixedObject)), stub_module + assert isinstance(stub_module, (StubModuleValue, MixedObject)), stub_module non_stubs = stub_module.non_stub_value_set if ignore_compiled: non_stubs = non_stubs.filter(lambda c: not c.is_compiled()) @@ -89,7 +89,7 @@ def _load_stub_module(module): return _try_to_load_stub_cached( module.infer_state, 
import_names=module.string_names, - python_value_set=ContextSet([module]), + python_value_set=ValueSet([module]), parent_module_value=None, sys_path=module.infer_state.get_sys_path(), ) @@ -116,7 +116,7 @@ def _python_to_stub_names(names, fallback_to_python=False): if name_list is not None: stub_module = _load_stub_module(module) if stub_module is not None: - stubs = ContextSet({stub_module}) + stubs = ValueSet({stub_module}) for name in name_list[:-1]: stubs = stubs.py__getattribute__(name) if stubs and name_list: @@ -148,15 +148,15 @@ def convert_values(values, only_stubs=False, prefer_stubs=False, ignore_compiled assert not (only_stubs and prefer_stubs) with debug.increase_indent_cm('convert values'): if only_stubs or prefer_stubs: - return ContextSet.from_sets( + return ValueSet.from_sets( to_stub(value) - or (ContextSet({value}) if prefer_stubs else NO_VALUES) + or (ValueSet({value}) if prefer_stubs else NO_VALUES) for value in values ) else: - return ContextSet.from_sets( + return ValueSet.from_sets( _stub_to_python_value_set(stub_value, ignore_compiled=ignore_compiled) - or ContextSet({stub_value}) + or ValueSet({stub_value}) for stub_value in values ) @@ -164,7 +164,7 @@ def convert_values(values, only_stubs=False, prefer_stubs=False, ignore_compiled # TODO merge with _python_to_stub_names? 
def to_stub(value): if value.is_stub(): - return ContextSet([value]) + return ValueSet([value]) was_instance = value.is_instance() if was_instance: @@ -182,12 +182,12 @@ def to_stub(value): qualified_names = qualified_names[:-1] was_instance = True - stub_values = ContextSet([stub_module]) + stub_values = ValueSet([stub_module]) for name in qualified_names: stub_values = stub_values.py__getattribute__(name) if was_instance: - stub_values = ContextSet.from_sets( + stub_values = ValueSet.from_sets( c.execute_with_values() for c in stub_values if c.is_class() diff --git a/jedi/inference/gradual/stub_value.py b/jedi/inference/gradual/stub_value.py index b0e07add..b3ed65b6 100644 --- a/jedi/inference/gradual/stub_value.py +++ b/jedi/inference/gradual/stub_value.py @@ -1,13 +1,13 @@ -from jedi.inference.base_value import ContextWrapper -from jedi.inference.value.module import ModuleContext +from jedi.inference.base_value import ValueWrapper +from jedi.inference.value.module import ModuleValue from jedi.inference.filters import ParserTreeFilter, \ TreeNameDefinition from jedi.inference.gradual.typing import TypingModuleFilterWrapper -class StubModuleContext(ModuleContext): +class StubModuleValue(ModuleValue): def __init__(self, non_stub_value_set, *args, **kwargs): - super(StubModuleContext, self).__init__(*args, **kwargs) + super(StubModuleValue, self).__init__(*args, **kwargs) self.non_stub_value_set = non_stub_value_set def is_stub(self): @@ -27,7 +27,7 @@ class StubModuleContext(ModuleContext): pass else: names.update(method()) - names.update(super(StubModuleContext, self).sub_modules_dict()) + names.update(super(StubModuleValue, self).sub_modules_dict()) return names def _get_first_non_stub_filters(self): @@ -44,7 +44,7 @@ class StubModuleContext(ModuleContext): def get_filters(self, search_global=False, until_position=None, origin_scope=None, **kwargs): - filters = super(StubModuleContext, self).get_filters( + filters = super(StubModuleValue, self).get_filters( 
search_global, until_position, origin_scope, **kwargs ) next(filters) # Ignore the first filter and replace it with our own @@ -60,7 +60,7 @@ class StubModuleContext(ModuleContext): yield f -class TypingModuleWrapper(StubModuleContext): +class TypingModuleWrapper(StubModuleValue): def get_filters(self, *args, **kwargs): filters = super(TypingModuleWrapper, self).get_filters(*args, **kwargs) yield TypingModuleFilterWrapper(next(filters)) @@ -101,5 +101,5 @@ class StubFilter(ParserTreeFilter): return True -class VersionInfo(ContextWrapper): +class VersionInfo(ValueWrapper): pass diff --git a/jedi/inference/gradual/typeshed.py b/jedi/inference/gradual/typeshed.py index fda1c41b..42bb75e0 100644 --- a/jedi/inference/gradual/typeshed.py +++ b/jedi/inference/gradual/typeshed.py @@ -5,8 +5,8 @@ from functools import wraps from jedi.file_io import FileIO from jedi._compatibility import FileNotFoundError, cast_path from jedi.parser_utils import get_cached_code_lines -from jedi.inference.base_value import ContextSet, NO_VALUES -from jedi.inference.gradual.stub_value import TypingModuleWrapper, StubModuleContext +from jedi.inference.base_value import ValueSet, NO_VALUES +from jedi.inference.gradual.stub_value import TypingModuleWrapper, StubModuleValue _jedi_path = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) TYPESHED_PATH = os.path.join(_jedi_path, 'third_party', 'typeshed') @@ -107,7 +107,7 @@ def import_module_decorator(func): python_parent, = infer_state.import_module(('os',), prefer_stubs=False) python_value_set = python_parent.py__getattribute__('path') else: - python_value_set = ContextSet.from_sets( + python_value_set = ValueSet.from_sets( func(infer_state, import_names, p, sys_path,) for p in parent_module_values ) @@ -119,7 +119,7 @@ def import_module_decorator(func): stub = _try_to_load_stub_cached(infer_state, import_names, python_value_set, parent_module_value, sys_path) if stub is not None: - return ContextSet([stub]) + return 
ValueSet([stub]) return python_value_set return wrapper @@ -235,7 +235,7 @@ def _load_from_typeshed(infer_state, python_value_set, parent_module_value, impo if len(import_names) == 1: map_ = _cache_stub_file_map(infer_state.grammar.version_info) import_name = _IMPORT_MAP.get(import_name, import_name) - elif isinstance(parent_module_value, StubModuleContext): + elif isinstance(parent_module_value, StubModuleValue): if not parent_module_value.is_package: # Only if it's a package (= a folder) something can be # imported. @@ -275,7 +275,7 @@ def create_stub_module(infer_state, python_value_set, stub_module_node, file_io, if import_names == ('typing',): module_cls = TypingModuleWrapper else: - module_cls = StubModuleContext + module_cls = StubModuleValue file_name = os.path.basename(file_io.path) stub_module_value = module_cls( python_value_set, infer_state, stub_module_node, diff --git a/jedi/inference/gradual/typing.py b/jedi/inference/gradual/typing.py index 364b554b..14d3fe14 100644 --- a/jedi/inference/gradual/typing.py +++ b/jedi/inference/gradual/typing.py @@ -9,15 +9,15 @@ from jedi._compatibility import unicode, force_unicode from jedi import debug from jedi.inference.cache import infer_state_method_cache from jedi.inference.compiled import builtin_from_name -from jedi.inference.base_value import ContextSet, NO_VALUES, Context, \ - iterator_to_value_set, ContextWrapper, LazyContextWrapper -from jedi.inference.lazy_value import LazyKnownContexts -from jedi.inference.value.iterable import SequenceLiteralContext +from jedi.inference.base_value import ValueSet, NO_VALUES, Value, \ + iterator_to_value_set, ValueWrapper, LazyValueWrapper +from jedi.inference.lazy_value import LazyKnownValues +from jedi.inference.value.iterable import SequenceLiteralValue from jedi.inference.arguments import repack_with_argument_clinic from jedi.inference.utils import to_list from jedi.inference.filters import FilterWrapper from jedi.inference.names import NameWrapper, 
AbstractTreeName, \ - AbstractNameDefinition, ContextName + AbstractNameDefinition, ValueName from jedi.inference.helpers import is_string from jedi.inference.value.klass import ClassMixin, ClassFilter @@ -41,12 +41,12 @@ class TypingName(AbstractTreeName): self._value = value def infer(self): - return ContextSet([self._value]) + return ValueSet([self._value]) -class _BaseTypingContext(Context): +class _BaseTypingValue(Value): def __init__(self, infer_state, parent_value, tree_name): - super(_BaseTypingContext, self).__init__(infer_state, parent_value) + super(_BaseTypingValue, self).__init__(infer_state, parent_value) self._tree_name = tree_name @property @@ -75,7 +75,7 @@ class _BaseTypingContext(Context): @property def name(self): - return ContextName(self, self._tree_name) + return ValueName(self, self._tree_name) def __repr__(self): return '%s(%s)' % (self.__class__.__name__, self._tree_name.value) @@ -83,7 +83,7 @@ class _BaseTypingContext(Context): class TypingModuleName(NameWrapper): def infer(self): - return ContextSet(self._remap()) + return ValueSet(self._remap()) def _remap(self): name = self.string_name @@ -97,9 +97,9 @@ class TypingModuleName(NameWrapper): return if name in _PROXY_CLASS_TYPES: - yield TypingClassContext.create_cached(infer_state, self.parent_value, self.tree_name) + yield TypingClassValue.create_cached(infer_state, self.parent_value, self.tree_name) elif name in _PROXY_TYPES: - yield TypingContext.create_cached(infer_state, self.parent_value, self.tree_name) + yield TypingValue.create_cached(infer_state, self.parent_value, self.tree_name) elif name == 'runtime': # We don't want anything here, not sure what this function is # supposed to do, since it just appears in the stubs and shouldn't @@ -138,7 +138,7 @@ class TypingModuleFilterWrapper(FilterWrapper): name_wrapper_class = TypingModuleName -class _WithIndexBase(_BaseTypingContext): +class _WithIndexBase(_BaseTypingValue): def __init__(self, infer_state, parent_value, name, 
index_value, value_of_index): super(_WithIndexBase, self).__init__(infer_state, parent_value, name) self._index_value = index_value @@ -152,28 +152,28 @@ class _WithIndexBase(_BaseTypingContext): ) -class TypingContextWithIndex(_WithIndexBase): +class TypingValueWithIndex(_WithIndexBase): def execute_annotation(self): string_name = self._tree_name.value if string_name == 'Union': # This is kind of a special case, because we have Unions (in Jedi - # ContextSets). + # ValueSets). return self.gather_annotation_classes().execute_annotation() elif string_name == 'Optional': # Optional is basically just saying it's either None or the actual # type. return self.gather_annotation_classes().execute_annotation() \ - | ContextSet([builtin_from_name(self.infer_state, u'None')]) + | ValueSet([builtin_from_name(self.infer_state, u'None')]) elif string_name == 'Type': # The type is actually already given in the index_value - return ContextSet([self._index_value]) + return ValueSet([self._index_value]) elif string_name == 'ClassVar': # For now don't do anything here, ClassVars are always used. 
return self._index_value.execute_annotation() cls = globals()[string_name] - return ContextSet([cls( + return ValueSet([cls( self.infer_state, self.parent_value, self._tree_name, @@ -182,17 +182,17 @@ class TypingContextWithIndex(_WithIndexBase): )]) def gather_annotation_classes(self): - return ContextSet.from_sets( + return ValueSet.from_sets( _iter_over_arguments(self._index_value, self._value_of_index) ) -class TypingContext(_BaseTypingContext): - index_class = TypingContextWithIndex +class TypingValue(_BaseTypingValue): + index_class = TypingValueWithIndex py__simple_getitem__ = None def py__getitem__(self, index_value_set, valueualized_node): - return ContextSet( + return ValueSet( self.index_class.create_cached( self.infer_state, self.parent_value, @@ -205,7 +205,7 @@ class TypingContext(_BaseTypingContext): class _TypingClassMixin(object): def py__bases__(self): - return [LazyKnownContexts( + return [LazyKnownValues( self.infer_state.builtins_module.py__getattribute__('object') )] @@ -213,21 +213,21 @@ class _TypingClassMixin(object): return [] -class TypingClassContextWithIndex(_TypingClassMixin, TypingContextWithIndex, ClassMixin): +class TypingClassValueWithIndex(_TypingClassMixin, TypingValueWithIndex, ClassMixin): pass -class TypingClassContext(_TypingClassMixin, TypingContext, ClassMixin): - index_class = TypingClassContextWithIndex +class TypingClassValue(_TypingClassMixin, TypingValue, ClassMixin): + index_class = TypingClassValueWithIndex def _iter_over_arguments(maybe_tuple_value, defining_value): def iterate(): - if isinstance(maybe_tuple_value, SequenceLiteralContext): + if isinstance(maybe_tuple_value, SequenceLiteralValue): for lazy_value in maybe_tuple_value.py__iter__(valueualized_node=None): yield lazy_value.infer() else: - yield ContextSet([maybe_tuple_value]) + yield ValueSet([maybe_tuple_value]) def resolve_forward_references(value_set): for value in value_set: @@ -241,10 +241,10 @@ def _iter_over_arguments(maybe_tuple_value, 
defining_value): yield value for value_set in iterate(): - yield ContextSet(resolve_forward_references(value_set)) + yield ValueSet(resolve_forward_references(value_set)) -class TypeAlias(LazyContextWrapper): +class TypeAlias(LazyValueWrapper): def __init__(self, parent_value, origin_tree_name, actual): self.infer_state = parent_value.infer_state self.parent_value = parent_value @@ -253,7 +253,7 @@ class TypeAlias(LazyContextWrapper): @property def name(self): - return ContextName(self, self._origin_tree_name) + return ValueName(self, self._origin_tree_name) def py__name__(self): return self.name.string_name @@ -299,7 +299,7 @@ class Tuple(_ContainerBase): def _is_homogenous(self): # To specify a variable-length tuple of homogeneous type, Tuple[T, ...] # is used. - if isinstance(self._index_value, SequenceLiteralContext): + if isinstance(self._index_value, SequenceLiteralValue): entries = self._index_value.get_tree_entries() if len(entries) == 2 and entries[1] == '...': return True @@ -317,17 +317,17 @@ class Tuple(_ContainerBase): def py__iter__(self, valueualized_node=None): if self._is_homogenous(): - yield LazyKnownContexts(self._get_getitem_values(0).execute_annotation()) + yield LazyKnownValues(self._get_getitem_values(0).execute_annotation()) else: - if isinstance(self._index_value, SequenceLiteralContext): + if isinstance(self._index_value, SequenceLiteralValue): for i in range(self._index_value.py__len__()): - yield LazyKnownContexts(self._get_getitem_values(i).execute_annotation()) + yield LazyKnownValues(self._get_getitem_values(i).execute_annotation()) def py__getitem__(self, index_value_set, valueualized_node): if self._is_homogenous(): return self._get_getitem_values(0).execute_annotation() - return ContextSet.from_sets( + return ValueSet.from_sets( _iter_over_arguments(self._index_value, self._value_of_index) ).execute_annotation() @@ -340,13 +340,13 @@ class Protocol(_ContainerBase): pass -class Any(_BaseTypingContext): +class Any(_BaseTypingValue): 
def execute_annotation(self): debug.warning('Used Any - returned no results') return NO_VALUES -class TypeVarClass(_BaseTypingContext): +class TypeVarClass(_BaseTypingValue): def py__call__(self, arguments): unpacked = arguments.unpack() @@ -357,7 +357,7 @@ class TypeVarClass(_BaseTypingContext): debug.warning('Found a variable without a name %s', arguments) return NO_VALUES - return ContextSet([TypeVar.create_cached( + return ValueSet([TypeVar.create_cached( self.infer_state, self.parent_value, self._tree_name, @@ -390,7 +390,7 @@ class TypeVarClass(_BaseTypingContext): return None -class TypeVar(_BaseTypingContext): +class TypeVar(_BaseTypingValue): def __init__(self, infer_state, parent_value, tree_name, var_name, unpacked_args): super(TypeVar, self).__init__(infer_state, parent_value, tree_name) self._var_name = var_name @@ -432,7 +432,7 @@ class TypeVar(_BaseTypingContext): @property def constraints(self): - return ContextSet.from_sets( + return ValueSet.from_sets( lazy.infer() for lazy in self._constraints_lazy_values ) @@ -444,7 +444,7 @@ class TypeVar(_BaseTypingContext): else: if found: return found - return self._get_classes() or ContextSet({self}) + return self._get_classes() or ValueSet({self}) def execute_annotation(self): return self._get_classes().execute_annotation() @@ -453,21 +453,21 @@ class TypeVar(_BaseTypingContext): return '<%s: %s>' % (self.__class__.__name__, self.py__name__()) -class OverloadFunction(_BaseTypingContext): +class OverloadFunction(_BaseTypingValue): @repack_with_argument_clinic('func, /') def py__call__(self, func_value_set): # Just pass arguments through. 
return func_value_set -class NewTypeFunction(_BaseTypingContext): +class NewTypeFunction(_BaseTypingValue): def py__call__(self, arguments): ordered_args = arguments.unpack() next(ordered_args, (None, None)) _, second_arg = next(ordered_args, (None, None)) if second_arg is None: return NO_VALUES - return ContextSet( + return ValueSet( NewType( self.infer_state, valueualized_node.value, @@ -476,7 +476,7 @@ class NewTypeFunction(_BaseTypingContext): ) for valueualized_node in arguments.get_calling_nodes()) -class NewType(Context): +class NewType(Value): def __init__(self, infer_state, parent_value, tree_node, type_value_set): super(NewType, self).__init__(infer_state, parent_value) self._type_value_set = type_value_set @@ -486,7 +486,7 @@ class NewType(Context): return self._type_value_set.execute_annotation() -class CastFunction(_BaseTypingContext): +class CastFunction(_BaseTypingValue): @repack_with_argument_clinic('type, object, /') def py__call__(self, type_value_set, object_value_set): return type_value_set.execute_annotation() @@ -510,7 +510,7 @@ class BoundTypeVarName(AbstractNameDefinition): yield constraint else: yield value - return ContextSet(iter_()) + return ValueSet(iter_()) def py__name__(self): return self._type_var.py__name__() @@ -549,7 +549,7 @@ class TypeVarFilter(object): return [] -class AbstractAnnotatedClass(ClassMixin, ContextWrapper): +class AbstractAnnotatedClass(ClassMixin, ValueWrapper): def get_type_var_filter(self): return TypeVarFilter(self.get_generics(), self.list_type_vars()) @@ -593,7 +593,7 @@ class AbstractAnnotatedClass(ClassMixin, ContextWrapper): def py__call__(self, arguments): instance, = super(AbstractAnnotatedClass, self).py__call__(arguments) - return ContextSet([InstanceWrapper(instance)]) + return ValueSet([InstanceWrapper(instance)]) def get_generics(self): raise NotImplementedError @@ -607,19 +607,19 @@ class AbstractAnnotatedClass(ClassMixin, ContextWrapper): if isinstance(generic, (AbstractAnnotatedClass, TypeVar)): 
result = generic.define_generics(type_var_dict) values |= result - if result != ContextSet({generic}): + if result != ValueSet({generic}): changed = True else: - values |= ContextSet([generic]) + values |= ValueSet([generic]) new_generics.append(values) if not changed: # There might not be any type vars that change. In that case just # return itself, because it does not make sense to potentially lose # cached results. - return ContextSet([self]) + return ValueSet([self]) - return ContextSet([GenericClass( + return ValueSet([GenericClass( self._wrapped_value, generics=tuple(new_generics) )]) @@ -682,18 +682,18 @@ class LazyAnnotatedBaseClass(object): for type_var in type_var_set: if isinstance(type_var, TypeVar): names = filter.get(type_var.py__name__()) - new |= ContextSet.from_sets( + new |= ValueSet.from_sets( name.infer() for name in names ) else: # Mostly will be type vars, except if in some cases # a concrete type will already be there. In that # case just add it to the value set. - new |= ContextSet([type_var]) + new |= ValueSet([type_var]) yield new -class InstanceWrapper(ContextWrapper): +class InstanceWrapper(ValueWrapper): def py__stop_iteration_returns(self): for cls in self._wrapped_value.class_value.py__mro__(): if cls.py__name__() == 'Generator': @@ -703,5 +703,5 @@ class InstanceWrapper(ContextWrapper): except IndexError: pass elif cls.py__name__() == 'Iterator': - return ContextSet([builtin_from_name(self.infer_state, u'None')]) + return ValueSet([builtin_from_name(self.infer_state, u'None')]) return self._wrapped_value.py__stop_iteration_returns() diff --git a/jedi/inference/imports.py b/jedi/inference/imports.py index c73bfa3d..a431f511 100644 --- a/jedi/inference/imports.py +++ b/jedi/inference/imports.py @@ -30,7 +30,7 @@ from jedi.inference import analysis from jedi.inference.utils import unite from jedi.inference.cache import infer_state_method_cache from jedi.inference.names import ImportName, SubModuleName -from jedi.inference.base_value 
import ContextSet, NO_VALUES +from jedi.inference.base_value import ValueSet, NO_VALUES from jedi.inference.gradual.typeshed import import_module_decorator from jedi.inference.value.module import iter_module_names from jedi.plugins import plugin_manager @@ -97,7 +97,7 @@ def infer_import(value, tree_name, is_goto=False): for t in types ) if not is_goto: - types = ContextSet(types) + types = ValueSet(types) if not types: path = import_path + [from_import_name] @@ -289,7 +289,7 @@ class Importer(object): value_set = [None] for i, name in enumerate(self.import_path): - value_set = ContextSet.from_sets([ + value_set = ValueSet.from_sets([ self._infer_state.import_module( import_names[:i+1], parent_module_value, @@ -382,7 +382,7 @@ def import_module(infer_state, import_names, parent_module_value, sys_path): module = _load_builtin_module(infer_state, import_names, sys_path) if module is None: return NO_VALUES - return ContextSet([module]) + return ValueSet([module]) module_name = '.'.join(import_names) if parent_module_value is None: @@ -421,8 +421,8 @@ def import_module(infer_state, import_names, parent_module_value, sys_path): return NO_VALUES if isinstance(file_io_or_ns, ImplicitNSInfo): - from jedi.inference.value.namespace import ImplicitNamespaceContext - module = ImplicitNamespaceContext( + from jedi.inference.value.namespace import ImplicitNamespaceValue + module = ImplicitNamespaceValue( infer_state, fullname=file_io_or_ns.name, paths=file_io_or_ns.paths, @@ -442,7 +442,7 @@ def import_module(infer_state, import_names, parent_module_value, sys_path): debug.dbg('global search_module %s: %s', import_names[-1], module) else: debug.dbg('search_module %s in paths %s: %s', module_name, paths, module) - return ContextSet([module]) + return ValueSet([module]) def _load_python_module(infer_state, file_io, sys_path=None, @@ -459,8 +459,8 @@ def _load_python_module(infer_state, file_io, sys_path=None, cache_path=settings.cache_directory ) - from jedi.inference.value import 
ModuleContext - return ModuleContext( + from jedi.inference.value import ModuleValue + return ModuleValue( infer_state, module_node, file_io=file_io, string_names=import_names, @@ -508,7 +508,7 @@ def _load_module_from_path(infer_state, file_io, base_names): import_names=import_names, is_package=is_package, ) - infer_state.module_cache.add(import_names, ContextSet([module])) + infer_state.module_cache.add(import_names, ValueSet([module])) return module diff --git a/jedi/inference/lazy_value.py b/jedi/inference/lazy_value.py index 2f85ffdb..0f06a373 100644 --- a/jedi/inference/lazy_value.py +++ b/jedi/inference/lazy_value.py @@ -1,8 +1,8 @@ -from jedi.inference.base_value import ContextSet, NO_VALUES +from jedi.inference.base_value import ValueSet, NO_VALUES from jedi.common.utils import monkeypatch -class AbstractLazyContext(object): +class AbstractLazyValue(object): def __init__(self, data): self.data = data @@ -13,29 +13,29 @@ class AbstractLazyContext(object): raise NotImplementedError -class LazyKnownContext(AbstractLazyContext): +class LazyKnownValue(AbstractLazyValue): """data is a value.""" def infer(self): - return ContextSet([self.data]) + return ValueSet([self.data]) -class LazyKnownContexts(AbstractLazyContext): - """data is a ContextSet.""" +class LazyKnownValues(AbstractLazyValue): + """data is a ValueSet.""" def infer(self): return self.data -class LazyUnknownContext(AbstractLazyContext): +class LazyUnknownValue(AbstractLazyValue): def __init__(self): - super(LazyUnknownContext, self).__init__(None) + super(LazyUnknownValue, self).__init__(None) def infer(self): return NO_VALUES -class LazyTreeContext(AbstractLazyContext): +class LazyTreeValue(AbstractLazyValue): def __init__(self, value, node): - super(LazyTreeContext, self).__init__(node) + super(LazyTreeValue, self).__init__(node) self.value = value # We need to save the predefined names. It's an unfortunate side effect # that needs to be tracked otherwise results will be wrong. 
@@ -48,12 +48,12 @@ class LazyTreeContext(AbstractLazyContext): def get_merged_lazy_value(lazy_values): if len(lazy_values) > 1: - return MergedLazyContexts(lazy_values) + return MergedLazyValues(lazy_values) else: return lazy_values[0] -class MergedLazyContexts(AbstractLazyContext): +class MergedLazyValues(AbstractLazyValue): """data is a list of lazy values.""" def infer(self): - return ContextSet.from_sets(l.infer() for l in self.data) + return ValueSet.from_sets(l.infer() for l in self.data) diff --git a/jedi/inference/names.py b/jedi/inference/names.py index 6150d0fd..224e325d 100644 --- a/jedi/inference/names.py +++ b/jedi/inference/names.py @@ -3,7 +3,7 @@ from abc import abstractmethod from parso.tree import search_ancestor from jedi._compatibility import Parameter -from jedi.inference.base_value import ContextSet, NO_VALUES +from jedi.inference.base_value import ValueSet, NO_VALUES from jedi.cache import memoize_method @@ -118,9 +118,9 @@ class AbstractTreeName(AbstractNameDefinition): return self.tree_name.start_pos -class ContextNameMixin(object): +class ValueNameMixin(object): def infer(self): - return ContextSet([self._value]) + return ValueSet([self._value]) def _get_qualified_names(self): return self._value.get_qualified_names() @@ -128,20 +128,20 @@ class ContextNameMixin(object): def get_root_value(self): if self.parent_value is None: # A module return self._value - return super(ContextNameMixin, self).get_root_value() + return super(ValueNameMixin, self).get_root_value() @property def api_type(self): return self._value.api_type -class ContextName(ContextNameMixin, AbstractTreeName): +class ValueName(ValueNameMixin, AbstractTreeName): def __init__(self, value, tree_name): - super(ContextName, self).__init__(value.parent_value, tree_name) + super(ValueName, self).__init__(value.parent_value, tree_name) self._value = value def goto(self): - return ContextSet([self._value.name]) + return ValueSet([self._value.name]) class 
TreeNameDefinition(AbstractTreeName): diff --git a/jedi/inference/param.py b/jedi/inference/param.py index 19029b72..3d9abdc2 100644 --- a/jedi/inference/param.py +++ b/jedi/inference/param.py @@ -3,14 +3,14 @@ from collections import defaultdict from jedi import debug from jedi.inference.utils import PushBackIterator from jedi.inference import analysis -from jedi.inference.lazy_value import LazyKnownContext, \ - LazyTreeContext, LazyUnknownContext +from jedi.inference.lazy_value import LazyKnownValue, \ + LazyTreeValue, LazyUnknownValue from jedi.inference import docstrings from jedi.inference.value import iterable def _add_argument_issue(error_name, lazy_value, message): - if isinstance(lazy_value, LazyTreeContext): + if isinstance(lazy_value, LazyTreeValue): node = lazy_value.data if node.parent.type == 'argument': node = node.parent @@ -146,20 +146,20 @@ def get_executed_params_and_issues(execution_value, arguments): break lazy_value_list.append(argument) seq = iterable.FakeSequence(execution_value.infer_state, u'tuple', lazy_value_list) - result_arg = LazyKnownContext(seq) + result_arg = LazyKnownValue(seq) elif param.star_count == 2: if argument is not None: too_many_args(argument) # **kwargs param dct = iterable.FakeDict(execution_value.infer_state, dict(non_matching_keys)) - result_arg = LazyKnownContext(dct) + result_arg = LazyKnownValue(dct) non_matching_keys = {} else: # normal param if argument is None: # No value: Return an empty container if param.default is None: - result_arg = LazyUnknownContext() + result_arg = LazyUnknownValue() if not keys_only: for valueualized_node in arguments.get_calling_nodes(): m = _error_argument_count(funcdef, len(unpacked_va)) @@ -172,7 +172,7 @@ def get_executed_params_and_issues(execution_value, arguments): ) ) else: - result_arg = LazyTreeContext(default_param_value, param.default) + result_arg = LazyTreeValue(default_param_value, param.default) is_default = True else: result_arg = argument @@ -181,7 +181,7 @@ def 
get_executed_params_and_issues(execution_value, arguments): execution_value, param, result_arg, is_default=is_default )) - if not isinstance(result_arg, LazyUnknownContext): + if not isinstance(result_arg, LazyUnknownValue): keys_used[param.name.value] = result_params[-1] if keys_only: @@ -234,17 +234,17 @@ def _error_argument_count(funcdef, actual_count): def _create_default_param(execution_value, param): if param.star_count == 1: - result_arg = LazyKnownContext( + result_arg = LazyKnownValue( iterable.FakeSequence(execution_value.infer_state, u'tuple', []) ) elif param.star_count == 2: - result_arg = LazyKnownContext( + result_arg = LazyKnownValue( iterable.FakeDict(execution_value.infer_state, {}) ) elif param.default is None: - result_arg = LazyUnknownContext() + result_arg = LazyUnknownValue() else: - result_arg = LazyTreeContext(execution_value.parent_value, param.default) + result_arg = LazyTreeValue(execution_value.parent_value, param.default) return ExecutedParam(execution_value, param, result_arg) diff --git a/jedi/inference/syntax_tree.py b/jedi/inference/syntax_tree.py index 6a4b2c41..e94d6acc 100644 --- a/jedi/inference/syntax_tree.py +++ b/jedi/inference/syntax_tree.py @@ -8,16 +8,16 @@ from parso.python import tree from jedi._compatibility import force_unicode, unicode from jedi import debug from jedi import parser_utils -from jedi.inference.base_value import ContextSet, NO_VALUES, ContextualizedNode, \ - ContextualizedName, iterator_to_value_set, iterate_values -from jedi.inference.lazy_value import LazyTreeContext +from jedi.inference.base_value import ValueSet, NO_VALUES, ValueualizedNode, \ + ValueualizedName, iterator_to_value_set, iterate_values +from jedi.inference.lazy_value import LazyTreeValue from jedi.inference import compiled from jedi.inference import recursion from jedi.inference import helpers from jedi.inference import analysis from jedi.inference import imports from jedi.inference import arguments -from jedi.inference.value import 
ClassContext, FunctionContext +from jedi.inference.value import ClassValue, FunctionValue from jedi.inference.value import iterable from jedi.inference.value import TreeInstance from jedi.inference.finder import NameFinder @@ -75,7 +75,7 @@ def infer_node(value, element): if typ in ('name', 'number', 'string', 'atom', 'strings', 'keyword', 'fstring'): return infer_atom(value, element) elif typ == 'lambdef': - return ContextSet([FunctionContext.from_value(value, element)]) + return ValueSet([FunctionValue.from_value(value, element)]) elif typ == 'expr_stmt': return infer_expr_stmt(value, element) elif typ in ('power', 'atom_expr'): @@ -105,7 +105,7 @@ def infer_node(value, element): return value_set elif typ in ('testlist_star_expr', 'testlist',): # The implicit tuple in statements. - return ContextSet([iterable.SequenceLiteralContext(infer_state, value, element)]) + return ValueSet([iterable.SequenceLiteralValue(infer_state, value, element)]) elif typ in ('not_test', 'factor'): value_set = value.infer_node(element.children[-1]) for operator in element.children[:-1]: @@ -122,7 +122,7 @@ def infer_node(value, element): if element.value not in ('.', '...'): origin = element.parent raise AssertionError("unhandled operator %s in %s " % (repr(element.value), origin)) - return ContextSet([compiled.builtin_from_name(infer_state, u'Ellipsis')]) + return ValueSet([compiled.builtin_from_name(infer_state, u'Ellipsis')]) elif typ == 'dotted_name': value_set = infer_atom(value, element.children[0]) for next_name in element.children[2::2]: @@ -159,7 +159,7 @@ def infer_trailer(value, atom_values, trailer): trailer_op, node, _ = trailer.children return atom_values.get_item( infer_subscript_list(value.infer_state, value, node), - ContextualizedNode(value, trailer) + ValueualizedNode(value, trailer) ) else: debug.dbg('infer_trailer: %s in %s', trailer, atom_values) @@ -183,7 +183,7 @@ def infer_atom(value, atom): if atom.type == 'name': if atom.value in ('True', 'False', 'None'): # 
Python 2... - return ContextSet([compiled.builtin_from_name(value.infer_state, atom.value)]) + return ValueSet([compiled.builtin_from_name(value.infer_state, atom.value)]) # This is the first global lookup. stmt = tree.search_ancestor( @@ -207,7 +207,7 @@ def infer_atom(value, atom): elif atom.type == 'keyword': # For False/True/None if atom.value in ('False', 'True', 'None'): - return ContextSet([compiled.builtin_from_name(value.infer_state, atom.value)]) + return ValueSet([compiled.builtin_from_name(value.infer_state, atom.value)]) elif atom.value == 'print': # print e.g. could be inferred like this in Python 2.7 return NO_VALUES @@ -219,7 +219,7 @@ def infer_atom(value, atom): elif isinstance(atom, tree.Literal): string = value.infer_state.compiled_subprocess.safe_literal_eval(atom.value) - return ContextSet([compiled.create_simple_object(value.infer_state, string)]) + return ValueSet([compiled.create_simple_object(value.infer_state, string)]) elif atom.type == 'strings': # Will be multiple string. 
value_set = infer_atom(value, atom.children[0]) @@ -250,7 +250,7 @@ def infer_atom(value, atom): pass if comp_for.type in ('comp_for', 'sync_comp_for'): - return ContextSet([iterable.comprehension_from_atom( + return ValueSet([iterable.comprehension_from_atom( value.infer_state, value, atom )]) @@ -262,10 +262,10 @@ def infer_atom(value, atom): array_node_c = [] if c[0] == '{' and (array_node == '}' or ':' in array_node_c or '**' in array_node_c): - new_value = iterable.DictLiteralContext(value.infer_state, value, atom) + new_value = iterable.DictLiteralValue(value.infer_state, value, atom) else: - new_value = iterable.SequenceLiteralContext(value.infer_state, value, atom) - return ContextSet([new_value]) + new_value = iterable.SequenceLiteralValue(value.infer_state, value, atom) + return ValueSet([new_value]) @_limit_value_infers @@ -305,7 +305,7 @@ def _infer_expr_stmt(value, stmt, seek_name=None): value_set = value.infer_node(rhs) if seek_name: - c_node = ContextualizedName(value, seek_name) + c_node = ValueualizedName(value, seek_name) value_set = check_tuple_assignments(value.infer_state, c_node, value_set) first_operator = next(stmt.yield_operators(), None) @@ -324,7 +324,7 @@ def _infer_expr_stmt(value, stmt, seek_name=None): # only in for loops without clutter, because they are # predictable. Also only do it, if the variable is not a tuple. 
node = for_stmt.get_testlist() - cn = ContextualizedNode(value, node) + cn = ValueualizedNode(value, node) ordered = list(cn.infer().iterate(cn)) for lazy_value in ordered: @@ -393,7 +393,7 @@ def _literals_to_types(infer_state, result): cls = compiled.builtin_from_name(infer_state, typ.name.string_name) new_result |= cls.execute_with_values() else: - new_result |= ContextSet([typ]) + new_result |= ValueSet([typ]) return new_result @@ -409,7 +409,7 @@ def _infer_comparison(infer_state, value, left_values, operator, right_values): if len(left_values) * len(right_values) > 6: return _literals_to_types(infer_state, left_values | right_values) else: - return ContextSet.from_sets( + return ValueSet.from_sets( _infer_comparison_part(infer_state, value, left, operator, right) for left in left_values for right in right_values @@ -445,11 +445,11 @@ def _bool_to_value(infer_state, bool_): def _get_tuple_ints(value): - if not isinstance(value, iterable.SequenceLiteralContext): + if not isinstance(value, iterable.SequenceLiteralValue): return None numbers = [] for lazy_value in value.py__iter__(): - if not isinstance(lazy_value, LazyTreeContext): + if not isinstance(lazy_value, LazyTreeValue): return None node = lazy_value.data if node.type != 'number': @@ -472,26 +472,26 @@ def _infer_comparison_part(infer_state, value, left, operator, right): if str_operator == '*': # for iterables, ignore * operations if isinstance(left, iterable.Sequence) or is_string(left): - return ContextSet([left]) + return ValueSet([left]) elif isinstance(right, iterable.Sequence) or is_string(right): - return ContextSet([right]) + return ValueSet([right]) elif str_operator == '+': if l_is_num and r_is_num or is_string(left) and is_string(right): - return ContextSet([left.execute_operation(right, str_operator)]) + return ValueSet([left.execute_operation(right, str_operator)]) elif _is_tuple(left) and _is_tuple(right) or _is_list(left) and _is_list(right): - return 
ContextSet([iterable.MergedArray(infer_state, (left, right))]) + return ValueSet([iterable.MergedArray(infer_state, (left, right))]) elif str_operator == '-': if l_is_num and r_is_num: - return ContextSet([left.execute_operation(right, str_operator)]) + return ValueSet([left.execute_operation(right, str_operator)]) elif str_operator == '%': # With strings and numbers the left type typically remains. Except for # `int() % float()`. - return ContextSet([left]) + return ValueSet([left]) elif str_operator in COMPARISON_OPERATORS: if left.is_compiled() and right.is_compiled(): # Possible, because the return is not an option. Just compare. try: - return ContextSet([left.execute_operation(right, str_operator)]) + return ValueSet([left.execute_operation(right, str_operator)]) except TypeError: # Could be True or False. pass @@ -499,7 +499,7 @@ def _infer_comparison_part(infer_state, value, left, operator, right): if str_operator in ('is', '!=', '==', 'is not'): operation = COMPARISON_OPERATORS[str_operator] bool_ = operation(left, right) - return ContextSet([_bool_to_value(infer_state, bool_)]) + return ValueSet([_bool_to_value(infer_state, bool_)]) if isinstance(left, VersionInfo): version_info = _get_tuple_ints(right) @@ -508,9 +508,9 @@ def _infer_comparison_part(infer_state, value, left, operator, right): infer_state.environment.version_info, tuple(version_info) ) - return ContextSet([_bool_to_value(infer_state, bool_result)]) + return ValueSet([_bool_to_value(infer_state, bool_result)]) - return ContextSet([_bool_to_value(infer_state, True), _bool_to_value(infer_state, False)]) + return ValueSet([_bool_to_value(infer_state, True), _bool_to_value(infer_state, False)]) elif str_operator == 'in': return NO_VALUES @@ -526,7 +526,7 @@ def _infer_comparison_part(infer_state, value, left, operator, right): analysis.add(value, 'type-error-operation', operator, message % (left, right)) - result = ContextSet([left, right]) + result = ValueSet([left, right]) debug.dbg('Used 
operator %s resulting in %s', operator, result) return result @@ -595,13 +595,13 @@ def tree_name_to_values(infer_state, value, tree_name): try: types = value.predefined_names[node][tree_name.value] except KeyError: - cn = ContextualizedNode(value, node.children[3]) + cn = ValueualizedNode(value, node.children[3]) for_types = iterate_values( cn.infer(), valueualized_node=cn, is_async=node.parent.type == 'async_stmt', ) - c_node = ContextualizedName(value, tree_name) + c_node = ValueualizedName(value, tree_name) types = check_tuple_assignments(infer_state, c_node, for_types) elif typ == 'expr_stmt': types = _remove_statements(infer_state, value, node, tree_name) @@ -635,14 +635,14 @@ def _apply_decorators(value, node): This is also the places where the decorators are processed. """ if node.type == 'classdef': - decoratee_value = ClassContext( + decoratee_value = ClassValue( value.infer_state, parent_value=value, tree_node=node ) else: - decoratee_value = FunctionContext.from_value(value, node) - initial = values = ContextSet([decoratee_value]) + decoratee_value = FunctionValue.from_value(value, node) + initial = values = ValueSet([decoratee_value]) for dec in reversed(node.get_decorators()): debug.dbg('decorator: %s %s', dec, values, color="MAGENTA") with debug.increase_indent_cm(): @@ -670,7 +670,7 @@ def _apply_decorators(value, node): debug.dbg('decorator end %s', values, color="MAGENTA") if values != initial: - return ContextSet([Decoratee(c, decoratee_value) for c in values]) + return ValueSet([Decoratee(c, decoratee_value) for c in values]) return values @@ -680,7 +680,7 @@ def check_tuple_assignments(infer_state, valueualized_name, value_set): """ lazy_value = None for index, node in valueualized_name.assignment_indexes(): - cn = ContextualizedNode(valueualized_name.value, node) + cn = ValueualizedNode(valueualized_name.value, node) iterated = value_set.iterate(cn) if isinstance(index, slice): # For no star unpacking is not possible. 
@@ -704,7 +704,7 @@ def infer_subscript_list(infer_state, value, index): """ if index == ':': # Like array[:] - return ContextSet([iterable.Slice(value, None, None, None)]) + return ValueSet([iterable.Slice(value, None, None, None)]) elif index.type == 'subscript' and not index.children[0] == '.': # subscript basically implies a slice operation, except for Python 2's @@ -722,9 +722,9 @@ def infer_subscript_list(infer_state, value, index): result.append(el) result += [None] * (3 - len(result)) - return ContextSet([iterable.Slice(value, *result)]) + return ValueSet([iterable.Slice(value, *result)]) elif index.type == 'subscriptlist': - return ContextSet([iterable.SequenceLiteralContext(infer_state, value, index)]) + return ValueSet([iterable.SequenceLiteralValue(infer_state, value, index)]) # No slices return value.infer_node(index) diff --git a/jedi/inference/sys_path.py b/jedi/inference/sys_path.py index 0c20dde7..e1729655 100644 --- a/jedi/inference/sys_path.py +++ b/jedi/inference/sys_path.py @@ -2,7 +2,7 @@ import os from jedi._compatibility import unicode, force_unicode, all_suffixes from jedi.inference.cache import infer_state_method_cache -from jedi.inference.base_value import ContextualizedNode +from jedi.inference.base_value import ValueualizedNode from jedi.inference.helpers import is_string from jedi.common.utils import traverse_parents from jedi.parser_utils import get_cached_code_lines @@ -60,7 +60,7 @@ def _paths_from_assignment(module_value, expr_stmt): except AssertionError: continue - cn = ContextualizedNode(module_value.create_value(expr_stmt), expr_stmt) + cn = ValueualizedNode(module_value.create_value(expr_stmt), expr_stmt) for lazy_value in cn.infer().iterate(cn): for value in lazy_value.infer(): if is_string(value): @@ -152,8 +152,8 @@ def _get_paths_from_buildout_script(infer_state, buildout_script_path): debug.warning('Error trying to read buildout_script: %s', buildout_script_path) return - from jedi.inference.value import ModuleContext - 
 module = ModuleContext( + from jedi.inference.value import ModuleValue + module = ModuleValue( infer_state, module_node, file_io, string_names=None, code_lines=get_cached_code_lines(infer_state.grammar, buildout_script_path), diff --git a/jedi/inference/utils.py b/jedi/inference/utils.py index 990a995e..3254d8e1 100644 --- a/jedi/inference/utils.py +++ b/jedi/inference/utils.py @@ -100,7 +100,7 @@ class PushBackIterator(object): @contextlib.contextmanager def ignored(*exceptions): """ - Context manager that ignores all of the specified exceptions. This will + Context manager that ignores all of the specified exceptions. This will be in the standard library starting with Python 3.4. """ try: diff --git a/jedi/inference/value/__init__.py b/jedi/inference/value/__init__.py index 3bfbe6c9..e2718613 100644 --- a/jedi/inference/value/__init__.py +++ b/jedi/inference/value/__init__.py @@ -1,6 +1,6 @@ -from jedi.inference.value.module import ModuleContext -from jedi.inference.value.klass import ClassContext -from jedi.inference.value.function import FunctionContext, \ - MethodContext, FunctionExecutionContext +from jedi.inference.value.module import ModuleValue +from jedi.inference.value.klass import ClassValue +from jedi.inference.value.function import FunctionValue, \ + MethodValue, FunctionExecutionValue from jedi.inference.value.instance import AnonymousInstance, BoundMethod, \ - CompiledInstance, AbstractInstanceContext, TreeInstance + CompiledInstance, AbstractInstanceValue, TreeInstance diff --git a/jedi/inference/value/decorator.py b/jedi/inference/value/decorator.py index 20617147..39c4b701 100644 --- a/jedi/inference/value/decorator.py +++ b/jedi/inference/value/decorator.py @@ -3,10 +3,10 @@ Decorators are not really values, however we need some wrappers to improve docstrings and other things around decorators. 
''' -from jedi.inference.base_value import ContextWrapper +from jedi.inference.base_value import ValueWrapper -class Decoratee(ContextWrapper): +class Decoratee(ValueWrapper): def __init__(self, wrapped_value, original_value): self._wrapped_value = wrapped_value self._original_value = original_value diff --git a/jedi/inference/value/function.py b/jedi/inference/value/function.py index eafe50d9..1542257f 100644 --- a/jedi/inference/value/function.py +++ b/jedi/inference/value/function.py @@ -11,11 +11,11 @@ from jedi.inference import helpers from jedi.inference.signature import TreeSignature from jedi.inference.arguments import AnonymousArguments from jedi.inference.filters import ParserTreeFilter, FunctionExecutionFilter -from jedi.inference.names import ContextName, AbstractNameDefinition, ParamName -from jedi.inference.base_value import ContextualizedNode, NO_VALUES, \ - ContextSet, TreeContext, ContextWrapper -from jedi.inference.lazy_value import LazyKnownContexts, LazyKnownContext, \ - LazyTreeContext +from jedi.inference.names import ValueName, AbstractNameDefinition, ParamName +from jedi.inference.base_value import ValueualizedNode, NO_VALUES, \ + ValueSet, TreeValue, ValueWrapper +from jedi.inference.lazy_value import LazyKnownValues, LazyKnownValue, \ + LazyTreeValue from jedi.inference.value import iterable from jedi import parser_utils from jedi.inference.parser_cache import get_yield_exprs @@ -35,10 +35,10 @@ class LambdaName(AbstractNameDefinition): return self._lambda_value.tree_node.start_pos def infer(self): - return ContextSet([self._lambda_value]) + return ValueSet([self._lambda_value]) -class FunctionAndClassBase(TreeContext): +class FunctionAndClassBase(TreeValue): def get_qualified_names(self): if self.parent_value.is_class(): n = self.parent_value.get_qualified_names() @@ -73,8 +73,8 @@ class FunctionMixin(object): from jedi.inference.value.instance import BoundMethod if instance is None: # Calling the Foo.bar results in the original bar 
function. - return ContextSet([self]) - return ContextSet([BoundMethod(instance, self)]) + return ValueSet([self]) + return ValueSet([BoundMethod(instance, self)]) def get_param_names(self): function_execution = self.get_function_execution() @@ -85,7 +85,7 @@ class FunctionMixin(object): def name(self): if self.tree_node.type == 'lambdef': return LambdaName(self) - return ContextName(self, self.tree_node.name) + return ValueName(self, self.tree_node.name) def py__name__(self): return self.name.string_name @@ -98,13 +98,13 @@ class FunctionMixin(object): if arguments is None: arguments = AnonymousArguments() - return FunctionExecutionContext(self.infer_state, self.parent_value, self, arguments) + return FunctionExecutionValue(self.infer_state, self.parent_value, self, arguments) def get_signatures(self): return [TreeSignature(f) for f in self.get_signature_functions()] -class FunctionContext(use_metaclass(CachedMetaClass, FunctionMixin, FunctionAndClassBase)): +class FunctionValue(use_metaclass(CachedMetaClass, FunctionMixin, FunctionAndClassBase)): def is_function(self): return True @@ -112,7 +112,7 @@ class FunctionContext(use_metaclass(CachedMetaClass, FunctionMixin, FunctionAndC def from_value(cls, value, tree_node): def create(tree_node): if value.is_class(): - return MethodContext( + return MethodValue( value.infer_state, value, parent_value=parent_value, @@ -134,7 +134,7 @@ class FunctionContext(use_metaclass(CachedMetaClass, FunctionMixin, FunctionAndC function = create(tree_node) if overloaded_funcs: - return OverloadedFunctionContext( + return OverloadedFunctionValue( function, [create(f) for f in overloaded_funcs] ) @@ -151,9 +151,9 @@ class FunctionContext(use_metaclass(CachedMetaClass, FunctionMixin, FunctionAndC return [self] -class MethodContext(FunctionContext): +class MethodValue(FunctionValue): def __init__(self, infer_state, class_value, *args, **kwargs): - super(MethodContext, self).__init__(infer_state, *args, **kwargs) + super(MethodValue, 
self).__init__(infer_state, *args, **kwargs) self.class_value = class_value def get_default_param_value(self): @@ -168,11 +168,11 @@ class MethodContext(FunctionContext): return names + (self.py__name__(),) -class FunctionExecutionContext(TreeContext): +class FunctionExecutionValue(TreeValue): function_execution_filter = FunctionExecutionFilter def __init__(self, infer_state, parent_value, function_value, var_args): - super(FunctionExecutionContext, self).__init__( + super(FunctionExecutionValue, self).__init__( infer_state, parent_value, function_value.tree_node, @@ -206,7 +206,7 @@ class FunctionExecutionContext(TreeContext): debug.dbg('Return unreachable: %s', r) else: if check_yields: - value_set |= ContextSet.from_sets( + value_set |= ValueSet.from_sets( lazy_value.infer() for lazy_value in self._get_yield_lazy_value(r) ) @@ -215,7 +215,7 @@ class FunctionExecutionContext(TreeContext): children = r.children except AttributeError: ctx = compiled.builtin_from_name(self.infer_state, u'None') - value_set |= ContextSet([ctx]) + value_set |= ValueSet([ctx]) else: value_set |= self.infer_node(children[1]) if check is flow_analysis.REACHABLE: @@ -227,16 +227,16 @@ class FunctionExecutionContext(TreeContext): if yield_expr.type == 'keyword': # `yield` just yields None. ctx = compiled.builtin_from_name(self.infer_state, u'None') - yield LazyKnownContext(ctx) + yield LazyKnownValue(ctx) return node = yield_expr.children[1] if node.type == 'yield_arg': # It must be a yield from. 
- cn = ContextualizedNode(self, node.children[1]) + cn = ValueualizedNode(self, node.children[1]) for lazy_value in cn.infer().iterate(cn): yield lazy_value else: - yield LazyTreeContext(self, node) + yield LazyTreeValue(self, node) @recursion.execution_recursion_decorator(default=iter([])) def get_yield_lazy_values(self, is_async=False): @@ -265,7 +265,7 @@ class FunctionExecutionContext(TreeContext): else: types = self.get_return_values(check_yields=True) if types: - yield LazyKnownContexts(types) + yield LazyKnownValues(types) return last_for_stmt = for_stmt @@ -277,7 +277,7 @@ class FunctionExecutionContext(TreeContext): yield result else: input_node = for_stmt.get_testlist() - cn = ContextualizedNode(self, input_node) + cn = ValueualizedNode(self, input_node) ordered = cn.infer().iterate(cn) ordered = list(ordered) for lazy_value in ordered: @@ -288,7 +288,7 @@ class FunctionExecutionContext(TreeContext): yield result def merge_yield_values(self, is_async=False): - return ContextSet.from_sets( + return ValueSet.from_sets( lazy_value.infer() for lazy_value in self.get_yield_lazy_values() ) @@ -338,7 +338,7 @@ class FunctionExecutionContext(TreeContext): yield_values = self.merge_yield_values(is_async=True) # The contravariant doesn't seem to be defined. generics = (yield_values.py__class__(), NO_VALUES) - return ContextSet( + return ValueSet( # In Python 3.6 AsyncGenerator is still a class. GenericClass(c, generics) for c in async_generator_classes @@ -350,19 +350,19 @@ class FunctionExecutionContext(TreeContext): return_values = self.get_return_values() # Only the first generic is relevant. 
generics = (return_values.py__class__(), NO_VALUES, NO_VALUES) - return ContextSet( + return ValueSet( GenericClass(c, generics) for c in async_classes ).execute_annotation() else: if is_generator: - return ContextSet([iterable.Generator(infer_state, self)]) + return ValueSet([iterable.Generator(infer_state, self)]) else: return self.get_return_values() -class OverloadedFunctionContext(FunctionMixin, ContextWrapper): +class OverloadedFunctionValue(FunctionMixin, ValueWrapper): def __init__(self, function, overloaded_functions): - super(OverloadedFunctionContext, self).__init__(function) + super(OverloadedFunctionValue, self).__init__(function) self._overloaded_functions = overloaded_functions def py__call__(self, arguments): @@ -383,7 +383,7 @@ class OverloadedFunctionContext(FunctionMixin, ContextWrapper): if self.infer_state.is_analysis: # In this case we want precision. return NO_VALUES - return ContextSet.from_sets(fe.infer() for fe in function_executions) + return ValueSet.from_sets(fe.infer() for fe in function_executions) def get_signature_functions(self): return self._overloaded_functions diff --git a/jedi/inference/value/instance.py b/jedi/inference/value/instance.py index a06b62c7..89116970 100644 --- a/jedi/inference/value/instance.py +++ b/jedi/inference/value/instance.py @@ -6,16 +6,16 @@ from jedi.inference import compiled from jedi.inference.compiled.value import CompiledObjectFilter from jedi.inference.helpers import values_from_qualified_names from jedi.inference.filters import AbstractFilter -from jedi.inference.names import ContextName, TreeNameDefinition -from jedi.inference.base_value import Context, NO_VALUES, ContextSet, \ - iterator_to_value_set, ContextWrapper -from jedi.inference.lazy_value import LazyKnownContext, LazyKnownContexts +from jedi.inference.names import ValueName, TreeNameDefinition +from jedi.inference.base_value import Value, NO_VALUES, ValueSet, \ + iterator_to_value_set, ValueWrapper +from jedi.inference.lazy_value import 
LazyKnownValue, LazyKnownValues from jedi.inference.cache import infer_state_method_cache from jedi.inference.arguments import AnonymousArguments, \ ValuesArguments, TreeArgumentsWrapper from jedi.inference.value.function import \ - FunctionContext, FunctionMixin, OverloadedFunctionContext -from jedi.inference.value.klass import ClassContext, apply_py__get__, \ + FunctionValue, FunctionMixin, OverloadedFunctionValue +from jedi.inference.value.klass import ClassValue, apply_py__get__, \ ClassFilter from jedi.inference.value import iterable from jedi.parser_utils import get_parent_scope @@ -28,7 +28,7 @@ class InstanceExecutedParam(object): self.string_name = self._tree_param.name.value def infer(self): - return ContextSet([self._instance]) + return ValueSet([self._instance]) def matches_signature(self): return True @@ -58,11 +58,11 @@ class AnonymousInstanceArguments(AnonymousArguments): return executed_params, [] -class AbstractInstanceContext(Context): +class AbstractInstanceValue(Value): api_type = u'instance' def __init__(self, infer_state, parent_value, class_value, var_args): - super(AbstractInstanceContext, self).__init__(infer_state, parent_value) + super(AbstractInstanceValue, self).__init__(infer_state, parent_value) # Generated instances are classes that are just generated by self # (No var_args) used. self.class_value = class_value @@ -81,9 +81,9 @@ class AbstractInstanceContext(Context): names = self.get_function_slot_names(u'__call__') if not names: # Means the Instance is not callable. 
- return super(AbstractInstanceContext, self).py__call__(arguments) + return super(AbstractInstanceValue, self).py__call__(arguments) - return ContextSet.from_sets(name.infer().execute(arguments) for name in names) + return ValueSet.from_sets(name.infer().execute(arguments) for name in names) def py__class__(self): return self.class_value @@ -103,7 +103,7 @@ class AbstractInstanceContext(Context): return [] def execute_function_slots(self, names, *inferred_args): - return ContextSet.from_sets( + return ValueSet.from_sets( name.infer().execute_with_values(*inferred_args) for name in names ) @@ -120,7 +120,7 @@ class AbstractInstanceContext(Context): obj = compiled.builtin_from_name(self.infer_state, u'None') return self.execute_function_slots(names, obj, class_value) else: - return ContextSet([self]) + return ValueSet([self]) def get_filters(self, search_global=None, until_position=None, origin_scope=None, include_self_names=True): @@ -151,18 +151,18 @@ class AbstractInstanceContext(Context): def py__getitem__(self, index_value_set, valueualized_node): names = self.get_function_slot_names(u'__getitem__') if not names: - return super(AbstractInstanceContext, self).py__getitem__( + return super(AbstractInstanceValue, self).py__getitem__( index_value_set, valueualized_node, ) args = ValuesArguments([index_value_set]) - return ContextSet.from_sets(name.infer().execute(args) for name in names) + return ValueSet.from_sets(name.infer().execute(args) for name in names) def py__iter__(self, valueualized_node=None): iter_slot_names = self.get_function_slot_names(u'__iter__') if not iter_slot_names: - return super(AbstractInstanceContext, self).py__iter__(valueualized_node) + return super(AbstractInstanceValue, self).py__iter__(valueualized_node) def iterate(): for generator in self.execute_function_slots(iter_slot_names): @@ -174,7 +174,7 @@ class AbstractInstanceContext(Context): name = u'__next__' next_slot_names = generator.get_function_slot_names(name) if next_slot_names: 
- yield LazyKnownContexts( + yield LazyKnownValues( generator.execute_function_slots(next_slot_names) ) else: @@ -192,7 +192,7 @@ class AbstractInstanceContext(Context): for name in self.get_function_slot_names(u'__init__'): # TODO is this correct? I think we need to check for functions. if isinstance(name, LazyInstanceClassName): - function = FunctionContext.from_value( + function = FunctionValue.from_value( self.parent_value, name.tree_name.parent ) @@ -209,7 +209,7 @@ class AbstractInstanceContext(Context): else: parent_value = self.create_instance_value(class_value, scope) if scope.type == 'funcdef': - func = FunctionContext.from_value( + func = FunctionValue.from_value( parent_value, scope, ) @@ -219,7 +219,7 @@ class AbstractInstanceContext(Context): else: return bound_method.get_function_execution() elif scope.type == 'classdef': - class_value = ClassContext(self.infer_state, parent_value, scope) + class_value = ClassValue(self.infer_state, parent_value, scope) return class_value elif scope.type in ('comp_for', 'sync_comp_for'): # Comprehensions currently don't have a special scope in Jedi. 
@@ -237,14 +237,14 @@ class AbstractInstanceContext(Context): self.var_args) -class CompiledInstance(AbstractInstanceContext): +class CompiledInstance(AbstractInstanceValue): def __init__(self, infer_state, parent_value, class_value, var_args): self._original_var_args = var_args super(CompiledInstance, self).__init__(infer_state, parent_value, class_value, var_args) @property def name(self): - return compiled.CompiledContextName(self, self.class_value.name.string_name) + return compiled.CompiledValueName(self, self.class_value.name.string_name) def get_first_non_keyword_argument_values(self): key, lazy_value = next(self._original_var_args.unpack(), ('', None)) @@ -257,7 +257,7 @@ class CompiledInstance(AbstractInstanceContext): return False -class TreeInstance(AbstractInstanceContext): +class TreeInstance(AbstractInstanceValue): def __init__(self, infer_state, parent_value, class_value, var_args): # I don't think that dynamic append lookups should happen here. That # sounds more like something that should go to py__iter__. @@ -273,7 +273,7 @@ class TreeInstance(AbstractInstanceContext): @property def name(self): - return ContextName(self, self.class_value.name.tree_name) + return ValueName(self, self.class_value.name.tree_name) # This can recurse, if the initialization of the class includes a reference # to itself. 
@@ -367,7 +367,7 @@ class CompiledInstanceClassFilter(AbstractFilter): ] -class BoundMethod(FunctionMixin, ContextWrapper): +class BoundMethod(FunctionMixin, ValueWrapper): def __init__(self, instance, function): super(BoundMethod, self).__init__(function) self.instance = instance @@ -390,7 +390,7 @@ class BoundMethod(FunctionMixin, ContextWrapper): return super(BoundMethod, self).get_function_execution(arguments) def py__call__(self, arguments): - if isinstance(self._wrapped_value, OverloadedFunctionContext): + if isinstance(self._wrapped_value, OverloadedFunctionValue): return self._wrapped_value.py__call__(self._get_arguments(arguments)) function_execution = self.get_function_execution(arguments) @@ -409,7 +409,7 @@ class BoundMethod(FunctionMixin, ContextWrapper): return '<%s: %s>' % (self.__class__.__name__, self._wrapped_value) -class CompiledBoundMethod(ContextWrapper): +class CompiledBoundMethod(ValueWrapper): def is_bound_method(self): return True @@ -517,7 +517,7 @@ class InstanceArguments(TreeArgumentsWrapper): self.instance = instance def unpack(self, func=None): - yield None, LazyKnownContext(self.instance) + yield None, LazyKnownValue(self.instance) for values in self._wrapped_arguments.unpack(func): yield values diff --git a/jedi/inference/value/iterable.py b/jedi/inference/value/iterable.py index 4f11b637..4a8b9864 100644 --- a/jedi/inference/value/iterable.py +++ b/jedi/inference/value/iterable.py @@ -28,8 +28,8 @@ from jedi._compatibility import force_unicode, is_py3 from jedi.inference import compiled from jedi.inference import analysis from jedi.inference import recursion -from jedi.inference.lazy_value import LazyKnownContext, LazyKnownContexts, \ - LazyTreeContext +from jedi.inference.lazy_value import LazyKnownValue, LazyKnownValues, \ + LazyTreeValue from jedi.inference.helpers import get_int_or_none, is_string, \ predefine_names, infer_call_of_leaf, reraise_getitem_errors, \ SimpleGetItemNotFound @@ -37,14 +37,14 @@ from 
jedi.inference.utils import safe_property, to_list from jedi.inference.cache import infer_state_method_cache from jedi.inference.filters import ParserTreeFilter, LazyAttributeOverwrite, \ publish_method -from jedi.inference.base_value import ContextSet, Context, NO_VALUES, \ - TreeContext, ContextualizedNode, iterate_values, HelperContextMixin, _sentinel +from jedi.inference.base_value import ValueSet, Value, NO_VALUES, \ + TreeValue, ValueualizedNode, iterate_values, HelperValueMixin, _sentinel from jedi.parser_utils import get_sync_comp_fors class IterableMixin(object): def py__stop_iteration_returns(self): - return ContextSet([compiled.builtin_from_name(self.infer_state, u'None')]) + return ValueSet([compiled.builtin_from_name(self.infer_state, u'None')]) # At the moment, safe values are simple values like "foo", 1 and not # lists/dicts. Therefore as a small speed optimization we can just do the @@ -59,7 +59,7 @@ class IterableMixin(object): raise ValueError("There exists no safe value for value %s" % self) return default else: - get_safe_value = Context.get_safe_value + get_safe_value = Value.get_safe_value class GeneratorBase(LazyAttributeOverwrite, IterableMixin): @@ -79,20 +79,20 @@ class GeneratorBase(LazyAttributeOverwrite, IterableMixin): @publish_method('__iter__') def py__iter__(self, valueualized_node=None): - return ContextSet([self]) + return ValueSet([self]) @publish_method('send') @publish_method('next', python_version_match=2) @publish_method('__next__', python_version_match=3) def py__next__(self): - return ContextSet.from_sets(lazy_value.infer() for lazy_value in self.py__iter__()) + return ValueSet.from_sets(lazy_value.infer() for lazy_value in self.py__iter__()) def py__stop_iteration_returns(self): - return ContextSet([compiled.builtin_from_name(self.infer_state, u'None')]) + return ValueSet([compiled.builtin_from_name(self.infer_state, u'None')]) @property def name(self): - return compiled.CompiledContextName(self, 'Generator') + return 
compiled.CompiledValueName(self, 'Generator') class Generator(GeneratorBase): @@ -111,7 +111,7 @@ class Generator(GeneratorBase): return "<%s of %s>" % (type(self).__name__, self._func_execution_value) -class CompForContext(TreeContext): +class CompForValue(TreeValue): @classmethod def from_comp_for(cls, parent_value, comp_for): return cls(parent_value.infer_state, parent_value, comp_for) @@ -159,7 +159,7 @@ def comprehension_from_atom(infer_state, value, atom): class ComprehensionMixin(object): @infer_state_method_cache() def _get_comp_for_value(self, parent_value, comp_for): - return CompForContext.from_comp_for(parent_value, comp_for) + return CompForValue.from_comp_for(parent_value, comp_for) def _nested(self, comp_fors, parent_value=None): comp_for = comp_fors[0] @@ -171,7 +171,7 @@ class ComprehensionMixin(object): input_types = parent_value.infer_node(input_node) # TODO: simulate await if self.is_async - cn = ContextualizedNode(parent_value, input_node) + cn = ValueualizedNode(parent_value, input_node) iterated = input_types.iterate(cn, is_async=is_async) exprlist = comp_for.children[1] for i, lazy_value in enumerate(iterated): @@ -201,7 +201,7 @@ class ComprehensionMixin(object): def py__iter__(self, valueualized_node=None): for set_ in self._iterate(): - yield LazyKnownContexts(set_) + yield LazyKnownValues(set_) def __repr__(self): return "<%s of %s>" % (type(self).__name__, self._sync_comp_for_node) @@ -217,7 +217,7 @@ class Sequence(LazyAttributeOverwrite, IterableMixin): @property def name(self): - return compiled.CompiledContextName(self, self.array_type) + return compiled.CompiledValueName(self, self.array_type) def _get_generics(self): return (self.merge_types_of_iterate().py__class__(),) @@ -241,7 +241,7 @@ class Sequence(LazyAttributeOverwrite, IterableMixin): def py__getitem__(self, index_value_set, valueualized_node): if self.array_type == 'dict': return self._dict_values() - return iterate_values(ContextSet([self])) + return 
iterate_values(ValueSet([self])) class _BaseComprehension(ComprehensionMixin): @@ -258,7 +258,7 @@ class ListComprehension(_BaseComprehension, Sequence): def py__simple_getitem__(self, index): if isinstance(index, slice): - return ContextSet([self]) + return ValueSet([self]) all_types = list(self.py__iter__()) with reraise_getitem_errors(IndexError, TypeError): @@ -287,7 +287,7 @@ class DictComprehension(ComprehensionMixin, Sequence): def py__iter__(self, valueualized_node=None): for keys, values in self._iterate(): - yield LazyKnownContexts(keys) + yield LazyKnownValues(keys) def py__simple_getitem__(self, index): for keys, values in self._iterate(): @@ -300,31 +300,31 @@ class DictComprehension(ComprehensionMixin, Sequence): raise SimpleGetItemNotFound() def _dict_keys(self): - return ContextSet.from_sets(keys for keys, values in self._iterate()) + return ValueSet.from_sets(keys for keys, values in self._iterate()) def _dict_values(self): - return ContextSet.from_sets(values for keys, values in self._iterate()) + return ValueSet.from_sets(values for keys, values in self._iterate()) @publish_method('values') def _imitate_values(self): - lazy_value = LazyKnownContexts(self._dict_values()) - return ContextSet([FakeSequence(self.infer_state, u'list', [lazy_value])]) + lazy_value = LazyKnownValues(self._dict_values()) + return ValueSet([FakeSequence(self.infer_state, u'list', [lazy_value])]) @publish_method('items') def _imitate_items(self): lazy_values = [ - LazyKnownContext( + LazyKnownValue( FakeSequence( self.infer_state, u'tuple', - [LazyKnownContexts(key), - LazyKnownContexts(value)] + [LazyKnownValues(key), + LazyKnownValues(value)] ) ) for key, value in self._iterate() ] - return ContextSet([FakeSequence(self.infer_state, u'list', lazy_values)]) + return ValueSet([FakeSequence(self.infer_state, u'list', lazy_values)]) def get_mapping_item_values(self): return self._dict_keys(), self._dict_values() @@ -335,21 +335,21 @@ class 
DictComprehension(ComprehensionMixin, Sequence): return [] -class SequenceLiteralContext(Sequence): +class SequenceLiteralValue(Sequence): _TUPLE_LIKE = 'testlist_star_expr', 'testlist', 'subscriptlist' mapping = {'(': u'tuple', '[': u'list', '{': u'set'} def __init__(self, infer_state, defining_value, atom): - super(SequenceLiteralContext, self).__init__(infer_state) + super(SequenceLiteralValue, self).__init__(infer_state) self.atom = atom self._defining_value = defining_value if self.atom.type in self._TUPLE_LIKE: self.array_type = u'tuple' else: - self.array_type = SequenceLiteralContext.mapping[atom.children[0]] + self.array_type = SequenceLiteralValue.mapping[atom.children[0]] """The builtin name of the array (list, set, tuple or dict).""" def py__simple_getitem__(self, index): @@ -368,7 +368,7 @@ class SequenceLiteralContext(Sequence): raise SimpleGetItemNotFound('No key found in dictionary %s.' % self) if isinstance(index, slice): - return ContextSet([self]) + return ValueSet([self]) else: with reraise_getitem_errors(TypeError, KeyError, IndexError): node = self.get_tree_entries()[index] @@ -387,15 +387,15 @@ class SequenceLiteralContext(Sequence): # We don't know which dict index comes first, therefore always # yield all the types. for _ in types: - yield LazyKnownContexts(types) + yield LazyKnownValues(types) else: for node in self.get_tree_entries(): if node == ':' or node.type == 'subscript': # TODO this should probably use at least part of the code # of infer_subscript_list. 
- yield LazyKnownContext(Slice(self._defining_value, None, None, None)) + yield LazyKnownValue(Slice(self._defining_value, None, None, None)) else: - yield LazyTreeContext(self._defining_value, node) + yield LazyTreeValue(self._defining_value, node) for addition in check_array_additions(self._defining_value, self): yield addition @@ -404,7 +404,7 @@ class SequenceLiteralContext(Sequence): return len(self.get_tree_entries()) def _dict_values(self): - return ContextSet.from_sets( + return ValueSet.from_sets( self._defining_value.infer_node(v) for k, v in self.get_tree_entries() ) @@ -462,39 +462,39 @@ class SequenceLiteralContext(Sequence): for key_node, value in self.get_tree_entries(): for key in self._defining_value.infer_node(key_node): if is_string(key): - yield key.get_safe_value(), LazyTreeContext(self._defining_value, value) + yield key.get_safe_value(), LazyTreeValue(self._defining_value, value) def __repr__(self): return "<%s of %s>" % (self.__class__.__name__, self.atom) -class DictLiteralContext(_DictMixin, SequenceLiteralContext): +class DictLiteralValue(_DictMixin, SequenceLiteralValue): array_type = u'dict' def __init__(self, infer_state, defining_value, atom): - super(SequenceLiteralContext, self).__init__(infer_state) + super(SequenceLiteralValue, self).__init__(infer_state) self._defining_value = defining_value self.atom = atom @publish_method('values') def _imitate_values(self): - lazy_value = LazyKnownContexts(self._dict_values()) - return ContextSet([FakeSequence(self.infer_state, u'list', [lazy_value])]) + lazy_value = LazyKnownValues(self._dict_values()) + return ValueSet([FakeSequence(self.infer_state, u'list', [lazy_value])]) @publish_method('items') def _imitate_items(self): lazy_values = [ - LazyKnownContext(FakeSequence( + LazyKnownValue(FakeSequence( self.infer_state, u'tuple', - (LazyTreeContext(self._defining_value, key_node), - LazyTreeContext(self._defining_value, value_node)) + (LazyTreeValue(self._defining_value, key_node), + 
LazyTreeValue(self._defining_value, value_node)) )) for key_node, value_node in self.get_tree_entries() ] - return ContextSet([FakeSequence(self.infer_state, u'list', lazy_values)]) + return ValueSet([FakeSequence(self.infer_state, u'list', lazy_values)]) def _dict_keys(self): - return ContextSet.from_sets( + return ValueSet.from_sets( self._defining_value.infer_node(k) for k, v in self.get_tree_entries() ) @@ -503,9 +503,9 @@ class DictLiteralContext(_DictMixin, SequenceLiteralContext): return self._dict_keys(), self._dict_values() -class _FakeArray(SequenceLiteralContext): +class _FakeArray(SequenceLiteralValue): def __init__(self, infer_state, container, type): - super(SequenceLiteralContext, self).__init__(infer_state) + super(SequenceLiteralValue, self).__init__(infer_state) self.array_type = type self.atom = container # TODO is this class really needed? @@ -521,7 +521,7 @@ class FakeSequence(_FakeArray): def py__simple_getitem__(self, index): if isinstance(index, slice): - return ContextSet([self]) + return ValueSet([self]) with reraise_getitem_errors(IndexError, TypeError): lazy_value = self._lazy_value_list[index] @@ -544,7 +544,7 @@ class FakeDict(_DictMixin, _FakeArray): def py__iter__(self, valueualized_node=None): for key in self._dct: - yield LazyKnownContext(compiled.create_simple_object(self.infer_state, key)) + yield LazyKnownValue(compiled.create_simple_object(self.infer_state, key)) def py__simple_getitem__(self, index): if is_py3 and self.infer_state.environment.version_info.major == 2: @@ -568,16 +568,16 @@ class FakeDict(_DictMixin, _FakeArray): @publish_method('values') def _values(self): - return ContextSet([FakeSequence( + return ValueSet([FakeSequence( self.infer_state, u'tuple', - [LazyKnownContexts(self._dict_values())] + [LazyKnownValues(self._dict_values())] )]) def _dict_values(self): - return ContextSet.from_sets(lazy_value.infer() for lazy_value in self._dct.values()) + return ValueSet.from_sets(lazy_value.infer() for lazy_value in 
self._dct.values()) def _dict_keys(self): - return ContextSet.from_sets(lazy_value.infer() for lazy_value in self.py__iter__()) + return ValueSet.from_sets(lazy_value.infer() for lazy_value in self.py__iter__()) def get_mapping_item_values(self): return self._dict_keys(), self._dict_values() @@ -597,7 +597,7 @@ class MergedArray(_FakeArray): yield lazy_value def py__simple_getitem__(self, index): - return ContextSet.from_sets(lazy_value.infer() for lazy_value in self.py__iter__()) + return ValueSet.from_sets(lazy_value.infer() for lazy_value in self.py__iter__()) def get_tree_entries(self): for array in self._arrays: @@ -744,10 +744,10 @@ def get_dynamic_array_instance(instance, arguments): """Used for set() and list() instances.""" ai = _ArrayInstance(instance, arguments) from jedi.inference import arguments - return arguments.ValuesArguments([ContextSet([ai])]) + return arguments.ValuesArguments([ValueSet([ai])]) -class _ArrayInstance(HelperContextMixin): +class _ArrayInstance(HelperValueMixin): """ Used for the usage of set() and list(). This is definitely a hack, but a good one :-) diff --git a/jedi/inference/value/klass.py b/jedi/inference/value/klass.py index 45ab3a66..bf460dc2 100644 --- a/jedi/inference/value/klass.py +++ b/jedi/inference/value/klass.py @@ -25,7 +25,7 @@ py__iter__() Returns a generator of a set of types. py__class__() Returns the class of an instance. py__simple_getitem__(index: int/str) Returns a a set of types of the index. Can raise an IndexError/KeyError. -py__getitem__(indexes: ContextSet) Returns a a set of types of the index. +py__getitem__(indexes: ValueSet) Returns a a set of types of the index. py__file__() Only on modules. Returns None if does not exist. py__package__() -> List[str] Only on modules. For the import system. 
@@ -42,11 +42,11 @@ from jedi.parser_utils import get_cached_parent_scope from jedi.inference.cache import infer_state_method_cache, CachedMetaClass, \ infer_state_method_generator_cache from jedi.inference import compiled -from jedi.inference.lazy_value import LazyKnownContexts +from jedi.inference.lazy_value import LazyKnownValues from jedi.inference.filters import ParserTreeFilter -from jedi.inference.names import TreeNameDefinition, ContextName +from jedi.inference.names import TreeNameDefinition, ValueName from jedi.inference.arguments import unpack_arglist, ValuesArguments -from jedi.inference.base_value import ContextSet, iterator_to_value_set, \ +from jedi.inference.base_value import ValueSet, iterator_to_value_set, \ NO_VALUES from jedi.inference.value.function import FunctionAndClassBase from jedi.plugins import plugin_manager @@ -141,14 +141,14 @@ class ClassMixin(object): from jedi.inference.value import TreeInstance if arguments is None: arguments = ValuesArguments([]) - return ContextSet([TreeInstance(self.infer_state, self.parent_value, self, arguments)]) + return ValueSet([TreeInstance(self.infer_state, self.parent_value, self, arguments)]) def py__class__(self): return compiled.builtin_from_name(self.infer_state, u'type') @property def name(self): - return ContextName(self, self.tree_node.name) + return ValueName(self, self.tree_node.name) def py__name__(self): return self.name.string_name @@ -215,7 +215,7 @@ class ClassMixin(object): if not is_instance: from jedi.inference.compiled import builtin_from_name type_ = builtin_from_name(self.infer_state, u'type') - assert isinstance(type_, ClassContext) + assert isinstance(type_, ClassValue) if type_ != self: for instance in type_.py__call__(): instance_filters = instance.get_filters() @@ -237,7 +237,7 @@ class ClassMixin(object): ) -class ClassContext(use_metaclass(CachedMetaClass, ClassMixin, FunctionAndClassBase)): +class ClassValue(use_metaclass(CachedMetaClass, ClassMixin, FunctionAndClassBase)): 
api_type = u'class' @infer_state_method_cache() @@ -276,15 +276,15 @@ class ClassContext(use_metaclass(CachedMetaClass, ClassMixin, FunctionAndClassBa if self.py__name__() == 'object' \ and self.parent_value == self.infer_state.builtins_module: return [] - return [LazyKnownContexts( + return [LazyKnownValues( self.infer_state.builtins_module.py__getattribute__('object') )] def py__getitem__(self, index_value_set, valueualized_node): from jedi.inference.gradual.typing import LazyGenericClass if not index_value_set: - return ContextSet([self]) - return ContextSet( + return ValueSet([self]) + return ValueSet( LazyGenericClass( self, index_value, @@ -310,11 +310,11 @@ class ClassContext(use_metaclass(CachedMetaClass, ClassMixin, FunctionAndClassBa yield type_var_dict.get(type_var.py__name__(), NO_VALUES) if type_var_dict: - return ContextSet([GenericClass( + return ValueSet([GenericClass( self, generics=tuple(remap_type_vars()) )]) - return ContextSet({self}) + return ValueSet({self}) @plugin_manager.decorate() def get_metaclass_filters(self, metaclass): @@ -326,8 +326,8 @@ class ClassContext(use_metaclass(CachedMetaClass, ClassMixin, FunctionAndClassBa args = self._get_bases_arguments() if args is not None: m = [value for key, value in args.unpack() if key == 'metaclass'] - metaclasses = ContextSet.from_sets(lazy_value.infer() for lazy_value in m) - metaclasses = ContextSet(m for m in metaclasses if m.is_class()) + metaclasses = ValueSet.from_sets(lazy_value.infer() for lazy_value in m) + metaclasses = ValueSet(m for m in metaclasses if m.is_class()) if metaclasses: return metaclasses diff --git a/jedi/inference/value/module.py b/jedi/inference/value/module.py index e7829c0a..964647d7 100644 --- a/jedi/inference/value/module.py +++ b/jedi/inference/value/module.py @@ -3,14 +3,14 @@ import os from jedi import debug from jedi.inference.cache import infer_state_method_cache -from jedi.inference.names import ContextNameMixin, AbstractNameDefinition +from 
jedi.inference.names import ValueNameMixin, AbstractNameDefinition from jedi.inference.filters import GlobalNameFilter, ParserTreeFilter, DictFilter, MergedFilter from jedi.inference import compiled -from jedi.inference.base_value import TreeContext +from jedi.inference.base_value import TreeValue from jedi.inference.names import SubModuleName from jedi.inference.helpers import values_from_qualified_names from jedi.inference.compiled import create_simple_object -from jedi.inference.base_value import ContextSet +from jedi.inference.base_value import ValueSet class _ModuleAttributeName(AbstractNameDefinition): @@ -30,13 +30,13 @@ class _ModuleAttributeName(AbstractNameDefinition): if self.parent_value.infer_state.environment.version_info.major == 2 \ and not isinstance(s, bytes): s = s.encode('utf-8') - return ContextSet([ + return ValueSet([ create_simple_object(self.parent_value.infer_state, s) ]) return compiled.get_string_value_set(self.parent_value.infer_state) -class ModuleName(ContextNameMixin, AbstractNameDefinition): +class ModuleName(ValueNameMixin, AbstractNameDefinition): start_pos = 1, 0 def __init__(self, value, name): @@ -132,7 +132,7 @@ class ModuleMixin(SubModuleDictMixin): def _string_name(self): """ This is used for the goto functions. """ # TODO It's ugly that we even use this, the name is usually well known - # ahead so just pass it when create a ModuleContext. + # ahead so just pass it when create a ModuleValue. 
if self._path is None: return '' # no path -> empty name else: @@ -173,7 +173,7 @@ class ModuleMixin(SubModuleDictMixin): ).follow() for module in new: - if isinstance(module, ModuleContext): + if isinstance(module, ModuleValue): modules += module.star_imports() modules += new return modules @@ -187,12 +187,12 @@ class ModuleMixin(SubModuleDictMixin): return () -class ModuleContext(ModuleMixin, TreeContext): +class ModuleValue(ModuleMixin, TreeValue): api_type = u'module' parent_value = None def __init__(self, infer_state, module_node, file_io, string_names, code_lines, is_package=False): - super(ModuleContext, self).__init__( + super(ModuleValue, self).__init__( infer_state, parent_value=None, tree_node=module_node @@ -210,9 +210,9 @@ class ModuleContext(ModuleMixin, TreeContext): if self._path is not None and self._path.endswith('.pyi'): # Currently this is the way how we identify stubs when e.g. goto is # used in them. This could be changed if stubs would be identified - # sooner and used as StubModuleContext. + # sooner and used as StubModuleValue. 
return True - return super(ModuleContext, self).is_stub() + return super(ModuleValue, self).is_stub() def py__name__(self): if self.string_names is None: diff --git a/jedi/inference/value/namespace.py b/jedi/inference/value/namespace.py index 808f6c8c..20465e76 100644 --- a/jedi/inference/value/namespace.py +++ b/jedi/inference/value/namespace.py @@ -1,11 +1,11 @@ from jedi.inference.cache import infer_state_method_cache from jedi.inference.filters import DictFilter -from jedi.inference.names import ContextNameMixin, AbstractNameDefinition -from jedi.inference.base_value import Context +from jedi.inference.names import ValueNameMixin, AbstractNameDefinition +from jedi.inference.base_value import Value from jedi.inference.value.module import SubModuleDictMixin -class ImplicitNSName(ContextNameMixin, AbstractNameDefinition): +class ImplicitNSName(ValueNameMixin, AbstractNameDefinition): """ Accessing names for implicit namespace packages should infer to nothing. This object will prevent Jedi from raising exceptions @@ -15,7 +15,7 @@ class ImplicitNSName(ContextNameMixin, AbstractNameDefinition): self.string_name = string_name -class ImplicitNamespaceContext(Context, SubModuleDictMixin): +class ImplicitNamespaceValue(Value, SubModuleDictMixin): """ Provides support for implicit namespace packages """ @@ -26,7 +26,7 @@ class ImplicitNamespaceContext(Context, SubModuleDictMixin): parent_value = None def __init__(self, infer_state, fullname, paths): - super(ImplicitNamespaceContext, self).__init__(infer_state, parent_value=None) + super(ImplicitNamespaceValue, self).__init__(infer_state, parent_value=None) self.infer_state = infer_state self._fullname = fullname self._paths = paths diff --git a/jedi/plugins/stdlib.py b/jedi/plugins/stdlib.py index b1fa7eb8..f98f4be6 100644 --- a/jedi/plugins/stdlib.py +++ b/jedi/plugins/stdlib.py @@ -21,16 +21,16 @@ from jedi.inference.arguments import ValuesArguments, \ from jedi.inference import analysis from jedi.inference import 
compiled from jedi.inference.value.instance import BoundMethod, InstanceArguments -from jedi.inference.base_value import ContextualizedNode, \ - NO_VALUES, ContextSet, ContextWrapper, LazyContextWrapper -from jedi.inference.value import ClassContext, ModuleContext, \ - FunctionExecutionContext +from jedi.inference.base_value import ValueualizedNode, \ + NO_VALUES, ValueSet, ValueWrapper, LazyValueWrapper +from jedi.inference.value import ClassValue, ModuleValue, \ + FunctionExecutionValue from jedi.inference.value.klass import ClassMixin from jedi.inference.value.function import FunctionMixin from jedi.inference.value import iterable -from jedi.inference.lazy_value import LazyTreeContext, LazyKnownContext, \ - LazyKnownContexts -from jedi.inference.names import ContextName, BaseTreeParamName +from jedi.inference.lazy_value import LazyTreeValue, LazyKnownValue, \ + LazyKnownValues +from jedi.inference.names import ValueName, BaseTreeParamName from jedi.inference.syntax_tree import is_string from jedi.inference.filters import AttributeOverwrite, publish_method, \ ParserTreeFilter, DictFilter @@ -132,7 +132,7 @@ def execute(callback): ) elif value.py__name__() in ('deleter', 'getter', 'setter'): if value.class_value.py__name__() == 'property': - return ContextSet([value.instance]) + return ValueSet([value.instance]) return call() @@ -243,7 +243,7 @@ def builtins_type(objects, bases, dicts): return objects.py__class__() -class SuperInstance(LazyContextWrapper): +class SuperInstance(LazyValueWrapper): """To be used like the object ``super`` returns.""" def __init__(self, infer_state, instance): self.infer_state = infer_state @@ -269,12 +269,12 @@ class SuperInstance(LazyContextWrapper): @argument_clinic('[type[, obj]], /', want_value=True) def builtins_super(types, objects, value): - if isinstance(value, FunctionExecutionContext): + if isinstance(value, FunctionExecutionValue): if isinstance(value.var_args, InstanceArguments): instance = value.var_args.instance # TODO 
if a class is given it doesn't have to be the direct super # class, it can be an anecestor from long ago. - return ContextSet({SuperInstance(instance.infer_state, instance)}) + return ValueSet({SuperInstance(instance.infer_state, instance)}) return NO_VALUES @@ -291,7 +291,7 @@ class ReversedObject(AttributeOverwrite): @publish_method('next', python_version_match=2) @publish_method('__next__', python_version_match=3) def py__next__(self): - return ContextSet.from_sets( + return ValueSet.from_sets( lazy_value.infer() for lazy_value in self._iter_list ) @@ -303,9 +303,9 @@ def builtins_reversed(sequences, obj, arguments): # values again. key, lazy_value = next(arguments.unpack()) cn = None - if isinstance(lazy_value, LazyTreeContext): + if isinstance(lazy_value, LazyTreeValue): # TODO access private - cn = ContextualizedNode(lazy_value.value, lazy_value.data) + cn = ValueualizedNode(lazy_value.value, lazy_value.data) ordered = list(sequences.iterate(cn)) # Repack iterator values and then run it the normal way. This is @@ -313,7 +313,7 @@ def builtins_reversed(sequences, obj, arguments): # would fail in certain cases like `reversed(x).__iter__` if we # just returned the result directly. seq, = obj.infer_state.typing_module.py__getattribute__('Iterator').execute_with_values() - return ContextSet([ReversedObject(seq, list(reversed(ordered)))]) + return ValueSet([ReversedObject(seq, list(reversed(ordered)))]) @argument_clinic('obj, type, /', want_arguments=True, want_infer_state=True) @@ -338,40 +338,40 @@ def builtins_isinstance(objects, types, arguments, infer_state): elif cls_or_tup.name.string_name == 'tuple' \ and cls_or_tup.get_root_value() == infer_state.builtins_module: # Check for tuples. 
- classes = ContextSet.from_sets( + classes = ValueSet.from_sets( lazy_value.infer() for lazy_value in cls_or_tup.iterate() ) bool_results.add(any(cls in mro for cls in classes)) else: _, lazy_value = list(arguments.unpack())[1] - if isinstance(lazy_value, LazyTreeContext): + if isinstance(lazy_value, LazyTreeValue): node = lazy_value.data message = 'TypeError: isinstance() arg 2 must be a ' \ 'class, type, or tuple of classes and types, ' \ 'not %s.' % cls_or_tup analysis.add(lazy_value.value, 'type-error-isinstance', node, message) - return ContextSet( + return ValueSet( compiled.builtin_from_name(infer_state, force_unicode(str(b))) for b in bool_results ) -class StaticMethodObject(AttributeOverwrite, ContextWrapper): +class StaticMethodObject(AttributeOverwrite, ValueWrapper): def get_object(self): return self._wrapped_value def py__get__(self, instance, klass): - return ContextSet([self._wrapped_value]) + return ValueSet([self._wrapped_value]) @argument_clinic('sequence, /') def builtins_staticmethod(functions): - return ContextSet(StaticMethodObject(f) for f in functions) + return ValueSet(StaticMethodObject(f) for f in functions) -class ClassMethodObject(AttributeOverwrite, ContextWrapper): +class ClassMethodObject(AttributeOverwrite, ValueWrapper): def __init__(self, class_method_obj, function): super(ClassMethodObject, self).__init__(class_method_obj) self._function = function @@ -380,13 +380,13 @@ class ClassMethodObject(AttributeOverwrite, ContextWrapper): return self._wrapped_value def py__get__(self, obj, class_value): - return ContextSet([ + return ValueSet([ ClassMethodGet(__get__, class_value, self._function) for __get__ in self._wrapped_value.py__getattribute__('__get__') ]) -class ClassMethodGet(AttributeOverwrite, ContextWrapper): +class ClassMethodGet(AttributeOverwrite, ValueWrapper): def __init__(self, get_method, klass, function): super(ClassMethodGet, self).__init__(get_method) self._class = klass @@ -408,14 +408,14 @@ class 
ClassMethodArguments(TreeArgumentsWrapper): self._class = klass def unpack(self, func=None): - yield None, LazyKnownContext(self._class) + yield None, LazyKnownValue(self._class) for values in self._wrapped_arguments.unpack(func): yield values @argument_clinic('sequence, /', want_obj=True, want_arguments=True) def builtins_classmethod(functions, obj, arguments): - return ContextSet( + return ValueSet( ClassMethodObject(class_method_object, function) for class_method_object in obj.py__call__(arguments=arguments) for function in functions @@ -472,14 +472,14 @@ def collections_namedtuple(obj, arguments, callback): # Parse source code module = infer_state.grammar.parse(code) generated_class = next(module.iter_classdefs()) - parent_value = ModuleContext( + parent_value = ModuleValue( infer_state, module, file_io=None, string_names=None, code_lines=parso.split_lines(code, keepends=True), ) - return ContextSet([ClassContext(infer_state, parent_value, generated_class)]) + return ValueSet([ClassValue(infer_state, parent_value, generated_class)]) class PartialObject(object): @@ -550,7 +550,7 @@ class MergedPartialArguments(AbstractArguments): def functools_partial(obj, arguments, callback): - return ContextSet( + return ValueSet( PartialObject(instance, arguments) for instance in obj.py__call__(arguments) ) @@ -563,7 +563,7 @@ def _return_first_param(firsts): @argument_clinic('seq') def _random_choice(sequences): - return ContextSet.from_sets( + return ValueSet.from_sets( lazy_value.infer() for sequence in sequences for lazy_value in sequence.py__iter__() @@ -573,13 +573,13 @@ def _random_choice(sequences): def _dataclass(obj, arguments, callback): for c in _follow_param(obj.infer_state, arguments, 0): if c.is_class(): - return ContextSet([DataclassWrapper(c)]) + return ValueSet([DataclassWrapper(c)]) else: - return ContextSet([obj]) + return ValueSet([obj]) return NO_VALUES -class DataclassWrapper(ContextWrapper, ClassMixin): +class DataclassWrapper(ValueWrapper, 
ClassMixin): def get_signatures(self): param_names = [] for cls in reversed(list(self.py__mro__())): @@ -630,7 +630,7 @@ class DataclassParamName(BaseTreeParamName): return self.parent_value.infer_node(self.annotation_node) -class ItemGetterCallable(ContextWrapper): +class ItemGetterCallable(ValueWrapper): def __init__(self, instance, args_value_set): super(ItemGetterCallable, self).__init__(instance) self._args_value_set = args_value_set @@ -644,11 +644,11 @@ class ItemGetterCallable(ContextWrapper): # TODO we need to add the valueualized value. value_set |= item_value_set.get_item(lazy_values[0].infer(), None) else: - value_set |= ContextSet([iterable.FakeSequence( + value_set |= ValueSet([iterable.FakeSequence( self._wrapped_value.infer_state, 'list', [ - LazyKnownContexts(item_value_set.get_item(lazy_value.infer(), None)) + LazyKnownValues(item_value_set.get_item(lazy_value.infer(), None)) for lazy_value in lazy_values ], )]) @@ -657,19 +657,19 @@ class ItemGetterCallable(ContextWrapper): @argument_clinic('func, /') def _functools_wraps(funcs): - return ContextSet(WrapsCallable(func) for func in funcs) + return ValueSet(WrapsCallable(func) for func in funcs) -class WrapsCallable(ContextWrapper): +class WrapsCallable(ValueWrapper): # XXX this is not the correct wrapped value, it should be a weird # partials object, but it doesn't matter, because it's always used as a # decorator anyway. 
@repack_with_argument_clinic('func, /') def py__call__(self, funcs): - return ContextSet({Wrapped(func, self._wrapped_value) for func in funcs}) + return ValueSet({Wrapped(func, self._wrapped_value) for func in funcs}) -class Wrapped(ContextWrapper, FunctionMixin): +class Wrapped(ValueWrapper, FunctionMixin): def __init__(self, func, original_function): super(Wrapped, self).__init__(func) self._original_function = original_function @@ -684,7 +684,7 @@ class Wrapped(ContextWrapper, FunctionMixin): @argument_clinic('*args, /', want_obj=True, want_arguments=True) def _operator_itemgetter(args_value_set, obj, arguments): - return ContextSet([ + return ValueSet([ ItemGetterCallable(instance, args_value_set) for instance in obj.py__call__(arguments) ]) @@ -699,7 +699,7 @@ def _create_string_input_function(func): if s is not None: s = func(s) yield compiled.create_simple_object(value.infer_state, s) - values = ContextSet(iterate()) + values = ValueSet(iterate()) if values: return values return obj.py__call__(arguments) @@ -724,7 +724,7 @@ def _os_path_join(args_set, callback): string += force_unicode(s) is_first = False else: - return ContextSet([compiled.create_simple_object(sequence.infer_state, string)]) + return ValueSet([compiled.create_simple_object(sequence.infer_state, string)]) return callback() @@ -801,7 +801,7 @@ def get_metaclass_filters(func): return wrapper -class EnumInstance(LazyContextWrapper): +class EnumInstance(LazyValueWrapper): def __init__(self, cls, name): self.infer_state = cls.infer_state self._cls = cls # Corresponds to super().__self__ @@ -810,7 +810,7 @@ class EnumInstance(LazyContextWrapper): @safe_property def name(self): - return ContextName(self, self._name.tree_name) + return ValueName(self, self._name.tree_name) def _get_wrapped_value(self): obj, = self._cls.execute_with_values() @@ -828,7 +828,7 @@ class EnumInstance(LazyContextWrapper): def tree_name_to_values(func): def wrapper(infer_state, value, tree_name): if tree_name.value == 
'sep' and value.is_module() and value.py__name__() == 'os.path': - return ContextSet({ + return ValueSet({ compiled.create_simple_object(infer_state, os.path.sep), }) return func(infer_state, value, tree_name) diff --git a/test/test_api/test_classes.py b/test/test_api/test_classes.py index 515d7d02..31084f39 100644 --- a/test/test_api/test_classes.py +++ b/test/test_api/test_classes.py @@ -8,7 +8,7 @@ import pytest import jedi from jedi import __doc__ as jedi_doc -from jedi.inference.compiled import CompiledContextName +from jedi.inference.compiled import CompiledValueName def test_is_keyword(Script): @@ -436,7 +436,7 @@ def test_builtin_module_with_path(Script): confusing. """ semlock, = Script('from _multiprocessing import SemLock').goto_definitions() - assert isinstance(semlock._name, CompiledContextName) + assert isinstance(semlock._name, CompiledValueName) assert semlock.module_path is None assert semlock.in_builtin_module() is True assert semlock.name == 'SemLock' diff --git a/test/test_inference/test_gradual/test_typeshed.py b/test/test_inference/test_gradual/test_typeshed.py index 8064fcff..4a30cb51 100644 --- a/test/test_inference/test_gradual/test_typeshed.py +++ b/test/test_inference/test_gradual/test_typeshed.py @@ -4,8 +4,8 @@ import pytest from parso.utils import PythonVersionInfo from jedi.inference.gradual import typeshed, stub_value -from jedi.inference.value import TreeInstance, BoundMethod, FunctionContext, \ - MethodContext, ClassContext +from jedi.inference.value import TreeInstance, BoundMethod, FunctionValue, \ + MethodValue, ClassValue TYPESHED_PYTHON3 = os.path.join(typeshed.TYPESHED_PATH, 'stdlib', '3') @@ -48,14 +48,14 @@ def test_function(Script, environment): code = 'import threading; threading.current_thread' def_, = Script(code).goto_definitions() value = def_._name._value - assert isinstance(value, FunctionContext), value + assert isinstance(value, FunctionValue), value def_, = Script(code + '()').goto_definitions() value = 
def_._name._value assert isinstance(value, TreeInstance) def_, = Script('import threading; threading.Thread').goto_definitions() - assert isinstance(def_._name._value, ClassContext), def_ + assert isinstance(def_._name._value, ClassValue), def_ def test_keywords_variable(Script): @@ -70,7 +70,7 @@ def test_keywords_variable(Script): def test_class(Script): def_, = Script('import threading; threading.Thread').goto_definitions() value = def_._name._value - assert isinstance(value, ClassContext), value + assert isinstance(value, ClassValue), value def test_instance(Script): @@ -82,7 +82,7 @@ def test_instance(Script): def test_class_function(Script): def_, = Script('import threading; threading.Thread.getName').goto_definitions() value = def_._name._value - assert isinstance(value, MethodContext), value + assert isinstance(value, MethodValue), value def test_method(Script): @@ -90,7 +90,7 @@ def test_method(Script): def_, = Script(code).goto_definitions() value = def_._name._value assert isinstance(value, BoundMethod), value - assert isinstance(value._wrapped_value, MethodContext), value + assert isinstance(value._wrapped_value, MethodValue), value def_, = Script(code + '()').goto_definitions() value = def_._name._value diff --git a/test/test_settings.py b/test/test_settings.py index 46127df9..fdfffb52 100644 --- a/test/test_settings.py +++ b/test/test_settings.py @@ -1,9 +1,9 @@ import pytest from jedi import settings -from jedi.inference.names import ContextName -from jedi.inference.compiled import CompiledContextName -from jedi.inference.gradual.typeshed import StubModuleContext +from jedi.inference.names import ValueName +from jedi.inference.compiled import CompiledValueName +from jedi.inference.gradual.typeshed import StubModuleValue @pytest.fixture() @@ -13,14 +13,14 @@ def auto_import_json(monkeypatch): def test_base_auto_import_modules(auto_import_json, Script): loads, = Script('import json; json.loads').goto_definitions() - assert isinstance(loads._name, 
ContextName) + assert isinstance(loads._name, ValueName) value, = loads._name.infer() - assert isinstance(value.parent_value, StubModuleContext) + assert isinstance(value.parent_value, StubModuleValue) def test_auto_import_modules_imports(auto_import_json, Script): main, = Script('from json import tool; tool.main').goto_definitions() - assert isinstance(main._name, CompiledContextName) + assert isinstance(main._name, CompiledValueName) def test_additional_dynamic_modules(monkeypatch, Script):