valueualized_node -> contextualized_node
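
This commit reverts a mechanical search-and-replace gone wrong: an earlier context -> value rename also rewrote the prefix of `contextualized_node`, leaving the nonsense identifier `valueualized_node` throughout the inference code. A self-contained check (an illustration of the failure mode, not the project's actual migration script) shows how the breakage arises:

old_line = "def merge_types_of_iterate(self, contextualized_node=None, is_async=False):"
# A blanket textual rename of "context" to "value" also hits the
# prefix of "contextualized_node", producing the broken identifier
# that this commit reverts.
renamed = old_line.replace("context", "value")
print(renamed)
# def merge_types_of_iterate(self, valueualized_node=None, is_async=False):
assert "valueualized_node" in renamed

Every hunk below applies the same one-word fix at a different call site.
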
@@ -52,10 +52,10 @@ class HelperValueMixin(object):
     def gather_annotation_classes(self):
         return ValueSet([self])
 
-    def merge_types_of_iterate(self, valueualized_node=None, is_async=False):
+    def merge_types_of_iterate(self, contextualized_node=None, is_async=False):
         return ValueSet.from_sets(
             lazy_value.infer()
-            for lazy_value in self.iterate(valueualized_node, is_async)
+            for lazy_value in self.iterate(contextualized_node, is_async)
         )
 
     def py__getattribute__(self, name_or_str, name_context=None, position=None,
@@ -92,7 +92,7 @@ class HelperValueMixin(object):
     def infer_node(self, node):
         return self.inference_state.infer_element(self, node)
 
-    def iterate(self, valueualized_node=None, is_async=False):
+    def iterate(self, contextualized_node=None, is_async=False):
         debug.dbg('iterate %s', self)
         if is_async:
             from jedi.inference.lazy_value import LazyKnownValues
@@ -106,7 +106,7 @@ class HelperValueMixin(object):
                     .py__stop_iteration_returns()
                 ) # noqa
             ])
-        return self.py__iter__(valueualized_node)
+        return self.py__iter__(contextualized_node)
 
     def is_sub_class_of(self, class_value):
         for cls in self.py__mro__():
@@ -137,24 +137,24 @@ class Value(HelperValueMixin, BaseValue):
         # overwritten.
         return self.__class__.__name__.lower()
 
-    def py__getitem__(self, index_value_set, valueualized_node):
+    def py__getitem__(self, index_value_set, contextualized_node):
         from jedi.inference import analysis
         # TODO this value is probably not right.
         analysis.add(
-            valueualized_node.context,
+            contextualized_node.context,
             'type-error-not-subscriptable',
-            valueualized_node.node,
+            contextualized_node.node,
             message="TypeError: '%s' object is not subscriptable" % self
         )
         return NO_VALUES
 
-    def py__iter__(self, valueualized_node=None):
-        if valueualized_node is not None:
+    def py__iter__(self, contextualized_node=None):
+        if contextualized_node is not None:
             from jedi.inference import analysis
             analysis.add(
-                valueualized_node.context,
+                contextualized_node.context,
                 'type-error-not-iterable',
-                valueualized_node.node,
+                contextualized_node.node,
                 message="TypeError: '%s' object is not iterable" % self)
         return iter([])
 
@@ -226,14 +226,14 @@ class Value(HelperValueMixin, BaseValue):
         raise NotImplementedError('Not all values need to be converted to contexts')
 
 
-def iterate_values(values, valueualized_node=None, is_async=False):
+def iterate_values(values, contextualized_node=None, is_async=False):
     """
     Calls `iterate`, on all values but ignores the ordering and just returns
     all values that the iterate functions yield.
     """
     return ValueSet.from_sets(
         lazy_value.infer()
-        for lazy_value in values.iterate(valueualized_node, is_async=is_async)
+        for lazy_value in values.iterate(contextualized_node, is_async=is_async)
     )
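
iterate_values flattens whatever each value yields during iteration into one result set, discarding order. The toy below models that flattening with plain sets; the class and function names mirror the hunk above, but these are simplified stand-ins, not jedi's real ValueSet API:

class LazyKnownValue:
    # Stand-in for jedi's lazy values: infer() returns a set of types.
    def __init__(self, *values):
        self._values = set(values)

    def infer(self):
        return self._values


def iterate_values(lazy_values):
    # Same shape as the function in the hunk: union everything the
    # iterators yield, ignoring ordering.
    result = set()
    for lazy_value in lazy_values:
        result |= lazy_value.infer()
    return result


print(iterate_values([LazyKnownValue('int'), LazyKnownValue('int', 'str')]))
# {'int', 'str'}
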
@@ -355,7 +355,7 @@ class ContextualizedName(ContextualizedNode):
         return indexes
 
 
-def _getitem(value, index_values, valueualized_node):
+def _getitem(value, index_values, contextualized_node):
     from jedi.inference.value.iterable import Slice
 
     # The actual getitem call.
@@ -391,7 +391,7 @@ def _getitem(value, index_values, valueualized_node):
     if unused_values or not index_values:
         result |= value.py__getitem__(
             ValueSet(unused_values),
-            valueualized_node
+            contextualized_node
        )
     debug.dbg('py__getitem__ result: %s', result)
     return result
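
All of these signatures thread the same small object around: a parse-tree node paired with the context it appears in, used mainly to attach diagnostics to the right place and to infer the node later. A minimal model follows; the attribute names (.context, .node, .infer()) are taken from the call sites in this diff, but the class bodies are a sketch, not jedi's implementation:

class Context:
    # Hypothetical stub standing in for a jedi context.
    def infer_node(self, node):
        return set()


class ContextualizedNode:
    def __init__(self, context, node):
        self.context = context
        self.node = node

    def infer(self):
        # Infer the node within the context it was found in.
        return self.context.infer_node(self.node)
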
@@ -401,9 +401,9 @@ class ValueSet(BaseValueSet):
     def py__class__(self):
         return ValueSet(c.py__class__() for c in self._set)
 
-    def iterate(self, valueualized_node=None, is_async=False):
+    def iterate(self, contextualized_node=None, is_async=False):
         from jedi.inference.lazy_value import get_merged_lazy_value
-        type_iters = [c.iterate(valueualized_node, is_async=is_async) for c in self._set]
+        type_iters = [c.iterate(contextualized_node, is_async=is_async) for c in self._set]
         for lazy_values in zip_longest(*type_iters):
             yield get_merged_lazy_value(
                 [l for l in lazy_values if l is not None]
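
ValueSet.iterate above merges the iteration results of every value in the set position by position; zip_longest pads the shorter iterators with None, which is why the merge filters None out. The behaviour in isolation, with strings standing in for lazy values:

from itertools import zip_longest

# Two values yield different numbers of lazy values.
type_iters = [iter(['int', 'str']), iter(['bytes'])]
for lazy_values in zip_longest(*type_iters):
    # Drop the None padding before merging, as the hunk does.
    print([v for v in lazy_values if v is not None])
# ['int', 'bytes']
# ['str']
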
@@ -185,24 +185,24 @@ class CompiledObject(Value):
 
         return ValueSet([create_from_access_path(self.inference_state, access)])
 
-    def py__getitem__(self, index_value_set, valueualized_node):
+    def py__getitem__(self, index_value_set, contextualized_node):
         all_access_paths = self.access_handle.py__getitem__all_values()
         if all_access_paths is None:
             # This means basically that no __getitem__ has been defined on this
             # object.
-            return super(CompiledObject, self).py__getitem__(index_value_set, valueualized_node)
+            return super(CompiledObject, self).py__getitem__(index_value_set, contextualized_node)
         return ValueSet(
             create_from_access_path(self.inference_state, access)
             for access in all_access_paths
         )
 
-    def py__iter__(self, valueualized_node=None):
+    def py__iter__(self, contextualized_node=None):
         # Python iterators are a bit strange, because there's no need for
         # the __iter__ function as long as __getitem__ is defined (it will
         # just start with __getitem__(0). This is especially true for
         # Python 2 strings, where `str.__iter__` is not even defined.
         if not self.access_handle.has_iter():
-            for x in super(CompiledObject, self).py__iter__(valueualized_node):
+            for x in super(CompiledObject, self).py__iter__(contextualized_node):
                 yield x
 
         access_path_list = self.access_handle.py__iter__list()
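
The comment in CompiledObject.py__iter__ refers to standard Python semantics: iter() accepts an object that defines only __getitem__, calling it with 0, 1, 2, ... until IndexError is raised. That protocol can be demonstrated on its own:

class OnlyGetitem:
    # No __iter__ defined; iteration still works via __getitem__.
    def __getitem__(self, index):
        if index >= 3:
            raise IndexError(index)
        return index * 10


print(list(OnlyGetitem()))  # [0, 10, 20]
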
@@ -192,14 +192,14 @@ class TypingValue(_BaseTypingValue):
     index_class = TypingValueWithIndex
     py__simple_getitem__ = None
 
-    def py__getitem__(self, index_value_set, valueualized_node):
+    def py__getitem__(self, index_value_set, contextualized_node):
         return ValueSet(
             self.index_class.create_cached(
                 self.inference_state,
                 self.parent_context,
                 self._tree_name,
                 index_value,
-                value_of_index=valueualized_node.context)
+                value_of_index=contextualized_node.context)
             for index_value in index_value_set
         )
 
@@ -225,7 +225,7 @@ class TypingClassValue(_TypingClassMixin, TypingValue, ClassMixin):
 def _iter_over_arguments(maybe_tuple_value, defining_context):
     def iterate():
         if isinstance(maybe_tuple_value, SequenceLiteralValue):
-            for lazy_value in maybe_tuple_value.py__iter__(valueualized_node=None):
+            for lazy_value in maybe_tuple_value.py__iter__(contextualized_node=None):
                 yield lazy_value.infer()
         else:
             yield ValueSet([maybe_tuple_value])
@@ -316,7 +316,7 @@ class Tuple(_ContainerBase):
         debug.dbg('The getitem type on Tuple was %s' % index)
         return NO_VALUES
 
-    def py__iter__(self, valueualized_node=None):
+    def py__iter__(self, contextualized_node=None):
         if self._is_homogenous():
             yield LazyKnownValues(self._get_getitem_values(0).execute_annotation())
         else:
@@ -324,7 +324,7 @@ class Tuple(_ContainerBase):
             for i in range(self._index_value.py__len__()):
                 yield LazyKnownValues(self._get_getitem_values(i).execute_annotation())
 
-    def py__getitem__(self, index_value_set, valueualized_node):
+    def py__getitem__(self, index_value_set, contextualized_node):
         if self._is_homogenous():
             return self._get_getitem_values(0).execute_annotation()
 
@@ -471,10 +471,10 @@ class NewTypeFunction(_BaseTypingValue):
         return ValueSet(
             NewType(
                 self.inference_state,
-                valueualized_node.context,
-                valueualized_node.node,
+                contextualized_node.context,
+                contextualized_node.node,
                 second_arg.infer(),
-            ) for valueualized_node in arguments.get_calling_nodes())
+            ) for contextualized_node in arguments.get_calling_nodes())
 
 
 class NewType(Value):
@@ -118,11 +118,11 @@ def get_executed_params_and_issues(execution_context, arguments):
                     had_multiple_value_error = True
                     m = ("TypeError: %s() got multiple values for keyword argument '%s'."
                          % (funcdef.name, key))
-                    for valueualized_node in arguments.get_calling_nodes():
+                    for contextualized_node in arguments.get_calling_nodes():
                         issues.append(
-                            analysis.add(valueualized_node.context,
+                            analysis.add(contextualized_node.context,
                                          'type-error-multiple-values',
-                                         valueualized_node.node, message=m)
+                                         contextualized_node.node, message=m)
                         )
                 else:
                     keys_used[key] = ExecutedParam(execution_context, key_param, argument)
@@ -161,13 +161,13 @@ def get_executed_params_and_issues(execution_context, arguments):
             if param.default is None:
                 result_arg = LazyUnknownValue()
                 if not keys_only:
-                    for valueualized_node in arguments.get_calling_nodes():
+                    for contextualized_node in arguments.get_calling_nodes():
                         m = _error_argument_count(funcdef, len(unpacked_va))
                         issues.append(
                             analysis.add(
-                                valueualized_node.context,
+                                contextualized_node.context,
                                 'type-error-too-few-arguments',
-                                valueualized_node.node,
+                                contextualized_node.node,
                                 message=m,
                             )
                         )
@@ -194,12 +194,12 @@ def get_executed_params_and_issues(execution_context, arguments):
         if not (non_matching_keys or had_multiple_value_error or
                 param.star_count or param.default):
             # add a warning only if there's not another one.
-            for valueualized_node in arguments.get_calling_nodes():
+            for contextualized_node in arguments.get_calling_nodes():
                 m = _error_argument_count(funcdef, len(unpacked_va))
                 issues.append(
-                    analysis.add(valueualized_node.context,
+                    analysis.add(contextualized_node.context,
                                  'type-error-too-few-arguments',
-                                 valueualized_node.node, message=m)
+                                 contextualized_node.node, message=m)
                 )
 
     for key, lazy_value in non_matching_keys.items():
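
Each of the param hunks repeats one calling convention: analysis.add receives the context and node of a contextualized node plus an error code and a message. A stub with the same argument order, inferred from these call sites only and not jedi's actual analysis module:

def add(context, error_name, node, message=None):
    # Record a diagnostic against `node` as seen from `context`;
    # the real function builds and stores an issue object.
    print('%s: %s (in %r at %r)' % (error_name, message, context, node))


# Hypothetical usage with placeholder strings for context and node.
add('module()', 'type-error-too-few-arguments', 'funcdef',
    message="TypeError: f() missing 1 required positional argument")
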
@@ -598,7 +598,7 @@ def tree_name_to_values(inference_state, context, tree_name):
         cn = ContextualizedNode(context, node.children[3])
         for_types = iterate_values(
             cn.infer(),
-            valueualized_node=cn,
+            contextualized_node=cn,
             is_async=node.parent.type == 'async_stmt',
         )
         c_node = ContextualizedName(context, tree_name)
@@ -674,13 +674,13 @@ def _apply_decorators(context, node):
     return values
 
 
-def check_tuple_assignments(valueualized_name, value_set):
+def check_tuple_assignments(contextualized_name, value_set):
     """
     Checks if tuples are assigned.
     """
     lazy_value = None
-    for index, node in valueualized_name.assignment_indexes():
-        cn = ContextualizedNode(valueualized_name.context, node)
+    for index, node in contextualized_name.assignment_indexes():
+        cn = ContextualizedNode(contextualized_name.context, node)
         iterated = value_set.iterate(cn)
         if isinstance(index, slice):
             # For no star unpacking is not possible.
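
check_tuple_assignments resolves the kind of assignment shown below: each name on the left maps to an index path into the iterated right-hand side, which is why the function walks assignment_indexes() and re-iterates the value set at every nesting level. A plain-Python illustration of the shapes involved; jedi performs the same positional resolution on inferred value sets rather than runtime objects:

# Each target name resolves through its position(s) in the nested
# tuple: a -> index 0, b -> (1, 0), c -> (1, 1).
(a, (b, c)) = (1, ('x', 2.0))
print(type(a).__name__, type(b).__name__, type(c).__name__)
# int str float
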
@@ -146,21 +146,21 @@ class AbstractInstanceValue(Value):
                 # Propably from the metaclass.
                 yield f
 
-    def py__getitem__(self, index_value_set, valueualized_node):
+    def py__getitem__(self, index_value_set, contextualized_node):
         names = self.get_function_slot_names(u'__getitem__')
         if not names:
             return super(AbstractInstanceValue, self).py__getitem__(
                 index_value_set,
-                valueualized_node,
+                contextualized_node,
             )
 
         args = ValuesArguments([index_value_set])
         return ValueSet.from_sets(name.infer().execute(args) for name in names)
 
-    def py__iter__(self, valueualized_node=None):
+    def py__iter__(self, contextualized_node=None):
         iter_slot_names = self.get_function_slot_names(u'__iter__')
         if not iter_slot_names:
-            return super(AbstractInstanceValue, self).py__iter__(valueualized_node)
+            return super(AbstractInstanceValue, self).py__iter__(contextualized_node)
 
         def iterate():
             for generator in self.execute_function_slots(iter_slot_names):
@@ -78,7 +78,7 @@ class GeneratorBase(LazyAttributeOverwrite, IterableMixin):
         return True
 
     @publish_method('__iter__')
-    def py__iter__(self, valueualized_node=None):
+    def py__iter__(self, contextualized_node=None):
         return ValueSet([self])
 
     @publish_method('send')
@@ -101,7 +101,7 @@ class Generator(GeneratorBase):
         super(Generator, self).__init__(inference_state)
         self._func_execution_context = func_execution_context
 
-    def py__iter__(self, valueualized_node=None):
+    def py__iter__(self, contextualized_node=None):
         return self._func_execution_context.get_yield_lazy_values()
 
     def py__stop_iteration_returns(self):
@@ -199,7 +199,7 @@ class ComprehensionMixin(object):
         for result in self._nested(comp_fors):
             yield result
 
-    def py__iter__(self, valueualized_node=None):
+    def py__iter__(self, contextualized_node=None):
         for set_ in self._iterate():
             yield LazyKnownValues(set_)
 
@@ -238,7 +238,7 @@ class Sequence(LazyAttributeOverwrite, IterableMixin):
     def parent(self):
         return self.inference_state.builtins_module
 
-    def py__getitem__(self, index_value_set, valueualized_node):
+    def py__getitem__(self, index_value_set, contextualized_node):
         if self.array_type == 'dict':
             return self._dict_values()
         return iterate_values(ValueSet([self]))
@@ -285,7 +285,7 @@ class DictComprehension(ComprehensionMixin, Sequence):
         self._entry_node = key_node
         self._value_node = value_node
 
-    def py__iter__(self, valueualized_node=None):
+    def py__iter__(self, contextualized_node=None):
         for keys, values in self._iterate():
             yield LazyKnownValues(keys)
 
@@ -374,7 +374,7 @@ class SequenceLiteralValue(Sequence):
         node = self.get_tree_entries()[index]
         return self._defining_context.infer_node(node)
 
-    def py__iter__(self, valueualized_node=None):
+    def py__iter__(self, contextualized_node=None):
         """
         While values returns the possible values for any array field, this
         function returns the value for a certain index.
@@ -527,7 +527,7 @@ class FakeSequence(_FakeArray):
         lazy_value = self._lazy_value_list[index]
         return lazy_value.infer()
 
-    def py__iter__(self, valueualized_node=None):
+    def py__iter__(self, contextualized_node=None):
         return self._lazy_value_list
 
     def py__bool__(self):
@@ -542,7 +542,7 @@ class FakeDict(_DictMixin, _FakeArray):
         super(FakeDict, self).__init__(inference_state, dct, u'dict')
         self._dct = dct
 
-    def py__iter__(self, valueualized_node=None):
+    def py__iter__(self, contextualized_node=None):
         for key in self._dct:
             yield LazyKnownValue(compiled.create_simple_object(self.inference_state, key))
 
@@ -591,7 +591,7 @@ class MergedArray(_FakeArray):
         super(MergedArray, self).__init__(inference_state, arrays, arrays[-1].array_type)
         self._arrays = arrays
 
-    def py__iter__(self, valueualized_node=None):
+    def py__iter__(self, contextualized_node=None):
         for array in self._arrays:
             for lazy_value in array.py__iter__():
                 yield lazy_value
@@ -762,7 +762,7 @@ class _ArrayInstance(HelperValueMixin):
         tuple_, = self.instance.inference_state.builtins_module.py__getattribute__('tuple')
         return tuple_
 
-    def py__iter__(self, valueualized_node=None):
+    def py__iter__(self, contextualized_node=None):
         var_args = self.var_args
         try:
             _, lazy_value = next(var_args.unpack())
@@ -778,8 +778,8 @@ class _ArrayInstance(HelperValueMixin):
         for addition in additions:
             yield addition
 
-    def iterate(self, valueualized_node=None, is_async=False):
-        return self.py__iter__(valueualized_node)
+    def iterate(self, contextualized_node=None, is_async=False):
+        return self.py__iter__(contextualized_node)
 
 
 class Slice(object):
@@ -272,7 +272,7 @@ class ClassValue(use_metaclass(CachedMetaClass, ClassMixin, FunctionAndClassBase
             self.inference_state.builtins_module.py__getattribute__('object')
         )]
 
-    def py__getitem__(self, index_value_set, valueualized_node):
+    def py__getitem__(self, index_value_set, contextualized_node):
         from jedi.inference.gradual.typing import LazyGenericClass
         if not index_value_set:
             return ValueSet([self])
@@ -280,7 +280,7 @@ class ClassValue(use_metaclass(CachedMetaClass, ClassMixin, FunctionAndClassBase
             LazyGenericClass(
                 self,
                 index_value,
-                value_of_index=valueualized_node.context,
+                value_of_index=contextualized_node.context,
             )
             for index_value in index_value_set
         )
@@ -285,7 +285,7 @@ class ReversedObject(AttributeOverwrite):
         self._iter_list = iter_list
 
     @publish_method('__iter__')
-    def py__iter__(self, valueualized_node=None):
+    def py__iter__(self, contextualized_node=None):
         return self._iter_list
 
     @publish_method('next', python_version_match=2)
@@ -640,7 +640,7 @@ class ItemGetterCallable(ValueWrapper):
         for args_value in self._args_value_set:
             lazy_values = list(args_value.py__iter__())
             if len(lazy_values) == 1:
-                # TODO we need to add the valueualized value.
+                # TODO we need to add the contextualized value.
                 value_set |= item_value_set.get_item(lazy_values[0].infer(), None)
             else:
                 value_set |= ValueSet([iterable.FakeSequence(