Try to use yield from instead of yield, if possible
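The pattern replaced throughout is the hand-written delegation loop; PEP 380's `yield from` expresses the same thing directly. A minimal before/after sketch (the names here are illustrative, not taken from jedi):

    # Before: re-yield every item of each sub-iterable by hand.
    def chain_names(groups):
        for group in groups:
            for name in group:
                yield name

    # After: delegate with `yield from`. For plain iteration this is
    # behaviorally identical. It also forwards send()/throw() to the
    # sub-generator and evaluates to its return value, which is why the
    # swap is only done "if possible": loops whose bodies do more than
    # re-yield a single value are left alone in this commit.
    def chain_names(groups):
        for group in groups:
            yield from group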
@@ -198,8 +198,7 @@ def filter_follow_imports(names, follow_builtin_imports=False):
             if found_builtin:
                 yield name
             else:
-                for new_name in new_names:
-                    yield new_name
+                yield from new_names
         else:
             yield name
 
@@ -71,5 +71,4 @@ class MixedModuleContext(ModuleContext):
         )
 
         for mixed_object in self.mixed_values:
-            for filter in mixed_object.get_filters(until_position, origin_scope):
-                yield filter
+            yield from mixed_object.get_filters(until_position, origin_scope)
@@ -192,8 +192,7 @@ class TreeArguments(AbstractArguments):
             elif star_count == 2:
                 arrays = self.context.infer_node(el)
                 for dct in arrays:
-                    for key, values in _star_star_dict(self.context, dct, el, funcdef):
-                        yield key, values
+                    yield from _star_star_dict(self.context, dct, el, funcdef)
             else:
                 if el.type == 'argument':
                     c = el.children
@@ -216,8 +215,7 @@ class TreeArguments(AbstractArguments):
 
         # Reordering arguments is necessary, because star args sometimes appear
         # after named argument, but in the actual order it's prepended.
-        for named_arg in named_args:
-            yield named_arg
+        yield from named_args
 
     def _as_tree_tuple_objects(self):
         for star_count, argument in unpack_arglist(self.argument_node):
@@ -318,8 +316,7 @@ def _iterate_star_args(context, array, input_node, funcdef=None):
     except AttributeError:
         pass
     else:
-        for lazy_value in iter_():
-            yield lazy_value
+        yield from iter_()
 
 
 def _star_star_dict(context, array, input_node, funcdef):
@@ -55,14 +55,12 @@ class HelperValueMixin(object):
 
     def _get_value_filters(self, name_or_str):
         origin_scope = name_or_str if isinstance(name_or_str, Name) else None
-        for f in self.get_filters(origin_scope=origin_scope):
-            yield f
+        yield from self.get_filters(origin_scope=origin_scope)
         # This covers the case where a stub files are incomplete.
         if self.is_stub():
             from jedi.inference.gradual.conversion import convert_values
             for c in convert_values(ValueSet({self})):
-                for f in c.get_filters():
-                    yield f
+                yield from c.get_filters()
 
     def goto(self, name_or_str, name_context=None, analysis_errors=True):
         from jedi.inference import finder
@@ -439,8 +437,7 @@ class ValueSet(object):
         return self._from_frozen_set(self._set & other._set)
 
     def __iter__(self):
-        for element in self._set:
-            yield element
+        return iter(self._set)
 
     def __bool__(self):
         return bool(self._set)
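The `ValueSet.__iter__` hunk above goes one step further than `yield from`: because the method did nothing but delegate, it stops being a generator function altogether and returns the underlying iterator. A simplified sketch of why that is equivalent here (not jedi's full class):

    class ValueSet:
        def __init__(self, iterable):
            self._set = frozenset(iterable)

        # A generator doing `yield from self._set` would also work, but
        # returning the frozenset's own iterator skips the extra
        # generator frame on every iteration.
        def __iter__(self):
            return iter(self._set)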
@@ -184,8 +184,7 @@ class CompiledValue(Value):
 
     def py__iter__(self, contextualized_node=None):
         if not self.access_handle.has_iter():
-            for x in super().py__iter__(contextualized_node):
-                yield x
+            yield from super().py__iter__(contextualized_node)
 
         access_path_list = self.access_handle.py__iter__list()
         if access_path_list is None:
@@ -220,10 +219,8 @@ class CompiledValue(Value):
                 continue
             else:
                 bltn_obj = builtin_from_name(self.inference_state, name)
-                for result in self.inference_state.execute(bltn_obj, params):
-                    yield result
-        for type_ in docstrings.infer_return_types(self):
-            yield type_
+                yield from self.inference_state.execute(bltn_obj, params)
+        yield from docstrings.infer_return_types(self)
 
     def get_safe_value(self, default=_sentinel):
         try:
@@ -482,10 +482,10 @@ def get_global_filters(context, until_position, origin_scope):
     from jedi.inference.value.function import BaseFunctionExecutionContext
     while context is not None:
         # Names in methods cannot be resolved within the class.
-        for filter in context.get_filters(
-                until_position=until_position,
-                origin_scope=origin_scope):
-            yield filter
+        yield from context.get_filters(
+            until_position=until_position,
+            origin_scope=origin_scope
+        )
         if isinstance(context, (BaseFunctionExecutionContext, ModuleContext)):
             # The position should be reset if the current scope is a function.
             until_position = None
@@ -95,8 +95,7 @@ def _search_return_in_numpydocstr(docstr):
         # Return names are optional and if so the type is in the name
         if not r_type:
             r_type = r_name
-        for type_ in _expand_typestr(r_type):
-            yield type_
+        yield from _expand_typestr(r_type)
 
 
 def _expand_typestr(type_str):
@@ -295,9 +294,7 @@ def infer_return_types(function_value):
         if match:
             yield _strip_rst_role(match.group(1))
         # Check for numpy style return hint
-        for type_ in _search_return_in_numpydocstr(code):
-            yield type_
+        yield from _search_return_in_numpydocstr(code)
 
     for type_str in search_return_in_docstr(function_value.py__doc__()):
-        for value in _infer_for_statement_string(function_value.get_root_context(), type_str):
-            yield value
+        yield from _infer_for_statement_string(function_value.get_root_context(), type_str)
@@ -215,12 +215,10 @@ def _check_name_for_execution(inference_state, context, compare_node, name, trai
             for name, trailer in potential_nodes:
                 if value_node.start_pos < name.start_pos < value_node.end_pos:
                     random_context = execution_context.create_context(name)
-                    iterator = _check_name_for_execution(
+                    yield from _check_name_for_execution(
                         inference_state,
                         random_context,
                         compare_node,
                         name,
                         trailer
                     )
-                    for arguments in iterator:
-                        yield arguments
@@ -323,9 +323,7 @@ class _OverwriteMeta(type):
 class _AttributeOverwriteMixin(object):
     def get_filters(self, *args, **kwargs):
         yield SpecialMethodFilter(self, self.overwritten_methods, self._wrapped_value)
-
-        for filter in self._wrapped_value.get_filters(*args, **kwargs):
-            yield filter
+        yield from self._wrapped_value.get_filters(*args, **kwargs)
 
 
 class LazyAttributeOverwrite(_AttributeOverwriteMixin, LazyValueWrapper,
@@ -25,8 +25,7 @@ class _BoundTypeVarName(AbstractNameDefinition):
                 # Replace any with the constraints if they are there.
                 from jedi.inference.gradual.typing import AnyClass
                 if isinstance(value, AnyClass):
-                    for constraint in self._type_var.constraints:
-                        yield constraint
+                    yield from self._type_var.constraints
                 else:
                     yield value
         return ValueSet(iter_())
@@ -73,8 +72,7 @@ class _AnnotatedClassContext(ClassContext):
         filters = super().get_filters(
             *args, **kwargs
         )
-        for f in filters:
-            yield f
+        yield from filters
 
         # The type vars can only be looked up if it's a global search and
         # not a direct lookup on the class.
@@ -135,8 +135,7 @@ def _python_to_stub_names(names, fallback_to_python=False):
             if converted:
                 converted_names = converted.goto(name.get_public_name())
                 if converted_names:
-                    for n in converted_names:
-                        yield n
+                    yield from converted_names
                     continue
         if fallback_to_python:
             # This is the part where if we haven't found anything, just return
@@ -43,11 +43,8 @@ class StubModuleValue(ModuleValue):
         filters = super().get_filters(origin_scope)
         next(filters, None)  # Ignore the first filter and replace it with our own
         stub_filters = self._get_stub_filters(origin_scope=origin_scope)
-        for f in stub_filters:
-            yield f
-
-        for f in filters:
-            yield f
+        yield from stub_filters
+        yield from filters
 
     def _as_context(self):
         return StubModuleContext(self)
@@ -66,8 +63,7 @@ class TypingModuleWrapper(StubModuleValue):
         f = next(filters, None)
         assert f is not None
         yield TypingModuleFilterWrapper(f)
-        for f in filters:
-            yield f
+        yield from filters
 
     def _as_context(self):
         return TypingModuleContext(self)
@@ -77,8 +73,7 @@ class TypingModuleContext(ModuleContext):
     def get_filters(self, *args, **kwargs):
         filters = super().get_filters(*args, **kwargs)
         yield TypingModuleFilterWrapper(next(filters, None))
-        for f in filters:
-            yield f
+        yield from filters
 
 
 class StubFilter(ParserTreeFilter):
@@ -38,8 +38,7 @@ def _resolve_names(definition_names, avoid_names=()):
             yield name
 
         if name.api_type == 'module':
-            for n in _resolve_names(name.goto(), definition_names):
-                yield n
+            yield from _resolve_names(name.goto(), definition_names)
 
 
 def _dictionarize(names):
@@ -90,8 +89,7 @@ def _add_names_in_same_context(context, string_name):
         names = set(filter_.get(string_name))
         if not names:
             break
-        for name in names:
-            yield name
+        yield from names
         ordered = sorted(names, key=lambda x: x.start_pos)
         until_position = ordered[0].start_pos
 
@@ -109,8 +107,7 @@ def _find_global_variables(names, search_name):
         for global_name in method().get(search_name):
             yield global_name
             c = module_context.create_context(global_name.tree_name)
-            for n in _add_names_in_same_context(c, global_name.string_name):
-                yield n
+            yield from _add_names_in_same_context(c, global_name.string_name)
 
 
 def find_references(module_context, tree_name, only_in_module=False):
@@ -96,8 +96,7 @@ def process_params(param_names, star_count=3):  # default means both * and **
     if is_big_annoying_library(param_names[0].parent_context):
         # At first this feature can look innocent, but it does a lot of
         # type inference in some cases, so we just ditch it.
-        for p in param_names:
-            yield p
+        yield from param_names
         return
 
     used_names = set()
@@ -166,8 +166,7 @@ def _get_paths_from_buildout_script(inference_state, buildout_script_path):
         string_names=None,
         code_lines=get_cached_code_lines(inference_state.grammar, str(buildout_script_path)),
     ).as_context()
-    for path in check_sys_path_modifications(module_context):
-        yield path
+    yield from check_sys_path_modifications(module_context)
 
 
 def _get_parent_dir_with_file(path: Path, filename):
@@ -155,14 +155,12 @@ class _DynamicArrayAdditions(HelperValueMixin):
             except StopIteration:
                 pass
             else:
-                for lazy in lazy_value.infer().iterate():
-                    yield lazy
+                yield from lazy_value.infer().iterate()
 
         from jedi.inference.arguments import TreeArguments
         if isinstance(arguments, TreeArguments):
             additions = _internal_check_array_additions(arguments.context, self._instance)
-            for addition in additions:
-                yield addition
+            yield from additions
 
     def iterate(self, contextualized_node=None, is_async=False):
         return self.py__iter__(contextualized_node)
@@ -189,8 +187,7 @@ class _Modification(ValueWrapper):
 
 class DictModification(_Modification):
     def py__iter__(self, contextualized_node=None):
-        for lazy_context in self._wrapped_value.py__iter__(contextualized_node):
-            yield lazy_context
+        yield from self._wrapped_value.py__iter__(contextualized_node)
         yield self._contextualized_key
 
     def get_key_values(self):
@@ -199,6 +196,5 @@ class DictModification(_Modification):
 
 class ListModification(_Modification):
     def py__iter__(self, contextualized_node=None):
-        for lazy_context in self._wrapped_value.py__iter__(contextualized_node):
-            yield lazy_context
+        yield from self._wrapped_value.py__iter__(contextualized_node)
         yield LazyKnownValues(self._assigned_values)
@@ -59,8 +59,7 @@ class FunctionMixin(object):
     def get_filters(self, origin_scope=None):
         cls = self.py__class__()
         for instance in cls.execute_with_values():
-            for filter in instance.get_filters(origin_scope=origin_scope):
-                yield filter
+            yield from instance.get_filters(origin_scope=origin_scope)
 
     def py__get__(self, instance, class_value):
         from jedi.inference.value.instance import BoundMethod
@@ -256,8 +255,7 @@ class BaseFunctionExecutionContext(ValueContext, TreeContextMixin):
         node = yield_expr.children[1]
         if node.type == 'yield_arg':  # It must be a yield from.
             cn = ContextualizedNode(self, node.children[1])
-            for lazy_value in cn.infer().iterate(cn):
-                yield lazy_value
+            yield from cn.infer().iterate(cn)
         else:
             yield LazyTreeValue(self, node)
 
@@ -296,8 +294,7 @@ class BaseFunctionExecutionContext(ValueContext, TreeContextMixin):
         if for_stmt is None:
             # No for_stmt, just normal yields.
             for yield_ in yields:
-                for result in self._get_yield_lazy_value(yield_):
-                    yield result
+                yield from self._get_yield_lazy_value(yield_)
         else:
             input_node = for_stmt.get_testlist()
             cn = ContextualizedNode(self, input_node)
@@ -307,8 +304,7 @@ class BaseFunctionExecutionContext(ValueContext, TreeContextMixin):
                     dct = {str(for_stmt.children[1].value): lazy_value.infer()}
                     with self.predefine_names(for_stmt, dct):
                         for yield_in_same_for_stmt in yields:
-                            for result in self._get_yield_lazy_value(yield_in_same_for_stmt):
-                                yield result
+                            yield from self._get_yield_lazy_value(yield_in_same_for_stmt)
 
     def merge_yield_values(self, is_async=False):
         return ValueSet.from_sets(
@@ -255,8 +255,7 @@ class _BaseTreeInstance(AbstractInstanceValue):
 
         def iterate():
            for generator in self.execute_function_slots(iter_slot_names):
-                for lazy_value in generator.py__next__(contextualized_node):
-                    yield lazy_value
+                yield from generator.py__next__(contextualized_node)
         return iterate()
 
     def py__next__(self, contextualized_node=None):
@@ -526,8 +525,7 @@ class LazyInstanceClassName(NameWrapper):
     @iterator_to_value_set
     def infer(self):
         for result_value in self._wrapped_name.infer():
-            for c in result_value.py__get__(self._instance, self._instance.py__class__()):
-                yield c
+            yield from result_value.py__get__(self._instance, self._instance.py__class__())
 
     def get_signatures(self):
         return self.infer().get_signatures()
@@ -616,5 +614,4 @@ class InstanceArguments(TreeArgumentsWrapper):
 
     def unpack(self, func=None):
         yield None, LazyKnownValue(self.instance)
-        for values in self._wrapped_arguments.unpack(func):
-            yield values
+        yield from self._wrapped_arguments.unpack(func)
@@ -153,8 +153,7 @@ class ComprehensionMixin(object):
             )
             with context.predefine_names(comp_for, dct):
                 try:
-                    for result in self._nested(comp_fors[1:], context):
-                        yield result
+                    yield from self._nested(comp_fors[1:], context)
                 except IndexError:
                     iterated = context.infer_node(self._entry_node)
                     if self.array_type == 'dict':
@@ -166,8 +165,7 @@ class ComprehensionMixin(object):
     @to_list
     def _iterate(self):
         comp_fors = tuple(get_sync_comp_fors(self._sync_comp_for_node))
-        for result in self._nested(comp_fors):
-            yield result
+        yield from self._nested(comp_fors)
 
     def py__iter__(self, contextualized_node=None):
         for set_ in self._iterate():
@@ -358,8 +356,7 @@ class SequenceLiteralValue(Sequence):
                 yield LazyKnownValue(Slice(self._defining_context, None, None, None))
             else:
                 yield LazyTreeValue(self._defining_context, node)
-        for addition in check_array_additions(self._defining_context, self):
-            yield addition
+        yield from check_array_additions(self._defining_context, self)
 
     def py__len__(self):
         # This function is not really used often. It's more of a try.
@@ -566,8 +563,7 @@ class MergedArray(Sequence):
 
     def py__iter__(self, contextualized_node=None):
         for array in self._arrays:
-            for lazy_value in array.py__iter__():
-                yield lazy_value
+            yield from array.py__iter__()
 
     def py__simple_getitem__(self, index):
         return ValueSet.from_sets(lazy_value.infer() for lazy_value in self.py__iter__())
@@ -68,8 +68,7 @@ class ClassName(TreeNameDefinition):
 
         for result_value in inferred:
             if self._apply_decorators:
-                for c in result_value.py__get__(instance=None, class_value=self._class_value):
-                    yield c
+                yield from result_value.py__get__(instance=None, class_value=self._class_value)
             else:
                 yield result_value
 
@@ -67,8 +67,7 @@ class ModuleMixin(SubModuleDictMixin):
         )
         yield DictFilter(self.sub_modules_dict())
         yield DictFilter(self._module_attributes_dict())
-        for star_filter in self.iter_star_filters():
-            yield star_filter
+        yield from self.iter_star_filters()
 
     def py__class__(self):
         c, = values_from_qualified_names(self.inference_state, 'types', 'ModuleType')