diff --git a/parso/python/errors.py b/parso/python/errors.py
index 54a6641..19a3329 100644
--- a/parso/python/errors.py
+++ b/parso/python/errors.py
@@ -173,13 +173,11 @@ def _iter_definition_exprs_from_lists(exprlist):
             if child.children[0] == '(':
                 testlist_comp = child.children[1]
                 if testlist_comp.type == 'testlist_comp':
-                    for expr in _iter_definition_exprs_from_lists(testlist_comp):
-                        yield expr
+                    yield from _iter_definition_exprs_from_lists(testlist_comp)
                     return
                 else:
                     # It's a paren that doesn't do anything, like 1 + (1)
-                    for c in check_expr(testlist_comp):
-                        yield c
+                    yield from check_expr(testlist_comp)
                     return
             elif child.children[0] == '[':
                 yield testlist_comp
@@ -188,11 +186,9 @@
 
     if exprlist.type in _STAR_EXPR_PARENTS:
         for child in exprlist.children[::2]:
-            for c in check_expr(child):  # Python 2 sucks
-                yield c
+            yield from check_expr(child)
     else:
-        for c in check_expr(exprlist):  # Python 2 sucks
-            yield c
+        yield from check_expr(exprlist)
 
 
 def _get_expr_stmt_definition_exprs(expr_stmt):
diff --git a/parso/python/tokenize.py b/parso/python/tokenize.py
index 15b8e9e..39b645d 100644
--- a/parso/python/tokenize.py
+++ b/parso/python/tokenize.py
@@ -528,14 +528,12 @@ def tokenize_lines(lines, version_info, start_pos=(1, 0), indents=None, is_first
                         if indent_start > indents[-1]:
                             yield PythonToken(INDENT, '', spos, '')
                             indents.append(indent_start)
-                        for t in dedent_if_necessary(indent_start):
-                            yield t
+                        yield from dedent_if_necessary(indent_start)
 
             if not pseudomatch:  # scan for tokens
                 match = whitespace.match(line, pos)
                 if new_line and paren_level == 0 and not fstring_stack:
-                    for t in dedent_if_necessary(match.end()):
-                        yield t
+                    yield from dedent_if_necessary(match.end())
                 pos = match.end()
                 new_line = False
                 yield PythonToken(
@@ -556,13 +554,11 @@ def tokenize_lines(lines, version_info, start_pos=(1, 0), indents=None, is_first
                     # We only want to dedent if the token is on a new line.
                     m = re.match(r'[ \f\t]*$', line[:start])
                     if m is not None:
-                        for t in dedent_if_necessary(m.end()):
-                            yield t
+                        yield from dedent_if_necessary(m.end())
                 if is_identifier(token):
                     yield PythonToken(NAME, token, spos, prefix)
                 else:
-                    for t in _split_illegal_unicode_name(token, spos, prefix):
-                        yield t  # yield from Python 2
+                    yield from _split_illegal_unicode_name(token, spos, prefix)
             elif initial in '\r\n':
                 if any(not f.allow_multiline() for f in fstring_stack):
                     # Would use fstring_stack.clear, but that's not available
diff --git a/parso/python/tree.py b/parso/python/tree.py
index 06d4c96..d9d836b 100644
--- a/parso/python/tree.py
+++ b/parso/python/tree.py
@@ -363,8 +363,7 @@ class Scope(PythonBaseNode, DocstringMixin):
                 if element.type in names:
                     yield element
                 if element.type in _FUNC_CONTAINERS:
-                    for e in scan(element.children):
-                        yield e
+                    yield from scan(element.children)
 
         return scan(self.children)
 
@@ -615,8 +614,7 @@ class Function(ClassOrFunc):
                         else:
                             yield element
                 else:
-                    for result in scan(nested_children):
-                        yield result
+                    yield from scan(nested_children)
 
         return scan(self.children)
 
@@ -630,8 +628,7 @@ class Function(ClassOrFunc):
                         or element.type == 'keyword' and element.value == 'return':
                     yield element
                 if element.type in _RETURN_STMT_CONTAINERS:
-                    for e in scan(element.children):
-                        yield e
+                    yield from scan(element.children)
 
         return scan(self.children)
 
@@ -645,8 +642,7 @@ class Function(ClassOrFunc):
                         or element.type == 'keyword' and element.value == 'raise':
                     yield element
                 if element.type in _RETURN_STMT_CONTAINERS:
-                    for e in scan(element.children):
-                        yield e
+                    yield from scan(element.children)
 
         return scan(self.children)
 
@@ -1098,8 +1094,7 @@ class ExprStmt(PythonBaseNode, DocstringMixin):
             first = first.children[2]
         yield first
 
-        for operator in self.children[3::2]:
-            yield operator
+        yield from self.children[3::2]
 
 
 class Param(PythonBaseNode):
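
Not part of the patch: a minimal standalone sketch of the rewrite applied throughout this diff. The names below (_inner, manual_loop, delegating) are illustrative only and do not exist in parso. For the plain value iteration these generators perform, the Python 2 compatible loop and the yield from delegation produce the same sequence:

    def _inner():
        yield 1
        yield 2

    def manual_loop():
        # Pre-patch spelling, kept for Python 2 compatibility.
        for value in _inner():
            yield value

    def delegating():
        # Post-patch spelling: delegate directly to the sub-generator.
        yield from _inner()

    assert list(manual_loop()) == list(delegating()) == [1, 2]

yield from additionally forwards sent values, thrown exceptions, and the sub-generator's return value, which the manual loop does not; none of the rewritten call sites rely on that, so the change is behavior-preserving here.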