Mirror of https://github.com/davidhalter/parso.git, synced 2025-12-07 21:34:32 +08:00
Compare commits
25 Commits
| SHA1 |
|---|
| 5edab0407a |
| c4f297a57a |
| 5bba083af8 |
| 2799a7a3c2 |
| fac5c089ff |
| e5d6663721 |
| e5731d3932 |
| 93206f6eba |
| cbe0b91d6a |
| 771fe6bf33 |
| 1139e53429 |
| 0e20c33c21 |
| 14c88c1f4b |
| 257ac768fb |
| 79aeb2a801 |
| ef90bba3b3 |
| a9d0cc1179 |
| f45ffa1948 |
| b287476366 |
| d39aadc4cc |
| b08b61b578 |
| 034a9e8944 |
| 634df56d90 |
| 52cfa5a8ac |
| 606c528803 |
@@ -6,6 +6,7 @@ David Halter (@davidhalter) <davidhalter88@gmail.com>
 Code Contributors
 =================
 Alisdair Robertson (@robodair)
+Bryan Forbes (@bryanforbes) <bryan@reigndropsfall.net>


 Code Contributors (to Jedi and therefore possibly to this library)
@@ -6,6 +6,16 @@ Changelog
 Unreleased
 ++++++++++

+0.8.2 (2021-03-30)
+++++++++++++++++++
+
+- Various small bugfixes
+
+0.8.1 (2020-12-10)
+++++++++++++++++++
+
+- Various small bugfixes
+
 0.8.0 (2020-08-05)
 ++++++++++++++++++
@@ -43,7 +43,7 @@ from parso.grammar import Grammar, load_grammar
 from parso.utils import split_lines, python_bytes_to_unicode


-__version__ = '0.8.0'
+__version__ = '0.8.2'


 def parse(code=None, **kwargs):
@@ -187,7 +187,7 @@ def try_to_save_module(hashed_grammar, file_io, module, lines, pickling=True, ca
         # file system. It's still in RAM in that case. However we should
         # still warn the user that this is happening.
         warnings.warn(
-            'Tried to save a file to %s, but got permission denied.',
+            'Tried to save a file to %s, but got permission denied.' % path,
             Warning
         )
     else:
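The hunk above only interpolates the path into the warning text before calling `warnings.warn`. A minimal standalone sketch of the difference (the path value is hypothetical, for illustration only):

```python
import warnings

path = '/tmp/parso-cache.pkl'  # hypothetical path, not taken from the diff

# Before the fix the '%s' placeholder was never filled in, because the second
# positional argument of warnings.warn() is the warning category, not a
# formatting argument:
#     warnings.warn('Tried to save a file to %s, but got permission denied.', Warning)

# The fixed call formats the message first and passes the category separately.
warnings.warn(
    'Tried to save a file to %s, but got permission denied.' % path,
    Warning,
)
```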
@@ -147,8 +147,20 @@ def _remove_parens(atom):
     return atom


+def _skip_parens_bottom_up(node):
+    """
+    Returns an ancestor node of an expression, skipping all levels of parens
+    bottom-up.
+    """
+    while node.parent is not None:
+        node = node.parent
+        if node.type != 'atom' or node.children[0] != '(':
+            return node
+    return None
+
+
 def _iter_params(parent_node):
-    return (n for n in parent_node.children if n.type == 'param')
+    return (n for n in parent_node.children if n.type == 'param' or n.type == 'operator')


 def _is_future_import_first(import_from):
@@ -229,6 +241,7 @@ class _Context:
         self.parent_context = parent_context
         self._used_name_dict = {}
         self._global_names = []
+        self._local_params_names = []
         self._nonlocal_names = []
         self._nonlocal_names_in_subscopes = []
         self._add_syntax_error = add_syntax_error

@@ -252,6 +265,10 @@ class _Context:
             self._global_names.append(name)
         elif parent_type == 'nonlocal_stmt':
             self._nonlocal_names.append(name)
+        elif parent_type == 'funcdef':
+            self._local_params_names.extend(
+                [param.name.value for param in name.parent.get_params()]
+            )
         else:
             self._used_name_dict.setdefault(name.value, []).append(name)

@@ -279,6 +296,8 @@ class _Context:
         nonlocals_not_handled = []
         for nonlocal_name in self._nonlocal_names_in_subscopes:
             search = nonlocal_name.value
+            if search in self._local_params_names:
+                continue
             if search in global_name_strs or self.parent_context is None:
                 message = "no binding for nonlocal '%s' found" % nonlocal_name.value
                 self._add_syntax_error(nonlocal_name, message)
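These three hunks record the parameter names of enclosing functions so that a `nonlocal` binding to such a parameter is no longer reported as missing. A minimal check through parso's public error API:

```python
import parso

# The nested ``nonlocal a`` binds to the parameter of the enclosing function,
# so no "no binding for nonlocal 'a' found" error should be reported.
code = """
def x(a):
    def y():
        nonlocal a
"""

grammar = parso.load_grammar(version='3.8')
errors = list(grammar.iter_errors(grammar.parse(code)))
print([e.message for e in errors])  # expected to be empty with this change
```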
@@ -730,9 +749,34 @@ class _FutureImportRule(SyntaxRule):
 @ErrorFinder.register_rule(type='star_expr')
 class _StarExprRule(SyntaxRule):
     message_iterable_unpacking = "iterable unpacking cannot be used in comprehension"
-    message_assignment = "can use starred expression only as assignment target"

     def is_issue(self, node):
+        def check_delete_starred(node):
+            while node.parent is not None:
+                node = node.parent
+                if node.type == 'del_stmt':
+                    return True
+                if node.type not in (*_STAR_EXPR_PARENTS, 'atom'):
+                    return False
+            return False
+
+        if self._normalizer.version >= (3, 9):
+            ancestor = node.parent
+        else:
+            ancestor = _skip_parens_bottom_up(node)
+        # starred expression not in tuple/list/set
+        if ancestor.type not in (*_STAR_EXPR_PARENTS, 'dictorsetmaker') \
+                and not (ancestor.type == 'atom' and ancestor.children[0] != '('):
+            self.add_issue(node, message="can't use starred expression here")
+            return
+
+        if check_delete_starred(node):
+            if self._normalizer.version >= (3, 9):
+                self.add_issue(node, message="cannot delete starred")
+            else:
+                self.add_issue(node, message="can't use starred expression here")
+            return
+
         if node.parent.type == 'testlist_comp':
             # [*[] for a in [1]]
             if node.parent.children[1].type in _COMP_FOR_TYPES:
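The new checks reject starred expressions outside tuple/list/set targets and starred deletion targets, with a message that depends on the target Python version. A small sketch of the observable behaviour via `Grammar.iter_errors` (exact message strings may carry a prefix, so the comments only describe what they should mention):

```python
import parso

def messages(code, version):
    grammar = parso.load_grammar(version=version)
    return [e.message for e in grammar.iter_errors(grammar.parse(code))]

# A starred expression that is not inside a tuple/list/set target is rejected.
print(messages('*x', '3.8'))       # should mention "can't use starred expression here"

# Deleting a starred target is rejected with a version-dependent message.
print(messages('del *x,', '3.8'))  # should mention "can't use starred expression here"
print(messages('del *x,', '3.9'))  # should mention "cannot delete starred"
```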
@@ -742,39 +786,33 @@ class _StarExprRule(SyntaxRule):
 @ErrorFinder.register_rule(types=_STAR_EXPR_PARENTS)
 class _StarExprParentRule(SyntaxRule):
     def is_issue(self, node):
-        if node.parent.type == 'del_stmt':
-            if self._normalizer.version >= (3, 9):
-                self.add_issue(node.parent, message="cannot delete starred")
-            else:
-                self.add_issue(node.parent, message="can't use starred expression here")
-        else:
-            def is_definition(node, ancestor):
-                if ancestor is None:
-                    return False
+        def is_definition(node, ancestor):
+            if ancestor is None:
+                return False

-                type_ = ancestor.type
-                if type_ == 'trailer':
-                    return False
+            type_ = ancestor.type
+            if type_ == 'trailer':
+                return False

-                if type_ == 'expr_stmt':
-                    return node.start_pos < ancestor.children[-1].start_pos
+            if type_ == 'expr_stmt':
+                return node.start_pos < ancestor.children[-1].start_pos

-                return is_definition(node, ancestor.parent)
+            return is_definition(node, ancestor.parent)

-            if is_definition(node, node.parent):
-                args = [c for c in node.children if c != ',']
-                starred = [c for c in args if c.type == 'star_expr']
-                if len(starred) > 1:
-                    if self._normalizer.version < (3, 9):
-                        message = "two starred expressions in assignment"
-                    else:
-                        message = "multiple starred expressions in assignment"
-                    self.add_issue(starred[1], message=message)
-                elif starred:
-                    count = args.index(starred[0])
-                    if count >= 256:
-                        message = "too many expressions in star-unpacking assignment"
-                        self.add_issue(starred[0], message=message)
+        if is_definition(node, node.parent):
+            args = [c for c in node.children if c != ',']
+            starred = [c for c in args if c.type == 'star_expr']
+            if len(starred) > 1:
+                if self._normalizer.version < (3, 9):
+                    message = "two starred expressions in assignment"
+                else:
+                    message = "multiple starred expressions in assignment"
+                self.add_issue(starred[1], message=message)
+            elif starred:
+                count = args.index(starred[0])
+                if count >= 256:
+                    message = "too many expressions in star-unpacking assignment"
+                    self.add_issue(starred[0], message=message)


 @ErrorFinder.register_rule(type='annassign')
@@ -911,17 +949,28 @@ class _ParameterRule(SyntaxRule):
     def is_issue(self, node):
         param_names = set()
         default_only = False
+        star_seen = False
         for p in _iter_params(node):
+            if p.type == 'operator':
+                if p.value == '*':
+                    star_seen = True
+                    default_only = False
+                continue
+
             if p.name.value in param_names:
                 message = "duplicate argument '%s' in function definition"
                 self.add_issue(p.name, message=message % p.name.value)
             param_names.add(p.name.value)

-            if p.default is None and not p.star_count:
-                if default_only:
-                    return True
-            else:
-                default_only = True
+            if not star_seen:
+                if p.default is None and not p.star_count:
+                    if default_only:
+                        return True
+                elif p.star_count:
+                    star_seen = True
+                    default_only = False
+                else:
+                    default_only = True


 @ErrorFinder.register_rule(type='try_stmt')
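Because `_iter_params` now also yields `operator` children, the rule can see the bare `*` separator and stop treating keyword-only parameters as "non-default after default". A small sketch of the effect, using examples that appear in the test data of this compare:

```python
import parso

grammar = parso.load_grammar(version='3.8')

def error_count(code):
    return len(list(grammar.iter_errors(grammar.parse(code))))

# A required keyword-only parameter after a defaulted one is fine once the
# bare '*' separator has been seen.
print(error_count('def x(*, c=2, d): pass'))            # expected: 0
print(error_count('lambda a, b=1, *, c=2, d: (c, d)'))  # expected: 0

# Duplicate parameter names are still reported.
print(error_count('def x(a, a): pass'))                 # expected: > 0
```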
@@ -957,7 +1006,11 @@ class _FStringRule(SyntaxRule):
         if '\\' in expr.get_code():
             self.add_issue(expr, message=self.message_expr)

-        conversion = fstring_expr.children[2]
+        children_2 = fstring_expr.children[2]
+        if children_2.type == 'operator' and children_2.value == '=':
+            conversion = fstring_expr.children[3]
+        else:
+            conversion = children_2
         if conversion.type == 'fstring_conversion':
             name = conversion.children[1]
             if name.value not in ('s', 'r', 'a'):
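With the `=` debugging specifier (`f"{expr=}"`) present, the conversion node shifts by one child, so the rule now looks it up accordingly. A sketch of the resulting behaviour, based on the valid/invalid examples added in this compare's tests:

```python
import parso

grammar = parso.load_grammar(version='3.8')

def messages(code):
    return [e.message for e in grammar.iter_errors(grammar.parse(code))]

# The '=' specifier may be followed by a conversion, which is still validated.
print(messages('f"{1=!r}"'))  # expected: no errors
print(messages('f"{1=!b}"'))  # expected: an error about the conversion character
```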
@@ -1075,8 +1128,15 @@ class _CheckAssignmentRule(SyntaxRule):
                     error = "starred"
                 else:
                     self.add_issue(node, message="can't use starred expression here")
-            elif not search_ancestor(node, *_STAR_EXPR_PARENTS) and not is_aug_assign:
-                self.add_issue(node, message="starred assignment target must be in a list or tuple")
+            else:
+                if self._normalizer.version >= (3, 9):
+                    ancestor = node.parent
+                else:
+                    ancestor = _skip_parens_bottom_up(node)
+                if ancestor.type not in _STAR_EXPR_PARENTS and not is_aug_assign \
+                        and not (ancestor.type == 'atom' and ancestor.children[0] == '['):
+                    message = "starred assignment target must be in a list or tuple"
+                    self.add_issue(node, message=message)

             self._check_assignment(node.children[1])
@@ -124,14 +124,14 @@ atom: ('(' [yield_expr|testlist_comp] ')' |
 testlist_comp: (namedexpr_test|star_expr) ( comp_for | (',' (namedexpr_test|star_expr))* [','] )
 trailer: '(' [arglist] ')' | '[' subscriptlist ']' | '.' NAME
 subscriptlist: subscript (',' subscript)* [',']
-subscript: test | [test] ':' [test] [sliceop]
+subscript: test [':=' test] | [test] ':' [test] [sliceop]
 sliceop: ':' [test]
 exprlist: (expr|star_expr) (',' (expr|star_expr))* [',']
 testlist: test (',' test)* [',']
 dictorsetmaker: ( ((test ':' test | '**' expr)
                    (comp_for | (',' (test ':' test | '**' expr))* [','])) |
-                  ((test | star_expr)
-                   (comp_for | (',' (test | star_expr))* [','])) )
+                  ((test [':=' test] | star_expr)
+                   (comp_for | (',' (test [':=' test] | star_expr))* [','])) )

 classdef: 'class' NAME ['(' [arglist] ')'] ':' suite
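The grammar change above lets a named expression appear inside a subscript and inside set/dict displays. A sketch of the observable effect, reusing examples from this compare's tests (messages come from the normal error pass, so only counts are checked here):

```python
import parso

def error_count(code, version):
    grammar = parso.load_grammar(version=version)
    return len(list(grammar.iter_errors(grammar.parse(code))))

# Parenthesized named expressions are accepted in subscripts and dict keys...
print(error_count('a[(b:=0)]', '3.8'))           # expected: 0
print(error_count('{(a:="a"): (b:=1)}', '3.8'))  # expected: 0
# ...while an unparenthesized walrus in a slice is still rejected on 3.8.
print(error_count('a[b:=0:1:2]', '3.8'))         # expected: > 0
```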
@@ -167,5 +167,5 @@ strings: (STRING | fstring)+
 fstring: FSTRING_START fstring_content* FSTRING_END
 fstring_content: FSTRING_STRING | fstring_expr
 fstring_conversion: '!' NAME
-fstring_expr: '{' testlist ['='] [ fstring_conversion ] [ fstring_format_spec ] '}'
+fstring_expr: '{' (testlist_comp | yield_expr) ['='] [ fstring_conversion ] [ fstring_format_spec ] '}'
 fstring_format_spec: ':' fstring_content*

@@ -154,5 +154,5 @@ strings: (STRING | fstring)+
 fstring: FSTRING_START fstring_content* FSTRING_END
 fstring_content: FSTRING_STRING | fstring_expr
 fstring_conversion: '!' NAME
-fstring_expr: '{' testlist_comp [ fstring_conversion ] [ fstring_format_spec ] '}'
+fstring_expr: '{' (testlist_comp | yield_expr) [ fstring_conversion ] [ fstring_format_spec ] '}'
 fstring_format_spec: ':' fstring_content*

@@ -152,5 +152,5 @@ strings: (STRING | fstring)+
 fstring: FSTRING_START fstring_content* FSTRING_END
 fstring_content: FSTRING_STRING | fstring_expr
 fstring_conversion: '!' NAME
-fstring_expr: '{' testlist [ fstring_conversion ] [ fstring_format_spec ] '}'
+fstring_expr: '{' (testlist_comp | yield_expr) [ fstring_conversion ] [ fstring_format_spec ] '}'
 fstring_format_spec: ':' fstring_content*

@@ -167,5 +167,5 @@ strings: (STRING | fstring)+
 fstring: FSTRING_START fstring_content* FSTRING_END
 fstring_content: FSTRING_STRING | fstring_expr
 fstring_conversion: '!' NAME
-fstring_expr: '{' testlist ['='] [ fstring_conversion ] [ fstring_format_spec ] '}'
+fstring_expr: '{' (testlist_comp | yield_expr) ['='] [ fstring_conversion ] [ fstring_format_spec ] '}'
 fstring_format_spec: ':' fstring_content*

@@ -130,8 +130,8 @@ exprlist: (expr|star_expr) (',' (expr|star_expr))* [',']
 testlist: test (',' test)* [',']
 dictorsetmaker: ( ((test ':' test | '**' expr)
                    (comp_for | (',' (test ':' test | '**' expr))* [','])) |
-                  ((test | star_expr)
-                   (comp_for | (',' (test | star_expr))* [','])) )
+                  ((test [':=' test] | star_expr)
+                   (comp_for | (',' (test [':=' test] | star_expr))* [','])) )

 classdef: 'class' NAME ['(' [arglist] ')'] ':' suite
@@ -167,5 +167,5 @@ strings: (STRING | fstring)+
 fstring: FSTRING_START fstring_content* FSTRING_END
 fstring_content: FSTRING_STRING | fstring_expr
 fstring_conversion: '!' NAME
-fstring_expr: '{' testlist ['='] [ fstring_conversion ] [ fstring_format_spec ] '}'
+fstring_expr: '{' (testlist_comp | yield_expr) ['='] [ fstring_conversion ] [ fstring_format_spec ] '}'
 fstring_format_spec: ':' fstring_content*
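The `fstring_expr` rule now accepts `testlist_comp` and `yield_expr` inside the braces across the affected grammar files. A sketch of what that enables, using examples taken from this compare's test additions (behaviour shown for the 3.8 grammar; other versions may differ):

```python
import parso

grammar = parso.load_grammar(version='3.8')

def error_count(code):
    return len(list(grammar.iter_errors(grammar.parse(code))))

# A yield expression inside the braces now parses.
print(error_count('def foo(): return f"{yield 1}"'))  # expected: 0
# A lone starred expression inside the braces is still an error.
print(error_count('f"{*x}"'))                         # expected: > 0
```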
@@ -1,5 +1,6 @@
 import re
 from codecs import BOM_UTF8
+from typing import Tuple

 from parso.python.tokenize import group

@@ -13,10 +14,10 @@ class PrefixPart:
         self.type = typ
         self.value = value
         self.spacing = spacing
-        self.start_pos = start_pos
+        self.start_pos: Tuple[int, int] = start_pos

     @property
-    def end_pos(self):
+    def end_pos(self) -> Tuple[int, int]:
         if self.value.endswith('\n'):
             return self.start_pos[0] + 1, 0
         if self.value == unicode_bom:
@@ -111,8 +111,14 @@ def _get_token_collection(version_info):
     return result


-fstring_string_single_line = _compile(r'(?:\{\{|\}\}|\\(?:\r\n?|\n)|[^{}\r\n])+')
-fstring_string_multi_line = _compile(r'(?:[^{}]+|\{\{|\}\})+')
+unicode_character_name = r'[A-Za-z0-9\-]+(?: [A-Za-z0-9\-]+)*'
+fstring_string_single_line = _compile(
+    r'(?:\{\{|\}\}|\\N\{' + unicode_character_name
+    + r'\}|\\(?:\r\n?|\n)|\\[^\r\nN]|[^{}\r\n\\])+'
+)
+fstring_string_multi_line = _compile(
+    r'(?:\{\{|\}\}|\\N\{' + unicode_character_name + r'\}|\\[^N]|[^{}\\])+'
+)
 fstring_format_spec_single_line = _compile(r'(?:\\(?:\r\n?|\n)|[^{}\r\n])+')
 fstring_format_spec_multi_line = _compile(r'[^{}]+')
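With the tokenizer treating `\N{NAME}` as part of the string content, named unicode escapes inside f-strings no longer open a format expression. A sketch of the effect, reusing examples added to the tests in this compare:

```python
import parso

grammar = parso.load_grammar(version='3.8')

def error_count(code):
    return len(list(grammar.iter_errors(grammar.parse(code))))

# The braces after \N belong to the escape, not to a format expression.
print(error_count('f"\\N{BULLET}"'))               # expected: 0
print(error_count('f"\\N{NO ENTRY} and {expr}"'))  # expected: 0
```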
@@ -47,6 +47,7 @@ try:
     from collections.abc import Mapping
 except ImportError:
     from collections import Mapping
+from typing import Tuple

 from parso.tree import Node, BaseNode, Leaf, ErrorNode, ErrorLeaf, \
     search_ancestor

@@ -149,7 +150,7 @@ class _LeafWithoutNewlines(PythonLeaf):
     __slots__ = ()

     @property
-    def end_pos(self):
+    def end_pos(self) -> Tuple[int, int]:
         return self.line, self.column + len(self.value)


@@ -775,8 +776,8 @@ class WithStmt(Flow):
         return names

     def get_test_node_from_name(self, name):
-        node = name.parent
-        if node.type != 'with_item':
+        node = search_ancestor(name, "with_item")
+        if node is None:
             raise ValueError('The name is not actually part of a with statement.')
         return node.children[0]
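Using `search_ancestor` means a name nested inside an attribute or subscript target (`X.Y`, `Q[0]`) still resolves to its with-item. The snippet below mirrors the new test added further down in this compare:

```python
import parso

code = "with A as X.Y, B as (Z), C as Q[0], D as Q['foo']: pass"
with_stmt = parso.parse(code, version='3').children[0]

tests = [
    with_stmt.get_test_node_from_name(name).value
    for name in with_stmt.get_defined_names(include_setitem=True)
]
print(tests)  # expected: ['A', 'B', 'C', 'D']
```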
@@ -1,4 +1,5 @@
 from abc import abstractmethod, abstractproperty
+from typing import List, Optional, Tuple

 from parso.utils import split_lines

@@ -125,7 +126,7 @@ class NodeOrLeaf:
         return node

     @abstractproperty
-    def start_pos(self):
+    def start_pos(self) -> Tuple[int, int]:
         """
         Returns the starting position of the prefix as a tuple, e.g. `(3, 4)`.

@@ -133,7 +134,7 @@ class NodeOrLeaf:
         """

     @abstractproperty
-    def end_pos(self):
+    def end_pos(self) -> Tuple[int, int]:
         """
         Returns the end position of the prefix as a tuple, e.g. `(3, 4)`.

@@ -180,7 +181,7 @@ class Leaf(NodeOrLeaf):
     '''
     __slots__ = ('value', 'parent', 'line', 'column', 'prefix')

-    def __init__(self, value, start_pos, prefix=''):
+    def __init__(self, value: str, start_pos: Tuple[int, int], prefix: str = '') -> None:
         self.value = value
         '''
         :py:func:`str` The value of the current token.

@@ -191,17 +192,17 @@ class Leaf(NodeOrLeaf):
         :py:func:`str` Typically a mixture of whitespace and comments. Stuff
         that is syntactically irrelevant for the syntax tree.
         '''
-        self.parent = None
+        self.parent: Optional[BaseNode] = None
         '''
         The parent :class:`BaseNode` of this leaf.
         '''

     @property
-    def start_pos(self):
+    def start_pos(self) -> Tuple[int, int]:
         return self.line, self.column

     @start_pos.setter
-    def start_pos(self, value):
+    def start_pos(self, value: Tuple[int, int]) -> None:
         self.line = value[0]
         self.column = value[1]

@@ -226,7 +227,7 @@ class Leaf(NodeOrLeaf):
         return self.value

     @property
-    def end_pos(self):
+    def end_pos(self) -> Tuple[int, int]:
         lines = split_lines(self.value)
         end_pos_line = self.line + len(lines) - 1
         # Check for multiline token

@@ -258,26 +259,26 @@ class BaseNode(NodeOrLeaf):
     """
     __slots__ = ('children', 'parent')

-    def __init__(self, children):
+    def __init__(self, children: List[NodeOrLeaf]) -> None:
         self.children = children
         """
         A list of :class:`NodeOrLeaf` child nodes.
         """
-        self.parent = None
+        self.parent: Optional[BaseNode] = None
         '''
         The parent :class:`BaseNode` of this leaf.
         None if this is the root node.
         '''

     @property
-    def start_pos(self):
+    def start_pos(self) -> Tuple[int, int]:
         return self.children[0].start_pos

     def get_start_pos_of_prefix(self):
         return self.children[0].get_start_pos_of_prefix()

     @property
-    def end_pos(self):
+    def end_pos(self) -> Tuple[int, int]:
         return self.children[-1].end_pos

     def _get_code_for_children(self, children, include_prefix):
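These hunks only add type annotations: positions are `(line, column)` pairs, now declared as `Tuple[int, int]`. A quick sketch of what those properties return at runtime (output values shown as examples):

```python
import parso

module = parso.parse('x = 1\ny = 2\n')
first = module.get_first_leaf()
print(first.value, first.start_pos, first.end_pos)  # e.g. x (1, 0) (1, 1)
print(module.start_pos, module.end_pos)             # e.g. (1, 0) (3, 0)
```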
@@ -145,6 +145,44 @@ FAILING_EXAMPLES = [
     '([False], a) = x',
     'def x(): from math import *',

+    # invalid del statements
+    'del x + y',
+    'del x(y)',
+    'async def foo(): del await x',
+    'def foo(): del (yield x)',
+    'del [x for x in range(10)]',
+    'del *x',
+    'del *x,',
+    'del (*x,)',
+    'del [*x]',
+    'del x, *y',
+    'del *x.y,',
+    'del *x[y],',
+    'del *x[y::], z',
+    'del x, (y, *z)',
+    'del (x, *[y, z])',
+    'del [x, *(y, [*z])]',
+    'del {}',
+    'del {x}',
+    'del {x, y}',
+    'del {x, *y}',
+
+    # invalid starred expressions
+    '*x',
+    '(*x)',
+    '((*x))',
+    '1 + (*x)',
+    '*x; 1',
+    '1; *x',
+    '1\n*x',
+    'x = *y',
+    'x: int = *y',
+    'def foo(): return *x',
+    'def foo(): yield *x',
+    'f"{*x}"',
+    'for *x in 1: pass',
+    '[1 for *x in 1]',
+
     # str/bytes combinations
     '"s" b""',
     '"s" b"" ""',

@@ -198,6 +236,9 @@ FAILING_EXAMPLES = [
     '[*[] for a in [1]]',
     'async def bla():\n def x(): await bla()',
     'del None',
+    'del True',
+    'del False',
+    'del ...',

     # Errors of global / nonlocal
     dedent('''

@@ -296,6 +337,13 @@ FAILING_EXAMPLES = [
                def z():
                    nonlocal a
        '''),
+    # Name is assigned before nonlocal declaration
+    dedent('''
+        def x(a):
+            def y():
+                a = 10
+                nonlocal a
+        '''),
 ]

 if sys.version_info[:2] >= (3, 7):

@@ -356,4 +404,12 @@ if sys.version_info[:2] >= (3, 8):
         '(False := 1)',
         '(None := 1)',
         '(__debug__ := 1)',
+        # Unparenthesized walrus not allowed in dict literals, dict comprehensions and slices
+        '{a:="a": b:=1}',
+        '{y:=1: 2 for x in range(5)}',
+        'a[b:=0:1:2]',
     ]
+    # f-string debugging syntax with invalid conversion character
+    FAILING_EXAMPLES += [
+        "f'{1=!b}'",
+    ]
@@ -46,6 +46,28 @@ def x(b=a):
     global a


+def x(*args, c=2, d):
+    pass
+
+
+def x(*, c=2, d):
+    pass
+
+
+def x(a, b=1, *args, c=2, d):
+    pass
+
+
+def x(a, b=1, *, c=2, d):
+    pass
+
+
+lambda *args, c=2, d: (c, d)
+lambda *, c=2, d: (c, d)
+lambda a, b=1, *args, c=2, d: (c, d)
+lambda a, b=1, *, c=2, d: (c, d)
+
+
 *foo, a = (1,)
 *foo[0], a = (1,)
 *[], a = (1,)

@@ -113,6 +135,29 @@ def x():
         nonlocal a


+def x(a):
+    def y():
+        nonlocal a
+
+
+def x(a, b):
+    def y():
+        nonlocal b
+        nonlocal a
+
+
+def x(a):
+    def y():
+        def z():
+            nonlocal a
+
+
+def x():
+    def y(a):
+        def z():
+            nonlocal a
+
+
 a = *args, *args
 error[(*args, *args)] = 3
 *args, *args
@@ -146,7 +146,7 @@ def test_cache_last_used_update(diff_cache, use_file_io):
     parse('somecode2', cache=True, path=p, diff_cache=diff_cache)

     node_cache_item = next(iter(parser_cache.values()))[p]
-    assert now < node_cache_item.last_used < time.time()
+    assert now <= node_cache_item.last_used <= time.time()


 @skip_pypy
@@ -60,6 +60,24 @@ def grammar():

        # a line continuation inside of an format spec
        'f"{123:.2\\\nf}"',
+
+        # some unparenthesized syntactic structures
+        'f"{*x,}"',
+        'f"{*x, *y}"',
+        'f"{x, *y}"',
+        'f"{*x, y}"',
+        'f"{x for x in [1]}"',
+
+        # named unicode characters
+        'f"\\N{BULLET}"',
+        'f"\\N{FLEUR-DE-LIS}"',
+        'f"\\N{NO ENTRY}"',
+        'f"Combo {expr} and \\N{NO ENTRY}"',
+        'f"\\N{NO ENTRY} and {expr}"',
+        'f"\\N{no entry}"',
+        'f"\\N{SOYOMBO LETTER -A}"',
+        'f"\\N{DOMINO TILE HORIZONTAL-00-00}"',
+        'f"""\\N{NO ENTRY}"""',
     ]
 )
 def test_valid(code, grammar):

@@ -79,6 +97,7 @@ def test_valid(code, grammar):

         # invalid conversion characters
         'f"{1!{a}}"',
+        'f"{1=!{a}}"',
         'f"{!{a}}"',

         # The curly braces must contain an expression

@@ -96,6 +115,11 @@ def test_valid(code, grammar):

         # a newline without a line continuation inside a single-line string
         'f"abc\ndef"',
+
+        # various named unicode escapes that aren't name-shaped
+        'f"\\N{ BULLET }"',
+        'f"\\N{NO ENTRY}"',
+        'f"""\\N{NO\nENTRY}"""',
     ]
 )
 def test_invalid(code, grammar):

@@ -114,6 +138,8 @@ def test_invalid(code, grammar):
                                   (1, 10), (1, 11), (1, 12), (1, 13)]),
         ('f"""\n {\nfoo\n }"""', [(1, 0), (1, 4), (2, 1), (3, 0), (4, 1),
                                   (4, 2), (4, 5)]),
+        ('f"\\N{NO ENTRY} and {expr}"', [(1, 0), (1, 2), (1, 19), (1, 20),
+                                         (1, 24), (1, 25), (1, 26)]),
     ]
 )
 def test_tokenize_start_pos(code, positions):
@@ -6,6 +6,7 @@ tests of pydocstyle.
 import difflib
 import re
 from functools import total_ordering
+from typing import Iterator, Tuple

 import parso
 from parso.utils import python_bytes_to_unicode

@@ -13,7 +14,7 @@ from parso.utils import python_bytes_to_unicode

 @total_ordering
 class WantedIssue:
-    def __init__(self, code, line, column):
+    def __init__(self, code: str, line: int, column: int) -> None:
         self.code = code
         self._line = line
         self._column = column

@@ -21,18 +22,18 @@ class WantedIssue:
     def __eq__(self, other):
         return self.code == other.code and self.start_pos == other.start_pos

-    def __lt__(self, other):
+    def __lt__(self, other: 'WantedIssue') -> bool:
         return self.start_pos < other.start_pos or self.code < other.code

-    def __hash__(self):
+    def __hash__(self) -> int:
         return hash(str(self.code) + str(self._line) + str(self._column))

     @property
-    def start_pos(self):
+    def start_pos(self) -> Tuple[int, int]:
         return self._line, self._column


-def collect_errors(code):
+def collect_errors(code: str) -> Iterator[WantedIssue]:
     for line_nr, line in enumerate(code.splitlines(), 1):
         match = re.match(r'(\s*)#: (.*)$', line)
         if match is not None:
@@ -229,3 +229,13 @@ def test_iter_funcdefs():
     module = parse(code, version='3.8')
     func_names = [f.name.value for f in module.iter_funcdefs()]
     assert func_names == ['normal', 'asyn', 'dec_normal', 'dec_async']
+
+
+def test_with_stmt_get_test_node_from_name():
+    code = "with A as X.Y, B as (Z), C as Q[0], D as Q['foo']: pass"
+    with_stmt = parse(code, version='3').children[0]
+    tests = [
+        with_stmt.get_test_node_from_name(name).value
+        for name in with_stmt.get_defined_names(include_setitem=True)
+    ]
+    assert tests == ["A", "B", "C", "D"]
@@ -274,6 +274,8 @@ def test_paren_kwarg():
         r'fr"\""',
         r'fr"\\\""',
         r"print(f'Some {x:.2f} and some {y}')",
+        # Unparenthesized yield expression
+        'def foo(): return f"{yield 1}"',
     ]
 )
 def test_valid_fstrings(code):

@@ -287,12 +289,37 @@ def test_valid_fstrings(code):
         '[total := total + v for v in range(10)]',
         'while chunk := file.read(2):\n pass',
         'numbers = [y := math.factorial(x), y**2, y**3]',
+        '{(a:="a"): (b:=1)}',
+        '{(y:=1): 2 for x in range(5)}',
+        'a[(b:=0)]',
+        'a[(b:=0, c:=0)]',
+        'a[(b:=0):1:2]',
     ]
 )
 def test_valid_namedexpr(code):
     assert not _get_error_list(code, version='3.8')


+@pytest.mark.parametrize(
+    'code', [
+        '{x := 1, 2, 3}',
+        '{x4 := x ** 5 for x in range(7)}',
+    ]
+)
+def test_valid_namedexpr_set(code):
+    assert not _get_error_list(code, version='3.9')
+
+
+@pytest.mark.parametrize(
+    'code', [
+        'a[b:=0]',
+        'a[b:=0, c:=0]',
+    ]
+)
+def test_valid_namedexpr_index(code):
+    assert not _get_error_list(code, version='3.10')
+
+
 @pytest.mark.parametrize(
     ('code', 'message'), [
         ("f'{1+}'", ('invalid syntax')),

@@ -388,11 +415,28 @@ def test_unparenthesized_genexp(source, no_errors):
         ('*x = 2', False),
         ('(*y) = 1', False),
         ('((*z)) = 1', False),
+        ('*a,', True),
         ('*a, = 1', True),
+        ('(*a,)', True),
         ('(*a,) = 1', True),
+        ('[*a]', True),
         ('[*a] = 1', True),
+        ('a, *b', True),
         ('a, *b = 1', True),
+        ('a, *b, c', True),
         ('a, *b, c = 1', True),
-        ('a, (*b), c = 1', True),
-        ('a, ((*b)), c = 1', True),
+        ('a, (*b, c), d', True),
         ('a, (*b, c), d = 1', True),
+        ('*a.b,', True),
+        ('*a.b, = 1', True),
+        ('*a[b],', True),
+        ('*a[b], = 1', True),
+        ('*a[b::], c', True),
+        ('*a[b::], c = 1', True),
+        ('(a, *[b, c])', True),
+        ('(a, *[b, c]) = 1', True),
+        ('[a, *(b, [*c])]', True),
+        ('[a, *(b, [*c])] = 1', True),
+        ('[*(1,2,3)]', True),
+        ('{*(1,2,3)}', True),
+        ('[*(1,2,3),]', True),

@@ -405,3 +449,48 @@ def test_unparenthesized_genexp(source, no_errors):
 )
 def test_starred_expr(source, no_errors):
     assert bool(_get_error_list(source, version="3")) ^ no_errors
+
+
+@pytest.mark.parametrize(
+    'code', [
+        'a, (*b), c',
+        'a, (*b), c = 1',
+        'a, ((*b)), c',
+        'a, ((*b)), c = 1',
+    ]
+)
+def test_parenthesized_single_starred_expr(code):
+    assert not _get_error_list(code, version='3.8')
+    assert _get_error_list(code, version='3.9')
+
+
+@pytest.mark.parametrize(
+    'code', [
+        '() = ()',
+        '() = []',
+        '[] = ()',
+        '[] = []',
+    ]
+)
+def test_valid_empty_assignment(code):
+    assert not _get_error_list(code)
+
+
+@pytest.mark.parametrize(
+    'code', [
+        'del ()',
+        'del []',
+        'del x',
+        'del x,',
+        'del x, y',
+        'del (x, y)',
+        'del [x, y]',
+        'del (x, [y, z])',
+        'del x.y, x[y]',
+        'del f(x)[y::]',
+        'del x[[*y]]',
+        'del x[[*y]::]',
+    ]
+)
+def test_valid_del(code):
+    assert not _get_error_list(code)