Remove support for parsing Python 2

This commit is contained in:
Dave Halter
2020-07-24 14:48:02 +02:00
parent 5ac4bac368
commit b5e2e67a4d
17 changed files with 197 additions and 809 deletions

View File

@@ -2,7 +2,6 @@ import re
import tempfile import tempfile
import shutil import shutil
import logging import logging
import sys
import os import os
import pytest import pytest
@@ -13,8 +12,7 @@ from parso.utils import parse_version_string
collect_ignore = ["setup.py"] collect_ignore = ["setup.py"]
VERSIONS_2 = '2.7', _SUPPORTED_VERSIONS = '3.6', '3.7', '3.8', '3.9', '3.10'
VERSIONS_3 = '3.4', '3.5', '3.6', '3.7', '3.8'
@pytest.fixture(scope='session') @pytest.fixture(scope='session')
@@ -52,15 +50,10 @@ def pytest_generate_tests(metafunc):
ids=[c.name for c in cases] ids=[c.name for c in cases]
) )
elif 'each_version' in metafunc.fixturenames: elif 'each_version' in metafunc.fixturenames:
metafunc.parametrize('each_version', VERSIONS_2 + VERSIONS_3) metafunc.parametrize('each_version', _SUPPORTED_VERSIONS)
elif 'each_py2_version' in metafunc.fixturenames:
metafunc.parametrize('each_py2_version', VERSIONS_2)
elif 'each_py3_version' in metafunc.fixturenames:
metafunc.parametrize('each_py3_version', VERSIONS_3)
elif 'version_ge_py36' in metafunc.fixturenames:
metafunc.parametrize('version_ge_py36', ['3.6', '3.7', '3.8'])
elif 'version_ge_py38' in metafunc.fixturenames: elif 'version_ge_py38' in metafunc.fixturenames:
metafunc.parametrize('version_ge_py38', ['3.8']) ge38 = set(_SUPPORTED_VERSIONS) - {'3.6', '3.7'}
metafunc.parametrize('version_ge_py38', sorted(ge38))
class NormalizerIssueCase(object): class NormalizerIssueCase(object):
@@ -137,37 +130,16 @@ def works_not_in_py(each_version):
@pytest.fixture @pytest.fixture
def works_in_py2(each_version): def works_in_py(each_version):
return Checker(each_version, each_version.startswith('2')) return Checker(each_version, True)
@pytest.fixture
def works_ge_py27(each_version):
version_info = parse_version_string(each_version)
return Checker(each_version, version_info >= (2, 7))
@pytest.fixture
def works_ge_py3(each_version):
version_info = parse_version_string(each_version)
return Checker(each_version, version_info >= (3, 0))
@pytest.fixture
def works_ge_py35(each_version):
version_info = parse_version_string(each_version)
return Checker(each_version, version_info >= (3, 5))
@pytest.fixture
def works_ge_py36(each_version):
version_info = parse_version_string(each_version)
return Checker(each_version, version_info >= (3, 6))
@pytest.fixture @pytest.fixture
def works_ge_py38(each_version): def works_ge_py38(each_version):
version_info = parse_version_string(each_version) version_info = parse_version_string(each_version)
return Checker(each_version, version_info >= (3, 8)) return Checker(each_version, version_info >= (3, 8))
@pytest.fixture @pytest.fixture
def works_ge_py39(each_version): def works_ge_py39(each_version):
version_info = parse_version_string(each_version) version_info = parse_version_string(each_version)

View File

@@ -1,143 +0,0 @@
# Grammar for Python
# Note: Changing the grammar specified in this file will most likely
# require corresponding changes in the parser module
# (../Modules/parsermodule.c). If you can't make the changes to
# that module yourself, please co-ordinate the required changes
# with someone who can; ask around on python-dev for help. Fred
# Drake <fdrake@acm.org> will probably be listening there.
# NOTE WELL: You should also follow all the steps listed in PEP 306,
# "How to Change Python's Grammar"
# Start symbols for the grammar:
# single_input is a single interactive statement;
# file_input is a module or sequence of commands read from an input file;
# eval_input is the input for the eval() and input() functions.
# NB: compound_stmt in single_input is followed by extra NEWLINE!
single_input: NEWLINE | simple_stmt | compound_stmt NEWLINE
file_input: stmt* ENDMARKER
eval_input: testlist NEWLINE* ENDMARKER
decorator: '@' dotted_name [ '(' [arglist] ')' ] NEWLINE
decorators: decorator+
decorated: decorators (classdef | funcdef)
funcdef: 'def' NAME parameters ':' suite
parameters: '(' [varargslist] ')'
varargslist: ((fpdef ['=' test] ',')*
('*' NAME [',' '**' NAME] | '**' NAME) |
fpdef ['=' test] (',' fpdef ['=' test])* [','])
fpdef: NAME | '(' fplist ')'
fplist: fpdef (',' fpdef)* [',']
stmt: simple_stmt | compound_stmt | NEWLINE
simple_stmt: small_stmt (';' small_stmt)* [';'] NEWLINE
small_stmt: (expr_stmt | print_stmt | del_stmt | pass_stmt | flow_stmt |
import_stmt | global_stmt | exec_stmt | assert_stmt)
expr_stmt: testlist (augassign (yield_expr|testlist) |
('=' (yield_expr|testlist))*)
augassign: ('+=' | '-=' | '*=' | '/=' | '%=' | '&=' | '|=' | '^=' |
'<<=' | '>>=' | '**=' | '//=')
# For normal assignments, additional restrictions enforced by the interpreter
print_stmt: 'print' ( [ test (',' test)* [','] ] |
'>>' test [ (',' test)+ [','] ] )
del_stmt: 'del' exprlist
pass_stmt: 'pass'
flow_stmt: break_stmt | continue_stmt | return_stmt | raise_stmt | yield_stmt
break_stmt: 'break'
continue_stmt: 'continue'
return_stmt: 'return' [testlist]
yield_stmt: yield_expr
raise_stmt: 'raise' [test [',' test [',' test]]]
import_stmt: import_name | import_from
import_name: 'import' dotted_as_names
import_from: ('from' ('.'* dotted_name | '.'+)
'import' ('*' | '(' import_as_names ')' | import_as_names))
import_as_name: NAME ['as' NAME]
dotted_as_name: dotted_name ['as' NAME]
import_as_names: import_as_name (',' import_as_name)* [',']
dotted_as_names: dotted_as_name (',' dotted_as_name)*
dotted_name: NAME ('.' NAME)*
global_stmt: 'global' NAME (',' NAME)*
exec_stmt: 'exec' expr ['in' test [',' test]]
assert_stmt: 'assert' test [',' test]
compound_stmt: if_stmt | while_stmt | for_stmt | try_stmt | with_stmt | funcdef | classdef | decorated
if_stmt: 'if' test ':' suite ('elif' test ':' suite)* ['else' ':' suite]
while_stmt: 'while' test ':' suite ['else' ':' suite]
for_stmt: 'for' exprlist 'in' testlist ':' suite ['else' ':' suite]
try_stmt: ('try' ':' suite
((except_clause ':' suite)+
['else' ':' suite]
['finally' ':' suite] |
'finally' ':' suite))
with_stmt: 'with' with_item (',' with_item)* ':' suite
with_item: test ['as' expr]
# NB compile.c makes sure that the default except clause is last
except_clause: 'except' [test [('as' | ',') test]]
suite: simple_stmt | NEWLINE INDENT stmt+ DEDENT
# Backward compatibility cruft to support:
# [ x for x in lambda: True, lambda: False if x() ]
# even while also allowing:
# lambda x: 5 if x else 2
# (But not a mix of the two)
testlist_safe: old_test [(',' old_test)+ [',']]
old_test: or_test | old_lambdef
old_lambdef: 'lambda' [varargslist] ':' old_test
test: or_test ['if' or_test 'else' test] | lambdef
or_test: and_test ('or' and_test)*
and_test: not_test ('and' not_test)*
not_test: 'not' not_test | comparison
comparison: expr (comp_op expr)*
comp_op: '<'|'>'|'=='|'>='|'<='|'<>'|'!='|'in'|'not' 'in'|'is'|'is' 'not'
expr: xor_expr ('|' xor_expr)*
xor_expr: and_expr ('^' and_expr)*
and_expr: shift_expr ('&' shift_expr)*
shift_expr: arith_expr (('<<'|'>>') arith_expr)*
arith_expr: term (('+'|'-') term)*
term: factor (('*'|'/'|'%'|'//') factor)*
factor: ('+'|'-'|'~') factor | power
power: atom trailer* ['**' factor]
atom: ('(' [yield_expr|testlist_comp] ')' |
'[' [listmaker] ']' |
'{' [dictorsetmaker] '}' |
'`' testlist1 '`' |
NAME | NUMBER | strings)
strings: STRING+
listmaker: test ( list_for | (',' test)* [','] )
testlist_comp: test ( sync_comp_for | (',' test)* [','] )
lambdef: 'lambda' [varargslist] ':' test
trailer: '(' [arglist] ')' | '[' subscriptlist ']' | '.' NAME
subscriptlist: subscript (',' subscript)* [',']
subscript: '.' '.' '.' | test | [test] ':' [test] [sliceop]
sliceop: ':' [test]
exprlist: expr (',' expr)* [',']
testlist: test (',' test)* [',']
dictorsetmaker: ( (test ':' test (sync_comp_for | (',' test ':' test)* [','])) |
(test (sync_comp_for | (',' test)* [','])) )
classdef: 'class' NAME ['(' [testlist] ')'] ':' suite
arglist: (argument ',')* (argument [',']
|'*' test (',' argument)* [',' '**' test]
|'**' test)
# The reason that keywords are test nodes instead of NAME is that using NAME
# results in an ambiguity. ast.c makes sure it's a NAME.
argument: test [sync_comp_for] | test '=' test
list_iter: list_for | list_if
list_for: 'for' exprlist 'in' testlist_safe [list_iter]
list_if: 'if' old_test [list_iter]
comp_iter: sync_comp_for | comp_if
sync_comp_for: 'for' exprlist 'in' or_test [comp_iter]
comp_if: 'if' old_test [comp_iter]
testlist1: test (',' test)*
# not used in grammar, but may appear in "node" passed from Parser to Compiler
encoding_decl: NAME
yield_expr: 'yield' [testlist]

View File

@@ -1,134 +0,0 @@
# Grammar for Python
# Note: Changing the grammar specified in this file will most likely
# require corresponding changes in the parser module
# (../Modules/parsermodule.c). If you can't make the changes to
# that module yourself, please co-ordinate the required changes
# with someone who can; ask around on python-dev for help. Fred
# Drake <fdrake@acm.org> will probably be listening there.
# NOTE WELL: You should also follow all the steps listed in PEP 306,
# "How to Change Python's Grammar"
# Start symbols for the grammar:
# single_input is a single interactive statement;
# file_input is a module or sequence of commands read from an input file;
# eval_input is the input for the eval() functions.
# NB: compound_stmt in single_input is followed by extra NEWLINE!
single_input: NEWLINE | simple_stmt | compound_stmt NEWLINE
file_input: stmt* ENDMARKER
eval_input: testlist NEWLINE* ENDMARKER
decorator: '@' dotted_name [ '(' [arglist] ')' ] NEWLINE
decorators: decorator+
decorated: decorators (classdef | funcdef)
funcdef: 'def' NAME parameters ['->' test] ':' suite
parameters: '(' [typedargslist] ')'
typedargslist: (tfpdef ['=' test] (',' tfpdef ['=' test])* [','
['*' [tfpdef] (',' tfpdef ['=' test])* [',' '**' tfpdef] | '**' tfpdef]]
| '*' [tfpdef] (',' tfpdef ['=' test])* [',' '**' tfpdef] | '**' tfpdef)
tfpdef: NAME [':' test]
varargslist: (vfpdef ['=' test] (',' vfpdef ['=' test])* [','
['*' [vfpdef] (',' vfpdef ['=' test])* [',' '**' vfpdef] | '**' vfpdef]]
| '*' [vfpdef] (',' vfpdef ['=' test])* [',' '**' vfpdef] | '**' vfpdef)
vfpdef: NAME
stmt: simple_stmt | compound_stmt | NEWLINE
simple_stmt: small_stmt (';' small_stmt)* [';'] NEWLINE
small_stmt: (expr_stmt | del_stmt | pass_stmt | flow_stmt |
import_stmt | global_stmt | nonlocal_stmt | assert_stmt)
expr_stmt: testlist_star_expr (augassign (yield_expr|testlist) |
('=' (yield_expr|testlist_star_expr))*)
testlist_star_expr: (test|star_expr) (',' (test|star_expr))* [',']
augassign: ('+=' | '-=' | '*=' | '/=' | '%=' | '&=' | '|=' | '^=' |
'<<=' | '>>=' | '**=' | '//=')
# For normal assignments, additional restrictions enforced by the interpreter
del_stmt: 'del' exprlist
pass_stmt: 'pass'
flow_stmt: break_stmt | continue_stmt | return_stmt | raise_stmt | yield_stmt
break_stmt: 'break'
continue_stmt: 'continue'
return_stmt: 'return' [testlist]
yield_stmt: yield_expr
raise_stmt: 'raise' [test ['from' test]]
import_stmt: import_name | import_from
import_name: 'import' dotted_as_names
# note below: the ('.' | '...') is necessary because '...' is tokenized as ELLIPSIS
import_from: ('from' (('.' | '...')* dotted_name | ('.' | '...')+)
'import' ('*' | '(' import_as_names ')' | import_as_names))
import_as_name: NAME ['as' NAME]
dotted_as_name: dotted_name ['as' NAME]
import_as_names: import_as_name (',' import_as_name)* [',']
dotted_as_names: dotted_as_name (',' dotted_as_name)*
dotted_name: NAME ('.' NAME)*
global_stmt: 'global' NAME (',' NAME)*
nonlocal_stmt: 'nonlocal' NAME (',' NAME)*
assert_stmt: 'assert' test [',' test]
compound_stmt: if_stmt | while_stmt | for_stmt | try_stmt | with_stmt | funcdef | classdef | decorated
if_stmt: 'if' test ':' suite ('elif' test ':' suite)* ['else' ':' suite]
while_stmt: 'while' test ':' suite ['else' ':' suite]
for_stmt: 'for' exprlist 'in' testlist ':' suite ['else' ':' suite]
try_stmt: ('try' ':' suite
((except_clause ':' suite)+
['else' ':' suite]
['finally' ':' suite] |
'finally' ':' suite))
with_stmt: 'with' with_item (',' with_item)* ':' suite
with_item: test ['as' expr]
# NB compile.c makes sure that the default except clause is last
except_clause: 'except' [test ['as' NAME]]
suite: simple_stmt | NEWLINE INDENT stmt+ DEDENT
test: or_test ['if' or_test 'else' test] | lambdef
test_nocond: or_test | lambdef_nocond
lambdef: 'lambda' [varargslist] ':' test
lambdef_nocond: 'lambda' [varargslist] ':' test_nocond
or_test: and_test ('or' and_test)*
and_test: not_test ('and' not_test)*
not_test: 'not' not_test | comparison
comparison: expr (comp_op expr)*
# <> isn't actually a valid comparison operator in Python. It's here for the
# sake of a __future__ import described in PEP 401
comp_op: '<'|'>'|'=='|'>='|'<='|'<>'|'!='|'in'|'not' 'in'|'is'|'is' 'not'
star_expr: '*' expr
expr: xor_expr ('|' xor_expr)*
xor_expr: and_expr ('^' and_expr)*
and_expr: shift_expr ('&' shift_expr)*
shift_expr: arith_expr (('<<'|'>>') arith_expr)*
arith_expr: term (('+'|'-') term)*
term: factor (('*'|'/'|'%'|'//') factor)*
factor: ('+'|'-'|'~') factor | power
power: atom trailer* ['**' factor]
atom: ('(' [yield_expr|testlist_comp] ')' |
'[' [testlist_comp] ']' |
'{' [dictorsetmaker] '}' |
NAME | NUMBER | strings | '...' | 'None' | 'True' | 'False')
strings: STRING+
testlist_comp: (test|star_expr) ( sync_comp_for | (',' (test|star_expr))* [','] )
trailer: '(' [arglist] ')' | '[' subscriptlist ']' | '.' NAME
subscriptlist: subscript (',' subscript)* [',']
subscript: test | [test] ':' [test] [sliceop]
sliceop: ':' [test]
exprlist: (expr|star_expr) (',' (expr|star_expr))* [',']
testlist: test (',' test)* [',']
dictorsetmaker: ( (test ':' test (sync_comp_for | (',' test ':' test)* [','])) |
(test (sync_comp_for | (',' test)* [','])) )
classdef: 'class' NAME ['(' [arglist] ')'] ':' suite
arglist: (argument ',')* (argument [',']
|'*' test (',' argument)* [',' '**' test]
|'**' test)
# The reason that keywords are test nodes instead of NAME is that using NAME
# results in an ambiguity. ast.c makes sure it's a NAME.
argument: test [sync_comp_for] | test '=' test # Really [keyword '='] test
comp_iter: sync_comp_for | comp_if
sync_comp_for: 'for' exprlist 'in' or_test [comp_iter]
comp_if: 'if' test_nocond [comp_iter]
# not used in grammar, but may appear in "node" passed from Parser to Compiler
encoding_decl: NAME
yield_expr: 'yield' [yield_arg]
yield_arg: 'from' test | testlist

View File

@@ -1,134 +0,0 @@
# Grammar for Python
# Note: Changing the grammar specified in this file will most likely
# require corresponding changes in the parser module
# (../Modules/parsermodule.c). If you can't make the changes to
# that module yourself, please co-ordinate the required changes
# with someone who can; ask around on python-dev for help. Fred
# Drake <fdrake@acm.org> will probably be listening there.
# NOTE WELL: You should also follow all the steps listed at
# https://docs.python.org/devguide/grammar.html
# Start symbols for the grammar:
# single_input is a single interactive statement;
# file_input is a module or sequence of commands read from an input file;
# eval_input is the input for the eval() functions.
# NB: compound_stmt in single_input is followed by extra NEWLINE!
single_input: NEWLINE | simple_stmt | compound_stmt NEWLINE
file_input: stmt* ENDMARKER
eval_input: testlist NEWLINE* ENDMARKER
decorator: '@' dotted_name [ '(' [arglist] ')' ] NEWLINE
decorators: decorator+
decorated: decorators (classdef | funcdef)
funcdef: 'def' NAME parameters ['->' test] ':' suite
parameters: '(' [typedargslist] ')'
typedargslist: (tfpdef ['=' test] (',' tfpdef ['=' test])* [','
['*' [tfpdef] (',' tfpdef ['=' test])* [',' '**' tfpdef] | '**' tfpdef]]
| '*' [tfpdef] (',' tfpdef ['=' test])* [',' '**' tfpdef] | '**' tfpdef)
tfpdef: NAME [':' test]
varargslist: (vfpdef ['=' test] (',' vfpdef ['=' test])* [','
['*' [vfpdef] (',' vfpdef ['=' test])* [',' '**' vfpdef] | '**' vfpdef]]
| '*' [vfpdef] (',' vfpdef ['=' test])* [',' '**' vfpdef] | '**' vfpdef)
vfpdef: NAME
stmt: simple_stmt | compound_stmt | NEWLINE
simple_stmt: small_stmt (';' small_stmt)* [';'] NEWLINE
small_stmt: (expr_stmt | del_stmt | pass_stmt | flow_stmt |
import_stmt | global_stmt | nonlocal_stmt | assert_stmt)
expr_stmt: testlist_star_expr (augassign (yield_expr|testlist) |
('=' (yield_expr|testlist_star_expr))*)
testlist_star_expr: (test|star_expr) (',' (test|star_expr))* [',']
augassign: ('+=' | '-=' | '*=' | '/=' | '%=' | '&=' | '|=' | '^=' |
'<<=' | '>>=' | '**=' | '//=')
# For normal assignments, additional restrictions enforced by the interpreter
del_stmt: 'del' exprlist
pass_stmt: 'pass'
flow_stmt: break_stmt | continue_stmt | return_stmt | raise_stmt | yield_stmt
break_stmt: 'break'
continue_stmt: 'continue'
return_stmt: 'return' [testlist]
yield_stmt: yield_expr
raise_stmt: 'raise' [test ['from' test]]
import_stmt: import_name | import_from
import_name: 'import' dotted_as_names
# note below: the ('.' | '...') is necessary because '...' is tokenized as ELLIPSIS
import_from: ('from' (('.' | '...')* dotted_name | ('.' | '...')+)
'import' ('*' | '(' import_as_names ')' | import_as_names))
import_as_name: NAME ['as' NAME]
dotted_as_name: dotted_name ['as' NAME]
import_as_names: import_as_name (',' import_as_name)* [',']
dotted_as_names: dotted_as_name (',' dotted_as_name)*
dotted_name: NAME ('.' NAME)*
global_stmt: 'global' NAME (',' NAME)*
nonlocal_stmt: 'nonlocal' NAME (',' NAME)*
assert_stmt: 'assert' test [',' test]
compound_stmt: if_stmt | while_stmt | for_stmt | try_stmt | with_stmt | funcdef | classdef | decorated
if_stmt: 'if' test ':' suite ('elif' test ':' suite)* ['else' ':' suite]
while_stmt: 'while' test ':' suite ['else' ':' suite]
for_stmt: 'for' exprlist 'in' testlist ':' suite ['else' ':' suite]
try_stmt: ('try' ':' suite
((except_clause ':' suite)+
['else' ':' suite]
['finally' ':' suite] |
'finally' ':' suite))
with_stmt: 'with' with_item (',' with_item)* ':' suite
with_item: test ['as' expr]
# NB compile.c makes sure that the default except clause is last
except_clause: 'except' [test ['as' NAME]]
suite: simple_stmt | NEWLINE INDENT stmt+ DEDENT
test: or_test ['if' or_test 'else' test] | lambdef
test_nocond: or_test | lambdef_nocond
lambdef: 'lambda' [varargslist] ':' test
lambdef_nocond: 'lambda' [varargslist] ':' test_nocond
or_test: and_test ('or' and_test)*
and_test: not_test ('and' not_test)*
not_test: 'not' not_test | comparison
comparison: expr (comp_op expr)*
# <> isn't actually a valid comparison operator in Python. It's here for the
# sake of a __future__ import described in PEP 401
comp_op: '<'|'>'|'=='|'>='|'<='|'<>'|'!='|'in'|'not' 'in'|'is'|'is' 'not'
star_expr: '*' expr
expr: xor_expr ('|' xor_expr)*
xor_expr: and_expr ('^' and_expr)*
and_expr: shift_expr ('&' shift_expr)*
shift_expr: arith_expr (('<<'|'>>') arith_expr)*
arith_expr: term (('+'|'-') term)*
term: factor (('*'|'/'|'%'|'//') factor)*
factor: ('+'|'-'|'~') factor | power
power: atom trailer* ['**' factor]
atom: ('(' [yield_expr|testlist_comp] ')' |
'[' [testlist_comp] ']' |
'{' [dictorsetmaker] '}' |
NAME | NUMBER | strings | '...' | 'None' | 'True' | 'False')
strings: STRING+
testlist_comp: (test|star_expr) ( sync_comp_for | (',' (test|star_expr))* [','] )
trailer: '(' [arglist] ')' | '[' subscriptlist ']' | '.' NAME
subscriptlist: subscript (',' subscript)* [',']
subscript: test | [test] ':' [test] [sliceop]
sliceop: ':' [test]
exprlist: (expr|star_expr) (',' (expr|star_expr))* [',']
testlist: test (',' test)* [',']
dictorsetmaker: ( (test ':' test (sync_comp_for | (',' test ':' test)* [','])) |
(test (sync_comp_for | (',' test)* [','])) )
classdef: 'class' NAME ['(' [arglist] ')'] ':' suite
arglist: (argument ',')* (argument [',']
|'*' test (',' argument)* [',' '**' test]
|'**' test)
# The reason that keywords are test nodes instead of NAME is that using NAME
# results in an ambiguity. ast.c makes sure it's a NAME.
argument: test [sync_comp_for] | test '=' test # Really [keyword '='] test
comp_iter: sync_comp_for | comp_if
sync_comp_for: 'for' exprlist 'in' or_test [comp_iter]
comp_if: 'if' test_nocond [comp_iter]
# not used in grammar, but may appear in "node" passed from Parser to Compiler
encoding_decl: NAME
yield_expr: 'yield' [yield_arg]
yield_arg: 'from' test | testlist

View File

@@ -1,153 +0,0 @@
# Grammar for Python
# Note: Changing the grammar specified in this file will most likely
# require corresponding changes in the parser module
# (../Modules/parsermodule.c). If you can't make the changes to
# that module yourself, please co-ordinate the required changes
# with someone who can; ask around on python-dev for help. Fred
# Drake <fdrake@acm.org> will probably be listening there.
# NOTE WELL: You should also follow all the steps listed at
# https://docs.python.org/devguide/grammar.html
# Start symbols for the grammar:
# single_input is a single interactive statement;
# file_input is a module or sequence of commands read from an input file;
# eval_input is the input for the eval() functions.
# NB: compound_stmt in single_input is followed by extra NEWLINE!
single_input: NEWLINE | simple_stmt | compound_stmt NEWLINE
file_input: stmt* ENDMARKER
eval_input: testlist NEWLINE* ENDMARKER
decorator: '@' dotted_name [ '(' [arglist] ')' ] NEWLINE
decorators: decorator+
decorated: decorators (classdef | funcdef | async_funcdef)
# NOTE: Reinoud Elhorst, using ASYNC/AWAIT keywords instead of tokens
# skipping python3.5 compatibility, in favour of 3.7 solution
async_funcdef: 'async' funcdef
funcdef: 'def' NAME parameters ['->' test] ':' suite
parameters: '(' [typedargslist] ')'
typedargslist: (tfpdef ['=' test] (',' tfpdef ['=' test])* [','
['*' [tfpdef] (',' tfpdef ['=' test])* [',' '**' tfpdef] | '**' tfpdef]]
| '*' [tfpdef] (',' tfpdef ['=' test])* [',' '**' tfpdef] | '**' tfpdef)
tfpdef: NAME [':' test]
varargslist: (vfpdef ['=' test] (',' vfpdef ['=' test])* [','
['*' [vfpdef] (',' vfpdef ['=' test])* [',' '**' vfpdef] | '**' vfpdef]]
| '*' [vfpdef] (',' vfpdef ['=' test])* [',' '**' vfpdef] | '**' vfpdef)
vfpdef: NAME
stmt: simple_stmt | compound_stmt | NEWLINE
simple_stmt: small_stmt (';' small_stmt)* [';'] NEWLINE
small_stmt: (expr_stmt | del_stmt | pass_stmt | flow_stmt |
import_stmt | global_stmt | nonlocal_stmt | assert_stmt)
expr_stmt: testlist_star_expr (augassign (yield_expr|testlist) |
('=' (yield_expr|testlist_star_expr))*)
testlist_star_expr: (test|star_expr) (',' (test|star_expr))* [',']
augassign: ('+=' | '-=' | '*=' | '@=' | '/=' | '%=' | '&=' | '|=' | '^=' |
'<<=' | '>>=' | '**=' | '//=')
# For normal assignments, additional restrictions enforced by the interpreter
del_stmt: 'del' exprlist
pass_stmt: 'pass'
flow_stmt: break_stmt | continue_stmt | return_stmt | raise_stmt | yield_stmt
break_stmt: 'break'
continue_stmt: 'continue'
return_stmt: 'return' [testlist]
yield_stmt: yield_expr
raise_stmt: 'raise' [test ['from' test]]
import_stmt: import_name | import_from
import_name: 'import' dotted_as_names
# note below: the ('.' | '...') is necessary because '...' is tokenized as ELLIPSIS
import_from: ('from' (('.' | '...')* dotted_name | ('.' | '...')+)
'import' ('*' | '(' import_as_names ')' | import_as_names))
import_as_name: NAME ['as' NAME]
dotted_as_name: dotted_name ['as' NAME]
import_as_names: import_as_name (',' import_as_name)* [',']
dotted_as_names: dotted_as_name (',' dotted_as_name)*
dotted_name: NAME ('.' NAME)*
global_stmt: 'global' NAME (',' NAME)*
nonlocal_stmt: 'nonlocal' NAME (',' NAME)*
assert_stmt: 'assert' test [',' test]
compound_stmt: if_stmt | while_stmt | for_stmt | try_stmt | with_stmt | funcdef | classdef | decorated | async_stmt
async_stmt: 'async' (funcdef | with_stmt | for_stmt)
if_stmt: 'if' test ':' suite ('elif' test ':' suite)* ['else' ':' suite]
while_stmt: 'while' test ':' suite ['else' ':' suite]
for_stmt: 'for' exprlist 'in' testlist ':' suite ['else' ':' suite]
try_stmt: ('try' ':' suite
((except_clause ':' suite)+
['else' ':' suite]
['finally' ':' suite] |
'finally' ':' suite))
with_stmt: 'with' with_item (',' with_item)* ':' suite
with_item: test ['as' expr]
# NB compile.c makes sure that the default except clause is last
except_clause: 'except' [test ['as' NAME]]
suite: simple_stmt | NEWLINE INDENT stmt+ DEDENT
test: or_test ['if' or_test 'else' test] | lambdef
test_nocond: or_test | lambdef_nocond
lambdef: 'lambda' [varargslist] ':' test
lambdef_nocond: 'lambda' [varargslist] ':' test_nocond
or_test: and_test ('or' and_test)*
and_test: not_test ('and' not_test)*
not_test: 'not' not_test | comparison
comparison: expr (comp_op expr)*
# <> isn't actually a valid comparison operator in Python. It's here for the
# sake of a __future__ import described in PEP 401 (which really works :-)
comp_op: '<'|'>'|'=='|'>='|'<='|'<>'|'!='|'in'|'not' 'in'|'is'|'is' 'not'
star_expr: '*' expr
expr: xor_expr ('|' xor_expr)*
xor_expr: and_expr ('^' and_expr)*
and_expr: shift_expr ('&' shift_expr)*
shift_expr: arith_expr (('<<'|'>>') arith_expr)*
arith_expr: term (('+'|'-') term)*
term: factor (('*'|'@'|'/'|'%'|'//') factor)*
factor: ('+'|'-'|'~') factor | power
power: atom_expr ['**' factor]
atom_expr: ['await'] atom trailer*
atom: ('(' [yield_expr|testlist_comp] ')' |
'[' [testlist_comp] ']' |
'{' [dictorsetmaker] '}' |
NAME | NUMBER | strings | '...' | 'None' | 'True' | 'False')
strings: STRING+
testlist_comp: (test|star_expr) ( sync_comp_for | (',' (test|star_expr))* [','] )
trailer: '(' [arglist] ')' | '[' subscriptlist ']' | '.' NAME
subscriptlist: subscript (',' subscript)* [',']
subscript: test | [test] ':' [test] [sliceop]
sliceop: ':' [test]
exprlist: (expr|star_expr) (',' (expr|star_expr))* [',']
testlist: test (',' test)* [',']
dictorsetmaker: ( ((test ':' test | '**' expr)
(sync_comp_for | (',' (test ':' test | '**' expr))* [','])) |
((test | star_expr)
(sync_comp_for | (',' (test | star_expr))* [','])) )
classdef: 'class' NAME ['(' [arglist] ')'] ':' suite
arglist: argument (',' argument)* [',']
# The reason that keywords are test nodes instead of NAME is that using NAME
# results in an ambiguity. ast.c makes sure it's a NAME.
# "test '=' test" is really "keyword '=' test", but we have no such token.
# These need to be in a single rule to avoid grammar that is ambiguous
# to our LL(1) parser. Even though 'test' includes '*expr' in star_expr,
# we explicitly match '*' here, too, to give it proper precedence.
# Illegal combinations and orderings are blocked in ast.c:
# multiple (test comp_for) arguments are blocked; keyword unpackings
# that precede iterable unpackings are blocked; etc.
argument: ( test [sync_comp_for] |
test '=' test |
'**' test |
'*' test )
comp_iter: sync_comp_for | comp_if
sync_comp_for: 'for' exprlist 'in' or_test [comp_iter]
comp_if: 'if' test_nocond [comp_iter]
# not used in grammar, but may appear in "node" passed from Parser to Compiler
encoding_decl: NAME
yield_expr: 'yield' [yield_arg]
yield_arg: 'from' test | testlist

View File

@@ -44,3 +44,75 @@ a = 3
def x(b=a): def x(b=a):
global a global a
*foo, a = (1,)
*foo[0], a = (1,)
*[], a = (1,)
async def foo():
await bar()
#: E901
yield from []
return
#: E901
return ''
# With decorator it's a different statement.
@bla
async def foo():
await bar()
#: E901
yield from []
return
#: E901
return ''
foo: int = 4
(foo): int = 3
((foo)): int = 3
foo.bar: int
foo[3]: int
def glob():
global x
y: foo = x
def c():
a = 3
def d():
class X():
nonlocal a
def x():
a = 3
def y():
nonlocal a
def x():
def y():
nonlocal a
a = 3
def x():
a = 3
def y():
class z():
nonlocal a
a = *args, *args
error[(*args, *args)] = 3
*args, *args

View File

@@ -1,2 +0,0 @@
's' b''
u's' b'ä'

View File

@@ -1,3 +0,0 @@
*foo, a = (1,)
*foo[0], a = (1,)
*[], a = (1,)

View File

@@ -1,23 +0,0 @@
"""
Mostly allowed syntax in Python 3.5.
"""
async def foo():
await bar()
#: E901
yield from []
return
#: E901
return ''
# With decorator it's a different statement.
@bla
async def foo():
await bar()
#: E901
yield from []
return
#: E901
return ''

View File

@@ -1,45 +0,0 @@
foo: int = 4
(foo): int = 3
((foo)): int = 3
foo.bar: int
foo[3]: int
def glob():
global x
y: foo = x
def c():
a = 3
def d():
class X():
nonlocal a
def x():
a = 3
def y():
nonlocal a
def x():
def y():
nonlocal a
a = 3
def x():
a = 3
def y():
class z():
nonlocal a
a = *args, *args
error[(*args, *args)] = 3
*args, *args

View File

@@ -1,14 +0,0 @@
import sys
print 1, 2 >> sys.stdout
foo = ur'This is not possible in Python 3.'
# This is actually printing a tuple.
#: E275:5
print(1, 2)
# True and False are not keywords in Python 2 and therefore there's no need for
# a space.
norman = True+False

View File

@@ -10,12 +10,12 @@ def get_python_files(path):
yield os.path.join(dir_path, file_name) yield os.path.join(dir_path, file_name)
def test_on_itself(each_py3_version): def test_on_itself(each_version):
""" """
There are obviously no syntax erros in the Python code of parso. However There are obviously no syntax erros in the Python code of parso. However
parso should output the same for all versions. parso should output the same for all versions.
""" """
grammar = parso.load_grammar(version=each_py3_version) grammar = parso.load_grammar(version=each_version)
path = os.path.dirname(os.path.dirname(__file__)) + '/parso' path = os.path.dirname(os.path.dirname(__file__)) + '/parso'
for file in get_python_files(path): for file in get_python_files(path):
tree = grammar.parse(path=file) tree = grammar.parse(path=file)

View File

@@ -8,13 +8,13 @@ from textwrap import dedent
from parso import parse from parso import parse
def assert_params(param_string, version=None, **wanted_dct): def assert_params(param_string, **wanted_dct):
source = dedent(''' source = dedent('''
def x(%s): def x(%s):
pass pass
''') % param_string ''') % param_string
module = parse(source, version=version) module = parse(source)
funcdef = next(module.iter_funcdefs()) funcdef = next(module.iter_funcdefs())
dct = dict((p.name.value, p.default and p.default.get_code()) dct = dict((p.name.value, p.default and p.default.get_code())
for p in funcdef.get_params()) for p in funcdef.get_params())
@@ -23,9 +23,9 @@ def assert_params(param_string, version=None, **wanted_dct):
def test_split_params_with_separation_star(): def test_split_params_with_separation_star():
assert_params('x, y=1, *, z=3', x=None, y='1', z='3', version='3.5') assert_params('x, y=1, *, z=3', x=None, y='1', z='3')
assert_params('*, x', x=None, version='3.5') assert_params('*, x', x=None)
assert_params('*', version='3.5') assert_params('*')
def test_split_params_with_stars(): def test_split_params_with_stars():
@@ -34,12 +34,12 @@ def test_split_params_with_stars():
assert_params('*args, **kwargs', args=None, kwargs=None) assert_params('*args, **kwargs', args=None, kwargs=None)
def test_kw_only_no_kw(works_ge_py3): def test_kw_only_no_kw(works_in_py):
""" """
Parsing this should be working. In CPython the parser also parses this and Parsing this should be working. In CPython the parser also parses this and
in a later step the AST complains. in a later step the AST complains.
""" """
module = works_ge_py3.parse('def test(arg, *):\n pass') module = works_in_py.parse('def test(arg, *):\n pass')
if module is not None: if module is not None:
func = module.children[0] func = module.children[0]
open_, p1, asterisk, close = func._get_param_nodes() open_, p1, asterisk, close = func._get_param_nodes()

View File

@@ -26,7 +26,7 @@ class TestsFunctionAndLambdaParsing(object):
@pytest.fixture(params=FIXTURES) @pytest.fixture(params=FIXTURES)
def node(self, request): def node(self, request):
parsed = parse(dedent(request.param[0]), version='3.5') parsed = parse(dedent(request.param[0]), version='3.10')
request.keywords['expected'] = request.param[1] request.keywords['expected'] = request.param[1]
child = parsed.children[0] child = parsed.children[0]
if child.type == 'simple_stmt': if child.type == 'simple_stmt':
@@ -79,16 +79,16 @@ def test_default_param(each_version):
assert not param.star_count assert not param.star_count
def test_annotation_param(each_version):
    """A single annotated parameter without a default value."""
    funcdef = parse('def x(foo: 3): pass', version=each_version).children[0]
    param, = funcdef.get_params()
    assert param.default is None
    assert param.annotation.value == '3'
    assert not param.star_count
def test_annotation_params(each_py3_version): def test_annotation_params(each_version):
func = parse('def x(foo: 3, bar: 4): pass', version=each_py3_version).children[0] func = parse('def x(foo: 3, bar: 4): pass', version=each_version).children[0]
param1, param2 = func.get_params() param1, param2 = func.get_params()
assert param1.default is None assert param1.default is None
@@ -100,23 +100,14 @@ def test_annotation_params(each_py3_version):
assert not param2.star_count assert not param2.star_count
def test_default_and_annotation_param(each_version):
    """An annotation and a default value can be combined on one parameter."""
    funcdef = parse('def x(foo:3=42): pass', version=each_version).children[0]
    param, = funcdef.get_params()
    # The two attributes are independent; order of checks does not matter.
    assert param.annotation.value == '3'
    assert param.default.value == '42'
    assert not param.star_count
# NOTE(review): Python-2-only test — it depends on the `each_py2_version`
# fixture, so it must be removed together with Python 2 parsing support.
def test_ellipsis_py2(each_py2_version):
    """On Python 2, `...` in a subscript parses as three separate '.' leaves."""
    module = parse('[0][...]', version=each_py2_version, error_recovery=False)
    # Drill down: simple statement -> trailing `[...]` trailer -> subscript.
    expr = module.children[0]
    trailer = expr.children[-1]
    subscript = trailer.children[1]
    assert subscript.type == 'subscript'
    assert [leaf.value for leaf in subscript.children] == ['.', '.', '.']
def get_yield_exprs(code, version):
    """Parse *code* and return the yield expressions of its first child."""
    funcdef = parse(code, version=version).children[0]
    return list(funcdef.iter_yield_exprs())

View File

@@ -42,23 +42,23 @@ def test_formfeed(each_version):
_parse(s, each_version) _parse(s, each_version)
def test_matrix_multiplication_operator(works_in_py):
    # PEP 465: `@` and `@=` are valid binary/augmented-assignment operators.
    for code in ("a @ b", "a @= b"):
        works_in_py.parse(code)


def test_yield_from(works_in_py, each_version):
    # `yield from` requires an operand; a bare `yield from` is invalid.
    works_in_py.parse("yield from x")
    works_in_py.parse("(yield from x) + y")
    _invalid_syntax("yield from", each_version)
def test_await_expr(works_ge_py35): def test_await_expr(works_in_py):
works_ge_py35.parse("""async def foo(): works_in_py.parse("""async def foo():
await x await x
""") """)
works_ge_py35.parse("""async def foo(): works_in_py.parse("""async def foo():
def foo(): pass def foo(): pass
@@ -67,24 +67,23 @@ def test_await_expr(works_ge_py35):
await x await x
""") """)
works_ge_py35.parse("""async def foo(): return await a""") works_in_py.parse("""async def foo(): return await a""")
works_ge_py35.parse("""def foo(): works_in_py.parse("""def foo():
def foo(): pass def foo(): pass
async def foo(): await x async def foo(): await x
""") """)
@pytest.mark.xfail(reason="acting like python 3.7")
def test_async_var():
    # `async`/`await` used as ordinary identifiers (legal before they
    # became hard keywords).
    for code in ("async = 1", "await = 1", "def async(): pass"):
        _parse(code, "3.6")


def test_async_for(works_in_py):
    works_in_py.parse("async def foo():\n async for a in b: pass")
@pytest.mark.parametrize("body", [ @pytest.mark.parametrize("body", [
@@ -114,77 +113,82 @@ def test_async_for(works_ge_py35):
1 async for a in b 1 async for a in b
]""", ]""",
]) ])
def test_async_for_comprehension_newline(works_in_py, body):
    # Issue #139: async comprehensions that span a newline.
    works_in_py.parse("""async def foo():
    {}""".format(body))


def test_async_with(works_in_py):
    works_in_py.parse("async def foo():\n async with a: pass")


def test_async_with_invalid(works_in_py):
    # CPython rejects this in a later AST pass; the parser accepts it.
    works_in_py.parse("""def foo():\n async with a: pass""")
def test_raise_3x_style_1(each_version):
    # A bare `raise` (re-raise) is valid everywhere.
    _parse("raise", each_version)


def test_raise_2x_style_2(works_not_in_py):
    # Python-2 two-expression raise form.
    works_not_in_py.parse("raise E, V")


def test_raise_2x_style_3(works_not_in_py):
    # Python-2 three-expression raise form (with traceback).
    works_not_in_py.parse("raise E, V, T")


def test_raise_2x_style_invalid_1(each_version):
    # Four expressions were never valid in any version.
    _invalid_syntax("raise E, V, T, Z", version=each_version)


def test_raise_3x_style(works_in_py):
    # Python-3 exception chaining.
    works_in_py.parse("raise E1 from E2")


def test_raise_3x_style_invalid_1(each_version):
    _invalid_syntax("raise E, V from E1", each_version)


def test_raise_3x_style_invalid_2(each_version):
    _invalid_syntax("raise E from E1, E2", each_version)


def test_raise_3x_style_invalid_3(each_version):
    _invalid_syntax("raise from E1, E2", each_version)


def test_raise_3x_style_invalid_4(each_version):
    _invalid_syntax("raise E from", each_version)
# Adapted from Python 3's Lib/test/test_grammar.py:GrammarTests.testFuncdef
def test_annotation_1(works_in_py):
    works_in_py.parse("""def f(x) -> list: pass""")


def test_annotation_2(works_in_py):
    works_in_py.parse("""def f(x:int): pass""")


def test_annotation_3(works_in_py):
    works_in_py.parse("""def f(*x:str): pass""")


def test_annotation_4(works_in_py):
    works_in_py.parse("""def f(**x:float): pass""")


def test_annotation_5(works_in_py):
    works_in_py.parse("""def f(x, y:1+2): pass""")


def test_annotation_6(each_version):
    # Tuple parameters cannot carry annotations.
    _invalid_syntax("""def f(a, (b:1, c:2, d)): pass""", each_version)


def test_annotation_7(each_version):
    _invalid_syntax("""def f(a, (b:1, c:2, d), e:3=4, f=5, *g:6): pass""", each_version)


def test_annotation_8(each_version):
    s = """def f(a, (b:1, c:2, d), e:3=4, f=5,
        *g:6, h:7, i=8, j:9=10, **k:11) -> 12: pass"""
    _invalid_syntax(s, each_version)
def test_except_new(each_version): def test_except_new(each_version):
@@ -195,27 +199,27 @@ def test_except_new(each_version):
y""") y""")
_parse(s, each_version) _parse(s, each_version)
def test_except_old(works_not_in_py):
    # Python 2's `except E, N:` spelling (comma instead of `as`).
    code = dedent("""
    try:
        x
    except E, N:
        y""")
    works_not_in_py.parse(code)
# Adapted from Python 3's Lib/test/test_grammar.py:GrammarTests.testAtoms
def test_set_literal_1(works_in_py):
    works_in_py.parse("""x = {'one'}""")


def test_set_literal_2(works_in_py):
    # Trailing comma is allowed.
    works_in_py.parse("""x = {'one', 1,}""")


def test_set_literal_3(works_in_py):
    works_in_py.parse("""x = {'one', 'two', 'three'}""")


def test_set_literal_4(works_in_py):
    works_in_py.parse("""x = {2, 3, 4,}""")
def test_new_octal_notation(each_version): def test_new_octal_notation(each_version):
@@ -223,21 +227,21 @@ def test_new_octal_notation(each_version):
_invalid_syntax("""0o7324528887""", each_version) _invalid_syntax("""0o7324528887""", each_version)
def test_old_octal_notation(works_not_in_py):
    # Python 2 octal literal written without the `0o` prefix.
    works_not_in_py.parse("07")


def test_long_notation(works_not_in_py):
    # Python 2 long-integer suffix (l/L) on every literal base.
    for literal in ("0xFl", "0xFL", "0b1l", "0B1L", "0o7l",
                    "0O7L", "0l", "0L", "10l", "10L"):
        works_not_in_py.parse(literal)
def test_new_binary_notation(each_version): def test_new_binary_notation(each_version):
@@ -245,20 +249,20 @@ def test_new_binary_notation(each_version):
_invalid_syntax("""0b0101021""", each_version) _invalid_syntax("""0b0101021""", each_version)
def test_class_new_syntax(works_in_py):
    # Keyword arguments and star-unpacking in the class-bases list.
    for code in (
        "class B(t=7): pass",
        "class B(t, *args): pass",
        "class B(t, **kwargs): pass",
        "class B(t, *args, **kwargs): pass",
        "class B(t, y=9, *args, **kwargs): pass",
    ):
        works_in_py.parse(code)


def test_parser_idempotency_extended_unpacking(works_in_py):
    """A cut-down version of pytree_idempotency.py."""
    works_in_py.parse("a, *b, c = x\n")
    works_in_py.parse("[*a, b] = x\n")
    works_in_py.parse("(z, *y, w) = m\n")
    works_in_py.parse("for *z, m in d: pass\n")
def test_multiline_bytes_literals(each_version): def test_multiline_bytes_literals(each_version):
@@ -285,13 +289,13 @@ def test_multiline_bytes_tripquote_literals(each_version):
_parse(s, each_version) _parse(s, each_version)
def test_ellipsis(works_in_py, each_version):
    # A bare `...` expression; `...` inside a subscript parses everywhere.
    works_in_py.parse("...")
    _parse("[0][...]", version=each_version)


def test_dict_unpacking(works_in_py):
    # PEP 448 `**` unpacking inside a dict display.
    works_in_py.parse("{**dict(a=3), foo:2}")
def test_multiline_str_literals(each_version): def test_multiline_str_literals(each_version):
@@ -304,24 +308,24 @@ def test_multiline_str_literals(each_version):
_parse(s, each_version) _parse(s, each_version)
def test_py2_backticks(works_not_in_py):
    # Backtick repr syntax only existed in Python 2.
    works_not_in_py.parse("`1`")


def test_py2_string_prefixes(works_not_in_py):
    # `ur` (in either case for `u`/`U` and `r`/`R`) was a Python-2 prefix…
    for prefix in ("ur", "Ur", "UR"):
        works_not_in_py.parse(prefix + "'1'")
    # …but the reversed order `ru` was never valid.
    _invalid_syntax("ru'1'", works_not_in_py.version)


def py_br(each_version):
    _parse('br""', each_version)


def test_py3_rb(works_in_py):
    # The `rb`/`RB` prefix order is Python-3-only.
    works_in_py.parse("rb'1'")
    works_in_py.parse("RB'1'")
def test_left_recursion(): def test_left_recursion():

View File

@@ -411,8 +411,8 @@ def test_backslash():
]), ]),
] ]
) )
def test_fstring_token_types(code, types, each_version):
    # Tokenize *code* and compare the token-type sequence against the
    # parametrized expectation (which omits the trailing ENDMARKER).
    actual = [token.type for token in _get_token_list(code, each_version)]
    assert types + [ENDMARKER] == actual