Mirror of https://github.com/davidhalter/parso.git, synced 2025-12-07 05:14:29 +08:00

Compare commits: 17 commits
| SHA1 |
|---|
| 8a06f0da05 |
| bd95989c2e |
| 57e91262cd |
| 476383cca9 |
| b2ab64d8f9 |
| 18cbeb1a3d |
| a5686d6cda |
| dfe7fba08e |
| 6db7f40942 |
| d5eb96309c |
| 4c65368056 |
| 3e2956264c |
| e77a67cd36 |
| c4d6de2aab |
| 7770e73609 |
| acccb4f28d |
| 3f6fc8a5ad |
.coveragerc

```diff
@@ -1,4 +1,5 @@
 [run]
 source = parso
 
 [report]
 # Regexes for lines to exclude from consideration
```
.travis.yml (20 lines changed)
```diff
@@ -1,25 +1,25 @@
+dist: xenial
 language: python
 sudo: false
 python:
-  - 2.6
   - 2.7
   - 3.4
   - 3.5
   - 3.6
-  - pypy
+  - 3.7
+  - 3.8-dev
+  - pypy2.7-6.0
+  - pypy3.5-6.0
 matrix:
   include:
-    - { python: "3.7", dist: xenial, sudo: true }
     - python: 3.5
-      env: TOXENV=cov
-  allow_failures:
-    - env: TOXENV=cov
+      env: TOXENV=py35-coverage
 install:
   - pip install --quiet tox-travis
 script:
   - tox
 after_script:
-  - if [ $TOXENV == "cov" ]; then
-    pip install --quiet coveralls;
-    coveralls;
+  - |
+    if [ "${TOXENV%-coverage}" == "$TOXENV" ]; then
+      pip install --quiet coveralls;
+      coveralls;
     fi
```
CHANGELOG.rst

```diff
@@ -3,6 +3,17 @@
 Changelog
 ---------
 
+0.4.0 (2019-04-05)
+++++++++++++++++++
+
+- Python 3.8 support
+- FileIO support, it's now possible to use abstract file IO, support is alpha
+
 0.3.4 (2019-02-13)
 +++++++++++++++++++
 
 - Fix an f-string tokenizer error
 
 0.3.3 (2019-02-06)
 +++++++++++++++++++
 
```
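Both 0.4.0 features above are reachable from the public API. A minimal sketch, assuming parso 0.4.0; the `/tmp/virtual.py` path is illustrative, and since FileIO support is alpha, the `file_io` keyword should be treated as experimental:

```python
import parso
from parso.file_io import KnownContentFileIO

# Python 3.8 support: request the 3.8 grammar explicitly.
module = parso.parse('x = 1\n', version='3.8')
assert module.get_code() == 'x = 1\n'

# Alpha FileIO support: parse "file" content that never touches the disk.
grammar = parso.load_grammar()
file_io = KnownContentFileIO('/tmp/virtual.py', 'y = 2\n')
module = grammar.parse(file_io=file_io)
assert module.get_code() == 'y = 2\n'
```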
parso/__init__.py

```diff
@@ -43,7 +43,7 @@ from parso.grammar import Grammar, load_grammar
 from parso.utils import split_lines, python_bytes_to_unicode
 
 
-__version__ = '0.3.3'
+__version__ = '0.4.0'
 
 
 def parse(code=None, **kwargs):
```
parso/cache.py

```diff
@@ -18,7 +18,7 @@ from parso._compatibility import FileNotFoundError
 LOG = logging.getLogger(__name__)
 
 
-_PICKLE_VERSION = 30
+_PICKLE_VERSION = 31
 """
 Version number (integer) for file system cache.
 
@@ -45,6 +45,7 @@ we generate something similar. See:
 http://docs.python.org/3/library/sys.html#sys.implementation
 """
 
+
 def _get_default_cache_path():
     if platform.system().lower() == 'windows':
         dir_ = os.path.join(os.getenv('LOCALAPPDATA') or '~', 'Parso', 'Parso')
@@ -54,6 +55,7 @@ def _get_default_cache_path():
     dir_ = os.path.join(os.getenv('XDG_CACHE_HOME') or '~/.cache', 'parso')
     return os.path.expanduser(dir_)
 
+
 _default_cache_path = _get_default_cache_path()
 """
 The path where the cache is stored.
@@ -76,21 +78,26 @@ class _NodeCacheItem(object):
         self.change_time = change_time
 
 
-def load_module(hashed_grammar, path, cache_path=None):
+def load_module(hashed_grammar, file_io, cache_path=None):
     """
     Returns a module or None, if it fails.
     """
     try:
-        p_time = os.path.getmtime(path)
+        p_time = file_io.get_last_modified()
     except FileNotFoundError:
         return None
 
     try:
-        module_cache_item = parser_cache[hashed_grammar][path]
+        module_cache_item = parser_cache[hashed_grammar][file_io.path]
         if p_time <= module_cache_item.change_time:
             return module_cache_item.node
     except KeyError:
-        return _load_from_file_system(hashed_grammar, path, p_time, cache_path=cache_path)
+        return _load_from_file_system(
+            hashed_grammar,
+            file_io.path,
+            p_time,
+            cache_path=cache_path
+        )
 
 
 def _load_from_file_system(hashed_grammar, path, p_time, cache_path=None):
@@ -121,9 +128,10 @@ def _load_from_file_system(hashed_grammar, path, p_time, cache_path=None):
     return module_cache_item.node
 
 
-def save_module(hashed_grammar, path, module, lines, pickling=True, cache_path=None):
+def save_module(hashed_grammar, file_io, module, lines, pickling=True, cache_path=None):
+    path = file_io.path
     try:
-        p_time = None if path is None else os.path.getmtime(path)
+        p_time = None if path is None else file_io.get_last_modified()
     except OSError:
         p_time = None
         pickling = False
```
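Both functions now key the in-memory cache off `file_io.path` and pull timestamps through the IO object instead of `os.path.getmtime`. A rough round-trip sketch using these private helpers, in the same spirit as the updated `test_cache.py` further down (the underscored names are internal and subject to change):

```python
import parso
from parso.cache import load_module, save_module
from parso.file_io import FileIO

path = '/tmp/cache_demo.py'  # illustrative path; the file must exist on disk
with open(path, 'w') as f:
    f.write('x = 1\n')

grammar = parso.load_grammar()
io = FileIO(path)
module = grammar.parse(file_io=io)

# save_module/load_module now take the FileIO object instead of a bare path.
save_module(grammar._hashed, io, module, lines=['x = 1\n'], pickling=False)
assert load_module(grammar._hashed, io) is module  # served from parser_cache
```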
parso/file_io.py (new file, 32 lines)
```diff
@@ -0,0 +1,32 @@
+import os
+
+
+class FileIO(object):
+    def __init__(self, path):
+        self.path = path
+
+    def read(self):  # Returns bytes/str
+        # We would like to read unicode here, but we cannot, because we are not
+        # sure if it is a valid unicode file. Therefore just read whatever is
+        # here.
+        with open(self.path, 'rb') as f:
+            return f.read()
+
+    def get_last_modified(self):
+        """
+        Returns float - timestamp
+        Might raise FileNotFoundError
+        """
+        return os.path.getmtime(self.path)
+
+    def __repr__(self):
+        return '%s(%s)' % (self.__class__.__name__, self.path)
+
+
+class KnownContentFileIO(FileIO):
+    def __init__(self, path, content):
+        super(KnownContentFileIO, self).__init__(path)
+        self._content = content
+
+    def read(self):
+        return self._content
```
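`FileIO` is a deliberately small interface, so anything that can produce content and a timestamp can stand in for the file system. A hypothetical in-memory subclass (`InMemoryFileIO` is not part of this commit, just an illustration) only needs to override `read` and `get_last_modified`:

```python
import time

from parso.file_io import FileIO


class InMemoryFileIO(FileIO):
    """Hypothetical FileIO that serves content from a dict, not the disk."""

    def __init__(self, path, virtual_files):
        super(InMemoryFileIO, self).__init__(path)
        self._virtual_files = virtual_files

    def read(self):
        return self._virtual_files[self.path]

    def get_last_modified(self):
        # Pretend every virtual file was just modified.
        return time.time()


files = {'/virtual/mod.py': b'def f():\n    return 1\n'}
io = InMemoryFileIO('/virtual/mod.py', files)
assert io.read() == files['/virtual/mod.py']
```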
parso/grammar.py

```diff
@@ -12,6 +12,7 @@ from parso.parser import BaseParser
 from parso.python.parser import Parser as PythonParser
 from parso.python.errors import ErrorFinderConfig
 from parso.python import pep8
+from parso.file_io import FileIO, KnownContentFileIO
 
 _loaded_grammars = {}
 
@@ -77,14 +78,14 @@ class Grammar(object):
 
     def _parse(self, code=None, error_recovery=True, path=None,
                start_symbol=None, cache=False, diff_cache=False,
-               cache_path=None, start_pos=(1, 0)):
+               cache_path=None, file_io=None, start_pos=(1, 0)):
         """
         Wanted python3.5 * operator and keyword only arguments. Therefore just
         wrap it all.
         start_pos here is just a parameter internally used. Might be public
         sometime in the future.
         """
-        if code is None and path is None:
+        if code is None and path is None and file_io is None:
            raise TypeError("Please provide either code or a path.")
 
         if start_symbol is None:
@@ -93,15 +94,19 @@ class Grammar(object):
         if error_recovery and start_symbol != 'file_input':
             raise NotImplementedError("This is currently not implemented.")
 
-        if cache and path is not None:
-            module_node = load_module(self._hashed, path, cache_path=cache_path)
+        if file_io is None:
+            if code is None:
+                file_io = FileIO(path)
+            else:
+                file_io = KnownContentFileIO(path, code)
+
+        if cache and file_io.path is not None:
+            module_node = load_module(self._hashed, file_io, cache_path=cache_path)
             if module_node is not None:
                 return module_node
 
-        if code is None:
-            with open(path, 'rb') as f:
-                code = f.read()
+        code = file_io.read()
 
         code = python_bytes_to_unicode(code)
 
         lines = split_lines(code, keepends=True)
@@ -110,7 +115,7 @@ class Grammar(object):
                 raise TypeError("You have to define a diff parser to be able "
                                 "to use this option.")
             try:
-                module_cache_item = parser_cache[self._hashed][path]
+                module_cache_item = parser_cache[self._hashed][file_io.path]
             except KeyError:
                 pass
             else:
@@ -125,7 +130,7 @@ class Grammar(object):
                     old_lines=old_lines,
                     new_lines=lines
                 )
-                save_module(self._hashed, path, new_node, lines,
+                save_module(self._hashed, file_io, new_node, lines,
                             # Never pickle in pypy, it's slow as hell.
                             pickling=cache and not is_pypy,
                             cache_path=cache_path)
@@ -141,7 +146,7 @@ class Grammar(object):
         root_node = p.parse(tokens=tokens)
 
         if cache or diff_cache:
-            save_module(self._hashed, path, root_node, lines,
+            save_module(self._hashed, file_io, root_node, lines,
                         # Never pickle in pypy, it's slow as hell.
                         pickling=cache and not is_pypy,
                         cache_path=cache_path)
```
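After this change every entry point funnels into a single `FileIO` before the cache or the disk is touched. The normalization branch can be read in isolation; a sketch that mirrors the added code, with `_normalize_file_io` as an illustrative name that does not exist in parso:

```python
from parso.file_io import FileIO, KnownContentFileIO


def _normalize_file_io(code, path, file_io):
    # Mirrors the branch added to Grammar._parse: an explicit file_io wins;
    # a path-only call reads from disk later via FileIO.read(); a code call
    # wraps the string (path may be None for purely in-memory parsing).
    if file_io is None:
        if code is None:
            file_io = FileIO(path)
        else:
            file_io = KnownContentFileIO(path, code)
    return file_io


assert isinstance(_normalize_file_io('x = 1\n', None, None), KnownContentFileIO)
assert isinstance(_normalize_file_io(None, 'setup.py', None), FileIO)
```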
parso/python/errors.py

```diff
@@ -6,7 +6,6 @@ from contextlib import contextmanager
 
 from parso.normalizer import Normalizer, NormalizerConfig, Issue, Rule
 from parso.python.tree import search_ancestor
-from parso.parser import ParserSyntaxError
 
 _BLOCK_STMTS = ('if_stmt', 'while_stmt', 'for_stmt', 'try_stmt', 'with_stmt')
 _STAR_EXPR_PARENTS = ('testlist_star_expr', 'testlist_comp', 'exprlist')
@@ -17,6 +16,7 @@ ALLOWED_FUTURES = (
     'all_feature_names', 'nested_scopes', 'generators', 'division',
     'absolute_import', 'with_statement', 'print_function', 'unicode_literals',
 )
+_COMP_FOR_TYPES = ('comp_for', 'sync_comp_for')
 
 
 def _iter_stmts(scope):
@@ -35,12 +35,12 @@ def _iter_stmts(scope):
 
 def _get_comprehension_type(atom):
     first, second = atom.children[:2]
-    if second.type == 'testlist_comp' and second.children[1].type == 'comp_for':
+    if second.type == 'testlist_comp' and second.children[1].type in _COMP_FOR_TYPES:
         if first == '[':
             return 'list comprehension'
         else:
             return 'generator expression'
-    elif second.type == 'dictorsetmaker' and second.children[-1].type == 'comp_for':
+    elif second.type == 'dictorsetmaker' and second.children[-1].type in _COMP_FOR_TYPES:
         if second.children[1] == ':':
             return 'dict comprehension'
         else:
@@ -107,6 +107,7 @@ def _iter_definition_exprs_from_lists(exprlist):
             yield child
 
+
 def _get_expr_stmt_definition_exprs(expr_stmt):
     exprs = []
     for list_ in expr_stmt.children[:-2:2]:
@@ -273,13 +274,12 @@ class ErrorFinder(Normalizer):
 
     def visit(self, node):
         if node.type == 'error_node':
-            with self.visit_node(node):
-                # Don't need to investigate the inners of an error node. We
-                # might find errors in there that should be ignored, because
-                # the error node itself already shows that there's an issue.
-                return ''
+            # Don't need to investigate the inners of an error node. We
+            # might find errors in there that should be ignored, because
+            # the error node itself already shows that there's an issue.
+            return ''
         return super(ErrorFinder, self).visit(node)
 
     @contextmanager
     def visit_node(self, node):
         self._check_type_rules(node)
@@ -455,23 +455,19 @@ class _YieldFromCheck(SyntaxRule):
 
     def is_issue(self, leaf):
         return leaf.parent.type == 'yield_arg' \
-                and self._normalizer.context.is_async_funcdef()
+            and self._normalizer.context.is_async_funcdef()
 
 
 @ErrorFinder.register_rule(type='name')
 class _NameChecks(SyntaxRule):
     message = 'cannot assign to __debug__'
     message_keyword = 'assignment to keyword'
     message_none = 'cannot assign to None'
 
     def is_issue(self, leaf):
         self._normalizer.context.add_name(leaf)
 
         if leaf.value == '__debug__' and leaf.is_definition():
-            if self._normalizer.version < (3, 0):
-                return True
-            else:
-                self.add_issue(leaf, message=self.message_keyword)
+            return True
         if leaf.value == 'None' and self._normalizer.version < (3, 0) \
                 and leaf.is_definition():
             self.add_issue(leaf, message=self.message_none)
@@ -539,7 +535,7 @@ class _StarStarCheck(SyntaxRule):
     def is_issue(self, leaf):
         if leaf.parent.type == 'dictorsetmaker':
             comp_for = leaf.get_next_sibling().get_next_sibling()
-            return comp_for is not None and comp_for.type == 'comp_for'
+            return comp_for is not None and comp_for.type in _COMP_FOR_TYPES
 
 
 @ErrorFinder.register_rule(value='yield')
@@ -618,7 +614,7 @@ class _FutureImportRule(SyntaxRule):
             allowed_futures.append('generator_stop')
 
         if name == 'braces':
-            self.add_issue(node, message = "not a chance")
+            self.add_issue(node, message="not a chance")
         elif name == 'barry_as_FLUFL':
             m = "Seriously I'm not implementing this :) ~ Dave"
             self.add_issue(node, message=m)
@@ -638,7 +634,7 @@ class _StarExprRule(SyntaxRule):
             return True
         if node.parent.type == 'testlist_comp':
             # [*[] for a in [1]]
-            if node.parent.children[1].type == 'comp_for':
+            if node.parent.children[1].type in _COMP_FOR_TYPES:
                 self.add_issue(node, message=self.message_iterable_unpacking)
         if self._normalizer.version <= (3, 4):
             n = search_ancestor(node, 'for_stmt', 'expr_stmt')
@@ -715,8 +711,8 @@ class _AnnotatorRule(SyntaxRule):
             if not (lhs.type == 'name'
                     # subscript/attributes are allowed
                     or lhs.type in ('atom_expr', 'power')
-                        and trailer.type == 'trailer'
-                        and trailer.children[0] != '('):
+                    and trailer.type == 'trailer'
+                    and trailer.children[0] != '('):
                 return True
             else:
                 # x, y: str
@@ -731,10 +727,16 @@ class _ArgumentRule(SyntaxRule):
         if node.children[1] == '=' and first.type != 'name':
             if first.type == 'lambdef':
                 # f(lambda: 1=1)
-                message = "lambda cannot contain assignment"
+                if self._normalizer.version < (3, 8):
+                    message = "lambda cannot contain assignment"
+                else:
+                    message = 'expression cannot contain assignment, perhaps you meant "=="?'
             else:
                 # f(+x=1)
-                message = "keyword can't be an expression"
+                if self._normalizer.version < (3, 8):
+                    message = "keyword can't be an expression"
+                else:
+                    message = 'expression cannot contain assignment, perhaps you meant "=="?'
             self.add_issue(first, message=message)
 
 
@@ -758,7 +760,7 @@ class _ArglistRule(SyntaxRule):
     def is_issue(self, node):
         first_arg = node.children[0]
         if first_arg.type == 'argument' \
-                and first_arg.children[1].type == 'comp_for':
+                and first_arg.children[1].type in _COMP_FOR_TYPES:
             # e.g. foo(x for x in [], b)
             return len(node.children) >= 2
         else:
@@ -787,7 +789,8 @@ class _ArglistRule(SyntaxRule):
                 if first == '*':
                     if kw_unpacking_only:
                         # foo(**kwargs, *args)
-                        message = "iterable argument unpacking follows keyword argument unpacking"
+                        message = "iterable argument unpacking " \
+                                  "follows keyword argument unpacking"
                         self.add_issue(argument, message=message)
                     else:
                         kw_unpacking_only = True
@@ -809,6 +812,7 @@ class _ArglistRule(SyntaxRule):
                     message = "positional argument follows keyword argument"
                     self.add_issue(argument, message=message)
 
+
 @ErrorFinder.register_rule(type='parameters')
 @ErrorFinder.register_rule(type='lambdef')
 class _ParameterRule(SyntaxRule):
@@ -889,7 +893,13 @@ class _CheckAssignmentRule(SyntaxRule):
             error = _get_comprehension_type(node)
             if error is None:
                 if second.type == 'dictorsetmaker':
-                    error = 'literal'
+                    if self._normalizer.version < (3, 8):
+                        error = 'literal'
+                    else:
+                        if second.children[1] == ':':
+                            error = 'dict display'
+                        else:
+                            error = 'set display'
         elif first in ('(', '['):
             if second.type == 'yield_expr':
                 error = 'yield expression'
@@ -901,7 +911,10 @@ class _CheckAssignmentRule(SyntaxRule):
             else:  # Everything handled, must be useless brackets.
                 self._check_assignment(second, is_deletion)
         elif type_ == 'keyword':
-            error = 'keyword'
+            if self._normalizer.version < (3, 8):
+                error = 'keyword'
+            else:
+                error = str(node.value)
         elif type_ == 'operator':
             if node.value == '...':
                 error = 'Ellipsis'
@@ -929,25 +942,29 @@ class _CheckAssignmentRule(SyntaxRule):
         elif type_ in ('testlist_star_expr', 'exprlist', 'testlist'):
             for child in node.children[::2]:
                 self._check_assignment(child, is_deletion)
-        elif ('expr' in type_ and type_ != 'star_expr' # is a substring
+        elif ('expr' in type_ and type_ != 'star_expr'  # is a substring
               or '_test' in type_
               or type_ in ('term', 'factor')):
             error = 'operator'
 
         if error is not None:
-            message = "can't %s %s" % ("delete" if is_deletion else "assign to", error)
+            cannot = "can't" if self._normalizer.version < (3, 8) else "cannot"
+            message = ' '.join([cannot, "delete" if is_deletion else "assign to", error])
             self.add_issue(node, message=message)
 
 
 @ErrorFinder.register_rule(type='comp_for')
+@ErrorFinder.register_rule(type='sync_comp_for')
 class _CompForRule(_CheckAssignmentRule):
     message = "asynchronous comprehension outside of an asynchronous function"
 
     def is_issue(self, node):
-        # Some of the nodes here are already used, so no else if
-        expr_list = node.children[1 + int(node.children[0] == 'async')]
-        if expr_list.type != 'expr_list':  # Already handled.
-            self._check_assignment(expr_list)
+        if node.type != 'comp_for' or self._normalizer.version < (3, 8):
+            # comp_for was replaced by sync_comp_for in Python 3.8.
+            expr_list = node.children[1 + int(node.children[0] == 'async')]
+            if expr_list.type != 'expr_list':  # Already handled.
+                self._check_assignment(expr_list)
 
         return node.children[0] == 'async' \
             and not self._normalizer.context.is_async_funcdef()
```
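The 3.8 wording changes are observable through the public `iter_errors` API. A small sketch; the exact strings follow the `cannot`/`can't` switch in `_CheckAssignmentRule` above:

```python
import parso


def first_error(code, version):
    grammar = parso.load_grammar(version=version)
    return list(grammar.iter_errors(grammar.parse(code)))[0].message


# "can't assign to ..." before 3.8, "cannot assign to ..." from 3.8 on.
assert first_error('1 = 2\n', '3.7') == "SyntaxError: can't assign to literal"
assert first_error('1 = 2\n', '3.8') == "SyntaxError: cannot assign to literal"
```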
parso/python/tokenize.py

```diff
@@ -419,8 +419,6 @@ def tokenize_lines(lines, version_info, start_pos=(1, 0)):
             tos = fstring_stack[-1]
             if not tos.is_in_expr():
                 string, pos = _find_fstring_string(endpats, fstring_stack, line, lnum, pos)
-                if pos == max:
-                    break
                 if string:
                     yield PythonToken(
                         FSTRING_STRING, string,
@@ -431,6 +429,8 @@ def tokenize_lines(lines, version_info, start_pos=(1, 0)):
                     )
                     tos.previous_lines = ''
                     continue
+                if pos == max:
+                    break
 
             rest = line[pos:]
             fstring_end_token, additional_prefix, quote_length = _close_fstring_if_necessary(
```
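Checking `pos == max` only after the pending `FSTRING_STRING` token has been emitted means f-string content that runs up to the end of the input is no longer dropped. The effect shows up as a round-trip property through the public API; the new `test_fstring.py` cases below exercise exactly this:

```python
import parso

# Error recovery keeps the raw text of unterminated f-strings intact.
for code in ('f"foo', 'f"""foo'):
    module = parso.parse(code, version='3.6')
    assert module.get_code() == code
```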
pytest.ini

```diff
@@ -1,6 +1,8 @@
 [pytest]
 addopts = --doctest-modules
 
+testpaths = parso test
+
 # Ignore broken files in blackbox test directories
 norecursedirs = .* docs scripts normalizer_issue_files build
```
setup.cfg (10 lines changed)
```diff
@@ -1,2 +1,12 @@
 [bdist_wheel]
 universal=1
+
+[flake8]
+max-line-length = 100
+ignore =
+    # do not use bare 'except'
+    E722,
+    # don't know why this was ever even an option, 1+1 should be possible.
+    E226,
+    # line break before binary operator
+    W503,
```
test/failing_examples.py

```diff
@@ -19,14 +19,6 @@ def build_nested(code, depth, base='def f():\n'):
 FAILING_EXAMPLES = [
     '1 +',
     '?',
-    # Python/compile.c
-    dedent('''\
-        for a in [1]:
-            try:
-                pass
-            finally:
-                continue
-        '''),  # 'continue' not supported inside 'finally' clause"
     'continue',
     'break',
     'return',
@@ -259,10 +251,6 @@ GLOBAL_NONLOCAL_ERROR = [
 
 if sys.version_info >= (3, 6):
     FAILING_EXAMPLES += GLOBAL_NONLOCAL_ERROR
-    FAILING_EXAMPLES += [
-        # Raises multiple errors in previous versions.
-        'async def foo():\n def nofoo():[x async for x in []]',
-    ]
 if sys.version_info >= (3, 5):
     FAILING_EXAMPLES += [
         # Raises different errors so just ignore them for now.
@@ -319,3 +307,15 @@ if sys.version_info[:2] <= (3, 4):
     'a = *[1], 2',
     '(*[1], 2)',
 ]
+
+if sys.version_info[:2] < (3, 8):
+    FAILING_EXAMPLES += [
+        # Python/compile.c
+        dedent('''\
+            for a in [1]:
+                try:
+                    pass
+                finally:
+                    continue
+            '''),  # 'continue' not supported inside 'finally' clause"
+    ]
```
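The example moves behind a version guard because `continue` inside `finally` became legal in Python 3.8 (bpo-32489). A quick sketch of the guard's effect through parso's error checking, assuming the matching version check in the error finder (that hunk is not visible in this excerpt):

```python
import parso
from textwrap import dedent

CODE = dedent('''\
    for a in [1]:
        try:
            pass
        finally:
            continue
    ''')


def error_count(version):
    grammar = parso.load_grammar(version=version)
    return len(list(grammar.iter_errors(grammar.parse(CODE))))


assert error_count('3.7')      # flagged: continue not supported in finally
assert not error_count('3.8')  # legal since Python 3.8
```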
test/test_cache.py

```diff
@@ -10,6 +10,7 @@ from parso.cache import _NodeCacheItem, save_module, load_module, \
     _get_hashed_path, parser_cache, _load_from_file_system, _save_to_file_system
 from parso import load_grammar
 from parso import cache
+from parso import file_io
 
 
 @pytest.fixture()
@@ -76,12 +77,13 @@ def test_modulepickling_simulate_deleted_cache(tmpdir):
     path = tmpdir.dirname + '/some_path'
     with open(path, 'w'):
         pass
+    io = file_io.FileIO(path)
 
-    save_module(grammar._hashed, path, module, [])
-    assert load_module(grammar._hashed, path) == module
+    save_module(grammar._hashed, io, module, lines=[])
+    assert load_module(grammar._hashed, io) == module
 
     unlink(_get_hashed_path(grammar._hashed, path))
     parser_cache.clear()
 
-    cached2 = load_module(grammar._hashed, path)
+    cached2 = load_module(grammar._hashed, io)
     assert cached2 is None
```
test/test_fstring.py

```diff
@@ -79,11 +79,17 @@ def test_tokenize_start_pos(code, positions):
     assert positions == [p.start_pos for p in tokens]
 
 
-def test_roundtrip(grammar):
-    code = dedent("""\
-        f'''s{
-           str.uppe
-        '''
-        """)
+@pytest.mark.parametrize(
+    'code', [
+        dedent("""\
+            f'''s{
+               str.uppe
+            '''
+            """),
+        'f"foo',
+        'f"""foo',
+    ]
+)
+def test_roundtrip(grammar, code):
     tree = grammar.parse(code)
     assert tree.get_code() == code
```
test/test_python_errors.py

```diff
@@ -41,6 +41,29 @@ def test_python_exception_matches(code):
     assert line_nr is None or line_nr == error.start_pos[0]
 
 
+def test_non_async_in_async():
+    """
+    This example doesn't work with FAILING_EXAMPLES, because the line numbers
+    are not always the same / incorrect in Python 3.8.
+    """
+    if sys.version_info[:2] < (3, 5):
+        pytest.skip()
+
+    # Raises multiple errors in previous versions.
+    code = 'async def foo():\n def nofoo():[x async for x in []]'
+    wanted, line_nr = _get_actual_exception(code)
+
+    errors = _get_error_list(code)
+    if errors:
+        error, = errors
+        actual = error.message
+        assert actual in wanted
+        if sys.version_info[:2] < (3, 8):
+            assert line_nr == error.start_pos[0]
+        else:
+            assert line_nr == 0  # For whatever reason this is zero in Python 3.8+
+
+
 @pytest.mark.parametrize(
     ('code', 'positions'), [
         ('1 +', [(1, 3)]),
@@ -103,7 +126,8 @@ def _get_actual_exception(code):
         # The python 3.5+ way, a bit nicer.
         wanted = 'SyntaxError: positional argument follows keyword argument'
     elif wanted == 'SyntaxError: assignment to keyword':
-        return [wanted, "SyntaxError: can't assign to keyword"], line_nr
+        return [wanted, "SyntaxError: can't assign to keyword",
+                'SyntaxError: cannot assign to __debug__'], line_nr
     elif wanted == 'SyntaxError: assignment to None':
         # Python 2.6 does have a slightly different error.
         wanted = 'SyntaxError: cannot assign to None'
```
tox.ini (13 lines changed)
```diff
@@ -1,19 +1,16 @@
 [tox]
-envlist = py27, py33, py34, py35, py36, py37, pypy
+envlist = {py26,py27,py33,py34,py35,py36,py37}
 [testenv]
 extras = testing
+deps =
+    py26,py33: pytest>=3.0.7,<3.3
+    py26,py33: setuptools<37
+    coverage: coverage
 setenv =
     # https://github.com/tomchristie/django-rest-framework/issues/1957
     # tox corrupts __pycache__, solution from here:
     PYTHONDONTWRITEBYTECODE=1
+    coverage: TOX_TESTENV_COMMAND=coverage run -m pytest
 commands =
-    pytest {posargs:parso test}
-
-[testenv:cov]
-deps =
-    coverage
-commands =
-    coverage run --source parso -m pytest
-    coverage report
+    {env:TOX_TESTENV_COMMAND:pytest} {posargs}
+    coverage: coverage report
```