Mirror of https://github.com/davidhalter/parso.git
Synced 2025-12-07 05:14:29 +08:00

Compare commits
9 Commits

| Author | SHA1 | Date |
|---|---|---|
|  | e6bc924fba |  |
|  | 59605438e9 |  |
|  | e7f71a3eba |  |
|  | 3f7aad84f9 |  |
|  | 52e3db4834 |  |
|  | 0daf4d9068 |  |
|  | 29b6232541 |  |
|  | e05d7fd59f |  |
|  | 7f964c26f2 |  |

```diff
@@ -3,7 +3,12 @@
 Changelog
 ---------
 
-0.3.0 (2018-07-30)
+0.3.1 (2018-07-09)
++++++++++++++++++++
+
+- Bugfixes in the diff parser and keyword-only arguments
+
+0.3.0 (2018-06-30)
 +++++++++++++++++++
 
 - Rewrote the pgen2 parser generator.
```

```diff
@@ -144,7 +144,7 @@ html_sidebars = {
         #'relations.html',
         'ghbuttons.html',
         #'sourcelink.html',
-        #'searchbox.html'
+        'searchbox.html'
     ]
 }
```

```diff
@@ -43,7 +43,7 @@ from parso.grammar import Grammar, load_grammar
 from parso.utils import split_lines, python_bytes_to_unicode
 
 
-__version__ = '0.3.0'
+__version__ = '0.3.1'
 
 
 def parse(code=None, **kwargs):
```

```diff
@@ -147,7 +147,7 @@ class BaseParser(object):
             raise NotImplementedError("Error Recovery is not implemented")
         else:
             type_, value, start_pos, prefix = token
-            error_leaf = tree.ErrorLeaf('TODO %s' % type_, value, start_pos, prefix)
+            error_leaf = tree.ErrorLeaf(type_, value, start_pos, prefix)
             raise ParserSyntaxError('SyntaxError: invalid syntax', error_leaf)
 
     def convert_node(self, nonterminal, children):
```
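
For orientation, here is how the cleaned-up `ErrorLeaf` surfaces through parso's public API — a minimal sketch, assuming the 0.3.1 behavior in the hunk above (`token_type` is the attribute `ErrorLeaf` stores its first argument in):

```python
# Minimal sketch: with error recovery disabled, BaseParser.error_recovery
# raises ParserSyntaxError carrying the ErrorLeaf built above.
import parso
from parso import ParserSyntaxError

grammar = parso.load_grammar()
try:
    grammar.parse('1 +\n', error_recovery=False)
except ParserSyntaxError as e:
    # After this change, token_type holds the raw token type instead of
    # the old 'TODO %s' placeholder string.
    print(e.error_leaf.token_type, repr(e.error_leaf.value))
```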

```diff
@@ -286,7 +286,6 @@ def _make_transition(token_namespace, reserved_syntax_strings, label):
         # Either a keyword or an operator
         assert label[0] in ('"', "'"), label
         assert not label.startswith('"""') and not label.startswith("'''")
-        # TODO use literal_eval instead of a simple eval.
         value = literal_eval(label)
         try:
             return reserved_syntax_strings[value]
```
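
The deleted TODO was stale: the line below it already uses `literal_eval`. As a standalone illustration (not parso code), `literal_eval` unquotes a grammar label such as `'"if"'` without evaluating arbitrary expressions the way a plain `eval` would:

```python
# Standalone illustration: keyword/operator labels arrive as quoted
# strings in the grammar; literal_eval turns them into plain values
# without executing anything.
from ast import literal_eval

assert literal_eval('"if"') == 'if'   # keyword label
assert literal_eval("'*='") == '*='   # operator label
```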

```diff
@@ -47,6 +47,14 @@ def _flows_finished(pgen_grammar, stack):
     return True
 
 
+def _func_or_class_has_suite(node):
+    if node.type == 'decorated':
+        node = node.children[-1]
+    if node.type in ('async_funcdef', 'async_stmt'):
+        node = node.children[-1]
+    return node.type in ('classdef', 'funcdef') and node.children[-1].type == 'suite'
+
+
 def suite_or_file_input_is_valid(pgen_grammar, stack):
     if not _flows_finished(pgen_grammar, stack):
         return False
```
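
Why the new helper unwraps `decorated` and `async_*` wrappers first: in parso's tree a decorator node wraps the `funcdef`, which in turn ends in a `suite`. A small sketch against the public API (the node type names are parso's own):

```python
# Sketch of the node shapes _func_or_class_has_suite has to unwrap.
import parso

module = parso.parse('@staticmethod\ndef spam():\n    pass\n')
node = module.children[0]
print(node.type)                  # 'decorated'
funcdef = node.children[-1]
print(funcdef.type)               # 'funcdef'
print(funcdef.children[-1].type)  # 'suite'
```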

```diff
@@ -511,7 +519,7 @@ class _NodesStack(object):
                 # binary search.
                 if _get_last_line(node) > until_line:
                     # We can split up functions and classes later.
-                    if node.type in ('classdef', 'funcdef') and node.children[-1].type == 'suite':
+                    if _func_or_class_has_suite(node):
                         new_nodes.append(node)
                     break
```

```diff
@@ -522,23 +530,25 @@ class _NodesStack(object):
 
         last_node = new_nodes[-1]
         line_offset_index = -1
-        if last_node.type in ('classdef', 'funcdef'):
-            suite = last_node.children[-1]
-            if suite.type == 'suite':
-                suite_tos = _NodesStackNode(suite)
-                # Don't need to pass line_offset here, it's already done by the
-                # parent.
-                suite_nodes, recursive_tos = self._copy_nodes(
-                    suite_tos, suite.children, until_line, line_offset)
-                if len(suite_nodes) < 2:
-                    # A suite only with newline is not valid.
-                    new_nodes.pop()
-                else:
-                    suite_tos.parent = tos
-                    new_tos = recursive_tos
-                    line_offset_index = -2
+        if _func_or_class_has_suite(last_node):
+            suite = last_node
+            while suite.type != 'suite':
+                suite = suite.children[-1]
 
-        elif (new_nodes[-1].type in ('error_leaf', 'error_node') or
+            suite_tos = _NodesStackNode(suite)
+            # Don't need to pass line_offset here, it's already done by the
+            # parent.
+            suite_nodes, recursive_tos = self._copy_nodes(
+                suite_tos, suite.children, until_line, line_offset)
+            if len(suite_nodes) < 2:
+                # A suite only with newline is not valid.
+                new_nodes.pop()
+            else:
+                suite_tos.parent = tos
+                new_tos = recursive_tos
+                line_offset_index = -2
+
+        elif (last_node.type in ('error_leaf', 'error_node') or
               _is_flow_node(new_nodes[-1])):
             # Error leafs/nodes don't have a defined start/end. Error
             # nodes might not end with a newline (e.g. if there's an
```
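
End to end, this refactor is what lets the diff parser reuse a decorated function across an incremental re-parse (see `test_end_newline_with_decorator` below). A rough sketch via `diff_cache`, assuming the in-memory cache works without an explicit `path`, which parso's `Grammar.parse` permits:

```python
# Rough sketch: parse once, then re-parse a slightly changed source so
# the DiffParser (and the _copy_nodes path above) kicks in.
import parso

grammar = parso.load_grammar()
code = '@staticmethod\ndef spam():\n    import json\n    json.l'
grammar.parse(code, diff_cache=True)                    # initial, cached parse
module = grammar.parse(code + '\n', diff_cache=True)    # incremental update
print(module.children[0].type)                          # 'decorated'
```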

```diff
@@ -570,7 +580,7 @@ class _NodesStack(object):
             self._tos = _NodesStackNode(tree_node, self._tos)
             self._tos.add(list(tree_node.children))
             self._update_tos(tree_node.children[-1])
-        elif tree_node.type in ('classdef', 'funcdef'):
+        elif _func_or_class_has_suite(tree_node):
             self._update_tos(tree_node.children[-1])
 
     def close(self):
```

```diff
@@ -448,16 +448,15 @@ def tokenize_lines(lines, version_info, start_pos=(1, 0)):
 
         pseudomatch = pseudo_token.match(line, pos)
         if not pseudomatch:  # scan for tokens
-            if line.endswith('\n'):
-                new_line = True
             match = whitespace.match(line, pos)
             pos = match.end()
             yield PythonToken(
-                ERRORTOKEN, line[pos:], (lnum, pos),
+                ERRORTOKEN, line[pos], (lnum, pos),
                 additional_prefix + match.group(0)
             )
             additional_prefix = ''
-            break
+            pos += 1
+            continue
 
         prefix = additional_prefix + pseudomatch.group(1)
         additional_prefix = ''
```
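
The visible effect of replacing `break` with `pos += 1; continue`: an unrecognized character becomes a single one-character ERRORTOKEN and tokenization resumes, instead of discarding the rest of the line. This is exactly what the new `test_invalid_token` below asserts; a sketch through the public parser:

```python
# Only the '?' becomes an error leaf; ' + b' is still parsed normally.
import parso

module = parso.parse('a + ? + b')
print([c.type for c in module.children])
# ['error_node', 'error_leaf', 'factor', 'endmarker']
print(repr(module.children[1].value))   # '?'
```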

```diff
@@ -537,7 +537,9 @@ def _create_params(parent, argslist_list):
         if child is None or child == ',':
             param_children = children[start:end]
             if param_children:  # Could as well be comma and then end.
-                if param_children[0] == '*' and param_children[1] == ',' \
+                if param_children[0] == '*' \
+                        and (len(param_children) == 1
+                             or param_children[1] == ',') \
                         or check_python2_nested_param(param_children[0]):
                     for p in param_children:
                         p.parent = parent
```
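
The widened condition accepts a lone trailing `*` (a keyword-only marker with no keyword arguments after it). CPython's parser accepts this too; only the later AST step complains, which is what the new `test_kw_only_no_kw` below exercises. A quick sketch:

```python
# A bare trailing '*' in a Python 3 signature now parses instead of
# crashing _create_params with an IndexError on param_children[1].
import parso

module = parso.parse('def test(arg, *):\n    pass', version='3.6')
print(module.children[0].type)   # 'funcdef'
```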

```diff
@@ -507,3 +507,22 @@ def test_endmarker_newline(differ):
 def test_newlines_at_end(differ):
     differ.initialize('a\n\n')
     differ.parse('a\n', copies=1)
+
+
+def test_end_newline_with_decorator(differ):
+    code = dedent('''\
+        @staticmethod
+        def spam():
+            import json
+            json.l''')
+
+    differ.initialize(code)
+    module = differ.parse(code + '\n', copies=1)
+    decorated, endmarker = module.children
+    assert decorated.type == 'decorated'
+    decorator, func = decorated.children
+    suite = func.children[-1]
+    assert suite.type == 'suite'
+    newline, first_stmt, second_stmt = suite.children
+    assert first_stmt.get_code() == '    import json\n'
+    assert second_stmt.get_code() == '    json.l\n'
```

```diff
@@ -1,4 +1,4 @@
-from parso import parse
+from parso import parse, load_grammar
 
 
 def test_with_stmt():
```

```diff
@@ -59,3 +59,27 @@ def test_if_stmt():
     assert in_else_stmt.type == 'error_node'
     assert in_else_stmt.children[0].value == 'g'
     assert in_else_stmt.children[1].value == '('
+
+
+def test_invalid_token():
+    module = parse('a + ? + b')
+    error_node, q, plus_b, endmarker = module.children
+    assert error_node.get_code() == 'a +'
+    assert q.value == '?'
+    assert q.type == 'error_leaf'
+    assert plus_b.type == 'factor'
+    assert plus_b.get_code() == ' + b'
+
+
+def test_invalid_token_in_fstr():
+    module = load_grammar(version='3.6').parse('f"{a + ? + b}"')
+    error_node, q, plus_b, error1, error2, endmarker = module.children
+    assert error_node.get_code() == 'f"{a +'
+    assert q.value == '?'
+    assert q.type == 'error_leaf'
+    assert plus_b.type == 'error_node'
+    assert plus_b.get_code() == ' + b'
+    assert error1.value == '}'
+    assert error1.type == 'error_leaf'
+    assert error2.value == '"'
+    assert error2.type == 'error_leaf'
```

```diff
@@ -32,3 +32,16 @@ def test_split_params_with_stars():
     assert_params(u'x, *args', x=None, args=None)
     assert_params(u'**kwargs', kwargs=None)
     assert_params(u'*args, **kwargs', args=None, kwargs=None)
+
+
+def test_kw_only_no_kw(works_ge_py3):
+    """
+    Parsing this should be working. In CPython the parser also parses this and
+    in a later step the AST complains.
+    """
+    module = works_ge_py3.parse('def test(arg, *):\n pass')
+    if module is not None:
+        func = module.children[0]
+        open_, p1, asterisk, close = func._get_param_nodes()
+        assert p1.get_code('arg,')
+        assert asterisk.value == '*'
```

```diff
@@ -199,10 +199,9 @@ def test_ur_literals():
 def test_error_literal():
     error_token, endmarker = _get_token_list('"\n')
     assert error_token.type == ERRORTOKEN
-    assert endmarker.prefix == ''
-    assert error_token.string == '"\n'
+    assert error_token.string == '"'
     assert endmarker.type == ENDMARKER
-    assert endmarker.prefix == ''
+    assert endmarker.prefix == '\n'
 
     bracket, error_token, endmarker = _get_token_list('( """')
     assert error_token.type == ERRORTOKEN
```

```diff
@@ -244,5 +243,6 @@ def test_error_string():
     t1, endmarker = _get_token_list(' "\n')
     assert t1.type == ERRORTOKEN
     assert t1.prefix == ' '
-    assert t1.string == '"\n'
+    assert t1.string == '"'
+    assert endmarker.prefix == '\n'
     assert endmarker.string == ''
```
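
A public-API view of the same tokenizer change these two test updates encode: the unterminated quote is now a one-character error token, and the trailing newline moves into the endmarker's prefix. A sketch (child layout assumed from the updated tests above):

```python
# The '"' is a one-character error leaf; the '\n' ends up in the
# endmarker's prefix instead of inside the error token.
import parso

module = parso.parse('"\n')
error_leaf, endmarker = module.children
print(repr(error_leaf.value))   # '"'
print(repr(endmarker.prefix))   # '\n'
```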

tox.ini

```diff
@@ -2,6 +2,7 @@
 envlist = py27, py33, py34, py35, py36, py37
 [testenv]
 deps =
+    {env:_SETUPTOOLS_DEP:setuptools}
     {env:_PARSO_TEST_PYTEST_DEP:pytest>=3.0.7}
     # For --lf and --ff.
     pytest-cache
```

```diff
@@ -10,6 +11,7 @@ setenv =
     # tox corrupts __pycache__, solution from here:
     PYTHONDONTWRITEBYTECODE=1
     py26,py33: _PARSO_TEST_PYTEST_DEP=pytest>=3.0.7,<3.3
+    py26,py33: _SETUPTOOLS_DEP=setuptools<37
 commands =
     pytest {posargs:parso test}
 [testenv:cov]
```