Mirror of https://github.com/davidhalter/parso.git

Commit: Don't use grammar as an argument anymore, because it's already there
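In short: the parser instance already carries the grammar and the parse stack as self._pgen_grammar and self.stack, so this commit drops pgen_grammar, stack, and add_token_callback from the error_recovery, convert_node, and convert_leaf hooks, and renames add_token to the private _add_token. A minimal sketch of the signature change; MyParser is a hypothetical subclass for illustration, only the signatures come from the diff below:

# Before: state was threaded through every hook call.
class MyParser(BaseParser):
    def convert_node(self, pgen_grammar, nonterminal, children):
        ...

# After: hooks read the state the instance already owns.
class MyParser(BaseParser):
    def convert_node(self, nonterminal, children):
        # The grammar is still reachable when needed: self._pgen_grammar
        ...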
--- a/parso/parser.py
+++ b/parso/parser.py
@@ -99,7 +99,6 @@ def _token_to_transition(grammar, type_, value):
     return type_
 
 
-
 class BaseParser(object):
     """Parser engine.
 
@@ -129,7 +128,7 @@ class BaseParser(object):
         self.stack = Stack([StackNode(first_dfa)])
 
         for type_, value, start_pos, prefix in tokens:
-            self.add_token(type_, value, start_pos, prefix)
+            self._add_token(type_, value, start_pos, prefix)
 
         while self.stack and self.stack[-1].dfa.is_final:
             self._pop()
@@ -141,27 +140,26 @@ class BaseParser(object):
             raise InternalParseError("incomplete input", type_, value, start_pos)
         return self.rootnode
 
-    def error_recovery(self, pgen_grammar, stack, typ, value, start_pos, prefix,
-                       add_token_callback):
+    def error_recovery(self, typ, value, start_pos, prefix):
         if self._error_recovery:
             raise NotImplementedError("Error Recovery is not implemented")
         else:
             error_leaf = tree.ErrorLeaf('TODO %s' % typ, value, start_pos, prefix)
             raise ParserSyntaxError('SyntaxError: invalid syntax', error_leaf)
 
-    def convert_node(self, pgen_grammar, nonterminal, children):
+    def convert_node(self, nonterminal, children):
         try:
             return self.node_map[nonterminal](children)
         except KeyError:
             return self.default_node(nonterminal, children)
 
-    def convert_leaf(self, pgen_grammar, type_, value, prefix, start_pos):
+    def convert_leaf(self, type_, value, prefix, start_pos):
         try:
             return self.leaf_map[type_](value, start_pos, prefix)
         except KeyError:
             return self.default_leaf(value, start_pos, prefix)
 
-    def add_token(self, type_, value, start_pos, prefix):
+    def _add_token(self, type_, value, start_pos, prefix):
        """Add a token; return True if this is the end of the program."""
         grammar = self._pgen_grammar
         stack = self.stack
@@ -175,8 +173,7 @@ class BaseParser(object):
                 if stack[-1].dfa.is_final:
                     self._pop()
                 else:
-                    self.error_recovery(grammar, stack, type_,
-                                        value, start_pos, prefix, self.add_token)
+                    self.error_recovery(type_, value, start_pos, prefix)
                     return
             except IndexError:
                 raise InternalParseError("too much input", type_, value, start_pos)
@@ -186,7 +183,7 @@ class BaseParser(object):
         for push in plan.dfa_pushes:
             stack.append(StackNode(push))
 
-        leaf = self.convert_leaf(grammar, type_, value, prefix, start_pos)
+        leaf = self.convert_leaf(type_, value, prefix, start_pos)
         stack[-1].nodes.append(leaf)
 
     def _pop(self):
@@ -198,7 +195,7 @@ class BaseParser(object):
         if len(tos.nodes) == 1:
             new_node = tos.nodes[0]
         else:
-            new_node = self.convert_node(self._pgen_grammar, tos.dfa.from_rule, tos.nodes)
+            new_node = self.convert_node(tos.dfa.from_rule, tos.nodes)
 
         try:
             self.stack[-1].nodes.append(new_node)
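For context, a hedged sketch of what the slimmed-down BaseParser hooks look like to a subclass after this change. Only the hook signatures and the self._pgen_grammar attribute come from the diff above; CountingParser itself is hypothetical:

from parso.parser import BaseParser


class CountingParser(BaseParser):
    """Hypothetical subclass: counts converted nodes while delegating."""

    def __init__(self, *args, **kwargs):
        super(CountingParser, self).__init__(*args, **kwargs)
        self.node_count = 0

    def convert_node(self, nonterminal, children):
        # No grammar argument anymore; it lives on the instance if needed.
        self.node_count += 1
        return super(CountingParser, self).convert_node(nonterminal, children)

    def convert_leaf(self, type_, value, prefix, start_pos):
        return super(CountingParser, self).convert_leaf(type_, value, prefix, start_pos)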
--- a/parso/python/parser.py
+++ b/parso/python/parser.py
@@ -95,14 +95,13 @@ class Parser(BaseParser):
             # If there's only one statement, we get back a non-module. That's
             # not what we want, we want a module, so we add it here:
             node = self.convert_node(
-                self._pgen_grammar,
                 'file_input',
                 [node]
             )
 
         return node
 
-    def convert_node(self, pgen_grammar, nonterminal, children):
+    def convert_node(self, nonterminal, children):
         """
         Convert raw node information to a PythonBaseNode instance.
 
@@ -127,19 +126,18 @@ class Parser(BaseParser):
                 nonterminal = 'testlist_comp'
             return self.default_node(nonterminal, children)
 
-    def convert_leaf(self, pgen_grammar, type, value, prefix, start_pos):
+    def convert_leaf(self, type, value, prefix, start_pos):
         # print('leaf', repr(value), token.tok_name[type])
         if type == NAME:
-            if value in pgen_grammar.reserved_syntax_strings:
+            if value in self._pgen_grammar.reserved_syntax_strings:
                 return tree.Keyword(value, start_pos, prefix)
             else:
                 return tree.Name(value, start_pos, prefix)
 
         return self._leaf_map.get(type, tree.Operator)(value, start_pos, prefix)
 
-    def error_recovery(self, pgen_grammar, stack, typ, value, start_pos, prefix,
-                       add_token_callback):
-        tos_nodes = stack[-1].nodes
+    def error_recovery(self, typ, value, start_pos, prefix):
+        tos_nodes = self.stack[-1].nodes
         if tos_nodes:
             last_leaf = tos_nodes[-1].get_last_leaf()
         else:
@@ -152,23 +150,21 @@ class Parser(BaseParser):
         # possible (and valid in Python) that there's no newline at the
         # end of a file, we have to recover even if the user doesn't want
         # error recovery.
-        if stack[-1].dfa.from_rule == 'simple_stmt':
+        if self.stack[-1].dfa.from_rule == 'simple_stmt':
             try:
-                plan = stack[-1].dfa.transition_to_plan[PythonTokenTypes.NEWLINE]
+                plan = self.stack[-1].dfa.transition_to_plan[PythonTokenTypes.NEWLINE]
             except KeyError:
                 pass
             else:
                 if plan.next_dfa.is_final and not plan.dfa_pushes:
                     # We are ignoring here that the newline would be
                     # required for a simple_stmt.
-                    stack[-1].dfa = plan.next_dfa
-                    add_token_callback(typ, value, start_pos, prefix)
+                    self.stack[-1].dfa = plan.next_dfa
+                    self._add_token(typ, value, start_pos, prefix)
                     return
 
         if not self._error_recovery:
-            return super(Parser, self).error_recovery(
-                pgen_grammar, stack, typ, value, start_pos, prefix,
-                add_token_callback)
+            return super(Parser, self).error_recovery(typ, value, start_pos, prefix)
 
         def current_suite(stack):
             # For now just discard everything that is not a suite or
@@ -185,10 +181,10 @@ class Parser(BaseParser):
                         break
             return until_index
 
-        until_index = current_suite(stack)
+        until_index = current_suite(self.stack)
 
-        if self._stack_removal(stack, until_index + 1):
-            add_token_callback(typ, value, start_pos, prefix)
+        if self._stack_removal(self.stack, until_index + 1):
+            self._add_token(typ, value, start_pos, prefix)
         else:
             if typ == INDENT:
                 # For every deleted INDENT we have to delete a DEDENT as well.
@@ -196,9 +192,9 @@ class Parser(BaseParser):
                 self._omit_dedent_list.append(self._indent_counter)
 
             error_leaf = tree.PythonErrorLeaf(typ.name, value, start_pos, prefix)
-            stack[-1].nodes.append(error_leaf)
+            self.stack[-1].nodes.append(error_leaf)
 
-            tos = stack[-1]
+            tos = self.stack[-1]
             if tos.nonterminal == 'suite':
                 # Need at least one statement in the suite. This happend with the
                 # error recovery above.
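The public entry points are unchanged by this internal refactor; a quick smoke test of the error-recovery path that Parser.error_recovery implements (standard parso API, nothing here is specific to this commit):

import parso

# Broken input: error recovery keeps parsing and records error leaves
# in the tree instead of raising ParserSyntaxError.
module = parso.parse('def broken(:\n    pass\n')
print([child.type for child in module.children])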