Don't pass the grammar as an argument anymore, because it's already there

Author: Dave Halter
Date: 2018-06-28 00:01:47 +02:00
Parent: f7d3d4e82f
Commit: 692436ba12
2 changed files with 23 additions and 30 deletions
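
In short: the pgen_grammar parameter (and, for error_recovery, the stack and add_token_callback parameters) is dropped from the BaseParser hooks, because the parser already holds that state as self._pgen_grammar and self.stack. A minimal before/after sketch of the signatures, taken from the hunks below; the "..." bodies are placeholders, not parso code:

    # Sketch of the signature change in this commit. Only the parameter lists
    # mirror the diff; the bodies are placeholders.

    class BaseParserBefore(object):
        def convert_node(self, pgen_grammar, nonterminal, children): ...
        def convert_leaf(self, pgen_grammar, type_, value, prefix, start_pos): ...
        def error_recovery(self, pgen_grammar, stack, typ, value, start_pos,
                           prefix, add_token_callback): ...
        def add_token(self, type_, value, start_pos, prefix): ...

    class BaseParserAfter(object):
        # The grammar and stack are read from the instance (self._pgen_grammar,
        # self.stack) instead of being passed around.
        def convert_node(self, nonterminal, children): ...
        def convert_leaf(self, type_, value, prefix, start_pos): ...
        def error_recovery(self, typ, value, start_pos, prefix): ...
        def _add_token(self, type_, value, start_pos, prefix): ...  # now private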

File 1 of 2

@@ -99,7 +99,6 @@ def _token_to_transition(grammar, type_, value):
return type_
class BaseParser(object):
"""Parser engine.
@@ -129,7 +128,7 @@ class BaseParser(object):
self.stack = Stack([StackNode(first_dfa)])
for type_, value, start_pos, prefix in tokens:
- self.add_token(type_, value, start_pos, prefix)
+ self._add_token(type_, value, start_pos, prefix)
while self.stack and self.stack[-1].dfa.is_final:
self._pop()
@@ -141,27 +140,26 @@ class BaseParser(object):
raise InternalParseError("incomplete input", type_, value, start_pos)
return self.rootnode
- def error_recovery(self, pgen_grammar, stack, typ, value, start_pos, prefix,
-                    add_token_callback):
+ def error_recovery(self, typ, value, start_pos, prefix):
if self._error_recovery:
raise NotImplementedError("Error Recovery is not implemented")
else:
error_leaf = tree.ErrorLeaf('TODO %s' % typ, value, start_pos, prefix)
raise ParserSyntaxError('SyntaxError: invalid syntax', error_leaf)
- def convert_node(self, pgen_grammar, nonterminal, children):
+ def convert_node(self, nonterminal, children):
try:
return self.node_map[nonterminal](children)
except KeyError:
return self.default_node(nonterminal, children)
- def convert_leaf(self, pgen_grammar, type_, value, prefix, start_pos):
+ def convert_leaf(self, type_, value, prefix, start_pos):
try:
return self.leaf_map[type_](value, start_pos, prefix)
except KeyError:
return self.default_leaf(value, start_pos, prefix)
- def add_token(self, type_, value, start_pos, prefix):
+ def _add_token(self, type_, value, start_pos, prefix):
"""Add a token; return True if this is the end of the program."""
grammar = self._pgen_grammar
stack = self.stack
@@ -175,8 +173,7 @@ class BaseParser(object):
if stack[-1].dfa.is_final:
self._pop()
else:
- self.error_recovery(grammar, stack, type_,
-                     value, start_pos, prefix, self.add_token)
+ self.error_recovery(type_, value, start_pos, prefix)
return
except IndexError:
raise InternalParseError("too much input", type_, value, start_pos)
@@ -186,7 +183,7 @@ class BaseParser(object):
for push in plan.dfa_pushes:
stack.append(StackNode(push))
- leaf = self.convert_leaf(grammar, type_, value, prefix, start_pos)
+ leaf = self.convert_leaf(type_, value, prefix, start_pos)
stack[-1].nodes.append(leaf)
def _pop(self):
@@ -198,7 +195,7 @@ class BaseParser(object):
if len(tos.nodes) == 1:
new_node = tos.nodes[0]
else:
- new_node = self.convert_node(self._pgen_grammar, tos.dfa.from_rule, tos.nodes)
+ new_node = self.convert_node(tos.dfa.from_rule, tos.nodes)
try:
self.stack[-1].nodes.append(new_node)
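
For subclasses such as the Python-specific Parser in the second file, the hooks are now overridden without threading the grammar through. A hedged sketch, assuming BaseParser is importable from parso.parser as in the parso code base; SimpleNode and SimpleLeaf are made-up stand-ins, not parso tree classes:

    # Illustrative only: a subclass overriding the hooks with their new,
    # grammar-free signatures. SimpleNode/SimpleLeaf are made-up stand-ins.
    from parso.parser import BaseParser


    class SimpleNode(object):
        def __init__(self, nonterminal, children):
            self.nonterminal = nonterminal
            self.children = children


    class SimpleLeaf(object):
        def __init__(self, type_, value, prefix, start_pos):
            self.type = type_
            self.value = value
            self.prefix = prefix
            self.start_pos = start_pos


    class MyParser(BaseParser):
        def convert_node(self, nonterminal, children):
            # The grammar is no longer a parameter; when it is needed it is
            # read from self._pgen_grammar instead.
            return SimpleNode(nonterminal, children)

        def convert_leaf(self, type_, value, prefix, start_pos):
            return SimpleLeaf(type_, value, prefix, start_pos)

The grammar is handed to the parser once, at construction time, and read back from self._pgen_grammar wherever it is needed, which is exactly what the hunks above do inside _add_token and _pop.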

File 2 of 2

@@ -95,14 +95,13 @@ class Parser(BaseParser):
# If there's only one statement, we get back a non-module. That's
# not what we want, we want a module, so we add it here:
node = self.convert_node(
- self._pgen_grammar,
'file_input',
[node]
)
return node
- def convert_node(self, pgen_grammar, nonterminal, children):
+ def convert_node(self, nonterminal, children):
"""
Convert raw node information to a PythonBaseNode instance.
@@ -127,19 +126,18 @@ class Parser(BaseParser):
nonterminal = 'testlist_comp'
return self.default_node(nonterminal, children)
- def convert_leaf(self, pgen_grammar, type, value, prefix, start_pos):
+ def convert_leaf(self, type, value, prefix, start_pos):
# print('leaf', repr(value), token.tok_name[type])
if type == NAME:
- if value in pgen_grammar.reserved_syntax_strings:
+ if value in self._pgen_grammar.reserved_syntax_strings:
return tree.Keyword(value, start_pos, prefix)
else:
return tree.Name(value, start_pos, prefix)
return self._leaf_map.get(type, tree.Operator)(value, start_pos, prefix)
- def error_recovery(self, pgen_grammar, stack, typ, value, start_pos, prefix,
-                    add_token_callback):
- tos_nodes = stack[-1].nodes
+ def error_recovery(self, typ, value, start_pos, prefix):
+ tos_nodes = self.stack[-1].nodes
if tos_nodes:
last_leaf = tos_nodes[-1].get_last_leaf()
else:
@@ -152,23 +150,21 @@ class Parser(BaseParser):
# possible (and valid in Python) that there's no newline at the
# end of a file, we have to recover even if the user doesn't want
# error recovery.
- if stack[-1].dfa.from_rule == 'simple_stmt':
+ if self.stack[-1].dfa.from_rule == 'simple_stmt':
try:
- plan = stack[-1].dfa.transition_to_plan[PythonTokenTypes.NEWLINE]
+ plan = self.stack[-1].dfa.transition_to_plan[PythonTokenTypes.NEWLINE]
except KeyError:
pass
else:
if plan.next_dfa.is_final and not plan.dfa_pushes:
# We are ignoring here that the newline would be
# required for a simple_stmt.
- stack[-1].dfa = plan.next_dfa
- add_token_callback(typ, value, start_pos, prefix)
+ self.stack[-1].dfa = plan.next_dfa
+ self._add_token(typ, value, start_pos, prefix)
return
if not self._error_recovery:
- return super(Parser, self).error_recovery(
-     pgen_grammar, stack, typ, value, start_pos, prefix,
-     add_token_callback)
+ return super(Parser, self).error_recovery(typ, value, start_pos, prefix)
def current_suite(stack):
# For now just discard everything that is not a suite or
@@ -185,10 +181,10 @@ class Parser(BaseParser):
break
return until_index
- until_index = current_suite(stack)
+ until_index = current_suite(self.stack)
- if self._stack_removal(stack, until_index + 1):
- add_token_callback(typ, value, start_pos, prefix)
+ if self._stack_removal(self.stack, until_index + 1):
+ self._add_token(typ, value, start_pos, prefix)
else:
if typ == INDENT:
# For every deleted INDENT we have to delete a DEDENT as well.
@@ -196,9 +192,9 @@ class Parser(BaseParser):
self._omit_dedent_list.append(self._indent_counter)
error_leaf = tree.PythonErrorLeaf(typ.name, value, start_pos, prefix)
- stack[-1].nodes.append(error_leaf)
+ self.stack[-1].nodes.append(error_leaf)
- tos = stack[-1]
+ tos = self.stack[-1]
if tos.nonterminal == 'suite':
# Need at least one statement in the suite. This happened with the
# error recovery above.
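
At the usage level, the recovery path reworked above is what lets a file without a trailing newline still parse, and what keeps syntactically broken input in the tree as error nodes when error recovery is enabled. A small public-API illustration; the exact node types printed depend on the parso version:

    # Public-API illustration of the behaviour the error_recovery hunks deal
    # with; the printed node types vary between parso versions.
    import parso

    # No trailing newline: the missing NEWLINE after the simple_stmt is
    # recovered, as described in the comment above.
    module = parso.parse('x = 1')
    print([child.type for child in module.children])

    # Broken input is kept in the tree as error nodes/leaves when error
    # recovery is on (the default for parso.parse).
    module = parso.parse('def f(:\n    pass\n')
    print([child.type for child in module.children])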