Pass tokens around and not all the different token values
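
The commit threads a single token object through the parser instead of four loose values (type_, value, start_pos, prefix). Below is a minimal sketch of the shape such a token needs, assuming a namedtuple whose fields match the attribute accesses visible in the diff (token.type, token.value, token.start_pos); the exact type used in parso may differ.

    from collections import namedtuple

    # Assumed token shape for illustration only; the field names are
    # inferred from the attribute accesses in this commit.
    Token = namedtuple('Token', ['type', 'value', 'start_pos', 'prefix'])

    tok = Token(type='NAME', value='foo', start_pos=(1, 0), prefix=' ')

    # A namedtuple supports both access styles the new code relies on:
    assert tok.type == 'NAME'                  # attribute access
    type_, value, start_pos, prefix = tok      # sequence unpacking
    assert (value, prefix) == ('foo', ' ')

Because a namedtuple is still a tuple, call sites can switch to passing the whole object while existing unpacking code keeps working unchanged.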
@@ -127,8 +127,8 @@ class BaseParser(object):
         first_dfa = self._pgen_grammar.nonterminal_to_dfas[self._start_nonterminal][0]
         self.stack = Stack([StackNode(first_dfa)])

-        for type_, value, start_pos, prefix in tokens:
-            self._add_token(type_, value, start_pos, prefix)
+        for token in tokens:
+            self._add_token(token)

         while self.stack and self.stack[-1].dfa.is_final:
             self._pop()
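
The loop above is the entry point of the refactor: the for-header no longer names every field, and _add_token takes one argument. A before/after sketch, reusing the assumed Token type:

    from collections import namedtuple
    Token = namedtuple('Token', ['type', 'value', 'start_pos', 'prefix'])
    tokens = [Token('NAME', 'foo', (1, 0), ''), Token('NEWLINE', '\n', (1, 3), '')]

    # Before: four names bound per iteration, four arguments re-passed.
    for type_, value, start_pos, prefix in tokens:
        print(type_, repr(value), start_pos)

    # After: one binding; the callee receives the whole token.
    for token in tokens:
        print(token.type, repr(token.value), token.start_pos)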
@@ -137,14 +137,15 @@ class BaseParser(object):
             # We never broke out -- EOF is too soon -- Unfinished statement.
             # However, the error recovery might have added the token again, if
             # the stack is empty, we're fine.
-            raise InternalParseError("incomplete input", type_, value, start_pos)
+            raise InternalParseError("incomplete input", token.type, token.value, token.start_pos)
         return self.rootnode

-    def error_recovery(self, typ, value, start_pos, prefix):
+    def error_recovery(self, token):
         if self._error_recovery:
             raise NotImplementedError("Error Recovery is not implemented")
         else:
-            error_leaf = tree.ErrorLeaf('TODO %s' % typ, value, start_pos, prefix)
+            type_, value, start_pos, prefix = token
+            error_leaf = tree.ErrorLeaf('TODO %s' % type_, value, start_pos, prefix)
             raise ParserSyntaxError('SyntaxError: invalid syntax', error_leaf)

     def convert_node(self, nonterminal, children):
@@ -159,10 +160,11 @@ class BaseParser(object):
         except KeyError:
             return self.default_leaf(value, start_pos, prefix)

-    def _add_token(self, type_, value, start_pos, prefix):
+    def _add_token(self, token):
         """Add a token; return True if this is the end of the program."""
         grammar = self._pgen_grammar
         stack = self.stack
+        type_, value, start_pos, prefix = token
         transition = _token_to_transition(grammar, type_, value)

         while True:
@@ -173,7 +175,7 @@ class BaseParser(object):
                 if stack[-1].dfa.is_final:
                     self._pop()
                 else:
-                    self.error_recovery(type_, value, start_pos, prefix)
+                    self.error_recovery(token)
                     return
             except IndexError:
                 raise InternalParseError("too much input", type_, value, start_pos)
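
_add_token shows the complementary half of the pattern: unpack once at the top where the fields are consumed, but forward the intact token on the error path so error_recovery keeps a one-argument signature. A runnable sketch of that shape; transitions and recover are stand-ins, not parso's API:

    from collections import namedtuple
    Token = namedtuple('Token', ['type', 'value', 'start_pos', 'prefix'])

    def add_token(token, transitions, recover):
        type_, value, start_pos, prefix = token     # unpack once, up front
        try:
            return transitions[(type_, value)]      # normal shift path
        except KeyError:
            recover(token)                          # error path gets the whole token

    # An empty transition table forces the error path:
    add_token(Token('NAME', 'x', (1, 0), ''), {},
              lambda tok: print('recovering at', tok.start_pos))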
@@ -136,7 +136,7 @@ class Parser(BaseParser):

         return self._leaf_map.get(type, tree.Operator)(value, start_pos, prefix)

-    def error_recovery(self, typ, value, start_pos, prefix):
+    def error_recovery(self, token):
         tos_nodes = self.stack[-1].nodes
         if tos_nodes:
             last_leaf = tos_nodes[-1].get_last_leaf()
@@ -144,8 +144,8 @@ class Parser(BaseParser):
             last_leaf = None

         if self._start_nonterminal == 'file_input' and \
-                (typ == PythonTokenTypes.ENDMARKER or
-                 typ == DEDENT and '\n' not in last_leaf.value):
+                (token.type == PythonTokenTypes.ENDMARKER or
+                 token.type == DEDENT and '\n' not in last_leaf.value):
             # In Python statements need to end with a newline. But since it's
             # possible (and valid in Python) that there's no newline at the
             # end of a file, we have to recover even if the user doesn't want
@@ -160,11 +160,11 @@ class Parser(BaseParser):
                 # We are ignoring here that the newline would be
                 # required for a simple_stmt.
                 self.stack[-1].dfa = plan.next_dfa
-                self._add_token(typ, value, start_pos, prefix)
+                self._add_token(token)
                 return

         if not self._error_recovery:
-            return super(Parser, self).error_recovery(typ, value, start_pos, prefix)
+            return super(Parser, self).error_recovery(token)

         def current_suite(stack):
             # For now just discard everything that is not a suite or
@@ -184,8 +184,9 @@ class Parser(BaseParser):
         until_index = current_suite(self.stack)

         if self._stack_removal(until_index + 1):
-            self._add_token(typ, value, start_pos, prefix)
+            self._add_token(token)
         else:
+            typ, value, start_pos, prefix = token
             if typ == INDENT:
                 # For every deleted INDENT we have to delete a DEDENT as well.
                 # Otherwise the parser will get into trouble and DEDENT too early.
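
Taken together, the convention after this commit is: iterate tokens as objects, pass the object across every method boundary (parse -> _add_token -> error_recovery -> _add_token again during recovery), and unpack fields only at the point of use. A self-contained toy of that round trip; the class and its logic are illustrative, not parso's:

    from collections import namedtuple
    Token = namedtuple('Token', ['type', 'value', 'start_pos', 'prefix'])

    class ToyParser:
        def parse(self, tokens):
            for token in tokens:
                self._add_token(token)

        def _add_token(self, token):
            type_, value, start_pos, prefix = token
            if type_ == 'ERRORTOKEN':               # illustrative condition
                self.error_recovery(token)          # forward the token intact
                return
            print('shift', type_, repr(value), 'at', start_pos)

        def error_recovery(self, token):
            # Real recovery would adjust the stack and may re-feed the
            # unchanged token via self._add_token(token).
            print('recovered at', token.start_pos)

    ToyParser().parse([Token('NAME', 'x', (1, 0), ''),
                       Token('ERRORTOKEN', '?', (1, 2), ' ')])

Passing one object also keeps the InternalParseError messages honest: token.type, token.value and token.start_pos are read from exactly the value that failed to shift.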