This commit is contained in:
Dave Halter
2018-06-13 20:47:16 +02:00
parent fc5560874b
commit 9a0b6f4928

View File

def generate_grammar(bnf_grammar, token_namespace):
    """Build a grammar object from a BNF grammar description.

    Each rule in *bnf_grammar* is parsed into an NFA (a start/end state
    pair), converted to a DFA, and simplified.  The first rule encountered
    becomes the grammar's start symbol.

    :param bnf_grammar: the grammar text in BNF notation.
    :param token_namespace: namespace providing the token types referenced
        by the grammar rules.
    :return: the result of ``ParserGenerator.make_grammar`` for the
        assembled DFA table.
    """
    dfas = {}
    start_symbol = None
    for nfa_a, nfa_z in _GrammarParser(bnf_grammar).parse():
        # nfa_a/nfa_z are the NFA start and end states for one rule;
        # turn them into a DFA and shrink it before storing.
        dfa = _make_dfa(nfa_a, nfa_z)
        _simplify_dfa(dfa)
        dfas[nfa_a.from_rule] = dfa
        # The first rule in the grammar file defines the start symbol.
        if start_symbol is None:
            start_symbol = nfa_a.from_rule
    p = ParserGenerator(dfas, token_namespace)
    return p.make_grammar(Grammar(bnf_grammar, start_symbol))