Hack around the fact that the tokenizers are not really integrated with parsers.

Dave Halter
2017-08-25 21:21:57 +02:00
parent 09b05422a6
commit 609ab1ffa9
2 changed files with 16 additions and 1 deletion

@@ -5,6 +5,7 @@ from parso.utils import split_lines
 from parso.python.tokenize import Token
 from parso.python import token
 from parso import parser
+from parso.tree import TypedLeaf

 version36 = PythonVersionInfo(3, 6)
@@ -19,6 +20,8 @@ class TokenNamespace:
     PYTHON_EXPR = 101
     EXCLAMATION_MARK = 102

+    token_map = dict((v, k) for k, v in locals().items())
+
     @classmethod
     def generate_token_id(cls, string):
         if string == '{':
@@ -180,3 +183,8 @@ class Parser(parser.BaseParser):
             node = self.default_node('fstring', [node])
         return node
+
+    def convert_leaf(self, pgen_grammar, type, value, prefix, start_pos):
+        # TODO this is so ugly.
+        leaf_type = TokenNamespace.token_map[type]
+        return TypedLeaf(leaf_type, value, start_pos, prefix)
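To make the workaround easier to follow, here is a minimal, self-contained sketch of the same pattern: class-level integer token ids, a reverse map built with locals() inside the class body, and a convert_leaf-style function that attaches the looked-up name as the leaf's type. SimpleLeaf and the constant values are illustrative stand-ins, not parso's actual TypedLeaf or token ids; only the token_map / convert_leaf shape mirrors the diff.

# Minimal sketch of the pattern in this commit (illustrative names and values;
# SimpleLeaf stands in for parso.tree.TypedLeaf).

class TokenNamespace:
    # Integer token ids, as in the diff's TokenNamespace.
    LBRACE = 100
    PYTHON_EXPR = 101
    EXCLAMATION_MARK = 102

    # Reverse map: id -> constant name.  locals() inside a class body sees the
    # names defined so far; dunder entries like __module__ also land in the map
    # keyed by strings, which is harmless because lookups use integer ids.
    token_map = dict((v, k) for k, v in locals().items())


class SimpleLeaf:
    """Stand-in for a leaf node whose type is set per instance."""

    def __init__(self, type_, value, start_pos, prefix=''):
        self.type = type_
        self.value = value
        self.start_pos = start_pos
        self.prefix = prefix

    def __repr__(self):
        return 'SimpleLeaf(%r, %r)' % (self.type, self.value)


def convert_leaf(type_, value, prefix, start_pos):
    # Same workaround as the diff's Parser.convert_leaf: map the numeric token
    # id back to its name and use that name as the leaf's type.
    leaf_type = TokenNamespace.token_map[type_]
    return SimpleLeaf(leaf_type, value, start_pos, prefix)


print(convert_leaf(102, '!', '', (1, 0)))
# -> SimpleLeaf('EXCLAMATION_MARK', '!')

This is exactly what the commit message calls a hack: instead of integrating the fstring tokenizer's token ids with the parser's leaf machinery, every leaf is wrapped in a generic TypedLeaf whose type is simply the id's constant name.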