From 34ab35558ff491afd5f9f05d4b57f57caa08fd71 Mon Sep 17 00:00:00 2001
From: Dave Halter
Date: Sun, 24 Jun 2018 16:31:58 +0200
Subject: [PATCH] Remove a lot of the old token code

---
 parso/python/token.py    | 71 ----------------------------------------
 parso/python/tokenize.py | 11 ++-----
 2 files changed, 2 insertions(+), 80 deletions(-)

diff --git a/parso/python/token.py b/parso/python/token.py
index 3e4e17b..bb86ec9 100644
--- a/parso/python/token.py
+++ b/parso/python/token.py
@@ -1,76 +1,5 @@
 from __future__ import absolute_import
 
-# Map from operator to number (since tokenize doesn't do this)
-
-opmap_raw = """\
-( LPAR
-) RPAR
-[ LSQB
-] RSQB
-: COLON
-, COMMA
-; SEMI
-+ PLUS
-- MINUS
-* STAR
-/ SLASH
-| VBAR
-& AMPER
-< LESS
-> GREATER
-= EQUAL
-. DOT
-% PERCENT
-` BACKQUOTE
-{ LBRACE
-} RBRACE
-@ AT
-== EQEQUAL
-!= NOTEQUAL
-<> NOTEQUAL
-<= LESSEQUAL
->= GREATEREQUAL
-~ TILDE
-^ CIRCUMFLEX
-<< LEFTSHIFT
->> RIGHTSHIFT
-** DOUBLESTAR
-+= PLUSEQUAL
--= MINEQUAL
-*= STAREQUAL
-/= SLASHEQUAL
-%= PERCENTEQUAL
-&= AMPEREQUAL
-|= VBAREQUAL
-@= ATEQUAL
-^= CIRCUMFLEXEQUAL
-<<= LEFTSHIFTEQUAL
->>= RIGHTSHIFTEQUAL
-**= DOUBLESTAREQUAL
-// DOUBLESLASH
-//= DOUBLESLASHEQUAL
--> RARROW
-... ELLIPSIS
-! EXCLAMATION
-"""
-
-opmap = {}
-for line in opmap_raw.splitlines():
-    op, name = line.split()
-    opmap[op] = name
-
-
-def generate_token_id(string):
-    """
-    Uses a token in the grammar (e.g. `'+'` or `'and'`returns the corresponding
-    ID for it. The strings are part of the grammar file.
-    """
-    try:
-        return opmap[string]
-    except KeyError:
-        pass
-    return globals()[string]
-
 
 class TokenType(object):
     def __init__(self, name, contains_syntax=False):
diff --git a/parso/python/tokenize.py b/parso/python/tokenize.py
index 1061672..4273625 100644
--- a/parso/python/tokenize.py
+++ b/parso/python/tokenize.py
@@ -18,7 +18,7 @@ from collections import namedtuple
 import itertools as _itertools
 from codecs import BOM_UTF8
 
-from parso.python.token import PythonTokenTypes, opmap
+from parso.python.token import PythonTokenTypes
 from parso._compatibility import py_version
 from parso.utils import split_lines
 
@@ -567,14 +567,7 @@ def tokenize_lines(lines, version_info, start_pos=(1, 0)):
                         and fstring_stack[-1].parentheses_count == 1:
                     fstring_stack[-1].format_spec_count += 1
 
-                try:
-                    # This check is needed in any case to check if it's a valid
-                    # operator or just some random unicode character.
-                    opmap[token]
-                    typ = PythonTokenTypes.OP
-                except KeyError:
-                    typ = PythonTokenTypes.ERRORTOKEN
-                yield PythonToken(typ, token, spos, prefix)
+                yield PythonToken(PythonTokenTypes.OP, token, spos, prefix)
 
     if contstr:
         yield PythonToken(PythonTokenTypes.ERRORTOKEN, contstr, contstr_start, prefix)
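
The behavioural change in the tokenize.py hunk: previously the catch-all operator branch looked the matched text up in opmap and downgraded anything unknown to ERRORTOKEN; after this patch that branch yields OP unconditionally. A minimal standalone sketch of the before/after logic, using a stand-in enum and a small hypothetical opmap subset rather than parso's real PythonTokenTypes or tokenizer:

    from enum import Enum


    class TokenTypes(Enum):
        # Stand-in for parso's PythonTokenTypes, for illustration only.
        OP = 'op'
        ERRORTOKEN = 'errortoken'


    # Hypothetical subset of the removed opmap table.
    OPMAP = {'+': 'PLUS', '-': 'MINUS', '==': 'EQEQUAL'}


    def classify_old(token):
        # Old behaviour: only tokens present in opmap counted as operators;
        # any other character that reached this branch became an ERRORTOKEN.
        try:
            OPMAP[token]
            return TokenTypes.OP
        except KeyError:
            return TokenTypes.ERRORTOKEN


    def classify_new(token):
        # New behaviour after the patch: the branch yields OP unconditionally.
        return TokenTypes.OP


    assert classify_old('+') is TokenTypes.OP
    assert classify_old('$') is TokenTypes.ERRORTOKEN  # unknown character
    assert classify_new('$') is TokenTypes.OP          # now emitted as OP here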