Remove a lot of the old token code

Dave Halter
2018-06-24 16:31:58 +02:00
parent 03de9cebb8
commit 34ab35558f
2 changed files with 2 additions and 80 deletions

parso/python/token.py View File

@@ -1,76 +1,5 @@
 from __future__ import absolute_import
-# Map from operator to number (since tokenize doesn't do this)
-opmap_raw = """\
-( LPAR
-) RPAR
-[ LSQB
-] RSQB
-: COLON
-, COMMA
-; SEMI
-+ PLUS
-- MINUS
-* STAR
-/ SLASH
-| VBAR
-& AMPER
-< LESS
-> GREATER
-= EQUAL
-. DOT
-% PERCENT
-` BACKQUOTE
-{ LBRACE
-} RBRACE
-@ AT
-== EQEQUAL
-!= NOTEQUAL
-<> NOTEQUAL
-<= LESSEQUAL
->= GREATEREQUAL
-~ TILDE
-^ CIRCUMFLEX
-<< LEFTSHIFT
->> RIGHTSHIFT
-** DOUBLESTAR
-+= PLUSEQUAL
--= MINEQUAL
-*= STAREQUAL
-/= SLASHEQUAL
-%= PERCENTEQUAL
-&= AMPEREQUAL
-|= VBAREQUAL
-@= ATEQUAL
-^= CIRCUMFLEXEQUAL
-<<= LEFTSHIFTEQUAL
->>= RIGHTSHIFTEQUAL
-**= DOUBLESTAREQUAL
-// DOUBLESLASH
-//= DOUBLESLASHEQUAL
--> RARROW
-... ELLIPSIS
-! EXCLAMATION
-"""
-opmap = {}
-for line in opmap_raw.splitlines():
-    op, name = line.split()
-    opmap[op] = name
-def generate_token_id(string):
-    """
-    Uses a token in the grammar (e.g. `'+'` or `'and'`) and returns the
-    corresponding ID for it. The strings are part of the grammar file.
-    """
-    try:
-        return opmap[string]
-    except KeyError:
-        pass
-    return globals()[string]
 class TokenType(object):
     def __init__(self, name, contains_syntax=False):
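What this hunk deletes is the string-based machinery: opmap mapped operator strings from the grammar file to token names, and generate_token_id() resolved everything else through module globals. The TokenType class that the hunk leaves in place (its body is cut off above) carries the same information as objects instead. Below is a minimal sketch of that object-based style, built only on the __init__ signature visible in the context line; the OP and ERRORTOKEN instances are illustrative stand-ins, not parso's actual definitions.

class TokenType(object):
    def __init__(self, name, contains_syntax=False):
        self.name = name
        # True for token types whose string values appear literally in the grammar.
        self.contains_syntax = contains_syntax

    def __repr__(self):
        return '%s(%s)' % (self.__class__.__name__, self.name)


# Illustrative usage: token types are compared as objects, so there is no
# need for the opmap/globals() lookups that generate_token_id() performed.
OP = TokenType('OP', contains_syntax=True)
ERRORTOKEN = TokenType('ERRORTOKEN')
print(OP, OP is ERRORTOKEN)  # TokenType(OP) False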

parso/python/tokenize.py View File

@@ -18,7 +18,7 @@ from collections import namedtuple
 import itertools as _itertools
 from codecs import BOM_UTF8
-from parso.python.token import PythonTokenTypes, opmap
+from parso.python.token import PythonTokenTypes
 from parso._compatibility import py_version
 from parso.utils import split_lines
@@ -567,14 +567,7 @@ def tokenize_lines(lines, version_info, start_pos=(1, 0)):
                     and fstring_stack[-1].parentheses_count == 1:
                 fstring_stack[-1].format_spec_count += 1
-            try:
-                # This check is needed in any case to check if it's a valid
-                # operator or just some random unicode character.
-                opmap[token]
-                typ = PythonTokenTypes.OP
-            except KeyError:
-                typ = PythonTokenTypes.ERRORTOKEN
-            yield PythonToken(typ, token, spos, prefix)
+            yield PythonToken(PythonTokenTypes.OP, token, spos, prefix)
 
     if contstr:
         yield PythonToken(PythonTokenTypes.ERRORTOKEN, contstr, contstr_start, prefix)
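The net effect in the tokenizer: with opmap gone, this branch no longer checks whether the matched text is a known operator, so nothing is downgraded to ERRORTOKEN on a KeyError here; whatever reaches the branch is emitted as PythonTokenTypes.OP. A small before/after sketch of just that decision, with a toy table and a bare token string standing in for the tokenizer's locals (not the full parso tokenizer):

OPMAP = {'+': 'PLUS', '**': 'DOUBLESTAR'}  # tiny stand-in for the deleted table

def classify_before(token):
    # Old behaviour: only strings found in opmap counted as operators.
    try:
        OPMAP[token]
        return 'OP'
    except KeyError:
        return 'ERRORTOKEN'  # e.g. some random unicode character

def classify_after(token):
    # New behaviour: anything that falls through to this branch is an OP token.
    return 'OP'

print(classify_before('+'), classify_after('+'))  # OP OP
print(classify_before('§'), classify_after('§'))  # ERRORTOKEN OP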