1
0
forked from VimPlug/jedi

start uniting tokenize.TokenInfo and token.Token

This commit is contained in:
Dave Halter
2014-02-25 13:54:18 +01:00
parent 5b84f0b27f
commit f4f79317fe
4 changed files with 15 additions and 14 deletions

View File

@@ -357,7 +357,7 @@ class Parser(object):
         first_tok = tok_list[0]
         # docstrings
         if len(tok_list) == 1 and not isinstance(first_tok, pr.Name) \
-                and first_tok.token_type == tokenize.STRING:
+                and first_tok.type == tokenize.STRING:
             # Normal docstring check
             if self.freshscope and not self.no_docstr:
                 self._scope.add_docstr(
@@ -367,7 +367,7 @@ class Parser(object):
         # Attribute docstring (PEP 224) support (sphinx uses it, e.g.)
         # If string literal is being parsed...
-        elif first_tok.token_type == tokenize.STRING:
+        elif first_tok.type == tokenize.STRING:
             with common.ignored(IndexError, AttributeError):
                 # ...then set it as a docstring
                 self._scope.statements[-1].add_docstr(

View File

@@ -1110,7 +1110,7 @@ class Statement(Simple):
                 start_pos = tok.start_pos
                 end_pos = tok.end_pos
             else:
-                token_type = tok_temp.token_type
+                token_type = tok_temp.type
                 tok = tok_temp.token
                 start_pos = tok_temp.start_pos
                 end_pos = tok_temp.end_pos

View File

@@ -13,7 +13,7 @@ from jedi._compatibility import u, unicode
 class Token(object):
     """The token object is an efficient representation of the structure
-    (token_type, token, (start_pos_line, start_pos_col)). It has indexer
+    (type, token, (start_pos_line, start_pos_col)). It has indexer
     methods that maintain compatibility to existing code that expects the above
     structure.
@@ -40,16 +40,16 @@ class Token(object):
     >>> unicode(Token(1, u("😷"), 1 ,1)) + "p" == u("😷p")
     True
     """
-    __slots__ = ("_token_type", "_token", "_start_pos_line", "_start_pos_col")
+    __slots__ = ("_type", "_token", "_start_pos_line", "_start_pos_col")

     @classmethod
     def from_tuple(cls, tp):
         return Token(tp[0], tp[1], tp[2][0], tp[2][1])

     def __init__(
-        self, token_type, token, start_pos_line, start_pos_col
+        self, type, token, start_pos_line, start_pos_col
     ):
-        self._token_type = token_type
+        self._type = type
         self._token = token
         self._start_pos_line = start_pos_line
         self._start_pos_col = start_pos_col
@@ -75,7 +75,7 @@ class Token(object):
     def __getitem__(self, key):
         # Builds the same structure as tuple used to have
         if key == 0:
-            return self.token_type
+            return self.type
         elif key == 1:
             return self.token
         elif key == 2:
@@ -84,8 +84,8 @@ class Token(object):
             raise IndexError("list index out of range")

     @property
-    def token_type(self):
-        return self._token_type
+    def type(self):
+        return self._type

     @property
     def token(self):
@@ -120,14 +120,14 @@ class Token(object):
     # Make cache footprint smaller for faster unpickling
     def __getstate__(self):
         return (
-            self.token_type,
+            self.type,
             self.token,
             self.start_pos_line,
             self.start_pos_col,
         )

     def __setstate__(self, state):
-        self._token_type = state[0]
+        self._type = state[0]
         self._token = state[1]
         self._start_pos_line = state[2]
         self._start_pos_col = state[3]

View File

@@ -36,8 +36,9 @@ tok_name[ENCODING] = 'ENCODING'
 class TokenInfo(object):
-    """The token object is an efficient representation of the structure
-    (token_type, token, (start_pos_line, start_pos_col)). It has indexer
+    """
+    The token object is an efficient representation of the structure
+    (type, token, (start_pos_line, start_pos_col)). It has indexer
     methods that maintain compatibility to existing code that expects the above
     structure.