1 star, 0 forks (repository counters — from the scraped page header)
forked from VimPlug/jedi

prepare for eventual? tokenizer end_pos replacement.

This commit is contained in:
Dave Halter
2014-02-25 11:59:10 +01:00
parent 246118f851
commit 3a23c80ae5
4 changed files with 27 additions and 24 deletions

View File

@@ -616,7 +616,7 @@ class PushBackTokenizer(object):
def __init__(self, tokenizer): def __init__(self, tokenizer):
self._tokenizer = tokenizer self._tokenizer = tokenizer
self._push_backs = [] self._push_backs = []
self.current = tokenize.TokenInfo(None, None, (0, 0), (0, 0)) self.current = tokenize.TokenInfo(None, '', (0, 0), (0, 0))
def push_last_back(self): def push_last_back(self):
self._push_backs.append(self.current) self._push_backs.append(self.current)

View File

@@ -398,7 +398,7 @@ class FastTokenizer(object):
self.closed = False self.closed = False
# fast parser options # fast parser options
self.current = self.previous = TokenInfo(None, None, (0, 0), (0, 0)) self.current = self.previous = TokenInfo(None, '', (0, 0), (0, 0))
self.in_flow = False self.in_flow = False
self.new_indent = False self.new_indent = False
self.parser_indent = self.old_parser_indent = 0 self.parser_indent = self.old_parser_indent = 0

View File

@@ -49,8 +49,8 @@ class TokenInfo(object):
methods that maintain compatibility to existing code that expects the above methods that maintain compatibility to existing code that expects the above
structure. structure.
>>> tuple(TokenInfo(1,2,(3,4))) >>> tuple(TokenInfo(1, 'foo' ,(3,4)))
(1, 2, (3, 4), None) (1, 'foo', (3, 4), None)
>>> str(TokenInfo(1, "test", (1, 1))) == "test" >>> str(TokenInfo(1, "test", (1, 1))) == "test"
True True
>>> repr(TokenInfo(1, "test", (1, 1))) >>> repr(TokenInfo(1, "test", (1, 1)))
@@ -65,19 +65,19 @@ class TokenInfo(object):
(3, 4) (3, 4)
>>> a.string >>> a.string
2 2
>>> a.start_pos_col >>> a._start_pos_col
4 4
>>> unicode(TokenInfo(1, u("😷"), (1 ,1))) + "p" == u("😷p") >>> unicode(TokenInfo(1, u("😷"), (1 ,1))) + "p" == u("😷p")
True True
""" """
__slots__ = ("type", "string", "_start_pos_line", "_start_pos_col", "end") __slots__ = ("type", "string", "_start_pos_line", "_start_pos_col", "end_pos")
def __init__(self, type, string, start_pos, end_pos=None): def __init__(self, type, string, start_pos, end_pos=None):
self.type = type self.type = type
self.string = string self.string = string
self._start_pos_line = start_pos[0] self._start_pos_line = start_pos[0]
self._start_pos_col = start_pos[1] self._start_pos_col = start_pos[1]
self.end = end_pos self.end_pos = end_pos
def __repr__(self): def __repr__(self):
return "<%s: %s>" % (type(self).__name__, tuple(self)[:3]) return "<%s: %s>" % (type(self).__name__, tuple(self)[:3])
@@ -104,20 +104,12 @@ class TokenInfo(object):
elif key == 1: elif key == 1:
return self.string return self.string
elif key == 2: elif key == 2:
return (self.start_pos_line, self.start_pos_col) return (self._start_pos_line, self._start_pos_col)
elif key == 3: elif key == 3:
return self.end return self.end
else: else:
raise IndexError("list index out of range") raise IndexError("list index out of range")
@property
def start_pos_line(self):
return self._start_pos_line
@property
def start_pos_col(self):
return self._start_pos_col
@property @property
def start_pos(self): def start_pos(self):
return (self._start_pos_line, self._start_pos_col) return (self._start_pos_line, self._start_pos_col)
@@ -127,14 +119,24 @@ class TokenInfo(object):
return (self._start_pos_line, self._start_pos_col) return (self._start_pos_line, self._start_pos_col)
@property @property
def _end_pos(self): def end(self):
if self.end_pos is not None and self._end != self.end_pos:
print(self.end_pos, self._end, repr(self.string))
assert False
return self.end_pos
@property
def _end(self):
"""Returns end position respecting multiline tokens.""" """Returns end position respecting multiline tokens."""
end_pos_line = self.start_pos_line end_pos_line = self._start_pos_line
lines = unicode(self).split('\n') lines = self.string.split('\n')
if self.string.endswith('\n'):
lines = lines[:-1]
lines[-1] += '\n'
end_pos_line += len(lines) - 1 end_pos_line += len(lines) - 1
end_pos_col = self.start_pos_col end_pos_col = self._start_pos_col
# Check for multiline token # Check for multiline token
if self.start_pos_line == end_pos_line: if self._start_pos_line == end_pos_line:
end_pos_col += len(lines[-1]) end_pos_col += len(lines[-1])
else: else:
end_pos_col = len(lines[-1]) end_pos_col = len(lines[-1])
@@ -145,8 +147,8 @@ class TokenInfo(object):
return ( return (
self.type, self.type,
self.string, self.string,
self.start_pos_line, self._start_pos_line,
self.start_pos_col, self._start_pos_col,
) )
def __setstate__(self, state): def __setstate__(self, state):

View File

@@ -42,7 +42,8 @@ class UserContext(object):
else: else:
line = self.get_line(self._line_temp) line = self.get_line(self._line_temp)
self._line_length = len(line) self._line_length = len(line)
line = line + '\n' line = '\n' + line
# add lines with a backslash at the end # add lines with a backslash at the end
while True: while True:
self._line_temp -= 1 self._line_temp -= 1