1
0
forked from VimPlug/jedi

add NoErrorTokenizer for more readability

This commit is contained in:
David Halter
2012-12-13 18:08:01 +01:00
parent cab74d5f51
commit 3aecb3ff90
2 changed files with 50 additions and 28 deletions

View File

@@ -1,6 +1,8 @@
""" A universal module with functions / classes without dependencies. """
import contextlib
import tokenize
import debug
import settings
@@ -26,6 +28,44 @@ class PushBackIterator(object):
return next(self.iterator)
class NoErrorTokenizer(object):
    """Iterator wrapper around ``tokenize.generate_tokens`` that absorbs
    recoverable tokenizer errors instead of letting them propagate.

    Each yielded item is the token 5-tuple converted to a ``list``, with
    ``line_offset`` added to the row of the start position (index 2) so that
    callers see line numbers relative to the original source, not to a
    restarted tokenizer.

    :param readline: a ``readline``-style callable, as expected by
        ``tokenize.generate_tokens``.
    :param line_offset: initial number of lines to add to every start row.
    """
    def __init__(self, readline, line_offset=0):
        self.readline = readline
        self.gen = PushBackIterator(tokenize.generate_tokens(readline))
        self.line_offset = line_offset
        # Last raw token produced by the underlying generator; ``None``
        # until the first successful __next__ call (the original left it
        # unset, which crashed the TokenError path on the first token).
        self.current = None

    def push_last_back(self):
        # Re-queue the most recent token so it is yielded again.
        self.gen.push_back(self.current)

    def next(self):
        """ Python 2 Compatibility """
        return self.__next__()

    def __next__(self):
        try:
            self.current = next(self.gen)
        except tokenize.TokenError:
            # Unclosed parentheses at EOF. We just ignore this error and
            # stop iterating; the original fell through and re-yielded the
        # previous (stale) token.
            debug.warning('parentheses not closed error')
            raise StopIteration()
        except IndentationError:
            # tokenize raises this when the code is not indented as it
            # should be. Ignore the offending line and restart the
            # tokenizer. (A rather unlikely error; for normal code tokenize
            # is pretty tolerant.)
            # The original referenced ``self.start_pos``, an attribute that
            # never exists on this class -- use the last token's row instead.
            line = self.current[2][0] if self.current is not None else 0
            debug.warning('indentation error on line %s, ignoring it' %
                          (line))
            # Remember the starting line of the last position so subsequent
            # tokens keep their original line numbers.
            self.line_offset += line
            self.gen = PushBackIterator(tokenize.generate_tokens(
                self.readline))
            # The recursive call already applies line_offset; returning its
            # result directly avoids the original's double adjustment.
            return self.__next__()
        c = list(self.current)
        # Shift the start row by the accumulated offset. The original used
        # ``c[2] += self.line_offset, 0`` which *concatenates* the tuples,
        # yielding (row, col, offset, 0) instead of an adjusted position.
        c[2] = (c[2][0] + self.line_offset, c[2][1])
        return c
@contextlib.contextmanager
def scale_speed_settings(factor):
a = settings.max_executions