tokenize not needed anymore in _compatibility
@@ -28,7 +28,7 @@ All those classes are being generated by PyFuzzyParser, which takes python text
 as input and ignores just all the non-python stuff. Basically you could feed it
 a perl script, and it should still work (which means throw no error.
 """
-from _compatibility import (next, literal_eval, tokenize_func, StringIO,
+from _compatibility import (next, literal_eval, StringIO,
                             property, is_py3k, cleandoc, Python3Method)
 
 import tokenize
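
The removed tokenize_func came from the _compatibility module; the parser now calls the standard library's tokenize.generate_tokens directly. A minimal sketch of that direct call, assuming the dropped shim was essentially an alias for generate_tokens (the shim's real definition is not shown in this diff):

import tokenize
from io import StringIO

# generate_tokens takes a readline callable that returns text lines,
# e.g. the readline of an in-memory buffer, and yields 5-tuples of
# (token type, token string, start position, end position, source line).
buf = StringIO("def f(x):\n    return x + 1\n")
for tok_type, tok_str, start, end, line in tokenize.generate_tokens(buf.readline):
    print(tokenize.tok_name[tok_type], repr(tok_str), start, end)
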
@@ -1566,7 +1566,8 @@ class PyFuzzyParser(object):
             debug.warning('indentation error on line %s, ignoring it' %
                           (self.start_pos[0]))
             self._line_of_tokenize_restart = self.start_pos[0] + 1
-            self.gen = PushBackIterator(tokenize_func(self.buf.readline))
+            self.gen = PushBackIterator(tokenize.generate_tokens(
+                self.buf.readline))
             return self.next()
 
         type, tok, self._tokenize_start_pos, self._tokenize_end_pos, \
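
The token generator is wrapped in PushBackIterator so the parser can un-read a token it has already looked ahead at. A hypothetical minimal version of such a wrapper, for illustration only (jedi ships its own PushBackIterator, whose implementation may differ):

class PushBackIterator(object):
    """Iterator wrapper that allows values to be pushed back and re-read."""
    def __init__(self, iterator):
        self.iterator = iterator
        self.pushes = []

    def push_back(self, value):
        # un-read a value; it is yielded again on the next iteration step
        self.pushes.append(value)

    def __iter__(self):
        return self

    def __next__(self):
        if self.pushes:
            return self.pushes.pop()
        return next(self.iterator)

    next = __next__  # Python 2 iterator protocol
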
@@ -1593,7 +1594,8 @@ class PyFuzzyParser(object):
         :raises: IndentationError
         """
         self.buf = StringIO(self.code)
-        self.gen = PushBackIterator(tokenize_func(self.buf.readline))
+        self.gen = PushBackIterator(tokenize.generate_tokens(
+            self.buf.readline))
 
         extended_flow = ['else', 'elif', 'except', 'finally']
         statement_toks = ['{', '[', '(', '`']
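
Both hunks rebuild the generator the same way; the except branch in the first hunk shows why: once tokenize raises IndentationError, the old generator cannot be resumed, so a fresh one is created over the same buffer and the restart line is remembered. A standalone sketch of that recovery pattern (illustrative only; exact tokenize behaviour can vary slightly between Python versions):

import tokenize
from io import StringIO

code = "if True:\n    x = 1\n  y = 2\n"   # last line dedents to an unknown level
buf = StringIO(code)
gen = tokenize.generate_tokens(buf.readline)

tokens = []
try:
    for tok in gen:
        tokens.append(tok)
except IndentationError:
    # the old generator is unusable after the error; build a fresh one over
    # the same buffer and keep tokenizing from its current position
    gen = tokenize.generate_tokens(buf.readline)
    tokens.extend(gen)
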