tokenize not needed anymore in _compatibility
@@ -68,11 +68,6 @@ else:
     eval(compile("""def exec_function(source, global_map):
                         exec source in global_map """, 'blub', 'exec'))
 
-# tokenize function
-# TODO remove this, not used anymore
-import tokenize
-tokenize_func = tokenize.generate_tokens
-
 # StringIO (Python 2.5 has no io module), so use io only for py3k
 try:
     from StringIO import StringIO
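For context, the `eval(compile(...))` construct kept in this hunk is the usual trick for defining an exec wrapper that both Python 2 and Python 3 can parse: the Python 2 `exec source in global_map` statement would be a SyntaxError to the Python 3 parser if written literally, so it is compiled at runtime from a string. A minimal sketch of the full idiom (the `is_py3k` branch is assumed from the surrounding module, not shown in this diff):

import sys

is_py3k = sys.version_info[0] >= 3

if is_py3k:
    # Python 3: exec() is a function, so this parses everywhere.
    def exec_function(source, global_map):
        exec(source, global_map)
else:
    # Python 2: the exec statement below would break the Python 3 parser,
    # so it is compiled at runtime from a string instead of appearing
    # literally in the module body.
    eval(compile("""def exec_function(source, global_map):
                        exec source in global_map """, 'blub', 'exec'))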
@@ -28,7 +28,7 @@ All those classes are being generated by PyFuzzyParser, which takes python text
 as input and ignores just all the non-python stuff. Basically you could feed it
 a perl script, and it should still work (which means throw no error.
 """
-from _compatibility import (next, literal_eval, tokenize_func, StringIO,
+from _compatibility import (next, literal_eval, StringIO,
                             property, is_py3k, cleandoc, Python3Method)
 
 import tokenize
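The removed `tokenize_func` was only an alias for `tokenize.generate_tokens`, which takes a `readline` callable and yields 5-tuples of (token type, token string, start position, end position, physical line). A minimal sketch of the call the parser now makes directly, in Python 3 spelling where `StringIO` lives in `io`:

import tokenize
from io import StringIO

buf = StringIO("x = 1 + 2\n")
# generate_tokens pulls lines from the readline callable one at a time.
for typ, tok, start, end, line in tokenize.generate_tokens(buf.readline):
    print(tokenize.tok_name[typ], repr(tok), start, end)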
@@ -1566,7 +1566,8 @@ class PyFuzzyParser(object):
             debug.warning('indentation error on line %s, ignoring it' %
                             (self.start_pos[0]))
             self._line_of_tokenize_restart = self.start_pos[0] + 1
-            self.gen = PushBackIterator(tokenize_func(self.buf.readline))
+            self.gen = PushBackIterator(tokenize.generate_tokens(
+                self.buf.readline))
             return self.next()
 
         type, tok, self._tokenize_start_pos, self._tokenize_end_pos, \
@@ -1593,7 +1594,8 @@ class PyFuzzyParser(object):
         :raises: IndentationError
         """
         self.buf = StringIO(self.code)
-        self.gen = PushBackIterator(tokenize_func(self.buf.readline))
+        self.gen = PushBackIterator(tokenize.generate_tokens(
+            self.buf.readline))
 
         extended_flow = ['else', 'elif', 'except', 'finally']
         statement_toks = ['{', '[', '(', '`']
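`PushBackIterator` is jedi's own helper and is not touched by this commit; the name suggests a wrapper that lets the parser un-read a token after looking ahead. A hypothetical sketch of such a wrapper, purely to illustrate how it would sit around the token generator (the real class lives elsewhere in this repository and may differ):

class PushBackIterator(object):
    """Iterator wrapper with a stack for pushing items back (hypothetical)."""
    def __init__(self, iterator):
        self._iterator = iterator
        self._pushed = []

    def push_back(self, item):
        # Store the item so it is returned before the underlying iterator.
        self._pushed.append(item)

    def __iter__(self):
        return self

    def __next__(self):
        if self._pushed:
            return self._pushed.pop()
        return next(self._iterator)

    next = __next__  # Python 2 spelling, matching the era of this code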