Mirror of https://github.com/davidhalter/jedi.git
python 2.5 compatibility: works, but tests fail because certain things don't exist in 2.5 (like int.real)
_compatibility.py
@@ -57,10 +57,23 @@ except NameError:
     def unicode(s):
         return s.decode("utf-8")
 
 # Borrowed from Ned Batchelder
 # exec function
 if sys.hexversion > 0x03000000:
     def exec_function(source, global_map):
         exec(source, global_map)
 else:
     eval(compile("""def exec_function(source, global_map):
                         exec source in global_map """, 'blub', 'exec'))
+
+# tokenize function
+import tokenize
+if sys.hexversion > 0x03000000:
+    tokenize_func = tokenize.tokenize
+else:
+    tokenize_func = tokenize.generate_tokens
+
+# BytesIO (Python 2.5 has no io module)
+try:
+    from cStringIO import StringIO as BytesIO
+except ImportError:
+    from io import BytesIO
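The eval(compile(...)) construction above is the trick (credited to Ned Batchelder in the comment) for keeping the Python 2-only statement `exec source in global_map` out of sight of the Python 3 parser, which would otherwise reject it as a syntax error at import time; the string is only compiled when the 2.x branch actually runs. A minimal usage sketch of the three shims, assuming the _compatibility module from this commit is importable (illustrative, not part of the commit):

from _compatibility import exec_function, tokenize_func, BytesIO

# exec_function behaves the same on 2.x and 3.x.
ns = {}
exec_function("x = 21 * 2", ns)
assert ns['x'] == 42

# tokenize_func takes a readline callable; on 3.x it must yield bytes,
# which is why the parser wraps the source in a BytesIO (see parsing.py below).
for token in tokenize_func(BytesIO(b"1 + 1").readline):
    print(token)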
@@ -214,7 +214,7 @@ class Parser(CachedModule):
         # variables
         for name, value in stmts.items():
             if type(value).__name__ == 'file':
-                value = 'file'
+                value = 'open()'
             elif type(value).__name__ in ['int', 'bool', 'float',
                                           'dict', 'list', 'tuple']:
                 value = repr(value)
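The 'file' to 'open()' swap matters because the replacement string is evidently used as a source-level representation of the value: `file` is not a builtin name in Python 3, while open() parses under both interpreters. A rough standalone sketch of the idea, with an illustrative function name that is not jedi's API:

def value_to_source(value):
    # Render a runtime value as source text that evaluates to something similar.
    if type(value).__name__ == 'file':   # Python 2 file objects
        return 'open()'                  # portable spelling; 'file' is gone in 3.x
    elif type(value).__name__ in ['int', 'bool', 'float', 'dict', 'list', 'tuple']:
        return repr(value)
    return None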
parsing.py
@@ -31,10 +31,9 @@ Ignored statements:
 TODO take special care for future imports
 TODO check meta classes
 """
-from _compatibility import next, literal_eval
+from _compatibility import next, literal_eval, tokenize_func, BytesIO
 
 import tokenize
-from io import BytesIO
 import re
 
 import debug
@@ -1246,13 +1245,7 @@ class PyFuzzyParser(object):
         :raises: IndentationError
         """
         buf = BytesIO(self.code.encode())
-        #print(self.code.encode())
-        #print(list(tokenize.tokenize(BytesIO(self.code.encode()).readline))[:9])
-        import sys
-        if sys.hexversion > 0x03000000:
-            self.gen = tokenize.tokenize(buf.readline)
-        else:
-            self.gen = tokenize.generate_tokens(buf.readline)
+        self.gen = tokenize_func(buf.readline)
         self.currentscope = self.scope
 
         extended_flow = ['else', 'elif', 'except', 'finally']
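With the version check moved into _compatibility, the parser now builds its token stream in one line. For reference, a small standalone sketch of what that generator yields, since PyFuzzyParser consumes these 5-tuples directly (Python 3 spelling shown; the 2.x generate_tokens tuples have the same shape):

import tokenize
from io import BytesIO

code = "def f():\n    pass\n"
for typ, string, start, end, line in tokenize.tokenize(BytesIO(code.encode()).readline):
    print(tokenize.tok_name[typ], repr(string), start)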
@@ -2,13 +2,12 @@
 import os
 import sys
 import re
-from io import BytesIO
 import traceback
 
 os.chdir('../')
 sys.path.append('.')
 import functions
-from _compatibility import unicode
+from _compatibility import unicode, BytesIO
 
 #functions.set_debug_function(functions.debug.print_to_stdout)