mirror of
https://github.com/davidhalter/jedi.git
python 2.5 compatibility: works, but tests fail because certain things don't exist in 2.5 (like int.real)
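For context on the failure: `int.real` (together with `imag`, `numerator` and `denominator`) arrived with the PEP 3141 numeric tower in Python 2.6, so any test touching it cannot pass on 2.5:

    >>> (1).real   # 1 on Python 2.6+; AttributeError on Python 2.5
    1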
@@ -57,10 +57,23 @@ except NameError:
     def unicode(s):
         return s.decode("utf-8")
 
-# Borrowed from Ned Batchelder
+# exec function
 if sys.hexversion > 0x03000000:
     def exec_function(source, global_map):
         exec(source, global_map)
 else:
     eval(compile("""def exec_function(source, global_map):
                        exec source in global_map """, 'blub', 'exec'))
+
+# tokenize function
+import tokenize
+if sys.hexversion > 0x03000000:
+    tokenize_func = tokenize.tokenize
+else:
+    tokenize_func = tokenize.generate_tokens
+
+# BytesIO (Python 2.5 has no io module)
+try:
+    from cStringIO import StringIO as BytesIO
+except ImportError:
+    from io import BytesIO
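The `eval(compile(...))` indirection above is needed because `exec source in global_map` is Python 2 statement syntax that the Python 3 parser rejects at compile time; hiding it in a string defers the syntax check to runtime, where only 2.x ever compiles it. A quick check of the resulting helper, as a standalone sketch that assumes this `_compatibility` module is importable:

    from _compatibility import exec_function

    ns = {}
    exec_function("x = 40 + 2", ns)  # same spelling on 2.x and 3.x
    print(ns['x'])                   # 42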
@@ -214,7 +214,7 @@ class Parser(CachedModule):
         # variables
         for name, value in stmts.items():
             if type(value).__name__ == 'file':
-                value = 'file'
+                value = 'open()'
             elif type(value).__name__ in ['int', 'bool', 'float',
                                           'dict', 'list', 'tuple']:
                 value = repr(value)
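The `'file'` to `'open()'` switch matters because Python 3 dropped the `file` builtin: emitting `open()` keeps the generated representation valid on both versions. A standalone sketch of the substitution (names here are illustrative, not taken from the repo):

    def value_to_source(value):
        # Map a runtime value to a source-level representation by type name.
        type_name = type(value).__name__
        if type_name == 'file':
            return 'open()'  # Python 3 has no `file` builtin
        elif type_name in ['int', 'bool', 'float', 'dict', 'list', 'tuple']:
            return repr(value)
        return value

    print(value_to_source(42))      # '42'
    print(value_to_source([1, 2]))  # '[1, 2]'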
parsing.py (11 changed lines)
@@ -31,10 +31,9 @@ Ignored statements:
 TODO take special care for future imports
 TODO check meta classes
 """
-from _compatibility import next, literal_eval
+from _compatibility import next, literal_eval, tokenize_func, BytesIO
 
 import tokenize
-from io import BytesIO
 import re
 
 import debug
@@ -1246,13 +1245,7 @@ class PyFuzzyParser(object):
         :raises: IndentationError
         """
         buf = BytesIO(self.code.encode())
-        #print(self.code.encode())
-        #print(list(tokenize.tokenize(BytesIO(self.code.encode()).readline))[:9])
-        import sys
-        if sys.hexversion > 0x03000000:
-            self.gen = tokenize.tokenize(buf.readline)
-        else:
-            self.gen = tokenize.generate_tokens(buf.readline)
+        self.gen = tokenize_func(buf.readline)
         self.currentscope = self.scope
 
         extended_flow = ['else', 'elif', 'except', 'finally']
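With the `tokenize_func` alias, the parser no longer branches on `sys.hexversion` at the call site: Python 3's `tokenize.tokenize` reads bytes (hence the `BytesIO(self.code.encode())` buffer), while Python 2's `generate_tokens` reads `str`, which the `cStringIO`-backed `BytesIO` also satisfies since `str` is bytes on 2.x. Roughly, as a standalone sketch assuming the `_compatibility` aliases above:

    from _compatibility import tokenize_func, BytesIO

    buf = BytesIO("x = 1\n".encode())
    for tok in tokenize_func(buf.readline):
        print(tok)  # token tuples such as (NAME, 'x', ...)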
@@ -2,13 +2,12 @@
 import os
 import sys
 import re
-from io import BytesIO
 import traceback
 
 os.chdir('../')
 sys.path.append('.')
 import functions
-from _compatibility import unicode
+from _compatibility import unicode, BytesIO
 
 #functions.set_debug_function(functions.debug.print_to_stdout)
 