Merge branch 'dev' of github.com:davidhalter/jedi into dev
@@ -42,8 +42,8 @@ import sys
 # imports and circular imports... Just avoid it:
 sys.path.insert(0, __path__[0])
 
-from .api import Script, Interpreter, NotFoundError, set_debug_function, \
-    preload_module, defined_names
+from .api import Script, Interpreter, NotFoundError, set_debug_function
+from .api import preload_module, defined_names
 from . import settings
 
 sys.path.pop(0)
@@ -33,7 +33,6 @@ import builtin
 
 class NotFoundError(Exception):
     """A custom error to avoid catching the wrong exceptions."""
-    pass
 
 
 class Script(object):
@@ -376,7 +376,6 @@ class Completion(BaseDefinition):
         warnings.warn("Use name instead.", DeprecationWarning)
         return self.name
 
-
     @property
     def description(self):
         """
@@ -321,7 +321,7 @@ def _generate_code(scope, mixin_funcs={}, depth=0):
                 file_type = io.TextIOWrapper
             else:
                 file_type = types.FileType
-            if type(value) == file_type:
+            if isinstance(value, file_type):
                 value = 'open()'
             elif name == 'None':
                 value = ''
@@ -336,13 +336,6 @@ def _generate_code(scope, mixin_funcs={}, depth=0):
                 value = '%s.%s' % (mod, value)
             code += '%s = %s\n' % (name, value)
 
-    if depth == 0:
-        #with open('writeout.py', 'w') as f:
-        #    f.write(code)
-        #import sys
-        #sys.stdout.write(code)
-        #exit()
-        pass
     return code
 
 
@@ -148,7 +148,7 @@ class NoErrorTokenizer(object):
                              tokenize.NEWLINE, tokenize.DEDENT) \
                 and c[0] not in (tokenize.COMMENT, tokenize.INDENT,
                                  tokenize.NL, tokenize.NEWLINE, tokenize.DEDENT):
-            #print c, tokenize.tok_name[c[0]]
+            # print c, tokenize.tok_name[c[0]]
 
             tok = c[1]
             indent = c[2][1]
@@ -62,4 +62,4 @@ def print_to_stdout(level, str_out):
     print(col + str_out + Fore.RESET)
 
 
-#debug_function = print_to_stdout
+# debug_function = print_to_stdout
@@ -37,7 +37,7 @@ REST_ROLE_PATTERN = re.compile(r':[^`]+:`([^`]+)`')
 @cache.memoize_default()
 def follow_param(param):
     func = param.parent_function
-    #print func, param, param.parent_function
+    # print func, param, param.parent_function
     param_str = _search_param_in_docstr(func.docstr, str(param.get_name()))
     user_position = (1, 0)
 
@@ -403,7 +403,9 @@ class ArrayInstance(pr.Base):
                 if self.var_args.start_pos != array.var_args.start_pos:
                     items += array.iter_content()
                 else:
-                    debug.warning('ArrayInstance recursion', self.var_args)
+                    debug.warning(
+                        'ArrayInstance recursion',
+                        self.var_args)
                     continue
             items += evaluate.get_iterator_types([typ])
 
@@ -524,7 +526,7 @@ def check_statement_information(stmt, search_name):
     # this might be removed if we analyze and, etc
    assert len(commands) == 1
    call = commands[0]
-    assert type(call) == pr.Call and str(call.name) == 'isinstance'
+    assert type(call) is pr.Call and str(call.name) == 'isinstance'
    assert bool(call.execution)

    # isinstance check
@@ -170,8 +170,7 @@ def get_names_of_scope(scope, position=None, star_search=True,
         if not (scope != non_flow and scope.isinstance(pr.Class)
                 or scope.isinstance(pr.Flow)
                 or scope.isinstance(er.Instance)
-                and non_flow.isinstance(er.Function)
-                ):
+                and non_flow.isinstance(er.Function)):
             try:
                 if isinstance(scope, er.Instance):
                     for g in scope.scope_generator():
@@ -53,7 +53,9 @@ class Executable(pr.IsScope):
 
 
 class Instance(use_metaclass(cache.CachedMetaClass, Executable)):
+
     """ This class is used to evaluate instances. """
+
     def __init__(self, base, var_args=()):
         super(Instance, self).__init__(base, var_args)
         if str(base.name) in ['list', 'set'] \
@@ -641,7 +643,7 @@ class Execution(Executable):
                 call = key_stmt.get_commands()[0]
                 if isinstance(call, pr.Name):
                     yield call, value_stmt
-                elif type(call) == pr.Call:
+                elif type(call) is pr.Call:
                     yield call.name, value_stmt
             # Normal arguments (including key arguments).
             else:
@@ -328,7 +328,7 @@ class FastParser(use_metaclass(CachedFastParser)):
                 nodes += self.current_node.old_children
 
             # check if code_part has already been parsed
-            #print '#'*45,line_offset, p and p.end_pos, '\n', code_part
+            # print '#'*45,line_offset, p and p.end_pos, '\n', code_part
             p, node = self._get_parser(code_part, code[start:],
                                        line_offset, nodes, not is_first)
 
@@ -365,7 +365,7 @@ class FastParser(use_metaclass(CachedFastParser)):
 
                 is_first = False
             else:
-                #print '#'*45, line_offset, p.end_pos, 'theheck\n', code_part
+                # print '#'*45, line_offset, p.end_pos, 'theheck\n', code_part
                 pass
 
             line_offset += lines
@@ -378,7 +378,7 @@ class FastParser(use_metaclass(CachedFastParser)):
 
         self.module.end_pos = self.parsers[-1].end_pos
 
-        #print(self.parsers[0].module.get_code())
+        # print(self.parsers[0].module.get_code())
         del code
 
     def _get_parser(self, code, parser_code, line_offset, nodes, no_docstr):
@@ -136,7 +136,7 @@ class ModuleWithCursor(Module):
             self._line_length = len(line)
             line = line + '\n'
             # add lines with a backslash at the end
-            while 1:
+            while True:
                 self._line_temp -= 1
                 last_line = self.get_line(self._line_temp)
                 if last_line and last_line[-1] == '\\':
@@ -163,7 +163,7 @@ class ModuleWithCursor(Module):
         last_type = None
         try:
             for token_type, tok, start, end, line in gen:
-                #print 'tok', token_type, tok, force_point
+                # print 'tok', token_type, tok, force_point
                 if last_type == token_type == tokenize.NAME:
                     string += ' '
 
@@ -348,7 +348,7 @@ class Parser(object):
                     or tok in not_first_break and not tok_list
                     or tok in breaks and level <= 0):
                 try:
-                    #print 'parse_stmt', tok, tokenize.tok_name[token_type]
+                    # print 'parse_stmt', tok, tokenize.tok_name[token_type]
                     tok_list.append(self.current + (self.start_pos,))
                     if tok == 'as':
                         token_type, tok = self.next()
@@ -387,7 +387,7 @@ class Parser(object):
 
         if not tok_list:
             return None, tok
-        #print 'new_stat', set_vars, used_vars
+        # print 'new_stat', set_vars, used_vars
        if self.freshscope and not self.no_docstr and len(tok_list) == 1 \
                 and self.last_token[0] == tokenize.STRING:
             self._scope.add_docstr(self.last_token[1])
@@ -469,7 +469,7 @@ class Parser(object):
         # This iterator stuff is not intentional. It grew historically.
         for token_type, tok in self.iterator:
             self.module.temp_used_names = []
-            #debug.dbg('main: tok=[%s] type=[%s] indent=[%s]'\
+            # debug.dbg('main: tok=[%s] type=[%s] indent=[%s]'\
             # % (tok, tokenize.tok_name[token_type], start_position[0]))
 
             while token_type == tokenize.DEDENT and self._scope != self.module:
@@ -532,7 +532,7 @@ class Parser(object):
                 defunct = False
                 # take care for relative imports
                 relative_count = 0
-                while 1:
+                while True:
                     token_type, tok = self.next()
                     if tok != '.':
                         break
@@ -561,7 +561,7 @@ class Parser(object):
                     self._check_user_stmt(i)
                     self._scope.add_import(i)
                 self.freshscope = False
-            #loops
+            # loops
             elif tok == 'for':
                 set_stmt, tok = self._parse_statement(added_breaks=['in'])
                 if tok == 'in':
@@ -24,7 +24,7 @@ class RecursionDecorator(object):
         self.reset()
 
     def __call__(self, stmt, *args, **kwargs):
-        #print stmt, len(self.node_statements())
+        # print stmt, len(self.node_statements())
         if self.push_stmt(stmt):
             return []
         else:
@@ -208,7 +208,8 @@ def generate_tokens(readline):
                     yield TokenInfo(NL, line[nl_pos:],
                                     (lnum, nl_pos), (lnum, len(line)), line)
                 else:
-                    yield TokenInfo((NL, COMMENT)[line[pos] == '#'], line[pos:],
+                    yield TokenInfo(
+                        (NL, COMMENT)[line[pos] == '#'], line[pos:],
                         (lnum, pos), (lnum, len(line)), line)
                 continue
 
@@ -7,7 +7,7 @@ The twisted equivalent of this module is ``twisted.trial._synctest``.
 """
 from __future__ import absolute_import
 
-import unittest  # this is stdlib unittest, but jedi gets the local one
+import unittest
 
 
 class Assertions(unittest.TestCase):