Mirror of https://github.com/davidhalter/jedi.git (synced 2025-12-06 05:54:25 +08:00)
Merge branch 'dev' of github.com:davidhalter/jedi into dev
@@ -42,8 +42,8 @@ import sys
 # imports and circular imports... Just avoid it:
 sys.path.insert(0, __path__[0])

-from .api import Script, Interpreter, NotFoundError, set_debug_function, \
-preload_module, defined_names
+from .api import Script, Interpreter, NotFoundError, set_debug_function
+from .api import preload_module, defined_names
 from . import settings

 sys.path.pop(0)
@@ -18,7 +18,7 @@ is_py33 = sys.hexversion >= 0x03030000
 def find_module_py33(string, path=None):
 loader = importlib.machinery.PathFinder.find_module(string, path)

-if loader is None and path is None: # Fallback to find builtins
+if loader is None and path is None: # Fallback to find builtins
 loader = importlib.find_loader(string)

 if loader is None:
33 jedi/api.py
@@ -33,7 +33,6 @@ import builtin

 class NotFoundError(Exception):
 """A custom error to avoid catching the wrong exceptions."""
 pass
-

 class Script(object):
@@ -68,7 +67,7 @@ class Script(object):
 source_path, source=self.source, position=self.pos)
 self._source_path = source_path
 self.source_path = None if source_path is None \
-else os.path.abspath(source_path)
+else os.path.abspath(source_path)
 debug.speed('init')

 def __repr__(self):
@@ -100,7 +99,7 @@ class Script(object):
 except NotFoundError:
 scopes = []
 scope_generator = evaluate.get_names_of_scope(
-self._parser.user_scope, self.pos)
+self._parser.user_scope, self.pos)
 completions = []
 for scope, name_list in scope_generator:
 for c in name_list:
@@ -137,14 +136,14 @@ class Script(object):
 bs = builtin.Builtin.scope
 if isinstance(u, pr.Import):
 if (u.relative_count > 0 or u.from_ns) and not re.search(
-r'(,|from)\s*$|import\s+$', completion_line):
+r'(,|from)\s*$|import\s+$', completion_line):
 completions += ((k, bs) for k
-in keywords.get_keywords('import'))
+in keywords.get_keywords('import'))

 if not path and not isinstance(u, pr.Import):
 # add keywords
 completions += ((k, bs) for k in keywords.get_keywords(
-all=True))
+all=True))

 needs_dot = not dot and path

@@ -156,9 +155,9 @@ class Script(object):
 and n.lower().startswith(like.lower()) \
 or n.startswith(like):
 if not evaluate.filter_private_variable(s,
-self._parser.user_stmt, n):
+self._parser.user_stmt, n):
 new = api_classes.Completion(c, needs_dot,
-len(like), s)
+len(like), s)
 k = (new.name, new.complete) # key
 if k in comp_dct and settings.no_completion_duplicates:
 comp_dct[k]._same_name_completions.append(new)
@@ -329,7 +328,7 @@ class Script(object):
 scopes |= keywords.get_keywords(string=goto_path, pos=self.pos)

 d = set([api_classes.Definition(s) for s in scopes
-if not isinstance(s, imports.ImportPath._GlobalNamespace)])
+if not isinstance(s, imports.ImportPath._GlobalNamespace)])
 return self._sorted_defs(d)

 @api_classes._clear_caches_after_call
@@ -343,7 +342,7 @@ class Script(object):
 :rtype: list of :class:`api_classes.Definition`
 """
 d = [api_classes.Definition(d) for d in set(self._goto()[0])
-if not isinstance(d, imports.ImportPath._GlobalNamespace)]
+if not isinstance(d, imports.ImportPath._GlobalNamespace)]
 return self._sorted_defs(d)

 def _goto(self, add_import_name=False):
@@ -360,7 +359,7 @@ class Script(object):
 definitions = set(defs)
 for d in defs:
 if isinstance(d.parent, pr.Import) \
-and d.start_pos == (0, 0):
+and d.start_pos == (0, 0):
 i = imports.ImportPath(d.parent).follow(is_goto=True)
 definitions.remove(d)
 definitions |= follow_inexistent_imports(i)
@@ -414,14 +413,14 @@ class Script(object):
|
||||
user_stmt = self._parser.user_stmt
|
||||
definitions, search_name = self._goto(add_import_name=True)
|
||||
if isinstance(user_stmt, pr.Statement) \
|
||||
and self.pos < user_stmt.get_commands()[0].start_pos:
|
||||
and self.pos < user_stmt.get_commands()[0].start_pos:
|
||||
# the search_name might be before `=`
|
||||
definitions = [v for v in user_stmt.set_vars
|
||||
if unicode(v.names[-1]) == search_name]
|
||||
if unicode(v.names[-1]) == search_name]
|
||||
if not isinstance(user_stmt, pr.Import):
|
||||
# import case is looked at with add_import_name option
|
||||
definitions = dynamic.usages_add_import_modules(definitions,
|
||||
search_name)
|
||||
search_name)
|
||||
|
||||
module = set([d.get_parent_until() for d in definitions])
|
||||
module.add(self._parser.module)
|
||||
@@ -472,7 +471,7 @@ class Script(object):
|
||||
user_stmt = self._parser.user_stmt
|
||||
if user_stmt is not None and isinstance(user_stmt, pr.Statement):
|
||||
call, index, _ = helpers.search_function_definition(
|
||||
user_stmt, self.pos)
|
||||
user_stmt, self.pos)
|
||||
debug.speed('func_call parsed')
|
||||
return call, index
|
||||
|
||||
@@ -493,7 +492,7 @@ class Script(object):
|
||||
kill_count += 1
|
||||
|
||||
i = imports.ImportPath(user_stmt, is_like_search,
|
||||
kill_count=kill_count, direct_resolve=True)
|
||||
kill_count=kill_count, direct_resolve=True)
|
||||
return i, cur_name_part
|
||||
|
||||
def _get_completion_parts(self, path):
|
||||
@@ -582,7 +581,7 @@ def preload_module(*modules):
|
||||
|
||||
|
||||
def set_debug_function(func_cb=debug.print_to_stdout, warnings=True,
|
||||
notices=True, speed=True):
|
||||
notices=True, speed=True):
|
||||
"""
|
||||
Define a callback debug function to get all the debug messages.
|
||||
|
||||
|
||||
@@ -344,7 +344,7 @@ class Completion(BaseDefinition):
|
||||
dot = '.' if self._needs_dot else ''
|
||||
append = ''
|
||||
if settings.add_bracket_after_function \
|
||||
and self.type == 'Function':
|
||||
and self.type == 'Function':
|
||||
append = '('
|
||||
|
||||
if settings.add_dot_after_module:
|
||||
@@ -376,7 +376,6 @@ class Completion(BaseDefinition):
|
||||
warnings.warn("Use name instead.", DeprecationWarning)
|
||||
return self.name
|
||||
|
||||
|
||||
@property
|
||||
def description(self):
|
||||
"""
|
||||
@@ -414,7 +413,7 @@ class Completion(BaseDefinition):
|
||||
return [self]
|
||||
|
||||
self._followed_definitions = \
|
||||
[BaseDefinition(d, d.start_pos) for d in defs]
|
||||
[BaseDefinition(d, d.start_pos) for d in defs]
|
||||
_clear_caches()
|
||||
|
||||
return self._followed_definitions
|
||||
@@ -504,7 +503,7 @@ class Definition(BaseDefinition):
|
||||
`module.class.function` path.
|
||||
"""
|
||||
if self.module_path.endswith('.py') \
|
||||
and not isinstance(self._definition, pr.Module):
|
||||
and not isinstance(self._definition, pr.Module):
|
||||
position = '@%s' % (self.line)
|
||||
else:
|
||||
# is a builtin or module
|
||||
@@ -601,4 +600,4 @@ class CallDef(object):
|
||||
|
||||
def __repr__(self):
|
||||
return '<%s: %s index %s>' % (type(self).__name__, self._executable,
|
||||
self.index)
|
||||
self.index)
|
||||
|
||||
@@ -233,7 +233,7 @@ def _generate_code(scope, mixin_funcs={}, depth=0):
|
||||
if is_in_base_classes(scope, n, exe):
|
||||
continue
|
||||
if inspect.isbuiltin(exe) or inspect.ismethod(exe) \
|
||||
or inspect.ismethoddescriptor(exe):
|
||||
or inspect.ismethoddescriptor(exe):
|
||||
funcs[n] = exe
|
||||
elif inspect.isclass(exe) or inspect.ismodule(exe):
|
||||
classes[n] = exe
|
||||
@@ -254,15 +254,15 @@ def _generate_code(scope, mixin_funcs={}, depth=0):
|
||||
code += get_doc(scope)
|
||||
|
||||
names = set(dir(scope)) - set(['__file__', '__name__', '__doc__',
|
||||
'__path__', '__package__']) \
|
||||
| set(['mro'])
|
||||
'__path__', '__package__']) \
|
||||
| set(['mro'])
|
||||
|
||||
classes, funcs, stmts, members = get_scope_objects(names)
|
||||
|
||||
# classes
|
||||
for name, cl in classes.items():
|
||||
bases = (c.__name__ for c in cl.__bases__) if inspect.isclass(cl) \
|
||||
else []
|
||||
else []
|
||||
code += 'class %s(%s):\n' % (name, ','.join(bases))
|
||||
if depth == 0:
|
||||
try:
|
||||
@@ -321,7 +321,7 @@ def _generate_code(scope, mixin_funcs={}, depth=0):
|
||||
file_type = io.TextIOWrapper
|
||||
else:
|
||||
file_type = types.FileType
|
||||
if type(value) == file_type:
|
||||
if isinstance(value, file_type):
|
||||
value = 'open()'
|
||||
elif name == 'None':
|
||||
value = ''
|
||||
@@ -336,13 +336,6 @@ def _generate_code(scope, mixin_funcs={}, depth=0):
|
||||
value = '%s.%s' % (mod, value)
|
||||
code += '%s = %s\n' % (name, value)
|
||||
|
||||
if depth == 0:
|
||||
#with open('writeout.py', 'w') as f:
|
||||
# f.write(code)
|
||||
#import sys
|
||||
#sys.stdout.write(code)
|
||||
#exit()
|
||||
pass
|
||||
return code
|
||||
|
||||
|
||||
@@ -378,7 +371,7 @@ def _parse_function_doc(func):
|
||||
return ','.join(args)
|
||||
while True:
|
||||
param_str, changes = re.subn(r' ?\[([^\[\]]+)\]',
|
||||
change_options, param_str)
|
||||
change_options, param_str)
|
||||
if changes == 0:
|
||||
break
|
||||
except (ValueError, AttributeError):
|
||||
|
||||
@@ -147,8 +147,8 @@ class NoErrorTokenizer(object):
|
||||
and self.previous[0] in (tokenize.INDENT, tokenize.NL, None,
|
||||
tokenize.NEWLINE, tokenize.DEDENT) \
|
||||
and c[0] not in (tokenize.COMMENT, tokenize.INDENT,
|
||||
tokenize.NL, tokenize.NEWLINE, tokenize.DEDENT):
|
||||
#print c, tokenize.tok_name[c[0]]
|
||||
tokenize.NL, tokenize.NEWLINE, tokenize.DEDENT):
|
||||
# print c, tokenize.tok_name[c[0]]
|
||||
|
||||
tok = c[1]
|
||||
indent = c[2][1]
|
||||
|
||||
@@ -62,4 +62,4 @@ def print_to_stdout(level, str_out):
|
||||
print(col + str_out + Fore.RESET)
|
||||
|
||||
|
||||
#debug_function = print_to_stdout
|
||||
# debug_function = print_to_stdout
|
||||
|
||||
@@ -27,8 +27,8 @@ DOCSTRING_PARAM_PATTERNS = [
|
||||
]
|
||||
|
||||
DOCSTRING_RETURN_PATTERNS = [
|
||||
re.compile(r'\s*:rtype:\s*([^\n]+)', re.M), # Sphinx
|
||||
re.compile(r'\s*@rtype:\s*([^\n]+)', re.M), # Epydoc
|
||||
re.compile(r'\s*:rtype:\s*([^\n]+)', re.M), # Sphinx
|
||||
re.compile(r'\s*@rtype:\s*([^\n]+)', re.M), # Epydoc
|
||||
]
|
||||
|
||||
REST_ROLE_PATTERN = re.compile(r':[^`]+:`([^`]+)`')
|
||||
@@ -37,7 +37,7 @@ REST_ROLE_PATTERN = re.compile(r':[^`]+:`([^`]+)`')
|
||||
@cache.memoize_default()
|
||||
def follow_param(param):
|
||||
func = param.parent_function
|
||||
#print func, param, param.parent_function
|
||||
# print func, param, param.parent_function
|
||||
param_str = _search_param_in_docstr(func.docstr, str(param.get_name()))
|
||||
user_position = (1, 0)
|
||||
|
||||
@@ -52,7 +52,7 @@ def follow_param(param):
|
||||
user_position = (2, 0)
|
||||
|
||||
p = parsing.Parser(param_str, None, user_position,
|
||||
no_docstr=True)
|
||||
no_docstr=True)
|
||||
return evaluate.follow_statement(p.user_stmt)
|
||||
return []
|
||||
|
||||
|
||||
@@ -334,7 +334,7 @@ def _check_array_additions(compare_array, module, is_list):
|
||||
settings.dynamic_params_for_other_modules = False
|
||||
|
||||
search_names = ['append', 'extend', 'insert'] if is_list else \
|
||||
['add', 'update']
|
||||
['add', 'update']
|
||||
comp_arr_parent = get_execution_parent(compare_array, er.Execution)
|
||||
|
||||
possible_stmts = []
|
||||
@@ -351,7 +351,7 @@ def _check_array_additions(compare_array, module, is_list):
|
||||
# literally copy the contents of a function.
|
||||
if isinstance(comp_arr_parent, er.Execution):
|
||||
stmt = comp_arr_parent. \
|
||||
get_statement_for_position(stmt.start_pos)
|
||||
get_statement_for_position(stmt.start_pos)
|
||||
if stmt is None:
|
||||
continue
|
||||
# InstanceElements are special, because they don't get copied,
|
||||
@@ -403,7 +403,9 @@ class ArrayInstance(pr.Base):
|
||||
if self.var_args.start_pos != array.var_args.start_pos:
|
||||
items += array.iter_content()
|
||||
else:
|
||||
debug.warning('ArrayInstance recursion', self.var_args)
|
||||
debug.warning(
|
||||
'ArrayInstance recursion',
|
||||
self.var_args)
|
||||
continue
|
||||
items += evaluate.get_iterator_types([typ])
|
||||
|
||||
@@ -472,7 +474,7 @@ def usages(definitions, search_name, mods):
|
||||
|
||||
for used_count, name_part in imps:
|
||||
i = imports.ImportPath(stmt, kill_count=count - used_count,
|
||||
direct_resolve=True)
|
||||
direct_resolve=True)
|
||||
f = i.follow(is_goto=True)
|
||||
if set(f) & set(definitions):
|
||||
names.append(api_classes.Usage(name_part, stmt))
|
||||
@@ -524,7 +526,7 @@ def check_statement_information(stmt, search_name):
|
||||
# this might be removed if we analyze and, etc
|
||||
assert len(commands) == 1
|
||||
call = commands[0]
|
||||
assert type(call) == pr.Call and str(call.name) == 'isinstance'
|
||||
assert type(call) is pr.Call and str(call.name) == 'isinstance'
|
||||
assert bool(call.execution)
|
||||
|
||||
# isinstance check
|
||||
|
||||
@@ -105,8 +105,8 @@ def get_defined_names_for_position(scope, position=None, start_scope=None):
|
||||
# Instances have special rules, always return all the possible completions,
|
||||
# because class variables are always valid and the `self.` variables, too.
|
||||
if (not position or isinstance(scope, (er.Array, er.Instance))
|
||||
or start_scope != scope
|
||||
and isinstance(start_scope, (pr.Function, er.Execution))):
|
||||
or start_scope != scope
|
||||
and isinstance(start_scope, (pr.Function, er.Execution))):
|
||||
return names
|
||||
names_new = []
|
||||
for n in names:
|
||||
@@ -116,7 +116,7 @@ def get_defined_names_for_position(scope, position=None, start_scope=None):
|
||||
|
||||
|
||||
def get_names_of_scope(scope, position=None, star_search=True,
|
||||
include_builtin=True):
|
||||
include_builtin=True):
|
||||
"""
|
||||
Get all completions (names) possible for the current scope.
|
||||
The star search option is only here to provide an optimization. Otherwise
|
||||
@@ -168,17 +168,16 @@ def get_names_of_scope(scope, position=None, star_search=True,
|
||||
# Ignore the Flows, because the classes and functions care for that.
|
||||
# InstanceElement of Class is ignored, if it is not the start scope.
|
||||
if not (scope != non_flow and scope.isinstance(pr.Class)
|
||||
or scope.isinstance(pr.Flow)
|
||||
or scope.isinstance(er.Instance)
|
||||
and non_flow.isinstance(er.Function)
|
||||
):
|
||||
or scope.isinstance(pr.Flow)
|
||||
or scope.isinstance(er.Instance)
|
||||
and non_flow.isinstance(er.Function)):
|
||||
try:
|
||||
if isinstance(scope, er.Instance):
|
||||
for g in scope.scope_generator():
|
||||
yield g
|
||||
else:
|
||||
yield scope, get_defined_names_for_position(scope,
|
||||
position, in_func_scope)
|
||||
position, in_func_scope)
|
||||
except StopIteration:
|
||||
reraise(common.MultiLevelStopIteration, sys.exc_info()[2])
|
||||
if scope.isinstance(pr.ForFlow) and scope.is_list_comp:
|
||||
@@ -204,7 +203,7 @@ def get_names_of_scope(scope, position=None, star_search=True,
|
||||
|
||||
|
||||
def find_name(scope, name_str, position=None, search_global=False,
|
||||
is_goto=False):
|
||||
is_goto=False):
|
||||
"""
|
||||
This is the search function. The most important part to debug.
|
||||
`remove_statements` and `filter_statements` really are the core part of
|
||||
@@ -273,8 +272,8 @@ def find_name(scope, name_str, position=None, search_global=False,
|
||||
if check_instance is not None:
|
||||
# class renames
|
||||
add = [er.InstanceElement(check_instance, a, True)
|
||||
if isinstance(a, (er.Function, pr.Function))
|
||||
else a for a in add]
|
||||
if isinstance(a, (er.Function, pr.Function))
|
||||
else a for a in add]
|
||||
res_new += add
|
||||
else:
|
||||
if isinstance(r, pr.Class):
|
||||
@@ -349,7 +348,7 @@ def find_name(scope, name_str, position=None, search_global=False,
|
||||
# Compare start_pos, because names may be different
|
||||
# because of executions.
|
||||
if c.name.start_pos == name.start_pos \
|
||||
and c.execution:
|
||||
and c.execution:
|
||||
return True
|
||||
return False
|
||||
|
||||
@@ -368,7 +367,7 @@ def find_name(scope, name_str, position=None, search_global=False,
|
||||
|
||||
# TODO this makes self variables non-breakable. wanted?
|
||||
if isinstance(name, er.InstanceElement) \
|
||||
and not name.is_class_var:
|
||||
and not name.is_class_var:
|
||||
no_break_scope = True
|
||||
|
||||
result.append(par)
|
||||
@@ -387,7 +386,7 @@ def find_name(scope, name_str, position=None, search_global=False,
|
||||
for name in sorted(name_list, key=comparison_func, reverse=True):
|
||||
p = name.parent.parent if name.parent else None
|
||||
if isinstance(p, er.InstanceElement) \
|
||||
and isinstance(p.var, pr.Class):
|
||||
and isinstance(p.var, pr.Class):
|
||||
p = p.var
|
||||
if name_str == name.get_code() and p not in break_scopes:
|
||||
r, no_break_scope = process(name)
|
||||
@@ -410,7 +409,7 @@ def find_name(scope, name_str, position=None, search_global=False,
|
||||
while flow_scope:
|
||||
# TODO check if result is in scope -> no evaluation necessary
|
||||
n = dynamic.check_flow_information(flow_scope, name_str,
|
||||
position)
|
||||
position)
|
||||
if n:
|
||||
result = n
|
||||
break
|
||||
@@ -428,7 +427,7 @@ def find_name(scope, name_str, position=None, search_global=False,
|
||||
# __getattr__ / __getattribute__
|
||||
result += check_getattr(nscope, name_str)
|
||||
debug.dbg('sfn filter "%s" in (%s-%s): %s@%s' % (name_str, scope,
|
||||
nscope, result, position))
|
||||
nscope, result, position))
|
||||
return result
|
||||
|
||||
def descriptor_check(result):
|
||||
@@ -436,7 +435,7 @@ def find_name(scope, name_str, position=None, search_global=False,
|
||||
res_new = []
|
||||
for r in result:
|
||||
if isinstance(scope, (er.Instance, er.Class)) \
|
||||
and hasattr(r, 'get_descriptor_return'):
|
||||
and hasattr(r, 'get_descriptor_return'):
|
||||
# handle descriptors
|
||||
with common.ignored(KeyError):
|
||||
res_new += r.get_descriptor_return(scope)
|
||||
@@ -537,7 +536,7 @@ def assign_tuples(tup, results, seek_name):
|
||||
func = r.get_exact_index_types
|
||||
except AttributeError:
|
||||
debug.warning("invalid tuple lookup %s of result %s in %s"
|
||||
% (tup, results, seek_name))
|
||||
% (tup, results, seek_name))
|
||||
else:
|
||||
with common.ignored(IndexError):
|
||||
types += func(index)
|
||||
@@ -643,7 +642,7 @@ def follow_call_list(call_list, follow_array=False):
|
||||
call_path = call.generate_call_path()
|
||||
next(call_path, None) # the first one has been used already
|
||||
result += follow_paths(call_path, r, call.parent,
|
||||
position=call.start_pos)
|
||||
position=call.start_pos)
|
||||
elif isinstance(call, pr.ListComprehension):
|
||||
loop = evaluate_list_comprehension(call)
|
||||
# Caveat: parents are being changed, but this doesn't matter,
|
||||
@@ -655,7 +654,7 @@ def follow_call_list(call_list, follow_array=False):
|
||||
result.append(er.Function(call))
|
||||
# With things like params, these can also be functions...
|
||||
elif isinstance(call, (er.Function, er.Class, er.Instance,
|
||||
dynamic.ArrayInstance)):
|
||||
dynamic.ArrayInstance)):
|
||||
result.append(call)
|
||||
# The string tokens are just operations (+, -, etc.)
|
||||
elif not isinstance(call, (str, unicode)):
|
||||
@@ -673,8 +672,8 @@ def follow_call_list(call_list, follow_array=False):
|
||||
result += follow_call(call)
|
||||
elif call == '*':
|
||||
if [r for r in result if isinstance(r, er.Array)
|
||||
or isinstance(r, er.Instance)
|
||||
and str(r.name) == 'str']:
|
||||
or isinstance(r, er.Instance)
|
||||
and str(r.name) == 'str']:
|
||||
# if it is an iterable, ignore * operations
|
||||
next(calls_iterator)
|
||||
return set(result)
|
||||
@@ -701,7 +700,7 @@ def follow_call_path(path, scope, position):
|
||||
if isinstance(current, pr.NamePart):
|
||||
# This is the first global lookup.
|
||||
scopes = find_name(scope, current, position=position,
|
||||
search_global=True)
|
||||
search_global=True)
|
||||
else:
|
||||
if current.type in (pr.Call.STRING, pr.Call.NUMBER):
|
||||
t = type(current.name).__name__
|
||||
@@ -775,7 +774,7 @@ def follow_path(path, scope, call_scope, position=None):
|
||||
if filter_private_variable(scope, call_scope, current):
|
||||
return []
|
||||
result = imports.strip_imports(find_name(scope, current,
|
||||
position=position))
|
||||
position=position))
|
||||
return follow_paths(path, set(result), call_scope, position=position)
|
||||
|
||||
|
||||
@@ -814,5 +813,5 @@ def goto(stmt, call_path=None):
|
||||
follow_res = []
|
||||
for s in scopes:
|
||||
follow_res += find_name(s, search, pos,
|
||||
search_global=search_global, is_goto=True)
|
||||
search_global=search_global, is_goto=True)
|
||||
return follow_res, search
|
||||
|
||||
@@ -53,11 +53,13 @@ class Executable(pr.IsScope):
|
||||
|
||||
|
||||
class Instance(use_metaclass(cache.CachedMetaClass, Executable)):
|
||||
|
||||
""" This class is used to evaluate instances. """
|
||||
|
||||
def __init__(self, base, var_args=()):
|
||||
super(Instance, self).__init__(base, var_args)
|
||||
if str(base.name) in ['list', 'set'] \
|
||||
and builtin.Builtin.scope == base.get_parent_until():
|
||||
and builtin.Builtin.scope == base.get_parent_until():
|
||||
# compare the module path with the builtin name.
|
||||
self.var_args = dynamic.check_array_instances(self)
|
||||
else:
|
||||
@@ -166,14 +168,14 @@ class Instance(use_metaclass(cache.CachedMetaClass, Executable)):
|
||||
|
||||
def __getattr__(self, name):
|
||||
if name not in ['start_pos', 'end_pos', 'name', 'get_imports',
|
||||
'doc', 'docstr', 'asserts']:
|
||||
'doc', 'docstr', 'asserts']:
|
||||
raise AttributeError("Instance %s: Don't touch this (%s)!"
|
||||
% (self, name))
|
||||
% (self, name))
|
||||
return getattr(self.base, name)
|
||||
|
||||
def __repr__(self):
|
||||
return "<e%s of %s (var_args: %s)>" % \
|
||||
(type(self).__name__, self.base, len(self.var_args or []))
|
||||
(type(self).__name__, self.base, len(self.var_args or []))
|
||||
|
||||
|
||||
class InstanceElement(use_metaclass(cache.CachedMetaClass)):
|
||||
@@ -195,8 +197,8 @@ class InstanceElement(use_metaclass(cache.CachedMetaClass)):
|
||||
def parent(self):
|
||||
par = self.var.parent
|
||||
if isinstance(par, Class) and par == self.instance.base \
|
||||
or isinstance(par, pr.Class) \
|
||||
and par == self.instance.base.base:
|
||||
or isinstance(par, pr.Class) \
|
||||
and par == self.instance.base.base:
|
||||
par = self.instance
|
||||
elif not isinstance(par, pr.Module):
|
||||
par = InstanceElement(self.instance, par, self.is_class_var)
|
||||
@@ -285,8 +287,8 @@ class Class(use_metaclass(cache.CachedMetaClass, pr.IsScope)):
|
||||
|
||||
def __getattr__(self, name):
|
||||
if name not in ['start_pos', 'end_pos', 'parent', 'asserts', 'docstr',
|
||||
'doc', 'get_imports', 'get_parent_until', 'get_code',
|
||||
'subscopes']:
|
||||
'doc', 'get_imports', 'get_parent_until', 'get_code',
|
||||
'subscopes']:
|
||||
raise AttributeError("Don't touch this: %s of %s !" % (name, self))
|
||||
return getattr(self.base, name)
|
||||
|
||||
@@ -319,11 +321,11 @@ class Function(use_metaclass(cache.CachedMetaClass, pr.IsScope)):
|
||||
dec_results = evaluate.follow_statement(dec)
|
||||
if not len(dec_results):
|
||||
debug.warning('decorator func not found: %s in stmt %s' %
|
||||
(self.base_func, dec))
|
||||
(self.base_func, dec))
|
||||
return None
|
||||
if len(dec_results) > 1:
|
||||
debug.warning('multiple decorators found', self.base_func,
|
||||
dec_results)
|
||||
dec_results)
|
||||
decorator = dec_results.pop()
|
||||
# Create param array.
|
||||
old_func = Function(f, is_decorated=True)
|
||||
@@ -334,7 +336,7 @@ class Function(use_metaclass(cache.CachedMetaClass, pr.IsScope)):
|
||||
return None
|
||||
if len(wrappers) > 1:
|
||||
debug.warning('multiple wrappers found', self.base_func,
|
||||
wrappers)
|
||||
wrappers)
|
||||
# This is here, that the wrapper gets executed.
|
||||
f = wrappers[0]
|
||||
|
||||
@@ -415,7 +417,7 @@ class Execution(Executable):
|
||||
debug.warning('jedi getattr is too simple')
|
||||
key = arr_name.var_args[0]
|
||||
stmts += evaluate.follow_path(iter([key]), obj,
|
||||
self.base)
|
||||
self.base)
|
||||
return stmts
|
||||
elif func_name == 'type':
|
||||
# otherwise it would be a metaclass
|
||||
@@ -428,7 +430,7 @@ class Execution(Executable):
|
||||
func = self.var_args.get_parent_until(accept)
|
||||
if func.isinstance(*accept):
|
||||
cls = func.get_parent_until(accept + (pr.Class,),
|
||||
include_current=False)
|
||||
include_current=False)
|
||||
if isinstance(cls, pr.Class):
|
||||
cls = Class(cls)
|
||||
su = cls.get_super_classes()
|
||||
@@ -450,7 +452,7 @@ class Execution(Executable):
|
||||
if hasattr(self.base, 'execute_subscope_by_name'):
|
||||
try:
|
||||
stmts = self.base.execute_subscope_by_name('__call__',
|
||||
self.var_args)
|
||||
self.var_args)
|
||||
except KeyError:
|
||||
debug.warning("no __call__ func available", self.base)
|
||||
else:
|
||||
@@ -554,7 +556,7 @@ class Execution(Executable):
|
||||
else:
|
||||
keys_used.add(str(key))
|
||||
result.append(gen_param_name_copy(key_param,
|
||||
values=[value]))
|
||||
values=[value]))
|
||||
key, value = next(var_arg_iterator, (None, None))
|
||||
|
||||
commands = param.get_commands()
|
||||
@@ -599,7 +601,7 @@ class Execution(Executable):
|
||||
if not ignore_creation and (not keys_only or commands[0] == '**'):
|
||||
keys_used.add(str(key))
|
||||
result.append(gen_param_name_copy(param, keys=keys,
|
||||
values=values, array_type=array_type))
|
||||
values=values, array_type=array_type))
|
||||
|
||||
if keys_only:
|
||||
# sometimes param arguments are not completely written (which would
|
||||
@@ -641,7 +643,7 @@ class Execution(Executable):
|
||||
call = key_stmt.get_commands()[0]
|
||||
if isinstance(call, pr.Name):
|
||||
yield call, value_stmt
|
||||
elif type(call) == pr.Call:
|
||||
elif type(call) is pr.Call:
|
||||
yield call.name, value_stmt
|
||||
# Normal arguments (including key arguments).
|
||||
else:
|
||||
@@ -732,7 +734,7 @@ class Execution(Executable):
|
||||
|
||||
def __repr__(self):
|
||||
return "<%s of %s>" % \
|
||||
(type(self).__name__, self.base)
|
||||
(type(self).__name__, self.base)
|
||||
|
||||
|
||||
class Generator(use_metaclass(cache.CachedMetaClass, pr.Base)):
|
||||
@@ -752,7 +754,7 @@ class Generator(use_metaclass(cache.CachedMetaClass, pr.Base)):
|
||||
executes_generator = ('__next__', 'send')
|
||||
for n in ('close', 'throw') + executes_generator:
|
||||
name = pr.Name(builtin.Builtin.scope, [(n, none_pos)],
|
||||
none_pos, none_pos)
|
||||
none_pos, none_pos)
|
||||
if n in executes_generator:
|
||||
name.parent = self
|
||||
names.append(name)
|
||||
@@ -769,10 +771,10 @@ class Generator(use_metaclass(cache.CachedMetaClass, pr.Base)):
|
||||
|
||||
def __getattr__(self, name):
|
||||
if name not in ['start_pos', 'end_pos', 'parent', 'get_imports',
|
||||
'asserts', 'doc', 'docstr', 'get_parent_until', 'get_code',
|
||||
'subscopes']:
|
||||
'asserts', 'doc', 'docstr', 'get_parent_until', 'get_code',
|
||||
'subscopes']:
|
||||
raise AttributeError("Accessing %s of %s is not allowed."
|
||||
% (self, name))
|
||||
% (self, name))
|
||||
return getattr(self.func, name)
|
||||
|
||||
def __repr__(self):
|
||||
@@ -800,8 +802,8 @@ class Array(use_metaclass(cache.CachedMetaClass, pr.Base)):
|
||||
# otherwise it just ignores the index (e.g. [1+1]).
|
||||
index = index_possibilities[0]
|
||||
if isinstance(index, Instance) \
|
||||
and str(index.name) in ['int', 'str'] \
|
||||
and len(index.var_args) == 1:
|
||||
and str(index.name) in ['int', 'str'] \
|
||||
and len(index.var_args) == 1:
|
||||
with common.ignored(KeyError, IndexError):
|
||||
return self.get_exact_index_types(index.var_args[0])
|
||||
|
||||
|
||||
@@ -55,7 +55,7 @@ class Module(pr.Simple, pr.Module):
|
||||
|
||||
def __repr__(self):
|
||||
return "<%s: %s@%s-%s>" % (type(self).__name__, self.name,
|
||||
self.start_pos[0], self.end_pos[0])
|
||||
self.start_pos[0], self.end_pos[0])
|
||||
|
||||
|
||||
class CachedFastParser(type):
|
||||
@@ -67,7 +67,7 @@ class CachedFastParser(type):
|
||||
pi = cache.parser_cache.get(module_path, None)
|
||||
if pi is None or isinstance(pi.parser, parsing.Parser):
|
||||
p = super(CachedFastParser, self).__call__(source, module_path,
|
||||
user_position)
|
||||
user_position)
|
||||
else:
|
||||
p = pi.parser # pi is a `cache.ParserCacheItem`
|
||||
p.update(source, user_position)
|
||||
@@ -204,7 +204,7 @@ class FastParser(use_metaclass(CachedFastParser)):
|
||||
self._user_scope = p.user_scope
|
||||
|
||||
if isinstance(self._user_scope, pr.SubModule) \
|
||||
or self._user_scope is None:
|
||||
or self._user_scope is None:
|
||||
self._user_scope = self.module
|
||||
return self._user_scope
|
||||
|
||||
@@ -324,11 +324,11 @@ class FastParser(use_metaclass(CachedFastParser)):
|
||||
if self.current_node is not None:
|
||||
|
||||
self.current_node = \
|
||||
self.current_node.parent_until_indent(indent)
|
||||
self.current_node.parent_until_indent(indent)
|
||||
nodes += self.current_node.old_children
|
||||
|
||||
# check if code_part has already been parsed
|
||||
#print '#'*45,line_offset, p and p.end_pos, '\n', code_part
|
||||
# print '#'*45,line_offset, p and p.end_pos, '\n', code_part
|
||||
p, node = self._get_parser(code_part, code[start:],
|
||||
line_offset, nodes, not is_first)
|
||||
|
||||
@@ -351,12 +351,12 @@ class FastParser(use_metaclass(CachedFastParser)):
|
||||
else:
|
||||
if node is None:
|
||||
self.current_node = \
|
||||
self.current_node.add_parser(p, code_part)
|
||||
self.current_node.add_parser(p, code_part)
|
||||
else:
|
||||
self.current_node = self.current_node.add_node(node)
|
||||
|
||||
if self.current_node.parent and (isinstance(p.user_scope,
|
||||
pr.SubModule) or p.user_scope is None) \
|
||||
pr.SubModule) or p.user_scope is None) \
|
||||
and self.user_position \
|
||||
and p.start_pos <= self.user_position < p.end_pos:
|
||||
p.user_scope = self.current_node.parent.content_scope
|
||||
@@ -365,7 +365,7 @@ class FastParser(use_metaclass(CachedFastParser)):
|
||||
|
||||
is_first = False
|
||||
else:
|
||||
#print '#'*45, line_offset, p.end_pos, 'theheck\n', code_part
|
||||
# print '#'*45, line_offset, p.end_pos, 'theheck\n', code_part
|
||||
pass
|
||||
|
||||
line_offset += lines
|
||||
@@ -378,7 +378,7 @@ class FastParser(use_metaclass(CachedFastParser)):
|
||||
|
||||
self.module.end_pos = self.parsers[-1].end_pos
|
||||
|
||||
#print(self.parsers[0].module.get_code())
|
||||
# print(self.parsers[0].module.get_code())
|
||||
del code
|
||||
|
||||
def _get_parser(self, code, parser_code, line_offset, nodes, no_docstr):
|
||||
@@ -408,7 +408,7 @@ class FastParser(use_metaclass(CachedFastParser)):
|
||||
# It's important to take care of the whole user
|
||||
# positioning stuff, if no reparsing is being done.
|
||||
p.user_stmt = m.get_statement_for_position(
|
||||
self.user_position, include_imports=True)
|
||||
self.user_position, include_imports=True)
|
||||
if p.user_stmt:
|
||||
p.user_scope = p.user_stmt.parent
|
||||
else:
|
||||
|
||||
@@ -56,7 +56,7 @@ class ImportPath(pr.Base):
|
||||
GlobalNamespace = _GlobalNamespace()
|
||||
|
||||
def __init__(self, import_stmt, is_like_search=False, kill_count=0,
|
||||
direct_resolve=False):
|
||||
direct_resolve=False):
|
||||
self.import_stmt = import_stmt
|
||||
self.is_like_search = is_like_search
|
||||
self.direct_resolve = direct_resolve
|
||||
@@ -88,8 +88,8 @@ class ImportPath(pr.Base):
|
||||
import foo.bar
|
||||
"""
|
||||
return not self.import_stmt.alias and not self.import_stmt.from_ns \
|
||||
and len(self.import_stmt.namespace.names) > 1 \
|
||||
and not self.direct_resolve
|
||||
and len(self.import_stmt.namespace.names) > 1 \
|
||||
and not self.direct_resolve
|
||||
|
||||
def get_nested_import(self, parent):
|
||||
"""
|
||||
@@ -127,14 +127,14 @@ class ImportPath(pr.Base):
|
||||
names += m.parser.module.get_defined_names()
|
||||
else:
|
||||
if on_import_stmt and isinstance(scope, pr.Module) \
|
||||
and scope.path.endswith('__init__.py'):
|
||||
and scope.path.endswith('__init__.py'):
|
||||
pkg_path = os.path.dirname(scope.path)
|
||||
names += self.get_module_names([pkg_path])
|
||||
for s, scope_names in evaluate.get_names_of_scope(scope,
|
||||
include_builtin=False):
|
||||
include_builtin=False):
|
||||
for n in scope_names:
|
||||
if self.import_stmt.from_ns is None \
|
||||
or self.is_partial_import:
|
||||
or self.is_partial_import:
|
||||
# from_ns must be defined to access module
|
||||
# values plus a partial import means that there
|
||||
# is something after the import, which
|
||||
@@ -155,7 +155,7 @@ class ImportPath(pr.Base):
|
||||
for module_loader, name, is_pkg in pkgutil.iter_modules(search_path):
|
||||
inf_pos = (float('inf'), float('inf'))
|
||||
names.append(pr.Name(self.GlobalNamespace, [(name, inf_pos)],
|
||||
inf_pos, inf_pos, self.import_stmt))
|
||||
inf_pos, inf_pos, self.import_stmt))
|
||||
return names
|
||||
|
||||
def sys_path_with_modifications(self):
|
||||
@@ -198,12 +198,12 @@ class ImportPath(pr.Base):
|
||||
elif rest:
|
||||
if is_goto:
|
||||
scopes = itertools.chain.from_iterable(
|
||||
evaluate.find_name(s, rest[0], is_goto=True)
|
||||
for s in scopes)
|
||||
evaluate.find_name(s, rest[0], is_goto=True)
|
||||
for s in scopes)
|
||||
else:
|
||||
scopes = itertools.chain.from_iterable(
|
||||
evaluate.follow_path(iter(rest), s, s)
|
||||
for s in scopes)
|
||||
evaluate.follow_path(iter(rest), s, s)
|
||||
for s in scopes)
|
||||
scopes = list(scopes)
|
||||
|
||||
if self.is_nested_import():
|
||||
@@ -273,7 +273,7 @@ class ImportPath(pr.Base):
|
||||
current_namespace = follow_str(current_namespace[1], s)
|
||||
except ImportError:
|
||||
if self.import_stmt.relative_count \
|
||||
and len(self.import_path) == 1:
|
||||
and len(self.import_path) == 1:
|
||||
# follow `from . import some_variable`
|
||||
rel_path = self.get_relative_path()
|
||||
with common.ignored(ImportError):
|
||||
|
||||
@@ -70,7 +70,7 @@ class ObjectImporter(object):
|
||||
if objclass and module:
|
||||
alias = self._genname()
|
||||
fakeimport = self.make_fakeimport(module, objclass.__name__,
|
||||
alias)
|
||||
alias)
|
||||
fakestmt = self.make_fakestatement(variable, alias, call=True)
|
||||
scope.add_import(fakeimport)
|
||||
scope.add_statement(fakestmt)
|
||||
|
||||
@@ -45,7 +45,7 @@ class CachedModule(object):
|
||||
""" get the parser lazy """
|
||||
if self._parser is None:
|
||||
self._parser = cache.load_module(self.path, self.name) \
|
||||
or self._load_module()
|
||||
or self._load_module()
|
||||
return self._parser
|
||||
|
||||
def _get_source(self):
|
||||
@@ -113,10 +113,10 @@ class ModuleWithCursor(Module):
|
||||
# Also, the position is here important (which will not be used by
|
||||
# default), therefore fill the cache here.
|
||||
self._parser = fast_parser.FastParser(self.source, self.path,
|
||||
self.position)
|
||||
self.position)
|
||||
# don't pickle that module, because it's changing fast
|
||||
cache.save_module(self.path, self.name, self._parser,
|
||||
pickling=False)
|
||||
pickling=False)
|
||||
return self._parser
|
||||
|
||||
def get_path_until_cursor(self):
|
||||
@@ -136,7 +136,7 @@ class ModuleWithCursor(Module):
|
||||
self._line_length = len(line)
|
||||
line = line + '\n'
|
||||
# add lines with a backslash at the end
|
||||
while 1:
|
||||
while True:
|
||||
self._line_temp -= 1
|
||||
last_line = self.get_line(self._line_temp)
|
||||
if last_line and last_line[-1] == '\\':
|
||||
@@ -163,7 +163,7 @@ class ModuleWithCursor(Module):
|
||||
last_type = None
|
||||
try:
|
||||
for token_type, tok, start, end, line in gen:
|
||||
#print 'tok', token_type, tok, force_point
|
||||
# print 'tok', token_type, tok, force_point
|
||||
if last_type == token_type == tokenize.NAME:
|
||||
string += ' '
|
||||
|
||||
@@ -214,7 +214,7 @@ class ModuleWithCursor(Module):
|
||||
after = re.match("[^\w\s]+", line[self.position[1]:])
|
||||
before = re.match("[^\w\s]+", line[:self.position[1]][::-1])
|
||||
return (before.group(0) if before is not None else '') \
|
||||
+ (after.group(0) if after is not None else '')
|
||||
+ (after.group(0) if after is not None else '')
|
||||
|
||||
def get_context(self):
|
||||
pos = self._start_cursor_pos
|
||||
@@ -375,7 +375,7 @@ def source_to_unicode(source, encoding=None):
|
||||
|
||||
first_two_lines = re.match(r'(?:[^\n]*\n){0,2}', str(source)).group(0)
|
||||
possible_encoding = re.search(r"coding[=:]\s*([-\w.]+)",
|
||||
first_two_lines)
|
||||
first_two_lines)
|
||||
if possible_encoding:
|
||||
return possible_encoding.group(1)
|
||||
else:
|
||||
|
||||
@@ -46,8 +46,8 @@ class Parser(object):
|
||||
:param top_module: Use this module as a parent instead of `self.module`.
|
||||
"""
|
||||
def __init__(self, source, module_path=None, user_position=None,
|
||||
no_docstr=False, offset=(0, 0), is_fast_parser=None,
|
||||
top_module=None):
|
||||
no_docstr=False, offset=(0, 0), is_fast_parser=None,
|
||||
top_module=None):
|
||||
self.user_position = user_position
|
||||
self.user_scope = None
|
||||
self.user_stmt = None
|
||||
@@ -149,7 +149,7 @@ class Parser(object):
|
||||
append((tok, self.start_pos))
|
||||
|
||||
n = pr.Name(self.module, names, first_pos, self.end_pos) if names \
|
||||
else None
|
||||
else None
|
||||
return n, token_type, tok
|
||||
|
||||
def _parse_import_list(self):
|
||||
@@ -171,7 +171,7 @@ class Parser(object):
|
||||
imports = []
|
||||
brackets = False
|
||||
continue_kw = [",", ";", "\n", ')'] \
|
||||
+ list(set(keyword.kwlist) - set(['as']))
|
||||
+ list(set(keyword.kwlist) - set(['as']))
|
||||
while True:
|
||||
defunct = False
|
||||
token_type, tok = self.next()
|
||||
@@ -207,7 +207,7 @@ class Parser(object):
|
||||
breaks = [',', ':']
|
||||
while tok not in [')', ':']:
|
||||
param, tok = self._parse_statement(added_breaks=breaks,
|
||||
stmt_class=pr.Param)
|
||||
stmt_class=pr.Param)
|
||||
if param and tok == ':':
|
||||
# parse annotations
|
||||
annotation, tok = self._parse_statement(added_breaks=breaks)
|
||||
@@ -236,7 +236,7 @@ class Parser(object):
|
||||
return None
|
||||
|
||||
fname = pr.Name(self.module, [(fname, self.start_pos)], self.start_pos,
|
||||
self.end_pos)
|
||||
self.end_pos)
|
||||
|
||||
token_type, open = self.next()
|
||||
if open != '(':
|
||||
@@ -260,7 +260,7 @@ class Parser(object):
|
||||
# because of 2 line func param definitions
|
||||
scope = pr.Function(self.module, fname, params, first_pos, annotation)
|
||||
if self.user_scope and scope != self.user_scope \
|
||||
and self.user_position > first_pos:
|
||||
and self.user_position > first_pos:
|
||||
self.user_scope = scope
|
||||
return scope
|
||||
|
||||
@@ -276,11 +276,11 @@ class Parser(object):
|
||||
token_type, cname = self.next()
|
||||
if token_type != tokenize.NAME:
|
||||
debug.warning("class: syntax err, token is not a name@%s (%s: %s)"
|
||||
% (self.start_pos[0], tokenize.tok_name[token_type], cname))
|
||||
% (self.start_pos[0], tokenize.tok_name[token_type], cname))
|
||||
return None
|
||||
|
||||
cname = pr.Name(self.module, [(cname, self.start_pos)], self.start_pos,
|
||||
self.end_pos)
|
||||
self.end_pos)
|
||||
|
||||
super = []
|
||||
token_type, _next = self.next()
|
||||
@@ -295,12 +295,12 @@ class Parser(object):
|
||||
# because of 2 line class initializations
|
||||
scope = pr.Class(self.module, cname, super, first_pos)
|
||||
if self.user_scope and scope != self.user_scope \
|
||||
and self.user_position > first_pos:
|
||||
and self.user_position > first_pos:
|
||||
self.user_scope = scope
|
||||
return scope
|
||||
|
||||
def _parse_statement(self, pre_used_token=None, added_breaks=None,
|
||||
stmt_class=pr.Statement):
|
||||
stmt_class=pr.Statement):
|
||||
"""
|
||||
Parses statements like::
|
||||
|
||||
@@ -345,10 +345,10 @@ class Parser(object):
|
||||
|
||||
tok_list = []
|
||||
while not (tok in always_break
|
||||
or tok in not_first_break and not tok_list
|
||||
or tok in breaks and level <= 0):
|
||||
or tok in not_first_break and not tok_list
|
||||
or tok in breaks and level <= 0):
|
||||
try:
|
||||
#print 'parse_stmt', tok, tokenize.tok_name[token_type]
|
||||
# print 'parse_stmt', tok, tokenize.tok_name[token_type]
|
||||
tok_list.append(self.current + (self.start_pos,))
|
||||
if tok == 'as':
|
||||
token_type, tok = self.next()
|
||||
@@ -387,9 +387,9 @@ class Parser(object):
|
||||
|
||||
if not tok_list:
|
||||
return None, tok
|
||||
#print 'new_stat', set_vars, used_vars
|
||||
# print 'new_stat', set_vars, used_vars
|
||||
if self.freshscope and not self.no_docstr and len(tok_list) == 1 \
|
||||
and self.last_token[0] == tokenize.STRING:
|
||||
and self.last_token[0] == tokenize.STRING:
|
||||
self._scope.add_docstr(self.last_token[1])
|
||||
return None, tok
|
||||
else:
|
||||
@@ -432,7 +432,7 @@ class Parser(object):
|
||||
s = self._scope
|
||||
while s is not None:
|
||||
if isinstance(s, pr.Module) \
|
||||
and not isinstance(s, pr.SubModule):
|
||||
and not isinstance(s, pr.SubModule):
|
||||
self.module.end_pos = self.end_pos
|
||||
break
|
||||
s.end_pos = self.end_pos
|
||||
@@ -440,10 +440,10 @@ class Parser(object):
|
||||
raise
|
||||
|
||||
if self.user_position and (self.start_pos[0] == self.user_position[0]
|
||||
or self.user_scope is None
|
||||
and self.start_pos[0] >= self.user_position[0]):
|
||||
or self.user_scope is None
|
||||
and self.start_pos[0] >= self.user_position[0]):
|
||||
debug.dbg('user scope found [%s] = %s' %
|
||||
(self.parserline.replace('\n', ''), repr(self._scope)))
|
||||
(self.parserline.replace('\n', ''), repr(self._scope)))
|
||||
self.user_scope = self._scope
|
||||
self.last_token = self.current
|
||||
self.current = (typ, tok)
|
||||
@@ -469,7 +469,7 @@ class Parser(object):
|
||||
# This iterator stuff is not intentional. It grew historically.
|
||||
for token_type, tok in self.iterator:
|
||||
self.module.temp_used_names = []
|
||||
#debug.dbg('main: tok=[%s] type=[%s] indent=[%s]'\
|
||||
# debug.dbg('main: tok=[%s] type=[%s] indent=[%s]'\
|
||||
# % (tok, tokenize.tok_name[token_type], start_position[0]))
|
||||
|
||||
while token_type == tokenize.DEDENT and self._scope != self.module:
|
||||
@@ -494,13 +494,13 @@ class Parser(object):
|
||||
self._scope = self.module
|
||||
|
||||
use_as_parent_scope = self.top_module if isinstance(self._scope,
|
||||
pr.SubModule) else self._scope
|
||||
pr.SubModule) else self._scope
|
||||
first_pos = self.start_pos
|
||||
if tok == 'def':
|
||||
func = self._parse_function()
|
||||
if func is None:
|
||||
debug.warning("function: syntax error@%s" %
|
||||
self.start_pos[0])
|
||||
self.start_pos[0])
|
||||
continue
|
||||
self.freshscope = True
|
||||
self._scope = self._scope.add_scope(func, self._decorators)
|
||||
@@ -520,19 +520,19 @@ class Parser(object):
|
||||
e = (alias or m or self).end_pos
|
||||
end_pos = self.end_pos if count + 1 == len(imports) else e
|
||||
i = pr.Import(self.module, first_pos, end_pos, m,
|
||||
alias, defunct=defunct)
|
||||
alias, defunct=defunct)
|
||||
self._check_user_stmt(i)
|
||||
self._scope.add_import(i)
|
||||
if not imports:
|
||||
i = pr.Import(self.module, first_pos, self.end_pos, None,
|
||||
defunct=True)
|
||||
defunct=True)
|
||||
self._check_user_stmt(i)
|
||||
self.freshscope = False
|
||||
elif tok == 'from':
|
||||
defunct = False
|
||||
# take care for relative imports
|
||||
relative_count = 0
|
||||
while 1:
|
||||
while True:
|
||||
token_type, tok = self.next()
|
||||
if tok != '.':
|
||||
break
|
||||
@@ -556,12 +556,12 @@ class Parser(object):
|
||||
e = (alias or name or self).end_pos
|
||||
end_pos = self.end_pos if count + 1 == len(names) else e
|
||||
i = pr.Import(self.module, first_pos, end_pos, name,
|
||||
alias, mod, star, relative_count,
|
||||
defunct=defunct or defunct2)
|
||||
alias, mod, star, relative_count,
|
||||
defunct=defunct or defunct2)
|
||||
self._check_user_stmt(i)
|
||||
self._scope.add_import(i)
|
||||
self.freshscope = False
|
||||
#loops
|
||||
# loops
|
||||
elif tok == 'for':
|
||||
set_stmt, tok = self._parse_statement(added_breaks=['in'])
|
||||
if tok == 'in':
|
||||
@@ -572,14 +572,14 @@ class Parser(object):
|
||||
self._scope = self._scope.add_statement(f)
|
||||
else:
|
||||
debug.warning('syntax err, for flow started @%s',
|
||||
self.start_pos[0])
|
||||
self.start_pos[0])
|
||||
if statement is not None:
|
||||
statement.parent = use_as_parent_scope
|
||||
if set_stmt is not None:
|
||||
set_stmt.parent = use_as_parent_scope
|
||||
else:
|
||||
debug.warning('syntax err, for flow incomplete @%s',
|
||||
self.start_pos[0])
|
||||
self.start_pos[0])
|
||||
if set_stmt is not None:
|
||||
set_stmt.parent = use_as_parent_scope
|
||||
|
||||
@@ -592,7 +592,7 @@ class Parser(object):
|
||||
inputs = []
|
||||
first = True
|
||||
while first or command == 'with' \
|
||||
and tok not in [':', '\n']:
|
||||
and tok not in [':', '\n']:
|
||||
statement, tok = \
|
||||
self._parse_statement(added_breaks=added_breaks)
|
||||
if command == 'except' and tok in added_breaks:
|
||||
@@ -623,7 +623,7 @@ class Parser(object):
|
||||
for i in inputs:
|
||||
i.parent = use_as_parent_scope
|
||||
debug.warning('syntax err, flow started @%s',
|
||||
self.start_pos[0])
|
||||
self.start_pos[0])
|
||||
# returns
|
||||
elif tok in ['return', 'yield']:
|
||||
s = self.start_pos
|
||||
@@ -677,6 +677,6 @@ class Parser(object):
|
||||
if token_type not in [tokenize.COMMENT, tokenize.INDENT,
|
||||
tokenize.NEWLINE, tokenize.NL]:
|
||||
debug.warning('token not classified', tok, token_type,
|
||||
self.start_pos[0])
|
||||
self.start_pos[0])
|
||||
continue
|
||||
self.no_docstr = False
|
||||
|
||||
@@ -91,7 +91,7 @@ class Simple(Base):
|
||||
@property
|
||||
def start_pos(self):
|
||||
return self._sub_module.line_offset + self._start_pos[0], \
|
||||
self._start_pos[1]
|
||||
self._start_pos[1]
|
||||
|
||||
@start_pos.setter
|
||||
def start_pos(self, value):
|
||||
@@ -102,7 +102,7 @@ class Simple(Base):
|
||||
if None in self._end_pos:
|
||||
return self._end_pos
|
||||
return self._sub_module.line_offset + self._end_pos[0], \
|
||||
self._end_pos[1]
|
||||
self._end_pos[1]
|
||||
|
||||
@end_pos.setter
|
||||
def end_pos(self, value):
|
||||
@@ -110,7 +110,7 @@ class Simple(Base):
|
||||
|
||||
@Python3Method
|
||||
def get_parent_until(self, classes=(), reverse=False,
|
||||
include_current=True):
|
||||
include_current=True):
|
||||
""" Takes always the parent, until one class (not a Class) """
|
||||
if type(classes) not in (tuple, list):
|
||||
classes = (classes,)
|
||||
@@ -259,7 +259,7 @@ class Scope(Simple, IsScope):
|
||||
|
||||
"""
|
||||
return [n for n in self.get_set_vars()
|
||||
if isinstance(n, Import) or len(n) == 1]
|
||||
if isinstance(n, Import) or len(n) == 1]
|
||||
|
||||
def is_empty(self):
|
||||
"""
|
||||
@@ -304,7 +304,7 @@ class Scope(Simple, IsScope):
|
||||
name = self.command
|
||||
|
||||
return "<%s: %s@%s-%s>" % (type(self).__name__, name,
|
||||
self.start_pos[0], self.end_pos[0])
|
||||
self.start_pos[0], self.end_pos[0])
|
||||
|
||||
|
||||
class Module(IsScope):
|
||||
@@ -366,12 +366,12 @@ class SubModule(Scope, Module):
|
||||
else:
|
||||
sep = (re.escape(os.path.sep),) * 2
|
||||
r = re.search(r'([^%s]*?)(%s__init__)?(\.py|\.so)?$' % sep,
|
||||
self.path)
|
||||
self.path)
|
||||
# remove PEP 3149 names
|
||||
string = re.sub('\.[a-z]+-\d{2}[mud]{0,3}$', '', r.group(1))
|
||||
names = [(string, (0, 0))]
|
||||
self._name = Name(self, names, self.start_pos, self.end_pos,
|
||||
self.use_as_parent)
|
||||
self.use_as_parent)
|
||||
return self._name
|
||||
|
||||
def is_builtin(self):
|
||||
@@ -532,7 +532,7 @@ class Lambda(Function):
|
||||
|
||||
def __repr__(self):
|
||||
return "<%s @%s (%s-%s)>" % (type(self).__name__, self.start_pos[0],
|
||||
self.start_pos[1], self.end_pos[1])
|
||||
self.start_pos[1], self.end_pos[1])
|
||||
|
||||
|
||||
class Flow(Scope):
|
||||
@@ -641,7 +641,7 @@ class ForFlow(Flow):
|
||||
def __init__(self, module, inputs, start_pos, set_stmt,
|
||||
is_list_comp=False):
|
||||
super(ForFlow, self).__init__(module, 'for', inputs, start_pos,
|
||||
set_stmt.used_vars)
|
||||
set_stmt.used_vars)
|
||||
self.set_stmt = set_stmt
|
||||
set_stmt.parent = self.use_as_parent
|
||||
self.is_list_comp = is_list_comp
|
||||
@@ -814,7 +814,7 @@ class Statement(Simple):
|
||||
def get_code(self, new_line=True):
|
||||
def assemble(command_list, assignment=None):
|
||||
pieces = [c.get_code() if isinstance(c, Simple) else unicode(c)
|
||||
for c in command_list]
|
||||
for c in command_list]
|
||||
if assignment is None:
|
||||
return ''.join(pieces)
|
||||
return '%s %s ' % (''.join(pieces), assignment)
|
||||
@@ -866,7 +866,7 @@ class Statement(Simple):
|
||||
"""
|
||||
def is_assignment(tok):
|
||||
return isinstance(tok, (str, unicode)) and tok.endswith('=') \
|
||||
and not tok in ['>=', '<=', '==', '!=']
|
||||
and not tok in ['>=', '<=', '==', '!=']
|
||||
|
||||
def parse_array(token_iterator, array_type, start_pos, add_el=None,
|
||||
added_breaks=()):
|
||||
@@ -901,7 +901,7 @@ class Statement(Simple):
|
||||
|
||||
c = token_iterator.current[1]
|
||||
arr.end_pos = c.end_pos if isinstance(c, Simple) \
|
||||
else (c[2][0], c[2][1] + len(c[1]))
|
||||
else (c[2][0], c[2][1] + len(c[1]))
|
||||
return arr, break_tok
|
||||
|
||||
def parse_stmt(token_iterator, maybe_dict=False, added_breaks=(),
|
||||
@@ -939,7 +939,7 @@ class Statement(Simple):
|
||||
token_list.append(lambd)
|
||||
elif tok == 'for':
|
||||
list_comp, tok = parse_list_comp(token_iterator,
|
||||
token_list, start_pos, last_end_pos)
|
||||
token_list, start_pos, last_end_pos)
|
||||
if list_comp is not None:
|
||||
token_list = [list_comp]
|
||||
|
||||
@@ -951,8 +951,8 @@ class Statement(Simple):
|
||||
if level == 0 and tok in closing_brackets \
|
||||
or tok in added_breaks \
|
||||
or level == 1 and (tok == ','
|
||||
or maybe_dict and tok == ':'
|
||||
or is_assignment(tok) and break_on_assignment):
|
||||
or maybe_dict and tok == ':'
|
||||
or is_assignment(tok) and break_on_assignment):
|
||||
end_pos = end_pos[0], end_pos[1] - 1
|
||||
break
|
||||
token_list.append(tok_temp)
|
||||
@@ -961,7 +961,7 @@ class Statement(Simple):
|
||||
return None, tok
|
||||
|
||||
statement = stmt_class(self._sub_module, [], [], token_list,
|
||||
start_pos, end_pos, self.parent)
|
||||
start_pos, end_pos, self.parent)
|
||||
statement.used_vars = used_vars
|
||||
return statement, tok
|
||||
|
||||
@@ -1078,7 +1078,7 @@ class Statement(Simple):
|
||||
is_chain = False
|
||||
elif tok in brackets.keys():
|
||||
arr, is_ass = parse_array(token_iterator, brackets[tok],
|
||||
start_pos)
|
||||
start_pos)
|
||||
if result and isinstance(result[-1], Call):
|
||||
result[-1].set_execution(arr)
|
||||
else:
|
||||
@@ -1101,10 +1101,10 @@ class Statement(Simple):
|
||||
end_pos = t.end_pos
|
||||
except AttributeError:
|
||||
end_pos = (t[2][0], t[2][1] + len(t[1])) \
|
||||
if isinstance(t, tuple) else t.start_pos
|
||||
if isinstance(t, tuple) else t.start_pos
|
||||
|
||||
stmt = Statement(self._sub_module, [], [], result,
|
||||
start_pos, end_pos, self.parent)
|
||||
start_pos, end_pos, self.parent)
|
||||
stmt._commands = result
|
||||
arr, break_tok = parse_array(token_iterator, Array.TUPLE,
|
||||
stmt.start_pos, stmt)
|
||||
@@ -1211,7 +1211,7 @@ class Call(Simple):
|
||||
s = self.name.get_code()
|
||||
else:
|
||||
if not is_py3k and isinstance(self.name, str)\
|
||||
and "'" not in self.name:
|
||||
and "'" not in self.name:
|
||||
# This is a very rough spot, because of repr not supporting
|
||||
# unicode signs, see `test_unicode_script`.
|
||||
s = "'%s'" % unicode(self.name, 'UTF-8')
|
||||
@@ -1225,7 +1225,7 @@ class Call(Simple):
|
||||
|
||||
def __repr__(self):
|
||||
return "<%s: %s>" % \
|
||||
(type(self).__name__, self.name)
|
||||
(type(self).__name__, self.name)
|
||||
|
||||
|
||||
class Array(Call):
|
||||
@@ -1356,7 +1356,7 @@ class Name(Simple):
|
||||
def __init__(self, module, names, start_pos, end_pos, parent=None):
|
||||
super(Name, self).__init__(module, start_pos, end_pos)
|
||||
self.names = tuple(n if isinstance(n, NamePart) else
|
||||
NamePart(n[0], self, n[1]) for n in names)
|
||||
NamePart(n[0], self, n[1]) for n in names)
|
||||
if parent is not None:
|
||||
self.parent = parent
|
||||
|
||||
@@ -1399,7 +1399,7 @@ class ListComprehension(Base):
|
||||
|
||||
def __repr__(self):
|
||||
return "<%s: %s>" % \
|
||||
(type(self).__name__, self.get_code())
|
||||
(type(self).__name__, self.get_code())
|
||||
|
||||
def get_code(self):
|
||||
statements = self.stmt, self.middle, self.input
|
||||
|
||||
@@ -24,7 +24,7 @@ class RecursionDecorator(object):
|
||||
self.reset()
|
||||
|
||||
def __call__(self, stmt, *args, **kwargs):
|
||||
#print stmt, len(self.node_statements())
|
||||
# print stmt, len(self.node_statements())
|
||||
if self.push_stmt(stmt):
|
||||
return []
|
||||
else:
|
||||
@@ -37,7 +37,7 @@ class RecursionDecorator(object):
|
||||
check = self._check_recursion()
|
||||
if check: # TODO remove False!!!!
|
||||
debug.warning('catched stmt recursion: %s against %s @%s'
|
||||
% (stmt, check.stmt, stmt.start_pos))
|
||||
% (stmt, check.stmt, stmt.start_pos))
|
||||
self.pop_stmt()
|
||||
return True
|
||||
return False
|
||||
@@ -82,7 +82,7 @@ class RecursionNode(object):
|
||||
# The same's true for the builtins, because the builtins are really
|
||||
# simple.
|
||||
self.is_ignored = isinstance(stmt, pr.Param) \
|
||||
or (self.script == builtin.Builtin.scope)
|
||||
or (self.script == builtin.Builtin.scope)
|
||||
|
||||
def __eq__(self, other):
|
||||
if not other:
|
||||
@@ -90,10 +90,10 @@ class RecursionNode(object):
|
||||
|
||||
is_list_comp = lambda x: isinstance(x, pr.ForFlow) and x.is_list_comp
|
||||
return self.script == other.script \
|
||||
and self.position == other.position \
|
||||
and not is_list_comp(self.stmt.parent) \
|
||||
and not is_list_comp(other.parent) \
|
||||
and not self.is_ignored and not other.is_ignored
|
||||
and self.position == other.position \
|
||||
and not is_list_comp(self.stmt.parent) \
|
||||
and not is_list_comp(other.parent) \
|
||||
and not self.is_ignored and not other.is_ignored
|
||||
|
||||
|
||||
class ExecutionRecursionDecorator(object):
|
||||
@@ -107,7 +107,7 @@ class ExecutionRecursionDecorator(object):
|
||||
|
||||
def __call__(self, execution, evaluate_generator=False):
|
||||
debug.dbg('Execution recursions: %s' % execution, self.recursion_level,
|
||||
self.execution_count, len(self.execution_funcs))
|
||||
self.execution_count, len(self.execution_funcs))
|
||||
if self.check_recursion(execution, evaluate_generator):
|
||||
result = []
|
||||
else:
|
||||
|
||||
@@ -65,7 +65,7 @@ def rename(script, new_name):
|
||||
def _rename(names, replace_str):
|
||||
""" For both rename and inline. """
|
||||
order = sorted(names, key=lambda x: (x.module_path, x.start_pos),
|
||||
reverse=True)
|
||||
reverse=True)
|
||||
|
||||
def process(path, old_lines, new_lines):
|
||||
if new_lines is not None: # goto next file, save last
|
||||
@@ -92,7 +92,7 @@ def _rename(names, replace_str):
|
||||
nr, indent = name.start_pos
|
||||
line = new_lines[nr - 1]
|
||||
new_lines[nr - 1] = line[:indent] + replace_str + \
|
||||
line[indent + len(name.text):]
|
||||
line[indent + len(name.text):]
|
||||
process(current_path, old_lines, new_lines)
|
||||
return dct
|
||||
|
||||
@@ -148,7 +148,7 @@ def extract(script, new_name):
|
||||
open_brackets = ['(', '[', '{']
|
||||
close_brackets = [')', ']', '}']
|
||||
if '\n' in text and not (text[0] in open_brackets and text[-1] ==
|
||||
close_brackets[open_brackets.index(text[0])]):
|
||||
close_brackets[open_brackets.index(text[0])]):
|
||||
text = '(%s)' % text
|
||||
|
||||
# add new line before statement
|
||||
@@ -173,9 +173,9 @@ def inline(script):
|
||||
stmt = definitions[0]._definition
|
||||
usages = script.usages()
|
||||
inlines = [r for r in usages
|
||||
if not stmt.start_pos <= r.start_pos <= stmt.end_pos]
|
||||
if not stmt.start_pos <= r.start_pos <= stmt.end_pos]
|
||||
inlines = sorted(inlines, key=lambda x: (x.module_path, x.start_pos),
|
||||
reverse=True)
|
||||
reverse=True)
|
||||
commands = stmt.get_commands()
|
||||
# don't allow multiline refactorings for now.
|
||||
assert stmt.start_pos[0] == stmt.end_pos[0]
|
||||
|
||||
@@ -168,12 +168,12 @@ def generate_tokens(readline):
|
||||
if endmatch:
|
||||
pos = end = endmatch.end(0)
|
||||
yield TokenInfo(STRING, contstr + line[:end],
|
||||
strstart, (lnum, end), contline + line)
|
||||
strstart, (lnum, end), contline + line)
|
||||
contstr, needcont = '', 0
|
||||
contline = None
|
||||
elif needcont and line[-2:] != '\\\n' and line[-3:] != '\\\r\n':
|
||||
yield TokenInfo(ERRORTOKEN, contstr + line,
|
||||
strstart, (lnum, len(line)), contline)
|
||||
strstart, (lnum, len(line)), contline)
|
||||
contstr = ''
|
||||
contline = None
|
||||
continue
|
||||
@@ -204,12 +204,13 @@ def generate_tokens(readline):
|
||||
comment_token = line[pos:].rstrip('\r\n')
|
||||
nl_pos = pos + len(comment_token)
|
||||
yield TokenInfo(COMMENT, comment_token,
|
||||
(lnum, pos), (lnum, pos + len(comment_token)), line)
|
||||
(lnum, pos), (lnum, pos + len(comment_token)), line)
|
||||
yield TokenInfo(NL, line[nl_pos:],
|
||||
(lnum, nl_pos), (lnum, len(line)), line)
|
||||
(lnum, nl_pos), (lnum, len(line)), line)
|
||||
else:
|
||||
yield TokenInfo((NL, COMMENT)[line[pos] == '#'], line[pos:],
|
||||
(lnum, pos), (lnum, len(line)), line)
|
||||
yield TokenInfo(
|
||||
(NL, COMMENT)[line[pos] == '#'], line[pos:],
|
||||
(lnum, pos), (lnum, len(line)), line)
|
||||
continue
|
||||
|
||||
if column > indents[-1]: # count indents or dedents
|
||||
@@ -237,7 +238,7 @@ def generate_tokens(readline):
|
||||
yield TokenInfo(NUMBER, token, spos, epos, line)
|
||||
elif initial in '\r\n':
|
||||
yield TokenInfo(NL if parenlev > 0 else NEWLINE,
|
||||
token, spos, epos, line)
|
||||
token, spos, epos, line)
|
||||
elif initial == '#':
|
||||
assert not token.endswith("\n")
|
||||
yield TokenInfo(COMMENT, token, spos, epos, line)
|
||||
@@ -277,7 +278,7 @@ def generate_tokens(readline):
|
||||
yield TokenInfo(OP, token, spos, epos, line)
|
||||
else:
|
||||
yield TokenInfo(ERRORTOKEN, line[pos],
|
||||
(lnum, pos), (lnum, pos + 1), line)
|
||||
(lnum, pos), (lnum, pos + 1), line)
|
||||
pos += 1
|
||||
|
||||
for indent in indents[1:]: # pop remaining indent levels
|
||||
|
||||
@@ -7,7 +7,7 @@ The twisted equivalent of this module is ``twisted.trial._synctest``.
 """
 from __future__ import absolute_import

-import unittest # this is stdlib unittest, but jedi gets the local one
+import unittest


 class Assertions(unittest.TestCase):