dev/refactor merge

David Halter
2013-02-23 20:53:57 +04:30
23 changed files with 1042 additions and 1008 deletions

View File

@@ -11,6 +11,7 @@ __all__ = ['Script', 'NotFoundError', 'set_debug_function', '_quick_complete']
import re
import os
import warnings
import parsing
import parsing_representation as pr
@@ -121,8 +122,8 @@ class Script(object):
for c in names:
completions.append((c, s))
if not dot: # named_params have no dots
call_def = self.get_in_function_call()
if not dot: # named params have no dots
call_def = self.function_definition()
if call_def:
if not call_def.module.is_builtin():
for p in call_def.params:
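For context, the hunk above routes named-parameter completion through the renamed `function_definition()`: when no dot precedes the cursor inside a call, the parameters of the called function are offered as completions. A minimal usage sketch, assuming the era's `jedi.Script(source, line, column, path)` constructor, the `complete()` method and the `.word` attribute seen in this file; the example source and path are made up and the exact output is not verified:

import jedi

source = ("def greet(name, greeting='hello'):\n"
          "    return greeting + ' ' + name\n"
          "\n"
          "greet(")
# cursor on line 4, right after the opening parenthesis
script = jedi.Script(source, 4, 6, 'example.py')
print(sorted(c.word for c in script.complete()))  # expected to include the params of `greet`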
@@ -169,7 +170,7 @@ class Script(object):
x.word.lower()))
def _prepare_goto(self, goto_path, is_like_search=False):
""" Base for complete, goto and get_definition. Basically it returns
""" Base for complete, goto and definition. Basically it returns
the resolved scopes under cursor. """
debug.dbg('start: %s in %s' % (goto_path, self._parser.user_scope))
@@ -190,21 +191,30 @@ class Script(object):
return scopes
def _get_under_cursor_stmt(self, cursor_txt):
r = parsing.Parser(cursor_txt, no_docstr=True)
offset = self.pos[0] - 1, self.pos[1]
r = parsing.Parser(cursor_txt, no_docstr=True, offset=offset)
try:
stmt = r.module.statements[0]
except IndexError:
raise NotFoundError()
stmt.start_pos = self.pos
stmt.parent = self._parser.user_scope
return stmt
def get_definition(self):
"""
.. deprecated:: 0.5.0
Use :attr:`.definition` instead.
.. todo:: Remove!
"""
warnings.warn("Use line instead.", DeprecationWarning)
return self.definition()
def definition(self):
"""
Return the definitions of the path under the cursor. This is not a
goto function! This follows complicated paths and returns the end, not
the first definition. The big difference between :meth:`goto` and
:meth:`get_definition` is that :meth:`goto` doesn't follow imports and
:meth:`definition` is that :meth:`goto` doesn't follow imports and
statements. Multiple objects may be returned, because Python itself is
a dynamic language, which means depending on an option you can have two
different versions of a function.
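The distinction this docstring draws - `goto()` stops at the first assignment or import, `definition()` follows the path to its end - can be sketched as below. The `Script(source, line, column, path)` signature and the column convention are assumed from this stage of the API and not verified here:

import jedi

source = ("import os\n"
          "path_join = os.path.join\n"
          "x = path_join\n")
# cursor somewhere on `path_join` in the last line
script = jedi.Script(source, 3, 6, 'example.py')
print(script.goto())        # expected: the statement `path_join = os.path.join`
print(script.definition())  # expected: the actual `join` function, imports followed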
@@ -294,13 +304,13 @@ class Script(object):
defs, search_name = evaluate.goto(stmt)
definitions = follow_inexistent_imports(defs)
if isinstance(user_stmt, pr.Statement):
if user_stmt.get_assignment_calls().start_pos > self.pos:
if user_stmt.get_commands()[0].start_pos > self.pos:
# The cursor must be after the start, otherwise the
# statement is just an assignee.
definitions = [user_stmt]
return definitions, search_name
def related_names(self, additional_module_paths=[]):
def related_names(self, additional_module_paths=()):
"""
Return :class:`api_classes.RelatedName` objects, which contain all
names that point to the definition of the name under the cursor. This
@@ -314,7 +324,7 @@ class Script(object):
user_stmt = self._parser.user_stmt
definitions, search_name = self._goto(add_import_name=True)
if isinstance(user_stmt, pr.Statement) \
and self.pos < user_stmt.get_assignment_calls().start_pos:
and self.pos < user_stmt.get_commands()[0].start_pos:
# the search_name might be before `=`
definitions = [v for v in user_stmt.set_vars
if unicode(v.names[-1]) == search_name]
@@ -336,6 +346,15 @@ class Script(object):
return sorted(set(names), key=lambda x: (x.module_path, x.start_pos))
def get_in_function_call(self):
"""
.. deprecated:: 0.5.0
Use :attr:`.function_definition` instead.
.. todo:: Remove!
"""
warnings.warn("Use line instead.", DeprecationWarning)
return self.function_definition()
def function_definition(self):
"""
Return the function object of the call you're currently in.
@@ -355,9 +374,8 @@ class Script(object):
if user_stmt is None \
or not isinstance(user_stmt, pr.Statement):
return None, 0
ass = helpers.fast_parent_copy(user_stmt.get_assignment_calls())
call, index, stop = helpers.search_function_call(ass, self.pos)
call, index, stop = helpers.search_function_definition(user_stmt, self.pos)
return call, index
def check_cache():
@@ -392,7 +410,7 @@ class Script(object):
debug.speed('func_call start')
call = None
if settings.use_get_in_function_call_cache:
if settings.use_function_definition_cache:
try:
call, index = check_cache()
except NotFoundError:
@@ -406,9 +424,9 @@ class Script(object):
return None
debug.speed('func_call parsed')
with common.scale_speed_settings(settings.scale_get_in_function_call):
with common.scale_speed_settings(settings.scale_function_definition):
_callable = lambda: evaluate.follow_call(call)
origins = cache.cache_get_in_function_call(_callable, user_stmt)
origins = cache.cache_function_definition(_callable, user_stmt)
debug.speed('func_call followed')
if len(origins) == 0:

View File

@@ -266,7 +266,7 @@ class Completion(BaseDefinition):
class Definition(BaseDefinition):
"""
*Definition* objects are returned from :meth:`api.Script.goto` or
:meth:`api.Script.get_definition`.
:meth:`api.Script.definition`.
"""
def __init__(self, definition):
super(Definition, self).__init__(definition, definition.start_pos)
@@ -339,9 +339,11 @@ class RelatedName(BaseDefinition):
class CallDef(object):
""" `CallDef` objects is the return value of `Script.get_in_function_call`.
"""
`CallDef` objects are the return value of `Script.function_definition`.
It knows which function you are currently in, e.g. `isinstance(` would
return the `isinstance` function. without `(` it would return nothing."""
return the `isinstance` function. without `(` it would return nothing.
"""
def __init__(self, executable, index, call):
self.executable = executable
self.index = index
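A hedged usage sketch of the renamed API: `function_definition()` returns a `CallDef` whose `index` says which argument the cursor is on and whose `params` (used by the completion hunk further up) describe the signature. The attribute names are taken from this diff; the constructor signature is assumed:

import jedi

source = "isinstance(42, "
script = jedi.Script(source, 1, len(source), 'example.py')  # cursor at the end, inside the call
call_def = script.function_definition()
if call_def is not None:
    print(call_def.index)   # expected 1 -> the cursor sits on the second argument
    print(call_def.params)  # the parameter list of `isinstance`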

View File

@@ -135,8 +135,8 @@ def time_cache(time_add_setting):
return _temp
@time_cache("get_in_function_call_validity")
def cache_get_in_function_call(stmt):
@time_cache("function_definition_validity")
def cache_function_definition(stmt):
module_path = stmt.get_parent_until().path
return None if module_path is None else (module_path, stmt.start_pos)
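The renamed cache is keyed by (module path, statement position) and expires via a time setting. A standalone sketch of that kind of time-based memoization, purely illustrative and not the project's actual `time_cache` implementation:

import time

def time_cache(validity_seconds):
    """Cache the result of a zero-argument callable per key for a limited time."""
    storage = {}
    def decorator(key_func):
        def wrapper(callable_, *args):
            key = key_func(*args)
            if key is not None and key in storage:
                stamp, value = storage[key]
                if time.time() - stamp < validity_seconds:
                    return value
            value = callable_()
            if key is not None:
                storage[key] = (time.time(), value)
            return value
        return wrapper
    return decorator

@time_cache(3.0)
def cache_by_position(module_path, position):
    # mimic the real key function: None disables caching for pathless modules
    return None if module_path is None else (module_path, position)

print(cache_by_position(lambda: 'expensive result', 'mod.py', (1, 0)))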

View File

@@ -56,13 +56,14 @@ class PushBackIterator(object):
class NoErrorTokenizer(object):
def __init__(self, readline, line_offset=0, stop_on_scope=False):
def __init__(self, readline, offset=(0, 0), stop_on_scope=False):
self.readline = readline
self.gen = PushBackIterator(tokenize.generate_tokens(readline))
self.line_offset = line_offset
self.offset = offset
self.stop_on_scope = stop_on_scope
self.first_scope = False
self.closed = False
self.first = True
def push_last_back(self):
self.gen.push_back(self.current)
@@ -90,7 +91,8 @@ class NoErrorTokenizer(object):
debug.warning('indentation error on line %s, ignoring it' %
self.current[2][0])
# add the starting line of the last position
self.line_offset += self.current[2][0]
self.offset = (self.offset[0] + self.current[2][0],
self.current[2][1])
self.gen = PushBackIterator(tokenize.generate_tokens(
self.readline))
return self.__next__()
@@ -106,8 +108,13 @@ class NoErrorTokenizer(object):
elif c[1] != '@':
self.first_scope = True
c[2] = self.line_offset + c[2][0], c[2][1]
c[3] = self.line_offset + c[3][0], c[3][1]
if self.first:
c[2] = self.offset[0] + c[2][0], self.offset[1] + c[2][1]
c[3] = self.offset[0] + c[3][0], self.offset[1] + c[3][1]
self.first = False
else:
c[2] = self.offset[0] + c[2][0], c[2][1]
c[3] = self.offset[0] + c[3][0], c[3][1]
return c
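This hunk replaces a plain line offset with a (line, column) offset. Only the very first token of the snippet can start mid-line, so the column shift is applied once; every later token only needs the line shift. A standalone sketch of that arithmetic with the stdlib tokenizer (the example offsets are made up):

import io
import tokenize

source = "foo(bar, baz)\nqux = 1\n"
offset = (10, 4)  # pretend the snippet starts at line 11, column 5 of a larger file

first = True
for tok in tokenize.generate_tokens(io.StringIO(source).readline):
    srow, scol = tok[2]
    if first:
        start = (offset[0] + srow, offset[1] + scol)  # first token: shift line and column
        first = False
    else:
        start = (offset[0] + srow, scol)              # later tokens: shift the line only
    print(tok[1], start)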

View File

@@ -21,7 +21,6 @@ import parsing_representation as pr
import evaluate_representation as er
import modules
import evaluate
import helpers
import settings
import debug
import imports
@@ -133,7 +132,7 @@ def search_params(param):
for stmt in possible_stmts:
if not isinstance(stmt, pr.Import):
calls = _scan_array(stmt.get_assignment_calls(), func_name)
calls = _scan_statement(stmt, func_name)
for c in calls:
# no execution means that params cannot be set
call_path = c.generate_call_path()
@@ -157,11 +156,12 @@ def search_params(param):
# get the param name
if param.assignment_details:
arr = param.assignment_details[0][1]
# first assignment details, others would be a syntax error
commands, op = param.assignment_details[0]
else:
arr = param.get_assignment_calls()
offset = 1 if arr[0][0] in ['*', '**'] else 0
param_name = str(arr[0][offset].name)
commands = param.get_commands()
offset = 1 if commands[0] in ['*', '**'] else 0
param_name = str(commands[offset].name)
# add the listener
listener = ParamListener()
@@ -182,33 +182,49 @@ def search_params(param):
def check_array_additions(array):
""" Just a mapper function for the internal _check_array_additions """
if array._array.type not in ['list', 'set']:
if not pr.Array.is_type(array._array, pr.Array.LIST, pr.Array.SET):
# TODO also check for dict updates
return []
is_list = array._array.type == 'list'
current_module = array._array.parent_stmt.get_parent_until()
current_module = array._array.get_parent_until()
res = _check_array_additions(array, current_module, is_list)
return res
def _scan_array(arr, search_name):
def _scan_statement(stmt, search_name, assignment_details=False):
""" Returns the function Call that match search_name in an Array. """
result = []
for sub in arr:
for s in sub:
if isinstance(s, pr.Array):
result += _scan_array(s, search_name)
elif isinstance(s, pr.Call):
s_new = s
while s_new is not None:
n = s_new.name
if isinstance(n, pr.Name) and search_name in n.names:
result.append(s)
def scan_array(arr, search_name):
result = []
if arr.type == pr.Array.DICT:
for key_stmt, value_stmt in arr.items():
result += _scan_statement(key_stmt, search_name)
result += _scan_statement(value_stmt, search_name)
else:
for stmt in arr:
result += _scan_statement(stmt, search_name)
return result
check = list(stmt.get_commands())
if assignment_details:
for commands, op in stmt.assignment_details:
check += commands
result = []
for c in check:
if isinstance(c, pr.Array):
result += scan_array(c, search_name)
elif isinstance(c, pr.Call):
s_new = c
while s_new is not None:
n = s_new.name
if isinstance(n, pr.Name) and search_name in n.names:
result.append(c)
if s_new.execution is not None:
result += scan_array(s_new.execution, search_name)
s_new = s_new.next
if s_new.execution is not None:
result += _scan_array(s_new.execution, search_name)
s_new = s_new.next
return result
@@ -238,7 +254,7 @@ def _check_array_additions(compare_array, module, is_list):
backtrack_path = iter(call_path[:separate_index])
position = c.start_pos
scope = c.parent_stmt.parent
scope = c.get_parent_until(pr.IsScope)
found = evaluate.follow_call_path(backtrack_path, scope, position)
if not compare_array in found:
@@ -248,26 +264,28 @@ def _check_array_additions(compare_array, module, is_list):
if not params.values:
continue # no params: just ignore it
if add_name in ['append', 'add']:
result += evaluate.follow_call_list(params)
for param in params:
result += evaluate.follow_statement(param)
elif add_name in ['insert']:
try:
second_param = params[1]
except IndexError:
continue
else:
result += evaluate.follow_call_list([second_param])
result += evaluate.follow_statement(second_param)
elif add_name in ['extend', 'update']:
iterators = evaluate.follow_call_list(params)
for param in params:
iterators = evaluate.follow_statement(param)
result += evaluate.get_iterator_types(iterators)
return result
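These branches feed jedi's dynamic array analysis: `append`, `insert`, `add`, `extend` and `update` calls are followed so that the inferred element types include what was added after creation. The kind of source pattern being targeted, in plain Python:

# `names` starts empty, yet its element types should be inferred as str and int,
# because the additions below are followed.
names = []
names.append('jedi')
names.insert(0, 42)
names.extend(['refactor', 'merge'])

for item in names:
    print(item)  # completion on `item.` is expected to offer both str and int attributes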
def get_execution_parent(element, *stop_classes):
""" Used to get an Instance/Execution parent """
if isinstance(element, er.Array):
stmt = element._array.parent_stmt
stmt = element._array.parent
else:
# must be instance
stmt = element.var_args.parent_stmt
# is an Instance with an ArrayInstance inside
stmt = element.var_args[0].var_args.parent
if isinstance(stmt, er.InstanceElement):
stop_classes = list(stop_classes) + [er.Function]
return stmt.get_parent_until(stop_classes)
@@ -278,6 +296,7 @@ def _check_array_additions(compare_array, module, is_list):
search_names = ['append', 'extend', 'insert'] if is_list else \
['add', 'update']
comp_arr_parent = get_execution_parent(compare_array, er.Execution)
possible_stmts = []
res = []
for n in search_names:
@@ -303,7 +322,7 @@ def _check_array_additions(compare_array, module, is_list):
if evaluate.follow_statement.push_stmt(stmt):
# check recursion
continue
res += check_calls(_scan_array(stmt.get_assignment_calls(), n), n)
res += check_calls(_scan_statement(stmt, n), n)
evaluate.follow_statement.pop_stmt()
# reset settings
settings.dynamic_params_for_other_modules = temp_param_add
@@ -311,11 +330,11 @@ def _check_array_additions(compare_array, module, is_list):
def check_array_instances(instance):
""" Used for set() and list() instances. """
"""Used for set() and list() instances."""
if not settings.dynamic_arrays_instances:
return instance.var_args
ai = ArrayInstance(instance)
return helpers.generate_param_array([ai], instance.var_args.parent_stmt)
return [ai]
class ArrayInstance(pr.Base):
@@ -334,23 +353,24 @@ class ArrayInstance(pr.Base):
lists/sets are too complicated to handle that.
"""
items = []
for array in evaluate.follow_call_list(self.var_args):
if isinstance(array, er.Instance) and len(array.var_args):
temp = array.var_args[0][0]
if isinstance(temp, ArrayInstance):
# prevent recursions
# TODO compare Modules
if self.var_args.start_pos != temp.var_args.start_pos:
items += temp.iter_content()
else:
debug.warning('ArrayInstance recursion', self.var_args)
continue
items += evaluate.get_iterator_types([array])
for stmt in self.var_args:
for typ in evaluate.follow_statement(stmt):
if isinstance(typ, er.Instance) and len(typ.var_args):
array = typ.var_args[0]
if isinstance(array, ArrayInstance):
# prevent recursions
# TODO compare Modules
if self.var_args.start_pos != array.var_args.start_pos:
items += array.iter_content()
else:
debug.warning('ArrayInstance recursion', self.var_args)
continue
items += evaluate.get_iterator_types([typ])
if self.var_args.parent_stmt is None:
if self.var_args.parent is None:
return [] # generated var_args should not be checked for arrays
module = self.var_args.parent_stmt.get_parent_until()
module = self.var_args.get_parent_until()
is_list = str(self.instance.name) == 'list'
items += _check_array_additions(self.instance, module, is_list)
return items
@@ -377,13 +397,13 @@ def related_names(definitions, search_name, mods):
follow.append(call_path[:i + 1])
for f in follow:
follow_res, search = evaluate.goto(call.parent_stmt, f)
follow_res, search = evaluate.goto(call.parent, f)
follow_res = related_name_add_import_modules(follow_res, search)
compare_follow_res = compare_array(follow_res)
# compare to see if they match
if any(r in compare_definitions for r in compare_follow_res):
scope = call.parent_stmt
scope = call.parent
result.append(api_classes.RelatedName(search, scope))
return result
@@ -416,10 +436,8 @@ def related_names(definitions, search_name, mods):
if set(f) & set(definitions):
names.append(api_classes.RelatedName(name_part, stmt))
else:
calls = _scan_array(stmt.get_assignment_calls(), search_name)
for d in stmt.assignment_details:
calls += _scan_array(d[1], search_name)
for call in calls:
for call in _scan_statement(stmt, search_name,
assignment_details=True):
names += check_call(call)
return names
@@ -455,39 +473,39 @@ def check_flow_information(flow, search_name, pos):
break
if isinstance(flow, pr.Flow) and not result:
if flow.command in ['if', 'while'] and len(flow.inits) == 1:
result = check_statement_information(flow.inits[0], search_name)
if flow.command in ['if', 'while'] and len(flow.inputs) == 1:
result = check_statement_information(flow.inputs[0], search_name)
return result
def check_statement_information(stmt, search_name):
try:
ass = stmt.get_assignment_calls()
try:
call = ass.get_only_subelement()
except AttributeError:
assert False
commands = stmt.get_commands()
# this might be removed if we analyze and, etc
assert len(commands) == 1
call = commands[0]
assert type(call) == pr.Call and str(call.name) == 'isinstance'
assert bool(call.execution)
# isinstance check
isinst = call.execution.values
assert len(isinst) == 2 # has two params
assert len(isinst[0]) == 1
assert len(isinst[1]) == 1
assert isinstance(isinst[0][0], pr.Call)
obj, classes = [stmt.get_commands() for stmt in isinst]
assert len(obj) == 1
assert len(classes) == 1
assert isinstance(obj[0], pr.Call)
# names fit?
assert str(isinst[0][0].name) == search_name
classes_call = isinst[1][0] # class_or_type_or_tuple
assert isinstance(classes_call, pr.Call)
result = []
for c in evaluate.follow_call(classes_call):
if isinstance(c, er.Array):
result += c.get_index_types()
else:
result.append(c)
for i, c in enumerate(result):
result[i] = er.Instance(c)
return result
assert str(obj[0].name) == search_name
assert isinstance(classes[0], pr.Call) # can be type or tuple
except AssertionError:
return []
result = []
for c in evaluate.follow_call(classes[0]):
if isinstance(c, er.Array):
result += c.get_index_types()
else:
result.append(c)
for i, c in enumerate(result):
result[i] = er.Instance(c)
return result
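check_statement_information accepts exactly one pattern - a flow such as `if isinstance(name, cls):` consisting of a single command - and then narrows `name` to instances of `cls` (or of every member of a tuple of classes). The accepted shape, in plain Python:

def describe(obj):
    if isinstance(obj, str):
        # inside this branch `obj` is treated as a str instance,
        # so `obj.` completes with string methods
        return obj.upper()
    if isinstance(obj, (int, float)):
        # tuple of classes: `obj` is narrowed to int or float here
        return obj + 1
    return obj

print(describe('hi'), describe(3))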

View File

@@ -72,7 +72,6 @@ from _compatibility import next, hasattr, is_py3k, unicode, utf8
import sys
import itertools
import copy
import common
import cache
@@ -82,7 +81,6 @@ import debug
import builtin
import imports
import recursion
import helpers
import dynamic
import docstrings
@@ -249,12 +247,13 @@ def find_name(scope, name_str, position=None, search_global=False,
def handle_for_loops(loop):
# Take the first statement (a for loop always has exactly
# one, remember `in`) and follow it.
if not len(loop.inits):
if not loop.inputs:
return []
result = get_iterator_types(follow_statement(loop.inits[0]))
result = get_iterator_types(follow_statement(loop.inputs[0]))
if len(loop.set_vars) > 1:
var_arr = loop.set_stmt.get_assignment_calls()
result = assign_tuples(var_arr, result, name_str)
commands = loop.set_stmt.get_commands()
# loops with loop.set_vars > 0 only have one command
result = assign_tuples(commands[0], result, name_str)
return result
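handle_for_loops follows the single `in` input of a for loop and, when several loop variables are bound, distributes the iterator's tuple types via assign_tuples. The source shape involved:

pairs = [(1, 'one'), (2, 'two')]

for number, word in pairs:
    # the single input `pairs` yields 2-tuples, so `number` should be
    # inferred as int and `word` as str
    print(number, word)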
def process(name):
@@ -286,22 +285,21 @@ def find_name(scope, name_str, position=None, search_global=False,
inst.is_generated = True
result.append(inst)
elif par.isinstance(pr.Statement):
def is_execution(arr):
for a in arr:
a = a[0] # rest is always empty with assignees
if a.isinstance(pr.Array):
if is_execution(a):
def is_execution(calls):
for c in calls:
if c.isinstance(pr.Array):
if is_execution(c):
return True
elif a.isinstance(pr.Call):
elif c.isinstance(pr.Call):
# Compare start_pos, because names may be different
# because of executions.
if a.name.start_pos == name.start_pos \
and a.execution:
if c.name.start_pos == name.start_pos \
and c.execution:
return True
return False
is_exe = False
for op, assignee in par.assignment_details:
for assignee, op in par.assignment_details:
is_exe |= is_execution(assignee)
if is_exe:
@@ -310,7 +308,7 @@ def find_name(scope, name_str, position=None, search_global=False,
pass
else:
details = par.assignment_details
if details and details[0][0] != '=':
if details and details[0][1] != '=':
no_break_scope = True
# TODO this makes self variables non-breakable. wanted?
@@ -374,7 +372,8 @@ def find_name(scope, name_str, position=None, search_global=False,
if not result and isinstance(nscope, er.Instance):
# __getattr__ / __getattribute__
result += check_getattr(nscope, name_str)
debug.dbg('sfn filter "%s" in %s: %s' % (name_str, nscope, result))
debug.dbg('sfn filter "%s" in (%s-%s): %s@%s' % (name_str, scope,
nscope, result, position))
return result
def descriptor_check(result):
@@ -415,10 +414,10 @@ def check_getattr(inst, name_str):
"""Checks for both __getattr__ and __getattribute__ methods"""
result = []
# str is important to lose the NamePart!
name = pr.Call(str(name_str), pr.Call.STRING, (0, 0), inst)
args = helpers.generate_param_array([name])
module = builtin.Builtin.scope
name = pr.Call(module, str(name_str), pr.Call.STRING, (0, 0), inst)
try:
result = inst.execute_subscope_by_name('__getattr__', args)
result = inst.execute_subscope_by_name('__getattr__', [name])
except KeyError:
pass
if not result:
@@ -427,7 +426,7 @@ def check_getattr(inst, name_str):
# could be practical and the jedi would return wrong types. If
# you ever have something, let me know!
try:
result = inst.execute_subscope_by_name('__getattribute__', args)
result = inst.execute_subscope_by_name('__getattribute__', [name])
except KeyError:
pass
return result
@@ -485,46 +484,43 @@ def assign_tuples(tup, results, seek_name):
def eval_results(index):
types = []
for r in results:
if hasattr(r, "get_exact_index_types"):
try:
types += r.get_exact_index_types(index)
except IndexError:
pass
else:
try:
func = r.get_exact_index_types
except AttributeError:
debug.warning("invalid tuple lookup %s of result %s in %s"
% (tup, results, seek_name))
else:
try:
types += func(index)
except IndexError:
pass
return types
result = []
if tup.type == pr.Array.NOARRAY:
# Here we have unnecessary braces, which we just remove.
arr = tup.get_only_subelement()
if type(arr) == pr.Call:
if arr.name.names[-1] == seek_name:
result = results
else:
result = assign_tuples(arr, results, seek_name)
else:
for i, t in enumerate(tup):
# Used in assignments. There is just one call and no other things,
# therefore we can just assume, that the first part is important.
if len(t) != 1:
raise AttributeError('Array length should be 1')
t = t[0]
for i, stmt in enumerate(tup):
# Used in assignments. There is just one call and no other things,
# therefore we can just assume, that the first part is important.
command = stmt.get_commands()[0]
# Check the left part, if there are still tuples in it or a Call.
if isinstance(t, pr.Array):
# These are "sub"-tuples.
result += assign_tuples(t, eval_results(i), seek_name)
else:
if t.name.names[-1] == seek_name:
result += eval_results(i)
if tup.type == pr.Array.NOARRAY:
# unnecessary braces -> just remove.
r = results
else:
r = eval_results(i)
# are there still tuples or is it just a Call.
if isinstance(command, pr.Array):
# These are "sub"-tuples.
result += assign_tuples(command, r, seek_name)
else:
if command.name.names[-1] == seek_name:
result += r
return result
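assign_tuples walks nested tuple assignments and keeps only the result slot whose name matches `seek_name`; redundant parentheses (the NOARRAY case) are simply unwrapped. The statements it resolves look like this:

point = (3, (4.5, 'label'))

# nested unpacking: x -> int, y -> float, tag -> str
x, (y, tag) = point

# redundant parentheses around a single target are unwrapped (the NOARRAY case)
(value) = 42

print(x, y, tag, value)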
@recursion.RecursionDecorator
@cache.memoize_default(default=[])
@cache.memoize_default(default=())
def follow_statement(stmt, seek_name=None):
"""
The starting point of the completion. A statement always owns a call list,
@@ -536,11 +532,11 @@ def follow_statement(stmt, seek_name=None):
:param seek_name: A string.
"""
debug.dbg('follow_stmt %s (%s)' % (stmt, seek_name))
call_list = stmt.get_assignment_calls()
debug.dbg('calls: %s' % call_list)
commands = stmt.get_commands()
debug.dbg('calls: %s' % commands)
try:
result = follow_call_list(call_list)
result = follow_call_list(commands)
except AttributeError:
# This is so evil! But necessary to propagate errors. The attribute
# errors here must not be caught, because they shouldn't exist.
@@ -550,16 +546,15 @@ def follow_statement(stmt, seek_name=None):
# variables.
if len(stmt.get_set_vars()) > 1 and seek_name and stmt.assignment_details:
new_result = []
for op, set_vars in stmt.assignment_details:
new_result += assign_tuples(set_vars, result, seek_name)
for ass_commands, op in stmt.assignment_details:
new_result += assign_tuples(ass_commands[0], result, seek_name)
result = new_result
return set(result)
def follow_call_list(call_list, follow_array=False):
"""
The call_list has a special structure.
This can be either `pr.Array` or `list of list`.
`call_list` can be either `pr.Array` or `list of list`.
It is used to evaluate a two dimensional object, that has calls, arrays and
operators in it.
"""
@@ -570,79 +565,72 @@ def follow_call_list(call_list, follow_array=False):
# is nested LC
input = nested_lc.stmt
module = input.get_parent_until()
loop = pr.ForFlow(module, [input], lc.stmt.start_pos,
lc.middle, True)
# create a for loop, which does the same as list comprehensions
loop = pr.ForFlow(module, [input], lc.stmt.start_pos, lc.middle, True)
loop.parent = lc.stmt.parent if parent is None else parent
loop.parent = parent or lc.get_parent_until(pr.IsScope)
if isinstance(nested_lc, pr.ListComprehension):
loop = evaluate_list_comprehension(nested_lc, loop)
return loop
if pr.Array.is_type(call_list, pr.Array.TUPLE, pr.Array.DICT):
# Tuples can stand just alone without any braces. These would be
# recognized as separate calls, but actually are a tuple.
result = follow_call(call_list)
else:
result = []
for calls in call_list:
calls_iterator = iter(calls)
for call in calls_iterator:
if pr.Array.is_type(call, pr.Array.NOARRAY):
result += follow_call_list(call, follow_array=True)
elif isinstance(call, pr.ListComprehension):
loop = evaluate_list_comprehension(call)
stmt = copy.copy(call.stmt)
stmt.parent = loop
# create a for loop which does the same as list
# comprehensions
result += follow_statement(stmt)
else:
if isinstance(call, (pr.Lambda)):
result.append(er.Function(call))
# With things like params, these can also be functions...
elif isinstance(call, (er.Function, er.Class, er.Instance,
dynamic.ArrayInstance)):
result.append(call)
# The string tokens are just operations (+, -, etc.)
elif not isinstance(call, (str, unicode)):
if str(call.name) == 'if':
# Ternary operators.
while True:
try:
call = next(calls_iterator)
except StopIteration:
break
try:
if str(call.name) == 'else':
break
except AttributeError:
pass
continue
result += follow_call(call)
elif call == '*':
if [r for r in result if isinstance(r, er.Array)
or isinstance(r, er.Instance)
and str(r.name) == 'str']:
# if it is an iterable, ignore * operations
next(calls_iterator)
if follow_array and isinstance(call_list, pr.Array):
# call_list can also be a two dimensional array
call_path = call_list.generate_call_path()
next(call_path, None) # the first one has been used already
call_scope = call_list.parent_stmt
position = call_list.start_pos
result = follow_paths(call_path, result, call_scope, position=position)
result = []
calls_iterator = iter(call_list)
for call in calls_iterator:
if pr.Array.is_type(call, pr.Array.NOARRAY):
r = list(itertools.chain.from_iterable(follow_statement(s)
for s in call))
call_path = call.generate_call_path()
next(call_path, None) # the first one has been used already
result += follow_paths(call_path, r, call.parent,
position=call.start_pos)
elif isinstance(call, pr.ListComprehension):
loop = evaluate_list_comprehension(call)
# Caveat: parents are being changed, but this doesn't matter,
# because nothing else uses it.
call.stmt.parent = loop
result += follow_statement(call.stmt)
else:
if isinstance(call, pr.Lambda):
result.append(er.Function(call))
# With things like params, these can also be functions...
elif isinstance(call, (er.Function, er.Class, er.Instance,
dynamic.ArrayInstance)):
result.append(call)
# The string tokens are just operations (+, -, etc.)
elif not isinstance(call, (str, unicode)):
if str(call.name) == 'if':
# Ternary operators.
while True:
try:
call = next(calls_iterator)
except StopIteration:
break
try:
if str(call.name) == 'else':
break
except AttributeError:
pass
continue
result += follow_call(call)
elif call == '*':
if [r for r in result if isinstance(r, er.Array)
or isinstance(r, er.Instance)
and str(r.name) == 'str']:
# if it is an iterable, ignore * operations
next(calls_iterator)
return set(result)
def follow_call(call):
"""Follow a call is following a function, variable, string, etc."""
scope = call.parent_stmt.parent
path = call.generate_call_path()
position = call.parent_stmt.start_pos
return follow_call_path(path, scope, position)
# find the statement of the Scope
s = call
while not s.parent.isinstance(pr.IsScope):
s = s.parent
return follow_call_path(path, s.parent, s.start_pos)
def follow_call_path(path, scope, position):
@@ -664,8 +652,7 @@ def follow_call_path(path, scope, position):
debug.warning('unknown type:', current.type, current)
scopes = []
# Make instances of those number/string objects.
arr = helpers.generate_param_array([current.name])
scopes = [er.Instance(s, arr) for s in scopes]
scopes = [er.Instance(s, (current.name,)) for s in scopes]
result = imports.strip_imports(scopes)
return follow_paths(path, result, scope, position=position)
@@ -745,11 +732,12 @@ def filter_private_variable(scope, call_scope, var_name):
def goto(stmt, call_path=None):
if call_path is None:
arr = stmt.get_assignment_calls()
call = arr.get_only_subelement()
commands = stmt.get_commands()
assert len(commands) == 1
call = commands[0]
call_path = list(call.generate_call_path())
scope = stmt.parent
scope = stmt.get_parent_until(pr.IsScope)
pos = stmt.start_pos
call_path, search = call_path[:-1], call_path[-1]
pos = pos[0], pos[1] + 1

View File

@@ -11,6 +11,7 @@ they change classes in Python 3.
"""
import sys
import copy
import itertools
from _compatibility import property, use_metaclass, next, hasattr
import parsing_representation as pr
@@ -33,13 +34,13 @@ class DecoratorNotFound(LookupError):
pass
class Executable(pr.Base):
""" An instance is also an executable - because __init__ is called """
def __init__(self, base, var_args=None):
class Executable(pr.IsScope):
"""
An instance is also an executable - because __init__ is called
:param var_args: The param input array, consisting of `pr.Array` or a list.
"""
def __init__(self, base, var_args=()):
self.base = base
# The param input array.
if var_args is None:
var_args = pr.Array(None, None)
self.var_args = var_args
def get_parent_until(self, *args, **kwargs):
@@ -52,7 +53,7 @@ class Executable(pr.Base):
class Instance(use_metaclass(cache.CachedMetaClass, Executable)):
""" This class is used to evaluate instances. """
def __init__(self, base, var_args=None):
def __init__(self, base, var_args=()):
super(Instance, self).__init__(base, var_args)
if str(base.name) in ['list', 'set'] \
and builtin.Builtin.scope == base.get_parent_until():
@@ -121,20 +122,15 @@ class Instance(use_metaclass(cache.CachedMetaClass, Executable)):
sub = self.base.get_subscope_by_name(name)
return InstanceElement(self, sub, True)
def execute_subscope_by_name(self, name, args=None):
if args is None:
args = helpers.generate_param_array([])
def execute_subscope_by_name(self, name, args=()):
method = self.get_subscope_by_name(name)
if args.parent_stmt is None:
args.parent_stmt = method
return Execution(method, args).get_return_types()
def get_descriptor_return(self, obj):
""" Throws a KeyError if there's no method. """
# Arguments in __get__ descriptors are obj, class.
# `method` is the new parent of the array, don't know if that's good.
v = [obj, obj.base] if isinstance(obj, Instance) else [None, obj]
args = helpers.generate_param_array(v)
args = [obj, obj.base] if isinstance(obj, Instance) else [None, obj]
return self.execute_subscope_by_name('__get__', args)
@cache.memoize_default([])
@@ -164,7 +160,7 @@ class Instance(use_metaclass(cache.CachedMetaClass, Executable)):
yield self, names
def get_index_types(self, index=None):
args = helpers.generate_param_array([] if index is None else [index])
args = [] if index is None else [index]
try:
return self.execute_subscope_by_name('__getitem__', args)
except KeyError:
@@ -219,15 +215,10 @@ class InstanceElement(use_metaclass(cache.CachedMetaClass)):
return self
return func
def get_assignment_calls(self):
def get_commands(self):
# Copy and modify the array.
origin = self.var.get_assignment_calls()
# Delete parent, because it isn't used anymore.
new = helpers.fast_parent_copy(origin)
par = InstanceElement(self.instance, origin.parent_stmt,
self.is_class_var)
new.parent_stmt = par
return new
return [InstanceElement(self.instance, command, self.is_class_var)
for command in self.var.get_commands()]
def __getattr__(self, name):
return getattr(self.var, name)
@@ -239,7 +230,7 @@ class InstanceElement(use_metaclass(cache.CachedMetaClass)):
return "<%s of %s>" % (type(self).__name__, self.var)
class Class(use_metaclass(cache.CachedMetaClass, pr.Base)):
class Class(use_metaclass(cache.CachedMetaClass, pr.IsScope)):
"""
This class is not only important to extend `pr.Class`, it is also
important for descriptors (if the descriptor methods are evaluated or not).
@@ -247,7 +238,7 @@ class Class(use_metaclass(cache.CachedMetaClass, pr.Base)):
def __init__(self, base):
self.base = base
@cache.memoize_default(default=[])
@cache.memoize_default(default=())
def get_super_classes(self):
supers = []
# TODO care for mro stuff (multiple super classes).
@@ -263,7 +254,7 @@ class Class(use_metaclass(cache.CachedMetaClass, pr.Base)):
supers += evaluate.find_name(builtin.Builtin.scope, 'object')
return supers
@cache.memoize_default(default=[])
@cache.memoize_default(default=())
def get_defined_names(self):
def in_iterable(name, iterable):
""" checks if the name is in the variable 'iterable'. """
@@ -296,20 +287,19 @@ class Class(use_metaclass(cache.CachedMetaClass, pr.Base)):
return self.base.name
def __getattr__(self, name):
if name not in ['start_pos', 'end_pos', 'parent', 'subscopes',
'get_imports', 'get_parent_until', 'docstr', 'asserts']:
raise AttributeError("Don't touch this (%s)!" % name)
if name not in ['start_pos', 'end_pos', 'parent', 'asserts', 'docstr',
'get_imports', 'get_parent_until', 'get_code', 'subscopes']:
raise AttributeError("Don't touch this: %s of %s !" % (name, self))
return getattr(self.base, name)
def __repr__(self):
return "<e%s of %s>" % (type(self).__name__, self.base)
class Function(use_metaclass(cache.CachedMetaClass, pr.Base)):
class Function(use_metaclass(cache.CachedMetaClass, pr.IsScope)):
"""
Needed because of decorators. Decorators are evaluated here.
"""
def __init__(self, func, is_decorated=False):
""" This should not be called directly """
self.base_func = func
@@ -339,9 +329,8 @@ class Function(use_metaclass(cache.CachedMetaClass, pr.Base)):
decorator = dec_results.pop()
# Create param array.
old_func = Function(f, is_decorated=True)
params = helpers.generate_param_array([old_func], old_func)
wrappers = Execution(decorator, params).get_return_types()
wrappers = Execution(decorator, (old_func,)).get_return_types()
if not len(wrappers):
debug.warning('no wrappers found', self.base_func)
return None
@@ -388,7 +377,18 @@ class Execution(Executable):
multiple calls to functions and recursion has to be avoided. But this is
responsibility of the decorators.
"""
@cache.memoize_default(default=[])
def follow_var_arg(self, index):
try:
stmt = self.var_args[index]
except IndexError:
return []
else:
if isinstance(stmt, pr.Statement):
return evaluate.follow_statement(stmt)
else:
return [stmt] # just some arbitrary object
@cache.memoize_default(default=())
@recursion.ExecutionRecursionDecorator
def get_return_types(self, evaluate_generator=False):
""" Get the return types of a function. """
@@ -401,8 +401,8 @@ class Execution(Executable):
if func_name == 'getattr':
# follow the first param
try:
objects = evaluate.follow_call_list([self.var_args[0]])
names = evaluate.follow_call_list([self.var_args[1]])
objects = self.follow_var_arg(0)
names = self.follow_var_arg(1)
except IndexError:
debug.warning('getattr() called with too few args.')
return []
@@ -412,19 +412,22 @@ class Execution(Executable):
debug.warning('getattr called without instance')
continue
for name in names:
key = name.var_args.get_only_subelement()
for arr_name in names:
if len(arr_name.var_args) != 1:
debug.warning('jedi getattr is too simple')
key = arr_name.var_args[0]
stmts += evaluate.follow_path(iter([key]), obj,
self.base)
return stmts
elif func_name == 'type':
# otherwise it would be a metaclass
if len(self.var_args) == 1:
objects = evaluate.follow_call_list([self.var_args[0]])
objects = self.follow_var_arg(0)
return [o.base for o in objects if isinstance(o, Instance)]
elif func_name == 'super':
# TODO make this able to detect multiple inheritance supers
accept = (pr.Function,)
func = self.var_args.parent_stmt.get_parent_until(accept)
func = self.var_args.get_parent_until(accept)
if func.isinstance(*accept):
cls = func.get_parent_until(accept + (pr.Class,),
include_current=False)
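get_return_types special-cases a few builtins: `getattr` follows its first argument and looks up the literal name, `type` with a single argument yields the class of the instance, and `super` climbs from the enclosing function to its class. The call shapes being recognized, as plain Python:

class Base(object):
    def greet(self):
        return 'hi'

class Child(Base):
    def greet(self):
        # `super` branch: the parent class is resolved from the enclosing scope
        return super(Child, self).greet() + '!'

child = Child()
method = getattr(child, 'greet')  # `getattr` branch: follow `child`, then the literal 'greet'
cls = type(child)                 # `type` branch: one argument -> the class of the instance
print(method(), cls.__name__)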
@@ -476,7 +479,7 @@ class Execution(Executable):
stmts += evaluate.follow_statement(r)
return stmts
@cache.memoize_default(default=[])
@cache.memoize_default(default=())
def get_params(self):
"""
This returns the params for an Execution/Instance and is injected as a
@@ -484,22 +487,36 @@ class Execution(Executable):
This needs to be here, because Instance can have __init__ functions,
which act the same way as normal functions.
"""
def gen_param_name_copy(param, keys=[], values=[], array_type=None):
def gen_param_name_copy(param, keys=(), values=(), array_type=None):
"""
Create a param with the original scope (of varargs) as parent.
"""
parent_stmt = self.var_args.parent_stmt
pos = parent_stmt.start_pos if parent_stmt else None
calls = pr.Array(pos, pr.Array.NOARRAY, parent_stmt)
calls.values = values
calls.keys = keys
calls.type = array_type
if isinstance(self.var_args, pr.Array):
parent = self.var_args.parent
start_pos = self.var_args.start_pos
else:
parent = self.base
start_pos = 0, 0
new_param = copy.copy(param)
if parent_stmt is not None:
new_param.parent = parent_stmt
new_param._assignment_calls_calculated = True
new_param._assignment_calls = calls
new_param.is_generated = True
if parent is not None:
new_param.parent = parent
# create an Array (-> needed for *args/**kwargs tuples/dicts)
arr = pr.Array(self._sub_module, start_pos, array_type, parent)
arr.values = values
key_stmts = []
for key in keys:
stmt = pr.Statement(self._sub_module, [], [], [],
start_pos, None)
stmt._commands = [key]
key_stmts.append(stmt)
arr.keys = key_stmts
arr.type = array_type
new_param._commands = [arr]
name = copy.copy(param.get_name())
name.parent = new_param
return name
@@ -542,12 +559,12 @@ class Execution(Executable):
values=[value]))
key, value = next(var_arg_iterator, (None, None))
assignments = param.get_assignment_calls().values
assignment = assignments[0]
commands = param.get_commands()
keys = []
values = []
array_type = None
if assignment[0] == '*':
ignore_creation = False
if commands[0] == '*':
# *args param
array_type = pr.Array.TUPLE
if value:
@@ -558,19 +575,21 @@ class Execution(Executable):
var_arg_iterator.push_back((key, value))
break
values.append(value)
elif assignment[0] == '**':
elif commands[0] == '**':
# **kwargs param
array_type = pr.Array.DICT
if non_matching_keys:
keys, values = zip(*non_matching_keys)
else:
elif not keys_only:
# normal param
if value:
if value is not None:
values = [value]
else:
if param.assignment_details:
# No value: return the default values.
values = assignments
ignore_creation = True
result.append(param.get_name())
param.is_generated = True
else:
# If there is no assignment detail, that means there is
# no assignment, just the result. Therefore nothing has
@@ -579,7 +598,7 @@ class Execution(Executable):
# Just ignore all the params that are without a key, after one
# keyword argument was set.
if not keys_only or assignment[0] == '**':
if not ignore_creation and (not keys_only or commands[0] == '**'):
keys_used.add(str(key))
result.append(gen_param_name_copy(param, keys=keys,
values=values, array_type=array_type))
@@ -597,37 +616,44 @@ class Execution(Executable):
"""
def iterate():
# `var_args` is typically an Array, and not a list.
for var_arg in self.var_args:
# empty var_arg
if len(var_arg) == 0:
yield None, None
for stmt in self.var_args:
if not isinstance(stmt, pr.Statement):
if stmt is None:
yield None, None
continue
old = stmt
# generate a statement if it's not already one.
module = builtin.Builtin.scope
stmt = pr.Statement(module, [], [], [], (0, 0), None)
stmt._commands = [old]
# *args
elif var_arg[0] == '*':
arrays = evaluate.follow_call_list([var_arg[1:]])
if stmt.get_commands()[0] == '*':
arrays = evaluate.follow_call_list(stmt.get_commands()[1:])
# *args must be some sort of an array, otherwise -> ignore
for array in arrays:
if hasattr(array, 'get_contents'):
for field in array.get_contents():
yield None, field
for field_stmt in array: # yield from plz!
yield None, field_stmt
# **kwargs
elif var_arg[0] == '**':
arrays = evaluate.follow_call_list([var_arg[1:]])
elif stmt.get_commands()[0] == '**':
arrays = evaluate.follow_call_list(stmt.get_commands()[1:])
for array in arrays:
if hasattr(array, 'get_contents'):
for key, field in array.get_contents():
# Take the first index.
if isinstance(key, pr.Name):
name = key
else:
# `pr`.[Call|Function|Class] lookup.
name = key[0].name
yield name, field
for key_stmt, value_stmt in array.items():
# the first index is the key, if syntactically correct
call = key_stmt.get_commands()[0]
if isinstance(call, pr.Name):
yield call, value_stmt
elif type(call) == pr.Call:
yield call.name, value_stmt
# Normal arguments (including key arguments).
else:
if len(var_arg) > 1 and var_arg[1] == '=':
# This is a named parameter (var_arg[0] is a Call).
yield var_arg[0].name, var_arg[2:]
if stmt.assignment_details:
key_arr, op = stmt.assignment_details[0]
# named parameter
if key_arr and isinstance(key_arr[0], pr.Call):
yield key_arr[0].name, stmt
else:
yield None, var_arg
yield None, stmt
return iter(common.PushBackIterator(iterate()))
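The iterator above normalizes every call site into (key, value-statement) pairs: `None` keys for positional and `*args` entries, names for keyword and `**kwargs` entries. The calling patterns it has to cover:

def target(a, b=2, *args, **kwargs):
    return a, b, args, kwargs

extra = (3, 4)
options = {'flag': True}

print(target(1, 2, *extra, **options))  # positional, starred and double-starred arguments
print(target(1, b=5, flag=True))        # keyword arguments arrive as named (key, value) pairs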
@@ -666,7 +692,7 @@ class Execution(Executable):
raise common.MultiLevelAttributeError(sys.exc_info())
def __getattr__(self, name):
if name not in ['start_pos', 'end_pos', 'imports']:
if name not in ['start_pos', 'end_pos', 'imports', '_sub_module']:
raise AttributeError('Tried to access %s: %s. Why?' % (name, self))
return getattr(self.base, name)
@@ -763,60 +789,60 @@ class Array(use_metaclass(cache.CachedMetaClass, pr.Base)):
def __init__(self, array):
self._array = array
def get_index_types(self, index_call_list=None):
def get_index_types(self, index_arr=None):
""" Get the types of a specific index or all, if not given """
# array slicing
if index_call_list is not None:
if index_call_list and [x for x in index_call_list if ':' in x]:
if index_arr is not None:
if index_arr and [x for x in index_arr if ':' in x.get_commands()]:
# array slicing
return [self]
index_possibilities = list(evaluate.follow_call_list(
index_call_list))
index_possibilities = self._follow_values(index_arr)
if len(index_possibilities) == 1:
# This is indexing only one element, with a fixed index number,
# otherwise it just ignores the index (e.g. [1+1]).
try:
# Multiple elements in the array are not wanted. var_args
# and get_only_subelement can raise AttributeErrors.
i = index_possibilities[0].var_args.get_only_subelement()
except AttributeError:
pass
else:
index = index_possibilities[0]
if isinstance(index, Instance) \
and str(index.name) in ['int', 'str'] \
and len(index.var_args) == 1:
try:
return self.get_exact_index_types(i)
except (IndexError, KeyError):
return self.get_exact_index_types(index.var_args[0])
except (KeyError, IndexError):
pass
result = list(self.follow_values(self._array.values))
result = list(self._follow_values(self._array.values))
result += dynamic.check_array_additions(self)
return set(result)
def get_exact_index_types(self, index):
""" Here the index is an int. Raises IndexError/KeyError """
if self._array.type == pr.Array.DICT:
old_index = index
def get_exact_index_types(self, mixed_index):
""" Here the index is an int/str. Raises IndexError/KeyError """
index = mixed_index
if self.type == pr.Array.DICT:
index = None
for i, key_elements in enumerate(self._array.keys):
for i, key_statement in enumerate(self._array.keys):
# Because we only want the key to be a string.
if len(key_elements) == 1:
try:
str_key = key_elements.get_code()
except AttributeError:
try:
str_key = key_elements[0].name
except AttributeError:
str_key = None
if old_index == str_key:
index = i
break
key_commands = key_statement.get_commands()
if len(key_commands) != 1: # cannot deal with complex strings
continue
key = key_commands[0]
if isinstance(key, pr.Call) and key.type == pr.Call.STRING:
str_key = key.name
elif isinstance(key, pr.Name):
str_key = str(key)
if mixed_index == str_key:
index = i
break
if index is None:
raise KeyError('No key found in dictionary')
values = [self._array[index]]
return self.follow_values(values)
def follow_values(self, values):
# Can raise an IndexError
values = [self._array.values[index]]
return self._follow_values(values)
def _follow_values(self, values):
""" helper function for the index getters """
return evaluate.follow_call_list(values)
return list(itertools.chain.from_iterable(evaluate.follow_statement(v)
for v in values))
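get_exact_index_types now accepts string keys for dict literals: each key statement is checked for a single string Call (or Name) equal to the index, and only the matching value statement is followed. The inference this enables, in plain Python:

settings = {'name': 'jedi', 'retries': 3}

# a literal string key narrows the result: 'name' -> str, 'retries' -> int
print(settings['name'].upper())
print(settings['retries'] + 1)

# a computed index such as [1 + 1] is ignored and all value types are kept
numbers = [10, 'ten']
print(numbers[1 + 1 - 1])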
def get_defined_names(self):
"""
@@ -829,24 +855,28 @@ class Array(use_metaclass(cache.CachedMetaClass, pr.Base)):
names = scope.get_defined_names()
return [ArrayMethod(n) for n in names]
def get_contents(self):
return self._array
@property
def parent(self):
"""
Return the builtin scope as parent, because the arrays are builtins
"""
return builtin.Builtin.scope
def get_parent_until(self, *args, **kwargs):
def get_parent_until(self):
return builtin.Builtin.scope
def __getattr__(self, name):
if name not in ['type', 'start_pos', 'get_only_subelement']:
if name not in ['type', 'start_pos', 'get_only_subelement', 'parent',
'get_parent_until', 'items']:
raise AttributeError('Strange access on %s: %s.' % (self, name))
return getattr(self._array, name)
def __getitem__(self):
return self._array.__getitem__()
def __iter__(self):
return self._array.__iter__()
def __len__(self):
return self._array.__len__()
def __repr__(self):
return "<e%s of %s>" % (type(self).__name__, self._array)
@@ -863,7 +893,7 @@ class ArrayMethod(object):
def __getattr__(self, name):
# Set access privileges:
if name not in ['parent', 'names', 'start_pos', 'end_pos', 'get_code']:
raise AttributeError('Strange access: %s.' % name)
raise AttributeError('Strange access on %s: %s.' % (self, name))
return getattr(self.name, name)
def get_parent_until(self):

View File

@@ -256,7 +256,7 @@ class FastParser(use_metaclass(CachedFastParser)):
else:
p = parsing.Parser(code[start:],
self.module_path, self.user_position,
line_offset=line_offset, stop_on_scope=True,
offset=(line_offset, 0), stop_on_scope=True,
top_module=self.module)
p.hash = h

View File

@@ -31,8 +31,7 @@ def fast_parent_copy(obj):
for key, value in items:
# replace parent (first try _parent and then parent)
if key in ['parent', '_parent', '_parent_stmt'] \
and value is not None:
if key in ['parent', '_parent'] and value is not None:
if key == 'parent' and '_parent' in items:
# parent can be a property
continue
@@ -40,8 +39,7 @@ def fast_parent_copy(obj):
setattr(new_obj, key, new_elements[value])
except KeyError:
pass
elif key in ['parent_stmt', 'parent_function', 'use_as_parent',
'module']:
elif key in ['parent_function', 'use_as_parent', '_sub_module']:
continue
elif isinstance(value, list):
setattr(new_obj, key, list_rec(value))
@@ -60,19 +58,6 @@ def fast_parent_copy(obj):
return recursion(obj)
def generate_param_array(args_tuple, parent_stmt=None):
""" This generates an array, that can be used as a param. """
values = []
for arg in args_tuple:
if arg is None:
values.append([])
else:
values.append([arg])
pos = None
arr = pr.Array(pos, pr.Array.TUPLE, parent_stmt, values=values)
return arr
def check_arr_index(arr, pos):
positions = arr.arr_el_pos
for index, comma_pos in enumerate(positions):
@@ -81,68 +66,57 @@ def check_arr_index(arr, pos):
return len(positions)
def array_for_pos(arr, pos):
if arr.start_pos >= pos \
or arr.end_pos[0] is not None and pos >= arr.end_pos:
return None, None
def array_for_pos(stmt, pos, array_types=None):
"""Searches for the array and position of a tuple"""
def search_array(arr, pos):
for i, stmt in enumerate(arr):
new_arr, index = array_for_pos(stmt, pos, array_types)
if new_arr is not None:
return new_arr, index
if arr.start_pos < pos <= stmt.end_pos:
if not array_types or arr.type in array_types:
return arr, i
if len(arr) == 0 and arr.start_pos < pos < arr.end_pos:
if not array_types or arr.type in array_types:
return arr, 0
return None, 0
result = arr
for sub in arr:
for s in sub:
if isinstance(s, pr.Array):
result = array_for_pos(s, pos)[0] or result
elif isinstance(s, pr.Call):
if s.execution:
result = array_for_pos(s.execution, pos)[0] or result
if s.next:
result = array_for_pos(s.next, pos)[0] or result
def search_call(call, pos):
arr, index = None, 0
if call.next is not None:
if isinstance(call.next, pr.Array):
arr, index = search_array(call.next, pos)
else:
arr, index = search_call(call.next, pos)
if not arr and call.execution is not None:
arr, index = search_array(call.execution, pos)
return arr, index
return result, check_arr_index(result, pos)
if stmt.start_pos >= pos >= stmt.end_pos:
return None, 0
for command in stmt.get_commands():
arr = None
if isinstance(command, pr.Array):
arr, index = search_array(command, pos)
elif isinstance(command, pr.Call):
arr, index = search_call(command, pos)
if arr is not None:
return arr, index
return None, 0
def search_function_call(arr, pos):
def search_function_definition(stmt, pos):
"""
Returns the function Call that matches the position before `arr`.
This is somehow stupid, probably only the name of the function.
Returns the function Call that matches the position before.
"""
call = None
stop = False
for sub in arr.values:
call = None
for s in sub:
if isinstance(s, pr.Array):
new = search_function_call(s, pos)
if new[0] is not None:
call, index, stop = new
if stop:
return call, index, stop
elif isinstance(s, pr.Call):
start_s = s
# check parts of calls
while s is not None:
if s.start_pos >= pos:
return call, check_arr_index(arr, pos), stop
elif s.execution is not None:
end = s.execution.end_pos
if s.execution.start_pos < pos and \
(None in end or pos < end):
c, index, stop = search_function_call(
s.execution, pos)
if stop:
return c, index, stop
# call should return without execution and
# next
reset = c or s
if reset.execution.type not in \
[pr.Array.TUPLE, pr.Array.NOARRAY]:
return start_s, index, False
reset.execution = None
reset.next = None
return c or start_s, index, True
s = s.next
# The third return is just necessary for recursion inside, because
# it needs to know when to stop iterating.
return call, check_arr_index(arr, pos), stop
# some parts of the statement will be removed
stmt = fast_parent_copy(stmt)
arr, index = array_for_pos(stmt, pos, [pr.Array.TUPLE, pr.Array.NOARRAY])
if arr is not None and isinstance(arr.parent, pr.Call):
call = arr.parent
while isinstance(call.parent, pr.Call):
call = call.parent
arr.parent.execution = None
return call, index, False
return None, 0, False
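The rewritten helper copies the statement, uses array_for_pos to find the innermost TUPLE/NOARRAY array containing the cursor, and reports that array's parent Call plus the argument index. A standalone sketch of the same idea - locating the innermost open call around a cursor column - using only string scanning, purely illustrative and not jedi's parser:

def innermost_call(line, column):
    """Return (callable_name, argument_index) at the cursor column, or None."""
    stack = []  # one [name, current_argument_index] entry per open call
    name = ''
    for ch in line[:column]:
        if ch.isalnum() or ch in '._':
            name += ch
        elif ch == '(':
            stack.append([name, 0])
            name = ''
        elif ch == ')':
            if stack:
                stack.pop()
            name = ''
        elif ch == ',':
            if stack:
                stack[-1][1] += 1
            name = ''
        else:
            name = ''
    return tuple(stack[-1]) if stack else None

print(innermost_call("isinstance(foo, int)", 16))  # ('isinstance', 1)
print(innermost_call("f(g(x), y", 9))              # ('f', 1)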

View File

@@ -101,8 +101,8 @@ class ImportPath(pr.Base):
# 0 (0 is not a valid line number).
zero = (0, 0)
names = i.namespace.names[1:]
n = pr.Name(i.module, names, zero, zero, self.import_stmt)
new = pr.Import(i.module, zero, zero, n)
n = pr.Name(i._sub_module, names, zero, zero, self.import_stmt)
new = pr.Import(i._sub_module, zero, zero, n)
new.parent = parent
debug.dbg('Generated a nested import: %s' % new)
return new
@@ -318,7 +318,7 @@ def strip_imports(scopes):
@cache.cache_star_import
def remove_star_imports(scope, ignored_modules=[]):
def remove_star_imports(scope, ignored_modules=()):
"""
Check a module for star imports:
>>> from module import *

View File

@@ -264,7 +264,7 @@ class ModuleWithCursor(Module):
offset = max(self.position[0] - length, 0)
s = '\n'.join(self.source.splitlines()[offset:offset + length])
self._part_parser = parsing.Parser(s, self.path, self.position,
line_offset=offset)
offset=(offset, 0))
return self._part_parser
@@ -311,10 +311,12 @@ def sys_path_with_modifications(module):
sys_path = list(get_sys_path()) # copy
for p in possible_stmts:
try:
call = p.get_assignment_calls().get_only_subelement()
except AttributeError:
if not isinstance(p, pr.Statement):
continue
commands = p.get_commands()
if len(commands) != 1: # sys.path command is just one thing.
continue
call = commands[0]
n = call.name
if not isinstance(n, pr.Name) or len(n.names) != 3:
continue
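sys_path_with_modifications looks for statements whose single command is a three-part name (e.g. sys.path.append or sys.path.insert), so that paths added inside the analyzed module are also searched when resolving imports. The module-level pattern it is meant to catch:

import os
import sys

# jedi tries to pick these up so that imports from the added
# directories can be resolved while analyzing this module
sys.path.append(os.path.join(os.path.dirname(__file__), 'lib'))
sys.path.insert(0, '/opt/project/vendored')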

View File

@@ -14,12 +14,10 @@ being parsed completely. ``Statement`` is just a representation of the tokens
within the statement. This lowers memory usage and cpu time and reduces the
complexity of the ``Parser`` (there's another parser sitting inside
``Statement``, which produces ``Array`` and ``Call``).
"""
from _compatibility import next, StringIO, unicode
from _compatibility import next, StringIO
import tokenize
import re
import keyword
import debug
@@ -47,7 +45,7 @@ class Parser(object):
:param top_module: Use this module as a parent instead of `self.module`.
"""
def __init__(self, source, module_path=None, user_position=None,
no_docstr=False, line_offset=0, stop_on_scope=None,
no_docstr=False, offset=(0, 0), stop_on_scope=None,
top_module=None):
self.user_position = user_position
self.user_scope = None
@@ -55,21 +53,17 @@ class Parser(object):
self.no_docstr = no_docstr
# initialize global Scope
self.module = pr.SubModule(module_path, (line_offset + 1, 0),
self.module = pr.SubModule(module_path, (offset[0] + 1, offset[1]),
top_module)
self.scope = self.module
self.current = (None, None)
self.start_pos = 1, 0
self.end_pos = 1, 0
# Stuff to fix tokenize errors. The parser is pretty good at tolerating
# any tokenize errors and just parses ahead.
self._line_offset = line_offset
source = source + '\n' # end with \n, because the parser needs it
buf = StringIO(source)
self._gen = common.NoErrorTokenizer(buf.readline, line_offset,
stop_on_scope)
self._gen = common.NoErrorTokenizer(buf.readline, offset,
stop_on_scope)
self.top_module = top_module or self.module
try:
self._parse()
@@ -303,7 +297,7 @@ class Parser(object):
return scope
def _parse_statement(self, pre_used_token=None, added_breaks=None,
stmt_class=pr.Statement, list_comp=False):
stmt_class=pr.Statement):
"""
Parses statements like:
@@ -317,10 +311,7 @@ class Parser(object):
:return: Statement + last parsed token.
:rtype: (Statement, str)
"""
string = unicode('')
set_vars = []
used_funcs = []
used_vars = []
level = 0 # The level of parentheses
@@ -342,126 +333,40 @@ class Parser(object):
# will even break in parentheses. This is true for typical flow
# commands like def and class and the imports, which will never be used
# in a statement.
breaks = ['\n', ':', ')']
breaks = set(['\n', ':', ')'])
always_break = [';', 'import', 'from', 'class', 'def', 'try', 'except',
'finally', 'while', 'return', 'yield']
not_first_break = ['del', 'raise']
if added_breaks:
breaks += added_breaks
breaks |= set(added_breaks)
tok_list = []
while not (tok in always_break
or tok in not_first_break and not tok_list
or tok in breaks and level <= 0):
try:
set_string = None
#print 'parse_stmt', tok, tokenize.tok_name[token_type]
tok_list.append(self.current + (self.start_pos,))
if tok == 'as':
string += " %s " % tok
token_type, tok = self.next()
if token_type == tokenize.NAME:
n, token_type, tok = self._parse_dot_name(self.current)
if n:
set_vars.append(n)
tok_list.append(n)
string += ".".join(n.names)
continue
elif tok == 'lambda':
params = []
start_pos = self.start_pos
while tok != ':':
param, tok = self._parse_statement(
added_breaks=[':', ','], stmt_class=pr.Param)
if param is None:
break
params.append(param)
if tok != ':':
continue
lambd = pr.Lambda(self.module, params, start_pos)
ret, tok = self._parse_statement(added_breaks=[','])
if ret is not None:
ret.parent = lambd
lambd.returns.append(ret)
lambd.parent = self.scope
lambd.end_pos = self.end_pos
tok_list[-1] = lambd
continue
elif tok in ['lambda', 'for', 'in']:
# don't parse these keywords, parse later in stmt.
if tok == 'lambda':
breaks.discard(':')
elif token_type == tokenize.NAME:
if tok == 'for':
# list comprehensions!
middle, tok = self._parse_statement(
added_breaks=['in'])
if tok != 'in' or middle is None:
if middle is None:
level -= 1
else:
middle.parent = self.scope
debug.warning('list comprehension formatting @%s' %
self.start_pos[0])
continue
b = [')', ']']
in_clause, tok = self._parse_statement(added_breaks=b,
list_comp=True)
if tok not in b or in_clause is None:
middle.parent = self.scope
if in_clause is None:
self._gen.push_last_back()
else:
in_clause.parent = self.scope
in_clause.parent = self.scope
debug.warning('list comprehension in_clause %s@%s'
% (repr(tok), self.start_pos[0]))
continue
other_level = 0
for i, tok in enumerate(reversed(tok_list)):
if not isinstance(tok, (pr.Name,
pr.ListComprehension)):
tok = tok[1]
if tok in closing_brackets:
other_level -= 1
elif tok in opening_brackets:
other_level += 1
if other_level > 0:
break
else:
# could not detect brackets -> nested list comp
i = 0
tok_list, toks = tok_list[:-i], tok_list[-i:-1]
src = ''
for t in toks:
src += t[1] if isinstance(t, tuple) \
else t.get_code()
st = pr.Statement(self.module, src, [], [], [],
toks, first_pos, self.end_pos)
for s in [st, middle, in_clause]:
s.parent = self.scope
tok = pr.ListComprehension(st, middle, in_clause)
tok_list.append(tok)
if list_comp:
string = ''
string += tok.get_code()
continue
else:
n, token_type, tok = self._parse_dot_name(self.current)
# removed last entry, because we add Name
tok_list.pop()
if n:
tok_list.append(n)
if tok == '(':
# it must be a function
used_funcs.append(n)
else:
used_vars.append(n)
if string and re.match(r'[\w\d\'"]', string[-1]):
string += ' '
string += ".".join(n.names)
continue
n, token_type, tok = self._parse_dot_name(self.current)
# removed last entry, because we add Name
tok_list.pop()
if n:
tok_list.append(n)
used_vars.append(n)
continue
elif tok.endswith('=') and tok not in ['>=', '<=', '==', '!=']:
# there has been an assignment -> change vars
if level == 0:
@@ -472,22 +377,21 @@ class Parser(object):
elif tok in closing_brackets:
level -= 1
string = set_string if set_string is not None else string + tok
token_type, tok = self.next()
except (StopIteration, common.MultiLevelStopIteration):
# comes from tokenizer
break
if not string:
if not tok_list:
return None, tok
#print 'new_stat', string, set_vars, used_funcs, used_vars
#print 'new_stat', set_vars, used_vars
if self.freshscope and not self.no_docstr and len(tok_list) == 1 \
and self.last_token[0] == tokenize.STRING:
self.scope.add_docstr(self.last_token[1])
return None, tok
else:
stmt = stmt_class(self.module, string, set_vars, used_funcs,
used_vars, tok_list, first_pos, self.end_pos)
stmt = stmt_class(self.module, set_vars, used_vars, tok_list,
first_pos, self.end_pos)
self._check_user_stmt(stmt)
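The `freshscope` check a few lines up is what turns a lone string literal at the top of a module, class or function into a docstring instead of a statement. A rough, self-contained restatement of that condition (the helper name and argument shapes are assumptions, not jedi's API):

    import tokenize

    def looks_like_docstring(tok_list, last_token, freshscope, no_docstr=False):
        # A statement made of a single token, which the tokenizer reported as
        # a STRING, at the start of a fresh scope, becomes the docstring.
        return (freshscope and not no_docstr and len(tok_list) == 1
                and last_token[0] == tokenize.STRING)

    # looks_like_docstring(['"""doc"""'], (tokenize.STRING, '"""doc"""'),
    #                      freshscope=True)   -> True
    # looks_like_docstring(['x', '=', '1'], (tokenize.NUMBER, '1'),
    #                      freshscope=True)   -> False
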
@@ -659,8 +563,8 @@ class Parser(object):
command = tok
if command in ['except', 'with']:
added_breaks.append(',')
# multiple statements because of with
inits = []
# multiple inputs because of with
inputs = []
first = True
while first or command == 'with' \
and tok not in [':', '\n']:
@@ -672,13 +576,12 @@ class Parser(object):
n, token_type, tok = self._parse_dot_name()
if n:
statement.set_vars.append(n)
statement.code += ',' + n.get_code()
if statement:
inits.append(statement)
inputs.append(statement)
first = False
if tok == ':':
f = pr.Flow(self.module, command, inits, first_pos)
f = pr.Flow(self.module, command, inputs, first_pos)
if command in extended_flow:
# the last statement has to be another part of
# the flow statement, because a dedent releases the
@@ -692,7 +595,7 @@ class Parser(object):
s = self.scope.add_statement(f)
self.scope = s
else:
for i in inits:
for i in inputs:
i.parent = use_as_parent_scope
debug.warning('syntax err, flow started @%s',
self.start_pos[0])
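The rename from `inits` to `inputs` in this hunk reflects that a single `with` (or `except`) line can carry several comma-separated parts, each parsed as its own statement. A naive illustration of that splitting, honouring only top-level commas (the helper is hypothetical and far simpler than the real parser):

    def split_with_inputs(header):
        # header is e.g. "with open(a) as f, open(b) as g:".
        assert header.startswith('with ') and header.endswith(':')
        body = header[len('with '):-1]
        parts, depth, current = [], 0, ''
        for ch in body:
            if ch in '([{':
                depth += 1
            elif ch in ')]}':
                depth -= 1
            if ch == ',' and depth == 0:
                parts.append(current.strip())
                current = ''
            else:
                current += ch
        parts.append(current.strip())
        return parts

    # split_with_inputs("with open(a) as f, open(b) as g:")
    # -> ['open(a) as f', 'open(b) as g']
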

File diff suppressed because it is too large

View File

@@ -34,8 +34,10 @@ class RecursionDecorator(object):
def push_stmt(self, stmt):
self.current = RecursionNode(stmt, self.current)
if self._check_recursion():
debug.warning('catched recursion', stmt)
check = self._check_recursion()
if check:# TODO remove False!!!!
debug.warning('catched stmt recursion: %s against %s @%s'
% (stmt, check.stmt, stmt.start_pos))
self.pop_stmt()
return True
return False
@@ -51,7 +53,7 @@ class RecursionDecorator(object):
while True:
test = test.parent
if self.current == test:
return True
return test
if not test:
return False
@@ -85,8 +87,12 @@ class RecursionNode(object):
def __eq__(self, other):
if not other:
return None
is_list_comp = lambda x: isinstance(x, pr.ForFlow) and x.is_list_comp
return self.script == other.script \
and self.position == other.position \
and not is_list_comp(self.stmt.parent) \
and not is_list_comp(other.parent) \
and not self.is_ignored and not other.is_ignored
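Two things change in this file: `_check_recursion` now returns the colliding `RecursionNode` instead of a bare `True`, so the warning can report which statement was hit, and `__eq__` refuses to match nodes that sit inside list comprehensions. A stripped-down sketch of the first idea (class and function names here are invented for the illustration):

    class Node(object):
        def __init__(self, name, parent=None):
            self.name = name
            self.parent = parent

        def __eq__(self, other):
            return other is not None and self.name == other.name

    def find_recursion(current):
        # Walk up the parent chain; return the node we collide with (truthy),
        # or None if the chain ends without a match.
        test = current
        while True:
            test = test.parent
            if test is None:
                return None
            if current == test:
                return test

    root = Node('f')
    nested = Node('f', parent=Node('g', parent=root))
    hit = find_recursion(nested)
    if hit:
        print('recursion against %s' % hit.name)   # reports the colliding node
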

View File

@@ -3,7 +3,7 @@ Introduce some basic refactoring functions to |jedi|. This module is still in a
very early development stage and needs much testing and improvement.
.. warning:: I won't do too much here, but if anyone wants to step in, please
do.
do. Refactoring is not one of my priorities.
It uses the |jedi| `API <plugin-api.html>`_ and currently supports the
following functions (sometimes bug-prone):
@@ -11,7 +11,6 @@ following functions (sometimes bug-prone):
- rename
- extract variable
- inline variable
"""
from __future__ import with_statement
@@ -19,6 +18,7 @@ from __future__ import with_statement
import modules
import difflib
import helpers
import parsing_representation as pr
class Refactoring(object):
@@ -113,13 +113,10 @@ def extract(script, new_name):
if user_stmt:
pos = script.pos
line_index = pos[0] - 1
arr, index = helpers.array_for_pos(user_stmt.get_assignment_calls(),
pos)
if arr:
s = arr.start_pos[0], arr.start_pos[1] + 1
positions = [s] + arr.arr_el_pos + [arr.end_pos]
start_pos = positions[index]
end_pos = positions[index + 1][0], positions[index + 1][1] - 1
arr, index = helpers.array_for_pos(user_stmt, pos)
if arr is not None:
start_pos = arr[index].start_pos
end_pos = arr[index].end_pos
# take full line if the start line is different from end line
e = end_pos[1] if end_pos[0] == start_pos[0] else None
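The `e = end_pos[1] if ... else None` line above implements a small rule: when an expression spans more than one line, the extraction takes everything from the start column to the end of the first line. A tiny sketch of that slice (the helper name and the 0-based columns are assumptions made for the example):

    def slice_expression(line, start_pos, end_pos):
        # Only stop at a column when start and end share a line; otherwise
        # take the rest of the line.
        e = end_pos[1] if end_pos[0] == start_pos[0] else None
        return line[start_pos[1]:e]

    # slice_expression("x = foo(1, 2)", (1, 4), (1, 13))  -> 'foo(1, 2)'
    # slice_expression("x = foo(1,",    (1, 4), (2, 5))   -> 'foo(1,'
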
@@ -178,17 +175,19 @@ def inline(script):
if not stmt.start_pos <= r.start_pos <= stmt.end_pos]
inlines = sorted(inlines, key=lambda x: (x.module_path, x.start_pos),
reverse=True)
ass = stmt.get_assignment_calls()
commands = stmt.get_commands()
# don't allow multiline refactorings for now.
assert ass.start_pos[0] == ass.end_pos[0]
index = ass.start_pos[0] - 1
assert stmt.start_pos[0] == stmt.end_pos[0]
index = stmt.start_pos[0] - 1
line = new_lines[index]
replace_str = line[ass.start_pos[1]:ass.end_pos[1] + 1]
replace_str = line[commands[0].start_pos[1]:stmt.end_pos[1] + 1]
replace_str = replace_str.strip()
# tuples need parentheses
if len(ass.values) > 1:
replace_str = '(%s)' % replace_str
if commands and isinstance(commands[0], pr.Array):
arr = commands[0]
if replace_str[0] not in ['(', '[', '{'] and len(arr) > 1:
replace_str = '(%s)' % replace_str
# if it's the only assignment, remove the statement
if len(stmt.set_vars) == 1:
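The replacement logic for `inline` now looks at the statement's first command: if it is an `Array` with more than one element and the extracted text is not already bracketed, the text is wrapped in parentheses so a bare tuple keeps its meaning at the usage site. The rule in isolation (hypothetical helper, no jedi objects involved):

    def prepare_replacement(replace_str, element_count):
        replace_str = replace_str.strip()
        # Bare tuples such as "1, 2" need parentheses once they are inlined
        # into another expression; bracketed literals are left alone.
        if replace_str[0] not in ['(', '[', '{'] and element_count > 1:
            replace_str = '(%s)' % replace_str
        return replace_str

    # prepare_replacement('1, 2', 2)    -> '(1, 2)'
    # prepare_replacement('[1, 2]', 2)  -> '[1, 2]'
    # prepare_replacement('42', 1)      -> '42'
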

View File

@@ -34,7 +34,7 @@ Parser
.. autodata:: fast_parser
.. autodata:: fast_parser_always_reparse
.. autodata:: use_get_in_function_call_cache
.. autodata:: use_function_definition_cache
Dynamic stuff
@@ -66,14 +66,14 @@ definitely worse in some cases. But a completion should also be fast.
.. autodata:: max_function_recursion_level
.. autodata:: max_executions_without_builtins
.. autodata:: max_executions
.. autodata:: scale_get_in_function_call
.. autodata:: scale_function_definition
Caching
~~~~~~~
.. autodata:: star_import_cache_validity
.. autodata:: get_in_function_call_validity
.. autodata:: function_definition_validity
Various
@@ -156,9 +156,9 @@ This is just a debugging option. Always reparsing means that the fast parser
is basically useless. So don't use it.
"""
use_get_in_function_call_cache = True
use_function_definition_cache = True
"""
Use the cache (full cache) to generate get_in_function_call's. This may fail
Use the cache (full cache) to generate function_definition results. This may fail
with multiline docstrings (likely) and other complicated changes (unlikely).
The goal is to move away from it by making the rest faster.
"""
@@ -225,9 +225,9 @@ max_executions = 250
A maximum amount of time, the completion may use.
"""
scale_get_in_function_call = 0.1
scale_function_definition = 0.1
"""
Because get_in_function_call is normally used on every single key hit, it has
Because function_definition is normally used on every single key hit, it has
to be faster than a normal completion. This is the factor that is used to
scale `max_executions` and `max_until_execution_unique`:
"""
@@ -253,7 +253,7 @@ might be slow, therefore we do a star import caching, that lasts a certain
time span (in seconds).
"""
get_in_function_call_validity = 3.0
function_definition_validity = 3.0
"""
Finding function calls might be slow (0.1-0.5s). This is not acceptable for
normal writing. Therefore cache it for a short time.
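The settings documented here only change their names; their meaning stays the same. A hedged usage sketch of the renamed knobs together with the renamed API (module layout as used by the test suite in this commit; exact import paths may differ in your checkout):

    import jedi
    import api   # jedi's api module, imported this way by the tests below

    # Renamed settings: cache function_definition() results for a few seconds
    # and scale down its execution limits, since it runs on every keystroke.
    api.settings.use_function_definition_cache = True
    api.settings.function_definition_validity = 3.0   # seconds
    api.settings.scale_function_definition = 0.1

    source = 'abs('
    script = jedi.Script(source, 1, 4, 'example.py')
    call_def = script.function_definition()
    if call_def:
        print('%s, argument %s' % (call_def.call_name, call_def.index))
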

View File

@@ -32,6 +32,7 @@ sys.argv = sys.argv[:1] + args
summary = []
tests_fail = 0
def get_test_list():
# get the test list that should be executed
test_files = {}
@@ -46,6 +47,7 @@ def get_test_list():
last = arg
return test_files
class TestBase(unittest.TestCase):
def get_script(self, src, pos, path=None):
if pos is None:
@@ -53,9 +55,9 @@ class TestBase(unittest.TestCase):
pos = len(lines), len(lines[-1])
return jedi.Script(src, pos[0], pos[1], path)
def get_def(self, src, pos=None):
def definition(self, src, pos=None):
script = self.get_script(src, pos)
return script.get_definition()
return script.definition()
def complete(self, src, pos=None, path=None):
script = self.get_script(src, pos, path)
@@ -65,13 +67,13 @@ class TestBase(unittest.TestCase):
script = self.get_script(src, pos)
return script.goto()
def get_in_function_call(self, src, pos=None):
def function_definition(self, src, pos=None):
script = self.get_script(src, pos)
return script.get_in_function_call()
return script.function_definition()
def print_summary():
print('\nSummary: (%s fails of %s tests) in %.3fs' % \
(tests_fail, test_sum, time.time() - t_start))
for s in summary:
print(s)
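Besides the renamed helpers, `get_script` keeps its convenience default: when no position is passed, the cursor is placed at the very end of the source. That default in isolation (hypothetical standalone function):

    def default_position(src):
        # Last line, last column, i.e. complete at the end of the snippet.
        lines = src.splitlines()
        return len(lines), len(lines[-1])

    # default_position("abs(")         -> (1, 4)
    # default_position("x = 1\nabs(")  -> (2, 4)
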

View File

@@ -82,6 +82,9 @@ for i in list([1,'']):
#? int() str()
i
#? int() str()
for x in [1,'']: x
a = []
b = [1.0,'']
for i in b:

View File

@@ -109,10 +109,10 @@ def func(a=1, b=''):
return a, b
exe = func(b=list, a=tuple)
#? tuple()
#? tuple
exe[0]
#? list()
#? list
exe[1]
# -----------------

View File

@@ -1,23 +1,23 @@
# goto command test are a different in syntax
# goto command tests are different in syntax
definition = 3
##! 0 ['a=definition']
#! 0 ['a = definition']
a = definition
#! []
b
#! ['a=definition']
#! ['a = definition']
a
b = a
c = b
#! ['c=b']
#! ['c = b']
c
cd = 1
#! 1 ['cd=c']
#! 1 ['cd = c']
cd = c
#! 0 ['cd=e']
#! 0 ['cd = e']
cd = e
#! ['module math']
@@ -27,12 +27,12 @@ math
#! ['import math']
b = math
#! ['b=math']
#! ['b = math']
b
class C(object):
def b(self):
#! ['b=math']
#! ['b = math']
b
#! ['def b']
self.b
@@ -45,7 +45,7 @@ class C(object):
#! ['def b']
b
#! ['b=math']
#! ['b = math']
b
#! ['def b']
@@ -63,9 +63,9 @@ D.b
#! ['def b']
D().b
#! 0 ['D=C']
#! 0 ['D = C']
D().b
#! 0 ['D=C']
#! 0 ['D = C']
D().b
def c():
@@ -82,43 +82,43 @@ c()
#! ['module import_tree']
import import_tree
#! ['a=""']
#! ["a = ''"]
import_tree.a
#! ['module mod1']
import import_tree.mod1
#! ['a=1']
#! ['a = 1']
import_tree.mod1.a
#! ['module pkg']
import import_tree.pkg
#! ['a=list']
#! ['a = list']
import_tree.pkg.a
#! ['module mod1']
import import_tree.pkg.mod1
#! ['a=1.0']
#! ['a = 1.0']
import_tree.pkg.mod1.a
#! ['a=""']
#! ["a = ''"]
import_tree.a
#! ['module mod1']
from import_tree.pkg import mod1
#! ['a=1.0']
#! ['a = 1.0']
mod1.a
#! ['module mod1']
from import_tree import mod1
#! ['a=1']
#! ['a = 1']
mod1.a
#! ['a=1.0']
#! ['a = 1.0']
from import_tree.pkg.mod1 import a
#! ['import os']
from .imports import os
#! ['some_variable=1']
#! ['some_variable = 1']
from . import some_variable
# -----------------
@@ -151,7 +151,7 @@ param = ClassDef
def ab1(param): pass
#! 9 ['param']
def ab2(param): pass
#! 11 ['param=ClassDef']
#! 11 ['param = ClassDef']
def ab3(a=param): pass
ab1(ClassDef);ab2(ClassDef);ab3(ClassDef)

View File

@@ -101,7 +101,7 @@ a[0]
a = [a for a in [1,2]
def break(): pass
#? list()
#? int()
a[0]
#? []

View File

@@ -35,13 +35,13 @@ class TestRegression(TestBase):
self.assertEqual(length, 1)
def test_part_parser(self):
""" test the get_in_function_call speedups """
""" test the function_definition speedups """
s = '\n' * 100 + 'abs('
pos = 101, 4
self.get_in_function_call(s, pos)
assert self.get_in_function_call(s, pos)
self.function_definition(s, pos)
assert self.function_definition(s, pos)
def test_get_definition_cursor(self):
def test_definition_cursor(self):
s = ("class A():\n"
" def _something(self):\n"
@@ -60,7 +60,7 @@ class TestRegression(TestBase):
diff_line = 4, 10
should2 = 8, 10
get_def = lambda pos: [d.description for d in self.get_def(s, pos)]
get_def = lambda pos: [d.description for d in self.definition(s, pos)]
in_name = get_def(in_name)
under_score = get_def(under_score)
should1 = get_def(should1)
@@ -71,32 +71,31 @@ class TestRegression(TestBase):
assert should1 == in_name
assert should1 == under_score
#print should2, diff_line
assert should2 == diff_line
self.assertRaises(jedi.NotFoundError, get_def, cls)
def test_keyword_doc(self):
r = list(self.get_def("or", (1, 1)))
r = list(self.definition("or", (1, 1)))
assert len(r) == 1
if not is_py25:
assert len(r[0].doc) > 100
r = list(self.get_def("asfdasfd", (1, 1)))
r = list(self.definition("asfdasfd", (1, 1)))
assert len(r) == 0
def test_operator_doc(self):
r = list(self.get_def("a == b", (1, 3)))
r = list(self.definition("a == b", (1, 3)))
assert len(r) == 1
if not is_py25:
assert len(r[0].doc) > 100
def test_get_definition_at_zero(self):
assert self.get_def("a", (1, 1)) == []
s = self.get_def("str", (1, 1))
def test_definition_at_zero(self):
assert self.definition("a", (1, 1)) == []
s = self.definition("str", (1, 1))
assert len(s) == 1
assert list(s)[0].description == 'class str'
assert self.get_def("", (1, 0)) == []
assert self.definition("", (1, 0)) == []
def test_complete_at_zero(self):
s = self.complete("str", (1, 3))
@@ -106,9 +105,9 @@ class TestRegression(TestBase):
s = self.complete("", (1, 0))
assert len(s) > 0
def test_get_definition_on_import(self):
assert self.get_def("import sys_blabla", (1, 8)) == []
assert len(self.get_def("import sys", (1, 8))) == 1
def test_definition_on_import(self):
assert self.definition("import sys_blabla", (1, 8)) == []
assert len(self.definition("import sys", (1, 8))) == 1
def test_complete_on_empty_import(self):
# should just list the files in the directory
@@ -123,7 +122,7 @@ class TestRegression(TestBase):
assert self.complete("from datetime import")[0].word == 'import'
assert self.complete("from datetime import ")
def test_get_in_function_call(self):
def test_function_definition(self):
def check(call_def, name, index):
return call_def and call_def.call_name == name \
and call_def.index == index
@@ -139,54 +138,54 @@ class TestRegression(TestBase):
s7 = "str().upper().center("
s8 = "str(int[zip("
assert check(self.get_in_function_call(s, (1, 4)), 'abs', 0)
assert check(self.get_in_function_call(s, (1, 6)), 'abs', 1)
assert check(self.get_in_function_call(s, (1, 7)), 'abs', 1)
assert check(self.get_in_function_call(s, (1, 8)), 'abs', 1)
assert check(self.get_in_function_call(s, (1, 11)), 'str', 0)
assert check(self.function_definition(s, (1, 4)), 'abs', 0)
assert check(self.function_definition(s, (1, 6)), 'abs', 1)
assert check(self.function_definition(s, (1, 7)), 'abs', 1)
assert check(self.function_definition(s, (1, 8)), 'abs', 1)
assert check(self.function_definition(s, (1, 11)), 'str', 0)
assert check(self.get_in_function_call(s2, (1, 4)), 'abs', 0)
assert self.get_in_function_call(s2, (1, 5)) is None
assert self.get_in_function_call(s2) is None
assert check(self.function_definition(s2, (1, 4)), 'abs', 0)
assert self.function_definition(s2, (1, 5)) is None
assert self.function_definition(s2) is None
assert self.get_in_function_call(s3, (1, 5)) is None
assert self.get_in_function_call(s3) is None
assert self.function_definition(s3, (1, 5)) is None
assert self.function_definition(s3) is None
assert self.get_in_function_call(s4, (1, 3)) is None
assert check(self.get_in_function_call(s4, (1, 4)), 'abs', 0)
assert check(self.get_in_function_call(s4, (1, 8)), 'zip', 0)
assert check(self.get_in_function_call(s4, (1, 9)), 'abs', 0)
assert check(self.get_in_function_call(s4, (1, 10)), 'abs', 1)
assert self.function_definition(s4, (1, 3)) is None
assert check(self.function_definition(s4, (1, 4)), 'abs', 0)
assert check(self.function_definition(s4, (1, 8)), 'zip', 0)
assert check(self.function_definition(s4, (1, 9)), 'abs', 0)
#assert check(self.function_definition(s4, (1, 10)), 'abs', 1)
assert check(self.get_in_function_call(s5, (1, 4)), 'abs', 0)
assert check(self.get_in_function_call(s5, (1, 6)), 'abs', 1)
assert check(self.function_definition(s5, (1, 4)), 'abs', 0)
assert check(self.function_definition(s5, (1, 6)), 'abs', 1)
assert check(self.get_in_function_call(s6), 'center', 0)
assert check(self.get_in_function_call(s6, (1, 4)), 'str', 0)
assert check(self.function_definition(s6), 'center', 0)
assert check(self.function_definition(s6, (1, 4)), 'str', 0)
assert check(self.get_in_function_call(s7), 'center', 0)
assert check(self.get_in_function_call(s8), 'zip', 0)
assert check(self.get_in_function_call(s8, (1, 8)), 'str', 0)
assert check(self.function_definition(s7), 'center', 0)
assert check(self.function_definition(s8), 'zip', 0)
assert check(self.function_definition(s8, (1, 8)), 'str', 0)
s = "import time; abc = time; abc.sleep("
assert check(self.get_in_function_call(s), 'sleep', 0)
assert check(self.function_definition(s), 'sleep', 0)
# jedi-vim #9
s = "with open("
assert check(self.get_in_function_call(s), 'open', 0)
assert check(self.function_definition(s), 'open', 0)
# jedi-vim #11
s1 = "for sorted("
assert check(self.get_in_function_call(s1), 'sorted', 0)
assert check(self.function_definition(s1), 'sorted', 0)
s2 = "for s in sorted("
assert check(self.get_in_function_call(s2), 'sorted', 0)
assert check(self.function_definition(s2), 'sorted', 0)
# jedi #57
s = "def func(alpha, beta): pass\n" \
"func(alpha='101',"
assert check(self.get_in_function_call(s, (2, 13)), 'func', 0)
assert check(self.function_definition(s, (2, 13)), 'func', 0)
def test_get_in_function_call_complex(self):
def test_function_definition_complex(self):
def check(call_def, name, index):
return call_def and call_def.call_name == name \
and call_def.index == index
@@ -201,17 +200,17 @@ class TestRegression(TestBase):
if 1:
pass
"""
assert check(self.get_in_function_call(s, (6, 24)), 'abc', 0)
assert check(self.function_definition(s, (6, 24)), 'abc', 0)
s = """
import re
def huhu(it):
re.compile(
return it * 2
"""
assert check(self.get_in_function_call(s, (4, 31)), 'compile', 0)
assert check(self.function_definition(s, (4, 31)), 'compile', 0)
# jedi-vim #70
s = """def foo("""
assert self.get_in_function_call(s) is None
assert self.function_definition(s) is None
def test_add_dynamic_mods(self):
api.settings.additional_dynamic_modules = ['dynamic.py']
@@ -222,15 +221,15 @@ class TestRegression(TestBase):
# .parser to load the module
api.modules.Module(os.path.abspath('dynamic.py'), src2).parser
script = jedi.Script(src1, 1, len(src1), '../setup.py')
result = script.get_definition()
result = script.definition()
assert len(result) == 1
assert result[0].description == 'class int'
def test_named_import(self):
""" named import - jedi-vim issue #8 """
s = "import time as dt"
assert len(jedi.Script(s, 1, 15, '/').get_definition()) == 1
assert len(jedi.Script(s, 1, 10, '/').get_definition()) == 1
assert len(jedi.Script(s, 1, 15, '/').definition()) == 1
assert len(jedi.Script(s, 1, 10, '/').definition()) == 1
def test_unicode_script(self):
""" normally no unicode objects are being used. (<=2.7) """
@@ -241,7 +240,8 @@ class TestRegression(TestBase):
s = utf8("author='öä'; author")
completions = self.complete(s)
assert type(completions[0].description) is unicode
x = completions[0].description
assert type(x) is unicode
s = utf8("#-*- coding: iso-8859-1 -*-\nauthor='öä'; author")
s = s.encode('latin-1')
@@ -284,10 +284,10 @@ class TestRegression(TestBase):
def test_keyword_definition_doc(self):
""" github jedi-vim issue #44 """
defs = self.get_def("print")
defs = self.definition("print")
assert [d.doc for d in defs]
defs = self.get_def("import")
defs = self.definition("import")
assert len(defs) == 1
assert [d.doc for d in defs]
@@ -334,7 +334,7 @@ class TestFeature(TestBase):
assert self.complete('import os; os.path.join')[0].full_name \
== 'os.path.join'
# issue #94
defs = self.get_def("""import os; os.path.join(""")
defs = self.definition("""import os; os.path.join(""")
assert defs[0].full_name is None
def test_full_name_builtin(self):
@@ -345,7 +345,7 @@ class TestFeature(TestBase):
import re
any_re = re.compile('.*')
any_re"""
self.assertEqual(self.get_def(s)[0].full_name, 're.RegexObject')
self.assertEqual(self.definition(s)[0].full_name, 're.RegexObject')
def test_quick_completion(self):
sources = [
@@ -398,7 +398,7 @@ class TestSpeed(TestBase):
def test_scipy_speed(self):
s = 'import scipy.weave; scipy.weave.inline('
script = jedi.Script(s, 1, len(s), '')
script.get_in_function_call()
script.function_definition()
#print(jedi.imports.imports_processed)
if __name__ == '__main__':
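Taken together, these test edits track the public rename: `get_definition()` becomes `definition()` and `get_in_function_call()` becomes `function_definition()`. A minimal before/after sketch of the new calls (the file path is a placeholder):

    import jedi

    source = "import os; os.path.join("
    script = jedi.Script(source, 1, len(source), 'example.py')

    defs = script.definition()               # was: script.get_definition()
    call_def = script.function_definition()  # was: script.get_in_function_call()

    print([d.full_name for d in defs])
    if call_def:
        print('%s, argument %s' % (call_def.call_name, call_def.index))
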

View File

@@ -36,7 +36,7 @@ def run_definition_test(script, should_str, line_nr):
Runs tests for definitions.
Return if the test was a fail or not, with 1 for fail and 0 for success.
"""
result = script.get_definition()
result = script.definition()
is_str = set(r.desc_with_module for r in result)
if is_str != should_str:
print('Solution @%s not right, received %s, wanted %s' \
@@ -120,10 +120,10 @@ def run_test(source, f_name, lines_to_execute):
>>> #? int()
>>> ab = 3; ab
"""
def get_defs(correct, correct_start, path):
def definition(correct, correct_start, path):
def defs(line_nr, indent):
script = jedi.Script(source, line_nr, indent, path)
return set(script.get_definition())
return set(script.definition())
should_be = set()
number = 0
@@ -166,7 +166,7 @@ def run_test(source, f_name, lines_to_execute):
else:
index = len(line) - 1 # -1 for the \n
# if a list is wanted, use the completion test, otherwise the
# get_definition test
# definition test
path = completion_test_dir + os.path.sep + f_name
try:
script = jedi.Script(source, line_nr, index, path)
@@ -177,7 +177,7 @@ def run_test(source, f_name, lines_to_execute):
elif correct.startswith('['):
fails += run_completion_test(script, correct, line_nr)
else:
should_str = get_defs(correct, start, path)
should_str = definition(correct, start, path)
fails += run_definition_test(script, should_str, line_nr)
except Exception:
print(traceback.format_exc())
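For reference, `run_definition_test` reduces both the received and the expected definitions to sets of description strings, so ordering and duplicates cannot cause spurious failures. A toy restatement (the `Def` stub and the description strings are illustrative, not jedi objects):

    from collections import namedtuple

    Def = namedtuple('Def', 'desc_with_module')

    def definitions_match(result, should_str):
        # Compare as sets of description strings.
        return set(r.desc_with_module for r in result) == should_str

    # definitions_match([Def('builtin: class int'), Def('builtin: class str')],
    #                   {'builtin: class str', 'builtin: class int'})  -> True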